diff --git a/README.md b/README.md
index 0fecdb92f8ff1ff8acc3bf915516be3c3de9023e..3fa8e3d85675ed0d0a8ba072b8a5c558070a76e5 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,122 @@
----
-title: GaussianAnything AIGC3D
-emoji: 📈
-colorFrom: indigo
-colorTo: blue
-sdk: gradio
-sdk_version: 5.6.0
-app_file: app.py
-pinned: false
-license: other
-short_description: GaussianAnything generates high-quality and editable 2DGS.
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+# GaussianAnything: arXiv 2024
+
+## Setup the environment (the same env as LN3Diff)
+
+```bash
+conda create -n ga python=3.10
+conda activate ga
+pip install -r requirements.txt # will install the surfel Gaussians environment automatically.
+```
+
+Then, install pytorch3d with
+```bash
+pip install git+https://github.com/facebookresearch/pytorch3d.git@stable
+```
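+
+As a quick sanity check (a minimal sketch, not part of the release scripts), you can verify that the key packages import cleanly inside the activated env:
+
+```python
+# Sanity check: PyTorch sees the GPU and pytorch3d imports cleanly.
+import torch
+import pytorch3d
+
+print(torch.__version__, torch.cuda.is_available())
+print(pytorch3d.__version__)
+```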
+
+
+### :dromedary_camel: TODO
+
+- [x] Release inference code and checkpoints.
+- [x] Release training code.
+- [x] Release pre-extracted latent codes for 3D diffusion training.
+- [ ] Release Gradio Demo.
+- [ ] Release the evaluation code.
+- [ ] Lint the code.
+
+
+# Inference
+
+Remember to change the ```$logdir``` in the bash file accordingly.
+
+To load the checkpoints automatically, replace ```/mnt/sfs-common/yslan/open-source``` with ```yslan/GaussianAnything/ckpts/checkpoints```.
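+
+For reference, a minimal sketch of fetching the released checkpoints programmatically (the repo id comes from the link above; the exact Hub folder layout is an assumption):
+
+```python
+# Minimal sketch: download the released checkpoints from the Hugging Face Hub.
+from huggingface_hub import snapshot_download
+
+# Assumption: the checkpoints live under ckpts/ in the yslan/GaussianAnything repo.
+local_root = snapshot_download(repo_id="yslan/GaussianAnything",
+                               allow_patterns=["ckpts/*"])
+print(local_root)  # point the checkpoint path in the bash files here
+```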
+
+
+
+## Text-2-3D:
+
+Please update the captions for 3D generation in ```datasets/caption-forpaper.txt```. To change the number of samples to be generated, please change ```$num_samples``` in the bash file.
+
+**stage-1**:
+```
+bash shell_scripts/release/inference/t23d/stage1-t23d.sh
+```
+Then, set the ```$stage_1_output_dir``` to the ```$logdir``` of the stage above.
+
+**stage-2**:
+```
+bash shell_scripts/release/inference/t23d/stage2-t23d.sh
+```
+
+The results will be dumped to ```./logs/t23d/stage-2```.
+
+## I23D (requires two-stage generation):
+
+Set the ```$data_dir``` accordingly. For demo images, please download them from [huggingface.co/yslan/GaussianAnything/demo-img](https://huggingface.co/yslan/GaussianAnything/tree/main/demo-img).
+
+**stage-1**:
+```
+bash shell_scripts/release/inference/i23d/i23d-stage1.sh
+```
+
+Then, set the ```$stage_1_output_dir``` to the ```$logdir``` of the stage above.
+
+**stage-2**:
+```
+bash shell_scripts/release/inference/i23d/i23d-stage2.sh
+```
+
+## 3D VAE Reconstruction:
+
+To encode a 3D asset into the latent point cloud, please download the pre-trained VAE checkpoint from [huggingface.co/yslan/GaussianAnything/ckpts/vae/model_rec1965000.pt](https://huggingface.co/yslan/GaussianAnything/blob/main/ckpts/vae/model_rec1965000.pt) to ```./checkpoint/model_rec1965000.pt```.
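+
+Equivalently, a minimal sketch of this download in Python (same file as the link above):
+
+```python
+# Minimal sketch: fetch the pre-trained VAE checkpoint into ./checkpoint/.
+import os
+import shutil
+from huggingface_hub import hf_hub_download
+
+os.makedirs("checkpoint", exist_ok=True)
+cached = hf_hub_download(repo_id="yslan/GaussianAnything",
+                         filename="ckpts/vae/model_rec1965000.pt")
+shutil.copy(cached, "./checkpoint/model_rec1965000.pt")
+```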
+
+Then, run the inference script
+
+```bash
+bash shell_scripts/release/inference/vae-3d.sh
+```
+
+This will encode the multi-view 3D renderings in ```./assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0``` into the point-cloud structured latent code, and export it (along with the 2DGS mesh) to ```./logs/latent_dir/```. The exported latent code will be used for efficient 3D diffusion training.
+
+
+
+# Training (Flow Matching 3D Generation)
+All training is conducted on 8 A100 (80GiB) GPUs with BF16 enabled. For training on V100 GPUs, please use FP32 training by setting ```--use_amp``` to False in the bash file. Feel free to tune ```$batch_size``` in the bash file to match your VRAM.
+
+To facilitate reproducing the performance, we have uploaded the pre-extracted point cloud-structured latent codes to [huggingface.co/yslan/GaussianAnything/dataset/latent.tar.gz](https://huggingface.co/yslan/GaussianAnything/blob/main/dataset/latent.tar.gz) (34GiB required). Please download the pre-extracted point cloud latent codes, unpack the archive, and set ```$mv_latent_dir``` in the bash file accordingly, as in the sketch below.
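+
+A minimal sketch of the download-and-extract step (the extraction target below is an assumption; any directory works as long as ```$mv_latent_dir``` points to it):
+
+```python
+# Minimal sketch: fetch and unpack the pre-extracted latent codes (~34 GiB).
+import tarfile
+from huggingface_hub import hf_hub_download
+
+tar_path = hf_hub_download(repo_id="yslan/GaussianAnything",
+                           filename="dataset/latent.tar.gz")
+with tarfile.open(tar_path, "r:gz") as f:
+    f.extractall("./dataset/latent")  # then set $mv_latent_dir to this path
+```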
+
+
+## Text to 3D:
+Please download the 3D captions from Hugging Face: [huggingface.co/yslan/GaussianAnything/dataset/text_captions_3dtopia.json](https://huggingface.co/yslan/GaussianAnything/blob/main/dataset/text_captions_3dtopia.json), and put the file under ```dataset```.
+
+
+Note that if you want to train on a specific class of Objaverse, just manually change the code at ```datasets/g_buffer_objaverse.py:3043```.
+
+**stage-1 training (point cloud generation)**:
+
+```
+bash shell_scripts/release/train/stage2-t23d/t23d-pcd-gen.sh
+```
+
+**stage-2 training (point cloud-conditioned KL feature generation)**:
+
+```
+bash shell_scripts/release/train/stage2-t23d/t23d-klfeat-gen.sh
+```
+
+## (single-view) Image to 3D
+Please download the G-buffer dataset first.
+
+**stage-1 training (point cloud generation)**:
+
+```
+bash shell_scripts/release/train/stage2-i23d/i23d-pcd-gen.sh
+```
+
+**stage-2 training (point cloud-conditioned KL feature generation)**:
+
+```
+bash shell_scripts/release/train/stage2-i23d/i23d-klfeat-gen.sh
+```
+
+
\ No newline at end of file
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c657a3220794ab9574ada584a7018a28ded9eab
--- /dev/null
+++ b/app.py
@@ -0,0 +1,397 @@
+import argparse
+import spaces
+import json
+import sys
+sys.path.append('.')
+import torch
+import torchvision
+from torchvision import transforms
+import numpy as np
+
+import os
+import gc
+import dnnlib
+from omegaconf import OmegaConf
+from PIL import Image
+from dnnlib.util import EasyDict
+
+import gradio as gr
+
+import rembg
+
+from huggingface_hub import hf_hub_download
+
+
+"""
+Generate a large batch of image samples from a model and save them as a large
+numpy array. This can be used to produce samples for FID evaluation.
+"""
+
+
+from pdb import set_trace as st
+import imageio
+import torch as th
+import torch.distributed as dist
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ NUM_CLASSES,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+ add_dict_to_argparser,
+ args_to_dict,
+ continuous_diffusion_defaults,
+ control_net_defaults,
+)
+
+th.backends.cuda.matmul.allow_tf32 = True
+th.backends.cudnn.allow_tf32 = True
+th.backends.cudnn.enabled = True
+
+from pathlib import Path
+
+from tqdm import tqdm, trange
+from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, AE_with_Diffusion, rendering_options_defaults, eg3d_options_default, dataset_defaults
+
+from datasets.shapenet import load_eval_data
+from torch.utils.data import Subset
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+from transport.train_utils import parse_transport_args
+
+from utils.infer_utils import remove_background, resize_foreground
+
+SEED = 0
+
+def resize_to_224(img):
+    # NB: despite the name, the image is resized to 518px, as required by the DINO encoder.
+    img = transforms.functional.resize(img, 518,
+                                       interpolation=transforms.InterpolationMode.LANCZOS)
+    return img
+
+
+def set_white_background(image):
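+    # Composite an RGBA image onto a white background using its alpha channel.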
+ image = np.array(image).astype(np.float32) / 255.0
+ mask = image[:, :, 3:4]
+ image = image[:, :, :3] * mask + (1 - mask)
+ image = Image.fromarray((image * 255.0).astype(np.uint8))
+ return image
+
+
+def check_input_image(input_image):
+ if input_image is None:
+ raise gr.Error("No image uploaded!")
+
+
+
+def main(args_1, args_2):
+
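+    # Single-process "distributed" setup so guided_diffusion's dist_util initializes on one GPU.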
+ os.environ['MASTER_ADDR'] = 'localhost'
+ os.environ['MASTER_PORT'] = '12355'
+ os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+ os.environ["RANK"] = "0"
+ os.environ["WORLD_SIZE"] = "1"
+
+ # args.rendering_kwargs = rendering_options_defaults(args)
+
+ dist_util.setup_dist(args_1)
+ logger.configure(dir=args_1.logdir)
+
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ # * set denoise model args
+ logger.log("creating model and diffusion...")
+ args_1.img_size = [args_1.image_size_encoder]
+ args_1.image_size = args_1.image_size_encoder # 224, follow the triplane size
+
+ args_2.img_size = [args_2.image_size_encoder]
+ args_2.image_size = args_2.image_size_encoder # 224, follow the triplane size
+
+ denoise_model_stage1, diffusion = create_model_and_diffusion(
+ **args_to_dict(args_1,
+ model_and_diffusion_defaults().keys()))
+
+ denoise_model_stage2, diffusion = create_model_and_diffusion(
+ **args_to_dict(args_2,
+ model_and_diffusion_defaults().keys()))
+
+ opts = eg3d_options_default()
+
+ denoise_model_stage1.to(dist_util.dev())
+ denoise_model_stage1.eval()
+ denoise_model_stage2.to(dist_util.dev())
+ denoise_model_stage2.eval()
+
+ # * auto-encoder reconstruction model
+ logger.log("creating 3DAE...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args_1,
+ encoder_and_nsr_defaults().keys()))
+
+ auto_encoder.to(dist_util.dev())
+ auto_encoder.eval()
+
+ # faster inference
+ # denoise_model = denoise_model.to(th.bfloat16)
+ # auto_encoder = auto_encoder.to(th.bfloat16)
+
+ # TODO, how to set the scale?
+ logger.log("create dataset")
+
+ if args_1.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ # load data if i23d
+ # if args.i23d:
+ # data = load_eval_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ # preprocess=auto_encoder.preprocess,
+ # **args_to_dict(args,
+ # dataset_defaults().keys()))
+ # else:
+ data = None # t23d sampling, only caption required
+
+
+ TrainLoop = {
+ 'flow_matching':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine,
+ 'flow_matching_gs':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine_gs, # slightly modified sampling and rendering for gs
+ }[args_1.trainer_name]
+
+ # continuous
+ sde_diffusion = None
+
+ auto_encoder.decoder.rendering_kwargs = args_1.rendering_kwargs
+ # stage_1_output_dir = args_2.stage_1_output_dir
+
+ training_loop_class_stage1 = TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model_stage1,
+ control_model=None, # to remove
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=None,
+ data=data,
+ eval_data=None,
+ **args_1)
+
+ training_loop_class_stage2 = TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model_stage2,
+ control_model=None, # to remove
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=None,
+ data=data,
+ eval_data=None,
+ **args_2)
+
+
+ css = """
+ h1 {
+ text-align: center;
+ display:block;
+ }
+ """
+
+
+ def preprocess(input_image, preprocess_background=True, foreground_ratio=0.85):
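+        # Optionally remove the background with rembg, recenter the foreground, and composite onto white before resizing for DINO.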
+ if preprocess_background:
+ rembg_session = rembg.new_session()
+ image = input_image.convert("RGB")
+ image = remove_background(image, rembg_session)
+ image = resize_foreground(image, foreground_ratio)
+ image = set_white_background(image)
+ else:
+ image = input_image
+ if image.mode == "RGBA":
+ image = set_white_background(image)
+ image = resize_to_224(image)
+ return image
+
+
+ @spaces.GPU(duration=50)
+ def cascaded_generation(processed_image, seed, cfg_scale):
+ # gc.collect()
+ # stage-1, generate pcd
+ stage_1_pcd = training_loop_class_stage1.eval_i23d_and_export_gradio(processed_image, seed, cfg_scale)
+ # stage-2, generate surfel Gaussians, tsdf mesh etc.
+ video_path, rgb_xyz_path, post_mesh_path = training_loop_class_stage2.eval_i23d_and_export_gradio(processed_image, seed, cfg_scale)
+ return video_path, rgb_xyz_path, post_mesh_path, stage_1_pcd
+
+ with gr.Blocks(css=css) as demo:
+ gr.Markdown(
+ """
+ # GaussianAnything: Interactive Point Cloud Latent Diffusion for 3D Generation
+ **GaussianAnything (arXiv 2024)** [[code](https://github.com/NIRVANALAN/GaussianAnything), [project page](https://nirvanalan.github.io/projects/GA/)] is a native 3D diffusion model that supports high-quality 2D Gaussians generation.
+            It first trains a 3D VAE on **Objaverse**, which compresses each 3D asset into a compact point cloud-structured latent.
+            After that, an image/text-conditioned diffusion model is trained following the LDM paradigm.
+            The model used in the demo adopts a 3D DiT architecture with a flow-matching framework, and supports single-image conditioning.
+ It is trained on 8 A100 GPUs for 1M iterations with batch size 256.
+            Locally, on an NVIDIA A100/A10 GPU, each image-conditioned diffusion generation can be done within 20 seconds (the time varies due to the adaptive-step ODE solver used in flow-matching).
+            Upload an image of an object or click on one of the provided examples to see how GaussianAnything works.
+
+ The 3D viewer will render a .glb point cloud exported from the centers of the surfel Gaussians, and an integrated TSDF mesh.
+            For best results, run the demo and render locally; to do so, clone the [main repository](https://github.com/NIRVANALAN/GaussianAnything).
+ """
+ )
+ with gr.Row(variant="panel"):
+ with gr.Column():
+ with gr.Row():
+ input_image = gr.Image(
+ label="Input Image",
+ image_mode="RGBA",
+ sources="upload",
+ type="pil",
+ elem_id="content_image",
+ )
+ processed_image = gr.Image(label="Processed Image", interactive=False)
+
+ # params
+ with gr.Row():
+ with gr.Column():
+ with gr.Row():
+ # with gr.Group():
+
+ cfg_scale = gr.Number(
+ label="CFG-scale", value=4.0, interactive=True,
+ )
+ seed = gr.Number(
+ label="Seed", value=42, interactive=True,
+ )
+
+ # num_steps = gr.Number(
+ # label="ODE Sampling Steps", value=250, interactive=True,
+ # )
+
+ # with gr.Column():
+ # with gr.Row():
+ # mesh_size = gr.Number(
+ # label="Mesh Resolution", value=192, interactive=True,
+ # )
+
+ # mesh_thres = gr.Number(
+ # label="Mesh Iso-surface", value=10, interactive=True,
+ # )
+
+ with gr.Row():
+ with gr.Group():
+ preprocess_background = gr.Checkbox(
+ label="Remove Background", value=False
+ )
+ with gr.Row():
+ submit = gr.Button("Generate", elem_id="generate", variant="primary")
+
+ with gr.Row(variant="panel"):
+ gr.Examples(
+ examples=[
+ str(path) for path in sorted(Path('./assets/demo-image-for-i23d/instantmesh').glob('**/*.png'))
+ ] + [str(path) for path in sorted(Path('./assets/demo-image-for-i23d/gso').glob('**/*.png'))],
+ inputs=[input_image],
+ cache_examples=False,
+ label="Examples",
+ examples_per_page=20,
+ )
+
+ with gr.Column():
+ with gr.Row():
+ with gr.Tab("Stage-2 Output"):
+ with gr.Column():
+ output_video = gr.Video(value=None, width=512, label="Rendered Video (2 LoDs)", autoplay=True, loop=True)
+ # output_video = gr.Video(value=None, width=256, label="Rendered Video", autoplay=True)
+ output_gs = gr.Model3D(
+ height=256,
+ label="2DGS Center",
+ pan_speed=0.5,
+ clear_color=(1,1,1,1), # loading glb file only.
+ )
+ output_model = gr.Model3D(
+ height=256,
+ label="TSDF Mesh",
+ pan_speed=0.5,
+ clear_color=(1,1,1,1), # loading tsdf ply files.
+ )
+
+ with gr.Tab("Stage-1 Output"):
+ with gr.Column():
+ output_model_stage1 = gr.Model3D(
+ height=256,
+ label="Stage-1",
+ pan_speed=0.5,
+ clear_color=(1,1,1,1), # loading tsdf ply files.
+ )
+
+
+
+        gr.Markdown(
+            """
+            ## Comments:
+            1. The sampling time varies since the ODE-based sampling method (dopri5 by default) uses adaptive internal steps, so reducing the number of sampling steps may not reduce the overall sampling time. Sampling steps=250 is an empirical value that works well in most cases.
+            2. The 3D viewer shows a colored .glb mesh extracted from the volumetric tri-plane, and may differ slightly from the volume rendering result.
+            3. If you find your result unsatisfying, tune the CFG scale and change the random seed. Usually, slightly increasing the CFG value leads to better performance.
+            4. Known limitations include:
+                - Missing texture details: since our VAE is trained at 192x192 resolution due to resource constraints, the texture details generated by the final 3D-LDM may be blurry. We will keep improving the performance in the future.
+            5. Regarding reconstruction performance, our model is slightly inferior to state-of-the-art multi-view LRM-based methods (e.g., InstantMesh), but offers much better diversity, flexibility, and editing potential due to the intrinsic nature of diffusion models.
+
+            ## How does it work?
+
+            GaussianAnything is a native 3D Latent Diffusion Model that supports direct 3D asset generation via diffusion sampling.
+            Compared to SDS-based ([DreamFusion](https://dreamfusion3d.github.io/)), multi-view generation-based ([MVDream](https://arxiv.org/abs/2308.16512), [Zero123++](https://github.com/SUDO-AI-3D/zero123plus), [Instant3D](https://instant-3d.github.io/)) and feedforward 3D reconstruction-based ([LRM](https://yiconghong.me/LRM/), [InstantMesh](https://github.com/TencentARC/InstantMesh), [LGM](https://github.com/3DTopia/LGM)) approaches,
+            GaussianAnything supports feedforward 3D generation with a unified framework.
+            Like 2D/video AIGC pipelines, GaussianAnything first trains a 3D VAE and then conducts LDM training (text/image-conditioned) on the learned latent space. Some related methods from industry ([Shap-E](https://github.com/openai/shap-e), [CLAY](https://github.com/CLAY-3D/OpenCLAY), [Meta 3D Gen](https://arxiv.org/abs/2303.05371)) also follow the same paradigm.
+            Though the current performance of native 3D LDM works is overall inferior to reconstruction-based methods, we believe the proposed method has great potential, scales better with more data and compute resources, and may yield better 3D editing performance due to its compatibility with diffusion models.
+            For more results, see the [project page](https://nirvanalan.github.io/projects/GA/).
+            """
+        )
+
+        submit.click(fn=check_input_image, inputs=[input_image]).success(
+            fn=preprocess,
+            inputs=[input_image, preprocess_background],
+            outputs=[processed_image],
+        ).success(
+            fn=cascaded_generation,
+            inputs=[processed_image, seed, cfg_scale],
+            outputs=[output_video, output_gs, output_model, output_model_stage1],
+        )
+
+ demo.queue(max_size=1)
+ demo.launch(share=True)
+
+if __name__ == "__main__":
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ with open('configs/gradio_i23d_stage2_args.json') as f:
+ args_2 = json.load(f)
+ args_2 = EasyDict(args_2)
+ args_2.local_rank = 0
+ args_2.gpus = 1
+
+ with open('configs/gradio_i23d_stage1_args.json') as f:
+ args_1 = json.load(f)
+ args_1 = EasyDict(args_1)
+ args_1.local_rank = 0
+ args_1.gpus = 1
+
+ main(args_1, args_2)
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..d7708dc0b2ee484aee973ba8769afd3bccaa61b0
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b80359fdef16e0762ec4ec96046d23e4db42a6d9111a901fab4d63aa6327864c
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..d976e8a11bd3472a4b197014429b9e03842b3ca1
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98b99b4d3319a30495c8726d4e9a32347938aa99e918fa86401da3eeba1c47fa
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..65d5369067caffa797d08791b20e266fcb2223c1
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d737f763e225f89a78191e3c629fda149b280aa4213432e06baa12433f647707
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..658e627ded338d4ff6e8add661dba02cdf75c4ee
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:958056548219cde6d6dfcf0307c23a2b00e9b6dde9d2bae29081962890c849e5
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8832e75b1b157ead347a985c81e27e259edada7d
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:94e5b0e456eae3296b484125aa707c4c1e5d03d2e017a710f1cb7960c1e5f8ed
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..bc71f4e758c3099a6364bf8451b1d0e0adf75812
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10017/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b6285c92b810b127fa3780da3648788a9f58d2ddabd581847d802a06841c445
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..1bf0e8aaf869eeef66e6d880b6caed28741d51c7
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4027472cdb5b3e2a4c668294960aa56861dbd886c3fe9b645e5cdf1fc3d60a89
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..e24644f4601e52e8fc4ac1fbee79c4d32a228007
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ede83c74f20954e83d758e15d5917dba92525bc1f6b48162510061df48c1500f
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..70530796d3623516024732fa4b087704db599028
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90b64de4f85562412008570b6210422801b05aed07f4ef9e239e6b957407cc0f
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..e843820c9391554aa62c592dd02a97fbe954914b
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dbc68942442a97cf9a42723ee69c314e6970028a541b420e3a462fc0ba747f6
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..d9b5a12261ecf0768ad4c6ab37ed464182d57eb6
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:282630473f295afd5284813ee7b073d0ea3ce544445fb53d9007fbe3051689d2
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..af8e7427d6f13659ebc06fbae460b2dac82e27d1
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10031/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d367864f87085597f0164ad56fd83447695b4ea4245aa0d98fc67a7845839fad
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..af2c4b2f5ed71c71daca1ef83a522b40b7bc1dc4
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7117c23b4cc5c07630a56f65414f8b623ed67886096d96f1533ab0055a4d07b7
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8fbe41ba0a560c26b447dea4f8ef775ade8dbedb
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a259ee690ac870ee8c9a5d528034a910d81c29bf592facde1c783fa62f5b5ac0
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..1c13468cf8a271deb4ba109ac26ae074e0b1368f
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9377f4782c5c8c54442ec752cb64e26b20efdc68dc532ae89fc464a98416a026
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..05af810d03932f036747f9ca5120c1c0c1f1ac69
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99e603f1f429a309d5d88ff5b98996ac59904aad7a1b17b864baef188cb7b7cd
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..dd1bbd592491b74bd434be438aa89450c5b2c67f
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd9f62a5075c9acc4bcb9ee0b1983cfece27f9ce3dbffed241cf5cf02ac8f6df
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..2d2a3d084a12297d3bfe53e61f5d3d86821e8add
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10050/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce7b47f5a7b3d88b1693d66af79b61aa17b424c816bb2ab1290a0ac98f59ea63
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..a83f3d6685155af4604db248955da82de36bc221
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6bd3438f01f92caf7bb23014a5ec02b5304a3f3768ac97e408791415f222c7c
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..08bf6e203e4bc27ce31888071b3797e7f843e408
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:33030a0a3a980c9b7831e924c25b573b5d8a25961c8392971c03b06a82613c34
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..915c91372aca0f6bb0ecc69f3136bb5d7f5e594c
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f04ec25dba691e4a57fb81d4d7442b6bc729d722ee79ef51b0fe86c8566388a
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..ad8205fe2c20a47deb3c9ba5f3aec9b05319305b
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca81a811d44f2e735a34dadb75dd261832e23f6abd79636a1fe675c1803f3d65
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..5222d5fe50e6f13edf125fdf273cc3564a7bd464
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e38e13c22c0cd9ffac7dc334f72c9df3dbc15948a74e73e14d807fe771018d31
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..841abbbace28c196cdf639764b7c5f8b66a81b97
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10075/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fcddb1c415599aab1e841bb4a460344427e5367490cb0d30966e96954aa0744f
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..dadb2270615daf3ada0a4632dd9bd4eb69112b05
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19db7cc1d5064687a6a4144aa486a0658d94f29ed7dacae745b1d2f5e8fa47b6
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8aab9c63f9dd51ed3058e168f2f825b511c21bc2
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc7da40d8b8e29ed695f04a5bd86ddefe7b700cb9e647ba56b9a2e36f1f054b6
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..4628a7ad54d15d9d4d42474a45e6ce2de87e1b44
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:42a4e4eeb24d8a262ed13521ba6e91de49474e216d61fd079ae7148ba689d172
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..4001f5cff369eeb030008ce5ca5c5fe098294892
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc68dfa937cc6c012dfb3692f7502ba74156fa8e7a74738025e95c555726389e
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..1b2220344a846aa5d9ded32425a26a627eb35fee
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:abadb3c081d001607df41dd6572ccab30fec5a4d78e007cd27f88402cc6653d6
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..4d8db4f9ccf628e522d7ac8be0584e4e5eeddf76
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10118/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d9c402a2a9a15c935a19ee400f56ff2bbee8fc56bc628f39b6a98a162baeca0
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..a2cec3b445f8f8796b6c33f7c7f22d6773c0e4e6
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:44213c91567fc2754967c0b4a941499165a4f48bf781bb0bca5ee042ce0276ef
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..82c0b21bbd0586dd2ac0eb456d91a4761ba22704
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf128e21469f06af06597702f3816ba49caae49d60bd0cb283c9f140accc858d
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..45f62b365636500c801c948149953e0937ceeee7
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19bdaa8de6263b6b28f45ea1078dc38684e5f299b72047af2f1253b2f89faac9
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..d7616769560fc36c080928a88395556c87e489d6
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91ebae020c0250173070293dc8a13e0aa43c15b08caf2b65b46a9f732f897829
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..2a4e2e97edd108eb5bcd7af1eea89d9283f5357c
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5ce368b8c1cb9b653e383e0b6bb01420e104a746e9848a855aab7d1a266ea22
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..2c72d0abb8b87507eceec68d212241a260758d9b
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10120/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83545b5bc970f348c3a6671f34164896a2ee1ac6b0141cb11c524ba3c98ad91f
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..1d0231cc402a0d9bce123cfd91df34608987ee1a
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3f4d41f65fbf790b9917522b4beef851a3fcd53bcc3e511c222eed13ecc2cb6
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..63d8acd4ad978deab4ef584953e1265df5c66d68
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e03a7e43edf3103b1752d43d959393bfd2cd6d3e2afc696772ffd5fa8cc68f5
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..e640452cb12f0f706c76f5f40a40e384caa1b813
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7065dd2259f73730dc729431af3cb198afd211fbf38ac48e27b7a57f2065d5cd
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..612b3c67b06d7296191ca2a46870a7364a9ccd3b
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:993b9d849057f3a058449294db9cbda0f35a96e4f457d9f48b2de858e582e7db
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..7b5222369a6a7667091269f06ae808fab5a0a8cd
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4605afccf0fd6fa49071150b7f9643ac79ce88c573df860b087cb8c1c1c2004f
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..a1bd8c99e3df71bd153fdb95abda1e8d4eb3b984
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/10955/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82a0e55ff4254fddcaa615b2dd12bc3d8942f76111313f588b8fc805d4698273
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/1/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/1/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..31489f928b15ae9d0c211bebfad4c687ecd0f143
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/1/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86d5b3723ac6398e61c71048af194f75406ad567aaa2a13e7e688247361a4198
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/1/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/1/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..3f95be1a982c7176272081c67d5b6eb519715770
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/1/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d097407848b958efa6a87f564d83965b7d3a5cca506b0abc98285ca1ce2952c2
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/2/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/2/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..aec524ec215ffc34ec3a1d2e040d3e5f0035bd06
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/2/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb8f52f9c3eea11ead7474e50aca3b55ca26a8332c83513735cf46992968e9c2
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/2/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/2/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..f09c3e1b48bc9dd9faa5c45bb8389075127708c6
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/2/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d89b2b9d82c1c5a06b25e4bde11aaa0e90d54a72a5e2b3e7077a0c2b8c89104d
+size 1728
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/3/bbox.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/3/bbox.npy
new file mode 100644
index 0000000000000000000000000000000000000000..87758524232a9112f405017190375752dea9ad65
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/3/bbox.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c0e86d091d7dfb76e260040b51315277b61f9ab9f52b6a81154db0c2c9ca18c
+size 192
diff --git a/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/3/c.npy b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/3/c.npy
new file mode 100644
index 0000000000000000000000000000000000000000..05ea12e6aa39f4ebf82a32cbec3fffa9c95729ae
--- /dev/null
+++ b/assets/demo-image-for-i23d/for-vae-reconstruction/Animals/0/12926/3/c.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17430f723d12e4117b6af3e6b8bf1e68e5f0d4cfead403c1eb583f217909fc96
+size 1728
diff --git a/assets/demo-image-for-i23d/gso/BAGEL_WITH_CHEESE.png b/assets/demo-image-for-i23d/gso/BAGEL_WITH_CHEESE.png
new file mode 100644
index 0000000000000000000000000000000000000000..cb3d166e91e85a1dbfa2eb477a6250458ac4bcea
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/BAGEL_WITH_CHEESE.png differ
diff --git a/assets/demo-image-for-i23d/gso/BALANCING_CACTUS.png b/assets/demo-image-for-i23d/gso/BALANCING_CACTUS.png
new file mode 100644
index 0000000000000000000000000000000000000000..37c7f01f5fd83b4d3f8bd3766999b1d8c2a659b4
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/BALANCING_CACTUS.png differ
diff --git a/assets/demo-image-for-i23d/gso/COAST_GUARD_BOAT.png b/assets/demo-image-for-i23d/gso/COAST_GUARD_BOAT.png
new file mode 100644
index 0000000000000000000000000000000000000000..34ff83b503cacdc8940c5f4af44db0c95e80af23
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/COAST_GUARD_BOAT.png differ
diff --git a/assets/demo-image-for-i23d/gso/CONE_SORTING.png b/assets/demo-image-for-i23d/gso/CONE_SORTING.png
new file mode 100644
index 0000000000000000000000000000000000000000..9471b6c0c78bde628d15cb5fb17d7f493490b913
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/CONE_SORTING.png differ
diff --git a/assets/demo-image-for-i23d/gso/FIRE_ENGINE.png b/assets/demo-image-for-i23d/gso/FIRE_ENGINE.png
new file mode 100644
index 0000000000000000000000000000000000000000..438c995a1b4ae1f418388e2218b1c7efde39014a
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/FIRE_ENGINE.png differ
diff --git a/assets/demo-image-for-i23d/gso/FOOD_BEVERAGE_SET.png b/assets/demo-image-for-i23d/gso/FOOD_BEVERAGE_SET.png
new file mode 100644
index 0000000000000000000000000000000000000000..63160d9335345d4765fca963def1b4232c622458
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/FOOD_BEVERAGE_SET.png differ
diff --git a/assets/demo-image-for-i23d/gso/GEOMETRIC_PEG_BOARD.png b/assets/demo-image-for-i23d/gso/GEOMETRIC_PEG_BOARD.png
new file mode 100644
index 0000000000000000000000000000000000000000..e5514baec0210099872d6ab5afad7052d5facebc
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/GEOMETRIC_PEG_BOARD.png differ
diff --git a/assets/demo-image-for-i23d/gso/Great_Dinos_Triceratops_Toy.png b/assets/demo-image-for-i23d/gso/Great_Dinos_Triceratops_Toy.png
new file mode 100644
index 0000000000000000000000000000000000000000..14dddc146933e157e7979966cceb35c12f50bdd0
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/Great_Dinos_Triceratops_Toy.png differ
diff --git a/assets/demo-image-for-i23d/gso/Schleich_African_Black_Rhino.png b/assets/demo-image-for-i23d/gso/Schleich_African_Black_Rhino.png
new file mode 100644
index 0000000000000000000000000000000000000000..360453223a3d6e09ae8accfe9824f7ee816b2ff9
Binary files /dev/null and b/assets/demo-image-for-i23d/gso/Schleich_African_Black_Rhino.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/blue_cat-input.png b/assets/demo-image-for-i23d/instantmesh/blue_cat-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..38e504fe1bcd6f6dfb1652d6ec6c424f546b0f4f
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/blue_cat-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/cake-input.png b/assets/demo-image-for-i23d/instantmesh/cake-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..bdf43116a789ea64d272d8eb3ac502761d824d60
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/cake-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/chair_armed-input.png b/assets/demo-image-for-i23d/instantmesh/chair_armed-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..8e3fce0466e0ec057e6134d10b5a4d48c5674cc9
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/chair_armed-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/extinguisher-input.png b/assets/demo-image-for-i23d/instantmesh/extinguisher-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..def99a52938f492a861bc524fb4b6c8cc0d02ded
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/extinguisher-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/fruit_elephant-input.png b/assets/demo-image-for-i23d/instantmesh/fruit_elephant-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..28b95938f69b530a71e3135d1ed98e08aaff2e38
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/fruit_elephant-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/house2-input.png b/assets/demo-image-for-i23d/instantmesh/house2-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..29e882ba76ef0d17482209933ec8573486ae1358
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/house2-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/pikachu-input.png b/assets/demo-image-for-i23d/instantmesh/pikachu-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..82706653425939ef276c46f017bed424644832a3
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/pikachu-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/plant-input.png b/assets/demo-image-for-i23d/instantmesh/plant-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..fddf7038e98a6be866e0fafe9831037913ae0fc2
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/plant-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/robot-input.png b/assets/demo-image-for-i23d/instantmesh/robot-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..3995c9498f5bdba1fbd3308415043a1ae142176a
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/robot-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/sword-input.png b/assets/demo-image-for-i23d/instantmesh/sword-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..6d35b70c9e1e2300a1508a92a36e4fe0586e31c7
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/sword-input.png differ
diff --git a/assets/demo-image-for-i23d/instantmesh/teasure_chest-input.png b/assets/demo-image-for-i23d/instantmesh/teasure_chest-input.png
new file mode 100644
index 0000000000000000000000000000000000000000..db962dbe37247e4ef2e21cebadb49d3b9fcec5f7
Binary files /dev/null and b/assets/demo-image-for-i23d/instantmesh/teasure_chest-input.png differ
diff --git a/assets/objv_eval_pose.pt b/assets/objv_eval_pose.pt
new file mode 100644
index 0000000000000000000000000000000000000000..cd61c56d751cf1bc695a8f8e6ed55e67bd13c24d
--- /dev/null
+++ b/assets/objv_eval_pose.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:09b6e0eb22da1ca5d1e4a4fd1bfe5b08d65d1b1926621aa22601b67f20904f9a
+size 4721
diff --git a/checkpoint/.gitkeep b/checkpoint/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/checkpoint/model_rec1965000.pt b/checkpoint/model_rec1965000.pt
new file mode 120000
index 0000000000000000000000000000000000000000..6c5c24eedc6c17fdc2eacb232b22eb7796b866a6
--- /dev/null
+++ b/checkpoint/model_rec1965000.pt
@@ -0,0 +1 @@
+/mnt/sfs-common/yslan/open-source/vae/model_rec1965000.pt
\ No newline at end of file
diff --git a/cldm/__pycache__/cldm.cpython-310.pyc b/cldm/__pycache__/cldm.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..416350827cc2229e320fdf877156adca3e051543
Binary files /dev/null and b/cldm/__pycache__/cldm.cpython-310.pyc differ
diff --git a/cldm/__pycache__/cldm.cpython-39.pyc b/cldm/__pycache__/cldm.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..70a61eda8d5c67b153dabc7dadd6614f230d8114
Binary files /dev/null and b/cldm/__pycache__/cldm.cpython-39.pyc differ
diff --git a/cldm/cldm.py b/cldm/cldm.py
new file mode 100644
index 0000000000000000000000000000000000000000..e78e8c9e9299ad50259fef980d53c12400b371a9
--- /dev/null
+++ b/cldm/cldm.py
@@ -0,0 +1,456 @@
+import torch
+import torch as th
+import torch.nn as nn
+
+from ldm.modules.diffusionmodules.util import (
+ conv_nd,
+ linear,
+ zero_module,
+ timestep_embedding,
+)
+
+from einops import rearrange, repeat
+from torchvision.utils import make_grid
+from ldm.modules.attention import SpatialTransformer
+# from ldm.modules.diffusionmodules.openaimodel import UNetModel, TimestepEmbedSequential, ResBlock, Downsample, AttentionBlock
+from guided_diffusion.unet import UNetModel, TimestepEmbedSequential, ResBlock, Downsample, AttentionBlock
+# from ldm.models.diffusion.ddpm import LatentDiffusion
+from ldm.util import log_txt_as_img, exists # , instantiate_from_config
+# from ldm.models.diffusion.ddim import DDIMSampler
+from pdb import set_trace as st
+
+
+class ControlledUnetModel(UNetModel):
+ def forward(self, x, timesteps=None, context=None, control=None, only_mid_control=False, get_attr='', **kwargs):
+
+ if get_attr != '': # not breaking the forward hooks
+ return getattr(self, get_attr)
+
+ hs = []
+ with torch.no_grad(): # fix middle_block, SD
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.roll_out:
+ x = rearrange(x, 'b (n c) h w->b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
+
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb, context)
+ hs.append(h)
+ h = self.middle_block(h, emb, context)
+
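+        # ControlNet: add the zero-conv control residuals to the middle-block output and to each decoder skip connection.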
+ assert control is not None
+ # if control is not None:
+ h += control.pop()
+
+ for i, module in enumerate(self.output_blocks):
+ if only_mid_control or control is None:
+ h = torch.cat([h, hs.pop()], dim=1)
+ else:
+ # st()
+ h = torch.cat([h, hs.pop() + control.pop()], dim=1)
+ h = module(h, emb, context)
+
+ h = h.type(x.dtype)
+ h = self.out(h)
+ if self.roll_out:
+ return rearrange(h, 'b c h (n w) -> b (n c) h w', n=3)
+ return h
+
+
+class ControlNet(nn.Module):
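+    # A trainable copy of the UNet encoder; zero-initialized convolutions ("zero convs") emit per-resolution control residuals.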
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ hint_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ use_checkpoint=False,
+ use_fp16=False,
+ num_heads=-1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ # * new keys introduced in LDM
+ use_spatial_transformer=False, # custom transformer support
+ transformer_depth=1, # custom transformer support
+ context_dim=None, # custom transformer support
+ n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model
+ legacy=True,
+ disable_self_attentions=None,
+ num_attention_blocks=None,
+ disable_middle_self_attn=False,
+ use_linear_in_transformer=False,
+ roll_out=False,
+ ):
+ super().__init__()
+ self.roll_out = roll_out
+ if use_spatial_transformer:
+ assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...'
+
+ if context_dim is not None:
+ assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...'
+ from omegaconf.listconfig import ListConfig
+ if type(context_dim) == ListConfig:
+ context_dim = list(context_dim)
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ if num_heads == -1:
+ assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set'
+
+ if num_head_channels == -1:
+ assert num_heads != -1, 'Either num_heads or num_head_channels has to be set'
+
+ self.dims = dims
+ self.image_size = image_size
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ if isinstance(num_res_blocks, int):
+ self.num_res_blocks = len(channel_mult) * [num_res_blocks]
+ else:
+ if len(num_res_blocks) != len(channel_mult):
+ raise ValueError("provide num_res_blocks either as an int (globally constant) or "
+ "as a list/tuple (per-level) with the same length as channel_mult")
+ self.num_res_blocks = num_res_blocks
+ if disable_self_attentions is not None:
+ # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not
+ assert len(disable_self_attentions) == len(channel_mult)
+ if num_attention_blocks is not None:
+ assert len(num_attention_blocks) == len(self.num_res_blocks)
+ assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks))))
+ print(f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. "
+ f"This option has LESS priority than attention_resolutions {attention_resolutions}, "
+ f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, "
+ f"attention will still not be set.")
+
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ # self.use_checkpoint = use_checkpoint
+ self.use_checkpoint = False
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+ self.predict_codebook_ids = n_embed is not None
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ self.input_blocks = nn.ModuleList(
+ [
+ TimestepEmbedSequential(
+ conv_nd(dims, in_channels, model_channels, 3, padding=1)
+ )
+ ]
+ )
+ self.zero_convs = nn.ModuleList([self.make_zero_conv(model_channels)])
+
+ self.input_hint_block = TimestepEmbedSequential( # f=8
+ conv_nd(dims, hint_channels, 16, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 16, 16, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 16, 32, 3, padding=1, stride=2),
+ nn.SiLU(),
+ conv_nd(dims, 32, 32, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 32, 96, 3, padding=1, stride=2),
+ nn.SiLU(),
+ conv_nd(dims, 96, 96, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 96, 256, 3, padding=1, stride=2),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, 256, model_channels, 3, padding=1))
+ )
+
+ self._feature_size = model_channels
+ input_block_chans = [model_channels]
+ ch = model_channels
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for nr in range(self.num_res_blocks[level]):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=mult * model_channels,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = mult * model_channels
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ # num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ if exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if not exists(num_attention_blocks) or nr < num_attention_blocks[level]:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self.zero_convs.append(self.make_zero_conv(ch))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ self.zero_convs.append(self.make_zero_conv(ch))
+ ds *= 2
+ self._feature_size += ch
+
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ # num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer( # always uses a self-attn
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self.middle_block_out = self.make_zero_conv(ch)
+ self._feature_size += ch
+
+ def make_zero_conv(self, channels):
+ return TimestepEmbedSequential(zero_module(conv_nd(self.dims, channels, channels, 1, padding=0)))
+
+ def forward(self, x, hint, timesteps, context, **kwargs):
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb) # time condition embedding
+ guided_hint = self.input_hint_block(hint, emb, context) # B 320 8 8, if input resolution = 64
+
+ if self.roll_out:
+ x = rearrange(x, 'b (n c) h w->b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
+ guided_hint = repeat(guided_hint, 'b c h w -> b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
+
+ outs = []
+
+ h = x.type(self.dtype)
+ for module, zero_conv in zip(self.input_blocks, self.zero_convs):
+ if guided_hint is not None: # f=8; the hint (e.g. a 128x128 img_sr) is injected only at the first block
+ h = module(h, emb, context) # B 320 16 16
+ h += guided_hint
+ guided_hint = None
+ else:
+ h = module(h, emb, context)
+ outs.append(zero_conv(h, emb, context))
+
+ h = self.middle_block(h, emb, context)
+ outs.append(self.middle_block_out(h, emb, context))
+
+ return outs
+
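+# Usage sketch (illustrative, not a fixed API): ControlNet.forward() above
+# returns one zero-conv residual per input block plus one for the middle
+# block; the controlled UNet forward pops them off the end of this list while
+# decoding. This mirrors the commented-out ControlLDM.apply_model below:
+#
+# control = control_model(x=x_noisy, hint=hint, timesteps=t, context=cond_txt)
+# control = [c * scale for c, scale in zip(control, control_scales)]
+# eps = diffusion_model(x=x_noisy, timesteps=t, context=cond_txt,
+#                       control=control, only_mid_control=only_mid_control)
+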
+# ! pytorch-lightning (PL) is not supported in this repo; the original
+# ControlLDM wrapper is kept below, commented out, for reference.
+# class ControlLDM(LatentDiffusion):
+
+# def __init__(self, control_stage_config, control_key, only_mid_control, *args, **kwargs):
+# super().__init__(*args, **kwargs)
+# self.control_model = instantiate_from_config(control_stage_config)
+# self.control_key = control_key
+# self.only_mid_control = only_mid_control
+# self.control_scales = [1.0] * 13
+
+# @torch.no_grad()
+# def get_input(self, batch, k, bs=None, *args, **kwargs):
+# x, c = super().get_input(batch, self.first_stage_key, *args, **kwargs)
+# control = batch[self.control_key]
+# if bs is not None:
+# control = control[:bs]
+# control = control.to(self.device)
+# control = einops.rearrange(control, 'b h w c -> b c h w')
+# control = control.to(memory_format=torch.contiguous_format).float()
+# return x, dict(c_crossattn=[c], c_concat=[control])
+
+# def apply_model(self, x_noisy, t, cond, *args, **kwargs):
+# assert isinstance(cond, dict)
+# diffusion_model = self.model.diffusion_model
+
+# cond_txt = torch.cat(cond['c_crossattn'], 1)
+
+# if cond['c_concat'] is None:
+# eps = diffusion_model(x=x_noisy, timesteps=t, context=cond_txt, control=None, only_mid_control=self.only_mid_control)
+# else:
+# control = self.control_model(x=x_noisy, hint=torch.cat(cond['c_concat'], 1), timesteps=t, context=cond_txt)
+# control = [c * scale for c, scale in zip(control, self.control_scales)]
+# eps = diffusion_model(x=x_noisy, timesteps=t, context=cond_txt, control=control, only_mid_control=self.only_mid_control)
+
+# return eps
+
+# @torch.no_grad()
+# def get_unconditional_conditioning(self, N):
+# return self.get_learned_conditioning([""] * N)
+
+# @torch.no_grad()
+# def log_images(self, batch, N=4, n_row=2, sample=False, ddim_steps=50, ddim_eta=0.0, return_keys=None,
+# quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
+# plot_diffusion_rows=False, unconditional_guidance_scale=9.0, unconditional_guidance_label=None,
+# use_ema_scope=True,
+# **kwargs):
+# use_ddim = ddim_steps is not None
+
+# log = dict()
+# z, c = self.get_input(batch, self.first_stage_key, bs=N)
+# c_cat, c = c["c_concat"][0][:N], c["c_crossattn"][0][:N]
+# N = min(z.shape[0], N)
+# n_row = min(z.shape[0], n_row)
+# log["reconstruction"] = self.decode_first_stage(z)
+# log["control"] = c_cat * 2.0 - 1.0
+# log["conditioning"] = log_txt_as_img((512, 512), batch[self.cond_stage_key], size=16)
+
+# if plot_diffusion_rows:
+# # get diffusion row
+# diffusion_row = list()
+# z_start = z[:n_row]
+# for t in range(self.num_timesteps):
+# if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
+# t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
+# t = t.to(self.device).long()
+# noise = torch.randn_like(z_start)
+# z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
+# diffusion_row.append(self.decode_first_stage(z_noisy))
+
+# diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
+# diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
+# diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
+# diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
+# log["diffusion_row"] = diffusion_grid
+
+# if sample:
+# # get denoise row
+# samples, z_denoise_row = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
+# batch_size=N, ddim=use_ddim,
+# ddim_steps=ddim_steps, eta=ddim_eta)
+# x_samples = self.decode_first_stage(samples)
+# log["samples"] = x_samples
+# if plot_denoise_rows:
+# denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
+# log["denoise_row"] = denoise_grid
+
+# if unconditional_guidance_scale > 1.0:
+# uc_cross = self.get_unconditional_conditioning(N)
+# uc_cat = c_cat # torch.zeros_like(c_cat)
+# uc_full = {"c_concat": [uc_cat], "c_crossattn": [uc_cross]}
+# samples_cfg, _ = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
+# batch_size=N, ddim=use_ddim,
+# ddim_steps=ddim_steps, eta=ddim_eta,
+# unconditional_guidance_scale=unconditional_guidance_scale,
+# unconditional_conditioning=uc_full,
+# )
+# x_samples_cfg = self.decode_first_stage(samples_cfg)
+# log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
+
+# return log
+
+# @torch.no_grad()
+# def sample_log(self, cond, batch_size, ddim, ddim_steps, **kwargs):
+# ddim_sampler = DDIMSampler(self)
+# b, c, h, w = cond["c_concat"][0].shape
+# shape = (self.channels, h // 8, w // 8)
+# samples, intermediates = ddim_sampler.sample(ddim_steps, batch_size, shape, cond, verbose=False, **kwargs)
+# return samples, intermediates
+
+# def configure_optimizers(self):
+# lr = self.learning_rate
+# params = list(self.control_model.parameters())
+# if not self.sd_locked:
+# params += list(self.model.diffusion_model.output_blocks.parameters())
+# params += list(self.model.diffusion_model.out.parameters())
+# opt = torch.optim.AdamW(params, lr=lr)
+# return opt
+
+# def low_vram_shift(self, is_diffusing):
+# if is_diffusing:
+# self.model = self.model.cuda()
+# self.control_model = self.control_model.cuda()
+# self.first_stage_model = self.first_stage_model.cpu()
+# self.cond_stage_model = self.cond_stage_model.cpu()
+# else:
+# self.model = self.model.cpu()
+# self.control_model = self.control_model.cpu()
+# self.first_stage_model = self.first_stage_model.cuda()
+# self.cond_stage_model = self.cond_stage_model.cuda()
diff --git a/cldm/ddim_hacked.py b/cldm/ddim_hacked.py
new file mode 100644
index 0000000000000000000000000000000000000000..25b1bc947272ad14d7f7e5e4d1809005253b63d0
--- /dev/null
+++ b/cldm/ddim_hacked.py
@@ -0,0 +1,317 @@
+"""SAMPLING ONLY."""
+
+import torch
+import numpy as np
+from tqdm import tqdm
+
+from ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like, extract_into_tensor
+
+
+class DDIMSampler(object):
+ def __init__(self, model, schedule="linear", **kwargs):
+ super().__init__()
+ self.model = model
+ self.ddpm_num_timesteps = model.num_timesteps
+ self.schedule = schedule
+
+ def register_buffer(self, name, attr):
+ if isinstance(attr, torch.Tensor):
+ if attr.device != torch.device("cuda"):
+ attr = attr.to(torch.device("cuda"))
+ setattr(self, name, attr)
+
+ def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True):
+ self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,
+ num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)
+ alphas_cumprod = self.model.alphas_cumprod
+ assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'
+ to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)
+
+ self.register_buffer('betas', to_torch(self.model.betas))
+ self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
+ self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())))
+ self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)))
+
+ # ddim sampling parameters
+ ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),
+ ddim_timesteps=self.ddim_timesteps,
+ eta=ddim_eta,verbose=verbose)
+ self.register_buffer('ddim_sigmas', ddim_sigmas)
+ self.register_buffer('ddim_alphas', ddim_alphas)
+ self.register_buffer('ddim_alphas_prev', ddim_alphas_prev)
+ self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas))
+ sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(
+ (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (
+ 1 - self.alphas_cumprod / self.alphas_cumprod_prev))
+ self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps)
+
+ @torch.no_grad()
+ def sample(self,
+ S,
+ batch_size,
+ shape,
+ conditioning=None,
+ callback=None,
+ normals_sequence=None,
+ img_callback=None,
+ quantize_x0=False,
+ eta=0.,
+ mask=None,
+ x0=None,
+ temperature=1.,
+ noise_dropout=0.,
+ score_corrector=None,
+ corrector_kwargs=None,
+ verbose=True,
+ x_T=None,
+ log_every_t=100,
+ unconditional_guidance_scale=1.,
+ unconditional_conditioning=None, # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ...
+ dynamic_threshold=None,
+ ucg_schedule=None,
+ **kwargs
+ ):
+ if conditioning is not None:
+ if isinstance(conditioning, dict):
+ ctmp = conditioning[list(conditioning.keys())[0]]
+ while isinstance(ctmp, list):
+ ctmp = ctmp[0]
+ cbs = ctmp.shape[0]
+ if cbs != batch_size:
+ print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
+
+ elif isinstance(conditioning, list):
+ for ctmp in conditioning:
+ if ctmp.shape[0] != batch_size:
+ print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
+
+ else:
+ if conditioning.shape[0] != batch_size:
+ print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}")
+
+ self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)
+ # sampling
+ C, H, W = shape
+ size = (batch_size, C, H, W)
+ print(f'Data shape for DDIM sampling is {size}, eta {eta}')
+
+ samples, intermediates = self.ddim_sampling(conditioning, size,
+ callback=callback,
+ img_callback=img_callback,
+ quantize_denoised=quantize_x0,
+ mask=mask, x0=x0,
+ ddim_use_original_steps=False,
+ noise_dropout=noise_dropout,
+ temperature=temperature,
+ score_corrector=score_corrector,
+ corrector_kwargs=corrector_kwargs,
+ x_T=x_T,
+ log_every_t=log_every_t,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ dynamic_threshold=dynamic_threshold,
+ ucg_schedule=ucg_schedule
+ )
+ return samples, intermediates
+
+ @torch.no_grad()
+ def ddim_sampling(self, cond, shape,
+ x_T=None, ddim_use_original_steps=False,
+ callback=None, timesteps=None, quantize_denoised=False,
+ mask=None, x0=None, img_callback=None, log_every_t=100,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
+ unconditional_guidance_scale=1., unconditional_conditioning=None, dynamic_threshold=None,
+ ucg_schedule=None):
+ device = self.model.betas.device
+ b = shape[0]
+ if x_T is None:
+ img = torch.randn(shape, device=device)
+ else:
+ img = x_T
+
+ if timesteps is None:
+ timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps
+ elif timesteps is not None and not ddim_use_original_steps:
+ subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1
+ timesteps = self.ddim_timesteps[:subset_end]
+
+ intermediates = {'x_inter': [img], 'pred_x0': [img]}
+ time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps)
+ total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]
+ print(f"Running DDIM Sampling with {total_steps} timesteps")
+
+ iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps)
+
+ for i, step in enumerate(iterator):
+ index = total_steps - i - 1
+ ts = torch.full((b,), step, device=device, dtype=torch.long)
+
+ if mask is not None:
+ assert x0 is not None
+ img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass?
+ img = img_orig * mask + (1. - mask) * img
+
+ if ucg_schedule is not None:
+ assert len(ucg_schedule) == len(time_range)
+ unconditional_guidance_scale = ucg_schedule[i]
+
+ outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,
+ quantize_denoised=quantize_denoised, temperature=temperature,
+ noise_dropout=noise_dropout, score_corrector=score_corrector,
+ corrector_kwargs=corrector_kwargs,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ dynamic_threshold=dynamic_threshold)
+ img, pred_x0 = outs
+ if callback: callback(i)
+ if img_callback: img_callback(pred_x0, i)
+
+ if index % log_every_t == 0 or index == total_steps - 1:
+ intermediates['x_inter'].append(img)
+ intermediates['pred_x0'].append(pred_x0)
+
+ return img, intermediates
+
+ @torch.no_grad()
+ def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
+ unconditional_guidance_scale=1., unconditional_conditioning=None,
+ dynamic_threshold=None):
+ b, *_, device = *x.shape, x.device
+
+ if unconditional_conditioning is None or unconditional_guidance_scale == 1.:
+ model_output = self.model.apply_model(x, t, c)
+ else:
+ model_t = self.model.apply_model(x, t, c)
+ model_uncond = self.model.apply_model(x, t, unconditional_conditioning)
+ model_output = model_uncond + unconditional_guidance_scale * (model_t - model_uncond)
+
+ if self.model.parameterization == "v":
+ e_t = self.model.predict_eps_from_z_and_v(x, t, model_output)
+ else:
+ e_t = model_output
+
+ if score_corrector is not None:
+ assert self.model.parameterization == "eps", 'not implemented'
+ e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)
+
+ alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas
+ alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev
+ sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas
+ sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas
+ # select parameters corresponding to the currently considered timestep
+ a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)
+ a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)
+ sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)
+ sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)
+
+ # current prediction for x_0
+ if self.model.parameterization != "v":
+ pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()
+ else:
+ pred_x0 = self.model.predict_start_from_z_and_v(x, t, model_output)
+
+ if quantize_denoised:
+ pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)
+
+ if dynamic_threshold is not None:
+ raise NotImplementedError()
+
+ # direction pointing to x_t
+ dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t
+ noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature
+ if noise_dropout > 0.:
+ noise = torch.nn.functional.dropout(noise, p=noise_dropout)
+ x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise
+ return x_prev, pred_x0
+
+ @torch.no_grad()
+ def encode(self, x0, c, t_enc, use_original_steps=False, return_intermediates=None,
+ unconditional_guidance_scale=1.0, unconditional_conditioning=None, callback=None):
+ timesteps = np.arange(self.ddpm_num_timesteps) if use_original_steps else self.ddim_timesteps
+ num_reference_steps = timesteps.shape[0]
+
+ assert t_enc <= num_reference_steps
+ num_steps = t_enc
+
+ if use_original_steps:
+ alphas_next = self.alphas_cumprod[:num_steps]
+ alphas = self.alphas_cumprod_prev[:num_steps]
+ else:
+ alphas_next = self.ddim_alphas[:num_steps]
+ alphas = torch.tensor(self.ddim_alphas_prev[:num_steps])
+
+ x_next = x0
+ intermediates = []
+ inter_steps = []
+ for i in tqdm(range(num_steps), desc='Encoding Image'):
+ t = torch.full((x0.shape[0],), timesteps[i], device=self.model.device, dtype=torch.long)
+ if unconditional_guidance_scale == 1.:
+ noise_pred = self.model.apply_model(x_next, t, c)
+ else:
+ assert unconditional_conditioning is not None
+ e_t_uncond, noise_pred = torch.chunk(
+ self.model.apply_model(torch.cat((x_next, x_next)), torch.cat((t, t)),
+ torch.cat((unconditional_conditioning, c))), 2)
+ noise_pred = e_t_uncond + unconditional_guidance_scale * (noise_pred - e_t_uncond)
+
+ xt_weighted = (alphas_next[i] / alphas[i]).sqrt() * x_next
+ weighted_noise_pred = alphas_next[i].sqrt() * (
+ (1 / alphas_next[i] - 1).sqrt() - (1 / alphas[i] - 1).sqrt()) * noise_pred
+ x_next = xt_weighted + weighted_noise_pred
+ if return_intermediates and i % (
+ num_steps // return_intermediates) == 0 and i < num_steps - 1:
+ intermediates.append(x_next)
+ inter_steps.append(i)
+ elif return_intermediates and i >= num_steps - 2:
+ intermediates.append(x_next)
+ inter_steps.append(i)
+ if callback: callback(i)
+
+ out = {'x_encoded': x_next, 'intermediate_steps': inter_steps}
+ if return_intermediates:
+ out.update({'intermediates': intermediates})
+ return x_next, out
+
+ @torch.no_grad()
+ def stochastic_encode(self, x0, t, use_original_steps=False, noise=None):
+ # fast, but does not allow for exact reconstruction
+ # t serves as an index to gather the correct alphas
+ if use_original_steps:
+ sqrt_alphas_cumprod = self.sqrt_alphas_cumprod
+ sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod
+ else:
+ sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas)
+ sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas
+
+ if noise is None:
+ noise = torch.randn_like(x0)
+ return (extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 +
+ extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise)
+
+ @torch.no_grad()
+ def decode(self, x_latent, cond, t_start, unconditional_guidance_scale=1.0, unconditional_conditioning=None,
+ use_original_steps=False, callback=None):
+
+ timesteps = np.arange(self.ddpm_num_timesteps) if use_original_steps else self.ddim_timesteps
+ timesteps = timesteps[:t_start]
+
+ time_range = np.flip(timesteps)
+ total_steps = timesteps.shape[0]
+ print(f"Running DDIM Sampling with {total_steps} timesteps")
+
+ iterator = tqdm(time_range, desc='Decoding image', total=total_steps)
+ x_dec = x_latent
+ for i, step in enumerate(iterator):
+ index = total_steps - i - 1
+ ts = torch.full((x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long)
+ x_dec, _ = self.p_sample_ddim(x_dec, cond, ts, index=index, use_original_steps=use_original_steps,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning)
+ if callback: callback(i)
+ return x_dec
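+
+
+# Usage sketch (illustrative; `model`, `cond` and `uncond` are placeholders):
+# the sampler only assumes a LatentDiffusion-style model exposing apply_model(),
+# alphas_cumprod, betas, etc., as used above.
+#
+# sampler = DDIMSampler(model)
+# samples, intermediates = sampler.sample(
+#     S=50,                              # number of DDIM steps
+#     batch_size=4,
+#     shape=(4, 64, 64),                 # latent (C, H, W)
+#     conditioning=cond,                 # e.g. text embeddings
+#     eta=0.0,                           # eta=0 -> deterministic DDIM
+#     unconditional_guidance_scale=7.5,
+#     unconditional_conditioning=uncond,
+# )
+# images = model.decode_first_stage(samples)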
diff --git a/cldm/hack.py b/cldm/hack.py
new file mode 100644
index 0000000000000000000000000000000000000000..454361e9d036cd1a6a79122c2fd16b489e4767b1
--- /dev/null
+++ b/cldm/hack.py
@@ -0,0 +1,111 @@
+import torch
+import einops
+
+import ldm.modules.encoders.modules
+import ldm.modules.attention
+
+from transformers import logging
+from ldm.modules.attention import default
+
+
+def disable_verbosity():
+ logging.set_verbosity_error()
+ print('transformers logging verbosity set to error.')
+ return
+
+
+def enable_sliced_attention():
+ ldm.modules.attention.CrossAttention.forward = _hacked_sliced_attention_forward
+ print('Enabled sliced_attention.')
+ return
+
+
+def hack_everything(clip_skip=0):
+ disable_verbosity()
+ ldm.modules.encoders.modules.FrozenCLIPEmbedder.forward = _hacked_clip_forward
+ ldm.modules.encoders.modules.FrozenCLIPEmbedder.clip_skip = clip_skip
+ print('Enabled clip hacks.')
+ return
+
+
+# Written by Lvmin
+def _hacked_clip_forward(self, text):
+ PAD = self.tokenizer.pad_token_id
+ EOS = self.tokenizer.eos_token_id
+ BOS = self.tokenizer.bos_token_id
+
+ def tokenize(t):
+ return self.tokenizer(t, truncation=False, add_special_tokens=False)["input_ids"]
+
+ def transformer_encode(t):
+ if self.clip_skip > 1:
+ rt = self.transformer(input_ids=t, output_hidden_states=True)
+ return self.transformer.text_model.final_layer_norm(rt.hidden_states[-self.clip_skip])
+ else:
+ return self.transformer(input_ids=t, output_hidden_states=False).last_hidden_state
+
+ def split(x):
+ # keep up to three 75-token chunks; tokens beyond 225 are dropped
+ return x[75 * 0: 75 * 1], x[75 * 1: 75 * 2], x[75 * 2: 75 * 3]
+
+ def pad(x, p, i):
+ return x[:i] if len(x) >= i else x + [p] * (i - len(x))
+
+ raw_tokens_list = tokenize(text)
+ tokens_list = []
+
+ for raw_tokens in raw_tokens_list:
+ raw_tokens_123 = split(raw_tokens)
+ raw_tokens_123 = [[BOS] + raw_tokens_i + [EOS] for raw_tokens_i in raw_tokens_123]
+ raw_tokens_123 = [pad(raw_tokens_i, PAD, 77) for raw_tokens_i in raw_tokens_123]
+ tokens_list.append(raw_tokens_123)
+
+ tokens_list = torch.IntTensor(tokens_list).to(self.device)
+
+ feed = einops.rearrange(tokens_list, 'b f i -> (b f) i')
+ y = transformer_encode(feed)
+ z = einops.rearrange(y, '(b f) i c -> b (f i) c', f=3)
+
+ return z
+
+
+# Stolen from https://github.com/basujindal/stable-diffusion/blob/main/optimizedSD/splitAttention.py
+def _hacked_sliced_attention_forward(self, x, context=None, mask=None):
+ h = self.heads
+
+ q = self.to_q(x)
+ context = default(context, x)
+ k = self.to_k(context)
+ v = self.to_v(context)
+ del context, x
+
+ q, k, v = map(lambda t: einops.rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v))
+
+ limit = k.shape[0]
+ att_step = 1
+ q_chunks = list(torch.tensor_split(q, limit // att_step, dim=0))
+ k_chunks = list(torch.tensor_split(k, limit // att_step, dim=0))
+ v_chunks = list(torch.tensor_split(v, limit // att_step, dim=0))
+
+ q_chunks.reverse()
+ k_chunks.reverse()
+ v_chunks.reverse()
+ sim = torch.zeros(q.shape[0], q.shape[1], v.shape[2], device=q.device)
+ del k, q, v
+ for i in range(0, limit, att_step):
+ q_buffer = q_chunks.pop()
+ k_buffer = k_chunks.pop()
+ v_buffer = v_chunks.pop()
+ sim_buffer = torch.einsum('b i d, b j d -> b i j', q_buffer, k_buffer) * self.scale
+
+ del k_buffer, q_buffer
+ # attention, what we cannot get enough of, by chunks
+
+ sim_buffer = sim_buffer.softmax(dim=-1)
+
+ sim_buffer = torch.einsum('b i j, b j d -> b i d', sim_buffer, v_buffer)
+ del v_buffer
+ sim[i:i + att_step, :, :] = sim_buffer
+
+ del sim_buffer
+ sim = einops.rearrange(sim, '(b h) n d -> b n (h d)', h=h)
+ return self.to_out(sim)
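+
+
+# Usage sketch: these helpers monkey-patch ldm in place, so call them once
+# before the model is constructed.
+#
+# from cldm.hack import disable_verbosity, hack_everything
+# disable_verbosity()
+# hack_everything(clip_skip=2)  # clip_skip > 1 taps an earlier CLIP hidden state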
diff --git a/cldm/logger.py b/cldm/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a8803846f2a8979f87f3cf9ea5b12869439e62f
--- /dev/null
+++ b/cldm/logger.py
@@ -0,0 +1,76 @@
+import os
+
+import numpy as np
+import torch
+import torchvision
+from PIL import Image
+from pytorch_lightning.callbacks import Callback
+from pytorch_lightning.utilities.distributed import rank_zero_only
+
+
+class ImageLogger(Callback):
+ def __init__(self, batch_frequency=2000, max_images=4, clamp=True, increase_log_steps=True,
+ rescale=True, disabled=False, log_on_batch_idx=False, log_first_step=False,
+ log_images_kwargs=None):
+ super().__init__()
+ self.rescale = rescale
+ self.batch_freq = batch_frequency
+ self.max_images = max_images
+ if not increase_log_steps:
+ self.log_steps = [self.batch_freq]
+ self.clamp = clamp
+ self.disabled = disabled
+ self.log_on_batch_idx = log_on_batch_idx
+ self.log_images_kwargs = log_images_kwargs if log_images_kwargs else {}
+ self.log_first_step = log_first_step
+
+ @rank_zero_only
+ def log_local(self, save_dir, split, images, global_step, current_epoch, batch_idx):
+ root = os.path.join(save_dir, "image_log", split)
+ for k in images:
+ grid = torchvision.utils.make_grid(images[k], nrow=4)
+ if self.rescale:
+ grid = (grid + 1.0) / 2.0 # -1,1 -> 0,1; c,h,w
+ grid = grid.transpose(0, 1).transpose(1, 2).squeeze(-1)
+ grid = grid.numpy()
+ grid = (grid * 255).astype(np.uint8)
+ filename = "{}_gs-{:06}_e-{:06}_b-{:06}.png".format(k, global_step, current_epoch, batch_idx)
+ path = os.path.join(root, filename)
+ os.makedirs(os.path.split(path)[0], exist_ok=True)
+ Image.fromarray(grid).save(path)
+
+ def log_img(self, pl_module, batch, batch_idx, split="train"):
+ check_idx = batch_idx # if self.log_on_batch_idx else pl_module.global_step
+ if (self.check_frequency(check_idx) and # batch_idx % self.batch_freq == 0
+ hasattr(pl_module, "log_images") and
+ callable(pl_module.log_images) and
+ self.max_images > 0):
+ logger = type(pl_module.logger)
+
+ is_train = pl_module.training
+ if is_train:
+ pl_module.eval()
+
+ with torch.no_grad():
+ images = pl_module.log_images(batch, split=split, **self.log_images_kwargs)
+
+ for k in images:
+ N = min(images[k].shape[0], self.max_images)
+ images[k] = images[k][:N]
+ if isinstance(images[k], torch.Tensor):
+ images[k] = images[k].detach().cpu()
+ if self.clamp:
+ images[k] = torch.clamp(images[k], -1., 1.)
+
+ self.log_local(pl_module.logger.save_dir, split, images,
+ pl_module.global_step, pl_module.current_epoch, batch_idx)
+
+ if is_train:
+ pl_module.train()
+
+ def check_frequency(self, check_idx):
+ return check_idx % self.batch_freq == 0
+
+ def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):
+ if not self.disabled:
+ self.log_img(pl_module, batch, batch_idx, split="train")
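+
+
+# Usage sketch (assumes a pytorch_lightning Trainer and a LightningModule that
+# implements log_images(), which log_img() checks for above):
+#
+# from pytorch_lightning import Trainer
+# image_logger = ImageLogger(batch_frequency=500, max_images=4)
+# trainer = Trainer(callbacks=[image_logger])
+# trainer.fit(model, dataloader)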
diff --git a/cldm/model.py b/cldm/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..fed3c31ac145b78907c7f771d1d8db6fb32d92ed
--- /dev/null
+++ b/cldm/model.py
@@ -0,0 +1,28 @@
+import os
+import torch
+
+from omegaconf import OmegaConf
+from ldm.util import instantiate_from_config
+
+
+def get_state_dict(d):
+ return d.get('state_dict', d)
+
+
+def load_state_dict(ckpt_path, location='cpu'):
+ _, extension = os.path.splitext(ckpt_path)
+ if extension.lower() == ".safetensors":
+ import safetensors.torch
+ state_dict = safetensors.torch.load_file(ckpt_path, device=location)
+ else:
+ state_dict = get_state_dict(torch.load(ckpt_path, map_location=torch.device(location)))
+ state_dict = get_state_dict(state_dict)
+ print(f'Loaded state_dict from [{ckpt_path}]')
+ return state_dict
+
+
+def create_model(config_path):
+ config = OmegaConf.load(config_path)
+ model = instantiate_from_config(config.model).cpu()
+ print(f'Loaded model config from [{config_path}]')
+ return model
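+
+
+# Usage sketch (paths are placeholders, not files shipped with this repo):
+#
+# model = create_model('./configs/some_model.yaml')
+# model.load_state_dict(load_state_dict('./checkpoints/some_ckpt.safetensors', location='cpu'))
+# model = model.cuda()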
diff --git a/configs/gradio_i23d_stage1_args.json b/configs/gradio_i23d_stage1_args.json
new file mode 100644
index 0000000000000000000000000000000000000000..b3805f6ba41b9ad0427bd04c08aa4d83b4bef6a1
--- /dev/null
+++ b/configs/gradio_i23d_stage1_args.json
@@ -0,0 +1,312 @@
+{
+ "dataset_size": -1,
+ "diffusion_input_size": 32,
+ "trainer_name": "flow_matching_gs",
+ "use_amp": true,
+ "train_vae": false,
+ "triplane_scaling_divider": 1.0,
+ "overfitting": false,
+ "num_workers": 1,
+ "image_size": 512,
+ "image_size_encoder": 512,
+ "iterations": 5000001,
+ "schedule_sampler": "uniform",
+ "anneal_lr": false,
+ "lr": 0.0,
+ "weight_decay": 0.05,
+ "lr_anneal_steps": 0,
+ "batch_size": 1,
+ "eval_batch_size": 1,
+ "microbatch": 1,
+ "ema_rate": "0.9999",
+ "log_interval": 100,
+ "eval_interval": 5000,
+ "save_interval": 10000,
+ "resume_checkpoint": "/mnt/sfs-common/yslan/open-source/checkpoints/i23d/stage-1/model_joint_denoise_rec_model2335000.pt",
+ "resume_checkpoint_EG3D": "",
+ "use_fp16": false,
+ "fp16_scale_growth": 0.001,
+ "data_dir": "./assets/demo-image-for-i23d/instantmesh",
+ "eval_data_dir": "./assets/demo-image-for-i23d/instantmesh",
+ "load_depth": true,
+ "logdir": "./logs/i23d/stage-1/dino_img/",
+ "load_submodule_name": "",
+ "ignore_resume_opt": false,
+ "denoised_ae": true,
+ "diffusion_ce_anneal": true,
+ "use_lmdb": false,
+ "interval": 5,
+ "freeze_triplane_decoder": false,
+ "objv_dataset": true,
+ "use_eos_feature": false,
+ "clip_grad_throld": 1.0,
+ "allow_tf32": true,
+ "save_img": false,
+ "use_train_trajectory": false,
+ "unconditional_guidance_scale": 1.0,
+ "num_samples": 2,
+ "num_instances": 10,
+ "num_channels": 320,
+ "num_res_blocks": 2,
+ "num_heads": 8,
+ "num_heads_upsample": -1,
+ "num_head_channels": -1,
+ "attention_resolutions": "4,2,1",
+ "channel_mult": "",
+ "dropout": 0.0,
+ "class_cond": false,
+ "use_checkpoint": false,
+ "use_scale_shift_norm": true,
+ "resblock_updown": false,
+ "use_new_attention_order": false,
+ "denoise_in_channels": 3,
+ "denoise_out_channels": 3,
+ "create_controlnet": false,
+ "create_dit": true,
+ "i23d": true,
+ "create_unet_with_hint": false,
+ "dit_model_arch": "DiT-PixArt-PCD-CLAY-L",
+ "use_spatial_transformer": true,
+ "transformer_depth": 1,
+ "context_dim": 1024,
+ "pooling_ctx_dim": 768,
+ "roll_out": true,
+ "n_embed": null,
+ "legacy": true,
+ "mixing_logit_init": 10000,
+ "hint_channels": 3,
+ "learn_sigma": false,
+ "diffusion_steps": 1000,
+ "noise_schedule": "linear",
+ "standarization_xt": false,
+ "timestep_respacing": "",
+ "use_kl": false,
+ "predict_xstart": false,
+ "predict_v": false,
+ "rescale_timesteps": false,
+ "rescale_learned_sigmas": false,
+ "mixed_prediction": false,
+ "sde_time_eps": 0.01,
+ "sde_beta_start": 0.1,
+ "sde_beta_end": 20.0,
+ "sde_sde_type": "vpsde",
+ "sde_sigma2_0": 0.0,
+ "iw_sample_p": "drop_sigma2t_iw",
+ "iw_sample_q": "ll_iw",
+ "iw_subvp_like_vp_sde": false,
+ "pred_type": "x_start",
+ "p_rendering_loss": false,
+ "unfix_logit": false,
+ "loss_type": "eps",
+ "loss_weight": "simple",
+ "enable_mixing_normal": false,
+ "dino_version": "mv-sd-dit-srt-pcd-structured-nopcd",
+ "encoder_in_channels": 15,
+ "img_size": [
+ 512
+ ],
+ "patch_size": 14,
+ "in_chans": 384,
+ "num_classes": 0,
+ "embed_dim": 384,
+ "depth": 6,
+ "mlp_ratio": 4.0,
+ "qkv_bias": false,
+ "qk_scale": null,
+ "drop_rate": 0.1,
+ "attn_drop_rate": 0.0,
+ "drop_path_rate": 0.0,
+ "norm_layer": "nn.LayerNorm",
+ "cls_token": false,
+ "encoder_cls_token": false,
+ "decoder_cls_token": false,
+ "sr_kwargs": {},
+ "sr_ratio": 2,
+ "use_clip": false,
+ "arch_encoder": "vits",
+ "arch_decoder": "vitb",
+ "load_pretrain_encoder": false,
+ "encoder_lr": 1e-05,
+ "encoder_weight_decay": 0.001,
+ "no_dim_up_mlp": true,
+ "dim_up_mlp_as_func": false,
+ "decoder_load_pretrained": true,
+ "uvit_skip_encoder": true,
+ "vae_p": 2,
+ "ldm_z_channels": 10,
+ "ldm_embed_dim": 10,
+ "use_conf_map": false,
+ "sd_E_ch": 64,
+ "z_channels": 10,
+ "latent_num": 768,
+ "sd_E_num_res_blocks": 1,
+ "num_frames": 8,
+ "arch_dit_decoder": "DiT2-B/2",
+ "return_all_dit_layers": false,
+ "lrm_decoder": false,
+ "gs_rendering": true,
+ "surfel_rendering": true,
+ "plane_n": 1,
+ "in_plane_attention": true,
+ "vae_dit_token_size": 16,
+ "decomposed": true,
+ "triplane_fg_bg": false,
+ "flexicube_decoder": false,
+ "cfg": "objverse_tuneray_aug_resolution_64_64_auto",
+ "density_reg": 0.0,
+ "density_reg_p_dist": 0.004,
+ "reg_type": "l1",
+ "triplane_decoder_lr": 5e-05,
+ "super_resolution_lr": 5e-05,
+ "c_scale": 1,
+ "nsr_lr": 0.02,
+ "triplane_size": 224,
+ "decoder_in_chans": 32,
+ "triplane_in_chans": 32,
+ "decoder_output_dim": 3,
+ "out_chans": 96,
+ "c_dim": 25,
+ "ray_start": 0.6,
+ "ray_end": 1.8,
+ "rendering_kwargs": {
+ "image_resolution": 256,
+ "disparity_space_sampling": false,
+ "clamp_mode": "softplus",
+ "c_gen_conditioning_zero": true,
+ "c_scale": 1,
+ "superresolution_noise_mode": "none",
+ "density_reg": 0.0,
+ "density_reg_p_dist": 0.004,
+ "reg_type": "l1",
+ "decoder_lr_mul": 1,
+ "decoder_activation": "sigmoid",
+ "sr_antialias": true,
+ "return_triplane_features": false,
+ "return_sampling_details_flag": true,
+ "superresolution_module": "torch_utils.components.NearestConvSR",
+ "depth_resolution": 64,
+ "depth_resolution_importance": 64,
+ "ray_start": "auto",
+ "ray_end": "auto",
+ "box_warp": 0.9,
+ "white_back": true,
+ "radius_range": [
+ 1.5,
+ 2
+ ],
+ "sampler_bbox_min": -0.45,
+ "sampler_bbox_max": 0.45,
+ "filter_out_of_bbox": true,
+ "PatchRaySampler": true,
+ "patch_rendering_resolution": 45,
+ "z_near": 1.05,
+ "z_far": 2.45
+ },
+ "sr_training": false,
+ "bcg_synthesis": false,
+ "bcg_synthesis_kwargs": {},
+ "patch_rendering_resolution": 45,
+ "vit_decoder_lr": 1e-05,
+ "vit_decoder_wd": 0.001,
+ "ae_classname": "vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded",
+ "use_wds": false,
+ "use_lmdb_compressed": false,
+ "compile": false,
+ "decode_encode_img_only": false,
+ "load_wds_diff": false,
+ "load_wds_latent": false,
+ "eval_load_wds_instance": true,
+ "shards_lst": "",
+ "eval_shards_lst": "",
+ "mv_input": true,
+ "duplicate_sample": false,
+ "orthog_duplicate": false,
+ "split_chunk_input": true,
+ "load_real": true,
+ "load_mv_real": false,
+ "load_gso": true,
+ "four_view_for_latent": false,
+ "single_view_for_i23d": false,
+ "shuffle_across_cls": true,
+ "load_extra_36_view": false,
+ "mv_latent_dir": "",
+ "append_depth": false,
+ "append_xyz": true,
+ "read_normal": true,
+ "plucker_embedding": true,
+ "perturb_pcd_scale": 0.0,
+ "gs_cam_format": true,
+ "frame_0_as_canonical": false,
+ "pcd_path": "",
+ "stage_1_output_dir": "",
+ "load_pcd": true,
+ "use_chunk": false,
+ "split_chunk_size": 16,
+ "load_caption_dataset": false,
+ "load_mv_dataset": false,
+ "export_mesh": false,
+ "color_criterion": "mse",
+ "l2_lambda": 1.0,
+ "lpips_lambda": 0.8,
+ "lpips_delay_iter": 0,
+ "sr_delay_iter": 0,
+ "kl_anneal": false,
+ "latent_lambda": 0.0,
+ "latent_criterion": "mse",
+ "kl_lambda": 0.0,
+ "pt_ft_kl": false,
+ "ft_kl": false,
+ "ssim_lambda": 0.0,
+ "l1_lambda": 0.0,
+ "id_lambda": 0.0,
+ "depth_lambda": 0.0,
+ "alpha_lambda": 1.0,
+ "fg_mse": false,
+ "bg_lamdba": 0.01,
+ "density_reg_every": 4,
+ "shape_uniform_lambda": 0.005,
+ "shape_importance_lambda": 0.01,
+ "shape_depth_lambda": 0.0,
+ "xyz_lambda": 0.0,
+ "emd_lambda": 0.0,
+ "cd_lambda": 0.0,
+ "pruning_ot_lambda": 0.0,
+ "lambda_normal": 0.0,
+ "lambda_dist": 0.0,
+ "lambda_scale_reg": 0.0,
+ "lambda_opa_reg": 0.0,
+ "rec_cvD_lambda": 0.01,
+ "nvs_cvD_lambda": 0.025,
+ "patchgan_disc_factor": 0.01,
+ "patchgan_disc_g_weight": 0.2,
+ "r1_gamma": 1.0,
+ "sds_lamdba": 1.0,
+ "nvs_D_lr_mul": 1,
+ "cano_D_lr_mul": 1,
+ "ce_balanced_kl": 1.0,
+ "p_eps_lambda": 1,
+ "symmetry_loss": false,
+ "depth_smoothness_lambda": 0.0,
+ "ce_lambda": 0.0,
+ "negative_entropy_lambda": 0.0,
+ "grad_clip": true,
+ "online_mask": false,
+ "fps_sampling": false,
+ "subset_fps_sampling": false,
+ "subset_half_fps_sampling": false,
+ "commitment_loss_lambda": 0.0,
+ "rand_aug_bg": false,
+ "only_mid_control": false,
+ "control_key": "img",
+ "normalize_clip_encoding": true,
+ "scale_clip_encoding": 1.0,
+ "cfg_dropout_prob": 0.1,
+ "cond_key": "img",
+ "path_type": "Linear",
+ "prediction": "velocity",
+ "sample_eps": null,
+ "train_eps": null,
+ "snr_type": "img-uniform-gvp-dino",
+ "local_rank": 0,
+ "gpus": 1
+}
\ No newline at end of file
diff --git a/configs/gradio_i23d_stage2_args.json b/configs/gradio_i23d_stage2_args.json
new file mode 100644
index 0000000000000000000000000000000000000000..ffa9296f1b7c04274d7fe86ae810d06e9c2a3889
--- /dev/null
+++ b/configs/gradio_i23d_stage2_args.json
@@ -0,0 +1,312 @@
+{
+ "dataset_size": -1,
+ "diffusion_input_size": 32,
+ "trainer_name": "flow_matching_gs",
+ "use_amp": true,
+ "train_vae": false,
+ "triplane_scaling_divider": 1.0,
+ "overfitting": false,
+ "num_workers": 2,
+ "image_size": 512,
+ "image_size_encoder": 512,
+ "iterations": 5000001,
+ "schedule_sampler": "uniform",
+ "anneal_lr": false,
+ "lr": 0.0001,
+ "weight_decay": 0.05,
+ "lr_anneal_steps": 0,
+ "batch_size": 1,
+ "eval_batch_size": 1,
+ "microbatch": 0,
+ "ema_rate": "0.9999",
+ "log_interval": 100,
+ "eval_interval": 5000,
+ "save_interval": 10000,
+ "resume_checkpoint": "/mnt/sfs-common/yslan/open-source/checkpoints/i23d/stage-2/model_joint_denoise_rec_model2505000.pt",
+ "resume_checkpoint_EG3D": "",
+ "use_fp16": false,
+ "fp16_scale_growth": 0.001,
+ "data_dir": "./assets/demo-image-for-i23d/instantmesh",
+ "eval_data_dir": "./assets/demo-image-for-i23d/instantmesh",
+ "load_depth": true,
+ "logdir": "./logs/i23d/stage-2/dino_img/",
+ "load_submodule_name": "",
+ "ignore_resume_opt": false,
+ "denoised_ae": true,
+ "diffusion_ce_anneal": true,
+ "use_lmdb": false,
+ "interval": 5,
+ "freeze_triplane_decoder": false,
+ "objv_dataset": true,
+ "use_eos_feature": false,
+ "clip_grad_throld": 1.0,
+ "allow_tf32": true,
+ "save_img": false,
+ "use_train_trajectory": false,
+ "unconditional_guidance_scale": 1.0,
+ "num_samples": 4,
+ "num_instances": 10,
+ "num_channels": 320,
+ "num_res_blocks": 2,
+ "num_heads": 8,
+ "num_heads_upsample": -1,
+ "num_head_channels": -1,
+ "attention_resolutions": "4,2,1",
+ "channel_mult": "",
+ "dropout": 0.0,
+ "class_cond": false,
+ "use_checkpoint": false,
+ "use_scale_shift_norm": true,
+ "resblock_updown": false,
+ "use_new_attention_order": false,
+ "denoise_in_channels": 10,
+ "denoise_out_channels": 10,
+ "create_controlnet": false,
+ "create_dit": true,
+ "i23d": true,
+ "create_unet_with_hint": false,
+ "dit_model_arch": "DiT-PixArt-PCD-CLAY-stage2-L",
+ "use_spatial_transformer": true,
+ "transformer_depth": 1,
+ "context_dim": 1024,
+ "pooling_ctx_dim": 768,
+ "roll_out": true,
+ "n_embed": null,
+ "legacy": true,
+ "mixing_logit_init": 10000,
+ "hint_channels": 3,
+ "learn_sigma": false,
+ "diffusion_steps": 1000,
+ "noise_schedule": "linear",
+ "standarization_xt": false,
+ "timestep_respacing": "",
+ "use_kl": false,
+ "predict_xstart": false,
+ "predict_v": false,
+ "rescale_timesteps": false,
+ "rescale_learned_sigmas": false,
+ "mixed_prediction": false,
+ "sde_time_eps": 0.01,
+ "sde_beta_start": 0.1,
+ "sde_beta_end": 20.0,
+ "sde_sde_type": "vpsde",
+ "sde_sigma2_0": 0.0,
+ "iw_sample_p": "drop_sigma2t_iw",
+ "iw_sample_q": "ll_iw",
+ "iw_subvp_like_vp_sde": false,
+ "pred_type": "x_start",
+ "p_rendering_loss": false,
+ "unfix_logit": false,
+ "loss_type": "eps",
+ "loss_weight": "simple",
+ "enable_mixing_normal": false,
+ "dino_version": "mv-sd-dit-srt-pcd-structured-nopcd",
+ "encoder_in_channels": 15,
+ "img_size": [
+ 512
+ ],
+ "patch_size": 14,
+ "in_chans": 384,
+ "num_classes": 0,
+ "embed_dim": 384,
+ "depth": 6,
+ "mlp_ratio": 4.0,
+ "qkv_bias": false,
+ "qk_scale": null,
+ "drop_rate": 0.1,
+ "attn_drop_rate": 0.0,
+ "drop_path_rate": 0.0,
+ "norm_layer": "nn.LayerNorm",
+ "cls_token": false,
+ "encoder_cls_token": false,
+ "decoder_cls_token": false,
+ "sr_kwargs": {},
+ "sr_ratio": 2,
+ "use_clip": false,
+ "arch_encoder": "vits",
+ "arch_decoder": "vitb",
+ "load_pretrain_encoder": false,
+ "encoder_lr": 1e-05,
+ "encoder_weight_decay": 0.001,
+ "no_dim_up_mlp": true,
+ "dim_up_mlp_as_func": false,
+ "decoder_load_pretrained": true,
+ "uvit_skip_encoder": true,
+ "vae_p": 2,
+ "ldm_z_channels": 10,
+ "ldm_embed_dim": 10,
+ "use_conf_map": false,
+ "sd_E_ch": 64,
+ "z_channels": 10,
+ "latent_num": 768,
+ "sd_E_num_res_blocks": 1,
+ "num_frames": 8,
+ "arch_dit_decoder": "DiT2-B/2",
+ "return_all_dit_layers": false,
+ "lrm_decoder": false,
+ "gs_rendering": true,
+ "surfel_rendering": true,
+ "plane_n": 1,
+ "in_plane_attention": true,
+ "vae_dit_token_size": 16,
+ "decomposed": true,
+ "triplane_fg_bg": false,
+ "flexicube_decoder": false,
+ "cfg": "objverse_tuneray_aug_resolution_64_64_auto",
+ "density_reg": 0.0,
+ "density_reg_p_dist": 0.004,
+ "reg_type": "l1",
+ "triplane_decoder_lr": 5e-05,
+ "super_resolution_lr": 5e-05,
+ "c_scale": 1,
+ "nsr_lr": 0.02,
+ "triplane_size": 224,
+ "decoder_in_chans": 32,
+ "triplane_in_chans": 32,
+ "decoder_output_dim": 3,
+ "out_chans": 96,
+ "c_dim": 25,
+ "ray_start": 0.6,
+ "ray_end": 1.8,
+ "rendering_kwargs": {
+ "image_resolution": 256,
+ "disparity_space_sampling": false,
+ "clamp_mode": "softplus",
+ "c_gen_conditioning_zero": true,
+ "c_scale": 1,
+ "superresolution_noise_mode": "none",
+ "density_reg": 0.0,
+ "density_reg_p_dist": 0.004,
+ "reg_type": "l1",
+ "decoder_lr_mul": 1,
+ "decoder_activation": "sigmoid",
+ "sr_antialias": true,
+ "return_triplane_features": false,
+ "return_sampling_details_flag": true,
+ "superresolution_module": "torch_utils.components.NearestConvSR",
+ "depth_resolution": 64,
+ "depth_resolution_importance": 64,
+ "ray_start": "auto",
+ "ray_end": "auto",
+ "box_warp": 0.9,
+ "white_back": true,
+ "radius_range": [
+ 1.5,
+ 2
+ ],
+ "sampler_bbox_min": -0.45,
+ "sampler_bbox_max": 0.45,
+ "filter_out_of_bbox": true,
+ "PatchRaySampler": true,
+ "patch_rendering_resolution": 45,
+ "z_near": 1.05,
+ "z_far": 2.45
+ },
+ "sr_training": false,
+ "bcg_synthesis": false,
+ "bcg_synthesis_kwargs": {},
+ "patch_rendering_resolution": 45,
+ "vit_decoder_lr": 1e-05,
+ "vit_decoder_wd": 0.001,
+ "ae_classname": "vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded",
+ "use_wds": false,
+ "use_lmdb_compressed": false,
+ "compile": false,
+ "decode_encode_img_only": false,
+ "load_wds_diff": false,
+ "load_wds_latent": false,
+ "eval_load_wds_instance": true,
+ "shards_lst": "",
+ "eval_shards_lst": "",
+ "mv_input": true,
+ "duplicate_sample": false,
+ "orthog_duplicate": false,
+ "split_chunk_input": true,
+ "load_real": true,
+ "load_mv_real": false,
+ "load_gso": true,
+ "four_view_for_latent": false,
+ "single_view_for_i23d": false,
+ "shuffle_across_cls": true,
+ "load_extra_36_view": false,
+ "mv_latent_dir": "",
+ "append_depth": false,
+ "append_xyz": true,
+ "read_normal": true,
+ "plucker_embedding": true,
+ "perturb_pcd_scale": 0.0,
+ "gs_cam_format": true,
+ "frame_0_as_canonical": false,
+ "pcd_path": "/cpfs01/user/lanyushi.p/data/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd/",
+ "stage_1_output_dir": "./logs/i23d/stage-1/dino_img/",
+ "load_pcd": true,
+ "use_chunk": false,
+ "split_chunk_size": 16,
+ "load_caption_dataset": false,
+ "load_mv_dataset": false,
+ "export_mesh": false,
+ "color_criterion": "mse",
+ "l2_lambda": 1.0,
+ "lpips_lambda": 0.8,
+ "lpips_delay_iter": 0,
+ "sr_delay_iter": 0,
+ "kl_anneal": false,
+ "latent_lambda": 0.0,
+ "latent_criterion": "mse",
+ "kl_lambda": 0.0,
+ "pt_ft_kl": false,
+ "ft_kl": false,
+ "ssim_lambda": 0.0,
+ "l1_lambda": 0.0,
+ "id_lambda": 0.0,
+ "depth_lambda": 0.0,
+ "alpha_lambda": 1.0,
+ "fg_mse": false,
+ "bg_lamdba": 0.01,
+ "density_reg_every": 4,
+ "shape_uniform_lambda": 0.005,
+ "shape_importance_lambda": 0.01,
+ "shape_depth_lambda": 0.0,
+ "xyz_lambda": 0.0,
+ "emd_lambda": 0.0,
+ "cd_lambda": 0.0,
+ "pruning_ot_lambda": 0.0,
+ "lambda_normal": 0.0,
+ "lambda_dist": 0.0,
+ "lambda_scale_reg": 0.0,
+ "lambda_opa_reg": 0.0,
+ "rec_cvD_lambda": 0.01,
+ "nvs_cvD_lambda": 0.025,
+ "patchgan_disc_factor": 0.01,
+ "patchgan_disc_g_weight": 0.2,
+ "r1_gamma": 1.0,
+ "sds_lamdba": 1.0,
+ "nvs_D_lr_mul": 1,
+ "cano_D_lr_mul": 1,
+ "ce_balanced_kl": 1.0,
+ "p_eps_lambda": 1,
+ "symmetry_loss": false,
+ "depth_smoothness_lambda": 0.0,
+ "ce_lambda": 0.0,
+ "negative_entropy_lambda": 0.0,
+ "grad_clip": true,
+ "online_mask": false,
+ "fps_sampling": false,
+ "subset_fps_sampling": false,
+ "subset_half_fps_sampling": false,
+ "commitment_loss_lambda": 0.0,
+ "rand_aug_bg": false,
+ "only_mid_control": false,
+ "control_key": "img",
+ "normalize_clip_encoding": true,
+ "scale_clip_encoding": 1.0,
+ "cfg_dropout_prob": 0.1,
+ "cond_key": "img-xyz",
+ "path_type": "Linear",
+ "prediction": "velocity",
+ "sample_eps": null,
+ "train_eps": null,
+ "snr_type": "img-uniform-gvp-dino-stage2",
+ "local_rank": 0,
+ "gpus": 1
+}
\ No newline at end of file
diff --git a/datasets/README.md b/datasets/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..336b8e83262764419aceae9c975c58bed0fbb47b
--- /dev/null
+++ b/datasets/README.md
@@ -0,0 +1,27 @@
+# Downloading datasets
+
+This directory includes instructions and scripts for downloading ImageNet and LSUN bedrooms for use in this codebase.
+
+## Class-conditional ImageNet
+
+For our class-conditional models, we use the official ILSVRC2012 dataset with manual center cropping and downsampling. To obtain this dataset, navigate to [this page on image-net.org](http://www.image-net.org/challenges/LSVRC/2012/downloads) and sign in (or create an account if you do not already have one). Then click on the link reading "Training images (Task 1 & 2)". This is a 138GB tar file containing 1000 sub-tar files, one per class.
+
+Once the file is downloaded, extract it and look inside. You should see 1000 `.tar` files. You need to extract each of these, which may be impractical to do by hand on your operating system. To automate the process on a Unix-based system, you can `cd` into the directory and run this short shell script:
+
+```
+for file in *.tar; do tar xf "$file"; rm "$file"; done
+```
+
+This will extract and remove each tar file in turn.
+
+Once all of the images have been extracted, the resulting directory should be usable as a data directory (the `--data_dir` argument for the training script). Each filename starts with a WNID (class ID) followed by an underscore, like `n01440764_2708.JPEG`. Conveniently (but not by accident), this is how the automated data loader expects to discover class labels.
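+
+For example, the extracted directory might contain entries like these (illustrative filenames):
+
+```
+n01440764_2708.JPEG
+n01440764_3198.JPEG
+n01443537_1042.JPEG
+```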
+
+## LSUN bedroom
+
+To download and pre-process LSUN bedroom, clone [fyu/lsun](https://github.com/fyu/lsun) on GitHub and run their download script `python3 download.py bedroom`. The result will be an LMDB database named like `bedroom_train_lmdb`. You can pass this to our [lsun_bedroom.py](lsun_bedroom.py) script like so:
+
+```
+python lsun_bedroom.py bedroom_train_lmdb lsun_train_output_dir
+```
+
+This creates a directory called `lsun_train_output_dir`. This directory can be passed to the training scripts via the `--data_dir` argument.
diff --git a/datasets/__init__.py b/datasets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6bff2ea70a118ab9bba3645fbf2de42187d73c1d
--- /dev/null
+++ b/datasets/__init__.py
@@ -0,0 +1,2 @@
+from . import g_buffer_objaverse
+from . import shapenet
\ No newline at end of file
diff --git a/datasets/__pycache__/__init__.cpython-39.pyc b/datasets/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68ca17d43f9a3f027f8f98e198291fe390899fe0
Binary files /dev/null and b/datasets/__pycache__/__init__.cpython-39.pyc differ
diff --git a/datasets/__pycache__/eg3d_dataset.cpython-39.pyc b/datasets/__pycache__/eg3d_dataset.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68ceab7814f1baafd817c7ae006da82e87624596
Binary files /dev/null and b/datasets/__pycache__/eg3d_dataset.cpython-39.pyc differ
diff --git a/datasets/__pycache__/g_buffer_objaverse.cpython-39.pyc b/datasets/__pycache__/g_buffer_objaverse.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8d3818033ceb5bdcb878afa21d7a2050c5b8f4b0
Binary files /dev/null and b/datasets/__pycache__/g_buffer_objaverse.cpython-39.pyc differ
diff --git a/datasets/__pycache__/shapenet.cpython-39.pyc b/datasets/__pycache__/shapenet.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32039e600121e192e12adfe4af4c29568f0a0a89
Binary files /dev/null and b/datasets/__pycache__/shapenet.cpython-39.pyc differ
diff --git a/datasets/caption-forpaper.txt b/datasets/caption-forpaper.txt
new file mode 100644
index 0000000000000000000000000000000000000000..3a6922ca6fb023f4529f8e766a6cc07cb2d4bd7a
--- /dev/null
+++ b/datasets/caption-forpaper.txt
@@ -0,0 +1,21 @@
+An 18th century Cannon
+Mech Suit
+A voxelized dog
+Giraffe
+Astral Beacon
+low-poly model of a green pine tree, also resembling a Christmas tree.
+low poly tree model with green leaves.
+Fire Hydrants
+low poly blue chess piece model.
+A yellow rubber duck with red mouth.
+low poly blue chess piece model.
+A brown wooden table with a rectangular shape, featuring visible legs and vintage look.
+a sword with a red handle.
+A cute and friendly pink teddy bear with sitting pose.
+White Giraffe
+a police car
+an orchid flower planted in a clay pot
+a palm tree, low poly 3d model
+a plate of delicious tacos
+a red eyed tree frog, low poly
+an amigurumi bulldozer
\ No newline at end of file
diff --git a/datasets/capton-for-pointe.txt b/datasets/capton-for-pointe.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e3ed971853afcce4a152e8aebf546b68afa24c80
--- /dev/null
+++ b/datasets/capton-for-pointe.txt
@@ -0,0 +1 @@
+low poly blue chess piece model.
\ No newline at end of file
diff --git a/datasets/eg3d_dataset.py b/datasets/eg3d_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..7799306fbde1eb29029b88e5e31dfdd56f63991d
--- /dev/null
+++ b/datasets/eg3d_dataset.py
@@ -0,0 +1,601 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Streaming images and labels from datasets created with dataset_tool.py."""
+
+import cv2
+import os
+import numpy as np
+import zipfile
+import PIL.Image
+import json
+import torch
+import dnnlib
+from torchvision import transforms
+
+from pdb import set_trace as st
+
+from .shapenet import LMDBDataset_MV_Compressed, decompress_array
+
+try:
+ import pyspng
+except ImportError:
+ pyspng = None
+
+#----------------------------------------------------------------------------
+
+
+# copied from eg3d/train.py
+def init_dataset_kwargs(data,
+ class_name='datasets.eg3d_dataset.ImageFolderDataset',
+ reso_gt=128):
+ # try:
+ # if data == 'None':
+ # dataset_kwargs = dnnlib.EasyDict({}) #
+ # dataset_kwargs.name = 'eg3d_dataset'
+ # dataset_kwargs.resolution = 128
+ # dataset_kwargs.use_labels = False
+ # dataset_kwargs.max_size = 70000
+ # return dataset_kwargs, 'eg3d_dataset'
+
+ dataset_kwargs = dnnlib.EasyDict(class_name=class_name,
+ reso_gt=reso_gt,
+ path=data,
+ use_labels=True,
+ max_size=None,
+ xflip=False)
+ dataset_obj = dnnlib.util.construct_class_by_name(
+ **dataset_kwargs) # Subclass of training.dataset.Dataset.
+ dataset_kwargs.resolution = dataset_obj.resolution # Be explicit about resolution.
+ dataset_kwargs.use_labels = dataset_obj.has_labels # Be explicit about labels.
+ dataset_kwargs.max_size = len(
+ dataset_obj) # Be explicit about dataset size.
+
+ return dataset_kwargs, dataset_obj.name
+ # except IOError as err:
+ # raise click.ClickException(f'--data: {err}')
+
+
+class Dataset(torch.utils.data.Dataset):
+
+ def __init__(
+ self,
+ name, # Name of the dataset.
+ raw_shape, # Shape of the raw image data (NCHW).
+ reso_gt=128,
+ max_size=None, # Artificially limit the size of the dataset. None = no limit. Applied before xflip.
+ use_labels=False, # Enable conditioning labels? False = label dimension is zero.
+ xflip=False, # Artificially double the size of the dataset via x-flips. Applied after max_size.
+ random_seed=0, # Random seed to use when applying max_size.
+ ):
+ self._name = name
+ self._raw_shape = list(raw_shape)
+ self._use_labels = use_labels
+ self._raw_labels = None
+ self._label_shape = None
+
+ self.reso_gt = reso_gt # previously hard-coded to 128
+ self.reso_encoder = 224
+
+ # Apply max_size.
+ self._raw_idx = np.arange(self._raw_shape[0], dtype=np.int64)
+ # self._raw_idx = np.arange(self.__len__(), dtype=np.int64)
+ if (max_size is not None) and (self._raw_idx.size > max_size):
+ np.random.RandomState(random_seed).shuffle(self._raw_idx)
+ self._raw_idx = np.sort(self._raw_idx[:max_size])
+
+ # Apply xflip.
+ self._xflip = np.zeros(self._raw_idx.size, dtype=np.uint8)
+ if xflip:
+ self._raw_idx = np.tile(self._raw_idx, 2)
+ self._xflip = np.concatenate(
+ [self._xflip, np.ones_like(self._xflip)])
+
+ # dino encoder normalizer
+ self.normalize_for_encoder_input = transforms.Compose([
+ transforms.ToTensor(),
+ transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]),
+ transforms.Resize(size=(self.reso_encoder, self.reso_encoder),
+ antialias=True), # type: ignore
+ ])
+
+ self.normalize_for_gt = transforms.Compose([
+ transforms.ToTensor(),
+ transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
+ transforms.Resize(size=(self.reso_gt, self.reso_gt),
+ antialias=True), # type: ignore
+ ])
+
+ def _get_raw_labels(self):
+ if self._raw_labels is None:
+ self._raw_labels = self._load_raw_labels(
+ ) if self._use_labels else None
+ if self._raw_labels is None:
+ self._raw_labels = np.zeros([self._raw_shape[0], 0],
+ dtype=np.float32)
+ assert isinstance(self._raw_labels, np.ndarray)
+ # assert self._raw_labels.shape[0] == self._raw_shape[0]
+ assert self._raw_labels.dtype in [np.float32, np.int64]
+ if self._raw_labels.dtype == np.int64:
+ assert self._raw_labels.ndim == 1
+ assert np.all(self._raw_labels >= 0)
+ self._raw_labels_std = self._raw_labels.std(0)
+ return self._raw_labels
+
+ def close(self): # to be overridden by subclass
+ pass
+
+ def _load_raw_image(self, raw_idx): # to be overridden by subclass
+ raise NotImplementedError
+
+ def _load_raw_labels(self): # to be overridden by subclass
+ raise NotImplementedError
+
+ def __getstate__(self):
+ return dict(self.__dict__, _raw_labels=None)
+
+ def __del__(self):
+ try:
+ self.close()
+ except:
+ pass
+
+ def __len__(self):
+ return self._raw_idx.size
+ # return self._get_raw_labels().shape[0]
+
+ def __getitem__(self, idx):
+ # print(self._raw_idx[idx], idx)
+
+ matte = self._load_raw_matte(self._raw_idx[idx])
+ assert isinstance(matte, np.ndarray)
+ assert list(matte.shape)[1:] == self.image_shape[1:]
+ if self._xflip[idx]:
+            assert matte.ndim == 3  # CHW
+ matte = matte[:, :, ::-1]
+ # matte_orig = matte.copy().astype(np.float32) / 255
+ matte_orig = matte.copy().astype(np.float32) # segmentation version
+ # assert matte_orig.max() == 1
+ matte = np.transpose(matte,
+ # (1, 2, 0)).astype(np.float32) / 255 # [0,1] range
+ (1, 2, 0)).astype(np.float32) # [0,1] range
+ matte = cv2.resize(matte, (self.reso_gt, self.reso_gt),
+ interpolation=cv2.INTER_NEAREST)
+ assert matte.min() >= 0 and matte.max(
+ ) <= 1, f'{matte.min(), matte.max()}'
+
+        if matte.ndim == 3:  # reduce HWC -> HW, keep the first channel
+ matte = matte[..., 0]
+
+ image = self._load_raw_image(self._raw_idx[idx])
+
+ assert isinstance(image, np.ndarray)
+ assert list(image.shape) == self.image_shape
+ assert image.dtype == np.uint8
+ if self._xflip[idx]:
+ assert image.ndim == 3 # CHW
+ image = image[:, :, ::-1]
+
+ # blending
+ # blending = True
+ blending = False
+ if blending:
+ image = image * matte_orig + (1 - matte_orig) * cv2.GaussianBlur(
+ image, (5, 5), cv2.BORDER_DEFAULT)
+ # image = image * matte_orig
+
+ image = np.transpose(image, (1, 2, 0)).astype(
+ np.float32
+        ) / 255  # HWC for torchvision processing, normalized to [0,1]
+
+ image_sr = torch.from_numpy(image)[..., :3].permute(
+ 2, 0, 1) * 2 - 1 # normalize to [-1,1]
+ image_to_encoder = self.normalize_for_encoder_input(image)
+
+ image_gt = cv2.resize(image, (self.reso_gt, self.reso_gt),
+ interpolation=cv2.INTER_AREA)
+ image_gt = torch.from_numpy(image_gt)[..., :3].permute(
+ 2, 0, 1) * 2 - 1 # normalize to [-1,1]
+
+ return dict(
+ c=self.get_label(idx),
+ img_to_encoder=image_to_encoder, # 224
+ img_sr=image_sr, # 512
+ img=image_gt, # [-1,1] range
+ # depth=torch.zeros_like(image_gt)[0, ...] # type: ignore
+ depth=matte,
+ depth_mask=matte,
+ # depth_mask=matte > 0,
+ # alpha=matte,
+ ) # return dict here
+
+ def get_label(self, idx):
+ label = self._get_raw_labels()[self._raw_idx[idx]]
+ if label.dtype == np.int64:
+ onehot = np.zeros(self.label_shape, dtype=np.float32)
+ onehot[label] = 1
+ label = onehot
+ return label.copy()
+
+ def get_details(self, idx):
+ d = dnnlib.EasyDict()
+ d.raw_idx = int(self._raw_idx[idx])
+ d.xflip = (int(self._xflip[idx]) != 0)
+ d.raw_label = self._get_raw_labels()[d.raw_idx].copy()
+ return d
+
+ def get_label_std(self):
+ return self._raw_labels_std
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def image_shape(self):
+ return list(self._raw_shape[1:])
+
+ @property
+ def num_channels(self):
+ assert len(self.image_shape) == 3 # CHW
+ return self.image_shape[0]
+
+ @property
+ def resolution(self):
+ assert len(self.image_shape) == 3 # CHW
+ assert self.image_shape[1] == self.image_shape[2]
+ return self.image_shape[1]
+
+ @property
+ def label_shape(self):
+ if self._label_shape is None:
+ raw_labels = self._get_raw_labels()
+ if raw_labels.dtype == np.int64:
+ self._label_shape = [int(np.max(raw_labels)) + 1]
+ else:
+ self._label_shape = raw_labels.shape[1:]
+ return list(self._label_shape)
+
+ @property
+ def label_dim(self):
+ assert len(self.label_shape) == 1
+ return self.label_shape[0]
+
+ @property
+ def has_labels(self):
+ return any(x != 0 for x in self.label_shape)
+
+ @property
+ def has_onehot_labels(self):
+ return self._get_raw_labels().dtype == np.int64
+
+
+#----------------------------------------------------------------------------
+
+
+class ImageFolderDataset(Dataset):
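+    # EG3D-style image-folder / zip dataset; additionally loads a per-image
+    # matte from a parallel '*_matte' directory (see _matte_path below).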
+
+ def __init__(
+ self,
+ path, # Path to directory or zip.
+ resolution=None, # Ensure specific resolution, None = highest available.
+ reso_gt=128,
+ **super_kwargs, # Additional arguments for the Dataset base class.
+ ):
+ self._path = path
+ self._matte_path = path.replace('unzipped_ffhq_512',
+ 'unzipped_ffhq_matte')
+ # self._matte_path = path.replace('unzipped_ffhq_512',
+ # 'ffhq_512_seg')
+ self._zipfile = None
+
+ if os.path.isdir(self._path):
+ self._type = 'dir'
+ self._all_fnames = {
+ os.path.relpath(os.path.join(root, fname), start=self._path)
+ for root, _dirs, files in os.walk(self._path)
+ for fname in files
+ }
+ elif self._file_ext(self._path) == '.zip':
+ self._type = 'zip'
+ self._all_fnames = set(self._get_zipfile().namelist())
+ else:
+ raise IOError('Path must point to a directory or zip')
+
+ PIL.Image.init()
+ self._image_fnames = sorted(
+ fname for fname in self._all_fnames
+ if self._file_ext(fname) in PIL.Image.EXTENSION)
+ if len(self._image_fnames) == 0:
+ raise IOError('No image files found in the specified path')
+
+ name = os.path.splitext(os.path.basename(self._path))[0]
+ raw_shape = [len(self._image_fnames)] + list(
+ self._load_raw_image(0).shape)
+ # raw_shape = [len(self._image_fnames)] + list(
+ # self._load_raw_image(0).shape)
+ if resolution is not None and (raw_shape[2] != resolution
+ or raw_shape[3] != resolution):
+ raise IOError('Image files do not match the specified resolution')
+ super().__init__(name=name,
+ raw_shape=raw_shape,
+ reso_gt=reso_gt,
+ **super_kwargs)
+
+ @staticmethod
+ def _file_ext(fname):
+ return os.path.splitext(fname)[1].lower()
+
+ def _get_zipfile(self):
+ assert self._type == 'zip'
+ if self._zipfile is None:
+ self._zipfile = zipfile.ZipFile(self._path)
+ return self._zipfile
+
+ def _open_file(self, fname):
+ if self._type == 'dir':
+ return open(os.path.join(self._path, fname), 'rb')
+ if self._type == 'zip':
+ return self._get_zipfile().open(fname, 'r')
+ return None
+
+ def _open_matte_file(self, fname):
+ if self._type == 'dir':
+ return open(os.path.join(self._matte_path, fname), 'rb')
+ # if self._type == 'zip':
+ # return self._get_zipfile().open(fname, 'r')
+ # return None
+
+ def close(self):
+ try:
+ if self._zipfile is not None:
+ self._zipfile.close()
+ finally:
+ self._zipfile = None
+
+ def __getstate__(self):
+ return dict(super().__getstate__(), _zipfile=None)
+
+ def _load_raw_image(self, raw_idx):
+ fname = self._image_fnames[raw_idx]
+ with self._open_file(fname) as f:
+ if pyspng is not None and self._file_ext(fname) == '.png':
+ image = pyspng.load(f.read())
+ else:
+ image = np.array(PIL.Image.open(f))
+ if image.ndim == 2:
+ image = image[:, :, np.newaxis] # HW => HWC
+ image = image.transpose(2, 0, 1) # HWC => CHW
+ return image
+
+ def _load_raw_matte(self, raw_idx):
+ # ! from seg version
+ fname = self._image_fnames[raw_idx]
+ with self._open_matte_file(fname) as f:
+ if pyspng is not None and self._file_ext(fname) == '.png':
+ image = pyspng.load(f.read())
+ else:
+ image = np.array(PIL.Image.open(f))
+ # if image.max() != 1:
+ image = (image > 0).astype(np.float32) # process segmentation
+ if image.ndim == 2:
+ image = image[:, :, np.newaxis] # HW => HWC
+ image = image.transpose(2, 0, 1) # HWC => CHW
+ return image
+
+ def _load_raw_matte_orig(self, raw_idx):
+ fname = self._image_fnames[raw_idx]
+ with self._open_matte_file(fname) as f:
+ if pyspng is not None and self._file_ext(fname) == '.png':
+ image = pyspng.load(f.read())
+ else:
+ image = np.array(PIL.Image.open(f))
+ st() # process segmentation
+ if image.ndim == 2:
+ image = image[:, :, np.newaxis] # HW => HWC
+ image = image.transpose(2, 0, 1) # HWC => CHW
+ return image
+
+ def _load_raw_labels(self):
+ fname = 'dataset.json'
+ if fname not in self._all_fnames:
+ return None
+ with self._open_file(fname) as f:
+ # st()
+ labels = json.load(f)['labels']
+ if labels is None:
+ return None
+ labels = dict(labels)
+ labels_ = []
+ for fname, _ in labels.items():
+ # if 'mirror' not in fname:
+ labels_.append(labels[fname])
+ labels = labels_
+ # !
+ # labels = [
+ # labels[fname.replace('\\', '/')] for fname in self._image_fnames
+ # ]
+ labels = np.array(labels)
+ labels = labels.astype({1: np.int64, 2: np.float32}[labels.ndim])
+ self._raw_labels = labels
+ return labels
+
+
+#----------------------------------------------------------------------------
+
+
+# class ImageFolderDatasetUnzipped(ImageFolderDataset):
+
+# def __init__(self, path, resolution=None, **super_kwargs):
+# super().__init__(path, resolution, **super_kwargs)
+
+
+# class ImageFolderDatasetPose(ImageFolderDataset):
+
+# def __init__(
+# self,
+# path, # Path to directory or zip.
+# resolution=None, # Ensure specific resolution, None = highest available.
+# **super_kwargs, # Additional arguments for the Dataset base class.
+# ):
+# super().__init__(path, resolution, **super_kwargs)
+# # only return labels
+
+# def __len__(self):
+# return self._raw_idx.size
+# # return self._get_raw_labels().shape[0]
+
+# def __getitem__(self, idx):
+# # image = self._load_raw_image(self._raw_idx[idx])
+# # assert isinstance(image, np.ndarray)
+# # assert list(image.shape) == self.image_shape
+# # assert image.dtype == np.uint8
+# # if self._xflip[idx]:
+# # assert image.ndim == 3 # CHW
+# # image = image[:, :, ::-1]
+# return dict(c=self.get_label(idx), ) # return dict here
+
+
+class ImageFolderDatasetLMDB(ImageFolderDataset):
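+    # Variant that skips the resizing/normalization above and returns the raw
+    # uint8 image plus its matte (presumably for LMDB export, per the name).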
+ def __init__(self, path, resolution=None, reso_gt=128, **super_kwargs):
+ super().__init__(path, resolution, reso_gt, **super_kwargs)
+
+ def __getitem__(self, idx):
+ # print(self._raw_idx[idx], idx)
+
+ matte = self._load_raw_matte(self._raw_idx[idx])
+ assert isinstance(matte, np.ndarray)
+ assert list(matte.shape)[1:] == self.image_shape[1:]
+ if self._xflip[idx]:
+            assert matte.ndim == 3  # CHW
+ matte = matte[:, :, ::-1]
+ # matte_orig = matte.copy().astype(np.float32) / 255
+ matte_orig = matte.copy().astype(np.float32) # segmentation version
+ assert matte_orig.max() <= 1 # some ffhq images are dirty, so may be all zero
+ matte = np.transpose(matte,
+ # (1, 2, 0)).astype(np.float32) / 255 # [0,1] range
+ (1, 2, 0)).astype(np.float32) # [0,1] range
+
+ # ! load 512 matte
+ # matte = cv2.resize(matte, (self.reso_gt, self.reso_gt),
+ # interpolation=cv2.INTER_NEAREST)
+
+ assert matte.min() >= 0 and matte.max(
+ ) <= 1, f'{matte.min(), matte.max()}'
+
+        if matte.ndim == 3:  # reduce HWC -> HW, keep the first channel
+ matte = matte[..., 0]
+
+ image = self._load_raw_image(self._raw_idx[idx])
+
+ assert isinstance(image, np.ndarray)
+ assert list(image.shape) == self.image_shape
+ assert image.dtype == np.uint8
+ if self._xflip[idx]:
+ assert image.ndim == 3 # CHW
+ image = image[:, :, ::-1]
+
+ # blending
+ # blending = True
+ # blending = False
+ # if blending:
+ # image = image * matte_orig + (1 - matte_orig) * cv2.GaussianBlur(
+ # image, (5, 5), cv2.BORDER_DEFAULT)
+ # image = image * matte_orig
+
+ # image = np.transpose(image, (1, 2, 0)).astype(
+ # np.float32
+ # ) / 255 # H W C for torchvision process, normalize to [0,1]
+
+ # image_sr = torch.from_numpy(image)[..., :3].permute(
+ # 2, 0, 1) * 2 - 1 # normalize to [-1,1]
+ # image_to_encoder = self.normalize_for_encoder_input(image)
+
+ # image_gt = cv2.resize(image, (self.reso_gt, self.reso_gt),
+ # interpolation=cv2.INTER_AREA)
+ # image_gt = torch.from_numpy(image_gt)[..., :3].permute(
+ # 2, 0, 1) * 2 - 1 # normalize to [-1,1]
+
+ return dict(
+ c=self.get_label(idx),
+ # img_to_encoder=image_to_encoder, # 224
+ # img_sr=image_sr, # 512
+            img=image,  # raw uint8 CHW (resizing/normalization above is disabled)
+ # depth=torch.zeros_like(image_gt)[0, ...] # type: ignore
+ # depth=matte,
+ depth_mask=matte,
+ ) # return dict here
+
+class LMDBDataset_MV_Compressed_eg3d(LMDBDataset_MV_Compressed):
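+    # LMDB-backed variant: image, depth mask and camera are stored compressed
+    # per key and decoded on the fly in __getitem__.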
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ **kwargs):
+ super().__init__(lmdb_path, reso, reso_encoder, imgnet_normalize,
+ **kwargs)
+
+ self.normalize_for_encoder_input = transforms.Compose([
+ transforms.ToTensor(),
+ transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]),
+ transforms.Resize(size=(self.reso_encoder, self.reso_encoder),
+ antialias=True), # type: ignore
+ ])
+
+ self.normalize_for_gt = transforms.Compose([
+ transforms.ToTensor(),
+ transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
+ transforms.Resize(size=(self.reso, self.reso),
+ antialias=True), # type: ignore
+ ])
+
+ def __getitem__(self, idx):
+ # sample = super(LMDBDataset).__getitem__(idx)
+
+ # do gzip uncompress online
+ with self.env.begin(write=False) as txn:
+ img_key = f'{idx}-img'.encode('utf-8')
+ image = self.load_image_fn(txn.get(img_key))
+
+ depth_key = f'{idx}-depth_mask'.encode('utf-8')
+ # depth = decompress_array(txn.get(depth_key), (512,512), np.float32)
+ depth = decompress_array(txn.get(depth_key), (64,64), np.float32)
+
+ c_key = f'{idx}-c'.encode('utf-8')
+ c = decompress_array(txn.get(c_key), (25, ), np.float32)
+
+ # ! post processing, e.g., normalizing
+ depth = cv2.resize(depth, (self.reso, self.reso),
+ interpolation=cv2.INTER_NEAREST)
+
+ image = np.transpose(image, (1, 2, 0)).astype(
+ np.float32
+ ) / 255 # H W C for torchvision process, normalize to [0,1]
+
+ image_sr = torch.from_numpy(image)[..., :3].permute(
+ 2, 0, 1) * 2 - 1 # normalize to [-1,1]
+ image_to_encoder = self.normalize_for_encoder_input(image)
+
+ image_gt = cv2.resize(image, (self.reso, self.reso),
+ interpolation=cv2.INTER_AREA)
+ image_gt = torch.from_numpy(image_gt)[..., :3].permute(
+ 2, 0, 1) * 2 - 1 # normalize to [-1,1]
+
+
+ return {
+ 'img_to_encoder': image_to_encoder, # 224
+ 'img_sr': image_sr, # 512
+ 'img': image_gt, # [-1,1] range
+ 'c': c,
+ 'depth': depth,
+ 'depth_mask': depth,
+ }
diff --git a/datasets/g_buffer_objaverse.py b/datasets/g_buffer_objaverse.py
new file mode 100644
index 0000000000000000000000000000000000000000..635110c2dc408c4b79dbf39ffee97f4cb21b611f
--- /dev/null
+++ b/datasets/g_buffer_objaverse.py
@@ -0,0 +1,6363 @@
+import os
+from tqdm import tqdm
+import kiui
+from kiui.op import recenter
+import kornia
+import collections
+import math
+import time
+import itertools
+import pickle
+from typing import Any
+import lmdb
+import cv2
+import trimesh
+
+cv2.setNumThreads(0)  # disable OpenCV threading; avoids contention with DataLoader workers
+# import imageio
+import imageio.v3 as imageio
+import numpy as np
+from PIL import Image
+import Imath
+import OpenEXR
+from pdb import set_trace as st
+from pathlib import Path
+import torchvision
+from torchvision.transforms import v2
+
+from einops import rearrange, repeat
+from functools import partial
+import io
+from scipy.stats import special_ortho_group
+import gzip
+import random
+import torch
+import torch as th
+from torch import nn
+import torch.nn.functional as F
+from torch.utils.data import DataLoader, Dataset
+from torchvision import transforms
+from torch.utils.data.distributed import DistributedSampler
+from pathlib import Path
+import lz4.frame
+from nsr.volumetric_rendering.ray_sampler import RaySampler
+import point_cloud_utils as pcu
+
+import torch.multiprocessing
+
+# torch.multiprocessing.set_sharing_strategy('file_system')
+
+from utils.general_utils import PILtoTorch, matrix_to_quaternion
+
+from guided_diffusion import logger
+import json
+
+import webdataset as wds
+from webdataset.shardlists import expand_source
+# st()
+
+from .shapenet import LMDBDataset, LMDBDataset_MV_Compressed, decompress_and_open_image_gzip, decompress_array
+from kiui.op import safe_normalize
+
+from utils.gs_utils.graphics_utils import getWorld2View2, getProjectionMatrix, getView2World
+
+from nsr.camera_utils import generate_input_camera
+
+
+def random_rotation_matrix():
+ # Generate a random rotation matrix in 3D
+ random_rotation_3d = special_ortho_group.rvs(3)
+
+ # Embed the 3x3 rotation matrix into a 4x4 matrix
+ rotation_matrix_4x4 = np.eye(4)
+ rotation_matrix_4x4[:3, :3] = random_rotation_3d
+
+ return rotation_matrix_4x4
+
+
+def fov2focal(fov, pixels):
+ return pixels / (2 * math.tan(fov / 2))
+
+
+def focal2fov(focal, pixels):
+ return 2 * math.atan(pixels / (2 * focal))
+
+
+def resize_depth_mask(depth_to_resize, resolution):
+ depth_resized = cv2.resize(depth_to_resize, (resolution, resolution),
+ interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+ return depth_resized, depth_resized > 0 # type: ignore
+
+
+def resize_depth_mask_Tensor(depth_to_resize, resolution):
+
+ if depth_to_resize.shape[-1] != resolution:
+ depth_resized = torch.nn.functional.interpolate(
+ input=depth_to_resize.unsqueeze(1),
+ size=(resolution, resolution),
+ # mode='bilinear',
+ mode='nearest',
+ # align_corners=False,
+ ).squeeze(1)
+ else:
+ depth_resized = depth_to_resize
+
+ return depth_resized.float(), depth_resized > 0 # type: ignore
+
+
+class PostProcess:
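+    # Post-processing for chunked multi-view samples: resizing, value
+    # normalization, optional Plücker / depth / xyz channels for the encoder
+    # input, and camera (and point-cloud) canonicalization.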
+
+ def __init__(
+ self,
+ reso,
+ reso_encoder,
+ imgnet_normalize,
+ plucker_embedding,
+ decode_encode_img_only,
+ mv_input,
+ split_chunk_input,
+ duplicate_sample,
+ append_depth,
+ gs_cam_format,
+ orthog_duplicate,
+ frame_0_as_canonical,
+ pcd_path=None,
+ load_pcd=False,
+ split_chunk_size=8,
+ append_xyz=False,
+ ) -> None:
+
+ self.load_pcd = load_pcd
+
+ if pcd_path is None: # hard-coded
+ pcd_path = '/cpfs01/user/lanyushi.p/data/FPS_PCD/pcd-V=6_256_again/fps-pcd/'
+
+ self.pcd_path = Path(pcd_path)
+
+ self.append_xyz = append_xyz
+ if append_xyz:
+ assert append_depth is False
+ self.frame_0_as_canonical = frame_0_as_canonical
+ self.gs_cam_format = gs_cam_format
+ self.append_depth = append_depth
+ self.plucker_embedding = plucker_embedding
+ self.decode_encode_img_only = decode_encode_img_only
+ self.duplicate_sample = duplicate_sample
+ self.orthog_duplicate = orthog_duplicate
+
+ self.zfar = 100.0
+ self.znear = 0.01
+
+ transformations = []
+ if not split_chunk_input:
+ transformations.append(transforms.ToTensor())
+
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+
+ self.reso_encoder = reso_encoder
+ self.reso = reso
+ self.instance_data_length = 40
+ # self.pair_per_instance = 1 # compat
+ self.mv_input = mv_input
+ self.split_chunk_input = split_chunk_input # 8
+ self.chunk_size = split_chunk_size if split_chunk_input else 40
+ # assert self.chunk_size in [8, 10]
+ self.V = self.chunk_size // 2 # 4 views as input
+ # else:
+ # assert self.chunk_size == 20
+ # self.V = 12 # 6 + 6 here
+
+ # st()
+ assert split_chunk_input
+ self.pair_per_instance = 1
+ # else:
+ # self.pair_per_instance = 4 if mv_input else 2 # check whether improves IO
+
+ self.ray_sampler = RaySampler() # load xyz
+
+ def gen_rays(self, c):
+ # Generate rays
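+        # c is a flattened 25-dim camera: c[:16] is the 4x4 cam2world pose,
+        # c[16:] the 3x3 intrinsics (focal lengths and principal point in
+        # normalized pixel units).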
+ intrinsics, c2w = c[16:], c[:16].reshape(4, 4)
+ self.h = self.reso_encoder
+ self.w = self.reso_encoder
+ yy, xx = torch.meshgrid(
+ torch.arange(self.h, dtype=torch.float32) + 0.5,
+ torch.arange(self.w, dtype=torch.float32) + 0.5,
+ indexing='ij')
+
+ # normalize to 0-1 pixel range
+ yy = yy / self.h
+ xx = xx / self.w
+
+ # K = np.array([f_x, 0, w / 2, 0, f_y, h / 2, 0, 0, 1]).reshape(3, 3)
+ cx, cy, fx, fy = intrinsics[2], intrinsics[5], intrinsics[
+ 0], intrinsics[4]
+ # cx *= self.w
+ # cy *= self.h
+
+ # f_x = f_y = fx * h / res_raw
+ c2w = torch.from_numpy(c2w).float()
+
+ xx = (xx - cx) / fx
+ yy = (yy - cy) / fy
+ zz = torch.ones_like(xx)
+ dirs = torch.stack((xx, yy, zz), dim=-1) # OpenCV convention
+ dirs /= torch.norm(dirs, dim=-1, keepdim=True)
+ dirs = dirs.reshape(-1, 3, 1)
+ del xx, yy, zz
+ # st()
+ dirs = (c2w[None, :3, :3] @ dirs)[..., 0]
+
+ origins = c2w[None, :3, 3].expand(self.h * self.w, -1).contiguous()
+ origins = origins.view(self.h, self.w, 3)
+ dirs = dirs.view(self.h, self.w, 3)
+
+ return origins, dirs
+
+ def _post_process_batch_sample(self,
+ sample): # sample is an instance batch here
+ caption, ins = sample[-2:]
+ instance_samples = []
+
+ for instance_idx in range(sample[0].shape[0]):
+ instance_samples.append(
+ self._post_process_sample(item[instance_idx]
+ for item in sample[:-2]))
+
+ return (*instance_samples, caption, ins)
+
+ def _post_process_sample(self, data_sample):
+ # raw_img, depth, c, bbox, caption, ins = data_sample
+ # st()
+ raw_img, depth, c, bbox = data_sample
+
+ bbox = (bbox * (self.reso / 256)).astype(
+ np.uint8) # normalize bbox to the reso range
+
+ if raw_img.shape[-2] != self.reso_encoder:
+ img_to_encoder = cv2.resize(raw_img,
+ (self.reso_encoder, self.reso_encoder),
+ interpolation=cv2.INTER_LANCZOS4)
+ else:
+ img_to_encoder = raw_img
+
+ img_to_encoder = self.normalize(img_to_encoder)
+ if self.plucker_embedding:
+ rays_o, rays_d = self.gen_rays(c)
+ rays_plucker = torch.cat(
+ [torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ dim=-1).permute(2, 0, 1) # [h, w, 6] -> 6,h,w
+ img_to_encoder = torch.cat([img_to_encoder, rays_plucker], 0)
+
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ img = torch.from_numpy(img).permute(2, 0, 1) / 127.5 - 1
+
+ if self.decode_encode_img_only:
+ depth_reso, fg_mask_reso = depth, depth
+ else:
+ depth_reso, fg_mask_reso = resize_depth_mask(depth, self.reso)
+
+ # return {
+ # # **sample,
+ # 'img_to_encoder': img_to_encoder,
+ # 'img': img,
+ # 'depth_mask': fg_mask_reso,
+ # # 'img_sr': img_sr,
+ # 'depth': depth_reso,
+ # 'c': c,
+ # 'bbox': bbox,
+ # 'caption': caption,
+ # 'ins': ins
+ # # ! no need to load img_sr for now
+ # }
+ # if len(data_sample) == 4:
+ return (img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox)
+ # else:
+ # return (img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox, data_sample[-2], data_sample[-1])
+
+ def canonicalize_pts(self, c, pcd, for_encoder=True, canonical_idx=0):
+ # pcd: sampled in world space
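+        # Re-express the world-space points in the canonical frames of view 0
+        # and view V (the first view of each chunk half), mirroring the camera
+        # normalization performed in normalize_camera().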
+
+ assert c.shape[0] == self.chunk_size
+ assert for_encoder
+
+ # st()
+
+ B = c.shape[0]
+        camera_poses = c[:, :16].reshape(B, 4, 4)  # B x 4 x 4
+
+ cam_radius = np.linalg.norm(
+ c[[0, self.V]][:, :16].reshape(2, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 2, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(camera_poses[[0, self.V
+ ]]) # B 4 4
+ transform = np.expand_dims(transform, axis=1) # B 1 4 4
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ repeated_homo_pcd = np.repeat(np.concatenate(
+ [pcd, np.ones_like(pcd[..., 0:1])], -1)[None],
+ 2,
+ axis=0)[..., None] # B N 4 1
+ new_pcd = (transform @ repeated_homo_pcd)[..., :3, 0] # 2 N 3
+
+ return new_pcd
+
+ def canonicalize_pts_v6(self, c, pcd, for_encoder=True, canonical_idx=0):
+ exit() # deprecated function
+ # pcd: sampled in world space
+
+ assert c.shape[0] == self.chunk_size
+ assert for_encoder
+ encoder_canonical_idx = [0, 6, 12, 18]
+
+ B = c.shape[0]
+        camera_poses = c[:, :16].reshape(B, 4, 4)  # B x 4 x 4
+
+ cam_radius = np.linalg.norm(
+ c[encoder_canonical_idx][:, :16].reshape(4, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 4, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(
+ camera_poses[encoder_canonical_idx]) # B 4 4
+ transform = np.expand_dims(transform, axis=1) # B 1 4 4
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ repeated_homo_pcd = np.repeat(np.concatenate(
+ [pcd, np.ones_like(pcd[..., 0:1])], -1)[None],
+ 4,
+ axis=0)[..., None] # B N 4 1
+ new_pcd = (transform @ repeated_homo_pcd)[..., :3, 0] # 2 N 3
+
+ return new_pcd
+
+ def normalize_camera(self, c, for_encoder=True, canonical_idx=0):
+        assert c.shape[0] == self.chunk_size  # 8 or 10
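+        # Map the canonical pose(s) to a fixed position at the original camera
+        # radius along -z, then apply the same rigid transform to every other
+        # pose in the chunk.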
+
+ B = c.shape[0]
+        camera_poses = c[:, :16].reshape(B, 4, 4)  # B x 4 x 4
+
+ if for_encoder:
+ encoder_canonical_idx = [0, self.V]
+ # st()
+ cam_radius = np.linalg.norm(
+ c[encoder_canonical_idx][:, :16].reshape(2, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 2, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(
+ camera_poses[encoder_canonical_idx])
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ new_camera_poses = np.repeat(
+ transform, self.V, axis=0
+ ) @ camera_poses # [V, 4, 4]. np.repeat() is th.repeat_interleave()
+
+ else:
+ cam_radius = np.linalg.norm(
+ c[canonical_idx][:16].reshape(4, 4)[:3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.eye(4)
+ frame1_fixed_pos[2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(
+ camera_poses[canonical_idx]) # 4,4
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ new_camera_poses = np.repeat(transform[None],
+ self.chunk_size,
+ axis=0) @ camera_poses # [V, 4, 4]
+
+ c = np.concatenate([new_camera_poses.reshape(B, 16), c[:, 16:]],
+ axis=-1)
+
+ return c
+
+ def normalize_camera_v6(self, c, for_encoder=True, canonical_idx=0):
+
+ B = c.shape[0]
+        camera_poses = c[:, :16].reshape(B, 4, 4)  # B x 4 x 4
+
+ if for_encoder:
+ assert c.shape[0] == 24
+ encoder_canonical_idx = [0, 6, 12, 18]
+ cam_radius = np.linalg.norm(
+ c[encoder_canonical_idx][:, :16].reshape(4, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 4, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(
+ camera_poses[encoder_canonical_idx])
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ new_camera_poses = np.repeat(transform, 6,
+ axis=0) @ camera_poses # [V, 4, 4]
+
+ else:
+ assert c.shape[0] == 12
+ cam_radius = np.linalg.norm(
+ c[canonical_idx][:16].reshape(4, 4)[:3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.eye(4)
+ frame1_fixed_pos[2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(
+ camera_poses[canonical_idx]) # 4,4
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ new_camera_poses = np.repeat(transform[None], 12,
+ axis=0) @ camera_poses # [V, 4, 4]
+
+ c = np.concatenate([new_camera_poses.reshape(B, 16), c[:, 16:]],
+ axis=-1)
+
+ return c
+
+ def get_plucker_ray(self, c):
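+        # Per-view 6-channel Plücker embedding (o x d, d), concatenated to the
+        # encoder input when plucker_embedding is enabled.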
+ rays_plucker = []
+ for idx in range(c.shape[0]):
+ rays_o, rays_d = self.gen_rays(c[idx])
+ rays_plucker.append(
+ torch.cat([torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ dim=-1).permute(2, 0, 1)) # [h, w, 6] -> 6,h,w
+ rays_plucker = torch.stack(rays_plucker, 0)
+ return rays_plucker
+
+ def _unproj_depth_given_c(self, c, depth):
+ # get xyz hxw for each pixel, like MCC
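+        # Back-project every pixel along its ray (xyz = o + depth * d) into a
+        # B x 3 x H x W world-space position map; the scene is assumed to be
+        # normalized to roughly [-0.45, 0.45] (see the clipping below).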
+ # img_size = self.reso
+ img_size = depth.shape[-1]
+
+ B = c.shape[0]
+
+ cam2world_matrix = c[:, :16].reshape(B, 4, 4)
+ intrinsics = c[:, 16:25].reshape(B, 3, 3)
+
+ ray_origins, ray_directions = self.ray_sampler( # shape:
+ cam2world_matrix, intrinsics, img_size)[:2]
+
+ depth = depth.reshape(B, -1).unsqueeze(-1)
+
+ xyz = ray_origins + depth * ray_directions # BV HW 3, already in the world space
+ xyz = xyz.reshape(B, img_size, img_size, 3).permute(0, 3, 1,
+ 2) # B 3 H W
+ xyz = xyz.clip(
+ -0.45, 0.45) # g-buffer saves depth with anti-alias = True .....
+ xyz = torch.where(xyz.abs() == 0.45, 0, xyz) # no boundary here? Yes.
+
+ return xyz
+
+ def _post_process_sample_batch(self, data_sample):
+ # raw_img, depth, c, bbox, caption, ins = data_sample
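+        # Batched variant of _post_process_sample: consumes stacked arrays for
+        # one chunk of views and returns torch tensors.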
+
+ alpha = None
+ if len(data_sample) == 4:
+ raw_img, depth, c, bbox = data_sample
+ else:
+ raw_img, depth, c, alpha, bbox = data_sample # put c to position 2
+
+ if isinstance(depth, tuple):
+ self.append_normal = True
+ depth, normal = depth
+ else:
+ self.append_normal = False
+ normal = None
+
+ # if raw_img.shape[-1] == 4:
+ # depth_reso, _ = resize_depth_mask_Tensor(
+ # torch.from_numpy(depth), self.reso)
+ # raw_img, fg_mask_reso = raw_img[..., :3], raw_img[..., -1]
+ # # st() # ! check has 1 dim in alpha?
+ # else:
+ if not isinstance(depth, torch.Tensor):
+ depth = torch.from_numpy(depth).float()
+ else:
+ depth = depth.float()
+
+ depth_reso, fg_mask_reso = resize_depth_mask_Tensor(depth, self.reso)
+
+ if alpha is None:
+ alpha = fg_mask_reso
+ else:
+ # ! resize first
+ # st()
+ alpha = torch.from_numpy(alpha / 255.0).float()
+            if alpha.shape[-1] != self.reso:  # bilinear interpolation resize
+ alpha = torch.nn.functional.interpolate(
+ input=alpha.unsqueeze(1),
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ ).squeeze(1)
+
+ if self.reso < 256:
+ bbox = (bbox * (self.reso / 256)).astype(
+ np.uint8) # normalize bbox to the reso range
+ else: # 3dgs
+ bbox = bbox.astype(np.uint8)
+
+        # st()  # ! should stay compatible with 320-res input
+
+ # assert raw_img.shape[-2] == self.reso_encoder
+
+ # img_to_encoder = cv2.resize(
+ # raw_img, (self.reso_encoder, self.reso_encoder),
+ # interpolation=cv2.INTER_LANCZOS4)
+ # else:
+ # img_to_encoder = raw_img
+
+ raw_img = torch.from_numpy(raw_img).permute(0, 3, 1,
+ 2) / 255.0 # [0,1]
+
+ if normal is not None:
+ normal = torch.from_numpy(normal).permute(0,3,1,2)
+
+ # if raw_img.shape[-1] != self.reso:
+
+
+ if raw_img.shape[1] != self.reso_encoder:
+ img_to_encoder = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso_encoder, self.reso_encoder),
+ mode='bilinear',
+ align_corners=False,)
+ img_to_encoder = self.normalize(img_to_encoder)
+
+ if normal is not None:
+ normal_for_encoder = torch.nn.functional.interpolate(
+ input=normal,
+ size=(self.reso_encoder, self.reso_encoder),
+ # mode='bilinear',
+ mode='nearest',
+ # align_corners=False,
+ )
+
+ else:
+ img_to_encoder = self.normalize(raw_img)
+ normal_for_encoder = normal
+
+ if raw_img.shape[-1] != self.reso:
+ img = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ ) # [-1,1] range
+ img = img * 2 - 1 # as gt
+
+ if normal is not None:
+ normal = torch.nn.functional.interpolate(
+ input=normal,
+ size=(self.reso, self.reso),
+ # mode='bilinear',
+ mode='nearest',
+ # align_corners=False,
+ )
+
+ else:
+ img = raw_img * 2 - 1
+
+
+ # fg_mask_reso = depth[..., -1:] # ! use
+
+ pad_v6_fn = lambda x: torch.concat([x, x[:4]], 0) if isinstance(
+ x, torch.Tensor) else np.concatenate([x, x[:4]], 0)
+
+ # ! processing encoder input image.
+
+ # ! normalize camera feats
+ if self.frame_0_as_canonical: # 4 views as input per batch
+
+ # if self.chunk_size in [8, 10]:
+ if True:
+ # encoder_canonical_idx = [0, 4]
+ # encoder_canonical_idx = [0, self.chunk_size//2]
+ encoder_canonical_idx = [0, self.V]
+
+ c_for_encoder = self.normalize_camera(c, for_encoder=True)
+ c_for_render = self.normalize_camera(
+ c,
+ for_encoder=False,
+ canonical_idx=encoder_canonical_idx[0]
+ ) # allocated to nv_c, frame0 (in 8 views) as the canonical
+ c_for_render_nv = self.normalize_camera(
+ c,
+ for_encoder=False,
+ canonical_idx=encoder_canonical_idx[1]
+ ) # allocated to nv_c, frame0 (in 8 views) as the canonical
+ c_for_render = np.concatenate([c_for_render, c_for_render_nv],
+ axis=-1) # for compat
+ # st()
+
+ else:
+ assert self.chunk_size == 20
+ c_for_encoder = self.normalize_camera_v6(c,
+ for_encoder=True) #
+
+ paired_c_0 = np.concatenate([c[0:6], c[12:18]])
+ paired_c_1 = np.concatenate([c[6:12], c[18:24]])
+
+ def process_paired_camera(paired_c):
+ c_for_render = self.normalize_camera_v6(
+ paired_c, for_encoder=False, canonical_idx=0
+ ) # allocated to nv_c, frame0 (in 8 views) as the canonical
+ c_for_render_nv = self.normalize_camera_v6(
+ paired_c, for_encoder=False, canonical_idx=6
+ ) # allocated to nv_c, frame0 (in 8 views) as the canonical
+
+ c_for_render = np.concatenate(
+ [c_for_render, c_for_render_nv], axis=-1) # for compat
+
+ return c_for_render
+
+ paired_c_for_render_0 = process_paired_camera(paired_c_0)
+ paired_c_for_render_1 = process_paired_camera(paired_c_1)
+
+ c_for_render = np.empty(shape=(24, 50))
+ c_for_render[list(range(6)) +
+ list(range(12, 18))] = paired_c_for_render_0
+ c_for_render[list(range(6, 12)) +
+ list(range(18, 24))] = paired_c_for_render_1
+
+ else: # use g-buffer canonical c
+ c_for_encoder, c_for_render = c, c
+
+ if self.append_normal and normal is not None:
+ img_to_encoder = torch.cat([img_to_encoder, normal_for_encoder],
+ # img_to_encoder = torch.cat([img_to_encoder, normal],
+ 1) # concat in C dim
+
+ if self.plucker_embedding:
+ # rays_plucker = self.get_plucker_ray(c)
+ rays_plucker = self.get_plucker_ray(c_for_encoder)
+ img_to_encoder = torch.cat([img_to_encoder, rays_plucker],
+ 1) # concat in C dim
+
+ # torchvision.utils.save_image(raw_img, 'tmp/inp.png', normalize=True, value_range=(0,1), nrow=1, padding=0)
+ # torchvision.utils.save_image(rays_plucker[:,:3], 'tmp/plucker.png', normalize=True, value_range=(-1,1), nrow=1, padding=0)
+ # torchvision.utils.save_image(depth_reso.unsqueeze(1), 'tmp/depth.png', normalize=True, nrow=1, padding=0)
+
+ c = torch.from_numpy(c_for_render).to(torch.float32)
+
+ if self.append_depth:
+ normalized_depth = torch.from_numpy(depth_reso).clone().unsqueeze(
+ 1) # min=0
+ # normalized_depth -= torch.min(normalized_depth) # always 0 here
+ # normalized_depth /= torch.max(normalized_depth)
+ # normalized_depth = normalized_depth.unsqueeze(1) * 2 - 1 # normalize to [-1,1]
+ # st()
+ img_to_encoder = torch.cat([img_to_encoder, normalized_depth],
+ 1) # concat in C dim
+ elif self.append_xyz:
+ depth_for_unproj = depth.clone()
+ depth_for_unproj[depth_for_unproj ==
+ 0] = 1e10 # so that rays_o will not appear in the final pcd.
+ xyz = self._unproj_depth_given_c(c.float(), depth)
+ # pcu.save_mesh_v(f'unproj_xyz_before_Nearest.ply', xyz[0:9].float().detach().permute(0,2,3,1).reshape(-1,3).cpu().numpy(),)
+
+ if xyz.shape[-1] != self.reso_encoder:
+ xyz = torch.nn.functional.interpolate(
+ input=xyz, # [-1,1]
+ # size=(self.reso, self.reso),
+ size=(self.reso_encoder, self.reso_encoder),
+ mode='nearest',
+ )
+
+ # pcu.save_mesh_v(f'unproj_xyz_afterNearest.ply', xyz[0:9].float().detach().permute(0,2,3,1).reshape(-1,3).cpu().numpy(),)
+ # st()
+ img_to_encoder = torch.cat([img_to_encoder, xyz], 1)
+
+ return (img_to_encoder, img, alpha, depth_reso, c,
+ torch.from_numpy(bbox))
+
+ def rand_sample_idx(self):
+ return random.randint(0, self.instance_data_length - 1)
+
+ def rand_pair(self):
+ return (self.rand_sample_idx() for _ in range(2))
+
+ def paired_post_process(self, sample):
+ # repeat n times?
+ all_inp_list = []
+ all_nv_list = []
+ caption, ins = sample[-2:]
+ # expanded_return = []
+ for _ in range(self.pair_per_instance):
+ cano_idx, nv_idx = self.rand_pair()
+ cano_sample = self._post_process_sample(item[cano_idx]
+ for item in sample[:-2])
+ nv_sample = self._post_process_sample(item[nv_idx]
+ for item in sample[:-2])
+ all_inp_list.extend(cano_sample)
+ all_nv_list.extend(nv_sample)
+ return (*all_inp_list, *all_nv_list, caption, ins)
+ # return [cano_sample, nv_sample, caption, ins]
+ # return (*cano_sample, *nv_sample, caption, ins)
+
+ def get_source_cw2wT(self, source_cameras_view_to_world):
+ return matrix_to_quaternion(
+ source_cameras_view_to_world[:3, :3].transpose(0, 1))
+
+ def c_to_3dgs_format(self, pose):
+ # TODO, switch to torch version (batched later)
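+        # Convert one 25-dim camera vector into the dict of matrices the 3DGS
+        # renderer expects (world/view and full projection transforms, camera
+        # center, FoV).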
+
+        c2w = pose[:16].reshape(4, 4)  # 4x4
+
+ # ! load cam
+ w2c = np.linalg.inv(c2w)
+ R = np.transpose(
+ w2c[:3, :3]) # R is stored transposed due to 'glm' in CUDA code
+ T = w2c[:3, 3]
+ fx = pose[16]
+ FovX = focal2fov(fx, 1)
+ FovY = focal2fov(fx, 1)
+
+ tanfovx = math.tan(FovX * 0.5)
+ tanfovy = math.tan(FovY * 0.5)
+
+ assert tanfovx == tanfovy
+
+ trans = np.array([0.0, 0.0, 0.0])
+ scale = 1.0
+
+ view_world_transform = torch.tensor(getView2World(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+
+ world_view_transform = torch.tensor(getWorld2View2(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+ projection_matrix = getProjectionMatrix(znear=self.znear,
+ zfar=self.zfar,
+ fovX=FovX,
+ fovY=FovY).transpose(0, 1)
+ full_proj_transform = (world_view_transform.unsqueeze(0).bmm(
+ projection_matrix.unsqueeze(0))).squeeze(0)
+ camera_center = world_view_transform.inverse()[3, :3]
+
+ # ! check pytorch3d camera system alignment.
+
+ # item.update(viewpoint_cam=[viewpoint_cam])
+ c = {}
+ #
+ c["source_cv2wT_quat"] = self.get_source_cw2wT(view_world_transform)
+ c.update(
+ # projection_matrix=projection_matrix, # K
+ R=torch.from_numpy(R),
+ T=torch.from_numpy(T),
+ cam_view=world_view_transform, # world_view_transform
+ cam_view_proj=full_proj_transform, # full_proj_transform
+ cam_pos=camera_center,
+ tanfov=tanfovx, # TODO, fix in the renderer
+ orig_pose=torch.from_numpy(pose),
+ orig_c2w=torch.from_numpy(c2w),
+ orig_w2c=torch.from_numpy(w2c),
+ orig_intrin=torch.from_numpy(pose[16:]).reshape(3,3),
+ # tanfovy=tanfovy,
+ )
+
+ return c # dict for gs rendering
+
+ def paired_post_process_chunk(self, sample):
+ # st()
+
+ # sample_npz, ins, caption = sample_pyd # three items
+ # sample = *(sample[0][k] for k in ['raw_img', 'depth', 'c', 'bbox']), sample[-1], sample[-2]
+
+ # repeat n times?
+ all_inp_list = []
+ all_nv_list = []
+ auxiliary_sample = list(sample[-2:])
+ # caption, ins = sample[-2:]
+ ins = sample[-1]
+
+ assert sample[0].shape[0] == self.chunk_size # random chunks
+ # expanded_return = []
+
+ if self.load_pcd:
+ # fps_pcd = pcu.load_mesh_v(
+ # # str(self.pcd_path / ins / 'fps-24576.ply')) # N, 3
+ # str(self.pcd_path / ins / 'fps-4096.ply')) # N, 3
+ # # 'fps-4096.ply')) # N, 3
+ fps_pcd = trimesh.load(str(self.pcd_path / ins / 'fps-4096.ply')).vertices
+
+ auxiliary_sample += [fps_pcd]
+
+ assert self.duplicate_sample
+ # st()
+ if self.duplicate_sample:
+            # ! shuffle before processing, since frame_0_as_canonical fixes c.
+
+ if self.chunk_size in [20, 18, 16, 12]:
+ shuffle_sample = sample[:-2] # no order shuffle required
+ else:
+ shuffle_sample = []
+ # indices = torch.randperm(self.chunk_size)
+ indices = np.random.permutation(self.chunk_size)
+ for _, item in enumerate(sample[:-2]):
+ shuffle_sample.append(item[indices]) # random shuffle
+
+ processed_sample = self._post_process_sample_batch(shuffle_sample)
+
+            # ! process pcd if frame_0 alignment
+
+ if self.load_pcd:
+ if self.frame_0_as_canonical:
+ # ! normalize camera feats
+
+ # normalized camera feats as in paper (transform the first pose to a fixed position)
+ # if self.chunk_size == 20:
+ # auxiliary_sample[-1] = self.canonicalize_pts_v6(
+ # c=shuffle_sample[2],
+ # pcd=auxiliary_sample[-1],
+ # for_encoder=True) # B N 3
+ # else:
+ auxiliary_sample[-1] = self.canonicalize_pts(
+ c=shuffle_sample[2],
+ pcd=auxiliary_sample[-1],
+ for_encoder=True) # B N 3
+ else:
+ auxiliary_sample[-1] = np.repeat(
+ auxiliary_sample[-1][None], 2,
+                    axis=0)  # share the same camera system, just repeat
+
+ assert not self.orthog_duplicate
+
+ # if self.chunk_size == 8:
+ all_inp_list.extend(item[:self.V] for item in processed_sample)
+ all_nv_list.extend(item[self.V:] for item in processed_sample)
+
+ # elif self.chunk_size == 20: # V=6
+ # # indices_v6 = [np.random.permutation(self.chunk_size)[:12] for _ in range(2)] # random sample 6 views from chunks
+ # all_inp_list.extend(item[:12] for item in processed_sample)
+ # # indices_v6 = np.concatenate([np.arange(12, 20), np.arange(0,4)])
+ # all_nv_list.extend(
+ # item[12:] for item in
+ # processed_sample) # already repeated inside batch fn
+ # else:
+ # raise NotImplementedError(self.chunk_size)
+
+ # else:
+ # all_inp_list.extend(item[:8] for item in processed_sample)
+ # all_nv_list.extend(item[8:] for item in processed_sample)
+
+ # st()
+
+ return (*all_inp_list, *all_nv_list, *auxiliary_sample)
+
+ else:
+            processed_sample = self._post_process_sample_batch(  # skip shuffling to shorten processing time
+ item[:4] for item in sample[:-2])
+
+ all_inp_list.extend(item for item in processed_sample)
+ all_nv_list.extend(item
+ for item in processed_sample) # ! placeholder
+
+ # return (*all_inp_list, *all_nv_list, caption, ins)
+ return (*all_inp_list, *all_nv_list, *auxiliary_sample)
+
+ # randomly shuffle 8 views, avoid overfitting
+
+ def single_sample_create_dict_noBatch(self, sample, prefix=''):
+ # if len(sample) == 1:
+ # sample = sample[0]
+ # assert len(sample) == 6
+ img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox = sample
+
+ if self.gs_cam_format:
+ # TODO, can optimize later after model converges
+ B, V, _ = c.shape # B 4 25
+ c = rearrange(c, 'B V C -> (B V) C').cpu().numpy()
+ # c = c.cpu().numpy()
+ all_gs_c = [self.c_to_3dgs_format(pose) for pose in c]
+ # st()
+ # all_gs_c = self.c_to_3dgs_format(c.cpu().numpy())
+ c = {
+ k:
+ rearrange(torch.stack([gs_c[k] for gs_c in all_gs_c]),
+ '(B V) ... -> B V ...',
+ B=B,
+ V=V)
+ # torch.stack([gs_c[k] for gs_c in all_gs_c])
+ if isinstance(all_gs_c[0][k], torch.Tensor) else all_gs_c[0][k]
+ for k in all_gs_c[0].keys()
+ }
+ # c = collate_gs_c
+
+ return {
+ # **sample,
+ f'{prefix}img_to_encoder': img_to_encoder,
+ f'{prefix}img': img,
+ f'{prefix}depth_mask': fg_mask_reso,
+ f'{prefix}depth': depth_reso,
+ f'{prefix}c': c,
+ f'{prefix}bbox': bbox,
+ }
+
+ def single_sample_create_dict(self, sample, prefix=''):
+ # if len(sample) == 1:
+ # sample = sample[0]
+ # assert len(sample) == 6
+ img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox = sample
+
+ if self.gs_cam_format:
+ # TODO, can optimize later after model converges
+ B, V, _ = c.shape # B 4 25
+ c = rearrange(c, 'B V C -> (B V) C').cpu().numpy()
+ all_gs_c = [self.c_to_3dgs_format(pose) for pose in c]
+ c = {
+ k:
+ rearrange(torch.stack([gs_c[k] for gs_c in all_gs_c]),
+ '(B V) ... -> B V ...',
+ B=B,
+ V=V)
+ if isinstance(all_gs_c[0][k], torch.Tensor) else all_gs_c[0][k]
+ for k in all_gs_c[0].keys()
+ }
+ # c = collate_gs_c
+
+ return {
+ # **sample,
+ f'{prefix}img_to_encoder': img_to_encoder,
+ f'{prefix}img': img,
+ f'{prefix}depth_mask': fg_mask_reso,
+ f'{prefix}depth': depth_reso,
+ f'{prefix}c': c,
+ f'{prefix}bbox': bbox,
+ }
+
+    def single_instance_sample_create_dict(self, sample, prefix=''):
+ assert len(sample) == 42
+
+ inp_sample_list = [[] for _ in range(6)]
+
+ for item in sample[:40]:
+ for item_idx in range(6):
+ inp_sample_list[item_idx].append(item[0][item_idx])
+
+ inp_sample = self.single_sample_create_dict(
+ (torch.stack(item_list) for item_list in inp_sample_list),
+ prefix='')
+
+ return {
+ **inp_sample, #
+ 'caption': sample[-2],
+ 'ins': sample[-1]
+ }
+
+ def decode_gzip(self, sample_pyd, shape=(256, 256)):
+ # sample_npz, ins, caption = sample_pyd # three items
+ # c, bbox, depth, ins, caption, raw_img = sample_pyd[:5], sample_pyd[5:]
+
+ # wds.to_tuple('raw_img.jpeg', 'depth.jpeg',
+ # 'd_near.npy',
+ # 'd_far.npy',
+ # "c.npy", 'bbox.npy', 'ins.txt', 'caption.txt'),
+
+        raw_img, depth, alpha_mask, d_near, d_far, c, bbox, ins, caption = sample_pyd
+ raw_img = rearrange(raw_img, 'h (b w) c -> b h w c', b=self.chunk_size)
+
+ depth = rearrange(depth, 'h (b w) c -> b h w c', b=self.chunk_size)
+
+ alpha_mask = rearrange(
+ alpha_mask, 'h (b w) c -> b h w c', b=self.chunk_size) / 255.0
+
+ d_far = d_far.reshape(self.chunk_size, 1, 1, 1)
+ d_near = d_near.reshape(self.chunk_size, 1, 1, 1)
+ # d = 1 / ( (d_normalized / 255) * (far-near) + near)
+ depth = 1 / ((depth / 255) * (d_far - d_near) + d_near)
+ depth = depth[..., 0] # decoded from jpeg
+
+ # depth = decompress_array(depth['depth'], (self.chunk_size, *shape),
+ # np.float32,
+ # decompress=True,
+ # decompress_fn=lz4.frame.decompress)
+
+ # return raw_img, depth, d_near, d_far, c, bbox, caption, ins
+
+ raw_img = np.concatenate([raw_img, alpha_mask[..., 0:1]], -1)
+
+ return raw_img, depth, c, bbox, caption, ins
+
+ def decode_zip(
+ self,
+ sample_pyd,
+ ):
+ shape = (self.reso_encoder, self.reso_encoder)
+ if isinstance(sample_pyd, tuple):
+ sample_pyd = sample_pyd[0]
+ assert isinstance(sample_pyd, dict)
+
+ raw_img = decompress_and_open_image_gzip(
+ sample_pyd['raw_img'],
+ is_img=True,
+ decompress=True,
+ decompress_fn=lz4.frame.decompress)
+
+ caption = sample_pyd['caption'].decode('utf-8')
+ ins = sample_pyd['ins'].decode('utf-8')
+
+ c = decompress_array(sample_pyd['c'], (
+ self.chunk_size,
+ 25,
+ ),
+ np.float32,
+ decompress=True,
+ decompress_fn=lz4.frame.decompress)
+
+ bbox = decompress_array(
+ sample_pyd['bbox'],
+ (
+ self.chunk_size,
+ 4,
+ ),
+ np.float32,
+ # decompress=False)
+ decompress=True,
+ decompress_fn=lz4.frame.decompress)
+
+ if self.decode_encode_img_only:
+ depth = np.zeros(shape=(self.chunk_size,
+ *shape)) # save loading time
+ else:
+ depth = decompress_array(sample_pyd['depth'],
+ (self.chunk_size, *shape),
+ np.float32,
+ decompress=True,
+ decompress_fn=lz4.frame.decompress)
+
+ # return {'raw_img': raw_img, 'depth': depth, 'bbox': bbox, 'caption': caption, 'ins': ins, 'c': c}
+ # return raw_img, depth, c, bbox, caption, ins
+ # return raw_img, bbox, caption, ins
+ # return bbox, caption, ins
+ return raw_img, depth, c, bbox, caption, ins
+ # ! run single-instance pipeline first
+ # return raw_img[0], depth[0], c[0], bbox[0], caption, ins
+
+ def create_dict_nobatch(self, sample):
+ # sample = [item[0] for item in sample] # wds wrap items in []
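+        # Regroup the flat processed tuple into canonical / novel-view dicts;
+        # with frame_0_as_canonical, input and novel views are also swapped to
+        # double the number of supervision pairs.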
+
+ sample_length = 6
+ # if self.load_pcd:
+ # sample_length += 1
+
+ cano_sample_list = [[] for _ in range(sample_length)]
+ nv_sample_list = [[] for _ in range(sample_length)]
+ # st()
+ # bs = (len(sample)-2) // 6
+ for idx in range(0, self.pair_per_instance):
+
+ cano_sample = sample[sample_length * idx:sample_length * (idx + 1)]
+ nv_sample = sample[sample_length * self.pair_per_instance +
+ sample_length * idx:sample_length *
+ self.pair_per_instance + sample_length *
+ (idx + 1)]
+
+ for item_idx in range(sample_length):
+ if self.frame_0_as_canonical:
+ # ! cycle input/output view for more pairs
+ if item_idx == 4:
+ cano_sample_list[item_idx].append(
+ cano_sample[item_idx][..., :25])
+ nv_sample_list[item_idx].append(
+ nv_sample[item_idx][..., :25])
+
+ cano_sample_list[item_idx].append(
+ nv_sample[item_idx][..., 25:])
+ nv_sample_list[item_idx].append(
+ cano_sample[item_idx][..., 25:])
+
+ else:
+ cano_sample_list[item_idx].append(
+ cano_sample[item_idx])
+ nv_sample_list[item_idx].append(nv_sample[item_idx])
+
+ cano_sample_list[item_idx].append(nv_sample[item_idx])
+ nv_sample_list[item_idx].append(cano_sample[item_idx])
+
+ else:
+ cano_sample_list[item_idx].append(cano_sample[item_idx])
+ nv_sample_list[item_idx].append(nv_sample[item_idx])
+
+ cano_sample_list[item_idx].append(nv_sample[item_idx])
+ nv_sample_list[item_idx].append(cano_sample[item_idx])
+
+ cano_sample = self.single_sample_create_dict_noBatch(
+ (torch.stack(item_list, 0) for item_list in cano_sample_list),
+ prefix=''
+ ) # torch.Size([5, 10, 256, 256]). Since no batch dim here for now.
+
+ nv_sample = self.single_sample_create_dict_noBatch(
+ (torch.stack(item_list, 0) for item_list in nv_sample_list),
+ prefix='nv_')
+
+ ret_dict = {
+ **cano_sample,
+ **nv_sample,
+ }
+
+ if not self.load_pcd:
+ ret_dict.update({'caption': sample[-2], 'ins': sample[-1]})
+
+ else:
+ # if self.frame_0_as_canonical:
+ # # fps_pcd = rearrange( sample[-1], 'B V ... -> (B V) ...') # ! wrong order.
+ # # if self.chunk_size == 8:
+ # fps_pcd = rearrange(
+ # sample[-1], 'B V ... -> (V B) ...') # mimic torch.repeat
+ # # else:
+ # # fps_pcd = rearrange( sample[-1], 'B V ... -> (B V) ...') # ugly code to match the input format...
+ # else:
+ # fps_pcd = sample[-1].repeat(
+ # 2, 1,
+ # 1) # mimic torch.cat(), from torch.Size([3, 4096, 3])
+
+ # ! TODO, check fps_pcd order
+
+ ret_dict.update({
+ 'caption': sample[-3],
+ 'ins': sample[-2],
+ 'fps_pcd': sample[-1]
+ })
+
+ return ret_dict
+
+ def create_dict(self, sample):
+ # sample = [item[0] for item in sample] # wds wrap items in []
+ # st()
+
+ sample_length = 6
+ # if self.load_pcd:
+ # sample_length += 1
+
+ cano_sample_list = [[] for _ in range(sample_length)]
+ nv_sample_list = [[] for _ in range(sample_length)]
+ # st()
+ # bs = (len(sample)-2) // 6
+ for idx in range(0, self.pair_per_instance):
+
+ cano_sample = sample[sample_length * idx:sample_length * (idx + 1)]
+ nv_sample = sample[sample_length * self.pair_per_instance +
+ sample_length * idx:sample_length *
+ self.pair_per_instance + sample_length *
+ (idx + 1)]
+
+ for item_idx in range(sample_length):
+ if self.frame_0_as_canonical:
+ # ! cycle input/output view for more pairs
+ if item_idx == 4:
+ cano_sample_list[item_idx].append(
+ cano_sample[item_idx][..., :25])
+ nv_sample_list[item_idx].append(
+ nv_sample[item_idx][..., :25])
+
+ cano_sample_list[item_idx].append(
+ nv_sample[item_idx][..., 25:])
+ nv_sample_list[item_idx].append(
+ cano_sample[item_idx][..., 25:])
+
+ else:
+ cano_sample_list[item_idx].append(
+ cano_sample[item_idx])
+ nv_sample_list[item_idx].append(nv_sample[item_idx])
+
+ cano_sample_list[item_idx].append(nv_sample[item_idx])
+ nv_sample_list[item_idx].append(cano_sample[item_idx])
+
+ else:
+ cano_sample_list[item_idx].append(cano_sample[item_idx])
+ nv_sample_list[item_idx].append(nv_sample[item_idx])
+
+ cano_sample_list[item_idx].append(nv_sample[item_idx])
+ nv_sample_list[item_idx].append(cano_sample[item_idx])
+
+ # if self.split_chunk_input:
+ # cano_sample = self.single_sample_create_dict(
+ # (torch.cat(item_list, 0) for item_list in cano_sample_list),
+ # prefix='')
+ # nv_sample = self.single_sample_create_dict(
+ # (torch.cat(item_list, 0) for item_list in nv_sample_list),
+ # prefix='nv_')
+
+ # else:
+
+ # st()
+ cano_sample = self.single_sample_create_dict(
+ (torch.cat(item_list, 0) for item_list in cano_sample_list),
+ prefix='') # torch.Size([4, 4, 10, 256, 256])
+
+ nv_sample = self.single_sample_create_dict(
+ (torch.cat(item_list, 0) for item_list in nv_sample_list),
+ prefix='nv_')
+
+ ret_dict = {
+ **cano_sample,
+ **nv_sample,
+ }
+
+ if not self.load_pcd:
+ ret_dict.update({'caption': sample[-2], 'ins': sample[-1]})
+
+ else:
+ if self.frame_0_as_canonical:
+ # fps_pcd = rearrange( sample[-1], 'B V ... -> (B V) ...') # ! wrong order.
+ # if self.chunk_size == 8:
+ fps_pcd = rearrange(
+ sample[-1], 'B V ... -> (V B) ...') # mimic torch.repeat
+ # else:
+ # fps_pcd = rearrange( sample[-1], 'B V ... -> (B V) ...') # ugly code to match the input format...
+ else:
+ fps_pcd = sample[-1].repeat(
+ 2, 1,
+ 1) # mimic torch.cat(), from torch.Size([3, 4096, 3])
+
+ ret_dict.update({
+ 'caption': sample[-3],
+ 'ins': sample[-2],
+ 'fps_pcd': fps_pcd
+ })
+
+ return ret_dict
+
+ def prepare_mv_input(self, sample):
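+        # Fold the (instance, view) layout into a single batch dimension so
+        # that downstream modules see B*V independent frames.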
+
+ # sample = [item[0] for item in sample] # wds wrap items in []
+ bs = len(sample['caption']) # number of instances
+ chunk_size = sample['img'].shape[0] // bs
+
+ assert self.split_chunk_input
+
+ for k, v in sample.items():
+ if isinstance(v, torch.Tensor) and k != 'fps_pcd':
+ sample[k] = rearrange(v, "b f c ... -> (b f) c ...",
+ f=self.V).contiguous()
+
+ # # ! shift nv
+ # else:
+ # for k, v in sample.items():
+ # if k not in ['ins', 'caption']:
+
+ # rolled_idx = torch.LongTensor(
+ # list(
+ # itertools.chain.from_iterable(
+ # list(range(i, sample['img'].shape[0], bs))
+ # for i in range(bs))))
+
+ # v = torch.index_select(v, dim=0, index=rolled_idx)
+ # sample[k] = v
+
+ # # img = sample['img']
+ # # gt = sample['nv_img']
+ # # torchvision.utils.save_image(img[0], 'inp.jpg', normalize=True)
+ # # torchvision.utils.save_image(gt[0], 'nv.jpg', normalize=True)
+
+ # for k, v in sample.items():
+ # if 'nv' in k:
+ # rolled_idx = torch.LongTensor(
+ # list(
+ # itertools.chain.from_iterable(
+ # list(
+ # np.roll(
+ # np.arange(i * chunk_size, (i + 1) *
+ # chunk_size), 4)
+ # for i in range(bs)))))
+
+ # v = torch.index_select(v, dim=0, index=rolled_idx)
+ # sample[k] = v
+
+ # torchvision.utils.save_image(sample['nv_img'], 'nv.png', normalize=True)
+ # torchvision.utils.save_image(sample['img'], 'inp.png', normalize=True)
+
+ return sample
+
+
+def load_dataset(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_lmdb=False,
+ use_wds=False,
+ use_chunk=False,
+ use_lmdb_compressed=False,
+ infi_sampler=True):
+ # st()
+ # dataset_cls = {
+ # 'input_rec': MultiViewDataset,
+ # 'nv': NovelViewDataset,
+ # }[trainer_name]
+ # st()
+ if use_wds:
+ return load_wds_data(file_path, reso, reso_encoder, batch_size,
+ num_workers)
+
+ if use_lmdb:
+ logger.log('using LMDB dataset')
+ # dataset_cls = LMDBDataset_MV # 2.5-3iter/s, but unstable, drops to 1 later.
+
+ if use_lmdb_compressed:
+ if 'nv' in trainer_name:
+ dataset_cls = Objv_LMDBDataset_NV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = Objv_LMDBDataset_MV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = Objv_LMDBDataset_NV_NoCompressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = Objv_LMDBDataset_MV_NoCompressed # 2.5-3iter/s, but unstable, drops to 1 later.
+
+ # dataset = dataset_cls(file_path)
+ elif use_chunk:
+ dataset_cls = ChunkObjaverseDataset
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = NovelViewObjverseDataset
+ else:
+ dataset_cls = MultiViewObjverseDataset # 1.5-2iter/s
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size)
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+
+ if use_chunk:
+
+ def chunk_collate_fn(sample):
+ # st()
+ default_collate_sample = torch.utils.data.default_collate(
+ sample[0])
+ return default_collate_sample
+
+ collate_fn = chunk_collate_fn
+ else:
+ collate_fn = None
+
+ loader = DataLoader(dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ pin_memory=True,
+ persistent_workers=num_workers > 0,
+ shuffle=use_chunk,
+ collate_fn=collate_fn)
+ return loader
+
+
+def chunk_collate_fn(sample):
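+ # Collate chunked samples: default_collate stacks the per-item chunks into
+ # (b1, b2, ...) tensors, then merge_internal_batch folds the two leading
+ # batch dims (and the view dim for image-like tensors) into one.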
+ sample = torch.utils.data.default_collate(sample)
+
+ def merge_internal_batch(sample, merge_b_only=False):
+ for k, v in sample.items():
+ if isinstance(v, torch.Tensor):
+ if v.ndim > 1:
+ if k == 'fps_pcd' or merge_b_only:
+ sample[k] = rearrange(
+ v,
+ "b1 b2 ... -> (b1 b2) ...").float().contiguous()
+
+ else:
+ sample[k] = rearrange(
+ v, "b1 b2 f c ... -> (b1 b2 f) c ...").float(
+ ).contiguous()
+ elif k == 'tanfov':
+ sample[k] = v[0].float().item() # tanfov.
+
+ if isinstance(sample['c'], dict): # 3dgs
+ merge_internal_batch(sample['c'], merge_b_only=True)
+ merge_internal_batch(sample['nv_c'], merge_b_only=True)
+
+ merge_internal_batch(sample)
+
+ return sample
+
+def chunk_ddpm_collate_fn(sample):
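+ # DDPM-training variant of the chunk collate: folds the (b1, b2) chunk dims
+ # for every tensor field and unwraps the non-tensor fields (caption / ins).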
+ sample = torch.utils.data.default_collate(sample)
+
+ def merge_internal_batch(sample, merge_b_only=False):
+ for k, v in sample.items():
+ if isinstance(v, torch.Tensor):
+ if v.ndim > 1:
+ sample[k] = rearrange(
+ v,
+ "b1 b2 ... -> (b1 b2) ...").float().contiguous()
+
+ else: # caption & ins: unwrap the per-position tuples from default_collate
+ # note: the original assigned to the local v only, which was a no-op
+ sample[k] = [v[i][0] for i in range(len(v))]
+
+ merge_internal_batch(sample)
+
+
+ return sample
+
+
+
+
+def load_data_cls(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_lmdb=False,
+ use_wds=False,
+ use_chunk=False,
+ use_lmdb_compressed=False,
+ # plucker_embedding=False,
+ # frame_0_as_canonical=False,
+ infi_sampler=True,
+ load_latent=False,
+ return_dataset=False,
+ load_caption_dataset=False,
+ load_mv_dataset=False,
+ **kwargs):
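+ # Same dataset-class dispatch as load_data() below, but returns the dataset
+ # object itself instead of wrapping it in a sampler + DataLoader.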
+ collate_fn = None
+
+ if use_lmdb:
+ logger.log('using LMDB dataset')
+ # dataset_cls = LMDBDataset_MV # 2.5-3iter/s, but unstable, drops to 1 later.
+
+ if use_lmdb_compressed:
+ if 'nv' in trainer_name:
+ dataset_cls = Objv_LMDBDataset_NV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = Objv_LMDBDataset_MV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = Objv_LMDBDataset_NV_NoCompressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = Objv_LMDBDataset_MV_NoCompressed # 2.5-3iter/s, but unstable, drops to 1 later.
+
+ elif use_chunk:
+ if load_latent:
+
+ if kwargs.get('gs_cam_format', False):
+ if load_caption_dataset:
+ dataset_cls = ChunkObjaverseDatasetDDPMgsT23D
+ collate_fn = chunk_ddpm_collate_fn
+ else:
+ if load_mv_dataset:
+ # dataset_cls = ChunkObjaverseDatasetDDPMgsMV23D # ! if multi-view
+ dataset_cls = ChunkObjaverseDatasetDDPMgsMV23DSynthetic # ! if multi-view
+ # collate_fn = chunk_ddpm_collate_fn
+ collate_fn = None
+ else:
+ dataset_cls = ChunkObjaverseDatasetDDPMgsI23D
+ collate_fn = None
+ else:
+ dataset_cls = ChunkObjaverseDatasetDDPM
+ collate_fn = chunk_ddpm_collate_fn
+ else:
+ dataset_cls = ChunkObjaverseDataset
+ collate_fn = chunk_collate_fn
+
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = NovelViewObjverseDataset # 1.5-2iter/s
+ else:
+ dataset_cls = MultiViewObjverseDataset
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ **kwargs
+ # plucker_embedding=plucker_embedding
+ )
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+
+ return dataset
+
+
+
+def load_data(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_lmdb=False,
+ use_wds=False,
+ use_chunk=False,
+ use_lmdb_compressed=False,
+ # plucker_embedding=False,
+ # frame_0_as_canonical=False,
+ infi_sampler=True,
+ load_latent=False,
+ return_dataset=False,
+ load_caption_dataset=False,
+ load_mv_dataset=False,
+ **kwargs):
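+ # Main training entry: dispatch on the storage backend / task to a dataset
+ # class, wrap it in a DistributedSampler + DataLoader, and yield batches
+ # forever (the train loop just keeps calling next()).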
+
+ collate_fn = None
+
+ if use_lmdb:
+ logger.log('using LMDB dataset')
+ # dataset_cls = LMDBDataset_MV # 2.5-3iter/s, but unstable, drops to 1 later.
+
+ if use_lmdb_compressed:
+ if 'nv' in trainer_name:
+ dataset_cls = Objv_LMDBDataset_NV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = Objv_LMDBDataset_MV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = Objv_LMDBDataset_NV_NoCompressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = Objv_LMDBDataset_MV_NoCompressed # 2.5-3iter/s, but unstable, drops to 1 later.
+
+ elif use_chunk:
+ if load_latent:
+
+ if kwargs.get('gs_cam_format', False):
+ if load_caption_dataset:
+ dataset_cls = ChunkObjaverseDatasetDDPMgsT23D
+ # collate_fn = chunk_ddpm_collate_fn
+ collate_fn = None
+ else:
+ if load_mv_dataset:
+ # dataset_cls = ChunkObjaverseDatasetDDPMgsMV23D
+ dataset_cls = ChunkObjaverseDatasetDDPMgsMV23DSynthetic # ! if multi-view
+ # collate_fn = chunk_ddpm_collate_fn
+ collate_fn = None
+ else:
+ # dataset_cls = ChunkObjaverseDatasetDDPMgsI23D # load i23d
+ # collate_fn = None
+ # load mv dataset for i23d
+ dataset_cls = ChunkObjaverseDatasetDDPMgsI23D_loadMV
+ collate_fn = chunk_ddpm_collate_fn
+ else:
+ dataset_cls = ChunkObjaverseDatasetDDPM
+ collate_fn = chunk_ddpm_collate_fn
+ else:
+ dataset_cls = ChunkObjaverseDataset
+ collate_fn = chunk_collate_fn
+
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = NovelViewObjverseDataset # 1.5-2iter/s
+ else:
+ dataset_cls = MultiViewObjverseDataset
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ **kwargs
+ # plucker_embedding=plucker_embedding
+ )
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+
+ if return_dataset:
+ return dataset
+
+ assert infi_sampler
+ if infi_sampler:
+ train_sampler = DistributedSampler(dataset=dataset,
+ shuffle=True,
+ drop_last=True)
+
+ loader = DataLoader(
+ dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=True,
+ pin_memory=True,
+ persistent_workers=num_workers > 0,
+ sampler=train_sampler,
+ collate_fn=collate_fn,
+ )
+
+ while True:
+ yield from loader
+
+
+
+def load_eval_data(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ num_workers=1,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ interval=1,
+ use_lmdb=False,
+ plucker_embedding=False,
+ load_real=False,
+ load_mv_real=False,
+ load_gso=False,
+ four_view_for_latent=False,
+ shuffle_across_cls=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ single_view_for_i23d=False,
+ use_chunk=False,
+ **kwargs,
+):
+ collate_fn = None
+
+ if use_lmdb:
+ logger.log('using LMDB dataset')
+ dataset_cls = Objv_LMDBDataset_MV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=True,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ interval=interval)
+ elif use_chunk:
+ dataset = ChunkObjaverseDataset(
+ file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ # dataset_size=dataset_size,
+ gs_cam_format=gs_cam_format,
+ plucker_embedding=plucker_embedding,
+ wds_split_all=2,
+ # frame_0_as_canonical=frame_0_as_canonical,
+ **kwargs)
+ collate_fn = chunk_collate_fn
+
+ elif load_real:
+ if load_mv_real:
+ dataset_cls = RealMVDataset
+ elif load_gso:
+ dataset_cls = RealDataset_GSO
+ else: # single-view i23d
+ dataset_cls = RealDataset
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ test=True,
+ imgnet_normalize=imgnet_normalize,
+ interval=interval,
+ plucker_embedding=plucker_embedding)
+
+ else:
+ dataset = MultiViewObjverseDataset(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ test=True,
+ imgnet_normalize=imgnet_normalize,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ four_view_for_latent=four_view_for_latent,
+ load_extra_36_view=load_extra_36_view,
+ shuffle_across_cls=shuffle_across_cls,
+ gs_cam_format=gs_cam_format,
+ single_view_for_i23d=single_view_for_i23d,
+ **kwargs)
+
+ print('eval dataset size: {}'.format(len(dataset)))
+ loader = DataLoader(
+ dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ shuffle=False,
+ collate_fn=collate_fn,
+ )
+ return iter(loader)
+
+
+def load_data_for_lmdb(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ shuffle_across_cls=False,
+ four_view_for_latent=False,
+ wds_split=1):
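+ # One-off loader used when converting the raw multi-view renderings into
+ # LMDB / wds shards; also returns the dataset name and its size.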
+ dataset_cls = MultiViewObjverseDatasetforLMDB_nocaption # caption-free variant
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split,
+ four_view_for_latent=four_view_for_latent)
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+ loader = DataLoader(
+ dataset,
+ shuffle=False,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ pin_memory=True,
+ persistent_workers=num_workers > 0,
+ )
+
+ return loader, dataset.dataset_name, len(dataset)
+
+
+def load_lmdb_for_lmdb(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec'):
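+ # Loader over an already-built compressed LMDB, presumably for
+ # re-processing it into another format.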
+ dataset_cls = Objv_LMDBDataset_MV_Compressed_for_lmdb
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size)
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+ loader = DataLoader(
+ dataset,
+ shuffle=False,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ pin_memory=True,
+ persistent_workers=True,
+ )
+
+ return loader, len(dataset)
+
+
+def load_memory_data(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ num_workers=1,
+ # load_depth=True,
+ preprocess=None,
+ imgnet_normalize=True,
+ use_chunk=True,
+ **kwargs):
+ # Load a single instance into memory to speed up training IO (overfitting /
+ # debugging): fetch the whole dataset once, then re-sample batches forever.
+
+ collate_fn = None
+
+ if use_chunk:
+ dataset_cls = ChunkObjaverseDataset
+ collate_fn = chunk_collate_fn
+ else:
+ dataset_cls = NovelViewObjverseDataset
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ load_depth=True,
+ test=False,
+ overfitting=True,
+ imgnet_normalize=imgnet_normalize,
+ overfitting_bs=batch_size,
+ **kwargs)
+ logger.log('!!!!!!! memory dataset size: {} !!!!!!'.format(len(dataset)))
+ # train_sampler = DistributedSampler(dataset=dataset)
+ loader = DataLoader(
+ dataset,
+ batch_size=len(dataset),
+ num_workers=num_workers,
+ drop_last=False,
+ shuffle=False,
+ collate_fn=collate_fn,
+ )
+
+ all_data: dict = next(
+ iter(loader)
+ ) # torchvision.utils.save_image(all_data['img'], 'gt.jpg', normalize=True, value_range=(-1,1))
+
+
+ if kwargs.get('gs_cam_format', False): # gs rendering pipeline
+ # ! load V=4 images for training in a batch.
+ while True:
+ indices = torch.randperm(
+ len(dataset) * 2)[:batch_size] # all instances
+
+ batch_c = collections.defaultdict(dict)
+ V = all_data['c']['source_cv2wT_quat'].shape[1]
+ for k in ['c', 'nv_c']:
+ for k_c, v_c in all_data[k].items():
+ if k_c == 'tanfov':
+ continue
+ try:
+ batch_c[k][
+ k_c] = torch.index_select( # ! chunk data reading pipeline
+ v_c,
+ dim=0,
+ index=indices
+ ).reshape(batch_size, V, *v_c.shape[2:]).float(
+ ) if isinstance(
+ v_c,
+ torch.Tensor) else v_c # float
+ except Exception as e:
+ print(e)
+ raise
+
+ # ! read chunk not required, already float
+ batch_c['c']['tanfov'] = all_data['c']['tanfov']
+ batch_c['nv_c']['tanfov'] = all_data['nv_c']['tanfov']
+
+ indices_range = torch.arange(indices[0]*V, (indices[0]+1)*V)
+ batch_data = {}
+ for k, v in all_data.items():
+ if k not in ['c', 'nv_c']:
+ try:
+ if k == 'fps_pcd':
+ batch_data[k] = torch.index_select(
+ v, dim=0, index=indices).float() if isinstance(
+ v, torch.Tensor) else v # float
+ else:
+ batch_data[k] = torch.index_select(
+ v, dim=0, index=indices_range).float() if isinstance(
+ v, torch.Tensor) else v # float
+ except Exception as e: # the bare `except: st()` here referenced an undefined e
+ print(e)
+ raise
+
+ memory_batch_data = {
+ **batch_data,
+ **batch_c,
+ }
+
+
+ yield memory_batch_data
+
+ else:
+ while True:
+ start_idx = np.random.randint(0, len(dataset) - batch_size + 1)
+ yield {
+ k: v[start_idx:start_idx + batch_size]
+ for k, v in all_data.items()
+ }
+
+
+def read_dnormal(normald_path, cond_pos, h=None, w=None):
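+ # Decode a packed normal+depth EXR: channels [:3] hold the world-space
+ # normal and channel [3] the depth; depth values closer than
+ # (camera distance - near) are zeroed, masking invalid/background pixels.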
+ cond_cam_dis = np.linalg.norm(cond_pos, 2)
+
+ near = 0.867 # sqrt(3) * 0.5, the bounding-sphere radius of a unit cube
+ near_distance = cond_cam_dis - near
+
+ normald = cv2.imread(normald_path, cv2.IMREAD_UNCHANGED).astype(np.float32)
+ normal, depth = normald[..., :3], normald[..., 3:]
+
+ depth[depth < near_distance] = 0
+
+ if h is not None:
+ assert w is not None
+ if depth.shape[1] != h:
+ depth = cv2.resize(depth, (h, w), interpolation=cv2.INTER_NEAREST
+ ) # 512,512,1 -> self.reso, self.reso; nearest keeps depth edges crisp
+ else:
+ depth = depth[..., 0]
+
+ if normal.shape[1] != h:
+ normal = cv2.resize(normal, (h, w),
+ interpolation=cv2.INTER_NEAREST
+ ) # 512,512, 1 -> self.reso, self.reso
+
+ else:
+ depth = depth[..., 0]
+
+ return torch.from_numpy(depth).float(), torch.from_numpy(normal).float()
+
+
+def get_intri(target_im=None, h=None, w=None, normalize=False):
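+ # Intrinsics of the g-buffer renders: 1024px raw resolution with focal
+ # 1422.222, rescaled to the target (h, w); normalize=True divides by h so
+ # the principal point becomes [0.5, 0.5] (eg3d convention).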
+ if target_im is None:
+ assert (h is not None and w is not None)
+ else:
+ h, w = target_im.shape[:2]
+
+ fx = fy = 1422.222
+ res_raw = 1024
+ f_x = f_y = fx * h / res_raw
+ K = np.array([f_x, 0, w / 2, 0, f_y, h / 2, 0, 0, 1])
+ if normalize: # center is [0.5, 0.5], eg3d renderer tradition
+ # scale on the flat array so only fx, cx, fy, cy are divided and the
+ # homogeneous row stays [0, 0, 1] (reshaping first would scale it too)
+ K[:6] /= h
+ K = K.reshape(3, 3)
+ return K
+
+
+def convert_pose(C2W):
+ # https://github.com/modelscope/richdreamer/blob/c3d9a77fa15fc42dbae12c2d41d64aaec14efd37/dataset/gobjaverse/depth_warp_example.py#L402
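+ # flips the Y and Z camera axes, i.e. converts between OpenGL- and
+ # OpenCV-style poses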
+ flip_yz = np.eye(4)
+ flip_yz[1, 1] = -1
+ flip_yz[2, 2] = -1
+ C2W = np.matmul(C2W, flip_yz)
+ return torch.from_numpy(C2W)
+
+
+def read_camera_matrix_single(json_file):
+ with open(json_file, 'r', encoding='utf8') as reader:
+ json_content = json.load(reader)
+ # NOTE: unlike the unity2blender experiments (which negate the y/z axes),
+ # the matrix is assembled directly in the blender convention here.
+ camera_matrix = np.eye(4) # blender-based
+ camera_matrix[:3, 0] = np.array(json_content['x'])
+ camera_matrix[:3, 1] = np.array(json_content['y'])
+ camera_matrix[:3, 2] = np.array(json_content['z'])
+ camera_matrix[:3, 3] = np.array(json_content['origin'])
+
+ return camera_matrix
+
+
+def unity2blender(normal):
+ normal_clone = normal.copy()
+ normal_clone[..., 0] = -normal[..., -1]
+ normal_clone[..., 1] = -normal[..., 0]
+ normal_clone[..., 2] = normal[..., 1]
+
+ return normal_clone
+
+ def unity2blender_fix(normal): # up blue, left green, front (towards inside) red
+ normal_clone = normal.copy()
+ normal_clone[..., 0] = -normal[..., 0]
+ normal_clone[..., 1] = -normal[..., 2]
+ normal_clone[..., 2] = normal[..., 1]
+
+ return normal_clone
+
+def unity2blender_th(normal):
+ assert normal.shape[1] == 3 # B 3 H W...
+ normal_clone = normal.clone()
+ normal_clone[:, 0, ...] = -normal[:, -1, ...]
+ normal_clone[:, 1, ...] = -normal[:, 0, ...]
+ normal_clone[:, 2, ...] = normal[:, 1, ...]
+
+ return normal_clone
+
+
+def blender2midas(img):
+ '''Blender: rub
+ midas: lub
+ '''
+ img[..., 0] = -img[..., 0]
+ img[..., 1] = -img[..., 1]
+ img[..., -1] = -img[..., -1]
+ return img
+
+
+def current_milli_time():
+ return round(time.time() * 1000)
+
+
+# modified from ShapeNet class
+class MultiViewObjverseDataset(Dataset):
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=False,
+ **kwargs):
+ self.load_extra_36_view = load_extra_36_view
+ self.gs_cam_format = gs_cam_format
+ self.frame_0_as_canonical = frame_0_as_canonical
+ self.four_view_for_latent = four_view_for_latent # export 0 12 30 36, 4 views for reconstruction
+ self.single_view_for_i23d = single_view_for_i23d
+ self.file_path = file_path
+ self.overfitting = overfitting
+ self.scene_scale = scene_scale
+ self.reso = reso
+ self.reso_encoder = reso_encoder
+ self.classes = False
+ self.load_depth = load_depth
+ self.preprocess = preprocess
+ self.plucker_embedding = plucker_embedding
+ self.intrinsics = get_intri(h=self.reso, w=self.reso,
+ normalize=True).reshape(9)
+
+ assert not self.classes, "Not support class condition now."
+
+ dataset_name = Path(self.file_path).stem.split('_')[0]
+ self.dataset_name = dataset_name
+
+ self.zfar = 100.0
+ self.znear = 0.01
+
+ # ! TODO, read from list?
+
+ def load_single_cls_instances(file_path):
+ ins_list = []
+ for dict_dir in os.listdir(file_path):
+ for ins_dir in os.listdir(os.path.join(file_path, dict_dir)):
+ # filter out incomplete instances (missing renders or views)
+ ins_root = os.path.join(file_path, dict_dir, ins_dir,
+ 'campos_512_v2')
+ if not os.path.exists(ins_root):
+ continue
+ if not os.path.exists(os.path.join(ins_root, '00025', '00025.png')):
+ continue
+ if len(os.listdir(ins_root)) != 40:
+ continue
+ ins_list.append(ins_root)
+ return ins_list
+
+ self.ins_list = []
+ if True: # selected subsets for training; else: preprocess a single class
+ for subset in [ # ! around 170K instances in total.
+ # 'Animals',
+ # 'BuildingsOutdoor',
+ # 'daily-used',
+ # 'Furnitures',
+ # 'Food',
+ # 'Plants',
+ # 'Electronics',
+ # 'Transportations_tar',
+ # 'Human-Shape',
+ 'gobjaverse_alignment_unzip',
+ ]: # selected subset for training
+
+ dataset_list = f'{self.file_path}/{subset}_filtered_more.txt'
+ assert os.path.exists(dataset_list)
+ with open(dataset_list, 'r') as f:
+ self.ins_list += [os.path.join(self.file_path, item.strip()) for item in f.readlines()]
+
+
+ else: # preprocess single class
+ self.ins_list = load_single_cls_instances(self.file_path)
+
+ self.ins_list = sorted(self.ins_list)
+
+ if overfitting:
+ self.ins_list = self.ins_list[:1]
+
+ self.rgb_list = []
+ self.frame0_pose_list = []
+ self.pose_list = []
+ self.depth_list = []
+ self.data_ins_list = []
+ self.instance_data_length = -1
+
+ self.pcd_path = Path(
+ '/nas/shared/V2V/yslan/logs/nips23/Reconstruction/pcd-V=6/fps-pcd')
+
+ with open(
+ '/nas/shared/public/yslan/data/text_captions_cap3d.json') as f:
+ self.caption_data = json.load(f)
+
+ self.shuffle_across_cls = shuffle_across_cls
+
+ if four_view_for_latent: # also saving dense pcd
+ self.wds_split_all = 8 # ! number of shards when dumping latents
+ all_ins_size = len(self.ins_list)
+ ratio_size = all_ins_size // self.wds_split_all + 1
+
+ ins_list_to_process = self.ins_list[ratio_size *
+ (wds_split):ratio_size *
+ (wds_split + 1)]
+
+ else: # ! create shards dataset
+ self.wds_split_all = 8
+ all_ins_size = len(self.ins_list)
+
+ random.seed(0)
+ random.shuffle(self.ins_list) # avoid the same category appearing in one shard
+
+ ratio_size = all_ins_size // self.wds_split_all + 1
+
+ ins_list_to_process = self.ins_list[ratio_size * # wds_split runs 1..8
+ (wds_split - 1):ratio_size *
+ wds_split]
+
+ uniform_sample = True
+ for ins in tqdm(ins_list_to_process):
+
+ if self.four_view_for_latent:
+ # fixed, roughly uniform view subset used when extracting PCD / latents
+ cur_all_fname = [
+ f'{idx:05d}'
+ for idx in [25, 26, 6, 12, 18, 24, 27, 31, 35, 39] # ! for extracting PCD
+ ]
+ elif self.single_view_for_i23d:
+ cur_all_fname = [f'{idx:05d}'
+ for idx in [2]] # ! furniture side view
+
+ else:
+ cur_all_fname = [t.split('.')[0] for t in os.listdir(ins)
+ ] # use full set for training
+
+ if shuffle_across_cls:
+ if uniform_sample:
+ cur_all_fname = sorted(cur_all_fname)
+ # 0-24, 25 views
+ # 25,26, 2 views
+ # 27-39, 13 views
+ uniform_all_fname = []
+
+ # !!!! if bs=9 or 8
+ for idx in range(6):
+ if idx % 2 == 0:
+ chunk_all_fname = [25]
+ else:
+ chunk_all_fname = [26]
+
+ start_1 = np.random.randint(0,4) # for first 24 views, v=8
+ chunk_all_fname += [start_1+uniform_idx for uniform_idx in range(0,25,7)] # [0-21]
+
+ start_2 = np.random.randint(0,5) + 27 # sample from the upper views (27-39)
+ chunk_all_fname += [start_2, start_2 + 4, start_2 + 8]
+ assert len(chunk_all_fname) == 8, len(chunk_all_fname)
+ uniform_all_fname += [cur_all_fname[fname] for fname in chunk_all_fname]
+
+ cur_all_fname = uniform_all_fname
+
+ else:
+ current_time = int(current_milli_time(
+ )) # randomly shuffle given current time
+ random.seed(current_time)
+ random.shuffle(cur_all_fname)
+ else:
+ cur_all_fname = sorted(cur_all_fname)
+
+
+ self.frame0_pose_list += ([
+ os.path.join(ins, fname, fname + '.json')
+ for fname in [cur_all_fname[0]]
+ ] * len(cur_all_fname))
+
+ self.pose_list += ([
+ os.path.join(ins, fname, fname + '.json')
+ for fname in cur_all_fname
+ ])
+ self.rgb_list += ([
+ os.path.join(ins, fname, fname + '.png')
+ for fname in cur_all_fname
+ ])
+
+ self.depth_list += ([
+ os.path.join(ins, fname, fname + '_nd.exr')
+ for fname in cur_all_fname
+ ])
+ self.data_ins_list += ([ins] * len(cur_all_fname))
+
+
+ # ! set up normalization
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+
+ def get_source_cw2wT(self, source_cameras_view_to_world):
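+ # quaternion of the transposed view-to-world rotation, matching the 'glm'
+ # layout the GS CUDA rasterizer expects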
+ return matrix_to_quaternion(
+ source_cameras_view_to_world[:3, :3].transpose(0, 1))
+
+ def c_to_3dgs_format(self, pose):
+ # TODO, switch to torch version (batched later)
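+ # Convert one flat 25-dim camera (16 c2w + 9 intrinsics) into the dict the
+ # 3DGS renderer consumes: world-view / full-projection transforms, camera
+ # center, tanfov and the cv2wT quaternion.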
+
+ c2w = pose[:16].reshape(4, 4) # camera-to-world, 4x4
+
+ # ! load cam
+ w2c = np.linalg.inv(c2w)
+ R = np.transpose(
+ w2c[:3, :3]) # R is stored transposed due to 'glm' in CUDA code
+ T = w2c[:3, 3]
+ fx = pose[16]
+ FovX = focal2fov(fx, 1)
+ FovY = focal2fov(fx, 1)
+
+ tanfovx = math.tan(FovX * 0.5)
+ tanfovy = math.tan(FovY * 0.5)
+
+ assert tanfovx == tanfovy
+
+ trans = np.array([0.0, 0.0, 0.0])
+ scale = 1.0
+
+ world_view_transform = torch.tensor(getWorld2View2(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+ projection_matrix = getProjectionMatrix(znear=self.znear,
+ zfar=self.zfar,
+ fovX=FovX,
+ fovY=FovY).transpose(0, 1)
+ full_proj_transform = (world_view_transform.unsqueeze(0).bmm(
+ projection_matrix.unsqueeze(0))).squeeze(0)
+ camera_center = world_view_transform.inverse()[3, :3]
+
+ view_world_transform = torch.tensor(getView2World(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+
+ c = {}
+ c["source_cv2wT_quat"] = self.get_source_cw2wT(view_world_transform)
+ c.update(
+ # projection_matrix=projection_matrix, # K
+ cam_view=world_view_transform, # world_view_transform
+ cam_view_proj=full_proj_transform, # full_proj_transform
+ cam_pos=camera_center,
+ tanfov=tanfovx, # TODO, fix in the renderer
+ # orig_c2w=c2w,
+ # orig_w2c=w2c,
+ orig_pose=torch.from_numpy(pose),
+ orig_c2w=torch.from_numpy(c2w),
+ orig_w2c=torch.from_numpy(w2c),
+ # tanfovy=tanfovy,
+ )
+
+ return c # dict for gs rendering
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def load_bbox(self, mask):
+ # tight bbox (top, left, bottom, right) around the nonzero mask region
+ nonzero_value = torch.nonzero(mask)
+ bottom, right = nonzero_value.max(dim=0)[0]
+ top, left = nonzero_value.min(dim=0)[0]
+ bbox = torch.tensor([top, left, bottom, right], dtype=torch.float32)
+ return bbox
+
+ def __getitem__(self, idx):
+ data = self._read_data(idx)
+ return data
+
+ def gen_rays(self, c2w):
+ # Generate per-pixel ray origins and directions (OpenCV convention) at the encoder resolution
+ self.h = self.reso_encoder
+ self.w = self.reso_encoder
+ yy, xx = torch.meshgrid(
+ torch.arange(self.h, dtype=torch.float32) + 0.5,
+ torch.arange(self.w, dtype=torch.float32) + 0.5,
+ indexing='ij')
+
+ # normalize to 0-1 pixel range
+ yy = yy / self.h
+ xx = xx / self.w
+
+ # K = np.array([f_x, 0, w / 2, 0, f_y, h / 2, 0, 0, 1]).reshape(3, 3)
+ cx, cy, fx, fy = self.intrinsics[2], self.intrinsics[
+ 5], self.intrinsics[0], self.intrinsics[4]
+
+ c2w = torch.from_numpy(c2w).float()
+
+ xx = (xx - cx) / fx
+ yy = (yy - cy) / fy
+ zz = torch.ones_like(xx)
+ dirs = torch.stack((xx, yy, zz), dim=-1) # OpenCV convention
+ dirs /= torch.norm(dirs, dim=-1, keepdim=True)
+ dirs = dirs.reshape(-1, 3, 1)
+ del xx, yy, zz
+ dirs = (c2w[None, :3, :3] @ dirs)[..., 0]
+
+ origins = c2w[None, :3, 3].expand(self.h * self.w, -1).contiguous()
+ origins = origins.view(self.h, self.w, 3)
+ dirs = dirs.view(self.h, self.w, 3)
+
+ return origins, dirs
+
+ def normalize_camera(self, c, c_frame0):
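+ # Canonicalize all poses to frame 0: move the canonical camera to a fixed
+ # position on the -z axis (its radius preserved) and apply the same rigid
+ # transform to every view; cf. the LGM reference linked below.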
+ # assert c.shape[0] == self.chunk_size # 8 or 10
+
+ B = c.shape[0]
+ camera_poses = c[:, :16].reshape(B, 4, 4) # 3x4
+ canonical_camera_poses = c_frame0[:, :16].reshape(B, 4, 4)
+
+ cam_radius = np.linalg.norm(
+ c_frame0[:, :16].reshape(1, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 1, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(canonical_camera_poses)
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ new_camera_poses = np.repeat(
+ transform, 1, axis=0
+ ) @ camera_poses # [V, 4, 4]. np.repeat() is th.repeat_interleave()
+
+ c = np.concatenate([new_camera_poses.reshape(B, 16), c[:, 16:]],
+ axis=-1)
+
+ return c
+
+ def _read_data(
+ self,
+ idx,
+ ):
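+ # Load one view: RGB composited onto a white background, encoder/render
+ # resizes, camera (optionally canonicalized to frame 0), depth + normal,
+ # plucker rays, foreground bbox and the instance caption.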
+ rgb_fname = self.rgb_list[idx]
+ pose_fname = self.pose_list[idx]
+
+ raw_img = imageio.imread(rgb_fname)
+
+ # ! RGBD
+ alpha_mask = raw_img[..., -1:] / 255
+ raw_img = alpha_mask * raw_img[..., :3] + (
+ 1 - alpha_mask) * np.ones_like(raw_img[..., :3]) * 255
+
+ raw_img = raw_img.astype(
+ np.uint8) # otherwise, float64 won't call ToTensor()
+
+
+ if self.preprocess is None:
+ img_to_encoder = cv2.resize(raw_img,
+ (self.reso_encoder, self.reso_encoder),
+ interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+ img_to_encoder = img_to_encoder[
+ ..., :3] #[3, reso_encoder, reso_encoder]
+ img_to_encoder = self.normalize(img_to_encoder)
+ else:
+ img_to_encoder = self.preprocess(Image.open(rgb_fname)) # clip
+
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ img = torch.from_numpy(img)[..., :3].permute(
+ 2, 0, 1
+ ) / 127.5 - 1 # [3, reso, reso], normalized to [-1,1] (triplane range)
+
+ c2w = read_camera_matrix_single(pose_fname) # 4x4 camera-to-world
+
+ depth, normal = read_dnormal(self.depth_list[idx], c2w[:3, 3:],
+ self.reso, self.reso)
+
+ try:
+ bbox = self.load_bbox(depth > 0)
+ except:
+ print(rgb_fname, flush=True)
+ with open('error_log.txt', 'a') as f:
+ f.write(str(rgb_fname + '\n'))
+ bbox = self.load_bbox(torch.ones_like(depth))
+
+ # plucker
+
+ # ! normalize camera
+
+ c = np.concatenate([c2w.reshape(16), self.intrinsics],
+ axis=0).reshape(25).astype(
+ np.float32) # 25, no '1' dim needed.
+
+ if self.frame_0_as_canonical: # 4 views as input per batch
+ frame0_pose_name = self.frame0_pose_list[idx]
+ c2w_frame0 = read_camera_matrix_single(
+ frame0_pose_name) #[1, 4, 4] -> [1, 16]
+ c = self.normalize_camera(c[None], c2w_frame0[None])[0]
+ c2w = c[:16].reshape(4, 4) # !
+
+ rays_o, rays_d = self.gen_rays(c2w)
+ rays_plucker = torch.cat([torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ dim=-1) # [h, w, 6]
+
+ img_to_encoder = torch.cat(
+ [img_to_encoder, rays_plucker.permute(2, 0, 1)],
+ 0).float() # concat in C dim
+
+ # ! add depth as input
+
+ depth, normal = read_dnormal(self.depth_list[idx], c2w[:3, 3:],
+ self.reso_encoder, self.reso_encoder)
+ normalized_depth = depth.unsqueeze(0) # min=0
+ img_to_encoder = torch.cat([img_to_encoder, normalized_depth],
+ 0) # concat in C dim
+
+ if self.gs_cam_format:
+ c = self.c_to_3dgs_format(c)
+ else:
+ c = torch.from_numpy(c)
+
+ ret_dict = {
+ 'img_to_encoder': img_to_encoder,
+ 'img': img,
+ 'c': c,
+ }
+
+ ins = str( # instance id relative to file_path; also the caption key
+ (Path(self.data_ins_list[idx]).relative_to(self.file_path)).parent)
+ caption = self.caption_data['/'.join(ins.split('/')[1:])]
+
+ ret_dict.update({
+ 'depth': depth,
+ 'normal': normal,
+ 'alpha_mask': alpha_mask,
+ 'depth_mask': depth > 0,
+ 'bbox': bbox,
+ 'caption': caption,
+ 'rays_plucker': rays_plucker, # cam embedding used in lgm
+ 'ins': ins, # placeholder
+ })
+
+ return ret_dict
+
+
+
+# TODO merge all the useful APIs together
+class ChunkObjaverseDataset(Dataset):
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ wds_split_all=1,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ load_raw=False,
+ load_instance_only=False,
+ mv_latent_dir='',
+ perturb_pcd_scale=0.0,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__()
+
+ self.mv_latent_dir = mv_latent_dir
+
+ self.load_raw = load_raw
+ self.load_instance_only = load_instance_only
+ self.read_normal = read_normal
+ self.file_path = file_path
+ self.chunk_size = split_chunk_size
+ self.gs_cam_format = gs_cam_format
+ self.frame_0_as_canonical = frame_0_as_canonical
+ self.four_view_for_latent = four_view_for_latent # export 0 12 30 36, 4 views for reconstruction
+ self.overfitting = overfitting
+ self.scene_scale = scene_scale
+ self.reso = reso
+ self.reso_encoder = reso_encoder
+ self.classes = False
+ self.load_depth = load_depth
+ self.preprocess = preprocess
+ self.plucker_embedding = plucker_embedding
+ self.intrinsics = get_intri(h=self.reso, w=self.reso,
+ normalize=True).reshape(9)
+ self.perturb_pcd_scale = perturb_pcd_scale
+
+ assert not self.classes, "Not support class condition now."
+
+ dataset_name = Path(self.file_path).stem.split('_')[0]
+ self.dataset_name = dataset_name
+ self.ray_sampler = RaySampler()
+
+ self.zfar = 100.0
+ self.znear = 0.01
+
+ # ! load all chunk paths
+ self.chunk_list = []
+
+
+ def load_single_cls_instances(file_path):
+ ins_list = []
+ for dict_dir in os.listdir(file_path):
+ for ins_dir in os.listdir(os.path.join(file_path, dict_dir)):
+ ins_list.append(
+ os.path.join(file_path, dict_dir, ins_dir,
+ 'campos_512_v4'))
+ return ins_list
+
+ if self.load_raw:
+
+ with open('./dataset/text_captions_3dtopia.json') as f:
+ self.caption_data = json.load(f)
+
+ for subset in [ # ! around 176K instances in total.
+ 'Animals',
+ # 'daily-used',
+ # 'BuildingsOutdoor',
+ # 'Furnitures',
+ # 'Food',
+ # 'Plants',
+ # 'Electronics',
+ # 'Transportations_tar',
+ # 'Human-Shape',
+ ]: # selected subset for training
+ with open(f'shell_scripts/raw_img_list/{subset}.txt', 'r') as f:
+ self.chunk_list += [os.path.join(subset, item.strip()) for item in f.readlines()]
+ else:
+
+ # ! directly load from json
+ with open(f'{self.file_path}/dataset.json', 'r') as f:
+ dataset_json = json.load(f)
+ # dataset_json = {'Animals': ['Animals/0/10017/1']}
+
+ if self.chunk_size == 12:
+ self.img_ext = 'png' # ln3diff
+ for k, v in dataset_json.items():
+ self.chunk_list.extend(v)
+ else:
+ # extract latent
+ assert self.chunk_size in [16, 18, 20]
+ self.img_ext = 'jpg' # more views
+ for k, v in dataset_json.items():
+ # if k != 'BuildingsOutdoor': # cannot be handled by gs
+ self.chunk_list.extend(v)
+
+
+ dataset_size = len(self.chunk_list)
+ self.chunk_list = sorted(self.chunk_list)
+
+ self.wds_split_all = 1
+
+ if wds_split_all != 1:
+ # ! retrieve the right wds split
+ all_ins_size = len(self.chunk_list)
+ ratio_size = all_ins_size // self.wds_split_all + 1
+ print('ratio_size: ', ratio_size, 'all_ins_size: ', all_ins_size)
+
+ self.chunk_list = self.chunk_list[ratio_size *
+ (wds_split):ratio_size *
+ (wds_split + 1)]
+
+ # load images from raw
+ self.rgb_list = []
+
+ if self.load_instance_only:
+ for ins in tqdm(self.chunk_list):
+
+ self.rgb_list += ([
+ os.path.join(self.file_path, ins, fname + '.png')
+ for fname in [f'{t}' for t in range(2)]
+ ]) # synthetic mv data: two views per instance
+
+ # index mapping of mvi data to objv single-view data
+ self.mvi_objv_mapping = {
+ '0': '00000',
+ '1': '00012',
+ }
+
+ # load gt mv data
+
+ self.gt_chunk_list = []
+ self.gt_mv_file_path = '/cpfs01/user/lanyushi.p/data/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ assert self.chunk_size in [16,18, 20]
+
+ with open(f'{self.gt_mv_file_path}/dataset.json', 'r') as f:
+ dataset_json = json.load(f)
+
+ self.img_ext = 'jpg' # more views
+ for k, v in dataset_json.items():
+ # if k != 'BuildingsOutdoor': # cannot be handled by gs
+ self.gt_chunk_list.extend(v)
+
+
+ elif self.load_raw:
+ for ins in tqdm(self.chunk_list):
+ cur_all_fname = [f'{t:05d}' for t in range(40)] # load all 40 views
+
+ self.rgb_list += ([
+ os.path.join(self.file_path, ins, fname, fname + '.png')
+ for fname in cur_all_fname
+ ])
+
+ self.post_process = PostProcess(
+ reso,
+ reso_encoder,
+ imgnet_normalize=imgnet_normalize,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=False,
+ mv_input=mv_input,
+ split_chunk_input=split_chunk_size,
+ duplicate_sample=True,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ gs_cam_format=gs_cam_format,
+ orthog_duplicate=False,
+ frame_0_as_canonical=frame_0_as_canonical,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ split_chunk_size=split_chunk_size,
+ )
+ self.kernel = torch.tensor([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
+
+
+ def fetch_chunk_list(self, file_path):
+ if os.path.isdir(file_path):
+ chunks = [
+ os.path.join(file_path, fname)
+ for fname in os.listdir(file_path) if fname.isdigit()
+ ]
+ return chunks
+ else:
+ return []
+
+ def _pre_process_chunk(self):
+ # e.g., remove bottom view
+ pass
+
+ def read_chunk(self, chunk_path):
+ # equivalent to decode_zip() in wds
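+ # A chunk folder packs all self.chunk_size views of one instance as
+ # horizontally tiled images, plus cameras (c.npy), depth/alpha, bbox,
+ # caption.txt and ins.txt.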
+
+ # reshape chunk
+ raw_img = imageio.imread(
+ os.path.join(chunk_path, f'raw_img.{self.img_ext}'))
+ h, bw, c = raw_img.shape
+ raw_img = raw_img.reshape(h, self.chunk_size, -1, c).transpose(
+ (1, 0, 2, 3))
+ c = np.load(os.path.join(chunk_path, 'c.npy'))
+
+ with open(os.path.join(chunk_path, 'caption.txt'),
+ 'r',
+ encoding="utf-8") as f:
+ caption = f.read()
+
+ with open(os.path.join(chunk_path, 'ins.txt'), 'r',
+ encoding="utf-8") as f:
+ ins = f.read()
+
+ bbox = np.load(os.path.join(chunk_path, 'bbox.npy'))
+
+ if self.chunk_size > 16:
+
+ depth_alpha = imageio.imread(
+ os.path.join(chunk_path, 'depth_alpha.jpg')) # 2h 10w
+ depth_alpha = depth_alpha.reshape(h * 2, self.chunk_size,
+ -1).transpose((1, 0, 2))
+
+ depth, alpha = np.split(depth_alpha, 2, axis=1)
+
+ d_near_far = np.load(os.path.join(chunk_path, 'd_near_far.npy'))
+
+ d_near = d_near_far[0].reshape(self.chunk_size, 1, 1)
+ d_far = d_near_far[1].reshape(self.chunk_size, 1, 1)
+ # d = 1 / ( (d_normalized / 255) * (far-near) + near)
+ depth = 1 / ((depth / 255) * (d_far - d_near) + d_near)
+
+ depth[depth > 2.9] = 0.0 # background as 0, follow old tradition
+
+ # ! filter anti-alias artifacts
+
+ erode_mask = kornia.morphology.erosion(
+ torch.from_numpy(alpha == 255).float().unsqueeze(1),
+ self.kernel) # B 1 H W
+ depth = (torch.from_numpy(depth).unsqueeze(1) * erode_mask).squeeze(
+ 1) # shrink anti-alias bug
+
+ else:
+ # load separate alpha and depth map
+
+ alpha = imageio.imread(
+ os.path.join(chunk_path, f'alpha.{self.img_ext}'))
+ alpha = alpha.reshape(h, self.chunk_size, h).transpose(
+ (1, 0, 2))
+ depth = np.load(os.path.join(chunk_path, 'depth.npz'))['depth']
+
+ if self.read_normal:
+ normal = imageio.imread(os.path.join(
+ chunk_path, 'normal.png')).astype(np.float32) / 255.0
+
+ normal = (normal * 2 - 1).reshape(h, self.chunk_size, -1,
+ 3).transpose((1, 0, 2, 3))
+ # fix g-buffer normal rendering coordinate
+ normal = unity2blender_fix(normal) # the plain unity2blender() mapping was wrong for this data
+ depth = (depth, normal) # pack the normal alongside depth for the post-processor
+
+ return raw_img, depth, c, alpha, bbox, caption, ins
+
+ def __len__(self):
+ return len(self.chunk_list)
+
+ def __getitem__(self, index) -> Any:
+ sample = self.read_chunk(
+ os.path.join(self.file_path, self.chunk_list[index]))
+ sample = self.post_process.paired_post_process_chunk(sample)
+
+ sample = self.post_process.create_dict_nobatch(sample)
+
+ # optional point-cloud augmentation: jitter then truncate
+ if self.perturb_pcd_scale > 0:
+ if random.random() > 0.5:
+ t = np.random.rand(sample['fps_pcd'].shape[0], 1, 1) * self.perturb_pcd_scale
+ sample['fps_pcd'] = sample['fps_pcd'] + t * np.random.randn(*sample['fps_pcd'].shape) # type: ignore
+ sample['fps_pcd'] = np.clip(sample['fps_pcd'], -0.45, 0.45) # truncate noisy augmentation
+
+ return sample
+
+
+class ChunkObjaverseDatasetDDPM(ChunkObjaverseDataset):
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ load_raw=False,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=split_chunk_size,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ load_raw=load_raw,
+ mv_latent_dir=mv_latent_dir,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ self.n_cond_frames = 6
+ self.perspective_transformer = v2.RandomPerspective(distortion_scale=0.4, p=0.15, fill=1,
+ interpolation=torchvision.transforms.InterpolationMode.NEAREST)
+ self.mv_resize_cls = torchvision.transforms.Resize(320, interpolation=torchvision.transforms.InterpolationMode.BILINEAR,
+ max_size=None, antialias=True)
+
+ # ! read img c, caption.
+
+ def get_plucker_ray(self, c):
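+ # Per-view Plucker embeddings: concat(cross(o, d), d) per pixel, stacked to
+ # (V, 6, H, W).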
+ rays_plucker = []
+ for idx in range(c.shape[0]):
+ rays_o, rays_d = self.gen_rays(c[idx])
+ rays_plucker.append(
+ torch.cat([torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ dim=-1).permute(2, 0, 1)) # [h, w, 6] -> 6,h,w
+ rays_plucker = torch.stack(rays_plucker, 0)
+ return rays_plucker
+
+ def read_chunk(self, chunk_path):
+ # equivalent to decode_zip() in wds
+
+ # reshape chunk
+ raw_img = imageio.imread(
+ os.path.join(chunk_path, f'raw_img.{self.img_ext}')).astype(np.float32)
+ h, bw, c = raw_img.shape
+ raw_img = raw_img.reshape(h, self.chunk_size, -1, c).transpose(
+ (1, 0, 2, 3))
+
+ c = np.load(os.path.join(chunk_path, 'c.npy')).astype(np.float32) # camera params; shadows the channel count unpacked above
+
+ with open(os.path.join(chunk_path, 'caption.txt'),
+ 'r',
+ encoding="utf-8") as f:
+ caption = f.read()
+
+ with open(os.path.join(chunk_path, 'ins.txt'), 'r',
+ encoding="utf-8") as f:
+ ins = f.read()
+
+ return raw_img, c, caption, ins
+
+ def _load_latent(self, ins):
+ # if 'adv' in self.mv_latent_dir: # new latent codes saved have 3 augmentations
+ # idx = random.choice([0,1,2])
+ # latent = np.load(os.path.join(self.mv_latent_dir, ins, f'latent-{idx}.npy')) # pre-calculated VAE latent
+ # else:
+ latent = np.load(os.path.join(self.mv_latent_dir, ins, 'latent.npy')) # pre-calculated VAE latent
+ latent = repeat(latent, 'C H W -> B C H W', B=2)
+ # return {'latent': latent}
+ return latent
+
+ def normalize_camera(self, c, c_frame0):
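+ # Canonicalize poses w.r.t. frame 0: left-multiply by the inverse of the
+ # canonical (frame-0) pose, then re-place that frame at a fixed position at
+ # its original camera radius along -z. Intrinsics c[:, 16:] are untouched.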
+ # assert c.shape[0] == self.chunk_size # 8 or 10
+
+ B = c.shape[0]
+ camera_poses = c[:, :16].reshape(B, 4, 4) # 4x4 c2w poses
+ canonical_camera_poses = c_frame0[:, :16].reshape(1, 4, 4)
+ inverse_canonical_pose = np.linalg.inv(canonical_camera_poses)
+ inverse_canonical_pose = np.repeat(inverse_canonical_pose, B, 0)
+
+ cam_radius = np.linalg.norm(
+ c_frame0[:, :16].reshape(1, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 1, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ inverse_canonical_pose
+
+ new_camera_poses = np.repeat(
+ transform, 1, axis=0
+ ) @ camera_poses # [V, 4, 4]. np.repeat() is th.repeat_interleave()
+
+ c = np.concatenate([new_camera_poses.reshape(B, 16), c[:, 16:]],
+ axis=-1)
+
+ return c
+
+ # @autocast
+ # def plucker_embedding(self, c):
+ # rays_o, rays_d = self.gen_rays(c)
+ # rays_plucker = torch.cat(
+ # [torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ # dim=-1).permute(2, 0, 1) # [h, w, 6] -> 6,h,w
+
+ # return rays_plucker
+
+ def __getitem__(self, index) -> Any:
+ raw_img, c, caption, ins = self.read_chunk(
+ os.path.join(self.file_path, self.chunk_list[index]))
+ # sample = self.post_process.paired_post_process_chunk(sample)
+
+ # ! random zoom in (scale augmentation)
+ # for i in range(img.shape[0]):
+ # for v in range(img.shape[1]):
+ # if random.random() > 0.8:
+ # rand_bg_scale = random.randint(60,99) / 100
+ # st()
+ # img[i,v] = recenter(img[i,v], np.ones_like(img[i,v]), border_ratio=rand_bg_scale)
+
+ # ! process
+ raw_img = torch.from_numpy(raw_img).permute(0, 3, 1, 2) / 255.0 # [0,1]
+
+ if raw_img.shape[-1] != self.reso:
+ raw_img = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ )
+ img = raw_img * 2 - 1 # as gt
+
+ # ! load latent (repeated twice along the batch dim; unpacking keeps one copy)
+ latent, _ = self._load_latent(ins)
+
+ # ! shuffle
+ indices = np.random.permutation(self.chunk_size)
+ img = img[indices]
+ c = c[indices]
+
+ img = self.perspective_transformer(img) # create 3D inconsistency
+
+ # ! split along V and repeat the other tensors accordingly
+ img = rearrange(img, '(B V) ... -> B V ...', B=2)[:, :self.n_cond_frames]
+ c = rearrange(c, '(B V) ... -> B V ...', B=2)[:, :self.n_cond_frames] # 2 6 25
+
+
+ # rand perspective aug
+ caption = [caption, caption]
+ ins = [ins, ins]
+
+ # load plucker coord
+ # st()
+ # plucker_c = self.get_plucker_ray(rearrange(c[:, 1:1+self.n_cond_frames], "b t ... -> (b t) ..."))
+ # plucker_c = rearrange(c, '(B V) ... -> B V ...', B=2) # 2 6 25
+
+ # use the view-space camera convention
+ c[0] = self.normalize_camera(c[0], c[0,0:1])
+ c[1] = self.normalize_camera(c[1], c[1,0:1])
+
+ # https://github.com/TencentARC/InstantMesh/blob/7fe95627cf819748f7830b2b278f302a9d798d17/src/model.py#L70
+ # c = np.concatenate([c[..., :12], c[..., 16:17], c[..., 20:21], c[..., 18:19], c[..., 21:22]], axis=-1)
+ # c = c + np.random.randn(*c.shape) * 0.04 - 0.02
+
+
+ # ! to dict
+ # sample = self.post_process.create_dict_nobatch(sample)
+ ret_dict = {
+ 'caption': caption,
+ 'ins': ins,
+ 'c': c,
+ 'img': img, # input image, range fixed to [-1,1]
+ 'latent': latent,
+ # **latent
+ }
+
+ # st()
+
+ return ret_dict
+
+
+class ChunkObjaverseDatasetDDPMgs(ChunkObjaverseDatasetDDPM):
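+ """2DGS latent variant: loads .npz latents holding both the KL latent
+ ('latent_normalized') and the fps-sampled query point cloud
+ ('query_pcd_xyz'), plus normalization stats (kl_mean/kl_std, xyz_std)."""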
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ load_raw=False,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ classes=classes,
+ load_depth=load_depth,
+ test=test,
+ scene_scale=scene_scale,
+ overfitting=overfitting,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ overfitting_bs=overfitting_bs,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split, # 4 splits to accelerate preprocessing
+ four_view_for_latent=four_view_for_latent,
+ single_view_for_i23d=single_view_for_i23d,
+ load_extra_36_view=load_extra_36_view,
+ gs_cam_format=gs_cam_format,
+ frame_0_as_canonical=frame_0_as_canonical,
+ split_chunk_size=split_chunk_size,
+ mv_input=mv_input,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ read_normal=read_normal,
+ mv_latent_dir=mv_latent_dir,
+ load_raw=load_raw,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ self.avoid_loading_first = False
+
+ # self.feat_scale_factor = torch.Tensor([0.99227685, 1.014337 , 0.20842505, 0.98727155, 0.3305389 ,
+ # 0.38729668, 1.0155401 , 0.9728264 , 1.0009694 , 0.97328585,
+ # 0.2881106 , 0.1652732 , 0.3482468 , 0.9971449 , 0.99895126,
+ # 0.18491288]).float().reshape(1,1,-1)
+
+ # stats for normalization
+ # self.xyz_mean = torch.Tensor([-0.00053714, 0.08095618, -0.01914407] ).reshape(1, 3).float()
+ # self.xyz_std = np.array([0.14593576, 0.15753542, 0.18873914] ).reshape(1,3).astype(np.float32)
+ self.xyz_std = 0.164 # a single global scale factor
+
+ self.kl_mean = np.array([ 0.0184, 0.0024, 0.0926, 0.0517, 0.1781, 0.7137, -0.0355, 0.0267,
+ 0.0183, 0.0164, -0.5090, 0.2406, 0.2733, -0.0256, -0.0285, 0.0761]).reshape(1,16).astype(np.float32)
+
+ self.kl_std = np.array([1.0018, 1.0309, 1.3001, 1.0160, 0.8182, 0.8023, 1.0591, 0.9789, 0.9966,
+ 0.9448, 0.8908, 1.4595, 0.7957, 0.9871, 1.0236, 1.2923]).reshape(1,16).astype(np.float32)
+
+
+ def normalize_pcd_act(self, x):
+ return x / self.xyz_std
+
+ def normalize_kl_feat(self, latent):
+ # return latent / self.feat_scale_factor
+ return (latent-self.kl_mean) / self.kl_std
+
+ def _load_latent(self, ins, rand_pick_one=False, pick_both=False):
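+ # Returns (latent, fps_xyz). With pick_both=True both saved copies are kept
+ # (2,768,16 / 2,768,3); otherwise one copy is sliced out (a random one if
+ # rand_pick_one, else the first).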
+
+ if 'adv' in self.mv_latent_dir: # new latent codes saved have 3 augmentations
+ idx = random.choice([0,1,2])
+ # idx = random.choice([0])
+ latent = np.load(os.path.join(self.mv_latent_dir, ins, f'latent-{idx}.npz')) # pre-calculated VAE latent
+ else:
+ latent = np.load(os.path.join(self.mv_latent_dir, ins, 'latent.npz')) # pre-calculated VAE latent
+
+ latent, fps_xyz = latent['latent_normalized'], latent['query_pcd_xyz'] # 2,768,16; 2,768,3
+
+ if not pick_both:
+ if rand_pick_one:
+ rand_idx = random.randint(0,1)
+ else:
+ rand_idx = 0
+
+ latent, fps_xyz = latent[rand_idx:rand_idx+1], fps_xyz[rand_idx:rand_idx+1]
+
+ # per-channel normalize to std=1 & concat
+ # latent_pcd = np.concatenate([self.normalize_kl_feat(latent), self.normalize_pcd_act(fps_xyz)], -1)
+ # latent_pcd = np.concatenate([latent, self.normalize_pcd_act(fps_xyz)], -1)
+
+ # return latent_pcd, fps_xyz
+ return latent, fps_xyz
+
+
+ def __getitem__(self, index) -> Any:
+ raw_img, c, caption, ins = self.read_chunk(
+ os.path.join(self.file_path, self.chunk_list[index]))
+ # sample = self.post_process.paired_post_process_chunk(sample)
+
+ # ! random zoom in (scale augmentation)
+ # for i in range(img.shape[0]):
+ # for v in range(img.shape[1]):
+ # if random.random() > 0.8:
+ # rand_bg_scale = random.randint(60,99) / 100
+ # st()
+ # img[i,v] = recenter(img[i,v], np.ones_like(img[i,v]), border_ratio=rand_bg_scale)
+
+ # ! process
+ raw_img = torch.from_numpy(raw_img).permute(0, 3, 1, 2) / 255.0 # [0,1]
+
+ if raw_img.shape[-1] != self.reso:
+ raw_img = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ )
+ img = raw_img * 2 - 1 # as gt
+
+ # ! load latent
+ # latent, _ = self._load_latent(ins)
+
+ latent, fps_xyz = self._load_latent(ins, pick_both=True) # analyzing xyz/latent disentangled diffusion
+ # latent, fps_xyz = latent[0], fps_xyz[0] # remove batch dim here
+
+ # fps_xyz = fps_xyz / self.scaling_factor # for xyz training
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz)
+
+ if self.avoid_loading_first: # for training the mv model: drop the first view of each half-chunk
+ index = list(range(1,6)) + list(range(7,12))
+ img = img[index]
+ c = c[index]
+
+ # ! shuffle
+ indices = np.random.permutation(img.shape[0])
+ img = img[indices]
+ c = c[indices]
+
+ img = self.perspective_transformer(img) # create 3D inconsistency
+
+
+ # ! split along V and repeat the other tensors accordingly
+ img = rearrange(img, '(B V) ... -> B V ...', B=2)[:, :self.n_cond_frames]
+ c = rearrange(c, '(B V) ... -> B V ...', B=2)[:, :self.n_cond_frames] # 2 6 25
+
+ # rand perspective aug
+ caption = [caption, caption]
+ ins = [ins, ins]
+
+ # load plucker coord
+ # st()
+ # plucker_c = self.get_plucker_ray(rearrange(c[:, 1:1+self.n_cond_frames], "b t ... -> (b t) ..."))
+ # plucker_c = rearrange(c, '(B V) ... -> B V ...', B=2) # 2 6 25
+
+ # use the view-space camera convention
+ c[0] = self.normalize_camera(c[0], c[0,0:1])
+ c[1] = self.normalize_camera(c[1], c[1,0:1])
+
+ # ! to dict
+ # sample = self.post_process.create_dict_nobatch(sample)
+ ret_dict = {
+ 'caption': caption,
+ 'ins': ins,
+ 'c': c,
+ 'img': img, # input image, range fixed to [-1,1]
+ 'latent': latent,
+ 'normalized-fps-xyz': normalized_fps_xyz
+ # **latent
+ }
+
+ # st()
+
+ return ret_dict
+
+
+
+
+class ChunkObjaverseDatasetDDPMgsT23D(ChunkObjaverseDatasetDDPMgs):
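+ """Text-to-3D variant: returns only caption, latent, and fps-xyz (no images);
+ one augmentation copy is randomly picked per sample."""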
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ classes=classes,
+ load_depth=load_depth,
+ test=test,
+ scene_scale=scene_scale,
+ overfitting=overfitting,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ overfitting_bs=overfitting_bs,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split, # 4 splits to accelerate preprocessing
+ four_view_for_latent=four_view_for_latent,
+ single_view_for_i23d=single_view_for_i23d,
+ load_extra_36_view=load_extra_36_view,
+ gs_cam_format=gs_cam_format,
+ frame_0_as_canonical=frame_0_as_canonical,
+ split_chunk_size=split_chunk_size,
+ mv_input=mv_input,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ read_normal=read_normal,
+ mv_latent_dir=mv_latent_dir,
+ load_raw=True,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ # def __len__(self):
+ # return 40
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def __getitem__(self, index) -> Any:
+
+
+ rgb_path = self.rgb_list[index]
+ ins = str(Path(rgb_path).relative_to(self.file_path).parent.parent.parent)
+
+ # load caption
+ caption = self.caption_data['/'.join(ins.split('/')[1:])]
+
+ # chunk_path = os.path.join(self.file_path, self.chunk_list[index])
+
+ # # load caption
+ # with open(os.path.join(chunk_path, 'caption.txt'),
+ # 'r',
+ # encoding="utf-8") as f:
+ # caption = f.read()
+
+ # # load latent
+ # with open(os.path.join(chunk_path, 'ins.txt'), 'r',
+ # encoding="utf-8") as f:
+ # ins = f.read()
+
+ latent, fps_xyz = self._load_latent(ins, rand_pick_one=True) # analyzing xyz/latent disentangled diffusion
+ latent, fps_xyz = latent[0], fps_xyz[0] # remove batch dim here
+
+ # fps_xyz = fps_xyz / self.scaling_factor # for xyz training
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz)
+
+
+ # ! to dict
+ ret_dict = {
+ # 'caption': caption,
+ 'latent': latent,
+ # 'img': img,
+ 'fps-xyz': fps_xyz,
+ 'normalized-fps-xyz': normalized_fps_xyz,
+ 'caption': caption
+ }
+
+ return ret_dict
+
+
+class ChunkObjaverseDatasetDDPMgsI23D(ChunkObjaverseDatasetDDPMgs):
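+ """Image-to-3D variant: returns a single white-background input image
+ together with caption, latent, and fps-xyz."""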
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ classes=classes,
+ load_depth=load_depth,
+ test=test,
+ scene_scale=scene_scale,
+ overfitting=overfitting,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ overfitting_bs=overfitting_bs,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split, # 4 splits to accelerate preprocessing
+ four_view_for_latent=four_view_for_latent,
+ single_view_for_i23d=single_view_for_i23d,
+ load_extra_36_view=load_extra_36_view,
+ gs_cam_format=gs_cam_format,
+ frame_0_as_canonical=frame_0_as_canonical,
+ split_chunk_size=split_chunk_size,
+ mv_input=mv_input,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ read_normal=read_normal,
+ mv_latent_dir=mv_latent_dir,
+ load_raw=True,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ assert self.load_raw
+ self.scaling_factor = np.array([0.14593576, 0.15753542, 0.18873914])
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ # def __len__(self):
+ # return 40
+
+ def __getitem__(self, index) -> Any:
+
+ rgb_path = self.rgb_list[index]
+ ins = str(Path(rgb_path).relative_to(self.file_path).parent.parent.parent)
+
+ raw_img = imageio.imread(rgb_path).astype(np.float32)
+ alpha_mask = raw_img[..., -1:] / 255
+ raw_img = alpha_mask * raw_img[..., :3] + (
+ 1 - alpha_mask) * np.ones_like(raw_img[..., :3]) * 255
+
+ raw_img = cv2.resize(raw_img, (self.reso, self.reso), interpolation=cv2.INTER_CUBIC)
+ raw_img = torch.from_numpy(raw_img).permute(2,0,1).clip(0,255) # [0,255]
+ img = raw_img / 127.5 - 1
+
+ # with open(os.path.join(chunk_path, 'caption.txt'),
+ # 'r',
+ # encoding="utf-8") as f:
+ # caption = f.read()
+
+ # latent = self._load_latent(ins, True)[0]
+ latent, fps_xyz = self._load_latent(ins, rand_pick_one=True) # analyzing xyz/latent disentangled diffusion
+ latent, fps_xyz = latent[0], fps_xyz[0]
+
+ # fps_xyz = fps_xyz / self.scaling_factor # for xyz training
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz)
+
+ # load caption
+ caption = self.caption_data['/'.join(ins.split('/')[1:])]
+
+ # ! to dict
+ ret_dict = {
+ # 'caption': caption,
+ 'latent': latent,
+ 'img': img.numpy(), # return numpy rather than a Tensor to avoid potential 'too many open files' errors in dataloader workers
+ 'fps-xyz': fps_xyz,
+ 'normalized-fps-xyz': normalized_fps_xyz,
+ 'caption': caption
+ }
+
+ return ret_dict
+
+class ChunkObjaverseDatasetDDPMgsMV23D(ChunkObjaverseDatasetDDPMgs):
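+ """MV-to-3D variant: returns one clean view per half-chunk as the input
+ image and perspective-augmented views as 'mv_img' conditioning."""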
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ classes=classes,
+ load_depth=load_depth,
+ test=test,
+ scene_scale=scene_scale,
+ overfitting=overfitting,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ overfitting_bs=overfitting_bs,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split, # 4 splits to accelerate preprocessing
+ four_view_for_latent=four_view_for_latent,
+ single_view_for_i23d=single_view_for_i23d,
+ load_extra_36_view=load_extra_36_view,
+ gs_cam_format=gs_cam_format,
+ frame_0_as_canonical=frame_0_as_canonical,
+ split_chunk_size=split_chunk_size,
+ mv_input=mv_input,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ read_normal=read_normal,
+ mv_latent_dir=mv_latent_dir,
+ load_raw=False,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ assert not self.load_raw
+ # self.scaling_factor = np.array([0.14593576, 0.15753542, 0.18873914])
+
+ self.n_cond_frames = 4 # an easier version for now.
+ self.avoid_loading_first = True
+
+ def __getitem__(self, index) -> Any:
+ raw_img, c, caption, ins = self.read_chunk(
+ os.path.join(self.file_path, self.chunk_list[index]))
+
+ # ! process
+ raw_img = torch.from_numpy(raw_img).permute(0, 3, 1, 2) / 255.0 # [0,1]
+
+ if raw_img.shape[-1] != self.reso:
+ raw_img = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ )
+ img = raw_img * 2 - 1 # as gt
+
+ # ! load latent
+ # latent, _ = self._load_latent(ins)
+
+ latent, fps_xyz = self._load_latent(ins, pick_both=True) # analyzing xyz/latent disentangled diffusion
+ # latent, fps_xyz = latent[0], fps_xyz[0] # remove batch dim here
+
+ # fps_xyz = fps_xyz / self.scaling_factor # for xyz training
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz)
+
+ if self.avoid_loading_first: # for training the mv model: drop the first view of each half-chunk
+ index = list(range(1,self.chunk_size//2)) + list(range(self.chunk_size//2+1, self.chunk_size))
+ img = img[index]
+ c = c[index]
+
+ # ! shuffle
+ indices = np.random.permutation(img.shape[0])
+ img = img[indices]
+ c = c[indices]
+
+ aug_img = self.perspective_transformer(img) # create 3D inconsistency
+
+ # ! split along V and repeat the other tensors accordingly
+ img = rearrange(img, '(B V) ... -> B V ...', B=2)[:, 0:1] # only return first view (randomly sampled)
+
+ aug_img = rearrange(aug_img, '(B V) ... -> B V ...', B=2)[:, 1:self.n_cond_frames+1]
+ c = rearrange(c, '(B V) ... -> B V ...', B=2)[:, 1:self.n_cond_frames+1] # 2 6 25
+
+
+ # use the view-space camera convention
+ c[0] = self.normalize_camera(c[0], c[0,0:1])
+ c[1] = self.normalize_camera(c[1], c[1,0:1])
+
+ caption = [caption, caption]
+ ins = [ins, ins]
+
+ # ! to dict
+ # sample = self.post_process.create_dict_nobatch(sample)
+ ret_dict = {
+ 'caption': caption,
+ 'ins': ins,
+ 'c': c,
+ 'img': img, # input image, range fixed to [-1,1]
+ 'mv_img': aug_img,
+ 'latent': latent,
+ 'normalized-fps-xyz': normalized_fps_xyz
+ # **latent
+ }
+
+ # st()
+
+ return ret_dict
+
+
+class ChunkObjaverseDatasetDDPMgsMV23DSynthetic(ChunkObjaverseDatasetDDPMgs):
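+ """MV23D with synthetic multi-view inputs: 3x2 zero123++-style grids,
+ optionally mixed with GT multi-view chunks (load_synthetic_only=False)."""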
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ classes=classes,
+ load_depth=load_depth,
+ test=test,
+ scene_scale=scene_scale,
+ overfitting=overfitting,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ overfitting_bs=overfitting_bs,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split, # 4 splits to accelerate preprocessing
+ four_view_for_latent=four_view_for_latent,
+ single_view_for_i23d=single_view_for_i23d,
+ load_extra_36_view=load_extra_36_view,
+ gs_cam_format=gs_cam_format,
+ frame_0_as_canonical=frame_0_as_canonical,
+ split_chunk_size=split_chunk_size,
+ mv_input=mv_input,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ read_normal=read_normal,
+ mv_latent_dir=mv_latent_dir,
+ load_raw=True,
+ load_instance_only=True,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ # assert not self.load_raw
+ # self.scaling_factor = np.array([0.14593576, 0.15753542, 0.18873914])
+
+ self.n_cond_frames = 6 # an easier version for now.
+ self.avoid_loading_first = True
+ self.indices = np.array([0,1,2,3,4,5])
+ self.img_root_dir = '/cpfs01/user/lanyushi.p/data/unzip4_img'
+
+
+ azimuths = np.array([30, 90, 150, 210, 270, 330]).astype(float)
+ elevations = np.array([20, -10, 20, -10, 20, -10]).astype(float)
+
+ zero123pp_pose, _ = generate_input_camera(1.8, [[elevations[i], azimuths[i]] for i in range(6)], fov=30)
+ K = torch.Tensor([1.3889, 0.0000, 0.5000, 0.0000, 1.3889, 0.5000, 0.0000, 0.0000, 0.0039]).to(zero123pp_pose) # intrinsics shared across all views
+ zero123pp_pose = torch.cat([zero123pp_pose.reshape(6,-1), K.unsqueeze(0).repeat(6,1)], dim=-1)
+
+
+ eval_camera = zero123pp_pose[self.indices].float().cpu().numpy() # for normalization
+ self.eval_camera = self.normalize_camera(eval_camera, eval_camera[0:1]) # the first img is not used.
+
+ # self.load_synthetic_only = False
+ self.load_synthetic_only = True
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def _getitem_synthetic(self, index) -> Any:
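+ # Split the 3x2 grid image into 6 views, recenter each, and pair them with
+ # the single-view input image, the pre-computed latent / fps-xyz, the
+ # caption, and the fixed (normalized) zero123++ cameras.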
+
+ rgb_fname = Path(self.rgb_list[index])
+ # ins = self.mvi_objv_mapping(rgb_fname.parent.parent.stem)
+
+ # ins = str(Path(rgb_fname).parent.parent.stem)
+
+ ins = str((Path(rgb_fname).relative_to(self.file_path)).parent.parent)
+
+ mv_img = imageio.imread(rgb_fname)
+ # st()
+ mv_img = rearrange(mv_img, '(n h) (m w) c -> (n m) h w c', n=3, m=2)[self.indices] # (6, 320, 320, 3)
+ mv_img = np.stack([recenter(img, np.ones_like(img), border_ratio=0.1) for img in mv_img], axis=0)
+ mv_img = rearrange(mv_img, 'b h w c -> b c h w') # to channel-first torch convention
+ mv_img = torch.from_numpy(mv_img) / 127.5 - 1
+
+ # ! load single-view image here
+ img_idx = self.mvi_objv_mapping[rgb_fname.stem]
+ img_path = os.path.join(self.img_root_dir, rgb_fname.parent.relative_to(self.file_path), img_idx, f'{img_idx}.png')
+
+ raw_img = imageio.imread(img_path).astype(np.float32)
+ alpha_mask = raw_img[..., -1:] / 255
+ raw_img = alpha_mask * raw_img[..., :3] + (
+ 1 - alpha_mask) * np.ones_like(raw_img[..., :3]) * 255
+
+ raw_img = cv2.resize(raw_img, (self.reso, self.reso), interpolation=cv2.INTER_CUBIC)
+ raw_img = torch.from_numpy(raw_img).permute(2,0,1).clip(0,255) # [0,255]
+ img = raw_img / 127.5 - 1
+
+ latent, fps_xyz = self._load_latent(ins, pick_both=False) # analyzing xyz/latent disentangled diffusion
+ latent, fps_xyz = latent[0], fps_xyz[0]
+
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz) # for stage-1
+
+ # use the view-space camera convention
+ # ins = [ins, ins]
+ # st()
+ caption = self.caption_data['/'.join(ins.split('/')[1:])]
+
+ # ! to dict
+ # sample = self.post_process.create_dict_nobatch(sample)
+ ret_dict = {
+ 'caption': caption,
+ # 'ins': ins,
+ 'c': self.eval_camera,
+ 'img': img, # input image, range fixed to [-1,1]
+ 'mv_img': mv_img,
+ 'latent': latent,
+ 'normalized-fps-xyz': normalized_fps_xyz,
+ 'fps-xyz': fps_xyz,
+ }
+
+ return ret_dict
+
+
+ def _getitem_gt(self, index) -> Any:
+ raw_img, c, caption, ins = self.read_chunk(
+ os.path.join(self.gt_mv_file_path, self.gt_chunk_list[index]))
+
+ # ! process
+ raw_img = torch.from_numpy(raw_img).permute(0, 3, 1, 2) / 255.0 # [0,1]
+
+ if raw_img.shape[-1] != self.reso:
+ raw_img = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ )
+ img = raw_img * 2 - 1 # as gt
+
+ # ! load latent
+ # latent, _ = self._load_latent(ins)
+
+ latent, fps_xyz = self._load_latent(ins, pick_both=True) # analyzing xyz/latent disentangled diffusion
+ # latent, fps_xyz = latent[0], fps_xyz[0] # remove batch dim here
+
+ # fps_xyz = fps_xyz / self.scaling_factor # for xyz training
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz)
+
+ if self.avoid_loading_first: # for training the mv model: drop the first view of each half-chunk
+ index = list(range(1,self.chunk_size//2)) + list(range(self.chunk_size//2+1, self.chunk_size))
+ img = img[index]
+ c = c[index]
+
+ # ! shuffle
+ indices = np.random.permutation(img.shape[0])
+ img = img[indices]
+ c = c[indices]
+
+ # st()
+ aug_img = self.mv_resize_cls(img)
+ aug_img = self.perspective_transformer(aug_img) # create 3D inconsistency
+
+ # ! split along V and repeat the other tensors accordingly
+ img = rearrange(img, '(B V) ... -> B V ...', B=2)[:, 0:1] # only return first view (randomly sampled)
+
+ aug_img = rearrange(aug_img, '(B V) ... -> B V ...', B=2)[:, 1:self.n_cond_frames+1]
+ c = rearrange(c, '(B V) ... -> B V ...', B=2)[:, 1:self.n_cond_frames+1] # 2 6 25
+
+
+ # use the view-space camera convention
+ c[0] = self.normalize_camera(c[0], c[0,0:1])
+ c[1] = self.normalize_camera(c[1], c[1,0:1])
+
+ caption = [caption, caption]
+ ins = [ins, ins]
+
+ # ! to dict
+ # sample = self.post_process.create_dict_nobatch(sample)
+ ret_dict = {
+ 'caption': caption,
+ 'ins': ins,
+ 'c': c,
+ 'img': img, # input image, range fixed to [-1,1]
+ 'mv_img': aug_img,
+ 'latent': latent,
+ 'normalized-fps-xyz': normalized_fps_xyz,
+ 'fps-xyz': fps_xyz,
+ }
+
+ return ret_dict
+
+
+ def __getitem__(self, index) -> Any:
+ # load synthetic version
+
+ try:
+ synthetic_mv = self._getitem_synthetic(index)
+ except Exception:
+ # logger.log(Path(self.rgb_list[index]), 'missing')
+ # fall back to a random earlier sample if this one is missing or corrupted
+ synthetic_mv = self._getitem_synthetic(random.randint(0, len(self.rgb_list)//2))
+
+ if self.load_synthetic_only:
+ return synthetic_mv
+
+ else:
+ # load gt mv chunk
+ gt_chunk_index = random.randint(0, len(self.gt_chunk_list)-1)
+ gt_mv = self._getitem_gt(gt_chunk_index)
+
+ # merge them together along batch dim
+ merged_mv = {}
+ for k, v in synthetic_mv.items(): # merge, synthetic - gt order
+ if k not in ['caption', 'ins']:
+ if k == 'img':
+ merged_mv[k] = np.concatenate([v[None], gt_mv[k][:, 0]], axis=0).astype(np.float32)
+ else:
+ merged_mv[k] = np.concatenate([v[None], gt_mv[k]], axis=0).astype(np.float32)
+ else:
+ merged_mv[k] = [v] + gt_mv[k] # list
+
+ return merged_mv
+
+
+
+
+
+class ChunkObjaverseDatasetDDPMgsI23D_loadMV(ChunkObjaverseDatasetDDPMgs):
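+ """I23D variant that loads multi-view chunks: the latent / fps-xyz pair is
+ repeated across n_cond_frames single-view conditions per sample."""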
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ load_extra_36_view=False,
+ gs_cam_format=False,
+ frame_0_as_canonical=True,
+ split_chunk_size=10,
+ mv_input=True,
+ append_depth=False,
+ append_xyz=False,
+ pcd_path=None,
+ load_pcd=False,
+ read_normal=False,
+ mv_latent_dir='',
+ canonicalize_pcd=False,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs):
+
+ super().__init__(
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ classes=classes,
+ load_depth=load_depth,
+ test=test,
+ scene_scale=scene_scale,
+ overfitting=overfitting,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size,
+ overfitting_bs=overfitting_bs,
+ interval=interval,
+ plucker_embedding=plucker_embedding,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split, # 4 splits to accelerate preprocessing
+ four_view_for_latent=four_view_for_latent,
+ single_view_for_i23d=single_view_for_i23d,
+ load_extra_36_view=load_extra_36_view,
+ gs_cam_format=gs_cam_format,
+ frame_0_as_canonical=frame_0_as_canonical,
+ split_chunk_size=split_chunk_size,
+ mv_input=mv_input,
+ append_depth=append_depth,
+ append_xyz=append_xyz,
+ pcd_path=pcd_path,
+ load_pcd=load_pcd,
+ read_normal=read_normal,
+ mv_latent_dir=mv_latent_dir,
+ load_raw=False,
+ # shards_folder_num=4,
+ # eval=False,
+ **kwargs)
+
+ assert not self.load_raw
+ # self.scaling_factor = np.array([0.14593576, 0.15753542, 0.18873914])
+
+ self.n_cond_frames = 5 # an easier version for now.
+ self.avoid_loading_first = True
+
+ # self.canonicalize_pcd = canonicalize_pcd
+ self.canonicalize_pcd = False # hard-coded off for now; the ctor argument is ignored
+
+ def canonicalize_xyz(self, c, pcd):
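+ # Rotate the point cloud into each camera's view space with the w2c rotation
+ # (transpose of the c2w rotation block); translation is not applied.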
+
+ B = c.shape[0]
+ camera_poses_rot = c[:, :16].reshape(B, 4, 4)[:, :3, :3]
+
+ R_inv = np.transpose(camera_poses_rot, (0,2,1)) # w2c rotation
+
+ new_pcd = (R_inv @ np.transpose(pcd, (0,2,1))) # B 3 3 @ B 3 N
+ new_pcd = np.transpose(new_pcd, (0,2,1))
+
+ return new_pcd
+
+
+ def __getitem__(self, index) -> Any:
+ raw_img, c, caption, ins = self.read_chunk(
+ os.path.join(self.file_path, self.chunk_list[index]))
+
+ # ! process
+ raw_img = torch.from_numpy(raw_img).permute(0, 3, 1, 2) / 255.0 # [0,1]
+
+ if raw_img.shape[-1] != self.reso:
+ raw_img = torch.nn.functional.interpolate(
+ input=raw_img,
+ size=(self.reso, self.reso),
+ mode='bilinear',
+ align_corners=False,
+ )
+ img = raw_img * 2 - 1 # as gt
+
+ # ! load latent
+ # latent, _ = self._load_latent(ins)
+
+ if self.avoid_loading_first: # for training the mv model: drop the first view of each half-chunk
+ index = list(range(1,self.chunk_size//2)) + list(range(self.chunk_size//2+1, self.chunk_size))
+ img = img[index]
+ c = c[index]
+
+ # ! shuffle
+ indices = np.random.permutation(img.shape[0])[:self.n_cond_frames*2]
+ img = img[indices]
+ c = c[indices]
+
+ latent, fps_xyz = self._load_latent(ins, pick_both=True) # analyzing xyz/latent disentangled diffusion
+ # latent, fps_xyz = latent[0], fps_xyz[0] # remove batch dim here
+
+ fps_xyz = np.repeat(fps_xyz, self.n_cond_frames, 0)
+ latent = np.repeat(latent, self.n_cond_frames, 0)
+ normalized_fps_xyz = self.normalize_pcd_act(fps_xyz)
+
+ if self.canonicalize_pcd:
+ normalized_fps_xyz = self.canonicalize_xyz(c, normalized_fps_xyz)
+
+ # repeat
+ caption = [caption] * self.n_cond_frames * 2
+ ins = [ins] * self.n_cond_frames * 2
+
+ ret_dict = {
+ 'caption': caption,
+ 'ins': ins,
+ 'c': c,
+ 'img': img, # input image, range fixed to [-1,1]
+ 'latent': latent,
+ 'normalized-fps-xyz': normalized_fps_xyz,
+ 'fps-xyz': fps_xyz,
+ # **latent
+ }
+
+ return ret_dict
+
+
+class RealDataset(Dataset):
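+ """In-the-wild single-image dataset: loads '-input' RGB(A) images,
+ composites onto white, resizes to reso, and maps to [-1, 1]."""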
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ ) -> None:
+ super().__init__()
+
+ self.file_path = file_path
+ self.overfitting = overfitting
+ self.scene_scale = scene_scale
+ self.reso = reso
+ self.reso_encoder = reso_encoder
+ self.classes = False
+ self.load_depth = load_depth
+ self.preprocess = preprocess
+ self.plucker_embedding = plucker_embedding
+
+ self.rgb_list = []
+
+ all_fname = [
+ t for t in os.listdir(self.file_path)
+ if t.split('.')[-1] in ['png', 'jpg'] # last suffix, robust to dotted names
+ ]
+
+ all_fname = [name for name in all_fname if '-input' in name ]
+
+ self.rgb_list += ([
+ os.path.join(self.file_path, fname) for fname in all_fname
+ ])
+
+ # st()
+
+ # if len(self.rgb_list) == 1:
+ # # placeholder
+ # self.rgb_list = self.rgb_list * 40
+
+ # ! setup normalization
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+
+ assert imgnet_normalize
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+ # camera = torch.load('eval_pose.pt', map_location='cpu')
+ # self.eval_camera = camera
+
+ # pre-cache
+ # self.calc_rays_plucker()
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def __getitem__(self, index) -> Any:
+ # return super().__getitem__(index)
+
+ rgb_fname = self.rgb_list[index]
+ # ! preprocess, normalize
+
+ raw_img = imageio.imread(rgb_fname)
+
+ # interpolation=cv2.INTER_AREA)
+ if raw_img.shape[-1] == 4:
+ alpha_mask = raw_img[..., 3:4] / 255.0
+ bg_white = np.ones_like(alpha_mask) * 255.0
+ raw_img = raw_img[..., :3] * alpha_mask + (
+ 1 - alpha_mask) * bg_white # composite onto a white background, (H, W, 3)
+ raw_img = raw_img.astype(np.uint8)
+
+ # raw_img = recenter(raw_img, np.ones_like(raw_img), border_ratio=0.2)
+
+ # log gt
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ img = torch.from_numpy(img)[..., :3].permute(
+ 2, 0, 1
+ ) / 127.5 - 1 #[3, reso, reso], normalize to [-1,1], follow triplane range
+
+ ret_dict = {
+ # 'rgb_fname': rgb_fname,
+ # 'img_to_encoder':
+ # img_to_encoder.unsqueeze(0).repeat_interleave(40, 0),
+ 'img': img,
+ # 'c': self.eval_camera, # TODO, get pre-calculated samples
+ # 'ins': 'placeholder',
+ # 'bbox': 'placeholder',
+ # 'caption': 'placeholder',
+ }
+
+ # ! repeat as an instance
+
+ return ret_dict
+
+
+
+
+class RealDataset_GSO(Dataset):
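+ """Single-view evaluation dataset. The active branch loads a hand-picked
+ instant-mesh input image; the disabled branches cover GSO renderings,
+ free-3d renderings, and g-objv animal images."""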
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ ) -> None:
+ super().__init__()
+
+ self.file_path = file_path
+ self.overfitting = overfitting
+ self.scene_scale = scene_scale
+ self.reso = reso
+ self.reso_encoder = reso_encoder
+ self.classes = False
+ self.load_depth = load_depth
+ self.preprocess = preprocess
+ self.plucker_embedding = plucker_embedding
+
+ self.rgb_list = []
+
+
+
+ # ! for gso-rendering
+ all_objs = os.listdir(self.file_path)
+ all_objs.sort()
+
+ if True: # instant-mesh picked images
+ # if False:
+ all_instances = os.listdir(self.file_path)
+ # all_fname = [
+ # t for t in all_instances
+ # if t.split('.')[1] in ['png', 'jpg']
+ # ]
+
+ # all_fname = [name for name in all_fname if '-input' in name ]
+
+ # all_fname = ['house2-input.png', 'plant-input.png']
+ all_fname = ['house2-input.png']
+
+ self.rgb_list = [os.path.join(self.file_path, name) for name in all_fname]
+
+ if False:
+ for obj_folder in tqdm(all_objs[515:]):
+ # for obj_folder in tqdm(all_objs[:515]):
+ # for obj_folder in tqdm(all_objs[:]):
+ # for obj_folder in tqdm(sorted(os.listdir(self.file_path))[515:]):
+ # for idx in range(0,25,5):
+ for idx in [0]: # only query frontal view is enough
+ self.rgb_list.append(os.path.join(self.file_path, obj_folder, 'rgba', f'{idx:03d}.png'))
+
+
+ # for free-3d rendering
+ if False:
+ # if True:
+ # all_instances = sorted(os.listdir(self.file_path))
+
+ all_instances = ['BAGEL_WITH_CHEESE',
+ 'BALANCING_CACTUS',
+ 'Baby_Elements_Stacking_Cups',
+ 'Breyer_Horse_Of_The_Year_2015',
+ 'COAST_GUARD_BOAT',
+ 'CONE_SORTING',
+ 'CREATIVE_BLOCKS_35_MM',
+ 'Cole_Hardware_Mini_Honey_Dipper',
+ 'FAIRY_TALE_BLOCKS',
+ 'FIRE_ENGINE',
+ 'FOOD_BEVERAGE_SET',
+ 'GEOMETRIC_PEG_BOARD',
+ 'Great_Dinos_Triceratops_Toy',
+ 'JUICER_SET',
+ 'STACKING_BEAR',
+ 'STACKING_RING',
+ 'Schleich_African_Black_Rhino']
+
+ for instance in all_instances:
+ self.rgb_list += ([
+ # os.path.join(self.file_path, instance, 'rgb', f'{fname:06d}.png') for fname in range(0,250,50)
+ # os.path.join(self.file_path, instance, 'rgb', f'{fname:06d}.png') for fname in range(0,250,100)
+ # os.path.join(self.file_path, instance, f'{fname:03d}.png') for fname in range(0,25,5)
+ os.path.join(self.file_path, instance, 'render_mvs_25', 'model', f'{fname:03d}.png') for fname in range(0,25,4)
+ ])
+
+ # if True: # g-objv animals images for i23d eval
+ if False:
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200][:600]
+
+ for obj_folder in tqdm(all_objs[:]):
+ for idx in [0]: # only query frontal view is enough
+ self.rgb_list.append(os.path.join(self.file_path, obj_folder, f'{idx}.jpg'))
+
+
+ # ! setup normalization
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+
+ assert imgnet_normalize
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+ # camera = torch.load('eval_pose.pt', map_location='cpu')
+ # self.eval_camera = camera
+
+ # pre-cache
+ # self.calc_rays_plucker()
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def __getitem__(self, index) -> Any:
+ # return super().__getitem__(index)
+
+ rgb_fname = self.rgb_list[index]
+ # ! preprocess, normalize
+
+ raw_img = imageio.imread(rgb_fname)
+
+ # interpolation=cv2.INTER_AREA)
+ if raw_img.shape[-1] == 4:
+ alpha_mask = raw_img[..., 3:4] / 255.0
+ bg_white = np.ones_like(alpha_mask) * 255.0
+ raw_img = raw_img[..., :3] * alpha_mask + (
+ 1 - alpha_mask) * bg_white # composite onto a white background, (H, W, 3)
+ raw_img = raw_img.astype(np.uint8)
+
+ # raw_img = recenter(raw_img, np.ones_like(raw_img), border_ratio=0.2)
+
+ # log gt
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ img = torch.from_numpy(img)[..., :3].permute(
+ 2, 0, 1
+ ) / 127.5 - 1 #[3, reso, reso], normalize to [-1,1], follow triplane range
+
+ ret_dict = {
+ 'img': img,
+ # 'ins': str(Path(rgb_fname).parent.parent.stem), # for gso-rendering
+ 'ins': str(Path(rgb_fname).relative_to(self.file_path)), # for gso-rendering
+ # 'ins': rgb_fname,
+ }
+
+ return ret_dict
+
+
+
+class RealMVDataset(Dataset):
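+ """Real-image multi-view inference dataset: pairs each '-input' image with
+ its mv-diffusion output grid and the fixed, normalized zero123++ cameras."""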
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1,
+ plucker_embedding=False,
+ shuffle_across_cls=False,
+ wds_split=1, # 4 splits to accelerate preprocessing
+ ) -> None:
+ super().__init__()
+
+ self.file_path = file_path
+ self.overfitting = overfitting
+ self.scene_scale = scene_scale
+ self.reso = reso
+ self.reso_encoder = reso_encoder
+ self.classes = False
+ self.load_depth = load_depth
+ self.preprocess = preprocess
+ self.plucker_embedding = plucker_embedding
+
+ self.rgb_list = []
+
+ all_fname = [
+ t for t in os.listdir(self.file_path)
+ if t.split('.')[-1] in ['png', 'jpg'] # last suffix, robust to dotted names
+ ]
+ all_fname = [name for name in all_fname if '-input' in name ]
+ # all_fname = [name for name in all_fname if 'sorting_board-input' in name ]
+ # all_fname = [name for name in all_fname if 'teasure_chest-input' in name ]
+ # all_fname = [name for name in all_fname if 'bubble_mart_blue-input' in name ]
+ # all_fname = [name for name in all_fname if 'chair_comfort-input' in name ]
+ self.rgb_list += ([
+ os.path.join(self.file_path, fname) for fname in all_fname
+ ])
+ # if len(self.rgb_list) == 1:
+ # # placeholder
+ # self.rgb_list = self.rgb_list * 40
+
+ # ! setup normalization
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+
+ azimuths = np.array([30, 90, 150, 210, 270, 330]).astype(float)
+ elevations = np.array([20, -10, 20, -10, 20, -10]).astype(float)
+
+ # zero123pp_pose, _ = generate_input_camera(1.6, [[elevations[i], azimuths[i]] for i in range(6)], fov=30)
+ zero123pp_pose, _ = generate_input_camera(1.8, [[elevations[i], azimuths[i]] for i in range(6)], fov=30)
+ K = torch.Tensor([1.3889, 0.0000, 0.5000, 0.0000, 1.3889, 0.5000, 0.0000, 0.0000, 0.0039]).to(zero123pp_pose) # intrinsics shared across all views
+ # st()
+ zero123pp_pose = torch.cat([zero123pp_pose.reshape(6,-1), K.unsqueeze(0).repeat(6,1)], dim=-1)
+
+ # ! directly adopt gt input
+ # self.indices = np.array([0,2,4,5])
+ # eval_camera = zero123pp_pose[self.indices]
+ # self.eval_camera = torch.cat([torch.zeros_like(eval_camera[0:1]),eval_camera], 0) # first c not used as condition here, just placeholder
+
+ # ! adopt mv-diffusion output as input.
+ # self.indices = np.array([1,0,2,4,5])
+ self.indices = np.array([0,1,2,3,4,5])
+ eval_camera = zero123pp_pose[self.indices].float().cpu().numpy() # for normalization
+
+ # eval_camera = zero123pp_pose[self.indices]
+ # self.eval_camera = eval_camera
+ # self.eval_camera = torch.cat([torch.zeros_like(eval_camera[0:1]),eval_camera], 0) # first c not used as condition here, just placeholder
+
+ # # * normalize here
+ self.eval_camera = self.normalize_camera(eval_camera, eval_camera[0:1]) # the first img is not used.
+
+ # self.mv_resize_cls = torchvision.transforms.Resize(320, interpolation=torchvision.transforms.InterpolationMode.BILINEAR,
+ # max_size=None, antialias=True)
+
+ def normalize_camera(self, c, c_frame0):
+ # assert c.shape[0] == self.chunk_size # 8 or 10
+
+ B = c.shape[0]
+ camera_poses = c[:, :16].reshape(B, 4, 4) # 4x4 c2w poses
+ canonical_camera_poses = c_frame0[:, :16].reshape(1, 4, 4)
+ inverse_canonical_pose = np.linalg.inv(canonical_camera_poses)
+ inverse_canonical_pose = np.repeat(inverse_canonical_pose, B, 0)
+
+ cam_radius = np.linalg.norm(
+ c_frame0[:, :16].reshape(1, 4, 4)[:, :3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+
+ frame1_fixed_pos = np.repeat(np.eye(4)[None], 1, axis=0)
+ frame1_fixed_pos[:, 2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ inverse_canonical_pose
+
+ new_camera_poses = np.repeat(
+ transform, 1, axis=0
+ ) @ camera_poses # [V, 4, 4]. np.repeat() is th.repeat_interleave()
+
+ c = np.concatenate([new_camera_poses.reshape(B, 16), c[:, 16:]],
+ axis=-1)
+
+ return c
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def __getitem__(self, index) -> Any:
+ # return super().__getitem__(index)
+
+ rgb_fname = self.rgb_list[index]
+
+ raw_img = imageio.imread(rgb_fname)[..., :3]
+ raw_img = cv2.resize(raw_img, (self.reso, self.reso), interpolation=cv2.INTER_CUBIC)
+ raw_img = torch.from_numpy(raw_img).permute(2,0,1).clip(0,255) # [0,255]
+ img = raw_img / 127.5 - 1
+
+
+ # ! if loading mv-diff output views
+ mv_img = imageio.imread(rgb_fname.replace('-input', ''))
+ mv_img = rearrange(mv_img, '(n h) (m w) c -> (n m) h w c', n=3, m=2)[self.indices] # (6, 320, 320, 3)
+ mv_img = np.stack([recenter(img, np.ones_like(img), border_ratio=0.1) for img in mv_img], axis=0)
+ mv_img = rearrange(mv_img, 'b h w c -> b c h w') # to channel-first torch convention
+ mv_img = torch.from_numpy(mv_img) / 127.5 - 1
+
+ ret_dict = {
+ 'img': img,
+ 'mv_img': mv_img,
+ 'c': self.eval_camera,
+ 'caption': 'null',
+ }
+
+ return ret_dict
+
+
+
+
+
+class NovelViewObjverseDataset(MultiViewObjverseDataset):
+ """novel view prediction version.
+ """
+
+ def __init__(self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ **kwargs):
+ super().__init__(file_path, reso, reso_encoder, preprocess, classes,
+ load_depth, test, scene_scale, overfitting,
+ imgnet_normalize, dataset_size, overfitting_bs,
+ **kwargs)
+
+ def __getitem__(self, idx):
+ input_view = super().__getitem__(
+ idx) # get previous input view results
+
+ # get novel view of the same instance
+ novel_view = super().__getitem__(
+ (idx // self.instance_data_length) * self.instance_data_length +
+ random.randint(0, self.instance_data_length - 1))
+
+ # assert input_view['ins_name'] == novel_view['ins_name'], 'should sample novel view from the same instance'
+
+ input_view.update({f'nv_{k}': v for k, v in novel_view.items()})
+ return input_view
+
+
+class MultiViewObjverseDatasetforLMDB(MultiViewObjverseDataset):
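+ """LMDB-export variant: returns raw uint8 images, normals, depth, camera,
+ bbox, caption, and alpha mask for packing into LMDB / wds shards."""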
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ shuffle_across_cls=False,
+ wds_split=1,
+ four_view_for_latent=False,
+ ):
+ super().__init__(file_path,
+ reso,
+ reso_encoder,
+ preprocess,
+ classes,
+ load_depth,
+ test,
+ scene_scale,
+ overfitting,
+ imgnet_normalize,
+ dataset_size,
+ overfitting_bs,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split,
+ four_view_for_latent=four_view_for_latent)
+
+ # assert self.reso == 256
+ self.load_caption = True
+
+ with open(
+ # '/cpfs01/shared/V2V/V2V_hdd/yslan/aigc3d/text_captions_cap3d.json'
+ '/nas/shared/public/yslan/data/text_captions_cap3d.json') as f:
+ # '/nas/shared/V2V/yslan/aigc3d/text_captions_cap3d.json') as f:
+ self.caption_data = json.load(f)
+ # lmdb_path = '/cpfs01/user/yangpeiqing.p/yslan/data/Furnitures_uncompressed/'
+
+ # with open(os.path.join(lmdb_path, 'idx_to_ins_mapping.json')) as f:
+ # self.idx_to_ins_mapping = json.load(f)
+
+ def __len__(self):
+ return super().__len__()
+ # return 100 # for speed debug
+
+ def quantize_depth(self, depth):
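+ # RangeInverse encoding: convert depth to disparity (1/depth), min-max
+ # normalize to [0,1], and quantize to uint8. Depth is recoverable via
+ # d = 1 / ((d_normalized / 255) * (far - near) + near).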
+ # https://developers.google.com/depthmap-metadata/encoding
+ # RangeInverse encoding
+ bg = depth == 0
+ depth[bg] = 3 # push background to a fixed depth so it consumes little of the disparity range
+ disparity = 1 / depth
+
+ far = disparity.max().item() # np array here
+ near = disparity.min().item()
+
+ # d_normalized = (far * (depth-near) / (depth * far - near)) # [0,1] range
+ d_normalized = (disparity - near) / (far - near) # [0,1] range
+ # imageio.imwrite('depth_negative.jpeg', (((depth - near) / (far - near) * 255)<0).numpy().astype(np.uint8))
+ # imageio.imwrite('depth_negative.jpeg', ((depth <0)*255).numpy().astype(np.uint8))
+ d_normalized = np.nan_to_num(d_normalized.cpu().numpy())
+ d_normalized = (np.clip(d_normalized, 0, 1) * 255).astype(np.uint8)
+ # imageio.imwrite('depth.png', d_normalized)
+
+ # d = 1 / ( (d_normalized / 255) * (far-near) + near)
+ # diff = (d[~bg.numpy()] - depth[~bg].numpy()).sum()
+
+ return d_normalized, near, far # return disp
+
+ def __getitem__(self, idx):
+ # ret_dict = super().__getitem__(idx)
+ rgb_fname = self.rgb_list[idx]
+ pose_fname = self.pose_list[idx]
+ raw_img = imageio.imread(rgb_fname) # [..., :3]
+
+ assert raw_img.shape[-1] == 4
+
+ # st() # cv2.imwrite('img_CV2_90.jpg', a, [int(cv2.IMWRITE_JPEG_QUALITY), 90])
+ # if raw_img.shape[-1] == 4: # ! set bg to white
+
+ alpha_mask = raw_img[..., -1:] / 255 # [0,1]
+
+ raw_img = alpha_mask * raw_img[..., :3] + (
+ 1 - alpha_mask) * np.ones_like(raw_img[..., :3]) * 255
+
+ raw_img = np.concatenate([raw_img, alpha_mask * 255], -1)
+ raw_img = raw_img.astype(np.uint8)
+
+ raw_img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+ alpha_mask = raw_img[..., -1] / 255
+ raw_img = raw_img[..., :3]
+
+ # alpha_mask = cv2.resize(alpha_mask, (self.reso, self.reso),
+ # interpolation=cv2.INTER_LANCZOS4)
+
+ c2w = read_camera_matrix_single(pose_fname) #[1, 4, 4] -> [1, 16]
+ c = np.concatenate([c2w.reshape(16), self.intrinsics],
+ axis=0).reshape(25).astype(
+ np.float32) # 25, no '1' dim needed.
+ c = torch.from_numpy(c)
+ # c = np.concatenate([c2w, self.intrinsics], axis=0).reshape(25) # 25, no '1' dim needed.
+
+ # if self.load_depth:
+ # depth, depth_mask, depth_mask_sr = read_dnormal(self.depth_list[idx],
+ # try:
+ depth, normal = read_dnormal(self.depth_list[idx], c2w[:3, 3:],
+ self.reso, self.reso)
+
+ # ! quantize depth for fast decoding
+ # d_normalized, d_near, d_far = self.quantize_depth(depth)
+
+ # ! add frame_0 alignment
+
+ # try:
+
+ ins = str(
+ (Path(self.data_ins_list[idx]).relative_to(self.file_path)).parent)
+ # if self.shuffle_across_cls:
+ if self.load_caption:
+ caption = self.caption_data['/'.join(ins.split('/')[1:])]
+ bbox = self.load_bbox(torch.from_numpy(alpha_mask) > 0)
+ else:
+ caption = '' # some instances in g-alignment-xl lack captions and would otherwise fail
+ bbox = self.load_bbox(torch.from_numpy(np.ones_like(alpha_mask)) > 0)
+
+ # else:
+ # caption = self.caption_data[ins]
+
+ ret_dict = {
+ 'normal': normal,
+ 'raw_img': raw_img,
+ 'c': c,
+ # 'depth_mask': depth_mask, # 64x64 here?
+ 'bbox': bbox,
+ 'ins': ins,
+ 'caption': caption,
+ 'alpha_mask': alpha_mask,
+ 'depth': depth, # return for pcd creation
+ # 'd_normalized': d_normalized,
+ # 'd_near': d_near,
+ # 'd_far': d_far,
+ # 'fname': rgb_fname,
+ }
+ return ret_dict
+
+
+class MultiViewObjverseDatasetforLMDB_nocaption(MultiViewObjverseDatasetforLMDB):
+
+ def __init__(
+ self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ shuffle_across_cls=False,
+ wds_split=1,
+ four_view_for_latent=False,
+ ):
+ super().__init__(file_path,
+ reso,
+ reso_encoder,
+ preprocess,
+ classes,
+ load_depth,
+ test,
+ scene_scale,
+ overfitting,
+ imgnet_normalize,
+ dataset_size,
+ overfitting_bs,
+ shuffle_across_cls=shuffle_across_cls,
+ wds_split=wds_split,
+ four_view_for_latent=four_view_for_latent)
+
+ self.load_caption = False
+
+
+class Objv_LMDBDataset_MV_Compressed(LMDBDataset_MV_Compressed):
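+ """Reads compressed multi-view samples from LMDB and attaches Cap3D captions
+ via the idx-to-instance mapping."""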
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ test=False,
+ **kwargs):
+ super().__init__(lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize,
+ dataset_size=dataset_size,
+ **kwargs)
+ self.instance_data_length = 40 # ! could save some key attributes in LMDB
+ if test:
+ self.length = self.instance_data_length
+ elif dataset_size > 0:
+ self.length = dataset_size * self.instance_data_length
+
+ # load caption data, and idx-to-ins mapping
+ with open(
+ '/cpfs01/shared/V2V/V2V_hdd/yslan/aigc3d/text_captions_cap3d.json'
+ ) as f:
+ self.caption_data = json.load(f)
+ with open(os.path.join(lmdb_path, 'idx_to_ins_mapping.json')) as f:
+ self.idx_to_ins_mapping = json.load(f)
+
+ def _load_data(self, idx):
+ # '''
+ raw_img, depth, c, bbox = self._load_lmdb_data(idx)
+ # raw_img, depth, c, bbox = self._load_lmdb_data_no_decompress(idx)
+
+ # resize depth and bbox
+ caption = self.caption_data[self.idx_to_ins_mapping[str(idx)]]
+
+ return {
+ **self._post_process_sample(raw_img, depth),
+ 'c': c,
+ 'bbox': (bbox * (self.reso / 512.0)).astype(np.uint8),
+ # 'bbox': (bbox*(self.reso/256.0)).astype(np.uint8), # TODO, double check 512 in wds?
+ 'caption': caption
+ }
+ # '''
+ # raw_img, depth, c, bbox = self._load_lmdb_data_no_decompress(idx)
+ # st()
+ # return {}
+
+ def __getitem__(self, idx):
+ return self._load_data(idx)
+
+
+class Objv_LMDBDataset_MV_NoCompressed(Objv_LMDBDataset_MV_Compressed):
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ test=False,
+ **kwargs):
+ super().__init__(lmdb_path, reso, reso_encoder, imgnet_normalize,
+ dataset_size, test, **kwargs)
+
+ def _load_data(self, idx):
+ # '''
+ raw_img, depth, c, bbox = self._load_lmdb_data_no_decompress(idx)
+
+ # resize depth and bbox
+ caption = self.caption_data[self.idx_to_ins_mapping[str(idx)]]
+
+ return {
+ **self._post_process_sample(raw_img, depth), 'c': c,
+ 'bbox': (bbox * (self.reso / 512.0)).astype(np.uint8),
+ 'caption': caption
+ }
+
+
+class Objv_LMDBDataset_NV_NoCompressed(Objv_LMDBDataset_MV_NoCompressed):
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ test=False,
+ **kwargs):
+ super().__init__(lmdb_path, reso, reso_encoder, imgnet_normalize,
+ dataset_size, test, **kwargs)
+
+ def __getitem__(self, idx):
+ input_view = self._load_data(idx) # get previous input view results
+
+ # get novel view of the same instance
+ try:
+ novel_view = self._load_data(
+ (idx // self.instance_data_length) *
+ self.instance_data_length +
+ random.randint(0, self.instance_data_length - 1))
+ except Exception as e:
+ raise NotImplementedError(idx)
+
+ # assert input_view['ins_name'] == novel_view['ins_name'], 'should sample novel view from the same instance'
+
+ input_view.update({f'nv_{k}': v for k, v in novel_view.items()})
+ return input_view
+
+
+class Objv_LMDBDataset_MV_Compressed_for_lmdb(LMDBDataset_MV_Compressed):
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ test=False,
+ **kwargs):
+ super().__init__(lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize,
+ dataset_size=dataset_size,
+ **kwargs)
+ self.instance_data_length = 40 # ! could save some key attributes in LMDB
+ if test:
+ self.length = self.instance_data_length
+ elif dataset_size > 0:
+ self.length = dataset_size * self.instance_data_length
+
+ # load caption data, and idx-to-ins mapping
+ with open(
+ '/cpfs01/shared/V2V/V2V_hdd/yslan/aigc3d/text_captions_cap3d.json'
+ ) as f:
+ self.caption_data = json.load(f)
+ with open(os.path.join(lmdb_path, 'idx_to_ins_mapping.json')) as f:
+ self.idx_to_ins_mapping = json.load(f)
+
+ # def _load_data(self, idx):
+ # # '''
+ # raw_img, depth, c, bbox = self._load_lmdb_data(idx)
+
+ # # resize depth and bbox
+ # caption = self.caption_data[self.idx_to_ins_mapping[str(idx)]]
+
+ # # st()
+
+ # return {
+ # **self._post_process_sample(raw_img, depth), 'c': c,
+ # 'bbox': (bbox*(self.reso/512.0)).astype(np.uint8),
+ # 'caption': caption
+ # }
+ # # '''
+ # # raw_img, depth, c, bbox = self._load_lmdb_data_no_decompress(idx)
+ # # st()
+ # # return {}
+
+ def load_bbox(self, mask):
+ nonzero_value = torch.nonzero(mask)
+ height, width = nonzero_value.max(dim=0)[0] # despite the names, these are the max (bottom, right) coordinates
+ top, left = nonzero_value.min(dim=0)[0]
+ bbox = torch.tensor([top, left, height, width], dtype=torch.float32)
+ return bbox
+
+ def __getitem__(self, idx):
+ raw_img, depth, c, bbox = self._load_lmdb_data(idx)
+ return {'raw_img': raw_img, 'depth': depth, 'c': c, 'bbox': bbox}
+
+
+class Objv_LMDBDataset_NV_Compressed(Objv_LMDBDataset_MV_Compressed):
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ **kwargs):
+ super().__init__(lmdb_path, reso, reso_encoder, imgnet_normalize,
+ dataset_size, **kwargs)
+
+ def __getitem__(self, idx):
+ input_view = self._load_data(idx) # get previous input view results
+
+ # get novel view of the same instance
+ try:
+ novel_view = self._load_data(
+ (idx // self.instance_data_length) *
+ self.instance_data_length +
+ random.randint(0, self.instance_data_length - 1))
+ except Exception as e:
+ raise NotImplementedError(idx)
+
+ # assert input_view['ins_name'] == novel_view['ins_name'], 'should sample novel view from the same instance'
+
+ input_view.update({f'nv_{k}': v for k, v in novel_view.items()})
+ return input_view
+
+
+
+
+# test tar loading
+def load_wds_ResampledShard(file_path,
+ batch_size,
+ num_workers,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ plucker_embedding=False,
+ decode_encode_img_only=False,
+ load_instance=False,
+ mv_input=False,
+ split_chunk_input=False,
+ duplicate_sample=True,
+ append_depth=False,
+ append_normal=False,
+ gs_cam_format=False,
+ orthog_duplicate=False,
+                          load_gzip=False,  # gzip-tar branch below (deprecated)
+                          **kwargs):
+
+ # return raw_img, depth, c, bbox, sample_pyd['ins.pyd'], sample_pyd['fname.pyd']
+
+ post_process_cls = PostProcess(
+ reso,
+ reso_encoder,
+ imgnet_normalize=imgnet_normalize,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=decode_encode_img_only,
+ mv_input=mv_input,
+ split_chunk_input=split_chunk_input,
+ duplicate_sample=duplicate_sample,
+ append_depth=append_depth,
+ gs_cam_format=gs_cam_format,
+ orthog_duplicate=orthog_duplicate,
+ append_normal=append_normal,
+    )
+
+    # `split_chunk_size` is used below but never defined in this scope; assume
+    # it arrives via kwargs (views per chunked sample) and default to 8 so the
+    # chunked-batch arithmetic (e.g. 120 // split_chunk_size) stays valid.
+    split_chunk_size = kwargs.get('split_chunk_size', 8)
+
+ # ! add shuffling
+
+ if isinstance(file_path, list): # lst of shard urls
+ all_shards = []
+ for url_path in file_path:
+ all_shards.extend(wds.shardlists.expand_source(url_path))
+ logger.log('all_shards', all_shards)
+ else:
+ all_shards = file_path # to be expanded
+
+ if not load_instance: # during reconstruction training, load pair
+ if not split_chunk_input:
+ dataset = wds.DataPipeline(
+ wds.ResampledShards(all_shards), # url_shard
+ # at this point we have an iterator over all the shards
+ wds.shuffle(50),
+ wds.split_by_worker, # if multi-node
+ wds.tarfile_to_samples(),
+ # add wds.split_by_node here if you are using multiple nodes
+ wds.shuffle(
+ 1000
+ ), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.decode(wds.autodecode.basichandlers), # TODO
+ wds.to_tuple(
+ "sample.pyd"), # extract the pyd from top level dict
+ wds.map(post_process_cls.decode_zip),
+ wds.map(post_process_cls.paired_post_process
+ ), # create input-novelview paired samples
+ # wds.map(post_process_cls._post_process_sample),
+ # wds.detshuffle(1000), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.batched(
+ 16,
+ partial=True,
+ # collation_fn=collate
+ ) # streaming more data at once, and rebatch later
+ )
+
+        elif load_gzip:  # deprecated, no performance improvement
+
+ dataset = wds.DataPipeline(
+ wds.ResampledShards(all_shards), # url_shard
+ # at this point we have an iterator over all the shards
+ wds.shuffle(10),
+ wds.split_by_worker, # if multi-node
+ wds.tarfile_to_samples(),
+ # add wds.split_by_node here if you are using multiple nodes
+ # wds.shuffle(
+ # 100
+ # ), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.decode('rgb8'), # TODO
+ # wds.decode(wds.autodecode.basichandlers), # TODO
+ # wds.to_tuple('raw_img.jpeg', 'depth.jpeg', 'alpha_mask.jpeg',
+ # 'd_near.npy', 'd_far.npy', "c.npy", 'bbox.npy',
+ # 'ins.txt', 'caption.txt'),
+ wds.to_tuple('raw_img.png', 'depth_alpha.png'),
+ # wds.to_tuple('raw_img.jpg', "c.npy", 'bbox.npy', 'depth.pyd', 'ins.txt', 'caption.txt'),
+ # wds.to_tuple('raw_img.jpg', "c.npy", 'bbox.npy', 'ins.txt', 'caption.txt'),
+ wds.map(post_process_cls.decode_gzip),
+ # wds.map(post_process_cls.paired_post_process_chunk
+ # ), # create input-novelview paired samples
+ wds.batched(
+ 20,
+ partial=True,
+ # collation_fn=collate
+ ) # streaming more data at once, and rebatch later
+ )
+
+ else:
+ dataset = wds.DataPipeline(
+ wds.ResampledShards(all_shards), # url_shard
+ # at this point we have an iterator over all the shards
+ wds.shuffle(100),
+ wds.split_by_worker, # if multi-node
+ wds.tarfile_to_samples(),
+ # add wds.split_by_node here if you are using multiple nodes
+ wds.shuffle(
+ 4000 // split_chunk_size
+ ), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.decode(wds.autodecode.basichandlers), # TODO
+ wds.to_tuple(
+ "sample.pyd"), # extract the pyd from top level dict
+ wds.map(post_process_cls.decode_zip),
+ wds.map(post_process_cls.paired_post_process_chunk
+ ), # create input-novelview paired samples
+ # wds.map(post_process_cls._post_process_sample),
+ # wds.detshuffle(1000), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.batched(
+ 120 // split_chunk_size,
+ partial=True,
+ # collation_fn=collate
+ ) # streaming more data at once, and rebatch later
+ )
+
+ loader_shard = wds.WebLoader(
+ dataset,
+ num_workers=num_workers,
+ drop_last=False,
+ batch_size=None,
+ shuffle=False,
+ persistent_workers=num_workers > 0).unbatched().shuffle(
+ 1000 // split_chunk_size).batched(batch_size).map(
+ post_process_cls.create_dict)
+
+ if mv_input:
+ loader_shard = loader_shard.map(post_process_cls.prepare_mv_input)
+
+ else: # load single instance during test/eval
+ assert batch_size == 1
+
+ dataset = wds.DataPipeline(
+ wds.ResampledShards(all_shards), # url_shard
+ # at this point we have an iterator over all the shards
+ wds.shuffle(50),
+ wds.split_by_worker, # if multi-node
+ wds.tarfile_to_samples(),
+ # add wds.split_by_node here if you are using multiple nodes
+ wds.detshuffle(
+ 100
+ ), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.decode(wds.autodecode.basichandlers), # TODO
+ wds.to_tuple("sample.pyd"), # extract the pyd from top level dict
+ wds.map(post_process_cls.decode_zip),
+ # wds.map(post_process_cls.paired_post_process), # create input-novelview paired samples
+ wds.map(post_process_cls._post_process_batch_sample),
+ # wds.detshuffle(1000), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.batched(
+ 2,
+ partial=True,
+ # collation_fn=collate
+ ) # streaming more data at once, and rebatch later
+ )
+
+ loader_shard = wds.WebLoader(
+ dataset,
+ num_workers=num_workers,
+ drop_last=False,
+ batch_size=None,
+ shuffle=False,
+ persistent_workers=num_workers
+ > 0).unbatched().shuffle(200).batched(batch_size).map(
+ post_process_cls.single_instance_sample_create_dict)
+
+ # persistent_workers=num_workers > 0).unbatched().batched(batch_size).map(post_process_cls.create_dict)
+ # 1000).batched(batch_size).map(post_process_cls.create_dict)
+ # .map(collate)
+ # .map(collate)
+
+ # .batched(batch_size)
+ #
+
+ # .unbatched().shuffle(1000).batched(batch_size).map(post_process)
+ # # https://github.com/webdataset/webdataset/issues/187
+
+ # return next(iter(loader_shard))
+ #return dataset
+ return loader_shard
+
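+# Usage sketch (added for illustration; the shard URL and sizes below are
+# placeholders, not the project's real paths): the returned WebLoader yields
+# ready-made batch dicts.
+def _demo_iterate_wds_loader():
+    loader = load_wds_ResampledShard(
+        'shards/objv-{000000..000099}.tar',  # hypothetical shard pattern
+        batch_size=4,
+        num_workers=2,
+        reso=128,
+        reso_encoder=224)
+    batch = next(iter(loader))  # dict built by post_process_cls.create_dict
+    print(sorted(batch.keys()))
+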
+
+class PostProcessForDiff:
+
+ def __init__(
+ self,
+ reso,
+ reso_encoder,
+ imgnet_normalize,
+ plucker_embedding,
+ decode_encode_img_only,
+ mv_latent_dir,
+ ) -> None:
+ self.plucker_embedding = plucker_embedding
+
+ self.mv_latent_dir = mv_latent_dir
+ self.decode_encode_img_only = decode_encode_img_only
+
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+
+ self.reso_encoder = reso_encoder
+ self.reso = reso
+ self.instance_data_length = 40
+ # self.pair_per_instance = 1 # compat
+ self.pair_per_instance = 2 # check whether improves IO
+ # self.pair_per_instance = 3 # check whether improves IO
+ # self.pair_per_instance = 4 # check whether improves IO
+ self.camera = torch.load('eval_pose.pt', map_location='cpu').numpy()
+        self.canonical_frame = self.camera[25:26]  # shape (1, 25); inverted later in normalize_camera
+ self.canonical_frame_pos = self.canonical_frame[:, :16].reshape(4, 4)
+
+ def get_rays_kiui(self, c, opengl=True):
+ h, w = self.reso_encoder, self.reso_encoder
+        intrinsics, pose = c[16:], c[:16].reshape(4, 4)
+        pose = torch.from_numpy(pose).float()  # c is a numpy camera; the torch ops below need a tensor
+        # intrinsics layout: cx, cy, fx, fy = intrinsics[2], intrinsics[5], intrinsics[0], intrinsics[4]
+        fx = fy = 525  # pixel space
+        cx = cy = 256  # rendering default K
+ factor = self.reso / (cx * 2) # 128 / 512
+ fx = fx * factor
+ fy = fy * factor
+
+ x, y = torch.meshgrid(
+ torch.arange(w, device=pose.device),
+ torch.arange(h, device=pose.device),
+ indexing="xy",
+ )
+ x = x.flatten()
+ y = y.flatten()
+
+ cx = w * 0.5
+ cy = h * 0.5
+
+ # focal = h * 0.5 / np.tan(0.5 * np.deg2rad(fovy))
+
+ camera_dirs = F.pad(
+ torch.stack(
+ [
+ (x - cx + 0.5) / fx,
+ (y - cy + 0.5) / fy * (-1.0 if opengl else 1.0),
+ ],
+ dim=-1,
+ ),
+ (0, 1),
+ value=(-1.0 if opengl else 1.0),
+ ) # [hw, 3]
+
+ rays_d = camera_dirs @ pose[:3, :3].transpose(0, 1) # [hw, 3]
+ rays_o = pose[:3, 3].unsqueeze(0).expand_as(rays_d) # [hw, 3]
+
+ rays_o = rays_o.view(h, w, 3)
+ rays_d = safe_normalize(rays_d).view(h, w, 3)
+
+ return rays_o, rays_d
+
+ def gen_rays(self, c):
+ # Generate rays
+ intrinsics, c2w = c[16:], c[:16].reshape(4, 4)
+ self.h = self.reso_encoder
+ self.w = self.reso_encoder
+ yy, xx = torch.meshgrid(
+ torch.arange(self.h, dtype=torch.float32) + 0.5,
+ torch.arange(self.w, dtype=torch.float32) + 0.5,
+ indexing='ij')
+
+ # normalize to 0-1 pixel range
+ yy = yy / self.h
+ xx = xx / self.w
+
+ # K = np.array([f_x, 0, w / 2, 0, f_y, h / 2, 0, 0, 1]).reshape(3, 3)
+ cx, cy, fx, fy = intrinsics[2], intrinsics[5], intrinsics[
+ 0], intrinsics[4]
+ # cx *= self.w
+ # cy *= self.h
+
+ # f_x = f_y = fx * h / res_raw
+ c2w = torch.from_numpy(c2w).float()
+
+ xx = (xx - cx) / fx
+ yy = (yy - cy) / fy
+ zz = torch.ones_like(xx)
+ dirs = torch.stack((xx, yy, zz), dim=-1) # OpenCV convention
+ dirs /= torch.norm(dirs, dim=-1, keepdim=True)
+ dirs = dirs.reshape(-1, 3, 1)
+ del xx, yy, zz
+ # st()
+ dirs = (c2w[None, :3, :3] @ dirs)[..., 0]
+
+ origins = c2w[None, :3, 3].expand(self.h * self.w, -1).contiguous()
+ origins = origins.view(self.h, self.w, 3)
+ dirs = dirs.view(self.h, self.w, 3)
+
+ return origins, dirs
+
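+    # Sanity sketch (added, not original): gen_rays should return per-pixel
+    # origins and unit-norm world-space directions.
+    def _demo_check_rays(self, c):
+        origins, dirs = self.gen_rays(c)
+        assert origins.shape == dirs.shape == (self.h, self.w, 3)
+        # normalization happens before the rotation, which preserves norms
+        assert torch.allclose(dirs.norm(dim=-1),
+                              torch.ones(self.h, self.w),
+                              atol=1e-4)
+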
+ def normalize_camera(self, c):
+        # assert c.shape[0] == self.chunk_size  # 8 or 10
+
+ c = c[None] # api compat
+ B = c.shape[0]
+
+ camera_poses = c[:, :16].reshape(B, 4, 4) # 3x4
+
+ cam_radius = np.linalg.norm(
+ self.canonical_frame_pos.reshape(4, 4)[:3, 3],
+ axis=-1,
+ keepdims=False) # since g-buffer adopts dynamic radius here.
+ frame1_fixed_pos = np.eye(4)
+ frame1_fixed_pos[2, -1] = -cam_radius
+
+ transform = frame1_fixed_pos @ np.linalg.inv(
+ self.canonical_frame_pos) # 4,4
+ # from LGM, https://github.com/3DTopia/LGM/blob/fe8d12cff8c827df7bb77a3c8e8b37408cb6fe4c/core/provider_objaverse.py#L127
+ # transform = torch.tensor([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, self.opt.cam_radius], [0, 0, 0, 1]], dtype=torch.float32) @ torch.inverse(c[[0,4]])
+
+ new_camera_poses = transform[None] @ camera_poses # [V, 4, 4]
+
+ c = np.concatenate([new_camera_poses.reshape(B, 16), c[:, 16:]],
+ axis=-1)
+
+ return c[0]
+
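+    # Worked check (added): normalize_camera maps the canonical view onto a
+    # fixed pose at distance cam_radius along -z, so feeding the canonical
+    # camera back through it should zero out the x/y translation.
+    def _demo_check_normalize(self):
+        c_norm = self.normalize_camera(self.camera[25])  # canonical 25-dim cam
+        pose = c_norm[:16].reshape(4, 4)
+        assert np.allclose(pose[:2, 3], 0, atol=1e-4)
+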
+ def _post_process_sample(self, data_sample):
+ # raw_img, depth, c, bbox, caption, ins = data_sample
+ raw_img, c, caption, ins = data_sample
+
+ # c = self.normalize_camera(c) @ if relative pose.
+
+ img = raw_img # 256x256
+
+ img = torch.from_numpy(img).permute(2, 0, 1) / 127.5 - 1
+
+ # load latent.
+ # latent_path = Path(self.mv_latent_dir, ins, 'latent.npy') # ! a converged version, before adding augmentation
+
+ # if random.random() > 0.5:
+ # latent_path = Path(self.mv_latent_dir, ins, 'latent.npy')
+ # else: # augmentation, double the dataset
+ latent_path = Path(
+ self.mv_latent_dir.replace('v=4-final', 'v=4-rotate'), ins,
+ 'latent.npy')
+
+ latent = np.load(latent_path)
+
+ # return (img_to_encoder, img, c, caption, ins)
+ return (latent, img, c, caption, ins)
+
+ def rand_sample_idx(self):
+ return random.randint(0, self.instance_data_length - 1)
+
+ def rand_pair(self):
+ return (self.rand_sample_idx() for _ in range(2))
+
+ def paired_post_process(self, sample):
+ # repeat n times?
+ all_inp_list = []
+ all_nv_list = []
+ caption, ins = sample[-2:]
+ # expanded_return = []
+ for _ in range(self.pair_per_instance):
+ cano_idx, nv_idx = self.rand_pair()
+ cano_sample = self._post_process_sample(item[cano_idx]
+ for item in sample[:-2])
+ nv_sample = self._post_process_sample(item[nv_idx]
+ for item in sample[:-2])
+ all_inp_list.extend(cano_sample)
+ all_nv_list.extend(nv_sample)
+ return (*all_inp_list, *all_nv_list, caption, ins)
+ # return [cano_sample, nv_sample, caption, ins]
+ # return (*cano_sample, *nv_sample, caption, ins)
+
+ # def single_sample_create_dict(self, sample, prefix=''):
+ # # if len(sample) == 1:
+ # # sample = sample[0]
+ # # assert len(sample) == 6
+ # img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox = sample
+ # return {
+ # # **sample,
+ # f'{prefix}img_to_encoder': img_to_encoder,
+ # f'{prefix}img': img,
+ # f'{prefix}depth_mask': fg_mask_reso,
+ # f'{prefix}depth': depth_reso,
+ # f'{prefix}c': c,
+ # f'{prefix}bbox': bbox,
+ # }
+
+ def single_sample_create_dict(self, sample, prefix=''):
+ # if len(sample) == 1:
+ # sample = sample[0]
+ # assert len(sample) == 6
+ # img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox = sample
+ # img_to_encoder, img, c, caption, ins = sample
+ # img, c, caption, ins = sample
+ latent, img, c, caption, ins = sample
+ # load latent
+ return {
+ # **sample,
+ # 'img_to_encoder': img_to_encoder,
+ 'latent': latent,
+ 'img': img,
+ 'c': c,
+ 'caption': caption,
+ 'ins': ins
+ }
+
+ def decode_zip(self, sample_pyd, shape=(256, 256)):
+ if isinstance(sample_pyd, tuple):
+ sample_pyd = sample_pyd[0]
+ assert isinstance(sample_pyd, dict)
+
+ raw_img = decompress_and_open_image_gzip(
+ sample_pyd['raw_img'],
+ is_img=True,
+ decompress=True,
+ decompress_fn=lz4.frame.decompress)
+
+ caption = sample_pyd['caption'].decode('utf-8')
+ ins = sample_pyd['ins'].decode('utf-8')
+
+ c = decompress_array(sample_pyd['c'], (25, ),
+ np.float32,
+ decompress=True,
+ decompress_fn=lz4.frame.decompress)
+
+ # bbox = decompress_array(
+ # sample_pyd['bbox'],
+ # (
+ # 40,
+ # 4,
+ # ),
+ # np.float32,
+ # # decompress=False)
+ # decompress=True,
+ # decompress_fn=lz4.frame.decompress)
+
+ # if self.decode_encode_img_only:
+ # depth = np.zeros(shape=(40, *shape)) # save loading time
+ # else:
+ # depth = decompress_array(sample_pyd['depth'], (40, *shape),
+ # np.float32,
+ # decompress=True,
+ # decompress_fn=lz4.frame.decompress)
+
+ # return {'raw_img': raw_img, 'depth': depth, 'bbox': bbox, 'caption': caption, 'ins': ins, 'c': c}
+ # return raw_img, depth, c, bbox, caption, ins
+ # return raw_img, bbox, caption, ins
+ # return bbox, caption, ins
+ return raw_img, c, caption, ins
+ # ! run single-instance pipeline first
+ # return raw_img[0], depth[0], c[0], bbox[0], caption, ins
+
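+    # Shard record sketch (added note): each 'sample.pyd' decodes to a dict of
+    # lz4-compressed buffers, roughly {'raw_img': <compressed image bytes>,
+    # 'c': <compressed float32[25]>, 'caption': bytes, 'ins': bytes}.
+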
+ def create_dict(self, sample):
+ # sample = [item[0] for item in sample] # wds wrap items in []
+ # cano_sample_list = [[] for _ in range(6)]
+ # nv_sample_list = [[] for _ in range(6)]
+ # for idx in range(0, self.pair_per_instance):
+ # cano_sample = sample[6*idx:6*(idx+1)]
+ # nv_sample = sample[6*self.pair_per_instance+6*idx:6*self.pair_per_instance+6*(idx+1)]
+
+ # for item_idx in range(6):
+ # cano_sample_list[item_idx].append(cano_sample[item_idx])
+ # nv_sample_list[item_idx].append(nv_sample[item_idx])
+
+ # # ! cycle input/output view for more pairs
+ # cano_sample_list[item_idx].append(nv_sample[item_idx])
+ # nv_sample_list[item_idx].append(cano_sample[item_idx])
+
+ cano_sample = self.single_sample_create_dict(sample, prefix='')
+ # nv_sample = self.single_sample_create_dict((torch.cat(item_list) for item_list in nv_sample_list) , prefix='nv_')
+
+ return cano_sample
+ # return {
+ # **cano_sample,
+ # # **nv_sample,
+ # 'caption': sample[-2],
+ # 'ins': sample[-1]
+ # }
+
+
+# test tar loading
+def load_wds_diff_ResampledShard(file_path,
+ batch_size,
+ num_workers,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ plucker_embedding=False,
+ decode_encode_img_only=False,
+ mv_latent_dir='',
+ **kwargs):
+
+ # return raw_img, depth, c, bbox, sample_pyd['ins.pyd'], sample_pyd['fname.pyd']
+
+ post_process_cls = PostProcessForDiff(
+ reso,
+ reso_encoder,
+ imgnet_normalize=imgnet_normalize,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=decode_encode_img_only,
+ mv_latent_dir=mv_latent_dir,
+ )
+
+ if isinstance(file_path, list): # lst of shard urls
+ all_shards = []
+ for url_path in file_path:
+ all_shards.extend(wds.shardlists.expand_source(url_path))
+ logger.log('all_shards', all_shards)
+ else:
+ all_shards = file_path # to be expanded
+
+ dataset = wds.DataPipeline(
+ wds.ResampledShards(all_shards), # url_shard
+ # at this point we have an iterator over all the shards
+ wds.shuffle(100),
+ wds.split_by_worker, # if multi-node
+ wds.tarfile_to_samples(),
+ # add wds.split_by_node here if you are using multiple nodes
+ wds.shuffle(
+ 20000
+ ), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.decode(wds.autodecode.basichandlers), # TODO
+ wds.to_tuple("sample.pyd"), # extract the pyd from top level dict
+ wds.map(post_process_cls.decode_zip),
+ # wds.map(post_process_cls.paired_post_process), # create input-novelview paired samples
+ wds.map(post_process_cls._post_process_sample),
+ # wds.detshuffle(1000), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.batched(
+ 100,
+ partial=True,
+ # collation_fn=collate
+ ) # streaming more data at once, and rebatch later
+ )
+
+ loader_shard = wds.WebLoader(
+ dataset,
+ num_workers=num_workers,
+ drop_last=False,
+ batch_size=None,
+ shuffle=False,
+ persistent_workers=num_workers
+ > 0).unbatched().shuffle(2500).batched(batch_size).map(
+ post_process_cls.create_dict)
+
+ # persistent_workers=num_workers > 0).unbatched().batched(batch_size).map(post_process_cls.create_dict)
+ # 1000).batched(batch_size).map(post_process_cls.create_dict)
+ # .map(collate)
+ # .map(collate)
+
+ # .batched(batch_size)
+ #
+
+ # .unbatched().shuffle(1000).batched(batch_size).map(post_process)
+ # # https://github.com/webdataset/webdataset/issues/187
+
+ # return next(iter(loader_shard))
+ #return dataset
+ return loader_shard
+
+
+def load_wds_data(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ num_workers=6,
+ plucker_embedding=False,
+ decode_encode_img_only=False,
+ load_wds_diff=False,
+ load_wds_latent=False,
+ load_instance=False, # for evaluation
+ mv_input=False,
+ split_chunk_input=False,
+ duplicate_sample=True,
+ mv_latent_dir='',
+ append_depth=False,
+ gs_cam_format=False,
+ orthog_duplicate=False,
+ **args):
+
+ if load_wds_diff:
+ # assert num_workers == 0 # on aliyun, worker=0 performs much much faster
+ wds_loader = load_wds_diff_ResampledShard(
+ file_path,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ reso=reso,
+ reso_encoder=reso_encoder,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=decode_encode_img_only,
+ mv_input=mv_input,
+ split_chunk_input=split_chunk_input,
+ append_depth=append_depth,
+ mv_latent_dir=mv_latent_dir,
+ gs_cam_format=gs_cam_format,
+ orthog_duplicate=orthog_duplicate,
+ )
+ elif load_wds_latent:
+ # for diffusion training, cache latent
+ wds_loader = load_wds_latent_ResampledShard(
+ file_path,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ reso=reso,
+ reso_encoder=reso_encoder,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=decode_encode_img_only,
+ mv_input=mv_input,
+ split_chunk_input=split_chunk_input,
+ )
+
+ # elif load_instance:
+ # wds_loader = load_wds_instance_ResampledShard(
+ # file_path,
+ # batch_size=batch_size,
+ # num_workers=num_workers,
+ # reso=reso,
+ # reso_encoder=reso_encoder,
+ # plucker_embedding=plucker_embedding,
+ # decode_encode_img_only=decode_encode_img_only
+ # )
+
+ else:
+ wds_loader = load_wds_ResampledShard(
+ file_path,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ reso=reso,
+ reso_encoder=reso_encoder,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=decode_encode_img_only,
+ load_instance=load_instance,
+ mv_input=mv_input,
+ split_chunk_input=split_chunk_input,
+ duplicate_sample=duplicate_sample,
+ append_depth=append_depth,
+ gs_cam_format=gs_cam_format,
+ orthog_duplicate=orthog_duplicate,
+ )
+
+ while True:
+ yield from wds_loader
+
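+
+# Usage sketch (added; the shard path and latent dir are placeholders):
+# load_wds_data is an infinite generator, so a training loop just calls next().
+def _demo_train_steps(num_steps=2):
+    data = load_wds_data('shards/objv-{000000..000009}.tar',
+                         reso=128,
+                         reso_encoder=224,
+                         batch_size=4,
+                         num_workers=0,
+                         load_wds_diff=True,
+                         mv_latent_dir='latents/v=4-final')  # hypothetical
+    for _ in range(num_steps):
+        batch = next(data)  # keys: 'latent', 'img', 'c', 'caption', 'ins'
+        print(batch['img'].shape)
+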
+
+class PostProcess_forlatent:
+
+ def __init__(
+ self,
+ reso,
+ reso_encoder,
+ imgnet_normalize,
+ plucker_embedding,
+ decode_encode_img_only,
+ ) -> None:
+ self.plucker_embedding = plucker_embedding
+ self.decode_encode_img_only = decode_encode_img_only
+
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+
+ self.reso_encoder = reso_encoder
+ self.reso = reso
+ self.instance_data_length = 40
+ # self.pair_per_instance = 1 # compat
+ self.pair_per_instance = 2 # check whether improves IO
+ # self.pair_per_instance = 3 # check whether improves IO
+ # self.pair_per_instance = 4 # check whether improves IO
+
+ def _post_process_sample(self, data_sample):
+ # raw_img, depth, c, bbox, caption, ins = data_sample
+ raw_img, c, caption, ins = data_sample
+
+ # bbox = (bbox*(self.reso/256)).astype(np.uint8) # normalize bbox to the reso range
+
+ if raw_img.shape[-2] != self.reso_encoder:
+ img_to_encoder = cv2.resize(raw_img,
+ (self.reso_encoder, self.reso_encoder),
+ interpolation=cv2.INTER_LANCZOS4)
+ else:
+ img_to_encoder = raw_img
+
+ img_to_encoder = self.normalize(img_to_encoder)
+ if self.plucker_embedding:
+            # NOTE: PostProcess_forlatent defines no gen_rays of its own; this
+            # branch assumes a helper like PostProcessForDiff.gen_rays above.
+            rays_o, rays_d = self.gen_rays(c)
+ rays_plucker = torch.cat(
+ [torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ dim=-1).permute(2, 0, 1) # [h, w, 6] -> 6,h,w
+ img_to_encoder = torch.cat([img_to_encoder, rays_plucker], 0)
+
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ img = torch.from_numpy(img).permute(2, 0, 1) / 127.5 - 1
+
+ return (img_to_encoder, img, c, caption, ins)
+
+ def rand_sample_idx(self):
+ return random.randint(0, self.instance_data_length - 1)
+
+ def rand_pair(self):
+ return (self.rand_sample_idx() for _ in range(2))
+
+ def paired_post_process(self, sample):
+ # repeat n times?
+ all_inp_list = []
+ all_nv_list = []
+ caption, ins = sample[-2:]
+ # expanded_return = []
+ for _ in range(self.pair_per_instance):
+ cano_idx, nv_idx = self.rand_pair()
+ cano_sample = self._post_process_sample(item[cano_idx]
+ for item in sample[:-2])
+ nv_sample = self._post_process_sample(item[nv_idx]
+ for item in sample[:-2])
+ all_inp_list.extend(cano_sample)
+ all_nv_list.extend(nv_sample)
+ return (*all_inp_list, *all_nv_list, caption, ins)
+ # return [cano_sample, nv_sample, caption, ins]
+ # return (*cano_sample, *nv_sample, caption, ins)
+
+ # def single_sample_create_dict(self, sample, prefix=''):
+ # # if len(sample) == 1:
+ # # sample = sample[0]
+ # # assert len(sample) == 6
+ # img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox = sample
+ # return {
+ # # **sample,
+ # f'{prefix}img_to_encoder': img_to_encoder,
+ # f'{prefix}img': img,
+ # f'{prefix}depth_mask': fg_mask_reso,
+ # f'{prefix}depth': depth_reso,
+ # f'{prefix}c': c,
+ # f'{prefix}bbox': bbox,
+ # }
+
+ def single_sample_create_dict(self, sample, prefix=''):
+ # if len(sample) == 1:
+ # sample = sample[0]
+ # assert len(sample) == 6
+ # img_to_encoder, img, fg_mask_reso, depth_reso, c, bbox = sample
+ img_to_encoder, img, c, caption, ins = sample
+ return {
+ # **sample,
+ 'img_to_encoder': img_to_encoder,
+ 'img': img,
+ 'c': c,
+ 'caption': caption,
+ 'ins': ins
+ }
+
+ def decode_zip(self, sample_pyd, shape=(256, 256)):
+ if isinstance(sample_pyd, tuple):
+ sample_pyd = sample_pyd[0]
+ assert isinstance(sample_pyd, dict)
+
+ latent = sample_pyd['latent']
+ caption = sample_pyd['caption'].decode('utf-8')
+ c = sample_pyd['c']
+ # img = sample_pyd['img']
+ # st()
+
+ return latent, caption, c
+
+ def create_dict(self, sample):
+
+ return {
+ # **sample,
+ 'latent': sample[0],
+ 'caption': sample[1],
+ 'c': sample[2],
+ }
+
+
+# test tar loading
+def load_wds_latent_ResampledShard(file_path,
+ batch_size,
+ num_workers,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ plucker_embedding=False,
+ decode_encode_img_only=False,
+ **kwargs):
+
+ # return raw_img, depth, c, bbox, sample_pyd['ins.pyd'], sample_pyd['fname.pyd']
+
+ post_process_cls = PostProcess_forlatent(
+ reso,
+ reso_encoder,
+ imgnet_normalize=imgnet_normalize,
+ plucker_embedding=plucker_embedding,
+ decode_encode_img_only=decode_encode_img_only,
+ )
+
+ if isinstance(file_path, list): # lst of shard urls
+ all_shards = []
+ for url_path in file_path:
+ all_shards.extend(wds.shardlists.expand_source(url_path))
+ logger.log('all_shards', all_shards)
+ else:
+ all_shards = file_path # to be expanded
+
+ dataset = wds.DataPipeline(
+ wds.ResampledShards(all_shards), # url_shard
+ # at this point we have an iterator over all the shards
+ wds.shuffle(50),
+ wds.split_by_worker, # if multi-node
+ wds.tarfile_to_samples(),
+ # add wds.split_by_node here if you are using multiple nodes
+ wds.detshuffle(
+ 2500
+ ), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.decode(wds.autodecode.basichandlers), # TODO
+ wds.to_tuple("sample.pyd"), # extract the pyd from top level dict
+ wds.map(post_process_cls.decode_zip),
+ # wds.map(post_process_cls._post_process_sample),
+ # wds.detshuffle(1000), # shuffles in the memory, leverage large RAM for more efficient loading
+ wds.batched(
+ 150,
+ partial=True,
+ # collation_fn=collate
+ ) # streaming more data at once, and rebatch later
+ )
+
+ loader_shard = wds.WebLoader(
+ dataset,
+ num_workers=num_workers,
+ drop_last=False,
+ batch_size=None,
+ shuffle=False,
+ persistent_workers=num_workers
+ > 0).unbatched().shuffle(1000).batched(batch_size).map(
+ post_process_cls.create_dict)
+
+ # persistent_workers=num_workers > 0).unbatched().batched(batch_size).map(post_process_cls.create_dict)
+ # 1000).batched(batch_size).map(post_process_cls.create_dict)
+ # .map(collate)
+ # .map(collate)
+
+ # .batched(batch_size)
+ #
+
+ # .unbatched().shuffle(1000).batched(batch_size).map(post_process)
+ # # https://github.com/webdataset/webdataset/issues/187
+
+ # return next(iter(loader_shard))
+ #return dataset
+ return loader_shard
diff --git a/datasets/lsun_bedroom.py b/datasets/lsun_bedroom.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a5be22eef8c7434331a76ef5ed7332a98a446ef
--- /dev/null
+++ b/datasets/lsun_bedroom.py
@@ -0,0 +1,54 @@
+"""
+Convert an LSUN lmdb database into a directory of images.
+"""
+
+import argparse
+import io
+import os
+
+from PIL import Image
+import lmdb
+import numpy as np
+
+
+def read_images(lmdb_path, image_size):
+ env = lmdb.open(lmdb_path, map_size=1099511627776, max_readers=100, readonly=True)
+ with env.begin(write=False) as transaction:
+ cursor = transaction.cursor()
+ for _, webp_data in cursor:
+ img = Image.open(io.BytesIO(webp_data))
+ width, height = img.size
+ scale = image_size / min(width, height)
+ img = img.resize(
+ (int(round(scale * width)), int(round(scale * height))),
+ resample=Image.BOX,
+ )
+ arr = np.array(img)
+ h, w, _ = arr.shape
+ h_off = (h - image_size) // 2
+ w_off = (w - image_size) // 2
+ arr = arr[h_off : h_off + image_size, w_off : w_off + image_size]
+ yield arr
+
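+# Worked example of the resize-then-center-crop above (added): a 1280x720
+# frame with image_size=256 scales by 256/720 to 455x256, then the 455-wide
+# result is center-cropped (offset 99) down to 256x256.
+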
+
+def dump_images(out_dir, images, prefix):
+ if not os.path.exists(out_dir):
+ os.mkdir(out_dir)
+ for i, img in enumerate(images):
+ Image.fromarray(img).save(os.path.join(out_dir, f"{prefix}_{i:07d}.png"))
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--image-size", help="new image size", type=int, default=256)
+ parser.add_argument("--prefix", help="class name", type=str, default="bedroom")
+ parser.add_argument("lmdb_path", help="path to an LSUN lmdb database")
+ parser.add_argument("out_dir", help="path to output directory")
+ args = parser.parse_args()
+
+ images = read_images(args.lmdb_path, args.image_size)
+ dump_images(args.out_dir, images, args.prefix)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/datasets/shapenet.py b/datasets/shapenet.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c82b095ab9f3a8f24e87e57fe574f8683ba6ff5
--- /dev/null
+++ b/datasets/shapenet.py
@@ -0,0 +1,1022 @@
+import os
+import torchvision
+import pickle
+from typing import Any
+import lmdb
+import cv2
+import imageio
+import numpy as np
+from PIL import Image
+import Imath
+import OpenEXR
+from pdb import set_trace as st
+from pathlib import Path
+
+from functools import partial
+import io
+import gzip
+import random
+import torch
+from torch import nn
+import torch.nn.functional as F
+from torch.utils.data import DataLoader, Dataset
+from torchvision import transforms
+from torch.utils.data.distributed import DistributedSampler
+
+from guided_diffusion import logger
+
+def load_dataset(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_lmdb=False,
+ infi_sampler=True
+):
+ # st()
+ # dataset_cls = {
+ # 'input_rec': MultiViewDataset,
+ # 'nv': NovelViewDataset,
+ # }[trainer_name]
+ # st()
+ if use_lmdb:
+ logger.log('using LMDB dataset')
+ # dataset_cls = LMDBDataset_MV # 2.5-3iter/s, but unstable, drops to 1 later.
+ if 'nv' in trainer_name:
+ dataset_cls = LMDBDataset_NV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = LMDBDataset_MV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ # dataset = dataset_cls(file_path)
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = NovelViewDataset # 1.5-2iter/s
+ else:
+ dataset_cls = MultiViewDataset
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size)
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+
+ loader = DataLoader(dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ pin_memory=True,
+ persistent_workers=num_workers > 0,
+ shuffle=False)
+ return loader
+
+
+def load_data(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_lmdb=False,
+ infi_sampler=True
+):
+ # st()
+ # dataset_cls = {
+ # 'input_rec': MultiViewDataset,
+ # 'nv': NovelViewDataset,
+ # }[trainer_name]
+ # st()
+ if use_lmdb:
+ logger.log('using LMDB dataset')
+ # dataset_cls = LMDBDataset_MV # 2.5-3iter/s, but unstable, drops to 1 later.
+ if 'nv' in trainer_name:
+ dataset_cls = LMDBDataset_NV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ else:
+ dataset_cls = LMDBDataset_MV_Compressed # 2.5-3iter/s, but unstable, drops to 1 later.
+ # dataset = dataset_cls(file_path)
+ else:
+ if 'nv' in trainer_name:
+ dataset_cls = NovelViewDataset # 1.5-2iter/s
+ else:
+ dataset_cls = MultiViewDataset
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size)
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+
+ # st()
+
+ if infi_sampler:
+ train_sampler = DistributedSampler(dataset=dataset,
+ shuffle=True,
+ drop_last=True)
+
+ loader = DataLoader(dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=True,
+ pin_memory=True,
+ persistent_workers=num_workers > 0,
+ sampler=train_sampler)
+
+ while True:
+ yield from loader
+
+ else:
+ # loader = DataLoader(dataset,
+ # batch_size=batch_size,
+ # num_workers=num_workers,
+ # drop_last=False,
+ # pin_memory=True,
+ # persistent_workers=num_workers > 0,
+ # shuffle=False)
+ st()
+ return dataset
+
+
+def load_eval_rays(file_path="",
+ reso=64,
+ reso_encoder=224,
+ imgnet_normalize=True):
+ dataset = MultiViewDataset(file_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=imgnet_normalize)
+ pose_list = dataset.single_pose_list
+ ray_list = []
+ for pose_fname in pose_list:
+ # c2w = dataset.get_c2w(pose_fname).reshape(1,4,4) #[1, 4, 4]
+ # rays_o, rays_d = dataset.gen_rays(c2w)
+ # ray_list.append(
+ # [rays_o.unsqueeze(0),
+ # rays_d.unsqueeze(0),
+ # c2w.reshape(-1, 16)])
+
+ c2w = dataset.get_c2w(pose_fname).reshape(16) #[1, 4, 4]
+
+ c = torch.cat([c2w, dataset.intrinsics],
+ dim=0).reshape(25) # 25, no '1' dim needed.
+ ray_list.append(c)
+
+ return ray_list
+
+
+def load_eval_data(file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ num_workers=1,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ interval=1,
+ **kwargs
+ ):
+
+ dataset = MultiViewDataset(file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ test=True,
+ imgnet_normalize=imgnet_normalize,
+ interval=interval, **kwargs)
+ print('eval dataset size: {}'.format(len(dataset)))
+ # train_sampler = DistributedSampler(dataset=dataset)
+ loader = DataLoader(
+ dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ shuffle=False,
+ )
+ # sampler=train_sampler)
+ return loader
+
+
+def load_memory_data(file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ num_workers=1,
+ load_depth=True,
+ preprocess=None,
+ imgnet_normalize=True):
+ # load a single-instance into the memory to speed up training IO
+ dataset = MultiViewDataset(file_path,
+ reso,
+ reso_encoder,
+ preprocess=preprocess,
+ load_depth=True,
+ test=False,
+ overfitting=True,
+ imgnet_normalize=imgnet_normalize,
+ overfitting_bs=batch_size)
+ logger.log('!!!!!!! memory dataset size: {} !!!!!!'.format(len(dataset)))
+ # train_sampler = DistributedSampler(dataset=dataset)
+ loader = DataLoader(
+ dataset,
+ batch_size=len(dataset),
+ num_workers=num_workers,
+ drop_last=False,
+ shuffle=False,
+ )
+
+ all_data: dict = next(iter(loader))
+ while True:
+ start_idx = np.random.randint(0, len(dataset) - batch_size + 1)
+ yield {
+ k: v[start_idx:start_idx + batch_size]
+ for k, v in all_data.items()
+ }
+
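+# Sampler sketch (added): load_memory_data caches one overfitting instance,
+# then forever yields random contiguous windows of batch_size views, e.g.
+# next(gen)['img'].shape == (batch_size, 3, reso, reso).
+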
+
+class MultiViewDataset(Dataset):
+
+ def __init__(self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1,
+ interval=1):
+ self.file_path = file_path
+ self.overfitting = overfitting
+ self.scene_scale = scene_scale
+ self.reso = reso
+ self.reso_encoder = reso_encoder
+ self.classes = False
+ self.load_depth = load_depth
+ self.preprocess = preprocess
+        assert not self.classes, "Class-conditional loading is not supported yet."
+
+ # self.ins_list = os.listdir(self.file_path)
+ # if test: # TODO
+
+ dataset_name = Path(self.file_path).stem.split('_')[0]
+ self.dataset_name = dataset_name
+
+ if test:
+ # ins_list_file = Path(self.file_path).parent / f'{dataset_name}_test_list.txt' # ? in domain
+ if dataset_name == 'chair':
+ self.ins_list = sorted(os.listdir(
+ self.file_path))[1:2] # more diversity
+ else:
+ self.ins_list = sorted(os.listdir(self.file_path))[
+ 0:1] # the first 1 instance for evaluation reference.
+ else:
+ # self.ins_list = sorted(Path(self.file_path).glob('[0-8]*'))
+ # self.ins_list = Path(self.file_path).glob('*')
+ # self.ins_list = list(Path(self.file_path).glob('*'))[:dataset_size]
+
+ # ins_list_file = Path(
+ # self.file_path).parent / f'{dataset_name}s_train_list.txt'
+ # assert ins_list_file.exists(), 'add training list for ShapeNet'
+ # with open(ins_list_file, 'r') as f:
+ # self.ins_list = [name.strip() for name in f.readlines()]
+
+ # if dataset_name == 'chair':
+ ins_list_file = Path(
+ self.file_path).parent / f'{dataset_name}_train_list.txt'
+ # st()
+ assert ins_list_file.exists(), 'add training list for ShapeNet'
+ with open(ins_list_file, 'r') as f:
+ self.ins_list = [name.strip()
+ for name in f.readlines()][:dataset_size]
+ # else:
+ # self.ins_list = Path(self.file_path).glob('*')
+
+ if overfitting:
+ self.ins_list = self.ins_list[:1]
+
+ self.rgb_list = []
+ self.pose_list = []
+ self.depth_list = []
+ self.data_ins_list = []
+ self.instance_data_length = -1
+ for ins in self.ins_list:
+ cur_rgb_path = os.path.join(self.file_path, ins, 'rgb')
+ cur_pose_path = os.path.join(self.file_path, ins, 'pose')
+
+ cur_all_fname = sorted([
+ t.split('.')[0] for t in os.listdir(cur_rgb_path)
+ if 'depth' not in t
+ ][::interval])
+ if self.instance_data_length == -1:
+ self.instance_data_length = len(cur_all_fname)
+ else:
+ assert len(cur_all_fname) == self.instance_data_length
+
+ # ! check filtered data
+ # for idx in range(len(cur_all_fname)):
+ # fname = cur_all_fname[idx]
+ # if not Path(os.path.join(cur_rgb_path, fname + '.png') ).exists():
+ # cur_all_fname.remove(fname)
+
+ # del cur_all_fname[idx]
+
+ if test:
+ mid_index = len(cur_all_fname) // 3 * 2
+ cur_all_fname.insert(0, cur_all_fname[mid_index])
+
+ self.pose_list += ([
+ os.path.join(cur_pose_path, fname + '.txt')
+ for fname in cur_all_fname
+ ])
+ self.rgb_list += ([
+ os.path.join(cur_rgb_path, fname + '.png')
+ for fname in cur_all_fname
+ ])
+
+ self.depth_list += ([
+ os.path.join(cur_rgb_path, fname + '_depth0001.exr')
+ for fname in cur_all_fname
+ ])
+ self.data_ins_list += ([ins] * len(cur_all_fname))
+
+ # validate overfitting on images
+ if overfitting:
+ # bs=9
+ # self.pose_list = self.pose_list[::50//9+1]
+ # self.rgb_list = self.rgb_list[::50//9+1]
+ # self.depth_list = self.depth_list[::50//9+1]
+ # bs=6
+ # self.pose_list = self.pose_list[::50//6+1]
+ # self.rgb_list = self.rgb_list[::50//6+1]
+ # self.depth_list = self.depth_list[::50//6+1]
+ # bs=3
+ assert overfitting_bs != -1
+ # bs=1
+ # self.pose_list = self.pose_list[25:26]
+ # self.rgb_list = self.rgb_list[25:26]
+ # self.depth_list = self.depth_list[25:26]
+
+ # uniform pose sampling
+ self.pose_list = self.pose_list[::50//overfitting_bs+1]
+ self.rgb_list = self.rgb_list[::50//overfitting_bs+1]
+ self.depth_list = self.depth_list[::50//overfitting_bs+1]
+
+ # sequentially sampling pose
+ # self.pose_list = self.pose_list[25:25+overfitting_bs]
+ # self.rgb_list = self.rgb_list[25:25+overfitting_bs]
+ # self.depth_list = self.depth_list[25:25+overfitting_bs]
+
+ # duplicate the same pose
+ # self.pose_list = [self.pose_list[25]] * overfitting_bs
+ # self.rgb_list = [self.rgb_list[25]] * overfitting_bs
+ # self.depth_list = [self.depth_list[25]] * overfitting_bs
+ # self.pose_list = [self.pose_list[28]] * overfitting_bs
+ # self.rgb_list = [self.rgb_list[28]] * overfitting_bs
+ # self.depth_list = [self.depth_list[28]] * overfitting_bs
+
+ self.single_pose_list = [
+ os.path.join(cur_pose_path, fname + '.txt')
+ for fname in cur_all_fname
+ ]
+
+ # st()
+
+ # if imgnet_normalize:
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+
+ # self.normalize_normalrange = transforms.Compose([
+ # transforms.ToTensor(),# [0,1] range
+ # transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
+ # ])
+
+ fx = fy = 525
+ cx = cy = 256 # rendering default K
+ factor = self.reso / (cx * 2) # 128 / 512
+ self.fx = fx * factor
+ self.fy = fy * factor
+ self.cx = cx * factor
+ self.cy = cy * factor
+
+ # ! fix scale for triplane ray_sampler(), here we adopt [0,1] uv range, not [0, w] img space range.
+ self.cx /= self.reso # 0.5
+ self.cy /= self.reso # 0.5
+ self.fx /= self.reso
+ self.fy /= self.reso
+
+ intrinsics = np.array([[self.fx, 0, self.cx], [0, self.fy, self.cy],
+ [0, 0, 1]]).reshape(9)
+ # self.intrinsics = torch.from_numpy(intrinsics).float()
+ self.intrinsics = intrinsics
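+
+        # Worked numbers for the rescaling above (added), with reso=128:
+        # factor = 128 / 512 = 0.25, so fx = fy = 525 * 0.25 = 131.25 and
+        # cx = cy = 64; after the division by reso, cx = cy = 0.5 and
+        # fx = fy ≈ 1.0254 in the [0, 1] uv convention.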
+
+ def __len__(self):
+ return len(self.rgb_list)
+
+ def get_c2w(self, pose_fname):
+ with open(pose_fname, 'r') as f:
+ cam2world = f.readline().strip()
+ cam2world = [float(t) for t in cam2world.split(' ')]
+ c2w = torch.tensor(cam2world, dtype=torch.float32).reshape(4, 4)
+ return c2w
+
+ def gen_rays(self, c2w):
+ # Generate rays
+ self.h = self.reso
+ self.w = self.reso
+ yy, xx = torch.meshgrid(
+ torch.arange(self.h, dtype=torch.float32) + 0.5,
+ torch.arange(self.w, dtype=torch.float32) + 0.5,
+ indexing='ij')
+ xx = (xx - self.cx) / self.fx
+ yy = (yy - self.cy) / self.fy
+ zz = torch.ones_like(xx)
+ dirs = torch.stack((xx, yy, zz), dim=-1) # OpenCV convention
+ dirs /= torch.norm(dirs, dim=-1, keepdim=True)
+ dirs = dirs.reshape(1, -1, 3, 1)
+ del xx, yy, zz
+ dirs = (c2w[:, None, :3, :3] @ dirs)[..., 0]
+
+ origins = c2w[:, None, :3, 3].expand(-1, self.h * self.w,
+ -1).contiguous()
+ origins = origins.view(-1, 3)
+ dirs = dirs.view(-1, 3)
+
+ return origins, dirs
+
+ def read_depth(self, idx):
+ depth_path = self.depth_list[idx]
+ # image_path = os.path.join(depth_fname, self.image_names[index])
+ exr = OpenEXR.InputFile(depth_path)
+ header = exr.header()
+ size = (header['dataWindow'].max.x - header['dataWindow'].min.x + 1,
+ header['dataWindow'].max.y - header['dataWindow'].min.y + 1)
+ FLOAT = Imath.PixelType(Imath.PixelType.FLOAT)
+ depth_str = exr.channel('B', FLOAT)
+ depth = np.frombuffer(depth_str,
+ dtype=np.float32).reshape(size[1],
+ size[0]) # H W
+ depth = np.nan_to_num(depth, posinf=0, neginf=0)
+        depth = depth.reshape(size)  # size is (W, H); safe only because renders are square
+
+ def resize_depth_mask(depth_to_resize, resolution):
+ depth_resized = cv2.resize(depth_to_resize,
+ (resolution, resolution),
+ interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+ return depth_resized > 0 # type: ignore
+
+ fg_mask_reso = resize_depth_mask(depth, self.reso)
+ fg_mask_sr = resize_depth_mask(depth, 128)
+
+ # depth = cv2.resize(depth, (self.reso, self.reso),
+ # interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+ # depth_mask = depth > 0
+ # depth = np.expand_dims(depth, axis=0).reshape(size)
+ # return torch.from_numpy(depth)
+ return torch.from_numpy(depth), torch.from_numpy(
+ fg_mask_reso), torch.from_numpy(fg_mask_sr)
+
+ def load_bbox(self, mask):
+ nonzero_value = torch.nonzero(mask)
+ height, width = nonzero_value.max(dim=0)[0]
+ top, left = nonzero_value.min(dim=0)[0]
+ bbox = torch.tensor([top, left, height, width], dtype=torch.float32)
+ return bbox
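+
+    # Example (added): for a mask whose foreground spans rows 2..5 and
+    # cols 3..7, load_bbox returns tensor([2., 3., 5., 7.]); the `height`
+    # and `width` variables actually hold the bottom/right coordinates.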
+
+ def __getitem__(self, idx):
+ rgb_fname = self.rgb_list[idx]
+ pose_fname = self.pose_list[idx]
+
+ raw_img = imageio.imread(rgb_fname)
+
+ if self.preprocess is None:
+ img_to_encoder = cv2.resize(raw_img,
+ (self.reso_encoder, self.reso_encoder),
+ interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+ img_to_encoder = img_to_encoder[
+ ..., :3] #[3, reso_encoder, reso_encoder]
+ img_to_encoder = self.normalize(img_to_encoder)
+ else:
+ img_to_encoder = self.preprocess(Image.open(rgb_fname)) # clip
+
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+
+ # img_sr = cv2.resize(raw_img, (512, 512), interpolation=cv2.INTER_AREA)
+ # img_sr = cv2.resize(raw_img, (256, 256), interpolation=cv2.INTER_AREA) # just as refinement, since eg3d uses 64->128 final resolution
+ # img_sr = cv2.resize(raw_img, (128, 128), interpolation=cv2.INTER_AREA) # just as refinement, since eg3d uses 64->128 final resolution
+ img_sr = cv2.resize(
+ raw_img, (128, 128), interpolation=cv2.INTER_LANCZOS4
+ ) # just as refinement, since eg3d uses 64->128 final resolution
+
+ # img = torch.from_numpy(img)[..., :3].permute(
+ # 2, 0, 1) / 255.0 #[3, reso, reso]
+
+ img = torch.from_numpy(img)[..., :3].permute(
+ 2, 0, 1
+ ) / 127.5 - 1 #[3, reso, reso], normalize to [-1,1], follow triplane range
+
+ img_sr = torch.from_numpy(img_sr)[..., :3].permute(
+ 2, 0, 1
+ ) / 127.5 - 1 #[3, reso, reso], normalize to [-1,1], follow triplane range
+
+ # c2w = self.get_c2w(pose_fname).reshape(1, 4, 4) #[1, 4, 4]
+ # rays_o, rays_d = self.gen_rays(c2w)
+ # return img_to_encoder, img, rays_o, rays_d, c2w.reshape(-1)
+
+ c2w = self.get_c2w(pose_fname).reshape(16) #[1, 4, 4] -> [1, 16]
+ # c = np.concatenate([c2w, self.intrinsics], axis=0).reshape(25) # 25, no '1' dim needed.
+ c = torch.cat([c2w, torch.from_numpy(self.intrinsics)],
+ dim=0).reshape(25) # 25, no '1' dim needed.
+ ret_dict = {
+ # 'rgb_fname': rgb_fname,
+ 'img_to_encoder': img_to_encoder,
+ 'img': img,
+ 'c': c,
+ 'img_sr': img_sr,
+ # 'ins_name': self.data_ins_list[idx]
+ }
+ if self.load_depth:
+ depth, depth_mask, depth_mask_sr = self.read_depth(idx)
+ bbox = self.load_bbox(depth_mask)
+ ret_dict.update({
+ 'depth': depth,
+ 'depth_mask': depth_mask,
+ 'depth_mask_sr': depth_mask_sr,
+ 'bbox': bbox
+ })
+ # rays_o, rays_d = self.gen_rays(c2w)
+ # return img_to_encoder, img, c
+ return ret_dict
+
+
+class MultiViewDatasetforLMDB(MultiViewDataset):
+
+ def __init__(self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1):
+ super().__init__(file_path, reso, reso_encoder, preprocess, classes,
+ load_depth, test, scene_scale, overfitting,
+ imgnet_normalize, dataset_size, overfitting_bs)
+
+ def __len__(self):
+ return super().__len__()
+ # return 100 # for speed debug
+
+ def __getitem__(self, idx):
+ # ret_dict = super().__getitem__(idx)
+ rgb_fname = self.rgb_list[idx]
+ pose_fname = self.pose_list[idx]
+ raw_img = imageio.imread(rgb_fname)[..., :3]
+
+ if raw_img.shape[-1] == 4: # ! set bg to white
+ alpha_mask = raw_img[..., -1:] / 255
+ raw_img = alpha_mask * raw_img[..., :3] + (1-alpha_mask) * np.ones_like(raw_img[..., :3]) * 255
+ raw_img = raw_img.astype(np.uint8)
+
+ raw_img = cv2.resize(raw_img,
+ (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ c2w = self.get_c2w(pose_fname).reshape(16) #[1, 4, 4] -> [1, 16]
+ # c = np.concatenate([c2w, self.intrinsics], axis=0).reshape(25) # 25, no '1' dim needed.
+ c = torch.cat([c2w, torch.from_numpy(self.intrinsics)],
+ dim=0).reshape(25) # 25, no '1' dim needed.
+
+ depth, depth_mask, depth_mask_sr = self.read_depth(idx)
+ bbox = self.load_bbox(depth_mask)
+ ret_dict = {
+ 'raw_img': raw_img,
+ 'c': c,
+ 'depth': depth,
+ # 'depth_mask': depth_mask, # 64x64 here?
+ 'bbox': bbox
+ }
+ return ret_dict
+
+
+def load_data_dryrun(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True):
+ # st()
+ dataset = MultiViewDataset(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize)
+ print('dataset size: {}'.format(len(dataset)))
+ # st()
+ # train_sampler = DistributedSampler(dataset=dataset)
+ loader = DataLoader(
+ dataset,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ # shuffle=shuffle,
+ drop_last=False,
+ )
+ # sampler=train_sampler)
+
+ return loader
+
+
+class NovelViewDataset(MultiViewDataset):
+ """novel view prediction version.
+ """
+
+ def __init__(self,
+ file_path,
+ reso,
+ reso_encoder,
+ preprocess=None,
+ classes=False,
+ load_depth=False,
+ test=False,
+ scene_scale=1,
+ overfitting=False,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ overfitting_bs=-1):
+ super().__init__(file_path, reso, reso_encoder, preprocess, classes,
+ load_depth, test, scene_scale, overfitting,
+ imgnet_normalize, dataset_size, overfitting_bs)
+
+ def __getitem__(self, idx):
+ input_view = super().__getitem__(
+ idx) # get previous input view results
+
+ # get novel view of the same instance
+ novel_view = super().__getitem__(
+ (idx // self.instance_data_length) * self.instance_data_length +
+ random.randint(0, self.instance_data_length - 1))
+
+ # assert input_view['ins_name'] == novel_view['ins_name'], 'should sample novel view from the same instance'
+
+ input_view.update({f'nv_{k}': v for k, v in novel_view.items()})
+ return input_view
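+
+    # Example (added): with instance_data_length == 50, idx == 123 belongs to
+    # instance 2 (views 100..149), so the novel view is drawn uniformly from
+    # that same range: (123 // 50) * 50 + randint(0, 49).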
+
+
+def load_data_for_lmdb(
+ file_path="",
+ reso=64,
+ reso_encoder=224,
+ batch_size=1,
+ # shuffle=True,
+ num_workers=6,
+ load_depth=False,
+ preprocess=None,
+ imgnet_normalize=True,
+ dataset_size=-1,
+ trainer_name='input_rec'):
+ # st()
+ # dataset_cls = {
+ # 'input_rec': MultiViewDataset,
+ # 'nv': NovelViewDataset,
+ # }[trainer_name]
+ # if 'nv' in trainer_name:
+ # dataset_cls = NovelViewDataset
+ # else:
+ # dataset_cls = MultiViewDataset
+ dataset_cls = MultiViewDatasetforLMDB
+
+ dataset = dataset_cls(file_path,
+ reso,
+ reso_encoder,
+ test=False,
+ preprocess=preprocess,
+ load_depth=load_depth,
+ imgnet_normalize=imgnet_normalize,
+ dataset_size=dataset_size)
+
+ logger.log('dataset_cls: {}, dataset size: {}'.format(
+ trainer_name, len(dataset)))
+ # train_sampler = DistributedSampler(dataset=dataset, shuffle=True, drop_last=True)
+ loader = DataLoader(
+ dataset,
+ shuffle=False,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ drop_last=False,
+ prefetch_factor=2,
+ # prefetch_factor=3,
+ pin_memory=True,
+ persistent_workers=True,
+ )
+ # sampler=train_sampler)
+
+ # while True:
+ # yield from loader
+ return loader, dataset.dataset_name, len(dataset)
+
+
+class LMDBDataset(Dataset):
+
+ def __init__(self, lmdb_path):
+ self.env = lmdb.open(
+ lmdb_path,
+ readonly=True,
+ max_readers=32,
+ lock=False,
+ readahead=False,
+ meminit=False,
+ )
+ self.num_samples = self.env.stat()['entries']
+ # self.start_idx = self.env.stat()['start_idx']
+ # self.end_idx = self.env.stat()['end_idx']
+
+ def __len__(self):
+ return self.num_samples
+
+ def __getitem__(self, idx):
+ with self.env.begin(write=False) as txn:
+ key = str(idx).encode('utf-8')
+ value = txn.get(key)
+
+ sample = pickle.loads(value)
+ return sample
+
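+# Usage sketch (added; the path is a placeholder): every LMDB record is a
+# pickled dict stored under the stringified index.
+def _demo_lmdb_read(lmdb_path='/path/to/lmdb'):
+    ds = LMDBDataset(lmdb_path)
+    print(len(ds))
+    return ds[0]  # pickle.loads of the value under key b'0'
+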
+
+def resize_depth_mask(depth_to_resize, resolution):
+ depth_resized = cv2.resize(depth_to_resize, (resolution, resolution),
+ interpolation=cv2.INTER_LANCZOS4)
+ # interpolation=cv2.INTER_AREA)
+ return depth_resized, depth_resized > 0 # type: ignore
+
+
+class LMDBDataset_MV(LMDBDataset):
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ **kwargs):
+ super().__init__(lmdb_path)
+
+ self.reso_encoder = reso_encoder
+ self.reso = reso
+
+ transformations = [
+ transforms.ToTensor(), # [0,1] range
+ ]
+ if imgnet_normalize:
+ transformations.append(
+ transforms.Normalize((0.485, 0.456, 0.406),
+ (0.229, 0.224, 0.225)) # type: ignore
+ )
+ else:
+ transformations.append(
+ transforms.Normalize((0.5, 0.5, 0.5),
+ (0.5, 0.5, 0.5))) # type: ignore
+
+ self.normalize = transforms.Compose(transformations)
+
+ def _post_process_sample(self, raw_img, depth):
+
+ if raw_img.shape[-1] == 4: # ! set bg to white
+ alpha_mask = raw_img[..., -1:] / 255
+ raw_img = alpha_mask * raw_img[..., :3] + (1-alpha_mask) * np.ones_like(raw_img[..., :3]) * 255
+ raw_img = raw_img.astype(np.uint8)
+
+ # if raw_img.shape[-1] == 4: # ! set bg to white
+ # raw_img = cv2.cvtColor(raw_img, cv2.COLOR_RGBA2RGB)
+
+ # img_to_encoder = cv2.resize(sample.pop('raw_img'),
+ if raw_img.shape[0] != self.reso_encoder:
+ img_to_encoder = cv2.resize(raw_img,
+ (self.reso_encoder, self.reso_encoder),
+ interpolation=cv2.INTER_LANCZOS4)
+ else:
+ img_to_encoder = raw_img
+
+ # interpolation=cv2.INTER_AREA)
+ # img_to_encoder = img_to_encoder[..., :
+ # 3] #[3, reso_encoder, reso_encoder]
+ img_to_encoder = self.normalize(img_to_encoder)
+
+ img = cv2.resize(raw_img, (self.reso, self.reso),
+ interpolation=cv2.INTER_LANCZOS4)
+
+ # if img.shape[-1] == 4:
+ # alpha_mask = img[..., -1:] > 0
+ # img = alpha_mask * img[..., :3] + (1-alpha_mask) * np.ones_like(img[..., :3]) * 255
+
+ img = torch.from_numpy(img)[..., :3].permute(
+ 2, 0, 1
+ ) / 127.5 - 1 #[3, reso, reso], normalize to [-1,1], follow triplane range
+
+ # img_sr = torch.from_numpy(raw_img)[..., :3].permute(
+ # 2, 0, 1
+ # ) / 127.5 - 1 #[3, reso, reso], normalize to [-1,1], follow triplane range
+
+ # depth
+ # fg_mask_reso = resize_depth_mask(sample['depth'], self.reso)
+ depth_reso, fg_mask_reso = resize_depth_mask(depth, self.reso)
+
+ return {
+ # **sample,
+ 'img_to_encoder': img_to_encoder,
+ 'img': img,
+ 'depth_mask': fg_mask_reso,
+ # 'img_sr': img_sr,
+ 'depth': depth_reso,
+ # ! no need to load img_sr for now
+ }
+
+ def __getitem__(self, idx):
+ sample = super().__getitem__(idx)
+ # do transformations online
+
+ return self._post_process_sample(sample['raw_img'], sample['depth'])
+ # return sample
+
+def load_bytes(inp_bytes, dtype, shape):
+ return np.frombuffer(inp_bytes, dtype=dtype).reshape(shape).copy()
+
+# Function to decompress an image using gzip and open with imageio
+def decompress_and_open_image_gzip(compressed_data, is_img=False, decompress=True, decompress_fn=gzip.decompress):
+ # Decompress the image data using gzip
+ if decompress:
+ compressed_data = decompress_fn(compressed_data)
+
+ # Read the decompressed image using imageio
+ if is_img:
+ compressed_data = imageio.v3.imread(io.BytesIO(compressed_data)).copy()
+ # return image
+ return compressed_data
+
+
+# Function to decompress an array using gzip
+def decompress_array(compressed_data, shape, dtype, decompress=True, decompress_fn=gzip.decompress):
+ # Decompress the array data using gzip
+ if decompress:
+ # compressed_data = gzip.decompress(compressed_data)
+ compressed_data = decompress_fn(compressed_data)
+
+ # Convert the decompressed data to a NumPy array
+ # arr = np.frombuffer(decompressed_data, dtype=dtype).reshape(shape)
+
+ return load_bytes(compressed_data, dtype, shape)
+
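+# Round-trip sketch (added): the LMDB writers store gzip/lz4-compressed
+# buffers that these helpers invert.
+def _demo_roundtrip():
+    arr = np.random.rand(25).astype(np.float32)
+    blob = gzip.compress(arr.tobytes())
+    out = decompress_array(blob, (25, ), np.float32)
+    assert np.allclose(arr, out)
+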
+
+class LMDBDataset_MV_Compressed(LMDBDataset_MV):
+
+ def __init__(self,
+ lmdb_path,
+ reso,
+ reso_encoder,
+ imgnet_normalize=True,
+ **kwargs):
+ super().__init__(lmdb_path, reso, reso_encoder, imgnet_normalize,
+ **kwargs)
+ with self.env.begin(write=False) as txn:
+            self.length = int(
+                txn.get('length'.encode('utf-8')).decode('utf-8')) - 40  # drop the trailing instance (40 views)
+
+ self.load_image_fn = partial(decompress_and_open_image_gzip,
+ is_img=True)
+
+ def __len__(self):
+ return self.length
+
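+    # Key layout (added note): each view idx stores four gzip-compressed
+    # entries: f'{idx}-raw_img' (image bytes), f'{idx}-depth' (512x512
+    # float32), f'{idx}-c' (25,) and f'{idx}-bbox' (4,).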
+ def _load_lmdb_data(self, idx):
+
+ with self.env.begin(write=False) as txn:
+ raw_img_key = f'{idx}-raw_img'.encode('utf-8')
+ raw_img = self.load_image_fn(txn.get(raw_img_key))
+
+ depth_key = f'{idx}-depth'.encode('utf-8')
+ depth = decompress_array(txn.get(depth_key), (512,512), np.float32)
+
+ c_key = f'{idx}-c'.encode('utf-8')
+ c = decompress_array(txn.get(c_key), (25, ), np.float32)
+
+ bbox_key = f'{idx}-bbox'.encode('utf-8')
+ bbox = decompress_array(txn.get(bbox_key), (4, ), np.float32)
+
+ return raw_img, depth, c, bbox
+
+ def _load_lmdb_data_no_decompress(self, idx):
+
+ with self.env.begin(write=False) as txn:
+ raw_img_key = f'{idx}-raw_img'.encode('utf-8')
+ # raw_img = txn.get(raw_img_key)
+ raw_img = self.load_image_fn(txn.get(raw_img_key), decompress=False)
+
+ depth_key = f'{idx}-depth'.encode('utf-8')
+ depth = decompress_array(txn.get(depth_key), (512,512), np.float32, decompress=False)
+ # depth = txn.get(depth_key), (512,512)
+
+ # c_key = f'{idx}-c'.encode('utf-8')
+ # c = txn.get(c_key), (25, ), np.float32
+
+ # bbox_key = f'{idx}-bbox'.encode('utf-8')
+ # bbox = txn.get(bbox_key)
+
+ c_key = f'{idx}-c'.encode('utf-8')
+ c = decompress_array(txn.get(c_key), (25, ), np.float32, decompress=False)
+
+ bbox_key = f'{idx}-bbox'.encode('utf-8')
+ bbox = decompress_array(txn.get(bbox_key), (4, ), np.float32, decompress=False)
+
+ return raw_img, depth, c, bbox
+
+    def __getitem__(self, idx):
+        # decompress online; the `_no_decompress` variant above is kept for
+        # IO benchmarking only
+        raw_img, depth, c, bbox = self._load_lmdb_data(idx)
+
+        return {
+            **self._post_process_sample(raw_img, depth), 'c': c,
+            'bbox': bbox * (self.reso / 64.0),
+        }
+
+
+class LMDBDataset_NV_Compressed(LMDBDataset_MV_Compressed):
+ def __init__(self, lmdb_path, reso, reso_encoder, imgnet_normalize=True, **kwargs):
+ super().__init__(lmdb_path, reso, reso_encoder, imgnet_normalize, **kwargs)
+ self.instance_data_length = 50 # consecutive views stored per instance
+
+ def __getitem__(self, idx):
+ input_view = super().__getitem__(
+ idx) # get previous input view results
+
+ # get novel view of the same instance
+ try:
+ novel_view = super().__getitem__(
+ (idx // self.instance_data_length) * self.instance_data_length +
+ random.randint(0, self.instance_data_length - 1))
+ except Exception as e:
+ raise NotImplementedError(idx)
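+ # e.g. with instance_data_length=50, idx=123 belongs to instance 2 (rows 100-149),
+ # so the novel view is drawn uniformly from that same range.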
+
+ assert input_view['ins_name'] == novel_view['ins_name'], 'should sample novel view from the same instance'
+
+ input_view.update({f'nv_{k}': v for k, v in novel_view.items()})
+ return input_view
\ No newline at end of file
diff --git a/datasets/text_captions_cap3d.json b/datasets/text_captions_cap3d.json
new file mode 120000
index 0000000000000000000000000000000000000000..221de3ead4ff1bcccb023c53eb985dcbb3143637
--- /dev/null
+++ b/datasets/text_captions_cap3d.json
@@ -0,0 +1 @@
+/home/yslan/Repo/open-source/LN3Diff/datasets/text_captions_cap3d.json
\ No newline at end of file
diff --git a/dit/__init__.py b/dit/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dit/__pycache__/__init__.cpython-310.pyc b/dit/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c086749465eb3cd6305018c46b9e88158e953a92
Binary files /dev/null and b/dit/__pycache__/__init__.cpython-310.pyc differ
diff --git a/dit/__pycache__/__init__.cpython-39.pyc b/dit/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b274a4a751e4bb7e1ffba0d0fbb382de56c1dc32
Binary files /dev/null and b/dit/__pycache__/__init__.cpython-39.pyc differ
diff --git a/dit/__pycache__/dit_decoder.cpython-39.pyc b/dit/__pycache__/dit_decoder.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f1d3d0d3b551e503def90a19bed5fcf54fad7fc9
Binary files /dev/null and b/dit/__pycache__/dit_decoder.cpython-39.pyc differ
diff --git a/dit/__pycache__/dit_i23d.cpython-39.pyc b/dit/__pycache__/dit_i23d.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..535b4b5b049de3f5ffb7203ac2f4b669e03bc8ad
Binary files /dev/null and b/dit/__pycache__/dit_i23d.cpython-39.pyc differ
diff --git a/dit/__pycache__/dit_models.cpython-39.pyc b/dit/__pycache__/dit_models.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..951c6a0ef553ca294a17ac44839156e03ba345e2
Binary files /dev/null and b/dit/__pycache__/dit_models.cpython-39.pyc differ
diff --git a/dit/__pycache__/dit_models_xformers.cpython-310.pyc b/dit/__pycache__/dit_models_xformers.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bb8ad85b31d12667c36f3c3dca83b7155c848e3d
Binary files /dev/null and b/dit/__pycache__/dit_models_xformers.cpython-310.pyc differ
diff --git a/dit/__pycache__/dit_models_xformers.cpython-39.pyc b/dit/__pycache__/dit_models_xformers.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c182cc582ba716b5a5e1c2197f50b32df9ade873
Binary files /dev/null and b/dit/__pycache__/dit_models_xformers.cpython-39.pyc differ
diff --git a/dit/__pycache__/dit_trilatent.cpython-310.pyc b/dit/__pycache__/dit_trilatent.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d6cb16015fea69da0eaead6077f2b675e3c9adc4
Binary files /dev/null and b/dit/__pycache__/dit_trilatent.cpython-310.pyc differ
diff --git a/dit/__pycache__/dit_trilatent.cpython-39.pyc b/dit/__pycache__/dit_trilatent.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..519a43ffb693ca52498047b5c72c69ecb3cfdac3
Binary files /dev/null and b/dit/__pycache__/dit_trilatent.cpython-39.pyc differ
diff --git a/dit/__pycache__/norm.cpython-310.pyc b/dit/__pycache__/norm.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a88c3400e4afd7ecf3df1959124447adc41bd708
Binary files /dev/null and b/dit/__pycache__/norm.cpython-310.pyc differ
diff --git a/dit/__pycache__/norm.cpython-39.pyc b/dit/__pycache__/norm.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7b5009b8d7ff5093d0fee88fe02054a8f7755a94
Binary files /dev/null and b/dit/__pycache__/norm.cpython-39.pyc differ
diff --git a/dit/dit_3d.py b/dit/dit_3d.py
new file mode 100644
index 0000000000000000000000000000000000000000..324099b19f4f83dcfadafd06b5faf20f716252cf
--- /dev/null
+++ b/dit/dit_3d.py
@@ -0,0 +1,212 @@
+import torch
+import torch.nn as nn
+import numpy as np
+import math
+
+from pdb import set_trace as st
+
+from .dit_models import DiT, DiTBlock, DiT_models, get_2d_sincos_pos_embed
+
+
+class DiT_Triplane_V1(DiT):
+ """
+ 1. merge the 3*H*W as L, and 8 as C only
+ 2. pachify, flat into 224*(224*3) with 8 channels for pachify
+ 3. unpachify accordingly
+ """
+
+ def __init__(self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=False):
+
+ input_size = (input_size, input_size*3)
+ super().__init__(input_size, patch_size, in_channels//3, hidden_size, # type: ignore
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma)
+
+ def initialize_weights(self):
+ """all the same except the PE part
+ """
+ # Initialize transformer layers:
+ def _basic_init(module):
+ if isinstance(module, nn.Linear):
+ torch.nn.init.xavier_uniform_(module.weight)
+ if module.bias is not None:
+ nn.init.constant_(module.bias, 0)
+
+ self.apply(_basic_init)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ pos_embed = get_2d_sincos_pos_embed(
+ self.pos_embed.shape[-1], self.x_embedder.grid_size)
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ # ! untouched below
+ # Initialize patch_embed like nn.Linear (instead of nn.Conv2d):
+ w = self.x_embedder.proj.weight.data
+ nn.init.xavier_uniform_(w.view([w.shape[0], -1]))
+ nn.init.constant_(self.x_embedder.proj.bias, 0)
+
+ # Initialize label embedding table:
+ if self.y_embedder is not None:
+ nn.init.normal_(self.y_embedder.embedding_table.weight, std=0.02)
+
+ # Initialize timestep embedding MLP:
+ nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
+ nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)
+
+ # Zero-out adaLN modulation layers in DiT blocks:
+ for block in self.blocks:
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
+
+ # Zero-out output layers:
+ nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)
+ nn.init.constant_(self.final_layer.linear.weight, 0)
+ nn.init.constant_(self.final_layer.linear.bias, 0)
+
+ def unpatchify(self, x):
+ """
+ x: (N, L, patch_size**2 * C), where L = 3*h*w (three planes merged into L)
+ imgs: (N, 3*C, H, W)
+ """
+ c = self.out_channels
+ p = self.x_embedder.patch_size[0] # type: ignore
+ h = w = int((x.shape[1]//3)**0.5)
+ assert h * w * 3 == x.shape[1] # merge triplane 3 dims with hw
+
+ x = x.reshape(shape=(x.shape[0], h, w, 3, p, p, c))
+ x = torch.einsum('nhwzpqc->nczhpwq', x)
+ imgs = x.reshape(shape=(x.shape[0], c * 3, h * p, w * p)) # type: ignore
+ return imgs # B 8*3 H W
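+ # shape sketch: with p=2 and c=8, x (N, 3*h*w, p*p*c) -> imgs (N, 24, h*p, w*p);
+ # the three planes are stacked back along the channel dim.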
+
+ def forward(self, x, t, y=None):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+
+ # ! merge the three planes into the W dimension for the 3D-aware transformer
+ x = x.reshape(x.shape[0], -1, 3, x.shape[2], x.shape[3]) # B 8 3 H W
+ x = x.permute(0,1,3,4,2).reshape(x.shape[0], -1, x.shape[-2], x.shape[-1]*3) # B 8 H (W*3)
+
+ x = self.x_embedder(
+ x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+ t = self.t_embedder(t) # (N, D)
+
+ if self.y_embedder is not None:
+ assert y is not None
+ y = self.y_embedder(y, self.training) # (N, D)
+ c = t + y # (N, D)
+ else:
+ c = t
+
+ for block in self.blocks:
+ x = block(x, c) # (N, T, D)
+
+ x = self.final_layer(x, c) # (N, T, patch_size ** 2 * out_channels)
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ return x
+
+
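+# Usage sketch (hypothetical sizes): a triplane latent with 3 planes of 8 channels
+# at 32x32 resolution enters as a (B, 24, 32, 32) tensor:
+# model = DiT_Triplane_V1(input_size=32, in_channels=24)
+# x = torch.randn(2, 24, 32, 32)
+# t = torch.randint(0, 1000, (2,))
+# y = torch.randint(0, 1000, (2,)) # class labels, required while y_embedder exists
+# out = model(x, t, y) # (2, 24, 32, 32)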
+
+
+class DiT_Triplane_V1_learnedPE(DiT_Triplane_V1):
+ """
+ 1. learned PE, default cos/sin wave
+ """
+
+ def __init__(self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma)
+
+
+class DiT_Triplane_V1_fixed3DPE(DiT_Triplane_V1):
+ """
+ 1. 3D aware PE, fixed
+ """
+
+ def __init__(self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma)
+
+
+class DiT_Triplane_V1_learned3DPE(DiT_Triplane_V1):
+ """
+ 1. init with 3D aware PE, learnable
+ """
+
+ def __init__(self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma)
+
+def V1_Triplane_DiT_S_2(**kwargs):
+ return DiT_Triplane_V1(depth=12, hidden_size=384, patch_size=2, num_heads=6, **kwargs)
+
+def V1_Triplane_DiT_S_4(**kwargs):
+ return DiT_Triplane_V1(depth=12, hidden_size=384, patch_size=4, num_heads=6, **kwargs)
+
+def V1_Triplane_DiT_S_8(**kwargs):
+ return DiT_Triplane_V1(depth=12, hidden_size=384, patch_size=8, num_heads=6, **kwargs)
+
+def V1_Triplane_DiT_B_8(**kwargs):
+ return DiT_Triplane_V1(depth=12, hidden_size=768, patch_size=8, num_heads=12, **kwargs)
+
+def V1_Triplane_DiT_B_16(**kwargs): # ours cfg
+ return DiT_Triplane_V1(depth=12, hidden_size=768, patch_size=16, num_heads=12, **kwargs)
+
+DiT_models.update({
+ 'v1-T-DiT-S/2': V1_Triplane_DiT_S_2,
+ 'v1-T-DiT-S/4': V1_Triplane_DiT_S_4,
+ 'v1-T-DiT-S/8': V1_Triplane_DiT_S_8,
+ 'v1-T-DiT-B/8': V1_Triplane_DiT_B_8,
+ 'v1-T-DiT-B/16': V1_Triplane_DiT_B_16,
+})
\ No newline at end of file
diff --git a/dit/dit_decoder.py b/dit/dit_decoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..1179bb93cbb572c8007b818040aec1d238407d0b
--- /dev/null
+++ b/dit/dit_decoder.py
@@ -0,0 +1,345 @@
+import torch
+import torch.nn as nn
+import numpy as np
+import math
+
+from einops import rearrange
+from pdb import set_trace as st
+
+# from .dit_models import DiT, DiTBlock, DiT_models, get_2d_sincos_pos_embed, modulate, FinalLayer
+
+from .dit_models_xformers import DiT, DiTBlock, DiT_models, get_2d_sincos_pos_embed, modulate, FinalLayer
+# from .dit_models import DiT, DiTBlock, DiT_models, get_2d_sincos_pos_embed, modulate, FinalLayer
+
+
+def modulate2(x, shift, scale):
+ return x * (1 + scale) + shift
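+# note: unlike `modulate` (which unsqueezes per-sample shift/scale), shift and
+# scale here are assumed to already broadcast against x, e.g. per-token conditioning.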
+
+
+class DiTBlock2(DiTBlock):
+ """
+ A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
+ """
+
+ def __init__(self, hidden_size, num_heads, mlp_ratio=4, **block_kwargs):
+ super().__init__(hidden_size, num_heads, mlp_ratio, **block_kwargs)
+
+ def forward(self, x, c):
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ c).chunk(6, dim=-1)
+ x = x + gate_msa * self.attn(
+ modulate2(self.norm1(x), shift_msa, scale_msa))
+ x = x + gate_mlp * self.mlp(
+ modulate2(self.norm2(x), shift_mlp, scale_mlp))
+ return x
+
+
+class FinalLayer2(FinalLayer):
+ """
+ The final layer of DiT, basically the decoder_pred in MAE with adaLN.
+ """
+
+ def __init__(self, hidden_size, patch_size, out_channels):
+ super().__init__(hidden_size, patch_size, out_channels)
+
+ def forward(self, x, c):
+ shift, scale = self.adaLN_modulation(c).chunk(2, dim=-1)
+ x = modulate2(self.norm_final(x), shift, scale)
+ x = self.linear(x)
+ return x
+
+
+class DiT2(DiT):
+ # a conditional ViT
+ def __init__(self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ roll_out=False,
+ plane_n=3,
+ return_all_layers=False,
+ in_plane_attention=True,
+ vit_blk=DiTBlock2): # note: ignored below; DiTBlock2/FinalLayer2 are always used
+ super().__init__(input_size,
+ patch_size,
+ in_channels,
+ hidden_size,
+ depth,
+ num_heads,
+ mlp_ratio,
+ class_dropout_prob,
+ num_classes,
+ learn_sigma,
+ mixing_logit_init,
+ mixed_prediction,
+ context_dim,
+ roll_out,
+ vit_blk=DiTBlock2,
+ final_layer_blk=FinalLayer2)
+
+ # no t and x embedder
+ del self.x_embedder
+ del self.t_embedder
+ del self.final_layer
+ torch.cuda.empty_cache()
+ self.clip_text_proj = None
+ self.plane_n = plane_n
+ self.return_all_layers = return_all_layers
+ self.in_plane_attention = in_plane_attention
+
+ def forward(self, c, *args, **kwargs):
+ """
+ Forward pass of DiT2 (a conditional ViT decoder).
+ c: (N, T, D) tensor of per-token conditioning; the input tokens are
+ initialized from the learned positional embedding.
+ """
+
+ x = self.pos_embed.repeat(
+ c.shape[0], 1, 1).to(c.dtype) # (N, T, D), where T = H * W / patch_size ** 2
+
+ if self.return_all_layers:
+ all_layers = []
+
+ for blk_idx, block in enumerate(self.blocks):
+ if self.roll_out:
+ if self.in_plane_attention: # plane-wise output
+ if blk_idx % 2 == 0: # with-in plane self attention
+ x = rearrange(x, 'b (n l) c -> (b n) l c ', n=self.plane_n)
+ x = block(x,
+ rearrange(c,
+ 'b (n l) c -> (b n) l c ',
+ n=self.plane_n)) # (N, T, D)
+ if self.return_all_layers:
+
+ all_layers.append(
+ rearrange(x,
+ '(b n) l c -> b (n l) c',
+ n=self.plane_n))
+
+ else: # global attention
+ x = rearrange(x, '(b n) l c -> b (n l) c ', n=self.plane_n)
+ x = block(x, c) # (N, T, D)
+ if self.return_all_layers:
+ # all merged into B dim
+ all_layers.append(x)
+ else:
+
+ # ! already b (n l) c
+ x = block(x, c) # (N, T, D)
+ if self.return_all_layers:
+ # all merged into B dim
+ all_layers.append(x)
+
+ else:
+ x = block(x, c) # (N, T, D)
+
+ # no final_layer / unpatchify here: both were deleted in __init__, so DiT2
+ # returns transformer tokens directly.
+ if self.return_all_layers:
+ return all_layers
+ else:
+ return x
+
+
+# class DiT2_DPT(DiT2):
+# def __init__(self, input_size=32, patch_size=2, in_channels=4, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4, class_dropout_prob=0.1, num_classes=1000, learn_sigma=True, mixing_logit_init=-3, mixed_prediction=True, context_dim=False, roll_out=False, plane_n=3, vit_blk=...):
+# super().__init__(input_size, patch_size, in_channels, hidden_size, depth, num_heads, mlp_ratio, class_dropout_prob, num_classes, learn_sigma, mixing_logit_init, mixed_prediction, context_dim, roll_out, plane_n, vit_blk)
+# self.return_all_layers = True
+
+#################################################################################
+# DiT2 Configs #
+#################################################################################
+
+
+def DiT2_XL_2(**kwargs):
+ return DiT2(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_XL_2_half(**kwargs):
+ return DiT2(depth=28 // 2,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_XL_4(**kwargs):
+ return DiT2(depth=28,
+ hidden_size=1152,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_XL_8(**kwargs):
+ return DiT2(depth=28,
+ hidden_size=1152,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_L_2(**kwargs):
+ return DiT2(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_L_2_stage1(**kwargs):
+ return DiT2(depth=24-6,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+def DiT2_L_2_half(**kwargs):
+ return DiT2(depth=24//2,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+def DiT2_L_2_half_ninelayer(**kwargs):
+ return DiT2(depth=9,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+
+def DiT2_L_4(**kwargs):
+ return DiT2(depth=24,
+ hidden_size=1024,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_L_8(**kwargs):
+ return DiT2(depth=24,
+ hidden_size=1024,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT2_B_2(**kwargs):
+ return DiT2(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT2_B_2_stage1(**kwargs):
+ return DiT2(depth=12, # 12 layers here; stage-2 adds 3 more layers afterwards.
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+def DiT2_B_4(**kwargs):
+ return DiT2(depth=12,
+ hidden_size=768,
+ patch_size=4,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT2_B_8(**kwargs):
+ return DiT2(depth=12,
+ hidden_size=768,
+ patch_size=8,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT2_B_16(**kwargs): # ours cfg
+ return DiT2(depth=12,
+ hidden_size=768,
+ patch_size=16,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT2_S_2(**kwargs):
+ return DiT2(depth=12, hidden_size=384, patch_size=2, num_heads=6, **kwargs)
+
+
+def DiT2_S_4(**kwargs):
+ return DiT2(depth=12, hidden_size=384, patch_size=4, num_heads=6, **kwargs)
+
+
+def DiT2_S_8(**kwargs):
+ return DiT2(depth=12, hidden_size=384, patch_size=8, num_heads=6, **kwargs)
+
+
+DiT2_models = {
+ 'DiT2-XL/2': DiT2_XL_2,
+ 'DiT2-XL/2/half': DiT2_XL_2_half,
+ 'DiT2-XL/4': DiT2_XL_4,
+ 'DiT2-XL/8': DiT2_XL_8,
+ 'DiT2-L/2': DiT2_L_2,
+ 'DiT2-L/2/S1': DiT2_L_2_stage1,
+ 'DiT2-L/2/S1-v2': DiT2_L_2_stage1,
+ 'DiT2-B/2/S1': DiT2_B_2_stage1,
+ 'DiT2-L/4': DiT2_L_4,
+ 'DiT2-L/2-half': DiT2_L_2_half,
+ 'DiT2-L/2-ninelayer': DiT2_L_2_half_ninelayer,
+ 'DiT2-L/8': DiT2_L_8,
+ 'DiT2-B/2': DiT2_B_2,
+ 'DiT2-B/4': DiT2_B_4,
+ 'DiT2-B/8': DiT2_B_8,
+ 'DiT2-B/16': DiT2_B_16,
+ 'DiT2-S/2': DiT2_S_2,
+ 'DiT2-S/4': DiT2_S_4,
+ 'DiT2-S/8': DiT2_S_8,
+}
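+
+# Usage sketch (hypothetical shapes): DiT2 is a conditional ViT decoder driven
+# purely by per-token conditioning; the input tokens start from the learned
+# positional embedding:
+# model = DiT2_models['DiT2-B/2'](input_size=32, in_channels=4)
+# c = torch.randn(2, model.pos_embed.shape[1], 768) # per-token conditioning
+# tokens = model(c) # (2, T, 768)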
diff --git a/dit/dit_decoder_3d.py b/dit/dit_decoder_3d.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ce511098c4e0d098ba5b18190b161af6fae0ad2
--- /dev/null
+++ b/dit/dit_decoder_3d.py
@@ -0,0 +1,172 @@
+import torch.nn as nn
+from inspect import isfunction
+import math
+import torch
+import torch.nn.functional as F
+from torch import nn, einsum
+from einops import rearrange, repeat
+from pdb import set_trace as st
+
+from ldm.modules.attention import MemoryEfficientCrossAttention
+from .dit_decoder import DiT2, DiTBlock2
+
+class DiT3D(DiT2):
+ def __init__(self, input_size=32, patch_size=2, in_channels=4, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4, class_dropout_prob=0.1, num_classes=1000, learn_sigma=True, mixing_logit_init=-3, mixed_prediction=True, context_dim=False, roll_out=False, plane_n=3, return_all_layers=False, in_plane_attention=True, vit_blk=DiTBlock2): # default must be a callable block: it builds point_infinity_blocks below
+ super().__init__(input_size, patch_size, in_channels, hidden_size, depth, num_heads, mlp_ratio, class_dropout_prob, num_classes, learn_sigma, mixing_logit_init, mixed_prediction, context_dim, roll_out, plane_n, return_all_layers, in_plane_attention, vit_blk)
+ # following Point-Infinity, add a "write" cross-attention block every 6 blocks
+
+ # 25/4/2024: cascade a "read & write" block after the DiT base model.
+ self.read_ca = MemoryEfficientCrossAttention(hidden_size, context_dim)
+ self.point_infinity_blocks = nn.ModuleList([
+ vit_blk(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ for _ in range(2)
+ ])
+
+ def initialize_weights(self):
+ super().initialize_weights()
+
+ # Zero-out adaLN modulation layers in DiT blocks:
+ # ! no final layer anymore
+ for block in self.point_infinity_blocks:
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
+
+
+
+ def forward(self, c, *args, **kwargs):
+ x_base = super().forward(c, *args, **kwargs) # base latent
+ # TODO: the "read & write" cascade (self.read_ca + self.point_infinity_blocks)
+ # is not wired up yet; return the base latent so the forward pass still runs.
+ return x_base
+
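+ # One plausible completion of the cascade (a sketch only, not the author's
+ # final design; `read_context` is a hypothetical conditioning tensor of
+ # width context_dim):
+ # x = x_base + self.read_ca(x_base, context=read_context)
+ # for blk in self.point_infinity_blocks:
+ # x = blk(x, c)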
+
+#################################################################################
+# DiT3D Configs #
+#################################################################################
+
+
+def DiT3DXL_2(**kwargs):
+ return DiT3D(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DXL_2_half(**kwargs):
+ return DiT3D(depth=28 // 2,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DXL_4(**kwargs):
+ return DiT3D(depth=28,
+ hidden_size=1152,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DXL_8(**kwargs):
+ return DiT3D(depth=28,
+ hidden_size=1152,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DL_2(**kwargs):
+ return DiT3D(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DL_2_half(**kwargs):
+ return DiT3D(depth=24 // 2,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DL_4(**kwargs):
+ return DiT3D(depth=24,
+ hidden_size=1024,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DL_8(**kwargs):
+ return DiT3D(depth=24,
+ hidden_size=1024,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT3DB_2(**kwargs):
+ return DiT3D(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT3DB_4(**kwargs):
+ return DiT3D(depth=12,
+ hidden_size=768,
+ patch_size=4,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT3DB_8(**kwargs):
+ return DiT3D(depth=12,
+ hidden_size=768,
+ patch_size=8,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT3DB_16(**kwargs): # ours cfg
+ return DiT3D(depth=12,
+ hidden_size=768,
+ patch_size=16,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT3DS_2(**kwargs):
+ return DiT3D(depth=12, hidden_size=384, patch_size=2, num_heads=6, **kwargs)
+
+
+def DiT3DS_4(**kwargs):
+ return DiT3D(depth=12, hidden_size=384, patch_size=4, num_heads=6, **kwargs)
+
+
+def DiT3DS_8(**kwargs):
+ return DiT3D(depth=12, hidden_size=384, patch_size=8, num_heads=6, **kwargs)
+
+
+DiT3Dmodels = {
+ 'DiT3D-XL/2': DiT3DXL_2,
+ 'DiT3D-XL/2/half': DiT3DXL_2_half,
+ 'DiT3D-XL/4': DiT3DXL_4,
+ 'DiT3D-XL/8': DiT3DXL_8,
+ 'DiT3D-L/2': DiT3DL_2,
+ 'DiT3D-L/2/half': DiT3DL_2_half,
+ 'DiT3D-L/4': DiT3DL_4,
+ 'DiT3D-L/8': DiT3DL_8,
+ 'DiT3D-B/2': DiT3DB_2,
+ 'DiT3D-B/4': DiT3DB_4,
+ 'DiT3D-B/8': DiT3DB_8,
+ 'DiT3D-B/16': DiT3DB_16,
+ 'DiT3D-S/2': DiT3DS_2,
+ 'DiT3D-S/4': DiT3DS_4,
+ 'DiT3D-S/8': DiT3DS_8,
+}
+
diff --git a/dit/dit_i23d.py b/dit/dit_i23d.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee3b62ab2d38952f9d33b39b5e21a0e9ba7a378f
--- /dev/null
+++ b/dit/dit_i23d.py
@@ -0,0 +1,1697 @@
+import torch.nn as nn
+from inspect import isfunction
+import math
+import torch
+import torch.nn.functional as F
+from torch import nn, einsum
+from einops import rearrange, repeat
+from pdb import set_trace as st
+
+from ldm.modules.attention import MemoryEfficientCrossAttention
+from .dit_models_xformers import *
+from torch.nn import LayerNorm
+
+try:
+ from apex.normalization import FusedRMSNorm as RMSNorm
+except ImportError:
+ from dit.norm import RMSNorm
+
+from timm.models.vision_transformer import Mlp
+
+from vit.vit_triplane import XYZPosEmbed
+
+from .dit_trilatent import DiT_PCD_PixelArt
+
+
+class DiT_I23D(DiT):
+ # DiT with 3D_aware operations
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlock,
+ final_layer_blk=T2IFinalLayer,
+ enable_rope=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim, roll_out, vit_blk,
+ T2IFinalLayer, enable_rope=enable_rope)
+
+ assert self.roll_out
+
+ self.clip_ctx_dim = 1024 # CLIP ViT-L token width
+ self.dino_proj = CaptionEmbedder(context_dim, # ! DINO ViT-B/14 features for MV-cond; hard-coded for now
+ hidden_size,
+ act_layer=approx_gelu)
+
+ self.clip_spatial_proj = CaptionEmbedder(1024, # CLIP image encoder (ViT-L) spatial tokens
+ hidden_size,
+ act_layer=approx_gelu)
+
+ def init_PE_3D_aware(self):
+
+ self.pos_embed = nn.Parameter(torch.zeros(
+ 1, self.plane_n * self.x_embedder.num_patches, self.embed_dim),
+ requires_grad=False)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ p = int(self.x_embedder.num_patches**0.5)
+ D = self.pos_embed.shape[-1]
+ grid_size = (self.plane_n, p * p) # B n HW C
+
+ pos_embed = get_2d_sincos_pos_embed(D, grid_size).reshape(
+ self.plane_n * p * p, D) # H*W, D
+
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ def initialize_weights(self):
+ super().initialize_weights()
+
+ # ! add 3d-aware PE
+ self.init_PE_3D_aware()
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+ # context = self.clip_text_proj(context)
+ clip_cls_token = self.clip_text_proj(context['vector'])
+ clip_spatial_token, dino_spatial_token = context['crossattn'][..., :self.clip_ctx_dim], self.dino_proj(context['crossattn'][..., self.clip_ctx_dim:])
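+ # context['crossattn'] packs both encoders along the channel dim:
+ # [..., :self.clip_ctx_dim] are CLIP ViT-L spatial tokens; the rest are DINO features.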
+
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ # ! todo, return spatial clip features.
+
+ # if self.roll_out: # !
+ x = rearrange(x, 'b (c n) h w->(b n) c h w',
+ n=3) # downsample with same conv
+ x = self.x_embedder(x) # (b n) c h/f w/f
+
+ x = rearrange(x, '(b n) l c -> b (n l) c', n=3)
+ x = x + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t, dino_spatial_token=dino_spatial_token, clip_spatial_token=clip_spatial_token) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+ # ! compat issue
+ def forward_with_cfg(self, x, t, context, cfg_scale):
+ """
+ Forward pass of SiT, but also batches the unconSiTional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ # half = x[: len(x) // 2]
+ # combined = torch.cat([half, half], dim=0)
+ eps = self.forward(x, t, context)
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ # eps, rest = model_out[:, :3], model_out[:, 3:]
+ cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ eps = torch.cat([half_eps, half_eps], dim=0)
+ return eps
+
+
+
+
+class DiT_I23D_PixelArt(DiT_I23D):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNorm,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=True,
+ enable_rope=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim, pooling_ctx_dim, roll_out, vit_blk,
+ final_layer_blk,
+ enable_rope=enable_rope)
+
+ # ! a shared one
+ self.adaLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 6 * hidden_size, bias=True))
+
+ # ! single
+ nn.init.constant_(self.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(self.adaLN_modulation[-1].bias, 0)
+
+ del self.clip_text_proj
+ if create_cap_embedder:
+ self.cap_embedder = nn.Sequential( # zero-initialized below
+ LayerNorm(pooling_ctx_dim),
+ nn.Linear(
+ pooling_ctx_dim,
+ hidden_size,
+ ),
+ )
+
+ nn.init.constant_(self.cap_embedder[-1].weight, 0)
+ nn.init.constant_(self.cap_embedder[-1].bias, 0)
+ else:
+ self.cap_embedder = nn.Identity() # placeholder
+
+ print(self) # check model arch
+
+ self.attention_y_norm = RMSNorm(
+ 1024, eps=1e-5
+ ) # https://github.com/Alpha-VLLM/Lumina-T2X/blob/0c8dd6a07a3b7c18da3d91f37b1e00e7ae661293/lumina_t2i/models/model.py#L570C9-L570C61
+
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+ # context = self.clip_text_proj(context)
+ clip_cls_token = self.cap_embedder(context['vector'])
+ clip_spatial_token, dino_spatial_token = context['crossattn'][..., :self.clip_ctx_dim], self.dino_proj(context['crossattn'][..., self.clip_ctx_dim:])
+ clip_spatial_token = self.attention_y_norm(clip_spatial_token) # avoid re-normalization in each blk
+
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ # if self.roll_out: # !
+ x = rearrange(x, 'b (c n) h w->(b n) c h w',
+ n=3) # downsample with same conv
+ x = self.x_embedder(x) # (b n) c h/f w/f
+
+ x = rearrange(x, '(b n) l c -> b (n l) c', n=3)
+ x = x + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, clip_spatial_token=clip_spatial_token) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+class DiT_I23D_PCD_PixelArt(DiT_I23D_PixelArt):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNorm,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=True,
+ use_clay_ca=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim, pooling_ctx_dim, roll_out, vit_blk,
+ final_layer_blk)
+
+ self.x_embedder = Mlp(in_features=in_channels,
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+ del self.pos_embed
+ self.use_clay_ca = use_clay_ca
+ if use_clay_ca:
+ del self.dino_proj # no prepending required.
+
+ # add ln_pre and ln_post, as in Point-E (did not help; performance was worse):
+ # self.ln_pre = LayerNorm(hidden_size)
+ # self.ln_post = LayerNorm(hidden_size)
+
+ @staticmethod
+ def precompute_freqs_cis(
+ dim: int,
+ end: int,
+ theta: float = 10000.0,
+ rope_scaling_factor: float = 1.0,
+ ntk_factor: float = 1.0,
+ ):
+ """
+ Precompute the frequency tensor for complex exponentials (cis) with
+ given dimensions.
+
+ This function calculates a frequency tensor with complex exponentials
+ using the given dimension 'dim' and the end index 'end'. The 'theta'
+ parameter scales the frequencies. The returned tensor contains complex
+ values in complex64 data type.
+
+ Args:
+ dim (int): Dimension of the frequency tensor.
+ end (int): End index for precomputing frequencies.
+ theta (float, optional): Scaling factor for frequency computation.
+ Defaults to 10000.0.
+
+ Returns:
+ torch.Tensor: Precomputed frequency tensor with complex
+ exponentials.
+ """
+
+ theta = theta * ntk_factor
+
+ print(f"theta {theta} rope scaling {rope_scaling_factor} ntk {ntk_factor}")
+ freqs = 1.0 / (theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float().cuda() / dim))
+ t = torch.arange(end, device=freqs.device, dtype=torch.float) # type: ignore
+ t = t / rope_scaling_factor
+ freqs = torch.outer(t, freqs).float() # type: ignore
+ freqs_cis = torch.polar(torch.ones_like(freqs), freqs) # complex64
+ return freqs_cis
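+ # e.g. precompute_freqs_cis(64, 40000) -> complex64 tensor of shape (40000, 32);
+ # it is later sliced to the sequence length (see `self.freqs_cis[: x.size(1)]` in forward).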
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ # global condition
+ if 'caption_vector' in context:
+ clip_cls_token = self.cap_embedder(context['caption_vector'])
+ elif 'img_vector' in context:
+ clip_cls_token = self.cap_embedder(context['img_vector'])
+ else:
+ clip_cls_token = 0
+
+ # spatial condition
+ clip_spatial_token, dino_spatial_token = context['img_crossattn'][..., :self.clip_ctx_dim], context['img_crossattn'][..., self.clip_ctx_dim:]
+ if not self.use_clay_ca:
+ dino_spatial_token=self.dino_proj(dino_spatial_token)
+
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, clip_spatial_token=clip_spatial_token, clip_caption_token=context.get('caption_crossattn'))
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+# dino only version
+class DiT_I23D_PCD_PixelArt_noclip(DiT_I23D_PixelArt):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormNoClip,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=True,
+ use_clay_ca=False,
+ has_caption=False,
+ # has_rope=False,
+ rope_scaling_factor: float = 1.0,
+ ntk_factor: float = 1.0,
+ enable_rope=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim, pooling_ctx_dim, roll_out, vit_blk,
+ final_layer_blk, enable_rope=enable_rope)
+
+ self.x_embedder = Mlp(in_features=in_channels,
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+ del self.pos_embed
+ del self.dino_proj
+
+ self.enable_rope = enable_rope
+ if self.enable_rope: # implementation copied from Lumina-T2X code base
+ self.freqs_cis = DiT_I23D_PCD_PixelArt.precompute_freqs_cis(
+ hidden_size // num_heads,
+ 40000,
+ rope_scaling_factor=rope_scaling_factor,
+ ntk_factor=ntk_factor,
+ )
+ else:
+ self.freqs_cis = None
+
+ self.rope_scaling_factor = rope_scaling_factor
+ self.ntk_factor = ntk_factor
+
+ self.use_clay_ca = use_clay_ca
+
+ self.has_caption = has_caption
+ pooled_vector_dim = context_dim
+ if has_caption:
+ pooled_vector_dim += 768
+
+ self.pooled_vec_embedder = nn.Sequential( # zero-initialized below
+ LayerNorm(pooled_vector_dim),
+ nn.Linear(
+ pooled_vector_dim,
+ hidden_size,
+ ),
+ )
+ nn.init.constant_(self.pooled_vec_embedder[-1].weight, 0)
+ nn.init.constant_(self.pooled_vec_embedder[-1].bias, 0)
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ dino_spatial_token = context['img_crossattn']
+ dino_pooled_vector = context['img_vector']
+ if self.has_caption:
+ clip_caption_token = context.get('caption_crossattn')
+ pooled_vector = torch.cat([dino_pooled_vector, context.get('caption_vector')], -1) # concat dino_vector
+ else:
+ clip_caption_token = None
+ pooled_vector = dino_pooled_vector
+
+
+ t = self.t_embedder(timesteps) + self.pooled_vec_embedder(pooled_vector)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ freqs_cis = None
+ if self.enable_rope:
+ freqs_cis=self.freqs_cis[: x.size(1)]
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, clip_caption_token=clip_caption_token, freqs_cis=freqs_cis)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+# xyz-diff
+
+
+# xyz-cond tex diff
+class DiT_I23D_PCD_PixelArt_xyz_cond_kl_diff(DiT_I23D_PCD_PixelArt):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNorm,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=True,
+ use_pe_cond=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim, pooling_ctx_dim, roll_out, vit_blk,
+ final_layer_blk)
+
+ self.use_pe_cond = use_pe_cond
+ self.x_embedder = Mlp(in_features=in_channels+3*(1-use_pe_cond),
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
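+ # note: with use_pe_cond=False the 3 fps-xyz coordinate channels are concatenated
+ # onto the input (hence in_features = in_channels + 3); with PE conditioning the width is unchanged.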
+
+ if use_pe_cond:
+ self.xyz_pos_embed = XYZPosEmbed(hidden_size)
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ clip_cls_token = self.cap_embedder(context['vector'])
+ clip_spatial_token, dino_spatial_token = context['crossattn'][..., :self.clip_ctx_dim], self.dino_proj(context['crossattn'][..., self.clip_ctx_dim:])
+
+ fps_xyz = context['fps-xyz']
+
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ if self.use_pe_cond:
+ x = self.x_embedder(x) + self.xyz_pos_embed(fps_xyz) # point-wise addition
+ else: # use concat to add info
+ x = torch.cat([fps_xyz, x], dim=-1)
+ x = self.x_embedder(x)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, clip_spatial_token=clip_spatial_token)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ x = self.final_layer(x, t) # no loss on the xyz side
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+# xyz-cond tex diff, but clay
+class DiT_I23D_PCD_PixelArt_noclip_clay_stage2(DiT_I23D_PCD_PixelArt_noclip):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNorm,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=True,
+ use_pe_cond=False,
+ has_caption=False,
+ use_clay_ca=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim, pooling_ctx_dim, roll_out, vit_blk,
+ final_layer_blk, use_clay_ca=use_clay_ca, has_caption=has_caption)
+
+ self.has_caption = False # stage-2 forces the caption branch off, regardless of the flag
+ self.use_pe_cond = use_pe_cond
+ self.x_embedder = Mlp(in_features=in_channels+3*(1-use_pe_cond),
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+
+ if use_pe_cond:
+ self.xyz_pos_embed = XYZPosEmbed(hidden_size)
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ dino_spatial_token = context['img_crossattn']
+ dino_pooled_vector = context['img_vector']
+ if self.has_caption:
+ clip_caption_token = context.get('caption_crossattn')
+ pooled_vector = torch.cat([dino_pooled_vector, context.get('caption_vector')], -1) # concat dino_vector
+ else:
+ clip_caption_token = None
+ pooled_vector = dino_pooled_vector
+
+ t = self.t_embedder(timesteps) + self.pooled_vec_embedder(pooled_vector)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ fps_xyz = context['fps-xyz']
+ if self.use_pe_cond:
+ x = self.x_embedder(x) + self.xyz_pos_embed(fps_xyz) # point-wise addition
+ else: # use concat to add info
+ x = torch.cat([fps_xyz, x], dim=-1)
+ x = self.x_embedder(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, clip_caption_token=clip_caption_token)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+class DiT_I23D_PixelArt_MVCond(DiT_I23D_PixelArt):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim,
+ pooling_ctx_dim, roll_out, ImageCondDiTBlockPixelArtRMSNorm,
+ final_layer_blk, create_cap_embedder=create_cap_embedder)
+
+
+ # Support multi-view image conditioning.
+ # DINO handles global pooling here; CLIP takes care of camera-cond with ModLN.
+ # Input DINO features are concatenated and globally pooled; InstantMesh also adopts DINO (but via cross-attn).
+ # Since conditioning goes through cross-attn, a dynamic number of frames (any context window size) should be supported.
+ del self.dino_proj
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ # expected context keys and shapes:
+ # context['vector']: (B, 768) pooled CLIP vector
+ # context['crossattn']: (B, 256, 1024) CLIP spatial tokens
+ # context['concat']: (B, V, 256, 768) multi-view DINO spatial features
+
+ # ! CLIP spatial tokens are appended for self-attn, hence the projection layer (self.clip_spatial_proj);
+ # DINO features go through cross-attn, so no projection is needed (the cross-attn block already has KV linears).
+ clip_cls_token, clip_spatial_token = self.cap_embedder(context['vector']), self.clip_spatial_proj(context['crossattn']) # no extra norm here: QK-norm suffices, and the ViT already applies ln_post(x)
+ dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ # if self.roll_out: # !
+ x = rearrange(x, 'b (c n) h w->(b n) c h w',
+ n=3) # downsample with same conv
+ x = self.x_embedder(x) # (b n) c h/f w/f
+
+ x = rearrange(x, '(b n) l c -> b (n l) c', n=3)
+ x = x + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ for blk_idx, block in enumerate(self.blocks):
+ # x = block(x, t0, dino_spatial_token=dino_spatial_token, clip_spatial_token=clip_spatial_token) # (N, T, D)
+ # ! DINO tokens for CA, CLIP tokens for append here.
+ x = block(x, t0, dino_spatial_token=clip_spatial_token, clip_spatial_token=dino_spatial_token) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+class DiT_I23D_PixelArt_MVCond_noClip(DiT_I23D_PixelArt):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim,
+ pooling_ctx_dim, roll_out,
+ ImageCondDiTBlockPixelArtRMSNormNoClip,
+ final_layer_blk,
+ create_cap_embedder=create_cap_embedder)
+
+
+ # Support multi-view image conditioning.
+ # DINO handles global pooling here; CLIP takes care of camera-cond with ModLN.
+ # Input DINO features are concatenated and globally pooled; InstantMesh also adopts DINO (but via cross-attn).
+ # Since conditioning goes through cross-attn, a dynamic number of frames (any context window size) should be supported.
+
+ del self.dino_proj
+ del self.clip_spatial_proj, self.cap_embedder # no clip required
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ # expected context keys and shapes:
+ # context['vector']: (B, 768) pooled CLIP vector (unused here)
+ # context['crossattn']: (B, 256, 1024) CLIP spatial tokens (unused here)
+ # context['concat']: (B, V, 256, 768) multi-view DINO spatial features
+
+ dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ t = self.t_embedder(timesteps)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ # if self.roll_out: # !
+ x = rearrange(x, 'b (c n) h w->(b n) c h w',
+ n=3) # downsample with same conv
+ x = self.x_embedder(x) # (b n) c h/f w/f
+
+ x = rearrange(x, '(b n) l c -> b (n l) c', n=3)
+ x = x + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ for blk_idx, block in enumerate(self.blocks):
+ # ! DINO tokens via cross-attn only; no CLIP tokens in this variant.
+ x = block(x, t0, dino_spatial_token=dino_spatial_token) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+
+
+
+# pcd-structured latent ddpm
+
+class DiT_pcd_I23D_PixelArt_MVCond(DiT_I23D_PixelArt_MVCond_noClip):
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim,
+ pooling_ctx_dim,
+ roll_out, ImageCondDiTBlockPixelArtRMSNorm,
+ final_layer_blk,
+ create_cap_embedder=create_cap_embedder)
+ # ! first, normalize xyz from [-0.45,0.45] to [-1,1]
+ # Then, encode xyz with point fourier feat + MLP projection, serves as PE here.
+ # a separate MLP for the KL feature
+ # add them together in the feature space
+ # use a single MLP (final_layer) to map them back to 16 + 3 dims.
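+ # sketch of that scheme (hypothetical helper names, for illustration only):
+ # xyz = xyz / 0.45 # [-0.45, 0.45] -> [-1, 1]
+ # pe = xyz_mlp(fourier_feat(xyz)) # fourier point features as PE
+ # h = kl_mlp(kl_feat) + pe # add in feature space
+ # out = final_layer(h) # map back to 16 + 3 dims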
+ self.x_embedder = Mlp(in_features=in_channels,
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+ del self.pos_embed
+
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ t = self.t_embedder(timesteps)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ # ! DINO tokens via cross-attn only; no CLIP tokens in this variant.
+ x = block(x, t0, dino_spatial_token=dino_spatial_token)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+class DiT_pcd_I23D_PixelArt_MVCond_clay(DiT_PCD_PixelArt):
+ # fine-tunes the MV model, initialized from the text-conditioned model
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False, **kwargs
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ # mixed_prediction, context_dim, roll_out, ImageCondDiTBlockPixelArt,
+ mixed_prediction, context_dim,
+ # pooling_ctx_dim,
+ roll_out, vit_blk,
+ final_layer_blk,)
+ # create_cap_embedder=create_cap_embedder)
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert context is not None
+
+ clip_cls_token = self.cap_embedder(context['caption_vector']) # pooled
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ # ! spatial tokens
+ dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ # loop dit block
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, clip_caption_token=context['caption_crossattn'],
+ dino_spatial_token=dino_spatial_token) # (N, T, D)
+
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+# single-img pretrained clay
+
+class DiT_pcd_I23D_PixelArt_MVCond_clay_i23dpt(DiT_I23D_PCD_PixelArt_noclip):
+ # fine-tune the MV model from the single-image (i23d) pretrained model
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False, **kwargs
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim,
+ pooling_ctx_dim,
+ roll_out, vit_blk,
+ final_layer_blk,)
+
+ self.has_caption = False
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of the point-cloud DiT.
+ x: (N, L, C) tensor of point-cloud latent tokens
+ timesteps: (N,) tensor of diffusion timesteps
+ context: dict with 'img_crossattn', 'img_vector', and multi-view 'concat' features
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ # dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ # t = self.t_embedder(timesteps)
+
+ # clip_cls_token = self.cap_embedder(context['vector'])
+ # clip_spatial_token, dino_spatial_token = context['crossattn'][..., :self.clip_ctx_dim], self.dino_proj(context['crossattn'][..., self.clip_ctx_dim:])
+ # dino_spatial_token = context['crossattn']
+ # st()
+ dino_spatial_token = context['img_crossattn']
+ dino_pooled_vector = context['img_vector']
+ dino_mv_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ if self.has_caption:
+ clip_caption_token = context.get('caption_crossattn')
+ pooled_vector = torch.cat([dino_pooled_vector, context.get('caption_vector')], -1) # concat dino_vector
+ else:
+ clip_caption_token = None
+ pooled_vector = dino_pooled_vector
+
+
+ t = self.t_embedder(timesteps) + self.pooled_vec_embedder(pooled_vector)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, dino_mv_spatial_token=dino_mv_spatial_token)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+# stage 2
+class DiT_pcd_I23D_PixelArt_MVCond_clay_i23dpt_stage2(DiT_I23D_PCD_PixelArt_noclip):
+ # fine-tune the MV model from the single-image (i23d) pretrained model
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False, **kwargs
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim,
+ pooling_ctx_dim,
+ roll_out, vit_blk,
+ final_layer_blk,)
+
+ self.has_caption = False
+
+ self.use_pe_cond = True
+ self.x_embedder = Mlp(in_features=in_channels+3*(1-self.use_pe_cond),
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+
+ if self.use_pe_cond:
+ self.xyz_pos_embed = XYZPosEmbed(hidden_size)
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of the point-cloud DiT (stage 2: xyz-conditioned KL-feature diffusion).
+ x: (N, L, C) tensor of point-cloud latent tokens
+ timesteps: (N,) tensor of diffusion timesteps
+ context: dict with 'img_crossattn', 'img_vector', 'concat', and 'fps-xyz' point positions
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ # dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ # t = self.t_embedder(timesteps)
+
+ # clip_cls_token = self.cap_embedder(context['vector'])
+ # clip_spatial_token, dino_spatial_token = context['crossattn'][..., :self.clip_ctx_dim], self.dino_proj(context['crossattn'][..., self.clip_ctx_dim:])
+ # dino_spatial_token = context['crossattn']
+ # st()
+ dino_spatial_token = context['img_crossattn']
+ dino_pooled_vector = context['img_vector']
+ dino_mv_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ if self.has_caption:
+ clip_caption_token = context.get('caption_crossattn')
+ pooled_vector = torch.cat([dino_pooled_vector, context.get('caption_vector')], -1) # concat dino_vector
+ else:
+ clip_caption_token = None
+ pooled_vector = dino_pooled_vector
+
+
+ t = self.t_embedder(timesteps) + self.pooled_vec_embedder(pooled_vector)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ # x = self.x_embedder(x)
+
+ fps_xyz = context['fps-xyz']
+ if self.use_pe_cond:
+ x = self.x_embedder(x) + self.xyz_pos_embed(fps_xyz) # point-wise addition
+ else: # use concat to add info
+ x = torch.cat([fps_xyz, x], dim=-1)
+ x = self.x_embedder(x)
+
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, dino_spatial_token=dino_spatial_token, dino_mv_spatial_token=dino_mv_spatial_token)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+
+
+class DiT_pcd_I23D_PixelArt_MVCond_clay_i23dpt_noi23d(DiT_I23D_PCD_PixelArt_noclip):
+ # MV-conditioned variant that drops the single-image (i23d) branch
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ pooling_ctx_dim=768,
+ roll_out=False,
+ vit_blk=ImageCondDiTBlockPixelArt,
+ final_layer_blk=FinalLayer,
+ create_cap_embedder=False, **kwargs
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim,
+ pooling_ctx_dim,
+ roll_out, vit_blk,
+ final_layer_blk,)
+
+ self.has_caption = False
+ del self.pooled_vec_embedder
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of the point-cloud DiT.
+ x: (N, L, C) tensor of point-cloud latent tokens
+ timesteps: (N,) tensor of diffusion timesteps
+ context: dict; only the multi-view 'concat' features are used in this variant
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ # dino_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ # t = self.t_embedder(timesteps)
+
+ # clip_cls_token = self.cap_embedder(context['vector'])
+ # clip_spatial_token, dino_spatial_token = context['crossattn'][..., :self.clip_ctx_dim], self.dino_proj(context['crossattn'][..., self.clip_ctx_dim:])
+ # dino_spatial_token = context['crossattn']
+ # st()
+ # dino_spatial_token = context['img_crossattn']
+ # dino_pooled_vector = context['img_vector']
+ dino_mv_spatial_token = rearrange(context['concat'], 'b v l c -> b (v l) c') # flatten MV dino features.
+
+ if self.has_caption:
+ clip_caption_token = context.get('caption_crossattn')
+ pooled_vector = torch.cat([dino_pooled_vector, context.get('caption_vector')], -1) # concat dino_vector
+ else:
+ clip_caption_token = None
+ # pooled_vector = dino_pooled_vector
+ pooled_vector = None
+
+
+ # t = self.t_embedder(timesteps) + self.pooled_vec_embedder(pooled_vector)
+ t = self.t_embedder(timesteps)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ # x = block(x, t0, dino_spatial_token=dino_spatial_token, dino_mv_spatial_token=dino_mv_spatial_token)
+ x = block(x, t0, dino_mv_spatial_token=dino_mv_spatial_token)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+#################################################################################
+# DiT_I23D Configs #
+#################################################################################
+
+
+def DiT_XL_2(**kwargs):
+ return DiT_I23D(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_2(**kwargs):
+ return DiT_I23D(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_B_2(**kwargs):
+ return DiT_I23D(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT_B_1(**kwargs):
+ return DiT_I23D(depth=12,
+ hidden_size=768,
+ patch_size=1,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT_L_Pixelart_2(**kwargs):
+ return DiT_I23D_PixelArt(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_B_Pixelart_2(**kwargs):
+ return DiT_I23D_PixelArt(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+def DiT_L_Pixelart_MV_2(**kwargs):
+ return DiT_I23D_PixelArt_MVCond(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+def DiT_L_Pixelart_MV_2_noclip(**kwargs):
+ return DiT_I23D_PixelArt_MVCond_noClip(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+def DiT_XL_Pixelart_MV_2(**kwargs):
+ return DiT_I23D_PixelArt_MVCond(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+
+def DiT_B_Pixelart_MV_2(**kwargs):
+ return DiT_I23D_PixelArt_MVCond(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+# pcd latent
+
+def DiT_L_Pixelart_MV_pcd(**kwargs):
+ return DiT_pcd_I23D_PixelArt_MVCond(depth=24,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ **kwargs)
+
+# raw gs i23d
+def DiT_L_Pixelart_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt(depth=24,
+ # return DiT_I23D_PCD_PixelArt_noclip(depth=24,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ **kwargs)
+
+def DiT_L_Pixelart_clay_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ enable_rope=False,
+ **kwargs)
+
+def DiT_XL_Pixelart_clay_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip(depth=28,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ use_clay_ca=True,
+ hidden_size=1152,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ enable_rope=False,
+ **kwargs)
+
+
+def DiT_B_Pixelart_clay_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip(depth=12,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ use_clay_ca=True,
+ hidden_size=768,
+ patch_size=1, # no spatial compression here
+ num_heads=12,
+ **kwargs)
+
+def DiT_L_Pixelart_clay_pcd_stage2(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip_clay_stage2(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ use_pe_cond=True,
+ **kwargs)
+
+def DiT_B_Pixelart_clay_pcd_stage2(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip_clay_stage2(depth=12,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ use_clay_ca=True,
+ hidden_size=768,
+ patch_size=1, # no spatial compression here
+ num_heads=12,
+ use_pe_cond=True,
+ **kwargs)
+
+
+
+def DiT_L_Pixelart_clay_tandi_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayText,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ has_caption=True,
+ **kwargs)
+
+def DiT_B_Pixelart_clay_tandi_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt_noclip(depth=12,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayText,
+ use_clay_ca=True,
+ hidden_size=768,
+ patch_size=1, # no spatial compression here
+ num_heads=12,
+ has_caption=True,
+ **kwargs)
+
+
+def DiT_B_Pixelart_pcd(**kwargs):
+ return DiT_I23D_PCD_PixelArt(depth=12,
+ hidden_size=768,
+ patch_size=1, # no spatial compression here
+ num_heads=12,
+ **kwargs)
+
+def DiT_B_Pixelart_pcd_cond_diff(**kwargs):
+ return DiT_I23D_PCD_PixelArt_xyz_cond_kl_diff(depth=12,
+ hidden_size=768,
+ patch_size=1, # no spatial compression here
+ num_heads=12,
+ **kwargs)
+
+def DiT_B_Pixelart_pcd_cond_diff_pe(**kwargs):
+ return DiT_I23D_PCD_PixelArt_xyz_cond_kl_diff(depth=12,
+ hidden_size=768,
+ patch_size=1, # no spatial compression here
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ num_heads=12,
+ use_pe_cond=True,
+ **kwargs)
+
+def DiT_L_Pixelart_pcd_cond_diff_pe(**kwargs):
+ return DiT_I23D_PCD_PixelArt_xyz_cond_kl_diff(depth=24,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayLRM,
+ num_heads=16,
+ use_pe_cond=True,
+ **kwargs)
+
+# mv version
+
+def DiT_L_Pixelart_clay_mv_pcd(**kwargs):
+ return DiT_pcd_I23D_PixelArt_MVCond_clay(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayText,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_Pixelart_clay_mv_i23dpt_pcd(**kwargs):
+ return DiT_pcd_I23D_PixelArt_MVCond_clay_i23dpt(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayMV,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_Pixelart_clay_mv_i23dpt_pcd_noi23d(**kwargs):
+ return DiT_pcd_I23D_PixelArt_MVCond_clay_i23dpt_noi23d(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayMV_noi23d,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_Pixelart_clay_mv_i23dpt_pcd_stage2(**kwargs):
+ return DiT_pcd_I23D_PixelArt_MVCond_clay_i23dpt_stage2(depth=24,
+ vit_blk=ImageCondDiTBlockPixelArtRMSNormClayMV,
+ use_clay_ca=True,
+ hidden_size=1024,
+ patch_size=1, # no spatial compression here
+ num_heads=16,
+ **kwargs)
+
+
+
+
+DiT_models = {
+ 'DiT-XL/2': DiT_XL_2,
+ 'DiT-L/2': DiT_L_2,
+ 'DiT-B/2': DiT_B_2,
+ 'DiT-B/1': DiT_B_1,
+ 'DiT-PixArt-L/2': DiT_L_Pixelart_2,
+ 'DiT-PixArt-MV-XL/2': DiT_XL_Pixelart_MV_2,
+ # 'DiT-PixArt-MV-L/2': DiT_L_Pixelart_MV_2,
+ 'DiT-PixArt-MV-L/2': DiT_L_Pixelart_MV_2_noclip,
+ 'DiT-PixArt-MV-PCD-L': DiT_L_Pixelart_MV_pcd,
+ # raw xyz cond
+ 'DiT-PixArt-PCD-L': DiT_L_Pixelart_pcd,
+ 'DiT-PixArt-PCD-CLAY-XL': DiT_XL_Pixelart_clay_pcd,
+ 'DiT-PixArt-PCD-CLAY-L': DiT_L_Pixelart_clay_pcd,
+ 'DiT-PixArt-PCD-CLAY-B': DiT_B_Pixelart_clay_pcd,
+ 'DiT-PixArt-PCD-CLAY-stage2-B': DiT_B_Pixelart_clay_pcd_stage2,
+ 'DiT-PixArt-PCD-CLAY-stage2-L': DiT_L_Pixelart_clay_pcd_stage2,
+ 'DiT-PixArt-PCD-CLAY-TandI-L': DiT_L_Pixelart_clay_tandi_pcd,
+ 'DiT-PixArt-PCD-CLAY-TandI-B': DiT_B_Pixelart_clay_tandi_pcd,
+ 'DiT-PixArt-PCD-B': DiT_B_Pixelart_pcd,
+ # xyz-conditioned KL feature diffusion
+ 'DiT-PixArt-PCD-cond-diff-B': DiT_B_Pixelart_pcd_cond_diff,
+ 'DiT-PixArt-PCD-cond-diff-pe-B': DiT_B_Pixelart_pcd_cond_diff_pe,
+ 'DiT-PixArt-PCD-cond-diff-pe-L': DiT_L_Pixelart_pcd_cond_diff_pe,
+ 'DiT-PixArt-MV-B/2': DiT_B_Pixelart_MV_2,
+ 'DiT-PixArt-B/2': DiT_B_Pixelart_2,
+
+ # ! mv version following clay
+ 'DiT-PixArt-PCD-MV-L': DiT_L_Pixelart_clay_mv_pcd,
+ 'DiT-PixArt-PCD-MV-I23Dpt-L': DiT_L_Pixelart_clay_mv_i23dpt_pcd,
+ 'DiT-PixArt-PCD-MV-I23Dpt-L-noI23D': DiT_L_Pixelart_clay_mv_i23dpt_pcd_noi23d,
+ 'DiT-PixArt-PCD-MV-I23Dpt-L-stage2': DiT_L_Pixelart_clay_mv_i23dpt_pcd_stage2,
+}
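+
+# Minimal usage sketch for the registry above. This is an assumption-laden example,
+# not from the original source: the context keys follow the forward() signatures in
+# this file, and all channel/token sizes are illustrative placeholders.
+# model = DiT_models['DiT-PixArt-PCD-MV-I23Dpt-L'](in_channels=16, context_dim=1024)
+# x = torch.randn(2, 768, 16) # (B, L, C) point-cloud latent tokens
+# context = {
+# 'img_crossattn': torch.randn(2, 256, 1024), # single-image DINO tokens
+# 'img_vector': torch.randn(2, 768), # pooled single-image feature
+# 'concat': torch.randn(2, 4, 256, 1024), # (B, V, L, C) multi-view DINO tokens
+# }
+# out = model(x, timesteps=torch.randint(0, 1000, (2,)), context=context)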
diff --git a/dit/dit_models.py b/dit/dit_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..d296c973696c50eb6bdcfd01cdd05950b63b2879
--- /dev/null
+++ b/dit/dit_models.py
@@ -0,0 +1,702 @@
+# https://github.com/facebookresearch/DiT
+
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+# --------------------------------------------------------
+# References:
+# GLIDE: https://github.com/openai/glide-text2im
+# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py
+# --------------------------------------------------------
+
+import torch
+import torch.nn as nn
+import numpy as np
+import math
+# from timm.models.vision_transformer import PatchEmbed, Attention, Mlp
+from timm.models.vision_transformer import PatchEmbed, Mlp
+from einops import rearrange
+from pdb import set_trace as st
+
+# support flash attention and xformer acceleration
+from vit.vision_transformer import MemEffAttention as Attention
+
+# from torch.nn import LayerNorm
+# from xformers import triton
+# import xformers.triton
+# from xformers.triton import FusedLayerNorm as LayerNorm
+# from xformers.components.activations import build_activation, Activation
+# from xformers.components.feedforward import fused_mlp
+
+
+def modulate(x, shift, scale):
+ return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1)
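+# Shape note: x is (N, T, D) token features while shift/scale are (N, D) per-sample
+# vectors; unsqueeze(1) broadcasts them over the token axis T.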
+
+
+#################################################################################
+# Embedding Layers for Timesteps and Class Labels #
+#################################################################################
+
+
+class TimestepEmbedder(nn.Module):
+ """
+ Embeds scalar timesteps into vector representations.
+ """
+
+ def __init__(self, hidden_size, frequency_embedding_size=256):
+ super().__init__()
+ self.mlp = nn.Sequential(
+ nn.Linear(frequency_embedding_size, hidden_size, bias=True),
+ nn.SiLU(),
+ nn.Linear(hidden_size, hidden_size, bias=True),
+ )
+ self.frequency_embedding_size = frequency_embedding_size
+
+ @staticmethod
+ def timestep_embedding(t, dim, max_period=10000):
+ """
+ Create sinusoidal timestep embeddings.
+ :param t: a 1-D Tensor of N indices, one per batch element.
+ These may be fractional.
+ :param dim: the dimension of the output.
+ :param max_period: controls the minimum frequency of the embeddings.
+ :return: an (N, D) Tensor of positional embeddings.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py
+ half = dim // 2
+ freqs = torch.exp(
+ -math.log(max_period) *
+ torch.arange(start=0, end=half, dtype=torch.float32) /
+ half).to(device=t.device)
+ args = t[:, None].float() * freqs[None]
+ embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
+ if dim % 2:
+ embedding = torch.cat(
+ [embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
+ return embedding
+
+ def forward(self, t):
+ t_freq = self.timestep_embedding(t, self.frequency_embedding_size)
+ t_emb = self.mlp(t_freq)
+ return t_emb
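+# Usage sketch (values illustrative, not from the original source):
+# embedder = TimestepEmbedder(hidden_size=1152)
+# t = torch.randint(0, 1000, (4,)) # one timestep per batch element
+# t_emb = embedder(t) # -> (4, 1152)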
+
+
+class LabelEmbedder(nn.Module):
+ """
+ Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance.
+ """
+
+ def __init__(self, num_classes, hidden_size, dropout_prob):
+ super().__init__()
+ use_cfg_embedding = dropout_prob > 0
+ self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding,
+ hidden_size)
+ self.num_classes = num_classes
+ self.dropout_prob = dropout_prob
+
+ def token_drop(self, labels, force_drop_ids=None):
+ """
+ Drops labels to enable classifier-free guidance.
+ """
+ if force_drop_ids is None:
+ drop_ids = torch.rand(labels.shape[0],
+ device=labels.device) < self.dropout_prob
+ else:
+ drop_ids = force_drop_ids == 1
+ labels = torch.where(drop_ids, self.num_classes, labels)
+ return labels
+
+ def forward(self, labels, train, force_drop_ids=None):
+ use_dropout = self.dropout_prob > 0
+ if (train and use_dropout) or (force_drop_ids is not None):
+ labels = self.token_drop(labels, force_drop_ids)
+ embeddings = self.embedding_table(labels)
+ return embeddings
+
+
+class ClipProjector(nn.Module):
+
+ def __init__(self, transformer_width, embed_dim, tx_width, *args,
+ **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ '''a CLIP text encoder projector, adapted from CLIP.encode_text
+ '''
+
+ self.text_projection = nn.Parameter(
+ torch.empty(transformer_width, embed_dim))
+ nn.init.normal_(self.text_projection, std=tx_width**-0.5)
+
+ def forward(self, clip_text_x):
+ return clip_text_x @ self.text_projection
+
+
+#################################################################################
+# Core DiT Model #
+#################################################################################
+
+# class DiTBlock(nn.Module):
+# """
+# A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
+# """
+
+# def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs):
+# super().__init__()
+# nn.LayerNorm
+# self.norm1 = LayerNorm(
+# hidden_size,
+# affine=False,
+# # elementwise_affine=False,
+# eps=1e-6)
+# self.attn = Attention(hidden_size,
+# num_heads=num_heads,
+# qkv_bias=True,
+# **block_kwargs)
+# self.norm2 = LayerNorm(
+# hidden_size,
+# # elementwise_affine=False,
+# affine=False,
+# eps=1e-6)
+
+# mlp_hidden_dim = int(hidden_size * mlp_ratio)
+# approx_gelu = lambda: nn.GELU(approximate="tanh")
+
+# self.mlp = Mlp(in_features=hidden_size,
+# hidden_features=mlp_hidden_dim,
+# act_layer=approx_gelu,
+# drop=0)
+
+# # self.mlp = fused_mlp.FusedMLP(
+# # dim_model=hidden_size,
+# # dropout=0,
+# # activation=Activation.GeLU,
+# # hidden_layer_multiplier=mlp_ratio,
+# # )
+
+# self.adaLN_modulation = nn.Sequential(
+# nn.SiLU(), nn.Linear(hidden_size, 6 * hidden_size, bias=True))
+
+# def forward(self, x, c):
+# shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+# c).chunk(6, dim=1)
+# x = x + gate_msa.unsqueeze(1) * self.attn(
+# modulate(self.norm1(x), shift_msa, scale_msa))
+# x = x + gate_mlp.unsqueeze(1) * self.mlp(
+# modulate(self.norm2(x), shift_mlp, scale_mlp))
+# return x
+
+
+class DiTBlock(nn.Module):
+ """
+ A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
+ """
+
+ def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs):
+ super().__init__()
+ self.norm1 = nn.LayerNorm(hidden_size,
+ elementwise_affine=False,
+ eps=1e-6)
+ self.attn = Attention(hidden_size,
+ num_heads=num_heads,
+ qkv_bias=True,
+ **block_kwargs)
+ self.norm2 = nn.LayerNorm(hidden_size,
+ elementwise_affine=False,
+ eps=1e-6)
+ mlp_hidden_dim = int(hidden_size * mlp_ratio)
+ approx_gelu = lambda: nn.GELU(approximate="tanh")
+ self.mlp = Mlp(in_features=hidden_size,
+ hidden_features=mlp_hidden_dim,
+ act_layer=approx_gelu,
+ drop=0)
+ self.adaLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 6 * hidden_size, bias=True))
+
+ def forward(self, x, c):
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ c).chunk(6, dim=1)
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ modulate(self.norm1(x), shift_msa, scale_msa))
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+ return x
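+# Note: initialize_weights() zeroes the final adaLN_modulation linear layer, so at
+# initialization gate_msa = gate_mlp = 0 and every block starts as the identity map
+# (the "Zero" in adaLN-Zero). Shape sketch (illustrative):
+# block = DiTBlock(hidden_size=768, num_heads=12)
+# out = block(torch.randn(2, 256, 768), torch.randn(2, 768)) # -> (2, 256, 768)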
+
+
+class DiTBlockRollOut(DiTBlock):
+ """
+ A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
+ """
+
+ def __init__(self, hidden_size, num_heads, mlp_ratio=4, **block_kwargs):
+ super().__init__(hidden_size * 3, num_heads, mlp_ratio, **block_kwargs)
+
+ def forward(self, x, c):
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ c).chunk(6, dim=1)
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ modulate(self.norm1(x), shift_msa, scale_msa))
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+ return x
+
+
+class FinalLayer(nn.Module):
+ """
+ The final layer of DiT, basically the decoder_pred in MAE with adaLN.
+ """
+
+ def __init__(self, hidden_size, patch_size, out_channels):
+ super().__init__()
+ self.norm_final = nn.LayerNorm(hidden_size,
+ elementwise_affine=False,
+ eps=1e-6)
+ self.linear = nn.Linear(hidden_size,
+ patch_size * patch_size * out_channels,
+ bias=True)
+ self.adaLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 2 * hidden_size, bias=True))
+
+ def forward(self, x, c):
+ shift, scale = self.adaLN_modulation(c).chunk(2, dim=1)
+ x = modulate(self.norm_final(x), shift, scale)
+ x = self.linear(x)
+ return x
+
+
+class DiT(nn.Module):
+ """
+ Diffusion model with a Transformer backbone.
+ """
+
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4.0,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=False,
+ roll_out=False,
+ vit_blk=DiTBlock,
+ final_layer_blk=FinalLayer,
+ ):
+ super().__init__()
+ self.learn_sigma = learn_sigma
+ self.in_channels = in_channels
+ self.out_channels = in_channels * 2 if learn_sigma else in_channels
+ self.patch_size = patch_size
+ self.num_heads = num_heads
+ self.embed_dim = hidden_size
+
+ # st()
+ self.x_embedder = PatchEmbed(input_size,
+ patch_size,
+ in_channels,
+ hidden_size,
+ bias=True)
+ self.t_embedder = TimestepEmbedder(hidden_size)
+ if num_classes > 0:
+ self.y_embedder = LabelEmbedder(num_classes, hidden_size,
+ class_dropout_prob)
+ else:
+ self.y_embedder = None
+
+ if context_dim is not None:
+ self.clip_text_proj = ClipProjector(context_dim,
+ hidden_size,
+ tx_width=depth)
+ else:
+ self.clip_text_proj = None
+
+ self.roll_out = roll_out
+
+ num_patches = self.x_embedder.num_patches # 14*14*3
+ # Will use fixed sin-cos embedding:
+ self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size),
+ requires_grad=False)
+
+ # if not self.roll_out:
+ self.blocks = nn.ModuleList([
+ vit_blk(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ for _ in range(depth)
+ ])
+ # else:
+ # self.blocks = nn.ModuleList([
+ # DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) if idx % 2 == 0 else
+ # DiTBlockRollOut(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ # for idx in range(depth)
+ # ])
+
+ self.final_layer = final_layer_blk(hidden_size, patch_size,
+ self.out_channels)
+ self.initialize_weights()
+
+ self.mixed_prediction = mixed_prediction # This enables mixed prediction
+ if self.mixed_prediction:
+ if self.roll_out:
+ logit_ch = in_channels * 3
+ else:
+ logit_ch = in_channels
+ init = mixing_logit_init * torch.ones(
+ size=[1, logit_ch, 1, 1]) # hard coded for now
+ self.mixing_logit = torch.nn.Parameter(init, requires_grad=True)
+
+ def initialize_weights(self):
+ # Initialize transformer layers:
+ def _basic_init(module):
+ if isinstance(module, nn.Linear):
+ torch.nn.init.xavier_uniform_(module.weight)
+ if module.bias is not None:
+ nn.init.constant_(module.bias, 0)
+
+ self.apply(_basic_init)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ pos_embed = get_2d_sincos_pos_embed(
+ self.pos_embed.shape[-1], int(self.x_embedder.num_patches**0.5))
+ # st()
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ # Initialize patch_embed like nn.Linear (instead of nn.Conv2d):
+ w = self.x_embedder.proj.weight.data
+ nn.init.xavier_uniform_(w.view([w.shape[0], -1]))
+ nn.init.constant_(self.x_embedder.proj.bias, 0)
+
+ # Initialize label embedding table:
+ if self.y_embedder is not None:
+ nn.init.normal_(self.y_embedder.embedding_table.weight, std=0.02)
+
+ # Initialize timestep embedding MLP:
+ nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
+ nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)
+
+ # Zero-out adaLN modulation layers in DiT blocks:
+ for block in self.blocks:
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
+
+ # Zero-out output layers:
+ nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)
+ nn.init.constant_(self.final_layer.linear.weight, 0)
+ nn.init.constant_(self.final_layer.linear.bias, 0)
+
+ def unpatchify(self, x):
+ """
+ x: (N, T, patch_size**2 * C)
+ imgs: (N, C, H, W)
+ """
+ c = self.out_channels
+ # p = self.x_embedder.patch_size[0]
+ p = self.patch_size
+ h = w = int(x.shape[1]**0.5)
+ assert h * w == x.shape[1]
+
+ x = x.reshape(shape=(x.shape[0], h, w, p, p, c))
+ x = torch.einsum('nhwpqc->nchpwq', x)
+ imgs = x.reshape(shape=(x.shape[0], c, h * p, h * p))
+ return imgs
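+ # Shape example (illustrative): with p=2 and out_channels c=8, an input x of
+ # shape (N, 256, 32) gives h = w = 16 and imgs of shape (N, 8, 32, 32).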
+
+ # def forward(self, x, t, y=None, get_attr=''):
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ timesteps: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+
+ if isinstance(context, dict):
+ context = context['crossattn'] # sgm conditioner compat
+
+ if get_attr != '': # not breaking the forward hooks
+ return getattr(self, get_attr)
+
+ t = self.t_embedder(timesteps) # (N, D)
+
+ # st()
+ if self.roll_out: # !
+ x = rearrange(x, 'b (n c) h w->(b n) c h w', n=3)
+
+ x = self.x_embedder(
+ x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ if self.roll_out: # ! roll-out in the L dim, not B dim. add condition to all tokens.
+ x = rearrange(x, '(b n) l c ->b (n l) c', n=3)
+
+ if self.y_embedder is not None:
+ assert y is not None
+ y = self.y_embedder(y, self.training) # (N, D)
+ c = t + y # (N, D)
+
+ elif context is not None:
+ assert context.ndim == 2
+ context = self.clip_text_proj(context)
+
+ if context.shape[0] < t.shape[
+ 0]: # same caption context for different view input of the same ID
+ context = torch.repeat_interleave(context,
+ t.shape[0] //
+ context.shape[0],
+ dim=0)
+
+ # if context.ndim == 3: # compat version from SD
+ # context = context[:, 0, :]
+ c = t + context
+ else:
+ c = t # BS 1024
+
+ for blk_idx, block in enumerate(self.blocks):
+ # if self.roll_out:
+ # if blk_idx % 2 == 0: # with-in plane self attention
+ # x = rearrange(x, 'b (n l) c -> b l (n c) ', n=3)
+ # x = block(x, torch.repeat_interleave(c, 3, 0)) # (N, T, D)
+ # else: # global attention
+ # # x = rearrange(x, '(b n) l c -> b (n l) c ', n=3)
+ # x = rearrange(x, 'b l (n c) -> b (n l) c ', n=3)
+ # x = block(x, c) # (N, T, D)
+ # else:
+ x = block(x, c) # (N, T, D)
+
+ x = self.final_layer(x, c) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (n c) h w', n=3)
+
+ return x
+
+ def forward_with_cfg(self, x, t, y, cfg_scale):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ half = x[:len(x) // 2]
+ combined = torch.cat([half, half], dim=0)
+ model_out = self.forward(combined, t, y)
+ # For exact reproducibility reasons, we apply classifier-free guidance on only
+ # three channels by default. The standard approach to cfg applies it to all channels.
+ # This can be done by uncommenting the following line and commenting-out the line following that.
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ eps, rest = model_out[:, :3], model_out[:, 3:]
+ cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ eps = torch.cat([half_eps, half_eps], dim=0)
+ return torch.cat([eps, rest], dim=1)
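+ # The rule above is eps = eps_uncond + cfg_scale * (eps_cond - eps_uncond); it
+ # assumes the batch is laid out as [conditional half; unconditional half]
+ # (e.g. y = cat([labels, null_labels])), as in the GLIDE notebook linked above.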
+
+ def forward_with_cfg_unconditional(self, x, t, y=None, cfg_scale=None):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ # half = x[:len(x) // 2]
+ # combined = torch.cat([half, half], dim=0)
+ combined = x
+ model_out = self.forward(combined, t, y)
+ # For exact reproducibility reasons, we apply classifier-free guidance on only
+ # three channels by default. The standard approach to cfg applies it to all channels.
+ # This can be done by uncommenting the following line and commenting-out the line following that.
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ # eps, rest = model_out[:, :3], model_out[:, 3:]
+ # cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ # half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ # eps = torch.cat([half_eps, half_eps], dim=0)
+ # return torch.cat([eps, rest], dim=1)
+ # st()
+ return model_out
+
+
+#################################################################################
+# Sine/Cosine Positional Embedding Functions #
+#################################################################################
+# https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py
+
+
+def get_2d_sincos_pos_embed(embed_dim,
+ grid_size,
+ cls_token=False,
+ extra_tokens=0):
+ """
+ grid_size: int of the grid height and width
+ return:
+ pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token)
+ """
+ if isinstance(grid_size, tuple):
+ grid_size_h, grid_size_w = grid_size
+ grid_h = np.arange(grid_size_h, dtype=np.float32)
+ grid_w = np.arange(grid_size_w, dtype=np.float32)
+ else:
+ grid_size_h = grid_size_w = grid_size
+ grid_h = np.arange(grid_size, dtype=np.float32)
+ grid_w = np.arange(grid_size, dtype=np.float32)
+
+ grid = np.meshgrid(grid_w, grid_h) # here w goes first
+ grid = np.stack(grid, axis=0)
+
+ grid = grid.reshape([2, 1, grid_size_h, grid_size_w])
+ pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)
+ if cls_token and extra_tokens > 0:
+ pos_embed = np.concatenate(
+ [np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0)
+ return pos_embed
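+# Usage sketch (illustrative): a 16x16 token grid with 768-d embeddings.
+# pos = get_2d_sincos_pos_embed(768, 16) # -> (256, 768) numpy array
+# pos_embed.data.copy_(torch.from_numpy(pos).float().unsqueeze(0))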
+
+
+def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):
+ assert embed_dim % 2 == 0
+
+ # use half of dimensions to encode grid_h
+ emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2,
+ grid[0]) # (H*W, D/2)
+ emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2,
+ grid[1]) # (H*W, D/2)
+
+ emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D)
+ return emb
+
+
+def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
+ """
+ embed_dim: output dimension for each position
+ pos: a list of positions to be encoded: size (M,)
+ out: (M, D)
+ """
+ assert embed_dim % 2 == 0
+ omega = np.arange(embed_dim // 2, dtype=np.float64)
+ omega /= embed_dim / 2.
+ omega = 1. / 10000**omega # (D/2,)
+
+ pos = pos.reshape(-1) # (M,)
+ out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product
+
+ emb_sin = np.sin(out) # (M, D/2)
+ emb_cos = np.cos(out) # (M, D/2)
+
+ emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D)
+ return emb
+
+
+#################################################################################
+# DiT Configs #
+#################################################################################
+
+
+def DiT_XL_2(**kwargs):
+ return DiT(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_XL_4(**kwargs):
+ return DiT(depth=28,
+ hidden_size=1152,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_XL_8(**kwargs):
+ return DiT(depth=28,
+ hidden_size=1152,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_2(**kwargs):
+ return DiT(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_4(**kwargs):
+ return DiT(depth=24,
+ hidden_size=1024,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_8(**kwargs):
+ return DiT(depth=24,
+ hidden_size=1024,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_B_2(**kwargs):
+ return DiT(depth=12, hidden_size=768, patch_size=2, num_heads=12, **kwargs)
+
+
+def DiT_B_4(**kwargs):
+ return DiT(depth=12, hidden_size=768, patch_size=4, num_heads=12, **kwargs)
+
+
+def DiT_B_8(**kwargs):
+ return DiT(depth=12, hidden_size=768, patch_size=8, num_heads=12, **kwargs)
+
+
+def DiT_B_16(**kwargs): # ours cfg
+ return DiT(depth=12,
+ hidden_size=768,
+ patch_size=16,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT_S_2(**kwargs):
+ return DiT(depth=12, hidden_size=384, patch_size=2, num_heads=6, **kwargs)
+
+
+def DiT_S_4(**kwargs):
+ return DiT(depth=12, hidden_size=384, patch_size=4, num_heads=6, **kwargs)
+
+
+def DiT_S_8(**kwargs):
+ return DiT(depth=12, hidden_size=384, patch_size=8, num_heads=6, **kwargs)
+
+
+DiT_models = {
+ 'DiT-XL/2': DiT_XL_2,
+ 'DiT-XL/4': DiT_XL_4,
+ 'DiT-XL/8': DiT_XL_8,
+ 'DiT-L/2': DiT_L_2,
+ 'DiT-L/4': DiT_L_4,
+ 'DiT-L/8': DiT_L_8,
+ 'DiT-B/2': DiT_B_2,
+ 'DiT-B/4': DiT_B_4,
+ 'DiT-B/8': DiT_B_8,
+ 'DiT-B/16': DiT_B_16,
+ 'DiT-S/2': DiT_S_2,
+ 'DiT-S/4': DiT_S_4,
+ 'DiT-S/8': DiT_S_8,
+}
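+
+# Minimal usage sketch (sizes are illustrative, not release settings):
+# model = DiT_models['DiT-B/2'](input_size=32, in_channels=4,
+# num_classes=1000, context_dim=None)
+# x = torch.randn(2, 4, 32, 32)
+# t = torch.randint(0, 1000, (2,))
+# y = torch.randint(0, 1000, (2,))
+# out = model(x, timesteps=t, y=y) # -> (2, 8, 32, 32); learn_sigma doubles C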
diff --git a/dit/dit_models_xformers.py b/dit/dit_models_xformers.py
new file mode 100644
index 0000000000000000000000000000000000000000..bfcc02d284c2fdc2d82627bbcc0c0ba63bfd78a1
--- /dev/null
+++ b/dit/dit_models_xformers.py
@@ -0,0 +1,1461 @@
+# https://github.com/facebookresearch/DiT
+
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+# --------------------------------------------------------
+# References:
+# GLIDE: https://github.com/openai/glide-text2im
+# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py
+# --------------------------------------------------------
+
+import torch
+import torch.nn as nn
+import numpy as np
+import math
+# from timm.models.vision_transformer import PatchEmbed, Attention, Mlp
+from timm.models.vision_transformer import PatchEmbed, Mlp
+from einops import rearrange
+from pdb import set_trace as st
+
+# support flash attention and xformer acceleration
+from ldm.modules.attention import CrossAttention
+from vit.vision_transformer import MemEffAttention as Attention
+# import apex
+# from apex.normalization import FusedRMSNorm as RMSNorm
+
+from torch.nn import LayerNorm
+
+try:
+ from apex.normalization import FusedRMSNorm as RMSNorm
+except ImportError:
+ from dit.norm import RMSNorm
+
+# from torch.nn import LayerNorm
+# from xformers import triton
+# import xformers.triton
+
+if torch.cuda.is_available():
+ # from xformers.triton import FusedLayerNorm as LayerNorm # compat issue
+ from xformers.components.activations import build_activation, Activation
+ from xformers.components.feedforward import fused_mlp
+ # from xformers.components.feedforward import mlp
+
+from ldm.modules.attention import MemoryEfficientCrossAttention, JointMemoryEfficientCrossAttention
+
+
+def modulate(x, shift, scale):
+ return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1)
+
+
+def t2i_modulate(x, shift, scale): # for pix-art arch
+ return x * (1 + scale) + shift
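+# Unlike modulate() above, t2i_modulate expects shift/scale that already carry a
+# broadcastable token axis (e.g. (B, 1, D) slices of a scale_shift_table), so no
+# unsqueeze is needed; this is the PixArt-style convention used below.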
+
+
+#################################################################################
+# Embedding Layers for Timesteps and Class Labels #
+#################################################################################
+
+
+class T2IFinalLayer(nn.Module):
+ """
+ The final layer of PixArt.
+ """
+ # from torch.nn import LayerNorm
+
+ def __init__(self, hidden_size, patch_size, out_channels):
+ super().__init__()
+ self.norm_final = LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)
+ self.linear = nn.Linear(hidden_size,
+ patch_size * patch_size * out_channels,
+ bias=True)
+ self.scale_shift_table = nn.Parameter(
+ torch.randn(2, hidden_size) / hidden_size**0.5)
+ self.adaLN_modulation = None
+ self.out_channels = out_channels
+
+ def forward(self, x, t):
+ shift, scale = (self.scale_shift_table[None] + t[:, None]).chunk(2,
+ dim=1)
+ x = t2i_modulate(self.norm_final(x), shift, scale)
+ x = self.linear(x)
+ return x
+
+
+class TimestepEmbedder(nn.Module):
+ """
+ Embeds scalar timesteps into vector representations.
+ """
+
+ def __init__(self, hidden_size, frequency_embedding_size=256):
+ super().__init__()
+ self.mlp = nn.Sequential(
+ nn.Linear(frequency_embedding_size, hidden_size, bias=True),
+ nn.SiLU(),
+ nn.Linear(hidden_size, hidden_size, bias=True),
+ )
+ self.frequency_embedding_size = frequency_embedding_size
+
+ @staticmethod
+ def timestep_embedding(t, dim, max_period=10000):
+ """
+ Create sinusoidal timestep embeddings.
+ :param t: a 1-D Tensor of N indices, one per batch element.
+ These may be fractional.
+ :param dim: the dimension of the output.
+ :param max_period: controls the minimum frequency of the embeddings.
+ :return: an (N, D) Tensor of positional embeddings.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py
+ half = dim // 2
+ freqs = torch.exp(
+ -math.log(max_period) *
+ torch.arange(start=0, end=half, dtype=torch.float32) /
+ half).to(device=t.device)
+ args = t[:, None].float() * freqs[None]
+ embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
+ if dim % 2:
+ embedding = torch.cat(
+ [embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
+ return embedding
+
+ def forward(self, t):
+ t_freq = self.timestep_embedding(t, self.frequency_embedding_size)
+ t_emb = self.mlp(t_freq)
+ return t_emb
+
+
+class LabelEmbedder(nn.Module):
+ """
+ Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance.
+ """
+
+ def __init__(self, num_classes, hidden_size, dropout_prob):
+ super().__init__()
+ use_cfg_embedding = dropout_prob > 0
+ self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding,
+ hidden_size)
+ self.num_classes = num_classes
+ self.dropout_prob = dropout_prob
+
+ def token_drop(self, labels, force_drop_ids=None):
+ """
+ Drops labels to enable classifier-free guidance.
+ """
+ if force_drop_ids is None:
+ drop_ids = torch.rand(labels.shape[0],
+ device=labels.device) < self.dropout_prob
+ else:
+ drop_ids = force_drop_ids == 1
+ labels = torch.where(drop_ids, self.num_classes, labels)
+ return labels
+
+ def forward(self, labels, train, force_drop_ids=None):
+ use_dropout = self.dropout_prob > 0
+ if (train and use_dropout) or (force_drop_ids is not None):
+ labels = self.token_drop(labels, force_drop_ids)
+ embeddings = self.embedding_table(labels)
+ return embeddings
+
+
+class ClipProjector(nn.Module):
+
+ def __init__(self, transformer_width, embed_dim, tx_width, *args,
+ **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ '''a CLIP text encoder projector, adapted from CLIP.encode_text
+ '''
+
+ self.text_projection = nn.Parameter(
+ torch.empty(transformer_width, embed_dim))
+ nn.init.normal_(self.text_projection, std=tx_width**-0.5)
+
+ def forward(self, clip_text_x):
+ return clip_text_x @ self.text_projection
+
+
+def approx_gelu():
+ return nn.GELU(approximate="tanh")
+
+
+class CaptionEmbedder(nn.Module):
+ """
+ copied from https://github.com/hpcaitech/Open-Sora
+
+ Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance.
+ """
+
+ def __init__(self,
+ in_channels,
+ hidden_size,
+ act_layer=approx_gelu, # pass the factory: timm's Mlp instantiates act_layer()
+ token_num=120):
+ super().__init__()
+
+ self.y_proj = Mlp(in_features=in_channels,
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=act_layer,
+ drop=0)
+ # self.register_buffer("y_embedding", nn.Parameter(torch.randn(token_num, in_channels) / in_channels**0.5))
+ # self.uncond_prob = uncond_prob
+
+ # def token_drop(self, caption, force_drop_ids=None):
+ # """
+ # Drops labels to enable classifier-free guidance.
+ # """
+ # if force_drop_ids is None:
+ # drop_ids = torch.rand(caption.shape[0]).cuda() < self.uncond_prob
+ # else:
+ # drop_ids = force_drop_ids == 1
+ # caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption)
+ # return caption
+
+ def forward(self, caption, **kwargs):
+ # if train:
+ # assert caption.shape[2:] == self.y_embedding.shape
+ # use_dropout = self.uncond_prob > 0
+ # if (train and use_dropout) or (force_drop_ids is not None):
+ # caption = self.token_drop(caption, force_drop_ids)
+ caption = self.y_proj(caption)
+ return caption
+
+
+#################################################################################
+# Core DiT Model #
+#################################################################################
+
+
+class DiTBlock(nn.Module):
+ """
+ A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
+ """
+
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ mlp_ratio=4.0,
+ context_dim=None,
+ # enable_rmsnorm=False,
+ norm_type='layernorm',
+ qk_norm=True,
+ # enable_rope=False,
+ **block_kwargs):
+ super().__init__()
+ # st()
+ assert qk_norm
+ # nn.LayerNorm
+ if norm_type == 'layernorm':
+ self.norm1 = LayerNorm(
+ hidden_size,
+ # affine=False,
+ elementwise_affine=False,
+ eps=1e-6)
+ self.norm2 = LayerNorm(
+ hidden_size,
+ # affine=False,
+ elementwise_affine=False,
+ eps=1e-6)
+ else:
+ assert norm_type == 'rmsnorm' # more robust to bf16 training.
+ self.norm1 = RMSNorm(hidden_size, eps=1e-5)
+ self.norm2 = RMSNorm(hidden_size, eps=1e-5)
+
+ # st()
+ self.attn = Attention(hidden_size,
+ num_heads=num_heads,
+ qkv_bias=True,
+ qk_norm=qk_norm,
+ **block_kwargs)
+ # mlp_hidden_dim = int(hidden_size * mlp_ratio)
+ # approx_gelu = lambda: nn.GELU(approximate="tanh")
+
+ # self.mlp = Mlp(in_features=hidden_size,
+ # hidden_features=mlp_hidden_dim,
+ # act_layer=approx_gelu,
+ # drop=0)
+
+ self.mlp = fused_mlp.FusedMLP(
+ dim_model=hidden_size,
+ dropout=0,
+ activation=Activation.GeLU,
+ hidden_layer_multiplier=int(mlp_ratio),
+ )
+
+ self.adaLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 6 * hidden_size, bias=True))
+
+ def forward(self, x, c):
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ c).chunk(6, dim=1)
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ modulate(self.norm1(x), shift_msa, scale_msa))
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+ return x
+
+
+class TextCondDiTBlock(DiTBlock):
+ # https://github.com/hpcaitech/Open-Sora/blob/68b8f60ff0ff4b3a3b63fe1d8cb17d66b7845ef7/opensora/models/stdit/stdit.py#L69
+ def __init__(self, hidden_size, num_heads, mlp_ratio=4, **block_kwargs):
+ super().__init__(hidden_size, num_heads, mlp_ratio, **block_kwargs)
+ self.cross_attn = MemoryEfficientCrossAttention(query_dim=hidden_size,
+ heads=num_heads)
+ # self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size**0.5)
+
+ def forward(self, x, t, context):
+ # B, N, C = x.shape
+
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # self.scale_shift_table[None] + t.reshape(B,6,-1)).chunk(6, dim=1)
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ t).chunk(6, dim=1)
+
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ modulate(self.norm1(x), shift_msa, scale_msa))
+
+ # add text embedder via pre-norm cross attention
+ x = x + self.cross_attn(x, context)
+
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+
+class PixelArtTextCondDiTBlock(DiTBlock):
+ # 1. add shared AdaLN
+ # 2. add return pooled vector token (in the outer loop already)
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ mlp_ratio=4,
+ context_dim=None,
+ qk_norm=True,
+ **block_kwargs):
+ super().__init__(hidden_size,
+ num_heads,
+ mlp_ratio,
+ norm_type='rmsnorm',
+ qk_norm=qk_norm,
+ **block_kwargs)
+ # super().__init__(hidden_size, num_heads, mlp_ratio, norm_type='layernorm', **block_kwargs)
+ self.cross_attn = MemoryEfficientCrossAttention(
+ query_dim=hidden_size, context_dim=context_dim, heads=num_heads, qk_norm=qk_norm) # ! force QK_Norm
+ self.scale_shift_table = nn.Parameter(
+ torch.randn(6, hidden_size) / hidden_size**0.5)
+ self.adaLN_modulation = None
+ self.attention_y_norm = RMSNorm(
+ context_dim, eps=1e-5
+ ) # https://github.com/Alpha-VLLM/Lumina-T2X/blob/0c8dd6a07a3b7c18da3d91f37b1e00e7ae661293/lumina_t2i/models/model.py#L570C9-L570C61
+
+ self.prenorm_ca_text = RMSNorm(hidden_size, eps=1e-5)
+
+ def forward(self, x, t, context):
+ B, N, C = x.shape
+
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # self.scale_shift_table[None] + t.reshape(B,6,-1)).chunk(6, dim=1)
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # t).chunk(6, dim=1)
+
+ x = x + gate_msa * self.attn(
+ t2i_modulate(self.norm1(x), shift_msa, scale_msa))
+
+ # add text embedder via cross attention
+ x = x + self.cross_attn(self.prenorm_ca_text(x), self.attention_y_norm(context))
+
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
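+# Here t is the model-level adaLN output of shape (B, 6*hidden_size), computed once
+# by the parent model (t0 = adaLN_modulation(t)); each block only adds its own
+# learnable (6, hidden_size) scale_shift_table before splitting into the six
+# modulation terms. This shared single-adaLN is the PixArt parameter-saving trick.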
+
+
+class MMTextCondDiTBlock(DiTBlock):
+ # follow SD-3
+ def __init__(self, hidden_size, num_heads, mlp_ratio=4, **block_kwargs):
+ super().__init__(hidden_size, num_heads, mlp_ratio, **block_kwargs)
+ self.cross_attn = MemoryEfficientCrossAttention(query_dim=hidden_size,
+ heads=num_heads)
+ # self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size**0.5)
+
+ self.adaLN_modulation_img = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 6 * hidden_size, bias=True))
+
+ self.mlp_img = fused_mlp.FusedMLP(
+ dim_model=hidden_size,
+ dropout=0,
+ activation=Activation.GeLU,
+ hidden_layer_multiplier=int(mlp_ratio),
+ )
+
+ def forward(self, x, t, context):
+ # B, N, C = x.shape
+
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # self.scale_shift_table[None] + t.reshape(B,6,-1)).chunk(6, dim=1)
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ t).chunk(6, dim=1)
+
+ # TODO, batch inference with above
+ shift_msa_img, scale_msa_img, gate_msa_img, shift_mlp_img, scale_mlp_img, gate_mlp_img = self.adaLN_modulation_img(
+ t).chunk(6, dim=1)
+
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ modulate(self.norm1(x), shift_msa, scale_msa))
+
+ # add text embedder via cross attention
+ x = x + self.cross_attn(x, context)
+
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+
+# for image condition
+
+
+class ImageCondDiTBlock(DiTBlock):
+ # follow EMU and SVD, concat + cross attention. Also adopted by concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ # enable_rmsnorm=False,
+ qk_norm=True,
+ enable_rope=False,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm,
+ enable_rope=enable_rope,
+ **block_kwargs)
+ assert qk_norm
+ self.cross_attn = MemoryEfficientCrossAttention(
+ query_dim=hidden_size,
+ context_dim=context_dim, # ! mv-cond
+ # context_dim=1280, # clip vit-G, adopted by SVD.
+ # context_dim=1024, # clip vit-L
+ heads=num_heads,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm,
+ enable_rope=enable_rope)
+ assert qk_norm
+ # self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size**0.5)
+
+ self.attention_y_norm = RMSNorm(
+ 1024, eps=1e-5
+ ) # https://github.com/Alpha-VLLM/Lumina-T2X/blob/0c8dd6a07a3b7c18da3d91f37b1e00e7ae661293/lumina_t2i/models/model.py#L570C9-L570C61
+
+ def forward(self, x, t, dino_spatial_token, clip_spatial_token):
+ # B, N, C = x.shape
+ # assert isinstance(context, dict)
+ # assert isinstance(context, dict) # clip + dino.
+
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # self.scale_shift_table[None] + t.reshape(B,6,-1)).chunk(6, dim=1)
+
+ # TODO t is t + [clip_cls] here. update in base class.
+
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ t).chunk(6, dim=1)
+
+ post_modulate_selfattn_feat = torch.cat([
+ modulate(self.norm1(x), shift_msa, scale_msa), dino_spatial_token
+ ],
+ dim=1) # concat in L dim
+
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ post_modulate_selfattn_feat
+ )[:, :x.shape[1]] # remove dino-feat to maintain unchanged dimension.
+
+ # add clip_i spatial embedder via cross attention
+ x = x + self.cross_attn(x, self.attention_y_norm(clip_spatial_token))
+
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+
+class ImageCondDiTBlockPixelArt(ImageCondDiTBlock):
+ # follow EMU and SVD, concat + cross attention. Also adopted by concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ # enable_rmsnorm=False,
+ qk_norm=False,
+ norm_type='rmsnorm',
+ **block_kwargs):
+ # super().__init__(hidden_size, num_heads, mlp_ratio, enable_rmsnorm=True, **block_kwargs)
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ qk_norm=True, # otherwise AMP fail
+ norm_type=norm_type,
+ **block_kwargs)
+ self.scale_shift_table = nn.Parameter(
+ torch.randn(6, hidden_size) / hidden_size**0.5)
+ self.adaLN_modulation = None # single-adaLN
+ # self.attention_y_norm = RMSNorm(
+ # 1024, eps=1e-5
+ # ) # https://github.com/Alpha-VLLM/Lumina-T2X/blob/0c8dd6a07a3b7c18da3d91f37b1e00e7ae661293/lumina_t2i/models/model.py#L570C9-L570C61
+
+ def forward(self, x, t, dino_spatial_token, clip_spatial_token):
+ B, N, C = x.shape
+ # assert isinstance(context, dict)
+ # assert isinstance(context, dict) # clip + dino.
+
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # self.scale_shift_table[None] + t.reshape(B,6,-1)).chunk(6, dim=1)
+
+ # TODO t is t + [clip_cls] here. update in base class.
+
+ # shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ # t).chunk(6, dim=1)
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+ # st()
+
+ post_modulate_selfattn_feat = torch.cat([
+ t2i_modulate(self.norm1(x), shift_msa, scale_msa),
+ dino_spatial_token
+ ],
+ dim=1) # concat in L dim
+
+ # x = x + gate_msa.unsqueeze(1) * self.attn(
+ x = x + gate_msa * self.attn(post_modulate_selfattn_feat)[:, :x.shape[
+ 1]] # remove dino-feat to maintain unchanged dimension.
+
+ # add clip_i spatial embedder via cross attention
+ x = x + self.cross_attn(x, clip_spatial_token) # attention_y_norm not required, since x_norm_patchtokens?
+ # x = x + self.cross_attn(x, self.attention_y_norm(clip_spatial_token)) # attention_y_norm not required, since x_norm_patchtokens?
+
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+class ImageCondDiTBlockPixelArtNoclip(ImageCondDiTBlock):
+ # follow EMU and SVD, concat + cross attention. Also adopted by concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ # enable_rmsnorm=False,
+ qk_norm=False,
+ **block_kwargs):
+ # super().__init__(hidden_size, num_heads, mlp_ratio, enable_rmsnorm=True, **block_kwargs)
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ qk_norm=True, # otherwise AMP fail
+ **block_kwargs)
+ self.scale_shift_table = nn.Parameter(
+ torch.randn(6, hidden_size) / hidden_size**0.5)
+ self.adaLN_modulation = None # single-adaLN
+ # self.attention_y_norm = RMSNorm(
+ # 1024, eps=1e-5
+ # ) # https://github.com/Alpha-VLLM/Lumina-T2X/blob/0c8dd6a07a3b7c18da3d91f37b1e00e7ae661293/lumina_t2i/models/model.py#L570C9-L570C61
+
+ # def _forward_cross_attention(self, x, tokens):
+ # x = x + self.cross_attn(x, tokens) # attention_y_norm not required, since x_norm_patchtokens?
+ # return x
+
+ # def forward(self, x, t, dino_spatial_token, clip_spatial_token):
+ def forward(self, x, t, dino_spatial_token):
+ B, N, C = x.shape
+
+ # TODO: t here is t + [clip_cls]; update in the base class.
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+
+ post_modulate_selfattn_feat = t2i_modulate(self.norm1(x), shift_msa, scale_msa)
+
+ x = x + gate_msa * self.attn(post_modulate_selfattn_feat)
+
+ # add the DINO spatial tokens via cross attention
+ x = x + self.cross_attn(x, dino_spatial_token) # attention_y_norm not required, since x_norm_patchtokens?
+
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+
+class ImageCondDiTBlockPixelArtRMSNorm(ImageCondDiTBlockPixelArt):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ # enable_rmsnorm=False,
+ norm_type='rmsnorm',
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ norm_type=norm_type,
+ **block_kwargs)
+
+
+class ImageCondDiTBlockPixelArtRMSNormNoClip(ImageCondDiTBlockPixelArtNoclip):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False, # kept commented, as in the sibling classes; the base block does not accept it
+ norm_type='rmsnorm',
+ **block_kwargs)
+
+# support more conditions
+class ImageCondDiTBlockPixelArtRMSNormClay(ImageCondDiTBlockPixelArtRMSNorm):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ qk_norm=True,
+ norm_type='rmsnorm',
+ enable_text_ca=False,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ norm_type=norm_type,
+ qk_norm=qk_norm,
+ **block_kwargs)
+
+ # ca for text
+ self.cross_attn_dino = MemoryEfficientCrossAttention(
+ query_dim=hidden_size,
+ context_dim=context_dim,
+ heads=num_heads,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm,
+ dim_head=hidden_size//num_heads)
+ # if enable_text_ca:
+ # self.cross_attn_text = MemoryEfficientCrossAttention(
+ # query_dim=hidden_size,
+ # context_dim=context_dim,
+ # heads=num_heads,
+ # # enable_rmsnorm=enable_rmsnorm,
+ # qk_norm=qk_norm)
+ # else:
+ self.cross_attn_text = lambda x, context: torch.zeros_like(x) # always return 0
+
+
+ def forward(self, x, t, dino_spatial_token, clip_spatial_token, clip_caption_token):
+ B, N, C = x.shape
+
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+
+ post_modulate_selfattn_feat = t2i_modulate(self.norm1(x), shift_msa, scale_msa)
+
+ x = x + gate_msa * self.attn(post_modulate_selfattn_feat)
+
+ # can be accelerated by parallel attention
+ x = x + self.cross_attn(x, clip_spatial_token) + self.cross_attn_dino(x, dino_spatial_token) + self.cross_attn_text(x, clip_caption_token)
+
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+# lrm-like, CA-SA-FFN
+class ImageCondDiTBlockPixelArtRMSNormClayLRM(ImageCondDiTBlockPixelArtRMSNorm):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ qk_norm=True,
+ norm_type='rmsnorm',
+ enable_text_ca=False,
+ enable_rope=False,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ norm_type=norm_type,
+ qk_norm=qk_norm,
+ enable_rope=enable_rope,
+ **block_kwargs)
+
+ # ca for text
+ self.cross_attn_dino = MemoryEfficientCrossAttention(
+ query_dim=hidden_size,
+ context_dim=context_dim,
+ heads=num_heads,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm,
+ dim_head=hidden_size//num_heads,
+ enable_rope=enable_rope)
+ self.prenorm_ca_dino = RMSNorm(hidden_size, eps=1e-5)
+ # if enable_text_ca:
+ # self.cross_attn_text = MemoryEfficientCrossAttention(
+ # query_dim=hidden_size,
+ # context_dim=context_dim,
+ # heads=num_heads,
+ # # enable_rmsnorm=enable_rmsnorm,
+ # qk_norm=qk_norm)
+ # else:
+ self.cross_attn_text = lambda x, context, freqs_cis: torch.zeros_like(x) # always return 0
+ self.prenorm_ca_text = nn.Identity()
+ del self.cross_attn # not for text now
+
+
+ def forward(self, x, t, dino_spatial_token, clip_caption_token=None, freqs_cis=None, **kwargs):
+ B, N, C = x.shape
+
+ # ! prepare adaLN
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+
+ # ! CA for spatial info (img, mv-img, etc.)
+ x = x + self.cross_attn_dino(self.prenorm_ca_dino(x), dino_spatial_token, freqs_cis=freqs_cis) # ! pre-norm & residual, as in CLAY
+
+ # ! SA
+ x = x + gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa), freqs_cis=freqs_cis)
+
+ # ! CA for text
+ x = x + self.cross_attn_text(self.prenorm_ca_text(x), clip_caption_token, freqs_cis=freqs_cis)
+
+ # ! FFN
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+# support more conditions
+class ImageCondDiTBlockPixelArtRMSNormClayText(PixelArtTextCondDiTBlock):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ qk_norm=True,
+ # norm_type='rmsnorm',
+ enable_text_ca=False,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ # norm_type=norm_type,
+ qk_norm=qk_norm,
+ **block_kwargs)
+
+ # dino_hidden_size = 768
+ dino_hidden_size = 384 # DiT-S width, as in CLAY
+ self.cross_attn_dino = MemoryEfficientCrossAttention(
+ query_dim=hidden_size,
+ context_dim=dino_hidden_size,
+ heads=num_heads,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm)
+ self.prenorm_ca_dino = RMSNorm(hidden_size, eps=1e-5)
+
+
+ def forward(self, x, t, dino_spatial_token, clip_caption_token=None, **kwargs):
+ B, N, C = x.shape
+
+ # ! prepare adaLN
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+
+ # ! SA
+ x = x + gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa))
+
+ # ! add more conditions (text CA + DINO spatial CA) through residuals
+ x = x + self.cross_attn(self.prenorm_ca_text(x), self.attention_y_norm(clip_caption_token)) + self.cross_attn_dino(self.prenorm_ca_dino(x), dino_spatial_token)
+
+ # ! FFN
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+
+# support more conditions
+class ImageCondDiTBlockPixelArtRMSNormClayMV(ImageCondDiTBlockPixelArtRMSNormClayLRM):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ qk_norm=True,
+ # norm_type='rmsnorm',
+ enable_text_ca=False,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ # norm_type=norm_type,
+ qk_norm=qk_norm,
+ **block_kwargs)
+
+ # dino_hidden_size = 768
+ dino_hidden_size = 384 # DiT-S width, as in CLAY
+ self.cross_attn_dino_mv = MemoryEfficientCrossAttention(
+ query_dim=hidden_size,
+ context_dim=dino_hidden_size,
+ heads=num_heads,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm)
+ self.prenorm_ca_dino_mv = RMSNorm(hidden_size, eps=1e-5)
+ # self.mv_alpha = nn.Parameter(torch.zeros(1)-2.5) # per-layer blending into the original parameters, like zero-conv.
+ # self.mv_alpha = nn.Parameter(torch.zeros(1)+2.5) # per-layer blending into the original parameters, like zero-conv.
+ self.mv_alpha = nn.Parameter(torch.zeros(1)) # per-layer blending into the original parameters, like zero-conv.
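+ # tanh(0) = 0, so the MV branch contributes nothing at initialization and
+ # is blended in gradually during training (zero-conv-style gating)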
+
+
+ def forward(self, x, t, dino_spatial_token, dino_mv_spatial_token=None, **kwargs):
+ B, N, C = x.shape
+
+ # ! prepare adaLN
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+
+ # ! CA for spatial info (img, mv-img, etc.); the MV branch is gated by tanh(mv_alpha)
+ x = x + self.cross_attn_dino(self.prenorm_ca_dino(x), dino_spatial_token) + self.cross_attn_dino_mv(self.prenorm_ca_dino_mv(x), dino_mv_spatial_token) * torch.tanh(self.mv_alpha)
+
+ # ! SA
+ x = x + gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa))
+
+ # # ! CA for text
+ # x = x + self.cross_attn_text(self.prenorm_ca_text(x), clip_caption_token)
+
+ # ! FFN
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+# no i23d ca
+class ImageCondDiTBlockPixelArtRMSNormClayMV_noi23d(ImageCondDiTBlockPixelArtRMSNormClayLRM):
+ # Follows EMU and SVD: concat + cross-attention. Also adopted by the concurrent work Direct3D.
+ def __init__(self,
+ hidden_size,
+ num_heads,
+ context_dim,
+ mlp_ratio=4,
+ qk_norm=True,
+ # norm_type='rmsnorm',
+ enable_text_ca=False,
+ **block_kwargs):
+ super().__init__(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ # enable_rmsnorm=False,
+ # norm_type=norm_type,
+ qk_norm=qk_norm,
+ **block_kwargs)
+
+ # del self.cross_attn_dino, self.prenorm_ca_dino
+ del self.cross_attn_dino
+ self.prenorm_ca_dino_mv = self.prenorm_ca_dino
+ # dino_hidden_size = 768
+ dino_hidden_size = 384 # DiT-S width, as in CLAY
+ self.cross_attn_dino_mv = MemoryEfficientCrossAttention(
+ query_dim=hidden_size,
+ context_dim=dino_hidden_size,
+ heads=num_heads,
+ # enable_rmsnorm=enable_rmsnorm,
+ qk_norm=qk_norm)
+ # no mv_alpha gating here: MV cross-attention is the only spatial conditioning pathway
+
+
+ def forward(self, x, t, dino_mv_spatial_token, **kwargs):
+ B, N, C = x.shape
+
+ # ! prepare adaLN
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
+ self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1)
+
+ # ! CA for spatial info: MV tokens only (the single-image i23d CA is removed)
+ x = x + self.cross_attn_dino_mv(self.prenorm_ca_dino_mv(x), dino_mv_spatial_token)
+
+ # ! SA
+ x = x + gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa))
+
+ # # ! CA for text
+ # x = x + self.cross_attn_text(self.prenorm_ca_text(x), clip_caption_token)
+
+ # ! FFN
+ x = x + gate_mlp * self.mlp(
+ t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))
+
+ return x
+
+
+
+
+class DiTBlockRollOut(DiTBlock):
+ """
+ A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
+ """
+
+ def __init__(self, hidden_size, num_heads, mlp_ratio=4, **block_kwargs):
+ super().__init__(hidden_size * 3, num_heads, mlp_ratio, **block_kwargs)
+
+ def forward(self, x, c):
+ shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(
+ c).chunk(6, dim=1)
+ x = x + gate_msa.unsqueeze(1) * self.attn(
+ modulate(self.norm1(x), shift_msa, scale_msa))
+ x = x + gate_mlp.unsqueeze(1) * self.mlp(
+ modulate(self.norm2(x), shift_mlp, scale_mlp))
+ return x
+
+class FinalLayer(nn.Module):
+ """
+ The final layer of DiT, basically the decoder_pred in MAE with adaLN.
+ """
+
+ def __init__(self, hidden_size, patch_size, out_channels):
+ super().__init__()
+ self.norm_final = LayerNorm(
+ hidden_size,
+ elementwise_affine=False, # apex or nn kernel
+ eps=1e-6)
+ self.linear = nn.Linear(hidden_size,
+ patch_size * patch_size * out_channels,
+ bias=True)
+ self.adaLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 2 * hidden_size, bias=True))
+
+ def forward(self, x, c):
+ shift, scale = self.adaLN_modulation(c).chunk(2, dim=1)
+ x = modulate(self.norm_final(x), shift, scale)
+ x = self.linear(x)
+ return x
+
+
+class DiT(nn.Module):
+ """
+ Diffusion model with a Transformer backbone.
+ """
+
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4.0,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=None,
+ roll_out=False,
+ vit_blk=DiTBlock,
+ # vit_blk=TextCondDiTBlock,
+ final_layer_blk=FinalLayer,
+ enable_rope=False,
+ ):
+ super().__init__()
+ self.plane_n = 3
+
+ self.depth = depth
+ self.mlp_ratio = mlp_ratio
+ self.learn_sigma = learn_sigma
+ self.in_channels = in_channels
+ self.out_channels = in_channels * 2 if learn_sigma else in_channels
+ self.patch_size = patch_size
+ self.num_heads = num_heads
+ self.embed_dim = hidden_size
+
+ self.x_embedder = PatchEmbed(input_size,
+ patch_size,
+ in_channels,
+ hidden_size,
+ bias=True)
+ self.t_embedder = TimestepEmbedder(hidden_size)
+ if num_classes > 0:
+ self.y_embedder = LabelEmbedder(num_classes, hidden_size,
+ class_dropout_prob)
+ else:
+ self.y_embedder = None
+
+ if context_dim is not None:
+ self.clip_text_proj = CaptionEmbedder(context_dim,
+ hidden_size,
+ act_layer=approx_gelu)
+
+ else:
+ self.clip_text_proj = None
+
+ self.roll_out = roll_out
+
+ num_patches = self.x_embedder.num_patches # 14*14*3
+ # Will use fixed sin-cos embedding:
+ self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size),
+ requires_grad=False)
+
+ # if not self.roll_out:
+ self.blocks = nn.ModuleList([
+ vit_blk(hidden_size=hidden_size,
+ num_heads=num_heads,
+ mlp_ratio=mlp_ratio,
+ context_dim=context_dim,
+ enable_rope=enable_rope) for _ in range(depth)
+ ])
+ # else:
+ # self.blocks = nn.ModuleList([
+ # DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) if idx % 2 == 0 else
+ # DiTBlockRollOut(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ # for idx in range(depth)
+ # ])
+
+ self.final_layer = final_layer_blk(hidden_size, patch_size,
+ self.out_channels)
+ self.initialize_weights()
+
+ # self.mixed_prediction = mixed_prediction # This enables mixed prediction
+ # if self.mixed_prediction:
+ # if self.roll_out:
+ # logit_ch = in_channels * 3
+ # else:
+ # logit_ch = in_channels
+ # init = mixing_logit_init * torch.ones(
+ # size=[1, logit_ch, 1, 1]) # hard coded for now
+ # self.mixing_logit = torch.nn.Parameter(init, requires_grad=True)
+
+ # def len(self):
+ # return len(self.blocks)
+
+ def initialize_weights(self):
+ # Initialize transformer layers:
+ def _basic_init(module):
+ if isinstance(module, nn.Linear):
+ torch.nn.init.xavier_uniform_(module.weight)
+ if module.bias is not None:
+ nn.init.constant_(module.bias, 0)
+
+ self.apply(_basic_init)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ pos_embed = get_2d_sincos_pos_embed(
+ self.pos_embed.shape[-1], int(self.x_embedder.num_patches**0.5))
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ # Initialize patch_embed like nn.Linear (instead of nn.Conv2d):
+ w = self.x_embedder.proj.weight.data
+ nn.init.xavier_uniform_(w.view([w.shape[0], -1]))
+ nn.init.constant_(self.x_embedder.proj.bias, 0)
+
+ # Initialize label embedding table:
+ if self.y_embedder is not None:
+ nn.init.normal_(self.y_embedder.embedding_table.weight, std=0.02)
+
+ # Initialize timestep embedding MLP:
+ nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
+ nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)
+
+ # Zero-out adaLN modulation layers in DiT blocks:
+ for block in self.blocks:
+ if block.adaLN_modulation is not None:
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
+
+ # Zero-out output layers:
+ if self.final_layer.adaLN_modulation is not None:
+ nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)
+
+ nn.init.constant_(self.final_layer.linear.weight, 0)
+ nn.init.constant_(self.final_layer.linear.bias, 0)
+
+ def unpatchify(self, x):
+ """
+ x: (N, T, patch_size**2 * C)
+ imgs: (N, C, H, W)
+ """
+ c = self.out_channels
+ # p = self.x_embedder.patch_size[0]
+ p = self.patch_size
+ h = w = int(x.shape[1]**0.5)
+ assert h * w == x.shape[1]
+
+ x = x.reshape(shape=(x.shape[0], h, w, p, p, c))
+ x = torch.einsum('nhwpqc->nchpwq', x)
+ imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p))
+ return imgs
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+
+ if get_attr != '': # access attributes without breaking the forward hooks
+ return getattr(self, get_attr)
+
+ t = self.t_embedder(timesteps) # (N, D)
+
+ if self.roll_out: # !
+ x = rearrange(x, 'b (c n) h w->(b n) c h w', n=3)
+
+ x = self.x_embedder(
+ x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ if self.roll_out: # ! roll-out in the L dim, not B dim. add condition to all tokens.
+ x = rearrange(x, '(b n) l c ->b (n l) c', n=3)
+
+ # if self.y_embedder is not None:
+ # assert y is not None
+ # y = self.y_embedder(y, self.training) # (N, D)
+ # c = t + y # (N, D)
+
+ assert context is not None
+
+ # assert context.ndim == 2
+ if isinstance(context, dict):
+ context = context['crossattn'] # sgm conditioner compat
+ context = self.clip_text_proj(context)
+
+ # c = t + context
+ # else:
+ # c = t # BS 1024
+
+ for blk_idx, block in enumerate(self.blocks):
+ # if self.roll_out:
+ # if blk_idx % 2 == 0: # with-in plane self attention
+ # x = rearrange(x, 'b (n l) c -> b l (n c) ', n=3)
+ # x = block(x, torch.repeat_interleave(c, 3, 0)) # (N, T, D)
+ # else: # global attention
+ # # x = rearrange(x, '(b n) l c -> b (n l) c ', n=3)
+ # x = rearrange(x, 'b l (n c) -> b (n l) c ', n=3)
+ # x = block(x, c) # (N, T, D)
+ # else:
+ x = block(x, t, context) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32)
+
+ return x
+
+ def forward_with_cfg(self, x, t, y, cfg_scale):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ half = x[:len(x) // 2]
+ combined = torch.cat([half, half], dim=0)
+ model_out = self.forward(combined, t, y)
+ # For exact reproducibility reasons, we apply classifier-free guidance on only
+ # three channels by default. The standard approach to cfg applies it to all channels.
+ # This can be done by uncommenting the following line and commenting-out the line following that.
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ eps, rest = model_out[:, :3], model_out[:, 3:]
+ cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ eps = torch.cat([half_eps, half_eps], dim=0)
+ return torch.cat([eps, rest], dim=1)
+
+ def forward_with_cfg_unconditional(self, x, t, y=None, cfg_scale=None):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ return self.forward(x, t, y)
+
+
+#################################################################################
+# Sine/Cosine Positional Embedding Functions #
+#################################################################################
+# https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py
+
+
+def get_2d_sincos_pos_embed(embed_dim,
+ grid_size,
+ cls_token=False,
+ extra_tokens=0):
+ """
+ grid_size: int of the grid height and width
+ return:
+ pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token)
+ """
+ if isinstance(grid_size, tuple):
+ grid_size_h, grid_size_w = grid_size
+ grid_h = np.arange(grid_size_h, dtype=np.float32)
+ grid_w = np.arange(grid_size_w, dtype=np.float32)
+ else:
+ grid_size_h = grid_size_w = grid_size
+ grid_h = np.arange(grid_size, dtype=np.float32)
+ grid_w = np.arange(grid_size, dtype=np.float32)
+
+ grid = np.meshgrid(grid_w, grid_h) # here w goes first
+ grid = np.stack(grid, axis=0)
+
+ grid = grid.reshape([2, 1, grid_size_h, grid_size_w])
+ pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)
+ if cls_token and extra_tokens > 0:
+ pos_embed = np.concatenate(
+ [np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0)
+ return pos_embed
+
+
+def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):
+ assert embed_dim % 2 == 0
+
+ # use half of dimensions to encode grid_h
+ emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2,
+ grid[0]) # (H*W, D/2)
+ emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2,
+ grid[1]) # (H*W, D/2)
+
+ emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D)
+ return emb
+
+
+def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
+ """
+ embed_dim: output dimension for each position
+ pos: a list of positions to be encoded: size (M,)
+ out: (M, D)
+ """
+ assert embed_dim % 2 == 0
+ omega = np.arange(embed_dim // 2, dtype=np.float64)
+ omega /= embed_dim / 2.
+ omega = 1. / 10000**omega # (D/2,)
+
+ pos = pos.reshape(-1) # (M,)
+ out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product
+
+ emb_sin = np.sin(out) # (M, D/2)
+ emb_cos = np.cos(out) # (M, D/2)
+
+ emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D)
+ return emb
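+
+# Usage sketch (illustrative): a 16x16 token grid with embed_dim=768 yields a
+# (256, 768) table; half of the channels encode one grid axis, half the other.
+#   pos = get_2d_sincos_pos_embed(768, 16)  # numpy array, shape (256, 768)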
+
+
+#################################################################################
+# DiT Configs #
+#################################################################################
+
+
+def DiT_XL_2(**kwargs):
+ return DiT(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_XL_4(**kwargs):
+ return DiT(depth=28,
+ hidden_size=1152,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_XL_8(**kwargs):
+ return DiT(depth=28,
+ hidden_size=1152,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_2(**kwargs):
+ return DiT(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_4(**kwargs):
+ return DiT(depth=24,
+ hidden_size=1024,
+ patch_size=4,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_8(**kwargs):
+ return DiT(depth=24,
+ hidden_size=1024,
+ patch_size=8,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_B_2(**kwargs):
+ return DiT(depth=12, hidden_size=768, patch_size=2, num_heads=12, **kwargs)
+
+
+def DiT_B_4(**kwargs):
+ return DiT(depth=12, hidden_size=768, patch_size=4, num_heads=12, **kwargs)
+
+
+def DiT_B_8(**kwargs):
+ return DiT(depth=12, hidden_size=768, patch_size=8, num_heads=12, **kwargs)
+
+
+def DiT_B_16(**kwargs): # our config
+ return DiT(depth=12,
+ hidden_size=768,
+ patch_size=16,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT_S_2(**kwargs):
+ return DiT(depth=12, hidden_size=384, patch_size=2, num_heads=6, **kwargs)
+
+
+def DiT_S_4(**kwargs):
+ return DiT(depth=12, hidden_size=384, patch_size=4, num_heads=6, **kwargs)
+
+
+def DiT_S_8(**kwargs):
+ return DiT(depth=12, hidden_size=384, patch_size=8, num_heads=6, **kwargs)
+
+
+DiT_models = {
+ 'DiT-XL/2': DiT_XL_2,
+ 'DiT-XL/4': DiT_XL_4,
+ 'DiT-XL/8': DiT_XL_8,
+ 'DiT-L/2': DiT_L_2,
+ 'DiT-L/4': DiT_L_4,
+ 'DiT-L/8': DiT_L_8,
+ 'DiT-B/2': DiT_B_2,
+ 'DiT-B/4': DiT_B_4,
+ 'DiT-B/8': DiT_B_8,
+ 'DiT-B/16': DiT_B_16,
+ 'DiT-S/2': DiT_S_2,
+ 'DiT-S/4': DiT_S_4,
+ 'DiT-S/8': DiT_S_8,
+}
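+
+# Usage sketch (hypothetical values, for illustration only):
+#   model = DiT_models['DiT-B/2'](input_size=32, in_channels=4,
+#                                 num_classes=0, context_dim=768)
+#   x = torch.randn(2, 4, 32, 32)
+#   ctx = torch.randn(2, 77, 768)  # e.g. CLIP text tokens
+#   out = model(x, timesteps=torch.randint(0, 1000, (2,)), context=ctx)
+#   # out: (2, 8, 32, 32), since learn_sigma=True doubles the channels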
diff --git a/dit/dit_trilatent.py b/dit/dit_trilatent.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f71234856ea0172fc5b20b70f727668d40efa57
--- /dev/null
+++ b/dit/dit_trilatent.py
@@ -0,0 +1,536 @@
+import torch
+import torch.nn as nn
+from torch.nn import LayerNorm
+from einops import rearrange, repeat
+from timm.models.vision_transformer import Mlp
+
+from .dit_models_xformers import (DiT, get_2d_sincos_pos_embed, DiTBlock,
+ FinalLayer, PixelArtTextCondDiTBlock,
+ T2IFinalLayer, approx_gelu)
+from vit.vit_triplane import XYZPosEmbed
+
+class DiT_TriLatent(DiT):
+ # DiT with 3D-aware operations
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=None,
+ roll_out=False,
+ vit_blk=DiTBlock,
+ final_layer_blk=FinalLayer,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim, roll_out, vit_blk,
+ final_layer_blk)
+
+ assert self.roll_out
+
+ def init_PE_3D_aware(self):
+
+ self.pos_embed = nn.Parameter(torch.zeros(
+ 1, self.plane_n * self.x_embedder.num_patches, self.embed_dim),
+ requires_grad=False)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ p = int(self.x_embedder.num_patches**0.5)
+ D = self.pos_embed.shape[-1]
+ grid_size = (self.plane_n, p * p) # B n HW C
+
+ pos_embed = get_2d_sincos_pos_embed(D, grid_size).reshape(
+ self.plane_n * p * p, D) # H*W, D
+
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ def initialize_weights(self):
+ super().initialize_weights()
+
+ # ! add 3d-aware PE
+ self.init_PE_3D_aware()
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert context is not None
+
+ t = self.t_embedder(timesteps) # (N, D)
+
+ # roll the tri-plane latent out into the batch dim; each plane is then
+ # downsampled by the shared patch-embedding conv
+ x = rearrange(x, 'b (c n) h w -> (b n) c h w', n=3)
+ x = self.x_embedder(x) # (b n) c h/f w/f
+
+ x = rearrange(x, '(b n) l c -> b (n l) c', n=3)
+ x = x + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ # if self.roll_out: # ! roll-out in the L dim, not B dim. add condition to all tokens.
+ # x = rearrange(x, '(b n) l c ->b (n l) c', n=3)
+
+ # assert context.ndim == 2
+ if isinstance(context, dict):
+ context = context['crossattn'] # sgm conditioner compat
+
+ context = self.clip_text_proj(context)
+
+ # c = t + context
+ # else:
+ # c = t # BS 1024
+
+ for blk_idx, block in enumerate(self.blocks):
+ # alternating within-plane / global attention (disabled `if False` branch):
+ # if blk_idx % 2 == 0: # within-plane self attention
+ # x = rearrange(x, 'b (n l) c -> (b n) l c', n=3)
+ # x = block(x, repeat(t, 'b c -> (b n) c', n=3),
+ # repeat(context, 'b l c -> (b n) l c', n=3))
+ # else: # global attention
+ # x = rearrange(x, '(b n) l c -> b (n l) c', n=self.plane_n)
+ # x = block(x, t, context)
+ x = block(x, t, context) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+class DiT_TriLatent_PixelArt(DiT_TriLatent):
+ # DiT with 3D-aware operations
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=None,
+ roll_out=False,
+ vit_blk=DiTBlock,
+ final_layer_blk=FinalLayer,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim, roll_out, vit_blk,
+ final_layer_blk)
+
+ self.adaLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 6 * hidden_size, bias=True))
+ del self.clip_text_proj
+ self.cap_embedder = nn.Sequential( # zero-initialized below
+ LayerNorm(context_dim),
+ nn.Linear(
+ context_dim,
+ hidden_size,
+ ),
+ )
+ nn.init.constant_(self.cap_embedder[-1].weight, 0)
+ nn.init.constant_(self.cap_embedder[-1].bias, 0)
+
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert context is not None
+
+ clip_cls_token = self.cap_embedder(context['vector']) # pooled
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ # roll the tri-plane latent out into the batch dim; each plane is then
+ # downsampled by the shared patch-embedding conv
+ x = rearrange(x, 'b (c n) h w -> (b n) c h w', n=3)
+ x = self.x_embedder(x) # (b n) c h/f w/f
+
+ x = rearrange(x, '(b n) l c -> b (n l) c', n=3)
+ x = x + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+
+ # if self.roll_out: # ! roll-out in the L dim, not B dim. add condition to all tokens.
+ # x = rearrange(x, '(b n) l c ->b (n l) c', n=3)
+
+ # assert context.ndim == 2
+ if isinstance(context, dict):
+ context = context['crossattn'] # sgm conditioner compat
+
+ # context = self.clip_text_proj(context) # ! with rmsnorm here for
+
+ # c = t + context
+ # else:
+ # c = t # BS 1024
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, context) # (N, T, D)
+
+ # todo later
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, 'b (n l) c ->(b n) l c', n=3)
+
+ x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ if self.roll_out: # move n from L to B axis
+ x = rearrange(x, '(b n) c h w -> b (c n) h w', n=3)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+ # ! compat issue
+ def forward_with_cfg(self, x, t, context, cfg_scale):
+ """
+ Forward pass of SiT, but also batches the unconSiTional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ # half = x[: len(x) // 2]
+ # combined = torch.cat([half, half], dim=0)
+ eps = self.forward(x, t, context)
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ # eps, rest = model_out[:, :3], model_out[:, 3:]
+ cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ eps = torch.cat([half_eps, half_eps], dim=0)
+ return eps
+
+
+# PCD, general single-stage model.
+class DiT_PCD_PixelArt(DiT_TriLatent_PixelArt):
+ # DiT with 3D-aware operations
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=None,
+ roll_out=False,
+ vit_blk=PixelArtTextCondDiTBlock,
+ final_layer_blk=FinalLayer,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim, roll_out, vit_blk,
+ final_layer_blk)
+ # an MLP that lifts the 19-dim input point feature to the hidden dimension
+ self.x_embedder = Mlp(in_features=in_channels,
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+ del self.pos_embed
+
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert context is not None
+
+ clip_cls_token = self.cap_embedder(context['caption_vector']) # pooled
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ x = self.x_embedder(x)
+
+ # assert context.ndim == 2
+ if isinstance(context, dict):
+ context = context['caption_crossattn'] # sgm conditioner compat
+
+ # loop dit block
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, context) # (N, T, D)
+
+ x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels)
+
+ # cast to float32 for better accuracy
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+# ! second stage of the two-stage model; used for text pretraining.
+class DiT_PCD_PixelArt_tofeat(DiT_PCD_PixelArt):
+ # DiT with 3D-aware operations
+ def __init__(
+ self,
+ input_size=32,
+ patch_size=2,
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ mixing_logit_init=-3,
+ mixed_prediction=True,
+ context_dim=None,
+ roll_out=False,
+ vit_blk=DiTBlock,
+ final_layer_blk=FinalLayer,
+ use_pe_cond=True,
+ ):
+ super().__init__(input_size, patch_size, in_channels, hidden_size,
+ depth, num_heads, mlp_ratio, class_dropout_prob,
+ num_classes, learn_sigma, mixing_logit_init,
+ mixed_prediction, context_dim, roll_out, PixelArtTextCondDiTBlock,
+ final_layer_blk)
+
+ self.use_pe_cond = use_pe_cond
+ if use_pe_cond:
+ self.xyz_pos_embed = XYZPosEmbed(hidden_size)
+ else:
+ self.x_embedder = Mlp(in_features=in_channels+3,
+ hidden_features=hidden_size,
+ out_features=hidden_size,
+ act_layer=approx_gelu,
+ drop=0)
+
+
+ def forward(self,
+ x,
+ timesteps=None,
+ context=None,
+ y=None,
+ get_attr='',
+ **kwargs):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+ # t = timesteps
+ assert isinstance(context, dict)
+
+ clip_cls_token = self.cap_embedder(context['caption_vector']) # pooled
+ caption_crossattn, fps_xyz = context['caption_crossattn'], context['fps-xyz']
+
+ t = self.t_embedder(timesteps) + clip_cls_token # (N, D)
+ t0 = self.adaLN_modulation(t) # single-adaLN, B 6144
+
+ if self.use_pe_cond:
+ x = self.x_embedder(x) + self.xyz_pos_embed(fps_xyz) # point-wise addition
+ else: # use concat to add info
+ x = torch.cat([fps_xyz, x], dim=-1)
+ x = self.x_embedder(x)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_pre(x)
+
+ for blk_idx, block in enumerate(self.blocks):
+ x = block(x, t0, caption_crossattn)
+
+ # add a norm layer here, as in point-e
+ # x = self.ln_post(x)
+
+ x = self.final_layer(x, t) # no loss on the xyz side
+
+ x = x.to(torch.float32).contiguous()
+
+ return x
+
+
+
+#################################################################################
+# DiT_TriLatent Configs #
+#################################################################################
+
+
+def DiT_XL_2(**kwargs):
+ return DiT_TriLatent(depth=28,
+ hidden_size=1152,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_L_2(**kwargs):
+ return DiT_TriLatent(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ **kwargs)
+
+
+def DiT_B_2(**kwargs):
+ return DiT_TriLatent(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT_B_1(**kwargs):
+ return DiT_TriLatent(depth=12,
+ hidden_size=768,
+ patch_size=1,
+ num_heads=12,
+ **kwargs)
+
+
+def DiT_B_Pixelart_2(**kwargs):
+ return DiT_TriLatent_PixelArt(depth=12,
+ hidden_size=768,
+ patch_size=2,
+ num_heads=12,
+ # vit_blk=PixelArtTextCondDiTBlock,
+ final_layer_blk=T2IFinalLayer,
+ **kwargs)
+
+def DiT_L_Pixelart_2(**kwargs):
+ return DiT_TriLatent_PixelArt(depth=24,
+ hidden_size=1024,
+ patch_size=2,
+ num_heads=16,
+ # vit_blk=PixelArtTextCondDiTBlock,
+ final_layer_blk=T2IFinalLayer,
+ **kwargs)
+
+
+# PCD-DiT
+def DiT_PCD_B(**kwargs):
+
+ return DiT_PCD_PixelArt(depth=12,
+ hidden_size=768,
+ patch_size=1,
+ num_heads=12,
+ **kwargs)
+
+def DiT_PCD_L(**kwargs):
+
+ return DiT_PCD_PixelArt(depth=24,
+ hidden_size=1024,
+ patch_size=1,
+ num_heads=16,
+ **kwargs)
+
+def DiT_PCD_B_tofeat(**kwargs):
+
+ return DiT_PCD_PixelArt_tofeat(depth=12,
+ hidden_size=768,
+ patch_size=1,
+ num_heads=12,
+ **kwargs)
+
+def DiT_PCD_L_tofeat(**kwargs):
+
+ return DiT_PCD_PixelArt_tofeat(depth=24,
+ hidden_size=1024,
+ patch_size=1,
+ num_heads=16,
+ **kwargs)
+
+def DiT_PCD_XL_tofeat(**kwargs):
+
+ return DiT_PCD_PixelArt_tofeat(depth=28,
+ hidden_size=1152,
+ patch_size=1,
+ num_heads=16,
+ **kwargs)
+
+
+
+
+DiT_models = {
+ 'DiT-XL/2': DiT_XL_2,
+ 'DiT-L/2': DiT_L_2,
+ 'DiT-PixelArt-L/2': DiT_L_Pixelart_2,
+ 'DiT-PixelArt-B/2': DiT_B_Pixelart_2,
+ 'DiT-B/2': DiT_B_2,
+ 'DiT-B/1': DiT_B_1,
+ 'DiT-PCD-B': DiT_PCD_B,
+ 'DiT-PCD-L': DiT_PCD_L,
+ 'DiT-PCD-B-stage2-xyz2feat': DiT_PCD_B_tofeat,
+ 'DiT-PCD-L-stage2-xyz2feat': DiT_PCD_L_tofeat,
+ 'DiT-PCD-XL-stage2-xyz2feat': DiT_PCD_XL_tofeat,
+ # 'DiT-PCD-L-stage1-text': DiT_PCD_L_tofeat,
+}
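+
+# Usage sketch (hypothetical shapes, for illustration only):
+#   model = DiT_models['DiT-PCD-B'](in_channels=19, num_classes=0,
+#                                   context_dim=768, roll_out=True)
+#   x = torch.randn(2, 2048, 19)  # point-cloud-structured latent tokens
+#   ctx = {'caption_vector': torch.randn(2, 768),
+#          'caption_crossattn': torch.randn(2, 77, 768)}
+#   out = model(x, timesteps=torch.randint(0, 1000, (2,)), context=ctx)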
diff --git a/dit/dit_wo_embedder.py b/dit/dit_wo_embedder.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a40a05b2b071d9b564551d46b78bafe43156121
--- /dev/null
+++ b/dit/dit_wo_embedder.py
@@ -0,0 +1,439 @@
+import torch
+import torch.nn as nn
+
+from .dit_models import TimestepEmbedder, LabelEmbedder, DiTBlock, get_2d_sincos_pos_embed
+
+
+class DiTwoEmbedder(nn.Module):
+ """
+ Diffusion model with a Transformer backbone, operating directly on ViT token latents rather than spatial latents.
+ """
+
+ def __init__(
+ self,
+ input_size=224, # raw img input size
+ # patch_size=14, # dino version
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4.0,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ ):
+ super().__init__()
+ self.learn_sigma = learn_sigma
+ self.in_channels = in_channels
+ self.out_channels = in_channels * 2 if learn_sigma else in_channels
+ self.patch_size = 14 # the dino-v2 patch size is fixed in this project
+ self.num_heads = num_heads
+
+ # self.x_embedder = PatchEmbed(input_size,
+ # patch_size,
+ # in_channels,
+ # hidden_size,
+ # bias=True)
+ self.t_embedder = TimestepEmbedder(hidden_size)
+ if num_classes > 0:
+ self.y_embedder = LabelEmbedder(num_classes, hidden_size,
+ class_dropout_prob)
+ else:
+ self.y_embedder = None
+
+ # num_patches = self.x_embedder.num_patches # 14*14*3
+ self.num_patches = (input_size // self.patch_size)**2
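+ # e.g. input_size=224 with patch_size=14 gives (224 // 14)**2 = 256 tokens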
+
+ # Will use fixed sin-cos embedding:
+ self.pos_embed = nn.Parameter(torch.zeros(1, self.num_patches,
+ hidden_size),
+ requires_grad=False)
+
+ self.blocks = nn.ModuleList([
+ DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ for _ in range(depth)
+ ])
+ # self.final_layer = FinalLayer(hidden_size, patch_size,
+ # self.out_channels)
+ self.initialize_weights()
+
+ def initialize_weights(self):
+ # Initialize transformer layers:
+ def _basic_init(module):
+ if isinstance(module, nn.Linear):
+ torch.nn.init.xavier_uniform_(module.weight)
+ if module.bias is not None:
+ nn.init.constant_(module.bias, 0)
+
+ self.apply(_basic_init)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1],
+ int(self.num_patches**0.5))
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ # Initialize patch_embed like nn.Linear (instead of nn.Conv2d):
+ # w = self.x_embedder.proj.weight.data
+ # nn.init.xavier_uniform_(w.view([w.shape[0], -1]))
+ # nn.init.constant_(self.x_embedder.proj.bias, 0)
+
+ # Initialize label embedding table:
+ if self.y_embedder is not None:
+ nn.init.normal_(self.y_embedder.embedding_table.weight, std=0.02)
+
+ # Initialize timestep embedding MLP:
+ nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
+ nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)
+
+ # Zero-out adaLN modulation layers in DiT blocks:
+ for block in self.blocks:
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
+
+ # Zero-out output layers:
+ # nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)
+ # nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)
+ # nn.init.constant_(self.final_layer.linear.weight, 0)
+ # nn.init.constant_(self.final_layer.linear.bias, 0)
+
+ def forward(self, x, t, y=None):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+
+ # ! no embedder operation
+ # x = self.x_embedder(
+ # x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+ x = x + self.pos_embed
+
+ t = self.t_embedder(t) # (N, D)
+
+ if self.y_embedder is not None:
+ assert y is not None
+ y = self.y_embedder(y, self.training) # (N, D)
+ c = t + y # (N, D)
+ else:
+ c = t
+
+ for block in self.blocks:
+ x = block(x, c) # (N, T, D)
+
+ # x = self.final_layer(x, c) # (N, T, patch_size ** 2 * out_channels)
+ # x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ return x
+
+ def forward_with_cfg(self, x, t, y, cfg_scale):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ half = x[:len(x) // 2]
+ combined = torch.cat([half, half], dim=0)
+ model_out = self.forward(combined, t, y)
+ # For exact reproducibility reasons, we apply classifier-free guidance on only
+ # three channels by default. The standard approach to cfg applies it to all channels.
+ # This can be done by uncommenting the following line and commenting-out the line following that.
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ eps, rest = model_out[:, :3], model_out[:, 3:]
+ cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ eps = torch.cat([half_eps, half_eps], dim=0)
+ return torch.cat([eps, rest], dim=1)
+
+ def forward_with_cfg_unconditional(self, x, t, y=None, cfg_scale=None):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+
+ combined = x
+ model_out = self.forward(combined, t, y)
+
+ return model_out
+
+
+class DiTwoEmbedderLongSkipConnection(nn.Module):
+
+ def __init__(
+ self,
+ input_size=224, # raw img input size
+ patch_size=14, # dino version
+ in_channels=4,
+ hidden_size=1152,
+ depth=28,
+ num_heads=16,
+ mlp_ratio=4.0,
+ class_dropout_prob=0.1,
+ num_classes=1000,
+ learn_sigma=True,
+ ):
+ """DiT with long skip-connections from U-ViT, CVPR 23'
+ """
+ super().__init__()
+ self.learn_sigma = learn_sigma
+ self.in_channels = in_channels
+ self.out_channels = in_channels * 2 if learn_sigma else in_channels
+ self.patch_size = patch_size
+ self.num_heads = num_heads
+
+ self.t_embedder = TimestepEmbedder(hidden_size)
+ if num_classes > 0:
+ self.y_embedder = LabelEmbedder(num_classes, hidden_size,
+ class_dropout_prob)
+ else:
+ self.y_embedder = None
+
+ # num_patches = self.x_embedder.num_patches # 14*14*3
+ self.num_patches = (input_size // patch_size)**2
+
+ # Will use fixed sin-cos embedding:
+ self.pos_embed = nn.Parameter(torch.zeros(1, self.num_patches,
+ hidden_size),
+ requires_grad=False)
+
+ self.blocks = nn.ModuleList([
+ DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ for _ in range(depth)
+ ])
+
+ # ! add long-skip-connections from U-ViT
+ self.in_blocks = nn.ModuleList([
+ DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ for _ in range(depth // 2)
+ ])
+
+ self.mid_block = DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+
+ self.out_blocks = nn.ModuleList([
+ DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio)
+ for _ in range(depth // 2)
+ ])
+
+ # ! needed or to be replaced?
+ # self.final_layer = FinalLayer(hidden_size, patch_size,
+ # self.out_channels)
+ self.initialize_weights()
+
+ def initialize_weights(self):
+ # Initialize transformer layers:
+ def _basic_init(module):
+ if isinstance(module, nn.Linear):
+ torch.nn.init.xavier_uniform_(module.weight)
+ if module.bias is not None:
+ nn.init.constant_(module.bias, 0)
+
+ self.apply(_basic_init)
+
+ # Initialize (and freeze) pos_embed by sin-cos embedding:
+ pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1],
+ int(self.num_patches**0.5))
+ self.pos_embed.data.copy_(
+ torch.from_numpy(pos_embed).float().unsqueeze(0))
+
+ # Initialize patch_embed like nn.Linear (instead of nn.Conv2d):
+ # w = self.x_embedder.proj.weight.data
+ # nn.init.xavier_uniform_(w.view([w.shape[0], -1]))
+ # nn.init.constant_(self.x_embedder.proj.bias, 0)
+
+ # Initialize label embedding table:
+ if self.y_embedder is not None:
+ nn.init.normal_(self.y_embedder.embedding_table.weight, std=0.02)
+
+ # Initialize timestep embedding MLP:
+ nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
+ nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)
+
+ # Zero-out adaLN modulation layers in DiT blocks:
+ for block in self.blocks:
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
+
+ # Zero-out output layers:
+ # nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)
+ # nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)
+ # nn.init.constant_(self.final_layer.linear.weight, 0)
+ # nn.init.constant_(self.final_layer.linear.bias, 0)
+
+ def forward(self, x, t, y=None):
+ """
+ Forward pass of DiT.
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images)
+ t: (N,) tensor of diffusion timesteps
+ y: (N,) tensor of class labels
+ """
+
+ # ! no embedder operation
+ # x = self.x_embedder(
+ # x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2
+ x = x + self.pos_embed
+
+ t = self.t_embedder(t) # (N, D)
+
+ if self.y_embedder is not None:
+ assert y is not None
+ y = self.y_embedder(y, self.training) # (N, D)
+ c = t + y # (N, D)
+ else:
+ c = t
+
+ # ! long skip-connections (U-ViT style); the blocks still need the conditioning c
+ skips = []
+ for blk in self.in_blocks:
+ x = blk(x, c)
+ skips.append(x)
+
+ x = self.mid_block(x, c)
+
+ for blk in self.out_blocks:
+ # NOTE: additive skip fusion is a simplification; U-ViT concatenates
+ # the skip and projects it back with a linear layer.
+ x = blk(x + skips.pop(), c)
+
+ # ! the order of unpatchify and final_linear is swapped in the baseline implementation
+ # x = self.final_layer(x, c) # (N, T, patch_size ** 2 * out_channels)
+ # x = self.unpatchify(x) # (N, out_channels, H, W)
+
+ return x
+
+ def forward_with_cfg(self, x, t, y, cfg_scale):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+ # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb
+ half = x[:len(x) // 2]
+ combined = torch.cat([half, half], dim=0)
+ model_out = self.forward(combined, t, y)
+ # For exact reproducibility reasons, we apply classifier-free guidance on only
+ # three channels by default. The standard approach to cfg applies it to all channels.
+ # This can be done by uncommenting the following line and commenting-out the line following that.
+ # eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
+ eps, rest = model_out[:, :3], model_out[:, 3:]
+ cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
+ half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
+ eps = torch.cat([half_eps, half_eps], dim=0)
+ return torch.cat([eps, rest], dim=1)
+
+ def forward_with_cfg_unconditional(self, x, t, y=None, cfg_scale=None):
+ """
+ Forward pass of DiT, but also batches the unconditional forward pass for classifier-free guidance.
+ """
+
+ combined = x
+ model_out = self.forward(combined, t, y)
+
+ return model_out
+
+
+#################################################################################
+# DiT Configs #
+#################################################################################
+
+# The standard DiT_{XL,L,B,S} configs are defined in dit_models / dit_models_xformers.
+
+
+def DiT_woembed_S(**kwargs):
+ return DiTwoEmbedder(depth=12, hidden_size=384, num_heads=6, **kwargs)
+
+
+def DiT_woembed_B(**kwargs):
+ return DiTwoEmbedder(depth=12, hidden_size=768, num_heads=12, **kwargs)
+
+
+def DiT_woembed_L(**kwargs):
+ return DiTwoEmbedder(
+ depth=24,
+ hidden_size=1024,
+ num_heads=16,
+ **kwargs)
+
+
+DiT_woembed_models = {
+ 'DiT-wo-S': DiT_woembed_S,
+ 'DiT-wo-B': DiT_woembed_B,
+ 'DiT-wo-L': DiT_woembed_L,
+}
diff --git a/dit/norm.py b/dit/norm.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a89bb44e3f66038ed1f960cb6c65379bd227cc0
--- /dev/null
+++ b/dit/norm.py
@@ -0,0 +1,40 @@
+import numbers
+
+import torch
+import torch.nn as nn
+
+
+class RMSNorm(nn.Module):
+ def __init__(self, dim, eps: float = 1e-5, elementwise_affine: bool = True):
+ super().__init__()
+
+ self.eps = eps
+
+ if isinstance(dim, numbers.Integral):
+ dim = (dim,)
+
+ self.dim = torch.Size(dim)
+
+ if elementwise_affine:
+ self.weight = nn.Parameter(torch.ones(dim))
+ else:
+ self.weight = None
+
+ def forward(self, hidden_states):
+ input_dtype = hidden_states.dtype
+ variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
+ hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
+
+ if self.weight is not None:
+ # convert into half-precision if necessary
+ if self.weight.dtype in [torch.float16, torch.bfloat16]:
+ hidden_states = hidden_states.to(self.weight.dtype)
+ hidden_states = hidden_states * self.weight
+ else:
+ hidden_states = hidden_states.to(input_dtype)
+
+ return hidden_states.to(input_dtype)
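+
+
+# Usage sketch: unlike LayerNorm, RMSNorm rescales by the root-mean-square of
+# the activations without mean-centering; statistics are computed in fp32 for
+# stability under mixed precision, and the input dtype is preserved.
+#   norm = RMSNorm(768)
+#   y = norm(torch.randn(2, 16, 768, dtype=torch.bfloat16))  # same shape/dtype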
diff --git a/dnnlib/__init__.py b/dnnlib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd91ed142e955581e83948455fb71cd837215f61
--- /dev/null
+++ b/dnnlib/__init__.py
@@ -0,0 +1,11 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+from .util import EasyDict, make_cache_dir_path
diff --git a/dnnlib/__pycache__/__init__.cpython-310.pyc b/dnnlib/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ba6aedbb18cdeab74e8414837d640642e33f7c23
Binary files /dev/null and b/dnnlib/__pycache__/__init__.cpython-310.pyc differ
diff --git a/dnnlib/__pycache__/__init__.cpython-39.pyc b/dnnlib/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f36aa96f44ea1d8cd73a23ad8135eb0fdb1b982e
Binary files /dev/null and b/dnnlib/__pycache__/__init__.cpython-39.pyc differ
diff --git a/dnnlib/__pycache__/util.cpython-310.pyc b/dnnlib/__pycache__/util.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..98284bb3ab6c918240a2277cb9adfd62ee8a7569
Binary files /dev/null and b/dnnlib/__pycache__/util.cpython-310.pyc differ
diff --git a/dnnlib/__pycache__/util.cpython-39.pyc b/dnnlib/__pycache__/util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..867125acf0aa14f5454d57d8c714ddbfb23e85a0
Binary files /dev/null and b/dnnlib/__pycache__/util.cpython-39.pyc differ
diff --git a/dnnlib/util.py b/dnnlib/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..2430b83469eca9217de68e9ab3fa7cdec14bc5b4
--- /dev/null
+++ b/dnnlib/util.py
@@ -0,0 +1,590 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Miscellaneous utility classes and functions."""
+
+import ctypes
+import fnmatch
+import importlib
+import inspect
+import numpy as np
+import os
+import shutil
+import sys
+import types
+import io
+import pickle
+import re
+import requests
+import html
+import hashlib
+import glob
+import tempfile
+import urllib
+import urllib.request
+import uuid
+
+from distutils.util import strtobool
+from typing import Any, List, Tuple, Union
+
+import torch
+
+# Util classes
+# ------------------------------------------------------------------------------------------
+
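+# VQGAN-style adaptive discriminator weight: rescales the adversarial loss so
+# its gradient norm at the generator's last layer matches that of the
+# reconstruction loss (clamped to disc_weight_max).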
+def calculate_adaptive_weight(recon_loss, g_loss, last_layer, disc_weight_max=1.0):
+ recon_grads = torch.autograd.grad(recon_loss, last_layer, retain_graph=True)[0]
+ g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]
+
+ d_weight = torch.norm(recon_grads) / (torch.norm(g_grads) + 1e-4)
+ d_weight = torch.clamp(d_weight, 0.0, disc_weight_max).detach()
+ return d_weight
+
+
+class EasyDict(dict):
+ """Convenience class that behaves like a dict but allows access with the attribute syntax."""
+
+ def __getattr__(self, name: str) -> Any:
+ try:
+ return self[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __setattr__(self, name: str, value: Any) -> None:
+ self[name] = value
+
+ def __delattr__(self, name: str) -> None:
+ del self[name]
+
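+# Example (sketch): EasyDict supports attribute-style access:
+#   cfg = EasyDict(lr=1e-4); cfg.batch_size = 32
+#   assert cfg['batch_size'] == 32 and cfg.lr == 1e-4
+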
+
+class Logger(object):
+ """Redirect stderr to stdout, optionally print stdout to a file, and optionally force flushing on both stdout and the file."""
+
+ def __init__(self,
+ file_name: str = None,
+ file_mode: str = "w",
+ should_flush: bool = True):
+ self.file = None
+
+ if file_name is not None:
+ self.file = open(file_name, file_mode)
+
+ self.should_flush = should_flush
+ self.stdout = sys.stdout
+ self.stderr = sys.stderr
+
+ sys.stdout = self
+ sys.stderr = self
+
+ def __enter__(self) -> "Logger":
+ return self
+
+ def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+ self.close()
+
+ def write(self, text: Union[str, bytes]) -> None:
+ """Write text to stdout (and a file) and optionally flush."""
+ if isinstance(text, bytes):
+ text = text.decode()
+        # workaround for a bug in VSCode debugger: sys.stdout.write(''); sys.stdout.flush() => crash
+        if len(text) == 0:
+            return
+
+ if self.file is not None:
+ self.file.write(text)
+
+ self.stdout.write(text)
+
+ if self.should_flush:
+ self.flush()
+
+ def flush(self) -> None:
+ """Flush written text to both stdout and a file, if open."""
+ if self.file is not None:
+ self.file.flush()
+
+ self.stdout.flush()
+
+ def close(self) -> None:
+ """Flush, close possible files, and remove stdout/stderr mirroring."""
+ self.flush()
+
+ # if using multiple loggers, prevent closing in wrong order
+ if sys.stdout is self:
+ sys.stdout = self.stdout
+ if sys.stderr is self:
+ sys.stderr = self.stderr
+
+ if self.file is not None:
+ self.file.close()
+ self.file = None
+
+
+# Cache directories
+# ------------------------------------------------------------------------------------------
+
+_dnnlib_cache_dir = None
+
+
+def set_cache_dir(path: str) -> None:
+ global _dnnlib_cache_dir
+ _dnnlib_cache_dir = path
+
+
+def make_cache_dir_path(*paths: str) -> str:
+ if _dnnlib_cache_dir is not None:
+ return os.path.join(_dnnlib_cache_dir, *paths)
+ if 'DNNLIB_CACHE_DIR' in os.environ:
+ return os.path.join(os.environ['DNNLIB_CACHE_DIR'], *paths)
+ if 'HOME' in os.environ:
+ return os.path.join(os.environ['HOME'], '.cache', 'dnnlib', *paths)
+ if 'USERPROFILE' in os.environ:
+ return os.path.join(os.environ['USERPROFILE'], '.cache', 'dnnlib',
+ *paths)
+ return os.path.join(tempfile.gettempdir(), '.cache', 'dnnlib', *paths)
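+
+# Resolution order above: explicit set_cache_dir() > $DNNLIB_CACHE_DIR >
+# $HOME/.cache/dnnlib (or $USERPROFILE on Windows) > the system temp directory.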
+
+
+# Small util functions
+# ------------------------------------------------------------------------------------------
+
+
+def format_time(seconds: Union[int, float]) -> str:
+ """Convert the seconds to human readable string with days, hours, minutes and seconds."""
+ s = int(np.rint(seconds))
+
+ if s < 60:
+ return "{0}s".format(s)
+ elif s < 60 * 60:
+ return "{0}m {1:02}s".format(s // 60, s % 60)
+ elif s < 24 * 60 * 60:
+ return "{0}h {1:02}m {2:02}s".format(s // (60 * 60), (s // 60) % 60,
+ s % 60)
+ else:
+ return "{0}d {1:02}h {2:02}m".format(s // (24 * 60 * 60),
+ (s // (60 * 60)) % 24,
+ (s // 60) % 60)
+
+
+def format_time_brief(seconds: Union[int, float]) -> str:
+ """Convert the seconds to human readable string with days, hours, minutes and seconds."""
+ s = int(np.rint(seconds))
+
+ if s < 60:
+ return "{0}s".format(s)
+ elif s < 60 * 60:
+ return "{0}m {1:02}s".format(s // 60, s % 60)
+ elif s < 24 * 60 * 60:
+ return "{0}h {1:02}m".format(s // (60 * 60), (s // 60) % 60)
+ else:
+ return "{0}d {1:02}h".format(s // (24 * 60 * 60),
+ (s // (60 * 60)) % 24)
+
+
+def ask_yes_no(question: str) -> bool:
+ """Ask the user the question until the user inputs a valid answer."""
+ while True:
+ try:
+ print("{0} [y/n]".format(question))
+ return strtobool(input().lower())
+ except ValueError:
+ pass
+
+
+def tuple_product(t: Tuple) -> Any:
+ """Calculate the product of the tuple elements."""
+ result = 1
+
+ for v in t:
+ result *= v
+
+ return result
+
+
+_str_to_ctype = {
+ "uint8": ctypes.c_ubyte,
+ "uint16": ctypes.c_uint16,
+ "uint32": ctypes.c_uint32,
+ "uint64": ctypes.c_uint64,
+ "int8": ctypes.c_byte,
+ "int16": ctypes.c_int16,
+ "int32": ctypes.c_int32,
+ "int64": ctypes.c_int64,
+ "float32": ctypes.c_float,
+ "float64": ctypes.c_double
+}
+
+
+def get_dtype_and_ctype(type_obj: Any) -> Tuple[np.dtype, Any]:
+ """Given a type name string (or an object having a __name__ attribute), return matching Numpy and ctypes types that have the same size in bytes."""
+ type_str = None
+
+ if isinstance(type_obj, str):
+ type_str = type_obj
+ elif hasattr(type_obj, "__name__"):
+ type_str = type_obj.__name__
+ elif hasattr(type_obj, "name"):
+ type_str = type_obj.name
+ else:
+ raise RuntimeError("Cannot infer type name from input")
+
+ assert type_str in _str_to_ctype.keys()
+
+ my_dtype = np.dtype(type_str)
+ my_ctype = _str_to_ctype[type_str]
+
+ assert my_dtype.itemsize == ctypes.sizeof(my_ctype)
+
+ return my_dtype, my_ctype
+
+
+def is_pickleable(obj: Any) -> bool:
+ try:
+ with io.BytesIO() as stream:
+ pickle.dump(obj, stream)
+ return True
+ except:
+ return False
+
+
+# Functionality to import modules/objects by name, and call functions by name
+# ------------------------------------------------------------------------------------------
+
+
+def get_module_from_obj_name(obj_name: str) -> Tuple[types.ModuleType, str]:
+ """Searches for the underlying module behind the name to some python object.
+ Returns the module and the object name (original name with module part removed)."""
+
+ # allow convenience shorthands, substitute them by full names
+ obj_name = re.sub("^np.", "numpy.", obj_name)
+ obj_name = re.sub("^tf.", "tensorflow.", obj_name)
+
+ # list alternatives for (module_name, local_obj_name)
+ parts = obj_name.split(".")
+ name_pairs = [(".".join(parts[:i]), ".".join(parts[i:]))
+ for i in range(len(parts), 0, -1)]
+
+ # try each alternative in turn
+ for module_name, local_obj_name in name_pairs:
+ try:
+ module = importlib.import_module(
+ module_name) # may raise ImportError
+ get_obj_from_module(module,
+ local_obj_name) # may raise AttributeError
+ return module, local_obj_name
+ except:
+ pass
+
+ # maybe some of the modules themselves contain errors?
+ for module_name, _local_obj_name in name_pairs:
+ try:
+ importlib.import_module(module_name) # may raise ImportError
+ except ImportError:
+ if not str(sys.exc_info()[1]).startswith("No module named '" +
+ module_name + "'"):
+ raise
+
+ # maybe the requested attribute is missing?
+ for module_name, local_obj_name in name_pairs:
+ try:
+ module = importlib.import_module(
+ module_name) # may raise ImportError
+ get_obj_from_module(module,
+ local_obj_name) # may raise AttributeError
+ except ImportError:
+ pass
+
+ # we are out of luck, but we have no idea why
+ raise ImportError(obj_name)
+
+
+def get_obj_from_module(module: types.ModuleType, obj_name: str) -> Any:
+ """Traverses the object name and returns the last (rightmost) python object."""
+ if obj_name == '':
+ return module
+ obj = module
+ for part in obj_name.split("."):
+ obj = getattr(obj, part)
+ return obj
+
+
+def get_obj_by_name(name: str) -> Any:
+ """Finds the python object with the given name."""
+ module, obj_name = get_module_from_obj_name(name)
+ return get_obj_from_module(module, obj_name)
+
+
+def call_func_by_name(*args, func_name: str = None, **kwargs) -> Any:
+ """Finds the python object with the given name and calls it as a function."""
+ assert func_name is not None
+ func_obj = get_obj_by_name(func_name)
+ assert callable(func_obj)
+ return func_obj(*args, **kwargs)
+
+
+def construct_class_by_name(*args, class_name: str = None, **kwargs) -> Any:
+ """Finds the python class with the given name and constructs it with the given arguments."""
+ return call_func_by_name(*args, func_name=class_name, **kwargs)
+
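+# Example (sketch): construct_class_by_name(class_name='torch.optim.SGD',
+# params=model.parameters(), lr=0.1) resolves the dotted name and
+# instantiates it with the given arguments.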
+
+def get_module_dir_by_obj_name(obj_name: str) -> str:
+ """Get the directory path of the module containing the given object name."""
+ module, _ = get_module_from_obj_name(obj_name)
+ return os.path.dirname(inspect.getfile(module))
+
+
+def is_top_level_function(obj: Any) -> bool:
+ """Determine whether the given object is a top-level function, i.e., defined at module scope using 'def'."""
+ return callable(obj) and obj.__name__ in sys.modules[
+ obj.__module__].__dict__
+
+
+def get_top_level_function_name(obj: Any) -> str:
+ """Return the fully-qualified name of a top-level function."""
+ assert is_top_level_function(obj)
+ module = obj.__module__
+ if module == '__main__':
+ module = os.path.splitext(
+ os.path.basename(sys.modules[module].__file__))[0]
+ return module + "." + obj.__name__
+
+
+# File system helpers
+# ------------------------------------------------------------------------------------------
+
+
+def list_dir_recursively_with_ignore(
+ dir_path: str,
+ ignores: List[str] = None,
+ add_base_to_relative: bool = False) -> List[Tuple[str, str]]:
+ """List all files recursively in a given directory while ignoring given file and directory names.
+ Returns list of tuples containing both absolute and relative paths."""
+ assert os.path.isdir(dir_path)
+ base_name = os.path.basename(os.path.normpath(dir_path))
+
+ if ignores is None:
+ ignores = []
+
+ result = []
+
+ for root, dirs, files in os.walk(dir_path, topdown=True):
+ for ignore_ in ignores:
+ dirs_to_remove = [d for d in dirs if fnmatch.fnmatch(d, ignore_)]
+
+ # dirs need to be edited in-place
+ for d in dirs_to_remove:
+ dirs.remove(d)
+
+ files = [f for f in files if not fnmatch.fnmatch(f, ignore_)]
+
+ absolute_paths = [os.path.join(root, f) for f in files]
+ relative_paths = [os.path.relpath(p, dir_path) for p in absolute_paths]
+
+ if add_base_to_relative:
+ relative_paths = [
+ os.path.join(base_name, p) for p in relative_paths
+ ]
+
+ assert len(absolute_paths) == len(relative_paths)
+ result += zip(absolute_paths, relative_paths)
+
+ return result
+
+
+def copy_files_and_create_dirs(files: List[Tuple[str, str]]) -> None:
+ """Takes in a list of tuples of (src, dst) paths and copies files.
+ Will create all necessary directories."""
+ for file in files:
+ target_dir_name = os.path.dirname(file[1])
+
+ # will create all intermediate-level directories
+ if not os.path.exists(target_dir_name):
+ os.makedirs(target_dir_name)
+
+ shutil.copyfile(file[0], file[1])
+
+
+# URL helpers
+# ------------------------------------------------------------------------------------------
+
+
+def is_url(obj: Any, allow_file_urls: bool = False) -> bool:
+ """Determine whether the given object is a valid URL string."""
+ if not isinstance(obj, str) or not "://" in obj:
+ return False
+ if allow_file_urls and obj.startswith('file://'):
+ return True
+ try:
+ res = requests.compat.urlparse(obj)
+ if not res.scheme or not res.netloc or not "." in res.netloc:
+ return False
+ res = requests.compat.urlparse(requests.compat.urljoin(obj, "/"))
+ if not res.scheme or not res.netloc or not "." in res.netloc:
+ return False
+ except:
+ return False
+ return True
+
+
+def open_url(url: str,
+ cache_dir: str = None,
+ num_attempts: int = 10,
+ verbose: bool = True,
+ return_filename: bool = False,
+ cache: bool = True) -> Any:
+ """Download the given URL and return a binary-mode file object to access the data."""
+ assert num_attempts >= 1
+ assert not (return_filename and (not cache))
+
+    # Doesn't look like a URL scheme, so interpret it as a local filename.
+ if not re.match('^[a-z]+://', url):
+ return url if return_filename else open(url, "rb")
+
+ # Handle file URLs. This code handles unusual file:// patterns that
+ # arise on Windows:
+ #
+ # file:///c:/foo.txt
+ #
+ # which would translate to a local '/c:/foo.txt' filename that's
+ # invalid. Drop the forward slash for such pathnames.
+ #
+ # If you touch this code path, you should test it on both Linux and
+ # Windows.
+ #
+ # Some internet resources suggest using urllib.request.url2pathname() but
+ # but that converts forward slashes to backslashes and this causes
+ # its own set of problems.
+ if url.startswith('file://'):
+ filename = urllib.parse.urlparse(url).path
+ if re.match(r'^/[a-zA-Z]:', filename):
+ filename = filename[1:]
+ return filename if return_filename else open(filename, "rb")
+
+ assert is_url(url)
+
+ # Lookup from cache.
+ if cache_dir is None:
+ cache_dir = make_cache_dir_path('downloads')
+
+ url_md5 = hashlib.md5(url.encode("utf-8")).hexdigest()
+ if cache:
+ cache_files = glob.glob(os.path.join(cache_dir, url_md5 + "_*"))
+ if len(cache_files) == 1:
+ filename = cache_files[0]
+ return filename if return_filename else open(filename, "rb")
+
+ # Download.
+ url_name = None
+ url_data = None
+ with requests.Session() as session:
+ if verbose:
+ print("Downloading %s ..." % url, end="", flush=True)
+ for attempts_left in reversed(range(num_attempts)):
+ try:
+ with session.get(url) as res:
+ res.raise_for_status()
+ if len(res.content) == 0:
+ raise IOError("No data received")
+
+ if len(res.content) < 8192:
+ content_str = res.content.decode("utf-8")
+ if "download_warning" in res.headers.get(
+ "Set-Cookie", ""):
+ links = [
+ html.unescape(link)
+ for link in content_str.split('"')
+ if "export=download" in link
+ ]
+ if len(links) == 1:
+ url = requests.compat.urljoin(url, links[0])
+ raise IOError("Google Drive virus checker nag")
+ if "Google Drive - Quota exceeded" in content_str:
+ raise IOError(
+ "Google Drive download quota exceeded -- please try again later"
+ )
+
+ match = re.search(
+ r'filename="([^"]*)"',
+ res.headers.get("Content-Disposition", ""))
+ url_name = match[1] if match else url
+ url_data = res.content
+ if verbose:
+ print(" done")
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ if not attempts_left:
+ if verbose:
+ print(" failed")
+ raise
+ if verbose:
+ print(".", end="", flush=True)
+
+ # Save to cache.
+ if cache:
+ safe_name = re.sub(r"[^0-9a-zA-Z-._]", "_", url_name)
+ cache_file = os.path.join(cache_dir, url_md5 + "_" + safe_name)
+ temp_file = os.path.join(
+ cache_dir,
+ "tmp_" + uuid.uuid4().hex + "_" + url_md5 + "_" + safe_name)
+ os.makedirs(cache_dir, exist_ok=True)
+ with open(temp_file, "wb") as f:
+ f.write(url_data)
+ os.replace(temp_file, cache_file) # atomic
+ if return_filename:
+ return cache_file
+
+ # Return data as file object.
+ assert not return_filename
+ return io.BytesIO(url_data)
+
+class InfiniteSampler(torch.utils.data.Sampler):
+
+ def __init__(self,
+ dataset,
+ rank=0,
+ num_replicas=1,
+ shuffle=True,
+ seed=0,
+ window_size=0.5):
+ assert len(dataset) > 0
+ assert num_replicas > 0
+ assert 0 <= rank < num_replicas
+ assert 0 <= window_size <= 1
+ super().__init__(dataset)
+ self.dataset = dataset
+ self.rank = rank
+ self.num_replicas = num_replicas
+ self.shuffle = shuffle
+ self.seed = seed
+ self.window_size = window_size
+
+ def __iter__(self):
+ order = np.arange(len(self.dataset))
+ rnd = None
+ window = 0
+ if self.shuffle:
+ rnd = np.random.RandomState(self.seed)
+ rnd.shuffle(order)
+ window = int(np.rint(order.size * self.window_size))
+
+ idx = 0
+ while True:
+ i = idx % order.size
+ if idx % self.num_replicas == self.rank:
+ yield order[i]
+ if window >= 2:
+ j = (i - rnd.randint(window)) % order.size
+ order[i], order[j] = order[j], order[i]
+ idx += 1
+
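+# Example (sketch): infinite, rank-aware sampling for distributed training:
+#   sampler = InfiniteSampler(dataset, rank=rank, num_replicas=world_size, seed=0)
+#   loader = iter(torch.utils.data.DataLoader(dataset, sampler=sampler, batch_size=8))
+#   images = next(loader)  # never raises StopIteration; order is re-mixed within a window
+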
+def requires_grad(model, flag=True):
+ for p in model.parameters():
+ p.requires_grad = flag
diff --git a/eval_pose.pt b/eval_pose.pt
new file mode 100644
index 0000000000000000000000000000000000000000..cd61c56d751cf1bc695a8f8e6ed55e67bd13c24d
--- /dev/null
+++ b/eval_pose.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:09b6e0eb22da1ca5d1e4a4fd1bfe5b08d65d1b1926621aa22601b67f20904f9a
+size 4721
diff --git a/evaluations/README.md b/evaluations/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..6ad0ab6c0b3982ad60950df7ffa9af5662d31b2b
--- /dev/null
+++ b/evaluations/README.md
@@ -0,0 +1,72 @@
+# Evaluations
+
+To compare different generative models, we use FID, sFID, Precision, Recall, and Inception Score. These metrics can all be calculated using batches of samples, which we store in `.npz` (numpy) files.
+
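+As a minimal sketch, packing generated samples into such a batch can look like this (the helper and file names below are placeholders; `evaluator.py` reads the images from the `arr_0` key):
+
+```python
+import numpy as np
+
+samples = load_my_samples()  # hypothetical helper: (50000, 256, 256, 3) uint8 array, NHWC in [0, 255]
+np.savez("my_samples.npz", arr_0=samples)
+```
+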
+# Download batches
+
+We provide pre-computed sample batches for the reference datasets, our diffusion models, and several baselines we compare against. These are all stored in `.npz` format.
+
+Reference dataset batches contain pre-computed statistics over the whole dataset, as well as 10,000 images for computing Precision and Recall. All other batches contain 50,000 images which can be used to compute statistics and Precision/Recall.
+
+Here are links to download all of the sample and reference batches:
+
+ * LSUN
+ * LSUN bedroom: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/bedroom/VIRTUAL_lsun_bedroom256.npz)
+ * [ADM (dropout)](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/bedroom/admnet_dropout_lsun_bedroom.npz)
+ * [DDPM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/bedroom/ddpm_lsun_bedroom.npz)
+ * [IDDPM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/bedroom/iddpm_lsun_bedroom.npz)
+ * [StyleGAN](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/bedroom/stylegan_lsun_bedroom.npz)
+ * LSUN cat: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/cat/VIRTUAL_lsun_cat256.npz)
+ * [ADM (dropout)](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/cat/admnet_dropout_lsun_cat.npz)
+ * [StyleGAN2](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/cat/stylegan2_lsun_cat.npz)
+ * LSUN horse: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/horse/VIRTUAL_lsun_horse256.npz)
+ * [ADM (dropout)](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/horse/admnet_dropout_lsun_horse.npz)
+ * [ADM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/lsun/horse/admnet_lsun_horse.npz)
+
+ * ImageNet
+ * ImageNet 64x64: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/64/VIRTUAL_imagenet64_labeled.npz)
+ * [ADM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/64/admnet_imagenet64.npz)
+ * [IDDPM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/64/iddpm_imagenet64.npz)
+ * [BigGAN](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/64/biggan_deep_imagenet64.npz)
+ * ImageNet 128x128: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/128/VIRTUAL_imagenet128_labeled.npz)
+ * [ADM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/128/admnet_imagenet128.npz)
+ * [ADM-G](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/128/admnet_guided_imagenet128.npz)
+ * [ADM-G, 25 steps](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/128/admnet_guided_25step_imagenet128.npz)
+ * [BigGAN-deep (trunc=1.0)](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/128/biggan_deep_trunc1_imagenet128.npz)
+ * ImageNet 256x256: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/VIRTUAL_imagenet256_labeled.npz)
+ * [ADM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/admnet_imagenet256.npz)
+ * [ADM-G](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/admnet_guided_imagenet256.npz)
+ * [ADM-G, 25 step](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/admnet_guided_25step_imagenet256.npz)
+ * [ADM-G + ADM-U](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/admnet_guided_upsampled_imagenet256.npz)
+ * [ADM-U](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/admnet_upsampled_imagenet256.npz)
+ * [BigGAN-deep (trunc=1.0)](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/256/biggan_deep_trunc1_imagenet256.npz)
+ * ImageNet 512x512: [reference batch](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/VIRTUAL_imagenet512.npz)
+ * [ADM](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/admnet_imagenet512.npz)
+ * [ADM-G](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/admnet_guided_imagenet512.npz)
+ * [ADM-G, 25 step](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/admnet_guided_25step_imagenet512.npz)
+ * [ADM-G + ADM-U](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/admnet_guided_upsampled_imagenet512.npz)
+ * [ADM-U](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/admnet_upsampled_imagenet512.npz)
+ * [BigGAN-deep (trunc=1.0)](https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/imagenet/512/biggan_deep_trunc1_imagenet512.npz)
+
+# Run evaluations
+
+First, generate or download a batch of samples and download the corresponding reference batch for the given dataset. For this example, we'll use ImageNet 256x256, so the reference batch is `VIRTUAL_imagenet256_labeled.npz` and we can use the sample batch `admnet_guided_upsampled_imagenet256.npz`.
+
+Next, run the `evaluator.py` script. The requirements of this script can be found in [requirements.txt](requirements.txt). Pass two arguments to the script: the reference batch and the sample batch. The script will download the InceptionV3 model used for evaluations into the current working directory (if it is not already present). This file is roughly 100MB.
+
+The output of the script will look something like this, where the first `...` is a bunch of verbose TensorFlow logging:
+
+```
+$ python evaluator.py VIRTUAL_imagenet256_labeled.npz admnet_guided_upsampled_imagenet256.npz
+...
+computing reference batch activations...
+computing/reading reference batch statistics...
+computing sample batch activations...
+computing/reading sample batch statistics...
+Computing evaluations...
+Inception Score: 215.8370361328125
+FID: 3.9425574129223264
+sFID: 6.140433703346162
+Precision: 0.8265
+Recall: 0.5309
+```
diff --git a/evaluations/evaluator.py b/evaluations/evaluator.py
new file mode 100644
index 0000000000000000000000000000000000000000..9590855d564dc94b9b779027a7eae3e3659dd215
--- /dev/null
+++ b/evaluations/evaluator.py
@@ -0,0 +1,653 @@
+import argparse
+import io
+import os
+import random
+import warnings
+import zipfile
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+from functools import partial
+from multiprocessing import cpu_count
+from multiprocessing.pool import ThreadPool
+from typing import Iterable, Optional, Tuple
+
+import numpy as np
+import requests
+import tensorflow.compat.v1 as tf
+from scipy import linalg
+from tqdm.auto import tqdm
+
+INCEPTION_V3_URL = "https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/classify_image_graph_def.pb"
+INCEPTION_V3_PATH = "classify_image_graph_def.pb"
+
+FID_POOL_NAME = "pool_3:0"
+FID_SPATIAL_NAME = "mixed_6/conv:0"
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("ref_batch", help="path to reference batch npz file")
+ parser.add_argument("sample_batch", help="path to sample batch npz file")
+ args = parser.parse_args()
+
+ config = tf.ConfigProto(
+ allow_soft_placement=True # allows DecodeJpeg to run on CPU in Inception graph
+ )
+ config.gpu_options.allow_growth = True
+ evaluator = Evaluator(tf.Session(config=config))
+
+ print("warming up TensorFlow...")
+ # This will cause TF to print a bunch of verbose stuff now rather
+ # than after the next print(), to help prevent confusion.
+ evaluator.warmup()
+
+ print("computing reference batch activations...")
+ ref_acts = evaluator.read_activations(args.ref_batch)
+ print("computing/reading reference batch statistics...")
+ ref_stats, ref_stats_spatial = evaluator.read_statistics(args.ref_batch, ref_acts)
+
+ print("computing sample batch activations...")
+ sample_acts = evaluator.read_activations(args.sample_batch)
+ print("computing/reading sample batch statistics...")
+ sample_stats, sample_stats_spatial = evaluator.read_statistics(args.sample_batch, sample_acts)
+
+ print("Computing evaluations...")
+ print("Inception Score:", evaluator.compute_inception_score(sample_acts[0]))
+ print("FID:", sample_stats.frechet_distance(ref_stats))
+ print("sFID:", sample_stats_spatial.frechet_distance(ref_stats_spatial))
+ prec, recall = evaluator.compute_prec_recall(ref_acts[0], sample_acts[0])
+ print("Precision:", prec)
+ print("Recall:", recall)
+
+
+class InvalidFIDException(Exception):
+ pass
+
+
+class FIDStatistics:
+ def __init__(self, mu: np.ndarray, sigma: np.ndarray):
+ self.mu = mu
+ self.sigma = sigma
+
+ def frechet_distance(self, other, eps=1e-6):
+ """
+ Compute the Frechet distance between two sets of statistics.
+ """
+ # https://github.com/bioinf-jku/TTUR/blob/73ab375cdf952a12686d9aa7978567771084da42/fid.py#L132
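+        # FID = ||mu1 - mu2||^2 + Tr(sigma1 + sigma2 - 2 * sqrtm(sigma1 @ sigma2))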
+ mu1, sigma1 = self.mu, self.sigma
+ mu2, sigma2 = other.mu, other.sigma
+
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert (
+ mu1.shape == mu2.shape
+ ), f"Training and test mean vectors have different lengths: {mu1.shape}, {mu2.shape}"
+ assert (
+ sigma1.shape == sigma2.shape
+ ), f"Training and test covariances have different dimensions: {sigma1.shape}, {sigma2.shape}"
+
+ diff = mu1 - mu2
+
+ # product might be almost singular
+ covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = (
+ "fid calculation produces singular product; adding %s to diagonal of cov estimates"
+ % eps
+ )
+ warnings.warn(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError("Imaginary component {}".format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean
+
+
+class Evaluator:
+ def __init__(
+ self,
+ session,
+ batch_size=64,
+ softmax_batch_size=512,
+ ):
+ self.sess = session
+ self.batch_size = batch_size
+ self.softmax_batch_size = softmax_batch_size
+ self.manifold_estimator = ManifoldEstimator(session)
+ with self.sess.graph.as_default():
+ self.image_input = tf.placeholder(tf.float32, shape=[None, None, None, 3])
+ self.softmax_input = tf.placeholder(tf.float32, shape=[None, 2048])
+ self.pool_features, self.spatial_features = _create_feature_graph(self.image_input)
+ self.softmax = _create_softmax_graph(self.softmax_input)
+
+ def warmup(self):
+ self.compute_activations(np.zeros([1, 8, 64, 64, 3]))
+
+ def read_activations(self, npz_path: str) -> Tuple[np.ndarray, np.ndarray]:
+ with open_npz_array(npz_path, "arr_0") as reader:
+ return self.compute_activations(reader.read_batches(self.batch_size))
+
+ def compute_activations(self, batches: Iterable[np.ndarray]) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ Compute image features for downstream evals.
+
+        :param batches: an iterator over NHWC numpy arrays in [0, 255].
+ :return: a tuple of numpy arrays of shape [N x X], where X is a feature
+ dimension. The tuple is (pool_3, spatial).
+ """
+ preds = []
+ spatial_preds = []
+ for batch in tqdm(batches):
+ batch = batch.astype(np.float32)
+ pred, spatial_pred = self.sess.run(
+ [self.pool_features, self.spatial_features], {self.image_input: batch}
+ )
+ preds.append(pred.reshape([pred.shape[0], -1]))
+ spatial_preds.append(spatial_pred.reshape([spatial_pred.shape[0], -1]))
+ return (
+ np.concatenate(preds, axis=0),
+ np.concatenate(spatial_preds, axis=0),
+ )
+
+ def read_statistics(
+ self, npz_path: str, activations: Tuple[np.ndarray, np.ndarray]
+ ) -> Tuple[FIDStatistics, FIDStatistics]:
+ obj = np.load(npz_path)
+ if "mu" in list(obj.keys()):
+ return FIDStatistics(obj["mu"], obj["sigma"]), FIDStatistics(
+ obj["mu_s"], obj["sigma_s"]
+ )
+ return tuple(self.compute_statistics(x) for x in activations)
+
+ def compute_statistics(self, activations: np.ndarray) -> FIDStatistics:
+ mu = np.mean(activations, axis=0)
+ sigma = np.cov(activations, rowvar=False)
+ return FIDStatistics(mu, sigma)
+
+ def compute_inception_score(self, activations: np.ndarray, split_size: int = 5000) -> float:
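+        # IS = exp( E_x [ KL(p(y|x) || p(y)) ] ), averaged over splits of size split_size.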
+ softmax_out = []
+ for i in range(0, len(activations), self.softmax_batch_size):
+ acts = activations[i : i + self.softmax_batch_size]
+ softmax_out.append(self.sess.run(self.softmax, feed_dict={self.softmax_input: acts}))
+ preds = np.concatenate(softmax_out, axis=0)
+ # https://github.com/openai/improved-gan/blob/4f5d1ec5c16a7eceb206f42bfc652693601e1d5c/inception_score/model.py#L46
+ scores = []
+ for i in range(0, len(preds), split_size):
+ part = preds[i : i + split_size]
+ kl = part * (np.log(part) - np.log(np.expand_dims(np.mean(part, 0), 0)))
+ kl = np.mean(np.sum(kl, 1))
+ scores.append(np.exp(kl))
+ return float(np.mean(scores))
+
+ def compute_prec_recall(
+ self, activations_ref: np.ndarray, activations_sample: np.ndarray
+ ) -> Tuple[float, float]:
+ radii_1 = self.manifold_estimator.manifold_radii(activations_ref)
+ radii_2 = self.manifold_estimator.manifold_radii(activations_sample)
+ pr = self.manifold_estimator.evaluate_pr(
+ activations_ref, radii_1, activations_sample, radii_2
+ )
+ return (float(pr[0][0]), float(pr[1][0]))
+
+
+class ManifoldEstimator:
+ """
+ A helper for comparing manifolds of feature vectors.
+
+ Adapted from https://github.com/kynkaat/improved-precision-and-recall-metric/blob/f60f25e5ad933a79135c783fcda53de30f42c9b9/precision_recall.py#L57
+ """
+
+ def __init__(
+ self,
+ session,
+ row_batch_size=10000,
+ col_batch_size=10000,
+ nhood_sizes=(3,),
+ clamp_to_percentile=None,
+ eps=1e-5,
+ ):
+ """
+ Estimate the manifold of given feature vectors.
+
+ :param session: the TensorFlow session.
+ :param row_batch_size: row batch size to compute pairwise distances
+ (parameter to trade-off between memory usage and performance).
+ :param col_batch_size: column batch size to compute pairwise distances.
+ :param nhood_sizes: number of neighbors used to estimate the manifold.
+ :param clamp_to_percentile: prune hyperspheres that have radius larger than
+ the given percentile.
+ :param eps: small number for numerical stability.
+ """
+ self.distance_block = DistanceBlock(session)
+ self.row_batch_size = row_batch_size
+ self.col_batch_size = col_batch_size
+ self.nhood_sizes = nhood_sizes
+ self.num_nhoods = len(nhood_sizes)
+ self.clamp_to_percentile = clamp_to_percentile
+ self.eps = eps
+
+ def warmup(self):
+ feats, radii = (
+ np.zeros([1, 2048], dtype=np.float32),
+ np.zeros([1, 1], dtype=np.float32),
+ )
+ self.evaluate_pr(feats, radii, feats, radii)
+
+ def manifold_radii(self, features: np.ndarray) -> np.ndarray:
+ num_images = len(features)
+
+ # Estimate manifold of features by calculating distances to k-NN of each sample.
+ radii = np.zeros([num_images, self.num_nhoods], dtype=np.float32)
+ distance_batch = np.zeros([self.row_batch_size, num_images], dtype=np.float32)
+ seq = np.arange(max(self.nhood_sizes) + 1, dtype=np.int32)
+
+ for begin1 in range(0, num_images, self.row_batch_size):
+ end1 = min(begin1 + self.row_batch_size, num_images)
+ row_batch = features[begin1:end1]
+
+ for begin2 in range(0, num_images, self.col_batch_size):
+ end2 = min(begin2 + self.col_batch_size, num_images)
+ col_batch = features[begin2:end2]
+
+ # Compute distances between batches.
+ distance_batch[
+ 0 : end1 - begin1, begin2:end2
+ ] = self.distance_block.pairwise_distances(row_batch, col_batch)
+
+ # Find the k-nearest neighbor from the current batch.
+ radii[begin1:end1, :] = np.concatenate(
+ [
+ x[:, self.nhood_sizes]
+ for x in _numpy_partition(distance_batch[0 : end1 - begin1, :], seq, axis=1)
+ ],
+ axis=0,
+ )
+
+ if self.clamp_to_percentile is not None:
+ max_distances = np.percentile(radii, self.clamp_to_percentile, axis=0)
+ radii[radii > max_distances] = 0
+ return radii
+
+ def evaluate(self, features: np.ndarray, radii: np.ndarray, eval_features: np.ndarray):
+ """
+        Evaluate whether new feature vectors lie on the estimated manifold.
+ """
+ num_eval_images = eval_features.shape[0]
+ num_ref_images = radii.shape[0]
+ distance_batch = np.zeros([self.row_batch_size, num_ref_images], dtype=np.float32)
+ batch_predictions = np.zeros([num_eval_images, self.num_nhoods], dtype=np.int32)
+ max_realism_score = np.zeros([num_eval_images], dtype=np.float32)
+ nearest_indices = np.zeros([num_eval_images], dtype=np.int32)
+
+ for begin1 in range(0, num_eval_images, self.row_batch_size):
+ end1 = min(begin1 + self.row_batch_size, num_eval_images)
+ feature_batch = eval_features[begin1:end1]
+
+ for begin2 in range(0, num_ref_images, self.col_batch_size):
+ end2 = min(begin2 + self.col_batch_size, num_ref_images)
+ ref_batch = features[begin2:end2]
+
+ distance_batch[
+ 0 : end1 - begin1, begin2:end2
+ ] = self.distance_block.pairwise_distances(feature_batch, ref_batch)
+
+ # From the minibatch of new feature vectors, determine if they are in the estimated manifold.
+ # If a feature vector is inside a hypersphere of some reference sample, then
+ # the new sample lies at the estimated manifold.
+ # The radii of the hyperspheres are determined from distances of neighborhood size k.
+ samples_in_manifold = distance_batch[0 : end1 - begin1, :, None] <= radii
+ batch_predictions[begin1:end1] = np.any(samples_in_manifold, axis=1).astype(np.int32)
+
+ max_realism_score[begin1:end1] = np.max(
+ radii[:, 0] / (distance_batch[0 : end1 - begin1, :] + self.eps), axis=1
+ )
+ nearest_indices[begin1:end1] = np.argmin(distance_batch[0 : end1 - begin1, :], axis=1)
+
+ return {
+ "fraction": float(np.mean(batch_predictions)),
+ "batch_predictions": batch_predictions,
+ "max_realisim_score": max_realism_score,
+ "nearest_indices": nearest_indices,
+ }
+
+ def evaluate_pr(
+ self,
+ features_1: np.ndarray,
+ radii_1: np.ndarray,
+ features_2: np.ndarray,
+ radii_2: np.ndarray,
+ ) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ Evaluate precision and recall efficiently.
+
+ :param features_1: [N1 x D] feature vectors for reference batch.
+ :param radii_1: [N1 x K1] radii for reference vectors.
+ :param features_2: [N2 x D] feature vectors for the other batch.
+        :param radii_2: [N2 x K2] radii for other vectors.
+ :return: a tuple of arrays for (precision, recall):
+ - precision: an np.ndarray of length K1
+ - recall: an np.ndarray of length K2
+ """
+        features_1_status = np.zeros([len(features_1), radii_2.shape[1]], dtype=bool)
+        features_2_status = np.zeros([len(features_2), radii_1.shape[1]], dtype=bool)
+ for begin_1 in range(0, len(features_1), self.row_batch_size):
+ end_1 = begin_1 + self.row_batch_size
+ batch_1 = features_1[begin_1:end_1]
+ for begin_2 in range(0, len(features_2), self.col_batch_size):
+ end_2 = begin_2 + self.col_batch_size
+ batch_2 = features_2[begin_2:end_2]
+ batch_1_in, batch_2_in = self.distance_block.less_thans(
+ batch_1, radii_1[begin_1:end_1], batch_2, radii_2[begin_2:end_2]
+ )
+ features_1_status[begin_1:end_1] |= batch_1_in
+ features_2_status[begin_2:end_2] |= batch_2_in
+ return (
+ np.mean(features_2_status.astype(np.float64), axis=0),
+ np.mean(features_1_status.astype(np.float64), axis=0),
+ )
+
+
+class DistanceBlock:
+ """
+ Calculate pairwise distances between vectors.
+
+ Adapted from https://github.com/kynkaat/improved-precision-and-recall-metric/blob/f60f25e5ad933a79135c783fcda53de30f42c9b9/precision_recall.py#L34
+ """
+
+ def __init__(self, session):
+ self.session = session
+
+ # Initialize TF graph to calculate pairwise distances.
+ with session.graph.as_default():
+ self._features_batch1 = tf.placeholder(tf.float32, shape=[None, None])
+ self._features_batch2 = tf.placeholder(tf.float32, shape=[None, None])
+ distance_block_16 = _batch_pairwise_distances(
+ tf.cast(self._features_batch1, tf.float16),
+ tf.cast(self._features_batch2, tf.float16),
+ )
+ self.distance_block = tf.cond(
+ tf.reduce_all(tf.math.is_finite(distance_block_16)),
+ lambda: tf.cast(distance_block_16, tf.float32),
+ lambda: _batch_pairwise_distances(self._features_batch1, self._features_batch2),
+ )
+
+ # Extra logic for less thans.
+ self._radii1 = tf.placeholder(tf.float32, shape=[None, None])
+ self._radii2 = tf.placeholder(tf.float32, shape=[None, None])
+ dist32 = tf.cast(self.distance_block, tf.float32)[..., None]
+ self._batch_1_in = tf.math.reduce_any(dist32 <= self._radii2, axis=1)
+ self._batch_2_in = tf.math.reduce_any(dist32 <= self._radii1[:, None], axis=0)
+
+ def pairwise_distances(self, U, V):
+ """
+ Evaluate pairwise distances between two batches of feature vectors.
+ """
+ return self.session.run(
+ self.distance_block,
+ feed_dict={self._features_batch1: U, self._features_batch2: V},
+ )
+
+ def less_thans(self, batch_1, radii_1, batch_2, radii_2):
+ return self.session.run(
+ [self._batch_1_in, self._batch_2_in],
+ feed_dict={
+ self._features_batch1: batch_1,
+ self._features_batch2: batch_2,
+ self._radii1: radii_1,
+ self._radii2: radii_2,
+ },
+ )
+
+
+def _batch_pairwise_distances(U, V):
+ """
+ Compute pairwise distances between two batches of feature vectors.
+ """
+ with tf.variable_scope("pairwise_dist_block"):
+ # Squared norms of each row in U and V.
+ norm_u = tf.reduce_sum(tf.square(U), 1)
+ norm_v = tf.reduce_sum(tf.square(V), 1)
+
+ # norm_u as a column and norm_v as a row vectors.
+ norm_u = tf.reshape(norm_u, [-1, 1])
+ norm_v = tf.reshape(norm_v, [1, -1])
+
+ # Pairwise squared Euclidean distances.
+ D = tf.maximum(norm_u - 2 * tf.matmul(U, V, False, True) + norm_v, 0.0)
+
+ return D
+
+
+class NpzArrayReader(ABC):
+ @abstractmethod
+ def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
+ pass
+
+ @abstractmethod
+ def remaining(self) -> int:
+ pass
+
+ def read_batches(self, batch_size: int) -> Iterable[np.ndarray]:
+ def gen_fn():
+ while True:
+ batch = self.read_batch(batch_size)
+ if batch is None:
+ break
+ yield batch
+
+ rem = self.remaining()
+ num_batches = rem // batch_size + int(rem % batch_size != 0)
+ return BatchIterator(gen_fn, num_batches)
+
+
+class BatchIterator:
+ def __init__(self, gen_fn, length):
+ self.gen_fn = gen_fn
+ self.length = length
+
+ def __len__(self):
+ return self.length
+
+ def __iter__(self):
+ return self.gen_fn()
+
+
+class StreamingNpzArrayReader(NpzArrayReader):
+ def __init__(self, arr_f, shape, dtype):
+ self.arr_f = arr_f
+ self.shape = shape
+ self.dtype = dtype
+ self.idx = 0
+
+ def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
+ if self.idx >= self.shape[0]:
+ return None
+
+ bs = min(batch_size, self.shape[0] - self.idx)
+ self.idx += bs
+
+ if self.dtype.itemsize == 0:
+ return np.ndarray([bs, *self.shape[1:]], dtype=self.dtype)
+
+ read_count = bs * np.prod(self.shape[1:])
+ read_size = int(read_count * self.dtype.itemsize)
+ data = _read_bytes(self.arr_f, read_size, "array data")
+ return np.frombuffer(data, dtype=self.dtype).reshape([bs, *self.shape[1:]])
+
+ def remaining(self) -> int:
+ return max(0, self.shape[0] - self.idx)
+
+
+class MemoryNpzArrayReader(NpzArrayReader):
+ def __init__(self, arr):
+ self.arr = arr
+ self.idx = 0
+
+ @classmethod
+ def load(cls, path: str, arr_name: str):
+ with open(path, "rb") as f:
+ arr = np.load(f)[arr_name]
+ return cls(arr)
+
+ def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
+ if self.idx >= self.arr.shape[0]:
+ return None
+
+ res = self.arr[self.idx : self.idx + batch_size]
+ self.idx += batch_size
+ return res
+
+ def remaining(self) -> int:
+ return max(0, self.arr.shape[0] - self.idx)
+
+
+@contextmanager
+def open_npz_array(path: str, arr_name: str) -> NpzArrayReader:
+ with _open_npy_file(path, arr_name) as arr_f:
+ version = np.lib.format.read_magic(arr_f)
+ if version == (1, 0):
+ header = np.lib.format.read_array_header_1_0(arr_f)
+ elif version == (2, 0):
+ header = np.lib.format.read_array_header_2_0(arr_f)
+ else:
+ yield MemoryNpzArrayReader.load(path, arr_name)
+ return
+ shape, fortran, dtype = header
+ if fortran or dtype.hasobject:
+ yield MemoryNpzArrayReader.load(path, arr_name)
+ else:
+ yield StreamingNpzArrayReader(arr_f, shape, dtype)
+
+
+def _read_bytes(fp, size, error_template="ran out of data"):
+ """
+ Copied from: https://github.com/numpy/numpy/blob/fb215c76967739268de71aa4bda55dd1b062bc2e/numpy/lib/format.py#L788-L886
+
+ Read from file-like object until size bytes are read.
+    Raises ValueError if EOF is encountered before size bytes are read.
+ Non-blocking objects only supported if they derive from io objects.
+ Required as e.g. ZipExtFile in python 2.6 can return less data than
+ requested.
+ """
+ data = bytes()
+ while True:
+ # io files (default in python3) return None or raise on
+ # would-block, python2 file will truncate, probably nothing can be
+ # done about that. note that regular files can't be non-blocking
+ try:
+ r = fp.read(size - len(data))
+ data += r
+ if len(r) == 0 or len(data) == size:
+ break
+ except io.BlockingIOError:
+ pass
+ if len(data) != size:
+ msg = "EOF: reading %s, expected %d bytes got %d"
+ raise ValueError(msg % (error_template, size, len(data)))
+ else:
+ return data
+
+
+@contextmanager
+def _open_npy_file(path: str, arr_name: str):
+ with open(path, "rb") as f:
+ with zipfile.ZipFile(f, "r") as zip_f:
+ if f"{arr_name}.npy" not in zip_f.namelist():
+ raise ValueError(f"missing {arr_name} in npz file")
+ with zip_f.open(f"{arr_name}.npy", "r") as arr_f:
+ yield arr_f
+
+
+def _download_inception_model():
+ if os.path.exists(INCEPTION_V3_PATH):
+ return
+ print("downloading InceptionV3 model...")
+ with requests.get(INCEPTION_V3_URL, stream=True) as r:
+ r.raise_for_status()
+ tmp_path = INCEPTION_V3_PATH + ".tmp"
+ with open(tmp_path, "wb") as f:
+ for chunk in tqdm(r.iter_content(chunk_size=8192)):
+ f.write(chunk)
+ os.rename(tmp_path, INCEPTION_V3_PATH)
+
+
+def _create_feature_graph(input_batch):
+ _download_inception_model()
+ prefix = f"{random.randrange(2**32)}_{random.randrange(2**32)}"
+ with open(INCEPTION_V3_PATH, "rb") as f:
+ graph_def = tf.GraphDef()
+ graph_def.ParseFromString(f.read())
+ pool3, spatial = tf.import_graph_def(
+ graph_def,
+ input_map={f"ExpandDims:0": input_batch},
+ return_elements=[FID_POOL_NAME, FID_SPATIAL_NAME],
+ name=prefix,
+ )
+ _update_shapes(pool3)
+ spatial = spatial[..., :7]
+ return pool3, spatial
+
+
+def _create_softmax_graph(input_batch):
+ _download_inception_model()
+ prefix = f"{random.randrange(2**32)}_{random.randrange(2**32)}"
+ with open(INCEPTION_V3_PATH, "rb") as f:
+ graph_def = tf.GraphDef()
+ graph_def.ParseFromString(f.read())
+ (matmul,) = tf.import_graph_def(
+ graph_def, return_elements=[f"softmax/logits/MatMul"], name=prefix
+ )
+ w = matmul.inputs[1]
+ logits = tf.matmul(input_batch, w)
+ return tf.nn.softmax(logits)
+
+
+def _update_shapes(pool3):
+ # https://github.com/bioinf-jku/TTUR/blob/73ab375cdf952a12686d9aa7978567771084da42/fid.py#L50-L63
+ ops = pool3.graph.get_operations()
+ for op in ops:
+ for o in op.outputs:
+ shape = o.get_shape()
+ if shape._dims is not None: # pylint: disable=protected-access
+ # shape = [s.value for s in shape] TF 1.x
+ shape = [s for s in shape] # TF 2.x
+ new_shape = []
+ for j, s in enumerate(shape):
+ if s == 1 and j == 0:
+ new_shape.append(None)
+ else:
+ new_shape.append(s)
+ o.__dict__["_shape_val"] = tf.TensorShape(new_shape)
+ return pool3
+
+
+def _numpy_partition(arr, kth, **kwargs):
+ num_workers = min(cpu_count(), len(arr))
+ chunk_size = len(arr) // num_workers
+ extra = len(arr) % num_workers
+
+ start_idx = 0
+ batches = []
+ for i in range(num_workers):
+ size = chunk_size + (1 if i < extra else 0)
+ batches.append(arr[start_idx : start_idx + size])
+ start_idx += size
+
+ with ThreadPool(num_workers) as pool:
+ return list(pool.map(partial(np.partition, kth=kth, **kwargs), batches))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/__init__.py b/evaluations/fidkid-pytorch/3d-metrics/evals/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/feature_extractor.cpython-39.pyc b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/feature_extractor.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..53ba7f74caa240b0958c88d3b0972d2a814113cc
Binary files /dev/null and b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/feature_extractor.cpython-39.pyc differ
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/npz_stream.cpython-39.pyc b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/npz_stream.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6e648ea5b507fb2f2f1c9387073a4833c076349b
Binary files /dev/null and b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/npz_stream.cpython-39.pyc differ
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/pointnet2_cls_ssg.cpython-39.pyc b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/pointnet2_cls_ssg.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b5937f9c49fa6b4d2ca12f68efa31e82b236a9c1
Binary files /dev/null and b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/pointnet2_cls_ssg.cpython-39.pyc differ
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/pointnet2_utils.cpython-39.pyc b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/pointnet2_utils.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..05e1f361a8470839bba54c29011af573bb44893d
Binary files /dev/null and b/evaluations/fidkid-pytorch/3d-metrics/evals/__pycache__/pointnet2_utils.cpython-39.pyc differ
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/compute_cd.py b/evaluations/fidkid-pytorch/3d-metrics/evals/compute_cd.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c17aea7762184cf0cd7b20fdf4c194ac0868c4a
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/compute_cd.py
@@ -0,0 +1,396 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+import json
+import argparse
+import os
+import random
+import glob
+from functools import partial
+from math import cos, sin, radians
+from pathlib import Path
+from pdb import set_trace as st
+
+import numpy as np
+import torch
+import pandas as pd
+from tqdm import tqdm
+import kaolin as kal
+import point_cloud_utils as pcu
+import ipdb
+
+# unused, already matched
+# varyData = [
+# ["X", 270],
+# ["Z", 180],
+# ]
+
+def rotate_point_cloud(point_cloud, rotations):
+ """
+ Rotates a point cloud along specified axes by the given angles.
+ :param point_cloud: Nx3 numpy array of points
+ :param rotations: list of tuples [(axis, angle_in_degrees), ...]
+ Example: [('x', 90), ('y', 45)] for composite rotations
+ :return: Rotated point cloud as Nx3 numpy array
+ """
+ rotated_cloud = point_cloud.copy()
+ for axis, angle in rotations:
+ angle_rad = np.radians(angle) # Convert degrees to radians
+ R = rotation_matrix(axis, angle_rad)
+ rotated_cloud = np.dot(rotated_cloud, R.T) # Apply rotation matrix
+
+ return rotated_cloud
+
+
+# transformation to align all results in the same canonical space
+transformation_dict = {
+ 'gso': partial(rotate_point_cloud, rotations=[('x', 0)]), # no transformation
+ 'LGM_fixpose': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'CRM/Animals': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'Lara': partial(rotate_point_cloud, rotations=[('x', -110), ('z', 33)]),
+ 'ln3diff-lite/Animals': partial(rotate_point_cloud, rotations=[('x', 90)]),
+ 'One-2-3-45/Animals': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'splatter-img': partial(rotate_point_cloud, rotations=[('x', -60)]),
+ #
+ 'OpenLRM/Animals': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'shape-e/Animals': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ #
+ 'objv-gt': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'GA': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ # un-aligned
+ 'scale3d/eval/eval_nerf/Animals': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'scale3d/eval/eval_mesh/Animals': partial(rotate_point_cloud, rotations=[('x', 180), ('z', 180)]),
+}
+
+def VaryPoint(data, axis, degree):
+    # Rotate the points about axis 'X', 'Y', or 'Z' by the given degree.
+ xyzArray = {
+ 'X': np.array([[1, 0, 0],
+ [0, cos(radians(degree)), -sin(radians(degree))],
+ [0, sin(radians(degree)), cos(radians(degree))]]),
+ 'Y': np.array([[cos(radians(degree)), 0, sin(radians(degree))],
+ [0, 1, 0],
+ [-sin(radians(degree)), 0, cos(radians(degree))]]),
+ 'Z': np.array([[cos(radians(degree)), -sin(radians(degree)), 0],
+ [sin(radians(degree)), cos(radians(degree)), 0],
+ [0, 0, 1]])}
+ newData = np.dot(data, xyzArray[axis])
+ return newData
+
+
+def seed_everything(seed):
+ if seed < 0:
+ return
+ torch.manual_seed(seed)
+ np.random.seed(seed)
+ random.seed(seed)
+
+def read_pcd(name, n_sample=2048):
+ v = pcu.load_mesh_v(name)
+ point_clouds = np.random.permutation(v)[:n_sample, :]
+ return torch.from_numpy(point_clouds).unsqueeze(0)
+
+def get_score(results, use_same_numer_for_test=False):
+    # `results` is a pairwise Chamfer-distance matrix between the reference
+    # set (rows) and the generated set (columns).
+    if use_same_numer_for_test:
+        results = results[:, :results.shape[0]]
+    # MMD: mean over references of the distance to the nearest generated shape.
+    mmd = results.min(axis=1).mean()
+    # COV: fraction of references that are the nearest neighbour of at least
+    # one generated shape.
+    min_ref = results.argmin(axis=0)
+    unique_idx = np.unique(min_ref)
+    cov = float(len(unique_idx)) / results.shape[0]
+
+    mmd = mmd * 1000  # scale Chamfer distance by 1e3 for reporting
+
+    return mmd, cov * 100
+
+
+def rotation_matrix(axis, angle):
+ """
+ Returns a rotation matrix for a given axis and angle in radians.
+ :param axis: str, the axis to rotate around ('x', 'y', or 'z')
+ :param angle: float, the rotation angle in radians
+ :return: 3x3 rotation matrix
+ """
+ if axis == 'x':
+ return np.array([[1, 0, 0],
+ [0, np.cos(angle), -np.sin(angle)],
+ [0, np.sin(angle), np.cos(angle)]])
+ elif axis == 'y':
+ return np.array([[np.cos(angle), 0, np.sin(angle)],
+ [0, 1, 0],
+ [-np.sin(angle), 0, np.cos(angle)]])
+ elif axis == 'z':
+ return np.array([[np.cos(angle), -np.sin(angle), 0],
+ [np.sin(angle), np.cos(angle), 0],
+ [0, 0, 1]])
+ else:
+ raise ValueError("Axis must be 'x', 'y', or 'z'.")
+
+
+
+def scale_to_unit_sphere(points, center=None):
+    # Center the [B, N, 3] cloud at its bounding-box midpoint, then scale so
+    # the farthest point lies on the unit sphere.
+    midpoints = (torch.max(points, axis=1)[0] + torch.min(points, axis=1)[0]) / 2
+    points = points - midpoints
+    scale = torch.max(torch.sqrt(torch.sum(points ** 2, axis=2)))
+    points = points / scale
+    return points
+
+def sample_point_with_mesh_name(method_name, name, n_sample=2048, normalized_scale=1.0, rotate_degree=-90):
+    try:
+        # Preferred path: load as a mesh and sample points on the surface.
+        mesh_1 = kal.io.obj.import_mesh(name)
+        if mesh_1.vertices.shape[0] == 0:
+            return None
+        mesh_v1 = mesh_1.vertices.cuda()
+        mesh_f1 = mesh_1.faces.cuda()
+        points, _ = kal.ops.mesh.sample_points(mesh_v1.unsqueeze(dim=0), mesh_f1, n_sample)
+        points = scale_to_unit_sphere(points)
+        return points.cuda()
+    except:
+        # Fallback: treat the file as a raw point cloud (e.g. .ply) and
+        # subsample it.
+        v = pcu.load_mesh_v(name)
+        point_clouds = np.random.permutation(v)[:n_sample, :]
+        point_clouds = torch.from_numpy(point_clouds).float().cuda().unsqueeze(dim=0)
+        point_clouds = scale_to_unit_sphere(point_clouds)
+
+        # Rotate each method's output into the shared canonical space before
+        # computing distances.
+        pcd_transform = transformation_dict[method_name]
+        point_clouds = pcd_transform(point_clouds[0].cpu().numpy())
+        return torch.from_numpy(point_clouds).float().cuda().unsqueeze(dim=0)
+
+
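+# Builds the full [n_ref x n_gen] Chamfer-distance matrix on the GPU, batching over
+# generated samples for each reference shape.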
+def chamfer_distance(method_name, ref_name, ref_pcs, sample_pcs, batch_size, save_name):
+ all_rec_pcs = []
+ n_sample = 2048
+ normalized_scale = 1.0
+
+ # Reference clouds are cached to gt.pth so repeated runs skip re-sampling.
+ if os.path.exists(os.path.join(save_name, 'gt.pth')):
+ all_rec_pcs = torch.load(os.path.join(save_name, 'gt.pth')).to('cuda')
+ else:
+ for name in tqdm(ref_pcs):
+ all_rec_pcs.append(sample_point_with_mesh_name(ref_name, name, n_sample, normalized_scale=normalized_scale, rotate_degree=0))
+ all_rec_pcs = [p for p in all_rec_pcs if p is not None]
+ all_rec_pcs = torch.cat(all_rec_pcs, dim=0).to('cuda')
+ os.makedirs(os.path.join(save_name), exist_ok=True)
+ torch.save(all_rec_pcs, os.path.join(save_name, 'gt.pth'))
+
+ # Generated clouds are always recomputed here; restore the existence check on sample.pth to reuse the cache.
+ if False:
+ all_sample_pcs = torch.load(os.path.join(save_name, 'sample.pth')).to('cuda')
+ else:
+ all_sample_pcs = []
+ for name in tqdm(sample_pcs):
+ all_sample_pcs.append(sample_point_with_mesh_name(method_name, name, n_sample, normalized_scale=normalized_scale, rotate_degree=0))
+
+ all_sample_pcs = [p for p in all_sample_pcs if p is not None]
+ all_sample_pcs = torch.cat(all_sample_pcs, dim=0).to('cuda')
+
+ os.makedirs(os.path.join(save_name), exist_ok=True)
+ torch.save(all_sample_pcs, os.path.join(save_name, 'sample.pth'))
+
+ print('data preparation done')
+ all_cd = []
+ for i_ref_p in tqdm(range(len(all_rec_pcs))):
+ ref_p = all_rec_pcs[i_ref_p]
+ cd_lst = []
+ # Iterate over the filtered samples, not the raw file list, which may be longer if some loads failed.
+ for sample_b_start in range(0, len(all_sample_pcs), batch_size):
+ sample_b_end = min(len(all_sample_pcs), sample_b_start + batch_size)
+ sample_batch = all_sample_pcs[sample_b_start:sample_b_end]
+
+ batch_size_sample = sample_batch.size(0)
+ chamfer = kal.metrics.pointcloud.chamfer_distance(
+ ref_p.unsqueeze(dim=0).expand(batch_size_sample, -1, -1),
+ sample_batch)
+ cd_lst.append(chamfer)
+ cd_lst = torch.cat(cd_lst, dim=0)
+ all_cd.append(cd_lst.unsqueeze(dim=0))
+ all_cd = torch.cat(all_cd, dim=0)
+ return all_cd
+
+
+def compute_all_metrics(method_name, ref_name, sample_pcs, ref_pcs, batch_size, save_name=None):
+ results = chamfer_distance(method_name, ref_name, ref_pcs, sample_pcs, batch_size, save_name).data.cpu().numpy()
+ cd_mmd, cd_cov = get_score(results, use_same_number_for_test=False)
+ print('cov,mmd:', (cd_cov, cd_mmd, save_name))
+
+
+
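+# End-to-end evaluation: select 600 Objaverse 'Animals' references, glob one method's
+# generated .ply files, and report COV / MMD from the Chamfer-distance matrix.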
+def evaluate(args):
+ # Fix the random seed for reproducibility (GA originally used seed 0).
+ seed_everything(42)
+
+ gen_path_base = '/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps'
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ all_objs = dataset_json['Animals'][::3][1100:2200][:600] # pick the top 600 instances.
+
+ ref_path = [os.path.join(args.dataset_path, f"{obj.replace('/', '-')}_pcd_4096.ply") for obj in all_objs]
+
+ gen_path = args.gen_path
+ # Method / reference names are the paths relative to the shared output root.
+ method_name = str(Path(gen_path).relative_to(gen_path_base))
+ ref_name = str(Path(args.dataset_path).relative_to(gen_path_base))
+
+ gen_models = sorted(glob.glob(os.path.join(gen_path, '*.ply')))
+ gen_models = gen_models[:args.n_shape]
+ with torch.no_grad():
+ compute_all_metrics(method_name, ref_name, gen_models, ref_path, args.batch_size, args.save_name)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--save_name", type=str, default='/mnt/petrelfs/caoziang/3D_generation/cmetric/get3d/omni_final_surface', help="path to save the results")
+ parser.add_argument("--dataset_path", type=str, default='/mnt/petrelfs/share_data/wutong/DATA/OO3D/ply_files/4096', help="path to the reference dataset")
+ parser.add_argument("--gen_path", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel11/ddpm_5/test', help="path to the generated models")
+ parser.add_argument("--n_points", type=int, default=2048, help="number of points used for evaluation")
+ parser.add_argument("--batch_size", type=int, default=100, help="batch size for computing the chamfer distance")
+ parser.add_argument("--n_shape", type=int, default=7500, help="number of shapes used for evaluation")
+ parser.add_argument("--use_npz", type=bool, default=False, help="whether the generated shapes are stored as npz (note: argparse type=bool treats any non-empty string as True)")
+ args = parser.parse_args()
+ evaluate(args)
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/eval_cd.sh b/evaluations/fidkid-pytorch/3d-metrics/evals/eval_cd.sh
new file mode 100644
index 0000000000000000000000000000000000000000..b70a58266dd00b732280d2f4e220b874427041d4
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/eval_cd.sh
@@ -0,0 +1,26 @@
+
+# for method_name in ln3diff-fixpose_192 CRM LGM_fixpose
+# for method_name in GA splatter-img LGM ln3diff-lite/Animals CRM/Animals Lara OpenLRM/Animals One-2-3-45/Animals shape-e/Animals
+
+# for method_name in scale3d/eval/eval_nerf/Animals scale3d/eval/eval_mesh/Animals
+for method_name in LGM_fixpose
+
+# for method_name in shape-e/Animals GA splatter-img LGM ln3diff-lite/Animals CRM/Animals Lara OpenLRM/Animals One-2-3-45/Animals CRM/Animals
+
+do
+
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+
+gen_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps
+objv_path="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/objv-gt"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-cd/
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-cd-noaug-3/
+
+python compute_cd.py \
+ --n_shape 600 \
+ --n_points 2048 \
+ --dataset_path ${objv_path} \
+ --gen_path ${gen_path}/${method_name} \
+ --save_name ${output_path}/${method_name}
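+# The gt.pth / sample.pth Chamfer caches written by compute_cd.py land under ${output_path}/${method_name}.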
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/eval_fpd_is.sh b/evaluations/fidkid-pytorch/3d-metrics/evals/eval_fpd_is.sh
new file mode 100644
index 0000000000000000000000000000000000000000..cef8cc31a781e5f362edd766bcc6dc412f22affa
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/eval_fpd_is.sh
@@ -0,0 +1,46 @@
+
+set -x
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+gso_pcd='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics-fps/gso'
+
+# gso_rendering="gso-rendering"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics-fps
+
+# ! for calculating direct fps sampling from surfel gs
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir
+
+# ! for pointnet models
+cache_dir=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics/cache
+
+# method_name=LGM
+# for method_name in LGM CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+
+# ! eval gso
+# for method_name in LGM CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img \
+# GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh \
+# GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD \
+
+# do
+
+# python fpd_eval.py \
+# $gso_pcd \
+# ${output_path}/${method_name} \
+# --cache_dir ${cache_dir} \
+
+# done
+
+# ! eval g-objv
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps
+
+for method_name in CRM/Animals Lara ln3diff GA/stage-2/dino_img/ditl-fromditlPCD \
+
+do
+
+python fpd_eval.py \
+ $gso_pcd \
+ ${output_path}/${method_name} \
+ --cache_dir ${cache_dir} \
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/eval_fpd_is_objv.sh b/evaluations/fidkid-pytorch/3d-metrics/evals/eval_fpd_is_objv.sh
new file mode 100644
index 0000000000000000000000000000000000000000..e7af22d8061a5172e7f08edd305d74f248773c14
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/eval_fpd_is_objv.sh
@@ -0,0 +1,51 @@
+
+set -x
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+gso_pcd='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics-fps/gso'
+
+# gso_rendering="gso-rendering"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics-fps
+
+# ! for calculating direct fps sampling from surfel gs
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir
+
+# ! for pointnet models
+cache_dir=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics/cache
+
+# method_name=LGM
+# for method_name in LGM CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+
+# ! eval gso
+# for method_name in LGM CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img \
+# GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh \
+# GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD \
+
+# do
+
+# python fpd_eval.py \
+# $gso_pcd \
+# ${output_path}/${method_name} \
+# --cache_dir ${cache_dir} \
+
+# done
+
+# ! eval g-objv
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/
+objv_path="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/objv-gt"
+
+# for method_name in CRM/Animals Lara
+# for method_name in GA splatter-img shape-e/Animals LGM_fixpose ln3diff-lite/Animals Lara OpenLRM/Animals One-2-3-45/Animals
+# for method_name in LGM_fixpose
+
+for method_name in CRM/Animals scale3d/eval/eval_nerf/Animals scale3d/eval/eval_mesh/Animals
+
+do
+
+python fpd_eval_objv.py \
+ $objv_path \
+ ${output_path}/${method_name} \
+ --cache_dir ${cache_dir} \
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/feature_extractor.py b/evaluations/fidkid-pytorch/3d-metrics/evals/feature_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..38bababff4db86e9c220faddfac38ef2f6676e1c
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/feature_extractor.py
@@ -0,0 +1,130 @@
+from abc import ABC, abstractmethod
+from multiprocessing.pool import ThreadPool
+from typing import List, Optional, Tuple, Union
+from tqdm import tqdm
+
+from pdb import set_trace as st
+import numpy as np
+import torch
+
+from point_e.models.download import load_checkpoint
+
+from npz_stream import NpzStreamer
+from pointnet2_cls_ssg import get_model
+
+
+def get_torch_devices() -> List[Union[str, torch.device]]:
+ if torch.cuda.is_available():
+ return [torch.device(f"cuda:{i}") for i in range(torch.cuda.device_count())]
+ else:
+ return ["cpu"]
+
+
+class FeatureExtractor(ABC):
+ @property
+ @abstractmethod
+ def supports_predictions(self) -> bool:
+ pass
+
+ @property
+ @abstractmethod
+ def feature_dim(self) -> int:
+ pass
+
+ @property
+ @abstractmethod
+ def num_classes(self) -> int:
+ pass
+
+ @abstractmethod
+ def features_and_preds(self, streamer: NpzStreamer) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ For a stream of point cloud batches, compute feature vectors and class
+ predictions.
+
+ :param streamer: a streamer over sample batches. Typically, arr_0
+ will contain the XYZ coordinates.
+ :return: a tuple (features, predictions)
+ - features: a [B x feature_dim] array of feature vectors.
+ - predictions: a [B x num_classes] array of probabilities.
+ """
+
+
+class PointNetClassifier(FeatureExtractor):
+ def __init__(
+ self,
+ devices: List[Union[str, torch.device]],
+ device_batch_size: int = 64,
+ cache_dir: Optional[str] = None,
+ ):
+ state_dict = load_checkpoint("pointnet", device=torch.device("cpu"), cache_dir=cache_dir)[
+ "model_state_dict"
+ ]
+
+ self.device_batch_size = device_batch_size
+ self.devices = devices
+ # A single CUDA model is used; the upstream per-device model pool is not needed here.
+ model = get_model(num_class=40, normal_channel=False, width_mult=2)
+ model.load_state_dict(state_dict)
+ model.to('cuda')
+ model.eval()
+ self.model = model
+
+ @property
+ def supports_predictions(self) -> bool:
+ return True
+
+ @property
+ def feature_dim(self) -> int:
+ return 256
+
+ @property
+ def num_classes(self) -> int:
+ return 40
+
+ def features_and_preds(self, streamer) -> Tuple[np.ndarray, np.ndarray]:
+ # `streamer` is a PyTorch DataLoader that yields [B, N, 3] point-cloud batches.
+ point_clouds = streamer
+ device = 'cuda'
+
+ output_features = []
+ output_predictions = []
+
+ for _, batch in enumerate(tqdm(point_clouds)): # type: ignore
+ batch = batch.to(dtype=torch.float32, device=device).permute(0, 2, 1) # B 3 L
+
+ with torch.no_grad():
+ logits, _, features = self.model(batch, features=True)
+ output_features.append(features.cpu().numpy())
+ output_predictions.append(logits.exp().cpu().numpy())
+
+ return np.concatenate(output_features, axis=0), np.concatenate(output_predictions, axis=0)
+
+
+def normalize_point_clouds(pc: np.ndarray) -> np.ndarray:
+ centroids = np.mean(pc, axis=1, keepdims=True)
+ pc = pc - centroids
+ m = np.max(np.sqrt(np.sum(pc**2, axis=-1, keepdims=True)), axis=1, keepdims=True)
+ pc = pc / m
+ return pc
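+# normalize_point_clouds centres each cloud and rescales it so the farthest point lies
+# on the unit sphere; it is a batched counterpart of pc_normalize in pointnet2_utils.py.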
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/fid_is.py b/evaluations/fidkid-pytorch/3d-metrics/evals/fid_is.py
new file mode 100644
index 0000000000000000000000000000000000000000..176d06309d0d57f979a669d416893bd423e3f526
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/fid_is.py
@@ -0,0 +1,81 @@
+"""
+Adapted from https://github.com/openai/guided-diffusion/blob/22e0df8183507e13a7813f8d38d51b072ca1e67c/evaluations/evaluator.py
+"""
+
+
+import warnings
+
+import numpy as np
+from scipy import linalg
+
+
+class InvalidFIDException(Exception):
+ pass
+
+
+class FIDStatistics:
+ def __init__(self, mu: np.ndarray, sigma: np.ndarray):
+ self.mu = mu
+ self.sigma = sigma
+
+ def frechet_distance(self, other, eps=1e-6):
+ """
+ Compute the Frechet distance between two sets of statistics.
+ """
+ # https://github.com/bioinf-jku/TTUR/blob/73ab375cdf952a12686d9aa7978567771084da42/fid.py#L132
+ mu1, sigma1 = self.mu, self.sigma
+ mu2, sigma2 = other.mu, other.sigma
+
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert (
+ mu1.shape == mu2.shape
+ ), f"Training and test mean vectors have different lengths: {mu1.shape}, {mu2.shape}"
+ assert (
+ sigma1.shape == sigma2.shape
+ ), f"Training and test covariances have different dimensions: {sigma1.shape}, {sigma2.shape}"
+
+ diff = mu1 - mu2
+
+ # product might be almost singular
+ covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = (
+ "fid calculation produces singular product; adding %s to diagonal of cov estimates"
+ % eps
+ )
+ warnings.warn(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError("Imaginary component {}".format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean
+
+
+def compute_statistics(feats: np.ndarray) -> FIDStatistics:
+ mu = np.mean(feats, axis=0)
+ sigma = np.cov(feats, rowvar=False)
+ return FIDStatistics(mu, sigma)
+
+
+def compute_inception_score(preds: np.ndarray, split_size: int = 5000) -> float:
+ # https://github.com/openai/improved-gan/blob/4f5d1ec5c16a7eceb206f42bfc652693601e1d5c/inception_score/model.py#L46
+ scores = []
+ for i in range(0, len(preds), split_size):
+ part = preds[i : i + split_size]
+ kl = part * (np.log(part) - np.log(np.expand_dims(np.mean(part, 0), 0)))
+ kl = np.mean(np.sum(kl, 1))
+ scores.append(np.exp(kl))
+ return float(np.mean(scores))
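+# Inception Score: exp of the mean KL divergence between per-sample predictions and
+# the marginal class distribution, computed per split and averaged over splits.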
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/fpd_eval.py b/evaluations/fidkid-pytorch/3d-metrics/evals/fpd_eval.py
new file mode 100644
index 0000000000000000000000000000000000000000..836ebf989f1513b70b4ca4b86775667fe367ed06
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/fpd_eval.py
@@ -0,0 +1,330 @@
+# https://raw.githubusercontent.com/3dlg-hcvc/omages/refs/heads/main/src/evals/fpd_eval.py
+import os
+import random
+from tqdm import tqdm
+import glob
+from pdb import set_trace as st
+import trimesh
+import sys
+import numpy as np
+import scipy # should be version 1.11.1
+import torch
+
+import argparse
+# from point_e.evals.feature_extractor import PointNetClassifier, get_torch_devices
+from feature_extractor import PointNetClassifier, get_torch_devices
+from point_e.evals.fid_is import compute_statistics
+from point_e.evals.fid_is import compute_inception_score
+from point_e.evals.npz_stream import NpzStreamer
+
+import numpy as np
+
+def rotation_matrix(axis, angle):
+ """
+ Returns a rotation matrix for a given axis and angle in radians.
+ :param axis: str, the axis to rotate around ('x', 'y', or 'z')
+ :param angle: float, the rotation angle in radians
+ :return: 3x3 rotation matrix
+ """
+ if axis == 'x':
+ return np.array([[1, 0, 0],
+ [0, np.cos(angle), -np.sin(angle)],
+ [0, np.sin(angle), np.cos(angle)]])
+ elif axis == 'y':
+ return np.array([[np.cos(angle), 0, np.sin(angle)],
+ [0, 1, 0],
+ [-np.sin(angle), 0, np.cos(angle)]])
+ elif axis == 'z':
+ return np.array([[np.cos(angle), -np.sin(angle), 0],
+ [np.sin(angle), np.cos(angle), 0],
+ [0, 0, 1]])
+ else:
+ raise ValueError("Axis must be 'x', 'y', or 'z'.")
+
+
+def rotate_point_cloud(point_cloud, rotations):
+ """
+ Rotates a point cloud along specified axes by the given angles.
+ :param point_cloud: Nx3 numpy array of points
+ :param rotations: list of tuples [(axis, angle_in_degrees), ...]
+ Example: [('x', 90), ('y', 45)] for composite rotations
+ :return: Rotated point cloud as Nx3 numpy array
+ """
+ rotated_cloud = point_cloud.copy()
+ for axis, angle in rotations:
+ angle_rad = np.radians(angle) # Convert degrees to radians
+ R = rotation_matrix(axis, angle_rad)
+ rotated_cloud = np.dot(rotated_cloud, R.T) # Apply rotation matrix
+
+ return rotated_cloud
+
+from functools import partial
+# transformation dictionary
+transformation_dict = {
+ 'gso': partial(rotate_point_cloud, rotations=[('x', 0)]), # no transformation
+ 'LGM': partial(rotate_point_cloud, rotations=[('x', 90)]),
+ 'CRM': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'Lara': partial(rotate_point_cloud, rotations=[('x', -110), ('z', 33)]),
+ 'ln3diff': partial(rotate_point_cloud, rotations=[('x', 90)]),
+ 'One-2-3-45': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'splatter-img': partial(rotate_point_cloud, rotations=[('x', -60)]),
+ #
+ 'OpenLRM': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'shape-e': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ # un-aligned
+ 'ditl-fromditlPCD-fixPose-tomesh': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'ditl-fromditlPCD-fixPose-tomesh-ditxlPCD': partial(rotate_point_cloud, rotations=[('x', 0)]),
+}
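+# Each entry rotates one method's outputs into the GSO canonical frame (angles set per
+# method) so P-FID / P-KID compare geometry rather than global pose.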
+
+
+class PFID_evaluator():
+ def __init__(self, devices=['cuda:0'], batch_size=256, cache_dir='~/.temp/PFID_evaluator'):
+ self.__dict__.update(locals())
+ cache_dir = os.path.expanduser(cache_dir)
+ if not os.path.exists(cache_dir):
+ os.makedirs(cache_dir)
+ self.devices = [torch.device(d) for d in devices]
+ self.clf = PointNetClassifier(devices=self.devices, cache_dir=cache_dir, device_batch_size=self.batch_size)
+
+ def compute_pfid(self, pc_1, pc_2, return_feature=False):
+
+ # print("computing first batch activations")
+ # save clouds to npz files
+ npz_path1 = os.path.join(self.cache_dir, "temp1.npz")
+ npz_path2 = os.path.join(self.cache_dir, "temp2.npz")
+ np.savez(npz_path1, arr_0=pc_1)
+ np.savez(npz_path2, arr_0=pc_2)
+
+ features_1, _ = self.clf.features_and_preds(NpzStreamer(npz_path1))
+ stats_1 = compute_statistics(features_1)
+ # print(features_1.max(), features_1.min(), features_1.mean(), features_1.std() )
+ # print(stats_1.mu.shape, stats_1.sigma.shape)
+
+ features_2, _ = self.clf.features_and_preds(NpzStreamer(npz_path2))
+ stats_2 = compute_statistics(features_2)
+ # print(features_2.max(), features_2.min(), features_2.mean(), features_2.std() )
+ # print(stats_2.mu.shape, stats_2.sigma.shape)
+
+ if return_feature:
+ return features_1, features_2
+
+ #PFID = stats_1.frechet_distance(stats_2) # same result as the next line
+ PFID = frechet_distance(stats_1.mu, stats_1.sigma, stats_2.mu, stats_2.sigma)
+ PKID = kernel_distance(features_1, features_2)
+
+ print(f"P-FID: {PFID}", f"P-KID: {PKID}")
+ return dict(PFID=PFID, PKID=PKID)
+
+
+# from https://github.com/GaParmar/clean-fid/blob/main/cleanfid/fid.py
+"""
+Numpy implementation of the Frechet Distance.
+The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
+and X_2 ~ N(mu_2, C_2) is
+ d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
+Stable version by Danica J. Sutherland.
+Params:
+ mu1 : Numpy array containing the activations of a layer of the
+ inception net (like returned by the function 'get_predictions')
+ for generated samples.
+ mu2 : The sample mean over activations, precalculated on an
+ representative data set.
+ sigma1: The covariance matrix over activations for generated samples.
+ sigma2: The covariance matrix over activations, precalculated on an
+ representative data set.
+"""
+def frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert mu1.shape == mu2.shape, \
+ 'Training and test mean vectors have different lengths'
+ assert sigma1.shape == sigma2.shape, \
+ 'Training and test covariances have different dimensions'
+
+ diff = mu1 - mu2
+
+ # Product might be almost singular
+ covmean, _ = scipy.linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = ('fid calculation produces singular product; '
+ 'adding %s to diagonal of cov estimates') % eps
+ print(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = scipy.linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # Numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError('Imaginary component {}'.format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return (diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean)
+
+
+"""
+Compute the KID score given the sets of features
+"""
+def kernel_distance(feats1, feats2, num_subsets=100, max_subset_size=1000):
+ n = feats1.shape[1]
+ m = min(min(feats1.shape[0], feats2.shape[0]), max_subset_size)
+ t = 0
+ for _subset_idx in range(num_subsets):
+ x = feats2[np.random.choice(feats2.shape[0], m, replace=False)]
+ y = feats1[np.random.choice(feats1.shape[0], m, replace=False)]
+ a = (x @ x.T / n + 1) ** 3 + (y @ y.T / n + 1) ** 3
+ b = (x @ y.T / n + 1) ** 3
+ t += (a.sum() - np.diag(a).sum()) / (m - 1) - b.sum() * 2 / m
+ kid = t / num_subsets / m
+ return float(kid)
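+# KID: an unbiased MMD^2 estimate with the polynomial kernel k(x, y) = (x @ y / n + 1) ** 3,
+# averaged over num_subsets random subsets of at most max_subset_size features each.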
+
+
+# load and calculate fid, kid, is
+
+def normalize_point_clouds(pc: np.ndarray) -> np.ndarray:
+ # centroids = np.mean(pc, axis=1, keepdims=True)
+ centroids = np.mean(pc, axis=1, keepdims=True)
+ pc = pc - centroids
+ m = np.max(np.sqrt(np.sum(pc**2, axis=-1, keepdims=True)), axis=1, keepdims=True)
+ pc = pc / m
+ return pc
+
+
+class PCDPathDataset(torch.utils.data.Dataset):
+ def __init__(self, pcd_file_path, transformation, rand_aug=False):
+ files = sorted(glob.glob(f'{pcd_file_path}/*.ply') )
+ # assert len(files)==1030 # gso
+ self.files = files
+ self.transformation = transformation
+ # self.transforms = transforms
+ # self.reso=reso
+ self.rand_aug = rand_aug
+ # if rand_aug:
+ # else:
+ # self.rand_transform = None
+
+ def __len__(self):
+ return len(self.files)
+
+ def __getitem__(self, i):
+ path = self.files[i]
+
+ pcd = trimesh.load(path).vertices # pcu may fail sometimes
+ pcd = normalize_point_clouds(pcd[None])[0]
+ pcd = self.transformation(pcd)
+ if self.rand_aug: # `is not None` was always truthy here, even for rand_aug=False
+ rand_rot = [('x', random.randint(0,359)), ('y', random.randint(0,359)), ('z', random.randint(0,359))]
+ rand_transform = partial(rotate_point_cloud, rotations=rand_rot)
+ pcd = rand_transform(pcd) # random pose augmentation; there is no shared canonical space
+
+ # try:
+ # assert pcd.shape[1]==4096
+ # except Exception as e:
+ # print(path)
+
+ return pcd
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--cache_dir", type=str, default=None)
+ parser.add_argument("batch_1", type=str)
+ parser.add_argument("batch_2", type=str)
+ args = parser.parse_args()
+
+ print("creating classifier...")
+ clf = PointNetClassifier(devices=get_torch_devices(), cache_dir=args.cache_dir)
+
+ worker=2
+ # force_recompute = False
+ force_recompute = True
+
+ feat_1_path = os.path.join(args.batch_1, 'feat.npy')
+ pred_1_path = os.path.join(args.batch_1, 'pred.npy')
+ if not force_recompute and all(os.path.exists(path) for path in [feat_1_path, pred_1_path]):
+ print("loading activations", args.batch_1)
+ features_1 = np.load(feat_1_path)
+ preds_1 = np.load(pred_1_path)
+
+ else:
+ print("computing activations", args.batch_1)
+ # gt_dataset = PCDPathDataset(args.batch_1, transformation_dict['gso'])
+ gt_dataset = PCDPathDataset(args.batch_1, transformation_dict['gso'], rand_aug=True)
+
+ # gt
+ gt_loader = torch.utils.data.DataLoader(gt_dataset,
+ batch_size=64,
+ shuffle=False,
+ drop_last=False,
+ num_workers=worker)
+ features_1, preds_1 = clf.features_and_preds(gt_loader)
+ np.save(feat_1_path, features_1)
+ np.save(pred_1_path, preds_1)
+
+ feat_2_path = os.path.join(args.batch_2, 'feat.npy')
+ pred_2_path = os.path.join(args.batch_2, 'pred.npy')
+
+ if not force_recompute and all(os.path.exists(path) for path in [feat_2_path, pred_2_path]):
+ features_2 = np.load(feat_2_path)
+ preds_2 = np.load(pred_2_path)
+ print("loading activations", args.batch_2)
+ else:
+
+ print("computing activations", args.batch_2)
+ method_name = args.batch_2.split('/')[-1]
+ # st()
+ pcd_transformation = transformation_dict[method_name]
+
+ pred_dataset = PCDPathDataset(args.batch_2, transformation=pcd_transformation, rand_aug=True)
+
+ # worker=0
+ pred_loader = torch.utils.data.DataLoader(pred_dataset,
+ batch_size=64,
+ shuffle=False,
+ drop_last=False,
+ num_workers=worker)
+ features_2, preds_2 = clf.features_and_preds(pred_loader)
+ np.save(feat_2_path, features_2)
+ np.save(pred_2_path, preds_2) # was feat_2_path, which overwrote the saved features
+
+ print("computing statistics")
+
+ stats_1 = compute_statistics(features_1)
+ # print(features_1.max(), features_1.min(), features_1.mean(), features_1.std() )
+ # print(stats_1.mu.shape, stats_1.sigma.shape)
+
+ stats_2 = compute_statistics(features_2)
+ # print(features_2.max(), features_2.min(), features_2.mean(), features_2.std() )
+ # print(stats_2.mu.shape, stats_2.sigma.shape)
+
+ # if return_feature:
+ # return features_1, features_2
+
+ #PFID = stats_1.frechet_distance(stats_2) # same result as the next line
+ PFID = frechet_distance(stats_1.mu, stats_1.sigma, stats_2.mu, stats_2.sigma)
+ PKID = kernel_distance(features_1, features_2)
+
+ # _, preds = clf.features_and_preds(pred_loader)
+
+ # print(f"P-IS: {compute_inception_score(preds)}")
+ # print(f"P-IS: {compute_inception_score(preds)}")
+ method_name = args.batch_2.split('/')[-1]
+
+ # print(method_name, f"P-FID: {PFID}", f"P-KID: {PKID}", f"P-IS: {compute_inception_score(preds_2)}")
+ print(method_name, f"P-FID: {PFID}", f"P-KID: {PKID}")
+ # return dict(PFID=PFID, PKID=PKID)
+
+
+if __name__ == "__main__":
+ main()
+
+
+
+
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/fpd_eval_objv.py b/evaluations/fidkid-pytorch/3d-metrics/evals/fpd_eval_objv.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3ec42d17fdac7a12a5b6846bdaf0fb67c2f18b4
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/fpd_eval_objv.py
@@ -0,0 +1,365 @@
+# https://raw.githubusercontent.com/3dlg-hcvc/omages/refs/heads/main/src/evals/fpd_eval.py
+import json
+import os
+import random
+from tqdm import tqdm
+import glob
+from pdb import set_trace as st
+import trimesh
+import sys
+import numpy as np
+import scipy # should be version 1.11.1
+import torch
+from pathlib import Path
+
+import argparse
+# from point_e.evals.feature_extractor import PointNetClassifier, get_torch_devices
+from feature_extractor import PointNetClassifier, get_torch_devices
+from point_e.evals.fid_is import compute_statistics
+from point_e.evals.fid_is import compute_inception_score
+from point_e.evals.npz_stream import NpzStreamer
+
+import numpy as np
+
+def rotation_matrix(axis, angle):
+ """
+ Returns a rotation matrix for a given axis and angle in radians.
+ :param axis: str, the axis to rotate around ('x', 'y', or 'z')
+ :param angle: float, the rotation angle in radians
+ :return: 3x3 rotation matrix
+ """
+ if axis == 'x':
+ return np.array([[1, 0, 0],
+ [0, np.cos(angle), -np.sin(angle)],
+ [0, np.sin(angle), np.cos(angle)]])
+ elif axis == 'y':
+ return np.array([[np.cos(angle), 0, np.sin(angle)],
+ [0, 1, 0],
+ [-np.sin(angle), 0, np.cos(angle)]])
+ elif axis == 'z':
+ return np.array([[np.cos(angle), -np.sin(angle), 0],
+ [np.sin(angle), np.cos(angle), 0],
+ [0, 0, 1]])
+ else:
+ raise ValueError("Axis must be 'x', 'y', or 'z'.")
+
+
+def rotate_point_cloud(point_cloud, rotations):
+ """
+ Rotates a point cloud along specified axes by the given angles.
+ :param point_cloud: Nx3 numpy array of points
+ :param rotations: list of tuples [(axis, angle_in_degrees), ...]
+ Example: [('x', 90), ('y', 45)] for composite rotations
+ :return: Rotated point cloud as Nx3 numpy array
+ """
+ rotated_cloud = point_cloud.copy()
+ for axis, angle in rotations:
+ angle_rad = np.radians(angle) # Convert degrees to radians
+ R = rotation_matrix(axis, angle_rad)
+ rotated_cloud = np.dot(rotated_cloud, R.T) # Apply rotation matrix
+
+ return rotated_cloud
+
+from functools import partial
+# transformation dictionary
+transformation_dict = {
+ 'gso': partial(rotate_point_cloud, rotations=[('x', 0)]), # no transformation
+ 'LGM_fixpose': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'CRM/Animals': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'Lara': partial(rotate_point_cloud, rotations=[('x', -110), ('z', 33)]),
+ 'ln3diff-lite/Animals': partial(rotate_point_cloud, rotations=[('x', 90)]),
+ 'One-2-3-45/Animals': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),
+ 'splatter-img': partial(rotate_point_cloud, rotations=[('x', -60)]),
+ #
+ 'OpenLRM/Animals': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'shape-e/Animals': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'objv-gt': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ # un-aligned
+ # 'ditl-fromditlPCD-fixPose-tomesh': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'GA': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'scale3d/eval/eval_nerf/Animals': partial(rotate_point_cloud, rotations=[('x', 0)]),
+ 'scale3d/eval/eval_mesh/Animals': partial(rotate_point_cloud, rotations=[('x', 180), ('z', 180)]),
+
+ # 'ditl-fromditlPCD-fixPose-tomesh-ditxlPCD': partial(rotate_point_cloud, rotations=[('x', 0)]),
+}
+
+
+class PFID_evaluator():
+ def __init__(self, devices=['cuda:0'], batch_size=256, cache_dir='~/.temp/PFID_evaluator'):
+ self.__dict__.update(locals())
+ cache_dir = os.path.expanduser(cache_dir)
+ if not os.path.exists(cache_dir):
+ os.makedirs(cache_dir)
+ self.devices = [torch.device(d) for d in devices]
+ self.clf = PointNetClassifier(devices=self.devices, cache_dir=cache_dir, device_batch_size=self.batch_size)
+
+ def compute_pfid(self, pc_1, pc_2, return_feature=False):
+
+ # print("computing first batch activations")
+ # save clouds to npz files
+ npz_path1 = os.path.join(self.cache_dir, "temp1.npz")
+ npz_path2 = os.path.join(self.cache_dir, "temp2.npz")
+ np.savez(npz_path1, arr_0=pc_1)
+ np.savez(npz_path2, arr_0=pc_2)
+
+ features_1, _ = self.clf.features_and_preds(NpzStreamer(npz_path1))
+ stats_1 = compute_statistics(features_1)
+ # print(features_1.max(), features_1.min(), features_1.mean(), features_1.std() )
+ # print(stats_1.mu.shape, stats_1.sigma.shape)
+
+ features_2, _ = self.clf.features_and_preds(NpzStreamer(npz_path2))
+ stats_2 = compute_statistics(features_2)
+ # print(features_2.max(), features_2.min(), features_2.mean(), features_2.std() )
+ # print(stats_2.mu.shape, stats_2.sigma.shape)
+
+ if return_feature:
+ return features_1, features_2
+
+ #PFID = stats_1.frechet_distance(stats_2) # same result as the next line
+ PFID = frechet_distance(stats_1.mu, stats_1.sigma, stats_2.mu, stats_2.sigma)
+ PKID = kernel_distance(features_1, features_2)
+
+ print(f"P-FID: {PFID}", f"P-KID: {PKID}")
+ return dict(PFID=PFID, PKID=PKID)
+
+
+# from https://github.com/GaParmar/clean-fid/blob/main/cleanfid/fid.py
+"""
+Numpy implementation of the Frechet Distance.
+The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
+and X_2 ~ N(mu_2, C_2) is
+ d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
+Stable version by Danica J. Sutherland.
+Params:
+ mu1 : Numpy array containing the activations of a layer of the
+ inception net (like returned by the function 'get_predictions')
+ for generated samples.
+ mu2 : The sample mean over activations, precalculated on an
+ representative data set.
+ sigma1: The covariance matrix over activations for generated samples.
+ sigma2: The covariance matrix over activations, precalculated on an
+ representative data set.
+"""
+def frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert mu1.shape == mu2.shape, \
+ 'Training and test mean vectors have different lengths'
+ assert sigma1.shape == sigma2.shape, \
+ 'Training and test covariances have different dimensions'
+
+ diff = mu1 - mu2
+
+ # Product might be almost singular
+ covmean, _ = scipy.linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = ('fid calculation produces singular product; '
+ 'adding %s to diagonal of cov estimates') % eps
+ print(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = scipy.linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # Numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError('Imaginary component {}'.format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return (diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean)
+
+
+"""
+Compute the KID score given the sets of features
+"""
+def kernel_distance(feats1, feats2, num_subsets=100, max_subset_size=1000):
+ n = feats1.shape[1]
+ m = min(min(feats1.shape[0], feats2.shape[0]), max_subset_size)
+ t = 0
+ for _subset_idx in range(num_subsets):
+ x = feats2[np.random.choice(feats2.shape[0], m, replace=False)]
+ y = feats1[np.random.choice(feats1.shape[0], m, replace=False)]
+ a = (x @ x.T / n + 1) ** 3 + (y @ y.T / n + 1) ** 3
+ b = (x @ y.T / n + 1) ** 3
+ t += (a.sum() - np.diag(a).sum()) / (m - 1) - b.sum() * 2 / m
+ kid = t / num_subsets / m
+ return float(kid)
+
+
+# load and calculate fid, kid, is
+
+def normalize_point_clouds(pc: np.ndarray) -> np.ndarray:
+ # centroids = np.mean(pc, axis=1, keepdims=True)
+ centroids = np.mean(pc, axis=1, keepdims=True)
+ pc = pc - centroids
+ m = np.max(np.sqrt(np.sum(pc**2, axis=-1, keepdims=True)), axis=1, keepdims=True)
+ pc = pc / m
+ return pc
+
+
+class PCDPathDataset(torch.utils.data.Dataset):
+ def __init__(self, pcd_file_path, transformation, rand_aug=False):
+ # files = sorted(glob.glob(f'{pcd_file_path}/*.ply') )
+
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ if 'GA' in pcd_file_path or 'objv-gt' in pcd_file_path:
+ files = [os.path.join(pcd_file_path, f"{obj.replace('/', '-')}_pcd_4096.ply") for obj in all_objs]
+ filter_files = []
+ for file in files:
+ if os.path.exists(file):
+ filter_files.append(file)
+ self.files = filter_files
+ else:
+ self.files = sorted(glob.glob(f'{pcd_file_path}/*.ply') )
+
+ self.transformation = transformation
+ # self.transforms = transforms
+ # self.reso=reso
+ self.rand_aug = rand_aug
+ # if rand_aug:
+ # else:
+ # self.rand_transform = None
+
+ def __len__(self):
+ return len(self.files)
+
+ def __getitem__(self, i):
+ path = self.files[i]
+
+ pcd = trimesh.load(path).vertices # pcu may fail sometimes
+ pcd = normalize_point_clouds(pcd[None])[0]
+ if self.transformation is not None:
+ pcd = self.transformation(pcd)
+
+ # if self.rand_aug:
+ # rand_rot = [('x', random.randint(0,359)), ('y', random.randint(0,359)), ('z', random.randint(0,359))]
+ # rand_transform = partial(rotate_point_cloud, rotations=rand_rot) # no transformation
+ # pcd = rand_transform(pcd) # since no canonical space
+
+ # try:
+ # assert pcd.shape[1]==4096
+ # except Exception as e:
+ # print(path)
+
+ return pcd
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--cache_dir", type=str, default=None)
+ parser.add_argument("batch_1", type=str)
+ parser.add_argument("batch_2", type=str)
+ args = parser.parse_args()
+
+ print("creating classifier...")
+ clf = PointNetClassifier(devices=get_torch_devices(), cache_dir=args.cache_dir)
+
+ worker=2
+ # force_recompute = False
+ force_recompute = True
+
+ feat_1_path = os.path.join(args.batch_1, 'feat.npy')
+ pred_1_path = os.path.join(args.batch_1, 'pred.npy')
+ if not force_recompute and all(os.path.exists(path) for path in [feat_1_path, pred_1_path]):
+ print("loading activations", args.batch_1)
+ features_1 = np.load(feat_1_path)
+ preds_1 = np.load(pred_1_path)
+
+ else:
+ print("computing activations", args.batch_1)
+ # gt_dataset = PCDPathDataset(args.batch_1, transformation_dict['gso'])
+ gt_dataset = PCDPathDataset(args.batch_1, transformation_dict['gso'], rand_aug=False)
+ # gt_dataset = PCDPathDataset(args.batch_1, None, rand_aug=True)
+
+ # gt
+ gt_loader = torch.utils.data.DataLoader(gt_dataset,
+ batch_size=64,
+ shuffle=False,
+ drop_last=False,
+ num_workers=worker)
+ features_1, preds_1 = clf.features_and_preds(gt_loader)
+ np.save(feat_1_path, features_1)
+ np.save(pred_1_path, preds_1)
+
+ feat_2_path = os.path.join(args.batch_2, 'feat.npy')
+ pred_2_path = os.path.join(args.batch_2, 'pred.npy')
+
+ if not force_recompute and all(os.path.exists(path) for path in [feat_2_path, pred_2_path]):
+ features_2 = np.load(feat_2_path)
+ preds_2 = np.load(pred_2_path)
+ print("loading activations", args.batch_2)
+ else:
+
+ gen_path_base='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/'
+ method_name = str(Path(args.batch_2).relative_to(gen_path_base))
+
+ # print("computing activations", args.batch_2)
+ print("computing activations", method_name)
+ # method_name = args.batch_2.split('/')[-1]
+ # st()
+
+ pcd_transformation = transformation_dict[method_name]
+ pred_dataset = PCDPathDataset(args.batch_2, transformation=pcd_transformation, rand_aug=False)
+ # pred_dataset = PCDPathDataset(args.batch_2, transformation=None, rand_aug=True)
+
+ # worker=0
+ pred_loader = torch.utils.data.DataLoader(pred_dataset,
+ batch_size=64,
+ shuffle=False,
+ drop_last=False,
+ num_workers=worker)
+ features_2, preds_2 = clf.features_and_preds(pred_loader)
+ np.save(feat_2_path, features_2)
+ np.save(pred_2_path, preds_2) # was feat_2_path, which overwrote the saved features
+
+ print("computing statistics")
+
+ stats_1 = compute_statistics(features_1)
+ # print(features_1.max(), features_1.min(), features_1.mean(), features_1.std() )
+ # print(stats_1.mu.shape, stats_1.sigma.shape)
+
+ stats_2 = compute_statistics(features_2)
+ # print(features_2.max(), features_2.min(), features_2.mean(), features_2.std() )
+ # print(stats_2.mu.shape, stats_2.sigma.shape)
+
+ # if return_feature:
+ # return features_1, features_2
+
+ #PFID = stats_1.frechet_distance(stats_2) # same result as the next line
+ PFID = frechet_distance(stats_1.mu, stats_1.sigma, stats_2.mu, stats_2.sigma)
+ PKID = kernel_distance(features_1, features_2)
+
+ # _, preds = clf.features_and_preds(pred_loader)
+
+ # print(f"P-IS: {compute_inception_score(preds)}")
+ # print(f"P-IS: {compute_inception_score(preds)}")
+ method_name = args.batch_2.split('/')[-1]
+
+ # print(method_name, f"P-FID: {PFID}", f"P-KID: {PKID}", f"P-IS: {compute_inception_score(preds_2)}")
+ print(method_name, f"P-FID: {PFID}", f"P-KID: {PKID}")
+ # return dict(PFID=PFID, PKID=PKID)
+
+
+if __name__ == "__main__":
+ main()
+
+
+
+
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/npz_stream.py b/evaluations/fidkid-pytorch/3d-metrics/evals/npz_stream.py
new file mode 100644
index 0000000000000000000000000000000000000000..12516392734b579542b2d6f9920d97eff444a562
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/npz_stream.py
@@ -0,0 +1,270 @@
+import glob
+import io
+import os
+import re
+import zipfile
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+from dataclasses import dataclass
+from typing import Dict, Iterator, List, Optional, Sequence, Tuple
+
+import numpy as np
+
+
+@dataclass
+class NumpyArrayInfo:
+ """
+ Information about an array in an npz file.
+ """
+
+ name: str
+ dtype: np.dtype
+ shape: Tuple[int]
+
+ @classmethod
+ def infos_from_first_file(cls, glob_path: str) -> Dict[str, "NumpyArrayInfo"]:
+ paths, _ = _npz_paths_and_length(glob_path)
+ return cls.infos_from_file(paths[0])
+
+ @classmethod
+ def infos_from_file(cls, npz_path: str) -> Dict[str, "NumpyArrayInfo"]:
+ """
+ Extract the info of every array in an npz file.
+ """
+ if not os.path.exists(npz_path):
+ raise FileNotFoundError(f"batch of samples was not found: {npz_path}")
+ results = {}
+ with open(npz_path, "rb") as f:
+ with zipfile.ZipFile(f, "r") as zip_f:
+ for name in zip_f.namelist():
+ if not name.endswith(".npy"):
+ continue
+ key_name = name[: -len(".npy")]
+ with zip_f.open(name, "r") as arr_f:
+ version = np.lib.format.read_magic(arr_f)
+ if version == (1, 0):
+ header = np.lib.format.read_array_header_1_0(arr_f)
+ elif version == (2, 0):
+ header = np.lib.format.read_array_header_2_0(arr_f)
+ else:
+ raise ValueError(f"unknown numpy array version: {version}")
+ shape, _, dtype = header
+ results[key_name] = cls(name=key_name, dtype=dtype, shape=shape)
+ return results
+
+ @property
+ def elem_shape(self) -> Tuple[int]:
+ return self.shape[1:]
+
+ def validate(self):
+ if self.name in {"R", "G", "B"}:
+ if len(self.shape) != 2:
+ raise ValueError(
+ f"expecting exactly 2-D shape for '{self.name}' but got: {self.shape}"
+ )
+ elif self.name == "arr_0":
+ if len(self.shape) < 2:
+ raise ValueError(f"expecting at least 2-D shape but got: {self.shape}")
+ elif len(self.shape) == 3:
+ # For audio, we require continuous samples.
+ if not np.issubdtype(self.dtype, np.floating):
+ raise ValueError(
+ f"invalid dtype for audio batch: {self.dtype} (expected float)"
+ )
+ elif self.dtype != np.uint8:
+ raise ValueError(f"invalid dtype for image batch: {self.dtype} (expected uint8)")
+
+
+class NpzStreamer:
+ def __init__(self, glob_path: str):
+ self.paths, self.trunc_length = _npz_paths_and_length(glob_path)
+ self.infos = NumpyArrayInfo.infos_from_file(self.paths[0])
+
+ def keys(self) -> List[str]:
+ return list(self.infos.keys())
+
+ def stream(self, batch_size: int, keys: Sequence[str]) -> Iterator[Dict[str, np.ndarray]]:
+ cur_batch = None
+ num_remaining = self.trunc_length
+ for path in self.paths:
+ if num_remaining is not None and num_remaining <= 0:
+ break
+ with open_npz_arrays(path, keys) as readers:
+ combined_reader = CombinedReader(keys, readers)
+ while num_remaining is None or num_remaining > 0:
+ read_bs = batch_size
+ if cur_batch is not None:
+ read_bs -= _dict_batch_size(cur_batch)
+ if num_remaining is not None:
+ read_bs = min(read_bs, num_remaining)
+
+ batch = combined_reader.read_batch(read_bs)
+ if batch is None:
+ break
+ if num_remaining is not None:
+ num_remaining -= _dict_batch_size(batch)
+ if cur_batch is None:
+ cur_batch = batch
+ else:
+ cur_batch = {
+ # pylint: disable=unsubscriptable-object
+ k: np.concatenate([cur_batch[k], v], axis=0)
+ for k, v in batch.items()
+ }
+ if _dict_batch_size(cur_batch) == batch_size:
+ yield cur_batch
+ cur_batch = None
+ if cur_batch is not None:
+ yield cur_batch
+
+
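+# Glob paths may end with a slice suffix, e.g. "samples_*.npz[:5000]", which truncates
+# the stream to the first 5000 elements across the matched files.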
+def _npz_paths_and_length(glob_path: str) -> Tuple[List[str], Optional[int]]:
+ # Match slice syntax like path[:100].
+ count_match = re.match("^(.*)\\[:([0-9]*)\\]$", glob_path)
+ if count_match:
+ raw_path = count_match[1]
+ max_count = int(count_match[2])
+ else:
+ raw_path = glob_path
+ max_count = None
+ paths = sorted(glob.glob(raw_path))
+ if not len(paths):
+ raise ValueError(f"no paths found matching: {glob_path}")
+ return paths, max_count
+
+
+class NpzArrayReader(ABC):
+ @abstractmethod
+ def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
+ pass
+
+
+class StreamingNpzArrayReader(NpzArrayReader):
+ def __init__(self, arr_f, shape, dtype):
+ self.arr_f = arr_f
+ self.shape = shape
+ self.dtype = dtype
+ self.idx = 0
+
+ def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
+ if self.idx >= self.shape[0]:
+ return None
+
+ bs = min(batch_size, self.shape[0] - self.idx)
+ self.idx += bs
+
+ if self.dtype.itemsize == 0:
+ return np.ndarray([bs, *self.shape[1:]], dtype=self.dtype)
+
+ read_count = bs * np.prod(self.shape[1:])
+ read_size = int(read_count * self.dtype.itemsize)
+ data = _read_bytes(self.arr_f, read_size, "array data")
+ return np.frombuffer(data, dtype=self.dtype).reshape([bs, *self.shape[1:]])
+
+
+class MemoryNpzArrayReader(NpzArrayReader):
+ def __init__(self, arr):
+ self.arr = arr
+ self.idx = 0
+
+ @classmethod
+ def load(cls, path: str, arr_name: str):
+ with open(path, "rb") as f:
+ arr = np.load(f)[arr_name]
+ return cls(arr)
+
+ def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
+ if self.idx >= self.arr.shape[0]:
+ return None
+
+ res = self.arr[self.idx : self.idx + batch_size]
+ self.idx += batch_size
+ return res
+
+
+@contextmanager
+def open_npz_arrays(path: str, arr_names: Sequence[str]) -> List[NpzArrayReader]:
+ if not len(arr_names):
+ yield []
+ return
+ arr_name = arr_names[0]
+ with open_array(path, arr_name) as arr_f:
+ version = np.lib.format.read_magic(arr_f)
+ header = None
+ if version == (1, 0):
+ header = np.lib.format.read_array_header_1_0(arr_f)
+ elif version == (2, 0):
+ header = np.lib.format.read_array_header_2_0(arr_f)
+
+ if header is None:
+ reader = MemoryNpzArrayReader.load(path, arr_name)
+ else:
+ shape, fortran, dtype = header
+ if fortran or dtype.hasobject:
+ reader = MemoryNpzArrayReader.load(path, arr_name)
+ else:
+ reader = StreamingNpzArrayReader(arr_f, shape, dtype)
+
+ with open_npz_arrays(path, arr_names[1:]) as next_readers:
+ yield [reader] + next_readers
+
+
+class CombinedReader:
+ def __init__(self, keys: List[str], readers: List[NpzArrayReader]):
+ self.keys = keys
+ self.readers = readers
+
+ def read_batch(self, batch_size: int) -> Optional[Dict[str, np.ndarray]]:
+ batches = [r.read_batch(batch_size) for r in self.readers]
+ any_none = any(x is None for x in batches)
+ all_none = all(x is None for x in batches)
+ if any_none != all_none:
+ raise RuntimeError("different keys had different numbers of elements")
+ if any_none:
+ return None
+ if any(len(x) != len(batches[0]) for x in batches):
+ raise RuntimeError("different keys had different numbers of elements")
+ return dict(zip(self.keys, batches))
+
+
+def _read_bytes(fp, size, error_template="ran out of data"):
+ """
+ Copied from: https://github.com/numpy/numpy/blob/fb215c76967739268de71aa4bda55dd1b062bc2e/numpy/lib/format.py#L788-L886
+
+ Read from file-like object until size bytes are read.
+ Raises ValueError if EOF is encountered before size bytes are read.
+ Non-blocking objects only supported if they derive from io objects.
+ Required as e.g. ZipExtFile in python 2.6 can return less data than
+ requested.
+ """
+ data = bytes()
+ while True:
+ # io files (default in python3) return None or raise on
+ # would-block, python2 file will truncate, probably nothing can be
+ # done about that. note that regular files can't be non-blocking
+ try:
+ r = fp.read(size - len(data))
+ data += r
+ if len(r) == 0 or len(data) == size:
+ break
+ except io.BlockingIOError:
+ pass
+ if len(data) != size:
+ msg = "EOF: reading %s, expected %d bytes got %d"
+ raise ValueError(msg % (error_template, size, len(data)))
+ else:
+ return data
+
+
+@contextmanager
+def open_array(path: str, arr_name: str):
+ with open(path, "rb") as f:
+ with zipfile.ZipFile(f, "r") as zip_f:
+ if f"{arr_name}.npy" not in zip_f.namelist():
+ raise ValueError(f"missing {arr_name} in npz file")
+ with zip_f.open(f"{arr_name}.npy", "r") as arr_f:
+ yield arr_f
+
+
+def _dict_batch_size(objs: Dict[str, np.ndarray]) -> int:
+ return len(next(iter(objs.values())))
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/pointnet2_cls_ssg.py b/evaluations/fidkid-pytorch/3d-metrics/evals/pointnet2_cls_ssg.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b1d736f459fefb7a2d33c7c8c863f9666a21d19
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/pointnet2_cls_ssg.py
@@ -0,0 +1,101 @@
+"""
+Based on: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/eb64fe0b4c24055559cea26299cb485dcb43d8dd/models/pointnet2_cls_ssg.py
+
+MIT License
+
+Copyright (c) 2019 benny
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+
+import torch.nn as nn
+import torch.nn.functional as F
+
+from pointnet2_utils import PointNetSetAbstraction
+
+
+class get_model(nn.Module):
+ def __init__(self, num_class, normal_channel=True, width_mult=1):
+ super(get_model, self).__init__()
+ self.width_mult = width_mult
+ in_channel = 6 if normal_channel else 3
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstraction(
+ npoint=512,
+ radius=0.2,
+ nsample=32,
+ in_channel=in_channel,
+ mlp=[64 * width_mult, 64 * width_mult, 128 * width_mult],
+ group_all=False,
+ )
+ self.sa2 = PointNetSetAbstraction(
+ npoint=128,
+ radius=0.4,
+ nsample=64,
+ in_channel=128 * width_mult + 3,
+ mlp=[128 * width_mult, 128 * width_mult, 256 * width_mult],
+ group_all=False,
+ )
+ self.sa3 = PointNetSetAbstraction(
+ npoint=None,
+ radius=None,
+ nsample=None,
+ in_channel=256 * width_mult + 3,
+ mlp=[256 * width_mult, 512 * width_mult, 1024 * width_mult],
+ group_all=True,
+ )
+ self.fc1 = nn.Linear(1024 * width_mult, 512 * width_mult)
+ self.bn1 = nn.BatchNorm1d(512 * width_mult)
+ self.drop1 = nn.Dropout(0.4)
+ self.fc2 = nn.Linear(512 * width_mult, 256 * width_mult)
+ self.bn2 = nn.BatchNorm1d(256 * width_mult)
+ self.drop2 = nn.Dropout(0.4)
+ self.fc3 = nn.Linear(256 * width_mult, num_class)
+
+ def forward(self, xyz, features=False):
+ B, _, _ = xyz.shape
+ if self.normal_channel:
+ norm = xyz[:, 3:, :]
+ xyz = xyz[:, :3, :]
+ else:
+ norm = None
+ l1_xyz, l1_points = self.sa1(xyz, norm)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ x = l3_points.view(B, 1024 * self.width_mult)
+ x = self.drop1(F.relu(self.bn1(self.fc1(x))))
+ result_features = self.bn2(self.fc2(x))
+ x = self.drop2(F.relu(result_features))
+ x = self.fc3(x)
+ x = F.log_softmax(x, -1)
+
+ if features:
+ return x, l3_points, result_features
+ else:
+ return x, l3_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
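+
+
+# Hedged smoke-test sketch (appended here; not part of the upstream file): one
+# forward pass on random xyz-only input. With normal_channel=False the model
+# expects [B, 3, N] tensors; num_class=40 mirrors ModelNet40 and is only an
+# illustrative choice.
+if __name__ == "__main__":
+    import torch
+
+    model = get_model(num_class=40, normal_channel=False).eval()
+    points = torch.rand(2, 3, 1024)  # batch of 2 clouds, 1024 points each
+    with torch.no_grad():
+        logits, global_feat = model(points)
+    print(logits.shape, global_feat.shape)  # (2, 40), (2, 1024, 1)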
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/pointnet2_utils.py b/evaluations/fidkid-pytorch/3d-metrics/evals/pointnet2_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..409188b4e7db59f9ee30945f43fb4731479216ae
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/pointnet2_utils.py
@@ -0,0 +1,356 @@
+"""
+Based on: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/eb64fe0b4c24055559cea26299cb485dcb43d8dd/models/pointnet_utils.py
+
+MIT License
+
+Copyright (c) 2019 benny
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+
+from time import time
+
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+
+def square_distance(src, dst):
+ """
+ Calculate the squared Euclidean distance between each pair of points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zm;
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src**2, -1).view(B, N, 1)
+ dist += torch.sum(dst**2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points: indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = (
+ torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ )
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint, deterministic=False):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ if deterministic:
+ farthest = torch.arange(0, B, dtype=torch.long).to(device)
+ else:
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius**2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False, deterministic=False):
+ """
+ Input:
+ npoint: number of centroids to sample
+ radius: grouping ball radius
+ nsample: max number of points per group
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint, deterministic=deterministic) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat(
+ [grouped_xyz_norm, grouped_points], dim=-1
+ ) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(
+ self.npoint, self.radius, self.nsample, xyz, points, deterministic=not self.training
+ )
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)))
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S, deterministic=not self.training))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)))
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(
+ index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2
+ )
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)))
+ return new_points
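+
+
+# Hedged usage sketch (appended here; not part of the upstream file): pick 16
+# centroids with farthest_point_sample, then gather a fixed-size ball
+# neighborhood around each with query_ball_point. All sizes are illustrative.
+if __name__ == "__main__":
+    xyz = torch.rand(2, 1024, 3)  # [B, N, 3]
+    fps_idx = farthest_point_sample(xyz, 16, deterministic=True)  # [B, 16]
+    centroids = index_points(xyz, fps_idx)  # [B, 16, 3]
+    group_idx = query_ball_point(0.2, 32, xyz, centroids)  # [B, 16, 32]
+    print(centroids.shape, group_idx.shape)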
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/readme.md b/evaluations/fidkid-pytorch/3d-metrics/evals/readme.md
new file mode 100644
index 0000000000000000000000000000000000000000..15a189469a60ad696ecd5ad0642cf423b9258641
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/readme.md
@@ -0,0 +1,5 @@
+All metrics: p-FID, p-KID, p-IS.
+
+Mainly copied from ```https://github.com/openai/point-e/point_e/evals/```.
+
+The p-KID implementation is copied from ```https://github.com/3dlg-hcvc/omages/blob/main/src/evals/fpd_eval.py```.
\ No newline at end of file
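+
+A minimal invocation sketch (the script names match the files added below; paths are relative to this directory, and the npz filenames are placeholders):
+
+```bash
+python scripts/evaluate_pfid.py batch_1.npz batch_2.npz   # P-FID between two batches
+python scripts/evaluate_pis.py batch.npz                  # P-IS of a single batch
+```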
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/blender_script.py b/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/blender_script.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ddb1cbe181db83f088df5c8c948cadaf49599d1
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/blender_script.py
@@ -0,0 +1,533 @@
+"""
+Script to run within Blender to render a 3D model as RGBAD images.
+
+Example usage
+
+ blender -b -P blender_script.py -- \
+ --input_path ../../examples/example_data/corgi.ply \
+ --output_path render_out
+
+Pass `--camera_pose z-circular-elevated` for the rendering used to compute
+CLIP R-Precision results.
+
+The output directory will include metadata json files for each rendered view,
+as well as a global metadata file for the render. Each image will be saved as
+a collection of 16-bit PNG files for each channel (rgbad), as well as a full
+grayscale render of the view.
+"""
+
+import argparse
+import json
+import math
+import os
+import random
+import sys
+
+import bpy
+from mathutils import Vector
+from mathutils.noise import random_unit_vector
+
+MAX_DEPTH = 5.0
+FORMAT_VERSION = 6
+UNIFORM_LIGHT_DIRECTION = [0.09387503, -0.63953443, -0.7630093]
+
+
+def clear_scene():
+ bpy.ops.object.select_all(action="SELECT")
+ bpy.ops.object.delete()
+
+
+def clear_lights():
+ bpy.ops.object.select_all(action="DESELECT")
+ for obj in bpy.context.scene.objects.values():
+ if isinstance(obj.data, bpy.types.Light):
+ obj.select_set(True)
+ bpy.ops.object.delete()
+
+
+def import_model(path):
+ clear_scene()
+ _, ext = os.path.splitext(path)
+ ext = ext.lower()
+ if ext == ".obj":
+ bpy.ops.import_scene.obj(filepath=path)
+ elif ext in [".glb", ".gltf"]:
+ bpy.ops.import_scene.gltf(filepath=path)
+ elif ext == ".stl":
+ bpy.ops.import_mesh.stl(filepath=path)
+ elif ext == ".fbx":
+ bpy.ops.import_scene.fbx(filepath=path)
+ elif ext == ".dae":
+ bpy.ops.wm.collada_import(filepath=path)
+ elif ext == ".ply":
+ bpy.ops.import_mesh.ply(filepath=path)
+ else:
+ raise RuntimeError(f"unexpected extension: {ext}")
+
+
+def scene_root_objects():
+ for obj in bpy.context.scene.objects.values():
+ if not obj.parent:
+ yield obj
+
+
+def scene_bbox(single_obj=None, ignore_matrix=False):
+ bbox_min = (math.inf,) * 3
+ bbox_max = (-math.inf,) * 3
+ found = False
+ for obj in scene_meshes() if single_obj is None else [single_obj]:
+ found = True
+ for coord in obj.bound_box:
+ coord = Vector(coord)
+ if not ignore_matrix:
+ coord = obj.matrix_world @ coord
+ bbox_min = tuple(min(x, y) for x, y in zip(bbox_min, coord))
+ bbox_max = tuple(max(x, y) for x, y in zip(bbox_max, coord))
+ if not found:
+ raise RuntimeError("no objects in scene to compute bounding box for")
+ return Vector(bbox_min), Vector(bbox_max)
+
+
+def scene_meshes():
+ for obj in bpy.context.scene.objects.values():
+ if isinstance(obj.data, (bpy.types.Mesh)):
+ yield obj
+
+
+def normalize_scene():
+ bbox_min, bbox_max = scene_bbox()
+ scale = 1 / max(bbox_max - bbox_min)
+
+ for obj in scene_root_objects():
+ obj.scale = obj.scale * scale
+
+ # Apply scale to matrix_world.
+ bpy.context.view_layer.update()
+
+ bbox_min, bbox_max = scene_bbox()
+ offset = -(bbox_min + bbox_max) / 2
+ for obj in scene_root_objects():
+ obj.matrix_world.translation += offset
+
+ bpy.ops.object.select_all(action="DESELECT")
+
+
+def create_camera():
+ # https://b3d.interplanety.org/en/how-to-create-camera-through-the-blender-python-api/
+ camera_data = bpy.data.cameras.new(name="Camera")
+ camera_object = bpy.data.objects.new("Camera", camera_data)
+ bpy.context.scene.collection.objects.link(camera_object)
+ bpy.context.scene.camera = camera_object
+
+
+def set_camera(direction, camera_dist=2.0):
+ camera_pos = -camera_dist * direction
+ bpy.context.scene.camera.location = camera_pos
+
+ # https://blender.stackexchange.com/questions/5210/pointing-the-camera-in-a-particular-direction-programmatically
+ rot_quat = direction.to_track_quat("-Z", "Y")
+ bpy.context.scene.camera.rotation_euler = rot_quat.to_euler()
+
+ bpy.context.view_layer.update()
+
+
+def randomize_camera(camera_dist=2.0):
+ direction = random_unit_vector()
+ set_camera(direction, camera_dist=camera_dist)
+
+
+def pan_camera(time, axis="Z", camera_dist=2.0, elevation=-0.1):
+ angle = time * math.pi * 2
+ direction = [-math.cos(angle), -math.sin(angle), -elevation]
+ assert axis in ["X", "Y", "Z"]
+ if axis == "X":
+ direction = [direction[2], *direction[:2]]
+ elif axis == "Y":
+ direction = [direction[0], -elevation, direction[1]]
+ direction = Vector(direction).normalized()
+ set_camera(direction, camera_dist=camera_dist)
+
+
+def place_camera(time, camera_pose_mode="random", camera_dist_min=2.0, camera_dist_max=2.0):
+ camera_dist = random.uniform(camera_dist_min, camera_dist_max)
+ if camera_pose_mode == "random":
+ randomize_camera(camera_dist=camera_dist)
+ elif camera_pose_mode == "z-circular":
+ pan_camera(time, axis="Z", camera_dist=camera_dist)
+ elif camera_pose_mode == "z-circular-elevated":
+ pan_camera(time, axis="Z", camera_dist=camera_dist, elevation=0.2617993878)
+ else:
+ raise ValueError(f"Unknown camera pose mode: {camera_pose_mode}")
+
+
+def create_light(location, energy=1.0, angle=0.5 * math.pi / 180):
+ # https://blender.stackexchange.com/questions/215624/how-to-create-a-light-with-the-python-api-in-blender-2-92
+ light_data = bpy.data.lights.new(name="Light", type="SUN")
+ light_data.energy = energy
+ light_data.angle = angle
+ light_object = bpy.data.objects.new(name="Light", object_data=light_data)
+
+ direction = -location
+ rot_quat = direction.to_track_quat("-Z", "Y")
+ light_object.rotation_euler = rot_quat.to_euler()
+ bpy.context.view_layer.update()
+
+ bpy.context.collection.objects.link(light_object)
+ light_object.location = location
+
+
+def create_random_lights(count=4, distance=2.0, energy=1.5):
+ clear_lights()
+ for _ in range(count):
+ create_light(random_unit_vector() * distance, energy=energy)
+
+
+def create_camera_light():
+ clear_lights()
+ create_light(bpy.context.scene.camera.location, energy=5.0)
+
+
+def create_uniform_light(backend):
+ clear_lights()
+ # Random direction to decorrelate axis-aligned sides.
+ pos = Vector(UNIFORM_LIGHT_DIRECTION)
+ angle = 0.0092 if backend == "CYCLES" else math.pi
+ create_light(pos, energy=5.0, angle=angle)
+ create_light(-pos, energy=5.0, angle=angle)
+
+
+def create_vertex_color_shaders():
+ # By default, Blender will ignore vertex colors in both the
+ # Eevee and Cycles backends, since these colors aren't
+ # associated with a material.
+ #
+ # What we do here is create a simple material shader and link
+ # the vertex color to the material color.
+ for obj in bpy.context.scene.objects.values():
+ if not isinstance(obj.data, (bpy.types.Mesh)):
+ continue
+
+ if len(obj.data.materials):
+ # We don't want to override any existing materials.
+ continue
+
+ color_keys = (obj.data.vertex_colors or {}).keys()
+ if not len(color_keys):
+ # Many objects will have no materials *or* vertex colors.
+ continue
+
+ mat = bpy.data.materials.new(name="VertexColored")
+ mat.use_nodes = True
+
+ # There should be a Principled BSDF by default.
+ bsdf_node = None
+ for node in mat.node_tree.nodes:
+ if node.type == "BSDF_PRINCIPLED":
+ bsdf_node = node
+ assert bsdf_node is not None, "material has no Principled BSDF node to modify"
+
+ socket_map = {}
+ for input in bsdf_node.inputs:
+ socket_map[input.name] = input
+
+ # Make sure nothing lights the object except for the diffuse color.
+ socket_map["Specular"].default_value = 0.0
+ socket_map["Roughness"].default_value = 1.0
+
+ v_color = mat.node_tree.nodes.new("ShaderNodeVertexColor")
+ v_color.layer_name = color_keys[0]
+
+ mat.node_tree.links.new(v_color.outputs[0], socket_map["Base Color"])
+
+ obj.data.materials.append(mat)
+
+
+def create_default_materials():
+ for obj in bpy.context.scene.objects.values():
+ if isinstance(obj.data, (bpy.types.Mesh)):
+ if not len(obj.data.materials):
+ mat = bpy.data.materials.new(name="DefaultMaterial")
+ mat.use_nodes = True
+ obj.data.materials.append(mat)
+
+
+def find_materials():
+ all_materials = set()
+ for obj in bpy.context.scene.objects.values():
+ if not isinstance(obj.data, (bpy.types.Mesh)):
+ continue
+ for mat in obj.data.materials:
+ all_materials.add(mat)
+ return all_materials
+
+
+def get_socket_value(tree, socket):
+ default = socket.default_value
+ if not isinstance(default, float):
+ default = list(default)
+ for link in tree.links:
+ if link.to_socket == socket:
+ return (link.from_socket, default)
+ return (None, default)
+
+
+def clear_socket_input(tree, socket):
+ for link in list(tree.links):
+ if link.to_socket == socket:
+ tree.links.remove(link)
+
+
+def set_socket_value(tree, socket, socket_and_default):
+ clear_socket_input(tree, socket)
+ old_source_socket, default = socket_and_default
+ if isinstance(default, float) and not isinstance(socket.default_value, float):
+ # Codepath for setting Emission to a previous alpha value.
+ socket.default_value = [default] * 3 + [1.0]
+ else:
+ socket.default_value = default
+ if old_source_socket is not None:
+ tree.links.new(old_source_socket, socket)
+
+
+def setup_nodes(output_path, capturing_material_alpha: bool = False):
+ tree = bpy.context.scene.node_tree
+ links = tree.links
+
+ for node in tree.nodes:
+ tree.nodes.remove(node)
+
+ # Helpers to perform math on links and constants.
+ def node_op(op: str, *args, clamp=False):
+ node = tree.nodes.new(type="CompositorNodeMath")
+ node.operation = op
+ if clamp:
+ node.use_clamp = True
+ for i, arg in enumerate(args):
+ if isinstance(arg, (int, float)):
+ node.inputs[i].default_value = arg
+ else:
+ links.new(arg, node.inputs[i])
+ return node.outputs[0]
+
+ def node_clamp(x, maximum=1.0):
+ return node_op("MINIMUM", x, maximum)
+
+ def node_mul(x, y, **kwargs):
+ return node_op("MULTIPLY", x, y, **kwargs)
+
+ input_node = tree.nodes.new(type="CompositorNodeRLayers")
+ input_node.scene = bpy.context.scene
+
+ input_sockets = {}
+ for output in input_node.outputs:
+ input_sockets[output.name] = output
+
+ if capturing_material_alpha:
+ color_socket = input_sockets["Image"]
+ else:
+ raw_color_socket = input_sockets["Image"]
+
+ # We apply sRGB here so that our fixed-point depth map and material
+ # alpha values are not sRGB, and so that we perform ambient+diffuse
+ # lighting in linear RGB space.
+ color_node = tree.nodes.new(type="CompositorNodeConvertColorSpace")
+ color_node.from_color_space = "Linear"
+ color_node.to_color_space = "sRGB"
+ tree.links.new(raw_color_socket, color_node.inputs[0])
+ color_socket = color_node.outputs[0]
+ split_node = tree.nodes.new(type="CompositorNodeSepRGBA")
+ tree.links.new(color_socket, split_node.inputs[0])
+ # Create separate file output nodes for every channel we care about.
+ # The process calling this script must decide how to recombine these
+ # channels, possibly into a single image.
+ for i, channel in enumerate("rgba") if not capturing_material_alpha else [(0, "MatAlpha")]:
+ output_node = tree.nodes.new(type="CompositorNodeOutputFile")
+ output_node.base_path = f"{output_path}_{channel}"
+ links.new(split_node.outputs[i], output_node.inputs[0])
+
+ if capturing_material_alpha:
+ # No need to re-write depth here.
+ return
+
+ depth_out = node_clamp(node_mul(input_sockets["Depth"], 1 / MAX_DEPTH))
+ output_node = tree.nodes.new(type="CompositorNodeOutputFile")
+ output_node.base_path = f"{output_path}_depth"
+ links.new(depth_out, output_node.inputs[0])
+
+
+def render_scene(output_path, fast_mode: bool):
+ use_workbench = bpy.context.scene.render.engine == "BLENDER_WORKBENCH"
+ if use_workbench:
+ # We must use a different engine to compute depth maps.
+ bpy.context.scene.render.engine = "BLENDER_EEVEE"
+ bpy.context.scene.eevee.taa_render_samples = 1 # faster, since we discard image.
+ if fast_mode:
+ if bpy.context.scene.render.engine == "BLENDER_EEVEE":
+ bpy.context.scene.eevee.taa_render_samples = 1
+ elif bpy.context.scene.render.engine == "CYCLES":
+ bpy.context.scene.cycles.samples = 256
+ else:
+ if bpy.context.scene.render.engine == "CYCLES":
+ # We should still impose a per-frame time limit
+ # so that we don't timeout completely.
+ bpy.context.scene.cycles.time_limit = 40
+ bpy.context.view_layer.update()
+ bpy.context.scene.use_nodes = True
+ bpy.context.scene.view_layers["ViewLayer"].use_pass_z = True
+ bpy.context.scene.view_settings.view_transform = "Raw" # sRGB done in graph nodes
+ bpy.context.scene.render.film_transparent = True
+ bpy.context.scene.render.resolution_x = 512
+ bpy.context.scene.render.resolution_y = 512
+ bpy.context.scene.render.image_settings.file_format = "PNG"
+ bpy.context.scene.render.image_settings.color_mode = "BW"
+ bpy.context.scene.render.image_settings.color_depth = "16"
+ bpy.context.scene.render.filepath = output_path
+ setup_nodes(output_path)
+ bpy.ops.render.render(write_still=True)
+
+ # The output images must be moved from their own sub-directories, or
+ # discarded if we are using workbench for the color.
+ for channel_name in ["r", "g", "b", "a", "depth"]:
+ sub_dir = f"{output_path}_{channel_name}"
+ image_path = os.path.join(sub_dir, os.listdir(sub_dir)[0])
+ name, ext = os.path.splitext(output_path)
+ if channel_name == "depth" or not use_workbench:
+ os.rename(image_path, f"{name}_{channel_name}{ext}")
+ else:
+ os.remove(image_path)
+ os.removedirs(sub_dir)
+
+ if use_workbench:
+ # Re-render RGBA using workbench with texture mode, since this seems
+ # to show the most reasonable colors when lighting is broken.
+ bpy.context.scene.use_nodes = False
+ bpy.context.scene.render.engine = "BLENDER_WORKBENCH"
+ bpy.context.scene.render.image_settings.color_mode = "RGBA"
+ bpy.context.scene.render.image_settings.color_depth = "8"
+ bpy.context.scene.display.shading.color_type = "TEXTURE"
+ bpy.context.scene.display.shading.light = "FLAT"
+ if fast_mode:
+ # Single pass anti-aliasing.
+ bpy.context.scene.display.render_aa = "FXAA"
+ os.remove(output_path)
+ bpy.ops.render.render(write_still=True)
+ bpy.context.scene.render.image_settings.color_mode = "BW"
+ bpy.context.scene.render.image_settings.color_depth = "16"
+
+
+def scene_fov():
+ x_fov = bpy.context.scene.camera.data.angle_x
+ y_fov = bpy.context.scene.camera.data.angle_y
+ width = bpy.context.scene.render.resolution_x
+ height = bpy.context.scene.render.resolution_y
+ if bpy.context.scene.camera.data.angle == x_fov:
+ y_fov = 2 * math.atan(math.tan(x_fov / 2) * height / width)
+ else:
+ x_fov = 2 * math.atan(math.tan(y_fov / 2) * width / height)
+ return x_fov, y_fov
+
+
+def write_camera_metadata(path):
+ x_fov, y_fov = scene_fov()
+ bbox_min, bbox_max = scene_bbox()
+ matrix = bpy.context.scene.camera.matrix_world
+ with open(path, "w") as f:
+ json.dump(
+ dict(
+ format_version=FORMAT_VERSION,
+ max_depth=MAX_DEPTH,
+ bbox=[list(bbox_min), list(bbox_max)],
+ origin=list(matrix.col[3])[:3],
+ x_fov=x_fov,
+ y_fov=y_fov,
+ x=list(matrix.col[0])[:3],
+ y=list(-matrix.col[1])[:3],
+ z=list(-matrix.col[2])[:3],
+ ),
+ f,
+ )
+
+
+def save_rendering_dataset(
+ input_path: str,
+ output_path: str,
+ num_images: int,
+ backend: str,
+ light_mode: str,
+ camera_pose: str,
+ camera_dist_min: float,
+ camera_dist_max: float,
+ fast_mode: bool,
+):
+ assert light_mode in ["random", "uniform", "camera"]
+ assert camera_pose in ["random", "z-circular", "z-circular-elevated"]
+
+ import_model(input_path)
+ bpy.context.scene.render.engine = backend
+ normalize_scene()
+ if light_mode == "random":
+ create_random_lights()
+ elif light_mode == "uniform":
+ create_uniform_light(backend)
+ create_camera()
+ create_vertex_color_shaders()
+ for i in range(num_images):
+ t = i / max(num_images - 1, 1) # same as np.linspace(0, 1, num_images)
+ place_camera(
+ t,
+ camera_pose_mode=camera_pose,
+ camera_dist_min=camera_dist_min,
+ camera_dist_max=camera_dist_max,
+ )
+ if light_mode == "camera":
+ create_camera_light()
+ render_scene(
+ os.path.join(output_path, f"{i:05}.png"),
+ fast_mode=fast_mode,
+ )
+ write_camera_metadata(os.path.join(output_path, f"{i:05}.json"))
+ with open(os.path.join(output_path, "info.json"), "w") as f:
+ info = dict(
+ backend=backend,
+ light_mode=light_mode,
+ fast_mode=fast_mode,
+ format_version=FORMAT_VERSION,
+ channels=["R", "G", "B", "A", "D"],
+ scale=0.5, # The scene is bounded by [-scale, scale].
+ )
+ json.dump(info, f)
+
+
+def main():
+ try:
+ dash_index = sys.argv.index("--")
+ except ValueError as exc:
+ raise ValueError("arguments must be preceded by '--'") from exc
+
+ raw_args = sys.argv[dash_index + 1 :]
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--input_path", required=True, type=str)
+ parser.add_argument("--output_path", required=True, type=str)
+ parser.add_argument("--num_images", type=int, default=20)
+ parser.add_argument("--backend", type=str, default="BLENDER_EEVEE")
+ parser.add_argument("--light_mode", type=str, default="uniform")
+ parser.add_argument("--camera_pose", type=str, default="random")
+ parser.add_argument("--camera_dist_min", type=float, default=2.0)
+ parser.add_argument("--camera_dist_max", type=float, default=2.0)
+ parser.add_argument("--fast_mode", action="store_true")
+ args = parser.parse_args(raw_args)
+
+ save_rendering_dataset(
+ input_path=args.input_path,
+ output_path=args.output_path,
+ num_images=args.num_images,
+ backend=args.backend,
+ light_mode=args.light_mode,
+ camera_pose=args.camera_pose,
+ camera_dist_min=args.camera_dist_min,
+ camera_dist_max=args.camera_dist_max,
+ fast_mode=args.fast_mode,
+ )
+
+
+main()
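+
+
+def load_rgbad(prefix: str):
+    """Hedged post-processing sketch (appended here; not part of the original
+    script, and meant to run outside Blender): recombine the per-channel
+    16-bit PNGs written by render_scene() on the non-workbench path into one
+    [H, W, 5] RGBAD array. Assumes the f"{name}_{channel}{ext}" naming used
+    above, plus numpy and Pillow, which the rest of this script never needs.
+    """
+    import numpy as np
+    from PIL import Image
+
+    planes = []
+    for ch in ["r", "g", "b", "a", "depth"]:
+        img = np.asarray(Image.open(f"{prefix}_{ch}.png"), dtype=np.float64)
+        planes.append(img / 65535.0)  # 16-bit PNG values -> [0, 1]
+    return np.stack(planes, axis=-1)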
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/evaluate_pfid.py b/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/evaluate_pfid.py
new file mode 100644
index 0000000000000000000000000000000000000000..d392d957c941a092563566af7fd16e91ebacf57a
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/evaluate_pfid.py
@@ -0,0 +1,40 @@
+"""
+Evaluate P-FID between two batches of point clouds.
+
+The point cloud batches should be saved to two npz files, where there
+is an arr_0 key of shape [N x K x 3], where K is the dimensionality of
+each point cloud and N is the number of clouds.
+"""
+
+import argparse
+
+from point_e.evals.feature_extractor import PointNetClassifier, get_torch_devices
+from point_e.evals.fid_is import compute_statistics
+from point_e.evals.npz_stream import NpzStreamer
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--cache_dir", type=str, default=None)
+ parser.add_argument("batch_1", type=str)
+ parser.add_argument("batch_2", type=str)
+ args = parser.parse_args()
+
+ print("creating classifier...")
+ clf = PointNetClassifier(devices=get_torch_devices(), cache_dir=args.cache_dir)
+
+ print("computing first batch activations")
+
+ features_1, _ = clf.features_and_preds(NpzStreamer(args.batch_1))
+ stats_1 = compute_statistics(features_1)
+ del features_1
+
+ features_2, _ = clf.features_and_preds(NpzStreamer(args.batch_2))
+ stats_2 = compute_statistics(features_2)
+ del features_2
+
+ print(f"P-FID: {stats_1.frechet_distance(stats_2)}")
+
+
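+def make_dummy_batch(path: str, n: int = 64, k: int = 4096) -> None:
+    """Hedged helper sketch (not part of the original script): write an npz
+    batch in the format this evaluator expects, i.e. an arr_0 key of shape
+    [N, K, 3]. The sizes here are illustrative."""
+    import numpy as np
+
+    np.savez(path, arr_0=np.random.rand(n, k, 3).astype(np.float32))
+
+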
+if __name__ == "__main__":
+ main()
diff --git a/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/evaluate_pis.py b/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/evaluate_pis.py
new file mode 100644
index 0000000000000000000000000000000000000000..05395fc0a57b31d0ca314fe46aec088dbaff0772
--- /dev/null
+++ b/evaluations/fidkid-pytorch/3d-metrics/evals/scripts/evaluate_pis.py
@@ -0,0 +1,31 @@
+"""
+Evaluate P-IS of a batch of point clouds.
+
+The point cloud batch should be saved to an npz file, where there is an
+arr_0 key of shape [N x K x 3], where K is the dimensionality of each
+point cloud and N is the number of clouds.
+"""
+
+import argparse
+
+from point_e.evals.feature_extractor import PointNetClassifier, get_torch_devices
+from point_e.evals.fid_is import compute_inception_score
+from point_e.evals.npz_stream import NpzStreamer
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--cache_dir", type=str, default=None)
+ parser.add_argument("batch", type=str)
+ args = parser.parse_args()
+
+ print("creating classifier...")
+ clf = PointNetClassifier(devices=get_torch_devices(), cache_dir=args.cache_dir)
+
+ print("computing batch predictions")
+ _, preds = clf.features_and_preds(NpzStreamer(args.batch))
+ print(f"P-IS: {compute_inception_score(preds)}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/evaluations/fidkid-pytorch/align_axis.ipynb b/evaluations/fidkid-pytorch/align_axis.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..68fba8044bda3b320182a89dff6f06f859cb104b
--- /dev/null
+++ b/evaluations/fidkid-pytorch/align_axis.ipynb
@@ -0,0 +1,66505 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "ename": "ModuleNotFoundError",
+ "evalue": "No module named 'trimesh'",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
+ "Cell \u001b[0;32mIn[1], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mnumpy\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mnp\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtrimesh\u001b[39;00m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mos\u001b[39;00m\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mshutil\u001b[39;00m\n",
+ "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'trimesh'"
+ ]
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "import trimesh\n",
+ "import os\n",
+ "import shutil\n",
+ "import imageio.v3 as imageio\n",
+ "from tqdm import tqdm"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "def rotation_matrix(axis, angle):\n",
+ " \"\"\"\n",
+ " Returns a rotation matrix for a given axis and angle in radians.\n",
+ " :param axis: str, the axis to rotate around ('x', 'y', or 'z')\n",
+ " :param angle: float, the rotation angle in radians\n",
+ " :return: 3x3 rotation matrix\n",
+ " \"\"\"\n",
+ " if axis == 'x':\n",
+ " return np.array([[1, 0, 0],\n",
+ " [0, np.cos(angle), -np.sin(angle)],\n",
+ " [0, np.sin(angle), np.cos(angle)]])\n",
+ " elif axis == 'y':\n",
+ " return np.array([[np.cos(angle), 0, np.sin(angle)],\n",
+ " [0, 1, 0],\n",
+ " [-np.sin(angle), 0, np.cos(angle)]])\n",
+ " elif axis == 'z':\n",
+ " return np.array([[np.cos(angle), -np.sin(angle), 0],\n",
+ " [np.sin(angle), np.cos(angle), 0],\n",
+ " [0, 0, 1]])\n",
+ " else:\n",
+ " raise ValueError(\"Axis must be 'x', 'y', or 'z'.\")\n",
+ "\n",
+ "def rotate_point_cloud(point_cloud, rotations):\n",
+ " \"\"\"\n",
+ " Rotates a point cloud along specified axes by the given angles.\n",
+ " :param point_cloud: Nx3 numpy array of points\n",
+ " :param rotations: list of tuples [(axis, angle_in_degrees), ...]\n",
+ " Example: [('x', 90), ('y', 45)] for composite rotations\n",
+ " :return: Rotated point cloud as Nx3 numpy array\n",
+ " \"\"\"\n",
+ " rotated_cloud = point_cloud.copy()\n",
+ " for axis, angle in rotations:\n",
+ " angle_rad = np.radians(angle) # Convert degrees to radians\n",
+ " R = rotation_matrix(axis, angle_rad)\n",
+ " rotated_cloud = np.dot(rotated_cloud, R.T) # Apply rotation matrix\n",
+ " \n",
+ " return rotated_cloud\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[[ 7.07106781e-01 0.00000000e+00 -7.07106781e-01]\n",
+ " [ 7.07106781e-01 6.12323400e-17 7.07106781e-01]\n",
+ " [ 4.32978028e-17 -1.00000000e+00 4.32978028e-17]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Example point cloud (Nx3 matrix)\n",
+ "point_cloud = np.array([[1, 0, 0],\n",
+ " [0, 1, 0],\n",
+ " [0, 0, 1]])\n",
+ "\n",
+ "# Composite rotation: 90 degrees around x, then 45 degrees around y\n",
+ "rotations = [('x', 90), ('y', 45)]\n",
+ "\n",
+ "# Rotate the point cloud\n",
+ "rotated_cloud = rotate_point_cloud(point_cloud, rotations)\n",
+ "print(rotated_cloud)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# rotate different to align in gso canonical space\n",
+ "# import open3d as o3d\n",
+ "# import numpy as np\n",
+ "import plotly.graph_objects as go\n",
+ "import matplotlib.pyplot as plt\n",
+ "import point_cloud_utils as pcu\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def vis_colored_pcd(points, colors, size=1.0):\n",
+ "\n",
+ " fig = go.Figure(\n",
+ " data=[\n",
+ " go.Scatter3d(\n",
+ " x=points[:,0], y=points[:,1], z=points[:,2],\n",
+ " mode='markers',\n",
+ " marker=dict(size=size, color=colors)\n",
+ " )\n",
+ "\n",
+ " ],\n",
+ " layout=dict( \n",
+ " paper_bgcolor='rgba(0,0,0,0)', \n",
+ " width=512, \n",
+ " height=512,\n",
+ " scene=dict(\n",
+ " xaxis=dict(visible=False),\n",
+ " yaxis=dict(visible=False),\n",
+ " zaxis=dict(visible=False)\n",
+ " )\n",
+ " )\n",
+ " )\n",
+ " config = {\n",
+ " 'toImageButtonOptions': {\n",
+ " 'format': 'png', # one of png, svg, jpeg, webp\n",
+ " 'filename': 'custom_image',\n",
+ " 'height': 512,\n",
+ " 'width': 512,\n",
+ " 'scale':4 # Multiply title/legend/axis/canvas sizes by this factor\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "\n",
+ " fig.show(config=config)\n",
+ "# fig.write_image(\"fig1.png\")\n",
+ "\n",
+ "# vis_colored_pcd(points, colors, size=1.3)\n",
+ "# vis_colored_pcd(split_points, splint_colors)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 84,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from functools import partial\n",
+ "# transformation dictionary\n",
+ "transformation_dict = {\n",
+ " 'gso': partial(rotate_point_cloud, rotations=[('x', 0)]), # no transformation\n",
+ " 'LGM': partial(rotate_point_cloud, rotations=[('x', 90)]),\n",
+ " 'CRM': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),\n",
+ " 'Lara': partial(rotate_point_cloud, rotations=[('x', -110), ('z', 33)]),\n",
+ " 'ln3diff': partial(rotate_point_cloud, rotations=[('x', 90)]),\n",
+ " 'One-2-3-45': partial(rotate_point_cloud, rotations=[('x', 90), ('z', 180)]),\n",
+ " 'splatter-img': partial(rotate_point_cloud, rotations=[('x', -60)]),\n",
+ " # \n",
+ " 'OpenLRM': partial(rotate_point_cloud, rotations=[('x', 0)]),\n",
+ " 'shape-e': partial(rotate_point_cloud, rotations=[('x', 0)]),\n",
+ " 'GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD': partial(rotate_point_cloud, rotations=[('x', 0)]),\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 68,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "\n",
+ "# instance_name = '3D_Dollhouse_Happy_Brother_pcd_4096'\n",
+ "# instance_name = 'FRUIT_VEGGIE_DOMINO_GRADIENT_pcd_4096'\n",
+ "# instance_name = 'SpiderMan_Titan_Hero_12Inch_Action_Figure_5Hnn4mtkFsP_pcd_4096'\n",
+ "instance_name = 'Android_Figure_Panda_pcd_4096'\n",
+ "\n",
+ "method_name='LGM'\n",
+ "save_root=f\"/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics-fps\"\n",
+ "\n",
+ "def vis_pcd(method_name, instance_name):\n",
+ "\n",
+ " pcd_path = os.path.join(save_root, method_name, f'{instance_name}.ply')\n",
+ " pcd = trimesh.load(pcd_path).vertices\n",
+ "\n",
+ " pcd = transformation_dict[method_name](pcd)\n",
+ "\n",
+ " color = np.zeros_like(pcd) # just black\n",
+ " vis_colored_pcd(pcd, color)\n",
+ "\n",
+ " # return pcd"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 85,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.plotly.v1+json": {
+ "config": {
+ "plotlyServerURL": "https://plot.ly",
+ "toImageButtonOptions": {
+ "filename": "custom_image",
+ "format": "png",
+ "height": 512,
+ "scale": 4,
+ "width": 512
+ }
+ },
+ "data": [
+ {
+ "marker": {
+ "color": [
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ [Embedded Plotly figure data omitted: a serialized "scatter3d" trace ("mode": "markers", marker size 1, per-point RGB colors all [0, 0, 0]) followed by the raw per-point coordinate arrays of the visualized point cloud, apparently from a generated HTML visualization committed into the diff.]
+ 0.22480061650276184,
+ -0.2622475028038025,
+ -0.2033940851688385,
+ -0.3972000777721405,
+ 0.28111329674720764,
+ 0.031423091888427734,
+ -0.08661748468875885,
+ 0.3757038414478302,
+ 0.35571712255477905,
+ -0.4652712643146515,
+ -0.20158731937408447,
+ 0.27113452553749084,
+ -0.2248934507369995,
+ -0.27101659774780273,
+ 0.26838505268096924,
+ -0.4076652526855469,
+ -0.27546870708465576,
+ 0.10263976454734802,
+ -0.2953779101371765,
+ -0.4068266749382019,
+ -0.15729586780071259,
+ 0.22141695022583008,
+ -0.15391702950000763,
+ 0.07316216826438904,
+ 0.1346307247877121,
+ 0.20496338605880737,
+ 0.23811927437782288,
+ 0.19077906012535095,
+ 0.26919835805892944,
+ 0.14838364720344543,
+ 0.053133249282836914,
+ -0.17556911706924438,
+ 0.16221076250076294,
+ -0.1941876858472824,
+ -0.26146945357322693,
+ 0.026477500796318054,
+ -0.4707488417625427,
+ 0.2639393210411072,
+ 0.4314156174659729,
+ -0.1709054708480835,
+ 0.2972627282142639,
+ -0.05521559715270996,
+ -0.21772485971450806,
+ -0.2233535647392273,
+ -0.11034375429153442,
+ -0.3678198754787445,
+ 0.24592378735542297,
+ 0.2952183485031128,
+ -0.03527442738413811,
+ -0.1638970971107483,
+ 0.14902156591415405,
+ -0.0316624753177166,
+ 0.25760847330093384,
+ -0.33200758695602417,
+ 0.1766013503074646,
+ -0.11720630526542664,
+ 0.00040346384048461914,
+ -0.17128616571426392,
+ 0.04005575180053711,
+ -0.05393856763839722,
+ 0.17186453938484192,
+ 0.11452831327915192,
+ 0.2634042799472809,
+ 0.271504670381546,
+ -0.26390910148620605,
+ -0.15528303384780884,
+ 0.03781026601791382,
+ 0.37606385350227356,
+ 0.07213052362203598,
+ 0.10420398414134979,
+ -0.015514045022428036,
+ -0.0733899474143982,
+ 0.08900671452283859,
+ 0.0733647346496582,
+ 0.27585354447364807,
+ 0.051572322845458984,
+ 0.26188522577285767,
+ 0.2758150100708008,
+ 0.319633424282074,
+ -0.3120397925376892,
+ -0.47988834977149963,
+ 0.3165457248687744,
+ -0.02573680877685547,
+ 0.07113973796367645,
+ 0.09103420376777649,
+ -0.06669473648071289,
+ 0.1003119945526123,
+ 0.18454062938690186,
+ -0.30879759788513184,
+ -0.008278101682662964,
+ 0.059723079204559326,
+ -0.06214326620101929,
+ -0.2419191300868988,
+ -0.026389479637145996,
+ 0.0808112621307373,
+ 0.08109033107757568,
+ -0.14977487921714783,
+ -0.27904680371284485,
+ -0.09990157186985016,
+ -0.1821899116039276,
+ 0.26524466276168823,
+ 0.02821158803999424,
+ 0.22392986714839935,
+ 0.04697737097740173,
+ -0.26619479060173035,
+ -0.1055791974067688,
+ -0.4466365575790405,
+ 0.19813260436058044,
+ -0.07052774727344513,
+ -0.09163838624954224,
+ 0.1282825767993927,
+ 0.1976747065782547,
+ 0.0628785490989685,
+ 0.3795490264892578,
+ -0.09315434098243713,
+ 0.21547263860702515,
+ 0.195043683052063,
+ 0.27796047925949097,
+ 0.06425595283508301,
+ -0.07752527296543121,
+ -0.05316825211048126,
+ -0.17511974275112152,
+ 0.2860175669193268,
+ 0.12032115459442139,
+ 0.3183198571205139,
+ 0.29980260133743286,
+ -0.17763707041740417,
+ -0.17637476325035095,
+ -0.23758068680763245,
+ 0.4202609658241272,
+ 0.16083796322345734,
+ 0.1848021149635315,
+ 0.07043039798736572,
+ 0.2779417634010315,
+ 0.2248152196407318,
+ 0.35919302701950073,
+ -0.3832954168319702,
+ -0.03480944037437439,
+ -0.17299912869930267,
+ 0.2600407302379608,
+ 0.18604877591133118,
+ 0.10295048356056213,
+ -0.08735537528991699,
+ -0.25816357135772705,
+ -0.18746501207351685,
+ 0.2743052840232849,
+ -0.2305375039577484,
+ 0.16333019733428955,
+ 0.21259650588035583,
+ 0.4308408498764038,
+ 0.14749285578727722,
+ 0.25828951597213745,
+ 0.3010018467903137,
+ 0.27200645208358765,
+ 0.1489715576171875,
+ -0.2650858759880066,
+ -0.1380699872970581,
+ -0.1612335592508316,
+ -0.2358866035938263,
+ -0.036540210247039795,
+ 0.2800122797489166,
+ -0.034900546073913574,
+ 0.2742852568626404,
+ 0.11759394407272339,
+ -0.23216432332992554,
+ -0.1284325271844864,
+ -0.09626772999763489,
+ 0.3016235828399658,
+ -0.20779427886009216,
+ -0.2803839445114136,
+ -0.18862447142601013,
+ 0.03936609625816345,
+ -0.13242721557617188,
+ 0.09131331741809845,
+ -0.2840474545955658,
+ -0.15037836134433746,
+ -0.26017335057258606,
+ -0.2483503818511963,
+ -0.01254957914352417,
+ -0.4496091604232788,
+ 0.20716409385204315,
+ -0.08840987086296082,
+ -0.26596659421920776,
+ -0.011094175279140472,
+ -0.23232650756835938,
+ 0.1524919867515564,
+ 0.37197327613830566,
+ -0.006337165832519531,
+ -0.09221630543470383,
+ -0.28132182359695435,
+ -0.4965705871582031,
+ 0.0632178783416748,
+ 0.025388076901435852,
+ -0.3866729736328125,
+ 0.01106022298336029,
+ -0.2988514304161072,
+ -0.19642746448516846,
+ -0.1607222557067871,
+ -0.17055803537368774,
+ 0.03564700484275818,
+ 0.23068463802337646,
+ 0.2576262950897217,
+ -0.03506677597761154,
+ -0.2533853352069855,
+ -0.20216584205627441,
+ -0.03179106116294861,
+ -0.175698384642601,
+ -0.0850064679980278,
+ 0.05349474400281906,
+ -0.16708867251873016,
+ -0.20139025151729584,
+ 0.15511128306388855,
+ -0.2876630425453186,
+ 0.268585205078125,
+ -0.17689017951488495,
+ -0.07670220732688904,
+ 0.06692539155483246,
+ -0.12922340631484985,
+ 0.31461527943611145,
+ 0.2786073386669159,
+ 0.10542833805084229,
+ 0.1969812512397766,
+ 0.3259431719779968,
+ 0.4406416416168213,
+ 0.3009090721607208,
+ -0.12167608737945557,
+ 0.28798288106918335,
+ -0.07364177703857422,
+ 0.092887282371521,
+ 0.38837093114852905,
+ -0.2597190737724304,
+ 0.15056702494621277,
+ -0.1562117338180542,
+ -0.24912843108177185,
+ -0.04383121430873871,
+ -0.2214166224002838,
+ 0.34614166617393494,
+ -0.0867186188697815,
+ 0.03922984004020691,
+ -0.1700831949710846,
+ -0.1700066328048706,
+ -0.0865093469619751,
+ 0.24452900886535645,
+ -0.16153544187545776,
+ 0.1577025055885315,
+ -0.2262689471244812,
+ 0.2745895981788635,
+ -0.14408361911773682,
+ 0.22869989275932312,
+ -0.10420341044664383,
+ 0.2413017302751541,
+ 0.181758850812912,
+ 0.15810978412628174,
+ -0.21351972222328186,
+ 0.18918117880821228,
+ 0.27223432064056396,
+ -0.4705451726913452,
+ -0.46180015802383423,
+ 0.11145418882369995,
+ -0.3834468722343445,
+ -0.39772510528564453,
+ -0.15470127761363983,
+ 0.15986870229244232,
+ -0.2110545039176941,
+ 0.05799025297164917,
+ -0.1302487850189209,
+ 0.06313362717628479,
+ -0.48854830861091614,
+ -0.17869432270526886,
+ -0.1420690417289734,
+ 0.2792562246322632,
+ 0.2510063648223877,
+ -0.11907365918159485,
+ 0.30622991919517517,
+ -0.2066800445318222,
+ -0.24591410160064697,
+ 0.07907313108444214,
+ -0.04039962589740753,
+ -0.3687936067581177,
+ 0.07152874767780304,
+ -0.2601546049118042,
+ -0.03690694645047188,
+ -0.23157641291618347,
+ 0.27892130613327026,
+ -0.0025484561920166016,
+ -0.22973906993865967,
+ -0.45955905318260193,
+ 0.3043619692325592,
+ -0.033614132553339005,
+ 0.32242652773857117,
+ 0.2525458037853241,
+ -0.24706876277923584,
+ -0.1163390576839447,
+ -0.24763837456703186,
+ 0.14924481511116028,
+ 0.31539273262023926,
+ -0.14561358094215393,
+ -0.4672454595565796,
+ 0.12656188011169434,
+ -0.09938646852970123,
+ 0.20429296791553497,
+ 0.34722191095352173,
+ -0.04522821307182312,
+ -0.38745900988578796,
+ 0.21581211686134338,
+ 0.16169695556163788,
+ -0.23248139023780823,
+ 0.4270452558994293,
+ -0.4203844964504242,
+ 0.14368057250976562,
+ 0.24690476059913635,
+ -0.22283606231212616,
+ -0.22052212059497833,
+ -0.023938238620758057,
+ 0.24793827533721924,
+ 0.22692564129829407,
+ -0.16769927740097046,
+ 0.12201183289289474,
+ -0.03172624856233597,
+ -0.23329803347587585,
+ -0.018372446298599243,
+ -0.22529879212379456,
+ 0.22829344868659973,
+ 0.20948171615600586,
+ -0.37175533175468445,
+ -0.24926477670669556,
+ 0.3372281789779663,
+ -0.26596060395240784,
+ -0.15759381651878357,
+ -0.3725341558456421,
+ 0.21956345438957214,
+ -0.4244738817214966,
+ -0.22083508968353271,
+ 0.41145485639572144,
+ 0.2764400541782379,
+ 0.24099060893058777,
+ -0.16411036252975464,
+ -0.11725714802742004,
+ -0.10276468098163605,
+ 0.11967465281486511,
+ 0.293451189994812,
+ 0.23758184909820557,
+ 0.0735318660736084,
+ -0.03645867481827736,
+ -0.4280353784561157,
+ -0.20639961957931519,
+ 0.43431878089904785,
+ -0.15201127529144287,
+ -0.042666926980018616,
+ -0.0843077301979065,
+ -0.27366122603416443,
+ 0.05553022027015686,
+ 0.12120994925498962,
+ -0.2520813047885895,
+ -0.11393857002258301,
+ 0.016212671995162964,
+ -0.11479145288467407,
+ -0.2464049756526947,
+ 0.03065088391304016,
+ 0.27125632762908936,
+ 0.2931419610977173,
+ 0.31667497754096985,
+ -0.07360246777534485,
+ 0.272333562374115,
+ 0.1129494309425354,
+ -0.24520394206047058,
+ -0.14632731676101685,
+ -0.457796186208725,
+ 0.41079384088516235,
+ -0.05486147105693817,
+ -0.022824488580226898,
+ 0.20696541666984558,
+ -0.01341468095779419,
+ -0.0025694668292999268,
+ 0.16225318610668182,
+ 0.1757635623216629,
+ -0.2053406834602356,
+ 0.14010868966579437,
+ -0.13228997588157654,
+ 0.013567626476287842,
+ 0.16149082779884338,
+ -0.2126273512840271,
+ 0.2954239845275879,
+ 0.14451229572296143,
+ 0.20537352561950684,
+ -0.35152149200439453,
+ 0.07463964819908142,
+ 0.004264429211616516,
+ 0.09011027216911316,
+ -0.3606600761413574,
+ -0.17392882704734802,
+ -0.2639371156692505,
+ -0.024241141974925995,
+ -0.27399691939353943,
+ -0.03877095505595207,
+ 0.36987948417663574,
+ -0.14074945449829102,
+ -0.13532496988773346,
+ 0.09040579199790955,
+ 0.3024633526802063,
+ 0.01700243353843689,
+ 0.2490476667881012,
+ -0.1372237205505371,
+ 0.25969210267066956,
+ 0.2758447229862213,
+ -0.12656991183757782,
+ 0.4196060299873352,
+ 0.25755083560943604,
+ 0.4225436747074127,
+ -0.4636368453502655,
+ 0.06970241665840149,
+ -0.07977303862571716,
+ 0.41141992807388306,
+ -0.09542599320411682,
+ 0.10276685655117035,
+ 0.310100257396698,
+ 0.3059803545475006,
+ 0.11313530802726746,
+ -0.3224535584449768,
+ 0.10999880731105804,
+ -0.029786482453346252,
+ -0.23527318239212036,
+ -0.10175102949142456,
+ 0.3074822425842285,
+ -0.17598211765289307,
+ 0.05654818192124367,
+ 0.28391754627227783,
+ 0.2942429780960083,
+ 0.18308767676353455,
+ 0.19133096933364868,
+ -0.41514715552330017,
+ 0.18966001272201538,
+ -0.26070448756217957,
+ -0.22639724612236023,
+ 0.3065890669822693,
+ -0.4327552914619446,
+ -0.22935903072357178,
+ 0.43558835983276367,
+ 0.22742512822151184,
+ -0.2609217166900635,
+ 0.2709985375404358,
+ -0.49369752407073975,
+ -0.234602689743042,
+ 0.3440802991390228,
+ 0.04003605246543884,
+ 0.30418530106544495,
+ 0.1841454803943634,
+ 0.022996842861175537,
+ -0.04424058645963669,
+ -0.2800735533237457,
+ 0.12682011723518372,
+ 0.08249896764755249,
+ -0.46352440118789673,
+ -0.2790566086769104,
+ -0.17012006044387817,
+ -0.2675018310546875,
+ 0.08914950489997864,
+ -0.04485994577407837,
+ -0.19348731637001038,
+ -0.2386484146118164,
+ 0.16725999116897583,
+ -0.21886108815670013,
+ 0.2546830177307129,
+ 0.24684709310531616,
+ 0.2205611765384674,
+ -0.18922825157642365,
+ 0.02071937918663025,
+ 0.4319809377193451,
+ 0.17190486192703247,
+ 0.22042691707611084,
+ -0.004704385995864868,
+ 0.2537105977535248,
+ 0.08513236045837402,
+ -0.2015792727470398,
+ -0.05443665385246277,
+ 0.03967843949794769,
+ -0.24614578485488892,
+ 0.06338053941726685,
+ 0.21557104587554932,
+ -0.15475249290466309,
+ -0.488832026720047,
+ 0.3092242479324341,
+ 0.18788683414459229,
+ -0.05398213863372803,
+ 0.39718857407569885,
+ -0.15417414903640747,
+ -0.15772265195846558,
+ -0.09106665849685669,
+ 0.20330046117305756,
+ -0.27883896231651306,
+ -0.2628204822540283,
+ -0.126556396484375,
+ -0.02381393313407898,
+ -0.09369093179702759,
+ -0.1518738865852356,
+ -0.27969714999198914,
+ 0.26718392968177795,
+ 0.027208417654037476,
+ -0.04642444849014282,
+ -0.07618477940559387,
+ -0.22299781441688538,
+ -0.0023390203714370728,
+ 0.0006568431854248047,
+ 0.2434019148349762,
+ 0.20120343565940857,
+ -0.0544508695602417,
+ 0.38106968998908997,
+ -0.23773837089538574,
+ 0.17166173458099365,
+ -0.07744261622428894,
+ -0.045596569776535034,
+ 0.030295683071017265,
+ -0.25233539938926697,
+ 0.10993653535842896,
+ 0.08435890078544617,
+ -0.13083457946777344,
+ -0.14140130579471588,
+ 0.334355890750885,
+ -0.4319162964820862,
+ 0.2694845199584961,
+ 0.020109713077545166,
+ 0.3581068217754364,
+ 0.19278687238693237,
+ -0.06825864315032959,
+ 0.04003136232495308,
+ -0.3175751566886902,
+ 0.26412805914878845,
+ 0.2774742841720581,
+ 0.22065109014511108,
+ 0.07243716716766357,
+ -0.43850159645080566,
+ 0.11488480865955353,
+ 0.2797364592552185,
+ 0.10318121314048767,
+ 0.12657096982002258,
+ 0.31623101234436035,
+ -0.43180543184280396,
+ 0.21479004621505737,
+ 0.19015228748321533,
+ -0.028373122215270996,
+ 0.2146090567111969,
+ 0.08854535222053528,
+ 0.0680508017539978,
+ 0.4314451217651367,
+ -0.2634730935096741,
+ 0.17043788731098175,
+ -0.21131786704063416,
+ -0.28245967626571655,
+ 0.15812483429908752,
+ 0.29639530181884766,
+ -0.17428694665431976,
+ -0.26990842819213867,
+ 0.10618868470191956,
+ 0.14376243948936462,
+ -0.08676007390022278,
+ -0.14535224437713623,
+ 0.02329622209072113,
+ 0.18931157886981964,
+ -0.040994077920913696,
+ -0.49416863918304443,
+ -0.27854347229003906,
+ 0.44204387068748474,
+ -0.16078917682170868,
+ 0.2134382128715515,
+ -0.18305015563964844,
+ -0.07124020159244537,
+ 0.22002477943897247,
+ 0.17193371057510376,
+ -0.22366684675216675,
+ 0.41932907700538635,
+ -0.44627314805984497,
+ -0.12185996770858765,
+ -0.1426190435886383,
+ -0.21217301487922668,
+ 0.43069133162498474,
+ -0.17067748308181763,
+ 0.18571650981903076,
+ 0.08248084783554077,
+ -0.286362886428833,
+ -0.3417224586009979,
+ -0.027210205793380737,
+ -0.2297663688659668,
+ 0.08608786761760712,
+ 0.12805882096290588,
+ -0.2421988695859909,
+ 0.028757035732269287,
+ 0.08180367946624756,
+ 0.17747080326080322,
+ 0.2797200679779053,
+ -0.24210211634635925,
+ 0.1894475370645523,
+ 0.15582400560379028,
+ 0.41145119071006775,
+ -0.42848026752471924,
+ 0.11391067504882812,
+ -0.1193980723619461,
+ -0.07361137866973877,
+ -0.26494988799095154,
+ -0.03387908637523651,
+ 0.059650227427482605,
+ -0.25261473655700684,
+ 0.13491831719875336,
+ 0.028637222945690155,
+ 0.23382696509361267,
+ 0.22454628348350525,
+ -0.34908151626586914,
+ -0.1789049208164215,
+ -0.20685574412345886,
+ 0.2912406325340271,
+ -0.032236844301223755,
+ 0.07140396535396576,
+ -0.36924996972084045,
+ 0.12055385112762451,
+ -0.3433222770690918,
+ -0.11447352170944214,
+ -0.0111006498336792,
+ 0.2754113972187042,
+ 0.26247990131378174,
+ -0.07708537578582764,
+ 0.07770145684480667,
+ 0.24820151925086975,
+ 0.3097299635410309,
+ -0.24154457449913025,
+ 0.43048134446144104,
+ 0.2662220001220703,
+ 0.27213555574417114,
+ -0.26436305046081543,
+ 0.33512964844703674,
+ 0.048550546169281006,
+ 0.42741867899894714,
+ -0.37463754415512085,
+ 0.17991888523101807,
+ 0.007427055388689041,
+ -0.07415652275085449,
+ 0.02735031768679619,
+ -0.26432693004608154,
+ -0.07346487045288086,
+ 0.189475417137146,
+ -0.4194053113460541,
+ -0.44591835141181946,
+ -0.18510526418685913,
+ 0.32634592056274414,
+ 0.12742829322814941,
+ 0.25516176223754883,
+ -0.2267543077468872,
+ -0.4829694628715515,
+ -0.04963997006416321,
+ 0.2865122854709625,
+ 0.12448170781135559,
+ 0.2618943452835083,
+ 0.22176983952522278,
+ 0.19232869148254395,
+ -0.14514613151550293,
+ -0.24738413095474243,
+ 0.06373980641365051,
+ -0.18333876132965088,
+ 0.34973788261413574,
+ 0.06796050071716309,
+ -0.008550561964511871,
+ 0.1145431399345398,
+ -0.3150405287742615,
+ -0.45502451062202454,
+ 0.43693846464157104,
+ 0.05192995071411133,
+ 0.022201508283615112,
+ -0.07547399401664734,
+ -0.01770085096359253,
+ -0.20119547843933105,
+ 0.09933887422084808,
+ 0.09446069598197937,
+ -0.47046852111816406,
+ 0.2205338478088379,
+ 0.24395936727523804,
+ -0.13347910344600677,
+ -0.25765079259872437,
+ -0.1299256980419159,
+ -0.2927923798561096,
+ 0.28171443939208984,
+ 0.1746354103088379,
+ 0.34186601638793945,
+ 0.253772497177124,
+ 0.1952051818370819,
+ 0.14764320850372314,
+ -0.24707505106925964,
+ 0.028798460960388184,
+ -0.17283032834529877,
+ 0.26851436495780945,
+ -0.1875079870223999,
+ -0.3546793758869171,
+ -0.16700321435928345,
+ 0.14121535420417786,
+ -0.012376487255096436,
+ 0.1876799762248993,
+ -0.10956841707229614,
+ -0.09174561500549316,
+ 0.3479175567626953,
+ -0.48303598165512085,
+ 0.3049790859222412,
+ -0.14889755845069885,
+ 0.22271928191184998,
+ -0.09123578667640686,
+ 0.21024248003959656,
+ 0.16128522157669067,
+ -0.14515411853790283,
+ 0.38462865352630615,
+ -0.3715114891529083,
+ 0.42873916029930115,
+ -0.24794667959213257,
+ 0.2081870436668396,
+ 0.39460843801498413,
+ -0.007759958505630493,
+ -0.09418368339538574,
+ -0.16703668236732483,
+ 0.38697075843811035,
+ -0.2637975811958313,
+ -0.05264317989349365,
+ -0.3401361405849457,
+ 0.265419065952301,
+ -0.22315505146980286,
+ -0.1460428535938263,
+ 0.07590058445930481,
+ -0.4965968430042267,
+ -0.19626469910144806,
+ 0.2793854773044586,
+ -0.13448947668075562,
+ 0.26805177330970764,
+ 0.26915496587753296,
+ -0.2086658477783203,
+ 0.4331231117248535,
+ 0.13631898164749146,
+ 0.2664646506309509,
+ 0.04605531692504883,
+ -0.108157217502594,
+ 0.3465036153793335,
+ -0.2816586494445801,
+ -0.27275538444519043,
+ -0.13117051124572754,
+ -0.22255557775497437,
+ -0.2969585955142975,
+ -0.46581822633743286,
+ -0.09811943769454956,
+ 0.36638790369033813,
+ 0.0659308135509491,
+ -0.261184960603714,
+ 0.3947184085845947,
+ -0.05147272348403931,
+ 0.23051172494888306,
+ 0.39489156007766724,
+ 0.2567512094974518,
+ -0.21262693405151367,
+ -0.08699467778205872,
+ -0.025167882442474365,
+ 0.15311986207962036,
+ -0.059055447578430176,
+ -0.1748323142528534,
+ 0.05018509924411774,
+ 0.1413332223892212,
+ 0.19427984952926636,
+ 0.17932820320129395,
+ -0.26411086320877075,
+ 0.2447119653224945,
+ -0.12555280327796936,
+ -0.18476691842079163,
+ -0.2687426805496216,
+ -0.1370713710784912,
+ -0.22911623120307922,
+ -0.3615875244140625,
+ 0.06046751141548157,
+ -0.37980377674102783,
+ -0.0320713147521019,
+ 0.0994800329208374,
+ 0.17480894923210144,
+ -0.16015970706939697,
+ 0.22883014380931854,
+ -0.2466849684715271,
+ -0.011438190937042236,
+ 0.2676500678062439,
+ -0.3097280263900757,
+ 0.43446844816207886,
+ -0.42093247175216675,
+ -0.20903125405311584,
+ -0.03713706135749817,
+ -0.36123955249786377,
+ -0.2550150156021118,
+ -0.31086719036102295,
+ 0.4014904797077179,
+ -0.4111101031303406,
+ 0.4331139922142029,
+ -0.05742752552032471,
+ 0.3940722942352295,
+ 0.00617825984954834,
+ 0.19277620315551758,
+ 0.2327641248703003,
+ 0.01953074336051941,
+ -0.15919005870819092,
+ 0.2412867546081543,
+ -0.03396385908126831,
+ 0.3096255660057068,
+ 0.11639764904975891,
+ 0.16185271739959717,
+ 0.42991358041763306,
+ -0.17793497443199158,
+ -0.273578405380249,
+ 0.26832395792007446,
+ 0.013958960771560669,
+ -0.043085917830467224,
+ -0.1654723584651947,
+ 0.2856638431549072,
+ 0.24402445554733276,
+ -0.08679305016994476,
+ -0.21747012436389923,
+ -0.36769917607307434,
+ -0.013484984636306763,
+ -0.40955764055252075,
+ 0.013985604047775269,
+ 0.15396983921527863,
+ 0.19057273864746094,
+ 0.04004061222076416,
+ 0.2731185555458069,
+ -0.09552150964736938,
+ -0.09735780954360962,
+ 0.3355831205844879,
+ 0.04359547048807144,
+ -0.19531360268592834,
+ -0.2611112594604492,
+ -0.2550573945045471,
+ -0.489986777305603,
+ -0.5000790357589722,
+ 0.13123831152915955,
+ 0.3743710219860077,
+ -0.33609655499458313,
+ -0.08913123607635498,
+ 0.21742141246795654,
+ -0.23850256204605103,
+ 0.04631588235497475,
+ -0.19259637594223022,
+ 0.0545230507850647,
+ -0.4507848620414734,
+ -0.39903050661087036,
+ -0.3838840425014496,
+ -0.48262304067611694,
+ -0.2780917286872864,
+ 0.3727377951145172,
+ 0.20273292064666748,
+ -0.08503496646881104,
+ -0.13996058702468872,
+ 0.12148770689964294,
+ 0.11538401246070862,
+ -0.09402209520339966,
+ 0.43960198760032654,
+ -0.21835899353027344,
+ -0.1821373701095581,
+ -0.11689923703670502,
+ -0.012848630547523499,
+ -0.053094878792762756,
+ -0.4878681004047394,
+ 0.1738450527191162,
+ 0.08176654577255249,
+ -0.31131649017333984,
+ -0.11168450117111206,
+ 0.2973203957080841,
+ -0.11987535655498505,
+ 0.2946739196777344,
+ -0.23423466086387634,
+ 0.04708489775657654,
+ -0.418912410736084,
+ 0.22787779569625854,
+ -0.06241825222969055,
+ 0.24361521005630493,
+ -0.11685799807310104,
+ 0.19753089547157288,
+ -0.11506575345993042,
+ 0.22972099483013153,
+ -0.25429004430770874,
+ -0.07869884371757507,
+ 0.09852447360754013,
+ -0.2571564316749573,
+ -0.17318373918533325,
+ 0.14196108281612396,
+ -0.20050230622291565,
+ -0.17868515849113464,
+ -0.017517179250717163,
+ -0.2578042149543762,
+ 0.4242876172065735,
+ 0.06366530060768127,
+ 0.28413957357406616,
+ 0.40873265266418457,
+ -0.012916624546051025,
+ -0.2695809006690979,
+ -0.13773870468139648,
+ 0.10064667463302612,
+ -0.09758307039737701,
+ -0.032852400094270706,
+ -0.4851928949356079,
+ 0.2414020299911499,
+ 0.41578030586242676,
+ -0.06967990100383759,
+ 0.4293145537376404,
+ 0.17050349712371826,
+ -0.4754495322704315,
+ 0.42057788372039795,
+ -0.10603590309619904,
+ -0.1692051887512207,
+ 0.07360982894897461,
+ -0.2086702287197113,
+ -0.2518990933895111,
+ -0.4421136975288391,
+ 0.2787417471408844,
+ -0.17759764194488525,
+ 0.44974273443222046,
+ 0.17111505568027496,
+ -0.17840653657913208,
+ 0.28791069984436035,
+ 0.3624221086502075,
+ 0.0005452781915664673,
+ -0.21793407201766968,
+ 0.2932147681713104,
+ 0.16501367092132568,
+ 0.07586061954498291,
+ -0.13145315647125244,
+ -0.2191462516784668,
+ -0.18985921144485474,
+ -0.13442102074623108,
+ 0.03459940850734711,
+ 0.009455084800720215,
+ 0.36890193819999695,
+ 0.05738699436187744,
+ 0.0075795650482177734,
+ -0.005184732377529144,
+ -0.15981778502464294,
+ -0.11715063452720642,
+ -0.2724824845790863,
+ -0.3630332052707672,
+ -0.14116472005844116,
+ -0.1725916862487793,
+ -0.17405079305171967,
+ 0.05681982636451721,
+ -0.47352010011672974,
+ 0.006825298070907593,
+ -0.17520791292190552,
+ -0.10926653444766998,
+ 0.42833447456359863,
+ -0.043376341462135315,
+ 0.23863041400909424,
+ -0.06990832090377808,
+ -0.04701375961303711,
+ -0.08144745230674744,
+ 0.21500633656978607,
+ 0.20298443734645844,
+ -0.30140843987464905,
+ 0.17985054850578308,
+ -0.2741214632987976,
+ 0.00302867591381073,
+ 0.19995108246803284,
+ -0.45907536149024963,
+ 0.1604273021221161,
+ 0.10658763349056244,
+ -0.39004868268966675,
+ 0.4337567985057831,
+ 0.16201575100421906,
+ 0.06180444359779358,
+ 0.0746009349822998,
+ -0.07756060361862183,
+ -0.13338130712509155,
+ -0.03439334034919739,
+ -0.21490006148815155,
+ -0.15261100232601166,
+ 0.41504967212677,
+ -0.026840537786483765,
+ 0.2422601580619812,
+ 0.26348578929901123,
+ 0.2217649221420288,
+ -0.17292039096355438,
+ 0.1454676389694214,
+ -0.46895626187324524,
+ -0.384604811668396,
+ 0.278116911649704,
+ 0.28142327070236206,
+ 0.015497714281082153,
+ 0.2892283797264099,
+ 0.010752320289611816,
+ -0.264809787273407,
+ -0.036543309688568115,
+ -0.05703485757112503,
+ -0.039160341024398804,
+ -0.09216371178627014,
+ -0.24430200457572937,
+ 0.17764081060886383,
+ 0.12707862257957458,
+ -0.10760098695755005,
+ -0.46282562613487244,
+ 0.1726102977991104,
+ 0.10425511002540588,
+ -0.05945219099521637,
+ 0.1464356780052185,
+ 0.1116427481174469,
+ 0.034284189343452454,
+ -0.11808490753173828,
+ 0.24230051040649414,
+ -0.4293210506439209,
+ 0.2691046893596649,
+ 0.4325742721557617,
+ -0.12001115083694458,
+ 0.2830544710159302,
+ 0.2629557251930237,
+ 0.005723088979721069,
+ 0.2648579478263855,
+ 0.2686537206172943,
+ 0.4216006398200989,
+ 0.0894138514995575,
+ 0.44008052349090576,
+ 0.4335419833660126,
+ -0.33372625708580017,
+ 0.2778983414173126,
+ -0.2838253974914551,
+ -0.3701757490634918,
+ -0.27427130937576294,
+ 0.22772902250289917,
+ 0.22598779201507568,
+ -0.0661478042602539,
+ -0.25719404220581055,
+ 0.06878329813480377,
+ -0.4416861832141876,
+ 0.19901001453399658,
+ -0.2853367030620575,
+ -0.060894474387168884,
+ 0.27824702858924866,
+ -0.23606276512145996,
+ 0.2886963188648224,
+ -0.024369560182094574,
+ 0.1688346266746521,
+ 0.01560191810131073,
+ -0.3778076469898224,
+ -0.1903541088104248,
+ 0.26088517904281616,
+ -0.21824121475219727,
+ 0.19288936257362366,
+ 0.20390255749225616,
+ -0.4176989793777466,
+ -0.04291176795959473,
+ 0.4491851031780243,
+ -0.15292245149612427,
+ -0.2668497562408447,
+ 0.31297338008880615,
+ 0.09566164016723633,
+ -0.2340385913848877,
+ 0.263449490070343,
+ 0.22926297783851624,
+ 0.25428861379623413,
+ -0.17871451377868652,
+ 0.2943161725997925,
+ 0.18661481142044067,
+ 0.19032645225524902,
+ 0.18361610174179077,
+ 0.320340096950531,
+ -0.4009399712085724,
+ -0.24506837129592896,
+ -0.4836454689502716,
+ 0.14539426565170288,
+ -0.145517498254776,
+ -0.11857900768518448,
+ -0.22184228897094727,
+ -0.09907197952270508,
+ -0.16821661591529846,
+ -0.27488261461257935,
+ -0.38666945695877075,
+ -0.45759373903274536,
+ 0.16079050302505493,
+ -0.40043431520462036,
+ 0.3949241042137146,
+ 0.39599448442459106,
+ 0.13235414028167725,
+ -0.16937009990215302,
+ -0.2367011308670044,
+ -0.12470613420009613,
+ 0.0970606729388237,
+ -0.03880169987678528,
+ -0.027486339211463928,
+ 0.25037187337875366,
+ -0.14417700469493866,
+ 0.24665796756744385,
+ -0.20576469600200653,
+ -0.4589803218841553,
+ -0.3048267066478729,
+ 0.2799888253211975,
+ -0.09952908754348755,
+ -0.15945088863372803,
+ -0.2670103907585144,
+ -0.19307760894298553,
+ -0.18041375279426575,
+ 0.28732830286026,
+ 0.23683130741119385,
+ 0.2627226710319519,
+ 0.11419618129730225,
+ -0.2523140013217926,
+ -0.00227278470993042,
+ -0.24993573129177094,
+ 0.01652875542640686,
+ -0.04723191261291504,
+ 0.08067628741264343,
+ -0.2711517810821533,
+ 0.11013513803482056,
+ -0.10353374481201172,
+ 0.11935685575008392,
+ -0.15570798516273499,
+ 0.30417555570602417,
+ 0.1663946509361267,
+ -0.1572532057762146,
+ -0.053518325090408325,
+ -0.013307753950357437,
+ 0.050357282161712646,
+ -0.09260828793048859,
+ 0.21179187297821045,
+ -0.07920089364051819,
+ -0.30729779601097107,
+ -0.07744957506656647,
+ 0.2538295388221741,
+ 0.2630499005317688,
+ -0.20318123698234558,
+ 0.3985087275505066,
+ -0.4668681025505066,
+ -0.24635516107082367,
+ 0.17301487922668457,
+ -0.33216381072998047,
+ 0.027355223894119263,
+ -0.252238005399704,
+ 0.36973416805267334,
+ 0.29690733551979065,
+ -0.21201741695404053,
+ -0.07423059642314911,
+ -0.21154487133026123,
+ -0.09131646156311035,
+ -0.10046252608299255,
+ -0.28586214780807495,
+ -0.17464742064476013,
+ -0.09982547163963318,
+ -0.068156898021698,
+ 0.21109680831432343,
+ -0.22095108032226562,
+ -0.2632431089878082,
+ -0.410308837890625,
+ 0.04642307758331299,
+ -0.25085094571113586,
+ 0.38691872358322144,
+ 0.42330342531204224,
+ 0.3150786757469177,
+ -0.11724875867366791,
+ 0.2585517168045044,
+ -0.4913073182106018,
+ 0.3129231929779053,
+ 0.2854565680027008,
+ -0.15846508741378784,
+ -0.17445939779281616,
+ 0.1668650060892105,
+ -0.04507726430892944,
+ -0.14404988288879395,
+ 0.012899577617645264,
+ 0.16221123933792114,
+ 0.0972958505153656,
+ -0.33713534474372864,
+ 0.28320398926734924,
+ -0.24491864442825317,
+ -0.23806488513946533,
+ 0.2593570947647095,
+ -0.1468481719493866,
+ 0.1318179965019226,
+ -0.2155543565750122,
+ -0.17704330384731293,
+ 0.28375786542892456,
+ 0.28320765495300293,
+ 0.3757317066192627,
+ 0.1883552074432373,
+ 0.23778223991394043,
+ -0.028199702501296997,
+ 0.07170754671096802,
+ 0.1594313085079193,
+ 0.06951731443405151,
+ 0.11274141073226929,
+ 0.36499693989753723,
+ -0.000845588743686676,
+ 0.08346736431121826,
+ 0.43562865257263184,
+ 0.22817230224609375,
+ 0.1274995505809784,
+ -0.26283425092697144,
+ 0.2644401788711548,
+ 0.0840080976486206,
+ -0.4227572977542877,
+ 0.1393786370754242,
+ 0.052368324249982834,
+ -0.09567999839782715,
+ 0.07011684775352478,
+ -0.07790645956993103,
+ 0.1357286125421524,
+ 0.2504046857357025,
+ -0.17208531498908997,
+ -0.11167493462562561,
+ -0.19669939577579498,
+ 0.22614893317222595,
+ 0.20466122031211853,
+ -0.25054657459259033,
+ 0.4323855936527252,
+ -0.03602227568626404,
+ -0.12162084877490997,
+ -0.23064745962619781,
+ 0.31381142139434814,
+ -0.06737703084945679,
+ -0.2068350613117218,
+ -0.2047499418258667,
+ -0.03781843185424805,
+ -0.21185965836048126,
+ -0.16199886798858643,
+ 0.15304870903491974,
+ -0.14959192276000977,
+ -0.05837738513946533,
+ 0.2753833532333374,
+ 0.23250752687454224,
+ -0.4855348467826843,
+ 0.11459803581237793,
+ 0.16314873099327087,
+ -0.11285971850156784,
+ -0.0036347582936286926,
+ -0.045521557331085205,
+ 0.05986309051513672,
+ -0.04981132224202156,
+ 0.1499031037092209,
+ -0.2793178856372833,
+ -0.24340809881687164,
+ -0.12606072425842285,
+ 0.18456164002418518,
+ 0.044129371643066406,
+ 0.18684136867523193,
+ -0.15798281133174896,
+ 0.1872272491455078,
+ -0.26897943019866943,
+ -0.4885216951370239,
+ 0.29780077934265137,
+ -0.2566699981689453,
+ -0.1416124701499939,
+ -0.005268304608762264,
+ -0.2723360061645508,
+ 0.4007560908794403,
+ 0.19540292024612427,
+ 0.05762740969657898,
+ -0.4713975191116333,
+ -0.37437981367111206,
+ -0.3221326768398285,
+ 0.09254568815231323,
+ -0.31626078486442566,
+ 0.14150264859199524,
+ -0.2683981657028198,
+ -0.18012414872646332,
+ -0.17864859104156494,
+ -0.09146061539649963,
+ 0.41794073581695557,
+ 0.04842326045036316,
+ -0.2118951678276062,
+ -0.26749423146247864,
+ -0.14585670828819275,
+ -0.13410650193691254,
+ 0.11292935907840729,
+ 0.4182940125465393,
+ 0.2831178903579712,
+ -0.4838401675224304,
+ -0.2190176397562027,
+ 0.0471937358379364,
+ -0.17537328600883484,
+ 0.23239821195602417,
+ -0.14619165658950806,
+ -0.18495827913284302,
+ -0.31741100549697876,
+ 0.06983217597007751,
+ 0.340646356344223,
+ -0.1952141523361206,
+ -0.022764187306165695,
+ -0.2846629023551941,
+ -0.18610703945159912,
+ 0.18012738227844238,
+ -0.02838999591767788,
+ 0.30891501903533936,
+ 0.4229123592376709,
+ 0.08263278007507324,
+ 0.4315798878669739,
+ -0.23625105619430542,
+ -0.43766769766807556,
+ 0.10072803497314453,
+ -0.32031142711639404,
+ -0.12036045640707016,
+ 0.11850476264953613,
+ -0.19065245985984802,
+ 0.15970325469970703,
+ -0.08753067255020142,
+ 0.06073667109012604,
+ 0.16101761162281036,
+ 0.3547445237636566,
+ 0.08707532286643982,
+ 0.1384296417236328,
+ -0.16923506557941437,
+ -0.11199736595153809,
+ -0.44416433572769165,
+ 0.26913982629776,
+ 0.2226579785346985,
+ -0.10961143672466278,
+ 0.13074539601802826,
+ -0.27825161814689636,
+ 0.26241618394851685,
+ 0.37382790446281433,
+ -0.11552309989929199,
+ -0.38987067341804504,
+ -0.041494548320770264,
+ -0.14068448543548584,
+ -0.0011667311191558838,
+ -0.07137475907802582,
+ 0.1781196892261505,
+ 0.10122781991958618,
+ -0.2649794816970825,
+ -0.23907452821731567,
+ -0.08474744856357574,
+ 0.0797833502292633,
+ 0.08847874402999878,
+ -0.27666088938713074,
+ 0.43615540862083435,
+ -0.013652816414833069,
+ -0.1068606749176979,
+ 0.2763468623161316,
+ -0.16502633690834045,
+ 0.19633346796035767,
+ -0.24180766940116882,
+ -0.21945065259933472,
+ 0.24340099096298218,
+ 0.39617088437080383,
+ 0.2208612561225891,
+ 0.2087797224521637,
+ -0.30178576707839966,
+ 0.28938013315200806,
+ 0.026156306266784668,
+ 0.23875242471694946,
+ -0.001745462417602539,
+ 0.1558818221092224,
+ -0.42773279547691345,
+ 0.1765710711479187,
+ -0.16467803716659546,
+ 0.3556565046310425,
+ -0.23825901746749878,
+ -0.12927837669849396,
+ -0.17876982688903809,
+ -0.01689779758453369,
+ 0.4403395652770996,
+ 0.21239279210567474,
+ 0.2741631865501404,
+ -0.4888615012168884,
+ -0.1218772903084755,
+ -0.1710367053747177,
+ 0.24048840999603271,
+ -0.2594001889228821,
+ -0.2582206726074219,
+ -0.09926638007164001,
+ 0.25598299503326416,
+ 0.17457014322280884,
+ -0.03271132707595825,
+ 0.14078083634376526,
+ 0.1647515892982483,
+ -0.17638343572616577,
+ 0.2578873932361603,
+ -0.10718953609466553,
+ -0.09076181799173355,
+ -0.20930778980255127,
+ -0.05018436908721924,
+ 0.2275140881538391,
+ 0.13454729318618774,
+ 0.19457565248012543,
+ 0.20952200889587402,
+ 0.10511356592178345,
+ 0.12721046805381775,
+ -0.16406011581420898,
+ 0.17615652084350586,
+ -0.43997716903686523,
+ -0.09101689606904984,
+ -0.19631952047348022,
+ 0.3905608057975769,
+ 0.08986759185791016,
+ 0.2446650266647339,
+ 0.15392917394638062,
+ -0.02389666438102722,
+ 0.27999359369277954,
+ 0.1154223382472992,
+ -0.48285573720932007,
+ 0.019170761108398438,
+ 0.32285982370376587,
+ 0.08864834904670715,
+ -0.025647997856140137,
+ -0.2480786144733429,
+ 0.1845458447933197,
+ -0.1691991686820984,
+ 0.3327939808368683,
+ -0.04253363609313965,
+ 0.03372424840927124,
+ 0.2530204653739929,
+ -0.3888712525367737,
+ 0.10484594106674194,
+ 0.3137607276439667,
+ -0.1459132432937622,
+ -0.25479626655578613,
+ 0.2602980136871338,
+ -0.21897727251052856,
+ -0.001804247498512268,
+ -0.09106731414794922,
+ -0.06058555841445923,
+ 0.05968990921974182,
+ 0.37101829051971436,
+ 0.031014680862426758,
+ 0.08305750787258148,
+ -0.28513556718826294,
+ 0.23280596733093262,
+ -0.3474850356578827,
+ -0.20608243346214294,
+ -0.16572201251983643,
+ -0.10138554871082306,
+ 0.14249007403850555,
+ -0.06252872943878174,
+ -0.2149425894021988,
+ -0.15618965029716492,
+ -0.04973117634654045,
+ 0.04770703613758087,
+ 0.3418278992176056,
+ 0.026412665843963623,
+ 0.19306665658950806,
+ 0.3035758435726166,
+ 0.004049316048622131,
+ -0.14062505960464478,
+ 0.24919886887073517,
+ 0.042563945055007935,
+ 0.24988949298858643,
+ -0.1411278247833252,
+ -0.4244314432144165,
+ 0.1938340961933136,
+ -0.12887030839920044,
+ 0.3939496576786041,
+ -0.2108287215232849,
+ 0.14769911766052246,
+ -0.10711713135242462,
+ 0.4259119927883148,
+ -0.22064970433712006,
+ -0.12286520004272461,
+ 0.43777620792388916,
+ 0.055145248770713806,
+ -0.21559566259384155,
+ 0.11288967728614807,
+ 0.18019700050354004,
+ -0.04995155334472656,
+ 0.1270453780889511,
+ -0.4490838646888733,
+ -0.133070707321167,
+ 0.19296133518218994,
+ 0.0038913413882255554,
+ -0.056157439947128296,
+ -0.337008535861969,
+ 0.17379607260227203,
+ 0.27980780601501465,
+ 0.2370653748512268,
+ -0.3143857717514038,
+ -0.19330674409866333,
+ -0.28177112340927124,
+ -0.056003838777542114,
+ -0.20891085267066956,
+ 0.14062225818634033,
+ -0.17999547719955444,
+ -0.42675650119781494,
+ 0.2832762897014618,
+ 0.15015794336795807,
+ -0.12458556890487671,
+ 0.03205901384353638,
+ 0.15472379326820374,
+ -0.05986282601952553,
+ 0.11643931269645691,
+ -0.11475121974945068,
+ 0.30228936672210693,
+ -0.17100679874420166,
+ -0.09093517065048218,
+ -0.02489432692527771,
+ -0.47759485244750977,
+ -0.20510368049144745,
+ 0.1794031858444214,
+ -0.1968001425266266,
+ 0.41032564640045166,
+ -0.06391479074954987,
+ 0.15856656432151794,
+ 0.28553467988967896,
+ 0.17427511513233185,
+ 0.43871068954467773,
+ 0.32330191135406494,
+ -0.02666798233985901,
+ 0.02158987522125244,
+ 0.04142370820045471,
+ -0.13187871873378754,
+ -0.23457515239715576,
+ 0.23858428001403809,
+ 0.026320666074752808,
+ 0.2294674515724182,
+ 0.0038061589002609253,
+ 0.26431137323379517,
+ 0.07929673790931702,
+ 0.2873661518096924,
+ 0.07228901982307434,
+ 0.19399502873420715,
+ 0.17102569341659546,
+ -0.17235496640205383,
+ -0.06952227652072906,
+ -0.05285165458917618,
+ -0.20123422145843506,
+ -0.2623786926269531,
+ 0.1595996618270874,
+ 0.19376862049102783,
+ 0.1537238359451294,
+ -0.3486095666885376,
+ 0.15506711602210999,
+ -0.4861171245574951,
+ -0.11580705642700195,
+ 0.42316189408302307,
+ -0.24743571877479553,
+ -0.15877032279968262,
+ -0.24650472402572632,
+ 0.043078601360321045,
+ -0.08173797279596329,
+ -0.3617410659790039,
+ 0.29680293798446655,
+ -0.28083324432373047,
+ -0.33521854877471924,
+ 0.16895650327205658,
+ 0.4352385699748993,
+ -0.13720372319221497,
+ 0.4236404299736023,
+ -0.1087416410446167,
+ -0.015368327498435974,
+ -0.15868143737316132,
+ 0.23597681522369385,
+ 0.27141720056533813,
+ 0.23318393528461456,
+ 0.28067851066589355,
+ 0.2158336639404297,
+ 0.4018985629081726,
+ -0.2566658854484558,
+ 0.3328150510787964,
+ 0.3051239252090454,
+ -0.0013025403022766113,
+ -0.07267507910728455,
+ 0.281841516494751,
+ 0.0618102103471756,
+ -0.05544000864028931,
+ -0.19136454164981842,
+ -0.04272864758968353,
+ 0.30265629291534424,
+ -0.20245423913002014,
+ 0.13845640420913696,
+ 0.18314631283283234,
+ -0.2311023473739624,
+ 0.01915249228477478,
+ 0.2522403299808502,
+ 0.29676786065101624,
+ -0.26825541257858276,
+ -0.007077664136886597,
+ -0.4952760338783264,
+ 0.2663699984550476,
+ 0.16127760708332062,
+ 0.05208262801170349,
+ -0.4084089398384094,
+ 0.19682061672210693,
+ -0.22860658168792725,
+ -0.26608073711395264,
+ -0.16100724041461945,
+ 0.1714298129081726,
+ 0.25874122977256775,
+ 0.2444952130317688,
+ -0.17963111400604248,
+ -0.04136097431182861,
+ -0.14799761772155762,
+ -0.13111382722854614,
+ -0.4116024374961853,
+ 0.43067654967308044,
+ 0.25288867950439453,
+ 0.14158812165260315,
+ 0.29147788882255554,
+ 0.2405274510383606,
+ 0.28933387994766235,
+ -0.02353501319885254,
+ -0.23112702369689941,
+ 0.170049786567688,
+ 0.4394821524620056,
+ -0.3710790276527405,
+ -0.2319321185350418,
+ 0.35228556394577026,
+ 0.19508576393127441,
+ 0.4388839304447174,
+ -0.465063214302063,
+ -0.18116770684719086,
+ 0.01311849057674408,
+ -0.38316941261291504,
+ 0.4083961248397827,
+ -0.05134188383817673,
+ -0.068448007106781,
+ -0.1570640206336975,
+ 0.25275975465774536,
+ -0.46575719118118286,
+ 0.4144884943962097,
+ -0.04854520410299301,
+ 0.41635021567344666,
+ -0.06833736598491669,
+ 0.3763597011566162,
+ 0.4315616190433502,
+ 0.2702581584453583,
+ 0.41117486357688904,
+ 0.037927329540252686,
+ -0.2682918310165405,
+ 0.12276986986398697,
+ 0.13682794570922852,
+ 0.3063005208969116,
+ -0.370502233505249,
+ 0.3016507029533386,
+ 0.23767566680908203,
+ 0.09309637546539307,
+ -0.030086517333984375,
+ 0.054340630769729614,
+ -0.27223140001296997,
+ 0.06496143341064453,
+ -0.28398287296295166,
+ 0.13596811890602112,
+ -0.1904035210609436,
+ -0.26219508051872253,
+ -0.2567121386528015,
+ 0.1344003975391388,
+ 0.22193053364753723,
+ -0.1305156946182251,
+ 0.4383872151374817,
+ -0.4517110288143158,
+ 0.07206416130065918,
+ -0.23174330592155457,
+ -0.49663370847702026,
+ -0.061308443546295166,
+ 0.41023939847946167,
+ -0.10499650239944458,
+ -0.4322618842124939,
+ 0.04843197762966156,
+ 0.16010133922100067,
+ -0.07071161270141602,
+ 0.27168920636177063,
+ 0.2639782726764679,
+ -0.14780235290527344,
+ -0.08588412404060364,
+ 0.15475612878799438,
+ 0.14648112654685974,
+ -0.2535545825958252,
+ -0.2457706332206726,
+ 0.22746992111206055,
+ -0.020694494247436523,
+ 0.16317015886306763,
+ -0.2316790521144867,
+ -0.44182801246643066,
+ -0.172999307513237,
+ 0.12999916076660156,
+ -0.1241040825843811,
+ -0.13289982080459595,
+ 0.37074580788612366,
+ 0.2843342423439026,
+ -0.011272534728050232,
+ 0.13694950938224792,
+ 0.32339057326316833,
+ 0.3026650547981262,
+ -0.014430582523345947,
+ -0.011229336261749268,
+ 0.4274064898490906,
+ 0.28905680775642395,
+ -0.1753278374671936,
+ 0.3461902141571045,
+ -0.21067756414413452,
+ 0.13715770840644836,
+ -0.4258365333080292,
+ -0.032407790422439575,
+ -0.3219832181930542,
+ 0.15684565901756287,
+ -0.3184502422809601,
+ -0.26692503690719604,
+ 0.14394807815551758,
+ -0.23133441805839539,
+ -0.1995851844549179,
+ 0.07797220349311829,
+ -0.0575903058052063,
+ -0.22522924840450287,
+ -0.19423899054527283,
+ 0.43027764558792114,
+ 0.30154621601104736,
+ -0.48278704285621643,
+ -0.06456679105758667,
+ 0.3343793451786041,
+ -0.05924318730831146,
+ 0.2313835620880127,
+ -0.2314358949661255,
+ 0.189045250415802,
+ -0.051171332597732544,
+ 0.21920914947986603,
+ 0.2351844310760498,
+ -0.06360819935798645,
+ 0.12038317322731018,
+ -0.05779431015253067,
+ -0.4292486310005188,
+ -0.20927193760871887,
+ -0.09869486093521118,
+ -0.35144150257110596,
+ 0.25630104541778564,
+ -0.03529542684555054,
+ -0.27609890699386597,
+ 0.2545416057109833,
+ -0.1868482530117035,
+ -0.17951633036136627,
+ -0.10542447865009308,
+ 0.09102180600166321,
+ 0.018382787704467773,
+ 0.22057902812957764,
+ -0.43625175952911377,
+ 0.319679319858551,
+ -0.40238505601882935,
+ 0.2696842551231384,
+ 0.2814038097858429,
+ -0.25143781304359436,
+ -0.489704430103302,
+ 0.10351040959358215,
+ -0.48188304901123047,
+ -0.12852589786052704,
+ 0.012125074863433838,
+ -0.4327716827392578,
+ 0.3702344298362732,
+ 0.4409167170524597,
+ 0.049580007791519165,
+ -0.34539079666137695,
+ -0.23957425355911255,
+ 0.16767430305480957,
+ -0.30625563859939575,
+ -0.16481298208236694,
+ 0.2576580047607422,
+ -0.18152248859405518,
+ -0.023435063660144806,
+ 0.06626981496810913,
+ 0.015005558729171753,
+ 0.20459400117397308,
+ -0.22188037633895874,
+ 0.20172260701656342,
+ 0.39030781388282776,
+ -0.19516173005104065,
+ -0.27618271112442017,
+ 0.40199387073516846,
+ -0.2233605980873108,
+ -0.17547599971294403,
+ 0.13590992987155914,
+ 0.29192137718200684,
+ -0.0865435004234314,
+ 0.11358952522277832,
+ -0.2505595088005066,
+ -0.4140726923942566,
+ 0.18898820877075195,
+ 0.18204492330551147,
+ -0.09945863485336304,
+ -0.17850077152252197,
+ 0.17662805318832397,
+ 0.1680445373058319,
+ -0.10341382026672363,
+ -0.27961039543151855,
+ 0.21766982972621918,
+ -0.24155020713806152,
+ -0.16726817190647125,
+ -0.18546493351459503,
+ -0.07263146340847015,
+ 0.09597575664520264,
+ 0.3177837133407593,
+ 0.11304709315299988,
+ -0.09681981801986694,
+ 0.028329282999038696,
+ -0.22187653183937073,
+ -0.13444462418556213,
+ -0.23267994821071625,
+ -0.1554139256477356,
+ 0.33457207679748535,
+ -0.015787653625011444,
+ 0.15639233589172363,
+ -0.4667891561985016,
+ 0.04486227035522461,
+ 0.11590710282325745,
+ 0.4195064902305603,
+ -0.2193860411643982,
+ -0.057829439640045166,
+ -0.19026996195316315,
+ 0.1659708023071289,
+ -0.24470680952072144,
+ 0.2785723805427551,
+ 0.17667962610721588,
+ 0.3097282946109772,
+ -0.39160314202308655,
+ -0.2679380178451538,
+ 0.30162113904953003,
+ 0.028450489044189453,
+ 0.26045796275138855,
+ -0.455007404088974,
+ 0.23134857416152954,
+ 0.18724259734153748,
+ 0.1915697455406189,
+ 0.046536803245544434,
+ 0.4358655512332916,
+ -0.2106442153453827,
+ -0.16453096270561218,
+ -0.1391705870628357,
+ 0.05435517430305481,
+ 0.26075878739356995,
+ 0.14917898178100586,
+ 0.05202732980251312,
+ 0.177992045879364,
+ 0.0933537483215332,
+ -0.06575614213943481,
+ 0.0909486711025238,
+ -0.0937691330909729,
+ -0.0706198513507843,
+ 0.41121432185173035,
+ 0.2460039258003235,
+ 0.1060667335987091,
+ 0.04290410876274109,
+ -0.2711554765701294,
+ -0.18169113993644714,
+ -0.011221647262573242,
+ 0.08904597163200378,
+ -0.25099238753318787,
+ 0.25905656814575195,
+ -0.08903640508651733,
+ -0.0761265754699707,
+ -0.21361017227172852,
+ 0.1870032548904419,
+ 0.2048998475074768,
+ -0.17261871695518494,
+ 0.3637295663356781,
+ 0.4203147292137146,
+ 0.3061331808567047,
+ -0.008552685379981995,
+ 0.23135939240455627,
+ 0.2994041442871094,
+ -0.025133609771728516,
+ -0.4662533104419708,
+ 0.23689591884613037,
+ -0.21724915504455566,
+ -0.276958703994751,
+ 0.07909439504146576,
+ 0.16224130988121033,
+ -0.15662223100662231,
+ 0.2744167447090149,
+ 0.20036329329013824,
+ 0.199835404753685,
+ -0.01662800833582878,
+ -0.32901325821876526,
+ 0.1910243034362793,
+ 0.4004131853580475,
+ -0.055764954537153244,
+ -0.06542162597179413,
+ 0.31587275862693787,
+ 0.1826961636543274,
+ -0.16225823760032654,
+ -0.014756537973880768,
+ -0.3499661386013031,
+ -0.17279912531375885,
+ -0.0405733585357666,
+ -0.10190863162279129,
+ 0.19630461931228638,
+ 0.2686929702758789,
+ 0.2125639170408249,
+ -0.1493152379989624,
+ -0.39967888593673706,
+ -0.2840161919593811,
+ -0.34550637006759644,
+ 0.2867162823677063,
+ 0.2860836088657379,
+ 0.12460523843765259,
+ 0.00868455320596695,
+ 0.3388040065765381,
+ 0.03243303298950195,
+ 0.4321193993091583,
+ -0.26903825998306274,
+ -0.36099421977996826,
+ -0.20801304280757904,
+ -0.040270812809467316,
+ 0.0107597466558218,
+ -0.47899746894836426,
+ 0.08046606183052063,
+ 0.2689381241798401,
+ 0.3691239356994629,
+ -0.25060343742370605,
+ -0.25696009397506714,
+ 0.27134862542152405,
+ 0.21679842472076416,
+ 0.1835760921239853,
+ -0.20596367120742798,
+ 0.19884932041168213,
+ -0.19599810242652893,
+ 0.1015799269080162,
+ 0.3322007656097412,
+ 0.14883577823638916,
+ 0.43692487478256226,
+ -0.26397791504859924,
+ -0.24720612168312073,
+ 0.31738927960395813,
+ 0.3112090229988098,
+ -0.051870763301849365,
+ 0.19839723408222198,
+ 0.10206501185894012,
+ 0.36553239822387695,
+ -0.0611879825592041,
+ 0.09518013149499893,
+ -0.0005122125148773193,
+ 0.3133845031261444,
+ 0.10356379300355911,
+ 0.23868608474731445,
+ -0.4759112298488617,
+ -0.2577546238899231,
+ -0.3850986063480377,
+ -0.5011512041091919,
+ 0.2387787103652954,
+ -0.40270712971687317,
+ -0.4437330961227417,
+ 0.32792484760284424,
+ 0.017404211685061455,
+ 0.2824205458164215,
+ -0.19598740339279175,
+ -0.3839951753616333,
+ 0.3264104127883911,
+ 0.06952756643295288,
+ 0.21231800317764282,
+ 0.1694377064704895,
+ 0.06898640096187592,
+ 0.11111865192651749,
+ -0.4508579671382904,
+ 0.09056267887353897,
+ -0.31668412685394287,
+ 0.11040833592414856,
+ 0.13142341375350952,
+ -0.10415972769260406,
+ -0.11217103898525238,
+ -0.15446627140045166,
+ -0.14529138803482056,
+ 0.06505630910396576,
+ -0.10298076272010803,
+ -0.19507451355457306,
+ 0.28260350227355957,
+ -0.1615085005760193,
+ -0.1727714240550995,
+ 0.419059693813324,
+ 0.18981125950813293,
+ 0.06220740079879761,
+ 0.3807069659233093,
+ -0.06401865929365158,
+ -0.1875922679901123,
+ 0.31244587898254395,
+ 0.2664667069911957,
+ 0.22277233004570007,
+ 0.4305956959724426,
+ 0.11280259490013123,
+ -0.044672295451164246,
+ 0.14295674860477448,
+ -0.47640112042427063,
+ -0.07128771394491196,
+ 0.3521946966648102,
+ -0.2710733115673065,
+ -0.1850147247314453,
+ 0.2321346253156662,
+ -0.24157902598381042,
+ -0.1084415465593338,
+ -0.13500767946243286,
+ 0.24096322059631348,
+ 0.07568470388650894,
+ -0.050899311900138855,
+ -0.29768669605255127,
+ -0.1009749174118042,
+ -0.2002044916152954,
+ -0.0629657730460167,
+ 0.18629293143749237,
+ 0.14850404858589172,
+ 0.10823129862546921,
+ -0.06530516594648361,
+ -0.13580864667892456,
+ 0.011535972356796265,
+ 0.19119587540626526,
+ 0.13702178001403809,
+ 0.22017645835876465,
+ 0.418539434671402,
+ 0.18658974766731262,
+ 0.21086692810058594,
+ -0.19316577911376953,
+ 0.10256446897983551,
+ -0.043316423892974854,
+ 0.008419811725616455,
+ 0.19191624224185944,
+ 0.07006761431694031,
+ 0.2427491545677185,
+ -0.13275539875030518,
+ -0.2586946487426758,
+ -0.19117654860019684,
+ 0.1456623375415802,
+ 0.14041903614997864,
+ 0.36155587434768677,
+ 0.43849977850914,
+ -0.1811203956604004,
+ 0.09779170155525208,
+ 0.29433614015579224,
+ 0.41643285751342773,
+ 0.047957271337509155,
+ -0.22687222063541412,
+ 0.2117135226726532,
+ -0.008216381072998047,
+ 0.2822992205619812,
+ -0.10668778419494629,
+ 0.06713069975376129,
+ -0.2035224437713623,
+ -0.19482743740081787,
+ 0.15733879804611206,
+ -0.16835922002792358,
+ 0.09546424448490143,
+ 0.29122984409332275,
+ 0.263956218957901,
+ -0.09148518741130829,
+ 0.2564404308795929,
+ -0.18232345581054688,
+ -0.4707610011100769,
+ 0.023041177541017532,
+ -0.233962282538414,
+ -0.47122591733932495,
+ 0.20670324563980103,
+ 0.25345247983932495,
+ -0.18952026963233948,
+ 0.12948372960090637,
+ -0.37451305985450745,
+ 0.24348688125610352,
+ 0.03200623393058777,
+ 0.17898499965667725,
+ -0.42208781838417053,
+ -0.08981892466545105,
+ -0.21118858456611633,
+ 0.1698741912841797,
+ -0.15925033390522003,
+ -0.13615500926971436,
+ -0.41416671872138977,
+ 0.09629577398300171,
+ -0.024705395102500916,
+ 0.2627045512199402,
+ -0.25765931606292725,
+ 0.15493175387382507,
+ -0.3467731475830078,
+ -0.131496861577034,
+ -0.08660078048706055,
+ -0.23551204800605774,
+ 0.17428290843963623,
+ -0.1759933978319168,
+ -0.11428728699684143,
+ 0.31005004048347473,
+ -0.1573036015033722,
+ -0.1682039499282837,
+ -0.027137890458106995,
+ -0.4555097222328186,
+ -0.4161389470100403,
+ -0.40041032433509827,
+ -0.2037736177444458,
+ -0.14388935267925262,
+ -0.08814752101898193,
+ 0.1777603030204773,
+ 0.30469459295272827,
+ -0.3811066746711731,
+ 0.28197240829467773,
+ 0.09298887848854065,
+ -0.16412228345870972,
+ -0.27052605152130127,
+ -0.17550307512283325,
+ 0.025854922831058502,
+ 0.24868345260620117,
+ -0.18044373393058777,
+ -0.3164830505847931,
+ -0.08885568380355835,
+ -0.023236632347106934,
+ -0.20938318967819214,
+ 0.43126142024993896,
+ -0.3780084550380707,
+ 0.09777355194091797,
+ -0.13423272967338562,
+ 0.3866739273071289,
+ -0.16815973818302155,
+ -0.1680462658405304,
+ -0.23774433135986328,
+ 0.11129581928253174,
+ 0.18634647130966187,
+ 0.1778385043144226,
+ 0.2656552791595459,
+ -0.07764515280723572,
+ -0.12415085732936859,
+ -0.26830190420150757,
+ -0.21012616157531738,
+ 0.10409902781248093,
+ 0.35037362575531006,
+ 0.3043609857559204,
+ 0.0715782642364502,
+ -0.24031603336334229,
+ -0.017897799611091614,
+ -0.22959890961647034,
+ 0.2746000289916992,
+ -0.24710232019424438,
+ -0.05923403054475784,
+ -0.1906866729259491,
+ -0.04857218265533447,
+ -0.24885177612304688,
+ 0.18582338094711304,
+ -0.08123542368412018,
+ 0.4266883134841919,
+ 0.2523460388183594,
+ -0.24337419867515564,
+ 0.17759248614311218,
+ 0.37241023778915405,
+ 0.37018778920173645,
+ 0.3167062997817993,
+ -0.3746558725833893,
+ -0.28310513496398926,
+ 0.3783397674560547,
+ -0.048702239990234375,
+ 0.2723524868488312,
+ -0.16610419750213623,
+ -0.11027014255523682,
+ 0.13151749968528748,
+ -0.2639511525630951,
+ 0.2664039731025696,
+ 0.2443721890449524,
+ -0.21163314580917358,
+ -0.21778124570846558,
+ 0.07815477252006531,
+ -0.05667777359485626,
+ -0.1554604172706604,
+ 0.2628617286682129,
+ 0.16205501556396484,
+ 0.13118329644203186,
+ -0.1086861789226532,
+ 0.19726184010505676,
+ 0.19325029850006104,
+ -0.15452441573143005,
+ -0.11548984050750732,
+ … [embedded interactive-plot residue omitted: the tail of one coordinate array followed by a `"y"` array of several thousand floating-point values from an inlined point-cloud figure; no caption, axis labels, or other recoverable information beyond the `"y"` key] …
+ -0.08162546157836914,
+ -0.003505527973175049,
+ 0.10608017444610596,
+ 0.06752970814704895,
+ -0.000490337610244751,
+ 0.11288722604513168,
+ 0.05125877261161804,
+ -0.25611966848373413,
+ 0.1020960807800293,
+ 0.24388772249221802,
+ -0.0039011090993881226,
+ 0.09086675941944122,
+ -0.04888355731964111,
+ 0.03201262652873993,
+ -0.19300585985183716,
+ 0.07493500411510468,
+ -0.2547285556793213,
+ -0.020695000886917114,
+ -0.10030326247215271,
+ 0.0357820987701416,
+ 0.026352673768997192,
+ 0.011637955904006958,
+ -0.14028577506542206,
+ -0.11450827121734619,
+ 0.20924825966358185,
+ -0.10354611277580261,
+ -0.07433830201625824,
+ -0.10800403356552124,
+ -0.20187300443649292,
+ -0.005403384566307068,
+ -0.23632895946502686,
+ -0.11855712532997131,
+ 0.08967630565166473,
+ -0.08570972084999084,
+ -0.2858119010925293,
+ 0.17454856634140015,
+ -0.10800552368164062,
+ 0.24268507957458496,
+ 0.2903050184249878,
+ -0.24300473928451538,
+ -0.16972720623016357,
+ -0.19871628284454346,
+ 0.26252758502960205,
+ 0.2800590991973877,
+ -0.07859271764755249,
+ 0.07712393999099731,
+ 0.018567562103271484,
+ -0.07376314699649811,
+ -0.2690485715866089,
+ -0.12899518013000488,
+ 0.21912336349487305,
+ 0.09932993352413177,
+ -0.10377773642539978,
+ 0.08628757297992706,
+ -0.21686673164367676,
+ -0.07675440609455109,
+ -0.2615337371826172,
+ -0.018800050020217896,
+ -0.27211129665374756,
+ -0.21597759425640106,
+ 0.08728682994842529,
+ -0.03723341226577759,
+ 0.29070401191711426,
+ 0.17531748116016388,
+ -0.25168102979660034,
+ 0.12825775146484375,
+ -0.25828641653060913,
+ 0.11178669333457947,
+ -0.17921805381774902,
+ -0.07870738953351974,
+ 0.08541607856750488,
+ 0.07893934845924377,
+ 0.29031902551651,
+ -0.21377676725387573,
+ -0.052479952573776245,
+ -0.23237395286560059,
+ 0.28082531690597534,
+ 0.028137095272541046,
+ -0.014090001583099365,
+ 0.262825608253479,
+ -0.25351375341415405,
+ 0.04995429515838623,
+ -0.2602798342704773,
+ -0.10978761315345764,
+ -0.16904157400131226,
+ -0.06188225746154785,
+ -0.06187428534030914,
+ 0.06080088019371033,
+ -0.29668593406677246,
+ -0.1529715657234192,
+ -0.14217370748519897,
+ -0.2180296778678894,
+ 0.038329124450683594,
+ 0.2534504532814026,
+ 0.05235061049461365,
+ 0.06920373439788818,
+ 0.0047209858894348145,
+ 0.10617393255233765,
+ -0.036843061447143555,
+ -0.11381848901510239,
+ 0.10808277130126953,
+ 0.0589677095413208,
+ -0.019490569829940796,
+ -0.2786080241203308,
+ -0.259796142578125,
+ -0.13755619525909424,
+ 0.15378683805465698,
+ 0.25584328174591064,
+ -0.035903096199035645,
+ 0.27467796206474304,
+ -0.0693659782409668,
+ -0.2261866331100464,
+ 0.2758346199989319,
+ -0.2718777656555176,
+ 0.25737178325653076,
+ 0.11064651608467102,
+ 0.1606602966785431,
+ -0.24884694814682007,
+ -0.17747190594673157,
+ -0.033103734254837036,
+ 0.14602798223495483,
+ -0.06916135549545288,
+ -0.16715402901172638,
+ -0.07382281124591827,
+ -0.15252238512039185,
+ 0.2983613610267639,
+ 0.13566964864730835,
+ -0.023715972900390625,
+ -0.03498752415180206,
+ -0.25835946202278137,
+ 0.037533849477767944,
+ 0.02581346035003662,
+ 0.0729641318321228,
+ 0.15842247009277344,
+ 0.06240314245223999,
+ 0.01839040219783783,
+ -0.06763485074043274,
+ 0.15598440170288086,
+ -0.19266629219055176,
+ -0.2589951753616333,
+ -0.009742513298988342,
+ 0.2836732864379883,
+ 0.06573891639709473,
+ -0.21405893564224243,
+ -0.03860737383365631,
+ 0.09971404075622559,
+ -0.07032973319292068,
+ 0.17513680458068848,
+ -0.1124112606048584,
+ -0.26556122303009033,
+ 0.2609596848487854,
+ 0.2946646213531494,
+ 0.06977295875549316,
+ 0.1397581547498703,
+ -0.15809890627861023,
+ 0.19690483808517456,
+ -0.03903341293334961,
+ -0.0765504240989685,
+ -0.07395333051681519,
+ -0.25100845098495483,
+ -0.008947998285293579,
+ 0.2563159465789795,
+ 0.11647927761077881,
+ 0.05082845687866211,
+ 0.28252407908439636,
+ 0.10880551487207413,
+ 0.16547173261642456,
+ -0.19156861305236816,
+ -0.1709027886390686,
+ -0.1254904568195343,
+ 0.0032541267573833466,
+ -0.014270888641476631,
+ -0.18493680655956268,
+ -0.025119692087173462,
+ 0.05944955348968506,
+ 0.0022521615028381348,
+ 0.174904465675354,
+ -0.06023162603378296,
+ -0.15219742059707642,
+ 0.24900370836257935,
+ -0.08801576495170593,
+ -0.14887499809265137,
+ -0.15058523416519165,
+ -0.011015623807907104,
+ 0.08073927462100983,
+ 0.07517218589782715,
+ 0.03920125961303711,
+ 0.26398542523384094,
+ -0.10207349061965942,
+ 0.07286341488361359,
+ 0.05594843626022339,
+ -0.12878908216953278,
+ -0.21562105417251587,
+ -0.10360246896743774,
+ -0.24535340070724487,
+ -0.187208890914917,
+ -0.22192376852035522,
+ 0.07390505075454712,
+ 0.16117380559444427,
+ -0.23352015018463135,
+ 0.15127384662628174,
+ -0.07550680637359619,
+ 0.05404304713010788,
+ 0.010179638862609863,
+ 0.14131981134414673,
+ -0.20980429649353027,
+ -0.0534258633852005,
+ 0.0965394377708435,
+ 0.21595382690429688,
+ -0.20236903429031372,
+ 0.08939865231513977,
+ -0.186137393116951,
+ 0.14593958854675293,
+ -0.2625979483127594,
+ 0.08927041292190552,
+ 0.2502753734588623,
+ -0.07812582701444626,
+ -0.2740679383277893,
+ -0.2590928077697754,
+ -0.25320011377334595,
+ 0.0953584611415863,
+ -0.14171874523162842,
+ -0.19437171518802643,
+ -0.18122977018356323,
+ 0.05311310291290283,
+ 0.07733681052923203,
+ -0.23667411506175995,
+ 0.048877716064453125,
+ -0.2518099546432495,
+ -0.19765722751617432,
+ 0.2453266978263855,
+ -0.12567591667175293,
+ 0.19968315958976746,
+ -0.26283764839172363,
+ 0.045059382915496826,
+ -0.07697597146034241,
+ -0.028453439474105835,
+ 0.1735798716545105,
+ -0.0944145917892456,
+ 0.0807710587978363,
+ 0.00003275275230407715,
+ -0.1125791147351265,
+ 0.02717190980911255,
+ -0.10414555668830872,
+ 0.11320240795612335,
+ 0.1484239101409912,
+ -0.09126755595207214,
+ 0.07983320951461792,
+ 0.01784074306488037,
+ -0.1552378237247467,
+ 0.1737355887889862,
+ -0.21812421083450317,
+ 0.18828418850898743,
+ -0.04289326071739197,
+ 0.10722607374191284,
+ -0.03219256550073624,
+ 0.2482336461544037,
+ 0.229839026927948,
+ -0.15748494863510132,
+ -0.08721089363098145,
+ -0.0941406786441803,
+ -0.12826311588287354,
+ 0.027494847774505615,
+ -0.10030317306518555,
+ -0.018552720546722412,
+ 0.005648255348205566,
+ 0.18605270981788635,
+ -0.15845966339111328,
+ 0.03168356418609619,
+ -0.024420499801635742,
+ -0.11635971069335938,
+ 0.2441646009683609,
+ 0.18480700254440308,
+ 0.25783097743988037,
+ 0.034582555294036865,
+ 0.018122971057891846,
+ -0.2707769274711609,
+ -0.2306690812110901,
+ 0.2686063051223755,
+ -0.08658021688461304,
+ -0.09828490018844604,
+ 0.16575628519058228,
+ -0.12767156958580017,
+ -0.15457725524902344,
+ 0.13441574573516846,
+ -0.2529428005218506,
+ 0.19980451464653015,
+ -0.21431533992290497,
+ -0.003025844693183899,
+ -0.11901775002479553,
+ 0.030494138598442078,
+ 0.017174553126096725,
+ 0.1136791929602623,
+ 0.2994420528411865,
+ -0.26320624351501465,
+ -0.23233762383460999,
+ 0.16797804832458496,
+ 0.25947466492652893,
+ -0.10288971662521362,
+ -0.2854885458946228,
+ -0.26701095700263977,
+ -0.01653023064136505,
+ 0.19509941339492798,
+ 0.22194010019302368,
+ 0.13546088337898254,
+ 0.068285271525383,
+ -0.25745195150375366,
+ 0.07195964455604553,
+ 0.27467185258865356,
+ 0.22737839818000793,
+ -0.15773022174835205,
+ -0.08195261657238007,
+ -0.263620525598526,
+ 0.27421361207962036,
+ 0.1928725242614746,
+ 0.2392473816871643,
+ -0.2548794150352478,
+ -0.19872403144836426,
+ -0.1987825632095337,
+ 0.21940934658050537,
+ 0.1403685063123703,
+ -0.0803508460521698,
+ -0.03455007076263428,
+ -0.2407102882862091,
+ -0.11956985294818878,
+ -0.04057508707046509,
+ -0.26281628012657166,
+ -0.14695292711257935,
+ 0.21307796239852905,
+ 0.28713858127593994,
+ -0.17818361520767212,
+ -0.22696532309055328,
+ -0.2601240277290344,
+ -0.21380972862243652,
+ 0.28017184138298035,
+ 0.11419957876205444,
+ -0.13191282749176025,
+ 0.02371877431869507,
+ -0.24093526601791382,
+ 0.2493409514427185,
+ -0.0034692883491516113,
+ 0.30051475763320923,
+ -0.27032971382141113,
+ -0.2634710669517517,
+ -0.26790308952331543,
+ -0.2644956707954407,
+ 0.07768487930297852,
+ -0.13709646463394165,
+ 0.2519655227661133,
+ 0.007834136486053467,
+ -0.2389194667339325,
+ 0.22077114880084991,
+ 0.21248675882816315,
+ -0.1598512828350067,
+ -0.04987192153930664,
+ 0.02059009298682213,
+ 0.058994993567466736,
+ 0.1365969181060791,
+ 0.2568054497241974,
+ 0.27612268924713135,
+ -0.006540477275848389,
+ -0.0828942060470581,
+ -0.2113444209098816,
+ 0.023717761039733887,
+ -0.026506543159484863,
+ -0.05487090349197388,
+ -0.02182769775390625,
+ -0.14011171460151672,
+ -0.10102438926696777,
+ -0.24964815378189087,
+ 0.11851096898317337,
+ -0.1260792464017868,
+ 0.16205915808677673,
+ -0.24014919996261597,
+ -0.09279769659042358,
+ 0.05189788341522217,
+ 0.20236550271511078,
+ 0.13085275888442993,
+ 0.017705097794532776,
+ 0.25029921531677246,
+ -0.13587737083435059,
+ -0.09286445379257202,
+ -0.10747531056404114,
+ 0.007612351328134537,
+ -0.03732055425643921,
+ 0.001295328140258789,
+ -0.1388542205095291,
+ 0.20650383830070496,
+ 0.2590261697769165,
+ 0.23044905066490173,
+ -0.07421498000621796,
+ 0.28066110610961914,
+ -0.08816397190093994,
+ -0.08052192628383636,
+ -0.2445451021194458,
+ 0.03944912552833557,
+ -0.02053278684616089,
+ 0.2112979292869568,
+ -0.2229825258255005,
+ -0.07285399734973907,
+ 0.02979458123445511,
+ -0.28990158438682556,
+ -0.06418922543525696,
+ 0.171360582113266,
+ 0.041302263736724854,
+ -0.024237096309661865,
+ 0.04068708419799805,
+ 0.154873326420784,
+ 0.26966768503189087,
+ 0.18059873580932617,
+ 0.2635238766670227,
+ 0.08033281564712524,
+ -0.1320139467716217,
+ 0.08554047346115112,
+ 0.06433522701263428,
+ -0.2621484398841858,
+ -0.18385320901870728,
+ -0.20568758249282837,
+ 0.2279030978679657,
+ 0.05914318561553955,
+ -0.09962654113769531,
+ -0.2043016254901886,
+ -0.2546950578689575,
+ 0.2724856734275818,
+ 0.04129934310913086,
+ -0.15268748998641968,
+ -0.06652259826660156,
+ -0.23998180031776428,
+ 0.07034280896186829,
+ -0.25301694869995117,
+ 0.26234859228134155,
+ -0.27233314514160156,
+ 0.2706764340400696,
+ -0.23279666900634766,
+ -0.22376155853271484,
+ 0.0653710663318634,
+ -0.1367003321647644,
+ -0.25611549615859985,
+ -0.12733346223831177,
+ -0.23882563412189484,
+ -0.09768087416887283,
+ 0.017104148864746094,
+ 0.29118382930755615,
+ 0.24312257766723633,
+ -0.10820799320936203,
+ 0.19568407535552979,
+ -0.062046557664871216,
+ 0.1134796142578125,
+ -0.16868862509727478,
+ 0.2000775933265686,
+ -0.08999496698379517,
+ 0.20829454064369202,
+ 0.006932750344276428,
+ -0.07732098549604416,
+ 0.10158534348011017,
+ 0.2970876097679138,
+ -0.18470291793346405,
+ 0.12193325161933899,
+ 0.2533313035964966,
+ -0.09393185377120972,
+ 0.045739248394966125,
+ -0.14076268672943115,
+ 0.07617992162704468,
+ -0.029894188046455383,
+ 0.2695915102958679,
+ -0.04835718870162964,
+ -0.2676219344139099,
+ 0.0347500815987587,
+ 0.2190760374069214,
+ 0.14575254917144775,
+ 0.03555077314376831,
+ -0.18854132294654846,
+ -0.19518500566482544,
+ 0.07065150141716003,
+ -0.10284006595611572,
+ -0.0877382680773735,
+ 0.22840160131454468,
+ -0.12556177377700806,
+ 0.2464810311794281,
+ -0.00804370641708374,
+ 0.271788090467453,
+ 0.2622983157634735,
+ -0.06172429397702217,
+ -0.15920954942703247,
+ 0.13623011112213135,
+ 0.25512516498565674,
+ -0.1779070794582367,
+ 0.08594009280204773,
+ -0.15078476071357727,
+ -0.14378687739372253,
+ -0.20989441871643066,
+ -0.1510949432849884,
+ -0.26879990100860596,
+ 0.0783379077911377,
+ -0.19024136662483215,
+ 0.23055121302604675,
+ 0.05430036783218384,
+ -0.07780355215072632,
+ -0.18797901272773743,
+ -0.09510058164596558,
+ -0.08440053462982178,
+ 0.18002863228321075,
+ 0.24518807232379913,
+ 0.28829121589660645,
+ 0.06453216075897217,
+ 0.281106173992157,
+ -0.016205132007598877,
+ -0.2713698148727417,
+ -0.0019423365592956543,
+ -0.26763269305229187,
+ -0.27159634232521057,
+ -0.028292715549468994,
+ 0.17052632570266724,
+ 0.23832198977470398,
+ -0.090626060962677,
+ -0.08205637335777283,
+ -0.02286870777606964,
+ -0.1197863221168518,
+ -0.09915122389793396,
+ -0.2592451870441437,
+ 0.04110157489776611,
+ -0.2201535999774933,
+ 0.12076377123594284,
+ 0.17993956804275513,
+ 0.19725677371025085,
+ -0.2728385925292969,
+ 0.04862682521343231,
+ -0.12418395280838013,
+ 0.2782908082008362,
+ 0.09506601095199585,
+ 0.29537177085876465,
+ 0.27855825424194336,
+ 0.018131673336029053,
+ 0.21133790910243988,
+ -0.02665799856185913,
+ -0.1156068742275238,
+ 0.01894727349281311,
+ -0.24875199794769287,
+ 0.24961039423942566,
+ 0.24325984716415405,
+ 0.19477373361587524,
+ -0.0932324230670929,
+ -0.10730546712875366,
+ 0.2403600513935089,
+ 0.08463042974472046,
+ -0.2736583352088928,
+ -0.12160741537809372,
+ -0.07431033253669739,
+ 0.29961854219436646,
+ -0.15879392623901367,
+ -0.16309118270874023,
+ -0.1712881326675415,
+ 0.14047640562057495,
+ -0.14217445254325867,
+ -0.03651118278503418,
+ -0.029804736375808716,
+ -0.16813255846500397,
+ 0.08538499474525452,
+ 0.18685126304626465,
+ -0.18043075501918793,
+ 0.27530765533447266,
+ 0.058621764183044434,
+ -0.13955390453338623,
+ 0.04692898690700531,
+ -0.06008438020944595,
+ -0.25328516960144043,
+ 0.18110579252243042,
+ 0.2734050750732422,
+ -0.2329551875591278,
+ 0.022120222449302673,
+ 0.2732037901878357,
+ -0.07596075534820557,
+ -0.23998196423053741,
+ 0.11221900582313538,
+ -0.12114948034286499,
+ -0.08047080039978027,
+ -0.011237680912017822,
+ 0.19375914335250854,
+ -0.12631332874298096,
+ -0.1769590675830841,
+ 0.08233681321144104,
+ -0.1960960477590561,
+ 0.05184870958328247,
+ -0.14284878969192505,
+ 0.21024078130722046,
+ 0.11847150325775146,
+ 0.20742398500442505,
+ -0.017285913228988647,
+ 0.07599234580993652,
+ 0.09005331993103027,
+ 0.10541395843029022,
+ -0.2518586814403534,
+ 0.04490804672241211,
+ -0.12646210193634033,
+ -0.19122040271759033,
+ 0.05109348148107529,
+ 0.05730739235877991,
+ -0.041450172662734985,
+ -0.16870105266571045,
+ -0.283040851354599,
+ -0.06892134994268417,
+ -0.16968166828155518,
+ -0.139948308467865,
+ -0.1863902509212494,
+ -0.09760025888681412,
+ -0.26193922758102417,
+ -0.13316041231155396,
+ -0.030292868614196777,
+ -0.14523911476135254,
+ -0.042965494096279144,
+ -0.03645394742488861,
+ 0.2784140706062317,
+ -0.006508588790893555,
+ 0.12710286676883698,
+ -0.06211504340171814,
+ -0.1433526575565338,
+ 0.17060276865959167,
+ -0.2670995593070984,
+ 0.20538035035133362,
+ -0.2323085069656372,
+ 0.1749047040939331,
+ -0.1977613866329193,
+ -0.08994466066360474,
+ 0.2299070656299591,
+ 0.22526592016220093,
+ -0.10447625815868378,
+ -0.21436890959739685,
+ -0.2709394693374634,
+ -0.24236059188842773,
+ 0.21602299809455872,
+ -0.09334269165992737,
+ -0.23910540342330933,
+ -0.0958554744720459,
+ -0.2451002150774002,
+ -0.07663178443908691,
+ -0.08371757715940475,
+ -0.015510231256484985,
+ -0.15834234654903412,
+ 0.045587003231048584,
+ -0.11790835857391357,
+ -0.22513169050216675,
+ -0.11831772327423096,
+ 0.2424355447292328,
+ -0.15043655037879944,
+ -0.09773635864257812,
+ 0.07064634561538696,
+ 0.06312195956707001,
+ -0.08999571204185486,
+ -0.12938934564590454,
+ -0.03489571809768677,
+ 0.24572914838790894,
+ -0.09437069296836853,
+ -0.2487035095691681,
+ 0.24330604076385498,
+ -0.225446879863739,
+ -0.18751072883605957,
+ 0.13293951749801636,
+ 0.04755902290344238,
+ -0.10696257650852203,
+ -0.21592038869857788,
+ 0.07696020603179932,
+ 0.029358863830566406,
+ 0.08821707963943481,
+ 0.024909108877182007,
+ -0.09414483606815338,
+ 0.20651179552078247,
+ 0.10849007964134216,
+ 0.2830226421356201,
+ 0.16368648409843445,
+ 0.2028796672821045,
+ 0.25687021017074585,
+ -0.06155925989151001,
+ 0.1722790002822876,
+ 0.26368841528892517,
+ 0.06847316026687622,
+ 0.12751361727714539,
+ -0.27349787950515747,
+ -0.056512653827667236,
+ -0.099040687084198,
+ -0.03984534740447998,
+ 0.29266148805618286,
+ 0.01214231550693512,
+ 0.1340317726135254,
+ 0.011107265949249268,
+ -0.190983846783638,
+ 0.06409847736358643,
+ -0.2099260687828064,
+ -0.060784853994846344,
+ 0.09637093544006348,
+ 0.04306906461715698,
+ 0.25915229320526123,
+ -0.14338356256484985,
+ -0.1007700264453888,
+ -0.21353501081466675,
+ -0.26961541175842285,
+ -0.23348906636238098,
+ 0.24916213750839233,
+ 0.07187609374523163,
+ -0.004315584897994995,
+ 0.188003808259964,
+ 0.23792597651481628,
+ 0.04157659411430359,
+ 0.201790452003479,
+ 0.11221369355916977,
+ 0.16212958097457886,
+ -0.10692974925041199,
+ -0.06032747030258179,
+ 0.00168595090508461,
+ -0.10797879099845886,
+ 0.03366054594516754,
+ 0.0866624116897583,
+ -0.07100837677717209,
+ -0.053911566734313965,
+ -0.009811162948608398,
+ -0.08224374055862427,
+ -0.2931835651397705,
+ 0.05117614567279816,
+ 0.08096432685852051,
+ 0.2844654321670532,
+ -0.1267021894454956,
+ 0.25392547249794006,
+ 0.18062597513198853,
+ -0.04907471314072609,
+ 0.06864815950393677,
+ -0.2665480077266693,
+ 0.07234430313110352,
+ 0.25752124190330505,
+ 0.09531605243682861,
+ 0.01691371202468872,
+ 0.025817275047302246,
+ -0.06681478023529053,
+ 0.284621000289917,
+ -0.07053148746490479,
+ 0.27904653549194336,
+ -0.25041019916534424,
+ -0.008299857378005981,
+ -0.1073153018951416,
+ -0.09618419408798218,
+ 0.1321662813425064,
+ -0.18933284282684326,
+ -0.26682063937187195,
+ 0.029104217886924744,
+ 0.07610303163528442,
+ -0.08966803550720215,
+ 0.011656641960144043,
+ 0.2763245701789856,
+ -0.11190542578697205,
+ -0.0554305799305439,
+ 0.08099199831485748,
+ 0.023880600929260254,
+ 0.08633425831794739,
+ -0.15922367572784424,
+ 0.030281532555818558,
+ 0.04056501388549805,
+ -0.2718201279640198,
+ -0.1481095552444458,
+ -0.008406184613704681,
+ 0.1456897109746933,
+ -0.06073828786611557,
+ 0.24280911684036255,
+ -0.09963643550872803,
+ 0.2765955924987793,
+ -0.028628915548324585,
+ 0.26008257269859314,
+ -0.09673994779586792,
+ -0.1477336287498474,
+ 0.06388123333454132,
+ 0.1925971657037735,
+ -0.24044150114059448,
+ -0.13262750208377838,
+ 0.05577284097671509,
+ 0.148769348859787,
+ -0.12449190020561218,
+ -0.27014073729515076,
+ -0.23804134130477905,
+ -0.12651175260543823,
+ -0.09409762918949127,
+ -0.12290700525045395,
+ 0.27282267808914185,
+ -0.24029096961021423,
+ -0.1849743127822876,
+ 0.06491325795650482,
+ -0.019978046417236328,
+ 0.08541390299797058,
+ -0.10093367099761963,
+ -0.08075618743896484,
+ 0.030741333961486816,
+ -0.09377765655517578,
+ 0.1509588062763214,
+ -0.08559945225715637,
+ 0.11492818593978882,
+ -0.07421883940696716,
+ -0.09243524074554443,
+ 0.1088833212852478,
+ 0.21872538328170776,
+ 0.030065715312957764,
+ -0.2477858066558838,
+ -0.06513243913650513,
+ 0.26898735761642456,
+ -0.0936272144317627,
+ -0.10753904283046722,
+ 0.2663968801498413,
+ 0.18468841910362244,
+ 0.19052237272262573,
+ 0.29080015420913696,
+ 0.28827908635139465,
+ -0.14974665641784668,
+ -0.14380687475204468,
+ -0.0666949599981308,
+ -0.09715777635574341,
+ -0.061824291944503784,
+ -0.0687842071056366,
+ 0.0116872638463974,
+ -0.047651439905166626,
+ 0.20276746153831482,
+ -0.13321071863174438,
+ -0.03828686475753784,
+ -0.15418940782546997,
+ -0.05872326344251633,
+ 0.20650580525398254,
+ -0.26389217376708984,
+ -0.25848180055618286,
+ -0.03602880239486694,
+ 0.07362592220306396,
+ -0.16530030965805054,
+ 0.26611918210983276,
+ -0.09602737426757812,
+ 0.1730896532535553,
+ 0.041543930768966675,
+ -0.09623169898986816,
+ 0.12029993534088135,
+ 0.22288185358047485,
+ 0.22169142961502075,
+ 0.260140061378479,
+ -0.25528156757354736,
+ 0.2573469877243042,
+ 0.03296530246734619,
+ -0.09320622682571411,
+ -0.09979701042175293,
+ -0.1029742956161499,
+ 0.11526933312416077,
+ -0.10416203737258911,
+ -0.12058475613594055,
+ -0.052966222167015076,
+ 0.23245525360107422,
+ -0.06409502029418945,
+ 0.21878910064697266,
+ 0.27328550815582275,
+ 0.051387012004852295,
+ 0.002222776412963867,
+ -0.03921480476856232,
+ 0.21465003490447998,
+ 0.057007014751434326,
+ -0.12036734819412231,
+ -0.235399067401886,
+ 0.0770520567893982,
+ -0.026373445987701416,
+ 0.1151883602142334,
+ -0.20072609186172485,
+ 0.27458035945892334,
+ -0.00718139111995697,
+ 0.02833026647567749,
+ -0.21184659004211426,
+ 0.18753582239151,
+ 0.2233039140701294,
+ -0.10211652517318726,
+ 0.06266510486602783,
+ -0.02327120304107666,
+ 0.01662379503250122,
+ 0.19724541902542114,
+ -0.055579185485839844,
+ 0.24913880228996277,
+ -0.0073555707931518555,
+ -0.2606964111328125,
+ -0.24512863159179688,
+ 0.14366194605827332,
+ 0.06993842124938965,
+ 0.24454493820667267,
+ 0.11469435691833496,
+ 0.09321949630975723,
+ 0.2370009422302246,
+ 0.2533628046512604,
+ 0.2571412920951843,
+ -0.25356021523475647,
+ -0.03627514839172363,
+ -0.15146896243095398,
+ 0.003791511058807373,
+ 0.24922651052474976,
+ -0.1932336688041687,
+ 0.26868152618408203,
+ -0.11980981379747391,
+ -0.09624086320400238,
+ 0.08595740795135498,
+ -0.2618032693862915,
+ -0.2706660032272339,
+ -0.1702522188425064,
+ 0.04122734069824219,
+ 0.09790229797363281,
+ 0.1974189281463623,
+ 0.08848714828491211,
+ -0.2698994278907776,
+ 0.06535756587982178,
+ 0.029128599911928177,
+ -0.043812572956085205,
+ -0.1505439281463623,
+ -0.08711069822311401,
+ -0.00017938017845153809,
+ 0.053713180124759674,
+ -0.20428365468978882,
+ -0.23922044038772583,
+ -0.12539047002792358,
+ 0.1287466585636139,
+ -0.04076831787824631,
+ -0.06064969301223755,
+ 0.05727687478065491,
+ 0.08417336642742157,
+ -0.022924840450286865,
+ -0.17517268657684326,
+ 0.1248994767665863,
+ -0.05781775712966919,
+ -0.12945523858070374,
+ -0.08756047487258911,
+ -0.11395472288131714,
+ -0.20757141709327698,
+ 0.01646856777369976,
+ 0.10125648975372314,
+ 0.02863316237926483,
+ -0.2295207977294922,
+ 0.26544779539108276,
+ -0.008903056383132935,
+ -0.22840148210525513,
+ 0.040285274386405945,
+ -0.111175537109375,
+ -0.26597684621810913,
+ 0.07171115279197693,
+ 0.2724292278289795,
+ 0.16313670575618744,
+ 0.2677688002586365,
+ -0.03637579083442688,
+ 0.1895536184310913,
+ -0.15254509449005127,
+ 0.2753453850746155,
+ -0.008865952491760254,
+ -0.20560872554779053,
+ 0.26464760303497314,
+ -0.2686978578567505,
+ 0.15822911262512207,
+ 0.09348633885383606,
+ 0.2671995162963867,
+ -0.002424962818622589,
+ -0.15229463577270508,
+ 0.0000909566879272461,
+ 0.181875541806221,
+ -0.011009372770786285,
+ -0.10723277926445007,
+ -0.05792659521102905,
+ -0.03966212272644043,
+ -0.26488250494003296,
+ -0.040859490633010864,
+ -0.05333850532770157,
+ -0.25927993655204773,
+ -0.0830385684967041,
+ 0.17093509435653687,
+ -0.17166492342948914,
+ -0.06602880358695984,
+ 0.1691279411315918,
+ -0.006511867046356201,
+ 0.25111040472984314,
+ -0.14567163586616516,
+ 0.22210299968719482,
+ 0.18916088342666626,
+ -0.1281854510307312,
+ 0.03291642665863037,
+ -0.004400983452796936,
+ 0.06594592332839966,
+ -0.2545931339263916,
+ 0.1160927414894104,
+ -0.07414722442626953,
+ 0.2508406937122345,
+ 0.020036756992340088,
+ -0.2476385533809662,
+ -0.09797245264053345,
+ 0.1784401834011078,
+ 0.12355175614356995,
+ -0.11629626154899597,
+ -0.06297135353088379,
+ -0.13510751724243164,
+ -0.16418108344078064,
+ -0.23876243829727173,
+ -0.04487597942352295,
+ 0.07114356756210327,
+ 0.048324018716812134,
+ 0.11468324065208435,
+ 0.09358277916908264,
+ 0.25818443298339844,
+ 0.2884402275085449,
+ -0.018404003232717514,
+ -0.283788800239563,
+ -0.0035147666931152344,
+ 0.11059972643852234,
+ 0.07484817504882812,
+ 0.05107027292251587,
+ 0.2737794518470764,
+ 0.24643215537071228,
+ -0.03008556365966797,
+ 0.2883175313472748,
+ -0.11719223856925964,
+ -0.07631322741508484,
+ 0.15150469541549683,
+ -0.10925546288490295,
+ -0.1276174932718277,
+ 0.19313377141952515,
+ 0.23446673154830933,
+ -0.19176223874092102,
+ -0.219282865524292,
+ 0.2042291760444641,
+ 0.283805251121521,
+ 0.07175882905721664,
+ 0.10422645509243011,
+ -0.004864297807216644,
+ -0.09434260427951813,
+ -0.11868301033973694,
+ 0.002053380012512207,
+ -0.0451505184173584,
+ -0.09737047553062439,
+ 0.2211260199546814,
+ -0.13380831480026245,
+ -0.09562551975250244,
+ -0.24220961332321167,
+ -0.11456888914108276,
+ 0.25977957248687744,
+ 0.05640096962451935,
+ -0.09917855262756348,
+ -0.07127273827791214,
+ -0.024623490869998932,
+ 0.1425890326499939,
+ 0.046645164489746094,
+ 0.008478224277496338,
+ -0.12743788957595825,
+ 0.06723886728286743,
+ -0.04736530780792236,
+ 0.0547175407409668,
+ 0.03747819364070892,
+ -0.11664307117462158,
+ 0.22437784075737,
+ -0.020547330379486084,
+ 0.09378905594348907,
+ -0.2607482075691223,
+ 0.1997917890548706,
+ -0.17305108904838562,
+ 0.27021461725234985,
+ 0.06493496894836426,
+ 0.06491219997406006,
+ -0.268713116645813,
+ -0.08794604241847992,
+ -0.27899035811424255,
+ 0.24478071928024292,
+ 0.2739081382751465,
+ -0.2473246455192566,
+ 0.2572503685951233,
+ -0.2358747273683548,
+ 0.26471146941185,
+ 0.2804241180419922,
+ 0.06397861242294312,
+ -0.08498620986938477,
+ 0.08018496632575989,
+ -0.11502480506896973,
+ 0.0721905305981636,
+ -0.07528704404830933,
+ -0.26406943798065186,
+ -0.0920303463935852,
+ -0.28282785415649414,
+ -0.19424033164978027,
+ 0.04630352556705475,
+ 0.13531535863876343,
+ -0.11707533150911331,
+ -0.06918735057115555,
+ -0.14536502957344055,
+ 0.2746792435646057,
+ 0.0949384868144989,
+ 0.04037892818450928,
+ -0.13752079010009766,
+ 0.08183571696281433,
+ 0.08464482426643372,
+ 0.21942882239818573,
+ -0.20471107959747314,
+ 0.17733478546142578,
+ 0.20397399365901947,
+ 0.0873727798461914,
+ -0.17950654029846191,
+ 0.08163607120513916,
+ 0.10824775695800781,
+ -0.09920138120651245,
+ 0.2653205394744873,
+ 0.14902156591415405,
+ -0.2118712067604065,
+ 0.24827519059181213,
+ 0.029164135456085205,
+ 0.20902995765209198,
+ 0.07110917568206787,
+ 0.2398897260427475,
+ 0.0038216710090637207,
+ -0.08843976259231567,
+ 0.23804669082164764,
+ -0.14437538385391235,
+ -0.09554123878479004,
+ -0.22042137384414673,
+ -0.21318137645721436,
+ -0.05120239779353142,
+ 0.14881843328475952,
+ -0.02254563570022583,
+ 0.19484862685203552,
+ 0.24287426471710205,
+ 0.28308314085006714,
+ -0.09400676190853119,
+ 0.07370838522911072,
+ 0.09321483969688416,
+ 0.23124337196350098,
+ -0.2441580891609192,
+ -0.0005137622356414795,
+ 0.05961146950721741,
+ -0.027181029319763184,
+ -0.18527141213417053,
+ -0.14376115798950195,
+ 0.10378599166870117,
+ -0.08766448497772217,
+ -0.2745181918144226,
+ -0.14330735802650452,
+ -0.18030722439289093,
+ 0.09717589616775513,
+ 0.14987272024154663,
+ -0.25258147716522217,
+ -0.19911912083625793,
+ 0.20845776796340942,
+ -0.031118720769882202,
+ 0.2647498846054077,
+ -0.06388264894485474,
+ 0.24527591466903687,
+ 0.020505035296082497,
+ -0.1373494267463684,
+ -0.26426786184310913,
+ 0.1867283582687378,
+ -0.20311757922172546,
+ -0.06785938143730164,
+ 0.2977275848388672,
+ -0.13828766345977783,
+ -0.06860634684562683,
+ 0.19401775300502777,
+ 0.16999629139900208,
+ -0.09806174039840698,
+ -0.028122752904891968,
+ 0.061929166316986084,
+ -0.17184503376483917,
+ -0.27506065368652344,
+ -0.21760782599449158,
+ 0.07278946042060852,
+ 0.07171046733856201,
+ 0.2068656086921692,
+ -0.23829492926597595,
+ -0.22416901588439941,
+ 0.2223730832338333,
+ -0.10008993744850159,
+ -0.25728797912597656,
+ 0.2765228748321533,
+ 0.019414670765399933,
+ -0.10493330657482147,
+ 0.15754002332687378,
+ -0.08283865451812744,
+ -0.03226293623447418,
+ 0.28368550539016724,
+ 0.1337532252073288,
+ -0.22836364805698395,
+ 0.25197023153305054,
+ 0.273570716381073,
+ 0.04440349340438843,
+ 0.23065048456192017,
+ 0.24819087982177734,
+ 0.010882854461669922,
+ 0.0638245940208435,
+ -0.0057236552238464355,
+ -0.0751156210899353,
+ -0.09742039442062378,
+ 0.15372875332832336,
+ -0.26171308755874634,
+ 0.055043190717697144,
+ 0.050106823444366455,
+ 0.07045108079910278,
+ 0.10779277980327606,
+ -0.22204479575157166,
+ -0.22123576700687408,
+ 0.008196413516998291,
+ -0.21553194522857666,
+ 0.29994213581085205,
+ -0.1839955449104309,
+ 0.24278312921524048,
+ 0.027478933334350586,
+ -0.25846537947654724,
+ 0.28957632184028625,
+ -0.16533246636390686,
+ 0.01538395881652832,
+ 0.08031105995178223,
+ 0.2839284837245941,
+ 0.05249440670013428,
+ 0.08177125453948975,
+ 0.053593218326568604,
+ -0.21286320686340332,
+ -0.13742640614509583,
+ 0.28337764739990234,
+ -0.22949400544166565,
+ -0.17420713603496552,
+ 0.13280248641967773,
+ -0.2564193308353424,
+ 0.25371742248535156,
+ 0.0883898138999939,
+ -0.16181597113609314,
+ -0.06647396087646484,
+ -0.05097582936286926,
+ 0.0807725191116333,
+ -0.28963854908943176,
+ 0.1675982028245926,
+ 0.21429088711738586,
+ -0.15577933192253113,
+ 0.1383572220802307,
+ 0.09737801551818848,
+ 0.022236347198486328,
+ -0.05826455354690552,
+ 0.2535576820373535,
+ 0.14373892545700073,
+ -0.22118766605854034,
+ 0.28439682722091675,
+ -0.04194854199886322,
+ 0.03358471393585205,
+ -0.1183057427406311,
+ -0.22394874691963196,
+ 0.0793546736240387,
+ 0.03349858522415161,
+ -0.07098248600959778,
+ 0.0007999837398529053,
+ 0.02376823127269745,
+ -0.029087930917739868,
+ 0.29629653692245483,
+ 0.13534009456634521,
+ 0.2247503101825714,
+ -0.2565639019012451,
+ -0.24447745084762573,
+ 0.07377684116363525,
+ -0.04489962011575699,
+ -0.1791902482509613,
+ -0.04154530167579651,
+ 0.19219249486923218,
+ -0.270110547542572,
+ 0.2877039909362793,
+ 0.2561131715774536,
+ -0.15452450513839722,
+ 0.05690106749534607,
+ -0.24622249603271484,
+ 0.030305325984954834,
+ 0.24275054037570953,
+ 0.009160399436950684,
+ 0.16942694783210754,
+ 0.15307360887527466,
+ 0.22704333066940308,
+ 0.27466779947280884,
+ -0.08822420239448547,
+ 0.23109784722328186,
+ -0.034959495067596436,
+ 0.21810072660446167,
+ -0.0032685697078704834,
+ -0.08981973677873611,
+ 0.054911017417907715,
+ 0.20748987793922424,
+ 0.23232871294021606,
+ 0.06699493527412415,
+ 0.00804474949836731,
+ 0.19890308380126953,
+ -0.09716558456420898,
+ 0.12724727392196655,
+ -0.017750203609466553,
+ -0.2697616219520569,
+ 0.09315800666809082,
+ 0.2424490749835968,
+ -0.029060781002044678,
+ -0.048948392271995544,
+ 0.22875148057937622,
+ 0.2610582113265991,
+ 0.00506281852722168,
+ -0.07444307208061218,
+ -0.1501627266407013,
+ -0.28978025913238525,
+ 0.2266649603843689,
+ -0.13268721103668213,
+ -0.10851249098777771,
+ -0.23672142624855042,
+ -0.07148842513561249,
+ 0.19726237654685974,
+ -0.045436128973960876,
+ 0.19259455800056458,
+ 0.23622801899909973,
+ -0.005665391683578491,
+ -0.1888737678527832,
+ 0.053075626492500305,
+ 0.23264488577842712,
+ 0.02680748701095581,
+ 0.2246047854423523,
+ 0.01536327600479126,
+ -0.1775413155555725,
+ 0.17062193155288696,
+ -0.18639028072357178,
+ 0.2640243172645569,
+ 0.13262233138084412,
+ -0.22510065138339996,
+ 0.20740659534931183,
+ -0.04281333088874817,
+ -0.24288374185562134,
+ -0.10271596908569336,
+ -0.27193978428840637,
+ 0.05329570174217224,
+ 0.26003798842430115,
+ -0.25971877574920654,
+ -0.1821538805961609,
+ 0.16409695148468018,
+ 0.1988597810268402,
+ -0.13624019920825958,
+ 0.03674156963825226,
+ -0.12387186288833618,
+ 0.1236114501953125,
+ -0.2522485852241516,
+ 0.007663547992706299,
+ 0.17937958240509033,
+ 0.09136110544204712,
+ 0.27517473697662354,
+ -0.19465425610542297,
+ 0.08787447214126587,
+ 0.16657328605651855,
+ -0.09133702516555786,
+ 0.2650860548019409,
+ 0.20348723232746124,
+ 0.21096046268939972,
+ 0.016784250736236572,
+ -0.08657258749008179,
+ -0.24551069736480713,
+ 0.0523228645324707,
+ -0.24450120329856873,
+ -0.1484065055847168,
+ 0.12061650305986404,
+ 0.00808650255203247,
+ 0.015529915690422058,
+ 0.20186692476272583,
+ 0.2730425000190735,
+ 0.06678158044815063,
+ -0.0695040225982666,
+ 0.049950361251831055,
+ -0.22306418418884277,
+ 0.0615105926990509,
+ 0.003848731517791748,
+ 0.2836388051509857,
+ -0.04335784912109375,
+ -0.10194975137710571,
+ -0.2069920301437378,
+ 0.1879901885986328,
+ 0.18005849421024323,
+ 0.29710114002227783,
+ 0.09085728228092194,
+ -0.19246512651443481,
+ 0.11610956490039825,
+ 0.09856098890304565,
+ -0.22307884693145752,
+ 0.0853913426399231,
+ 0.06782566010951996,
+ -0.0639464259147644,
+ -0.043975770473480225,
+ 0.28159934282302856,
+ 0.2223082333803177,
+ -0.0013878345489501953,
+ -0.2857876718044281,
+ -0.26279836893081665,
+ -0.13310158252716064,
+ 0.05402415990829468,
+ 0.15967360138893127,
+ 0.28429436683654785,
+ -0.07187962532043457,
+ 0.0010132789611816406,
+ 0.1106928288936615,
+ -0.25465357303619385,
+ -0.07475914061069489,
+ 0.17716588079929352,
+ 0.03263060003519058,
+ 0.05695199966430664,
+ -0.13627707958221436,
+ 0.11592306196689606
+ ],
+ "z": [
+ -0.4235476553440094,
+ 0.4445751905441284,
+ -0.1710118055343628,
+ 0.16094975173473358,
+ -0.04128082096576691,
+ 0.02504068613052368,
+ -0.4945513606071472,
+ 0.35278668999671936,
+ 0.21079924702644348,
+ 0.30372533202171326,
+ -0.3316706418991089,
+ -0.20241189002990723,
+ -0.06366926431655884,
+ -0.3262461721897125,
+ 0.3221898078918457,
+ 0.00958096981048584,
+ 0.004574891179800034,
+ 0.24014070630073547,
+ -0.2685980796813965,
+ -0.22854027152061462,
+ 0.45852982997894287,
+ -0.07425737380981445,
+ -0.3314507007598877,
+ 0.1749950647354126,
+ -0.30672571063041687,
+ -0.5233712196350098,
+ 0.12497454881668091,
+ 0.33205050230026245,
+ -0.25882256031036377,
+ 0.009945660829544067,
+ -0.33604031801223755,
+ -0.04000389575958252,
+ -0.4807865619659424,
+ 0.41143879294395447,
+ 0.44935935735702515,
+ 0.12117037177085876,
+ 0.13541395962238312,
+ 0.2811846137046814,
+ -0.16637647151947021,
+ -0.13537979125976562,
+ 0.17383988201618195,
+ -0.336136132478714,
+ -0.3396320939064026,
+ -0.08625301718711853,
+ 0.3532467484474182,
+ 0.051105350255966187,
+ 0.020751118659973145,
+ 0.1479853391647339,
+ -0.16554445028305054,
+ 0.06809011846780777,
+ 0.2658795714378357,
+ -0.14242222905158997,
+ 0.2610812783241272,
+ -0.33791860938072205,
+ 0.14884299039840698,
+ -0.19184906780719757,
+ 0.3743654489517212,
+ 0.37635689973831177,
+ -0.07986085116863251,
+ -0.21907176077365875,
+ -0.33785030245780945,
+ -0.3318822979927063,
+ -0.33566877245903015,
+ 0.2770134508609772,
+ -0.11400678753852844,
+ -0.1839015781879425,
+ -0.33108124136924744,
+ 0.41287094354629517,
+ 0.02941516414284706,
+ -0.4416483938694,
+ 0.1228344589471817,
+ -0.4852340817451477,
+ -0.09471765160560608,
+ 0.46281111240386963,
+ 0.01629173755645752,
+ -0.010444819927215576,
+ 0.42991119623184204,
+ 0.1425437331199646,
+ 0.15799903869628906,
+ 0.13386662304401398,
+ -0.230615496635437,
+ 0.14822371304035187,
+ 0.24907419085502625,
+ -0.2661305069923401,
+ 0.017361581325531006,
+ 0.2695760130882263,
+ -0.2298843264579773,
+ -0.16615690290927887,
+ 0.06152951717376709,
+ -0.11283901333808899,
+ 0.37257951498031616,
+ -0.09462043642997742,
+ 0.4218474328517914,
+ -0.42022499442100525,
+ -0.3347332775592804,
+ 0.12960200011730194,
+ -0.17029905319213867,
+ -0.10588446259498596,
+ 0.029435962438583374,
+ -0.3278842568397522,
+ 0.2334248125553131,
+ -0.33682334423065186,
+ 0.09862726926803589,
+ -0.4961674213409424,
+ 0.08341613411903381,
+ -0.42451000213623047,
+ -0.04186004400253296,
+ -0.1712804138660431,
+ 0.4467617869377136,
+ -0.3981694281101227,
+ 0.26575610041618347,
+ 0.36690860986709595,
+ 0.2206297218799591,
+ 0.08007949590682983,
+ -0.3333212435245514,
+ -0.16988515853881836,
+ -0.3326249122619629,
+ 0.3384982943534851,
+ -0.3327004909515381,
+ -0.24113544821739197,
+ 0.164147287607193,
+ -0.39123958349227905,
+ -0.09504857659339905,
+ -0.06796696782112122,
+ -0.010762572288513184,
+ 0.4646526277065277,
+ 0.36345618963241577,
+ 0.18657103180885315,
+ 0.40965405106544495,
+ -0.10778683423995972,
+ 0.26007264852523804,
+ 0.10992034524679184,
+ -0.2296520471572876,
+ -0.5192928910255432,
+ -0.14101840555667877,
+ 0.24002204835414886,
+ 0.21982824802398682,
+ -0.24791409075260162,
+ 0.39685672521591187,
+ -0.4319692850112915,
+ -0.008818328380584717,
+ 0.029452839866280556,
+ -0.0643005520105362,
+ 0.1746962070465088,
+ 0.05325192213058472,
+ 0.36017489433288574,
+ -0.33369189500808716,
+ -0.003725498914718628,
+ 0.25139322876930237,
+ -0.06611765921115875,
+ 0.07532790303230286,
+ -0.10754141211509705,
+ 0.15968842804431915,
+ 0.42825567722320557,
+ 0.28960293531417847,
+ -0.1779651641845703,
+ 0.0490419864654541,
+ 0.18023335933685303,
+ -0.09840062260627747,
+ -0.3997466266155243,
+ 0.46746134757995605,
+ 0.06362655758857727,
+ -0.41344019770622253,
+ -0.3060237765312195,
+ 0.1600152850151062,
+ 0.026208221912384033,
+ -0.33871859312057495,
+ -0.24974161386489868,
+ -0.3341400623321533,
+ 0.0930251032114029,
+ -0.18139249086380005,
+ -0.20286908745765686,
+ 0.18804419040679932,
+ -0.1820363998413086,
+ -0.3388940691947937,
+ -0.5027631521224976,
+ 0.15467888116836548,
+ -0.24124443531036377,
+ -0.04930421710014343,
+ -0.27536332607269287,
+ 0.3271016478538513,
+ -0.09885571897029877,
+ 0.35826706886291504,
+ -0.34221023321151733,
+ 0.1664334535598755,
+ -0.25202566385269165,
+ -0.09967911243438721,
+ 0.3443351089954376,
+ 0.22678129374980927,
+ -0.17167764902114868,
+ -0.3403781056404114,
+ -0.2978289723396301,
+ 0.15118181705474854,
+ -0.0804247111082077,
+ 0.10768496990203857,
+ 0.2073436975479126,
+ -0.24866998195648193,
+ 0.41462913155555725,
+ 0.08666224777698517,
+ 0.09772396087646484,
+ 0.27042055130004883,
+ 0.1470961570739746,
+ 0.11906170845031738,
+ 0.41553550958633423,
+ 0.2426634579896927,
+ -0.4793228507041931,
+ 0.18978239595890045,
+ -0.052638113498687744,
+ -0.30816930532455444,
+ -0.06905093789100647,
+ -0.1432909369468689,
+ -0.3397812247276306,
+ 0.3397864103317261,
+ -0.17553576827049255,
+ 0.42924243211746216,
+ -0.3363695442676544,
+ 0.20415303111076355,
+ 0.31495729088783264,
+ -0.37144386768341064,
+ -0.4250960946083069,
+ -0.4094758629798889,
+ -0.4126015305519104,
+ -0.5099776983261108,
+ -0.08203524351119995,
+ -0.17998147010803223,
+ 0.35707932710647583,
+ 0.45511677861213684,
+ 0.005626201629638672,
+ -0.08648455142974854,
+ -0.49439316987991333,
+ 0.1960749626159668,
+ 0.38225057721138,
+ -0.2663222551345825,
+ 0.057756900787353516,
+ -0.012061715126037598,
+ 0.4119829833507538,
+ 0.011573374271392822,
+ -0.16329425573349,
+ -0.03991907089948654,
+ -0.24592560529708862,
+ 0.15365594625473022,
+ 0.20193952322006226,
+ -0.22555312514305115,
+ -0.33488619327545166,
+ 0.20579659938812256,
+ -0.14034442603588104,
+ -0.1043882966041565,
+ -0.33978909254074097,
+ -0.3298324942588806,
+ -0.28197962045669556,
+ -0.028097398579120636,
+ 0.39394259452819824,
+ -0.27891573309898376,
+ -0.31944215297698975,
+ 0.08566038310527802,
+ -0.33060312271118164,
+ 0.39232438802719116,
+ -0.4580581486225128,
+ 0.3272748589515686,
+ -0.06794089078903198,
+ -0.3343968391418457,
+ 0.2697685956954956,
+ 0.12545175850391388,
+ -0.1516050100326538,
+ 0.03581438958644867,
+ -0.18246132135391235,
+ 0.05861067771911621,
+ 0.4387608468532562,
+ 0.17215503752231598,
+ 0.29513782262802124,
+ -0.3336086869239807,
+ 0.32902371883392334,
+ 0.2880065143108368,
+ -0.3382068872451782,
+ 0.16064929962158203,
+ 0.2761398255825043,
+ 0.1716667115688324,
+ -0.12662434577941895,
+ -0.03916245698928833,
+ -0.18825985491275787,
+ 0.029372096061706543,
+ -0.11921937763690948,
+ 0.08291417360305786,
+ 0.05088921636343002,
+ 0.08368873596191406,
+ 0.2639138698577881,
+ 0.10032105445861816,
+ 0.00039565563201904297,
+ 0.21759837865829468,
+ 0.46051350235939026,
+ -0.39803361892700195,
+ 0.0762549638748169,
+ -0.0010767579078674316,
+ 0.4129416346549988,
+ -0.32440322637557983,
+ -0.16160503029823303,
+ -0.24881193041801453,
+ -0.3387610912322998,
+ 0.2228134721517563,
+ 0.22936689853668213,
+ 0.07317830622196198,
+ -0.03431671857833862,
+ -0.10558515787124634,
+ 0.1616268754005432,
+ -0.5259305238723755,
+ -0.4041265845298767,
+ -0.3272579312324524,
+ -0.1298278570175171,
+ -0.4616641104221344,
+ 0.15617692470550537,
+ 0.31794029474258423,
+ -0.1340768039226532,
+ 0.1958722174167633,
+ -0.3392767310142517,
+ 0.0810525119304657,
+ 0.4523273706436157,
+ 0.113883376121521,
+ 0.18164271116256714,
+ 0.08833430707454681,
+ -0.0022533386945724487,
+ 0.07813906669616699,
+ 0.15581673383712769,
+ -0.3789316415786743,
+ -0.025725066661834717,
+ -0.10658919811248779,
+ 0.38354337215423584,
+ 0.4490659832954407,
+ -0.137833833694458,
+ -0.03936523199081421,
+ 0.30698591470718384,
+ -0.08221007883548737,
+ 0.2988932132720947,
+ 0.13455325365066528,
+ 0.1271306276321411,
+ -0.10141271352767944,
+ -0.3093537986278534,
+ -0.4475187063217163,
+ -0.3434193730354309,
+ -0.4733045995235443,
+ 0.052738040685653687,
+ 0.23031087219715118,
+ 0.2268255352973938,
+ 0.42000240087509155,
+ 0.0175640732049942,
+ -0.3339301347732544,
+ -0.33848536014556885,
+ -0.31726792454719543,
+ -0.02172684669494629,
+ -0.2434144914150238,
+ 0.11748981475830078,
+ -0.2227455973625183,
+ 0.1617969572544098,
+ -0.16171443462371826,
+ 0.40916427969932556,
+ 0.4526832103729248,
+ 0.17081335186958313,
+ 0.46904367208480835,
+ -0.27159062027931213,
+ -0.04569143056869507,
+ -0.3374404311180115,
+ -0.05536150187253952,
+ -0.11657209694385529,
+ -0.511705756187439,
+ 0.3676767945289612,
+ -0.1822831630706787,
+ 0.3719814419746399,
+ 0.1393943727016449,
+ 0.21316838264465332,
+ 0.30182039737701416,
+ -0.0875130295753479,
+ 0.460930734872818,
+ -0.04419669508934021,
+ -0.33141034841537476,
+ -0.2893691062927246,
+ 0.20459002256393433,
+ 0.20218946039676666,
+ 0.34300899505615234,
+ -0.20524084568023682,
+ -0.021052759140729904,
+ -0.012795358896255493,
+ -0.15271788835525513,
+ -0.13828016817569733,
+ 0.08454257249832153,
+ 0.3225623071193695,
+ 0.45948338508605957,
+ 0.4095422923564911,
+ 0.3809257745742798,
+ -0.3333471417427063,
+ 0.16038697957992554,
+ -0.29851070046424866,
+ 0.44530245661735535,
+ -0.3028148412704468,
+ 0.29401591420173645,
+ -0.3793749511241913,
+ -0.11084610223770142,
+ 0.0018076002597808838,
+ 0.3287813663482666,
+ -0.16091641783714294,
+ 0.08045985549688339,
+ -0.32734811305999756,
+ -0.2853696942329407,
+ 0.14319002628326416,
+ -0.029106438159942627,
+ -0.48222285509109497,
+ -0.12974587082862854,
+ -0.49661505222320557,
+ -0.1686539202928543,
+ 0.08623873442411423,
+ 0.1874857246875763,
+ 0.22771432995796204,
+ -0.1687980443239212,
+ 0.18423256278038025,
+ 0.05955827236175537,
+ 0.28545016050338745,
+ 0.08993759006261826,
+ -0.37224993109703064,
+ -0.3361018896102905,
+ 0.14884620904922485,
+ -0.19058501720428467,
+ -0.033259421586990356,
+ -0.3365941047668457,
+ -0.5189986228942871,
+ -0.433075487613678,
+ -0.33703017234802246,
+ -0.27579623460769653,
+ -0.14141705632209778,
+ -0.37161895632743835,
+ -0.1472751796245575,
+ 0.008767843246459961,
+ -0.1841689646244049,
+ 0.06531286239624023,
+ 0.4224904775619507,
+ -0.19727575778961182,
+ -0.33563607931137085,
+ 0.09737339615821838,
+ 0.22292457520961761,
+ 0.12199607491493225,
+ 0.011232376098632812,
+ -0.11020421981811523,
+ 0.3971901535987854,
+ -0.3948240578174591,
+ 0.2939457595348358,
+ -0.22742831707000732,
+ -0.008942008018493652,
+ -0.43793225288391113,
+ 0.035844847559928894,
+ 0.0021821558475494385,
+ 0.14138799905776978,
+ -0.3407789468765259,
+ -0.28558874130249023,
+ -0.3113352656364441,
+ -0.456488698720932,
+ -0.18902722001075745,
+ 0.16425596177577972,
+ -0.20258712768554688,
+ 0.24726873636245728,
+ 0.4478321373462677,
+ 0.17141097784042358,
+ -0.4836333692073822,
+ 0.2106449007987976,
+ 0.17353788018226624,
+ -0.23074114322662354,
+ -0.13346189260482788,
+ 0.17044085264205933,
+ -0.24279934167861938,
+ 0.01075957715511322,
+ 0.3706842362880707,
+ -0.22111541032791138,
+ 0.3584878444671631,
+ -0.044658154249191284,
+ 0.3395458161830902,
+ 0.09325909614562988,
+ 0.20848938822746277,
+ -0.12661999464035034,
+ 0.43706202507019043,
+ -0.045279860496520996,
+ -0.2776761054992676,
+ -0.2949169874191284,
+ 0.4398949444293976,
+ -0.3641813397407532,
+ 0.4386964440345764,
+ 0.08294995129108429,
+ 0.2687617242336273,
+ 0.16768574714660645,
+ 0.46810221672058105,
+ -0.3386459946632385,
+ -0.2940635085105896,
+ 0.03653842210769653,
+ -0.214885413646698,
+ -0.2121327966451645,
+ -0.028094932436943054,
+ -0.3366972804069519,
+ 0.3711375296115875,
+ -0.3694230318069458,
+ -0.05460542440414429,
+ -0.07706648111343384,
+ 0.3844480514526367,
+ 0.014472544193267822,
+ 0.10203524678945541,
+ -0.3828306198120117,
+ 0.18718591332435608,
+ -0.3303033113479614,
+ 0.05048041045665741,
+ 0.05714936554431915,
+ 0.04934185743331909,
+ -0.2463662326335907,
+ -0.1896059513092041,
+ -0.030465543270111084,
+ -0.4549020230770111,
+ 0.32831376791000366,
+ 0.34323033690452576,
+ -0.09188991785049438,
+ -0.11971724033355713,
+ 0.46205028891563416,
+ -0.4541677236557007,
+ -0.28210264444351196,
+ 0.22133120894432068,
+ -0.2830278277397156,
+ 0.39341527223587036,
+ -0.03673282265663147,
+ 0.15109360218048096,
+ -0.4113593101501465,
+ -0.03387865424156189,
+ 0.1854182779788971,
+ 0.289590060710907,
+ 0.10648936033248901,
+ 0.20082959532737732,
+ 0.045448292046785355,
+ -0.07656875252723694,
+ -0.08459311723709106,
+ 0.165872722864151,
+ 0.1572442203760147,
+ 0.3062373101711273,
+ 0.26839208602905273,
+ -0.33249837160110474,
+ 0.0321696400642395,
+ -0.16449087858200073,
+ 0.012088000774383545,
+ 0.21569666266441345,
+ 0.38420671224594116,
+ 0.4037143588066101,
+ -0.12567338347434998,
+ -0.31113913655281067,
+ -0.08310729265213013,
+ 0.05121442675590515,
+ -0.2730410099029541,
+ 0.2888662815093994,
+ -0.06444376707077026,
+ -0.10268762707710266,
+ -0.33422914147377014,
+ -0.3380511999130249,
+ -0.1376170516014099,
+ 0.2460750937461853,
+ 0.04581868648529053,
+ 0.28421810269355774,
+ -0.5119390487670898,
+ -0.06259006261825562,
+ 0.1753939390182495,
+ -0.21456021070480347,
+ -0.03099370002746582,
+ -0.16762998700141907,
+ 0.4552982747554779,
+ 0.3761214315891266,
+ -0.1958751082420349,
+ -0.33734577894210815,
+ 0.4024680256843567,
+ -0.13909271359443665,
+ -0.04716436564922333,
+ 0.37922024726867676,
+ -0.1711239218711853,
+ -0.14305375516414642,
+ 0.12663447856903076,
+ -0.33657634258270264,
+ -0.145493745803833,
+ -0.33572396636009216,
+ -0.42111122608184814,
+ -0.4405210018157959,
+ 0.1743755042552948,
+ 0.18623186647891998,
+ -0.06875838339328766,
+ -0.04421083629131317,
+ 0.41596972942352295,
+ -0.3277803659439087,
+ 0.4045494496822357,
+ 0.3237791061401367,
+ -0.07750213146209717,
+ -0.06206011772155762,
+ 0.35886791348457336,
+ -0.2310018539428711,
+ 0.08269003033638,
+ 0.20902395248413086,
+ 0.31523245573043823,
+ -0.34305381774902344,
+ 0.24288935959339142,
+ -0.4716379940509796,
+ -0.37216800451278687,
+ -0.0453115850687027,
+ -0.33464306592941284,
+ 0.004088282585144043,
+ -0.33503299951553345,
+ -0.26421236991882324,
+ -0.34388110041618347,
+ -0.3330096900463104,
+ -0.44460952281951904,
+ 0.21251389384269714,
+ 0.11426840722560883,
+ 0.13424456119537354,
+ 0.1580698937177658,
+ -0.13863450288772583,
+ -0.2078848034143448,
+ 0.35098499059677124,
+ -0.1845325529575348,
+ 0.34093108773231506,
+ -0.09134340286254883,
+ -0.22815042734146118,
+ -0.3389230966567993,
+ 0.02823352813720703,
+ -0.3384050726890564,
+ -0.03771233558654785,
+ 0.10466867685317993,
+ 0.43100398778915405,
+ 0.03570830821990967,
+ -0.3382200002670288,
+ 0.4417252540588379,
+ -0.3419575095176697,
+ -0.5057450532913208,
+ 0.1963983029127121,
+ -0.3346906304359436,
+ 0.015913724899291992,
+ -0.34019461274147034,
+ 0.14107780158519745,
+ 0.22134751081466675,
+ 0.4658423662185669,
+ 0.03463411331176758,
+ -0.10790607333183289,
+ -0.32902416586875916,
+ 0.33017686009407043,
+ -0.48116499185562134,
+ -0.0521126389503479,
+ -0.36427921056747437,
+ 0.09475797414779663,
+ 0.04638795554637909,
+ 0.11961963772773743,
+ 0.10433902591466904,
+ 0.190607488155365,
+ 0.2754964828491211,
+ 0.30007633566856384,
+ -0.4658379852771759,
+ 0.3001740276813507,
+ -0.37076348066329956,
+ -0.5185806751251221,
+ -0.32892632484436035,
+ -0.19134822487831116,
+ -0.11064829677343369,
+ -0.3072715103626251,
+ 0.20965242385864258,
+ 0.30412471294403076,
+ -0.38232165575027466,
+ 0.10811761021614075,
+ 0.31873005628585815,
+ -0.06543512642383575,
+ -0.2179774045944214,
+ 0.3842748999595642,
+ -0.3386983871459961,
+ 0.12577643990516663,
+ -0.13985824584960938,
+ 0.4059543311595917,
+ 0.4270724654197693,
+ 0.22612369060516357,
+ 0.08937618136405945,
+ 0.08394676446914673,
+ -0.2191954255104065,
+ 0.0026788711547851562,
+ -0.5023999214172363,
+ -0.038334667682647705,
+ -0.29564541578292847,
+ -0.5081665515899658,
+ 0.042401038110256195,
+ 0.22864019870758057,
+ 0.2629011869430542,
+ -0.2878539264202118,
+ -0.09327161312103271,
+ -0.18798354268074036,
+ 0.3466252088546753,
+ -0.1562713086605072,
+ -0.18860790133476257,
+ -0.15677344799041748,
+ 0.15589070320129395,
+ 0.25937947630882263,
+ -0.3611982464790344,
+ -0.0693700760602951,
+ 0.3886474370956421,
+ 0.16155138611793518,
+ 0.38207340240478516,
+ -0.117529958486557,
+ -0.09414023160934448,
+ 0.0358501672744751,
+ 0.251201331615448,
+ 0.047598958015441895,
+ 0.4109661877155304,
+ -0.12100040912628174,
+ -0.17790716886520386,
+ -0.2208903431892395,
+ -0.2932576537132263,
+ -0.06846262514591217,
+ 0.1927204728126526,
+ 0.020156770944595337,
+ 0.2370581328868866,
+ -0.09065563976764679,
+ -0.12622280418872833,
+ 0.2541210353374481,
+ 0.12686455249786377,
+ -0.20012322068214417,
+ 0.2040247917175293,
+ -0.3357313871383667,
+ -0.06902032345533371,
+ 0.13581174612045288,
+ -0.3335373103618622,
+ -0.4191153347492218,
+ 0.12408086657524109,
+ 0.29202473163604736,
+ -0.1128552258014679,
+ 0.17183685302734375,
+ 0.1462940275669098,
+ -0.32734301686286926,
+ -0.18141140043735504,
+ -0.3385913372039795,
+ -0.1564304232597351,
+ -0.5147069692611694,
+ 0.0013670027256011963,
+ 0.013171374797821045,
+ 0.20215532183647156,
+ -0.16253972053527832,
+ 0.261704683303833,
+ -0.03493371605873108,
+ -0.21795296669006348,
+ 0.16933086514472961,
+ -0.12541544437408447,
+ 0.16962063312530518,
+ -0.33188900351524353,
+ -0.24745798110961914,
+ 0.13562214374542236,
+ -0.08751600980758667,
+ -0.5195267200469971,
+ 0.0880422592163086,
+ -0.42913007736206055,
+ 0.006183266639709473,
+ 0.09602649509906769,
+ -0.22773796319961548,
+ 0.4457106292247772,
+ 0.35967767238616943,
+ 0.36024776101112366,
+ -0.042826078832149506,
+ -0.2522745132446289,
+ -0.4629005491733551,
+ 0.10449595749378204,
+ 0.46906745433807373,
+ -0.482692152261734,
+ -0.17976750433444977,
+ 0.389509379863739,
+ -0.4034680128097534,
+ 0.10387665033340454,
+ -0.3059883117675781,
+ -0.3043466806411743,
+ 0.028455525636672974,
+ -0.4912364184856415,
+ 0.24675869941711426,
+ -0.12069568037986755,
+ 0.0891830250620842,
+ -0.31951403617858887,
+ -0.33509159088134766,
+ -0.11557188630104065,
+ 0.17392733693122864,
+ -0.340006947517395,
+ -0.48602521419525146,
+ -0.4053676426410675,
+ 0.39808040857315063,
+ -0.14202886819839478,
+ -0.3403811752796173,
+ -0.3816123306751251,
+ -0.25670984387397766,
+ 0.32265982031822205,
+ -0.3369980752468109,
+ 0.20625221729278564,
+ -0.3272339105606079,
+ -0.3694099485874176,
+ 0.07067936658859253,
+ -0.3677298426628113,
+ 0.10821941494941711,
+ 0.42213961482048035,
+ 0.2851313352584839,
+ 0.1587890386581421,
+ -0.03879985213279724,
+ -0.33403897285461426,
+ -0.32884931564331055,
+ -0.32006436586380005,
+ -0.1721951961517334,
+ -0.33862757682800293,
+ -0.06708675622940063,
+ -0.17036178708076477,
+ 0.35746362805366516,
+ 0.2537623643875122,
+ 0.4645186960697174,
+ 0.1666238158941269,
+ -0.3531668484210968,
+ -0.34218692779541016,
+ -0.4337998032569885,
+ 0.003012537956237793,
+ -0.025628626346588135,
+ 0.301039457321167,
+ 0.33434581756591797,
+ -0.3342609703540802,
+ -0.3089563250541687,
+ 0.15754029154777527,
+ -0.13590601086616516,
+ 0.31979817152023315,
+ -0.25426825881004333,
+ -0.27174752950668335,
+ -0.33523499965667725,
+ 0.4425089955329895,
+ -0.03640618920326233,
+ 0.24596303701400757,
+ -0.3109985888004303,
+ 0.3431084454059601,
+ 0.4477561116218567,
+ -0.11904054880142212,
+ -0.37500500679016113,
+ 0.14770588278770447,
+ 0.19922220706939697,
+ 0.44933420419692993,
+ 0.12754464149475098,
+ -0.24160891771316528,
+ -0.4572523832321167,
+ 0.35539764165878296,
+ 0.15371491014957428,
+ 0.36514008045196533,
+ 0.45099082589149475,
+ -0.5218851566314697,
+ -0.33356773853302,
+ -0.17193523049354553,
+ 0.16206097602844238,
+ 0.27128031849861145,
+ 0.21185794472694397,
+ -0.013937115669250488,
+ -0.12781912088394165,
+ -0.124148428440094,
+ 0.31619125604629517,
+ 0.231744647026062,
+ 0.0071465373039245605,
+ 0.16534852981567383,
+ 0.30686646699905396,
+ 0.052257001399993896,
+ -0.25680986046791077,
+ 0.07493680715560913,
+ 0.3786514103412628,
+ 0.16379660367965698,
+ 0.36947643756866455,
+ 0.1296583116054535,
+ -0.029536783695220947,
+ 0.305223673582077,
+ 0.15516269207000732,
+ -0.2038557231426239,
+ 0.08360493183135986,
+ 0.11420285701751709,
+ 0.304957777261734,
+ 0.13833287358283997,
+ 0.09537959098815918,
+ 0.11759687960147858,
+ -0.32115960121154785,
+ -0.0038881003856658936,
+ -0.13471347093582153,
+ 0.4295108914375305,
+ -0.17739517986774445,
+ 0.16751164197921753,
+ 0.10882800817489624,
+ 0.26051414012908936,
+ -0.22610527276992798,
+ -0.369110107421875,
+ -0.14402125775814056,
+ 0.4295627772808075,
+ 0.1125415563583374,
+ -0.3334263265132904,
+ 0.44114041328430176,
+ 0.3392450511455536,
+ -0.13320139050483704,
+ 0.44747602939605713,
+ 0.3831402659416199,
+ 0.17925751209259033,
+ -0.04197511076927185,
+ -0.13866615295410156,
+ -0.14214067161083221,
+ 0.16100531816482544,
+ 0.15838102996349335,
+ 0.20908769965171814,
+ 0.1689295768737793,
+ 0.467754602432251,
+ -0.09248849004507065,
+ -0.17940858006477356,
+ -0.1770760416984558,
+ 0.07546621561050415,
+ 0.10990281403064728,
+ 0.06272825598716736,
+ 0.3563665747642517,
+ 0.24894839525222778,
+ -0.4665837585926056,
+ 0.001244872808456421,
+ -0.0862872302532196,
+ 0.14696794748306274,
+ 0.2662590742111206,
+ 0.329809308052063,
+ -0.048330195248126984,
+ -0.2590000629425049,
+ -0.20790573954582214,
+ -0.023168504238128662,
+ -0.3376893401145935,
+ -0.2272869050502777,
+ -0.34215235710144043,
+ -0.10140586644411087,
+ 0.4209848642349243,
+ -0.37600046396255493,
+ 0.09535747021436691,
+ 0.4237228035926819,
+ -0.25091785192489624,
+ 0.030370354652404785,
+ 0.29851314425468445,
+ -0.24589312076568604,
+ 0.264087051153183,
+ -0.32833582162857056,
+ 0.35041528940200806,
+ -0.1856217384338379,
+ -0.396714448928833,
+ 0.25174272060394287,
+ -0.4162677228450775,
+ -0.012987732887268066,
+ -0.479449599981308,
+ -0.10272729396820068,
+ -0.13112157583236694,
+ 0.12521398067474365,
+ 0.2318505495786667,
+ -0.03950950875878334,
+ -0.338132381439209,
+ -0.34148478507995605,
+ -0.30461999773979187,
+ -0.38821154832839966,
+ -0.3366057276725769,
+ 0.04917418956756592,
+ 0.27871060371398926,
+ 0.3442160487174988,
+ 0.38440564274787903,
+ -0.27512043714523315,
+ -0.48812195658683777,
+ 0.3854963779449463,
+ -0.4134785234928131,
+ 0.31074434518814087,
+ 0.470551997423172,
+ -0.3932172656059265,
+ 0.16824784874916077,
+ 0.17886584997177124,
+ -0.06673240661621094,
+ -0.32213693857192993,
+ -0.11779916286468506,
+ 0.40992456674575806,
+ 0.4293694496154785,
+ -0.44708651304244995,
+ 0.4120062589645386,
+ -0.07725591212511063,
+ 0.014482498168945312,
+ -0.3397091031074524,
+ -0.4471060037612915,
+ 0.24965685606002808,
+ -0.2040911316871643,
+ 0.2340318262577057,
+ -0.5124659538269043,
+ 0.15649843215942383,
+ -0.09650948643684387,
+ 0.3347800076007843,
+ -0.43933773040771484,
+ 0.2802203297615051,
+ -0.09008044004440308,
+ -0.13696475327014923,
+ 0.1452733874320984,
+ -0.3384348750114441,
+ -0.32807451486587524,
+ -0.1687568724155426,
+ -0.22246906161308289,
+ -0.333901047706604,
+ 0.06458482146263123,
+ 0.40644875168800354,
+ -0.12716779112815857,
+ -0.35779860615730286,
+ 0.17120134830474854,
+ -0.3372630774974823,
+ 0.3126993477344513,
+ 0.2755977511405945,
+ -0.2621510624885559,
+ 0.25701582431793213,
+ -0.10167013108730316,
+ -0.11863154172897339,
+ -0.06499257683753967,
+ 0.03843206167221069,
+ 0.17273840308189392,
+ 0.13227003812789917,
+ -0.40477311611175537,
+ -0.5142897367477417,
+ 0.17364473640918732,
+ 0.21154987812042236,
+ -0.11356884241104126,
+ 0.04134470224380493,
+ 0.08452695608139038,
+ -0.45356154441833496,
+ 0.023896770551800728,
+ -0.0764215886592865,
+ -0.057656288146972656,
+ -0.338910847902298,
+ 0.12584036588668823,
+ 0.2078620046377182,
+ 0.13333487510681152,
+ 0.3804778456687927,
+ -0.17357927560806274,
+ -0.33699336647987366,
+ 0.11419335752725601,
+ 0.4478580951690674,
+ -0.17802631855010986,
+ -0.33526933193206787,
+ 0.14035552740097046,
+ -0.14587371051311493,
+ -0.2831358313560486,
+ -0.23793628811836243,
+ 0.28294289112091064,
+ -0.002055227756500244,
+ -0.3383066654205322,
+ -0.03800179064273834,
+ -0.19326859712600708,
+ -0.16040343046188354,
+ 0.18570531904697418,
+ -0.34139400720596313,
+ 0.30606189370155334,
+ -0.41889962553977966,
+ 0.288677453994751,
+ -0.0814431831240654,
+ 0.06939385831356049,
+ 0.3791213035583496,
+ 0.28477245569229126,
+ 0.3038962185382843,
+ -0.05176103115081787,
+ -0.15491273999214172,
+ 0.24298283457756042,
+ -0.2125440537929535,
+ 0.17203158140182495,
+ 0.21889257431030273,
+ 0.001078099012374878,
+ 0.15609291195869446,
+ 0.17593598365783691,
+ -0.021233558654785156,
+ -0.13513296842575073,
+ -0.474109411239624,
+ -0.12245751917362213,
+ -0.34215596318244934,
+ 0.12613707780838013,
+ 0.09000787138938904,
+ -0.038802534341812134,
+ -0.03612785041332245,
+ -0.33692166209220886,
+ -0.06622663140296936,
+ -0.29922255873680115,
+ 0.2711479663848877,
+ -0.1813274621963501,
+ -0.00458836555480957,
+ 0.45617735385894775,
+ -0.17452025413513184,
+ 0.11208954453468323,
+ 0.23160679638385773,
+ -0.25122809410095215,
+ 0.3942631483078003,
+ -0.15657469630241394,
+ 0.23379409313201904,
+ 0.22897018492221832,
+ -0.44816821813583374,
+ -0.3376668691635132,
+ -0.3316112160682678,
+ 0.04310399293899536,
+ -0.14276017248630524,
+ 0.006869865581393242,
+ 0.029129326343536377,
+ 0.3854384422302246,
+ -0.09321367740631104,
+ 0.32010042667388916,
+ 0.063845694065094,
+ -0.20732372999191284,
+ -0.1625206172466278,
+ 0.2092686891555786,
+ -0.17580156028270721,
+ -0.22725915908813477,
+ -0.18433818221092224,
+ 0.0008265972137451172,
+ -0.3278651833534241,
+ -0.10285896062850952,
+ -0.2803758382797241,
+ 0.28984570503234863,
+ -0.33834201097488403,
+ 0.011197984218597412,
+ -0.3395370841026306,
+ 0.1078307032585144,
+ 0.31590113043785095,
+ -0.16242526471614838,
+ -0.09228020906448364,
+ 0.2892126739025116,
+ -0.3536338806152344,
+ 0.22008328139781952,
+ 0.45721226930618286,
+ -0.25723040103912354,
+ 0.4292304217815399,
+ 0.38061487674713135,
+ -0.33914488554000854,
+ 0.12019604444503784,
+ 0.24210673570632935,
+ -0.4953470528125763,
+ 0.3723961412906647,
+ -0.07324439287185669,
+ -0.3335632085800171,
+ 0.4609675109386444,
+ -0.34060022234916687,
+ -0.12353535741567612,
+ 0.2781705856323242,
+ -0.1793612390756607,
+ 0.3383496403694153,
+ -0.11968731880187988,
+ -0.33727169036865234,
+ 0.4128042161464691,
+ -0.25775638222694397,
+ 0.35281598567962646,
+ -0.26832377910614014,
+ -0.40658873319625854,
+ -0.2994612157344818,
+ 0.21411031484603882,
+ 0.1773611307144165,
+ 0.00017818808555603027,
+ 0.20902806520462036,
+ -0.17633673548698425,
+ 0.29911938309669495,
+ -0.5029623508453369,
+ 0.28503525257110596,
+ 0.04142221063375473,
+ 0.40842559933662415,
+ -0.07774895429611206,
+ -0.06995737552642822,
+ 0.11760517954826355,
+ -0.302129864692688,
+ -0.18236958980560303,
+ -0.3325130343437195,
+ 0.31878578662872314,
+ -0.15263152122497559,
+ -0.32466232776641846,
+ 0.42815226316452026,
+ -0.22447744011878967,
+ 0.1278771162033081,
+ 0.35301536321640015,
+ 0.21224568784236908,
+ -0.15932847559452057,
+ 0.23381520807743073,
+ -0.10229003429412842,
+ -0.28491055965423584,
+ 0.2897034287452698,
+ 0.11859656870365143,
+ 0.21612617373466492,
+ 0.28593710064888,
+ 0.3515225648880005,
+ 0.041662126779556274,
+ -0.047592103481292725,
+ -0.09901243448257446,
+ -0.040844082832336426,
+ 0.11627089977264404,
+ 0.4088061451911926,
+ 0.4278428852558136,
+ -0.1437200903892517,
+ -0.395980566740036,
+ -0.4477449953556061,
+ 0.4271019399166107,
+ 0.13353894650936127,
+ 0.24339768290519714,
+ -0.2152409851551056,
+ 0.2160765826702118,
+ -0.32818710803985596,
+ -0.12971024215221405,
+ 0.00008381903171539307,
+ -0.05658207833766937,
+ -0.33480823040008545,
+ -0.0830659568309784,
+ 0.24838250875473022,
+ -0.04591947793960571,
+ -0.2607797682285309,
+ -0.3133693337440491,
+ -0.24427764117717743,
+ 0.18836767971515656,
+ -0.3343186378479004,
+ 0.34806501865386963,
+ -0.4212576150894165,
+ 0.2127130925655365,
+ -0.39983034133911133,
+ -0.3877202868461609,
+ 0.40554261207580566,
+ 0.10910852253437042,
+ -0.08088816702365875,
+ 0.004100292921066284,
+ -0.4545513391494751,
+ -0.32045626640319824,
+ -0.10027539730072021,
+ -0.28161877393722534,
+ -0.07293063402175903,
+ 0.41920220851898193,
+ -0.2586366832256317,
+ -0.32735204696655273,
+ -0.33876752853393555,
+ -0.29414719343185425,
+ -0.06381160020828247,
+ 0.4584757685661316,
+ 0.15917348861694336,
+ -0.08559286594390869,
+ 0.4003840386867523,
+ 0.2535674571990967,
+ -0.07812067866325378,
+ -0.33982396125793457,
+ -0.17430603504180908,
+ 0.11094105243682861,
+ 0.054770857095718384,
+ 0.032886020839214325,
+ 0.20049524307250977,
+ 0.23509901762008667,
+ 0.0047318339347839355,
+ 0.2814335823059082,
+ 0.13399088382720947,
+ -0.3387734591960907,
+ 0.05714884400367737,
+ 0.31226611137390137,
+ 0.1134905219078064,
+ -0.229850634932518,
+ -0.2662825584411621,
+ -0.09099769592285156,
+ -0.09828609228134155,
+ 0.394187867641449,
+ 0.1606040596961975,
+ -0.4913911521434784,
+ -0.33994680643081665
+ ]
+ }
+ ],
+ "layout": {
+ "height": 512,
+ "paper_bgcolor": "rgba(0,0,0,0)",
+ "scene": {
+ "xaxis": {
+ "visible": false
+ },
+ "yaxis": {
+ "visible": false
+ },
+ "zaxis": {
+ "visible": false
+ }
+ },
+ "template": {
+ "data": {
+ "bar": [
+ {
+ "error_x": {
+ "color": "#2a3f5f"
+ },
+ "error_y": {
+ "color": "#2a3f5f"
+ },
+ "marker": {
+ "line": {
+ "color": "#E5ECF6",
+ "width": 0.5
+ },
+ "pattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ }
+ },
+ "type": "bar"
+ }
+ ],
+ "barpolar": [
+ {
+ "marker": {
+ "line": {
+ "color": "#E5ECF6",
+ "width": 0.5
+ },
+ "pattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ }
+ },
+ "type": "barpolar"
+ }
+ ],
+ "carpet": [
+ {
+ "aaxis": {
+ "endlinecolor": "#2a3f5f",
+ "gridcolor": "white",
+ "linecolor": "white",
+ "minorgridcolor": "white",
+ "startlinecolor": "#2a3f5f"
+ },
+ "baxis": {
+ "endlinecolor": "#2a3f5f",
+ "gridcolor": "white",
+ "linecolor": "white",
+ "minorgridcolor": "white",
+ "startlinecolor": "#2a3f5f"
+ },
+ "type": "carpet"
+ }
+ ],
+ "choropleth": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "type": "choropleth"
+ }
+ ],
+ "contour": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "contour"
+ }
+ ],
+ "contourcarpet": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "type": "contourcarpet"
+ }
+ ],
+ "heatmap": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "heatmap"
+ }
+ ],
+ "heatmapgl": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "heatmapgl"
+ }
+ ],
+ "histogram": [
+ {
+ "marker": {
+ "pattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ }
+ },
+ "type": "histogram"
+ }
+ ],
+ "histogram2d": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "histogram2d"
+ }
+ ],
+ "histogram2dcontour": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "histogram2dcontour"
+ }
+ ],
+ "mesh3d": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "type": "mesh3d"
+ }
+ ],
+ "parcoords": [
+ {
+ "line": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "parcoords"
+ }
+ ],
+ "pie": [
+ {
+ "automargin": true,
+ "type": "pie"
+ }
+ ],
+ "scatter": [
+ {
+ "fillpattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ },
+ "type": "scatter"
+ }
+ ],
+ "scatter3d": [
+ {
+ "line": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatter3d"
+ }
+ ],
+ "scattercarpet": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattercarpet"
+ }
+ ],
+ "scattergeo": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattergeo"
+ }
+ ],
+ "scattergl": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattergl"
+ }
+ ],
+ "scattermapbox": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattermapbox"
+ }
+ ],
+ "scatterpolar": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatterpolar"
+ }
+ ],
+ "scatterpolargl": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatterpolargl"
+ }
+ ],
+ "scatterternary": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatterternary"
+ }
+ ],
+ "surface": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "surface"
+ }
+ ],
+ "table": [
+ {
+ "cells": {
+ "fill": {
+ "color": "#EBF0F8"
+ },
+ "line": {
+ "color": "white"
+ }
+ },
+ "header": {
+ "fill": {
+ "color": "#C8D4E3"
+ },
+ "line": {
+ "color": "white"
+ }
+ },
+ "type": "table"
+ }
+ ]
+ },
+ "layout": {
+ "annotationdefaults": {
+ "arrowcolor": "#2a3f5f",
+ "arrowhead": 0,
+ "arrowwidth": 1
+ },
+ "autotypenumbers": "strict",
+ "coloraxis": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "colorscale": {
+ "diverging": [
+ [
+ 0,
+ "#8e0152"
+ ],
+ [
+ 0.1,
+ "#c51b7d"
+ ],
+ [
+ 0.2,
+ "#de77ae"
+ ],
+ [
+ 0.3,
+ "#f1b6da"
+ ],
+ [
+ 0.4,
+ "#fde0ef"
+ ],
+ [
+ 0.5,
+ "#f7f7f7"
+ ],
+ [
+ 0.6,
+ "#e6f5d0"
+ ],
+ [
+ 0.7,
+ "#b8e186"
+ ],
+ [
+ 0.8,
+ "#7fbc41"
+ ],
+ [
+ 0.9,
+ "#4d9221"
+ ],
+ [
+ 1,
+ "#276419"
+ ]
+ ],
+ "sequential": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "sequentialminus": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ]
+ },
+ "colorway": [
+ "#636efa",
+ "#EF553B",
+ "#00cc96",
+ "#ab63fa",
+ "#FFA15A",
+ "#19d3f3",
+ "#FF6692",
+ "#B6E880",
+ "#FF97FF",
+ "#FECB52"
+ ],
+ "font": {
+ "color": "#2a3f5f"
+ },
+ "geo": {
+ "bgcolor": "white",
+ "lakecolor": "white",
+ "landcolor": "#E5ECF6",
+ "showlakes": true,
+ "showland": true,
+ "subunitcolor": "white"
+ },
+ "hoverlabel": {
+ "align": "left"
+ },
+ "hovermode": "closest",
+ "mapbox": {
+ "style": "light"
+ },
+ "paper_bgcolor": "white",
+ "plot_bgcolor": "#E5ECF6",
+ "polar": {
+ "angularaxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ },
+ "bgcolor": "#E5ECF6",
+ "radialaxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ }
+ },
+ "scene": {
+ "xaxis": {
+ "backgroundcolor": "#E5ECF6",
+ "gridcolor": "white",
+ "gridwidth": 2,
+ "linecolor": "white",
+ "showbackground": true,
+ "ticks": "",
+ "zerolinecolor": "white"
+ },
+ "yaxis": {
+ "backgroundcolor": "#E5ECF6",
+ "gridcolor": "white",
+ "gridwidth": 2,
+ "linecolor": "white",
+ "showbackground": true,
+ "ticks": "",
+ "zerolinecolor": "white"
+ },
+ "zaxis": {
+ "backgroundcolor": "#E5ECF6",
+ "gridcolor": "white",
+ "gridwidth": 2,
+ "linecolor": "white",
+ "showbackground": true,
+ "ticks": "",
+ "zerolinecolor": "white"
+ }
+ },
+ "shapedefaults": {
+ "line": {
+ "color": "#2a3f5f"
+ }
+ },
+ "ternary": {
+ "aaxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ },
+ "baxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ },
+ "bgcolor": "#E5ECF6",
+ "caxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ }
+ },
+ "title": {
+ "x": 0.05
+ },
+ "xaxis": {
+ "automargin": true,
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": "",
+ "title": {
+ "standoff": 15
+ },
+ "zerolinecolor": "white",
+ "zerolinewidth": 2
+ },
+ "yaxis": {
+ "automargin": true,
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": "",
+ "title": {
+ "standoff": 15
+ },
+ "zerolinecolor": "white",
+ "zerolinewidth": 2
+ }
+ }
+ },
+ "width": 512
+ }
+ },
+ "text/html": [
+ "
"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# vis_pcd('LGM', instance_name)\n",
+ "# vis_pcd('Lara', instance_name) # wrong\n",
+ "# vis_pcd('ln3diff', instance_name) # \n",
+ "# vis_pcd('CRM', instance_name) # \n",
+ "# vis_pcd('One-2-3-45', instance_name) # \n",
+ "# vis_pcd('splatter-img', instance_name) # \n",
+ "# vis_pcd('shape-e', instance_name) # \n",
+ "# vis_pcd('CRM', instance_name) # \n",
+ "# vis_pcd('OpenLRM', instance_name) # \n",
+ "\n",
+ "vis_pcd('GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD', instance_name) # "
+ ]
+ },
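+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "A minimal sketch of what a `vis_pcd`-style helper could look like (hypothetical: the actual `vis_pcd` is defined earlier in this notebook, and the path layout below is assumed). It loads an exported point cloud and renders it as a Plotly `scatter3d` trace with the same layout as the serialized outputs above (512x512 canvas, transparent background, hidden axes).\n",
+ "\n",
+ "```python\n",
+ "# Hypothetical sketch, not the notebook's actual vis_pcd implementation.\n",
+ "import numpy as np\n",
+ "import plotly.graph_objects as go\n",
+ "import trimesh\n",
+ "\n",
+ "def vis_pcd_sketch(method, instance_name, root='./logs'):\n",
+ "    # Assumed path layout: one exported .ply point cloud per method/instance.\n",
+ "    pcd = trimesh.load(f'{root}/{method}/{instance_name}.ply')\n",
+ "    pts = np.asarray(pcd.vertices)\n",
+ "    fig = go.Figure(\n",
+ "        data=[go.Scatter3d(\n",
+ "            x=pts[:, 0], y=pts[:, 1], z=pts[:, 2],\n",
+ "            mode='markers',\n",
+ "            # The serialized outputs above store per-point [0, 0, 0] colors,\n",
+ "            # i.e. all points rendered black.\n",
+ "            marker=dict(size=1, color='black'),\n",
+ "        )],\n",
+ "        layout=go.Layout(\n",
+ "            width=512, height=512,\n",
+ "            paper_bgcolor='rgba(0,0,0,0)',\n",
+ "            scene=dict(\n",
+ "                xaxis=dict(visible=False),\n",
+ "                yaxis=dict(visible=False),\n",
+ "                zaxis=dict(visible=False),\n",
+ "            ),\n",
+ "        ),\n",
+ "    )\n",
+ "    fig.show()\n",
+ "```"
+ ]
+ },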
+ {
+ "cell_type": "code",
+ "execution_count": 87,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.plotly.v1+json": {
+ "config": {
+ "plotlyServerURL": "https://plot.ly",
+ "toImageButtonOptions": {
+ "filename": "custom_image",
+ "format": "png",
+ "height": 512,
+ "scale": 4,
+ "width": 512
+ }
+ },
+ "data": [
+ {
+ "marker": {
+ "color": [
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ ... (elided: a long run of identical [0, 0, 0] entries) ...
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 0,
+ 0
+ ]
+ ],
+ "size": 1
+ },
+ "mode": "markers",
+ "type": "scatter3d",
+ "x": [
+ 0.2833046317100525,
+ -0.36672449111938477,
+ 0.1618460714817047,
+ -0.2850576639175415,
+ 0.1370265781879425,
+ -0.07545945048332214,
+ 0.02693888545036316,
+ -0.23691348731517792,
+ 0.01939380168914795,
+ 0.39341574907302856,
+ -0.05054304003715515,
+ -0.32072752714157104,
+ -0.3001289963722229,
+ 0.051969170570373535,
+ 0.20146174728870392,
+ 0.23606908321380615,
+ 0.29867780208587646,
+ -0.12470825016498566,
+ -0.13422830402851105,
+ -0.029136240482330322,
+ 0.25340908765792847,
+ 0.17603954672813416,
+ -0.18720602989196777,
+ -0.05373565852642059,
+ -0.04732781648635864,
+ 0.2610872983932495,
+ -0.19595006108283997,
+ -0.24938496947288513,
+ 0.3901260793209076,
+ 0.052450984716415405,
+ -0.22918915748596191,
+ 0.16674917936325073,
+ -0.01940542459487915,
+ -0.33273768424987793,
+ 0.11679509282112122,
+ 0.1322093904018402,
+ -0.1289118528366089,
+ -0.298168420791626,
+ -0.29005372524261475,
+ 0.09052774310112,
+ -0.09262222051620483,
+ 0.0030327439308166504,
+ -0.190410777926445,
+ -0.1582946479320526,
+ 0.23073498904705048,
+ 0.0015435684472322464,
+ -0.2158275842666626,
+ 0.10058912634849548,
+ 0.19723963737487793,
+ 0.2805785536766052,
+ 0.2723308205604553,
+ 0.0642516016960144,
+ -0.2888885736465454,
+ 0.055645495653152466,
+ 0.23880678415298462,
+ 0.03395664691925049,
+ 0.29877346754074097,
+ 0.23663899302482605,
+ 0.2504235804080963,
+ -0.16897743940353394,
+ -0.1388799101114273,
+ 0.16481930017471313,
+ -0.1726347804069519,
+ 0.15345942974090576,
+ -0.288867324590683,
+ -0.21848183870315552,
+ 0.16001972556114197,
+ 0.006322726607322693,
+ -0.2341979444026947,
+ -0.0744536966085434,
+ 0.31494462490081787,
+ -0.1476772427558899,
+ 0.036460213363170624,
+ -0.06433901190757751,
+ -0.25553399324417114,
+ 0.10004609823226929,
+ -0.09534162282943726,
+ -0.12825144827365875,
+ -0.08645081520080566,
+ -0.3650730848312378,
+ -0.21925599873065948,
+ 0.38599106669425964,
+ 0.23275944590568542,
+ 0.2923068702220917,
+ 0.06903377175331116,
+ -0.022502973675727844,
+ -0.2910642921924591,
+ 0.18961811065673828,
+ -0.22634226083755493,
+ -0.27755701541900635,
+ -0.010168015956878662,
+ -0.2733649015426636,
+ -0.30204063653945923,
+ -0.3719084858894348,
+ 0.17814040184020996,
+ 0.2852565050125122,
+ 0.1385534405708313,
+ 0.324948787689209,
+ 0.3538321554660797,
+ -0.01405632495880127,
+ -0.012257963418960571,
+ -0.13719730079174042,
+ 0.23401832580566406,
+ -0.12407267093658447,
+ -0.390618234872818,
+ -0.06672132015228271,
+ -0.14873631298542023,
+ 0.08701671659946442,
+ -0.28486862778663635,
+ 0.06595820188522339,
+ -0.03722023963928223,
+ 0.3015994429588318,
+ 0.22524809837341309,
+ 0.2783130407333374,
+ 0.07386444509029388,
+ 0.15759924054145813,
+ -0.15723061561584473,
+ -0.08435430377721786,
+ -0.30132439732551575,
+ 0.11092105507850647,
+ -0.25886332988739014,
+ 0.13469822704792023,
+ 0.028863608837127686,
+ 0.16043925285339355,
+ 0.28194373846054077,
+ -0.24336397647857666,
+ -0.3019561767578125,
+ 0.29410263895988464,
+ -0.19294457137584686,
+ 0.16907203197479248,
+ -0.14023980498313904,
+ 0.25130242109298706,
+ 0.3163894712924957,
+ 0.03066331148147583,
+ -0.004064228385686874,
+ -0.21294084191322327,
+ -0.2813452482223511,
+ -0.22775979340076447,
+ -0.02292724698781967,
+ 0.1713671088218689,
+ -0.10050465911626816,
+ 0.2574520707130432,
+ -0.264451265335083,
+ -0.03285491466522217,
+ 0.10201391577720642,
+ -0.2745312452316284,
+ -0.2218111753463745,
+ -0.1465270221233368,
+ -0.14497500658035278,
+ -0.08646853268146515,
+ -0.17185378074645996,
+ 0.309034526348114,
+ 0.22355711460113525,
+ 0.22959880530834198,
+ -0.27960824966430664,
+ 0.1807226687669754,
+ -0.23471695184707642,
+ -0.04513159394264221,
+ 0.3577505946159363,
+ 0.11079743504524231,
+ -0.19857633113861084,
+ 0.18284495174884796,
+ 0.018298998475074768,
+ -0.12809395790100098,
+ 0.043143391609191895,
+ 0.19966033101081848,
+ -0.2783140540122986,
+ 0.06568378210067749,
+ 0.27087652683258057,
+ 0.30053186416625977,
+ 0.21067461371421814,
+ 0.13997593522071838,
+ -0.012451887130737305,
+ -0.09686630964279175,
+ -0.1639992594718933,
+ 0.11022451519966125,
+ -0.1095547303557396,
+ 0.29106569290161133,
+ 0.25005844235420227,
+ 0.22403860092163086,
+ -0.15007096529006958,
+ -0.008697614073753357,
+ -0.05289727449417114,
+ 0.03958547115325928,
+ 0.09961599111557007,
+ -0.28492721915245056,
+ -0.35908186435699463,
+ -0.04597003385424614,
+ 0.09725642204284668,
+ 0.12502801418304443,
+ 0.07733437418937683,
+ 0.057792723178863525,
+ 0.08491284400224686,
+ 0.25713151693344116,
+ -0.3254369795322418,
+ 0.29539167881011963,
+ -0.38626211881637573,
+ -0.13308992981910706,
+ -0.18556949496269226,
+ -0.09986019134521484,
+ 0.021684885025024414,
+ -0.15073545277118683,
+ 0.39476630091667175,
+ 0.39155471324920654,
+ -0.24326390027999878,
+ 0.2655003070831299,
+ -0.07006990909576416,
+ 0.029102802276611328,
+ 0.07592880725860596,
+ 0.20417307317256927,
+ 0.17461222410202026,
+ 0.28923696279525757,
+ -0.13210010528564453,
+ 0.053261782974004745,
+ 0.06484167277812958,
+ -0.3017244338989258,
+ -0.2954609990119934,
+ -0.23187562823295593,
+ -0.26802390813827515,
+ -0.27967169880867004,
+ 0.12701255083084106,
+ -0.2426731288433075,
+ 0.11689959466457367,
+ -0.04696810245513916,
+ -0.011658728122711182,
+ 0.2957841753959656,
+ 0.0234910249710083,
+ -0.2181185483932495,
+ 0.28315114974975586,
+ -0.3033602833747864,
+ -0.032503336668014526,
+ 0.027934879064559937,
+ 0.21464788913726807,
+ 0.26090818643569946,
+ 0.022442221641540527,
+ -0.0017210841178894043,
+ -0.34803253412246704,
+ -0.008059082552790642,
+ -0.39279693365097046,
+ 0.2799606919288635,
+ -0.3026840090751648,
+ 0.22226080298423767,
+ -0.2972567081451416,
+ -0.2061452865600586,
+ 0.06328405439853668,
+ -0.1895689070224762,
+ 0.19992157816886902,
+ -0.16112923622131348,
+ -0.27097979187965393,
+ 0.19795635342597961,
+ -0.07572650909423828,
+ -0.12193971872329712,
+ -0.0238645076751709,
+ 0.1951875388622284,
+ 0.24432411789894104,
+ -0.18193703889846802,
+ -0.291714608669281,
+ 0.31636008620262146,
+ 0.07472234964370728,
+ -0.06694436073303223,
+ -0.2834596335887909,
+ 0.27909591794013977,
+ -0.21089357137680054,
+ 0.004503369331359863,
+ 0.12560655176639557,
+ -0.0707644522190094,
+ 0.11210798472166061,
+ -0.2834070324897766,
+ -0.26139143109321594,
+ 0.19937436282634735,
+ 0.3784843981266022,
+ -0.25837981700897217,
+ -0.06684401631355286,
+ 0.12393705546855927,
+ 0.3408661484718323,
+ -0.17485670745372772,
+ -0.15084268152713776,
+ -0.19003134965896606,
+ -0.2725774049758911,
+ 0.2999449670314789,
+ -0.2785460352897644,
+ 0.09849868714809418,
+ -0.09032716602087021,
+ 0.001900702714920044,
+ 0.09880828857421875,
+ -0.25218847393989563,
+ 0.006887771189212799,
+ -0.20241636037826538,
+ 0.28567153215408325,
+ 0.25499191880226135,
+ 0.38463640213012695,
+ -0.016497373580932617,
+ 0.012162905186414719,
+ 0.13281768560409546,
+ -0.09404708445072174,
+ 0.15689711272716522,
+ -0.041493482887744904,
+ -0.19223999977111816,
+ -0.09948723018169403,
+ 0.38692983984947205,
+ 0.06099510192871094,
+ 0.17337414622306824,
+ -0.29992955923080444,
+ 0.18604105710983276,
+ -0.17117512226104736,
+ -0.12952011823654175,
+ -0.026062868535518646,
+ -0.11038950085639954,
+ -0.0032088756561279297,
+ -0.3408876955509186,
+ 0.21110424399375916,
+ -0.255500853061676,
+ 0.14421945810317993,
+ -0.24778464436531067,
+ 0.1430111825466156,
+ -0.050096988677978516,
+ 0.10969865322113037,
+ 0.39515259861946106,
+ -0.23943227529525757,
+ 0.008053034543991089,
+ -0.30463486909866333,
+ 0.30218660831451416,
+ 0.04404944181442261,
+ 0.1954728364944458,
+ -0.2950136661529541,
+ -0.09746593236923218,
+ -0.3323410749435425,
+ 0.22144199907779694,
+ -0.06056144833564758,
+ -0.11941714584827423,
+ 0.050164759159088135,
+ -0.3786439895629883,
+ 0.33708590269088745,
+ 0.328581303358078,
+ 0.24038389325141907,
+ 0.011598542332649231,
+ 0.2655397057533264,
+ -0.09808476269245148,
+ 0.14126984775066376,
+ -0.061350613832473755,
+ 0.3652816116809845,
+ -0.14020636677742004,
+ 0.1468021273612976,
+ 0.11540532112121582,
+ 0.07816336303949356,
+ 0.26881909370422363,
+ -0.14079150557518005,
+ 0.251116007566452,
+ 0.19531935453414917,
+ 0.030396729707717896,
+ 0.08287674188613892,
+ -0.07449650764465332,
+ 0.1906057596206665,
+ 0.16873854398727417,
+ -0.2681303024291992,
+ -0.3597450256347656,
+ 0.2645779550075531,
+ 0.28360503911972046,
+ 0.2569754421710968,
+ 0.0899779200553894,
+ -0.17984482645988464,
+ -0.22768768668174744,
+ -0.16850148141384125,
+ 0.13460415601730347,
+ -0.2517350912094116,
+ 0.2804548740386963,
+ 0.16056731343269348,
+ 0.058931171894073486,
+ 0.297980934381485,
+ 0.22265559434890747,
+ 0.23378083109855652,
+ 0.1711840033531189,
+ 0.2895767390727997,
+ -0.07840490341186523,
+ -0.2674593925476074,
+ -0.10867007076740265,
+ -0.39150217175483704,
+ -0.2086951732635498,
+ 0.13306795060634613,
+ -0.26590704917907715,
+ -0.10367242991924286,
+ -0.2303425669670105,
+ -0.2477697730064392,
+ -0.18566325306892395,
+ 0.264069139957428,
+ -0.29568013548851013,
+ 0.11343653500080109,
+ -0.047784507274627686,
+ -0.3408450484275818,
+ -0.25744009017944336,
+ 0.2753536105155945,
+ 0.30902624130249023,
+ 0.21290776133537292,
+ 0.1654375195503235,
+ -0.2866286635398865,
+ 0.27884867787361145,
+ -0.19848963618278503,
+ -0.12667888402938843,
+ -0.18265652656555176,
+ 0.01886962354183197,
+ -0.38829153776168823,
+ 0.0168008953332901,
+ 0.2594578266143799,
+ 0.1989830732345581,
+ 0.2211068570613861,
+ 0.14397352933883667,
+ -0.2602556347846985,
+ -0.032091543078422546,
+ -0.3041420578956604,
+ -0.18419155478477478,
+ -0.09302648901939392,
+ 0.11647044122219086,
+ -0.1762203425168991,
+ 0.22755277156829834,
+ -0.1199386715888977,
+ 0.19022494554519653,
+ 0.2941609025001526,
+ 0.10851338505744934,
+ -0.10303470492362976,
+ -0.19847354292869568,
+ -0.008722953498363495,
+ -0.017949938774108887,
+ -0.004898078739643097,
+ 0.05832952260971069,
+ -0.2505151629447937,
+ -0.18565618991851807,
+ -0.06368449330329895,
+ -0.07679998874664307,
+ -0.2896195650100708,
+ 0.03628510236740112,
+ 0.40412911772727966,
+ 0.04895384609699249,
+ -0.18554337322711945,
+ -0.24574309587478638,
+ 0.2998489737510681,
+ 0.08846642076969147,
+ 0.008697915822267532,
+ 0.18829047679901123,
+ 0.009783387184143066,
+ 0.2876906991004944,
+ -0.2912129759788513,
+ -0.01781684160232544,
+ 0.16640812158584595,
+ 0.0023951977491378784,
+ -0.2788941264152527,
+ -0.0956622064113617,
+ 0.33871978521347046,
+ -0.016859233379364014,
+ 0.2003578543663025,
+ -0.28025174140930176,
+ 0.35110747814178467,
+ 0.07129961252212524,
+ -0.2965732216835022,
+ -0.19032949209213257,
+ 0.11910438537597656,
+ 0.0460820198059082,
+ -0.19793081283569336,
+ 0.2920626401901245,
+ 0.18744558095932007,
+ -0.2284144163131714,
+ -0.0872088074684143,
+ -0.3005048632621765,
+ -0.2229272723197937,
+ -0.3058980703353882,
+ 0.2147301286458969,
+ -0.19660985469818115,
+ 0.34947413206100464,
+ 0.283503919839859,
+ 0.06107068061828613,
+ -0.18666687607765198,
+ -0.08607006072998047,
+ 0.12237593531608582,
+ -0.18301154673099518,
+ 0.2799411416053772,
+ -0.029229164123535156,
+ -0.1249883770942688,
+ 0.29282963275909424,
+ -0.1045953631401062,
+ 0.1201552152633667,
+ -0.2853073477745056,
+ 0.20543885231018066,
+ 0.003494858741760254,
+ 0.2151646614074707,
+ -0.1171482652425766,
+ -0.3319370746612549,
+ -0.10172706842422485,
+ -0.3954010009765625,
+ 0.00932583212852478,
+ 0.27386224269866943,
+ 0.2612026333808899,
+ -0.3150036633014679,
+ 0.250968873500824,
+ 0.2967301607131958,
+ 0.15944045782089233,
+ -0.21251028776168823,
+ 0.05050203204154968,
+ -0.25922733545303345,
+ 0.06798678636550903,
+ 0.08180081844329834,
+ 0.16064846515655518,
+ -0.15133121609687805,
+ -0.029717495664954185,
+ 0.2552754878997803,
+ -0.2900161147117615,
+ -0.2543165683746338,
+ -0.04874265193939209,
+ 0.09533855319023132,
+ 0.28967350721359253,
+ 0.05027282238006592,
+ -0.2708178162574768,
+ -0.0536198616027832,
+ -0.08798038959503174,
+ 0.11408049613237381,
+ 0.10247301310300827,
+ 0.20388194918632507,
+ -0.15786808729171753,
+ -0.2921418845653534,
+ -0.02304919809103012,
+ 0.40189826488494873,
+ -0.13847455382347107,
+ -0.2828460931777954,
+ -0.2928955852985382,
+ 0.2262105941772461,
+ -0.30360788106918335,
+ -0.11610531806945801,
+ -0.19900047779083252,
+ 0.24965578317642212,
+ -0.12272630631923676,
+ -0.13535648584365845,
+ 0.27604979276657104,
+ 0.12986275553703308,
+ 0.031180638819932938,
+ 0.23402756452560425,
+ -0.12304192781448364,
+ -0.28630608320236206,
+ 0.10694071650505066,
+ 0.29492875933647156,
+ -0.04742538928985596,
+ -0.16467617452144623,
+ 0.21338552236557007,
+ 0.14842957258224487,
+ 0.04033675789833069,
+ -0.11083507537841797,
+ 0.2926432490348816,
+ -0.34509342908859253,
+ -0.04334595799446106,
+ 0.05222618579864502,
+ 0.09434747695922852,
+ -0.1988322138786316,
+ -0.08124768733978271,
+ -0.27124732732772827,
+ -0.019482940435409546,
+ -0.22451616823673248,
+ 0.11135182529687881,
+ -0.30085289478302,
+ 0.1539032757282257,
+ -0.2395462989807129,
+ 0.04069054126739502,
+ -0.2324771285057068,
+ -0.15909570455551147,
+ 0.3927459716796875,
+ 0.016515761613845825,
+ -0.28716880083084106,
+ -0.2278164029121399,
+ -0.19789859652519226,
+ -0.07493612170219421,
+ 0.356370747089386,
+ 0.11023911833763123,
+ -0.10925579071044922,
+ -0.0508909597992897,
+ -0.2712428569793701,
+ 0.2951042056083679,
+ -0.05228531360626221,
+ 0.28285592794418335,
+ 0.1774592399597168,
+ 0.33530181646347046,
+ -0.18704599142074585,
+ 0.07887301594018936,
+ 0.11021968722343445,
+ -0.2974065840244293,
+ -0.043736815452575684,
+ -0.03993856906890869,
+ 0.08584475517272949,
+ -0.30582380294799805,
+ 0.012501955032348633,
+ 0.3205633759498596,
+ 0.1924860179424286,
+ -0.058795541524887085,
+ 0.016964435577392578,
+ 0.17994272708892822,
+ 0.24036240577697754,
+ 0.3001381754875183,
+ -0.29094183444976807,
+ -0.2675302028656006,
+ -0.21205781400203705,
+ -0.00014713406562805176,
+ -0.04896529018878937,
+ -0.2916668951511383,
+ 0.28694677352905273,
+ 0.0023405328392982483,
+ 0.002795696258544922,
+ -0.3636053502559662,
+ -0.3096197545528412,
+ -0.27636969089508057,
+ 0.16933926939964294,
+ -0.08615672588348389,
+ 0.03613942861557007,
+ 0.14959298074245453,
+ -0.0357549786567688,
+ -0.2643551230430603,
+ -0.18274807929992676,
+ -0.26083505153656006,
+ 0.2761843204498291,
+ 0.30135953426361084,
+ -0.23080778121948242,
+ 0.4036474823951721,
+ 0.1632394790649414,
+ -0.15239983797073364,
+ 0.17882704734802246,
+ -0.14832717180252075,
+ -0.3757637143135071,
+ -0.29978469014167786,
+ 0.3000079095363617,
+ 0.19217658042907715,
+ 0.14457657933235168,
+ 0.2753244936466217,
+ 0.17234966158866882,
+ 0.2797820568084717,
+ 0.07554802298545837,
+ -0.1675851196050644,
+ 0.1423514485359192,
+ 0.05353713035583496,
+ 0.2408512979745865,
+ -0.04019749164581299,
+ -0.1453583836555481,
+ 0.2059316635131836,
+ 0.21068716049194336,
+ 0.2542634606361389,
+ -0.35263827443122864,
+ -0.28070956468582153,
+ 0.08193334937095642,
+ 0.15063166618347168,
+ 0.3558933138847351,
+ -0.18471352756023407,
+ 0.15821903944015503,
+ -0.3011360764503479,
+ 0.2804316282272339,
+ 0.016993045806884766,
+ 0.007831618189811707,
+ 0.2307395040988922,
+ -0.044158220291137695,
+ -0.26003265380859375,
+ 0.3145177364349365,
+ -0.16304680705070496,
+ 0.12428376078605652,
+ 0.2820431590080261,
+ 0.2459910809993744,
+ -0.09577587246894836,
+ 0.025508537888526917,
+ 0.1381203830242157,
+ 0.13985106348991394,
+ -0.07448136806488037,
+ -0.38191792368888855,
+ 0.13074588775634766,
+ -0.25768235325813293,
+ 0.03308182954788208,
+ 0.061354056000709534,
+ 0.27404046058654785,
+ 0.08773380517959595,
+ -0.23350653052330017,
+ 0.25599122047424316,
+ 0.02866789698600769,
+ -0.2949594557285309,
+ 0.20257359743118286,
+ -0.2214842140674591,
+ -0.2954542636871338,
+ 0.23210331797599792,
+ -0.21004092693328857,
+ -0.2988116145133972,
+ 0.28098922967910767,
+ 0.03207963705062866,
+ -0.08104166388511658,
+ -0.041966669261455536,
+ 0.153013676404953,
+ -0.30383366346359253,
+ -0.1135706752538681,
+ 0.29445764422416687,
+ 0.1344720721244812,
+ 0.004084721207618713,
+ -0.36285898089408875,
+ -0.1285555064678192,
+ -0.30518484115600586,
+ -0.05417907238006592,
+ 0.3548899292945862,
+ -0.3879748582839966,
+ 0.26276662945747375,
+ -0.3409406840801239,
+ 0.31399187445640564,
+ -0.07947321236133575,
+ 0.23703181743621826,
+ -0.30384692549705505,
+ -0.13103672862052917,
+ 0.06742024421691895,
+ 0.19256591796875,
+ -0.2718937397003174,
+ -0.09754949808120728,
+ -0.30297666788101196,
+ -0.3246765732765198,
+ 0.22866904735565186,
+ -0.27490824460983276,
+ 0.07752934098243713,
+ 0.007276296615600586,
+ 0.2134077250957489,
+ 0.3041537404060364,
+ -0.13869427144527435,
+ -0.02427332103252411,
+ 0.2767001986503601,
+ 0.08047822117805481,
+ -0.07483813166618347,
+ 0.06448537111282349,
+ -0.1854141354560852,
+ 0.23433616757392883,
+ -0.2931921184062958,
+ 0.09545354545116425,
+ -0.202123761177063,
+ -0.38055145740509033,
+ 0.17939776182174683,
+ -0.37223827838897705,
+ 0.379879355430603,
+ -0.26286780834198,
+ -0.271589994430542,
+ 0.3821524381637573,
+ -0.2699422240257263,
+ -0.0009305775165557861,
+ -0.0772094577550888,
+ -0.22102272510528564,
+ 0.254832923412323,
+ -0.21790280938148499,
+ -0.21997597813606262,
+ -0.3261357843875885,
+ 0.25755399465560913,
+ -0.21912972629070282,
+ 0.28250500559806824,
+ 0.03192402422428131,
+ -0.13698594272136688,
+ -0.018281489610671997,
+ -0.16100388765335083,
+ -0.02078840881586075,
+ -0.19002404808998108,
+ -0.006246298551559448,
+ -0.3876490294933319,
+ 0.18705427646636963,
+ -0.025653362274169922,
+ -0.279319703578949,
+ 0.4005318582057953,
+ 0.18510067462921143,
+ 0.06181912124156952,
+ -0.0593741238117218,
+ 0.030071526765823364,
+ 0.30066782236099243,
+ 0.31271475553512573,
+ -0.07137322425842285,
+ 0.2452840805053711,
+ 0.011406302452087402,
+ 0.22491943836212158,
+ -0.2930535674095154,
+ 0.22169983386993408,
+ 0.08947279304265976,
+ 0.05398356914520264,
+ -0.13774406909942627,
+ -0.2992386817932129,
+ -0.2639002799987793,
+ 0.07105253636837006,
+ -0.1410435438156128,
+ 0.09640228748321533,
+ 0.21441467106342316,
+ -0.2975729703903198,
+ 0.2343675047159195,
+ 0.26113924384117126,
+ -0.2182309627532959,
+ -0.24826990067958832,
+ -0.022723853588104248,
+ -0.07995104789733887,
+ -0.3787320852279663,
+ 0.1813231110572815,
+ 0.1402832418680191,
+ 0.23177596926689148,
+ -0.12231701612472534,
+ 0.2762215733528137,
+ -0.2809758186340332,
+ -0.07405853271484375,
+ 0.37598490715026855,
+ 0.2952635884284973,
+ 0.06552746891975403,
+ -0.2646148204803467,
+ -0.08930552005767822,
+ 0.3976824879646301,
+ -0.09467539191246033,
+ -0.27095115184783936,
+ -0.21767526865005493,
+ 0.2800019383430481,
+ -0.25253748893737793,
+ -0.03560976684093475,
+ 0.20599119365215302,
+ 0.03279542922973633,
+ 0.10670405626296997,
+ 0.3000897169113159,
+ 0.2035083770751953,
+ -0.12995782494544983,
+ 0.08203740417957306,
+ 0.02342384308576584,
+ 0.12498430907726288,
+ -0.036270081996917725,
+ -0.1296672523021698,
+ 0.3552262783050537,
+ 0.04624897241592407,
+ 0.3052769601345062,
+ -0.2981796860694885,
+ -0.2836061120033264,
+ 0.10891538858413696,
+ 0.09142208099365234,
+ 0.043659113347530365,
+ 0.05301034450531006,
+ -0.04125797748565674,
+ -0.19770023226737976,
+ -0.2460862398147583,
+ 0.013290703296661377,
+ 0.00022459030151367188,
+ 0.2771422266960144,
+ 0.062380075454711914,
+ 0.20756641030311584,
+ -0.24552366137504578,
+ -0.05149127170443535,
+ -0.07239538431167603,
+ -0.14477024972438812,
+ -0.24889612197875977,
+ -0.005107499659061432,
+ 0.34979474544525146,
+ -0.1543174386024475,
+ -0.30205196142196655,
+ -0.01637711189687252,
+ -0.28433358669281006,
+ 0.3611682653427124,
+ 0.10497765243053436,
+ 0.24898278713226318,
+ 0.04619690775871277,
+ -0.12277311831712723,
+ -0.14394831657409668,
+ 0.27192026376724243,
+ -0.261453241109848,
+ -0.03236877918243408,
+ 0.30015599727630615,
+ 0.2783457934856415,
+ 0.08492657542228699,
+ -0.24356043338775635,
+ 0.03413181006908417,
+ 0.07248762249946594,
+ -0.39615002274513245,
+ 0.1439610868692398,
+ 0.09774866700172424,
+ 0.11702433228492737,
+ -0.2817027270793915,
+ -0.05489666759967804,
+ 0.042919546365737915,
+ -0.2998861074447632,
+ -0.26671600341796875,
+ 0.30697524547576904,
+ 0.18075644969940186,
+ -0.08306056261062622,
+ -0.36941343545913696,
+ 0.28506773710250854,
+ -0.22574478387832642,
+ 0.24584603309631348,
+ -0.19979767501354218,
+ 0.1055273786187172,
+ 0.1446196436882019,
+ -0.1710738241672516,
+ -0.19013580679893494,
+ 0.2082872986793518,
+ 0.10052114725112915,
+ -0.12946513295173645,
+ -0.2464045286178589,
+ 0.30028679966926575,
+ 0.2833605706691742,
+ -0.24875932931900024,
+ 0.14855104684829712,
+ -0.11311071366071701,
+ 0.1987532377243042,
+ -0.15344873070716858,
+ 0.1051667332649231,
+ -0.27017319202423096,
+ 0.13306215405464172,
+ 0.16051055490970612,
+ -0.2961198091506958,
+ -0.1707434505224228,
+ 0.2006249725818634,
+ -0.23609387874603271,
+ 0.29259157180786133,
+ -0.11755037307739258,
+ 0.3816258907318115,
+ 0.2873062491416931,
+ 0.2892875373363495,
+ 0.045250020921230316,
+ -0.26008957624435425,
+ -0.30288368463516235,
+ -0.035116374492645264,
+ 0.29976266622543335,
+ 0.19330736994743347,
+ -0.17050671577453613,
+ -0.11293244361877441,
+ 0.10542382299900055,
+ 0.01948535442352295,
+ 0.023358643054962158,
+ 0.17330724000930786,
+ -0.01989676058292389,
+ 0.1323338747024536,
+ -0.3367665112018585,
+ -0.37851482629776,
+ -0.2600021958351135,
+ -0.1813029646873474,
+ -0.09392714500427246,
+ -0.02401992678642273,
+ -0.20895656943321228,
+ 0.24933704733848572,
+ 0.29031115770339966,
+ 0.005006253719329834,
+ -0.058676280081272125,
+ -0.05730469897389412,
+ -0.06555038690567017,
+ 0.0890449583530426,
+ 0.24686472117900848,
+ 0.26467978954315186,
+ -0.05686172842979431,
+ 0.1826685667037964,
+ -0.15860587358474731,
+ 0.2596433162689209,
+ 0.09450158476829529,
+ 0.26774847507476807,
+ -0.028619728982448578,
+ 0.16898231208324432,
+ -0.15289300680160522,
+ -0.01334536075592041,
+ 0.16447746753692627,
+ 0.3745993375778198,
+ 0.23238292336463928,
+ -0.3348742127418518,
+ -0.1936795711517334,
+ 0.1350250095129013,
+ -0.35108572244644165,
+ -0.2812252938747406,
+ 0.2926400899887085,
+ -0.1987428367137909,
+ -0.18964773416519165,
+ -0.12125113606452942,
+ 0.21456164121627808,
+ 0.3876186013221741,
+ -0.1050119400024414,
+ -0.16725438833236694,
+ -0.2736174762248993,
+ -0.22576874494552612,
+ -0.015517368912696838,
+ 0.04086998105049133,
+ -0.1724831461906433,
+ -0.16110023856163025,
+ -0.30250903964042664,
+ -0.08575955033302307,
+ 0.08923828601837158,
+ -0.29211264848709106,
+ 0.27708083391189575,
+ -0.014382898807525635,
+ -0.1632789969444275,
+ 0.14996546506881714,
+ 0.29419469833374023,
+ -0.334136962890625,
+ 0.15838032960891724,
+ 0.21763017773628235,
+ -0.14820034801959991,
+ 0.20417720079421997,
+ -0.06547686457633972,
+ -0.20962512493133545,
+ -0.30312711000442505,
+ -0.18548938632011414,
+ -0.02454531192779541,
+ -0.018457669764757156,
+ -0.255172997713089,
+ 0.27198851108551025,
+ 0.3009885847568512,
+ -0.20314735174179077,
+ -0.09295530617237091,
+ -0.16247054934501648,
+ 0.03417319059371948,
+ 0.1379762440919876,
+ -0.38810843229293823,
+ 0.2405356764793396,
+ 0.2868931293487549,
+ -0.08169129490852356,
+ -0.12123480439186096,
+ -0.16202157735824585,
+ -0.024837777018547058,
+ 0.20259439945220947,
+ 0.1596704125404358,
+ -0.004095397889614105,
+ 0.13659699261188507,
+ 0.1015852689743042,
+ -0.1277066469192505,
+ 0.2698427438735962,
+ 0.25591832399368286,
+ -0.28734177350997925,
+ 0.2972809076309204,
+ 0.229895681142807,
+ -0.1520170271396637,
+ -0.03617715835571289,
+ -0.11778126657009125,
+ 0.02305501699447632,
+ 0.21697860956192017,
+ -0.32818281650543213,
+ -0.1997748613357544,
+ -0.053973596543073654,
+ 0.2051214724779129,
+ 0.29952478408813477,
+ -0.0692022442817688,
+ 0.13702452182769775,
+ -0.23398596048355103,
+ -0.11993704736232758,
+ 0.39789271354675293,
+ 0.26650214195251465,
+ -0.2207983434200287,
+ -0.2893744707107544,
+ -0.2036326378583908,
+ 0.11236661672592163,
+ -0.04852116107940674,
+ 0.16006912291049957,
+ 0.010525047779083252,
+ 0.052865028381347656,
+ 0.16189076006412506,
+ -0.3265037536621094,
+ -0.21580840647220612,
+ 0.29562926292419434,
+ 0.17602509260177612,
+ 0.3517792820930481,
+ -0.06384492665529251,
+ 0.1770372986793518,
+ -0.15711888670921326,
+ 0.3084973990917206,
+ -0.26672741770744324,
+ 0.21835750341415405,
+ -0.02685678005218506,
+ 0.2930578589439392,
+ -0.29174375534057617,
+ 0.11418303847312927,
+ 0.20392228662967682,
+ -0.3039512634277344,
+ 0.16526645421981812,
+ 0.4001898169517517,
+ 0.24733787775039673,
+ -0.0974627137184143,
+ -0.28638893365859985,
+ -0.26638802886009216,
+ -0.16180720925331116,
+ -0.0019470974802970886,
+ 0.006519734859466553,
+ 0.38307932019233704,
+ -0.00223405659198761,
+ -0.2693318724632263,
+ -0.2789791226387024,
+ -0.19326478242874146,
+ 0.22132501006126404,
+ -0.29828914999961853,
+ 0.10410124063491821,
+ 0.08207905292510986,
+ 0.3905714750289917,
+ 0.11709490418434143,
+ 0.29704129695892334,
+ -0.29349493980407715,
+ -0.16453243792057037,
+ -0.16928905248641968,
+ -0.16593819856643677,
+ 0.27890652418136597,
+ 0.04432713985443115,
+ 0.28798073530197144,
+ -0.006660938262939453,
+ -0.020055297762155533,
+ 0.0704013854265213,
+ 0.03506039083003998,
+ 0.3021577000617981,
+ -0.1610010266304016,
+ -0.018695205450057983,
+ -0.1255628764629364,
+ 0.3008495569229126,
+ -0.03590831160545349,
+ -0.09952867031097412,
+ 0.06563298404216766,
+ -0.0723961740732193,
+ -0.2832075357437134,
+ -0.38337981700897217,
+ -0.058246493339538574,
+ 0.3012058436870575,
+ 0.2127910852432251,
+ 0.018174022436141968,
+ 0.1688014268875122,
+ 0.07741013169288635,
+ 0.11917674541473389,
+ 0.01992657780647278,
+ 0.3631720542907715,
+ 0.19689947366714478,
+ -0.10236567258834839,
+ -0.1630963683128357,
+ 0.2199205905199051,
+ 0.19609200954437256,
+ 0.03429645299911499,
+ 0.009923219680786133,
+ -0.2986222207546234,
+ -0.012715518474578857,
+ -0.09334969520568848,
+ -0.2591654062271118,
+ 0.13134634494781494,
+ 0.006557494401931763,
+ -0.05599468946456909,
+ 0.23990613222122192,
+ 0.2227831482887268,
+ 0.24876728653907776,
+ 0.36253294348716736,
+ -0.303396999835968,
+ 0.25108033418655396,
+ 0.1529247760772705,
+ 0.11450737714767456,
+ -0.23073947429656982,
+ -0.25403666496276855,
+ 0.17483586072921753,
+ -0.1986236572265625,
+ -0.01692691445350647,
+ -0.034987568855285645,
+ -0.1198863685131073,
+ 0.27887845039367676,
+ 0.17928889393806458,
+ 0.020662307739257812,
+ 0.2668822407722473,
+ -0.11805003881454468,
+ -0.06388843059539795,
+ -0.21411561965942383,
+ 0.01372450590133667,
+ -0.24946141242980957,
+ -0.279031366109848,
+ 0.2738889455795288,
+ -0.20854970812797546,
+ 0.12233035266399384,
+ -0.28195589780807495,
+ 0.018698811531066895,
+ -0.16715490818023682,
+ -0.20540332794189453,
+ -0.2636367082595825,
+ 0.26795583963394165,
+ 0.12876352667808533,
+ -0.10543406009674072,
+ 0.20127403736114502,
+ -0.11703312397003174,
+ 0.15292581915855408,
+ 0.24806071817874908,
+ -0.17905572056770325,
+ 0.29870790243148804,
+ 0.06924900412559509,
+ -0.16038404405117035,
+ 0.34786686301231384,
+ 0.22728413343429565,
+ 0.06010061502456665,
+ -0.1273660659790039,
+ -0.258827805519104,
+ -0.08360844850540161,
+ 0.1948663294315338,
+ -0.025863125920295715,
+ -0.05280709266662598,
+ -0.17841726541519165,
+ -0.2109546959400177,
+ -0.2901599407196045,
+ -0.014976516366004944,
+ -0.1700950264930725,
+ -0.25256434082984924,
+ 0.13991329073905945,
+ 0.19870518147945404,
+ 0.01960045099258423,
+ -0.39243578910827637,
+ 0.33701127767562866,
+ -0.20922014117240906,
+ -0.07008987665176392,
+ -0.20192095637321472,
+ -0.26280927658081055,
+ 0.2520809769630432,
+ -0.2268969714641571,
+ -0.07167309522628784,
+ -0.00029752403497695923,
+ -0.11725904047489166,
+ 0.29754114151000977,
+ -0.042043089866638184,
+ 0.27858591079711914,
+ -0.03848855197429657,
+ 0.15253551304340363,
+ -0.1319931596517563,
+ 0.23687788844108582,
+ 0.11251258850097656,
+ -0.23326092958450317,
+ 0.2951905131340027,
+ 0.05647611618041992,
+ 0.03406164050102234,
+ 0.05826413631439209,
+ 0.3981969654560089,
+ -0.05857989937067032,
+ 0.2702087461948395,
+ -0.044413864612579346,
+ 0.05309021472930908,
+ -0.041106224060058594,
+ -0.2916693389415741,
+ -0.09792125225067139,
+ -0.1057346761226654,
+ -0.20922774076461792,
+ 0.01803213357925415,
+ 0.1515023112297058,
+ 0.009668111801147461,
+ -0.36418336629867554,
+ -0.0502239465713501,
+ -0.15151634812355042,
+ -0.26676517724990845,
+ 0.04160240292549133,
+ -0.2817147374153137,
+ 0.12016773223876953,
+ 0.25322771072387695,
+ -0.19258826971054077,
+ -0.23177248239517212,
+ -0.2376498281955719,
+ -0.01643425226211548,
+ 0.20907992124557495,
+ 0.19951952993869781,
+ -0.14403414726257324,
+ 0.3636677861213684,
+ -0.09271574020385742,
+ -0.11299341917037964,
+ -0.010258093476295471,
+ -0.033310115337371826,
+ 0.26828157901763916,
+ 0.16783183813095093,
+ -0.022497110068798065,
+ 0.15191484987735748,
+ -0.05219221115112305,
+ 0.2823997735977173,
+ -0.25327014923095703,
+ -0.2906925678253174,
+ 0.0073816776275634766,
+ -0.05486832559108734,
+ -0.23999565839767456,
+ 0.19197216629981995,
+ 0.1497374325990677,
+ 0.002429276704788208,
+ -0.3038216829299927,
+ 0.20382541418075562,
+ -0.03463524580001831,
+ 0.005234360694885254,
+ -0.3544016182422638,
+ -0.14766308665275574,
+ -0.08565757423639297,
+ 0.3332217335700989,
+ 0.190515398979187,
+ 0.38285526633262634,
+ 0.08513671159744263,
+ 0.048652976751327515,
+ -0.24067063629627228,
+ -0.3766059875488281,
+ 0.04942634701728821,
+ 0.40223440527915955,
+ -0.11268055438995361,
+ -0.1588461995124817,
+ -0.2195277214050293,
+ 0.17031152546405792,
+ 0.215057373046875,
+ 0.2674795389175415,
+ -0.3809437155723572,
+ -0.19781969487667084,
+ 0.1859123706817627,
+ -0.23306214809417725,
+ -0.06791794300079346,
+ -0.09063515067100525,
+ 0.19976934790611267,
+ -0.33392709493637085,
+ 0.2586905360221863,
+ 0.164631187915802,
+ 0.3205573558807373,
+ -0.11473680287599564,
+ -0.01593007892370224,
+ 0.21293753385543823,
+ -0.08231884241104126,
+ 0.282722532749176,
+ -0.2446800172328949,
+ 0.17143897712230682,
+ 0.2897028625011444,
+ 0.06186407804489136,
+ 0.2544218897819519,
+ 0.35580700635910034,
+ 0.030584752559661865,
+ 0.10762689262628555,
+ -0.19065511226654053,
+ -0.3065587282180786,
+ 0.04923856258392334,
+ 0.0030422210693359375,
+ 0.14465108513832092,
+ 0.028244316577911377,
+ -0.11431726813316345,
+ -0.07637502253055573,
+ -0.1619408130645752,
+ -0.2314235270023346,
+ 0.09185504913330078,
+ -0.15195083618164062,
+ -0.2396230697631836,
+ 0.4008772075176239,
+ 0.28675705194473267,
+ -0.17243245244026184,
+ -0.13055485486984253,
+ -0.183170884847641,
+ 0.10953713953495026,
+ -0.1853768676519394,
+ 0.3966027796268463,
+ 0.2643030881881714,
+ 0.37666386365890503,
+ 0.24013042449951172,
+ -0.27088484168052673,
+ -0.034614622592926025,
+ -0.2800289988517761,
+ 0.2683368921279907,
+ 0.11951984465122223,
+ 0.34611183404922485,
+ 0.07917005568742752,
+ -0.2843021750450134,
+ -0.12898269295692444,
+ -0.154410719871521,
+ 0.26070207357406616,
+ 0.16170036792755127,
+ -0.31599313020706177,
+ 0.2885420322418213,
+ -0.22491484880447388,
+ -0.3154186010360718,
+ 0.23590481281280518,
+ 0.23272082209587097,
+ -0.17059281468391418,
+ -0.11366987228393555,
+ -0.26958465576171875,
+ -0.2916288375854492,
+ -0.2843674421310425,
+ -0.17259591817855835,
+ -0.30311232805252075,
+ -0.3900067210197449,
+ 0.20952805876731873,
+ 0.13712197542190552,
+ -0.29949796199798584,
+ -0.3607686460018158,
+ -0.25742262601852417,
+ -0.2434183657169342,
+ -0.1666770577430725,
+ 0.2720600962638855,
+ 0.014531541615724564,
+ -0.301260769367218,
+ 0.2897299528121948,
+ 0.14056485891342163,
+ 0.07245763391256332,
+ -0.29589301347732544,
+ -0.09271040558815002,
+ -0.06916919350624084,
+ -0.34054985642433167,
+ 0.09494064003229141,
+ -0.30833473801612854,
+ -0.22487330436706543,
+ -0.21119838953018188,
+ -0.12052232027053833,
+ 0.10517436265945435,
+ 0.20988953113555908,
+ 0.15871357917785645,
+ -0.297584593296051,
+ 0.2994852066040039,
+ -0.25124332308769226,
+ 0.08210256695747375,
+ -0.27394333481788635,
+ -0.16114342212677002,
+ -0.08726245164871216,
+ -0.05099162459373474,
+ -0.28154605627059937,
+ 0.05557394027709961,
+ -0.2701306641101837,
+ -0.2931121587753296,
+ 0.2621909976005554,
+ 0.2772546708583832,
+ -0.02109488844871521,
+ 0.08061158657073975,
+ 0.03182403743267059,
+ 0.3078051209449768,
+ -0.046678364276885986,
+ -0.2366463840007782,
+ -0.09077173471450806,
+ 0.03993085026741028,
+ -0.1826295554637909,
+ -0.23222267627716064,
+ 0.024235360324382782,
+ 0.20134428143501282,
+ 0.28803694248199463,
+ 0.12129718065261841,
+ -0.14884114265441895,
+ -0.2294006645679474,
+ 0.25926774740219116,
+ -0.32179728150367737,
+ -0.10202932357788086,
+ 0.3303343951702118,
+ -0.18152719736099243,
+ 0.1137411892414093,
+ 0.22748297452926636,
+ 0.18339306116104126,
+ -0.04920867085456848,
+ -0.29180431365966797,
+ -0.32255375385284424,
+ -0.29579317569732666,
+ 0.2309836745262146,
+ 0.1429533064365387,
+ -0.2820838987827301,
+ -0.28696128726005554,
+ -0.27368342876434326,
+ 0.1712750792503357,
+ -0.09368950128555298,
+ -0.2005285620689392,
+ 0.18355989456176758,
+ 0.08039671927690506,
+ -0.08360306918621063,
+ 0.12978874146938324,
+ -0.05761563777923584,
+ 0.058985546231269836,
+ -0.03125983476638794,
+ -0.36582285165786743,
+ -0.07041533291339874,
+ -0.18127721548080444,
+ 0.004232499748468399,
+ 0.18997785449028015,
+ -0.024663399904966354,
+ -0.3674163818359375,
+ -0.2960887551307678,
+ 0.29907727241516113,
+ 0.2880275547504425,
+ -0.3890559673309326,
+ 0.0992550253868103,
+ 0.0007679462432861328,
+ 0.24983549118041992,
+ 0.12388134002685547,
+ -0.08423380553722382,
+ -0.013830259442329407,
+ 0.08747454732656479,
+ -0.2709313631057739,
+ -0.2956092953681946,
+ -0.1299157440662384,
+ -0.15956848859786987,
+ 0.2949545979499817,
+ 0.01912067085504532,
+ -0.015798509120941162,
+ -0.16693267226219177,
+ 0.07807761430740356,
+ -0.19750961661338806,
+ -0.12560606002807617,
+ -0.06773702055215836,
+ -0.016850292682647705,
+ 0.006071567535400391,
+ 0.14044418931007385,
+ 0.30155330896377563,
+ -0.3049861192703247,
+ -0.09751474112272263,
+ -0.1359248161315918,
+ -0.3035830557346344,
+ 0.11169165372848511,
+ 0.1137315034866333,
+ 0.2727293372154236,
+ 0.2980943024158478,
+ -0.24062199890613556,
+ 0.14550381898880005,
+ -0.016439683735370636,
+ -0.006371855735778809,
+ 0.1968543380498886,
+ 0.17441660165786743,
+ 0.09206241369247437,
+ 0.1362786740064621,
+ 0.19367748498916626,
+ 0.17757654190063477,
+ -0.06501597166061401,
+ 0.17219114303588867,
+ -0.20362061262130737,
+ 0.24436751008033752,
+ -0.09724113345146179,
+ -0.20829038321971893,
+ 0.14997005462646484,
+ -0.01832503080368042,
+ 0.2657378911972046,
+ -0.21564827859401703,
+ 0.0848739743232727,
+ -0.26940399408340454,
+ 0.22985658049583435,
+ 0.20075953006744385,
+ 0.17954832315444946,
+ -0.24949601292610168,
+ -0.13946020603179932,
+ -0.2066589593887329,
+ -0.06693863868713379,
+ 0.07988429069519043,
+ -0.26638299226760864,
+ -0.10191720724105835,
+ -0.05399978160858154,
+ 0.3010086417198181,
+ 0.18992158770561218,
+ -0.3914889097213745,
+ -0.08493775129318237,
+ 0.38115713000297546,
+ 0.12163867056369781,
+ 0.2845562696456909,
+ -0.005679041147232056,
+ -0.2854795455932617,
+ -0.15361498296260834,
+ -0.03945710510015488,
+ 0.18345162272453308,
+ 0.09767071157693863,
+ 0.05813120678067207,
+ 0.2671937048435211,
+ -0.12459203600883484,
+ 0.03229913115501404,
+ -0.2124684900045395,
+ -0.18525880575180054,
+ -0.13212808966636658,
+ -0.16321848332881927,
+ 0.01370537281036377,
+ -0.15003596246242523,
+ 0.2762526273727417,
+ -0.2419167160987854,
+ 0.08885955810546875,
+ -0.2746787667274475,
+ 0.10720750689506531,
+ 0.11931627988815308,
+ -0.02419942617416382,
+ 0.12455374002456665,
+ 0.10276550054550171,
+ 0.19872528314590454,
+ -0.08098699152469635,
+ -0.3835110068321228,
+ 0.214691162109375,
+ -0.27912062406539917,
+ 0.1437266767024994,
+ 0.18430207669734955,
+ 0.060606449842453,
+ 0.029013365507125854,
+ 0.10551637411117554,
+ 0.02264326810836792,
+ 0.2691020369529724,
+ 0.3770517408847809,
+ 0.16874516010284424,
+ 0.236872136592865,
+ -0.12857523560523987,
+ -0.37606555223464966,
+ -0.29581940174102783,
+ 0.222476065158844,
+ -0.29895836114883423,
+ -0.28047609329223633,
+ 0.2640400826931,
+ 0.005471765995025635,
+ -0.10435350239276886,
+ -0.2160082757472992,
+ 0.047395315021276474,
+ -0.2503244876861572,
+ -0.28018203377723694,
+ -0.15759527683258057,
+ -0.20505976676940918,
+ 0.027564533054828644,
+ -0.2614806592464447,
+ 0.1935221552848816,
+ -0.15437588095664978,
+ -0.24265697598457336,
+ -0.1815740466117859,
+ 0.052776485681533813,
+ -0.15661153197288513,
+ 0.29681870341300964,
+ -0.28261005878448486,
+ -0.24832406640052795,
+ -0.3898945450782776,
+ 0.08956149220466614,
+ -0.11889947950839996,
+ -0.27521297335624695,
+ 0.30380916595458984,
+ 0.40292632579803467,
+ 0.25149595737457275,
+ -0.10192954540252686,
+ -0.29027122259140015,
+ -0.11018480360507965,
+ 0.209625244140625,
+ -0.27328717708587646,
+ -0.30308812856674194,
+ 0.2524624764919281,
+ -0.2833878993988037,
+ 0.06839969009160995,
+ -0.3820231258869171,
+ 0.19162391126155853,
+ -0.35555189847946167,
+ -0.25346267223358154,
+ -0.07731197774410248,
+ 0.19276437163352966,
+ -0.15832844376564026,
+ -0.24534529447555542,
+ -0.03867834806442261,
+ 0.13678109645843506,
+ 0.00850069522857666,
+ -0.2954633831977844,
+ -0.2998187243938446,
+ 0.07159236073493958,
+ 0.14295728504657745,
+ 0.2231772094964981,
+ -0.290533185005188,
+ -0.1773936152458191,
+ 0.30431172251701355,
+ 0.29003310203552246,
+ 0.02494712918996811,
+ -0.1985694319009781,
+ -0.20248723030090332,
+ -0.3051600456237793,
+ 0.2529539167881012,
+ 0.2912440299987793,
+ -0.0405745655298233,
+ -0.07797813415527344,
+ -0.11018222570419312,
+ -0.01150524616241455,
+ -0.08925344794988632,
+ -0.3960533142089844,
+ -0.19231396913528442,
+ -0.027093082666397095,
+ 0.09241518378257751,
+ 0.3101367652416229,
+ -0.12718230485916138,
+ 0.29413852095603943,
+ -0.14812250435352325,
+ -0.19522690773010254,
+ 0.19736513495445251,
+ -0.2174154371023178,
+ 0.31857356429100037,
+ -0.25493910908699036,
+ 0.27246713638305664,
+ -0.2787654399871826,
+ -0.25857698917388916,
+ 0.11370696127414703,
+ 0.10928308963775635,
+ 0.0969700962305069,
+ 0.1041986346244812,
+ 0.11963018774986267,
+ -0.14192122220993042,
+ 0.17061913013458252,
+ 0.1060580313205719,
+ -0.1436281055212021,
+ 0.29981574416160583,
+ 0.2886630892753601,
+ 0.21078285574913025,
+ 0.054542601108551025,
+ -0.19965779781341553,
+ -0.20832031965255737,
+ 0.2776375114917755,
+ -0.03599974513053894,
+ 0.12928229570388794,
+ -0.07087843120098114,
+ 0.13023406267166138,
+ -0.23432326316833496,
+ 0.12953579425811768,
+ -0.0852651596069336,
+ 0.14389878511428833,
+ -0.026684194803237915,
+ 0.20553317666053772,
+ 0.033864736557006836,
+ 0.28164854645729065,
+ -0.18958720564842224,
+ -0.09549731016159058,
+ 0.2744523286819458,
+ 0.26992547512054443,
+ 0.05664658546447754,
+ -0.09975272417068481,
+ 0.04652111232280731,
+ 0.024763494729995728,
+ 0.2676495611667633,
+ 0.27521997690200806,
+ -0.026833802461624146,
+ -0.34493502974510193,
+ -0.20110273361206055,
+ 0.28495264053344727,
+ 0.006855204701423645,
+ -0.174452543258667,
+ -0.30334287881851196,
+ 0.01331937313079834,
+ -0.0879870355129242,
+ 0.25054359436035156,
+ -0.009356379508972168,
+ 0.06802496314048767,
+ 0.18597835302352905,
+ 0.39550769329071045,
+ 0.1786084771156311,
+ 0.1588045060634613,
+ -0.296148419380188,
+ -0.21388143301010132,
+ 0.2808080017566681,
+ 0.2740952670574188,
+ -0.21928291022777557,
+ 0.39077627658843994,
+ -0.029023289680480957,
+ -0.022716283798217773,
+ 0.2922179102897644,
+ -0.38201552629470825,
+ -0.02859252691268921,
+ -0.26180511713027954,
+ 0.2983412742614746,
+ 0.08591336011886597,
+ 0.27969324588775635,
+ -0.05489540100097656,
+ 0.11165980994701385,
+ 0.3002353310585022,
+ 0.2207622528076172,
+ 0.20393823087215424,
+ 0.009364806115627289,
+ -0.035512685775756836,
+ -0.2971862554550171,
+ -0.17892146110534668,
+ 0.03540663793683052,
+ -0.3289051651954651,
+ -0.12582597136497498,
+ -0.22344839572906494,
+ 0.2643326222896576,
+ -0.28412893414497375,
+ -0.2969106435775757,
+ 0.23660343885421753,
+ -0.06878143548965454,
+ -0.2883729338645935,
+ 0.16439607739448547,
+ -0.22168388962745667,
+ 0.15692639350891113,
+ -0.2897603213787079,
+ -0.3014094829559326,
+ -0.04141801595687866,
+ 0.13103389739990234,
+ -0.02658519148826599,
+ -0.2293669879436493,
+ 0.1451491266489029,
+ -0.21896320581436157,
+ -0.017109649255871773,
+ 0.37789320945739746,
+ -0.08833003044128418,
+ -0.049967408180236816,
+ 0.05394935607910156,
+ -0.2878793478012085,
+ 0.06944354623556137,
+ -0.1528182029724121,
+ -0.24948447942733765,
+ 0.21589025855064392,
+ 0.11341443657875061,
+ -0.0894833505153656,
+ 0.2803080976009369,
+ -0.30602848529815674,
+ 0.2959749698638916,
+ 0.25047463178634644,
+ 0.19792696833610535,
+ 0.3576918840408325,
+ 0.29569151997566223,
+ -0.1828603297472,
+ -0.20551201701164246,
+ 0.4026224613189697,
+ 0.0006082057952880859,
+ 0.35963988304138184,
+ 0.0021371766924858093,
+ -0.2887905240058899,
+ 0.06151043251156807,
+ -0.1815776228904724,
+ -0.30125194787979126,
+ 0.04020267724990845,
+ 0.028938978910446167,
+ -0.11691674590110779,
+ 0.031051993370056152,
+ 0.24063101410865784,
+ -0.01710343360900879,
+ 0.08599722385406494,
+ 0.14630460739135742,
+ 0.14427974820137024,
+ -0.2846854329109192,
+ 0.13155174255371094,
+ -0.029191434383392334,
+ 0.15137748420238495,
+ 0.3312038481235504,
+ -0.2480531632900238,
+ -0.25161081552505493,
+ -0.1979174166917801,
+ -0.3538493812084198,
+ -0.2602613568305969,
+ 0.28208231925964355,
+ -0.1574568897485733,
+ -0.2398483157157898,
+ 0.1957944929599762,
+ 0.1870923638343811,
+ -0.3405291438102722,
+ 0.18406516313552856,
+ 0.12446048855781555,
+ 0.01580198109149933,
+ -0.12497268617153168,
+ 0.29989558458328247,
+ 0.36507582664489746,
+ 0.08825437724590302,
+ 0.14204750955104828,
+ -0.07900120317935944,
+ -0.15103739500045776,
+ -0.3492092490196228,
+ -0.022737272083759308,
+ -0.2619168758392334,
+ -0.03002789616584778,
+ 0.1784062534570694,
+ 0.21702255308628082,
+ 0.1040486991405487,
+ -0.20240718126296997,
+ -0.002360701560974121,
+ 0.1742209494113922,
+ 0.16428142786026,
+ 0.08382466435432434,
+ -0.20717597007751465,
+ -0.04432940483093262,
+ 0.004552558064460754,
+ -0.30321240425109863,
+ -0.101898692548275,
+ 0.16926424205303192,
+ -0.17458641529083252,
+ -0.3050227761268616,
+ -0.15262901782989502,
+ -0.28151777386665344,
+ -0.3641502857208252,
+ -0.19889408349990845,
+ -0.2127968668937683,
+ 0.22199827432632446,
+ 0.2838858366012573,
+ 0.3176255524158478,
+ 0.39304107427597046,
+ -0.2115931510925293,
+ 0.006679832935333252,
+ 0.3289204239845276,
+ -0.08941245079040527,
+ -0.29890957474708557,
+ -0.05282318592071533,
+ -0.07593317329883575,
+ 0.1880871206521988,
+ 0.03554295003414154,
+ -0.32998982071876526,
+ 0.18209770321846008,
+ -0.15637260675430298,
+ 0.2466520071029663,
+ 0.29891839623451233,
+ 0.23853254318237305,
+ 0.2881925702095032,
+ -0.29119014739990234,
+ 0.1961861103773117,
+ 0.2951241433620453,
+ 0.32933348417282104,
+ 0.004304364323616028,
+ 0.2140446901321411,
+ 0.08822639286518097,
+ -0.2897986173629761,
+ -0.20894908905029297,
+ -0.3326742649078369,
+ -0.3003283441066742,
+ 0.02046838402748108,
+ -0.01102292537689209,
+ 0.2686625123023987,
+ -0.35963600873947144,
+ 0.00836266577243805,
+ 0.03333568572998047,
+ 0.012970209121704102,
+ 0.2336454838514328,
+ 0.3795892894268036,
+ -0.18524152040481567,
+ -0.2931469976902008,
+ 0.021671637892723083,
+ 0.2162386178970337,
+ -0.11477848887443542,
+ -0.11259324103593826,
+ 0.2882238030433655,
+ 0.020681381225585938,
+ -0.28345903754234314,
+ -0.298187792301178,
+ 0.1839699149131775,
+ 0.06557899713516235,
+ -0.29374146461486816,
+ 0.29795509576797485,
+ -0.08062318712472916,
+ 0.03542785346508026,
+ -0.27349168062210083,
+ -0.2132546305656433,
+ -0.38872650265693665,
+ 0.16085708141326904,
+ 0.08262291550636292,
+ 0.12112338095903397,
+ 0.29857951402664185,
+ -0.30463647842407227,
+ -0.26455652713775635,
+ -0.21423685550689697,
+ -0.20067405700683594,
+ 0.10251182317733765,
+ -0.11535477638244629,
+ 0.2287779152393341,
+ 0.22241663932800293,
+ -0.09933243691921234,
+ 0.26774483919143677,
+ -0.06796285510063171,
+ 0.040937021374702454,
+ -0.13531026244163513,
+ 0.2985415458679199,
+ -0.3679845333099365,
+ 0.21800848841667175,
+ -0.2323637753725052,
+ 0.26949024200439453,
+ -0.29950815439224243,
+ 0.2860743999481201,
+ 0.07493874430656433,
+ -0.13419240713119507,
+ 0.29186534881591797,
+ 0.11800509691238403,
+ 0.041799336671829224,
+ 0.014691025018692017,
+ -0.10157300531864166,
+ -0.23732025921344757,
+ 0.16835367679595947,
+ -0.12967121601104736,
+ 0.22078512609004974,
+ -0.13097801804542542,
+ 0.37358516454696655,
+ -0.08043402433395386,
+ 0.3013125956058502,
+ 0.20260071754455566,
+ 0.10088950395584106,
+ 0.2783820927143097,
+ 0.2811849117279053,
+ 0.17308616638183594,
+ 0.2750915288925171,
+ -0.193741112947464,
+ -0.014403186738491058,
+ 0.3330501914024353,
+ -0.13343331217765808,
+ 0.08096455037593842,
+ 0.24145105481147766,
+ 0.2764597535133362,
+ -0.27425745129585266,
+ -0.2966439425945282,
+ -0.15359878540039062,
+ 0.05058905482292175,
+ -0.263518363237381,
+ -0.12080472707748413,
+ 0.1350613236427307,
+ 0.2981910705566406,
+ 0.27630168199539185,
+ -0.3940407931804657,
+ -0.0610404908657074,
+ -0.024716109037399292,
+ 0.09418460726737976,
+ 0.017108798027038574,
+ 0.03669056296348572,
+ -0.25642693042755127,
+ 0.04493161290884018,
+ -0.3148021996021271,
+ 0.28975367546081543,
+ -0.3028734624385834,
+ -0.2509295344352722,
+ -0.25031882524490356,
+ -0.30308273434638977,
+ 0.10846006870269775,
+ 0.26770713925361633,
+ -0.03577270358800888,
+ 0.03432956337928772,
+ 0.27103284001350403,
+ 0.21999835968017578,
+ 0.09085079282522202,
+ -0.0312538743019104,
+ 0.370343953371048,
+ 0.14324909448623657,
+ 0.11016024649143219,
+ -0.1356891244649887,
+ 0.08383813500404358,
+ -0.22541210055351257,
+ -0.11153969913721085,
+ 0.24092477560043335,
+ 0.26514482498168945,
+ 0.3720012903213501,
+ 0.40038979053497314,
+ 0.07875144481658936,
+ -0.17618827521800995,
+ 0.20660430192947388,
+ 0.2999541163444519,
+ -0.2119256556034088,
+ 0.05292314291000366,
+ 0.24929288029670715,
+ 0.29079052805900574,
+ -0.2435099482536316,
+ -0.29362428188323975,
+ 0.29902827739715576,
+ -0.2895970940589905,
+ -0.24548761546611786,
+ -0.12421683222055435,
+ 0.10779568552970886,
+ -0.267702579498291,
+ 0.02001899480819702,
+ 0.37018880248069763,
+ 0.21039672195911407,
+ -0.38688692450523376,
+ -0.2665339708328247,
+ -0.29389631748199463,
+ -0.26629555225372314,
+ 0.26653027534484863,
+ -0.0653504729270935,
+ 0.13756945729255676,
+ -0.26285886764526367,
+ -0.031951457262039185,
+ -0.21253174543380737,
+ -0.21232837438583374,
+ 0.17444753646850586,
+ -0.06246650218963623,
+ -0.3011071979999542,
+ 0.17918585240840912,
+ 0.3768279552459717,
+ -0.15175877511501312,
+ 0.2888741195201874,
+ -0.029671132564544678,
+ 0.2655029892921448,
+ 0.16072505712509155,
+ -0.30321723222732544,
+ -0.14004480838775635,
+ 0.06965796649456024,
+ 0.060984671115875244,
+ 0.3034202754497528,
+ -0.192987859249115,
+ 0.16146188974380493,
+ -0.3175085783004761,
+ 0.261586457490921,
+ 0.24610847234725952,
+ -0.3899727463722229,
+ 0.11894512176513672,
+ -0.13936269283294678,
+ 0.08332198858261108,
+ -0.27570202946662903,
+ -0.20616567134857178,
+ -0.20843416452407837,
+ -0.09890112280845642,
+ 0.10060716420412064,
+ 0.21154969930648804,
+ 0.33756205439567566,
+ 0.24444067478179932,
+ -0.17734864354133606,
+ -0.14498569071292877,
+ -0.3026849925518036,
+ 0.016637802124023438,
+ -0.0515977144241333,
+ -0.2188604772090912,
+ 0.24123764038085938,
+ -0.28573715686798096,
+ 0.28358203172683716,
+ -0.05299060046672821,
+ -0.09951230138540268,
+ 0.0280933678150177,
+ 0.1158539354801178,
+ 0.3684159517288208,
+ -0.18323099613189697,
+ 0.10368245840072632,
+ 0.1472262740135193,
+ -0.22848987579345703,
+ 0.09070613235235214,
+ 0.07253682613372803,
+ 0.25193026661872864,
+ -0.3669669032096863,
+ -0.24868550896644592,
+ -0.06304645538330078,
+ 0.2541852295398712,
+ 0.035760462284088135,
+ 0.3317396640777588,
+ -0.1928139626979828,
+ 0.3807160258293152,
+ -0.27627313137054443,
+ 0.02030712366104126,
+ -0.030738115310668945,
+ -0.3148702383041382,
+ 0.21091026067733765,
+ 0.10004577040672302,
+ 0.1821286678314209,
+ 0.3136667013168335,
+ -0.1744210124015808,
+ -0.013352394104003906,
+ -0.12334377318620682,
+ -0.04442816972732544,
+ -0.2840537428855896,
+ -0.024666979908943176,
+ -0.25166571140289307,
+ 0.010225236415863037,
+ -0.15608656406402588,
+ -0.26106059551239014,
+ 0.15462082624435425,
+ 0.08307576179504395,
+ -0.019002586603164673,
+ -0.20244255661964417,
+ 0.12892678380012512,
+ -0.07192152738571167,
+ 0.21781134605407715,
+ -0.1945946216583252,
+ -0.07464644312858582,
+ 0.2703883945941925,
+ -0.022808372974395752,
+ 0.08265616744756699,
+ 0.058383047580718994,
+ 0.06396976113319397,
+ -0.29564183950424194,
+ -0.014121875166893005,
+ 0.19136841595172882,
+ -0.30213725566864014,
+ [embedded interactive-plot data omitted: raw point-coordinate arrays ("x", "y") from a scatter-plot JSON; no caption, panel titles, or further axis labels survive]
+ 0.28178834915161133,
+ 0.09767574071884155,
+ 0.10220271348953247,
+ 0.0664513111114502,
+ 0.06480556726455688,
+ 0.14983493089675903,
+ 0.14091241359710693,
+ -0.2321186363697052,
+ -0.28083038330078125,
+ -0.2576543390750885,
+ -0.015586912631988525,
+ -0.1957073211669922,
+ -0.09848836064338684,
+ -0.1267111748456955,
+ -0.07239872217178345,
+ -0.23304671049118042,
+ -0.11064571142196655,
+ 0.001453859731554985,
+ 0.1701844334602356,
+ -0.2817114591598511,
+ 0.10900241136550903,
+ -0.17107635736465454,
+ -0.012489497661590576,
+ 0.01891416311264038,
+ 0.011897921562194824,
+ -0.24339452385902405,
+ 0.28917229175567627,
+ -0.001525282859802246,
+ 0.10345494747161865,
+ 0.15333275496959686,
+ -0.013063758611679077,
+ -0.10092481970787048,
+ -0.2512727975845337,
+ -0.2658088207244873,
+ -0.19954419136047363,
+ 0.2274549901485443,
+ 0.19621986150741577,
+ -0.037484586238861084,
+ -0.2760794460773468,
+ 0.16806572675704956,
+ -0.06935366988182068,
+ -0.07750827074050903,
+ -0.2937623858451843,
+ 0.018408264964818954,
+ -0.0017219185829162598,
+ 0.30503708124160767,
+ -0.030476510524749756,
+ 0.10551103949546814,
+ 0.25808486342430115,
+ -0.2580222189426422,
+ -0.2727936804294586,
+ -0.10384774208068848,
+ 0.3068397641181946,
+ 0.2515692710876465,
+ -0.14369776844978333,
+ 0.2389153093099594,
+ -0.27372726798057556,
+ -0.2513936161994934,
+ -0.07543376088142395,
+ 0.2500148415565491,
+ 0.2070235311985016,
+ -0.007023274898529053,
+ 0.2280537486076355,
+ 0.17203772068023682,
+ -0.07374262809753418,
+ -0.2642385959625244,
+ -0.10914921760559082,
+ 0.14688502252101898,
+ 0.3024443984031677,
+ -0.28315338492393494,
+ 0.07976347953081131,
+ 0.030123095959424973,
+ -0.29324018955230713,
+ -0.19754275679588318,
+ -0.2552902400493622,
+ 0.11551019549369812,
+ 0.2908758521080017,
+ 0.1907232403755188,
+ -0.2977149486541748,
+ 0.1795036494731903,
+ -0.25979021191596985,
+ -0.20806372165679932,
+ 0.2563744783401489,
+ 0.2406788170337677,
+ -0.1865268349647522,
+ -0.14777261018753052,
+ 0.22782355546951294,
+ -0.2661136984825134,
+ -0.14731180667877197,
+ -0.09642502665519714,
+ -0.06718528270721436,
+ -0.2251368761062622,
+ 0.21187126636505127,
+ 0.2606426179409027,
+ -0.20673781633377075,
+ 0.23627245426177979,
+ -0.10965022444725037,
+ 0.23789739608764648,
+ 0.13733845949172974,
+ -0.1996382474899292,
+ 0.20492005348205566,
+ 0.2048291116952896,
+ -0.23423869907855988,
+ 0.15632887184619904,
+ -0.02508068084716797,
+ 0.2952647805213928,
+ 0.17880132794380188,
+ -0.03124988079071045,
+ 0.02050444483757019,
+ -0.01963105797767639,
+ 0.26082777976989746,
+ 0.2502475380897522,
+ -0.06058622524142265,
+ -0.21177589893341064,
+ -0.051174700260162354,
+ 0.2114207148551941,
+ -0.26332592964172363,
+ -0.21597760915756226,
+ 0.043403178453445435,
+ 0.07198986411094666,
+ -0.00543367862701416,
+ 0.05574965476989746,
+ 0.30773743987083435,
+ -0.23521721363067627,
+ 0.06866908073425293,
+ 0.14503741264343262,
+ 0.24687045812606812,
+ -0.033528923988342285,
+ -0.2815703749656677,
+ -0.18705499172210693,
+ 0.21947740018367767,
+ -0.26167815923690796,
+ -0.23577648401260376,
+ 0.1655997335910797,
+ 0.09330438077449799,
+ -0.28698283433914185,
+ 0.2844473421573639,
+ -0.25158455967903137,
+ 0.29613393545150757,
+ -0.20174407958984375,
+ -0.2517068684101105,
+ -0.2596117854118347,
+ 0.27422410249710083,
+ -0.10183566808700562,
+ 0.2813083529472351,
+ -0.06895513087511063,
+ 0.13147324323654175,
+ -0.08510033786296844,
+ 0.051698893308639526,
+ 0.19824421405792236,
+ -0.05460277199745178,
+ -0.2981354296207428,
+ -0.24008077383041382,
+ -0.17895042896270752,
+ 0.1865091174840927,
+ -0.1399591565132141,
+ -0.21557369828224182,
+ 0.26871347427368164,
+ -0.13430318236351013,
+ 0.03094378113746643,
+ -0.264096200466156,
+ 0.20110195875167847,
+ -0.07359611988067627,
+ -0.24524995684623718,
+ 0.18782013654708862,
+ -0.13623839616775513,
+ 0.21476030349731445,
+ -0.15271291136741638,
+ -0.22424854338169098,
+ -0.2765657305717468,
+ 0.010732173919677734,
+ 0.08114144951105118,
+ 0.3089887797832489,
+ 0.2887679934501648,
+ 0.021710380911827087,
+ -0.13817763328552246,
+ -0.16014546155929565,
+ -0.04639321565628052,
+ 0.03457419574260712,
+ 0.1286247968673706,
+ -0.2864443063735962,
+ -0.07548628747463226,
+ -0.25685226917266846,
+ 0.12963026762008667,
+ -0.20295566320419312,
+ 0.1271459013223648,
+ 0.1956140547990799,
+ 0.24891337752342224,
+ -0.05065193772315979,
+ 0.13610903918743134,
+ -0.21647295355796814,
+ 0.04236522316932678,
+ 0.2306114137172699,
+ 0.22596211731433868,
+ 0.1346631944179535,
+ 0.1504031866788864,
+ -0.09749841690063477,
+ 0.1288420557975769,
+ -0.2149624228477478,
+ 0.056667715311050415,
+ 0.3092711567878723,
+ 0.14452670514583588,
+ 0.2579728960990906,
+ -0.09032460302114487,
+ -0.24532946944236755,
+ -0.0009458959102630615,
+ 0.20579981803894043,
+ 0.060334861278533936,
+ 0.11446532607078552,
+ -0.2512267231941223,
+ 0.15645137429237366,
+ -0.09175187349319458,
+ -0.024704158306121826,
+ 0.031989842653274536,
+ 0.2603304982185364,
+ 0.2603977918624878,
+ -0.04235076904296875,
+ 0.1404045969247818,
+ -0.2881333827972412,
+ 0.08424872159957886,
+ -0.05935945361852646,
+ 0.19795769453048706,
+ 0.29800260066986084,
+ -0.21918907761573792,
+ 0.25220972299575806,
+ 0.185297429561615,
+ -0.2861718535423279,
+ -0.11167082190513611,
+ 0.02484823763370514,
+ 0.3052024841308594,
+ 0.09234219044446945,
+ 0.09318865835666656,
+ -0.2770456075668335,
+ -0.10683548450469971,
+ -0.17514987289905548,
+ 0.302303671836853,
+ -0.010261714458465576,
+ -0.21049459278583527,
+ 0.14674952626228333,
+ 0.05087251961231232,
+ -0.28444206714630127,
+ -0.26662418246269226,
+ -0.28286993503570557,
+ 0.13753028213977814,
+ 0.15659190714359283,
+ -0.28415846824645996,
+ 0.030031979084014893,
+ 0.2070489525794983,
+ 0.23290568590164185,
+ 0.3042196035385132,
+ -0.2959691882133484,
+ 0.072043776512146,
+ -0.11170905828475952,
+ 0.2612735331058502,
+ -0.12324322760105133,
+ 0.2767895758152008,
+ 0.08664052188396454,
+ -0.23644238710403442,
+ -0.07502514123916626,
+ -0.19766634702682495,
+ 0.17353308200836182,
+ 0.2631586790084839,
+ -0.020557016134262085,
+ -0.13152098655700684,
+ 0.039476942270994186,
+ 0.283500075340271,
+ 0.15793205797672272,
+ 0.17498552799224854,
+ -0.09463430941104889,
+ 0.23450663685798645,
+ -0.2269112765789032,
+ 0.16350537538528442,
+ 0.08995503187179565,
+ -0.09017598628997803,
+ 0.20483863353729248,
+ 0.2860628664493561,
+ -0.13456204533576965,
+ 0.13583284616470337,
+ -0.12980099022388458,
+ 0.12417017668485641,
+ 0.27533817291259766,
+ -0.16568198800086975,
+ -0.25279903411865234,
+ 0.12973648309707642,
+ -0.21824246644973755,
+ -0.27069419622421265,
+ -0.12614336609840393,
+ 0.23335987329483032,
+ -0.24924105405807495,
+ -0.1971321403980255,
+ -0.2212727665901184,
+ -0.13334010541439056,
+ 0.06299303472042084,
+ -0.1456824541091919,
+ 0.22731593251228333,
+ -0.2777542471885681,
+ -0.22884619235992432,
+ 0.17280960083007812,
+ -0.0742984414100647,
+ -0.004517197608947754,
+ 0.2289702296257019,
+ 0.140403151512146,
+ 0.2555093467235565,
+ -0.10333634912967682,
+ -0.24485570192337036,
+ 0.2807144522666931,
+ 0.08827757835388184,
+ 0.2767922282218933,
+ 0.2336673140525818,
+ -0.29860973358154297,
+ 0.102297842502594,
+ 0.2532385587692261,
+ -0.21616697311401367,
+ -0.08425968885421753,
+ 0.30736231803894043,
+ -0.00783279538154602,
+ -0.1830158829689026,
+ -0.15496352314949036,
+ 0.20995551347732544,
+ 0.3062512278556824,
+ -0.19513297080993652,
+ 0.12872326374053955,
+ -0.213112473487854,
+ -0.1946486085653305,
+ -0.22041523456573486,
+ -0.14620855450630188,
+ 0.09224143624305725,
+ 0.035124003887176514,
+ 0.29939645528793335,
+ 0.0781906247138977,
+ -0.02592751383781433,
+ 0.04220513999462128,
+ -0.04900026321411133,
+ 0.050035081803798676,
+ -0.07280551642179489,
+ -0.2554871439933777,
+ 0.18674862384796143,
+ -0.17898932099342346,
+ -0.23528403043746948,
+ 0.1999579668045044,
+ 0.057631850242614746,
+ -0.268057644367218,
+ -0.2972506880760193,
+ -0.2502923905849457,
+ 0.18641799688339233,
+ -0.2805676758289337,
+ -0.08278727531433105,
+ -0.265581339597702,
+ -0.2941107153892517,
+ 0.28433698415756226,
+ 0.09248552471399307,
+ 0.2080850601196289,
+ 0.09338520467281342,
+ 0.029758960008621216,
+ -0.18787330389022827,
+ 0.26404446363449097,
+ -0.09275823831558228,
+ -0.266773521900177,
+ 0.3057142496109009,
+ -0.06485292315483093,
+ 0.09649485349655151,
+ 0.007123589515686035,
+ 0.12208026647567749,
+ -0.2535476088523865,
+ 0.1731128692626953,
+ 0.21009108424186707,
+ 0.20428872108459473,
+ 0.19330671429634094,
+ 0.305949866771698,
+ -0.2116982638835907,
+ 0.057149335741996765,
+ 0.03593966364860535,
+ 0.15808245539665222,
+ 0.29191452264785767,
+ -0.27968984842300415,
+ -0.29978471994400024,
+ 0.16114377975463867,
+ -0.13255345821380615,
+ 0.21513941884040833,
+ -0.030109643936157227,
+ 0.15997707843780518,
+ 0.17609387636184692,
+ 0.0013414174318313599,
+ 0.15794095396995544,
+ 0.08034458756446838,
+ 0.21778693795204163,
+ -0.02704937756061554,
+ -0.1354222297668457,
+ -0.22672677040100098,
+ -0.09838789701461792,
+ 0.295920729637146,
+ 0.030743025243282318,
+ -0.06762664020061493,
+ -0.20830845832824707,
+ -0.1508062481880188,
+ -0.14901286363601685,
+ -0.2546743154525757,
+ 0.2979689836502075,
+ 0.2465701699256897,
+ -0.19193920493125916,
+ 0.2066306471824646,
+ -0.2823438048362732,
+ -0.29262620210647583,
+ -0.1685023307800293,
+ -0.23702087998390198,
+ 0.3011685013771057,
+ 0.21678265929222107,
+ 0.08257341384887695,
+ -0.13958919048309326,
+ -0.17937731742858887,
+ -0.24400430917739868,
+ -0.17272305488586426,
+ -0.1349535882472992,
+ -0.24118512868881226,
+ 0.23713475465774536,
+ -0.23296616971492767,
+ -0.1630827784538269,
+ -0.26712074875831604,
+ -0.2801361680030823,
+ -0.20536142587661743,
+ -0.15495437383651733,
+ -0.014960944652557373,
+ 0.25666069984436035,
+ 0.25956302881240845,
+ -0.0887167751789093,
+ -0.29595330357551575,
+ 0.06624433398246765,
+ 0.29763108491897583,
+ 0.14249172806739807,
+ 0.14767925441265106,
+ 0.24840158224105835,
+ -0.020181655883789062,
+ 0.30000802874565125,
+ 0.10324835777282715,
+ 0.21672919392585754,
+ -0.13102054595947266,
+ -0.17112690210342407,
+ 0.18737104535102844,
+ -0.24385325610637665,
+ -0.29420316219329834,
+ -0.05258989334106445,
+ 0.0006799846887588501,
+ -0.1351199746131897,
+ 0.28686192631721497,
+ 0.03799092769622803,
+ -0.2557392716407776,
+ 0.20483028888702393,
+ 0.038640424609184265,
+ 0.2690051198005676,
+ -0.01870208978652954,
+ 0.1878337860107422,
+ -0.08360698074102402,
+ -0.21420079469680786,
+ 0.2307336926460266,
+ 0.2502225637435913,
+ -0.03713345527648926,
+ -0.23137813806533813,
+ 0.1949509084224701,
+ 0.189397394657135,
+ 0.13982731103897095,
+ 0.16630831360816956,
+ 0.1412578821182251,
+ -0.013838529586791992,
+ -0.11516769230365753,
+ 0.13768184185028076,
+ -0.053542643785476685,
+ 0.2581636905670166,
+ -0.05316895246505737,
+ -0.17851954698562622,
+ 0.11170852184295654,
+ 0.2249097228050232,
+ -0.04071664810180664,
+ -0.17580458521842957,
+ -0.18157756328582764,
+ 0.08702978491783142,
+ -0.1916351616382599,
+ -0.27659446001052856,
+ -0.1785494089126587,
+ 0.011939749121665955,
+ -0.11699831485748291,
+ -0.2766706943511963,
+ -0.1319924294948578,
+ -0.2607978880405426,
+ -0.0744810625910759,
+ 0.10958069562911987,
+ 0.026847034692764282,
+ -0.1990983784198761,
+ -0.02981507033109665,
+ 0.27251148223876953,
+ -0.041717708110809326,
+ -0.1895151138305664,
+ -0.1473008096218109,
+ 0.18406003713607788,
+ 0.11046633124351501,
+ 0.12378641963005066,
+ 0.3065669536590576,
+ 0.027759522199630737,
+ 0.08447080850601196,
+ -0.21153917908668518,
+ 0.2989451289176941,
+ -0.06650146842002869,
+ -0.2857375144958496,
+ 0.017090201377868652,
+ -0.2749788761138916,
+ 0.28142619132995605,
+ -0.27595949172973633,
+ 0.1927993893623352,
+ 0.06082507222890854,
+ -0.17840605974197388,
+ -0.27740907669067383,
+ -0.022112727165222168,
+ 0.06154894828796387,
+ 0.06530687212944031,
+ 0.04106307029724121,
+ 0.1800784468650818,
+ -0.26231133937835693,
+ -0.10059637576341629,
+ 0.26324641704559326,
+ -0.21069589257240295,
+ -0.20753896236419678,
+ 0.12438732385635376,
+ -0.07881516218185425,
+ -0.24276775121688843,
+ -0.1481565237045288,
+ -0.14032906293869019,
+ 0.11596956849098206,
+ -0.17243951559066772,
+ 0.264125794172287,
+ 0.2680484354496002,
+ 0.2570273280143738,
+ 0.30664902925491333,
+ -0.19526177644729614,
+ -0.25343769788742065,
+ 0.019771724939346313,
+ -0.17177683115005493,
+ 0.09850825369358063,
+ 0.23807133734226227,
+ 0.22585558891296387,
+ 0.2399185299873352,
+ 0.11864331364631653,
+ -0.21879532933235168,
+ 0.20478898286819458,
+ -0.1472310870885849,
+ -0.27111122012138367,
+ 0.29065650701522827,
+ 0.1230347752571106,
+ -0.24142050743103027,
+ 0.28245532512664795,
+ -0.19295942783355713,
+ -0.08916634321212769,
+ 0.21018719673156738,
+ 0.09286613762378693,
+ -0.21290868520736694,
+ -0.05662792921066284,
+ -0.16542041301727295,
+ -0.10220116376876831,
+ -0.10564237087965012,
+ 0.1033007800579071,
+ 0.107810378074646,
+ 0.25137966871261597,
+ 0.2932512164115906,
+ -0.22463375329971313,
+ 0.234935462474823,
+ -0.05584901571273804,
+ 0.10718443989753723,
+ 0.09669715166091919,
+ 0.290561318397522,
+ -0.27699631452560425,
+ 0.17727059125900269,
+ -0.15294215083122253,
+ 0.17494124174118042,
+ 0.10171157121658325,
+ -0.29838722944259644,
+ 0.011832565069198608,
+ -0.2882654070854187,
+ -0.267422080039978,
+ -0.06598876416683197,
+ -0.02585884928703308,
+ 0.13522052764892578,
+ -0.1819538027048111,
+ -0.16269046068191528,
+ -0.29834917187690735,
+ -0.12879334390163422,
+ 0.16429221630096436,
+ 0.06942257285118103,
+ 0.10230430960655212,
+ -0.007930845022201538,
+ 0.14623267948627472,
+ 0.0746975913643837,
+ 0.1377173364162445,
+ -0.2527310252189636,
+ 0.059444859623909,
+ -0.11010637134313583,
+ 0.1726926565170288,
+ 0.06220529228448868,
+ 0.2492443323135376,
+ -0.008077740669250488,
+ 0.2831578254699707,
+ 0.24928522109985352,
+ -0.29671916365623474,
+ -0.07966361939907074,
+ 0.21087637543678284,
+ -0.012277543544769287,
+ 0.017055749893188477,
+ -0.06032902002334595,
+ -0.2646942138671875,
+ 0.025377638638019562,
+ 0.13963979482650757,
+ 0.1387748420238495,
+ 0.09120559692382812,
+ -0.03567636013031006,
+ -0.10820025205612183,
+ 0.09244692325592041,
+ 0.30781298875808716,
+ 0.1126055121421814,
+ 0.026775628328323364,
+ -0.09555196762084961,
+ 0.29159101843833923,
+ 0.09877514839172363,
+ -0.22794058918952942,
+ 0.03424385190010071,
+ 0.03962632268667221,
+ 0.2503984868526459,
+ 0.08164355158805847,
+ -0.1043340265750885,
+ -0.12415152788162231,
+ 0.17653703689575195,
+ -0.25764527916908264,
+ -0.19395405054092407,
+ 0.009353727102279663,
+ 0.20142656564712524,
+ -0.20096641778945923,
+ -0.23206248879432678,
+ 0.19315308332443237,
+ 0.19602710008621216,
+ 0.24583038687705994,
+ 0.0753076821565628,
+ -0.12434013932943344,
+ 0.22947698831558228,
+ 0.2319004237651825,
+ -0.23117932677268982,
+ 0.15292894840240479,
+ 0.28663766384124756,
+ -0.07364098727703094,
+ -0.009654328227043152,
+ -0.2290986180305481,
+ -0.20906926691532135,
+ 0.0849744975566864,
+ 0.230106383562088,
+ -0.2731717824935913,
+ -0.09378375113010406,
+ 0.2925449013710022,
+ -0.09936317801475525,
+ 0.26873230934143066,
+ -0.1026105284690857,
+ -0.04078470170497894,
+ -0.2725563049316406,
+ 0.3033072352409363,
+ -0.0559956431388855,
+ 0.2825744152069092,
+ -0.13693737983703613,
+ 0.2240476906299591,
+ -0.06127312034368515,
+ 0.24549072980880737,
+ -0.25060904026031494,
+ -0.29546940326690674,
+ 0.25988316535949707,
+ 0.11793406307697296,
+ -0.1772216260433197,
+ 0.26986363530158997,
+ 0.025286167860031128,
+ 0.28419554233551025,
+ 0.12959147989749908,
+ 0.08428847789764404,
+ -0.2706705629825592,
+ 0.28919535875320435,
+ -0.01464216411113739,
+ 0.008500754833221436,
+ -0.2152218222618103,
+ -0.1646275818347931,
+ -0.11121311038732529,
+ 0.08392873406410217,
+ 0.1322222650051117,
+ 0.07729104161262512,
+ 0.28989189863204956,
+ -0.27840837836265564,
+ -0.29686957597732544,
+ 0.19468629360198975,
+ 0.13870882987976074,
+ -0.24499976634979248,
+ -0.0049322545528411865,
+ 0.10898828506469727,
+ -0.1628253012895584,
+ 0.025769833475351334,
+ -0.19923317432403564,
+ 0.041903056204319,
+ 0.120228111743927,
+ -0.122001051902771,
+ -0.031411170959472656,
+ 0.17935369908809662,
+ 0.20149710774421692,
+ -0.0158841609954834,
+ 0.15438228845596313,
+ -0.2513970732688904,
+ -0.058937765657901764,
+ -0.22118288278579712,
+ 0.04564929008483887,
+ 0.1601194143295288,
+ -0.21381297707557678,
+ -0.252228319644928,
+ 0.12646245956420898,
+ -0.2111670970916748,
+ -0.00807693600654602,
+ -0.02556067705154419,
+ -0.047756075859069824,
+ 0.1151091605424881,
+ -0.2007288932800293,
+ -0.18173860013484955,
+ 0.17905700206756592,
+ 0.286834716796875,
+ -0.05094057321548462,
+ 0.20108386874198914,
+ 0.18525122106075287,
+ -0.16237756609916687,
+ -0.2555531859397888,
+ 0.0987568348646164,
+ -0.1299540102481842,
+ -0.044986791908741,
+ 0.13723626732826233,
+ -0.019043520092964172,
+ -0.16068924963474274,
+ -0.0205879807472229,
+ 0.19232429563999176,
+ -0.187443345785141,
+ -0.07939696311950684,
+ -0.2537158727645874,
+ 0.17247039079666138,
+ -0.17887026071548462,
+ -0.0019651204347610474,
+ -0.22147074341773987,
+ -0.11103445291519165,
+ 0.24722892045974731,
+ 0.1287057101726532,
+ -0.29994016885757446,
+ 0.016979694366455078,
+ -0.04770785570144653,
+ 0.1388060748577118,
+ 0.27031493186950684,
+ 0.04237404465675354,
+ -0.07880528271198273,
+ -0.24447423219680786,
+ 0.2356945276260376,
+ -0.07797598838806152,
+ -0.04556325078010559,
+ -0.11466538906097412,
+ 0.050971418619155884,
+ 0.007249921560287476,
+ -0.15901464223861694,
+ -0.0728960633277893,
+ 0.30433034896850586,
+ -0.29108601808547974,
+ 0.05278030037879944,
+ 0.10081285238265991,
+ -0.2874654531478882,
+ -0.21893152594566345,
+ 0.24301642179489136,
+ 0.29098647832870483,
+ 0.2827792763710022,
+ 0.13491082191467285,
+ -0.18154004216194153,
+ -0.06019429862499237,
+ 0.2731449007987976,
+ 0.09376369416713715,
+ -0.07178056240081787,
+ 0.21662676334381104,
+ 0.25203239917755127,
+ -0.2017938494682312,
+ 0.08200740814208984,
+ 0.12435629963874817,
+ -0.14530737698078156,
+ 0.2713223695755005,
+ 0.25000065565109253,
+ -0.2479766607284546,
+ -0.26417773962020874,
+ -0.27113020420074463,
+ 0.2738529443740845,
+ -0.2448520064353943,
+ 0.17666319012641907,
+ -0.264812707901001,
+ -0.01889932155609131,
+ 0.2371838241815567,
+ -0.2096506953239441,
+ 0.21664923429489136,
+ 0.021092288196086884,
+ 0.029759526252746582,
+ 0.22654491662979126,
+ -0.29596132040023804,
+ -0.20389655232429504,
+ 0.26896268129348755,
+ -0.263730525970459,
+ -0.18574869632720947,
+ -0.22613252699375153,
+ 0.2290465086698532,
+ 0.26918911933898926,
+ 0.020245075225830078,
+ 0.19017866253852844,
+ 0.3042713701725006,
+ -0.10155778378248215,
+ 0.24399372935295105,
+ -0.15748530626296997,
+ -0.09266234934329987,
+ -0.1283477395772934,
+ -0.2922772467136383,
+ -0.2809087038040161,
+ 0.17977476119995117,
+ -0.1747034639120102,
+ 0.1891438364982605,
+ 0.006918758153915405,
+ -0.023330509662628174,
+ -0.2368932068347931,
+ -0.2219243049621582,
+ 0.09695636481046677,
+ -0.1953621804714203,
+ 0.2548726797103882,
+ 0.021238118410110474,
+ -0.2892274856567383,
+ 0.282639741897583,
+ -0.051310569047927856,
+ 0.29698288440704346,
+ 0.05810168385505676,
+ -0.2333373725414276,
+ 0.21902544796466827,
+ 0.23197394609451294,
+ -0.10000413656234741,
+ -0.05636471509933472,
+ 0.13123819231987,
+ -0.10766136646270752,
+ -0.11967139691114426,
+ 0.20970594882965088,
+ 0.15900206565856934,
+ 0.27573466300964355,
+ 0.19271226227283478,
+ 0.060287922620773315,
+ -0.23549020290374756,
+ 0.22517424821853638,
+ -0.2186877727508545,
+ 0.02955937385559082,
+ -0.23475882411003113,
+ 0.22722065448760986,
+ -0.2924213409423828,
+ 0.28397661447525024,
+ 0.000004649162292480469,
+ -0.13523182272911072,
+ -0.21744120121002197,
+ -0.004718273878097534,
+ 0.3067992329597473,
+ -0.1463920623064041,
+ -0.14433905482292175,
+ 0.3039783239364624,
+ -0.26519355177879333,
+ -0.25661033391952515,
+ 0.21457472443580627,
+ 0.20771795511245728,
+ -0.19940122961997986,
+ 0.0735204815864563,
+ 0.13046160340309143,
+ -0.28933456540107727,
+ 0.09992121160030365,
+ -0.12026556581258774,
+ -0.05509662628173828,
+ -0.2507798671722412,
+ 0.09470927715301514,
+ 0.03763002157211304,
+ 0.3051414489746094,
+ 0.27317628264427185,
+ 0.17085713148117065,
+ 0.08452221751213074,
+ -0.25545167922973633,
+ 0.01931115984916687,
+ 0.11551791429519653,
+ 0.24955463409423828,
+ 0.2970394492149353,
+ -0.14818550646305084,
+ -0.16441184282302856,
+ 0.10339093208312988,
+ -0.07082921266555786,
+ 0.26855483651161194,
+ 0.17637169361114502,
+ -0.09277904778718948,
+ 0.28384435176849365,
+ 0.07308775186538696,
+ 0.10797713696956635,
+ -0.27116256952285767,
+ 0.05221542716026306,
+ 0.17718565464019775,
+ 0.21580007672309875,
+ 0.15902653336524963,
+ 0.1535881757736206,
+ -0.019883453845977783,
+ 0.22986304759979248,
+ 0.20555239915847778,
+ -0.06611794233322144,
+ 0.13895833492279053,
+ 0.060789912939071655,
+ -0.1597445011138916,
+ 0.2672795057296753,
+ -0.23832917213439941,
+ -0.04143202304840088,
+ -0.005991771817207336,
+ 0.2953934669494629,
+ 0.2853986918926239,
+ -0.25944745540618896,
+ -0.1783553957939148,
+ 0.30130141973495483,
+ 0.11396759748458862,
+ 0.08517789840698242,
+ 0.2638036608695984,
+ -0.050863996148109436,
+ 0.24314206838607788,
+ -0.2934243381023407,
+ -0.2342071235179901,
+ 0.20760349929332733,
+ 0.1336749792098999,
+ -0.08424937725067139,
+ 0.23763573169708252,
+ -0.1954849809408188,
+ -0.08437812328338623,
+ -0.10189063847064972,
+ -0.053031861782073975,
+ -0.10037300735712051,
+ -0.11440384387969971,
+ 0.1786886751651764,
+ -0.13948072493076324,
+ 0.2429942637681961,
+ -0.1136322021484375,
+ 0.09999442100524902,
+ -0.274544358253479,
+ 0.02674737572669983,
+ 0.13511347770690918,
+ -0.09091567993164062,
+ 0.24066698551177979,
+ -0.2882910370826721,
+ -0.19970981776714325,
+ -0.2114231288433075,
+ -0.3001781105995178,
+ -0.20630905032157898,
+ 0.2169961929321289,
+ 0.24494600296020508,
+ -0.20553892850875854,
+ -0.08441899716854095,
+ -0.21437585353851318,
+ -0.2743622660636902,
+ -0.23909324407577515,
+ -0.2160966992378235,
+ 0.23795872926712036,
+ -0.19896548986434937,
+ -0.27645793557167053,
+ -0.29766905307769775,
+ 0.038027435541152954,
+ 0.2933424115180969,
+ -0.24352890253067017,
+ -0.0893513560295105,
+ 0.011505573987960815,
+ 0.24445629119873047,
+ 0.12304800748825073,
+ -0.15512126684188843,
+ -0.19472509622573853,
+ 0.18069201707839966,
+ -0.15513542294502258,
+ 0.04485486447811127,
+ 0.26335036754608154,
+ 0.16584479808807373,
+ 0.19469743967056274,
+ 0.21679413318634033,
+ 0.2001095414161682,
+ -0.09098295867443085,
+ 0.05786113440990448,
+ 0.3019567131996155,
+ 0.16485075652599335,
+ -0.15335994958877563,
+ -0.26412704586982727,
+ -0.2795286178588867,
+ 0.24503153562545776,
+ 0.11308214068412781,
+ 0.13478049635887146,
+ -0.040345698595047,
+ 0.18696367740631104,
+ -0.08069057762622833,
+ 0.093121737241745,
+ 0.23231881856918335,
+ 0.16520018875598907,
+ 0.25164496898651123,
+ -0.13047611713409424,
+ -0.20567584037780762,
+ 0.29336339235305786,
+ -0.0815998911857605,
+ 0.06808724999427795,
+ -0.1390860676765442,
+ -0.2597367763519287,
+ 0.06395986676216125,
+ 0.30705583095550537,
+ -0.13323405385017395,
+ -0.24820655584335327,
+ 0.30866101384162903,
+ 0.04150255024433136,
+ -0.21584820747375488,
+ 0.13915947079658508,
+ 0.24634337425231934,
+ -0.009609699249267578,
+ -0.0014744400978088379,
+ -0.16555002331733704,
+ 0.2139919400215149,
+ 0.0665915310382843,
+ -0.23420849442481995,
+ -0.08221203088760376,
+ -0.10984772443771362,
+ 0.11671668291091919,
+ -0.006818115711212158,
+ -0.18717245757579803,
+ -0.28368109464645386,
+ -0.06197154521942139,
+ -0.034563325345516205,
+ -0.19465892016887665,
+ 0.3058278560638428,
+ -0.12782011926174164,
+ 0.10172383487224579,
+ -0.18041101098060608,
+ -0.23964053392410278,
+ -0.27523964643478394,
+ -0.09123814105987549,
+ 0.0004951357841491699,
+ 0.0021399259567260742,
+ 0.04356466233730316,
+ -0.2082827091217041,
+ 0.2360471487045288,
+ -0.27884647250175476,
+ -0.12391388416290283,
+ 0.03021206706762314,
+ 0.20630449056625366,
+ 0.028939589858055115,
+ -0.13057303428649902,
+ -0.16489803791046143,
+ 0.3055729269981384,
+ 0.09669867157936096,
+ 0.032857343554496765,
+ -0.15356165170669556,
+ 0.16771799325942993,
+ -0.13645929098129272,
+ -0.017236895859241486,
+ -0.036052823066711426,
+ -0.08528792858123779,
+ -0.2881964445114136,
+ -0.2690061926841736,
+ 0.07590481638908386,
+ 0.28113967180252075,
+ -0.2912949323654175,
+ 0.052542686462402344,
+ 0.09148663282394409,
+ 0.1975952386856079,
+ -0.18391193449497223,
+ -0.04444703459739685,
+ 0.20536834001541138,
+ -0.08710610866546631,
+ 0.13801255822181702,
+ -0.22988510131835938,
+ -0.013284265995025635,
+ 0.22608648240566254,
+ 0.25623804330825806,
+ 0.17736947536468506,
+ 0.03225358948111534,
+ 0.15647563338279724,
+ -0.09616965800523758,
+ -0.22720322012901306,
+ 0.3099065124988556,
+ 0.17185315489768982,
+ -0.27080395817756653,
+ 0.2974127531051636,
+ -0.17496955394744873,
+ -0.11367710679769516,
+ -0.12365177273750305,
+ -0.06055039167404175,
+ 0.16272595524787903,
+ -0.008172959089279175,
+ -0.05853229761123657,
+ -0.21399444341659546,
+ 0.27448561787605286,
+ -0.03632921725511551,
+ 0.09854394197463989,
+ -0.22141453623771667,
+ -0.2914368212223053,
+ 0.26225557923316956,
+ -0.040658771991729736,
+ 0.01070868968963623,
+ -0.2977016866207123,
+ -0.017715364694595337,
+ 0.3036566376686096,
+ -0.15066182613372803,
+ 0.08088727295398712,
+ 0.04458886384963989,
+ 0.10067713260650635,
+ -0.19208365678787231,
+ -0.03125825524330139,
+ 0.20086166262626648,
+ -0.03966420888900757,
+ -0.20121099054813385,
+ 0.1828940510749817,
+ 0.131451815366745,
+ 0.1601187139749527,
+ 0.29288285970687866,
+ 0.17997455596923828,
+ 0.249059796333313,
+ -0.08556246757507324,
+ 0.2837771773338318,
+ -0.09238803386688232,
+ -0.12129028141498566,
+ -0.20117366313934326,
+ 0.2586105465888977,
+ -0.17611101269721985,
+ 0.14513814449310303,
+ 0.1847066730260849,
+ 0.21316608786582947,
+ -0.28599783778190613,
+ -0.017401069402694702,
+ -0.017209410667419434,
+ -0.003885716199874878,
+ -0.06289783120155334,
+ -0.011836469173431396,
+ -0.16335752606391907,
+ 0.237932026386261,
+ 0.16543281078338623,
+ -0.07351082563400269,
+ 0.031868308782577515,
+ 0.056339651346206665,
+ 0.18590682744979858,
+ -0.11017769575119019,
+ 0.0618518590927124,
+ 0.06547597050666809,
+ 0.29790663719177246,
+ 0.1413031667470932,
+ -0.21275851130485535,
+ -0.1805894374847412,
+ 0.15418541431427002,
+ 0.08226421475410461,
+ -0.23294833302497864,
+ 0.14378488063812256,
+ 0.30201512575149536,
+ 0.2254893183708191,
+ -0.1797284185886383,
+ -0.2945086359977722,
+ 0.2253217101097107,
+ -0.18108174204826355,
+ -0.24013523757457733,
+ 0.2717795670032501,
+ -0.020250976085662842,
+ -0.1792418360710144,
+ 0.24867117404937744,
+ 0.26810771226882935,
+ 0.23360145092010498,
+ 0.13938777148723602,
+ -0.13825461268424988,
+ -0.13820397853851318,
+ 0.04776930809020996,
+ -0.088062584400177,
+ -0.04451936483383179,
+ -0.2571059465408325,
+ 0.21145030856132507,
+ -0.2226485311985016,
+ -0.24976202845573425,
+ -0.272541880607605,
+ 0.17384633421897888,
+ -0.16990673542022705,
+ -0.22961226105690002,
+ 0.2476365566253662,
+ 0.011988885700702667,
+ 0.2917247414588928,
+ 0.13688039779663086,
+ -0.15022489428520203,
+ -0.155234694480896,
+ 0.2716638147830963,
+ -0.08366969227790833,
+ 0.21720577776432037,
+ 0.25252199172973633,
+ 0.17873427271842957,
+ 0.09342274069786072,
+ -0.05048317462205887,
+ -0.19912445545196533,
+ -0.1887872815132141,
+ -0.23483048379421234,
+ -0.10305316746234894,
+ -0.17581719160079956,
+ 0.0852595865726471,
+ 0.10002604126930237,
+ -0.2950092554092407,
+ -0.2835673689842224,
+ -0.2964171767234802,
+ 0.21430543065071106,
+ 0.260542631149292,
+ 0.2110729068517685,
+ 0.14170265197753906,
+ 0.2574297785758972,
+ 0.2080785632133484,
+ -0.08198991417884827,
+ -0.2537420988082886,
+ -0.07517766952514648,
+ -0.17807254195213318,
+ 0.18406182527542114,
+ 0.30196326971054077,
+ -0.15618878602981567,
+ 0.12992237508296967,
+ 0.13343968987464905,
+ 0.1725817322731018,
+ 0.1923319548368454,
+ -0.22345149517059326,
+ 0.3082156777381897,
+ -0.2691068649291992,
+ -0.24954254925251007,
+ 0.001340717077255249,
+ -0.27702796459198,
+ -0.2218773514032364,
+ 0.13564693927764893,
+ -0.24390941858291626,
+ 0.30798885226249695,
+ -0.2729414105415344,
+ -0.2972906827926636,
+ 0.016426950693130493,
+ 0.12964123487472534,
+ 0.17420467734336853,
+ -0.19361461699008942,
+ 0.21539530158042908,
+ 0.16323328018188477,
+ 0.26352643966674805,
+ -0.28617942333221436,
+ 0.30654042959213257,
+ -0.21982795000076294,
+ 0.04176020622253418,
+ 0.04811692237854004,
+ 0.16485580801963806,
+ 0.24192076921463013,
+ 0.29854345321655273,
+ 0.13357818126678467,
+ 0.3090335726737976,
+ 0.1835373193025589,
+ 0.2135833203792572,
+ 0.12348224222660065,
+ -0.032638609409332275,
+ 0.20488625764846802,
+ -0.08330139517784119,
+ 0.04004386067390442,
+ -0.26444166898727417,
+ -0.09807305037975311,
+ -0.2652238607406616,
+ -0.03192019462585449,
+ -0.1275312304496765,
+ 0.20324623584747314,
+ 0.12542171776294708,
+ 0.22776353359222412,
+ 0.1252649575471878,
+ -0.08866339921951294,
+ -0.046341970562934875,
+ -0.1562538743019104,
+ -0.030713826417922974,
+ -0.012775793671607971,
+ -0.13487768173217773,
+ -0.20340204238891602,
+ -0.19571536779403687,
+ 0.28205960988998413,
+ 0.018110379576683044,
+ 0.13422609865665436,
+ 0.15276265144348145,
+ 0.05397211015224457,
+ -0.20835113525390625,
+ -0.011426299810409546,
+ 0.23745286464691162,
+ -0.09418520331382751,
+ -0.2687332034111023,
+ -0.07076407223939896,
+ 0.26529690623283386,
+ 0.07366570830345154,
+ -0.27754640579223633,
+ -0.19990813732147217,
+ 0.15922975540161133,
+ 0.03975363075733185,
+ 0.2490132749080658,
+ -0.005371272563934326,
+ -0.23249207437038422,
+ 0.14592225849628448,
+ 0.21564549207687378,
+ -0.23596838116645813,
+ 0.14648157358169556,
+ -0.2985650300979614,
+ 0.05128394067287445,
+ 0.12253159284591675,
+ -0.24432003498077393,
+ 0.2116582691669464,
+ -0.1439957618713379,
+ 0.14564648270606995,
+ -0.05386239290237427,
+ 0.1723717451095581,
+ 0.2893489599227905,
+ -0.03553935885429382,
+ -0.25738489627838135,
+ 0.03402337431907654,
+ -0.06626695394515991,
+ -0.1905992329120636,
+ -0.10041004419326782,
+ 0.08366470038890839,
+ -0.00021088123321533203,
+ 0.17684698104858398,
+ -0.28786972165107727,
+ 0.13491517305374146,
+ -0.08361363410949707,
+ 0.1420278251171112,
+ 0.018543988466262817,
+ 0.17753252387046814,
+ 0.15634411573410034,
+ -0.07369005680084229,
+ -0.10913816094398499,
+ -0.22834762930870056,
+ 0.17384880781173706,
+ 0.18389736115932465,
+ 0.25377601385116577,
+ 0.18186044692993164,
+ -0.14647692441940308,
+ -0.2966045141220093,
+ 0.179268479347229,
+ 0.2867293059825897,
+ -0.25697070360183716,
+ -0.00786956399679184,
+ 0.22656941413879395,
+ 0.1034071147441864,
+ 0.07603675127029419,
+ -0.2508751153945923,
+ 0.24911051988601685,
+ -0.017613500356674194,
+ 0.26814931631088257,
+ 0.24731341004371643,
+ -0.1872565746307373,
+ 0.17458637058734894,
+ 0.17475281655788422,
+ 0.2925885021686554,
+ 0.12697553634643555,
+ -0.15250492095947266,
+ -0.006299644708633423,
+ 0.07043468952178955,
+ -0.16086190938949585,
+ 0.3017907738685608,
+ 0.30041420459747314,
+ -0.17540359497070312,
+ -0.08453342318534851,
+ -0.19864749908447266,
+ -0.2546904981136322,
+ -0.03238508105278015,
+ 0.1333676427602768,
+ -0.2801504135131836,
+ -0.274008572101593,
+ 0.06221616268157959,
+ -0.1894548088312149,
+ 0.15141236782073975,
+ 0.22401025891304016,
+ -0.2645772397518158,
+ -0.04961109161376953,
+ 0.029157444834709167,
+ 0.2579347491264343,
+ -0.13822942972183228,
+ 0.1448274850845337,
+ 0.24516797065734863,
+ 0.05202434957027435,
+ -0.16620856523513794,
+ 0.26571890711784363,
+ -0.048900194466114044,
+ -0.12801513075828552,
+ -0.26044508814811707,
+ -0.17640677094459534,
+ 0.08885043859481812,
+ -0.13468852639198303,
+ -0.1336401402950287,
+ 0.1431007981300354,
+ 0.2381916046142578,
+ -0.07490891218185425,
+ -0.19155317544937134,
+ 0.14838701486587524,
+ -0.12695732712745667,
+ 0.24742859601974487,
+ 0.0046156346797943115,
+ -0.2992141842842102,
+ 0.29776161909103394,
+ 0.0972016304731369,
+ 0.10580012202262878,
+ 0.26446789503097534,
+ 0.16406361758708954,
+ -0.055416882038116455,
+ -0.20351389050483704,
+ -0.03954155743122101,
+ -0.26561057567596436,
+ -0.27628350257873535,
+ 0.2028156816959381,
+ 0.2297375500202179,
+ 0.08797425031661987,
+ -0.03360176086425781,
+ -0.13397760689258575,
+ 0.2850711941719055,
+ -0.19792741537094116,
+ 0.1492687463760376,
+ -0.03052160143852234,
+ 0.1769641637802124,
+ 0.06832396984100342,
+ 0.04587578773498535,
+ 0.045670121908187866,
+ 0.0497797429561615,
+ -0.25401586294174194,
+ -0.27270036935806274,
+ 0.30375999212265015,
+ 0.028266996145248413,
+ 0.10782834887504578,
+ 0.23599833250045776,
+ 0.20129095017910004,
+ 0.0018302202224731445,
+ -0.2838055491447449,
+ 0.11570417881011963,
+ -0.04799404740333557,
+ -0.29792794585227966,
+ -0.19283227622509003,
+ -0.29046258330345154,
+ 0.3001067042350769,
+ -0.17926695942878723,
+ -0.23025238513946533,
+ 0.2306070625782013,
+ -0.14209789037704468,
+ -0.10623662173748016,
+ -0.2882879376411438,
+ -0.18130677938461304,
+ -0.29044145345687866,
+ 0.20577067136764526,
+ 0.2760249376296997,
+ 0.1928882598876953,
+ 0.08428812026977539,
+ 0.18116572499275208,
+ 0.2813265025615692,
+ -0.24911433458328247,
+ -0.14344573020935059,
+ -0.2510707974433899,
+ 0.14786112308502197,
+ 0.10430523753166199,
+ -0.16354691982269287,
+ -0.15908266603946686,
+ 0.2622990012168884,
+ -0.11902952194213867,
+ 0.28398311138153076,
+ 0.024809569120407104,
+ -0.26887476444244385,
+ 0.136213481426239,
+ -0.09840293228626251,
+ -0.1664983034133911,
+ -0.15639379620552063,
+ 0.04190005362033844,
+ -0.08642277121543884,
+ 0.13726571202278137,
+ -0.24769264459609985,
+ 0.2185538411140442,
+ 0.16583146154880524,
+ -0.1387695074081421,
+ -0.2313077449798584,
+ -0.2477756142616272,
+ -0.162980854511261,
+ 0.2406540811061859,
+ -0.30044806003570557,
+ -0.1470295786857605,
+ -0.1820293664932251,
+ 0.12812340259552002,
+ 0.08515819907188416,
+ -0.19633300602436066,
+ 0.024285733699798584,
+ -0.06050825119018555,
+ 0.1294146180152893,
+ -0.10991770029067993,
+ -0.15476834774017334,
+ 0.11145651340484619,
+ 0.07116222381591797,
+ 0.08039575815200806,
+ -0.07251697778701782,
+ -0.09062065929174423,
+ 0.2880517840385437,
+ 0.20843753218650818,
+ -0.19082307815551758,
+ -0.08626365661621094,
+ 0.2732781767845154,
+ -0.23683488368988037,
+ -0.19915235042572021,
+ -0.050518691539764404,
+ -0.26342281699180603,
+ 0.03811168670654297,
+ -0.11107313632965088,
+ -0.190108060836792,
+ 0.12623602151870728,
+ -0.2806686758995056,
+ 0.059470027685165405,
+ -0.111683189868927,
+ -0.17172658443450928,
+ -0.284359872341156,
+ 0.1257929801940918,
+ -0.10832080245018005,
+ 0.023657426238059998,
+ 0.11988857388496399,
+ -0.24877578020095825,
+ 0.30027902126312256,
+ 0.2269415259361267,
+ -0.1905728578567505,
+ 0.16530543565750122,
+ -0.031603291630744934,
+ 0.22764146327972412,
+ -0.09173500537872314,
+ 0.12588322162628174,
+ -0.2751828730106354,
+ -0.272865891456604,
+ 0.27175843715667725,
+ -0.15972822904586792,
+ -0.28719836473464966,
+ -0.29276710748672485,
+ 0.005811601877212524,
+ -0.02691720426082611,
+ 0.20401450991630554,
+ 0.017331048846244812,
+ 0.08880937099456787,
+ 0.3064882755279541,
+ -0.2924579381942749,
+ 0.1200748085975647,
+ -0.2625369429588318,
+ -0.025623321533203125,
+ -0.17495714128017426,
+ -0.18067732453346252,
+ 0.16905272006988525,
+ 0.28564512729644775,
+ 0.26657384634017944,
+ 0.025279343128204346,
+ -0.2820938229560852,
+ 0.30038097500801086,
+ -0.05672624707221985,
+ -0.2068489044904709,
+ -0.08301855623722076,
+ -0.25893479585647583,
+ -0.06510108709335327,
+ -0.263212114572525,
+ 0.2022804319858551,
+ -0.28820091485977173,
+ 0.1898059844970703,
+ -0.026919841766357422,
+ -0.0748562216758728,
+ -0.2864542007446289,
+ -0.0790712982416153,
+ -0.27689141035079956,
+ 0.2258736789226532,
+ 0.22007787227630615,
+ 0.2915579080581665,
+ 0.1129198670387268,
+ 0.236066073179245,
+ -0.11463609337806702,
+ 0.3070628046989441,
+ -0.054356545209884644,
+ -0.138810396194458,
+ -0.17369717359542847,
+ -0.26540976762771606,
+ -0.0992523729801178,
+ -0.16077393293380737,
+ -0.2659062147140503,
+ 0.23369590938091278,
+ 0.1559521108865738,
+ -0.036534614861011505,
+ -0.22047019004821777,
+ -0.2736009955406189,
+ -0.015168634243309498,
+ -0.018318772315979004,
+ -0.1773865520954132,
+ -0.1951640546321869,
+ 0.00162506103515625,
+ -0.27259892225265503,
+ -0.18496769666671753,
+ -0.21727320551872253,
+ 0.057637929916381836,
+ -0.19304564595222473,
+ 0.09838269650936127,
+ -0.24790364503860474,
+ 0.044074565172195435,
+ -0.29300403594970703,
+ -0.2761368751525879,
+ -0.1834152340888977,
+ 0.07898491621017456,
+ 0.1711110770702362,
+ 0.3030620813369751,
+ -0.16592790186405182,
+ -0.29495304822921753,
+ -0.0401684045791626,
+ -0.017479440197348595,
+ -0.16624152660369873,
+ -0.2646152377128601,
+ -0.2909163236618042,
+ 0.208204448223114,
+ 0.26419442892074585,
+ 0.16012978553771973,
+ -0.2232421636581421,
+ 0.2776701748371124,
+ 0.11723995208740234,
+ 0.3057714104652405,
+ -0.030190087854862213,
+ -0.08340387791395187,
+ 0.28583335876464844,
+ 0.23647776246070862,
+ -0.23159581422805786,
+ -0.2986973822116852,
+ -0.2678784132003784,
+ -0.27234670519828796,
+ 0.25592392683029175,
+ -0.009803324937820435,
+ -0.1788291186094284,
+ -0.21276485919952393,
+ -0.22134801745414734,
+ -0.1545008420944214,
+ 0.1315048336982727,
+ -0.19360211491584778,
+ 0.02529376745223999,
+ 0.13610613346099854,
+ -0.24351108074188232,
+ 0.12395483255386353,
+ 0.15132202208042145,
+ -0.29497408866882324,
+ -0.22179216146469116,
+ -0.06529676914215088,
+ -0.20331454277038574,
+ -0.06320635229349136,
+ -0.21765092015266418,
+ -0.1490495502948761,
+ 0.21702387928962708,
+ -0.10238799452781677,
+ -0.2990029752254486,
+ 0.11296360194683075,
+ -0.0986279845237732,
+ 0.1901392936706543,
+ -0.1343483328819275,
+ -0.12747722864151,
+ -0.05915665626525879,
+ 0.1826099157333374,
+ 0.008718583732843399,
+ -0.24124634265899658,
+ 0.2531973123550415,
+ 0.14277541637420654,
+ 0.20188558101654053,
+ 0.09238280355930328,
+ 0.2139170616865158,
+ 0.1386181265115738,
+ 0.20953050255775452,
+ 0.25134479999542236,
+ 0.24320870637893677,
+ 0.28815001249313354,
+ 0.011203855276107788,
+ -0.2735341489315033,
+ 0.11165997385978699,
+ -0.0966273844242096,
+ -0.28861287236213684,
+ 0.305817186832428,
+ -0.20200052857398987,
+ 0.004813745617866516,
+ 0.03337915241718292,
+ -0.21081072092056274,
+ 0.22484806180000305,
+ -0.272866427898407,
+ -0.15718746185302734,
+ 0.08710864186286926,
+ -0.13120833039283752,
+ 0.3080137372016907,
+ 0.29188913106918335,
+ 0.2170596867799759,
+ 0.22477790713310242,
+ 0.008870959281921387,
+ 0.11893080174922943,
+ 0.2572789192199707,
+ -0.058104127645492554,
+ -0.16014915704727173,
+ 0.09598290175199509,
+ -0.22288133203983307,
+ 0.22019433975219727,
+ -0.03970763832330704,
+ -0.20039202272891998,
+ 0.2887122929096222,
+ 0.19794107973575592,
+ -0.10846003890037537,
+ 0.03442130982875824,
+ 0.2713475823402405,
+ -0.1428305059671402,
+ 0.10245969891548157,
+ -0.2092781662940979,
+ -0.14423209428787231,
+ 0.28197115659713745,
+ -0.2283855676651001,
+ -0.13368403911590576,
+ 0.28436022996902466,
+ -0.06239980459213257,
+ 0.08175188302993774,
+ -0.1920602023601532,
+ -0.18173164129257202,
+ -0.11960677802562714,
+ 0.23804324865341187,
+ -0.29571542143821716,
+ -0.06846380233764648,
+ -0.11095386743545532,
+ 0.21408547461032867,
+ 0.26396694779396057,
+ 0.036647021770477295,
+ -0.2865588665008545,
+ -0.08828875422477722,
+ 0.15702521800994873,
+ -0.14005827903747559,
+ 0.19962194561958313,
+ 0.0012722909450531006,
+ 0.25047048926353455,
+ 0.11199717968702316,
+ 0.12816733121871948,
+ -0.027236564084887505,
+ 0.21782642602920532,
+ -0.059613049030303955,
+ -0.13861578702926636,
+ 0.06791582703590393,
+ 0.21713683009147644,
+ 0.27446845173835754,
+ -0.2287798523902893,
+ 0.11132973432540894,
+ 0.06143605709075928,
+ 0.09511128067970276,
+ -0.2582549452781677,
+ 0.15907901525497437,
+ 0.08108006417751312,
+ -0.28873029351234436,
+ -0.19554319977760315,
+ 0.2483537197113037,
+ -0.1235150694847107,
+ -0.24649524688720703,
+ 0.0018846914172172546,
+ 0.2115810513496399,
+ -0.11310490220785141,
+ 0.2626889944076538,
+ 0.18482956290245056,
+ 0.19688183069229126,
+ 0.09794759750366211,
+ -0.2949812114238739,
+ 0.19163690507411957,
+ 0.2585347890853882,
+ -0.18873348832130432,
+ -0.2523939609527588,
+ 0.0727224349975586,
+ 0.1640845388174057,
+ -0.2591651380062103,
+ -0.13865122199058533,
+ -0.2752084732055664,
+ -0.2943422496318817,
+ -0.14195799827575684,
+ -0.2674729824066162,
+ -0.2956135869026184,
+ -0.23555660247802734,
+ 0.08119292557239532,
+ 0.126388818025589,
+ 0.0777045488357544,
+ 0.02038252353668213,
+ 0.0077272504568099976,
+ 0.17883777618408203,
+ 0.25975602865219116,
+ 0.0826815664768219,
+ 0.2457730770111084,
+ 0.17596739530563354,
+ 0.26652055978775024,
+ 0.1226232647895813,
+ -0.2639436721801758,
+ 0.26877719163894653,
+ 0.014192398637533188,
+ -0.10265499353408813,
+ -0.11248016357421875,
+ 0.059749603271484375,
+ -0.1472945660352707,
+ -0.13573023676872253,
+ -0.18250170350074768,
+ 0.18567830324172974,
+ -0.001191847026348114,
+ 0.25874239206314087,
+ -0.2286243438720703,
+ 0.2226812094449997,
+ 0.024922311305999756,
+ 0.29925161600112915,
+ 0.24866631627082825,
+ 0.02424132078886032,
+ -0.13148266077041626,
+ -0.26629185676574707,
+ 0.17608825862407684,
+ -0.22575098276138306,
+ -0.08844371885061264,
+ 0.30282193422317505,
+ -0.12328720092773438,
+ -0.21013861894607544,
+ 0.2761949300765991,
+ -0.21260154247283936,
+ -0.19451013207435608,
+ -0.0799233615398407,
+ -0.26874619722366333,
+ -0.22346419095993042,
+ 0.24638766050338745,
+ -0.18382105231285095,
+ 0.23305583000183105,
+ 0.2013934850692749,
+ 0.19791367650032043,
+ 0.04041340947151184,
+ -0.05284243822097778,
+ -0.2113857865333557,
+ 0.04271203279495239,
+ 0.3066563010215759,
+ 0.15392470359802246,
+ 0.29085391759872437,
+ -0.2457946538925171,
+ 0.03970855474472046,
+ -0.06630319356918335,
+ -0.04895830154418945,
+ -0.05092844367027283,
+ 0.30881547927856445,
+ 0.21579653024673462,
+ 0.06933152675628662,
+ 0.2588755488395691,
+ -0.2514849901199341,
+ -0.022665977478027344,
+ 0.12210521101951599,
+ 0.2748827338218689,
+ 0.09013208746910095,
+ -0.23750737309455872,
+ -0.21720939874649048,
+ 0.27845773100852966,
+ -0.161017045378685,
+ -0.16002295911312103,
+ 0.19523349404335022,
+ -0.023420415818691254,
+ 0.2745208740234375,
+ 0.007143616676330566,
+ 0.13067549467086792,
+ -0.10267988592386246,
+ 0.09143024682998657,
+ -0.20010912418365479,
+ 0.2776133418083191,
+ 0.2910430431365967,
+ 0.25636404752731323,
+ -0.0041284263134002686,
+ -0.2337796986103058,
+ 0.06565946340560913,
+ -0.29580092430114746,
+ 0.10530102252960205,
+ 0.2971858084201813,
+ -0.21138805150985718,
+ 0.009650260210037231,
+ 0.01820245385169983,
+ 0.021338410675525665,
+ -0.169408917427063,
+ 0.11234064400196075,
+ 0.12226907908916473,
+ 0.15537682175636292,
+ -0.2409861981868744,
+ -0.07645606994628906,
+ 0.2885165214538574,
+ 0.017598867416381836,
+ 0.03034311532974243,
+ 0.2521209716796875,
+ -0.0031832456588745117,
+ -0.14451837539672852,
+ 0.30000442266464233,
+ -0.25985389947891235,
+ -0.28429320454597473,
+ -0.2451610118150711,
+ -0.20084939897060394,
+ -0.14808306097984314,
+ 0.10628366470336914,
+ -0.293438196182251,
+ -0.2770113945007324,
+ 0.2324826419353485,
+ -0.26727792620658875,
+ 0.21376824378967285,
+ 0.21574825048446655,
+ -0.0705752968788147,
+ 0.16485106945037842,
+ 0.2527289390563965,
+ -0.2800511121749878,
+ -0.16293646395206451,
+ 0.2968953251838684,
+ 0.05984139442443848,
+ -0.13988390564918518,
+ -0.2721349000930786,
+ 0.027183279395103455,
+ 0.2410033941268921,
+ 0.2007180154323578,
+ 0.08032691478729248,
+ -0.10078869760036469,
+ 0.03617525100708008,
+ 0.07352016866207123,
+ 0.30432459712028503,
+ -0.21803949773311615,
+ 0.29211747646331787,
+ -0.04889420419931412,
+ 0.11119239032268524,
+ -0.2081407606601715,
+ -0.036100514233112335,
+ -0.2886328101158142,
+ -0.11504742503166199,
+ -0.1670575737953186,
+ 0.24781441688537598,
+ -0.21419376134872437,
+ 0.15576975047588348,
+ 0.1807907670736313,
+ -0.25885671377182007,
+ 0.28562355041503906,
+ -0.03472393751144409,
+ 0.08838063478469849,
+ -0.18995235860347748,
+ -0.26098453998565674,
+ 0.12893164157867432,
+ 0.23177242279052734,
+ 0.07389097660779953,
+ 0.3055925965309143,
+ 0.2420869916677475,
+ 0.036690473556518555,
+ 0.009631901979446411,
+ 0.16894033551216125,
+ 0.22272685170173645,
+ 0.2803145945072174,
+ -0.2816457450389862,
+ 0.28645187616348267,
+ -0.18219898641109467,
+ -0.08702287077903748,
+ 0.16160401701927185,
+ -0.08271884173154831,
+ 0.057611286640167236,
+ -0.2907969355583191,
+ 0.181019127368927,
+ 0.2627142667770386,
+ 0.30334222316741943,
+ 0.2517582178115845,
+ 0.17889761924743652,
+ -0.143625408411026,
+ -0.24011242389678955,
+ -0.14841201901435852,
+ 0.22723686695098877,
+ 0.28593865036964417,
+ -0.2208562195301056,
+ -0.16326573491096497,
+ -0.25104379653930664,
+ 0.17293617129325867,
+ 0.2303532063961029,
+ 0.20403435826301575,
+ -0.05982816219329834,
+ -0.28302425146102905,
+ 0.1350231170654297,
+ 0.06605708599090576,
+ -0.06685149669647217,
+ 0.30182063579559326,
+ -0.026057502254843712,
+ -0.11682772636413574,
+ 0.2131018489599228,
+ 0.1155814528465271,
+ -0.0006755292415618896,
+ -0.012052446603775024,
+ -0.033601313829422,
+ 0.061050429940223694,
+ 0.2862922251224518,
+ -0.29738539457321167,
+ -0.14838051795959473,
+ 0.08573344349861145,
+ 0.10267406702041626,
+ 0.16148152947425842,
+ 0.303796648979187,
+ -0.27537214756011963,
+ 0.2912448048591614,
+ -0.1957411766052246,
+ 0.16805732250213623,
+ 0.02136152982711792,
+ 0.25428158044815063,
+ -0.2437339425086975,
+ -0.2815794348716736,
+ 0.3101401925086975,
+ 0.17564545571804047,
+ -0.2465800642967224,
+ 0.1926255226135254,
+ -0.0379902720451355,
+ 0.11917871236801147,
+ -0.05980396270751953,
+ 0.25221943855285645,
+ 0.1606590896844864,
+ -0.29205000400543213,
+ 0.24424421787261963,
+ -0.18954885005950928,
+ -0.01650368422269821,
+ 0.17009630799293518,
+ -0.07058905065059662,
+ 0.0758877843618393,
+ 0.1264784336090088,
+ -0.22976696491241455,
+ -0.29076144099235535,
+ -0.28517448902130127,
+ -0.18289333581924438,
+ -0.05598729848861694,
+ -0.10504552721977234,
+ -0.1818367838859558,
+ 0.03248146176338196,
+ 0.09730517864227295,
+ -0.1459660530090332,
+ 0.15587785840034485,
+ -0.1617031991481781,
+ -0.29700079560279846,
+ -0.01952800154685974,
+ -0.2044575810432434,
+ -0.16212832927703857,
+ 0.013594746589660645,
+ -0.1066603809595108,
+ 0.27644145488739014,
+ -0.1718457043170929,
+ 0.2906346023082733,
+ 0.22752216458320618,
+ 0.08970850706100464,
+ -0.2107260525226593,
+ -0.020339369773864746,
+ 0.21433216333389282,
+ -0.23226343095302582,
+ 0.014223232865333557,
+ -0.07931873947381973,
+ -0.15188661217689514,
+ -0.2542709708213806,
+ -0.10474193096160889,
+ 0.2616150379180908,
+ -0.14813828468322754,
+ 0.3071281313896179,
+ -0.26902446150779724,
+ 0.2680554986000061,
+ 0.2919021248817444,
+ -0.09989893436431885,
+ -0.12937915325164795,
+ 0.1863495409488678,
+ 0.10407999157905579,
+ -0.1900196075439453,
+ -0.12362003326416016,
+ 0.2138058841228485,
+ 0.28001850843429565,
+ -0.03669807314872742,
+ -0.05731010437011719,
+ 0.13450366258621216,
+ 0.017627984285354614,
+ 0.0492144376039505,
+ -0.16657766699790955,
+ -0.0039000213146209717,
+ -0.19464361667633057,
+ -0.05982530117034912,
+ 0.06719830632209778,
+ 0.17369669675827026,
+ 0.3011808395385742,
+ 0.30686092376708984,
+ 0.03181546926498413,
+ 0.13490524888038635,
+ 0.08647862076759338,
+ 0.22028040885925293,
+ 0.2308923900127411,
+ 0.3032706379890442,
+ 0.042076289653778076,
+ -0.2551395893096924,
+ 0.2677597999572754,
+ 0.24340879917144775,
+ 0.15219193696975708,
+ 0.010981500148773193,
+ -0.2317286729812622,
+ -0.06514652073383331,
+ -0.20175088942050934,
+ 0.18922492861747742,
+ -0.03963543474674225,
+ -0.24289782345294952,
+ 0.04912298917770386,
+ 0.27593153715133667,
+ 0.2605786621570587,
+ 0.21431834995746613,
+ 0.0789291262626648,
+ 0.3003454804420471,
+ -0.047706544399261475,
+ 0.2885456681251526,
+ -0.027394384145736694,
+ 0.27799761295318604,
+ 0.16549277305603027,
+ -0.19628366827964783,
+ 0.07214275002479553,
+ -0.10964596271514893,
+ -0.29781490564346313,
+ -0.1509624421596527,
+ -0.1934286653995514,
+ 0.21834713220596313,
+ -0.28380462527275085,
+ 0.27440154552459717,
+ 0.028600424528121948,
+ -0.17612046003341675,
+ 0.011876925826072693,
+ -0.048523470759391785,
+ 0.3017808198928833,
+ -0.04973381757736206,
+ -0.2632104158401489,
+ -0.2670968770980835,
+ 0.1412518471479416,
+ 0.1321260929107666,
+ 0.17002415657043457,
+ -0.013424359261989594,
+ -0.22077591717243195,
+ 0.10699564218521118,
+ -0.20619738101959229,
+ 0.07968932390213013,
+ 0.14469093084335327,
+ -0.19219425320625305,
+ 0.0651790127158165,
+ 0.30376818776130676,
+ -0.1985762119293213,
+ 0.2507835626602173,
+ 0.24020415544509888,
+ 0.14653408527374268,
+ 0.13563790917396545,
+ -0.13914859294891357,
+ 0.20363008975982666,
+ 0.021572917699813843,
+ 0.2740650475025177,
+ -0.14983493089675903,
+ -0.1437894105911255,
+ 0.24976474046707153,
+ 0.27936428785324097,
+ 0.2305181920528412,
+ 0.1786191463470459,
+ -0.1918163299560547,
+ 0.19193464517593384,
+ -0.08555218577384949,
+ -0.08928091824054718,
+ 0.021698951721191406,
+ -0.09183788299560547,
+ 0.2844310402870178,
+ 0.23014461994171143,
+ 0.30741167068481445,
+ -0.20806485414505005,
+ 0.07815442979335785,
+ -0.2207718938589096,
+ -0.08341246843338013,
+ <!-- Embedded interactive point-cloud figure data (y/z coordinate arrays; several thousand raw float values) omitted. -->
+ -0.0938226580619812,
+ 0.26483017206192017,
+ -0.17312473058700562,
+ -0.12607020139694214,
+ -0.3524637222290039,
+ -0.21178768575191498,
+ -0.030431777238845825,
+ 0.47166410088539124,
+ -0.12606698274612427,
+ -0.3828408718109131,
+ -0.011402487754821777,
+ 0.21507027745246887,
+ -0.17560315132141113,
+ 0.0034070611000061035,
+ 0.4748570919036865,
+ 0.24067829549312592,
+ -0.10142332315444946,
+ 0.3544142544269562,
+ -0.208137646317482,
+ -0.2969003915786743,
+ 0.4495059847831726,
+ -0.3869253098964691,
+ -0.29128605127334595,
+ 0.3066278398036957,
+ 0.1922004520893097,
+ -0.2819114327430725,
+ -0.31293731927871704,
+ -0.19200003147125244,
+ -0.3224720358848572,
+ -0.32925093173980713,
+ -0.025175750255584717,
+ 0.1991978883743286,
+ 0.22850221395492554,
+ -0.19583269953727722,
+ -0.45119547843933105,
+ 0.36702582240104675,
+ -0.33238670229911804,
+ 0.35643938183784485,
+ 0.09401211142539978,
+ 0.043449562042951584,
+ -0.3312070667743683,
+ 0.46616798639297485,
+ 0.0642109140753746,
+ 0.041529178619384766,
+ -0.012845993041992188,
+ -0.32958492636680603,
+ 0.320054292678833,
+ -0.08113188296556473,
+ 0.377920925617218,
+ -0.18711161613464355,
+ 0.08563363552093506,
+ -0.10407447814941406,
+ -0.3346400558948517,
+ -0.18581387400627136,
+ 0.013581126928329468,
+ -0.37206417322158813,
+ -0.2899797260761261,
+ 0.1879265308380127,
+ -0.25566157698631287,
+ 0.3326411843299866,
+ -0.33312779664993286,
+ 0.30739825963974,
+ -0.3133789896965027,
+ -0.12370729446411133,
+ -0.04049065709114075,
+ 0.05323491990566254,
+ -0.09170690178871155,
+ -0.3371933698654175,
+ 0.00494404137134552,
+ 0.31149736046791077,
+ 0.1480773389339447,
+ -0.09010946750640869,
+ 0.3324021100997925,
+ 0.3356506824493408,
+ 0.1865098774433136,
+ 0.2370595932006836,
+ -0.10108736157417297,
+ -0.1389942765235901,
+ -0.3163379728794098,
+ -0.42088553309440613,
+ 0.2025488018989563,
+ 0.45752212405204773,
+ -0.23103266954421997,
+ 0.16552680730819702,
+ 0.435402512550354,
+ 0.3780701756477356,
+ -0.32478952407836914,
+ -0.116495281457901,
+ 0.11180157214403152,
+ -0.3986685872077942,
+ 0.30642300844192505,
+ 0.13475584983825684,
+ -0.02797083929181099,
+ -0.1316475123167038,
+ -0.11486339569091797,
+ 0.1370193213224411,
+ 0.18661785125732422,
+ -0.16933996975421906,
+ 0.4562901556491852,
+ 0.15963049232959747,
+ -0.03714238852262497,
+ -0.20973774790763855,
+ 0.41076549887657166,
+ 0.060917824506759644,
+ 0.16447263956069946,
+ -0.49161940813064575,
+ -0.11149098724126816,
+ -0.02752375602722168,
+ -0.1715763807296753,
+ -0.06376606225967407,
+ -0.49086540937423706,
+ -0.33555489778518677,
+ 0.3313564658164978,
+ -0.007564187049865723,
+ -0.43238645792007446,
+ 0.18722522258758545,
+ -0.4937116801738739,
+ -0.2247220277786255,
+ -0.5131341218948364,
+ -0.3293994665145874,
+ -0.02469348907470703,
+ -0.39072367548942566,
+ -0.07757407426834106,
+ -0.2896813154220581,
+ 0.20765864849090576,
+ 0.2621913552284241,
+ -0.2568309009075165,
+ -0.09356223791837692,
+ 0.41355645656585693,
+ -0.04709649831056595,
+ 0.02536749839782715,
+ 0.39621657133102417,
+ -0.10123208910226822,
+ 0.26575276255607605,
+ -0.30151763558387756,
+ -0.48509758710861206,
+ -0.27327102422714233,
+ -0.059242069721221924,
+ -0.3305254578590393,
+ -0.1407454013824463,
+ 0.09570670127868652,
+ -0.07987716048955917,
+ -0.43737202882766724,
+ 0.38803017139434814,
+ 0.11366918683052063,
+ -0.5162310600280762,
+ -0.2367139607667923,
+ -0.504593014717102,
+ 0.003820061683654785,
+ 0.4445553421974182,
+ -0.10015538334846497,
+ 0.10903811454772949,
+ 0.23090291023254395,
+ 0.3505163788795471,
+ -0.3227147161960602,
+ -0.24202370643615723,
+ -0.10229253768920898,
+ -0.039360761642456055,
+ -0.04416963458061218,
+ 0.05063813924789429,
+ -0.22050414979457855,
+ -0.2737930417060852,
+ -0.4436348080635071,
+ -0.3756384253501892,
+ 0.13765288889408112,
+ -0.3308526873588562,
+ 0.45573508739471436,
+ 0.1738450527191162,
+ -0.48113495111465454,
+ 0.03413867950439453,
+ 0.38454923033714294,
+ 0.0029737651348114014,
+ -0.2600530683994293,
+ -0.32433584332466125,
+ 0.17192256450653076,
+ -0.3109567165374756,
+ 0.38833388686180115,
+ -0.2985016405582428,
+ -0.2549801170825958,
+ 0.0744665265083313,
+ -0.18391668796539307,
+ 0.09780049324035645,
+ 0.05636000633239746,
+ -0.005338553339242935,
+ -0.09724518656730652,
+ -0.12248682975769043,
+ -0.06609395146369934,
+ 0.40658536553382874,
+ -0.39814573526382446,
+ 0.22406482696533203,
+ 0.38122043013572693,
+ -0.024497583508491516,
+ 0.16007301211357117,
+ 0.18065571784973145,
+ -0.05815846472978592,
+ -0.11975151300430298,
+ 0.3585437536239624,
+ -0.5025165677070618,
+ -0.36605140566825867,
+ 0.08273625373840332,
+ 0.37222546339035034,
+ -0.25794610381126404,
+ 0.18545836210250854,
+ -0.07316732406616211,
+ 0.07944819331169128,
+ 0.3543156385421753,
+ 0.11695349216461182,
+ 0.04045391082763672,
+ -0.3343399167060852,
+ 0.18652302026748657,
+ -0.4879131615161896,
+ 0.43056705594062805,
+ -0.04198199510574341,
+ -0.3224070966243744,
+ 0.11350744962692261,
+ -0.33614033460617065,
+ 0.030270040035247803,
+ 0.4373241662979126,
+ -0.10298135876655579,
+ -0.40560412406921387,
+ 0.44897717237472534,
+ 0.057188570499420166,
+ -0.3234870731830597,
+ -0.0685422420501709,
+ -0.2509130537509918,
+ 0.19084620475769043,
+ -0.0030492842197418213,
+ -0.08962845802307129,
+ -0.4741256535053253,
+ -0.3890271484851837,
+ -0.2176724076271057,
+ 0.40701714158058167,
+ -0.4430897831916809,
+ -0.24209600687026978,
+ 0.11915057897567749,
+ 0.23863542079925537,
+ 0.053353846073150635,
+ -0.00021582841873168945,
+ 0.28349068760871887,
+ -0.23539650440216064,
+ -0.4186658263206482,
+ 0.45575201511383057,
+ 0.18982157111167908,
+ 0.18718498945236206,
+ -0.09230321645736694,
+ 0.06488543748855591,
+ 0.06434450298547745,
+ 0.014836907386779785,
+ 0.17524120211601257,
+ -0.01603788137435913,
+ -0.1801799237728119,
+ -0.03637915849685669,
+ -0.32909709215164185,
+ 0.16121286153793335,
+ -0.23845861852169037,
+ -0.04167044162750244,
+ 0.18955719470977783,
+ -0.22363023459911346,
+ 0.3320944607257843,
+ -0.1544051468372345,
+ 0.36470913887023926,
+ -0.15528911352157593,
+ -0.2761276960372925,
+ 0.14689165353775024,
+ -0.3587600588798523,
+ 0.06965059041976929,
+ -0.33243200182914734,
+ 0.04032395780086517,
+ -0.37979856133461,
+ -0.3474797010421753,
+ 0.20892640948295593,
+ -0.18094778060913086,
+ 0.2455769181251526,
+ -0.04240139573812485,
+ 0.025172803550958633,
+ 0.0346144437789917,
+ -0.1571439653635025,
+ -0.14909163117408752,
+ 0.33319544792175293,
+ 0.4548363983631134,
+ -0.2832464575767517,
+ 0.06226307153701782,
+ -0.33301758766174316,
+ 0.36555016040802,
+ 0.30717766284942627,
+ -0.4275902211666107,
+ -0.1927623748779297,
+ 0.13841363787651062,
+ -0.2101191282272339,
+ -0.49692660570144653,
+ -0.49606600403785706,
+ -0.1787235140800476,
+ -0.41229772567749023,
+ -0.33512866497039795,
+ 0.38829031586647034,
+ 0.044685058295726776,
+ 0.244792640209198,
+ 0.0876733809709549,
+ -0.038225919008255005,
+ 0.16205906867980957,
+ 0.16627547144889832,
+ 0.3804437518119812,
+ -0.108944833278656,
+ -0.32322531938552856,
+ 0.4336719512939453,
+ 0.257049560546875,
+ 0.001801714301109314,
+ 0.45818495750427246,
+ 0.3494638204574585,
+ -0.3258609175682068,
+ 0.18632780015468597,
+ 0.051820918917655945,
+ 0.11876308917999268,
+ 0.11025944352149963,
+ -0.5207539796829224,
+ -0.08377419412136078,
+ 0.3702765703201294,
+ -0.17588704824447632,
+ 0.18359214067459106,
+ 0.10284435749053955,
+ 0.34303492307662964,
+ -0.17753702402114868,
+ -0.33471351861953735,
+ -0.21240219473838806,
+ 0.27724260091781616,
+ -0.04228407144546509,
+ -0.09558568894863129,
+ -0.2661775052547455,
+ -0.10999122262001038,
+ -0.0033832713961601257,
+ 0.323334276676178,
+ -0.4817836880683899,
+ -0.09002929925918579,
+ 0.4249343276023865,
+ 0.3586275577545166,
+ 0.12896490097045898,
+ -0.015457842499017715,
+ 0.15734465420246124,
+ -0.13622860610485077,
+ -0.16548240184783936,
+ 0.45101428031921387,
+ -0.4724360406398773,
+ 0.19203922152519226,
+ 0.013224005699157715,
+ -0.08032184839248657,
+ -0.1501707136631012,
+ 0.020480215549468994,
+ -0.46621212363243103,
+ -0.39540427923202515,
+ -0.5194336175918579,
+ 0.2619999647140503,
+ -0.4513359069824219,
+ -0.3765549063682556,
+ 0.16294972598552704,
+ 0.09807252883911133,
+ -0.09806975722312927,
+ 0.23368880152702332,
+ 0.34980568289756775,
+ -0.29610592126846313,
+ 0.3587847948074341,
+ -0.33007293939590454,
+ 0.08327680826187134,
+ -0.3081909120082855,
+ 0.45084285736083984,
+ -0.3484094738960266,
+ -0.07993894815444946,
+ 0.2246391773223877,
+ 0.4362237751483917,
+ -0.06152373552322388,
+ 0.23202115297317505,
+ -0.07429225742816925,
+ 0.32278719544410706,
+ -0.5149596929550171,
+ -0.3356299102306366,
+ -0.3338538408279419,
+ -0.39632487297058105,
+ -0.4087826907634735,
+ 0.1579366773366928,
+ 0.0608748197555542,
+ -0.40108370780944824,
+ 0.1225874125957489,
+ 0.2643453776836395,
+ -0.07603077590465546,
+ -0.017913520336151123,
+ -0.1418527364730835,
+ -0.003196224570274353,
+ -0.4341384172439575,
+ -0.5050723552703857,
+ 0.2084275484085083,
+ 0.23578989505767822,
+ 0.13673895597457886,
+ 0.3754565119743347,
+ 0.13844378292560577,
+ -0.2358112931251526,
+ 0.19698697328567505,
+ -0.04921853542327881,
+ 0.18872451782226562,
+ -0.3337208032608032,
+ 0.08133041858673096,
+ -0.03364449739456177,
+ 0.3112817108631134,
+ -0.24350976943969727,
+ 0.4733552932739258,
+ -0.023983001708984375,
+ 0.2580658197402954,
+ 0.419927179813385,
+ -0.08856309950351715,
+ 0.43856626749038696,
+ -0.3679438829421997,
+ -0.3337635099887848,
+ -0.3534160256385803,
+ -0.4276576042175293,
+ 0.0358620285987854,
+ -0.33005237579345703,
+ -0.20199525356292725,
+ 0.2687959671020508,
+ 0.1892348974943161,
+ 0.1890760064125061,
+ 0.45439445972442627,
+ 0.20955118536949158,
+ 0.07434619218111038,
+ -0.33436381816864014,
+ -0.14534151554107666,
+ 0.3961365818977356,
+ -0.28070345520973206,
+ -0.36227166652679443,
+ -0.08820684254169464,
+ 0.4038889408111572,
+ 0.1857542246580124,
+ -0.1650257408618927,
+ -0.3350149989128113,
+ 0.13748493790626526,
+ 0.13349738717079163,
+ -0.5150901079177856,
+ -0.1736105978488922,
+ 0.34823620319366455,
+ -0.5128756761550903,
+ -0.04635262489318848,
+ 0.37661346793174744,
+ -0.5119277834892273,
+ 0.1271466314792633,
+ 0.2656747102737427,
+ -0.297071635723114,
+ 0.20143014192581177,
+ 0.02338126301765442,
+ -0.20249760150909424,
+ 0.14966368675231934,
+ -0.13600772619247437,
+ 0.28253981471061707,
+ 0.4719078838825226,
+ 0.14632433652877808,
+ 0.1044948399066925,
+ -0.1453947126865387,
+ -0.43095409870147705,
+ 0.15424519777297974,
+ -0.3877335488796234,
+ 0.3415986895561218,
+ 0.33737218379974365,
+ -0.2856649160385132,
+ -0.1726372390985489,
+ 0.10138112306594849,
+ -0.03193202614784241,
+ -0.3332056999206543,
+ 0.17147532105445862,
+ -0.32846081256866455,
+ -0.2557714283466339,
+ 0.38694626092910767,
+ 0.46761298179626465,
+ 0.04349081590771675,
+ 0.05119366943836212,
+ -0.3365659713745117,
+ -0.22156426310539246,
+ 0.3889433741569519,
+ 0.29667967557907104,
+ 0.25246453285217285,
+ 0.06894709169864655,
+ 0.266440749168396,
+ -0.0074433982372283936,
+ -0.3451152443885803,
+ -0.5116890668869019,
+ 0.44944775104522705,
+ -0.26356399059295654,
+ 0.4536343514919281,
+ -0.1820460557937622,
+ -0.4478035271167755,
+ 0.18132972717285156,
+ -0.06386503577232361,
+ 0.1743457317352295,
+ 0.17693427205085754,
+ 0.2522338628768921,
+ -0.0017074346542358398,
+ -0.09091675281524658,
+ 0.31719833612442017,
+ -0.4285769760608673,
+ -0.32805997133255005,
+ -0.10235506296157837,
+ 0.43397819995880127,
+ -0.17067554593086243,
+ 0.4640854597091675,
+ 0.1683785617351532,
+ 0.32070475816726685,
+ 0.2628011107444763,
+ 0.3596869707107544,
+ 0.0012720972299575806,
+ -0.008676618337631226,
+ -0.3307017982006073,
+ -0.28237998485565186,
+ 0.30500343441963196,
+ -0.1948849856853485,
+ -0.5205146670341492,
+ 0.10977692902088165,
+ -0.42978471517562866,
+ 0.31505724787712097,
+ -0.2270205318927765,
+ 0.25955456495285034,
+ 0.05351978540420532,
+ -0.04220937192440033,
+ -0.09907209128141403,
+ -0.45671573281288147,
+ 0.24118304252624512,
+ -0.04862123727798462,
+ 0.39656177163124084,
+ 0.2984580099582672,
+ -0.06372830271720886,
+ -0.42343899607658386,
+ -0.04623696208000183,
+ 0.02136009931564331,
+ -0.5009866952896118,
+ 0.12364298105239868,
+ -0.26052623987197876,
+ -0.20401710271835327,
+ 0.13246434926986694,
+ 0.0710906833410263,
+ -0.4485633671283722,
+ -0.2667575180530548,
+ -0.21406465768814087,
+ 0.23467671871185303,
+ -0.15710818767547607,
+ -0.20991359651088715,
+ 0.09237483143806458,
+ 0.3779628276824951,
+ -0.45666512846946716,
+ -0.05794752389192581,
+ -0.3036426305770874,
+ 0.23568931221961975,
+ 0.019945085048675537,
+ -0.11999969929456711,
+ 0.3459344506263733,
+ 0.4282434284687042,
+ 0.44057074189186096,
+ 0.07613474130630493,
+ -0.03111976385116577,
+ -0.3185015320777893,
+ -0.31626343727111816,
+ 0.47476011514663696,
+ 0.2914786636829376,
+ 0.45056596398353577,
+ 0.19172292947769165,
+ 0.25761279463768005,
+ 0.1810881644487381,
+ 0.4153740406036377,
+ -0.35730504989624023,
+ 0.39813369512557983,
+ 0.20463716983795166,
+ 0.1889776587486267,
+ 0.2884729504585266,
+ -0.3518694341182709,
+ -0.0077787358313798904,
+ 0.3642667829990387,
+ 0.1312774419784546,
+ -0.09882885217666626,
+ 0.06430048495531082,
+ -0.008349418640136719,
+ 0.18555065989494324,
+ 0.0904773473739624,
+ -0.36420658230781555,
+ -0.06890690326690674,
+ -0.31818580627441406,
+ -0.4861586093902588,
+ 0.11160773038864136,
+ -0.055007100105285645,
+ 0.3994975686073303,
+ -0.3329542279243469,
+ -0.3335893154144287,
+ 0.07788383960723877,
+ -0.32981258630752563,
+ -0.04837515950202942,
+ -0.17346826195716858,
+ -0.07487696409225464,
+ -0.12542569637298584,
+ 0.46798691153526306,
+ 0.03853708505630493,
+ 0.3602956533432007,
+ 0.13778597116470337,
+ 0.47131478786468506,
+ -0.13071121275424957,
+ -0.1499461829662323,
+ 0.4611659049987793,
+ -0.14234250783920288,
+ 0.10602176189422607,
+ 0.3157120943069458,
+ 0.09155386686325073,
+ 0.43880000710487366,
+ -0.1472727656364441,
+ -0.2886834144592285,
+ 0.21765094995498657,
+ 0.2950100302696228,
+ 0.36636072397232056,
+ 0.10720198601484299,
+ -0.09559866786003113,
+ -0.29949164390563965,
+ 0.4010311961174011,
+ 0.4243233799934387,
+ 0.09367507696151733,
+ 0.12225483357906342,
+ 0.24786722660064697,
+ -0.47731515765190125,
+ 0.035464487969875336,
+ -0.2573383152484894,
+ 0.266953706741333,
+ 0.20179885625839233,
+ -0.332782506942749,
+ 0.1863514631986618,
+ -0.06306761503219604,
+ 0.08784259855747223,
+ -0.4703163504600525,
+ 0.41682901978492737,
+ -0.5193461179733276,
+ 0.20993733406066895,
+ -0.01765495538711548,
+ 0.07451547682285309,
+ 0.12596994638442993,
+ -0.16674113273620605,
+ -0.30080628395080566,
+ 0.3960128128528595,
+ 0.015452533960342407,
+ -0.22408375144004822,
+ 0.08692508935928345,
+ 0.3355293869972229,
+ 0.18642956018447876,
+ -0.2562752962112427,
+ -0.13831742107868195,
+ 0.46946805715560913,
+ 0.39271822571754456,
+ -0.22902929782867432,
+ 0.18852382898330688,
+ -0.4710519313812256,
+ 0.279734343290329,
+ -0.13382428884506226,
+ 0.025120198726654053,
+ -0.41947734355926514,
+ -0.34370025992393494,
+ 0.2735026776790619,
+ 0.1236647516489029,
+ -0.3355169892311096,
+ 0.28096187114715576,
+ 0.18989664316177368,
+ -0.03566834330558777,
+ 0.030119597911834717,
+ -0.3764234483242035,
+ 0.3871634006500244,
+ -0.5162277221679688,
+ -0.06853178143501282,
+ -0.01641666889190674,
+ -0.21440356969833374,
+ -0.3194188177585602,
+ 0.20730629563331604,
+ 0.418745756149292,
+ -0.5191129446029663,
+ -0.14958231151103973,
+ -0.07886886596679688,
+ -0.15066620707511902,
+ 0.18906724452972412,
+ -0.27318117022514343,
+ -0.22135710716247559,
+ 0.1861097812652588,
+ 0.40939101576805115,
+ -0.019778624176979065,
+ -0.3052295744419098,
+ 0.4612433612346649,
+ -0.3200289309024811,
+ 0.18772929906845093,
+ 0.21217858791351318,
+ 0.4701154828071594,
+ -0.3832061290740967,
+ -0.1397850513458252,
+ -0.3023761808872223,
+ -0.46189287304878235,
+ -0.25251340866088867,
+ 0.1870899498462677,
+ -0.3692959249019623,
+ 0.2829863429069519,
+ -0.5069196224212646,
+ 0.04465314745903015,
+ 0.4408605098724365,
+ -0.13587158918380737,
+ 0.28004181385040283,
+ -0.08238977193832397,
+ 0.41451168060302734,
+ 0.2269306778907776,
+ 0.27190500497817993,
+ -0.35378676652908325,
+ 0.19104915857315063,
+ -0.49284741282463074,
+ 0.4011593163013458,
+ -0.15402209758758545,
+ 0.3845784664154053,
+ -0.26438313722610474,
+ 0.47038733959198,
+ 0.01570737361907959,
+ -0.012323945760726929,
+ -0.33531299233436584,
+ -0.04854375123977661,
+ 0.04328814148902893,
+ 0.06766880303621292,
+ 0.2337247133255005,
+ 0.3702474534511566,
+ -0.022533655166625977,
+ -0.3328014612197876,
+ 0.1159369945526123,
+ 0.12356306612491608,
+ -0.31139063835144043,
+ -0.5060071349143982,
+ 0.39662402868270874,
+ -0.170404851436615,
+ -0.12965017557144165,
+ 0.3361660838127136,
+ 0.44772809743881226,
+ -0.33208808302879333,
+ -0.20680375397205353,
+ -0.059994395822286606,
+ 0.3140166997909546,
+ -0.44769802689552307,
+ -0.42147374153137207,
+ -0.12014647573232651,
+ 0.18239045143127441,
+ -0.11474022269248962,
+ -0.44355103373527527,
+ 0.14783769845962524,
+ -0.3230065107345581,
+ -0.3279930353164673,
+ 0.3360057473182678,
+ -0.3310655653476715,
+ -0.37481915950775146,
+ -0.15439404547214508,
+ -0.045125074684619904,
+ 0.2080484926700592,
+ -0.008137598633766174,
+ -0.10325068235397339,
+ -0.09998157620429993,
+ 0.13633757829666138,
+ 0.4125988483428955,
+ -0.4742931127548218,
+ 0.12393456697463989,
+ 0.13907566666603088,
+ 0.08706748485565186,
+ -0.20425111055374146,
+ 0.18427127599716187,
+ 0.4642844796180725,
+ 0.31642472743988037,
+ -0.3574519455432892,
+ 0.06129691004753113,
+ -0.2674761116504669,
+ 0.007647395133972168,
+ -0.23537065088748932,
+ -0.4719228446483612,
+ 0.47566670179367065,
+ -0.36496976017951965,
+ 0.03768712282180786,
+ 0.33908385038375854,
+ -0.19660833477973938,
+ 0.45982837677001953,
+ 0.43713268637657166,
+ 0.1234692931175232,
+ -0.09808763861656189,
+ 0.09501722455024719,
+ 0.3175518810749054,
+ -0.038622722029685974,
+ 0.08113324642181396,
+ 0.2964439392089844,
+ -0.3864804804325104,
+ 0.3461403548717499,
+ -0.029251744970679283,
+ 0.09502202272415161,
+ 0.20216265320777893,
+ 0.46840566396713257,
+ -0.3496122658252716,
+ -0.30460357666015625,
+ -0.3351348638534546,
+ -0.3333946466445923,
+ 0.3650950789451599,
+ -0.1611209511756897,
+ 0.1075659990310669,
+ -0.4111540615558624,
+ 0.08188772201538086,
+ -0.1256915181875229,
+ -0.004905179142951965,
+ 0.39226245880126953,
+ 0.14084047079086304,
+ -0.03816226124763489,
+ 0.4215686321258545,
+ -0.30590325593948364,
+ 0.10172849893569946,
+ 0.3828660845756531,
+ -0.4274802505970001,
+ -0.16650933027267456,
+ 0.15476882457733154,
+ 0.3166550099849701,
+ -0.3163067400455475,
+ -0.3039762079715729,
+ 0.07876446843147278,
+ -0.4078870117664337,
+ -0.012058794498443604,
+ 0.4682105779647827,
+ -0.43351197242736816,
+ 0.024960994720458984,
+ -0.44204720854759216,
+ -0.3360881805419922,
+ 0.11101259291172028,
+ 0.27361804246902466,
+ -0.3323096036911011,
+ -0.39445391297340393,
+ 0.179479718208313,
+ -0.18461543321609497,
+ -0.14501875638961792,
+ -0.2322113811969757,
+ 0.3997049927711487,
+ 0.20163404941558838,
+ 0.3247454762458801,
+ -0.4055033326148987,
+ 0.3506532907485962,
+ 0.46683835983276367,
+ 0.22779560089111328,
+ -0.019567131996154785,
+ -0.5082137584686279,
+ 0.09767448902130127,
+ 0.18695849180221558,
+ -0.3240141272544861,
+ 0.10275089740753174,
+ -0.17004632949829102,
+ -0.2034032940864563,
+ 0.31217992305755615,
+ 0.38082170486450195,
+ 0.18760812282562256,
+ 0.1987628936767578,
+ 0.39546146988868713,
+ 0.3067251741886139,
+ 0.058445125818252563,
+ 0.16358906030654907,
+ -0.12392090260982513,
+ 0.4114682078361511,
+ -0.06725926697254181,
+ 0.1455976665019989,
+ -0.5139069557189941,
+ 0.028639376163482666,
+ 0.14824122190475464,
+ -0.2814473509788513,
+ -0.203346848487854,
+ -0.3378210663795471,
+ -0.0070442259311676025,
+ -0.0752948522567749,
+ -0.11322677135467529,
+ -0.2901860475540161,
+ -0.24285997450351715,
+ 0.36886194348335266,
+ -0.45272669196128845,
+ -0.34788429737091064,
+ -0.08768036961555481,
+ -0.04262152314186096,
+ 0.43636074662208557,
+ 0.07730786502361298,
+ 0.31231290102005005,
+ 0.30307430028915405,
+ -0.51036536693573,
+ -0.10344770550727844,
+ -0.32919490337371826,
+ -0.1850835084915161,
+ -0.06791239976882935,
+ -0.4566211700439453,
+ -0.5197039842605591,
+ -0.0845537781715393,
+ 0.41839006543159485,
+ -0.2886400818824768,
+ -0.5076020359992981,
+ -0.4030117094516754,
+ 0.12247839570045471,
+ -0.0896185114979744,
+ 0.33545833826065063,
+ -0.5072660446166992,
+ -0.16906166076660156,
+ -0.15917956829071045,
+ 0.22637388110160828,
+ 0.3876112103462219,
+ 0.0888708159327507,
+ 0.09026975929737091,
+ -0.13854050636291504,
+ -0.46894294023513794,
+ -0.17917653918266296,
+ 0.3416078984737396,
+ -0.09984326362609863,
+ 0.2868485152721405,
+ 0.2530573904514313,
+ -0.3376176953315735,
+ -0.49235862493515015,
+ -0.20985522866249084,
+ 0.16131627559661865,
+ 0.030314043164253235,
+ 0.2850037217140198,
+ 0.15797922015190125,
+ -0.151507169008255,
+ -0.3369441628456116,
+ -0.18186882138252258,
+ -0.2756766378879547,
+ -0.06379720568656921,
+ 0.4295490086078644,
+ -0.18975651264190674,
+ 0.33568674325942993,
+ -0.2566572427749634,
+ 0.10677628219127655,
+ 0.21304693818092346,
+ 0.4615717828273773,
+ -0.28895872831344604,
+ -0.34484878182411194,
+ 0.04569196701049805,
+ 0.070567786693573,
+ -0.0732337087392807,
+ -0.03912040591239929,
+ 0.12772244215011597,
+ -0.33722466230392456,
+ 0.1579335331916809,
+ -0.12132710218429565,
+ -0.33108222484588623,
+ 0.3109962046146393,
+ 0.04332229495048523,
+ 0.1857585906982422,
+ 0.05020272731781006,
+ 0.43312254548072815,
+ -0.23651504516601562,
+ 0.3895299434661865,
+ -0.0027664899826049805,
+ 0.1446976512670517,
+ -0.11668302118778229,
+ 0.23415327072143555,
+ 0.3354298770427704,
+ 0.00827762484550476,
+ 0.33026254177093506,
+ -0.22841762006282806,
+ -0.2601417601108551,
+ -0.5217412710189819,
+ 0.34651070833206177,
+ 0.33407092094421387,
+ 0.14407162368297577,
+ -0.060149937868118286,
+ -0.18130603432655334,
+ 0.40138164162635803,
+ -0.4939374327659607,
+ -0.35622096061706543,
+ -0.01332855224609375,
+ -0.3251664340496063,
+ 0.20990946888923645,
+ -0.009969592094421387,
+ 0.44403019547462463,
+ 0.02976713329553604,
+ 0.22425943613052368,
+ 0.131801575422287,
+ -0.26157134771347046,
+ 0.4031834006309509,
+ -0.10052472352981567,
+ -0.5028502941131592,
+ -0.49861985445022583,
+ -0.2126571536064148,
+ 0.3184415400028229,
+ 0.11431884765625,
+ 0.29334115982055664,
+ 0.17576444149017334,
+ -0.07712608575820923,
+ -0.11573797464370728,
+ 0.0014584660530090332,
+ -0.31583893299102783,
+ 0.03685048222541809,
+ 0.25662708282470703,
+ -0.3266652226448059,
+ 0.015230953693389893,
+ -0.29285794496536255,
+ 0.15612518787384033,
+ -0.30868902802467346,
+ -0.17467284202575684,
+ -0.1816544532775879,
+ -0.23736661672592163,
+ 0.43644624948501587,
+ -0.4825708270072937,
+ 0.13550692796707153,
+ 0.08595603704452515,
+ -0.03206534683704376,
+ -0.3979875445365906,
+ -0.41530394554138184,
+ -0.12760376930236816,
+ 0.3817550539970398,
+ 0.46107929944992065,
+ -0.2240627408027649,
+ -0.3369297683238983,
+ -0.1074160635471344,
+ -0.2257615327835083,
+ -0.2163252830505371,
+ 0.40445247292518616,
+ 0.2396823912858963,
+ -0.1084449291229248,
+ -0.25819918513298035,
+ 0.15549203753471375,
+ -0.25625312328338623,
+ 0.07189127802848816,
+ -0.33378830552101135,
+ -0.33373600244522095,
+ 0.34976908564567566,
+ 0.3016273081302643,
+ -0.5142537951469421,
+ -0.34270697832107544,
+ 0.15793833136558533,
+ -0.19423867762088776,
+ 0.16425639390945435,
+ 0.23513518273830414,
+ 0.2923203706741333,
+ 0.1554991751909256,
+ -0.324503093957901,
+ -0.33672964572906494,
+ 0.4762410819530487,
+ 0.2765067219734192,
+ 0.27412694692611694,
+ 0.01266343891620636,
+ 0.25304752588272095,
+ 0.044233739376068115,
+ 0.42562544345855713,
+ -0.49826183915138245,
+ 0.18655720353126526,
+ -0.4516226649284363,
+ -0.4690476357936859,
+ 0.2578721344470978,
+ -0.42885148525238037,
+ 0.23598167300224304,
+ 0.1312035769224167,
+ -0.02292877435684204,
+ 0.2736968696117401,
+ -0.4274228513240814,
+ -0.2966853082180023,
+ 0.23457081615924835,
+ -0.10415509343147278,
+ -0.24287250638008118,
+ -0.3323987126350403,
+ -0.44749128818511963,
+ -0.009973905980587006,
+ 0.4291894733905792,
+ -0.33520030975341797,
+ 0.19053298234939575,
+ -0.39712557196617126,
+ -0.0024179965257644653,
+ -0.04547414928674698,
+ -0.13815778493881226,
+ 0.1405053734779358,
+ -0.19856595993041992,
+ 0.30045899748802185,
+ -0.4815046787261963,
+ 0.33719512820243835,
+ -0.1317460834980011,
+ 0.049620453268289566,
+ 0.47155454754829407,
+ -0.5146172642707825,
+ -0.137079119682312,
+ 0.26154589653015137,
+ -0.07372555881738663,
+ 0.2824442386627197,
+ 0.39477863907814026,
+ 0.336490660905838,
+ -0.47377097606658936,
+ -0.17514559626579285,
+ -0.026242122054100037,
+ 0.26645398139953613,
+ -0.2592957019805908,
+ -0.06018597632646561,
+ -0.5159084796905518,
+ 0.14147940278053284,
+ 0.21347454190254211,
+ 0.0699179470539093,
+ 0.029836297035217285,
+ -0.3203604519367218,
+ 0.2638803720474243,
+ 0.342237651348114,
+ -0.3325061798095703,
+ 0.3979421854019165,
+ 0.46108078956604004,
+ 0.3428357243537903,
+ -0.09571826457977295,
+ -0.28838351368904114,
+ -0.0802990198135376,
+ 0.16880926489830017,
+ 0.15868620574474335,
+ -0.33553048968315125,
+ 0.18437020480632782,
+ 0.4679918885231018,
+ 0.45667344331741333,
+ -0.15831172466278076,
+ 0.05563914775848389,
+ -0.1361486166715622,
+ 0.3367229104042053,
+ 0.3940460979938507,
+ 0.42965075373649597,
+ 0.41117095947265625,
+ -0.33645832538604736,
+ 0.4570835828781128,
+ -0.007828859612345695,
+ 0.19473302364349365,
+ -0.38863199949264526,
+ 0.17358914017677307,
+ 0.4504058361053467,
+ -0.05796605348587036,
+ 0.16572530567646027,
+ 0.3988955616950989,
+ 0.021759748458862305,
+ -0.18176156282424927,
+ -0.09240752458572388,
+ 0.07812380790710449,
+ 0.37822508811950684,
+ 0.4186560809612274,
+ 0.010158956050872803,
+ 0.3024649918079376,
+ -0.1532066911458969,
+ 0.15228325128555298,
+ 0.26470404863357544,
+ 0.3429751992225647,
+ 0.21840302646160126,
+ 0.10037428140640259,
+ -0.370903879404068,
+ 0.2877337336540222,
+ 0.41850316524505615,
+ 0.07017272710800171,
+ -0.019341111183166504,
+ 0.2338322103023529,
+ -0.17100583016872406,
+ -0.4368072748184204,
+ -0.33741652965545654,
+ -0.1323641538619995,
+ -0.4589557647705078,
+ -0.4433559775352478,
+ 0.4027082920074463,
+ 0.39566925168037415,
+ -0.05403992533683777,
+ -0.05340838432312012,
+ 0.2501090168952942,
+ 0.05347800254821777,
+ 0.44172364473342896,
+ -0.32110247015953064,
+ 0.23776382207870483,
+ -0.25972476601600647,
+ -0.40271374583244324,
+ -0.5043267011642456,
+ 0.04082489013671875,
+ 0.1280132234096527,
+ 0.011164844036102295,
+ -0.02410009503364563,
+ -0.05589038133621216,
+ -0.19899076223373413,
+ -0.41458261013031006,
+ -0.3356701135635376,
+ -0.08282741159200668,
+ 0.07466357946395874,
+ -0.4247518479824066,
+ 0.020726993680000305,
+ 0.2635740041732788,
+ -0.052006423473358154,
+ -0.29502809047698975,
+ 0.4339449107646942,
+ 0.26865389943122864,
+ 0.33590322732925415,
+ -0.4371991455554962,
+ -0.03130161762237549,
+ -0.33170825242996216,
+ -0.06737367808818817,
+ 0.4642101526260376,
+ 0.11324287950992584,
+ 0.004417300224304199,
+ 0.15761035680770874,
+ 0.4489707052707672,
+ 0.14902406930923462,
+ -0.4332301914691925,
+ 0.2373920977115631,
+ -0.34983402490615845,
+ 0.2449701875448227,
+ -0.16682615876197815,
+ 0.3502199649810791,
+ 0.21438059210777283,
+ -0.5045859217643738,
+ -0.12194398045539856,
+ -0.05409372225403786,
+ 0.20273154973983765,
+ 0.10413086414337158,
+ -0.15577124059200287,
+ -0.20684650540351868,
+ -0.3800356984138489,
+ 0.430570125579834,
+ 0.1597074419260025,
+ 0.08649182319641113,
+ -0.0980565994977951,
+ 0.2795577049255371,
+ -0.32004261016845703,
+ -0.13320845365524292,
+ -0.02583390474319458,
+ 0.330643892288208,
+ 0.43883848190307617,
+ 0.18901431560516357,
+ -0.33440542221069336,
+ 0.19010478258132935,
+ -0.15335148572921753,
+ -0.3308538496494293,
+ -0.33859169483184814,
+ -0.2175346314907074,
+ -0.3341091275215149,
+ -0.27789807319641113,
+ -0.10608500242233276,
+ 0.030232936143875122,
+ 0.07136814296245575,
+ 0.24138954281806946,
+ -0.15872246026992798,
+ 0.1774122565984726,
+ -0.2979055643081665,
+ -0.1907048523426056,
+ -0.06486781686544418,
+ -0.303546667098999,
+ -0.3018339276313782,
+ -0.33164748549461365,
+ 0.3237442970275879,
+ -0.014850258827209473,
+ 0.12610524892807007,
+ -0.09413844347000122,
+ -0.3230047821998596,
+ 0.4455186426639557,
+ 0.4169672727584839,
+ -0.4337165057659149,
+ 0.1857418417930603,
+ 0.3040958046913147,
+ -0.17893081903457642,
+ -0.08191582560539246,
+ 0.4147520065307617,
+ -0.3334510922431946,
+ 0.049559906125068665,
+ -0.5157519578933716,
+ -0.47419172525405884,
+ -0.4554598927497864,
+ -0.39215272665023804,
+ -0.33223462104797363,
+ -0.20440071821212769,
+ -0.1464010775089264,
+ 0.03188443183898926,
+ -0.1681775450706482,
+ 0.04826030135154724,
+ 0.46556350588798523,
+ -0.13345469534397125,
+ -0.10976406186819077,
+ 0.30017757415771484,
+ 0.1369638890028,
+ -0.3335479497909546,
+ 0.3319574296474457,
+ -0.2969878911972046,
+ -0.5193246603012085,
+ 0.06461510807275772,
+ 0.2005038559436798,
+ 0.07582229375839233,
+ 0.17748652398586273,
+ -0.5188894271850586,
+ 0.2771895229816437,
+ -0.07555967569351196,
+ 0.3094567656517029,
+ 0.05342698097229004,
+ -0.2798118591308594,
+ 0.15190991759300232,
+ 0.4645425081253052,
+ -0.05714750289916992,
+ 0.4127868413925171,
+ -0.2582288980484009,
+ 0.043169863522052765,
+ -0.5227877497673035,
+ -0.460762619972229,
+ -0.2966731786727905,
+ 0.29749590158462524,
+ 0.11800432205200195,
+ 0.22434020042419434,
+ 0.36083900928497314,
+ -0.2284899652004242,
+ -0.3358810544013977,
+ -0.03216388821601868,
+ 0.00806039571762085,
+ -0.2047211229801178,
+ 0.42861467599868774,
+ -0.12768396735191345,
+ -0.302678644657135,
+ 0.1772443801164627,
+ 0.24410024285316467,
+ -0.18297892808914185,
+ 0.43456918001174927,
+ -0.33743324875831604,
+ -0.04293268918991089,
+ 0.187350794672966,
+ -0.5139598250389099,
+ -0.05564755201339722,
+ -0.4931061267852783,
+ 0.451846718788147,
+ -0.16202351450920105,
+ 0.4543258547782898,
+ 0.2699272036552429,
+ 0.38573047518730164,
+ 0.23614221811294556,
+ 0.032926544547080994,
+ -0.1451575756072998,
+ -0.23101618885993958,
+ -0.3794354200363159,
+ -0.45104271173477173,
+ 0.2425622195005417,
+ 0.4547152519226074,
+ 0.27477648854255676,
+ -0.3321712911128998,
+ 0.2585901618003845,
+ 0.4734644889831543,
+ -0.29238957166671753,
+ 0.0483560711145401,
+ 0.12708348035812378,
+ 0.4726126194000244,
+ 0.2341928780078888,
+ -0.09926697611808777,
+ 0.09834787249565125,
+ -0.022732138633728027,
+ 0.30920055508613586,
+ 0.06838566064834595,
+ 0.38668113946914673,
+ -0.2282433956861496,
+ 0.4388938844203949,
+ 0.034248583018779755,
+ -0.12472808361053467,
+ -0.47637397050857544,
+ 0.1712205559015274,
+ -0.039477378129959106,
+ -0.2439601868391037,
+ -0.0659058690071106,
+ 0.1675211787223816,
+ 0.05390074849128723,
+ 0.47433796525001526,
+ -0.31682687997817993,
+ 0.03633199632167816,
+ -0.3275343179702759,
+ -0.3341561555862427,
+ 0.4395766258239746,
+ -0.3897719085216522,
+ 0.16537103056907654,
+ 0.42071533203125,
+ -0.43936458230018616,
+ 0.355669766664505,
+ 0.18247127532958984,
+ -0.3722462058067322,
+ -0.3311368227005005,
+ 0.15835295617580414,
+ 0.069464772939682,
+ -0.38137680292129517,
+ -0.25873762369155884,
+ 0.41883838176727295,
+ 0.19952327013015747,
+ 0.15955443680286407,
+ 0.4744013845920563,
+ 0.18091227114200592,
+ -0.4948720335960388,
+ -0.4476701021194458,
+ -0.2642812728881836,
+ -0.006318897008895874,
+ -0.29948562383651733,
+ 0.2637484073638916,
+ -0.12632732093334198,
+ 0.014692246913909912,
+ -0.4073331952095032,
+ 0.3847596049308777,
+ 0.3683522641658783,
+ 0.1059490516781807,
+ -0.00239013135433197,
+ -0.3774659037590027,
+ 0.16417378187179565,
+ 0.3815574049949646,
+ 0.23984606564044952,
+ 0.10641428083181381,
+ 0.32059726119041443,
+ -0.509036660194397,
+ 0.1298568844795227,
+ 0.32841330766677856,
+ -0.03342549502849579,
+ 0.2696227431297302,
+ 0.21075446903705597,
+ -0.33868736028671265,
+ 0.4111781418323517,
+ -0.3303237855434418,
+ 0.40313321352005005,
+ -0.2549200654029846,
+ -0.4878682494163513,
+ -0.043072015047073364,
+ 0.4403999447822571,
+ -0.1519412398338318,
+ 0.03857114911079407,
+ 0.18826937675476074,
+ 0.10116523504257202,
+ -0.3257763385772705,
+ 0.004207491874694824,
+ -0.4787464439868927,
+ 0.08781760931015015,
+ 0.30113860964775085,
+ 0.07066895067691803,
+ 0.35615766048431396,
+ 0.40603405237197876,
+ 0.37284111976623535,
+ 0.40541836619377136,
+ -0.05530831217765808,
+ 0.24837929010391235,
+ 0.32650884985923767,
+ -0.39647066593170166,
+ 0.11940324306488037,
+ 0.3190041184425354,
+ -0.43321648240089417,
+ 0.18622678518295288,
+ -0.24288317561149597,
+ -0.11986073851585388,
+ -0.16503553092479706,
+ 0.25838324427604675,
+ -0.23411601781845093,
+ -0.1389489620923996,
+ 0.4642544984817505,
+ 0.4709851145744324,
+ 0.29709795117378235,
+ 0.20696379244327545,
+ -0.2989765703678131,
+ 0.2648889422416687,
+ -0.3031804859638214,
+ -0.5140215158462524,
+ -0.3622184097766876,
+ 0.13192293047904968,
+ -0.1829860806465149,
+ -0.3942631185054779,
+ -0.26323044300079346,
+ -0.5221788287162781,
+ -0.32527631521224976,
+ 0.17463335394859314,
+ -0.02328205108642578,
+ 0.4105417728424072,
+ -0.020819488912820816,
+ 0.3092094957828522,
+ -0.08012917637825012,
+ -0.05843400955200195,
+ -0.23457938432693481,
+ -0.28233370184898376,
+ 0.3970872163772583,
+ -0.11393460631370544,
+ 0.24212384223937988,
+ -0.25936272740364075,
+ -0.45656058192253113,
+ -0.5042991638183594,
+ 0.4468344449996948,
+ -0.36652714014053345,
+ -0.06436801701784134,
+ 0.24102121591567993,
+ 0.2968236804008484,
+ -0.4921106994152069,
+ -0.00958726555109024,
+ -0.28062570095062256,
+ -0.27210673689842224,
+ 0.3342750668525696,
+ 0.2240411788225174,
+ 0.1355004608631134,
+ 0.0504642128944397,
+ 0.4296879768371582,
+ 0.27465856075286865,
+ -0.20187315344810486,
+ 0.3665506839752197,
+ 0.1576445996761322,
+ 0.07422792911529541,
+ -0.375295490026474,
+ -0.10218245536088943,
+ -0.3378875255584717,
+ -0.26720038056373596,
+ -0.10571186244487762,
+ 0.0872604101896286,
+ -0.24183599650859833,
+ -0.4958818256855011,
+ 0.1783595085144043,
+ -0.24796009063720703,
+ -0.3537270426750183,
+ 0.04665551707148552,
+ -0.5205618143081665,
+ -0.3293690085411072,
+ 0.45453310012817383,
+ -0.04465818405151367,
+ -0.520105242729187,
+ -0.1941295713186264,
+ 0.12449067831039429,
+ 0.12116968631744385,
+ -0.08870041370391846,
+ -0.5219393968582153,
+ 0.23834747076034546,
+ 0.01835651695728302,
+ -0.2332978993654251,
+ 0.2623583674430847,
+ 0.009178340435028076,
+ 0.011513739824295044,
+ -0.18440096080303192,
+ 0.25971055030822754,
+ -0.24421262741088867,
+ 0.14888274669647217,
+ 0.3328161835670471,
+ 0.10740292072296143,
+ -0.3848055899143219,
+ -0.31568217277526855,
+ 0.0706670731306076,
+ 0.422699898481369,
+ -0.20717574656009674,
+ -0.2589106857776642,
+ 0.23800377547740936,
+ -0.08346287161111832,
+ -0.14386016130447388,
+ 0.2378903031349182,
+ -0.38861358165740967,
+ 0.23478275537490845,
+ -0.28374549746513367,
+ 0.3670451045036316,
+ -0.15106171369552612,
+ 0.3944977819919586,
+ -0.18543162941932678,
+ -0.1940520703792572,
+ 0.43392035365104675,
+ -0.4558996260166168,
+ 0.11333015561103821,
+ 0.2438354194164276,
+ 0.06285300850868225,
+ -0.427859902381897,
+ -0.06634163856506348,
+ -0.35928258299827576,
+ -0.08466386795043945,
+ -0.42617669701576233,
+ 0.07076631486415863,
+ 0.007890800014138222,
+ -0.3983868360519409,
+ -0.18439164757728577,
+ -0.2516191303730011,
+ -0.023751169443130493,
+ 0.3805772662162781,
+ 0.2174426019191742,
+ -0.0869942456483841,
+ -0.3083943724632263,
+ -0.3222557604312897,
+ -0.23953375220298767,
+ 0.05729438364505768,
+ -0.33639073371887207,
+ -0.32938578724861145,
+ -0.07864731550216675,
+ -0.03960059583187103,
+ -0.5160858631134033,
+ 0.1775178462266922,
+ 0.15140840411186218,
+ -0.48534005880355835,
+ 0.38135015964508057,
+ -0.281220018863678,
+ 0.3713221549987793,
+ 0.09114652872085571,
+ -0.027288734912872314,
+ -0.022817105054855347,
+ -0.0013706237077713013,
+ 0.2941383421421051,
+ 0.4164727032184601,
+ -0.21715065836906433,
+ -0.31629565358161926,
+ -0.018678486347198486,
+ 0.2510330080986023,
+ 0.4273034930229187,
+ 0.3455996513366699,
+ -0.1927194893360138,
+ -0.09002191573381424,
+ -0.4056703746318817,
+ -0.3206602931022644,
+ 0.20712757110595703,
+ 0.013609111309051514,
+ 0.06704962253570557,
+ 0.1864299774169922,
+ -0.33497512340545654,
+ -0.028614714741706848,
+ -0.3289939761161804,
+ 0.0733221024274826,
+ 0.4219927191734314,
+ 0.4561668336391449,
+ -0.1873038113117218,
+ 0.4608784317970276,
+ 0.2548387944698334,
+ -0.14577539265155792,
+ 0.05937869846820831,
+ -0.0032094717025756836,
+ -0.2396526038646698,
+ -0.03511744737625122,
+ 0.040992558002471924,
+ -0.15075208246707916,
+ -0.14280009269714355,
+ -0.2691860496997833,
+ -0.15961356461048126,
+ -0.09265169501304626,
+ -0.2251417636871338,
+ 0.16515931487083435,
+ 0.2521035671234131,
+ -0.06834632158279419,
+ 0.2273615300655365,
+ -0.011953398585319519,
+ -0.5192170143127441,
+ 0.09711955487728119,
+ -0.32770273089408875,
+ -0.1356508731842041,
+ 0.3539862632751465,
+ -0.017195165157318115,
+ -0.442584365606308,
+ 0.46905219554901123,
+ -0.16041380167007446,
+ -0.2884588837623596,
+ -0.28395625948905945,
+ -0.39595827460289,
+ -0.2254483699798584,
+ 0.4483885169029236,
+ 0.23524737358093262,
+ -0.06313738226890564,
+ 0.16259294748306274,
+ -0.3241269886493683,
+ -0.04492765665054321,
+ -0.31816011667251587,
+ -0.006968110799789429,
+ 0.2971048653125763,
+ -0.23567284643650055,
+ 0.04621554911136627,
+ 0.012305974960327148,
+ -0.04074928164482117,
+ 0.31616222858428955,
+ 0.3563142716884613,
+ -0.07367944717407227,
+ 0.16271081566810608,
+ -0.12430739402770996,
+ -0.04266159236431122,
+ 0.18092167377471924,
+ -0.010150313377380371,
+ -0.22149908542633057,
+ -0.46790581941604614,
+ 0.4705738425254822,
+ 0.35831665992736816,
+ 0.4749431610107422,
+ -0.1607816219329834,
+ 0.09984663128852844,
+ -0.026917695999145508,
+ -0.21603286266326904,
+ 0.4381510615348816,
+ -0.33175432682037354,
+ -0.041791707277297974,
+ -0.4192429780960083,
+ 0.19690680503845215,
+ 0.448185533285141,
+ 0.29099780321121216,
+ 0.37506550550460815,
+ -0.3345661163330078,
+ -0.018038451671600342,
+ 0.09279078245162964,
+ -0.3337289094924927,
+ -0.11140276491641998,
+ -0.18123973906040192,
+ 0.18794655799865723,
+ 0.3682622015476227,
+ 0.38512271642684937,
+ 0.05235785245895386,
+ -0.4122284948825836,
+ 0.15299397706985474,
+ -0.05986267328262329,
+ -0.1873558759689331,
+ -0.3323366045951843,
+ 0.02863675355911255,
+ 0.24214962124824524,
+ -0.0031265318393707275,
+ -0.0037078857421875,
+ 0.1379694938659668,
+ -0.283450186252594,
+ -0.021682515740394592,
+ -0.1173323392868042,
+ -0.052891120314598083,
+ 0.11400222778320312,
+ -0.33163952827453613,
+ 0.11134570837020874,
+ 0.05740758776664734,
+ -0.14693418145179749,
+ 0.15444403886795044,
+ 0.036863088607788086,
+ 0.4506318271160126,
+ 0.12667560577392578,
+ -0.46421003341674805,
+ 0.14631062746047974,
+ 0.32179564237594604,
+ -0.26627117395401,
+ -0.48482808470726013,
+ 0.3261570334434509,
+ -0.2281668335199356,
+ -0.47136497497558594,
+ 0.37964463233947754,
+ -0.3320998251438141,
+ 0.20305824279785156,
+ 0.09010541439056396,
+ 0.46115463972091675,
+ 0.015007991343736649,
+ -0.333770751953125,
+ 0.3870398998260498,
+ 0.4231909215450287,
+ 0.21541836857795715,
+ 0.1855582594871521,
+ 0.31880760192871094,
+ -0.520966649055481,
+ -0.5099628567695618,
+ 0.21242865920066833,
+ -0.16577452421188354,
+ -0.060776978731155396,
+ -0.2755116820335388,
+ -0.05835753679275513,
+ 0.02282196283340454,
+ 0.239795982837677,
+ -0.4439048767089844,
+ 0.2538980543613434,
+ 0.17786699533462524,
+ -0.4113759696483612,
+ 0.123067706823349,
+ -0.40857023000717163,
+ -0.3345842957496643,
+ -0.34827008843421936,
+ 0.08992815017700195,
+ -0.353813111782074,
+ -0.4998980760574341,
+ 0.16392679512500763,
+ -0.17455041408538818,
+ 0.3892470896244049,
+ -0.16580834984779358,
+ -0.303879976272583,
+ -0.20291614532470703,
+ -0.1740817129611969,
+ -0.22184140980243683,
+ 0.40722087025642395,
+ -0.20543262362480164,
+ 0.03838235139846802,
+ 0.3025725185871124,
+ 0.35455837845802307,
+ 0.0678558349609375,
+ -0.3233906924724579,
+ -0.39390626549720764,
+ 0.18666252493858337,
+ -0.3634563088417053,
+ 0.10177022218704224,
+ 0.004668474197387695,
+ -0.4930371344089508,
+ -0.3325215280056,
+ 0.13422659039497375,
+ -0.34425175189971924,
+ 0.19161462783813477,
+ 0.14722785353660583,
+ -0.4246262311935425,
+ -0.2516798973083496,
+ -0.39881646633148193,
+ 0.43974363803863525,
+ 0.18476033210754395,
+ 0.1685761958360672,
+ -0.1818292737007141,
+ 0.051668018102645874,
+ -0.3340432345867157,
+ -0.08180449157953262,
+ -0.09432008862495422,
+ 0.45651721954345703,
+ -0.3864242434501648,
+ -0.31430724263191223,
+ -0.11416396498680115,
+ 0.2055351734161377,
+ 0.3537464737892151,
+ -0.33587533235549927,
+ -0.16203844547271729,
+ -0.15489891171455383,
+ 0.06678876280784607,
+ -0.15949547290802002,
+ -0.03324484825134277,
+ 0.430999755859375,
+ -0.1847342550754547,
+ -0.32805049419403076,
+ 0.18271249532699585,
+ -0.45390379428863525,
+ 0.20612919330596924,
+ 0.004504144191741943,
+ -0.20952802896499634,
+ -0.33666718006134033,
+ 0.44436684250831604,
+ -0.15188679099082947,
+ -0.41599708795547485,
+ 0.05977070331573486,
+ 0.08419757336378098,
+ -0.18405789136886597,
+ 0.13171666860580444,
+ -0.11018800735473633,
+ 0.3052477240562439,
+ 0.32323014736175537,
+ -0.1703205108642578,
+ 0.3829370439052582,
+ -0.33122342824935913,
+ 0.047507137060165405,
+ -0.46800708770751953,
+ 0.1064404845237732,
+ -0.17700640857219696,
+ -0.2878882884979248,
+ 0.09944266080856323,
+ 0.09925258159637451,
+ 0.330136775970459,
+ 0.23261818289756775,
+ 0.0096967164427042,
+ 0.4655996561050415,
+ 0.1636132001876831,
+ 0.4757157564163208,
+ 0.010734587907791138,
+ -0.4579729437828064,
+ -0.3068816661834717,
+ -0.3346572518348694,
+ -0.3489873707294464,
+ -0.2573474049568176,
+ -0.12906458973884583,
+ 0.054049041122198105,
+ 0.2143491804599762,
+ 0.0889802873134613,
+ -0.11191248893737793,
+ -0.16655102372169495,
+ -0.2814633250236511,
+ -0.3560001254081726,
+ -0.3074224889278412,
+ -0.3371238708496094,
+ 0.13372336328029633,
+ -0.1445346474647522,
+ -0.3202162981033325,
+ -0.1591818928718567,
+ 0.32324671745300293,
+ -0.3389540910720825,
+ -0.27432459592819214,
+ 0.45095786452293396,
+ -0.47275155782699585,
+ 0.4605141878128052,
+ -0.11146816611289978,
+ 0.4750254154205322,
+ 0.4240241050720215,
+ -0.15404875576496124,
+ 0.22767391800880432,
+ -0.0830620527267456,
+ -0.3233586549758911,
+ 0.46279966831207275,
+ -0.4077000319957733,
+ 0.10986295342445374,
+ 0.4263498783111572,
+ 0.3814346194267273,
+ -0.34394627809524536,
+ 0.11140751838684082,
+ -0.5120925903320312,
+ 0.30331650376319885,
+ -0.13382470607757568,
+ 0.347827672958374,
+ 0.35055452585220337,
+ 0.14354676008224487,
+ -0.08081664144992828,
+ 0.20145131647586823,
+ 0.19361019134521484,
+ 0.3503812849521637,
+ 0.14067870378494263,
+ 0.006558060646057129,
+ 0.09761929512023926,
+ -0.21265822649002075,
+ -0.156196728348732,
+ -0.17520874738693237,
+ -0.025121092796325684,
+ -0.042962443083524704,
+ 0.3373125195503235,
+ -0.05899941921234131,
+ 0.3426898717880249,
+ -0.21645858883857727,
+ 0.06120345741510391,
+ -0.18582972884178162,
+ 0.284995436668396,
+ 0.2870786190032959,
+ 0.4345133602619171,
+ 0.1775912344455719,
+ -0.038396432995796204,
+ 0.3301096260547638,
+ -0.2901000678539276,
+ -0.3073630928993225,
+ -0.07287006080150604,
+ 0.4358941614627838,
+ -0.33343958854675293,
+ -0.22388143837451935,
+ -0.07217332720756531,
+ -0.12999922037124634,
+ -0.22210693359375,
+ 0.008862406015396118,
+ -0.33329325914382935,
+ 0.07713888585567474,
+ 0.34830987453460693,
+ 0.08408671617507935,
+ -0.5051279664039612,
+ -0.33910876512527466,
+ -0.32361769676208496,
+ -0.5113667249679565,
+ 0.17298287153244019,
+ -0.5105555057525635,
+ 0.14647454023361206,
+ 0.28322070837020874,
+ -0.2441907525062561,
+ 0.3559108078479767,
+ -0.060791611671447754,
+ -0.10406863689422607,
+ -0.3899603486061096,
+ -0.1346045732498169,
+ -0.43578121066093445,
+ -0.4828993082046509,
+ 0.371207058429718,
+ 0.3419838845729828,
+ -0.23825672268867493,
+ -0.5218729972839355,
+ 0.11349248886108398,
+ -0.1376376748085022,
+ 0.09205222129821777,
+ -0.49683862924575806,
+ 0.08763444423675537,
+ -0.512008786201477,
+ 0.12193693965673447,
+ -0.22176474332809448,
+ -0.35527127981185913,
+ 0.4620119631290436,
+ 0.44089388847351074,
+ -0.2687382102012634,
+ 0.03215208649635315,
+ 0.05666985362768173,
+ -0.4938613772392273,
+ 0.017382532358169556,
+ -0.06635665893554688,
+ 0.23721808195114136,
+ -0.49689996242523193,
+ 0.25769490003585815,
+ -0.14276710152626038,
+ 0.2480567991733551,
+ -0.4278242588043213,
+ 0.3974965214729309,
+ -0.32696640491485596,
+ 0.08572942018508911,
+ -0.3339715003967285,
+ -0.108279287815094,
+ 0.4492151141166687,
+ -0.33603721857070923,
+ 0.03849661350250244,
+ 0.2648831903934479,
+ 0.10930526256561279,
+ -0.3304717242717743,
+ 0.29775434732437134,
+ -0.11371773481369019,
+ 0.2693345546722412,
+ -0.09117701649665833,
+ 0.26871031522750854,
+ 0.27419567108154297,
+ 0.19802406430244446,
+ 0.17023050785064697,
+ -0.48091983795166016,
+ -0.12359669804573059,
+ 0.11946350336074829,
+ -0.15308913588523865,
+ 0.33568575978279114,
+ -0.11202315986156464,
+ 0.45248034596443176,
+ -0.19499558210372925,
+ 0.10769045352935791,
+ -0.3307231664657593,
+ -0.3697570264339447,
+ -0.3574844300746918,
+ -0.33451855182647705,
+ 0.44681036472320557,
+ -0.4428303837776184,
+ 0.09865936636924744,
+ -0.3334652781486511,
+ 0.3011966347694397,
+ 0.32816851139068604,
+ 0.3911809027194977,
+ -0.16176912188529968,
+ 0.14714044332504272,
+ 0.22036322951316833,
+ -0.43369728326797485,
+ 0.07063072919845581,
+ 0.44176703691482544,
+ -0.19003453850746155,
+ -0.042864471673965454,
+ 0.06641072034835815,
+ 0.2894153594970703,
+ 0.06471729278564453,
+ -0.3816896677017212,
+ 0.40688541531562805,
+ -0.09893578290939331,
+ -0.13532187044620514,
+ 0.08548598736524582,
+ 0.31992030143737793,
+ 0.09503123909235,
+ -0.40273651480674744,
+ -0.26594606041908264,
+ -0.02445775270462036,
+ -0.515852689743042,
+ 0.08092319220304489,
+ -0.3077094554901123,
+ -0.5104184746742249,
+ 0.150238037109375,
+ 0.3750403821468353,
+ -0.04023945331573486,
+ -0.31462812423706055,
+ -0.0921066403388977,
+ -0.331767201423645,
+ 0.0606267936527729,
+ 0.24255669116973877,
+ 0.43608415126800537,
+ 0.36823564767837524,
+ -0.004238724708557129,
+ -0.07834270596504211,
+ -0.0024260133504867554,
+ -0.4719846248626709,
+ -0.0365467369556427,
+ -0.06458567082881927,
+ 0.028864502906799316,
+ 0.0566275417804718,
+ 0.34590059518814087,
+ -0.18077559769153595,
+ -0.16986721754074097,
+ -0.06715621799230576,
+ -0.3887961506843567,
+ -0.3372628688812256,
+ 0.0727071762084961,
+ 0.1866864562034607,
+ -0.5144051909446716,
+ -0.35614916682243347,
+ 0.2367885857820511,
+ -0.27126938104629517,
+ -0.012445449829101562,
+ 0.23794251680374146,
+ -0.15886318683624268,
+ 0.3258926272392273,
+ 0.13081222772598267,
+ -0.2953171133995056,
+ -0.08258306980133057,
+ -0.18166911602020264,
+ 0.38409221172332764,
+ 0.08085170388221741,
+ -0.14543557167053223,
+ 0.09633439779281616,
+ -0.022924065589904785,
+ -0.5188485383987427,
+ 0.36790138483047485,
+ 0.18606877326965332,
+ -0.38185712695121765,
+ -0.046244293451309204,
+ 0.17844237387180328,
+ -0.38849589228630066,
+ -0.29366979002952576,
+ -0.07607979327440262,
+ 0.40886515378952026,
+ -0.33262574672698975,
+ -0.26230481266975403,
+ 0.39965155720710754,
+ 0.41286420822143555,
+ 0.37320461869239807,
+ -0.32746797800064087,
+ 0.06422576308250427,
+ 0.3536079525947571,
+ 0.37944090366363525,
+ -0.050080180168151855,
+ -0.38489830493927,
+ 0.05872553586959839,
+ -0.4752463400363922,
+ -0.2515544593334198,
+ -0.1752631962299347,
+ 0.45346808433532715,
+ -0.02377486228942871,
+ 0.16444700956344604,
+ -0.33336642384529114,
+ 0.4206559956073761,
+ 0.19579800963401794,
+ -0.4970530569553375,
+ -0.18377885222434998,
+ -0.1541931927204132,
+ 0.27851665019989014,
+ 0.19943386316299438,
+ 0.3891363739967346,
+ -0.04193486273288727,
+ -0.1361924409866333,
+ 0.3183016777038574,
+ -0.5188942551612854,
+ -0.4975099265575409,
+ -0.12244196236133575,
+ 0.240681454539299,
+ -0.22224068641662598,
+ 0.02408352494239807,
+ 0.38776135444641113,
+ -0.4052346646785736,
+ 0.33733996748924255,
+ -0.2791752815246582,
+ 0.24046236276626587,
+ 0.18659710884094238,
+ 0.0870552808046341,
+ -0.4991007447242737,
+ -0.1902763843536377,
+ 0.27666449546813965,
+ 0.09147422015666962,
+ -0.15616217255592346,
+ -0.43077296018600464,
+ 0.048451781272888184,
+ 0.12720435857772827,
+ -0.012294232845306396,
+ -0.021668434143066406,
+ -0.5177459716796875,
+ 0.12554551661014557,
+ -0.1379328966140747,
+ 0.27160537242889404,
+ -0.21929419040679932,
+ 0.09695600718259811,
+ -0.22889122366905212,
+ 0.23456957936286926,
+ 0.2965127229690552,
+ 0.15818403661251068,
+ -0.4866352081298828,
+ -0.436916708946228,
+ 0.13511715829372406,
+ -0.2503044903278351,
+ -0.035670310258865356,
+ -0.16927647590637207,
+ -0.27427050471305847,
+ 0.03020995855331421,
+ 0.14618447422981262,
+ -0.24320371448993683,
+ -0.3243446350097656,
+ -0.09744811058044434,
+ 0.35021764039993286,
+ 0.4559406638145447,
+ 0.44540801644325256,
+ -0.17920562624931335,
+ 0.26376283168792725,
+ 0.29389292001724243,
+ -0.5049819350242615,
+ -0.4633098244667053,
+ 0.012088902294635773,
+ -0.32388246059417725,
+ 0.041839420795440674,
+ 0.4030844271183014,
+ 0.1898409128189087,
+ 0.4186766445636749,
+ 0.41915231943130493,
+ -0.2776997685432434,
+ 0.12005800008773804,
+ -0.11008758097887039,
+ 0.0362623929977417,
+ -0.29396358132362366,
+ -0.18171384930610657,
+ 0.0010069608688354492,
+ 0.0402335561811924,
+ -0.1742200255393982,
+ 0.13949385285377502,
+ 0.02813476324081421,
+ 0.310574471950531,
+ -0.377126008272171,
+ -0.26486635208129883,
+ -0.3883855938911438,
+ 0.04211091995239258,
+ -0.07823535799980164,
+ 0.39338457584381104,
+ 0.34093883633613586,
+ -0.22887687385082245,
+ -0.13320130109786987,
+ -0.11630231142044067,
+ 0.4727635681629181,
+ -0.5012379288673401,
+ 0.19453799724578857,
+ -0.3183373510837555,
+ 0.2962515652179718,
+ 0.47227999567985535,
+ 0.4635770320892334,
+ 0.010319828987121582,
+ -0.0809774100780487,
+ -0.3371870815753937,
+ 0.15200135111808777,
+ 0.3077837824821472,
+ -0.09436741471290588,
+ 0.3599706292152405,
+ -0.0787438452243805,
+ -0.05584808066487312,
+ -0.2010495662689209,
+ 0.038417160511016846,
+ -0.33302855491638184,
+ 0.18880879878997803,
+ 0.20520645380020142,
+ -0.39550989866256714,
+ -0.2135331928730011,
+ -0.14260494709014893,
+ 0.03628230094909668,
+ 0.2767258286476135,
+ 0.43313562870025635,
+ -0.3315364718437195,
+ 0.10984355211257935,
+ -0.22124218940734863,
+ 0.060803383588790894,
+ 0.032044410705566406,
+ -0.33199262619018555,
+ 0.3313281238079071,
+ 0.03514358401298523,
+ -0.08269521594047546,
+ -0.34009963274002075,
+ -0.028131425380706787,
+ -0.3247237205505371,
+ -0.5011646747589111,
+ -0.17263519763946533,
+ 0.09123009443283081,
+ -0.22472408413887024,
+ 0.46673673391342163,
+ -0.37363046407699585,
+ 0.1604805886745453,
+ -0.41732579469680786,
+ 0.26930010318756104,
+ -0.07488569617271423,
+ 0.3939894735813141,
+ -0.11028322577476501,
+ -0.06192252039909363,
+ 0.3340057134628296,
+ 0.35732603073120117,
+ -0.14108851552009583,
+ 0.4479057192802429,
+ -0.335988312959671,
+ -0.46644777059555054,
+ -0.5189304351806641,
+ 0.1876346468925476,
+ -0.05313652753829956,
+ -0.3143397569656372,
+ -0.5133914947509766,
+ 0.37389588356018066,
+ 0.1017841100692749,
+ -0.28189849853515625,
+ 0.21820126473903656,
+ -0.33815908432006836,
+ -0.3282995820045471,
+ -0.11693956702947617,
+ -0.20743608474731445,
+ 0.3025720715522766,
+ 0.17462795972824097,
+ 0.13968008756637573,
+ -0.017810940742492676
+ ]
+ }
+ ],
+ "layout": {
+ "height": 512,
+ "paper_bgcolor": "rgba(0,0,0,0)",
+ "scene": {
+ "xaxis": {
+ "visible": false
+ },
+ "yaxis": {
+ "visible": false
+ },
+ "zaxis": {
+ "visible": false
+ }
+ },
+ "template": {
+ "data": {
+ "bar": [
+ {
+ "error_x": {
+ "color": "#2a3f5f"
+ },
+ "error_y": {
+ "color": "#2a3f5f"
+ },
+ "marker": {
+ "line": {
+ "color": "#E5ECF6",
+ "width": 0.5
+ },
+ "pattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ }
+ },
+ "type": "bar"
+ }
+ ],
+ "barpolar": [
+ {
+ "marker": {
+ "line": {
+ "color": "#E5ECF6",
+ "width": 0.5
+ },
+ "pattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ }
+ },
+ "type": "barpolar"
+ }
+ ],
+ "carpet": [
+ {
+ "aaxis": {
+ "endlinecolor": "#2a3f5f",
+ "gridcolor": "white",
+ "linecolor": "white",
+ "minorgridcolor": "white",
+ "startlinecolor": "#2a3f5f"
+ },
+ "baxis": {
+ "endlinecolor": "#2a3f5f",
+ "gridcolor": "white",
+ "linecolor": "white",
+ "minorgridcolor": "white",
+ "startlinecolor": "#2a3f5f"
+ },
+ "type": "carpet"
+ }
+ ],
+ "choropleth": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "type": "choropleth"
+ }
+ ],
+ "contour": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "contour"
+ }
+ ],
+ "contourcarpet": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "type": "contourcarpet"
+ }
+ ],
+ "heatmap": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "heatmap"
+ }
+ ],
+ "heatmapgl": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "heatmapgl"
+ }
+ ],
+ "histogram": [
+ {
+ "marker": {
+ "pattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ }
+ },
+ "type": "histogram"
+ }
+ ],
+ "histogram2d": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "histogram2d"
+ }
+ ],
+ "histogram2dcontour": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "histogram2dcontour"
+ }
+ ],
+ "mesh3d": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "type": "mesh3d"
+ }
+ ],
+ "parcoords": [
+ {
+ "line": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "parcoords"
+ }
+ ],
+ "pie": [
+ {
+ "automargin": true,
+ "type": "pie"
+ }
+ ],
+ "scatter": [
+ {
+ "fillpattern": {
+ "fillmode": "overlay",
+ "size": 10,
+ "solidity": 0.2
+ },
+ "type": "scatter"
+ }
+ ],
+ "scatter3d": [
+ {
+ "line": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatter3d"
+ }
+ ],
+ "scattercarpet": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattercarpet"
+ }
+ ],
+ "scattergeo": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattergeo"
+ }
+ ],
+ "scattergl": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattergl"
+ }
+ ],
+ "scattermapbox": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scattermapbox"
+ }
+ ],
+ "scatterpolar": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatterpolar"
+ }
+ ],
+ "scatterpolargl": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatterpolargl"
+ }
+ ],
+ "scatterternary": [
+ {
+ "marker": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "type": "scatterternary"
+ }
+ ],
+ "surface": [
+ {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ },
+ "colorscale": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "type": "surface"
+ }
+ ],
+ "table": [
+ {
+ "cells": {
+ "fill": {
+ "color": "#EBF0F8"
+ },
+ "line": {
+ "color": "white"
+ }
+ },
+ "header": {
+ "fill": {
+ "color": "#C8D4E3"
+ },
+ "line": {
+ "color": "white"
+ }
+ },
+ "type": "table"
+ }
+ ]
+ },
+ "layout": {
+ "annotationdefaults": {
+ "arrowcolor": "#2a3f5f",
+ "arrowhead": 0,
+ "arrowwidth": 1
+ },
+ "autotypenumbers": "strict",
+ "coloraxis": {
+ "colorbar": {
+ "outlinewidth": 0,
+ "ticks": ""
+ }
+ },
+ "colorscale": {
+ "diverging": [
+ [
+ 0,
+ "#8e0152"
+ ],
+ [
+ 0.1,
+ "#c51b7d"
+ ],
+ [
+ 0.2,
+ "#de77ae"
+ ],
+ [
+ 0.3,
+ "#f1b6da"
+ ],
+ [
+ 0.4,
+ "#fde0ef"
+ ],
+ [
+ 0.5,
+ "#f7f7f7"
+ ],
+ [
+ 0.6,
+ "#e6f5d0"
+ ],
+ [
+ 0.7,
+ "#b8e186"
+ ],
+ [
+ 0.8,
+ "#7fbc41"
+ ],
+ [
+ 0.9,
+ "#4d9221"
+ ],
+ [
+ 1,
+ "#276419"
+ ]
+ ],
+ "sequential": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ],
+ "sequentialminus": [
+ [
+ 0,
+ "#0d0887"
+ ],
+ [
+ 0.1111111111111111,
+ "#46039f"
+ ],
+ [
+ 0.2222222222222222,
+ "#7201a8"
+ ],
+ [
+ 0.3333333333333333,
+ "#9c179e"
+ ],
+ [
+ 0.4444444444444444,
+ "#bd3786"
+ ],
+ [
+ 0.5555555555555556,
+ "#d8576b"
+ ],
+ [
+ 0.6666666666666666,
+ "#ed7953"
+ ],
+ [
+ 0.7777777777777778,
+ "#fb9f3a"
+ ],
+ [
+ 0.8888888888888888,
+ "#fdca26"
+ ],
+ [
+ 1,
+ "#f0f921"
+ ]
+ ]
+ },
+ "colorway": [
+ "#636efa",
+ "#EF553B",
+ "#00cc96",
+ "#ab63fa",
+ "#FFA15A",
+ "#19d3f3",
+ "#FF6692",
+ "#B6E880",
+ "#FF97FF",
+ "#FECB52"
+ ],
+ "font": {
+ "color": "#2a3f5f"
+ },
+ "geo": {
+ "bgcolor": "white",
+ "lakecolor": "white",
+ "landcolor": "#E5ECF6",
+ "showlakes": true,
+ "showland": true,
+ "subunitcolor": "white"
+ },
+ "hoverlabel": {
+ "align": "left"
+ },
+ "hovermode": "closest",
+ "mapbox": {
+ "style": "light"
+ },
+ "paper_bgcolor": "white",
+ "plot_bgcolor": "#E5ECF6",
+ "polar": {
+ "angularaxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ },
+ "bgcolor": "#E5ECF6",
+ "radialaxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ }
+ },
+ "scene": {
+ "xaxis": {
+ "backgroundcolor": "#E5ECF6",
+ "gridcolor": "white",
+ "gridwidth": 2,
+ "linecolor": "white",
+ "showbackground": true,
+ "ticks": "",
+ "zerolinecolor": "white"
+ },
+ "yaxis": {
+ "backgroundcolor": "#E5ECF6",
+ "gridcolor": "white",
+ "gridwidth": 2,
+ "linecolor": "white",
+ "showbackground": true,
+ "ticks": "",
+ "zerolinecolor": "white"
+ },
+ "zaxis": {
+ "backgroundcolor": "#E5ECF6",
+ "gridcolor": "white",
+ "gridwidth": 2,
+ "linecolor": "white",
+ "showbackground": true,
+ "ticks": "",
+ "zerolinecolor": "white"
+ }
+ },
+ "shapedefaults": {
+ "line": {
+ "color": "#2a3f5f"
+ }
+ },
+ "ternary": {
+ "aaxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ },
+ "baxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ },
+ "bgcolor": "#E5ECF6",
+ "caxis": {
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": ""
+ }
+ },
+ "title": {
+ "x": 0.05
+ },
+ "xaxis": {
+ "automargin": true,
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": "",
+ "title": {
+ "standoff": 15
+ },
+ "zerolinecolor": "white",
+ "zerolinewidth": 2
+ },
+ "yaxis": {
+ "automargin": true,
+ "gridcolor": "white",
+ "linecolor": "white",
+ "ticks": "",
+ "title": {
+ "standoff": 15
+ },
+ "zerolinecolor": "white",
+ "zerolinewidth": 2
+ }
+ }
+ },
+ "width": 512
+ }
+ },
+ "text/html": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "vis_pcd('gso', instance_name)\n",
+ "# vis_pcd('shape-e', instance_name) # \n",
+ "# vis_pcd('LGM', instance_name) # \n",
+ "# vis_pcd('splatter-img', instance_name) # "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# organize a dataset for fid eval\n",
+ "\n",
+ "objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'\n",
+ "dataset_json = os.path.join(objv_dataset, 'dataset.json')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import json\n",
+ "with open(dataset_json, 'r') as f:\n",
+ " dataset_json = json.load(f)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "dict_keys(['Transportations_tar', 'Furnitures', 'Food', 'BuildingsOutdoor', 'Electronics', 'daily-used', 'Human-Shape', 'Plants', 'Animals'])"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "dataset_json.keys()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "37461"
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "len(dataset_json['Animals'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# instances_for_use = dataset_json['Animals'][::3][:6250]\n",
+ "instances_for_use = dataset_json['Animals'][::3][:6250][1100:2200]\n",
+ "# instances_for_use = dataset_json['Animals'][:6250]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['Animals/53/275475/1', 'Animals/53/278344/1', 'Animals/53/279428/1']"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# len(instances_for_use)\n",
+ "instances_for_use[:3]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 1100/1100 [01:32<00:00, 11.87it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "# save to local dir, with the same format as obj_folder, 8 images \n",
+ "# output_dir = '/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval'\n",
+ "output_dir = '/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-new'\n",
+ "# output_dir = '/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K'\n",
+ "# output_dir = '/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K-new'\n",
+ "\n",
+ "# for object in instances_for_use:\n",
+ "for idx, object in enumerate(tqdm(instances_for_use[:])):\n",
+ " img_path = os.path.join(objv_dataset, object, 'raw_img.jpg')\n",
+ " img = imageio.imread(img_path)\n",
+ "\n",
+ " if idx % 3 == 0:\n",
+ " indices_to_save = [0,1]\n",
+ " else:\n",
+ " indices_to_save = [1,0]\n",
+ " # indices_to_save = [0,1,2,3,4,9,10,11,12][:2]\n",
+ " # else:\n",
+ " # indices_to_save = [1,2,3,4,9,10,11,12]\n",
+ "\n",
+ " instance_output_dir = os.path.join(output_dir, object)\n",
+ " os.makedirs(instance_output_dir, exist_ok=True)\n",
+ "\n",
+ " for idx, w_position in enumerate(indices_to_save):\n",
+ " patch = img[:, w_position*512:(w_position+1)*512, :]\n",
+ " imageio.imwrite(os.path.join(instance_output_dir, f'{idx}.jpg'), patch)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# convert gt pcd to the right format\n",
+ "\n",
+ "\n",
+ "\n",
+ "objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'\n",
+ "dataset_json = os.path.join(objv_dataset, 'dataset.json')\n",
+ "with open(dataset_json, 'r') as f:\n",
+ " dataset_json = json.load(f)\n",
+ "\n",
+ "all_objs = dataset_json['Animals'][::3][1100:2200]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 1100/1100 [00:33<00:00, 32.89it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# from tqdm import tqdm \n",
+ "# import shutil\n",
+ "\n",
+ "# ! copy gt\n",
+ "\n",
+ "gt_path = \"/mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd\"\n",
+ "output_path = \"/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/objv-gt\"\n",
+ "\n",
+ "for obj_folder in tqdm(all_objs):\n",
+ " save_name = '-'.join(obj_folder.split('/'))\n",
+ " obj_folder = '/'.join(obj_folder.split('/')[:-1])\n",
+ "\n",
+ " shutil.copy(os.path.join(gt_path, obj_folder, 'fps-4096.ply'), os.path.join(output_path, f'{save_name}_pcd_4096.ply'))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 1100/1100 [00:14<00:00, 77.74it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# ! copy ours pred\n",
+ "\n",
+ "pred_path = \"/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/GA/stage-2/dino_img/ditl-fromditlPCD\"\n",
+ "output_path = \"/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/GA\"\n",
+ "\n",
+ "for obj_folder in tqdm(all_objs[:]):\n",
+ " save_name = '-'.join(obj_folder.split('/'))\n",
+ " obj_folder = '/'.join(obj_folder.split('/')[1:-1])\n",
+ " try:\n",
+ " shutil.copy(os.path.join(pred_path, obj_folder, '0', 'sample-0-gaussian-4096.ply'), os.path.join(output_path, f'{save_name}_pcd_4096.ply'))\n",
+ " except:\n",
+ " continue"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# copy output images into a single folder for clean-FID calculation\n",
+ "\n",
+ "\n",
+ "objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'\n",
+ "dataset_json = os.path.join(objv_dataset, 'dataset.json')\n",
+ "with open(dataset_json, 'r') as f:\n",
+ " dataset_json = json.load(f)\n",
+ "\n",
+ "# all_objs = dataset_json['Animals'][::3][:6250]\n",
+ "all_objs = dataset_json['Animals'][::3][1100:2200]\n",
+ "all_objs = all_objs[:600]\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 45,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 600/600 [00:00<00:00, 17405.80it/s]\n",
+ "100%|██████████| 14400/14400 [03:20<00:00, 71.66it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "import shutil\n",
+ "\n",
+ "# for method_name in ln3diff-fixpose_192 CRM \n",
+ "# for method_name in \n",
+ "# OpenLRM/Animals One-2-3-45/Animals shape-e/Animals splatter-img/Animals \n",
+ "\n",
+ "output_path='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv'\n",
+ "\n",
+ "# for method_name in ['LGM_fixpose', 'GA-fixpose']:\n",
+ "# for method_name in []:\n",
+ "# for method_name in ['LGM_fixpose/Animals', 'GA/stage-2/dino_img/ditl-fromditlPCD/']:\n",
+ "for method_name in ['GA/stage-2/dino_img/ditl-fromditlPCD/']:\n",
+ "# for method_name in ['objv-gt']:\n",
+ " path = os.path.join(output_path, method_name)\n",
+ " files = []\n",
+ "\n",
+ " for obj_folder in tqdm(all_objs):\n",
+ " obj_folder = '/'.join(obj_folder.split('/')[1:])\n",
+ " for idx in range(24):\n",
+ " # files.append(os.path.join(path, obj_folder, f'{idx}.jpg'))\n",
+ " if 'Lara' in path:\n",
+ " files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', f'{idx}.jpg'))\n",
+ " # elif 'LGM' in path:\n",
+ " # files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))\n",
+ " elif 'GA' in path:\n",
+ " files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'sample-0-{idx}.jpg'))\n",
+ " elif 'LRM' in path:\n",
+ " files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))\n",
+ " else:\n",
+ " files.append(os.path.join(path, obj_folder, '0', f'{idx}.jpg'))\n",
+ "\n",
+ "\n",
+ "\n",
+ " output_path = '/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/lint-dir'\n",
+ "\n",
+ " method_output_path = os.path.join(output_path, method_name)\n",
+ " os.makedirs(method_output_path, exist_ok=True)\n",
+ "\n",
+ " for idx, file_path in enumerate(tqdm(files[:])):\n",
+ " shutil.copy(file_path, os.path.join(method_output_path, f'{idx}.jpg'))\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 39,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 10/10 [00:00<00:00, 60.38it/s]\n"
+ ]
+ }
+ ],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 40,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-new/LGM_fixpose/Animals/9.jpg'"
+ ]
+ },
+ "execution_count": 40,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "os.path.join(method_output_path, f'{idx}.jpg')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 600/600 [00:00<00:00, 20171.71it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "gso_rendering=\"/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K\"\n",
+ "\n",
+ "\n",
+ "# for method_name in ['GA/stage-2/dino_img/ditl-fromditlPCD/']:\n",
+ "# # for method_name in ['objv-gt']:\n",
+ "# path = os.path.join(output_path, method_name)\n",
+ "files = []\n",
+ "\n",
+ "for obj_folder in tqdm(all_objs):\n",
+ " obj_folder = obj_folder[:-2] # to load 3 chunks\n",
+ " for batch in range(1,4):\n",
+ " for idx in range(8):\n",
+ " files.append(os.path.join(gso_rendering, obj_folder, str(batch), f'{idx}.jpg'))\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 14400/14400 [04:21<00:00, 55.06it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "output_path = '/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/lint-dir'\n",
+ "gt_output_path = os.path.join(output_path, 'objv-gt')\n",
+ "\n",
+ "os.makedirs(gt_output_path, exist_ok=True)\n",
+ "\n",
+ "for idx, file_path in enumerate(tqdm(files[:])):\n",
+ " shutil.copy(file_path, os.path.join(gt_output_path, f'{idx}.jpg'))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "all_vid_paths = \"/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/2-30_CONSTRUCTION_SET-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/60-BAGEL_WITH_CHEESE-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/62-BALANCING_CACTUS-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/76-Baby_Elements_Stacking_Cups-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/98-Breyer_Horse_Of_The_Year_2015-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/112-COAST_GUARD_BOAT-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/113-CONE_SORTING-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/115-CREATIVE_BLOCKS_35_MM-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/155-Cole_Hardware_Mini_Honey_Dipper-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/275-FAIRY_TALE_BLOCKS-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/277-FIRE_ENGINE-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/280-FOOD_BEVERAGE_SET-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/303-GEOMETRIC_PEG_BOARD-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/324-Great_Dinos_Triceratops_Toy-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/365-JUICER_SET-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/789-STACKING_BEAR-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/791-STACKING_RING-0-sample-0-gs.mp4 /mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/GA-fixpose/803-Schleich_African_Black_Rhino-0-sample-0-gs.mp4 \""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['BAGEL_WITH_CHEESE',\n",
+ " 'BALANCING_CACTUS',\n",
+ " 'Baby_Elements_Stacking_Cups',\n",
+ " 'Breyer_Horse_Of_The_Year_2015',\n",
+ " 'COAST_GUARD_BOAT',\n",
+ " 'CONE_SORTING',\n",
+ " 'CREATIVE_BLOCKS_35_MM',\n",
+ " 'Cole_Hardware_Mini_Honey_Dipper',\n",
+ " 'FAIRY_TALE_BLOCKS',\n",
+ " 'FIRE_ENGINE',\n",
+ " 'FOOD_BEVERAGE_SET',\n",
+ " 'GEOMETRIC_PEG_BOARD',\n",
+ " 'Great_Dinos_Triceratops_Toy',\n",
+ " 'JUICER_SET',\n",
+ " 'STACKING_BEAR',\n",
+ " 'STACKING_RING',\n",
+ " 'Schleich_African_Black_Rhino']"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "instances = [\n",
+ " path.split('/')[-1].split('-')[1]\n",
+ " for path in all_vid_paths.split()\n",
+ "]\n",
+ "instances[1:]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "base",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/evaluations/fidkid-pytorch/clean_fid_gen3d_baseline.sh b/evaluations/fidkid-pytorch/clean_fid_gen3d_baseline.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3a3992738602ca18410b14d033aebb897dbf8852
--- /dev/null
+++ b/evaluations/fidkid-pytorch/clean_fid_gen3d_baseline.sh
@@ -0,0 +1,60 @@
+set -x
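+
+# Score each method's renderings against the shared ground-truth set by
+# invoking cleanfid_score_gso.py once per method_name selected below.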
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+# gso_rendering="gso-rendering"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir
+
+
+# ! gso stuffs
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K"
+gt_rendering="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/lint-dir/objv-gt"
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/lint-dir
+
+# method_name=LGM
+# for method_name in CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD
+# for method_name in CRM/Animals ln3diff Lara
+# Lara
+
+# for method_name in Lara ln3diff/Animals
+# for method_name in Lara GA/stage-2/dino_img/ditl-fromditlPCD ln3diff/Animals
+# for method_name in CRM/Animals
+# for method_name in ln3diff-lite/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditxlPCD
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD LGM_fixpose/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditxlPCD-47w
+
+
+# for method_name in GA/stage-2/dino_img/ditl-withtop LGM_fixpose_withtop/Animals ln3diff-lite-withtop/Animals
+
+
+# ! GSO
+
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+# for method_name in ln3diff-fixpose_192 CRM LGM_fixpose
+# for method_name in GA-fixpose
+# for method_name in LGM_fixpose_r=1.8/Animals
+for method_name in GA/stage-2/dino_img/ditl-fromditlPCD LGM_fixpose/Animals
+
+# OpenLRM/Animals One-2-3-45/Animals shape-e/Animals splatter-img/Animals
+
+do
+
+python cleanfid_score_gso.py $gt_rendering ${output_path}/${method_name} \
+ --dataset ${method_name} \
+ --num-workers 4 \
+    --reso 512
+    # optional: append --save-stats to save npz statistics instead of computing the score
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/cleanfid_score_gso.py b/evaluations/fidkid-pytorch/cleanfid_score_gso.py
new file mode 100644
index 0000000000000000000000000000000000000000..75c5baf29aae63a459af565c26f91db3efe4bbb0
--- /dev/null
+++ b/evaluations/fidkid-pytorch/cleanfid_score_gso.py
@@ -0,0 +1,518 @@
+"""Calculates the Frechet Inception Distance (FID) to evalulate GANs
+
+The FID metric calculates the distance between two distributions of images.
+Typically, we have summary statistics (mean & covariance matrix) of one
+of these distributions, while the 2nd distribution is given by a GAN.
+
+When run as a stand-alone program, it compares the distribution of
+images that are stored as PNG/JPEG at a specified location with a
+distribution given by summary statistics (in pickle format).
+
+The FID is calculated by assuming that X_1 and X_2 are the activations of
+the pool_3 layer of the inception net for generated samples and real world
+samples respectively.
+
+See --help to see further details.
+
+Code adapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead
+of Tensorflow
+
+Copyright 2018 Institute of Bioinformatics, JKU Linz
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import ipdb
+import os
+from pathlib import Path
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
+import pyiqa
+
+from pdb import set_trace as st
+
+import json
+import numpy as np
+import torch
+import torchvision.transforms as TF
+from PIL import Image
+from scipy import linalg
+from torch.nn.functional import adaptive_avg_pool2d
+import cv2
+try:
+ from tqdm import tqdm
+except ImportError:
+ # If tqdm is not available, provide a mock version of it
+ def tqdm(x):
+ return x
+
+from pytorch_fid.inception import InceptionV3
+
+parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
+parser.add_argument('--batch-size', type=int, default=100,
+ help='Batch size to use')
+parser.add_argument('--reso', type=int, default=128,
+                    help='Resolution images are resized to before feature extraction')
+parser.add_argument('--num-workers', type=int, default=8,
+ help=('Number of processes to use for data loading. '
+ 'Defaults to `min(8, num_cpus)`'))
+parser.add_argument('--device', type=str, default=None,
+ help='Device to use. Like cuda, cuda:0 or cpu')
+parser.add_argument('--dataset', type=str, default='omni',
+                    help='Dataset/method tag used to cache and label the statistics')
+parser.add_argument('--dims', type=int, default=2048,
+ choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),
+ help=('Dimensionality of Inception features to use. '
+ 'By default, uses pool3 features'))
+parser.add_argument('--save-stats', action='store_true',
+ help=('Generate an npz archive from a directory of samples. '
+ 'The first path is used as input and the second as output.'))
+parser.add_argument('path', type=str, nargs=2,
+ help=('Paths to the generated images or '
+ 'to .npz statistic files'))
+
+IMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',
+ 'tif', 'tiff', 'webp'}
+
+
+class ImagePathDataset(torch.utils.data.Dataset):
+ def __init__(self, files, reso,transforms=None):
+ self.files = files
+ self.transforms = transforms
+ self.reso=reso
+
+ def __len__(self):
+ return len(self.files)
+
+ def __getitem__(self, i):
+ path = self.files[i]
+ #ipdb.set_trace()
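+        # if a file is missing or unreadable, fall back to the first image so
+        # the batch keeps its shape; the offending path is printed below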
+ try:
+ img=cv2.imread(path)
+ #if img.mean(-1)>254.9:
+ #img[np.where(img.mean(-1)>254.9)]=0
+ img=cv2.resize(img,(self.reso,self.reso),interpolation=cv2.INTER_CUBIC)
+ img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
+        except Exception:
+ img=cv2.imread(self.files[0])
+ #if img.mean(-1)>254.9:
+ #img[np.where(img.mean(-1)>254.9)]=0
+ img=cv2.resize(img,(self.reso,self.reso),interpolation=cv2.INTER_CUBIC)
+ img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
+ print(path)
+
+
+ #img = Image.open(path).convert('RGB')
+ if self.transforms is not None:
+ img = self.transforms(img)
+ #ipdb.set_trace()
+
+ return img
+
+
+def get_activations(files, model, batch_size=50, dims=2048, device='cpu',
+ num_workers=16,reso=128):
+ """Calculates the activations of the pool_3 layer for all images.
+
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : Batch size of images for the model to process at once.
+ Make sure that the number of samples is a multiple of
+ the batch size, otherwise some samples are ignored. This
+ behavior is retained to match the original FID score
+ implementation.
+ -- dims : Dimensionality of features returned by Inception
+ -- device : Device to run calculations
+ -- num_workers : Number of parallel dataloader workers
+
+ Returns:
+ -- A numpy array of dimension (num images, dims) that contains the
+ activations of the given tensor when feeding inception with the
+ query tensor.
+ """
+ model.eval()
+
+ if batch_size > len(files):
+ print(('Warning: batch size is bigger than the data size. '
+ 'Setting batch size to data size'))
+ batch_size = len(files)
+
+ dataset = ImagePathDataset(files, reso,transforms=TF.ToTensor())
+ dataloader = torch.utils.data.DataLoader(dataset,
+ batch_size=batch_size,
+ shuffle=False,
+ drop_last=False,
+ num_workers=num_workers)
+
+ pred_arr = np.empty((len(files), dims))
+
+ start_idx = 0
+
+ for batch in tqdm(dataloader):
+ batch = batch.to(device)
+ #ipdb.set_trace()
+
+ with torch.no_grad():
+ pred = model(batch)[0]
+
+ # If model output is not scalar, apply global spatial average pooling.
+ # This happens if you choose a dimensionality not equal 2048.
+ if pred.size(2) != 1 or pred.size(3) != 1:
+ pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
+
+ #ipdb.set_trace()
+
+ pred = pred.squeeze(3).squeeze(2).cpu().numpy()
+
+ pred_arr[start_idx:start_idx + pred.shape[0]] = pred
+
+ start_idx = start_idx + pred.shape[0]
+
+ return pred_arr
+
+
+def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
+ """Numpy implementation of the Frechet Distance.
+ The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
+ and X_2 ~ N(mu_2, C_2) is
+ d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
+
+ Stable version by Dougal J. Sutherland.
+
+ Params:
+ -- mu1 : Numpy array containing the activations of a layer of the
+ inception net (like returned by the function 'get_predictions')
+ for generated samples.
+    -- mu2   : The sample mean over activations, precalculated on a
+ representative data set.
+ -- sigma1: The covariance matrix over activations for generated samples.
+    -- sigma2: The covariance matrix over activations, precalculated on a
+ representative data set.
+
+ Returns:
+ -- : The Frechet Distance.
+ """
+ #ipdb.set_trace()
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert mu1.shape == mu2.shape, \
+ 'Training and test mean vectors have different lengths'
+ assert sigma1.shape == sigma2.shape, \
+ 'Training and test covariances have different dimensions'
+
+ diff = mu1 - mu2
+
+ # Product might be almost singular
+ covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = ('fid calculation produces singular product; '
+ 'adding %s to diagonal of cov estimates') % eps
+ print(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # Numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError('Imaginary component {}'.format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return (diff.dot(diff) + np.trace(sigma1)
+ + np.trace(sigma2) - 2 * tr_covmean)
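+
+# Illustrative sanity check (not part of the original pipeline): identical
+# statistics should give a distance of ~0, e.g.
+#   calculate_frechet_distance(np.zeros(4), np.eye(4), np.zeros(4), np.eye(4))
+# returns ~0 up to floating-point error.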
+
+
+def calculate_activation_statistics(files, model, batch_size=50, dims=2048,
+ device='cpu', num_workers=1,reso=128):
+ """Calculation of the statistics used by the FID.
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : The images numpy array is split into batches with
+ batch size batch_size. A reasonable batch size
+ depends on the hardware.
+ -- dims : Dimensionality of features returned by Inception
+ -- device : Device to run calculations
+ -- num_workers : Number of parallel dataloader workers
+
+ Returns:
+ -- mu : The mean over samples of the activations of the pool_3 layer of
+ the inception model.
+ -- sigma : The covariance matrix of the activations of the pool_3 layer of
+ the inception model.
+ """
+ act = get_activations(files, model, batch_size, dims, device, num_workers,reso=reso)
+ mu = np.mean(act, axis=0)
+ sigma = np.cov(act, rowvar=False)
+ return mu, sigma
+
+
+def compute_statistics_of_path(path, model, batch_size, dims, device,
+ num_workers=1,reso=512,dataset='gso'):
+ basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/gso_gt"
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid-withtop/gso_gt"
+
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/fid-withtop/gso_gt"
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-/metrics/fid-withtop/gso_gt"
+ os.makedirs(os.path.join(basepath), exist_ok=True)
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600][:]
+ # all_objs = all_objs[100:600]
+ # all_objs = all_objs[:500]
+
+
+ # if 'shapenet' in dataset:
+ # if 'shapenet' in dataset:
+
+ try:
+ try:
+
+ m=np.load(os.path.join(basepath,path.split('/')[-1]+str(reso)+'mean.npy'))
+ s=np.load(os.path.join(basepath,path.split('/')[-1]+str(reso)+'std.npy'))
+ print('loading_dataset',dataset)
+ except:
+ files=[]
+ # ! load instances for I23D inference
+ # for obj_folder in tqdm(sorted(os.listdir(path))):
+ # for idx in range(0,25):
+ # img_name = os.path.join(path, obj_folder, 'rgba', f'{idx:03}.png')
+ # files.append(img_name)
+
+ # ! free3d rendering
+ # for obj_folder in tqdm(sorted(os.listdir(path))):
+ # for idx in range(0,25):
+ # # img_name = os.path.join(path, obj_folder, 'rgba', f'{idx:03}.png')
+ # img_name = os.path.join(path, obj_folder, 'render_mvs_25', 'model', f'{idx:03}.png')
+ # files.append(img_name)
+
+ # ! objv loading
+ for obj_folder in tqdm(all_objs):
+ obj_folder = obj_folder[:-2] # to load 3 chunks
+ for batch in range(1,4):
+ for idx in range(8):
+ files.append(os.path.join(path, obj_folder, str(batch), f'{idx}.jpg'))
+
+ # for name in os.listdir(path):
+ # #ipdb.set_trace()
+ # # if name not in false1: #and name not in false2 and name not in false3:
+ # if name in false1: #and name not in false2 and name not in false3:
+ # img=os.path.join(path,name,'rgb')
+ # #ipdb.set_trace()
+ # files = files+sorted([os.path.join(img, idd) for idd in os.listdir(img) if idd.endswith('.png')])
+
+ if len(files) > 50000:
+ files = files[:50000]
+ break
+
+ #files=files[:5]
+ m, s = calculate_activation_statistics(files, model, batch_size,
+ dims, device, num_workers,reso=reso)
+ path = Path(path)
+ # ipdb.set_trace()
+ np.save(os.path.join(basepath,path.name+str(reso)+'mean'), m)
+ np.save(os.path.join(basepath,path.name+str(reso)+'std'), s)
+    except Exception as e:
+        print(f'{dataset} failed, ', e)
+        raise  # otherwise m, s are unbound at the return below
+
+
+ return m, s
+
+
+def compute_statistics_of_path_new(path, model, batch_size, dims, device,
+ num_workers=1,reso=128,dataset='omni'):
+ # basepath='/mnt/lustre/yslan/logs/nips23/LSGM/cldm/cmetric/shapenet-outs/fid'+str(reso)+'test'+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/metrics/fid/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/fid/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/'+str(reso)+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid-subset/'+str(reso)+dataset
+
+ basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/'+str(reso)+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid-withtop/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/fid/'+str(reso)+dataset
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ os.makedirs(os.path.join(basepath), exist_ok=True)
+ sample_name=path.split('/')[-1]
+
+ try:
+ try:
+ # ipdb.set_trace()
+ m=np.load(os.path.join(basepath,sample_name+str(reso)+'mean.npy'))
+ s=np.load(os.path.join(basepath,sample_name+str(reso)+'std.npy'))
+ print('loading_sample')
+ except:
+ files=[]
+
+ # for name in os.listdir(path):
+ # img=os.path.join(path,name)
+ # files.append(img) # ! directly append
+
+ # for loading gso-like folder
+ # st()
+ # for obj_folder in sorted(os.listdir(path)):
+
+ # if obj_folder == 'runs':
+ # continue
+
+ # if not os.path.isdir(os.path.join(path, obj_folder)):
+ # continue
+
+ # for idx in [0]:
+ # for i in range(24):
+ # if 'GA' in path:
+ # img=os.path.join(path,obj_folder, str(idx),f'sample-0-{i}.jpg')
+ # else:
+ # img=os.path.join(path,obj_folder, str(idx),f'{i}.jpg')
+ # # ipdb.set_trace()
+ # files.append(img)
+
+ for obj_folder in tqdm(all_objs):
+ obj_folder = '/'.join(obj_folder.split('/')[1:])
+ for idx in range(24):
+ # files.append(os.path.join(path, obj_folder, f'{idx}.jpg'))
+ if 'Lara' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', f'{idx}.jpg'))
+ elif 'GA' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'sample-0-{idx}.jpg'))
+ elif 'LRM' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))
+ else:
+ files.append(os.path.join(path, obj_folder, '0', f'{idx}.jpg'))
+
+
+ files=files[:50000]
+ m, s = calculate_activation_statistics(files, model, batch_size,
+ dims, device, num_workers,reso=reso)
+ path = Path(path)
+ np.save(os.path.join(basepath,sample_name+str(reso)+'mean'), m)
+ np.save(os.path.join(basepath,sample_name+str(reso)+'std'), s)
+    except Exception as e:
+        print('error sample image', e)
+        raise  # otherwise m, s are unbound at the return below
+
+ #ipdb.set_trace()
+
+ return m, s
+
+
+def calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1,reso=128,dataset='omni'):
+ """Calculates the FID of two paths"""
+ # for p in paths:
+ # if not os.path.exists(p):
+ # raise RuntimeError('Invalid path: %s' % p)
+
+ # block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+
+ # model = InceptionV3([block_idx]).to(device)
+
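+    # MUSIQ is a no-reference image-quality metric (created via pyiqa's
+    # 'musiq' entry), so it scores the generated images alone and needs no
+    # ground-truth statistics, unlike FID.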
+ musiq_metric = pyiqa.create_metric('musiq')
+ all_musiq = []
+
+ for file in tqdm(os.listdir(str(paths[1]))[:]):
+ musiq_value = musiq_metric(os.path.join(paths[1], file))
+ all_musiq.append(musiq_value)
+
+ musiq_value = sum(all_musiq) / len(all_musiq)
+
+ # fid_metric = pyiqa.create_metric('fid')
+ # fid_value = fid_metric(paths[0], paths[1])
+
+
+ # m1, s1 = compute_statistics_of_path(paths[0], model, batch_size, # ! GT data
+ # dims, device, num_workers,reso=reso,dataset=dataset)
+ # # ipdb.set_trace()
+ # m2, s2 = compute_statistics_of_path_new(paths[1], model, batch_size, # ! generated data
+ # dims, device, num_workers,reso=reso,dataset=dataset)
+ # fid_value = calculate_frechet_distance(m1, s1, m2, s2)
+
+ # return fid_value
+ return musiq_value
+
+
+def save_fid_stats(paths, batch_size, device, dims, num_workers=1):
+ """Calculates the FID of two paths"""
+ # if not os.path.exists(paths[0]):
+ # raise RuntimeError('Invalid path: %s' % paths[0])
+
+ # if os.path.exists(paths[1]):
+ # raise RuntimeError('Existing output file: %s' % paths[1])
+
+ block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+
+ model = InceptionV3([block_idx]).to(device)
+
+ print(f"Saving statistics for {paths[0]}")
+
+ m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,
+ dims, device, num_workers)
+
+ np.savez_compressed(paths[1], mu=m1, sigma=s1)
+
+
+def main():
+ args = parser.parse_args()
+
+ if args.device is None:
+ device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')
+ else:
+ device = torch.device(args.device)
+
+ if args.num_workers is None:
+ try:
+ num_cpus = len(os.sched_getaffinity(0))
+ except AttributeError:
+ # os.sched_getaffinity is not available under Windows, use
+ # os.cpu_count instead (which may not return the *available* number
+ # of CPUs).
+ num_cpus = os.cpu_count()
+
+ num_workers = min(num_cpus, 8) if num_cpus is not None else 0
+ else:
+ num_workers = args.num_workers
+
+ if args.save_stats:
+ save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers)
+ return
+ #ipdb.set_trace()
+ fid_value = calculate_fid_given_paths(args.path,
+ args.batch_size,
+ device,
+ args.dims,
+ num_workers,args.reso,args.dataset)
+    print(f'{args.dataset} MUSIQ: ', fid_value)  # currently MUSIQ, not FID; see calculate_fid_given_paths
+
+
+if __name__ == '__main__':
+ main()
diff --git a/evaluations/fidkid-pytorch/fid_gen3d_baseline.sh b/evaluations/fidkid-pytorch/fid_gen3d_baseline.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3939d76ade987c53e40f513e04b34ad3846eb4eb
--- /dev/null
+++ b/evaluations/fidkid-pytorch/fid_gen3d_baseline.sh
@@ -0,0 +1,61 @@
+set -x
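+
+# Same launcher pattern as clean_fid_gen3d_baseline.sh, but this one drives
+# fid_score_gso.py over the Objaverse animal renderings.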
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+# gso_rendering="gso-rendering"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir
+
+
+# ! gso stuffs
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+gso_rendering="/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K"
+
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv
+
+# method_name=LGM
+# for method_name in CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD
+# for method_name in CRM/Animals ln3diff Lara
+# Lara
+
+# for method_name in Lara ln3diff/Animals
+# for method_name in Lara GA/stage-2/dino_img/ditl-fromditlPCD ln3diff/Animals
+# for method_name in CRM/Animals
+# for method_name in ln3diff-lite/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditxlPCD
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD LGM_fixpose/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditxlPCD-47w
+
+
+# for method_name in GA/stage-2/dino_img/ditl-withtop LGM_fixpose_withtop/Animals ln3diff-lite-withtop/Animals
+
+
+# ! GSO
+
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+# for method_name in ln3diff-fixpose_192 CRM LGM_fixpose
+# for method_name in GA-fixpose
+# for method_name in LGM_fixpose_r=1.8/Animals
+# for method_name in GA/stage-2/dino_img/ditl-witholdmodel
+for method_name in scale3d/eval/eval_nerf/Animals scale3d/eval/eval_mesh/Animals
+
+# OpenLRM/Animals One-2-3-45/Animals shape-e/Animals splatter-img/Animals
+
+do
+
+python fid_score_gso.py $gso_rendering ${output_path}/${method_name} \
+ --dataset ${method_name} \
+ --num-workers 4 \
+    --reso 512
+    # optional: append --save-stats to save npz statistics instead of computing the score
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/fid_score_gso.py b/evaluations/fidkid-pytorch/fid_score_gso.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0a473aec63ff4368553034ffcbbb619c70f1db3
--- /dev/null
+++ b/evaluations/fidkid-pytorch/fid_score_gso.py
@@ -0,0 +1,503 @@
+"""Calculates the Frechet Inception Distance (FID) to evalulate GANs
+
+The FID metric calculates the distance between two distributions of images.
+Typically, we have summary statistics (mean & covariance matrix) of one
+of these distributions, while the 2nd distribution is given by a GAN.
+
+When run as a stand-alone program, it compares the distribution of
+images that are stored as PNG/JPEG at a specified location with a
+distribution given by summary statistics (in pickle format).
+
+The FID is calculated by assuming that X_1 and X_2 are the activations of
+the pool_3 layer of the inception net for generated samples and real world
+samples respectively.
+
+See --help to see further details.
+
+Code adapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead
+of Tensorflow
+
+Copyright 2018 Institute of Bioinformatics, JKU Linz
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import ipdb
+import os
+from pathlib import Path
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
+
+import json
+import numpy as np
+import torch
+import torchvision.transforms as TF
+from PIL import Image
+from scipy import linalg
+from torch.nn.functional import adaptive_avg_pool2d
+import cv2
+try:
+ from tqdm import tqdm
+except ImportError:
+ # If tqdm is not available, provide a mock version of it
+ def tqdm(x):
+ return x
+
+from pytorch_fid.inception import InceptionV3
+
+parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
+parser.add_argument('--batch-size', type=int, default=100,
+ help='Batch size to use')
+parser.add_argument('--reso', type=int, default=128,
+                    help='Resolution images are resized to before feature extraction')
+parser.add_argument('--num-workers', type=int, default=8,
+ help=('Number of processes to use for data loading. '
+ 'Defaults to `min(8, num_cpus)`'))
+parser.add_argument('--device', type=str, default=None,
+ help='Device to use. Like cuda, cuda:0 or cpu')
+parser.add_argument('--dataset', type=str, default='omni',
+                    help='Dataset/method tag used to cache and label the statistics')
+parser.add_argument('--dims', type=int, default=2048,
+ choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),
+ help=('Dimensionality of Inception features to use. '
+ 'By default, uses pool3 features'))
+parser.add_argument('--save-stats', action='store_true',
+ help=('Generate an npz archive from a directory of samples. '
+ 'The first path is used as input and the second as output.'))
+parser.add_argument('path', type=str, nargs=2,
+ help=('Paths to the generated images or '
+ 'to .npz statistic files'))
+
+IMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',
+ 'tif', 'tiff', 'webp'}
+
+
+class ImagePathDataset(torch.utils.data.Dataset):
+ def __init__(self, files, reso,transforms=None):
+ self.files = files
+ self.transforms = transforms
+ self.reso=reso
+
+ def __len__(self):
+ return len(self.files)
+
+ def __getitem__(self, i):
+ path = self.files[i]
+ #ipdb.set_trace()
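+        # if a file is missing or unreadable, fall back to the first image so
+        # the batch keeps its shape; the offending path is printed below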
+ try:
+ img=cv2.imread(path)
+ #if img.mean(-1)>254.9:
+ #img[np.where(img.mean(-1)>254.9)]=0
+ img=cv2.resize(img,(self.reso,self.reso),interpolation=cv2.INTER_CUBIC)
+ img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
+        except Exception:
+ img=cv2.imread(self.files[0])
+ #if img.mean(-1)>254.9:
+ #img[np.where(img.mean(-1)>254.9)]=0
+ img=cv2.resize(img,(self.reso,self.reso),interpolation=cv2.INTER_CUBIC)
+ img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
+ print(path)
+
+
+ #img = Image.open(path).convert('RGB')
+ if self.transforms is not None:
+ img = self.transforms(img)
+ #ipdb.set_trace()
+
+ return img
+
+
+def get_activations(files, model, batch_size=50, dims=2048, device='cpu',
+ num_workers=16,reso=128):
+ """Calculates the activations of the pool_3 layer for all images.
+
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : Batch size of images for the model to process at once.
+ Make sure that the number of samples is a multiple of
+ the batch size, otherwise some samples are ignored. This
+ behavior is retained to match the original FID score
+ implementation.
+ -- dims : Dimensionality of features returned by Inception
+ -- device : Device to run calculations
+ -- num_workers : Number of parallel dataloader workers
+
+ Returns:
+ -- A numpy array of dimension (num images, dims) that contains the
+ activations of the given tensor when feeding inception with the
+ query tensor.
+ """
+ model.eval()
+
+ if batch_size > len(files):
+ print(('Warning: batch size is bigger than the data size. '
+ 'Setting batch size to data size'))
+ batch_size = len(files)
+
+ dataset = ImagePathDataset(files, reso,transforms=TF.ToTensor())
+ dataloader = torch.utils.data.DataLoader(dataset,
+ batch_size=batch_size,
+ shuffle=False,
+ drop_last=False,
+ num_workers=num_workers)
+
+ pred_arr = np.empty((len(files), dims))
+
+ start_idx = 0
+
+ for batch in tqdm(dataloader):
+ batch = batch.to(device)
+ #ipdb.set_trace()
+
+ with torch.no_grad():
+ pred = model(batch)[0]
+
+ # If model output is not scalar, apply global spatial average pooling.
+ # This happens if you choose a dimensionality not equal 2048.
+ if pred.size(2) != 1 or pred.size(3) != 1:
+ pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
+
+ #ipdb.set_trace()
+
+ pred = pred.squeeze(3).squeeze(2).cpu().numpy()
+
+ pred_arr[start_idx:start_idx + pred.shape[0]] = pred
+
+ start_idx = start_idx + pred.shape[0]
+
+ return pred_arr
+
+
+def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
+ """Numpy implementation of the Frechet Distance.
+ The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
+ and X_2 ~ N(mu_2, C_2) is
+ d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
+
+ Stable version by Dougal J. Sutherland.
+
+ Params:
+ -- mu1 : Numpy array containing the activations of a layer of the
+ inception net (like returned by the function 'get_predictions')
+ for generated samples.
+    -- mu2   : The sample mean over activations, precalculated on a
+ representative data set.
+ -- sigma1: The covariance matrix over activations for generated samples.
+    -- sigma2: The covariance matrix over activations, precalculated on a
+ representative data set.
+
+ Returns:
+ -- : The Frechet Distance.
+ """
+ #ipdb.set_trace()
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert mu1.shape == mu2.shape, \
+ 'Training and test mean vectors have different lengths'
+ assert sigma1.shape == sigma2.shape, \
+ 'Training and test covariances have different dimensions'
+
+ diff = mu1 - mu2
+
+ # Product might be almost singular
+ covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = ('fid calculation produces singular product; '
+ 'adding %s to diagonal of cov estimates') % eps
+ print(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # Numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError('Imaginary component {}'.format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return (diff.dot(diff) + np.trace(sigma1)
+ + np.trace(sigma2) - 2 * tr_covmean)
+
+
+def calculate_activation_statistics(files, model, batch_size=50, dims=2048,
+ device='cpu', num_workers=1,reso=128):
+ """Calculation of the statistics used by the FID.
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : The images numpy array is split into batches with
+ batch size batch_size. A reasonable batch size
+ depends on the hardware.
+ -- dims : Dimensionality of features returned by Inception
+ -- device : Device to run calculations
+ -- num_workers : Number of parallel dataloader workers
+
+ Returns:
+ -- mu : The mean over samples of the activations of the pool_3 layer of
+ the inception model.
+ -- sigma : The covariance matrix of the activations of the pool_3 layer of
+ the inception model.
+ """
+ act = get_activations(files, model, batch_size, dims, device, num_workers,reso=reso)
+ mu = np.mean(act, axis=0)
+ sigma = np.cov(act, rowvar=False)
+ return mu, sigma
+
+
+def compute_statistics_of_path(path, model, batch_size, dims, device,
+ num_workers=1,reso=512,dataset='gso'):
+ basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/gso_gt"
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid-withtop/gso_gt"
+
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/fid-withtop/gso_gt"
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-/metrics/fid-withtop/gso_gt"
+ os.makedirs(os.path.join(basepath), exist_ok=True)
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600][:]
+ # all_objs = all_objs[100:600]
+ # all_objs = all_objs[:500]
+
+
+ # if 'shapenet' in dataset:
+ # if 'shapenet' in dataset:
+
+ try:
+ try:
+
+ m=np.load(os.path.join(basepath,path.split('/')[-1]+str(reso)+'mean.npy'))
+ s=np.load(os.path.join(basepath,path.split('/')[-1]+str(reso)+'std.npy'))
+ print('loading_dataset',dataset)
+ except:
+ files=[]
+ # ! load instances for I23D inference
+ # for obj_folder in tqdm(sorted(os.listdir(path))):
+ # for idx in range(0,25):
+ # img_name = os.path.join(path, obj_folder, 'rgba', f'{idx:03}.png')
+ # files.append(img_name)
+
+ # ! free3d rendering
+ # for obj_folder in tqdm(sorted(os.listdir(path))):
+ # for idx in range(0,25):
+ # # img_name = os.path.join(path, obj_folder, 'rgba', f'{idx:03}.png')
+ # img_name = os.path.join(path, obj_folder, 'render_mvs_25', 'model', f'{idx:03}.png')
+ # files.append(img_name)
+
+ # ! objv loading
+ for obj_folder in tqdm(all_objs):
+ obj_folder = obj_folder[:-2] # to load 3 chunks
+ for batch in range(1,4):
+ for idx in range(8):
+ files.append(os.path.join(path, obj_folder, str(batch), f'{idx}.jpg'))
+
+ # for name in os.listdir(path):
+ # #ipdb.set_trace()
+ # # if name not in false1: #and name not in false2 and name not in false3:
+ # if name in false1: #and name not in false2 and name not in false3:
+ # img=os.path.join(path,name,'rgb')
+ # #ipdb.set_trace()
+ # files = files+sorted([os.path.join(img, idd) for idd in os.listdir(img) if idd.endswith('.png')])
+
+ if len(files) > 50000:
+ files = files[:50000]
+ break
+
+ #files=files[:5]
+ m, s = calculate_activation_statistics(files, model, batch_size,
+ dims, device, num_workers,reso=reso)
+ path = Path(path)
+ # ipdb.set_trace()
+ np.save(os.path.join(basepath,path.name+str(reso)+'mean'), m)
+ np.save(os.path.join(basepath,path.name+str(reso)+'std'), s)
+ except Exception as e:
+ print(f'{dataset} failed, ', e)
+
+
+ return m, s
+
+
+def compute_statistics_of_path_new(path, model, batch_size, dims, device,
+ num_workers=1,reso=128,dataset='omni'):
+ # basepath='/mnt/lustre/yslan/logs/nips23/LSGM/cldm/cmetric/shapenet-outs/fid'+str(reso)+'test'+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/metrics/fid/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/fid/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/'+str(reso)+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid-subset/'+str(reso)+dataset
+
+ basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/'+str(reso)+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid-withtop/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/fid/'+str(reso)+dataset
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ os.makedirs(os.path.join(basepath), exist_ok=True)
+ sample_name=path.split('/')[-1]
+
+ try:
+ try:
+ # ipdb.set_trace()
+ m=np.load(os.path.join(basepath,sample_name+str(reso)+'mean.npy'))
+ s=np.load(os.path.join(basepath,sample_name+str(reso)+'std.npy'))
+ print('loading_sample')
+ except Exception:
+ files=[]
+
+ # for name in os.listdir(path):
+ # img=os.path.join(path,name)
+ # files.append(img) # ! directly append
+
+ # for loading gso-like folder
+ # st()
+ # for obj_folder in sorted(os.listdir(path)):
+
+ # if obj_folder == 'runs':
+ # continue
+
+ # if not os.path.isdir(os.path.join(path, obj_folder)):
+ # continue
+
+ # for idx in [0]:
+ # for i in range(24):
+ # if 'GA' in path:
+ # img=os.path.join(path,obj_folder, str(idx),f'sample-0-{i}.jpg')
+ # else:
+ # img=os.path.join(path,obj_folder, str(idx),f'{i}.jpg')
+ # # ipdb.set_trace()
+ # files.append(img)
+
+ for obj_folder in tqdm(all_objs):
+ obj_folder = '/'.join(obj_folder.split('/')[1:])
+ for idx in range(24):
+ # files.append(os.path.join(path, obj_folder, f'{idx}.jpg'))
+ if 'Lara' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', f'{idx}.jpg'))
+ elif 'GA' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'sample-0-{idx}.jpg'))
+ elif 'scale3d' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '1', f'{idx}.png'))
+ elif 'LRM' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))
+ else:
+ files.append(os.path.join(path, obj_folder, '0', f'{idx}.jpg'))
+
+
+ files=files[:50000]
+ m, s = calculate_activation_statistics(files, model, batch_size,
+ dims, device, num_workers,reso=reso)
+ path = Path(path)
+ np.save(os.path.join(basepath,sample_name+str(reso)+'mean'), m)
+ np.save(os.path.join(basepath,sample_name+str(reso)+'std'), s)
+ except Exception as e:
+ print('error sample image', e)
+
+ #ipdb.set_trace()
+
+ return m, s
+
+
+def calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1,reso=128,dataset='omni'):
+ """Calculates the FID of two paths"""
+ # for p in paths:
+ # if not os.path.exists(p):
+ # raise RuntimeError('Invalid path: %s' % p)
+
+ block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+
+ model = InceptionV3([block_idx]).to(device)
+
+ m1, s1 = compute_statistics_of_path(paths[0], model, batch_size, # ! GT data
+ dims, device, num_workers,reso=reso,dataset=dataset)
+ # ipdb.set_trace()
+ m2, s2 = compute_statistics_of_path_new(paths[1], model, batch_size, # ! generated data
+ dims, device, num_workers,reso=reso,dataset=dataset)
+ fid_value = calculate_frechet_distance(m1, s1, m2, s2)
+
+ return fid_value
+
+
+def save_fid_stats(paths, batch_size, device, dims, num_workers=1):
+ """Calculates the FID of two paths"""
+ # if not os.path.exists(paths[0]):
+ # raise RuntimeError('Invalid path: %s' % paths[0])
+
+ # if os.path.exists(paths[1]):
+ # raise RuntimeError('Existing output file: %s' % paths[1])
+
+ block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+
+ model = InceptionV3([block_idx]).to(device)
+
+ print(f"Saving statistics for {paths[0]}")
+
+ m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,
+ dims, device, num_workers)
+
+ np.savez_compressed(paths[1], mu=m1, sigma=s1)
+
+
+def main():
+ args = parser.parse_args()
+
+ if args.device is None:
+ device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')
+ else:
+ device = torch.device(args.device)
+
+ if args.num_workers is None:
+ try:
+ num_cpus = len(os.sched_getaffinity(0))
+ except AttributeError:
+ # os.sched_getaffinity is not available under Windows, use
+ # os.cpu_count instead (which may not return the *available* number
+ # of CPUs).
+ num_cpus = os.cpu_count()
+
+ num_workers = min(num_cpus, 8) if num_cpus is not None else 0
+ else:
+ num_workers = args.num_workers
+
+ if args.save_stats:
+ save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers)
+ return
+ #ipdb.set_trace()
+ fid_value = calculate_fid_given_paths(args.path,
+ args.batch_size,
+ device,
+ args.dims,
+ num_workers,args.reso,args.dataset)
+ print(f'{args.dataset} FID: ', fid_value)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/evaluations/fidkid-pytorch/fps-sample_pcd4096.sh b/evaluations/fidkid-pytorch/fps-sample_pcd4096.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ff3179f701ab795795908ed5300c63e7f84dd034
--- /dev/null
+++ b/evaluations/fidkid-pytorch/fps-sample_pcd4096.sh
@@ -0,0 +1,67 @@
+set -x
+
+# n_proc=4
+# n_points=4096
+# focal=525
+
+# pre-requisites: kaolin and nvdiffrast
+# pip install kaolin==0.16.0 -f https://nvidia-kaolin.s3.us-east-2.amazonaws.com/torch-2.3.0_cu118.html
+
+n_proc=1
+n_points=4096
+focal=24 # as in blender config
+
+
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/02958343/fffb1660a38af30ba4cf3601fb6b2442/models/
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/02958343/
+# save_root=/mnt/lustre/yslan/3D_Dataset/shapenet/pcd/car
+
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/03001627/
+# save_root=/mnt/lustre/yslan/3D_Dataset/shapenet/pcd-full/chair
+
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/02691156/
+# save_root=/mnt/lustre/yslan/3D_Dataset/shapenet/pcd/plane
+
+# ! render gt
+# shape_root=/mnt/sfs-common/yslan/Dataset/GSO/gso-unzip/gso-unzip
+# save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics/gso
+
+# ! use FPS later:
+# splatter-img LGM
+
+# for method_name in ln3diff One-2-3-45 OpenLRM shape-e Lara CRM splatter-img
+# for method_name in ln3diff One-2-3-45 OpenLRM
+# for method_name in shape-e Lara CRM splatter-img
+# for method_name in CRM splatter-img
+
+# for method_name in splatter-img LGM
+
+# for method_name in splatter-img/Animals LGM/Animals
+# for method_name in splatter-img LGM
+
+for method_name in LGM_fixpose
+
+do
+
+# shape_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/${method_name}
+# save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics/${method_name}
+
+shape_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/${method_name}
+save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/${method_name}
+
+python fps_gs.py \
+ --shape_root ${shape_root} \
+ --save_root ${save_root} \
+ --n_proc ${n_proc} \
+ --n_points ${n_points} \
+ --image_height 512 \
+ --image_width 512 \
+ --focal_length_x ${focal} \
+ --focal_length_y ${focal} \
+ --principal_point_x 256 \
+ --principal_point_y 256 \
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/fps_gs.py b/evaluations/fidkid-pytorch/fps_gs.py
new file mode 100644
index 0000000000000000000000000000000000000000..902b7022777200ff6e11c26482e5cd78f369b5b5
--- /dev/null
+++ b/evaluations/fidkid-pytorch/fps_gs.py
@@ -0,0 +1,444 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+import json
+import math
+import numpy as np
+import os
+import argparse
+import multiprocessing as mp
+from multiprocessing import Pool
+import trimesh
+import tqdm
+import torch
+import nvdiffrast.torch as dr
+import kaolin as kal
+import glob
+import ipdb
+import pytorch3d.ops
+
+parser = argparse.ArgumentParser(description='sample surface points from mesh')
+parser.add_argument(
+ '--n_proc', type=int, default=8,
+ help='Number of processes to run in parallel '
+ '(0 means sequential execution).')
+parser.add_argument(
+ '--n_points', type=int, default=5000,
+ help='Number of points to sample per model.')
+parser.add_argument(
+ '--n_views', type=int, default=100,
+ help='Number of views per model.')
+parser.add_argument(
+ '--image_height', type=int, default=640,
+ help='Depth image height.')
+parser.add_argument(
+ '--image_width', type=int, default=640,
+ help='Depth image width.')
+parser.add_argument(
+ '--focal_length_x', type=float, default=640,
+ help='Focal length in x direction.')
+parser.add_argument(
+ '--focal_length_y', type=float, default=640,
+ help='Focal length in y direction.')
+parser.add_argument(
+ '--principal_point_x', type=float, default=320,
+ help='Principal point location in x direction.')
+parser.add_argument(
+ '--principal_point_y', type=float, default=320,
+ help='Principal point location in y direction.')
+parser.add_argument("--shape_root", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel27_omni_ablation2/ddpm_5000/test', help="path to the save resules shapenet dataset")
+parser.add_argument("--save_root", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel27_omni_ablation2/ddpm_vis_ab2surface', help="path to the split shapenet dataset")
+
+options = parser.parse_args()
+
+# create array for inverse mapping
+coordspx2 = np.stack(np.nonzero(np.ones((options.image_height, options.image_width))), -1).astype(np.float32)
+coordspx2 = coordspx2[:, ::-1]
+fusion_intrinsics = np.array(
+ [
+ [options.focal_length_x, 0, options.principal_point_x],
+ [0, options.focal_length_y, options.principal_point_y],
+ [0, 0, 1]
+ ])
+# glctx = dr.RasterizeGLContext() # EGL/egl.h: No such file or directory
+glctx = dr.RasterizeCudaContext()
+
+
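+# convert a nonlinear z-buffer depth sample to linear eye-space depth for the
+# hard-coded near/far planes below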
+def CalcLinearZ(depth):
+ # depth = depth * 2 - 1
+ zFar = 100.0
+ zNear = 0.1
+ linear = zNear / (zFar - depth * (zFar - zNear)) * zFar
+ return linear
+
+
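+# build an OpenGL-style clip-space projection matrix from OpenCV pinhole
+# intrinsics (fx, fy, cx, cy); n and f are the near/far clip planes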
+def projection_cv_new(fx, fy, cx, cy, width, height, n=1.0, f=50.0):
+ return np.array(
+ [[-2 * fx / width, 0.0, (width - 2 * cx) / width, 0.0],
+ [0.0, -2 * fy / height, (height - 2 * cy) / height, 0.0],
+ [0.0, 0.0, (-f - n) / (f - n), -2.0 * f * n / (f - n)],
+ [0.0, 0.0, -1.0, 0.0]])
+
+
+def interpolate(attr, rast, attr_idx, rast_db=None):
+ return dr.interpolate(
+ attr.contiguous(), rast, attr_idx, rast_db=rast_db,
+ diff_attrs=None if rast_db is None else 'all')
+
+
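+# rasterize the mesh with nvdiffrast under each world-to-camera transform and
+# return per-pixel absolute camera-space depth maps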
+def render_nvdiffrast(v_pos, tris, T_bx4x4):
+ # T_bx4x4 - world to cam
+ proj = projection_cv_new(
+ fx=options.focal_length_x, fy=options.focal_length_y, cx=options.principal_point_x,
+ cy=options.principal_point_y,
+ width=options.image_width, height=options.image_height, n=0.1, f=100.0)
+
+ fix = torch.eye(4, dtype=torch.float32, device='cuda')
+ fix[2, 2] = -1
+ fix[1, 1] = -1
+ fix[0, 0] = -1
+ fix = fix.unsqueeze(0).repeat(T_bx4x4.shape[0], 1, 1)
+
+ proj = torch.tensor(proj, dtype=torch.float32, device='cuda').unsqueeze(0).repeat(T_bx4x4.shape[0], 1, 1)
+ T_world_cam_bx4x4 = torch.bmm(fix, T_bx4x4)
+ mvp = torch.bmm(proj, T_world_cam_bx4x4)
+ v_pos_clip = torch.matmul(
+ torch.nn.functional.pad(v_pos, pad=(0, 1), mode='constant', value=1.0),
+ torch.transpose(mvp, 1, 2))
+ rast, db = dr.rasterize(
+ glctx, torch.tensor(v_pos_clip, dtype=torch.float32, device='cuda'), tris.int(),
+ (options.image_height, options.image_width))
+
+ v_pos_cam = torch.matmul(
+ torch.nn.functional.pad(v_pos, pad=(0, 1), mode='constant', value=1.0),
+ torch.transpose(T_world_cam_bx4x4, 1, 2))
+ gb_pos_cam, _ = interpolate(v_pos_cam, rast, tris.int())
+ depth_maps = gb_pos_cam[..., 2].abs()
+ return depth_maps
+
+
+def as_mesh(scene_or_mesh):
+ """
+ Convert a possible scene to a mesh.
+
+ If conversion occurs, the returned mesh has only vertex and face data.
+ """
+ if isinstance(scene_or_mesh, trimesh.Scene):
+ if len(scene_or_mesh.geometry) == 0:
+ mesh = None # empty scene
+ else:
+ # we lose texture information here
+ mesh = trimesh.util.concatenate(
+ tuple(
+ trimesh.Trimesh(vertices=g.vertices, faces=g.faces)
+ for g in scene_or_mesh.geometry.values()))
+ else:
+ assert (isinstance(scene_or_mesh, trimesh.Trimesh))
+ mesh = scene_or_mesh
+ return mesh
+
+
+def render(mesh_v, mesh_f, Rs):
+ """
+ Render the given mesh using the generated views.
+
+ :param base_mesh: mesh to render
+ :type base_mesh: mesh.Mesh
+ :param Rs: rotation matrices
+ :type Rs: [numpy.ndarray]
+ :return: depth maps
+ :rtype: numpy.ndarray
+ """
+ T_bx4x4 = torch.zeros((options.n_views, 4, 4), dtype=torch.float32, device='cuda')
+ T_bx4x4[:, 3, 3] = 1
+ T_bx4x4[:, 2, 3] = 1
+ T_bx4x4[:, :3, :3] = torch.tensor(Rs, dtype=torch.float32, device='cuda')
+ depthmaps = render_nvdiffrast(
+ mesh_v,
+ mesh_f, T_bx4x4)
+ return depthmaps
+
+
+def get_points():
+ """
+ :param n_points: number of points
+ :type n_points: int
+ :return: list of points
+ :rtype: numpy.ndarray
+ """
+
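+ # Fibonacci (golden-angle) spiral: distributes n_views near-uniform points on the unit sphere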
+ rnd = 1.
+ points = []
+ offset = 2. / options.n_views
+ increment = math.pi * (3. - math.sqrt(5.))
+
+ for i in range(options.n_views):
+ y = ((i * offset) - 1) + (offset / 2)
+ r = math.sqrt(1 - pow(y, 2))
+
+ phi = ((i + rnd) % options.n_views) * increment
+
+ x = math.cos(phi) * r
+ z = math.sin(phi) * r
+
+ points.append([x, y, z])
+ return np.array(points)
+
+
+def get_views(semi_sphere=False):
+ """
+ Generate a set of views to generate depth maps from.
+
+ :param n_views: number of views per axis
+ :type n_views: int
+ :return: rotation matrices
+ :rtype: [numpy.ndarray]
+ """
+
+ Rs = []
+ points = get_points()
+ if semi_sphere:
+ points[:, 2] = -np.abs(points[:, 2]) - 0.1
+
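+ # turn each sampled direction into a camera rotation from its longitude/latitude angles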
+ for i in range(points.shape[0]):
+ longitude = - math.atan2(points[i, 0], points[i, 1])
+ latitude = math.atan2(points[i, 2], math.sqrt(points[i, 0] ** 2 + points[i, 1] ** 2))
+
+ R_x = np.array(
+ [[1, 0, 0],
+ [0, math.cos(latitude), -math.sin(latitude)],
+ [0, math.sin(latitude), math.cos(latitude)]])
+ R_y = np.array(
+ [[math.cos(longitude), 0, math.sin(longitude)],
+ [0, 1, 0],
+ [-math.sin(longitude), 0, math.cos(longitude)]])
+ R = R_x @ R_y
+ Rs.append(R)
+
+ return Rs
+
+
+def fusion(depthmaps, Rs):
+ """
+ Fuse the rendered depth maps.
+
+ :param depthmaps: depth maps
+ :type depthmaps: numpy.ndarray
+ :param Rs: rotation matrices corresponding to views
+ :type Rs: [numpy.ndarray]
+ :return: (T)SDF
+ :rtype: numpy.ndarray
+ """
+
+ # sample points inside mask
+ sample_per_view = options.n_points // options.n_views
+ sample_bxn = torch.zeros((options.n_views, sample_per_view), device='cuda', dtype=torch.long)
+ for i in range(len(Rs)):
+ mask = depthmaps[i] > 0
+ valid_idx = torch.nonzero(mask.reshape(-1)).squeeze(-1)
+ idx = list(range(valid_idx.shape[0]))
+ np.random.shuffle(idx)
+ idx = idx[:sample_per_view]
+ sample_bxn[i] = torch.tensor(valid_idx[idx])
+
+ depthmaps = torch.gather(depthmaps.reshape(options.n_views, -1), 1, sample_bxn)
+
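+ # back-project the sampled pixels: scale homogeneous pixel coords by depth,
+ # lift to camera space with K^-1, then to world space with the inverse extrinsics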
+ inv_Ks_bx3x3 = torch.tensor(np.linalg.inv(fusion_intrinsics), dtype=torch.float32, device='cuda').unsqueeze(
+ 0).repeat(options.n_views, 1, 1)
+ T_bx4x4 = torch.zeros((options.n_views, 4, 4), dtype=torch.float32, device='cuda')
+ T_bx4x4[:, 3, 3] = 1
+ T_bx4x4[:, 2, 3] = 1
+ T_bx4x4[:, :3, :3] = torch.tensor(Rs, dtype=torch.float32, device='cuda')
+ inv_T_bx4x4 = torch.inverse(T_bx4x4)
+
+ tf_coords_bxpx2 = torch.tensor(coordspx2.copy(), dtype=torch.float32, device='cuda').unsqueeze(0).repeat(
+ options.n_views, 1, 1)
+ tf_coords_bxpx2 = torch.gather(tf_coords_bxpx2, 1, sample_bxn.unsqueeze(-1).repeat(1, 1, 2))
+
+ tf_coords_bxpx3 = torch.cat([tf_coords_bxpx2, torch.ones_like(tf_coords_bxpx2[..., :1])], -1)
+ tf_coords_bxpx3 *= depthmaps.reshape(options.n_views, -1, 1)
+ tf_cam_bxpx3 = torch.bmm(inv_Ks_bx3x3, tf_coords_bxpx3.transpose(1, 2)).transpose(1, 2)
+ tf_cam_bxpx4 = torch.cat([tf_cam_bxpx3, torch.ones_like(tf_cam_bxpx3[..., :1])], -1)
+ tf_world_bxpx3 = torch.bmm(inv_T_bx4x4, tf_cam_bxpx4.transpose(1, 2)).transpose(1, 2)[..., :3]
+
+ return tf_world_bxpx3.reshape(-1, 3)
+
+
+def normalize(vertices, faces, normalized_scale=0.9, rotate_x=False):
+ vertices = vertices.cuda()
+
+ if rotate_x: # rotate about the x axis to align the two coordinate conventions
+ rot_mat = torch.eye(n=3, device='cuda')
+ theta = np.pi / 90 # NOTE: pi/90 rad = 2 degrees (not a 90-degree rotation)
+ rot_mat[1,1] = np.cos(theta)
+ rot_mat[2,2] = np.cos(theta)
+ rot_mat[1,2] =-np.sin(theta)
+ rot_mat[2,1] = np.sin(theta)
+ # ipdb.set_trace()
+ vertices = rot_mat @ vertices.transpose(0,1)
+ vertices = vertices.transpose(0,1)
+
+ scale = (vertices.max(dim=0)[0] - vertices.min(dim=0)[0]).max()
+ mesh_v1 = vertices / scale * normalized_scale
+ mesh_f1 = faces.long().cuda()
+ return mesh_v1, mesh_f1
+
+
+
+def sample_surface_pts(path):
+ # ipdb.set_trace()
+ try:
+ mesh_path, output_pth, debug = path
+ # mesh = kal.io.obj.import_mesh(mesh_path)
+ # ipdb.set_trace()
+ mesh = trimesh.load(mesh_path) # note: trimesh may fail on some .ply files; errors are caught by the outer try
+ #ipdb.set_trace()
+ if mesh.vertices.shape[0] == 0:
+ return
+ mesh_v = torch.Tensor(mesh.vertices)
+ mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=0.9, rotate_x=True)
+
+ # generate camera matrices
+ # Rs = get_views()
+ # Rs = get_views(semi_sphere=True)
+ Rs = get_views(semi_sphere=False)
+ # get depth images
+ depths = render(mesh_v, mesh_f, Rs)
+ # project to world space
+ try:
+ pcd = fusion(depths, Rs)
+ except Exception:
+ return
+ pcd = pcd.cpu().numpy()
+
+ #np.savez(output_pth, pcd=pcd)
+ #ipdb.set_trace()
+ #if debug:
+ pcd = trimesh.points.PointCloud(pcd)
+ pcd.export(output_pth.replace('.npz', '.obj'))
+ except Exception as e:
+ # print('error')
+ print(e, flush=True)
+
+
+if __name__ == '__main__':
+ mp.set_start_method('spawn')
+
+ shapenet_root = options.shape_root
+ save_root = options.save_root
+
+ debug = True
+ #model_list = sorted(os.listdir(shapenet_root))[:7500]
+
+ # model_list=glob.glob(os.path.join(shapenet_root, '*.obj'))
+ # os.makedirs(save_root, exist_ok=True)
+
+ # cmds = [(os.path.join(shapenet_root, id.split('/')[-1]), os.path.join(save_root, id.split('/')[-1]), debug) for id in model_list]
+
+ # cmds = [(os.path.join(shapenet_root, id.split('/')[-1]), os.path.join(save_root, 'pcd_4096.ply'), debug) for id in model_list]
+
+ # cmds += [(os.path.join(shapenet_root, id.split('/')[-1]), os.path.join(save_root, 'test.obj'), debug) for id in model_list]
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ cmds = []
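+ # NOTE: nothing appends to cmds in this objv configuration; the loop below does
+ # the FPS sampling inline, so the Pool dispatch at the bottom is a no-op here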
+
+ # for instance_name in os.listdir(shapenet_root)[:]:
+ # cmds += [(os.path.join(shapenet_root, instance_name), os.path.join(save_root, f'{instance_name.split(".")[0]}_pcd_4096.ply'), debug)]
+
+ # ! for gt
+ # for obj_folder in sorted(os.listdir(shapenet_root)):
+ # cmds += [(os.path.join(shapenet_root, obj_folder, 'meshes/model.obj'), os.path.join(save_root, f'{obj_folder}_pcd_4096.ply'), debug)]
+
+ # ! for baseline samples
+
+ os.makedirs(save_root, exist_ok=True)
+
+ # ! free3d
+ # for obj_folder in tqdm.tqdm(sorted(os.listdir(shapenet_root))):
+ # if not os.path.isdir(os.path.join(shapenet_root, obj_folder)):
+ # continue
+
+ # if 'LGM' in shapenet_root:
+ # gs_path = os.path.join(shapenet_root,obj_folder, f'0gaussian.ply')
+ # else: # splatter-img
+ # gs_path = os.path.join(shapenet_root,obj_folder, f'0/mesh.ply')
+
+ # pcd = trimesh.load(gs_path).vertices # unsqueeze()
+ # fps_pcd, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # # torch.from_numpy(pcd).unsqueeze(0).cuda(), K=4096,
+ # torch.from_numpy(pcd).unsqueeze(0).cuda(), K=4000,
+ # random_start_point=True) # B self.latent_num
+ # # assert fps_pcd.shape[1] == 4096
+
+ # pcd = trimesh.points.PointCloud(fps_pcd[0].cpu().numpy())
+ # output_path = os.path.join(save_root, f'{obj_folder}_pcd_4096.ply')
+ # pcd.export(output_path.replace('.npz', '.obj'))
+
+ # objv
+ # for obj_folder in tqdm.tqdm(sorted(os.listdir(all_objs))):
+ for obj_folder in tqdm.tqdm(all_objs):
+ # ipdb.set_trace()
+
+ if not os.path.isdir(os.path.join(shapenet_root, obj_folder)):
+ continue
+
+ save_name = '-'.join(obj_folder.split('/'))
+
+
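+ # pick the per-method Gaussian/mesh path, then farthest-point-sample the loaded
+ # vertices down to a fixed-size point cloud for the 3D metrics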
+ if 'LGM' in shapenet_root:
+ gs_path = os.path.join(shapenet_root,obj_folder, f'0gaussian.ply')
+ else: # splatter-img
+ gs_path = os.path.join(shapenet_root,obj_folder, f'0/mesh.ply')
+
+ pcd = trimesh.load(gs_path).vertices # unsqueeze()
+ fps_pcd, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # torch.from_numpy(pcd).unsqueeze(0).cuda(), K=4096,
+ torch.from_numpy(pcd).unsqueeze(0).cuda(), K=4000,
+ random_start_point=True) # B self.latent_num
+ # assert fps_pcd.shape[1] == 4096
+
+ pcd = trimesh.points.PointCloud(fps_pcd[0].cpu().numpy())
+ output_path = os.path.join(save_root, f'{save_name}_pcd_4096.ply')
+ pcd.export(output_path.replace('.npz', '.obj'))
+
+
+ # ! lgm
+
+ # for idx in [0]:
+ # for i in range(10):
+ # img=os.path.join(shapenet_root,obj_folder, str(idx),f'{i}.jpg')
+ # img=os.path.join(path,obj_folder, str(idx),f'sample-0-{i}.jpg')
+ # files.append(img)
+
+ # if 'CRM' in shapenet_root:
+ # # ipdb.set_trace()
+ # mesh_path = glob.glob(os.path.join(shapenet_root, obj_folder, f'{idx}', '*.obj'))[0]
+ # else:
+
+ # if os.path.exists((os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.obj'))):
+ # mesh_path = os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.obj')
+ # else:
+ # mesh_path = os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.ply')
+
+ # cmds += [(mesh_path, os.path.join(save_root, f'{obj_folder}_pcd_4096.ply'), debug)]
+
+
+ if options.n_proc == 0:
+ for filepath in tqdm.tqdm(cmds):
+ sample_surface_pts(filepath)
+ else:
+ with Pool(options.n_proc) as p:
+ list(tqdm.tqdm(p.imap(sample_surface_pts, cmds), total=len(cmds)))
diff --git a/evaluations/fidkid-pytorch/kid_gen3d_baseline.sh b/evaluations/fidkid-pytorch/kid_gen3d_baseline.sh
new file mode 100644
index 0000000000000000000000000000000000000000..17fd01f125d4a59978ec8bdd0829ba3c106f6bfa
--- /dev/null
+++ b/evaluations/fidkid-pytorch/kid_gen3d_baseline.sh
@@ -0,0 +1,43 @@
+set -x
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+# gso_rendering="gso-rendering"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir
+
+# method_name=LGM
+# for method_name in LGM CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD
+
+
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv
+gso_rendering="/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K"
+
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+# ! objv dataset eval
+# for method_name in Lara CRM/Animals ln3diff-lite/Animals GA/stage-2/dino_img/ditl-fromditxlPCD One-2-3-45/Animals shape-e/Animals splatter-img/Animals OpenLRM/Animals LGM/Animals
+# LGM_fixpose/Animals
+# for method_name in One-2-3-45/Animals
+
+# for method_name in ln3diff-fixpose_192 CRM LGM_fixpose One-2-3-45/Animals Lara CRM/Animals ln3diff-lite/Animals GA/stage-2/dino_img/ditl-fromditxlPCD One-2-3-45/Animals shape-e/Animals splatter-img/Animals OpenLRM/Animals LGM/Animals
+
+# for method_name in GA-fixpose
+
+# for method_name in scale3d/eval/eval_nerf/Animals scale3d/eval/eval_mesh/Animals
+
+for method_name in LGM_fixpose/Animals Lara CRM/Animals ln3diff-lite/Animals GA/stage-2/dino_img/ditl-fromditxlPCD One-2-3-45/Animals shape-e/Animals splatter-img/Animals OpenLRM/Animals
+
+do
+
+python kid_score_gso.py \
+ --true $gso_rendering \
+ --fake ${output_path}/${method_name} \
+ --dataset ${method_name} \
+ --reso 512 \
+ --gpu 0 \
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/kid_score_gso.py b/evaluations/fidkid-pytorch/kid_score_gso.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a7ac1372ac82d83f13ed10f3382d05ead183b15
--- /dev/null
+++ b/evaluations/fidkid-pytorch/kid_score_gso.py
@@ -0,0 +1,525 @@
+#!/usr/bin/env python3
+"""Calculates the Kernel Inception Distance (KID) to evalulate GANs
+"""
+import os
+import pathlib
+import sys
+from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
+
+import json
+import numpy as np
+import torch
+from sklearn.metrics.pairwise import polynomial_kernel
+from scipy import linalg
+from PIL import Image
+from torch.nn.functional import adaptive_avg_pool2d
+import ipdb
+try:
+ from tqdm import tqdm
+except ImportError:
+ # If tqdm is not available, provide a mock version of it
+ def tqdm(x): return x
+import cv2
+from models.inception import InceptionV3
+from models.lenet import LeNet5
+import glob
+import pathlib
+
+def get_activations(files, model, batch_size=50, dims=2048,
+ cuda=False, verbose=False,reso=128):
+ """Calculates the activations of the pool_3 layer for all images.
+
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : Batch size of images for the model to process at once.
+ Make sure that the number of samples is a multiple of
+ the batch size, otherwise some samples are ignored. This
+ behavior is retained to match the original FID score
+ implementation.
+ -- dims : Dimensionality of features returned by Inception
+ -- cuda : If set to True, use GPU
+ -- verbose : If set to True and parameter out_step is given, the number
+ of calculated batches is reported.
+ Returns:
+ -- A numpy array of dimension (num images, dims) that contains the
+ activations of the given tensor when feeding inception with the
+ query tensor.
+ """
+ model.eval()
+
+ is_numpy = isinstance(files[0], np.ndarray)
+
+ if len(files) % batch_size != 0:
+ print(('Warning: number of images is not a multiple of the '
+ 'batch size. Some samples are going to be ignored.'))
+ if batch_size > len(files):
+ print(('Warning: batch size is bigger than the data size. '
+ 'Setting batch size to data size'))
+ batch_size = len(files)
+
+ n_batches = len(files) // batch_size
+ n_used_imgs = n_batches * batch_size
+
+ pred_arr = np.empty((n_used_imgs, dims))
+
+ for i in tqdm(range(n_batches)):
+ if verbose:
+ print('\rPropagating batch %d/%d' % (i + 1, n_batches), end='', flush=True)
+ start = i * batch_size
+ end = start + batch_size
+ if is_numpy:
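+ # numpy inputs are assumed to lie in [-1, 1]; shift/scale them to [0, 1]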
+ images = np.copy(files[start:end]) + 1
+ images /= 2.
+ else:
+ images=[]
+ #ipdb.set_trace()
+ for f in files[start:end]:
+ try:
+ img=cv2.imread(str(f))
+ #if img.mean(-1)>254.9:
+ #img[np.where(img.mean(-1)>254.9)]=0
+ img=cv2.resize(img,(reso,reso),interpolation=cv2.INTER_CUBIC)
+ img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
+ except Exception:
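+ # fallback: substitute the first image so the batch stays full; the
+ # offending path is printed below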
+ img=cv2.imread(str(files[0]))
+ #if img.mean(-1)>254.9:
+ #img[np.where(img.mean(-1)>254.9)]=0
+ img=cv2.resize(img,(reso,reso),interpolation=cv2.INTER_CUBIC)
+ img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
+ print(str(f))
+ #ipdb.set_trace()
+ images.append(img)
+ #ipdb.set_trace()
+
+
+ #images = [np.array(Image.open(str(f)).convert('RGB')) for f in files[start:end]]
+ images = np.stack(images).astype(np.float32) / 255.
+ # Reshape to (n_images, 3, height, width)
+ images = images.transpose((0, 3, 1, 2))
+ #ipdb.set_trace()
+
+
+ batch = torch.from_numpy(images).type(torch.FloatTensor)
+ if cuda:
+ batch = batch.cuda()
+
+ pred = model(batch)[0]
+
+ # If model output is not scalar, apply global spatial average pooling.
+ # This happens if you choose a dimensionality not equal 2048.
+ if pred.shape[2] != 1 or pred.shape[3] != 1:
+ pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
+
+ pred_arr[start:end] = pred.cpu().data.numpy().reshape(batch_size, -1)
+
+ if verbose:
+ print('done', np.min(images))
+
+ return pred_arr
+
+
+def extract_lenet_features(imgs, net):
+ net.eval()
+ feats = []
+ imgs = imgs.reshape([-1, 100] + list(imgs.shape[1:]))
+ if imgs[0].min() < -0.001:
+ imgs = (imgs + 1)/2.0
+ print(imgs.shape, imgs.min(), imgs.max())
+ imgs = torch.from_numpy(imgs)
+ for i, images in enumerate(imgs):
+ feats.append(net.extract_features(images).detach().cpu().numpy())
+ feats = np.vstack(feats)
+ return feats
+
+
+def _compute_activations(path, model, batch_size, dims, cuda, model_type,reso,dataset):
+ sample_name=path.split('/')[-1]
+ # basepath='/mnt/petrelfs/caoziang/3D_generation/cmetric/kid'+str(reso)
+ # basepath='/mnt/lustre/yslan/logs/nips23/LSGM/cldm/cmetric/shapenet-outs/kid'+str(reso)+'test'+dataset
+ # basepath='/mnt/lustre/yslan/logs/nips23/LSGM/cldm/cmetric/shapenet-outs-testTra/kid'+str(reso)+'test'+dataset
+
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/metrics/kid/gso_gt"
+ # basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/kid/gso_gt"
+
+ basepath="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/kid/gso_gt"
+
+ os.makedirs(os.path.join(basepath), exist_ok=True)
+ files=[]
+
+ #path = pathlib.Path(path)
+ #oripath=path
+
+ # ! objv dataset
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ for obj_folder in tqdm(all_objs):
+ obj_folder = obj_folder[:-2] # to load 3 chunks
+ for batch in range(1,4):
+ for idx in range(8):
+ files.append(os.path.join(path, obj_folder, str(batch), f'{idx}.jpg'))
+
+
+ # for obj_folder in tqdm(sorted(os.listdir(path))):
+ # for idx in range(0,25):
+ # # img_name = os.path.join(path, obj_folder, 'rgba', f'{idx:03}.png')
+ # img_name = os.path.join(path, obj_folder, 'render_mvs_25', 'model', f'{idx:03}.png')
+ # files.append(img_name)
+
+
+ '''
+ if not os.path.exists(os.path.join(basepath,path.split('/')[-1]+str(reso)+'kid.npy')):
+ import glob
+ import pathlib
+ path = pathlib.Path(path)
+ if not type(path) == np.ndarray:
+ files=[]
+
+ # load gso
+ for obj_folder in tqdm(sorted(os.listdir(path))):
+ for idx in range(0,25):
+ # for idx in [0]:
+ img_name = os.path.join(path, obj_folder, 'rgba', f'{idx:03}.png')
+ files.append(img_name)
+
+ if len(files) > 50000:
+ files = files[:50000]
+ break
+ '''
+
+ if model_type == 'inception':
+ if os.path.exists(os.path.join(basepath,sample_name+str(reso)+'kid.npy')):
+ act=np.load(os.path.join(basepath,sample_name+str(reso)+'kid.npy'))
+ print('load_dataset',dataset)
+ else:
+ act = get_activations(files, model, batch_size, dims, cuda,reso=reso)
+ np.save(os.path.join(basepath,sample_name+str(reso)+'kid'),act)
+ elif model_type == 'lenet':
+ act = extract_lenet_features(files, model)
+ #ipdb.set_trace()
+ return act
+
+
+def _compute_activations_new(path, model, batch_size, dims, cuda, model_type,reso,dataset):
+ sample_name=path.split('/')[-1]
+ # basepath='/mnt/petrelfs/caoziang/3D_generation/cmetric/get3d/kid'+str(reso)+'test'+dataset
+ # basepath='/mnt/lustre/yslan/logs/nips23/LSGM/cldm/cmetric/shapenet-outs/kid'+str(reso)+'test'+dataset
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/metrics/kid'+str(reso)+'test'+dataset
+ # if '_cond' in path:
+ # basepath=basepath+'_cond'
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/'+str(reso)+dataset
+ basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/kid/'+str(reso)+dataset
+
+ # basepath='/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d/metrics/kid/'+str(reso)+dataset
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ all_objs = dataset_json['Animals'][::3][1100:2200][:600]
+
+
+ if not isinstance(path, np.ndarray):
+ import glob
+ import pathlib
+
+ #ipdb.set_trace()
+
+ files=[]
+
+ # path = pathlib.Path(path)
+ # for classname in os.listdir(path):
+ # classpath=os.path.join(path,classname)
+ # #ipdb.set_trace()
+
+ # for instance in os.listdir(classpath):
+
+ # if os.path.isdir(os.path.join(classpath,instance)):
+ # img=os.path.join(classpath,instance)
+ # if 'diffusion' in img:
+ # files = files+sorted([os.path.join(img, idd) for idd in os.listdir(img) if idd.endswith('ddpm.png')])
+ # else:
+
+ '''
+ for obj_folder in sorted(os.listdir(path)):
+ if not os.path.isdir(os.path.join(path, obj_folder)):
+ continue
+ # for idx in os.listdir(os.path.join(path, obj_folder)):
+ # for idx in range(0,25,5):
+ for idx in [0]:
+ for i in range(10):
+ # img=os.path.join(path,obj_folder, str(idx),f'{i}.jpg')
+
+ if 'GA' in path:
+ img=os.path.join(path,obj_folder, str(idx),f'sample-0-{i}.jpg')
+ else:
+ img=os.path.join(path,obj_folder, str(idx),f'{i}.jpg')
+
+ files.append(img)
+ '''
+
+ # ! objv
+ for obj_folder in tqdm(all_objs):
+ obj_folder = '/'.join(obj_folder.split('/')[1:])
+ for idx in range(24):
+ # files.append(os.path.join(path, obj_folder, f'{idx}.jpg'))
+ if 'Lara' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', f'{idx}.jpg'))
+ elif 'GA' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'sample-0-{idx}.jpg'))
+ elif 'scale3d' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '1', f'{idx}.png'))
+ elif 'LRM' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))
+ else:
+ files.append(os.path.join(path, obj_folder, '0', f'{idx}.jpg'))
+
+ # ! gso
+ # for obj_folder in sorted(os.listdir(path)):
+
+ # if obj_folder == 'runs':
+ # continue
+
+ # if not os.path.isdir(os.path.join(path, obj_folder)):
+ # continue
+
+ # for idx in [0]:
+ # for i in range(24):
+ # if 'GA' in path:
+ # img=os.path.join(path,obj_folder, str(idx),f'sample-0-{i}.jpg')
+ # else:
+ # img=os.path.join(path,obj_folder, str(idx),f'{i}.jpg')
+ # # ipdb.set_trace()
+ # files.append(img)
+
+
+
+ # for name in os.listdir(path):
+ # #ipdb.set_trace()
+ # # if os.path.isdir(os.path.join(path,name)): # ! no cls
+
+ # img=os.path.join(path,name)
+ # files.append(img) # ! directly append
+
+ # files = files+sorted([os.path.join(img, idd) for idd in os.listdir(img) if idd.endswith('.png')])
+
+
+ # ipdb.set_trace()
+ files=files[:50000]
+
+ os.makedirs(os.path.join(basepath), exist_ok=True)
+
+ #ipdb.set_trace()
+ if model_type == 'inception':
+ if os.path.exists(os.path.join(basepath,sample_name+str(reso)+'kid.npy')):
+ act=np.load(os.path.join(basepath,sample_name+str(reso)+'kid.npy'))
+ print('load_sample')
+ else:
+ act = get_activations(files, model, batch_size, dims, cuda,reso=reso)
+ np.save(os.path.join(basepath,sample_name+str(reso)+'kid'),act)
+ elif model_type == 'lenet':
+ act = extract_lenet_features(files, model)
+ #ipdb.set_trace()
+ return act
+
+def calculate_kid_given_paths(paths, batch_size, cuda, dims, model_type='inception',reso=128,dataset='omni'):
+ """Calculates the KID of two paths"""
+ pths = []
+ for p in paths:
+ if not os.path.exists(p):
+ raise RuntimeError('Invalid path: %s' % p)
+ if os.path.isdir(p):
+ pths.append(p)
+ # elif p.endswith('.npy'):
+ # np_imgs = np.load(p)
+ # if np_imgs.shape[0] > 50000: np_imgs = np_imgs[np.random.permutation(np.arange(np_imgs.shape[0]))][:50000]
+ # pths.append(np_imgs)
+
+ if model_type == 'inception':
+ block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+ model = InceptionV3([block_idx])
+ elif model_type == 'lenet':
+ model = LeNet5()
+ model.load_state_dict(torch.load('./models/lenet.pth'))
+ if cuda:
+ model.cuda()
+
+ act_true = _compute_activations(pths[0], model, batch_size, dims, cuda, model_type,reso,dataset)
+ pths = pths[1:]
+ results = []
+ #ipdb.set_trace()
+ for j, pth in enumerate(pths):
+ print(paths[j+1])
+ actj = _compute_activations_new(pth, model, batch_size, dims, cuda, model_type,reso,dataset)
+ #ipdb.set_trace()
+ kid_values = polynomial_mmd_averages(act_true, actj, n_subsets=100)
+ results.append((paths[j+1], kid_values[0].mean(), kid_values[0].std()))
+ return results
+
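+# squared L2 norm of a flattened array (used by the MMD variance terms)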
+def _sqn(arr):
+ flat = np.ravel(arr)
+ return flat.dot(flat)
+
+
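+# KID core: average unbiased MMD^2 estimates (polynomial kernel) over n_subsets
+# random subsets of the two activation sets; their mean and std are reported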
+def polynomial_mmd_averages(codes_g, codes_r, n_subsets=50, subset_size=1000,
+ ret_var=True, output=sys.stdout, **kernel_args):
+ m = min(codes_g.shape[0], codes_r.shape[0])
+ mmds = np.zeros(n_subsets)
+ if ret_var:
+ vars = np.zeros(n_subsets)
+ choice = np.random.choice
+ #ipdb.set_trace()
+
+ with tqdm(range(n_subsets), desc='MMD', file=output) as bar:
+ for i in bar:
+
+ g = codes_g[choice(len(codes_g), subset_size, replace=False)]
+ r = codes_r[choice(len(codes_r), subset_size, replace=False)]
+ o = polynomial_mmd(g, r, **kernel_args, var_at_m=m, ret_var=ret_var)
+ if ret_var:
+ mmds[i], vars[i] = o
+ else:
+ mmds[i] = o
+ bar.set_postfix({'mean': mmds[:i+1].mean()})
+ return (mmds, vars) if ret_var else mmds
+
+
+def polynomial_mmd(codes_g, codes_r, degree=3, gamma=None, coef0=1,
+ var_at_m=None, ret_var=True):
+ # use k(x, y) = (gamma * <x, y> + coef0)^degree
+ # default gamma is 1 / dim
+ X = codes_g
+ Y = codes_r
+
+ K_XX = polynomial_kernel(X, degree=degree, gamma=gamma, coef0=coef0)
+ K_YY = polynomial_kernel(Y, degree=degree, gamma=gamma, coef0=coef0)
+ K_XY = polynomial_kernel(X, Y, degree=degree, gamma=gamma, coef0=coef0)
+
+ return _mmd2_and_variance(K_XX, K_XY, K_YY,
+ var_at_m=var_at_m, ret_var=ret_var)
+
+def _mmd2_and_variance(K_XX, K_XY, K_YY, unit_diagonal=False,
+ mmd_est='unbiased', block_size=1024,
+ var_at_m=None, ret_var=True):
+ # based on
+ # https://github.com/dougalsutherland/opt-mmd/blob/master/two_sample/mmd.py
+ # but changed to not compute the full kernel matrix at once
+ m = K_XX.shape[0]
+ assert K_XX.shape == (m, m)
+ assert K_XY.shape == (m, m)
+ assert K_YY.shape == (m, m)
+ if var_at_m is None:
+ var_at_m = m
+
+ # Get the various sums of kernels that we'll use
+ # Kts drop the diagonal, but we don't need to compute them explicitly
+ if unit_diagonal:
+ diag_X = diag_Y = 1
+ sum_diag_X = sum_diag_Y = m
+ sum_diag2_X = sum_diag2_Y = m
+ else:
+ diag_X = np.diagonal(K_XX)
+ diag_Y = np.diagonal(K_YY)
+
+ sum_diag_X = diag_X.sum()
+ sum_diag_Y = diag_Y.sum()
+
+ sum_diag2_X = _sqn(diag_X)
+ sum_diag2_Y = _sqn(diag_Y)
+
+ Kt_XX_sums = K_XX.sum(axis=1) - diag_X
+ Kt_YY_sums = K_YY.sum(axis=1) - diag_Y
+ K_XY_sums_0 = K_XY.sum(axis=0)
+ K_XY_sums_1 = K_XY.sum(axis=1)
+
+ Kt_XX_sum = Kt_XX_sums.sum()
+ Kt_YY_sum = Kt_YY_sums.sum()
+ K_XY_sum = K_XY_sums_0.sum()
+
+ if mmd_est == 'biased':
+ mmd2 = ((Kt_XX_sum + sum_diag_X) / (m * m)
+ + (Kt_YY_sum + sum_diag_Y) / (m * m)
+ - 2 * K_XY_sum / (m * m))
+ else:
+ assert mmd_est in {'unbiased', 'u-statistic'}
+ mmd2 = (Kt_XX_sum + Kt_YY_sum) / (m * (m-1))
+ if mmd_est == 'unbiased':
+ mmd2 -= 2 * K_XY_sum / (m * m)
+ else:
+ mmd2 -= 2 * (K_XY_sum - np.trace(K_XY)) / (m * (m-1))
+
+ if not ret_var:
+ return mmd2
+
+ Kt_XX_2_sum = _sqn(K_XX) - sum_diag2_X
+ Kt_YY_2_sum = _sqn(K_YY) - sum_diag2_Y
+ K_XY_2_sum = _sqn(K_XY)
+
+ dot_XX_XY = Kt_XX_sums.dot(K_XY_sums_1)
+ dot_YY_YX = Kt_YY_sums.dot(K_XY_sums_0)
+
+ m1 = m - 1
+ m2 = m - 2
+ zeta1_est = (
+ 1 / (m * m1 * m2) * (
+ _sqn(Kt_XX_sums) - Kt_XX_2_sum + _sqn(Kt_YY_sums) - Kt_YY_2_sum)
+ - 1 / (m * m1)**2 * (Kt_XX_sum**2 + Kt_YY_sum**2)
+ + 1 / (m * m * m1) * (
+ _sqn(K_XY_sums_1) + _sqn(K_XY_sums_0) - 2 * K_XY_2_sum)
+ - 2 / m**4 * K_XY_sum**2
+ - 2 / (m * m * m1) * (dot_XX_XY + dot_YY_YX)
+ + 2 / (m**3 * m1) * (Kt_XX_sum + Kt_YY_sum) * K_XY_sum
+ )
+ zeta2_est = (
+ 1 / (m * m1) * (Kt_XX_2_sum + Kt_YY_2_sum)
+ - 1 / (m * m1)**2 * (Kt_XX_sum**2 + Kt_YY_sum**2)
+ + 2 / (m * m) * K_XY_2_sum
+ - 2 / m**4 * K_XY_sum**2
+ - 4 / (m * m * m1) * (dot_XX_XY + dot_YY_YX)
+ + 4 / (m**3 * m1) * (Kt_XX_sum + Kt_YY_sum) * K_XY_sum
+ )
+ var_est = (4 * (var_at_m - 2) / (var_at_m * (var_at_m - 1)) * zeta1_est
+ + 2 / (var_at_m * (var_at_m - 1)) * zeta2_est)
+
+ return mmd2, var_est
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
+ parser.add_argument('--true', type=str, required=True,
+ help=('Path to the true images'))
+ parser.add_argument('--fake', type=str, nargs='+', required=True,
+ help=('Path to the generated images'))
+ parser.add_argument('--batch-size', type=int, default=100,
+ help='Batch size to use')
+ parser.add_argument('--reso', type=int, default=128,
+ help='Image resolution to resize inputs to')
+ parser.add_argument('--dims', type=int, default=2048,
+ choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),
+ help=('Dimensionality of Inception features to use. '
+ 'By default, uses pool3 features'))
+ parser.add_argument('-c', '--gpu', default='0', type=str,
+ help='GPU to use (leave blank for CPU only)')
+ parser.add_argument('--model', default='inception', type=str,
+ help='inception or lenet')
+ parser.add_argument('--dataset', default='omni', type=str,
+ help='dataset/method name (used to name cached activation files)')
+ args = parser.parse_args()
+ print(args)
+ #ipdb.set_trace()
+ os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
+ paths = [args.true] + args.fake
+
+
+ results = calculate_kid_given_paths(paths, args.batch_size,True, args.dims, model_type=args.model,reso=args.reso,dataset=args.dataset)
+ for p, m, s in results:
+ print('KID (%s): %.6f (%.6f)' % (p, m, s))
diff --git a/evaluations/fidkid-pytorch/models/inception.py b/evaluations/fidkid-pytorch/models/inception.py
new file mode 100644
index 0000000000000000000000000000000000000000..6da57bf4de182c4ddeb93f0acf948055bc369efb
--- /dev/null
+++ b/evaluations/fidkid-pytorch/models/inception.py
@@ -0,0 +1,141 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from torchvision import models
+
+
+class InceptionV3(nn.Module):
+ """Pretrained InceptionV3 network returning feature maps"""
+
+ # Index of default block of inception to return,
+ # corresponds to output of final average pooling
+ DEFAULT_BLOCK_INDEX = 3
+
+ # Maps feature dimensionality to their output blocks indices
+ BLOCK_INDEX_BY_DIM = {
+ 64: 0, # First max pooling features
+ 192: 1, # Second max pooling features
+ 768: 2, # Pre-aux classifier features
+ 2048: 3 # Final average pooling features
+ }
+
+ def __init__(self,
+ output_blocks=[DEFAULT_BLOCK_INDEX],
+ resize_input=True,
+ normalize_input=True,
+ requires_grad=False):
+ """Build pretrained InceptionV3
+
+ Parameters
+ ----------
+ output_blocks : list of int
+ Indices of blocks to return features of. Possible values are:
+ - 0: corresponds to output of first max pooling
+ - 1: corresponds to output of second max pooling
+ - 2: corresponds to output which is fed to aux classifier
+ - 3: corresponds to output of final average pooling
+ resize_input : bool
+ If true, bilinearly resizes input to width and height 299 before
+ feeding input to model. As the network without fully connected
+ layers is fully convolutional, it should be able to handle inputs
+ of arbitrary size, so resizing might not be strictly needed
+ normalize_input : bool
+ If true, scales the input from range (0, 1) to the range the
+ pretrained Inception network expects, namely (-1, 1)
+ requires_grad : bool
+ If true, parameters of the model require gradient. Possibly useful
+ for finetuning the network
+ """
+ super(InceptionV3, self).__init__()
+
+ self.resize_input = resize_input
+ self.normalize_input = normalize_input
+ self.output_blocks = sorted(output_blocks)
+ self.last_needed_block = max(output_blocks)
+
+ assert self.last_needed_block <= 3, \
+ 'Last possible output block index is 3'
+
+ self.blocks = nn.ModuleList()
+
+ inception = models.inception_v3(pretrained=True)
+
+ # Block 0: input to maxpool1
+ block0 = [
+ inception.Conv2d_1a_3x3,
+ inception.Conv2d_2a_3x3,
+ inception.Conv2d_2b_3x3,
+ nn.MaxPool2d(kernel_size=3, stride=2)
+ ]
+ self.blocks.append(nn.Sequential(*block0))
+
+ # Block 1: maxpool1 to maxpool2
+ if self.last_needed_block >= 1:
+ block1 = [
+ inception.Conv2d_3b_1x1,
+ inception.Conv2d_4a_3x3,
+ nn.MaxPool2d(kernel_size=3, stride=2)
+ ]
+ self.blocks.append(nn.Sequential(*block1))
+
+ # Block 2: maxpool2 to aux classifier
+ if self.last_needed_block >= 2:
+ block2 = [
+ inception.Mixed_5b,
+ inception.Mixed_5c,
+ inception.Mixed_5d,
+ inception.Mixed_6a,
+ inception.Mixed_6b,
+ inception.Mixed_6c,
+ inception.Mixed_6d,
+ inception.Mixed_6e,
+ ]
+ self.blocks.append(nn.Sequential(*block2))
+
+ # Block 3: aux classifier to final avgpool
+ if self.last_needed_block >= 3:
+ block3 = [
+ inception.Mixed_7a,
+ inception.Mixed_7b,
+ inception.Mixed_7c,
+ nn.AdaptiveAvgPool2d(output_size=(1, 1))
+ ]
+ self.blocks.append(nn.Sequential(*block3))
+
+ for param in self.parameters():
+ param.requires_grad = requires_grad
+
+ def forward(self, inp):
+ """Get Inception feature maps
+
+ Parameters
+ ----------
+ inp : torch.autograd.Variable
+ Input tensor of shape Bx3xHxW. Values are expected to be in
+ range (0.0, 1.0)
+
+ Returns
+ -------
+ List of torch.autograd.Variable, corresponding to the selected output
+ block, sorted ascending by index
+ """
+ outp = []
+ x = inp
+
+ if self.resize_input:
+ x = F.interpolate(x,
+ size=(299, 299),
+ mode='bilinear',
+ align_corners=False)
+
+ if self.normalize_input:
+ x = 2 * x - 1 # Scale from range (0, 1) to range (-1, 1)
+
+ for idx, block in enumerate(self.blocks):
+ x = block(x)
+ if idx in self.output_blocks:
+ outp.append(x)
+
+ if idx == self.last_needed_block:
+ break
+
+ return outp
diff --git a/evaluations/fidkid-pytorch/models/lenet.pth b/evaluations/fidkid-pytorch/models/lenet.pth
new file mode 100644
index 0000000000000000000000000000000000000000..29f3b245cad59581f6ea12005062a9ea225ac6a4
--- /dev/null
+++ b/evaluations/fidkid-pytorch/models/lenet.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4bed3817dd7723c605580e7b7393538a5dfc3508aacc4890b07e7f07c16bebf
+size 248652
diff --git a/evaluations/fidkid-pytorch/models/lenet.py b/evaluations/fidkid-pytorch/models/lenet.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab495e46bd9125c7534ec7fcc545ea7f8f213e8b
--- /dev/null
+++ b/evaluations/fidkid-pytorch/models/lenet.py
@@ -0,0 +1,50 @@
+import torch.nn as nn
+from collections import OrderedDict
+
+
+class LeNet5(nn.Module):
+ """
+ Input - 1x32x32
+ C1 - 6@28x28 (5x5 kernel)
+ tanh
+ S2 - 6@14x14 (2x2 kernel, stride 2) Subsampling
+ C3 - 16@10x10 (5x5 kernel, sparse connectivity in the original paper)
+ tanh
+ S4 - 16@5x5 (2x2 kernel, stride 2) Subsampling
+ C5 - 120@1x1 (5x5 kernel)
+ F6 - 84
+ tanh
+ F7 - 10 (Output)
+ """
+ def __init__(self):
+ super(LeNet5, self).__init__()
+
+ self.convnet = nn.Sequential(OrderedDict([
+ ('c1', nn.Conv2d(1, 6, kernel_size=(5, 5))),
+ ('tanh1', nn.Tanh()),
+ ('s2', nn.MaxPool2d(kernel_size=(2, 2), stride=2, padding=1)),
+ ('c3', nn.Conv2d(6, 16, kernel_size=(5, 5))),
+ ('tanh3', nn.Tanh()),
+ ('s4', nn.MaxPool2d(kernel_size=(2, 2), stride=2, padding=1)),
+ ('c5', nn.Conv2d(16, 120, kernel_size=(5, 5))),
+ ('tanh5', nn.Tanh())
+ ]))
+
+ self.fc = nn.Sequential(OrderedDict([
+ ('f6', nn.Linear(120, 84)),
+ ('tanh6', nn.Tanh()),
+ ('f7', nn.Linear(84, 10)),
+ ('sig7', nn.LogSoftmax(dim=-1))
+ ]))
+
+ def forward(self, img):
+ output = self.convnet(img)
+ output = output.view(img.size(0), -1)
+ output = self.fc(output)
+ return output
+
+ def extract_features(self, img):
+ output = self.convnet(img.float())
+ output = output.view(img.size(0), -1)
+ output = self.fc[1](self.fc[0](output))
+ return output
diff --git a/evaluations/fidkid-pytorch/musiq_fid_gen3d_baseline.sh b/evaluations/fidkid-pytorch/musiq_fid_gen3d_baseline.sh
new file mode 100644
index 0000000000000000000000000000000000000000..cc6afb5390c0cf7519169210de19c3b6e31dd090
--- /dev/null
+++ b/evaluations/fidkid-pytorch/musiq_fid_gen3d_baseline.sh
@@ -0,0 +1,65 @@
+set -x
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/gso-rendering"
+# gso_rendering="gso-rendering"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir
+
+
+# ! gso stuffs
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/Obajverse/Objv-animals-for-FID_eval-2K"
+gt_rendering="/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/lint-dir/objv-gt"
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/lint-dir
+output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/
+
+# method_name=LGM
+# for method_name in CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in CRM Lara ln3diff One-2-3-45 OpenLRM shape-e splatter-img
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD
+# for method_name in CRM/Animals ln3diff Lara
+# Lara
+
+# for method_name in Lara ln3diff/Animals
+# for method_name in Lara GA/stage-2/dino_img/ditl-fromditlPCD ln3diff/Animals
+# for method_name in CRM/Animals
+# for method_name in ln3diff-lite/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditxlPCD
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD LGM_fixpose/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditxlPCD-47w
+
+
+
+
+# ! GSO
+
+# output_path=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-free3d
+# gso_rendering="/mnt/sfs-common/yslan/Dataset/GSO/google_scanned_blender_25_w2c"
+
+# for method_name in ln3diff-fixpose_192 CRM LGM_fixpose
+# for method_name in GA-fixpose
+# for method_name in LGM_fixpose_r=1.8/Animals
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD LGM_fixpose/Animals
+
+
+# for method_name in Lara ln3diff/Animals OpenLRM/Animals One-2-3-45/Animals shape-e/Animals splatter-img/Animals CRM/Animals
+# for method_name in Lara
+
+for method_name in scale3d/eval/eval_nerf/Animals scale3d/eval/eval_mesh/Animals
+
+do
+
+python musiq_score_gso.py $gt_rendering ${output_path}/${method_name} \
+ --dataset ${method_name} \
+ --num-workers 4 \
+ --reso 512 \
+ # --save-stats \
+
+done
\ No newline at end of file
diff --git a/evaluations/fidkid-pytorch/musiq_score_gso.py b/evaluations/fidkid-pytorch/musiq_score_gso.py
new file mode 100644
index 0000000000000000000000000000000000000000..a83405e9024543d8efafedd130d04c80c6bce592
--- /dev/null
+++ b/evaluations/fidkid-pytorch/musiq_score_gso.py
@@ -0,0 +1,555 @@
+"""Calculates the Frechet Inception Distance (FID) to evalulate GANs
+
+The FID metric calculates the distance between two distributions of images.
+Typically, we have summary statistics (mean & covariance matrix) of one
+of these distributions, while the 2nd distribution is given by a GAN.
+
+When run as a stand-alone program, it compares the distribution of
+images that are stored as PNG/JPEG at a specified location with a
+distribution given by summary statistics (in pickle format).
+
+The FID is calculated by assuming that X_1 and X_2 are the activations of
+the pool_3 layer of the inception net for generated samples and real world
+samples respectively.
+
+See --help to see further details.
+
+Code adapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead
+of Tensorflow
+
+Copyright 2018 Institute of Bioinformatics, JKU Linz
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import ipdb
+import os
+from pathlib import Path
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
+import pyiqa
+
+from pdb import set_trace as st
+
+import json
+import numpy as np
+import torch
+import torchvision.transforms as TF
+from PIL import Image
+from scipy import linalg
+from torch.nn.functional import adaptive_avg_pool2d
+import cv2
+try:
+ from tqdm import tqdm
+except ImportError:
+ # If tqdm is not available, provide a mock version of it
+ def tqdm(x):
+ return x
+
+from pytorch_fid.inception import InceptionV3
+
+parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
+parser.add_argument('--batch-size', type=int, default=100,
+ help='Batch size to use')
+parser.add_argument('--reso', type=int, default=128,
+ help='Image resolution to resize inputs to')
+parser.add_argument('--num-workers', type=int, default=8,
+ help=('Number of processes to use for data loading. '
+ 'Defaults to `min(8, num_cpus)`'))
+parser.add_argument('--device', type=str, default=None,
+ help='Device to use. Like cuda, cuda:0 or cpu')
+parser.add_argument('--dataset', type=str, default='omni',
+ help='dataset/method name (used to name cached statistics)')
+parser.add_argument('--dims', type=int, default=2048,
+ choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),
+ help=('Dimensionality of Inception features to use. '
+ 'By default, uses pool3 features'))
+parser.add_argument('--save-stats', action='store_true',
+ help=('Generate an npz archive from a directory of samples. '
+ 'The first path is used as input and the second as output.'))
+parser.add_argument('path', type=str, nargs=2,
+ help=('Paths to the generated images or '
+ 'to .npz statistic files'))
+
+IMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',
+ 'tif', 'tiff', 'webp'}
+
+
+class ImagePathDataset(torch.utils.data.Dataset):
+    def __init__(self, files, reso, transforms=None):
+        self.files = files
+        self.transforms = transforms
+        self.reso = reso
+
+    def __len__(self):
+        return len(self.files)
+
+    def __getitem__(self, i):
+        path = self.files[i]
+        try:
+            img = cv2.imread(path)
+            img = cv2.resize(img, (self.reso, self.reso), interpolation=cv2.INTER_CUBIC)
+            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+        except Exception:
+            # fall back to the first image if this file is missing or corrupt
+            img = cv2.imread(self.files[0])
+            img = cv2.resize(img, (self.reso, self.reso), interpolation=cv2.INTER_CUBIC)
+            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+            print(path)
+
+        if self.transforms is not None:
+            img = self.transforms(img)
+
+        return img
+
+
+def get_activations(files, model, batch_size=50, dims=2048, device='cpu',
+                    num_workers=16, reso=128):
+ """Calculates the activations of the pool_3 layer for all images.
+
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : Batch size of images for the model to process at once.
+ Make sure that the number of samples is a multiple of
+ the batch size, otherwise some samples are ignored. This
+ behavior is retained to match the original FID score
+ implementation.
+ -- dims : Dimensionality of features returned by Inception
+ -- device : Device to run calculations
+ -- num_workers : Number of parallel dataloader workers
+
+ Returns:
+ -- A numpy array of dimension (num images, dims) that contains the
+ activations of the given tensor when feeding inception with the
+ query tensor.
+ """
+ model.eval()
+
+ if batch_size > len(files):
+ print(('Warning: batch size is bigger than the data size. '
+ 'Setting batch size to data size'))
+ batch_size = len(files)
+
+    dataset = ImagePathDataset(files, reso, transforms=TF.ToTensor())
+ dataloader = torch.utils.data.DataLoader(dataset,
+ batch_size=batch_size,
+ shuffle=False,
+ drop_last=False,
+ num_workers=num_workers)
+
+ pred_arr = np.empty((len(files), dims))
+
+ start_idx = 0
+
+ for batch in tqdm(dataloader):
+ batch = batch.to(device)
+
+ with torch.no_grad():
+ pred = model(batch)[0]
+
+ # If model output is not scalar, apply global spatial average pooling.
+ # This happens if you choose a dimensionality not equal 2048.
+ if pred.size(2) != 1 or pred.size(3) != 1:
+ pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
+
+ pred = pred.squeeze(3).squeeze(2).cpu().numpy()
+
+ pred_arr[start_idx:start_idx + pred.shape[0]] = pred
+
+ start_idx = start_idx + pred.shape[0]
+
+ return pred_arr
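+
+# Usage sketch (hypothetical paths, not part of the pipeline above): pooled
+# InceptionV3 features for a folder of renderings could be obtained like so:
+#   block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]
+#   model = InceptionV3([block_idx]).to('cuda')
+#   files = sorted(str(p) for p in Path('renders').glob('*.png'))
+#   acts = get_activations(files, model, batch_size=50, dims=2048,
+#                          device='cuda', reso=128)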
+
+
+def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
+ """Numpy implementation of the Frechet Distance.
+ The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
+ and X_2 ~ N(mu_2, C_2) is
+ d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
+
+ Stable version by Dougal J. Sutherland.
+
+ Params:
+ -- mu1 : Numpy array containing the activations of a layer of the
+ inception net (like returned by the function 'get_predictions')
+ for generated samples.
+    -- mu2   : The sample mean over activations, precalculated on a
+               representative data set.
+    -- sigma1: The covariance matrix over activations for generated samples.
+    -- sigma2: The covariance matrix over activations, precalculated on a
+               representative data set.
+
+ Returns:
+ -- : The Frechet Distance.
+ """
+ mu1 = np.atleast_1d(mu1)
+ mu2 = np.atleast_1d(mu2)
+
+ sigma1 = np.atleast_2d(sigma1)
+ sigma2 = np.atleast_2d(sigma2)
+
+ assert mu1.shape == mu2.shape, \
+ 'Training and test mean vectors have different lengths'
+ assert sigma1.shape == sigma2.shape, \
+ 'Training and test covariances have different dimensions'
+
+ diff = mu1 - mu2
+
+ # Product might be almost singular
+ covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
+ if not np.isfinite(covmean).all():
+ msg = ('fid calculation produces singular product; '
+ 'adding %s to diagonal of cov estimates') % eps
+ print(msg)
+ offset = np.eye(sigma1.shape[0]) * eps
+ covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
+
+ # Numerical error might give slight imaginary component
+ if np.iscomplexobj(covmean):
+ if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
+ m = np.max(np.abs(covmean.imag))
+ raise ValueError('Imaginary component {}'.format(m))
+ covmean = covmean.real
+
+ tr_covmean = np.trace(covmean)
+
+ return (diff.dot(diff) + np.trace(sigma1)
+ + np.trace(sigma2) - 2 * tr_covmean)
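+
+# Quick sanity check (toy sketch, illustration only): the Frechet distance of
+# a Gaussian fit against itself should be ~0 up to sqrtm's numerical error:
+#   feats = np.random.randn(256, 64)
+#   mu, sigma = feats.mean(0), np.cov(feats, rowvar=False)
+#   assert calculate_frechet_distance(mu, sigma, mu, sigma) < 1e-3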
+
+
+def calculate_activation_statistics(files, model, batch_size=50, dims=2048,
+ device='cpu', num_workers=1,reso=128):
+ """Calculation of the statistics used by the FID.
+ Params:
+ -- files : List of image files paths
+ -- model : Instance of inception model
+ -- batch_size : The images numpy array is split into batches with
+ batch size batch_size. A reasonable batch size
+ depends on the hardware.
+ -- dims : Dimensionality of features returned by Inception
+ -- device : Device to run calculations
+ -- num_workers : Number of parallel dataloader workers
+
+ Returns:
+ -- mu : The mean over samples of the activations of the pool_3 layer of
+ the inception model.
+ -- sigma : The covariance matrix of the activations of the pool_3 layer of
+ the inception model.
+ """
+    act = get_activations(files, model, batch_size, dims, device, num_workers, reso=reso)
+ mu = np.mean(act, axis=0)
+ sigma = np.cov(act, rowvar=False)
+ return mu, sigma
+
+
+def compute_statistics_of_path(path, model, batch_size, dims, device,
+                               num_workers=1, reso=512, dataset='gso'):
+    # directory where the ground-truth statistics are cached
+    basepath = "/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/gso_gt"
+    os.makedirs(basepath, exist_ok=True)
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600][:]
+ # all_objs = all_objs[100:600]
+ # all_objs = all_objs[:500]
+
+
+    # load cached GT statistics if available; otherwise compute and cache them
+    try:
+        try:
+            m = np.load(os.path.join(basepath, path.split('/')[-1] + str(reso) + 'mean.npy'))
+            s = np.load(os.path.join(basepath, path.split('/')[-1] + str(reso) + 'std.npy'))
+            print('loaded cached GT statistics for', dataset)
+        except Exception:
+            files = []
+            # load the Objaverse GT renderings (3 chunks of 8 views per object)
+            for obj_folder in tqdm(all_objs):
+                obj_folder = obj_folder[:-2]  # strip the suffix to load all 3 chunks
+                for batch in range(1, 4):
+                    for idx in range(8):
+                        files.append(os.path.join(path, obj_folder, str(batch), f'{idx}.jpg'))
+
+                if len(files) > 50000:
+                    files = files[:50000]
+                    break
+
+            m, s = calculate_activation_statistics(files, model, batch_size,
+                                                   dims, device, num_workers, reso=reso)
+            path = Path(path)
+            np.save(os.path.join(basepath, path.name + str(reso) + 'mean'), m)
+            np.save(os.path.join(basepath, path.name + str(reso) + 'std'), s)
+    except Exception as e:
+        print(f'{dataset} failed: ', e)
+
+
+ return m, s
+
+
+def compute_statistics_of_path_new(path, model, batch_size, dims, device,
+                                   num_workers=1, reso=128, dataset='omni'):
+    # directory where the per-method sample statistics are cached
+    basepath = '/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/metrics/fid/' + str(reso) + dataset
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+    os.makedirs(basepath, exist_ok=True)
+    sample_name = path.split('/')[-1]
+
+    # load cached sample statistics if available; otherwise compute and cache them
+    try:
+        try:
+            m = np.load(os.path.join(basepath, sample_name + str(reso) + 'mean.npy'))
+            s = np.load(os.path.join(basepath, sample_name + str(reso) + 'std.npy'))
+            print('loaded cached sample statistics')
+        except Exception:
+            files = []
+
+ for obj_folder in tqdm(all_objs):
+ obj_folder = '/'.join(obj_folder.split('/')[1:])
+ for idx in range(24):
+ # files.append(os.path.join(path, obj_folder, f'{idx}.jpg'))
+ if 'Lara' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', f'{idx}.jpg'))
+ elif 'GA' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'sample-0-{idx}.jpg'))
+ elif 'scale3d' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '1', f'{idx}.png'))
+ elif 'LRM' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))
+ else:
+ files.append(os.path.join(path, obj_folder, '0', f'{idx}.jpg'))
+
+
+ files=files[:50000]
+            m, s = calculate_activation_statistics(files, model, batch_size,
+                                                   dims, device, num_workers, reso=reso)
+            path = Path(path)
+            np.save(os.path.join(basepath, sample_name + str(reso) + 'mean'), m)
+            np.save(os.path.join(basepath, sample_name + str(reso) + 'std'), s)
+ except Exception as e:
+ print('error sample image', e)
+
+ return m, s
+
+musiq_metric = pyiqa.create_metric('musiq')
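+# MUSIQ (Multi-Scale Image Quality Transformer) is a no-reference quality
+# metric; a pyiqa metric is a callable that accepts an image path or tensor
+# and returns a scalar score tensor, higher meaning better perceived quality:
+#   score = musiq_metric('render.png')  # hypothetical path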
+
+def calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1, reso=128, dataset='omni'):
+    """Calculates the average MUSIQ score of the generated images under paths[1].
+
+    The original FID computation is kept below, commented out, for reference.
+    """
+    # block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+    # model = InceptionV3([block_idx]).to(device)
+
+    all_musiq = []
+
+
+ path = paths[1]
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ sample_name=path.split('/')[-1]
+
+ files=[]
+
+ for obj_folder in tqdm(all_objs):
+ obj_folder = '/'.join(obj_folder.split('/')[1:])
+ for idx in range(24):
+ # files.append(os.path.join(path, obj_folder, f'{idx}.jpg'))
+ if 'Lara' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', f'{idx}.jpg'))
+ elif 'GA' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'sample-0-{idx}.jpg'))
+ elif 'LRM' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg'))
+ elif 'scale3d' in path:
+ files.append(os.path.join(path, '/'.join(obj_folder.split('/')[:-1]), '1', f'{idx}.png'))
+ else:
+ files.append(os.path.join(path, obj_folder, '0', f'{idx}.jpg'))
+
+    for file in tqdm(files):
+        if os.path.exists(file):
+            musiq_value = musiq_metric(file)
+ all_musiq.append(musiq_value)
+
+ musiq_value = sum(all_musiq) / len(all_musiq)
+
+
+
+ # m1, s1 = compute_statistics_of_path(paths[0], model, batch_size, # ! GT data
+ # dims, device, num_workers,reso=reso,dataset=dataset)
+ # # ipdb.set_trace()
+ # m2, s2 = compute_statistics_of_path_new(paths[1], model, batch_size, # ! generated data
+ # dims, device, num_workers,reso=reso,dataset=dataset)
+ # fid_value = calculate_frechet_distance(m1, s1, m2, s2)
+
+ # return fid_value
+ return musiq_value
+
+
+def save_fid_stats(paths, batch_size, device, dims, num_workers=1):
+ """Calculates the FID of two paths"""
+ # if not os.path.exists(paths[0]):
+ # raise RuntimeError('Invalid path: %s' % paths[0])
+
+ # if os.path.exists(paths[1]):
+ # raise RuntimeError('Existing output file: %s' % paths[1])
+
+ block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
+
+ model = InceptionV3([block_idx]).to(device)
+
+ print(f"Saving statistics for {paths[0]}")
+
+ m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,
+ dims, device, num_workers)
+
+ np.savez_compressed(paths[1], mu=m1, sigma=s1)
+
+
+def main():
+ args = parser.parse_args()
+
+ if args.device is None:
+ device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')
+ else:
+ device = torch.device(args.device)
+
+ if args.num_workers is None:
+ try:
+ num_cpus = len(os.sched_getaffinity(0))
+ except AttributeError:
+ # os.sched_getaffinity is not available under Windows, use
+ # os.cpu_count instead (which may not return the *available* number
+ # of CPUs).
+ num_cpus = os.cpu_count()
+
+ num_workers = min(num_cpus, 8) if num_cpus is not None else 0
+ else:
+ num_workers = args.num_workers
+
+ if args.save_stats:
+ save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers)
+ return
+    score = calculate_fid_given_paths(args.path,
+                                      args.batch_size,
+                                      device,
+                                      args.dims,
+                                      num_workers, args.reso, args.dataset)
+    print(f'{args.dataset} MUSIQ: ', score)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/evaluations/fidkid-pytorch/sample_pcd4096.sh b/evaluations/fidkid-pytorch/sample_pcd4096.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ee7d810fba743eea3244798c1af008e09c111c87
--- /dev/null
+++ b/evaluations/fidkid-pytorch/sample_pcd4096.sh
@@ -0,0 +1,98 @@
+set -x
+
+# n_proc=4
+# n_points=4096
+focal=525
+
+# pre-requisites: kaolin and nvdiffrast
+# pip install kaolin==0.16.0 -f https://nvidia-kaolin.s3.us-east-2.amazonaws.com/torch-2.3.0_cu118.html
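+# nvdiffrast is typically installed from source (assumption; pin as needed):
+# pip install git+https://github.com/NVlabs/nvdiffrast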
+
+n_proc=1
+# n_points=4096
+# n_points=35000 # will fps later
+n_points=16384 # will fps later
+# focal=24 # as in blender config
+
+
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/02958343/fffb1660a38af30ba4cf3601fb6b2442/models/
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/02958343/
+# save_root=/mnt/lustre/yslan/3D_Dataset/shapenet/pcd/car
+
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/03001627/
+# save_root=/mnt/lustre/yslan/3D_Dataset/shapenet/pcd-full/chair
+
+# shape_root=/mnt/lustre/share/fzhong/shapenet/ShapeNetCore.v2/02691156/
+# save_root=/mnt/lustre/yslan/3D_Dataset/shapenet/pcd/plane
+
+# ! render gt
+# shape_root=/mnt/sfs-common/yslan/Dataset/GSO/gso-unzip/gso-unzip
+
+# save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics/gso
+
+# ! use FPS later:
+# splatter-img LGM
+
+# for method_name in ln3diff One-2-3-45 OpenLRM shape-e Lara CRM splatter-img
+# for method_name in ln3diff One-2-3-45 OpenLRM
+# for method_name in shape-e Lara CRM
+# for method_name in gso
+
+# for method_name in CRM splatter-img
+
+
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh-ditxlPCD
+
+# '''
+# for method_name in GA/stage-2/dino_img/ditl-fromditlPCD-fixPose-tomesh
+
+# do
+
+# shape_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/${method_name}
+# # save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics/${method_name}
+
+# save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir/3D-metrics-fps/${method_name}
+
+# python sample_surface_gso.py \
+# --shape_root ${shape_root} \
+# --save_root ${save_root} \
+# --n_proc ${n_proc} \
+# --n_points ${n_points} \
+# --image_height 512 \
+# --image_width 512 \
+# --focal_length_x ${focal} \
+# --focal_length_y ${focal} \
+# --principal_point_x 256 \
+# --principal_point_y 256 \
+
+# done
+# '''
+
+
+# for method_name in Lara CRM/Animals ln3diff-lite/Animals
+
+# for method_name in Lara CRM/Animals ln3diff-lite/Animals
+# for method_name in OpenLRM/Animals One-2-3-45/Animals shape-e/Animals
+
+for method_name in scale3d/eval/eval_nerf/Animals scale3d/eval/eval_mesh/Animals
+
+do
+
+shape_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/${method_name}
+save_root=/mnt/sfs-common/yslan/Repo/3dgen/FID-KID-Outputdir-objv/3D-metrics-fps/${method_name}
+
+python sample_surface_objv.py \
+ --shape_root ${shape_root} \
+ --save_root ${save_root} \
+ --n_proc ${n_proc} \
+ --n_points ${n_points} \
+ --image_height 512 \
+ --image_width 512 \
+ --focal_length_x ${focal} \
+ --focal_length_y ${focal} \
+ --principal_point_x 256 \
+    --principal_point_y 256
+
+done
diff --git a/evaluations/fidkid-pytorch/sample_surface_gso.py b/evaluations/fidkid-pytorch/sample_surface_gso.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c07a541f6687bf1a07caa61e2c05313d7d99ecd
--- /dev/null
+++ b/evaluations/fidkid-pytorch/sample_surface_gso.py
@@ -0,0 +1,409 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import math
+import pytorch3d.ops
+import numpy as np
+import os
+import argparse
+import multiprocessing as mp
+from multiprocessing import Pool
+import trimesh
+import tqdm
+import torch
+import nvdiffrast.torch as dr
+import kaolin as kal
+import glob
+import ipdb
+
+parser = argparse.ArgumentParser(description='sample surface points from mesh')
+parser.add_argument(
+ '--n_proc', type=int, default=8,
+ help='Number of processes to run in parallel'
+ '(0 means sequential execution).')
+parser.add_argument(
+ '--n_points', type=int, default=4096,
+ help='Number of points to sample per model.')
+parser.add_argument(
+ '--n_views', type=int, default=100,
+ help='Number of views per model.')
+parser.add_argument(
+ '--image_height', type=int, default=640,
+ help='Depth image height.')
+parser.add_argument(
+ '--image_width', type=int, default=640,
+ help='Depth image width.')
+parser.add_argument(
+ '--focal_length_x', type=float, default=640,
+ help='Focal length in x direction.')
+parser.add_argument(
+ '--focal_length_y', type=float, default=640,
+ help='Focal length in y direction.')
+parser.add_argument(
+ '--principal_point_x', type=float, default=320,
+ help='Principal point location in x direction.')
+parser.add_argument(
+ '--principal_point_y', type=float, default=320,
+ help='Principal point location in y direction.')
+parser.add_argument("--shape_root", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel27_omni_ablation2/ddpm_5000/test', help="path to the save resules shapenet dataset")
+parser.add_argument("--save_root", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel27_omni_ablation2/ddpm_vis_ab2surface', help="path to the split shapenet dataset")
+
+options = parser.parse_args()
+
+# create array for inverse mapping
+coordspx2 = np.stack(np.nonzero(np.ones((options.image_height, options.image_width))), -1).astype(np.float32)
+coordspx2 = coordspx2[:, ::-1]
+fusion_intrinsics = np.array(
+ [
+ [options.focal_length_x, 0, options.principal_point_x],
+ [0, options.focal_length_y, options.principal_point_y],
+ [0, 0, 1]
+ ])
+# glctx = dr.RasterizeGLContext() # EGL/egl.h: No such file or directory
+glctx = dr.RasterizeCudaContext()
+
+
+def CalcLinearZ(depth):
+ # depth = depth * 2 - 1
+ zFar = 100.0
+ zNear = 0.1
+ linear = zNear / (zFar - depth * (zFar - zNear)) * zFar
+ return linear
+
+
+def projection_cv_new(fx, fy, cx, cy, width, height, n=1.0, f=50.0):
+ return np.array(
+ [[-2 * fx / width, 0.0, (width - 2 * cx) / width, 0.0],
+ [0.0, -2 * fy / height, (height - 2 * cy) / height, 0.0],
+ [0.0, 0.0, (-f - n) / (f - n), -2.0 * f * n / (f - n)],
+ [0.0, 0.0, -1.0, 0.0]])
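+
+# projection_cv_new converts OpenCV-style intrinsics (fx, fy, cx, cy) into an
+# OpenGL-style clip-space projection matrix with near/far planes n and f, so
+# the CUDA rasterizer below can consume cameras given in CV convention.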
+
+
+def interpolate(attr, rast, attr_idx, rast_db=None):
+ return dr.interpolate(
+ attr.contiguous(), rast, attr_idx, rast_db=rast_db,
+ diff_attrs=None if rast_db is None else 'all')
+
+
+def render_nvdiffrast(v_pos, tris, T_bx4x4):
+ # T_bx4x4 - world to cam
+ proj = projection_cv_new(
+ fx=options.focal_length_x, fy=options.focal_length_y, cx=options.principal_point_x,
+ cy=options.principal_point_y,
+ width=options.image_width, height=options.image_height, n=0.1, f=100.0)
+
+ fix = torch.eye(4, dtype=torch.float32, device='cuda')
+ fix[2, 2] = -1
+ fix[1, 1] = -1
+ fix[0, 0] = -1
+ fix = fix.unsqueeze(0).repeat(T_bx4x4.shape[0], 1, 1)
+
+ proj = torch.tensor(proj, dtype=torch.float32, device='cuda').unsqueeze(0).repeat(T_bx4x4.shape[0], 1, 1)
+ T_world_cam_bx4x4 = torch.bmm(fix, T_bx4x4)
+ mvp = torch.bmm(proj, T_world_cam_bx4x4)
+ v_pos_clip = torch.matmul(
+ torch.nn.functional.pad(v_pos, pad=(0, 1), mode='constant', value=1.0),
+ torch.transpose(mvp, 1, 2))
+ rast, db = dr.rasterize(
+ glctx, torch.tensor(v_pos_clip, dtype=torch.float32, device='cuda'), tris.int(),
+ (options.image_height, options.image_width))
+
+ v_pos_cam = torch.matmul(
+ torch.nn.functional.pad(v_pos, pad=(0, 1), mode='constant', value=1.0),
+ torch.transpose(T_world_cam_bx4x4, 1, 2))
+ gb_pos_cam, _ = interpolate(v_pos_cam, rast, tris.int())
+ depth_maps = gb_pos_cam[..., 2].abs()
+ return depth_maps
+
+
+def as_mesh(scene_or_mesh):
+ """
+ Convert a possible scene to a mesh.
+
+ If conversion occurs, the returned mesh has only vertex and face data.
+ """
+ if isinstance(scene_or_mesh, trimesh.Scene):
+ if len(scene_or_mesh.geometry) == 0:
+ mesh = None # empty scene
+ else:
+ # we lose texture information here
+ mesh = trimesh.util.concatenate(
+ tuple(
+ trimesh.Trimesh(vertices=g.vertices, faces=g.faces)
+ for g in scene_or_mesh.geometry.values()))
+ else:
+ assert (isinstance(scene_or_mesh, trimesh.Trimesh))
+ mesh = scene_or_mesh
+ return mesh
+
+
+def render(mesh_v, mesh_f, Rs):
+ """
+ Render the given mesh using the generated views.
+
+ :param base_mesh: mesh to render
+ :type base_mesh: mesh.Mesh
+ :param Rs: rotation matrices
+ :type Rs: [numpy.ndarray]
+ :return: depth maps
+ :rtype: numpy.ndarray
+ """
+ T_bx4x4 = torch.zeros((options.n_views, 4, 4), dtype=torch.float32, device='cuda')
+ T_bx4x4[:, 3, 3] = 1
+ T_bx4x4[:, 2, 3] = 1
+ T_bx4x4[:, :3, :3] = torch.tensor(Rs, dtype=torch.float32, device='cuda')
+ depthmaps = render_nvdiffrast(
+ mesh_v,
+ mesh_f, T_bx4x4)
+ return depthmaps
+
+
+def get_points():
+ """
+ :param n_points: number of points
+ :type n_points: int
+ :return: list of points
+ :rtype: numpy.ndarray
+ """
+
+ rnd = 1.
+ points = []
+ offset = 2. / options.n_views
+ increment = math.pi * (3. - math.sqrt(5.))
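+    # golden-angle increment: this is Fibonacci-spiral sampling, which spreads
+    # n_views points quasi-uniformly over the unit sphere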
+
+ for i in range(options.n_views):
+ y = ((i * offset) - 1) + (offset / 2)
+ r = math.sqrt(1 - pow(y, 2))
+
+ phi = ((i + rnd) % options.n_views) * increment
+
+ x = math.cos(phi) * r
+ z = math.sin(phi) * r
+
+ points.append([x, y, z])
+ return np.array(points)
+
+
+def get_views(semi_sphere=False):
+ """
+ Generate a set of views to generate depth maps from.
+
+ :param n_views: number of views per axis
+ :type n_views: int
+ :return: rotation matrices
+ :rtype: [numpy.ndarray]
+ """
+
+ Rs = []
+ points = get_points()
+ if semi_sphere:
+ points[:, 2] = -np.abs(points[:, 2]) - 0.1
+
+ for i in range(points.shape[0]):
+ longitude = - math.atan2(points[i, 0], points[i, 1])
+ latitude = math.atan2(points[i, 2], math.sqrt(points[i, 0] ** 2 + points[i, 1] ** 2))
+
+ R_x = np.array(
+ [[1, 0, 0],
+ [0, math.cos(latitude), -math.sin(latitude)],
+ [0, math.sin(latitude), math.cos(latitude)]])
+ R_y = np.array(
+ [[math.cos(longitude), 0, math.sin(longitude)],
+ [0, 1, 0],
+ [-math.sin(longitude), 0, math.cos(longitude)]])
+ R = R_x @ R_y
+ Rs.append(R)
+
+ return Rs
+
+
+def fusion(depthmaps, Rs):
+ """
+ Fuse the rendered depth maps.
+
+ :param depthmaps: depth maps
+ :type depthmaps: numpy.ndarray
+ :param Rs: rotation matrices corresponding to views
+ :type Rs: [numpy.ndarray]
+    :return: fused surface points in world coordinates
+    :rtype: torch.Tensor
+ """
+
+ # sample points inside mask
+ sample_per_view = options.n_points // options.n_views
+ sample_bxn = torch.zeros((options.n_views, sample_per_view), device='cuda', dtype=torch.long)
+ for i in range(len(Rs)):
+ mask = depthmaps[i] > 0
+ valid_idx = torch.nonzero(mask.reshape(-1)).squeeze(-1)
+ idx = list(range(valid_idx.shape[0]))
+ np.random.shuffle(idx)
+ idx = idx[:sample_per_view]
+ sample_bxn[i] = torch.tensor(valid_idx[idx])
+
+ depthmaps = torch.gather(depthmaps.reshape(options.n_views, -1), 1, sample_bxn)
+
+    inv_Ks_bx3x3 = torch.tensor(np.linalg.inv(fusion_intrinsics), dtype=torch.float32, device='cuda').unsqueeze(
+        0).repeat(options.n_views, 1, 1)
+ T_bx4x4 = torch.zeros((options.n_views, 4, 4), dtype=torch.float32, device='cuda')
+ T_bx4x4[:, 3, 3] = 1
+ T_bx4x4[:, 2, 3] = 1
+ T_bx4x4[:, :3, :3] = torch.tensor(Rs, dtype=torch.float32, device='cuda')
+ inv_T_bx4x4 = torch.inverse(T_bx4x4)
+
+ tf_coords_bxpx2 = torch.tensor(coordspx2.copy(), dtype=torch.float32, device='cuda').unsqueeze(0).repeat(
+ options.n_views, 1, 1)
+ tf_coords_bxpx2 = torch.gather(tf_coords_bxpx2, 1, sample_bxn.unsqueeze(-1).repeat(1, 1, 2))
+
+ tf_coords_bxpx3 = torch.cat([tf_coords_bxpx2, torch.ones_like(tf_coords_bxpx2[..., :1])], -1)
+ tf_coords_bxpx3 *= depthmaps.reshape(options.n_views, -1, 1)
+ tf_cam_bxpx3 = torch.bmm(inv_Ks_bx3x3, tf_coords_bxpx3.transpose(1, 2)).transpose(1, 2)
+ tf_cam_bxpx4 = torch.cat([tf_cam_bxpx3, torch.ones_like(tf_cam_bxpx3[..., :1])], -1)
+ tf_world_bxpx3 = torch.bmm(inv_T_bx4x4, tf_cam_bxpx4.transpose(1, 2)).transpose(1, 2)[..., :3]
+
+ return tf_world_bxpx3.reshape(-1, 3)
+
+
+def normalize(vertices, faces, normalized_scale=0.9, rotate_x=False, center_xyz=False):
+ vertices = vertices.cuda()
+
+ if center_xyz: # some mesh's center is not origin
+ vertices = vertices - vertices.mean(0, keepdim=True)
+
+    if rotate_x:  # rotate around the x axis to match the two coordinate systems
+        rot_mat = torch.eye(n=3, device='cuda')
+        theta = np.pi / 90  # note: np.pi / 90 rad is 2 degrees, not 90
+        rot_mat[1, 1] = np.cos(theta)
+        rot_mat[2, 2] = np.cos(theta)
+        rot_mat[1, 2] = -np.sin(theta)
+        rot_mat[2, 1] = np.sin(theta)
+ vertices = rot_mat @ vertices.transpose(0,1)
+ vertices = vertices.transpose(0,1)
+
+ scale = (vertices.max(dim=0)[0] - vertices.min(dim=0)[0]).max()
+ mesh_v1 = vertices / scale * normalized_scale
+ mesh_f1 = faces.long().cuda()
+ return mesh_v1, mesh_f1
+
+
+
+def sample_surface_pts(path):
+    try:
+        mesh_path, output_pth, debug = path
+        mesh = trimesh.load(mesh_path)  # note: trimesh may fail on some PLY files
+ if mesh.vertices.shape[0] == 0:
+ return
+
+ mesh_v = torch.Tensor(mesh.vertices)
+ # mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=0.9, rotate_x=True)
+ # mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=1.0, rotate_x=True, center_xyz=True)
+ mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=1.0, rotate_x=False, center_xyz=True)
+
+ # generate camera matrices
+ # Rs = get_views()
+ # Rs = get_views(semi_sphere=True)
+ Rs = get_views(semi_sphere=False)
+ # get depth images
+ depths = render(mesh_v, mesh_f, Rs)
+ # project to world space
+        try:
+            pcd = fusion(depths, Rs)
+        except Exception:
+            return  # skip meshes whose depth fusion fails
+
+        # farthest-point-sample the fused points down to a fixed budget
+        pcd, fps_idx = pytorch3d.ops.sample_farthest_points(
+            pcd.unsqueeze(0).cuda(), K=4000,
+            random_start_point=True)
+        pcd = pcd[0].cpu().numpy()
+
+        pcd = trimesh.points.PointCloud(pcd)
+        pcd.export(output_pth.replace('.npz', '.obj'))
+    except Exception as e:
+        print(e, flush=True)
+
+
+if __name__ == '__main__':
+ mp.set_start_method('spawn')
+
+ shapenet_root = options.shape_root
+ save_root = options.save_root
+
+ debug = True
+    cmds = []
+
+    # ! for gt: GSO ground-truth meshes
+    # for obj_folder in sorted(os.listdir(shapenet_root)):
+    #     cmds += [(os.path.join(shapenet_root, obj_folder, 'meshes/model.obj'), os.path.join(save_root, f'{obj_folder}_pcd_4096.ply'), debug)]
+
+    # ! for baseline samples
+
+ os.makedirs(save_root, exist_ok=True)
+
+ for obj_folder in sorted(os.listdir(shapenet_root)):
+ if not os.path.isdir(os.path.join(shapenet_root, obj_folder)):
+ continue
+        for idx in [0]:
+ if 'CRM' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, obj_folder, f'{idx}', '*.obj'))[0]
+ elif 'Lara' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, '/'.join(obj_folder.split('/')[:-1]), '0.jpg', '*.obj'))[0]
+
+ elif 'LRM' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, '/'.join(obj_folder.split('/')[:-1]), '0', f'{idx}.jpg', '*.obj'))[0]
+ else:
+                if os.path.exists(os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.obj')):
+ mesh_path = os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.obj')
+ else:
+ mesh_path = os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.ply')
+
+ cmds += [(mesh_path, os.path.join(save_root, f'{obj_folder}_pcd_4096.ply'), debug)]
+
+
+ if options.n_proc == 0:
+ for filepath in tqdm.tqdm(cmds):
+ sample_surface_pts(filepath)
+ else:
+ with Pool(options.n_proc) as p:
+ list(tqdm.tqdm(p.imap(sample_surface_pts, cmds), total=len(cmds)))
diff --git a/evaluations/fidkid-pytorch/sample_surface_objv.py b/evaluations/fidkid-pytorch/sample_surface_objv.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb61b87a8200912cf2955e834bea89dea244b902
--- /dev/null
+++ b/evaluations/fidkid-pytorch/sample_surface_objv.py
@@ -0,0 +1,412 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+from pdb import set_trace as st
+import json
+import math
+import pytorch3d.ops
+import numpy as np
+import os
+import argparse
+import multiprocessing as mp
+from multiprocessing import Pool
+import trimesh
+import tqdm
+import torch
+import nvdiffrast.torch as dr
+import kaolin as kal
+import glob
+import ipdb
+
+parser = argparse.ArgumentParser(description='sample surface points from mesh')
+parser.add_argument(
+ '--n_proc', type=int, default=8,
+ help='Number of processes to run in parallel'
+ '(0 means sequential execution).')
+parser.add_argument(
+ '--n_points', type=int, default=4096,
+ help='Number of points to sample per model.')
+parser.add_argument(
+ '--n_views', type=int, default=100,
+ help='Number of views per model.')
+parser.add_argument(
+ '--image_height', type=int, default=640,
+ help='Depth image height.')
+parser.add_argument(
+ '--image_width', type=int, default=640,
+ help='Depth image width.')
+parser.add_argument(
+ '--focal_length_x', type=float, default=640,
+ help='Focal length in x direction.')
+parser.add_argument(
+ '--focal_length_y', type=float, default=640,
+ help='Focal length in y direction.')
+parser.add_argument(
+ '--principal_point_x', type=float, default=320,
+ help='Principal point location in x direction.')
+parser.add_argument(
+ '--principal_point_y', type=float, default=320,
+ help='Principal point location in y direction.')
+parser.add_argument("--shape_root", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel27_omni_ablation2/ddpm_5000/test', help="path to the save resules shapenet dataset")
+parser.add_argument("--save_root", type=str, default='/mnt/petrelfs/caoziang/3D_generation/Checkpoint_all/diffusion_shapenet_testmodel27_omni_ablation2/ddpm_vis_ab2surface', help="path to the split shapenet dataset")
+
+options = parser.parse_args()
+
+# create array for inverse mapping
+coordspx2 = np.stack(np.nonzero(np.ones((options.image_height, options.image_width))), -1).astype(np.float32)
+coordspx2 = coordspx2[:, ::-1]
+fusion_intrinsics = np.array(
+ [
+ [options.focal_length_x, 0, options.principal_point_x],
+ [0, options.focal_length_y, options.principal_point_y],
+ [0, 0, 1]
+ ])
+# glctx = dr.RasterizeGLContext() # EGL/egl.h: No such file or directory
+glctx = dr.RasterizeCudaContext()
+
+
+def CalcLinearZ(depth):
+ # depth = depth * 2 - 1
+ zFar = 100.0
+ zNear = 0.1
+ linear = zNear / (zFar - depth * (zFar - zNear)) * zFar
+ return linear
+
+
+def projection_cv_new(fx, fy, cx, cy, width, height, n=1.0, f=50.0):
+ return np.array(
+ [[-2 * fx / width, 0.0, (width - 2 * cx) / width, 0.0],
+ [0.0, -2 * fy / height, (height - 2 * cy) / height, 0.0],
+ [0.0, 0.0, (-f - n) / (f - n), -2.0 * f * n / (f - n)],
+ [0.0, 0.0, -1.0, 0.0]])
+
+
+def interpolate(attr, rast, attr_idx, rast_db=None):
+ return dr.interpolate(
+ attr.contiguous(), rast, attr_idx, rast_db=rast_db,
+ diff_attrs=None if rast_db is None else 'all')
+
+
+def render_nvdiffrast(v_pos, tris, T_bx4x4):
+ # T_bx4x4 - world to cam
+ proj = projection_cv_new(
+ fx=options.focal_length_x, fy=options.focal_length_y, cx=options.principal_point_x,
+ cy=options.principal_point_y,
+ width=options.image_width, height=options.image_height, n=0.1, f=100.0)
+
+ fix = torch.eye(4, dtype=torch.float32, device='cuda')
+ fix[2, 2] = -1
+ fix[1, 1] = -1
+ fix[0, 0] = -1
+ fix = fix.unsqueeze(0).repeat(T_bx4x4.shape[0], 1, 1)
+
+ proj = torch.tensor(proj, dtype=torch.float32, device='cuda').unsqueeze(0).repeat(T_bx4x4.shape[0], 1, 1)
+ T_world_cam_bx4x4 = torch.bmm(fix, T_bx4x4)
+ mvp = torch.bmm(proj, T_world_cam_bx4x4)
+ v_pos_clip = torch.matmul(
+ torch.nn.functional.pad(v_pos, pad=(0, 1), mode='constant', value=1.0),
+ torch.transpose(mvp, 1, 2))
+ rast, db = dr.rasterize(
+ glctx, torch.tensor(v_pos_clip, dtype=torch.float32, device='cuda'), tris.int(),
+ (options.image_height, options.image_width))
+
+ v_pos_cam = torch.matmul(
+ torch.nn.functional.pad(v_pos, pad=(0, 1), mode='constant', value=1.0),
+ torch.transpose(T_world_cam_bx4x4, 1, 2))
+ gb_pos_cam, _ = interpolate(v_pos_cam, rast, tris.int())
+ depth_maps = gb_pos_cam[..., 2].abs()
+ return depth_maps
+
+
+def as_mesh(scene_or_mesh):
+ """
+ Convert a possible scene to a mesh.
+
+ If conversion occurs, the returned mesh has only vertex and face data.
+ """
+ if isinstance(scene_or_mesh, trimesh.Scene):
+ if len(scene_or_mesh.geometry) == 0:
+ mesh = None # empty scene
+ else:
+ # we lose texture information here
+ mesh = trimesh.util.concatenate(
+ tuple(
+ trimesh.Trimesh(vertices=g.vertices, faces=g.faces)
+ for g in scene_or_mesh.geometry.values()))
+ else:
+ assert (isinstance(scene_or_mesh, trimesh.Trimesh))
+ mesh = scene_or_mesh
+ return mesh
+
+
+def render(mesh_v, mesh_f, Rs):
+ """
+ Render the given mesh using the generated views.
+
+ :param base_mesh: mesh to render
+ :type base_mesh: mesh.Mesh
+ :param Rs: rotation matrices
+ :type Rs: [numpy.ndarray]
+ :return: depth maps
+ :rtype: numpy.ndarray
+ """
+ T_bx4x4 = torch.zeros((options.n_views, 4, 4), dtype=torch.float32, device='cuda')
+ T_bx4x4[:, 3, 3] = 1
+ T_bx4x4[:, 2, 3] = 1
+ T_bx4x4[:, :3, :3] = torch.tensor(Rs, dtype=torch.float32, device='cuda')
+ depthmaps = render_nvdiffrast(
+ mesh_v,
+ mesh_f, T_bx4x4)
+ return depthmaps
+
+
+def get_points():
+ """
+ :param n_points: number of points
+ :type n_points: int
+ :return: list of points
+ :rtype: numpy.ndarray
+ """
+
+ rnd = 1.
+ points = []
+ offset = 2. / options.n_views
+ increment = math.pi * (3. - math.sqrt(5.))
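+    # golden-angle increment: this is Fibonacci-spiral sampling, which spreads
+    # n_views points quasi-uniformly over the unit sphere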
+
+ for i in range(options.n_views):
+ y = ((i * offset) - 1) + (offset / 2)
+ r = math.sqrt(1 - pow(y, 2))
+
+ phi = ((i + rnd) % options.n_views) * increment
+
+ x = math.cos(phi) * r
+ z = math.sin(phi) * r
+
+ points.append([x, y, z])
+ return np.array(points)
+
+
+def get_views(semi_sphere=False):
+ """
+ Generate a set of views to generate depth maps from.
+
+ :param n_views: number of views per axis
+ :type n_views: int
+ :return: rotation matrices
+ :rtype: [numpy.ndarray]
+ """
+
+ Rs = []
+ points = get_points()
+ if semi_sphere:
+ points[:, 2] = -np.abs(points[:, 2]) - 0.1
+
+ for i in range(points.shape[0]):
+ longitude = - math.atan2(points[i, 0], points[i, 1])
+ latitude = math.atan2(points[i, 2], math.sqrt(points[i, 0] ** 2 + points[i, 1] ** 2))
+
+ R_x = np.array(
+ [[1, 0, 0],
+ [0, math.cos(latitude), -math.sin(latitude)],
+ [0, math.sin(latitude), math.cos(latitude)]])
+ R_y = np.array(
+ [[math.cos(longitude), 0, math.sin(longitude)],
+ [0, 1, 0],
+ [-math.sin(longitude), 0, math.cos(longitude)]])
+ R = R_x @ R_y
+ Rs.append(R)
+
+ return Rs
+
+
+def fusion(depthmaps, Rs):
+ """
+ Fuse the rendered depth maps.
+
+ :param depthmaps: depth maps
+ :type depthmaps: numpy.ndarray
+ :param Rs: rotation matrices corresponding to views
+ :type Rs: [numpy.ndarray]
+    :return: fused surface points in world coordinates
+    :rtype: torch.Tensor
+ """
+
+ # sample points inside mask
+ sample_per_view = options.n_points // options.n_views
+ sample_bxn = torch.zeros((options.n_views, sample_per_view), device='cuda', dtype=torch.long)
+ for i in range(len(Rs)):
+ mask = depthmaps[i] > 0
+ valid_idx = torch.nonzero(mask.reshape(-1)).squeeze(-1)
+ idx = list(range(valid_idx.shape[0]))
+ np.random.shuffle(idx)
+ idx = idx[:sample_per_view]
+ sample_bxn[i] = torch.tensor(valid_idx[idx])
+
+ depthmaps = torch.gather(depthmaps.reshape(options.n_views, -1), 1, sample_bxn)
+
+    inv_Ks_bx3x3 = torch.tensor(np.linalg.inv(fusion_intrinsics), dtype=torch.float32, device='cuda').unsqueeze(
+        0).repeat(options.n_views, 1, 1)
+ T_bx4x4 = torch.zeros((options.n_views, 4, 4), dtype=torch.float32, device='cuda')
+ T_bx4x4[:, 3, 3] = 1
+ T_bx4x4[:, 2, 3] = 1
+ T_bx4x4[:, :3, :3] = torch.tensor(Rs, dtype=torch.float32, device='cuda')
+ inv_T_bx4x4 = torch.inverse(T_bx4x4)
+
+ tf_coords_bxpx2 = torch.tensor(coordspx2.copy(), dtype=torch.float32, device='cuda').unsqueeze(0).repeat(
+ options.n_views, 1, 1)
+ tf_coords_bxpx2 = torch.gather(tf_coords_bxpx2, 1, sample_bxn.unsqueeze(-1).repeat(1, 1, 2))
+
+ tf_coords_bxpx3 = torch.cat([tf_coords_bxpx2, torch.ones_like(tf_coords_bxpx2[..., :1])], -1)
+ tf_coords_bxpx3 *= depthmaps.reshape(options.n_views, -1, 1)
+ tf_cam_bxpx3 = torch.bmm(inv_Ks_bx3x3, tf_coords_bxpx3.transpose(1, 2)).transpose(1, 2)
+ tf_cam_bxpx4 = torch.cat([tf_cam_bxpx3, torch.ones_like(tf_cam_bxpx3[..., :1])], -1)
+ tf_world_bxpx3 = torch.bmm(inv_T_bx4x4, tf_cam_bxpx4.transpose(1, 2)).transpose(1, 2)[..., :3]
+
+ return tf_world_bxpx3.reshape(-1, 3)
+
+
+def normalize(vertices, faces, normalized_scale=0.9, rotate_x=False, center_xyz=False):
+ vertices = vertices.cuda()
+
+ if center_xyz: # some mesh's center is not origin
+ vertices = vertices - vertices.mean(0, keepdim=True)
+
+    if rotate_x:  # rotate around the x axis to match the two coordinate systems
+        rot_mat = torch.eye(n=3, device='cuda')
+        theta = np.pi / 90  # note: np.pi / 90 rad is 2 degrees, not 90
+        rot_mat[1, 1] = np.cos(theta)
+        rot_mat[2, 2] = np.cos(theta)
+        rot_mat[1, 2] = -np.sin(theta)
+        rot_mat[2, 1] = np.sin(theta)
+ vertices = rot_mat @ vertices.transpose(0,1)
+ vertices = vertices.transpose(0,1)
+
+ scale = (vertices.max(dim=0)[0] - vertices.min(dim=0)[0]).max()
+ mesh_v1 = vertices / scale * normalized_scale
+ mesh_f1 = faces.long().cuda()
+ return mesh_v1, mesh_f1
+
+
+
+def sample_surface_pts(path):
+    try:
+        mesh_path, output_pth, debug = path
+        mesh = trimesh.load(mesh_path)  # note: trimesh may fail on some PLY files
+ if mesh.vertices.shape[0] == 0:
+ return
+
+ mesh_v = torch.Tensor(mesh.vertices)
+ # mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=0.9, rotate_x=True)
+ # mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=1.0, rotate_x=True, center_xyz=True)
+ mesh_v, mesh_f = normalize(mesh_v, torch.Tensor(mesh.faces), normalized_scale=1.0, rotate_x=False, center_xyz=True)
+
+ # generate camera matrices
+ # Rs = get_views()
+ # Rs = get_views(semi_sphere=True)
+ Rs = get_views(semi_sphere=False)
+ # get depth images
+ depths = render(mesh_v, mesh_f, Rs)
+ # project to world space
+        try:
+            pcd = fusion(depths, Rs)
+        except Exception:
+            return  # skip meshes whose depth fusion fails
+
+        # farthest-point-sample the fused points down to a fixed budget
+        pcd, fps_idx = pytorch3d.ops.sample_farthest_points(
+            pcd.unsqueeze(0).cuda(), K=4096,
+            random_start_point=True)
+        pcd = pcd[0].cpu().numpy()
+
+        pcd = trimesh.points.PointCloud(pcd)
+        pcd.export(output_pth.replace('.npz', '.obj'))
+    except Exception as e:
+        print(e, flush=True)
+
+
+if __name__ == '__main__':
+ mp.set_start_method('spawn')
+
+ shapenet_root = options.shape_root
+ save_root = options.save_root
+
+ debug = True
+
+ cmds = []
+
+ # ! for baseline samples
+
+ objv_dataset = '/mnt/sfs-common/yslan/Dataset/Obajverse/chunk-jpeg-normal/bs_16_fixsave3/170K/512/'
+ dataset_json = os.path.join(objv_dataset, 'dataset.json')
+ with open(dataset_json, 'r') as f:
+ dataset_json = json.load(f)
+
+ # all_objs = dataset_json['Animals'][::3][:6250]
+ all_objs = dataset_json['Animals'][::3][1100:2200]
+ all_objs = all_objs[:600]
+
+ os.makedirs(save_root, exist_ok=True)
+
+
+ for obj_folder in tqdm.tqdm(all_objs):
+ obj_folder = '/'.join(obj_folder.split('/')[1:])
+ for idx in [0]:
+
+ if 'CRM' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, obj_folder, f'{idx}', '*.obj'))[0]
+ elif 'Lara' in shapenet_root:
+ try:
+ mesh_path = glob.glob(os.path.join(shapenet_root, '/'.join(obj_folder.split('/')[:-1]), f'{idx}.jpg', '*.obj'))[0]
+            except IndexError:
+                continue  # no exported mesh for this instance
+
+ elif 'scale3d' in shapenet_root:
+ if 'eval_nerf' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, '/'.join(obj_folder.split('/')[:-1]), '1', 'mesh', '*.ply'))[0]
+ elif 'eval_mesh' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, '/'.join(obj_folder.split('/')[:-1]), '1', 'mesh', '*.obj'))[0]
+
+ elif 'LRM' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, '/'.join(obj_folder.split('/')[:-1]), '0', '*.ply'))[0]
+
+ elif 'ln3diff' in shapenet_root:
+ mesh_path = glob.glob(os.path.join(shapenet_root, obj_folder, '0', 'mesh.obj'))[0]
+ else:
+            if os.path.exists(os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.obj')):
+ mesh_path = os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.obj')
+ else:
+ mesh_path = os.path.join(shapenet_root, obj_folder, f'{idx}/mesh.ply')
+
+ save_name = '-'.join(obj_folder.split('/'))
+ cmds += [(mesh_path, os.path.join(save_root, f'{save_name}_pcd_4096.ply'), debug)]
+
+
+ if options.n_proc == 0:
+ for filepath in tqdm.tqdm(cmds):
+ sample_surface_pts(filepath)
+ else:
+ with Pool(options.n_proc) as p:
+ list(tqdm.tqdm(p.imap(sample_surface_pts, cmds), total=len(cmds)))
diff --git a/evaluations/requirements.txt b/evaluations/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..fc6df305a4169b13bcfab5e238e4ff1c97b6baaa
--- /dev/null
+++ b/evaluations/requirements.txt
@@ -0,0 +1,4 @@
+tensorflow-gpu>=2.0
+scipy
+requests
+tqdm
\ No newline at end of file
diff --git a/guided_diffusion/__init__.py b/guided_diffusion/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d018714d642ef967ed0152f418538c4019b2340
--- /dev/null
+++ b/guided_diffusion/__init__.py
@@ -0,0 +1,4 @@
+"""
+Codebase for "Improved Denoising Diffusion Probabilistic Models".
+Also merged continuous_diffusion.py from LSGM: https://github.com/NVlabs/LSGM
+"""
diff --git a/guided_diffusion/__pycache__/__init__.cpython-310.pyc b/guided_diffusion/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c277829f2fb256f204c418c6fd1548c08a73cdc3
Binary files /dev/null and b/guided_diffusion/__pycache__/__init__.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/__init__.cpython-39.pyc b/guided_diffusion/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..adcca73d6d56cda9d5850e785b0f0d5e8eb7fd3f
Binary files /dev/null and b/guided_diffusion/__pycache__/__init__.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/continuous_diffusion.cpython-39.pyc b/guided_diffusion/__pycache__/continuous_diffusion.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2d05683073ddd2030d679ea2d61f196edfdd2145
Binary files /dev/null and b/guided_diffusion/__pycache__/continuous_diffusion.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/continuous_diffusion_utils.cpython-39.pyc b/guided_diffusion/__pycache__/continuous_diffusion_utils.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c4e62a7c0cd82b2cb905b11043dd16f617b25d3a
Binary files /dev/null and b/guided_diffusion/__pycache__/continuous_diffusion_utils.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/continuous_distributions.cpython-39.pyc b/guided_diffusion/__pycache__/continuous_distributions.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cee45e60342d471b7f9e54493b338c02540648eb
Binary files /dev/null and b/guided_diffusion/__pycache__/continuous_distributions.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/dist_util.cpython-310.pyc b/guided_diffusion/__pycache__/dist_util.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8413bd128494d8dcaab2c2811a8ca0de23d0eeca
Binary files /dev/null and b/guided_diffusion/__pycache__/dist_util.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/dist_util.cpython-39.pyc b/guided_diffusion/__pycache__/dist_util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f6265129153101ff474466dcb7eed1a64c29c460
Binary files /dev/null and b/guided_diffusion/__pycache__/dist_util.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/fp16_util.cpython-310.pyc b/guided_diffusion/__pycache__/fp16_util.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6361b2a18bc040170a3944bb4f72aebba834d8fe
Binary files /dev/null and b/guided_diffusion/__pycache__/fp16_util.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/fp16_util.cpython-39.pyc b/guided_diffusion/__pycache__/fp16_util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a45a0d7f241d82dcf6da67c722f473c6ee4a610f
Binary files /dev/null and b/guided_diffusion/__pycache__/fp16_util.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/gaussian_diffusion.cpython-310.pyc b/guided_diffusion/__pycache__/gaussian_diffusion.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8934b9f92778765d14d7daa1a0460a08adf8950b
Binary files /dev/null and b/guided_diffusion/__pycache__/gaussian_diffusion.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/gaussian_diffusion.cpython-39.pyc b/guided_diffusion/__pycache__/gaussian_diffusion.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bdfa36f85c9df1c500b5270c7096cbf50df2a79a
Binary files /dev/null and b/guided_diffusion/__pycache__/gaussian_diffusion.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/logger.cpython-310.pyc b/guided_diffusion/__pycache__/logger.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c82d6742f45671226cdf0812cc5c1a55c53c84e5
Binary files /dev/null and b/guided_diffusion/__pycache__/logger.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/logger.cpython-39.pyc b/guided_diffusion/__pycache__/logger.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d47fa1b3cafd8e09d406c15adc9d2c32cfc1ebb
Binary files /dev/null and b/guided_diffusion/__pycache__/logger.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/losses.cpython-310.pyc b/guided_diffusion/__pycache__/losses.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..714b84bc300e142c511e6db16aff107dc8b299e2
Binary files /dev/null and b/guided_diffusion/__pycache__/losses.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/losses.cpython-39.pyc b/guided_diffusion/__pycache__/losses.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9b12619094ba8a741255ed4020f8057980f1b92e
Binary files /dev/null and b/guided_diffusion/__pycache__/losses.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/nn.cpython-310.pyc b/guided_diffusion/__pycache__/nn.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4789805b14d32c559dce53310cbd98a5ce3778d2
Binary files /dev/null and b/guided_diffusion/__pycache__/nn.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/nn.cpython-39.pyc b/guided_diffusion/__pycache__/nn.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..632bd30869c6f2b86b3a646e970dc0fee68bbbaf
Binary files /dev/null and b/guided_diffusion/__pycache__/nn.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/resample.cpython-310.pyc b/guided_diffusion/__pycache__/resample.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..697f0a89cc85bb253e2778da1416b56d99cc9a47
Binary files /dev/null and b/guided_diffusion/__pycache__/resample.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/resample.cpython-39.pyc b/guided_diffusion/__pycache__/resample.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ef0003a5becb8ba823607f9921371639ef7d75a0
Binary files /dev/null and b/guided_diffusion/__pycache__/resample.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/respace.cpython-310.pyc b/guided_diffusion/__pycache__/respace.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68f08f1c026f0ac8e4e51e2a57f4577e756c94e0
Binary files /dev/null and b/guided_diffusion/__pycache__/respace.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/respace.cpython-39.pyc b/guided_diffusion/__pycache__/respace.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b1405458501a3bc931f541bc7c197de4526728af
Binary files /dev/null and b/guided_diffusion/__pycache__/respace.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/script_util.cpython-310.pyc b/guided_diffusion/__pycache__/script_util.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aca621d8c286cb55c2658b6fda73fb52ad660185
Binary files /dev/null and b/guided_diffusion/__pycache__/script_util.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/script_util.cpython-39.pyc b/guided_diffusion/__pycache__/script_util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b9c342c32ccfef9b5a760aa532673965cd0cdf92
Binary files /dev/null and b/guided_diffusion/__pycache__/script_util.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/train_util.cpython-39.pyc b/guided_diffusion/__pycache__/train_util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2fef7285b554b85574b132862b810fca8d05ec34
Binary files /dev/null and b/guided_diffusion/__pycache__/train_util.cpython-39.pyc differ
diff --git a/guided_diffusion/__pycache__/unet.cpython-310.pyc b/guided_diffusion/__pycache__/unet.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dda60fb4d6a2d312027b9a8b10c3275429cbf428
Binary files /dev/null and b/guided_diffusion/__pycache__/unet.cpython-310.pyc differ
diff --git a/guided_diffusion/__pycache__/unet.cpython-39.pyc b/guided_diffusion/__pycache__/unet.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e28c6be677d4e13937034d7afcadbba9781ba2a3
Binary files /dev/null and b/guided_diffusion/__pycache__/unet.cpython-39.pyc differ
diff --git a/guided_diffusion/continuous_diffusion.py b/guided_diffusion/continuous_diffusion.py
new file mode 100644
index 0000000000000000000000000000000000000000..775733d8f9bb47c909368c17ace82d415c98ac39
--- /dev/null
+++ b/guided_diffusion/continuous_diffusion.py
@@ -0,0 +1,795 @@
+# ---------------------------------------------------------------
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# This work is licensed under the NVIDIA Source Code License
+# for LSGM. To view a copy of this license, see the LICENSE file.
+# ---------------------------------------------------------------
+
+from pdb import set_trace as st
+from abc import ABC, abstractmethod
+import numpy as np
+import torch
+import gc
+from .continuous_distributions import log_p_standard_normal, log_p_var_normal
+from .continuous_diffusion_utils import trace_df_dx_hutchinson, sample_gaussian_like, sample_rademacher_like, get_mixed_prediction
+from torchdiffeq import odeint
+from torch.cuda.amp import autocast
+from timeit import default_timer as timer
+
+from guided_diffusion import dist_util, logger
+
+
+def make_diffusion(args):
+    """ Simple diffusion factory that returns diffusion instances. Only use this to create continuous diffusions. """
+ if args.sde_sde_type == 'geometric_sde':
+ return DiffusionGeometric(args)
+ elif args.sde_sde_type == 'vpsde':
+ return DiffusionVPSDE(args)
+ elif args.sde_sde_type == 'sub_vpsde':
+ return DiffusionSubVPSDE(args)
+ elif args.sde_sde_type == 'vesde':
+ return DiffusionVESDE(args)
+ else:
+ raise ValueError("Unrecognized sde type: {}".format(args.sde_sde_type))
+
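+# A minimal usage sketch (illustrative only, not part of the original file): the
+# factory expects an args namespace carrying at least the fields read above and in
+# the constructors below, e.g.
+#
+#   from types import SimpleNamespace
+#   args = SimpleNamespace(sde_sde_type='vpsde', sde_sigma2_0=0.0,
+#                          sde_time_eps=1e-2, iw_subvp_like_vp_sde=False,
+#                          batch_size=8)
+#   diffusion = make_diffusion(args)
+#   t, var_t, m_t, w_t, w_t_ll, g2_t = diffusion.iw_quantities('ll_iw', size=8)
+#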
+
+class DiffusionBase(ABC):
+ """
+ Abstract base class for all diffusion implementations.
+ """
+
+ def __init__(self, args):
+ super().__init__()
+ self.args = args
+ self.sigma2_0 = args.sde_sigma2_0
+ self.sde_type = args.sde_sde_type
+
+ @abstractmethod
+ def f(self, t):
+ """ returns the drift coefficient at time t: f(t) """
+ pass
+
+ @abstractmethod
+ def g2(self, t):
+ """ returns the squared diffusion coefficient at time t: g^2(t) """
+ pass
+
+    @abstractmethod
+    def var(self, t):
+        r""" returns the variance at time t, \sigma_t^2 """
+        pass
+
+    @abstractmethod
+    def e2int_f(self, t):
+        r""" returns e^{\int_0^t f(s) ds}, which corresponds to the coefficient of the mean at time t. """
+        pass
+
+ @abstractmethod
+ def inv_var(self, var):
+ """ inverse of the variance function at input variance var. """
+ pass
+
+ @abstractmethod
+ def mixing_component(self, x_noisy, var_t, t, enabled):
+ """ returns mixing component which is the optimal denoising model assuming that q(z_0) is N(0, 1) """
+ pass
+
+ def sample_q(self, x_init, noise, var_t, m_t):
+ """ returns a sample from diffusion process at time t """
+ return m_t * x_init + torch.sqrt(var_t) * noise
+
+ def log_snr(self, m_t, var_t):
+ return torch.log((torch.square(m_t) / var_t))
+
+ def _predict_x0_from_eps(self, z, eps, logsnr):
+        """x0 = (z - sigma * eps) / alpha
+ """
+ return torch.sqrt(1 + torch.exp(-logsnr)) * (
+ z - eps * torch.rsqrt(1 + torch.exp(logsnr)))
+
+ def _predict_eps_from_x0(self, z, x0, logsnr):
+        """eps = (z - alpha * x0) / sigma
+ """
+ return torch.sqrt(1 + torch.exp(logsnr)) * (
+ z - x0 * torch.rsqrt(1 + torch.exp(-logsnr)))
+
+    def _predict_eps_from_z_and_v(self, v_t, var_t, z, m_t):
+        # TODO, use logsnr here?
+        # with v := alpha * eps - sigma * x0 and z = alpha * x0 + sigma * eps,
+        # eps = sigma * z + alpha * v (here m_t = alpha, sqrt(var_t) = sigma)
+        return torch.sqrt(var_t) * z + m_t * v_t
+
+    def _predict_x0_from_z_and_v(self, v_t, var_t, z, m_t):
+        # x0 = alpha * z - sigma * v; note the minus sign, which follows from the
+        # same v-parameterization as in _predict_eps_from_z_and_v above
+        return m_t * z - torch.sqrt(var_t) * v_t
+
+ def cross_entropy_const(self, ode_eps):
+ """ returns cross entropy factor with variance according to ode integration cutoff ode_eps """
+ # _, c, h, w = x_init.shape
+ return 0.5 * (1.0 + torch.log(2.0 * np.pi * self.var(
+ t=torch.tensor(ode_eps, device=dist_util.dev()))))
+
+ def compute_ode_nll(self, dae, eps, ode_eps, ode_solver_tol,
+ enable_autocast, no_autograd, num_samples, report_std):
+ """ calculates NLL based on ODE framework, assuming integration cutoff ode_eps """
+ # ODE solver starts consuming the CPU memory without this on large models
+ # https://github.com/scipy/scipy/issues/10070
+ gc.collect()
+
+ dae.eval()
+
+ def ode_func(t, state):
+ """ the ode function (including log probability integration for NLL calculation) """
+ global nfe_counter
+ nfe_counter = nfe_counter + 1
+
+ x = state[0].detach()
+ x.requires_grad_(True)
+ noise = sample_gaussian_like(
+ x) # could also use rademacher noise (sample_rademacher_like)
+ with torch.set_grad_enabled(True):
+ with autocast(enabled=enable_autocast):
+ variance = self.var(t=t)
+ mixing_component = self.mixing_component(
+ x_noisy=x,
+ var_t=variance,
+ t=t,
+ enabled=dae.mixed_prediction)
+ pred_params = dae(x=x, t=t)
+ params = get_mixed_prediction(dae.mixed_prediction,
+ pred_params,
+ dae.mixing_logit,
+ mixing_component)
+ dx_dt = self.f(t=t) * x + 0.5 * self.g2(
+ t=t) * params / torch.sqrt(variance)
+
+ with autocast(enabled=False):
+ dlogp_x_dt = -trace_df_dx_hutchinson(
+ dx_dt, x, noise, no_autograd).view(x.shape[0], 1)
+
+ return (dx_dt, dlogp_x_dt)
+
+ # NFE counter
+ global nfe_counter
+
+ nll_all, nfe_all = [], []
+ for i in range(num_samples):
+ # integrated log probability
+ logp_diff_t0 = torch.zeros(eps.shape[0], 1, device=dist_util.dev())
+
+ nfe_counter = 0
+
+ # solve the ODE
+ x_t, logp_diff_t = odeint(
+ ode_func,
+ (eps, logp_diff_t0),
+ torch.tensor([ode_eps, 1.0], device=dist_util.dev()),
+ atol=ode_solver_tol,
+ rtol=ode_solver_tol,
+ method="scipy_solver",
+ options={"solver": 'RK45'},
+ )
+ # last output values
+ x_t0, logp_diff_t0 = x_t[-1], logp_diff_t[-1]
+
+ # prior
+ if self.sde_type == 'vesde':
+ logp_prior = torch.sum(log_p_var_normal(x_t0,
+ var=self.sigma2_max),
+ dim=[1, 2, 3])
+ else:
+ logp_prior = torch.sum(log_p_standard_normal(x_t0),
+ dim=[1, 2, 3])
+
+ log_likelihood = logp_prior - logp_diff_t0.view(-1)
+
+ nll_all.append(-log_likelihood)
+ nfe_all.append(nfe_counter)
+
+ nfe_mean = np.mean(nfe_all)
+ nll_all = torch.stack(nll_all, dim=1)
+ nll_mean = torch.mean(nll_all, dim=1)
+ if num_samples > 1 and report_std:
+ nll_stddev = torch.std(nll_all, dim=1)
+ nll_stddev_batch = torch.mean(nll_stddev)
+ nll_stderror_batch = nll_stddev_batch / np.sqrt(num_samples)
+ else:
+ nll_stddev_batch = None
+ nll_stderror_batch = None
+ return nll_mean, nfe_mean, nll_stddev_batch, nll_stderror_batch
+
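+    # Note (added for readability): both compute_ode_nll above and sample_model_ode
+    # below integrate the probability-flow ODE. For the NLL, the instantaneous
+    # change-of-variables formula d/dt log p(x(t)) = -tr(d f_ode / dx) gives
+    # log p(x(ode_eps)) = log p_prior(x(1)) - int_{ode_eps}^{1} tr(.) dt, with the
+    # trace estimated stochastically by trace_df_dx_hutchinson.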
+ def sample_model_ode(self,
+ dae,
+ num_samples,
+ shape,
+ ode_eps,
+ ode_solver_tol,
+ enable_autocast,
+ temp,
+ noise=None):
+ """ generates samples using the ODE framework, assuming integration cutoff ode_eps """
+ # ODE solver starts consuming the CPU memory without this on large models
+ # https://github.com/scipy/scipy/issues/10070
+ gc.collect()
+
+ dae.eval()
+
+ def ode_func(t, x):
+ """ the ode function (sampling only, no NLL stuff) """
+ global nfe_counter
+ nfe_counter = nfe_counter + 1
+ with autocast(enabled=enable_autocast):
+ variance = self.var(t=t)
+ mixing_component = self.mixing_component(
+ x_noisy=x,
+ var_t=variance,
+ t=t,
+ enabled=dae.mixed_prediction)
+ pred_params = dae(x=x, t=t)
+ params = get_mixed_prediction(dae.mixed_prediction,
+ pred_params, dae.mixing_logit,
+ mixing_component)
+ dx_dt = self.f(t=t) * x + 0.5 * self.g2(
+ t=t) * params / torch.sqrt(variance)
+
+ return dx_dt
+
+ # the initial noise
+ if noise is None:
+ noise = torch.randn(size=[num_samples] + shape,
+ device=dist_util.dev())
+
+ if self.sde_type == 'vesde':
+ noise_init = temp * noise * np.sqrt(self.sigma2_max)
+ else:
+ noise_init = temp * noise
+
+ # NFE counter
+ global nfe_counter
+ nfe_counter = 0
+
+ # solve the ODE
+ start = timer()
+ samples_out = odeint(
+ ode_func,
+ noise_init,
+ torch.tensor([1.0, ode_eps], device=dist_util.dev()),
+ atol=ode_solver_tol,
+ rtol=ode_solver_tol,
+ method="scipy_solver",
+ options={"solver": 'RK45'},
+ )
+ end = timer()
+ ode_solve_time = end - start
+
+ return samples_out[-1], nfe_counter, ode_solve_time
+
+ # def iw_quantities(self, size, time_eps, iw_sample_mode, iw_subvp_like_vp_sde):
+ def iw_quantities(self, iw_sample_mode, size=None):
+
+ args = self.args
+ time_eps, iw_subvp_like_vp_sde = args.sde_time_eps, args.iw_subvp_like_vp_sde
+ if size is None:
+ size = args.batch_size
+
+ if self.sde_type in ['geometric_sde', 'vpsde']:
+ return self._iw_quantities_vpsdelike(size, time_eps,
+ iw_sample_mode)
+ elif self.sde_type in ['sub_vpsde']:
+ return self._iw_quantities_subvpsdelike(size, time_eps,
+ iw_sample_mode,
+ iw_subvp_like_vp_sde)
+ elif self.sde_type in ['vesde']:
+ return self._iw_quantities_vesde(size, time_eps, iw_sample_mode)
+ else:
+ raise NotImplementedError
+
+ def _iw_quantities_vpsdelike(self, size, time_eps, iw_sample_mode):
+ """
+        For all SDEs where the underlying SDE is of the form dz = -0.5 * beta(t) * z * dt + sqrt{beta(t)} * dw,
+        such as the VPSDE.
+ """
+ rho = torch.rand(size=[size], device=dist_util.dev())
+
+ # In the following, obj_weight_t corresponds to the weight in front of the l2 loss for the given iw_sample_mode.
+ # obj_weight_t_ll corresponds to the weight that converts the weighting scheme in iw_sample_mode to likelihood
+ # weighting.
+
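+        # Reading aid (not in the original code): 'll_iw' below draws var_t
+        # log-uniformly between var(time_eps) and var(1) and maps it back through
+        # inv_var; this is the LSGM importance distribution that reduces the variance
+        # of the likelihood-weighted objective compared to uniform t sampling.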
+ if iw_sample_mode == 'll_uniform':
+ # uniform t sampling - likelihood obj. for both q and p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'll_iw': # ! q-obj
+ # importance sampling for likelihood obj. - likelihood obj. for both q and p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ sigma2_1, sigma2_eps = self.var(ones), self.var(time_eps * ones)
+ log_sigma2_1, log_sigma2_eps = torch.log(sigma2_1), torch.log(
+ sigma2_eps)
+ var_t = torch.exp(rho * log_sigma2_1 +
+ (1 - rho) * log_sigma2_eps) # sigma square
+ t = self.inv_var(var_t)
+            m_t, g2_t = self.e2int_f(t), self.g2(t)  # m_t corresponds to sqrt(alpha_bar)
+ obj_weight_t = obj_weight_t_ll = 0.5 * (
+ log_sigma2_1 - log_sigma2_eps) / (1.0 - var_t)
+
+ elif iw_sample_mode == 'drop_all_uniform':
+ # uniform t sampling - likelihood obj. for q, all-prefactors-dropped obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = torch.ones(1, device=dist_util.dev())
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'drop_all_iw':
+ # importance sampling for all-pref.-dropped obj. - likelihood obj. for q, all-pref.-dropped obj. for p
+ assert self.sde_type == 'vpsde', 'Importance sampling for fully unweighted objective is currently only ' \
+ 'implemented for the regular VPSDE.'
+ t = torch.sqrt(1.0 / self.delta_beta_half) * torch.erfinv(
+ rho * self.const_norm_2 + self.const_erf) - self.beta_frac
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = self.const_norm / (1.0 - var_t)
+ obj_weight_t_ll = obj_weight_t * g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'drop_sigma2t_iw': # ! default mode for p
+ # importance sampling for inv_sigma2_t-dropped obj. - likelihood obj. for q, inv_sigma2_t-dropped obj. for p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ sigma2_1, sigma2_eps = self.var(ones), self.var(time_eps * ones)
+ var_t = rho * sigma2_1 + (1 - rho) * sigma2_eps # ! sigma square
+ t = self.inv_var(var_t)
+            m_t, g2_t = self.e2int_f(t), self.g2(t)  # ! m_t corresponds to sqrt(alpha_bar)
+ obj_weight_t = 0.5 * (sigma2_1 - sigma2_eps) / (1.0 - var_t)
+ obj_weight_t_ll = obj_weight_t / var_t
+
+ elif iw_sample_mode == 'drop_sigma2t_uniform':
+ # uniform sampling for inv_sigma2_t-dropped obj. - likelihood obj. for q, inv_sigma2_t-dropped obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = g2_t / 2.0
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'rescale_iw':
+ # importance sampling for 1/(1-sigma2_t) resc. obj. - likelihood obj. for q, 1/(1-sigma2_t) resc. obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = 0.5 / (1.0 - var_t)
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ else:
+ raise ValueError(
+ "Unrecognized importance sampling type: {}".format(
+ iw_sample_mode))
+
+ return t, var_t.view(-1, 1, 1, 1), m_t.view(-1, 1, 1, 1), obj_weight_t.view(-1, 1, 1, 1), \
+ obj_weight_t_ll.view(-1, 1, 1, 1), g2_t.view(-1, 1, 1, 1)
+
+ def _iw_quantities_subvpsdelike(self, size, time_eps, iw_sample_mode,
+ iw_subvp_like_vp_sde):
+ """
+ For all SDEs where the underlying SDE is of the form
+ dz = -0.5 * beta(t) * z * dt + sqrt{beta(t) * (1 - exp[-2 * betaintegral])} * dw, like for the Sub-VPSDE.
+        When iw_subvp_like_vp_sde is True, we define the importance sampling distributions based on an analogous
+        VPSDE while still using the Sub-VPSDE. The motivation is that deriving the correct importance sampling
+        distributions for the Sub-VPSDE itself is hard, but the distributions from the analogous VPSDE likely
+        already reduce the variance significantly for the Sub-VPSDE as well.
+ """
+ rho = torch.rand(size=[size], device=dist_util.dev())
+
+ # In the following, obj_weight_t corresponds to the weight in front of the l2 loss for the given iw_sample_mode.
+ # obj_weight_t_ll corresponds to the weight that converts the weighting scheme in iw_sample_mode to likelihood
+ # weighting.
+ if iw_sample_mode == 'll_uniform':
+ # uniform t sampling - likelihood obj. for both q and p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'll_iw':
+ if iw_subvp_like_vp_sde:
+ # importance sampling for vpsde likelihood obj. - sub-vpsde likelihood obj. for both q and p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ sigma2_1, sigma2_eps = self.var_vpsde(ones), self.var_vpsde(
+ time_eps * ones)
+ log_sigma2_1, log_sigma2_eps = torch.log(sigma2_1), torch.log(
+ sigma2_eps)
+ var_t_vpsde = torch.exp(rho * log_sigma2_1 +
+ (1 - rho) * log_sigma2_eps)
+ t = self.inv_var_vpsde(var_t_vpsde)
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = obj_weight_t_ll = g2_t / (2.0 * var_t) * \
+ (log_sigma2_1 - log_sigma2_eps) * var_t_vpsde / (1 - var_t_vpsde) / self.beta(t)
+ else:
+ raise NotImplementedError
+
+ elif iw_sample_mode == 'drop_all_uniform':
+ # uniform t sampling - likelihood obj. for q, all-prefactors-dropped obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = torch.ones(1, device=dist_util.dev())
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'drop_all_iw':
+ if iw_subvp_like_vp_sde:
+ # importance sampling for all-pref.-dropped obj. - likelihood obj. for q, all-pref.-dropped obj. for p
+ assert self.sde_type == 'sub_vpsde', 'Importance sampling for fully unweighted objective is ' \
+ 'currently only implemented for the Sub-VPSDE.'
+ t = torch.sqrt(1.0 / self.delta_beta_half) * torch.erfinv(
+ rho * self.const_norm_2 + self.const_erf) - self.beta_frac
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = self.const_norm / (1.0 - self.var_vpsde(t))
+ obj_weight_t_ll = obj_weight_t * g2_t / (2.0 * var_t)
+ else:
+ raise NotImplementedError
+
+ elif iw_sample_mode == 'drop_sigma2t_iw':
+ if iw_subvp_like_vp_sde:
+ # importance sampling for inv_sigma2_t-dropped obj. - likelihood obj. for q, inv_sigma2_t-dropped obj. for p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ sigma2_1, sigma2_eps = self.var_vpsde(ones), self.var_vpsde(
+ time_eps * ones)
+ var_t_vpsde = rho * sigma2_1 + (1 - rho) * sigma2_eps
+ t = self.inv_var_vpsde(var_t_vpsde)
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = 0.5 * g2_t / self.beta(t) * (
+ sigma2_1 - sigma2_eps) / (1.0 - var_t_vpsde)
+ obj_weight_t_ll = obj_weight_t / var_t
+ else:
+ raise NotImplementedError
+
+ elif iw_sample_mode == 'drop_sigma2t_uniform':
+ # uniform sampling for inv_sigma2_t-dropped obj. - likelihood obj. for q, inv_sigma2_t-dropped obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = g2_t / 2.0
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'rescale_iw':
+ # importance sampling for 1/(1-sigma2_t) resc. obj. - likelihood obj. for q, 1/(1-sigma2_t) resc. obj. for p
+ # Note that we use the sub-vpsde variance to scale the p objective! It's not clear what's optimal here!
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = 0.5 / (1.0 - var_t)
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ else:
+ raise ValueError(
+ "Unrecognized importance sampling type: {}".format(
+ iw_sample_mode))
+
+ return t, var_t.view(-1, 1, 1, 1), m_t.view(-1, 1, 1, 1), obj_weight_t.view(-1, 1, 1, 1), \
+ obj_weight_t_ll.view(-1, 1, 1, 1), g2_t.view(-1, 1, 1, 1)
+
+ def _iw_quantities_vesde(self, size, time_eps, iw_sample_mode):
+ """
+ For the VESDE.
+ """
+ rho = torch.rand(size=[size], device=dist_util.dev())
+
+ # In the following, obj_weight_t corresponds to the weight in front of the l2 loss for the given iw_sample_mode.
+ # obj_weight_t_ll corresponds to the weight that converts the weighting scheme in iw_sample_mode to likelihood
+ # weighting.
+ if iw_sample_mode == 'll_uniform':
+ # uniform t sampling - likelihood obj. for both q and p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'll_iw':
+ # importance sampling for likelihood obj. - likelihood obj. for both q and p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ nsigma2_1, nsigma2_eps, sigma2_eps = self.var_N(ones), self.var_N(
+ time_eps * ones), self.var(time_eps * ones)
+ log_frac_sigma2_1, log_frac_sigma2_eps = torch.log(
+ self.sigma2_max / nsigma2_1), torch.log(nsigma2_eps /
+ sigma2_eps)
+ var_N_t = (1.0 - self.sigma2_min) / (
+ 1.0 - torch.exp(rho *
+ (log_frac_sigma2_1 + log_frac_sigma2_eps) -
+ log_frac_sigma2_eps))
+ t = self.inv_var_N(var_N_t)
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = obj_weight_t_ll = 0.5 * (
+ log_frac_sigma2_1 +
+ log_frac_sigma2_eps) * self.var_N(t) / (1.0 - self.sigma2_min)
+
+ elif iw_sample_mode == 'drop_all_uniform':
+ # uniform t sampling - likelihood obj. for q, all-prefactors-dropped obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = torch.ones(1, device=dist_util.dev())
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'drop_all_iw':
+ # importance sampling for all-pref.-dropped obj. - likelihood obj. for q, all-pref.-dropped obj. for p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ nsigma2_1, nsigma2_eps, sigma2_eps = self.var_N(ones), self.var_N(
+ time_eps * ones), self.var(time_eps * ones)
+ log_frac_sigma2_1, log_frac_sigma2_eps = torch.log(
+ self.sigma2_max / nsigma2_1), torch.log(nsigma2_eps /
+ sigma2_eps)
+ var_N_t = (1.0 - self.sigma2_min) / (
+ 1.0 - torch.exp(rho *
+ (log_frac_sigma2_1 + log_frac_sigma2_eps) -
+ log_frac_sigma2_eps))
+ t = self.inv_var_N(var_N_t)
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t_ll = 0.5 * (log_frac_sigma2_1 +
+ log_frac_sigma2_eps) * self.var_N(t) / (
+ 1.0 - self.sigma2_min)
+ obj_weight_t = 2.0 * obj_weight_t_ll / np.log(
+ self.sigma2_max / self.sigma2_min)
+
+ elif iw_sample_mode == 'drop_sigma2t_iw':
+ # importance sampling for inv_sigma2_t-dropped obj. - likelihood obj. for q, inv_sigma2_t-dropped obj. for p
+ ones = torch.ones_like(rho, device=dist_util.dev())
+ nsigma2_1, nsigma2_eps = self.var_N(ones), self.var_N(time_eps *
+ ones)
+ var_N_t = torch.exp(rho * torch.log(nsigma2_1) +
+ (1 - rho) * torch.log(nsigma2_eps))
+ t = self.inv_var_N(var_N_t)
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = 0.5 * torch.log(
+ nsigma2_1 / nsigma2_eps) * self.var_N(t)
+ obj_weight_t_ll = obj_weight_t / var_t
+
+ elif iw_sample_mode == 'drop_sigma2t_uniform':
+ # uniform sampling for inv_sigma2_t-dropped obj. - likelihood obj. for q, inv_sigma2_t-dropped obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = g2_t / 2.0
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ elif iw_sample_mode == 'rescale_iw':
+ # uniform sampling for 1/(1-sigma2_t) resc. obj. - likelihood obj. for q, 1/(1-sigma2_t) resc. obj. for p
+ t = rho * (1. - time_eps) + time_eps
+ var_t, m_t, g2_t = self.var(t), self.e2int_f(t), self.g2(t)
+ obj_weight_t = 0.5 / (1.0 - var_t)
+ obj_weight_t_ll = g2_t / (2.0 * var_t)
+
+ else:
+ raise ValueError(
+ "Unrecognized importance sampling type: {}".format(
+ iw_sample_mode))
+
+ return t, var_t.view(-1, 1, 1, 1), m_t.view(-1, 1, 1, 1), obj_weight_t.view(-1, 1, 1, 1), \
+ obj_weight_t_ll.view(-1, 1, 1, 1), g2_t.view(-1, 1, 1, 1)
+
+
+class DiffusionGeometric(DiffusionBase):
+ """
+ Diffusion implementation with dz = -0.5 * beta(t) * z * dt + sqrt(beta(t)) * dW SDE and geometric progression of
+ variance. This is our new diffusion.
+ """
+
+ def __init__(self, args):
+ super().__init__(args)
+ self.sigma2_min = args.sde_sigma2_min
+ self.sigma2_max = args.sde_sigma2_max
+
+ def f(self, t):
+ return -0.5 * self.g2(t)
+
+ def g2(self, t):
+ sigma2_geom = self.sigma2_min * (
+ (self.sigma2_max / self.sigma2_min)**t)
+ log_term = np.log(self.sigma2_max / self.sigma2_min)
+ return sigma2_geom * log_term / (1.0 - self.sigma2_0 +
+ self.sigma2_min - sigma2_geom)
+
+ def var(self, t):
+ return self.sigma2_min * ((self.sigma2_max / self.sigma2_min)**
+ t) - self.sigma2_min + self.sigma2_0
+
+ def e2int_f(self, t):
+ return torch.sqrt(1.0 + self.sigma2_min *
+ (1.0 - (self.sigma2_max / self.sigma2_min)**t) /
+ (1.0 - self.sigma2_0))
+
+ def inv_var(self, var):
+ return torch.log(
+ (var + self.sigma2_min - self.sigma2_0) /
+ self.sigma2_min) / np.log(self.sigma2_max / self.sigma2_min)
+
+ def mixing_component(self, x_noisy, var_t, t, enabled):
+ if enabled:
+ return torch.sqrt(var_t) * x_noisy
+ else:
+ return None
+
+
+class DiffusionVPSDE(DiffusionBase):
+ """
+    Diffusion implementation of the VPSDE. This uses the same SDE as DiffusionGeometric but with a linear beta(t).
+    Note that we need to scale beta_start and beta_end by 1000 relative to JH's DDPM values, since our t is in [0, 1].
+ """
+
+ def __init__(self, args):
+ super().__init__(args)
+ # self.beta_start = args.sde_beta_start # 0.1
+ # self.beta_end = args.sde_beta_end # 20
+
+ # ! hard coded, in the scale of 1000.
+ # beta_start = scale * 0.0001
+ # beta_end = scale * 0.02
+
+ self.beta_start = 0.1
+ self.beta_end = 20
+
+ # auxiliary constants
+ self.time_eps = args.sde_time_eps # 0.01 by default in LSGM. Any influence?
+ self.delta_beta_half = torch.tensor(0.5 *
+ (self.beta_end - self.beta_start),
+ device=dist_util.dev())
+ self.beta_frac = torch.tensor(self.beta_start /
+ (self.beta_end - self.beta_start),
+ device=dist_util.dev())
+ self.const_aq = (1.0 - self.sigma2_0) * torch.exp(
+ 0.5 * self.beta_frac) * torch.sqrt(
+ 0.25 * np.pi / self.delta_beta_half)
+ self.const_erf = torch.erf(
+ torch.sqrt(self.delta_beta_half) *
+ (self.time_eps + self.beta_frac))
+ self.const_norm = self.const_aq * (torch.erf(
+ torch.sqrt(self.delta_beta_half) *
+ (1.0 + self.beta_frac)) - self.const_erf)
+ self.const_norm_2 = torch.erf(
+ torch.sqrt(self.delta_beta_half) *
+ (1.0 + self.beta_frac)) - self.const_erf
+
+ def f(self, t):
+ return -0.5 * self.g2(t)
+
+ def g2(self, t):
+ return self.beta_start + (self.beta_end - self.beta_start) * t
+
+ def var(self, t):
+ return 1.0 - (1.0 - self.sigma2_0
+ ) * torch.exp(-self.beta_start * t - 0.5 *
+ (self.beta_end - self.beta_start) * t * t)
+
+ def e2int_f(self, t):
+ return torch.exp(-0.5 * self.beta_start * t - 0.25 *
+ (self.beta_end - self.beta_start) * t * t)
+
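+    # inv_var solves var(t) = 1 - (1 - sigma2_0) * exp(-beta_start * t - 0.5 * (beta_end - beta_start) * t^2)
+    # for t: with a = beta_end - beta_start and c = log((1 - var) / (1 - sigma2_0)), this reduces to the
+    # quadratic 0.5 * a * t^2 + beta_start * t + c = 0, of which the code takes the positive root.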
+ def inv_var(self, var):
+ c = torch.log((1 - var) / (1 - self.sigma2_0))
+ a = self.beta_end - self.beta_start
+ t = (-self.beta_start +
+ torch.sqrt(np.square(self.beta_start) - 2 * a * c)) / a
+ return t
+
+ def mixing_component(self, x_noisy, var_t, t, enabled):
+ if enabled:
+ return torch.sqrt(var_t) * x_noisy
+ else:
+ return None
+
+ def mixing_component_x0(self, x_noisy, var_t, t, enabled):
+ if enabled:
+ # return torch.sqrt(var_t) * x_noisy
+ return torch.sqrt(1-var_t) * x_noisy # zt * alpha_t
+ else:
+ return None
+
+
+class DiffusionSubVPSDE(DiffusionBase):
+ """
+ Diffusion implementation of the sub-VPSDE. Note that this uses a different SDE compared to the above two diffusions.
+ """
+
+ def __init__(self, args):
+ super().__init__(args)
+ self.beta_start = args.sde_beta_start
+ self.beta_end = args.sde_beta_end
+
+ # auxiliary constants (assumes regular VPSDE)
+ self.time_eps = args.sde_time_eps
+ self.delta_beta_half = torch.tensor(0.5 *
+ (self.beta_end - self.beta_start),
+ device=dist_util.dev())
+ self.beta_frac = torch.tensor(self.beta_start /
+ (self.beta_end - self.beta_start),
+ device=dist_util.dev())
+ self.const_aq = (1.0 - self.sigma2_0) * torch.exp(
+ 0.5 * self.beta_frac) * torch.sqrt(
+ 0.25 * np.pi / self.delta_beta_half)
+ self.const_erf = torch.erf(
+ torch.sqrt(self.delta_beta_half) *
+ (self.time_eps + self.beta_frac))
+ self.const_norm = self.const_aq * (torch.erf(
+ torch.sqrt(self.delta_beta_half) *
+ (1.0 + self.beta_frac)) - self.const_erf)
+ self.const_norm_2 = torch.erf(
+ torch.sqrt(self.delta_beta_half) *
+ (1.0 + self.beta_frac)) - self.const_erf
+
+ def f(self, t):
+ return -0.5 * self.beta(t)
+
+ def g2(self, t):
+ return self.beta(t) * (
+ 1.0 - torch.exp(-2.0 * self.beta_start * t -
+ (self.beta_end - self.beta_start) * t * t))
+
+ def var(self, t):
+ int_term = torch.exp(-self.beta_start * t - 0.5 *
+ (self.beta_end - self.beta_start) * t * t)
+ return torch.square(1.0 - int_term) + self.sigma2_0 * int_term
+
+ def e2int_f(self, t):
+ return torch.exp(-0.5 * self.beta_start * t - 0.25 *
+ (self.beta_end - self.beta_start) * t * t)
+
+ def beta(self, t):
+ """ auxiliary beta function """
+ return self.beta_start + (self.beta_end - self.beta_start) * t
+
+ def inv_var(self, var):
+ raise NotImplementedError
+
+ def mixing_component(self, x_noisy, var_t, t, enabled):
+ if enabled:
+ int_term = torch.exp(-self.beta_start * t - 0.5 *
+ (self.beta_end - self.beta_start) * t *
+ t).view(-1, 1, 1, 1)
+ return torch.sqrt(var_t) * x_noisy / (
+ torch.square(1.0 - int_term) + int_term)
+ else:
+ return None
+
+ def var_vpsde(self, t):
+ return 1.0 - (1.0 - self.sigma2_0
+ ) * torch.exp(-self.beta_start * t - 0.5 *
+ (self.beta_end - self.beta_start) * t * t)
+
+ def inv_var_vpsde(self, var):
+ c = torch.log((1 - var) / (1 - self.sigma2_0))
+ a = self.beta_end - self.beta_start
+ t = (-self.beta_start +
+ torch.sqrt(np.square(self.beta_start) - 2 * a * c)) / a
+ return t
+
+
+class DiffusionVESDE(DiffusionBase):
+ """
+ Diffusion implementation of the VESDE with dz = sqrt(beta(t)) * dW
+ """
+
+ def __init__(self, args):
+ super().__init__(args)
+ self.sigma2_min = args.sde_sigma2_min
+ self.sigma2_max = args.sde_sigma2_max
+ assert self.sigma2_min == self.sigma2_0, "VESDE was proposed implicitly assuming sigma2_min = sigma2_0!"
+
+ def f(self, t):
+ return torch.zeros_like(t, device=dist_util.dev())
+
+ def g2(self, t):
+ return self.sigma2_min * np.log(self.sigma2_max / self.sigma2_min) * (
+ (self.sigma2_max / self.sigma2_min)**t)
+
+ def var(self, t):
+ return self.sigma2_min * ((self.sigma2_max / self.sigma2_min)**
+ t) - self.sigma2_min + self.sigma2_0
+
+ def e2int_f(self, t):
+ return torch.ones_like(t, device=dist_util.dev())
+
+ def inv_var(self, var):
+ return torch.log(
+ (var + self.sigma2_min - self.sigma2_0) /
+ self.sigma2_min) / np.log(self.sigma2_max / self.sigma2_min)
+
+ def mixing_component(self, x_noisy, var_t, t, enabled):
+ if enabled:
+ return torch.sqrt(var_t) * x_noisy / (self.sigma2_min * (
+ (self.sigma2_max / self.sigma2_min)**t.view(-1, 1, 1, 1)) -
+ self.sigma2_min + 1.0)
+ else:
+ return None
+
+ def var_N(self, t):
+ return 1.0 - self.sigma2_min + self.sigma2_min * (
+ (self.sigma2_max / self.sigma2_min)**t)
+
+ def inv_var_N(self, var):
+ return torch.log(
+ (var + self.sigma2_min - 1.0) / self.sigma2_min) / np.log(
+ self.sigma2_max / self.sigma2_min)
diff --git a/guided_diffusion/continuous_diffusion_utils.py b/guided_diffusion/continuous_diffusion_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..d70c43111c37cd219e545d14b6bfb5b2f95ca30f
--- /dev/null
+++ b/guided_diffusion/continuous_diffusion_utils.py
@@ -0,0 +1,815 @@
+# ---------------------------------------------------------------
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# This work is licensed under the NVIDIA Source Code License
+# for LSGM. To view a copy of this license, see the LICENSE file.
+# ---------------------------------------------------------------
+
+import logging
+import os
+import math
+import shutil
+import time
+import sys
+import types
+
+import torch
+import torch.nn as nn
+import numpy as np
+import torch.distributed as dist
+# from util.distributions import PixelNormal
+from torch.cuda.amp import autocast
+
+# from tensorboardX import SummaryWriter
+
+
+class AvgrageMeter(object):
+
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+ self.avg = 0
+ self.sum = 0
+ self.cnt = 0
+
+ def update(self, val, n=1):
+ self.sum += val * n
+ self.cnt += n
+ self.avg = self.sum / self.cnt
+
+
+class ExpMovingAvgrageMeter(object):
+
+ def __init__(self, momentum=0.9):
+ self.momentum = momentum
+ self.reset()
+
+ def reset(self):
+ self.avg = 0
+
+ def update(self, val):
+ self.avg = (1. - self.momentum) * self.avg + self.momentum * val
+
+
+class DummyDDP(nn.Module):
+ def __init__(self, model):
+ super(DummyDDP, self).__init__()
+ self.module = model
+
+ def forward(self, *input, **kwargs):
+ return self.module(*input, **kwargs)
+
+
+def count_parameters_in_M(model):
+    return np.sum([np.prod(v.size()) for name, v in model.named_parameters() if "auxiliary" not in name]) / 1e6
+
+
+def save_checkpoint(state, is_best, save):
+ filename = os.path.join(save, 'checkpoint.pth.tar')
+ torch.save(state, filename)
+ if is_best:
+ best_filename = os.path.join(save, 'model_best.pth.tar')
+ shutil.copyfile(filename, best_filename)
+
+
+def save(model, model_path):
+ torch.save(model.state_dict(), model_path)
+
+
+def load(model, model_path):
+ model.load_state_dict(torch.load(model_path))
+
+
+def create_exp_dir(path, scripts_to_save=None):
+ if not os.path.exists(path):
+ os.makedirs(path, exist_ok=True)
+ print('Experiment dir : {}'.format(path))
+
+ if scripts_to_save is not None:
+ if not os.path.exists(os.path.join(path, 'scripts')):
+ os.mkdir(os.path.join(path, 'scripts'))
+ for script in scripts_to_save:
+ dst_file = os.path.join(path, 'scripts', os.path.basename(script))
+ shutil.copyfile(script, dst_file)
+
+
+class Logger(object):
+ def __init__(self, rank, save):
+ # other libraries may set logging before arriving at this line.
+ # by reloading logging, we can get rid of previous configs set by other libraries.
+ from importlib import reload
+ reload(logging)
+ self.rank = rank
+ if self.rank == 0:
+ log_format = '%(asctime)s %(message)s'
+ logging.basicConfig(stream=sys.stdout, level=logging.INFO,
+ format=log_format, datefmt='%m/%d %I:%M:%S %p')
+ fh = logging.FileHandler(os.path.join(save, 'log.txt'))
+ fh.setFormatter(logging.Formatter(log_format))
+ logging.getLogger().addHandler(fh)
+ self.start_time = time.time()
+
+ def info(self, string, *args):
+ if self.rank == 0:
+ elapsed_time = time.time() - self.start_time
+ elapsed_time = time.strftime(
+ '(Elapsed: %H:%M:%S) ', time.gmtime(elapsed_time))
+ if isinstance(string, str):
+ string = elapsed_time + string
+ else:
+ logging.info(elapsed_time)
+ logging.info(string, *args)
+
+
+class Writer(object):
+ def __init__(self, rank, save):
+ self.rank = rank
+        if self.rank == 0:
+            # imported lazily so tensorboardX is only needed on the rank that writes summaries
+            from tensorboardX import SummaryWriter
+            self.writer = SummaryWriter(log_dir=save, flush_secs=20)
+
+ def add_scalar(self, *args, **kwargs):
+ if self.rank == 0:
+ self.writer.add_scalar(*args, **kwargs)
+
+ def add_figure(self, *args, **kwargs):
+ if self.rank == 0:
+ self.writer.add_figure(*args, **kwargs)
+
+ def add_image(self, *args, **kwargs):
+ if self.rank == 0:
+ self.writer.add_image(*args, **kwargs)
+
+ def add_histogram(self, *args, **kwargs):
+ if self.rank == 0:
+ self.writer.add_histogram(*args, **kwargs)
+
+ def add_histogram_if(self, write, *args, **kwargs):
+ if write and False: # Used for debugging.
+ self.add_histogram(*args, **kwargs)
+
+ def close(self, *args, **kwargs):
+ if self.rank == 0:
+ self.writer.close()
+
+
+def common_init(rank, seed, save_dir):
+ # we use different seeds per gpu. But we sync the weights after model initialization.
+ torch.manual_seed(rank + seed)
+ np.random.seed(rank + seed)
+ torch.cuda.manual_seed(rank + seed)
+ torch.cuda.manual_seed_all(rank + seed)
+ torch.backends.cudnn.benchmark = True
+
+ # prepare logging and tensorboard summary
+ logging = Logger(rank, save_dir)
+ writer = Writer(rank, save_dir)
+
+ return logging, writer
+
+
+def reduce_tensor(tensor, world_size):
+ rt = tensor.clone()
+ dist.all_reduce(rt, op=dist.ReduceOp.SUM)
+ rt /= world_size
+ return rt
+
+
+def get_stride_for_cell_type(cell_type):
+ if cell_type.startswith('normal') or cell_type.startswith('combiner'):
+ stride = 1
+ elif cell_type.startswith('down'):
+ stride = 2
+ elif cell_type.startswith('up'):
+ stride = -1
+ else:
+ raise NotImplementedError(cell_type)
+
+ return stride
+
+
+def get_cout(cin, stride):
+ if stride == 1:
+ cout = cin
+ elif stride == -1:
+ cout = cin // 2
+ elif stride == 2:
+ cout = 2 * cin
+
+ return cout
+
+
+def kl_balancer_coeff(num_scales, groups_per_scale, fun):
+ if fun == 'equal':
+ coeff = torch.cat([torch.ones(groups_per_scale[num_scales - i - 1]) for i in range(num_scales)], dim=0).cuda()
+ elif fun == 'linear':
+ coeff = torch.cat([(2 ** i) * torch.ones(groups_per_scale[num_scales - i - 1]) for i in range(num_scales)],
+ dim=0).cuda()
+ elif fun == 'sqrt':
+ coeff = torch.cat(
+ [np.sqrt(2 ** i) * torch.ones(groups_per_scale[num_scales - i - 1]) for i in range(num_scales)],
+ dim=0).cuda()
+ elif fun == 'square':
+ coeff = torch.cat(
+ [np.square(2 ** i) / groups_per_scale[num_scales - i - 1] * torch.ones(groups_per_scale[num_scales - i - 1])
+ for i in range(num_scales)], dim=0).cuda()
+ else:
+ raise NotImplementedError
+ # convert min to 1.
+ coeff /= torch.min(coeff)
+ return coeff
+
+
+def kl_per_group(kl_all):
+ kl_vals = torch.mean(kl_all, dim=0)
+ kl_coeff_i = torch.abs(kl_all)
+ kl_coeff_i = torch.mean(kl_coeff_i, dim=0, keepdim=True) + 0.01
+
+ return kl_coeff_i, kl_vals
+
+
+def kl_balancer(kl_all, kl_coeff=1.0, kl_balance=False, alpha_i=None):
+ if kl_balance and kl_coeff < 1.0:
+ alpha_i = alpha_i.unsqueeze(0)
+
+ kl_all = torch.stack(kl_all, dim=1)
+ kl_coeff_i, kl_vals = kl_per_group(kl_all)
+ total_kl = torch.sum(kl_coeff_i)
+
+ kl_coeff_i = kl_coeff_i / alpha_i * total_kl
+ kl_coeff_i = kl_coeff_i / torch.mean(kl_coeff_i, dim=1, keepdim=True)
+ kl = torch.sum(kl_all * kl_coeff_i.detach(), dim=1)
+
+ # for reporting
+ kl_coeffs = kl_coeff_i.squeeze(0)
+ else:
+ kl_all = torch.stack(kl_all, dim=1)
+ kl_vals = torch.mean(kl_all, dim=0)
+ # kl = torch.sum(kl_all, dim=1)
+ # kl = torch.mean(kl_all, dim=1)
+ kl = torch.mean(kl_all)
+ kl_coeffs = torch.ones(size=(len(kl_vals),))
+
+ return kl_coeff * kl, kl_coeffs, kl_vals
+
+
+def kl_per_group_vada(all_log_q, all_neg_log_p):
+ assert len(all_log_q) == len(all_neg_log_p)
+
+ kl_all_list = []
+ kl_diag = []
+ for log_q, neg_log_p in zip(all_log_q, all_neg_log_p):
+ # kl_diag.append(torch.mean(torch.sum(neg_log_p + log_q, dim=[2, 3]), dim=0))
+ kl_diag.append(torch.mean(torch.mean(neg_log_p + log_q, dim=[2, 3]), dim=0))
+ # kl_all_list.append(torch.sum(neg_log_p + log_q, dim=[1, 2, 3]))
+ kl_all_list.append(torch.mean(neg_log_p + log_q, dim=[1, 2, 3]))
+
+ # kl_all = torch.stack(kl_all, dim=1) # batch x num_total_groups
+ kl_vals = torch.mean(torch.stack(kl_all_list, dim=1), dim=0) # mean per group
+
+ return kl_all_list, kl_vals, kl_diag
+
+
+def kl_coeff(step, total_step, constant_step, min_kl_coeff, max_kl_coeff):
+ # return max(min(max_kl_coeff * (step - constant_step) / total_step, max_kl_coeff), min_kl_coeff)
+ return max(min(min_kl_coeff + (max_kl_coeff - min_kl_coeff) * (step - constant_step) / total_step, max_kl_coeff), min_kl_coeff)
+
+
+def log_iw(decoder, x, log_q, log_p, crop=False):
+ recon = reconstruction_loss(decoder, x, crop)
+ return - recon - log_q + log_p
+
+
+def reconstruction_loss(decoder, x, crop=False):
+ from util.distributions import DiscMixLogistic
+
+ recon = decoder.log_p(x)
+ if crop:
+ recon = recon[:, :, 2:30, 2:30]
+
+ if isinstance(decoder, DiscMixLogistic):
+ return - torch.sum(recon, dim=[1, 2]) # summation over RGB is done.
+ else:
+ return - torch.sum(recon, dim=[1, 2, 3])
+
+
+def vae_terms(all_log_q, all_eps):
+ from util.distributions import log_p_standard_normal
+
+ # compute kl
+ kl_all = []
+ kl_diag = []
+ log_p, log_q = 0., 0.
+ for log_q_conv, eps in zip(all_log_q, all_eps):
+ log_p_conv = log_p_standard_normal(eps)
+ kl_per_var = log_q_conv - log_p_conv
+ kl_diag.append(torch.mean(torch.sum(kl_per_var, dim=[2, 3]), dim=0))
+ kl_all.append(torch.sum(kl_per_var, dim=[1, 2, 3]))
+ log_q += torch.sum(log_q_conv, dim=[1, 2, 3])
+ log_p += torch.sum(log_p_conv, dim=[1, 2, 3])
+ return log_q, log_p, kl_all, kl_diag
+
+
+def sum_log_q(all_log_q):
+ log_q = 0.
+ for log_q_conv in all_log_q:
+ log_q += torch.sum(log_q_conv, dim=[1, 2, 3])
+
+ return log_q
+
+
+def cross_entropy_normal(all_eps):
+ from util.distributions import log_p_standard_normal
+
+ cross_entropy = 0.
+ neg_log_p_per_group = []
+ for eps in all_eps:
+ neg_log_p_conv = - log_p_standard_normal(eps)
+ neg_log_p = torch.sum(neg_log_p_conv, dim=[1, 2, 3])
+ cross_entropy += neg_log_p
+ neg_log_p_per_group.append(neg_log_p_conv)
+
+ return cross_entropy, neg_log_p_per_group
+
+
+def tile_image(batch_image, n, m=None):
+ if m is None:
+ m = n
+ assert n * m == batch_image.size(0)
+ channels, height, width = batch_image.size(1), batch_image.size(2), batch_image.size(3)
+ batch_image = batch_image.view(n, m, channels, height, width)
+    batch_image = batch_image.permute(2, 0, 3, 1, 4)  # channels, n, height, m, width
+ batch_image = batch_image.contiguous().view(channels, n * height, m * width)
+ return batch_image
+
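+# Shape example (illustrative): calling tile_image on a (6, 3, 32, 32) batch with
+# n=2, m=3 returns a single (3, 64, 96) image with the six inputs on a 2 x 3 grid.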
+
+def average_gradients_naive(params, is_distributed):
+ """ Gradient averaging. """
+ if is_distributed:
+ size = float(dist.get_world_size())
+ for param in params:
+ if param.requires_grad:
+ param.grad.data /= size
+ dist.all_reduce(param.grad.data, op=dist.ReduceOp.SUM)
+
+
+def average_gradients(params, is_distributed):
+ """ Gradient averaging. """
+ if is_distributed:
+ if isinstance(params, types.GeneratorType):
+ params = [p for p in params]
+
+ size = float(dist.get_world_size())
+ grad_data = []
+ grad_size = []
+ grad_shapes = []
+ # Gather all grad values
+ for param in params:
+ if param.requires_grad:
+ grad_size.append(param.grad.data.numel())
+ grad_shapes.append(list(param.grad.data.shape))
+ grad_data.append(param.grad.data.flatten())
+ grad_data = torch.cat(grad_data).contiguous()
+
+ # All-reduce grad values
+ grad_data /= size
+ dist.all_reduce(grad_data, op=dist.ReduceOp.SUM)
+
+ # Put back the reduce grad values to parameters
+ base = 0
+ for i, param in enumerate(params):
+ if param.requires_grad:
+ param.grad.data = grad_data[base:base + grad_size[i]].view(grad_shapes[i])
+ base += grad_size[i]
+
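+# Design note (added): unlike average_gradients_naive, which issues one all_reduce
+# per parameter, average_gradients flattens all gradients into a single buffer so
+# only one collective call is launched; this trades extra copies for far fewer
+# collective launches, which is usually faster for models with many small tensors.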
+
+def average_params(params, is_distributed):
+ """ parameter averaging. """
+ if is_distributed:
+ size = float(dist.get_world_size())
+ for param in params:
+ param.data /= size
+ dist.all_reduce(param.data, op=dist.ReduceOp.SUM)
+
+
+def average_tensor(t, is_distributed):
+ if is_distributed:
+ size = float(dist.get_world_size())
+ dist.all_reduce(t.data, op=dist.ReduceOp.SUM)
+ t.data /= size
+
+
+def broadcast_params(params, is_distributed):
+ if is_distributed:
+ for param in params:
+ dist.broadcast(param.data, src=0)
+
+
+def num_output(dataset):
+ if dataset in {'mnist', 'omniglot'}:
+ return 28 * 28
+ elif dataset == 'cifar10':
+ return 3 * 32 * 32
+ elif dataset.startswith('celeba') or dataset.startswith('imagenet') or dataset.startswith('lsun'):
+ size = int(dataset.split('_')[-1])
+ return 3 * size * size
+ elif dataset == 'ffhq':
+ return 3 * 256 * 256
+ else:
+ raise NotImplementedError
+
+
+def get_input_size(dataset):
+ if dataset in {'mnist', 'omniglot'}:
+ return 32
+ elif dataset == 'cifar10':
+ return 32
+ elif dataset.startswith('celeba') or dataset.startswith('imagenet') or dataset.startswith('lsun'):
+ size = int(dataset.split('_')[-1])
+ return size
+ elif dataset == 'ffhq':
+ return 256
+ else:
+ raise NotImplementedError
+
+
+def get_bpd_coeff(dataset):
+ n = num_output(dataset)
+ return 1. / np.log(2.) / n
+
+
+def get_channel_multiplier(dataset, num_scales):
+ if dataset in {'cifar10', 'omniglot'}:
+ mult = (1, 1, 1)
+ elif dataset in {'celeba_256', 'ffhq', 'lsun_church_256'}:
+ if num_scales == 3:
+ mult = (1, 1, 1) # used for prior at 16
+ elif num_scales == 4:
+ mult = (1, 2, 2, 2) # used for prior at 32
+ elif num_scales == 5:
+ mult = (1, 1, 2, 2, 2) # used for prior at 64
+ elif dataset == 'mnist':
+ mult = (1, 1)
+ else:
+ raise NotImplementedError
+
+ return mult
+
+
+def get_attention_scales(dataset):
+ if dataset in {'cifar10', 'omniglot'}:
+ attn = (True, False, False)
+ elif dataset in {'celeba_256', 'ffhq', 'lsun_church_256'}:
+ # attn = (False, True, False, False) # used for 32
+ attn = (False, False, True, False, False) # used for 64
+ elif dataset == 'mnist':
+ attn = (True, False)
+ else:
+ raise NotImplementedError
+
+ return attn
+
+
+def change_bit_length(x, num_bits):
+ if num_bits != 8:
+ x = torch.floor(x * 255 / 2 ** (8 - num_bits))
+ x /= (2 ** num_bits - 1)
+ return x
+
+
+def view4D(t, size, inplace=True):
+ """
+ Equal to view(-1, 1, 1, 1).expand(size)
+ Designed because of this bug:
+ https://github.com/pytorch/pytorch/pull/48696
+ """
+ if inplace:
+ return t.unsqueeze_(-1).unsqueeze_(-1).unsqueeze_(-1).expand(size)
+ else:
+ return t.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1).expand(size)
+
+
+def get_arch_cells(arch_type, use_se):
+ if arch_type == 'res_mbconv':
+ arch_cells = dict()
+ arch_cells['normal_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_dec'] = {'conv_branch': ['mconv_e6k5g0'], 'se': use_se}
+ arch_cells['up_dec'] = {'conv_branch': ['mconv_e6k5g0'], 'se': use_se}
+ arch_cells['normal_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_post'] = {'conv_branch': ['mconv_e3k5g0'], 'se': use_se}
+ arch_cells['up_post'] = {'conv_branch': ['mconv_e3k5g0'], 'se': use_se}
+ arch_cells['ar_nn'] = ['']
+ elif arch_type == 'res_bnswish':
+ arch_cells = dict()
+ arch_cells['normal_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_dec'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['up_dec'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_post'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['up_post'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['ar_nn'] = ['']
+ elif arch_type == 'res_bnswish2':
+ arch_cells = dict()
+ arch_cells['normal_enc'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['down_enc'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['normal_dec'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['up_dec'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['normal_pre'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['down_pre'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['normal_post'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['up_post'] = {'conv_branch': ['res_bnswish_x2'], 'se': use_se}
+ arch_cells['ar_nn'] = ['']
+ elif arch_type == 'res_mbconv_attn':
+ arch_cells = dict()
+ arch_cells['normal_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish', ], 'se': use_se, 'attn_type': 'attn'}
+ arch_cells['down_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se, 'attn_type': 'attn'}
+ arch_cells['normal_dec'] = {'conv_branch': ['mconv_e6k5g0'], 'se': use_se, 'attn_type': 'attn'}
+ arch_cells['up_dec'] = {'conv_branch': ['mconv_e6k5g0'], 'se': use_se, 'attn_type': 'attn'}
+ arch_cells['normal_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_post'] = {'conv_branch': ['mconv_e3k5g0'], 'se': use_se}
+ arch_cells['up_post'] = {'conv_branch': ['mconv_e3k5g0'], 'se': use_se}
+ arch_cells['ar_nn'] = ['']
+ elif arch_type == 'res_mbconv_attn_half':
+ arch_cells = dict()
+ arch_cells['normal_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_enc'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_dec'] = {'conv_branch': ['mconv_e6k5g0'], 'se': use_se, 'attn_type': 'attn'}
+ arch_cells['up_dec'] = {'conv_branch': ['mconv_e6k5g0'], 'se': use_se, 'attn_type': 'attn'}
+ arch_cells['normal_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['down_pre'] = {'conv_branch': ['res_bnswish', 'res_bnswish'], 'se': use_se}
+ arch_cells['normal_post'] = {'conv_branch': ['mconv_e3k5g0'], 'se': use_se}
+ arch_cells['up_post'] = {'conv_branch': ['mconv_e3k5g0'], 'se': use_se}
+ arch_cells['ar_nn'] = ['']
+ else:
+ raise NotImplementedError
+
+ return arch_cells
+
+
+def groups_per_scale(num_scales, num_groups_per_scale):
+ g = []
+ n = num_groups_per_scale
+ for s in range(num_scales):
+ assert n >= 1
+ g.append(n)
+ return g
+
+
+class PositionalEmbedding(nn.Module):
+ def __init__(self, embedding_dim, scale):
+ super(PositionalEmbedding, self).__init__()
+ self.embedding_dim = embedding_dim
+ self.scale = scale
+
+ def forward(self, timesteps):
+ assert len(timesteps.shape) == 1
+ timesteps = timesteps * self.scale
+ half_dim = self.embedding_dim // 2
+ emb = math.log(10000) / (half_dim - 1)
+ emb = torch.exp(torch.arange(half_dim) * -emb)
+ emb = emb.to(device=timesteps.device)
+ emb = timesteps[:, None] * emb[None, :]
+ emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)
+ return emb
+
+
+class RandomFourierEmbedding(nn.Module):
+ def __init__(self, embedding_dim, scale):
+ super(RandomFourierEmbedding, self).__init__()
+ self.w = nn.Parameter(torch.randn(size=(1, embedding_dim // 2)) * scale, requires_grad=False)
+
+ def forward(self, timesteps):
+ emb = torch.mm(timesteps[:, None], self.w * 2 * 3.14159265359)
+ return torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)
+
+
+def init_temb_fun(embedding_type, embedding_scale, embedding_dim):
+ if embedding_type == 'positional':
+ temb_fun = PositionalEmbedding(embedding_dim, embedding_scale)
+ elif embedding_type == 'fourier':
+ temb_fun = RandomFourierEmbedding(embedding_dim, embedding_scale)
+ else:
+ raise NotImplementedError
+
+ return temb_fun
+
+def get_dae_model(args, num_input_channels):
+ if args.dae_arch == 'ncsnpp':
+ # we need to import NCSNpp after processes are launched on the multi gpu training.
+ from score_sde.ncsnpp import NCSNpp
+ dae = NCSNpp(args, num_input_channels)
+ else:
+ raise NotImplementedError
+
+ return dae
+
+def symmetrize_image_data(images):
+ return 2.0 * images - 1.0
+
+
+def unsymmetrize_image_data(images):
+ return (images + 1.) / 2.
+
+
+def normalize_symmetric(images):
+    """
+    Normalize images by dividing by the largest absolute intensity. Used for visualizing the intermediate steps.
+    """
+ b = images.shape[0]
+ m, _ = torch.max(torch.abs(images).view(b, -1), dim=1)
+ images /= (m.view(b, 1, 1, 1) + 1e-3)
+
+ return images
+
+
+@torch.jit.script
+def soft_clamp5(x: torch.Tensor):
+ return x.div(5.).tanh_().mul(5.) # 5. * torch.tanh(x / 5.) <--> soft differentiable clamp between [-5, 5]
+
+@torch.jit.script
+def soft_clamp(x: torch.Tensor, a: torch.Tensor):
+ return x.div(a).tanh_().mul(a)
+
+class SoftClamp5(nn.Module):
+ def __init__(self):
+ super(SoftClamp5, self).__init__()
+
+ def forward(self, x):
+ return soft_clamp5(x)
+
+
+def override_architecture_fields(args, stored_args, logging):
+ # list of architecture parameters used in NVAE:
+ architecture_fields = ['arch_instance', 'num_nf', 'num_latent_scales', 'num_groups_per_scale',
+ 'num_latent_per_group', 'num_channels_enc', 'num_preprocess_blocks',
+ 'num_preprocess_cells', 'num_cell_per_cond_enc', 'num_channels_dec',
+ 'num_postprocess_blocks', 'num_postprocess_cells', 'num_cell_per_cond_dec',
+ 'decoder_dist', 'num_x_bits', 'log_sig_q_scale',
+ 'progressive_input_vae', 'channel_mult']
+
+ # backward compatibility
+    """ We have broken backward compatibility. No need to set these manually
+ if not hasattr(stored_args, 'log_sig_q_scale'):
+ logging.info('*** Setting %s manually ****', 'log_sig_q_scale')
+ setattr(stored_args, 'log_sig_q_scale', 5.)
+
+ if not hasattr(stored_args, 'latent_grad_cutoff'):
+ logging.info('*** Setting %s manually ****', 'latent_grad_cutoff')
+ setattr(stored_args, 'latent_grad_cutoff', 0.)
+
+ if not hasattr(stored_args, 'progressive_input_vae'):
+ logging.info('*** Setting %s manually ****', 'progressive_input_vae')
+ setattr(stored_args, 'progressive_input_vae', 'none')
+
+ if not hasattr(stored_args, 'progressive_output_vae'):
+ logging.info('*** Setting %s manually ****', 'progressive_output_vae')
+ setattr(stored_args, 'progressive_output_vae', 'none')
+ """
+
+ if not hasattr(stored_args, 'num_x_bits'):
+ logging.info('*** Setting %s manually ****', 'num_x_bits')
+ setattr(stored_args, 'num_x_bits', 8)
+
+ if not hasattr(stored_args, 'channel_mult'):
+ logging.info('*** Setting %s manually ****', 'channel_mult')
+ setattr(stored_args, 'channel_mult', [1, 2])
+
+ for f in architecture_fields:
+ if not hasattr(args, f) or getattr(args, f) != getattr(stored_args, f):
+ logging.info('Setting %s from loaded checkpoint', f)
+ setattr(args, f, getattr(stored_args, f))
+
+
+def init_processes(rank, size, fn, args):
+ """ Initialize the distributed environment. """
+ os.environ['MASTER_ADDR'] = args.master_address
+ os.environ['MASTER_PORT'] = '6020'
+ torch.cuda.set_device(args.local_rank)
+ dist.init_process_group(backend='nccl', init_method='env://', rank=rank, world_size=size)
+ fn(args)
+ dist.barrier()
+ dist.destroy_process_group()
+
+
+def sample_rademacher_like(y):
+ return torch.randint(low=0, high=2, size=y.shape, device='cuda') * 2 - 1
+
+
+def sample_gaussian_like(y):
+ return torch.randn_like(y, device='cuda')
+
+
+def trace_df_dx_hutchinson(f, x, noise, no_autograd):
+ """
+ Hutchinson's trace estimator for Jacobian df/dx, O(1) call to autograd
+ """
+ if no_autograd:
+ # the following is compatible with checkpointing
+ torch.sum(f * noise).backward()
+ # torch.autograd.backward(tensors=[f], grad_tensors=[noise])
+ jvp = x.grad
+ trJ = torch.sum(jvp * noise, dim=[1, 2, 3])
+ x.grad = None
+ else:
+ jvp = torch.autograd.grad(f, x, noise, create_graph=False)[0]
+ trJ = torch.sum(jvp * noise, dim=[1, 2, 3])
+ # trJ = torch.einsum('bijk,bijk->b', jvp, noise) # we could test if there's a speed difference in einsum vs sum
+
+ return trJ
+
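+# Background (added for readability): Hutchinson's estimator relies on
+# tr(J) = E_noise[noise^T J noise] for noise with zero mean and identity covariance
+# (Gaussian or Rademacher), so one vector-Jacobian product per sample yields an
+# unbiased trace estimate instead of the d backward passes an exact trace needs.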
+def different_p_q_objectives(iw_sample_p, iw_sample_q):
+ assert iw_sample_p in ['ll_uniform', 'drop_all_uniform', 'll_iw', 'drop_all_iw', 'drop_sigma2t_iw', 'rescale_iw',
+ 'drop_sigma2t_uniform']
+ assert iw_sample_q in ['reweight_p_samples', 'll_uniform', 'll_iw']
+ # In these cases, we reuse the likelihood-based p-objective (either the uniform sampling version or the importance
+ # sampling version) also for q.
+ if iw_sample_p in ['ll_uniform', 'll_iw'] and iw_sample_q == 'reweight_p_samples':
+ return False
+    # In these cases, we are using a non-likelihood-based objective for p, and hence definitely need to use another q
+    # objective.
+ else:
+ return True
+
+
+# def decoder_output(dataset, logits, fixed_log_scales=None):
+# if dataset in {'cifar10', 'celeba_64', 'celeba_256', 'imagenet_32', 'imagenet_64', 'ffhq',
+# 'lsun_bedroom_128', 'lsun_bedroom_256', 'mnist', 'omniglot',
+# 'lsun_church_256'}:
+# return PixelNormal(logits, fixed_log_scales)
+# else:
+# raise NotImplementedError
+
+
+def get_mixed_prediction(mixed_prediction, param, mixing_logit, mixing_component=None):
+ if mixed_prediction:
+ assert mixing_component is not None, 'Provide mixing component when mixed_prediction is enabled.'
+ coeff = torch.sigmoid(mixing_logit)
+ param = (1 - coeff) * mixing_component + coeff * param
+
+ return param
+
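+# Note (added): this is the LSGM "mixed score" parameterization. With
+# coeff = sigmoid(mixing_logit), the prediction interpolates between the analytically
+# known score contribution of a standard-normal prior (mixing_component) and the
+# learned network output, so the network only needs to model the residual relative
+# to the Gaussian prior.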
+
+def set_vesde_sigma_max(args, vae, train_queue, logging, is_distributed):
+ logging.info('')
+ logging.info('Calculating max. pairwise distance in latent space to set sigma2_max for VESDE...')
+
+ eps_list = []
+ vae.eval()
+ for step, x in enumerate(train_queue):
+ x = x[0] if len(x) > 1 else x
+ x = x.cuda()
+ x = symmetrize_image_data(x)
+
+ # run vae
+ with autocast(enabled=args.autocast_train):
+ with torch.set_grad_enabled(False):
+ logits, all_log_q, all_eps = vae(x)
+ eps = torch.cat(all_eps, dim=1)
+
+ eps_list.append(eps.detach())
+
+ # concat eps tensor on each GPU and then gather all on all GPUs
+ eps_this_rank = torch.cat(eps_list, dim=0)
+ if is_distributed:
+        # build distinct tensors; multiplying a one-element list would alias a single buffer
+        eps_all_gathered = [torch.zeros_like(eps_this_rank) for _ in range(dist.get_world_size())]
+ dist.all_gather(eps_all_gathered, eps_this_rank)
+ eps_full = torch.cat(eps_all_gathered, dim=0)
+ else:
+ eps_full = eps_this_rank
+
+ # max pairwise distance squared between all latent encodings, is computed on CPU
+ eps_full = eps_full.cpu().float()
+ eps_full = eps_full.flatten(start_dim=1).unsqueeze(0)
+ max_pairwise_dist_sqr = torch.cdist(eps_full, eps_full).square().max()
+ max_pairwise_dist_sqr = max_pairwise_dist_sqr.cuda()
+
+ # to be safe, we broadcast to all GPUs if we are in distributed environment. Shouldn't be necessary in principle.
+ if is_distributed:
+ dist.broadcast(max_pairwise_dist_sqr, src=0)
+
+ args.sigma2_max = max_pairwise_dist_sqr.item()
+
+    logging.info('Done! args.sigma2_max set to {}'.format(args.sigma2_max))
+ logging.info('')
+ return args
+
+
+def mask_inactive_variables(x, is_active):
+ x = x * is_active
+ return x
+
+
+def common_x_operations(x, num_x_bits):
+ x = x[0] if len(x) > 1 else x
+ x = x.cuda()
+
+ # change bit length
+ x = change_bit_length(x, num_x_bits)
+ x = symmetrize_image_data(x)
+
+ return x
diff --git a/guided_diffusion/continuous_distributions.py b/guided_diffusion/continuous_distributions.py
new file mode 100644
index 0000000000000000000000000000000000000000..288c3d9a6ade4d2b2bef64704d666a00ddc499c0
--- /dev/null
+++ b/guided_diffusion/continuous_distributions.py
@@ -0,0 +1,284 @@
+# ---------------------------------------------------------------
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# This work is licensed under the NVIDIA Source Code License
+# for LSGM. To view a copy of this license, see the LICENSE file.
+# ---------------------------------------------------------------
+
+import torch
+import torch.nn.functional as F
+from torch.distributions.bernoulli import Bernoulli as Bern
+import numpy as np
+from pdb import set_trace as st
+# from util import utils
+from .continuous_diffusion_utils import view4D
+
+@torch.jit.script
+def sample_normal_jit(mu, sigma):
+ rho = mu.mul(0).normal_()
+ z = rho.mul_(sigma).add_(mu)
+ return z, rho
+
+
+@torch.jit.script
+def log_p_standard_normal(samples):
+ log_p = - 0.5 * torch.square(samples) - 0.9189385332 # 0.5 * np.log(2 * np.pi)
+ return log_p
+
+
+def log_p_var_normal(samples, var):
+ log_p = - 0.5 * torch.square(samples) / var - 0.5 * np.log(var) - 0.9189385332 # 0.5 * np.log(2 * np.pi)
+ return log_p
+
+
+def one_hot(indices, depth, dim):
+ indices = indices.unsqueeze(dim)
+ size = list(indices.size())
+ size[dim] = depth
+    y_onehot = torch.zeros(size).cuda()
+    y_onehot.scatter_(dim, indices, 1)
+
+ return y_onehot
+
+# TODO: merge this with the next class
+class PixelNormal(object):
+ def __init__(self, param, fixed_log_scales=None):
+ size = param.size()
+ C = size[1]
+ if fixed_log_scales is None:
+ self.num_c = C // 2
+ self.means = param[:, :self.num_c, :, :] # B, 1 or 3, H, W
+ self.log_scales = torch.clamp(param[:, self.num_c:, :, :], min=-7.0) # B, 1 or 3, H, W
+ raise NotImplementedError
+ else:
+ self.num_c = C
+ self.means = param # B, 1 or 3, H, W
+ self.log_scales = view4D(fixed_log_scales, size) # B, 1 or 3, H, W
+
+ def get_params(self):
+ return self.means, self.log_scales, self.num_c
+
+ def log_prob(self, samples):
+ B, C, H, W = samples.size()
+ assert C == self.num_c
+
+ log_probs = -0.5 * torch.square(self.means - samples) * torch.exp(-2.0 * self.log_scales) - self.log_scales - 0.9189385332 # -0.5*log(2*pi)
+ return log_probs
+
+ def sample(self, t=1.):
+ z, rho = sample_normal_jit(self.means, torch.exp(self.log_scales)*t) # B, 3, H, W
+ return z
+
+ def log_prob_discrete(self, samples):
+ """
+ Calculates discrete pixel probabilities.
+ """
+ # samples should be in [-1, 1] already
+ B, C, H, W = samples.size()
+ assert C == self.num_c
+
+ centered = samples - self.means
+ inv_stdv = torch.exp(- self.log_scales)
+ plus_in = inv_stdv * (centered + 1. / 255.)
+ cdf_plus = torch.distributions.Normal(0, 1).cdf(plus_in)
+ min_in = inv_stdv * (centered - 1. / 255.)
+ cdf_min = torch.distributions.Normal(0, 1).cdf(min_in)
+ log_cdf_plus = torch.log(torch.clamp(cdf_plus, min=1e-12))
+ log_one_minus_cdf_min = torch.log(torch.clamp(1. - cdf_min, min=1e-12))
+ cdf_delta = cdf_plus - cdf_min
+ log_probs = torch.where(samples < -0.999, log_cdf_plus, torch.where(samples > 0.999, log_one_minus_cdf_min,
+ torch.log(torch.clamp(cdf_delta, min=1e-12))))
+
+ assert log_probs.size() == samples.size()
+ return log_probs
+
+ def mean(self):
+ return self.means
+
+
+class Normal:
+ def __init__(self, mu, log_sigma):
+ self.mu = mu
+ self.log_sigma = log_sigma
+ self.sigma = torch.exp(log_sigma)
+
+ def sample(self, t=1.):
+ return sample_normal_jit(self.mu, self.sigma * t)
+
+ def sample_given_rho(self, rho):
+ return rho * self.sigma + self.mu
+
+ def log_p(self, samples):
+ normalized_samples = (samples - self.mu) / self.sigma
+ log_p = - 0.5 * normalized_samples * normalized_samples - 0.5 * np.log(2 * np.pi) - self.log_sigma
+ return log_p
+
+ def kl(self, normal_dist):
+ term1 = (self.mu - normal_dist.mu) / normal_dist.sigma
+ term2 = self.sigma / normal_dist.sigma
+
+ # KL(self || normal_dist); note -log(sigma_q / sigma_p) = -self.log_sigma + normal_dist.log_sigma
+ return 0.5 * (term1 * term1 + term2 * term2) - 0.5 - self.log_sigma + normal_dist.log_sigma
+
+ def mean(self):
+ return self.mu
+
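+# Sanity-check sketch (illustrative): Normal.kl above is the closed-form
+# KL(q || p) for diagonal Gaussians and can be cross-checked against
+# torch.distributions.kl_divergence, e.g.
+#
+#   q = Normal(torch.zeros(3), torch.zeros(3))                   # N(0, 1)
+#   p = Normal(torch.ones(3), torch.log(torch.full((3,), 2.0)))  # N(1, 4)
+#   kl_qp = q.kl(p)  # elementwise KL(q || p)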
+
+class Bernoulli:
+ def __init__(self, logits):
+ self.dist = Bern(logits=logits)
+
+ def log_p(self, samples):
+ # convert samples to {0, 1}
+ samples = (samples + 1.) / 2
+ return self.dist.log_prob(samples)
+
+ def mean(self):
+ # map the mean to [-1, 1]
+ return 2 * self.dist.mean - 1.
+
+class DiscLogistic:
+ def __init__(self, param):
+ B, C, H, W = param.size()
+ self.num_c = C // 2
+ self.means = param[:, :self.num_c, :, :] # B, 3, H, W
+ self.log_scales = torch.clamp(param[:, self.num_c:, :, :], min=-7.0) # B, 3, H, W
+
+ def log_p(self, samples):
+ assert torch.max(samples) <= 1.0 and torch.min(samples) >= -1.0
+
+ B, C, H, W = samples.size()
+ assert C == self.num_c
+
+ centered = samples - self.means # B, 3, H, W
+ inv_stdv = torch.exp(- self.log_scales)
+ plus_in = inv_stdv * (centered + 1. / 255.)
+ cdf_plus = torch.sigmoid(plus_in)
+ min_in = inv_stdv * (centered - 1. / 255.)
+ cdf_min = torch.sigmoid(min_in)
+ log_cdf_plus = plus_in - F.softplus(plus_in)
+ log_one_minus_cdf_min = - F.softplus(min_in)
+ cdf_delta = cdf_plus - cdf_min
+ mid_in = inv_stdv * centered
+ log_pdf_mid = mid_in - self.log_scales - 2. * F.softplus(mid_in)
+
+ log_prob_mid_safe = torch.where(cdf_delta > 1e-5,
+ torch.log(torch.clamp(cdf_delta, min=1e-10)),
+ log_pdf_mid - np.log(127.5))
+
+ log_probs = torch.where(samples < -0.999, log_cdf_plus, torch.where(samples > 0.999, log_one_minus_cdf_min,
+ log_prob_mid_safe)) # B, 3, H, W
+
+ return log_probs
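+
+ # The CDF difference above integrates the logistic density over one 8-bit
+ # bin of half-width 1/255:
+ #   log p(x) = log[ sigmoid((x - mu + 1/255) / s) - sigmoid((x - mu - 1/255) / s) ],
+ # with open-ended CDF terms at the edge bins and a PDF approximation when
+ # the difference underflows (cdf_delta <= 1e-5).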
+
+ def sample(self):
+ u = torch.Tensor(self.means.size()).uniform_(1e-5, 1. - 1e-5).cuda() # B, 3, H, W
+ x = self.means + torch.exp(self.log_scales) * (torch.log(u) - torch.log(1. - u)) # B, 3, H, W
+ x = torch.clamp(x, -1, 1.)
+ return x
+
+ def mean(self):
+ return self.means
+
+
+class DiscMixLogistic:
+ def __init__(self, param, num_mix=10, num_bits=8):
+ B, C, H, W = param.size()
+ self.num_mix = num_mix
+ self.logit_probs = param[:, :num_mix, :, :] # B, M, H, W
+ l = param[:, num_mix:, :, :].view(B, 3, 3 * num_mix, H, W) # B, 3, 3 * M, H, W
+ self.means = l[:, :, :num_mix, :, :] # B, 3, M, H, W
+ self.log_scales = torch.clamp(l[:, :, num_mix:2 * num_mix, :, :], min=-7.0) # B, 3, M, H, W
+ self.coeffs = torch.tanh(l[:, :, 2 * num_mix:3 * num_mix, :, :]) # B, 3, M, H, W
+ self.max_val = 2. ** num_bits - 1
+
+ def log_p(self, samples):
+ assert torch.max(samples) <= 1.0 and torch.min(samples) >= -1.0
+
+ B, C, H, W = samples.size()
+ assert C == 3, 'only RGB images are considered.'
+
+ samples = samples.unsqueeze(4) # B, 3, H, W, 1
+ samples = samples.expand(-1, -1, -1, -1, self.num_mix).permute(0, 1, 4, 2, 3) # B, 3, M, H, W
+ mean1 = self.means[:, 0, :, :, :] # B, M, H, W
+ mean2 = self.means[:, 1, :, :, :] + \
+ self.coeffs[:, 0, :, :, :] * samples[:, 0, :, :, :] # B, M, H, W
+ mean3 = self.means[:, 2, :, :, :] + \
+ self.coeffs[:, 1, :, :, :] * samples[:, 0, :, :, :] + \
+ self.coeffs[:, 2, :, :, :] * samples[:, 1, :, :, :] # B, M, H, W
+
+ mean1 = mean1.unsqueeze(1) # B, 1, M, H, W
+ mean2 = mean2.unsqueeze(1) # B, 1, M, H, W
+ mean3 = mean3.unsqueeze(1) # B, 1, M, H, W
+ means = torch.cat([mean1, mean2, mean3], dim=1) # B, 3, M, H, W
+ centered = samples - means # B, 3, M, H, W
+
+ inv_stdv = torch.exp(- self.log_scales)
+ plus_in = inv_stdv * (centered + 1. / self.max_val)
+ cdf_plus = torch.sigmoid(plus_in)
+ min_in = inv_stdv * (centered - 1. / self.max_val)
+ cdf_min = torch.sigmoid(min_in)
+ log_cdf_plus = plus_in - F.softplus(plus_in)
+ log_one_minus_cdf_min = - F.softplus(min_in)
+ cdf_delta = cdf_plus - cdf_min
+ mid_in = inv_stdv * centered
+ log_pdf_mid = mid_in - self.log_scales - 2. * F.softplus(mid_in)
+
+ log_prob_mid_safe = torch.where(cdf_delta > 1e-5,
+ torch.log(torch.clamp(cdf_delta, min=1e-10)),
+ log_pdf_mid - np.log(self.max_val / 2))
+
+ log_probs = torch.where(samples < -0.999, log_cdf_plus, torch.where(samples > 0.999, log_one_minus_cdf_min,
+ log_prob_mid_safe)) # B, 3, M, H, W
+
+ log_probs = torch.sum(log_probs, 1) + F.log_softmax(self.logit_probs, dim=1) # B, M, H, W
+ return torch.logsumexp(log_probs, dim=1) # B, H, W
+
+ def sample(self, t=1.):
+ gumbel = -torch.log(- torch.log(torch.Tensor(self.logit_probs.size()).uniform_(1e-5, 1. - 1e-5).cuda())) # B, M, H, W
+ sel = one_hot(torch.argmax(self.logit_probs / t + gumbel, 1), self.num_mix, dim=1) # B, M, H, W
+ sel = sel.unsqueeze(1) # B, 1, M, H, W
+
+ # select logistic parameters
+ means = torch.sum(self.means * sel, dim=2) # B, 3, H, W
+ log_scales = torch.sum(self.log_scales * sel, dim=2) # B, 3, H, W
+ coeffs = torch.sum(self.coeffs * sel, dim=2) # B, 3, H, W
+
+ # cells from logistic & clip to interval
+ # we don't actually round to the nearest 8bit value when sampling
+ u = torch.Tensor(means.size()).uniform_(1e-5, 1. - 1e-5).cuda() # B, 3, H, W
+ x = means + torch.exp(log_scales) * t * (torch.log(u) - torch.log(1. - u)) # B, 3, H, W
+
+ x0 = torch.clamp(x[:, 0, :, :], -1, 1.) # B, H, W
+ x1 = torch.clamp(x[:, 1, :, :] + coeffs[:, 0, :, :] * x0, -1, 1) # B, H, W
+ x2 = torch.clamp(x[:, 2, :, :] + coeffs[:, 1, :, :] * x0 + coeffs[:, 2, :, :] * x1, -1, 1) # B, H, W
+
+ x0 = x0.unsqueeze(1)
+ x1 = x1.unsqueeze(1)
+ x2 = x2.unsqueeze(1)
+
+ x = torch.cat([x0, x1, x2], 1)
+ return x
+
+ def mean(self):
+ sel = torch.softmax(self.logit_probs, dim=1) # B, M, H, W
+ sel = sel.unsqueeze(1) # B, 1, M, H, W
+
+ # select logistic parameters
+ means = torch.sum(self.means * sel, dim=2) # B, 3, H, W
+ coeffs = torch.sum(self.coeffs * sel, dim=2) # B, 3, H, W
+
+ # we don't sample from logistic components, because of the linear dependencies, we use mean
+ x = means # B, 3, H, W
+ x0 = torch.clamp(x[:, 0, :, :], -1, 1.) # B, H, W
+ x1 = torch.clamp(x[:, 1, :, :] + coeffs[:, 0, :, :] * x0, -1, 1) # B, H, W
+ x2 = torch.clamp(x[:, 2, :, :] + coeffs[:, 1, :, :] * x0 + coeffs[:, 2, :, :] * x1, -1, 1) # B, H, W
+
+ x0 = x0.unsqueeze(1)
+ x1 = x1.unsqueeze(1)
+ x2 = x2.unsqueeze(1)
+
+ x = torch.cat([x0, x1, x2], 1)
+ return x
+
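+# Usage sketch (illustrative): for num_mix=10 mixtures over RGB, the network
+# must emit 10 + 3 * (3 * 10) = 100 channels per pixel.
+#
+#   param = torch.randn(2, 100, 32, 32).cuda()
+#   dist = DiscMixLogistic(param, num_mix=10, num_bits=8)
+#   ll = dist.log_p(torch.rand(2, 3, 32, 32).cuda() * 2 - 1)  # [2, 32, 32]
+#   img = dist.sample()                                       # [2, 3, 32, 32] in [-1, 1]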
+
diff --git a/guided_diffusion/dist_util.py b/guided_diffusion/dist_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9208d42dcac29dc20101a867906d386406078ba
--- /dev/null
+++ b/guided_diffusion/dist_util.py
@@ -0,0 +1,170 @@
+"""
+Helpers for distributed training.
+"""
+
+import datetime
+import io
+import os
+import socket
+
+import blobfile as bf
+from pdb import set_trace as st
+# from mpi4py import MPI
+import torch as th
+import torch.distributed as dist
+
+# Change this to reflect your cluster layout.
+# The GPU for a given rank is (rank % GPUS_PER_NODE).
+GPUS_PER_NODE = 8
+SETUP_RETRY_COUNT = 3
+
+
+def get_rank():
+ if not dist.is_available():
+ return 0
+
+ if not dist.is_initialized():
+ return 0
+
+ return dist.get_rank()
+
+
+def synchronize():
+ if not dist.is_available():
+ return
+
+ if not dist.is_initialized():
+ return
+
+ world_size = dist.get_world_size()
+
+ if world_size == 1:
+ return
+
+ dist.barrier()
+
+
+def get_world_size():
+ if not dist.is_available():
+ return 1
+
+ if not dist.is_initialized():
+ return 1
+
+ return dist.get_world_size()
+
+
+def setup_dist(args):
+ """
+ Setup a distributed process group.
+ """
+ if dist.is_initialized():
+ return
+
+ # print(f"{os.environ['MASTER_ADDR']=} {args.master_port=}")
+
+ # dist.init_process_group(backend='nccl', init_method='env://', rank=args.local_rank, world_size=th.cuda.device_count(), timeout=datetime.timedelta(seconds=5400))
+ dist.init_process_group(backend='nccl', init_method='env://', timeout=datetime.timedelta(seconds=54000))
+ print(f"{args.local_rank=} init complete")
+
+ # synchronize() # extra memory on rank 0, why?
+
+ th.cuda.empty_cache()
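+
+# Launch sketch (illustrative): with the env:// variables set by torchrun,
+#
+#   torchrun --nproc_per_node=8 train.py ...
+#
+# each process calls setup_dist(args) once (args is assumed to carry a
+# local_rank field) and then places its tensors on dev().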
+
+def cleanup():
+ dist.destroy_process_group()
+
+def dev():
+ """
+ Get the device to use for torch.distributed.
+ """
+ if th.cuda.is_available():
+
+ if get_world_size() > 1:
+ return th.device(f"cuda:{get_rank() % GPUS_PER_NODE}")
+ return th.device(f"cuda")
+
+ return th.device("cpu")
+
+
+# def load_state_dict(path, submodule_name='', **kwargs):
+def load_state_dict(path, **kwargs):
+ """
+ Load a PyTorch file without redundant fetches across MPI ranks.
+ """
+ # chunk_size = 2 ** 30 # MPI has a relatively small size limit
+ # if get_rank() == 0:
+ # with bf.BlobFile(path, "rb") as f:
+ # data = f.read()
+ # num_chunks = len(data) // chunk_size
+ # if len(data) % chunk_size:
+ # num_chunks += 1
+ # MPI.COMM_WORLD.bcast(num_chunks)
+ # for i in range(0, len(data), chunk_size):
+ # MPI.COMM_WORLD.bcast(data[i : i + chunk_size])
+ # else:
+ # num_chunks = MPI.COMM_WORLD.bcast(None)
+ # data = bytes()
+ # for _ in range(num_chunks):
+ # data += MPI.COMM_WORLD.bcast(None)
+
+ # return th.load(io.BytesIO(data), **kwargs)
+ # with open(path) as f:
+ ckpt = th.load(path, **kwargs)
+ # if submodule_name != '':
+ # assert submodule_name in ckpt
+ # return ckpt[submodule_name]
+ # else:
+ return ckpt
+
+
+def sync_params(params):
+ """
+ Synchronize a sequence of Tensors across ranks from rank 0.
+ """
+ # for k, p in params:
+ for p in params:
+ with th.no_grad():
+ try:
+ dist.broadcast(p, 0)
+ except Exception as e:
+ print(e)
+
+
+def _find_free_port():
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.bind(("", 0))
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ return s.getsockname()[1]
+ finally:
+ s.close()
+
+
+_num_moments = 3 # [num_scalars, sum_of_scalars, sum_of_squares]
+_reduce_dtype = th.float32 # Data type to use for initial per-tensor reduction.
+_counter_dtype = th.float64 # Data type to use for the internal counters.
+_rank = 0 # Rank of the current process.
+_sync_device = None # Device to use for multiprocess communication. None = single-process.
+_sync_called = False # Has _sync() been called yet?
+_counters = dict() # Running counters on each device, updated by report(): name => device => torch.Tensor
+_cumulative = dict() # Cumulative counters on the CPU, updated by _sync(): name => torch.Tensor
+
+def init_multiprocessing(rank, sync_device):
+ r"""Initializes `torch_utils.training_stats` for collecting statistics
+ across multiple processes.
+ This function must be called after
+ `torch.distributed.init_process_group()` and before `Collector.update()`.
+ The call is not necessary if multi-process collection is not needed.
+ Args:
+ rank: Rank of the current process.
+ sync_device: PyTorch device to use for inter-process
+ communication, or None to disable multi-process
+ collection. Typically `torch.device('cuda', rank)`.
+ """
+ global _rank, _sync_device
+ assert not _sync_called
+ _rank = rank
+ _sync_device = sync_device
\ No newline at end of file
diff --git a/guided_diffusion/fp16_util.py b/guided_diffusion/fp16_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..c92ebf408c962141cda925ea8c096fde8d596376
--- /dev/null
+++ b/guided_diffusion/fp16_util.py
@@ -0,0 +1,326 @@
+"""
+Helpers to train with 16-bit precision.
+"""
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
+
+from . import logger
+
+INITIAL_LOG_LOSS_SCALE = 20.0
+
+
+def convert_module_to_f16(l):
+ """
+ Convert primitive modules to float16.
+ """
+ if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Conv3d)):
+ l.weight.data = l.weight.data.half()
+ if l.bias is not None:
+ l.bias.data = l.bias.data.half()
+
+
+def convert_module_to_f32(l):
+ """
+ Convert primitive modules to float32, undoing convert_module_to_f16().
+ """
+ if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Conv3d)):
+ l.weight.data = l.weight.data.float()
+ if l.bias is not None:
+ l.bias.data = l.bias.data.float()
+
+
+def make_master_params(param_groups_and_shapes):
+ """
+ Copy model parameters into a (differently-shaped) list of full-precision
+ parameters.
+ """
+ master_params = []
+ for param_group, shape in param_groups_and_shapes:
+ master_param = nn.Parameter(
+ _flatten_dense_tensors([
+ param.detach().float() for (_, param) in param_group
+ ]).view(shape))
+ master_param.requires_grad = True
+ master_params.append(master_param)
+ return master_params
+
+
+def model_grads_to_master_grads(param_groups_and_shapes, master_params):
+ """
+ Copy the gradients from the model parameters into the master parameters
+ from make_master_params().
+ """
+ for master_param, (param_group, shape) in zip(master_params,
+ param_groups_and_shapes):
+ master_param.grad = _flatten_dense_tensors([
+ param_grad_or_zeros(param) for (_, param) in param_group
+ ]).view(shape)
+
+
+def master_params_to_model_params(param_groups_and_shapes, master_params):
+ """
+ Copy the master parameter data back into the model parameters.
+ """
+ # Without copying to a list, if a generator is passed, this will
+ # silently not copy any parameters.
+ for master_param, (param_group, _) in zip(master_params,
+ param_groups_and_shapes):
+ for (_, param), unflat_master_param in zip(
+ param_group,
+ unflatten_master_params(param_group, master_param.view(-1))):
+ param.detach().copy_(unflat_master_param)
+
+
+def unflatten_master_params(param_group, master_param):
+ return _unflatten_dense_tensors(master_param,
+ [param for (_, param) in param_group])
+
+
+def get_param_groups_and_shapes(named_model_params):
+ named_model_params = list(named_model_params)
+ scalar_vector_named_params = (
+ [(n, p) for (n, p) in named_model_params if p.ndim <= 1],
+ (-1),
+ )
+ matrix_named_params = (
+ [(n, p) for (n, p) in named_model_params if p.ndim > 1],
+ (1, -1),
+ )
+ return [scalar_vector_named_params, matrix_named_params]
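+
+# Grouping sketch (illustrative): biases and norm scales are flattened into one
+# master vector and all weight matrices into one (1, -1) master tensor, so the
+# fp32 copy consists of just two large parameters.
+#
+#   groups = get_param_groups_and_shapes(model.named_parameters())
+#   master = make_master_params(groups)  # [flat_scalar_vector, flat_matrix]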
+
+
+def master_params_to_state_dict(model, param_groups_and_shapes, master_params,
+ use_fp16):
+ if use_fp16:
+ state_dict = model.state_dict()
+ for master_param, (param_group, _) in zip(master_params,
+ param_groups_and_shapes):
+ for (name, _), unflat_master_param in zip(
+ param_group,
+ unflatten_master_params(param_group,
+ master_param.view(-1))):
+ assert name in state_dict
+ state_dict[name] = unflat_master_param
+ else:
+ state_dict = model.state_dict()
+ for i, (name, _value) in enumerate(model.named_parameters()):
+ assert name in state_dict
+ state_dict[name] = master_params[i]
+ return state_dict
+
+
+def state_dict_to_master_params(model, state_dict, use_fp16):
+ if use_fp16:
+ named_model_params = [(name, state_dict[name])
+ for name, _ in model.named_parameters()]
+ param_groups_and_shapes = get_param_groups_and_shapes(
+ named_model_params)
+ master_params = make_master_params(param_groups_and_shapes)
+ else:
+ master_params = [
+ state_dict[name] for name, _ in model.named_parameters()
+ ]
+ return master_params
+
+
+def zero_master_grads(master_params):
+ for param in master_params:
+ param.grad = None
+
+
+def zero_grad(model_params):
+ for param in model_params:
+ # Taken from https://pytorch.org/docs/stable/_modules/torch/optim/optimizer.html#Optimizer.add_param_group
+ if param.grad is not None:
+ param.grad.detach_()
+ param.grad.zero_()
+
+
+def param_grad_or_zeros(param):
+ if param.grad is not None:
+ return param.grad.data.detach()
+ else:
+ return th.zeros_like(param)
+
+
+class MixedPrecisionTrainer:
+
+ def __init__(self,
+ *,
+ model,
+ use_fp16=False,
+ use_amp=False,
+ fp16_scale_growth=1e-3,
+ initial_lg_loss_scale=INITIAL_LOG_LOSS_SCALE,
+ model_name='ddpm',
+ submodule_name='',
+ model_params=None,
+ clip_grad_throld=1.0):
+ self.clip_grad_throld = clip_grad_throld
+ self.model_name = model_name
+ self.model = model
+ self.use_fp16 = use_fp16
+ self.use_amp = use_amp
+ if self.use_amp:
+ # https://github.com/pytorch/pytorch/issues/40497#issuecomment-1262373602
+ # https://github.com/pytorch/pytorch/issues/111739
+ # self.scaler = th.cuda.amp.GradScaler(enabled=use_amp,
+ # init_scale=2**15,
+ # growth_interval=100)
+
+ # https://discuss.pytorch.org/t/gradscaler-amp-causes-nan-weights-on-backward-pass/177542
+ self.scaler = th.cuda.amp.GradScaler(enabled=use_amp,
+ # growth_factor=2.0,
+ growth_factor=1.5, # slower growth
+ backoff_factor=0.25, # faster backoff
+ init_scale=2**16, #
+ growth_interval=2000)
+
+ logger.log(model_name, 'enables AMP to accelerate training')
+ else:
+ logger.log(model_name, 'does not enable AMP to accelerate training')
+
+ self.fp16_scale_growth = fp16_scale_growth
+
+ if model_params is None:
+ self.model_params = list(self.model.parameters())
+ elif not isinstance(model_params, list):
+ self.model_params = list(model_params)
+ else:
+ self.model_params = model_params
+ self.master_params = self.model_params
+ self.param_groups_and_shapes = None
+ self.lg_loss_scale = initial_lg_loss_scale
+
+ if self.use_fp16:
+ self.param_groups_and_shapes = get_param_groups_and_shapes(
+ self.model.named_parameters())
+ self.master_params = make_master_params(
+ self.param_groups_and_shapes)
+ self.model.convert_to_fp16()
+
+ def zero_grad(self):
+ zero_grad(self.model_params)
+
+ def backward(self, loss: th.Tensor, disable_amp=False, **kwargs):
+ """**kwargs: retain_graph=True
+ """
+ if self.use_fp16:
+ loss_scale = 2**self.lg_loss_scale
+ (loss * loss_scale).backward(**kwargs)
+ elif self.use_amp and not disable_amp:
+ self.scaler.scale(loss).backward(**kwargs)
+ else:
+ loss.backward(**kwargs)
+
+ # def optimize(self, opt: th.optim.Optimizer, clip_grad=False):
+ def optimize(self, opt: th.optim.Optimizer, clip_grad=True):
+ if self.use_fp16:
+ return self._optimize_fp16(opt)
+ elif self.use_amp:
+ return self._optimize_amp(opt, clip_grad)
+ else:
+ return self._optimize_normal(opt, clip_grad)
+
+ def _optimize_fp16(self, opt: th.optim.Optimizer):
+ logger.logkv_mean("lg_loss_scale", self.lg_loss_scale)
+ model_grads_to_master_grads(self.param_groups_and_shapes,
+ self.master_params)
+ grad_norm, param_norm = self._compute_norms(
+ grad_scale=2**self.lg_loss_scale)
+ if check_overflow(grad_norm):
+ self.lg_loss_scale -= 1
+ logger.log(
+ f"Found NaN, decreased lg_loss_scale to {self.lg_loss_scale}")
+ zero_master_grads(self.master_params)
+ return False
+
+ logger.logkv_mean("grad_norm", grad_norm)
+ logger.logkv_mean("param_norm", param_norm)
+
+ for p in self.master_params:
+ p.grad.mul_(1.0 / (2**self.lg_loss_scale))
+ opt.step()
+ zero_master_grads(self.master_params)
+ master_params_to_model_params(self.param_groups_and_shapes,
+ self.master_params)
+ self.lg_loss_scale += self.fp16_scale_growth
+ return True
+
+ def _optimize_amp(self, opt: th.optim.Optimizer, clip_grad=False):
+ # https://pytorch.org/docs/stable/notes/amp_examples.html#gradient-clipping
+ assert clip_grad
+ # clip_grad = False # ! debugging
+ self.scaler.unscale_(opt) # to calculate accurate gradients
+
+ if clip_grad:
+ th.nn.utils.clip_grad_norm_( # type: ignore
+ self.master_params,
+ # 5.0,
+ self.clip_grad_throld,
+ norm_type=2,
+ error_if_nonfinite=False,
+ foreach=True,
+ ) # clip before compute_norm
+
+ grad_norm, param_norm = self._compute_norms()
+ logger.logkv_mean("grad_norm", grad_norm)
+ logger.logkv_mean("param_norm", param_norm)
+
+ self.scaler.step(opt)
+ self.scaler.update()
+ return True
+
+ def _optimize_normal(self,
+ opt: th.optim.Optimizer,
+ clip_grad: bool = False):
+
+ assert clip_grad
+ # st()
+ if clip_grad:
+ th.nn.utils.clip_grad_norm_( # type: ignore
+ self.master_params,
+ # 5.0,
+ self.clip_grad_throld,
+ norm_type=2,
+ error_if_nonfinite=False,
+ foreach=True,
+ ) # clip before compute_norm
+
+ grad_norm, param_norm = self._compute_norms()
+ logger.logkv_mean("grad_norm", grad_norm)
+ logger.logkv_mean("param_norm", param_norm)
+ opt.step()
+ return True
+
+ def _compute_norms(self, grad_scale=1.0):
+ grad_norm = 0.0
+ param_norm = 0.0
+ for p in self.master_params:
+ with th.no_grad():
+ param_norm += th.norm(p, p=2, dtype=th.float32).item()**2
+ if p.grad is not None:
+ grad_norm += th.norm(p.grad, p=2,
+ dtype=th.float32).item()**2
+ return np.sqrt(grad_norm) / grad_scale, np.sqrt(param_norm)
+
+ def master_params_to_state_dict(self, master_params, model=None):
+ if model is None:
+ model = self.model
+ return master_params_to_state_dict(model, self.param_groups_and_shapes,
+ master_params, self.use_fp16)
+
+ def state_dict_to_master_params(self, state_dict, model=None):
+ if model is None:
+ model = self.model
+ return state_dict_to_master_params(model, state_dict, self.use_fp16)
+
+ def state_dict_to_master_params_given_submodule_name(
+ self, state_dict, submodule_name):
+ return state_dict_to_master_params(getattr(self.model, submodule_name),
+ state_dict, self.use_fp16)
+
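+# Training-step sketch (illustrative; `model`, `opt`, and `compute_loss` are
+# assumed to exist elsewhere):
+#
+#   trainer = MixedPrecisionTrainer(model=model, use_amp=True)
+#   trainer.zero_grad()
+#   with th.cuda.amp.autocast(enabled=True):
+#       loss = compute_loss(model)
+#   trainer.backward(loss)
+#   trainer.optimize(opt)  # unscales, clips, steps, and updates the scaler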
+
+def check_overflow(value):
+ # NaN compares unequal to itself, so (value != value) detects NaN.
+ return (value == float("inf")) or (value == -float("inf")) or (value != value)
diff --git a/guided_diffusion/gaussian_diffusion.py b/guided_diffusion/gaussian_diffusion.py
new file mode 100644
index 0000000000000000000000000000000000000000..08b6c2ca19c392901e6d6144ea2404e899ad8ba8
--- /dev/null
+++ b/guided_diffusion/gaussian_diffusion.py
@@ -0,0 +1,1203 @@
+"""
+This code started out as a PyTorch port of Ho et al's diffusion models:
+https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/diffusion_utils_2.py
+
+Docstrings have been added, as well as DDIM sampling and a new collection of beta schedules.
+"""
+
+from pdb import set_trace as st
+import enum
+import math
+
+import numpy as np
+import torch as th
+
+from .nn import mean_flat
+from .losses import normal_kl, discretized_gaussian_log_likelihood
+from . import dist_util
+
+
+def get_named_beta_schedule(schedule_name, num_diffusion_timesteps):
+ """
+ Get a pre-defined beta schedule for the given name.
+
+ The beta schedule library consists of beta schedules which remain similar
+ in the limit of num_diffusion_timesteps.
+ Beta schedules may be added, but should not be removed or changed once
+ they are committed to maintain backwards compatibility.
+ """
+ if schedule_name == "linear": # * used here
+ # Linear schedule from Ho et al, extended to work for any number of
+ # diffusion steps.
+ scale = 1000 / num_diffusion_timesteps
+ beta_start = scale * 0.0001
+ beta_end = scale * 0.02
+ return np.linspace(beta_start,
+ beta_end,
+ num_diffusion_timesteps,
+ dtype=np.float64)
+
+ elif schedule_name == "linear_simple":
+ return betas_for_alpha_bar_linear_simple(num_diffusion_timesteps,
+ lambda t: 0.001 / (1.001 - t))
+
+ elif schedule_name == "cosine":
+ return betas_for_alpha_bar(
+ num_diffusion_timesteps,
+ lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2)**2,
+ )
+
+ else:
+ raise NotImplementedError(f"unknown beta schedule: {schedule_name}")
+
+
+def betas_for_alpha_bar_linear_simple(num_diffusion_timesteps,
+ alpha_bar,
+ max_beta=0.999):
+ """proposed by Chen Ting, on the importance of noise schedule, arXiv 2023.
+ gamma = 1-t
+ """
+ betas = []
+ for i in range(num_diffusion_timesteps):
+ t = i / num_diffusion_timesteps
+ betas.append(min(max_beta, alpha_bar(t)))
+
+ return np.array(betas)
+
+
+def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999):
+ """
+ Create a beta schedule that discretizes the given alpha_t_bar function,
+ which defines the cumulative product of (1-beta) over time from t = [0,1].
+
+ :param num_diffusion_timesteps: the number of betas to produce.
+ :param alpha_bar: a lambda that takes an argument t from 0 to 1 and
+ produces the cumulative product of (1-beta) up to that
+ part of the diffusion process.
+ :param max_beta: the maximum beta to use; use values lower than 1 to
+ prevent singularities.
+ """
+ betas = []
+ for i in range(num_diffusion_timesteps):
+ t1 = i / num_diffusion_timesteps
+ t2 = (i + 1) / num_diffusion_timesteps
+ betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))
+ return np.array(betas)
+
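+# Construction sketch (illustrative): a named schedule feeds directly into
+# GaussianDiffusion (defined below).
+#
+#   betas = get_named_beta_schedule("linear", 1000)  # shape (1000,), ~1e-4 .. 2e-2
+#   diffusion = GaussianDiffusion(betas=betas,
+#                                 model_mean_type=ModelMeanType.EPSILON,
+#                                 model_var_type=ModelVarType.FIXED_LARGE,
+#                                 loss_type=LossType.MSE)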
+
+class ModelMeanType(enum.Enum):
+ """
+ Which type of output the model predicts.
+ """
+
+ PREVIOUS_X = enum.auto() # the model predicts x_{t-1}
+ START_X = enum.auto() # the model predicts x_0
+ EPSILON = enum.auto() # the model predicts epsilon
+ V = enum.auto() # the model predicts velocity (v-parameterization)
+
+
+class ModelVarType(enum.Enum):
+ """
+ What is used as the model's output variance.
+
+ The LEARNED_RANGE option has been added to allow the model to predict
+ values between FIXED_SMALL and FIXED_LARGE, making its job easier.
+ """
+
+ LEARNED = enum.auto()
+ FIXED_SMALL = enum.auto()
+ FIXED_LARGE = enum.auto()
+ LEARNED_RANGE = enum.auto()
+
+
+class LossType(enum.Enum):
+ MSE = enum.auto() # use raw MSE loss (and KL when learning variances)
+ RESCALED_MSE = (
+ enum.auto()
+ ) # use raw MSE loss (with RESCALED_KL when learning variances)
+ KL = enum.auto() # use the variational lower-bound
+ RESCALED_KL = enum.auto() # like KL, but rescale to estimate the full VLB
+
+ def is_vb(self):
+ return self == LossType.KL or self == LossType.RESCALED_KL
+
+
+class GaussianDiffusion:
+ """
+ Utilities for training and sampling diffusion models.
+
+ Ported directly from here, and then adapted over time to further experimentation.
+ https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/diffusion_utils_2.py#L42
+
+ :param betas: a 1-D numpy array of betas for each diffusion timestep,
+ starting at T and going to 1.
+ :param model_mean_type: a ModelMeanType determining what the model outputs.
+ :param model_var_type: a ModelVarType determining how variance is output.
+ :param loss_type: a LossType determining the loss function to use.
+ :param rescale_timesteps: if True, pass floating point timesteps into the
+ model so that they are always scaled like in the
+ original paper (0 to 1000).
+ """
+ '''
+ defaults:
+ learn_sigma=False,
+ diffusion_steps=1000,
+ noise_schedule="linear",
+ timestep_respacing="",
+ use_kl=False,
+ predict_xstart=False,
+ rescale_timesteps=False,
+ rescale_learned_sigmas=False,
+ '''
+
+ def __init__(
+ self,
+ *,
+ betas,
+ model_mean_type,
+ model_var_type,
+ loss_type,
+ rescale_timesteps=False,
+ standarization_xt=False,
+ ):
+ self.model_mean_type = model_mean_type
+ self.model_var_type = model_var_type
+ self.loss_type = loss_type
+ self.rescale_timesteps = rescale_timesteps
+ self.standarization_xt = standarization_xt
+
+ # Use float64 for accuracy.
+ betas = np.array(betas, dtype=np.float64)
+ self.betas = betas
+ assert len(betas.shape) == 1, "betas must be 1-D"
+ assert (betas > 0).all() and (betas <= 1).all()
+
+ self.num_timesteps = int(betas.shape[0])
+
+ alphas = 1.0 - betas
+ self.alphas_cumprod = np.cumprod(alphas, axis=0)
+ self.alphas_cumprod_prev = np.append(1.0, self.alphas_cumprod[:-1])
+ self.alphas_cumprod_next = np.append(self.alphas_cumprod[1:], 0.0)
+ assert self.alphas_cumprod_prev.shape == (self.num_timesteps, )
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.sqrt_alphas_cumprod = np.sqrt(self.alphas_cumprod)
+ self.sqrt_one_minus_alphas_cumprod = np.sqrt(1.0 - self.alphas_cumprod)
+ self.log_one_minus_alphas_cumprod = np.log(1.0 - self.alphas_cumprod)
+ self.sqrt_recip_alphas_cumprod = np.sqrt(1.0 / self.alphas_cumprod)
+ self.sqrt_recipm1_alphas_cumprod = np.sqrt(
+ 1.0 / self.alphas_cumprod -
+ 1) # sqrt(1/cumprod(alphas) - 1), for calculating x_0 from x_t
+
+ # calculations for posterior q(x_{t-1} | x_t, x_0)
+ self.posterior_variance = (betas * (1.0 - self.alphas_cumprod_prev) /
+ (1.0 - self.alphas_cumprod))
+ # log calculation clipped because the posterior variance is 0 at the
+ # beginning of the diffusion chain.
+ self.posterior_log_variance_clipped = np.log(
+ np.append(self.posterior_variance[1], self.posterior_variance[1:]))
+ self.posterior_mean_coef1 = (betas *
+ np.sqrt(self.alphas_cumprod_prev) /
+ (1.0 - self.alphas_cumprod))
+ self.posterior_mean_coef2 = ((1.0 - self.alphas_cumprod_prev) *
+ np.sqrt(alphas) /
+ (1.0 - self.alphas_cumprod))
+
+ def q_mean_variance(self, x_start, t):
+ """
+ Get the distribution q(x_t | x_0).
+
+ :param x_start: the [N x C x ...] tensor of noiseless inputs.
+ :param t: the number of diffusion steps (minus 1). Here, 0 means one step.
+ :return: A tuple (mean, variance, log_variance), all of x_start's shape.
+ """
+ mean = (
+ _extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) *
+ x_start)
+ variance = _extract_into_tensor(1.0 - self.alphas_cumprod, t,
+ x_start.shape)
+ log_variance = _extract_into_tensor(self.log_one_minus_alphas_cumprod,
+ t, x_start.shape)
+ return mean, variance, log_variance
+
+ def q_sample(self, x_start, t, noise=None, return_detail=False):
+ """
+ Diffuse the data for a given number of diffusion steps.
+
+ In other words, sample from q(x_t | x_0).
+
+ :param x_start: the initial data batch.
+ :param t: the number of diffusion steps (minus 1). Here, 0 means one step.
+ :param noise: if specified, the split-out normal noise.
+ :return: A noisy version of x_start.
+ """
+ if noise is None:
+ noise = th.randn_like(x_start)
+ assert noise.shape == x_start.shape
+ alpha_bar = _extract_into_tensor(self.sqrt_alphas_cumprod, t,
+ x_start.shape)
+ one_minus_alpha_bar = _extract_into_tensor(
+ self.sqrt_one_minus_alphas_cumprod, t, x_start.shape)
+ xt = (alpha_bar * x_start + one_minus_alpha_bar * noise)
+
+ if self.standarization_xt:
+ xt = xt / (1e-5 + xt.std(dim=list(range(1, xt.ndim)), keepdim=True)
+ ) # divide by per-sample std (broadcast shape B 1 1 1)
+
+ if return_detail:
+ return xt, alpha_bar, one_minus_alpha_bar
+
+ return xt
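+
+ # Closed form used above: x_t = sqrt(alpha_bar_t) * x_0
+ # + sqrt(1 - alpha_bar_t) * eps, with eps ~ N(0, I).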
+
+ def q_posterior_mean_variance(self, x_start, x_t, t):
+ """
+ Compute the mean and variance of the diffusion posterior:
+
+ q(x_{t-1} | x_t, x_0)
+
+ """
+ assert x_start.shape == x_t.shape
+ posterior_mean = (
+ _extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) *
+ x_start +
+ _extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) *
+ x_t)
+ posterior_variance = _extract_into_tensor(self.posterior_variance, t,
+ x_t.shape)
+ posterior_log_variance_clipped = _extract_into_tensor(
+ self.posterior_log_variance_clipped, t, x_t.shape)
+ assert (posterior_mean.shape[0] == posterior_variance.shape[0] ==
+ posterior_log_variance_clipped.shape[0] == x_start.shape[0])
+ return posterior_mean, posterior_variance, posterior_log_variance_clipped
+
+ def p_mean_variance(self,
+ model,
+ x,
+ t,
+ c=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ model_kwargs=None,
+ mixing_normal=False,
+ direct_return_model_output=False):
+ """
+ Apply the model to get p(x_{t-1} | x_t), as well as a prediction of
+ the initial x, x_0.
+
+ :param model: the model, which takes a signal and a batch of timesteps
+ as input.
+ :param x: the [N x C x ...] tensor at time t.
+ :param t: a 1-D Tensor of timesteps.
+ :param clip_denoised: if True, clip the denoised signal into [-1, 1].
+ :param denoised_fn: if not None, a function which applies to the
+ x_start prediction before it is used to sample. Applies before
+ clip_denoised.
+ :param model_kwargs: if not None, a dict of extra keyword arguments to
+ pass to the model. This can be used for conditioning.
+ :return: a dict with the following keys:
+ - 'mean': the model mean output.
+ - 'variance': the model variance output.
+ - 'log_variance': the log of 'variance'.
+ - 'pred_xstart': the prediction for x_0.
+ """
+ # lazy import to avoid partially initialized import
+ from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction
+
+ if model_kwargs is None:
+ model_kwargs = {}
+
+ # if mixing_normal is not None:
+ # t = t / self.num_timesteps # [0,1] for SDE diffusion
+
+ B, C = x.shape[:2]
+ assert t.shape == (B, )
+ model_output = model(x, self._scale_timesteps(t), c=c, mixing_normal=mixing_normal, **model_kwargs)
+ if direct_return_model_output:
+ return model_output
+
+ if self.model_mean_type == ModelMeanType.V:
+ v_transformed_to_eps_flag = False
+
+ if mixing_normal: # directly change the model predicted eps logits
+ if self.model_mean_type == ModelMeanType.START_X:
+ mixing_component = self.get_mixing_component_x0(x, t, enabled=True)
+ else:
+ assert self.model_mean_type in [ModelMeanType.EPSILON, ModelMeanType.V]
+ mixing_component = self.get_mixing_component(x, t, enabled=True)
+
+ if self.model_mean_type == ModelMeanType.V:
+ model_output = self._predict_eps_from_z_and_v(x, t, model_output)
+ v_transformed_to_eps_flag = True
+ # ! transform result to v first?
+ # model_output =
+ model_output = get_mixed_prediction(True,
+ model_output,
+ model.mixing_logit,
+ mixing_component)
+ else:
+ # st()
+ if self.model_mean_type == ModelMeanType.V:
+ model_output = self._predict_eps_from_z_and_v(x, t, model_output)
+ v_transformed_to_eps_flag = True
+
+ if self.model_var_type in [
+ ModelVarType.LEARNED, ModelVarType.LEARNED_RANGE
+ ]:
+ assert model_output.shape == (B, C * 2, *x.shape[2:])
+ model_output, model_var_values = th.split(model_output, C, dim=1)
+ if self.model_var_type == ModelVarType.LEARNED:
+ model_log_variance = model_var_values
+ model_variance = th.exp(model_log_variance)
+ else:
+ min_log = _extract_into_tensor(
+ self.posterior_log_variance_clipped, t, x.shape)
+ max_log = _extract_into_tensor(np.log(self.betas), t, x.shape)
+ # The model_var_values is [-1, 1] for [min_var, max_var].
+ frac = (model_var_values + 1) / 2
+ model_log_variance = frac * max_log + (1 - frac) * min_log
+ model_variance = th.exp(model_log_variance)
+ else:
+ model_variance, model_log_variance = {
+ # for fixedlarge, we set the initial (log-)variance like so
+ # to get a better decoder log likelihood.
+ # ?
+ ModelVarType.FIXED_LARGE: ( # * used here
+ np.append(self.posterior_variance[1], self.betas[1:]),
+ np.log(
+ np.append(self.posterior_variance[1], self.betas[1:])),
+ ),
+ ModelVarType.FIXED_SMALL: (
+ self.posterior_variance,
+ self.posterior_log_variance_clipped,
+ ),
+ }[self.model_var_type]
+ model_variance = _extract_into_tensor(model_variance, t, x.shape)
+ model_log_variance = _extract_into_tensor(model_log_variance, t,
+ x.shape)
+
+ def process_xstart(x):
+ if denoised_fn is not None:
+ x = denoised_fn(x)
+ if clip_denoised:
+ return x.clamp(-1, 1)
+ return x
+
+ if self.model_mean_type == ModelMeanType.PREVIOUS_X:
+ pred_xstart = process_xstart(
+ self._predict_xstart_from_xprev(x_t=x, t=t,
+ xprev=model_output))
+ model_mean = model_output
+ elif self.model_mean_type in [
+ ModelMeanType.START_X, ModelMeanType.EPSILON, ModelMeanType.V
+ ]:
+ if self.model_mean_type == ModelMeanType.START_X:
+ pred_xstart = process_xstart(model_output)
+ else: # * used here
+ if self.model_mean_type == ModelMeanType.V:
+ assert v_transformed_to_eps_flag # type: ignore
+ pred_xstart = process_xstart( # * return the x_0 using self._predict_xstart_from_eps as the denoised_fn
+ self._predict_xstart_from_eps(x_t=x, t=t,
+ eps=model_output))
+ model_mean, _, _ = self.q_posterior_mean_variance(
+ x_start=pred_xstart, x_t=x, t=t)
+ else:
+ raise NotImplementedError(self.model_mean_type)
+
+ assert (model_mean.shape == model_log_variance.shape ==
+ pred_xstart.shape == x.shape)
+ return {
+ "mean": model_mean,
+ "variance": model_variance,
+ "log_variance": model_log_variance,
+ "pred_xstart": pred_xstart,
+ }
+
+ def _predict_xstart_from_eps(self, x_t, t, eps):
+ assert x_t.shape == eps.shape
+ return (_extract_into_tensor(self.sqrt_recip_alphas_cumprod, t,
+ x_t.shape) * x_t -
+ _extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t,
+ x_t.shape) * eps)
+
+ def _predict_xstart_from_xprev(self, x_t, t, xprev):
+ assert x_t.shape == xprev.shape
+ return ( # (xprev - coef2*x_t) / coef1
+ _extract_into_tensor(1.0 / self.posterior_mean_coef1, t, x_t.shape)
+ * xprev - _extract_into_tensor(
+ self.posterior_mean_coef2 / self.posterior_mean_coef1, t,
+ x_t.shape) * x_t)
+
+ def _predict_eps_from_xstart(self, x_t, t, pred_xstart):
+ return (_extract_into_tensor(self.sqrt_recip_alphas_cumprod, t,
+ x_t.shape) * x_t -
+ pred_xstart) / _extract_into_tensor(
+ self.sqrt_recipm1_alphas_cumprod, t, x_t.shape)
+
+ # https://github.com/Stability-AI/stablediffusion/blob/cf1d67a6fd5ea1aa600c4df58e5b47da45f6bdbf/ldm/models/diffusion/ddpm.py#L288
+ def _predict_start_from_z_and_v(self, x_t, t, v):
+ # self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
+ # self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
+ return (
+ _extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * x_t -
+ _extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * v
+ )
+
+ def _predict_eps_from_z_and_v(self, x_t, t, v):
+ return (
+ _extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * v +
+ _extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * x_t
+ )
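+
+ # v-parameterization identities used above (Salimans & Ho, "Progressive
+ # Distillation for Fast Sampling of Diffusion Models", 2022), with
+ # ab = alpha_bar_t:
+ #   v   = sqrt(ab) * eps - sqrt(1 - ab) * x_0
+ #   x_0 = sqrt(ab) * x_t - sqrt(1 - ab) * v
+ #   eps = sqrt(ab) * v   + sqrt(1 - ab) * x_t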
+
+ def _scale_timesteps(self, t):
+ if self.rescale_timesteps:
+ return t.float() * (1000.0 / self.num_timesteps)
+ return t
+
+ def condition_mean(self, cond_fn, p_mean_var, x, t, model_kwargs=None):
+ """
+ Compute the mean for the previous step, given a function cond_fn that
+ computes the gradient of a conditional log probability with respect to
+ x. In particular, cond_fn computes grad(log(p(y|x))), and we want to
+ condition on y.
+
+ This uses the conditioning strategy from Sohl-Dickstein et al. (2015).
+ """
+ gradient = cond_fn(x, self._scale_timesteps(t), **model_kwargs)
+ new_mean = (p_mean_var["mean"].float() +
+ p_mean_var["variance"] * gradient.float())
+ return new_mean
+
+ def condition_score(self, cond_fn, p_mean_var, x, t, model_kwargs=None):
+ """
+ Compute what the p_mean_variance output would have been, should the
+ model's score function be conditioned by cond_fn.
+
+ See condition_mean() for details on cond_fn.
+
+ Unlike condition_mean(), this instead uses the conditioning strategy
+ from Song et al (2020).
+ """
+ alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape)
+
+ eps = self._predict_eps_from_xstart(x, t, p_mean_var["pred_xstart"])
+ eps = eps - (1 - alpha_bar).sqrt() * cond_fn(
+ x, self._scale_timesteps(t), **model_kwargs)
+
+ out = p_mean_var.copy()
+ out["pred_xstart"] = self._predict_xstart_from_eps(x, t, eps)
+ out["mean"], _, _ = self.q_posterior_mean_variance(
+ x_start=out["pred_xstart"], x_t=x, t=t)
+ return out
+
+ def p_sample(
+ self,
+ model,
+ x,
+ t,
+ cond=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ mixing_normal=False,
+ ):
+ """
+ Sample x_{t-1} from the model at the given timestep.
+
+ :param model: the model to sample from.
+ :param x: the current tensor at x_{t-1}.
+ :param t: the value of t, starting at 0 for the first diffusion step.
+ :param clip_denoised: if True, clip the x_start prediction to [-1, 1].
+ :param denoised_fn: if not None, a function which applies to the
+ x_start prediction before it is used to sample.
+ :param cond_fn: if not None, this is a gradient function that acts
+ similarly to the model.
+ :param model_kwargs: if not None, a dict of extra keyword arguments to
+ pass to the model. This can be used for conditioning.
+ :return: a dict containing the following keys:
+ - 'sample': a random sample from the model.
+ - 'pred_xstart': a prediction of x_0.
+ """
+ out = self.p_mean_variance(model,
+ x,
+ t,
+ c=cond,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ model_kwargs=model_kwargs,
+ mixing_normal=mixing_normal)
+ noise = th.randn_like(x)
+ nonzero_mask = ((t != 0).float().view(-1, *([1] * (len(x.shape) - 1)))
+ ) # no noise when t == 0
+ if cond_fn is not None:
+ out["mean"] = self.condition_mean(cond_fn,
+ out,
+ x,
+ t,
+ model_kwargs=model_kwargs)
+ sample = out["mean"] + nonzero_mask * th.exp(
+ 0.5 * out["log_variance"]) * noise
+ return {"sample": sample, "pred_xstart": out["pred_xstart"]}
+
+ def get_mixing_component(self, x_noisy, t, enabled):
+ # alpha_bars = th.gather(self._alpha_bars, 0, timestep-1)
+ if enabled:
+ # one_minus_alpha_bars_sqrt = utils.view4D(th.sqrt(1.0 - alpha_bars), size)
+ one_minus_alpha_bars_sqrt = _extract_into_tensor(
+ self.sqrt_one_minus_alphas_cumprod, t, x_noisy.shape)
+ mixing_component = one_minus_alpha_bars_sqrt * x_noisy
+ else:
+ mixing_component = None
+
+ return mixing_component
+
+ def get_mixing_component_x0(self, x_noisy, t, enabled):
+ # alpha_bars = th.gather(self._alpha_bars, 0, timestep-1)
+ if enabled:
+ # one_minus_alpha_bars_sqrt = utils.view4D(th.sqrt(1.0 - alpha_bars), size)
+ alpha_bars_sqrt = _extract_into_tensor(
+ self.sqrt_alphas_cumprod, t, x_noisy.shape)
+ mixing_component = alpha_bars_sqrt * x_noisy
+ else:
+ mixing_component = None
+
+ return mixing_component
+
+ def p_sample_mixing_component(
+ self,
+ model,
+ x,
+ t,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ ):
+ """
+ Sample x_{t-1} from the model at the given timestep.
+
+ :param model: the model to sample from.
+ :param x: the current tensor at x_{t-1}.
+ :param t: the value of t, starting at 0 for the first diffusion step.
+ :param clip_denoised: if True, clip the x_start prediction to [-1, 1].
+ :param denoised_fn: if not None, a function which applies to the
+ x_start prediction before it is used to sample.
+ :param cond_fn: if not None, this is a gradient function that acts
+ similarly to the model.
+ :param model_kwargs: if not None, a dict of extra keyword arguments to
+ pass to the model. This can be used for conditioning.
+ :return: a dict containing the following keys:
+ - 'sample': a random sample from the model.
+ - 'pred_xstart': a prediction of x_0.
+ """
+
+ assert self.model_mean_type == ModelMeanType.EPSILON, 'LSGM sampling is currently only implemented for EPSILON prediction'
+
+ out = self.p_mean_variance(
+ model,
+ x,
+ t / self.num_timesteps, # trained on SDE diffusion; normalize steps to (0, 1]
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ model_kwargs=model_kwargs,
+ )
+ # mixing_component = self.get_mixing_component(x, t, enabled=True)
+ # out['mean'] = get_mixed_prediction(model.mixed_prediction, out['mean'], model.mixing_logit, mixing_component)
+
+ noise = th.randn_like(x)
+ nonzero_mask = ((t != 0).float().view(-1, *([1] * (len(x.shape) - 1)))
+ ) # no noise when t == 0
+ if cond_fn is not None:
+ out["mean"] = self.condition_mean(cond_fn,
+ out,
+ x,
+ t,
+ model_kwargs=model_kwargs)
+ sample = out["mean"] + nonzero_mask * th.exp(
+ 0.5 * out["log_variance"]) * noise
+ return {"sample": sample, "pred_xstart": out["pred_xstart"]}
+
+ def p_sample_loop(
+ self,
+ model,
+ shape,
+ cond=None,
+ noise=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ device=None,
+ progress=False,
+ mixing_normal=False,
+ ):
+ """
+ Generate samples from the model.
+
+ :param model: the model module.
+ :param shape: the shape of the samples, (N, C, H, W).
+ :param noise: if specified, the noise from the encoder to sample.
+ Should be of the same shape as `shape`.
+ :param clip_denoised: if True, clip x_start predictions to [-1, 1].
+ :param denoised_fn: if not None, a function which applies to the
+ x_start prediction before it is used to sample.
+ :param cond_fn: if not None, this is a gradient function that acts
+ similarly to the model.
+ :param model_kwargs: if not None, a dict of extra keyword arguments to
+ pass to the model. This can be used for conditioning.
+ :param device: if specified, the device to create the samples on.
+ If not specified, use a model parameter's device.
+ :param progress: if True, show a tqdm progress bar.
+ :return: a non-differentiable batch of samples.
+ """
+ final = None
+ for sample in self.p_sample_loop_progressive(
+ model,
+ shape,
+ cond=cond,
+ noise=noise,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ cond_fn=cond_fn,
+ model_kwargs=model_kwargs,
+ device=device,
+ progress=progress,
+ mixing_normal=mixing_normal):
+ final = sample
+ return final["sample"]
+
+ def p_sample_loop_progressive(
+ self,
+ model,
+ shape,
+ cond=None,
+ noise=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ device=None,
+ progress=False,
+ mixing_normal=False,
+ ):
+ """
+ Generate samples from the model and yield intermediate samples from
+ each timestep of diffusion.
+
+ Arguments are the same as p_sample_loop().
+ Returns a generator over dicts, where each dict is the return value of
+ p_sample().
+ """
+ if device is None:
+ device = dist_util.dev()
+ # device = next(model.parameters()).device
+ assert isinstance(shape, (tuple, list))
+ if noise is not None:
+ img = noise
+ else:
+ img = th.randn(*shape, device=device)
+ indices = list(range(self.num_timesteps))[::-1]
+
+ if progress:
+ # Lazy import so that we don't depend on tqdm.
+ from tqdm.auto import tqdm
+
+ indices = tqdm(indices)
+
+ for i in indices:
+ t = th.tensor([i] * shape[0], device=device)
+ with th.no_grad():
+ out = self.p_sample(model,
+ img,
+ t,
+ cond=cond,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ cond_fn=cond_fn,
+ model_kwargs=model_kwargs,
+ mixing_normal=mixing_normal)
+ yield out
+ img = out["sample"]
+
+ def ddim_sample(
+ self,
+ model,
+ x,
+ t,
+ cond=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ eta=0.0,
+ unconditional_guidance_scale=1.,
+ unconditional_conditioning=None,
+ mixing_normal=False,
+ ):
+ """
+ Sample x_{t-1} from the model using DDIM.
+
+ Same usage as p_sample().
+ """
+
+ if unconditional_guidance_scale != 1.0:
+ assert cond is not None
+ if unconditional_conditioning is None:
+ unconditional_conditioning = {
+ k: th.zeros_like(cond[k]) for k in cond.keys()
+ }
+ # ImageEmbedding adopts zero as the null embedding
+ # st()
+
+ if unconditional_conditioning is None or unconditional_guidance_scale == 1.:
+ # e_t = self.model.apply_model(x, t, c)
+
+ out = self.p_mean_variance(
+ model,
+ x,
+ t,
+ c=cond,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ model_kwargs=model_kwargs,
+ mixing_normal=mixing_normal,
+ )
+ eps = self._predict_eps_from_xstart(x, t, out["pred_xstart"])
+
+ else:
+ assert cond is not None
+ x_in = th.cat([x] * 2)
+ t_in = th.cat([t] * 2)
+ c_in = {}
+ for k in cond:
+ c_in[k] = th.cat([unconditional_conditioning[k], cond[k]])
+
+ model_uncond, model_t = self.p_mean_variance(
+ model,
+ x_in,
+ t_in,
+ c=c_in,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ model_kwargs=model_kwargs,
+ mixing_normal=mixing_normal,
+ direct_return_model_output=True, # ! compat with _wrapper
+ ).chunk(2)
+ # Usually our model outputs epsilon, but we re-derive it
+ # model_uncond, model_t = model(x_in, self._scale_timesteps(t_in), c=c_in, mixing_normal=mixing_normal, **model_kwargs).chunk(2)
+
+ # in case we used x_start or x_prev prediction.
+ # st()
+
+ # ! guidance
+ # e_t_uncond, e_t = eps.chunk(2)
+ model_out = model_uncond + unconditional_guidance_scale * (model_t - model_uncond)
+
+ if self.model_mean_type == ModelMeanType.V:
+ eps = self._predict_eps_from_z_and_v(x, t, model_out)
+ else:
+ # the model predicts epsilon directly, so the guided output is already eps
+ assert self.model_mean_type == ModelMeanType.EPSILON
+ eps = model_out
+
+ if cond_fn is not None:
+ out = self.condition_score(cond_fn,
+ out,
+ x,
+ t,
+ model_kwargs=model_kwargs)
+
+ # eps = self._predict_eps_from_xstart(x, t, out["pred_xstart"])
+ # ! re-derive xstart
+ pred_x0 = self._predict_xstart_from_eps(x, t, eps)
+
+ alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape)
+ alpha_bar_prev = _extract_into_tensor(self.alphas_cumprod_prev, t,
+ x.shape)
+ sigma = (eta * th.sqrt((1 - alpha_bar_prev) / (1 - alpha_bar)) *
+ th.sqrt(1 - alpha_bar / alpha_bar_prev))
+ # Equation 12.
+ noise = th.randn_like(x)
+ mean_pred = (pred_x0 * th.sqrt(alpha_bar_prev) +
+ th.sqrt(1 - alpha_bar_prev - sigma**2) * eps)
+ nonzero_mask = ((t != 0).float().view(-1, *([1] * (len(x.shape) - 1)))
+ ) # no noise when t == 0
+ sample = mean_pred + nonzero_mask * sigma * noise
+ return {"sample": sample, "pred_xstart": pred_x0}
+
+ def ddim_reverse_sample(
+ self,
+ model,
+ x,
+ t,
+ clip_denoised=True,
+ denoised_fn=None,
+ model_kwargs=None,
+ eta=0.0,
+ ):
+ """
+ Sample x_{t+1} from the model using DDIM reverse ODE.
+ """
+ assert eta == 0.0, "Reverse ODE only for deterministic path"
+ out = self.p_mean_variance(
+ model,
+ x,
+ t,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ model_kwargs=model_kwargs,
+ )
+ # Usually our model outputs epsilon, but we re-derive it
+ # in case we used x_start or x_prev prediction.
+ eps = (_extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x.shape)
+ * x - out["pred_xstart"]) / _extract_into_tensor(
+ self.sqrt_recipm1_alphas_cumprod, t, x.shape)
+ alpha_bar_next = _extract_into_tensor(self.alphas_cumprod_next, t,
+ x.shape)
+
+ # Equation 12. reversed
+ mean_pred = (out["pred_xstart"] * th.sqrt(alpha_bar_next) +
+ th.sqrt(1 - alpha_bar_next) * eps)
+
+ return {"sample": mean_pred, "pred_xstart": out["pred_xstart"]}
+
+ def ddim_sample_loop(
+ self,
+ model,
+ shape,
+ cond=None,
+ noise=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ device=None,
+ progress=False,
+ eta=0.0,
+ mixing_normal=False,
+ unconditional_guidance_scale=1.0,
+ unconditional_conditioning=None,
+ ):
+ """
+ Generate samples from the model using DDIM.
+
+ Same usage as p_sample_loop().
+ """
+ final = None
+ for sample in self.ddim_sample_loop_progressive(
+ model,
+ shape,
+ cond=cond,
+ noise=noise,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ cond_fn=cond_fn,
+ model_kwargs=model_kwargs,
+ device=device,
+ progress=progress,
+ eta=eta, mixing_normal=mixing_normal,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ ):
+ final = sample
+ return final["sample"]
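+
+ # Sampling sketch (illustrative; `model`, `emb`, and the latent shape are
+ # placeholders, not values from this repo):
+ #
+ #   sample = diffusion.ddim_sample_loop(
+ #       model, (4, 16, 32, 32), cond={"caption": emb}, eta=0.0,
+ #       unconditional_guidance_scale=4.0, mixing_normal=True)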
+
+ def ddim_sample_loop_progressive(
+ self,
+ model,
+ shape,
+ cond=None,
+ noise=None,
+ clip_denoised=True,
+ denoised_fn=None,
+ cond_fn=None,
+ model_kwargs=None,
+ device=None,
+ progress=False,
+ eta=0.0,
+ mixing_normal=False,
+ unconditional_guidance_scale=1.0,
+ unconditional_conditioning=None,
+ ):
+ """
+ Use DDIM to sample from the model and yield intermediate samples from
+ each timestep of DDIM.
+
+ Same usage as p_sample_loop_progressive().
+ """
+ if device is None:
+ device = next(model.parameters()).device
+ assert isinstance(shape, (tuple, list))
+ if noise is not None:
+ img = noise
+ else:
+ img = th.randn(*shape, device=device)
+ indices = list(range(self.num_timesteps))[::-1]
+
+ if progress:
+ # Lazy import so that we don't depend on tqdm.
+ from tqdm.auto import tqdm
+
+ indices = tqdm(indices)
+
+ for i in indices:
+ t = th.tensor([i] * shape[0], device=device)
+ with th.no_grad():
+ out = self.ddim_sample(
+ model,
+ img,
+ t,
+ cond=cond,
+ clip_denoised=clip_denoised,
+ denoised_fn=denoised_fn,
+ cond_fn=cond_fn,
+ model_kwargs=model_kwargs,
+ eta=eta,
+ mixing_normal=mixing_normal,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ )
+ yield out
+ img = out["sample"]
+
+ def _vb_terms_bpd(self,
+ model,
+ x_start,
+ x_t,
+ t,
+ clip_denoised=True,
+ model_kwargs=None):
+ """
+ Get a term for the variational lower-bound.
+
+ The resulting units are bits (rather than nats, as one might expect).
+ This allows for comparison to other papers.
+
+ :return: a dict with the following keys:
+ - 'output': a shape [N] tensor of NLLs or KLs.
+ - 'pred_xstart': the x_0 predictions.
+ """
+ true_mean, _, true_log_variance_clipped = self.q_posterior_mean_variance(
+ x_start=x_start, x_t=x_t, t=t)
+ out = self.p_mean_variance(model,
+ x_t,
+ t,
+ clip_denoised=clip_denoised,
+ model_kwargs=model_kwargs)
+ kl = normal_kl(true_mean, true_log_variance_clipped, out["mean"],
+ out["log_variance"])
+ kl = mean_flat(kl) / np.log(2.0)
+
+ decoder_nll = -discretized_gaussian_log_likelihood(
+ x_start, means=out["mean"], log_scales=0.5 * out["log_variance"])
+ assert decoder_nll.shape == x_start.shape
+ decoder_nll = mean_flat(decoder_nll) / np.log(2.0)
+
+ # At the first timestep return the decoder NLL,
+ # otherwise return KL(q(x_{t-1}|x_t,x_0) || p(x_{t-1}|x_t))
+ output = th.where((t == 0), decoder_nll, kl)
+ return {"output": output, "pred_xstart": out["pred_xstart"]}
+
+ def training_losses(self,
+ model,
+ x_start,
+ t,
+ model_kwargs=None,
+ noise=None,
+ return_detail=False):
+ """
+ Compute training losses for a single timestep.
+
+ :param model: the model to evaluate loss on.
+ :param x_start: the [N x C x ...] tensor of inputs.
+ :param t: a batch of timestep indices.
+ :param model_kwargs: if not None, a dict of extra keyword arguments to
+ pass to the model. This can be used for conditioning.
+ :param noise: if specified, the specific Gaussian noise to try to remove.
+ :return: a dict with the key "loss" containing a tensor of shape [N].
+ Some mean or variance settings may also have other keys.
+ """
+ if model_kwargs is None: # * micro_cond
+ model_kwargs = {}
+ if noise is None:
+ noise = th.randn_like(x_start) # x_start is the x0 image
+ x_t = self.q_sample(x_start,
+ t,
+ noise=noise,
+ return_detail=return_detail
+ ) # * add noise according to predefined schedule
+ if return_detail:
+ x_t, alpha_bar, _ = x_t
+
+ # terms = {}
+ terms = {"x_t": x_t}
+
+ if self.loss_type == LossType.KL or self.loss_type == LossType.RESCALED_KL:
+ terms["loss"] = self._vb_terms_bpd(
+ model=model,
+ x_start=x_start,
+ x_t=x_t,
+ t=t,
+ clip_denoised=False,
+ model_kwargs=model_kwargs,
+ )["output"]
+ if self.loss_type == LossType.RESCALED_KL:
+ terms["loss"] *= self.num_timesteps
+ elif self.loss_type == LossType.MSE or self.loss_type == LossType.RESCALED_MSE:
+ model_output = model(
+ x_t, self._scale_timesteps(t), **model_kwargs
+ ) # directly predict epsilon or x_0; no learned sigma
+
+ if self.model_var_type in [
+ ModelVarType.LEARNED,
+ ModelVarType.LEARNED_RANGE,
+ ]:
+ B, C = x_t.shape[:2]
+ assert model_output.shape == (B, C * 2, *x_t.shape[2:])
+ model_output, model_var_values = th.split(model_output,
+ C,
+ dim=1)
+ # Learn the variance using the variational bound, but don't let
+ # it affect our mean prediction.
+ frozen_out = th.cat([model_output.detach(), model_var_values],
+ dim=1)
+ terms["vb"] = self._vb_terms_bpd(
+ model=lambda *args, r=frozen_out: r,
+ x_start=x_start,
+ x_t=x_t,
+ t=t,
+ clip_denoised=False,
+ )["output"]
+ if self.loss_type == LossType.RESCALED_MSE:
+ # Divide by 1000 for equivalence with initial implementation.
+ # Without a factor of 1/1000, the VB term hurts the MSE term.
+ terms["vb"] *= self.num_timesteps / 1000.0
+
+ target = {
+ ModelMeanType.PREVIOUS_X:
+ self.q_posterior_mean_variance(x_start=x_start, x_t=x_t,
+ t=t)[0],
+ ModelMeanType.START_X:
+ x_start,
+ ModelMeanType.EPSILON:
+ noise,
+ # v target (Salimans & Ho, 2022): v = sqrt(ab) * eps - sqrt(1 - ab) * x_0
+ ModelMeanType.V:
+ _extract_into_tensor(self.sqrt_alphas_cumprod, t,
+ x_start.shape) * noise -
+ _extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t,
+ x_start.shape) * x_start,
+ }[self.model_mean_type] # ModelMeanType.EPSILON is used in this codebase
+ # st()
+ assert model_output.shape == target.shape == x_start.shape
+ terms["mse"] = mean_flat((target - model_output)**2)
+
+ terms['model_output'] = model_output
+ # terms['target'] = target # TODO, flag.
+ if return_detail:
+ terms.update({
+ 'diffusion_target': target,
+ 'alpha_bar': alpha_bar,
+ # 'one_minus_alpha':one_minus_alpha
+ # 'noise': noise
+ })
+
+ if "vb" in terms:
+ terms["loss"] = terms["mse"] + terms["vb"]
+ else:
+ terms["loss"] = terms["mse"]
+ else:
+ raise NotImplementedError(self.loss_type)
+
+ return terms
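+
+ # Training-step sketch (illustrative):
+ #
+ #   t = th.randint(0, diffusion.num_timesteps, (x_start.shape[0],),
+ #                  device=x_start.device)
+ #   losses = diffusion.training_losses(model, x_start, t)
+ #   losses["loss"].mean().backward()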
+
+ def _prior_bpd(self, x_start):
+ """
+ Get the prior KL term for the variational lower-bound, measured in
+ bits-per-dim.
+
+ This term can't be optimized, as it only depends on the encoder.
+
+ :param x_start: the [N x C x ...] tensor of inputs.
+ :return: a batch of [N] KL values (in bits), one per batch element.
+ """
+ batch_size = x_start.shape[0]
+ t = th.tensor([self.num_timesteps - 1] * batch_size,
+ device=x_start.device)
+ qt_mean, _, qt_log_variance = self.q_mean_variance(x_start, t)
+ kl_prior = normal_kl(mean1=qt_mean,
+ logvar1=qt_log_variance,
+ mean2=0.0,
+ logvar2=0.0)
+ return mean_flat(kl_prior) / np.log(2.0)
+
+ def calc_bpd_loop(self,
+ model,
+ x_start,
+ clip_denoised=True,
+ model_kwargs=None):
+ """
+ Compute the entire variational lower-bound, measured in bits-per-dim,
+ as well as other related quantities.
+
+ :param model: the model to evaluate loss on.
+ :param x_start: the [N x C x ...] tensor of inputs.
+ :param clip_denoised: if True, clip denoised samples.
+ :param model_kwargs: if not None, a dict of extra keyword arguments to
+ pass to the model. This can be used for conditioning.
+
+ :return: a dict containing the following keys:
+ - total_bpd: the total variational lower-bound, per batch element.
+ - prior_bpd: the prior term in the lower-bound.
+ - vb: an [N x T] tensor of terms in the lower-bound.
+ - xstart_mse: an [N x T] tensor of x_0 MSEs for each timestep.
+ - mse: an [N x T] tensor of epsilon MSEs for each timestep.
+ """
+ device = x_start.device
+ batch_size = x_start.shape[0]
+
+ vb = []
+ xstart_mse = []
+ mse = []
+ for t in list(range(self.num_timesteps))[::-1]:
+ t_batch = th.tensor([t] * batch_size, device=device)
+ noise = th.randn_like(x_start)
+ x_t = self.q_sample(x_start=x_start, t=t_batch, noise=noise)
+ # Calculate VLB term at the current timestep
+ with th.no_grad():
+ out = self._vb_terms_bpd(
+ model,
+ x_start=x_start,
+ x_t=x_t,
+ t=t_batch,
+ clip_denoised=clip_denoised,
+ model_kwargs=model_kwargs,
+ )
+ vb.append(out["output"])
+ xstart_mse.append(mean_flat((out["pred_xstart"] - x_start)**2))
+ eps = self._predict_eps_from_xstart(x_t, t_batch,
+ out["pred_xstart"])
+ mse.append(mean_flat((eps - noise)**2))
+
+ vb = th.stack(vb, dim=1)
+ xstart_mse = th.stack(xstart_mse, dim=1)
+ mse = th.stack(mse, dim=1)
+
+ prior_bpd = self._prior_bpd(x_start)
+ total_bpd = vb.sum(dim=1) + prior_bpd
+ return {
+ "total_bpd": total_bpd,
+ "prior_bpd": prior_bpd,
+ "vb": vb,
+ "xstart_mse": xstart_mse,
+ "mse": mse,
+ }
+
+
+def _extract_into_tensor(arr, timesteps, broadcast_shape):
+ """
+ Extract values from a 1-D numpy array for a batch of indices.
+
+ :param arr: the 1-D numpy array.
+ :param timesteps: a tensor of indices into the array to extract.
+ :param broadcast_shape: a larger shape of K dimensions with the batch
+ dimension equal to the length of timesteps.
+ :return: a tensor of shape [batch_size, 1, ...] where the shape has K dims.
+ """
+ res = th.from_numpy(arr).to(device=timesteps.device)[timesteps].float()
+ while len(res.shape) < len(broadcast_shape):
+ res = res[..., None]
+ return res.expand(broadcast_shape)
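+
+
+# Illustrative usage sketch (shapes and the schedule array are assumptions):
+#
+#   betas = np.linspace(1e-4, 2e-2, 1000)        # any 1-D schedule array
+#   t = th.tensor([0, 499, 999])                 # a batch of timestep indices
+#   coef = _extract_into_tensor(betas, t, (3, 4, 64, 64))
+#   # coef[i] holds betas[t[i]] broadcast to [4, 64, 64], so per-sample schedule
+#   # constants can multiply an [N x C x H x W] tensor directly.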
diff --git a/guided_diffusion/image_datasets.py b/guided_diffusion/image_datasets.py
new file mode 100644
index 0000000000000000000000000000000000000000..93022ae208a01e72eb162d7b63c07bf94a6afe3b
--- /dev/null
+++ b/guided_diffusion/image_datasets.py
@@ -0,0 +1,167 @@
+import math
+import random
+
+from PIL import Image
+import blobfile as bf
+from mpi4py import MPI
+import numpy as np
+from torch.utils.data import DataLoader, Dataset
+
+
+def load_data(
+ *,
+ data_dir,
+ batch_size,
+ image_size,
+ class_cond=False,
+ deterministic=False,
+ random_crop=False,
+ random_flip=True,
+):
+ """
+ For a dataset, create a generator over (images, kwargs) pairs.
+
+    Each image is an NCHW float tensor, and the kwargs dict contains zero or
+    more keys, each of which maps to a batched Tensor of its own.
+ The kwargs dict can be used for class labels, in which case the key is "y"
+ and the values are integer tensors of class labels.
+
+ :param data_dir: a dataset directory.
+ :param batch_size: the batch size of each returned pair.
+ :param image_size: the size to which images are resized.
+ :param class_cond: if True, include a "y" key in returned dicts for class
+ label. If classes are not available and this is true, an
+ exception will be raised.
+ :param deterministic: if True, yield results in a deterministic order.
+ :param random_crop: if True, randomly crop the images for augmentation.
+ :param random_flip: if True, randomly flip the images for augmentation.
+ """
+ if not data_dir:
+ raise ValueError("unspecified data directory")
+ all_files = _list_image_files_recursively(data_dir)
+ classes = None
+ if class_cond:
+ # Assume classes are the first part of the filename,
+ # before an underscore.
+ class_names = [bf.basename(path).split("_")[0] for path in all_files]
+ sorted_classes = {x: i for i, x in enumerate(sorted(set(class_names)))}
+ classes = [sorted_classes[x] for x in class_names]
+ dataset = ImageDataset(
+ image_size,
+ all_files,
+ classes=classes,
+ shard=MPI.COMM_WORLD.Get_rank(),
+ num_shards=MPI.COMM_WORLD.Get_size(),
+ random_crop=random_crop,
+ random_flip=random_flip,
+ )
+ if deterministic:
+ loader = DataLoader(
+ dataset, batch_size=batch_size, shuffle=False, num_workers=1, drop_last=True
+ )
+ else:
+ loader = DataLoader(
+ dataset, batch_size=batch_size, shuffle=True, num_workers=1, drop_last=True
+ )
+ while True:
+ yield from loader
+
+
+def _list_image_files_recursively(data_dir):
+ results = []
+ for entry in sorted(bf.listdir(data_dir)):
+ full_path = bf.join(data_dir, entry)
+ ext = entry.split(".")[-1]
+ if "." in entry and ext.lower() in ["jpg", "jpeg", "png", "gif"]:
+ results.append(full_path)
+ elif bf.isdir(full_path):
+ results.extend(_list_image_files_recursively(full_path))
+ return results
+
+
+class ImageDataset(Dataset):
+ def __init__(
+ self,
+ resolution,
+ image_paths,
+ classes=None,
+ shard=0,
+ num_shards=1,
+ random_crop=False,
+ random_flip=True,
+ ):
+ super().__init__()
+ self.resolution = resolution
+ self.local_images = image_paths[shard:][::num_shards]
+ self.local_classes = None if classes is None else classes[shard:][::num_shards]
+ self.random_crop = random_crop
+ self.random_flip = random_flip
+
+ def __len__(self):
+ return len(self.local_images)
+
+ def __getitem__(self, idx):
+ path = self.local_images[idx]
+ with bf.BlobFile(path, "rb") as f:
+ pil_image = Image.open(f)
+ pil_image.load()
+ pil_image = pil_image.convert("RGB")
+
+ if self.random_crop:
+ arr = random_crop_arr(pil_image, self.resolution)
+ else:
+ arr = center_crop_arr(pil_image, self.resolution)
+
+ if self.random_flip and random.random() < 0.5:
+ arr = arr[:, ::-1]
+
+ arr = arr.astype(np.float32) / 127.5 - 1
+
+ out_dict = {}
+ if self.local_classes is not None:
+ out_dict["y"] = np.array(self.local_classes[idx], dtype=np.int64)
+ return np.transpose(arr, [2, 0, 1]), out_dict
+
+
+def center_crop_arr(pil_image, image_size):
+ # We are not on a new enough PIL to support the `reducing_gap`
+ # argument, which uses BOX downsampling at powers of two first.
+ # Thus, we do it by hand to improve downsample quality.
+ while min(*pil_image.size) >= 2 * image_size:
+ pil_image = pil_image.resize(
+ tuple(x // 2 for x in pil_image.size), resample=Image.BOX
+ )
+
+ scale = image_size / min(*pil_image.size)
+ pil_image = pil_image.resize(
+ tuple(round(x * scale) for x in pil_image.size), resample=Image.BICUBIC
+ )
+
+ arr = np.array(pil_image)
+ crop_y = (arr.shape[0] - image_size) // 2
+ crop_x = (arr.shape[1] - image_size) // 2
+ return arr[crop_y : crop_y + image_size, crop_x : crop_x + image_size]
+
+
+def random_crop_arr(pil_image, image_size, min_crop_frac=0.8, max_crop_frac=1.0):
+ min_smaller_dim_size = math.ceil(image_size / max_crop_frac)
+ max_smaller_dim_size = math.ceil(image_size / min_crop_frac)
+ smaller_dim_size = random.randrange(min_smaller_dim_size, max_smaller_dim_size + 1)
+
+ # We are not on a new enough PIL to support the `reducing_gap`
+ # argument, which uses BOX downsampling at powers of two first.
+ # Thus, we do it by hand to improve downsample quality.
+ while min(*pil_image.size) >= 2 * smaller_dim_size:
+ pil_image = pil_image.resize(
+ tuple(x // 2 for x in pil_image.size), resample=Image.BOX
+ )
+
+ scale = smaller_dim_size / min(*pil_image.size)
+ pil_image = pil_image.resize(
+ tuple(round(x * scale) for x in pil_image.size), resample=Image.BICUBIC
+ )
+
+ arr = np.array(pil_image)
+ crop_y = random.randrange(arr.shape[0] - image_size + 1)
+ crop_x = random.randrange(arr.shape[1] - image_size + 1)
+ return arr[crop_y : crop_y + image_size, crop_x : crop_x + image_size]
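+
+
+# Illustrative usage sketch (the directory path is a placeholder):
+#
+#   loader = load_data(
+#       data_dir="/path/to/images",   # hypothetical folder of jpg/png files
+#       batch_size=8,
+#       image_size=256,
+#   )
+#   images, cond = next(loader)       # images: [8 x 3 x 256 x 256], in [-1, 1]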
diff --git a/guided_diffusion/logger.py b/guided_diffusion/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..5bcc37050913426603e230fabda3c18a0f9a7a47
--- /dev/null
+++ b/guided_diffusion/logger.py
@@ -0,0 +1,508 @@
+"""
+Logger copied from OpenAI baselines to avoid extra RL-based dependencies:
+https://github.com/openai/baselines/blob/ea25b9e8b234e6ee1bca43083f8f3cf974143998/baselines/logger.py
+"""
+
+import os
+import sys
+import shutil
+import os.path as osp
+import json
+import time
+import datetime
+import tempfile
+import warnings
+from collections import defaultdict
+from contextlib import contextmanager
+from pdb import set_trace as st
+
+DEBUG = 10
+INFO = 20
+WARN = 30
+ERROR = 40
+
+DISABLED = 50
+
+
+class KVWriter(object):
+ def writekvs(self, kvs):
+ raise NotImplementedError
+
+
+class SeqWriter(object):
+ def writeseq(self, seq):
+ raise NotImplementedError
+
+
+class HumanOutputFormat(KVWriter, SeqWriter):
+ def __init__(self, filename_or_file):
+ if isinstance(filename_or_file, str):
+ self.file = open(filename_or_file, "wt")
+ self.own_file = True
+ else:
+ assert hasattr(filename_or_file, "read"), (
+ "expected file or str, got %s" % filename_or_file
+ )
+ self.file = filename_or_file
+ self.own_file = False
+
+ def writekvs(self, kvs):
+ # Create strings for printing
+ key2str = {}
+ for (key, val) in sorted(kvs.items()):
+ if hasattr(val, "__float__"):
+ valstr = "%-8.3g" % val
+ else:
+ valstr = str(val)
+ key2str[self._truncate(key)] = self._truncate(valstr)
+
+ # Find max widths
+ if len(key2str) == 0:
+ print("WARNING: tried to write empty key-value dict")
+ return
+ else:
+ keywidth = max(map(len, key2str.keys()))
+ valwidth = max(map(len, key2str.values()))
+
+ # Write out the data
+ dashes = "-" * (keywidth + valwidth + 7)
+ lines = [dashes]
+ for (key, val) in sorted(key2str.items(), key=lambda kv: kv[0].lower()):
+ lines.append(
+ "| %s%s | %s%s |"
+ % (key, " " * (keywidth - len(key)), val, " " * (valwidth - len(val)))
+ )
+ lines.append(dashes)
+ self.file.write("\n".join(lines) + "\n")
+
+ # Flush the output to the file
+ self.file.flush()
+
+ def _truncate(self, s):
+ maxlen = 30
+ return s[: maxlen - 3] + "..." if len(s) > maxlen else s
+
+ def writeseq(self, seq):
+ seq = list(seq)
+ for (i, elem) in enumerate(seq):
+ self.file.write(elem)
+ if i < len(seq) - 1: # add space unless this is the last one
+ self.file.write(" ")
+ self.file.write("\n")
+ self.file.flush()
+
+ def close(self):
+ if self.own_file:
+ self.file.close()
+
+
+class JSONOutputFormat(KVWriter):
+ def __init__(self, filename):
+ self.file = open(filename, "wt")
+
+ def writekvs(self, kvs):
+ for k, v in sorted(kvs.items()):
+ if hasattr(v, "dtype"):
+ kvs[k] = float(v)
+ self.file.write(json.dumps(kvs) + "\n")
+ self.file.flush()
+
+ def close(self):
+ self.file.close()
+
+
+class CSVOutputFormat(KVWriter):
+ def __init__(self, filename):
+ self.file = open(filename, "w+t")
+ self.keys = []
+ self.sep = ","
+
+ def writekvs(self, kvs):
+ # Add our current row to the history
+ extra_keys = list(kvs.keys() - self.keys)
+ extra_keys.sort()
+ if extra_keys:
+ self.keys.extend(extra_keys)
+ self.file.seek(0)
+ lines = self.file.readlines()
+ self.file.seek(0)
+ for (i, k) in enumerate(self.keys):
+ if i > 0:
+ self.file.write(",")
+ self.file.write(k)
+ self.file.write("\n")
+ for line in lines[1:]:
+ self.file.write(line[:-1])
+ self.file.write(self.sep * len(extra_keys))
+ self.file.write("\n")
+ for (i, k) in enumerate(self.keys):
+ if i > 0:
+ self.file.write(",")
+ v = kvs.get(k)
+ if v is not None:
+ self.file.write(str(v))
+ self.file.write("\n")
+ self.file.flush()
+
+ def close(self):
+ self.file.close()
+
+
+class TensorBoardOutputFormat(KVWriter):
+ """
+ Dumps key/value pairs into TensorBoard's numeric format.
+ """
+
+ def __init__(self, dir):
+ os.makedirs(dir, exist_ok=True)
+ self.dir = dir
+ self.step = 1
+ prefix = "events"
+ path = osp.join(osp.abspath(dir), prefix)
+ import tensorflow as tf
+ from tensorflow.python import pywrap_tensorflow
+ from tensorflow.core.util import event_pb2
+ from tensorflow.python.util import compat
+
+ self.tf = tf
+ self.event_pb2 = event_pb2
+ self.pywrap_tensorflow = pywrap_tensorflow
+ self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path))
+
+ def writekvs(self, kvs):
+ def summary_val(k, v):
+ kwargs = {"tag": k, "simple_value": float(v)}
+ return self.tf.Summary.Value(**kwargs)
+
+ summary = self.tf.Summary(value=[summary_val(k, v) for k, v in kvs.items()])
+ event = self.event_pb2.Event(wall_time=time.time(), summary=summary)
+ event.step = (
+ self.step
+ ) # is there any reason why you'd want to specify the step?
+ self.writer.WriteEvent(event)
+ self.writer.Flush()
+ self.step += 1
+
+ def close(self):
+ if self.writer:
+ self.writer.Close()
+ self.writer = None
+
+
+def make_output_format(format, ev_dir, log_suffix=""):
+ os.makedirs(ev_dir, exist_ok=True)
+ if format == "stdout":
+ return HumanOutputFormat(sys.stdout)
+ elif format == "log":
+ return HumanOutputFormat(osp.join(ev_dir, "log%s.txt" % log_suffix))
+ elif format == "json":
+ return JSONOutputFormat(osp.join(ev_dir, "progress%s.json" % log_suffix))
+ elif format == "csv":
+ return CSVOutputFormat(osp.join(ev_dir, "progress%s.csv" % log_suffix))
+ elif format == "tensorboard":
+ return TensorBoardOutputFormat(osp.join(ev_dir, "tb%s" % log_suffix))
+ else:
+ raise ValueError("Unknown format specified: %s" % (format,))
+
+
+# ================================================================
+# API
+# ================================================================
+
+
+def logkv(key, val):
+ """
+ Log a value of some diagnostic
+ Call this once for each diagnostic quantity, each iteration
+ If called many times, last value will be used.
+ """
+ get_current().logkv(key, val)
+
+
+def logkv_mean(key, val):
+ """
+ The same as logkv(), but if called many times, values averaged.
+ """
+ get_current().logkv_mean(key, val)
+
+def log_hist(key, val):
+ """
+ The same as logkv(), but if called many times, values averaged.
+ """
+ get_current().logkv_mean(key, val)
+
+
+def logkvs(d):
+ """
+ Log a dictionary of key-value pairs
+ """
+ for (k, v) in d.items():
+ logkv(k, v)
+
+
+def dumpkvs():
+ """
+ Write all of the diagnostics from the current iteration
+ """
+ return get_current().dumpkvs()
+
+
+def getkvs():
+ return get_current().name2val
+
+
+def log(*args, level=INFO):
+ """
+ Write the sequence of args, with no separators, to the console and output files (if you've configured an output file).
+ """
+ get_current().log(*args, level=level)
+
+
+def debug(*args):
+ log(*args, level=DEBUG)
+
+
+def info(*args):
+ log(*args, level=INFO)
+
+
+def warn(*args):
+ log(*args, level=WARN)
+
+
+def error(*args):
+ log(*args, level=ERROR)
+
+
+def set_level(level):
+ """
+ Set logging threshold on current logger.
+ """
+ get_current().set_level(level)
+
+
+def set_comm(comm):
+ get_current().set_comm(comm)
+
+
+def get_dir():
+ """
+ Get directory that log files are being written to.
+ will be None if there is no output directory (i.e., if you didn't call start)
+ """
+ return get_current().get_dir()
+
+def get_tensorboard_writer():
+ """get the tensorboard writer
+ """
+ pass
+
+
+record_tabular = logkv
+dump_tabular = dumpkvs
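+
+# Illustrative usage sketch of the free-function API (the directory is a placeholder):
+#
+#   configure(dir="/tmp/my-run", format_strs=["stdout", "csv"])
+#   logkv("step", 1)
+#   logkv_mean("loss", 0.25)   # repeated calls within an iteration are averaged
+#   dumpkvs()                  # flush this iteration's diagnostics to all writers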
+
+
+@contextmanager
+def profile_kv(scopename):
+ logkey = "wait_" + scopename
+ tstart = time.time()
+ try:
+ yield
+ finally:
+ get_current().name2val[logkey] += time.time() - tstart
+
+
+def profile(n):
+ """
+ Usage:
+ @profile("my_func")
+ def my_func(): code
+ """
+
+ def decorator_with_name(func):
+ def func_wrapper(*args, **kwargs):
+ with profile_kv(n):
+ return func(*args, **kwargs)
+
+ return func_wrapper
+
+ return decorator_with_name
+
+
+# ================================================================
+# Backend
+# ================================================================
+
+
+def get_current():
+ if Logger.CURRENT is None:
+ _configure_default_logger()
+
+ return Logger.CURRENT
+
+
+class Logger(object):
+ DEFAULT = None # A logger with no output files. (See right below class definition)
+ # So that you can still log to the terminal without setting up any output files
+ CURRENT = None # Current logger being used by the free functions above
+
+ def __init__(self, dir, output_formats, comm=None):
+ self.name2val = defaultdict(float) # values this iteration
+ self.name2cnt = defaultdict(int)
+ self.level = INFO
+ self.dir = dir
+ self.output_formats = output_formats
+ self.comm = comm
+
+ # Logging API, forwarded
+ # ----------------------------------------
+ def logkv(self, key, val):
+ self.name2val[key] = val
+
+ def logkv_mean(self, key, val):
+ oldval, cnt = self.name2val[key], self.name2cnt[key]
+ self.name2val[key] = oldval * cnt / (cnt + 1) + val / (cnt + 1)
+ self.name2cnt[key] = cnt + 1
+
+ def dumpkvs(self):
+ if self.comm is None:
+ d = self.name2val
+ else:
+ d = mpi_weighted_mean(
+ self.comm,
+ {
+ name: (val, self.name2cnt.get(name, 1))
+ for (name, val) in self.name2val.items()
+ },
+ )
+ if self.comm.rank != 0:
+ d["dummy"] = 1 # so we don't get a warning about empty dict
+ out = d.copy() # Return the dict for unit testing purposes
+ for fmt in self.output_formats:
+ if isinstance(fmt, KVWriter):
+ fmt.writekvs(d)
+ self.name2val.clear()
+ self.name2cnt.clear()
+ return out
+
+ def log(self, *args, level=INFO):
+ if self.level <= level:
+ self._do_log(args)
+
+ # Configuration
+ # ----------------------------------------
+ def set_level(self, level):
+ self.level = level
+
+ def set_comm(self, comm):
+ self.comm = comm
+
+ def get_dir(self):
+ return self.dir
+
+ def close(self):
+ for fmt in self.output_formats:
+ fmt.close()
+
+ # Misc
+ # ----------------------------------------
+ def _do_log(self, args):
+ for fmt in self.output_formats:
+ if isinstance(fmt, SeqWriter):
+ fmt.writeseq(map(str, args))
+
+
+def get_rank_without_mpi_import():
+ # check environment variables here instead of importing mpi4py
+ # to avoid calling MPI_Init() when this module is imported
+ for varname in ["PMI_RANK", "OMPI_COMM_WORLD_RANK"]:
+ if varname in os.environ:
+ return int(os.environ[varname])
+ return 0
+
+
+def mpi_weighted_mean(comm, local_name2valcount):
+ """
+ Copied from: https://github.com/openai/baselines/blob/ea25b9e8b234e6ee1bca43083f8f3cf974143998/baselines/common/mpi_util.py#L110
+ Perform a weighted average over dicts that are each on a different node
+ Input: local_name2valcount: dict mapping key -> (value, count)
+ Returns: key -> mean
+ """
+ all_name2valcount = comm.gather(local_name2valcount)
+ if comm.rank == 0:
+ name2sum = defaultdict(float)
+ name2count = defaultdict(float)
+ for n2vc in all_name2valcount:
+ for (name, (val, count)) in n2vc.items():
+ try:
+ val = float(val)
+ except ValueError:
+ if comm.rank == 0:
+ warnings.warn(
+ "WARNING: tried to compute mean on non-float {}={}".format(
+ name, val
+ )
+ )
+ else:
+ name2sum[name] += val * count
+ name2count[name] += count
+ return {name: name2sum[name] / name2count[name] for name in name2sum}
+ else:
+ return {}
+
+
+def configure(dir=None, format_strs=None, comm=None, log_suffix=""):
+ """
+ If comm is provided, average all numerical stats across that comm
+ """
+ if dir is None:
+ dir = os.getenv("OPENAI_LOGDIR")
+ if dir is None:
+ dir = osp.join(
+ tempfile.gettempdir(),
+ datetime.datetime.now().strftime("openai-%Y-%m-%d-%H-%M-%S-%f"),
+ )
+ assert isinstance(dir, str)
+ dir = os.path.expanduser(dir)
+ os.makedirs(os.path.expanduser(dir), exist_ok=True)
+
+ rank = get_rank_without_mpi_import()
+ if rank > 0:
+ log_suffix = log_suffix + "-rank%03i" % rank
+
+ if format_strs is None:
+ if rank == 0:
+ format_strs = os.getenv("OPENAI_LOG_FORMAT", "stdout,log,csv").split(",")
+ else:
+ format_strs = os.getenv("OPENAI_LOG_FORMAT_MPI", "log").split(",")
+ format_strs = filter(None, format_strs)
+ # st()
+ output_formats = [make_output_format(f, dir, log_suffix) for f in format_strs]
+
+ Logger.CURRENT = Logger(dir=dir, output_formats=output_formats, comm=comm)
+ if output_formats:
+ log("Logging to %s" % dir)
+
+
+def _configure_default_logger():
+ configure()
+ Logger.DEFAULT = Logger.CURRENT
+
+
+def reset():
+ if Logger.CURRENT is not Logger.DEFAULT:
+ Logger.CURRENT.close()
+ Logger.CURRENT = Logger.DEFAULT
+ log("Reset logger")
+
+
+@contextmanager
+def scoped_configure(dir=None, format_strs=None, comm=None):
+ prevlogger = Logger.CURRENT
+ configure(dir=dir, format_strs=format_strs, comm=comm)
+ try:
+ yield
+ finally:
+ Logger.CURRENT.close()
+ Logger.CURRENT = prevlogger
+
diff --git a/guided_diffusion/losses.py b/guided_diffusion/losses.py
new file mode 100644
index 0000000000000000000000000000000000000000..251e42e4f36a31bb5e1aeda874b3a45d722000a2
--- /dev/null
+++ b/guided_diffusion/losses.py
@@ -0,0 +1,77 @@
+"""
+Helpers for various likelihood-based losses. These are ported from the original
+Ho et al. diffusion models codebase:
+https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/utils.py
+"""
+
+import numpy as np
+
+import torch as th
+
+
+def normal_kl(mean1, logvar1, mean2, logvar2):
+ """
+ Compute the KL divergence between two gaussians.
+
+ Shapes are automatically broadcasted, so batches can be compared to
+ scalars, among other use cases.
+ """
+ tensor = None
+ for obj in (mean1, logvar1, mean2, logvar2):
+ if isinstance(obj, th.Tensor):
+ tensor = obj
+ break
+ assert tensor is not None, "at least one argument must be a Tensor"
+
+ # Force variances to be Tensors. Broadcasting helps convert scalars to
+ # Tensors, but it does not work for th.exp().
+ logvar1, logvar2 = [
+ x if isinstance(x, th.Tensor) else th.tensor(x).to(tensor)
+ for x in (logvar1, logvar2)
+ ]
+
+ return 0.5 * (
+ -1.0
+ + logvar2
+ - logvar1
+ + th.exp(logvar1 - logvar2)
+ + ((mean1 - mean2) ** 2) * th.exp(-logvar2)
+ )
+
+
+def approx_standard_normal_cdf(x):
+ """
+ A fast approximation of the cumulative distribution function of the
+ standard normal.
+ """
+ return 0.5 * (1.0 + th.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * th.pow(x, 3))))
+
+
+def discretized_gaussian_log_likelihood(x, *, means, log_scales):
+ """
+ Compute the log-likelihood of a Gaussian distribution discretizing to a
+ given image.
+
+    :param x: the target images. It is assumed that these were uint8 values,
+              rescaled to the range [-1, 1].
+ :param means: the Gaussian mean Tensor.
+ :param log_scales: the Gaussian log stddev Tensor.
+ :return: a tensor like x of log probabilities (in nats).
+ """
+ assert x.shape == means.shape == log_scales.shape
+ centered_x = x - means
+ inv_stdv = th.exp(-log_scales)
+ plus_in = inv_stdv * (centered_x + 1.0 / 255.0)
+ cdf_plus = approx_standard_normal_cdf(plus_in)
+ min_in = inv_stdv * (centered_x - 1.0 / 255.0)
+ cdf_min = approx_standard_normal_cdf(min_in)
+ log_cdf_plus = th.log(cdf_plus.clamp(min=1e-12))
+ log_one_minus_cdf_min = th.log((1.0 - cdf_min).clamp(min=1e-12))
+ cdf_delta = cdf_plus - cdf_min
+ log_probs = th.where(
+ x < -0.999,
+ log_cdf_plus,
+ th.where(x > 0.999, log_one_minus_cdf_min, th.log(cdf_delta.clamp(min=1e-12))),
+ )
+ assert log_probs.shape == x.shape
+ return log_probs
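+
+
+# Sanity-check sketch for normal_kl (follows from the closed form above):
+#
+#   kl = normal_kl(mean1=th.zeros(4), logvar1=th.zeros(4), mean2=0.0, logvar2=0.0)
+#   # KL(N(0, 1) || N(0, 1)) == 0, so kl is a zero tensor of shape [4].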
diff --git a/guided_diffusion/nn.py b/guided_diffusion/nn.py
new file mode 100644
index 0000000000000000000000000000000000000000..9fc05ff451c095b98e54a2098223d10816abc32d
--- /dev/null
+++ b/guided_diffusion/nn.py
@@ -0,0 +1,171 @@
+"""
+Various utilities for neural networks.
+"""
+
+import math
+
+import torch as th
+import torch.nn as nn
+
+
+# PyTorch 1.7 has SiLU, but we support PyTorch 1.5.
+class SiLU(nn.Module):
+ def forward(self, x):
+ return x * th.sigmoid(x)
+
+
+class GroupNorm32(nn.GroupNorm):
+ def forward(self, x):
+ return super().forward(x.float()).type(x.dtype)
+
+
+def conv_nd(dims, *args, **kwargs):
+ """
+ Create a 1D, 2D, or 3D convolution module.
+ """
+ if dims == 1:
+ return nn.Conv1d(*args, **kwargs)
+ elif dims == 2:
+ return nn.Conv2d(*args, **kwargs)
+ elif dims == 3:
+ return nn.Conv3d(*args, **kwargs)
+ raise ValueError(f"unsupported dimensions: {dims}")
+
+
+def linear(*args, **kwargs):
+ """
+ Create a linear module.
+ """
+ return nn.Linear(*args, **kwargs)
+
+
+def avg_pool_nd(dims, *args, **kwargs):
+ """
+ Create a 1D, 2D, or 3D average pooling module.
+ """
+ if dims == 1:
+ return nn.AvgPool1d(*args, **kwargs)
+ elif dims == 2:
+ return nn.AvgPool2d(*args, **kwargs)
+ elif dims == 3:
+ return nn.AvgPool3d(*args, **kwargs)
+ raise ValueError(f"unsupported dimensions: {dims}")
+
+
+def update_ema(target_params, source_params, rate=0.99):
+ """
+ Update target parameters to be closer to those of source parameters using
+ an exponential moving average.
+
+ :param target_params: the target parameter sequence.
+ :param source_params: the source parameter sequence.
+ :param rate: the EMA rate (closer to 1 means slower).
+ """
+ for targ, src in zip(target_params, source_params):
+ targ.detach().mul_(rate).add_(src, alpha=1 - rate)
+
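+# Illustrative usage sketch (`model` is a placeholder nn.Module):
+#
+#   ema_params = [p.clone().detach() for p in model.parameters()]
+#   update_ema(ema_params, model.parameters(), rate=0.9999)
+#   # After each optimizer step: ema <- 0.9999 * ema + 0.0001 * current, a
+#   # slow-moving average commonly used for sampling instead of the raw weights.
+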
+
+def zero_module(module):
+ """
+ Zero out the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().zero_()
+ return module
+
+
+def scale_module(module, scale):
+ """
+ Scale the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().mul_(scale)
+ return module
+
+
+def mean_flat(tensor):
+ """
+ Take the mean over all non-batch dimensions.
+ """
+ return tensor.mean(dim=list(range(1, len(tensor.shape))))
+
+
+def normalization(channels):
+ """
+ Make a standard normalization layer.
+
+ :param channels: number of input channels.
+ :return: an nn.Module for normalization.
+ """
+ return GroupNorm32(32, channels)
+
+
+def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False):
+ """
+ Create sinusoidal timestep embeddings.
+
+ :param timesteps: a 1-D Tensor of N indices, one per batch element.
+ These may be fractional.
+ :param dim: the dimension of the output.
+ :param max_period: controls the minimum frequency of the embeddings.
+    :param repeat_only: unused in this implementation; kept for API compatibility.
+    :return: an [N x dim] Tensor of positional embeddings.
+ """
+ half = dim // 2
+ freqs = th.exp(
+ -math.log(max_period) * th.arange(start=0, end=half, dtype=th.float32) / half
+ ).to(device=timesteps.device)
+ args = timesteps[:, None].float() * freqs[None]
+ embedding = th.cat([th.cos(args), th.sin(args)], dim=-1)
+ if dim % 2:
+ embedding = th.cat([embedding, th.zeros_like(embedding[:, :1])], dim=-1)
+ return embedding
+
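+# Illustrative usage sketch:
+#
+#   t = th.tensor([0, 250, 999])
+#   emb = timestep_embedding(t, dim=128)   # -> [3 x 128]
+#   # Each row is [cos(t * f_0), ..., cos(t * f_63), sin(t * f_0), ..., sin(t * f_63)]
+#   # with frequencies f_k = max_period ** (-k / 64), spanning 1 down to ~1/max_period.
+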
+
+def checkpoint(func, inputs, params, flag):
+ """
+ Evaluate a function without caching intermediate activations, allowing for
+ reduced memory at the expense of extra compute in the backward pass.
+
+ :param func: the function to evaluate.
+ :param inputs: the argument sequence to pass to `func`.
+ :param params: a sequence of parameters `func` depends on but does not
+ explicitly take as arguments.
+ :param flag: if False, disable gradient checkpointing.
+ """
+ if flag:
+ args = tuple(inputs) + tuple(params)
+ return CheckpointFunction.apply(func, len(inputs), *args)
+ else:
+ return func(*inputs)
+
+
+class CheckpointFunction(th.autograd.Function):
+ @staticmethod
+ @th.autocast(device_type='cuda')
+ def forward(ctx, run_function, length, *args):
+ ctx.run_function = run_function
+ ctx.input_tensors = list(args[:length])
+ ctx.input_params = list(args[length:])
+ with th.no_grad():
+ output_tensors = ctx.run_function(*ctx.input_tensors)
+ return output_tensors
+
+ @staticmethod
+ def backward(ctx, *output_grads):
+ ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors]
+ with th.enable_grad():
+ # Fixes a bug where the first op in run_function modifies the
+ # Tensor storage in place, which is not allowed for detach()'d
+ # Tensors.
+ shallow_copies = [x.view_as(x) for x in ctx.input_tensors]
+ output_tensors = ctx.run_function(*shallow_copies)
+ input_grads = th.autograd.grad(
+ output_tensors,
+ ctx.input_tensors + ctx.input_params,
+ output_grads,
+ allow_unused=True,
+ )
+ del ctx.input_tensors
+ del ctx.input_params
+ del output_tensors
+ return (None, None) + input_grads
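+
+
+# Illustrative usage sketch (the layer and input are placeholders):
+#
+#   layer = nn.Linear(16, 16).cuda()
+#   x = th.randn(2, 16, device="cuda", requires_grad=True)
+#   y = checkpoint(lambda a: layer(a), (x,), layer.parameters(), flag=True)
+#   # Activations inside the function are recomputed during backward instead
+#   # of being stored, trading extra compute for lower memory.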
diff --git a/guided_diffusion/resample.py b/guided_diffusion/resample.py
new file mode 100644
index 0000000000000000000000000000000000000000..c82eccdcd47c468d41e7cbe02de6a731f2c9bf81
--- /dev/null
+++ b/guided_diffusion/resample.py
@@ -0,0 +1,154 @@
+from abc import ABC, abstractmethod
+
+import numpy as np
+import torch as th
+import torch.distributed as dist
+
+
+def create_named_schedule_sampler(name, diffusion):
+ """
+ Create a ScheduleSampler from a library of pre-defined samplers.
+
+ :param name: the name of the sampler.
+ :param diffusion: the diffusion object to sample for.
+ """
+ if name == "uniform":
+ return UniformSampler(diffusion)
+ elif name == "loss-second-moment":
+ return LossSecondMomentResampler(diffusion)
+ else:
+ raise NotImplementedError(f"unknown schedule sampler: {name}")
+
+
+class ScheduleSampler(ABC):
+ """
+ A distribution over timesteps in the diffusion process, intended to reduce
+ variance of the objective.
+
+ By default, samplers perform unbiased importance sampling, in which the
+ objective's mean is unchanged.
+ However, subclasses may override sample() to change how the resampled
+ terms are reweighted, allowing for actual changes in the objective.
+ """
+
+ @abstractmethod
+ def weights(self):
+ """
+ Get a numpy array of weights, one per diffusion step.
+
+ The weights needn't be normalized, but must be positive.
+ """
+
+ def sample(self, batch_size, device):
+ """
+ Importance-sample timesteps for a batch.
+
+ :param batch_size: the number of timesteps.
+ :param device: the torch device to save to.
+ :return: a tuple (timesteps, weights):
+ - timesteps: a tensor of timestep indices.
+ - weights: a tensor of weights to scale the resulting losses.
+ """
+ w = self.weights()
+ p = w / np.sum(w)
+ indices_np = np.random.choice(len(p), size=(batch_size,), p=p)
+ indices = th.from_numpy(indices_np).long().to(device)
+ weights_np = 1 / (len(p) * p[indices_np])
+ weights = th.from_numpy(weights_np).float().to(device)
+ return indices, weights
+
+
+class UniformSampler(ScheduleSampler):
+ def __init__(self, diffusion):
+ self.diffusion = diffusion
+ self._weights = np.ones([diffusion.num_timesteps])
+
+ def weights(self):
+ return self._weights
+
+
+class LossAwareSampler(ScheduleSampler):
+ def update_with_local_losses(self, local_ts, local_losses):
+ """
+ Update the reweighting using losses from a model.
+
+ Call this method from each rank with a batch of timesteps and the
+ corresponding losses for each of those timesteps.
+ This method will perform synchronization to make sure all of the ranks
+ maintain the exact same reweighting.
+
+ :param local_ts: an integer Tensor of timesteps.
+ :param local_losses: a 1D Tensor of losses.
+ """
+ batch_sizes = [
+ th.tensor([0], dtype=th.int32, device=local_ts.device)
+ for _ in range(dist.get_world_size())
+ ]
+ dist.all_gather(
+ batch_sizes,
+ th.tensor([len(local_ts)], dtype=th.int32, device=local_ts.device),
+ )
+
+ # Pad all_gather batches to be the maximum batch size.
+ batch_sizes = [x.item() for x in batch_sizes]
+ max_bs = max(batch_sizes)
+
+        timestep_batches = [th.zeros(max_bs).to(local_ts) for _ in batch_sizes]
+        loss_batches = [th.zeros(max_bs).to(local_losses) for _ in batch_sizes]
+ dist.all_gather(timestep_batches, local_ts)
+ dist.all_gather(loss_batches, local_losses)
+ timesteps = [
+ x.item() for y, bs in zip(timestep_batches, batch_sizes) for x in y[:bs]
+ ]
+ losses = [x.item() for y, bs in zip(loss_batches, batch_sizes) for x in y[:bs]]
+ self.update_with_all_losses(timesteps, losses)
+
+ @abstractmethod
+ def update_with_all_losses(self, ts, losses):
+ """
+ Update the reweighting using losses from a model.
+
+ Sub-classes should override this method to update the reweighting
+ using losses from the model.
+
+ This method directly updates the reweighting without synchronizing
+ between workers. It is called by update_with_local_losses from all
+ ranks with identical arguments. Thus, it should have deterministic
+ behavior to maintain state across workers.
+
+ :param ts: a list of int timesteps.
+ :param losses: a list of float losses, one per timestep.
+ """
+
+
+class LossSecondMomentResampler(LossAwareSampler):
+ def __init__(self, diffusion, history_per_term=10, uniform_prob=0.001):
+ self.diffusion = diffusion
+ self.history_per_term = history_per_term
+ self.uniform_prob = uniform_prob
+ self._loss_history = np.zeros(
+ [diffusion.num_timesteps, history_per_term], dtype=np.float64
+ )
+        self._loss_counts = np.zeros([diffusion.num_timesteps], dtype=np.int64)  # np.int was removed in NumPy >= 1.24
+
+ def weights(self):
+ if not self._warmed_up():
+ return np.ones([self.diffusion.num_timesteps], dtype=np.float64)
+ weights = np.sqrt(np.mean(self._loss_history ** 2, axis=-1))
+ weights /= np.sum(weights)
+ weights *= 1 - self.uniform_prob
+ weights += self.uniform_prob / len(weights)
+ return weights
+
+ def update_with_all_losses(self, ts, losses):
+ for t, loss in zip(ts, losses):
+ if self._loss_counts[t] == self.history_per_term:
+ # Shift out the oldest loss term.
+ self._loss_history[t, :-1] = self._loss_history[t, 1:]
+ self._loss_history[t, -1] = loss
+ else:
+ self._loss_history[t, self._loss_counts[t]] = loss
+ self._loss_counts[t] += 1
+
+ def _warmed_up(self):
+ return (self._loss_counts == self.history_per_term).all()
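+
+
+# Illustrative usage sketch (`diffusion` is assumed to expose num_timesteps,
+# as elsewhere in this repo):
+#
+#   sampler = create_named_schedule_sampler("uniform", diffusion)
+#   t, weights = sampler.sample(batch_size=8, device=th.device("cuda"))
+#   # t: [8] timestep indices; weights: [8] importance weights (all 1.0 for
+#   # the uniform sampler, leaving the training objective unchanged).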
diff --git a/guided_diffusion/respace.py b/guided_diffusion/respace.py
new file mode 100644
index 0000000000000000000000000000000000000000..19b29db2bb029b74abaf15874dfb6668be8d365d
--- /dev/null
+++ b/guided_diffusion/respace.py
@@ -0,0 +1,135 @@
+import numpy as np
+import torch as th
+from pdb import set_trace as st
+
+from .gaussian_diffusion import GaussianDiffusion
+
+
+def space_timesteps(num_timesteps, section_counts):
+ """
+ Create a list of timesteps to use from an original diffusion process,
+ given the number of timesteps we want to take from equally-sized portions
+ of the original process.
+
+    For example, if there are 300 timesteps and the section counts are
+    [10, 15, 20], then the first 100 timesteps are strided to 10 timesteps,
+    the second 100 are strided to 15 timesteps, and the final 100 are strided
+    to 20 timesteps.
+
+ If the stride is a string starting with "ddim", then the fixed striding
+ from the DDIM paper is used, and only one section is allowed.
+
+ :param num_timesteps: the number of diffusion steps in the original
+ process to divide up.
+ :param section_counts: either a list of numbers, or a string containing
+ comma-separated numbers, indicating the step count
+ per section. As a special case, use "ddimN" where N
+ is a number of steps to use the striding from the
+ DDIM paper.
+ :return: a set of diffusion steps from the original process to use.
+ """
+ if isinstance(section_counts, str):
+ if section_counts.startswith("ddim"):
+ desired_count = int(section_counts[len("ddim") :])
+ for i in range(1, num_timesteps):
+ if len(range(0, num_timesteps, i)) == desired_count:
+ return set(range(0, num_timesteps, i))
+ raise ValueError(
+ f"cannot create exactly {num_timesteps} steps with an integer stride"
+ )
+ section_counts = [int(x) for x in section_counts.split(",")]
+ size_per = num_timesteps // len(section_counts)
+ extra = num_timesteps % len(section_counts)
+ start_idx = 0
+ all_steps = []
+ for i, section_count in enumerate(section_counts):
+ size = size_per + (1 if i < extra else 0)
+ if size < section_count:
+ raise ValueError(
+ f"cannot divide section of {size} steps into {section_count}"
+ )
+ if section_count <= 1:
+ frac_stride = 1
+ else:
+ frac_stride = (size - 1) / (section_count - 1)
+ cur_idx = 0.0
+ taken_steps = []
+ for _ in range(section_count):
+ taken_steps.append(start_idx + round(cur_idx))
+ cur_idx += frac_stride
+ all_steps += taken_steps
+ start_idx += size
+ return set(all_steps)
+
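+# Worked examples matching the docstring above:
+#
+#   space_timesteps(300, [10, 15, 20])
+#   # -> 45 steps in total: 10 from [0, 100), 15 from [100, 200), 20 from [200, 300)
+#
+#   space_timesteps(1000, "ddim50")
+#   # -> {0, 20, 40, ..., 980}: a fixed stride of 20, as in the DDIM paper
+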
+
+class SpacedDiffusion(GaussianDiffusion):
+ """
+ A diffusion process which can skip steps in a base diffusion process.
+
+ :param use_timesteps: a collection (sequence or set) of timesteps from the
+ original diffusion process to retain.
+ :param kwargs: the kwargs to create the base diffusion process.
+ """
+
+ def __init__(self, use_timesteps, **kwargs):
+ self.use_timesteps = set(use_timesteps)
+ self.timestep_map = []
+ self.original_num_steps = len(kwargs["betas"])
+
+ base_diffusion = GaussianDiffusion(**kwargs) # pylint: disable=missing-kwoa
+ last_alpha_cumprod = 1.0
+ new_betas = []
+ for i, alpha_cumprod in enumerate(base_diffusion.alphas_cumprod):
+ if i in self.use_timesteps:
+ new_betas.append(1 - alpha_cumprod / last_alpha_cumprod)
+ last_alpha_cumprod = alpha_cumprod
+ self.timestep_map.append(i)
+ kwargs["betas"] = np.array(new_betas)
+ super().__init__(**kwargs)
+
+ def p_mean_variance(
+ self, model, *args, **kwargs
+ ): # pylint: disable=signature-differs
+ return super().p_mean_variance(self._wrap_model(model), *args, **kwargs)
+
+ def training_losses(
+ self, model, *args, **kwargs
+ ): # pylint: disable=signature-differs
+ return super().training_losses(self._wrap_model(model), *args, **kwargs)
+
+ def condition_mean(self, cond_fn, *args, **kwargs):
+ return super().condition_mean(self._wrap_model(cond_fn), *args, **kwargs)
+
+ def condition_score(self, cond_fn, *args, **kwargs):
+ return super().condition_score(self._wrap_model(cond_fn), *args, **kwargs)
+
+ def _wrap_model(self, model):
+ if isinstance(model, _WrappedModel):
+ return model
+ return _WrappedModel(
+ model, self.timestep_map, self.rescale_timesteps, self.original_num_steps
+ )
+
+ def _scale_timesteps(self, t):
+ # Scaling is done by the wrapped model.
+ return t
+
+
+class _WrappedModel:
+ def __init__(self, model, timestep_map, rescale_timesteps, original_num_steps):
+ self.model = model
+ self.timestep_map = timestep_map
+ self.rescale_timesteps = rescale_timesteps
+ self.original_num_steps = original_num_steps
+
+ def __call__(self, x, ts, c=None, mixing_normal=False, **kwargs):
+ map_tensor = th.tensor(self.timestep_map, device=ts.device, dtype=ts.dtype)
+ new_ts = map_tensor[ts]
+ if self.rescale_timesteps:
+ new_ts = new_ts.float() * (1000.0 / self.original_num_steps)
+ # assert mixing_normal
+ new_ts = new_ts / self.original_num_steps # already respaced to 1000 steps
+ if mixing_normal:
+ self.mixing_logit = self.model.ddp_model(x=None, # will be queried in gaussian_diffusion.py
+ timesteps=None,
+ get_attr='mixing_logit')
+        return self.model.apply_model_inference(x, new_ts, c, **kwargs)  # send in "self", not "Unet", to use cldm
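+
+
+# Illustrative usage sketch (betas and the remaining kwargs are placeholders;
+# see script_util.create_gaussian_diffusion for the full construction path):
+#
+#   use_ts = space_timesteps(1000, "ddim250")
+#   diffusion = SpacedDiffusion(use_timesteps=use_ts, betas=betas, ...)
+#   # _WrappedModel remaps the 250 retained indices back onto the original
+#   # 1000-step schedule, so a model trained with 1000 steps can be sampled
+#   # with only 250.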
diff --git a/guided_diffusion/script_util.py b/guided_diffusion/script_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..680b6b0adb57181b6ee0c3bf390a3105d72e1e5b
--- /dev/null
+++ b/guided_diffusion/script_util.py
@@ -0,0 +1,729 @@
+import argparse
+import inspect
+
+from pdb import set_trace as st
+
+from cldm.cldm import ControlledUnetModel, ControlNet
+
+from . import gaussian_diffusion as gd
+from .respace import SpacedDiffusion, space_timesteps
+from .unet import SuperResModel, UNetModel, EncoderUNetModel, UNetModelWithHint
+import torch as th
+# from dit.dit_models_xformers import DiT_models, TextCondDiTBlock
+from dit.dit_models_xformers import TextCondDiTBlock, ImageCondDiTBlock, FinalLayer
+from dit.dit_trilatent import DiT_models as DiT_models_t23d
+from dit.dit_i23d import DiT_models as DiT_models_i23d
+
+# if th.cuda.is_available():
+# from xformers.triton import FusedLayerNorm as LayerNorm
+
+NUM_CLASSES = 1000
+
+
+def diffusion_defaults():
+ """
+ Defaults for image and classifier training.
+ """
+ return dict(
+ learn_sigma=False,
+ diffusion_steps=1000,
+ noise_schedule="linear",
+ standarization_xt=False,
+ timestep_respacing="",
+ use_kl=False,
+ predict_xstart=False,
+ predict_v=False,
+ rescale_timesteps=False,
+ rescale_learned_sigmas=False,
+ mixed_prediction=False, # ! to assign later
+ )
+
+
+def classifier_defaults():
+ """
+ Defaults for classifier models.
+ """
+ return dict(
+ image_size=64,
+ classifier_use_fp16=False,
+ classifier_width=128,
+ classifier_depth=2,
+ classifier_attention_resolutions="32,16,8", # 16
+ classifier_use_scale_shift_norm=True, # False
+ classifier_resblock_updown=True, # False
+ classifier_pool="attention",
+ )
+
+
+def control_net_defaults():
+ res = dict(
+ only_mid_control=False, # TODO
+ control_key='img',
+ normalize_clip_encoding=False, # zero-shot text inference
+ scale_clip_encoding=1.0,
+ cfg_dropout_prob=0.0, # dropout condition for CFG training
+ cond_key='caption',
+ )
+ return res
+
+
+def continuous_diffusion_defaults():
+ # NVlabs/LSGM/train_vada.py
+ res = dict(
+ sde_time_eps=1e-2,
+ sde_beta_start=0.1,
+ sde_beta_end=20.0,
+ sde_sde_type='vpsde',
+ sde_sigma2_0=0.0, # ?
+ iw_sample_p='drop_sigma2t_iw',
+ iw_sample_q='ll_iw',
+ iw_subvp_like_vp_sde=False,
+ train_vae=True,
+ pred_type='eps', # [x0, eps]
+ # joint_train=False,
+ p_rendering_loss=False,
+ unfix_logit=False,
+ loss_type='eps',
+ loss_weight='simple', # snr snr_sqrt sigmoid_snr
+ # train_vae_denoise_rendering=False,
+ diffusion_ce_anneal=True,
+ enable_mixing_normal=True,
+ )
+
+ return res
+
+
+def model_and_diffusion_defaults():
+ """
+ Defaults for image training.
+ """
+ res = dict(
+ # image_size=64,
+ diffusion_input_size=224,
+ num_channels=128,
+ num_res_blocks=2,
+ num_heads=4,
+ num_heads_upsample=-1,
+ num_head_channels=-1,
+ attention_resolutions="16,8",
+ channel_mult="",
+ dropout=0.0,
+ class_cond=False,
+ use_checkpoint=False,
+ use_scale_shift_norm=True,
+ resblock_updown=False,
+ use_fp16=False,
+ use_new_attention_order=False,
+ denoise_in_channels=3,
+ denoise_out_channels=3,
+ # ! controlnet args
+ create_controlnet=False,
+ create_dit=False,
+ i23d=False,
+ create_unet_with_hint=False,
+ dit_model_arch='DiT-L/2',
+ # ! ldm unet support
+ use_spatial_transformer=False, # custom transformer support
+ transformer_depth=1, # custom transformer support
+ context_dim=-1, # custom transformer support
+ pooling_ctx_dim=-1,
+ roll_out=False, # whether concat in batch, not channel
+        n_embed=None,  # custom support for prediction of discrete ids into codebook of first-stage VQ model
+ legacy=True,
+ mixing_logit_init=-6,
+ hint_channels=3,
+ # unconditional_guidance_scale=1.0,
+ # normalize_clip_encoding=False, # for zero-shot conditioning
+ )
+ res.update(diffusion_defaults())
+ # res.update(continuous_diffusion_defaults())
+ return res
+
+
+def classifier_and_diffusion_defaults():
+ res = classifier_defaults()
+ res.update(diffusion_defaults())
+ return res
+
+
+def create_model_and_diffusion(
+ # image_size,
+ diffusion_input_size,
+ class_cond,
+ learn_sigma,
+ num_channels,
+ num_res_blocks,
+ channel_mult,
+ num_heads,
+ num_head_channels,
+ num_heads_upsample,
+ attention_resolutions,
+ dropout,
+ diffusion_steps,
+ noise_schedule,
+ timestep_respacing,
+ use_kl,
+ predict_xstart,
+ predict_v,
+ rescale_timesteps,
+ rescale_learned_sigmas,
+ use_checkpoint,
+ use_scale_shift_norm,
+ resblock_updown,
+ use_fp16,
+ use_new_attention_order,
+ denoise_in_channels,
+ denoise_out_channels,
+ standarization_xt,
+ mixed_prediction,
+ # controlnet
+ create_controlnet,
+ # only_mid_control,
+ # control_key,
+ use_spatial_transformer,
+ transformer_depth,
+ context_dim,
+ pooling_ctx_dim,
+ n_embed,
+ legacy,
+ mixing_logit_init,
+ create_dit,
+ i23d,
+ create_unet_with_hint,
+ dit_model_arch,
+ roll_out,
+ hint_channels,
+ # unconditional_guidance_scale,
+ # normalize_clip_encoding,
+):
+ model = create_model(
+ diffusion_input_size,
+ num_channels,
+ num_res_blocks,
+ channel_mult=channel_mult,
+ learn_sigma=learn_sigma,
+ class_cond=class_cond,
+ use_checkpoint=use_checkpoint,
+ attention_resolutions=attention_resolutions,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ num_heads_upsample=num_heads_upsample,
+ use_scale_shift_norm=use_scale_shift_norm,
+ dropout=dropout,
+ resblock_updown=resblock_updown,
+ use_fp16=use_fp16,
+ use_new_attention_order=use_new_attention_order,
+ denoise_in_channels=denoise_in_channels,
+ denoise_out_channels=denoise_out_channels,
+ mixed_prediction=mixed_prediction,
+ create_controlnet=create_controlnet,
+ # only_mid_control=only_mid_control,
+ # control_key=control_key,
+ use_spatial_transformer=use_spatial_transformer,
+ transformer_depth=transformer_depth,
+ context_dim=context_dim,
+ pooling_ctx_dim=pooling_ctx_dim,
+ n_embed=n_embed,
+ legacy=legacy,
+ mixing_logit_init=mixing_logit_init,
+ create_dit=create_dit,
+ i23d=i23d,
+ create_unet_with_hint=create_unet_with_hint,
+ dit_model_arch=dit_model_arch,
+ roll_out=roll_out,
+ hint_channels=hint_channels,
+ # normalize_clip_encoding=normalize_clip_encoding,
+ )
+ diffusion = create_gaussian_diffusion(
+ diffusion_steps=diffusion_steps,
+ learn_sigma=learn_sigma,
+ noise_schedule=noise_schedule,
+ use_kl=use_kl,
+ predict_xstart=predict_xstart,
+ predict_v=predict_v,
+ rescale_timesteps=rescale_timesteps,
+ rescale_learned_sigmas=rescale_learned_sigmas,
+ timestep_respacing=timestep_respacing,
+ standarization_xt=standarization_xt,
+ )
+ return model, diffusion
+
+
+def create_model(
+ image_size,
+ num_channels,
+ num_res_blocks,
+ channel_mult="",
+ learn_sigma=False,
+ class_cond=False,
+ use_checkpoint=False,
+ attention_resolutions="16",
+ num_heads=1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ dropout=0,
+ resblock_updown=False,
+ use_fp16=False,
+ use_new_attention_order=False,
+ # denoise_in_channels=3,
+ denoise_in_channels=-1,
+ denoise_out_channels=3,
+ mixed_prediction=False,
+ create_controlnet=False,
+ create_dit=False,
+ # t23d=True,
+ i23d=False,
+ create_unet_with_hint=False,
+ dit_model_arch='DiT-L/2',
+ hint_channels=3,
+ use_spatial_transformer=False, # custom transformer support
+ transformer_depth=1, # custom transformer support
+ context_dim=None, # custom transformer support
+ pooling_ctx_dim=768,
+ n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model
+ legacy=True,
+ mixing_logit_init=-6,
+ roll_out=False,
+ # normalize_clip_encoding=False,
+):
+ if channel_mult == "":
+ if image_size == 512:
+ channel_mult = (0.5, 1, 1, 2, 2, 4, 4)
+ elif image_size == 448:
+ channel_mult = (0.5, 1, 1, 2, 2, 4, 4)
+ elif image_size == 320: # ffhq
+ channel_mult = (0.5, 1, 1, 2, 2, 4, 4)
+ elif image_size == 224 and denoise_in_channels == 144: # ffhq
+ channel_mult = (1, 1, 2, 3, 4, 4)
+ elif image_size == 224:
+ channel_mult = (1, 1, 2, 2, 4, 4)
+ elif image_size == 256:
+ channel_mult = (1, 1, 2, 2, 4, 4)
+ elif image_size == 128:
+ channel_mult = (1, 1, 2, 3, 4)
+ elif image_size == 64:
+ channel_mult = (1, 2, 3, 4)
+
+ elif image_size == 32: # https://github.com/CompVis/latent-diffusion/blob/a506df5756472e2ebaf9078affdde2c4f1502cd4/configs/latent-diffusion/lsun_churches-ldm-kl-8.yaml#L37
+ channel_mult = (1, 2, 4, 4)
+
+        elif image_size == 16:  # B,12,16,16; baseline check only, performance is poor.
+ channel_mult = (1, 2, 3, 4)
+ else:
+ raise ValueError(f"unsupported image size: {image_size}")
+ else:
+ channel_mult = tuple(
+ int(ch_mult) for ch_mult in channel_mult.split(","))
+
+ attention_ds = []
+ for res in attention_resolutions.split(","):
+ attention_ds.append(image_size // int(res))
+
+ if create_controlnet:
+
+ controlledUnetModel = ControlledUnetModel(
+ image_size=image_size,
+ in_channels=denoise_in_channels,
+ model_channels=num_channels,
+ # out_channels=(3 if not learn_sigma else 6),
+ out_channels=(denoise_out_channels
+ if not learn_sigma else denoise_out_channels * 2),
+ num_res_blocks=num_res_blocks,
+ attention_resolutions=tuple(attention_ds),
+ dropout=dropout,
+ channel_mult=channel_mult,
+ num_classes=(NUM_CLASSES if class_cond else None),
+ use_checkpoint=use_checkpoint,
+ use_fp16=use_fp16,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ num_heads_upsample=num_heads_upsample,
+ use_scale_shift_norm=use_scale_shift_norm,
+ resblock_updown=resblock_updown,
+ use_new_attention_order=use_new_attention_order,
+ mixed_prediction=mixed_prediction,
+ # ldm support
+ use_spatial_transformer=use_spatial_transformer,
+ transformer_depth=transformer_depth,
+ context_dim=context_dim,
+ pooling_ctx_dim=pooling_ctx_dim,
+ n_embed=n_embed,
+ legacy=legacy,
+ mixing_logit_init=mixing_logit_init,
+ roll_out=roll_out
+ )
+
+ controlNet = ControlNet(
+ image_size=image_size,
+ in_channels=denoise_in_channels,
+ model_channels=num_channels,
+ # ! condition channels
+ hint_channels=hint_channels,
+ # out_channels=(3 if not learn_sigma else 6),
+ # out_channels=(denoise_out_channels
+ # if not learn_sigma else denoise_out_channels * 2),
+ num_res_blocks=num_res_blocks,
+ attention_resolutions=tuple(attention_ds),
+ dropout=dropout,
+ channel_mult=channel_mult,
+ # num_classes=(NUM_CLASSES if class_cond else None),
+ use_checkpoint=use_checkpoint,
+ use_fp16=use_fp16,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ num_heads_upsample=num_heads_upsample,
+ use_scale_shift_norm=use_scale_shift_norm,
+ resblock_updown=resblock_updown,
+ use_new_attention_order=use_new_attention_order,
+ roll_out=roll_out
+ )
+ # mixed_prediction=mixed_prediction)
+
+ return controlledUnetModel, controlNet
+
+ elif create_dit:
+ if i23d:
+ return DiT_models_i23d[dit_model_arch](
+ input_size=image_size,
+ num_classes=0,
+ learn_sigma=learn_sigma,
+ in_channels=denoise_in_channels,
+ context_dim=context_dim, # add CLIP text embedding
+ roll_out=roll_out,
+ # vit_blk=ImageCondDiTBlock,
+ pooling_ctx_dim=pooling_ctx_dim,)
+ else: # t23d
+ return DiT_models_t23d[dit_model_arch](
+ input_size=image_size,
+ num_classes=0,
+ learn_sigma=learn_sigma,
+ in_channels=denoise_in_channels,
+ context_dim=context_dim, # add CLIP text embedding
+ roll_out=roll_out,
+ # vit_blk=TextCondDiTBlock
+ )
+ else:
+
+ if create_unet_with_hint:
+ unet_cls = UNetModelWithHint
+ else:
+ unet_cls = UNetModel
+
+ # st()
+ return unet_cls(
+ image_size=image_size,
+ in_channels=denoise_in_channels,
+ model_channels=num_channels,
+ # out_channels=(3 if not learn_sigma else 6),
+ out_channels=(denoise_out_channels
+ if not learn_sigma else denoise_out_channels * 2),
+ num_res_blocks=num_res_blocks,
+ attention_resolutions=tuple(attention_ds),
+ dropout=dropout,
+ channel_mult=channel_mult,
+ num_classes=(NUM_CLASSES if class_cond else None),
+ use_checkpoint=use_checkpoint,
+ use_fp16=use_fp16,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ num_heads_upsample=num_heads_upsample,
+ use_scale_shift_norm=use_scale_shift_norm,
+ resblock_updown=resblock_updown,
+ use_new_attention_order=use_new_attention_order,
+ mixed_prediction=mixed_prediction,
+ # ldm support
+ use_spatial_transformer=use_spatial_transformer,
+ transformer_depth=transformer_depth,
+ context_dim=context_dim,
+ pooling_ctx_dim=pooling_ctx_dim,
+ n_embed=n_embed,
+ legacy=legacy,
+ mixing_logit_init=mixing_logit_init,
+ roll_out=roll_out,
+ hint_channels=hint_channels,
+ # normalize_clip_encoding=normalize_clip_encoding,
+ )
+
+
+def create_classifier_and_diffusion(
+ image_size,
+ classifier_use_fp16,
+ classifier_width,
+ classifier_depth,
+ classifier_attention_resolutions,
+ classifier_use_scale_shift_norm,
+ classifier_resblock_updown,
+ classifier_pool,
+ learn_sigma,
+ diffusion_steps,
+ noise_schedule,
+ timestep_respacing,
+ use_kl,
+ predict_xstart,
+ rescale_timesteps,
+ rescale_learned_sigmas,
+):
+ classifier = create_classifier(
+ image_size,
+ classifier_use_fp16,
+ classifier_width,
+ classifier_depth,
+ classifier_attention_resolutions,
+ classifier_use_scale_shift_norm,
+ classifier_resblock_updown,
+ classifier_pool,
+ )
+ diffusion = create_gaussian_diffusion(
+        diffusion_steps=diffusion_steps,
+ learn_sigma=learn_sigma,
+ noise_schedule=noise_schedule,
+ use_kl=use_kl,
+ predict_xstart=predict_xstart,
+ rescale_timesteps=rescale_timesteps,
+ rescale_learned_sigmas=rescale_learned_sigmas,
+ timestep_respacing=timestep_respacing,
+ )
+ return classifier, diffusion
+
+
+def create_classifier(
+ image_size,
+ classifier_use_fp16,
+ classifier_width,
+ classifier_depth,
+ classifier_attention_resolutions,
+ classifier_use_scale_shift_norm,
+ classifier_resblock_updown,
+ classifier_pool,
+):
+ if image_size == 512:
+ channel_mult = (0.5, 1, 1, 2, 2, 4, 4)
+ elif image_size == 256:
+ channel_mult = (1, 1, 2, 2, 4, 4)
+ elif image_size == 128:
+ channel_mult = (1, 1, 2, 3, 4)
+ elif image_size == 64:
+ channel_mult = (1, 2, 3, 4)
+ else:
+ raise ValueError(f"unsupported image size: {image_size}")
+
+ attention_ds = []
+ for res in classifier_attention_resolutions.split(","):
+ attention_ds.append(image_size // int(res))
+
+ return EncoderUNetModel(
+ image_size=image_size,
+ in_channels=3,
+ model_channels=classifier_width,
+ out_channels=1000,
+ num_res_blocks=classifier_depth,
+ attention_resolutions=tuple(attention_ds),
+ channel_mult=channel_mult,
+ use_fp16=classifier_use_fp16,
+ num_head_channels=64,
+ use_scale_shift_norm=classifier_use_scale_shift_norm,
+ resblock_updown=classifier_resblock_updown,
+ pool=classifier_pool,
+ )
+
+
+def sr_model_and_diffusion_defaults():
+ res = model_and_diffusion_defaults()
+ res["large_size"] = 256
+ res["small_size"] = 64
+ arg_names = inspect.getfullargspec(sr_create_model_and_diffusion)[0]
+ for k in res.copy().keys():
+ if k not in arg_names:
+ del res[k]
+ return res
+
+
+def sr_create_model_and_diffusion(
+ large_size,
+ small_size,
+ class_cond,
+ learn_sigma,
+ num_channels,
+ num_res_blocks,
+ num_heads,
+ num_head_channels,
+ num_heads_upsample,
+ attention_resolutions,
+ dropout,
+ diffusion_steps,
+ noise_schedule,
+ timestep_respacing,
+ use_kl,
+ predict_xstart,
+ rescale_timesteps,
+ rescale_learned_sigmas,
+ use_checkpoint,
+ use_scale_shift_norm,
+ resblock_updown,
+ use_fp16,
+):
+ model = sr_create_model(
+ large_size,
+ small_size,
+ num_channels,
+ num_res_blocks,
+ learn_sigma=learn_sigma,
+ class_cond=class_cond,
+ use_checkpoint=use_checkpoint,
+ attention_resolutions=attention_resolutions,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ num_heads_upsample=num_heads_upsample,
+ use_scale_shift_norm=use_scale_shift_norm,
+ dropout=dropout,
+ resblock_updown=resblock_updown,
+ use_fp16=use_fp16,
+ )
+ diffusion = create_gaussian_diffusion(
+        diffusion_steps=diffusion_steps,
+ learn_sigma=learn_sigma,
+ noise_schedule=noise_schedule,
+ use_kl=use_kl,
+ predict_xstart=predict_xstart,
+ rescale_timesteps=rescale_timesteps,
+ rescale_learned_sigmas=rescale_learned_sigmas,
+ timestep_respacing=timestep_respacing,
+ )
+ return model, diffusion
+
+
+def sr_create_model(
+ large_size,
+ small_size,
+ num_channels,
+ num_res_blocks,
+ learn_sigma,
+ class_cond,
+ use_checkpoint,
+ attention_resolutions,
+ num_heads,
+ num_head_channels,
+ num_heads_upsample,
+ use_scale_shift_norm,
+ dropout,
+ resblock_updown,
+ use_fp16,
+):
+ _ = small_size # hack to prevent unused variable
+
+ if large_size == 512:
+ channel_mult = (1, 1, 2, 2, 4, 4)
+ elif large_size == 256:
+ channel_mult = (1, 1, 2, 2, 4, 4)
+ elif large_size == 64:
+ channel_mult = (1, 2, 3, 4)
+ else:
+ raise ValueError(f"unsupported large size: {large_size}")
+
+ attention_ds = []
+ for res in attention_resolutions.split(","):
+ attention_ds.append(large_size // int(res))
+
+ return SuperResModel(
+ image_size=large_size,
+ in_channels=3,
+ model_channels=num_channels,
+ out_channels=(3 if not learn_sigma else 6),
+ num_res_blocks=num_res_blocks,
+ attention_resolutions=tuple(attention_ds),
+ dropout=dropout,
+ channel_mult=channel_mult,
+ num_classes=(NUM_CLASSES if class_cond else None),
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ num_heads_upsample=num_heads_upsample,
+ use_scale_shift_norm=use_scale_shift_norm,
+ resblock_updown=resblock_updown,
+ use_fp16=use_fp16,
+ )
+
+
+def create_gaussian_diffusion(
+ *,
+ diffusion_steps=1000,
+ learn_sigma=False,
+ sigma_small=False,
+ noise_schedule="linear",
+ use_kl=False,
+ predict_xstart=False,
+ predict_v=False,
+ rescale_timesteps=False,
+ rescale_learned_sigmas=False,
+ timestep_respacing="",
+ standarization_xt=False,
+):
+ betas = gd.get_named_beta_schedule(noise_schedule, diffusion_steps)
+ if use_kl:
+ loss_type = gd.LossType.RESCALED_KL
+ elif rescale_learned_sigmas:
+ loss_type = gd.LossType.RESCALED_MSE
+ else:
+ loss_type = gd.LossType.MSE # * used here.
+ if not timestep_respacing:
+ timestep_respacing = [diffusion_steps]
+
+ if predict_xstart:
+ model_mean_type = gd.ModelMeanType.START_X
+ elif predict_v:
+ model_mean_type = gd.ModelMeanType.V
+ else:
+ model_mean_type = gd.ModelMeanType.EPSILON
+
+ return SpacedDiffusion(
+ use_timesteps=space_timesteps(diffusion_steps, timestep_respacing),
+ betas=betas,
+ model_mean_type=model_mean_type,
+ model_var_type=((
+ gd.ModelVarType.FIXED_LARGE # * used here
+ if not sigma_small else gd.ModelVarType.FIXED_SMALL)
+ if not learn_sigma else gd.ModelVarType.LEARNED_RANGE),
+ loss_type=loss_type,
+ rescale_timesteps=rescale_timesteps,
+ standarization_xt=standarization_xt,
+ )
+
+
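+# A minimal usage sketch (illustrative only; not called by any training
+# script here): build a 1000-step diffusion with learned sigmas, respaced
+# to 250 sampling steps.
+def _example_create_gaussian_diffusion():
+ return create_gaussian_diffusion(
+ diffusion_steps=1000,
+ learn_sigma=True,
+ noise_schedule="linear",
+ timestep_respacing="250",
+ )
+
+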
+def add_dict_to_argparser(parser, default_dict):
+ for k, v in default_dict.items():
+ v_type = type(v)
+ if v is None:
+ v_type = str
+ elif isinstance(v, bool):
+ v_type = str2bool
+ parser.add_argument(f"--{k}", default=v, type=v_type)
+
+
+def args_to_dict(args, keys):
+ return {k: getattr(args, k) for k in keys}
+
+
+def str2bool(v):
+ """
+ https://stackoverflow.com/questions/15008758/parsing-boolean-values-with-argparse
+ """
+ if isinstance(v, bool):
+ return v
+ if v.lower() in ("yes", "true", "t", "y", "1"):
+ return True
+ elif v.lower() in ("no", "false", "f", "n", "0"):
+ return False
+ else:
+ raise argparse.ArgumentTypeError("boolean value expected")
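+
+
+# Illustrative sketch (not invoked anywhere): add_dict_to_argparser wires
+# str2bool in as the type for boolean defaults, so "--use_fp16 true" parses
+# to a real Python bool rather than a truthy string.
+def _example_argparser_usage():
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, dict(use_fp16=False, lr=1e-4))
+ args = parser.parse_args(["--use_fp16", "true", "--lr", "3e-4"])
+ return args_to_dict(args, ["use_fp16", "lr"]) # {'use_fp16': True, 'lr': 0.0003}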
diff --git a/guided_diffusion/train_util.py b/guided_diffusion/train_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..2593ada9076b000c35f220cac6971653e8d82748
--- /dev/null
+++ b/guided_diffusion/train_util.py
@@ -0,0 +1,827 @@
+import copy
+from tqdm import tqdm, trange
+import imageio
+from pdb import set_trace as st
+import functools
+import os
+import numpy as np
+from PIL import Image
+
+import blobfile as bf
+import torch as th
+import torch.distributed as dist
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+import matplotlib.pyplot as plt
+from torch.optim import AdamW
+
+from . import dist_util, logger
+from .fp16_util import MixedPrecisionTrainer
+from .nn import update_ema
+from .resample import LossAwareSampler, UniformSampler
+
+from pathlib import Path
+
+# For ImageNet experiments, this was a good default value.
+# We found that the lg_loss_scale quickly climbed to
+# 20-21 within the first ~1K steps of training.
+INITIAL_LOG_LOSS_SCALE = 20.0
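+# (In the legacy fp16 path the loss is multiplied by 2 ** lg_loss_scale
+# before backward, so 20 corresponds to scaling by roughly 1e6.)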
+
+
+class TrainLoop:
+
+ def __init__(
+ self,
+ *,
+ model,
+ diffusion,
+ data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ schedule_sampler=None,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ use_amp=False,
+ model_name='ddpm',
+ train_vae=True,
+ compile=False,
+ clip_grad_throld=1.0,
+ **kwargs
+ ):
+
+ self.kwargs = kwargs
+ self.clip_grad_throld = clip_grad_throld
+ self.pool_512 = th.nn.AdaptiveAvgPool2d((512, 512))
+ self.pool_256 = th.nn.AdaptiveAvgPool2d((256, 256))
+ self.pool_128 = th.nn.AdaptiveAvgPool2d((128, 128))
+ self.pool_64 = th.nn.AdaptiveAvgPool2d((64, 64))
+
+ self.use_amp = use_amp
+
+ self.dtype = th.float32
+ # if use_amp:
+ # if th.backends.cuda.matmul.allow_tf32: # a100
+ # self.dtype = th.bfloat16
+ # else:
+ # self.dtype = th.float16
+ # else:
+
+ if use_amp:
+ if th.cuda.get_device_capability(0)[0] < 8:
+ self.dtype = th.float16 # e.g., v100
+ else:
+ self.dtype = th.bfloat16 # e.g., a100 / a6000
+
+ self.model_name = model_name
+ self.model = model
+
+ self.diffusion = diffusion
+ self.data = data
+ self.batch_size = batch_size
+ self.microbatch = microbatch if microbatch > 0 else batch_size
+ self.lr = lr
+ self.ema_rate = ([ema_rate] if isinstance(ema_rate, float) else
+ [float(x) for x in ema_rate.split(",")])
+ self.log_interval = log_interval
+ self.save_interval = save_interval
+ self.resume_checkpoint = resume_checkpoint
+ self.use_fp16 = use_fp16
+ self.fp16_scale_growth = fp16_scale_growth
+ self.schedule_sampler = schedule_sampler or UniformSampler(diffusion)
+ self.weight_decay = weight_decay
+ self.lr_anneal_steps = lr_anneal_steps
+
+ self.step = 0
+ self.resume_step = 0
+ self.global_batch = self.batch_size * dist.get_world_size()
+ self.train_vae = train_vae
+
+ self.sync_cuda = th.cuda.is_available()
+ self.triplane_scaling_divider = 1.0
+ self.latent_name = 'latent_normalized_2Ddiffusion' # normalized triplane latent
+ self.render_latent_behaviour = 'decode_after_vae' # directly render using triplane operations
+ self._setup_model()
+ self._load_model()
+ self._setup_opt()
+
+ def _load_model(self):
+ self._load_and_sync_parameters()
+
+ def _setup_opt(self):
+ self.opt = AdamW(self.mp_trainer.master_params,
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+
+ def _setup_model(self):
+
+ # st()
+ self.mp_trainer = MixedPrecisionTrainer(
+ model=self.model,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=self.fp16_scale_growth,
+ use_amp=self.use_amp,
+ model_name=self.model_name,
+ clip_grad_throld=self.clip_grad_throld,
+ )
+
+ if self.resume_step:
+ self._load_optimizer_state()
+ # Model was resumed, either due to a restart or a checkpoint
+ # being specified at the command line.
+ self.ema_params = [
+ self._load_ema_parameters(rate) for rate in self.ema_rate
+ ]
+ else:
+ self.ema_params = [
+ copy.deepcopy(self.mp_trainer.master_params)
+ for _ in range(len(self.ema_rate))
+ ]
+
+ # for compatibility
+
+ # print('creating DDP')
+ if th.cuda.is_available():
+ self.use_ddp = True
+ self.ddpm_model = self.model
+ self.ddp_model = DDP(
+ self.model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ if dist.get_world_size() > 1:
+ logger.warn("Distributed training requires CUDA. "
+ "Gradients will not be synchronized properly!")
+ self.use_ddp = False
+ self.ddp_model = self.model
+ # print('creating DDP done')
+
+ # if compile:
+ # self.model = th.compile(self.model) # some op will break graph now
+ # logger.warn("compiling...")
+
+
+ def _load_and_sync_parameters(self):
+ # pass the configured checkpoint explicitly; find_resume_checkpoint always
+ # returns a (path, step) tuple, so an "or" fallback here would be dead code
+ resume_checkpoint, resume_step = find_resume_checkpoint(
+ self.resume_checkpoint)
+
+ if resume_checkpoint:
+ if not Path(resume_checkpoint).exists():
+ logger.log(
+ f"failed to load model from checkpoint: {resume_checkpoint}, not exist"
+ )
+ return
+
+ # self.resume_step = parse_resume_step_from_filename(resume_checkpoint)
+ self.resume_step = resume_step # TODO, EMA part
+ if dist.get_rank() == 0:
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ # if model is None:
+ # model = self.model
+ self.model.load_state_dict(
+ dist_util.load_state_dict(
+ resume_checkpoint,
+ map_location=dist_util.dev(),
+ ))
+
+ # ! debugging, remove to check which key fails.
+ dist_util.sync_params(self.model.parameters())
+ # dist_util.sync_params(self.model.named_parameters())
+
+ def _load_ema_parameters(self,
+ rate,
+ model=None,
+ mp_trainer=None,
+ model_name='ddpm'):
+
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer
+ if model is None:
+ model = self.model
+
+ ema_params = copy.deepcopy(mp_trainer.master_params)
+
+ main_checkpoint, _ = find_resume_checkpoint(
+ self.resume_checkpoint, model_name)
+ ema_checkpoint = find_ema_checkpoint(main_checkpoint, self.resume_step,
+ rate, model_name)
+ if ema_checkpoint:
+
+ if dist_util.get_rank() == 0:
+
+ if not Path(ema_checkpoint).exists():
+ logger.log(
+ f"failed to load EMA from checkpoint: {ema_checkpoint}, not exist"
+ )
+ return
+
+ logger.log(f"loading EMA from checkpoint: {ema_checkpoint}...")
+
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ state_dict = dist_util.load_state_dict(
+ ema_checkpoint, map_location=map_location)
+
+ model_ema_state_dict = model.state_dict()
+
+ for k, v in state_dict.items():
+ if k in model_ema_state_dict.keys() and v.size(
+ ) == model_ema_state_dict[k].size():
+ model_ema_state_dict[k] = v
+
+ # elif 'IN' in k and model_name == 'rec' and getattr(model.decoder, 'decomposed_IN', False):
+ # model_ema_state_dict[k.replace('IN', 'superresolution.norm.norm_layer')] = v # decomposed IN
+
+ else:
+ print('ignore key: ', k, ": ", v.size())
+
+ ema_params = mp_trainer.state_dict_to_master_params(
+ model_ema_state_dict)
+
+ del state_dict
+
+ # print('ema mark 3, ', model_name, flush=True)
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(ema_params)
+ # print('ema mark 4, ', model_name, flush=True)
+ # del ema_params
+ return ema_params
+
+ def _load_ema_parameters_freezeAE(
+ self,
+ rate,
+ model,
+ model_name='rec'):
+
+ # the AE is frozen here, so start from the trainer's own master params as
+ # the EMA state; entries found in the checkpoint overwrite it below
+ ema_params = copy.deepcopy(self.mp_trainer.master_params)
+
+ main_checkpoint, _ = find_resume_checkpoint(
+ self.resume_checkpoint, model_name)
+ ema_checkpoint = find_ema_checkpoint(main_checkpoint, self.resume_step,
+ rate, model_name)
+ if ema_checkpoint:
+
+ if dist_util.get_rank() == 0:
+
+ if not Path(ema_checkpoint).exists():
+ logger.log(
+ f"failed to load EMA from checkpoint: {ema_checkpoint}, not exist"
+ )
+ return
+
+ logger.log(f"loading EMA from checkpoint: {ema_checkpoint}...")
+
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ state_dict = dist_util.load_state_dict(
+ ema_checkpoint, map_location=map_location)
+
+ model_ema_state_dict = model.state_dict()
+
+ for k, v in state_dict.items():
+ if k in model_ema_state_dict.keys() and v.size(
+ ) == model_ema_state_dict[k].size():
+ model_ema_state_dict[k] = v
+ else:
+ print('ignore key: ', k, ": ", v.size())
+
+ ema_params = self.mp_trainer.state_dict_to_master_params(
+ model_ema_state_dict)
+
+ del state_dict
+
+ # print('ema mark 3, ', model_name, flush=True)
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(ema_params)
+ # print('ema mark 4, ', model_name, flush=True)
+ # del ema_params
+ return ema_params
+
+ def _load_optimizer_state(self):
+ main_checkpoint, _ = find_resume_checkpoint(self.resume_checkpoint)
+ opt_checkpoint = bf.join(bf.dirname(main_checkpoint),
+ f"opt{self.resume_step:06}.pt")
+ if bf.exists(opt_checkpoint):
+ logger.log(
+ f"loading optimizer state from checkpoint: {opt_checkpoint}")
+ state_dict = dist_util.load_state_dict(
+ opt_checkpoint, map_location=dist_util.dev())
+ self.opt.load_state_dict(state_dict)
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+ batch, cond = next(self.data)
+ self.run_step(batch, cond)
+ if self.step % self.log_interval == 0:
+ logger.dumpkvs()
+ if self.step % self.save_interval == 0:
+ self.save()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+ self.step += 1
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+
+ def run_step(self, batch, cond):
+ self.forward_backward(batch, cond)
+ took_step = self.mp_trainer.optimize(self.opt)
+ if took_step:
+ self._update_ema()
+ self._anneal_lr()
+ self.log_step()
+
+ def forward_backward(self, batch, cond):
+ self.mp_trainer.zero_grad()
+ for i in range(0, batch.shape[0], self.microbatch):
+
+ # st()
+ # autocast expects a device-type string; honour the fp16/bf16 choice
+ # made in __init__ instead of hard-coding fp16
+ with th.autocast(device_type='cuda',
+ dtype=self.dtype if self.use_amp else th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ micro = batch[i:i + self.microbatch].to(dist_util.dev())
+ micro_cond = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in cond.items()
+ }
+ last_batch = (i + self.microbatch) >= batch.shape[0]
+ t, weights = self.schedule_sampler.sample(
+ micro.shape[0], dist_util.dev())
+
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ micro,
+ t,
+ model_kwargs=micro_cond,
+ )
+
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ else:
+ with self.ddp_model.no_sync():
+ losses = compute_losses()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ loss = (losses["loss"] * weights).mean()
+ log_loss_dict(self.diffusion, t,
+ {k: v * weights
+ for k, v in losses.items()})
+
+ self.mp_trainer.backward(loss)
+
+ def _update_ema(self):
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ update_ema(params, self.mp_trainer.master_params, rate=rate)
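+ # update_ema computes, per tensor: ema = rate * ema + (1 - rate) * param,
+ # so e.g. rate=0.9999 keeps a slowly-moving average of the weights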
+
+ def _anneal_lr(self):
+ if not self.lr_anneal_steps:
+ return
+ frac_done = (self.step + self.resume_step) / self.lr_anneal_steps
+ lr = self.lr * (1 - frac_done)
+ for param_group in self.opt.param_groups:
+ param_group["lr"] = lr
+
+ def log_step(self):
+ logger.logkv("step", self.step + self.resume_step)
+ logger.logkv("samples",
+ (self.step + self.resume_step + 1) * self.global_batch)
+
+ @th.no_grad()
+ def _make_vis_img(self, pred):
+
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+
+ pred_depth = pred_depth.cpu()[0].permute(1, 2, 0).numpy()
+ pred_depth = (plt.cm.viridis(pred_depth[..., 0])[..., :3]) * 2 - 1
+ pred_depth = th.from_numpy(pred_depth).to(
+ pred['image_raw'].device).permute(2, 0, 1).unsqueeze(0)
+ # rend_normal = pred['rend_normal']
+
+ # if 'image_sr' in pred:
+
+ # gen_img = pred['image_sr']
+
+ # if pred['image_sr'].shape[-1] == 512:
+
+ # pred_vis = th.cat([
+ # micro['img_sr'],
+ # self.pool_512(pred['image_raw']), gen_img,
+ # self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # elif pred['image_sr'].shape[-1] == 128:
+
+ # pred_vis = th.cat([
+ # micro['img_sr'],
+ # self.pool_128(pred['image_raw']), pred['image_sr'],
+ # self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # else:
+ gen_img = pred['image_raw']
+
+ pred_vis = th.cat(
+ [
+ gen_img,
+ # rend_normal,
+ pred_depth,
+ ],
+ dim=-1) # B, 3, H, W
+
+ return pred_vis
+
+ @th.inference_mode()
+ def render_video_given_triplane(self,
+ planes,
+ rec_model,
+ name_prefix='0',
+ save_img=False,
+ render_reference=None,
+ export_mesh=False):
+
+ # rescale the sampled latent back to the VAE range. With clip_denoised=True
+ # the sampled planes lie in [-1, 1], so values beyond +-1 std are discarded
+ # in this version; move to IN for later experiments.
+ planes *= self.triplane_scaling_divider
+
+ # sr_w_code = getattr(self.ddp_rec_model.module.decoder, 'w_avg', None)
+ # sr_w_code = None
+ batch_size = planes.shape[0]
+
+ # if sr_w_code is not None:
+ # sr_w_code = sr_w_code.reshape(1, 1,
+ # -1).repeat_interleave(batch_size, 0)
+
+ # used during diffusion sampling inference
+ # if not save_img:
+
+ # ! mesh
+
+ if planes.shape[1] == 16: # ffhq/car
+ ddpm_latent = {
+ self.latent_name: planes[:, :12],
+ 'bg_plane': planes[:, 12:16],
+ }
+ else:
+ ddpm_latent = {
+ self.latent_name: planes,
+ }
+
+ ddpm_latent.update(rec_model(latent=ddpm_latent, behaviour='decode_after_vae_no_render'))
+
+ if export_mesh:
+ mesh_size = 384 # marching-cubes grid resolution (256/320/512 also viable)
+ mesh_thres = 10 # iso-surface threshold; requires tuning (3 and 5 also tried)
+ import mcubes
+ import trimesh
+ dump_path = f'{logger.get_dir()}/mesh/'
+
+ os.makedirs(dump_path, exist_ok=True)
+
+ grid_out = rec_model(
+ latent=ddpm_latent,
+ grid_size=mesh_size,
+ behaviour='triplane_decode_grid',
+ )
+
+ vtx, faces = mcubes.marching_cubes(grid_out['sigma'].squeeze(0).squeeze(-1).cpu().numpy(), mesh_thres)
+ vtx = vtx / (mesh_size - 1) * 2 - 1
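+ # mcubes returns vertices in grid-index coordinates [0, mesh_size - 1];
+ # this maps them into the [-1, 1] cube the density grid was queried on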
+
+ # vtx_tensor = th.tensor(vtx, dtype=th.float32, device=dist_util.dev()).unsqueeze(0)
+ # vtx_colors = self.model.synthesizer.forward_points(planes, vtx_tensor)['rgb'].squeeze(0).cpu().numpy() # (0, 1)
+ # vtx_colors = (vtx_colors * 255).astype(np.uint8)
+
+ # mesh = trimesh.Trimesh(vertices=vtx, faces=faces, vertex_colors=vtx_colors)
+ mesh = trimesh.Trimesh(vertices=vtx, faces=faces,)
+
+ mesh_dump_path = os.path.join(dump_path, f'{name_prefix}.ply')
+ mesh.export(mesh_dump_path, 'ply')
+
+ print(f"Mesh dumped to {dump_path}")
+ del grid_out, mesh
+ th.cuda.empty_cache()
+ # return
+
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4',
+ mode='I',
+ fps=15,
+ codec='libx264')
+
+ # ddpm_latent was already assembled and decoded above; reuse it here
+ # rather than running the VAE decode a second time
+
+ # planes = planes.repeat_interleave(micro['c'].shape[0], 0)
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ # micro_batchsize = 2
+ # micro_batchsize = batch_size
+
+ if render_reference is None:
+ render_reference = self.eval_data # compat
+ else: # use train_traj
+ for key in ['ins', 'bbox', 'caption']:
+ if key in render_reference:
+ render_reference.pop(key)
+ # render_reference.pop('bbox')
+ # render_reference.pop('caption')
+
+ # compat: build a 40-frame list of per-view dicts for the enumerate below
+ render_reference = [{k: v[idx:idx + 1] for k, v in render_reference.items()} for idx in range(40)]
+
+ # for i, batch in enumerate(tqdm(self.eval_data)):
+ for i, batch in enumerate(tqdm(render_reference)):
+ micro = {
+ k: v.to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+ # micro = {'c': batch['c'].to(dist_util.dev()).repeat_interleave(batch_size, 0)}
+
+ # all_pred = []
+ pred = rec_model(
+ img=None,
+ c=micro['c'],
+ latent=ddpm_latent,
+ # latent={
+ # # k: v.repeat_interleave(micro['c'].shape[0], 0) if v is not None else None
+ # k: v.repeat_interleave(micro['c'].shape[0], 0) if v is not None else None
+ # for k, v in ddpm_latent.items()
+ # },
+ behaviour='triplane_dec')
+
+ pred_vis = self._make_vis_img(pred)
+
+ if save_img:
+ gen_img = pred['image_raw'] # gen_img is otherwise undefined in this scope
+ for batch_idx in range(gen_img.shape[0]):
+ sampled_img = Image.fromarray(
+ (gen_img[batch_idx].permute(1, 2, 0).cpu().numpy() *
+ 127.5 + 127.5).clip(0, 255).astype(np.uint8))
+ if sampled_img.size != (512, 512):
+ sampled_img = sampled_img.resize(
+ (128, 128), Image.HAMMING) # for shapenet
+ sampled_img.save(logger.get_dir() +
+ '/FID_Cals/{}_{}.png'.format(
+ int(name_prefix) * batch_size +
+ batch_idx, i))
+ # print('FID_Cals/{}_{}.png'.format(int(name_prefix)*batch_size+batch_idx, i))
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # if vis.shape[0] > 1:
+ # vis = np.concatenate(np.split(vis, vis.shape[0], axis=0),
+ # axis=-3)
+
+ # if not save_img:
+ for j in range(vis.shape[0]): # currently only exports one plane at a time
+ video_out.append_data(vis[j])
+
+ # if not save_img:
+ video_out.close()
+ del video_out
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4')
+
+ del vis, pred_vis, micro, pred
+
+ def save(self):
+
+ def save_checkpoint(rate, params):
+ state_dict = self.mp_trainer.master_params_to_state_dict(params)
+ if dist.get_rank() == 0:
+ logger.log(f"saving model {rate}...")
+ if not rate:
+ filename = f"model{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, self.mp_trainer.master_params)
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ if dist.get_rank() == 0:
+ with bf.BlobFile(
+ bf.join(get_blob_logdir(),
+ f"opt{(self.step+self.resume_step):07d}.pt"),
+ "wb",
+ ) as f:
+ th.save(self.opt.state_dict(), f)
+
+ dist.barrier()
+
+
+def parse_resume_step_from_filename(filename):
+ """
+ Parse filenames of the form path/to/modelNNNNNN.pt, where NNNNNN is the
+ checkpoint's number of steps.
+ """
+ # split1 = Path(filename).stem[-6:]
+ split1 = Path(filename).stem[-7:]
+ # split = filename.split("model")
+ # if len(split) < 2:
+ # return 0
+ # split1 = split[-1].split(".")[0]
+ try:
+ return int(split1)
+ except ValueError:
+ print('failed to parse model step from', split1)
+ return 0
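+# Example: Path('logs/model1965000.pt').stem[-7:] == '1965000' -> step 1965000.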
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+def find_resume_checkpoint(resume_checkpoint='', model_name='ddpm'):
+ # On your infrastructure, you may want to override this to automatically
+ # discover the latest checkpoint on your blob storage, etc.
+
+ if resume_checkpoint != '':
+ step = parse_resume_step_from_filename(resume_checkpoint)
+ split = resume_checkpoint.split("model")
+ resume_ckpt_path = str(
+ Path(split[0]) / f'model_{model_name}{step:07d}.pt')
+ else:
+ resume_ckpt_path = ''
+ step = 0
+
+ return resume_ckpt_path, step
+
+
+def find_ema_checkpoint(main_checkpoint, step, rate, model_name=''):
+ if main_checkpoint is None:
+ return None
+ if model_name == '':
+ filename = f"ema_{rate}_{(step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(step):07d}.pt"
+ path = bf.join(bf.dirname(main_checkpoint), filename)
+ # print(path)
+ # st()
+ if bf.exists(path):
+ print('found ema model', path)
+ return path
+ else:
+ print('failed to find ema model', path)
+ return None
+
+
+def log_loss_dict(diffusion, ts, losses):
+ for key, values in losses.items():
+ logger.logkv_mean(key, values.mean().item())
+ # Log the quantiles (four quartiles, in particular).
+ for sub_t, sub_loss in zip(ts.cpu().numpy(),
+ values.detach().cpu().numpy()):
+ quartile = int(4 * sub_t / diffusion.num_timesteps)
+ logger.logkv_mean(f"{key}_q{quartile}", sub_loss)
+
+
+def log_rec3d_loss_dict(loss_dict):
+ for key, values in loss_dict.items():
+ try:
+ logger.logkv_mean(key, values.mean().item())
+ except Exception:
+ print('type error, failed to log key:', key)
+
+
+
+def calc_average_loss(all_loss_dicts, verbose=True):
+ all_scores = {} # todo, defaultdict
+ mean_all_scores = {}
+
+ for loss_dict in all_loss_dicts:
+ for k, v in loss_dict.items():
+ v = v.item()
+ if k not in all_scores:
+ # all_scores[f'{k}_val'] = [v]
+ all_scores[k] = [v]
+ else:
+ all_scores[k].append(v)
+
+ for k, v in all_scores.items():
+ mean = np.mean(v)
+ std = np.std(v)
+ if k in ['loss_lpis', 'loss_ssim']:
+ mean = 1 - mean
+ result_str = '{} average loss is {:.4f} +- {:.4f}'.format(k, mean, std)
+ mean_all_scores[k] = mean
+ if verbose:
+ print(result_str)
+
+ val_scores_for_logging = {
+ f'{k}_val': v
+ for k, v in mean_all_scores.items()
+ }
+ return val_scores_for_logging
\ No newline at end of file
diff --git a/guided_diffusion/train_util_accelerate.py b/guided_diffusion/train_util_accelerate.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9850ba04fbc27a25876157fb4ff0c5919f1657c
--- /dev/null
+++ b/guided_diffusion/train_util_accelerate.py
@@ -0,0 +1,500 @@
+import copy
+from pdb import set_trace as st
+import functools
+import os
+import numpy as np
+
+import blobfile as bf
+import torch as th
+import torch.distributed as dist
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+
+from . import dist_util, logger
+from .fp16_util import MixedPrecisionTrainer
+from .nn import update_ema
+from .resample import LossAwareSampler, UniformSampler
+
+from pathlib import Path
+
+# For ImageNet experiments, this was a good default value.
+# We found that the lg_loss_scale quickly climbed to
+# 20-21 within the first ~1K steps of training.
+INITIAL_LOG_LOSS_SCALE = 20.0
+
+
+class TrainLoop:
+
+ def __init__(
+ self,
+ *,
+ model,
+ diffusion,
+ data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ schedule_sampler=None,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ use_amp=False,
+ ):
+ self.model = model
+ self.diffusion = diffusion
+ self.data = data
+ self.batch_size = batch_size
+ self.microbatch = microbatch if microbatch > 0 else batch_size
+ self.lr = lr
+ self.ema_rate = ([ema_rate] if isinstance(ema_rate, float) else
+ [float(x) for x in ema_rate.split(",")])
+ self.log_interval = log_interval
+ self.save_interval = save_interval
+ self.resume_checkpoint = resume_checkpoint
+ self.use_fp16 = use_fp16
+ self.fp16_scale_growth = fp16_scale_growth
+ self.schedule_sampler = schedule_sampler or UniformSampler(diffusion)
+ self.weight_decay = weight_decay
+ self.lr_anneal_steps = lr_anneal_steps
+
+ self.step = 0
+ self.resume_step = 0
+ self.global_batch = self.batch_size * dist.get_world_size()
+
+ self.sync_cuda = th.cuda.is_available()
+
+ self._load_and_sync_parameters()
+ self.mp_trainer = MixedPrecisionTrainer(
+ model=self.model,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ use_amp=use_amp,
+ )
+
+ self.opt = AdamW(self.mp_trainer.master_params,
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+ if self.resume_step:
+ self._load_optimizer_state()
+ # Model was resumed, either due to a restart or a checkpoint
+ # being specified at the command line.
+ self.ema_params = [
+ self._load_ema_parameters(rate) for rate in self.ema_rate
+ ]
+ else:
+ self.ema_params = [
+ copy.deepcopy(self.mp_trainer.master_params)
+ for _ in range(len(self.ema_rate))
+ ]
+
+ # print('creating DDP')
+ if th.cuda.is_available():
+ self.use_ddp = True
+ self.ddp_model = DDP(
+ self.model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ if dist.get_world_size() > 1:
+ logger.warn("Distributed training requires CUDA. "
+ "Gradients will not be synchronized properly!")
+ self.use_ddp = False
+ self.ddp_model = self.model
+ # print('creating DDP done')
+
+ def _load_and_sync_parameters(self):
+ # pass the configured checkpoint explicitly; find_resume_checkpoint always
+ # returns a (path, step) tuple, so an "or" fallback here would be dead code
+ resume_checkpoint, resume_step = find_resume_checkpoint(
+ self.resume_checkpoint)
+
+ if resume_checkpoint:
+ if not Path(resume_checkpoint).exists():
+ logger.log(
+ f"failed to load model from checkpoint: {resume_checkpoint}, not exist"
+ )
+ return
+
+ # self.resume_step = parse_resume_step_from_filename(resume_checkpoint)
+ self.resume_step = resume_step # TODO, EMA part
+ if dist.get_rank() == 0:
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ # if model is None:
+ # model = self.model
+ self.model.load_state_dict(
+ dist_util.load_state_dict(
+ resume_checkpoint,
+ map_location=dist_util.dev(),
+ ))
+
+ dist_util.sync_params(self.model.parameters())
+
+ def _load_ema_parameters(self,
+ rate,
+ model=None,
+ mp_trainer=None,
+ model_name='ddpm'):
+
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer
+ if model is None:
+ model = self.model
+
+ ema_params = copy.deepcopy(mp_trainer.master_params)
+
+ main_checkpoint, _ = find_resume_checkpoint(
+ self.resume_checkpoint, model_name)
+ ema_checkpoint = find_ema_checkpoint(main_checkpoint, self.resume_step,
+ rate, model_name)
+ if ema_checkpoint:
+
+ if dist_util.get_rank() == 0:
+
+ if not Path(ema_checkpoint).exists():
+ logger.log(
+ f"failed to load EMA from checkpoint: {ema_checkpoint}, not exist"
+ )
+ return
+
+ logger.log(f"loading EMA from checkpoint: {ema_checkpoint}...")
+
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ state_dict = dist_util.load_state_dict(
+ ema_checkpoint, map_location=map_location)
+
+ model_ema_state_dict = model.state_dict()
+
+ for k, v in state_dict.items():
+ if k in model_ema_state_dict.keys() and v.size(
+ ) == model_ema_state_dict[k].size():
+ model_ema_state_dict[k] = v
+ else:
+ logger.log('ignore key: ', k, ": ", v.size())
+
+ ema_params = mp_trainer.state_dict_to_master_params(
+ model_ema_state_dict)
+
+ del state_dict
+
+ # print('ema mark 3, ', model_name, flush=True)
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(ema_params)
+ # print('ema mark 4, ', model_name, flush=True)
+ # del ema_params
+ return ema_params
+
+ def _load_ema_parameters_freezeAE(
+ self,
+ rate,
+ model,
+ model_name='rec'):
+
+ # the AE is frozen here, so start from the trainer's own master params as
+ # the EMA state; entries found in the checkpoint overwrite it below
+ ema_params = copy.deepcopy(self.mp_trainer.master_params)
+
+ main_checkpoint, _ = find_resume_checkpoint(
+ self.resume_checkpoint, model_name)
+ ema_checkpoint = find_ema_checkpoint(main_checkpoint, self.resume_step,
+ rate, model_name)
+ if ema_checkpoint:
+
+ if dist_util.get_rank() == 0:
+
+ if not Path(ema_checkpoint).exists():
+ logger.log(
+ f"failed to load EMA from checkpoint: {ema_checkpoint}, not exist"
+ )
+ return
+
+ logger.log(f"loading EMA from checkpoint: {ema_checkpoint}...")
+
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ state_dict = dist_util.load_state_dict(
+ ema_checkpoint, map_location=map_location)
+
+ model_ema_state_dict = model.state_dict()
+
+ for k, v in state_dict.items():
+ if k in model_ema_state_dict.keys() and v.size(
+ ) == model_ema_state_dict[k].size():
+ model_ema_state_dict[k] = v
+ else:
+ logger.log('ignore key: ', k, ": ", v.size())
+
+ ema_params = self.mp_trainer.state_dict_to_master_params(
+ model_ema_state_dict)
+
+ del state_dict
+
+ # print('ema mark 3, ', model_name, flush=True)
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(ema_params)
+ # print('ema mark 4, ', model_name, flush=True)
+ # del ema_params
+ return ema_params
+
+ def _load_optimizer_state(self):
+ main_checkpoint, _ = find_resume_checkpoint(self.resume_checkpoint)
+ opt_checkpoint = bf.join(bf.dirname(main_checkpoint),
+ f"opt{self.resume_step:06}.pt")
+ if bf.exists(opt_checkpoint):
+ logger.log(
+ f"loading optimizer state from checkpoint: {opt_checkpoint}")
+ state_dict = dist_util.load_state_dict(
+ opt_checkpoint, map_location=dist_util.dev())
+ self.opt.load_state_dict(state_dict)
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+ batch, cond = next(self.data)
+ self.run_step(batch, cond)
+ if self.step % self.log_interval == 0:
+ logger.dumpkvs()
+ if self.step % self.save_interval == 0:
+ self.save()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+ self.step += 1
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+
+ def run_step(self, batch, cond):
+ self.forward_backward(batch, cond)
+ took_step = self.mp_trainer.optimize(self.opt)
+ if took_step:
+ self._update_ema()
+ self._anneal_lr()
+ self.log_step()
+
+ def forward_backward(self, batch, cond):
+ self.mp_trainer.zero_grad()
+ for i in range(0, batch.shape[0], self.microbatch):
+
+ # st()
+ with th.autocast(device_type='cuda', # autocast expects a device-type string
+ dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ micro = batch[i:i + self.microbatch].to(dist_util.dev())
+ micro_cond = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in cond.items()
+ }
+ last_batch = (i + self.microbatch) >= batch.shape[0]
+ t, weights = self.schedule_sampler.sample(
+ micro.shape[0], dist_util.dev())
+
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ micro,
+ t,
+ model_kwargs=micro_cond,
+ )
+
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ else:
+ with self.ddp_model.no_sync():
+ losses = compute_losses()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ loss = (losses["loss"] * weights).mean()
+ log_loss_dict(self.diffusion, t,
+ {k: v * weights
+ for k, v in losses.items()})
+
+ self.mp_trainer.backward(loss)
+
+ def _update_ema(self):
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ update_ema(params, self.mp_trainer.master_params, rate=rate)
+
+ def _anneal_lr(self):
+ if not self.lr_anneal_steps:
+ return
+ frac_done = (self.step + self.resume_step) / self.lr_anneal_steps
+ lr = self.lr * (1 - frac_done)
+ for param_group in self.opt.param_groups:
+ param_group["lr"] = lr
+
+ def log_step(self):
+ logger.logkv("step", self.step + self.resume_step)
+ logger.logkv("samples",
+ (self.step + self.resume_step + 1) * self.global_batch)
+
+ def save(self):
+
+ def save_checkpoint(rate, params):
+ state_dict = self.mp_trainer.master_params_to_state_dict(params)
+ if dist.get_rank() == 0:
+ logger.log(f"saving model {rate}...")
+ if not rate:
+ filename = f"model{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, self.mp_trainer.master_params)
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ if dist.get_rank() == 0:
+ with bf.BlobFile(
+ bf.join(get_blob_logdir(),
+ f"opt{(self.step+self.resume_step):07d}.pt"),
+ "wb",
+ ) as f:
+ th.save(self.opt.state_dict(), f)
+
+ dist.barrier()
+
+
+def parse_resume_step_from_filename(filename):
+ """
+ Parse filenames of the form path/to/modelNNNNNN.pt, where NNNNNN is the
+ checkpoint's number of steps.
+ """
+ split1 = Path(filename).stem[-6:]
+ # split = filename.split("model")
+ # if len(split) < 2:
+ # return 0
+ # split1 = split[-1].split(".")[0]
+ try:
+ return int(split1)
+ except ValueError:
+ print('failed to parse model step from', split1)
+ return 0
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+def find_resume_checkpoint(resume_checkpoint='', model_name='ddpm'):
+ # On your infrastructure, you may want to override this to automatically
+ # discover the latest checkpoint on your blob storage, etc.
+
+ if resume_checkpoint != '':
+ step = parse_resume_step_from_filename(resume_checkpoint)
+ split = resume_checkpoint.split("model")
+ resume_ckpt_path = str(
+ Path(split[0]) / f'model_{model_name}{step:07d}.pt')
+ else:
+ resume_ckpt_path = ''
+ step = 0
+
+ return resume_ckpt_path, step
+
+
+def find_ema_checkpoint(main_checkpoint, step, rate, model_name=''):
+ if main_checkpoint is None:
+ return None
+ if model_name == '':
+ filename = f"ema_{rate}_{(step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(step):07d}.pt"
+ path = bf.join(bf.dirname(main_checkpoint), filename)
+ # print(path)
+ # st()
+ if bf.exists(path):
+ print('found ema model', path)
+ return path
+ else:
+ print('failed to find ema model', path)
+ return None
+
+
+def log_loss_dict(diffusion, ts, losses):
+ for key, values in losses.items():
+ logger.logkv_mean(key, values.mean().item())
+ # Log the quantiles (four quartiles, in particular).
+ for sub_t, sub_loss in zip(ts.cpu().numpy(),
+ values.detach().cpu().numpy()):
+ quartile = int(4 * sub_t / diffusion.num_timesteps)
+ logger.logkv_mean(f"{key}_q{quartile}", sub_loss)
+
+
+def log_rec3d_loss_dict(loss_dict):
+ for key, values in loss_dict.items():
+ logger.logkv_mean(key, values.mean().item())
+
+
+def calc_average_loss(all_loss_dicts):
+ all_scores = {} # todo, defaultdict
+ mean_all_scores = {}
+
+ for loss_dict in all_loss_dicts:
+ for k, v in loss_dict.items():
+ v = v.item()
+ if k not in all_scores:
+ # all_scores[f'{k}_val'] = [v]
+ all_scores[k] = [v]
+ else:
+ all_scores[k].append(v)
+
+ for k, v in all_scores.items():
+ mean = np.mean(v)
+ std = np.std(v)
+ if k in ['loss_lpis', 'loss_ssim']:
+ mean = 1 - mean
+ result_str = '{} average loss is {:.4f} +- {:.4f}'.format(k, mean, std)
+ mean_all_scores[k] = mean
+ print(result_str)
+
+ val_scores_for_logging = {
+ f'{k}_val': v
+ for k, v in mean_all_scores.items()
+ }
+ return val_scores_for_logging
\ No newline at end of file
diff --git a/guided_diffusion/unet.py b/guided_diffusion/unet.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f617306558e70d9a1a255e4883e1864c778bb78
--- /dev/null
+++ b/guided_diffusion/unet.py
@@ -0,0 +1,1116 @@
+from abc import abstractmethod
+
+import math
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+from pdb import set_trace as st
+from einops import rearrange, repeat
+
+from .fp16_util import convert_module_to_f16, convert_module_to_f32
+from .nn import (
+ checkpoint,
+ conv_nd,
+ linear,
+ avg_pool_nd,
+ zero_module,
+ normalization,
+ timestep_embedding,
+)
+
+from ldm.modules.attention import SpatialTransformer
+
+class AttentionPool2d(nn.Module):
+ """
+ Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py
+ """
+
+ def __init__(
+ self,
+ spacial_dim: int,
+ embed_dim: int,
+ num_heads_channels: int,
+ output_dim: int = None,
+ ):
+ super().__init__()
+ self.positional_embedding = nn.Parameter(
+ th.randn(embed_dim, spacial_dim ** 2 + 1) / embed_dim ** 0.5
+ )
+ self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1)
+ self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1)
+ self.num_heads = embed_dim // num_heads_channels
+ self.attention = QKVAttention(self.num_heads)
+
+ def forward(self, x):
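+ # shape sketch: x [B, C, H, W] -> flatten to [B, C, H*W], prepend the mean
+ # token -> [B, C, H*W + 1]; after attention + projection, only the pooled
+ # (mean) token is returned, giving [B, output_dim]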
+ b, c, *_spatial = x.shape
+ x = x.reshape(b, c, -1) # NC(HW)
+ x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1)
+ x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1)
+ x = self.qkv_proj(x)
+ x = self.attention(x)
+ x = self.c_proj(x)
+ return x[:, :, 0]
+
+
+class TimestepBlock(nn.Module):
+ """
+ Any module where forward() takes timestep embeddings as a second argument.
+ """
+
+ @abstractmethod
+ def forward(self, x, emb):
+ """
+ Apply the module to `x` given `emb` timestep embeddings.
+ """
+
+
+# from LDM openaimodel.py
+class TimestepEmbedSequential(nn.Sequential, TimestepBlock):
+ """
+ A sequential module that passes timestep embeddings to the children that
+ support it as an extra input.
+ """
+
+ def forward(self, x, emb, context=None):
+ for layer in self:
+ if isinstance(layer, TimestepBlock):
+ x = layer(x, emb)
+ elif isinstance(layer, SpatialTransformer):
+ x = layer(x, context)
+ else:
+ x = layer(x)
+ return x
+
+
+
+class Upsample(nn.Module):
+ """
+ An upsampling layer with an optional convolution.
+
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ upsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(self, channels, use_conv, dims=2, out_channels=None):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ if use_conv:
+ self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=1)
+
+ def forward(self, x):
+ assert x.shape[1] == self.channels
+
+ with th.autocast(enabled=False, device_type='cuda'): # disables autocast for the interpolation only; does not change the stored dtype
+ if self.dims == 3:
+ x = F.interpolate(
+ x.float(), (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest"
+ )
+ else:
+ x = F.interpolate(x.float(), scale_factor=2, mode="nearest")
+
+ if self.use_conv:
+ x = self.conv(x)
+ return x
+
+
+class Downsample(nn.Module):
+ """
+ A downsampling layer with an optional convolution.
+
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ downsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(self, channels, use_conv, dims=2, out_channels=None):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ stride = 2 if dims != 3 else (1, 2, 2)
+ if use_conv:
+ self.op = conv_nd(
+ dims, self.channels, self.out_channels, 3, stride=stride, padding=1
+ )
+ else:
+ assert self.channels == self.out_channels
+ self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)
+
+ def forward(self, x):
+ assert x.shape[1] == self.channels
+ return self.op(x)
+
+
+class ResBlock(TimestepBlock):
+ """
+ A residual block that can optionally change the number of channels.
+
+ :param channels: the number of input channels.
+ :param emb_channels: the number of timestep embedding channels.
+ :param dropout: the rate of dropout.
+ :param out_channels: if specified, the number of out channels.
+ :param use_conv: if True and out_channels is specified, use a spatial
+ convolution instead of a smaller 1x1 convolution to change the
+ channels in the skip connection.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param use_checkpoint: if True, use gradient checkpointing on this module.
+ :param up: if True, use this block for upsampling.
+ :param down: if True, use this block for downsampling.
+ """
+
+ def __init__(
+ self,
+ channels,
+ emb_channels,
+ dropout,
+ out_channels=None,
+ use_conv=False,
+ use_scale_shift_norm=False,
+ dims=2,
+ use_checkpoint=False,
+ up=False,
+ down=False,
+ ):
+ super().__init__()
+ self.channels = channels
+ self.emb_channels = emb_channels
+ self.dropout = dropout
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.use_checkpoint = use_checkpoint
+ self.use_scale_shift_norm = use_scale_shift_norm
+
+ self.in_layers = nn.Sequential(
+ normalization(channels),
+ nn.SiLU(),
+ conv_nd(dims, channels, self.out_channels, 3, padding=1),
+ )
+
+ self.updown = up or down
+
+ if up:
+ self.h_upd = Upsample(channels, False, dims)
+ self.x_upd = Upsample(channels, False, dims)
+ elif down:
+ self.h_upd = Downsample(channels, False, dims)
+ self.x_upd = Downsample(channels, False, dims)
+ else:
+ self.h_upd = self.x_upd = nn.Identity()
+
+ self.emb_layers = nn.Sequential(
+ nn.SiLU(),
+ linear(
+ emb_channels,
+ 2 * self.out_channels if use_scale_shift_norm else self.out_channels,
+ ),
+ )
+ self.out_layers = nn.Sequential(
+ normalization(self.out_channels),
+ nn.SiLU(),
+ nn.Dropout(p=dropout),
+ zero_module(
+ conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1)
+ ),
+ )
+
+ if self.out_channels == channels:
+ self.skip_connection = nn.Identity()
+ elif use_conv:
+ self.skip_connection = conv_nd(
+ dims, channels, self.out_channels, 3, padding=1
+ )
+ else:
+ self.skip_connection = conv_nd(dims, channels, self.out_channels, 1)
+
+ def forward(self, x, emb):
+ """
+ Apply the block to a Tensor, conditioned on a timestep embedding.
+
+ :param x: an [N x C x ...] Tensor of features.
+ :param emb: an [N x emb_channels] Tensor of timestep embeddings.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+ # gradient checkpointing is bypassed here; the checkpoint() wrapper is
+ # incompatible with torch.amp autocast in this setup
+ return self._forward(x, emb)
+
+ def _forward(self, x, emb):
+ if self.updown:
+ in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1]
+ h = in_rest(x)
+ h = self.h_upd(h)
+ x = self.x_upd(x)
+ h = in_conv(h)
+ else:
+ h = self.in_layers(x)
+ emb_out = self.emb_layers(emb).type(h.dtype)
+ while len(emb_out.shape) < len(h.shape):
+ emb_out = emb_out[..., None]
+ if self.use_scale_shift_norm:
+ out_norm, out_rest = self.out_layers[0], self.out_layers[1:]
+ scale, shift = th.chunk(emb_out, 2, dim=1)
+ h = out_norm(h) * (1 + scale) + shift
+ h = out_rest(h)
+ else:
+ h = h + emb_out
+ h = self.out_layers(h)
+ return self.skip_connection(x) + h
+
+
+class AttentionBlock(nn.Module):
+ """
+ An attention block that allows spatial positions to attend to each other.
+
+ Originally ported from here, but adapted to the N-d case.
+ https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66.
+ """
+
+ def __init__(
+ self,
+ channels,
+ num_heads=1,
+ num_head_channels=-1,
+ use_checkpoint=False,
+ use_new_attention_order=False,
+ ):
+ super().__init__()
+ self.channels = channels
+ if num_head_channels == -1:
+ self.num_heads = num_heads
+ else:
+ assert (
+ channels % num_head_channels == 0
+ ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}"
+ self.num_heads = channels // num_head_channels
+ self.use_checkpoint = use_checkpoint
+ self.norm = normalization(channels)
+ self.qkv = conv_nd(1, channels, channels * 3, 1)
+ if use_new_attention_order:
+ # split qkv before split heads
+ self.attention = QKVAttention(self.num_heads)
+ else:
+ # split heads before split qkv
+ self.attention = QKVAttentionLegacy(self.num_heads)
+
+ self.proj_out = zero_module(conv_nd(1, channels, channels, 1))
+
+ # ! disable checkpoint here since it is incompatible with torch.amp
+ def forward(self, x):
+ b, c, *spatial = x.shape
+ x = x.reshape(b, c, -1)
+ qkv = self.qkv(self.norm(x))
+ h = self.attention(qkv)
+ h = self.proj_out(h)
+ return (x + h).reshape(b, c, *spatial)
+
+
+def count_flops_attn(model, _x, y):
+ """
+ A counter for the `thop` package to count the operations in an
+ attention operation.
+ Meant to be used like:
+ macs, params = thop.profile(
+ model,
+ inputs=(inputs, timestamps),
+ custom_ops={QKVAttention: QKVAttention.count_flops},
+ )
+ """
+ b, c, *spatial = y[0].shape
+ num_spatial = int(np.prod(spatial))
+ # We perform two matmuls with the same number of ops.
+ # The first computes the weight matrix, the second computes
+ # the combination of the value vectors.
+ matmul_ops = 2 * b * (num_spatial ** 2) * c
+ model.total_ops += th.DoubleTensor([matmul_ops])
+
+
+class QKVAttentionLegacy(nn.Module):
+ """
+ A module which performs QKV attention. Matches legacy QKVAttention + input/output heads shaping.
+ """
+
+ def __init__(self, n_heads):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv):
+ """
+ Apply QKV attention.
+
+ :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts", q * scale, k * scale
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v)
+ return a.reshape(bs, -1, length)
+
+ @staticmethod
+ def count_flops(model, _x, y):
+ return count_flops_attn(model, _x, y)
+
+
+class QKVAttention(nn.Module):
+ """
+ A module which performs QKV attention and splits in a different order.
+ """
+
+ def __init__(self, n_heads):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv):
+ """
+ Apply QKV attention.
+
+ :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.chunk(3, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts",
+ (q * scale).view(bs * self.n_heads, ch, length),
+ (k * scale).view(bs * self.n_heads, ch, length),
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length))
+ return a.reshape(bs, -1, length)
+
+ @staticmethod
+ def count_flops(model, _x, y):
+ return count_flops_attn(model, _x, y)
+
+
+class UNetModel(nn.Module):
+ """
+ The full UNet model with attention and timestep embedding.
+ :param in_channels: channels in the input Tensor.
+ :param model_channels: base channel count for the model.
+ :param out_channels: channels in the output Tensor.
+ :param num_res_blocks: number of residual blocks per downsample.
+ :param attention_resolutions: a collection of downsample rates at which
+ attention will take place. May be a set, list, or tuple.
+ For example, if this contains 4, then at 4x downsampling, attention
+ will be used.
+ :param dropout: the dropout probability.
+ :param channel_mult: channel multiplier for each level of the UNet.
+ :param conv_resample: if True, use learned convolutions for upsampling and
+ downsampling.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param num_classes: if specified (as an int), then this model will be
+ class-conditional with `num_classes` classes.
+ :param use_checkpoint: use gradient checkpointing to reduce memory usage.
+ :param num_heads: the number of attention heads in each attention layer.
+ :param num_head_channels: if specified, ignore num_heads and instead use
+ a fixed channel width per attention head.
+ :param num_heads_upsample: works with num_heads to set a different number
+ of heads for upsampling. Deprecated.
+ :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.
+ :param resblock_updown: use residual blocks for up/downsampling.
+ :param use_new_attention_order: use a different attention pattern for potentially
+ increased efficiency.
+ """
+
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ out_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ num_classes=None,
+ use_checkpoint=False,
+ use_fp16=False,
+ num_heads=-1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ mixed_prediction=False,
+ use_spatial_transformer=False, # custom transformer support
+ transformer_depth=1, # custom transformer support
+ context_dim=-1, # custom transformer support
+ n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model
+ legacy=True,
+ mixing_logit_init=-6,
+ roll_out=False,**kwargs
+ ):
+ super().__init__()
+ self.roll_out = roll_out
+ if context_dim == -1:
+ context_dim = None
+
+ if use_spatial_transformer:
+ assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...'
+
+ if context_dim is not None:
+ assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...'
+ # from omegaconf.listconfig import ListConfig
+ # if type(context_dim) == ListConfig:
+ # context_dim = list(context_dim)
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ if num_heads == -1:
+ assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set'
+
+ if num_head_channels == -1:
+ assert num_heads != -1, 'Either num_heads or num_head_channels has to be set'
+
+ self.image_size = image_size
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ self.num_res_blocks = num_res_blocks
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.num_classes = num_classes
+ self.use_checkpoint = use_checkpoint
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+ self.predict_codebook_ids = n_embed is not None
+
+ # follow LSGM
+ self.mixed_prediction = mixed_prediction # This enables mixed prediction
+ if self.mixed_prediction:
+ if self.roll_out:
+ init = mixing_logit_init * th.ones(size=[1, in_channels*3, 1, 1]) # hard coded for now
+ else:
+ init = mixing_logit_init * th.ones(size=[1, in_channels, 1, 1]) # hard coded for now
+ self.mixing_logit = th.nn.Parameter(init, requires_grad=True)
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if self.num_classes is not None:
+ self.label_emb = nn.Embedding(num_classes, time_embed_dim)
+
+ self.input_blocks = nn.ModuleList(
+ [
+ TimestepEmbedSequential(
+ conv_nd(dims, in_channels, model_channels, 3, padding=1)
+ )
+ ]
+ )
+ self._feature_size = model_channels
+ input_block_chans = [model_channels]
+ ch = model_channels
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for _ in range(num_res_blocks):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=mult * model_channels,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = mult * model_channels
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+
+ self.output_blocks = nn.ModuleList([])
+ for level, mult in list(enumerate(channel_mult))[::-1]:
+ for i in range(num_res_blocks + 1):
+ ich = input_block_chans.pop()
+ layers = [
+ ResBlock(
+ ch + ich,
+ time_embed_dim,
+ dropout,
+ out_channels=model_channels * mult,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = model_channels * mult
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads_upsample,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim
+ )
+ )
+ if level and i == num_res_blocks:
+ out_ch = ch
+ layers.append(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ up=True,
+ )
+ if resblock_updown
+ else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)
+ )
+ ds //= 2
+ self.output_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),
+ )
+ if self.predict_codebook_ids:
+ self.id_predictor = nn.Sequential(
+ normalization(ch),
+ conv_nd(dims, model_channels, n_embed, 1),
+ #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits
+ )
+
+ def convert_to_fp16(self):
+ """
+ Convert the torso of the model to float16.
+ """
+ self.input_blocks.apply(convert_module_to_f16)
+ self.middle_block.apply(convert_module_to_f16)
+ self.output_blocks.apply(convert_module_to_f16)
+
+ def convert_to_fp32(self):
+ """
+ Convert the torso of the model to float32.
+ """
+ self.input_blocks.apply(convert_module_to_f32)
+ self.middle_block.apply(convert_module_to_f32)
+ self.output_blocks.apply(convert_module_to_f32)
+
+ def forward(self, x, timesteps=None, context=None, y=None, get_attr='', **kwargs):
+ """
+ Apply the model to an input batch.
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param timesteps: a 1-D batch of timesteps.
+ :param context: conditioning plugged in via crossattn
+ :param y: an [N] Tensor of labels, if class-conditional.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+
+ if isinstance(context, dict):
+ context = context['crossattn'] # sgm conditioner compat
+
+ if get_attr != '': # not breaking the forward hooks
+ return getattr(self, get_attr)
+
+ assert (y is not None) == (
+ self.num_classes is not None
+ ), "must specify y if and only if the model is class-conditional"
+ hs = []
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.roll_out:
+ # x = rearrange(x, 'b (n c) h w->b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
+ # ! fix order bug
+ x = rearrange(x, 'b (c n) h w->b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
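+ # roll-out: the three planes stored as channel groups are laid out side by
+ # side along the width axis, so convolution and attention see all planes jointly.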
+
+ if self.num_classes is not None:
+ assert y.shape == (x.shape[0],)
+ emb = emb + self.label_emb(y)
+
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb, context)
+ hs.append(h)
+ h = self.middle_block(h, emb, context)
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(h, emb, context)
+ h = h.type(x.dtype)
+
+ if self.predict_codebook_ids:
+ return self.id_predictor(h)
+ else:
+ h = self.out(h)
+ if self.roll_out:
+ # return rearrange(h, 'b c h (n w) -> b (n c) h w', n=3)
+ # ! fix order bug
+ return rearrange(h, 'b c h (n w) -> b (c n) h w', n=3)
+ return h
+
+
+class SuperResModel(UNetModel):
+ """
+ A UNetModel that performs super-resolution.
+
+ Expects an extra kwarg `low_res` to condition on a low-resolution image.
+ """
+
+ def __init__(self, image_size, in_channels, *args, **kwargs):
+ super().__init__(image_size, in_channels * 2, *args, **kwargs)
+
+ def forward(self, x, timesteps, low_res=None, **kwargs):
+ _, _, new_height, new_width = x.shape
+ upsampled = F.interpolate(low_res, (new_height, new_width), mode="bilinear")
+ x = th.cat([x, upsampled], dim=1)
+ return super().forward(x, timesteps, **kwargs)
+
+
+class EncoderUNetModel(nn.Module):
+ """
+ The half UNet model with attention and timestep embedding.
+
+ For usage, see UNet.
+ """
+
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ out_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ use_checkpoint=False,
+ use_fp16=False,
+ num_heads=1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ pool="adaptive",
+ ):
+ super().__init__()
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ self.num_res_blocks = num_res_blocks
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.use_checkpoint = use_checkpoint
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ ch = int(channel_mult[0] * model_channels)
+ self.input_blocks = nn.ModuleList(
+ [TimestepEmbedSequential(conv_nd(dims, in_channels, ch, 3, padding=1))]
+ )
+ self._feature_size = ch
+ input_block_chans = [ch]
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for _ in range(num_res_blocks):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=int(mult * model_channels),
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = int(mult * model_channels)
+ if ds in attention_resolutions:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+ self.pool = pool
+ if pool == "adaptive":
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ nn.AdaptiveAvgPool2d((1, 1)),
+ zero_module(conv_nd(dims, ch, out_channels, 1)),
+ nn.Flatten(),
+ )
+ elif pool == "attention":
+ assert num_head_channels != -1
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ AttentionPool2d(
+ (image_size // ds), ch, num_head_channels, out_channels
+ ),
+ )
+ elif pool == "spatial":
+ self.out = nn.Sequential(
+ nn.Linear(self._feature_size, 2048),
+ nn.ReLU(),
+ nn.Linear(2048, self.out_channels),
+ )
+ elif pool == "spatial_v2":
+ self.out = nn.Sequential(
+ nn.Linear(self._feature_size, 2048),
+ normalization(2048),
+ nn.SiLU(),
+ nn.Linear(2048, self.out_channels),
+ )
+ else:
+ raise NotImplementedError(f"Unexpected {pool} pooling")
+
+ def convert_to_fp16(self):
+ """
+ Convert the torso of the model to float16.
+ """
+ self.input_blocks.apply(convert_module_to_f16)
+ self.middle_block.apply(convert_module_to_f16)
+
+ def convert_to_fp32(self):
+ """
+ Convert the torso of the model to float32.
+ """
+ self.input_blocks.apply(convert_module_to_f32)
+ self.middle_block.apply(convert_module_to_f32)
+
+ def forward(self, x, timesteps):
+ """
+ Apply the model to an input batch.
+
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param timesteps: a 1-D batch of timesteps.
+ :return: an [N x K] Tensor of outputs.
+ """
+ emb = self.time_embed(timestep_embedding(timesteps, self.model_channels))
+
+ results = []
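+ # for "spatial" pooling, spatially averaged features from every input block
+ # (and the middle block) are concatenated into a single vector before the head.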
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb)
+ if self.pool.startswith("spatial"):
+ results.append(h.type(x.dtype).mean(dim=(2, 3)))
+ h = self.middle_block(h, emb)
+ if self.pool.startswith("spatial"):
+ results.append(h.type(x.dtype).mean(dim=(2, 3)))
+ h = th.cat(results, axis=-1)
+ return self.out(h)
+ else:
+ h = h.type(x.dtype)
+ return self.out(h)
+
+
+class UNetModelWithHint(UNetModel):
+ def __init__(self, image_size, in_channels, model_channels, hint_channels,
+ out_channels, num_res_blocks, attention_resolutions, dropout=0,
+ channel_mult=(1, 2, 4, 8), conv_resample=True, dims=2, num_classes=None,
+ use_checkpoint=False, use_fp16=False, num_heads=-1, num_head_channels=-1,
+ num_heads_upsample=-1, use_scale_shift_norm=False, resblock_updown=False,
+ use_new_attention_order=False, mixed_prediction=False,
+ use_spatial_transformer=False, transformer_depth=1, context_dim=-1,
+ n_embed=None, legacy=True, mixing_logit_init=-6, roll_out=False):
+ super().__init__(image_size, in_channels, model_channels, out_channels,
+ num_res_blocks, attention_resolutions, dropout, channel_mult,
+ conv_resample, dims, num_classes, use_checkpoint, use_fp16, num_heads,
+ num_head_channels, num_heads_upsample, use_scale_shift_norm,
+ resblock_updown, use_new_attention_order, mixed_prediction,
+ use_spatial_transformer, transformer_depth, context_dim, n_embed,
+ legacy, mixing_logit_init, roll_out)
+
+ # lite encoder, borrowed from ControlNet
+
+ self.input_hint_block = TimestepEmbedSequential( # f=8
+ conv_nd(dims, hint_channels, 16, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 16, 16, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 16, 32, 3, padding=1, stride=2),
+ nn.SiLU(),
+ conv_nd(dims, 32, 32, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 32, 96, 3, padding=1, stride=2),
+ nn.SiLU(),
+ conv_nd(dims, 96, 96, 3, padding=1),
+ nn.SiLU(),
+ conv_nd(dims, 96, 256, 3, padding=1, stride=2),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, 256, model_channels, 3, padding=1))
+ )
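+ # three stride-2 convs downsample the hint by 8x to match the latent grid; the
+ # final zero-initialized conv makes the hint branch a no-op at initialization
+ # (the ControlNet trick), so training starts from the unmodified base model.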
+
+ def forward(self, x, hint, timesteps=None, context=None, y=None, get_attr='', **kwargs):
+ """
+ Apply the model to an input batch.
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param hint: an [N x hint_channels x H x W] Tensor of spatial conditioning (consumed by input_hint_block).
+ :param timesteps: a 1-D batch of timesteps.
+ :param context: conditioning plugged in via crossattn
+ :param y: an [N] Tensor of labels, if class-conditional.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+
+ assert context is not None
+ # assert (y is not None) == (
+ # self.num_classes is not None
+ # ), "must specify y if and only if the model is class-conditional"
+ hs = []
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.roll_out:
+ x = rearrange(x, 'b (n c) h w->b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
+
+ # if self.num_classes is not None:
+ # assert y.shape == (x.shape[0],)
+ # emb = emb + self.label_emb(y)
+
+ guided_hint = self.input_hint_block(hint, emb, context)
+
+ if self.roll_out:
+ guided_hint = repeat(guided_hint, 'b c h w -> b c h (n w)', n=3) # torch.Size([84, 4, 32, 96])
+
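+ # the encoded hint is injected once, after the first input block; the rest of
+ # the UNet then runs unchanged.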
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ if guided_hint is not None:
+ h = module(h, emb, context) # B, 320, 32, 96
+
+ h += guided_hint
+ guided_hint = None
+ else:
+ h = module(h, emb, context)
+
+ hs.append(h)
+
+ h = self.middle_block(h, emb, context)
+
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(h, emb, context)
+ h = h.type(x.dtype)
+
+ if self.predict_codebook_ids:
+ return self.id_predictor(h)
+ else:
+ h = self.out(h)
+ if self.roll_out:
+ return rearrange(h, 'b c h (n w) -> b (n c) h w', n=3)
+ return h
\ No newline at end of file
diff --git a/guided_diffusion/unet_adm.py b/guided_diffusion/unet_adm.py
new file mode 100644
index 0000000000000000000000000000000000000000..99df55076b10f2b30f0a7e8712995194d731b8ae
--- /dev/null
+++ b/guided_diffusion/unet_adm.py
@@ -0,0 +1,934 @@
+from abc import abstractmethod
+
+import math
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from .fp16_util import convert_module_to_f16, convert_module_to_f32
+from .nn import (
+ checkpoint,
+ conv_nd,
+ linear,
+ avg_pool_nd,
+ zero_module,
+ normalization,
+ timestep_embedding,
+)
+
+from ldm.modules.attention import SpatialTransformer
+
+class AttentionPool2d(nn.Module):
+ """
+ Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py
+ """
+
+ def __init__(
+ self,
+ spacial_dim: int,
+ embed_dim: int,
+ num_heads_channels: int,
+ output_dim: int = None,
+ ):
+ super().__init__()
+ self.positional_embedding = nn.Parameter(
+ th.randn(embed_dim, spacial_dim ** 2 + 1) / embed_dim ** 0.5
+ )
+ self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1)
+ self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1)
+ self.num_heads = embed_dim // num_heads_channels
+ self.attention = QKVAttention(self.num_heads)
+
+ def forward(self, x):
+ b, c, *_spatial = x.shape
+ x = x.reshape(b, c, -1) # NC(HW)
+ x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1)
+ x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1)
+ x = self.qkv_proj(x)
+ x = self.attention(x)
+ x = self.c_proj(x)
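+ # the mean over all positions was prepended as an extra token; after attention,
+ # that token (index 0) is returned as the pooled, CLS-like output.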
+ return x[:, :, 0]
+
+
+class TimestepBlock(nn.Module):
+ """
+ Any module where forward() takes timestep embeddings as a second argument.
+ """
+
+ @abstractmethod
+ def forward(self, x, emb):
+ """
+ Apply the module to `x` given `emb` timestep embeddings.
+ """
+
+
+# from LDM openaimodel.py
+class TimestepEmbedSequential(nn.Sequential, TimestepBlock):
+ """
+ A sequential module that passes timestep embeddings to the children that
+ support it as an extra input.
+ """
+
+ def forward(self, x, emb, context=None):
+ for layer in self:
+ if isinstance(layer, TimestepBlock):
+ x = layer(x, emb)
+ elif isinstance(layer, SpatialTransformer):
+ x = layer(x, context)
+ else:
+ x = layer(x)
+ return x
+
+
+
+class Upsample(nn.Module):
+ """
+ An upsampling layer with an optional convolution.
+
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ upsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(self, channels, use_conv, dims=2, out_channels=None):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ if use_conv:
+ self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=1)
+
+ def forward(self, x):
+ assert x.shape[1] == self.channels
+ if self.dims == 3:
+ x = F.interpolate(
+ x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest"
+ )
+ else:
+ x = F.interpolate(x, scale_factor=2, mode="nearest")
+ if self.use_conv:
+ x = self.conv(x)
+ return x
+
+
+class Downsample(nn.Module):
+ """
+ A downsampling layer with an optional convolution.
+
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ downsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(self, channels, use_conv, dims=2, out_channels=None):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ stride = 2 if dims != 3 else (1, 2, 2)
+ if use_conv:
+ self.op = conv_nd(
+ dims, self.channels, self.out_channels, 3, stride=stride, padding=1
+ )
+ else:
+ assert self.channels == self.out_channels
+ self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)
+
+ def forward(self, x):
+ assert x.shape[1] == self.channels
+ return self.op(x)
+
+
+class ResBlock(TimestepBlock):
+ """
+ A residual block that can optionally change the number of channels.
+
+ :param channels: the number of input channels.
+ :param emb_channels: the number of timestep embedding channels.
+ :param dropout: the rate of dropout.
+ :param out_channels: if specified, the number of out channels.
+ :param use_conv: if True and out_channels is specified, use a spatial
+ convolution instead of a smaller 1x1 convolution to change the
+ channels in the skip connection.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param use_checkpoint: if True, use gradient checkpointing on this module.
+ :param up: if True, use this block for upsampling.
+ :param down: if True, use this block for downsampling.
+ """
+
+ def __init__(
+ self,
+ channels,
+ emb_channels,
+ dropout,
+ out_channels=None,
+ use_conv=False,
+ use_scale_shift_norm=False,
+ dims=2,
+ use_checkpoint=False,
+ up=False,
+ down=False,
+ ):
+ super().__init__()
+ self.channels = channels
+ self.emb_channels = emb_channels
+ self.dropout = dropout
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.use_checkpoint = use_checkpoint
+ self.use_scale_shift_norm = use_scale_shift_norm
+
+ self.in_layers = nn.Sequential(
+ normalization(channels),
+ nn.SiLU(),
+ conv_nd(dims, channels, self.out_channels, 3, padding=1),
+ )
+
+ self.updown = up or down
+
+ if up:
+ self.h_upd = Upsample(channels, False, dims)
+ self.x_upd = Upsample(channels, False, dims)
+ elif down:
+ self.h_upd = Downsample(channels, False, dims)
+ self.x_upd = Downsample(channels, False, dims)
+ else:
+ self.h_upd = self.x_upd = nn.Identity()
+
+ self.emb_layers = nn.Sequential(
+ nn.SiLU(),
+ linear(
+ emb_channels,
+ 2 * self.out_channels if use_scale_shift_norm else self.out_channels,
+ ),
+ )
+ self.out_layers = nn.Sequential(
+ normalization(self.out_channels),
+ nn.SiLU(),
+ nn.Dropout(p=dropout),
+ zero_module(
+ conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1)
+ ),
+ )
+
+ if self.out_channels == channels:
+ self.skip_connection = nn.Identity()
+ elif use_conv:
+ self.skip_connection = conv_nd(
+ dims, channels, self.out_channels, 3, padding=1
+ )
+ else:
+ self.skip_connection = conv_nd(dims, channels, self.out_channels, 1)
+
+ def forward(self, x, emb):
+ """
+ Apply the block to a Tensor, conditioned on a timestep embedding.
+
+ :param x: an [N x C x ...] Tensor of features.
+ :param emb: an [N x emb_channels] Tensor of timestep embeddings.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+ return checkpoint(
+ self._forward, (x, emb), self.parameters(), self.use_checkpoint
+ )
+
+ def _forward(self, x, emb):
+ if self.updown:
+ in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1]
+ h = in_rest(x)
+ h = self.h_upd(h)
+ x = self.x_upd(x)
+ h = in_conv(h)
+ else:
+ h = self.in_layers(x)
+ emb_out = self.emb_layers(emb).type(h.dtype)
+ while len(emb_out.shape) < len(h.shape):
+ emb_out = emb_out[..., None]
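+ # FiLM-style conditioning: with scale-shift norm the embedding modulates the
+ # normalized activations as norm(h) * (1 + scale) + shift; otherwise it is
+ # simply added to h.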
+ if self.use_scale_shift_norm:
+ out_norm, out_rest = self.out_layers[0], self.out_layers[1:]
+ scale, shift = th.chunk(emb_out, 2, dim=1)
+ h = out_norm(h) * (1 + scale) + shift
+ h = out_rest(h)
+ else:
+ h = h + emb_out
+ h = self.out_layers(h)
+ return self.skip_connection(x) + h
+
+
+class AttentionBlock(nn.Module):
+ """
+ An attention block that allows spatial positions to attend to each other.
+
+ Originally ported from here, but adapted to the N-d case.
+ https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66.
+ """
+
+ def __init__(
+ self,
+ channels,
+ num_heads=1,
+ num_head_channels=-1,
+ use_checkpoint=False,
+ use_new_attention_order=False,
+ ):
+ super().__init__()
+ self.channels = channels
+ if num_head_channels == -1:
+ self.num_heads = num_heads
+ else:
+ assert (
+ channels % num_head_channels == 0
+ ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}"
+ self.num_heads = channels // num_head_channels
+ self.use_checkpoint = use_checkpoint
+ self.norm = normalization(channels)
+ self.qkv = conv_nd(1, channels, channels * 3, 1)
+ if use_new_attention_order:
+ # split qkv before split heads
+ self.attention = QKVAttention(self.num_heads)
+ else:
+ # split heads before split qkv
+ self.attention = QKVAttentionLegacy(self.num_heads)
+
+ self.proj_out = zero_module(conv_nd(1, channels, channels, 1))
+
+ # ! disable checkpoint here since it is incompatible with torch.amp
+ def forward(self, x):
+ b, c, *spatial = x.shape
+ x = x.reshape(b, c, -1)
+ qkv = self.qkv(self.norm(x))
+ h = self.attention(qkv)
+ h = self.proj_out(h)
+ return (x + h).reshape(b, c, *spatial)
+
+
+def count_flops_attn(model, _x, y):
+ """
+ A counter for the `thop` package to count the operations in an
+ attention operation.
+ Meant to be used like:
+ macs, params = thop.profile(
+ model,
+ inputs=(inputs, timestamps),
+ custom_ops={QKVAttention: QKVAttention.count_flops},
+ )
+ """
+ b, c, *spatial = y[0].shape
+ num_spatial = int(np.prod(spatial))
+ # We perform two matmuls with the same number of ops.
+ # The first computes the weight matrix, the second computes
+ # the combination of the value vectors.
+ matmul_ops = 2 * b * (num_spatial ** 2) * c
+ model.total_ops += th.DoubleTensor([matmul_ops])
+
+
+class QKVAttentionLegacy(nn.Module):
+ """
+ A module which performs QKV attention. Matches legacy QKVAttention + input/output heads shaping
+ """
+
+ def __init__(self, n_heads):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv):
+ """
+ Apply QKV attention.
+
+ :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
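+ # scaling q and k each by ch**-0.25 is equivalent to dividing the attention
+ # logits by sqrt(ch), but keeps intermediate values small enough for fp16.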
+ weight = th.einsum(
+ "bct,bcs->bts", q * scale, k * scale
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v)
+ return a.reshape(bs, -1, length)
+
+ @staticmethod
+ def count_flops(model, _x, y):
+ return count_flops_attn(model, _x, y)
+
+
+class QKVAttention(nn.Module):
+ """
+ A module which performs QKV attention and splits in a different order.
+ """
+
+ def __init__(self, n_heads):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv):
+ """
+ Apply QKV attention.
+
+ :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.chunk(3, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts",
+ (q * scale).view(bs * self.n_heads, ch, length),
+ (k * scale).view(bs * self.n_heads, ch, length),
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length))
+ return a.reshape(bs, -1, length)
+
+ @staticmethod
+ def count_flops(model, _x, y):
+ return count_flops_attn(model, _x, y)
+
+
+class UNetModel(nn.Module):
+ """
+ The full UNet model with attention and timestep embedding.
+
+ :param in_channels: channels in the input Tensor.
+ :param model_channels: base channel count for the model.
+ :param out_channels: channels in the output Tensor.
+ :param num_res_blocks: number of residual blocks per downsample.
+ :param attention_resolutions: a collection of downsample rates at which
+ attention will take place. May be a set, list, or tuple.
+ For example, if this contains 4, then at 4x downsampling, attention
+ will be used.
+ :param dropout: the dropout probability.
+ :param channel_mult: channel multiplier for each level of the UNet.
+ :param conv_resample: if True, use learned convolutions for upsampling and
+ downsampling.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param num_classes: if specified (as an int), then this model will be
+ class-conditional with `num_classes` classes.
+ :param use_checkpoint: use gradient checkpointing to reduce memory usage.
+ :param num_heads: the number of attention heads in each attention layer.
+ :param num_head_channels: if specified, ignore num_heads and instead use
+ a fixed channel width per attention head.
+ :param num_heads_upsample: works with num_heads to set a different number
+ of heads for upsampling. Deprecated.
+ :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.
+ :param resblock_updown: use residual blocks for up/downsampling.
+ :param use_new_attention_order: use a different attention pattern for potentially
+ increased efficiency.
+ """
+
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ out_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ num_classes=None,
+ use_checkpoint=False,
+ use_fp16=False,
+ num_heads=1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ mixed_prediction=False,
+ ):
+ super().__init__()
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ self.image_size = image_size
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ self.num_res_blocks = num_res_blocks
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.num_classes = num_classes
+ self.use_checkpoint = use_checkpoint
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+
+ # follow LSGM
+ self.mixed_prediction = mixed_prediction # This enables mixed prediction
+ if self.mixed_prediction:
+ init = -6 * th.ones(size=[1, in_channels, 1, 1]) # hard coded for now
+ self.mixing_logit = th.nn.Parameter(init, requires_grad=True)
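+ # sigmoid(-6) ~= 0.0025, so the mixed prediction starts almost entirely on
+ # the analytic prior term (cf. LSGM).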
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if self.num_classes is not None:
+ self.label_emb = nn.Embedding(num_classes, time_embed_dim)
+
+ ch = input_ch = int(channel_mult[0] * model_channels)
+ self.input_blocks = nn.ModuleList(
+ [TimestepEmbedSequential(conv_nd(dims, in_channels, ch, 3, padding=1))]
+ )
+ self._feature_size = ch
+ input_block_chans = [ch]
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for _ in range(num_res_blocks):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=int(mult * model_channels),
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = int(mult * model_channels)
+ if ds in attention_resolutions:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+
+ self.output_blocks = nn.ModuleList([])
+ for level, mult in list(enumerate(channel_mult))[::-1]:
+ for i in range(num_res_blocks + 1):
+ ich = input_block_chans.pop()
+ layers = [
+ ResBlock(
+ ch + ich,
+ time_embed_dim,
+ dropout,
+ out_channels=int(model_channels * mult),
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = int(model_channels * mult)
+ if ds in attention_resolutions:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads_upsample,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ )
+ )
+ if level and i == num_res_blocks:
+ out_ch = ch
+ layers.append(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ up=True,
+ )
+ if resblock_updown
+ else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)
+ )
+ ds //= 2
+ self.output_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, input_ch, out_channels, 3, padding=1)),
+ )
+
+ def convert_to_fp16(self):
+ """
+ Convert the torso of the model to float16.
+ """
+ self.input_blocks.apply(convert_module_to_f16)
+ self.middle_block.apply(convert_module_to_f16)
+ self.output_blocks.apply(convert_module_to_f16)
+
+ def convert_to_fp32(self):
+ """
+ Convert the torso of the model to float32.
+ """
+ self.input_blocks.apply(convert_module_to_f32)
+ self.middle_block.apply(convert_module_to_f32)
+ self.output_blocks.apply(convert_module_to_f32)
+
+ def forward(self, x, timesteps, y=None, get_attr=''):
+ """
+ Apply the model to an input batch.
+
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param timesteps: a 1-D batch of timesteps.
+ :param y: an [N] Tensor of labels, if class-conditional.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+
+ if get_attr != '': # not breaking the forward hooks
+ return getattr(self, get_attr)
+
+ assert (y is not None) == (
+ self.num_classes is not None
+ ), "must specify y if and only if the model is class-conditional"
+
+ hs = []
+ emb = self.time_embed(timestep_embedding(timesteps, self.model_channels))
+
+ if self.num_classes is not None:
+ assert y.shape == (x.shape[0],)
+ emb = emb + self.label_emb(y)
+
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb)
+ hs.append(h)
+ h = self.middle_block(h, emb)
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(h, emb)
+ h = h.type(x.dtype)
+ return self.out(h)
+
+
+class SuperResModel(UNetModel):
+ """
+ A UNetModel that performs super-resolution.
+
+ Expects an extra kwarg `low_res` to condition on a low-resolution image.
+ """
+
+ def __init__(self, image_size, in_channels, *args, **kwargs):
+ super().__init__(image_size, in_channels * 2, *args, **kwargs)
+
+ def forward(self, x, timesteps, low_res=None, **kwargs):
+ _, _, new_height, new_width = x.shape
+ upsampled = F.interpolate(low_res, (new_height, new_width), mode="bilinear")
+ x = th.cat([x, upsampled], dim=1)
+ return super().forward(x, timesteps, **kwargs)
+
+
+class EncoderUNetModel(nn.Module):
+ """
+ The half UNet model with attention and timestep embedding.
+
+ For usage, see UNet.
+ """
+
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ out_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ use_checkpoint=False,
+ use_fp16=False,
+ num_heads=1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ pool="adaptive",
+ ):
+ super().__init__()
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ self.num_res_blocks = num_res_blocks
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.use_checkpoint = use_checkpoint
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ ch = int(channel_mult[0] * model_channels)
+ self.input_blocks = nn.ModuleList(
+ [TimestepEmbedSequential(conv_nd(dims, in_channels, ch, 3, padding=1))]
+ )
+ self._feature_size = ch
+ input_block_chans = [ch]
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for _ in range(num_res_blocks):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=int(mult * model_channels),
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = int(mult * model_channels)
+ if ds in attention_resolutions:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=num_head_channels,
+ use_new_attention_order=use_new_attention_order,
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+ self.pool = pool
+ if pool == "adaptive":
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ nn.AdaptiveAvgPool2d((1, 1)),
+ zero_module(conv_nd(dims, ch, out_channels, 1)),
+ nn.Flatten(),
+ )
+ elif pool == "attention":
+ assert num_head_channels != -1
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ AttentionPool2d(
+ (image_size // ds), ch, num_head_channels, out_channels
+ ),
+ )
+ elif pool == "spatial":
+ self.out = nn.Sequential(
+ nn.Linear(self._feature_size, 2048),
+ nn.ReLU(),
+ nn.Linear(2048, self.out_channels),
+ )
+ elif pool == "spatial_v2":
+ self.out = nn.Sequential(
+ nn.Linear(self._feature_size, 2048),
+ normalization(2048),
+ nn.SiLU(),
+ nn.Linear(2048, self.out_channels),
+ )
+ else:
+ raise NotImplementedError(f"Unexpected {pool} pooling")
+
+ def convert_to_fp16(self):
+ """
+ Convert the torso of the model to float16.
+ """
+ self.input_blocks.apply(convert_module_to_f16)
+ self.middle_block.apply(convert_module_to_f16)
+
+ def convert_to_fp32(self):
+ """
+ Convert the torso of the model to float32.
+ """
+ self.input_blocks.apply(convert_module_to_f32)
+ self.middle_block.apply(convert_module_to_f32)
+
+ def forward(self, x, timesteps):
+ """
+ Apply the model to an input batch.
+
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param timesteps: a 1-D batch of timesteps.
+ :return: an [N x K] Tensor of outputs.
+ """
+ emb = self.time_embed(timestep_embedding(timesteps, self.model_channels))
+
+ results = []
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb)
+ if self.pool.startswith("spatial"):
+ results.append(h.type(x.dtype).mean(dim=(2, 3)))
+ h = self.middle_block(h, emb)
+ if self.pool.startswith("spatial"):
+ results.append(h.type(x.dtype).mean(dim=(2, 3)))
+ h = th.cat(results, axis=-1)
+ return self.out(h)
+ else:
+ h = h.type(x.dtype)
+ return self.out(h)
diff --git a/ldm/__init__.py b/ldm/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/data/__init__.py b/ldm/data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/data/util.py b/ldm/data/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b60ceb2349e3bd7900ff325740e2022d2903b1c
--- /dev/null
+++ b/ldm/data/util.py
@@ -0,0 +1,24 @@
+import torch
+
+from ldm.modules.midas.api import load_midas_transform
+
+
+class AddMiDaS(object):
+ def __init__(self, model_type):
+ super().__init__()
+ self.transform = load_midas_transform(model_type)
+
+ def pt2np(self, x):
+ x = ((x + 1.0) * .5).detach().cpu().numpy()
+ return x
+
+ def np2pt(self, x):
+ x = torch.from_numpy(x) * 2 - 1.
+ return x
+
+ def __call__(self, sample):
+ # sample['jpg'] is tensor hwc in [-1, 1] at this point
+ x = self.pt2np(sample['jpg'])
+ x = self.transform({"image": x})["image"]
+ sample['midas_in'] = x
+ return sample
\ No newline at end of file
diff --git a/ldm/models/autoencoder.py b/ldm/models/autoencoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..d122549995ce2cd64092c81a58419ed4a15a02fd
--- /dev/null
+++ b/ldm/models/autoencoder.py
@@ -0,0 +1,219 @@
+import torch
+import pytorch_lightning as pl
+import torch.nn.functional as F
+from contextlib import contextmanager
+
+from ldm.modules.diffusionmodules.model import Encoder, Decoder
+from ldm.modules.distributions.distributions import DiagonalGaussianDistribution
+
+from ldm.util import instantiate_from_config
+from ldm.modules.ema import LitEma
+
+
+class AutoencoderKL(pl.LightningModule):
+ def __init__(self,
+ ddconfig,
+ lossconfig,
+ embed_dim,
+ ckpt_path=None,
+ ignore_keys=[],
+ image_key="image",
+ colorize_nlabels=None,
+ monitor=None,
+ ema_decay=None,
+ learn_logvar=False
+ ):
+ super().__init__()
+ self.learn_logvar = learn_logvar
+ self.image_key = image_key
+ self.encoder = Encoder(**ddconfig)
+ self.decoder = Decoder(**ddconfig)
+ self.loss = instantiate_from_config(lossconfig)
+ assert ddconfig["double_z"]
+ self.quant_conv = torch.nn.Conv2d(2*ddconfig["z_channels"], 2*embed_dim, 1)
+ self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1)
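+ # the encoder emits 2*z_channels "moments" (mean and logvar); quant_conv maps
+ # them to 2*embed_dim and DiagonalGaussianDistribution splits them into a
+ # sampleable posterior, while post_quant_conv maps latents back before decoding.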
+ self.embed_dim = embed_dim
+ if colorize_nlabels is not None:
+ assert type(colorize_nlabels)==int
+ self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1))
+ if monitor is not None:
+ self.monitor = monitor
+
+ self.use_ema = ema_decay is not None
+ if self.use_ema:
+ self.ema_decay = ema_decay
+ assert 0. < ema_decay < 1.
+ self.model_ema = LitEma(self, decay=ema_decay)
+ print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
+
+ if ckpt_path is not None:
+ self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys)
+
+ def init_from_ckpt(self, path, ignore_keys=list()):
+ sd = torch.load(path, map_location="cpu")["state_dict"]
+ keys = list(sd.keys())
+ for k in keys:
+ for ik in ignore_keys:
+ if k.startswith(ik):
+ print("Deleting key {} from state_dict.".format(k))
+ del sd[k]
+ self.load_state_dict(sd, strict=False)
+ print(f"Restored from {path}")
+
+ @contextmanager
+ def ema_scope(self, context=None):
+ if self.use_ema:
+ self.model_ema.store(self.parameters())
+ self.model_ema.copy_to(self)
+ if context is not None:
+ print(f"{context}: Switched to EMA weights")
+ try:
+ yield None
+ finally:
+ if self.use_ema:
+ self.model_ema.restore(self.parameters())
+ if context is not None:
+ print(f"{context}: Restored training weights")
+
+ def on_train_batch_end(self, *args, **kwargs):
+ if self.use_ema:
+ self.model_ema(self)
+
+ def encode(self, x):
+ h = self.encoder(x)
+ moments = self.quant_conv(h)
+ posterior = DiagonalGaussianDistribution(moments)
+ return posterior
+
+ def decode(self, z):
+ z = self.post_quant_conv(z)
+ dec = self.decoder(z)
+ return dec
+
+ def forward(self, input, sample_posterior=True):
+ posterior = self.encode(input)
+ if sample_posterior:
+ z = posterior.sample()
+ else:
+ z = posterior.mode()
+ dec = self.decode(z)
+ return dec, posterior
+
+ def get_input(self, batch, k):
+ x = batch[k]
+ if len(x.shape) == 3:
+ x = x[..., None]
+ x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float()
+ return x
+
+ def training_step(self, batch, batch_idx, optimizer_idx):
+ inputs = self.get_input(batch, self.image_key)
+ reconstructions, posterior = self(inputs)
+
+ if optimizer_idx == 0:
+ # train encoder+decoder+logvar
+ aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step,
+ last_layer=self.get_last_layer(), split="train")
+ self.log("aeloss", aeloss, prog_bar=True, logger=True, on_step=True, on_epoch=True)
+ self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=False)
+ return aeloss
+
+ if optimizer_idx == 1:
+ # train the discriminator
+ discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step,
+ last_layer=self.get_last_layer(), split="train")
+
+ self.log("discloss", discloss, prog_bar=True, logger=True, on_step=True, on_epoch=True)
+ self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False)
+ return discloss
+
+ def validation_step(self, batch, batch_idx):
+ log_dict = self._validation_step(batch, batch_idx)
+ with self.ema_scope():
+ log_dict_ema = self._validation_step(batch, batch_idx, postfix="_ema")
+ return log_dict
+
+ def _validation_step(self, batch, batch_idx, postfix=""):
+ inputs = self.get_input(batch, self.image_key)
+ reconstructions, posterior = self(inputs)
+ aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, 0, self.global_step,
+ last_layer=self.get_last_layer(), split="val"+postfix)
+
+ discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, 1, self.global_step,
+ last_layer=self.get_last_layer(), split="val"+postfix)
+
+ self.log(f"val{postfix}/rec_loss", log_dict_ae[f"val{postfix}/rec_loss"])
+ self.log_dict(log_dict_ae)
+ self.log_dict(log_dict_disc)
+ return self.log_dict
+
+ def configure_optimizers(self):
+ lr = self.learning_rate
+ ae_params_list = list(self.encoder.parameters()) + list(self.decoder.parameters()) + list(
+ self.quant_conv.parameters()) + list(self.post_quant_conv.parameters())
+ if self.learn_logvar:
+ print(f"{self.__class__.__name__}: Learning logvar")
+ ae_params_list.append(self.loss.logvar)
+ opt_ae = torch.optim.Adam(ae_params_list,
+ lr=lr, betas=(0.5, 0.9))
+ opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(),
+ lr=lr, betas=(0.5, 0.9))
+ return [opt_ae, opt_disc], []
+
+ def get_last_layer(self):
+ return self.decoder.conv_out.weight
+
+ @torch.no_grad()
+ def log_images(self, batch, only_inputs=False, log_ema=False, **kwargs):
+ log = dict()
+ x = self.get_input(batch, self.image_key)
+ x = x.to(self.device)
+ if not only_inputs:
+ xrec, posterior = self(x)
+ if x.shape[1] > 3:
+ # colorize with random projection
+ assert xrec.shape[1] > 3
+ x = self.to_rgb(x)
+ xrec = self.to_rgb(xrec)
+ log["samples"] = self.decode(torch.randn_like(posterior.sample()))
+ log["reconstructions"] = xrec
+ if log_ema or self.use_ema:
+ with self.ema_scope():
+ xrec_ema, posterior_ema = self(x)
+ if x.shape[1] > 3:
+ # colorize with random projection
+ assert xrec_ema.shape[1] > 3
+ xrec_ema = self.to_rgb(xrec_ema)
+ log["samples_ema"] = self.decode(torch.randn_like(posterior_ema.sample()))
+ log["reconstructions_ema"] = xrec_ema
+ log["inputs"] = x
+ return log
+
+ def to_rgb(self, x):
+ assert self.image_key == "segmentation"
+ if not hasattr(self, "colorize"):
+ self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x))
+ x = F.conv2d(x, weight=self.colorize)
+ x = 2.*(x-x.min())/(x.max()-x.min()) - 1.
+ return x
+
+
+class IdentityFirstStage(torch.nn.Module):
+ def __init__(self, *args, vq_interface=False, **kwargs):
+ self.vq_interface = vq_interface
+ super().__init__()
+
+ def encode(self, x, *args, **kwargs):
+ return x
+
+ def decode(self, x, *args, **kwargs):
+ return x
+
+ def quantize(self, x, *args, **kwargs):
+ if self.vq_interface:
+ return x, None, [None, None, None]
+ return x
+
+ def forward(self, x, *args, **kwargs):
+ return x
+
diff --git a/ldm/models/diffusion/__init__.py b/ldm/models/diffusion/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/models/diffusion/ddim.py b/ldm/models/diffusion/ddim.py
new file mode 100644
index 0000000000000000000000000000000000000000..27ead0ea914c64c747b64e690662899fb3801144
--- /dev/null
+++ b/ldm/models/diffusion/ddim.py
@@ -0,0 +1,336 @@
+"""SAMPLING ONLY."""
+
+import torch
+import numpy as np
+from tqdm import tqdm
+
+from ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like, extract_into_tensor
+
+
+class DDIMSampler(object):
+ def __init__(self, model, schedule="linear", **kwargs):
+ super().__init__()
+ self.model = model
+ self.ddpm_num_timesteps = model.num_timesteps
+ self.schedule = schedule
+
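+ # note: DDIMSampler is a plain object, not an nn.Module, so "register_buffer"
+ # only stores the tensor as an attribute (moved to CUDA); nothing is tracked
+ # in a state_dict.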
+ def register_buffer(self, name, attr):
+ if type(attr) == torch.Tensor:
+ if attr.device != torch.device("cuda"):
+ attr = attr.to(torch.device("cuda"))
+ setattr(self, name, attr)
+
+ def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True):
+ self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,
+ num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)
+ alphas_cumprod = self.model.alphas_cumprod
+ assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'
+ to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)
+
+ self.register_buffer('betas', to_torch(self.model.betas))
+ self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
+ self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())))
+ self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)))
+
+ # ddim sampling parameters
+ ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),
+ ddim_timesteps=self.ddim_timesteps,
+ eta=ddim_eta,verbose=verbose)
+ self.register_buffer('ddim_sigmas', ddim_sigmas)
+ self.register_buffer('ddim_alphas', ddim_alphas)
+ self.register_buffer('ddim_alphas_prev', ddim_alphas_prev)
+ self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas))
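+ # eta controls stochasticity: eta=0 yields deterministic DDIM sampling, while
+ # eta=1 matches the noise level of ancestral DDPM sampling.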
+ sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(
+ (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (
+ 1 - self.alphas_cumprod / self.alphas_cumprod_prev))
+ self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps)
+
+ @torch.no_grad()
+ def sample(self,
+ S,
+ batch_size,
+ shape,
+ conditioning=None,
+ callback=None,
+ normals_sequence=None,
+ img_callback=None,
+ quantize_x0=False,
+ eta=0.,
+ mask=None,
+ x0=None,
+ temperature=1.,
+ noise_dropout=0.,
+ score_corrector=None,
+ corrector_kwargs=None,
+ verbose=True,
+ x_T=None,
+ log_every_t=100,
+ unconditional_guidance_scale=1.,
+ unconditional_conditioning=None, # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ...
+ dynamic_threshold=None,
+ ucg_schedule=None,
+ **kwargs
+ ):
+ if conditioning is not None:
+ if isinstance(conditioning, dict):
+ ctmp = conditioning[list(conditioning.keys())[0]]
+ while isinstance(ctmp, list): ctmp = ctmp[0]
+ cbs = ctmp.shape[0]
+ if cbs != batch_size:
+ print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
+
+ elif isinstance(conditioning, list):
+ for ctmp in conditioning:
+ if ctmp.shape[0] != batch_size:
+ print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
+
+ else:
+ if conditioning.shape[0] != batch_size:
+ print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}")
+
+ self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)
+ # sampling
+ C, H, W = shape
+ size = (batch_size, C, H, W)
+ print(f'Data shape for DDIM sampling is {size}, eta {eta}')
+
+ samples, intermediates = self.ddim_sampling(conditioning, size,
+ callback=callback,
+ img_callback=img_callback,
+ quantize_denoised=quantize_x0,
+ mask=mask, x0=x0,
+ ddim_use_original_steps=False,
+ noise_dropout=noise_dropout,
+ temperature=temperature,
+ score_corrector=score_corrector,
+ corrector_kwargs=corrector_kwargs,
+ x_T=x_T,
+ log_every_t=log_every_t,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ dynamic_threshold=dynamic_threshold,
+ ucg_schedule=ucg_schedule
+ )
+ return samples, intermediates
+
+ @torch.no_grad()
+ def ddim_sampling(self, cond, shape,
+ x_T=None, ddim_use_original_steps=False,
+ callback=None, timesteps=None, quantize_denoised=False,
+ mask=None, x0=None, img_callback=None, log_every_t=100,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
+ unconditional_guidance_scale=1., unconditional_conditioning=None, dynamic_threshold=None,
+ ucg_schedule=None):
+ device = self.model.betas.device
+ b = shape[0]
+ if x_T is None:
+ img = torch.randn(shape, device=device)
+ else:
+ img = x_T
+
+ if timesteps is None:
+ timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps
+ elif timesteps is not None and not ddim_use_original_steps:
+ subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1
+ timesteps = self.ddim_timesteps[:subset_end]
+
+ intermediates = {'x_inter': [img], 'pred_x0': [img]}
+ time_range = reversed(range(0, timesteps)) if ddim_use_original_steps else np.flip(timesteps)
+ total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]
+ print(f"Running DDIM Sampling with {total_steps} timesteps")
+
+ iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps)
+
+ for i, step in enumerate(iterator):
+ index = total_steps - i - 1
+ ts = torch.full((b,), step, device=device, dtype=torch.long)
+
+ if mask is not None:
+ assert x0 is not None
+ img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass?
+ img = img_orig * mask + (1. - mask) * img
+
+ if ucg_schedule is not None:
+ assert len(ucg_schedule) == len(time_range)
+ unconditional_guidance_scale = ucg_schedule[i]
+
+ outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,
+ quantize_denoised=quantize_denoised, temperature=temperature,
+ noise_dropout=noise_dropout, score_corrector=score_corrector,
+ corrector_kwargs=corrector_kwargs,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ dynamic_threshold=dynamic_threshold)
+ img, pred_x0 = outs
+ if callback: callback(i)
+ if img_callback: img_callback(pred_x0, i)
+
+ if index % log_every_t == 0 or index == total_steps - 1:
+ intermediates['x_inter'].append(img)
+ intermediates['pred_x0'].append(pred_x0)
+
+ return img, intermediates
+
+ @torch.no_grad()
+ def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
+ unconditional_guidance_scale=1., unconditional_conditioning=None,
+ dynamic_threshold=None):
+ b, *_, device = *x.shape, x.device
+
+ if unconditional_conditioning is None or unconditional_guidance_scale == 1.:
+ model_output = self.model.apply_model(x, t, c)
+ else:
+ x_in = torch.cat([x] * 2)
+ t_in = torch.cat([t] * 2)
+ if isinstance(c, dict):
+ assert isinstance(unconditional_conditioning, dict)
+ c_in = dict()
+ for k in c:
+ if isinstance(c[k], list):
+ c_in[k] = [torch.cat([
+ unconditional_conditioning[k][i],
+ c[k][i]]) for i in range(len(c[k]))]
+ else:
+ c_in[k] = torch.cat([
+ unconditional_conditioning[k],
+ c[k]])
+ elif isinstance(c, list):
+ c_in = list()
+ assert isinstance(unconditional_conditioning, list)
+ for i in range(len(c)):
+ c_in.append(torch.cat([unconditional_conditioning[i], c[i]]))
+ else:
+ c_in = torch.cat([unconditional_conditioning, c])
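+ # single batched forward pass for both branches, then classifier-free
+ # guidance: e = e_uncond + scale * (e_cond - e_uncond); scale = 1 recovers
+ # the purely conditional prediction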
+ model_uncond, model_t = self.model.apply_model(x_in, t_in, c_in).chunk(2)
+ model_output = model_uncond + unconditional_guidance_scale * (model_t - model_uncond)
+
+ if self.model.parameterization == "v":
+ e_t = self.model.predict_eps_from_z_and_v(x, t, model_output)
+ else:
+ e_t = model_output
+
+ if score_corrector is not None:
+ assert self.model.parameterization == "eps", 'not implemented'
+ e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)
+
+ alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas
+ alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev
+ sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas
+ sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas
+ # select parameters corresponding to the currently considered timestep
+ a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)
+ a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)
+ sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)
+ sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)
+
+ # current prediction for x_0
+ if self.model.parameterization != "v":
+ pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()
+ else:
+ pred_x0 = self.model.predict_start_from_z_and_v(x, t, model_output)
+
+ if quantize_denoised:
+ pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)
+
+ if dynamic_threshold is not None:
+ raise NotImplementedError()
+
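+ # DDIM update (Song et al., 2021, Eq. 12):
+ # x_{t-1} = sqrt(a_prev) * pred_x0 + sqrt(1 - a_prev - sigma_t^2) * e_t + sigma_t * z
+ # with eta = 0, sigma_t = 0 and the step is fully deterministic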
+ # direction pointing to x_t
+ dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t
+ noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature
+ if noise_dropout > 0.:
+ noise = torch.nn.functional.dropout(noise, p=noise_dropout)
+ x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise
+ return x_prev, pred_x0
+
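+ # encode() below runs the DDIM update forward in time (deterministic DDIM
+ # inversion), mapping x0 towards x_{t_enc}; unlike stochastic_encode(), the
+ # result can be (approximately) reconstructed by decode()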
+ @torch.no_grad()
+ def encode(self, x0, c, t_enc, use_original_steps=False, return_intermediates=None,
+ unconditional_guidance_scale=1.0, unconditional_conditioning=None, callback=None):
+ num_reference_steps = self.ddpm_num_timesteps if use_original_steps else self.ddim_timesteps.shape[0]
+
+ assert t_enc <= num_reference_steps
+ num_steps = t_enc
+
+ if use_original_steps:
+ alphas_next = self.alphas_cumprod[:num_steps]
+ alphas = self.alphas_cumprod_prev[:num_steps]
+ else:
+ alphas_next = self.ddim_alphas[:num_steps]
+ alphas = torch.tensor(self.ddim_alphas_prev[:num_steps])
+
+ x_next = x0
+ intermediates = []
+ inter_steps = []
+ for i in tqdm(range(num_steps), desc='Encoding Image'):
+ t = torch.full((x0.shape[0],), i, device=self.model.device, dtype=torch.long)
+ if unconditional_guidance_scale == 1.:
+ noise_pred = self.model.apply_model(x_next, t, c)
+ else:
+ assert unconditional_conditioning is not None
+ e_t_uncond, noise_pred = torch.chunk(
+ self.model.apply_model(torch.cat((x_next, x_next)), torch.cat((t, t)),
+ torch.cat((unconditional_conditioning, c))), 2)
+ noise_pred = e_t_uncond + unconditional_guidance_scale * (noise_pred - e_t_uncond)
+
+ xt_weighted = (alphas_next[i] / alphas[i]).sqrt() * x_next
+ weighted_noise_pred = alphas_next[i].sqrt() * (
+ (1 / alphas_next[i] - 1).sqrt() - (1 / alphas[i] - 1).sqrt()) * noise_pred
+ x_next = xt_weighted + weighted_noise_pred
+ if return_intermediates and i % (
+ num_steps // return_intermediates) == 0 and i < num_steps - 1:
+ intermediates.append(x_next)
+ inter_steps.append(i)
+ elif return_intermediates and i >= num_steps - 2:
+ intermediates.append(x_next)
+ inter_steps.append(i)
+ if callback: callback(i)
+
+ out = {'x_encoded': x_next, 'intermediate_steps': inter_steps}
+ if return_intermediates:
+ out.update({'intermediates': intermediates})
+ return x_next, out
+
+ @torch.no_grad()
+ def stochastic_encode(self, x0, t, use_original_steps=False, noise=None):
+ # fast, but does not allow for exact reconstruction
+ # t serves as an index to gather the correct alphas
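+ # i.e. draws x_t ~ q(x_t | x_0) in closed form:
+ # x_t = sqrt(abar_t) * x0 + sqrt(1 - abar_t) * noise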
+ if use_original_steps:
+ sqrt_alphas_cumprod = self.sqrt_alphas_cumprod
+ sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod
+ else:
+ sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas)
+ sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas
+
+ if noise is None:
+ noise = torch.randn_like(x0)
+ return (extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 +
+ extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise)
+
+ @torch.no_grad()
+ def decode(self, x_latent, cond, t_start, unconditional_guidance_scale=1.0, unconditional_conditioning=None,
+ use_original_steps=False, callback=None):
+
+ timesteps = np.arange(self.ddpm_num_timesteps) if use_original_steps else self.ddim_timesteps
+ timesteps = timesteps[:t_start]
+
+ time_range = np.flip(timesteps)
+ total_steps = timesteps.shape[0]
+ print(f"Running DDIM Sampling with {total_steps} timesteps")
+
+ iterator = tqdm(time_range, desc='Decoding image', total=total_steps)
+ x_dec = x_latent
+ for i, step in enumerate(iterator):
+ index = total_steps - i - 1
+ ts = torch.full((x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long)
+ x_dec, _ = self.p_sample_ddim(x_dec, cond, ts, index=index, use_original_steps=use_original_steps,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning)
+ if callback: callback(i)
+ return x_dec
\ No newline at end of file
diff --git a/ldm/models/diffusion/ddpm.py b/ldm/models/diffusion/ddpm.py
new file mode 100644
index 0000000000000000000000000000000000000000..f71a44af48c8cba8e97849b7e6813b3e6f9fe83c
--- /dev/null
+++ b/ldm/models/diffusion/ddpm.py
@@ -0,0 +1,1797 @@
+"""
+wild mixture of
+https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py
+https://github.com/openai/improved-diffusion/blob/e94489283bb876ac1477d5dd7709bbbd2d9902ce/improved_diffusion/gaussian_diffusion.py
+https://github.com/CompVis/taming-transformers
+-- merci
+"""
+
+import torch
+import torch.nn as nn
+import numpy as np
+import pytorch_lightning as pl
+from torch.optim.lr_scheduler import LambdaLR
+from einops import rearrange, repeat
+from contextlib import contextmanager, nullcontext
+from functools import partial
+import itertools
+from tqdm import tqdm
+from torchvision.utils import make_grid
+from pytorch_lightning.utilities.distributed import rank_zero_only
+from omegaconf import ListConfig
+
+from ldm.util import log_txt_as_img, exists, default, ismap, isimage, mean_flat, count_params, instantiate_from_config
+from ldm.modules.ema import LitEma
+from ldm.modules.distributions.distributions import normal_kl, DiagonalGaussianDistribution
+from ldm.models.autoencoder import IdentityFirstStage, AutoencoderKL
+from ldm.modules.diffusionmodules.util import make_beta_schedule, extract_into_tensor, noise_like
+from ldm.models.diffusion.ddim import DDIMSampler
+
+
+__conditioning_keys__ = {'concat': 'c_concat',
+ 'crossattn': 'c_crossattn',
+ 'adm': 'y'}
+
+
+def disabled_train(self, mode=True):
+ """Overwrite model.train with this function to make sure train/eval mode
+ does not change anymore."""
+ return self
+
+
+def uniform_on_device(r1, r2, shape, device):
+ return (r1 - r2) * torch.rand(*shape, device=device) + r2
+
+
+class DDPM(pl.LightningModule):
+ # classic DDPM with Gaussian diffusion, in image space
+ def __init__(self,
+ unet_config,
+ timesteps=1000,
+ beta_schedule="linear",
+ loss_type="l2",
+ ckpt_path=None,
+ ignore_keys=[],
+ load_only_unet=False,
+ monitor="val/loss",
+ use_ema=True,
+ first_stage_key="image",
+ image_size=256,
+ channels=3,
+ log_every_t=100,
+ clip_denoised=True,
+ linear_start=1e-4,
+ linear_end=2e-2,
+ cosine_s=8e-3,
+ given_betas=None,
+ original_elbo_weight=0.,
+ v_posterior=0., # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta
+ l_simple_weight=1.,
+ conditioning_key=None,
+ parameterization="eps", # all assuming fixed variance schedules
+ scheduler_config=None,
+ use_positional_encodings=False,
+ learn_logvar=False,
+ logvar_init=0.,
+ make_it_fit=False,
+ ucg_training=None,
+ reset_ema=False,
+ reset_num_ema_updates=False,
+ ):
+ super().__init__()
+ assert parameterization in ["eps", "x0", "v"], 'currently only supporting "eps" and "x0" and "v"'
+ self.parameterization = parameterization
+ print(f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode")
+ self.cond_stage_model = None
+ self.clip_denoised = clip_denoised
+ self.log_every_t = log_every_t
+ self.first_stage_key = first_stage_key
+ self.image_size = image_size # try conv?
+ self.channels = channels
+ self.use_positional_encodings = use_positional_encodings
+ self.model = DiffusionWrapper(unet_config, conditioning_key)
+ count_params(self.model, verbose=True)
+ self.use_ema = use_ema
+ if self.use_ema:
+ self.model_ema = LitEma(self.model)
+ print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
+
+ self.use_scheduler = scheduler_config is not None
+ if self.use_scheduler:
+ self.scheduler_config = scheduler_config
+
+ self.v_posterior = v_posterior
+ self.original_elbo_weight = original_elbo_weight
+ self.l_simple_weight = l_simple_weight
+
+ if monitor is not None:
+ self.monitor = monitor
+ self.make_it_fit = make_it_fit
+ if reset_ema: assert exists(ckpt_path)
+ if ckpt_path is not None:
+ self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet)
+ if reset_ema:
+ assert self.use_ema
+ print(f"Resetting ema to pure model weights. This is useful when restoring from an ema-only checkpoint.")
+ self.model_ema = LitEma(self.model)
+ if reset_num_ema_updates:
+ print(" +++++++++++ WARNING: RESETTING NUM_EMA UPDATES TO ZERO +++++++++++ ")
+ assert self.use_ema
+ self.model_ema.reset_num_updates()
+
+ self.register_schedule(given_betas=given_betas, beta_schedule=beta_schedule, timesteps=timesteps,
+ linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s)
+
+ self.loss_type = loss_type
+
+ self.learn_logvar = learn_logvar
+ logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,))
+ if self.learn_logvar:
+ self.logvar = nn.Parameter(logvar, requires_grad=True)
+ else:
+ self.register_buffer('logvar', logvar)
+
+ self.ucg_training = ucg_training or dict()
+ if self.ucg_training:
+ self.ucg_prng = np.random.RandomState()
+
+ def register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000,
+ linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
+ if exists(given_betas):
+ betas = given_betas
+ else:
+ betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end,
+ cosine_s=cosine_s)
+ alphas = 1. - betas
+ alphas_cumprod = np.cumprod(alphas, axis=0)
+ alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])
+
+ timesteps, = betas.shape
+ self.num_timesteps = int(timesteps)
+ self.linear_start = linear_start
+ self.linear_end = linear_end
+ assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep'
+
+ to_torch = partial(torch.tensor, dtype=torch.float32)
+
+ self.register_buffer('betas', to_torch(betas))
+ self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
+ self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
+ self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
+ self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod)))
+ self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod)))
+ self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1)))
+
+ # calculations for posterior q(x_{t-1} | x_t, x_0)
+ posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / (
+ 1. - alphas_cumprod) + self.v_posterior * betas
+ # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t)
+ self.register_buffer('posterior_variance', to_torch(posterior_variance))
+ # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain
+ self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20))))
+ self.register_buffer('posterior_mean_coef1', to_torch(
+ betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod)))
+ self.register_buffer('posterior_mean_coef2', to_torch(
+ (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod)))
+
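+ # per-timestep weights for the VLB term; these scale the loss_vlb logged in
+ # p_losses and only enter the total loss when original_elbo_weight > 0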
+ if self.parameterization == "eps":
+ lvlb_weights = self.betas ** 2 / (
+ 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod))
+ elif self.parameterization == "x0":
+ lvlb_weights = 0.5 * torch.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod))
+ elif self.parameterization == "v":
+ lvlb_weights = torch.ones_like(self.betas ** 2 / (
+ 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod)))
+ else:
+ raise NotImplementedError("mu not supported")
+ lvlb_weights[0] = lvlb_weights[1]
+ self.register_buffer('lvlb_weights', lvlb_weights, persistent=False)
+ assert not torch.isnan(self.lvlb_weights).any()
+
+ @contextmanager
+ def ema_scope(self, context=None):
+ if self.use_ema:
+ self.model_ema.store(self.model.parameters())
+ self.model_ema.copy_to(self.model)
+ if context is not None:
+ print(f"{context}: Switched to EMA weights")
+ try:
+ yield None
+ finally:
+ if self.use_ema:
+ self.model_ema.restore(self.model.parameters())
+ if context is not None:
+ print(f"{context}: Restored training weights")
+
+ @torch.no_grad()
+ def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
+ sd = torch.load(path, map_location="cpu")
+ if "state_dict" in list(sd.keys()):
+ sd = sd["state_dict"]
+ keys = list(sd.keys())
+ for k in keys:
+ for ik in ignore_keys:
+ if k.startswith(ik):
+ print("Deleting key {} from state_dict.".format(k))
+ del sd[k]
+ if self.make_it_fit:
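+ # tile the old checkpoint weights cyclically into the (larger) new
+ # shapes, then normalize by how often each old slice was reused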
+ n_params = len([name for name, _ in
+ itertools.chain(self.named_parameters(),
+ self.named_buffers())])
+ for name, param in tqdm(
+ itertools.chain(self.named_parameters(),
+ self.named_buffers()),
+ desc="Fitting old weights to new weights",
+ total=n_params
+ ):
+ if not name in sd:
+ continue
+ old_shape = sd[name].shape
+ new_shape = param.shape
+ assert len(old_shape) == len(new_shape)
+ if len(new_shape) > 2:
+ # we only modify first two axes
+ assert new_shape[2:] == old_shape[2:]
+ # assumes first axis corresponds to output dim
+ if not new_shape == old_shape:
+ new_param = param.clone()
+ old_param = sd[name]
+ if len(new_shape) == 1:
+ for i in range(new_param.shape[0]):
+ new_param[i] = old_param[i % old_shape[0]]
+ elif len(new_shape) >= 2:
+ for i in range(new_param.shape[0]):
+ for j in range(new_param.shape[1]):
+ new_param[i, j] = old_param[i % old_shape[0], j % old_shape[1]]
+
+ n_used_old = torch.ones(old_shape[1])
+ for j in range(new_param.shape[1]):
+ n_used_old[j % old_shape[1]] += 1
+ n_used_new = torch.zeros(new_shape[1])
+ for j in range(new_param.shape[1]):
+ n_used_new[j] = n_used_old[j % old_shape[1]]
+
+ n_used_new = n_used_new[None, :]
+ while len(n_used_new.shape) < len(new_shape):
+ n_used_new = n_used_new.unsqueeze(-1)
+ new_param /= n_used_new
+
+ sd[name] = new_param
+
+ missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(
+ sd, strict=False)
+ print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
+ if len(missing) > 0:
+ print(f"Missing Keys:\n {missing}")
+ if len(unexpected) > 0:
+ print(f"\nUnexpected Keys:\n {unexpected}")
+
+ def q_mean_variance(self, x_start, t):
+ """
+ Get the distribution q(x_t | x_0).
+ :param x_start: the [N x C x ...] tensor of noiseless inputs.
+ :param t: the number of diffusion steps (minus 1). Here, 0 means one step.
+ :return: A tuple (mean, variance, log_variance), all of x_start's shape.
+ """
+ mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start)
+ variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape)
+ log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape)
+ return mean, variance, log_variance
+
+ def predict_start_from_noise(self, x_t, t, noise):
+ return (
+ extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t -
+ extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise
+ )
+
+ def predict_start_from_z_and_v(self, x_t, t, v):
+ # self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
+ # self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
+ return (
+ extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * x_t -
+ extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * v
+ )
+
+ def predict_eps_from_z_and_v(self, x_t, t, v):
+ return (
+ extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * v +
+ extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * x_t
+ )
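+ # the two helpers above invert the v-parameterization (Salimans & Ho, 2022):
+ # v = sqrt(abar_t) * eps - sqrt(1 - abar_t) * x0, so that
+ # x0 = sqrt(abar_t) * x_t - sqrt(1 - abar_t) * v
+ # eps = sqrt(abar_t) * v + sqrt(1 - abar_t) * x_t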
+
+ def q_posterior(self, x_start, x_t, t):
+ posterior_mean = (
+ extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start +
+ extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t
+ )
+ posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape)
+ posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape)
+ return posterior_mean, posterior_variance, posterior_log_variance_clipped
+
+ def p_mean_variance(self, x, t, clip_denoised: bool):
+ model_out = self.model(x, t)
+ if self.parameterization == "eps":
+ x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)
+ elif self.parameterization == "x0":
+ x_recon = model_out
+ if clip_denoised:
+ x_recon.clamp_(-1., 1.)
+
+ model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
+ return model_mean, posterior_variance, posterior_log_variance
+
+ @torch.no_grad()
+ def p_sample(self, x, t, clip_denoised=True, repeat_noise=False):
+ b, *_, device = *x.shape, x.device
+ model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised)
+ noise = noise_like(x.shape, device, repeat_noise)
+ # no noise when t == 0
+ nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
+ return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
+
+ @torch.no_grad()
+ def p_sample_loop(self, shape, return_intermediates=False):
+ device = self.betas.device
+ b = shape[0]
+ img = torch.randn(shape, device=device)
+ intermediates = [img]
+ for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps):
+ img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long),
+ clip_denoised=self.clip_denoised)
+ if i % self.log_every_t == 0 or i == self.num_timesteps - 1:
+ intermediates.append(img)
+ if return_intermediates:
+ return img, intermediates
+ return img
+
+ @torch.no_grad()
+ def sample(self, batch_size=16, return_intermediates=False):
+ image_size = self.image_size
+ channels = self.channels
+ return self.p_sample_loop((batch_size, channels, image_size, image_size),
+ return_intermediates=return_intermediates)
+
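+ # closed-form forward process q(x_t | x_0) = N(sqrt(abar_t) x_0, (1 - abar_t) I),
+ # sampled via reparameterization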
+ def q_sample(self, x_start, t, noise=None):
+ noise = default(noise, lambda: torch.randn_like(x_start))
+ return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start +
+ extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise)
+
+ def get_v(self, x, noise, t):
+ return (
+ extract_into_tensor(self.sqrt_alphas_cumprod, t, x.shape) * noise -
+ extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x.shape) * x
+ )
+
+ def get_loss(self, pred, target, mean=True):
+ if self.loss_type == 'l1':
+ loss = (target - pred).abs()
+ if mean:
+ loss = loss.mean()
+ elif self.loss_type == 'l2':
+ if mean:
+ loss = torch.nn.functional.mse_loss(target, pred)
+ else:
+ loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
+ else:
+ raise NotImplementedError("unknown loss type '{loss_type}'")
+
+ return loss
+
+ def p_losses(self, x_start, t, noise=None):
+ noise = default(noise, lambda: torch.randn_like(x_start))
+ x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
+ model_out = self.model(x_noisy, t)
+
+ loss_dict = {}
+ if self.parameterization == "eps":
+ target = noise
+ elif self.parameterization == "x0":
+ target = x_start
+ elif self.parameterization == "v":
+ target = self.get_v(x_start, noise, t)
+ else:
+ raise NotImplementedError(f"Parameterization {self.parameterization} not yet supported")
+
+ loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3])
+
+ log_prefix = 'train' if self.training else 'val'
+
+ loss_dict.update({f'{log_prefix}/loss_simple': loss.mean()})
+ loss_simple = loss.mean() * self.l_simple_weight
+
+ loss_vlb = (self.lvlb_weights[t] * loss).mean()
+ loss_dict.update({f'{log_prefix}/loss_vlb': loss_vlb})
+
+ loss = loss_simple + self.original_elbo_weight * loss_vlb
+
+ loss_dict.update({f'{log_prefix}/loss': loss})
+
+ return loss, loss_dict
+
+ def forward(self, x, *args, **kwargs):
+ # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size
+ # assert h == img_size and w == img_size, f'height and width of image must be {img_size}'
+ t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()
+ return self.p_losses(x, t, *args, **kwargs)
+
+ def get_input(self, batch, k):
+ x = batch[k]
+ if len(x.shape) == 3:
+ x = x[..., None]
+ x = rearrange(x, 'b h w c -> b c h w')
+ x = x.to(memory_format=torch.contiguous_format).float()
+ return x
+
+ def shared_step(self, batch):
+ x = self.get_input(batch, self.first_stage_key)
+ loss, loss_dict = self(x)
+ return loss, loss_dict
+
+ def training_step(self, batch, batch_idx):
+ for k in self.ucg_training:
+ p = self.ucg_training[k]["p"]
+ val = self.ucg_training[k]["val"]
+ if val is None:
+ val = ""
+ for i in range(len(batch[k])):
+ if self.ucg_prng.choice(2, p=[1 - p, p]):
+ batch[k][i] = val
+
+ loss, loss_dict = self.shared_step(batch)
+
+ self.log_dict(loss_dict, prog_bar=True,
+ logger=True, on_step=True, on_epoch=True)
+
+ self.log("global_step", self.global_step,
+ prog_bar=True, logger=True, on_step=True, on_epoch=False)
+
+ if self.use_scheduler:
+ lr = self.optimizers().param_groups[0]['lr']
+ self.log('lr_abs', lr, prog_bar=True, logger=True, on_step=True, on_epoch=False)
+
+ return loss
+
+ @torch.no_grad()
+ def validation_step(self, batch, batch_idx):
+ _, loss_dict_no_ema = self.shared_step(batch)
+ with self.ema_scope():
+ _, loss_dict_ema = self.shared_step(batch)
+ loss_dict_ema = {key + '_ema': loss_dict_ema[key] for key in loss_dict_ema}
+ self.log_dict(loss_dict_no_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)
+ self.log_dict(loss_dict_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)
+
+ def on_train_batch_end(self, *args, **kwargs):
+ if self.use_ema:
+ self.model_ema(self.model)
+
+ def _get_rows_from_list(self, samples):
+ n_imgs_per_row = len(samples)
+ denoise_grid = rearrange(samples, 'n b c h w -> b n c h w')
+ denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')
+ denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)
+ return denoise_grid
+
+ @torch.no_grad()
+ def log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs):
+ log = dict()
+ x = self.get_input(batch, self.first_stage_key)
+ N = min(x.shape[0], N)
+ n_row = min(x.shape[0], n_row)
+ x = x.to(self.device)[:N]
+ log["inputs"] = x
+
+ # get diffusion row
+ diffusion_row = list()
+ x_start = x[:n_row]
+
+ for t in range(self.num_timesteps):
+ if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
+ t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
+ t = t.to(self.device).long()
+ noise = torch.randn_like(x_start)
+ x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
+ diffusion_row.append(x_noisy)
+
+ log["diffusion_row"] = self._get_rows_from_list(diffusion_row)
+
+ if sample:
+ # get denoise row
+ with self.ema_scope("Plotting"):
+ samples, denoise_row = self.sample(batch_size=N, return_intermediates=True)
+
+ log["samples"] = samples
+ log["denoise_row"] = self._get_rows_from_list(denoise_row)
+
+ if return_keys:
+ if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:
+ return log
+ else:
+ return {key: log[key] for key in return_keys}
+ return log
+
+ def configure_optimizers(self):
+ lr = self.learning_rate
+ params = list(self.model.parameters())
+ if self.learn_logvar:
+ params = params + [self.logvar]
+ opt = torch.optim.AdamW(params, lr=lr)
+ return opt
+
+
+class LatentDiffusion(DDPM):
+ """main class"""
+
+ def __init__(self,
+ first_stage_config,
+ cond_stage_config,
+ num_timesteps_cond=None,
+ cond_stage_key="image",
+ cond_stage_trainable=False,
+ concat_mode=True,
+ cond_stage_forward=None,
+ conditioning_key=None,
+ scale_factor=1.0,
+ scale_by_std=False,
+ force_null_conditioning=False,
+ *args, **kwargs):
+ self.force_null_conditioning = force_null_conditioning
+ self.num_timesteps_cond = default(num_timesteps_cond, 1)
+ self.scale_by_std = scale_by_std
+ assert self.num_timesteps_cond <= kwargs['timesteps']
+ # for backwards compatibility after implementation of DiffusionWrapper
+ if conditioning_key is None:
+ conditioning_key = 'concat' if concat_mode else 'crossattn'
+ if cond_stage_config == '__is_unconditional__' and not self.force_null_conditioning:
+ conditioning_key = None
+ ckpt_path = kwargs.pop("ckpt_path", None)
+ reset_ema = kwargs.pop("reset_ema", False)
+ reset_num_ema_updates = kwargs.pop("reset_num_ema_updates", False)
+ ignore_keys = kwargs.pop("ignore_keys", [])
+ super().__init__(conditioning_key=conditioning_key, *args, **kwargs)
+ self.concat_mode = concat_mode
+ self.cond_stage_trainable = cond_stage_trainable
+ self.cond_stage_key = cond_stage_key
+ try:
+ self.num_downs = len(first_stage_config.params.ddconfig.ch_mult) - 1
+ except Exception:
+ self.num_downs = 0
+ if not scale_by_std:
+ self.scale_factor = scale_factor
+ else:
+ self.register_buffer('scale_factor', torch.tensor(scale_factor))
+ self.instantiate_first_stage(first_stage_config)
+ self.instantiate_cond_stage(cond_stage_config)
+ self.cond_stage_forward = cond_stage_forward
+ self.clip_denoised = False
+ self.bbox_tokenizer = None
+
+ self.restarted_from_ckpt = False
+ if ckpt_path is not None:
+ self.init_from_ckpt(ckpt_path, ignore_keys)
+ self.restarted_from_ckpt = True
+ if reset_ema:
+ assert self.use_ema
+ print(
+ f"Resetting ema to pure model weights. This is useful when restoring from an ema-only checkpoint.")
+ self.model_ema = LitEma(self.model)
+ if reset_num_ema_updates:
+ print(" +++++++++++ WARNING: RESETTING NUM_EMA UPDATES TO ZERO +++++++++++ ")
+ assert self.use_ema
+ self.model_ema.reset_num_updates()
+
+ def make_cond_schedule(self, ):
+ self.cond_ids = torch.full(size=(self.num_timesteps,), fill_value=self.num_timesteps - 1, dtype=torch.long)
+ ids = torch.round(torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond)).long()
+ self.cond_ids[:self.num_timesteps_cond] = ids
+
+ @rank_zero_only
+ @torch.no_grad()
+ def on_train_batch_start(self, batch, batch_idx, dataloader_idx):
+ # only for very first batch
+ if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 and batch_idx == 0 and not self.restarted_from_ckpt:
+ assert self.scale_factor == 1., 'rather not use custom rescaling and std-rescaling simultaneously'
+ # set rescale weight to 1./std of encodings
+ print("### USING STD-RESCALING ###")
+ x = super().get_input(batch, self.first_stage_key)
+ x = x.to(self.device)
+ encoder_posterior = self.encode_first_stage(x)
+ z = self.get_first_stage_encoding(encoder_posterior).detach()
+ del self.scale_factor
+ self.register_buffer('scale_factor', 1. / z.flatten().std())
+ print(f"setting self.scale_factor to {self.scale_factor}")
+ print("### USING STD-RESCALING ###")
+
+ def register_schedule(self,
+ given_betas=None, beta_schedule="linear", timesteps=1000,
+ linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
+ super().register_schedule(given_betas, beta_schedule, timesteps, linear_start, linear_end, cosine_s)
+
+ self.shorten_cond_schedule = self.num_timesteps_cond > 1
+ if self.shorten_cond_schedule:
+ self.make_cond_schedule()
+
+ def instantiate_first_stage(self, config):
+ model = instantiate_from_config(config)
+ self.first_stage_model = model.eval()
+ self.first_stage_model.train = disabled_train
+ for param in self.first_stage_model.parameters():
+ param.requires_grad = False
+
+ def instantiate_cond_stage(self, config):
+ if not self.cond_stage_trainable:
+ if config == "__is_first_stage__":
+ print("Using first stage also as cond stage.")
+ self.cond_stage_model = self.first_stage_model
+ elif config == "__is_unconditional__":
+ print(f"Training {self.__class__.__name__} as an unconditional model.")
+ self.cond_stage_model = None
+ # self.be_unconditional = True
+ else:
+ model = instantiate_from_config(config)
+ self.cond_stage_model = model.eval()
+ self.cond_stage_model.train = disabled_train
+ for param in self.cond_stage_model.parameters():
+ param.requires_grad = False
+ else:
+ assert config != '__is_first_stage__'
+ assert config != '__is_unconditional__'
+ model = instantiate_from_config(config)
+ self.cond_stage_model = model
+
+ def _get_denoise_row_from_list(self, samples, desc='', force_no_decoder_quantization=False):
+ denoise_row = []
+ for zd in tqdm(samples, desc=desc):
+ denoise_row.append(self.decode_first_stage(zd.to(self.device),
+ force_not_quantize=force_no_decoder_quantization))
+ n_imgs_per_row = len(denoise_row)
+ denoise_row = torch.stack(denoise_row) # n_log_step, n_row, C, H, W
+ denoise_grid = rearrange(denoise_row, 'n b c h w -> b n c h w')
+ denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')
+ denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)
+ return denoise_grid
+
+ def get_first_stage_encoding(self, encoder_posterior):
+ if isinstance(encoder_posterior, DiagonalGaussianDistribution):
+ z = encoder_posterior.sample()
+ elif isinstance(encoder_posterior, torch.Tensor):
+ z = encoder_posterior
+ else:
+ raise NotImplementedError(f"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented")
+ return self.scale_factor * z
+
+ def get_learned_conditioning(self, c):
+ if self.cond_stage_forward is None:
+ if hasattr(self.cond_stage_model, 'encode') and callable(self.cond_stage_model.encode):
+ c = self.cond_stage_model.encode(c)
+ if isinstance(c, DiagonalGaussianDistribution):
+ c = c.mode()
+ else:
+ c = self.cond_stage_model(c)
+ else:
+ assert hasattr(self.cond_stage_model, self.cond_stage_forward)
+ c = getattr(self.cond_stage_model, self.cond_stage_forward)(c)
+ return c
+
+ def meshgrid(self, h, w):
+ y = torch.arange(0, h).view(h, 1, 1).repeat(1, w, 1)
+ x = torch.arange(0, w).view(1, w, 1).repeat(h, 1, 1)
+
+ arr = torch.cat([y, x], dim=-1)
+ return arr
+
+ def delta_border(self, h, w):
+ """
+ :param h: height
+ :param w: width
+ :return: normalized distance to image border,
+ with min distance = 0 at border and max distance = 0.5 at image center
+ """
+ lower_right_corner = torch.tensor([h - 1, w - 1]).view(1, 1, 2)
+ arr = self.meshgrid(h, w) / lower_right_corner
+ dist_left_up = torch.min(arr, dim=-1, keepdims=True)[0]
+ dist_right_down = torch.min(1 - arr, dim=-1, keepdims=True)[0]
+ edge_dist = torch.min(torch.cat([dist_left_up, dist_right_down], dim=-1), dim=-1)[0]
+ return edge_dist
+
+ def get_weighting(self, h, w, Ly, Lx, device):
+ weighting = self.delta_border(h, w)
+ weighting = torch.clip(weighting, self.split_input_params["clip_min_weight"],
+ self.split_input_params["clip_max_weight"], )
+ weighting = weighting.view(1, h * w, 1).repeat(1, 1, Ly * Lx).to(device)
+
+ if self.split_input_params["tie_braker"]:
+ L_weighting = self.delta_border(Ly, Lx)
+ L_weighting = torch.clip(L_weighting,
+ self.split_input_params["clip_min_tie_weight"],
+ self.split_input_params["clip_max_tie_weight"])
+
+ L_weighting = L_weighting.view(1, 1, Ly * Lx).to(device)
+ weighting = weighting * L_weighting
+ return weighting
+
+ def get_fold_unfold(self, x, kernel_size, stride, uf=1, df=1): # todo load once not every time, shorten code
+ """
+ :param x: img of size (bs, c, h, w)
+ :return: n img crops of size (n, bs, c, kernel_size[0], kernel_size[1])
+ """
+ bs, nc, h, w = x.shape
+
+ # number of crops in image
+ Ly = (h - kernel_size[0]) // stride[0] + 1
+ Lx = (w - kernel_size[1]) // stride[1] + 1
+
+ if uf == 1 and df == 1:
+ fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
+ unfold = torch.nn.Unfold(**fold_params)
+
+ fold = torch.nn.Fold(output_size=x.shape[2:], **fold_params)
+
+ weighting = self.get_weighting(kernel_size[0], kernel_size[1], Ly, Lx, x.device).to(x.dtype)
+ normalization = fold(weighting).view(1, 1, h, w) # normalizes the overlap
+ weighting = weighting.view((1, 1, kernel_size[0], kernel_size[1], Ly * Lx))
+
+ elif uf > 1 and df == 1:
+ fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
+ unfold = torch.nn.Unfold(**fold_params)
+
+ fold_params2 = dict(kernel_size=(kernel_size[0] * uf, kernel_size[1] * uf),
+ dilation=1, padding=0,
+ stride=(stride[0] * uf, stride[1] * uf))
+ fold = torch.nn.Fold(output_size=(x.shape[2] * uf, x.shape[3] * uf), **fold_params2)
+
+ weighting = self.get_weighting(kernel_size[0] * uf, kernel_size[1] * uf, Ly, Lx, x.device).to(x.dtype)
+ normalization = fold(weighting).view(1, 1, h * uf, w * uf) # normalizes the overlap
+ weighting = weighting.view((1, 1, kernel_size[0] * uf, kernel_size[1] * uf, Ly * Lx))
+
+ elif df > 1 and uf == 1:
+ fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
+ unfold = torch.nn.Unfold(**fold_params)
+
+ fold_params2 = dict(kernel_size=(kernel_size[0] // df, kernel_size[1] // df),
+ dilation=1, padding=0,
+ stride=(stride[0] // df, stride[1] // df))
+ fold = torch.nn.Fold(output_size=(x.shape[2] // df, x.shape[3] // df), **fold_params2)
+
+ weighting = self.get_weighting(kernel_size[0] // df, kernel_size[1] // df, Ly, Lx, x.device).to(x.dtype)
+ normalization = fold(weighting).view(1, 1, h // df, w // df) # normalizes the overlap
+ weighting = weighting.view((1, 1, kernel_size[0] // df, kernel_size[1] // df, Ly * Lx))
+
+ else:
+ raise NotImplementedError
+
+ return fold, unfold, normalization, weighting
+
+ @torch.no_grad()
+ def get_input(self, batch, k, return_first_stage_outputs=False, force_c_encode=False,
+ cond_key=None, return_original_cond=False, bs=None, return_x=False):
+ x = super().get_input(batch, k)
+ if bs is not None:
+ x = x[:bs]
+ x = x.to(self.device)
+ encoder_posterior = self.encode_first_stage(x)
+ z = self.get_first_stage_encoding(encoder_posterior).detach()
+
+ if self.model.conditioning_key is not None and not self.force_null_conditioning:
+ if cond_key is None:
+ cond_key = self.cond_stage_key
+ if cond_key != self.first_stage_key:
+ if cond_key in ['caption', 'coordinates_bbox', "txt"]:
+ xc = batch[cond_key]
+ elif cond_key in ['class_label', 'cls']:
+ xc = batch
+ else:
+ xc = super().get_input(batch, cond_key).to(self.device)
+ else:
+ xc = x
+ if not self.cond_stage_trainable or force_c_encode:
+ if isinstance(xc, dict) or isinstance(xc, list):
+ c = self.get_learned_conditioning(xc)
+ else:
+ c = self.get_learned_conditioning(xc.to(self.device))
+ else:
+ c = xc
+ if bs is not None:
+ c = c[:bs]
+
+ if self.use_positional_encodings:
+ pos_x, pos_y = self.compute_latent_shifts(batch)
+ ckey = __conditioning_keys__[self.model.conditioning_key]
+ c = {ckey: c, 'pos_x': pos_x, 'pos_y': pos_y}
+
+ else:
+ c = None
+ xc = None
+ if self.use_positional_encodings:
+ pos_x, pos_y = self.compute_latent_shifts(batch)
+ c = {'pos_x': pos_x, 'pos_y': pos_y}
+ out = [z, c]
+ if return_first_stage_outputs:
+ xrec = self.decode_first_stage(z)
+ out.extend([x, xrec])
+ if return_x:
+ out.extend([x])
+ if return_original_cond:
+ out.append(xc)
+ return out
+
+ @torch.no_grad()
+ def decode_first_stage(self, z, predict_cids=False, force_not_quantize=False):
+ if predict_cids:
+ if z.dim() == 4:
+ z = torch.argmax(z.exp(), dim=1).long()
+ z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None)
+ z = rearrange(z, 'b h w c -> b c h w').contiguous()
+
+ z = 1. / self.scale_factor * z
+ return self.first_stage_model.decode(z)
+
+ @torch.no_grad()
+ def encode_first_stage(self, x):
+ return self.first_stage_model.encode(x)
+
+ def shared_step(self, batch, **kwargs):
+ x, c = self.get_input(batch, self.first_stage_key)
+ loss = self(x, c)
+ return loss
+
+ def forward(self, x, c, *args, **kwargs):
+ t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()
+ if self.model.conditioning_key is not None:
+ assert c is not None
+ if self.cond_stage_trainable:
+ c = self.get_learned_conditioning(c)
+ if self.shorten_cond_schedule: # TODO: drop this option
+ tc = self.cond_ids[t].to(self.device)
+ c = self.q_sample(x_start=c, t=tc, noise=torch.randn_like(c.float()))
+ return self.p_losses(x, c, t, *args, **kwargs)
+
+ def apply_model(self, x_noisy, t, cond, return_ids=False):
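+ # normalize `cond` into the dict format DiffusionWrapper.forward expects,
+ # i.e. {'c_concat': [...]} or {'c_crossattn': [...]}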
+ if isinstance(cond, dict):
+ # hybrid case, cond is expected to be a dict
+ pass
+ else:
+ if not isinstance(cond, list):
+ cond = [cond]
+ key = 'c_concat' if self.model.conditioning_key == 'concat' else 'c_crossattn'
+ cond = {key: cond}
+
+ x_recon = self.model(x_noisy, t, **cond)
+
+ if isinstance(x_recon, tuple) and not return_ids:
+ return x_recon[0]
+ else:
+ return x_recon
+
+ def _predict_eps_from_xstart(self, x_t, t, pred_xstart):
+ return (extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - pred_xstart) / \
+ extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape)
+
+ def _prior_bpd(self, x_start):
+ """
+ Get the prior KL term for the variational lower-bound, measured in
+ bits-per-dim.
+ This term can't be optimized, as it only depends on the encoder.
+ :param x_start: the [N x C x ...] tensor of inputs.
+ :return: a batch of [N] KL values (in bits), one per batch element.
+ """
+ batch_size = x_start.shape[0]
+ t = torch.tensor([self.num_timesteps - 1] * batch_size, device=x_start.device)
+ qt_mean, _, qt_log_variance = self.q_mean_variance(x_start, t)
+ kl_prior = normal_kl(mean1=qt_mean, logvar1=qt_log_variance, mean2=0.0, logvar2=0.0)
+ return mean_flat(kl_prior) / np.log(2.0)
+
+ def p_losses(self, x_start, cond, t, noise=None):
+ noise = default(noise, lambda: torch.randn_like(x_start))
+ x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
+ model_output = self.apply_model(x_noisy, t, cond)
+
+ loss_dict = {}
+ prefix = 'train' if self.training else 'val'
+
+ if self.parameterization == "x0":
+ target = x_start
+ elif self.parameterization == "eps":
+ target = noise
+ elif self.parameterization == "v":
+ target = self.get_v(x_start, noise, t)
+ else:
+ raise NotImplementedError()
+
+ loss_simple = self.get_loss(model_output, target, mean=False).mean([1, 2, 3])
+ loss_dict.update({f'{prefix}/loss_simple': loss_simple.mean()})
+
+ logvar_t = self.logvar[t].to(self.device)
+ loss = loss_simple / torch.exp(logvar_t) + logvar_t
+ # loss = loss_simple / torch.exp(self.logvar) + self.logvar
+ if self.learn_logvar:
+ loss_dict.update({f'{prefix}/loss_gamma': loss.mean()})
+ loss_dict.update({'logvar': self.logvar.data.mean()})
+
+ loss = self.l_simple_weight * loss.mean()
+
+ loss_vlb = self.get_loss(model_output, target, mean=False).mean(dim=(1, 2, 3))
+ loss_vlb = (self.lvlb_weights[t] * loss_vlb).mean()
+ loss_dict.update({f'{prefix}/loss_vlb': loss_vlb})
+ loss += (self.original_elbo_weight * loss_vlb)
+ loss_dict.update({f'{prefix}/loss': loss})
+
+ return loss, loss_dict
+
+ def p_mean_variance(self, x, c, t, clip_denoised: bool, return_codebook_ids=False, quantize_denoised=False,
+ return_x0=False, score_corrector=None, corrector_kwargs=None):
+ t_in = t
+ model_out = self.apply_model(x, t_in, c, return_ids=return_codebook_ids)
+
+ if score_corrector is not None:
+ assert self.parameterization == "eps"
+ model_out = score_corrector.modify_score(self, model_out, x, t, c, **corrector_kwargs)
+
+ if return_codebook_ids:
+ model_out, logits = model_out
+
+ if self.parameterization == "eps":
+ x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)
+ elif self.parameterization == "x0":
+ x_recon = model_out
+ else:
+ raise NotImplementedError()
+
+ if clip_denoised:
+ x_recon.clamp_(-1., 1.)
+ if quantize_denoised:
+ x_recon, _, [_, _, indices] = self.first_stage_model.quantize(x_recon)
+ model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
+ if return_codebook_ids:
+ return model_mean, posterior_variance, posterior_log_variance, logits
+ elif return_x0:
+ return model_mean, posterior_variance, posterior_log_variance, x_recon
+ else:
+ return model_mean, posterior_variance, posterior_log_variance
+
+ @torch.no_grad()
+ def p_sample(self, x, c, t, clip_denoised=False, repeat_noise=False,
+ return_codebook_ids=False, quantize_denoised=False, return_x0=False,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None):
+ b, *_, device = *x.shape, x.device
+ outputs = self.p_mean_variance(x=x, c=c, t=t, clip_denoised=clip_denoised,
+ return_codebook_ids=return_codebook_ids,
+ quantize_denoised=quantize_denoised,
+ return_x0=return_x0,
+ score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)
+ if return_codebook_ids:
+ raise DeprecationWarning("Support dropped.")
+ model_mean, _, model_log_variance, logits = outputs
+ elif return_x0:
+ model_mean, _, model_log_variance, x0 = outputs
+ else:
+ model_mean, _, model_log_variance = outputs
+
+ noise = noise_like(x.shape, device, repeat_noise) * temperature
+ if noise_dropout > 0.:
+ noise = torch.nn.functional.dropout(noise, p=noise_dropout)
+ # no noise when t == 0
+ nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
+
+ if return_codebook_ids:
+ return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, logits.argmax(dim=1)
+ if return_x0:
+ return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, x0
+ else:
+ return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
+
+ @torch.no_grad()
+ def progressive_denoising(self, cond, shape, verbose=True, callback=None, quantize_denoised=False,
+ img_callback=None, mask=None, x0=None, temperature=1., noise_dropout=0.,
+ score_corrector=None, corrector_kwargs=None, batch_size=None, x_T=None, start_T=None,
+ log_every_t=None):
+ if not log_every_t:
+ log_every_t = self.log_every_t
+ timesteps = self.num_timesteps
+ if batch_size is not None:
+ b = batch_size
+ shape = [batch_size] + list(shape)
+ else:
+ b = batch_size = shape[0]
+ if x_T is None:
+ img = torch.randn(shape, device=self.device)
+ else:
+ img = x_T
+ intermediates = []
+ if cond is not None:
+ if isinstance(cond, dict):
+ cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else
+ list(map(lambda x: x[:batch_size], cond[key])) for key in cond}
+ else:
+ cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]
+
+ if start_T is not None:
+ timesteps = min(timesteps, start_T)
+ iterator = tqdm(reversed(range(0, timesteps)), desc='Progressive Generation',
+ total=timesteps) if verbose else reversed(
+ range(0, timesteps))
+ if type(temperature) == float:
+ temperature = [temperature] * timesteps
+
+ for i in iterator:
+ ts = torch.full((b,), i, device=self.device, dtype=torch.long)
+ if self.shorten_cond_schedule:
+ assert self.model.conditioning_key != 'hybrid'
+ tc = self.cond_ids[ts].to(cond.device)
+ cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))
+
+ img, x0_partial = self.p_sample(img, cond, ts,
+ clip_denoised=self.clip_denoised,
+ quantize_denoised=quantize_denoised, return_x0=True,
+ temperature=temperature[i], noise_dropout=noise_dropout,
+ score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)
+ if mask is not None:
+ assert x0 is not None
+ img_orig = self.q_sample(x0, ts)
+ img = img_orig * mask + (1. - mask) * img
+
+ if i % log_every_t == 0 or i == timesteps - 1:
+ intermediates.append(x0_partial)
+ if callback: callback(i)
+ if img_callback: img_callback(img, i)
+ return img, intermediates
+
+ @torch.no_grad()
+ def p_sample_loop(self, cond, shape, return_intermediates=False,
+ x_T=None, verbose=True, callback=None, timesteps=None, quantize_denoised=False,
+ mask=None, x0=None, img_callback=None, start_T=None,
+ log_every_t=None):
+
+ if not log_every_t:
+ log_every_t = self.log_every_t
+ device = self.betas.device
+ b = shape[0]
+ if x_T is None:
+ img = torch.randn(shape, device=device)
+ else:
+ img = x_T
+
+ intermediates = [img]
+ if timesteps is None:
+ timesteps = self.num_timesteps
+
+ if start_T is not None:
+ timesteps = min(timesteps, start_T)
+ iterator = tqdm(reversed(range(0, timesteps)), desc='Sampling t', total=timesteps) if verbose else reversed(
+ range(0, timesteps))
+
+ if mask is not None:
+ assert x0 is not None
+ assert x0.shape[2:3] == mask.shape[2:3] # spatial size has to match
+
+ for i in iterator:
+ ts = torch.full((b,), i, device=device, dtype=torch.long)
+ if self.shorten_cond_schedule:
+ assert self.model.conditioning_key != 'hybrid'
+ tc = self.cond_ids[ts].to(cond.device)
+ cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))
+
+ img = self.p_sample(img, cond, ts,
+ clip_denoised=self.clip_denoised,
+ quantize_denoised=quantize_denoised)
+ if mask is not None:
+ img_orig = self.q_sample(x0, ts)
+ img = img_orig * mask + (1. - mask) * img
+
+ if i % log_every_t == 0 or i == timesteps - 1:
+ intermediates.append(img)
+ if callback: callback(i)
+ if img_callback: img_callback(img, i)
+
+ if return_intermediates:
+ return img, intermediates
+ return img
+
+ @torch.no_grad()
+ def sample(self, cond, batch_size=16, return_intermediates=False, x_T=None,
+ verbose=True, timesteps=None, quantize_denoised=False,
+ mask=None, x0=None, shape=None, **kwargs):
+ if shape is None:
+ shape = (batch_size, self.channels, self.image_size, self.image_size)
+ if cond is not None:
+ if isinstance(cond, dict):
+ cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else
+ list(map(lambda x: x[:batch_size], cond[key])) for key in cond}
+ else:
+ cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]
+ return self.p_sample_loop(cond,
+ shape,
+ return_intermediates=return_intermediates, x_T=x_T,
+ verbose=verbose, timesteps=timesteps, quantize_denoised=quantize_denoised,
+ mask=mask, x0=x0)
+
+ @torch.no_grad()
+ def sample_log(self, cond, batch_size, ddim, ddim_steps, **kwargs):
+ if ddim:
+ ddim_sampler = DDIMSampler(self)
+ shape = (self.channels, self.image_size, self.image_size)
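+ # note: shape omits the batch dimension; DDIMSampler.sample prepends
+ # batch_size internally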
+ samples, intermediates = ddim_sampler.sample(ddim_steps, batch_size,
+ shape, cond, verbose=False, **kwargs)
+
+ else:
+ samples, intermediates = self.sample(cond=cond, batch_size=batch_size,
+ return_intermediates=True, **kwargs)
+
+ return samples, intermediates
+
+ @torch.no_grad()
+ def get_unconditional_conditioning(self, batch_size, null_label=None):
+ if null_label is not None:
+ xc = null_label
+ if isinstance(xc, ListConfig):
+ xc = list(xc)
+ if isinstance(xc, dict) or isinstance(xc, list):
+ c = self.get_learned_conditioning(xc)
+ else:
+ if hasattr(xc, "to"):
+ xc = xc.to(self.device)
+ c = self.get_learned_conditioning(xc)
+ else:
+ if self.cond_stage_key in ["class_label", "cls"]:
+ xc = self.cond_stage_model.get_unconditional_conditioning(batch_size, device=self.device)
+ return self.get_learned_conditioning(xc)
+ else:
+ raise NotImplementedError("todo")
+ if isinstance(c, list): # in case the encoder gives us a list
+ for i in range(len(c)):
+ c[i] = repeat(c[i], '1 ... -> b ...', b=batch_size).to(self.device)
+ else:
+ c = repeat(c, '1 ... -> b ...', b=batch_size).to(self.device)
+ return c
+
+ @torch.no_grad()
+ def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=50, ddim_eta=0., return_keys=None,
+ quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
+ plot_diffusion_rows=True, unconditional_guidance_scale=1., unconditional_guidance_label=None,
+ use_ema_scope=True,
+ **kwargs):
+ ema_scope = self.ema_scope if use_ema_scope else nullcontext
+ use_ddim = ddim_steps is not None
+
+ log = dict()
+ z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key,
+ return_first_stage_outputs=True,
+ force_c_encode=True,
+ return_original_cond=True,
+ bs=N)
+ N = min(x.shape[0], N)
+ n_row = min(x.shape[0], n_row)
+ log["inputs"] = x
+ log["reconstruction"] = xrec
+ if self.model.conditioning_key is not None:
+ if hasattr(self.cond_stage_model, "decode"):
+ xc = self.cond_stage_model.decode(c)
+ log["conditioning"] = xc
+ elif self.cond_stage_key in ["caption", "txt"]:
+ xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
+ log["conditioning"] = xc
+ elif self.cond_stage_key in ['class_label', "cls"]:
+ try:
+ xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
+ log['conditioning'] = xc
+ except KeyError:
+ # probably no "human_label" in batch
+ pass
+ elif isimage(xc):
+ log["conditioning"] = xc
+ if ismap(xc):
+ log["original_conditioning"] = self.to_rgb(xc)
+
+ if plot_diffusion_rows:
+ # get diffusion row
+ diffusion_row = list()
+ z_start = z[:n_row]
+ for t in range(self.num_timesteps):
+ if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
+ t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
+ t = t.to(self.device).long()
+ noise = torch.randn_like(z_start)
+ z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
+ diffusion_row.append(self.decode_first_stage(z_noisy))
+
+ diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
+ diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
+ diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
+ diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
+ log["diffusion_row"] = diffusion_grid
+
+ if sample:
+ # get denoise row
+ with ema_scope("Sampling"):
+ samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta)
+ # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
+ x_samples = self.decode_first_stage(samples)
+ log["samples"] = x_samples
+ if plot_denoise_rows:
+ denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
+ log["denoise_row"] = denoise_grid
+
+ if quantize_denoised and not isinstance(self.first_stage_model, AutoencoderKL) and not isinstance(
+ self.first_stage_model, IdentityFirstStage):
+ # also display when quantizing x0 while sampling
+ with ema_scope("Plotting Quantized Denoised"):
+ samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta,
+ quantize_denoised=True)
+ # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True,
+ # quantize_denoised=True)
+ x_samples = self.decode_first_stage(samples.to(self.device))
+ log["samples_x0_quantized"] = x_samples
+
+ if unconditional_guidance_scale > 1.0:
+ uc = self.get_unconditional_conditioning(N, unconditional_guidance_label)
+ if self.model.conditioning_key == "crossattn-adm":
+ uc = {"c_crossattn": [uc], "c_adm": c["c_adm"]}
+ with ema_scope("Sampling with classifier-free guidance"):
+ samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=uc,
+ )
+ x_samples_cfg = self.decode_first_stage(samples_cfg)
+ log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
+
+ if inpaint:
+ # make a simple center square
+ b, h, w = z.shape[0], z.shape[2], z.shape[3]
+ mask = torch.ones(N, h, w).to(self.device)
+ # zeros will be filled in
+ mask[:, h // 4:3 * h // 4, w // 4:3 * w // 4] = 0.
+ mask = mask[:, None, ...]
+ with ema_scope("Plotting Inpaint"):
+ samples, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim, eta=ddim_eta,
+ ddim_steps=ddim_steps, x0=z[:N], mask=mask)
+ x_samples = self.decode_first_stage(samples.to(self.device))
+ log["samples_inpainting"] = x_samples
+ log["mask"] = mask
+
+ # outpaint
+ mask = 1. - mask
+ with ema_scope("Plotting Outpaint"):
+ samples, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim, eta=ddim_eta,
+ ddim_steps=ddim_steps, x0=z[:N], mask=mask)
+ x_samples = self.decode_first_stage(samples.to(self.device))
+ log["samples_outpainting"] = x_samples
+
+ if plot_progressive_rows:
+ with ema_scope("Plotting Progressives"):
+ img, progressives = self.progressive_denoising(c,
+ shape=(self.channels, self.image_size, self.image_size),
+ batch_size=N)
+ prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation")
+ log["progressive_row"] = prog_row
+
+ if return_keys:
+ if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:
+ return log
+ else:
+ return {key: log[key] for key in return_keys}
+ return log
+
+ def configure_optimizers(self):
+ lr = self.learning_rate
+ params = list(self.model.parameters())
+ if self.cond_stage_trainable:
+ print(f"{self.__class__.__name__}: Also optimizing conditioner params!")
+ params = params + list(self.cond_stage_model.parameters())
+ if self.learn_logvar:
+ print('Diffusion model optimizing logvar')
+ params.append(self.logvar)
+ opt = torch.optim.AdamW(params, lr=lr)
+ if self.use_scheduler:
+ assert 'target' in self.scheduler_config
+ scheduler = instantiate_from_config(self.scheduler_config)
+
+ print("Setting up LambdaLR scheduler...")
+ scheduler = [
+ {
+ 'scheduler': LambdaLR(opt, lr_lambda=scheduler.schedule),
+ 'interval': 'step',
+ 'frequency': 1
+ }]
+ return [opt], scheduler
+ return opt
+
+ @torch.no_grad()
+ def to_rgb(self, x):
+ x = x.float()
+ if not hasattr(self, "colorize"):
+ self.colorize = torch.randn(3, x.shape[1], 1, 1).to(x)
+ x = nn.functional.conv2d(x, weight=self.colorize)
+ x = 2. * (x - x.min()) / (x.max() - x.min()) - 1.
+ return x
+
+
+class DiffusionWrapper(pl.LightningModule):
+ def __init__(self, diff_model_config, conditioning_key):
+ super().__init__()
+ self.sequential_cross_attn = diff_model_config.pop("sequential_crossattn", False)
+ self.diffusion_model = instantiate_from_config(diff_model_config)
+ self.conditioning_key = conditioning_key
+ assert self.conditioning_key in [None, 'concat', 'crossattn', 'hybrid', 'adm', 'hybrid-adm', 'crossattn-adm']
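+ # routing summary for the branches below: 'concat' channel-concatenates the
+ # conditioning with x, 'crossattn' feeds it as cross-attention context,
+ # 'hybrid' does both, and the '*-adm' variants additionally pass a vector
+ # embedding as y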
+
+ def forward(self, x, t, c_concat: list = None, c_crossattn: list = None, c_adm=None):
+ if self.conditioning_key is None:
+ out = self.diffusion_model(x, t)
+ elif self.conditioning_key == 'concat':
+ xc = torch.cat([x] + c_concat, dim=1)
+ out = self.diffusion_model(xc, t)
+ elif self.conditioning_key == 'crossattn':
+ if not self.sequential_cross_attn:
+ cc = torch.cat(c_crossattn, 1)
+ else:
+ cc = c_crossattn
+ out = self.diffusion_model(x, t, context=cc)
+ elif self.conditioning_key == 'hybrid':
+ xc = torch.cat([x] + c_concat, dim=1)
+ cc = torch.cat(c_crossattn, 1)
+ out = self.diffusion_model(xc, t, context=cc)
+ elif self.conditioning_key == 'hybrid-adm':
+ assert c_adm is not None
+ xc = torch.cat([x] + c_concat, dim=1)
+ cc = torch.cat(c_crossattn, 1)
+ out = self.diffusion_model(xc, t, context=cc, y=c_adm)
+ elif self.conditioning_key == 'crossattn-adm':
+ assert c_adm is not None
+ cc = torch.cat(c_crossattn, 1)
+ out = self.diffusion_model(x, t, context=cc, y=c_adm)
+ elif self.conditioning_key == 'adm':
+ cc = c_crossattn[0]
+ out = self.diffusion_model(x, t, y=cc)
+ else:
+ raise NotImplementedError()
+
+ return out
+
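+# Illustrative note (not part of the original file): how the conditioning modes above
+# combine their inputs, assuming a UNet wrapped by DiffusionWrapper:
+#   'concat'    - channel-wise concatenation: unet(cat([x, *c_concat], dim=1), t)
+#   'crossattn' - context tokens:             unet(x, t, context=cat(c_crossattn, dim=1))
+#   'hybrid'    - both of the above
+#   'adm'       - a vector/class condition passed as `y`
+# For example, with a latent x of shape (B, 4, 64, 64) and an inpainting c_concat of
+# mask (1 ch) + masked-image latent (4 ch), 'concat' mode feeds a (B, 9, 64, 64)
+# tensor to the diffusion UNet.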
+
+class LatentUpscaleDiffusion(LatentDiffusion):
+ def __init__(self, *args, low_scale_config, low_scale_key="LR", noise_level_key=None, **kwargs):
+ super().__init__(*args, **kwargs)
+ # assumes that neither the cond_stage nor the low_scale_model contain trainable params
+ assert not self.cond_stage_trainable
+ self.instantiate_low_stage(low_scale_config)
+ self.low_scale_key = low_scale_key
+ self.noise_level_key = noise_level_key
+
+ def instantiate_low_stage(self, config):
+ model = instantiate_from_config(config)
+ self.low_scale_model = model.eval()
+ self.low_scale_model.train = disabled_train
+ for param in self.low_scale_model.parameters():
+ param.requires_grad = False
+
+ @torch.no_grad()
+ def get_input(self, batch, k, cond_key=None, bs=None, log_mode=False):
+ if not log_mode:
+ z, c = super().get_input(batch, k, force_c_encode=True, bs=bs)
+ else:
+ z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
+ force_c_encode=True, return_original_cond=True, bs=bs)
+ x_low = batch[self.low_scale_key][:bs]
+ x_low = rearrange(x_low, 'b h w c -> b c h w')
+ x_low = x_low.to(memory_format=torch.contiguous_format).float()
+ zx, noise_level = self.low_scale_model(x_low)
+ if self.noise_level_key is not None:
+ # get noise level from batch instead, e.g. when extracting a custom noise level for bsr
+ raise NotImplementedError('TODO')
+
+ all_conds = {"c_concat": [zx], "c_crossattn": [c], "c_adm": noise_level}
+ if log_mode:
+ # TODO: maybe disable if too expensive
+ x_low_rec = self.low_scale_model.decode(zx)
+ return z, all_conds, x, xrec, xc, x_low, x_low_rec, noise_level
+ return z, all_conds
+
+ @torch.no_grad()
+ def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
+ plot_denoise_rows=False, plot_progressive_rows=True, plot_diffusion_rows=True,
+ unconditional_guidance_scale=1., unconditional_guidance_label=None, use_ema_scope=True,
+ **kwargs):
+ ema_scope = self.ema_scope if use_ema_scope else nullcontext
+ use_ddim = ddim_steps is not None
+
+ log = dict()
+ z, c, x, xrec, xc, x_low, x_low_rec, noise_level = self.get_input(batch, self.first_stage_key, bs=N,
+ log_mode=True)
+ N = min(x.shape[0], N)
+ n_row = min(x.shape[0], n_row)
+ log["inputs"] = x
+ log["reconstruction"] = xrec
+ log["x_lr"] = x_low
+ log[f"x_lr_rec_@noise_levels{'-'.join(map(lambda x: str(x), list(noise_level.cpu().numpy())))}"] = x_low_rec
+ if self.model.conditioning_key is not None:
+ if hasattr(self.cond_stage_model, "decode"):
+ xc = self.cond_stage_model.decode(c)
+ log["conditioning"] = xc
+ elif self.cond_stage_key in ["caption", "txt"]:
+ xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
+ log["conditioning"] = xc
+ elif self.cond_stage_key in ['class_label', 'cls']:
+ xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
+ log['conditioning'] = xc
+ elif isimage(xc):
+ log["conditioning"] = xc
+ if ismap(xc):
+ log["original_conditioning"] = self.to_rgb(xc)
+
+ if plot_diffusion_rows:
+ # get diffusion row
+ diffusion_row = list()
+ z_start = z[:n_row]
+ for t in range(self.num_timesteps):
+ if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
+ t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
+ t = t.to(self.device).long()
+ noise = torch.randn_like(z_start)
+ z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
+ diffusion_row.append(self.decode_first_stage(z_noisy))
+
+ diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
+ diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
+ diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
+ diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
+ log["diffusion_row"] = diffusion_grid
+
+ if sample:
+ # get denoise row
+ with ema_scope("Sampling"):
+ samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta)
+ # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
+ x_samples = self.decode_first_stage(samples)
+ log["samples"] = x_samples
+ if plot_denoise_rows:
+ denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
+ log["denoise_row"] = denoise_grid
+
+ if unconditional_guidance_scale > 1.0:
+ uc_tmp = self.get_unconditional_conditioning(N, unconditional_guidance_label)
+ # TODO explore better "unconditional" choices for the other keys
+ # maybe guide away from empty text label and highest noise level and maximally degraded zx?
+ uc = dict()
+ for k in c:
+ if k == "c_crossattn":
+ assert isinstance(c[k], list) and len(c[k]) == 1
+ uc[k] = [uc_tmp]
+ elif k == "c_adm": # todo: only run with text-based guidance?
+ assert isinstance(c[k], torch.Tensor)
+ #uc[k] = torch.ones_like(c[k]) * self.low_scale_model.max_noise_level
+ uc[k] = c[k]
+ elif isinstance(c[k], list):
+ uc[k] = [c[k][i] for i in range(len(c[k]))]
+ else:
+ uc[k] = c[k]
+
+ with ema_scope("Sampling with classifier-free guidance"):
+ samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=uc,
+ )
+ x_samples_cfg = self.decode_first_stage(samples_cfg)
+ log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
+
+ if plot_progressive_rows:
+ with ema_scope("Plotting Progressives"):
+ img, progressives = self.progressive_denoising(c,
+ shape=(self.channels, self.image_size, self.image_size),
+ batch_size=N)
+ prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation")
+ log["progressive_row"] = prog_row
+
+ return log
+
+
+class LatentFinetuneDiffusion(LatentDiffusion):
+ """
+ Basis for different finetunes, such as inpainting or depth2image
+ To disable finetuning mode, set finetune_keys to None
+ """
+
+ def __init__(self,
+ concat_keys: tuple,
+ finetune_keys=("model.diffusion_model.input_blocks.0.0.weight",
+ "model_ema.diffusion_modelinput_blocks00weight"
+ ),
+ keep_finetune_dims=4,
+ # if the model was previously trained without concat mode, keep only this many input channels
+ c_concat_log_start=None, # to log reconstruction of c_concat codes
+ c_concat_log_end=None,
+ *args, **kwargs
+ ):
+ ckpt_path = kwargs.pop("ckpt_path", None)
+ ignore_keys = kwargs.pop("ignore_keys", list())
+ super().__init__(*args, **kwargs)
+ self.finetune_keys = finetune_keys
+ self.concat_keys = concat_keys
+ self.keep_dims = keep_finetune_dims
+ self.c_concat_log_start = c_concat_log_start
+ self.c_concat_log_end = c_concat_log_end
+ if exists(self.finetune_keys): assert exists(ckpt_path), 'can only finetune from a given checkpoint'
+ if exists(ckpt_path):
+ self.init_from_ckpt(ckpt_path, ignore_keys)
+
+ def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
+ sd = torch.load(path, map_location="cpu")
+ if "state_dict" in list(sd.keys()):
+ sd = sd["state_dict"]
+ keys = list(sd.keys())
+ for k in keys:
+ for ik in ignore_keys:
+ if k.startswith(ik):
+ print("Deleting key {} from state_dict.".format(k))
+ del sd[k]
+
+ # make it explicit, finetune by including extra input channels
+ if exists(self.finetune_keys) and k in self.finetune_keys:
+ new_entry = None
+ for name, param in self.named_parameters():
+ if name in self.finetune_keys:
+ print(
+ f"modifying key '{name}' and keeping its original {self.keep_dims} (channels) dimensions only")
+ new_entry = torch.zeros_like(param) # zero init
+ assert exists(new_entry), 'did not find matching parameter to modify'
+ new_entry[:, :self.keep_dims, ...] = sd[k]
+ sd[k] = new_entry
+
+ missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(
+ sd, strict=False)
+ print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
+ if len(missing) > 0:
+ print(f"Missing Keys: {missing}")
+ if len(unexpected) > 0:
+ print(f"Unexpected Keys: {unexpected}")
+
+ @torch.no_grad()
+ def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
+ quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
+ plot_diffusion_rows=True, unconditional_guidance_scale=1., unconditional_guidance_label=None,
+ use_ema_scope=True,
+ **kwargs):
+ ema_scope = self.ema_scope if use_ema_scope else nullcontext
+ use_ddim = ddim_steps is not None
+
+ log = dict()
+ z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key, bs=N, return_first_stage_outputs=True)
+ c_cat, c = c["c_concat"][0], c["c_crossattn"][0]
+ N = min(x.shape[0], N)
+ n_row = min(x.shape[0], n_row)
+ log["inputs"] = x
+ log["reconstruction"] = xrec
+ if self.model.conditioning_key is not None:
+ if hasattr(self.cond_stage_model, "decode"):
+ xc = self.cond_stage_model.decode(c)
+ log["conditioning"] = xc
+ elif self.cond_stage_key in ["caption", "txt"]:
+ xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
+ log["conditioning"] = xc
+ elif self.cond_stage_key in ['class_label', 'cls']:
+ xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
+ log['conditioning'] = xc
+ elif isimage(xc):
+ log["conditioning"] = xc
+ if ismap(xc):
+ log["original_conditioning"] = self.to_rgb(xc)
+
+ if not (self.c_concat_log_start is None and self.c_concat_log_end is None):
+ log["c_concat_decoded"] = self.decode_first_stage(c_cat[:, self.c_concat_log_start:self.c_concat_log_end])
+
+ if plot_diffusion_rows:
+ # get diffusion row
+ diffusion_row = list()
+ z_start = z[:n_row]
+ for t in range(self.num_timesteps):
+ if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
+ t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
+ t = t.to(self.device).long()
+ noise = torch.randn_like(z_start)
+ z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
+ diffusion_row.append(self.decode_first_stage(z_noisy))
+
+ diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
+ diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
+ diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
+ diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
+ log["diffusion_row"] = diffusion_grid
+
+ if sample:
+ # get denoise row
+ with ema_scope("Sampling"):
+ samples, z_denoise_row = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
+ batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta)
+ # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
+ x_samples = self.decode_first_stage(samples)
+ log["samples"] = x_samples
+ if plot_denoise_rows:
+ denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
+ log["denoise_row"] = denoise_grid
+
+ if unconditional_guidance_scale > 1.0:
+ uc_cross = self.get_unconditional_conditioning(N, unconditional_guidance_label)
+ uc_cat = c_cat
+ uc_full = {"c_concat": [uc_cat], "c_crossattn": [uc_cross]}
+ with ema_scope("Sampling with classifier-free guidance"):
+ samples_cfg, _ = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
+ batch_size=N, ddim=use_ddim,
+ ddim_steps=ddim_steps, eta=ddim_eta,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=uc_full,
+ )
+ x_samples_cfg = self.decode_first_stage(samples_cfg)
+ log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
+
+ return log
+
+
+class LatentInpaintDiffusion(LatentFinetuneDiffusion):
+ """
+ can either run as a pure inpainting model (concat mode only) or with mixed conditioning,
+ e.g. mask as concat and text via cross-attn.
+ To disable finetuning mode, set finetune_keys to None
+ """
+
+ def __init__(self,
+ concat_keys=("mask", "masked_image"),
+ masked_image_key="masked_image",
+ *args, **kwargs
+ ):
+ super().__init__(concat_keys, *args, **kwargs)
+ self.masked_image_key = masked_image_key
+ assert self.masked_image_key in concat_keys
+
+ @torch.no_grad()
+ def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
+ # note: restricted to non-trainable encoders currently
+ assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for inpainting'
+ z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
+ force_c_encode=True, return_original_cond=True, bs=bs)
+
+ assert exists(self.concat_keys)
+ c_cat = list()
+ for ck in self.concat_keys:
+ cc = rearrange(batch[ck], 'b h w c -> b c h w').to(memory_format=torch.contiguous_format).float()
+ if bs is not None:
+ cc = cc[:bs]
+ cc = cc.to(self.device)
+ bchw = z.shape
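+ # the mask is resized to the latent resolution; the masked image is VAE-encoded into latent space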
+ if ck != self.masked_image_key:
+ cc = torch.nn.functional.interpolate(cc, size=bchw[-2:])
+ else:
+ cc = self.get_first_stage_encoding(self.encode_first_stage(cc))
+ c_cat.append(cc)
+ c_cat = torch.cat(c_cat, dim=1)
+ all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
+ if return_first_stage_outputs:
+ return z, all_conds, x, xrec, xc
+ return z, all_conds
+
+ @torch.no_grad()
+ def log_images(self, *args, **kwargs):
+ log = super(LatentInpaintDiffusion, self).log_images(*args, **kwargs)
+ log["masked_image"] = rearrange(args[0]["masked_image"],
+ 'b h w c -> b c h w').to(memory_format=torch.contiguous_format).float()
+ return log
+
+
+class LatentDepth2ImageDiffusion(LatentFinetuneDiffusion):
+ """
+ condition on monocular depth estimation
+ """
+
+ def __init__(self, depth_stage_config, concat_keys=("midas_in",), *args, **kwargs):
+ super().__init__(concat_keys=concat_keys, *args, **kwargs)
+ self.depth_model = instantiate_from_config(depth_stage_config)
+ self.depth_stage_key = concat_keys[0]
+
+ @torch.no_grad()
+ def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
+ # note: restricted to non-trainable encoders currently
+ assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for depth2img'
+ z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
+ force_c_encode=True, return_original_cond=True, bs=bs)
+
+ assert exists(self.concat_keys)
+ assert len(self.concat_keys) == 1
+ c_cat = list()
+ for ck in self.concat_keys:
+ cc = batch[ck]
+ if bs is not None:
+ cc = cc[:bs]
+ cc = cc.to(self.device)
+ cc = self.depth_model(cc)
+ cc = torch.nn.functional.interpolate(
+ cc,
+ size=z.shape[2:],
+ mode="bicubic",
+ align_corners=False,
+ )
+
+ depth_min, depth_max = torch.amin(cc, dim=[1, 2, 3], keepdim=True), torch.amax(cc, dim=[1, 2, 3],
+ keepdim=True)
+ cc = 2. * (cc - depth_min) / (depth_max - depth_min + 0.001) - 1.
+ c_cat.append(cc)
+ c_cat = torch.cat(c_cat, dim=1)
+ all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
+ if return_first_stage_outputs:
+ return z, all_conds, x, xrec, xc
+ return z, all_conds
+
+ @torch.no_grad()
+ def log_images(self, *args, **kwargs):
+ log = super().log_images(*args, **kwargs)
+ depth = self.depth_model(args[0][self.depth_stage_key])
+ depth_min, depth_max = torch.amin(depth, dim=[1, 2, 3], keepdim=True), \
+ torch.amax(depth, dim=[1, 2, 3], keepdim=True)
+ log["depth"] = 2. * (depth - depth_min) / (depth_max - depth_min) - 1.
+ return log
+
+
+class LatentUpscaleFinetuneDiffusion(LatentFinetuneDiffusion):
+ """
+ condition on low-res image (and optionally on some spatial noise augmentation)
+ """
+ def __init__(self, concat_keys=("lr",), reshuffle_patch_size=None,
+ low_scale_config=None, low_scale_key=None, *args, **kwargs):
+ super().__init__(concat_keys=concat_keys, *args, **kwargs)
+ self.reshuffle_patch_size = reshuffle_patch_size
+ self.low_scale_model = None
+ if low_scale_config is not None:
+ print("Initializing a low-scale model")
+ assert exists(low_scale_key)
+ self.instantiate_low_stage(low_scale_config)
+ self.low_scale_key = low_scale_key
+
+ def instantiate_low_stage(self, config):
+ model = instantiate_from_config(config)
+ self.low_scale_model = model.eval()
+ self.low_scale_model.train = disabled_train
+ for param in self.low_scale_model.parameters():
+ param.requires_grad = False
+
+ @torch.no_grad()
+ def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
+ # note: restricted to non-trainable encoders currently
+ assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for upscaling-ft'
+ z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
+ force_c_encode=True, return_original_cond=True, bs=bs)
+
+ assert exists(self.concat_keys)
+ assert len(self.concat_keys) == 1
+ # optionally make spatial noise_level here
+ c_cat = list()
+ noise_level = None
+ for ck in self.concat_keys:
+ cc = batch[ck]
+ cc = rearrange(cc, 'b h w c -> b c h w')
+ if exists(self.reshuffle_patch_size):
+ assert isinstance(self.reshuffle_patch_size, int)
+ cc = rearrange(cc, 'b c (p1 h) (p2 w) -> b (p1 p2 c) h w',
+ p1=self.reshuffle_patch_size, p2=self.reshuffle_patch_size)
+ if bs is not None:
+ cc = cc[:bs]
+ cc = cc.to(self.device)
+ if exists(self.low_scale_model) and ck == self.low_scale_key:
+ cc, noise_level = self.low_scale_model(cc)
+ c_cat.append(cc)
+ c_cat = torch.cat(c_cat, dim=1)
+ if exists(noise_level):
+ all_conds = {"c_concat": [c_cat], "c_crossattn": [c], "c_adm": noise_level}
+ else:
+ all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
+ if return_first_stage_outputs:
+ return z, all_conds, x, xrec, xc
+ return z, all_conds
+
+ @torch.no_grad()
+ def log_images(self, *args, **kwargs):
+ log = super().log_images(*args, **kwargs)
+ log["lr"] = rearrange(args[0]["lr"], 'b h w c -> b c h w')
+ return log
diff --git a/ldm/models/diffusion/dpm_solver/__init__.py b/ldm/models/diffusion/dpm_solver/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7427f38c07530afbab79154ea8aaf88c4bf70a08
--- /dev/null
+++ b/ldm/models/diffusion/dpm_solver/__init__.py
@@ -0,0 +1 @@
+from .sampler import DPMSolverSampler
\ No newline at end of file
diff --git a/ldm/models/diffusion/dpm_solver/dpm_solver.py b/ldm/models/diffusion/dpm_solver/dpm_solver.py
new file mode 100644
index 0000000000000000000000000000000000000000..095e5ba3ce0b1aa7f4b3f1e2e5d8fff7cfe6dc8c
--- /dev/null
+++ b/ldm/models/diffusion/dpm_solver/dpm_solver.py
@@ -0,0 +1,1154 @@
+import torch
+import torch.nn.functional as F
+import math
+from tqdm import tqdm
+
+
+class NoiseScheduleVP:
+ def __init__(
+ self,
+ schedule='discrete',
+ betas=None,
+ alphas_cumprod=None,
+ continuous_beta_0=0.1,
+ continuous_beta_1=20.,
+ ):
+ """Create a wrapper class for the forward SDE (VP type).
+ ***
+ Update: We support discrete-time diffusion models by implementing a piecewise linear interpolation for log_alpha_t.
+ We recommend schedule='discrete' for discrete-time diffusion models, especially for high-resolution images.
+ ***
+ The forward SDE ensures that the condition distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ).
+ We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper).
+ Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have:
+ log_alpha_t = self.marginal_log_mean_coeff(t)
+ sigma_t = self.marginal_std(t)
+ lambda_t = self.marginal_lambda(t)
+ Moreover, as lambda(t) is an invertible function, we also support its inverse function:
+ t = self.inverse_lambda(lambda_t)
+ ===============================================================
+ We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]).
+ 1. For discrete-time DPMs:
+ For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by:
+ t_i = (i + 1) / N
+ e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1.
+ We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3.
+ Args:
+ betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details)
+ alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details)
+ Note that we always have alphas_cumprod = cumprod(1 - betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`.
+ **Important**: Please pay special attention to the args for `alphas_cumprod`:
+ The `alphas_cumprod` is the \hat{alpha_n} array in the notation of DDPM. Specifically, DDPMs assume that
+ q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ).
+ Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have
+ alpha_{t_n} = \sqrt{\hat{alpha_n}},
+ and
+ log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}).
+ 2. For continuous-time DPMs:
+ We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise
+ schedule are the default settings in DDPM and improved-DDPM:
+ Args:
+ beta_min: A `float` number. The smallest beta for the linear schedule.
+ beta_max: A `float` number. The largest beta for the linear schedule.
+ cosine_s: A `float` number. The hyperparameter in the cosine schedule.
+ cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule.
+ T: A `float` number. The ending time of the forward process.
+ ===============================================================
+ Args:
+ schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs,
+ 'linear' or 'cosine' for continuous-time DPMs.
+ Returns:
+ A wrapper object of the forward SDE (VP type).
+
+ ===============================================================
+ Example:
+ # For discrete-time DPMs, given betas (the beta array for n = 0, 1, ..., N - 1):
+ >>> ns = NoiseScheduleVP('discrete', betas=betas)
+ # For discrete-time DPMs, given alphas_cumprod (the \hat{alpha_n} array for n = 0, 1, ..., N - 1):
+ >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod)
+ # For continuous-time DPMs (VPSDE), linear schedule:
+ >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.)
+ """
+
+ if schedule not in ['discrete', 'linear', 'cosine']:
+ raise ValueError(
+ "Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(
+ schedule))
+
+ self.schedule = schedule
+ if schedule == 'discrete':
+ if betas is not None:
+ log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0)
+ else:
+ assert alphas_cumprod is not None
+ log_alphas = 0.5 * torch.log(alphas_cumprod)
+ self.total_N = len(log_alphas)
+ self.T = 1.
+ self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1))
+ self.log_alpha_array = log_alphas.reshape((1, -1,))
+ else:
+ self.total_N = 1000
+ self.beta_0 = continuous_beta_0
+ self.beta_1 = continuous_beta_1
+ self.cosine_s = 0.008
+ self.cosine_beta_max = 999.
+ self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (
+ 1. + self.cosine_s) / math.pi - self.cosine_s
+ self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.))
+ self.schedule = schedule
+ if schedule == 'cosine':
+ # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T.
+ # Note that T = 0.9946 may not be the optimal setting. However, we find it works well.
+ self.T = 0.9946
+ else:
+ self.T = 1.
+
+ def marginal_log_mean_coeff(self, t):
+ """
+ Compute log(alpha_t) of a given continuous-time label t in [0, T].
+ """
+ if self.schedule == 'discrete':
+ return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device),
+ self.log_alpha_array.to(t.device)).reshape((-1))
+ elif self.schedule == 'linear':
+ return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0
+ elif self.schedule == 'cosine':
+ log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.))
+ log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0
+ return log_alpha_t
+
+ def marginal_alpha(self, t):
+ """
+ Compute alpha_t of a given continuous-time label t in [0, T].
+ """
+ return torch.exp(self.marginal_log_mean_coeff(t))
+
+ def marginal_std(self, t):
+ """
+ Compute sigma_t of a given continuous-time label t in [0, T].
+ """
+ return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t)))
+
+ def marginal_lambda(self, t):
+ """
+ Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T].
+ """
+ log_mean_coeff = self.marginal_log_mean_coeff(t)
+ log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff))
+ return log_mean_coeff - log_std
+
+ def inverse_lambda(self, lamb):
+ """
+ Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t.
+ """
+ if self.schedule == 'linear':
+ tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb))
+ Delta = self.beta_0 ** 2 + tmp
+ return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0)
+ elif self.schedule == 'discrete':
+ log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb)
+ t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]),
+ torch.flip(self.t_array.to(lamb.device), [1]))
+ return t.reshape((-1,))
+ else:
+ log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb))
+ t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (
+ 1. + self.cosine_s) / math.pi - self.cosine_s
+ t = t_fn(log_alpha)
+ return t
+
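+# Sanity-check sketch (illustrative, not part of the original file): every VP schedule
+# here satisfies alpha_t^2 + sigma_t^2 = 1, and inverse_lambda (approximately) inverts
+# marginal_lambda. Assuming `betas` is a 1-D tensor of DDPM betas:
+#
+#   ns = NoiseScheduleVP('discrete', betas=betas)
+#   t = torch.rand(4) * ns.T
+#   assert torch.allclose(ns.marginal_alpha(t) ** 2 + ns.marginal_std(t) ** 2,
+#                         torch.ones_like(t), atol=1e-5)
+#   assert torch.allclose(ns.inverse_lambda(ns.marginal_lambda(t)), t, atol=1e-3)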
+
+def model_wrapper(
+ model,
+ noise_schedule,
+ model_type="noise",
+ model_kwargs={},
+ guidance_type="uncond",
+ condition=None,
+ unconditional_condition=None,
+ guidance_scale=1.,
+ classifier_fn=None,
+ classifier_kwargs={},
+):
+ """Create a wrapper function for the noise prediction model.
+ DPM-Solver needs to solve the continuous-time diffusion ODEs. For DPMs trained on discrete-time labels, we first
+ wrap the model function into a noise prediction model that accepts continuous time as input.
+ We support four types of the diffusion model by setting `model_type`:
+ 1. "noise": noise prediction model. (Trained by predicting noise).
+ 2. "x_start": data prediction model. (Trained by predicting the data x_0 at time 0).
+ 3. "v": velocity prediction model. (Trained by predicting the velocity).
+ The "v" prediction is derivation detailed in Appendix D of [1], and is used in Imagen-Video [2].
+ [1] Salimans, Tim, and Jonathan Ho. "Progressive distillation for fast sampling of diffusion models."
+ arXiv preprint arXiv:2202.00512 (2022).
+ [2] Ho, Jonathan, et al. "Imagen Video: High Definition Video Generation with Diffusion Models."
+ arXiv preprint arXiv:2210.02303 (2022).
+
+ 4. "score": marginal score function. (Trained by denoising score matching).
+ Note that the score function and the noise prediction model follow a simple relationship:
+ ```
+ noise(x_t, t) = -sigma_t * score(x_t, t)
+ ```
+ We support three types of guided sampling by DPMs by setting `guidance_type`:
+ 1. "uncond": unconditional sampling by DPMs.
+ The input `model` has the following format:
+ ``
+ model(x, t_input, **model_kwargs) -> noise | x_start | v | score
+ ``
+ 2. "classifier": classifier guidance sampling [3] by DPMs and another classifier.
+ The input `model` has the following format:
+ ``
+ model(x, t_input, **model_kwargs) -> noise | x_start | v | score
+ ``
+ The input `classifier_fn` has the following format:
+ ``
+ classifier_fn(x, t_input, cond, **classifier_kwargs) -> logits(x, t_input, cond)
+ ``
+ [3] P. Dhariwal and A. Q. Nichol, "Diffusion models beat GANs on image synthesis,"
+ in Advances in Neural Information Processing Systems, vol. 34, 2021, pp. 8780-8794.
+ 3. "classifier-free": classifier-free guidance sampling by conditional DPMs.
+ The input `model` has the following format:
+ ``
+ model(x, t_input, cond, **model_kwargs) -> noise | x_start | v | score
+ ``
+ And if cond == `unconditional_condition`, the model output is the unconditional DPM output.
+ [4] Ho, Jonathan, and Tim Salimans. "Classifier-free diffusion guidance."
+ arXiv preprint arXiv:2207.12598 (2022).
+
+ The `t_input` is the time label of the model, which may be discrete-time labels (i.e. 0 to 999)
+ or continuous-time labels (i.e. epsilon to T).
+ We wrap the model function so that it accepts only `x` and `t_continuous` as inputs and outputs the predicted noise:
+ ``
+ def model_fn(x, t_continuous) -> noise:
+ t_input = get_model_input_time(t_continuous)
+ return noise_pred(model, x, t_input, **model_kwargs)
+ ``
+ where `t_continuous` is the continuous time labels (i.e. epsilon to T). And we use `model_fn` for DPM-Solver.
+ ===============================================================
+ Args:
+ model: A diffusion model with the corresponding format described above.
+ noise_schedule: A noise schedule object, such as NoiseScheduleVP.
+ model_type: A `str`. The parameterization type of the diffusion model.
+ "noise" or "x_start" or "v" or "score".
+ model_kwargs: A `dict`. A dict for the other inputs of the model function.
+ guidance_type: A `str`. The type of the guidance for sampling.
+ "uncond" or "classifier" or "classifier-free".
+ condition: A pytorch tensor. The condition for the guided sampling.
+ Only used for "classifier" or "classifier-free" guidance type.
+ unconditional_condition: A pytorch tensor. The condition for the unconditional sampling.
+ Only used for "classifier-free" guidance type.
+ guidance_scale: A `float`. The scale for the guided sampling.
+ classifier_fn: A classifier function. Only used for the classifier guidance.
+ classifier_kwargs: A `dict`. A dict for the other inputs of the classifier function.
+ Returns:
+ A noise prediction model that accepts the noised data and the continuous time as the inputs.
+ """
+
+ def get_model_input_time(t_continuous):
+ """
+ Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time.
+ For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N].
+ For continuous-time DPMs, we just use `t_continuous`.
+ """
+ if noise_schedule.schedule == 'discrete':
+ return (t_continuous - 1. / noise_schedule.total_N) * 1000.
+ else:
+ return t_continuous
+
+ def noise_pred_fn(x, t_continuous, cond=None):
+ if t_continuous.reshape((-1,)).shape[0] == 1:
+ t_continuous = t_continuous.expand((x.shape[0]))
+ t_input = get_model_input_time(t_continuous)
+ if cond is None:
+ output = model(x, t_input, **model_kwargs)
+ else:
+ output = model(x, t_input, cond, **model_kwargs)
+ if model_type == "noise":
+ return output
+ elif model_type == "x_start":
+ alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous)
+ dims = x.dim()
+ return (x - expand_dims(alpha_t, dims) * output) / expand_dims(sigma_t, dims)
+ elif model_type == "v":
+ alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous)
+ dims = x.dim()
+ return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x
+ elif model_type == "score":
+ sigma_t = noise_schedule.marginal_std(t_continuous)
+ dims = x.dim()
+ return -expand_dims(sigma_t, dims) * output
+
+ def cond_grad_fn(x, t_input):
+ """
+ Compute the gradient of the classifier, i.e. nabla_{x} log p_t(cond | x_t).
+ """
+ with torch.enable_grad():
+ x_in = x.detach().requires_grad_(True)
+ log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs)
+ return torch.autograd.grad(log_prob.sum(), x_in)[0]
+
+ def model_fn(x, t_continuous):
+ """
+ The noise prediction model function that is used for DPM-Solver.
+ """
+ if t_continuous.reshape((-1,)).shape[0] == 1:
+ t_continuous = t_continuous.expand((x.shape[0]))
+ if guidance_type == "uncond":
+ return noise_pred_fn(x, t_continuous)
+ elif guidance_type == "classifier":
+ assert classifier_fn is not None
+ t_input = get_model_input_time(t_continuous)
+ cond_grad = cond_grad_fn(x, t_input)
+ sigma_t = noise_schedule.marginal_std(t_continuous)
+ noise = noise_pred_fn(x, t_continuous)
+ return noise - guidance_scale * expand_dims(sigma_t, dims=cond_grad.dim()) * cond_grad
+ elif guidance_type == "classifier-free":
+ if guidance_scale == 1. or unconditional_condition is None:
+ return noise_pred_fn(x, t_continuous, cond=condition)
+ else:
+ x_in = torch.cat([x] * 2)
+ t_in = torch.cat([t_continuous] * 2)
+ c_in = torch.cat([unconditional_condition, condition])
+ noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2)
+ return noise_uncond + guidance_scale * (noise - noise_uncond)
+
+ assert model_type in ["noise", "x_start", "v", "score"]  # "score" is converted to a noise prediction in noise_pred_fn
+ assert guidance_type in ["uncond", "classifier", "classifier-free"]
+ return model_fn
+
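+# Usage sketch (illustrative; `unet`, `alphas_cumprod`, `cond` and `uncond` are assumed
+# to exist and are not defined in this file): wrap a discrete-time epsilon-prediction
+# model for classifier-free guidance, then hand the wrapped function to DPM_Solver below.
+#
+#   ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod)
+#   model_fn = model_wrapper(unet, ns, model_type="noise",
+#                            guidance_type="classifier-free",
+#                            condition=cond, unconditional_condition=uncond,
+#                            guidance_scale=7.5)
+#   dpm_solver = DPM_Solver(model_fn, ns, predict_x0=True, thresholding=False)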
+
+class DPM_Solver:
+ def __init__(self, model_fn, noise_schedule, predict_x0=False, thresholding=False, max_val=1.):
+ """Construct a DPM-Solver.
+ We support both the noise prediction model ("predicting epsilon") and the data prediction model ("predicting x0").
+ If `predict_x0` is False, we use the solver for the noise prediction model (DPM-Solver).
+ If `predict_x0` is True, we use the solver for the data prediction model (DPM-Solver++).
+ In such case, we further support the "dynamic thresholding" in [1] when `thresholding` is True.
+ The "dynamic thresholding" can greatly improve the sample quality for pixel-space DPMs with large guidance scales.
+ Args:
+ model_fn: A noise prediction model function which accepts the continuous-time input (t in [epsilon, T]):
+ ``
+ def model_fn(x, t_continuous):
+ return noise
+ ``
+ noise_schedule: A noise schedule object, such as NoiseScheduleVP.
+ predict_x0: A `bool`. If true, use the data prediction model; else, use the noise prediction model.
+ thresholding: A `bool`. Valid when `predict_x0` is True. Whether to use the "dynamic thresholding" in [1].
+ max_val: A `float`. Valid when both `predict_x0` and `thresholding` are True. The max value for thresholding.
+
+ [1] Chitwan Saharia, William Chan, Saurabh Saxena, Lala Li, Jay Whang, Emily Denton, Seyed Kamyar Seyed Ghasemipour, Burcu Karagol Ayan, S Sara Mahdavi, Rapha Gontijo Lopes, et al. Photorealistic text-to-image diffusion models with deep language understanding. arXiv preprint arXiv:2205.11487, 2022b.
+ """
+ self.model = model_fn
+ self.noise_schedule = noise_schedule
+ self.predict_x0 = predict_x0
+ self.thresholding = thresholding
+ self.max_val = max_val
+
+ def noise_prediction_fn(self, x, t):
+ """
+ Return the noise prediction model.
+ """
+ return self.model(x, t)
+
+ def data_prediction_fn(self, x, t):
+ """
+ Return the data prediction model (with thresholding).
+ """
+ noise = self.noise_prediction_fn(x, t)
+ dims = x.dim()
+ alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t)
+ x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims)
+ if self.thresholding:
+ p = 0.995 # A hyperparameter in the paper of "Imagen" [1].
+ s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1)
+ s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims)
+ x0 = torch.clamp(x0, -s, s) / s
+ return x0
+
+ def model_fn(self, x, t):
+ """
+ Convert the model to the noise prediction model or the data prediction model.
+ """
+ if self.predict_x0:
+ return self.data_prediction_fn(x, t)
+ else:
+ return self.noise_prediction_fn(x, t)
+
+ def get_time_steps(self, skip_type, t_T, t_0, N, device):
+ """Compute the intermediate time steps for sampling.
+ Args:
+ skip_type: A `str`. The type for the spacing of the time steps. We support three types:
+ - 'logSNR': uniform logSNR for the time steps.
+ - 'time_uniform': uniform time for the time steps. (**Recommended for high-resolution data**.)
+ - 'time_quadratic': quadratic time for the time steps. (Used in DDIM for low-resolution data.)
+ t_T: A `float`. The starting time of the sampling (default is T).
+ t_0: A `float`. The ending time of the sampling (default is epsilon).
+ N: An `int`. The number of time-step intervals (N + 1 time steps are returned).
+ device: A torch device.
+ Returns:
+ A pytorch tensor of the time steps, with the shape (N + 1,).
+ """
+ if skip_type == 'logSNR':
+ lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device))
+ lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device))
+ logSNR_steps = torch.linspace(lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1).to(device)
+ return self.noise_schedule.inverse_lambda(logSNR_steps)
+ elif skip_type == 'time_uniform':
+ return torch.linspace(t_T, t_0, N + 1).to(device)
+ elif skip_type == 'time_quadratic':
+ t_order = 2
+ t = torch.linspace(t_T ** (1. / t_order), t_0 ** (1. / t_order), N + 1).pow(t_order).to(device)
+ return t
+ else:
+ raise ValueError(
+ "Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type))
+
+ def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device):
+ """
+ Get the order of each step for sampling by the singlestep DPM-Solver.
+ We combine DPM-Solver-1, -2 and -3 to use all the function evaluations; the combination is named "DPM-Solver-fast".
+ Given a fixed number of function evaluations by `steps`, the sampling procedure by DPM-Solver-fast is:
+ - If order == 1:
+ We take `steps` of DPM-Solver-1 (i.e. DDIM).
+ - If order == 2:
+ - Denote K = (steps // 2). We take K or (K + 1) intermediate time steps for sampling.
+ - If steps % 2 == 0, we use K steps of DPM-Solver-2.
+ - If steps % 2 == 1, we use K steps of DPM-Solver-2 and 1 step of DPM-Solver-1.
+ - If order == 3:
+ - Denote K = (steps // 3 + 1). We take K intermediate time steps for sampling.
+ - If steps % 3 == 0, we use (K - 2) steps of DPM-Solver-3, and 1 step of DPM-Solver-2 and 1 step of DPM-Solver-1.
+ - If steps % 3 == 1, we use (K - 1) steps of DPM-Solver-3 and 1 step of DPM-Solver-1.
+ - If steps % 3 == 2, we use (K - 1) steps of DPM-Solver-3 and 1 step of DPM-Solver-2.
+ ============================================
+ Args:
+ order: An `int`. The max order for the solver (2 or 3).
+ steps: An `int`. The total number of function evaluations (NFE).
+ skip_type: A `str`. The type for the spacing of the time steps. We support three types:
+ - 'logSNR': uniform logSNR for the time steps.
+ - 'time_uniform': uniform time for the time steps. (**Recommended for high-resolution data**.)
+ - 'time_quadratic': quadratic time for the time steps. (Used in DDIM for low-resolution data.)
+ t_T: A `float`. The starting time of the sampling (default is T).
+ t_0: A `float`. The ending time of the sampling (default is epsilon).
+ device: A torch device.
+ Returns:
+ orders: A list of the solver order of each step.
+ """
+ if order == 3:
+ K = steps // 3 + 1
+ if steps % 3 == 0:
+ orders = [3, ] * (K - 2) + [2, 1]
+ elif steps % 3 == 1:
+ orders = [3, ] * (K - 1) + [1]
+ else:
+ orders = [3, ] * (K - 1) + [2]
+ elif order == 2:
+ if steps % 2 == 0:
+ K = steps // 2
+ orders = [2, ] * K
+ else:
+ K = steps // 2 + 1
+ orders = [2, ] * (K - 1) + [1]
+ elif order == 1:
+ K = 1
+ orders = [1, ] * steps
+ else:
+ raise ValueError("'order' must be '1' or '2' or '3'.")
+ if skip_type == 'logSNR':
+ # To reproduce the results in DPM-Solver paper
+ timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device)
+ else:
+ timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[
+ torch.cumsum(torch.tensor([0, ] + orders), dim=0).to(device)]
+ return timesteps_outer, orders
+
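+ # Worked example (illustrative): with steps=20 and order=3, K = 20 // 3 + 1 = 7 and
+ # steps % 3 == 2, so orders = [3] * 6 + [2]: exactly 6 * 3 + 2 = 20 function
+ # evaluations spread over the K outer time steps.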
+ def denoise_to_zero_fn(self, x, s):
+ """
+ Denoise at the final step, which is equivalent to solving the ODE from lambda_s to infinity with a first-order discretization.
+ """
+ return self.data_prediction_fn(x, s)
+
+ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=False):
+ """
+ DPM-Solver-1 (equivalent to DDIM) from time `s` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ s: A pytorch tensor. The starting time, with the shape (x.shape[0],).
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ model_s: A pytorch tensor. The model function evaluated at time `s`.
+ If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it.
+ return_intermediate: A `bool`. If true, also return the model value at time `s`.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ ns = self.noise_schedule
+ dims = x.dim()
+ lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t)
+ h = lambda_t - lambda_s
+ log_alpha_s, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(t)
+ sigma_s, sigma_t = ns.marginal_std(s), ns.marginal_std(t)
+ alpha_t = torch.exp(log_alpha_t)
+
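+ # Closed-form updates implemented below, with h = lambda_t - lambda_s:
+ #   data prediction (DPM-Solver++): x_t = (sigma_t / sigma_s) * x - alpha_t * (e^{-h} - 1) * x0(x, s)
+ #   noise prediction (DPM-Solver):  x_t = (alpha_t / alpha_s) * x - sigma_t * (e^{h} - 1) * eps(x, s)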
+ if self.predict_x0:
+ phi_1 = torch.expm1(-h)
+ if model_s is None:
+ model_s = self.model_fn(x, s)
+ x_t = (
+ expand_dims(sigma_t / sigma_s, dims) * x
+ - expand_dims(alpha_t * phi_1, dims) * model_s
+ )
+ if return_intermediate:
+ return x_t, {'model_s': model_s}
+ else:
+ return x_t
+ else:
+ phi_1 = torch.expm1(h)
+ if model_s is None:
+ model_s = self.model_fn(x, s)
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x
+ - expand_dims(sigma_t * phi_1, dims) * model_s
+ )
+ if return_intermediate:
+ return x_t, {'model_s': model_s}
+ else:
+ return x_t
+
+ def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, return_intermediate=False,
+ solver_type='dpm_solver'):
+ """
+ Singlestep solver DPM-Solver-2 from time `s` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ s: A pytorch tensor. The starting time, with the shape (x.shape[0],).
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ r1: A `float`. The hyperparameter of the second-order solver.
+ model_s: A pytorch tensor. The model function evaluated at time `s`.
+ If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it.
+ return_intermediate: A `bool`. If true, also return the model value at time `s` and `s1` (the intermediate time).
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ if solver_type not in ['dpm_solver', 'taylor']:
+ raise ValueError("'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format(solver_type))
+ if r1 is None:
+ r1 = 0.5
+ ns = self.noise_schedule
+ dims = x.dim()
+ lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t)
+ h = lambda_t - lambda_s
+ lambda_s1 = lambda_s + r1 * h
+ s1 = ns.inverse_lambda(lambda_s1)
+ log_alpha_s, log_alpha_s1, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(
+ s1), ns.marginal_log_mean_coeff(t)
+ sigma_s, sigma_s1, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std(t)
+ alpha_s1, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_t)
+
+ if self.predict_x0:
+ phi_11 = torch.expm1(-r1 * h)
+ phi_1 = torch.expm1(-h)
+
+ if model_s is None:
+ model_s = self.model_fn(x, s)
+ x_s1 = (
+ expand_dims(sigma_s1 / sigma_s, dims) * x
+ - expand_dims(alpha_s1 * phi_11, dims) * model_s
+ )
+ model_s1 = self.model_fn(x_s1, s1)
+ if solver_type == 'dpm_solver':
+ x_t = (
+ expand_dims(sigma_t / sigma_s, dims) * x
+ - expand_dims(alpha_t * phi_1, dims) * model_s
+ - (0.5 / r1) * expand_dims(alpha_t * phi_1, dims) * (model_s1 - model_s)
+ )
+ elif solver_type == 'taylor':
+ x_t = (
+ expand_dims(sigma_t / sigma_s, dims) * x
+ - expand_dims(alpha_t * phi_1, dims) * model_s
+ + (1. / r1) * expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * (
+ model_s1 - model_s)
+ )
+ else:
+ phi_11 = torch.expm1(r1 * h)
+ phi_1 = torch.expm1(h)
+
+ if model_s is None:
+ model_s = self.model_fn(x, s)
+ x_s1 = (
+ expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x
+ - expand_dims(sigma_s1 * phi_11, dims) * model_s
+ )
+ model_s1 = self.model_fn(x_s1, s1)
+ if solver_type == 'dpm_solver':
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x
+ - expand_dims(sigma_t * phi_1, dims) * model_s
+ - (0.5 / r1) * expand_dims(sigma_t * phi_1, dims) * (model_s1 - model_s)
+ )
+ elif solver_type == 'taylor':
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x
+ - expand_dims(sigma_t * phi_1, dims) * model_s
+ - (1. / r1) * expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * (model_s1 - model_s)
+ )
+ if return_intermediate:
+ return x_t, {'model_s': model_s, 'model_s1': model_s1}
+ else:
+ return x_t
+
+ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. / 3., model_s=None, model_s1=None,
+ return_intermediate=False, solver_type='dpm_solver'):
+ """
+ Singlestep solver DPM-Solver-3 from time `s` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ s: A pytorch tensor. The starting time, with the shape (x.shape[0],).
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ r1: A `float`. The hyperparameter of the third-order solver.
+ r2: A `float`. The hyperparameter of the third-order solver.
+ model_s: A pytorch tensor. The model function evaluated at time `s`.
+ If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it.
+ model_s1: A pytorch tensor. The model function evaluated at time `s1` (the intermediate time given by `r1`).
+ If `model_s1` is None, we evaluate the model at `s1`; otherwise we directly use it.
+ return_intermediate: A `bool`. If true, also return the model value at time `s`, `s1` and `s2` (the intermediate times).
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ if solver_type not in ['dpm_solver', 'taylor']:
+ raise ValueError("'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format(solver_type))
+ if r1 is None:
+ r1 = 1. / 3.
+ if r2 is None:
+ r2 = 2. / 3.
+ ns = self.noise_schedule
+ dims = x.dim()
+ lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t)
+ h = lambda_t - lambda_s
+ lambda_s1 = lambda_s + r1 * h
+ lambda_s2 = lambda_s + r2 * h
+ s1 = ns.inverse_lambda(lambda_s1)
+ s2 = ns.inverse_lambda(lambda_s2)
+ log_alpha_s, log_alpha_s1, log_alpha_s2, log_alpha_t = ns.marginal_log_mean_coeff(
+ s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(s2), ns.marginal_log_mean_coeff(t)
+ sigma_s, sigma_s1, sigma_s2, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std(
+ s2), ns.marginal_std(t)
+ alpha_s1, alpha_s2, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_s2), torch.exp(log_alpha_t)
+
+ if self.predict_x0:
+ phi_11 = torch.expm1(-r1 * h)
+ phi_12 = torch.expm1(-r2 * h)
+ phi_1 = torch.expm1(-h)
+ phi_22 = torch.expm1(-r2 * h) / (r2 * h) + 1.
+ phi_2 = phi_1 / h + 1.
+ phi_3 = phi_2 / h - 0.5
+
+ if model_s is None:
+ model_s = self.model_fn(x, s)
+ if model_s1 is None:
+ x_s1 = (
+ expand_dims(sigma_s1 / sigma_s, dims) * x
+ - expand_dims(alpha_s1 * phi_11, dims) * model_s
+ )
+ model_s1 = self.model_fn(x_s1, s1)
+ x_s2 = (
+ expand_dims(sigma_s2 / sigma_s, dims) * x
+ - expand_dims(alpha_s2 * phi_12, dims) * model_s
+ + r2 / r1 * expand_dims(alpha_s2 * phi_22, dims) * (model_s1 - model_s)
+ )
+ model_s2 = self.model_fn(x_s2, s2)
+ if solver_type == 'dpm_solver':
+ x_t = (
+ expand_dims(sigma_t / sigma_s, dims) * x
+ - expand_dims(alpha_t * phi_1, dims) * model_s
+ + (1. / r2) * expand_dims(alpha_t * phi_2, dims) * (model_s2 - model_s)
+ )
+ elif solver_type == 'taylor':
+ D1_0 = (1. / r1) * (model_s1 - model_s)
+ D1_1 = (1. / r2) * (model_s2 - model_s)
+ D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1)
+ D2 = 2. * (D1_1 - D1_0) / (r2 - r1)
+ x_t = (
+ expand_dims(sigma_t / sigma_s, dims) * x
+ - expand_dims(alpha_t * phi_1, dims) * model_s
+ + expand_dims(alpha_t * phi_2, dims) * D1
+ - expand_dims(alpha_t * phi_3, dims) * D2
+ )
+ else:
+ phi_11 = torch.expm1(r1 * h)
+ phi_12 = torch.expm1(r2 * h)
+ phi_1 = torch.expm1(h)
+ phi_22 = torch.expm1(r2 * h) / (r2 * h) - 1.
+ phi_2 = phi_1 / h - 1.
+ phi_3 = phi_2 / h - 0.5
+
+ if model_s is None:
+ model_s = self.model_fn(x, s)
+ if model_s1 is None:
+ x_s1 = (
+ expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x
+ - expand_dims(sigma_s1 * phi_11, dims) * model_s
+ )
+ model_s1 = self.model_fn(x_s1, s1)
+ x_s2 = (
+ expand_dims(torch.exp(log_alpha_s2 - log_alpha_s), dims) * x
+ - expand_dims(sigma_s2 * phi_12, dims) * model_s
+ - r2 / r1 * expand_dims(sigma_s2 * phi_22, dims) * (model_s1 - model_s)
+ )
+ model_s2 = self.model_fn(x_s2, s2)
+ if solver_type == 'dpm_solver':
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x
+ - expand_dims(sigma_t * phi_1, dims) * model_s
+ - (1. / r2) * expand_dims(sigma_t * phi_2, dims) * (model_s2 - model_s)
+ )
+ elif solver_type == 'taylor':
+ D1_0 = (1. / r1) * (model_s1 - model_s)
+ D1_1 = (1. / r2) * (model_s2 - model_s)
+ D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1)
+ D2 = 2. * (D1_1 - D1_0) / (r2 - r1)
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x
+ - expand_dims(sigma_t * phi_1, dims) * model_s
+ - expand_dims(sigma_t * phi_2, dims) * D1
+ - expand_dims(sigma_t * phi_3, dims) * D2
+ )
+
+ if return_intermediate:
+ return x_t, {'model_s': model_s, 'model_s1': model_s1, 'model_s2': model_s2}
+ else:
+ return x_t
+
+ def multistep_dpm_solver_second_update(self, x, model_prev_list, t_prev_list, t, solver_type="dpm_solver"):
+ """
+ Multistep solver DPM-Solver-2 from time `t_prev_list[-1]` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ model_prev_list: A list of pytorch tensor. The previous computed model values.
+ t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],)
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ if solver_type not in ['dpm_solver', 'taylor']:
+ raise ValueError("'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format(solver_type))
+ ns = self.noise_schedule
+ dims = x.dim()
+ model_prev_1, model_prev_0 = model_prev_list
+ t_prev_1, t_prev_0 = t_prev_list
+ lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_1), ns.marginal_lambda(
+ t_prev_0), ns.marginal_lambda(t)
+ log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t)
+ sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t)
+ alpha_t = torch.exp(log_alpha_t)
+
+ h_0 = lambda_prev_0 - lambda_prev_1
+ h = lambda_t - lambda_prev_0
+ r0 = h_0 / h
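+ # D1_0 is a first-order finite difference of the model output w.r.t. lambda,
+ # rescaled by 1 / r0 so that it approximates the derivative at t_prev_0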
+ D1_0 = expand_dims(1. / r0, dims) * (model_prev_0 - model_prev_1)
+ if self.predict_x0:
+ if solver_type == 'dpm_solver':
+ x_t = (
+ expand_dims(sigma_t / sigma_prev_0, dims) * x
+ - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0
+ - 0.5 * expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * D1_0
+ )
+ elif solver_type == 'taylor':
+ x_t = (
+ expand_dims(sigma_t / sigma_prev_0, dims) * x
+ - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0
+ + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1_0
+ )
+ else:
+ if solver_type == 'dpm_solver':
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x
+ - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0
+ - 0.5 * expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * D1_0
+ )
+ elif solver_type == 'taylor':
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x
+ - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0
+ - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1_0
+ )
+ return x_t
+
+ def multistep_dpm_solver_third_update(self, x, model_prev_list, t_prev_list, t, solver_type='dpm_solver'):
+ """
+ Multistep solver DPM-Solver-3 from time `t_prev_list[-1]` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ model_prev_list: A list of pytorch tensor. The previous computed model values.
+ t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],)
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ ns = self.noise_schedule
+ dims = x.dim()
+ model_prev_2, model_prev_1, model_prev_0 = model_prev_list
+ t_prev_2, t_prev_1, t_prev_0 = t_prev_list
+ lambda_prev_2, lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_2), ns.marginal_lambda(
+ t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t)
+ log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t)
+ sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t)
+ alpha_t = torch.exp(log_alpha_t)
+
+ h_1 = lambda_prev_1 - lambda_prev_2
+ h_0 = lambda_prev_0 - lambda_prev_1
+ h = lambda_t - lambda_prev_0
+ r0, r1 = h_0 / h, h_1 / h
+ D1_0 = expand_dims(1. / r0, dims) * (model_prev_0 - model_prev_1)
+ D1_1 = expand_dims(1. / r1, dims) * (model_prev_1 - model_prev_2)
+ D1 = D1_0 + expand_dims(r0 / (r0 + r1), dims) * (D1_0 - D1_1)
+ D2 = expand_dims(1. / (r0 + r1), dims) * (D1_0 - D1_1)
+ if self.predict_x0:
+ x_t = (
+ expand_dims(sigma_t / sigma_prev_0, dims) * x
+ - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0
+ + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1
+ - expand_dims(alpha_t * ((torch.exp(-h) - 1. + h) / h ** 2 - 0.5), dims) * D2
+ )
+ else:
+ x_t = (
+ expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x
+ - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0
+ - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1
+ - expand_dims(sigma_t * ((torch.exp(h) - 1. - h) / h ** 2 - 0.5), dims) * D2
+ )
+ return x_t
+
+ def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False, solver_type='dpm_solver', r1=None,
+ r2=None):
+ """
+ Singlestep DPM-Solver with the order `order` from time `s` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ s: A pytorch tensor. The starting time, with the shape (x.shape[0],).
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ order: An `int`. The order of DPM-Solver. We only support order == 1 or 2 or 3.
+ return_intermediate: A `bool`. If true, also return the model value at time `s`, `s1` and `s2` (the intermediate times).
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ r1: A `float`. The hyperparameter of the second-order or third-order solver.
+ r2: A `float`. The hyperparameter of the third-order solver.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ if order == 1:
+ return self.dpm_solver_first_update(x, s, t, return_intermediate=return_intermediate)
+ elif order == 2:
+ return self.singlestep_dpm_solver_second_update(x, s, t, return_intermediate=return_intermediate,
+ solver_type=solver_type, r1=r1)
+ elif order == 3:
+ return self.singlestep_dpm_solver_third_update(x, s, t, return_intermediate=return_intermediate,
+ solver_type=solver_type, r1=r1, r2=r2)
+ else:
+ raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order))
+
+ def multistep_dpm_solver_update(self, x, model_prev_list, t_prev_list, t, order, solver_type='dpm_solver'):
+ """
+ Multistep DPM-Solver with the order `order` from time `t_prev_list[-1]` to time `t`.
+ Args:
+ x: A pytorch tensor. The initial value at time `s`.
+ model_prev_list: A list of pytorch tensor. The previous computed model values.
+ t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],)
+ t: A pytorch tensor. The ending time, with the shape (x.shape[0],).
+ order: An `int`. The order of DPM-Solver. We only support order == 1 or 2 or 3.
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ Returns:
+ x_t: A pytorch tensor. The approximated solution at time `t`.
+ """
+ if order == 1:
+ return self.dpm_solver_first_update(x, t_prev_list[-1], t, model_s=model_prev_list[-1])
+ elif order == 2:
+ return self.multistep_dpm_solver_second_update(x, model_prev_list, t_prev_list, t, solver_type=solver_type)
+ elif order == 3:
+ return self.multistep_dpm_solver_third_update(x, model_prev_list, t_prev_list, t, solver_type=solver_type)
+ else:
+ raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order))
+
+ def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol=0.05, theta=0.9, t_err=1e-5,
+ solver_type='dpm_solver'):
+ """
+ The adaptive step size solver based on singlestep DPM-Solver.
+ Args:
+ x: A pytorch tensor. The initial value at time `t_T`.
+ order: An `int`. The (higher) order of the solver. We only support order == 2 or 3.
+ t_T: A `float`. The starting time of the sampling (default is T).
+ t_0: A `float`. The ending time of the sampling (default is epsilon).
+ h_init: A `float`. The initial step size (for logSNR).
+ atol: A `float`. The absolute tolerance of the solver. For image data, the default setting is 0.0078, following [1].
+ rtol: A `float`. The relative tolerance of the solver. The default setting is 0.05.
+ theta: A `float`. The safety hyperparameter for adapting the step size. The default setting is 0.9, following [1].
+ t_err: A `float`. The tolerance for the time. We solve the diffusion ODE until the absolute error between the
+ current time and `t_0` is less than `t_err`. The default setting is 1e-5.
+ solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers.
+ The type slightly impacts the performance. We recommend using the 'dpm_solver' type.
+ Returns:
+ x_0: A pytorch tensor. The approximated solution at time `t_0`.
+ [1] A. Jolicoeur-Martineau, K. Li, R. Piché-Taillefer, T. Kachman, and I. Mitliagkas, "Gotta go fast when generating data with score-based models," arXiv preprint arXiv:2105.14080, 2021.
+ """
+ ns = self.noise_schedule
+ s = t_T * torch.ones((x.shape[0],)).to(x)
+ lambda_s = ns.marginal_lambda(s)
+ lambda_0 = ns.marginal_lambda(t_0 * torch.ones_like(s).to(x))
+ h = h_init * torch.ones_like(s).to(x)
+ x_prev = x
+ nfe = 0
+ if order == 2:
+ r1 = 0.5
+ lower_update = lambda x, s, t: self.dpm_solver_first_update(x, s, t, return_intermediate=True)
+ higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1,
+ solver_type=solver_type,
+ **kwargs)
+ elif order == 3:
+ r1, r2 = 1. / 3., 2. / 3.
+ lower_update = lambda x, s, t: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1,
+ return_intermediate=True,
+ solver_type=solver_type)
+ higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_third_update(x, s, t, r1=r1, r2=r2,
+ solver_type=solver_type,
+ **kwargs)
+ else:
+ raise ValueError("For adaptive step size solver, order must be 2 or 3, got {}".format(order))
+ while torch.abs((s - t_0)).mean() > t_err:
+ t = ns.inverse_lambda(lambda_s + h)
+ x_lower, lower_noise_kwargs = lower_update(x, s, t)
+ x_higher = higher_update(x, s, t, **lower_noise_kwargs)
+ delta = torch.max(torch.ones_like(x).to(x) * atol, rtol * torch.max(torch.abs(x_lower), torch.abs(x_prev)))
+ norm_fn = lambda v: torch.sqrt(torch.square(v.reshape((v.shape[0], -1))).mean(dim=-1, keepdim=True))
+ E = norm_fn((x_higher - x_lower) / delta).max()
+ if torch.all(E <= 1.):
+ x = x_higher
+ s = t
+ x_prev = x_lower
+ lambda_s = ns.marginal_lambda(s)
+ h = torch.min(theta * h * torch.float_power(E, -1. / order).float(), lambda_0 - lambda_s)
+ nfe += order
+ print('adaptive solver nfe', nfe)
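+ # The loop above follows a standard embedded-pair step-size rule: a step is
+ # accepted when the scaled error E = ||(x_higher - x_lower) / delta|| <= 1,
+ # and the next logSNR step is h <- theta * h * E^(-1/order), clipped so the
+ # solver never steps past lambda(t_0).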
+ return x
+
+ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time_uniform',
+ method='singlestep', lower_order_final=True, denoise_to_zero=False, solver_type='dpm_solver',
+ atol=0.0078, rtol=0.05,
+ ):
+ """
+ Compute the sample at time `t_end` by DPM-Solver, given the initial `x` at time `t_start`.
+ =====================================================
+ We support the following algorithms for both noise prediction model and data prediction model:
+ - 'singlestep':
+ Singlestep DPM-Solver (i.e. "DPM-Solver-fast" in the paper), which combines different orders of singlestep DPM-Solver.
+ We combine all the singlestep solvers with order <= `order` to use up all the function evaluations (steps).
+ The total number of function evaluations (NFE) == `steps`.
+ Given a fixed NFE == `steps`, the sampling procedure is:
+ - If `order` == 1:
+ - Denote K = steps. We use K steps of DPM-Solver-1 (i.e. DDIM).
+ - If `order` == 2:
+ - Denote K = (steps // 2) + (steps % 2). We take K intermediate time steps for sampling.
+ - If steps % 2 == 0, we use K steps of singlestep DPM-Solver-2.
+ - If steps % 2 == 1, we use (K - 1) steps of singlestep DPM-Solver-2 and 1 step of DPM-Solver-1.
+ - If `order` == 3:
+ - Denote K = (steps // 3 + 1). We take K intermediate time steps for sampling.
+ - If steps % 3 == 0, we use (K - 2) steps of singlestep DPM-Solver-3, and 1 step of singlestep DPM-Solver-2 and 1 step of DPM-Solver-1.
+ - If steps % 3 == 1, we use (K - 1) steps of singlestep DPM-Solver-3 and 1 step of DPM-Solver-1.
+ - If steps % 3 == 2, we use (K - 1) steps of singlestep DPM-Solver-3 and 1 step of singlestep DPM-Solver-2.
+ - 'multistep':
+ Multistep DPM-Solver with the order of `order`. The total number of function evaluations (NFE) == `steps`.
+ We initialize the first `order` values by lower order multistep solvers.
+ Given a fixed NFE == `steps`, the sampling procedure is:
+ Denote K = steps.
+ - If `order` == 1:
+ - We use K steps of DPM-Solver-1 (i.e. DDIM).
+ - If `order` == 2:
+ - We first use 1 step of DPM-Solver-1, then (K - 1) steps of multistep DPM-Solver-2.
+ - If `order` == 3:
+ - We first use 1 step of DPM-Solver-1, then 1 step of multistep DPM-Solver-2, then (K - 2) steps of multistep DPM-Solver-3.
+ - 'singlestep_fixed':
+ Fixed order singlestep DPM-Solver (i.e. DPM-Solver-1 or singlestep DPM-Solver-2 or singlestep DPM-Solver-3).
+ We use singlestep DPM-Solver-`order` for `order`=1 or 2 or 3, with total [`steps` // `order`] * `order` NFE.
+ - 'adaptive':
+ Adaptive step size DPM-Solver (i.e. "DPM-Solver-12" and "DPM-Solver-23" in the paper).
+ We ignore `steps` and use adaptive step size DPM-Solver with a higher order of `order`.
+ You can adjust the absolute tolerance `atol` and the relative tolerance `rtol` to balance the computation costs
+ (NFE) and the sample quality.
+ - If `order` == 2, we use DPM-Solver-12 which combines DPM-Solver-1 and singlestep DPM-Solver-2.
+ - If `order` == 3, we use DPM-Solver-23 which combines singlestep DPM-Solver-2 and singlestep DPM-Solver-3.
+ =====================================================
+ Some advice on choosing the algorithm:
+ - For **unconditional sampling** or **guided sampling with small guidance scale** by DPMs:
+ Use singlestep DPM-Solver ("DPM-Solver-fast" in the paper) with `order = 3`.
+ e.g.
+ >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, predict_x0=False)
+ >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=3,
+ skip_type='time_uniform', method='singlestep')
+ - For **guided sampling with large guidance scale** by DPMs:
+ Use multistep DPM-Solver with `predict_x0 = True` and `order = 2`.
+ e.g.
+ >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, predict_x0=True)
+ >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=2,
+ skip_type='time_uniform', method='multistep')
+ We support three types of `skip_type`:
+ - 'logSNR': uniform logSNR for the time steps. **Recommended for low-resolution images**.
+ - 'time_uniform': uniform time for the time steps. **Recommended for high-resolution images**.
+ - 'time_quadratic': quadratic time for the time steps.
+ =====================================================
+ Args:
+ x: A pytorch tensor. The initial value at time `t_start`
+ e.g. if `t_start` == T, then `x` is a sample from the standard normal distribution.
+ steps: An `int`. The total number of function evaluations (NFE).
+ t_start: A `float`. The starting time of the sampling.
+ If `t_start` is None, we use self.noise_schedule.T (default is 1.0).
+ t_end: A `float`. The ending time of the sampling.
+ If `t_end` is None, we use 1. / self.noise_schedule.total_N.
+ e.g. if total_N == 1000, we have `t_end` == 1e-3.
+ For discrete-time DPMs:
+ - We recommend `t_end` == 1. / self.noise_schedule.total_N.
+ For continuous-time DPMs:
+ - We recommend `t_end` == 1e-3 when `steps` <= 15; and `t_end` == 1e-4 when `steps` > 15.
+ order: An `int`. The order of DPM-Solver.
+ skip_type: A `str`. The type for the spacing of the time steps. 'time_uniform' or 'logSNR' or 'time_quadratic'.
+ method: A `str`. The method for sampling. 'singlestep' or 'multistep' or 'singlestep_fixed' or 'adaptive'.
+ denoise_to_zero: A `bool`. Whether to denoise to time 0 at the final step.
+ Default is `False`. If `denoise_to_zero` is `True`, the total NFE is (`steps` + 1).
+ This trick was first proposed by DDPM (https://arxiv.org/abs/2006.11239) and
+ score_sde (https://arxiv.org/abs/2011.13456). It can improve the FID when
+ sampling diffusion models by diffusion SDEs on low-resolution images
+ (such as CIFAR-10), but we observed that it does not matter for
+ high-resolution images. As it needs an additional NFE, we do not
+ recommend it for high-resolution images.
+ lower_order_final: A `bool`. Whether to use lower-order solvers at the final steps.
+ Only valid for `method=multistep` and `steps < 15`. We empirically find that
+ this trick is key to stabilizing DPM-Solver sampling with very few steps
+ (especially steps <= 10), so we recommend setting it to `True`.
+ solver_type: A `str`. The Taylor expansion type for the solver. `dpm_solver` or `taylor`. We recommend `dpm_solver`.
+ atol: A `float`. The absolute tolerance of the adaptive step size solver. Valid when `method` == 'adaptive'.
+ rtol: A `float`. The relative tolerance of the adaptive step size solver. Valid when `method` == 'adaptive'.
+ Returns:
+ x_end: A pytorch tensor. The approximated solution at time `t_end`.
+ """
+ t_0 = 1. / self.noise_schedule.total_N if t_end is None else t_end
+ t_T = self.noise_schedule.T if t_start is None else t_start
+ device = x.device
+ if method == 'adaptive':
+ with torch.no_grad():
+ x = self.dpm_solver_adaptive(x, order=order, t_T=t_T, t_0=t_0, atol=atol, rtol=rtol,
+ solver_type=solver_type)
+ elif method == 'multistep':
+ assert steps >= order
+ timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device)
+ assert timesteps.shape[0] - 1 == steps
+ with torch.no_grad():
+ vec_t = timesteps[0].expand((x.shape[0]))
+ model_prev_list = [self.model_fn(x, vec_t)]
+ t_prev_list = [vec_t]
+ # Init the first `order` values by lower order multistep DPM-Solver.
+ for init_order in tqdm(range(1, order), desc="DPM init order"):
+ vec_t = timesteps[init_order].expand(x.shape[0])
+ x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, init_order,
+ solver_type=solver_type)
+ model_prev_list.append(self.model_fn(x, vec_t))
+ t_prev_list.append(vec_t)
+ # Compute the remaining values by `order`-th order multistep DPM-Solver.
+ for step in tqdm(range(order, steps + 1), desc="DPM multistep"):
+ vec_t = timesteps[step].expand(x.shape[0])
+ if lower_order_final and steps < 15:
+ step_order = min(order, steps + 1 - step)
+ else:
+ step_order = order
+ x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, step_order,
+ solver_type=solver_type)
+ for i in range(order - 1):
+ t_prev_list[i] = t_prev_list[i + 1]
+ model_prev_list[i] = model_prev_list[i + 1]
+ t_prev_list[-1] = vec_t
+ # We do not need to evaluate the final model value.
+ if step < steps:
+ model_prev_list[-1] = self.model_fn(x, vec_t)
+ elif method in ['singlestep', 'singlestep_fixed']:
+ if method == 'singlestep':
+ timesteps_outer, orders = self.get_orders_and_timesteps_for_singlestep_solver(steps=steps, order=order,
+ skip_type=skip_type,
+ t_T=t_T, t_0=t_0,
+ device=device)
+ elif method == 'singlestep_fixed':
+ K = steps // order
+ orders = [order, ] * K
+ timesteps_outer = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=K, device=device)
+ for i, order in enumerate(orders):
+ t_T_inner, t_0_inner = timesteps_outer[i], timesteps_outer[i + 1]
+ timesteps_inner = self.get_time_steps(skip_type=skip_type, t_T=t_T_inner.item(), t_0=t_0_inner.item(),
+ N=order, device=device)
+ lambda_inner = self.noise_schedule.marginal_lambda(timesteps_inner)
+ vec_s, vec_t = t_T_inner.tile(x.shape[0]), t_0_inner.tile(x.shape[0])
+ h = lambda_inner[-1] - lambda_inner[0]
+ r1 = None if order <= 1 else (lambda_inner[1] - lambda_inner[0]) / h
+ r2 = None if order <= 2 else (lambda_inner[2] - lambda_inner[0]) / h
+ x = self.singlestep_dpm_solver_update(x, vec_s, vec_t, order, solver_type=solver_type, r1=r1, r2=r2)
+ if denoise_to_zero:
+ x = self.denoise_to_zero_fn(x, torch.ones((x.shape[0],)).to(device) * t_0)
+ return x
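+
+ # For example (per the docstring above): method='singlestep' with steps=15,
+ # order=3 decomposes into outer orders [3, 3, 3, 3, 2, 1]
+ # (4 x solver-3 + 1 x solver-2 + 1 x solver-1 = 15 NFE), while
+ # method='multistep' with steps=20, order=2 uses 1 step of DPM-Solver-1 to
+ # initialize and 19 steps of multistep DPM-Solver-2 (20 NFE in total).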
+
+
+#############################################################
+# other utility functions
+#############################################################
+
+def interpolate_fn(x, xp, yp):
+ """
+ A piecewise linear function y = f(x), using xp and yp as keypoints.
+ We implement f(x) in a differentiable way (i.e. applicable for autograd).
+ The function f(x) is well-defined for all x. (For x beyond the bounds of xp, we use the outermost keypoints of xp to define the linear function.)
+ Args:
+ x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver).
+ xp: PyTorch tensor with shape [C, K], where K is the number of keypoints.
+ yp: PyTorch tensor with shape [C, K].
+ Returns:
+ The function values f(x), with shape [N, C].
+ """
+ N, K = x.shape[0], xp.shape[1]
+ all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2)
+ sorted_all_x, x_indices = torch.sort(all_x, dim=2)
+ x_idx = torch.argmin(x_indices, dim=2)
+ cand_start_idx = x_idx - 1
+ start_idx = torch.where(
+ torch.eq(x_idx, 0),
+ torch.tensor(1, device=x.device),
+ torch.where(
+ torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx,
+ ),
+ )
+ end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1)
+ start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2)
+ end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2)
+ start_idx2 = torch.where(
+ torch.eq(x_idx, 0),
+ torch.tensor(0, device=x.device),
+ torch.where(
+ torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx,
+ ),
+ )
+ y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1)
+ start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2)
+ end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2)
+ cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x)
+ return cand
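+
+# Sanity-check sketch (hypothetical values; x is [N, C] = [2, 1], xp/yp are [C, K] = [1, 3]):
+# xp = torch.tensor([[0., 1., 2.]]); yp = torch.tensor([[0., 10., 20.]])
+# interpolate_fn(torch.tensor([[0.5], [1.5]]), xp, yp) # -> tensor([[ 5.], [15.]])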
+
+
+def expand_dims(v, dims):
+ """
+ Expand the tensor `v` to `dims` total dimensions.
+ Args:
+ `v`: a PyTorch tensor with shape [N].
+ `dims`: an `int`, the target total number of dimensions.
+ Returns:
+ a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`.
+ """
+ return v[(...,) + (None,) * (dims - 1)]
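+
+# e.g. expand_dims(torch.ones(4), 3).shape == torch.Size([4, 1, 1])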
\ No newline at end of file
diff --git a/ldm/models/diffusion/dpm_solver/sampler.py b/ldm/models/diffusion/dpm_solver/sampler.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d137b8cf36718c1c58faa09f9dd919e5fb2977b
--- /dev/null
+++ b/ldm/models/diffusion/dpm_solver/sampler.py
@@ -0,0 +1,87 @@
+"""SAMPLING ONLY."""
+import torch
+
+from .dpm_solver import NoiseScheduleVP, model_wrapper, DPM_Solver
+
+
+MODEL_TYPES = {
+ "eps": "noise",
+ "v": "v"
+}
+
+
+class DPMSolverSampler(object):
+ def __init__(self, model, **kwargs):
+ super().__init__()
+ self.model = model
+ to_torch = lambda x: x.clone().detach().to(torch.float32).to(model.device)
+ self.register_buffer('alphas_cumprod', to_torch(model.alphas_cumprod))
+
+ def register_buffer(self, name, attr):
+ if type(attr) == torch.Tensor:
+ if attr.device != torch.device("cuda"):
+ attr = attr.to(torch.device("cuda"))
+ setattr(self, name, attr)
+
+ @torch.no_grad()
+ def sample(self,
+ S,
+ batch_size,
+ shape,
+ conditioning=None,
+ callback=None,
+ normals_sequence=None,
+ img_callback=None,
+ quantize_x0=False,
+ eta=0.,
+ mask=None,
+ x0=None,
+ temperature=1.,
+ noise_dropout=0.,
+ score_corrector=None,
+ corrector_kwargs=None,
+ verbose=True,
+ x_T=None,
+ log_every_t=100,
+ unconditional_guidance_scale=1.,
+ unconditional_conditioning=None,
+ # this has to come in the same format as the conditioning, e.g. as encoded tokens, ...
+ **kwargs
+ ):
+ if conditioning is not None:
+ if isinstance(conditioning, dict):
+ cbs = conditioning[list(conditioning.keys())[0]].shape[0]
+ if cbs != batch_size:
+ print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
+ else:
+ if conditioning.shape[0] != batch_size:
+ print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}")
+
+ # sampling
+ C, H, W = shape
+ size = (batch_size, C, H, W)
+
+ print(f'Data shape for DPM-Solver sampling is {size}, sampling steps {S}')
+
+ device = self.model.betas.device
+ if x_T is None:
+ img = torch.randn(size, device=device)
+ else:
+ img = x_T
+
+ ns = NoiseScheduleVP('discrete', alphas_cumprod=self.alphas_cumprod)
+
+ model_fn = model_wrapper(
+ lambda x, t, c: self.model.apply_model(x, t, c),
+ ns,
+ model_type=MODEL_TYPES[self.model.parameterization],
+ guidance_type="classifier-free",
+ condition=conditioning,
+ unconditional_condition=unconditional_conditioning,
+ guidance_scale=unconditional_guidance_scale,
+ )
+
+ dpm_solver = DPM_Solver(model_fn, ns, predict_x0=True, thresholding=False)
+ x = dpm_solver.sample(img, steps=S, skip_type="time_uniform", method="multistep", order=2, lower_order_final=True)
+
+ return x.to(device), None
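+
+ # Minimal usage sketch (assumes `model` is a LatentDiffusion-style object
+ # exposing `apply_model`, `alphas_cumprod`, `betas` and `parameterization`,
+ # and that `c` / `uc` are matching conditioning tensors):
+ # sampler = DPMSolverSampler(model)
+ # samples, _ = sampler.sample(S=20, batch_size=4, shape=(4, 32, 32),
+ # conditioning=c, unconditional_guidance_scale=5.0,
+ # unconditional_conditioning=uc)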
\ No newline at end of file
diff --git a/ldm/models/diffusion/plms.py b/ldm/models/diffusion/plms.py
new file mode 100644
index 0000000000000000000000000000000000000000..7002a365d27168ced0a04e9a4d83e088f8284eae
--- /dev/null
+++ b/ldm/models/diffusion/plms.py
@@ -0,0 +1,244 @@
+"""SAMPLING ONLY."""
+
+import torch
+import numpy as np
+from tqdm import tqdm
+from functools import partial
+
+from ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like
+from ldm.models.diffusion.sampling_util import norm_thresholding
+
+
+class PLMSSampler(object):
+ def __init__(self, model, schedule="linear", **kwargs):
+ super().__init__()
+ self.model = model
+ self.ddpm_num_timesteps = model.num_timesteps
+ self.schedule = schedule
+
+ def register_buffer(self, name, attr):
+ if type(attr) == torch.Tensor:
+ if attr.device != torch.device("cuda"):
+ attr = attr.to(torch.device("cuda"))
+ setattr(self, name, attr)
+
+ def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True):
+ if ddim_eta != 0:
+ raise ValueError('ddim_eta must be 0 for PLMS')
+ self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,
+ num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)
+ alphas_cumprod = self.model.alphas_cumprod
+ assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'
+ to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)
+
+ self.register_buffer('betas', to_torch(self.model.betas))
+ self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
+ self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())))
+ self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())))
+ self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)))
+
+ # ddim sampling parameters
+ ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),
+ ddim_timesteps=self.ddim_timesteps,
+ eta=ddim_eta,verbose=verbose)
+ self.register_buffer('ddim_sigmas', ddim_sigmas)
+ self.register_buffer('ddim_alphas', ddim_alphas)
+ self.register_buffer('ddim_alphas_prev', ddim_alphas_prev)
+ self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas))
+ sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(
+ (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (
+ 1 - self.alphas_cumprod / self.alphas_cumprod_prev))
+ self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps)
+
+ @torch.no_grad()
+ def sample(self,
+ S,
+ batch_size,
+ shape,
+ conditioning=None,
+ callback=None,
+ normals_sequence=None,
+ img_callback=None,
+ quantize_x0=False,
+ eta=0.,
+ mask=None,
+ x0=None,
+ temperature=1.,
+ noise_dropout=0.,
+ score_corrector=None,
+ corrector_kwargs=None,
+ verbose=True,
+ x_T=None,
+ log_every_t=100,
+ unconditional_guidance_scale=1.,
+ unconditional_conditioning=None,
+ # this has to come in the same format as the conditioning, e.g. as encoded tokens, ...
+ dynamic_threshold=None,
+ **kwargs
+ ):
+ if conditioning is not None:
+ if isinstance(conditioning, dict):
+ cbs = conditioning[list(conditioning.keys())[0]].shape[0]
+ if cbs != batch_size:
+ print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
+ else:
+ if conditioning.shape[0] != batch_size:
+ print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}")
+
+ self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)
+ # sampling
+ C, H, W = shape
+ size = (batch_size, C, H, W)
+ print(f'Data shape for PLMS sampling is {size}')
+
+ samples, intermediates = self.plms_sampling(conditioning, size,
+ callback=callback,
+ img_callback=img_callback,
+ quantize_denoised=quantize_x0,
+ mask=mask, x0=x0,
+ ddim_use_original_steps=False,
+ noise_dropout=noise_dropout,
+ temperature=temperature,
+ score_corrector=score_corrector,
+ corrector_kwargs=corrector_kwargs,
+ x_T=x_T,
+ log_every_t=log_every_t,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ dynamic_threshold=dynamic_threshold,
+ )
+ return samples, intermediates
+
+ @torch.no_grad()
+ def plms_sampling(self, cond, shape,
+ x_T=None, ddim_use_original_steps=False,
+ callback=None, timesteps=None, quantize_denoised=False,
+ mask=None, x0=None, img_callback=None, log_every_t=100,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
+ unconditional_guidance_scale=1., unconditional_conditioning=None,
+ dynamic_threshold=None):
+ device = self.model.betas.device
+ b = shape[0]
+ if x_T is None:
+ img = torch.randn(shape, device=device)
+ else:
+ img = x_T
+
+ if timesteps is None:
+ timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps
+ elif timesteps is not None and not ddim_use_original_steps:
+ subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1
+ timesteps = self.ddim_timesteps[:subset_end]
+
+ intermediates = {'x_inter': [img], 'pred_x0': [img]}
+ time_range = list(reversed(range(0,timesteps))) if ddim_use_original_steps else np.flip(timesteps)
+ total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]
+ print(f"Running PLMS Sampling with {total_steps} timesteps")
+
+ iterator = tqdm(time_range, desc='PLMS Sampler', total=total_steps)
+ old_eps = []
+
+ for i, step in enumerate(iterator):
+ index = total_steps - i - 1
+ ts = torch.full((b,), step, device=device, dtype=torch.long)
+ ts_next = torch.full((b,), time_range[min(i + 1, len(time_range) - 1)], device=device, dtype=torch.long)
+
+ if mask is not None:
+ assert x0 is not None
+ img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass?
+ img = img_orig * mask + (1. - mask) * img
+
+ outs = self.p_sample_plms(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,
+ quantize_denoised=quantize_denoised, temperature=temperature,
+ noise_dropout=noise_dropout, score_corrector=score_corrector,
+ corrector_kwargs=corrector_kwargs,
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=unconditional_conditioning,
+ old_eps=old_eps, t_next=ts_next,
+ dynamic_threshold=dynamic_threshold)
+ img, pred_x0, e_t = outs
+ old_eps.append(e_t)
+ if len(old_eps) >= 4:
+ old_eps.pop(0)
+ if callback: callback(i)
+ if img_callback: img_callback(pred_x0, i)
+
+ if index % log_every_t == 0 or index == total_steps - 1:
+ intermediates['x_inter'].append(img)
+ intermediates['pred_x0'].append(pred_x0)
+
+ return img, intermediates
+
+ @torch.no_grad()
+ def p_sample_plms(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,
+ temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
+ unconditional_guidance_scale=1., unconditional_conditioning=None, old_eps=None, t_next=None,
+ dynamic_threshold=None):
+ b, *_, device = *x.shape, x.device
+
+ def get_model_output(x, t):
+ if unconditional_conditioning is None or unconditional_guidance_scale == 1.:
+ e_t = self.model.apply_model(x, t, c)
+ else:
+ x_in = torch.cat([x] * 2)
+ t_in = torch.cat([t] * 2)
+ c_in = torch.cat([unconditional_conditioning, c])
+ e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2)
+ e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond)
+
+ if score_corrector is not None:
+ assert self.model.parameterization == "eps"
+ e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)
+
+ return e_t
+
+ alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas
+ alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev
+ sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas
+ sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas
+
+ def get_x_prev_and_pred_x0(e_t, index):
+ # select parameters corresponding to the currently considered timestep
+ a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)
+ a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)
+ sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)
+ sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)
+
+ # current prediction for x_0
+ pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()
+ if quantize_denoised:
+ pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)
+ if dynamic_threshold is not None:
+ pred_x0 = norm_thresholding(pred_x0, dynamic_threshold)
+ # direction pointing to x_t
+ dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t
+ noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature
+ if noise_dropout > 0.:
+ noise = torch.nn.functional.dropout(noise, p=noise_dropout)
+ x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise
+ return x_prev, pred_x0
+
+ e_t = get_model_output(x, t)
+ if len(old_eps) == 0:
+ # Pseudo Improved Euler (2nd order)
+ x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t, index)
+ e_t_next = get_model_output(x_prev, t_next)
+ e_t_prime = (e_t + e_t_next) / 2
+ elif len(old_eps) == 1:
+ # 2nd order Pseudo Linear Multistep (Adams-Bashforth)
+ e_t_prime = (3 * e_t - old_eps[-1]) / 2
+ elif len(old_eps) == 2:
+ # 3rd order Pseudo Linear Multistep (Adams-Bashforth)
+ e_t_prime = (23 * e_t - 16 * old_eps[-1] + 5 * old_eps[-2]) / 12
+ elif len(old_eps) >= 3:
+ # 4th order Pseudo Linear Multistep (Adams-Bashforth)
+ e_t_prime = (55 * e_t - 59 * old_eps[-1] + 37 * old_eps[-2] - 9 * old_eps[-3]) / 24
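+ # The weights above are the standard Adams-Bashforth multistep coefficients:
+ # AB2: (3, -1)/2, AB3: (23, -16, 5)/12, AB4: (55, -59, 37, -9)/24, applied
+ # to the current and previous eps predictions to extrapolate e_t.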
+
+ x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t_prime, index)
+
+ return x_prev, pred_x0, e_t
diff --git a/ldm/models/diffusion/sampling_util.py b/ldm/models/diffusion/sampling_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..7eff02be6d7c54d43ee6680636ac0698dd3b3f33
--- /dev/null
+++ b/ldm/models/diffusion/sampling_util.py
@@ -0,0 +1,22 @@
+import torch
+import numpy as np
+
+
+def append_dims(x, target_dims):
+ """Appends dimensions to the end of a tensor until it has target_dims dimensions.
+ From https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/utils.py"""
+ dims_to_append = target_dims - x.ndim
+ if dims_to_append < 0:
+ raise ValueError(f'input has {x.ndim} dims but target_dims is {target_dims}, which is less')
+ return x[(...,) + (None,) * dims_to_append]
+
+
+def norm_thresholding(x0, value):
+ s = append_dims(x0.pow(2).flatten(1).mean(1).sqrt().clamp(min=value), x0.ndim)
+ return x0 * (value / s)
+
+
+def spatial_norm_thresholding(x0, value):
+ # b c h w
+ s = x0.pow(2).mean(1, keepdim=True).sqrt().clamp(min=value)
+ return x0 * (value / s)
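+
+
+# Usage sketch: both variants rescale x0 so its RMS norm is capped at `value`
+# (per sample in norm_thresholding, per spatial location here); inputs already
+# below the threshold pass through unchanged, since then s == value and the
+# multiplier is 1.
+# x0 = norm_thresholding(torch.randn(2, 4, 8, 8) * 3.0, 1.0) # per-sample RMS <= 1.0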
\ No newline at end of file
diff --git a/ldm/modules/__pycache__/attention.cpython-310.pyc b/ldm/modules/__pycache__/attention.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d8f6885068c56d4286242d5c363034050a376894
Binary files /dev/null and b/ldm/modules/__pycache__/attention.cpython-310.pyc differ
diff --git a/ldm/modules/__pycache__/attention.cpython-39.pyc b/ldm/modules/__pycache__/attention.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d8fbfe065849056add86baf3d798e73713ba57e
Binary files /dev/null and b/ldm/modules/__pycache__/attention.cpython-39.pyc differ
diff --git a/ldm/modules/__pycache__/x_transformer.cpython-39.pyc b/ldm/modules/__pycache__/x_transformer.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7661ad21e0239d4942c10b2d2d30d9fba27a49f1
Binary files /dev/null and b/ldm/modules/__pycache__/x_transformer.cpython-39.pyc differ
diff --git a/ldm/modules/attention.py b/ldm/modules/attention.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5f67923e2b008d05dd69eab4cea8f6d5ce85451
--- /dev/null
+++ b/ldm/modules/attention.py
@@ -0,0 +1,778 @@
+from typing import List, Optional, Tuple
+
+from inspect import isfunction
+import math
+import torch
+import torch.nn.functional as F
+from torch import nn, einsum
+from einops import rearrange, repeat
+from packaging import version
+from pdb import set_trace as st
+
+from ldm.modules.diffusionmodules.util import checkpoint
+
+# from torch.nn import LayerNorm
+try:
+ from apex.normalization import FusedRMSNorm as RMSNorm
+except ImportError:
+ from dit.norm import RMSNorm
+
+
+# CrossAttn precision handling
+import os
+_ATTN_PRECISION = os.environ.get("ATTN_PRECISION", "fp32")
+from xformers.ops import MemoryEfficientAttentionFlashAttentionOp, MemoryEfficientAttentionCutlassOp
+# from xformers.ops import RMSNorm, fmha, rope_padded
+# import apex
+# from apex.normalization import FusedRMSNorm as RMSNorm
+
+if version.parse(torch.__version__) >= version.parse("2.0.0"):
+ SDP_IS_AVAILABLE = True
+ # from torch.backends.cuda import SDPBackend, sdp_kernel
+ from torch.nn.attention import sdpa_kernel, SDPBackend
+
+ BACKEND_MAP = {
+ SDPBackend.MATH: {
+ "enable_math": True,
+ "enable_flash": False,
+ "enable_mem_efficient": False,
+ },
+ SDPBackend.FLASH_ATTENTION: {
+ "enable_math": False,
+ "enable_flash": True,
+ "enable_mem_efficient": False,
+ },
+ SDPBackend.EFFICIENT_ATTENTION: {
+ "enable_math": False,
+ "enable_flash": False,
+ "enable_mem_efficient": True,
+ },
+ None: {"enable_math": True, "enable_flash": True, "enable_mem_efficient": True},
+ }
+else:
+ from contextlib import nullcontext
+
+ SDP_IS_AVAILABLE = False
+ sdpa_kernel = nullcontext
+ BACKEND_MAP = {}
+ print(
+ f"No SDP backend available, likely because you are running in pytorch "
+ f"versions < 2.0. In fact, you are using PyTorch {torch.__version__}. "
+ f"You might want to consider upgrading."
+ )
+
+
+def exists(val):
+ return val is not None
+
+
+def uniq(arr):
+ return {el: True for el in arr}.keys()
+
+
+def default(val, d):
+ if exists(val):
+ return val
+ return d() if isfunction(d) else d
+
+
+def max_neg_value(t):
+ return -torch.finfo(t.dtype).max
+
+
+def init_(tensor):
+ dim = tensor.shape[-1]
+ std = 1 / math.sqrt(dim)
+ tensor.uniform_(-std, std)
+ return tensor
+
+
+# feedforward
+class GEGLU(nn.Module):
+ def __init__(self, dim_in, dim_out):
+ super().__init__()
+ self.proj = nn.Linear(dim_in, dim_out * 2)
+
+ def forward(self, x):
+ x, gate = self.proj(x).chunk(2, dim=-1)
+ return x * F.gelu(gate)
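+ # i.e. a gated GELU ("GLU Variants Improve Transformer", Shazeer 2020):
+ # one half of the projection gates the other half.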
+
+
+class FeedForward(nn.Module):
+ def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):
+ super().__init__()
+ inner_dim = int(dim * mult)
+ dim_out = default(dim_out, dim)
+ project_in = nn.Sequential(
+ nn.Linear(dim, inner_dim),
+ nn.GELU()
+ ) if not glu else GEGLU(dim, inner_dim)
+
+ self.net = nn.Sequential(
+ project_in,
+ nn.Dropout(dropout),
+ nn.Linear(inner_dim, dim_out)
+ )
+
+ def forward(self, x):
+ return self.net(x)
+
+
+def zero_module(module):
+ """
+ Zero out the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().zero_()
+ return module
+
+
+def Normalize(in_channels):
+ return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True)
+
+
+class LinearAttention(nn.Module):
+ def __init__(self, dim, heads=4, dim_head=32):
+ super().__init__()
+ self.heads = heads
+ hidden_dim = dim_head * heads
+ self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias = False)
+ self.to_out = nn.Conv2d(hidden_dim, dim, 1)
+
+ def forward(self, x):
+ b, c, h, w = x.shape
+ qkv = self.to_qkv(x)
+ q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads = self.heads, qkv=3)
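+ # Linear attention: softmax over the key sequence, aggregate one (d x e)
+ # context matrix, then read it out per query -- O(N) in sequence length
+ # rather than the O(N^2) of standard attention.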
+ k = k.softmax(dim=-1)
+ context = torch.einsum('bhdn,bhen->bhde', k, v)
+ out = torch.einsum('bhde,bhdn->bhen', context, q)
+ out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w)
+ return self.to_out(out)
+
+
+class SpatialSelfAttention(nn.Module):
+ def __init__(self, in_channels):
+ super().__init__()
+ self.in_channels = in_channels
+
+ self.norm = Normalize(in_channels)
+ self.q = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.k = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.v = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.proj_out = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+
+ def forward(self, x):
+ h_ = x
+ h_ = self.norm(h_)
+ q = self.q(h_)
+ k = self.k(h_)
+ v = self.v(h_)
+
+ # compute attention
+ b,c,h,w = q.shape
+ q = rearrange(q, 'b c h w -> b (h w) c')
+ k = rearrange(k, 'b c h w -> b c (h w)')
+ w_ = torch.einsum('bij,bjk->bik', q, k)
+
+ w_ = w_ * (int(c)**(-0.5))
+ w_ = torch.nn.functional.softmax(w_, dim=2)
+
+ # attend to values
+ v = rearrange(v, 'b c h w -> b c (h w)')
+ w_ = rearrange(w_, 'b i j -> b j i')
+ h_ = torch.einsum('bij,bjk->bik', v, w_)
+ h_ = rearrange(h_, 'b c (h w) -> b c h w', h=h)
+ h_ = self.proj_out(h_)
+
+ return x+h_
+
+class CrossAttention(nn.Module):
+ def __init__(
+ self,
+ query_dim,
+ context_dim=None,
+ heads=8,
+ dim_head=64,
+ dropout=0.0,
+ # backend=None,
+ backend=SDPBackend.FLASH_ATTENTION, # FA implemented by torch.
+ **kwargs,
+ ):
+ super().__init__()
+ inner_dim = dim_head * heads
+ context_dim = default(context_dim, query_dim)
+
+ self.scale = dim_head**-0.5
+ self.heads = heads
+
+ self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
+ self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+ self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+
+ self.to_out = nn.Sequential(
+ nn.Linear(inner_dim, query_dim), nn.Dropout(dropout)
+ )
+ self.backend = backend
+
+ def forward(
+ self,
+ x,
+ context=None,
+ mask=None,
+ additional_tokens=None,
+ n_times_crossframe_attn_in_self=0,
+ ):
+ h = self.heads
+
+ if additional_tokens is not None:
+ # get the number of masked tokens at the beginning of the output sequence
+ n_tokens_to_mask = additional_tokens.shape[1]
+ # add additional token
+ x = torch.cat([additional_tokens, x], dim=1)
+
+ q = self.to_q(x)
+ context = default(context, x)
+ k = self.to_k(context)
+ v = self.to_v(context)
+
+ if n_times_crossframe_attn_in_self:
+ # reprogramming cross-frame attention as in https://arxiv.org/abs/2303.13439
+ assert x.shape[0] % n_times_crossframe_attn_in_self == 0
+ n_cp = x.shape[0] // n_times_crossframe_attn_in_self
+ k = repeat(
+ k[::n_times_crossframe_attn_in_self], "b ... -> (b n) ...", n=n_cp
+ )
+ v = repeat(
+ v[::n_times_crossframe_attn_in_self], "b ... -> (b n) ...", n=n_cp
+ )
+
+ q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=h), (q, k, v))
+
+ ## old
+ """
+ sim = einsum('b i d, b j d -> b i j', q, k) * self.scale
+ del q, k
+
+ if exists(mask):
+ mask = rearrange(mask, 'b ... -> b (...)')
+ max_neg_value = -torch.finfo(sim.dtype).max
+ mask = repeat(mask, 'b j -> (b h) () j', h=h)
+ sim.masked_fill_(~mask, max_neg_value)
+
+ # attention, what we cannot get enough of
+ sim = sim.softmax(dim=-1)
+
+ out = einsum('b i j, b j d -> b i d', sim, v)
+ """
+ ## new
+ # with sdpa_kernel(**BACKEND_MAP[self.backend]):
+ with sdpa_kernel([self.backend]): # new signature
+ # print("dispatching into backend", self.backend, "q/k/v shape: ", q.shape, k.shape, v.shape)
+ out = F.scaled_dot_product_attention(
+ q, k, v, attn_mask=mask
+ ) # scale is dim_head ** -0.5 per default
+
+ del q, k, v
+ out = rearrange(out, "b h n d -> b n (h d)", h=h)
+
+ if additional_tokens is not None:
+ # remove additional token
+ out = out[:, n_tokens_to_mask:]
+ return self.to_out(out)
+
+# class CrossAttention(nn.Module):
+# def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.):
+# super().__init__()
+# inner_dim = dim_head * heads
+# context_dim = default(context_dim, query_dim)
+
+# self.scale = dim_head ** -0.5
+# self.heads = heads
+
+# self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
+# self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+# self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+
+# self.to_out = nn.Sequential(
+# nn.Linear(inner_dim, query_dim),
+# nn.Dropout(dropout)
+# )
+
+# def forward(self, x, context=None, mask=None):
+# h = self.heads
+
+# q = self.to_q(x)
+# context = default(context, x)
+# k = self.to_k(context)
+# v = self.to_v(context)
+
+# q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v))
+
+# sim = einsum('b i d, b j d -> b i j', q, k) * self.scale
+
+# if exists(mask):
+# mask = rearrange(mask, 'b ... -> b (...)')
+# max_neg_value = -torch.finfo(sim.dtype).max
+# mask = repeat(mask, 'b j -> (b h) () j', h=h)
+# sim.masked_fill_(~mask, max_neg_value)
+
+# # attention, what we cannot get enough of
+# attn = sim.softmax(dim=-1)
+
+# out = einsum('b i j, b j d -> b i d', attn, v)
+# out = rearrange(out, '(b h) n d -> b n (h d)', h=h)
+# return self.to_out(out)
+
+
+# class BasicTransformerBlock(nn.Module):
+# def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True):
+# super().__init__()
+# self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout) # is a self-attention
+# self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
+# self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim,
+# heads=n_heads, dim_head=d_head, dropout=dropout) # is self-attn if context is none
+# self.norm1 = nn.LayerNorm(dim)
+# self.norm2 = nn.LayerNorm(dim)
+# self.norm3 = nn.LayerNorm(dim)
+# self.checkpoint = checkpoint
+
+# def forward(self, x, context=None):
+# return checkpoint(self._forward, (x, context), self.parameters(), self.checkpoint)
+
+# def _forward(self, x, context=None):
+# x = self.attn1(self.norm1(x)) + x
+# x = self.attn2(self.norm2(x), context=context) + x
+# x = self.ff(self.norm3(x)) + x
+# return x
+
+
+try:
+ # from xformers.triton import FusedLayerNorm as LayerNorm
+ import xformers
+ import xformers.ops
+ XFORMERS_IS_AVAILBLE = True
+except ImportError:
+ XFORMERS_IS_AVAILBLE = False
+
+from typing import Optional, Any
+
+class MemoryEfficientCrossAttention(nn.Module):
+ # https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
+ def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0, enable_rmsnorm=False, qk_norm=False, no_flash_op=False, enable_rope=False, qk_norm_fullseq=False,):
+ super().__init__()
+ print(f"Setting up {self.__class__.__name__}. Query dim is {query_dim}, context_dim is {context_dim} and using "
+ f"{heads} heads.")
+ inner_dim = dim_head * heads
+ context_dim = default(context_dim, query_dim)
+
+ self.heads = heads
+ self.dim_head = dim_head
+
+ self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
+ self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+
+ self.enable_rope = enable_rope
+
+ # if enable_rmsnorm:
+ # self.q_rmsnorm = RMSNorm(query_dim, eps=1e-5)
+ # self.k_rmsnorm = RMSNorm(context_dim, eps=1e-5)
+
+ if qk_norm_fullseq: # as in lumina
+ self.q_norm = RMSNorm(inner_dim, elementwise_affine=True) if qk_norm else nn.Identity()
+ self.k_norm = RMSNorm(inner_dim, elementwise_affine=True) if qk_norm else nn.Identity()
+ else:
+ self.q_norm = RMSNorm(self.dim_head, elementwise_affine=True) if qk_norm else nn.Identity()
+ self.k_norm = RMSNorm(self.dim_head, elementwise_affine=True) if qk_norm else nn.Identity()
+
+ # if not qk_norm:
+ # logpy.warn(
+ # f"No QK Norm activated, wish you good luck..."
+ # )
+
+ # self.enable_rmsnorm = enable_rmsnorm
+
+ self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+ # self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+ # self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+
+ self.to_out = nn.Sequential(nn.Linear(inner_dim, query_dim), nn.Dropout(dropout))
+ if no_flash_op:
+ self.attention_op = MemoryEfficientAttentionCutlassOp # force the Cutlass kernel (i.e. avoid the flash-attention op)
+ else:
+ self.attention_op: Optional[Any] = None # enable
+
+ @staticmethod
+ def reshape_for_broadcast(freqs_cis: torch.Tensor, x: torch.Tensor):
+ """
+ Reshape frequency tensor for broadcasting it with another tensor.
+
+ This function reshapes the frequency tensor to have the same shape as
+ the target tensor 'x' for the purpose of broadcasting the frequency
+ tensor during element-wise operations.
+
+ Args:
+ freqs_cis (torch.Tensor): Frequency tensor to be reshaped.
+ x (torch.Tensor): Target tensor for broadcasting compatibility.
+
+ Returns:
+ torch.Tensor: Reshaped frequency tensor.
+
+ Raises:
+ AssertionError: If the frequency tensor doesn't match the expected
+ shape.
+ AssertionError: If the target tensor 'x' doesn't have the expected
+ number of dimensions.
+ """
+ ndim = x.ndim
+ assert 0 <= 1 < ndim
+ assert freqs_cis.shape == (x.shape[-2], x.shape[-1])
+ shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)]
+ return freqs_cis.view(*shape)
+
+ @staticmethod
+ def apply_rotary_emb(
+ xq: torch.Tensor,
+ xk: torch.Tensor,
+ freqs_cis: torch.Tensor,
+ ) -> Tuple[torch.Tensor, torch.Tensor]:
+ """
+ Apply rotary embeddings to input tensors using the given frequency
+ tensor.
+
+ This function applies rotary embeddings to the given query 'xq' and
+ key 'xk' tensors using the provided frequency tensor 'freqs_cis'. The
+ input tensors are reshaped as complex numbers, and the frequency tensor
+ is reshaped for broadcasting compatibility. The resulting tensors
+ contain rotary embeddings and are returned as real tensors.
+
+ Args:
+ xq (torch.Tensor): Query tensor to apply rotary embeddings.
+ xk (torch.Tensor): Key tensor to apply rotary embeddings.
+ freqs_cis (torch.Tensor): Precomputed frequency tensor for complex
+ exponentials.
+
+ Returns:
+ Tuple[torch.Tensor, torch.Tensor]: Tuple of modified query tensor
+ and key tensor with rotary embeddings.
+ """
+ with torch.cuda.amp.autocast(enabled=False):
+ xq_ = torch.view_as_complex(xq.float().reshape(*xq.shape[:-1], -1, 2))
+ xk_ = torch.view_as_complex(xk.float().reshape(*xk.shape[:-1], -1, 2))
+ freqs_cis = MemoryEfficientCrossAttention.reshape_for_broadcast(freqs_cis, xq_)
+ xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3)
+ xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3)
+ return xq_out.type_as(xq), xk_out.type_as(xk)
+
+
+ def forward(self, x, context=None, freqs_cis=None, mask=None):
+
+ q = self.to_q(x)
+ context = default(context, x)
+ k = self.to_k(context)
+
+ v = self.to_v(context)
+
+ dtype = q.dtype
+
+ b, _, _ = q.shape
+ if self.enable_rope:
+ q, k = self.q_norm(q), self.k_norm(k) # for stable amp training
+
+ q, k, v = map(
+ lambda t: t.unsqueeze(3)
+ .reshape(b, t.shape[1], self.heads, self.dim_head)
+ .permute(0, 2, 1, 3)
+ # .reshape(b * self.heads, t.shape[1], self.dim_head)
+ .reshape(b, self.heads, t.shape[1], self.dim_head)
+ .contiguous(),
+ (q, k, v),
+ )
+
+ assert freqs_cis is not None
+ q, k = MemoryEfficientCrossAttention.apply_rotary_emb(q, k, freqs_cis=freqs_cis)
+ q, k = q.to(dtype), k.to(dtype)
+
+ else:
+ q, k, v = map(
+ lambda t: t.unsqueeze(3)
+ .reshape(b, t.shape[1], self.heads, self.dim_head)
+ .permute(0, 2, 1, 3)
+ .reshape(b * self.heads, t.shape[1], self.dim_head)
+ .contiguous(),
+ (q, k, v),
+ )
+ q, k = self.q_norm(q), self.k_norm(k) # for stable amp training
+
+ # actually compute the attention, what we cannot get enough of
+ # out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None, op=self.attention_op)
+
+ # actually compute the attention, what we cannot get enough of
+ if version.parse(xformers.__version__) >= version.parse("0.0.21"):
+ # NOTE: workaround for
+ # https://github.com/facebookresearch/xformers/issues/845
+ max_bs = 32768
+ N = q.shape[0]
+ n_batches = math.ceil(N / max_bs)
+ out = list()
+ for i_batch in range(n_batches):
+ batch = slice(i_batch * max_bs, (i_batch + 1) * max_bs)
+ out.append(
+ xformers.ops.memory_efficient_attention(
+ q[batch],
+ k[batch],
+ v[batch],
+ attn_bias=None,
+ # op=self.attention_op,
+ )
+ )
+ out = torch.cat(out, 0)
+ else:
+ out = xformers.ops.memory_efficient_attention(
+ q, k, v, attn_bias=None, op=self.attention_op
+ )
+
+ # TODO: Use this directly in the attention operation, as a bias
+ if exists(mask):
+ raise NotImplementedError
+ out = (
+ out.unsqueeze(0)
+ .reshape(b, self.heads, out.shape[1], self.dim_head)
+ .permute(0, 2, 1, 3)
+ .reshape(b, out.shape[1], self.heads * self.dim_head)
+ )
+ return self.to_out(out)
+
+
+
+class JointMemoryEfficientCrossAttention(nn.Module):
+ # https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
+ def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0):
+ super().__init__()
+ print(f"Setting up {self.__class__.__name__}. Query dim is {query_dim}, context_dim is {context_dim} and using "
+ f"{heads} heads.")
+ inner_dim = dim_head * heads
+ context_dim = default(context_dim, query_dim)
+
+ self.heads = heads
+ self.dim_head = dim_head
+
+ self.to_qkv_t = nn.Linear(query_dim, inner_dim, bias=False)
+ self.to_qkv_i = nn.Linear(query_dim, inner_dim, bias=False)
+
+ # forward() uses separate q/k/v projections, so they must be defined here;
+ # without them the self.to_q / self.to_k / self.to_v calls below would
+ # raise AttributeError.
+ self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
+ self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+ self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+
+ self.to_out = nn.Sequential(nn.Linear(inner_dim, query_dim), nn.Dropout(dropout))
+ self.attention_op: Optional[Any] = None
+ # self.attention_op: Optional[Any] = MemoryEfficientAttentionFlashAttentionOp
+
+ # TODO, add later for stable AMP training.
+ # self.rms_norm_t_q = RMSNorm(args.dim, eps=args.norm_eps)
+ # self.rms_norm_t_k = RMSNorm(args.dim, eps=args.norm_eps)
+ # self.rms_norm_i_q = RMSNorm(args.dim, eps=args.norm_eps)
+ # self.rms_norm_i_k = RMSNorm(args.dim, eps=args.norm_eps)
+
+
+ def forward(self, x, context=None, mask=None):
+ q = self.to_q(x)
+ context = default(context, x)
+ k = self.to_k(context)
+ v = self.to_v(context)
+
+ b, _, _ = q.shape
+ q, k, v = map(
+ lambda t: t.unsqueeze(3)
+ .reshape(b, t.shape[1], self.heads, self.dim_head)
+ .permute(0, 2, 1, 3)
+ .reshape(b * self.heads, t.shape[1], self.dim_head)
+ .contiguous(),
+ (q, k, v),
+ )
+
+ # actually compute the attention, what we cannot get enough of
+ out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None, op=self.attention_op)
+
+ if exists(mask):
+ raise NotImplementedError
+ out = (
+ out.unsqueeze(0)
+ .reshape(b, self.heads, out.shape[1], self.dim_head)
+ .permute(0, 2, 1, 3)
+ .reshape(b, out.shape[1], self.heads * self.dim_head)
+ )
+ return self.to_out(out)
+
+
+class BasicTransformerBlock(nn.Module):
+ ATTENTION_MODES = {
+ "softmax": CrossAttention, # vanilla attention
+ "softmax-xformers": MemoryEfficientCrossAttention
+ }
+ def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True,
+ disable_self_attn=False):
+ super().__init__()
+ attn_mode = "softmax-xformers" if XFORMERS_IS_AVAILBLE else "softmax"
+ assert attn_mode in self.ATTENTION_MODES
+ attn_cls = self.ATTENTION_MODES[attn_mode]
+ self.disable_self_attn = disable_self_attn
+ self.attn1 = attn_cls(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout,
+ context_dim=context_dim if self.disable_self_attn else None) # is a self-attention if not self.disable_self_attn
+ self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
+ self.attn2 = attn_cls(query_dim=dim, context_dim=context_dim,
+ heads=n_heads, dim_head=d_head, dropout=dropout) # is self-attn if context is none
+ self.norm1 = nn.LayerNorm(dim)
+ self.norm2 = nn.LayerNorm(dim)
+ self.norm3 = nn.LayerNorm(dim)
+ self.checkpoint = checkpoint
+
+ def forward(self, x, context=None):
+ # return checkpoint(self._forward, (x, context), self.parameters(), self.checkpoint)
+ return self._forward(x, context)
+
+ def _forward(self, x, context=None):
+ x = self.attn1(self.norm1(x), context=context if self.disable_self_attn else None) + x
+ x = self.attn2(self.norm2(x), context=context) + x
+ x = self.ff(self.norm3(x)) + x
+ return x
+
+
+class SpatialTransformer(nn.Module):
+ """
+ Transformer block for image-like data.
+ First, project the input (aka embedding)
+ and reshape to b, t, d.
+ Then apply standard transformer action.
+ Finally, reshape back to an image.
+ """
+ def __init__(self, in_channels, n_heads, d_head,
+ depth=1, dropout=0., context_dim=None):
+ super().__init__()
+ self.in_channels = in_channels
+ inner_dim = n_heads * d_head
+ self.norm = Normalize(in_channels)
+
+ self.proj_in = nn.Conv2d(in_channels,
+ inner_dim,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+
+ self.transformer_blocks = nn.ModuleList(
+ [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim)
+ for d in range(depth)]
+ )
+
+ self.proj_out = zero_module(nn.Conv2d(inner_dim,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0))
+
+ def forward(self, x, context=None):
+ # note: if no context is given, cross-attention defaults to self-attention
+ b, c, h, w = x.shape
+ x_in = x
+ x = self.norm(x)
+ x = self.proj_in(x)
+ x = rearrange(x, 'b c h w -> b (h w) c')
+ for block in self.transformer_blocks:
+ x = block(x, context=context)
+ x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w)
+ x = self.proj_out(x)
+ return x + x_in
+
+
+
+class BasicTransformerBlock3D(BasicTransformerBlock):
+
+ def forward(self, x, context=None, num_frames=1):
+ # return checkpoint(self._forward, (x, context, num_frames), self.parameters(), self.checkpoint)
+ return self._forward(x, context, num_frames) # , self.parameters(), self.checkpoint
+
+ def _forward(self, x, context=None, num_frames=1):
+ x = rearrange(x, "(b f) l c -> b (f l) c", f=num_frames).contiguous()
+ x = self.attn1(self.norm1(x), context=context if self.disable_self_attn else None) + x
+ x = rearrange(x, "b (f l) c -> (b f) l c", f=num_frames).contiguous()
+ x = self.attn2(self.norm2(x), context=context) + x
+ x = self.ff(self.norm3(x)) + x
+ return x
+
+
+class SpatialTransformer3D(nn.Module):
+ ''' 3D self-attention '''
+ def __init__(self, in_channels, n_heads, d_head,
+ depth=1, dropout=0., context_dim=None,
+ disable_self_attn=False, use_linear=False,
+ use_checkpoint=True):
+ super().__init__()
+ if exists(context_dim) and not isinstance(context_dim, list):
+ context_dim = [context_dim]
+ elif context_dim is None:
+ context_dim = [None] * depth
+
+ self.in_channels = in_channels
+ inner_dim = n_heads * d_head
+ self.norm = Normalize(in_channels)
+ if not use_linear:
+ self.proj_in = nn.Conv2d(in_channels,
+ inner_dim,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ else:
+ self.proj_in = nn.Linear(in_channels, inner_dim)
+
+ self.transformer_blocks = nn.ModuleList(
+ [BasicTransformerBlock3D(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim[d],
+ disable_self_attn=disable_self_attn, checkpoint=use_checkpoint)
+ for d in range(depth)]
+ )
+ if not use_linear:
+ self.proj_out = zero_module(nn.Conv2d(inner_dim,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0))
+ else:
+ self.proj_out = zero_module(nn.Linear(inner_dim, in_channels)) # project back from inner_dim to in_channels
+ self.use_linear = use_linear
+
+ def forward(self, x, context=None, num_frames=1):
+ # note: if no context is given, cross-attention defaults to self-attention
+ if not isinstance(context, list):
+ context = [context]
+ b, c, h, w = x.shape
+ x_in = x
+ x = self.norm(x)
+ if not self.use_linear:
+ x = self.proj_in(x)
+ x = rearrange(x, 'b c h w -> b (h w) c').contiguous()
+ if self.use_linear:
+ x = self.proj_in(x)
+ for i, block in enumerate(self.transformer_blocks):
+ x = block(x, context=context[i], num_frames=num_frames)
+ if self.use_linear:
+ x = self.proj_out(x)
+ x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous()
+ if not self.use_linear:
+ x = self.proj_out(x)
+ return x + x_in
\ No newline at end of file
diff --git a/ldm/modules/diffusionmodules/__init__.py b/ldm/modules/diffusionmodules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc b/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ce8bbae2e14542fbc5bd3b9354ccf8c7e9b4487c
Binary files /dev/null and b/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc differ
diff --git a/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-39.pyc b/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32bedfdcdbfde1b9e55aafdacd464646dc0a447a
Binary files /dev/null and b/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-39.pyc differ
diff --git a/ldm/modules/diffusionmodules/__pycache__/model.cpython-39.pyc b/ldm/modules/diffusionmodules/__pycache__/model.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..53733aa9c17c1c1ef7d64b9bf2253ffca8dd4558
Binary files /dev/null and b/ldm/modules/diffusionmodules/__pycache__/model.cpython-39.pyc differ
diff --git a/ldm/modules/diffusionmodules/__pycache__/mv_unet.cpython-39.pyc b/ldm/modules/diffusionmodules/__pycache__/mv_unet.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2aadaa6ebfecdd2cb2ae3c2c19e96bc2f4cd602c
Binary files /dev/null and b/ldm/modules/diffusionmodules/__pycache__/mv_unet.cpython-39.pyc differ
diff --git a/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc b/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d774a7a7606d7e505d90c701430ac44987714edc
Binary files /dev/null and b/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc differ
diff --git a/ldm/modules/diffusionmodules/__pycache__/util.cpython-39.pyc b/ldm/modules/diffusionmodules/__pycache__/util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fb5d8f1751ae97d12c8410250977fe95f841abdd
Binary files /dev/null and b/ldm/modules/diffusionmodules/__pycache__/util.cpython-39.pyc differ
diff --git a/ldm/modules/diffusionmodules/model.py b/ldm/modules/diffusionmodules/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbeb067b7a2f174edafea1b64fbf4e1e100ce002
--- /dev/null
+++ b/ldm/modules/diffusionmodules/model.py
@@ -0,0 +1,1016 @@
+# pytorch_diffusion + derived encoder decoder
+import math
+import torch
+import torch.nn as nn
+import numpy as np
+from einops import rearrange
+from typing import Optional, Any
+
+# from ldm.modules.attention import MemoryEfficientCrossAttention
+# from .modules.attention import MemoryEfficientCrossAttention
+from ldm.modules.attention import SpatialTransformer3D
+from pdb import set_trace as st
+# from xformers.ops import MemoryEfficientAttentionFlashAttentionOp # only needed for the (disabled) flash-attention op below; an unconditional import would defeat the xformers try/except guard
+from ldm.modules.attention import MemoryEfficientCrossAttention
+
+from nsr.volumetric_rendering.ray_sampler import RaySampler
+import kornia
+import point_cloud_utils as pcu
+
+try:
+ import xformers
+ import xformers.ops
+ XFORMERS_IS_AVAILBLE = True
+except ImportError:
+ XFORMERS_IS_AVAILBLE = False
+ print("No module 'xformers'. Proceeding without it.")
+
+
+def get_timestep_embedding(timesteps, embedding_dim):
+ """
+ This matches the implementation in Denoising Diffusion Probabilistic Models:
+ From Fairseq.
+ Build sinusoidal embeddings.
+ This matches the implementation in tensor2tensor, but differs slightly
+ from the description in Section 3.5 of "Attention Is All You Need".
+ """
+ assert len(timesteps.shape) == 1
+
+ half_dim = embedding_dim // 2
+ emb = math.log(10000) / (half_dim - 1)
+ emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb)
+ emb = emb.to(device=timesteps.device)
+ emb = timesteps.float()[:, None] * emb[None, :]
+ emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)
+ if embedding_dim % 2 == 1: # zero pad
+ emb = torch.nn.functional.pad(emb, (0,1,0,0))
+ return emb
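+
+# For reference, a sketch of what the function above computes (readable directly from the code):
+# emb[t, i] = sin(t * 10000^(-i / (half_dim - 1))) for i < half_dim
+# emb[t, half_dim + i] = cos(t * 10000^(-i / (half_dim - 1)))
+# e.g. get_timestep_embedding(torch.tensor([0, 10]), 128) returns a (2, 128) tensor.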
+
+
+def nonlinearity(x):
+ # swish
+ return x*torch.sigmoid(x)
+
+
+def Normalize(in_channels, num_groups=32):
+ return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True)
+
+
+class Upsample(nn.Module):
+ def __init__(self, in_channels, with_conv):
+ super().__init__()
+ self.with_conv = with_conv
+ if self.with_conv:
+ self.conv = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ # turn off amp autocast here
+ def forward(self, x):
+
+ with torch.autocast(enabled=False, device_type='cuda'): # disables autocast for the execution only; dtypes are handled via the .float() cast below
+ x = torch.nn.functional.interpolate(x.float(), scale_factor=2.0, mode="nearest")
+ if self.with_conv:
+ x = self.conv(x)
+ return x
+
+
+class Downsample(nn.Module):
+ def __init__(self, in_channels, with_conv):
+ super().__init__()
+ self.with_conv = with_conv
+ if self.with_conv:
+ # no asymmetric padding in torch conv, must do it ourselves
+ self.conv = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=3,
+ stride=2,
+ padding=0)
+
+ def forward(self, x):
+ if self.with_conv:
+ pad = (0,1,0,1)
+ x = torch.nn.functional.pad(x, pad, mode="constant", value=0)
+ x = self.conv(x)
+ else:
+ x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2)
+ return x
+
+
+class ResnetBlock(nn.Module):
+ def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False,
+ dropout, temb_channels=512):
+ super().__init__()
+ self.in_channels = in_channels
+ out_channels = in_channels if out_channels is None else out_channels
+ self.out_channels = out_channels
+ self.use_conv_shortcut = conv_shortcut
+
+ self.norm1 = Normalize(in_channels)
+ self.conv1 = torch.nn.Conv2d(in_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+ if temb_channels > 0:
+ self.temb_proj = torch.nn.Linear(temb_channels,
+ out_channels)
+ self.norm2 = Normalize(out_channels)
+ self.dropout = torch.nn.Dropout(dropout)
+ self.conv2 = torch.nn.Conv2d(out_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+ if self.in_channels != self.out_channels:
+ if self.use_conv_shortcut:
+ self.conv_shortcut = torch.nn.Conv2d(in_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+ else:
+ self.nin_shortcut = torch.nn.Conv2d(in_channels,
+ out_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+
+ def forward(self, x, temb):
+ h = x
+ h = self.norm1(h)
+ h = nonlinearity(h)
+ h = self.conv1(h)
+
+ if temb is not None:
+ h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None]
+
+ h = self.norm2(h)
+ h = nonlinearity(h)
+ h = self.dropout(h)
+ h = self.conv2(h)
+
+ if self.in_channels != self.out_channels:
+ if self.use_conv_shortcut:
+ x = self.conv_shortcut(x)
+ else:
+ x = self.nin_shortcut(x)
+
+ return x+h
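+
+# The block above follows the pre-activation layout used throughout this file:
+# GroupNorm -> swish -> conv, twice, with the projected timestep embedding added
+# between the two convs; the skip path is identity unless in/out channels differ,
+# in which case a 3x3 (conv_shortcut) or 1x1 (nin_shortcut) projection is used.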
+
+
+class AttnBlock(nn.Module):
+ def __init__(self, in_channels):
+ super().__init__()
+ self.in_channels = in_channels
+
+ self.norm = Normalize(in_channels)
+ self.q = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.k = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.v = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.proj_out = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+
+ def forward(self, x):
+ h_ = x
+ h_ = self.norm(h_)
+ q = self.q(h_)
+ k = self.k(h_)
+ v = self.v(h_)
+
+ # compute attention
+ b,c,h,w = q.shape
+ q = q.reshape(b,c,h*w)
+ q = q.permute(0,2,1) # b,hw,c
+ k = k.reshape(b,c,h*w) # b,c,hw
+ w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j]
+ w_ = w_ * (int(c)**(-0.5))
+ w_ = torch.nn.functional.softmax(w_, dim=2)
+
+ # attend to values
+ v = v.reshape(b,c,h*w)
+ w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q)
+ h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j]
+ h_ = h_.reshape(b,c,h,w)
+
+ h_ = self.proj_out(h_)
+
+ return x+h_
+
+class MemoryEfficientAttnBlock(nn.Module):
+ """
+ Uses xformers efficient implementation,
+ see https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
+ Note: this is a single-head self-attention operation
+ """
+ #
+ def __init__(self, in_channels):
+ super().__init__()
+ self.in_channels = in_channels
+
+ self.norm = Normalize(in_channels)
+ self.q = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.k = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.v = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.proj_out = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=1,
+ stride=1,
+ padding=0)
+ self.attention_op: Optional[Any] = None
+ # self.attention_op: Optional[Any] = MemoryEfficientAttentionFlashAttentionOp
+
+ def forward(self, x):
+ h_ = x
+ h_ = self.norm(h_)
+ q = self.q(h_)
+ k = self.k(h_)
+ v = self.v(h_)
+
+ # compute attention
+ B, C, H, W = q.shape
+ q, k, v = map(lambda x: rearrange(x, 'b c h w -> b (h w) c'), (q, k, v))
+
+ q, k, v = map(
+ lambda t: t.unsqueeze(3)
+ .reshape(B, t.shape[1], 1, C)
+ .permute(0, 2, 1, 3)
+ .reshape(B * 1, t.shape[1], C)
+ .contiguous(),
+ (q, k, v),
+ )
+ out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None, op=self.attention_op)
+
+ out = (
+ out.unsqueeze(0)
+ .reshape(B, 1, out.shape[1], C)
+ .permute(0, 2, 1, 3)
+ .reshape(B, out.shape[1], C)
+ )
+ out = rearrange(out, 'b (h w) c -> b c h w', b=B, h=H, w=W, c=C)
+ out = self.proj_out(out)
+ return x+out
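+
+# Note on the reshapes above: q/k/v are flattened to (B, H*W, C) and, since this
+# block is single-head, passed to xformers as one head of width C; the unsqueeze/
+# permute/reshape round-trip is a no-op for 1 head but mirrors the multi-head code path.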
+
+
+class MemoryEfficientCrossAttentionWrapper(MemoryEfficientCrossAttention):
+ # required by the "memory-efficient-cross-attn" branch of make_attn below
+ def forward(self, x, context=None, mask=None):
+ b, c, h, w = x.shape
+ x_in = x
+ x = rearrange(x, 'b c h w -> b (h w) c')
+ out = super().forward(x, context=context, mask=mask)
+ out = rearrange(out, 'b (h w) c -> b c h w', h=h, w=w, c=c)
+ return x_in + out
+
+
+def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None):
+ assert attn_type in ["vanilla", "vanilla-xformers", "memory-efficient-cross-attn", "linear", "none", "mv-vanilla"], f'attn_type {attn_type} unknown'
+ if XFORMERS_IS_AVAILBLE and attn_type == "vanilla":
+ attn_type = "vanilla-xformers"
+ print(f"making attention of type '{attn_type}' with {in_channels} in_channels")
+ if attn_type == "vanilla":
+ assert not attn_kwargs # None or empty; vanilla attention takes no extra kwargs
+ return AttnBlock(in_channels)
+ elif attn_type == "mv-vanilla":
+ assert attn_kwargs is not None
+ return SpatialTransformer3D(in_channels, **attn_kwargs) # TODO
+ elif attn_type == "vanilla-xformers":
+ print(f"building MemoryEfficientAttnBlock with {in_channels} in_channels...")
+ return MemoryEfficientAttnBlock(in_channels)
+ elif type == "memory-efficient-cross-attn":
+ attn_kwargs["query_dim"] = in_channels
+ return MemoryEfficientCrossAttentionWrapper(**attn_kwargs)
+ elif attn_type == "none":
+ return nn.Identity(in_channels)
+ else:
+ raise NotImplementedError()
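+
+# Usage sketch (hypothetical values): the multi-view branch forwards transformer kwargs, e.g.
+# attn = make_attn(512, attn_type='mv-vanilla',
+# attn_kwargs={'n_heads': 8, 'd_head': 64, 'depth': 1, 'context_dim': [None]})
+# while plain 'vanilla' silently upgrades to 'vanilla-xformers' whenever xformers imports.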
+
+
+class Model(nn.Module):
+ def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,
+ attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,
+ resolution, use_timestep=True, use_linear_attn=False, attn_type="vanilla", attn_kwargs={}):
+ super().__init__()
+ if use_linear_attn: attn_type = "linear"
+ self.ch = ch
+ self.temb_ch = self.ch*4
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ self.resolution = resolution
+ self.in_channels = in_channels
+
+ self.use_timestep = use_timestep
+ if self.use_timestep:
+ # timestep embedding
+ self.temb = nn.Module()
+ self.temb.dense = nn.ModuleList([
+ torch.nn.Linear(self.ch,
+ self.temb_ch),
+ torch.nn.Linear(self.temb_ch,
+ self.temb_ch),
+ ])
+
+ # downsampling
+ self.conv_in = torch.nn.Conv2d(in_channels,
+ self.ch,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ curr_res = resolution
+ in_ch_mult = (1,)+tuple(ch_mult)
+ self.down = nn.ModuleList()
+ for i_level in range(self.num_resolutions):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_in = ch*in_ch_mult[i_level]
+ block_out = ch*ch_mult[i_level]
+ for i_block in range(self.num_res_blocks):
+ block.append(ResnetBlock(in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout))
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type, attn_kwargs=attn_kwargs))
+ down = nn.Module()
+ down.block = block
+ down.attn = attn
+ if i_level != self.num_resolutions-1:
+ down.downsample = Downsample(block_in, resamp_with_conv)
+ curr_res = curr_res // 2
+ self.down.append(down)
+
+ # middle
+ self.mid = nn.Module()
+ self.mid.block_1 = ResnetBlock(in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout)
+ self.mid.attn_1 = make_attn(block_in, attn_type=attn_type, attn_kwargs=attn_kwargs)
+ self.mid.block_2 = ResnetBlock(in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout)
+
+ # upsampling
+ self.up = nn.ModuleList()
+ for i_level in reversed(range(self.num_resolutions)):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_out = ch*ch_mult[i_level]
+ skip_in = ch*ch_mult[i_level]
+ for i_block in range(self.num_res_blocks+1):
+ if i_block == self.num_res_blocks:
+ skip_in = ch*in_ch_mult[i_level]
+ block.append(ResnetBlock(in_channels=block_in+skip_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout))
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type, attn_kwargs=attn_kwargs))
+ up = nn.Module()
+ up.block = block
+ up.attn = attn
+ if i_level != 0:
+ up.upsample = Upsample(block_in, resamp_with_conv)
+ curr_res = curr_res * 2
+ self.up.insert(0, up) # prepend to get consistent order
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = torch.nn.Conv2d(block_in,
+ out_ch,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, x, t=None, context=None):
+ #assert x.shape[2] == x.shape[3] == self.resolution
+ if context is not None:
+ # assume aligned context, cat along channel axis
+ x = torch.cat((x, context), dim=1)
+ if self.use_timestep:
+ # timestep embedding
+ assert t is not None
+ temb = get_timestep_embedding(t, self.ch)
+ temb = self.temb.dense[0](temb)
+ temb = nonlinearity(temb)
+ temb = self.temb.dense[1](temb)
+ else:
+ temb = None
+
+ # downsampling
+ hs = [self.conv_in(x)]
+ for i_level in range(self.num_resolutions):
+ for i_block in range(self.num_res_blocks):
+ h = self.down[i_level].block[i_block](hs[-1], temb)
+ if len(self.down[i_level].attn) > 0:
+ h = self.down[i_level].attn[i_block](h)
+ hs.append(h)
+ if i_level != self.num_resolutions-1:
+ hs.append(self.down[i_level].downsample(hs[-1]))
+
+ # middle
+ h = hs[-1]
+ h = self.mid.block_1(h, temb)
+ h = self.mid.attn_1(h)
+ h = self.mid.block_2(h, temb)
+
+ # upsampling
+ for i_level in reversed(range(self.num_resolutions)):
+ for i_block in range(self.num_res_blocks+1):
+ h = self.up[i_level].block[i_block](
+ torch.cat([h, hs.pop()], dim=1), temb)
+ if len(self.up[i_level].attn) > 0:
+ h = self.up[i_level].attn[i_block](h)
+ if i_level != 0:
+ h = self.up[i_level].upsample(h)
+
+ # end
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h)
+ return h
+
+ def get_last_layer(self):
+ return self.conv_out.weight
+
+
+class Encoder(nn.Module):
+ def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,
+ attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,
+ resolution,
+ z_channels, double_z=True,
+ use_linear_attn=False, attn_type="vanilla",
+ attn_kwargs={},
+ add_fusion_layer=False,
+ **ignore_kwargs):
+ super().__init__()
+ if use_linear_attn: attn_type = "linear"
+ self.ch = ch
+ self.temb_ch = 0
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ self.resolution = resolution
+ self.in_channels = in_channels
+ self.z_channels = z_channels
+
+ # downsampling
+ self.conv_in = torch.nn.Conv2d(in_channels,
+ self.ch,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ curr_res = resolution
+ in_ch_mult = (1,)+tuple(ch_mult)
+ self.in_ch_mult = in_ch_mult
+ self.down = nn.ModuleList()
+ for i_level in range(self.num_resolutions):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_in = ch*in_ch_mult[i_level]
+ block_out = ch*ch_mult[i_level]
+ for i_block in range(self.num_res_blocks):
+ block.append(ResnetBlock(in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout))
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type, attn_kwargs=attn_kwargs))
+ down = nn.Module()
+ down.block = block
+ down.attn = attn
+ if i_level != self.num_resolutions-1:
+ down.downsample = Downsample(block_in, resamp_with_conv)
+ curr_res = curr_res // 2
+ self.down.append(down)
+
+ # middle
+ self.mid = nn.Module()
+ self.mid.block_1 = ResnetBlock(in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout)
+ self.mid.attn_1 = make_attn(block_in, attn_type=attn_type, attn_kwargs=attn_kwargs)
+ self.mid.block_2 = ResnetBlock(in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout)
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = torch.nn.Conv2d(block_in,
+ 2*z_channels if double_z else z_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+ # TODO: use attention-based? Later.
+ if add_fusion_layer: # fusion 4 frames
+ self.fusion_layer = torch.nn.Conv2d(2*z_channels*4 if double_z else z_channels*4,
+ 2*z_channels if double_z else z_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, x, **kwargs):
+ # timestep embedding
+ temb = None
+
+ # downsampling
+ hs = [self.conv_in(x)]
+ for i_level in range(self.num_resolutions):
+ for i_block in range(self.num_res_blocks):
+ h = self.down[i_level].block[i_block](hs[-1], temb)
+ if len(self.down[i_level].attn) > 0:
+ h = self.down[i_level].attn[i_block](h)
+ hs.append(h)
+ if i_level != self.num_resolutions-1:
+ hs.append(self.down[i_level].downsample(hs[-1]))
+
+ # middle
+ h = hs[-1]
+ h = self.mid.block_1(h, temb)
+ h = self.mid.attn_1(h, **kwargs)
+ h = self.mid.block_2(h, temb)
+
+ # end
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h)
+ return h
+
+class MVEncoder(Encoder):
+ def __init__(self, *, ch, out_ch, ch_mult=(1, 2, 4, 8), num_res_blocks, attn_resolutions, dropout=0, resamp_with_conv=True, in_channels, resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="mv-vanilla", **ignore_kwargs):
+ super().__init__(ch=ch, out_ch=out_ch, ch_mult=ch_mult, num_res_blocks=num_res_blocks, attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, in_channels=in_channels, resolution=resolution, z_channels=z_channels, double_z=double_z, use_linear_attn=use_linear_attn, attn_type=attn_type,
+ add_fusion_layer=True,
+ **ignore_kwargs)
+ self.num_frames = 4
+
+
+ def forward(self, x):
+ h = super().forward(x, num_frames=self.num_frames)
+ # multi-view aggregation, as in pixel-nerf
+ h = h.chunk(x.shape[0] // self.num_frames) # features from the same single instance aggregated here
+ # h = [feat.max(keepdim=True, dim=0)[0] for feat in h] # max pooling
+ h = [self.fusion_layer(torch.cat(feat.chunk(feat.shape[0]), dim=1)) for feat in h] # conv pooling
+ return torch.cat(h, dim=0)
+ # return torch.cat(h, dim=0).to(torch.float32)
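+
+# Shape sketch for MVEncoder.forward: x is (B*V, C_in, H, W) with V = num_frames = 4
+# views per instance; the encoder yields (B*V, 2*z, h, w), chunking regroups each
+# instance's V views, channel-concatenation gives (1, V*2*z, h, w), and fusion_layer
+# maps it back to (1, 2*z, h, w), so the output is (B, 2*z, h, w).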
+
+
+class MVEncoderGS(Encoder):
+ # support pixel-aligned rendering
+ def __init__(self, *, ch, out_ch, ch_mult=(1, 2, 4, 8), num_res_blocks, attn_resolutions, dropout=0, resamp_with_conv=True, in_channels, resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="mv-vanilla", **ignore_kwargs):
+ super().__init__(ch=ch, out_ch=out_ch, ch_mult=ch_mult, num_res_blocks=num_res_blocks, attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, in_channels=in_channels, resolution=resolution, z_channels=z_channels, double_z=double_z, use_linear_attn=use_linear_attn, attn_type=attn_type,
+ add_fusion_layer=False,
+ **ignore_kwargs)
+ self.num_frames = 4
+
+
+ def forward(self, x):
+ h = super().forward(x, num_frames=self.num_frames)
+
+ # multi-view aggregation, as in pixel-nerf
+ h = h.chunk(x.shape[0] // self.num_frames) # features from the same single instance aggregated here
+
+ # concatenate the views of each instance along the channel axis
+ h = [rearrange(latent, 'B C H W -> 1 (B C) H W') for latent in h] # basically concat
+ h = torch.cat(h, dim=0)
+
+ return h # B 16 H W when V=4, z_channels=2
+
+class MVEncoderGSDynamicInp(Encoder):
+ # support dynamic length input, e.g., up to 40 views during training/inference.
+ def __init__(self, *, ch, out_ch, ch_mult=(1, 2, 4, 8), num_res_blocks, attn_resolutions, dropout=0, resamp_with_conv=True, in_channels, resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="mv-vanilla", num_frames, **ignore_kwargs):
+ super().__init__(ch=ch, out_ch=out_ch, ch_mult=ch_mult, num_res_blocks=num_res_blocks, attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, in_channels=in_channels, resolution=resolution, z_channels=z_channels, double_z=double_z, use_linear_attn=use_linear_attn, attn_type=attn_type,
+ add_fusion_layer=False,
+ **ignore_kwargs)
+ self.num_frames = num_frames
+
+ def forward(self, x, num_frames=None):
+ if num_frames is None:
+ num_frames = self.num_frames # 4 for now, test later.
+ assert num_frames > 4
+
+ # resolve the view count first, so the attention blocks and the chunking below agree
+ h = super().forward(x, num_frames=num_frames)
+
+ # multi-view aggregation, as in pixel-nerf
+ h = h.chunk(x.shape[0] // num_frames) # features from the same single instance aggregated here
+ h = [feat.mean(keepdim=True, dim=0) for feat in h] # average pooling, 1 C H W for each h
+
+ return torch.cat(h, dim=0)
+
+# unproject the VAE latent here, since the SD-VAE latent is almost pixel-aligned.
+class MVEncoderUnprojRGB(Encoder):
+ # support dynamic length input, e.g., up to 40 views during training/inference.
+ def __init__(self, *, ch, out_ch, ch_mult=(1, 2, 4, 8), num_res_blocks, attn_resolutions, dropout=0, resamp_with_conv=True, in_channels, resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="mv-vanilla", num_frames, latent_num=768*3, **ignore_kwargs):
+ super().__init__(ch=ch, out_ch=out_ch, ch_mult=ch_mult, num_res_blocks=num_res_blocks, attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, in_channels=in_channels, resolution=resolution, z_channels=z_channels, double_z=double_z, use_linear_attn=use_linear_attn, attn_type=attn_type,
+ add_fusion_layer=False,
+ **ignore_kwargs)
+ self.num_frames = num_frames
+ # self.ray_sampler = RaySampler()
+ self.mean_filter = lambda x: kornia.filters.box_blur(x, (8,8)) # f=8
+ self.conv_out = nn.Identity()
+ self.latent_num = latent_num # 768 * 3 by default
+
+
+ def forward(self, x, c, depth, num_frames=None):
+ if num_frames is None:
+ num_frames = self.num_frames
+ assert num_frames >= 6
+
+ h = super().forward(x, num_frames=self.num_frames) # ! support data augmentation; different FPS yields a different latent for the same instance?
+
+ # Unfinished. Intended pipeline:
+ # 1. unproject the tokens into 3D using the cameras c and depth.
+ # 2. FPS-sample self.latent_num (768*3) latents from the 32x32x9 overall tokens
+ # and record their xyz, e.g. via
+ # _, fps_idx = pytorch3d.ops.sample_farthest_points(gt_pos.unsqueeze(0), K=self.latent_num)
+ # (requires importing pytorch3d and computing the unprojected positions gt_pos).
+ # 2.5 cross-attend the sampled latents to all tokens.
+ # 3. add a ViT transformer (5 layers, concat xyz positional encoding).
+ # 4. apply the VAE to the tokens (separately? check later).
+ raise NotImplementedError('MVEncoderUnprojRGB is not implemented yet.')
+
+
+
+class MVEncoderGSDynamicInp_CA(Encoder):
+ # support dynamic length input, e.g., up to 40 views during training/inference.
+ def __init__(self, *, ch, out_ch, ch_mult=(1, 2, 4, 8), num_res_blocks, attn_resolutions, dropout=0, resamp_with_conv=True, in_channels, resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="mv-vanilla", num_frames, **ignore_kwargs):
+ super().__init__(ch=ch, out_ch=out_ch, ch_mult=ch_mult, num_res_blocks=num_res_blocks, attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, in_channels=in_channels, resolution=resolution, z_channels=z_channels, double_z=double_z, use_linear_attn=use_linear_attn, attn_type=attn_type,
+ add_fusion_layer=False,
+ **ignore_kwargs)
+ self.num_frames = num_frames
+ query_dim = z_channels*(1+double_z)
+
+ self.readout_ca = MemoryEfficientCrossAttention(
+ query_dim,
+ 2*z_channels if double_z else z_channels,
+ )
+ self.latent_embedding = nn.Parameter(
+ torch.randn(1, 32 * 32 * 3, query_dim))
+
+ def forward(self, x, num_frames=None):
+ if num_frames is None:
+ num_frames = self.num_frames # 4 for now, test later.
+
+ x = super().forward(x, num_frames=num_frames)
+
+ # multi-view aggregation, as in pixel-nerf
+ x = rearrange(x, '(B V) C H W -> B (V H W) C', V=num_frames) # flatten all view tokens for cross-attention
+ x = self.readout_ca(self.latent_embedding.repeat(x.shape[0], 1, 1), x)
+
+ x = rearrange(x, 'B (N H W) C -> B C (N H) W', H=32, W=32, N=3)
+
+ return x
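+
+# Readout above: self.latent_embedding is a set of 32*32*3 learned queries that
+# cross-attend (via readout_ca) to the (V*H*W) encoder tokens of all views, giving a
+# fixed-size latent regardless of the number of input views; the final rearrange packs
+# it back into a (B, C, 96, 32) feature map.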
+
+
+class Decoder(nn.Module):
+ def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,
+ attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,
+ resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False,
+ attn_type="vanilla-xformers", **ignorekwargs):
+ super().__init__()
+ if use_linear_attn: attn_type = "linear"
+ self.ch = ch
+ self.temb_ch = 0
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ self.resolution = resolution
+ self.in_channels = in_channels
+ self.give_pre_end = give_pre_end
+ self.tanh_out = tanh_out
+
+ # compute in_ch_mult, block_in and curr_res at lowest res
+ in_ch_mult = (1,)+tuple(ch_mult)
+ block_in = ch*ch_mult[self.num_resolutions-1]
+ curr_res = resolution // 2**(self.num_resolutions-1)
+ self.z_shape = (1,z_channels,curr_res,curr_res)
+ print("Working with z of shape {} = {} dimensions.".format(
+ self.z_shape, np.prod(self.z_shape)))
+
+ # z to block_in
+ self.conv_in = torch.nn.Conv2d(z_channels,
+ block_in,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ # middle
+ self.mid = nn.Module()
+ self.mid.block_1 = ResnetBlock(in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout)
+ self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)
+ self.mid.block_2 = ResnetBlock(in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout)
+
+ # upsampling
+ self.up = nn.ModuleList()
+ for i_level in reversed(range(self.num_resolutions)):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_out = ch*ch_mult[i_level]
+ for i_block in range(self.num_res_blocks+1):
+ block.append(ResnetBlock(in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout))
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type))
+ up = nn.Module()
+ up.block = block
+ up.attn = attn
+ if i_level != 0:
+ up.upsample = Upsample(block_in, resamp_with_conv)
+ curr_res = curr_res * 2
+ self.up.insert(0, up) # prepend to get consistent order
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = torch.nn.Conv2d(block_in,
+ out_ch,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, z):
+ #assert z.shape[1:] == self.z_shape[1:]
+ self.last_z_shape = z.shape
+
+ # timestep embedding
+ temb = None
+
+ # z to block_in
+ h = self.conv_in(z)
+
+ # middle
+ h = self.mid.block_1(h, temb)
+ h = self.mid.attn_1(h)
+ h = self.mid.block_2(h, temb)
+
+ # upsampling
+ for i_level in reversed(range(self.num_resolutions)):
+ for i_block in range(self.num_res_blocks+1):
+ h = self.up[i_level].block[i_block](h, temb)
+ if len(self.up[i_level].attn) > 0:
+ h = self.up[i_level].attn[i_block](h)
+ if i_level != 0:
+ h = self.up[i_level].upsample(h)
+
+ # end
+ if self.give_pre_end:
+ return h
+
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h)
+ if self.tanh_out:
+ h = torch.tanh(h)
+ return h
+
+
+class SimpleDecoder(nn.Module):
+ def __init__(self, in_channels, out_channels, *args, **kwargs):
+ super().__init__()
+ self.model = nn.ModuleList([nn.Conv2d(in_channels, in_channels, 1),
+ ResnetBlock(in_channels=in_channels,
+ out_channels=2 * in_channels,
+ temb_channels=0, dropout=0.0),
+ ResnetBlock(in_channels=2 * in_channels,
+ out_channels=4 * in_channels,
+ temb_channels=0, dropout=0.0),
+ ResnetBlock(in_channels=4 * in_channels,
+ out_channels=2 * in_channels,
+ temb_channels=0, dropout=0.0),
+ nn.Conv2d(2*in_channels, in_channels, 1),
+ Upsample(in_channels, with_conv=True)])
+ # end
+ self.norm_out = Normalize(in_channels)
+ self.conv_out = torch.nn.Conv2d(in_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, x):
+ for i, layer in enumerate(self.model):
+ if i in [1,2,3]:
+ x = layer(x, None)
+ else:
+ x = layer(x)
+
+ h = self.norm_out(x)
+ h = nonlinearity(h)
+ x = self.conv_out(h)
+ return x
+
+
+class UpsampleDecoder(nn.Module):
+ def __init__(self, in_channels, out_channels, ch, num_res_blocks, resolution,
+ ch_mult=(2,2), dropout=0.0):
+ super().__init__()
+ # upsampling
+ self.temb_ch = 0
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ block_in = in_channels
+ curr_res = resolution // 2 ** (self.num_resolutions - 1)
+ self.res_blocks = nn.ModuleList()
+ self.upsample_blocks = nn.ModuleList()
+ for i_level in range(self.num_resolutions):
+ res_block = []
+ block_out = ch * ch_mult[i_level]
+ for i_block in range(self.num_res_blocks + 1):
+ res_block.append(ResnetBlock(in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout))
+ block_in = block_out
+ self.res_blocks.append(nn.ModuleList(res_block))
+ if i_level != self.num_resolutions - 1:
+ self.upsample_blocks.append(Upsample(block_in, True))
+ curr_res = curr_res * 2
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = torch.nn.Conv2d(block_in,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, x):
+ # upsampling
+ h = x
+ for k, i_level in enumerate(range(self.num_resolutions)):
+ for i_block in range(self.num_res_blocks + 1):
+ h = self.res_blocks[i_level][i_block](h, None)
+ if i_level != self.num_resolutions - 1:
+ h = self.upsample_blocks[k](h)
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h)
+ return h
+
+
+class LatentRescaler(nn.Module):
+ def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2):
+ super().__init__()
+ # residual block, interpolate, residual block
+ self.factor = factor
+ self.conv_in = nn.Conv2d(in_channels,
+ mid_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+ self.res_block1 = nn.ModuleList([ResnetBlock(in_channels=mid_channels,
+ out_channels=mid_channels,
+ temb_channels=0,
+ dropout=0.0) for _ in range(depth)])
+ self.attn = AttnBlock(mid_channels)
+ self.res_block2 = nn.ModuleList([ResnetBlock(in_channels=mid_channels,
+ out_channels=mid_channels,
+ temb_channels=0,
+ dropout=0.0) for _ in range(depth)])
+
+ self.conv_out = nn.Conv2d(mid_channels,
+ out_channels,
+ kernel_size=1,
+ )
+
+ def forward(self, x):
+ x = self.conv_in(x)
+ for block in self.res_block1:
+ x = block(x, None)
+ x = torch.nn.functional.interpolate(x, size=(int(round(x.shape[2]*self.factor)), int(round(x.shape[3]*self.factor))))
+ x = self.attn(x)
+ for block in self.res_block2:
+ x = block(x, None)
+ x = self.conv_out(x)
+ return x
+
+
+class MergedRescaleEncoder(nn.Module):
+ def __init__(self, in_channels, ch, resolution, out_ch, num_res_blocks,
+ attn_resolutions, dropout=0.0, resamp_with_conv=True,
+ ch_mult=(1,2,4,8), rescale_factor=1.0, rescale_module_depth=1):
+ super().__init__()
+ intermediate_chn = ch * ch_mult[-1]
+ self.encoder = Encoder(in_channels=in_channels, num_res_blocks=num_res_blocks, ch=ch, ch_mult=ch_mult,
+ z_channels=intermediate_chn, double_z=False, resolution=resolution,
+ attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv,
+ out_ch=None)
+ self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=intermediate_chn,
+ mid_channels=intermediate_chn, out_channels=out_ch, depth=rescale_module_depth)
+
+ def forward(self, x):
+ x = self.encoder(x)
+ x = self.rescaler(x)
+ return x
+
+
+class MergedRescaleDecoder(nn.Module):
+ def __init__(self, z_channels, out_ch, resolution, num_res_blocks, attn_resolutions, ch, ch_mult=(1,2,4,8),
+ dropout=0.0, resamp_with_conv=True, rescale_factor=1.0, rescale_module_depth=1):
+ super().__init__()
+ tmp_chn = z_channels*ch_mult[-1]
+ self.decoder = Decoder(out_ch=out_ch, z_channels=tmp_chn, attn_resolutions=attn_resolutions, dropout=dropout,
+ resamp_with_conv=resamp_with_conv, in_channels=None, num_res_blocks=num_res_blocks,
+ ch_mult=ch_mult, resolution=resolution, ch=ch)
+ self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=z_channels, mid_channels=tmp_chn,
+ out_channels=tmp_chn, depth=rescale_module_depth)
+
+ def forward(self, x):
+ x = self.rescaler(x)
+ x = self.decoder(x)
+ return x
+
+
+class Upsampler(nn.Module):
+ def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2):
+ super().__init__()
+ assert out_size >= in_size
+ num_blocks = int(np.log2(out_size//in_size))+1
+ factor_up = 1.+ (out_size % in_size)
+ print(f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}")
+ self.rescaler = LatentRescaler(factor=factor_up, in_channels=in_channels, mid_channels=2*in_channels,
+ out_channels=in_channels)
+ self.decoder = Decoder(out_ch=out_channels, resolution=out_size, z_channels=in_channels, num_res_blocks=2,
+ attn_resolutions=[], in_channels=None, ch=in_channels,
+ ch_mult=[ch_mult for _ in range(num_blocks)])
+
+ def forward(self, x):
+ x = self.rescaler(x)
+ x = self.decoder(x)
+ return x
+
+
+class Resize(nn.Module):
+ def __init__(self, in_channels=None, learned=False, mode="bilinear"):
+ super().__init__()
+ self.with_conv = learned
+ self.mode = mode
+ if self.with_conv:
+ print(f"Note: {self.__class__.__name} uses learned downsampling and will ignore the fixed {mode} mode")
+ raise NotImplementedError()
+ assert in_channels is not None
+ # no asymmetric padding in torch conv, must do it ourselves
+ self.conv = torch.nn.Conv2d(in_channels,
+ in_channels,
+ kernel_size=4,
+ stride=2,
+ padding=1)
+
+ def forward(self, x, scale_factor=1.0):
+ if scale_factor==1.0:
+ return x
+ else:
+ x = torch.nn.functional.interpolate(x, mode=self.mode, align_corners=False, scale_factor=scale_factor)
+ return x
+
+
+# ! lgm unet
\ No newline at end of file
diff --git a/ldm/modules/diffusionmodules/mv_unet.py b/ldm/modules/diffusionmodules/mv_unet.py
new file mode 100644
index 0000000000000000000000000000000000000000..309f370995d782362656dccb5645068f365b9965
--- /dev/null
+++ b/ldm/modules/diffusionmodules/mv_unet.py
@@ -0,0 +1,456 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+import numpy as np
+from typing import Tuple, Literal
+from functools import partial
+
+from pdb import set_trace as st
+
+# from core.attention import MemEffAttention
+from vit.vision_transformer import MemEffAttention
+
+
+class MVAttention(nn.Module):
+
+ def __init__(
+ self,
+ dim: int,
+ num_heads: int = 8,
+ qkv_bias: bool = False,
+ proj_bias: bool = True,
+ attn_drop: float = 0.0,
+ proj_drop: float = 0.0,
+ groups: int = 32,
+ eps: float = 1e-5,
+ residual: bool = True,
+ skip_scale: float = 1,
+ num_frames: int = 4, # WARN: hardcoded!
+ ):
+ super().__init__()
+
+ self.residual = residual
+ self.skip_scale = skip_scale
+ self.num_frames = num_frames
+
+ self.norm = nn.GroupNorm(num_groups=groups,
+ num_channels=dim,
+ eps=eps,
+ affine=True)
+ self.attn = MemEffAttention(dim, num_heads, qkv_bias, proj_bias,
+ attn_drop, proj_drop)
+
+ def forward(self, x):
+ # x: [B*V, C, H, W]
+ BV, C, H, W = x.shape
+ B = BV // self.num_frames # assert BV % self.num_frames == 0
+
+ res = x
+ x = self.norm(x)
+
+ x = x.reshape(B, self.num_frames, C, H,
+ W).permute(0, 1, 3, 4, 2).reshape(B, -1, C)
+ x = self.attn(x)
+ x = x.reshape(B, self.num_frames, H, W,
+ C).permute(0, 1, 4, 2, 3).reshape(BV, C, H, W)
+
+ if self.residual:
+ x = (x + res) * self.skip_scale
+ return x
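+
+# MVAttention above attends across views jointly: the (B*V, C, H, W) input is folded
+# into (B, V*H*W, C) so every pixel token can attend to tokens of all V frames of the
+# same instance, then reshaped back; with residual=True the output is (x + res) * skip_scale.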
+
+
+class ResnetBlock(nn.Module):
+
+ def __init__(
+ self,
+ in_channels: int,
+ out_channels: int,
+ resample: Literal['default', 'up', 'down'] = 'default',
+ groups: int = 32,
+ eps: float = 1e-5,
+ skip_scale: float = 1, # multiplied to output
+ ):
+ super().__init__()
+
+ self.in_channels = in_channels
+ self.out_channels = out_channels
+ self.skip_scale = skip_scale
+
+ self.norm1 = nn.GroupNorm(num_groups=groups,
+ num_channels=in_channels,
+ eps=eps,
+ affine=True)
+ self.conv1 = nn.Conv2d(in_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ self.norm2 = nn.GroupNorm(num_groups=groups,
+ num_channels=out_channels,
+ eps=eps,
+ affine=True)
+ self.conv2 = nn.Conv2d(out_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ self.act = F.silu
+
+ self.resample = None
+ if resample == 'up':
+ self.resample = partial(F.interpolate,
+ scale_factor=2.0,
+ mode="nearest")
+ elif resample == 'down':
+ self.resample = nn.AvgPool2d(kernel_size=2, stride=2)
+
+ self.shortcut = nn.Identity()
+ if self.in_channels != self.out_channels:
+ self.shortcut = nn.Conv2d(in_channels,
+ out_channels,
+ kernel_size=1,
+ bias=True)
+
+ def forward(self, x):
+ res = x
+
+ x = self.norm1(x)
+ x = self.act(x)
+
+ if self.resample:
+ res = self.resample(res)
+ x = self.resample(x)
+
+ x = self.conv1(x)
+ x = self.norm2(x)
+ x = self.act(x)
+ x = self.conv2(x)
+
+ x = (x + self.shortcut(res)) * self.skip_scale
+
+ return x
+
+
+class DownBlock(nn.Module):
+
+ def __init__(
+ self,
+ in_channels: int,
+ out_channels: int,
+ num_layers: int = 1,
+ downsample: bool = True,
+ attention: bool = True,
+ attention_heads: int = 16,
+ skip_scale: float = 1,
+ ):
+ super().__init__()
+
+ nets = []
+ attns = []
+ for i in range(num_layers):
+ in_channels = in_channels if i == 0 else out_channels
+ nets.append(
+ ResnetBlock(in_channels, out_channels, skip_scale=skip_scale))
+ if attention:
+ attns.append(
+ MVAttention(out_channels,
+ attention_heads,
+ skip_scale=skip_scale))
+ else:
+ attns.append(None)
+ self.nets = nn.ModuleList(nets)
+ self.attns = nn.ModuleList(attns)
+
+ self.downsample = None
+ if downsample:
+ self.downsample = nn.Conv2d(out_channels,
+ out_channels,
+ kernel_size=3,
+ stride=2,
+ padding=1)
+
+ def forward(self, x):
+ xs = []
+
+ for attn, net in zip(self.attns, self.nets):
+ x = net(x)
+ if attn:
+ x = attn(x)
+ xs.append(x)
+
+ if self.downsample:
+ x = self.downsample(x)
+ xs.append(x)
+
+ return x, xs
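+
+# Each DownBlock returns both its output x and the list xs of intermediate features
+# (one per resnet/attn pair, plus the downsampled output); MVUNet collects these in
+# xss and the UpBlocks later consume them in reverse order as skip connections.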
+
+
+class MidBlock(nn.Module):
+
+ def __init__(
+ self,
+ in_channels: int,
+ num_layers: int = 1,
+ attention: bool = True,
+ attention_heads: int = 16,
+ skip_scale: float = 1,
+ ):
+ super().__init__()
+
+ nets = []
+ attns = []
+ # first layer
+ nets.append(
+ ResnetBlock(in_channels, in_channels, skip_scale=skip_scale))
+ # more layers
+ for i in range(num_layers):
+ nets.append(
+ ResnetBlock(in_channels, in_channels, skip_scale=skip_scale))
+ if attention:
+ attns.append(
+ MVAttention(in_channels,
+ attention_heads,
+ skip_scale=skip_scale))
+ else:
+ attns.append(None)
+ self.nets = nn.ModuleList(nets)
+ self.attns = nn.ModuleList(attns)
+
+ def forward(self, x):
+ x = self.nets[0](x)
+ for attn, net in zip(self.attns, self.nets[1:]):
+ if attn:
+ x = attn(x)
+ x = net(x)
+ return x
+
+
+class UpBlock(nn.Module):
+
+ def __init__(
+ self,
+ in_channels: int,
+ prev_out_channels: int,
+ out_channels: int,
+ num_layers: int = 1,
+ upsample: bool = True,
+ attention: bool = True,
+ attention_heads: int = 16,
+ skip_scale: float = 1,
+ ):
+ super().__init__()
+
+ nets = []
+ attns = []
+ for i in range(num_layers):
+ cin = in_channels if i == 0 else out_channels
+ cskip = prev_out_channels if (i == num_layers -
+ 1) else out_channels
+
+ nets.append(
+ ResnetBlock(cin + cskip, out_channels, skip_scale=skip_scale))
+ if attention:
+ attns.append(
+ MVAttention(out_channels,
+ attention_heads,
+ skip_scale=skip_scale))
+ else:
+ attns.append(None)
+ self.nets = nn.ModuleList(nets)
+ self.attns = nn.ModuleList(attns)
+
+ self.upsample = None
+ if upsample:
+ self.upsample = nn.Conv2d(out_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, x, xs):
+
+ for attn, net in zip(self.attns, self.nets):
+ res_x = xs[-1]
+ xs = xs[:-1]
+ x = torch.cat([x, res_x], dim=1)
+ x = net(x)
+ if attn:
+ x = attn(x)
+
+ if self.upsample:
+ x = F.interpolate(x, scale_factor=2.0, mode='nearest')
+ x = self.upsample(x)
+
+ return x
+
+
+# it could be asymmetric!
+class MVUNet(nn.Module):
+
+ def __init__(
+ self,
+ in_channels: int = 3,
+ out_channels: int = 3,
+ down_channels: Tuple[int, ...] = (64, 128, 256, 512, 1024),
+ down_attention: Tuple[bool,
+ ...] = (False, False, False, True, True),
+ mid_attention: bool = True,
+ up_channels: Tuple[int, ...] = (1024, 512, 256),
+ up_attention: Tuple[bool, ...] = (True, True, False),
+ layers_per_block: int = 2,
+ skip_scale: float = np.sqrt(0.5),
+ ):
+ super().__init__()
+
+ # first
+ self.conv_in = nn.Conv2d(in_channels,
+ down_channels[0],
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ # down
+ down_blocks = []
+ cout = down_channels[0]
+ for i in range(len(down_channels)):
+ cin = cout
+ cout = down_channels[i]
+
+ down_blocks.append(
+ DownBlock(
+ cin,
+ cout,
+ num_layers=layers_per_block,
+ downsample=(i
+ != len(down_channels) - 1), # not final layer
+ attention=down_attention[i],
+ skip_scale=skip_scale,
+ ))
+ self.down_blocks = nn.ModuleList(down_blocks)
+
+ # mid
+ self.mid_block = MidBlock(down_channels[-1],
+ attention=mid_attention,
+ skip_scale=skip_scale)
+
+ # up
+ up_blocks = []
+ cout = up_channels[0]
+ for i in range(len(up_channels)):
+ cin = cout
+ cout = up_channels[i]
+ cskip = down_channels[max(-2 - i,
+ -len(down_channels))] # for asymmetric
+
+ up_blocks.append(
+ UpBlock(
+ cin,
+ cskip,
+ cout,
+ num_layers=layers_per_block + 1, # one more layer for up
+ upsample=(i != len(up_channels) - 1), # not final layer
+ attention=up_attention[i],
+ skip_scale=skip_scale,
+ ))
+ self.up_blocks = nn.ModuleList(up_blocks)
+
+ # last
+ self.norm_out = nn.GroupNorm(num_channels=up_channels[-1],
+ num_groups=32,
+ eps=1e-5)
+ self.conv_out = nn.Conv2d(up_channels[-1],
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ def forward(self, x):
+ # x: [B, Cin, H, W]
+
+ # first
+ x = self.conv_in(x)
+
+ # down
+ xss = [x]
+ for block in self.down_blocks:
+ x, xs = block(x)
+ xss.extend(xs)
+
+ # mid
+ x = self.mid_block(x) # 32 (B V) 1024 16 16
+
+ # up
+ for block in self.up_blocks:
+ xs = xss[-len(block.nets):]
+ xss = xss[:-len(block.nets)]
+ x = block(x, xs)
+
+ # last
+ x = self.norm_out(x)
+ x = F.silu(x)
+ x = self.conv_out(x) # [B, Cout, H', W']
+
+ return x
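+
+# Shape sketch with the defaults (an illustration, not the only valid configuration):
+# a (B, 3, 256, 256) input is downsampled 4x (once per non-final DownBlock) to
+# (B, 1024, 16, 16) at the mid block, then upsampled only twice by the asymmetric
+# up path to (B, 256, 64, 64) before conv_out, i.e. the output is 4x smaller than the input.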
+
+
+class LGM_MVEncoder(MVUNet):
+
+ def __init__(
+ self,
+ in_channels: int = 3,
+ out_channels: int = 3,
+ down_channels: Tuple[int, ...] = (64, 128, 256, 512, 1024),
+ down_attention: Tuple[bool, ...] = (False, False, False, True, True),
+ mid_attention: bool = True,
+ up_channels: Tuple[int, ...] = (1024, 512, 256),
+ up_attention: Tuple[bool, ...] = (True, True, False),
+ layers_per_block: int = 2,
+ skip_scale: float = np.sqrt(0.5),
+ z_channels=4,
+ double_z=True,
+ add_fusion_layer=True,
+ ):
+ super().__init__(in_channels, out_channels, down_channels,
+ down_attention, mid_attention, up_channels,
+ up_attention, layers_per_block, skip_scale)
+ del self.up_blocks
+
+ self.conv_out = torch.nn.Conv2d(up_channels[0],
+ 2 *
+ z_channels if double_z else z_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+ if add_fusion_layer: # fusion 4 frames
+ self.fusion_layer = torch.nn.Conv2d(
+ 2 * z_channels * 4 if double_z else z_channels * 4,
+ 2 * z_channels if double_z else z_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1)
+
+ self.num_frames = 4 # !hard coded
+
+ def forward(self, x):
+ # first
+ x = self.conv_in(x)
+
+ # down
+ xss = [x]
+ for block in self.down_blocks:
+ x, xs = block(x)
+ xss.extend(xs)
+
+ # mid
+ x = self.mid_block(x) # 32 (B V) 1024 16 16
+
+ # multi-view aggregation, as in pixel-nerf
+ x = x.chunk(x.shape[0] // self.num_frames) # features from the same single instance aggregated here
+ # h = [feat.max(keepdim=True, dim=0)[0] for feat in h] # max pooling
+ x = [self.fusion_layer(torch.cat(feat.chunk(feat.shape[0]), dim=1)) for feat in x] # conv pooling
+ return torch.cat(x, dim=0)
\ No newline at end of file
diff --git a/ldm/modules/diffusionmodules/openaimodel.py b/ldm/modules/diffusionmodules/openaimodel.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab9b7d0bdb1841de703e0fe037b9d4a4ff987594
--- /dev/null
+++ b/ldm/modules/diffusionmodules/openaimodel.py
@@ -0,0 +1,1245 @@
+from abc import abstractmethod
+import math
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from ldm.modules.diffusionmodules.util import (
+ checkpoint,
+ conv_nd,
+ linear,
+ avg_pool_nd,
+ zero_module,
+ normalization,
+ timestep_embedding,
+)
+from ..attention import SpatialTransformer, SpatialTransformer3D
+from ldm.util import exists
+
+from pdb import set_trace as st
+
+
+# dummy replace
+def convert_module_to_f16(x):
+ pass
+
+def convert_module_to_f32(x):
+ pass
+
+
+## go
+class AttentionPool2d(nn.Module):
+ """
+ Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py
+ """
+
+ def __init__(
+ self,
+ spacial_dim: int,
+ embed_dim: int,
+ num_heads_channels: int,
+ output_dim: int = None,
+ ):
+ super().__init__()
+ self.positional_embedding = nn.Parameter(th.randn(embed_dim, spacial_dim ** 2 + 1) / embed_dim ** 0.5)
+ self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1)
+ self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1)
+ self.num_heads = embed_dim // num_heads_channels
+ self.attention = QKVAttention(self.num_heads)
+
+ def forward(self, x):
+ b, c, *_spatial = x.shape
+ x = x.reshape(b, c, -1) # NC(HW)
+ x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1)
+ x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1)
+ x = self.qkv_proj(x)
+ x = self.attention(x)
+ x = self.c_proj(x)
+ return x[:, :, 0]
+
+
+class TimestepBlock(nn.Module):
+ """
+ Any module where forward() takes timestep embeddings as a second argument.
+ """
+
+ @abstractmethod
+ def forward(self, x, emb):
+ """
+ Apply the module to `x` given `emb` timestep embeddings.
+ """
+
+
+class TimestepEmbedSequential(nn.Sequential, TimestepBlock):
+ """
+ A sequential module that passes timestep embeddings to the children that
+ support it as an extra input.
+ """
+
+ def forward(self, x, emb, context=None):
+ for layer in self:
+ if isinstance(layer, TimestepBlock):
+ x = layer(x, emb)
+ elif isinstance(layer, SpatialTransformer):
+ x = layer(x, context)
+ else:
+ x = layer(x)
+ return x
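+
+# Dispatch rule above: timestep embeddings reach only TimestepBlock children (ResBlock),
+# the conditioning context reaches only SpatialTransformer children, and every other
+# layer is called with x alone.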
+
+
+class Upsample(nn.Module):
+ """
+ An upsampling layer with an optional convolution.
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ upsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ if use_conv:
+ self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=padding)
+
+ def forward(self, x):
+ assert x.shape[1] == self.channels
+ if self.dims == 3:
+ x = F.interpolate(
+ x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest"
+ )
+ else:
+ x = F.interpolate(x, scale_factor=2, mode="nearest")
+ if self.use_conv:
+ x = self.conv(x)
+ return x
+
+class TransposedUpsample(nn.Module):
+ 'Learned 2x upsampling without padding'
+ def __init__(self, channels, out_channels=None, ks=5):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+
+ self.up = nn.ConvTranspose2d(self.channels,self.out_channels,kernel_size=ks,stride=2)
+
+ def forward(self,x):
+ return self.up(x)
+
+
+class Downsample(nn.Module):
+ """
+ A downsampling layer with an optional convolution.
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ downsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(self, channels, use_conv, dims=2, out_channels=None,padding=1):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ stride = 2 if dims != 3 else (1, 2, 2)
+ if use_conv:
+ self.op = conv_nd(
+ dims, self.channels, self.out_channels, 3, stride=stride, padding=padding
+ )
+ else:
+ assert self.channels == self.out_channels
+ self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)
+
+ def forward(self, x):
+ assert x.shape[1] == self.channels
+ return self.op(x)
+
+
+class ResBlock(TimestepBlock):
+ """
+ A residual block that can optionally change the number of channels.
+ :param channels: the number of input channels.
+ :param emb_channels: the number of timestep embedding channels.
+ :param dropout: the rate of dropout.
+ :param out_channels: if specified, the number of out channels.
+ :param use_conv: if True and out_channels is specified, use a spatial
+ convolution instead of a smaller 1x1 convolution to change the
+ channels in the skip connection.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param use_checkpoint: if True, use gradient checkpointing on this module.
+ :param up: if True, use this block for upsampling.
+ :param down: if True, use this block for downsampling.
+ """
+
+ def __init__(
+ self,
+ channels,
+ emb_channels,
+ dropout,
+ out_channels=None,
+ use_conv=False,
+ use_scale_shift_norm=False,
+ dims=2,
+ use_checkpoint=False,
+ up=False,
+ down=False,
+ ):
+ super().__init__()
+ self.channels = channels
+ self.emb_channels = emb_channels
+ self.dropout = dropout
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.use_checkpoint = use_checkpoint
+ self.use_scale_shift_norm = use_scale_shift_norm
+
+ self.in_layers = nn.Sequential(
+ normalization(channels),
+ nn.SiLU(),
+ conv_nd(dims, channels, self.out_channels, 3, padding=1),
+ )
+
+ self.updown = up or down
+
+ if up:
+ self.h_upd = Upsample(channels, False, dims)
+ self.x_upd = Upsample(channels, False, dims)
+ elif down:
+ self.h_upd = Downsample(channels, False, dims)
+ self.x_upd = Downsample(channels, False, dims)
+ else:
+ self.h_upd = self.x_upd = nn.Identity()
+
+ self.emb_layers = nn.Sequential(
+ nn.SiLU(),
+ linear(
+ emb_channels,
+ 2 * self.out_channels if use_scale_shift_norm else self.out_channels,
+ ),
+ )
+ self.out_layers = nn.Sequential(
+ normalization(self.out_channels),
+ nn.SiLU(),
+ nn.Dropout(p=dropout),
+ zero_module(
+ conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1)
+ ),
+ )
+
+ if self.out_channels == channels:
+ self.skip_connection = nn.Identity()
+ elif use_conv:
+ self.skip_connection = conv_nd(
+ dims, channels, self.out_channels, 3, padding=1
+ )
+ else:
+ self.skip_connection = conv_nd(dims, channels, self.out_channels, 1)
+
+ def forward(self, x, emb):
+ """
+ Apply the block to a Tensor, conditioned on a timestep embedding.
+ :param x: an [N x C x ...] Tensor of features.
+ :param emb: an [N x emb_channels] Tensor of timestep embeddings.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+ return checkpoint(
+ self._forward, (x, emb), self.parameters(), self.use_checkpoint
+ )
+
+
+ def _forward(self, x, emb):
+ if self.updown:
+ in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1]
+ h = in_rest(x)
+ h = self.h_upd(h)
+ x = self.x_upd(x)
+ h = in_conv(h)
+ else:
+ h = self.in_layers(x)
+ emb_out = self.emb_layers(emb).type(h.dtype)
+ while len(emb_out.shape) < len(h.shape):
+ emb_out = emb_out[..., None]
+ if self.use_scale_shift_norm:
+ out_norm, out_rest = self.out_layers[0], self.out_layers[1:]
+ scale, shift = th.chunk(emb_out, 2, dim=1)
+ h = out_norm(h) * (1 + scale) + shift
+ h = out_rest(h)
+ else:
+ h = h + emb_out
+ h = self.out_layers(h)
+ return self.skip_connection(x) + h
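+
+# When use_scale_shift_norm is on, the embedding acts as FiLM-style conditioning:
+# emb is projected to 2*C channels, split into (scale, shift), and applied as
+# h = norm(h) * (1 + scale) + shift; otherwise it is simply added to h.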
+
+
+class AttentionBlock(nn.Module):
+ """
+ An attention block that allows spatial positions to attend to each other.
+ Originally ported from here, but adapted to the N-d case.
+ https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66.
+ """
+
+ def __init__(
+ self,
+ channels,
+ num_heads=1,
+ num_head_channels=-1,
+ use_checkpoint=False,
+ use_new_attention_order=False,
+ ):
+ super().__init__()
+ self.channels = channels
+ if num_head_channels == -1:
+ self.num_heads = num_heads
+ else:
+ assert (
+ channels % num_head_channels == 0
+ ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}"
+ self.num_heads = channels // num_head_channels
+ self.use_checkpoint = use_checkpoint
+ self.norm = normalization(channels)
+ self.qkv = conv_nd(1, channels, channels * 3, 1)
+ if use_new_attention_order:
+ # split qkv before split heads
+ self.attention = QKVAttention(self.num_heads)
+ else:
+ # split heads before split qkv
+ self.attention = QKVAttentionLegacy(self.num_heads)
+
+ self.proj_out = zero_module(conv_nd(1, channels, channels, 1))
+
+ def forward(self, x):
+ return checkpoint(self._forward, (x,), self.parameters(), True) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!!
+ #return pt_checkpoint(self._forward, x) # pytorch
+
+ def _forward(self, x):
+ b, c, *spatial = x.shape
+ x = x.reshape(b, c, -1)
+ qkv = self.qkv(self.norm(x))
+ h = self.attention(qkv)
+ h = self.proj_out(h)
+ return (x + h).reshape(b, c, *spatial)
+
+
+def count_flops_attn(model, _x, y):
+ """
+ A counter for the `thop` package to count the operations in an
+ attention operation.
+ Meant to be used like:
+ macs, params = thop.profile(
+ model,
+ inputs=(inputs, timestamps),
+ custom_ops={QKVAttention: QKVAttention.count_flops},
+ )
+ """
+ b, c, *spatial = y[0].shape
+ num_spatial = int(np.prod(spatial))
+ # We perform two matmuls with the same number of ops.
+ # The first computes the weight matrix, the second computes
+ # the combination of the value vectors.
+ matmul_ops = 2 * b * (num_spatial ** 2) * c
+ model.total_ops += th.DoubleTensor([matmul_ops])
+
+
+class QKVAttentionLegacy(nn.Module):
+ """
+ A module which performs QKV attention. Matches legacy QKVAttention + input/output heads shaping
+ """
+
+ def __init__(self, n_heads):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv):
+ """
+ Apply QKV attention.
+ :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts", q * scale, k * scale
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v)
+ return a.reshape(bs, -1, length)
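+
+ # Scaling note: multiplying q and k each by ch**-0.25 is mathematically identical to
+ # dividing the logits by sqrt(ch), but keeps intermediate values smaller, which is
+ # what the "more stable with f16" comment above refers to.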
+
+ @staticmethod
+ def count_flops(model, _x, y):
+ return count_flops_attn(model, _x, y)
+
+
+class QKVAttention(nn.Module):
+ """
+ A module which performs QKV attention and splits in a different order.
+ """
+
+ def __init__(self, n_heads):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv):
+ """
+ Apply QKV attention.
+ :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.chunk(3, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts",
+ (q * scale).view(bs * self.n_heads, ch, length),
+ (k * scale).view(bs * self.n_heads, ch, length),
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length))
+ return a.reshape(bs, -1, length)
+
+ @staticmethod
+ def count_flops(model, _x, y):
+ return count_flops_attn(model, _x, y)
+
+
+class UNetModel(nn.Module):
+ """
+ The full UNet model with attention and timestep embedding.
+ :param in_channels: channels in the input Tensor.
+ :param model_channels: base channel count for the model.
+ :param out_channels: channels in the output Tensor.
+ :param num_res_blocks: number of residual blocks per downsample.
+ :param attention_resolutions: a collection of downsample rates at which
+ attention will take place. May be a set, list, or tuple.
+ For example, if this contains 4, then at 4x downsampling, attention
+ will be used.
+ :param dropout: the dropout probability.
+ :param channel_mult: channel multiplier for each level of the UNet.
+ :param conv_resample: if True, use learned convolutions for upsampling and
+ downsampling.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param num_classes: if specified (as an int), then this model will be
+ class-conditional with `num_classes` classes.
+ :param use_checkpoint: use gradient checkpointing to reduce memory usage.
+ :param num_heads: the number of attention heads in each attention layer.
+ :param num_heads_channels: if specified, ignore num_heads and instead use
+ a fixed channel width per attention head.
+ :param num_heads_upsample: works with num_heads to set a different number
+ of heads for upsampling. Deprecated.
+ :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.
+ :param resblock_updown: use residual blocks for up/downsampling.
+ :param use_new_attention_order: use a different attention pattern for potentially
+ increased efficiency.
+ """
+
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ out_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ num_classes=None,
+ use_checkpoint=False,
+ use_fp16=False,
+ num_heads=-1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ use_spatial_transformer=False, # custom transformer support
+ transformer_depth=1, # custom transformer support
+ context_dim=None, # custom transformer support
+ n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model
+ legacy=True,
+ disable_self_attentions=None,
+ num_attention_blocks=None,
+ disable_middle_self_attn=False,
+ use_linear_in_transformer=False,
+ ):
+ super().__init__()
+ if use_spatial_transformer:
+ assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...'
+
+ if context_dim is not None:
+ assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...'
+ from omegaconf.listconfig import ListConfig
+            if isinstance(context_dim, ListConfig):
+ context_dim = list(context_dim)
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ if num_heads == -1:
+ assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set'
+
+ if num_head_channels == -1:
+ assert num_heads != -1, 'Either num_heads or num_head_channels has to be set'
+
+ self.image_size = image_size
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ if isinstance(num_res_blocks, int):
+ self.num_res_blocks = len(channel_mult) * [num_res_blocks]
+ else:
+ if len(num_res_blocks) != len(channel_mult):
+ raise ValueError("provide num_res_blocks either as an int (globally constant) or "
+ "as a list/tuple (per-level) with the same length as channel_mult")
+ self.num_res_blocks = num_res_blocks
+ if disable_self_attentions is not None:
+ # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not
+ assert len(disable_self_attentions) == len(channel_mult)
+ if num_attention_blocks is not None:
+ assert len(num_attention_blocks) == len(self.num_res_blocks)
+ assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks))))
+ print(f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. "
+ f"This option has LESS priority than attention_resolutions {attention_resolutions}, "
+ f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, "
+ f"attention will still not be set.")
+
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.num_classes = num_classes
+ self.use_checkpoint = use_checkpoint
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+ self.predict_codebook_ids = n_embed is not None
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if self.num_classes is not None:
+ if isinstance(self.num_classes, int):
+ self.label_emb = nn.Embedding(num_classes, time_embed_dim)
+ elif self.num_classes == "continuous":
+ print("setting up linear c_adm embedding layer")
+ self.label_emb = nn.Linear(1, time_embed_dim)
+ else:
+ raise ValueError()
+
+ self.input_blocks = nn.ModuleList(
+ [
+ TimestepEmbedSequential(
+ conv_nd(dims, in_channels, model_channels, 3, padding=1)
+ )
+ ]
+ )
+ self._feature_size = model_channels
+ input_block_chans = [model_channels]
+ ch = model_channels
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for nr in range(self.num_res_blocks[level]):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=mult * model_channels,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = mult * model_channels
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ if exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if not exists(num_attention_blocks) or nr < num_attention_blocks[level]:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer( # always uses a self-attn
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+
+ self.output_blocks = nn.ModuleList([])
+ for level, mult in list(enumerate(channel_mult))[::-1]:
+ for i in range(self.num_res_blocks[level] + 1):
+ ich = input_block_chans.pop()
+ layers = [
+ ResBlock(
+ ch + ich,
+ time_embed_dim,
+ dropout,
+ out_channels=model_channels * mult,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = model_channels * mult
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ if exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if not exists(num_attention_blocks) or i < num_attention_blocks[level]:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads_upsample,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ )
+ )
+ if level and i == self.num_res_blocks[level]:
+ out_ch = ch
+ layers.append(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ up=True,
+ )
+ if resblock_updown
+ else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)
+ )
+ ds //= 2
+ self.output_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),
+ )
+ if self.predict_codebook_ids:
+ self.id_predictor = nn.Sequential(
+ normalization(ch),
+ conv_nd(dims, model_channels, n_embed, 1),
+ #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits
+ )
+
+ def convert_to_fp16(self):
+ """
+ Convert the torso of the model to float16.
+ """
+ self.input_blocks.apply(convert_module_to_f16)
+ self.middle_block.apply(convert_module_to_f16)
+ self.output_blocks.apply(convert_module_to_f16)
+
+ def convert_to_fp32(self):
+ """
+ Convert the torso of the model to float32.
+ """
+ self.input_blocks.apply(convert_module_to_f32)
+ self.middle_block.apply(convert_module_to_f32)
+ self.output_blocks.apply(convert_module_to_f32)
+
+    def forward(self, x, timesteps=None, context=None, y=None, **kwargs):
+ """
+ Apply the model to an input batch.
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param timesteps: a 1-D batch of timesteps.
+ :param context: conditioning plugged in via crossattn
+ :param y: an [N] Tensor of labels, if class-conditional.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+ assert (y is not None) == (
+ self.num_classes is not None
+ ), "must specify y if and only if the model is class-conditional"
+ hs = []
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.num_classes is not None:
+ assert y.shape[0] == x.shape[0]
+ emb = emb + self.label_emb(y)
+
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb, context)
+ hs.append(h)
+ h = self.middle_block(h, emb, context)
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(h, emb, context)
+ h = h.type(x.dtype)
+ if self.predict_codebook_ids:
+ return self.id_predictor(h)
+ else:
+ return self.out(h)
+
+
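+# Editor's note: an illustrative sketch, not part of the original patch, showing
+# the UNetModel forward contract with a deliberately tiny, hypothetical config.
+def _unet_forward_demo():
+    model = UNetModel(
+        image_size=32, in_channels=3, model_channels=32, out_channels=3,
+        num_res_blocks=1, attention_resolutions=(2,), channel_mult=(1, 2),
+        num_heads=4,
+    )
+    x = th.randn(2, 3, 32, 32)
+    t = th.randint(0, 1000, (2,))
+    # epsilon-prediction: the output has the same shape as the input
+    assert model(x, timesteps=t).shape == x.shape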
+
+class MultiViewUNetModel(nn.Module):
+ """
+ The full multi-view UNet model with attention, timestep embedding and camera embedding.
+ :param in_channels: channels in the input Tensor.
+ :param model_channels: base channel count for the model.
+ :param out_channels: channels in the output Tensor.
+ :param num_res_blocks: number of residual blocks per downsample.
+ :param attention_resolutions: a collection of downsample rates at which
+ attention will take place. May be a set, list, or tuple.
+ For example, if this contains 4, then at 4x downsampling, attention
+ will be used.
+ :param dropout: the dropout probability.
+ :param channel_mult: channel multiplier for each level of the UNet.
+ :param conv_resample: if True, use learned convolutions for upsampling and
+ downsampling.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param num_classes: if specified (as an int), then this model will be
+ class-conditional with `num_classes` classes.
+ :param use_checkpoint: use gradient checkpointing to reduce memory usage.
+ :param num_heads: the number of attention heads in each attention layer.
+    :param num_head_channels: if specified, ignore num_heads and instead use
+ a fixed channel width per attention head.
+ :param num_heads_upsample: works with num_heads to set a different number
+ of heads for upsampling. Deprecated.
+ :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.
+ :param resblock_updown: use residual blocks for up/downsampling.
+ :param use_new_attention_order: use a different attention pattern for potentially
+ increased efficiency.
+ :param camera_dim: dimensionality of camera input.
+ """
+
+ def __init__(
+ self,
+ image_size,
+ in_channels,
+ model_channels,
+ out_channels,
+ num_res_blocks,
+ attention_resolutions,
+ dropout=0,
+ channel_mult=(1, 2, 4, 8),
+ conv_resample=True,
+ dims=2,
+ num_classes=None,
+ use_checkpoint=False,
+ use_fp16=False,
+ use_bf16=False,
+ num_heads=-1,
+ num_head_channels=-1,
+ num_heads_upsample=-1,
+ use_scale_shift_norm=False,
+ resblock_updown=False,
+ use_new_attention_order=False,
+ use_spatial_transformer=False, # custom transformer support
+ transformer_depth=1, # custom transformer support
+ context_dim=None, # custom transformer support
+ n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model
+ legacy=True,
+ disable_self_attentions=None,
+ num_attention_blocks=None,
+ disable_middle_self_attn=False,
+ use_linear_in_transformer=False,
+ adm_in_channels=None,
+ camera_dim=None,
+ ):
+ super().__init__()
+ if use_spatial_transformer:
+ assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...'
+
+ if context_dim is not None:
+ assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...'
+ from omegaconf.listconfig import ListConfig
+            if isinstance(context_dim, ListConfig):
+ context_dim = list(context_dim)
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ if num_heads == -1:
+ assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set'
+
+ if num_head_channels == -1:
+ assert num_heads != -1, 'Either num_heads or num_head_channels has to be set'
+
+ self.image_size = image_size
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ if isinstance(num_res_blocks, int):
+ self.num_res_blocks = len(channel_mult) * [num_res_blocks]
+ else:
+ if len(num_res_blocks) != len(channel_mult):
+ raise ValueError("provide num_res_blocks either as an int (globally constant) or "
+ "as a list/tuple (per-level) with the same length as channel_mult")
+ self.num_res_blocks = num_res_blocks
+ if disable_self_attentions is not None:
+ # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not
+ assert len(disable_self_attentions) == len(channel_mult)
+ if num_attention_blocks is not None:
+ assert len(num_attention_blocks) == len(self.num_res_blocks)
+ assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks))))
+ print(f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. "
+ f"This option has LESS priority than attention_resolutions {attention_resolutions}, "
+ f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, "
+ f"attention will still not be set.")
+
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.num_classes = num_classes
+ self.use_checkpoint = use_checkpoint
+ self.dtype = th.float16 if use_fp16 else th.float32
+ self.dtype = th.bfloat16 if use_bf16 else self.dtype
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+ self.predict_codebook_ids = n_embed is not None
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if camera_dim is not None:
+ time_embed_dim = model_channels * 4
+ self.camera_embed = nn.Sequential(
+ linear(camera_dim, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if self.num_classes is not None:
+ if isinstance(self.num_classes, int):
+ self.label_emb = nn.Embedding(num_classes, time_embed_dim)
+ elif self.num_classes == "continuous":
+ print("setting up linear c_adm embedding layer")
+ self.label_emb = nn.Linear(1, time_embed_dim)
+ elif self.num_classes == "sequential":
+ assert adm_in_channels is not None
+ self.label_emb = nn.Sequential(
+ nn.Sequential(
+ linear(adm_in_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+ )
+ else:
+ raise ValueError()
+
+ self.input_blocks = nn.ModuleList(
+ [
+ TimestepEmbedSequential(
+ conv_nd(dims, in_channels, model_channels, 3, padding=1)
+ )
+ ]
+ )
+ self._feature_size = model_channels
+ input_block_chans = [model_channels]
+ ch = model_channels
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for nr in range(self.num_res_blocks[level]):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=mult * model_channels,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = mult * model_channels
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ if exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if not exists(num_attention_blocks) or nr < num_attention_blocks[level]:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer3D(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer3D( # always uses a self-attn
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ ),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+
+ self.output_blocks = nn.ModuleList([])
+ for level, mult in list(enumerate(channel_mult))[::-1]:
+ for i in range(self.num_res_blocks[level] + 1):
+ ich = input_block_chans.pop()
+ layers = [
+ ResBlock(
+ ch + ich,
+ time_embed_dim,
+ dropout,
+ out_channels=model_channels * mult,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = model_channels * mult
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+ if legacy:
+ #num_heads = 1
+ dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
+ if exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if not exists(num_attention_blocks) or i < num_attention_blocks[level]:
+ layers.append(
+ AttentionBlock(
+ ch,
+ use_checkpoint=use_checkpoint,
+ num_heads=num_heads_upsample,
+ num_head_channels=dim_head,
+ use_new_attention_order=use_new_attention_order,
+ ) if not use_spatial_transformer else SpatialTransformer3D(
+ ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
+ disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer,
+ use_checkpoint=use_checkpoint
+ )
+ )
+ if level and i == self.num_res_blocks[level]:
+ out_ch = ch
+ layers.append(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ up=True,
+ )
+ if resblock_updown
+ else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)
+ )
+ ds //= 2
+ self.output_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),
+ )
+ if self.predict_codebook_ids:
+ self.id_predictor = nn.Sequential(
+ normalization(ch),
+ conv_nd(dims, model_channels, n_embed, 1),
+ #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits
+ )
+
+ def convert_to_fp16(self):
+ """
+ Convert the torso of the model to float16.
+ """
+ self.input_blocks.apply(convert_module_to_f16)
+ self.middle_block.apply(convert_module_to_f16)
+ self.output_blocks.apply(convert_module_to_f16)
+
+ def convert_to_fp32(self):
+ """
+ Convert the torso of the model to float32.
+ """
+ self.input_blocks.apply(convert_module_to_f32)
+ self.middle_block.apply(convert_module_to_f32)
+ self.output_blocks.apply(convert_module_to_f32)
+
+ def forward(self, x, timesteps=None, context=None, y=None, camera=None, num_frames=1, **kwargs):
+ """
+ Apply the model to an input batch.
+ :param x: an [(N x F) x C x ...] Tensor of inputs. F is the number of frames (views).
+ :param timesteps: a 1-D batch of timesteps.
+ :param context: conditioning plugged in via crossattn
+ :param y: an [N] Tensor of labels, if class-conditional.
+        :param num_frames: an integer indicating the number of frames for tensor reshaping.
+ :return: an [(N x F) x C x ...] Tensor of outputs. F is the number of frames (views).
+ """
+        assert x.shape[0] % num_frames == 0, "[UNet] input batch size must be divisible by num_frames!"
+ assert (y is not None) == (
+ self.num_classes is not None
+ ), "must specify y if and only if the model is class-conditional"
+ hs = []
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.num_classes is not None:
+ assert y.shape[0] == x.shape[0]
+ emb = emb + self.label_emb(y)
+
+ # Add camera embeddings
+ if camera is not None:
+ assert camera.shape[0] == emb.shape[0]
+ emb = emb + self.camera_embed(camera)
+
+ h = x.type(self.dtype)
+ for module in self.input_blocks:
+ h = module(h, emb, context, num_frames=num_frames)
+ hs.append(h)
+ h = self.middle_block(h, emb, context, num_frames=num_frames)
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(h, emb, context, num_frames=num_frames)
+ h = h.type(x.dtype)
+ if self.predict_codebook_ids:
+ return self.id_predictor(h)
+ else:
+ return self.out(h)
+
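+# Editor's note: an illustrative sketch, not part of the original patch, of the
+# multi-view batch contract: F views are flattened into the batch axis and
+# re-grouped internally via num_frames (assuming TimestepEmbedSequential in this
+# repo forwards the num_frames kwarg, as the forward pass above does).
+def _multiview_forward_demo(n=2, f=4):
+    model = MultiViewUNetModel(
+        image_size=32, in_channels=3, model_channels=32, out_channels=3,
+        num_res_blocks=1, attention_resolutions=(2,), channel_mult=(1, 2),
+        num_heads=4, camera_dim=16,
+    )
+    x = th.randn(n * f, 3, 32, 32)     # [(N x F) x C x H x W]
+    t = th.randint(0, 1000, (n * f,))
+    cam = th.randn(n * f, 16)          # one camera vector per view
+    assert model(x, timesteps=t, camera=cam, num_frames=f).shape == x.shape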
+class MultiViewUNetModel_Encoder(MultiViewUNetModel):
+ def __init__(self, image_size, in_channels, model_channels, out_channels, num_res_blocks, attention_resolutions, dropout=0, channel_mult=(1, 2, 4, 8), conv_resample=True, dims=2, num_classes=None, use_checkpoint=False, use_fp16=False, use_bf16=False, num_heads=-1, num_head_channels=-1, num_heads_upsample=-1, use_scale_shift_norm=False, resblock_updown=False, use_new_attention_order=False, use_spatial_transformer=False, transformer_depth=1, context_dim=None, n_embed=None, legacy=True, disable_self_attentions=None, num_attention_blocks=None, disable_middle_self_attn=False, use_linear_in_transformer=False, adm_in_channels=None, camera_dim=None):
+ super().__init__(image_size, in_channels, model_channels, out_channels, num_res_blocks, attention_resolutions, dropout, channel_mult, conv_resample, dims, num_classes, use_checkpoint, use_fp16, use_bf16, num_heads, num_head_channels, num_heads_upsample, use_scale_shift_norm, resblock_updown, use_new_attention_order, use_spatial_transformer, transformer_depth, context_dim, n_embed, legacy, disable_self_attentions, num_attention_blocks, disable_middle_self_attn, use_linear_in_transformer, adm_in_channels, camera_dim)
+        del self.time_embed  # the timestep embedding is unused; keep camera_embed, which forward() needs
+        del self.output_blocks  # encoder-only: the decoder path is removed
+
+    def forward(self, x, timesteps=None, context=None, y=None, camera=None, num_frames=1, **kwargs):
+        """
+        Apply the encoder to an input batch.
+        :param x: an [(N x F) x C x ...] Tensor of inputs. F is the number of frames (views).
+        :param timesteps: unused; kept for interface compatibility with the parent class.
+        :param context: conditioning plugged in via crossattn
+        :param y: an [N] Tensor of labels, if class-conditional.
+        :param camera: an [(N x F) x camera_dim] Tensor of camera parameters, one per view.
+        :param num_frames: an integer indicating the number of frames for tensor reshaping.
+        :return: the middle-block feature Tensor; the decoder path is removed in this variant.
+        """
+        assert x.shape[0] % num_frames == 0, "[UNet] input batch size must be divisible by num_frames!"
+        assert (y is not None) == (
+            self.num_classes is not None
+        ), "must specify y if and only if the model is class-conditional"
+        # The encoder drops the timestep embedding and conditions on the camera only.
+        emb = self.camera_embed(camera)
+
+        h = x.type(self.dtype)
+        for module in self.input_blocks:
+            h = module(h, emb, context, num_frames=num_frames)
+        h = self.middle_block(h, emb, context, num_frames=num_frames)
+        # TODO: add a concat -> pooling to fuse multiple frames; change to 3D-aware tokens
+        return h.type(x.dtype)
\ No newline at end of file
diff --git a/ldm/modules/diffusionmodules/upscaling.py b/ldm/modules/diffusionmodules/upscaling.py
new file mode 100644
index 0000000000000000000000000000000000000000..03816662098ce1ffac79bd939b892e867ab91988
--- /dev/null
+++ b/ldm/modules/diffusionmodules/upscaling.py
@@ -0,0 +1,81 @@
+import torch
+import torch.nn as nn
+import numpy as np
+from functools import partial
+
+from ldm.modules.diffusionmodules.util import extract_into_tensor, make_beta_schedule
+from ldm.util import default
+
+
+class AbstractLowScaleModel(nn.Module):
+ # for concatenating a downsampled image to the latent representation
+ def __init__(self, noise_schedule_config=None):
+ super(AbstractLowScaleModel, self).__init__()
+ if noise_schedule_config is not None:
+ self.register_schedule(**noise_schedule_config)
+
+ def register_schedule(self, beta_schedule="linear", timesteps=1000,
+ linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
+ betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end,
+ cosine_s=cosine_s)
+ alphas = 1. - betas
+ alphas_cumprod = np.cumprod(alphas, axis=0)
+ alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])
+
+ timesteps, = betas.shape
+ self.num_timesteps = int(timesteps)
+ self.linear_start = linear_start
+ self.linear_end = linear_end
+ assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep'
+
+ to_torch = partial(torch.tensor, dtype=torch.float32)
+
+ self.register_buffer('betas', to_torch(betas))
+ self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
+ self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
+ self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
+ self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod)))
+ self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod)))
+ self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1)))
+
+ def q_sample(self, x_start, t, noise=None):
+ noise = default(noise, lambda: torch.randn_like(x_start))
+ return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start +
+ extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise)
+
+ def forward(self, x):
+ return x, None
+
+ def decode(self, x):
+ return x
+
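+# Editor's note: an illustrative sketch, not part of the original patch, of the
+# forward-diffusion identity implemented by q_sample:
+#     x_t = sqrt(alpha_bar_t) * x_0 + sqrt(1 - alpha_bar_t) * eps
+def _q_sample_demo():
+    model = AbstractLowScaleModel(noise_schedule_config={"timesteps": 1000})
+    x0 = torch.randn(2, 3, 8, 8)
+    t = torch.randint(0, 1000, (2,))  # one noise level per sample
+    assert model.q_sample(x0, t).shape == x0.shape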
+
+class SimpleImageConcat(AbstractLowScaleModel):
+ # no noise level conditioning
+ def __init__(self):
+ super(SimpleImageConcat, self).__init__(noise_schedule_config=None)
+ self.max_noise_level = 0
+
+ def forward(self, x):
+ # fix to constant noise level
+ return x, torch.zeros(x.shape[0], device=x.device).long()
+
+
+class ImageConcatWithNoiseAugmentation(AbstractLowScaleModel):
+ def __init__(self, noise_schedule_config, max_noise_level=1000, to_cuda=False):
+ super().__init__(noise_schedule_config=noise_schedule_config)
+ self.max_noise_level = max_noise_level
+
+ def forward(self, x, noise_level=None):
+ if noise_level is None:
+ noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long()
+ else:
+ assert isinstance(noise_level, torch.Tensor)
+ z = self.q_sample(x, noise_level)
+ return z, noise_level
+
+
+
diff --git a/ldm/modules/diffusionmodules/util.py b/ldm/modules/diffusionmodules/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1245db832d567e251e66e03cb27c312276c8174
--- /dev/null
+++ b/ldm/modules/diffusionmodules/util.py
@@ -0,0 +1,274 @@
+# adapted from
+# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py
+# and
+# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py
+# and
+# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py
+#
+# thanks!
+
+
+import os
+import math
+import torch
+import torch.nn as nn
+import numpy as np
+from einops import repeat
+
+from ldm.util import instantiate_from_config
+
+from pdb import set_trace as st
+
+
+def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
+ if schedule == "linear":
+ betas = (
+ torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2
+ )
+
+ elif schedule == "cosine":
+ timesteps = (
+ torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s
+ )
+ alphas = timesteps / (1 + cosine_s) * np.pi / 2
+ alphas = torch.cos(alphas).pow(2)
+ alphas = alphas / alphas[0]
+ betas = 1 - alphas[1:] / alphas[:-1]
+        betas = torch.clamp(betas, min=0, max=0.999)  # clamp in torch so the final .numpy() call still works
+
+ elif schedule == "sqrt_linear":
+ betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64)
+ elif schedule == "sqrt":
+ betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5
+ else:
+ raise ValueError(f"schedule '{schedule}' unknown.")
+ return betas.numpy()
+
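+# Editor's note: an illustrative sketch, not part of the original patch. The
+# "linear" schedule is linear in sqrt(beta), so the betas grow monotonically.
+def _beta_schedule_demo():
+    betas = make_beta_schedule("linear", 1000)
+    assert betas.shape == (1000,) and (np.diff(betas) > 0).all()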
+
+def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True):
+ if ddim_discr_method == 'uniform':
+ c = num_ddpm_timesteps // num_ddim_timesteps
+ ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c)))
+ elif ddim_discr_method == 'quad':
+ ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int)
+ else:
+ raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"')
+
+ # assert ddim_timesteps.shape[0] == num_ddim_timesteps
+ # add one to get the final alpha values right (the ones from first scale to data during sampling)
+ steps_out = ddim_timesteps + 1
+ if verbose:
+ print(f'Selected timesteps for ddim sampler: {steps_out}')
+ return steps_out
+
+
+def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True):
+ # select alphas for computing the variance schedule
+ alphas = alphacums[ddim_timesteps]
+ alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist())
+
+    # according to the formula provided in https://arxiv.org/abs/2010.02502
+ sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev))
+ if verbose:
+ print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}')
+ print(f'For the chosen value of eta, which is {eta}, '
+ f'this results in the following sigma_t schedule for ddim sampler {sigmas}')
+ return sigmas, alphas, alphas_prev
+
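+# Editor's note: an illustrative sketch, not part of the original patch: derive
+# a 50-step DDIM schedule from a 1000-step DDPM schedule; eta=0 is deterministic.
+def _ddim_schedule_demo():
+    betas = make_beta_schedule("linear", 1000)
+    alphacums = np.cumprod(1.0 - betas)
+    steps = make_ddim_timesteps("uniform", 50, 1000, verbose=False)
+    sigmas, alphas, alphas_prev = make_ddim_sampling_parameters(
+        alphacums, steps, eta=0.0, verbose=False)
+    assert len(steps) == 50 and (sigmas == 0).all()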
+
+def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999):
+ """
+ Create a beta schedule that discretizes the given alpha_t_bar function,
+ which defines the cumulative product of (1-beta) over time from t = [0,1].
+ :param num_diffusion_timesteps: the number of betas to produce.
+ :param alpha_bar: a lambda that takes an argument t from 0 to 1 and
+ produces the cumulative product of (1-beta) up to that
+ part of the diffusion process.
+ :param max_beta: the maximum beta to use; use values lower than 1 to
+ prevent singularities.
+ """
+ betas = []
+ for i in range(num_diffusion_timesteps):
+ t1 = i / num_diffusion_timesteps
+ t2 = (i + 1) / num_diffusion_timesteps
+ betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))
+ return np.array(betas)
+
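+# Editor's note: an illustrative sketch, not part of the original patch, using
+# the squared-cosine alpha_bar from Nichol & Dhariwal (2021).
+def _alpha_bar_demo():
+    betas = betas_for_alpha_bar(
+        1000, lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2)
+    assert betas.shape == (1000,) and betas.max() <= 0.999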
+
+def extract_into_tensor(a, t, x_shape):
+ b, *_ = t.shape
+ out = a.gather(-1, t)
+ return out.reshape(b, *((1,) * (len(x_shape) - 1)))
+
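+# Editor's note: an illustrative sketch, not part of the original patch: gather
+# one schedule value per sample, shaped to broadcast over the image dimensions.
+def _extract_into_tensor_demo():
+    a = torch.linspace(0, 1, 1000)
+    t = torch.tensor([0, 999])
+    assert extract_into_tensor(a, t, x_shape=(2, 3, 8, 8)).shape == (2, 1, 1, 1)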
+
+@torch.autocast(device_type="cuda")
+def checkpoint(func, inputs, params, flag):
+ """
+ Evaluate a function without caching intermediate activations, allowing for
+ reduced memory at the expense of extra compute in the backward pass.
+ :param func: the function to evaluate.
+ :param inputs: the argument sequence to pass to `func`.
+ :param params: a sequence of parameters `func` depends on but does not
+ explicitly take as arguments.
+ :param flag: if False, disable gradient checkpointing.
+ """
+ if flag:
+ args = tuple(inputs) + tuple(params)
+ return CheckpointFunction.apply(func, len(inputs), *args)
+ else:
+ return func(*inputs)
+
+
+class CheckpointFunction(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, run_function, length, *args):
+ # st()
+ ctx.run_function = run_function
+ ctx.input_tensors = list(args[:length])
+ ctx.input_params = list(args[length:])
+ ctx.gpu_autocast_kwargs = {"enabled": torch.is_autocast_enabled(),
+ "dtype": torch.get_autocast_gpu_dtype(),
+ "cache_enabled": torch.is_autocast_cache_enabled()}
+ with torch.no_grad():
+ output_tensors = ctx.run_function(*ctx.input_tensors)
+ return output_tensors
+
+ @staticmethod
+ def backward(ctx, *output_grads):
+ ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors]
+ with torch.enable_grad(), \
+ torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs):
+ # Fixes a bug where the first op in run_function modifies the
+ # Tensor storage in place, which is not allowed for detach()'d
+ # Tensors.
+ shallow_copies = [x.view_as(x) for x in ctx.input_tensors]
+ output_tensors = ctx.run_function(*shallow_copies)
+ input_grads = torch.autograd.grad(
+ output_tensors,
+ ctx.input_tensors + ctx.input_params,
+ output_grads,
+ allow_unused=True,
+ )
+ del ctx.input_tensors
+ del ctx.input_params
+ del output_tensors
+ return (None, None) + input_grads
+
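+# Editor's note: an illustrative sketch, not part of the original patch (the
+# @torch.autocast decorator on checkpoint() above targets CUDA; on CPU-only
+# machines recent PyTorch disables it with a warning).
+def _checkpoint_demo():
+    lin = nn.Linear(4, 4)
+    x = torch.randn(2, 4, requires_grad=True)
+    # Activations of the wrapped function are recomputed during the backward pass.
+    y = checkpoint(lambda t: lin(t).relu(), (x,), tuple(lin.parameters()), True)
+    y.sum().backward()
+    assert x.grad is not None and lin.weight.grad is not None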
+
+def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False):
+ """
+ Create sinusoidal timestep embeddings.
+ :param timesteps: a 1-D Tensor of N indices, one per batch element.
+ These may be fractional.
+ :param dim: the dimension of the output.
+ :param max_period: controls the minimum frequency of the embeddings.
+ :return: an [N x dim] Tensor of positional embeddings.
+ """
+ if not repeat_only:
+ half = dim // 2
+ freqs = torch.exp(
+ -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half
+ ).to(device=timesteps.device)
+ args = timesteps[:, None].float() * freqs[None]
+ embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
+ if dim % 2:
+ embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
+ else:
+ embedding = repeat(timesteps, 'b -> b d', d=dim)
+ return embedding
+
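+# Editor's note: an illustrative sketch, not part of the original patch. The
+# first half of each embedding is cos(t * freqs), the second half sin(t * freqs).
+def _timestep_embedding_demo():
+    t = torch.tensor([0, 10, 100, 999])
+    emb = timestep_embedding(t, dim=128)
+    assert emb.shape == (4, 128)
+    assert torch.allclose(emb[0, :64], torch.ones(64))  # cos(0 * freqs) == 1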
+
+def zero_module(module):
+ """
+ Zero out the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().zero_()
+ return module
+
+
+def scale_module(module, scale):
+ """
+ Scale the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().mul_(scale)
+ return module
+
+
+def mean_flat(tensor):
+ """
+ Take the mean over all non-batch dimensions.
+ """
+ return tensor.mean(dim=list(range(1, len(tensor.shape))))
+
+
+def normalization(channels):
+ """
+ Make a standard normalization layer.
+ :param channels: number of input channels.
+ :return: an nn.Module for normalization.
+ """
+ return GroupNorm32(32, channels)
+
+
+# PyTorch 1.7 has SiLU, but we support PyTorch 1.5.
+class SiLU(nn.Module):
+ def forward(self, x):
+ return x * torch.sigmoid(x)
+
+
+class GroupNorm32(nn.GroupNorm):
+ def forward(self, x):
+ return super().forward(x.float()).type(x.dtype)
+
+def conv_nd(dims, *args, **kwargs):
+ """
+ Create a 1D, 2D, or 3D convolution module.
+ """
+ if dims == 1:
+ return nn.Conv1d(*args, **kwargs)
+ elif dims == 2:
+ return nn.Conv2d(*args, **kwargs)
+ elif dims == 3:
+ return nn.Conv3d(*args, **kwargs)
+ raise ValueError(f"unsupported dimensions: {dims}")
+
+
+def linear(*args, **kwargs):
+ """
+ Create a linear module.
+ """
+ return nn.Linear(*args, **kwargs)
+
+
+def avg_pool_nd(dims, *args, **kwargs):
+ """
+ Create a 1D, 2D, or 3D average pooling module.
+ """
+ if dims == 1:
+ return nn.AvgPool1d(*args, **kwargs)
+ elif dims == 2:
+ return nn.AvgPool2d(*args, **kwargs)
+ elif dims == 3:
+ return nn.AvgPool3d(*args, **kwargs)
+ raise ValueError(f"unsupported dimensions: {dims}")
+
+
+class HybridConditioner(nn.Module):
+
+ def __init__(self, c_concat_config, c_crossattn_config):
+ super().__init__()
+ self.concat_conditioner = instantiate_from_config(c_concat_config)
+ self.crossattn_conditioner = instantiate_from_config(c_crossattn_config)
+
+ def forward(self, c_concat, c_crossattn):
+ c_concat = self.concat_conditioner(c_concat)
+ c_crossattn = self.crossattn_conditioner(c_crossattn)
+ return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]}
+
+
+def noise_like(shape, device, repeat=False):
+ repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1)))
+ noise = lambda: torch.randn(shape, device=device)
+ return repeat_noise() if repeat else noise()
\ No newline at end of file
diff --git a/ldm/modules/distributions/__init__.py b/ldm/modules/distributions/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/modules/distributions/distributions.py b/ldm/modules/distributions/distributions.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2b8ef901130efc171aa69742ca0244d94d3f2e9
--- /dev/null
+++ b/ldm/modules/distributions/distributions.py
@@ -0,0 +1,92 @@
+import torch
+import numpy as np
+
+
+class AbstractDistribution:
+ def sample(self):
+ raise NotImplementedError()
+
+ def mode(self):
+ raise NotImplementedError()
+
+
+class DiracDistribution(AbstractDistribution):
+ def __init__(self, value):
+ self.value = value
+
+ def sample(self):
+ return self.value
+
+ def mode(self):
+ return self.value
+
+
+class DiagonalGaussianDistribution(object):
+ def __init__(self, parameters, deterministic=False):
+ self.parameters = parameters
+ self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)
+ self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
+ self.deterministic = deterministic
+ self.std = torch.exp(0.5 * self.logvar)
+ self.var = torch.exp(self.logvar)
+ if self.deterministic:
+ self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device)
+
+ def sample(self):
+ x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device)
+ return x
+
+ def kl(self, other=None):
+ if self.deterministic:
+ return torch.Tensor([0.])
+ else:
+ if other is None:
+ return 0.5 * torch.sum(torch.pow(self.mean, 2)
+ + self.var - 1.0 - self.logvar,
+ dim=[1, 2, 3])
+ else:
+ return 0.5 * torch.sum(
+ torch.pow(self.mean - other.mean, 2) / other.var
+ + self.var / other.var - 1.0 - self.logvar + other.logvar,
+ dim=[1, 2, 3])
+
+ def nll(self, sample, dims=[1,2,3]):
+ if self.deterministic:
+ return torch.Tensor([0.])
+ logtwopi = np.log(2.0 * np.pi)
+ return 0.5 * torch.sum(
+ logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var,
+ dim=dims)
+
+ def mode(self):
+ return self.mean
+
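+# Editor's note: an illustrative sketch, not part of the original patch. The
+# parameters tensor packs mean and logvar along dim=1 (hence 2*C channels).
+def _diag_gaussian_demo():
+    params = torch.randn(2, 8, 4, 4)  # chunked into a 4-channel mean and logvar
+    post = DiagonalGaussianDistribution(params)
+    assert post.sample().shape == (2, 4, 4, 4)  # mean + std * eps
+    assert post.kl().shape == (2,)              # KL(q || N(0, I)) per sample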
+
+def normal_kl(mean1, logvar1, mean2, logvar2):
+ """
+ source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12
+ Compute the KL divergence between two gaussians.
+ Shapes are automatically broadcasted, so batches can be compared to
+ scalars, among other use cases.
+ """
+ tensor = None
+ for obj in (mean1, logvar1, mean2, logvar2):
+ if isinstance(obj, torch.Tensor):
+ tensor = obj
+ break
+ assert tensor is not None, "at least one argument must be a Tensor"
+
+ # Force variances to be Tensors. Broadcasting helps convert scalars to
+ # Tensors, but it does not work for torch.exp().
+ logvar1, logvar2 = [
+ x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)
+ for x in (logvar1, logvar2)
+ ]
+
+ return 0.5 * (
+ -1.0
+ + logvar2
+ - logvar1
+ + torch.exp(logvar1 - logvar2)
+ + ((mean1 - mean2) ** 2) * torch.exp(-logvar2)
+ )
diff --git a/ldm/modules/ema.py b/ldm/modules/ema.py
new file mode 100644
index 0000000000000000000000000000000000000000..bded25019b9bcbcd0260f0b8185f8c7859ca58c4
--- /dev/null
+++ b/ldm/modules/ema.py
@@ -0,0 +1,80 @@
+import torch
+from torch import nn
+
+
+class LitEma(nn.Module):
+    def __init__(self, model, decay=0.9999, use_num_updates=True):
+ super().__init__()
+ if decay < 0.0 or decay > 1.0:
+ raise ValueError('Decay must be between 0 and 1')
+
+ self.m_name2s_name = {}
+ self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32))
+        self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int) if use_num_updates
+                             else torch.tensor(-1, dtype=torch.int))
+
+ for name, p in model.named_parameters():
+ if p.requires_grad:
+ # remove as '.'-character is not allowed in buffers
+ s_name = name.replace('.', '')
+ self.m_name2s_name.update({name: s_name})
+ self.register_buffer(s_name, p.clone().detach().data)
+
+ self.collected_params = []
+
+ def reset_num_updates(self):
+ del self.num_updates
+ self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int))
+
+ def forward(self, model):
+ decay = self.decay
+
+ if self.num_updates >= 0:
+ self.num_updates += 1
+ decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates))
+
+ one_minus_decay = 1.0 - decay
+
+ with torch.no_grad():
+ m_param = dict(model.named_parameters())
+ shadow_params = dict(self.named_buffers())
+
+ for key in m_param:
+ if m_param[key].requires_grad:
+ sname = self.m_name2s_name[key]
+ shadow_params[sname] = shadow_params[sname].type_as(m_param[key])
+ shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key]))
+ else:
+                    assert key not in self.m_name2s_name
+
+ def copy_to(self, model):
+ m_param = dict(model.named_parameters())
+ shadow_params = dict(self.named_buffers())
+ for key in m_param:
+ if m_param[key].requires_grad:
+ m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data)
+ else:
+                assert key not in self.m_name2s_name
+
+ def store(self, parameters):
+ """
+ Save the current parameters for restoring later.
+ Args:
+ parameters: Iterable of `torch.nn.Parameter`; the parameters to be
+ temporarily stored.
+ """
+ self.collected_params = [param.clone() for param in parameters]
+
+ def restore(self, parameters):
+ """
+ Restore the parameters stored with the `store` method.
+ Useful to validate the model with EMA parameters without affecting the
+ original optimization process. Store the parameters before the
+ `copy_to` method. After validation (or model saving), use this to
+ restore the former parameters.
+ Args:
+ parameters: Iterable of `torch.nn.Parameter`; the parameters to be
+ updated with the stored parameters.
+ """
+ for c_param, param in zip(self.collected_params, parameters):
+ param.data.copy_(c_param.data)
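+
+# Editor's note: an illustrative sketch, not part of the original patch, of the
+# intended usage: update the shadow weights each step, swap them in for eval.
+def _lit_ema_demo():
+    model = nn.Linear(4, 4)
+    ema = LitEma(model)
+    for _ in range(3):
+        # ... optimizer.step() would go here ...
+        ema(model)                    # shadow <- shadow - (1 - decay) * (shadow - param)
+    ema.store(model.parameters())     # stash the raw training weights
+    ema.copy_to(model)                # evaluate with the EMA weights
+    ema.restore(model.parameters())   # put the training weights back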
diff --git a/ldm/modules/encoders/__init__.py b/ldm/modules/encoders/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/modules/encoders/modules.py b/ldm/modules/encoders/modules.py
new file mode 100644
index 0000000000000000000000000000000000000000..52acef3f8d84ea982d50be49bf08b50c3e6f9a47
--- /dev/null
+++ b/ldm/modules/encoders/modules.py
@@ -0,0 +1,570 @@
+import torch
+from typing import Dict, List, Optional, Tuple, Union
+import functools
+import fsspec
+import os
+import open_clip
+import torch.nn as nn
+from functools import partial
+import clip
+from einops import rearrange, repeat
+import kornia
+import numpy as np
+from inspect import isfunction
+
+from pdb import set_trace as st
+# from transformers import CLIPTokenizer, CLIPTextModel
+
+from ...util import (append_dims, autocast, count_params, default,
+ disabled_train, expand_dims_like, instantiate_from_config)
+
+from ..x_transformer import Encoder, TransformerWrapper  # TODO: can we directly rely on lucidrains' code and simply add this as a requirement? --> test
+
+
+class AbstractEncoder(nn.Module):
+ def __init__(self):
+ super().__init__()
+
+ def encode(self, *args, **kwargs):
+ raise NotImplementedError
+
+
+
+class ClassEmbedder(nn.Module):
+ def __init__(self, embed_dim, n_classes=1000, key='class'):
+ super().__init__()
+ self.key = key
+ self.embedding = nn.Embedding(n_classes, embed_dim)
+
+ def forward(self, batch, key=None):
+ if key is None:
+ key = self.key
+ # this is for use in crossattn
+ c = batch[key][:, None]
+ c = self.embedding(c)
+ return c
+
+
+class TransformerEmbedder(AbstractEncoder):
+ """Some transformer encoder layers"""
+ def __init__(self, n_embed, n_layer, vocab_size, max_seq_len=77, device="cuda"):
+ super().__init__()
+ self.device = device
+ self.transformer = TransformerWrapper(num_tokens=vocab_size, max_seq_len=max_seq_len,
+ attn_layers=Encoder(dim=n_embed, depth=n_layer))
+
+ def forward(self, tokens):
+ tokens = tokens.to(self.device) # meh
+ z = self.transformer(tokens, return_embeddings=True)
+ return z
+
+ def encode(self, x):
+ return self(x)
+
+
+class BERTTokenizer(AbstractEncoder):
+ """ Uses a pretrained BERT tokenizer by huggingface. Vocab size: 30522 (?)"""
+ def __init__(self, device="cuda", vq_interface=True, max_length=77):
+ super().__init__()
+        from transformers import BertTokenizerFast  # TODO: add to requirements
+ self.tokenizer = BertTokenizerFast.from_pretrained("bert-base-uncased")
+ self.device = device
+ self.vq_interface = vq_interface
+ self.max_length = max_length
+
+ def forward(self, text):
+ batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True,
+ return_overflowing_tokens=False, padding="max_length", return_tensors="pt")
+ tokens = batch_encoding["input_ids"].to(self.device)
+ return tokens
+
+ @torch.no_grad()
+ def encode(self, text):
+ tokens = self(text)
+ if not self.vq_interface:
+ return tokens
+ return None, None, [None, None, tokens]
+
+ def decode(self, text):
+ return text
+
+
+class BERTEmbedder(AbstractEncoder):
+ """Uses the BERT tokenizr model and add some transformer encoder layers"""
+ def __init__(self, n_embed, n_layer, vocab_size=30522, max_seq_len=77,
+ device="cuda",use_tokenizer=True, embedding_dropout=0.0):
+ super().__init__()
+ self.use_tknz_fn = use_tokenizer
+ if self.use_tknz_fn:
+ self.tknz_fn = BERTTokenizer(vq_interface=False, max_length=max_seq_len)
+ self.device = device
+ self.transformer = TransformerWrapper(num_tokens=vocab_size, max_seq_len=max_seq_len,
+ attn_layers=Encoder(dim=n_embed, depth=n_layer),
+ emb_dropout=embedding_dropout)
+
+ def forward(self, text):
+ if self.use_tknz_fn:
+ tokens = self.tknz_fn(text)#.to(self.device)
+ else:
+ tokens = text
+ z = self.transformer(tokens, return_embeddings=True)
+ return z
+
+ def encode(self, text):
+ # output of length 77
+ return self(text)
+
+
+class SpatialRescaler(nn.Module):
+ def __init__(self,
+ n_stages=1,
+ method='bilinear',
+ multiplier=0.5,
+ in_channels=3,
+ out_channels=None,
+ bias=False):
+ super().__init__()
+ self.n_stages = n_stages
+ assert self.n_stages >= 0
+ assert method in ['nearest','linear','bilinear','trilinear','bicubic','area']
+ self.multiplier = multiplier
+ self.interpolator = partial(torch.nn.functional.interpolate, mode=method)
+ self.remap_output = out_channels is not None
+ if self.remap_output:
+ print(f'Spatial Rescaler mapping from {in_channels} to {out_channels} channels after resizing.')
+ self.channel_mapper = nn.Conv2d(in_channels,out_channels,1,bias=bias)
+
+ def forward(self,x):
+ for stage in range(self.n_stages):
+
+
+ if self.remap_output:
+ x = self.channel_mapper(x)
+ return x
+
+ def encode(self, x):
+ return self(x)
+
+class FrozenCLIPEmbedder(AbstractEncoder):
+ """Uses the CLIP transformer encoder for text (from Hugging Face)"""
+ def __init__(self, version="openai/clip-vit-large-patch14", device="cuda", max_length=77, use_eos_feature=False):
+ super().__init__()
+ from transformers import CLIPTokenizer, CLIPTextModel
+ self.tokenizer = CLIPTokenizer.from_pretrained(version)
+ self.transformer = CLIPTextModel.from_pretrained(version)
+
+ self.device = device
+ self.max_length = max_length
+ self.freeze()
+ self.use_eos_feature = use_eos_feature
+
+ def freeze(self):
+ self.transformer = self.transformer.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def forward(self, text):
+ batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True,
+ return_overflowing_tokens=False, padding="max_length", return_tensors="pt")
+ tokens = batch_encoding["input_ids"].to(self.device)
+ outputs = self.transformer(input_ids=tokens)
+
+        if self.use_eos_feature:  # for DiT
+            z = outputs.pooler_output  # [N, C], pooled EOS-token feature
+        else:
+            z = outputs.last_hidden_state  # [N, 77, C]
+ return z
+
+ def encode(self, text):
+ return self(text)
+
+class TextEmbedder(nn.Module):
+ """
+ Embeds text prompt into vector representations. Also handles text dropout for classifier-free guidance.
+ """
+ def __init__(self, dropout_prob=0.1, use_eos_feature=False):
+ super().__init__()
+        self.text_encoder = FrozenCLIPEmbedder(use_eos_feature=use_eos_feature)  # no normalization projection
+ self.dropout_prob = dropout_prob
+
+ def token_drop(self, text_prompts, force_drop_ids=None):
+ """
+ Drops text to enable classifier-free guidance.
+ """
+ if force_drop_ids is None:
+ drop_ids = np.random.uniform(0, 1, len(text_prompts)) < self.dropout_prob
+ else:
+ drop_ids = force_drop_ids == 1
+ labels = list(np.where(drop_ids, "None", text_prompts))
+ # print(labels)
+ return labels
+
+ def forward(self, text_prompts, train, force_drop_ids=None):
+ use_dropout = self.dropout_prob > 0
+ if (train and use_dropout) or (force_drop_ids is not None):
+ text_prompts = self.token_drop(text_prompts, force_drop_ids)
+        embeddings = self.text_encoder(text_prompts)
+ return embeddings
+
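+# Editor's note: an illustrative sketch, not part of the original patch; it
+# downloads the CLIP weights and assumes a CUDA device (FrozenCLIPEmbedder's default).
+def _text_cfg_dropout_demo():
+    embedder = TextEmbedder(dropout_prob=0.1)
+    # During training, ~10% of prompts are replaced with the null prompt "None",
+    # so the model also learns an unconditional branch for classifier-free guidance.
+    emb = embedder(["a chair", "a red car"], train=True)
+    assert emb.shape[:2] == (2, 77)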
+class FrozenCLIPTextEmbedder(nn.Module):
+ """
+ Uses the CLIP transformer encoder for text.
+ """
+ def __init__(self, version='ViT-L/14', device="cuda", max_length=77, n_repeat=1, normalize=True, dropout_prob=0., scale_clip_encoding=None):
+ super().__init__()
+ self.model, _ = clip.load(version, jit=False, device=device)
+ self.device = device
+ self.max_length = max_length
+ self.n_repeat = n_repeat
+ self.normalize = normalize
+ self.dropout_prob = dropout_prob
+ self.scale_clip_encoding = scale_clip_encoding
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def forward(self, text):
+ tokens = clip.tokenize(text).to(self.device)
+ z = self.model.encode_text(tokens)
+ if self.normalize:
+ z = z / torch.linalg.norm(z, dim=1, keepdim=True)
+
+ if self.scale_clip_encoding is not None:
+ z = z * self.scale_clip_encoding
+
+ return z
+
+ def token_drop(self, text_prompts, force_drop_ids=None):
+ """
+ Drops text to enable classifier-free guidance.
+ """
+ if force_drop_ids is None:
+ drop_ids = np.random.uniform(0, 1, len(text_prompts)) < self.dropout_prob
+ else:
+ drop_ids = force_drop_ids == 1
+ labels = list(np.where(drop_ids, "None", text_prompts))
+ # print(labels)
+ return labels
+
+
+ def encode(self, text):
+ z = self(text)
+
+ if z.ndim==2: # match cross attention shape
+ z = z[:, None, :]
+ z = repeat(z, 'b 1 d -> b k d', k=self.n_repeat)
+
+ return z
+
+
+class FrozenClipImageEmbedder(nn.Module):
+ """
+ Uses the CLIP image encoder.
+ """
+ def __init__(
+ self,
+ model,
+ jit=False,
+ device='cuda' if torch.cuda.is_available() else 'cpu',
+ antialias=False,
+ n_repeat=1,
+ dropout_prob=0.2, # follow Rodin
+ normalize_encoding=False,
+ scale_clip_encoding=1.0,
+ ):
+ super().__init__()
+ self.model, _ = clip.load(name=model, device=device, jit=jit)
+ self.n_repeat = n_repeat
+ self.normalize_encoding = normalize_encoding
+ self.scale_clip_encoding = torch.tensor(scale_clip_encoding, dtype=torch.float32, device=device)
+
+ self.antialias = antialias
+
+ self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False)
+ self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False)
+
+ self.dropout_prob = dropout_prob
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+
+ def preprocess(self, x):
+ # normalize to [0,1]
+ x = kornia.geometry.resize(x, (224, 224),
+ interpolation='bicubic',align_corners=True,
+ antialias=self.antialias)
+ x = (x + 1.) / 2.
+ # renormalize according to clip
+ x = kornia.enhance.normalize(x, self.mean, self.std) # type: ignore
+ return x
+
+ def token_drop(self, z):
+ """
+ zero the image encoding to enable classifier-free guidance.
+ """
+ drop_ids = np.random.uniform(0, 1, z.shape[0]) < self.dropout_prob # idx token to drop
+ drop_ids = torch.from_numpy(drop_ids).unsqueeze(1).expand_as(z).bool().to(z.device)
+ z = torch.where(drop_ids, torch.zeros_like(z), z)
+ return z
+
+
+ def forward(self, x):
+ # x is assumed to be in range [-1,1]
+ # return self.model.encode_image(self.preprocess(x))
+ z = self.model.encode_image(self.preprocess(x))
+
+        # optional feature normalization; empirically this did not seem to help
+ if self.normalize_encoding:
+ z = z / torch.linalg.norm(z, dim=1, keepdim=True)
+ if self.scale_clip_encoding:
+ # st()
+ z = z * self.scale_clip_encoding
+
+ if self.dropout_prob>0: # for cfg
+ z = self.token_drop(z)
+
+ if z.ndim==2:
+            # repeat 1 dim, for context shape compatibility.
+ z = z[:, None, :]
+ z = repeat(z, 'b 1 d -> b k d', k=self.n_repeat)
+ return z
+
+
+class AbstractEmbModel(nn.Module):
+ def __init__(self):
+ super().__init__()
+ self._is_trainable = None
+ self._ucg_rate = None
+ self._input_key = None
+
+ @property
+ def is_trainable(self) -> bool:
+ return self._is_trainable
+
+ @property
+ def ucg_rate(self) -> Union[float, torch.Tensor]:
+ return self._ucg_rate
+
+ @property
+ def input_key(self) -> str:
+ return self._input_key
+
+ @is_trainable.setter
+ def is_trainable(self, value: bool):
+ self._is_trainable = value
+
+ @ucg_rate.setter
+ def ucg_rate(self, value: Union[float, torch.Tensor]):
+ self._ucg_rate = value
+
+ @input_key.setter
+ def input_key(self, value: str):
+ self._input_key = value
+
+ @is_trainable.deleter
+ def is_trainable(self):
+ del self._is_trainable
+
+ @ucg_rate.deleter
+ def ucg_rate(self):
+ del self._ucg_rate
+
+ @input_key.deleter
+ def input_key(self):
+ del self._input_key
+
+
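+# Usage note (a sketch, assuming the usual conditioner-wrapper pattern): these
+# attributes are assigned externally rather than passed to __init__, e.g.
+#
+#   emb = FrozenOpenCLIPImageEmbedder()
+#   emb.input_key = "jpg"       # which batch entry feeds this embedder
+#   emb.ucg_rate = 0.1          # unconditional-guidance dropout rate
+#   emb.is_trainable = False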
+
+class FrozenOpenCLIPImageEmbedder(AbstractEmbModel):
+ """
+ Uses the OpenCLIP vision transformer encoder for images
+ """
+
+ def __init__(
+ self,
+ arch="ViT-H-14",
+ version="laion2b_s32b_b79k",
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ antialias=True,
+ ucg_rate=0.0,
+ unsqueeze_dim=False,
+ repeat_to_max_len=False,
+ num_image_crops=0,
+ output_tokens=False,
+ init_device=None,
+ ):
+ super().__init__()
+ model, _, _ = open_clip.create_model_and_transforms(
+ arch,
+ device=torch.device(default(init_device, "cpu")),
+ pretrained=version,
+ )
+ del model.transformer
+ self.model = model
+ self.max_crops = num_image_crops
+ self.pad_to_max_len = self.max_crops > 0
+ self.repeat_to_max_len = repeat_to_max_len and (not self.pad_to_max_len)
+ self.device = device
+ self.max_length = max_length
+ if freeze:
+ self.freeze()
+
+ self.antialias = antialias
+
+ self.register_buffer(
+ "mean", torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False
+ )
+ self.register_buffer(
+ "std", torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False
+ )
+ self.ucg_rate = ucg_rate
+ self.unsqueeze_dim = unsqueeze_dim
+ self.stored_batch = None
+ self.model.visual.output_tokens = output_tokens
+ self.output_tokens = output_tokens
+
+ def preprocess(self, x):
+ # normalize to [0,1]
+ x = kornia.geometry.resize(
+ x,
+ (224, 224),
+ interpolation="bicubic",
+ align_corners=True,
+ antialias=self.antialias,
+ )
+ x = (x + 1.0) / 2.0
+ # renormalize according to clip
+ x = kornia.enhance.normalize(x, self.mean, self.std)
+ return x
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ @autocast
+ def forward(self, image, no_dropout=False):
+ z = self.encode_with_vision_transformer(image)
+ tokens = None
+ if self.output_tokens:
+ z, tokens = z[0], z[1]
+ z = z.to(image.dtype)
+ if self.ucg_rate > 0.0 and not no_dropout and not (self.max_crops > 0):
+ z = (
+ torch.bernoulli(
+ (1.0 - self.ucg_rate) * torch.ones(z.shape[0], device=z.device)
+ )[:, None]
+ * z
+ )
+ if tokens is not None:
+ tokens = (
+ expand_dims_like(
+ torch.bernoulli(
+ (1.0 - self.ucg_rate)
+ * torch.ones(tokens.shape[0], device=tokens.device)
+ ),
+ tokens,
+ )
+ * tokens
+ )
+ if self.unsqueeze_dim:
+ z = z[:, None, :]
+ if self.output_tokens:
+ assert not self.repeat_to_max_len
+ assert not self.pad_to_max_len
+ return tokens, z
+ if self.repeat_to_max_len:
+ if z.dim() == 2:
+ z_ = z[:, None, :]
+ else:
+ z_ = z
+ return repeat(z_, "b 1 d -> b n d", n=self.max_length), z
+ elif self.pad_to_max_len:
+ assert z.dim() == 3
+ z_pad = torch.cat(
+ (
+ z,
+ torch.zeros(
+ z.shape[0],
+ self.max_length - z.shape[1],
+ z.shape[2],
+ device=z.device,
+ ),
+ ),
+ 1,
+ )
+ return z_pad, z_pad[:, 0, ...]
+ return z
+
+ def encode_with_vision_transformer(self, img):
+ # if self.max_crops > 0:
+ # img = self.preprocess_by_cropping(img)
+ if img.dim() == 5:
+ assert self.max_crops == img.shape[1]
+ img = rearrange(img, "b n c h w -> (b n) c h w")
+ img = self.preprocess(img)
+ if not self.output_tokens:
+ assert not self.model.visual.output_tokens
+ x = self.model.visual(img)
+ tokens = None
+ else:
+ assert self.model.visual.output_tokens
+ x, tokens = self.model.visual(img)
+ if self.max_crops > 0:
+ x = rearrange(x, "(b n) d -> b n d", n=self.max_crops)
+ # drop out between 0 and all along the sequence axis
+ x = (
+ torch.bernoulli(
+ (1.0 - self.ucg_rate)
+ * torch.ones(x.shape[0], x.shape[1], 1, device=x.device)
+ )
+ * x
+ )
+ if tokens is not None:
+ tokens = rearrange(tokens, "(b n) t d -> b t (n d)", n=self.max_crops)
+ print(
+ f"You are running very experimental token-concat in {self.__class__.__name__}. "
+ f"Check what you are doing, and then remove this message."
+ )
+ if self.output_tokens:
+ return x, tokens
+ return x
+
+ def encode(self, text):
+ return self(text)
+
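+# Shape sketch (assumed defaults): with output_tokens=False the embedder
+# returns pooled features of shape (B, 1024) for ViT-H-14; unsqueeze_dim adds a
+# singleton token axis, and repeat_to_max_len tiles that single token to
+# (B, max_length, 1024) so it can stand in for a text-token context.
+#
+#   emb = FrozenOpenCLIPImageEmbedder(ucg_rate=0.1, unsqueeze_dim=True)
+#   z = emb(torch.rand(2, 3, 256, 256) * 2 - 1)  # (2, 1, 1024)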
+class FrozenOpenCLIPImagePredictionEmbedder(AbstractEmbModel):
+ def __init__(
+ self,
+ # open_clip_embedding_config: Dict,
+ n_cond_frames: int,
+ n_copies: int,
+ open_clip_module,
+ ):
+ super().__init__()
+
+ self.n_cond_frames = n_cond_frames
+ self.n_copies = n_copies
+ # self.open_clip = instantiate_from_config(open_clip_embedding_config)
+ self.open_clip = open_clip_module
+
+ def forward(self, vid):
+ vid = self.open_clip(vid)
+ vid = rearrange(vid, "(b t) d -> b t d", t=self.n_cond_frames)
+ vid = repeat(vid, "b t d -> (b s) t d", s=self.n_copies)
+
+ return vid
+
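+# Rearrangement sketch (illustrative): conditioning frames arrive stacked along
+# the batch axis; the wrapper regroups them per video and tiles the result,
+# e.g. with n_cond_frames=2 and n_copies=3, inputs of shape (2*B, C, H, W)
+# yield conditioning of shape (3*B, 2, D).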
+
+if __name__ == "__main__":
+ from ldm.util import count_params
+ model = FrozenCLIPEmbedder()
+ count_params(model, verbose=True)
diff --git a/ldm/modules/image_degradation/__init__.py b/ldm/modules/image_degradation/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7836cada81f90ded99c58d5942eea4c3477f58fc
--- /dev/null
+++ b/ldm/modules/image_degradation/__init__.py
@@ -0,0 +1,2 @@
+from ldm.modules.image_degradation.bsrgan import degradation_bsrgan_variant as degradation_fn_bsr
+from ldm.modules.image_degradation.bsrgan_light import degradation_bsrgan_variant as degradation_fn_bsr_light
diff --git a/ldm/modules/image_degradation/bsrgan.py b/ldm/modules/image_degradation/bsrgan.py
new file mode 100644
index 0000000000000000000000000000000000000000..32ef56169978e550090261cddbcf5eb611a6173b
--- /dev/null
+++ b/ldm/modules/image_degradation/bsrgan.py
@@ -0,0 +1,730 @@
+# -*- coding: utf-8 -*-
+"""
+# --------------------------------------------
+# Super-Resolution
+# --------------------------------------------
+#
+# Kai Zhang (cskaizhang@gmail.com)
+# https://github.com/cszn
+# From 2019/03--2021/08
+# --------------------------------------------
+"""
+
+import numpy as np
+import cv2
+import torch
+
+from functools import partial
+import random
+from scipy import ndimage
+import scipy
+import scipy.stats as ss
+from scipy.interpolate import interp2d
+from scipy.linalg import orth
+import albumentations
+
+import ldm.modules.image_degradation.utils_image as util
+
+
+def modcrop_np(img, sf):
+ '''
+ Args:
+ img: numpy image, WxH or WxHxC
+ sf: scale factor
+ Return:
+ cropped image
+ '''
+ w, h = img.shape[:2]
+ im = np.copy(img)
+ return im[:w - w % sf, :h - h % sf, ...]
+
+
+"""
+# --------------------------------------------
+# anisotropic Gaussian kernels
+# --------------------------------------------
+"""
+
+
+def analytic_kernel(k):
+ """Calculate the X4 kernel from the X2 kernel (for proof see appendix in paper)"""
+ k_size = k.shape[0]
+ # Calculate the big kernel's size
+ big_k = np.zeros((3 * k_size - 2, 3 * k_size - 2))
+ # Loop over the small kernel to fill the big one
+ for r in range(k_size):
+ for c in range(k_size):
+ big_k[2 * r:2 * r + k_size, 2 * c:2 * c + k_size] += k[r, c] * k
+ # Crop the edges of the big kernel to ignore very small values and reduce the run time of SR
+ crop = k_size // 2
+ cropped_big_k = big_k[crop:-crop, crop:-crop]
+ # Normalize to 1
+ return cropped_big_k / cropped_big_k.sum()
+
+
+def anisotropic_Gaussian(ksize=15, theta=np.pi, l1=6, l2=6):
+ """ generate an anisotropic Gaussian kernel
+ Args:
+ ksize : e.g., 15, kernel size
+ theta : [0, pi], rotation angle range
+ l1 : [0.1,50], scaling of eigenvalues
+ l2 : [0.1,l1], scaling of eigenvalues
+ If l1 = l2, will get an isotropic Gaussian kernel.
+ Returns:
+ k : kernel
+ """
+
+ v = np.dot(np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]]), np.array([1., 0.]))
+ V = np.array([[v[0], v[1]], [v[1], -v[0]]])
+ D = np.array([[l1, 0], [0, l2]])
+ Sigma = np.dot(np.dot(V, D), np.linalg.inv(V))
+ k = gm_blur_kernel(mean=[0, 0], cov=Sigma, size=ksize)
+
+ return k
+
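+# Sanity-check sketch (not in the original file): the kernel is normalized to
+# sum to 1 and can be applied with ndimage.convolve, as add_blur() does below.
+#
+#   k = anisotropic_Gaussian(ksize=15, theta=np.pi / 4, l1=6, l2=1)
+#   assert k.shape == (15, 15) and abs(k.sum() - 1.0) < 1e-6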
+
+def gm_blur_kernel(mean, cov, size=15):
+ center = size / 2.0 + 0.5
+ k = np.zeros([size, size])
+ for y in range(size):
+ for x in range(size):
+ cy = y - center + 1
+ cx = x - center + 1
+ k[y, x] = ss.multivariate_normal.pdf([cx, cy], mean=mean, cov=cov)
+
+ k = k / np.sum(k)
+ return k
+
+
+def shift_pixel(x, sf, upper_left=True):
+ """shift pixel for super-resolution with different scale factors
+ Args:
+ x: WxHxC or WxH
+ sf: scale factor
+ upper_left: shift direction
+ """
+ h, w = x.shape[:2]
+ shift = (sf - 1) * 0.5
+ xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0)
+ if upper_left:
+ x1 = xv + shift
+ y1 = yv + shift
+ else:
+ x1 = xv - shift
+ y1 = yv - shift
+
+ x1 = np.clip(x1, 0, w - 1)
+ y1 = np.clip(y1, 0, h - 1)
+
+ if x.ndim == 2:
+ x = interp2d(xv, yv, x)(x1, y1)
+ if x.ndim == 3:
+ for i in range(x.shape[-1]):
+ x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1)
+
+ return x
+
+
+def blur(x, k):
+ '''
+ x: image, NxcxHxW
+ k: kernel, Nx1xhxw
+ '''
+ n, c = x.shape[:2]
+ p1, p2 = (k.shape[-2] - 1) // 2, (k.shape[-1] - 1) // 2
+ x = torch.nn.functional.pad(x, pad=(p1, p2, p1, p2), mode='replicate')
+ k = k.repeat(1, c, 1, 1)
+ k = k.view(-1, 1, k.shape[2], k.shape[3])
+ x = x.view(1, -1, x.shape[2], x.shape[3])
+ x = torch.nn.functional.conv2d(x, k, bias=None, stride=1, padding=0, groups=n * c)
+ x = x.view(n, c, x.shape[2], x.shape[3])
+
+ return x
+
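+# Shape sketch (illustrative): blur() applies one kernel per sample via a
+# grouped convolution, e.g. x of shape (N, 3, 64, 64) with k of shape
+# (N, 1, 7, 7) returns a blurred tensor of shape (N, 3, 64, 64).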
+
+def gen_kernel(k_size=np.array([15, 15]), scale_factor=np.array([4, 4]), min_var=0.6, max_var=10., noise_level=0):
+ """"
+ # modified version of https://github.com/assafshocher/BlindSR_dataset_generator
+ # Kai Zhang
+ # min_var = 0.175 * sf # variance of the gaussian kernel will be sampled between min_var and max_var
+ # max_var = 2.5 * sf
+ """
+ # Set random eigen-vals (lambdas) and angle (theta) for COV matrix
+ lambda_1 = min_var + np.random.rand() * (max_var - min_var)
+ lambda_2 = min_var + np.random.rand() * (max_var - min_var)
+ theta = np.random.rand() * np.pi # random theta
+ noise = -noise_level + np.random.rand(*k_size) * noise_level * 2
+
+ # Set COV matrix using Lambdas and Theta
+ LAMBDA = np.diag([lambda_1, lambda_2])
+ Q = np.array([[np.cos(theta), -np.sin(theta)],
+ [np.sin(theta), np.cos(theta)]])
+ SIGMA = Q @ LAMBDA @ Q.T
+ INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]
+
+ # Set expectation position (shifting kernel for aligned image)
+ MU = k_size // 2 - 0.5 * (scale_factor - 1) # - 0.5 * (scale_factor - k_size % 2)
+ MU = MU[None, None, :, None]
+
+ # Create meshgrid for Gaussian
+ [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))
+ Z = np.stack([X, Y], 2)[:, :, :, None]
+
+ # Calculate Gaussian for every pixel of the kernel
+ ZZ = Z - MU
+ ZZ_t = ZZ.transpose(0, 1, 3, 2)
+ raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ)) * (1 + noise)
+
+ # shift the kernel so it will be centered
+ # raw_kernel_centered = kernel_shift(raw_kernel, scale_factor)
+
+ # Normalize the kernel and return
+ # kernel = raw_kernel_centered / np.sum(raw_kernel_centered)
+ kernel = raw_kernel / np.sum(raw_kernel)
+ return kernel
+
+
+def fspecial_gaussian(hsize, sigma):
+ hsize = [hsize, hsize]
+ siz = [(hsize[0] - 1.0) / 2.0, (hsize[1] - 1.0) / 2.0]
+ std = sigma
+ [x, y] = np.meshgrid(np.arange(-siz[1], siz[1] + 1), np.arange(-siz[0], siz[0] + 1))
+ arg = -(x * x + y * y) / (2 * std * std)
+ h = np.exp(arg)
+ h[h < np.finfo(float).eps * h.max()] = 0
+ sumh = h.sum()
+ if sumh != 0:
+ h = h / sumh
+ return h
+
+
+def fspecial_laplacian(alpha):
+ alpha = max([0, min([alpha, 1])])
+ h1 = alpha / (alpha + 1)
+ h2 = (1 - alpha) / (alpha + 1)
+ h = [[h1, h2, h1], [h2, -4 / (alpha + 1), h2], [h1, h2, h1]]
+ h = np.array(h)
+ return h
+
+
+def fspecial(filter_type, *args, **kwargs):
+ '''
+ python code from:
+ https://github.com/ronaldosena/imagens-medicas-2/blob/40171a6c259edec7827a6693a93955de2bd39e76/Aulas/aula_2_-_uniform_filter/matlab_fspecial.py
+ '''
+ if filter_type == 'gaussian':
+ return fspecial_gaussian(*args, **kwargs)
+ if filter_type == 'laplacian':
+ return fspecial_laplacian(*args, **kwargs)
+
+
+"""
+# --------------------------------------------
+# degradation models
+# --------------------------------------------
+"""
+
+
+def bicubic_degradation(x, sf=3):
+ '''
+ Args:
+ x: HxWxC image, [0, 1]
+ sf: down-scale factor
+ Return:
+ bicubically downsampled LR image
+ '''
+ x = util.imresize_np(x, scale=1 / sf)
+ return x
+
+
+def srmd_degradation(x, k, sf=3):
+ ''' blur + bicubic downsampling
+ Args:
+ x: HxWxC image, [0, 1]
+ k: hxw, double
+ sf: down-scale factor
+ Return:
+ downsampled LR image
+ Reference:
+ @inproceedings{zhang2018learning,
+ title={Learning a single convolutional super-resolution network for multiple degradations},
+ author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
+ booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
+ pages={3262--3271},
+ year={2018}
+ }
+ '''
+ x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap') # 'nearest' | 'mirror'
+ x = bicubic_degradation(x, sf=sf)
+ return x
+
+
+def dpsr_degradation(x, k, sf=3):
+ ''' bicubic downsampling + blur
+ Args:
+ x: HxWxC image, [0, 1]
+ k: hxw, double
+ sf: down-scale factor
+ Return:
+ downsampled LR image
+ Reference:
+ @inproceedings{zhang2019deep,
+ title={Deep Plug-and-Play Super-Resolution for Arbitrary Blur Kernels},
+ author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
+ booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
+ pages={1671--1681},
+ year={2019}
+ }
+ '''
+ x = bicubic_degradation(x, sf=sf)
+ x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
+ return x
+
+
+def classical_degradation(x, k, sf=3):
+ ''' blur + downsampling
+ Args:
+ x: HxWxC image, [0, 1]/[0, 255]
+ k: hxw, double
+ sf: down-scale factor
+ Return:
+ downsampled LR image
+ '''
+ x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
+ # x = filters.correlate(x, np.expand_dims(np.flip(k), axis=2))
+ st = 0
+ return x[st::sf, st::sf, ...]
+
+
+def add_sharpening(img, weight=0.5, radius=50, threshold=10):
+ """USM sharpening. borrowed from real-ESRGAN
+ Input image: I; Blurry image: B.
+ 1. K = I + weight * (I - B)
+ 2. Mask = 1 if abs(I - B) > threshold, else: 0
+ 3. Blur mask:
+ 4. Out = Mask * K + (1 - Mask) * I
+ Args:
+ img (Numpy array): Input image, HWC, BGR; float32, [0, 1].
+ weight (float): Sharpening weight. Default: 0.5.
+ radius (float): Kernel size of the Gaussian blur. Default: 50.
+ threshold (int): Mask threshold on |I - B| scaled to [0, 255]. Default: 10.
+ """
+ if radius % 2 == 0:
+ radius += 1
+ blur = cv2.GaussianBlur(img, (radius, radius), 0)
+ residual = img - blur
+ mask = np.abs(residual) * 255 > threshold
+ mask = mask.astype('float32')
+ soft_mask = cv2.GaussianBlur(mask, (radius, radius), 0)
+
+ K = img + weight * residual
+ K = np.clip(K, 0, 1)
+ return soft_mask * K + (1 - soft_mask) * img
+
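+# Worked example of the USM steps above (values illustrative): with weight=0.5,
+# a pixel where I=0.8 and B=0.6 gives K = 0.8 + 0.5 * 0.2 = 0.9; since
+# |I - B| * 255 = 51 > threshold=10, the soft mask is ~1 there, so the output
+# is close to K rather than to the original I.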
+
+def add_blur(img, sf=4):
+ wd2 = 4.0 + sf
+ wd = 2.0 + 0.2 * sf
+ if random.random() < 0.5:
+ l1 = wd2 * random.random()
+ l2 = wd2 * random.random()
+ k = anisotropic_Gaussian(ksize=2 * random.randint(2, 11) + 3, theta=random.random() * np.pi, l1=l1, l2=l2)
+ else:
+ k = fspecial('gaussian', 2 * random.randint(2, 11) + 3, wd * random.random())
+ img = ndimage.convolve(img, np.expand_dims(k, axis=2), mode='mirror')
+
+ return img
+
+
+def add_resize(img, sf=4):
+ rnum = np.random.rand()
+ if rnum > 0.8: # up
+ sf1 = random.uniform(1, 2)
+ elif rnum < 0.7: # down
+ sf1 = random.uniform(0.5 / sf, 1)
+ else:
+ sf1 = 1.0
+ img = cv2.resize(img, (int(sf1 * img.shape[1]), int(sf1 * img.shape[0])), interpolation=random.choice([1, 2, 3]))
+ img = np.clip(img, 0.0, 1.0)
+
+ return img
+
+
+# def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
+# noise_level = random.randint(noise_level1, noise_level2)
+# rnum = np.random.rand()
+# if rnum > 0.6: # add color Gaussian noise
+# img += np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
+# elif rnum < 0.4: # add grayscale Gaussian noise
+# img += np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
+# else: # add noise
+# L = noise_level2 / 255.
+# D = np.diag(np.random.rand(3))
+# U = orth(np.random.rand(3, 3))
+# conv = np.dot(np.dot(np.transpose(U), D), U)
+# img += np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
+# img = np.clip(img, 0.0, 1.0)
+# return img
+
+def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
+ noise_level = random.randint(noise_level1, noise_level2)
+ rnum = np.random.rand()
+ if rnum > 0.6: # add color Gaussian noise
+ img = img + np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
+ elif rnum < 0.4: # add grayscale Gaussian noise
+ img = img + np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
+ else: # add noise
+ L = noise_level2 / 255.
+ D = np.diag(np.random.rand(3))
+ U = orth(np.random.rand(3, 3))
+ conv = np.dot(np.dot(np.transpose(U), D), U)
+ img = img + np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
+ img = np.clip(img, 0.0, 1.0)
+ return img
+
+
+def add_speckle_noise(img, noise_level1=2, noise_level2=25):
+ noise_level = random.randint(noise_level1, noise_level2)
+ img = np.clip(img, 0.0, 1.0)
+ rnum = random.random()
+ if rnum > 0.6:
+ img += img * np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
+ elif rnum < 0.4:
+ img += img * np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
+ else:
+ L = noise_level2 / 255.
+ D = np.diag(np.random.rand(3))
+ U = orth(np.random.rand(3, 3))
+ conv = np.dot(np.dot(np.transpose(U), D), U)
+ img += img * np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
+ img = np.clip(img, 0.0, 1.0)
+ return img
+
+
+def add_Poisson_noise(img):
+ img = np.clip((img * 255.0).round(), 0, 255) / 255.
+ vals = 10 ** (2 * random.random() + 2.0) # [2, 4]
+ if random.random() < 0.5:
+ img = np.random.poisson(img * vals).astype(np.float32) / vals
+ else:
+ img_gray = np.dot(img[..., :3], [0.299, 0.587, 0.114])
+ img_gray = np.clip((img_gray * 255.0).round(), 0, 255) / 255.
+ noise_gray = np.random.poisson(img_gray * vals).astype(np.float32) / vals - img_gray
+ img += noise_gray[:, :, np.newaxis]
+ img = np.clip(img, 0.0, 1.0)
+ return img
+
+
+def add_JPEG_noise(img):
+ quality_factor = random.randint(30, 95)
+ img = cv2.cvtColor(util.single2uint(img), cv2.COLOR_RGB2BGR)
+ result, encimg = cv2.imencode('.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), quality_factor])
+ img = cv2.imdecode(encimg, 1)
+ img = cv2.cvtColor(util.uint2single(img), cv2.COLOR_BGR2RGB)
+ return img
+
+
+def random_crop(lq, hq, sf=4, lq_patchsize=64):
+ h, w = lq.shape[:2]
+ rnd_h = random.randint(0, h - lq_patchsize)
+ rnd_w = random.randint(0, w - lq_patchsize)
+ lq = lq[rnd_h:rnd_h + lq_patchsize, rnd_w:rnd_w + lq_patchsize, :]
+
+ rnd_h_H, rnd_w_H = int(rnd_h * sf), int(rnd_w * sf)
+ hq = hq[rnd_h_H:rnd_h_H + lq_patchsize * sf, rnd_w_H:rnd_w_H + lq_patchsize * sf, :]
+ return lq, hq
+
+
+def degradation_bsrgan(img, sf=4, lq_patchsize=72, isp_model=None):
+ """
+ This is the degradation model of BSRGAN from the paper
+ "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
+ ----------
+ img: HxWxC, [0, 1], its size should be larger than (lq_patchsize*sf) x (lq_patchsize*sf)
+ sf: scale factor
+ isp_model: camera ISP model
+ Returns
+ -------
+ img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]
+ hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]
+ """
+ isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
+ sf_ori = sf
+
+ h1, w1 = img.shape[:2]
+ img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
+ h, w = img.shape[:2]
+
+ if h < lq_patchsize * sf or w < lq_patchsize * sf:
+ raise ValueError(f'img size ({h1}X{w1}) is too small!')
+
+ hq = img.copy()
+
+ if sf == 4 and random.random() < scale2_prob: # downsample1
+ if np.random.rand() < 0.5:
+ img = cv2.resize(img, (int(1 / 2 * img.shape[1]), int(1 / 2 * img.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ img = util.imresize_np(img, 1 / 2, True)
+ img = np.clip(img, 0.0, 1.0)
+ sf = 2
+
+ shuffle_order = random.sample(range(7), 7)
+ idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
+ if idx1 > idx2: # keep downsample3 last
+ shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
+
+ for i in shuffle_order:
+
+ if i == 0:
+ img = add_blur(img, sf=sf)
+
+ elif i == 1:
+ img = add_blur(img, sf=sf)
+
+ elif i == 2:
+ a, b = img.shape[1], img.shape[0]
+ # downsample2
+ if random.random() < 0.75:
+ sf1 = random.uniform(1, 2 * sf)
+ img = cv2.resize(img, (int(1 / sf1 * img.shape[1]), int(1 / sf1 * img.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
+ k_shifted = shift_pixel(k, sf)
+ k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
+ img = ndimage.convolve(img, np.expand_dims(k_shifted, axis=2), mode='mirror')
+ img = img[0::sf, 0::sf, ...] # nearest downsampling
+ img = np.clip(img, 0.0, 1.0)
+
+ elif i == 3:
+ # downsample3
+ img = cv2.resize(img, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
+ img = np.clip(img, 0.0, 1.0)
+
+ elif i == 4:
+ # add Gaussian noise
+ img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)
+
+ elif i == 5:
+ # add JPEG noise
+ if random.random() < jpeg_prob:
+ img = add_JPEG_noise(img)
+
+ elif i == 6:
+ # add processed camera sensor noise
+ if random.random() < isp_prob and isp_model is not None:
+ with torch.no_grad():
+ img, hq = isp_model.forward(img.copy(), hq)
+
+ # add final JPEG compression noise
+ img = add_JPEG_noise(img)
+
+ # random crop
+ img, hq = random_crop(img, hq, sf_ori, lq_patchsize)
+
+ return img, hq
+
+
+# TODO: no isp_model?
+def degradation_bsrgan_variant(image, sf=4, isp_model=None):
+ """
+ This is the degradation model of BSRGAN from the paper
+ "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
+ ----------
+ image: HxWxC uint8 image, [0, 255]
+ sf: scale factor
+ isp_model: camera ISP model
+ Returns
+ -------
+ example: dict with key "image": the degraded low-quality image, uint8, [0, 255]
+ """
+ image = util.uint2single(image)
+ isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
+ sf_ori = sf
+
+ h1, w1 = image.shape[:2]
+ image = image.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
+ h, w = image.shape[:2]
+
+ hq = image.copy()
+
+ if sf == 4 and random.random() < scale2_prob: # downsample1
+ if np.random.rand() < 0.5:
+ image = cv2.resize(image, (int(1 / 2 * image.shape[1]), int(1 / 2 * image.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ image = util.imresize_np(image, 1 / 2, True)
+ image = np.clip(image, 0.0, 1.0)
+ sf = 2
+
+ shuffle_order = random.sample(range(7), 7)
+ idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
+ if idx1 > idx2: # keep downsample3 last
+ shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
+
+ for i in shuffle_order:
+
+ if i == 0:
+ image = add_blur(image, sf=sf)
+
+ elif i == 1:
+ image = add_blur(image, sf=sf)
+
+ elif i == 2:
+ a, b = image.shape[1], image.shape[0]
+ # downsample2
+ if random.random() < 0.75:
+ sf1 = random.uniform(1, 2 * sf)
+ image = cv2.resize(image, (int(1 / sf1 * image.shape[1]), int(1 / sf1 * image.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
+ k_shifted = shift_pixel(k, sf)
+ k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
+ image = ndimage.convolve(image, np.expand_dims(k_shifted, axis=2), mode='mirror')
+ image = image[0::sf, 0::sf, ...] # nearest downsampling
+ image = np.clip(image, 0.0, 1.0)
+
+ elif i == 3:
+ # downsample3
+ image = cv2.resize(image, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
+ image = np.clip(image, 0.0, 1.0)
+
+ elif i == 4:
+ # add Gaussian noise
+ image = add_Gaussian_noise(image, noise_level1=2, noise_level2=25)
+
+ elif i == 5:
+ # add JPEG noise
+ if random.random() < jpeg_prob:
+ image = add_JPEG_noise(image)
+
+ # elif i == 6:
+ # # add processed camera sensor noise
+ # if random.random() < isp_prob and isp_model is not None:
+ # with torch.no_grad():
+ # img, hq = isp_model.forward(img.copy(), hq)
+
+ # add final JPEG compression noise
+ image = add_JPEG_noise(image)
+ image = util.single2uint(image)
+ example = {"image":image}
+ return example
+
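+# Usage sketch (the path is an assumption): unlike degradation_bsrgan, which
+# returns an (lq, hq) pair, this variant takes a uint8 image and returns a dict.
+#
+#   img = util.imread_uint('utils/test.png', 3)           # uint8 HxWx3
+#   lq = degradation_bsrgan_variant(img, sf=4)["image"]   # degraded uint8 image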
+
+# TODO: in case of a pickle error, replace in-place ops like a += x with a = a + x in add_speckle_noise etc.
+def degradation_bsrgan_plus(img, sf=4, shuffle_prob=0.5, use_sharp=True, lq_patchsize=64, isp_model=None):
+ """
+ This is an extended degradation model by combining
+ the degradation models of BSRGAN and Real-ESRGAN
+ ----------
+ img: HXWXC, [0, 1], its size should be large than (lq_patchsizexsf)x(lq_patchsizexsf)
+ sf: scale factor
+ use_shuffle: the degradation shuffle
+ use_sharp: sharpening the img
+ Returns
+ -------
+ img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]
+ hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]
+ """
+
+ h1, w1 = img.shape[:2]
+ img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
+ h, w = img.shape[:2]
+
+ if h < lq_patchsize * sf or w < lq_patchsize * sf:
+ raise ValueError(f'img size ({h1}X{w1}) is too small!')
+
+ if use_sharp:
+ img = add_sharpening(img)
+ hq = img.copy()
+
+ if random.random() < shuffle_prob:
+ shuffle_order = random.sample(range(13), 13)
+ else:
+ shuffle_order = list(range(13))
+ # local shuffle for noise, JPEG is always the last one
+ shuffle_order[2:6] = random.sample(shuffle_order[2:6], len(range(2, 6)))
+ shuffle_order[9:13] = random.sample(shuffle_order[9:13], len(range(9, 13)))
+
+ poisson_prob, speckle_prob, isp_prob = 0.1, 0.1, 0.1
+
+ for i in shuffle_order:
+ if i == 0:
+ img = add_blur(img, sf=sf)
+ elif i == 1:
+ img = add_resize(img, sf=sf)
+ elif i == 2:
+ img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)
+ elif i == 3:
+ if random.random() < poisson_prob:
+ img = add_Poisson_noise(img)
+ elif i == 4:
+ if random.random() < speckle_prob:
+ img = add_speckle_noise(img)
+ elif i == 5:
+ if random.random() < isp_prob and isp_model is not None:
+ with torch.no_grad():
+ img, hq = isp_model.forward(img.copy(), hq)
+ elif i == 6:
+ img = add_JPEG_noise(img)
+ elif i == 7:
+ img = add_blur(img, sf=sf)
+ elif i == 8:
+ img = add_resize(img, sf=sf)
+ elif i == 9:
+ img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)
+ elif i == 10:
+ if random.random() < poisson_prob:
+ img = add_Poisson_noise(img)
+ elif i == 11:
+ if random.random() < speckle_prob:
+ img = add_speckle_noise(img)
+ elif i == 12:
+ if random.random() < isp_prob and isp_model is not None:
+ with torch.no_grad():
+ img, hq = isp_model.forward(img.copy(), hq)
+ else:
+ print('check the shuffle!')
+
+ # resize to desired size
+ img = cv2.resize(img, (int(1 / sf * hq.shape[1]), int(1 / sf * hq.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+
+ # add final JPEG compression noise
+ img = add_JPEG_noise(img)
+
+ # random crop
+ img, hq = random_crop(img, hq, sf, lq_patchsize)
+
+ return img, hq
+
+
+if __name__ == '__main__':
+ print("hey")
+ img = util.imread_uint('utils/test.png', 3)
+ img = img[:448, :448]
+ h = img.shape[0] // 4
+ print("resizing to", h)
+ sf = 4
+ deg_fn = partial(degradation_bsrgan_variant, sf=sf)
+ for i in range(20):
+ print(i)
+ img_hq = img
+ img_lq = deg_fn(img)["image"] # degradation_bsrgan_variant returns a dict
+ img_hq, img_lq = util.uint2single(img_hq), util.uint2single(img_lq)
+ print(img_lq)
+ img_lq_bicubic = albumentations.SmallestMaxSize(max_size=h, interpolation=cv2.INTER_CUBIC)(image=img_hq)["image"]
+ print(img_lq.shape)
+ print("bicubic", img_lq_bicubic.shape)
+ print(img_hq.shape)
+ lq_nearest = cv2.resize(util.single2uint(img_lq), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
+ interpolation=0)
+ lq_bicubic_nearest = cv2.resize(util.single2uint(img_lq_bicubic), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
+ interpolation=0)
+ img_concat = np.concatenate([lq_bicubic_nearest, lq_nearest, util.single2uint(img_hq)], axis=1)
+ util.imsave(img_concat, str(i) + '.png')
+
+
diff --git a/ldm/modules/image_degradation/bsrgan_light.py b/ldm/modules/image_degradation/bsrgan_light.py
new file mode 100644
index 0000000000000000000000000000000000000000..808c7f882cb75e2ba2340d5b55881d11927351f0
--- /dev/null
+++ b/ldm/modules/image_degradation/bsrgan_light.py
@@ -0,0 +1,651 @@
+# -*- coding: utf-8 -*-
+import numpy as np
+import cv2
+import torch
+
+from functools import partial
+import random
+from scipy import ndimage
+import scipy
+import scipy.stats as ss
+from scipy.interpolate import interp2d
+from scipy.linalg import orth
+import albumentations
+
+import ldm.modules.image_degradation.utils_image as util
+
+"""
+# --------------------------------------------
+# Super-Resolution
+# --------------------------------------------
+#
+# Kai Zhang (cskaizhang@gmail.com)
+# https://github.com/cszn
+# From 2019/03--2021/08
+# --------------------------------------------
+"""
+
+def modcrop_np(img, sf):
+ '''
+ Args:
+ img: numpy image, WxH or WxHxC
+ sf: scale factor
+ Return:
+ cropped image
+ '''
+ w, h = img.shape[:2]
+ im = np.copy(img)
+ return im[:w - w % sf, :h - h % sf, ...]
+
+
+"""
+# --------------------------------------------
+# anisotropic Gaussian kernels
+# --------------------------------------------
+"""
+
+
+def analytic_kernel(k):
+ """Calculate the X4 kernel from the X2 kernel (for proof see appendix in paper)"""
+ k_size = k.shape[0]
+ # Calculate the big kernel's size
+ big_k = np.zeros((3 * k_size - 2, 3 * k_size - 2))
+ # Loop over the small kernel to fill the big one
+ for r in range(k_size):
+ for c in range(k_size):
+ big_k[2 * r:2 * r + k_size, 2 * c:2 * c + k_size] += k[r, c] * k
+ # Crop the edges of the big kernel to ignore very small values and reduce the run time of SR
+ crop = k_size // 2
+ cropped_big_k = big_k[crop:-crop, crop:-crop]
+ # Normalize to 1
+ return cropped_big_k / cropped_big_k.sum()
+
+
+def anisotropic_Gaussian(ksize=15, theta=np.pi, l1=6, l2=6):
+ """ generate an anisotropic Gaussian kernel
+ Args:
+ ksize : e.g., 15, kernel size
+ theta : [0, pi], rotation angle range
+ l1 : [0.1,50], scaling of eigenvalues
+ l2 : [0.1,l1], scaling of eigenvalues
+ If l1 = l2, will get an isotropic Gaussian kernel.
+ Returns:
+ k : kernel
+ """
+
+ v = np.dot(np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]]), np.array([1., 0.]))
+ V = np.array([[v[0], v[1]], [v[1], -v[0]]])
+ D = np.array([[l1, 0], [0, l2]])
+ Sigma = np.dot(np.dot(V, D), np.linalg.inv(V))
+ k = gm_blur_kernel(mean=[0, 0], cov=Sigma, size=ksize)
+
+ return k
+
+
+def gm_blur_kernel(mean, cov, size=15):
+ center = size / 2.0 + 0.5
+ k = np.zeros([size, size])
+ for y in range(size):
+ for x in range(size):
+ cy = y - center + 1
+ cx = x - center + 1
+ k[y, x] = ss.multivariate_normal.pdf([cx, cy], mean=mean, cov=cov)
+
+ k = k / np.sum(k)
+ return k
+
+
+def shift_pixel(x, sf, upper_left=True):
+ """shift pixel for super-resolution with different scale factors
+ Args:
+ x: WxHxC or WxH
+ sf: scale factor
+ upper_left: shift direction
+ """
+ h, w = x.shape[:2]
+ shift = (sf - 1) * 0.5
+ xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0)
+ if upper_left:
+ x1 = xv + shift
+ y1 = yv + shift
+ else:
+ x1 = xv - shift
+ y1 = yv - shift
+
+ x1 = np.clip(x1, 0, w - 1)
+ y1 = np.clip(y1, 0, h - 1)
+
+ if x.ndim == 2:
+ x = interp2d(xv, yv, x)(x1, y1)
+ if x.ndim == 3:
+ for i in range(x.shape[-1]):
+ x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1)
+
+ return x
+
+
+def blur(x, k):
+ '''
+ x: image, NxcxHxW
+ k: kernel, Nx1xhxw
+ '''
+ n, c = x.shape[:2]
+ p1, p2 = (k.shape[-2] - 1) // 2, (k.shape[-1] - 1) // 2
+ x = torch.nn.functional.pad(x, pad=(p1, p2, p1, p2), mode='replicate')
+ k = k.repeat(1, c, 1, 1)
+ k = k.view(-1, 1, k.shape[2], k.shape[3])
+ x = x.view(1, -1, x.shape[2], x.shape[3])
+ x = torch.nn.functional.conv2d(x, k, bias=None, stride=1, padding=0, groups=n * c)
+ x = x.view(n, c, x.shape[2], x.shape[3])
+
+ return x
+
+
+def gen_kernel(k_size=np.array([15, 15]), scale_factor=np.array([4, 4]), min_var=0.6, max_var=10., noise_level=0):
+ """"
+ # modified version of https://github.com/assafshocher/BlindSR_dataset_generator
+ # Kai Zhang
+ # min_var = 0.175 * sf # variance of the gaussian kernel will be sampled between min_var and max_var
+ # max_var = 2.5 * sf
+ """
+ # Set random eigen-vals (lambdas) and angle (theta) for COV matrix
+ lambda_1 = min_var + np.random.rand() * (max_var - min_var)
+ lambda_2 = min_var + np.random.rand() * (max_var - min_var)
+ theta = np.random.rand() * np.pi # random theta
+ noise = -noise_level + np.random.rand(*k_size) * noise_level * 2
+
+ # Set COV matrix using Lambdas and Theta
+ LAMBDA = np.diag([lambda_1, lambda_2])
+ Q = np.array([[np.cos(theta), -np.sin(theta)],
+ [np.sin(theta), np.cos(theta)]])
+ SIGMA = Q @ LAMBDA @ Q.T
+ INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]
+
+ # Set expectation position (shifting kernel for aligned image)
+ MU = k_size // 2 - 0.5 * (scale_factor - 1) # - 0.5 * (scale_factor - k_size % 2)
+ MU = MU[None, None, :, None]
+
+ # Create meshgrid for Gaussian
+ [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))
+ Z = np.stack([X, Y], 2)[:, :, :, None]
+
+ # Calculate Gaussian for every pixel of the kernel
+ ZZ = Z - MU
+ ZZ_t = ZZ.transpose(0, 1, 3, 2)
+ raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ)) * (1 + noise)
+
+ # shift the kernel so it will be centered
+ # raw_kernel_centered = kernel_shift(raw_kernel, scale_factor)
+
+ # Normalize the kernel and return
+ # kernel = raw_kernel_centered / np.sum(raw_kernel_centered)
+ kernel = raw_kernel / np.sum(raw_kernel)
+ return kernel
+
+
+def fspecial_gaussian(hsize, sigma):
+ hsize = [hsize, hsize]
+ siz = [(hsize[0] - 1.0) / 2.0, (hsize[1] - 1.0) / 2.0]
+ std = sigma
+ [x, y] = np.meshgrid(np.arange(-siz[1], siz[1] + 1), np.arange(-siz[0], siz[0] + 1))
+ arg = -(x * x + y * y) / (2 * std * std)
+ h = np.exp(arg)
+ h[h < np.finfo(float).eps * h.max()] = 0
+ sumh = h.sum()
+ if sumh != 0:
+ h = h / sumh
+ return h
+
+
+def fspecial_laplacian(alpha):
+ alpha = max([0, min([alpha, 1])])
+ h1 = alpha / (alpha + 1)
+ h2 = (1 - alpha) / (alpha + 1)
+ h = [[h1, h2, h1], [h2, -4 / (alpha + 1), h2], [h1, h2, h1]]
+ h = np.array(h)
+ return h
+
+
+def fspecial(filter_type, *args, **kwargs):
+ '''
+ python code from:
+ https://github.com/ronaldosena/imagens-medicas-2/blob/40171a6c259edec7827a6693a93955de2bd39e76/Aulas/aula_2_-_uniform_filter/matlab_fspecial.py
+ '''
+ if filter_type == 'gaussian':
+ return fspecial_gaussian(*args, **kwargs)
+ if filter_type == 'laplacian':
+ return fspecial_laplacian(*args, **kwargs)
+
+
+"""
+# --------------------------------------------
+# degradation models
+# --------------------------------------------
+"""
+
+
+def bicubic_degradation(x, sf=3):
+ '''
+ Args:
+ x: HxWxC image, [0, 1]
+ sf: down-scale factor
+ Return:
+ bicubically downsampled LR image
+ '''
+ x = util.imresize_np(x, scale=1 / sf)
+ return x
+
+
+def srmd_degradation(x, k, sf=3):
+ ''' blur + bicubic downsampling
+ Args:
+ x: HxWxC image, [0, 1]
+ k: hxw, double
+ sf: down-scale factor
+ Return:
+ downsampled LR image
+ Reference:
+ @inproceedings{zhang2018learning,
+ title={Learning a single convolutional super-resolution network for multiple degradations},
+ author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
+ booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
+ pages={3262--3271},
+ year={2018}
+ }
+ '''
+ x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap') # 'nearest' | 'mirror'
+ x = bicubic_degradation(x, sf=sf)
+ return x
+
+
+def dpsr_degradation(x, k, sf=3):
+ ''' bicubic downsampling + blur
+ Args:
+ x: HxWxC image, [0, 1]
+ k: hxw, double
+ sf: down-scale factor
+ Return:
+ downsampled LR image
+ Reference:
+ @inproceedings{zhang2019deep,
+ title={Deep Plug-and-Play Super-Resolution for Arbitrary Blur Kernels},
+ author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
+ booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
+ pages={1671--1681},
+ year={2019}
+ }
+ '''
+ x = bicubic_degradation(x, sf=sf)
+ x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
+ return x
+
+
+def classical_degradation(x, k, sf=3):
+ ''' blur + downsampling
+ Args:
+ x: HxWxC image, [0, 1]/[0, 255]
+ k: hxw, double
+ sf: down-scale factor
+ Return:
+ downsampled LR image
+ '''
+ x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
+ # x = filters.correlate(x, np.expand_dims(np.flip(k), axis=2))
+ st = 0
+ return x[st::sf, st::sf, ...]
+
+
+def add_sharpening(img, weight=0.5, radius=50, threshold=10):
+ """USM sharpening. borrowed from real-ESRGAN
+ Input image: I; Blurry image: B.
+ 1. K = I + weight * (I - B)
+ 2. Mask = 1 if abs(I - B) > threshold, else: 0
+ 3. Blur mask:
+ 4. Out = Mask * K + (1 - Mask) * I
+ Args:
+ img (Numpy array): Input image, HWC, BGR; float32, [0, 1].
+ weight (float): Sharpening weight. Default: 0.5.
+ radius (float): Kernel size of the Gaussian blur. Default: 50.
+ threshold (int): Mask threshold on |I - B| scaled to [0, 255]. Default: 10.
+ """
+ if radius % 2 == 0:
+ radius += 1
+ blur = cv2.GaussianBlur(img, (radius, radius), 0)
+ residual = img - blur
+ mask = np.abs(residual) * 255 > threshold
+ mask = mask.astype('float32')
+ soft_mask = cv2.GaussianBlur(mask, (radius, radius), 0)
+
+ K = img + weight * residual
+ K = np.clip(K, 0, 1)
+ return soft_mask * K + (1 - soft_mask) * img
+
+
+def add_blur(img, sf=4):
+ wd2 = 4.0 + sf
+ wd = 2.0 + 0.2 * sf
+
+ wd2 = wd2/4
+ wd = wd/4
+
+ if random.random() < 0.5:
+ l1 = wd2 * random.random()
+ l2 = wd2 * random.random()
+ k = anisotropic_Gaussian(ksize=random.randint(2, 11) + 3, theta=random.random() * np.pi, l1=l1, l2=l2)
+ else:
+ k = fspecial('gaussian', random.randint(2, 4) + 3, wd * random.random())
+ img = ndimage.convolve(img, np.expand_dims(k, axis=2), mode='mirror')
+
+ return img
+
+
+def add_resize(img, sf=4):
+ rnum = np.random.rand()
+ if rnum > 0.8: # up
+ sf1 = random.uniform(1, 2)
+ elif rnum < 0.7: # down
+ sf1 = random.uniform(0.5 / sf, 1)
+ else:
+ sf1 = 1.0
+ img = cv2.resize(img, (int(sf1 * img.shape[1]), int(sf1 * img.shape[0])), interpolation=random.choice([1, 2, 3]))
+ img = np.clip(img, 0.0, 1.0)
+
+ return img
+
+
+# def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
+# noise_level = random.randint(noise_level1, noise_level2)
+# rnum = np.random.rand()
+# if rnum > 0.6: # add color Gaussian noise
+# img += np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
+# elif rnum < 0.4: # add grayscale Gaussian noise
+# img += np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
+# else: # add noise
+# L = noise_level2 / 255.
+# D = np.diag(np.random.rand(3))
+# U = orth(np.random.rand(3, 3))
+# conv = np.dot(np.dot(np.transpose(U), D), U)
+# img += np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
+# img = np.clip(img, 0.0, 1.0)
+# return img
+
+def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
+ noise_level = random.randint(noise_level1, noise_level2)
+ rnum = np.random.rand()
+ if rnum > 0.6: # add color Gaussian noise
+ img = img + np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
+ elif rnum < 0.4: # add grayscale Gaussian noise
+ img = img + np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
+ else: # add noise
+ L = noise_level2 / 255.
+ D = np.diag(np.random.rand(3))
+ U = orth(np.random.rand(3, 3))
+ conv = np.dot(np.dot(np.transpose(U), D), U)
+ img = img + np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
+ img = np.clip(img, 0.0, 1.0)
+ return img
+
+
+def add_speckle_noise(img, noise_level1=2, noise_level2=25):
+ noise_level = random.randint(noise_level1, noise_level2)
+ img = np.clip(img, 0.0, 1.0)
+ rnum = random.random()
+ if rnum > 0.6:
+ img += img * np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
+ elif rnum < 0.4:
+ img += img * np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
+ else:
+ L = noise_level2 / 255.
+ D = np.diag(np.random.rand(3))
+ U = orth(np.random.rand(3, 3))
+ conv = np.dot(np.dot(np.transpose(U), D), U)
+ img += img * np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
+ img = np.clip(img, 0.0, 1.0)
+ return img
+
+
+def add_Poisson_noise(img):
+ img = np.clip((img * 255.0).round(), 0, 255) / 255.
+ vals = 10 ** (2 * random.random() + 2.0) # [2, 4]
+ if random.random() < 0.5:
+ img = np.random.poisson(img * vals).astype(np.float32) / vals
+ else:
+ img_gray = np.dot(img[..., :3], [0.299, 0.587, 0.114])
+ img_gray = np.clip((img_gray * 255.0).round(), 0, 255) / 255.
+ noise_gray = np.random.poisson(img_gray * vals).astype(np.float32) / vals - img_gray
+ img += noise_gray[:, :, np.newaxis]
+ img = np.clip(img, 0.0, 1.0)
+ return img
+
+
+def add_JPEG_noise(img):
+ quality_factor = random.randint(80, 95)
+ img = cv2.cvtColor(util.single2uint(img), cv2.COLOR_RGB2BGR)
+ result, encimg = cv2.imencode('.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), quality_factor])
+ img = cv2.imdecode(encimg, 1)
+ img = cv2.cvtColor(util.uint2single(img), cv2.COLOR_BGR2RGB)
+ return img
+
+
+def random_crop(lq, hq, sf=4, lq_patchsize=64):
+ h, w = lq.shape[:2]
+ rnd_h = random.randint(0, h - lq_patchsize)
+ rnd_w = random.randint(0, w - lq_patchsize)
+ lq = lq[rnd_h:rnd_h + lq_patchsize, rnd_w:rnd_w + lq_patchsize, :]
+
+ rnd_h_H, rnd_w_H = int(rnd_h * sf), int(rnd_w * sf)
+ hq = hq[rnd_h_H:rnd_h_H + lq_patchsize * sf, rnd_w_H:rnd_w_H + lq_patchsize * sf, :]
+ return lq, hq
+
+
+def degradation_bsrgan(img, sf=4, lq_patchsize=72, isp_model=None):
+ """
+ This is the degradation model of BSRGAN from the paper
+ "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
+ ----------
+ img: HxWxC, [0, 1], its size should be larger than (lq_patchsize*sf) x (lq_patchsize*sf)
+ sf: scale factor
+ isp_model: camera ISP model
+ Returns
+ -------
+ img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]
+ hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]
+ """
+ isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
+ sf_ori = sf
+
+ h1, w1 = img.shape[:2]
+ img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
+ h, w = img.shape[:2]
+
+ if h < lq_patchsize * sf or w < lq_patchsize * sf:
+ raise ValueError(f'img size ({h1}X{w1}) is too small!')
+
+ hq = img.copy()
+
+ if sf == 4 and random.random() < scale2_prob: # downsample1
+ if np.random.rand() < 0.5:
+ img = cv2.resize(img, (int(1 / 2 * img.shape[1]), int(1 / 2 * img.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ img = util.imresize_np(img, 1 / 2, True)
+ img = np.clip(img, 0.0, 1.0)
+ sf = 2
+
+ shuffle_order = random.sample(range(7), 7)
+ idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
+ if idx1 > idx2: # keep downsample3 last
+ shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
+
+ for i in shuffle_order:
+
+ if i == 0:
+ img = add_blur(img, sf=sf)
+
+ elif i == 1:
+ img = add_blur(img, sf=sf)
+
+ elif i == 2:
+ a, b = img.shape[1], img.shape[0]
+ # downsample2
+ if random.random() < 0.75:
+ sf1 = random.uniform(1, 2 * sf)
+ img = cv2.resize(img, (int(1 / sf1 * img.shape[1]), int(1 / sf1 * img.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
+ k_shifted = shift_pixel(k, sf)
+ k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
+ img = ndimage.convolve(img, np.expand_dims(k_shifted, axis=2), mode='mirror')
+ img = img[0::sf, 0::sf, ...] # nearest downsampling
+ img = np.clip(img, 0.0, 1.0)
+
+ elif i == 3:
+ # downsample3
+ img = cv2.resize(img, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
+ img = np.clip(img, 0.0, 1.0)
+
+ elif i == 4:
+ # add Gaussian noise
+ img = add_Gaussian_noise(img, noise_level1=2, noise_level2=8)
+
+ elif i == 5:
+ # add JPEG noise
+ if random.random() < jpeg_prob:
+ img = add_JPEG_noise(img)
+
+ elif i == 6:
+ # add processed camera sensor noise
+ if random.random() < isp_prob and isp_model is not None:
+ with torch.no_grad():
+ img, hq = isp_model.forward(img.copy(), hq)
+
+ # add final JPEG compression noise
+ img = add_JPEG_noise(img)
+
+ # random crop
+ img, hq = random_crop(img, hq, sf_ori, lq_patchsize)
+
+ return img, hq
+
+
+# TODO: no isp_model?
+def degradation_bsrgan_variant(image, sf=4, isp_model=None, up=False):
+ """
+ This is the degradation model of BSRGAN from the paper
+ "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
+ ----------
+ image: HxWxC uint8 image, [0, 255]
+ sf: scale factor
+ isp_model: camera ISP model
+ up: if True, resize the degraded image back to its original resolution
+ Returns
+ -------
+ example: dict with key "image": the degraded low-quality image, uint8, [0, 255]
+ """
+ image = util.uint2single(image)
+ isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
+ sf_ori = sf
+
+ h1, w1 = image.shape[:2]
+ image = image.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
+ h, w = image.shape[:2]
+
+ hq = image.copy()
+
+ if sf == 4 and random.random() < scale2_prob: # downsample1
+ if np.random.rand() < 0.5:
+ image = cv2.resize(image, (int(1 / 2 * image.shape[1]), int(1 / 2 * image.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ image = util.imresize_np(image, 1 / 2, True)
+ image = np.clip(image, 0.0, 1.0)
+ sf = 2
+
+ shuffle_order = random.sample(range(7), 7)
+ idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
+ if idx1 > idx2: # keep downsample3 last
+ shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
+
+ for i in shuffle_order:
+
+ if i == 0:
+ image = add_blur(image, sf=sf)
+
+ # elif i == 1:
+ # image = add_blur(image, sf=sf)
+
+ elif i == 2:
+ a, b = image.shape[1], image.shape[0]
+ # downsample2
+ if random.random() < 0.8:
+ sf1 = random.uniform(1, 2 * sf)
+ image = cv2.resize(image, (int(1 / sf1 * image.shape[1]), int(1 / sf1 * image.shape[0])),
+ interpolation=random.choice([1, 2, 3]))
+ else:
+ k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
+ k_shifted = shift_pixel(k, sf)
+ k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
+ image = ndimage.convolve(image, np.expand_dims(k_shifted, axis=2), mode='mirror')
+ image = image[0::sf, 0::sf, ...] # nearest downsampling
+
+ image = np.clip(image, 0.0, 1.0)
+
+ elif i == 3:
+ # downsample3
+ image = cv2.resize(image, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
+ image = np.clip(image, 0.0, 1.0)
+
+ elif i == 4:
+ # add Gaussian noise
+ image = add_Gaussian_noise(image, noise_level1=1, noise_level2=2)
+
+ elif i == 5:
+ # add JPEG noise
+ if random.random() < jpeg_prob:
+ image = add_JPEG_noise(image)
+ #
+ # elif i == 6:
+ # # add processed camera sensor noise
+ # if random.random() < isp_prob and isp_model is not None:
+ # with torch.no_grad():
+ # img, hq = isp_model.forward(img.copy(), hq)
+
+ # add final JPEG compression noise
+ image = add_JPEG_noise(image)
+ image = util.single2uint(image)
+ if up:
+ image = cv2.resize(image, (w1, h1), interpolation=cv2.INTER_CUBIC) # todo: random, as above? want to condition on it then
+ example = {"image": image}
+ return example
+
+
+
+
+if __name__ == '__main__':
+ print("hey")
+ img = util.imread_uint('utils/test.png', 3)
+ img = img[:448, :448]
+ h = img.shape[0] // 4
+ print("resizing to", h)
+ sf = 4
+ deg_fn = partial(degradation_bsrgan_variant, sf=sf)
+ for i in range(20):
+ print(i)
+ img_hq = img
+ img_lq = deg_fn(img)["image"]
+ img_hq, img_lq = util.uint2single(img_hq), util.uint2single(img_lq)
+ print(img_lq)
+ img_lq_bicubic = albumentations.SmallestMaxSize(max_size=h, interpolation=cv2.INTER_CUBIC)(image=img_hq)["image"]
+ print(img_lq.shape)
+ print("bicubic", img_lq_bicubic.shape)
+ print(img_hq.shape)
+ lq_nearest = cv2.resize(util.single2uint(img_lq), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
+ interpolation=0)
+ lq_bicubic_nearest = cv2.resize(util.single2uint(img_lq_bicubic),
+ (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
+ interpolation=0)
+ img_concat = np.concatenate([lq_bicubic_nearest, lq_nearest, util.single2uint(img_hq)], axis=1)
+ util.imsave(img_concat, str(i) + '.png')
diff --git a/ldm/modules/image_degradation/utils/test.png b/ldm/modules/image_degradation/utils/test.png
new file mode 100644
index 0000000000000000000000000000000000000000..4249b43de0f22707758d13c240268a401642f6e6
Binary files /dev/null and b/ldm/modules/image_degradation/utils/test.png differ
diff --git a/ldm/modules/image_degradation/utils_image.py b/ldm/modules/image_degradation/utils_image.py
new file mode 100644
index 0000000000000000000000000000000000000000..0175f155ad900ae33c3c46ed87f49b352e3faf98
--- /dev/null
+++ b/ldm/modules/image_degradation/utils_image.py
@@ -0,0 +1,916 @@
+import os
+import math
+import random
+import numpy as np
+import torch
+import cv2
+from torchvision.utils import make_grid
+from datetime import datetime
+#import matplotlib.pyplot as plt # TODO: check with Dominik, also bsrgan.py vs bsrgan_light.py
+
+
+os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"
+
+
+'''
+# --------------------------------------------
+# Kai Zhang (github: https://github.com/cszn)
+# 03/Mar/2019
+# --------------------------------------------
+# https://github.com/twhui/SRGAN-pyTorch
+# https://github.com/xinntao/BasicSR
+# --------------------------------------------
+'''
+
+
+IMG_EXTENSIONS = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP', '.tif']
+
+
+def is_image_file(filename):
+ return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
+
+
+def get_timestamp():
+ return datetime.now().strftime('%y%m%d-%H%M%S')
+
+
+def imshow(x, title=None, cbar=False, figsize=None):
+ import matplotlib.pyplot as plt # lazy import; the module-level import is commented out above
+ plt.figure(figsize=figsize)
+ plt.imshow(np.squeeze(x), interpolation='nearest', cmap='gray')
+ if title:
+ plt.title(title)
+ if cbar:
+ plt.colorbar()
+ plt.show()
+
+
+def surf(Z, cmap='rainbow', figsize=None):
+ import matplotlib.pyplot as plt # lazy import; the module-level import is commented out above
+ plt.figure(figsize=figsize)
+ ax3 = plt.axes(projection='3d')
+
+ w, h = Z.shape[:2]
+ xx = np.arange(0,w,1)
+ yy = np.arange(0,h,1)
+ X, Y = np.meshgrid(xx, yy)
+ ax3.plot_surface(X,Y,Z,cmap=cmap)
+ #ax3.contour(X,Y,Z, zdim='z',offset=-2,cmap=cmap)
+ plt.show()
+
+
+'''
+# --------------------------------------------
+# get image paths
+# --------------------------------------------
+'''
+
+
+def get_image_paths(dataroot):
+ paths = None # return None if dataroot is None
+ if dataroot is not None:
+ paths = sorted(_get_paths_from_images(dataroot))
+ return paths
+
+
+def _get_paths_from_images(path):
+ assert os.path.isdir(path), '{:s} is not a valid directory'.format(path)
+ images = []
+ for dirpath, _, fnames in sorted(os.walk(path)):
+ for fname in sorted(fnames):
+ if is_image_file(fname):
+ img_path = os.path.join(dirpath, fname)
+ images.append(img_path)
+ assert images, '{:s} has no valid image file'.format(path)
+ return images
+
+
+'''
+# --------------------------------------------
+# split large images into small images
+# --------------------------------------------
+'''
+
+
+def patches_from_image(img, p_size=512, p_overlap=64, p_max=800):
+ w, h = img.shape[:2]
+ patches = []
+ if w > p_max and h > p_max:
+ w1 = list(np.arange(0, w-p_size, p_size-p_overlap, dtype=int))
+ h1 = list(np.arange(0, h-p_size, p_size-p_overlap, dtype=int))
+ w1.append(w-p_size)
+ h1.append(h-p_size)
+# print(w1)
+# print(h1)
+ for i in w1:
+ for j in h1:
+ patches.append(img[i:i+p_size, j:j+p_size,:])
+ else:
+ patches.append(img)
+
+ return patches
+
+
+def imssave(imgs, img_path):
+ """
+ imgs: list, N images of size WxHxC
+ """
+ img_name, ext = os.path.splitext(os.path.basename(img_path))
+
+ for i, img in enumerate(imgs):
+ if img.ndim == 3:
+ img = img[:, :, [2, 1, 0]]
+ new_path = os.path.join(os.path.dirname(img_path), img_name+str('_s{:04d}'.format(i))+'.png')
+ cv2.imwrite(new_path, img)
+
+
+def split_imageset(original_dataroot, target_dataroot, n_channels=3, p_size=800, p_overlap=96, p_max=1000):
+ """
+ split the large images from original_dataroot into small overlapped images of size (p_size)x(p_size),
+ and save them into target_dataroot; only images larger than (p_max)x(p_max)
+ will be split.
+ Args:
+ original_dataroot: source image folder
+ target_dataroot: destination folder for the patches
+ p_size: size of the small images
+ p_overlap: overlap between patches; the training patch size is a good choice
+ p_max: images smaller than (p_max)x(p_max) are kept unchanged.
+ """
+ paths = get_image_paths(original_dataroot)
+ for img_path in paths:
+ # img_name, ext = os.path.splitext(os.path.basename(img_path))
+ img = imread_uint(img_path, n_channels=n_channels)
+ patches = patches_from_image(img, p_size, p_overlap, p_max)
+ imssave(patches, os.path.join(target_dataroot, os.path.basename(img_path)))
+ #if original_dataroot == target_dataroot:
+ #del img_path
+
+'''
+# --------------------------------------------
+# makedir
+# --------------------------------------------
+'''
+
+
+def mkdir(path):
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+
+def mkdirs(paths):
+ if isinstance(paths, str):
+ mkdir(paths)
+ else:
+ for path in paths:
+ mkdir(path)
+
+
+def mkdir_and_rename(path):
+ if os.path.exists(path):
+ new_name = path + '_archived_' + get_timestamp()
+ print('Path already exists. Rename it to [{:s}]'.format(new_name))
+ os.rename(path, new_name)
+ os.makedirs(path)
+
+
+'''
+# --------------------------------------------
+# read image from path
+# opencv is fast, but read BGR numpy image
+# --------------------------------------------
+'''
+
+
+# --------------------------------------------
+# get uint8 image of size HxWxn_channels (RGB)
+# --------------------------------------------
+def imread_uint(path, n_channels=3):
+ # input: path
+ # output: HxWx3(RGB or GGG), or HxWx1 (G)
+ if n_channels == 1:
+ img = cv2.imread(path, 0) # cv2.IMREAD_GRAYSCALE
+ img = np.expand_dims(img, axis=2) # HxWx1
+ elif n_channels == 3:
+ img = cv2.imread(path, cv2.IMREAD_UNCHANGED) # BGR or G
+ if img.ndim == 2:
+ img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) # GGG
+ else:
+ img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # RGB
+ return img
+
+
+# --------------------------------------------
+# matlab's imwrite
+# --------------------------------------------
+def imsave(img, img_path):
+ img = np.squeeze(img)
+ if img.ndim == 3:
+ img = img[:, :, [2, 1, 0]]
+ cv2.imwrite(img_path, img)
+
+def imwrite(img, img_path):
+ img = np.squeeze(img)
+ if img.ndim == 3:
+ img = img[:, :, [2, 1, 0]]
+ cv2.imwrite(img_path, img)
+
+
+
+# --------------------------------------------
+# get single image of size HxWxn_channels (BGR)
+# --------------------------------------------
+def read_img(path):
+ # read image by cv2
+ # return: Numpy float32, HWC, BGR, [0,1]
+ img = cv2.imread(path, cv2.IMREAD_UNCHANGED) # cv2.IMREAD_GRAYSCALE
+ img = img.astype(np.float32) / 255.
+ if img.ndim == 2:
+ img = np.expand_dims(img, axis=2)
+ # some images have 4 channels
+ if img.shape[2] > 3:
+ img = img[:, :, :3]
+ return img
+
+
+'''
+# --------------------------------------------
+# image format conversion
+# --------------------------------------------
+# numpy(single) <---> numpy(uint)
+# numpy(single) <---> tensor
+# numpy(uint) <---> tensor
+# --------------------------------------------
+'''
+
+
+# --------------------------------------------
+# numpy(single) [0, 1] <---> numpy(uint)
+# --------------------------------------------
+
+
+def uint2single(img):
+
+ return np.float32(img/255.)
+
+
+def single2uint(img):
+
+ return np.uint8((img.clip(0, 1)*255.).round())
+
+
+def uint162single(img):
+
+ return np.float32(img/65535.)
+
+
+def single2uint16(img):
+
+ return np.uint16((img.clip(0, 1)*65535.).round())
+
+
+# --------------------------------------------
+# numpy(uint) (HxWxC or HxW) <---> tensor
+# --------------------------------------------
+
+
+# convert uint to 4-dimensional torch tensor
+def uint2tensor4(img):
+ if img.ndim == 2:
+ img = np.expand_dims(img, axis=2)
+ return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().div(255.).unsqueeze(0)
+
+
+# convert uint to 3-dimensional torch tensor
+def uint2tensor3(img):
+ if img.ndim == 2:
+ img = np.expand_dims(img, axis=2)
+ return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().div(255.)
+
+
+# convert 2/3/4-dimensional torch tensor to uint
+def tensor2uint(img):
+ img = img.data.squeeze().float().clamp_(0, 1).cpu().numpy()
+ if img.ndim == 3:
+ img = np.transpose(img, (1, 2, 0))
+ return np.uint8((img*255.0).round())
+
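+# Round-trip sketch for the uint8 <-> tensor helpers (torch and np are
+# imported at the top of this module):
+#
+#   img = (np.random.rand(32, 32, 3) * 255).astype(np.uint8)
+#   t = uint2tensor4(img)             # 1x3x32x32 float tensor in [0, 1]
+#   back = tensor2uint(t)             # 32x32x3 uint8
+#   assert np.array_equal(img, back)  # lossless for values in 0..255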
+
+# --------------------------------------------
+# numpy(single) (HxWxC) <---> tensor
+# --------------------------------------------
+
+
+# convert single (HxWxC) to 3-dimensional torch tensor
+def single2tensor3(img):
+ return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float()
+
+
+# convert single (HxWxC) to 4-dimensional torch tensor
+def single2tensor4(img):
+ return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().unsqueeze(0)
+
+
+# convert torch tensor to single
+def tensor2single(img):
+ img = img.data.squeeze().float().cpu().numpy()
+ if img.ndim == 3:
+ img = np.transpose(img, (1, 2, 0))
+
+ return img
+
+# convert torch tensor to single
+def tensor2single3(img):
+ img = img.data.squeeze().float().cpu().numpy()
+ if img.ndim == 3:
+ img = np.transpose(img, (1, 2, 0))
+ elif img.ndim == 2:
+ img = np.expand_dims(img, axis=2)
+ return img
+
+
+def single2tensor5(img):
+ return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1, 3).float().unsqueeze(0)
+
+
+def single32tensor5(img):
+ return torch.from_numpy(np.ascontiguousarray(img)).float().unsqueeze(0).unsqueeze(0)
+
+
+def single42tensor4(img):
+ return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1, 3).float()
+
+
+# from skimage.io import imread, imsave
+def tensor2img(tensor, out_type=np.uint8, min_max=(0, 1)):
+ '''
+ Converts a torch Tensor into an image Numpy array of BGR channel order
+ Input: 4D(B,(3/1),H,W), 3D(C,H,W), or 2D(H,W), any range, RGB channel order
+ Output: 3D(H,W,C) or 2D(H,W), [0,255], np.uint8 (default)
+ '''
+ tensor = tensor.squeeze().float().cpu().clamp_(*min_max) # squeeze first, then clamp
+ tensor = (tensor - min_max[0]) / (min_max[1] - min_max[0]) # to range [0,1]
+ n_dim = tensor.dim()
+ if n_dim == 4:
+ n_img = len(tensor)
+ img_np = make_grid(tensor, nrow=int(math.sqrt(n_img)), normalize=False).numpy()
+ img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0)) # HWC, BGR
+ elif n_dim == 3:
+ img_np = tensor.numpy()
+ img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0)) # HWC, BGR
+ elif n_dim == 2:
+ img_np = tensor.numpy()
+ else:
+ raise TypeError(
+ 'Only support 4D, 3D and 2D tensor. But received with dimension: {:d}'.format(n_dim))
+ if out_type == np.uint8:
+ img_np = (img_np * 255.0).round()
+ # Important. Unlike MATLAB, numpy's uint8 cast does NOT round; round explicitly first.
+ return img_np.astype(out_type)
+
+
+'''
+# --------------------------------------------
+# Augmentation, flip and/or rotate
+# --------------------------------------------
+# The following two are enough.
+# (1) augment_img: numpy image of HxWxC or HxW
+# (2) augment_img_tensor4: tensor image 1xCxHxW
+# --------------------------------------------
+'''
+
+
+def augment_img(img, mode=0):
+ '''Kai Zhang (github: https://github.com/cszn)
+ '''
+ if mode == 0:
+ return img
+ elif mode == 1:
+ return np.flipud(np.rot90(img))
+ elif mode == 2:
+ return np.flipud(img)
+ elif mode == 3:
+ return np.rot90(img, k=3)
+ elif mode == 4:
+ return np.flipud(np.rot90(img, k=2))
+ elif mode == 5:
+ return np.rot90(img)
+ elif mode == 6:
+ return np.rot90(img, k=2)
+ elif mode == 7:
+ return np.flipud(np.rot90(img, k=3))
+
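+# The eight modes enumerate the symmetries of the square (identity, three
+# rotations, four reflections). Minimal sketch:
+#
+#   img = np.arange(12).reshape(3, 4)
+#   augs = [augment_img(img, mode=m) for m in range(8)]
+#   # odd modes include a 90-degree rotation, so H and W are swapped:
+#   assert augs[5].shape == (4, 3) and augs[2].shape == (3, 4)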
+
+def augment_img_tensor4(img, mode=0):
+ '''Kai Zhang (github: https://github.com/cszn)
+ '''
+ if mode == 0:
+ return img
+ elif mode == 1:
+ return img.rot90(1, [2, 3]).flip([2])
+ elif mode == 2:
+ return img.flip([2])
+ elif mode == 3:
+ return img.rot90(3, [2, 3])
+ elif mode == 4:
+ return img.rot90(2, [2, 3]).flip([2])
+ elif mode == 5:
+ return img.rot90(1, [2, 3])
+ elif mode == 6:
+ return img.rot90(2, [2, 3])
+ elif mode == 7:
+ return img.rot90(3, [2, 3]).flip([2])
+
+
+def augment_img_tensor(img, mode=0):
+ '''Kai Zhang (github: https://github.com/cszn)
+ '''
+ img_size = img.size()
+ img_np = img.data.cpu().numpy()
+ if len(img_size) == 3:
+ img_np = np.transpose(img_np, (1, 2, 0))
+ elif len(img_size) == 4:
+ img_np = np.transpose(img_np, (2, 3, 1, 0))
+ img_np = augment_img(img_np, mode=mode)
+ img_tensor = torch.from_numpy(np.ascontiguousarray(img_np))
+ if len(img_size) == 3:
+ img_tensor = img_tensor.permute(2, 0, 1)
+ elif len(img_size) == 4:
+ img_tensor = img_tensor.permute(3, 2, 0, 1)
+
+ return img_tensor.type_as(img)
+
+
+def augment_img_np3(img, mode=0):
+ if mode == 0:
+ return img
+ elif mode == 1:
+ return img.transpose(1, 0, 2)
+ elif mode == 2:
+ return img[::-1, :, :]
+ elif mode == 3:
+ img = img[::-1, :, :]
+ img = img.transpose(1, 0, 2)
+ return img
+ elif mode == 4:
+ return img[:, ::-1, :]
+ elif mode == 5:
+ img = img[:, ::-1, :]
+ img = img.transpose(1, 0, 2)
+ return img
+ elif mode == 6:
+ img = img[:, ::-1, :]
+ img = img[::-1, :, :]
+ return img
+ elif mode == 7:
+ img = img[:, ::-1, :]
+ img = img[::-1, :, :]
+ img = img.transpose(1, 0, 2)
+ return img
+
+
+def augment_imgs(img_list, hflip=True, rot=True):
+ # random horizontal flip, vertical flip, and/or transpose, each with prob. 0.5
+ hflip = hflip and random.random() < 0.5
+ vflip = rot and random.random() < 0.5
+ rot90 = rot and random.random() < 0.5
+
+ def _augment(img):
+ if hflip:
+ img = img[:, ::-1, :]
+ if vflip:
+ img = img[::-1, :, :]
+ if rot90:
+ img = img.transpose(1, 0, 2)
+ return img
+
+ return [_augment(img) for img in img_list]
+
+
+'''
+# --------------------------------------------
+# modcrop and shave
+# --------------------------------------------
+'''
+
+
+def modcrop(img_in, scale):
+ # img_in: Numpy, HWC or HW
+ img = np.copy(img_in)
+ if img.ndim == 2:
+ H, W = img.shape
+ H_r, W_r = H % scale, W % scale
+ img = img[:H - H_r, :W - W_r]
+ elif img.ndim == 3:
+ H, W, C = img.shape
+ H_r, W_r = H % scale, W % scale
+ img = img[:H - H_r, :W - W_r, :]
+ else:
+ raise ValueError('Wrong img ndim: [{:d}].'.format(img.ndim))
+ return img
+
+
+def shave(img_in, border=0):
+ # img_in: Numpy, HWC or HW
+ img = np.copy(img_in)
+ h, w = img.shape[:2]
+ img = img[border:h-border, border:w-border]
+ return img
+
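+# Sketch: modcrop trims H and W to multiples of the SR scale; shave removes a
+# border before computing metrics:
+#
+#   img = np.zeros((101, 99, 3), dtype=np.uint8)
+#   assert modcrop(img, scale=4).shape == (100, 96, 3)
+#   assert shave(img, border=4).shape == (93, 91, 3)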
+
+'''
+# --------------------------------------------
+# image processing process on numpy image
+# channel_convert(in_c, tar_type, img_list):
+# rgb2ycbcr(img, only_y=True):
+# bgr2ycbcr(img, only_y=True):
+# ycbcr2rgb(img):
+# --------------------------------------------
+'''
+
+
+def rgb2ycbcr(img, only_y=True):
+ '''same as matlab rgb2ycbcr
+ only_y: only return Y channel
+ Input:
+ uint8, [0, 255]
+ float, [0, 1]
+ '''
+ in_img_type = img.dtype
+ img = img.astype(np.float32)  # assign: astype returns a copy, it is not in-place
+ if in_img_type != np.uint8:
+ img *= 255.
+ # convert
+ if only_y:
+ rlt = np.dot(img, [65.481, 128.553, 24.966]) / 255.0 + 16.0
+ else:
+ rlt = np.matmul(img, [[65.481, -37.797, 112.0], [128.553, -74.203, -93.786],
+ [24.966, 112.0, -18.214]]) / 255.0 + [16, 128, 128]
+ if in_img_type == np.uint8:
+ rlt = rlt.round()
+ else:
+ rlt /= 255.
+ return rlt.astype(in_img_type)
+
+
+def ycbcr2rgb(img):
+ '''same as matlab ycbcr2rgb
+ Input:
+ uint8, [0, 255]
+ float, [0, 1]
+ '''
+ in_img_type = img.dtype
+ img = img.astype(np.float32)  # assign: astype returns a copy, it is not in-place
+ if in_img_type != np.uint8:
+ img *= 255.
+ # convert
+ rlt = np.matmul(img, [[0.00456621, 0.00456621, 0.00456621], [0, -0.00153632, 0.00791071],
+ [0.00625893, -0.00318811, 0]]) * 255.0 + [-222.921, 135.576, -276.836]
+ if in_img_type == np.uint8:
+ rlt = rlt.round()
+ else:
+ rlt /= 255.
+ return rlt.astype(in_img_type)
+
+
+def bgr2ycbcr(img, only_y=True):
+ '''bgr version of rgb2ycbcr
+ only_y: only return Y channel
+ Input:
+ uint8, [0, 255]
+ float, [0, 1]
+ '''
+ in_img_type = img.dtype
+ img = img.astype(np.float32)  # assign: astype returns a copy, it is not in-place
+ if in_img_type != np.uint8:
+ img *= 255.
+ # convert
+ if only_y:
+ rlt = np.dot(img, [24.966, 128.553, 65.481]) / 255.0 + 16.0
+ else:
+ rlt = np.matmul(img, [[24.966, 112.0, -18.214], [128.553, -74.203, -93.786],
+ [65.481, -37.797, 112.0]]) / 255.0 + [16, 128, 128]
+ if in_img_type == np.uint8:
+ rlt = rlt.round()
+ else:
+ rlt /= 255.
+ return rlt.astype(in_img_type)
+
+
+def channel_convert(in_c, tar_type, img_list):
+ # conversion among BGR, gray and y
+ if in_c == 3 and tar_type == 'gray': # BGR to gray
+ gray_list = [cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) for img in img_list]
+ return [np.expand_dims(img, axis=2) for img in gray_list]
+ elif in_c == 3 and tar_type == 'y': # BGR to y
+ y_list = [bgr2ycbcr(img, only_y=True) for img in img_list]
+ return [np.expand_dims(img, axis=2) for img in y_list]
+ elif in_c == 1 and tar_type == 'RGB': # gray/y to BGR
+ return [cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) for img in img_list]
+ else:
+ return img_list
+
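+# Sketch: extract the MATLAB-compatible Y channel from a list of BGR uint8
+# images (as used for Y-channel PSNR/SSIM evaluation):
+#
+#   imgs = [np.random.randint(0, 256, (16, 16, 3), dtype=np.uint8)]
+#   ys = channel_convert(in_c=3, tar_type='y', img_list=imgs)
+#   assert ys[0].shape == (16, 16, 1)  # Y channel, values in [16, 235]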
+
+'''
+# --------------------------------------------
+# metric, PSNR and SSIM
+# --------------------------------------------
+'''
+
+
+# --------------------------------------------
+# PSNR
+# --------------------------------------------
+def calculate_psnr(img1, img2, border=0):
+ # img1 and img2 have range [0, 255]
+ #img1 = img1.squeeze()
+ #img2 = img2.squeeze()
+ if not img1.shape == img2.shape:
+ raise ValueError('Input images must have the same dimensions.')
+ h, w = img1.shape[:2]
+ img1 = img1[border:h-border, border:w-border]
+ img2 = img2[border:h-border, border:w-border]
+
+ img1 = img1.astype(np.float64)
+ img2 = img2.astype(np.float64)
+ mse = np.mean((img1 - img2)**2)
+ if mse == 0:
+ return float('inf')
+ return 20 * math.log10(255.0 / math.sqrt(mse))
+
+
+# --------------------------------------------
+# SSIM
+# --------------------------------------------
+def calculate_ssim(img1, img2, border=0):
+ '''calculate SSIM
+ the same outputs as MATLAB's
+ img1, img2: [0, 255]
+ '''
+ #img1 = img1.squeeze()
+ #img2 = img2.squeeze()
+ if not img1.shape == img2.shape:
+ raise ValueError('Input images must have the same dimensions.')
+ h, w = img1.shape[:2]
+ img1 = img1[border:h-border, border:w-border]
+ img2 = img2[border:h-border, border:w-border]
+
+ if img1.ndim == 2:
+ return ssim(img1, img2)
+ elif img1.ndim == 3:
+ if img1.shape[2] == 3:
+ ssims = []
+ for i in range(3):
+ ssims.append(ssim(img1[:,:,i], img2[:,:,i]))
+ return np.array(ssims).mean()
+ elif img1.shape[2] == 1:
+ return ssim(np.squeeze(img1), np.squeeze(img2))
+ else:
+ raise ValueError('Wrong input image dimensions.')
+
+
+def ssim(img1, img2):
+ C1 = (0.01 * 255)**2
+ C2 = (0.03 * 255)**2
+
+ img1 = img1.astype(np.float64)
+ img2 = img2.astype(np.float64)
+ kernel = cv2.getGaussianKernel(11, 1.5)
+ window = np.outer(kernel, kernel.transpose())
+
+ mu1 = cv2.filter2D(img1, -1, window)[5:-5, 5:-5] # valid
+ mu2 = cv2.filter2D(img2, -1, window)[5:-5, 5:-5]
+ mu1_sq = mu1**2
+ mu2_sq = mu2**2
+ mu1_mu2 = mu1 * mu2
+ sigma1_sq = cv2.filter2D(img1**2, -1, window)[5:-5, 5:-5] - mu1_sq
+ sigma2_sq = cv2.filter2D(img2**2, -1, window)[5:-5, 5:-5] - mu2_sq
+ sigma12 = cv2.filter2D(img1 * img2, -1, window)[5:-5, 5:-5] - mu1_mu2
+
+ ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
+ (sigma1_sq + sigma2_sq + C2))
+ return ssim_map.mean()
+
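+# Sketch: both metrics expect same-shape inputs in the [0, 255] range:
+#
+#   ref = np.random.randint(0, 256, (64, 64, 3)).astype(np.float64)
+#   noisy = np.clip(ref + np.random.randn(64, 64, 3) * 5, 0, 255)
+#   print(calculate_psnr(ref, noisy))  # roughly 34 dB for sigma = 5 noise
+#   print(calculate_ssim(ref, noisy))
+#   assert calculate_psnr(ref, ref) == float('inf')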
+
+'''
+# --------------------------------------------
+# matlab's bicubic imresize (numpy and torch) [0, 1]
+# --------------------------------------------
+'''
+
+
+# matlab 'imresize' function; currently only 'bicubic' is supported
+def cubic(x):
+ absx = torch.abs(x)
+ absx2 = absx**2
+ absx3 = absx**3
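+ # This is Keys' cubic convolution kernel with a = -0.5 (MATLAB's choice):
+ #   W(x) = 1.5|x|^3 - 2.5|x|^2 + 1           for |x| <= 1
+ #   W(x) = -0.5|x|^3 + 2.5|x|^2 - 4|x| + 2   for 1 < |x| <= 2, else 0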
+ return (1.5*absx3 - 2.5*absx2 + 1) * ((absx <= 1).type_as(absx)) + \
+ (-0.5*absx3 + 2.5*absx2 - 4*absx + 2) * (((absx > 1)*(absx <= 2)).type_as(absx))
+
+
+def calculate_weights_indices(in_length, out_length, scale, kernel, kernel_width, antialiasing):
+ if (scale < 1) and (antialiasing):
+ # Use a modified kernel to simultaneously interpolate and antialias (larger kernel width)
+ kernel_width = kernel_width / scale
+
+ # Output-space coordinates
+ x = torch.linspace(1, out_length, out_length)
+
+ # Input-space coordinates. Calculate the inverse mapping such that 0.5
+ # in output space maps to 0.5 in input space, and 0.5+scale in output
+ # space maps to 1.5 in input space.
+ u = x / scale + 0.5 * (1 - 1 / scale)
+
+ # What is the left-most pixel that can be involved in the computation?
+ left = torch.floor(u - kernel_width / 2)
+
+ # What is the maximum number of pixels that can be involved in the
+ # computation? Note: it's OK to use an extra pixel here; if the
+ # corresponding weights are all zero, it will be eliminated at the end
+ # of this function.
+ P = math.ceil(kernel_width) + 2
+
+ # The indices of the input pixels involved in computing the k-th output
+ # pixel are in row k of the indices matrix.
+ indices = left.view(out_length, 1).expand(out_length, P) + torch.linspace(0, P - 1, P).view(
+ 1, P).expand(out_length, P)
+
+ # The weights used to compute the k-th output pixel are in row k of the
+ # weights matrix.
+ distance_to_center = u.view(out_length, 1).expand(out_length, P) - indices
+ # apply cubic kernel
+ if (scale < 1) and (antialiasing):
+ weights = scale * cubic(distance_to_center * scale)
+ else:
+ weights = cubic(distance_to_center)
+ # Normalize the weights matrix so that each row sums to 1.
+ weights_sum = torch.sum(weights, 1).view(out_length, 1)
+ weights = weights / weights_sum.expand(out_length, P)
+
+ # If a column in weights is all zero, get rid of it. only consider the first and last column.
+ weights_zero_tmp = torch.sum((weights == 0), 0)
+ if not math.isclose(weights_zero_tmp[0], 0, rel_tol=1e-6):
+ indices = indices.narrow(1, 1, P - 2)
+ weights = weights.narrow(1, 1, P - 2)
+ if not math.isclose(weights_zero_tmp[-1], 0, rel_tol=1e-6):
+ indices = indices.narrow(1, 0, P - 2)
+ weights = weights.narrow(1, 0, P - 2)
+ weights = weights.contiguous()
+ indices = indices.contiguous()
+ sym_len_s = -indices.min() + 1
+ sym_len_e = indices.max() - in_length
+ indices = indices + sym_len_s - 1
+ return weights, indices, int(sym_len_s), int(sym_len_e)
+
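+# Interpretation sketch: row i of `indices` locates the input pixels (in the
+# symmetrically padded signal) used for output pixel i, and each row of
+# `weights` sums to 1:
+#
+#   w, idx, pad_s, pad_e = calculate_weights_indices(
+#       in_length=8, out_length=4, scale=0.5, kernel='cubic',
+#       kernel_width=4, antialiasing=True)
+#   assert torch.allclose(w.sum(1), torch.ones(4))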
+
+# --------------------------------------------
+# imresize for tensor image [0, 1]
+# --------------------------------------------
+def imresize(img, scale, antialiasing=True):
+ # Now the scale should be the same for H and W
+ # input: img: pytorch tensor, CHW or HW [0,1]
+ # output: CHW or HW [0,1] w/o round
+ need_squeeze = True if img.dim() == 2 else False
+ if need_squeeze:
+ img.unsqueeze_(0)
+ in_C, in_H, in_W = img.size()
+ out_C, out_H, out_W = in_C, math.ceil(in_H * scale), math.ceil(in_W * scale)
+ kernel_width = 4
+ kernel = 'cubic'
+
+ # Return the desired dimension order for performing the resize. The
+ # strategy is to perform the resize first along the dimension with the
+ # smallest scale factor.
+ # Now we do not support this.
+
+ # get weights and indices
+ weights_H, indices_H, sym_len_Hs, sym_len_He = calculate_weights_indices(
+ in_H, out_H, scale, kernel, kernel_width, antialiasing)
+ weights_W, indices_W, sym_len_Ws, sym_len_We = calculate_weights_indices(
+ in_W, out_W, scale, kernel, kernel_width, antialiasing)
+ # process H dimension
+ # symmetric copying
+ img_aug = torch.FloatTensor(in_C, in_H + sym_len_Hs + sym_len_He, in_W)
+ img_aug.narrow(1, sym_len_Hs, in_H).copy_(img)
+
+ sym_patch = img[:, :sym_len_Hs, :]
+ inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(1, inv_idx)
+ img_aug.narrow(1, 0, sym_len_Hs).copy_(sym_patch_inv)
+
+ sym_patch = img[:, -sym_len_He:, :]
+ inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(1, inv_idx)
+ img_aug.narrow(1, sym_len_Hs + in_H, sym_len_He).copy_(sym_patch_inv)
+
+ out_1 = torch.FloatTensor(in_C, out_H, in_W)
+ kernel_width = weights_H.size(1)
+ for i in range(out_H):
+ idx = int(indices_H[i][0])
+ for j in range(out_C):
+ out_1[j, i, :] = img_aug[j, idx:idx + kernel_width, :].transpose(0, 1).mv(weights_H[i])
+
+ # process W dimension
+ # symmetric copying
+ out_1_aug = torch.FloatTensor(in_C, out_H, in_W + sym_len_Ws + sym_len_We)
+ out_1_aug.narrow(2, sym_len_Ws, in_W).copy_(out_1)
+
+ sym_patch = out_1[:, :, :sym_len_Ws]
+ inv_idx = torch.arange(sym_patch.size(2) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(2, inv_idx)
+ out_1_aug.narrow(2, 0, sym_len_Ws).copy_(sym_patch_inv)
+
+ sym_patch = out_1[:, :, -sym_len_We:]
+ inv_idx = torch.arange(sym_patch.size(2) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(2, inv_idx)
+ out_1_aug.narrow(2, sym_len_Ws + in_W, sym_len_We).copy_(sym_patch_inv)
+
+ out_2 = torch.FloatTensor(in_C, out_H, out_W)
+ kernel_width = weights_W.size(1)
+ for i in range(out_W):
+ idx = int(indices_W[i][0])
+ for j in range(out_C):
+ out_2[j, :, i] = out_1_aug[j, :, idx:idx + kernel_width].mv(weights_W[i])
+ if need_squeeze:
+ out_2.squeeze_()
+ return out_2
+
+
+# --------------------------------------------
+# imresize for numpy image [0, 1]
+# --------------------------------------------
+def imresize_np(img, scale, antialiasing=True):
+ # Now the scale should be the same for H and W
+ # input: img: Numpy, HWC or HW [0,1]
+ # output: HWC or HW [0,1] w/o round
+ img = torch.from_numpy(img)
+ need_squeeze = True if img.dim() == 2 else False
+ if need_squeeze:
+ img.unsqueeze_(2)
+
+ in_H, in_W, in_C = img.size()
+ out_C, out_H, out_W = in_C, math.ceil(in_H * scale), math.ceil(in_W * scale)
+ kernel_width = 4
+ kernel = 'cubic'
+
+ # Return the desired dimension order for performing the resize. The
+ # strategy is to perform the resize first along the dimension with the
+ # smallest scale factor.
+ # Now we do not support this.
+
+ # get weights and indices
+ weights_H, indices_H, sym_len_Hs, sym_len_He = calculate_weights_indices(
+ in_H, out_H, scale, kernel, kernel_width, antialiasing)
+ weights_W, indices_W, sym_len_Ws, sym_len_We = calculate_weights_indices(
+ in_W, out_W, scale, kernel, kernel_width, antialiasing)
+ # process H dimension
+ # symmetric copying
+ img_aug = torch.FloatTensor(in_H + sym_len_Hs + sym_len_He, in_W, in_C)
+ img_aug.narrow(0, sym_len_Hs, in_H).copy_(img)
+
+ sym_patch = img[:sym_len_Hs, :, :]
+ inv_idx = torch.arange(sym_patch.size(0) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(0, inv_idx)
+ img_aug.narrow(0, 0, sym_len_Hs).copy_(sym_patch_inv)
+
+ sym_patch = img[-sym_len_He:, :, :]
+ inv_idx = torch.arange(sym_patch.size(0) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(0, inv_idx)
+ img_aug.narrow(0, sym_len_Hs + in_H, sym_len_He).copy_(sym_patch_inv)
+
+ out_1 = torch.FloatTensor(out_H, in_W, in_C)
+ kernel_width = weights_H.size(1)
+ for i in range(out_H):
+ idx = int(indices_H[i][0])
+ for j in range(out_C):
+ out_1[i, :, j] = img_aug[idx:idx + kernel_width, :, j].transpose(0, 1).mv(weights_H[i])
+
+ # process W dimension
+ # symmetric copying
+ out_1_aug = torch.FloatTensor(out_H, in_W + sym_len_Ws + sym_len_We, in_C)
+ out_1_aug.narrow(1, sym_len_Ws, in_W).copy_(out_1)
+
+ sym_patch = out_1[:, :sym_len_Ws, :]
+ inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(1, inv_idx)
+ out_1_aug.narrow(1, 0, sym_len_Ws).copy_(sym_patch_inv)
+
+ sym_patch = out_1[:, -sym_len_We:, :]
+ inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
+ sym_patch_inv = sym_patch.index_select(1, inv_idx)
+ out_1_aug.narrow(1, sym_len_Ws + in_W, sym_len_We).copy_(sym_patch_inv)
+
+ out_2 = torch.FloatTensor(out_H, out_W, in_C)
+ kernel_width = weights_W.size(1)
+ for i in range(out_W):
+ idx = int(indices_W[i][0])
+ for j in range(out_C):
+ out_2[:, i, j] = out_1_aug[:, idx:idx + kernel_width, j].mv(weights_W[i])
+ if need_squeeze:
+ out_2.squeeze_()
+
+ return out_2.numpy()
+
+
+if __name__ == '__main__':
+ # Minimal self-test on a synthetic image (replaces the commented-out demo
+ # that required a local 'test.bmp').
+ img = uint2single(np.random.randint(0, 256, (64, 64, 3), dtype=np.uint8))
+ img_bicubic = imresize_np(img, 1/4)
+ print('imresize_np:', img.shape, '->', img_bicubic.shape)
\ No newline at end of file
diff --git a/ldm/modules/midas/__init__.py b/ldm/modules/midas/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/modules/midas/api.py b/ldm/modules/midas/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..b58ebbffd942a2fc22264f0ab47e400c26b9f41c
--- /dev/null
+++ b/ldm/modules/midas/api.py
@@ -0,0 +1,170 @@
+# based on https://github.com/isl-org/MiDaS
+
+import cv2
+import torch
+import torch.nn as nn
+from torchvision.transforms import Compose
+
+from ldm.modules.midas.midas.dpt_depth import DPTDepthModel
+from ldm.modules.midas.midas.midas_net import MidasNet
+from ldm.modules.midas.midas.midas_net_custom import MidasNet_small
+from ldm.modules.midas.midas.transforms import Resize, NormalizeImage, PrepareForNet
+
+
+ISL_PATHS = {
+ "dpt_large": "midas_models/dpt_large-midas-2f21e586.pt",
+ "dpt_hybrid": "midas_models/dpt_hybrid-midas-501f0c75.pt",
+ "midas_v21": "",
+ "midas_v21_small": "",
+}
+
+
+def disabled_train(self, mode=True):
+ """Overwrite model.train with this function to make sure train/eval mode
+ does not change anymore."""
+ return self
+
+
+def load_midas_transform(model_type):
+ # https://github.com/isl-org/MiDaS/blob/master/run.py
+ # load transform only
+ if model_type == "dpt_large": # DPT-Large
+ net_w, net_h = 384, 384
+ resize_mode = "minimal"
+ normalization = NormalizeImage(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
+
+ elif model_type == "dpt_hybrid": # DPT-Hybrid
+ net_w, net_h = 384, 384
+ resize_mode = "minimal"
+ normalization = NormalizeImage(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
+
+ elif model_type == "midas_v21":
+ net_w, net_h = 384, 384
+ resize_mode = "upper_bound"
+ normalization = NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
+
+ elif model_type == "midas_v21_small":
+ net_w, net_h = 256, 256
+ resize_mode = "upper_bound"
+ normalization = NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
+
+ else:
+ assert False, f"model_type '{model_type}' not implemented, use: --model_type large"
+
+ transform = Compose(
+ [
+ Resize(
+ net_w,
+ net_h,
+ resize_target=None,
+ keep_aspect_ratio=True,
+ ensure_multiple_of=32,
+ resize_method=resize_mode,
+ image_interpolation_method=cv2.INTER_CUBIC,
+ ),
+ normalization,
+ PrepareForNet(),
+ ]
+ )
+
+ return transform
+
+
+def load_model(model_type):
+ # https://github.com/isl-org/MiDaS/blob/master/run.py
+ # load network
+ model_path = ISL_PATHS[model_type]
+ if model_type == "dpt_large": # DPT-Large
+ model = DPTDepthModel(
+ path=model_path,
+ backbone="vitl16_384",
+ non_negative=True,
+ )
+ net_w, net_h = 384, 384
+ resize_mode = "minimal"
+ normalization = NormalizeImage(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
+
+ elif model_type == "dpt_hybrid": # DPT-Hybrid
+ model = DPTDepthModel(
+ path=model_path,
+ backbone="vitb_rn50_384",
+ non_negative=True,
+ )
+ net_w, net_h = 384, 384
+ resize_mode = "minimal"
+ normalization = NormalizeImage(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
+
+ elif model_type == "midas_v21":
+ model = MidasNet(model_path, non_negative=True)
+ net_w, net_h = 384, 384
+ resize_mode = "upper_bound"
+ normalization = NormalizeImage(
+ mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+ )
+
+ elif model_type == "midas_v21_small":
+ model = MidasNet_small(model_path, features=64, backbone="efficientnet_lite3", exportable=True,
+ non_negative=True, blocks={'expand': True})
+ net_w, net_h = 256, 256
+ resize_mode = "upper_bound"
+ normalization = NormalizeImage(
+ mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+ )
+
+ else:
+ print(f"model_type '{model_type}' not implemented, use: --model_type large")
+ assert False
+
+ transform = Compose(
+ [
+ Resize(
+ net_w,
+ net_h,
+ resize_target=None,
+ keep_aspect_ratio=True,
+ ensure_multiple_of=32,
+ resize_method=resize_mode,
+ image_interpolation_method=cv2.INTER_CUBIC,
+ ),
+ normalization,
+ PrepareForNet(),
+ ]
+ )
+
+ return model.eval(), transform
+
+
+class MiDaSInference(nn.Module):
+ MODEL_TYPES_TORCH_HUB = [
+ "DPT_Large",
+ "DPT_Hybrid",
+ "MiDaS_small"
+ ]
+ MODEL_TYPES_ISL = [
+ "dpt_large",
+ "dpt_hybrid",
+ "midas_v21",
+ "midas_v21_small",
+ ]
+
+ def __init__(self, model_type):
+ super().__init__()
+ assert (model_type in self.MODEL_TYPES_ISL)
+ model, _ = load_model(model_type)
+ self.model = model
+ self.model.train = disabled_train
+
+ def forward(self, x):
+ # x in [0, 1], already preprocessed by the MiDaS transform (see load_midas_transform)
+ # NOTE: we expect that the correct transform has been called during dataloading.
+ with torch.no_grad():
+ prediction = self.model(x)
+ prediction = torch.nn.functional.interpolate(
+ prediction.unsqueeze(1),
+ size=x.shape[2:],
+ mode="bicubic",
+ align_corners=False,
+ )
+ assert prediction.shape == (x.shape[0], 1, x.shape[2], x.shape[3])
+ return prediction
+
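+# Hedged usage sketch: requires the dpt_hybrid checkpoint to be present at
+# ISL_PATHS["dpt_hybrid"]; the input is assumed to be already resized and
+# normalized by the MiDaS transform.
+#
+#   midas = MiDaSInference(model_type="dpt_hybrid").cuda()
+#   x = torch.randn(1, 3, 384, 384).cuda()  # stand-in for a transformed image
+#   depth = midas(x)                        # (1, 1, 384, 384) relative inverse depth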
diff --git a/ldm/modules/midas/midas/__init__.py b/ldm/modules/midas/midas/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm/modules/midas/midas/base_model.py b/ldm/modules/midas/midas/base_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cf430239b47ec5ec07531263f26f5c24a2311cd
--- /dev/null
+++ b/ldm/modules/midas/midas/base_model.py
@@ -0,0 +1,16 @@
+import torch
+
+
+class BaseModel(torch.nn.Module):
+ def load(self, path):
+ """Load model from file.
+
+ Args:
+ path (str): file path
+ """
+ parameters = torch.load(path, map_location=torch.device('cpu'))
+
+ if "optimizer" in parameters:
+ parameters = parameters["model"]
+
+ self.load_state_dict(parameters)
diff --git a/ldm/modules/midas/midas/blocks.py b/ldm/modules/midas/midas/blocks.py
new file mode 100644
index 0000000000000000000000000000000000000000..2145d18fa98060a618536d9a64fe6589e9be4f78
--- /dev/null
+++ b/ldm/modules/midas/midas/blocks.py
@@ -0,0 +1,342 @@
+import torch
+import torch.nn as nn
+
+from .vit import (
+ _make_pretrained_vitb_rn50_384,
+ _make_pretrained_vitl16_384,
+ _make_pretrained_vitb16_384,
+ forward_vit,
+)
+
+def _make_encoder(backbone, features, use_pretrained, groups=1, expand=False, exportable=True, hooks=None, use_vit_only=False, use_readout="ignore",):
+ if backbone == "vitl16_384":
+ pretrained = _make_pretrained_vitl16_384(
+ use_pretrained, hooks=hooks, use_readout=use_readout
+ )
+ scratch = _make_scratch(
+ [256, 512, 1024, 1024], features, groups=groups, expand=expand
+ ) # ViT-L/16 - 85.0% Top1 (backbone)
+ elif backbone == "vitb_rn50_384":
+ pretrained = _make_pretrained_vitb_rn50_384(
+ use_pretrained,
+ hooks=hooks,
+ use_vit_only=use_vit_only,
+ use_readout=use_readout,
+ )
+ scratch = _make_scratch(
+ [256, 512, 768, 768], features, groups=groups, expand=expand
+ ) # ViT-B/16 + ResNet-50 hybrid (backbone)
+ elif backbone == "vitb16_384":
+ pretrained = _make_pretrained_vitb16_384(
+ use_pretrained, hooks=hooks, use_readout=use_readout
+ )
+ scratch = _make_scratch(
+ [96, 192, 384, 768], features, groups=groups, expand=expand
+ ) # ViT-B/16 - 84.6% Top1 (backbone)
+ elif backbone == "resnext101_wsl":
+ pretrained = _make_pretrained_resnext101_wsl(use_pretrained)
+ scratch = _make_scratch([256, 512, 1024, 2048], features, groups=groups, expand=expand) # resnext101_wsl
+ elif backbone == "efficientnet_lite3":
+ pretrained = _make_pretrained_efficientnet_lite3(use_pretrained, exportable=exportable)
+ scratch = _make_scratch([32, 48, 136, 384], features, groups=groups, expand=expand) # efficientnet_lite3
+ else:
+ print(f"Backbone '{backbone}' not implemented")
+ assert False
+
+ return pretrained, scratch
+
+
+def _make_scratch(in_shape, out_shape, groups=1, expand=False):
+ scratch = nn.Module()
+
+ out_shape1 = out_shape
+ out_shape2 = out_shape
+ out_shape3 = out_shape
+ out_shape4 = out_shape
+ if expand==True:
+ out_shape1 = out_shape
+ out_shape2 = out_shape*2
+ out_shape3 = out_shape*4
+ out_shape4 = out_shape*8
+
+ scratch.layer1_rn = nn.Conv2d(
+ in_shape[0], out_shape1, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
+ )
+ scratch.layer2_rn = nn.Conv2d(
+ in_shape[1], out_shape2, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
+ )
+ scratch.layer3_rn = nn.Conv2d(
+ in_shape[2], out_shape3, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
+ )
+ scratch.layer4_rn = nn.Conv2d(
+ in_shape[3], out_shape4, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
+ )
+
+ return scratch
+
+
+def _make_pretrained_efficientnet_lite3(use_pretrained, exportable=False):
+ efficientnet = torch.hub.load(
+ "rwightman/gen-efficientnet-pytorch",
+ "tf_efficientnet_lite3",
+ pretrained=use_pretrained,
+ exportable=exportable
+ )
+ return _make_efficientnet_backbone(efficientnet)
+
+
+def _make_efficientnet_backbone(effnet):
+ pretrained = nn.Module()
+
+ pretrained.layer1 = nn.Sequential(
+ effnet.conv_stem, effnet.bn1, effnet.act1, *effnet.blocks[0:2]
+ )
+ pretrained.layer2 = nn.Sequential(*effnet.blocks[2:3])
+ pretrained.layer3 = nn.Sequential(*effnet.blocks[3:5])
+ pretrained.layer4 = nn.Sequential(*effnet.blocks[5:9])
+
+ return pretrained
+
+
+def _make_resnet_backbone(resnet):
+ pretrained = nn.Module()
+ pretrained.layer1 = nn.Sequential(
+ resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet.layer1
+ )
+
+ pretrained.layer2 = resnet.layer2
+ pretrained.layer3 = resnet.layer3
+ pretrained.layer4 = resnet.layer4
+
+ return pretrained
+
+
+def _make_pretrained_resnext101_wsl(use_pretrained):
+ resnet = torch.hub.load("facebookresearch/WSL-Images", "resnext101_32x8d_wsl")
+ return _make_resnet_backbone(resnet)
+
+
+
+class Interpolate(nn.Module):
+ """Interpolation module.
+ """
+
+ def __init__(self, scale_factor, mode, align_corners=False):
+ """Init.
+
+ Args:
+ scale_factor (float): scaling
+ mode (str): interpolation mode
+ """
+ super(Interpolate, self).__init__()
+
+ self.interp = nn.functional.interpolate
+ self.scale_factor = scale_factor
+ self.mode = mode
+ self.align_corners = align_corners
+
+ def forward(self, x):
+ """Forward pass.
+
+ Args:
+ x (tensor): input
+
+ Returns:
+ tensor: interpolated data
+ """
+
+ x = self.interp(
+ x, scale_factor=self.scale_factor, mode=self.mode, align_corners=self.align_corners
+ )
+
+ return x
+
+
+class ResidualConvUnit(nn.Module):
+ """Residual convolution module.
+ """
+
+ def __init__(self, features):
+ """Init.
+
+ Args:
+ features (int): number of features
+ """
+ super().__init__()
+
+ self.conv1 = nn.Conv2d(
+ features, features, kernel_size=3, stride=1, padding=1, bias=True
+ )
+
+ self.conv2 = nn.Conv2d(
+ features, features, kernel_size=3, stride=1, padding=1, bias=True
+ )
+
+ self.relu = nn.ReLU(inplace=True)
+
+ def forward(self, x):
+ """Forward pass.
+
+ Args:
+ x (tensor): input
+
+ Returns:
+ tensor: output
+ """
+ out = self.relu(x)
+ out = self.conv1(out)
+ out = self.relu(out)
+ out = self.conv2(out)
+
+ return out + x
+
+
+class FeatureFusionBlock(nn.Module):
+ """Feature fusion block.
+ """
+
+ def __init__(self, features):
+ """Init.
+
+ Args:
+ features (int): number of features
+ """
+ super(FeatureFusionBlock, self).__init__()
+
+ self.resConfUnit1 = ResidualConvUnit(features)
+ self.resConfUnit2 = ResidualConvUnit(features)
+
+ def forward(self, *xs):
+ """Forward pass.
+
+ Returns:
+ tensor: output
+ """
+ output = xs[0]
+
+ if len(xs) == 2:
+ output += self.resConfUnit1(xs[1])
+
+ output = self.resConfUnit2(output)
+
+ output = nn.functional.interpolate(
+ output, scale_factor=2, mode="bilinear", align_corners=True
+ )
+
+ return output
+
+
+
+
+class ResidualConvUnit_custom(nn.Module):
+ """Residual convolution module.
+ """
+
+ def __init__(self, features, activation, bn):
+ """Init.
+
+ Args:
+ features (int): number of features
+ """
+ super().__init__()
+
+ self.bn = bn
+
+ self.groups=1
+
+ self.conv1 = nn.Conv2d(
+ features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups
+ )
+
+ self.conv2 = nn.Conv2d(
+ features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups
+ )
+
+ if self.bn==True:
+ self.bn1 = nn.BatchNorm2d(features)
+ self.bn2 = nn.BatchNorm2d(features)
+
+ self.activation = activation
+
+ self.skip_add = nn.quantized.FloatFunctional()
+
+ def forward(self, x):
+ """Forward pass.
+
+ Args:
+ x (tensor): input
+
+ Returns:
+ tensor: output
+ """
+
+ out = self.activation(x)
+ out = self.conv1(out)
+ if self.bn==True:
+ out = self.bn1(out)
+
+ out = self.activation(out)
+ out = self.conv2(out)
+ if self.bn==True:
+ out = self.bn2(out)
+
+ if self.groups > 1:
+ out = self.conv_merge(out)
+
+ return self.skip_add.add(out, x)
+
+ # return out + x
+
+
+class FeatureFusionBlock_custom(nn.Module):
+ """Feature fusion block.
+ """
+
+ def __init__(self, features, activation, deconv=False, bn=False, expand=False, align_corners=True):
+ """Init.
+
+ Args:
+ features (int): number of features
+ """
+ super(FeatureFusionBlock_custom, self).__init__()
+
+ self.deconv = deconv
+ self.align_corners = align_corners
+
+ self.groups=1
+
+ self.expand = expand
+ out_features = features
+ if self.expand==True:
+ out_features = features//2
+
+ self.out_conv = nn.Conv2d(features, out_features, kernel_size=1, stride=1, padding=0, bias=True, groups=1)
+
+ self.resConfUnit1 = ResidualConvUnit_custom(features, activation, bn)
+ self.resConfUnit2 = ResidualConvUnit_custom(features, activation, bn)
+
+ self.skip_add = nn.quantized.FloatFunctional()
+
+ def forward(self, *xs):
+ """Forward pass.
+
+ Returns:
+ tensor: output
+ """
+ output = xs[0]
+
+ if len(xs) == 2:
+ res = self.resConfUnit1(xs[1])
+ output = self.skip_add.add(output, res)
+ # output += res
+
+ output = self.resConfUnit2(output)
+
+ output = nn.functional.interpolate(
+ output, scale_factor=2, mode="bilinear", align_corners=self.align_corners
+ )
+
+ output = self.out_conv(output)
+
+ return output
+
diff --git a/ldm/modules/midas/midas/dpt_depth.py b/ldm/modules/midas/midas/dpt_depth.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e9aab5d2767dffea39da5b3f30e2798688216f1
--- /dev/null
+++ b/ldm/modules/midas/midas/dpt_depth.py
@@ -0,0 +1,109 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from .base_model import BaseModel
+from .blocks import (
+ FeatureFusionBlock,
+ FeatureFusionBlock_custom,
+ Interpolate,
+ _make_encoder,
+ forward_vit,
+)
+
+
+def _make_fusion_block(features, use_bn):
+ return FeatureFusionBlock_custom(
+ features,
+ nn.ReLU(False),
+ deconv=False,
+ bn=use_bn,
+ expand=False,
+ align_corners=True,
+ )
+
+
+class DPT(BaseModel):
+ def __init__(
+ self,
+ head,
+ features=256,
+ backbone="vitb_rn50_384",
+ readout="project",
+ channels_last=False,
+ use_bn=False,
+ ):
+
+ super(DPT, self).__init__()
+
+ self.channels_last = channels_last
+
+ hooks = {
+ "vitb_rn50_384": [0, 1, 8, 11],
+ "vitb16_384": [2, 5, 8, 11],
+ "vitl16_384": [5, 11, 17, 23],
+ }
+
+ # Instantiate backbone and reassemble blocks
+ self.pretrained, self.scratch = _make_encoder(
+ backbone,
+ features,
+ False, # use_pretrained: set to True to initialize the backbone with ImageNet weights
+ groups=1,
+ expand=False,
+ exportable=False,
+ hooks=hooks[backbone],
+ use_readout=readout,
+ )
+
+ self.scratch.refinenet1 = _make_fusion_block(features, use_bn)
+ self.scratch.refinenet2 = _make_fusion_block(features, use_bn)
+ self.scratch.refinenet3 = _make_fusion_block(features, use_bn)
+ self.scratch.refinenet4 = _make_fusion_block(features, use_bn)
+
+ self.scratch.output_conv = head
+
+
+ def forward(self, x):
+ if self.channels_last == True:
+ x = x.contiguous(memory_format=torch.channels_last) # assign: contiguous() returns a new tensor
+
+ layer_1, layer_2, layer_3, layer_4 = forward_vit(self.pretrained, x)
+
+ layer_1_rn = self.scratch.layer1_rn(layer_1)
+ layer_2_rn = self.scratch.layer2_rn(layer_2)
+ layer_3_rn = self.scratch.layer3_rn(layer_3)
+ layer_4_rn = self.scratch.layer4_rn(layer_4)
+
+ path_4 = self.scratch.refinenet4(layer_4_rn)
+ path_3 = self.scratch.refinenet3(path_4, layer_3_rn)
+ path_2 = self.scratch.refinenet2(path_3, layer_2_rn)
+ path_1 = self.scratch.refinenet1(path_2, layer_1_rn)
+
+ out = self.scratch.output_conv(path_1)
+
+ return out
+
+
+class DPTDepthModel(DPT):
+ def __init__(self, path=None, non_negative=True, **kwargs):
+ features = kwargs["features"] if "features" in kwargs else 256
+
+ head = nn.Sequential(
+ nn.Conv2d(features, features // 2, kernel_size=3, stride=1, padding=1),
+ Interpolate(scale_factor=2, mode="bilinear", align_corners=True),
+ nn.Conv2d(features // 2, 32, kernel_size=3, stride=1, padding=1),
+ nn.ReLU(True),
+ nn.Conv2d(32, 1, kernel_size=1, stride=1, padding=0),
+ nn.ReLU(True) if non_negative else nn.Identity(),
+ nn.Identity(),
+ )
+
+ super().__init__(head, **kwargs)
+
+ if path is not None:
+ self.load(path)
+
+ def forward(self, x):
+ return super().forward(x).squeeze(dim=1)
+
diff --git a/ldm/modules/midas/midas/midas_net.py b/ldm/modules/midas/midas/midas_net.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a954977800b0a0f48807e80fa63041910e33c1f
--- /dev/null
+++ b/ldm/modules/midas/midas/midas_net.py
@@ -0,0 +1,76 @@
+"""MidashNet: Network for monocular depth estimation trained by mixing several datasets.
+This file contains code that is adapted from
+https://github.com/thomasjpfan/pytorch_refinenet/blob/master/pytorch_refinenet/refinenet/refinenet_4cascade.py
+"""
+import torch
+import torch.nn as nn
+
+from .base_model import BaseModel
+from .blocks import FeatureFusionBlock, Interpolate, _make_encoder
+
+
+class MidasNet(BaseModel):
+ """Network for monocular depth estimation.
+ """
+
+ def __init__(self, path=None, features=256, non_negative=True):
+ """Init.
+
+ Args:
+ path (str, optional): Path to saved model. Defaults to None.
+ features (int, optional): Number of features. Defaults to 256.
+ Note: the encoder backbone is fixed to resnext101_wsl (there is no backbone argument).
+ """
+ print("Loading weights: ", path)
+
+ super(MidasNet, self).__init__()
+
+ use_pretrained = False if path is None else True
+
+ self.pretrained, self.scratch = _make_encoder(backbone="resnext101_wsl", features=features, use_pretrained=use_pretrained)
+
+ self.scratch.refinenet4 = FeatureFusionBlock(features)
+ self.scratch.refinenet3 = FeatureFusionBlock(features)
+ self.scratch.refinenet2 = FeatureFusionBlock(features)
+ self.scratch.refinenet1 = FeatureFusionBlock(features)
+
+ self.scratch.output_conv = nn.Sequential(
+ nn.Conv2d(features, 128, kernel_size=3, stride=1, padding=1),
+ Interpolate(scale_factor=2, mode="bilinear"),
+ nn.Conv2d(128, 32, kernel_size=3, stride=1, padding=1),
+ nn.ReLU(True),
+ nn.Conv2d(32, 1, kernel_size=1, stride=1, padding=0),
+ nn.ReLU(True) if non_negative else nn.Identity(),
+ )
+
+ if path:
+ self.load(path)
+
+ def forward(self, x):
+ """Forward pass.
+
+ Args:
+ x (tensor): input data (image)
+
+ Returns:
+ tensor: depth
+ """
+
+ layer_1 = self.pretrained.layer1(x)
+ layer_2 = self.pretrained.layer2(layer_1)
+ layer_3 = self.pretrained.layer3(layer_2)
+ layer_4 = self.pretrained.layer4(layer_3)
+
+ layer_1_rn = self.scratch.layer1_rn(layer_1)
+ layer_2_rn = self.scratch.layer2_rn(layer_2)
+ layer_3_rn = self.scratch.layer3_rn(layer_3)
+ layer_4_rn = self.scratch.layer4_rn(layer_4)
+
+ path_4 = self.scratch.refinenet4(layer_4_rn)
+ path_3 = self.scratch.refinenet3(path_4, layer_3_rn)
+ path_2 = self.scratch.refinenet2(path_3, layer_2_rn)
+ path_1 = self.scratch.refinenet1(path_2, layer_1_rn)
+
+ out = self.scratch.output_conv(path_1)
+
+ return torch.squeeze(out, dim=1)
diff --git a/ldm/modules/midas/midas/midas_net_custom.py b/ldm/modules/midas/midas/midas_net_custom.py
new file mode 100644
index 0000000000000000000000000000000000000000..50e4acb5e53d5fabefe3dde16ab49c33c2b7797c
--- /dev/null
+++ b/ldm/modules/midas/midas/midas_net_custom.py
@@ -0,0 +1,128 @@
+"""MidashNet: Network for monocular depth estimation trained by mixing several datasets.
+This file contains code that is adapted from
+https://github.com/thomasjpfan/pytorch_refinenet/blob/master/pytorch_refinenet/refinenet/refinenet_4cascade.py
+"""
+import torch
+import torch.nn as nn
+
+from .base_model import BaseModel
+from .blocks import FeatureFusionBlock, FeatureFusionBlock_custom, Interpolate, _make_encoder
+
+
+class MidasNet_small(BaseModel):
+ """Network for monocular depth estimation.
+ """
+
+ def __init__(self, path=None, features=64, backbone="efficientnet_lite3", non_negative=True, exportable=True, channels_last=False, align_corners=True,
+ blocks={'expand': True}):
+ """Init.
+
+ Args:
+ path (str, optional): Path to saved model. Defaults to None.
+ features (int, optional): Number of features. Defaults to 64.
+ backbone (str, optional): Backbone network for encoder. Defaults to efficientnet_lite3.
+ """
+ print("Loading weights: ", path)
+
+ super(MidasNet_small, self).__init__()
+
+ use_pretrained = False if path else True
+
+ self.channels_last = channels_last
+ self.blocks = blocks
+ self.backbone = backbone
+
+ self.groups = 1
+
+ features1=features
+ features2=features
+ features3=features
+ features4=features
+ self.expand = False
+ if "expand" in self.blocks and self.blocks['expand'] == True:
+ self.expand = True
+ features1=features
+ features2=features*2
+ features3=features*4
+ features4=features*8
+
+ self.pretrained, self.scratch = _make_encoder(self.backbone, features, use_pretrained, groups=self.groups, expand=self.expand, exportable=exportable)
+
+ self.scratch.activation = nn.ReLU(False)
+
+ self.scratch.refinenet4 = FeatureFusionBlock_custom(features4, self.scratch.activation, deconv=False, bn=False, expand=self.expand, align_corners=align_corners)
+ self.scratch.refinenet3 = FeatureFusionBlock_custom(features3, self.scratch.activation, deconv=False, bn=False, expand=self.expand, align_corners=align_corners)
+ self.scratch.refinenet2 = FeatureFusionBlock_custom(features2, self.scratch.activation, deconv=False, bn=False, expand=self.expand, align_corners=align_corners)
+ self.scratch.refinenet1 = FeatureFusionBlock_custom(features1, self.scratch.activation, deconv=False, bn=False, align_corners=align_corners)
+
+
+ self.scratch.output_conv = nn.Sequential(
+ nn.Conv2d(features, features//2, kernel_size=3, stride=1, padding=1, groups=self.groups),
+ Interpolate(scale_factor=2, mode="bilinear"),
+ nn.Conv2d(features//2, 32, kernel_size=3, stride=1, padding=1),
+ self.scratch.activation,
+ nn.Conv2d(32, 1, kernel_size=1, stride=1, padding=0),
+ nn.ReLU(True) if non_negative else nn.Identity(),
+ nn.Identity(),
+ )
+
+ if path:
+ self.load(path)
+
+
+ def forward(self, x):
+ """Forward pass.
+
+ Args:
+ x (tensor): input data (image)
+
+ Returns:
+ tensor: depth
+ """
+ if self.channels_last==True:
+ print("self.channels_last = ", self.channels_last)
+ x = x.contiguous(memory_format=torch.channels_last) # assign: contiguous() is not in-place
+
+
+ layer_1 = self.pretrained.layer1(x)
+ layer_2 = self.pretrained.layer2(layer_1)
+ layer_3 = self.pretrained.layer3(layer_2)
+ layer_4 = self.pretrained.layer4(layer_3)
+
+ layer_1_rn = self.scratch.layer1_rn(layer_1)
+ layer_2_rn = self.scratch.layer2_rn(layer_2)
+ layer_3_rn = self.scratch.layer3_rn(layer_3)
+ layer_4_rn = self.scratch.layer4_rn(layer_4)
+
+
+ path_4 = self.scratch.refinenet4(layer_4_rn)
+ path_3 = self.scratch.refinenet3(path_4, layer_3_rn)
+ path_2 = self.scratch.refinenet2(path_3, layer_2_rn)
+ path_1 = self.scratch.refinenet1(path_2, layer_1_rn)
+
+ out = self.scratch.output_conv(path_1)
+
+ return torch.squeeze(out, dim=1)
+
+
+
+def fuse_model(m):
+ prev_previous_type = nn.Identity()
+ prev_previous_name = ''
+ previous_type = nn.Identity()
+ previous_name = ''
+ for name, module in m.named_modules():
+ if prev_previous_type == nn.Conv2d and previous_type == nn.BatchNorm2d and type(module) == nn.ReLU:
+ # print("FUSED ", prev_previous_name, previous_name, name)
+ torch.quantization.fuse_modules(m, [prev_previous_name, previous_name, name], inplace=True)
+ elif prev_previous_type == nn.Conv2d and previous_type == nn.BatchNorm2d:
+ # print("FUSED ", prev_previous_name, previous_name)
+ torch.quantization.fuse_modules(m, [prev_previous_name, previous_name], inplace=True)
+ # elif previous_type == nn.Conv2d and type(module) == nn.ReLU:
+ # print("FUSED ", previous_name, name)
+ # torch.quantization.fuse_modules(m, [previous_name, name], inplace=True)
+
+ prev_previous_type = previous_type
+ prev_previous_name = previous_name
+ previous_type = type(module)
+ previous_name = name
\ No newline at end of file
diff --git a/ldm/modules/midas/midas/transforms.py b/ldm/modules/midas/midas/transforms.py
new file mode 100644
index 0000000000000000000000000000000000000000..350cbc11662633ad7f8968eb10be2e7de6e384e9
--- /dev/null
+++ b/ldm/modules/midas/midas/transforms.py
@@ -0,0 +1,234 @@
+import numpy as np
+import cv2
+import math
+
+
+def apply_min_size(sample, size, image_interpolation_method=cv2.INTER_AREA):
+ """Rezise the sample to ensure the given size. Keeps aspect ratio.
+
+ Args:
+ sample (dict): sample
+ size (tuple): image size
+
+ Returns:
+ tuple: new size
+ """
+ shape = list(sample["disparity"].shape)
+
+ if shape[0] >= size[0] and shape[1] >= size[1]:
+ return sample
+
+ scale = [0, 0]
+ scale[0] = size[0] / shape[0]
+ scale[1] = size[1] / shape[1]
+
+ scale = max(scale)
+
+ shape[0] = math.ceil(scale * shape[0])
+ shape[1] = math.ceil(scale * shape[1])
+
+ # resize
+ sample["image"] = cv2.resize(
+ sample["image"], tuple(shape[::-1]), interpolation=image_interpolation_method
+ )
+
+ sample["disparity"] = cv2.resize(
+ sample["disparity"], tuple(shape[::-1]), interpolation=cv2.INTER_NEAREST
+ )
+ sample["mask"] = cv2.resize(
+ sample["mask"].astype(np.float32),
+ tuple(shape[::-1]),
+ interpolation=cv2.INTER_NEAREST,
+ )
+ sample["mask"] = sample["mask"].astype(bool)
+
+ return tuple(shape)
+
+
+class Resize(object):
+ """Resize sample to given size (width, height).
+ """
+
+ def __init__(
+ self,
+ width,
+ height,
+ resize_target=True,
+ keep_aspect_ratio=False,
+ ensure_multiple_of=1,
+ resize_method="lower_bound",
+ image_interpolation_method=cv2.INTER_AREA,
+ ):
+ """Init.
+
+ Args:
+ width (int): desired output width
+ height (int): desired output height
+ resize_target (bool, optional):
+ True: Resize the full sample (image, mask, target).
+ False: Resize image only.
+ Defaults to True.
+ keep_aspect_ratio (bool, optional):
+ True: Keep the aspect ratio of the input sample.
+ Output sample might not have the given width and height, and
+ resize behaviour depends on the parameter 'resize_method'.
+ Defaults to False.
+ ensure_multiple_of (int, optional):
+ Output width and height is constrained to be multiple of this parameter.
+ Defaults to 1.
+ resize_method (str, optional):
+ "lower_bound": Output will be at least as large as the given size.
+ "upper_bound": Output will be at max as large as the given size. (Output size might be smaller than given size.)
+ "minimal": Scale as least as possible. (Output size might be smaller than given size.)
+ Defaults to "lower_bound".
+ """
+ self.__width = width
+ self.__height = height
+
+ self.__resize_target = resize_target
+ self.__keep_aspect_ratio = keep_aspect_ratio
+ self.__multiple_of = ensure_multiple_of
+ self.__resize_method = resize_method
+ self.__image_interpolation_method = image_interpolation_method
+
+ def constrain_to_multiple_of(self, x, min_val=0, max_val=None):
+ y = (np.round(x / self.__multiple_of) * self.__multiple_of).astype(int)
+
+ if max_val is not None and y > max_val:
+ y = (np.floor(x / self.__multiple_of) * self.__multiple_of).astype(int)
+
+ if y < min_val:
+ y = (np.ceil(x / self.__multiple_of) * self.__multiple_of).astype(int)
+
+ return y
+
+ def get_size(self, width, height):
+ # determine new height and width
+ scale_height = self.__height / height
+ scale_width = self.__width / width
+
+ if self.__keep_aspect_ratio:
+ if self.__resize_method == "lower_bound":
+ # scale such that output size is lower bound
+ if scale_width > scale_height:
+ # fit width
+ scale_height = scale_width
+ else:
+ # fit height
+ scale_width = scale_height
+ elif self.__resize_method == "upper_bound":
+ # scale such that output size is upper bound
+ if scale_width < scale_height:
+ # fit width
+ scale_height = scale_width
+ else:
+ # fit height
+ scale_width = scale_height
+ elif self.__resize_method == "minimal":
+ # scale as little as possible
+ if abs(1 - scale_width) < abs(1 - scale_height):
+ # fit width
+ scale_height = scale_width
+ else:
+ # fit height
+ scale_width = scale_height
+ else:
+ raise ValueError(
+ f"resize_method {self.__resize_method} not implemented"
+ )
+
+ if self.__resize_method == "lower_bound":
+ new_height = self.constrain_to_multiple_of(
+ scale_height * height, min_val=self.__height
+ )
+ new_width = self.constrain_to_multiple_of(
+ scale_width * width, min_val=self.__width
+ )
+ elif self.__resize_method == "upper_bound":
+ new_height = self.constrain_to_multiple_of(
+ scale_height * height, max_val=self.__height
+ )
+ new_width = self.constrain_to_multiple_of(
+ scale_width * width, max_val=self.__width
+ )
+ elif self.__resize_method == "minimal":
+ new_height = self.constrain_to_multiple_of(scale_height * height)
+ new_width = self.constrain_to_multiple_of(scale_width * width)
+ else:
+ raise ValueError(f"resize_method {self.__resize_method} not implemented")
+
+ return (new_width, new_height)
+
+ def __call__(self, sample):
+ width, height = self.get_size(
+ sample["image"].shape[1], sample["image"].shape[0]
+ )
+
+ # resize sample
+ sample["image"] = cv2.resize(
+ sample["image"],
+ (width, height),
+ interpolation=self.__image_interpolation_method,
+ )
+
+ if self.__resize_target:
+ if "disparity" in sample:
+ sample["disparity"] = cv2.resize(
+ sample["disparity"],
+ (width, height),
+ interpolation=cv2.INTER_NEAREST,
+ )
+
+ if "depth" in sample:
+ sample["depth"] = cv2.resize(
+ sample["depth"], (width, height), interpolation=cv2.INTER_NEAREST
+ )
+
+ sample["mask"] = cv2.resize(
+ sample["mask"].astype(np.float32),
+ (width, height),
+ interpolation=cv2.INTER_NEAREST,
+ )
+ sample["mask"] = sample["mask"].astype(bool)
+
+ return sample
+
+
+class NormalizeImage(object):
+ """Normlize image by given mean and std.
+ """
+
+ def __init__(self, mean, std):
+ self.__mean = mean
+ self.__std = std
+
+ def __call__(self, sample):
+ sample["image"] = (sample["image"] - self.__mean) / self.__std
+
+ return sample
+
+
+class PrepareForNet(object):
+ """Prepare sample for usage as network input.
+ """
+
+ def __init__(self):
+ pass
+
+ def __call__(self, sample):
+ image = np.transpose(sample["image"], (2, 0, 1))
+ sample["image"] = np.ascontiguousarray(image).astype(np.float32)
+
+ if "mask" in sample:
+ sample["mask"] = sample["mask"].astype(np.float32)
+ sample["mask"] = np.ascontiguousarray(sample["mask"])
+
+ if "disparity" in sample:
+ disparity = sample["disparity"].astype(np.float32)
+ sample["disparity"] = np.ascontiguousarray(disparity)
+
+ if "depth" in sample:
+ depth = sample["depth"].astype(np.float32)
+ sample["depth"] = np.ascontiguousarray(depth)
+
+ return sample
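+
+
+# Hedged sketch of how these transforms are composed for MiDaS (mirrors
+# load_midas_transform in ldm/modules/midas/api.py):
+#
+#   from torchvision.transforms import Compose
+#   t = Compose([
+#       Resize(384, 384, resize_target=None, keep_aspect_ratio=True,
+#              ensure_multiple_of=32, resize_method="minimal",
+#              image_interpolation_method=cv2.INTER_CUBIC),
+#       NormalizeImage(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
+#       PrepareForNet(),
+#   ])
+#   out = t({"image": np.random.rand(480, 640, 3).astype(np.float32)})
+#   # out["image"]: float32 CxHxW, here (3, 384, 512); H and W are multiples of 32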
diff --git a/ldm/modules/midas/midas/vit.py b/ldm/modules/midas/midas/vit.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea46b1be88b261b0dec04f3da0256f5f66f88a74
--- /dev/null
+++ b/ldm/modules/midas/midas/vit.py
@@ -0,0 +1,491 @@
+import torch
+import torch.nn as nn
+import timm
+import types
+import math
+import torch.nn.functional as F
+
+
+class Slice(nn.Module):
+ def __init__(self, start_index=1):
+ super(Slice, self).__init__()
+ self.start_index = start_index
+
+ def forward(self, x):
+ return x[:, self.start_index :]
+
+
+class AddReadout(nn.Module):
+ def __init__(self, start_index=1):
+ super(AddReadout, self).__init__()
+ self.start_index = start_index
+
+ def forward(self, x):
+ if self.start_index == 2:
+ readout = (x[:, 0] + x[:, 1]) / 2
+ else:
+ readout = x[:, 0]
+ return x[:, self.start_index :] + readout.unsqueeze(1)
+
+
+class ProjectReadout(nn.Module):
+ def __init__(self, in_features, start_index=1):
+ super(ProjectReadout, self).__init__()
+ self.start_index = start_index
+
+ self.project = nn.Sequential(nn.Linear(2 * in_features, in_features), nn.GELU())
+
+ def forward(self, x):
+ readout = x[:, 0].unsqueeze(1).expand_as(x[:, self.start_index :])
+ features = torch.cat((x[:, self.start_index :], readout), -1)
+
+ return self.project(features)
+
+
+class Transpose(nn.Module):
+ def __init__(self, dim0, dim1):
+ super(Transpose, self).__init__()
+ self.dim0 = dim0
+ self.dim1 = dim1
+
+ def forward(self, x):
+ x = x.transpose(self.dim0, self.dim1)
+ return x
+
+
+def forward_vit(pretrained, x):
+ b, c, h, w = x.shape
+
+ glob = pretrained.model.forward_flex(x)
+
+ layer_1 = pretrained.activations["1"]
+ layer_2 = pretrained.activations["2"]
+ layer_3 = pretrained.activations["3"]
+ layer_4 = pretrained.activations["4"]
+
+ layer_1 = pretrained.act_postprocess1[0:2](layer_1)
+ layer_2 = pretrained.act_postprocess2[0:2](layer_2)
+ layer_3 = pretrained.act_postprocess3[0:2](layer_3)
+ layer_4 = pretrained.act_postprocess4[0:2](layer_4)
+
+ unflatten = nn.Sequential(
+ nn.Unflatten(
+ 2,
+ torch.Size(
+ [
+ h // pretrained.model.patch_size[1],
+ w // pretrained.model.patch_size[0],
+ ]
+ ),
+ )
+ )
+
+ if layer_1.ndim == 3:
+ layer_1 = unflatten(layer_1)
+ if layer_2.ndim == 3:
+ layer_2 = unflatten(layer_2)
+ if layer_3.ndim == 3:
+ layer_3 = unflatten(layer_3)
+ if layer_4.ndim == 3:
+ layer_4 = unflatten(layer_4)
+
+ layer_1 = pretrained.act_postprocess1[3 : len(pretrained.act_postprocess1)](layer_1)
+ layer_2 = pretrained.act_postprocess2[3 : len(pretrained.act_postprocess2)](layer_2)
+ layer_3 = pretrained.act_postprocess3[3 : len(pretrained.act_postprocess3)](layer_3)
+ layer_4 = pretrained.act_postprocess4[3 : len(pretrained.act_postprocess4)](layer_4)
+
+ return layer_1, layer_2, layer_3, layer_4
+
+
+def _resize_pos_embed(self, posemb, gs_h, gs_w):
+ posemb_tok, posemb_grid = (
+ posemb[:, : self.start_index],
+ posemb[0, self.start_index :],
+ )
+
+ gs_old = int(math.sqrt(len(posemb_grid)))
+
+ posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2)
+ posemb_grid = F.interpolate(posemb_grid, size=(gs_h, gs_w), mode="bilinear")
+ posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_h * gs_w, -1)
+
+ posemb = torch.cat([posemb_tok, posemb_grid], dim=1)
+
+ return posemb
+
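+# Sketch: interpolating a ViT-L/16 position embedding (24x24 patch grid plus a
+# cls token, start_index == 1) to a 12x16 grid for a non-square input, where
+# `pretrained` is the module returned by _make_vit_b16_backbone below:
+#
+#   posemb = torch.randn(1, 1 + 24 * 24, 1024)
+#   resized = pretrained.model._resize_pos_embed(posemb, 12, 16)
+#   assert resized.shape == (1, 1 + 12 * 16, 1024)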
+
+def forward_flex(self, x):
+ b, c, h, w = x.shape
+
+ pos_embed = self._resize_pos_embed(
+ self.pos_embed, h // self.patch_size[1], w // self.patch_size[0]
+ )
+
+ B = x.shape[0]
+
+ if hasattr(self.patch_embed, "backbone"):
+ x = self.patch_embed.backbone(x)
+ if isinstance(x, (list, tuple)):
+ x = x[-1] # last feature if backbone outputs list/tuple of features
+
+ x = self.patch_embed.proj(x).flatten(2).transpose(1, 2)
+
+ if getattr(self, "dist_token", None) is not None:
+ cls_tokens = self.cls_token.expand(
+ B, -1, -1
+ ) # stole cls_tokens impl from Phil Wang, thanks
+ dist_token = self.dist_token.expand(B, -1, -1)
+ x = torch.cat((cls_tokens, dist_token, x), dim=1)
+ else:
+ cls_tokens = self.cls_token.expand(
+ B, -1, -1
+ ) # stole cls_tokens impl from Phil Wang, thanks
+ x = torch.cat((cls_tokens, x), dim=1)
+
+ x = x + pos_embed
+ x = self.pos_drop(x)
+
+ for blk in self.blocks:
+ x = blk(x)
+
+ x = self.norm(x)
+
+ return x
+
+
+activations = {}
+
+
+def get_activation(name):
+ def hook(model, input, output):
+ activations[name] = output
+
+ return hook
+
+
+def get_readout_oper(vit_features, features, use_readout, start_index=1):
+ if use_readout == "ignore":
+ readout_oper = [Slice(start_index)] * len(features)
+ elif use_readout == "add":
+ readout_oper = [AddReadout(start_index)] * len(features)
+ elif use_readout == "project":
+ readout_oper = [
+ ProjectReadout(vit_features, start_index) for out_feat in features
+ ]
+ else:
+ assert (
+ False
+ ), "wrong operation for readout token, use_readout can be 'ignore', 'add', or 'project'"
+
+ return readout_oper
+
+
+def _make_vit_b16_backbone(
+ model,
+ features=[96, 192, 384, 768],
+ size=[384, 384],
+ hooks=[2, 5, 8, 11],
+ vit_features=768,
+ use_readout="ignore",
+ start_index=1,
+):
+ pretrained = nn.Module()
+
+ pretrained.model = model
+ pretrained.model.blocks[hooks[0]].register_forward_hook(get_activation("1"))
+ pretrained.model.blocks[hooks[1]].register_forward_hook(get_activation("2"))
+ pretrained.model.blocks[hooks[2]].register_forward_hook(get_activation("3"))
+ pretrained.model.blocks[hooks[3]].register_forward_hook(get_activation("4"))
+
+ pretrained.activations = activations
+
+ readout_oper = get_readout_oper(vit_features, features, use_readout, start_index)
+
+ # 32, 48, 136, 384
+ pretrained.act_postprocess1 = nn.Sequential(
+ readout_oper[0],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[0],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ nn.ConvTranspose2d(
+ in_channels=features[0],
+ out_channels=features[0],
+ kernel_size=4,
+ stride=4,
+ padding=0,
+ bias=True,
+ dilation=1,
+ groups=1,
+ ),
+ )
+
+ pretrained.act_postprocess2 = nn.Sequential(
+ readout_oper[1],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[1],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ nn.ConvTranspose2d(
+ in_channels=features[1],
+ out_channels=features[1],
+ kernel_size=2,
+ stride=2,
+ padding=0,
+ bias=True,
+ dilation=1,
+ groups=1,
+ ),
+ )
+
+ pretrained.act_postprocess3 = nn.Sequential(
+ readout_oper[2],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[2],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ )
+
+ pretrained.act_postprocess4 = nn.Sequential(
+ readout_oper[3],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[3],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ nn.Conv2d(
+ in_channels=features[3],
+ out_channels=features[3],
+ kernel_size=3,
+ stride=2,
+ padding=1,
+ ),
+ )
+
+ pretrained.model.start_index = start_index
+ pretrained.model.patch_size = [16, 16]
+
+ # We inject this function into the VisionTransformer instances so that
+ # we can use it with interpolated position embeddings without modifying the library source.
+ pretrained.model.forward_flex = types.MethodType(forward_flex, pretrained.model)
+ pretrained.model._resize_pos_embed = types.MethodType(
+ _resize_pos_embed, pretrained.model
+ )
+
+ return pretrained
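+
+# Minimal usage sketch (assumes timm is installed; factory names as defined below):
+#   backbone = _make_pretrained_vitb16_384(pretrained=True, use_readout="project")
+#   # backbone.model now exposes forward_flex / _resize_pos_embed, and the four
+#   # act_postprocess stages map hooked tokens to multi-scale feature maps.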
+
+
+def _make_pretrained_vitl16_384(pretrained, use_readout="ignore", hooks=None):
+ model = timm.create_model("vit_large_patch16_384", pretrained=pretrained)
+
+ hooks = [5, 11, 17, 23] if hooks is None else hooks
+ return _make_vit_b16_backbone(
+ model,
+ features=[256, 512, 1024, 1024],
+ hooks=hooks,
+ vit_features=1024,
+ use_readout=use_readout,
+ )
+
+
+def _make_pretrained_vitb16_384(pretrained, use_readout="ignore", hooks=None):
+ model = timm.create_model("vit_base_patch16_384", pretrained=pretrained)
+
+ hooks = [2, 5, 8, 11] if hooks is None else hooks
+ return _make_vit_b16_backbone(
+ model, features=[96, 192, 384, 768], hooks=hooks, use_readout=use_readout
+ )
+
+
+def _make_pretrained_deitb16_384(pretrained, use_readout="ignore", hooks=None):
+ model = timm.create_model("vit_deit_base_patch16_384", pretrained=pretrained)
+
+ hooks = [2, 5, 8, 11] if hooks is None else hooks
+ return _make_vit_b16_backbone(
+ model, features=[96, 192, 384, 768], hooks=hooks, use_readout=use_readout
+ )
+
+
+def _make_pretrained_deitb16_distil_384(pretrained, use_readout="ignore", hooks=None):
+ model = timm.create_model(
+ "vit_deit_base_distilled_patch16_384", pretrained=pretrained
+ )
+
+ hooks = [2, 5, 8, 11] if hooks is None else hooks
+ return _make_vit_b16_backbone(
+ model,
+ features=[96, 192, 384, 768],
+ hooks=hooks,
+ use_readout=use_readout,
+ start_index=2,
+ )
+
+
+def _make_vit_b_rn50_backbone(
+ model,
+ features=[256, 512, 768, 768],
+ size=[384, 384],
+ hooks=[0, 1, 8, 11],
+ vit_features=768,
+ use_vit_only=False,
+ use_readout="ignore",
+ start_index=1,
+):
+ pretrained = nn.Module()
+
+ pretrained.model = model
+
+ if use_vit_only:
+ pretrained.model.blocks[hooks[0]].register_forward_hook(get_activation("1"))
+ pretrained.model.blocks[hooks[1]].register_forward_hook(get_activation("2"))
+ else:
+ pretrained.model.patch_embed.backbone.stages[0].register_forward_hook(
+ get_activation("1")
+ )
+ pretrained.model.patch_embed.backbone.stages[1].register_forward_hook(
+ get_activation("2")
+ )
+
+ pretrained.model.blocks[hooks[2]].register_forward_hook(get_activation("3"))
+ pretrained.model.blocks[hooks[3]].register_forward_hook(get_activation("4"))
+
+ pretrained.activations = activations
+
+ readout_oper = get_readout_oper(vit_features, features, use_readout, start_index)
+
+ if use_vit_only:
+ pretrained.act_postprocess1 = nn.Sequential(
+ readout_oper[0],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[0],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ nn.ConvTranspose2d(
+ in_channels=features[0],
+ out_channels=features[0],
+ kernel_size=4,
+ stride=4,
+ padding=0,
+ bias=True,
+ dilation=1,
+ groups=1,
+ ),
+ )
+
+ pretrained.act_postprocess2 = nn.Sequential(
+ readout_oper[1],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[1],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ nn.ConvTranspose2d(
+ in_channels=features[1],
+ out_channels=features[1],
+ kernel_size=2,
+ stride=2,
+ padding=0,
+ bias=True,
+ dilation=1,
+ groups=1,
+ ),
+ )
+ else:
+ pretrained.act_postprocess1 = nn.Sequential(
+ nn.Identity(), nn.Identity(), nn.Identity()
+ )
+ pretrained.act_postprocess2 = nn.Sequential(
+ nn.Identity(), nn.Identity(), nn.Identity()
+ )
+
+ pretrained.act_postprocess3 = nn.Sequential(
+ readout_oper[2],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[2],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ )
+
+ pretrained.act_postprocess4 = nn.Sequential(
+ readout_oper[3],
+ Transpose(1, 2),
+ nn.Unflatten(2, torch.Size([size[0] // 16, size[1] // 16])),
+ nn.Conv2d(
+ in_channels=vit_features,
+ out_channels=features[3],
+ kernel_size=1,
+ stride=1,
+ padding=0,
+ ),
+ nn.Conv2d(
+ in_channels=features[3],
+ out_channels=features[3],
+ kernel_size=3,
+ stride=2,
+ padding=1,
+ ),
+ )
+
+ pretrained.model.start_index = start_index
+ pretrained.model.patch_size = [16, 16]
+
+ # We inject this function into the VisionTransformer instances so that
+ # we can use it with interpolated position embeddings without modifying the library source.
+ pretrained.model.forward_flex = types.MethodType(forward_flex, pretrained.model)
+
+ # We inject this function into the VisionTransformer instances so that
+ # we can use it with interpolated position embeddings without modifying the library source.
+ pretrained.model._resize_pos_embed = types.MethodType(
+ _resize_pos_embed, pretrained.model
+ )
+
+ return pretrained
+
+
+def _make_pretrained_vitb_rn50_384(
+ pretrained, use_readout="ignore", hooks=None, use_vit_only=False
+):
+ model = timm.create_model("vit_base_resnet50_384", pretrained=pretrained)
+
+ hooks = [0, 1, 8, 11] if hooks is None else hooks
+ return _make_vit_b_rn50_backbone(
+ model,
+ features=[256, 512, 768, 768],
+ size=[384, 384],
+ hooks=hooks,
+ use_vit_only=use_vit_only,
+ use_readout=use_readout,
+ )
diff --git a/ldm/modules/midas/utils.py b/ldm/modules/midas/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a9d3b5b66370fa98da9e067ba53ead848ea9a59
--- /dev/null
+++ b/ldm/modules/midas/utils.py
@@ -0,0 +1,189 @@
+"""Utils for monoDepth."""
+import sys
+import re
+import numpy as np
+import cv2
+import torch
+
+
+def read_pfm(path):
+ """Read pfm file.
+
+ Args:
+ path (str): path to file
+
+ Returns:
+ tuple: (data, scale)
+ """
+ with open(path, "rb") as file:
+
+ color = None
+ width = None
+ height = None
+ scale = None
+ endian = None
+
+ header = file.readline().rstrip()
+ if header.decode("ascii") == "PF":
+ color = True
+ elif header.decode("ascii") == "Pf":
+ color = False
+ else:
+ raise Exception("Not a PFM file: " + path)
+
+ dim_match = re.match(r"^(\d+)\s(\d+)\s$", file.readline().decode("ascii"))
+ if dim_match:
+ width, height = list(map(int, dim_match.groups()))
+ else:
+ raise Exception("Malformed PFM header.")
+
+ scale = float(file.readline().decode("ascii").rstrip())
+ if scale < 0:
+ # little-endian
+ endian = "<"
+ scale = -scale
+ else:
+ # big-endian
+ endian = ">"
+
+ data = np.fromfile(file, endian + "f")
+ shape = (height, width, 3) if color else (height, width)
+
+ data = np.reshape(data, shape)
+ data = np.flipud(data)
+
+ return data, scale
+
+
+def write_pfm(path, image, scale=1):
+ """Write pfm file.
+
+ Args:
+ path (str): path to file
+ image (array): data
+ scale (int, optional): Scale. Defaults to 1.
+ """
+
+ with open(path, "wb") as file:
+ color = None
+
+ if image.dtype.name != "float32":
+ raise Exception("Image dtype must be float32.")
+
+ image = np.flipud(image)
+
+ if len(image.shape) == 3 and image.shape[2] == 3: # color image
+ color = True
+ elif (
+ len(image.shape) == 2 or (len(image.shape) == 3 and image.shape[2] == 1)
+ ): # greyscale
+ color = False
+ else:
+ raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.")
+
+ file.write("PF\n" if color else "Pf\n".encode())
+ file.write("%d %d\n".encode() % (image.shape[1], image.shape[0]))
+
+ endian = image.dtype.byteorder
+
+ if endian == "<" or (endian == "=" and sys.byteorder == "little"):
+ scale = -scale
+
+ file.write("%f\n".encode() % scale)
+
+ image.tofile(file)
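+
+# Round-trip sketch: write_pfm("depth.pfm", d) followed by read_pfm("depth.pfm")
+# recovers (d, 1.0); the vertical flip is applied symmetrically on both paths,
+# and the sign of the scale header encodes byte order.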
+
+
+def read_image(path):
+ """Read image and output RGB image (0-1).
+
+ Args:
+ path (str): path to file
+
+ Returns:
+ array: RGB image (0-1)
+ """
+ img = cv2.imread(path)
+
+ if img.ndim == 2:
+ img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
+
+ img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) / 255.0
+
+ return img
+
+
+def resize_image(img):
+ """Resize image and make it fit for network.
+
+ Args:
+ img (array): image
+
+ Returns:
+ tensor: data ready for network
+ """
+ height_orig = img.shape[0]
+ width_orig = img.shape[1]
+
+ if width_orig > height_orig:
+ scale = width_orig / 384
+ else:
+ scale = height_orig / 384
+
+ height = (np.ceil(height_orig / scale / 32) * 32).astype(int)
+ width = (np.ceil(width_orig / scale / 32) * 32).astype(int)
+
+ img_resized = cv2.resize(img, (width, height), interpolation=cv2.INTER_AREA)
+
+ img_resized = (
+ torch.from_numpy(np.transpose(img_resized, (2, 0, 1))).contiguous().float()
+ )
+ img_resized = img_resized.unsqueeze(0)
+
+ return img_resized
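+
+# Example: a 1920x1080 input is scaled so the longer side maps to 384 and both
+# sides are rounded up to multiples of 32, yielding a 384x224 (W x H) image,
+# i.e. a tensor of shape (1, 3, 224, 384).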
+
+
+def resize_depth(depth, width, height):
+ """Resize depth map and bring to CPU (numpy).
+
+ Args:
+ depth (tensor): depth
+ width (int): image width
+ height (int): image height
+
+ Returns:
+ array: processed depth
+ """
+ depth = torch.squeeze(depth[0, :, :, :]).to("cpu")
+
+ depth_resized = cv2.resize(
+ depth.numpy(), (width, height), interpolation=cv2.INTER_CUBIC
+ )
+
+ return depth_resized
+
+
+def write_depth(path, depth, bits=1):
+ """Write depth map to pfm and png file.
+
+ Args:
+ path (str): filepath without extension
+ depth (array): depth
+ bits (int, optional): bits per channel of the output png; 1 -> uint8, 2 -> uint16. Defaults to 1.
+ """
+ write_pfm(path + ".pfm", depth.astype(np.float32))
+
+ depth_min = depth.min()
+ depth_max = depth.max()
+
+ max_val = (2**(8*bits))-1
+
+ if depth_max - depth_min > np.finfo("float").eps:
+ out = max_val * (depth - depth_min) / (depth_max - depth_min)
+ else:
+ out = np.zeros(depth.shape, dtype=depth.dtype)
+
+ if bits == 1:
+ cv2.imwrite(path + ".png", out.astype("uint8"))
+ elif bits == 2:
+ cv2.imwrite(path + ".png", out.astype("uint16"))
+
+ return
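+
+# Example: write_depth("out", d, bits=2) normalizes d to [0, 65535] and writes
+# out.pfm (float32) plus out.png (uint16).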
diff --git a/ldm/modules/x_transformer.py b/ldm/modules/x_transformer.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fc15bf9cfe0111a910e7de33d04ffdec3877576
--- /dev/null
+++ b/ldm/modules/x_transformer.py
@@ -0,0 +1,641 @@
+"""shout-out to https://github.com/lucidrains/x-transformers/tree/main/x_transformers"""
+import torch
+from torch import nn, einsum
+import torch.nn.functional as F
+from functools import partial
+from inspect import isfunction
+from collections import namedtuple
+from einops import rearrange, repeat, reduce
+
+# constants
+
+DEFAULT_DIM_HEAD = 64
+
+Intermediates = namedtuple('Intermediates', [
+ 'pre_softmax_attn',
+ 'post_softmax_attn'
+])
+
+LayerIntermediates = namedtuple('LayerIntermediates', [
+ 'hiddens',
+ 'attn_intermediates'
+])
+
+
+class AbsolutePositionalEmbedding(nn.Module):
+ def __init__(self, dim, max_seq_len):
+ super().__init__()
+ self.emb = nn.Embedding(max_seq_len, dim)
+ self.init_()
+
+ def init_(self):
+ nn.init.normal_(self.emb.weight, std=0.02)
+
+ def forward(self, x):
+ n = torch.arange(x.shape[1], device=x.device)
+ return self.emb(n)[None, :, :]
+
+
+class FixedPositionalEmbedding(nn.Module):
+ def __init__(self, dim):
+ super().__init__()
+ inv_freq = 1. / (10000 ** (torch.arange(0, dim, 2).float() / dim))
+ self.register_buffer('inv_freq', inv_freq)
+
+ def forward(self, x, seq_dim=1, offset=0):
+ t = torch.arange(x.shape[seq_dim], device=x.device).type_as(self.inv_freq) + offset
+ sinusoid_inp = torch.einsum('i , j -> i j', t, self.inv_freq)
+ emb = torch.cat((sinusoid_inp.sin(), sinusoid_inp.cos()), dim=-1)
+ return emb[None, :, :]
+
+
+# helpers
+
+def exists(val):
+ return val is not None
+
+
+def default(val, d):
+ if exists(val):
+ return val
+ return d() if isfunction(d) else d
+
+
+def always(val):
+ def inner(*args, **kwargs):
+ return val
+ return inner
+
+
+def not_equals(val):
+ def inner(x):
+ return x != val
+ return inner
+
+
+def equals(val):
+ def inner(x):
+ return x == val
+ return inner
+
+
+def max_neg_value(tensor):
+ return -torch.finfo(tensor.dtype).max
+
+
+# keyword argument helpers
+
+def pick_and_pop(keys, d):
+ values = list(map(lambda key: d.pop(key), keys))
+ return dict(zip(keys, values))
+
+
+def group_dict_by_key(cond, d):
+ return_val = [dict(), dict()]
+ for key in d.keys():
+ match = bool(cond(key))
+ ind = int(not match)
+ return_val[ind][key] = d[key]
+ return (*return_val,)
+
+
+def string_begins_with(prefix, str):
+ return str.startswith(prefix)
+
+
+def group_by_key_prefix(prefix, d):
+ return group_dict_by_key(partial(string_begins_with, prefix), d)
+
+
+def groupby_prefix_and_trim(prefix, d):
+ kwargs_with_prefix, kwargs = group_dict_by_key(partial(string_begins_with, prefix), d)
+ kwargs_without_prefix = dict(map(lambda x: (x[0][len(prefix):], x[1]), tuple(kwargs_with_prefix.items())))
+ return kwargs_without_prefix, kwargs
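+
+# Example: groupby_prefix_and_trim('attn_', {'attn_dropout': 0.1, 'ff_mult': 4})
+# returns ({'dropout': 0.1}, {'ff_mult': 4}).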
+
+
+# classes
+class Scale(nn.Module):
+ def __init__(self, value, fn):
+ super().__init__()
+ self.value = value
+ self.fn = fn
+
+ def forward(self, x, **kwargs):
+ x, *rest = self.fn(x, **kwargs)
+ return (x * self.value, *rest)
+
+
+class Rezero(nn.Module):
+ def __init__(self, fn):
+ super().__init__()
+ self.fn = fn
+ self.g = nn.Parameter(torch.zeros(1))
+
+ def forward(self, x, **kwargs):
+ x, *rest = self.fn(x, **kwargs)
+ return (x * self.g, *rest)
+
+
+class ScaleNorm(nn.Module):
+ def __init__(self, dim, eps=1e-5):
+ super().__init__()
+ self.scale = dim ** -0.5
+ self.eps = eps
+ self.g = nn.Parameter(torch.ones(1))
+
+ def forward(self, x):
+ norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
+ return x / norm.clamp(min=self.eps) * self.g
+
+
+class RMSNorm(nn.Module):
+ def __init__(self, dim, eps=1e-8):
+ super().__init__()
+ self.scale = dim ** -0.5
+ self.eps = eps
+ self.g = nn.Parameter(torch.ones(dim))
+
+ def forward(self, x):
+ norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
+ return x / norm.clamp(min=self.eps) * self.g
+
+
+class Residual(nn.Module):
+ def forward(self, x, residual):
+ return x + residual
+
+
+class GRUGating(nn.Module):
+ def __init__(self, dim):
+ super().__init__()
+ self.gru = nn.GRUCell(dim, dim)
+
+ def forward(self, x, residual):
+ gated_output = self.gru(
+ rearrange(x, 'b n d -> (b n) d'),
+ rearrange(residual, 'b n d -> (b n) d')
+ )
+
+ return gated_output.reshape_as(x)
+
+
+# feedforward
+
+class GEGLU(nn.Module):
+ def __init__(self, dim_in, dim_out):
+ super().__init__()
+ self.proj = nn.Linear(dim_in, dim_out * 2)
+
+ def forward(self, x):
+ x, gate = self.proj(x).chunk(2, dim=-1)
+ return x * F.gelu(gate)
+
+
+class FeedForward(nn.Module):
+ def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):
+ super().__init__()
+ inner_dim = int(dim * mult)
+ dim_out = default(dim_out, dim)
+ project_in = nn.Sequential(
+ nn.Linear(dim, inner_dim),
+ nn.GELU()
+ ) if not glu else GEGLU(dim, inner_dim)
+
+ self.net = nn.Sequential(
+ project_in,
+ nn.Dropout(dropout),
+ nn.Linear(inner_dim, dim_out)
+ )
+
+ def forward(self, x):
+ return self.net(x)
+
+
+# attention.
+class Attention(nn.Module):
+ def __init__(
+ self,
+ dim,
+ dim_head=DEFAULT_DIM_HEAD,
+ heads=8,
+ causal=False,
+ mask=None,
+ talking_heads=False,
+ sparse_topk=None,
+ use_entmax15=False,
+ num_mem_kv=0,
+ dropout=0.,
+ on_attn=False
+ ):
+ super().__init__()
+ if use_entmax15:
+ raise NotImplementedError("entmax15 attention is not wired up here; use the default softmax attention")
+ self.scale = dim_head ** -0.5
+ self.heads = heads
+ self.causal = causal
+ self.mask = mask
+
+ inner_dim = dim_head * heads
+
+ self.to_q = nn.Linear(dim, inner_dim, bias=False)
+ self.to_k = nn.Linear(dim, inner_dim, bias=False)
+ self.to_v = nn.Linear(dim, inner_dim, bias=False)
+ self.dropout = nn.Dropout(dropout)
+
+ # talking heads
+ self.talking_heads = talking_heads
+ if talking_heads:
+ self.pre_softmax_proj = nn.Parameter(torch.randn(heads, heads))
+ self.post_softmax_proj = nn.Parameter(torch.randn(heads, heads))
+
+ # explicit topk sparse attention
+ self.sparse_topk = sparse_topk
+
+ # entmax
+ #self.attn_fn = entmax15 if use_entmax15 else F.softmax
+ self.attn_fn = F.softmax
+
+ # add memory key / values
+ self.num_mem_kv = num_mem_kv
+ if num_mem_kv > 0:
+ self.mem_k = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head))
+ self.mem_v = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head))
+
+ # attention on attention
+ self.attn_on_attn = on_attn
+ self.to_out = nn.Sequential(nn.Linear(inner_dim, dim * 2), nn.GLU()) if on_attn else nn.Linear(inner_dim, dim)
+
+ def forward(
+ self,
+ x,
+ context=None,
+ mask=None,
+ context_mask=None,
+ rel_pos=None,
+ sinusoidal_emb=None,
+ prev_attn=None,
+ mem=None
+ ):
+ b, n, _, h, talking_heads, device = *x.shape, self.heads, self.talking_heads, x.device
+ kv_input = default(context, x)
+
+ q_input = x
+ k_input = kv_input
+ v_input = kv_input
+
+ if exists(mem):
+ k_input = torch.cat((mem, k_input), dim=-2)
+ v_input = torch.cat((mem, v_input), dim=-2)
+
+ if exists(sinusoidal_emb):
+ # in shortformer, the query would start at a position offset depending on the past cached memory
+ offset = k_input.shape[-2] - q_input.shape[-2]
+ q_input = q_input + sinusoidal_emb(q_input, offset=offset)
+ k_input = k_input + sinusoidal_emb(k_input)
+
+ q = self.to_q(q_input)
+ k = self.to_k(k_input)
+ v = self.to_v(v_input)
+
+ q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=h), (q, k, v))
+
+ input_mask = None
+ if any(map(exists, (mask, context_mask))):
+ q_mask = default(mask, lambda: torch.ones((b, n), device=device).bool())
+ k_mask = q_mask if not exists(context) else context_mask
+ k_mask = default(k_mask, lambda: torch.ones((b, k.shape[-2]), device=device).bool())
+ q_mask = rearrange(q_mask, 'b i -> b () i ()')
+ k_mask = rearrange(k_mask, 'b j -> b () () j')
+ input_mask = q_mask * k_mask
+
+ if self.num_mem_kv > 0:
+ mem_k, mem_v = map(lambda t: repeat(t, 'h n d -> b h n d', b=b), (self.mem_k, self.mem_v))
+ k = torch.cat((mem_k, k), dim=-2)
+ v = torch.cat((mem_v, v), dim=-2)
+ if exists(input_mask):
+ input_mask = F.pad(input_mask, (self.num_mem_kv, 0), value=True)
+
+ dots = einsum('b h i d, b h j d -> b h i j', q, k) * self.scale
+ mask_value = max_neg_value(dots)
+
+ if exists(prev_attn):
+ dots = dots + prev_attn
+
+ pre_softmax_attn = dots
+
+ if talking_heads:
+ dots = einsum('b h i j, h k -> b k i j', dots, self.pre_softmax_proj).contiguous()
+
+ if exists(rel_pos):
+ dots = rel_pos(dots)
+
+ if exists(input_mask):
+ dots.masked_fill_(~input_mask, mask_value)
+ del input_mask
+
+ if self.causal:
+ i, j = dots.shape[-2:]
+ r = torch.arange(i, device=device)
+ mask = rearrange(r, 'i -> () () i ()') < rearrange(r, 'j -> () () () j')
+ mask = F.pad(mask, (j - i, 0), value=False)
+ dots.masked_fill_(mask, mask_value)
+ del mask
+
+ if exists(self.sparse_topk) and self.sparse_topk < dots.shape[-1]:
+ top, _ = dots.topk(self.sparse_topk, dim=-1)
+ vk = top[..., -1].unsqueeze(-1).expand_as(dots)
+ mask = dots < vk
+ dots.masked_fill_(mask, mask_value)
+ del mask
+
+ attn = self.attn_fn(dots, dim=-1)
+ post_softmax_attn = attn
+
+ attn = self.dropout(attn)
+
+ if talking_heads:
+ attn = einsum('b h i j, h k -> b k i j', attn, self.post_softmax_proj).contiguous()
+
+ out = einsum('b h i j, b h j d -> b h i d', attn, v)
+ out = rearrange(out, 'b h n d -> b n (h d)')
+
+ intermediates = Intermediates(
+ pre_softmax_attn=pre_softmax_attn,
+ post_softmax_attn=post_softmax_attn
+ )
+
+ return self.to_out(out), intermediates
+
+
+class AttentionLayers(nn.Module):
+ def __init__(
+ self,
+ dim,
+ depth,
+ heads=8,
+ causal=False,
+ cross_attend=False,
+ only_cross=False,
+ use_scalenorm=False,
+ use_rmsnorm=False,
+ use_rezero=False,
+ rel_pos_num_buckets=32,
+ rel_pos_max_distance=128,
+ position_infused_attn=False,
+ custom_layers=None,
+ sandwich_coef=None,
+ par_ratio=None,
+ residual_attn=False,
+ cross_residual_attn=False,
+ macaron=False,
+ pre_norm=True,
+ gate_residual=False,
+ **kwargs
+ ):
+ super().__init__()
+ ff_kwargs, kwargs = groupby_prefix_and_trim('ff_', kwargs)
+ attn_kwargs, _ = groupby_prefix_and_trim('attn_', kwargs)
+
+ dim_head = attn_kwargs.get('dim_head', DEFAULT_DIM_HEAD)
+
+ self.dim = dim
+ self.depth = depth
+ self.layers = nn.ModuleList([])
+
+ self.has_pos_emb = position_infused_attn
+ self.pia_pos_emb = FixedPositionalEmbedding(dim) if position_infused_attn else None
+ self.rotary_pos_emb = always(None)
+
+ assert rel_pos_num_buckets <= rel_pos_max_distance, 'number of relative position buckets must not exceed the relative position max distance'
+ self.rel_pos = None
+
+ self.pre_norm = pre_norm
+
+ self.residual_attn = residual_attn
+ self.cross_residual_attn = cross_residual_attn
+
+ norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm
+ norm_class = RMSNorm if use_rmsnorm else norm_class
+ norm_fn = partial(norm_class, dim)
+
+ norm_fn = nn.Identity if use_rezero else norm_fn
+ branch_fn = Rezero if use_rezero else None
+
+ if cross_attend and not only_cross:
+ default_block = ('a', 'c', 'f')
+ elif cross_attend and only_cross:
+ default_block = ('c', 'f')
+ else:
+ default_block = ('a', 'f')
+
+ if macaron:
+ default_block = ('f',) + default_block
+
+ if exists(custom_layers):
+ layer_types = custom_layers
+ elif exists(par_ratio):
+ par_depth = depth * len(default_block)
+ assert 1 < par_ratio <= par_depth, 'par ratio out of range'
+ default_block = tuple(filter(not_equals('f'), default_block))
+ par_attn = par_depth // par_ratio
+ depth_cut = par_depth * 2 // 3 # 2 / 3 attention layer cutoff suggested by PAR paper
+ par_width = (depth_cut + depth_cut // par_attn) // par_attn
+ assert len(default_block) <= par_width, 'default block is too large for par_ratio'
+ par_block = default_block + ('f',) * (par_width - len(default_block))
+ par_head = par_block * par_attn
+ layer_types = par_head + ('f',) * (par_depth - len(par_head))
+ elif exists(sandwich_coef):
+ assert 0 < sandwich_coef <= depth, 'sandwich coefficient must be positive and no greater than the depth'
+ layer_types = ('a',) * sandwich_coef + default_block * (depth - sandwich_coef) + ('f',) * sandwich_coef
+ else:
+ layer_types = default_block * depth
+
+ self.layer_types = layer_types
+ self.num_attn_layers = len(list(filter(equals('a'), layer_types)))
+
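+ # Layer codes: 'a' = self-attention, 'c' = cross-attention, 'f' = feed-forward;
+ # e.g. depth=2 with cross_attend=True expands layer_types to ('a', 'c', 'f') * 2.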
+ for layer_type in self.layer_types:
+ if layer_type == 'a':
+ layer = Attention(dim, heads=heads, causal=causal, **attn_kwargs)
+ elif layer_type == 'c':
+ layer = Attention(dim, heads=heads, **attn_kwargs)
+ elif layer_type == 'f':
+ layer = FeedForward(dim, **ff_kwargs)
+ layer = layer if not macaron else Scale(0.5, layer)
+ else:
+ raise Exception(f'invalid layer type {layer_type}')
+
+ if isinstance(layer, Attention) and exists(branch_fn):
+ layer = branch_fn(layer)
+
+ if gate_residual:
+ residual_fn = GRUGating(dim)
+ else:
+ residual_fn = Residual()
+
+ self.layers.append(nn.ModuleList([
+ norm_fn(),
+ layer,
+ residual_fn
+ ]))
+
+ def forward(
+ self,
+ x,
+ context=None,
+ mask=None,
+ context_mask=None,
+ mems=None,
+ return_hiddens=False
+ ):
+ hiddens = []
+ intermediates = []
+ prev_attn = None
+ prev_cross_attn = None
+
+ mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers
+
+ for ind, (layer_type, (norm, block, residual_fn)) in enumerate(zip(self.layer_types, self.layers)):
+ is_last = ind == (len(self.layers) - 1)
+
+ if layer_type == 'a':
+ hiddens.append(x)
+ layer_mem = mems.pop(0)
+
+ residual = x
+
+ if self.pre_norm:
+ x = norm(x)
+
+ if layer_type == 'a':
+ out, inter = block(x, mask=mask, sinusoidal_emb=self.pia_pos_emb, rel_pos=self.rel_pos,
+ prev_attn=prev_attn, mem=layer_mem)
+ elif layer_type == 'c':
+ out, inter = block(x, context=context, mask=mask, context_mask=context_mask, prev_attn=prev_cross_attn)
+ elif layer_type == 'f':
+ out = block(x)
+
+ x = residual_fn(out, residual)
+
+ if layer_type in ('a', 'c'):
+ intermediates.append(inter)
+
+ if layer_type == 'a' and self.residual_attn:
+ prev_attn = inter.pre_softmax_attn
+ elif layer_type == 'c' and self.cross_residual_attn:
+ prev_cross_attn = inter.pre_softmax_attn
+
+ if not self.pre_norm and not is_last:
+ x = norm(x)
+
+ if return_hiddens:
+ intermediates = LayerIntermediates(
+ hiddens=hiddens,
+ attn_intermediates=intermediates
+ )
+
+ return x, intermediates
+
+ return x
+
+
+class Encoder(AttentionLayers):
+ def __init__(self, **kwargs):
+ assert 'causal' not in kwargs, 'cannot set causality on encoder'
+ super().__init__(causal=False, **kwargs)
+
+
+
+class TransformerWrapper(nn.Module):
+ def __init__(
+ self,
+ *,
+ num_tokens,
+ max_seq_len,
+ attn_layers,
+ emb_dim=None,
+ max_mem_len=0, # must be an int: it is used as a slice bound when return_mems is set
+ emb_dropout=0.,
+ num_memory_tokens=None,
+ tie_embedding=False,
+ use_pos_emb=True
+ ):
+ super().__init__()
+ assert isinstance(attn_layers, AttentionLayers), 'attention layers must be one of Encoder or Decoder'
+
+ dim = attn_layers.dim
+ emb_dim = default(emb_dim, dim)
+
+ self.max_seq_len = max_seq_len
+ self.max_mem_len = max_mem_len
+ self.num_tokens = num_tokens
+
+ self.token_emb = nn.Embedding(num_tokens, emb_dim)
+ self.pos_emb = AbsolutePositionalEmbedding(emb_dim, max_seq_len) if (
+ use_pos_emb and not attn_layers.has_pos_emb) else always(0)
+ self.emb_dropout = nn.Dropout(emb_dropout)
+
+ self.project_emb = nn.Linear(emb_dim, dim) if emb_dim != dim else nn.Identity()
+ self.attn_layers = attn_layers
+ self.norm = nn.LayerNorm(dim)
+
+ self.init_()
+
+ self.to_logits = nn.Linear(dim, num_tokens) if not tie_embedding else lambda t: t @ self.token_emb.weight.t()
+
+ # memory tokens (like [cls]) from Memory Transformers paper
+ num_memory_tokens = default(num_memory_tokens, 0)
+ self.num_memory_tokens = num_memory_tokens
+ if num_memory_tokens > 0:
+ self.memory_tokens = nn.Parameter(torch.randn(num_memory_tokens, dim))
+
+ # let funnel encoder know number of memory tokens, if specified
+ if hasattr(attn_layers, 'num_memory_tokens'):
+ attn_layers.num_memory_tokens = num_memory_tokens
+
+ def init_(self):
+ nn.init.normal_(self.token_emb.weight, std=0.02)
+
+ def forward(
+ self,
+ x,
+ return_embeddings=False,
+ mask=None,
+ return_mems=False,
+ return_attn=False,
+ mems=None,
+ **kwargs
+ ):
+ b, n, device, num_mem = *x.shape, x.device, self.num_memory_tokens
+ x = self.token_emb(x)
+ x += self.pos_emb(x)
+ x = self.emb_dropout(x)
+
+ x = self.project_emb(x)
+
+ if num_mem > 0:
+ mem = repeat(self.memory_tokens, 'n d -> b n d', b=b)
+ x = torch.cat((mem, x), dim=1)
+
+ # auto-handle masking after appending memory tokens
+ if exists(mask):
+ mask = F.pad(mask, (num_mem, 0), value=True)
+
+ x, intermediates = self.attn_layers(x, mask=mask, mems=mems, return_hiddens=True, **kwargs)
+ x = self.norm(x)
+
+ mem, x = x[:, :num_mem], x[:, num_mem:]
+
+ out = self.to_logits(x) if not return_embeddings else x
+
+ if return_mems:
+ hiddens = intermediates.hiddens
+ new_mems = list(map(lambda pair: torch.cat(pair, dim=-2), zip(mems, hiddens))) if exists(mems) else hiddens
+ new_mems = list(map(lambda t: t[..., -self.max_mem_len:, :].detach(), new_mems))
+ return out, new_mems
+
+ if return_attn:
+ attn_maps = list(map(lambda t: t.post_softmax_attn, intermediates.attn_intermediates))
+ return out, attn_maps
+
+ return out
+
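+# Minimal usage sketch (hypothetical hyper-parameters):
+#   model = TransformerWrapper(
+#       num_tokens=30522, max_seq_len=77,
+#       attn_layers=Encoder(dim=512, depth=6, heads=8))
+#   logits = model(torch.randint(0, 30522, (1, 77)))  # -> (1, 77, 30522)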
diff --git a/ldm/test_sd_decoder.py b/ldm/test_sd_decoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ff4485b0bd1b0242ea3fc0952f09aeeb9b26eff
--- /dev/null
+++ b/ldm/test_sd_decoder.py
@@ -0,0 +1,29 @@
+import numpy as np
+import torch
+
+from modules.diffusionmodules.model import SimpleDecoder, Decoder
+
+# decoder = SimpleDecoder(4,32)
+# https://github.com/CompVis/stable-diffusion/blob/main/configs/autoencoder/autoencoder_kl_32x32x4.yaml
+decoder = Decoder(
+ ch=64,
+ out_ch=32,
+ ch_mult=(1, 2),
+ num_res_blocks=2,
+ # num_res_blocks=1,
+ dropout=0.0,
+ attn_resolutions=(),
+ z_channels=4,
+ resolution=64,
+ in_channels=3,
+).cuda()
+
+input_tensor = torch.randn(1, 4, 32, 32).cuda()
+# input_tensor = torch.randn(
+# 1,
+# 96,
+# 32,
+# 32,
+# )
+
+print(decoder(input_tensor).shape)
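+# Expected output (assuming the standard LDM Decoder, which upsamples by
+# 2 ** (len(ch_mult) - 1)): torch.Size([1, 32, 64, 64])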
diff --git a/ldm/util.py b/ldm/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..66d9b2a69db2898323cbf2ad26a09ac8b2facd11
--- /dev/null
+++ b/ldm/util.py
@@ -0,0 +1,275 @@
+import functools
+import importlib
+import os
+from functools import partial
+from inspect import isfunction
+
+import fsspec
+import numpy as np
+import torch
+from PIL import Image, ImageDraw, ImageFont
+from safetensors.torch import load_file as load_safetensors
+
+
+def disabled_train(self, mode=True):
+ """Overwrite model.train with this function to make sure train/eval mode
+ does not change anymore."""
+ return self
+
+
+def get_string_from_tuple(s):
+ try:
+ # Check if the string starts and ends with parentheses
+ if s[0] == "(" and s[-1] == ")":
+ # Convert the string to a tuple
+ t = eval(s)
+ # If the literal evaluates to a tuple, return its first element
+ if isinstance(t, tuple):
+ return t[0]
+ except Exception:
+ pass
+ return s
+
+
+def is_power_of_two(n):
+ """
+ chat.openai.com/chat
+ Return True if n is a power of 2, otherwise return False.
+
+ The function is_power_of_two takes an integer n as input and returns True if n is a power of 2, otherwise it returns False.
+ The function works by first checking if n is less than or equal to 0. If n is less than or equal to 0, it can't be a power of 2, so the function returns False.
+ If n is greater than 0, the function checks whether n is a power of 2 by using a bitwise AND operation between n and n-1. If n is a power of 2, then it will have only one bit set to 1 in its binary representation. When we subtract 1 from a power of 2, all the bits to the right of that bit become 1, and the bit itself becomes 0. So, when we perform a bitwise AND between n and n-1, we get 0 if n is a power of 2, and a non-zero value otherwise.
+ Thus, if the result of the bitwise AND operation is 0, then n is a power of 2 and the function returns True. Otherwise, the function returns False.
+
+ """
+ if n <= 0:
+ return False
+ return (n & (n - 1)) == 0
+
+
+def autocast(f, enabled=True):
+ def do_autocast(*args, **kwargs):
+ with torch.cuda.amp.autocast(
+ enabled=enabled,
+ dtype=torch.get_autocast_gpu_dtype(),
+ cache_enabled=torch.is_autocast_cache_enabled(),
+ ):
+ return f(*args, **kwargs)
+
+ return do_autocast
+
+
+def load_partial_from_config(config):
+ return partial(get_obj_from_str(config["target"]), **config.get("params", dict()))
+
+
+def log_txt_as_img(wh, xc, size=10):
+ # wh a tuple of (width, height)
+ # xc a list of captions to plot
+ b = len(xc)
+ txts = list()
+ for bi in range(b):
+ txt = Image.new("RGB", wh, color="white")
+ draw = ImageDraw.Draw(txt)
+ font = ImageFont.truetype("data/DejaVuSans.ttf", size=size)
+ nc = int(40 * (wh[0] / 256))
+ if isinstance(xc[bi], list):
+ text_seq = xc[bi][0]
+ else:
+ text_seq = xc[bi]
+ lines = "\n".join(
+ text_seq[start : start + nc] for start in range(0, len(text_seq), nc)
+ )
+
+ try:
+ draw.text((0, 0), lines, fill="black", font=font)
+ except UnicodeEncodeError:
+ print("Cant encode string for logging. Skipping.")
+
+ txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0
+ txts.append(txt)
+ txts = np.stack(txts)
+ txts = torch.tensor(txts)
+ return txts
+
+
+def partialclass(cls, *args, **kwargs):
+ class NewCls(cls):
+ __init__ = functools.partialmethod(cls.__init__, *args, **kwargs)
+
+ return NewCls
+
+
+def make_path_absolute(path):
+ fs, p = fsspec.core.url_to_fs(path)
+ if fs.protocol == "file":
+ return os.path.abspath(p)
+ return path
+
+
+def ismap(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+ return (len(x.shape) == 4) and (x.shape[1] > 3)
+
+
+def isimage(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+ return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1)
+
+
+def isheatmap(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+
+ return x.ndim == 2
+
+
+def isneighbors(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+ return x.ndim == 5 and (x.shape[2] == 3 or x.shape[2] == 1)
+
+
+def exists(x):
+ return x is not None
+
+
+def expand_dims_like(x, y):
+ while x.dim() != y.dim():
+ x = x.unsqueeze(-1)
+ return x
+
+
+def default(val, d):
+ if exists(val):
+ return val
+ return d() if isfunction(d) else d
+
+
+def mean_flat(tensor):
+ """
+ https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86
+ Take the mean over all non-batch dimensions.
+ """
+ return tensor.mean(dim=list(range(1, len(tensor.shape))))
+
+
+def count_params(model, verbose=False):
+ total_params = sum(p.numel() for p in model.parameters())
+ if verbose:
+ print(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.")
+ return total_params
+
+
+def instantiate_from_config(config):
+ if not "target" in config:
+ if config == "__is_first_stage__":
+ return None
+ elif config == "__is_unconditional__":
+ return None
+ raise KeyError("Expected key `target` to instantiate.")
+ return get_obj_from_str(config["target"])(**config.get("params", dict()))
+
+
+def get_obj_from_str(string, reload=False, invalidate_cache=True):
+ module, cls = string.rsplit(".", 1)
+ if invalidate_cache:
+ importlib.invalidate_caches()
+ if reload:
+ module_imp = importlib.import_module(module)
+ importlib.reload(module_imp)
+ return getattr(importlib.import_module(module, package=None), cls)
+
+
+def append_zero(x):
+ return torch.cat([x, x.new_zeros([1])])
+
+
+def append_dims(x, target_dims):
+ """Appends dimensions to the end of a tensor until it has target_dims dimensions."""
+ dims_to_append = target_dims - x.ndim
+ if dims_to_append < 0:
+ raise ValueError(
+ f"input has {x.ndim} dims but target_dims is {target_dims}, which is fewer than the input's"
+ )
+ return x[(...,) + (None,) * dims_to_append]
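+
+# Example: append_dims(torch.randn(4), 3) returns a view of shape (4, 1, 1).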
+
+
+def load_model_from_config(config, ckpt, verbose=True, freeze=True):
+ print(f"Loading model from {ckpt}")
+ if ckpt.endswith("ckpt"):
+ pl_sd = torch.load(ckpt, map_location="cpu")
+ if "global_step" in pl_sd:
+ print(f"Global Step: {pl_sd['global_step']}")
+ sd = pl_sd["state_dict"]
+ elif ckpt.endswith("safetensors"):
+ sd = load_safetensors(ckpt)
+ else:
+ raise NotImplementedError
+
+ model = instantiate_from_config(config.model)
+
+ m, u = model.load_state_dict(sd, strict=False)
+
+ if len(m) > 0 and verbose:
+ print("missing keys:")
+ print(m)
+ if len(u) > 0 and verbose:
+ print("unexpected keys:")
+ print(u)
+
+ if freeze:
+ for param in model.parameters():
+ param.requires_grad = False
+
+ model.eval()
+ return model
+
+
+def get_configs_path() -> str:
+ """
+ Get the `configs` directory.
+ For a working copy, this is the one in the root of the repository,
+ but for an installed copy, it's in the `sgm` package (see pyproject.toml).
+ """
+ this_dir = os.path.dirname(__file__)
+ candidates = (
+ os.path.join(this_dir, "configs"),
+ os.path.join(this_dir, "..", "configs"),
+ )
+ for candidate in candidates:
+ candidate = os.path.abspath(candidate)
+ if os.path.isdir(candidate):
+ return candidate
+ raise FileNotFoundError(f"Could not find SGM configs in {candidates}")
+
+
+def get_nested_attribute(obj, attribute_path, depth=None, return_key=False):
+ """
+ Will return the result of a recursive get attribute call.
+ E.g.:
+ a.b.c
+ = getattr(getattr(a, "b"), "c")
+ = get_nested_attribute(a, "b.c")
+ If any part of the attribute call is an integer x with current obj a, will
+ try to call a[x] instead of a.x first.
+ """
+ attributes = attribute_path.split(".")
+ if depth is not None and depth > 0:
+ attributes = attributes[:depth]
+ assert len(attributes) > 0, "At least one attribute should be selected"
+ current_attribute = obj
+ current_key = None
+ for level, attribute in enumerate(attributes):
+ current_key = ".".join(attributes[: level + 1])
+ try:
+ id_ = int(attribute)
+ current_attribute = current_attribute[id_]
+ except ValueError:
+ current_attribute = getattr(current_attribute, attribute)
+
+ return (current_attribute, current_key) if return_key else current_attribute
diff --git a/nsr/__init__.py b/nsr/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3f5ce82485e0aee06a09d3bbd1f40e5b28fa5af
--- /dev/null
+++ b/nsr/__init__.py
@@ -0,0 +1,16 @@
+# triplane, tensorF etc.
+from .train_util import TrainLoop3DRec, TrainLoop3DRecTrajVis
+# from .train_util_cvD import TrainLoop3DcvD
+
+# from .cvD.nvsD import TrainLoop3DcvD_nvsD
+# from .cvD.nvsD_nosr import TrainLoop3DcvD_nvsD_noSR
+# from .cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD, TrainLoop3DcvD_nvsD_canoD_eg3d
+# from .cvD.nvsD_canoD_mask import TrainLoop3DcvD_nvsD_canoD_canomask
+# from .cvD.canoD import TrainLoop3DcvD_canoD
+# from .cvD.nvsD_canoD_multiview import TrainLoop3DcvD_nvsD_canoD_multiview
+
+# * difffusion trainer
+from .train_util_diffusion import TrainLoop3DDiffusion
+from .train_util_diffusion_dit import TrainLoop3DDiffusionDiT
+
+from .lsgm import *
\ No newline at end of file
diff --git a/nsr/augment.py b/nsr/augment.py
new file mode 100755
index 0000000000000000000000000000000000000000..3efbf1270a94f08413075c986deeb1570a80f543
--- /dev/null
+++ b/nsr/augment.py
@@ -0,0 +1,431 @@
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# NVIDIA CORPORATION and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION is strictly prohibited.
+
+import numpy as np
+import scipy.signal
+import torch
+from torch_utils import persistence
+from torch_utils import misc
+from torch_utils.ops import upfirdn2d
+from torch_utils.ops import grid_sample_gradfix
+from torch_utils.ops import conv2d_gradfix
+
+#----------------------------------------------------------------------------
+# Coefficients of various wavelet decomposition low-pass filters.
+
+wavelets = {
+ 'haar': [0.7071067811865476, 0.7071067811865476],
+ 'db1': [0.7071067811865476, 0.7071067811865476],
+ 'db2': [-0.12940952255092145, 0.22414386804185735, 0.836516303737469, 0.48296291314469025],
+ 'db3': [0.035226291882100656, -0.08544127388224149, -0.13501102001039084, 0.4598775021193313, 0.8068915093133388, 0.3326705529509569],
+ 'db4': [-0.010597401784997278, 0.032883011666982945, 0.030841381835986965, -0.18703481171888114, -0.02798376941698385, 0.6308807679295904, 0.7148465705525415, 0.23037781330885523],
+ 'db5': [0.003335725285001549, -0.012580751999015526, -0.006241490213011705, 0.07757149384006515, -0.03224486958502952, -0.24229488706619015, 0.13842814590110342, 0.7243085284385744, 0.6038292697974729, 0.160102397974125],
+ 'db6': [-0.00107730108499558, 0.004777257511010651, 0.0005538422009938016, -0.031582039318031156, 0.02752286553001629, 0.09750160558707936, -0.12976686756709563, -0.22626469396516913, 0.3152503517092432, 0.7511339080215775, 0.4946238903983854, 0.11154074335008017],
+ 'db7': [0.0003537138000010399, -0.0018016407039998328, 0.00042957797300470274, 0.012550998556013784, -0.01657454163101562, -0.03802993693503463, 0.0806126091510659, 0.07130921926705004, -0.22403618499416572, -0.14390600392910627, 0.4697822874053586, 0.7291320908465551, 0.39653931948230575, 0.07785205408506236],
+ 'db8': [-0.00011747678400228192, 0.0006754494059985568, -0.0003917403729959771, -0.00487035299301066, 0.008746094047015655, 0.013981027917015516, -0.04408825393106472, -0.01736930100202211, 0.128747426620186, 0.00047248457399797254, -0.2840155429624281, -0.015829105256023893, 0.5853546836548691, 0.6756307362980128, 0.3128715909144659, 0.05441584224308161],
+ 'sym2': [-0.12940952255092145, 0.22414386804185735, 0.836516303737469, 0.48296291314469025],
+ 'sym3': [0.035226291882100656, -0.08544127388224149, -0.13501102001039084, 0.4598775021193313, 0.8068915093133388, 0.3326705529509569],
+ 'sym4': [-0.07576571478927333, -0.02963552764599851, 0.49761866763201545, 0.8037387518059161, 0.29785779560527736, -0.09921954357684722, -0.012603967262037833, 0.0322231006040427],
+ 'sym5': [0.027333068345077982, 0.029519490925774643, -0.039134249302383094, 0.1993975339773936, 0.7234076904024206, 0.6339789634582119, 0.01660210576452232, -0.17532808990845047, -0.021101834024758855, 0.019538882735286728],
+ 'sym6': [0.015404109327027373, 0.0034907120842174702, -0.11799011114819057, -0.048311742585633, 0.4910559419267466, 0.787641141030194, 0.3379294217276218, -0.07263752278646252, -0.021060292512300564, 0.04472490177066578, 0.0017677118642428036, -0.007800708325034148],
+ 'sym7': [0.002681814568257878, -0.0010473848886829163, -0.01263630340325193, 0.03051551316596357, 0.0678926935013727, -0.049552834937127255, 0.017441255086855827, 0.5361019170917628, 0.767764317003164, 0.2886296317515146, -0.14004724044296152, -0.10780823770381774, 0.004010244871533663, 0.010268176708511255],
+ 'sym8': [-0.0033824159510061256, -0.0005421323317911481, 0.03169508781149298, 0.007607487324917605, -0.1432942383508097, -0.061273359067658524, 0.4813596512583722, 0.7771857517005235, 0.3644418948353314, -0.05194583810770904, -0.027219029917056003, 0.049137179673607506, 0.003808752013890615, -0.01495225833704823, -0.0003029205147213668, 0.0018899503327594609],
+}
+
+#----------------------------------------------------------------------------
+# Helpers for constructing transformation matrices.
+
+def matrix(*rows, device=None):
+ assert all(len(row) == len(rows[0]) for row in rows)
+ elems = [x for row in rows for x in row]
+ ref = [x for x in elems if isinstance(x, torch.Tensor)]
+ if len(ref) == 0:
+ return misc.constant(np.asarray(rows), device=device)
+ assert device is None or device == ref[0].device
+ elems = [x if isinstance(x, torch.Tensor) else misc.constant(x, shape=ref[0].shape, device=ref[0].device) for x in elems]
+ return torch.stack(elems, dim=-1).reshape(ref[0].shape + (len(rows), -1))
+
+def translate2d(tx, ty, **kwargs):
+ return matrix(
+ [1, 0, tx],
+ [0, 1, ty],
+ [0, 0, 1],
+ **kwargs)
+
+def translate3d(tx, ty, tz, **kwargs):
+ return matrix(
+ [1, 0, 0, tx],
+ [0, 1, 0, ty],
+ [0, 0, 1, tz],
+ [0, 0, 0, 1],
+ **kwargs)
+
+def scale2d(sx, sy, **kwargs):
+ return matrix(
+ [sx, 0, 0],
+ [0, sy, 0],
+ [0, 0, 1],
+ **kwargs)
+
+def scale3d(sx, sy, sz, **kwargs):
+ return matrix(
+ [sx, 0, 0, 0],
+ [0, sy, 0, 0],
+ [0, 0, sz, 0],
+ [0, 0, 0, 1],
+ **kwargs)
+
+def rotate2d(theta, **kwargs):
+ return matrix(
+ [torch.cos(theta), torch.sin(-theta), 0],
+ [torch.sin(theta), torch.cos(theta), 0],
+ [0, 0, 1],
+ **kwargs)
+
+def rotate3d(v, theta, **kwargs):
+ vx = v[..., 0]; vy = v[..., 1]; vz = v[..., 2]
+ s = torch.sin(theta); c = torch.cos(theta); cc = 1 - c
+ return matrix(
+ [vx*vx*cc+c, vx*vy*cc-vz*s, vx*vz*cc+vy*s, 0],
+ [vy*vx*cc+vz*s, vy*vy*cc+c, vy*vz*cc-vx*s, 0],
+ [vz*vx*cc-vy*s, vz*vy*cc+vx*s, vz*vz*cc+c, 0],
+ [0, 0, 0, 1],
+ **kwargs)
+
+def translate2d_inv(tx, ty, **kwargs):
+ return translate2d(-tx, -ty, **kwargs)
+
+def scale2d_inv(sx, sy, **kwargs):
+ return scale2d(1 / sx, 1 / sy, **kwargs)
+
+def rotate2d_inv(theta, **kwargs):
+ return rotate2d(-theta, **kwargs)
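+
+# All helpers above return homogeneous matrices (3x3 for 2D, 4x4 for 3D), so
+# transforms compose by matrix multiplication, e.g. G_inv @ scale2d_inv(s, s)
+# as used in AugmentPipe.forward below.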
+
+#----------------------------------------------------------------------------
+# Versatile image augmentation pipeline from the paper
+# "Training Generative Adversarial Networks with Limited Data".
+#
+# All augmentations are disabled by default; individual augmentations can
+# be enabled by setting their probability multipliers to 1.
+
+@persistence.persistent_class
+class AugmentPipe(torch.nn.Module):
+ def __init__(self,
+ xflip=0, rotate90=0, xint=0, xint_max=0.125,
+ scale=0, rotate=0, aniso=0, xfrac=0, scale_std=0.2, rotate_max=1, aniso_std=0.2, xfrac_std=0.125,
+ brightness=0, contrast=0, lumaflip=0, hue=0, saturation=0, brightness_std=0.2, contrast_std=0.5, hue_max=1, saturation_std=1,
+ imgfilter=0, imgfilter_bands=[1,1,1,1], imgfilter_std=1,
+ noise=0, cutout=0, noise_std=0.1, cutout_size=0.5,
+ ):
+ super().__init__()
+ self.register_buffer('p', torch.ones([])) # Overall multiplier for augmentation probability.
+
+ # Pixel blitting.
+ self.xflip = float(xflip) # Probability multiplier for x-flip.
+ self.rotate90 = float(rotate90) # Probability multiplier for 90 degree rotations.
+ self.xint = float(xint) # Probability multiplier for integer translation.
+ self.xint_max = float(xint_max) # Range of integer translation, relative to image dimensions.
+
+ # General geometric transformations.
+ self.scale = float(scale) # Probability multiplier for isotropic scaling.
+ self.rotate = float(rotate) # Probability multiplier for arbitrary rotation.
+ self.aniso = float(aniso) # Probability multiplier for anisotropic scaling.
+ self.xfrac = float(xfrac) # Probability multiplier for fractional translation.
+ self.scale_std = float(scale_std) # Log2 standard deviation of isotropic scaling.
+ self.rotate_max = float(rotate_max) # Range of arbitrary rotation, 1 = full circle.
+ self.aniso_std = float(aniso_std) # Log2 standard deviation of anisotropic scaling.
+ self.xfrac_std = float(xfrac_std) # Standard deviation of frational translation, relative to image dimensions.
+
+ # Color transformations.
+ self.brightness = float(brightness) # Probability multiplier for brightness.
+ self.contrast = float(contrast) # Probability multiplier for contrast.
+ self.lumaflip = float(lumaflip) # Probability multiplier for luma flip.
+ self.hue = float(hue) # Probability multiplier for hue rotation.
+ self.saturation = float(saturation) # Probability multiplier for saturation.
+ self.brightness_std = float(brightness_std) # Standard deviation of brightness.
+ self.contrast_std = float(contrast_std) # Log2 standard deviation of contrast.
+ self.hue_max = float(hue_max) # Range of hue rotation, 1 = full circle.
+ self.saturation_std = float(saturation_std) # Log2 standard deviation of saturation.
+
+ # Image-space filtering.
+ self.imgfilter = float(imgfilter) # Probability multiplier for image-space filtering.
+ self.imgfilter_bands = list(imgfilter_bands) # Probability multipliers for individual frequency bands.
+ self.imgfilter_std = float(imgfilter_std) # Log2 standard deviation of image-space filter amplification.
+
+ # Image-space corruptions.
+ self.noise = float(noise) # Probability multiplier for additive RGB noise.
+ self.cutout = float(cutout) # Probability multiplier for cutout.
+ self.noise_std = float(noise_std) # Standard deviation of additive RGB noise.
+ self.cutout_size = float(cutout_size) # Size of the cutout rectangle, relative to image dimensions.
+
+ # Setup orthogonal lowpass filter for geometric augmentations.
+ self.register_buffer('Hz_geom', upfirdn2d.setup_filter(wavelets['sym6']))
+
+ # Construct filter bank for image-space filtering.
+ Hz_lo = np.asarray(wavelets['sym2']) # H(z)
+ Hz_hi = Hz_lo * ((-1) ** np.arange(Hz_lo.size)) # H(-z)
+ Hz_lo2 = np.convolve(Hz_lo, Hz_lo[::-1]) / 2 # H(z) * H(z^-1) / 2
+ Hz_hi2 = np.convolve(Hz_hi, Hz_hi[::-1]) / 2 # H(-z) * H(-z^-1) / 2
+ Hz_fbank = np.eye(4, 1) # Bandpass(H(z), b_i)
+ for i in range(1, Hz_fbank.shape[0]):
+ Hz_fbank = np.dstack([Hz_fbank, np.zeros_like(Hz_fbank)]).reshape(Hz_fbank.shape[0], -1)[:, :-1]
+ Hz_fbank = scipy.signal.convolve(Hz_fbank, [Hz_lo2])
+ Hz_fbank[i, (Hz_fbank.shape[1] - Hz_hi2.size) // 2 : (Hz_fbank.shape[1] + Hz_hi2.size) // 2] += Hz_hi2
+ self.register_buffer('Hz_fbank', torch.as_tensor(Hz_fbank, dtype=torch.float32))
+
+ def forward(self, images, debug_percentile=None):
+ assert isinstance(images, torch.Tensor) and images.ndim == 4
+ batch_size, num_channels, height, width = images.shape
+ device = images.device
+ if debug_percentile is not None:
+ debug_percentile = torch.as_tensor(debug_percentile, dtype=torch.float32, device=device)
+
+ # -------------------------------------
+ # Select parameters for pixel blitting.
+ # -------------------------------------
+
+ # Initialize inverse homogeneous 2D transform: G_inv @ pixel_out ==> pixel_in
+ I_3 = torch.eye(3, device=device)
+ G_inv = I_3
+
+ # Apply x-flip with probability (xflip * strength).
+ if self.xflip > 0:
+ i = torch.floor(torch.rand([batch_size], device=device) * 2)
+ i = torch.where(torch.rand([batch_size], device=device) < self.xflip * self.p, i, torch.zeros_like(i))
+ if debug_percentile is not None:
+ i = torch.full_like(i, torch.floor(debug_percentile * 2))
+ G_inv = G_inv @ scale2d_inv(1 - 2 * i, 1)
+
+ # Apply 90 degree rotations with probability (rotate90 * strength).
+ if self.rotate90 > 0:
+ i = torch.floor(torch.rand([batch_size], device=device) * 4)
+ i = torch.where(torch.rand([batch_size], device=device) < self.rotate90 * self.p, i, torch.zeros_like(i))
+ if debug_percentile is not None:
+ i = torch.full_like(i, torch.floor(debug_percentile * 4))
+ G_inv = G_inv @ rotate2d_inv(-np.pi / 2 * i)
+
+ # Apply integer translation with probability (xint * strength).
+ if self.xint > 0:
+ t = (torch.rand([batch_size, 2], device=device) * 2 - 1) * self.xint_max
+ t = torch.where(torch.rand([batch_size, 1], device=device) < self.xint * self.p, t, torch.zeros_like(t))
+ if debug_percentile is not None:
+ t = torch.full_like(t, (debug_percentile * 2 - 1) * self.xint_max)
+ G_inv = G_inv @ translate2d_inv(torch.round(t[:,0] * width), torch.round(t[:,1] * height))
+
+ # --------------------------------------------------------
+ # Select parameters for general geometric transformations.
+ # --------------------------------------------------------
+
+ # Apply isotropic scaling with probability (scale * strength).
+ if self.scale > 0:
+ s = torch.exp2(torch.randn([batch_size], device=device) * self.scale_std)
+ s = torch.where(torch.rand([batch_size], device=device) < self.scale * self.p, s, torch.ones_like(s))
+ if debug_percentile is not None:
+ s = torch.full_like(s, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.scale_std))
+ G_inv = G_inv @ scale2d_inv(s, s)
+
+ # Apply pre-rotation with probability p_rot.
+ p_rot = 1 - torch.sqrt((1 - self.rotate * self.p).clamp(0, 1)) # P(pre OR post) = p
+ if self.rotate > 0:
+ theta = (torch.rand([batch_size], device=device) * 2 - 1) * np.pi * self.rotate_max
+ theta = torch.where(torch.rand([batch_size], device=device) < p_rot, theta, torch.zeros_like(theta))
+ if debug_percentile is not None:
+ theta = torch.full_like(theta, (debug_percentile * 2 - 1) * np.pi * self.rotate_max)
+ G_inv = G_inv @ rotate2d_inv(-theta) # Before anisotropic scaling.
+
+ # Apply anisotropic scaling with probability (aniso * strength).
+ if self.aniso > 0:
+ s = torch.exp2(torch.randn([batch_size], device=device) * self.aniso_std)
+ s = torch.where(torch.rand([batch_size], device=device) < self.aniso * self.p, s, torch.ones_like(s))
+ if debug_percentile is not None:
+ s = torch.full_like(s, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.aniso_std))
+ G_inv = G_inv @ scale2d_inv(s, 1 / s)
+
+ # Apply post-rotation with probability p_rot.
+ if self.rotate > 0:
+ theta = (torch.rand([batch_size], device=device) * 2 - 1) * np.pi * self.rotate_max
+ theta = torch.where(torch.rand([batch_size], device=device) < p_rot, theta, torch.zeros_like(theta))
+ if debug_percentile is not None:
+ theta = torch.zeros_like(theta)
+ G_inv = G_inv @ rotate2d_inv(-theta) # After anisotropic scaling.
+
+ # Apply fractional translation with probability (xfrac * strength).
+ if self.xfrac > 0:
+ t = torch.randn([batch_size, 2], device=device) * self.xfrac_std
+ t = torch.where(torch.rand([batch_size, 1], device=device) < self.xfrac * self.p, t, torch.zeros_like(t))
+ if debug_percentile is not None:
+ t = torch.full_like(t, torch.erfinv(debug_percentile * 2 - 1) * self.xfrac_std)
+ G_inv = G_inv @ translate2d_inv(t[:,0] * width, t[:,1] * height)
+
+ # ----------------------------------
+ # Execute geometric transformations.
+ # ----------------------------------
+
+ # Execute if the transform is not identity.
+ if G_inv is not I_3:
+
+ # Calculate padding.
+ cx = (width - 1) / 2
+ cy = (height - 1) / 2
+ cp = matrix([-cx, -cy, 1], [cx, -cy, 1], [cx, cy, 1], [-cx, cy, 1], device=device) # [idx, xyz]
+ cp = G_inv @ cp.t() # [batch, xyz, idx]
+ Hz_pad = self.Hz_geom.shape[0] // 4
+ margin = cp[:, :2, :].permute(1, 0, 2).flatten(1) # [xy, batch * idx]
+ margin = torch.cat([-margin, margin]).max(dim=1).values # [x0, y0, x1, y1]
+ margin = margin + misc.constant([Hz_pad * 2 - cx, Hz_pad * 2 - cy] * 2, device=device)
+ margin = margin.max(misc.constant([0, 0] * 2, device=device))
+ margin = margin.min(misc.constant([width-1, height-1] * 2, device=device))
+ mx0, my0, mx1, my1 = margin.ceil().to(torch.int32)
+
+ # Pad image and adjust origin.
+ images = torch.nn.functional.pad(input=images, pad=[mx0,mx1,my0,my1], mode='reflect')
+ G_inv = translate2d((mx0 - mx1) / 2, (my0 - my1) / 2) @ G_inv
+
+ # Upsample.
+ images = upfirdn2d.upsample2d(x=images, f=self.Hz_geom, up=2)
+ G_inv = scale2d(2, 2, device=device) @ G_inv @ scale2d_inv(2, 2, device=device)
+ G_inv = translate2d(-0.5, -0.5, device=device) @ G_inv @ translate2d_inv(-0.5, -0.5, device=device)
+
+ # Execute transformation.
+ shape = [batch_size, num_channels, (height + Hz_pad * 2) * 2, (width + Hz_pad * 2) * 2]
+ G_inv = scale2d(2 / images.shape[3], 2 / images.shape[2], device=device) @ G_inv @ scale2d_inv(2 / shape[3], 2 / shape[2], device=device)
+ grid = torch.nn.functional.affine_grid(theta=G_inv[:,:2,:], size=shape, align_corners=False)
+ images = grid_sample_gradfix.grid_sample(images, grid)
+
+ # Downsample and crop.
+ images = upfirdn2d.downsample2d(x=images, f=self.Hz_geom, down=2, padding=-Hz_pad*2, flip_filter=True)
+
+ # --------------------------------------------
+ # Select parameters for color transformations.
+ # --------------------------------------------
+
+ # Initialize homogeneous 3D transformation matrix: C @ color_in ==> color_out
+ I_4 = torch.eye(4, device=device)
+ C = I_4
+
+ # Apply brightness with probability (brightness * strength).
+ if self.brightness > 0:
+ b = torch.randn([batch_size], device=device) * self.brightness_std
+ b = torch.where(torch.rand([batch_size], device=device) < self.brightness * self.p, b, torch.zeros_like(b))
+ if debug_percentile is not None:
+ b = torch.full_like(b, torch.erfinv(debug_percentile * 2 - 1) * self.brightness_std)
+ C = translate3d(b, b, b) @ C
+
+ # Apply contrast with probability (contrast * strength).
+ if self.contrast > 0:
+ c = torch.exp2(torch.randn([batch_size], device=device) * self.contrast_std)
+ c = torch.where(torch.rand([batch_size], device=device) < self.contrast * self.p, c, torch.ones_like(c))
+ if debug_percentile is not None:
+ c = torch.full_like(c, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.contrast_std))
+ C = scale3d(c, c, c) @ C
+
+ # Apply luma flip with probability (lumaflip * strength).
+ v = misc.constant(np.asarray([1, 1, 1, 0]) / np.sqrt(3), device=device) # Luma axis.
+ if self.lumaflip > 0:
+ i = torch.floor(torch.rand([batch_size, 1, 1], device=device) * 2)
+ i = torch.where(torch.rand([batch_size, 1, 1], device=device) < self.lumaflip * self.p, i, torch.zeros_like(i))
+ if debug_percentile is not None:
+ i = torch.full_like(i, torch.floor(debug_percentile * 2))
+ C = (I_4 - 2 * v.ger(v) * i) @ C # Householder reflection.
+
+ # Apply hue rotation with probability (hue * strength).
+ if self.hue > 0 and num_channels > 1:
+ theta = (torch.rand([batch_size], device=device) * 2 - 1) * np.pi * self.hue_max
+ theta = torch.where(torch.rand([batch_size], device=device) < self.hue * self.p, theta, torch.zeros_like(theta))
+ if debug_percentile is not None:
+ theta = torch.full_like(theta, (debug_percentile * 2 - 1) * np.pi * self.hue_max)
+ C = rotate3d(v, theta) @ C # Rotate around v.
+
+ # Apply saturation with probability (saturation * strength).
+ if self.saturation > 0 and num_channels > 1:
+ s = torch.exp2(torch.randn([batch_size, 1, 1], device=device) * self.saturation_std)
+ s = torch.where(torch.rand([batch_size, 1, 1], device=device) < self.saturation * self.p, s, torch.ones_like(s))
+ if debug_percentile is not None:
+ s = torch.full_like(s, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.saturation_std))
+ C = (v.ger(v) + (I_4 - v.ger(v)) * s) @ C
+
+ # ------------------------------
+ # Execute color transformations.
+ # ------------------------------
+
+ # Execute if the transform is not identity.
+ if C is not I_4:
+ images = images.reshape([batch_size, num_channels, height * width])
+ if num_channels == 3:
+ images = C[:, :3, :3] @ images + C[:, :3, 3:]
+ elif num_channels == 1:
+ C = C[:, :3, :].mean(dim=1, keepdims=True)
+ images = images * C[:, :, :3].sum(dim=2, keepdims=True) + C[:, :, 3:]
+ else:
+ raise ValueError('Image must be RGB (3 channels) or L (1 channel)')
+ images = images.reshape([batch_size, num_channels, height, width])
+
+ # ----------------------
+ # Image-space filtering.
+ # ----------------------
+
+ if self.imgfilter > 0:
+ num_bands = self.Hz_fbank.shape[0]
+ assert len(self.imgfilter_bands) == num_bands
+ expected_power = misc.constant(np.array([10, 1, 1, 1]) / 13, device=device) # Expected power spectrum (1/f).
+
+ # Apply amplification for each band with probability (imgfilter * strength * band_strength).
+ g = torch.ones([batch_size, num_bands], device=device) # Global gain vector (identity).
+ for i, band_strength in enumerate(self.imgfilter_bands):
+ t_i = torch.exp2(torch.randn([batch_size], device=device) * self.imgfilter_std)
+ t_i = torch.where(torch.rand([batch_size], device=device) < self.imgfilter * self.p * band_strength, t_i, torch.ones_like(t_i))
+ if debug_percentile is not None:
+ t_i = torch.full_like(t_i, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.imgfilter_std)) if band_strength > 0 else torch.ones_like(t_i)
+ t = torch.ones([batch_size, num_bands], device=device) # Temporary gain vector.
+ t[:, i] = t_i # Replace i'th element.
+ t = t / (expected_power * t.square()).sum(dim=-1, keepdims=True).sqrt() # Normalize power.
+ g = g * t # Accumulate into global gain.
+
+ # Construct combined amplification filter.
+ Hz_prime = g @ self.Hz_fbank # [batch, tap]
+ Hz_prime = Hz_prime.unsqueeze(1).repeat([1, num_channels, 1]) # [batch, channels, tap]
+ Hz_prime = Hz_prime.reshape([batch_size * num_channels, 1, -1]) # [batch * channels, 1, tap]
+
+ # Apply filter.
+ p = self.Hz_fbank.shape[1] // 2
+ images = images.reshape([1, batch_size * num_channels, height, width])
+ images = torch.nn.functional.pad(input=images, pad=[p,p,p,p], mode='reflect')
+ images = conv2d_gradfix.conv2d(input=images, weight=Hz_prime.unsqueeze(2), groups=batch_size*num_channels)
+ images = conv2d_gradfix.conv2d(input=images, weight=Hz_prime.unsqueeze(3), groups=batch_size*num_channels)
+ images = images.reshape([batch_size, num_channels, height, width])
+
+ # ------------------------
+ # Image-space corruptions.
+ # ------------------------
+
+ # Apply additive RGB noise with probability (noise * strength).
+ if self.noise > 0:
+ sigma = torch.randn([batch_size, 1, 1, 1], device=device).abs() * self.noise_std
+ sigma = torch.where(torch.rand([batch_size, 1, 1, 1], device=device) < self.noise * self.p, sigma, torch.zeros_like(sigma))
+ if debug_percentile is not None:
+ sigma = torch.full_like(sigma, torch.erfinv(debug_percentile) * self.noise_std)
+ images = images + torch.randn([batch_size, num_channels, height, width], device=device) * sigma
+
+ # Apply cutout with probability (cutout * strength).
+ if self.cutout > 0:
+ size = torch.full([batch_size, 2, 1, 1, 1], self.cutout_size, device=device)
+ size = torch.where(torch.rand([batch_size, 1, 1, 1, 1], device=device) < self.cutout * self.p, size, torch.zeros_like(size))
+ center = torch.rand([batch_size, 2, 1, 1, 1], device=device)
+ if debug_percentile is not None:
+ size = torch.full_like(size, self.cutout_size)
+ center = torch.full_like(center, debug_percentile)
+ coord_x = torch.arange(width, device=device).reshape([1, 1, 1, -1])
+ coord_y = torch.arange(height, device=device).reshape([1, 1, -1, 1])
+ mask_x = (((coord_x + 0.5) / width - center[:, 0]).abs() >= size[:, 0] / 2)
+ mask_y = (((coord_y + 0.5) / height - center[:, 1]).abs() >= size[:, 1] / 2)
+ mask = torch.logical_or(mask_x, mask_y).to(torch.float32)
+ images = images * mask
+
+ return images
+
+#----------------------------------------------------------------------------
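+# Hedged usage sketch (not part of the original file; names are illustrative).
+# In ADA-style training the pipe is applied to images entering the discriminator,
+# and the buffer `p` is adjusted at runtime to control overall augmentation strength:
+#
+#   augment_pipe = AugmentPipe(xflip=1, rotate90=1, xint=1, scale=1, rotate=1,
+#                              aniso=1, xfrac=1, brightness=1, contrast=1,
+#                              lumaflip=1, hue=1, saturation=1).to(device)
+#   augment_pipe.p.copy_(torch.as_tensor(0.2))  # overall probability multiplier
+#   images_aug = augment_pipe(images)           # images: [N, C, H, W], range [-1, 1]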
diff --git a/nsr/camera_utils.py b/nsr/camera_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3c5fa32397e14090246aa518035f2128525d936
--- /dev/null
+++ b/nsr/camera_utils.py
@@ -0,0 +1,286 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""
+Helper functions for constructing camera parameter matrices. Primarily used in visualization and inference scripts.
+"""
+
+import math
+
+import torch as th
+import torch
+import torch.nn as nn
+import numpy as np
+
+from nsr.volumetric_rendering import math_utils
+
+
+class GaussianCameraPoseSampler:
+ """
+ Samples pitch and yaw from a Gaussian distribution and returns a camera pose.
+ Camera is specified as looking at the origin.
+ If horizontal and vertical stddev (specified in radians) are zero, gives a
+ deterministic camera pose with yaw=horizontal_mean, pitch=vertical_mean.
+ The coordinate system is specified with y-up, z-forward, x-left.
+ Horizontal mean is the azimuthal angle (rotation around y axis) in radians,
+ vertical mean is the polar angle (angle from the y axis) in radians.
+ A point along the z-axis has azimuthal_angle=0, polar_angle=pi/2.
+
+ Example:
+ For a camera pose looking at the origin with the camera at position [0, 0, 1]:
+ cam2world = GaussianCameraPoseSampler.sample(math.pi/2, math.pi/2, radius=1)
+ """
+
+ @staticmethod
+ def sample(horizontal_mean,
+ vertical_mean,
+ horizontal_stddev=0,
+ vertical_stddev=0,
+ radius=1,
+ batch_size=1,
+ device='cpu'):
+ h = torch.randn((batch_size, 1),
+ device=device) * horizontal_stddev + horizontal_mean
+ v = torch.randn(
+ (batch_size, 1), device=device) * vertical_stddev + vertical_mean
+ v = torch.clamp(v, 1e-5, math.pi - 1e-5)
+
+ theta = h
+ v = v / math.pi
+ phi = torch.arccos(1 - 2 * v)
+
+ camera_origins = torch.zeros((batch_size, 3), device=device)
+
+ camera_origins[:, 0:1] = radius * torch.sin(phi) * torch.cos(math.pi -
+ theta)
+ camera_origins[:, 2:3] = radius * torch.sin(phi) * torch.sin(math.pi -
+ theta)
+ camera_origins[:, 1:2] = radius * torch.cos(phi)
+
+ forward_vectors = math_utils.normalize_vecs(-camera_origins)
+ return create_cam2world_matrix(forward_vectors, camera_origins)
+
+
+class LookAtPoseSampler:
+ """
+ Same as GaussianCameraPoseSampler, except the
+ camera is specified as looking at 'lookat_position', a 3-vector.
+
+ Example:
+ For a camera pose looking at the origin with the camera at position [0, 0, 1]:
+ cam2world = LookAtPoseSampler.sample(math.pi/2, math.pi/2, torch.tensor([0, 0, 0]), radius=1)
+ """
+
+ @staticmethod
+ def sample(horizontal_mean,
+ vertical_mean,
+ lookat_position,
+ horizontal_stddev=0.,
+ vertical_stddev=0.,
+ radius=1,
+ batch_size=1,
+ device='cpu'):
+ h = torch.randn((batch_size, 1),
+ device=device) * horizontal_stddev + horizontal_mean
+ v = torch.randn(
+ (batch_size, 1), device=device) * vertical_stddev + vertical_mean
+ v = torch.clamp(v, 1e-5, math.pi - 1e-5)
+
+ theta = h
+ v = v / math.pi
+ phi = torch.arccos(1 - 2 * v)
+
+ camera_origins = torch.zeros((batch_size, 3), device=device)
+
+ camera_origins[:, 0:1] = radius * torch.sin(phi) * torch.cos(math.pi -
+ theta)
+ camera_origins[:, 2:3] = radius * torch.sin(phi) * torch.sin(math.pi -
+ theta)
+ camera_origins[:, 1:2] = radius * torch.cos(phi)
+
+ # forward_vectors = math_utils.normalize_vecs(-camera_origins)
+ forward_vectors = math_utils.normalize_vecs(lookat_position -
+ camera_origins)
+ return create_cam2world_matrix(forward_vectors, camera_origins)
+
+
+class UniformCameraPoseSampler:
+ """
+ Same as GaussianCameraPoseSampler, except the
+ pose is sampled from a uniform distribution with range +-[horizontal/vertical]_stddev.
+
+ Example:
+ For a batch of random camera poses looking at the origin with yaw sampled from [-pi/2, +pi/2] radians:
+
+ cam2worlds = UniformCameraPoseSampler.sample(math.pi/2, math.pi/2, horizontal_stddev=math.pi/2, radius=1, batch_size=16)
+ """
+
+ @staticmethod
+ def sample(horizontal_mean,
+ vertical_mean,
+ horizontal_stddev=0,
+ vertical_stddev=0,
+ radius=1,
+ batch_size=1,
+ device='cpu'):
+ h = (torch.rand((batch_size, 1), device=device) * 2 -
+ 1) * horizontal_stddev + horizontal_mean
+ v = (torch.rand((batch_size, 1), device=device) * 2 -
+ 1) * vertical_stddev + vertical_mean
+ v = torch.clamp(v, 1e-5, math.pi - 1e-5)
+
+ theta = h
+ v = v / math.pi
+ phi = torch.arccos(1 - 2 * v)
+
+ camera_origins = torch.zeros((batch_size, 3), device=device)
+
+ camera_origins[:, 0:1] = radius * torch.sin(phi) * torch.cos(math.pi -
+ theta)
+ camera_origins[:, 2:3] = radius * torch.sin(phi) * torch.sin(math.pi -
+ theta)
+ camera_origins[:, 1:2] = radius * torch.cos(phi)
+
+ forward_vectors = math_utils.normalize_vecs(-camera_origins)
+ return create_cam2world_matrix(forward_vectors, camera_origins)
+
+
+def create_cam2world_matrix(forward_vector, origin):
+ """
+ Takes in the direction the camera is pointing and the camera origin and returns a cam2world matrix.
+ Works on batches of forward_vectors, origins. Assumes y-axis is up and that there is no camera roll.
+ """
+
+ forward_vector = math_utils.normalize_vecs(forward_vector)
+ up_vector = torch.tensor([0, 1, 0],
+ dtype=torch.float,
+ device=origin.device).expand_as(forward_vector)
+
+ right_vector = -math_utils.normalize_vecs(
+ torch.cross(up_vector, forward_vector, dim=-1))
+ up_vector = math_utils.normalize_vecs(
+ torch.cross(forward_vector, right_vector, dim=-1))
+
+ rotation_matrix = torch.eye(4, device=origin.device).unsqueeze(0).repeat(
+ forward_vector.shape[0], 1, 1)
+ rotation_matrix[:, :3, :3] = torch.stack(
+ (right_vector, up_vector, forward_vector), axis=-1)
+
+ translation_matrix = torch.eye(4,
+ device=origin.device).unsqueeze(0).repeat(
+ forward_vector.shape[0], 1, 1)
+ translation_matrix[:, :3, 3] = origin
+ cam2world = (translation_matrix @ rotation_matrix)[:, :, :]
+ assert (cam2world.shape[1:] == (4, 4))
+ return cam2world
+
+
+def FOV_to_intrinsics(fov_degrees, device='cpu'):
+ """
+ Creates a 3x3 camera intrinsics matrix from the camera field of view, specified in degrees.
+ Note the intrinsics are returned normalized by the image size, rather than in pixel units.
+ Assumes principal point is at image center.
+ """
+
+ focal_length = float(1 / (math.tan(fov_degrees * 3.14159 / 360) * 1.414))
+ intrinsics = torch.tensor(
+ [[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]],
+ device=device)
+ return intrinsics
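+# Hedged usage sketch (illustrative, not part of the original file). The focal
+# length above is 1 / (tan(fov/2) * sqrt(2)), with 3.14159 and 1.414 standing in
+# for pi and sqrt(2). A sampled pose and normalized intrinsics can be packed into
+# the 25-dim camera vector used by uni_mesh_path below (16 + 9 values):
+#
+#   cam2world = LookAtPoseSampler.sample(math.pi / 2, math.pi / 2,
+#                                        torch.zeros(3), radius=1.8)   # [1, 4, 4]
+#   intrinsics = FOV_to_intrinsics(40.0)                               # [3, 3]
+#   c = torch.cat([cam2world.reshape(1, 16),
+#                  intrinsics.reshape(1, 9)], dim=-1)                  # [1, 25]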
+
+def generate_input_camera(r, poses, device='cpu', fov=30):
+ def normalize_vecs(vectors): return vectors / (torch.norm(vectors, dim=-1, keepdim=True))
+ poses = np.deg2rad(poses)
+ poses = torch.tensor(poses).float()
+ pitch = poses[:, 0]
+ yaw = poses[:, 1]
+
+ z = r*torch.sin(pitch)
+ x = r*torch.cos(pitch)*torch.cos(yaw)
+ y = r*torch.cos(pitch)*torch.sin(yaw)
+ cam_pos = torch.stack([x, y, z], dim=-1).reshape(z.shape[0], -1).to(device)
+
+ forward_vector = normalize_vecs(-cam_pos)
+ up_vector = torch.tensor([0, 0, -1], dtype=torch.float,
+ device=device).reshape(-1).expand_as(forward_vector)
+ left_vector = normalize_vecs(torch.cross(up_vector, forward_vector,
+ dim=-1))
+
+ up_vector = normalize_vecs(torch.cross(forward_vector, left_vector,
+ dim=-1))
+ rotate = torch.stack(
+ (left_vector, up_vector, forward_vector), dim=-1)
+
+ rotation_matrix = torch.eye(4, device=device).unsqueeze(0).repeat(forward_vector.shape[0], 1, 1)
+ rotation_matrix[:, :3, :3] = rotate
+
+ translation_matrix = torch.eye(4, device=device).unsqueeze(0).repeat(forward_vector.shape[0], 1, 1)
+ translation_matrix[:, :3, 3] = cam_pos
+ cam2world = translation_matrix @ rotation_matrix
+
+ fx = 0.5/np.tan(np.deg2rad(fov/2))
+ fxfycxcy = torch.tensor([fx, fx, 0.5, 0.5], dtype=rotate.dtype, device=device)
+
+ return cam2world, fxfycxcy
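+# Hedged usage sketch (illustrative): two cameras on a radius-1.8 sphere, both at
+# 30-degree elevation, at 0 and 90 degrees azimuth; poses are given in degrees as
+# [pitch, yaw] pairs:
+#
+#   cam2world, fxfycxcy = generate_input_camera(r=1.8, poses=[[30, 0], [30, 90]],
+#                                               fov=30)
+#   # cam2world: [2, 4, 4]; fxfycxcy: normalized (fx, fy, cx, cy), shared by all views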
+
+def uni_mesh_path(frame_number=16, radius=1.8):
+ azimuths = []
+ elevations = []
+
+ # for elevation in [0,-30,30]:
+ # for elevation in [0,-30,30, -65, 65]:
+ # for elevation in [0,-30,30, -60, 60]:
+ for elevation in [60,30, 0, -30, -60]:
+
+ for i in range(frame_number): # 1030 * 5 * 10, for FID 50K
+
+ # azi, elevation = sample_uniform_cameras_on_sphere()
+ # azi, elevation = azi[0] / np.pi * 180, elevation[0] / np.pi * 180
+ # azi, elevation = azi[0] / np.pi * 180, 0
+ azi = i / frame_number * 360 # [0, 2pi]
+ azimuths.append(azi)
+ elevations.append(elevation)
+
+ azimuths = np.array(azimuths)
+ elevations = np.array(elevations)
+
+ all_frame_number = azimuths.shape[0]
+
+ # azimuths = np.array(list(range(0,360,30))).astype(float)
+ # frame_number = azimuths.shape[0]
+ # elevations = np.array([10]*azimuths.shape[0]).astype(float)
+
+ zero123pp_pose, _ = generate_input_camera(radius, [[elevations[i], azimuths[i]] for i in range(all_frame_number)], fov=30)
+ K = th.Tensor([1.3889, 0.0000, 0.5000, 0.0000, 1.3889, 0.5000, 0.0000, 0.0000, 0.0039]).to(zero123pp_pose) # intrinsics are kept fixed across all frames
+ mesh_pathes = th.cat([zero123pp_pose.reshape(all_frame_number,-1), K.unsqueeze(0).repeat(all_frame_number,1)], dim=-1).cpu().numpy()
+
+ return mesh_pathes
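+# Layout note (inferred from the code above): the returned array has shape
+# [5 * frame_number, 25]; each row is a flattened 4x4 cam2world matrix (16 values)
+# followed by the flattened 3x3 intrinsics K (9 values), one row per
+# (elevation, azimuth) pair.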
+
+
+
+def sample_uniform_cameras_on_sphere(num_samples=1):
+ # Step 1: Sample azimuth angles uniformly from [0, 2*pi)
+ theta = np.random.uniform(0, 2 * np.pi, num_samples)
+
+ # Step 2: Sample cos(phi) uniformly from [-1, 1]
+ cos_phi = np.random.uniform(-1, 1, num_samples)
+
+ # Step 3: Calculate the elevation angle (phi) from cos(phi)
+ phi = np.arccos(cos_phi) # phi will be in [0, pi]
+
+ # Step 4: Convert spherical coordinates to Cartesian coordinates (x, y, z)
+ # x = np.sin(phi) * np.cos(theta)
+ # y = np.sin(phi) * np.sin(theta)
+ # z = np.cos(phi)
+
+ # Combine the x, y, z coordinates into a single array
+ # cameras = np.vstack((x, y, z)).T # Shape: (num_samples, 3)
+
+ # return cameras
+ return theta, phi
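+# Note: as written, this helper returns the sampled angles (theta, phi) in radians
+# rather than Cartesian points; the spherical-to-Cartesian conversion is kept
+# above as commented-out reference code.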
diff --git a/nsr/common_blks.py b/nsr/common_blks.py
new file mode 100644
index 0000000000000000000000000000000000000000..2789aea5a060c5a94d21e8a69cd566e6211a76e5
--- /dev/null
+++ b/nsr/common_blks.py
@@ -0,0 +1,216 @@
+
+
+# https://github.com/sxyu/pixel-nerf/blob/master/src/model/resnetfc.py
+from torch import nn
+import torch
+
+from vit.vision_transformer import Mlp, DropPath
+
+
+# Resnet Blocks
+class ResnetBlockFC(nn.Module):
+ """
+ Fully connected ResNet Block class.
+ Taken from DVR code.
+ :param size_in (int): input dimension
+ :param size_out (int): output dimension
+ :param size_h (int): hidden dimension
+ """
+ def __init__(self, size_in, size_out=None, size_h=None, beta=0.0, init_as_zero=False):
+ super().__init__()
+ # Attributes
+ if size_out is None:
+ size_out = size_in
+
+ if size_h is None:
+ size_h = min(size_in, size_out)
+
+ self.size_in = size_in
+ self.size_h = size_h
+ self.size_out = size_out
+ # Submodules
+ self.fc_0 = nn.Linear(size_in, size_h)
+ self.fc_1 = nn.Linear(size_h, size_out)
+
+ # Init
+ nn.init.constant_(self.fc_0.bias, 0.0)
+ if init_as_zero:
+ nn.init.zeros_(self.fc_0.weight)
+ else:
+ nn.init.kaiming_normal_(self.fc_0.weight, a=0, mode="fan_in")
+ nn.init.constant_(self.fc_1.bias, 0.0)
+ nn.init.zeros_(self.fc_1.weight)
+
+ if beta > 0:
+ self.activation = nn.Softplus(beta=beta)
+ else:
+ self.activation = nn.ReLU()
+
+ if size_in == size_out:
+ self.shortcut = None
+ else:
+ self.shortcut = nn.Linear(size_in, size_out, bias=False)
+ # nn.init.constant_(self.shortcut.bias, 0.0)
+ nn.init.kaiming_normal_(self.shortcut.weight, a=0, mode="fan_in")
+
+ def forward(self, x):
+ # with profiler.record_function("resblock"):
+ net = self.fc_0(self.activation(x))
+ dx = self.fc_1(self.activation(net))
+
+ if self.shortcut is not None:
+ x_s = self.shortcut(x)
+ else:
+ x_s = x
+ return x_s + dx
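+# Hedged usage sketch (illustrative): the block is pre-activation, i.e. ReLU is
+# applied before each linear layer, and a learned linear shortcut is used when
+# the input and output widths differ:
+#
+#   block = ResnetBlockFC(size_in=128, size_out=256)
+#   y = block(torch.randn(4, 128))  # -> [4, 256]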
+
+
+
+
+# Resnet Blocks
+class ResnetBlockFCViT(nn.Module):
+ """
+ Fully connected ResNet Block class.
+ Taken from DVR code.
+ :param size_in (int): input dimension
+ :param size_out (int): output dimension
+ :param size_h (int): hidden dimension
+ """
+ def __init__(self, size_in, size_out=None, size_h=None, beta=0.0, init_as_zero=False):
+ super().__init__()
+ # Attributes
+ if size_out is None:
+ size_out = size_in
+
+ if size_h is None:
+ size_h = min(size_in, size_out)
+
+ self.size_in = size_in
+ self.size_h = size_h
+ self.size_out = size_out
+ # Submodules
+ self.fc_0 = nn.Linear(size_in, size_h)
+ self.fc_1 = nn.Linear(size_h, size_out)
+
+ # Init
+ nn.init.constant_(self.fc_0.bias, 0.0)
+ if init_as_zero:
+ nn.init.zeros_(self.fc_0.weight)
+ else:
+ nn.init.kaiming_normal_(self.fc_0.weight, a=0, mode="fan_in")
+ nn.init.constant_(self.fc_1.bias, 0.0)
+ nn.init.zeros_(self.fc_1.weight)
+
+ if beta > 0:
+ self.activation = nn.Softplus(beta=beta)
+ else:
+ self.activation = nn.ReLU()
+
+ if size_in == size_out:
+ self.shortcut = None
+ else:
+ self.shortcut = nn.Linear(size_in, size_out, bias=False)
+ # nn.init.constant_(self.shortcut.bias, 0.0)
+ nn.init.kaiming_normal_(self.shortcut.weight, a=0, mode="fan_in")
+
+ def forward(self, x):
+ # with profiler.record_function("resblock"):
+ net = self.fc_0(self.activation(x))
+ dx = self.fc_1(self.activation(net))
+
+ if self.shortcut is not None:
+ x_s = self.shortcut(x)
+ else:
+ x_s = x
+ return x_s + dx
+
+
+# class Block(nn.Module):
+# def __init__(self,
+# dim,
+# num_heads,
+# mlp_ratio=4.,
+# qkv_bias=False,
+# qk_scale=None,
+# drop=0.,
+# attn_drop=0.,
+# drop_path=0.,
+# act_layer=nn.GELU,
+# norm_layer=nn.LayerNorm):
+# super().__init__()
+# self.norm1 = norm_layer(dim)
+# self.attn = Attention(dim,
+# num_heads=num_heads,
+# qkv_bias=qkv_bias,
+# qk_scale=qk_scale,
+# attn_drop=attn_drop,
+# proj_drop=drop)
+# self.drop_path = DropPath(
+# drop_path) if drop_path > 0. else nn.Identity()
+# self.norm2 = norm_layer(dim)
+# mlp_hidden_dim = int(dim * mlp_ratio)
+# self.mlp = Mlp(in_features=dim,
+# hidden_features=mlp_hidden_dim,
+# act_layer=act_layer,
+# drop=drop)
+
+# def forward(self, x, return_attention=False):
+# y, attn = self.attn(self.norm1(x))
+# if return_attention:
+# return attn
+# x = x + self.drop_path(y)
+# x = x + self.drop_path(self.mlp(self.norm2(x)))
+# return x
+
+
+
+
+class ResMlp(nn.Module):
+ def __init__(self,
+ size_in,
+ size_out=None,
+ size_h=None,
+ drop=0.,
+ drop_path=0.,
+ act_layer=nn.GELU,
+ norm_layer=nn.LayerNorm,
+ ):
+ super().__init__()
+
+ # Attributes
+ if size_out is None:
+ size_out = size_in
+ if size_h is None:
+ size_h = min(size_in, size_out)
+ self.size_in = size_in
+ self.size_h = size_h
+ self.size_out = size_out
+
+ # Submodules
+ self.norm1 = norm_layer(size_in) # pre-norm: normalize the input before the MLP branch
+
+ self.mlp = Mlp(in_features=size_in,
+ out_features=size_out,
+ act_layer=act_layer,
+ drop=drop)
+
+ # Residual shortcuts
+ if size_in == size_out:
+ self.shortcut = None
+ else:
+ self.shortcut = nn.Linear(size_in, size_out, bias=False)
+ self.norm2 = norm_layer(size_in)
+
+ self.drop_path = DropPath(
+ drop_path) if drop_path > 0. else nn.Identity()
+
+ def forward(self, x):
+ dx = self.mlp(self.norm1(x))
+
+ if self.shortcut is not None:
+ x_s = self.shortcut(self.norm2(x))
+ else:
+ x_s = x
+
+ return x_s + self.drop_path(dx)
\ No newline at end of file
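+# Hedged usage sketch (illustrative): a pre-norm residual MLP over token
+# features, with stochastic depth on the residual branch:
+#
+#   mlp = ResMlp(size_in=384, drop_path=0.1)
+#   y = mlp(torch.randn(4, 256, 384))  # -> [4, 256, 384]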
diff --git a/nsr/confnet.py b/nsr/confnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..65d908ffaf5c5131aaef75bac769c23b8a760b2f
--- /dev/null
+++ b/nsr/confnet.py
@@ -0,0 +1,63 @@
+import torch
+import torch.nn as nn
+import torchvision
+
+
+EPS = 1e-7
+
+class ConfNet(nn.Module):
+ def __init__(self, cin=3, cout=1, zdim=128, nf=64):
+ super(ConfNet, self).__init__()
+ ## downsampling
+ network = [
+ nn.Conv2d(cin, nf, kernel_size=4, stride=2, padding=1, bias=False), # 64x64 -> 32x32
+ nn.GroupNorm(16, nf),
+ nn.LeakyReLU(0.2, inplace=True),
+ nn.Conv2d(nf, nf*2, kernel_size=4, stride=2, padding=1, bias=False), # 32x32 -> 16x16
+ nn.GroupNorm(16*2, nf*2),
+ nn.LeakyReLU(0.2, inplace=True),
+ nn.Conv2d(nf*2, nf*4, kernel_size=4, stride=2, padding=1, bias=False), # 16x16 -> 8x8
+ nn.GroupNorm(16*4, nf*4),
+ nn.LeakyReLU(0.2, inplace=True),
+ nn.Conv2d(nf*4, nf*8, kernel_size=4, stride=2, padding=1, bias=False), # 8x8 -> 4x4
+ nn.LeakyReLU(0.2, inplace=True),
+ nn.Conv2d(nf*8, zdim, kernel_size=4, stride=1, padding=0, bias=False), # 4x4 -> 1x1
+ nn.ReLU(inplace=True)]
+ ## upsampling
+ network += [
+ nn.ConvTranspose2d(zdim, nf*8, kernel_size=4, padding=0, bias=False), # 1x1 -> 4x4
+ nn.ReLU(inplace=True),
+ nn.ConvTranspose2d(nf*8, nf*4, kernel_size=4, stride=2, padding=1, bias=False), # 4x4 -> 8x8
+ nn.GroupNorm(16*4, nf*4),
+ nn.ReLU(inplace=True),
+ nn.ConvTranspose2d(nf*4, nf*2, kernel_size=4, stride=2, padding=1, bias=False), # 8x8 -> 16x16
+ nn.GroupNorm(16*2, nf*2),
+ nn.ReLU(inplace=True)]
+ self.network = nn.Sequential(*network)
+
+ # ! only the symmetric confidence is required
+ # out_net1 = [
+ # nn.ConvTranspose2d(nf*2, nf, kernel_size=4, stride=2, padding=1, bias=False), # 16x16 -> 32x32
+ # nn.GroupNorm(16, nf),
+ # nn.ReLU(inplace=True),
+ # nn.ConvTranspose2d(nf, nf, kernel_size=4, stride=2, padding=1, bias=False), # 32x32 -> 64x64
+ # nn.GroupNorm(16, nf),
+ # nn.ReLU(inplace=True),
+ # nn.Conv2d(nf, 2, kernel_size=5, stride=1, padding=2, bias=False), # 64x64
+ # # nn.Conv2d(nf, 1, kernel_size=5, stride=1, padding=2, bias=False), # 64x64
+ # nn.Softplus()
+ # ]
+ # self.out_net1 = nn.Sequential(*out_net1)
+
+ # ! for perceptual loss
+ out_net2 = [nn.Conv2d(nf*2, 2, kernel_size=3, stride=1, padding=1, bias=False), # 16x16
+ nn.Softplus()
+ # nn.Sigmoid()
+ ]
+ self.out_net2 = nn.Sequential(*out_net2)
+
+ def forward(self, input):
+ out = self.network(input)
+ # return self.out_net1(out)
+ return self.out_net2(out)
+ # return self.out_net1(out), self.out_net2(out)
\ No newline at end of file
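+# Hedged shape sketch (illustrative): a 64x64 input is encoded down to a 1x1
+# bottleneck of `zdim` channels and decoded back to 16x16, so `out_net2`
+# produces a [B, 2, 16, 16] confidence map (positive via Softplus):
+#
+#   conf_net = ConfNet(cin=3, cout=1, zdim=128, nf=64)
+#   conf = conf_net(torch.randn(2, 3, 64, 64))  # -> [2, 2, 16, 16]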
diff --git a/nsr/cvD/__init__.py b/nsr/cvD/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/nsr/cvD/__pycache__/__init__.cpython-39.pyc b/nsr/cvD/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..477fb79554cf5e04e889ce34ecf0b75db0eac5a9
Binary files /dev/null and b/nsr/cvD/__pycache__/__init__.cpython-39.pyc differ
diff --git a/nsr/cvD/__pycache__/canoD.cpython-39.pyc b/nsr/cvD/__pycache__/canoD.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d5261c4d48b2de8e1fe1787e775c9982611dc49d
Binary files /dev/null and b/nsr/cvD/__pycache__/canoD.cpython-39.pyc differ
diff --git a/nsr/cvD/__pycache__/nvsD.cpython-39.pyc b/nsr/cvD/__pycache__/nvsD.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..efac1e81c028c7032910ca0ab1a688257f33d02a
Binary files /dev/null and b/nsr/cvD/__pycache__/nvsD.cpython-39.pyc differ
diff --git a/nsr/cvD/__pycache__/nvsD_canoD.cpython-39.pyc b/nsr/cvD/__pycache__/nvsD_canoD.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e124a6704398b91fe0b7d93fa7cd03d0bafb7f81
Binary files /dev/null and b/nsr/cvD/__pycache__/nvsD_canoD.cpython-39.pyc differ
diff --git a/nsr/cvD/__pycache__/nvsD_canoD_mask.cpython-39.pyc b/nsr/cvD/__pycache__/nvsD_canoD_mask.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8515d2980d77d0158bea154af77b930ccbf703da
Binary files /dev/null and b/nsr/cvD/__pycache__/nvsD_canoD_mask.cpython-39.pyc differ
diff --git a/nsr/cvD/__pycache__/nvsD_canoD_multiview.cpython-39.pyc b/nsr/cvD/__pycache__/nvsD_canoD_multiview.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6b729a8d3d71e8504ee3a380d05578a0e2ece02a
Binary files /dev/null and b/nsr/cvD/__pycache__/nvsD_canoD_multiview.cpython-39.pyc differ
diff --git a/nsr/cvD/__pycache__/nvsD_nosr.cpython-39.pyc b/nsr/cvD/__pycache__/nvsD_nosr.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d98cdaa2dbafec7a60098d4b23472d2acc471f72
Binary files /dev/null and b/nsr/cvD/__pycache__/nvsD_nosr.cpython-39.pyc differ
diff --git a/nsr/cvD/canoD.py b/nsr/cvD/canoD.py
new file mode 100644
index 0000000000000000000000000000000000000000..67aee15e9429743a64cc117b4641d70d2fc1df35
--- /dev/null
+++ b/nsr/cvD/canoD.py
@@ -0,0 +1,480 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import torchvision
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from ..train_util import TrainLoopBasic, TrainLoop3DRec
+import vision_aided_loss
+from dnnlib.util import calculate_adaptive_weight
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+from ..train_util_cvD import TrainLoop3DcvD
+# from .nvD import
+
+
+class TrainLoop3DcvD_canoD(TrainLoop3DcvD):
+
+ def __init__(self,
+ *,
+ model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ **kwargs):
+ super().__init__(model=model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ use_amp=use_amp, cvD_name='cano_cvD',
+ **kwargs)
+
+ device = dist_util.dev()
+
+ # self.canonical_cvD = vision_aided_loss.Discriminator(
+ # cv_type='clip', loss_type='multilevel_sigmoid_s',
+ # device=device).to(device)
+ # self.canonical_cvD.cv_ensemble.requires_grad_(
+ # False) # Freeze feature extractor
+
+ # self._load_and_sync_parameters(model=self.canonical_cvD,
+ # model_name='cvD')
+
+ # self.mp_trainer_canonical_cvD = MixedPrecisionTrainer(
+ # model=self.canonical_cvD,
+ # use_fp16=self.use_fp16,
+ # fp16_scale_growth=fp16_scale_growth,
+ # model_name='canonical_cvD',
+ # use_amp=use_amp)
+
+ # self.opt_cano_cvD = AdamW(
+ # self.mp_trainer_canonical_cvD.master_params,
+ # lr=1e-5, # same as the G
+ # betas=(0, 0.99),
+ # eps=1e-8) # dlr in biggan cfg
+
+ # if self.use_ddp:
+ # self.ddp_canonical_cvD = DDP(
+ # self.canonical_cvD,
+ # device_ids=[dist_util.dev()],
+ # output_device=dist_util.dev(),
+ # broadcast_buffers=False,
+ # bucket_cap_mb=128,
+ # find_unused_parameters=False,
+ # )
+ # else:
+ # self.ddp_canonical_cvD = self.canonical_cvD
+
+ th.cuda.empty_cache()
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+
+ if step == 'g_step_rec':
+ self.forward_G_rec(batch)
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ # elif step == 'g_step_nvs':
+ # self.forward_G_nvs(batch)
+ # took_step_g_nvs = self.mp_trainer.optimize(self.opt)
+
+ # if took_step_g_nvs:
+ # self._update_ema() # g_ema
+
+ elif step == 'd_step':
+ self.forward_D(batch)
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ # _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ else:
+ return
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_rec')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cano_cvD')
+ # self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cano_cvD')
+ # self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ def forward_D(self, batch): # update D
+ # self.mp_trainer_canonical_cvD.zero_grad()
+ self.mp_trainer_cvD.zero_grad()
+
+ self.rec_model.requires_grad_(False)
+
+ # update two D
+ self.ddp_nvs_cvD.requires_grad_(True)
+ # self.ddp_canonical_cvD.requires_grad_(True)
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_cvD.use_amp):
+
+ novel_view_c = th.cat([
+ micro['c'][batch_size // 2:], micro['c'][batch_size // 2:]
+ ])
+
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ # TODO, optimize with one encoder, and two triplane decoder
+ cano_pred = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ # nvs_pred = self.rec_model(latent=latent,
+ # c=novel_view_c,
+ # behaviour='triplane_dec')
+
+ # d_loss_nvs = self.run_D_Diter(
+ # real=cano_pred['image_raw'],
+ # fake=nvs_pred['image_raw'],
+ # D=self.ddp_cvD) # TODO, add SR for FFHQ
+
+ d_loss_cano = self.run_D_Diter(
+ real=micro['img_to_encoder'],
+ fake=cano_pred['image_raw'],
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ # log_rec3d_loss_dict({'vision_aided_loss/D_nvs': d_loss_nvs})
+ log_rec3d_loss_dict({'vision_aided_loss/D_cano': d_loss_cano})
+
+ self.mp_trainer_cvD.backward(d_loss_cano)
+ # self.mp_trainer_cvD.backward(d_loss_nvs)
+
+ def forward_G_rec(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ # self.ddp_canonical_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+ ) # render novel view for first half of the batch for D loss
+
+ target_for_rec = micro
+ pred_for_rec = pred
+
+ if last_batch or not self.use_ddp:
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+ else:
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+
+ # add cvD supervision
+ vision_aided_loss = self.ddp_nvs_cvD(
+ pred_for_rec['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ last_layer = self.rec_model.module.decoder.triplane_decoder.decoder.net[ # type: ignore
+ -1].weight # type: ignore
+
+ d_weight = calculate_adaptive_weight(
+ loss, vision_aided_loss, last_layer,
+ # disc_weight_max=1) * 1
+ disc_weight_max=0.1) * 0.1
+ loss += vision_aided_loss * d_weight
+
+ loss_dict.update({
+ 'vision_aided_loss/G_rec': vision_aided_loss,
+ 'd_weight': d_weight
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(loss) # no nvs cvD loss, following VQ3D
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+
+ def forward_G_nvs(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ # self.ddp_canonical_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False) # only use novel view D
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_cvD.use_amp):
+
+ pred_nv = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=th.cat([
+ micro['c'][batch_size // 2:],
+ micro['c'][:batch_size // 2],
+ ])) # ! render novel views only for D loss
+
+ # add cvD supervision
+ vision_aided_loss = self.ddp_nvs_cvD(
+ pred_nv['image_raw'], for_G=True).mean() # [B, 1] shape
+
+ loss = vision_aided_loss * 0.1
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs':
+ vision_aided_loss,
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred_nv['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred_nv['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred_nv:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred_nv['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # print(vis.shape)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
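+# Hedged sketch of the adaptive discriminator weight used in forward_G_rec
+# (VQGAN-style; the actual implementation lives in
+# dnnlib.util.calculate_adaptive_weight, so treat the body below as an
+# assumption about its behavior, not the definitive code):
+#
+#   rec_grads = th.autograd.grad(loss, last_layer, retain_graph=True)[0]
+#   d_grads = th.autograd.grad(vision_aided_loss, last_layer, retain_graph=True)[0]
+#   d_weight = rec_grads.norm() / (d_grads.norm() + 1e-4)
+#   d_weight = d_weight.clamp(0.0, disc_weight_max).detach()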
diff --git a/nsr/cvD/nvsD.py b/nsr/cvD/nvsD.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e2923d0376263f4c9c95259a71479ff488508e7
--- /dev/null
+++ b/nsr/cvD/nvsD.py
@@ -0,0 +1,538 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import torchvision
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from ..train_util import TrainLoopBasic, TrainLoop3DRec
+import vision_aided_loss
+from dnnlib.util import calculate_adaptive_weight
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+from ..train_util_cvD import TrainLoop3DcvD
+
+
+class TrainLoop3DcvD_nvsD(TrainLoop3DcvD):
+
+ def __init__(self,
+ *,
+ # model,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ use_amp=use_amp,
+ **kwargs)
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+
+ if step == 'g_step_rec':
+ self.forward_G_rec(batch)
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'g_step_nvs':
+ self.forward_G_nvs(batch)
+ took_step_g_nvs = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_nvs:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step':
+ self.forward_D(batch)
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_rec') # pure VAE reconstruction
+
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+
+ def forward_D(self, batch): # update D
+ self.rec_model.requires_grad_(False)
+
+ self.mp_trainer_cvD.zero_grad()
+ self.ddp_nvs_cvD.requires_grad_(True)
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_cvD.use_amp):
+
+ novel_view_c = th.cat([
+ micro['c'][1:], micro['c'][:1]
+ ])
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ cano_pred = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ nvs_pred = self.rec_model(latent=latent,
+ c=novel_view_c,
+ behaviour='triplane_dec')
+
+ if 'image_sr' in nvs_pred:
+ d_loss_nvs = self.run_D_Diter(
+ real=th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ], dim=1),
+ fake=th.cat([
+ th.nn.functional.interpolate(
+ nvs_pred['image_raw'],
+ size=nvs_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ nvs_pred['image_sr'],
+ ], dim=1),
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ else:
+ d_loss_nvs = self.run_D_Diter(
+ real=cano_pred['image_raw'],
+ fake=nvs_pred['image_raw'],
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/D_nvs': d_loss_nvs})
+ self.mp_trainer_cvD.backward(d_loss_nvs)
+
+ def forward_G_rec(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+ ) # render novel view for first half of the batch for D loss
+
+ target_for_rec = micro
+ pred_for_rec = pred
+
+ if last_batch or not self.use_ddp:
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+ else:
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+
+ def forward_G_nvs(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+ self.ddp_nvs_cvD.requires_grad_(False) # only use novel view D
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ nvs_pred = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=th.cat([
+ micro['c'][1:],
+ micro['c'][:1],
+ ])) # ! render novel views only for D loss
+
+ # add cvD supervision
+
+ if 'image_sr' in nvs_pred:
+ # concat sr and raw results
+ vision_aided_loss = self.ddp_nvs_cvD(
+ th.cat([
+ th.nn.functional.interpolate(
+ nvs_pred['image_raw'],
+ size=nvs_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ nvs_pred['image_sr'],
+ ], dim=1),
+ for_G=True).mean()
+ else:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ nvs_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ loss = vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs': loss
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = nvs_pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = nvs_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in nvs_pred:
+
+ if nvs_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat([
+ self.pool_512(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif nvs_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat([
+ self.pool_256(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat([
+ self.pool_128(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+                        dim=-1)  # TODO: depth fails to load here; values in [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+
+ def save(self, mp_trainer=None, model_name='rec'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, mp_trainer.master_params)
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ dist.barrier()
+
+ def _load_and_sync_parameters(self, model=None, model_name='rec'):
+        resume = find_resume_checkpoint(self.resume_checkpoint, model_name)
+        if resume is not None:  # (checkpoint_path, step) pair when found
+            resume_checkpoint, self.resume_step = resume
+        else:
+            resume_checkpoint = self.resume_checkpoint
+
+ if model is None:
+ model = self.rec_model # default model in the parent class
+
+ print(resume_checkpoint)
+
+ if resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ print(f'mark {model_name} loading ', flush=True)
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ print(f'mark {model_name} loading finished', flush=True)
+
+ model_state_dict = model.state_dict()
+
+                for k, v in resume_state_dict.items():
+                    if k in model_state_dict and v.size(
+                    ) == model_state_dict[k].size():
+                        model_state_dict[k] = v
+                    elif 'IN' in k:
+                        print('ignore ', k)
+                    elif k not in model_state_dict:
+                        # guard: indexing model_state_dict[k] below would
+                        # raise KeyError for checkpoint-only keys
+                        print('!!!! ignore key not in model: ', k)
+                    else:
+                        print('!!!! ignore key: ', k, ': ', v.size(),
+                              'shape in model: ', model_state_dict[k].size())
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ print(f'synced {model_name} params')
diff --git a/nsr/cvD/nvsD_canoD.py b/nsr/cvD/nvsD_canoD.py
new file mode 100644
index 0000000000000000000000000000000000000000..d59297b766dadbb72e9b9a1f23370a71fe1a4bbd
--- /dev/null
+++ b/nsr/cvD/nvsD_canoD.py
@@ -0,0 +1,1043 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import torchvision
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from ..train_util import TrainLoopBasic, TrainLoop3DRec
+import vision_aided_loss
+from dnnlib.util import calculate_adaptive_weight
+
+def flip_yaw(pose_matrix):
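+    # Mirror a batch of 4x4 cam2world poses about the vertical (yaw) axis:
+    # negating the x-axis rotation cross-terms and the x translation is
+    # equivalent to conjugating the pose by diag(-1, 1, 1).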
+ flipped = pose_matrix.clone()
+ flipped[:, 0, 1] *= -1
+ flipped[:, 0, 2] *= -1
+ flipped[:, 1, 0] *= -1
+ flipped[:, 2, 0] *= -1
+ flipped[:, 0, 3] *= -1
+ # st()
+ return flipped
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+from ..train_util_cvD import TrainLoop3DcvD
+# from .nvD import
+
+
+class TrainLoop3DcvD_nvsD_canoD(TrainLoop3DcvD):
+ # class TrainLoop3DcvD_nvsD_canoD():
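+    # Trains the reconstruction model against two vision-aided
+    # discriminators: the inherited `nvs_cvD` scores novel-view renderings,
+    # while the `cano_cvD` built below scores canonical-view reconstructions.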
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ use_amp=use_amp,
+ **kwargs)
+
+ device = dist_util.dev()
+
+ self.cano_cvD = vision_aided_loss.Discriminator(
+ cv_type='clip', loss_type='multilevel_sigmoid_s',
+ device=device).to(device)
+ self.cano_cvD.cv_ensemble.requires_grad_(
+ False) # Freeze feature extractor
+ # self.cano_cvD.train()
+
+ cvD_model_params = list(self.cano_cvD.parameters())
+ SR_TRAINING = False
+ if SR_TRAINING: # replace the conv1 with 6 channel input
+ # width, patch_size = self.nvs_cvD.cv_ensemble
+ vision_width, vision_patch_size = [
+ self.cano_cvD.cv_ensemble.models[0].model.conv1.weight.shape[k]
+ for k in [0, -1]
+ ]
+ self.cano_cvD.cv_ensemble.models[0].model.conv1 = th.nn.Conv2d(
+ in_channels=6,
+ out_channels=vision_width,
+ kernel_size=vision_patch_size,
+ stride=vision_patch_size,
+ bias=False).to(dist_util.dev())
+ cvD_model_params += list(
+ self.cano_cvD.cv_ensemble.models[0].model.conv1.parameters())
+
+ self.cano_cvD.cv_ensemble.models[
+ 0].image_mean = self.cano_cvD.cv_ensemble.models[
+ 0].image_mean.repeat(2)
+ self.cano_cvD.cv_ensemble.models[
+ 0].image_std = self.cano_cvD.cv_ensemble.models[
+ 0].image_std.repeat(2)
+
+ # logger.log(f'cano_cvD_model_params: {cvD_model_params}')
+
+ self._load_and_sync_parameters(model=self.cano_cvD,
+ model_name='cano_cvD')
+
+ self.mp_trainer_canonical_cvD = MixedPrecisionTrainer(
+ model=self.cano_cvD,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='canonical_cvD',
+ use_amp=use_amp,
+ model_params=cvD_model_params)
+
+ # cano_lr = 2e-5 * (lr / 1e-5) # D_lr=2e-4 in cvD by default
+ # cano_lr = 5e-5 * (lr / 1e-5) # D_lr=2e-4 in cvD by default
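+        # Keep the D:G learning-rate ratio fixed while `lr` varies: at the
+        # default G lr of 1e-5 this gives the cvD default D_lr of 2e-4.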
+ cano_lr = 2e-4 * (
+ lr / 1e-5) # D_lr=2e-4 in cvD by default. 1e-4 still overfitting
+ self.opt_cano_cvD = AdamW(
+ self.mp_trainer_canonical_cvD.master_params,
+ lr=cano_lr, # same as the G
+ betas=(0, 0.999),
+ eps=1e-8) # dlr in biggan cfg
+
+ logger.log(f'cpt_cano_cvD lr: {cano_lr}')
+
+ if self.use_ddp:
+ self.ddp_cano_cvD = DDP(
+ self.cano_cvD,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ self.ddp_cano_cvD = self.cano_cvD
+
+ th.cuda.empty_cache()
+
+ def run_step(self, batch, step='g_step'):
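+        # One step of the alternating schedule driven by run_loop():
+        #   g_step_rec - G step on the canonical reconstruction objective
+        #   d_step_rec - canonical-view discriminator step
+        #   g_step_nvs - G step on the novel-view adversarial objective
+        #   d_step_nvs - novel-view discriminator step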
+ # self.forward_backward(batch)
+
+ if step == 'g_step_rec':
+ self.forward_G_rec(batch)
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step_rec':
+ self.forward_D(batch, behaviour='rec')
+ # _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ elif step == 'g_step_nvs':
+ self.forward_G_nvs(batch)
+ took_step_g_nvs = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_nvs:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step_nvs':
+ self.forward_D(batch, behaviour='nvs')
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ # _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
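+        # Each outer iteration draws four fresh batches and runs the four
+        # G/D steps in turn, then handles logging, eval, and checkpointing.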
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ batch = next(self.data)
+
+ if self.novel_view_poses is None:
+ self.novel_view_poses = th.roll(batch['c'], 1, 0).to(
+ dist_util.dev()) # save for eval visualization use
+
+ self.run_step(batch, 'g_step_rec')
+
+ # if self.step % 2 == 0:
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_rec')
+
+ # if self.step % 2 == 1:
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_nvs')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, self.mp_trainer_cvD.model_name)
+ self.save(self.mp_trainer_canonical_cvD,
+ self.mp_trainer_canonical_cvD.model_name)
+
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save()
+ self.save(self.mp_trainer_cvD,
+ self.mp_trainer_cvD.model_name)
+ self.save(self.mp_trainer_canonical_cvD,
+ self.mp_trainer_canonical_cvD.model_name)
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ def forward_D(self, batch, behaviour): # update D
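+        # `behaviour` selects which D is updated: 'rec' trains the canonical
+        # D on (real image, reconstruction) pairs; 'nvs' trains the
+        # novel-view D on (canonical render, novel-view render) pairs.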
+ self.mp_trainer_canonical_cvD.zero_grad()
+ self.mp_trainer_cvD.zero_grad()
+
+ self.rec_model.requires_grad_(False)
+ # self.ddp_model.requires_grad_(False)
+
+ # update two D
+ if behaviour == 'nvs':
+ self.ddp_nvs_cvD.requires_grad_(True)
+ self.ddp_cano_cvD.requires_grad_(False)
+ else: # update rec canonical D
+ self.ddp_nvs_cvD.requires_grad_(False)
+ self.ddp_cano_cvD.requires_grad_(True)
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_canonical_cvD.use_amp):
+
+ novel_view_c = th.cat([micro['c'][1:], micro['c'][:1]])
+
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ cano_pred = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ # TODO, optimize with one encoder, and two triplane decoder
+ # FIXME: quit autocast to runbackward
+ if behaviour == 'rec':
+
+ if 'image_sr' in cano_pred:
+ # try concat them in batch
+ # d_loss = self.run_D_Diter(
+ # real=th.cat([
+ # th.nn.functional.interpolate(
+ # micro['img'],
+ # size=micro['img_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # micro['img_sr'],
+ # ],
+ # dim=1),
+ # fake=th.cat([
+ # th.nn.functional.interpolate(
+ # cano_pred['image_raw'],
+ # size=cano_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # cano_pred['image_sr'],
+ # ],
+ # dim=1),
+ # D=self.ddp_cano_cvD) # TODO, add SR for FFHQ
+
+ d_loss = self.run_D_Diter(
+                            real=micro['img_sr'],
+ fake=cano_pred['image_sr'],
+ D=self.ddp_cano_cvD) # TODO, add SR for FFHQ
+
+ else:
+ d_loss = self.run_D_Diter(
+ real=micro['img'],
+ fake=cano_pred['image_raw'],
+ D=self.ddp_cano_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/D_cano': d_loss})
+ # self.mp_trainer_canonical_cvD.backward(d_loss)
+ else:
+ assert behaviour == 'nvs'
+
+ nvs_pred = self.rec_model(latent=latent,
+ c=novel_view_c,
+ behaviour='triplane_dec')
+
+ if 'image_sr' in nvs_pred:
+
+ d_loss = self.run_D_Diter(
+ real=cano_pred['image_sr'],
+ # th.cat([
+ # th.nn.functional.interpolate(
+ # cano_pred['image_raw'],
+ # size=cano_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # ],
+ # dim=1),
+                            fake=nvs_pred['image_sr'],
+ # th.cat([
+ # th.nn.functional.interpolate(
+ # nvs_pred['image_raw'],
+ # size=nvs_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # ],
+ # dim=1),
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ else:
+ d_loss = self.run_D_Diter(
+ real=cano_pred['image_raw'],
+ fake=nvs_pred['image_raw'],
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/D_nvs': d_loss})
+ # self.mp_trainer_cvD.backward(d_loss)
+
+ if behaviour == 'rec':
+ self.mp_trainer_canonical_cvD.backward(d_loss)
+ else:
+ assert behaviour == 'nvs'
+ self.mp_trainer_cvD.backward(d_loss)
+
+ def forward_G_rec(self, batch): # update G
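+        # G step on the canonical view: reconstruction loss, the canonical
+        # vision-aided GAN loss, and optionally a symmetry loss rendered
+        # from the yaw-flipped camera pose (see flip_yaw above).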
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+ ) # render novel view for first half of the batch for D loss
+
+ target_for_rec = micro
+ cano_pred = pred
+
+ # if last_batch or not self.use_ddp:
+ # loss, loss_dict = self.loss_class(cano_pred,
+ # target_for_rec,
+ # test_mode=False,
+ # step=self.step +
+ # self.resume_step)
+ # else:
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, fg_mask = self.loss_class(cano_pred,
+ target_for_rec,
+ test_mode=False,
+ step=self.step +
+ self.resume_step,
+ return_fg_mask=True)
+
+ # cano_pred_img = cano_pred['image_raw']
+
+ if self.loss_class.opt.symmetry_loss:
+ pose, intrinsics = micro['c'][:, :16].reshape(
+ -1, 4, 4), micro['c'][:, 16:]
+ flipped_pose = flip_yaw(pose)
+ mirror_c = th.cat(
+ [flipped_pose.reshape(-1, 16), intrinsics], -1)
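+                    # `c` packs the flattened 4x4 cam2world pose (16 values)
+                    # followed by the intrinsics, so mirroring only rewrites
+                    # the pose part.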
+
+ nvs_pred = self.rec_model(latent={
+ k: v
+ for k, v in pred.items() if 'latent' in k
+ },
+ c=mirror_c,
+ behaviour='triplane_dec',
+ return_raw_only=True)
+ # cano_pred_img = th.cat([cano_pred_img, nvs_pred['image_raw']], 0)
+
+ # concat data for supervision
+ nvs_gt = {
+ k: th.flip(target_for_rec[k], [-1])
+ for k in
+ ['img'] # fliplr leads to wrong color; B 3 H W shape
+ }
+ flipped_fg_mask = th.flip(fg_mask, [-1])
+                    if 'conf_sigma' in pred:
+                        conf_sigma = th.flip(pred['conf_sigma'], [-1])
+                        # dynamically resize to the target image size
+                        conf_sigma = th.nn.AdaptiveAvgPool2d(
+                            fg_mask.shape[-2:])(conf_sigma)
+                    else:
+                        conf_sigma = None
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss_symm, loss_dict_symm = self.loss_class.calc_2d_rec_loss(
+ nvs_pred['image_raw'],
+ nvs_gt['img'],
+ flipped_fg_mask,
+ # test_mode=True,
+ test_mode=False,
+ step=self.step + self.resume_step,
+ conf_sigma=conf_sigma,
+ )
+ loss += (loss_symm * 1.0) # as in unsup3d
+ # if conf_sigma is not None:
+ # conf_loss = th.nn.functional.mse_loss(conf_sigma, flipped_fg_mask) * 0.2
+ # loss += conf_loss # a log that regularizes all confidence to 1
+ # loss_dict[f'conf_loss'] = conf_loss
+ for k, v in loss_dict_symm.items():
+ loss_dict[f'{k}_symm'] = v
+
+
+ # add cvD supervision
+ # ! TODO
+
+ if 'image_sr' in cano_pred:
+ # concat both resolution
+ # vision_aided_loss = self.ddp_cano_cvD(
+ # th.cat([
+ # th.nn.functional.interpolate(
+ # cano_pred['image_raw'],
+ # size=cano_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # cano_pred['image_sr'],
+ # ],
+ # dim=1), # 6 channel input
+ # for_G=True).mean() # [B, 1] shape
+
+ vision_aided_loss = self.ddp_cano_cvD(
+ cano_pred['image_sr'],
+ for_G=True).mean() # [B, 1] shape
+
+ else:
+ vision_aided_loss = self.ddp_cano_cvD(
+ cano_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ # last_layer = self.rec_model.module.decoder.triplane_decoder.decoder.net[ # type: ignore
+ # -1].weight # type: ignore
+
+ d_weight = th.tensor(self.loss_class.opt.rec_cvD_lambda).to(
+ dist_util.dev())
+ # d_weight = calculate_adaptive_weight(
+ # loss,
+ # vision_aided_loss,
+ # last_layer,
+ # disc_weight_max=0.1) * self.loss_class.opt.rec_cvD_lambda
+ loss += vision_aided_loss * d_weight
+
+ loss_dict.update({
+ 'vision_aided_loss/G_rec':
+ (vision_aided_loss * d_weight).detach(),
+ 'd_weight':
+ d_weight
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(
+ loss) # no nvs cvD loss, following VQ3D
+
+ # DDP some parameters no grad:
+ # for name, p in self.ddp_model.named_parameters():
+ # if p.grad is None:
+ # print(f"(in rec)found rec unused param: {name}")
+
+ # ! move to other places, add tensorboard
+
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ # with th.no_grad():
+ # # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ # gt_depth = micro['depth']
+ # if gt_depth.ndim == 3:
+ # gt_depth = gt_depth.unsqueeze(1)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # # if True:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_img = pred['image_raw']
+ # gt_img = micro['img']
+
+ # if 'image_sr' in pred:
+ # if pred['image_sr'].shape[-1] == 512:
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ # elif pred['image_sr'].shape[-1] == 256:
+ # pred_img = th.cat(
+ # [self.pool_256(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_256(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_256(pred_depth)
+ # gt_depth = self.pool_256(gt_depth)
+
+ # else:
+ # pred_img = th.cat(
+ # [self.pool_128(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_128(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # gt_depth = self.pool_128(gt_depth)
+ # pred_depth = self.pool_128(pred_depth)
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ # gt_vis = th.cat(
+ # [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ # dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ # pred_vis = th.cat(
+ # [pred_img,
+ # pred_depth.repeat_interleave(3, dim=1)],
+ # dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ # )
+ # print(
+ # 'log vis to: ',
+ # f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ # )
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
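+                        # min-max normalize to [0, 1], then map to [-1, 1]
+                        # and negate so nearer surfaces render brighter next
+                        # to the [-1, 1] RGB images.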
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ pred_img = pred['image_raw'].clip(-1,1)
+ gt_img = micro['img']
+
+ # infer novel view also
+ pred_nv_img = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ if 'image_depth' in pred:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_nv_depth = norm_depth(
+ pred_nv_img['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_nv_depth = th.zeros_like(gt_depth)
+
+                    # pred_vis_nv is otherwise assigned only in the 512 SR
+                    # branch below; default to the raw novel-view render so
+                    # the concatenation further down never sees an unbound
+                    # name when SR is absent or at another resolution.
+                    pred_vis_nv = pred_nv_img['image_raw'].clip(-1, 1)
+
+                    if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ pred_nv_depth = self.pool_512(pred_nv_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ pred_vis_nv = th.cat(
+ [self.pool_512(pred_nv_img['image_raw']), pred_nv_img['image_sr']],
+ dim=-1)
+
+                        elif pred['image_sr'].shape[-1] == 256:
+                            pred_img = th.cat(
+                                [self.pool_256(pred_img), pred['image_sr']],
+                                dim=-1)
+                            gt_img = th.cat(
+                                [self.pool_256(micro['img']), micro['img_sr']],
+                                dim=-1)
+                            pred_depth = self.pool_256(pred_depth)
+                            pred_nv_depth = self.pool_256(pred_nv_depth)
+                            gt_depth = self.pool_256(gt_depth)
+
+                            pred_vis_nv = th.cat(
+                                [self.pool_256(pred_nv_img['image_raw']), pred_nv_img['image_sr']],
+                                dim=-1)
+
+                        else:
+                            pred_img = th.cat(
+                                [self.pool_128(pred_img), pred['image_sr']],
+                                dim=-1)
+                            gt_img = th.cat(
+                                [self.pool_128(micro['img']), micro['img_sr']],
+                                dim=-1)
+                            gt_depth = self.pool_128(gt_depth)
+                            pred_depth = self.pool_128(pred_depth)
+                            pred_nv_depth = self.pool_128(pred_nv_depth)
+
+                            pred_vis_nv = th.cat(
+                                [self.pool_128(pred_nv_img['image_raw']), pred_nv_img['image_sr']],
+                                dim=-1)
+
+ if gt_img.shape[-1] == 64:
+ gt_depth = self.pool_64(gt_depth)
+ elif gt_img.shape[-1] == 128:
+ gt_depth = self.pool_128(gt_depth)
+ # else:
+ # gt_depth = self.pool_64(gt_depth)
+
+ # st()
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ # pred_nv_img['image_raw'].clip(-1,1),
+ pred_vis_nv,
+ pred_nv_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+ pred_vis = th.cat([pred_vis, pred_vis_nv],
+ dim=-2) # cat in H dim
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+                        dim=-1)  # TODO: depth fails to load here; values in [0, 1]
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+                    vis_tensor = torchvision.utils.make_grid(
+                        vis, nrow=vis.shape[-1] // 64)  # (C, H, W) grid
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg', normalize=True, value_range=(-1,1))
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+
+ def forward_G_nvs(self, batch): # update G
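+        # G step on novel views: render the batch under rolled camera poses
+        # and apply only the novel-view vision-aided GAN loss (there is no
+        # pixel-aligned ground truth for these views).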
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False) # only use novel view D
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ nvs_pred = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=th.cat([
+ micro['c'][1:],
+ micro['c'][:1],
+ ])) # ! render novel views only for D loss
+
+ # add cvD supervision
+
+ if 'image_sr' in nvs_pred:
+ # concat sr and raw results
+ # vision_aided_loss = self.ddp_nvs_cvD(
+ # # pred_nv['image_sr'],
+ # # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ # th.cat([
+ # th.nn.functional.interpolate(
+ # nvs_pred['image_raw'],
+ # size=nvs_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # nvs_pred['image_sr'],
+ # ],
+ # dim=1),
+ # for_G=True).mean() # ! for debugging
+
+ vision_aided_loss = self.ddp_nvs_cvD(
+ # pred_nv['image_sr'],
+ # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ nvs_pred['image_sr'],
+ for_G=True).mean() # ! for debugging
+
+
+ # supervise sr only
+ # vision_aided_loss = self.ddp_nvs_cvD(
+ # # pred_nv['image_sr'],
+ # # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ # th.cat([nvs_pred['image_sr'],
+ # th.nn.functional.interpolate(nvs_pred['image_raw'], size=nvs_pred['image_sr'].shape[2:], mode='bilinear',
+ # align_corners=False,
+ # antialias=True),]),
+ # for_G=True).mean() # ! for debugging
+
+ # pred_nv['image_raw'], for_G=True).mean() # [B, 1] shape
+ else:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ nvs_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ loss = vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs': loss
+ # vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda,
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ if dist_util.get_rank() == 0 and self.step % 500 == 1:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+
+ # if True:
+ # pred_depth = nvs_pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ pred_depth = norm_depth(nvs_pred['image_depth'])
+ pred_img = nvs_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in nvs_pred:
+
+ if nvs_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat([
+ self.pool_512(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif nvs_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat([
+ self.pool_256(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat([
+ self.pool_128(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+
+ if gt_img.shape[-1] == 64:
+ gt_depth = self.pool_64(gt_depth)
+ elif gt_img.shape[-1] == 128:
+ gt_depth = self.pool_128(gt_depth)
+
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+                        dim=-1)  # TODO: depth fails to load here; values in [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # print(vis.shape)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+
+class TrainLoop3DcvD_nvsD_canoD_eg3d(TrainLoop3DcvD_nvsD_canoD):
+ def __init__(self, *, rec_model, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, load_submodule_name='', ignore_resume_opt=False, use_amp=False, **kwargs):
+ super().__init__(rec_model=rec_model, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, load_submodule_name=load_submodule_name, ignore_resume_opt=ignore_resume_opt, use_amp=use_amp, **kwargs)
+ self.rendering_kwargs = self.rec_model.module.decoder.triplane_decoder.rendering_kwargs # type: ignore
+ self._prepare_nvs_pose() # for eval novelview visualization
+
+ @th.inference_mode()
+ def eval_novelview_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}_batch_{i}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ for idx, c in enumerate(self.all_nvs_params):
+ pred = self.rec_model(img=micro['img_to_encoder'],
+ c=c.unsqueeze(0).repeat_interleave(micro['img'].shape[0], 0)) # pred: (B, 3, 64, 64)
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+
+ # st()
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+                # ! concat the batch along the H dim
+ pred_vis = pred_vis.permute(0,2,3,1).flatten(0,1) # H W 3
+
+ # vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ # vis = pred_vis.permute(1,2,0).cpu().numpy()
+ vis = pred_vis.cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # for j in range(vis.shape[0]):
+ # video_out.append_data(vis[j])
+ video_out.append_data(vis)
+
+ video_out.close()
+
+ th.cuda.empty_cache()
+
+
+ def _prepare_nvs_pose(self):
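+        # Precompute a fixed set of look-at cameras for novel-view eval:
+        # yaw/pitch trace one sinusoidal cycle around the average camera
+        # pivot and radius from the EG3D rendering kwargs; each pose is
+        # stored as flattened 4x4 extrinsics + 3x3 intrinsics (25 values).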
+ from nsr.camera_utils import LookAtPoseSampler, FOV_to_intrinsics
+
+ device = dist_util.dev()
+
+ fov_deg = 18.837 # for ffhq/afhq
+ intrinsics = FOV_to_intrinsics(fov_deg, device=device)
+
+ all_nvs_params = []
+
+ pitch_range = 0.25
+ yaw_range = 0.35
+ num_keyframes = 10 # how many nv poses to sample from
+ w_frames = 1
+
+ cam_pivot = th.Tensor(self.rendering_kwargs.get('avg_camera_pivot')).to(device)
+ cam_radius = self.rendering_kwargs.get('avg_camera_radius')
+
+ for frame_idx in range(num_keyframes):
+
+            cam2world_pose = LookAtPoseSampler.sample(
+                3.14 / 2 + yaw_range * np.sin(2 * 3.14 * frame_idx / (num_keyframes * w_frames)),
+                3.14 / 2 - 0.05 + pitch_range * np.cos(2 * 3.14 * frame_idx / (num_keyframes * w_frames)),
+                cam_pivot, radius=cam_radius, device=device)
+
+ camera_params = th.cat([cam2world_pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1)
+
+ all_nvs_params.append(camera_params)
+
+ self.all_nvs_params = th.cat(all_nvs_params, 0)
\ No newline at end of file
diff --git a/nsr/cvD/nvsD_canoD_mask.py b/nsr/cvD/nvsD_canoD_mask.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b1cb4ee9bbf9b7ade208c53ba0b5d628cdd1a4d
--- /dev/null
+++ b/nsr/cvD/nvsD_canoD_mask.py
@@ -0,0 +1,835 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import torchvision
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from ..train_util import TrainLoopBasic, TrainLoop3DRec
+import vision_aided_loss
+from dnnlib.util import calculate_adaptive_weight
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+from ..train_util_cvD import TrainLoop3DcvD
+# from .nvD import
+
+
+class TrainLoop3DcvD_nvsD_canoD_canomask(TrainLoop3DcvD):
+ # class TrainLoop3DcvD_nvsD_canoD():
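+    # Variant of the nvsD+canoD loop that, when no SR output is present,
+    # feeds the novel-view discriminator normalized 3-channel silhouettes
+    # ('silhouette_normalized_3channel') instead of RGB renderings.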
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ use_amp=use_amp,
+ **kwargs)
+
+ device = dist_util.dev()
+
+ self.cano_cvD = vision_aided_loss.Discriminator(
+ cv_type='clip', loss_type='multilevel_sigmoid_s',
+ device=device).to(device)
+ self.cano_cvD.cv_ensemble.requires_grad_(
+ False) # Freeze feature extractor
+ # self.cano_cvD.train()
+
+ cvD_model_params = list(self.cano_cvD.parameters())
+ SR_TRAINING = False
+ if SR_TRAINING: # replace the conv1 with 6 channel input
+ # width, patch_size = self.nvs_cvD.cv_ensemble
+ vision_width, vision_patch_size = [
+ self.cano_cvD.cv_ensemble.models[0].model.conv1.weight.shape[k]
+ for k in [0, -1]
+ ]
+ self.cano_cvD.cv_ensemble.models[0].model.conv1 = th.nn.Conv2d(
+ in_channels=6,
+ out_channels=vision_width,
+ kernel_size=vision_patch_size,
+ stride=vision_patch_size,
+ bias=False).to(dist_util.dev())
+ cvD_model_params += list(
+ self.cano_cvD.cv_ensemble.models[0].model.conv1.parameters())
+
+ self.cano_cvD.cv_ensemble.models[
+ 0].image_mean = self.cano_cvD.cv_ensemble.models[
+ 0].image_mean.repeat(2)
+ self.cano_cvD.cv_ensemble.models[
+ 0].image_std = self.cano_cvD.cv_ensemble.models[
+ 0].image_std.repeat(2)
+
+ # logger.log(f'cano_cvD_model_params: {cvD_model_params}')
+
+ self._load_and_sync_parameters(model=self.cano_cvD,
+ model_name='cano_cvD')
+
+ self.mp_trainer_canonical_cvD = MixedPrecisionTrainer(
+ model=self.cano_cvD,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='canonical_cvD',
+ use_amp=use_amp,
+ model_params=cvD_model_params)
+
+ # cano_lr = 2e-5 * (lr / 1e-5) # D_lr=2e-4 in cvD by default
+ # cano_lr = 5e-5 * (lr / 1e-5) # D_lr=2e-4 in cvD by default
+ cano_lr = 2e-4 * (
+ lr / 1e-5) # D_lr=2e-4 in cvD by default. 1e-4 still overfitting
+ self.opt_cano_cvD = AdamW(
+ self.mp_trainer_canonical_cvD.master_params,
+ lr=cano_lr, # same as the G
+ betas=(0, 0.999),
+ eps=1e-8) # dlr in biggan cfg
+
+ logger.log(f'cpt_cano_cvD lr: {cano_lr}')
+
+ if self.use_ddp:
+ self.ddp_cano_cvD = DDP(
+ self.cano_cvD,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ self.ddp_cano_cvD = self.cano_cvD
+
+ th.cuda.empty_cache()
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+
+ if step == 'g_step_rec':
+ self.forward_G_rec(batch)
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step_rec':
+ self.forward_D(batch, behaviour='rec')
+ # _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ elif step == 'g_step_nvs':
+ self.forward_G_nvs(batch)
+ took_step_g_nvs = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_nvs:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step_nvs':
+ self.forward_D(batch, behaviour='nvs')
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ # _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ batch = next(self.data)
+
+ if self.novel_view_poses is None:
+ self.novel_view_poses = th.roll(batch['c'], 1, 0).to(
+ dist_util.dev()) # save for eval visualization use
+
+ self.run_step(batch, 'g_step_rec')
+
+ # if self.step % 2 == 0:
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_rec')
+
+ # if self.step % 2 == 1:
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_nvs')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, self.mp_trainer_cvD.model_name)
+ self.save(self.mp_trainer_canonical_cvD,
+ self.mp_trainer_canonical_cvD.model_name)
+
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save()
+ self.save(self.mp_trainer_cvD,
+ self.mp_trainer_cvD.model_name)
+ self.save(self.mp_trainer_canonical_cvD,
+ self.mp_trainer_canonical_cvD.model_name)
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ def forward_D(self, batch, behaviour): # update D
+ self.mp_trainer_canonical_cvD.zero_grad()
+ self.mp_trainer_cvD.zero_grad()
+
+ self.rec_model.requires_grad_(False)
+ # self.ddp_model.requires_grad_(False)
+
+ # update two D
+ if behaviour == 'nvs':
+ self.ddp_nvs_cvD.requires_grad_(True)
+ self.ddp_cano_cvD.requires_grad_(False)
+ else: # update rec canonical D
+ self.ddp_nvs_cvD.requires_grad_(False)
+ self.ddp_cano_cvD.requires_grad_(True)
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_canonical_cvD.use_amp):
+
+ novel_view_c = th.cat([micro['c'][1:], micro['c'][:1]])
+
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ cano_pred = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ # TODO, optimize with one encoder, and two triplane decoder
+ if behaviour == 'rec':
+
+ if 'image_sr' in cano_pred:
+ # d_loss_cano = self.run_D_Diter(
+ # # real=micro['img_sr'],
+ # # fake=cano_pred['image_sr'],
+ # real=0.5 * micro['img_sr'] + 0.5 * th.nn.functional.interpolate(micro['img'], size=micro['img_sr'].shape[2:], mode='bilinear'),
+ # fake=0.5 * cano_pred['image_sr'] + 0.5 * th.nn.functional.interpolate(cano_pred['image_raw'], size=cano_pred['image_sr'].shape[2:], mode='bilinear'),
+ # D=self.ddp_canonical_cvD) # ! failed, color bias
+
+ # try concat them in batch
+ d_loss_cano = self.run_D_Diter(
+ real=th.cat([
+ th.nn.functional.interpolate(
+ micro['img'],
+ size=micro['img_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ micro['img_sr'],
+ ],
+ dim=1),
+ fake=th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ],
+ dim=1),
+ D=self.ddp_cano_cvD) # TODO, add SR for FFHQ
+
+ else:
+ d_loss_cano = self.run_D_Diter(
+ real=micro['img'],
+ fake=cano_pred['image_raw'],
+ D=self.ddp_cano_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/D_cano': d_loss_cano})
+ self.mp_trainer_canonical_cvD.backward(d_loss_cano)
+ else:
+ assert behaviour == 'nvs'
+
+ nvs_pred = self.rec_model(latent=latent,
+ c=novel_view_c,
+ behaviour='triplane_dec')
+
+ if 'image_sr' in nvs_pred:
+ # d_loss_nvs = self.run_D_Diter(
+ # # real=cano_pred['image_sr'],
+ # # fake=nvs_pred['image_sr'],
+ # real=0.5 * cano_pred['image_sr'] + 0.5 * th.nn.functional.interpolate(cano_pred['image_raw'], size=cano_pred['image_sr'].shape[2:], mode='bilinear'),
+ # fake=0.5 * nvs_pred['image_sr'] + 0.5 * th.nn.functional.interpolate(nvs_pred['image_raw'], size=nvs_pred['image_sr'].shape[2:], mode='bilinear'),
+ # D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ d_loss_nvs = self.run_D_Diter(
+ real=th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ],
+ dim=1),
+ fake=th.cat([
+ th.nn.functional.interpolate(
+ nvs_pred['image_raw'],
+ size=nvs_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ nvs_pred['image_sr'],
+ ],
+ dim=1),
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ else:
+ d_loss_nvs = self.run_D_Diter(
+ real=cano_pred['silhouette_normalized_3channel'],
+ fake=nvs_pred['silhouette_normalized_3channel'],
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/D_nvs_silhouette': d_loss_nvs})
+ self.mp_trainer_cvD.backward(d_loss_nvs)
+
+ def forward_G_rec(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+ ) # render novel view for first half of the batch for D loss
+
+ target_for_rec = micro
+ cano_pred = pred
+
+ if last_batch or not self.use_ddp:
+ loss, loss_dict = self.loss_class(cano_pred,
+ target_for_rec,
+ test_mode=False,
+ step=self.step +
+ self.resume_step)
+ else:
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict = self.loss_class(cano_pred,
+ target_for_rec,
+ test_mode=False,
+ step=self.step +
+ self.resume_step)
+
+ # add cvD supervision
+ # ! TODO
+
+ if 'image_sr' in cano_pred:
+ # concat both resolution
+ vision_aided_loss = self.ddp_cano_cvD(
+ th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ],
+ dim=1), # 6 channel input
+ for_G=True).mean() # [B, 1] shape
+
+ else:
+ vision_aided_loss = self.ddp_cano_cvD(
+ cano_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ # last_layer = self.rec_model.module.decoder.triplane_decoder.decoder.net[ # type: ignore
+ # -1].weight # type: ignore
+
+ d_weight = th.tensor(self.loss_class.opt.rec_cvD_lambda).to(
+ dist_util.dev())
+ # d_weight = calculate_adaptive_weight(
+ # loss,
+ # vision_aided_loss,
+ # last_layer,
+ # disc_weight_max=0.1) * self.loss_class.opt.rec_cvD_lambda
+ loss += vision_aided_loss * d_weight
+
+ loss_dict.update({
+ 'vision_aided_loss/G_rec':
+ (vision_aided_loss * d_weight).detach(),
+ 'd_weight':
+ d_weight
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(
+ loss) # no nvs cvD loss, following VQ3D
+
+ # DDP some parameters no grad:
+ # for name, p in self.ddp_model.named_parameters():
+ # if p.grad is None:
+ # print(f"(in rec)found rec unused param: {name}")
+
+ # ! move to other places, add tensorboard
+
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ # with th.no_grad():
+ # # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ # gt_depth = micro['depth']
+ # if gt_depth.ndim == 3:
+ # gt_depth = gt_depth.unsqueeze(1)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # # if True:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_img = pred['image_raw']
+ # gt_img = micro['img']
+
+ # if 'image_sr' in pred:
+ # if pred['image_sr'].shape[-1] == 512:
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ # elif pred['image_sr'].shape[-1] == 256:
+ # pred_img = th.cat(
+ # [self.pool_256(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_256(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_256(pred_depth)
+ # gt_depth = self.pool_256(gt_depth)
+
+ # else:
+ # pred_img = th.cat(
+ # [self.pool_128(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_128(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # gt_depth = self.pool_128(gt_depth)
+ # pred_depth = self.pool_128(pred_depth)
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ # gt_vis = th.cat(
+ # [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ # dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ # pred_vis = th.cat(
+ # [pred_img,
+ # pred_depth.repeat_interleave(3, dim=1)],
+ # dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ # )
+ # print(
+ # 'log vis to: ',
+ # f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ # )
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth):
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ return pred_depth
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # infer novel view also
+ pred_nv_img = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ if 'image_depth' in pred:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_nv_depth = norm_depth(
+ pred_nv_img['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_nv_depth = th.zeros_like(gt_depth)
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ pred_nv_img['image_raw'],
+ pred_nv_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+ pred_vis = th.cat([pred_vis, pred_vis_nv],
+ dim=-2) # cat in H dim
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+                        dim=-1)  # TODO: depth fails to load here; values in [0, 1]
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+                    vis_tensor = torchvision.utils.make_grid(
+                        vis, nrow=vis.shape[-1] // 64)  # (C, H, W) grid
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+
+ def forward_G_nvs(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False) # only use novel view D
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ nvs_pred = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=th.cat([
+ micro['c'][1:],
+ micro['c'][:1],
+ ])) # ! render novel views only for D loss
+
+ # add cvD supervision
+
+ if 'image_sr' in nvs_pred:
+ # concat sr and raw results
+ vision_aided_loss = self.ddp_nvs_cvD(
+ # pred_nv['image_sr'],
+ # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ th.cat([
+ th.nn.functional.interpolate(
+ nvs_pred['image_raw'],
+ size=nvs_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ nvs_pred['image_sr'],
+ ],
+ dim=1),
+ for_G=True).mean() # ! for debugging
+
+ # supervise sr only
+ # vision_aided_loss = self.ddp_nvs_cvD(
+ # # pred_nv['image_sr'],
+ # # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ # th.cat([nvs_pred['image_sr'],
+ # th.nn.functional.interpolate(nvs_pred['image_raw'], size=nvs_pred['image_sr'].shape[2:], mode='bilinear',
+ # align_corners=False,
+ # antialias=True),]),
+ # for_G=True).mean() # ! for debugging
+
+ # pred_nv['image_raw'], for_G=True).mean() # [B, 1] shape
+ else:
+ # vision_aided_loss = self.ddp_nvs_cvD(
+ # nvs_pred['image_raw'],
+ # for_G=True).mean() # [B, 1] shape
+ vision_aided_loss = self.ddp_nvs_cvD(
+ nvs_pred['silhouette_normalized_3channel'],
+ for_G=True).mean() # [B, 1] shape
+
+ loss = vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs_silhouette': loss
+ # vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda,
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ if dist_util.get_rank() == 0 and self.step % 500 == 1:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = nvs_pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = nvs_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in nvs_pred:
+
+ if nvs_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat([
+ self.pool_512(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif nvs_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat([
+ self.pool_256(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat([
+ self.pool_128(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+                        dim=-1)  # TODO: depth fails to load here; values in [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # print(vis.shape)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
diff --git a/nsr/cvD/nvsD_canoD_multiview.py b/nsr/cvD/nvsD_canoD_multiview.py
new file mode 100644
index 0000000000000000000000000000000000000000..16b6e56185127679907652b32b008355c35b6f78
--- /dev/null
+++ b/nsr/cvD/nvsD_canoD_multiview.py
@@ -0,0 +1,551 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from .nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+
+class TrainLoop3DcvD_nvsD_canoD_multiview(TrainLoop3DcvD_nvsD_canoD):
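+    """Multi-view variant of TrainLoop3DcvD_nvsD_canoD.
+
+    Batches pair each canonical view with an explicit novel view: keys
+    prefixed with 'nv_' (e.g. 'nv_c', 'nv_img') carry the second camera and
+    its ground truth, so forward_G_nvs adds a direct novel-view
+    reconstruction loss on top of the adversarial cvD term.
+    """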
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ use_amp=use_amp,
+ **kwargs)
+        assert not self.mp_trainer_rec.use_amp, 'this loop requires full precision: AMP has been observed to produce NaN gradients'
+
+ def forward_G_rec(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ target_cano = {}
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
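+            # keys prefixed with 'nv_' mark novel-view entries; stripping the
+            # prefix recovers the matching canonical key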
+ for k, v in micro.items():
+ if k[:2] == 'nv':
+ orig_key = k.replace('nv_', '')
+ # target_nvs[orig_key] = v
+ target_cano[orig_key] = micro[orig_key]
+
+ # last_batch = (i + self.microbatch) >= batch_size
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+                )  # render at the canonical (input) cameras for the reconstruction loss
+
+ target_for_rec = micro
+ cano_pred = pred
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, fg_mask = self.loss_class(
+ cano_pred,
+ target_for_rec,
+ test_mode=False,
+ step=self.step + self.resume_step,
+ return_fg_mask=True)
+
+ if 'image_sr' in cano_pred:
+ raise NotImplementedError()
+ # concat both resolution
+ vision_aided_loss = self.ddp_cano_cvD(
+ th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ],
+ dim=1), # 6 channel input
+ for_G=True).mean() # [B, 1] shape
+
+ else:
+ vision_aided_loss = self.ddp_cano_cvD(
+ cano_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ # last_layer = self.rec_model.module.decoder.triplane_decoder.decoder.net[ # type: ignore
+ # -1].weight # type: ignore
+
+ d_weight = th.tensor(self.loss_class.opt.rec_cvD_lambda).to(
+ dist_util.dev())
+ # d_weight = calculate_adaptive_weight(
+ # loss,
+ # vision_aided_loss,
+ # last_layer,
+ # disc_weight_max=0.1) * self.loss_class.opt.rec_cvD_lambda
+ loss += vision_aided_loss * d_weight
+
+ loss_dict.update({
+ 'vision_aided_loss/G_rec':
+ (vision_aided_loss * d_weight).detach(),
+ 'd_weight':
+ d_weight
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(
+ loss) # no nvs cvD loss, following VQ3D
+
+ # DDP some parameters no grad:
+ # for name, p in self.ddp_model.named_parameters():
+ # if p.grad is None:
+ # print(f"(in rec)found rec unused param: {name}")
+
+ # ! move to other places, add tensorboard
+
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ # with th.no_grad():
+ # # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ # gt_depth = micro['depth']
+ # if gt_depth.ndim == 3:
+ # gt_depth = gt_depth.unsqueeze(1)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # # if True:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_img = pred['image_raw']
+ # gt_img = micro['img']
+
+ # if 'image_sr' in pred:
+ # if pred['image_sr'].shape[-1] == 512:
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ # elif pred['image_sr'].shape[-1] == 256:
+ # pred_img = th.cat(
+ # [self.pool_256(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_256(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_256(pred_depth)
+ # gt_depth = self.pool_256(gt_depth)
+
+ # else:
+ # pred_img = th.cat(
+ # [self.pool_128(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_128(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # gt_depth = self.pool_128(gt_depth)
+ # pred_depth = self.pool_128(pred_depth)
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ # gt_vis = th.cat(
+ # [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ # dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ # pred_vis = th.cat(
+ # [pred_img,
+ # pred_depth.repeat_interleave(3, dim=1)],
+ # dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ # )
+ # print(
+ # 'log vis to: ',
+ # f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ # )
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
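+                        # min-max normalize to [0, 1], then map to [1, -1] so
+                        # the nearest depth renders brightest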
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ pred_img = pred['image_raw'].clip(-1, 1)
+ gt_img = micro['img']
+
+ # infer novel view also
+ pred_nv_img = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ if 'image_depth' in pred:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_nv_depth = norm_depth(pred_nv_img['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_nv_depth = th.zeros_like(gt_depth)
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ if gt_img.shape[-1] == 64:
+ gt_depth = self.pool_64(gt_depth)
+ elif gt_img.shape[-1] == 128:
+ gt_depth = self.pool_128(gt_depth)
+ # else:
+ # gt_depth = self.pool_64(gt_depth)
+
+ # st()
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ pred_nv_img['image_raw'].clip(-1, 1),
+ pred_nv_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+ pred_vis = th.cat([pred_vis, pred_vis_nv],
+ dim=-2) # cat in H dim
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+ vis_tensor = torchvision.utils.make_grid(
+ vis, nrow=vis.shape[-1] // 64) # HWC
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ normalize=True,
+ value_range=(-1, 1))
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ def forward_G_nvs(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False) # only use novel view D
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+ target_nvs = {}
+
+ for k, v in micro.items():
+ if k[:2] == 'nv':
+ orig_key = k.replace('nv_', '')
+ target_nvs[orig_key] = v
+ # target_cano[orig_key] = micro[orig_key]
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ nvs_pred = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['nv_c'],
+ ) # predict novel view here
+ # c=th.cat([
+ # micro['c'][1:],
+ # micro['c'][:1],
+ # ])) # ! render novel views only for D loss
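+                # unlike the rolled-camera variant kept above for reference,
+                # the multiview loader supplies explicit novel-view cameras in
+                # micro['nv_c']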
+
+ # add cvD supervision
+
+ if 'image_sr' in nvs_pred:
+ raise NotImplementedError()
+ # concat sr and raw results
+ vision_aided_loss = self.ddp_nvs_cvD(
+ # pred_nv['image_sr'],
+ # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ th.cat([
+ th.nn.functional.interpolate(
+ nvs_pred['image_raw'],
+ size=nvs_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ nvs_pred['image_sr'],
+ ],
+ dim=1),
+ for_G=True).mean() # ! for debugging
+
+ # supervise sr only
+ # vision_aided_loss = self.ddp_nvs_cvD(
+ # # pred_nv['image_sr'],
+ # # 0.5 * pred_nv['image_sr'] + 0.5 * th.nn.functional.interpolate(pred_nv['image_raw'], size=pred_nv['image_sr'].shape[2:], mode='bilinear'),
+ # th.cat([nvs_pred['image_sr'],
+ # th.nn.functional.interpolate(nvs_pred['image_raw'], size=nvs_pred['image_sr'].shape[2:], mode='bilinear',
+ # align_corners=False,
+ # antialias=True),]),
+ # for_G=True).mean() # ! for debugging
+
+ # pred_nv['image_raw'], for_G=True).mean() # [B, 1] shape
+ else:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ nvs_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ # ! add nv reconstruction loss
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, fg_mask = self.loss_class(
+ nvs_pred,
+ target_nvs,
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None)
+ log_rec3d_loss_dict(loss_dict)
+
+ loss += vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs':
+ vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda,
+ **{f'{k}_nv': v for k, v in loss_dict.items()}
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ if dist_util.get_rank() == 0 and self.step % 500 == 1:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+
+ # if True:
+ # pred_depth = nvs_pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ pred_depth = norm_depth(nvs_pred['image_depth'])
+ pred_img = nvs_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in nvs_pred:
+
+ if nvs_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat([
+ self.pool_512(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif nvs_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat([
+ self.pool_256(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat([
+ self.pool_128(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ if gt_img.shape[-1] == 64:
+ gt_depth = self.pool_64(gt_depth)
+ elif gt_img.shape[-1] == 128:
+ gt_depth = self.pool_128(gt_depth)
+
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # print(vis.shape)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
diff --git a/nsr/cvD/nvsD_nosr.py b/nsr/cvD/nvsD_nosr.py
new file mode 100644
index 0000000000000000000000000000000000000000..5bb1a7c599966a7168d39dca28370e1d222f9887
--- /dev/null
+++ b/nsr/cvD/nvsD_nosr.py
@@ -0,0 +1,545 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from ..train_util import TrainLoopBasic, TrainLoop3DRec
+import vision_aided_loss
+from dnnlib.util import calculate_adaptive_weight
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+from ..train_util_cvD import TrainLoop3DcvD
+
+
+class TrainLoop3DcvD_nvsD_noSR(TrainLoop3DcvD):
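+    """TrainLoop3DcvD variant trained without the super-resolution branch.
+
+    SR_TRAINING is forced to False in super().__init__, so the novel-view
+    discriminator and the visualization both operate on the raw
+    low-resolution renders.
+    """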
+
+ def __init__(self,
+ *,
+ # model,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ use_amp=use_amp,
+ SR_TRAINING=False,
+ **kwargs)
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+
+ if step == 'g_step_rec':
+ self.forward_G_rec(batch)
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'g_step_nvs':
+ self.forward_G_nvs(batch)
+ took_step_g_nvs = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_nvs:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step':
+ self.forward_D(batch)
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
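+            # one outer iteration runs three sub-steps, each on a fresh batch:
+            # (1) G reconstruction, (2) G novel-view adversarial, (3) D update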
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_rec') # pure VAE reconstruction
+
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+
+ def forward_D(self, batch): # update D
+ self.rec_model.requires_grad_(False)
+
+ self.mp_trainer_cvD.zero_grad()
+ self.ddp_nvs_cvD.requires_grad_(True)
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_cvD.use_amp):
+
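+                # roll the camera batch by one so each latent is decoded from
+                # another sample's viewpoint (a pseudo novel view)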
+ novel_view_c = th.cat([
+ micro['c'][1:], micro['c'][:1]
+ ])
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ cano_pred = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec',
+ return_raw_only=True)
+
+ nvs_pred = self.rec_model(latent=latent,
+ c=novel_view_c,
+ behaviour='triplane_dec',
+ return_raw_only=True)
+
+ # if 'image_sr' in nvs_pred:
+ # d_loss_nvs = self.run_D_Diter(
+ # real=th.cat([
+ # th.nn.functional.interpolate(
+ # cano_pred['image_raw'],
+ # size=cano_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # cano_pred['image_sr'],
+ # ], dim=1),
+ # fake=th.cat([
+ # th.nn.functional.interpolate(
+ # nvs_pred['image_raw'],
+ # size=nvs_pred['image_sr'].shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=True),
+ # nvs_pred['image_sr'],
+ # ], dim=1),
+ # D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ # else:
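+                # the canonical-view render serves as 'real' and the novel-view
+                # render as 'fake': D narrows the quality gap between the two
+                # branches instead of comparing against dataset images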
+ d_loss_nvs = self.run_D_Diter(
+ real=cano_pred['image_raw'],
+ fake=nvs_pred['image_raw'],
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/D_nvs': d_loss_nvs})
+ self.mp_trainer_cvD.backward(d_loss_nvs)
+
+ def forward_G_rec(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+                )  # render at the canonical (input) cameras for the reconstruction loss
+
+ target_for_rec = micro
+ pred_for_rec = pred
+
+ if last_batch or not self.use_ddp:
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+ else:
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+
+ def forward_G_nvs(self, batch): # update G
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+ self.ddp_nvs_cvD.requires_grad_(False) # only use novel view D
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ nvs_pred = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=th.cat([
+ micro['c'][1:],
+ micro['c'][:1],
+ ])) # ! render novel views only for D loss
+
+ # add cvD supervision
+
+ # if 'image_sr' in nvs_pred:
+ # # concat sr and raw results
+ # vision_aided_loss = self.ddp_nvs_cvD(
+ # nvs_pred['image_raw'],
+ # # th.cat([
+ # # th.nn.functional.interpolate(
+ # # size=nvs_pred['image_sr'].shape[2:],
+ # # mode='bilinear',
+ # # align_corners=False,
+ # # antialias=True),
+ # # # nvs_pred['image_sr'],
+ # # ], dim=1),
+ # for_G=True).mean()
+ # else:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ nvs_pred['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+ loss = vision_aided_loss * self.loss_class.opt.nvs_cvD_lambda
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs': loss
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = nvs_pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = nvs_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in nvs_pred:
+
+ if nvs_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat([
+ self.pool_512(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif nvs_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat([
+ self.pool_256(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat([
+ self.pool_128(pred_img), nvs_pred['image_sr']
+ ],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ print(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+
+ def save(self, mp_trainer=None, model_name='rec'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, mp_trainer.master_params)
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ dist.barrier()
+
+ def _load_and_sync_parameters(self, model=None, model_name='rec'):
+        # find_resume_checkpoint is expected to return a (path, step) pair, or
+        # a falsy value when no checkpoint is found; fall back to the
+        # configured path with resume_step left at its default in that case.
+        resume = find_resume_checkpoint(self.resume_checkpoint, model_name)
+        if resume:
+            resume_checkpoint, self.resume_step = resume
+        else:
+            resume_checkpoint = self.resume_checkpoint
+
+ if model is None:
+ model = self.rec_model # default model in the parent class
+
+ print(resume_checkpoint)
+
+ if resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ print(f'mark {model_name} loading ', flush=True)
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ print(f'mark {model_name} loading finished', flush=True)
+
+ model_state_dict = model.state_dict()
+
+ for k, v in resume_state_dict.items():
+ if k in model_state_dict.keys() and v.size(
+ ) == model_state_dict[k].size():
+ model_state_dict[k] = v
+ elif 'IN' in k:
+ print('ignore ', k)
+ else:
+                        print('!!!! ignore key: ', k, ': ', v.size())
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ print(f'synced {model_name} params')
diff --git a/nsr/dual_discriminator.py b/nsr/dual_discriminator.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9156230b656f7ecc9dc2c0792cc5bb54ad5a0a1
--- /dev/null
+++ b/nsr/dual_discriminator.py
@@ -0,0 +1,480 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Discriminator architectures from the paper
+"Efficient Geometry-aware 3D Generative Adversarial Networks"."""
+
+import numpy as np
+import torch
+from torch_utils import persistence
+from torch_utils.ops import upfirdn2d
+from .networks_stylegan2 import DiscriminatorBlock, MappingNetwork, DiscriminatorEpilogue
+from pdb import set_trace as st
+
+
+@persistence.persistent_class
+class SingleDiscriminator(torch.nn.Module):
+ def __init__(
+ self,
+ c_dim, # Conditioning label (C) dimensionality.
+ img_resolution, # Input resolution.
+ img_channels, # Number of input color channels.
+ architecture='resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ channel_base=32768, # Overall multiplier for the number of channels.
+ channel_max=512, # Maximum number of channels in any layer.
+ num_fp16_res=4, # Use FP16 for the N highest resolutions.
+ conv_clamp=256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ cmap_dim=None, # Dimensionality of mapped conditioning label, None = default.
+ sr_upsample_factor=1, # Ignored for SingleDiscriminator
+ block_kwargs={}, # Arguments for DiscriminatorBlock.
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ epilogue_kwargs={}, # Arguments for DiscriminatorEpilogue.
+ ):
+ super().__init__()
+ self.c_dim = c_dim
+ self.img_resolution = img_resolution
+ self.img_resolution_log2 = int(np.log2(img_resolution))
+ self.img_channels = img_channels
+ self.block_resolutions = [
+ 2**i for i in range(self.img_resolution_log2, 2, -1)
+ ]
+ channels_dict = {
+ res: min(channel_base // res, channel_max)
+ for res in self.block_resolutions + [4]
+ }
+ fp16_resolution = max(2**(self.img_resolution_log2 + 1 - num_fp16_res),
+ 8)
+
+ if cmap_dim is None:
+ cmap_dim = channels_dict[4]
+ if c_dim == 0:
+ cmap_dim = 0
+
+ common_kwargs = dict(img_channels=img_channels,
+ architecture=architecture,
+ conv_clamp=conv_clamp)
+ cur_layer_idx = 0
+ for res in self.block_resolutions:
+ in_channels = channels_dict[res] if res < img_resolution else 0
+ tmp_channels = channels_dict[res]
+ out_channels = channels_dict[res // 2]
+ use_fp16 = (res >= fp16_resolution)
+ block = DiscriminatorBlock(in_channels,
+ tmp_channels,
+ out_channels,
+ resolution=res,
+ first_layer_idx=cur_layer_idx,
+ use_fp16=use_fp16,
+ **block_kwargs,
+ **common_kwargs)
+ setattr(self, f'b{res}', block)
+ cur_layer_idx += block.num_layers
+ if c_dim > 0:
+ self.mapping = MappingNetwork(z_dim=0,
+ c_dim=c_dim,
+ w_dim=cmap_dim,
+ num_ws=None,
+ w_avg_beta=None,
+ **mapping_kwargs)
+ self.b4 = DiscriminatorEpilogue(channels_dict[4],
+ cmap_dim=cmap_dim,
+ resolution=4,
+ **epilogue_kwargs,
+ **common_kwargs)
+
+ def forward(self, img, c, update_emas=False, **block_kwargs):
+ img = img['image']
+
+ _ = update_emas # unused
+ x = None
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, **block_kwargs)
+
+ cmap = None
+ if self.c_dim > 0:
+ cmap = self.mapping(None, c)
+ x = self.b4(x, img, cmap)
+ return x
+
+ def extra_repr(self):
+ return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}'
+
+
+#----------------------------------------------------------------------------
+
+
+def filtered_resizing(image_orig_tensor, size, f, filter_mode='antialiased'):
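+    # resize to (size, size); filter_mode selects the low-pass strategy:
+    # 'antialiased' = antialiased bilinear, 'classic' = FIR up/downsampling,
+    # 'none' = plain bilinear, a float in (0, 1) = blend of aliased/antialiased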
+ if filter_mode == 'antialiased':
+ ada_filtered_64 = torch.nn.functional.interpolate(image_orig_tensor,
+ size=(size, size),
+ mode='bilinear',
+ align_corners=False,
+ antialias=True)
+ elif filter_mode == 'classic':
+ ada_filtered_64 = upfirdn2d.upsample2d(image_orig_tensor, f, up=2)
+ ada_filtered_64 = torch.nn.functional.interpolate(ada_filtered_64,
+ size=(size * 2 + 2,
+ size * 2 + 2),
+ mode='bilinear',
+ align_corners=False)
+ ada_filtered_64 = upfirdn2d.downsample2d(ada_filtered_64,
+ f,
+ down=2,
+ flip_filter=True,
+ padding=-1)
+ elif filter_mode == 'none':
+ ada_filtered_64 = torch.nn.functional.interpolate(image_orig_tensor,
+ size=(size, size),
+ mode='bilinear',
+ align_corners=False)
+ elif type(filter_mode) == float:
+ assert 0 < filter_mode < 1
+
+ filtered = torch.nn.functional.interpolate(image_orig_tensor,
+ size=(size, size),
+ mode='bilinear',
+ align_corners=False,
+ antialias=True)
+ aliased = torch.nn.functional.interpolate(image_orig_tensor,
+ size=(size, size),
+ mode='bilinear',
+ align_corners=False,
+ antialias=False)
+ ada_filtered_64 = (1 -
+ filter_mode) * aliased + (filter_mode) * filtered
+
+ return ada_filtered_64
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class DualDiscriminator(torch.nn.Module):
+ def __init__(
+ self,
+ c_dim, # Conditioning label (C) dimensionality.
+ img_resolution, # Input resolution.
+ img_channels, # Number of input color channels.
+ architecture='resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ channel_base=32768, # Overall multiplier for the number of channels.
+ channel_max=512, # Maximum number of channels in any layer.
+ num_fp16_res=4, # Use FP16 for the N highest resolutions.
+ conv_clamp=256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ cmap_dim=None, # Dimensionality of mapped conditioning label, None = default.
+ disc_c_noise=0, # Corrupt camera parameters with X std dev of noise before disc. pose conditioning.
+ block_kwargs={}, # Arguments for DiscriminatorBlock.
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ epilogue_kwargs={}, # Arguments for DiscriminatorEpilogue.
+ ):
+ super().__init__()
+ # img_channels *= 2
+ if img_channels == 3:
+ img_channels *= 2
+
+ self.c_dim = c_dim
+ self.img_resolution = img_resolution
+ self.img_resolution_log2 = int(np.log2(img_resolution))
+ self.img_channels = img_channels
+ self.block_resolutions = [
+ 2**i for i in range(self.img_resolution_log2, 2, -1)
+ ]
+ channels_dict = {
+ res: min(channel_base // res, channel_max)
+ for res in self.block_resolutions + [4]
+ }
+ fp16_resolution = max(2**(self.img_resolution_log2 + 1 - num_fp16_res),
+ 8)
+
+ if cmap_dim is None:
+ cmap_dim = channels_dict[4]
+ if c_dim == 0:
+ cmap_dim = 0
+
+ common_kwargs = dict(img_channels=img_channels,
+ architecture=architecture,
+ conv_clamp=conv_clamp)
+ cur_layer_idx = 0
+ for res in self.block_resolutions:
+ in_channels = channels_dict[res] if res < img_resolution else 0
+ tmp_channels = channels_dict[res]
+ out_channels = channels_dict[res // 2]
+ use_fp16 = (res >= fp16_resolution)
+ block = DiscriminatorBlock(in_channels,
+ tmp_channels,
+ out_channels,
+ resolution=res,
+ first_layer_idx=cur_layer_idx,
+ use_fp16=use_fp16,
+ **block_kwargs,
+ **common_kwargs)
+ setattr(self, f'b{res}', block)
+ cur_layer_idx += block.num_layers
+ if c_dim > 0:
+ self.mapping = MappingNetwork(z_dim=0,
+ c_dim=c_dim,
+ w_dim=cmap_dim,
+ num_ws=None,
+ w_avg_beta=None,
+ **mapping_kwargs)
+ self.b4 = DiscriminatorEpilogue(channels_dict[4],
+ cmap_dim=cmap_dim,
+ resolution=4,
+ **epilogue_kwargs,
+ **common_kwargs)
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter([1, 3, 3, 1]))
+ self.disc_c_noise = disc_c_noise
+
+ def forward(self, img, c, update_emas=False, **block_kwargs):
+ image_raw = filtered_resizing(img['image_raw'],
+ # size=img['image'].shape[-1],
+ size=img['image_sr'].shape[-1],
+ f=self.resample_filter)
+ # img = torch.cat([img['image'], image_raw], 1)
+ img = torch.cat([img['image_sr'], image_raw], 1)
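+        # 6-channel dual input: the raw render is upsampled to the SR
+        # resolution and concatenated with the super-resolved image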
+
+ _ = update_emas # unused
+ x = None
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, **block_kwargs)
+
+ cmap = None
+ if self.c_dim > 0:
+ if self.disc_c_noise > 0:
+ c += torch.randn_like(c) * c.std(0) * self.disc_c_noise
+ cmap = self.mapping(None, c)
+ x = self.b4(x, img, cmap)
+ return x
+
+ def extra_repr(self):
+ return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}'
+
+
+@persistence.persistent_class
+class GeoDualDiscriminator(DualDiscriminator):
+    def __init__(self, c_dim, img_resolution, img_channels, architecture='resnet',
+                 channel_base=32768, channel_max=512, num_fp16_res=4, conv_clamp=256,
+                 cmap_dim=None, disc_c_noise=0, block_kwargs={}, mapping_kwargs={},
+                 epilogue_kwargs={}, normal_condition=False):
+        super().__init__(c_dim, img_resolution, img_channels, architecture,
+                         channel_base, channel_max, num_fp16_res, conv_clamp, cmap_dim,
+                         disc_c_noise, block_kwargs, mapping_kwargs, epilogue_kwargs)
+ self.normal_condition = normal_condition
+
+ def forward(self, img, c, update_emas=False, **block_kwargs):
+        image = img['image']
+ image_raw = filtered_resizing(img['image_raw'],
+ size=img['image'].shape[-1],
+ f=self.resample_filter)
+ D_input_img = torch.cat([image, image_raw], 1)
+
+ image_depth = filtered_resizing(img['image_depth'], size=img['image'].shape[-1], f=self.resample_filter)
+ if self.normal_condition and 'normal' in img:
+ image_normal = filtered_resizing(img['normal'], size=img['image'].shape[-1], f=self.resample_filter)
+ D_input_img = torch.cat([D_input_img, image_depth, image_normal], 1)
+ else:
+ D_input_img = torch.cat([D_input_img, image_depth], 1)
+
+ img = D_input_img
+
+ _ = update_emas # unused
+ x = None
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, **block_kwargs)
+
+ cmap = None
+ if self.c_dim > 0:
+ if self.disc_c_noise > 0:
+ c += torch.randn_like(c) * c.std(0) * self.disc_c_noise
+ cmap = self.mapping(None, c)
+ x = self.b4(x, img, cmap)
+ return x
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class DummyDualDiscriminator(torch.nn.Module):
+ def __init__(
+ self,
+ c_dim, # Conditioning label (C) dimensionality.
+ img_resolution, # Input resolution.
+ img_channels, # Number of input color channels.
+ architecture='resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ channel_base=32768, # Overall multiplier for the number of channels.
+ channel_max=512, # Maximum number of channels in any layer.
+ num_fp16_res=4, # Use FP16 for the N highest resolutions.
+ conv_clamp=256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ cmap_dim=None, # Dimensionality of mapped conditioning label, None = default.
+ block_kwargs={}, # Arguments for DiscriminatorBlock.
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ epilogue_kwargs={}, # Arguments for DiscriminatorEpilogue.
+ ):
+ super().__init__()
+ img_channels *= 2
+
+ self.c_dim = c_dim
+ self.img_resolution = img_resolution
+ self.img_resolution_log2 = int(np.log2(img_resolution))
+ self.img_channels = img_channels
+ self.block_resolutions = [
+ 2**i for i in range(self.img_resolution_log2, 2, -1)
+ ]
+ channels_dict = {
+ res: min(channel_base // res, channel_max)
+ for res in self.block_resolutions + [4]
+ }
+ fp16_resolution = max(2**(self.img_resolution_log2 + 1 - num_fp16_res),
+ 8)
+
+ if cmap_dim is None:
+ cmap_dim = channels_dict[4]
+ if c_dim == 0:
+ cmap_dim = 0
+
+ common_kwargs = dict(img_channels=img_channels,
+ architecture=architecture,
+ conv_clamp=conv_clamp)
+ cur_layer_idx = 0
+ for res in self.block_resolutions:
+ in_channels = channels_dict[res] if res < img_resolution else 0
+ tmp_channels = channels_dict[res]
+ out_channels = channels_dict[res // 2]
+ use_fp16 = (res >= fp16_resolution)
+ block = DiscriminatorBlock(in_channels,
+ tmp_channels,
+ out_channels,
+ resolution=res,
+ first_layer_idx=cur_layer_idx,
+ use_fp16=use_fp16,
+ **block_kwargs,
+ **common_kwargs)
+ setattr(self, f'b{res}', block)
+ cur_layer_idx += block.num_layers
+ if c_dim > 0:
+ self.mapping = MappingNetwork(z_dim=0,
+ c_dim=c_dim,
+ w_dim=cmap_dim,
+ num_ws=None,
+ w_avg_beta=None,
+ **mapping_kwargs)
+ self.b4 = DiscriminatorEpilogue(channels_dict[4],
+ cmap_dim=cmap_dim,
+ resolution=4,
+ **epilogue_kwargs,
+ **common_kwargs)
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter([1, 3, 3, 1]))
+
+ self.raw_fade = 1
+
+ def forward(self, img, c, update_emas=False, **block_kwargs):
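+        # fade the raw-image branch from 1 to 0 over 500k images at batch 32
+        # (~15.6k forward calls)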
+ self.raw_fade = max(0, self.raw_fade - 1 / (500000 / 32))
+
+ image_raw = filtered_resizing(img['image_raw'],
+ size=img['image'].shape[-1],
+ f=self.resample_filter) * self.raw_fade
+ img = torch.cat([img['image'], image_raw], 1)
+
+ _ = update_emas # unused
+ x = None
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, **block_kwargs)
+
+ cmap = None
+ if self.c_dim > 0:
+ cmap = self.mapping(None, c)
+ x = self.b4(x, img, cmap)
+ return x
+
+ def extra_repr(self):
+ return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}'
+
+
+#----------------------------------------------------------------------------
+
+# panohead
+# Tri-discriminator: upsampled image, super-resolved image, and segmentation mask
+# V2: first concatenate imgs and seg mask, using only one conv block
+@persistence.persistent_class
+class MaskDualDiscriminatorV2(torch.nn.Module):
+ def __init__(self,
+ c_dim, # Conditioning label (C) dimensionality.
+ img_resolution, # Input resolution.
+ img_channels, # Number of input color channels.
+                 seg_resolution, # Segmentation mask resolution.
+                 seg_channels, # Number of segmentation mask channels.
+ architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ channel_base = 32768, # Overall multiplier for the number of channels.
+ channel_max = 512, # Maximum number of channels in any layer.
+ num_fp16_res = 4, # Use FP16 for the N highest resolutions.
+ conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ cmap_dim = None, # Dimensionality of mapped conditioning label, None = default.
+ disc_c_noise = 0, # Corrupt camera parameters with X std dev of noise before disc. pose conditioning.
+ block_kwargs = {}, # Arguments for DiscriminatorBlock.
+ mapping_kwargs = {}, # Arguments for MappingNetwork.
+ epilogue_kwargs = {}, # Arguments for DiscriminatorEpilogue.
+ ):
+ super().__init__()
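+        # tri-discriminator input: super-resolved RGB + resized raw RGB +
+        # segmentation mask, concatenated along the channel dimension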
+ img_channels = img_channels * 2 + seg_channels
+
+ self.c_dim = c_dim
+ self.img_resolution = img_resolution
+ self.img_resolution_log2 = int(np.log2(img_resolution))
+        self.img_channels = img_channels
+        self.seg_resolution = seg_resolution
+        self.seg_channels = seg_channels  # both reported by extra_repr()
+ self.block_resolutions = [2 ** i for i in range(self.img_resolution_log2, 2, -1)]
+ channels_dict = {res: min(channel_base // res, channel_max) for res in self.block_resolutions + [4]}
+ fp16_resolution = max(2 ** (self.img_resolution_log2 + 1 - num_fp16_res), 8)
+
+ if cmap_dim is None:
+ cmap_dim = channels_dict[4]
+ if c_dim == 0:
+ cmap_dim = 0
+
+ common_kwargs = dict(img_channels=img_channels, architecture=architecture, conv_clamp=conv_clamp)
+ cur_layer_idx = 0
+ for res in self.block_resolutions:
+ in_channels = channels_dict[res] if res < img_resolution else 0
+ tmp_channels = channels_dict[res]
+ out_channels = channels_dict[res // 2]
+ use_fp16 = (res >= fp16_resolution)
+ block = DiscriminatorBlock(in_channels, tmp_channels, out_channels, resolution=res,
+ first_layer_idx=cur_layer_idx, use_fp16=use_fp16, **block_kwargs, **common_kwargs)
+ setattr(self, f'b{res}', block)
+ cur_layer_idx += block.num_layers
+ if c_dim > 0:
+ self.mapping = MappingNetwork(z_dim=0, c_dim=c_dim, w_dim=cmap_dim, num_ws=None, w_avg_beta=None, **mapping_kwargs)
+ self.b4 = DiscriminatorEpilogue(channels_dict[4], cmap_dim=cmap_dim, resolution=4, **epilogue_kwargs, **common_kwargs)
+ self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1]))
+ self.disc_c_noise = disc_c_noise
+
+ def forward(self, img, c, update_emas=False, **block_kwargs):
+ image_raw = filtered_resizing(img['image_raw'], size=img['image'].shape[-1], f=self.resample_filter)
+ seg = filtered_resizing(img['image_mask'], size=img['image'].shape[-1], f=self.resample_filter)
+ seg = 2 * seg - 1 # normalize to [-1,1]
+ img = torch.cat([img['image'], image_raw, seg], 1)
+
+ _ = update_emas # unused
+ x = None
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, **block_kwargs)
+
+ cmap = None
+ if self.c_dim > 0:
+            if self.disc_c_noise > 0:
+                c += torch.randn_like(c) * c.std(0) * self.disc_c_noise
+ cmap = self.mapping(None, c)
+ x = self.b4(x, img, cmap)
+ return x
+
+ def extra_repr(self):
+ return ' '.join([
+ f'c_dim={self.c_dim:d},',
+ f'img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d},',
+ f'seg_resolution={self.seg_resolution:d}, seg_channels={self.seg_channels:d}'])
\ No newline at end of file
diff --git a/nsr/gaussian_renderer/__init__.py b/nsr/gaussian_renderer/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f74e336af41e042dfb9f1c308e40caf17d0b3211
--- /dev/null
+++ b/nsr/gaussian_renderer/__init__.py
@@ -0,0 +1,100 @@
+#
+# Copyright (C) 2023, Inria
+# GRAPHDECO research group, https://team.inria.fr/graphdeco
+# All rights reserved.
+#
+# This software is free for non-commercial, research and evaluation use
+# under the terms of the LICENSE.md file.
+#
+# For inquiries contact george.drettakis@inria.fr
+#
+
+import torch
+import math
+from diff_gaussian_rasterization import GaussianRasterizationSettings, GaussianRasterizer
+from scene.gaussian_model import GaussianModel
+from utils.sh_utils import eval_sh
+
+def render(viewpoint_camera, pc : GaussianModel, pipe, bg_color : torch.Tensor, scaling_modifier = 1.0, override_color = None):
+ """
+ Render the scene.
+
+ Background tensor (bg_color) must be on GPU!
+ """
+
+ # Create zero tensor. We will use it to make pytorch return gradients of the 2D (screen-space) means
+ screenspace_points = torch.zeros_like(pc.get_xyz, dtype=pc.get_xyz.dtype, requires_grad=True, device="cuda") + 0
+ try:
+ screenspace_points.retain_grad()
+    except Exception:
+        # retain_grad() is unavailable on some torch builds; screen-space
+        # gradients are then simply not retained
+        pass
+
+ # Set up rasterization configuration
+ tanfovx = math.tan(viewpoint_camera.FoVx * 0.5)
+ tanfovy = math.tan(viewpoint_camera.FoVy * 0.5)
+
+ raster_settings = GaussianRasterizationSettings(
+ image_height=int(viewpoint_camera.image_height),
+ image_width=int(viewpoint_camera.image_width),
+ tanfovx=tanfovx,
+ tanfovy=tanfovy,
+ bg=bg_color,
+ scale_modifier=scaling_modifier,
+ viewmatrix=viewpoint_camera.world_view_transform,
+ projmatrix=viewpoint_camera.full_proj_transform,
+ sh_degree=pc.active_sh_degree,
+ campos=viewpoint_camera.camera_center,
+ prefiltered=False,
+ debug=pipe.debug
+ )
+
+ rasterizer = GaussianRasterizer(raster_settings=raster_settings)
+
+ means3D = pc.get_xyz
+ means2D = screenspace_points
+ opacity = pc.get_opacity
+
+ # If precomputed 3d covariance is provided, use it. If not, then it will be computed from
+ # scaling / rotation by the rasterizer.
+ scales = None
+ rotations = None
+ cov3D_precomp = None
+ if pipe.compute_cov3D_python:
+ cov3D_precomp = pc.get_covariance(scaling_modifier)
+ else:
+ scales = pc.get_scaling
+ rotations = pc.get_rotation
+
+ # If precomputed colors are provided, use them. Otherwise, if it is desired to precompute colors
+ # from SHs in Python, do it. If not, then SH -> RGB conversion will be done by rasterizer.
+ shs = None
+ colors_precomp = None
+ if override_color is None:
+ if pipe.convert_SHs_python:
+ shs_view = pc.get_features.transpose(1, 2).view(-1, 3, (pc.max_sh_degree+1)**2)
+ dir_pp = (pc.get_xyz - viewpoint_camera.camera_center.repeat(pc.get_features.shape[0], 1))
+ dir_pp_normalized = dir_pp/dir_pp.norm(dim=1, keepdim=True)
+ sh2rgb = eval_sh(pc.active_sh_degree, shs_view, dir_pp_normalized)
+ colors_precomp = torch.clamp_min(sh2rgb + 0.5, 0.0)
+ else:
+ shs = pc.get_features
+ else:
+ colors_precomp = override_color
+
+ # Rasterize visible Gaussians to image, obtain their radii (on screen).
+ rendered_image, radii = rasterizer(
+ means3D = means3D,
+ means2D = means2D,
+ shs = shs,
+ colors_precomp = colors_precomp,
+ opacities = opacity,
+ scales = scales,
+ rotations = rotations,
+ cov3D_precomp = cov3D_precomp)
+
+ # Those Gaussians that were frustum culled or had a radius of 0 were not visible.
+ # They will be excluded from value updates used in the splitting criteria.
+ return {"render": rendered_image,
+ "viewspace_points": screenspace_points,
+ "visibility_filter" : radii > 0,
+ "radii": radii}
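+
+
+# Minimal usage sketch (hypothetical names: `cam`, `pc` and `pipe` are assumed
+# to exist; `cam` must expose FoVx/FoVy, image_height/image_width,
+# world_view_transform, full_proj_transform and camera_center, and `pipe` the
+# flags debug / compute_cov3D_python / convert_SHs_python):
+#
+#   bg = torch.zeros(3, device='cuda')  # background color, must live on GPU
+#   out = render(cam, pc, pipe, bg)
+#   rgb, radii = out['render'], out['radii']
+#   visible = out['visibility_filter']  # Gaussians actually rasterized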
diff --git a/nsr/gaussian_renderer/network_gui.py b/nsr/gaussian_renderer/network_gui.py
new file mode 100644
index 0000000000000000000000000000000000000000..df2f9dae782b24527ae5b09f91ca4009361de53f
--- /dev/null
+++ b/nsr/gaussian_renderer/network_gui.py
@@ -0,0 +1,86 @@
+#
+# Copyright (C) 2023, Inria
+# GRAPHDECO research group, https://team.inria.fr/graphdeco
+# All rights reserved.
+#
+# This software is free for non-commercial, research and evaluation use
+# under the terms of the LICENSE.md file.
+#
+# For inquiries contact george.drettakis@inria.fr
+#
+
+import torch
+import traceback
+import socket
+import json
+from scene.cameras import MiniCam
+
+host = "127.0.0.1"
+port = 6009
+
+conn = None
+addr = None
+
+listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+def init(wish_host, wish_port):
+ global host, port, listener
+ host = wish_host
+ port = wish_port
+ listener.bind((host, port))
+ listener.listen()
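+    # a zero timeout makes accept() non-blocking, so try_connect() can be
+    # polled from the training loop without stalling it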
+ listener.settimeout(0)
+
+def try_connect():
+ global conn, addr, listener
+ try:
+ conn, addr = listener.accept()
+ print(f"\nConnected by {addr}")
+ conn.settimeout(None)
+    except Exception:
+        pass  # no pending connection this poll; try again on the next call
+
+def read():
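+    # wire format: a 4-byte little-endian length prefix, then a UTF-8 JSON body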
+ global conn
+ messageLength = conn.recv(4)
+ messageLength = int.from_bytes(messageLength, 'little')
+ message = conn.recv(messageLength)
+ return json.loads(message.decode("utf-8"))
+
+def send(message_bytes, verify):
+ global conn
+    if message_bytes is not None:
+ conn.sendall(message_bytes)
+ conn.sendall(len(verify).to_bytes(4, 'little'))
+ conn.sendall(bytes(verify, 'ascii'))
+
+def receive():
+ message = read()
+
+ width = message["resolution_x"]
+ height = message["resolution_y"]
+
+ if width != 0 and height != 0:
+ try:
+ do_training = bool(message["train"])
+ fovy = message["fov_y"]
+ fovx = message["fov_x"]
+ znear = message["z_near"]
+ zfar = message["z_far"]
+ do_shs_python = bool(message["shs_python"])
+ do_rot_scale_python = bool(message["rot_scale_python"])
+ keep_alive = bool(message["keep_alive"])
+ scaling_modifier = message["scaling_modifier"]
+ world_view_transform = torch.reshape(torch.tensor(message["view_matrix"]), (4, 4)).cuda()
+ world_view_transform[:,1] = -world_view_transform[:,1]
+ world_view_transform[:,2] = -world_view_transform[:,2]
+ full_proj_transform = torch.reshape(torch.tensor(message["view_projection_matrix"]), (4, 4)).cuda()
+ full_proj_transform[:,1] = -full_proj_transform[:,1]
+ custom_cam = MiniCam(width, height, fovy, fovx, znear, zfar, world_view_transform, full_proj_transform)
+ except Exception as e:
+ print("")
+ traceback.print_exc()
+ raise e
+ return custom_cam, do_training, do_shs_python, do_rot_scale_python, keep_alive, scaling_modifier
+ else:
+ return None, None, None, None, None, None
\ No newline at end of file
diff --git a/nsr/geometry/__init__.py b/nsr/geometry/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..89e9a6c2fffe82a55693885dae78c1a630924389
--- /dev/null
+++ b/nsr/geometry/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
diff --git a/nsr/geometry/camera/__init__.py b/nsr/geometry/camera/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5c7082e47c65a08e25489b3c3fd010d07ad9758
--- /dev/null
+++ b/nsr/geometry/camera/__init__.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+from torch import nn
+
+
+class Camera(nn.Module):
+ def __init__(self):
+ super(Camera, self).__init__()
+ pass
diff --git a/nsr/geometry/camera/perspective_camera.py b/nsr/geometry/camera/perspective_camera.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f1e1906096476bcb411033601251115c64c97f9
--- /dev/null
+++ b/nsr/geometry/camera/perspective_camera.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+from . import Camera
+import numpy as np
+
+
+def projection(x=0.1, n=1.0, f=50.0, near_plane=None):
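+    # OpenGL-style perspective matrix for a square aspect: x is the tangent of
+    # the half field of view, n/f the near/far planes; near_plane, if given,
+    # overrides n in the depth terms only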
+ if near_plane is None:
+ near_plane = n
+ return np.array(
+ [[n / x, 0, 0, 0],
+ [0, n / -x, 0, 0],
+ [0, 0, -(f + near_plane) / (f - near_plane), -(2 * f * near_plane) / (f - near_plane)],
+ [0, 0, -1, 0]]).astype(np.float32)
+
+
+class PerspectiveCamera(Camera):
+ def __init__(self, fovy=49.0, focal=None, device='cuda'):
+ super(PerspectiveCamera, self).__init__()
+ self.device = device
+ if focal is None:
+ focal = np.tan(fovy / 180.0 * np.pi * 0.5)
+ self.proj_mtx = torch.from_numpy(projection(x=focal, f=1000.0, n=1.0, near_plane=0.1)).to(self.device).unsqueeze(dim=0)
+
+ def project(self, points_bxnx4):
+ out = torch.matmul(
+ points_bxnx4,
+ torch.transpose(self.proj_mtx, 1, 2))
+ return out
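+
+
+if __name__ == "__main__":
+    # Minimal smoke test (illustrative only; not used by the pipeline). Run as
+    # a module, e.g. `python -m nsr.geometry.camera.perspective_camera`; it
+    # assumes a CUDA device -- pass device='cpu' otherwise.
+    cam = PerspectiveCamera(fovy=49.0, device='cuda')
+    points_bxnx4 = torch.rand(1, 8, 4, device='cuda')  # batched homogeneous points
+    print(cam.project(points_bxnx4).shape)  # torch.Size([1, 8, 4])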
diff --git a/nsr/geometry/render/__init__.py b/nsr/geometry/render/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..483cfabbf395853f1ca3e67b856d5f17b9889d1b
--- /dev/null
+++ b/nsr/geometry/render/__init__.py
@@ -0,0 +1,8 @@
+import torch
+
+class Renderer():
+ def __init__(self):
+ pass
+
+ def forward(self):
+ pass
\ No newline at end of file
diff --git a/nsr/geometry/render/neural_render.py b/nsr/geometry/render/neural_render.py
new file mode 100644
index 0000000000000000000000000000000000000000..473464480125c050ee6dba973450678a197145fb
--- /dev/null
+++ b/nsr/geometry/render/neural_render.py
@@ -0,0 +1,121 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+import torch.nn.functional as F
+import nvdiffrast.torch as dr
+from . import Renderer
+
+_FG_LUT = None
+
+
+def interpolate(attr, rast, attr_idx, rast_db=None):
+ return dr.interpolate(
+ attr.contiguous(), rast, attr_idx, rast_db=rast_db,
+ diff_attrs=None if rast_db is None else 'all')
+
+
+def xfm_points(points, matrix, use_python=True):
+ '''Transform points.
+ Args:
+ points: Tensor containing 3D points with shape [minibatch_size, num_vertices, 3] or [1, num_vertices, 3]
+ matrix: A 4x4 transform matrix with shape [minibatch_size, 4, 4]
+        use_python: kept for API compatibility; this implementation always uses torch.matmul
+ Returns:
+ Transformed points in homogeneous 4D with shape [minibatch_size, num_vertices, 4].
+ '''
+ out = torch.matmul(torch.nn.functional.pad(points, pad=(0, 1), mode='constant', value=1.0), torch.transpose(matrix, 1, 2))
+ if torch.is_anomaly_enabled():
+ assert torch.all(torch.isfinite(out)), "Output of xfm_points contains inf or NaN"
+ return out
+
+
+def dot(x, y):
+ return torch.sum(x * y, -1, keepdim=True)
+
+
+def compute_vertex_normal(v_pos, t_pos_idx):
+ i0 = t_pos_idx[:, 0]
+ i1 = t_pos_idx[:, 1]
+ i2 = t_pos_idx[:, 2]
+
+ v0 = v_pos[i0, :]
+ v1 = v_pos[i1, :]
+ v2 = v_pos[i2, :]
+
+    face_normals = torch.cross(v1 - v0, v2 - v0, dim=-1)
+
+ # Splat face normals to vertices
+ v_nrm = torch.zeros_like(v_pos)
+ v_nrm.scatter_add_(0, i0[:, None].repeat(1, 3), face_normals)
+ v_nrm.scatter_add_(0, i1[:, None].repeat(1, 3), face_normals)
+ v_nrm.scatter_add_(0, i2[:, None].repeat(1, 3), face_normals)
+
+    # Normalize; replace zero (degenerate) normals with the default (0, 0, 1)
+ v_nrm = torch.where(
+ dot(v_nrm, v_nrm) > 1e-20, v_nrm, torch.as_tensor([0.0, 0.0, 1.0]).to(v_nrm)
+ )
+ v_nrm = F.normalize(v_nrm, dim=1)
+ assert torch.all(torch.isfinite(v_nrm))
+
+ return v_nrm
+
+
+class NeuralRender(Renderer):
+ def __init__(self, device='cuda', camera_model=None):
+ super(NeuralRender, self).__init__()
+ self.device = device
+ self.ctx = dr.RasterizeCudaContext(device=device)
+ self.projection_mtx = None
+ self.camera = camera_model
+
+ def render_mesh(
+ self,
+ mesh_v_pos_bxnx3,
+ mesh_t_pos_idx_fx3,
+ camera_mv_bx4x4,
+ mesh_v_feat_bxnxd,
+ resolution=256,
+ spp=1,
+ device='cuda',
+ hierarchical_mask=False
+ ):
+ assert not hierarchical_mask
+
+ mtx_in = torch.tensor(camera_mv_bx4x4, dtype=torch.float32, device=device) if not torch.is_tensor(camera_mv_bx4x4) else camera_mv_bx4x4
+ v_pos = xfm_points(mesh_v_pos_bxnx3, mtx_in) # Rotate it to camera coordinates
+ v_pos_clip = self.camera.project(v_pos) # Projection in the camera
+
+ v_nrm = compute_vertex_normal(mesh_v_pos_bxnx3[0], mesh_t_pos_idx_fx3.long()) # vertex normals in world coordinates
+
+        # Render the image.
+        # Only the per-pixel feature (the 3D location) is returned here; it is later used as input to the neural renderer.
+ num_layers = 1
+ mask_pyramid = None
+ assert mesh_t_pos_idx_fx3.shape[0] > 0 # Make sure we have shapes
+ mesh_v_feat_bxnxd = torch.cat([mesh_v_feat_bxnxd.repeat(v_pos.shape[0], 1, 1), v_pos], dim=-1) # Concatenate the pos
+
+ with dr.DepthPeeler(self.ctx, v_pos_clip, mesh_t_pos_idx_fx3, [resolution * spp, resolution * spp]) as peeler:
+ for _ in range(num_layers):
+ rast, db = peeler.rasterize_next_layer()
+ gb_feat, _ = interpolate(mesh_v_feat_bxnxd, rast, mesh_t_pos_idx_fx3)
+
+ hard_mask = torch.clamp(rast[..., -1:], 0, 1)
+ antialias_mask = dr.antialias(
+ hard_mask.clone().contiguous(), rast, v_pos_clip,
+ mesh_t_pos_idx_fx3)
+
+ depth = gb_feat[..., -2:-1]
+ ori_mesh_feature = gb_feat[..., :-4]
+
+ normal, _ = interpolate(v_nrm[None, ...], rast, mesh_t_pos_idx_fx3)
+ normal = dr.antialias(normal.clone().contiguous(), rast, v_pos_clip, mesh_t_pos_idx_fx3)
+ normal = F.normalize(normal, dim=-1)
+ normal = torch.lerp(torch.zeros_like(normal), (normal + 1.0) / 2.0, hard_mask.float()) # black background
+
+ return ori_mesh_feature, antialias_mask, hard_mask, rast, v_pos_clip, mask_pyramid, depth, normal
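+
+
+if __name__ == "__main__":
+    # Illustrative check of the two pure-torch helpers above (run as a module;
+    # importing this file already requires nvdiffrast). NeuralRender itself
+    # additionally needs a CUDA device for its rasterization context.
+    tri = torch.tensor([[[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]]])
+    identity = torch.eye(4).unsqueeze(0)
+    print(xfm_points(tri, identity).shape)  # torch.Size([1, 3, 4])
+    faces = torch.tensor([[0, 1, 2]])
+    print(compute_vertex_normal(tri[0], faces))  # every vertex normal is (0, 0, 1)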
diff --git a/nsr/geometry/rep_3d/__init__.py b/nsr/geometry/rep_3d/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3d5628a8433298477d1963f92578d47106b4a0f
--- /dev/null
+++ b/nsr/geometry/rep_3d/__init__.py
@@ -0,0 +1,18 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+import numpy as np
+
+
+class Geometry():
+ def __init__(self):
+ pass
+
+ def forward(self):
+ pass
diff --git a/nsr/geometry/rep_3d/dmtet.py b/nsr/geometry/rep_3d/dmtet.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6a709380abac0bbf66fd1c8582485f3982223e4
--- /dev/null
+++ b/nsr/geometry/rep_3d/dmtet.py
@@ -0,0 +1,504 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+import numpy as np
+import os
+from . import Geometry
+from .dmtet_utils import get_center_boundary_index
+import torch.nn.functional as F
+
+
+###############################################################################
+# DMTet utility functions
+###############################################################################
+def create_mt_variable(device):
+ triangle_table = torch.tensor(
+ [
+ [-1, -1, -1, -1, -1, -1],
+ [1, 0, 2, -1, -1, -1],
+ [4, 0, 3, -1, -1, -1],
+ [1, 4, 2, 1, 3, 4],
+ [3, 1, 5, -1, -1, -1],
+ [2, 3, 0, 2, 5, 3],
+ [1, 4, 0, 1, 5, 4],
+ [4, 2, 5, -1, -1, -1],
+ [4, 5, 2, -1, -1, -1],
+ [4, 1, 0, 4, 5, 1],
+ [3, 2, 0, 3, 5, 2],
+ [1, 3, 5, -1, -1, -1],
+ [4, 1, 2, 4, 3, 1],
+ [3, 0, 4, -1, -1, -1],
+ [2, 0, 1, -1, -1, -1],
+ [-1, -1, -1, -1, -1, -1]
+ ], dtype=torch.long, device=device)
+
+ num_triangles_table = torch.tensor([0, 1, 1, 2, 1, 2, 2, 1, 1, 2, 2, 1, 2, 1, 1, 0], dtype=torch.long, device=device)
+ base_tet_edges = torch.tensor([0, 1, 0, 2, 0, 3, 1, 2, 1, 3, 2, 3], dtype=torch.long, device=device)
+ v_id = torch.pow(2, torch.arange(4, dtype=torch.long, device=device))
+ return triangle_table, num_triangles_table, base_tet_edges, v_id
+
+
+def sort_edges(edges_ex2):
+ with torch.no_grad():
+ order = (edges_ex2[:, 0] > edges_ex2[:, 1]).long()
+ order = order.unsqueeze(dim=1)
+ a = torch.gather(input=edges_ex2, index=order, dim=1)
+ b = torch.gather(input=edges_ex2, index=1 - order, dim=1)
+ return torch.stack([a, b], -1)
+
+
+###############################################################################
+# marching tetrahedrons (differentiable)
+###############################################################################
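+# Each tetrahedron emits 0-2 triangles depending on the sign pattern of its
+# four SDF values (16 cases, looked up in `triangle_table`); vertices are
+# placed on the crossing edges by linear interpolation to the SDF zero level,
+# which keeps them differentiable with respect to `sdf_n`.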
+
+def marching_tets(pos_nx3, sdf_n, tet_fx4, triangle_table, num_triangles_table, base_tet_edges, v_id):
+ with torch.no_grad():
+ occ_n = sdf_n > 0
+ occ_fx4 = occ_n[tet_fx4.reshape(-1)].reshape(-1, 4)
+ occ_sum = torch.sum(occ_fx4, -1)
+ valid_tets = (occ_sum > 0) & (occ_sum < 4)
+ occ_sum = occ_sum[valid_tets]
+
+ # find all vertices
+ all_edges = tet_fx4[valid_tets][:, base_tet_edges].reshape(-1, 2)
+ all_edges = sort_edges(all_edges)
+ unique_edges, idx_map = torch.unique(all_edges, dim=0, return_inverse=True)
+
+ unique_edges = unique_edges.long()
+ mask_edges = occ_n[unique_edges.reshape(-1)].reshape(-1, 2).sum(-1) == 1
+ mapping = torch.ones((unique_edges.shape[0]), dtype=torch.long, device=sdf_n.device) * -1
+ mapping[mask_edges] = torch.arange(mask_edges.sum(), dtype=torch.long, device=sdf_n.device)
+ idx_map = mapping[idx_map] # map edges to verts
+
+ interp_v = unique_edges[mask_edges] # .long()
+ edges_to_interp = pos_nx3[interp_v.reshape(-1)].reshape(-1, 2, 3)
+ edges_to_interp_sdf = sdf_n[interp_v.reshape(-1)].reshape(-1, 2, 1)
+ edges_to_interp_sdf[:, -1] *= -1
+
+ denominator = edges_to_interp_sdf.sum(1, keepdim=True)
+
+ edges_to_interp_sdf = torch.flip(edges_to_interp_sdf, [1]) / denominator
+ verts = (edges_to_interp * edges_to_interp_sdf).sum(1)
+
+ idx_map = idx_map.reshape(-1, 6)
+
+ tetindex = (occ_fx4[valid_tets] * v_id.unsqueeze(0)).sum(-1)
+ num_triangles = num_triangles_table[tetindex]
+
+ # Generate triangle indices
+ faces = torch.cat(
+ (
+ torch.gather(
+ input=idx_map[num_triangles == 1], dim=1,
+ index=triangle_table[tetindex[num_triangles == 1]][:, :3]).reshape(-1, 3),
+ torch.gather(
+ input=idx_map[num_triangles == 2], dim=1,
+ index=triangle_table[tetindex[num_triangles == 2]][:, :6]).reshape(-1, 3),
+ ), dim=0)
+ return verts, faces
+
+
+def create_tetmesh_variables(device='cuda'):
+ tet_table = torch.tensor(
+ [[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+ [0, 4, 5, 6, -1, -1, -1, -1, -1, -1, -1, -1],
+ [1, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1],
+ [1, 0, 8, 7, 0, 5, 8, 7, 0, 5, 6, 8],
+ [2, 5, 7, 9, -1, -1, -1, -1, -1, -1, -1, -1],
+ [2, 0, 9, 7, 0, 4, 9, 7, 0, 4, 6, 9],
+ [2, 1, 9, 5, 1, 4, 9, 5, 1, 4, 8, 9],
+ [6, 0, 1, 2, 6, 1, 2, 8, 6, 8, 2, 9],
+ [3, 6, 8, 9, -1, -1, -1, -1, -1, -1, -1, -1],
+ [3, 0, 9, 8, 0, 4, 9, 8, 0, 4, 5, 9],
+ [3, 1, 9, 6, 1, 4, 9, 6, 1, 4, 7, 9],
+ [5, 0, 1, 3, 5, 1, 3, 7, 5, 7, 3, 9],
+ [3, 2, 8, 6, 2, 5, 8, 6, 2, 5, 7, 8],
+ [4, 0, 2, 3, 4, 2, 3, 7, 4, 7, 3, 8],
+ [4, 1, 2, 3, 4, 2, 3, 5, 4, 5, 3, 6],
+ [-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]], dtype=torch.long, device=device)
+ num_tets_table = torch.tensor([0, 1, 1, 3, 1, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3, 0], dtype=torch.long, device=device)
+ return tet_table, num_tets_table
+
+
+def marching_tets_tetmesh(
+ pos_nx3, sdf_n, tet_fx4, triangle_table, num_triangles_table, base_tet_edges, v_id,
+ return_tet_mesh=False, ori_v=None, num_tets_table=None, tet_table=None):
+ with torch.no_grad():
+ occ_n = sdf_n > 0
+ occ_fx4 = occ_n[tet_fx4.reshape(-1)].reshape(-1, 4)
+ occ_sum = torch.sum(occ_fx4, -1)
+ valid_tets = (occ_sum > 0) & (occ_sum < 4)
+ occ_sum = occ_sum[valid_tets]
+
+ # find all vertices
+ all_edges = tet_fx4[valid_tets][:, base_tet_edges].reshape(-1, 2)
+ all_edges = sort_edges(all_edges)
+ unique_edges, idx_map = torch.unique(all_edges, dim=0, return_inverse=True)
+
+ unique_edges = unique_edges.long()
+ mask_edges = occ_n[unique_edges.reshape(-1)].reshape(-1, 2).sum(-1) == 1
+ mapping = torch.ones((unique_edges.shape[0]), dtype=torch.long, device=sdf_n.device) * -1
+ mapping[mask_edges] = torch.arange(mask_edges.sum(), dtype=torch.long, device=sdf_n.device)
+ idx_map = mapping[idx_map] # map edges to verts
+
+ interp_v = unique_edges[mask_edges] # .long()
+ edges_to_interp = pos_nx3[interp_v.reshape(-1)].reshape(-1, 2, 3)
+ edges_to_interp_sdf = sdf_n[interp_v.reshape(-1)].reshape(-1, 2, 1)
+ edges_to_interp_sdf[:, -1] *= -1
+
+ denominator = edges_to_interp_sdf.sum(1, keepdim=True)
+
+ edges_to_interp_sdf = torch.flip(edges_to_interp_sdf, [1]) / denominator
+ verts = (edges_to_interp * edges_to_interp_sdf).sum(1)
+
+ idx_map = idx_map.reshape(-1, 6)
+
+ tetindex = (occ_fx4[valid_tets] * v_id.unsqueeze(0)).sum(-1)
+ num_triangles = num_triangles_table[tetindex]
+
+ # Generate triangle indices
+ faces = torch.cat(
+ (
+ torch.gather(
+ input=idx_map[num_triangles == 1], dim=1,
+ index=triangle_table[tetindex[num_triangles == 1]][:, :3]).reshape(-1, 3),
+ torch.gather(
+ input=idx_map[num_triangles == 2], dim=1,
+ index=triangle_table[tetindex[num_triangles == 2]][:, :6]).reshape(-1, 3),
+ ), dim=0)
+ if not return_tet_mesh:
+ return verts, faces
+ occupied_verts = ori_v[occ_n]
+    mapping = torch.ones((pos_nx3.shape[0]), dtype=torch.long, device=pos_nx3.device) * -1
+    mapping[occ_n] = torch.arange(occupied_verts.shape[0], device=pos_nx3.device)
+ tet_fx4 = mapping[tet_fx4.reshape(-1)].reshape((-1, 4))
+
+ idx_map = torch.cat([tet_fx4[valid_tets] + verts.shape[0], idx_map], -1) # t x 10
+ tet_verts = torch.cat([verts, occupied_verts], 0)
+ num_tets = num_tets_table[tetindex]
+
+ tets = torch.cat(
+ (
+ torch.gather(input=idx_map[num_tets == 1], dim=1, index=tet_table[tetindex[num_tets == 1]][:, :4]).reshape(
+ -1,
+ 4),
+ torch.gather(input=idx_map[num_tets == 3], dim=1, index=tet_table[tetindex[num_tets == 3]][:, :12]).reshape(
+ -1,
+ 4),
+ ), dim=0)
+ # add fully occupied tets
+ fully_occupied = occ_fx4.sum(-1) == 4
+ tet_fully_occupied = tet_fx4[fully_occupied] + verts.shape[0]
+ tets = torch.cat([tets, tet_fully_occupied])
+
+ return verts, faces, tet_verts, tets
+
+
+###############################################################################
+# Compact tet grid
+###############################################################################
+
+def compact_tets(pos_nx3, sdf_n, tet_fx4):
+ with torch.no_grad():
+ # Find surface tets
+ occ_n = sdf_n > 0
+ occ_fx4 = occ_n[tet_fx4.reshape(-1)].reshape(-1, 4)
+ occ_sum = torch.sum(occ_fx4, -1)
+ valid_tets = (occ_sum > 0) & (occ_sum < 4) # one value per tet, these are the surface tets
+
+ valid_vtx = tet_fx4[valid_tets].reshape(-1)
+ unique_vtx, idx_map = torch.unique(valid_vtx, dim=0, return_inverse=True)
+ new_pos = pos_nx3[unique_vtx]
+ new_sdf = sdf_n[unique_vtx]
+ new_tets = idx_map.reshape(-1, 4)
+ return new_pos, new_sdf, new_tets
+
+
+###############################################################################
+# Subdivide volume
+###############################################################################
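+# One uniform 1:8 subdivision step: every edge midpoint becomes a new vertex
+# (its position and SDF are averaged from the edge endpoints), and each tet is
+# replaced by four corner tets plus four tets filling the interior octahedron.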
+
+def batch_subdivide_volume(tet_pos_bxnx3, tet_bxfx4, grid_sdf):
+ device = tet_pos_bxnx3.device
+ # get new verts
+ tet_fx4 = tet_bxfx4[0]
+ edges = [0, 1, 0, 2, 0, 3, 1, 2, 1, 3, 2, 3]
+ all_edges = tet_fx4[:, edges].reshape(-1, 2)
+ all_edges = sort_edges(all_edges)
+ unique_edges, idx_map = torch.unique(all_edges, dim=0, return_inverse=True)
+ idx_map = idx_map + tet_pos_bxnx3.shape[1]
+ all_values = torch.cat([tet_pos_bxnx3, grid_sdf], -1)
+ mid_points_pos = all_values[:, unique_edges.reshape(-1)].reshape(
+ all_values.shape[0], -1, 2,
+ all_values.shape[-1]).mean(2)
+ new_v = torch.cat([all_values, mid_points_pos], 1)
+ new_v, new_sdf = new_v[..., :3], new_v[..., 3]
+
+ # get new tets
+
+ idx_a, idx_b, idx_c, idx_d = tet_fx4[:, 0], tet_fx4[:, 1], tet_fx4[:, 2], tet_fx4[:, 3]
+ idx_ab = idx_map[0::6]
+ idx_ac = idx_map[1::6]
+ idx_ad = idx_map[2::6]
+ idx_bc = idx_map[3::6]
+ idx_bd = idx_map[4::6]
+ idx_cd = idx_map[5::6]
+
+ tet_1 = torch.stack([idx_a, idx_ab, idx_ac, idx_ad], dim=1)
+ tet_2 = torch.stack([idx_b, idx_bc, idx_ab, idx_bd], dim=1)
+ tet_3 = torch.stack([idx_c, idx_ac, idx_bc, idx_cd], dim=1)
+ tet_4 = torch.stack([idx_d, idx_ad, idx_cd, idx_bd], dim=1)
+ tet_5 = torch.stack([idx_ab, idx_ac, idx_ad, idx_bd], dim=1)
+ tet_6 = torch.stack([idx_ab, idx_ac, idx_bd, idx_bc], dim=1)
+ tet_7 = torch.stack([idx_cd, idx_ac, idx_bd, idx_ad], dim=1)
+ tet_8 = torch.stack([idx_cd, idx_ac, idx_bc, idx_bd], dim=1)
+
+ tet_np = torch.cat([tet_1, tet_2, tet_3, tet_4, tet_5, tet_6, tet_7, tet_8], dim=0)
+ tet_np = tet_np.reshape(1, -1, 4).expand(tet_pos_bxnx3.shape[0], -1, -1)
+ tet = tet_np.long().to(device)
+
+ return new_v, tet, new_sdf
+
+
+###############################################################################
+# Adjacency
+###############################################################################
+def tet_to_tet_adj_sparse(tet_tx4):
+    # Builds a row-normalized sparse tet-to-tet adjacency matrix (two tets are
+    # adjacent iff they share a face). NOTE: self-connections are included.
+ with torch.no_grad():
+ t = tet_tx4.shape[0]
+ device = tet_tx4.device
+ idx_array = torch.LongTensor(
+ [0, 1, 2,
+ 1, 0, 3,
+ 2, 3, 0,
+ 3, 2, 1]).to(device).reshape(4, 3).unsqueeze(0).expand(t, -1, -1) # (t, 4, 3)
+
+ # get all faces
+ all_faces = torch.gather(input=tet_tx4.unsqueeze(1).expand(-1, 4, -1), index=idx_array, dim=-1).reshape(
+ -1,
+ 3) # (tx4, 3)
+ all_faces_tet_idx = torch.arange(t, device=device).unsqueeze(-1).expand(-1, 4).reshape(-1)
+ # sort and group
+ all_faces_sorted, _ = torch.sort(all_faces, dim=1)
+
+ all_faces_unique, inverse_indices, counts = torch.unique(
+ all_faces_sorted, dim=0, return_counts=True,
+ return_inverse=True)
+ tet_face_fx3 = all_faces_unique[counts == 2]
+ counts = counts[inverse_indices] # tx4
+ valid = (counts == 2)
+
+ group = inverse_indices[valid]
+ # print (inverse_indices.shape, group.shape, all_faces_tet_idx.shape)
+ _, indices = torch.sort(group)
+ all_faces_tet_idx_grouped = all_faces_tet_idx[valid][indices]
+ tet_face_tetidx_fx2 = torch.stack([all_faces_tet_idx_grouped[::2], all_faces_tet_idx_grouped[1::2]], dim=-1)
+
+ tet_adj_idx = torch.cat([tet_face_tetidx_fx2, torch.flip(tet_face_tetidx_fx2, [1])])
+ adj_self = torch.arange(t, device=tet_tx4.device)
+ adj_self = torch.stack([adj_self, adj_self], -1)
+ tet_adj_idx = torch.cat([tet_adj_idx, adj_self])
+
+ tet_adj_idx = torch.unique(tet_adj_idx, dim=0)
+ values = torch.ones(
+ tet_adj_idx.shape[0], device=tet_tx4.device).float()
+ adj_sparse = torch.sparse.FloatTensor(
+ tet_adj_idx.t(), values, torch.Size([t, t]))
+
+ # normalization
+ neighbor_num = 1.0 / torch.sparse.sum(
+ adj_sparse, dim=1).to_dense()
+ values = torch.index_select(neighbor_num, 0, tet_adj_idx[:, 0])
+ adj_sparse = torch.sparse.FloatTensor(
+ tet_adj_idx.t(), values, torch.Size([t, t]))
+ return adj_sparse
+
+
+###############################################################################
+# Compact grid
+###############################################################################
+
+def get_tet_bxfx4x3(bxnxz, bxfx4):
+ n_batch, z = bxnxz.shape[0], bxnxz.shape[2]
+ gather_input = bxnxz.unsqueeze(2).expand(
+ n_batch, bxnxz.shape[1], 4, z)
+ gather_index = bxfx4.unsqueeze(-1).expand(
+ n_batch, bxfx4.shape[1], 4, z).long()
+ tet_bxfx4xz = torch.gather(
+ input=gather_input, dim=1, index=gather_index)
+
+ return tet_bxfx4xz
+
+
+def shrink_grid(tet_pos_bxnx3, tet_bxfx4, grid_sdf):
+ with torch.no_grad():
+ assert tet_pos_bxnx3.shape[0] == 1
+
+ occ = grid_sdf[0] > 0
+ occ_sum = get_tet_bxfx4x3(occ.unsqueeze(0).unsqueeze(-1), tet_bxfx4).reshape(-1, 4).sum(-1)
+ mask = (occ_sum > 0) & (occ_sum < 4)
+
+ # build connectivity graph
+ adj_matrix = tet_to_tet_adj_sparse(tet_bxfx4[0])
+ mask = mask.float().unsqueeze(-1)
+
+ # Include a one ring of neighbors
+ for i in range(1):
+ mask = torch.sparse.mm(adj_matrix, mask)
+ mask = mask.squeeze(-1) > 0
+
+ mapping = torch.zeros((tet_pos_bxnx3.shape[1]), device=tet_pos_bxnx3.device, dtype=torch.long)
+ new_tet_bxfx4 = tet_bxfx4[:, mask].long()
+ selected_verts_idx = torch.unique(new_tet_bxfx4)
+ new_tet_pos_bxnx3 = tet_pos_bxnx3[:, selected_verts_idx]
+ mapping[selected_verts_idx] = torch.arange(selected_verts_idx.shape[0], device=tet_pos_bxnx3.device)
+ new_tet_bxfx4 = mapping[new_tet_bxfx4.reshape(-1)].reshape(new_tet_bxfx4.shape)
+ new_grid_sdf = grid_sdf[:, selected_verts_idx]
+ return new_tet_pos_bxnx3, new_tet_bxfx4, new_grid_sdf
+
+
+###############################################################################
+# Regularizer
+###############################################################################
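+# DMTet's SDF regularizer: for every tet edge whose endpoint signs disagree, a
+# pair of binary cross-entropy terms pulls each endpoint towards the sign of
+# the other, penalizing spurious sign flips (floater artifacts) in the field.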
+
+def sdf_reg_loss(sdf, all_edges):
+ sdf_f1x6x2 = sdf[all_edges.reshape(-1)].reshape(-1, 2)
+ mask = torch.sign(sdf_f1x6x2[..., 0]) != torch.sign(sdf_f1x6x2[..., 1])
+ sdf_f1x6x2 = sdf_f1x6x2[mask]
+ sdf_diff = torch.nn.functional.binary_cross_entropy_with_logits(
+ sdf_f1x6x2[..., 0],
+ (sdf_f1x6x2[..., 1] > 0).float()) + \
+ torch.nn.functional.binary_cross_entropy_with_logits(
+ sdf_f1x6x2[..., 1],
+ (sdf_f1x6x2[..., 0] > 0).float())
+ return sdf_diff
+
+
+def sdf_reg_loss_batch(sdf, all_edges):
+ sdf_f1x6x2 = sdf[:, all_edges.reshape(-1)].reshape(sdf.shape[0], -1, 2)
+ mask = torch.sign(sdf_f1x6x2[..., 0]) != torch.sign(sdf_f1x6x2[..., 1])
+ sdf_f1x6x2 = sdf_f1x6x2[mask]
+ sdf_diff = torch.nn.functional.binary_cross_entropy_with_logits(sdf_f1x6x2[..., 0], (sdf_f1x6x2[..., 1] > 0).float()) + \
+ torch.nn.functional.binary_cross_entropy_with_logits(sdf_f1x6x2[..., 1], (sdf_f1x6x2[..., 0] > 0).float())
+ return sdf_diff
+
+
+###############################################################################
+# Geometry interface
+###############################################################################
+class DMTetGeometry(Geometry):
+ def __init__(
+ self, grid_res=64, scale=2.0, device='cuda', renderer=None,
+ render_type='neural_render', args=None):
+ super(DMTetGeometry, self).__init__()
+ self.grid_res = grid_res
+ self.device = device
+ self.args = args
+ tets = np.load('data/tets/%d_compress.npz' % (grid_res))
+ self.verts = torch.from_numpy(tets['vertices']).float().to(self.device)
+        # Normalize the tet grid: center it at the origin and scale its longest side to unit length
+ length = self.verts.max(dim=0)[0] - self.verts.min(dim=0)[0]
+ length = length.max()
+ mid = (self.verts.max(dim=0)[0] + self.verts.min(dim=0)[0]) / 2.0
+ self.verts = (self.verts - mid.unsqueeze(dim=0)) / length
+ if isinstance(scale, list):
+ self.verts[:, 0] = self.verts[:, 0] * scale[0]
+ self.verts[:, 1] = self.verts[:, 1] * scale[1]
+            self.verts[:, 2] = self.verts[:, 2] * scale[2]
+ else:
+ self.verts = self.verts * scale
+ self.indices = torch.from_numpy(tets['tets']).long().to(self.device)
+ self.triangle_table, self.num_triangles_table, self.base_tet_edges, self.v_id = create_mt_variable(self.device)
+ self.tet_table, self.num_tets_table = create_tetmesh_variables(self.device)
+ # Parameters for regularization computation
+ edges = torch.tensor([0, 1, 0, 2, 0, 3, 1, 2, 1, 3, 2, 3], dtype=torch.long, device=self.device)
+ all_edges = self.indices[:, edges].reshape(-1, 2)
+ all_edges_sorted = torch.sort(all_edges, dim=1)[0]
+ self.all_edges = torch.unique(all_edges_sorted, dim=0)
+
+ # Parameters used for fix boundary sdf
+ self.center_indices, self.boundary_indices = get_center_boundary_index(self.verts)
+ self.renderer = renderer
+ self.render_type = render_type
+
+ def getAABB(self):
+ return torch.min(self.verts, dim=0).values, torch.max(self.verts, dim=0).values
+
+ def get_mesh(self, v_deformed_nx3, sdf_n, with_uv=False, indices=None):
+ if indices is None:
+ indices = self.indices
+ verts, faces = marching_tets(
+ v_deformed_nx3, sdf_n, indices, self.triangle_table,
+ self.num_triangles_table, self.base_tet_edges, self.v_id)
+ faces = torch.cat(
+ [faces[:, 0:1],
+ faces[:, 2:3],
+ faces[:, 1:2], ], dim=-1)
+ return verts, faces
+
+ def get_tet_mesh(self, v_deformed_nx3, sdf_n, with_uv=False, indices=None):
+ if indices is None:
+ indices = self.indices
+ verts, faces, tet_verts, tets = marching_tets_tetmesh(
+ v_deformed_nx3, sdf_n, indices, self.triangle_table,
+ self.num_triangles_table, self.base_tet_edges, self.v_id, return_tet_mesh=True,
+ num_tets_table=self.num_tets_table, tet_table=self.tet_table, ori_v=v_deformed_nx3)
+ faces = torch.cat(
+ [faces[:, 0:1],
+ faces[:, 2:3],
+ faces[:, 1:2], ], dim=-1)
+ return verts, faces, tet_verts, tets
+
+ def render_mesh(self, mesh_v_nx3, mesh_f_fx3, camera_mv_bx4x4, resolution=256, hierarchical_mask=False):
+ return_value = dict()
+ if self.render_type == 'neural_render':
+            tex_pos, mask, hard_mask, rast, v_pos_clip, mask_pyramid, depth, normal = self.renderer.render_mesh(
+ mesh_v_nx3.unsqueeze(dim=0),
+ mesh_f_fx3.int(),
+ camera_mv_bx4x4,
+ mesh_v_nx3.unsqueeze(dim=0),
+ resolution=resolution,
+ device=self.device,
+ hierarchical_mask=hierarchical_mask
+ )
+
+ return_value['tex_pos'] = tex_pos
+ return_value['mask'] = mask
+ return_value['hard_mask'] = hard_mask
+ return_value['rast'] = rast
+ return_value['v_pos_clip'] = v_pos_clip
+ return_value['mask_pyramid'] = mask_pyramid
+            return_value['depth'] = depth
+            return_value['normal'] = normal
+ else:
+ raise NotImplementedError
+
+ return return_value
+
+ def render(self, v_deformed_bxnx3=None, sdf_bxn=None, camera_mv_bxnviewx4x4=None, resolution=256):
+        # Assumes a batch of meshes (each entry may have a different mesh/geometry); for a single shape the batch size is 1.
+ v_list = []
+ f_list = []
+ n_batch = v_deformed_bxnx3.shape[0]
+ all_render_output = []
+ for i_batch in range(n_batch):
+ verts_nx3, faces_fx3 = self.get_mesh(v_deformed_bxnx3[i_batch], sdf_bxn[i_batch])
+ v_list.append(verts_nx3)
+ f_list.append(faces_fx3)
+ render_output = self.render_mesh(verts_nx3, faces_fx3, camera_mv_bxnviewx4x4[i_batch], resolution)
+ all_render_output.append(render_output)
+
+ # Concatenate all render output
+ return_keys = all_render_output[0].keys()
+ return_value = dict()
+ for k in return_keys:
+ value = [v[k] for v in all_render_output]
+ return_value[k] = value
+ # We can do concatenation outside of the render
+ return return_value
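+
+
+if __name__ == "__main__":
+    # Illustrative (run as a module): marching tets on a single tetrahedron
+    # whose first vertex lies inside the surface (sdf > 0) extracts one triangle.
+    tables = create_mt_variable('cpu')
+    pos_nx3 = torch.tensor([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
+    sdf_n = torch.tensor([0.5, -0.5, -0.5, -0.5])
+    tet_fx4 = torch.tensor([[0, 1, 2, 3]], dtype=torch.long)
+    verts, faces = marching_tets(pos_nx3, sdf_n, tet_fx4, *tables)
+    print(verts.shape, faces.shape)  # torch.Size([3, 3]) torch.Size([1, 3])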
diff --git a/nsr/geometry/rep_3d/dmtet_utils.py b/nsr/geometry/rep_3d/dmtet_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d466a9e78c49d947c115707693aa18d759885ad
--- /dev/null
+++ b/nsr/geometry/rep_3d/dmtet_utils.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+
+
+def get_center_boundary_index(verts):
+ length_ = torch.sum(verts ** 2, dim=-1)
+ center_idx = torch.argmin(length_)
+ boundary_neg = verts == verts.max()
+ boundary_pos = verts == verts.min()
+ boundary = torch.bitwise_or(boundary_pos, boundary_neg)
+ boundary = torch.sum(boundary.float(), dim=-1)
+ boundary_idx = torch.nonzero(boundary)
+ return center_idx, boundary_idx.squeeze(dim=-1)
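+
+
+if __name__ == "__main__":
+    # Illustrative: on a 3x3x3 grid spanning [-1, 1]^3 the center index is the
+    # vertex closest to the origin, and the boundary set is every vertex that
+    # touches a min/max plane (all 26 non-center vertices here).
+    axis = torch.linspace(-1.0, 1.0, 3)
+    verts = torch.stack(torch.meshgrid(axis, axis, axis, indexing='ij'), dim=-1).reshape(-1, 3)
+    center_idx, boundary_idx = get_center_boundary_index(verts)
+    print(center_idx.item(), boundary_idx.shape)  # 13 torch.Size([26])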
diff --git a/nsr/geometry/rep_3d/extract_texture_map.py b/nsr/geometry/rep_3d/extract_texture_map.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5d62bb5a6c5cdf632fb504db3d2dfa99a3abbd3
--- /dev/null
+++ b/nsr/geometry/rep_3d/extract_texture_map.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+import xatlas
+import numpy as np
+import nvdiffrast.torch as dr
+
+
+# ==============================================================================================
+def interpolate(attr, rast, attr_idx, rast_db=None):
+ return dr.interpolate(attr.contiguous(), rast, attr_idx, rast_db=rast_db, diff_attrs=None if rast_db is None else 'all')
+
+
+def xatlas_uvmap(ctx, mesh_v, mesh_pos_idx, resolution):
+ vmapping, indices, uvs = xatlas.parametrize(mesh_v.detach().cpu().numpy(), mesh_pos_idx.detach().cpu().numpy())
+
+ # Convert to tensors
+ indices_int64 = indices.astype(np.uint64, casting='same_kind').view(np.int64)
+
+ uvs = torch.tensor(uvs, dtype=torch.float32, device=mesh_v.device)
+ mesh_tex_idx = torch.tensor(indices_int64, dtype=torch.int64, device=mesh_v.device)
+    # map the UVs from [0, 1] to clip space [-1, 1] for rasterization
+ uv_clip = uvs[None, ...] * 2.0 - 1.0
+
+ # pad to four component coordinate
+ uv_clip4 = torch.cat((uv_clip, torch.zeros_like(uv_clip[..., 0:1]), torch.ones_like(uv_clip[..., 0:1])), dim=-1)
+
+ # rasterize
+ rast, _ = dr.rasterize(ctx, uv_clip4, mesh_tex_idx.int(), (resolution, resolution))
+
+ # Interpolate world space position
+ gb_pos, _ = interpolate(mesh_v[None, ...], rast, mesh_pos_idx.int())
+ mask = rast[..., 3:4] > 0
+ return uvs, mesh_tex_idx, gb_pos, mask
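+
+
+# Typical usage (a sketch; assumes a CUDA device and caller-provided mesh
+# tensors `mesh_v` [N, 3] float and `mesh_f` [F, 3] long on that device):
+#
+#   glctx = dr.RasterizeCudaContext()
+#   uvs, mesh_tex_idx, gb_pos, mask = xatlas_uvmap(glctx, mesh_v, mesh_f, resolution=1024)
+#   # gb_pos holds the interpolated world-space position per texel; mask marks
+#   # the texels covered by the UV layout, i.e. where a texture can be baked.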
diff --git a/nsr/geometry/rep_3d/flexicubes.py b/nsr/geometry/rep_3d/flexicubes.py
new file mode 100644
index 0000000000000000000000000000000000000000..26d7b91b6266d802baaf55b64238629cd0f740d0
--- /dev/null
+++ b/nsr/geometry/rep_3d/flexicubes.py
@@ -0,0 +1,579 @@
+# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+import torch
+from .tables import *
+
+__all__ = [
+ 'FlexiCubes'
+]
+
+
+class FlexiCubes:
+ """
+ This class implements the FlexiCubes method for extracting meshes from scalar fields.
+ It maintains a series of lookup tables and indices to support the mesh extraction process.
+ FlexiCubes, a differentiable variant of the Dual Marching Cubes (DMC) scheme, enhances
+ the geometric fidelity and mesh quality of reconstructed meshes by dynamically adjusting
+ the surface representation through gradient-based optimization.
+
+ During instantiation, the class loads DMC tables from a file and transforms them into
+ PyTorch tensors on the specified device.
+
+ Attributes:
+ device (str): Specifies the computational device (default is "cuda").
+ dmc_table (torch.Tensor): Dual Marching Cubes (DMC) table that encodes the edges
+ associated with each dual vertex in 256 Marching Cubes (MC) configurations.
+ num_vd_table (torch.Tensor): Table holding the number of dual vertices in each of
+ the 256 MC configurations.
+ check_table (torch.Tensor): Table resolving ambiguity in cases C16 and C19
+ of the DMC configurations.
+ tet_table (torch.Tensor): Lookup table used in tetrahedralizing the isosurface.
+ quad_split_1 (torch.Tensor): Indices for splitting a quad into two triangles
+ along one diagonal.
+ quad_split_2 (torch.Tensor): Alternative indices for splitting a quad into
+ two triangles along the other diagonal.
+ quad_split_train (torch.Tensor): Indices for splitting a quad into four triangles
+ during training by connecting all edges to their midpoints.
+ cube_corners (torch.Tensor): Defines the positions of a standard unit cube's
+ eight corners in 3D space, ordered starting from the origin (0,0,0),
+ moving along the x-axis, then y-axis, and finally z-axis.
+ Used as a blueprint for generating a voxel grid.
+ cube_corners_idx (torch.Tensor): Cube corners indexed as powers of 2, used
+ to retrieve the case id.
+ cube_edges (torch.Tensor): Edge connections in a cube, listed in pairs.
+ Used to retrieve edge vertices in DMC.
+ edge_dir_table (torch.Tensor): A mapping tensor that associates edge indices with
+ their corresponding axis. For instance, edge_dir_table[0] = 0 indicates that the
+ first edge is oriented along the x-axis.
+ dir_faces_table (torch.Tensor): A tensor that maps the corresponding axis of shared edges
+ across four adjacent cubes to the shared faces of these cubes. For instance,
+ dir_faces_table[0] = [5, 4] implies that for four cubes sharing an edge along
+ the x-axis, the first and second cubes share faces indexed as 5 and 4, respectively.
+ This tensor is only utilized during isosurface tetrahedralization.
+ adj_pairs (torch.Tensor):
+ A tensor containing index pairs that correspond to neighboring cubes that share the same edge.
+ qef_reg_scale (float):
+ The scaling factor applied to the regularization loss to prevent issues with singularity
+ when solving the QEF. This parameter is only used when a 'grad_func' is specified.
+ weight_scale (float):
+ The scale of weights in FlexiCubes. Should be between 0 and 1.
+ """
+
+ def __init__(self, device="cuda", qef_reg_scale=1e-3, weight_scale=0.99):
+
+ self.device = device
+ self.dmc_table = torch.tensor(dmc_table, dtype=torch.long, device=device, requires_grad=False)
+ self.num_vd_table = torch.tensor(num_vd_table,
+ dtype=torch.long, device=device, requires_grad=False)
+ self.check_table = torch.tensor(
+ check_table,
+ dtype=torch.long, device=device, requires_grad=False)
+
+ self.tet_table = torch.tensor(tet_table, dtype=torch.long, device=device, requires_grad=False)
+ self.quad_split_1 = torch.tensor([0, 1, 2, 0, 2, 3], dtype=torch.long, device=device, requires_grad=False)
+ self.quad_split_2 = torch.tensor([0, 1, 3, 3, 1, 2], dtype=torch.long, device=device, requires_grad=False)
+ self.quad_split_train = torch.tensor(
+ [0, 1, 1, 2, 2, 3, 3, 0], dtype=torch.long, device=device, requires_grad=False)
+
+ self.cube_corners = torch.tensor([[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0], [0, 0, 1], [
+ 1, 0, 1], [0, 1, 1], [1, 1, 1]], dtype=torch.float, device=device)
+ self.cube_corners_idx = torch.pow(2, torch.arange(8, requires_grad=False))
+ self.cube_edges = torch.tensor([0, 1, 1, 5, 4, 5, 0, 4, 2, 3, 3, 7, 6, 7, 2, 6,
+ 2, 0, 3, 1, 7, 5, 6, 4], dtype=torch.long, device=device, requires_grad=False)
+
+ self.edge_dir_table = torch.tensor([0, 2, 0, 2, 0, 2, 0, 2, 1, 1, 1, 1],
+ dtype=torch.long, device=device)
+ self.dir_faces_table = torch.tensor([
+ [[5, 4], [3, 2], [4, 5], [2, 3]],
+ [[5, 4], [1, 0], [4, 5], [0, 1]],
+ [[3, 2], [1, 0], [2, 3], [0, 1]]
+ ], dtype=torch.long, device=device)
+ self.adj_pairs = torch.tensor([0, 1, 1, 3, 3, 2, 2, 0], dtype=torch.long, device=device)
+ self.qef_reg_scale = qef_reg_scale
+ self.weight_scale = weight_scale
+
+ def construct_voxel_grid(self, res):
+ """
+ Generates a voxel grid based on the specified resolution.
+
+ Args:
+ res (int or list[int]): The resolution of the voxel grid. If an integer
+ is provided, it is used for all three dimensions. If a list or tuple
+ of 3 integers is provided, they define the resolution for the x,
+ y, and z dimensions respectively.
+
+ Returns:
+ (torch.Tensor, torch.Tensor): Returns the vertices and the indices of the
+ cube corners (index into vertices) of the constructed voxel grid.
+ The vertices are centered at the origin, with the length of each
+ dimension in the grid being one.
+ """
+ base_cube_f = torch.arange(8).to(self.device)
+ if isinstance(res, int):
+ res = (res, res, res)
+ voxel_grid_template = torch.ones(res, device=self.device)
+
+ res = torch.tensor([res], dtype=torch.float, device=self.device)
+ coords = torch.nonzero(voxel_grid_template).float() / res # N, 3
+ verts = (self.cube_corners.unsqueeze(0) / res + coords.unsqueeze(1)).reshape(-1, 3)
+ cubes = (base_cube_f.unsqueeze(0) +
+ torch.arange(coords.shape[0], device=self.device).unsqueeze(1) * 8).reshape(-1)
+
+ verts_rounded = torch.round(verts * 10**5) / (10**5)
+ verts_unique, inverse_indices = torch.unique(verts_rounded, dim=0, return_inverse=True)
+ cubes = inverse_indices[cubes.reshape(-1)].reshape(-1, 8)
+
+ return verts_unique - 0.5, cubes
+
+ def __call__(self, x_nx3, s_n, cube_fx8, res, beta_fx12=None, alpha_fx8=None,
+ gamma_f=None, training=False, output_tetmesh=False, grad_func=None):
+ r"""
+ Main function for mesh extraction from scalar field using FlexiCubes. This function converts
+ discrete signed distance fields, encoded on voxel grids and additional per-cube parameters,
+ to triangle or tetrahedral meshes using a differentiable operation as described in
+ `Flexible Isosurface Extraction for Gradient-Based Mesh Optimization`_. FlexiCubes enhances
+ mesh quality and geometric fidelity by adjusting the surface representation based on gradient
+ optimization. The output surface is differentiable with respect to the input vertex positions,
+ scalar field values, and weight parameters.
+
+ If you intend to extract a surface mesh from a fixed Signed Distance Field without the
+ optimization of parameters, it is suggested to provide the "grad_func" which should
+ return the surface gradient at any given 3D position. When grad_func is provided, the process
+ to determine the dual vertex position adapts to solve a Quadratic Error Function (QEF), as
+        described in the `Manifold Dual Contouring`_ paper, and employs a smart splitting strategy.
+ Please note, this approach is non-differentiable.
+
+ For more details and example usage in optimization, refer to the
+ `Flexible Isosurface Extraction for Gradient-Based Mesh Optimization`_ SIGGRAPH 2023 paper.
+
+ Args:
+ x_nx3 (torch.Tensor): Coordinates of the voxel grid vertices, can be deformed.
+ s_n (torch.Tensor): Scalar field values at each vertex of the voxel grid. Negative values
+ denote that the corresponding vertex resides inside the isosurface. This affects
+ the directions of the extracted triangle faces and volume to be tetrahedralized.
+ cube_fx8 (torch.Tensor): Indices of 8 vertices for each cube in the voxel grid.
+ res (int or list[int]): The resolution of the voxel grid. If an integer is provided, it
+ is used for all three dimensions. If a list or tuple of 3 integers is provided, they
+ specify the resolution for the x, y, and z dimensions respectively.
+ beta_fx12 (torch.Tensor, optional): Weight parameters for the cube edges to adjust dual
+ vertices positioning. Defaults to uniform value for all edges.
+ alpha_fx8 (torch.Tensor, optional): Weight parameters for the cube corners to adjust dual
+ vertices positioning. Defaults to uniform value for all vertices.
+ gamma_f (torch.Tensor, optional): Weight parameters to control the splitting of
+ quadrilaterals into triangles. Defaults to uniform value for all cubes.
+ training (bool, optional): If set to True, applies differentiable quad splitting for
+ training. Defaults to False.
+ output_tetmesh (bool, optional): If set to True, outputs a tetrahedral mesh, otherwise,
+ outputs a triangular mesh. Defaults to False.
+ grad_func (callable, optional): A function to compute the surface gradient at specified
+ 3D positions (input: Nx3 positions). The function should return gradients as an Nx3
+ tensor. If None, the original FlexiCubes algorithm is utilized. Defaults to None.
+
+ Returns:
+ (torch.Tensor, torch.LongTensor, torch.Tensor): Tuple containing:
+ - Vertices for the extracted triangular/tetrahedral mesh.
+ - Faces for the extracted triangular/tetrahedral mesh.
+ - Regularizer L_dev, computed per dual vertex.
+
+ .. _Flexible Isosurface Extraction for Gradient-Based Mesh Optimization:
+ https://research.nvidia.com/labs/toronto-ai/flexicubes/
+ .. _Manifold Dual Contouring:
+ https://people.engr.tamu.edu/schaefer/research/dualsimp_tvcg.pdf
+ """
+
+ surf_cubes, occ_fx8 = self._identify_surf_cubes(s_n, cube_fx8)
+        if surf_cubes.sum() == 0:
+            # No cube crosses the isosurface: return empty vertices, faces and L_dev.
+            empty_faces = torch.zeros((0, 4) if output_tetmesh else (0, 3), dtype=torch.long, device=self.device)
+            return torch.zeros((0, 3), device=self.device), empty_faces, torch.zeros((0), device=self.device)
+ beta_fx12, alpha_fx8, gamma_f = self._normalize_weights(beta_fx12, alpha_fx8, gamma_f, surf_cubes)
+
+ case_ids = self._get_case_id(occ_fx8, surf_cubes, res)
+
+ surf_edges, idx_map, edge_counts, surf_edges_mask = self._identify_surf_edges(s_n, cube_fx8, surf_cubes)
+
+ vd, L_dev, vd_gamma, vd_idx_map = self._compute_vd(
+ x_nx3, cube_fx8[surf_cubes], surf_edges, s_n, case_ids, beta_fx12, alpha_fx8, gamma_f, idx_map, grad_func)
+ vertices, faces, s_edges, edge_indices = self._triangulate(
+ s_n, surf_edges, vd, vd_gamma, edge_counts, idx_map, vd_idx_map, surf_edges_mask, training, grad_func)
+ if not output_tetmesh:
+ return vertices, faces, L_dev
+ else:
+ vertices, tets = self._tetrahedralize(
+ x_nx3, s_n, cube_fx8, vertices, faces, surf_edges, s_edges, vd_idx_map, case_ids, edge_indices,
+ surf_cubes, training)
+ return vertices, tets, L_dev
+
+ def _compute_reg_loss(self, vd, ue, edge_group_to_vd, vd_num_edges):
+ """
+ Regularizer L_dev as in Equation 8
+ """
+ dist = torch.norm(ue - torch.index_select(input=vd, index=edge_group_to_vd, dim=0), dim=-1)
+ mean_l2 = torch.zeros_like(vd[:, 0])
+ mean_l2 = (mean_l2).index_add_(0, edge_group_to_vd, dist) / vd_num_edges.squeeze(1).float()
+ mad = (dist - torch.index_select(input=mean_l2, index=edge_group_to_vd, dim=0)).abs()
+ return mad
+
+ def _normalize_weights(self, beta_fx12, alpha_fx8, gamma_f, surf_cubes):
+ """
+ Normalizes the given weights to be non-negative. If input weights are None, it creates and returns a set of weights of ones.
+ """
+ n_cubes = surf_cubes.shape[0]
+
+ if beta_fx12 is not None:
+ beta_fx12 = (torch.tanh(beta_fx12) * self.weight_scale + 1)
+ else:
+ beta_fx12 = torch.ones((n_cubes, 12), dtype=torch.float, device=self.device)
+
+ if alpha_fx8 is not None:
+ alpha_fx8 = (torch.tanh(alpha_fx8) * self.weight_scale + 1)
+ else:
+ alpha_fx8 = torch.ones((n_cubes, 8), dtype=torch.float, device=self.device)
+
+ if gamma_f is not None:
+ gamma_f = torch.sigmoid(gamma_f) * self.weight_scale + (1 - self.weight_scale)/2
+ else:
+ gamma_f = torch.ones((n_cubes), dtype=torch.float, device=self.device)
+
+ return beta_fx12[surf_cubes], alpha_fx8[surf_cubes], gamma_f[surf_cubes]
+
+ @torch.no_grad()
+ def _get_case_id(self, occ_fx8, surf_cubes, res):
+ """
+ Obtains the ID of topology cases based on cell corner occupancy. This function resolves the
+ ambiguity in the Dual Marching Cubes (DMC) configurations as described in Section 1.3 of the
+ supplementary material. It should be noted that this function assumes a regular grid.
+ """
+ case_ids = (occ_fx8[surf_cubes] * self.cube_corners_idx.to(self.device).unsqueeze(0)).sum(-1)
+
+ problem_config = self.check_table.to(self.device)[case_ids]
+ to_check = problem_config[..., 0] == 1
+ problem_config = problem_config[to_check]
+ if not isinstance(res, (list, tuple)):
+ res = [res, res, res]
+
+        # 'problem_config' only contains configurations for surface cubes. Next, we construct a 3D array,
+        # 'problem_config_full', to store configurations for all cubes (with a default config for
+        # non-surface cubes). This allows efficient checking on adjacent cubes.
+ problem_config_full = torch.zeros(list(res) + [5], device=self.device, dtype=torch.long)
+ vol_idx = torch.nonzero(problem_config_full[..., 0] == 0) # N, 3
+ vol_idx_problem = vol_idx[surf_cubes][to_check]
+ problem_config_full[vol_idx_problem[..., 0], vol_idx_problem[..., 1], vol_idx_problem[..., 2]] = problem_config
+ vol_idx_problem_adj = vol_idx_problem + problem_config[..., 1:4]
+
+ within_range = (
+ vol_idx_problem_adj[..., 0] >= 0) & (
+ vol_idx_problem_adj[..., 0] < res[0]) & (
+ vol_idx_problem_adj[..., 1] >= 0) & (
+ vol_idx_problem_adj[..., 1] < res[1]) & (
+ vol_idx_problem_adj[..., 2] >= 0) & (
+ vol_idx_problem_adj[..., 2] < res[2])
+
+ vol_idx_problem = vol_idx_problem[within_range]
+ vol_idx_problem_adj = vol_idx_problem_adj[within_range]
+ problem_config = problem_config[within_range]
+ problem_config_adj = problem_config_full[vol_idx_problem_adj[..., 0],
+ vol_idx_problem_adj[..., 1], vol_idx_problem_adj[..., 2]]
+ # If two cubes with cases C16 and C19 share an ambiguous face, both cases are inverted.
+ to_invert = (problem_config_adj[..., 0] == 1)
+ idx = torch.arange(case_ids.shape[0], device=self.device)[to_check][within_range][to_invert]
+ case_ids.index_put_((idx,), problem_config[to_invert][..., -1])
+ return case_ids
+
+ @torch.no_grad()
+ def _identify_surf_edges(self, s_n, cube_fx8, surf_cubes):
+ """
+ Identifies grid edges that intersect with the underlying surface by checking for opposite signs. As each edge
+ can be shared by multiple cubes, this function also assigns a unique index to each surface-intersecting edge
+ and marks the cube edges with this index.
+ """
+ occ_n = s_n < 0
+ all_edges = cube_fx8[surf_cubes][:, self.cube_edges].reshape(-1, 2)
+ unique_edges, _idx_map, counts = torch.unique(all_edges, dim=0, return_inverse=True, return_counts=True)
+
+ unique_edges = unique_edges.long()
+ mask_edges = occ_n[unique_edges.reshape(-1)].reshape(-1, 2).sum(-1) == 1
+
+ surf_edges_mask = mask_edges[_idx_map]
+ counts = counts[_idx_map]
+
+ mapping = torch.ones((unique_edges.shape[0]), dtype=torch.long, device=cube_fx8.device) * -1
+ mapping[mask_edges] = torch.arange(mask_edges.sum(), device=cube_fx8.device)
+ # Shaped as [number of cubes x 12 edges per cube]. This is later used to map a cube edge to the unique index
+ # for a surface-intersecting edge. Non-surface-intersecting edges are marked with -1.
+ idx_map = mapping[_idx_map]
+ surf_edges = unique_edges[mask_edges]
+ return surf_edges, idx_map, counts, surf_edges_mask
+
+ @torch.no_grad()
+ def _identify_surf_cubes(self, s_n, cube_fx8):
+ """
+ Identifies grid cubes that intersect with the underlying surface by checking if the signs at
+ all corners are not identical.
+ """
+ occ_n = s_n < 0
+ occ_fx8 = occ_n[cube_fx8.reshape(-1)].reshape(-1, 8)
+ _occ_sum = torch.sum(occ_fx8, -1)
+ surf_cubes = (_occ_sum > 0) & (_occ_sum < 8)
+ return surf_cubes, occ_fx8
+
+ def _linear_interp(self, edges_weight, edges_x):
+ """
+ Computes the location of zero-crossings on 'edges_x' using linear interpolation with 'edges_weight'.
+ """
+ edge_dim = edges_weight.dim() - 2
+ assert edges_weight.shape[edge_dim] == 2
+        edges_weight = torch.cat([
+            torch.index_select(input=edges_weight, index=torch.tensor(1, device=self.device), dim=edge_dim),
+            -torch.index_select(input=edges_weight, index=torch.tensor(0, device=self.device), dim=edge_dim),
+        ], edge_dim)
+ denominator = edges_weight.sum(edge_dim)
+ ue = (edges_x * edges_weight).sum(edge_dim) / denominator
+ return ue
+
+ def _solve_vd_QEF(self, p_bxnx3, norm_bxnx3, c_bx3=None):
+ p_bxnx3 = p_bxnx3.reshape(-1, 7, 3)
+ norm_bxnx3 = norm_bxnx3.reshape(-1, 7, 3)
+ c_bx3 = c_bx3.reshape(-1, 3)
+ A = norm_bxnx3
+ B = ((p_bxnx3) * norm_bxnx3).sum(-1, keepdims=True)
+
+ A_reg = (torch.eye(3, device=p_bxnx3.device) * self.qef_reg_scale).unsqueeze(0).repeat(p_bxnx3.shape[0], 1, 1)
+ B_reg = (self.qef_reg_scale * c_bx3).unsqueeze(-1)
+ A = torch.cat([A, A_reg], 1)
+ B = torch.cat([B, B_reg], 1)
+ dual_verts = torch.linalg.lstsq(A, B).solution.squeeze(-1)
+ return dual_verts
+
+ def _compute_vd(self, x_nx3, surf_cubes_fx8, surf_edges, s_n, case_ids, beta_fx12, alpha_fx8, gamma_f, idx_map, grad_func):
+ """
+ Computes the location of dual vertices as described in Section 4.2
+ """
+ alpha_nx12x2 = torch.index_select(input=alpha_fx8, index=self.cube_edges, dim=1).reshape(-1, 12, 2)
+ surf_edges_x = torch.index_select(input=x_nx3, index=surf_edges.reshape(-1), dim=0).reshape(-1, 2, 3)
+ surf_edges_s = torch.index_select(input=s_n, index=surf_edges.reshape(-1), dim=0).reshape(-1, 2, 1)
+ zero_crossing = self._linear_interp(surf_edges_s, surf_edges_x)
+
+ idx_map = idx_map.reshape(-1, 12)
+ num_vd = torch.index_select(input=self.num_vd_table, index=case_ids, dim=0)
+ edge_group, edge_group_to_vd, edge_group_to_cube, vd_num_edges, vd_gamma = [], [], [], [], []
+
+ total_num_vd = 0
+ vd_idx_map = torch.zeros((case_ids.shape[0], 12), dtype=torch.long, device=self.device, requires_grad=False)
+ if grad_func is not None:
+ normals = torch.nn.functional.normalize(grad_func(zero_crossing), dim=-1)
+ vd = []
+ for num in torch.unique(num_vd):
+ cur_cubes = (num_vd == num) # consider cubes with the same numbers of vd emitted (for batching)
+ curr_num_vd = cur_cubes.sum() * num
+ curr_edge_group = self.dmc_table[case_ids[cur_cubes], :num].reshape(-1, num * 7)
+ curr_edge_group_to_vd = torch.arange(
+ curr_num_vd, device=self.device).unsqueeze(-1).repeat(1, 7) + total_num_vd
+ total_num_vd += curr_num_vd
+ curr_edge_group_to_cube = torch.arange(idx_map.shape[0], device=self.device)[
+ cur_cubes].unsqueeze(-1).repeat(1, num * 7).reshape_as(curr_edge_group)
+
+ curr_mask = (curr_edge_group != -1)
+ edge_group.append(torch.masked_select(curr_edge_group, curr_mask))
+ edge_group_to_vd.append(torch.masked_select(curr_edge_group_to_vd.reshape_as(curr_edge_group), curr_mask))
+ edge_group_to_cube.append(torch.masked_select(curr_edge_group_to_cube, curr_mask))
+ vd_num_edges.append(curr_mask.reshape(-1, 7).sum(-1, keepdims=True))
+ vd_gamma.append(torch.masked_select(gamma_f, cur_cubes).unsqueeze(-1).repeat(1, num).reshape(-1))
+
+ if grad_func is not None:
+ with torch.no_grad():
+ cube_e_verts_idx = idx_map[cur_cubes]
+ curr_edge_group[~curr_mask] = 0
+
+ verts_group_idx = torch.gather(input=cube_e_verts_idx, dim=1, index=curr_edge_group)
+ verts_group_idx[verts_group_idx == -1] = 0
+ verts_group_pos = torch.index_select(
+ input=zero_crossing, index=verts_group_idx.reshape(-1), dim=0).reshape(-1, num.item(), 7, 3)
+ v0 = x_nx3[surf_cubes_fx8[cur_cubes][:, 0]].reshape(-1, 1, 1, 3).repeat(1, num.item(), 1, 1)
+ curr_mask = curr_mask.reshape(-1, num.item(), 7, 1)
+ verts_centroid = (verts_group_pos * curr_mask).sum(2) / (curr_mask.sum(2))
+
+ normals_bx7x3 = torch.index_select(input=normals, index=verts_group_idx.reshape(-1), dim=0).reshape(
+ -1, num.item(), 7,
+ 3)
+ curr_mask = curr_mask.squeeze(2)
+ vd.append(self._solve_vd_QEF((verts_group_pos - v0) * curr_mask, normals_bx7x3 * curr_mask,
+ verts_centroid - v0.squeeze(2)) + v0.reshape(-1, 3))
+ edge_group = torch.cat(edge_group)
+ edge_group_to_vd = torch.cat(edge_group_to_vd)
+ edge_group_to_cube = torch.cat(edge_group_to_cube)
+ vd_num_edges = torch.cat(vd_num_edges)
+ vd_gamma = torch.cat(vd_gamma)
+
+ if grad_func is not None:
+ vd = torch.cat(vd)
+ L_dev = torch.zeros([1], device=self.device)
+ else:
+ vd = torch.zeros((total_num_vd, 3), device=self.device)
+ beta_sum = torch.zeros((total_num_vd, 1), device=self.device)
+
+ idx_group = torch.gather(input=idx_map.reshape(-1), dim=0, index=edge_group_to_cube * 12 + edge_group)
+
+ x_group = torch.index_select(input=surf_edges_x, index=idx_group.reshape(-1), dim=0).reshape(-1, 2, 3)
+ s_group = torch.index_select(input=surf_edges_s, index=idx_group.reshape(-1), dim=0).reshape(-1, 2, 1)
+
+ zero_crossing_group = torch.index_select(
+ input=zero_crossing, index=idx_group.reshape(-1), dim=0).reshape(-1, 3)
+
+ alpha_group = torch.index_select(input=alpha_nx12x2.reshape(-1, 2), dim=0,
+ index=edge_group_to_cube * 12 + edge_group).reshape(-1, 2, 1)
+ ue_group = self._linear_interp(s_group * alpha_group, x_group)
+
+ beta_group = torch.gather(input=beta_fx12.reshape(-1), dim=0,
+ index=edge_group_to_cube * 12 + edge_group).reshape(-1, 1)
+ beta_sum = beta_sum.index_add_(0, index=edge_group_to_vd, source=beta_group)
+ vd = vd.index_add_(0, index=edge_group_to_vd, source=ue_group * beta_group) / beta_sum
+ L_dev = self._compute_reg_loss(vd, zero_crossing_group, edge_group_to_vd, vd_num_edges)
+
+ v_idx = torch.arange(vd.shape[0], device=self.device) # + total_num_vd
+
+ vd_idx_map = (vd_idx_map.reshape(-1)).scatter(dim=0, index=edge_group_to_cube *
+ 12 + edge_group, src=v_idx[edge_group_to_vd])
+
+ return vd, L_dev, vd_gamma, vd_idx_map
+
+ def _triangulate(self, s_n, surf_edges, vd, vd_gamma, edge_counts, idx_map, vd_idx_map, surf_edges_mask, training, grad_func):
+ """
+ Connects four neighboring dual vertices to form a quadrilateral. The quadrilaterals are then split into
+ triangles based on the gamma parameter, as described in Section 4.3.
+ """
+ with torch.no_grad():
+ group_mask = (edge_counts == 4) & surf_edges_mask # surface edges shared by 4 cubes.
+ group = idx_map.reshape(-1)[group_mask]
+ vd_idx = vd_idx_map[group_mask]
+ edge_indices, indices = torch.sort(group, stable=True)
+ quad_vd_idx = vd_idx[indices].reshape(-1, 4)
+
+ # Ensure all face directions point towards the positive SDF to maintain consistent winding.
+ s_edges = s_n[surf_edges[edge_indices.reshape(-1, 4)[:, 0]].reshape(-1)].reshape(-1, 2)
+ flip_mask = s_edges[:, 0] > 0
+ quad_vd_idx = torch.cat((quad_vd_idx[flip_mask][:, [0, 1, 3, 2]],
+ quad_vd_idx[~flip_mask][:, [2, 3, 1, 0]]))
+ if grad_func is not None:
+ # when grad_func is given, split quadrilaterals along the diagonals with more consistent gradients.
+ with torch.no_grad():
+ vd_gamma = torch.nn.functional.normalize(grad_func(vd), dim=-1)
+ quad_gamma = torch.index_select(input=vd_gamma, index=quad_vd_idx.reshape(-1), dim=0).reshape(-1, 4, 3)
+ gamma_02 = (quad_gamma[:, 0] * quad_gamma[:, 2]).sum(-1, keepdims=True)
+ gamma_13 = (quad_gamma[:, 1] * quad_gamma[:, 3]).sum(-1, keepdims=True)
+ else:
+ quad_gamma = torch.index_select(input=vd_gamma, index=quad_vd_idx.reshape(-1), dim=0).reshape(-1, 4)
+ gamma_02 = torch.index_select(input=quad_gamma, index=torch.tensor(
+ 0, device=self.device), dim=1) * torch.index_select(input=quad_gamma, index=torch.tensor(2, device=self.device), dim=1)
+ gamma_13 = torch.index_select(input=quad_gamma, index=torch.tensor(
+ 1, device=self.device), dim=1) * torch.index_select(input=quad_gamma, index=torch.tensor(3, device=self.device), dim=1)
+ if not training:
+ mask = (gamma_02 > gamma_13).squeeze(1)
+ faces = torch.zeros((quad_gamma.shape[0], 6), dtype=torch.long, device=quad_vd_idx.device)
+ faces[mask] = quad_vd_idx[mask][:, self.quad_split_1]
+ faces[~mask] = quad_vd_idx[~mask][:, self.quad_split_2]
+ faces = faces.reshape(-1, 3)
+ else:
+ vd_quad = torch.index_select(input=vd, index=quad_vd_idx.reshape(-1), dim=0).reshape(-1, 4, 3)
+ vd_02 = (torch.index_select(input=vd_quad, index=torch.tensor(0, device=self.device), dim=1) +
+ torch.index_select(input=vd_quad, index=torch.tensor(2, device=self.device), dim=1)) / 2
+ vd_13 = (torch.index_select(input=vd_quad, index=torch.tensor(1, device=self.device), dim=1) +
+ torch.index_select(input=vd_quad, index=torch.tensor(3, device=self.device), dim=1)) / 2
+ weight_sum = (gamma_02 + gamma_13) + 1e-8
+ vd_center = ((vd_02 * gamma_02.unsqueeze(-1) + vd_13 * gamma_13.unsqueeze(-1)) /
+ weight_sum.unsqueeze(-1)).squeeze(1)
+ vd_center_idx = torch.arange(vd_center.shape[0], device=self.device) + vd.shape[0]
+ vd = torch.cat([vd, vd_center])
+ faces = quad_vd_idx[:, self.quad_split_train].reshape(-1, 4, 2)
+ faces = torch.cat([faces, vd_center_idx.reshape(-1, 1, 1).repeat(1, 4, 1)], -1).reshape(-1, 3)
+ return vd, faces, s_edges, edge_indices
+
+ def _tetrahedralize(
+ self, x_nx3, s_n, cube_fx8, vertices, faces, surf_edges, s_edges, vd_idx_map, case_ids, edge_indices,
+ surf_cubes, training):
+ """
+ Tetrahedralizes the interior volume to produce a tetrahedral mesh, as described in Section 4.5.
+ """
+ occ_n = s_n < 0
+ occ_fx8 = occ_n[cube_fx8.reshape(-1)].reshape(-1, 8)
+ occ_sum = torch.sum(occ_fx8, -1)
+
+ inside_verts = x_nx3[occ_n]
+ mapping_inside_verts = torch.ones((occ_n.shape[0]), dtype=torch.long, device=self.device) * -1
+ mapping_inside_verts[occ_n] = torch.arange(occ_n.sum(), device=self.device) + vertices.shape[0]
+ """
+ For each grid edge connecting two grid vertices with different
+ signs, we first form a four-sided pyramid by connecting one
+ of the grid vertices with four mesh vertices that correspond
+ to the grid edge and then subdivide the pyramid into two tetrahedra
+ """
+ inside_verts_idx = mapping_inside_verts[surf_edges[edge_indices.reshape(-1, 4)[:, 0]].reshape(-1, 2)[
+ s_edges < 0]]
+ if not training:
+ inside_verts_idx = inside_verts_idx.unsqueeze(1).expand(-1, 2).reshape(-1)
+ else:
+ inside_verts_idx = inside_verts_idx.unsqueeze(1).expand(-1, 4).reshape(-1)
+
+ tets_surface = torch.cat([faces, inside_verts_idx.unsqueeze(-1)], -1)
+ """
+ For each grid edge connecting two grid vertices with the
+ same sign, the tetrahedron is formed by the two grid vertices
+ and two vertices in consecutive adjacent cells
+ """
+ inside_cubes = (occ_sum == 8)
+ inside_cubes_center = x_nx3[cube_fx8[inside_cubes].reshape(-1)].reshape(-1, 8, 3).mean(1)
+ inside_cubes_center_idx = torch.arange(
+ inside_cubes_center.shape[0], device=inside_cubes.device) + vertices.shape[0] + inside_verts.shape[0]
+
+ surface_n_inside_cubes = surf_cubes | inside_cubes
+ edge_center_vertex_idx = torch.ones(((surface_n_inside_cubes).sum(), 13),
+ dtype=torch.long, device=x_nx3.device) * -1
+ surf_cubes = surf_cubes[surface_n_inside_cubes]
+ inside_cubes = inside_cubes[surface_n_inside_cubes]
+ edge_center_vertex_idx[surf_cubes, :12] = vd_idx_map.reshape(-1, 12)
+ edge_center_vertex_idx[inside_cubes, 12] = inside_cubes_center_idx
+
+ all_edges = cube_fx8[surface_n_inside_cubes][:, self.cube_edges].reshape(-1, 2)
+ unique_edges, _idx_map, counts = torch.unique(all_edges, dim=0, return_inverse=True, return_counts=True)
+ unique_edges = unique_edges.long()
+ mask_edges = occ_n[unique_edges.reshape(-1)].reshape(-1, 2).sum(-1) == 2
+ mask = mask_edges[_idx_map]
+ counts = counts[_idx_map]
+ mapping = torch.ones((unique_edges.shape[0]), dtype=torch.long, device=self.device) * -1
+ mapping[mask_edges] = torch.arange(mask_edges.sum(), device=self.device)
+ idx_map = mapping[_idx_map]
+
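+ # an interior grid edge shared by exactly four cells spawns tetrahedra from
+ # its adjacent-cell pairs; gather the four incidences of each such edge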
+ group_mask = (counts == 4) & mask
+ group = idx_map.reshape(-1)[group_mask]
+ edge_indices, indices = torch.sort(group)
+ cube_idx = torch.arange((_idx_map.shape[0] // 12), dtype=torch.long,
+ device=self.device).unsqueeze(1).expand(-1, 12).reshape(-1)[group_mask]
+ edge_idx = torch.arange((12), dtype=torch.long, device=self.device).unsqueeze(
+ 0).expand(_idx_map.shape[0] // 12, -1).reshape(-1)[group_mask]
+ # Identify the face shared by the adjacent cells.
+ cube_idx_4 = cube_idx[indices].reshape(-1, 4)
+ edge_dir = self.edge_dir_table[edge_idx[indices]].reshape(-1, 4)[..., 0]
+ shared_faces_4x2 = self.dir_faces_table[edge_dir].reshape(-1)
+ cube_idx_4x2 = cube_idx_4[:, self.adj_pairs].reshape(-1)
+ # Identify an edge of the face with different signs and
+ # select the mesh vertex corresponding to the identified edge.
+ case_ids_expand = torch.ones((surface_n_inside_cubes).sum(), dtype=torch.long, device=x_nx3.device) * 255
+ case_ids_expand[surf_cubes] = case_ids
+ cases = case_ids_expand[cube_idx_4x2]
+ quad_edge = edge_center_vertex_idx[cube_idx_4x2, self.tet_table[cases, shared_faces_4x2]].reshape(-1, 2)
+ mask = (quad_edge == -1).sum(-1) == 0
+ inside_edge = mapping_inside_verts[unique_edges[mask_edges][edge_indices].reshape(-1)].reshape(-1, 2)
+ tets_inside = torch.cat([quad_edge, inside_edge], -1)[mask]
+
+ tets = torch.cat([tets_surface, tets_inside])
+ vertices = torch.cat([vertices, inside_verts, inside_cubes_center])
+ return vertices, tets
diff --git a/nsr/geometry/rep_3d/flexicubes_geometry.py b/nsr/geometry/rep_3d/flexicubes_geometry.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf050ee20361f78957839942f83fe77efde231b7
--- /dev/null
+++ b/nsr/geometry/rep_3d/flexicubes_geometry.py
@@ -0,0 +1,120 @@
+# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+
+import torch
+import numpy as np
+import os
+from . import Geometry
+from .flexicubes import FlexiCubes # replace later
+from .dmtet import sdf_reg_loss_batch
+import torch.nn.functional as F
+
+def get_center_boundary_index(grid_res, device):
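+ # returns the flattened index of the vertex nearest the grid center and the
+ # indices of all vertices in the two outermost layers of the grid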
+ v = torch.zeros((grid_res + 1, grid_res + 1, grid_res + 1), dtype=torch.bool, device=device)
+ v[grid_res // 2 + 1, grid_res // 2 + 1, grid_res // 2 + 1] = True
+ center_indices = torch.nonzero(v.reshape(-1))
+
+ v[grid_res // 2 + 1, grid_res // 2 + 1, grid_res // 2 + 1] = False
+ v[:2, ...] = True
+ v[-2:, ...] = True
+ v[:, :2, ...] = True
+ v[:, -2:, ...] = True
+ v[:, :, :2] = True
+ v[:, :, -2:] = True
+ boundary_indices = torch.nonzero(v.reshape(-1))
+ return center_indices, boundary_indices
+
+###############################################################################
+# Geometry interface
+###############################################################################
+class FlexiCubesGeometry(Geometry):
+ def __init__(
+ self, grid_res=64, scale=2.0, device='cuda', renderer=None,
+ render_type='neural_render', args=None):
+ super(FlexiCubesGeometry, self).__init__()
+ self.grid_res = grid_res
+ self.device = device
+ self.args = args
+ self.fc = FlexiCubes(device, weight_scale=0.5)
+ self.verts, self.indices = self.fc.construct_voxel_grid(grid_res)
+ if isinstance(scale, list):
+ self.verts[:, 0] = self.verts[:, 0] * scale[0]
+ self.verts[:, 1] = self.verts[:, 1] * scale[1]
+ self.verts[:, 2] = self.verts[:, 2] * scale[2]
+ else:
+ self.verts = self.verts * scale
+
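+ # unique undirected grid edges; typically consumed by SDF regularizers such
+ # as the imported sdf_reg_loss_batch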
+ all_edges = self.indices[:, self.fc.cube_edges].reshape(-1, 2)
+ self.all_edges = torch.unique(all_edges, dim=0)
+
+ # Parameters used to fix the boundary SDF
+ self.center_indices, self.boundary_indices = get_center_boundary_index(self.grid_res, device)
+ self.renderer = renderer
+ self.render_type = render_type
+
+ def getAABB(self):
+ return torch.min(self.verts, dim=0).values, torch.max(self.verts, dim=0).values
+
+ def get_mesh(self, v_deformed_nx3, sdf_n, weight_n=None, with_uv=False, indices=None, is_training=False):
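+ # weight_n packs the per-cube FlexiCubes weights: 12 beta (edges), 8 alpha
+ # (corners), 1 gamma (quad split); it must be provided despite the None default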
+ if indices is None:
+ indices = self.indices
+
+ verts, faces, v_reg_loss = self.fc(v_deformed_nx3, sdf_n, indices, self.grid_res,
+ beta_fx12=weight_n[:, :12], alpha_fx8=weight_n[:, 12:20],
+ gamma_f=weight_n[:, 20], training=is_training
+ )
+ return verts, faces, v_reg_loss
+
+
+ def render_mesh(self, mesh_v_nx3, mesh_f_fx3, camera_mv_bx4x4, resolution=256, hierarchical_mask=False):
+ return_value = dict()
+ if self.render_type == 'neural_render':
+ tex_pos, mask, hard_mask, rast, v_pos_clip, mask_pyramid, depth, normal = self.renderer.render_mesh(
+ mesh_v_nx3.unsqueeze(dim=0),
+ mesh_f_fx3.int(),
+ camera_mv_bx4x4,
+ mesh_v_nx3.unsqueeze(dim=0),
+ resolution=resolution,
+ device=self.device,
+ hierarchical_mask=hierarchical_mask
+ )
+
+ return_value['tex_pos'] = tex_pos
+ return_value['mask'] = mask
+ return_value['hard_mask'] = hard_mask
+ return_value['rast'] = rast
+ return_value['v_pos_clip'] = v_pos_clip
+ return_value['mask_pyramid'] = mask_pyramid
+ return_value['depth'] = depth
+ return_value['normal'] = normal
+ else:
+ raise NotImplementedError
+
+ return return_value
+
+ def render(self, v_deformed_bxnx3=None, sdf_bxn=None, camera_mv_bxnviewx4x4=None, resolution=256):
+ # Assumes a batch of meshes (each entry may be a different mesh and geometry); for the other shapes the batch size is 1
+ v_list = []
+ f_list = []
+ n_batch = v_deformed_bxnx3.shape[0]
+ all_render_output = []
+ for i_batch in range(n_batch):
+ verts_nx3, faces_fx3, _ = self.get_mesh(v_deformed_bxnx3[i_batch], sdf_bxn[i_batch])
+ v_list.append(verts_nx3)
+ f_list.append(faces_fx3)
+ render_output = self.render_mesh(verts_nx3, faces_fx3, camera_mv_bxnviewx4x4[i_batch], resolution)
+ all_render_output.append(render_output)
+
+ # Concatenate all render output
+ return_keys = all_render_output[0].keys()
+ return_value = dict()
+ for k in return_keys:
+ value = [v[k] for v in all_render_output]
+ return_value[k] = value
+ # We can do concatenation outside of the render
+ return return_value
diff --git a/nsr/geometry/rep_3d/tables.py b/nsr/geometry/rep_3d/tables.py
new file mode 100644
index 0000000000000000000000000000000000000000..5873e7727b5595a1e4fbc3bd10ae5be8f3d06cca
--- /dev/null
+++ b/nsr/geometry/rep_3d/tables.py
@@ -0,0 +1,791 @@
+# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# NVIDIA CORPORATION & AFFILIATES and its licensors retain all intellectual property
+# and proprietary rights in and to this software, related documentation
+# and any modifications thereto. Any use, reproduction, disclosure or
+# distribution of this software and related documentation without an express
+# license agreement from NVIDIA CORPORATION & AFFILIATES is strictly prohibited.
+dmc_table = [
+[[-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 8, 9, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 7, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 5, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 5, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 8, 9, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 7, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 7, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 5, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 8, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 8, 9, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 7, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [4, 7, 8, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 7, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 8, 11, -1, -1, -1], [4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 5, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 5, 8, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 8, 9, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 7, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 7, 8, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 5, 7, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 9, 10, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 8, 9, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 7, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 9, 10, -1, -1, -1], [4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 7, 9, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [4, 5, 9, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 5, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 5, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 8, 9, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 7, 9, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 7, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 5, 7, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 8, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 9, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[8, 9, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [1, 3, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 7, 10, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 9, 10, 11, -1, -1], [4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 9, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [1, 3, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 8, 10, 11, -1, -1], [4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 5, 10, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 8, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 8, 9, -1, -1, -1], [1, 3, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 7, 9, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 7, 8, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 8, 9, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 6, 8, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 6, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [4, 6, 8, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 6, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [4, 5, 9, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 5, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 5, 8, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 6, 8, 9, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 6, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 6, 8, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 5, 6, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 6, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 6, 7, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [2, 3, 6, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 6, 7, 8, 9, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 6, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [2, 3, 4, 6, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 6, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [2, 3, 6, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 6, 7, 8, -1, -1], [4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 5, -1, -1, -1], [2, 3, 6, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 5, 6, 7, 8], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 5, 6, 8, 9, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 6, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 2, 3, 5, 6, 8], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 5, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 10, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 9, 10, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 8, 9, 10, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 6, 8, 11, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 6, 11, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 9, 10, -1, -1, -1], [4, 6, 8, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 6, 9, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [4, 5, 9, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1]],
+[[0, 2, 4, 5, 10, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 5, 8, 10, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 6, 8, 9, 11, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 6, 9, 11, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 6, 8, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 5, 6, 10, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 6, 7, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 6, 7, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 6, 7, 9, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[6, 7, 8, 9, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 6, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 6, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 6, 8, 9, 10], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 6, 9, 10, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [1, 3, 6, 7, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 6, 7, 8, 10, -1], [4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 5, 6, 7, 10], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 6, 7, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 5, 6, 8, 9, 10], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 6, 9, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 8, 9, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 7, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [4, 7, 8, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 7, 9, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 6, 9, 10, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [4, 6, 9, 10, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 6, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 6, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[6, 7, 8, 9, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 6, 7, 9, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 6, 7, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 6, 7, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 11, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 8, 11, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 8, 9, 11, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 7, 11, -1, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [4, 7, 8, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [5, 6, 10, -1, -1, -1, -1]],
+[[1, 2, 4, 7, 9, 11, -1], [5, 6, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 6, 9, 10, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 8, 11, -1, -1, -1], [4, 6, 9, 10, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 6, 10, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 6, 8, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[6, 7, 8, 9, 10, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 6, 7, 9, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 6, 7, 8, 10, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 6, 7, 10, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 5, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [1, 2, 5, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 6, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 5, 6, 8, 9, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [1, 2, 5, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 7, -1, -1, -1], [1, 2, 5, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 6, 9, -1, -1], [4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 5, 6, 7, 9], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 6, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [1, 2, 4, 6, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 6, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 6, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 6, 7, 8, 9, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 2, 3, 6, 7, 9], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 6, 7, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 6, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 5, 6, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 6, 8, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 6, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 6, 8, 9, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [1, 3, 5, 6, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 5, 6, 7, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 6, 9, 11, -1], [4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 6, 7, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 6, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 6, 8, 9, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 6, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 6, 8, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 6, 7, 8, 9, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 6, 7, 8, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[6, 7, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [5, 7, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [5, 7, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 8, 9, -1, -1, -1], [5, 7, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 8, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 5, 10, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [4, 5, 8, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 5, 9, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 9, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [4, 7, 9, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 7, 10, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 7, 8, 10, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[8, 9, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 9, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 8, 10, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 10, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 5, 7, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 7, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [2, 3, 5, 7, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 5, 7, 8, 9, 10], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 5, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 5, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [2, 3, 4, 5, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 5, 9, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 7, 9, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 7, 8, 9, 10], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 2, 3, 4, 7, 10], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 8, 9, 10, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 9, 10, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 2, 3, 8, 10, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 10, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 5, 7, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [1, 2, 5, 7, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 5, 7, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 5, 7, 8, 9, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 5, 8, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 2, 3, 4, 5, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 5, 8, 9, 11], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 4, 7, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [1, 2, 4, 7, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 4, 7, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 4, 7, 8, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 2, 8, 9, 11, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 2, 3, 9, 11, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 2, 8, 11, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[2, 3, 11, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 5, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 5, 7, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 5, 7, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[5, 7, 8, 9, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 5, 8, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 5, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 5, 8, 9, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 5, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 4, 7, 9, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 4, 7, 8, 9, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 4, 7, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[4, 7, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[1, 3, 8, 9, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 1, 9, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[0, 3, 8, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]],
+[[-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1, -1]]
+]
+num_vd_table = [0, 1, 1, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 2, 2,
+2, 1, 2, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 1, 2, 3, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2,
+1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 2, 2, 1, 1, 2, 1, 2, 3, 2, 2, 1, 1, 1, 1,
+1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 3, 2, 2, 2, 2, 2, 1, 3, 4, 2,
+2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 2,
+3, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 2, 3, 2, 3, 2, 4, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1,
+2, 1, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1,
+1, 2, 1, 1, 1, 2, 2, 2, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2,
+1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1,
+1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0]
+check_table = [
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 1, 0, 0, 194],
+[1, -1, 0, 0, 193],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 1, 0, 164],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, -1, 0, 161],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 0, 1, 152],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 0, 1, 145],
+[1, 0, 0, 1, 144],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 0, -1, 137],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 1, 0, 133],
+[1, 0, 1, 0, 132],
+[1, 1, 0, 0, 131],
+[1, 1, 0, 0, 130],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 0, 1, 100],
+[0, 0, 0, 0, 0],
+[1, 0, 0, 1, 98],
+[0, 0, 0, 0, 0],
+[1, 0, 0, 1, 96],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 1, 0, 88],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, -1, 0, 82],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 1, 0, 74],
+[0, 0, 0, 0, 0],
+[1, 0, 1, 0, 72],
+[0, 0, 0, 0, 0],
+[1, 0, 0, -1, 70],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, -1, 0, 0, 67],
+[0, 0, 0, 0, 0],
+[1, -1, 0, 0, 65],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 1, 0, 0, 56],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, -1, 0, 0, 52],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 1, 0, 0, 44],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 1, 0, 0, 40],
+[0, 0, 0, 0, 0],
+[1, 0, 0, -1, 38],
+[1, 0, -1, 0, 37],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, -1, 0, 33],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, -1, 0, 0, 28],
+[0, 0, 0, 0, 0],
+[1, 0, -1, 0, 26],
+[1, 0, 0, -1, 25],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, -1, 0, 0, 20],
+[0, 0, 0, 0, 0],
+[1, 0, -1, 0, 18],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 0, -1, 9],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[1, 0, 0, -1, 6],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0]
+]
+tet_table = [
+[-1, -1, -1, -1, -1, -1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[4, 4, 4, 4, 4, 4],
+[0, 0, 0, 0, 0, 0],
+[4, 0, 0, 4, 4, -1],
+[1, 1, 1, 1, 1, 1],
+[4, 4, 4, 4, 4, 4],
+[0, 4, 0, 4, 4, -1],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[5, 5, 5, 5, 5, 5],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 0, 2, -1, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[2, -1, 2, 4, 4, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 0, 2, 4, 4, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 4, 2, 4, 4, 2],
+[0, 4, 0, 4, 4, 0],
+[2, 0, 2, 0, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 5, 2, 5, 5, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 0, 2, 0, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[1, 1, 1, 1, 1, 1],
+[0, 1, 1, -1, 0, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[4, 1, 1, 4, 4, 1],
+[0, 1, 1, 0, 0, 1],
+[4, 0, 0, 4, 4, 0],
+[2, 2, 2, 2, 2, 2],
+[-1, 1, 1, 4, 4, 1],
+[0, 1, 1, 4, 4, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[5, 1, 1, 5, 5, 1],
+[0, 1, 1, 0, 0, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[8, 8, 8, 8, 8, 8],
+[1, 1, 1, 4, 4, 1],
+[0, 0, 0, 0, 0, 0],
+[4, 0, 0, 4, 4, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 4, 4, 1],
+[0, 4, 0, 4, 4, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 5, 5, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[5, 5, 5, 5, 5, 5],
+[6, 6, 6, 6, 6, 6],
+[6, -1, 0, 6, 0, 6],
+[6, 0, 0, 6, 0, 6],
+[6, 1, 1, 6, 1, 6],
+[4, 4, 4, 4, 4, 4],
+[0, 0, 0, 0, 0, 0],
+[4, 0, 0, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[6, 4, -1, 6, 4, 6],
+[6, 4, 0, 6, 4, 6],
+[6, 0, 0, 6, 0, 6],
+[6, 1, 1, 6, 1, 6],
+[5, 5, 5, 5, 5, 5],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 0, 2, 2, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 0, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 4, 2, 2, 4, 2],
+[0, 4, 0, 4, 4, 0],
+[2, 0, 2, 2, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[6, 1, 1, 6, -1, 6],
+[6, 1, 1, 6, 0, 6],
+[6, 0, 0, 6, 0, 6],
+[6, 2, 2, 6, 2, 6],
+[4, 1, 1, 4, 4, 1],
+[0, 1, 1, 0, 0, 1],
+[4, 0, 0, 4, 4, 4],
+[2, 2, 2, 2, 2, 2],
+[6, 1, 1, 6, 4, 6],
+[6, 1, 1, 6, 4, 6],
+[6, 0, 0, 6, 0, 6],
+[6, 2, 2, 6, 2, 6],
+[5, 1, 1, 5, 5, 1],
+[0, 1, 1, 0, 0, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[6, 6, 6, 6, 6, 6],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 1, 4, 1],
+[0, 4, 0, 4, 4, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 5, 0, 5, 0, 5],
+[5, 5, 5, 5, 5, 5],
+[5, 5, 5, 5, 5, 5],
+[0, 5, 0, 5, 0, 5],
+[-1, 5, 0, 5, 0, 5],
+[1, 5, 1, 5, 1, 5],
+[4, 5, -1, 5, 4, 5],
+[0, 5, 0, 5, 0, 5],
+[4, 5, 0, 5, 4, 5],
+[1, 5, 1, 5, 1, 5],
+[4, 4, 4, 4, 4, 4],
+[0, 4, 0, 4, 4, 4],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[6, 6, 6, 6, 6, 6],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[2, 5, 2, 5, -1, 5],
+[0, 5, 0, 5, 0, 5],
+[2, 5, 2, 5, 0, 5],
+[1, 5, 1, 5, 1, 5],
+[2, 5, 2, 5, 4, 5],
+[0, 5, 0, 5, 0, 5],
+[2, 5, 2, 5, 4, 5],
+[1, 5, 1, 5, 1, 5],
+[2, 4, 2, 4, 4, 2],
+[0, 4, 0, 4, 4, 4],
+[2, 0, 2, 0, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 6, 2, 6, 6, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 0, 2, 0, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[1, 1, 1, 1, 1, 1],
+[0, 1, 1, 1, 0, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[4, 1, 1, 1, 4, 1],
+[0, 1, 1, 1, 0, 1],
+[4, 0, 0, 4, 4, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[5, 5, 5, 5, 5, 5],
+[1, 1, 1, 1, 4, 1],
+[0, 0, 0, 0, 0, 0],
+[4, 0, 0, 4, 4, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[6, 0, 0, 6, 0, 6],
+[0, 0, 0, 0, 0, 0],
+[6, 6, 6, 6, 6, 6],
+[5, 5, 5, 5, 5, 5],
+[5, 5, 0, 5, 0, 5],
+[5, 5, 0, 5, 0, 5],
+[5, 5, 1, 5, 1, 5],
+[4, 4, 4, 4, 4, 4],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 0, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[4, 4, 4, 4, 4, 4],
+[4, 4, 0, 4, 4, 4],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[8, 8, 8, 8, 8, 8],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 0, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 1, 1, 4, 4, 1],
+[2, 2, 2, 2, 2, 2],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[1, 1, 1, 1, 1, 1],
+[1, 1, 1, 1, 1, 1],
+[1, 1, 1, 1, 0, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[2, 4, 2, 4, 4, 2],
+[1, 1, 1, 1, 1, 1],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[2, 2, 2, 2, 2, 2],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[5, 5, 5, 5, 5, 5],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[4, 4, 4, 4, 4, 4],
+[1, 1, 1, 1, 1, 1],
+[0, 0, 0, 0, 0, 0],
+[0, 0, 0, 0, 0, 0],
+[12, 12, 12, 12, 12, 12]
+]
diff --git a/nsr/gs.py b/nsr/gs.py
new file mode 100644
index 0000000000000000000000000000000000000000..ead51536ccb55eebd63d27aadc991edb28b6d218
--- /dev/null
+++ b/nsr/gs.py
@@ -0,0 +1,206 @@
+import numpy as np
+
+from pdb import set_trace as st
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from diff_gaussian_rasterization import (
+ GaussianRasterizationSettings,
+ GaussianRasterizer,
+)
+
+import kiui
+
+class GaussianRenderer:
+ def __init__(self, output_size, out_chans, rendering_kwargs, **kwargs):
+
+ # self.opt = opt
+ self.bg_color = torch.tensor([1, 1, 1], dtype=torch.float32, device="cuda")
+ # self.bg_color = torch.tensor([0,0,1], dtype=torch.float32, device="cuda")
+
+ self.output_size = output_size
+ self.out_chans = out_chans
+ self.rendering_kwargs = rendering_kwargs
+
+ # intrinsics
+ # self.tan_half_fov = np.tan(0.5 * np.deg2rad(self.opt.fovy))
+ # self.proj_matrix = torch.zeros(4, 4, dtype=torch.float32)
+ # self.proj_matrix[0, 0] = 1 / self.tan_half_fov
+ # self.proj_matrix[1, 1] = 1 / self.tan_half_fov
+ # self.proj_matrix[2, 2] = (opt.zfar + opt.znear) / (opt.zfar - opt.znear)
+ # self.proj_matrix[3, 2] = - (opt.zfar * opt.znear) / (opt.zfar - opt.znear)
+ # self.proj_matrix[2, 3] = 1
+
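+ # Minimal usage sketch (shapes per the comments below; names and values are
+ # illustrative, not from this repo):
+ # renderer = GaussianRenderer(output_size=256, out_chans=3, rendering_kwargs={})
+ # out = renderer.render(gaussians, cam_view, cam_view_proj, cam_pos, tanfov)
+ # out['image'] # [B, V, 3, 256, 256]
+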
+ def render(self, gaussians, cam_view, cam_view_proj, cam_pos, tanfov, bg_color=None, scale_modifier=1):
+ # gaussians: [B, N, 14]
+ # cam_view, cam_view_proj: [B, V, 4, 4]
+ # cam_pos: [B, V, 3]
+
+ device = gaussians.device
+ B, V = cam_view.shape[:2]
+
+ # nested loop over batch and views
+ images = []
+ alphas = []
+ depths = []
+
+ if bg_color is None:
+ bg_color = self.bg_color
+
+ for b in range(B):
+
+ # pos, opacity, scale, rotation, shs
+ means3D = gaussians[b, :, 0:3].contiguous().float()
+ opacity = gaussians[b, :, 3:4].contiguous().float()
+ scales = gaussians[b, :, 4:7].contiguous().float()
+ rotations = gaussians[b, :, 7:11].contiguous().float()
+ rgbs = gaussians[b, :, 11:].contiguous().float() # [N, 3]
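+ # (3 pos + 1 opacity + 3 scale + 4 quaternion + 3 RGB = 14 channels)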
+
+ for v in range(V):
+
+ # render novel views
+ view_matrix = cam_view[b, v].float()
+ view_proj_matrix = cam_view_proj[b, v].float()
+ campos = cam_pos[b, v].float()
+
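+ # per-view rasterization settings; sh_degree=0 because colors are passed
+ # precomputed via colors_precomp rather than as SH coefficients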
+ raster_settings = GaussianRasterizationSettings(
+ image_height=self.output_size,
+ image_width=self.output_size,
+ tanfovx=tanfov,
+ tanfovy=tanfov,
+ bg=bg_color,
+ scale_modifier=scale_modifier,
+ viewmatrix=view_matrix,
+ projmatrix=view_proj_matrix,
+ sh_degree=0,
+ campos=campos,
+ prefiltered=False,
+ debug=False,
+ )
+
+ rasterizer = GaussianRasterizer(raster_settings=raster_settings)
+
+ # Rasterize visible Gaussians to image, obtain their radii (on screen).
+ rendered_image, radii, rendered_depth, rendered_alpha = rasterizer(
+ means3D=means3D,
+ means2D=torch.zeros_like(means3D, dtype=torch.float32, device=device),
+ shs=None,
+ colors_precomp=rgbs,
+ opacities=opacity,
+ scales=scales,
+ rotations=rotations,
+ cov3D_precomp=None,
+ )
+
+ rendered_image = rendered_image.clamp(0, 1)
+
+ images.append(rendered_image)
+ alphas.append(rendered_alpha)
+ depths.append(rendered_depth)
+
+ images = torch.stack(images, dim=0).view(B, V, 3, self.output_size, self.output_size)
+ alphas = torch.stack(alphas, dim=0).view(B, V, 1, self.output_size, self.output_size)
+ depths = torch.stack(depths, dim=0).view(B, V, 1, self.output_size, self.output_size)
+
+ # images = torch.stack(images, dim=0).view(B*V, 3, self.output_size, self.output_size)
+ # alphas = torch.stack(alphas, dim=0).view(B*V, 1, self.output_size, self.output_size)
+ # depths = torch.stack(depths, dim=0).view(B*V, 1, self.output_size, self.output_size)
+
+ return {
+ "image": images, # [B, V, 3, H, W]
+ "alpha": alphas, # [B, V, 1, H, W]
+ "depth": depths,
+ }
+
+
+ def save_ply(self, gaussians, path, compatible=True):
+ # gaussians: [B, N, 14]
+ # compatible: save pre-activated gaussians as in the original paper
+
+ assert gaussians.shape[0] == 1, 'only supports batch size 1'
+
+ from plyfile import PlyData, PlyElement
+
+ means3D = gaussians[0, :, 0:3].contiguous().float()
+ opacity = gaussians[0, :, 3:4].contiguous().float()
+ scales = gaussians[0, :, 4:7].contiguous().float()
+ rotations = gaussians[0, :, 7:11].contiguous().float()
+ shs = gaussians[0, :, 11:].unsqueeze(1).contiguous().float() # [N, 1, 3]
+
+ # prune by opacity
+ mask = opacity.squeeze(-1) >= 0.005
+ means3D = means3D[mask]
+ opacity = opacity[mask]
+ scales = scales[mask]
+ rotations = rotations[mask]
+ shs = shs[mask]
+
+ # invert activation to make it compatible with the original ply format
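+ # (0.28209479177387814 = 1 / (2 * sqrt(pi)), the degree-0 spherical-harmonics
+ # basis constant, which maps RGB to the ply's f_dc coefficients)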
+ if compatible:
+ opacity = kiui.op.inverse_sigmoid(opacity)
+ scales = torch.log(scales + 1e-8)
+ shs = (shs - 0.5) / 0.28209479177387814
+
+ xyzs = means3D.detach().cpu().numpy()
+ f_dc = shs.detach().transpose(1, 2).flatten(start_dim=1).contiguous().cpu().numpy()
+ opacities = opacity.detach().cpu().numpy()
+ scales = scales.detach().cpu().numpy()
+ rotations = rotations.detach().cpu().numpy()
+
+ l = ['x', 'y', 'z']
+ # All channels except the 3 DC
+ for i in range(f_dc.shape[1]):
+ l.append('f_dc_{}'.format(i))
+ l.append('opacity')
+ for i in range(scales.shape[1]):
+ l.append('scale_{}'.format(i))
+ for i in range(rotations.shape[1]):
+ l.append('rot_{}'.format(i))
+
+ dtype_full = [(attribute, 'f4') for attribute in l]
+
+ elements = np.empty(xyzs.shape[0], dtype=dtype_full)
+ attributes = np.concatenate((xyzs, f_dc, opacities, scales, rotations), axis=1)
+ elements[:] = list(map(tuple, attributes))
+ el = PlyElement.describe(elements, 'vertex')
+
+ PlyData([el]).write(path)
+
+ def load_ply(self, path, compatible=True):
+
+ from plyfile import PlyData, PlyElement
+
+ plydata = PlyData.read(path)
+
+ xyz = np.stack((np.asarray(plydata.elements[0]["x"]),
+ np.asarray(plydata.elements[0]["y"]),
+ np.asarray(plydata.elements[0]["z"])), axis=1)
+ print("Number of points at loading : ", xyz.shape[0])
+
+ opacities = np.asarray(plydata.elements[0]["opacity"])[..., np.newaxis]
+
+ shs = np.zeros((xyz.shape[0], 3))
+ shs[:, 0] = np.asarray(plydata.elements[0]["f_dc_0"])
+ shs[:, 1] = np.asarray(plydata.elements[0]["f_dc_1"])
+ shs[:, 2] = np.asarray(plydata.elements[0]["f_dc_2"])
+
+ scale_names = [p.name for p in plydata.elements[0].properties if p.name.startswith("scale_")]
+ scales = np.zeros((xyz.shape[0], len(scale_names)))
+ for idx, attr_name in enumerate(scale_names):
+ scales[:, idx] = np.asarray(plydata.elements[0][attr_name])
+
+ rot_names = [p.name for p in plydata.elements[0].properties if p.name.startswith("rot_")]
+ rots = np.zeros((xyz.shape[0], len(rot_names)))
+ for idx, attr_name in enumerate(rot_names):
+ rots[:, idx] = np.asarray(plydata.elements[0][attr_name])
+
+ gaussians = np.concatenate([xyz, opacities, scales, rots, shs], axis=1)
+ gaussians = torch.from_numpy(gaussians).float() # cpu
+
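+ # when compatible, re-apply the activations inverted in save_ply: sigmoid for
+ # opacity, exp for scales, and the degree-0 SH basis mapping back to RGB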
+ if compatible:
+ gaussians[..., 3:4] = torch.sigmoid(gaussians[..., 3:4])
+ gaussians[..., 4:7] = torch.exp(gaussians[..., 4:7])
+ gaussians[..., 11:] = 0.28209479177387814 * gaussians[..., 11:] + 0.5
+
+ return gaussians
\ No newline at end of file
diff --git a/nsr/gs_surfel.py b/nsr/gs_surfel.py
new file mode 100644
index 0000000000000000000000000000000000000000..2591a99c15b31d5e4201ca5991beaaeadaf4ddcd
--- /dev/null
+++ b/nsr/gs_surfel.py
@@ -0,0 +1,364 @@
+# modified from: 2dgs/gaussian_renderer/__init__.py
+import numpy as np
+
+from pdb import set_trace as st
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+# from diff_gaussian_rasterization import (
+# GaussianRasterizationSettings,
+# GaussianRasterizer,
+# )
+
+from torch.profiler import profile, record_function, ProfilerActivity
+from diff_surfel_rasterization import GaussianRasterizationSettings, GaussianRasterizer
+from utils.point_utils import depth_to_normal, depth_to_normal_2
+
+import kiui
+
+
+class GaussianRenderer2DGS:
+ def __init__(self, output_size, out_chans, rendering_kwargs, **kwargs):
+
+ # self.opt = opt
+ self.bg_color = torch.tensor([1, 1, 1], dtype=torch.float32, device="cuda")
+ # self.bg_color = torch.tensor([0,0,1], dtype=torch.float32, device="cuda")
+
+ self.output_size = output_size
+ self.out_chans = out_chans
+ self.rendering_kwargs = rendering_kwargs
+
+ # intrinsics
+ # self.tan_half_fov = np.tan(0.5 * np.deg2rad(self.opt.fovy))
+ # self.proj_matrix = torch.zeros(4, 4, dtype=torch.float32)
+ # self.proj_matrix[0, 0] = 1 / self.tan_half_fov
+ # self.proj_matrix[1, 1] = 1 / self.tan_half_fov
+ # self.proj_matrix[2, 2] = (opt.zfar + opt.znear) / (opt.zfar - opt.znear)
+ # self.proj_matrix[3, 2] = - (opt.zfar * opt.znear) / (opt.zfar - opt.znear)
+ # self.proj_matrix[2, 3] = 1
+
+ def render(self, gaussians, cam_view, cam_view_proj, cam_pos, tanfov, bg_color=None, scale_modifier=1, output_size=None):
+        # gaussians: [B, N, 13] (the 14-channel 3dgs layout with one scale dof fewer)
+ # cam_view, cam_view_proj: [B, V, 4, 4]
+ # cam_pos: [B, V, 3]
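+        # channel layout assumed below: 3 pos | 1 opacity | 2 scales | 4 quaternion | 3 rgb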
+
+ if output_size is None:
+ output_size = self.output_size
+
+ device = gaussians.device
+ B, V = cam_view.shape[:2]
+ assert gaussians.shape[2] == 13 # scale with 2dof
+ gaussians = gaussians.contiguous().float() # gs rendering in fp32
+
+ # loop of loop...
+ images = []
+ alphas = []
+ depths = []
+ # surf_normals = []
+ rend_normals = []
+ dists = []
+
+ if bg_color is None:
+ bg_color = self.bg_color
+
+ for b in range(B):
+
+ # pos, opacity, scale, rotation, shs
+ means3D = gaussians[b, :, 0:3].contiguous().float()
+ opacity = gaussians[b, :, 3:4].contiguous().float()
+ scales = gaussians[b, :, 4:6].contiguous().float()
+ rotations = gaussians[b, :, 6:10].contiguous().float()
+ rgbs = gaussians[b, :, 10:13].contiguous().float() # [N, 3]
+
+ for v in range(V):
+
+ # render novel views
+ view_matrix = cam_view[b, v].float() # world_view_transform
+ view_proj_matrix = cam_view_proj[b, v].float()
+ campos = cam_pos[b, v].float()
+
+ # with profile(activities=[ProfilerActivity.CUDA, ProfilerActivity.CPU,], record_shapes=True) as prof:
+
+ # with record_function("rendering"):
+
+ raster_settings = GaussianRasterizationSettings(
+ image_height=output_size,
+ image_width=output_size,
+ tanfovx=tanfov,
+ tanfovy=tanfov,
+ bg=bg_color,
+ scale_modifier=scale_modifier,
+ viewmatrix=view_matrix,
+ projmatrix=view_proj_matrix,
+ sh_degree=0,
+ campos=campos,
+ prefiltered=False,
+ debug=False,
+ )
+
+ rasterizer = GaussianRasterizer(raster_settings=raster_settings)
+
+ # Rasterize visible Gaussians to image, obtain their radii (on screen).
+ # rendered_image, radii, rendered_depth, rendered_alpha = rasterizer(
+ rendered_image, radii, allmap = rasterizer(
+ means3D=means3D,
+ means2D=torch.zeros_like(means3D, dtype=torch.float32, device=device),
+ shs=None,
+ colors_precomp=rgbs,
+ opacities=opacity,
+ scales=scales,
+ rotations=rotations,
+ cov3D_precomp=None,
+ # cov3D_precomp = cov3D_precomp
+ )
+
+ # print(prof.key_averages().table(sort_by="cuda_time_total", row_limit=20))
+
+ # with profile(activities=[ProfilerActivity.CUDA, ProfilerActivity.CPU,], record_shapes=True) as prof:
+
+ # ! additional regularizations
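+                # allmap channel layout, as consumed below:
+                #   0: alpha-weighted expected depth, 1: accumulated alpha,
+                #   2-4: rendered normal (view space), 5: median depth, 6: depth distortion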
+ render_alpha = allmap[1:2]
+
+ # get normal map
+ # transform normal from view space to world space
+ # with record_function("render_normal"):
+ render_normal = allmap[2:5]
+ # render_normal = (render_normal.permute(1,2,0) @ (viewpoint_camera.world_view_transform[:3,:3].T)).permute(2,0,1)
+ render_normal = (render_normal.permute(1,2,0) @ (view_matrix[:3,:3].T)).permute(2,0,1)
+
+ # with record_function("render_depth"):
+
+ # get median depth map
+ render_depth_median = allmap[5:6]
+ render_depth_median = torch.nan_to_num(render_depth_median, 0, 0)
+
+ # get expected depth map
+ render_depth_expected = allmap[0:1]
+ render_depth_expected = (render_depth_expected / render_alpha)
+ render_depth_expected = torch.nan_to_num(render_depth_expected, 0, 0)
+
+ # get depth distortion map
+ render_dist = allmap[6:7]
+
+                # pseudo surface attributes
+                # surf depth is either median or expected by setting depth_ratio to 1 or 0
+                # for bounded scenes, use median depth, i.e., depth_ratio = 1;
+                # for unbounded scenes, use expected depth, i.e., depth_ratio = 0, to reduce disk aliasing.
+
+ # ! hard coded depth_ratio = 1 for objaverse
+ surf_depth = render_depth_median
+ # with record_function("surf_normal"):
+ # depth_ratio = 1
+ # # surf_depth = render_depth_expected * (1-depth_ratio) + (depth_ratio) * render_depth_median
+
+                # # assume the depth points form the 'surface' and generate pseudo surface normal for regularizations.
+ # # surf_normal = depth_to_normal(viewpoint_camera, surf_depth)
+ # surf_normal = depth_to_normal_2(world_view_transform=view_matrix, tanfov=tanfov, W=self.output_size, H=self.output_size, depth=surf_depth)
+ # surf_normal = surf_normal.permute(2,0,1)
+ # # remember to multiply with accum_alpha since render_normal is unnormalized.
+ # surf_normal = surf_normal * (render_alpha).detach()
+
+ # ! images
+ rendered_image = rendered_image.clamp(0, 1)
+
+ # images.append(rendered_image)
+ # alphas.append(rendered_alpha)
+ # depths.append(rendered_depth)
+
+ images.append(rendered_image)
+ alphas.append(render_alpha)
+ depths.append(surf_depth)
+ # surf_normals.append(surf_normal)
+ rend_normals.append(render_normal)
+ dists.append(render_dist)
+
+ # print(prof.key_averages().table(sort_by="cuda_time_total", row_limit=20))
+ # st()
+ pass
+
+ images = torch.stack(images, dim=0).view(B, V, 3, output_size, output_size)
+ alphas = torch.stack(alphas, dim=0).view(B, V, 1, output_size, output_size)
+ depths = torch.stack(depths, dim=0).view(B, V, 1, output_size, output_size)
+
+ # approximated surface normal? No, direct depth supervision here.
+ # surf_normals = torch.stack(surf_normals, dim=0).view(B, V, 3, self.output_size, self.output_size)
+
+ # disk normal
+ rend_normals = torch.stack(rend_normals, dim=0).view(B, V, 3, output_size, output_size)
+ dists = torch.stack(dists, dim=0).view(B, V, 1, output_size, output_size)
+
+ # images = torch.stack(images, dim=0).view(B*V, 3, self.output_size, self.output_size)
+ # alphas = torch.stack(alphas, dim=0).view(B*V, 1, self.output_size, self.output_size)
+ # depths = torch.stack(depths, dim=0).view(B*V, 1, self.output_size, self.output_size)
+
+ return {
+ "image": images, # [B, V, 3, H, W]
+ "alpha": alphas, # [B, V, 1, H, W]
+ "depth": depths,
+ # "surf_normal": surf_normals,
+ "rend_normal": rend_normals,
+ "dist": dists
+ }
+
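+    # usage sketch (shapes are illustrative, matching the contracts above):
+    #   out = renderer.render(gaussians, cam_view, cam_view_proj, cam_pos, tanfov)
+    #   rgb, alpha = out['image'], out['alpha']  # [B, V, 3, H, W], [B, V, 1, H, W]
+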
+ # TODO, save/load 2dgs Gaussians
+
+ def save_2dgs_ply(self, path, gaussians, compatible=True):
+ # gaussians: [B, N, 13]
+
+        os.makedirs(os.path.dirname(path), exist_ok=True)
+ assert gaussians.shape[0] == 1, 'only support batch size 1'
+
+ from plyfile import PlyData, PlyElement
+
+ means3D = gaussians[0, :, 0:3].contiguous().float()
+ opacity = gaussians[0, :, 3:4].contiguous().float()
+ scales = gaussians[0, :, 4:6].contiguous().float()
+ rotations = gaussians[0, :, 6:10].contiguous().float()
+ shs = gaussians[0, :, 10:].unsqueeze(1).contiguous().float() # [N, 1, 3]
+
+ # invert activation to make it compatible with the original ply format
+ if compatible:
+ opacity = kiui.op.inverse_sigmoid(opacity)
+ scales = torch.log(scales + 1e-8)
+ shs = (shs - 0.5) / 0.28209479177387814
+
+ xyzs = means3D.detach().cpu().numpy()
+ f_dc = shs.detach().transpose(1, 2).flatten(start_dim=1).contiguous().cpu().numpy()
+ opacities = opacity.detach().cpu().numpy()
+ scales = scales.detach().cpu().numpy()
+ rotations = rotations.detach().cpu().numpy()
+
+ # xyz = self._xyz.detach().cpu().numpy()
+ # normals = np.zeros_like(xyz)
+ # f_dc = self._features_dc.detach().transpose(1, 2).flatten(start_dim=1).contiguous().cpu().numpy()
+ # f_rest = self._features_rest.detach().transpose(1, 2).flatten(start_dim=1).contiguous().cpu().numpy()
+ # opacities = self._opacity.detach().cpu().numpy()
+ # scale = self._scaling.detach().cpu().numpy()
+ # rotation = self._rotation.detach().cpu().numpy()
+
+ # dtype_full = [(attribute, 'f4') for attribute in self.construct_list_of_attributes()]
+
+        # placeholder normals, kept to match the original ply layout
+        normals = np.zeros_like(xyzs)
+
+        l = ['x', 'y', 'z', 'nx', 'ny', 'nz']
+        # All channels except the 3 DC
+        for i in range(f_dc.shape[1]):
+            l.append('f_dc_{}'.format(i))
+        l.append('opacity')
+        for i in range(scales.shape[1]):
+            l.append('scale_{}'.format(i))
+        for i in range(rotations.shape[1]):
+            l.append('rot_{}'.format(i))
+
+        dtype_full = [(attribute, 'f4') for attribute in l]
+
+        elements = np.empty(xyzs.shape[0], dtype=dtype_full)
+        attributes = np.concatenate((xyzs, normals, f_dc, opacities, scales, rotations), axis=1)
+        elements[:] = list(map(tuple, attributes))
+        el = PlyElement.describe(elements, 'vertex')
+        PlyData([el]).write(path)
+
+
+
+ # def save_ply(self, gaussians, path, compatible=True):
+ # # gaussians: [B, N, 14]
+ # # compatible: save pre-activated gaussians as in the original paper
+
+ # assert gaussians.shape[0] == 1, 'only support batch size 1'
+
+ # from plyfile import PlyData, PlyElement
+
+ # means3D = gaussians[0, :, 0:3].contiguous().float()
+ # opacity = gaussians[0, :, 3:4].contiguous().float()
+ # scales = gaussians[0, :, 4:7].contiguous().float()
+ # rotations = gaussians[0, :, 7:11].contiguous().float()
+ # shs = gaussians[0, :, 11:].unsqueeze(1).contiguous().float() # [N, 1, 3]
+
+ # # prune by opacity
+ # mask = opacity.squeeze(-1) >= 0.005
+ # means3D = means3D[mask]
+ # opacity = opacity[mask]
+ # scales = scales[mask]
+ # rotations = rotations[mask]
+ # shs = shs[mask]
+
+ # # invert activation to make it compatible with the original ply format
+ # if compatible:
+ # opacity = kiui.op.inverse_sigmoid(opacity)
+ # scales = torch.log(scales + 1e-8)
+ # shs = (shs - 0.5) / 0.28209479177387814
+
+ # xyzs = means3D.detach().cpu().numpy()
+ # f_dc = shs.detach().transpose(1, 2).flatten(start_dim=1).contiguous().cpu().numpy()
+ # opacities = opacity.detach().cpu().numpy()
+ # scales = scales.detach().cpu().numpy()
+ # rotations = rotations.detach().cpu().numpy()
+
+ # l = ['x', 'y', 'z']
+ # # All channels except the 3 DC
+ # for i in range(f_dc.shape[1]):
+ # l.append('f_dc_{}'.format(i))
+ # l.append('opacity')
+ # for i in range(scales.shape[1]):
+ # l.append('scale_{}'.format(i))
+ # for i in range(rotations.shape[1]):
+ # l.append('rot_{}'.format(i))
+
+ # dtype_full = [(attribute, 'f4') for attribute in l]
+
+ # elements = np.empty(xyzs.shape[0], dtype=dtype_full)
+ # attributes = np.concatenate((xyzs, f_dc, opacities, scales, rotations), axis=1)
+ # elements[:] = list(map(tuple, attributes))
+ # el = PlyElement.describe(elements, 'vertex')
+
+ # PlyData([el]).write(path)
+
+ def load_2dgs_ply(self, path, compatible=True):
+
+ from plyfile import PlyData, PlyElement
+
+ plydata = PlyData.read(path)
+
+ xyz = np.stack((np.asarray(plydata.elements[0]["x"]),
+ np.asarray(plydata.elements[0]["y"]),
+ np.asarray(plydata.elements[0]["z"])), axis=1)
+ print("Number of points at loading : ", xyz.shape[0])
+
+ opacities = np.asarray(plydata.elements[0]["opacity"])[..., np.newaxis]
+
+ shs = np.zeros((xyz.shape[0], 3))
+ shs[:, 0] = np.asarray(plydata.elements[0]["f_dc_0"])
+ shs[:, 1] = np.asarray(plydata.elements[0]["f_dc_1"])
+ shs[:, 2] = np.asarray(plydata.elements[0]["f_dc_2"])
+
+ scale_names = [p.name for p in plydata.elements[0].properties if p.name.startswith("scale_")]
+ scales = np.zeros((xyz.shape[0], len(scale_names)))
+ for idx, attr_name in enumerate(scale_names):
+ scales[:, idx] = np.asarray(plydata.elements[0][attr_name])
+
+ rot_names = [p.name for p in plydata.elements[0].properties if p.name.startswith("rot_")]
+ rots = np.zeros((xyz.shape[0], len(rot_names)))
+ for idx, attr_name in enumerate(rot_names):
+ rots[:, idx] = np.asarray(plydata.elements[0][attr_name])
+
+ gaussians = np.concatenate([xyz, opacities, scales, rots, shs], axis=1)
+ gaussians = torch.from_numpy(gaussians).float() # cpu
+
+        if compatible:
+            # 2dgs layout: xyz(3) | opacity(1) | scales(2) | rots(4) | shs(3) = 13 channels
+            gaussians[..., 3:4] = torch.sigmoid(gaussians[..., 3:4])
+            gaussians[..., 4:6] = torch.exp(gaussians[..., 4:6])
+            gaussians[..., 10:] = 0.28209479177387814 * gaussians[..., 10:] + 0.5
+
+ return gaussians
\ No newline at end of file
diff --git a/nsr/losses/__init__.py b/nsr/losses/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..1447a4305c77bfbe3d50a0d27bb9d2d893ec7621
--- /dev/null
+++ b/nsr/losses/__init__.py
@@ -0,0 +1,10 @@
+# 2d reconstruction losses
+from .id_loss import IDLoss
+# from .lms import HeatmapLoss # for faces
+# from .lpips_deprecated.lpips import LPIPS
+
+# manage import
+__all__ = [
+ # 'LPIPS',
+ 'IDLoss',
+]
diff --git a/nsr/losses/__pycache__/__init__.cpython-39.pyc b/nsr/losses/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3f11a0151d6fdd02b14c820aaacbd483ab85fa14
Binary files /dev/null and b/nsr/losses/__pycache__/__init__.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/builder.cpython-39.pyc b/nsr/losses/__pycache__/builder.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a5e7adb9cc7097c6a932991d617a7b8ed47f729b
Binary files /dev/null and b/nsr/losses/__pycache__/builder.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/helpers.cpython-39.pyc b/nsr/losses/__pycache__/helpers.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32ae81bd8e067fc571c072a97f44d4ca8914f394
Binary files /dev/null and b/nsr/losses/__pycache__/helpers.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/id_loss.cpython-39.pyc b/nsr/losses/__pycache__/id_loss.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a74c2872bf4df5dffdc148190a8a7d36a6d78886
Binary files /dev/null and b/nsr/losses/__pycache__/id_loss.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/model_irse.cpython-39.pyc b/nsr/losses/__pycache__/model_irse.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4e36976164241d573cb616d29d890b96423d540e
Binary files /dev/null and b/nsr/losses/__pycache__/model_irse.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/paths_config.cpython-39.pyc b/nsr/losses/__pycache__/paths_config.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6af4b03c1c41e8146c83f09c6d0305d9d09777d7
Binary files /dev/null and b/nsr/losses/__pycache__/paths_config.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/sdfstudio_losses.cpython-39.pyc b/nsr/losses/__pycache__/sdfstudio_losses.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..adb9f32b9f214892ac015898da401cd8c8188b6e
Binary files /dev/null and b/nsr/losses/__pycache__/sdfstudio_losses.cpython-39.pyc differ
diff --git a/nsr/losses/__pycache__/vqperceptual.cpython-39.pyc b/nsr/losses/__pycache__/vqperceptual.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2f597ed846a4a313efb3b8f15209b678fba2e562
Binary files /dev/null and b/nsr/losses/__pycache__/vqperceptual.cpython-39.pyc differ
diff --git a/nsr/losses/builder.py b/nsr/losses/builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..b386e6f237857119f7149ea36357e577a19ab114
--- /dev/null
+++ b/nsr/losses/builder.py
@@ -0,0 +1,1096 @@
+EPS = 1e-7
+
+import kornia
+from typing import Dict, Iterator, List, Optional, Tuple, Union
+import torchvision
+from guided_diffusion import dist_util, logger
+from pdb import set_trace as st
+from torch.nn import functional as F
+import numpy as np
+import torch
+import torch.nn as nn
+import lpips
+
+from . import *
+
+from .sdfstudio_losses import ScaleAndShiftInvariantLoss
+from ldm.util import default, instantiate_from_config
+from .vqperceptual import hinge_d_loss, vanilla_d_loss
+from torch.autograd import Variable
+
+from math import exp
+
+def gaussian(window_size, sigma):
+ gauss = torch.Tensor([exp(-(x - window_size // 2) ** 2 / float(2 * sigma ** 2)) for x in range(window_size)])
+ return gauss / gauss.sum()
+
+
+def create_window(window_size, channel):
+ _1D_window = gaussian(window_size, 1.5).unsqueeze(1)
+ _2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0)
+ window = Variable(_2D_window.expand(channel, 1, window_size, window_size).contiguous())
+ return window
+
+
+def _ssim(img1, img2, window, window_size, channel, size_average=True):
+ mu1 = F.conv2d(img1, window, padding=window_size // 2, groups=channel)
+ mu2 = F.conv2d(img2, window, padding=window_size // 2, groups=channel)
+
+ mu1_sq = mu1.pow(2)
+ mu2_sq = mu2.pow(2)
+ mu1_mu2 = mu1 * mu2
+
+ sigma1_sq = F.conv2d(img1 * img1, window, padding=window_size // 2, groups=channel) - mu1_sq
+ sigma2_sq = F.conv2d(img2 * img2, window, padding=window_size // 2, groups=channel) - mu2_sq
+ sigma12 = F.conv2d(img1 * img2, window, padding=window_size // 2, groups=channel) - mu1_mu2
+
+ C1 = 0.01 ** 2
+ C2 = 0.03 ** 2
+
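+    # SSIM(x, y) = ((2 mu_x mu_y + C1) (2 sigma_xy + C2))
+    #            / ((mu_x^2 + mu_y^2 + C1) (sigma_x^2 + sigma_y^2 + C2))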
+ ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2))
+
+ if size_average:
+ return ssim_map.mean()
+ else:
+ return ssim_map.mean(1).mean(1).mean(1)
+
+
+
+def weights_init(m):
+ classname = m.__class__.__name__
+ if classname.find("Conv") != -1:
+ nn.init.normal_(m.weight.data, 0.0, 0.02)
+ elif classname.find("BatchNorm") != -1:
+ nn.init.normal_(m.weight.data, 1.0, 0.02)
+ nn.init.constant_(m.bias.data, 0)
+
+
+# Main loss function used for ZoeDepth. Copy/paste from AdaBins repo (https://github.com/shariqfarooq123/AdaBins/blob/0952d91e9e762be310bb4cd055cbfe2448c0ce20/loss.py#L7)
+def extract_key(prediction, key):
+ if isinstance(prediction, dict):
+ return prediction[key]
+ return prediction
+
+
+class SILogLoss(nn.Module):
+ """SILog loss (pixel-wise)"""
+
+ def __init__(self, beta=0.15):
+ super(SILogLoss, self).__init__()
+ self.name = 'SILog'
+ self.beta = beta
+
+ def forward(self,
+ input,
+ target,
+ mask=None,
+ interpolate=True,
+ return_interpolated=False):
+ # input = extract_key(input, KEY_OUTPUT)
+ if input.shape[-1] != target.shape[-1] and interpolate:
+ input = nn.functional.interpolate(input,
+ target.shape[-2:],
+ mode='bilinear',
+ align_corners=True)
+ intr_input = input
+ else:
+ intr_input = input
+
+ if target.ndim == 3:
+ target = target.unsqueeze(1)
+
+ if mask is not None:
+ if mask.ndim == 3:
+ mask = mask.unsqueeze(1)
+
+ input = input[mask]
+ target = target[mask]
+
+ # with torch.amp.autocast(enabled=False): # amp causes NaNs in this loss function
+
+ alpha = 1e-7
+ g = torch.log(input + alpha) - torch.log(target + alpha)
+
+ # n, c, h, w = g.shape
+ # norm = 1/(h*w)
+ # Dg = norm * torch.sum(g**2) - (0.85/(norm**2)) * (torch.sum(g))**2
+
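+        # scale-invariant log loss: Dg = var(g) + beta * mean(g)^2, loss = 10 * sqrt(Dg)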
+ Dg = torch.var(g) + self.beta * torch.pow(torch.mean(g), 2)
+
+ loss = 10 * torch.sqrt(Dg)
+
+ if torch.isnan(loss):
+ print("Nan SILog loss")
+ print("input:", input.shape)
+ print("target:", target.shape)
+ print("G", torch.sum(torch.isnan(g)))
+ print("Input min max", torch.min(input), torch.max(input))
+ print("Target min max", torch.min(target), torch.max(target))
+ print("Dg", torch.isnan(Dg))
+ print("loss", torch.isnan(loss))
+
+ if not return_interpolated:
+ return loss
+
+ return loss, intr_input
+
+
+def get_outnorm(x: torch.Tensor, out_norm: str = '') -> torch.Tensor:
+ """ Common function to get a loss normalization value. Can
+ normalize by either the batch size ('b'), the number of
+ channels ('c'), the image size ('i') or combinations
+ ('bi', 'bci', etc)
+ """
+ # b, c, h, w = x.size()
+ img_shape = x.shape
+
+ if not out_norm:
+ return 1
+
+ norm = 1
+ if 'b' in out_norm:
+ # normalize by batch size
+ # norm /= b
+ norm /= img_shape[0]
+ if 'c' in out_norm:
+ # normalize by the number of channels
+ # norm /= c
+ norm /= img_shape[-3]
+ if 'i' in out_norm:
+ # normalize by image/map size
+ # norm /= h*w
+ norm /= img_shape[-1] * img_shape[-2]
+
+ return norm
+
+
+class CharbonnierLoss(torch.nn.Module):
+ """Charbonnier Loss (L1)"""
+
+ def __init__(self, eps=1e-6, out_norm: str = 'bci'):
+ super(CharbonnierLoss, self).__init__()
+ self.eps = eps
+ self.out_norm = out_norm
+
+ def forward(self, x, y):
+ norm = get_outnorm(x, self.out_norm)
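+        # Charbonnier penalty sqrt((x - y)^2 + eps^2): a smooth, everywhere-differentiable L1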
+ loss = torch.sum(torch.sqrt((x - y).pow(2) + self.eps**2))
+ return loss * norm
+
+
+def feature_vae_loss(feature):
+ # kld = torch.mean(-0.5 * torch.sum(1 + log_var - mu ** 2 - log_var.exp(), dim = 1), dim = 0)
+
+ # feature dim: B C H W
+ mu = feature.mean(1)
+ var = feature.var(1)
+ log_var = torch.log(var)
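+    # KL(N(mu, var) || N(0, I)) = -0.5 * sum(1 + log var - mu^2 - var), averaged over the batch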
+ kld = torch.mean(-0.5 * torch.sum(1 + log_var - mu**2 - var, dim=1), dim=0)
+ return kld
+
+
+def kl_coeff(step, total_step, constant_step, min_kl_coeff, max_kl_coeff):
+ # return max(min(max_kl_coeff * (step - constant_step) / total_step, max_kl_coeff), min_kl_coeff)
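+    # linear warmup: the KL weight ramps from min_kl_coeff to max_kl_coeff
+    # between `constant_step` and `constant_step + total_step`, clamped at both ends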
+ kl_lambda = max(
+ min(
+ min_kl_coeff + (max_kl_coeff - min_kl_coeff) *
+ (step - constant_step) / total_step, max_kl_coeff), min_kl_coeff)
+ return torch.tensor(kl_lambda, device=dist_util.dev())
+
+
+def depth_smoothness_loss(alpha_pred, depth_pred):
+ # from PesonNeRF paper.
+ # all Tensor shape B 1 H W
+ geom_loss = (
+ alpha_pred[..., :-1] * alpha_pred[..., 1:] * (
+ depth_pred[..., :-1] - depth_pred[..., 1:] # W dim
+ ).square()).mean() # mean of ([8, 1, 64, 63])
+
+ geom_loss += (alpha_pred[..., :-1, :] * alpha_pred[..., 1:, :] *
+ (depth_pred[..., :-1, :] - depth_pred[..., 1:, :]).square()
+ ).mean() # H dim, ([8, 1, 63, 64])
+
+ return geom_loss
+
+
+# https://github.com/elliottwu/unsup3d/blob/master/unsup3d/networks.py#L140
+class LPIPSLoss(torch.nn.Module):
+
+ def __init__(
+ self,
+ loss_weight=1.0,
+ use_input_norm=True,
+ range_norm=True,
+ # n1p1_input=True,
+ ):
+ super(LPIPSLoss, self).__init__()
+ # self.perceptual = lpips.LPIPS(net="alex", spatial=False).eval()
+ self.perceptual = lpips.LPIPS(net="vgg", spatial=False).eval()
+ self.loss_weight = loss_weight
+ self.use_input_norm = use_input_norm
+ self.range_norm = range_norm
+
+ # if self.use_input_norm:
+ # # the mean is for image with range [0, 1]
+ # self.register_buffer(
+ # 'mean',
+ # torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1))
+ # # the std is for image with range [0, 1]
+ # self.register_buffer(
+ # 'std',
+ # torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1))
+
+ def forward(self, pred, target, conf_sigma_percl=None):
+ # st()
+ # ! add large image support, only sup 128x128 patch
+ lpips_loss = self.perceptual(target.contiguous(), pred.contiguous())
+ return self.loss_weight * lpips_loss.mean()
+
+
+# mask-aware perceptual loss
+class PerceptualLoss(nn.Module):
+
+ def __init__(self, requires_grad=False):
+ super(PerceptualLoss, self).__init__()
+ mean_rgb = torch.FloatTensor([0.485, 0.456, 0.406])
+ std_rgb = torch.FloatTensor([0.229, 0.224, 0.225])
+ self.register_buffer('mean_rgb', mean_rgb)
+ self.register_buffer('std_rgb', std_rgb)
+
+ vgg_pretrained_features = torchvision.models.vgg16(
+ pretrained=True).features
+ self.slice1 = nn.Sequential()
+ self.slice2 = nn.Sequential()
+ self.slice3 = nn.Sequential()
+ self.slice4 = nn.Sequential()
+ for x in range(4):
+ self.slice1.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(4, 9):
+ self.slice2.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(9, 16):
+ self.slice3.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(16, 23):
+ self.slice4.add_module(str(x), vgg_pretrained_features[x])
+ if not requires_grad:
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def normalize(self, x):
+ out = x / 2 + 0.5
+ out = (out - self.mean_rgb.view(1, 3, 1, 1)) / self.std_rgb.view(
+ 1, 3, 1, 1)
+ return out
+
+ def __call__(self, im1, im2, mask=None, conf_sigma=None):
+ im = torch.cat([im1, im2], 0)
+ im = self.normalize(im) # normalize input
+
+ ## compute features
+ feats = []
+ f = self.slice1(im)
+ feats += [torch.chunk(f, 2, dim=0)]
+ f = self.slice2(f)
+ feats += [torch.chunk(f, 2, dim=0)]
+ f = self.slice3(f)
+ feats += [torch.chunk(f, 2, dim=0)]
+ f = self.slice4(f)
+ feats += [torch.chunk(f, 2, dim=0)]
+
+ losses = []
+ for f1, f2 in feats[2:3]: # use relu3_3 features only
+ loss = (f1 - f2)**2
+ if conf_sigma is not None:
+ loss = loss / (2 * conf_sigma**2 + EPS) + (conf_sigma +
+ EPS).log()
+ if mask is not None:
+ b, c, h, w = loss.shape
+ _, _, hm, wm = mask.shape
+ sh, sw = hm // h, wm // w
+ mask0 = nn.functional.avg_pool2d(mask,
+ kernel_size=(sh, sw),
+ stride=(sh,
+ sw)).expand_as(loss)
+ loss = (loss * mask0).sum() / mask0.sum()
+ else:
+ loss = loss.mean()
+ losses += [loss]
+ return sum(losses)
+
+
+# add confidence support, unsup3d version
+def photometric_loss_laplace(im1, im2, mask=None, conf_sigma=None):
+ loss = (im1 - im2).abs()
+ # loss = (im1 - im2).square()
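+    # Laplace NLL up to constants: sqrt(2) * |x - y| / sigma + log sigma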
+ if conf_sigma is not None:
+ loss = loss * 2**0.5 / (conf_sigma + EPS) + (conf_sigma + EPS).log()
+
+ if mask is not None:
+ mask = mask.expand_as(loss)
+ loss = (loss * mask).sum() / mask.sum()
+
+ else:
+ loss = loss.mean()
+
+ return loss
+
+
+# gaussian likelihood version, What Uncertainties Do We Need in Bayesian Deep Learning for Computer Vision?
+# also used in the mask-aware vgg loss
+def photometric_loss(im1, im2, mask=None, conf_sigma=None):
+ # loss = torch.nn.functional.mse_loss(im1, im2, reduce='none')
+ loss = (im1 - im2).square()
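+    # Gaussian NLL up to constants: (x - y)^2 / (2 sigma^2) + log sigma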
+
+ if conf_sigma is not None:
+ loss = loss / (2 * conf_sigma**2 + EPS) + (conf_sigma + EPS).log()
+
+ if mask is not None:
+ mask = mask.expand_as(loss)
+ loss = (loss * mask).sum() / mask.sum()
+
+ else:
+ loss = loss.mean()
+
+ return loss
+
+
+class E3DGELossClass(torch.nn.Module):
+
+ def __init__(self, device, opt) -> None:
+ super().__init__()
+
+ self.opt = opt
+ self.device = device
+ self.criterionImg = {
+ 'mse': torch.nn.MSELoss(),
+ 'l1': torch.nn.L1Loss(),
+ 'charbonnier': CharbonnierLoss(),
+ }[opt.color_criterion]
+
+ self.criterion_latent = {
+ 'mse': torch.nn.MSELoss(),
+ 'l1': torch.nn.L1Loss(),
+ 'vae': feature_vae_loss
+ }[opt.latent_criterion]
+
+ # self.criterionLPIPS = LPIPS(net_type='alex', device=device).eval()
+ if opt.lpips_lambda > 0:
+ self.criterionLPIPS = LPIPSLoss(loss_weight=opt.lpips_lambda)
+ # self.criterionLPIPS = torch.nn.MSELoss()
+
+ if opt.id_lambda > 0:
+ self.criterionID = IDLoss(device=device).eval()
+ self.id_loss_pool = torch.nn.AdaptiveAvgPool2d((256, 256))
+
+ # define 3d rec loss, for occupancy
+ # self.criterion3d_rec = torch.nn.SmoothL1Loss(reduction='none')
+ # self.criterion_alpha = torch.nn.SmoothL1Loss()
+
+ # self.criterion3d_rec = torch.nn.MSELoss(reduction='none')
+ self.criterion_alpha = torch.nn.L1Loss()
+
+ if self.opt.xyz_lambda > 0:
+ # self.criterion_xyz = torch.nn.SmoothL1Loss()
+ self.criterion_xyz = torch.nn.L1Loss() # follow LION, but noisy xyz here...
+
+ if self.opt.depth_lambda > 0:
+ # ! this depth loss not converging, no idea why
+ self.criterion3d_rec = ScaleAndShiftInvariantLoss(alpha=0.5,
+ scales=1)
+ else:
+ self.criterion3d_rec = torch.nn.SmoothL1Loss(reduction='none')
+
+ # self.silog_loss = SILogLoss()
+
+ if self.opt.lambda_opa_reg > 0:
+ # self.beta_mvp_dist = torch.distributions.beta.Beta(torch.tensor(0.5, device=device), torch.tensor(0.5, device=device))
+ # self.beta_mvp_base_dist = torch.distributions.beta.Beta(torch.tensor(10, device=device), torch.tensor(0.5, device=device)) # force close to 1 for base
+ # self.beta_mvp_base_dist = torch.distributions.beta.Beta(torch.tensor(0.6, device=device), torch.tensor(0.2, device=device)) # force close to 1 for base
+ self.beta_mvp_base_dist = torch.distributions.beta.Beta(torch.tensor(0.5, device=device), torch.tensor(0.25, device=device)) # force close to 1 for base
+
+ logger.log('init loss class finished', )
+
+ def calc_scale_invariant_depth_loss(self, pred_depth: torch.Tensor,
+ gt_depth: torch.Tensor,
+ gt_depth_mask: torch.Tensor):
+ """apply 3d shape reconstruction supervision. Basically supervise the depth with L1 loss
+ """
+
+ shape_loss_dict = {}
+ assert gt_depth_mask is not None
+ shape_loss = self.criterion3d_rec(pred_depth, gt_depth, gt_depth_mask)
+
+ # if shape_loss > 0.2: # hinge loss, avoid ood gradient
+ # shape_loss = torch.zeros_like(shape_loss)
+ # else:
+        shape_loss = shape_loss.clamp(0.04)  # clamp(min=0.04): g-buffer depth is very noisy, so the gradient vanishes once the error is within the noise floor
+ shape_loss *= self.opt.depth_lambda
+
+ shape_loss_dict['loss_depth'] = shape_loss
+ # shape_loss_dict['depth_fgratio'] = gt_depth_mask.mean()
+
+ # return l_si, shape_loss_dict
+ return shape_loss, shape_loss_dict
+
+ def calc_depth_loss(self, pred_depth: torch.Tensor, gt_depth: torch.Tensor,
+ gt_depth_mask: torch.Tensor):
+ """apply 3d shape reconstruction supervision. Basically supervise the depth with L1 loss
+ """
+
+ shape_loss_dict = {}
+ shape_loss = self.criterion3d_rec(pred_depth, gt_depth)
+ assert gt_depth_mask is not None
+
+ shape_loss *= gt_depth_mask
+ shape_loss = shape_loss.sum() / gt_depth_mask.sum()
+
+ # else:
+ # shape_loss /= pred_depth.numel()
+ # l_si = self.silog_loss(pred_depth, gt_depth, mask=None, interpolate=True, return_interpolated=False)
+
+ # l_si *= self.opt.depth_lambda
+ # shape_loss_dict['loss_depth'] = l_si
+ shape_loss_dict['loss_depth'] = shape_loss.clamp(
+ min=0, max=0.1) * self.opt.depth_lambda
+
+ # shape_loss_dict['loss_depth'] = shape_loss.clamp(
+ # min=0, max=0.5) * self.opt.depth_lambda
+
+ # return l_si, shape_loss_dict
+ return shape_loss, shape_loss_dict
+
+ @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def calc_alpha_loss(self, pred_alpha, gt_depth_mask):
+ # return self.criterionImg(alpha, gt_depth_mask.float())
+
+ if gt_depth_mask.ndim == 3:
+ gt_depth_mask = gt_depth_mask.unsqueeze(1)
+
+ if gt_depth_mask.shape[1] == 3:
+ gt_depth_mask = gt_depth_mask[:, 0:1, ...] # B 1 H W
+
+ assert pred_alpha.shape == gt_depth_mask.shape
+
+ alpha_loss = self.criterion_alpha(pred_alpha, gt_depth_mask)
+ # st()
+
+ return alpha_loss
+
+ @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def calc_mask_mse_loss(
+ self,
+ input,
+ gt,
+ gt_depth_mask,
+ # conf_sigma=None,
+ conf_sigma_l1=None,
+ # conf_sigma_percl=None,
+ use_fg_ratio=False):
+ if gt_depth_mask.ndim == 3:
+ gt_depth_mask = gt_depth_mask.unsqueeze(1).repeat_interleave(3, 1)
+ else:
+ assert gt_depth_mask.shape == input.shape
+ gt_depth_mask = gt_depth_mask.float()
+
+ if conf_sigma_l1 is None:
+ rec_loss = torch.nn.functional.mse_loss(
+ input.float(), gt.float(),
+ reduction='none') # 'sum' already divide by batch size n
+ else:
+            rec_loss = photometric_loss(
+                input, gt, gt_depth_mask, conf_sigma_l1
+            )  # ! only calculate the Laplace term on the foreground; background confidence is low and would yield large gradients.
+ return rec_loss
+ # rec_loss = torch.nn.functional.l1_loss( # for laplace loss
+ # input.float(), gt.float(),
+ # reduction='none') # 'sum' already divide by batch size n
+ # gt_depth_mask = torch.ones_like(gt_depth_mask) # ! DEBUGGING
+
+ # if conf_sigma is not None: # from unsup3d, but a L2 version
+ # rec_loss = rec_loss * 2**0.5 / (conf_sigma + EPS) + (conf_sigma +
+ # EPS).log()
+ # return rec_loss.mean()
+ # rec_loss = torch.exp(-(rec_loss * 2**0.5 / (conf_sigma + EPS))) * 1/(conf_sigma +
+ # EPS) / (2**0.5)
+
+ fg_size = gt_depth_mask.sum()
+ # fg_ratio = fg_size / torch.ones_like(gt_depth_mask).sum() if use_fg_ratio else 1
+ fg_loss = rec_loss * gt_depth_mask
+ fg_loss = fg_loss.sum() / fg_size # * fg_ratio
+
+ if self.opt.bg_lamdba > 0:
+ bg_loss = rec_loss * (1 - gt_depth_mask)
+ bg_loss = bg_loss.sum() / (1 - gt_depth_mask).sum()
+ rec_loss = fg_loss + bg_loss * self.opt.bg_lamdba
+ else:
+ rec_loss = fg_loss
+
+ return rec_loss
+
+ @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def calc_2d_rec_loss(
+ self,
+ input,
+ gt,
+ depth_fg_mask,
+ test_mode=True,
+ step=1,
+ ignore_lpips=False,
+ # conf_sigma=None,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None,
+ pred_alpha=None,
+ ):
+ opt = self.opt
+ loss_dict = {}
+
+ # logger.log(test_mode)
+ # logger.log(input.min(), input.max(), gt.min(), gt.max())
+ if test_mode or not opt.fg_mse:
+ rec_loss = self.criterionImg(input, gt)
+ else:
+ rec_loss = self.calc_mask_mse_loss(
+ input,
+ gt,
+ depth_fg_mask,
+ conf_sigma_l1=conf_sigma_l1,
+ )
+ # conf_sigma_percl=conf_sigma_percl)
+ # conf_sigma)
+
+ # if step == 300:
+ # st()
+
+ if opt.lpips_lambda > 0 and step >= opt.lpips_delay_iter and not ignore_lpips: # tricky solution to avoid NAN in LPIPS loss
+
+ # with torch.autocast(device_type='cuda',
+ # dtype=torch.float16,
+ # enabled=False):
+ # if test_mode or not opt.fg_mse: # no need to calculate background lpips for ease of computation
+ # inp_for_lpips = input * pred_alpha + torch.ones_like(input) * (1-pred_alpha)
+ # gt_for_lpips = gt * depth_fg_mask + torch.ones_like(gt) * (1-depth_fg_mask)
+
+ inp_for_lpips = input * pred_alpha
+ gt_for_lpips = gt * depth_fg_mask
+
+ width = input.shape[-1]
+ if width == 192: # triplane here
+ lpips_loss = self.criterionLPIPS( # loss on 128x128 center crop
+ inp_for_lpips[:, :, width//2-64:width//2+64, width//2-64:width//2+64],
+ gt_for_lpips[:, :, width//2-64:width//2+64, width//2-64:width//2+64],
+ conf_sigma_percl=conf_sigma_percl,
+ )
+            elif width > 256:
+ # elif width >192:
+ # lpips_loss = self.criterionLPIPS(
+ # F.interpolate(inp_for_lpips, (256,256), mode='bilinear'),
+ # F.interpolate(gt_for_lpips, (256,256), mode='bilinear'),
+ # conf_sigma_percl=conf_sigma_percl,
+ # )
+
+ # patch = 80
+
+ # patch = 128
+ patch = 144
+ middle_point = width // 2
+ lpips_loss = self.criterionLPIPS( # loss on 128x128 center crop
+ inp_for_lpips[:, :, middle_point-patch:middle_point+patch, middle_point-patch:middle_point+patch],
+ gt_for_lpips[:, :, middle_point-patch:middle_point+patch, middle_point-patch:middle_point+patch],
+ conf_sigma_percl=conf_sigma_percl,
+ )
+
+ else: # directly supervise when <= 256
+ # ! add foreground mask
+ assert pred_alpha is not None
+ lpips_loss = self.criterionLPIPS(
+ inp_for_lpips,
+ gt_for_lpips,
+ # conf_sigma_percl=conf_sigma_percl,
+ )
+ # else: # fg lpips
+ # assert depth_fg_mask.shape == input.shape
+ # lpips_loss = self.criterionLPIPS(
+ # input.contiguous() * depth_fg_mask,
+ # gt.contiguous() * depth_fg_mask).mean()
+ else:
+ lpips_loss = torch.tensor(0., device=input.device)
+
+ if opt.ssim_lambda > 0:
+ loss_ssim = self.ssim_loss(input, gt) #?
+ else:
+ loss_ssim = torch.tensor(0., device=input.device)
+
+ loss_psnr = self.psnr((input / 2 + 0.5), (gt / 2 + 0.5), 1.0)
+
+ if opt.id_lambda > 0:
+ loss_id = self._calc_loss_id(input, gt)
+ else:
+ loss_id = torch.tensor(0., device=input.device)
+
+ if opt.l1_lambda > 0:
+ loss_l1 = F.l1_loss(input, gt)
+ else:
+ loss_l1 = torch.tensor(0., device=input.device)
+
+ # loss = rec_loss * opt.l2_lambda + lpips_loss * opt.lpips_lambda + loss_id * opt.id_lambda + loss_ssim * opt.ssim_lambda
+ rec_loss = rec_loss * opt.l2_lambda
+ loss = rec_loss + lpips_loss + loss_id * opt.id_lambda + loss_ssim * opt.ssim_lambda + opt.l1_lambda * loss_l1
+
+ # if return_dict:
+ loss_dict['loss_l2'] = rec_loss
+ loss_dict['loss_id'] = loss_id
+ loss_dict['loss_lpips'] = lpips_loss
+ loss_dict['loss'] = loss
+ loss_dict['loss_ssim'] = loss_ssim
+
+ # metrics to report, not involved in training
+ loss_dict['mae'] = loss_l1
+ loss_dict['PSNR'] = loss_psnr
+ loss_dict['SSIM'] = 1 - loss_ssim # Todo
+ loss_dict['ID_SIM'] = 1 - loss_id
+
+ return loss, loss_dict
+
+ @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def calc_shape_rec_loss(
+ self,
+ pred_shape: dict,
+ gt_shape: dict,
+ device,
+ ):
+ """apply 3d shape reconstruction supervision. Basically supervise the densities with L1 loss
+
+ Args:
+ pred_shape (dict): dict contains reconstructed shape information
+ gt_shape (dict): dict contains gt shape information
+ supervise_sdf (bool, optional): whether supervise sdf rec. Defaults to True.
+ supervise_surface_normal (bool, optional): whether supervise surface rec. Defaults to False.
+
+ Returns:
+ dict: shape reconstruction loss
+ """
+
+ shape_loss_dict = {}
+ shape_loss = 0
+ # assert supervise_sdf or supervise_surface_normal, 'should at least supervise one types of shape reconstruction'
+ # todo, add weights
+
+ if self.opt.shape_uniform_lambda > 0:
+ shape_loss_dict['coarse'] = self.criterion3d_rec(
+ pred_shape['coarse_densities'].squeeze(),
+ gt_shape['coarse_densities'].squeeze())
+ shape_loss += shape_loss_dict[
+ 'coarse'] * self.opt.shape_uniform_lambda
+
+ if self.opt.shape_importance_lambda > 0:
+ shape_loss_dict['fine'] = self.criterion3d_rec(
+ pred_shape['fine_densities'].squeeze(), # ? how to supervise
+ gt_shape['fine_densities'].squeeze())
+ shape_loss += shape_loss_dict[
+ 'fine'] * self.opt.shape_importance_lambda
+
+ loss_depth = self.criterion_alpha(pred_shape['image_depth'],
+ gt_shape['image_depth'])
+
+ shape_loss += loss_depth * self.opt.shape_depth_lambda
+ shape_loss_dict.update(dict(loss_depth=loss_depth))
+ # TODO, add on surface pts supervision ?
+
+ return shape_loss, shape_loss_dict
+
+ @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def psnr(self, input, target, max_val):
+ return kornia.metrics.psnr(input, target, max_val)
+
+ # @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def ssim_loss(self, img1, img2, window_size=11, size_average=True):
+ channel = img1.size(-3)
+ window = create_window(window_size, channel)
+
+ if img1.is_cuda:
+ window = window.cuda(img1.get_device())
+ window = window.type_as(img1)
+
+ return 1 - _ssim(img1, img2, window, window_size, channel, size_average)
+
+ @torch.autocast(device_type='cuda', dtype=torch.float16, enabled=False)
+ def forward(self,
+ pred,
+ gt,
+ test_mode=True,
+ step=1,
+ return_fg_mask=False,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None,
+ ignore_kl=False,
+ ignore_lpips=False,
+ *args,
+ **kwargs):
+
+ with torch.autocast(device_type='cuda',
+ dtype=torch.float16,
+ enabled=False):
+
+ loss = torch.tensor(0., device=self.device)
+ loss_dict = {}
+
+ if 'image_mask' in pred:
+ pred_alpha = pred['image_mask'] # B 1 H W
+ else:
+ N, _, H, W = pred['image_depth'].shape
+ pred_alpha = pred['weights_samples'].permute(0, 2, 1).reshape(
+ N, 1, H, W)
+
+ # balance rec_loss with logvar
+ # if 'depth_mask' in gt:
+ if self.opt.online_mask:
+ # https://github.com/elliottwu/unsup3d/blob/dc961410d61684561f19525c2f7e9ee6f4dacb91/unsup3d/model.py#L193
+ margin = (self.opt.max_depth - self.opt.min_depth) / 2
+ fg_mask = (pred['image_depth']
+ < self.opt.max_depth + margin).float() # B 1 H W
+ fg_mask = fg_mask.repeat_interleave(3, 1).float()
+
+ else:
+ if 'depth_mask' in gt:
+ if gt['depth_mask'].shape[1] != 1:
+ fg_mask = gt['depth_mask'].unsqueeze(1)
+ else:
+ fg_mask = gt['depth_mask']
+ fg_mask = fg_mask.repeat_interleave(
+ 3, 1).float()
+ else:
+ fg_mask = None
+
+ loss_2d, loss_2d_dict = self.calc_2d_rec_loss(
+ pred['image_raw'],
+ gt['img'],
+ fg_mask,
+ test_mode=test_mode,
+ step=step,
+ ignore_lpips=ignore_lpips,
+ conf_sigma_l1=conf_sigma_l1,
+ conf_sigma_percl=conf_sigma_percl,
+ pred_alpha=pred_alpha,
+ )
+ # ignore_lpips=self.opt.fg_mse)
+
+ if self.opt.kl_lambda > 0 and not ignore_kl:
+ # assert 'posterior' in pred, 'logvar' in pred
+ assert 'posterior' in pred
+
+ if self.opt.kl_anneal:
+ kl_lambda = kl_coeff(
+ step=step,
+                        constant_step=5e3,  # KL weight held at the minimum for the first 5k steps
+                        total_step=25e3,  # then annealed to the maximum over the next 25k steps
+ min_kl_coeff=max(1e-9, self.opt.kl_lambda / 1e4),
+ max_kl_coeff=self.opt.kl_lambda)
+ loss_dict['kl_lambda'] = kl_lambda
+ else:
+ loss_dict['kl_lambda'] = torch.tensor(
+ self.opt.kl_lambda, device=dist_util.dev())
+
+ if self.opt.pt_ft_kl:
+ pt_kl, ft_kl = pred['posterior'].kl(pt_ft_separate=True)
+ kl_batch = pt_kl.shape[0]
+
+ # loss_dict['kl_loss_pt'] = pt_kl.sum() * loss_dict['kl_lambda'] * 0.01 / kl_batch
+ loss_dict['kl_loss_pt'] = pt_kl.sum() * loss_dict['kl_lambda'] * 0 # no compression at all.
+ loss_dict['kl_loss_ft'] = ft_kl.sum() * loss_dict['kl_lambda'] / kl_batch
+ loss = loss + loss_dict['kl_loss_pt'] + loss_dict['kl_loss_ft']
+
+ loss_dict['latent_mu_pt'] = pred['posterior'].mean[:, :3].mean()
+ loss_dict['latent_std_pt'] = pred['posterior'].std[:, :3].mean()
+
+ loss_dict['latent_mu_ft'] = pred['posterior'].mean[:, 3:].mean()
+ loss_dict['latent_std_ft'] = pred['posterior'].std[:, 3:].mean()
+
+ elif self.opt.ft_kl:
+
+ ft_kl = pred['posterior'].kl(ft_separate=True)
+ kl_batch = ft_kl.shape[0]
+
+ loss_dict['kl_loss_ft'] = ft_kl.sum() * loss_dict['kl_lambda'] / kl_batch
+ loss = loss + loss_dict['kl_loss_ft']
+
+ loss_dict['latent_mu_ft'] = pred['posterior'].mean[:, :].square().mean().float().detach()
+ loss_dict['latent_std_ft'] = pred['posterior'].std[:, :].mean().float().detach()
+
+ else:
+
+ kl_loss = pred['posterior'].kl()
+ kl_loss = torch.sum(kl_loss) / kl_loss.shape[0]
+
+ loss_dict['kl_loss'] = kl_loss * loss_dict['kl_lambda']
+ loss += loss_dict['kl_loss']
+ loss_dict['latent_mu'] = pred['posterior'].mean.mean()
+ loss_dict['latent_std'] = pred['posterior'].std.mean()
+
+ # nll_loss = loss_2d / torch.exp(pred['logvar']) + pred['logvar'] # nll_loss
+ nll_loss = loss_2d
+ loss += nll_loss
+
+ loss_dict.update(dict(nll_loss=nll_loss))
+
+ # loss_dict['latent_mu'] = pred['latent_normalized'].mean()
+ # loss_dict['latent_max'] = pred['latent_normalized'].max()
+ # loss_dict['latent_min'] = pred['latent_normalized'].min()
+ # loss_dict['latent_std'] = pred['latent_normalized'].std()
+ # pred[
+ # 'latent_normalized_2Ddiffusion'].mean()
+ # loss_dict['latent_std'] = pred[
+ # 'latent_normalized_2Ddiffusion'].std()
+ # loss_dict['latent_max'] = pred[
+ # 'latent_normalized_2Ddiffusion'].max()
+ # loss_dict['latent_min'] = pred[
+ # 'latent_normalized_2Ddiffusion'].min()
+
+ else:
+ loss += loss_2d
+
+ # if 'image_sr' in pred and pred['image_sr'].shape==gt['img_sr']:
+ if 'image_sr' in pred:
+
+ if 'depth_mask_sr' in gt:
+ depth_mask_sr = gt['depth_mask_sr'].unsqueeze(
+ 1).repeat_interleave(3, 1).float()
+ else:
+ depth_mask_sr = None
+
+ loss_sr, loss_sr_dict = self.calc_2d_rec_loss(
+ pred['image_sr'],
+ gt['img_sr'],
+ depth_fg_mask=depth_mask_sr,
+ # test_mode=test_mode,
+ test_mode=True,
+ step=step)
+ loss_sr_lambda = 1
+ if step < self.opt.sr_delay_iter:
+ loss_sr_lambda = 0
+ loss += loss_sr * loss_sr_lambda
+ for k, v in loss_sr_dict.items():
+ loss_dict['sr_' + k] = v * loss_sr_lambda
+
+ if self.opt.depth_lambda > 0: # TODO, switch to scale-agnostic depth loss
+ assert 'depth' in gt
+ pred_depth = pred['image_depth']
+ if pred_depth.ndim == 4:
+ pred_depth = pred_depth.squeeze(1) # B H W
+
+ # _, shape_loss_dict = self.calc_depth_loss(
+ # pred_depth, gt['depth'], fg_mask[:, 0, ...])
+ _, shape_loss_dict = self.calc_scale_invariant_depth_loss(
+ pred_depth, gt['depth'], fg_mask[:, 0, ...])
+ loss += shape_loss_dict['loss_depth']
+ loss_dict.update(shape_loss_dict)
+
+ # if self.opt.latent_lambda > 0: # make sure the latent suits diffusion learning
+ # latent_mu = pred['latent'].mean()
+ # loss_latent = self.criterion_latent(
+ # latent_mu, torch.zeros_like(
+ # latent_mu)) # only regularize the mean value here
+ # loss_dict['loss_latent'] = loss_latent
+ # loss += loss_latent * self.opt.latent_lambda
+
+ if self.opt.alpha_lambda > 0 and 'image_depth' in pred:
+ loss_alpha = self.calc_alpha_loss(pred_alpha, fg_mask)
+ loss_dict['loss_alpha'] = loss_alpha * self.opt.alpha_lambda
+ loss += loss_alpha * self.opt.alpha_lambda
+
+ if self.opt.depth_smoothness_lambda > 0:
+ loss_depth_smoothness = depth_smoothness_loss(
+ pred_alpha,
+ pred['image_depth']) * self.opt.depth_smoothness_lambda
+ loss_dict['loss_depth_smoothness'] = loss_depth_smoothness
+ loss += loss_depth_smoothness
+
+ loss_2d_dict['all_loss'] = loss
+ loss_dict.update(loss_2d_dict)
+
+ # if return_fg_mask:
+ return loss, loss_dict, fg_mask
+ # else:
+ # return loss, loss_dict
+
+ def _calc_loss_id(self, input, gt):
+ if input.shape[-1] != 256:
+ arcface_input = self.id_loss_pool(input)
+ id_loss_gt = self.id_loss_pool(gt)
+ else:
+ arcface_input = input
+ id_loss_gt = gt
+
+ loss_id, _, _ = self.criterionID(arcface_input, id_loss_gt, id_loss_gt)
+
+ return loss_id
+
+    def calc_2d_rec_loss_misaligned(self, input, gt):
+        """id loss + vgg (lpips) loss, for supervision without pixel alignment
+
+        Args:
+            input: reconstructed image
+            gt: reference image (possibly misaligned with the reconstruction)
+        """
+ opt = self.opt
+ loss_dict = {}
+
+ if opt.lpips_lambda > 0:
+ with torch.autocast(
+ device_type='cuda', dtype=torch.float16,
+ enabled=False): # close AMP for lpips to avoid nan
+ lpips_loss = self.criterionLPIPS(input, gt)
+ else:
+ lpips_loss = torch.tensor(0., device=input.device)
+
+ if opt.id_lambda > 0:
+ loss_id = self._calc_loss_id(input, gt)
+ else:
+ loss_id = torch.tensor(0., device=input.device)
+
+ loss_dict['loss_id_real'] = loss_id
+ loss_dict['loss_lpips_real'] = lpips_loss
+
+ loss = lpips_loss * opt.lpips_lambda + loss_id * opt.id_lambda
+
+ return loss, loss_dict
+
+
+class E3DGE_with_AdvLoss(E3DGELossClass):
+ # adapted from sgm/modules/autoencoding/losses/discriminator_loss.py
+
+ def __init__(
+ self,
+ device,
+ opt,
+ discriminator_config: Optional[Dict] = None,
+ disc_num_layers: int = 3,
+ disc_in_channels: int = 3,
+ disc_start: int = 0,
+ disc_loss: str = "hinge",
+ disc_factor: float = 1.0,
+ disc_weight: float = 1.0,
+ regularization_weights: Union[None, Dict[str, float]] = None,
+ dtype=torch.float32,
+ # additional_log_keys: Optional[List[str]] = None,
+ ) -> None:
+ super().__init__(
+ device,
+ opt,
+ )
+
+ # ! initialize GAN loss
+ discriminator_config = default(
+ discriminator_config,
+ {
+ "target":
+ "nsr.losses.disc.NLayerDiscriminator",
+ "params": {
+ "input_nc": disc_in_channels,
+ "n_layers": disc_num_layers,
+ "use_actnorm": False,
+ },
+ },
+ )
+
+ self.discriminator = instantiate_from_config(
+ discriminator_config).apply(weights_init)
+ self.discriminator_iter_start = disc_start
+ self.disc_loss = hinge_d_loss if disc_loss == "hinge" else vanilla_d_loss
+ self.disc_factor = disc_factor
+ self.discriminator_weight = disc_weight # self.regularization_weights = default(regularization_weights, {})
+
+ # self.forward_keys = [
+ # "optimizer_idx",
+ # "global_step",
+ # "last_layer",
+ # "split",
+ # "regularization_log",
+ # ]
+
+ # self.additional_log_keys = set(default(additional_log_keys, []))
+ # self.additional_log_keys.update(set(
+ # self.regularization_weights.keys()))
+
+ def get_trainable_parameters(self) -> Iterator[nn.Parameter]:
+ return self.discriminator.parameters()
+
+ def forward(self,
+ pred,
+ gt,
+ behaviour: str,
+ test_mode=True,
+ step=1,
+ return_fg_mask=False,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None,
+ ignore_d_loss=False,
+ *args,
+ **kwargs):
+
+ # now the GAN part
+ reconstructions = pred['image_raw']
+ inputs = gt['img']
+
+ if behaviour == 'g_step':
+
+ nll_loss, loss_dict, fg_mask = super().forward(
+ pred,
+ gt,
+ test_mode,
+ step,
+ return_fg_mask,
+ conf_sigma_l1,
+ conf_sigma_percl,
+ *args,
+ **kwargs)
+
+ # generator update
+ if not ignore_d_loss and (step >= self.discriminator_iter_start or not self.training):
+ logits_fake = self.discriminator(reconstructions.contiguous())
+ g_loss = -torch.mean(logits_fake)
+ if self.training:
+ d_weight = torch.tensor(self.discriminator_weight)
+ # d_weight = self.calculate_adaptive_weight(
+ # nll_loss, g_loss, last_layer=last_layer)
+ else:
+ d_weight = torch.tensor(1.0)
+ else:
+ d_weight = torch.tensor(0.0)
+ g_loss = torch.tensor(0.0, requires_grad=True)
+
+ g_loss = g_loss * d_weight * self.disc_factor
+
+ loss = nll_loss + g_loss
+
+ # TODO
+ loss_dict.update({
+ f"loss/g": g_loss.detach().mean(),
+ })
+
+ # return loss, log
+ return loss, loss_dict, fg_mask
+
+ elif behaviour == 'd_step' and not ignore_d_loss:
+ # second pass for discriminator update
+ logits_real = self.discriminator(inputs.contiguous().detach())
+ logits_fake = self.discriminator(
+ reconstructions.contiguous().detach())
+
+ if step >= self.discriminator_iter_start or not self.training:
+ d_loss = self.disc_factor * self.disc_loss(
+ logits_real, logits_fake)
+ else:
+ d_loss = torch.tensor(0.0, requires_grad=True)
+
+ loss_dict = {}
+
+ loss_dict.update({
+ "loss/disc": d_loss.clone().detach().mean(),
+ "logits/real": logits_real.detach().mean(),
+ "logits/fake": logits_fake.detach().mean(),
+ })
+
+ return d_loss, loss_dict, None
+ else:
+ raise NotImplementedError(f"Unknown optimizer behaviour {behaviour}")
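+
+# usage sketch (hypothetical training loop), alternating the two behaviours:
+#   loss_g, log_g, fg_mask = criterion(pred, gt, behaviour='g_step', step=step)
+#   loss_d, log_d, _       = criterion(pred, gt, behaviour='d_step', step=step)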
diff --git a/nsr/losses/disc.py b/nsr/losses/disc.py
new file mode 100644
index 0000000000000000000000000000000000000000..43e64ea7af0c1184e858525deef4b19fde86c511
--- /dev/null
+++ b/nsr/losses/disc.py
@@ -0,0 +1,88 @@
+import functools
+
+import torch.nn as nn
+
+from .util import ActNorm
+
+
+def weights_init(m):
+ classname = m.__class__.__name__
+ if classname.find("Conv") != -1:
+ nn.init.normal_(m.weight.data, 0.0, 0.02)
+ elif classname.find("BatchNorm") != -1:
+ nn.init.normal_(m.weight.data, 1.0, 0.02)
+ nn.init.constant_(m.bias.data, 0)
+
+
+class NLayerDiscriminator(nn.Module):
+ """Defines a PatchGAN discriminator as in Pix2Pix
+ --> see https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/models/networks.py
+ """
+
+ def __init__(self, input_nc=3, ndf=64, n_layers=3, use_actnorm=False):
+ """Construct a PatchGAN discriminator
+ Parameters:
+ input_nc (int) -- the number of channels in input images
+ ndf (int) -- the number of filters in the last conv layer
+ n_layers (int) -- the number of conv layers in the discriminator
+ norm_layer -- normalization layer
+ """
+ super(NLayerDiscriminator, self).__init__()
+ if not use_actnorm:
+ norm_layer = nn.BatchNorm2d
+ else:
+ norm_layer = ActNorm
+ if (
+ type(norm_layer) == functools.partial
+ ): # no need to use bias as BatchNorm2d has affine parameters
+ use_bias = norm_layer.func != nn.BatchNorm2d
+ else:
+ use_bias = norm_layer != nn.BatchNorm2d
+
+ kw = 4
+ padw = 1
+ sequence = [
+ nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw),
+ nn.LeakyReLU(0.2, True),
+ ]
+ nf_mult = 1
+ nf_mult_prev = 1
+ for n in range(1, n_layers): # gradually increase the number of filters
+ nf_mult_prev = nf_mult
+ nf_mult = min(2**n, 8)
+ sequence += [
+ nn.Conv2d(
+ ndf * nf_mult_prev,
+ ndf * nf_mult,
+ kernel_size=kw,
+ stride=2,
+ padding=padw,
+ bias=use_bias,
+ ),
+ norm_layer(ndf * nf_mult),
+ nn.LeakyReLU(0.2, True),
+ ]
+
+ nf_mult_prev = nf_mult
+ nf_mult = min(2**n_layers, 8)
+ sequence += [
+ nn.Conv2d(
+ ndf * nf_mult_prev,
+ ndf * nf_mult,
+ kernel_size=kw,
+ stride=1,
+ padding=padw,
+ bias=use_bias,
+ ),
+ norm_layer(ndf * nf_mult),
+ nn.LeakyReLU(0.2, True),
+ ]
+
+ sequence += [
+ nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)
+ ] # output 1 channel prediction map
+ self.main = nn.Sequential(*sequence)
+
+ def forward(self, input):
+ """Standard forward."""
+ return self.main(input)
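+
+# note: with the default n_layers=3 this is the 70x70 PatchGAN; e.g. a
+# 3x256x256 input yields a 1x30x30 map of per-patch real/fake logits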
diff --git a/nsr/losses/helpers.py b/nsr/losses/helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..abaf044f5fb0d7ec98b21e820cb8291560024e3b
--- /dev/null
+++ b/nsr/losses/helpers.py
@@ -0,0 +1,378 @@
+from collections import namedtuple
+from pdb import set_trace as st
+import torch
+import numpy as np
+import torch.nn.functional as F
+import torch.nn as nn
+from torch.nn import Conv2d, BatchNorm2d, PReLU, ReLU, Sigmoid, MaxPool2d, AdaptiveAvgPool2d, Sequential, Module
+"""
+ArcFace implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch)
+"""
+
+# from nsr.networks_stylegan2 import FullyConnectedLayer as EqualLinear
+
+# class GradualStyleBlock(Module):
+
+# def __init__(self, in_c, out_c, spatial):
+# super(GradualStyleBlock, self).__init__()
+# self.out_c = out_c
+# self.spatial = spatial
+# num_pools = int(np.log2(spatial))
+# modules = []
+# modules += [
+# Conv2d(in_c, out_c, kernel_size=3, stride=2, padding=1),
+# nn.LeakyReLU()
+# ]
+# for i in range(num_pools - 1):
+# modules += [
+# Conv2d(out_c, out_c, kernel_size=3, stride=2, padding=1),
+# nn.LeakyReLU()
+# ]
+# self.convs = nn.Sequential(*modules)
+# self.linear = EqualLinear(out_c, out_c, lr_multiplier=1)
+
+# def forward(self, x):
+# x = self.convs(x)
+# x = x.reshape(-1, self.out_c)
+# x = self.linear(x)
+# return x
+
+
+# from project.models.model import ModulatedConv2d
+class DemodulatedConv2d(nn.Module):
+ def __init__(self,
+ in_channel,
+ out_channel,
+ kernel_size=3,
+ stride=1,
+ padding=0,
+ bias=False,
+ dilation=1):
+ super().__init__()
+ # https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/issues/411. fix droplet issue
+
+ self.eps = 1e-8
+
+ if not isinstance(kernel_size, tuple):
+ self.kernel_size = (kernel_size, kernel_size)
+ else:
+ self.kernel_size = kernel_size
+
+ self.in_channel = in_channel
+ self.out_channel = out_channel
+
+ self.weight = nn.Parameter(
+ # torch.randn(1, out_channel, in_channel, kernel_size, kernel_size)
+ torch.randn(1, out_channel, in_channel, *kernel_size))
+ self.bias = None
+ if bias:
+ self.bias = nn.Parameter(torch.randn(out_channel))
+
+ self.stride = stride
+ self.padding = padding
+ self.dilation = dilation
+
+ def forward(self, input):
+ batch, in_channel, height, width = input.shape
+
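+        # StyleGAN2-style weight demodulation: rescale each output filter to
+        # approximately unit L2 norm, which removes the "droplet" artifact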
+ demod = torch.rsqrt(self.weight.pow(2).sum([2, 3, 4]) + 1e-8)
+ demod = demod.repeat_interleave(batch, 0)
+ weight = self.weight * demod.view(batch, self.out_channel, 1, 1, 1)
+
+ weight = weight.view(
+ # batch * self.out_channel, in_channel, self.kernel_size, self.kernel_size
+ batch * self.out_channel,
+ in_channel,
+ *self.kernel_size)
+
+ input = input.view(1, batch * in_channel, height, width)
+ if self.bias is None:
+ out = F.conv2d(input,
+ weight,
+ padding=self.padding,
+ groups=batch,
+ dilation=self.dilation,
+ stride=self.stride)
+ else:
+ out = F.conv2d(input,
+ weight,
+ bias=self.bias,
+ padding=self.padding,
+ groups=batch,
+ dilation=self.dilation,
+ stride=self.stride)
+ _, _, height, width = out.shape
+ out = out.view(batch, self.out_channel, height, width)
+
+ return out
+
+
+class Flatten(Module):
+ def forward(self, input):
+ return input.reshape(input.size(0), -1)
+
+
+def l2_norm(input, axis=1):
+ norm = torch.norm(input, 2, axis, True)
+ output = torch.div(input, norm)
+ return output
+
+
+class Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])):
+ """ A named tuple describing a ResNet block. """
+
+
+def get_block(in_channel, depth, num_units, stride=2):
+ return [Bottleneck(in_channel, depth, stride)
+ ] + [Bottleneck(depth, depth, 1) for i in range(num_units - 1)]
+
+
+def get_blocks(num_layers):
+ if num_layers == 50:
+ blocks = [
+ get_block(in_channel=64, depth=64, num_units=3),
+ get_block(in_channel=64, depth=128, num_units=4),
+ get_block(in_channel=128, depth=256, num_units=14),
+ get_block(in_channel=256, depth=512, num_units=3)
+ ]
+ elif num_layers == 100:
+ blocks = [
+ get_block(in_channel=64, depth=64, num_units=3),
+ get_block(in_channel=64, depth=128, num_units=13),
+ get_block(in_channel=128, depth=256, num_units=30),
+ get_block(in_channel=256, depth=512, num_units=3)
+ ]
+ elif num_layers == 152:
+ blocks = [
+ get_block(in_channel=64, depth=64, num_units=3),
+ get_block(in_channel=64, depth=128, num_units=8),
+ get_block(in_channel=128, depth=256, num_units=36),
+ get_block(in_channel=256, depth=512, num_units=3)
+ ]
+ else:
+ raise ValueError(
+ "Invalid number of layers: {}. Must be one of [50, 100, 152]".
+ format(num_layers))
+ return blocks
+
+
+class SEModule(Module):
+ def __init__(self, channels, reduction):
+ super(SEModule, self).__init__()
+ self.avg_pool = AdaptiveAvgPool2d(1)
+ self.fc1 = Conv2d(channels,
+ channels // reduction,
+ kernel_size=1,
+ padding=0,
+ bias=False)
+ self.relu = ReLU(inplace=True)
+ self.fc2 = Conv2d(channels // reduction,
+ channels,
+ kernel_size=1,
+ padding=0,
+ bias=False)
+ self.sigmoid = Sigmoid()
+
+ def forward(self, x):
+ module_input = x
+ x = self.avg_pool(x)
+ x = self.fc1(x)
+ x = self.relu(x)
+ x = self.fc2(x)
+ x = self.sigmoid(x)
+ return module_input * x
+
+
+class bottleneck_IR(Module):
+ def __init__(self,
+ in_channel,
+ depth,
+ stride,
+ norm_layer=None,
+ demodulate=False):
+ super(bottleneck_IR, self).__init__()
+ if norm_layer is None:
+ norm_layer = BatchNorm2d
+ if demodulate:
+ conv2d = DemodulatedConv2d
+ else:
+ conv2d = Conv2d
+
+ if in_channel == depth:
+ self.shortcut_layer = MaxPool2d(1, stride)
+ else:
+ self.shortcut_layer = Sequential(
+ # Conv2d(in_channel, depth, (1, 1), stride, bias=False),
+ conv2d(in_channel, depth, (1, 1), stride, bias=False),
+ norm_layer(depth))
+
+
+# BatchNorm2d(depth)
+ self.res_layer = Sequential(
+ # BatchNorm2d(in_channel),
+ norm_layer(in_channel),
+ # Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
+ conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
+ PReLU(depth),
+ # Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
+ conv2d(depth, depth, (3, 3), stride, 1, bias=False),
+ norm_layer(depth))
+ # BatchNorm2d(depth))
+
+ def forward(self, x):
+ shortcut = self.shortcut_layer(x)
+ res = self.res_layer(x)
+ return res + shortcut
+
+
+class bottleneck_IR_SE(Module):
+ def __init__(self, in_channel, depth, stride):
+ super(bottleneck_IR_SE, self).__init__()
+ if in_channel == depth:
+ self.shortcut_layer = MaxPool2d(1, stride)
+ else:
+ self.shortcut_layer = Sequential(
+ Conv2d(in_channel, depth, (1, 1), stride, bias=False),
+ BatchNorm2d(depth))
+ self.res_layer = Sequential(
+ BatchNorm2d(in_channel),
+ Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
+ PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
+ BatchNorm2d(depth), SEModule(depth, 16))
+
+ def forward(self, x):
+ shortcut = self.shortcut_layer(x)
+ res = self.res_layer(x)
+ return res + shortcut
+
+
+def _upsample_add(x, y):
+ """Upsample and add two feature maps.
+ Args:
+ x: (Variable) top feature map to be upsampled.
+ y: (Variable) lateral feature map.
+ Returns:
+ (Variable) added feature map.
+ Note that in PyTorch, when the input size is odd, the feature map
+ upsampled with `F.upsample(..., scale_factor=2, mode='nearest')`
+ may not match the lateral feature map size.
+ e.g.
+ original input size: [N,_,15,15] ->
+ conv2d feature map size: [N,_,8,8] ->
+ upsampled feature map size: [N,_,16,16]
+ So we choose bilinear upsample which supports arbitrary output sizes.
+ """
+ _, _, H, W = y.size()
+ return F.interpolate(x, size=(H, W), mode='bilinear',
+ align_corners=True) + y
+
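+# Added example of the odd-size case described in the docstring:
+#   x = torch.randn(1, 8, 8, 8)    # top feature map
+#   y = torch.randn(1, 8, 15, 15)  # lateral feature map with odd size
+#   _upsample_add(x, y).shape      # torch.Size([1, 8, 15, 15])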
+
+# from NeuRay
+def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
+ """3x3 convolution with padding"""
+ return nn.Conv2d(in_planes,
+ out_planes,
+ kernel_size=3,
+ stride=stride,
+ padding=dilation,
+ groups=groups,
+ bias=False,
+ dilation=dilation,
+ padding_mode='reflect')
+
+
+def conv1x1(in_planes, out_planes, stride=1):
+ """1x1 convolution"""
+ return nn.Conv2d(in_planes,
+ out_planes,
+ kernel_size=1,
+ stride=stride,
+ bias=False,
+ padding_mode='reflect')
+
+
+class ResidualBlock(nn.Module):
+ def __init__(self,
+ dim_in,
+ dim_out,
+ dim_inter=None,
+ use_norm=True,
+ norm_layer=nn.BatchNorm2d,
+ bias=False):
+ super().__init__()
+ if dim_inter is None:
+ dim_inter = dim_out
+
+ if use_norm:
+ self.conv = nn.Sequential(
+ norm_layer(dim_in),
+ nn.ReLU(True),
+ nn.Conv2d(dim_in,
+ dim_inter,
+ 3,
+ 1,
+ 1,
+ bias=bias,
+ padding_mode='reflect'),
+ norm_layer(dim_inter),
+ nn.ReLU(True),
+ nn.Conv2d(dim_inter,
+ dim_out,
+ 3,
+ 1,
+ 1,
+ bias=bias,
+ padding_mode='reflect'),
+ )
+ else:
+ self.conv = nn.Sequential(
+ nn.ReLU(True),
+ nn.Conv2d(dim_in, dim_inter, 3, 1, 1),
+ nn.ReLU(True),
+ nn.Conv2d(dim_inter, dim_out, 3, 1, 1),
+ )
+
+ self.short_cut = None
+ if dim_in != dim_out:
+ self.short_cut = nn.Conv2d(dim_in, dim_out, 1, 1)
+
+ def forward(self, feats):
+ feats_out = self.conv(feats)
+ if self.short_cut is not None:
+ feats_out = self.short_cut(feats) + feats_out
+ else:
+ feats_out = feats_out + feats
+ return feats_out
+
+
+class conv(nn.Module):
+ def __init__(self, num_in_layers, num_out_layers, kernel_size, stride):
+ super(conv, self).__init__()
+ self.kernel_size = kernel_size
+ self.conv = nn.Conv2d(num_in_layers,
+ num_out_layers,
+ kernel_size=kernel_size,
+ stride=stride,
+ padding=(self.kernel_size - 1) // 2,
+ padding_mode='reflect')
+ self.bn = nn.InstanceNorm2d(num_out_layers,
+ track_running_stats=False,
+ affine=True)
+
+ def forward(self, x):
+ return F.elu(self.bn(self.conv(x)), inplace=True)
+
+
+class upconv(nn.Module):
+ def __init__(self, num_in_layers, num_out_layers, kernel_size, scale):
+ super(upconv, self).__init__()
+ self.scale = scale
+ self.conv = conv(num_in_layers, num_out_layers, kernel_size, 1)
+
+ def forward(self, x):
+ x = nn.functional.interpolate(x,
+ scale_factor=self.scale,
+ align_corners=True,
+ mode='bilinear')
+ return self.conv(x)
+
diff --git a/nsr/losses/id_loss.py b/nsr/losses/id_loss.py
new file mode 100755
index 0000000000000000000000000000000000000000..8b0cc5f3421a6ff74048963d9734eae8f6226d7e
--- /dev/null
+++ b/nsr/losses/id_loss.py
@@ -0,0 +1,63 @@
+import torch
+from pdb import set_trace as st
+from torch import nn
+from .model_irse import Backbone
+from .paths_config import model_paths
+
+
+class IDLoss(nn.Module):
+
+ def __init__(self, device):
+ # super(IDLoss, self).__init__()
+ super().__init__()
+ print('Loading ResNet ArcFace')
+ self.facenet = Backbone(input_size=112,
+ num_layers=50,
+ drop_ratio=0.6,
+ mode='ir_se').to(device)
+ # self.facenet.load_state_dict(torch.load(model_paths['ir_se50']))
+ try:
+ face_net_model = torch.load(model_paths['ir_se50'],
+ map_location=device)
+ except Exception as e:
+ face_net_model = torch.load(model_paths['ir_se50_hwc'],
+ map_location=device)
+
+ self.facenet.load_state_dict(face_net_model)
+
+ self.face_pool = torch.nn.AdaptiveAvgPool2d((112, 112))
+ self.facenet.eval()
+
+ def extract_feats(self, x):
+ x = x[:, :, 35:223, 32:220] # Crop interesting region
+ x = self.face_pool(x)
+ x_feats = self.facenet(x)
+ return x_feats
+
+ def forward(self, y_hat, y, x):
+ n_samples, _, H, W = x.shape
+ assert H == W == 256, 'IDLoss expects 256x256 input images'
+
+ x_feats = self.extract_feats(x)
+ y_feats = self.extract_feats(y) # features of the target identity
+ y_hat_feats = self.extract_feats(y_hat)
+ y_feats = y_feats.detach()
+ loss = 0
+ sim_improvement = 0
+ id_logs = []
+ count = 0
+ for i in range(n_samples):
+ diff_target = y_hat_feats[i].dot(y_feats[i])
+ diff_input = y_hat_feats[i].dot(x_feats[i])
+ diff_views = y_feats[i].dot(x_feats[i])
+ id_logs.append({
+ 'diff_target': float(diff_target),
+ 'diff_input': float(diff_input),
+ 'diff_views': float(diff_views)
+ })
+ loss += 1 - diff_target
+ id_diff = float(diff_target) - float(diff_views)
+ sim_improvement += id_diff
+ count += 1
+
+ return loss / count, sim_improvement / count, id_logs
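+
+
+# Added usage sketch (requires the ir_se50 weights from paths_config on
+# disk; the tensors here are dummies):
+#   id_loss = IDLoss(device='cuda')
+#   y_hat, y, x = (torch.randn(2, 3, 256, 256, device='cuda') for _ in range(3))
+#   loss, sim_improvement, id_logs = id_loss(y_hat, y, x)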
diff --git a/nsr/losses/lms.py b/nsr/losses/lms.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8e71f89d6e429f4e81f11675042beb09c4009ff
--- /dev/null
+++ b/nsr/losses/lms.py
@@ -0,0 +1,94 @@
+# ------------------------------------------------------------------------------
+# https://github.dev/HRNet/HigherHRNet-Human-Pose-Estimation
+# Copyright (c) Microsoft
+# Licensed under the MIT License.
+# Written by Bin Xiao (leoxiaobin@gmail.com)
+# Modified by Bowen Cheng (bcheng9@illinois.edu)
+# ------------------------------------------------------------------------------
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy as np
+
+import logging
+
+import torch
+import torch.nn as nn
+from pdb import set_trace as st
+
+logger = logging.getLogger(__name__)
+
+
+class HeatmapGenerator():
+ def __init__(self, heatmap_size, num_joints=68, sigma=2):
+ self.heatmap_size = heatmap_size
+ # self.image_size = image_size
+ self.num_joints = num_joints
+ if sigma < 0:
+ sigma = self.heatmap_size / 64
+ self.sigma = sigma
+ size = 6 * sigma + 3
+ x = np.arange(0, size, 1, float)
+ y = x[:, np.newaxis]
+ x0, y0 = 3 * sigma + 1, 3 * sigma + 1
+ self.g = np.exp(-((x - x0)**2 + (y - y0)**2) / (2 * sigma**2))
+
+ # def __call__(self, joints, image_size: np.ndarray):
+ def __call__(self, joints, image_size: int):
+ """generate heatmap gt from joints
+
+ Args:
+ joints (np.ndarray): N,3
+
+ Returns:
+ hms: N,H,W
+ """
+ hms = np.zeros((self.num_joints, self.heatmap_size, self.heatmap_size),
+ dtype=np.float32)
+ sigma = self.sigma
+
+ # feat_stride = image_size / [self.heatmap_size, self.heatmap_size]
+ feat_stride = image_size / self.heatmap_size
+ for idx, pt in enumerate(joints):
+ # for idx, pt in enumerate(p):
+ if pt[2] > 0:
+ # x = int(pt[0] / feat_stride[0] + 0.5)
+ # y = int(pt[1] / feat_stride[1] + 0.5) # normalize joints to heatmap size
+ x = int(pt[0] / feat_stride + 0.5)
+ y = int(pt[1] / feat_stride +
+ 0.5) # normalize joints to heatmap size
+ if x < 0 or y < 0 or \
+ x >= self.heatmap_size or y >= self.heatmap_size:
+ continue
+
+ ul = int(np.round(x - 3 * sigma - 1)), int(
+ np.round(y - 3 * sigma - 1))
+ br = int(np.round(x + 3 * sigma + 2)), int(
+ np.round(y + 3 * sigma + 2))
+
+ c, d = max(0, -ul[0]), min(br[0], self.heatmap_size) - ul[0]
+ a, b = max(0, -ul[1]), min(br[1], self.heatmap_size) - ul[1]
+
+ cc, dd = max(0, ul[0]), min(br[0], self.heatmap_size)
+ aa, bb = max(0, ul[1]), min(br[1], self.heatmap_size)
+ hms[idx, aa:bb, cc:dd] = np.maximum(hms[idx, aa:bb, cc:dd],
+ self.g[a:b, c:d])
+ return hms
+
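+# Added usage sketch: 68 Gaussian heatmaps on a 64x64 grid for landmarks
+# given in 256x256 image coordinates (third column is visibility).
+#   gen = HeatmapGenerator(heatmap_size=64, num_joints=68, sigma=2)
+#   joints = np.concatenate([np.random.rand(68, 2) * 256, np.ones((68, 1))], 1)
+#   hms = gen(joints, image_size=256)  # (68, 64, 64)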
+
+class HeatmapLoss(nn.Module):
+ def __init__(self):
+ super().__init__()
+
+ def forward(self, pred, gt, mask=None):
+ # todo, add mask
+ assert pred.size() == gt.size()
+ loss = ((pred - gt)**2)
+ if mask is not None:
+ loss = loss * mask[:, None, :, :].expand_as(pred)
+ # loss = loss.mean(dim=3).mean(dim=2).mean(dim=1)
+ loss = loss.mean()
+ # loss = loss.mean(dim=3).mean(dim=2).sum(dim=1)
+ return loss
diff --git a/nsr/losses/model_irse.py b/nsr/losses/model_irse.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c5a0f6d927eb8523a6df7589bd5e05c936b9669
--- /dev/null
+++ b/nsr/losses/model_irse.py
@@ -0,0 +1,110 @@
+from torch.nn import Linear, Conv2d, BatchNorm1d, BatchNorm2d, PReLU, Dropout, Sequential, Module
+from .helpers import get_blocks, Flatten, bottleneck_IR, bottleneck_IR_SE, l2_norm
+"""
+Modified Backbone implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch)
+"""
+
+
+class Backbone(Module):
+ def __init__(self,
+ input_size,
+ num_layers,
+ mode='ir',
+ drop_ratio=0.4,
+ affine=True):
+ super(Backbone, self).__init__()
+ assert input_size in [112, 224], "input_size should be 112 or 224"
+ assert num_layers in [50, 100,
+ 152], "num_layers should be 50, 100 or 152"
+ assert mode in ['ir', 'ir_se'], "mode should be ir or ir_se"
+ blocks = get_blocks(num_layers)
+ if mode == 'ir':
+ unit_module = bottleneck_IR
+ elif mode == 'ir_se':
+ unit_module = bottleneck_IR_SE
+ self.input_layer = Sequential(Conv2d(3, 64, (3, 3), 1, 1, bias=False),
+ BatchNorm2d(64), PReLU(64))
+ if input_size == 112:
+ self.output_layer = Sequential(BatchNorm2d(512),
+ Dropout(drop_ratio), Flatten(),
+ Linear(512 * 7 * 7, 512),
+ BatchNorm1d(512, affine=affine))
+ else:
+ self.output_layer = Sequential(BatchNorm2d(512),
+ Dropout(drop_ratio), Flatten(),
+ Linear(512 * 14 * 14, 512),
+ BatchNorm1d(512, affine=affine))
+
+ modules = []
+ for block in blocks:
+ for bottleneck in block:
+ modules.append(
+ unit_module(bottleneck.in_channel, bottleneck.depth,
+ bottleneck.stride))
+ self.body = Sequential(*modules)
+
+ def forward(self, x):
+ x = self.input_layer(x)
+ x = self.body(x)
+ x = self.output_layer(x)
+ return l2_norm(x)
+
+
+def IR_50(input_size):
+ """Constructs a ir-50 model."""
+ model = Backbone(input_size,
+ num_layers=50,
+ mode='ir',
+ drop_ratio=0.4,
+ affine=False)
+ return model
+
+
+def IR_101(input_size):
+ """Constructs a ir-101 model."""
+ model = Backbone(input_size,
+ num_layers=100,
+ mode='ir',
+ drop_ratio=0.4,
+ affine=False)
+ return model
+
+
+def IR_152(input_size):
+ """Constructs a ir-152 model."""
+ model = Backbone(input_size,
+ num_layers=152,
+ mode='ir',
+ drop_ratio=0.4,
+ affine=False)
+ return model
+
+
+def IR_SE_50(input_size):
+ """Constructs a ir_se-50 model."""
+ model = Backbone(input_size,
+ num_layers=50,
+ mode='ir_se',
+ drop_ratio=0.4,
+ affine=False)
+ return model
+
+
+def IR_SE_101(input_size):
+ """Constructs a ir_se-101 model."""
+ model = Backbone(input_size,
+ num_layers=100,
+ mode='ir_se',
+ drop_ratio=0.4,
+ affine=False)
+ return model
+
+
+def IR_SE_152(input_size):
+ """Constructs a ir_se-152 model."""
+ model = Backbone(input_size,
+ num_layers=152,
+ mode='ir_se',
+ drop_ratio=0.4,
+ affine=False)
+ return model
diff --git a/nsr/losses/paths_config.py b/nsr/losses/paths_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..efd4e8094ee6bbf1acb0c82c22528c31e4b5480e
--- /dev/null
+++ b/nsr/losses/paths_config.py
@@ -0,0 +1,24 @@
+model_paths = {
+ 'ir_se50': 'pretrained_models/model_ir_se50.pth',
+ 'resnet34': 'pretrained_models/resnet34-333f7ec4.pth',
+ 'stylegan_ffhq': 'pretrained_models/stylegan2-ffhq-config-f.pt',
+ 'stylegan_cars': 'pretrained_models/stylegan2-car-config-f.pt',
+ 'stylegan_church': 'pretrained_models/stylegan2-church-config-f.pt',
+ 'stylegan_horse': 'pretrained_models/stylegan2-horse-config-f.pt',
+ 'stylegan_ada_wild': 'pretrained_models/afhqwild.pt',
+ 'stylegan_toonify': 'pretrained_models/ffhq_cartoon_blended.pt',
+ 'shape_predictor':
+ 'pretrained_models/shape_predictor_68_face_landmarks.dat',
+ 'circular_face': 'pretrained_models/CurricularFace_Backbone.pth',
+ 'mtcnn_pnet': 'pretrained_models/mtcnn/pnet.npy',
+ 'mtcnn_rnet': 'pretrained_models/mtcnn/rnet.npy',
+ 'mtcnn_onet': 'pretrained_models/mtcnn/onet.npy',
+ 'moco': 'pretrained_models/moco_v2_800ep_pretrain.pt'
+}
+
+project_basedir = '/mnt/lustre/yslan/Repo/Research/SIGA22/BaseModels/StyleSDF'
+
+for k, v in model_paths.items():
+ model_paths[k] = f'{project_basedir}/project/utils/misc/' + model_paths[k]
+
+model_paths['ir_se50_hwc'] = '/home/yslan/datasets/model_ir_se50.pth'
diff --git a/nsr/losses/sdfstudio_losses.py b/nsr/losses/sdfstudio_losses.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e560b07c8baec71febb8b4235d09e2cb68b2d0c
--- /dev/null
+++ b/nsr/losses/sdfstudio_losses.py
@@ -0,0 +1,771 @@
+# Copyright 2022 The Nerfstudio Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Collection of Losses.
+"""
+
+import torch
+import torch.nn.functional as F
+from torch import nn
+from torchtyping import TensorType
+from torch.autograd import Variable
+import numpy as np
+from math import exp
+
+# from nerfstudio.cameras.rays import RaySamples
+# from nerfstudio.field_components.field_heads import FieldHeadNames
+
+L1Loss = nn.L1Loss
+MSELoss = nn.MSELoss
+
+LOSSES = {"L1": L1Loss, "MSE": MSELoss}
+
+EPS = 1.0e-7
+
+
+def outer(
+ t0_starts: TensorType[..., "num_samples_0"],
+ t0_ends: TensorType[..., "num_samples_0"],
+ t1_starts: TensorType[..., "num_samples_1"],
+ t1_ends: TensorType[..., "num_samples_1"],
+ y1: TensorType[..., "num_samples_1"],
+) -> TensorType[..., "num_samples_0"]:
+ """Faster version of
+
+ https://github.com/kakaobrain/NeRF-Factory/blob/f61bb8744a5cb4820a4d968fb3bfbed777550f4a/src/model/mipnerf360/helper.py#L117
+ https://github.com/google-research/multinerf/blob/b02228160d3179300c7d499dca28cb9ca3677f32/internal/stepfun.py#L64
+
+ Args:
+ t0_starts: start of the interval edges
+ t0_ends: end of the interval edges
+ t1_starts: start of the interval edges
+ t1_ends: end of the interval edges
+ y1: weights
+ """
+ cy1 = torch.cat([torch.zeros_like(y1[..., :1]), torch.cumsum(y1, dim=-1)], dim=-1)
+
+ idx_lo = torch.searchsorted(t1_starts.contiguous(), t0_starts.contiguous(), side="right") - 1
+ idx_lo = torch.clamp(idx_lo, min=0, max=y1.shape[-1] - 1)
+ idx_hi = torch.searchsorted(t1_ends.contiguous(), t0_ends.contiguous(), side="right")
+ idx_hi = torch.clamp(idx_hi, min=0, max=y1.shape[-1] - 1)
+ cy1_lo = torch.take_along_dim(cy1[..., :-1], idx_lo, dim=-1)
+ cy1_hi = torch.take_along_dim(cy1[..., 1:], idx_hi, dim=-1)
+ y0_outer = cy1_hi - cy1_lo
+
+ return y0_outer
+
+
+def lossfun_outer(
+ t: TensorType[..., "num_samples+1"],
+ w: TensorType[..., "num_samples"],
+ t_env: TensorType[..., "num_samples+1"],
+ w_env: TensorType[..., "num_samples"],
+):
+ """
+ https://github.com/kakaobrain/NeRF-Factory/blob/f61bb8744a5cb4820a4d968fb3bfbed777550f4a/src/model/mipnerf360/helper.py#L136
+ https://github.com/google-research/multinerf/blob/b02228160d3179300c7d499dca28cb9ca3677f32/internal/stepfun.py#L80
+
+ Args:
+ t: interval edges
+ w: weights
+ t_env: interval edges of the upper-bound enveloping histogram
+ w_env: weights that should upper bound the inner (t,w) histogram
+ """
+ w_outer = outer(t[..., :-1], t[..., 1:], t_env[..., :-1], t_env[..., 1:], w_env)
+ return torch.clip(w - w_outer, min=0) ** 2 / (w + EPS)
+
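+# Added worked example: a coarse histogram that upper-bounds the fine one
+# incurs zero penalty.
+#   t = torch.tensor([[0.0, 0.5, 1.0]]); w = torch.tensor([[0.4, 0.6]])
+#   t_env = torch.tensor([[0.0, 1.0]]); w_env = torch.tensor([[1.0]])
+#   lossfun_outer(t, w, t_env, w_env)  # tensor([[0., 0.]])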
+
+def ray_samples_to_sdist(ray_samples):
+ """Convert ray samples to s space"""
+ starts = ray_samples.spacing_starts
+ ends = ray_samples.spacing_ends
+ sdist = torch.cat([starts[..., 0], ends[..., -1:, 0]], dim=-1) # (num_rays, num_samples + 1)
+ return sdist
+
+
+def interlevel_loss(weights_list, ray_samples_list):
+ """Calculates the proposal loss in the MipNeRF-360 paper.
+
+ https://github.com/kakaobrain/NeRF-Factory/blob/f61bb8744a5cb4820a4d968fb3bfbed777550f4a/src/model/mipnerf360/model.py#L515
+ https://github.com/google-research/multinerf/blob/b02228160d3179300c7d499dca28cb9ca3677f32/internal/train_utils.py#L133
+ """
+ c = ray_samples_to_sdist(ray_samples_list[-1]).detach()
+ w = weights_list[-1][..., 0].detach()
+ loss_interlevel = 0.0
+ for ray_samples, weights in zip(ray_samples_list[:-1], weights_list[:-1]):
+ sdist = ray_samples_to_sdist(ray_samples)
+ cp = sdist # (num_rays, num_samples + 1)
+ wp = weights[..., 0] # (num_rays, num_samples)
+ loss_interlevel += torch.mean(lossfun_outer(c, w, cp, wp))
+ return loss_interlevel
+
+
+## zip-NeRF losses
+def blur_stepfun(x, y, r):
+ x_c = torch.cat([x - r, x + r], dim=-1)
+ x_r, x_idx = torch.sort(x_c, dim=-1)
+ zeros = torch.zeros_like(y[:, :1])
+ y_1 = (torch.cat([y, zeros], dim=-1) - torch.cat([zeros, y], dim=-1)) / (2 * r)
+ x_idx = x_idx[:, :-1]
+ y_2 = torch.cat([y_1, -y_1], dim=-1)[
+ torch.arange(x_idx.shape[0]).reshape(-1, 1).expand(x_idx.shape).to(x_idx.device), x_idx
+ ]
+
+ y_r = torch.cumsum((x_r[:, 1:] - x_r[:, :-1]) * torch.cumsum(y_2, dim=-1), dim=-1)
+ y_r = torch.cat([zeros, y_r], dim=-1)
+ return x_r, y_r
+
+
+def interlevel_loss_zip(weights_list, ray_samples_list):
+ """Calculates the proposal loss in the Zip-NeRF paper."""
+ c = ray_samples_to_sdist(ray_samples_list[-1]).detach()
+ w = weights_list[-1][..., 0].detach()
+
+ # 1. normalize
+ w_normalize = w / (c[:, 1:] - c[:, :-1])
+
+ loss_interlevel = 0.0
+ for ray_samples, weights, r in zip(ray_samples_list[:-1], weights_list[:-1], [0.03, 0.003]):
+ # 2. step blur with different r
+ x_r, y_r = blur_stepfun(c, w_normalize, r)
+ y_r = torch.clip(y_r, min=0)
+ assert (y_r >= 0.0).all()
+
+ # 3. accumulate
+ y_cum = torch.cumsum((y_r[:, 1:] + y_r[:, :-1]) * 0.5 * (x_r[:, 1:] - x_r[:, :-1]), dim=-1)
+ y_cum = torch.cat([torch.zeros_like(y_cum[:, :1]), y_cum], dim=-1)
+
+ # 4 loss
+ sdist = ray_samples_to_sdist(ray_samples)
+ cp = sdist # (num_rays, num_samples + 1)
+ wp = weights[..., 0] # (num_rays, num_samples)
+
+ # resample
+ inds = torch.searchsorted(x_r, cp, side="right")
+ below = torch.clamp(inds - 1, 0, x_r.shape[-1] - 1)
+ above = torch.clamp(inds, 0, x_r.shape[-1] - 1)
+ cdf_g0 = torch.gather(x_r, -1, below)
+ bins_g0 = torch.gather(y_cum, -1, below)
+ cdf_g1 = torch.gather(x_r, -1, above)
+ bins_g1 = torch.gather(y_cum, -1, above)
+
+ t = torch.clip(torch.nan_to_num((cp - cdf_g0) / (cdf_g1 - cdf_g0), 0), 0, 1)
+ bins = bins_g0 + t * (bins_g1 - bins_g0)
+
+ w_gt = bins[:, 1:] - bins[:, :-1]
+
+ # TODO: this may be numerically unstable when wp is very small
+ loss_interlevel += torch.mean(torch.clip(w_gt - wp, min=0) ** 2 / (wp + 1e-5))
+
+ return loss_interlevel
+
+
+# Verified
+def lossfun_distortion(t, w):
+ """
+ https://github.com/kakaobrain/NeRF-Factory/blob/f61bb8744a5cb4820a4d968fb3bfbed777550f4a/src/model/mipnerf360/helper.py#L142
+ https://github.com/google-research/multinerf/blob/b02228160d3179300c7d499dca28cb9ca3677f32/internal/stepfun.py#L266
+ """
+ ut = (t[..., 1:] + t[..., :-1]) / 2
+ dut = torch.abs(ut[..., :, None] - ut[..., None, :])
+ loss_inter = torch.sum(w * torch.sum(w[..., None, :] * dut, dim=-1), dim=-1)
+
+ loss_intra = torch.sum(w**2 * (t[..., 1:] - t[..., :-1]), dim=-1) / 3
+
+ return loss_inter + loss_intra
+
+
+def distortion_loss(weights_list, ray_samples_list):
+ """From mipnerf360"""
+ c = ray_samples_to_sdist(ray_samples_list[-1])
+ w = weights_list[-1][..., 0]
+ loss = torch.mean(lossfun_distortion(c, w))
+ return loss
+
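+# Added worked example with bins t = [0, 0.5, 1]: putting all weight in one
+# bin leaves only the intra-bin term, while spreading weight across distant
+# bins adds the inter-bin term.
+#   t = torch.tensor([[0.0, 0.5, 1.0]])
+#   lossfun_distortion(t, torch.tensor([[1.0, 0.0]]))  # 0.5 / 3 ~ 0.1667
+#   lossfun_distortion(t, torch.tensor([[0.5, 0.5]]))  # 0.25 + 0.25 / 3 ~ 0.3333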
+
+# def nerfstudio_distortion_loss(
+# ray_samples: RaySamples,
+# densities: TensorType["bs":..., "num_samples", 1] = None,
+# weights: TensorType["bs":..., "num_samples", 1] = None,
+# ) -> TensorType["bs":..., 1]:
+# """Ray based distortion loss proposed in MipNeRF-360. Returns distortion Loss.
+
+# .. math::
+
+# \\mathcal{L}(\\mathbf{s}, \\mathbf{w}) =\\iint\\limits_{-\\infty}^{\\,\\,\\,\\infty}
+# \\mathbf{w}_\\mathbf{s}(u)\\mathbf{w}_\\mathbf{s}(v)|u - v|\\,d_{u}\\,d_{v}
+
+# where :math:`\\mathbf{w}_\\mathbf{s}(u)=\\sum_i w_i \\mathbb{1}_{[\\mathbf{s}_i, \\mathbf{s}_{i+1})}(u)`
+# is the weight at location :math:`u` between bin locations :math:`s_i` and :math:`s_{i+1}`.
+
+# Args:
+# ray_samples: Ray samples to compute loss over
+# densities: Predicted sample densities
+# weights: Predicted weights from densities and sample locations
+# """
+# if torch.is_tensor(densities):
+# assert not torch.is_tensor(weights), "Cannot use both densities and weights"
+# # Compute the weight at each sample location
+# weights = ray_samples.get_weights(densities)
+# if torch.is_tensor(weights):
+# assert not torch.is_tensor(densities), "Cannot use both densities and weights"
+
+# starts = ray_samples.spacing_starts
+# ends = ray_samples.spacing_ends
+
+# assert starts is not None and ends is not None, "Ray samples must have spacing starts and ends"
+# midpoints = (starts + ends) / 2.0 # (..., num_samples, 1)
+
+# loss = (
+# weights * weights[..., None, :, 0] * torch.abs(midpoints - midpoints[..., None, :, 0])
+# ) # (..., num_samples, num_samples)
+# loss = torch.sum(loss, dim=(-1, -2))[..., None] # (..., num_samples)
+# loss = loss + 1 / 3.0 * torch.sum(weights**2 * (ends - starts), dim=-2)
+
+# return loss
+
+
+def orientation_loss(
+ weights: TensorType["bs":..., "num_samples", 1],
+ normals: TensorType["bs":..., "num_samples", 3],
+ viewdirs: TensorType["bs":..., 3],
+):
+ """Orientation loss proposed in Ref-NeRF.
+ Loss that encourages that all visible normals are facing towards the camera.
+ """
+ w = weights
+ n = normals
+ v = viewdirs
+ n_dot_v = (n * v[..., None, :]).sum(axis=-1)
+ return (w[..., 0] * torch.fmin(torch.zeros_like(n_dot_v), n_dot_v) ** 2).sum(dim=-1)
+
+
+def pred_normal_loss(
+ weights: TensorType["bs":..., "num_samples", 1],
+ normals: TensorType["bs":..., "num_samples", 3],
+ pred_normals: TensorType["bs":..., "num_samples", 3],
+):
+ """Loss between normals calculated from density and normals from prediction network."""
+ return (weights[..., 0] * (1.0 - torch.sum(normals * pred_normals, dim=-1))).sum(dim=-1)
+
+
+def monosdf_normal_loss(normal_pred: torch.Tensor, normal_gt: torch.Tensor):
+ """normal consistency loss as monosdf
+
+ Args:
+ normal_pred (torch.Tensor): volume rendered normal
+ normal_gt (torch.Tensor): monocular normal
+ """
+ normal_gt = torch.nn.functional.normalize(normal_gt, p=2, dim=-1)
+ normal_pred = torch.nn.functional.normalize(normal_pred, p=2, dim=-1)
+ l1 = torch.abs(normal_pred - normal_gt).sum(dim=-1).mean()
+ cos = (1.0 - torch.sum(normal_pred * normal_gt, dim=-1)).mean()
+ return l1 + cos
+
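+# Added sanity check: identical unit normals give zero loss, since both the
+# L1 and the cosine term vanish.
+#   n = torch.nn.functional.normalize(torch.randn(100, 3), dim=-1)
+#   monosdf_normal_loss(n, n)  # tensor(0.)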
+
+# copy from MiDaS
+def compute_scale_and_shift(prediction, target, mask):
+ # system matrix: A = [[a_00, a_01], [a_10, a_11]]
+ a_00 = torch.sum(mask * prediction * prediction, (1, 2))
+ a_01 = torch.sum(mask * prediction, (1, 2))
+ a_11 = torch.sum(mask, (1, 2))
+
+ # right hand side: b = [b_0, b_1]
+ b_0 = torch.sum(mask * prediction * target, (1, 2))
+ b_1 = torch.sum(mask * target, (1, 2))
+
+ # solution: x = A^-1 . b = [[a_11, -a_01], [-a_10, a_00]] / (a_00 * a_11 - a_01 * a_10) . b
+ x_0 = torch.zeros_like(b_0)
+ x_1 = torch.zeros_like(b_1)
+
+ det = a_00 * a_11 - a_01 * a_01
+ valid = det.nonzero()
+
+ x_0[valid] = (a_11[valid] * b_0[valid] - a_01[valid] * b_1[valid]) / det[valid]
+ x_1[valid] = (-a_01[valid] * b_0[valid] + a_00[valid] * b_1[valid]) / det[valid]
+
+ return x_0, x_1
+
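+# Added sanity check: recover a known affine alignment. If
+# prediction = (target - 0.5) / 2, then target = 2 * prediction + 0.5, so the
+# closed-form least-squares solution returns scale ~ 2 and shift ~ 0.5.
+#   target = torch.rand(1, 8, 8)
+#   scale, shift = compute_scale_and_shift((target - 0.5) / 2, target,
+#                                          torch.ones_like(target))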
+
+def reduction_batch_based(image_loss, M):
+ # average of all valid pixels of the batch
+
+ # avoid division by 0 (if sum(M) = sum(sum(mask)) = 0: sum(image_loss) = 0)
+ divisor = torch.sum(M)
+
+ if divisor == 0:
+ return 0
+ else:
+ return torch.sum(image_loss) / divisor
+
+
+def reduction_image_based(image_loss, M):
+ # mean of average of valid pixels of an image
+
+ # avoid division by 0 (if M = sum(mask) = 0: image_loss = 0)
+ valid = M.nonzero()
+
+ image_loss[valid] = image_loss[valid] / M[valid]
+
+ return torch.mean(image_loss)
+
+
+def mse_loss(prediction, target, mask, reduction=reduction_batch_based):
+ M = torch.sum(mask, (1, 2))
+ res = prediction - target
+ image_loss = torch.sum(mask * res * res, (1, 2))
+
+ return reduction(image_loss, 2 * M)
+
+
+def gradient_loss(prediction, target, mask, reduction=reduction_batch_based):
+ M = torch.sum(mask, (1, 2))
+
+ diff = prediction - target
+ diff = torch.mul(mask, diff)
+
+ grad_x = torch.abs(diff[:, :, 1:] - diff[:, :, :-1])
+ mask_x = torch.mul(mask[:, :, 1:], mask[:, :, :-1])
+ grad_x = torch.mul(mask_x, grad_x)
+
+ grad_y = torch.abs(diff[:, 1:, :] - diff[:, :-1, :])
+ mask_y = torch.mul(mask[:, 1:, :], mask[:, :-1, :])
+ grad_y = torch.mul(mask_y, grad_y)
+
+ image_loss = torch.sum(grad_x, (1, 2)) + torch.sum(grad_y, (1, 2))
+
+ return reduction(image_loss, M)
+
+
+class MiDaSMSELoss(nn.Module):
+ def __init__(self, reduction="batch-based"):
+ super().__init__()
+
+ if reduction == "batch-based":
+ self.__reduction = reduction_batch_based
+ else:
+ self.__reduction = reduction_image_based
+
+ def forward(self, prediction, target, mask):
+ return mse_loss(prediction, target, mask, reduction=self.__reduction)
+
+
+class GradientLoss(nn.Module):
+ def __init__(self, scales=4, reduction="batch-based"):
+ super().__init__()
+
+ if reduction == "batch-based":
+ self.__reduction = reduction_batch_based
+ else:
+ self.__reduction = reduction_image_based
+
+ self.__scales = scales
+
+ def forward(self, prediction, target, mask):
+ total = 0
+
+ for scale in range(self.__scales):
+ step = pow(2, scale)
+
+ total += gradient_loss(
+ prediction[:, ::step, ::step],
+ target[:, ::step, ::step],
+ mask[:, ::step, ::step],
+ reduction=self.__reduction,
+ )
+
+ return total
+
+
+class ScaleAndShiftInvariantLoss(nn.Module):
+ def __init__(self, alpha=0.5, scales=4, reduction="batch-based"):
+ super().__init__()
+
+ self.__data_loss = MiDaSMSELoss(reduction=reduction)
+ self.__regularization_loss = GradientLoss(scales=scales, reduction=reduction)
+ self.__alpha = alpha
+
+ self.__prediction_ssi = None
+
+ def forward(self, prediction, target, mask):
+ scale, shift = compute_scale_and_shift(prediction, target, mask)
+ self.__prediction_ssi = scale.view(-1, 1, 1) * prediction + shift.view(-1, 1, 1)
+
+ total = self.__data_loss(self.__prediction_ssi, target, mask)
+ if self.__alpha > 0:
+ total += self.__alpha * self.__regularization_loss(self.__prediction_ssi, target, mask)
+
+ return total
+
+ def __get_prediction_ssi(self):
+ return self.__prediction_ssi
+
+ prediction_ssi = property(__get_prediction_ssi)
+
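+# Added usage sketch: align a monocular depth prediction to the target up to
+# scale and shift before penalizing the data and gradient terms.
+#   criterion = ScaleAndShiftInvariantLoss(alpha=0.5, scales=1)
+#   pred, gt = torch.rand(2, 32, 32), torch.rand(2, 32, 32)
+#   loss = criterion(pred, gt, torch.ones_like(gt))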
+
+# end copy
+
+
+# copy from https://github.com/svip-lab/Indoor-SfMLearner/blob/0d682b7ce292484e5e3e2161fc9fc07e2f5ca8d1/layers.py#L218
+class SSIM(nn.Module):
+ """Layer to compute the SSIM loss between a pair of images"""
+
+ def __init__(self, patch_size):
+ super(SSIM, self).__init__()
+ self.mu_x_pool = nn.AvgPool2d(patch_size, 1)
+ self.mu_y_pool = nn.AvgPool2d(patch_size, 1)
+ self.sig_x_pool = nn.AvgPool2d(patch_size, 1)
+ self.sig_y_pool = nn.AvgPool2d(patch_size, 1)
+ self.sig_xy_pool = nn.AvgPool2d(patch_size, 1)
+
+ self.refl = nn.ReflectionPad2d(patch_size // 2)
+
+ self.C1 = 0.01**2
+ self.C2 = 0.03**2
+
+ def forward(self, x, y):
+ x = self.refl(x)
+ y = self.refl(y)
+
+ mu_x = self.mu_x_pool(x)
+ mu_y = self.mu_y_pool(y)
+
+ sigma_x = self.sig_x_pool(x**2) - mu_x**2
+ sigma_y = self.sig_y_pool(y**2) - mu_y**2
+ sigma_xy = self.sig_xy_pool(x * y) - mu_x * mu_y
+
+ SSIM_n = (2 * mu_x * mu_y + self.C1) * (2 * sigma_xy + self.C2)
+ SSIM_d = (mu_x**2 + mu_y**2 + self.C1) * (sigma_x + sigma_y + self.C2)
+
+ return torch.clamp((1 - SSIM_n / SSIM_d) / 2, 0, 1)
+
+
+# TODO test different losses
+class NCC(nn.Module):
+ """Layer to compute the normalization cross correlation (NCC) of patches"""
+
+ def __init__(self, patch_size: int = 11, min_patch_variance: float = 0.01):
+ super(NCC, self).__init__()
+ self.patch_size = patch_size
+ self.min_patch_variance = min_patch_variance
+
+ def forward(self, x, y):
+ # TODO: if we use grayscale images, convert right after loading to save computation
+ # convert to grayscale
+ x = torch.mean(x, dim=1)
+ y = torch.mean(y, dim=1)
+
+ x_mean = torch.mean(x, dim=(1, 2), keepdim=True)
+ y_mean = torch.mean(y, dim=(1, 2), keepdim=True)
+
+ x_normalized = x - x_mean
+ y_normalized = y - y_mean
+
+ norm = torch.sum(x_normalized * y_normalized, dim=(1, 2))
+ var = torch.square(x_normalized).sum(dim=(1, 2)) * torch.square(y_normalized).sum(dim=(1, 2))
+ denom = torch.sqrt(var + 1e-6)
+
+ ncc = norm / (denom + 1e-6)
+
+ # ignore patches with low variance
+ not_valid = (torch.square(x_normalized).sum(dim=(1, 2)) < self.min_patch_variance) | (
+ torch.square(y_normalized).sum(dim=(1, 2)) < self.min_patch_variance
+ )
+ ncc[not_valid] = 1.0
+
+ score = 1 - ncc.clip(-1.0, 1.0) # in [0, 2]; smaller is better
+ return score[:, None, None, None]
+
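+# Added sanity sketch: identical patches are perfectly correlated, so the
+# returned error 1 - ncc is ~0; the output keeps a (B, 1, 1, 1) shape.
+#   ncc = NCC(patch_size=11)
+#   p = torch.rand(4, 3, 11, 11)
+#   ncc(p, p)  # ~0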
+
+class MultiViewLoss(nn.Module):
+ """compute multi-view consistency loss"""
+
+ def __init__(self, patch_size: int = 11, topk: int = 4, min_patch_variance: float = 0.01):
+ super(MultiViewLoss, self).__init__()
+ self.patch_size = patch_size
+ self.topk = topk
+ self.min_patch_variance = min_patch_variance
+ # TODO make metric configurable
+ # self.ssim = SSIM(patch_size=patch_size)
+ # self.ncc = NCC(patch_size=patch_size)
+ self.ssim = NCC(patch_size=patch_size, min_patch_variance=min_patch_variance)
+
+ self.iter = 0
+
+ def forward(self, patches: torch.Tensor, valid: torch.Tensor):
+ """take the mim
+
+ Args:
+ patches (torch.Tensor): _description_
+ valid (torch.Tensor): _description_
+
+ Returns:
+ _type_: _description_
+ """
+ num_imgs, num_rays, _, num_channels = patches.shape
+
+ if num_rays <= 0:
+ return torch.tensor(0.0).to(patches.device)
+
+ ref_patches = (
+ patches[:1, ...]
+ .reshape(1, num_rays, self.patch_size, self.patch_size, num_channels)
+ .expand(num_imgs - 1, num_rays, self.patch_size, self.patch_size, num_channels)
+ .reshape(-1, self.patch_size, self.patch_size, num_channels)
+ .permute(0, 3, 1, 2)
+ ) # [N_src*N_rays, 3, patch_size, patch_size]
+ src_patches = (
+ patches[1:, ...]
+ .reshape(num_imgs - 1, num_rays, self.patch_size, self.patch_size, num_channels)
+ .reshape(-1, self.patch_size, self.patch_size, num_channels)
+ .permute(0, 3, 1, 2)
+ ) # [N_src*N_rays, 3, patch_size, patch_size]
+
+ # apply same reshape to the valid mask
+ src_patches_valid = (
+ valid[1:, ...]
+ .reshape(num_imgs - 1, num_rays, self.patch_size, self.patch_size, 1)
+ .reshape(-1, self.patch_size, self.patch_size, 1)
+ .permute(0, 3, 1, 2)
+ ) # [N_src*N_rays, 1, patch_size, patch_size]
+
+ ssim = self.ssim(ref_patches.detach(), src_patches)
+ ssim = torch.mean(ssim, dim=(1, 2, 3))
+ ssim = ssim.reshape(num_imgs - 1, num_rays)
+
+ # ignore invalid patch by setting ssim error to very large value
+ ssim_valid = (
+ src_patches_valid.reshape(-1, self.patch_size * self.patch_size).all(dim=-1).reshape(num_imgs - 1, num_rays)
+ )
+ # mask the error only after selecting the top-k values; otherwise we might
+ # select faraway patches that happen to fall inside the image
+ # ssim[torch.logical_not(ssim_valid)] = 1.1 # max ssim_error is 1
+
+ min_ssim, idx = torch.topk(ssim, k=self.topk, largest=False, dim=0, sorted=True)
+
+ min_ssim_valid = ssim_valid[idx, torch.arange(num_rays)[None].expand_as(idx)]
+ # TODO how to set this value for better visualization
+ min_ssim[torch.logical_not(min_ssim_valid)] = 0.0 # max ssim_error is 1
+
+ if False:
+ # visualization of topK error computations
+
+ import cv2
+ import numpy as np
+
+ vis_patch_num = num_rays
+ K = min(100, vis_patch_num)
+
+ image = (
+ patches[:, :vis_patch_num, :, :]
+ .reshape(-1, vis_patch_num, self.patch_size, self.patch_size, 3)
+ .permute(1, 2, 0, 3, 4)
+ .reshape(vis_patch_num * self.patch_size, -1, 3)
+ )
+
+ src_patches_reshaped = src_patches.reshape(
+ num_imgs - 1, num_rays, 3, self.patch_size, self.patch_size
+ ).permute(1, 0, 3, 4, 2)
+ idx = idx.permute(1, 0)
+
+ selected_patch = (
+ src_patches_reshaped[torch.arange(num_rays)[:, None].expand(idx.shape), idx]
+ .permute(0, 2, 1, 3, 4)
+ .reshape(num_rays, self.patch_size, self.topk * self.patch_size, 3)[:vis_patch_num]
+ .reshape(-1, self.topk * self.patch_size, 3)
+ )
+
+ # apply same reshape to the valid mask
+ src_patches_valid_reshaped = src_patches_valid.reshape(
+ num_imgs - 1, num_rays, 1, self.patch_size, self.patch_size
+ ).permute(1, 0, 3, 4, 2)
+
+ selected_patch_valid = (
+ src_patches_valid_reshaped[torch.arange(num_rays)[:, None].expand(idx.shape), idx]
+ .permute(0, 2, 1, 3, 4)
+ .reshape(num_rays, self.patch_size, self.topk * self.patch_size, 1)[:vis_patch_num]
+ .reshape(-1, self.topk * self.patch_size, 1)
+ )
+ # valid to image
+ selected_patch_valid = selected_patch_valid.expand_as(selected_patch).float()
+ # breakpoint()
+
+ image = torch.cat([selected_patch_valid, selected_patch, image], dim=1)
+ # select top rays with highest errors
+
+ image = image.reshape(num_rays, self.patch_size, -1, 3)
+
+ _, idx2 = torch.topk(
+ torch.sum(min_ssim, dim=0) / (min_ssim_valid.float().sum(dim=0) + 1e-6),
+ k=K,
+ largest=True,
+ dim=0,
+ sorted=True,
+ )
+
+ image = image[idx2].reshape(K * self.patch_size, -1, 3)
+
+ cv2.imwrite(f"vis/{self.iter}.png", (image.detach().cpu().numpy() * 255).astype(np.uint8)[..., ::-1])
+ self.iter += 1
+ if self.iter == 9:
+ breakpoint()
+
+ return torch.sum(min_ssim) / (min_ssim_valid.float().sum() + 1e-6)
+
+
+# sensor depth loss, adapted from https://github.com/dazinovic/neural-rgbd-surface-reconstruction/blob/main/losses.py
+# class SensorDepthLoss(nn.Module):
+# """Sensor Depth loss"""
+
+# def __init__(self, truncation: float):
+# super(SensorDepthLoss, self).__init__()
+# self.truncation = truncation # 0.05 * 0.3 5cm scaled
+
+# def forward(self, batch, outputs):
+# """take the mim
+
+# Args:
+# batch (Dict): inputs
+# outputs (Dict): outputs data from surface model
+
+# Returns:
+# l1_loss: l1 loss
+# freespace_loss: free space loss
+# sdf_loss: sdf loss
+# """
+# depth_pred = outputs["depth"]
+# depth_gt = batch["sensor_depth"].to(depth_pred.device)[..., None]
+# valid_gt_mask = depth_gt > 0.0
+
+# l1_loss = torch.sum(valid_gt_mask * torch.abs(depth_gt - depth_pred)) / (valid_gt_mask.sum() + 1e-6)
+
+# # free space loss and sdf loss
+# ray_samples = outputs["ray_samples"]
+# filed_outputs = outputs["field_outputs"]
+# pred_sdf = filed_outputs[FieldHeadNames.SDF][..., 0]
+# directions_norm = outputs["directions_norm"]
+
+# z_vals = ray_samples.frustums.starts[..., 0] / directions_norm
+
+# truncation = self.truncation
+# front_mask = valid_gt_mask & (z_vals < (depth_gt - truncation))
+# back_mask = valid_gt_mask & (z_vals > (depth_gt + truncation))
+# sdf_mask = valid_gt_mask & (~front_mask) & (~back_mask)
+
+# num_fs_samples = front_mask.sum()
+# num_sdf_samples = sdf_mask.sum()
+# num_samples = num_fs_samples + num_sdf_samples + 1e-6
+# fs_weight = 1.0 - num_fs_samples / num_samples
+# sdf_weight = 1.0 - num_sdf_samples / num_samples
+
+# free_space_loss = torch.mean((F.relu(truncation - pred_sdf) * front_mask) ** 2) * fs_weight
+
+# sdf_loss = torch.mean(((z_vals + pred_sdf) - depth_gt) ** 2 * sdf_mask) * sdf_weight
+
+# return l1_loss, free_space_loss, sdf_loss
+
+r"""Implements Stochastic Structural SIMilarity(S3IM) algorithm.
+It is proposed in the ICCV2023 paper
+`S3IM: Stochastic Structural SIMilarity and Its Unreasonable Effectiveness for Neural Fields`.
+
+Arguments:
+ s3im_kernel_size (int): kernel size in ssim's convolution(default: 4)
+ s3im_stride (int): stride in ssim's convolution(default: 4)
+ s3im_repeat_time (int): repeat time in re-shuffle virtual patch(default: 10)
+ s3im_patch_height (height): height of virtual patch(default: 64)
+"""
+
+class S3IM(torch.nn.Module):
+ def __init__(self, s3im_kernel_size=4, s3im_stride=4, s3im_repeat_time=10, s3im_patch_height=64, size_average=True):
+ super(S3IM, self).__init__()
+ self.s3im_kernel_size = s3im_kernel_size
+ self.s3im_stride = s3im_stride
+ self.s3im_repeat_time = s3im_repeat_time
+ self.s3im_patch_height = s3im_patch_height
+ self.size_average = size_average
+ self.channel = 1
+ self.s3im_kernel = self.create_kernel(s3im_kernel_size, self.channel)
+
+
+ def gaussian(self, s3im_kernel_size, sigma):
+ gauss = torch.Tensor([exp(-(x - s3im_kernel_size//2)**2/float(2*sigma**2)) for x in range(s3im_kernel_size)])
+ return gauss/gauss.sum()
+
+ def create_kernel(self, s3im_kernel_size, channel):
+ _1D_window = self.gaussian(s3im_kernel_size, 1.5).unsqueeze(1)
+ _2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0)
+ s3im_kernel = Variable(_2D_window.expand(channel, 1, s3im_kernel_size, s3im_kernel_size).contiguous())
+ return s3im_kernel
+
+ def _ssim(self, img1, img2, s3im_kernel, s3im_kernel_size, channel, size_average = True, s3im_stride=None):
+ mu1 = F.conv2d(img1, s3im_kernel, padding = (s3im_kernel_size-1)//2, groups = channel, stride=s3im_stride)
+ mu2 = F.conv2d(img2, s3im_kernel, padding = (s3im_kernel_size-1)//2, groups = channel, stride=s3im_stride)
+
+ mu1_sq = mu1.pow(2)
+ mu2_sq = mu2.pow(2)
+ mu1_mu2 = mu1*mu2
+
+ sigma1_sq = F.conv2d(img1*img1, s3im_kernel, padding = (s3im_kernel_size-1)//2, groups = channel, stride=s3im_stride) - mu1_sq
+ sigma2_sq = F.conv2d(img2*img2, s3im_kernel, padding = (s3im_kernel_size-1)//2, groups = channel, stride=s3im_stride) - mu2_sq
+ sigma12 = F.conv2d(img1*img2, s3im_kernel, padding = (s3im_kernel_size-1)//2, groups = channel, stride=s3im_stride) - mu1_mu2
+
+ C1 = 0.01**2
+ C2 = 0.03**2
+
+ ssim_map = ((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + mu2_sq + C1)*(sigma1_sq + sigma2_sq + C2))
+
+ if size_average:
+ return ssim_map.mean()
+ else:
+ return ssim_map.mean(1).mean(1).mean(1)
+
+ def ssim_loss(self, img1, img2):
+ """
+ img1, img2: torch.Tensor([b,c,h,w])
+ """
+ (_, channel, _, _) = img1.size()
+
+ if channel == self.channel and self.s3im_kernel.data.type() == img1.data.type():
+ s3im_kernel = self.s3im_kernel
+ else:
+ s3im_kernel = self.create_kernel(self.s3im_kernel_size, channel)
+
+ if img1.is_cuda:
+ s3im_kernel = s3im_kernel.cuda(img1.get_device())
+ s3im_kernel = s3im_kernel.type_as(img1)
+
+ self.s3im_kernel = s3im_kernel
+ self.channel = channel
+
+
+ return self._ssim(img1, img2, s3im_kernel, self.s3im_kernel_size, channel, self.size_average, s3im_stride=self.s3im_stride)
+
+ def forward(self, src_vec, tar_vec):
+ loss = 0.0
+ index_list = []
+ for i in range(self.s3im_repeat_time):
+ if i == 0:
+ tmp_index = torch.arange(len(tar_vec))
+ index_list.append(tmp_index)
+ else:
+ ran_idx = torch.randperm(len(tar_vec))
+ index_list.append(ran_idx)
+ res_index = torch.cat(index_list)
+ tar_all = tar_vec[res_index]
+ src_all = src_vec[res_index]
+ tar_patch = tar_all.permute(1, 0).reshape(1, 3, self.s3im_patch_height, -1)
+ src_patch = src_all.permute(1, 0).reshape(1, 3, self.s3im_patch_height, -1)
+ loss = (1 - self.ssim_loss(src_patch, tar_patch))
+ return loss
+
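+# Added usage sketch: src_vec / tar_vec are flattened per-ray RGB tensors of
+# shape (N, 3); repeated random shuffles build one tall virtual patch on
+# which SSIM is evaluated.
+#   s3im = S3IM()
+#   src, tar = torch.rand(4096, 3), torch.rand(4096, 3)
+#   loss = s3im(src, tar)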
diff --git a/nsr/losses/util.py b/nsr/losses/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..49c76e370bf16888ab61f42844b3c9f14ad9014c
--- /dev/null
+++ b/nsr/losses/util.py
@@ -0,0 +1,128 @@
+import hashlib
+import os
+
+import requests
+import torch
+import torch.nn as nn
+from tqdm import tqdm
+
+URL_MAP = {"vgg_lpips": "https://heibox.uni-heidelberg.de/f/607503859c864bc1b30b/?dl=1"}
+
+CKPT_MAP = {"vgg_lpips": "vgg.pth"}
+
+MD5_MAP = {"vgg_lpips": "d507d7349b931f0638a25a48a722f98a"}
+
+
+def download(url, local_path, chunk_size=1024):
+ os.makedirs(os.path.split(local_path)[0], exist_ok=True)
+ with requests.get(url, stream=True) as r:
+ total_size = int(r.headers.get("content-length", 0))
+ with tqdm(total=total_size, unit="B", unit_scale=True) as pbar:
+ with open(local_path, "wb") as f:
+ for data in r.iter_content(chunk_size=chunk_size):
+ if data:
+ f.write(data)
+ pbar.update(chunk_size)
+
+
+def md5_hash(path):
+ with open(path, "rb") as f:
+ content = f.read()
+ return hashlib.md5(content).hexdigest()
+
+
+def get_ckpt_path(name, root, check=False):
+ assert name in URL_MAP
+ path = os.path.join(root, CKPT_MAP[name])
+ if not os.path.exists(path) or (check and not md5_hash(path) == MD5_MAP[name]):
+ print("Downloading {} model from {} to {}".format(name, URL_MAP[name], path))
+ download(URL_MAP[name], path)
+ md5 = md5_hash(path)
+ assert md5 == MD5_MAP[name], md5
+ return path
+
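+# Added usage sketch: fetch (and md5-verify) the LPIPS VGG weights into a
+# local cache directory.
+#   path = get_ckpt_path('vgg_lpips', 'pretrained/lpips', check=True)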
+
+class ActNorm(nn.Module):
+ def __init__(
+ self, num_features, logdet=False, affine=True, allow_reverse_init=False
+ ):
+ assert affine
+ super().__init__()
+ self.logdet = logdet
+ self.loc = nn.Parameter(torch.zeros(1, num_features, 1, 1))
+ self.scale = nn.Parameter(torch.ones(1, num_features, 1, 1))
+ self.allow_reverse_init = allow_reverse_init
+
+ self.register_buffer("initialized", torch.tensor(0, dtype=torch.uint8))
+
+ def initialize(self, input):
+ with torch.no_grad():
+ flatten = input.permute(1, 0, 2, 3).contiguous().view(input.shape[1], -1)
+ mean = (
+ flatten.mean(1)
+ .unsqueeze(1)
+ .unsqueeze(2)
+ .unsqueeze(3)
+ .permute(1, 0, 2, 3)
+ )
+ std = (
+ flatten.std(1)
+ .unsqueeze(1)
+ .unsqueeze(2)
+ .unsqueeze(3)
+ .permute(1, 0, 2, 3)
+ )
+
+ self.loc.data.copy_(-mean)
+ self.scale.data.copy_(1 / (std + 1e-6))
+
+ def forward(self, input, reverse=False):
+ if reverse:
+ return self.reverse(input)
+ if len(input.shape) == 2:
+ input = input[:, :, None, None]
+ squeeze = True
+ else:
+ squeeze = False
+
+ _, _, height, width = input.shape
+
+ if self.training and self.initialized.item() == 0:
+ self.initialize(input)
+ self.initialized.fill_(1)
+
+ h = self.scale * (input + self.loc)
+
+ if squeeze:
+ h = h.squeeze(-1).squeeze(-1)
+
+ if self.logdet:
+ log_abs = torch.log(torch.abs(self.scale))
+ logdet = height * width * torch.sum(log_abs)
+ logdet = logdet * torch.ones(input.shape[0]).to(input)
+ return h, logdet
+
+ return h
+
+ def reverse(self, output):
+ if self.training and self.initialized.item() == 0:
+ if not self.allow_reverse_init:
+ raise RuntimeError(
+ "Initializing ActNorm in reverse direction is "
+ "disabled by default. Use allow_reverse_init=True to enable."
+ )
+ else:
+ self.initialize(output)
+ self.initialized.fill_(1)
+
+ if len(output.shape) == 2:
+ output = output[:, :, None, None]
+ squeeze = True
+ else:
+ squeeze = False
+
+ h = output / self.scale - self.loc
+
+ if squeeze:
+ h = h.squeeze(-1).squeeze(-1)
+ return h
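+
+
+# Added usage sketch: ActNorm calibrates its offset and scale from the first
+# training batch, so activations start roughly zero-mean and unit-variance.
+#   norm = ActNorm(num_features=64)
+#   h = norm(torch.randn(8, 64, 16, 16))  # first call triggers initialization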
diff --git a/nsr/losses/vqperceptual.py b/nsr/losses/vqperceptual.py
new file mode 100644
index 0000000000000000000000000000000000000000..6195f0a6ed7ee6fd32c1bccea071e6075e95ee43
--- /dev/null
+++ b/nsr/losses/vqperceptual.py
@@ -0,0 +1,17 @@
+import torch
+import torch.nn.functional as F
+
+
+def hinge_d_loss(logits_real, logits_fake):
+ loss_real = torch.mean(F.relu(1.0 - logits_real))
+ loss_fake = torch.mean(F.relu(1.0 + logits_fake))
+ d_loss = 0.5 * (loss_real + loss_fake)
+ return d_loss
+
+
+def vanilla_d_loss(logits_real, logits_fake):
+ d_loss = 0.5 * (
+ torch.mean(torch.nn.functional.softplus(-logits_real))
+ + torch.mean(torch.nn.functional.softplus(logits_fake))
+ )
+ return d_loss
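+
+
+# Added quick check: a well-separating discriminator drives both losses
+# toward zero.
+#   real, fake = torch.full((8,), 10.0), torch.full((8,), -10.0)
+#   hinge_d_loss(real, fake)    # tensor(0.)
+#   vanilla_d_loss(real, fake)  # ~4.5e-05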
diff --git a/nsr/lsgm/__init__.py b/nsr/lsgm/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..41d6b4c80aa783662862376381d4a59298c12156
--- /dev/null
+++ b/nsr/lsgm/__init__.py
@@ -0,0 +1,9 @@
+# sde diffusion
+from .train_util_diffusion_lsgm import TrainLoop3DDiffusionLSGM
+from .train_util_diffusion_vpsde import TrainLoop3DDiffusion_vpsde
+from .train_util_diffusion_lsgm_noD import TrainLoop3DDiffusionLSGM_noD
+
+# sgm, lsgm
+from .crossattn_cldm import *
+from .sgm_DiffusionEngine import *
+from .flow_matching_trainer import *
\ No newline at end of file
diff --git a/nsr/lsgm/controlLDM.py b/nsr/lsgm/controlLDM.py
new file mode 100644
index 0000000000000000000000000000000000000000..d181a869008408c44cc4408393ea16431c5ad232
--- /dev/null
+++ b/nsr/lsgm/controlLDM.py
@@ -0,0 +1,556 @@
+"""
+Adapted from ControlNet/cldm/cldm.py.
+"""
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+import einops
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+import dnnlib
+from dnnlib.util import requires_grad
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+
+
+class TrainLoop3DDiffusionLSGM_Control(TrainLoop3DDiffusionLSGMJointnoD):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ **kwargs)
+ self.resume_cldm_checkpoint = resume_cldm_checkpoint
+ self.control_model = control_model
+ self.control_key = control_key
+ self.only_mid_control = only_mid_control
+ self.control_scales = [1.0] * 13
+ self.sd_locked = True
+ self._setup_control_model()
+
+ def _setup_control_model(self):
+
+ requires_grad(self.rec_model, False)
+ requires_grad(self.ddpm_model, self.sd_locked)
+
+ self.mp_cldm_trainer = MixedPrecisionTrainer(
+ model=self.control_model,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=self.fp16_scale_growth,
+ use_amp=self.use_amp,
+ model_name='cldm')
+
+ self.ddp_control_model = DDP(
+ self.control_model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+
+ # ! load trainable copy
+ try:
+ logger.log(f"load pretrained controlnet, not trainable copy.")
+ self._load_and_sync_parameters(model=self.control_model,
+ model_name='cldm',
+ resume_checkpoint=self.resume_cldm_checkpoint,
+ ) # if available
+ except Exception:
+ logger.log("initializing ControlNet as a trainable copy of the pretrained SD weights")
+ self._load_and_sync_parameters(
+ model=self.control_model,
+ model_name='ddpm') # load pre-trained SD
+
+ cldm_param = [{
+ 'name': 'cldm.parameters()',
+ 'params': self.control_model.parameters(),
+ }]
+ if self.sde_diffusion.args.unfix_logit:
+ self.ddpm_model.mixing_logit.requires_grad_(True)
+ cldm_param.append({
+ 'name': 'mixing_logit',
+ 'params': self.ddpm_model.mixing_logit,
+ })
+
+ self.opt_cldm = AdamW(cldm_param,
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+ if self.sd_locked:
+ del self.opt
+
+ # def _load_model(self):
+ # super()._load_model()
+ # # ! load pre-trained "SD" and controlNet also
+ # self._load_and_sync_parameters(model=self.contro,
+ # model_name='cldm') #
+
+ # def _setup_opt(self):
+ # TODO, two optims groups.
+
+ # for rec_param_group in self._init_optim_groups(self.rec_model):
+ # self.opt.add_param_group(rec_param_group)
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ # dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, step='cldm_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ # if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ # self.eval_ddpm_sample()
+ self.eval_cldm()
+ # if self.sde_diffusion.args.train_vae:
+ # self.eval_loop()
+
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save(self.mp_cldm_trainer,
+ self.mp_cldm_trainer.model_name)
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save(self.mp_cldm_trainer,
+ self.mp_cldm_trainer.model_name)
+ # if self.sde_diffusion.args.train_vae:
+ # self.save(self.mp_trainer_rec,
+ # self.mp_trainer_rec.model_name)
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save(
+ self.mp_cldm_trainer,
+ self.mp_cldm_trainer.model_name) # rec and ddpm all fixed.
+ # st()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ def _update_cldm_ema(self):
+ for rate, params in zip(self.ema_rate, self.ema_cldm_params):
+ update_ema(params, self.mp_cldm_trainer.master_params, rate=rate)
+
+ def run_step(self, batch, step='cldm_step'):
+
+ # if step == 'diffusion_step_rec':
+
+ if step == 'cldm_step':
+ self.cldm_train_step(batch)
+
+ # if took_step_ddpm:
+ # self._update_cldm_ema()
+
+ self._anneal_lr()
+ self.log_step()
+
+ @th.no_grad()
+ def get_c_input(self, batch, bs=None, *args, **kwargs):
+ # x, c = super().get_input(batch, self.first_stage_key, *args, **kwargs)
+ control = batch[self.control_key]
+ if bs is not None:
+ control = control[:bs]
+ # control = control.to(self.device)
+ # control = einops.rearrange(control, 'b h w c -> b c h w')
+ control = control.to(memory_format=th.contiguous_format).float()
+ # return x, dict(c_crossattn=[c], c_concat=[control])
+ return dict(c_concat=[control])
+
+ # kept for compatibility with p_sample
+ def apply_model_inference(self, x_noisy, t, c, model_kwargs={}):
+ control = self.ddp_control_model(x=x_noisy,
+ hint=th.cat(c['c_concat'], 1),
+ timesteps=t,
+ context=None)
+ control = [c * scale for c, scale in zip(control, self.control_scales)]
+ pred_params = self.ddp_ddpm_model(
+ x_noisy, t, **{
+ **model_kwargs, 'control': control
+ })
+ return pred_params
+
+ def apply_control_model(self, p_sample_batch, cond):
+        x_noisy, t = (p_sample_batch[k] for k in ('eps_t_p', 't_p'))
+
+ control = self.ddp_control_model(x=x_noisy,
+ hint=th.cat(cond['c_concat'], 1),
+ timesteps=t,
+ context=None)
+ control = [c * scale for c, scale in zip(control, self.control_scales)]
+ return control
+
+ def apply_model(self, p_sample_batch, cond, model_kwargs={}):
+ control = self.apply_control_model(p_sample_batch,
+ cond) # len(control): 13
+ return super().apply_model(p_sample_batch, **{
+ **model_kwargs, 'control': control
+ })
+
+ # ddpm + rec loss
+    def cldm_train_step(self, batch, behaviour='cano', *args, **kwargs):
+        """
+        ControlNet (cLDM) training step: optimize the diffusion eps objective,
+        plus an optional rendering loss on the AE-decoded predicted x_0.
+        """
+
+ # ! enable the gradient of both models
+ requires_grad(self.ddp_control_model, True)
+
+ self.mp_cldm_trainer.zero_grad() # !!!!
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_cldm_trainer.use_amp):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+ # eps = vae_out[self.latent_name]
+ eps = vae_out.pop(self.latent_name)
+
+ p_sample_batch = self.prepare_ddpm(eps)
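+                # prepare_ddpm perturbs eps at a sampled timestep and returns the
+                # ('eps_t_p', 't_p', 'logsnr_p', ...) terms consumed by apply_model.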
+ cond = self.get_c_input(micro)
+
+ # ! running diffusion forward
+ ddpm_ret = self.apply_model(p_sample_batch, cond)
+ if self.sde_diffusion.args.p_rendering_loss:
+
+ target = micro
+ pred = self.ddp_rec_model(
+ # latent=vae_out,
+ latent={
+ # **vae_out,
+ self.latent_name:
+ ddpm_ret['pred_x0_p'],
+ 'latent_name': self.latent_name
+ },
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ with self.ddp_control_model.no_sync(): # type: ignore
+ p_vae_recon_loss, rec_loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ log_rec3d_loss_dict(rec_loss_dict)
+ # log_rec3d_loss_dict(
+ # dict(p_vae_recon_loss=p_vae_recon_loss, ))
+ loss = p_vae_recon_loss + ddpm_ret['p_eps_objective'] # TODO, add obj_weight_t_p?
+ else:
+ loss = ddpm_ret['p_eps_objective']
+
+ # =====================================================================
+
+ self.mp_cldm_trainer.backward(loss) # joint gradient descent
+
+ # update ddpm accordingly
+ self.mp_cldm_trainer.optimize(self.opt_cldm)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.log_control_images(vae_out, p_sample_batch, micro,
+ ddpm_ret)
+
+ @th.inference_mode()
+ def log_control_images(self, vae_out, p_sample_batch, micro, ddpm_ret):
+
+ eps_t_p, t_p, logsnr_p = (p_sample_batch[k] for k in (
+ 'eps_t_p',
+ 't_p',
+ 'logsnr_p',
+ ))
+ pred_eps_p = ddpm_ret['pred_eps_p']
+
+ vae_out.pop('posterior') # for calculating kl loss
+ vae_out_for_pred = {
+ k: v[0:1].to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ for k, v in vae_out.items()
+ }
+
+ pred = self.ddp_rec_model(latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ else:
+ gt_depth = th.zeros_like(gt_img[:, 0:1, ...])
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+
+ gt_img = self.pool_128(gt_img)
+ gt_depth = self.pool_128(gt_depth)
+ cond = self.get_c_input(micro)
+ hint = th.cat(cond['c_concat'], 1)
+
+ gt_vis = th.cat([
+ gt_img,
+ self.pool_128(hint), gt_img,
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+                        dim=-1)[0:1]  # TODO: depth may fail to load; values in [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=eps_t_p[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+            eps_t_p, pred_eps_p, logsnr_p) # for VAE loss, denoised latent
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=pred_x0[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat(
+ [
+ self.pool_128(img) for img in (
+ pred_img[0:1],
+ noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1], # controlnet result
+ pred_depth[0:1].repeat_interleave(3, dim=1))
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis],
+ dim=-2)[0].permute(1, 2,
+ 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+        img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}.jpg'
+        Image.fromarray(vis).save(img_save_path)
+        print('log denoised vis to: ', img_save_path)
+
+ th.cuda.empty_cache()
+
+ @th.inference_mode()
+ def eval_cldm(self):
+ self.control_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+        if args.class_cond:
+            # note: NUM_CLASSES is never imported in this file; this branch is
+            # dead code since class_cond is hard-coded to False above.
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+
+ # for i, batch in enumerate(tqdm(self.eval_data)):
+ batch = next(iter(self.eval_data))
+
+ # use the first frame as the condition now
+ novel_view_cond = {
+ k: v[0:1].to(dist_util.dev()) # .repeat_interleave(
+ # micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ cond = self.get_c_input(novel_view_cond)
+ hint = th.cat(cond['c_concat'], 1)
+
+ # record cond images
+ torchvision.utils.save_image(
+ hint,
+ f'{logger.get_dir()}/{self.step + self.resume_step}_cond.jpg',
+ normalize=True,
+ value_range=(-1, 1))
+
+ # broadcast to args.batch_size
+        cond = {
+            k: [c.repeat_interleave(args.batch_size, 0) for c in cond_list]
+            for k, cond_list in cond.items()  # list of Tensors
+        }
+
+ for i in range(1):
+ triplane_sample = sample_fn(
+ self,
+ (
+ args.batch_size,
+ self.rec_model.decoder.ldm_z_channels * 3, # type: ignore
+ self.diffusion_input_size,
+ self.diffusion_input_size),
+ cond=cond,
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ mixing_normal=True, # !
+ device=dist_util.dev())
+ th.cuda.empty_cache()
+
+ self.render_video_given_triplane(
+ triplane_sample,
+ self.rec_model, # compatible with join_model
+ name_prefix=f'{self.step + self.resume_step}_{i}')
+
+ del triplane_sample
+ th.cuda.empty_cache()
+
+ self.control_model.train()
\ No newline at end of file
diff --git a/nsr/lsgm/crossattn_cldm.py b/nsr/lsgm/crossattn_cldm.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e0f0744f1466f8a22b5ea0821eb0e9e50ca6215
--- /dev/null
+++ b/nsr/lsgm/crossattn_cldm.py
@@ -0,0 +1,1147 @@
+"""
+https://github.com/CompVis/stable-diffusion/blob/21f890f9da3cfbeaba8e2ac3c425ee9e998d5229/ldm/models/diffusion/ddpm.py#L30
+"""
+import copy
+import functools
+import random
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+import einops
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+from ldm.modules.encoders.modules import FrozenClipImageEmbedder, TextEmbedder, FrozenCLIPTextEmbedder, FrozenOpenCLIPImagePredictionEmbedder, FrozenOpenCLIPImageEmbedder
+
+import dnnlib
+from dnnlib.util import requires_grad
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+# from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+# from .controlLDM import TrainLoop3DDiffusionLSGM_Control # joint diffusion and rec class
+from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+
+__conditioning_keys__ = {
+ 'concat': 'c_concat',
+ 'crossattn': 'c_crossattn',
+ 'adm': 'y'
+}
+
+
+def disabled_train(self, mode=True):
+ """Overwrite model.train with this function to make sure train/eval mode
+ does not change anymore."""
+ return self
+
+
+class TrainLoop3DDiffusionLSGM_crossattn(TrainLoop3DDiffusionLSGMJointnoD):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ normalize_clip_encoding=False,
+ scale_clip_encoding=1.0,
+ cfg_dropout_prob=0.,
+ cond_key='img_sr',
+ use_eos_feature=False,
+ compile=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ control_model=control_model,
+ control_key=control_key,
+ only_mid_control=only_mid_control,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=resume_cldm_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ compile=compile,
+ **kwargs)
+ self.conditioning_key = 'c_crossattn'
+ self.cond_key = cond_key
+ self.instantiate_cond_stage(normalize_clip_encoding,
+ scale_clip_encoding, cfg_dropout_prob,
+ use_eos_feature)
+ requires_grad(self.rec_model, False)
+ self.rec_model.eval()
+
+ # self.normalize_clip_encoding = normalize_clip_encoding
+ # self.cfg_dropout_prob = cfg_dropout_prob
+
+ def instantiate_cond_stage(self, normalize_clip_encoding,
+ scale_clip_encoding, cfg_dropout_prob,
+ use_eos_feature):
+ # https://github.com/CompVis/stable-diffusion/blob/21f890f9da3cfbeaba8e2ac3c425ee9e998d5229/ldm/models/diffusion/ddpm.py#L509C1-L509C46
+ # self.cond_stage_model.train = disabled_train # type: ignore
+ if self.cond_key == 'caption':
+ self.cond_txt_model = TextEmbedder(dropout_prob=cfg_dropout_prob,
+ use_eos_feature=use_eos_feature)
+ elif self.cond_key == 'img':
+ self.cond_img_model = FrozenOpenCLIPImagePredictionEmbedder(
+ 1, 1,
+ FrozenOpenCLIPImageEmbedder(freeze=True,
+ device=dist_util.dev(),
+ init_device=dist_util.dev()))
+
+ else: # zero-shot Text to 3D using normalized clip latent
+ self.cond_stage_model = FrozenClipImageEmbedder(
+ 'ViT-L/14',
+ dropout_prob=cfg_dropout_prob,
+ normalize_encoding=normalize_clip_encoding,
+ scale_clip_encoding=scale_clip_encoding)
+ self.cond_stage_model.freeze()
+
+ self.cond_txt_model = FrozenCLIPTextEmbedder(
+ dropout_prob=cfg_dropout_prob,
+ scale_clip_encoding=scale_clip_encoding)
+ self.cond_txt_model.freeze()
+
+ @th.no_grad()
+ def get_c_input(self,
+ batch,
+ bs=None,
+ use_text=False,
+ prompt="",
+ force_drop_ids=None,
+ *args,
+ **kwargs):
+ if use_text:
+ cond_inp = prompt
+ else:
+ if 'caption' in self.cond_key: # support caption-img
+ cond_inp = batch['caption']
+ else:
+ cond_inp = batch[self.cond_key]
+ # if bs is not None:
+ # cond_inp = cond_inp[:bs]
+
+ # using clip to transform control to tokens for crossattn
+ control = None
+ if 'caption' in self.cond_key:
+ c = self.cond_txt_model(
+ cond_inp,
+ train=self.ddpm_model.training,
+ force_drop_ids=force_drop_ids,
+ ) # ! SD training text condition injection layer
+ if bs is None: # duplicated sample
+ if c.shape[0] != batch['c'].shape[0]:
+ c = th.repeat_interleave(c,
+ batch['c'].shape[0] // c.shape[0],
+ dim=0)
+ else:
+ assert c.shape[0] == bs
+
+ # st()
+ # if 'img' in self.cond_key:
+
+ # ! later
+ # if 'img' in batch:
+ # control = batch['img'] + 0.02 * th.randn_like(
+ # batch['img']) # follow SVD?
+
+ elif self.cond_key == 'img':
+ c = self.cond_img_model(cond_inp)
+ # control = batch['img']
+ control = batch['img'] + 0.02 * th.randn_like(
+ batch['img']) # follow SVD?
+
+ else: # zero shot
+ if use_text: # for test
+ assert prompt != ""
+ c = self.cond_txt_model.encode(prompt) # ! for test
+ else:
+ cond_inp = cond_inp.to(
+ memory_format=th.contiguous_format).float()
+ c = self.cond_stage_model(cond_inp) # BS 768
+
+ # if c.shape[0] < batch['img_to_encoder'].shape[0]:
+ # c = th.repeat_interleave(c, batch['img_to_encoder'].shape[0]//c.shape[0], dim=0)
+
+ # return dict(c_concat=[control])
+ # return dict(c_crossattn=c, c_concat=batch['img'])
+ # if self.cond_key == 'img':
+ # return dict(c_crossattn=c, c_concat=control)
+ return dict(c_crossattn=c)
+
+ # else:
+ # return dict(c_crossattn=c)
+
+ # return dict(__conditioning_keys__[self.cond_key]=)
+ # return {self.conditioning_key: [c], 'c_concat': [cond_inp]}
+ # return {self.conditioning_key: c, 'c_concat': [cond_inp]}
+
+ # TODO, merge the APIs
+ def apply_model_inference(self, x_noisy, t, c, model_kwargs={}):
+ pred_params = self.ddp_ddpm_model(x_noisy,
+ timesteps=t,
+ **{
+ **model_kwargs, 'context':
+ c['c_crossattn'],
+ 'hint':
+ c.get('c_concat', None)
+ })
+ return pred_params
+
+ def apply_model(self, p_sample_batch, cond, model_kwargs={}):
+ return super().apply_model(
+ p_sample_batch,
+ **{
+ **model_kwargs, 'context': cond['c_crossattn'],
+ 'hint': cond.get('c_concat', None)
+ # **cond,
+ })
+
+ def run_step(self, batch, step='ldm_step'):
+
+ # if step == 'diffusion_step_rec':
+
+ if step == 'ldm_step':
+ self.ldm_train_step(batch)
+
+ # if took_step_ddpm:
+ # self._update_cldm_ema()
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ # eval camera
+ camera = th.load('eval_pose.pt', map_location=dist_util.dev())
+
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ # dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, step='ldm_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+                try:
+                    out = logger.dumpkvs()
+                    # * log to tensorboard
+                    for k, v in out.items():
+                        self.writer.add_scalar(f'Loss/{k}', v,
+                                               self.step + self.resume_step)
+                except Exception:
+                    pass  # no storage for logs; do not skip the step counter below
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ # st()
+ if self.step % self.eval_interval == 0:
+ # if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ # self.eval_ddpm_sample()
+ # self.eval_cldm(use_ddim=True, unconditional_guidance_scale=7.5, prompt="") # during training, use image as condition
+ if self.cond_key == 'caption':
+ prompt_list = [
+ # "a voxelized dog",
+ # "a cute toy cat",
+ 'Yellow rubber duck',
+ # "a blue plastic chair.",
+ # "An Eiffel Tower.",
+ # "A bowl of food.",
+ # "An engineer.",
+ # 'A bowl of vegetables',
+ # 'A birthday cupcake',
+ # 'A chair that looks like an avocado',
+ ]
+ self.eval_cldm(
+ # use_ddim=False,
+ # prompt="a voxelized dog",
+ # prompt="a blue plastic chair.",
+ # prompt="An Eiffel Tower.",
+ # prompt="A bowl of food.",
+ # prompt="An engineer.",
+ # prompt='A bowl of vegetables',
+ # prompt='A birthday cupcake',
+                            prompt=random.choice(prompt_list),
+ use_train_trajectory=False,
+ camera=camera) # fix condition bug first
+ else:
+ # i23d
+ self.eval_cldm(
+ use_train_trajectory=False,
+ camera=camera) # fix condition bug first
+
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ # if self.step % self.save_interval == 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+
+                    # ! delete stale checkpoints every 100k steps
+                    if (self.step - 1) % 100_000 == 0:
+                        for idx in range(1, 10):
+                            # integer arithmetic is required for the :07d format
+                            # (1e4/1e5 are floats); assumes the checkpoints live
+                            # in the blob logdir, matching save().
+                            os.remove(
+                                os.path.join(
+                                    get_blob_logdir(),
+                                    f"model_{self.mp_trainer.model_name}{self.step + self.resume_step - idx * 10_000:07d}.pt"
+                                ))
+
+ # if self.sde_diffusion.args.train_vae:
+ # self.save(self.mp_trainer_rec,
+ # self.mp_trainer_rec.model_name)
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save(self.mp_trainer,
+ self.mp_trainer.model_name) # rec and ddpm all fixed.
+ # st()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ # ddpm + rec loss
+    def ldm_train_step(self, batch, behaviour='cano', *args, **kwargs):
+        """
+        LDM training step: optimize the diffusion eps objective, plus an
+        optional rendering loss on the AE-decoded predicted x_0.
+        """
+
+ # ! enable the gradient of both models
+ requires_grad(self.ddpm_model, True)
+
+ self.mp_trainer.zero_grad() # !!!!
+
+ if 'img' in batch:
+ batch_size = batch['img'].shape[0]
+ else:
+ batch_size = len(batch['caption'])
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.bfloat16,
+ enabled=self.mp_trainer.use_amp):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ if 'latent' in micro:
+ vae_out = {self.latent_name: micro['latent']}
+ else:
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+
+ eps = vae_out[self.latent_name] / self.triplane_scaling_divider
+ # eps = vae_out.pop(self.latent_name)
+
+ if 'bg_plane' in vae_out:
+ eps = th.cat((eps, vae_out['bg_plane']),
+ dim=1) # include background, B 12+4 32 32
+
+ p_sample_batch = self.prepare_ddpm(eps)
+ cond = self.get_c_input(micro, bs=eps.shape[0])
+
+ # ! running diffusion forward
+ ddpm_ret = self.apply_model(p_sample_batch, cond)
+ if self.sde_diffusion.args.p_rendering_loss:
+
+ target = micro
+ pred = self.ddp_rec_model(
+ # latent=vae_out,
+ latent={
+ # **vae_out,
+ self.latent_name: ddpm_ret['pred_x0_p'],
+ 'latent_name': self.latent_name
+ },
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ with self.ddp_control_model.no_sync(): # type: ignore
+ p_vae_recon_loss, rec_loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ log_rec3d_loss_dict(rec_loss_dict)
+ # log_rec3d_loss_dict(
+ # dict(p_vae_recon_loss=p_vae_recon_loss, ))
+ loss = p_vae_recon_loss + ddpm_ret[
+ 'p_eps_objective'] # TODO, add obj_weight_t_p?
+ else:
+ loss = ddpm_ret['p_eps_objective'].mean()
+
+ # =====================================================================
+
+ self.mp_trainer.backward(loss) # joint gradient descent
+
+ # update ddpm accordingly
+ self.mp_trainer.optimize(self.opt)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.log_control_images(vae_out, p_sample_batch, micro, ddpm_ret)
+
+ @th.inference_mode()
+ def log_control_images(self, vae_out, p_sample_batch, micro, ddpm_ret):
+
+ eps_t_p, t_p, logsnr_p = (p_sample_batch[k] for k in (
+ 'eps_t_p',
+ 't_p',
+ 'logsnr_p',
+ ))
+ pred_eps_p = ddpm_ret['pred_eps_p']
+
+ if 'posterior' in vae_out:
+ vae_out.pop('posterior') # for calculating kl loss
+ vae_out_for_pred = {
+ k: v[0:1].to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ for k, v in vae_out.items()
+ }
+
+ pred = self.ddp_rec_model(latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+
+ pred_img = pred['image_raw']
+ if 'img' in micro:
+ gt_img = micro['img']
+ else:
+ gt_img = th.zeros_like(pred['image_raw'])
+
+ if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ else:
+ gt_depth = th.zeros_like(gt_img[:, 0:1, ...])
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ gt_img = self.pool_128(gt_img)
+ gt_depth = self.pool_128(gt_depth)
+ # cond = self.get_c_input(micro)
+ # hint = th.cat(cond['c_concat'], 1)
+
+ gt_vis = th.cat(
+ [
+ gt_img,
+ gt_img,
+ gt_img,
+ # self.pool_128(hint),
+ # gt_img,
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+            dim=-1)[0:1]  # TODO: depth may fail to load; values in [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ if 'bg_plane' in vae_out:
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ eps_t_p[0:1, :12] * self.triplane_scaling_divider,
+ 'bg_plane':
+ eps_t_p[0:1, 12:16] * self.triplane_scaling_divider,
+ }
+ else:
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ eps_t_p[0:1] * self.triplane_scaling_divider,
+ }
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=noised_latent,
+ # latent=eps_t_p[0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+            eps_t_p, pred_eps_p, logsnr_p) # for VAE loss, denoised latent
+
+ if 'bg_plane' in vae_out:
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ pred_x0[0:1, :12] * self.triplane_scaling_divider,
+ 'bg_plane':
+ pred_x0[0:1, 12:16] * self.triplane_scaling_divider,
+ }
+ else:
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ pred_x0[0:1] * self.triplane_scaling_divider,
+ }
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=denoised_latent,
+ # latent=pred_x0[0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat(
+ [
+ self.pool_128(img) for img in (
+ pred_img[0:1],
+ noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1], # controlnet result
+ pred_depth[0:1].repeat_interleave(3, dim=1))
+ ],
+ dim=-1) # B, 3, H, W
+
+ if 'img' in micro:
+ vis = th.cat([gt_vis, pred_vis],
+ dim=-2)[0].permute(1, 2,
+ 0).cpu() # ! pred in range[-1, 1]
+ else:
+ vis = pred_vis[0].permute(1, 2, 0).cpu()
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+        img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}.jpg'
+        Image.fromarray(vis).save(img_save_path)
+
+        # if self.cond_key == 'caption':
+        #     with open(f'{logger.get_dir()}/{self.step+self.resume_step}caption_{t_p[0].item():3}.txt', 'w') as f:
+        #         f.write(micro['caption'][0])
+
+        print('log denoised vis to: ', img_save_path)
+
+ th.cuda.empty_cache()
+
+ @th.inference_mode()
+ def eval_cldm(
+ self,
+ prompt="",
+ use_ddim=False,
+ unconditional_guidance_scale=1.0,
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ num_instances=1,
+ ):
+ self.ddpm_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False,
+ use_ddim=use_ddim))
+
+ model_kwargs = {}
+
+        if args.class_cond:
+            # note: NUM_CLASSES is never imported in this file; this branch is
+            # dead code since class_cond is hard-coded to False above.
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+ # for i, batch in enumerate(tqdm(self.eval_data)):
+
+ # use the first frame as the condition now
+ extra_kwargs = {}
+
+ uc = None
+ if args.use_ddim:
+ if unconditional_guidance_scale != 1.0:
+ uc = self.get_c_input(
+ {self.cond_key: 'None'},
+ use_text=True,
+ prompt="None",
+ bs=1, # TODO, support BS>1 later
+ force_drop_ids=np.array(
+ [ # ! make sure using dropped tokens
+ 1
+ ])) # use specific prompt for debug
+ extra_kwargs.update(
+ dict(
+ unconditional_guidance_scale=unconditional_guidance_scale,
+ unconditional_conditioning=uc, # TODO
+ # {
+ # k : unconditional_guidance_scale
+ # for k in cond.keys()
+ # }
+ ))
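+        # classifier-free guidance as assumed by the sampler wrapper:
+        #   eps = eps(x, uc) + s * (eps(x, cond) - eps(x, uc)),
+        # with s = unconditional_guidance_scale and `uc` the embedding of a
+        # dropped ("None") prompt.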
+
+ # hint = th.cat(cond['c_concat'], 1)
+
+ # record cond images
+ # broadcast to args.batch_size
+
+ for instance in range(num_instances):
+
+ if self.cond_key == 'caption':
+ if camera is not None:
+ batch = {'c': camera.clone()}
+ else:
+ if use_train_trajectory:
+ batch = next(iter(self.data))
+ else:
+ try:
+ batch = next(self.eval_data)
+ except Exception as e:
+ self.eval_data = iter(self.eval_data)
+ batch = next(self.eval_data)
+
+ if camera is not None:
+ batch['c'] = camera.clone()
+
+ # ! generate new samples
+
+ novel_view_cond = {
+ k:
+ v[0:1].to(dist_util.dev())
+ if isinstance(v, th.Tensor) else v[0:1]
+ # micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+
+ cond = self.get_c_input(
+ novel_view_cond, use_text=prompt != "",
+ prompt=prompt) # use specific prompt for debug
+
+ cond = {
+ k: cond_v.repeat_interleave(args.batch_size, 0)
+ for k, cond_v in cond.items()
+ # if k == self.conditioning_key
+ }
+
+ if self.cond_key == 'caption':
+ if prompt != '':
+ with open(
+ f'{logger.get_dir()}/triplane_{self.step+self.resume_step}_{instance}_caption.txt',
+ 'w') as f:
+ f.write(prompt)
+ else:
+ with open(
+ f'{logger.get_dir()}/triplane_{self.step+self.resume_step}_{instance}_caption.txt',
+ 'w') as f:
+ try:
+ f.write(novel_view_cond['caption'][0])
+ except Exception as e:
+ pass
+
+ elif self.cond_key == 'img':
+ torchvision.utils.save_image(
+ cond['c_concat'],
+ f'{logger.get_dir()}/{self.step + self.resume_step}_{instance}_cond.jpg',
+ normalize=True,
+ value_range=(-1, 1))
+
+ # continue
+
+ for i in range(num_samples):
+ triplane_sample = sample_fn(
+ self,
+ (
+ args.batch_size,
+ self.ddpm_model.in_channels
+ if not self.ddpm_model.roll_out else 3 *
+ self.ddpm_model.in_channels, # type: ignore
+ self.diffusion_input_size,
+ self.diffusion_input_size),
+ cond=cond,
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ # mixing_normal=True, # !
+ mixing_normal=self.ddpm_model.mixed_prediction, # !
+ device=dist_util.dev(),
+ **extra_kwargs)
+ th.cuda.empty_cache()
+
+ self.render_video_given_triplane(
+ triplane_sample,
+ self.rec_model, # compatible with join_model
+ name_prefix=
+ f'{self.step + self.resume_step}_{instance}_{i}',
+ save_img=save_img,
+ render_reference=batch,
+ export_mesh=False)
+
+ # save gt
+ # video_out = imageio.get_writer(
+ # f'{logger.get_dir()}/triplane_{self.step + self.resume_step}_{i}_reference.mp4',
+ # mode='I',
+ # fps=15,
+ # codec='libx264')
+
+ # for j in range(batch['img'].shape[0]
+ # ): # ! currently only export one plane at a time
+ # cpu_gt = batch['img'][j].cpu().permute(1,2,0).numpy()
+ # cpu_gt = (cpu_gt*127.5)+127.5
+ # video_out.append_data(cpu_gt.astype(np.uint8))
+
+ # video_out.close()
+ # del video_out
+
+ # del triplane_sample
+ # th.cuda.empty_cache()
+
+ self.ddpm_model.train()
+
+
+class TrainLoop3DDiffusionLSGM_crossattn_controlNet(
+ TrainLoop3DDiffusionLSGM_crossattn):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ normalize_clip_encoding=False,
+ scale_clip_encoding=1,
+ cfg_dropout_prob=0,
+ cond_key='img_sr',
+ use_eos_feature=False,
+ compile=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ control_model=control_model,
+ control_key=control_key,
+ only_mid_control=only_mid_control,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=resume_cldm_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ normalize_clip_encoding=normalize_clip_encoding,
+ scale_clip_encoding=scale_clip_encoding,
+ cfg_dropout_prob=cfg_dropout_prob,
+ cond_key=cond_key,
+ use_eos_feature=use_eos_feature,
+ compile=compile,
+ **kwargs)
+
+ # st()
+ self.control_model = control_model
+ self.control_key = control_key
+ self.only_mid_control = only_mid_control
+ self.control_scales = [1.0] * 13
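+        # one scale per ControlNet zero-conv output (12 encoder taps + 1 middle
+        # block in the SD-style UNet assumed here); all 1.0 means no re-weighting.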
+ self.sd_locked = True
+ self._setup_control_model()
+
+ def _setup_control_model(self):
+
+ requires_grad(self.rec_model, False)
+ requires_grad(self.ddpm_model, False)
+
+ self.mp_cldm_trainer = MixedPrecisionTrainer(
+ model=self.control_model,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=self.fp16_scale_growth,
+ use_amp=self.use_amp,
+ model_name='cldm')
+
+ self.ddp_control_model = DDP(
+ self.control_model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+
+ requires_grad(self.ddp_control_model, True)
+
+ # ! load trainable copy
+ # TODO
+ # st()
+        try:
+            logger.log("loading pretrained ControlNet (not a trainable copy).")
+            self._load_and_sync_parameters(model=self.control_model,
+                                           model_name='cldm',
+                                           resume_checkpoint=self.resume_cldm_checkpoint,
+                                           )  # if available
+        except Exception:
+            # fall back to the ControlNet-paper initialization: copy matching
+            # weights from the frozen diffusion UNet into the control branch.
+            logger.log("initializing ControlNet from a trainable copy of the diffusion model.")
+ model_state_dict = self.control_model.state_dict()
+ for k, v in self.ddpm_model.state_dict().items():
+ if k in model_state_dict.keys() and v.size(
+ ) == model_state_dict[k].size():
+ model_state_dict[k] = v
+
+ self.control_model.load_state_dict(model_state_dict)
+
+ # self._load_and_sync_parameters(
+ # model=self.control_model,
+ # model_name='ddpm') # load pre-trained SD
+
+ cldm_param = [{
+ 'name': 'cldm.parameters()',
+ 'params': self.control_model.parameters(),
+ }]
+ # if self.sde_diffusion.args.unfix_logit:
+ # self.ddpm_model.mixing_logit.requires_grad_(True)
+ # cldm_param.append({
+ # 'name': 'mixing_logit',
+ # 'params': self.ddpm_model.mixing_logit,
+ # })
+
+ self.opt_cldm = AdamW(cldm_param,
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+ if self.sd_locked:
+ del self.opt
+ del self.mp_trainer
+
+ # add control during inference
+ def apply_model_inference(self, x_noisy, t, c, model_kwargs={}):
+
+ control = self.ddp_control_model(
+ x=x_noisy,
+ # hint=th.cat(c['c_concat'], 1),
+ hint=c['c_concat'],
+ timesteps=t,
+ context=None)
+ control = [c * scale for c, scale in zip(control, self.control_scales)]
+ model_kwargs.update({'control': control})
+
+ return super().apply_model_inference(x_noisy, t, c, model_kwargs)
+
+ def apply_control_model(self, p_sample_batch, cond):
+        x_noisy, t = (p_sample_batch[k] for k in ('eps_t_p', 't_p'))
+
+ control = self.ddp_control_model(
+ x=x_noisy,
+ # hint=th.cat(cond['c_concat'], 1),
+ hint=cond['c_concat'],
+ timesteps=t,
+ context=None)
+
+ control = [c * scale for c, scale in zip(control, self.control_scales)]
+ return control
+
+ def apply_model(self, p_sample_batch, cond, model_kwargs={}):
+
+ control = self.apply_control_model(p_sample_batch,
+ cond) # len(control): 13
+ model_kwargs.update({'control': control})
+
+ return super().apply_model(p_sample_batch, cond, model_kwargs)
+
+ # cldm loss
+    def ldm_train_step(self, batch, behaviour='cano', *args, **kwargs):
+        """
+        ControlNet (cLDM) training step: optimize the diffusion eps objective,
+        plus an optional rendering loss on the AE-decoded predicted x_0.
+        """
+
+ # ! enable the gradient of both models
+ requires_grad(self.ddp_control_model, True)
+ self.mp_cldm_trainer.zero_grad() # !!!!
+
+ if 'img' in batch:
+ batch_size = batch['img'].shape[0]
+ else:
+ batch_size = len(batch['caption'])
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_cldm_trainer.use_amp):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ if 'latent' in micro:
+ vae_out = {self.latent_name: micro['latent']}
+ else:
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+
+ eps = vae_out[self.latent_name] / self.triplane_scaling_divider
+ # eps = vae_out.pop(self.latent_name)
+
+ if 'bg_plane' in vae_out:
+ eps = th.cat((eps, vae_out['bg_plane']),
+ dim=1) # include background, B 12+4 32 32
+
+ p_sample_batch = self.prepare_ddpm(eps)
+ cond = self.get_c_input(micro, bs=eps.shape[0])
+
+ # ! running diffusion forward
+ ddpm_ret = self.apply_model(p_sample_batch, cond)
+ if self.sde_diffusion.args.p_rendering_loss:
+
+ target = micro
+ pred = self.ddp_rec_model(
+ # latent=vae_out,
+ latent={
+ # **vae_out,
+ self.latent_name: ddpm_ret['pred_x0_p'],
+ 'latent_name': self.latent_name
+ },
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ with self.ddp_control_model.no_sync(): # type: ignore
+ p_vae_recon_loss, rec_loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ log_rec3d_loss_dict(rec_loss_dict)
+ # log_rec3d_loss_dict(
+ # dict(p_vae_recon_loss=p_vae_recon_loss, ))
+ loss = p_vae_recon_loss + ddpm_ret[
+ 'p_eps_objective'] # TODO, add obj_weight_t_p?
+ else:
+ loss = ddpm_ret['p_eps_objective'].mean()
+
+ # =====================================================================
+
+ self.mp_cldm_trainer.backward(loss) # joint gradient descent
+ # p self.control_model.input_hint_block[0].bias
+
+ # update ddpm accordingly
+ self.mp_cldm_trainer.optimize(self.opt_cldm)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.log_control_images(vae_out, p_sample_batch, micro, ddpm_ret)
+
+ def run_loop(self):
+ # eval camera
+ camera = th.load('eval_pose.pt', map_location=dist_util.dev())
+
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ # dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, step='ldm_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ # self.eval_ddpm_sample()
+ # self.eval_cldm(use_ddim=True, unconditional_guidance_scale=7.5, prompt="") # during training, use image as condition
+ if self.cond_key == 'caption':
+ self.eval_cldm(
+ use_ddim=False,
+ prompt="a voxelized dog",
+ # prompt="a blue plastic chair.",
+ use_train_trajectory=False,
+ camera=camera) # fix condition bug first
+ else:
+ pass # TODO
+ # self.eval_cldm(use_ddim=False,
+ # prompt="",
+ # use_train_trajectory=False,
+ # camera=camera) # fix condition bug first
+ # if self.sde_diffusion.args.train_vae:
+ # self.eval_loop()
+
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ self.save(self.mp_cldm_trainer,
+ self.mp_cldm_trainer.model_name)
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ # if self.sde_diffusion.args.train_vae:
+ # self.save(self.mp_trainer_rec,
+ # self.mp_trainer_rec.model_name)
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ # self.save(self.mp_trainer,
+ # self.mp_trainer.model_name) # rec and ddpm all fixed.
+ # st()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
diff --git a/nsr/lsgm/flow_matching_trainer.py b/nsr/lsgm/flow_matching_trainer.py
new file mode 100644
index 0000000000000000000000000000000000000000..91acd9f2e30d86debce0a39dff627efb83171f8c
--- /dev/null
+++ b/nsr/lsgm/flow_matching_trainer.py
@@ -0,0 +1,2246 @@
+"""
+https://github.com/CompVis/stable-diffusion/blob/21f890f9da3cfbeaba8e2ac3c425ee9e998d5229/ldm/models/diffusion/ddpm.py#L30
+"""
+import random
+import pytorch3d
+import copy
+import point_cloud_utils as pcu
+import cv2
+import matplotlib.pyplot as plt
+import torch
+import gc
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+import einops
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+import trimesh
+from nsr.camera_utils import generate_input_camera
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+from ldm.modules.encoders.modules import FrozenClipImageEmbedder, TextEmbedder, FrozenCLIPTextEmbedder, FrozenOpenCLIPImagePredictionEmbedder, FrozenOpenCLIPImageEmbedder
+from nsr.camera_utils import generate_input_camera, uni_mesh_path
+
+import dnnlib
+from dnnlib.util import requires_grad
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+# from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+# from .controlLDM import TrainLoop3DDiffusionLSGM_Control # joint diffusion and rec class
+from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+
+# ! add new schedulers from https://github.com/Stability-AI/generative-models
+
+from .crossattn_cldm import TrainLoop3DDiffusionLSGM_crossattn
+
+# import SD stuffs
+from typing import Any, Dict, List, Optional, Tuple, Union
+from contextlib import contextmanager
+from omegaconf import ListConfig, OmegaConf
+from sgm.modules import UNCONDITIONAL_CONFIG
+
+from sgm.util import (default, disabled_train, get_obj_from_str,
+ instantiate_from_config, log_txt_as_img)
+
+from transport import create_transport, Sampler
+import math
+
+# for gs rendering
+from utils.gs_utils.graphics_utils import getWorld2View2, getProjectionMatrix, getView2World
+from utils.general_utils import matrix_to_quaternion
+from utils.mesh_util import post_process_mesh, to_cam_open3d_compat
+from datasets.g_buffer_objaverse import focal2fov, fov2focal
+
+import open3d as o3d
+
+# from sgm.sampling_utils.demo.streamlit_helpers import init_sampling
+
+# Generate a rotation matrix for an arbitrary angle theta (in degrees) about the x-axis
+def rotation_matrix_x(theta_degrees):
+ theta = np.radians(theta_degrees) # Convert degrees to radians
+ cos_theta = np.cos(theta)
+ sin_theta = np.sin(theta)
+
+ rotation_matrix = np.array([[1, 0, 0],
+ [0, cos_theta, -sin_theta],
+ [0, sin_theta, cos_theta]])
+ return rotation_matrix
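+# quick sanity check (illustrative only): rotating the +y axis by 90 degrees
+# about x should give +z:
+#   rotation_matrix_x(90) @ np.array([0., 1., 0.])  # ~ [0., 0., 1.]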
+
+
+def sample_uniform_cameras_on_sphere(num_samples=1):
+ # Step 1: Sample azimuth angles uniformly from [0, 2*pi)
+ theta = np.random.uniform(0, 2 * np.pi, num_samples)
+
+ # Step 2: Sample cos(phi) uniformly from [-1, 1]
+ cos_phi = np.random.uniform(-1, 1, num_samples)
+
+ # Step 3: Calculate the elevation angle (phi) from cos(phi)
+ phi = np.arccos(cos_phi) # phi will be in [0, pi]
+
+    # Step 4 (left to the caller): convert spherical to Cartesian coordinates
+    # x = np.sin(phi) * np.cos(theta)
+    # y = np.sin(phi) * np.sin(theta)
+    # z = np.cos(phi)
+    # cameras = np.vstack((x, y, z)).T  # Shape: (num_samples, 3)
+
+    # return the sampled angles; callers build camera poses from (theta, phi)
+    return theta, phi
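+# a minimal usage sketch (the radius / look-at conventions are assumptions):
+#   theta, phi = sample_uniform_cameras_on_sphere(4)
+#   cam_xyz = np.stack([np.sin(phi) * np.cos(theta),
+#                       np.sin(phi) * np.sin(theta),
+#                       np.cos(phi)], axis=-1)  # (4, 3), uniform on the unit sphere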
+
+
+
+class FlowMatchingEngine(TrainLoop3DDiffusionLSGM_crossattn):
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ normalize_clip_encoding=False,
+ scale_clip_encoding=1,
+ cfg_dropout_prob=0,
+ cond_key='img_sr',
+ use_eos_feature=False,
+ compile=False,
+ snr_type='lognorm',
+ # denoiser_config,
+ # conditioner_config: Union[None, Dict, ListConfig,
+ # OmegaConf] = None,
+ # sampler_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ # loss_fn_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ control_model=control_model,
+ control_key=control_key,
+ only_mid_control=only_mid_control,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=resume_cldm_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ normalize_clip_encoding=normalize_clip_encoding,
+ scale_clip_encoding=scale_clip_encoding,
+ cfg_dropout_prob=cfg_dropout_prob,
+ cond_key=cond_key,
+ use_eos_feature=use_eos_feature,
+ compile=compile,
+ **kwargs)
+
+ # ! sgm diffusion pipeline
+ # ! reuse the conditioner
+ self.snr_type = snr_type
+ self.latent_key = 'latent'
+
+ if self.cond_key == 'caption': # ! text pretrain
+ if snr_type == 'stage2-t23d':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage2-t23d.yaml')['ldm_configs']
+ elif snr_type == 'stage1-t23d':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage1-t23d.yaml')['ldm_configs']
+ self.latent_key = 'normalized-fps-xyz' # learn xyz diff
+ else: # just simple t23d, no xyz condition
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/t23d-clipl-compat-fm.yaml')['ldm_configs']
+ else: #
+
+ # assert 'lognorm' in snr_type
+ if snr_type == 'lognorm': # by default
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat-fm-lognorm.yaml')['ldm_configs']
+ # st()
+ # if snr_type == 'lognorm-highres': # by default
+ elif snr_type == 'img-uniform-gvp': # by default
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform.yaml')['ldm_configs']
+ # self.latent_key = 'fps-xyz' # xyz diffusion
+ self.latent_key = 'normalized-fps-xyz' # to std
+
+ elif snr_type == 'img-uniform-gvp-dino': # by default
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay-dinoonly.yaml')['ldm_configs']
+ self.latent_key = 'normalized-fps-xyz' # to std
+
+ # elif snr_type == 'img-uniform-gvp-dino-xl': # by default
+ # ldm_configs = OmegaConf.load(
+ # 'sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay-dinoonly.yaml')['ldm_configs']
+ # self.latent_key = 'normalized-fps-xyz' # to std
+
+ elif snr_type == 'img-uniform-gvp-dino-stage2': # by default
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage2-i23d.yaml')['ldm_configs']
+ # self.latent_key = 'normalized-fps-xyz' # to std
+
+ elif snr_type == 'img-uniform-gvp-clay': # contains both text and image condition
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay.yaml')['ldm_configs']
+ # self.latent_key = 'fps-xyz' # xyz diffusion
+ self.latent_key = 'normalized-fps-xyz' # to std
+
+ elif snr_type == 'pcd-cond-tex':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond.yaml')['ldm_configs']
+ # 'sgm/configs/img23d-clipl-compat-fm-lognorm-336.yaml')['ldm_configs']
+
+ # ! stage-2 text-xyz conditioned
+ elif snr_type == 'stage2-t23d':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage2-t23d.yaml')['ldm_configs']
+
+ elif snr_type == 'lognorm-mv':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/mv23d-clipl-compat-fm-lognorm.yaml')['ldm_configs']
+
+ # ! mv version
+ elif snr_type == 'lognorm-mv-plucker':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm-noclip.yaml')['ldm_configs']
+ # 'sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm.yaml')['ldm_configs']
+
+ elif snr_type == 'stage1-mv-t23dpt':
+ self.latent_key = 'normalized-fps-xyz' # learn xyz diff
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage1-mv23d-t23dpt.yaml')['ldm_configs']
+
+ elif snr_type == 'stage1-mv-i23dpt':
+ self.latent_key = 'normalized-fps-xyz' # learn xyz diff
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage1-mv23d-i23dpt.yaml')['ldm_configs']
+
+ elif snr_type == 'stage1-mv-i23dpt-noi23d':
+ self.latent_key = 'normalized-fps-xyz' # learn xyz diff
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage1-mv23d-i23dpt-noi23d.yaml')['ldm_configs']
+
+ elif snr_type == 'stage2-mv-i23dpt':
+ # self.latent_key = 'normalized-fps-xyz' # learn xyz diff
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/stage2-mv23d-i23dpt.yaml')['ldm_configs']
+
+
+ else:
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat-fm.yaml')['ldm_configs']
+
+ self.loss_fn = (
+ instantiate_from_config(ldm_configs.loss_fn_config)
+ # if loss_fn_config is not None
+ # else None
+ )
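+        # a rough sketch of the linear-interpolant flow-matching objective this
+        # loss_fn is assumed to implement (exact interpolant and weighting come
+        # from the transport config): with clean latent x0 and noise x1 ~ N(0, I),
+        #   x_t = (1 - t) * x0 + t * x1
+        #   loss = E_t || v_theta(x_t, t, cond) - (x1 - x0) ||^2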
+
+ # self.denoiser = instantiate_from_config(
+ # ldm_configs.denoiser_config).to(dist_util.dev())
+
+ self.transport_sampler = Sampler(self.loss_fn.transport, guider_config=ldm_configs.guider_config)
+
+ self.conditioner = instantiate_from_config(
+ default(ldm_configs.conditioner_config,
+ UNCONDITIONAL_CONFIG)).to(dist_util.dev())
+
+ # ! setup optimizer (with cond embedder params here)
+ self._set_grad_flag()
+ self._setup_opt2()
+ self._load_model2()
+
+
+ def _set_grad_flag(self):
+ requires_grad(self.ddpm_model, True) # do not change this flag during training.
+
+ def _setup_opt(self):
+ pass # see below
+
+ def _setup_opt2(self):
+ # ! add trainable conditioner parameters
+ # https://github.com/Stability-AI/generative-models/blob/fbdc58cab9f4ee2be7a5e1f2e2787ecd9311942f/sgm/models/diffusion.py#L219
+
+ # params = list(self.ddpm_model.parameters())
+
+ # https://discuss.pytorch.org/t/how-the-pytorch-freeze-network-in-some-layers-only-the-rest-of-the-training/7088/7
+ self.opt = AdamW([{
+ 'name': 'ddpm',
+ # 'params': self.ddpm_model.parameters(),
+ 'params': filter(lambda p: p.requires_grad, self.ddpm_model.parameters()), # if you want to freeze some layers
+ },
+ ],
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+
+
+ embedder_params = []
+ for embedder in self.conditioner.embedders:
+ if embedder.is_trainable:
+ embedder_params = embedder_params + list(embedder.parameters())
+
+
+ if len(embedder_params) != 0:
+ self.opt.add_param_group(
+ {
+ 'name': 'embedder',
+ 'params': embedder_params,
+ 'lr': self.lr*0.5, # smaller lr to finetune dino/clip
+ }
+ )
+
+ print(self.opt)
+
+ def save(self, mp_trainer=None, model_name='ddpm'):
+ # save embedder params also
+ super().save(mp_trainer, model_name)
+
+ # save embedder ckpt
+ if dist_util.get_rank() == 0:
+ for embedder in self.conditioner.embedders:
+ if embedder.is_trainable:
+ # embedder_params = embedder_params + list(embedder.parameters())
+ model_name = embedder.__class__.__name__
+ filename = f"embedder_{model_name}{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(embedder.state_dict(), f)
+
+ dist_util.synchronize()
+
+ def _load_model2(self):
+
+ # ! load embedder
+ for embedder in self.conditioner.embedders:
+ if embedder.is_trainable:
+ # embedder_params = embedder_params + list(embedder.parameters())
+ model_name = embedder.__class__.__name__
+ filename = f"embedder_{model_name}{(self.step+self.resume_step):07d}.pt"
+ # embedder_FrozenDinov2ImageEmbedderMV2115000.pt
+
+ # with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ # "wb") as f:
+ # th.save(embedder.state_dict(), f)
+
+ split = self.resume_checkpoint.split("model")
+ resume_checkpoint = str(
+ Path(split[0]) / filename)
+ if os.path.exists(resume_checkpoint):
+ if dist.get_rank() == 0:
+ logger.log(
+ f"loading cond embedder from checkpoint: {resume_checkpoint}...")
+ # if model is None:
+ # model = self.model
+ embedder.load_state_dict(
+ dist_util.load_state_dict(
+ resume_checkpoint,
+ map_location=dist_util.dev(),
+ ))
+ else:
+ logger.log(f'{resume_checkpoint} not found.')
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(embedder.parameters())
+
+
+ def instantiate_cond_stage(self, normalize_clip_encoding,
+ scale_clip_encoding, cfg_dropout_prob,
+ use_eos_feature):
+ # https://github.com/CompVis/stable-diffusion/blob/21f890f9da3cfbeaba8e2ac3c425ee9e998d5229/ldm/models/diffusion/ddpm.py#L509C1-L509C46
+ # self.cond_stage_model.train = disabled_train # type: ignore
+ # if self.cond_key == 'caption':
+ # self.cond_txt_model = TextEmbedder(dropout_prob=cfg_dropout_prob,
+ # use_eos_feature=use_eos_feature)
+ # elif self.cond_key == 'img':
+ # self.cond_img_model = FrozenOpenCLIPImagePredictionEmbedder(
+ # 1, 1,
+ # FrozenOpenCLIPImageEmbedder(freeze=True,
+ # device=dist_util.dev(),
+ # init_device=dist_util.dev()))
+
+ # else: # zero-shot Text to 3D using normalized clip latent
+ # self.cond_stage_model = FrozenClipImageEmbedder(
+ # 'ViT-L/14',
+ # dropout_prob=cfg_dropout_prob,
+ # normalize_encoding=normalize_clip_encoding,
+ # scale_clip_encoding=scale_clip_encoding)
+ # self.cond_stage_model.freeze()
+
+ # self.cond_txt_model = FrozenCLIPTextEmbedder(
+ # dropout_prob=cfg_dropout_prob,
+ # scale_clip_encoding=scale_clip_encoding)
+ # self.cond_txt_model.freeze()
+ pass # initialized in the self.__init__() using SD api
+
+
+
+ # ! already merged
+ def prepare_ddpm(self, eps, mode='p'):
+ raise NotImplementedError('already implemented in self.denoiser')
+
+ # merged from noD.py
+
+ # use sota denoiser, loss_fn etc.
+ def ldm_train_step(self, batch, behaviour='cano', *args, **kwargs):
+
+ # ! enable the gradient of both models
+ # requires_grad(self.ddpm_model, True)
+ self._set_grad_flag() # more flexible
+
+ self.mp_trainer.zero_grad() # !!!!
+
+ if 'img' in batch:
+ batch_size = batch['img'].shape[0]
+ else:
+ batch_size = len(batch['caption'])
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v[i:i+self.microbatch]
+ for k, v in batch.items()
+ }
+
+ # move condition to self.dtype
+ # =================================== ae part ===================================
+ # with th.cuda.amp.autocast(dtype=th.bfloat16,
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ assert 'latent' in micro
+ # st() # torchvision.utils.save_image(micro['img'], 'tmp/img.png', normalize=True, value_range=(-1,1))
+ # vae_out = {self.latent_name: micro['latent']}
+ # else:
+ # vae_out = self.ddp_rec_model(
+ # img=micro['img_to_encoder'],
+ # c=micro['c'],
+ # behaviour='encoder_vae',
+ # ) # pred: (B, 3, 64, 64)
+
+ # eps = vae_out[self.latent_name] / self.triplane_scaling_divider
+ # ! if training xyz only
+ # eps = vae_out[self.latent_name][..., -3:] / self.triplane_scaling_divider
+
+ # ! if training texture only
+ eps = micro[self.latent_key] / self.triplane_scaling_divider
+
+ if self.cond_key == 'img-c':
+ micro['img-c'] = {
+ # 'img': micro['img'].to(self.dtype),
+ 'img': micro['mv_img'].to(self.dtype), # for compat issue
+ 'c': micro['c'].to(self.dtype),
+ }
+
+ # log_rec3d_loss_dict({
+ # f"mv-alpha/{i}": self.ddpm_model.blocks[i].mv_alpha[0] for i in range(len(self.ddpm_model.blocks))
+ # })
+
+
+ loss, loss_other_info = self.loss_fn(self.ddp_ddpm_model,
+ # self.denoiser,
+ self.conditioner,
+ eps.to(self.dtype),
+ micro) # type: ignore
+ loss = loss.mean()
+ # log_rec3d_loss_dict({})
+
+ log_rec3d_loss_dict({
+ # 'eps_mean':
+ # eps.mean(),
+ # 'eps_std':
+ # eps.std([1, 2, 3]).mean(0),
+ # 'pred_x0_std':
+ # loss_other_info['model_output'].std([1, 2, 3]).mean(0),
+ "p_loss":
+ loss,
+ })
+
+ self.mp_trainer.backward(loss) # joint gradient descent
+
+ # update ddpm accordingly
+ self.mp_trainer.optimize(self.opt)
+
+ # ! directly eval_cldm() for sampling.
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ # self.log_control_images(vae_out, micro, loss_other_info)
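+        # Step recap (sketch): per microbatch, the pre-extracted latent is scaled by
+        # 1/triplane_scaling_divider and passed with the conditioner embeddings to
+        # self.loss_fn (the flow-matching objective); gradients accumulate across
+        # microbatches, and a single optimizer step runs in mp_trainer.optimize() above.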
+
+ @th.inference_mode()
+ def log_control_images(self, vae_out, micro, ddpm_ret):
+
+ if 'posterior' in vae_out:
+ vae_out.pop('posterior') # for calculating kl loss
+ vae_out_for_pred = {self.latent_name: vae_out[self.latent_name][0:1].to(self.dtype)}
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+ pred = self.ddp_rec_model(latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+
+ assert isinstance(pred, dict)
+
+ pred_img = pred['image_raw']
+ if 'img' in micro:
+ gt_img = micro['img']
+ else:
+ gt_img = th.zeros_like(pred['image_raw'])
+
+ if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ else:
+ gt_depth = th.zeros_like(gt_img[:, 0:1, ...])
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ gt_img = self.pool_128(gt_img)
+ gt_depth = self.pool_128(gt_depth)
+        # (the controlnet hint concat was removed; see get_c_input for the original logic)
+        gt_vis = th.cat(
+            [
+                gt_img,
+                gt_img,
+                gt_img,
+                gt_depth.repeat_interleave(3, dim=1)
+            ],
+            dim=-1)[0:1]  # TODO: depth may fail to load; values are in [0, 1]
+
+        noised_latent, sigmas, x_start = [
+            ddpm_ret[k] for k in ['noised_input', 'sigmas', 'model_output']
+        ]
+
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ noised_latent[0:1].to(self.dtype) * self.triplane_scaling_divider,
+ }
+
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ x_start[0:1].to(self.dtype) * self.triplane_scaling_divider,
+ }
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=noised_latent,
+ behaviour=self.render_latent_behaviour)
+
+            # decode the denoised latent (pred_xstart) for visualization
+            denoised_ae_pred = self.ddp_rec_model(
+                img=None,
+                c=micro['c'][0:1],
+                latent=denoised_latent,
+                behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat(
+ [
+ self.pool_128(img) for img in (
+ pred_img[0:1],
+ noised_ae_pred['image_raw'][0:1],
+                    denoised_ae_pred['image_raw'][0:1],  # denoised prediction
+ pred_depth[0:1].repeat_interleave(3, dim=1))
+ ],
+ dim=-1) # B, 3, H, W
+
+ if 'img' in micro:
+ vis = th.cat([gt_vis, pred_vis],
+ dim=-2)[0].permute(1, 2,
+ 0).cpu() # ! pred in range[-1, 1]
+ else:
+ vis = pred_vis[0].permute(1, 2, 0).cpu()
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+        img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{sigmas[0].item():.3f}.jpg'
+ Image.fromarray(vis).save(img_save_path)
+
+ # if self.cond_key == 'caption':
+ # with open(f'{logger.get_dir()}/{self.step+self.resume_step}caption_{t_p[0].item():3}.txt', 'w') as f:
+ # f.write(micro['caption'][0])
+
+        print('logged denoised vis to: ', img_save_path)
+
+ th.cuda.empty_cache()
+
+ @th.no_grad()
+ def sample(
+ self,
+ cond: Dict,
+ uc: Union[Dict, None] = None,
+ batch_size: int = 16,
+ shape: Union[None, Tuple, List] = None,
+        use_cfg=True,
+        cfg_scale=4.0,  # default guidance scale in SiT
+ seed=42,
+ **kwargs,
+ ):
+
+ logger.log(f'cfg_scale: {cfg_scale}, seed: {seed}')
+
+ # self.sampler
+ sample_fn = self.transport_sampler.sample_ode(num_steps=250, cfg=True) # default ode sampling setting.
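+        # Sampling sketch: sample_ode integrates the learned velocity field with a
+        # fixed 250-step ODE solver; with cfg=True the model runs on the stacked
+        # (cond, uncond) batch built below, and forward_with_cfg combines the two
+        # predictions (presumably the standard v_uncond + cfg_scale * (v_cond - v_uncond)
+        # rule) at every step.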
+
+        th.manual_seed(seed)  # reproducible sampling
+        zs = th.randn(batch_size, *shape).to(dist_util.dev()).to(self.dtype)
+        assert use_cfg
+        model_fn = self.ddpm_model.forward_with_cfg  # default
+
+        # ! mirror VanillaCFG.prepare_inputs: stack cond and uncond along the batch
+        # dim so a single forward pass evaluates both branches
+        c_out = {}
+        for k in cond:
+            c_out[k] = th.cat((cond[k], uc[k]), 0)
+        sample_model_kwargs = {'context': c_out, 'cfg_scale': cfg_scale}
+        zs = th.cat([zs, zs], 0)
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ samples = sample_fn(zs, model_fn, **sample_model_kwargs)[-1]
+ samples, _ = samples.chunk(2, dim=0) # Remove null class samples
+
+        return samples * self.triplane_scaling_divider
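+    # note: training normalizes eps = latent / triplane_scaling_divider, so the
+    # sampler rescales its output by the same divider to return VAE-space latents.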
+
+ @th.inference_mode()
+ def eval_cldm(
+ self,
+ prompt="",
+        unconditional_guidance_scale=4.0,
+ seed=42,
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ num_instances=1,
+ export_mesh=False,
+ ):
+ # ! slightly modified for new API. combined with
+ # /cpfs01/shared/V2V/V2V_hdd/yslan/Repo/generative-models/sgm/models/diffusion.py:249 log_images()
+ # TODO, support batch_size > 1
+
+ self.ddpm_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False))
+
+ model_kwargs = {}
+
+ uc = None
+ log = dict()
+
+ ucg_keys = [self.cond_key] # i23d
+
+        if self.cond_key in ['caption', 'img-xyz']:
+            batch_c = next(self.data)  # ! use the training set to evaluate t23d for now
+        elif self.cond_key == 'img-caption':
+            batch = next(self.data)  # draw a batch for the image condition (was undefined here)
+            batch_c = {'caption': prompt, 'img': batch['img'].to(dist_util.dev()).to(self.dtype)}
+ else:
+ batch = next(self.data) # random cond here
+ if self.cond_key == 'img-c':
+ batch_c = {
+ self.cond_key: {
+ # 'img': batch['img'].to(self.dtype).to(dist_util.dev()),
+ 'img': batch['mv_img'].to(self.dtype).to(dist_util.dev()),
+ 'c': batch['c'].to(self.dtype).to(dist_util.dev()),
+ },
+ 'img': batch['img'].to(self.dtype).to(dist_util.dev()) # required by clip
+ }
+
+ else:
+ batch_c = {self.cond_key: batch[self.cond_key].to(dist_util.dev()).to(self.dtype)}
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ c, uc = self.conditioner.get_unconditional_conditioning(
+ batch_c,
+ force_uc_zero_embeddings=ucg_keys
+ if len(self.conditioner.embedders) > 0 else [],
+ )
+
+ sampling_kwargs = {'seed': seed, 'cfg_scale': unconditional_guidance_scale}
+
+        N = 3  # hard-coded sample count; make configurable later
+ z_shape = (
+ N,
+ self.ddpm_model.in_channels if not self.ddpm_model.roll_out else
+ 3 * self.ddpm_model.in_channels, # type: ignore
+ self.diffusion_input_size,
+ self.diffusion_input_size)
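+        # with roll_out, the three triplane planes are presumably concatenated
+        # along the channel dim, hence 3x in_channels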
+
+        for k in c:
+            if isinstance(c[k], th.Tensor):
+                # keep only the first N conditions (c and uc share the same keys)
+                c[k], uc[k] = map(lambda y: y[k][:N].to(dist_util.dev()),
+                                  (c, uc))
+
+ samples = self.sample(c,
+ shape=z_shape[1:],
+ uc=uc,
+ batch_size=N,
+ **sampling_kwargs)
+ # st() # do rendering first
+
+
+ # ! get c
+ if 'img' in self.cond_key:
+ img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}_imgcond.jpg'
+            if self.cond_key == 'img-c':  # a substring test on 'c' would also match 'img-caption'
+                torchvision.utils.save_image(batch_c['img'][0], img_save_path, value_range=(-1,1), normalize=True, padding=0)  # torch.Size([24, 6, 3, 256, 256])
+                th.save(batch_c['img-c']['c'][0], f'{logger.get_dir()}/{self.step+self.resume_step}_c.pt')
+            else:
+                torchvision.utils.save_image(batch_c['img'][0:1], img_save_path, value_range=(-1,1), normalize=True, padding=0)
+
+ assert camera is not None
+ batch = {'c': camera.clone()}
+
+        # (the train/eval-trajectory fallback was removed; a fixed eval camera is required)
+
+
+ # rendering
+ for i in range(samples.shape[0]):
+ th.cuda.empty_cache()
+
+ # ! render sampled latent
+ name_prefix = f'{self.step + self.resume_step}_{i}'
+
+ if self.cond_key == 'caption':
+ name_prefix = f'{name_prefix}_{prompt}'
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ self.render_video_given_triplane(
+ samples[i:i+1].to(self.dtype), # default version
+ self.rec_model, # compatible with join_model
+ name_prefix=name_prefix,
+ save_img=save_img,
+ render_reference=batch,
+ export_mesh=False)
+
+ self.ddpm_model.train()
+
+
+class FlowMatchingEngine_gs(FlowMatchingEngine):
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ normalize_clip_encoding=False,
+ scale_clip_encoding=1,
+ cfg_dropout_prob=0,
+ cond_key='img_sr',
+ use_eos_feature=False,
+ compile=False,
+ snr_type='lognorm',
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ control_model=control_model,
+ control_key=control_key,
+ only_mid_control=only_mid_control,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=resume_cldm_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ normalize_clip_encoding=normalize_clip_encoding,
+ scale_clip_encoding=scale_clip_encoding,
+ cfg_dropout_prob=cfg_dropout_prob,
+ cond_key=cond_key,
+ use_eos_feature=use_eos_feature,
+ compile=compile,
+ snr_type=snr_type,
+ **kwargs)
+
+        self.gs_bg_color = th.tensor([1, 1, 1], dtype=th.float32, device=dist_util.dev())
+        self.latent_name = 'latent_normalized'  # normalized triplane latent
+
+        # normalization statistics computed over the training latents; only the
+        # scalar xyz std is used to undo the point normalization at inference
+        self.xyz_std = 0.164
+
+        # per-channel KL-latent statistics (kept for debugging / unnormalization)
+ self.kl_mean = th.Tensor([ 0.0184, 0.0024, 0.0926, 0.0517, 0.1781, 0.7137, -0.0355, 0.0267,
+ 0.0183, 0.0164, -0.5090, 0.2406, 0.2733, -0.0256, -0.0285, 0.0761]).reshape(1,16).float().to(dist_util.dev())
+ self.kl_std = th.Tensor([1.0018, 1.0309, 1.3001, 1.0160, 0.8182, 0.8023, 1.0591, 0.9789, 0.9966,
+ 0.9448, 0.8908, 1.4595, 0.7957, 0.9871, 1.0236, 1.2923]).reshape(1,16).float().to(dist_util.dev())
+
+ # ! for surfel-gs rendering
+ self.zfar = 100.0
+ self.znear = 0.01
+
+    def unnormalize_pcd_act(self, x):
+        # undo the point normalization applied during VAE training
+        return x * self.xyz_std
+
+    def unnormalize_kl_feat(self, latent):
+        # undo the per-channel standardization of the KL latent
+        return (latent * self.kl_std) + self.kl_mean
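+    # Round-trip sketch (illustrative values): a standardized latent value
+    # z[..., 0] = 1.0 maps back to 1.0 * kl_std[0] + kl_mean[0] = 1.0018 + 0.0184 ~ 1.02,
+    # and a normalized point coordinate x_norm maps back to x_norm * 0.164.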
+
+ @th.inference_mode()
+ def eval_cldm(
+ self,
+ prompt="Yellow rubber duck",
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ num_instances=1,
+ export_mesh=False,
+ ):
+ self.ddpm_model.eval()
+
+
+ model_kwargs = {}
+
+ uc = None
+ log = dict()
+
+ ucg_keys = [self.cond_key] # i23d
+
+ if self.cond_key == 'caption':
+ if prompt == '':
+ batch = next(self.data) # random cond here
+ batch_c = {self.cond_key: prompt,
+ 'fps-xyz': batch['fps-xyz'].to(self.dtype).to(dist_util.dev()),
+ }
+            else:
+                # ! TODO: update the cascaded-generation fps-xyz loading; manual load for now.
+                batch_c = {
+                    self.cond_key: prompt
+                }
+                if self.latent_key == 'latent':  # stage 2
+                    batch = next(self.data)  # random cond here
+                    gt_xyz = batch['fps-xyz'][0:1]
+                    gt_kl_latent = batch['latent'][0:1]
+
+                    # replace the user prompt with the dataset's GT caption so it
+                    # matches the GT fps-xyz condition used below
+                    prompt = batch[self.cond_key][0:1]
+                    batch_c[self.cond_key] = prompt
+
+                    # ! to log the two-stage cascaded result instead, set cascaded=True
+                    # and point fps_xyz_output_prefix at your stage-1 output pcd logdir
+                    cascaded = False
+                    if cascaded:  # ! use stage-1 samples as the xyz condition
+                        fps_xyz_output_prefix = ''
+                        stage1_num_steps = '600000'
+
+                        fps_xyz = torch.from_numpy(pcu.load_mesh_v(f'{fps_xyz_output_prefix}/{stage1_num_steps}_0_{prompt}.ply')).clip(-0.45, 0.45).unsqueeze(0)
+
+ batch_c.update({
+ 'fps-xyz': fps_xyz.to(self.dtype).to(dist_util.dev())
+ })
+                    else:
+                        # use the GT point cloud as the condition
+                        for k in ['fps-xyz']:
+                            batch_c[k] = batch[k][0:1].to(self.dtype).to(dist_util.dev())
+                        batch_c[self.cond_key] = prompt
+ else:
+ batch = next(self.data) # random cond here
+
+            # ! debug hook: GT xyz and KL latents can be fetched here for disentangled checks
+
+ if self.cond_key == 'img-c':
+ prompt = batch['caption'][0:1]
+ batch_c = {
+ self.cond_key: {
+ 'img': batch['mv_img'][0:1].to(self.dtype).to(dist_util.dev()),
+ 'c': batch['c'][0:1].to(self.dtype).to(dist_util.dev()),
+ },
+ 'img': batch['img'][0:1].to(self.dtype).to(dist_util.dev()),
+ 'caption': prompt,
+ 'fps-xyz': batch['fps-xyz'][0:1].to(self.dtype).to(dist_util.dev())
+ }
+
+ elif self.cond_key == 'img-caption':
+ batch_c = {'caption': prompt, 'img': batch['img'].to(dist_util.dev()).to(self.dtype)}
+
+            elif self.cond_key == 'img-xyz':
+                # to condition on an externally sampled point cloud instead of the
+                # GT one, load it here, e.g.
+                #   fps_xyz = torch.from_numpy(pcu.load_mesh_v(<stage-1 .ply>)).clip(-0.45, 0.45).unsqueeze(0)
+                # (the loaded xyz may also be edited, e.g. scaling one axis)
+                batch_c = {
+                    'img': batch['img'][0:1].to(self.dtype).to(dist_util.dev()),
+                    'fps-xyz': batch['fps-xyz'][0:1].to(self.dtype).to(dist_util.dev()),
+                }
+
+            else:
+                batch_c = {self.cond_key: batch[self.cond_key][0:1].to(dist_util.dev()).to(self.dtype)}
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ c, uc = self.conditioner.get_unconditional_conditioning(
+ batch_c,
+ force_uc_zero_embeddings=ucg_keys
+ if len(self.conditioner.embedders) > 0 else [],
+ )
+
+ sampling_kwargs = {}
+
+        N = num_samples
+        z_shape = (N, 768, self.ddpm_model.in_channels)  # (B, L, C): 768 latent points per shape
+
+        for k in c:
+            if isinstance(c[k], th.Tensor):
+                assert c[k].shape[0] == 1  # ! a single condition is expected
+                # repeat the condition N times to draw N samples from it
+                c[k], uc[k] = map(lambda y: y[k].repeat_interleave(N, 0).to(dist_util.dev()),
+                                  (c, uc))
+
+ samples = self.sample(c,
+ shape=z_shape[1:],
+ uc=uc,
+ batch_size=N,
+ **sampling_kwargs)
+
+ # ! get c
+ if 'img' in self.cond_key:
+ img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}_imgcond.jpg'
+            if self.cond_key == 'img-c':  # a substring test on 'c' would also match 'img-caption'
+ mv_img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}_mv-imgcond.jpg'
+ torchvision.utils.save_image(batch_c['img-c']['img'][0], mv_img_save_path, value_range=(-1,1), normalize=True, padding=0) # torch.Size([24, 6, 3, 256, 256])
+ torchvision.utils.save_image(batch_c['img'][0], img_save_path, value_range=(-1,1), normalize=True, padding=0) # torch.Size([24, 6, 3, 256, 256])
+ else:
+ torchvision.utils.save_image(batch_c['img'], img_save_path, value_range=(-1,1), normalize=True, padding=0)
+
+ assert camera is not None
+ batch = {'c': camera.clone()}
+
+ # rendering
+ for i in range(samples.shape[0]):
+ th.cuda.empty_cache()
+
+ # ! render sampled latent
+ name_prefix = f'{self.step + self.resume_step}_{i}'
+
+ # if self.cond_key in ['caption', 'img-c']:
+ if self.cond_key in ['caption']:
+ if isinstance(prompt, list):
+ name_prefix = f'{name_prefix}_{"-".join(prompt[0].split())}'
+ else:
+ name_prefix = f'{name_prefix}_{"-".join(prompt.split())}'
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+                # ! todo: transform to the gs camera format
+                if self.latent_key != 'latent':  # stage-1: normalized xyz only
+                    pcu.save_mesh_v(f'{logger.get_dir()}/{name_prefix}.ply', self.unnormalize_pcd_act(samples[i]).detach().cpu().float().numpy())
+                    logger.log(f'point cloud saved to {logger.get_dir()}/{name_prefix}.ply')
+                else:
+                    # stage-2: render the sampled KL feature concatenated with the
+                    # conditioning fps-xyz points
+                    self.render_gs_video_given_latent(
+                        th.cat([samples[i:i+1], batch_c['fps-xyz'][i:i+1]], dim=-1),
+                        self.rec_model,  # compatible with join_model
+                        name_prefix=name_prefix,
+                        save_img=save_img,
+                        render_reference=batch,
+                        export_mesh=False)
+
+ gc.collect()
+ self.ddpm_model.train()
+
+ @torch.no_grad()
+ def export_mesh_from_2dgs(self, all_rgbs, all_depths, all_alphas, cam_pathes, idx, i):
+ # https://github.com/autonomousvision/LaRa/blob/main/evaluation.py
+        # limit BLAS/OpenMP threads to avoid a TSDF CPU-hanging bug
+        n_thread = 1
+        os.environ["MKL_NUM_THREADS"] = f"{n_thread}"
+        os.environ["NUMEXPR_NUM_THREADS"] = f"{n_thread}"
+        os.environ["OMP_NUM_THREADS"] = "4"
+        os.environ["VECLIB_MAXIMUM_THREADS"] = f"{n_thread}"
+        os.environ["OPENBLAS_NUM_THREADS"] = f"{n_thread}"
+
+ # copied from: https://github.com/hbb1/2d-gaussian-splatting/blob/19eb5f1e091a582e911b4282fe2832bac4c89f0f/render.py#L23
+ logger.log("exporting mesh ...")
+ # os.makedirs(train_dir, exist_ok=True)
+ train_dir = logger.get_dir()
+
+        # bounding box used for TSDF truncation; g-objaverse assets live in
+        # [-0.45, 0.45]^3, slightly inflated to avoid clipping surfaces
+        aabb = [-0.45, -0.45, -0.45, 0.45, 0.45, 0.45]
+        self.aabb = np.array(aabb).reshape(2, 3) * 1.1
+
+        # extract the mesh via bounded TSDF fusion and save it
+        name = f'{idx}/mesh_raw.obj'
+ mesh = self.extract_mesh_bounded(all_rgbs, all_depths, all_alphas, cam_pathes)
+
+ o3d.io.write_triangle_mesh(os.path.join(train_dir, name), mesh)
+ logger.log("mesh saved at {}".format(os.path.join(train_dir, name)))
+        # post-process the mesh, keeping only the largest clusters
+        mesh_post = post_process_mesh(mesh)
+
+ mesh_vertices = np.asarray(mesh_post.vertices) # Convert vertices to a numpy array
+ rotated_vertices = mesh_vertices @ rotation_matrix_x(-90).T
+ mesh_post.vertices = o3d.utility.Vector3dVector(rotated_vertices) # Update vertices
+ post_mesh_path = os.path.join(train_dir, name.replace('_raw.obj', '.obj'))
+
+ o3d.io.write_triangle_mesh(post_mesh_path, mesh_post)
+
+ logger.log("mesh post processed saved at {}".format(post_mesh_path))
+ return post_mesh_path
+
+
+ @torch.no_grad()
+ def extract_mesh_bounded(self, rgbmaps, depthmaps, alpha_maps, cam_pathes, voxel_size=0.004, sdf_trunc=0.02, depth_trunc=3, alpha_thres=0.08, mask_backgrond=False):
+        """
+        Perform TSDF fusion given a fixed depth range, as used in the paper.
+
+        voxel_size: edge length of a TSDF voxel
+        sdf_trunc: SDF truncation value
+        depth_trunc: maximum depth range; should depend on the scene's scale
+        mask_backgrond: whether to mask the background; only works when the dataset has masks
+
+        returns an o3d triangle mesh
+        """
+
+        if self.aabb is not None:  # follow LaRa's bounded setting
+            center = self.aabb.mean(0)
+            radius = np.linalg.norm(self.aabb[1] - self.aabb[0]) * 0.5
+            voxel_size = radius / 160  # coarser grid; radius / 192 leaves fewer holes
+            sdf_trunc = voxel_size * 12  # larger truncation (e.g. * 16) closes holes but integrates slower
+            print("using aabb")
+
+ volume = o3d.pipelines.integration.ScalableTSDFVolume(
+ voxel_length= voxel_size,
+ sdf_trunc=sdf_trunc,
+ color_type=o3d.pipelines.integration.TSDFVolumeColorType.RGB8
+ )
+
+        print("Running tsdf volume integration ...")
+        print(f'voxel_size: {voxel_size}')
+        print(f'sdf_trunc: {sdf_trunc}')
+        print(f'depth_trunc: {depth_trunc}')
+
+        # ! use uni_mesh_path camera trajectories, following LaRa (Chen et al., ECCV'24)
+        for i, cam in tqdm(enumerate(cam_pathes), desc="TSDF integration progress"):
+            cam = self.c_to_3dgs_format(cam)
+            cam_o3d = to_cam_open3d_compat(cam)
+
+            rgb = rgbmaps[i][0]
+            depth = depthmaps[i][0]
+            alpha = alpha_maps[i][0]
+
+            # zero out depth where the rendered alpha is low (background / empty space)
+            depth[(alpha < alpha_thres)] = 0
+            if self.aabb is not None:
+                campos = cam['cam_pos'].cpu().numpy()
+                # truncate depth at the camera-to-center distance plus the scene radius
+                depth_trunc = np.linalg.norm(campos - center, axis=-1) + radius
+
+ # make open3d rgbd
+ rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(
+ o3d.geometry.Image(np.asarray(np.clip(rgb.permute(1,2,0).cpu().numpy(), 0.0, 1.0) * 255, order="C", dtype=np.uint8)),
+ o3d.geometry.Image(np.asarray(depth.permute(1,2,0).cpu().numpy(), order="C")),
+ depth_trunc = depth_trunc,
+ convert_rgb_to_intensity=False,
+ depth_scale = 1.0
+ )
+
+ volume.integrate(rgbd, intrinsic=cam_o3d.intrinsic, extrinsic=cam_o3d.extrinsic)
+
+ mesh = volume.extract_triangle_mesh()
+ return mesh
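+    # Usage sketch (assumes renders produced by render_gs_video_given_latent):
+    #   cams = uni_mesh_path(10)
+    #   rgbs, depths, alphas, _, _ = self.render_gs_video_given_latent(latent, self.rec_model, render_reference=cams)
+    #   mesh = self.extract_mesh_bounded(rgbs, depths, alphas, cams)
+    #   o3d.io.write_triangle_mesh('fused.obj', mesh)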
+
+
+ @th.inference_mode()
+ def render_gs_video_given_latent(self,
+ planes,
+ rec_model,
+ name_prefix='0',
+ save_img=False,
+ render_reference=None,
+ export_mesh=False,
+ output_dir=None,
+ for_fid=False,):
+
+ batch_size, L, C = planes.shape
+
+        # split the sampled latent into the KL-regularized feature part and the
+        # query point-cloud xyz part (earlier variants re-applied the
+        # unnormalize_* helpers here; the current pipeline feeds pre-scaled values)
+        ddpm_latent = {self.latent_name: planes[..., :-3],
+                       'query_pcd_xyz': planes[..., -3:]}
+
+ ddpm_latent.update(rec_model(latent=ddpm_latent, behaviour='decode_gs_after_vae_no_render'))
+
+        # (optional editing hooks: the upsampled gaussian xyz can be rescaled or
+        # warped here before rendering, e.g. stretching the z axis)
+        fine_scale = 'gaussians_upsampled_3'
+        pred_gaussians_xyz = ddpm_latent[fine_scale][..., :3]
+        fine_gs = ddpm_latent[fine_scale]
+
+ if output_dir is None:
+ output_dir = logger.get_dir()
+
+        # save the raw per-gaussian parameters (L, 13) ...
+        np.save(f'{output_dir}/{name_prefix}-gaussian.npy', fine_gs.cpu().numpy()[0])
+
+        # ... and an RGB point-cloud preview (glb), rotated -90 deg about x for the viewer
+        fine_gs_numpy = fine_gs.cpu().numpy()
+        rgb_xyz_path = f'{output_dir}/{name_prefix}-gaussian-pcd.glb'
+        vtx = np.transpose(rotation_matrix_x(-90) @ np.transpose(fine_gs_numpy[0, :, :3]))
+        cloud = trimesh.PointCloud(vtx, colors=fine_gs_numpy[0, :, 10:13])
+        _ = cloud.export(rgb_xyz_path)
+
+        video_path = f'{output_dir}/{name_prefix}-gs.mp4'
+
+        try:
+            video_out = imageio.get_writer(
+                video_path,
+                mode='I',
+                fps=15,
+                codec='libx264')
+        except Exception as e:
+            # some captions are too long to be parsed as file names
+            logger.log(e)
+
+        # ! for FID: uniform-sphere camera sampling (quoted out; not adopted later)
+        '''
+        azimuths, elevations = [], []
+        frame_number = 10
+        for i in range(frame_number):  # 1030 * 5 * 10 views for FID-50K
+            azi, elevation = sample_uniform_cameras_on_sphere()
+            azi, elevation = azi[0] / np.pi * 180, (elevation[0] - np.pi * 0.5) / np.pi * 180  # elevation in [-90, 90] deg
+            azimuths.append(azi)
+            elevations.append(elevation)
+        azimuths, elevations = np.array(azimuths), np.array(elevations)
+
+        zero123pp_pose, _ = generate_input_camera(1.8, [[elevations[i], azimuths[i]] for i in range(frame_number)], fov=30)
+        K = th.Tensor([1.3889, 0.0000, 0.5000, 0.0000, 1.3889, 0.5000, 0.0000, 0.0000, 0.0039]).to(zero123pp_pose)  # fixed intrinsics
+        render_reference = th.cat([zero123pp_pose.reshape(frame_number, -1), K.unsqueeze(0).repeat(frame_number, 1)], dim=-1).cpu().numpy()
+        '''
+
+        assert render_reference is not None
+
+        all_rgbs, all_depths, all_alphas = [], [], []
+
+        for i, micro_c in enumerate(tqdm(render_reference)):
+            c = self.c_to_3dgs_format(micro_c)
+            for k in c.keys():  # move to device (all 40 views could be rendered jointly)
+                if isinstance(c[k], th.Tensor) and k != 'tanfov':
+                    c[k] = c[k].unsqueeze(0).unsqueeze(0).to(dist_util.dev())
+            c['tanfov'] = th.tensor(c['tanfov']).to(dist_util.dev())
+
+ pred = rec_model(
+ img=None,
+ c=c, # TODO, to dict
+ latent=ddpm_latent, # render gs
+ behaviour='triplane_dec',
+ bg_color=self.gs_bg_color,
+ render_all_scale=True, # for better visualization
+ )
+
+            # the renderer returns one dict per LoD; the last key is the finest scale
+            fine_scale_key = list(pred.keys())[-1]
+
+ all_rgbs.append(einops.rearrange(pred[fine_scale_key]['image'], 'B V ... -> (B V) ...'))
+ all_depths.append(einops.rearrange(pred[fine_scale_key]['depth'], 'B V ... -> (B V) ...'))
+ all_alphas.append(einops.rearrange(pred[fine_scale_key]['alpha'], 'B V ... -> (B V) ...'))
+
+            all_pred_vis = {}
+            for key in ['gaussians_base', fine_scale_key]:  # show only the coarsest and finest LoDs
+                pred_scale = pred[key]
+ for k in pred_scale.keys():
+ pred_scale[k] = einops.rearrange(pred_scale[k], 'B V ... -> (B V) ...') # merge
+
+ pred_vis = self._make_vis_img(pred_scale)
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ all_pred_vis[key] = vis
+
+ all_pred_vis_concat = np.concatenate([cv2.resize(all_pred_vis[k][0], (512*3, 512)) for k in all_pred_vis.keys()], axis=0)
+
+ video_out.append_data(all_pred_vis_concat)
+
+ if save_img: # for fid
+ for idx in range(len(all_rgbs)):
+ sampled_img = Image.fromarray(
+ (all_rgbs[idx][0].permute(1, 2, 0).cpu().numpy() *
+ 255).clip(0, 255).astype(np.uint8))
+ sampled_img.save(os.path.join(output_dir,f'{name_prefix}-{idx}.jpg'))
+
+
+        video_out.close()
+        print('logged video to: ', video_path)
+
+ del video_out, pred, pred_vis, vis
+ return all_rgbs, all_depths, all_alphas, video_path, rgb_xyz_path
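+    # returns the per-view rgb/depth/alpha renders (reused by the TSDF mesh export),
+    # plus the paths of the exported turntable video and rgb point-cloud preview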
+
+ @th.no_grad()
+ def _make_vis_img(self, pred):
+
+        # colorize depth with viridis after per-image min-max normalization
+        pred_depth = pred['image_depth']
+        pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+                                                        pred_depth.min())
+
+ pred_depth = pred_depth.cpu()[0].permute(1, 2, 0).numpy()
+ pred_depth = (plt.cm.viridis(pred_depth[..., 0])[..., :3]) * 2 - 1
+ pred_depth = th.from_numpy(pred_depth).to(
+ pred['image_raw'].device).permute(2, 0, 1).unsqueeze(0)
+
+ gen_img = pred['image_raw']
+ rend_normal = pred['rend_normal']
+
+ pred_vis = th.cat(
+ [
+ gen_img,
+ rend_normal,
+ pred_depth,
+ ],
+ dim=-1) # B, 3, H, W
+
+ return pred_vis
+
+
+ def _set_grad_flag(self):
+        requires_grad(self.ddpm_model, True)  # train the full denoiser
+
+ @th.inference_mode()
+ def sample_and_save(self, batch_c, ucg_keys, num_samples, camera, save_img, idx=0, save_dir='', export_mesh=False, stage1_idx=0, cfg_scale=4.0, seed=42):
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ c, uc = self.conditioner.get_unconditional_conditioning(
+ batch_c,
+ force_uc_zero_embeddings=ucg_keys
+ if len(self.conditioner.embedders) > 0 else [],
+ )
+
+ sampling_kwargs = {
+ 'cfg_scale': cfg_scale, # default value in SiT
+ 'seed': seed,
+ }
+
+        N = num_samples
+        z_shape = (N, 768, self.ddpm_model.in_channels)  # (B, L, C): 768 latent points per shape
+
+        for k in c:
+            if isinstance(c[k], th.Tensor):
+                assert c[k].shape[0] == 1  # ! a single condition is expected
+                # repeat the condition N times to draw N samples from it
+                c[k], uc[k] = map(lambda y: y[k].repeat_interleave(N, 0).to(dist_util.dev()),
+                                  (c, uc))
+
+ samples = self.sample(c,
+ shape=z_shape[1:],
+ uc=uc,
+ batch_size=N,
+ **sampling_kwargs)
+
+ # ! get c
+ if save_dir == '':
+ save_dir = logger.get_dir()
+
+        if 'img' in self.cond_key:
+            img_save_path = f'{save_dir}/{idx}/imgcond.jpg'
+            os.makedirs(f'{save_dir}/{idx}', exist_ok=True)
+            if self.cond_key == 'img-c':  # a substring test on 'c' would also match 'img-caption'
+                torchvision.utils.save_image(batch_c['img-c']['img'][0], img_save_path, value_range=(-1,1), normalize=True, padding=0)  # torch.Size([24, 6, 3, 256, 256])
+            else:
+                torchvision.utils.save_image(batch_c['img'], img_save_path, value_range=(-1,1), normalize=True, padding=0)
+
+ assert camera is not None
+ # batch = {'c': camera.clone()}
+
+ # rendering
+ for i in range(samples.shape[0]):
+ th.cuda.empty_cache()
+
+ if self.cond_key in ['caption']:
+ name_prefix = f'{batch_c["caption"]}_sample-{stage1_idx}-{i}'
+            else:
+                # ! render sampled latent under a per-instance folder
+                name_prefix = f'{idx}/sample-{stage1_idx}-{i}'
+
+            # fixed camera path (uni_mesh_path) shared by rendering and TSDF fusion
+            cam_pathes = uni_mesh_path(10)
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+                # ! todo: transform to the gs camera format
+                if self.latent_key != 'latent':  # stage-1: normalized xyz only
+                    # export a glb for the gradio viewer (pcu fails on py=3.9)
+                    pcd_export_dir = f'{save_dir}/{name_prefix}.glb'
+                    vtx = self.unnormalize_pcd_act(samples[i]).detach().cpu().float().numpy()
+                    cloud = trimesh.PointCloud(vtx @ rotation_matrix_x(-90).T, colors=np.ones_like(vtx) * 0.75)
+                    _ = cloud.export(pcd_export_dir)
+                    logger.log(f'stage-1 glb point cloud saved to {pcd_export_dir}')
+
+                    # also export an un-rotated ply for stage-2 cascaded loading
+                    pcd_export_dir_forstage1 = f'{save_dir}/{name_prefix}.ply'
+                    cloud = trimesh.PointCloud(vtx)
+                    _ = cloud.export(pcd_export_dir_forstage1)
+                    logger.log(f'point cloud saved to {pcd_export_dir_forstage1}')
+                    return pcd_export_dir
+                else:
+                    # stage-2: render the sampled KL feature conditioned on fps-xyz
+                    all_rgbs, all_depths, all_alphas, video_path, rgb_xyz_path = self.render_gs_video_given_latent(
+                        th.cat([samples[i:i+1], batch_c['fps-xyz'][0:1]], dim=-1),
+                        self.rec_model,  # compatible with join_model
+                        name_prefix=name_prefix,
+                        save_img=save_img,
+                        render_reference=cam_pathes,
+                        export_mesh=False)
+
+                    if export_mesh:
+                        post_mesh_path = self.export_mesh_from_2dgs(all_rgbs, all_depths, all_alphas, cam_pathes, idx, i)
+                    else:
+                        post_mesh_path = ''
+
+                    return video_path, rgb_xyz_path, post_mesh_path
+
+
+ @th.inference_mode()
+ def eval_and_export(
+ self,
+ prompt="Yellow rubber duck",
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ stage_1_output_dir='',
+ num_instances=1,
+ export_mesh=False,
+ ):
+ self.ddpm_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False))
+
+ model_kwargs = {}
+
+ uc = None
+ log = dict()
+
+ ucg_keys = [self.cond_key] # i23d
+
+        def sample_and_save(batch_c, idx=0):  # local closure; shadows the sample_and_save method above
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ c, uc = self.conditioner.get_unconditional_conditioning(
+ batch_c,
+ force_uc_zero_embeddings=ucg_keys
+ if len(self.conditioner.embedders) > 0 else [],
+ )
+
+ sampling_kwargs = {}
+
+            N = num_samples
+            z_shape = (N, 768, self.ddpm_model.in_channels)
+
+            for k in c:
+                if isinstance(c[k], th.Tensor):
+                    assert c[k].shape[0] == 1  # ! a single condition is expected
+                    c[k], uc[k] = map(lambda y: y[k].repeat_interleave(N, 0).to(dist_util.dev()),
+                                      (c, uc))  # support bs>1 sampling given a condition
+
+ samples = self.sample(c,
+ shape=z_shape[1:],
+ uc=uc,
+ batch_size=N,
+ **sampling_kwargs)
+
+ # ! get c
+ if 'img' in self.cond_key:
+ img_save_path = f'{logger.get_dir()}/{idx}_imgcond.jpg'
+                if self.cond_key == 'img-c':  # a substring test on 'c' would also match 'img-caption'
+ torchvision.utils.save_image(batch_c['img-c']['img'][0], img_save_path, value_range=(-1,1), normalize=True, padding=0) # torch.Size([24, 6, 3, 256, 256])
+ else:
+ torchvision.utils.save_image(batch_c['img'], img_save_path, value_range=(-1,1), normalize=True, padding=0)
+
+ assert camera is not None
+ batch = {'c': camera.clone()}
+
+ # rendering
+ for i in range(samples.shape[0]):
+ th.cuda.empty_cache()
+
+ if self.cond_key in ['caption']:
+ name_prefix = f'{batch_c["caption"]}_sample-{idx}-{i}'
+ else:
+ # ! render sampled latent
+ name_prefix = f'{idx}_sample-{i}'
+
+ # if self.cond_key in ['caption', 'img-c']:
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+                    # ! todo: transform to the gs camera format
+                    if self.latent_key != 'latent':  # stage-1: normalized xyz only
+                        stage1_pcd_path = f'{logger.get_dir()}/{name_prefix}.glb'
+                        pcu.save_mesh_v(stage1_pcd_path, self.unnormalize_pcd_act(samples[i]).detach().cpu().float().numpy())
+                        logger.log(f'point cloud saved to {stage1_pcd_path}')
+                    else:
+                        # stage-2: render the sampled KL feature conditioned on fps-xyz
+                        all_rgbs, all_depths, all_alphas, _, _ = self.render_gs_video_given_latent(
+                            th.cat([samples[i:i+1], batch_c['fps-xyz'][0:1]], dim=-1),
+                            self.rec_model,  # compatible with join_model
+                            name_prefix=name_prefix,
+                            save_img=save_img,
+                            render_reference=batch,
+                            export_mesh=False)
+
+ # if export_mesh:
+ # self.export_mesh_from_2dgs(all_rgbs, all_depths, idx, i)
+
+ if self.cond_key == 'caption':
+ assert prompt != ''
+ batch_c = {self.cond_key: prompt}
+
+        if self.latent_key == 'latent':  # t23d, stage-2
+            for i in range(2):  # loop over stage-1 proposals; each draws num_samples stage-2 samples
+                fps_xyz = torch.from_numpy(pcu.load_mesh_v(f'{stage_1_output_dir}/{prompt}_sample-0-{i}.ply')).clip(-0.45, 0.45).unsqueeze(0)
+
+                # (optional editing: warp fps_xyz before conditioning, e.g.
+                #  shifting points with z > 0.24 upward)
+                batch_c.update({
+                    'fps-xyz': fps_xyz.to(self.dtype).to(dist_util.dev())
+                })
+
+ sample_and_save(batch_c, idx=i)
+ else:
+ sample_and_save(batch_c)
+
+
+
+ @th.inference_mode()
+ def eval_t23d_and_export(
+ self,
+ prompt="Yellow rubber duck",
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ stage_1_output_dir='',
+ num_instances=1,
+ export_mesh=False,
+ ):
+ self.ddpm_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False))
+
+ model_kwargs = {}
+
+ uc = None
+ log = dict()
+
+ ucg_keys = [self.cond_key] # i23d
+
+ assert self.cond_key == 'caption' and prompt != ''
+ batch_c = {self.cond_key: prompt}
+
+ if self.latent_key == 'latent': # t23d, stage-2
+ fps_xyz = torch.from_numpy(pcu.load_mesh_v(f'{stage_1_output_dir}/{prompt}_sample-0.ply') ).clip(-0.45,0.45).unsqueeze(0)
+ batch_c.update({
+ 'fps-xyz': fps_xyz.to(self.dtype).to(dist_util.dev())
+ })
+
+        self.sample_and_save(batch_c, ucg_keys, num_samples, camera, save_img)  # save_img is required positionally
+
+
+ @th.inference_mode()
+ def eval_i23d_and_export(
+ self,
+ prompt="Yellow rubber duck",
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ stage_1_output_dir='',
+ num_instances=1,
+ export_mesh=False,
+ ):
+ self.ddpm_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False))
+
+ model_kwargs = {}
+
+ uc = None
+ log = dict()
+
+ ucg_keys = [self.cond_key] # i23d
+
+
+ for idx, batch in enumerate(tqdm(self.data)):
+
+ ins = batch['ins'][0]
+
+            # parse the instance path into a per-instance output name
+            ins = ins.split('/')
+            if len(ins) > 2:
+                if ins[1] == 'render_mvs_25':  # gso
+                    obj_folder, frame = ins[0], int(ins[-1].split('.')[0])
+                    ins_name = f'{obj_folder}/{str(frame)}'
+                else:  # objaverse
+                    obj_folder, frame = os.path.join(ins[1], ins[2]), ins[-1]
+                    frame = int(frame.split('.')[0])
+                    ins_name = f'{obj_folder}/{str(frame)}'
+            else:  # folder of images, e.g., instantmesh
+                ins_name = ins[0].split('.')[0]
+
+
+            # (optional resume: skip instances whose stage-1 outputs already exist)
+
+ if self.cond_key == 'img-c': # mv23d
+ prompt = batch['caption'][0:1]
+ batch_c = {
+ self.cond_key: {
+ 'img': batch['mv_img'][0:1].to(self.dtype).to(dist_util.dev()),
+ 'c': batch['c'][0:1].to(self.dtype).to(dist_util.dev()),
+ },
+ 'img': batch['img'][0:1].to(self.dtype).to(dist_util.dev()),
+ 'caption': prompt,
+ }
+
+ if self.latent_key == 'latent': # stage-2
+                    fps_xyz = torch.from_numpy(pcu.load_mesh_v(f'{stage_1_output_dir}/{idx}_sample-0.ply')).clip(-0.45, 0.45).unsqueeze(0)
+ batch_c.update({
+ 'fps-xyz': fps_xyz[0:1].to(self.dtype).to(dist_util.dev()),
+ })
+
+ # elif self.cond_key == 'img-caption':
+ # batch_c = {'caption': prompt, 'img': batch['img'].to(dist_util.dev()).to(self.dtype)}
+
+ elif self.cond_key == 'img-xyz': # stage-2
+
+ for i in range(2):
+
+ stage1_pcd_output_path = f'{stage_1_output_dir}/{ins_name}/sample-0-{i}.ply'
+
+ fps_xyz = trimesh.load(stage1_pcd_output_path).vertices # pcu may fail on py=3.9
+ fps_xyz = torch.from_numpy(fps_xyz).clip(-0.45,0.45).unsqueeze(0)
+
+                    batch_c = {
+                        'img': batch['img'][0:1].to(self.dtype).to(dist_util.dev()),
+                        'fps-xyz': fps_xyz[0:1].to(self.dtype).to(dist_util.dev()),
+                    }
+
+ self.sample_and_save(batch_c, ucg_keys, num_samples, camera, save_img, idx=ins_name, export_mesh=export_mesh, stage1_idx=i) # type: ignore
+
+ else: # stage-1 data
+ batch_c = {self.cond_key: batch[self.cond_key][0:1].to(dist_util.dev()).to(self.dtype), }
+ if self.cond_key == 'caption' and self.latent_key == 'latent': # t23d, stage-2
+ fps_xyz = torch.from_numpy(pcu.load_mesh_v(f'{stage_1_output_dir}/{idx}_sample-0.ply') ).clip(-0.45,0.45).unsqueeze(0)
+ batch_c.update({
+ 'fps-xyz': fps_xyz.to(self.dtype).to(dist_util.dev())
+ })
+
+
+                self.sample_and_save(batch_c, ucg_keys, num_samples, camera, save_img, idx=ins_name, export_mesh=export_mesh)  # type: ignore
+
+
+ gc.collect()
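+    # Two-stage cascade recap: stage-1 (latent_key != 'latent') samples a sparse
+    # fps-xyz point cloud from the image condition; stage-2 ('img-xyz') samples
+    # the KL feature conditioned on that point cloud, then renders the 2DGS and
+    # optionally fuses a mesh via TSDF.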
+
+ @th.inference_mode()
+ def eval_i23d_and_export_gradio(
+ self,
+ inp_img,
+ seed=42,
+ cfg_scale=4.0, # default value in neural ode
+ save_img=False,
+ **kwargs,
+ ):
+
+        sampling_kwargs = {
+            'cfg_scale': cfg_scale,  # default value in SiT
+            'seed': seed,
+        }
+
+ camera = th.load('assets/objv_eval_pose.pt', map_location=dist_util.dev())[:24]
+        inp_img = th.from_numpy(inp_img).permute(2, 0, 1).unsqueeze(0) / 127.5 - 1  # HWC uint8 -> 1x3xHxW in [-1, 1]
+
+        num_samples = 1
+        export_mesh = True
+
+
+ ucg_keys = [self.cond_key] # i23d
+
+        ins_name = 'house2-input'  # fixed output folder name for the gradio demo
+
+ if self.cond_key == 'img-xyz': # stage-2
+
+            i = 0  # gradio uses the first stage-1 proposal only
+            stage_1_output_dir = "./logs/i23d/stage-1/dino_img/"
+            stage1_pcd_output_path = f'{stage_1_output_dir}/{ins_name}/sample-0-{i}.ply'
+
+            fps_xyz = trimesh.load(stage1_pcd_output_path).vertices  # pcu may fail on py=3.9
+            fps_xyz = torch.from_numpy(fps_xyz).clip(-0.45, 0.45).unsqueeze(0)
+
+            logger.log('loading stage-1 point cloud from: ', stage1_pcd_output_path)
+
+            batch_c = {'img': inp_img.to(dist_util.dev()).to(self.dtype),
+                       'fps-xyz': fps_xyz[0:1].to(self.dtype).to(dist_util.dev())}
+
+            video_path, rgb_xyz_path, post_mesh_path = self.sample_and_save(batch_c, ucg_keys, num_samples, camera, save_img, idx=ins_name, export_mesh=export_mesh, stage1_idx=i, **sampling_kwargs)  # type: ignore
+
+            assert post_mesh_path != ''
+
+ return video_path, rgb_xyz_path, post_mesh_path
+
+        else:  # stage-1: image-conditioned point-cloud sampling
+            batch_c = {'img': inp_img.to(dist_util.dev()).to(self.dtype)}
+            pcd_export_dir = self.sample_and_save(batch_c, ucg_keys, num_samples, camera, save_img, idx=ins_name, export_mesh=export_mesh, **sampling_kwargs)  # type: ignore
+            return pcd_export_dir
+ return pcd_export_dir
+
+
+
+
+ def get_source_cw2wT(self, source_cameras_view_to_world):
+ return matrix_to_quaternion(
+ source_cameras_view_to_world[:3, :3].transpose(0, 1))
+
+ def c_to_3dgs_format(self, pose):
+ # TODO, switch to torch version (batched later)
+
+        c2w = pose[:16].reshape(4, 4)  # camera-to-world, 4x4
+
+ # ! load cam
+ w2c = np.linalg.inv(c2w)
+ R = np.transpose(
+ w2c[:3, :3]) # R is stored transposed due to 'glm' in CUDA code
+ T = w2c[:3, 3]
+ fx = pose[16]
+ FovX = focal2fov(fx, 1)
+ FovY = focal2fov(fx, 1)
+
+ tanfovx = math.tan(FovX * 0.5)
+ tanfovy = math.tan(FovY * 0.5)
+
+ assert tanfovx == tanfovy
+
+ trans = np.array([0.0, 0.0, 0.0])
+ scale = 1.0
+
+ world_view_transform = torch.tensor(getWorld2View2(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+ projection_matrix = getProjectionMatrix(znear=self.znear,
+ zfar=self.zfar,
+ fovX=FovX,
+ fovY=FovY).transpose(0, 1)
+ full_proj_transform = (world_view_transform.unsqueeze(0).bmm(
+ projection_matrix.unsqueeze(0))).squeeze(0)
+ camera_center = world_view_transform.inverse()[3, :3]
+
+ view_world_transform = torch.tensor(getView2World(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+
+ # item.update(viewpoint_cam=[viewpoint_cam])
+ c = {}
+ c["source_cv2wT_quat"] = self.get_source_cw2wT(view_world_transform)
+ c.update(
+ projection_matrix=projection_matrix, # K
+ cam_view=world_view_transform, # world_view_transform
+ cam_view_proj=full_proj_transform, # full_proj_transform
+ cam_pos=camera_center,
+ tanfov=tanfovx, # TODO, fix in the renderer
+ # orig_c2w=c2w,
+ # orig_w2c=w2c,
+ orig_pose=torch.from_numpy(pose),
+ orig_c2w=torch.from_numpy(c2w),
+ orig_w2c=torch.from_numpy(w2c),
+ # tanfovy=tanfovy,
+ )
+
+ return c # dict for gs rendering
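+    # Convention note: R is stored transposed for the glm/row-vector layout used
+    # by the CUDA rasterizer; cam_view_proj = world_view @ projection therefore
+    # maps row-vector world points to clip space, and the camera center is read
+    # from the last row of the inverted view matrix.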
+
+
+
+class FlowMatchingEngine_gs_clay(FlowMatchingEngine_gs):
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ normalize_clip_encoding=False,
+ scale_clip_encoding=1,
+ cfg_dropout_prob=0,
+ cond_key='img_sr',
+ use_eos_feature=False,
+ compile=False,
+ snr_type='lognorm',
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ control_model=control_model,
+ control_key=control_key,
+ only_mid_control=only_mid_control,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=resume_cldm_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ normalize_clip_encoding=normalize_clip_encoding,
+ scale_clip_encoding=scale_clip_encoding,
+ cfg_dropout_prob=cfg_dropout_prob,
+ cond_key=cond_key,
+ use_eos_feature=use_eos_feature,
+ compile=compile,
+ snr_type=snr_type,
+ **kwargs)
+
+ # self._init_new_ca_weight() # after ckpt loading
+
+
+    def _set_grad_flag(self):
+        # unfreeze everything by default; to fine-tune only the newly added
+        # multi-view cross-attention blocks, flip requires_grad per parameter
+        # on names containing 'mv' instead
+        requires_grad(self.ddpm_model, True)
+
+    def _init_new_ca_weight(self):
+        # bootstrap the newly added multi-view cross-attention ('*_dino_mv')
+        # blocks from the pretrained single-view ('*_dino') weights
+        blks_to_copy = ['cross_attn_dino', 'prenorm_ca_dino']
+
+        for blk in self.ddpm_model.blocks:
+            for param_name in blks_to_copy:
+                try:
+                    getattr(blk, param_name.replace('dino', 'dino_mv')).load_state_dict(getattr(blk, param_name).state_dict())
+                except Exception as e:
+                    logger.log(e)  # tolerate minor key misalignment
diff --git a/nsr/lsgm/sgm_DiffusionEngine.py b/nsr/lsgm/sgm_DiffusionEngine.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cc04e56b9e3243330ae212df18f5d7ccfbb8578
--- /dev/null
+++ b/nsr/lsgm/sgm_DiffusionEngine.py
@@ -0,0 +1,510 @@
+"""
+https://github.com/CompVis/stable-diffusion/blob/21f890f9da3cfbeaba8e2ac3c425ee9e998d5229/ldm/models/diffusion/ddpm.py#L30
+"""
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+from click import prompt
+import einops
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+from ldm.modules.encoders.modules import FrozenClipImageEmbedder, TextEmbedder, FrozenCLIPTextEmbedder, FrozenOpenCLIPImagePredictionEmbedder, FrozenOpenCLIPImageEmbedder
+
+import dnnlib
+from dnnlib.util import requires_grad
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+# from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+# from .controlLDM import TrainLoop3DDiffusionLSGM_Control # joint diffusion and rec class
+from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD # joint diffusion and rec class
+
+# ! add new schedulers from https://github.com/Stability-AI/generative-models
+
+from .crossattn_cldm import TrainLoop3DDiffusionLSGM_crossattn
+
+# import SD stuffs
+from typing import Any, Dict, List, Optional, Tuple, Union
+from contextlib import contextmanager
+from omegaconf import ListConfig, OmegaConf
+from sgm.modules import UNCONDITIONAL_CONFIG
+
+from sgm.util import (default, disabled_train, get_obj_from_str,
+ instantiate_from_config, log_txt_as_img)
+
+# from sgm.sampling_utils.demo.streamlit_helpers import init_sampling
+
+
+class DiffusionEngineLSGM(TrainLoop3DDiffusionLSGM_crossattn):
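+ # Bridges the LSGM cross-attention trainer with the Stability-AI `sgm` toolkit:
+ # the denoiser / sampler / loss_fn / conditioner instantiated below replace the
+ # hand-rolled DDPM schedule of the parent class (see prepare_ddpm, which is disabled).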
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ control_model,
+ control_key,
+ only_mid_control,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ normalize_clip_encoding=False,
+ scale_clip_encoding=1,
+ cfg_dropout_prob=0,
+ cond_key='img_sr',
+ use_eos_feature=False,
+ compile=False,
+ # denoiser_config,
+ # conditioner_config: Union[None, Dict, ListConfig,
+ # OmegaConf] = None,
+ # sampler_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ # loss_fn_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ control_model=control_model,
+ control_key=control_key,
+ only_mid_control=only_mid_control,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ resume_cldm_checkpoint=resume_cldm_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ normalize_clip_encoding=normalize_clip_encoding,
+ scale_clip_encoding=scale_clip_encoding,
+ cfg_dropout_prob=cfg_dropout_prob,
+ cond_key=cond_key,
+ use_eos_feature=use_eos_feature,
+ compile=compile,
+ **kwargs)
+
+ # ! sgm diffusion pipeline
+ if self.cond_key == 'caption':
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/txt2img-clipl-compat.yaml')['ldm_configs']
+ else:
+ ldm_configs = OmegaConf.load(
+ 'sgm/configs/img23d-clipl-compat.yaml')['ldm_configs']
+
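+ # Instantiate the sgm components from the YAML block: loss_fn (denoising objective),
+ # denoiser (network preconditioning wrapper), sampler (inference-time solver), and
+ # conditioner (embedders for the text/image conditioning inputs).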
+ self.loss_fn = (
+ instantiate_from_config(ldm_configs.loss_fn_config)
+ # if loss_fn_config is not None
+ # else None
+ )
+ self.denoiser = instantiate_from_config(
+ ldm_configs.denoiser_config).to(dist_util.dev())
+ self.sampler = (instantiate_from_config(ldm_configs.sampler_config))
+
+ self.conditioner = instantiate_from_config(
+ default(ldm_configs.conditioner_config,
+ UNCONDITIONAL_CONFIG)).to(dist_util.dev())
+
+ # ! already merged
+ def prepare_ddpm(self, eps, mode='p'):
+ raise NotImplementedError('already implemented in self.denoiser')
+
+ # merged from noD.py
+
+ # use sota denoiser, loss_fn etc.
+ def ldm_train_step(self, batch, behaviour='cano', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+
+ # ! enable the gradient of both models
+ requires_grad(self.ddpm_model, True)
+
+ self.mp_trainer.zero_grad() # !!!!
+
+ if 'img' in batch:
+ batch_size = batch['img'].shape[0]
+ else:
+ batch_size = len(batch['caption'])
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ assert 'latent' in micro
+ vae_out = {self.latent_name: micro['latent'].to(self.dtype)}
+ # else:
+ # vae_out = self.ddp_rec_model(
+ # img=micro['img_to_encoder'],
+ # c=micro['c'],
+ # behaviour='encoder_vae',
+ # ) # pred: (B, 3, 64, 64)
+
+ eps = vae_out[self.latent_name] / self.triplane_scaling_divider
+ # eps = vae_out.pop(self.latent_name)
+
+ # if 'bg_plane' in vae_out:
+ # eps = th.cat((eps, vae_out['bg_plane']),
+ # dim=1) # include background, B 12+4 32 32
+
+ # ! SD loss
+ # cond = self.get_c_input(micro, bs=eps.shape[0])
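+ # note: unlike stock sgm, this loss_fn is assumed to also return intermediates
+ # ('noised_input', 'sigmas', 'model_output'), which log_control_images consumes below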
+ loss, loss_other_info = self.loss_fn(self.ddp_ddpm_model,
+ self.denoiser,
+ self.conditioner, eps,
+ micro) # type: ignore
+ loss = loss.mean()
+
+ log_rec3d_loss_dict({
+ 'eps_mean':
+ eps.mean(),
+ 'eps_std':
+ eps.std([1, 2, 3]).mean(0),
+ 'pred_x0_std':
+ loss_other_info['model_output'].std([1, 2, 3]).mean(0),
+ "p_loss":
+ loss,
+ })
+
+ self.mp_trainer.backward(loss) # joint gradient descent
+
+ # update ddpm accordingly
+ self.mp_trainer.optimize(self.opt)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.log_control_images(vae_out, micro, loss_other_info)
+
+ @th.inference_mode()
+ def log_control_images(self, vae_out, micro, ddpm_ret):
+
+ # eps_t_p, t_p, logsnr_p = (p_sample_batch[k] for k in (
+ # 'eps_t_p',
+ # 't_p',
+ # 'logsnr_p',
+ # ))
+ # pred_eps_p = ddpm_ret['pred_eps_p']
+
+ if 'posterior' in vae_out:
+ vae_out.pop('posterior') # for calculating kl loss
+ # vae_out_for_pred = {
+ # k: v[0:1].to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ # for k, v in vae_out.items()
+ # }
+ vae_out_for_pred = {self.latent_name: vae_out[self.latent_name][0:1].to(self.dtype)}
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+ pred = self.ddp_rec_model(latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+
+ assert isinstance(pred, dict)
+
+ pred_img = pred['image_raw']
+ if 'img' in micro:
+ gt_img = micro['img']
+ else:
+ gt_img = th.zeros_like(pred['image_raw'])
+
+ if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ else:
+ gt_depth = th.zeros_like(gt_img[:, 0:1, ...])
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ gt_img = self.pool_128(gt_img)
+ gt_depth = self.pool_128(gt_depth)
+ # cond = self.get_c_input(micro)
+ # hint = th.cat(cond['c_concat'], 1)
+
+ gt_vis = th.cat(
+ [
+ gt_img,
+ gt_img,
+ gt_img,
+ # self.pool_128(hint),
+ # gt_img,
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ noised_latent, sigmas, x_start = [
+ ddpm_ret[k] for k in ['noised_input', 'sigmas', 'model_output']
+ ]
+
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ noised_latent[0:1].to(self.dtype) * self.triplane_scaling_divider,
+ }
+
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ x_start[0:1].to(self.dtype) * self.triplane_scaling_divider,
+ }
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=noised_latent,
+ behaviour=self.render_latent_behaviour)
+
+ # pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ # eps_t_p, pred_eps_p, logsnr_p) # for VAE loss, denoised latent
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=denoised_latent,
+ # latent=pred_x0[0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat(
+ [
+ self.pool_128(img) for img in (
+ pred_img[0:1],
+ noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1], # controlnet result
+ pred_depth[0:1].repeat_interleave(3, dim=1))
+ ],
+ dim=-1) # B, 3, H, W
+
+ if 'img' in micro:
+ vis = th.cat([gt_vis, pred_vis],
+ dim=-2)[0].permute(1, 2,
+ 0).cpu() # ! pred in range[-1, 1]
+ else:
+ vis = pred_vis[0].permute(1, 2, 0).cpu()
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ img_save_path = f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{sigmas[0].item():.3f}.jpg'
+ Image.fromarray(vis).save(img_save_path)
+
+ # if self.cond_key == 'caption':
+ # with open(f'{logger.get_dir()}/{self.step+self.resume_step}caption_{t_p[0].item():3}.txt', 'w') as f:
+ # f.write(micro['caption'][0])
+
+ print('log denoised vis to: ', img_save_path)
+
+ th.cuda.empty_cache()
+
+ @th.no_grad()
+ def sample(
+ self,
+ cond: Dict,
+ uc: Union[Dict, None] = None,
+ batch_size: int = 16,
+ shape: Union[None, Tuple, List] = None,
+ **kwargs,
+ ):
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ randn = th.randn(batch_size, *shape).to(dist_util.dev()).to(self.dtype)
+
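+ # the sgm sampler expects a denoiser callable of the form denoiser(input, sigma, cond);
+ # close over the trained network here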
+ denoiser = lambda input, sigma, c: self.denoiser(
+ self.model, input, sigma, c, **kwargs)
+ samples = self.sampler(denoiser, randn, cond, uc=uc)
+
+ return samples
+
+ @th.inference_mode()
+ def eval_cldm(
+ self,
+ prompt="",
+ use_ddim=False,
+ unconditional_guidance_scale=1.0,
+ save_img=False,
+ use_train_trajectory=False,
+ camera=None,
+ num_samples=1,
+ num_instances=1,
+ ):
+ # ! slightly modified for new API. combined with
+ # /cpfs01/shared/V2V/V2V_hdd/yslan/Repo/generative-models/sgm/models/diffusion.py:249 log_images()
+ # TODO, support batch_size > 1
+
+ self.ddpm_model.eval()
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.rec_model.decoder.triplane_decoder.
+ out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False,
+ use_ddim=use_ddim))
+
+ model_kwargs = {}
+
+ uc = None
+ log = dict()
+
+ ucg_keys = [self.cond_key]
+
+ batch_c = {self.cond_key: prompt}
+
+ c, uc = self.conditioner.get_unconditional_conditioning(
+ batch_c,
+ force_uc_zero_embeddings=ucg_keys
+ if len(self.conditioner.embedders) > 0 else [],
+ )
+
+ sampling_kwargs = {}
+
+ N = 1 # hard-coded for now; TODO: support batch_size > 1
+ z_shape = (
+ N,
+ self.ddpm_model.in_channels if not self.ddpm_model.roll_out else
+ 3 * self.ddpm_model.in_channels, # type: ignore
+ self.diffusion_input_size,
+ self.diffusion_input_size)
+
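+ # keep only the first N conditioning entries and move both the conditional (c)
+ # and unconditional (uc) tensors to the local device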
+ for k in c:
+ if isinstance(c[k], th.Tensor):
+ c[k], uc[k] = map(lambda y: y[k][:N].to(dist_util.dev()),
+ (c, uc))
+
+ samples = self.sample(c,
+ shape=z_shape[1:],
+ uc=uc,
+ batch_size=N,
+ **sampling_kwargs)
+ # st() # do rendering first
+
+ # ! get c
+
+ if self.cond_key == 'caption':
+ if camera is not None:
+ batch = {'c': camera.clone()}
+ else:
+ if use_train_trajectory:
+ batch = next(iter(self.data))
+ else:
+ try:
+ batch = next(self.eval_data)
+ except Exception as e:
+ self.eval_data = iter(self.eval_data)
+ batch = next(self.eval_data)
+
+ if camera is not None:
+ batch['c'] = camera.clone()
+
+ # rendering
+ for i in range(samples.shape[0]):
+ th.cuda.empty_cache()
+
+ # ! render sampled latent
+
+ with th.cuda.amp.autocast(dtype=self.dtype,
+ enabled=self.mp_trainer.use_amp):
+
+ self.render_video_given_triplane(
+ samples[i:i+1].to(self.dtype),
+ self.rec_model, # compatible with joint model
+ name_prefix=
+ f'{self.step + self.resume_step}_{i}_{prompt}',
+ save_img=save_img,
+ render_reference=batch,
+ export_mesh=False)
+
+ self.ddpm_model.train()
diff --git a/nsr/lsgm/train_util_diffusion_lsgm.py b/nsr/lsgm/train_util_diffusion_lsgm.py
new file mode 100644
index 0000000000000000000000000000000000000000..661146146b2b67cdaad65fe6f7baefab56f95ab6
--- /dev/null
+++ b/nsr/lsgm/train_util_diffusion_lsgm.py
@@ -0,0 +1,583 @@
+"""
+Modified from:
+https://github.com/NVlabs/LSGM/blob/main/training_obj_joint.py
+"""
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+import dnnlib
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+
+class TrainLoop3DDiffusionLSGM(TrainLoop3DDiffusion,TrainLoop3DcvD_nvsD_canoD):
+ def __init__(self, *, rec_model, denoise_model, diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, schedule_sampler=None, weight_decay=0, lr_anneal_steps=0, iterations=10001, ignore_resume_opt=False, freeze_ae=False, denoised_ae=True, triplane_scaling_divider=10, use_amp=False, diffusion_input_size=224, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, schedule_sampler=schedule_sampler, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, ignore_resume_opt=ignore_resume_opt, freeze_ae=freeze_ae, denoised_ae=denoised_ae, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, **kwargs)
+
+ def run_step(self, batch, step='g_step'):
+
+ if step == 'diffusion_step_rec':
+ self.forward_diffusion(batch, behaviour='diffusion_step_rec')
+ _ = self.mp_trainer_rec.optimize(self.opt_rec) # TODO, update two groups of parameters
+ took_step_ddpm = self.mp_trainer.optimize(self.opt) # TODO, update two groups of parameters
+
+ if took_step_ddpm:
+ self._update_ema() # g_ema # TODO, ema only needs to track ddpm, remove ema tracking in rec
+
+ elif step == 'd_step_rec':
+ self.forward_D(batch, behaviour='rec')
+ # _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ elif step == 'diffusion_step_nvs':
+ self.forward_diffusion(batch, behaviour='diffusion_step_nvs')
+ _ = self.mp_trainer_rec.optimize(self.opt_rec) # TODO, update two groups of parameters
+ took_step_ddpm = self.mp_trainer.optimize(self.opt) # TODO, update two groups of parameters
+
+ if took_step_ddpm:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step_nvs':
+ self.forward_D(batch, behaviour='nvs')
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
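+ # alternate four sub-steps per iteration: diffusion+reconstruction update,
+ # canonical-view D update, diffusion+novel-view update, novel-view D update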
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ # batch = next(self.data)
+ # self.run_step(batch, 'g_step_rec')
+
+ batch = next(self.data)
+ self.run_step(batch, step='diffusion_step_rec')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_rec')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, step='diffusion_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_nvs')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ self.save(self.mp_trainer_rec, self.mp_trainer_rec.model_name)
+ self.save(self.mp_trainer_cvD, 'cvD')
+ self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ self.save(self.mp_trainer_rec, self.mp_trainer_rec.model_name)
+ self.save(self.mp_trainer_cvD, 'cvD')
+ self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ def forward_diffusion(self, batch, behaviour='rec', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ self.ddp_model.requires_grad_(True)
+ self.ddp_rec_model.requires_grad_(True)
+
+ # if behaviour != 'diff' and 'rec' in behaviour:
+ # if behaviour != 'diff' and 'rec' in behaviour: # pure diffusion step
+ # self.ddp_rec_model.requires_grad_(True)
+ for param in self.ddp_rec_model.module.decoder.triplane_decoder.parameters( # type: ignore
+ ): # type: ignore
+ param.requires_grad_(False) # ! disable triplane_decoder grad in each iteration independently;
+ # else:
+
+ self.mp_trainer_rec.zero_grad()
+ self.mp_trainer.zero_grad()
+
+ # ! no 'sds' step now, both add sds grad back to ViT
+
+ # assert behaviour != 'sds'
+ # if behaviour == 'sds':
+ # else:
+ # self.ddp_ddpm_model.requires_grad_(True)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ vae_nelbo_loss = th.tensor(0.0).to(dist_util.dev())
+ vision_aided_loss = th.tensor(0.0).to(dist_util.dev())
+ denoise_loss = th.tensor(0.0).to(dist_util.dev())
+ d_weight = th.tensor(0.0).to(dist_util.dev())
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp
+ and not self.freeze_ae):
+
+ # apply vae
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='enc_dec_wo_triplane') # pred: (B, 3, 64, 64)
+
+
+ if behaviour == 'diffusion_step_rec':
+ target = micro
+ pred = self.ddp_rec_model(latent=vae_out,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ # vae reconstruction loss
+ if last_batch or not self.use_ddp:
+ vae_nelbo_loss, loss_dict = self.loss_class(pred,
+ target,
+ test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ vae_nelbo_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+
+ last_layer = self.ddp_rec_model.module.decoder.triplane_decoder.decoder.net[ # type: ignore
+ -1].weight # type: ignore
+
+ if 'image_sr' in pred:
+ vision_aided_loss = self.ddp_cano_cvD(
+ 0.5 * pred['image_sr'] +
+ 0.5 * th.nn.functional.interpolate(
+ pred['image_raw'],
+ size=pred['image_sr'].shape[2:],
+ mode='bilinear'),
+ for_G=True).mean() # [B, 1] shape
+ else:
+ vision_aided_loss = self.ddp_cano_cvD(
+ pred['image_raw'], for_G=True
+ ).mean(
+ ) # [B, 1] shape
+
+ d_weight = calculate_adaptive_weight(
+ vae_nelbo_loss,
+ vision_aided_loss,
+ last_layer,
+ # disc_weight_max=1) * 1
+ disc_weight_max=1) * self.loss_class.opt.rec_cvD_lambda
+ # d_weight = self.loss_class.opt.rec_cvD_lambda # since decoder is fixed here. set to 0.001
+
+ vision_aided_loss *= d_weight
+
+ # d_weight = self.loss_class.opt.rec_cvD_lambda
+ loss_dict.update({
+ 'vision_aided_loss/G_rec':
+ vision_aided_loss,
+ 'd_weight_G_rec':
+ d_weight,
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ elif behaviour == 'diffusion_step_nvs':
+
+ novel_view_c = th.cat([micro['c'][1:], micro['c'][:1]])
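+ # rotate the camera poses by one within the batch, so each latent is rendered
+ # from another sample's viewpoint (novel-view supervision)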
+
+ pred = self.ddp_rec_model(latent=vae_out,
+ c=novel_view_c,
+ behaviour='triplane_dec')
+
+ if 'image_sr' in pred:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ # pred_for_rec['image_sr'],
+ 0.5 * pred['image_sr'] +
+ 0.5 * th.nn.functional.interpolate(
+ pred['image_raw'],
+ size=pred['image_sr'].shape[2:],
+ mode='bilinear'),
+ for_G=True).mean() # [B, 1] shape
+ else:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ pred['image_raw'], for_G=True
+ ).mean(
+ ) # [B, 1] shape
+
+ d_weight = self.loss_class.opt.nvs_cvD_lambda
+ vision_aided_loss *= d_weight
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs':
+ vision_aided_loss,
+ })
+
+ # ae_loss = th.tensor(0.0).to(dist_util.dev())
+
+ # elif behaviour == 'diff':
+ # self.ddp_rec_model.requires_grad_(False)
+ # # assert self.ddp_rec_model.module.requires_grad == False, 'freeze ddpm_rec for pure diff step'
+ else:
+ raise NotImplementedError(behaviour)
+ # assert behaviour == 'sds'
+
+ # pred = None
+
+ # if behaviour != 'sds': # also train diffusion
+ # assert pred is not None
+
+ # TODO, train diff and sds together, available?
+ eps = vae_out[self.latent_name]
+
+ # if behaviour != 'sds':
+ # micro_to_denoise.detach_()
+ eps.requires_grad_(True) # single stage diffusion
+
+ t, weights = self.schedule_sampler.sample(
+ eps.shape[0], dist_util.dev())
+ noise = th.randn(size=eps.size(), device=eps.device) # note: this noise tensor is shared by the p and q objectives below
+
+ model_kwargs = {}
+
+ # ?
+ # or directly use SSD NeRF version?
+
+ # ! handle the sampling
+
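+ # NOTE: the block below is adapted nearly verbatim from NVlabs/LSGM training_obj_joint.py
+ # (see the module docstring); names such as args, diffusion, dae, vae, utils, grad_scalar,
+ # vae_optimizer and dae_optimizer belong to that codebase and are not defined in this class,
+ # so treat it as a reference sketch rather than runnable code.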
+ # get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ diffusion.iw_quantities(args.batch_size, args.time_eps, args.iw_sample_p, args.iw_subvp_like_vp_sde)
+ eps_t_p = diffusion.sample_q(eps, noise, var_t_p, m_t_p)
+
+ # in case we want to train q (vae) with another batch using a different sampling scheme for times t
+ if args.iw_sample_q in ['ll_uniform', 'll_iw']:
+ t_q, var_t_q, m_t_q, obj_weight_t_q, _, g2_t_q = \
+ diffusion.iw_quantities(args.batch_size, args.time_eps, args.iw_sample_q, args.iw_subvp_like_vp_sde)
+ eps_t_q = diffusion.sample_q(eps, noise, var_t_q, m_t_q)
+
+ eps_t_p = eps_t_p.detach().requires_grad_(True)
+ eps_t = th.cat([eps_t_p, eps_t_q], dim=0)
+ var_t = th.cat([var_t_p, var_t_q], dim=0)
+ t = th.cat([t_p, t_q], dim=0)
+ noise = th.cat([noise, noise], dim=0)
+ else:
+ eps_t, m_t, var_t, t, g2_t = eps_t_p, m_t_p, var_t_p, t_p, g2_t_p
+
+ # run the diffusion
+
+ # mixing normal trick
+ # TODO, create a new partial training_losses function
+ mixing_component = diffusion.mixing_component(eps_t, var_t, t, enabled=dae.mixed_prediction) # TODO, which should I use?
+ params = utils.get_mixed_prediction(dae.mixed_prediction, pred_params, dae.mixing_logit, mixing_component)
+
+ # nelbo loss with kl balancing
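+ # a minimal sketch of the elided step, following LSGM's training_obj_joint.py
+ # (hypothetical here: `utils.kl_balancer`, `args.kl_balance_vada` and `alpha_i` come from that codebase):
+ # balanced_kl, kl_coeffs, kl_vals = utils.kl_balancer(
+ #     kl_all_list, kl_coeff, kl_balance=args.kl_balance_vada, alpha_i=alpha_i)
+ # nelbo_loss = balanced_kl + cross_entropy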
+
+ # ! remaining parts of the cross-entropy term in likelihood training
+
+ cross_entropy_per_var += diffusion.cross_entropy_const(args.time_eps)
+ cross_entropy = th.sum(cross_entropy_per_var, dim=[1, 2, 3])
+ cross_entropy += remaining_neg_log_p_total # for remaining scales, if any
+ all_neg_log_p = vae.decompose_eps(cross_entropy_per_var)
+ all_neg_log_p.extend(remaining_neg_log_p_per_ver) # add the remaining neg_log_p
+ kl_all_list, kl_vals_per_group, kl_diag_list = utils.kl_per_group_vada(all_log_q, all_neg_log_p)
+
+
+ kl_coeff = 1.0
+
+ # ! calculate p/q loss;
+ # ? no spectral regularizer here
+ # ? try adding grid_clip and sn later on.
+ q_loss = th.mean(nelbo_loss)
+ p_loss = th.mean(p_objective)
+
+ # backpropagate q_loss for vae and update vae params, if trained
+ if args.train_vae:
+ grad_scalar.scale(q_loss).backward(retain_graph=utils.different_p_q_objectives(args.iw_sample_p, args.iw_sample_q))
+ utils.average_gradients(vae.parameters(), args.distributed)
+ if args.grad_clip_max_norm > 0.: # apply gradient clipping
+ grad_scalar.unscale_(vae_optimizer)
+ th.nn.utils.clip_grad_norm_(vae.parameters(), max_norm=args.grad_clip_max_norm)
+ grad_scalar.step(vae_optimizer)
+
+ # if we use different p and q objectives or are not training the vae, discard gradients and backpropagate p_loss
+ if utils.different_p_q_objectives(args.iw_sample_p, args.iw_sample_q) or not args.train_vae:
+ if args.train_vae:
+ # discard current gradients computed by weighted loss for VAE
+ dae_optimizer.zero_grad()
+
+ # compute gradients with unweighted loss
+ grad_scalar.scale(p_loss).backward()
+
+ # update dae parameters
+ utils.average_gradients(dae.parameters(), args.distributed)
+ if args.grad_clip_max_norm > 0.: # apply gradient clipping
+ grad_scalar.unscale_(dae_optimizer)
+ th.nn.utils.clip_grad_norm_(dae.parameters(), max_norm=args.grad_clip_max_norm)
+ grad_scalar.step(dae_optimizer)
+
+
+ # unpack separate objectives, in case we want to train q (vae) using a different sampling scheme for times t
+ if args.iw_sample_q in ['ll_uniform', 'll_iw']:
+ l2_term_p, l2_term_q = th.chunk(l2_term, chunks=2, dim=0)
+ p_objective = th.sum(obj_weight_t_p * l2_term_p, dim=[1, 2, 3])
+ # cross_entropy_per_var = obj_weight_t_q * l2_term_q
+ else:
+ p_objective = th.sum(obj_weight_t_p * l2_term, dim=[1, 2, 3])
+ # cross_entropy_per_var = obj_weight_t_q * l2_term
+
+ # print(micro_to_denoise.min(), micro_to_denoise.max())
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ eps, # x_start
+ t,
+ model_kwargs=model_kwargs,
+ return_detail=True)
+
+ # ! DDPM step
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ # denoised_out = denoised_fn()
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ losses = compute_losses()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ denoise_loss = (losses["loss"] * weights).mean()
+
+ x_t = losses.pop('x_t')
+ model_output = losses.pop('model_output')
+ diffusion_target = losses.pop('diffusion_target')
+ alpha_bar = losses.pop('alpha_bar')
+
+ log_loss_dict(self.diffusion, t,
+ {k: v * weights
+ for k, v in losses.items()})
+
+ # if behaviour == 'sds':
+ # ! calculate sds grad, and add to the grad of
+
+ # if 'rec' in behaviour and self.loss_class.opt.sds_lamdba > 0: # only enable sds along with rec step
+ # w = (
+ # 1 - alpha_bar**2
+ # ) / self.triplane_scaling_divider * self.loss_class.opt.sds_lamdba # https://github.com/ashawkey/stable-dreamfusion/issues/106
+ # sds_grad = denoise_loss.clone().detach(
+ # ) * w # * https://pytorch.org/docs/stable/generated/th.Tensor.detach.html. detach() returned Tensor share the same storage with previous one. add clone() here.
+
+ # # ae_loss = AddGradient.apply(latent[self.latent_name], sds_grad) # add sds_grad during backward
+
+ # def sds_hook(grad_to_add):
+
+ # def modify_grad(grad):
+ # return grad + grad_to_add # add the sds grad to the original grad for BP
+
+ # return modify_grad
+
+ # eps[self.latent_name].register_hook(
+ # sds_hook(sds_grad)) # merge sds grad with rec/nvs ae step
+
+ loss = vae_nelbo_loss + denoise_loss + vision_aided_loss # calculate the total loss within AMP
+
+ # ! cvD loss
+
+ # exit AMP before backward
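+ # caveat: backward() is invoked twice on the same loss graph; the first call must
+ # retain the graph (assumed to be handled inside MixedPrecisionTrainer.backward),
+ # otherwise the second call would fail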
+ self.mp_trainer_rec.backward(loss)
+ self.mp_trainer.backward(loss)
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0 and behaviour != 'diff':
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ # st()
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # if 'image_sr' in pred: # TODO
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=x_t[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ # if denoised_out is None:
+ # if not self.denoised_ae:
+ # denoised_out = denoised_fn()
+
+ if self.diffusion.model_mean_type == ModelMeanType.START_X:
+ pred_xstart = model_output
+ else: # * used here
+ pred_xstart = self.diffusion._predict_xstart_from_eps(
+ x_t=x_t, t=t, eps=model_output)
+
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=pred_xstart[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ # denoised_out = denoised_ae_pred
+
+ # if not self.denoised_ae:
+ # denoised_ae_pred = self.ddp_rec_model(
+ # img=None,
+ # c=micro['c'][0:1],
+ # latent=denoised_out['pred_xstart'][0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically
+ # behaviour=self.render_latent_behaviour)
+ # else:
+ # assert denoised_ae_pred is not None
+ # denoised_ae_pred['image_raw'] = denoised_ae_pred[
+ # 'image_raw'][0:1]
+
+ # print(pred_img.shape)
+ # print('denoised_ae:', self.denoised_ae)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis = th.cat([
+ # self.pool_128(micro['img']), x_t[:, :3, ...],
+ # denoised_out['pred_xstart'][:, :3, ...]
+ # ],
+ # dim=-1)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ )
+
+ th.cuda.empty_cache()
diff --git a/nsr/lsgm/train_util_diffusion_lsgm_cvD_joint.py b/nsr/lsgm/train_util_diffusion_lsgm_cvD_joint.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d33e66d442d48d89b0b43b1e014564b468a171
--- /dev/null
+++ b/nsr/lsgm/train_util_diffusion_lsgm_cvD_joint.py
@@ -0,0 +1,1953 @@
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+
+import vision_aided_loss
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+from dnnlib.util import requires_grad
+from guided_diffusion.nn import update_ema
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+
+from .train_util_diffusion_lsgm_noD_joint import TrainLoop3DDiffusionLSGMJointnoD
+
+from nsr.losses.builder import kl_coeff
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+class TrainLoop3DDiffusionLSGM_cvD(TrainLoop3DDiffusionLSGMJointnoD):
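+ # Extends the joint (no-discriminator) LSGM trainer with two vision-aided CLIP
+ # discriminators: cano_cvD judges canonical-view reconstructions and nvs_cvD judges
+ # novel-view renderings.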
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ triplane_scaling_divider=1,
+ use_amp=False,
+ diffusion_input_size=224,
+ init_cvD=True,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ **kwargs)
+
+ # self.setup_cvD()
+ # def setup_cvD(self):
+ device = dist_util.dev()
+
+ # TODO copied from nvs_canoD, could be merged
+ # * create vision aided model
+ # TODO, load model api
+
+ # nvs D
+ if init_cvD:
+ self.nvs_cvD = vision_aided_loss.Discriminator(
+ cv_type='clip', loss_type='multilevel_sigmoid_s',
+ device=device).to(device)
+ self.nvs_cvD.cv_ensemble.requires_grad_(
+ False) # Freeze feature extractor
+ self._load_and_sync_parameters(model=self.nvs_cvD, model_name='cvD')
+
+ self.mp_trainer_nvs_cvD = MixedPrecisionTrainer(
+ model=self.nvs_cvD,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='cvD',
+ use_amp=use_amp,
+ # use_amp=
+ # False, # assert len(optimizer_state["found_inf_per_device"]) > 0, "No inf checks were recorded for this optimizer."
+ model_params=list(self.nvs_cvD.decoder.parameters()))
+ cvD_lr = 2e-4 * (lr / 1e-5) * self.loss_class.opt.nvs_D_lr_mul
+ # cvD_lr = 1e-5*(lr/1e-5)
+ self.opt_cvD = AdamW(self.mp_trainer_nvs_cvD.master_params,
+ lr=cvD_lr,
+ betas=(0, 0.999),
+ eps=1e-8) # dlr in biggan cfg
+
+ logger.log(f'cpt_cvD lr: {cvD_lr}')
+
+ if self.use_ddp:
+ self.ddp_nvs_cvD = DDP(
+ self.nvs_cvD,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ self.ddp_nvs_cvD = self.nvs_cvD
+
+ # cano d
+ self.cano_cvD = vision_aided_loss.Discriminator(
+ cv_type='clip', loss_type='multilevel_sigmoid_s',
+ device=device).to(device)
+ self.cano_cvD.cv_ensemble.requires_grad_(
+ False) # Freeze feature extractor
+ # self.cano_cvD.train()
+
+ self._load_and_sync_parameters(model=self.cano_cvD,
+ model_name='cano_cvD')
+
+ self.mp_trainer_cano_cvD = MixedPrecisionTrainer(
+ model=self.cano_cvD,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='canonical_cvD',
+ use_amp=use_amp,
+ model_params=list(self.cano_cvD.decoder.parameters()))
+
+ cano_lr = 2e-4 * (
+ lr / 1e-5) # D_lr=2e-4 in cvD by default. 1e-4 still overfitting
+ self.opt_cano_cvD = AdamW(
+ self.mp_trainer_cano_cvD.master_params,
+ lr=cano_lr, # same as the G
+ betas=(0, 0.999),
+ eps=1e-8) # dlr in biggan cfg
+
+ logger.log(f'cpt_cano_cvD lr: {cano_lr}')
+
+ self.ddp_cano_cvD = DDP(
+ self.cano_cvD,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+
+ # Fix decoder
+ requires_grad(self.rec_model.decoder, False)
+
+ def _post_run_step(self):
+ if self.step % self.log_interval == 0 and dist_util.get_rank() == 0 and self.step != 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ # if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_ddpm_sample(self.rec_model)
+ if self.sde_diffusion.args.train_vae:
+ self.eval_loop(self.rec_model)
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ exit()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ # dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, 'cano_ddpm_only')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'cano_ddpm_step')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'd_step_rec')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'nvs_ddpm_step')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'd_step_nvs')
+
+ self._post_run_step()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+ if step == 'ce_ddpm_step':
+ self.ce_ddpm_step(batch)
+
+ elif step in ['ce', 'ddpm', 'cano_ddpm_only']:
+ self.joint_rec_ddpm(batch, step)
+
+ elif step == 'cano_ddpm_step':
+ self.joint_rec_ddpm(batch, 'cano')
+
+ elif step == 'd_step_rec':
+ self.forward_D(batch, behaviour='rec')
+
+ elif step == 'nvs_ddpm_step':
+ self.joint_rec_ddpm(batch, 'nvs')
+
+ elif step == 'd_step_nvs':
+ self.forward_D(batch, behaviour='nvs')
+
+ self._anneal_lr()
+ self.log_step()
+
+ def flip_encoder_grad(self, mode=True):
+ requires_grad(self.rec_model.encoder, mode)
+
+ def forward_D(self, batch, behaviour): # update D
+
+ self.flip_encoder_grad(False)
+ self.rec_model.eval()
+ # self.ddp_model.requires_grad_(False)
+
+ # update two D
+ if behaviour == 'nvs':
+ self.mp_trainer_nvs_cvD.zero_grad()
+ self.ddp_nvs_cvD.requires_grad_(True)
+ self.ddp_nvs_cvD.train()
+ self.ddp_cano_cvD.requires_grad_(False)
+ self.ddp_cano_cvD.eval()
+ else: # update rec canonical D
+ self.mp_trainer_cano_cvD.zero_grad()
+ self.ddp_nvs_cvD.requires_grad_(False)
+ self.ddp_nvs_cvD.eval()
+ self.ddp_cano_cvD.requires_grad_(True)
+ self.ddp_cano_cvD.train()
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_cano_cvD.use_amp):
+
+ latent = self.ddp_rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ cano_pred = self.ddp_rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ # TODO, optimize with one encoder, and two triplane decoder
+ # FIXME: exit autocast before running backward
+ if behaviour == 'rec':
+ if 'image_sr' in cano_pred:
+ # d_loss_cano = self.run_D_Diter(
+ # # real=micro['img_sr'],
+ # # fake=cano_pred['image_sr'],
+ # real=0.5 * micro['img_sr'] + 0.5 * th.nn.functional.interpolate(micro['img'], size=micro['img_sr'].shape[2:], mode='bilinear'),
+ # fake=0.5 * cano_pred['image_sr'] + 0.5 * th.nn.functional.interpolate(cano_pred['image_raw'], size=cano_pred['image_sr'].shape[2:], mode='bilinear'),
+ # D=self.ddp_canonical_cvD) # ! failed, color bias
+
+ # try concat them in batch
+ d_loss = self.run_D_Diter(
+ real=th.cat([
+ th.nn.functional.interpolate(
+ micro['img'],
+ size=micro['img_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ micro['img_sr'],
+ ],
+ dim=1),
+ fake=th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ],
+ dim=1),
+ D=self.ddp_cano_cvD) # TODO, add SR for FFHQ
+ else:
+ d_loss = self.run_D_Diter(real=micro['img'],
+ fake=cano_pred['image_raw'],
+ D=self.ddp_cano_cvD)
+
+ log_rec3d_loss_dict({'vision_aided_loss/D_cano': d_loss})
+ # self.mp_trainer_canonical_cvD.backward(d_loss_cano)
+ else:
+ assert behaviour == 'nvs'
+ novel_view_c = th.roll(micro['c'], 1, 0)
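+ # roll the camera matrices by one within the batch: each latent is rendered
+ # under the next sample's viewpoint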
+
+ nvs_pred = self.ddp_rec_model(latent=latent,
+ c=novel_view_c,
+ behaviour='triplane_dec')
+
+ if 'image_sr' in nvs_pred:
+
+ d_loss = self.run_D_Diter(
+ real=th.cat([
+ th.nn.functional.interpolate(
+ cano_pred['image_raw'],
+ size=cano_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ cano_pred['image_sr'],
+ ],
+ dim=1),
+ fake=th.cat([
+ th.nn.functional.interpolate(
+ nvs_pred['image_raw'],
+ size=nvs_pred['image_sr'].shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=True),
+ nvs_pred['image_sr'],
+ ],
+ dim=1),
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ else:
+ d_loss = self.run_D_Diter(
+ real=cano_pred['image_raw'],
+ fake=nvs_pred['image_raw'],
+ D=self.ddp_nvs_cvD) # TODO, add SR for FFHQ
+
+ log_rec3d_loss_dict({'vision_aided_loss/D_nvs': d_loss})
+ # self.mp_trainer_cvD.backward(d_loss_nvs)
+ # quit autocast to run backward()
+ if behaviour == 'rec':
+ self.mp_trainer_cano_cvD.backward(d_loss)
+ # assert len(optimizer_state["found_inf_per_device"]) > 0, "No inf checks were recorded for this optimizer."
+ _ = self.mp_trainer_cano_cvD.optimize(self.opt_cano_cvD)
+ else:
+ assert behaviour == 'nvs'
+ self.mp_trainer_nvs_cvD.backward(d_loss)
+ _ = self.mp_trainer_nvs_cvD.optimize(self.opt_cvD)
+
+ self.flip_encoder_grad(True)
+ self.rec_model.train()
+
+ # def forward_ddpm(self, eps):
+ # args = self.sde_diffusion.args
+
+ # # sample noise
+ # noise = th.randn(size=eps.size(), device=eps.device
+ # ) # note that this noise value is currently shared!
+
+ # # get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ # t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ # self.sde_diffusion.iw_quantities(args.iw_sample_p)
+ # eps_t_p = self.sde_diffusion.sample_q(eps, noise, var_t_p, m_t_p)
+ # # logsnr_p = self.sde_diffusion.log_snr(m_t_p,
+ # # var_t_p) # for p only
+
+ # pred_eps_p, pred_x0_p, logsnr_p = self.ddpm_step(
+ # eps_t_p, t_p, m_t_p, var_t_p)
+
+ # # ! batchify for mixing_component
+ # # mixing normal trick
+ # mixing_component = self.sde_diffusion.mixing_component(
+ # eps_t_p, var_t_p, t_p, enabled=True) # TODO, which should I use?
+ # pred_eps_p = get_mixed_prediction(
+ # True, pred_eps_p,
+ # self.ddp_ddpm_model(x=None,
+ # timesteps=None,
+ # get_attr='mixing_logit'), mixing_component)
+
+ # # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
+ # with self.ddp_ddpm_model.no_sync(): # type: ignore
+ # l2_term_p = th.square(pred_eps_p - noise) # ? weights
+
+ # p_eps_objective = th.mean(obj_weight_t_p * l2_term_p)
+
+ # log_rec3d_loss_dict(
+ # dict(mixing_logit=self.ddp_ddpm_model(
+ # x=None, timesteps=None, get_attr='mixing_logit').detach(), ))
+
+ # return {
+ # 'pred_eps_p': pred_eps_p,
+ # 'eps_t_p': eps_t_p,
+ # 'p_eps_objective': p_eps_objective,
+ # 'pred_x0_p': pred_x0_p,
+ # 'logsnr_p': logsnr_p
+ # }
+
+ # ddpm + rec loss
+ def joint_rec_ddpm(self, batch, behaviour='cano', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+ args = self.sde_diffusion.args
+
+ # ! enable the gradient of both models
+ # requires_grad(self.rec_model, True)
+ self.flip_encoder_grad(True)
+ self.rec_model.train()
+
+ requires_grad(self.ddpm_model, True)
+ self.ddpm_model.train()
+
+ requires_grad(self.ddp_cano_cvD, False)
+ requires_grad(self.ddp_nvs_cvD, False)
+ self.ddp_cano_cvD.eval()
+ self.ddp_nvs_cvD.eval()
+
+ self.mp_trainer.zero_grad()
+
+ # if args.train_vae:
+ # for param in self.rec_model.decoder.triplane_decoder.parameters( # type: ignore
+ # ): # type: ignore
+ # param.requires_grad_(
+ # False
+ # ) # ! disable triplane_decoder grad in each iteration independently;
+
+ assert args.train_vae
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+ # and args.train_vae):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+ vision_aided_loss = th.tensor(0.).to(dist_util.dev())
+
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+ eps = vae_out[self.latent_name]
+
+ if 'bg_plane' in vae_out:
+ eps = th.cat((eps, vae_out['bg_plane']), dim=1) # include background, B 12+4 32 32
+
+ # eps = pred[self.latent_name]
+ # eps = vae_out.pop(self.latent_name)
+
+ # ! running diffusion forward
+ p_sample_batch = self.prepare_ddpm(eps)
+ # ddpm_ret = self.forward_ddpm(eps)
+ ddpm_ret = self.apply_model(p_sample_batch)
+ # p_loss = ddpm_ret['p_eps_objective']
+ loss += ddpm_ret['p_eps_objective'].mean()
+
+ # =====================================================================
+ # ! reconstruction loss + gan loss
+ if behaviour != 'cano_ddpm_only':
+ if behaviour == 'cano':
+ cano_pred = self.ddp_rec_model(
+ latent=vae_out,
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ with self.ddp_model.no_sync(): # type: ignore
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ cano_pred, micro, test_mode=False)
+ loss += q_vae_recon_loss
+
+ # add gan loss
+ vision_aided_loss = self.ddp_cano_cvD(
+ cano_pred['image_raw'], for_G=True
+ ).mean(
+ ) * self.loss_class.opt.rec_cvD_lambda # [B, 1] shape
+
+ loss_dict.update({
+ 'vision_aided_loss/G_rec':
+ vision_aided_loss.detach(),
+ })
+ log_rec3d_loss_dict(loss_dict)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.cano_ddpm_log(cano_pred, micro, ddpm_ret)
+
+ else:
+ assert behaviour == 'nvs'
+
+ nvs_pred = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=th.roll(micro['c'], 1, 0),
+ ) # ! render novel views only for D loss
+
+ vision_aided_loss = self.ddp_nvs_cvD(
+ nvs_pred['image_raw'], for_G=True
+ ).mean(
+ ) * self.loss_class.opt.nvs_cvD_lambda # [B, 1] shape
+
+ log_rec3d_loss_dict(
+ {'vision_aided_loss/G_nvs': vision_aided_loss})
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 1:
+ self.nvs_log(nvs_pred, micro)
+
+ else:
+ cano_pred = self.ddp_rec_model(
+ latent=vae_out,
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ with self.ddp_model.no_sync(): # type: ignore
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ {
+ **vae_out, # include latent here.
+ **cano_pred,
+ },
+ micro,
+ test_mode=False)
+ # pred,
+ # micro,
+ # test_mode=False)
+ log_rec3d_loss_dict(loss_dict)
+ loss += q_vae_recon_loss
+
+ loss += vision_aided_loss
+
+ self.mp_trainer.backward(loss)
+
+ # quit for loop
+ _ = self.mp_trainer.optimize(self.opt, clip_grad=self.loss_class.opt.grad_clip)
+
+ @th.inference_mode()
+ def cano_ddpm_log(self, cano_pred, micro, ddpm_ret):
+ assert isinstance(cano_pred, dict)
+ behaviour = 'cano'
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+
+ if 'image_depth' in cano_pred:
+ pred_depth = cano_pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ pred_img = cano_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in cano_pred:
+ if cano_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), cano_pred['image_sr']], dim=-1)
+ gt_img = th.cat([self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif cano_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), cano_pred['image_sr']], dim=-1)
+ gt_img = th.cat([self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), cano_pred['image_sr']], dim=-1)
+ gt_img = th.cat([self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat([
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+ eps_t_p, pred_eps_p, logsnr_p = (ddpm_ret[k]
+ for k in ('eps_t_p', 'pred_eps_p',
+ 'logsnr_p'))
+
+ if 'bg_plane' in cano_pred:
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion': eps_t_p[0:1, :12] * self.triplane_scaling_divider,
+ 'bg_plane': eps_t_p[0:1, 12:16] * self.triplane_scaling_divider,
+ }
+ else:
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion': eps_t_p[0:1] * self.triplane_scaling_divider,
+ }
+
+ # st() # split bg_plane here
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=noised_latent,
+ behaviour=self.render_latent_behaviour)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps_t_p, pred_eps_p, logsnr_p) # for VAE loss, denoised latent
+
+ if 'bg_plane' in cano_pred:
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion': pred_x0[0:1, :12] * self.triplane_scaling_divider,
+ 'bg_plane': pred_x0[0:1, 12:16] * self.triplane_scaling_divider,
+ }
+ else:
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion': pred_x0[0:1] * self.triplane_scaling_divider,
+ }
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=denoised_latent,
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis],
+ dim=-2)[0].permute(1, 2,
+ 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ f'{logger.get_dir()}/{self.step+self.resume_step}_{behaviour}.jpg')
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_{behaviour}.jpg')
+ del vis, pred_vis, pred_x0, pred_eps_p, micro
+
+ th.cuda.empty_cache()
+
+ @th.inference_mode()
+ def nvs_log(self, nvs_pred, micro):
+ behaviour = 'nvs'
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 1:
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = nvs_pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ pred_img = nvs_pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in nvs_pred:
+
+ if nvs_pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), nvs_pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']], dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif nvs_pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), nvs_pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']], dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), nvs_pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']], dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img, pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis, normalize=True, scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg')
+ print('log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg')
+
+ # ! all copied from train_util_cvD.py; should merge later.
+ def run_D_Diter(self, real, fake, D=None):
+ # Dmain: Minimize logits for generated images and maximize logits for real images.
+ if D is None:
+ D = self.ddp_nvs_cvD
+
+ lossD = D(real, for_real=True).mean() + D(fake, for_real=False).mean()
+ return lossD
+
+ def save(self, mp_trainer=None, model_name='rec'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, mp_trainer.master_params)
+
+ if model_name == 'ddpm':
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ dist.barrier()
+
+ def _load_and_sync_parameters(self, model=None, model_name='rec'):
+ resume_checkpoint, self.resume_step = find_resume_checkpoint(
+ self.resume_checkpoint, model_name) or self.resume_checkpoint
+
+ if model is None:
+ model = self.ddp_rec_model # default model in the parent class
+
+ logger.log(resume_checkpoint)
+
+ if resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ logger.log(f'mark {model_name} loading ', )
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ logger.log(f'mark {model_name} loading finished', )
+
+ model_state_dict = model.state_dict()
+
+ for k, v in resume_state_dict.items():
+
+ if k in model_state_dict.keys() and v.size(
+ ) == model_state_dict[k].size():
+ model_state_dict[k] = v
+
+ # elif 'IN' in k and model_name == 'rec' and getattr(model.decoder, 'decomposed_IN', False):
+ # model_state_dict[k.replace('IN', 'superresolution.norm.norm_layer')] = v # decomposed IN
+ elif 'attn.wk' in k or 'attn.wv' in k: # old qkv
+ logger.log('ignore ', k)
+
+ elif 'decoder.vit_decoder.blocks' in k:
+ # st()
+ # load from 2D ViT pre-trained into 3D ViT blocks.
+ assert len(model.decoder.vit_decoder.blocks[0].vit_blks
+ ) == 2 # assert depth=2 here.
+ fusion_ca_depth = len(
+ model.decoder.vit_decoder.blocks[0].vit_blks)
+ vit_subblk_index = int(k.split('.')[3])
+ vit_blk_keyname = ('.').join(k.split('.')[4:])
+ fusion_blk_index = vit_subblk_index // fusion_ca_depth
+ fusion_blk_subindex = vit_subblk_index % fusion_ca_depth
+ model_state_dict[
+ f'decoder.vit_decoder.blocks.{fusion_blk_index}.vit_blks.{fusion_blk_subindex}.{vit_blk_keyname}'] = v
+ # logger.log('load 2D ViT weight: {}'.format(f'decoder.vit_decoder.blocks.{fusion_blk_index}.vit_blks.{fusion_blk_subindex}.{vit_blk_keyname}'))
+
+ elif 'IN' in k:
+ logger.log('ignore ', k)
+
+ elif 'quant_conv' in k:
+ logger.log('ignore ', k)
+
+ else:
+ logger.log(
+ '!!!! ignore key: ',
+ k,
+ ": ",
+ v.size(),
+ )
+ if k in model_state_dict:
+ logger.log('shape in model: ',
+ model_state_dict[k].size())
+ else:
+ logger.log(k, 'not in model_state_dict')
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ logger.log(f'synced {model_name} params')
+
+
+class TrainLoop3DDiffusionLSGM_cvD_scaling(TrainLoop3DDiffusionLSGM_cvD):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ triplane_scaling_divider=1,
+ use_amp=False,
+ diffusion_input_size=224,
+ init_cvD=True,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ init_cvD=init_cvD,
+ **kwargs)
+
+ def _update_latent_stat_ema(self, latent: th.Tensor):
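+ """Track EMA statistics of the latent: the per-element mean over the batch and the per-sample std (over dims 1-3) averaged over the batch; both are logged and later used to standardize eps."""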
+ # update the EMA mean/std statistics of the latent
+ for rate, params in zip(self.ema_rate,
+ [self.ddpm_model.ema_latent_mean]):
+ update_ema(params, latent.mean(0, keepdim=True), rate=rate)
+ for rate, params in zip(self.ema_rate,
+ [self.ddpm_model.ema_latent_std]):
+ update_ema(params, latent.std([1,2,3]).mean(0, keepdim=True), rate=rate)
+
+ log_rec3d_loss_dict({'ema_latent_std': self.ddpm_model.ema_latent_std.mean()})
+ log_rec3d_loss_dict({'ema_latent_mean': self.ddpm_model.ema_latent_mean.mean()})
+
+ # def _init_optim_groups(self, rec_model, freeze_decoder=True):
+ # # unfreeze decoder when scaling is enabled
+ # return super()._init_optim_groups(rec_model, freeze_decoder=False)
+
+ def _standarize(self, eps):
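+ """Standardize eps with the EMA statistics, i.e. scaled_eps = (eps - ema_latent_mean) / ema_latent_std; the commented lines below are previously tried variants."""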
+ # scaled_eps = (eps - self.ddpm_model.ema_latent_mean
+ # ) / self.ddpm_model.ema_latent_std
+ # scaled_eps = eps - self.ddpm_model.ema_latent_mean
+ # scaled_eps = eps.div(self.ddpm_model.ema_latent_std)
+ # scaled_eps = eps + self.ddpm_model.ema_latent_std
+ scaled_eps = eps.add(-self.ddpm_model.ema_latent_mean).mul(1/self.ddpm_model.ema_latent_std)
+ return scaled_eps
+
+ def _unstandarize(self, scaled_eps):
+ return scaled_eps.mul(self.ddpm_model.ema_latent_std).add(self.ddpm_model.ema_latent_mean)
+
+
+class TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm(TrainLoop3DDiffusionLSGM_cvD_scaling):
+ def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224,init_cvD=False, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size,
+ init_cvD=init_cvD, **kwargs)
+
+ def _setup_opt(self):
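+ """Build a single AdamW optimizer: one param group for the ddpm model, plus the rec-model groups from _init_optim_groups (decoder frozen here; the unfreezeD subclass passes freeze_decoder=False instead)."""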
+ # TODO, two optims groups.
+ self.opt = AdamW([{
+ 'name': 'ddpm',
+ 'params': self.ddpm_model.parameters(),
+ }],
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+
+ for rec_param_group in self._init_optim_groups(self.rec_model, True): # freeze D
+ self.opt.add_param_group(rec_param_group)
+ logger.log(self.opt)
+
+
+ def next_n_batch(self, n=1):
+ '''Sample n batches at once and concatenate them along the batch dimension.'''
+ all_batch_list = [next(self.data) for _ in range(n)]
+ return {
+ k: th.cat([batch[k] for batch in all_batch_list], 0)
+ for k in all_batch_list[0].keys()
+ }
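+ # e.g. with a per-GPU batch of 12, next_n_batch(n=6) gives the effective batch of 72 used in run_loop below.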
+ # pass
+
+ def subset_batch(self, batch=None, micro_batchsize=4, big_endian=False):
+ '''Take a subset of a batch: the last micro_batchsize items if big_endian, else the first.'''
+ if batch is None:
+ batch = next(self.data)
+ if big_endian:
+ return {
+ k: v[-micro_batchsize:]
+ for k, v in batch.items()
+ }
+ else:
+ return {
+ k: v[:micro_batchsize]
+ for k, v in batch.items()
+ }
+ # pass
+
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ # dist_util.synchronize()
+
+ # batch = self.next_n_batch(n=4)
+ batch = self.next_n_batch(n=6) # effective BS=72
+ self.run_step(batch, 'ddpm') # ddpm fixed
+
+ batch = next(self.data)
+ self.run_step(batch, 'ce')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'cano_ddpm_step')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'd_step_rec')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'nvs_ddpm_step')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'd_step_nvs')
+
+ self._post_run_step()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ # def _init_optim_groups(self, rec_model, freeze_decoder=True):
+ # # unfreeze decoder when scaling is enabled
+ # # return super()._init_optim_groups(rec_model, freeze_decoder=False)
+ # return super()._init_optim_groups(rec_model, freeze_decoder=True)
+
+ def entropy_weight(self, normal_entropy=None):
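+ """Base weighting: a constant negative-entropy coefficient; the weighting-v0 subclass below rescales it by the inverse EMA latent std."""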
+ return self.loss_class.opt.negative_entropy_lambda
+
+ # ddpm + rec loss
+ def joint_rec_ddpm(self, batch, behaviour='ddpm', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+ args = self.sde_diffusion.args
+
+ # ! enable the gradient of both models
+ # requires_grad(self.rec_model, True)
+ # if behaviour == 'ce': # ll sampling? later. train encoder.
+ if 'ce' in behaviour: # ll sampling? later. train encoder.
+ ##############################################
+ ###### Update the VAE encoder/decoder ########
+ ##############################################
+ requires_grad(self.ddpm_model, False)
+ self.ddpm_model.eval()
+ ce_flag = True
+
+ if behaviour == 'ce_E': # unfreeze E and freeze D
+ requires_grad(self.rec_model.encoder, True)
+ self.rec_model.encoder.train()
+ requires_grad(self.rec_model.decoder, False)
+ self.rec_model.decoder.eval()
+
+ else: # train all
+ requires_grad(self.rec_model, True)
+ self.rec_model.train()
+
+ else: # train ddpm.
+ ce_flag = False
+ # self.flip_encoder_grad(False)
+ requires_grad(self.rec_model, False)
+ self.rec_model.eval()
+ requires_grad(self.ddpm_model, True)
+ self.ddpm_model.train()
+
+ self.mp_trainer.zero_grad()
+
+ # assert args.train_vae
+
+ batch_size = batch['img'].shape[0]
+
+ # for i in range(0, batch_size, self.microbatch):
+ for i in range(0, batch_size, batch_size):
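+ # stepping by batch_size means this loop runs exactly once; micro is the full batch (micro-batching disabled here).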
+
+ micro = {
+ k:
+ v[i:i + batch_size].to(dist_util.dev()) if isinstance(
+ # v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=self.mp_trainer.use_amp):
+ enabled=False):
+ # and args.train_vae):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ # with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=False):
+ # quit amp in encoder, avoid nan.
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+ eps = vae_out[self.latent_name]
+
+ # ! prepare for diffusion
+ if 'bg_plane' in vae_out:
+ eps = th.cat((eps, vae_out['bg_plane']), dim=1) # include background, B 12+4 32 32
+
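+ # mode 'q' uses the VAE (cross-entropy) time weighting; mode 'p' uses the SGM-prior weighting and needs gradients through eps.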
+ if ce_flag:
+ p_sample_batch = self.prepare_ddpm(eps, 'q')
+ else: # sgm prior
+ eps.requires_grad_(True)
+ p_sample_batch = self.prepare_ddpm(eps, 'p')
+
+ # ! running diffusion forward
+ ddpm_ret = self.apply_model(p_sample_batch)
+ # p_loss = ddpm_ret['p_eps_objective']
+ p_loss = ddpm_ret['p_eps_objective'].mean()
+ if ce_flag:
+ cross_entropy = p_loss # why collapse?
+ normal_entropy = vae_out['posterior'].normal_entropy()
+ negative_entropy = -normal_entropy * self.entropy_weight(normal_entropy)
+ ce_loss = (cross_entropy + negative_entropy.mean())
+
+ if self.diffusion_ce_anneal: # gradually add ce lambda
+ raise NotImplementedError() # annealing not implemented; the kl_coeff code below is currently unreachable
+ diffusion_ce_lambda = kl_coeff(
+ step=self.step + self.resume_step,
+ constant_step=5e3,
+ total_step=20e3,
+ min_kl_coeff=1e-2,
+ max_kl_coeff=self.loss_class.opt.negative_entropy_lambda)
+ ce_loss *= diffusion_ce_lambda
+
+ log_rec3d_loss_dict({
+ 'diffusion_ce_lambda': diffusion_ce_lambda,
+ })
+
+ loss += ce_loss
+ else:
+ loss += p_loss # p loss
+
+ if ce_flag and 'D' in behaviour: # ce only on E
+ # =====================================================================
+ # ! reconstruction loss + gan loss
+
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=False):
+
+ # 24GB memory use till now.
+ cano_pred = self.ddp_rec_model(
+ latent=vae_out,
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ with self.ddp_model.no_sync(): # type: ignore
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ {
+ **vae_out, # include latent here.
+ **cano_pred,
+ },
+ micro,
+ test_mode=False)
+
+ log_rec3d_loss_dict({
+ **loss_dict,
+ 'negative_entropy': negative_entropy.mean(),
+ })
+ loss += q_vae_recon_loss
+
+ # save image log
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.cano_ddpm_log(cano_pred, micro, ddpm_ret)
+
+ self.mp_trainer.backward(loss) # grad accumulation
+
+ # quit micro
+ _ = self.mp_trainer.optimize(self.opt, clip_grad=self.loss_class.opt.grad_clip)
+
+class TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD(TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm):
+ def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224, init_cvD=False, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, init_cvD=init_cvD, **kwargs)
+
+ def _setup_opt(self):
+ # TODO, two optims groups.
+ self.opt = AdamW([{
+ 'name': 'ddpm',
+ 'params': self.ddpm_model.parameters(),
+ }],
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+
+ for rec_param_group in self._init_optim_groups(self.rec_model, freeze_decoder=False):
+ self.opt.add_param_group(rec_param_group)
+ logger.log(self.opt)
+
+class TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_weightingv0(TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD):
+ '''
+ 1. weight CE with ema(var(eps)): as CE decreases, sigma decreases.
+ 2. clip the entropy (log sigma) at 0 to keep it from growing too large.
+ 3. add eps scaling back with ema_rate=0.9999 so that the std stays at 1.
+ 4. enable grad clipping by default.
+ '''
+ def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224, init_cvD=False, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, init_cvD=init_cvD, **kwargs)
+
+ # for dynamic entropy penalize
+ self.entropy_const = 0.5 * (np.log(2 * np.pi) + 1)
+ # self._load_and_sync_parameters
+
+
+ # def _load_model(self):
+ # # TODO, for current compatibility
+ # self._load_and_sync_parameters(model=self.model) # load to joint class
+
+ # def save(self):
+ # return super().save()
+
+ def prepare_ddpm(self, eps, mode='p'):
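+ """Log the raw eps statistics, standardize eps with the EMA mean/std, build the diffusion sample batch, then refresh the EMA statistics from the unscaled eps."""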
+
+ log_rec3d_loss_dict({
+ 'unscaled_eps_mean': eps.mean(),
+ 'unscaled_eps_std': eps.std([1, 2, 3]).mean(0),
+ })
+
+ scaled_eps = self._standarize(eps)
+ p_sample_batch = super().prepare_ddpm(scaled_eps, mode)
+
+ # update ema; this will not affect the diffusion computation of this batch.
+ self._update_latent_stat_ema(eps)
+
+ return p_sample_batch
+
+ def ce_weight(self):
+ return self.loss_class.opt.ce_lambda * (self.ddpm_model.ema_latent_std.mean().detach())
+
+ # def ce_weight(self):
+ # return self.loss_class.opt.ce_lambda
+
+ def entropy_weight(self, normal_entropy=None):
+ '''If log(sigma) > 0, stop the penalty.'''
+ # basically L1
+ negative_entropy_lambda = self.loss_class.opt.negative_entropy_lambda
+ # return th.where(normal_entropy > self.entropy_const, -negative_entropy_lambda, negative_entropy_lambda) # if log(sigma) > 0, weight = 0.
+ # return negative_entropy_lambda * (1 / self.ddpm_model.ema_latent_std.mean().detach()**2)
+ return negative_entropy_lambda * (1 / self.ddpm_model.ema_latent_std.mean().detach()) # scale the penalty by the inverse EMA latent std
+
+class TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED(TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_weightingv0):
+ def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224, init_cvD=False, diffusion_ce_anneal=False, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, init_cvD=init_cvD, **kwargs)
+ self.diffusion_ce_anneal = diffusion_ce_anneal
+
+ def run_step(self, batch, step='g_step'):
+
+ assert step in ['ce', 'ddpm', 'cano_ddpm_only', 'ce_ED', 'ce_E', 'ce_D', 'D', 'ED']
+ self.joint_rec_ddpm(batch, step)
+
+ self._anneal_lr()
+ self.log_step()
+
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ batch = self.next_n_batch(n=12) # effective BS=48
+ self.run_step(batch, 'ddpm') # ddpm fixed AE
+
+ batch = self.next_n_batch(n=3) # effective BS=12
+ self.run_step(batch, 'ce_ED')
+
+ self._post_run_step()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+
+
+ @th.inference_mode()
+ def log_diffusion_images(self, vae_out, p_sample_batch, micro, ddpm_ret):
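+ """Save a diagnostic grid: the GT row (image and depth) on top and, below it, the VAE prediction, the render of the noised latent eps_t_p, the render of the denoised pred_x0, and the predicted depth."""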
+
+ eps_t_p, t_p, logsnr_p = (p_sample_batch[k] for k in (
+ 'eps_t_p',
+ 't_p',
+ 'logsnr_p',
+ ))
+ pred_eps_p = ddpm_ret['pred_eps_p']
+
+ vae_out.pop('posterior') # the posterior is only needed for the KL loss; drop it before rendering
+ vae_out_for_pred = {
+ k: v[0:1].to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ for k, v in vae_out.items()
+ }
+
+ pred = self.ddp_rec_model(latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ else:
+ gt_depth = th.zeros_like(gt_img[:, 0:1, ...])
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ gt_img = self.pool_128(gt_img)
+ gt_depth = self.pool_128(gt_depth)
+ # cond = self.get_c_input(micro)
+ # hint = th.cat(cond['c_concat'], 1)
+
+ gt_vis = th.cat(
+ [
+ gt_img,
+ gt_img,
+ # self.pool_128(hint),
+ gt_img,
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO: depth sometimes fails to load; values are in [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ if 'bg_plane' in vae_out:
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ eps_t_p[0:1, :12] * self.triplane_scaling_divider,
+ 'bg_plane':
+ eps_t_p[0:1, 12:16] * self.triplane_scaling_divider,
+ }
+ else:
+ noised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ eps_t_p[0:1] * self.triplane_scaling_divider,
+ }
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=noised_latent,
+ # latent=eps_t_p[0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps_t_p, pred_eps_p, logsnr_p) # for VAE loss, denoised latent
+
+ if 'bg_plane' in vae_out:
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ pred_x0[0:1, :12] * self.triplane_scaling_divider,
+ 'bg_plane':
+ pred_x0[0:1, 12:16] * self.triplane_scaling_divider,
+ }
+ else:
+ denoised_latent = {
+ 'latent_normalized_2Ddiffusion':
+ pred_x0[0:1] * self.triplane_scaling_divider,
+ }
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=denoised_latent,
+ # latent=pred_x0[0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat(
+ [
+ self.pool_128(img) for img in (
+ pred_img[0:1],
+ noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1], # controlnet result
+ pred_depth[0:1].repeat_interleave(3, dim=1))
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis],
+ dim=-2)[0].permute(1, 2,
+ 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}.jpg'
+ )
+
+ th.cuda.empty_cache()
+
+
+ @th.inference_mode()
+ def log_patch_img(self, micro, pred, pred_cano):
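+ """Save a patch-level grid: GT image/depth on top and, below, the predicted image, depth and foreground mask for the novel and canonical views."""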
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # infer novel view also
+ # if self.loss_class.opt.symmetry_loss:
+ # pred_nv_img = nvs_pred
+ # else:
+ # ! replace with novel view prediction
+
+ # ! log another novel-view prediction
+ # pred_nv_img = self.rec_model(
+ # img=micro['img_to_encoder'],
+ # c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ fg_mask = pred['image_mask'] * 2 - 1 # 0-1
+ input_fg_mask = pred_cano['image_mask'] * 2 - 1 # 0-1
+ if 'image_depth' in pred:
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_nv_depth = norm_depth(pred_cano['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_nv_depth = th.zeros_like(gt_depth)
+
+ # if 'image_sr' in pred:
+ # if pred['image_sr'].shape[-1] == 512:
+ # pred_img = th.cat([self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat([self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ # elif pred['image_sr'].shape[-1] == 256:
+ # pred_img = th.cat([self.pool_256(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat([self.pool_256(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_256(pred_depth)
+ # gt_depth = self.pool_256(gt_depth)
+
+ # else:
+ # pred_img = th.cat([self.pool_128(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat([self.pool_128(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # gt_depth = self.pool_128(gt_depth)
+ # pred_depth = self.pool_128(pred_depth)
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ pred_vis = th.cat([
+ pred_img,
+ pred_depth.repeat_interleave(3, dim=1),
+ fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ pred_cano['image_raw'],
+ pred_nv_depth.repeat_interleave(3, dim=1),
+ input_fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat([pred_vis, pred_vis_nv], dim=-2) # cat in H dim
+
+ gt_vis = th.cat([
+ gt_img,
+ gt_depth.repeat_interleave(3, dim=1),
+ th.zeros_like(gt_img)
+ ],
+ dim=-1) # TODO: depth sometimes fails to load; values are in [0, 1]
+
+ # if 'conf_sigma' in pred:
+ # gt_vis = th.cat([gt_vis, fg_mask], dim=-1) # placeholder
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # st()
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+ vis_tensor = torchvision.utils.make_grid(vis, nrow=vis.shape[-1] //
+ 64) # HWC
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ value_range=(-1, 1),
+ normalize=True)
+
+ logger.log('log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ # self.writer.add_image(f'images',
+ # vis,
+ # self.step + self.resume_step,
+ # dataformats='HWC')
+
+
+
+class TrainLoop3D_LDM(TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED):
+ def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224, init_cvD=False, diffusion_ce_anneal=False, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, init_cvD=init_cvD, diffusion_ce_anneal=diffusion_ce_anneal, **kwargs)
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ batch = self.next_n_batch(n=2) # effective BS=64, micro=4, ~30.7 GiB
+ self.run_step(batch, 'ddpm') # ddpm fixed AE
+
+ # batch = self.next_n_batch(n=1) #
+ # self.run_step(batch, 'ce_ED')
+
+ self._post_run_step()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+
+class TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED_nv(TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED):
+ # reconstruction function from train_nv_util.py
+ def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224, init_cvD=False, diffusion_ce_anneal=False, **kwargs):
+ super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, init_cvD=init_cvD, diffusion_ce_anneal=diffusion_ce_anneal, **kwargs)
+
+ # ! for rendering
+ self.eg3d_model = self.rec_model.decoder.triplane_decoder # type: ignore
+ self.renderdiff_loss = False # whether to render denoised latent for reconstruction loss
+
+ # self.inner_loop_k = 2
+ # self.ce_d_loop_k = 6
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ batch = self.next_n_batch(n=2) # effective BS=2*8
+ self.run_step(batch, 'ddpm')
+
+ # if self.step % self.inner_loop_k == 1: # train E per 2 steps
+ batch = next(self.data) # sample a new batch for rec training
+ # self.run_step(self.subset_batch(batch, micro_batchsize=6, big_endian=False), 'ce_ED') # freeze D, train E with diffusion prior
+ # self.run_step(batch, 'ce_ED') #
+ self.run_step(batch, 'ce_E') #
+
+ # if self.step % self.ce_d_loop_k == 1: # train D per 4 steps
+ # batch = next(self.data) # sample a new batch for rec training
+ # self.run_step(self.subset_batch(batch, micro_batchsize=4, big_endian=True), 'ED') # freeze E, train D
+
+ self._post_run_step()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+
+ # ddpm + rec loss
+ def joint_rec_ddpm(self, batch, behaviour='ddpm', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+ args = self.sde_diffusion.args
+
+ # ! enable the gradient of both models
+ # requires_grad(self.rec_model, True)
+ # if behaviour == 'ce': # ll sampling? later. train encoder.
+ ce_flag = False
+ diffusion_flag = True
+ if 'ce' in behaviour: # ll sampling? later. train encoder.
+ ##############################################
+ ###### Update the VAE encoder/decoder ########
+ ##############################################
+ requires_grad(self.ddpm_model, False)
+ self.ddpm_model.eval()
+ ce_flag = True
+
+ if behaviour == 'ce_E': # unfreeze E and freeze D
+ requires_grad(self.rec_model.encoder, True)
+ self.rec_model.encoder.train()
+ requires_grad(self.rec_model.decoder, False)
+ self.rec_model.decoder.eval()
+
+ elif behaviour == 'ce_D': # freeze E and unfreeze D
+ requires_grad(self.rec_model.encoder, False)
+ self.rec_model.encoder.eval()
+ requires_grad(self.rec_model.decoder, True)
+ self.rec_model.decoder.train()
+
+ else: # train all, may oom
+ requires_grad(self.rec_model, True)
+ self.rec_model.train()
+
+ elif behaviour == 'ED': # just train E and D
+ diffusion_flag = False
+ requires_grad(self.ddpm_model, False)
+ self.ddpm_model.eval()
+ requires_grad(self.rec_model, True)
+ self.rec_model.train()
+
+ elif behaviour == 'D':
+ diffusion_flag = False
+ requires_grad(self.rec_model.encoder, False)
+ self.rec_model.encoder.eval()
+ requires_grad(self.rec_model.decoder, True)
+ self.rec_model.decoder.train()
+
+ else: # train ddpm.
+ # self.flip_encoder_grad(False)
+ requires_grad(self.rec_model, False)
+ self.rec_model.eval()
+ requires_grad(self.ddpm_model, True)
+ self.ddpm_model.train()
+
+ self.mp_trainer.zero_grad()
+
+ assert args.train_vae
+
+ batch_size = batch['img'].shape[0]
+
+ # for i in range(0, batch_size, self.microbatch):
+ for i in range(0, batch_size, batch_size):
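+ # the loop body runs once (step == batch_size); only the first self.microbatch samples of the batch are used below.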
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ # ! sample rendering patch
+ target = {
+ **self.eg3d_model(
+ c=micro['nv_c'], # type: ignore
+ ws=None,
+ planes=None,
+ sample_ray_only=True,
+ fg_bbox=micro['nv_bbox']), # rays o / dir
+ }
+
+ patch_rendering_resolution = self.eg3d_model.rendering_kwargs[
+ 'patch_rendering_resolution'] # type: ignore
+ cropped_target = {
+ k: th.empty_like(v)
+ [..., :patch_rendering_resolution, :patch_rendering_resolution]
+ if k not in [
+ 'ins_idx', 'img_to_encoder', 'img_sr', 'nv_img_to_encoder',
+ 'nv_img_sr', 'c'
+ ] else v
+ for k, v in micro.items()
+ }
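+ # keys excluded above keep full resolution (instance indices, encoder inputs, SR images, cameras); all other entries get patch-sized buffers that are filled by the crops below.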
+
+ # crop according to uv sampling
+ for j in range(micro['img'].shape[0]):
+ top, left, height, width = target['ray_bboxes'][
+ j] # list of tuple
+ # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ for key in ('img', 'depth_mask', 'depth'): # type: ignore
+ # target[key][i:i+1] = torchvision.transforms.functional.crop(
+ # cropped_target[key][
+ # j:j + 1] = torchvision.transforms.functional.crop(
+ # micro[key][j:j + 1], top, left, height, width)
+
+ cropped_target[f'{key}'][ # ! no nv_ here
+ j:j + 1] = torchvision.transforms.functional.crop(
+ micro[f'nv_{key}'][j:j + 1], top, left, height,
+ width)
+
+ # ! cano view loss
+ cano_target = {
+ **self.eg3d_model(
+ c=micro['c'], # type: ignore
+ ws=None,
+ planes=None,
+ sample_ray_only=True,
+ fg_bbox=micro['bbox']), # rays o / dir
+ }
+
+ cano_cropped_target = {
+ k: th.empty_like(v)
+ for k, v in cropped_target.items()
+ }
+
+ for j in range(micro['img'].shape[0]):
+ top, left, height, width = cano_target['ray_bboxes'][
+ j] # list of tuple
+ # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ for key in ('img', 'depth_mask', 'depth'): # type: ignore
+ # target[key][i:i+1] = torchvision.transforms.functional.crop(
+ cano_cropped_target[key][
+ j:j + 1] = torchvision.transforms.functional.crop(
+ micro[key][j:j + 1], top, left, height, width)
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=self.mp_trainer.use_amp):
+ enabled=False):
+ # and args.train_vae):
+
+ loss = th.tensor(0.).to(dist_util.dev())
+
+ # with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=False):
+ # quit amp in encoder, avoid nan.
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+
+
+ if diffusion_flag:
+
+ eps = vae_out[self.latent_name] # ~12542 MiB at bs=4
+
+ # '''
+ # ! prepare for diffusion
+ if 'bg_plane' in vae_out:
+ eps = th.cat((eps, vae_out['bg_plane']), dim=1) # include background, B 12+4 32 32
+
+ if ce_flag:
+ p_sample_batch = self.prepare_ddpm(eps, 'q')
+ else:
+ eps.requires_grad_(True)
+ p_sample_batch = self.prepare_ddpm(eps, 'p')
+
+ # ! running diffusion forward
+ ddpm_ret = self.apply_model(p_sample_batch)
+ # p_loss = ddpm_ret['p_eps_objective']
+ p_loss = ddpm_ret['p_eps_objective'].mean()
+ # st() # 12890mib
+
+ if ce_flag:
+ cross_entropy = p_loss # why collapse?
+ normal_entropy = vae_out['posterior'].normal_entropy()
+ entropy_weight = self.entropy_weight(normal_entropy)
+ negative_entropy = -normal_entropy * entropy_weight
+ ce_loss = (cross_entropy + negative_entropy.mean())
+
+ # if self.diffusion_ce_anneal: # gradually add ce lambda
+ # diffusion_ce_lambda = kl_coeff(
+ # step=self.step + self.resume_step,
+ # constant_step=5e3+self.resume_step,
+ # total_step=25e3,
+ # min_kl_coeff=1e-5,
+ # max_kl_coeff=self.loss_class.opt.negative_entropy_lambda)
+ # # diffusion_ce_lambda = th.tensor(1e-5).to(dist_util.dev())
+ # ce_loss *= diffusion_ce_lambda
+
+ log_rec3d_loss_dict({
+ # 'diffusion_ce_lambda': diffusion_ce_lambda,
+ 'negative_entropy': negative_entropy.mean(),
+ 'entropy_weight': entropy_weight,
+ 'ce_loss': ce_loss
+ })
+
+ loss += ce_loss
+ else:
+ loss += p_loss # p loss
+
+
+ # ! do reconstruction supervision
+
+ # '''
+
+ if ce_flag or not diffusion_flag: # vae part
+ latent_to_decode = vae_out
+ else:
+ latent_to_decode = { # diffusion part
+ self.latent_name: ddpm_ret['pred_x0_p']
+ } # render denoised latent
+
+ # with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=False):
+ # st()
+ if ce_flag or self.renderdiff_loss or not diffusion_flag:
+ # ! do vae latent -> triplane decode
+ latent_to_decode.update(self.ddp_rec_model(latent=latent_to_decode, behaviour='decode_after_vae_no_render')) # triplane, 19mib bs=4
+
+ # ! do render
+ # st()
+ pred_nv_cano = self.ddp_rec_model( # 24gb, bs=4
+ # latent=latent.expand(2,),
+ latent={
+ 'latent_after_vit': # ! triplane for rendering
+ latent_to_decode['latent_after_vit'].repeat(2, 1, 1, 1)
+ },
+ c=th.cat([micro['nv_c'],
+ micro['c']]), # predict novel view here
+ behaviour='triplane_dec',
+ # ray_origins=target['ray_origins'],
+ # ray_directions=target['ray_directions'],
+ ray_origins=th.cat(
+ [target['ray_origins'], cano_target['ray_origins']],
+ 0),
+ ray_directions=th.cat([
+ target['ray_directions'], cano_target['ray_directions']
+ ]),
+ )
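+ # the triplane is repeated so that novel-view and canonical-view rays are rendered in a single forward pass.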
+
+ pred_nv_cano.update({ # for kld
+ 'posterior': vae_out['posterior'],
+ 'latent_normalized_2Ddiffusion': vae_out['latent_normalized_2Ddiffusion']
+ })
+
+ # ! 2D loss
+
+ with self.ddp_model.no_sync(): # type: ignore
+ loss_rec, loss_rec_dict, _ = self.loss_class(
+ pred_nv_cano,
+ {
+ k: th.cat([v, cano_cropped_target[k]], 0)
+ for k, v in cropped_target.items()
+ }, # prepare merged data
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None)
+
+ if diffusion_flag and not ce_flag:
+ prefix = 'denoised_'
+ else:
+ prefix = ''
+
+ log_rec3d_loss_dict({
+ f'{prefix}{k}': v for k, v in loss_rec_dict.items()
+ })
+
+ loss += loss_rec # l2, LPIPS, Alpha loss
+
+ # save image log
+ # if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ # self.cano_ddpm_log(cano_pred, micro, ddpm_ret)
+
+ self.mp_trainer.backward(loss) # grad accumulation, ~27 GiB
+
+ # st()
+
+ # for name, p in self.model.named_parameters():
+ # if p.grad is None:
+ # logger.log(f"found rec unused param: {name}")
+
+ # _ = self.mp_trainer.optimize(self.opt, clip_grad=self.loss_class.opt.grad_clip)
+ _ = self.mp_trainer.optimize(self.opt, clip_grad=True)
+
+ if dist_util.get_rank() == 0:
+ if self.step % 500 == 0: # log diffusion
+ self.log_diffusion_images(vae_out, p_sample_batch, micro, ddpm_ret)
+ elif self.step % 500 == 1 and ce_flag: # log reconstruction
+ # st()
+ micro_bs = micro['img_to_encoder'].shape[0]
+ self.log_patch_img(
+ cropped_target,
+ {
+ k: pred_nv_cano[k][:micro_bs]
+ for k in ['image_raw', 'image_depth', 'image_mask']
+ },
+ {
+ k: pred_nv_cano[k][micro_bs:]
+ for k in ['image_raw', 'image_depth', 'image_mask']
+ },
+ )
+
+ def _init_optim_groups(self, rec_model, freeze_decoder=False):
+ # the decoder is always frozen here: freeze_decoder is forced to True regardless of the caller's flag
+ return super()._init_optim_groups(rec_model, freeze_decoder=True)
+
+# class TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED_nv_noCE(TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED_nv):
+# """no sepatate CE schedule, use single schedule for joint ddpm/nv-rec training with entropy regularization
+# """
+# def __init__(self, *, rec_model, denoise_model, diffusion, sde_diffusion, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, triplane_scaling_divider=1, use_amp=False, diffusion_input_size=224, init_cvD=False, diffusion_ce_anneal=False, **kwargs):
+# super().__init__(rec_model=rec_model, denoise_model=denoise_model, diffusion=diffusion, sde_diffusion=sde_diffusion, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, triplane_scaling_divider=triplane_scaling_divider, use_amp=use_amp, diffusion_input_size=diffusion_input_size, init_cvD=init_cvD, diffusion_ce_anneal=diffusion_ce_anneal, **kwargs)
+
+# def run_loop(self):
+# while (not self.lr_anneal_steps
+# or self.step + self.resume_step < self.lr_anneal_steps):
+
+# batch = self.next_n_batch(n=2) # effective BS=2*8
+# self.run_step(batch, 'ddpm')
+
+# # if self.step % self.inner_loop_k == 1: # train E per 2 steps
+# batch = next(self.data) # sample a new batch for rec training
+# self.run_step(self.subset_batch(batch, micro_batchsize=6, big_endian=False), 'ce_ED') # freeze D, train E with diffusion prior
+
+# self._post_run_step()
+
+# # Save the last checkpoint if it wasn't already saved.
+# if (self.step - 1) % self.save_interval != 0:
+# self.save()
\ No newline at end of file
diff --git a/nsr/lsgm/train_util_diffusion_lsgm_noD.py b/nsr/lsgm/train_util_diffusion_lsgm_noD.py
new file mode 100644
index 0000000000000000000000000000000000000000..ced343581dc0fb809557bb2722b88f4cab90c3d0
--- /dev/null
+++ b/nsr/lsgm/train_util_diffusion_lsgm_noD.py
@@ -0,0 +1,936 @@
+"""
+Modified from:
+https://github.com/NVlabs/LSGM/blob/main/training_obj_joint.py
+"""
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+import dnnlib
+from dnnlib.util import requires_grad
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+# import utils as lsgm_utils
+
+
+class TrainLoop3DDiffusionLSGM_noD(TrainLoop3DDiffusion):
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ sde_diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ **kwargs):
+ super().__init__(
+ rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ # freeze_ae=freeze_ae,
+ freeze_ae=not sde_diffusion.args.train_vae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ **kwargs)
+
+ assert sde_diffusion is not None
+ sde_diffusion.args.batch_size = batch_size
+ self.sde_diffusion = sde_diffusion
+ self.latent_name = 'latent_normalized_2Ddiffusion' # normalized triplane latent
+ self.render_latent_behaviour = 'decode_after_vae' # directly render using triplane operations
+
+ self.pool_512 = th.nn.AdaptiveAvgPool2d((512, 512))
+ self.pool_256 = th.nn.AdaptiveAvgPool2d((256, 256))
+ self.pool_128 = th.nn.AdaptiveAvgPool2d((128, 128))
+ self.pool_64 = th.nn.AdaptiveAvgPool2d((64, 64))
+
+ self.ddp_ddpm_model = self.ddp_model
+
+ # if sde_diffusion.args.joint_train:
+ # assert sde_diffusion.args.train_vae
+
+ def run_step(self, batch, step='diffusion_step_rec'):
+
+ # if step == 'diffusion_step_rec':
+
+ self.forward_diffusion(batch, behaviour='diffusion_step_rec')
+
+ # if took_step_ddpm:
+ self._update_ema()
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, step='diffusion_step_rec')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_ddpm_sample()
+ if self.sde_diffusion.args.train_vae:
+ self.eval_loop()
+
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ if self.sde_diffusion.args.train_vae:
+ self.save(self.mp_trainer_rec,
+ self.mp_trainer_rec.model_name)
+
+ # dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ if self.sde_diffusion.args.train_vae:
+ self.save(self.mp_trainer_rec,
+ self.mp_trainer_rec.model_name)
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ # self.save(self.mp_trainer_canonical_cvD, 'cvD')
+
+ # ! duplicated code, needs refactor later
+ def ddpm_step(self, eps, t, logsnr, model_kwargs={}):
+ """helper function for ddpm predictions; returns predicted eps, x0 and logsnr
+ """
+ args = self.sde_diffusion.args
+ pred_params = self.ddp_ddpm_model(eps, t, **model_kwargs)
+ # pred_params = self.ddp_model(eps, t, **model_kwargs)
+ if args.pred_type == 'eps':
+ pred_eps = pred_params
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps, pred_params, logsnr) # for VAE loss, denoised latent
+ elif args.pred_type == 'x0':
+ # ! transform to pred_eps format for mixing_component
+ pred_x0 = pred_params
+ pred_eps = self.sde_diffusion._predict_eps_from_x0(
+ eps, pred_params, logsnr)
+ else:
+ raise NotImplementedError(f'{args.pred_type} not implemented.')
+
+ return pred_eps, pred_x0, logsnr
+
+ # def apply_model(self, p_sample_batch, model_kwargs={}):
+ # # args = self.sde_diffusion.args
+ # noise, eps_t_p, t_p, logsnr_p, obj_weight_t_p, var_t_p = (
+ # p_sample_batch[k] for k in ('noise', 'eps_t_p', 't_p', 'logsnr_p',
+ # 'obj_weight_t_p', 'var_t_p'))
+
+ # pred_eps_p, pred_x0_p, logsnr_p = self.ddpm_step(
+ # eps_t_p, t_p, logsnr_p, model_kwargs)
+
+ # # ! batchify for mixing_component
+ # # mixing normal trick
+ # mixing_component = self.sde_diffusion.mixing_component(
+ # eps_t_p, var_t_p, t_p, enabled=True) # TODO, which should I use?
+ # pred_eps_p = get_mixed_prediction(
+ # True, pred_eps_p,
+ # self.ddp_ddpm_model(x=None,
+ # timesteps=None,
+ # get_attr='mixing_logit'), mixing_component)
+
+ # # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
+ # with self.ddp_ddpm_model.no_sync(): # type: ignore
+ # l2_term_p = th.square(pred_eps_p - noise) # ? weights
+
+ # p_eps_objective = th.mean(obj_weight_t_p * l2_term_p)
+
+ # log_rec3d_loss_dict(
+ # dict(mixing_logit=self.ddp_ddpm_model(
+ # x=None, timesteps=None, get_attr='mixing_logit').detach(), ))
+
+ # return {
+ # 'pred_eps_p': pred_eps_p,
+ # 'eps_t_p': eps_t_p,
+ # 'p_eps_objective': p_eps_objective,
+ # 'pred_x0_p': pred_x0_p,
+ # 'logsnr_p': logsnr_p
+ # }
+
+ def forward_diffusion(self, batch, behaviour='rec', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+ args = self.sde_diffusion.args
+
+ # self.ddp_ddpm_model.requires_grad_(True)
+ requires_grad(self.ddp_rec_model.module, args.train_vae)
+ # self.ddp_rec_model.requires_grad_(args.train_vae)
+
+ if args.train_vae:
+ for param in self.ddp_rec_model.module.decoder.triplane_decoder.parameters( # type: ignore
+ ): # type: ignore
+ param.requires_grad_(
+ False
+ ) # ! disable triplane_decoder grad in each iteration independently;
+
+ self.mp_trainer_rec.zero_grad()
+ self.mp_trainer.zero_grad()
+
+ batch_size = batch['img'].shape[0]
+
+ # # update ddpm params
+ # took_step_ddpm = self.mp_trainer_ddpm.optimize(
+ # self.opt_ddpm) # TODO, update two groups of parameters
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ q_vae_recon_loss = th.tensor(0.0).to(dist_util.dev())
+ # vision_aided_loss = th.tensor(0.0).to(dist_util.dev())
+ # denoise_loss = th.tensor(0.0).to(dist_util.dev())
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+ # and args.train_vae):
+
+ assert behaviour == 'diffusion_step_rec'
+
+ # ! train vae with CE; ddpm fixed
+ requires_grad(self.ddp_model.module, False)
+ # if args.train_vae:
+ # assert args.add_rendering_loss
+ with th.set_grad_enabled(args.train_vae):
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ # behaviour='enc_dec_wo_triplane'
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+ # TODO, no need to render if not SSD; no need to do ViT decoder if only the latent is needed. update later
+
+ # TODO, train diff and sds together, available?
+ all_log_q = [vae_out['log_q_2Ddiffusion']]
+ eps = vae_out[self.latent_name]
+ eps.requires_grad_(True) # single stage diffusion
+
+ # t, weights = self.schedule_sampler.sample(
+ # eps.shape[0], dist_util.dev())
+
+ noise = th.randn(
+ size=eps.size(), device=eps.device
+ ) # note that this noise value is currently shared!
+ model_kwargs = {}
+
+ # get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_p)
+ eps_t_p = self.sde_diffusion.sample_q(eps, noise, var_t_p,
+ m_t_p)
+ logsnr_p = self.sde_diffusion.log_snr(m_t_p,
+ var_t_p) # for p only
+
+ # in case we want to train q (vae) with another batch using a different sampling scheme for times t
+ if args.iw_sample_q in ['ll_uniform', 'll_iw']:
+ t_q, var_t_q, m_t_q, obj_weight_t_q, _, g2_t_q = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_q)
+ eps_t_q = self.sde_diffusion.sample_q(
+ eps, noise, var_t_q, m_t_q)
+
+ eps_t_p = eps_t_p.detach().requires_grad_(
+ True) # ! p just not updated here
+ eps_t = th.cat([eps_t_p, eps_t_q], dim=0)
+ var_t = th.cat([var_t_p, var_t_q], dim=0)
+ t = th.cat([t_p, t_q], dim=0)
+ noise = th.cat([noise, noise], dim=0)
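+ # the same noise realization is reused for the p half and the q half of the doubled batch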
+ # logsnr = self.sde_diffusion.log_snr(m_t_q, var_t_p)
+ else:
+ eps_t, m_t, var_t, t, g2_t = eps_t_p, m_t_p, var_t_p, t_p, g2_t_p
+
+ # run the diffusion model
+ eps_t.requires_grad_(True) # 2*BS, 12, 16, 16
+ pred_params = self.ddp_model(eps_t, t, **model_kwargs)
+
+ if args.pred_type == 'eps':
+ pred_eps = pred_params
+ elif args.pred_type == 'x0':
+ # ! transform to pred_eps format for mixing_component
+ pred_eps = self.sde_diffusion._predict_eps_from_x0(
+ eps_t, pred_params, logsnr_p)
+ else:
+ raise NotImplementedError(
+ f'{args.pred_type} not implemented.')
+
+ # mixing normal trick
+ mixing_component = self.sde_diffusion.mixing_component(
+ eps_t, var_t, t, enabled=True) # TODO, which should I use?
+ pred_eps = get_mixed_prediction(
+ # True, pred_params,
+ True,
+ pred_eps,
+ self.ddp_model(x=None,
+ timesteps=None,
+ get_attr='mixing_logit'),
+ mixing_component)
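+ # get_mixed_prediction blends the network's eps with the analytic mixing_component via the learned mixing_logit (the LSGM "mixing normal" trick)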
+
+ # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
+ if last_batch or not self.use_ddp:
+ l2_term = th.square(pred_eps - noise)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ l2_term = th.square(pred_eps - noise) # ? weights
+
+ # nelbo loss with kl balancing
+ # ! remaining parts of the cross entropy in likelihood training
+ # unpack separate objectives, in case we want to train q (vae) using a different sampling scheme for times t
+ if args.iw_sample_q in ['ll_uniform',
+ 'll_iw']: # ll_iw by default
+ l2_term_p, l2_term_q = th.chunk(l2_term, chunks=2, dim=0)
+ p_objective = th.mean(obj_weight_t_p * l2_term_p,
+ dim=[1, 2, 3])
+ cross_entropy_per_var = obj_weight_t_q * l2_term_q
+ else:
+ p_objective = th.mean(obj_weight_t_p * l2_term,
+ dim=[1, 2, 3])
+ cross_entropy_per_var = obj_weight_t_q * l2_term
+
+ cross_entropy_per_var += self.sde_diffusion.cross_entropy_const(
+ args.sde_time_eps)
+ all_neg_log_p = [cross_entropy_per_var
+ ] # since only one vae group
+
+ kl_all_list, kl_vals_per_group, kl_diag_list = kl_per_group_vada(
+ all_log_q, all_neg_log_p) # return the mean of two terms
+
+ # nelbo loss with kl balancing
+ balanced_kl, kl_coeffs, kl_vals = kl_balancer(kl_all_list,
+ kl_coeff=1.0,
+ kl_balance=False,
+ alpha_i=None)
+
+ # ! update vae for CE
+ # ! single stage diffusion for rec side 1: bind vae prior and diffusion prior
+ if args.train_vae:
+ # if args.add_rendering_loss:
+ # if args.joint_train:
+ with th.set_grad_enabled(args.train_vae):
+ target = micro
+ pred = self.ddp_rec_model(
+ latent=vae_out,
+ # latent={
+ # **vae_out, self.latent_name: pred_x0,
+ # 'latent_name': self.latent_name
+ # },
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ if last_batch or not self.use_ddp:
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+
+ log_rec3d_loss_dict(loss_dict)
+
+ # ! calculate p/q loss;
+ nelbo_loss = balanced_kl + q_vae_recon_loss
+ q_loss = th.mean(nelbo_loss)
+ p_loss = th.mean(p_objective)
+
+ log_rec3d_loss_dict(
+ dict(
+ q_vae_recon_loss=q_vae_recon_loss,
+ p_loss=p_loss,
+ balanced_kl=balanced_kl,
+ mixing_logit=self.ddp_model(
+ x=None, timesteps=None,
+ get_attr='mixing_logit').detach(),
+ ))
+
+ # ! single stage diffusion for rec side 2: generative feature
+ if args.p_rendering_loss:
+ with th.set_grad_enabled(args.train_vae):
+
+ # ! transform from pred_eps format back to pred_x0, for p only.
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps_t_p, pred_eps[:eps_t_p.shape[0]],
+ logsnr_p) # for VAE loss, denoised latent
+
+ target = micro
+ pred = self.ddp_rec_model(
+ # latent=vae_out,
+ latent={
+ **vae_out, self.latent_name: pred_x0,
+ 'latent_name': self.latent_name
+ },
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ if last_batch or not self.use_ddp:
+ p_vae_recon_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ p_vae_recon_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ log_rec3d_loss_dict(
+ dict(p_vae_recon_loss=p_vae_recon_loss, ))
+
+ # ! backpropagate q_loss for vae and update vae params, if trained
+ if args.train_vae:
+ self.mp_trainer_rec.backward(
+ q_loss,
+ retain_graph=different_p_q_objectives(
+ args.iw_sample_p, args.iw_sample_q))
+
+ # if we use different p and q objectives or are not training the vae, discard gradients and backpropagate p_loss
+ if different_p_q_objectives(
+ args.iw_sample_p, args.iw_sample_q) or not args.train_vae:
+ if args.train_vae:
+ # discard current gradients computed by weighted loss for VAE
+ self.mp_trainer_rec.zero_grad()
+
+ self.mp_trainer.backward(p_loss)
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ if dist_util.get_rank(
+ ) == 0 and self.step % 500 == 0 and behaviour != 'diff':
+
+ with th.no_grad():
+
+ if not args.train_vae:
+                    vae_out.pop('posterior')  # only needed for the KL term, not for rendering
+ vae_out_for_pred = {
+ k: v[0:1].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in vae_out.items()
+ }
+
+ pred = self.ddp_rec_model(
+ latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+ assert pred is not None
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
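+                    # per-batch min-max normalization to [0, 1], for logging only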
+
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=eps_t_p[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ # ! test time, use discrete diffusion model
+ params_p, _ = th.chunk(pred_eps, chunks=2,
+ dim=0) # get predicted noise
+
+                    # TODO, implement for SDE diffusion?
+ # ! two values isclose(rtol=1e-03, atol=1e-04)
+ # pred_xstart = self.diffusion._predict_xstart_from_eps(
+ # x_t=eps_t_p,
+ # t=th.tensor(t_p.detach() *
+ # self.diffusion.num_timesteps).long(),
+ # eps=params_p)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps_t_p, params_p,
+                        logsnr_p)  # for the VAE loss (denoised latent)
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=pred_x0[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
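+                    # map renderings from [-1, 1] to [0, 255] for uint8 export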
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ )
+ del vis, pred_vis, pred_x0, pred_eps, micro, vae_out
+
+ th.cuda.empty_cache()
+
+ # ! copied from train_util.py
+ # TODO, needs to lint the class inheritance chain later.
+ @th.inference_mode()
+ def eval_novelview_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ all_loss_dict = []
+ novel_view_micro = {}
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i == 0:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ pred = self.rec_model(img=novel_view_micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ self.pool_64(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores_novelview.json'),
+ 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
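+            # note: appending one object per eval makes this a stream of
+            # concatenated JSON records rather than a single JSON document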
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/NovelView/{k}', v,
+ self.step + self.resume_step)
+ del video_out, vis, pred_vis, pred, micro
+ th.cuda.empty_cache()
+
+ # @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ @th.inference_mode()
+ def eval_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+ all_loss_dict = []
+ self.rec_model.eval()
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ pred = self.rec_model(img=micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ # if last_batch or not self.use_ddp:
+ # loss, loss_dict = self.loss_class(pred, target)
+ # else:
+ # with self.ddp_model.no_sync(): # type: ignore
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+ # gt_vis = th.cat([micro['img'], micro['img']], dim=-1) # TODO, fail to load depth. range [0, 1]
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(1,2,0).cpu().numpy() # ! pred in range[-1, 1]
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ self.pool_64(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores.json'), 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/Rec/{k}', v,
+ self.step + self.resume_step)
+
+ del video_out, vis, pred_vis, pred, micro
+ th.cuda.empty_cache()
+ self.eval_novelview_loop()
+ self.rec_model.train()
+
+    # for compatibility with p_sample(); to lint
+    def apply_model_inference(self, x_noisy, t, c=None, model_kwargs=None):
+        # control = self.ddp_control_model(x=x_noisy,
+        #                                  hint=th.cat(c['c_concat'], 1),
+        #                                  timesteps=t,
+        #                                  context=None)
+        # control = [c * scale for c, scale in zip(control, self.control_scales)]
+        if model_kwargs is None:  # avoid a shared mutable default argument
+            model_kwargs = {}
+        assert self.sde_diffusion.args.pred_type == 'eps'
+        pred_params = self.ddp_ddpm_model(x_noisy, t, **model_kwargs)
+        # the mixing-normal blend of the prediction with the analytic prior
+        # component is applied inside the sampler (p_sample_loop is invoked
+        # with mixing_normal=True in eval_ddpm_sample below), not here
+        return pred_params
+
+ @th.inference_mode()
+ def eval_ddpm_sample(self):
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=self.diffusion_input_size,
+ denoise_in_channels=self.ddp_rec_model.module.decoder.
+ triplane_decoder.out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+ if args.class_cond:
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+
+ for i in range(1):
+ triplane_sample = sample_fn(
+ # self.ddp_model,
+ self,
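+                # the train loop itself stands in for the model; the sampler is
+                # expected to route its denoiser calls through
+                # apply_model_inference above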
+ (
+ args.batch_size,
+ self.ddp_rec_model.module.decoder.ldm_z_channels *
+ 3, # type: ignore
+ self.diffusion_input_size,
+ self.diffusion_input_size),
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ mixing_normal=True, # !
+ )
+ th.cuda.empty_cache()
+
+ self.render_video_given_triplane(
+ triplane_sample,
+ name_prefix=f'{self.step + self.resume_step}_{i}')
+ # st()
+ del triplane_sample
+ th.cuda.empty_cache()
diff --git a/nsr/lsgm/train_util_diffusion_lsgm_noD_joint.py b/nsr/lsgm/train_util_diffusion_lsgm_noD_joint.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed0d1b77ac908488a7b0af068bdb798be83a2b75
--- /dev/null
+++ b/nsr/lsgm/train_util_diffusion_lsgm_noD_joint.py
@@ -0,0 +1,1376 @@
+"""
+Modified from:
+https://github.com/NVlabs/LSGM/blob/main/training_obj_joint.py
+"""
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+from dnnlib.util import requires_grad
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion, TrainLoopDiffusionWithRec
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+from guided_diffusion.continuous_diffusion_utils import get_mixed_prediction, different_p_q_objectives, kl_per_group_vada, kl_balancer
+# import utils as lsgm_utils
+
+
+class JointDenoiseRecModel(th.nn.Module):
+
+ def __init__(self, ddpm_model, rec_model, diffusion_input_size) -> None:
+ super().__init__()
+ # del ddpm_model
+ # th.cuda.empty_cache()
+ # self.ddpm_model = th.nn.Identity()
+ self.ddpm_model = ddpm_model
+ self.rec_model = rec_model
+
+ self._setup_latent_stat(diffusion_input_size)
+
+    def _setup_latent_stat(self, diffusion_input_size):  # for dynamic EMA tracking.
+        latent_size = (
+            1,
+            self.ddpm_model.in_channels,  # type: ignore
+            diffusion_input_size,
+            diffusion_input_size)
+
+ self.ddpm_model.register_buffer(
+ 'ema_latent_std',
+ th.ones(*latent_size).to(dist_util.dev()), persistent=True)
+ self.ddpm_model.register_buffer(
+ 'ema_latent_mean',
+ th.zeros(*latent_size).to(dist_util.dev()), persistent=True)
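+        # persistent=True: the tracked latent statistics are saved with (and
+        # restored from) checkpoints alongside the model weights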
+
+ # TODO, lint api.
+ def forward(
+ self,
+ *args,
+ model_name='ddpm',
+ **kwargs,
+ ):
+ if model_name == 'ddpm':
+ return self.ddpm_model(*args, **kwargs)
+ elif model_name == 'rec':
+ return self.rec_model(*args, **kwargs)
+ else:
+ raise NotImplementedError(model_name)
+
+
+# TODO, merge with train_util_diffusion.py later
+class SDETrainLoopJoint(TrainLoopDiffusionWithRec):
+ """A dataclass with some required attribtues; copied from guided_diffusion TrainLoop
+ """
+
+ def __init__(
+ self,
+ rec_model,
+ denoise_model,
+ diffusion, # not used
+ sde_diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ triplane_scaling_divider=1,
+ use_amp=False,
+ diffusion_input_size=224,
+ train_vae=False,
+ **kwargs,
+ ) -> None:
+
+ joint_model = JointDenoiseRecModel(denoise_model, rec_model, diffusion_input_size)
+ super().__init__(
+ model=joint_model,
+ diffusion=diffusion, # just for sampling
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ eval_interval=eval_interval,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ use_amp=use_amp,
+ model_name='joint_denoise_rec_model',
+ iterations=iterations,
+ triplane_scaling_divider=triplane_scaling_divider,
+ diffusion_input_size=diffusion_input_size,
+ train_vae=train_vae,
+ **kwargs)
+ self.sde_diffusion = sde_diffusion
+ # setup latent scaling factor
+
+ # ! integrate the init_params_group for rec model
+ def _setup_model(self):
+
+ super()._setup_model()
+ self.ddp_rec_model = functools.partial(self.model, model_name='rec')
+ self.ddp_ddpm_model = functools.partial(self.model, model_name='ddpm')
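+        # the partials route both sub-models through the single DDP-wrapped
+        # joint module, so one gradient-sync path serves joint training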
+
+ self.rec_model = self.ddp_model.module.rec_model
+        self.ddpm_model = self.ddp_model.module.ddpm_model  # compatibility
+
+ # TODO, required?
+ # for param in self.ddp_rec_model.module.decoder.triplane_decoder.parameters( # type: ignore
+ # ): # type: ignore
+ # param.requires_grad_(
+ # False
+ # ) # ! disable triplane_decoder grad in each iteration indepenently;
+
+ def _load_model(self):
+        # TODO, kept for current compatibility
+ if 'joint' in self.resume_checkpoint: # load joint directly
+ self._load_and_sync_parameters(model=self.model, model_name=self.model_name)
+ else: # from scratch
+ self._load_and_sync_parameters(model=self.rec_model, model_name='rec')
+ self._load_and_sync_parameters(model=self.ddpm_model,
+ model_name='ddpm')
+
+ def _setup_opt(self):
+ # TODO, two optims groups.
+ self.opt = AdamW([{
+ 'name': 'ddpm',
+ 'params': self.ddpm_model.parameters(),
+ }],
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+
+ if self.train_vae:
+ for rec_param_group in self._init_optim_groups(self.rec_model):
+ self.opt.add_param_group(rec_param_group)
+
+ print(self.opt)
+
+
+class TrainLoop3DDiffusionLSGMJointnoD(SDETrainLoopJoint):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ sde_diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ resume_cldm_checkpoint=None,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ triplane_scaling_divider=1,
+ use_amp=False,
+ diffusion_input_size=224,
+ diffusion_ce_anneal=False,
+ # compile=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ **kwargs)
+
+ if sde_diffusion is not None:
+ sde_diffusion.args.batch_size = batch_size
+ self.latent_name = 'latent_normalized_2Ddiffusion' # normalized triplane latent
+ self.render_latent_behaviour = 'decode_after_vae' # directly render using triplane operations
+ self.diffusion_ce_anneal = diffusion_ce_anneal
+ # assert sde_diffusion.args.train_vae
+
+ def prepare_ddpm(self, eps, mode='p'):
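+        """Diffuse the clean latent ``eps`` to a sampled time t and pack the
+        quantities (noisy latent, logsnr, objective weights) required by the
+        p- (sgm prior) or q- (vae) weighted DDPM objective."""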
+
+        log_rec3d_loss_dict({
+            'eps_mean': eps.mean(),
+            'eps_std': eps.std([1, 2, 3]).mean(0),
+            'eps_max': eps.max()
+        })
+
+ args = self.sde_diffusion.args
+ # sample noise
+ noise = th.randn(size=eps.size(), device=eps.device
+ ) # note that this noise value is currently shared!
+
+ # get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ if mode == 'p':
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_p, noise.shape[0]) # TODO, q not used, fall back to original ddpm implementation
+ else:
+ assert mode == 'q'
+ # assert args.iw_sample_q in ['ll_uniform', 'll_iw']
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_q, noise.shape[0]) # TODO, q not used, fall back to original ddpm implementation
+ eps_t_p = self.sde_diffusion.sample_q(eps, noise, var_t_p, m_t_p)
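+        # sample_q diffuses the clean latent:
+        #   eps_t = m_t * eps + sqrt(var_t) * noise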
+ # ! important
+ # eps_t_p = eps_t_p.detach().requires_grad_(True)
+ # logsnr_p = self.sde_diffusion.log_snr(m_t_p,
+ # var_t_p) # for p only
+ logsnr_p = self.sde_diffusion.log_snr(m_t_p, var_t_p) # for p only
+
+ return {
+ 'noise': noise,
+ 't_p': t_p,
+ 'eps_t_p': eps_t_p,
+ 'logsnr_p': logsnr_p,
+ 'obj_weight_t_p': obj_weight_t_p,
+ 'var_t_p': var_t_p,
+ 'm_t_p': m_t_p,
+ 'eps': eps,
+ 'mode': mode
+ }
+
+ # merged from noD.py
+
+ def ce_weight(self):
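+        # lambda on the cross-entropy (prior-matching) term; applied to the
+        # q-mode objective in apply_model below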
+ return self.loss_class.opt.ce_lambda
+
+ def apply_model(self, p_sample_batch, **model_kwargs):
+ args = self.sde_diffusion.args
+ # args = self.sde_diffusion.args
+ noise, eps_t_p, t_p, logsnr_p, obj_weight_t_p, var_t_p, m_t_p = (
+ p_sample_batch[k] for k in ('noise', 'eps_t_p', 't_p', 'logsnr_p',
+ 'obj_weight_t_p', 'var_t_p', 'm_t_p'))
+
+ pred_eps_p, pred_x0_p = self.ddpm_step(eps_t_p, t_p, logsnr_p, var_t_p, m_t_p,
+ **model_kwargs)
+
+ # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
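+        #   since x0_hat - x0 = -(sigma_t / alpha_t) * (eps_hat - eps):
+        #   ||eps_hat - eps||^2 = SNR(t) * ||x0_hat - x0||^2, SNR(t) = alpha_t^2 / sigma_t^2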
+ with self.ddp_model.no_sync(): # type: ignore
+ if args.loss_type == 'eps':
+ l2_term_p = th.square(pred_eps_p - noise) # ? weights
+ elif args.loss_type == 'x0':
+ # l2_term_p = th.square(pred_eps_p - p_sample_batch['eps']) # ? weights
+ l2_term_p = th.square(
+ pred_x0_p - p_sample_batch['eps'].detach()) # ? weights
+ # if args.loss_weight == 'snr':
+ # obj_weight_t_p = th.sigmoid(th.exp(logsnr_p))
+ else:
+ raise NotImplementedError(args.loss_type)
+
+ # p_eps_objective = th.mean(obj_weight_t_p * l2_term_p)
+ p_eps_objective = obj_weight_t_p * l2_term_p
+
+ if p_sample_batch['mode'] == 'q':
+ ce_weight = self.ce_weight()
+ p_eps_objective = p_eps_objective * ce_weight
+
+ log_rec3d_loss_dict({
+ 'ce_weight': ce_weight,
+ })
+
+
+ log_rec3d_loss_dict({
+ f"{p_sample_batch['mode']}_loss":
+ p_eps_objective.mean(),
+ })
+ if self.ddpm_model.mixed_prediction:
+ log_rec3d_loss_dict({
+ 'mixing_logit':
+ self.ddp_ddpm_model(x=None,
+ timesteps=None,
+ get_attr='mixing_logit').detach(),})
+
+ return {
+ 'pred_eps_p': pred_eps_p,
+ 'eps_t_p': eps_t_p,
+ 'p_eps_objective': p_eps_objective,
+ 'pred_x0_p': pred_x0_p,
+ 'logsnr_p': logsnr_p
+ }
+
+ def ddpm_step(self, eps_t, t, logsnr, var_t, m_t, **model_kwargs):
+ """helper function for ddpm predictions; returns predicted eps, x0 and logsnr.
+
+ args notes:
+ eps_t is x_noisy
+ """
+ args = self.sde_diffusion.args
+ pred_params = self.ddp_ddpm_model(x=eps_t, timesteps=t, **model_kwargs)
+ # logsnr = self.sde_diffusion.log_snr(m_t, var_t) # for p only
+ if args.pred_type in ['eps', 'v']:
+ if args.pred_type == 'v':
+ pred_eps = self.sde_diffusion._predict_eps_from_z_and_v(
+ pred_params, var_t, eps_t, m_t
+ )
+ # pred_x0 = self.sde_diffusion._predict_x0_from_z_and_v(
+ # pred_params, var_t, eps_t, m_t) # ! verified
+ else:
+ pred_eps = pred_params
+
+ # mixing normal trick
+
+ if self.ddpm_model.mixed_prediction:
+ mixing_component = self.sde_diffusion.mixing_component(
+ eps_t, var_t, t, enabled=True) # z_t * sigma_t
+ pred_eps = get_mixed_prediction(
+ True, pred_eps,
+ self.ddp_ddpm_model(x=None,
+ timesteps=None,
+ get_attr='mixing_logit'), mixing_component)
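+                # i.e. eps_hat = (1 - sigmoid(mixing_logit)) * mixing_component
+                #              + sigmoid(mixing_logit) * raw network prediction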
+
+            pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+                eps_t, pred_eps, logsnr)  # for the VAE loss (denoised latent)
+ elif args.pred_type == 'x0':
+ # ! pred_x0_mixed = alpha * pred_x0 + (1-alpha) * z_t * alpha_t
+ pred_x0 = pred_params # how to mix?
+
+ # mixing normal trick
+ mixing_component = self.sde_diffusion.mixing_component_x0(
+ eps_t, var_t, t, enabled=True) # z_t * alpha_t
+ pred_x0 = get_mixed_prediction(
+ True, pred_x0,
+ self.ddp_ddpm_model(x=None,
+ timesteps=None,
+ get_attr='mixing_logit'), mixing_component)
+
+ pred_eps = self.sde_diffusion._predict_eps_from_x0(
+ eps_t, pred_x0, logsnr)
+ else:
+ raise NotImplementedError(f'{args.pred_type} not implemented.')
+
+        log_rec3d_loss_dict({
+            'pred_x0_mean': pred_x0.mean(),
+            'pred_x0_std': pred_x0.std([1, 2, 3]).mean(0),
+            'pred_x0_max': pred_x0.max(),
+        })
+
+ return pred_eps, pred_x0
+
+ def ddpm_loss(self, noise, pred_eps, last_batch):
+
+ # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
+ if last_batch or not self.use_ddp:
+ l2_term = th.square(pred_eps - noise)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ l2_term = th.square(pred_eps - noise) # ? weights
+ return l2_term
+
+ def run_step(self, batch, step='diffusion_step_rec'):
+
+ if step == 'ce_ddpm_step':
+ self.ce_ddpm_step(batch)
+ elif step == 'p_rendering_step':
+ self.p_rendering_step(batch)
+
+ elif step == 'eps_step':
+ self.eps_step(batch)
+
+ # ! both took ddpm step
+ self._update_ema()
+
+ self._anneal_lr()
+ self.log_step()
+
+ @th.inference_mode()
+ def _post_run_loop(self):
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ # if self.step % self.eval_interval == 0:
+ # if dist_util.get_rank() == 0:
+ # self.eval_ddpm_sample(
+ # self.rec_model,
+ # # self.ddpm_model
+ # ) # ! only support single GPU inference now.
+ # if self.sde_diffusion.args.train_vae:
+ # self.eval_loop(self.ddp_rec_model)
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank() == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_ddpm_sample(self.ddp_rec_model)
+ if self.sde_diffusion.args.train_vae:
+ self.eval_loop(self.ddp_rec_model)
+
+ if self.step % self.save_interval == 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ exit()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ batch = next(self.data)
+ self.run_step(batch, step='ce_ddpm_step')
+
+ self._post_run_loop()
+
+ # batch = next(self.data)
+ # self.run_step(batch, step='p_rendering_step')
+
+    def ce_ddpm_step(self, batch, behaviour='rec', *args, **kwargs):
+        """
+        Joint step: train the VAE with its reconstruction (CE) objective and
+        take a DDPM prior step on the diffused latent.
+        """
+ args = self.sde_diffusion.args
+ assert args.train_vae
+
+ requires_grad(self.rec_model, args.train_vae)
+ requires_grad(self.ddpm_model, True)
+
+ # TODO merge?
+ self.mp_trainer.zero_grad()
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ q_vae_recon_loss = th.tensor(0.0).to(dist_util.dev())
+ # vision_aided_loss = th.tensor(0.0).to(dist_util.dev())
+ # denoise_loss = th.tensor(0.0).to(dist_util.dev())
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ # ! part 1: train vae with CE; ddpm fixed
+ # ! TODO, add KL_all_list? vae.decompose
+ with th.set_grad_enabled(args.train_vae):
+ # vae_out = self.ddp_rec_model(
+ # img=micro['img_to_encoder'],
+ # c=micro['c'],
+ # behaviour='encoder_vae',
+ # ) # pred: (B, 3, 64, 64)
+ # TODO, no need to render if not SSD; no need to do ViT decoder if only the latent is needed. update later
+ # if args.train_vae:
+ # if args.add_rendering_loss:
+ # if args.joint_train:
+ # with th.set_grad_enabled(args.train_vae):
+ pred = self.ddp_rec_model(
+ # latent=vae_out,
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ )
+ # behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ if last_batch or not self.use_ddp:
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ pred, micro, test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ q_vae_recon_loss, loss_dict = self.loss_class(
+ pred, micro, test_mode=False)
+
+ log_rec3d_loss_dict(loss_dict)
+
+ # ! calculate p/q loss;
+ # nelbo_loss = balanced_kl * self.loss_class.opt.ce_balanced_kl + q_vae_recon_loss
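+                # with the KL term disabled in this step, the "nelbo" reduces
+                # to the VAE reconstruction loss alone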
+ nelbo_loss = q_vae_recon_loss
+ q_loss = th.mean(nelbo_loss)
+
+ # st()
+
+ # all_log_q = [vae_out['log_q_2Ddiffusion']]
+ # eps = vae_out[self.latent_name]
+ # all_log_q = [pred['log_q_2Ddiffusion']]
+ eps = pred[self.latent_name]
+
+ if not args.train_vae:
+ eps.requires_grad_(True) # single stage diffusion
+
+ # sample noise
+ noise = th.randn(
+ size=eps.size(), device=eps.device
+ ) # note that this noise value is currently shared!
+
+ # in case we want to train q (vae) with another batch using a different sampling scheme for times t
+ '''
+ assert args.iw_sample_q in ['ll_uniform', 'll_iw']
+ t_q, var_t_q, m_t_q, obj_weight_t_q, _, g2_t_q = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_q)
+ eps_t_q = self.sde_diffusion.sample_q(eps, noise, var_t_q,
+ m_t_q)
+
+ # eps_t = th.cat([eps_t_p, eps_t_q], dim=0)
+ # var_t = th.cat([var_t_p, var_t_q], dim=0)
+ # t = th.cat([t_p, t_q], dim=0)
+ # noise = th.cat([noise, noise], dim=0)
+
+ # run the diffusion model
+ if not args.train_vae:
+ eps_t_q.requires_grad_(True) # 2*BS, 12, 16, 16
+
+ # ! For CE guidance.
+ requires_grad(self.ddpm_model_module, False)
+ pred_eps_q, _, _ = self.ddpm_step(eps_t_q, t_q, m_t_q, var_t_q)
+
+ l2_term_q = self.ddpm_loss(noise, pred_eps_q, last_batch)
+
+ # pred_eps = th.cat([pred_eps_p, pred_eps_q], dim=0) # p then q
+
+        # CE: nelbo loss with kl balancing
+ assert args.iw_sample_q in ['ll_uniform', 'll_iw']
+ # l2_term_p, l2_term_q = th.chunk(l2_term, chunks=2, dim=0)
+ cross_entropy_per_var = obj_weight_t_q * l2_term_q
+
+ cross_entropy_per_var += self.sde_diffusion.cross_entropy_const(
+ args.sde_time_eps)
+ all_neg_log_p = [cross_entropy_per_var
+ ] # since only one vae group
+
+ kl_all_list, kl_vals_per_group, kl_diag_list = kl_per_group_vada(
+ all_log_q, all_neg_log_p) # return the mean of two terms
+
+ # nelbo loss with kl balancing
+ balanced_kl, kl_coeffs, kl_vals = kl_balancer(kl_all_list,
+ kl_coeff=1.0,
+ kl_balance=False,
+ alpha_i=None)
+ # st()
+
+ log_rec3d_loss_dict(
+ dict(
+ balanced_kl=balanced_kl,
+ l2_term_q=l2_term_q,
+ cross_entropy_per_var=cross_entropy_per_var.mean(),
+ all_log_q=all_log_q[0].mean(),
+ ))
+
+
+ '''
+ # ! update vae for CE
+ # ! single stage diffusion for rec side 1: bind vae prior and diffusion prior
+
+ # ! BP for CE and VAE; quit the AMP context.
+ # if args.train_vae:
+ # self.mp_trainer.backward(q_loss)
+ # _ = self.mp_trainer.optimize(self.opt)
+ # retain_graph=different_p_q_objectives(
+ # args.iw_sample_p,
+ # args.iw_sample_q))
+
+ log_rec3d_loss_dict(
+ dict(q_vae_recon_loss=q_vae_recon_loss,
+ # all_log_q=all_log_q[0].mean(),
+ ))
+
+ # ! adding p loss; enable ddpm gradient
+ # self.mp_trainer.zero_grad()
+ # requires_grad(self.rec_model_module,
+ # False) # could be removed since eps_t_p.detach()
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ # first get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_p)
+ eps_t_p = self.sde_diffusion.sample_q(eps, noise, var_t_p,
+ m_t_p)
+ eps_t_p = eps_t_p.detach(
+ ) # .requires_grad_(True) # ! update ddpm not rec module
+
+ pred_eps_p, _, = self.ddpm_step(eps_t_p, t_p, m_t_p, var_t_p)
+ l2_term_p = self.ddpm_loss(noise, pred_eps_p, last_batch)
+ p_loss = th.mean(obj_weight_t_p * l2_term_p)
+
+ # ! update ddpm
+ self.mp_trainer.backward(p_loss +
+ q_loss) # just backward for p_loss
+ _ = self.mp_trainer.optimize(self.opt)
+ # requires_grad(self.rec_model_module, True)
+
+ log_rec3d_loss_dict(
+ dict(
+ p_loss=p_loss,
+ mixing_logit=self.ddp_ddpm_model(
+ x=None, timesteps=None,
+ get_attr='mixing_logit').detach(),
+ ))
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ # ! todo, wrap in a single function
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+
+ with th.no_grad():
+
+                    # this step keeps no separate `vae_out`: `pred` already
+                    # carries the VAE outputs, so slice it for visualization and
+                    # drop the `posterior` entry (only needed for the KL term)
+                    vae_out_for_pred = {
+                        k:
+                        v[0:1].to(dist_util.dev()) if isinstance(
+                            v, th.Tensor) else v
+                        for k, v in pred.items() if k != 'posterior'
+                    }
+
+ pred = self.ddp_rec_model(
+ latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+ assert pred is not None
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img,
+ # micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat([
+ pred_img[0:1], pred_depth[0:1].repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ f'{logger.get_dir()}/{self.step+self.resume_step}_{behaviour}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_{behaviour}.jpg'
+ )
+
+ th.cuda.empty_cache()
+
+ def eps_step(self, batch, behaviour='rec', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+ args = self.sde_diffusion.args
+
+        requires_grad(self.ddpm_model, True)
+        requires_grad(self.rec_model, False)
+
+ # TODO?
+ # if args.train_vae:
+ # for param in self.ddp_rec_model.module.decoder.triplane_decoder.parameters( # type: ignore
+ # ): # type: ignore
+ # param.requires_grad_(
+ # False
+ # ) # ! disable triplane_decoder grad in each iteration indepenently;
+
+ self.mp_trainer.zero_grad()
+
+ # assert args.train_vae
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+ # and args.train_vae):
+
+ # ! part 1: train vae with CE; ddpm fixed
+ # ! TODO, add KL_all_list? vae.decompose
+
+ with th.set_grad_enabled(args.train_vae):
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+ eps = vae_out[self.latent_name]
+
+ # sample noise
+ noise = th.randn(
+ size=eps.size(), device=eps.device
+ ) # note that this noise value is currently shared!
+
+ # get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_p)
+ eps_t_p = self.sde_diffusion.sample_q(eps, noise, var_t_p,
+ m_t_p)
+ logsnr_p = self.sde_diffusion.log_snr(m_t_p,
+ var_t_p) # for p only
+
+                pred_eps_p, pred_x0_p = self.ddpm_step(
+                    eps_t_p, t_p, logsnr_p, var_t_p, m_t_p)
+
+                # ddpm_step already applies the mixing-normal trick internally
+                # (when mixed_prediction is on), so no additional
+                # get_mixed_prediction call is needed here
+
+ # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
+ if last_batch or not self.use_ddp:
+ l2_term_p = th.square(pred_eps_p - noise)
+ else:
+                    with self.ddp_model.no_sync():  # type: ignore
+ l2_term_p = th.square(pred_eps_p - noise) # ? weights
+
+ p_eps_objective = th.mean(
+ obj_weight_t_p *
+ l2_term_p) * self.loss_class.opt.p_eps_lambda
+
+ log_rec3d_loss_dict(
+ dict(mixing_logit=self.ddp_ddpm_model(
+ x=None, timesteps=None,
+ get_attr='mixing_logit').detach(), ))
+
+ # =====================================================================
+ # ! single stage diffusion for rec side 2: generative feature
+ # if args.p_rendering_loss:
+ # target = micro
+ # pred = self.ddp_rec_model(
+ # # latent=vae_out,
+ # latent={
+ # **vae_out, self.latent_name: pred_x0_p,
+ # 'latent_name': self.latent_name
+ # },
+ # c=micro['c'],
+ # behaviour=self.render_latent_behaviour)
+
+ # # vae reconstruction loss
+ # if last_batch or not self.use_ddp:
+ # p_vae_recon_loss, _ = self.loss_class(pred,
+ # target,
+ # test_mode=False)
+ # else:
+ # with self.ddp_model.no_sync(): # type: ignore
+ # p_vae_recon_loss, _ = self.loss_class(
+ # pred, target, test_mode=False)
+ # log_rec3d_loss_dict(
+ # dict(p_vae_recon_loss=p_vae_recon_loss, ))
+ # p_loss = p_eps_objective + p_vae_recon_loss
+ # else:
+ p_loss = p_eps_objective
+
+ log_rec3d_loss_dict(
+ dict(p_loss=p_loss, p_eps_objective=p_eps_objective))
+
+ # ! to arrange: update vae params
+
+ self.mp_trainer.backward(p_loss)
+
+ # update ddpm accordingly
+ _ = self.mp_trainer.optimize(
+ self.opt) # TODO, update two groups of parameters
+
+ # TODO, merge visualization with original AE
+ # ! todo, merge required
+ # =================================== denoised AE log part ===================================
+ if dist_util.get_rank(
+ ) == 0 and self.step % 500 == 0 and behaviour != 'diff':
+
+ with th.no_grad():
+
+                    vae_out.pop('posterior')  # only needed for the KL term, not for rendering
+ vae_out_for_pred = {
+ k:
+ v[0:1].to(dist_util.dev())
+ if isinstance(v, th.Tensor) else v
+ for k, v in vae_out.items()
+ }
+
+ pred = self.ddp_rec_model(
+ latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=eps_t_p[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps_t_p, pred_eps_p,
+                        logsnr_p)  # for the VAE loss (denoised latent)
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=pred_x0[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}_{behaviour}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}_{behaviour}.jpg'
+ )
+ del vis, pred_vis, pred_x0, pred_eps_p, micro, vae_out
+
+ th.cuda.empty_cache()
+
+ def p_rendering_step(self, batch, behaviour='rec', *args, **kwargs):
+ """
+ add sds grad to all ae predicted x_0
+ """
+ args = self.sde_diffusion.args
+
+ requires_grad(self.ddpm_model, True)
+ requires_grad(self.rec_model, args.train_vae)
+
+ # TODO?
+ # if args.train_vae:
+ # for param in self.ddp_rec_model.module.decoder.triplane_decoder.parameters( # type: ignore
+ # ): # type: ignore
+ # param.requires_grad_(
+ # False
+ # ) # ! disable triplane_decoder grad in each iteration indepenently;
+
+ self.mp_trainer.zero_grad()
+
+ assert args.train_vae
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+ # and args.train_vae):
+
+ # ! part 1: train vae with CE; ddpm fixed
+ # ! TODO, add KL_all_list? vae.decompose
+
+ with th.set_grad_enabled(args.train_vae):
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='encoder_vae',
+ ) # pred: (B, 3, 64, 64)
+ eps = vae_out[self.latent_name]
+
+ # sample noise
+ noise = th.randn(
+ size=eps.size(), device=eps.device
+ ) # note that this noise value is currently shared!
+
+ # get diffusion quantities for p (sgm prior) sampling scheme and reweighting for q (vae)
+ t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+ self.sde_diffusion.iw_quantities(args.iw_sample_p)
+ eps_t_p = self.sde_diffusion.sample_q(eps, noise, var_t_p,
+ m_t_p)
+ logsnr_p = self.sde_diffusion.log_snr(m_t_p,
+ var_t_p) # for p only
+
+ # pred_eps_p, pred_x0_p, logsnr_p = self.ddpm_step(
+ pred_eps_p, pred_x0_p = self.ddpm_step(eps_t_p, t_p, logsnr_p,
+ var_t_p)
+ # eps_t_p, t_p, m_t_p, var_t_p)
+
+ # ! batchify for mixing_component
+ # mixing normal trick
+ # mixing_component = self.sde_diffusion.mixing_component(
+ # eps_t_p, var_t_p, t_p,
+ # enabled=True) # TODO, which should I use?
+ # pred_eps_p = get_mixed_prediction(
+ # True, pred_eps_p,
+ # self.ddp_ddpm_model(x=None,
+ # timesteps=None,
+ # get_attr='mixing_logit'),
+ # mixing_component)
+
+ # ! eps loss equivalent to snr weighting of x0 loss, see "progressive distillation"
+ if last_batch or not self.use_ddp:
+ l2_term_p = th.square(pred_eps_p - noise)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ l2_term_p = th.square(pred_eps_p - noise) # ? weights
+
+ p_eps_objective = th.mean(obj_weight_t_p * l2_term_p)
+ # st()
+
+ log_rec3d_loss_dict(
+ dict(mixing_logit=self.ddp_ddpm_model(
+ x=None, timesteps=None,
+ get_attr='mixing_logit').detach(), ))
+
+ # =====================================================================
+ # ! single stage diffusion for rec side 2: generative feature
+ if args.p_rendering_loss:
+ target = micro
+ pred = self.ddp_rec_model( # re-render
+ latent={
+ **vae_out, self.latent_name: pred_x0_p,
+ 'latent_name': self.latent_name
+ },
+ c=micro['c'],
+ behaviour=self.render_latent_behaviour)
+
+ # vae reconstruction loss
+ if last_batch or not self.use_ddp:
+ pred[self.latent_name] = vae_out[self.latent_name]
+ pred[
+ 'latent_name'] = self.latent_name # just for stats
+ p_vae_recon_loss, rec_loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ p_vae_recon_loss, rec_loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+ log_rec3d_loss_dict(
+ dict(p_vae_recon_loss=p_vae_recon_loss, ))
+
+ for key in rec_loss_dict.keys():
+ if 'latent' in key:
+ log_rec3d_loss_dict({key: rec_loss_dict[key]})
+
+ p_loss = p_eps_objective + p_vae_recon_loss
+ else:
+ p_loss = p_eps_objective
+
+ log_rec3d_loss_dict(
+ dict(p_loss=p_loss, p_eps_objective=p_eps_objective))
+
+ # ! to arrange: update vae params
+
+ self.mp_trainer.backward(p_loss)
+
+ # update ddpm accordingly
+ _ = self.mp_trainer.optimize(
+ self.opt) # TODO, update two groups of parameters
+
+ # TODO, merge visualization with original AE
+ # ! todo, merge required
+ # =================================== denoised AE log part ===================================
+ if dist_util.get_rank(
+ ) == 0 and self.step % 500 == 0 and behaviour != 'diff':
+
+ with th.no_grad():
+
+                    vae_out.pop('posterior')  # only needed for the KL term, not for rendering
+ vae_out_for_pred = {
+ k:
+ v[0:1].to(dist_util.dev())
+ if isinstance(v, th.Tensor) else v
+ for k, v in vae_out.items()
+ }
+
+ pred = self.ddp_rec_model(
+ latent=vae_out_for_pred,
+ c=micro['c'][0:1],
+ behaviour=self.render_latent_behaviour)
+ assert isinstance(pred, dict)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+
+ if 'image_depth' in pred:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ # eps_t_p_3D = eps_t_p.reshape(batch_size, eps_t_p.shape[1]//3, 3, -1) # B C 3 L
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=eps_t_p[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ pred_x0 = self.sde_diffusion._predict_x0_from_eps(
+ eps_t_p, pred_eps_p,
+                        logsnr_p)  # for the VAE loss (denoised latent)
+
+ # pred_xstart_3D
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=pred_x0[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}_{behaviour}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t_p[0].item():3}_{behaviour}.jpg'
+ )
+ del vis, pred_vis, pred_x0, pred_eps_p, micro, vae_out
+
+ th.cuda.empty_cache()
+
+
+class TrainLoop3DDiffusionLSGMJointnoD_ponly(TrainLoop3DDiffusionLSGMJointnoD):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ sde_diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ triplane_scaling_divider=1,
+ use_amp=False,
+ diffusion_input_size=224,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ **kwargs)
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ self._post_run_loop()
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch = next(self.data)
+ # self.run_step(batch, step='ce_ddpm_step')
+
+ batch = next(self.data)
+ self.run_step(batch, step='p_rendering_step')
+ # self.run_step(batch, step='eps_step')
diff --git a/nsr/lsgm/train_util_diffusion_vpsde.py b/nsr/lsgm/train_util_diffusion_vpsde.py
new file mode 100644
index 0000000000000000000000000000000000000000..bea22ff78941b8ce09eae3eb0a71b642ebd036de
--- /dev/null
+++ b/nsr/lsgm/train_util_diffusion_vpsde.py
@@ -0,0 +1,583 @@
+"""
+Modified from:
+https://github.com/NVlabs/LSGM/blob/main/training_obj_joint.py
+"""
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from typing import Any
+
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+from guided_diffusion.gaussian_diffusion import ModelMeanType
+
+import dnnlib
+from dnnlib.util import calculate_adaptive_weight
+
+from ..train_util_diffusion import TrainLoop3DDiffusion
+from ..cvD.nvsD_canoD import TrainLoop3DcvD_nvsD_canoD
+
+
+class TrainLoop3DDiffusion_vpsde(TrainLoop3DDiffusion,
+                                 TrainLoop3DcvD_nvsD_canoD):
+
+    def __init__(self, *, rec_model, denoise_model, diffusion, loss_class,
+                 data, eval_data, batch_size, microbatch, lr, ema_rate,
+                 log_interval, eval_interval, save_interval, resume_checkpoint,
+                 use_fp16=False, fp16_scale_growth=0.001, schedule_sampler=None,
+                 weight_decay=0, lr_anneal_steps=0, iterations=10001,
+                 ignore_resume_opt=False, freeze_ae=False, denoised_ae=True,
+                 triplane_scaling_divider=10, use_amp=False,
+                 diffusion_input_size=224, **kwargs):
+        super().__init__(rec_model=rec_model, denoise_model=denoise_model,
+                         diffusion=diffusion, loss_class=loss_class, data=data,
+                         eval_data=eval_data, batch_size=batch_size,
+                         microbatch=microbatch, lr=lr, ema_rate=ema_rate,
+                         log_interval=log_interval, eval_interval=eval_interval,
+                         save_interval=save_interval,
+                         resume_checkpoint=resume_checkpoint, use_fp16=use_fp16,
+                         fp16_scale_growth=fp16_scale_growth,
+                         schedule_sampler=schedule_sampler,
+                         weight_decay=weight_decay,
+                         lr_anneal_steps=lr_anneal_steps, iterations=iterations,
+                         ignore_resume_opt=ignore_resume_opt,
+                         freeze_ae=freeze_ae, denoised_ae=denoised_ae,
+                         triplane_scaling_divider=triplane_scaling_divider,
+                         use_amp=use_amp,
+                         diffusion_input_size=diffusion_input_size, **kwargs)
+
+ def run_step(self, batch, step='g_step'):
+
+ if step == 'diffusion_step_rec':
+ self.forward_diffusion(batch, behaviour='diffusion_step_rec')
+ _ = self.mp_trainer_rec.optimize(self.opt_rec) # TODO, update two groups of parameters
+ took_step_ddpm = self.mp_trainer.optimize(self.opt) # TODO, update two groups of parameters
+
+ if took_step_ddpm:
+ self._update_ema() # g_ema # TODO, ema only needs to track ddpm, remove ema tracking in rec
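+                # EMA advances only when the mixed-precision trainer actually
+                # applied an optimizer step (i.e., it was not skipped due to a
+                # gradient-scaler overflow)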
+
+ elif step == 'd_step_rec':
+ self.forward_D(batch, behaviour='rec')
+ # _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+ _ = self.mp_trainer_canonical_cvD.optimize(self.opt_cano_cvD)
+
+ elif step == 'diffusion_step_nvs':
+ self.forward_diffusion(batch, behaviour='diffusion_step_nvs')
+ _ = self.mp_trainer_rec.optimize(self.opt_rec) # TODO, update two groups of parameters
+ took_step_ddpm = self.mp_trainer.optimize(self.opt) # TODO, update two groups of parameters
+
+ if took_step_ddpm:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step_nvs':
+ self.forward_D(batch, behaviour='nvs')
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ # batch = next(self.data)
+ # self.run_step(batch, 'g_step_rec')
+
+ batch = next(self.data)
+ self.run_step(batch, step='diffusion_step_rec')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_rec')
+
+ # batch = next(self.data)
+ # self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, step='diffusion_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step_nvs')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save(self.mp_trainer, self.mp_trainer.model_name)
+ self.save(self.mp_trainer_rec, self.mp_trainer_rec.model_name)
+ self.save(self.mp_trainer_cvD, 'cvD')
+ self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+            if self.step > self.iterations:
+                print('reached maximum iterations, exiting')
+
+                # Save the last checkpoint if it wasn't already saved.
+                if (self.step - 1) % self.save_interval != 0:
+                    self.save(self.mp_trainer, self.mp_trainer.model_name)
+                    self.save(self.mp_trainer_rec, self.mp_trainer_rec.model_name)
+                    self.save(self.mp_trainer_cvD, 'cvD')
+                    self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+                exit()
+
+        # Save the last checkpoint if it wasn't already saved.
+        if (self.step - 1) % self.save_interval != 0:
+            self.save(self.mp_trainer, self.mp_trainer.model_name)
+            self.save(self.mp_trainer_rec, self.mp_trainer_rec.model_name)
+            self.save(self.mp_trainer_cvD, 'cvD')
+            self.save(self.mp_trainer_canonical_cvD, 'cano_cvD')
+
+    def forward_diffusion(self, batch, behaviour='rec', *args, **kwargs):
+        """
+        Joint VAE + diffusion step; the SDS-style denoiser gradient is added
+        to every AE-predicted x_0.
+        """
+
+        self.ddp_cano_cvD.requires_grad_(False)
+        self.ddp_nvs_cvD.requires_grad_(False)
+
+        self.ddp_model.requires_grad_(True)
+        self.ddp_rec_model.requires_grad_(True)
+
+        # ! Disable the triplane_decoder grads in each iteration independently;
+        # only the remaining parts of the reconstruction model are updated here.
+        for param in self.ddp_rec_model.module.decoder.triplane_decoder.parameters():  # type: ignore
+            param.requires_grad_(False)
+
+        self.mp_trainer_rec.zero_grad()
+        self.mp_trainer.zero_grad()
+
+        # ! There is no separate 'sds' step any more; both branches below add
+        # the sds grad back to the ViT.
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ vae_nelbo_loss = th.tensor(0.0).to(dist_util.dev())
+ vision_aided_loss = th.tensor(0.0).to(dist_util.dev())
+ denoise_loss = th.tensor(0.0).to(dist_util.dev())
+ d_weight = th.tensor(0.0).to(dist_util.dev())
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp
+ and not self.freeze_ae):
+
+ # apply vae
+ vae_out = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='enc_dec_wo_triplane') # pred: (B, 3, 64, 64)
+
+
+ if behaviour == 'diffusion_step_rec':
+ target = micro
+ pred = self.ddp_rec_model(latent=vae_out,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ # vae reconstruction loss
+ if last_batch or not self.use_ddp:
+ vae_nelbo_loss, loss_dict = self.loss_class(pred,
+ target,
+ test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ vae_nelbo_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+
+ last_layer = self.ddp_rec_model.module.decoder.triplane_decoder.decoder.net[ # type: ignore
+ -1].weight # type: ignore
+
+ if 'image_sr' in pred:
+ vision_aided_loss = self.ddp_cano_cvD(
+ 0.5 * pred['image_sr'] +
+ 0.5 * th.nn.functional.interpolate(
+ pred['image_raw'],
+ size=pred['image_sr'].shape[2:],
+ mode='bilinear'),
+ for_G=True).mean() # [B, 1] shape
+ else:
+ vision_aided_loss = self.ddp_cano_cvD(
+ pred['image_raw'], for_G=True
+ ).mean(
+ ) # [B, 1] shape
+
+                        d_weight = calculate_adaptive_weight(
+                            vae_nelbo_loss,
+                            vision_aided_loss,
+                            last_layer,
+                            disc_weight_max=1) * self.loss_class.opt.rec_cvD_lambda
+
+ vision_aided_loss *= d_weight
+
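+                        # `calculate_adaptive_weight` follows the VQGAN heuristic:
+                        # balance the discriminator gradient against the NLL
+                        # gradient at the decoder's last layer, clamped to
+                        # disc_weight_max. A minimal sketch of that heuristic
+                        # (assuming the usual autograd formulation):
+                        #
+                        #   nll_grad = th.autograd.grad(vae_nelbo_loss, last_layer, retain_graph=True)[0]
+                        #   g_grad = th.autograd.grad(vision_aided_loss, last_layer, retain_graph=True)[0]
+                        #   d_weight = (nll_grad.norm() / (g_grad.norm() + 1e-4)).clamp(max=disc_weight_max).detach()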
+ loss_dict.update({
+ 'vision_aided_loss/G_rec':
+ vision_aided_loss,
+ 'd_weight_G_rec':
+ d_weight,
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ elif behaviour == 'diffusion_step_nvs':
+
+ novel_view_c = th.cat([micro['c'][1:], micro['c'][:1]])
+
+ pred = self.ddp_rec_model(latent=vae_out,
+ c=novel_view_c,
+ behaviour='triplane_dec')
+
+ if 'image_sr' in pred:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ 0.5 * pred['image_sr'] +
+ 0.5 * th.nn.functional.interpolate(
+ pred['image_raw'],
+ size=pred['image_sr'].shape[2:],
+ mode='bilinear'),
+ for_G=True).mean() # [B, 1] shape
+ else:
+ vision_aided_loss = self.ddp_nvs_cvD(
+ pred['image_raw'], for_G=True
+ ).mean(
+ ) # [B, 1] shape
+
+ d_weight = self.loss_class.opt.nvs_cvD_lambda
+ vision_aided_loss *= d_weight
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs':
+ vision_aided_loss,
+ })
+
+                else:
+                    raise NotImplementedError(behaviour)
+
+            # TODO: train the diffusion and SDS objectives together; is that viable?
+            eps = vae_out[self.latent_name]
+            eps.requires_grad_(True)  # single-stage diffusion
+
+            t, weights = self.schedule_sampler.sample(
+                eps.shape[0], dist_util.dev())
+            noise = th.randn_like(eps)  # note: this noise value is shared between the p and q samples below!
+
+            model_kwargs = {}
+
+            # ? Or directly use the SSD-NeRF version instead?
+            # Get diffusion quantities for the p (SGM prior) sampling scheme and
+            # the reweighting for q (VAE). NOTE: this block follows LSGM's
+            # train_vada.py; `diffusion.iw_quantities` / `diffusion.sample_q` and
+            # the `args` namespace are LSGM-style objects that are not defined in
+            # this trainer yet.
+            t_p, var_t_p, m_t_p, obj_weight_t_p, obj_weight_t_q, g2_t_p = \
+                diffusion.iw_quantities(args.batch_size, args.time_eps, args.iw_sample_p, args.iw_subvp_like_vp_sde)
+            eps_t_p = diffusion.sample_q(eps, noise, var_t_p, m_t_p)
+
+            # In case we want to train q (VAE) with another batch, using a
+            # different sampling scheme for the times t:
+            if args.iw_sample_q in ['ll_uniform', 'll_iw']:
+                t_q, var_t_q, m_t_q, obj_weight_t_q, _, g2_t_q = \
+                    diffusion.iw_quantities(args.batch_size, args.time_eps, args.iw_sample_q, args.iw_subvp_like_vp_sde)
+                eps_t_q = diffusion.sample_q(eps, noise, var_t_q, m_t_q)
+
+                eps_t_p = eps_t_p.detach().requires_grad_(True)
+                eps_t = th.cat([eps_t_p, eps_t_q], dim=0)
+                var_t = th.cat([var_t_p, var_t_q], dim=0)
+                t = th.cat([t_p, t_q], dim=0)
+                noise = th.cat([noise, noise], dim=0)
+            else:
+                eps_t, m_t, var_t, t, g2_t = eps_t_p, m_t_p, var_t_p, t_p, g2_t_p
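+            # For reference: under the VP SDE the perturbation kernel is
+            # q(eps_t | eps_0) = N(m_t * eps_0, var_t * I), so LSGM-style
+            # sampling reduces to (a sketch, assuming LSGM's sample_q semantics):
+            #
+            #   eps_t = m_t * eps + var_t.sqrt() * noise
+            #
+            # with m_t -> 0 and var_t -> 1 as t -> 1 (pure noise).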
+
+            # Run the diffusion with LSGM's mixing-normal trick.
+            # TODO: create a new partial training_losses function for this.
+            # NOTE: `dae`, `pred_params` and `utils` follow LSGM's naming and
+            # are not defined in this trainer yet.
+            mixing_component = diffusion.mixing_component(eps_t, var_t, t, enabled=dae.mixed_prediction)
+            params = utils.get_mixed_prediction(dae.mixed_prediction, pred_params, dae.mixing_logit, mixing_component)
+
+            # NELBO loss with KL balancing (as in LSGM). NOTE: the quantities
+            # below (`cross_entropy_per_var`, `remaining_neg_log_p_total`,
+            # `remaining_neg_log_p_per_ver`, `all_log_q`) come from LSGM's VADA
+            # step and are not computed in this trainer yet.
+
+            # ! Remaining parts of the cross entropy in likelihood training.
+            cross_entropy_per_var += diffusion.cross_entropy_const(args.time_eps)
+            cross_entropy = th.sum(cross_entropy_per_var, dim=[1, 2, 3])
+            cross_entropy += remaining_neg_log_p_total  # for remaining scales, if any
+            all_neg_log_p = vae.decompose_eps(cross_entropy_per_var)
+            all_neg_log_p.extend(remaining_neg_log_p_per_ver)  # add the remaining neg_log_p
+            kl_all_list, kl_vals_per_group, kl_diag_list = utils.kl_per_group_vada(all_log_q, all_neg_log_p)
+
+            kl_coeff = 1.0
+
+            # Unpack the separate objectives, in case we train q (VAE) with a
+            # different sampling scheme for the times t. NOTE: `l2_term` and
+            # `nelbo_loss`, as well as `grad_scalar`, `vae_optimizer` and
+            # `dae_optimizer` below, follow LSGM's train_vada.py and are not
+            # defined in this trainer yet.
+            if args.iw_sample_q in ['ll_uniform', 'll_iw']:
+                l2_term_p, l2_term_q = th.chunk(l2_term, chunks=2, dim=0)
+                p_objective = th.sum(obj_weight_t_p * l2_term_p, dim=[1, 2, 3])
+                # cross_entropy_per_var = obj_weight_t_q * l2_term_q
+            else:
+                p_objective = th.sum(obj_weight_t_p * l2_term, dim=[1, 2, 3])
+                # cross_entropy_per_var = obj_weight_t_q * l2_term
+
+            # ! Calculate the p/q losses.
+            # ? No spectral regularizer here.
+            # ? Try adding gradient clipping and spectral normalization later on.
+            q_loss = th.mean(nelbo_loss)
+            p_loss = th.mean(p_objective)
+
+            # Backpropagate q_loss for the VAE and update its params, if trained.
+            if args.train_vae:
+                grad_scalar.scale(q_loss).backward(retain_graph=utils.different_p_q_objectives(args.iw_sample_p, args.iw_sample_q))
+                utils.average_gradients(vae.parameters(), args.distributed)
+                if args.grad_clip_max_norm > 0.:  # apply gradient clipping
+                    grad_scalar.unscale_(vae_optimizer)
+                    th.nn.utils.clip_grad_norm_(vae.parameters(), max_norm=args.grad_clip_max_norm)
+                grad_scalar.step(vae_optimizer)
+
+            # If we use different p and q objectives, or are not training the
+            # VAE, discard the current gradients and backpropagate p_loss.
+            if utils.different_p_q_objectives(args.iw_sample_p, args.iw_sample_q) or not args.train_vae:
+                if args.train_vae:
+                    # Discard the gradients computed by the weighted loss for the VAE.
+                    dae_optimizer.zero_grad()
+
+                # Compute gradients with the unweighted loss.
+                grad_scalar.scale(p_loss).backward()
+
+                # Update the DAE parameters.
+                utils.average_gradients(dae.parameters(), args.distributed)
+                if args.grad_clip_max_norm > 0.:  # apply gradient clipping
+                    grad_scalar.unscale_(dae_optimizer)
+                    th.nn.utils.clip_grad_norm_(dae.parameters(), max_norm=args.grad_clip_max_norm)
+                grad_scalar.step(dae_optimizer)
+
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ eps, # x_start
+ t,
+ model_kwargs=model_kwargs,
+ return_detail=True)
+
+ # ! DDPM step
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ # denoised_out = denoised_fn()
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ losses = compute_losses()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ denoise_loss = (losses["loss"] * weights).mean()
+
+ x_t = losses.pop('x_t')
+ model_output = losses.pop('model_output')
+ diffusion_target = losses.pop('diffusion_target')
+ alpha_bar = losses.pop('alpha_bar')
+
+ log_loss_dict(self.diffusion, t,
+ {k: v * weights
+ for k, v in losses.items()})
+
+ # if behaviour == 'sds':
+ # ! calculate sds grad, and add to the grad of
+
+ # if 'rec' in behaviour and self.loss_class.opt.sds_lamdba > 0: # only enable sds along with rec step
+ # w = (
+ # 1 - alpha_bar**2
+ # ) / self.triplane_scaling_divider * self.loss_class.opt.sds_lamdba # https://github.com/ashawkey/stable-dreamfusion/issues/106
+ # sds_grad = denoise_loss.clone().detach(
+ # ) * w # * https://pytorch.org/docs/stable/generated/th.Tensor.detach.html. detach() returned Tensor share the same storage with previous one. add clone() here.
+
+ # # ae_loss = AddGradient.apply(latent[self.latent_name], sds_grad) # add sds_grad during backward
+
+ # def sds_hook(grad_to_add):
+
+ # def modify_grad(grad):
+ # return grad + grad_to_add # add the sds grad to the original grad for BP
+
+ # return modify_grad
+
+ # eps[self.latent_name].register_hook(
+ # sds_hook(sds_grad)) # merge sds grad with rec/nvs ae step
+
+            loss = vae_nelbo_loss + denoise_loss + vision_aided_loss  # calculate loss within AMP
+
+ # ! cvD loss
+
+ # exit AMP before backward
+ self.mp_trainer_rec.backward(loss)
+ self.mp_trainer.backward(loss)
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0 and behaviour != 'diff':
+ with th.no_grad():
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # if 'image_sr' in pred: # TODO
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+                    dim=-1)[0:1]  # TODO: depth fails to load here; values are in range [0, 1]
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=x_t[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ # if denoised_out is None:
+ # if not self.denoised_ae:
+ # denoised_out = denoised_fn()
+
+ if self.diffusion.model_mean_type == ModelMeanType.START_X:
+ pred_xstart = model_output
+ else: # * used here
+ pred_xstart = self.diffusion._predict_xstart_from_eps(
+ x_t=x_t, t=t, eps=model_output)
+
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=pred_xstart[0:1] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ # denoised_out = denoised_ae_pred
+
+ # if not self.denoised_ae:
+ # denoised_ae_pred = self.ddp_rec_model(
+ # img=None,
+ # c=micro['c'][0:1],
+ # latent=denoised_out['pred_xstart'][0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically
+ # behaviour=self.render_latent_behaviour)
+ # else:
+ # assert denoised_ae_pred is not None
+ # denoised_ae_pred['image_raw'] = denoised_ae_pred[
+ # 'image_raw'][0:1]
+
+ # print(pred_img.shape)
+ # print('denoised_ae:', self.denoised_ae)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'][0:1],
+ denoised_ae_pred['image_raw'][0:1],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis = th.cat([
+ # self.pool_128(micro['img']), x_t[:, :3, ...],
+ # denoised_out['pred_xstart'][:, :3, ...]
+ # ],
+ # dim=-1)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}_{behaviour}.jpg'
+ )
+
+ th.cuda.empty_cache()
diff --git a/nsr/networks_stylegan2.py b/nsr/networks_stylegan2.py
new file mode 100644
index 0000000000000000000000000000000000000000..f12530965c129cec09034430b2e95c6defb18685
--- /dev/null
+++ b/nsr/networks_stylegan2.py
@@ -0,0 +1,1093 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Network architectures from the paper
+"Analyzing and Improving the Image Quality of StyleGAN".
+Matches the original implementation of configs E-F by Karras et al. at
+https://github.com/NVlabs/stylegan2/blob/master/training/networks_stylegan2.py"""
+
+import numpy as np
+import torch
+from torch_utils import misc
+from torch_utils import persistence
+from torch_utils.ops import conv2d_resample
+from torch_utils.ops import upfirdn2d
+from torch_utils.ops import bias_act
+from torch_utils.ops import fma
+from pdb import set_trace as st
+
+#----------------------------------------------------------------------------
+
+
+@misc.profiled_function
+def normalize_2nd_moment(x, dim=1, eps=1e-8):
+ return x * (x.square().mean(dim=dim, keepdim=True) + eps).rsqrt()
+
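+# normalize_2nd_moment rescales x to unit second moment along `dim`, i.e.
+# mean(x_i^2) ~= 1 afterwards. A quick sanity check (a sketch, not part of
+# the module):
+#
+#   x = torch.randn(4, 512)
+#   y = normalize_2nd_moment(x)
+#   assert torch.allclose(y.square().mean(dim=1), torch.ones(4), atol=1e-4)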
+
+#----------------------------------------------------------------------------
+
+
+@misc.profiled_function
+# @torch.autocast(device_type='cuda')
+def modulated_conv2d(
+ x, # Input tensor of shape [batch_size, in_channels, in_height, in_width].
+ weight, # Weight tensor of shape [out_channels, in_channels, kernel_height, kernel_width].
+ styles, # Modulation coefficients of shape [batch_size, in_channels].
+ noise=None, # Optional noise tensor to add to the output activations.
+ up=1, # Integer upsampling factor.
+ down=1, # Integer downsampling factor.
+ padding=0, # Padding with respect to the upsampled image.
+ resample_filter=None, # Low-pass filter to apply when resampling activations. Must be prepared beforehand by calling upfirdn2d.setup_filter().
+ demodulate=True, # Apply weight demodulation?
+ flip_weight=True, # False = convolution, True = correlation (matches torch.nn.functional.conv2d).
+ fused_modconv=True, # Perform modulation, convolution, and demodulation as a single fused operation?
+):
+ batch_size = x.shape[0]
+ out_channels, in_channels, kh, kw = weight.shape
+ misc.assert_shape(weight, [out_channels, in_channels, kh, kw]) # [OIkk]
+ misc.assert_shape(x, [batch_size, in_channels, None, None]) # [NIHW]
+ misc.assert_shape(styles, [batch_size, in_channels]) # [NI]
+
+ # Pre-normalize inputs to avoid FP16 overflow.
+ if x.dtype == torch.float16 and demodulate:
+ weight = weight * (1 / np.sqrt(in_channels * kh * kw) / weight.norm(
+ float('inf'), dim=[1, 2, 3], keepdim=True)) # max_Ikk
+ styles = styles / styles.norm(float('inf'), dim=1,
+ keepdim=True) # max_I
+
+ # Calculate per-sample weights and demodulation coefficients.
+ w = None
+ dcoefs = None
+ if demodulate or fused_modconv:
+ w = weight.unsqueeze(0) # [NOIkk]
+ w = w * styles.reshape(batch_size, 1, -1, 1, 1) # [NOIkk]
+ if demodulate:
+ dcoefs = (w.square().sum(dim=[2, 3, 4]) + 1e-8).rsqrt() # [NO]
+ if demodulate and fused_modconv:
+ w = w * dcoefs.reshape(batch_size, -1, 1, 1, 1) # [NOIkk]
+
+ # Execute by scaling the activations before and after the convolution.
+ if not fused_modconv:
+ x = x * styles.to(x.dtype).reshape(batch_size, -1, 1, 1)
+ x = conv2d_resample.conv2d_resample(x=x,
+ w=weight.to(x.dtype),
+ f=resample_filter,
+ up=up,
+ down=down,
+ padding=padding,
+ flip_weight=flip_weight)
+ if demodulate and noise is not None:
+ x = fma.fma(x,
+ dcoefs.to(x.dtype).reshape(batch_size, -1, 1, 1),
+ noise.to(x.dtype))
+ elif demodulate:
+ x = x * dcoefs.to(x.dtype).reshape(batch_size, -1, 1, 1)
+ elif noise is not None:
+ x = x.add_(noise.to(x.dtype))
+ return x
+
+ # Execute as one fused op using grouped convolution.
+ with misc.suppress_tracer_warnings(
+ ): # this value will be treated as a constant
+ batch_size = int(batch_size)
+ misc.assert_shape(x, [batch_size, in_channels, None, None])
+ x = x.reshape(1, -1, *x.shape[2:])
+ w = w.reshape(-1, in_channels, kh, kw)
+ x = conv2d_resample.conv2d_resample(x=x,
+ w=w.to(x.dtype),
+ f=resample_filter,
+ up=up,
+ down=down,
+ padding=padding,
+ groups=batch_size,
+ flip_weight=flip_weight)
+ x = x.reshape(batch_size, -1, *x.shape[2:])
+ if noise is not None:
+ x = x.add_(noise)
+ return x
+
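+# Usage sketch for modulated_conv2d (all tensors below are illustrative
+# assumptions, not values used by this module):
+#
+#   x = torch.randn(2, 512, 16, 16, device='cuda')       # [NIHW]
+#   weight = torch.randn(256, 512, 3, 3, device='cuda')  # [OIkk]
+#   styles = torch.randn(2, 512, device='cuda')          # [NI]
+#   y = modulated_conv2d(x, weight, styles, padding=1)   # -> [2, 256, 16, 16]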
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class FullyConnectedLayer(torch.nn.Module):
+ def __init__(
+ self,
+ in_features, # Number of input features.
+ out_features, # Number of output features.
+ bias=True, # Apply additive bias before the activation function?
+ activation='linear', # Activation function: 'relu', 'lrelu', etc.
+ lr_multiplier=1, # Learning rate multiplier.
+ bias_init=0, # Initial value for the additive bias.
+ ):
+ super().__init__()
+ self.in_features = in_features
+ self.out_features = out_features
+ self.activation = activation
+ self.weight = torch.nn.Parameter(
+ torch.randn([out_features, in_features]) / lr_multiplier)
+ self.bias = torch.nn.Parameter(
+ torch.full([out_features],
+ np.float32(bias_init))) if bias else None
+ self.weight_gain = lr_multiplier / np.sqrt(in_features)
+ self.bias_gain = lr_multiplier
+
+ def forward(self, x):
+ w = self.weight.to(x.dtype) * self.weight_gain
+ b = self.bias
+ if b is not None:
+ b = b.to(x.dtype)
+ if self.bias_gain != 1:
+ b = b * self.bias_gain
+
+ if self.activation == 'linear' and b is not None:
+ x = torch.addmm(b.unsqueeze(0), x, w.t())
+ else:
+ x = x.matmul(w.t())
+ x = bias_act.bias_act(x, b, act=self.activation)
+ return x
+
+ def extra_repr(self):
+ return f'in_features={self.in_features:d}, out_features={self.out_features:d}, activation={self.activation:s}'
+
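+# FullyConnectedLayer implements the equalized learning rate trick: weights
+# are stored pre-divided by lr_multiplier and rescaled at runtime by
+# weight_gain = lr_multiplier / sqrt(in_features), keeping effective update
+# magnitudes uniform across layers. A usage sketch (sizes illustrative):
+#
+#   fc = FullyConnectedLayer(512, 512, activation='lrelu', lr_multiplier=0.01)
+#   out = fc(torch.randn(8, 512))  # -> [8, 512]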
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class Conv2dLayer(torch.nn.Module):
+ def __init__(
+ self,
+ in_channels, # Number of input channels.
+ out_channels, # Number of output channels.
+ kernel_size, # Width and height of the convolution kernel.
+ bias=True, # Apply additive bias before the activation function?
+ activation='linear', # Activation function: 'relu', 'lrelu', etc.
+ up=1, # Integer upsampling factor.
+ down=1, # Integer downsampling factor.
+ resample_filter=[
+ 1, 3, 3, 1
+ ], # Low-pass filter to apply when resampling activations.
+ conv_clamp=None, # Clamp the output to +-X, None = disable clamping.
+ channels_last=False, # Expect the input to have memory_format=channels_last?
+ trainable=True, # Update the weights of this layer during training?
+ ):
+ super().__init__()
+ self.in_channels = in_channels
+ self.out_channels = out_channels
+ self.activation = activation
+ self.up = up
+ self.down = down
+ self.conv_clamp = conv_clamp
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter(resample_filter))
+ self.padding = kernel_size // 2
+ self.weight_gain = 1 / np.sqrt(in_channels * (kernel_size**2))
+ self.act_gain = bias_act.activation_funcs[activation].def_gain
+
+ memory_format = torch.channels_last if channels_last else torch.contiguous_format
+ weight = torch.randn(
+ [out_channels, in_channels, kernel_size,
+ kernel_size]).to(memory_format=memory_format)
+ bias = torch.zeros([out_channels]) if bias else None
+ if trainable:
+ self.weight = torch.nn.Parameter(weight)
+ self.bias = torch.nn.Parameter(bias) if bias is not None else None
+ else:
+ self.register_buffer('weight', weight)
+ if bias is not None:
+ self.register_buffer('bias', bias)
+ else:
+ self.bias = None
+
+ # @torch.autocast(device_type='cuda')
+ def forward(self, x, gain=1):
+ w = self.weight * self.weight_gain # w dtype is fp32
+ b = self.bias.to(x.dtype) if self.bias is not None else None
+
+ flip_weight = (self.up == 1) # slightly faster
+ x = conv2d_resample.conv2d_resample(x=x,
+ w=w.to(x.dtype),
+ f=self.resample_filter,
+ up=self.up,
+ down=self.down,
+ padding=self.padding,
+ flip_weight=flip_weight)
+
+ act_gain = self.act_gain * gain
+ act_clamp = self.conv_clamp * gain if self.conv_clamp is not None else None
+ x = bias_act.bias_act(x,
+ b,
+ act=self.activation,
+ gain=act_gain,
+ clamp=act_clamp)
+ return x
+
+ def extra_repr(self):
+ return ' '.join([
+ f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}, activation={self.activation:s},',
+ f'up={self.up}, down={self.down}'
+ ])
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class MappingNetwork(torch.nn.Module):
+ def __init__(
+ self,
+ z_dim, # Input latent (Z) dimensionality, 0 = no latent.
+ c_dim, # Conditioning label (C) dimensionality, 0 = no label.
+ w_dim, # Intermediate latent (W) dimensionality.
+ num_ws, # Number of intermediate latents to output, None = do not broadcast.
+ num_layers=8, # Number of mapping layers.
+ embed_features=None, # Label embedding dimensionality, None = same as w_dim.
+ layer_features=None, # Number of intermediate features in the mapping layers, None = same as w_dim.
+ activation='lrelu', # Activation function: 'relu', 'lrelu', etc.
+ lr_multiplier=0.01, # Learning rate multiplier for the mapping layers.
+ w_avg_beta=0.998, # Decay for tracking the moving average of W during training, None = do not track.
+ ):
+ super().__init__()
+ self.z_dim = z_dim
+ self.c_dim = c_dim
+ self.w_dim = w_dim
+ self.num_ws = num_ws
+ self.num_layers = num_layers
+ self.w_avg_beta = w_avg_beta
+
+ if embed_features is None:
+ embed_features = w_dim
+ if c_dim == 0:
+ embed_features = 0
+ if layer_features is None:
+ layer_features = w_dim
+ features_list = [z_dim + embed_features
+ ] + [layer_features] * (num_layers - 1) + [w_dim]
+
+ if c_dim > 0:
+ self.embed = FullyConnectedLayer(c_dim, embed_features)
+ for idx in range(num_layers):
+ in_features = features_list[idx]
+ out_features = features_list[idx + 1]
+ layer = FullyConnectedLayer(in_features,
+ out_features,
+ activation=activation,
+ lr_multiplier=lr_multiplier)
+ setattr(self, f'fc{idx}', layer)
+
+ if num_ws is not None and w_avg_beta is not None:
+ self.register_buffer('w_avg', torch.zeros([w_dim]))
+
+ def forward(self,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False):
+ # Embed, normalize, and concat inputs.
+ x = None
+ with torch.autograd.profiler.record_function('input'):
+ if self.z_dim > 0:
+ misc.assert_shape(z, [None, self.z_dim])
+ x = normalize_2nd_moment(z.to(torch.float32))
+ if self.c_dim > 0:
+ misc.assert_shape(c, [None, self.c_dim])
+ y = normalize_2nd_moment(self.embed(c.to(torch.float32)))
+ x = torch.cat([x, y], dim=1) if x is not None else y
+
+ # Main layers.
+ for idx in range(self.num_layers):
+ layer = getattr(self, f'fc{idx}')
+ x = layer(x)
+
+ # Update moving average of W.
+ if update_emas and self.w_avg_beta is not None:
+ with torch.autograd.profiler.record_function('update_w_avg'):
+ self.w_avg.copy_(x.detach().mean(dim=0).lerp(
+ self.w_avg, self.w_avg_beta))
+
+ # Broadcast.
+ if self.num_ws is not None:
+ with torch.autograd.profiler.record_function('broadcast'):
+ x = x.unsqueeze(1).repeat([1, self.num_ws, 1])
+
+ # Apply truncation.
+ if truncation_psi != 1:
+ with torch.autograd.profiler.record_function('truncate'):
+ assert self.w_avg_beta is not None
+ if self.num_ws is None or truncation_cutoff is None:
+ x = self.w_avg.lerp(x, truncation_psi)
+ else:
+ x[:, :truncation_cutoff] = self.w_avg.lerp(
+ x[:, :truncation_cutoff], truncation_psi)
+ return x
+
+ def extra_repr(self):
+ return f'z_dim={self.z_dim:d}, c_dim={self.c_dim:d}, w_dim={self.w_dim:d}, num_ws={self.num_ws:d}'
+
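+# The truncation trick interpolates W toward the tracked average:
+# w' = w_avg + psi * (w - w_avg), optionally only for the first
+# truncation_cutoff ws. A usage sketch (sizes illustrative):
+#
+#   mapping = MappingNetwork(z_dim=512, c_dim=0, w_dim=512, num_ws=14)
+#   ws = mapping(torch.randn(4, 512), c=None, truncation_psi=0.7)  # -> [4, 14, 512]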
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class SynthesisLayer(torch.nn.Module):
+ def __init__(
+ self,
+ in_channels, # Number of input channels.
+ out_channels, # Number of output channels.
+ w_dim, # Intermediate latent (W) dimensionality.
+ resolution, # Resolution of this layer.
+ kernel_size=3, # Convolution kernel size.
+ up=1, # Integer upsampling factor.
+ use_noise=True, # Enable noise input?
+ activation='lrelu', # Activation function: 'relu', 'lrelu', etc.
+ resample_filter=[
+ 1, 3, 3, 1
+ ], # Low-pass filter to apply when resampling activations.
+ conv_clamp=None, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ channels_last=False, # Use channels_last format for the weights?
+ ):
+ super().__init__()
+ self.in_channels = in_channels
+ self.out_channels = out_channels
+ self.w_dim = w_dim
+ self.resolution = resolution
+ self.up = up
+ self.use_noise = use_noise
+ self.activation = activation
+ self.conv_clamp = conv_clamp
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter(resample_filter))
+ self.padding = kernel_size // 2
+ self.act_gain = bias_act.activation_funcs[activation].def_gain
+
+ self.affine = FullyConnectedLayer(w_dim, in_channels, bias_init=1)
+ memory_format = torch.channels_last if channels_last else torch.contiguous_format
+ self.weight = torch.nn.Parameter(
+ torch.randn([out_channels, in_channels, kernel_size,
+ kernel_size]).to(memory_format=memory_format))
+ if use_noise:
+ self.register_buffer('noise_const',
+ torch.randn([resolution, resolution]))
+ self.noise_strength = torch.nn.Parameter(torch.zeros([]))
+ self.bias = torch.nn.Parameter(torch.zeros([out_channels]))
+
+ # def forward(self, x, w, noise_mode='random', fused_modconv=True, gain=1):
+ def forward(self, x, w, noise_mode='const', fused_modconv=True, gain=1):
+ assert noise_mode in ['random', 'const', 'none']
+ in_resolution = self.resolution // self.up
+ misc.assert_shape(
+ x, [None, self.in_channels, in_resolution, in_resolution])
+ styles = self.affine(w)
+
+ noise = None
+ if self.use_noise and noise_mode == 'random':
+ noise = torch.randn(
+ [x.shape[0], 1, self.resolution, self.resolution],
+ device=x.device) * self.noise_strength
+ if self.use_noise and noise_mode == 'const':
+ noise = self.noise_const * self.noise_strength
+
+ flip_weight = (self.up == 1) # slightly faster
+ x = modulated_conv2d(x=x,
+ weight=self.weight,
+ styles=styles,
+ noise=noise,
+ up=self.up,
+ padding=self.padding,
+ resample_filter=self.resample_filter,
+ flip_weight=flip_weight,
+ fused_modconv=fused_modconv)
+
+ act_gain = self.act_gain * gain
+ act_clamp = self.conv_clamp * gain if self.conv_clamp is not None else None
+ x = bias_act.bias_act(x,
+ self.bias.to(x.dtype),
+ act=self.activation,
+ gain=act_gain,
+ clamp=act_clamp)
+ return x
+
+ def extra_repr(self):
+ return ' '.join([
+ f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}, w_dim={self.w_dim:d},',
+ f'resolution={self.resolution:d}, up={self.up}, activation={self.activation:s}'
+ ])
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class ToRGBLayer(torch.nn.Module):
+ def __init__(self,
+ in_channels,
+ out_channels,
+ w_dim,
+ kernel_size=1,
+ conv_clamp=None,
+ channels_last=False):
+ super().__init__()
+ self.in_channels = in_channels
+ self.out_channels = out_channels
+ self.w_dim = w_dim
+ self.conv_clamp = conv_clamp
+ self.affine = FullyConnectedLayer(w_dim, in_channels, bias_init=1)
+ memory_format = torch.channels_last if channels_last else torch.contiguous_format
+ self.weight = torch.nn.Parameter(
+ torch.randn([out_channels, in_channels, kernel_size,
+ kernel_size]).to(memory_format=memory_format))
+ self.bias = torch.nn.Parameter(torch.zeros([out_channels]))
+ self.weight_gain = 1 / np.sqrt(in_channels * (kernel_size**2))
+
+ def forward(self, x, w, fused_modconv=True):
+ styles = self.affine(w) * self.weight_gain
+ x = modulated_conv2d(x=x,
+ weight=self.weight,
+ styles=styles,
+ demodulate=False,
+ fused_modconv=fused_modconv)
+ x = bias_act.bias_act(x, self.bias.to(x.dtype), clamp=self.conv_clamp)
+ return x
+
+ def extra_repr(self):
+ return f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}, w_dim={self.w_dim:d}'
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class SynthesisBlock(torch.nn.Module):
+ def __init__(
+ self,
+ in_channels, # Number of input channels, 0 = first block.
+ out_channels, # Number of output channels.
+ w_dim, # Intermediate latent (W) dimensionality.
+ resolution, # Resolution of this block.
+ img_channels, # Number of output color channels.
+ is_last, # Is this the last block?
+ architecture='skip', # Architecture: 'orig', 'skip', 'resnet'.
+ resample_filter=[
+ 1, 3, 3, 1
+ ], # Low-pass filter to apply when resampling activations.
+ conv_clamp=256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ use_fp16=False, # Use FP16 for this block?
+ fp16_channels_last=False, # Use channels-last memory format with FP16?
+ fused_modconv_default=True, # Default value of fused_modconv. 'inference_only' = True for inference, False for training.
+ **layer_kwargs, # Arguments for SynthesisLayer.
+ ):
+ assert architecture in ['orig', 'skip', 'resnet']
+ super().__init__()
+ self.in_channels = in_channels
+ self.w_dim = w_dim
+ self.resolution = resolution
+ self.img_channels = img_channels
+ self.is_last = is_last
+ self.architecture = architecture
+ self.use_fp16 = use_fp16
+ self.channels_last = (use_fp16 and fp16_channels_last)
+ self.fused_modconv_default = fused_modconv_default
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter(resample_filter))
+ self.num_conv = 0
+ self.num_torgb = 0
+
+ if in_channels == 0:
+ self.const = torch.nn.Parameter(
+ torch.randn([out_channels, resolution, resolution]))
+
+ if in_channels != 0:
+ self.conv0 = SynthesisLayer(in_channels,
+ out_channels,
+ w_dim=w_dim,
+ resolution=resolution,
+ up=2,
+ resample_filter=resample_filter,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last,
+ **layer_kwargs)
+ self.num_conv += 1
+
+ self.conv1 = SynthesisLayer(out_channels,
+ out_channels,
+ w_dim=w_dim,
+ resolution=resolution,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last,
+ **layer_kwargs)
+ self.num_conv += 1
+
+ if is_last or architecture == 'skip':
+ self.torgb = ToRGBLayer(out_channels,
+ img_channels,
+ w_dim=w_dim,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last)
+ self.num_torgb += 1
+
+ if in_channels != 0 and architecture == 'resnet':
+ self.skip = Conv2dLayer(in_channels,
+ out_channels,
+ kernel_size=1,
+ bias=False,
+ up=2,
+ resample_filter=resample_filter,
+ channels_last=self.channels_last)
+
+ def forward(self,
+ x,
+ img,
+ ws,
+ force_fp32=False,
+ fused_modconv=None,
+ update_emas=False,
+ **layer_kwargs):
+ _ = update_emas # unused
+ misc.assert_shape(ws,
+ [None, self.num_conv + self.num_torgb, self.w_dim])
+ w_iter = iter(ws.unbind(dim=1))
+ if ws.device.type != 'cuda':
+ force_fp32 = True
+ dtype = torch.float16 if self.use_fp16 and not force_fp32 else torch.float32
+ memory_format = torch.channels_last if self.channels_last and not force_fp32 else torch.contiguous_format
+ if fused_modconv is None:
+ fused_modconv = self.fused_modconv_default
+ if fused_modconv == 'inference_only':
+ fused_modconv = (not self.training)
+
+ # Input.
+ if self.in_channels == 0:
+ x = self.const.to(dtype=dtype, memory_format=memory_format)
+ x = x.unsqueeze(0).repeat([ws.shape[0], 1, 1, 1])
+ else:
+ misc.assert_shape(x, [
+ None, self.in_channels, self.resolution // 2,
+ self.resolution // 2
+ ])
+ x = x.to(dtype=dtype, memory_format=memory_format)
+
+ # Main layers.
+ if self.in_channels == 0:
+ x = self.conv1(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+ elif self.architecture == 'resnet':
+ y = self.skip(x, gain=np.sqrt(0.5))
+ x = self.conv0(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+ x = self.conv1(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ gain=np.sqrt(0.5),
+ **layer_kwargs)
+ x = y.add_(x)
+ else:
+ x = self.conv0(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+ x = self.conv1(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+
+ # ToRGB.
+ if img is not None:
+ misc.assert_shape(img, [
+ None, self.img_channels, self.resolution // 2,
+ self.resolution // 2
+ ])
+ img = upfirdn2d.upsample2d(img, self.resample_filter)
+ if self.is_last or self.architecture == 'skip':
+ y = self.torgb(x, next(w_iter), fused_modconv=fused_modconv)
+ y = y.to(dtype=torch.float32,
+ memory_format=torch.contiguous_format)
+ img = img.add_(y) if img is not None else y
+
+ # assert x.dtype == dtype
+ assert img is None or img.dtype == torch.float32
+ return x, img
+
+ def extra_repr(self):
+ return f'resolution={self.resolution:d}, architecture={self.architecture:s}'
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class SynthesisNetwork(torch.nn.Module):
+ def __init__(
+ self,
+ w_dim, # Intermediate latent (W) dimensionality.
+ img_resolution, # Output image resolution.
+ img_channels, # Number of color channels.
+ channel_base=32768, # Overall multiplier for the number of channels.
+ channel_max=512, # Maximum number of channels in any layer.
+ num_fp16_res=4, # Use FP16 for the N highest resolutions.
+ **block_kwargs, # Arguments for SynthesisBlock.
+ ):
+ assert img_resolution >= 4 and img_resolution & (img_resolution -
+ 1) == 0
+ super().__init__()
+ self.w_dim = w_dim
+ self.img_resolution = img_resolution
+ self.img_resolution_log2 = int(np.log2(img_resolution))
+ self.img_channels = img_channels
+ self.num_fp16_res = num_fp16_res
+ self.block_resolutions = [
+ 2**i for i in range(2, self.img_resolution_log2 + 1)
+ ]
+ channels_dict = {
+ res: min(channel_base // res, channel_max)
+ for res in self.block_resolutions
+ }
+ fp16_resolution = max(2**(self.img_resolution_log2 + 1 - num_fp16_res),
+ 8)
+
+ self.num_ws = 0
+ for res in self.block_resolutions:
+ in_channels = channels_dict[res // 2] if res > 4 else 0
+ out_channels = channels_dict[res]
+ use_fp16 = (res >= fp16_resolution)
+ is_last = (res == self.img_resolution)
+ block = SynthesisBlock(in_channels,
+ out_channels,
+ w_dim=w_dim,
+ resolution=res,
+ img_channels=img_channels,
+ is_last=is_last,
+ use_fp16=use_fp16,
+ **block_kwargs)
+ self.num_ws += block.num_conv
+ if is_last:
+ self.num_ws += block.num_torgb
+ setattr(self, f'b{res}', block)
+
+ def forward(self, ws, **block_kwargs):
+ block_ws = []
+ with torch.autograd.profiler.record_function('split_ws'):
+ misc.assert_shape(ws, [None, self.num_ws, self.w_dim])
+ ws = ws.to(torch.float32)
+ w_idx = 0
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ block_ws.append(
+ ws.narrow(1, w_idx, block.num_conv +
+ block.num_torgb)) # dim start length
+ w_idx += block.num_conv
+ # print(f'synthesisNetwork : b{res}, device={block.conv1.weight.device}')
+
+ x = img = None
+ for res, cur_ws in zip(self.block_resolutions, block_ws):
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, cur_ws, **block_kwargs)
+ return img
+
+ def extra_repr(self):
+ return ' '.join([
+ f'w_dim={self.w_dim:d}, num_ws={self.num_ws:d},',
+ f'img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d},',
+ f'num_fp16_res={self.num_fp16_res:d}'
+ ])
+
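+# A usage sketch (sizes illustrative): the network consumes `num_ws` latent
+# vectors, one per conv/ToRGB layer, accumulated while the blocks are built:
+#
+#   synth = SynthesisNetwork(w_dim=512, img_resolution=256, img_channels=3)
+#   ws = torch.randn(1, synth.num_ws, 512)
+#   img = synth(ws)  # -> [1, 3, 256, 256]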
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class Generator(torch.nn.Module):
+ def __init__(
+ self,
+ z_dim, # Input latent (Z) dimensionality.
+ c_dim, # Conditioning label (C) dimensionality.
+ w_dim, # Intermediate latent (W) dimensionality.
+ img_resolution, # Output resolution.
+ img_channels, # Number of output color channels.
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ **synthesis_kwargs, # Arguments for SynthesisNetwork.
+ ):
+ super().__init__()
+ self.z_dim = z_dim
+ self.c_dim = c_dim
+ self.w_dim = w_dim
+ self.img_resolution = img_resolution
+ self.img_channels = img_channels
+ self.synthesis = SynthesisNetwork(w_dim=w_dim,
+ img_resolution=img_resolution,
+ img_channels=img_channels,
+ **synthesis_kwargs)
+ self.num_ws = self.synthesis.num_ws
+ self.mapping = MappingNetwork(z_dim=z_dim,
+ c_dim=c_dim,
+ w_dim=w_dim,
+ num_ws=self.num_ws,
+ **mapping_kwargs)
+
+ def forward(self,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False,
+ **synthesis_kwargs):
+ ws = self.mapping(z,
+ c,
+ truncation_psi=truncation_psi,
+ truncation_cutoff=truncation_cutoff,
+ update_emas=update_emas)
+ img = self.synthesis(ws, update_emas=update_emas, **synthesis_kwargs)
+ return img
+
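+# End-to-end usage sketch (sizes illustrative): mapping produces the
+# broadcast ws, synthesis renders the image.
+#
+#   G = Generator(z_dim=512, c_dim=0, w_dim=512, img_resolution=256, img_channels=3)
+#   img = G(torch.randn(4, 512), c=None, truncation_psi=0.7)  # -> [4, 3, 256, 256]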
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class DiscriminatorBlock(torch.nn.Module):
+ def __init__(
+ self,
+ in_channels, # Number of input channels, 0 = first block.
+ tmp_channels, # Number of intermediate channels.
+ out_channels, # Number of output channels.
+ resolution, # Resolution of this block.
+ img_channels, # Number of input color channels.
+ first_layer_idx, # Index of the first layer.
+ architecture='resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ activation='lrelu', # Activation function: 'relu', 'lrelu', etc.
+ resample_filter=[
+ 1, 3, 3, 1
+ ], # Low-pass filter to apply when resampling activations.
+ conv_clamp=None, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ use_fp16=False, # Use FP16 for this block?
+ fp16_channels_last=False, # Use channels-last memory format with FP16?
+ freeze_layers=0, # Freeze-D: Number of layers to freeze.
+ ):
+ assert in_channels in [0, tmp_channels]
+ assert architecture in ['orig', 'skip', 'resnet']
+ super().__init__()
+ self.in_channels = in_channels
+ self.resolution = resolution
+ self.img_channels = img_channels
+ self.first_layer_idx = first_layer_idx
+ self.architecture = architecture
+ self.use_fp16 = use_fp16
+ self.channels_last = (use_fp16 and fp16_channels_last)
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter(resample_filter))
+
+ self.num_layers = 0
+
+ def trainable_gen():
+ while True:
+ layer_idx = self.first_layer_idx + self.num_layers
+ trainable = (layer_idx >= freeze_layers)
+ self.num_layers += 1
+ yield trainable
+
+ trainable_iter = trainable_gen()
+
+ if in_channels == 0 or architecture == 'skip':
+ self.fromrgb = Conv2dLayer(img_channels,
+ tmp_channels,
+ kernel_size=1,
+ activation=activation,
+ trainable=next(trainable_iter),
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last)
+
+ self.conv0 = Conv2dLayer(tmp_channels,
+ tmp_channels,
+ kernel_size=3,
+ activation=activation,
+ trainable=next(trainable_iter),
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last)
+
+ self.conv1 = Conv2dLayer(tmp_channels,
+ out_channels,
+ kernel_size=3,
+ activation=activation,
+ down=2,
+ trainable=next(trainable_iter),
+ resample_filter=resample_filter,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last)
+
+ if architecture == 'resnet':
+ self.skip = Conv2dLayer(tmp_channels,
+ out_channels,
+ kernel_size=1,
+ bias=False,
+ down=2,
+ trainable=next(trainable_iter),
+ resample_filter=resample_filter,
+ channels_last=self.channels_last)
+
+ def forward(self, x, img, force_fp32=False):
+ if (x if x is not None else img).device.type != 'cuda':
+ force_fp32 = True
+ dtype = torch.float16 if self.use_fp16 and not force_fp32 else torch.float32
+ # dtype = img.dtype
+ # dtype = x.dtype
+ memory_format = torch.channels_last if self.channels_last and not force_fp32 else torch.contiguous_format
+
+ # Input.
+ if x is not None:
+ misc.assert_shape(
+ x, [None, self.in_channels, self.resolution, self.resolution])
+ x = x.to(dtype=dtype, memory_format=memory_format)
+
+ # FromRGB.
+ if self.in_channels == 0 or self.architecture == 'skip':
+ misc.assert_shape(
+ img,
+ [None, self.img_channels, self.resolution, self.resolution])
+ img = img.to(dtype=dtype, memory_format=memory_format)
+ y = self.fromrgb(img)
+ x = x + y if x is not None else y
+ img = upfirdn2d.downsample2d(
+ img,
+ self.resample_filter) if self.architecture == 'skip' else None
+
+ # Main layers.
+ if self.architecture == 'resnet':
+ y = self.skip(x, gain=np.sqrt(0.5))
+ x = self.conv0(x)
+ x = self.conv1(x, gain=np.sqrt(0.5))
+ x = y.add_(x)
+ else:
+ x = self.conv0(x)
+ x = self.conv1(x)
+
+ assert x.dtype == dtype
+ return x, img
+
+ def extra_repr(self):
+ return f'resolution={self.resolution:d}, architecture={self.architecture:s}'
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class MinibatchStdLayer(torch.nn.Module):
+ def __init__(self, group_size, num_channels=1):
+ super().__init__()
+ self.group_size = group_size
+ self.num_channels = num_channels
+
+ def forward(self, x):
+ N, C, H, W = x.shape
+ with misc.suppress_tracer_warnings(
+ ): # as_tensor results are registered as constants
+ G = torch.min(
+ torch.as_tensor(self.group_size),
+ torch.as_tensor(N)) if self.group_size is not None else N
+ F = self.num_channels
+ c = C // F
+
+ y = x.reshape(
+ G, -1, F, c, H, W
+ ) # [GnFcHW] Split minibatch N into n groups of size G, and channels C into F groups of size c.
+ y = y - y.mean(dim=0) # [GnFcHW] Subtract mean over group.
+ y = y.square().mean(dim=0) # [nFcHW] Calc variance over group.
+ y = (y + 1e-8).sqrt() # [nFcHW] Calc stddev over group.
+ y = y.mean(dim=[2, 3,
+ 4]) # [nF] Take average over channels and pixels.
+ y = y.reshape(-1, F, 1, 1) # [nF11] Add missing dimensions.
+ y = y.repeat(G, 1, H, W) # [NFHW] Replicate over group and pixels.
+ x = torch.cat([x, y],
+ dim=1) # [NCHW] Append to input as new channels.
+ return x
+
+ def extra_repr(self):
+ return f'group_size={self.group_size}, num_channels={self.num_channels:d}'
+
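+# Worked example: with group_size=4 and num_channels=1, the per-group feature
+# stddev is averaged over channels and pixels and appended as one extra
+# channel map (a sketch, sizes illustrative):
+#
+#   mbstd = MinibatchStdLayer(group_size=4, num_channels=1)
+#   y = mbstd(torch.randn(8, 512, 4, 4))  # -> [8, 513, 4, 4]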
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class DiscriminatorEpilogue(torch.nn.Module):
+ def __init__(
+ self,
+ in_channels, # Number of input channels.
+ cmap_dim, # Dimensionality of mapped conditioning label, 0 = no label.
+ resolution, # Resolution of this block.
+ img_channels, # Number of input color channels.
+ architecture='resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ mbstd_group_size=4, # Group size for the minibatch standard deviation layer, None = entire minibatch.
+ mbstd_num_channels=1, # Number of features for the minibatch standard deviation layer, 0 = disable.
+ activation='lrelu', # Activation function: 'relu', 'lrelu', etc.
+ conv_clamp=None, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ ):
+ assert architecture in ['orig', 'skip', 'resnet']
+ super().__init__()
+ self.in_channels = in_channels
+ self.cmap_dim = cmap_dim
+ self.resolution = resolution
+ self.img_channels = img_channels
+ self.architecture = architecture
+
+ if architecture == 'skip':
+ self.fromrgb = Conv2dLayer(img_channels,
+ in_channels,
+ kernel_size=1,
+ activation=activation)
+ self.mbstd = MinibatchStdLayer(group_size=mbstd_group_size,
+ num_channels=mbstd_num_channels
+ ) if mbstd_num_channels > 0 else None
+ self.conv = Conv2dLayer(in_channels + mbstd_num_channels,
+ in_channels,
+ kernel_size=3,
+ activation=activation,
+ conv_clamp=conv_clamp)
+ self.fc = FullyConnectedLayer(in_channels * (resolution**2),
+ in_channels,
+ activation=activation)
+ self.out = FullyConnectedLayer(in_channels,
+ 1 if cmap_dim == 0 else cmap_dim)
+
+ def forward(self, x, img, cmap, force_fp32=False):
+ misc.assert_shape(
+ x, [None, self.in_channels, self.resolution, self.resolution
+ ]) # [NCHW]
+ _ = force_fp32 # unused
+ # dtype = torch.float32
+ dtype = x.dtype
+ memory_format = torch.contiguous_format
+
+ # FromRGB.
+ x = x.to(dtype=dtype, memory_format=memory_format)
+ if self.architecture == 'skip':
+ misc.assert_shape(
+ img,
+ [None, self.img_channels, self.resolution, self.resolution])
+ img = img.to(dtype=dtype, memory_format=memory_format)
+ x = x + self.fromrgb(img)
+
+ # Main layers.
+ if self.mbstd is not None:
+ x = self.mbstd(x)
+ x = self.conv(x)
+ x = self.fc(x.flatten(1))
+ x = self.out(x)
+
+ # Conditioning.
+ if self.cmap_dim > 0:
+ misc.assert_shape(cmap, [None, self.cmap_dim])
+ x = (x * cmap).sum(dim=1,
+ keepdim=True) * (1 / np.sqrt(self.cmap_dim))
+
+ assert x.dtype == dtype
+ return x
+
+ def extra_repr(self):
+ return f'resolution={self.resolution:d}, architecture={self.architecture:s}'
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class Discriminator(torch.nn.Module):
+ def __init__(
+ self,
+ c_dim, # Conditioning label (C) dimensionality.
+ img_resolution, # Input resolution.
+ img_channels, # Number of input color channels.
+ architecture='resnet', # Architecture: 'orig', 'skip', 'resnet'.
+ channel_base=32768, # Overall multiplier for the number of channels.
+ channel_max=512, # Maximum number of channels in any layer.
+ num_fp16_res=4, # Use FP16 for the N highest resolutions.
+ conv_clamp=256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ cmap_dim=None, # Dimensionality of mapped conditioning label, None = default.
+ block_kwargs={}, # Arguments for DiscriminatorBlock.
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ epilogue_kwargs={}, # Arguments for DiscriminatorEpilogue.
+ ):
+ super().__init__()
+ self.c_dim = c_dim
+ self.img_resolution = img_resolution
+ self.img_resolution_log2 = int(np.log2(img_resolution))
+ self.img_channels = img_channels
+ self.block_resolutions = [
+ 2**i for i in range(self.img_resolution_log2, 2, -1)
+ ]
+ channels_dict = {
+ res: min(channel_base // res, channel_max)
+ for res in self.block_resolutions + [4]
+ }
+ fp16_resolution = max(2**(self.img_resolution_log2 + 1 - num_fp16_res),
+ 8)
+
+ if cmap_dim is None:
+ cmap_dim = channels_dict[4]
+ if c_dim == 0:
+ cmap_dim = 0
+
+ common_kwargs = dict(img_channels=img_channels,
+ architecture=architecture,
+ conv_clamp=conv_clamp)
+ cur_layer_idx = 0
+ for res in self.block_resolutions:
+ in_channels = channels_dict[res] if res < img_resolution else 0
+ tmp_channels = channels_dict[res]
+ out_channels = channels_dict[res // 2]
+ use_fp16 = (res >= fp16_resolution)
+ block = DiscriminatorBlock(in_channels,
+ tmp_channels,
+ out_channels,
+ resolution=res,
+ first_layer_idx=cur_layer_idx,
+ use_fp16=use_fp16,
+ **block_kwargs,
+ **common_kwargs)
+ setattr(self, f'b{res}', block)
+ cur_layer_idx += block.num_layers
+ if c_dim > 0:
+ self.mapping = MappingNetwork(z_dim=0,
+ c_dim=c_dim,
+ w_dim=cmap_dim,
+ num_ws=None,
+ w_avg_beta=None,
+ **mapping_kwargs)
+ self.b4 = DiscriminatorEpilogue(channels_dict[4],
+ cmap_dim=cmap_dim,
+ resolution=4,
+ **epilogue_kwargs,
+ **common_kwargs)
+
+ def forward(self, img, c, update_emas=False, **block_kwargs):
+ _ = update_emas # unused
+ x = None
+ for res in self.block_resolutions:
+ block = getattr(self, f'b{res}')
+ x, img = block(x, img, **block_kwargs)
+
+ cmap = None
+ if self.c_dim > 0:
+ cmap = self.mapping(None, c)
+ x = self.b4(x, img, cmap)
+ return x
+
+ def extra_repr(self):
+ return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}'
+
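+# A usage sketch (unconditional, sizes illustrative):
+#
+#   D = Discriminator(c_dim=0, img_resolution=256, img_channels=3)
+#   logits = D(torch.randn(4, 3, 256, 256), c=None)  # -> [4, 1]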
+
+#----------------------------------------------------------------------------
diff --git a/nsr/networks_stylegan3.py b/nsr/networks_stylegan3.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c346b8a15b3ee00e58a2ac998dda3d5b4453020
--- /dev/null
+++ b/nsr/networks_stylegan3.py
@@ -0,0 +1,679 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Generator architecture from the paper
+"Alias-Free Generative Adversarial Networks"."""
+
+import numpy as np
+import scipy.signal
+import scipy.optimize
+import torch
+from torch_utils import misc
+from torch_utils import persistence
+from torch_utils.ops import conv2d_gradfix
+from torch_utils.ops import filtered_lrelu
+from torch_utils.ops import bias_act
+
+#----------------------------------------------------------------------------
+# from pdb import set_trace as st
+
+
+@misc.profiled_function
+def modulated_conv2d(
+ x, # Input tensor: [batch_size, in_channels, in_height, in_width]
+ w, # Weight tensor: [out_channels, in_channels, kernel_height, kernel_width]
+ s, # Style tensor: [batch_size, in_channels]
+ demodulate=True, # Apply weight demodulation?
+ padding=0, # Padding: int or [padH, padW]
+ input_gain=None, # Optional scale factors for the input channels: [], [in_channels], or [batch_size, in_channels]
+):
+ with misc.suppress_tracer_warnings(
+ ): # this value will be treated as a constant
+ batch_size = int(x.shape[0])
+ out_channels, in_channels, kh, kw = w.shape
+ misc.assert_shape(w, [out_channels, in_channels, kh, kw]) # [OIkk]
+ misc.assert_shape(x, [batch_size, in_channels, None, None]) # [NIHW]
+ misc.assert_shape(s, [batch_size, in_channels]) # [NI]
+
+ # Pre-normalize inputs.
+ if demodulate:
+ w = w * w.square().mean([1, 2, 3], keepdim=True).rsqrt()
+ s = s * s.square().mean().rsqrt()
+
+ # Modulate weights.
+ w = w.unsqueeze(0) # [NOIkk]
+ w = w * s.unsqueeze(1).unsqueeze(3).unsqueeze(4) # [NOIkk]
+
+ # Demodulate weights.
+ if demodulate:
+ dcoefs = (w.square().sum(dim=[2, 3, 4]) + 1e-8).rsqrt() # [NO]
+ w = w * dcoefs.unsqueeze(2).unsqueeze(3).unsqueeze(4) # [NOIkk]
+
+ # Apply input scaling.
+ if input_gain is not None:
+ input_gain = input_gain.expand(batch_size, in_channels) # [NI]
+ w = w * input_gain.unsqueeze(1).unsqueeze(3).unsqueeze(4) # [NOIkk]
+
+ # Execute as one fused op using grouped convolution.
+ x = x.reshape(1, -1, *x.shape[2:])
+ w = w.reshape(-1, in_channels, kh, kw)
+ x = conv2d_gradfix.conv2d(input=x,
+ weight=w.to(x.dtype),
+ padding=padding,
+ groups=batch_size)
+ x = x.reshape(batch_size, -1, *x.shape[2:])
+ return x
+
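+# The per-sample modulated weights are applied with a single grouped
+# convolution: the batch is folded into the channel axis and
+# groups=batch_size lets each sample see only its own weight slice.
+# A shape sketch (illustrative):
+#
+#   x = torch.randn(2, 512, 16, 16, device='cuda')  # [NIHW]
+#   w = torch.randn(256, 512, 3, 3, device='cuda')  # [OIkk]
+#   s = torch.randn(2, 512, device='cuda')          # [NI]
+#   y = modulated_conv2d(x, w, s, padding=1)        # -> [2, 256, 16, 16]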
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class FullyConnectedLayer(torch.nn.Module):
+ def __init__(
+ self,
+ in_features, # Number of input features.
+ out_features, # Number of output features.
+ activation='linear', # Activation function: 'relu', 'lrelu', etc.
+ bias=True, # Apply additive bias before the activation function?
+ lr_multiplier=1, # Learning rate multiplier.
+ weight_init=1, # Initial standard deviation of the weight tensor.
+ bias_init=0, # Initial value of the additive bias.
+ ):
+ super().__init__()
+ self.in_features = in_features
+ self.out_features = out_features
+ self.activation = activation
+ self.weight = torch.nn.Parameter(
+ torch.randn([out_features, in_features]) *
+ (weight_init / lr_multiplier))
+ bias_init = np.broadcast_to(np.asarray(bias_init, dtype=np.float32),
+ [out_features])
+ self.bias = torch.nn.Parameter(
+ torch.from_numpy(bias_init / lr_multiplier)) if bias else None
+ self.weight_gain = lr_multiplier / np.sqrt(in_features)
+ self.bias_gain = lr_multiplier
+
+ def forward(self, x):
+ w = self.weight.to(x.dtype) * self.weight_gain
+ b = self.bias
+ if b is not None:
+ b = b.to(x.dtype)
+ if self.bias_gain != 1:
+ b = b * self.bias_gain
+ if self.activation == 'linear' and b is not None:
+ x = torch.addmm(b.unsqueeze(0), x, w.t())
+ else:
+ x = x.matmul(w.t())
+ x = bias_act.bias_act(x, b, act=self.activation)
+ return x
+
+ def extra_repr(self):
+ return f'in_features={self.in_features:d}, out_features={self.out_features:d}, activation={self.activation:s}'
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class MappingNetwork(torch.nn.Module):
+ def __init__(
+ self,
+ z_dim, # Input latent (Z) dimensionality.
+ c_dim, # Conditioning label (C) dimensionality, 0 = no labels.
+ w_dim, # Intermediate latent (W) dimensionality.
+ num_ws, # Number of intermediate latents to output.
+ num_layers=2, # Number of mapping layers.
+ lr_multiplier=0.01, # Learning rate multiplier for the mapping layers.
+ w_avg_beta=0.998, # Decay for tracking the moving average of W during training.
+ ):
+ super().__init__()
+ self.z_dim = z_dim
+ self.c_dim = c_dim
+ self.w_dim = w_dim
+ self.num_ws = num_ws
+ self.num_layers = num_layers
+ self.w_avg_beta = w_avg_beta
+
+ # Construct layers.
+ self.embed = FullyConnectedLayer(
+ self.c_dim, self.w_dim) if self.c_dim > 0 else None
+ features = [self.z_dim + (self.w_dim if self.c_dim > 0 else 0)
+ ] + [self.w_dim] * self.num_layers
+ for idx, in_features, out_features in zip(range(num_layers),
+ features[:-1], features[1:]):
+ layer = FullyConnectedLayer(in_features,
+ out_features,
+ activation='lrelu',
+ lr_multiplier=lr_multiplier)
+ setattr(self, f'fc{idx}', layer)
+ self.register_buffer('w_avg', torch.zeros([w_dim]))
+
+ def forward(self,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False):
+ misc.assert_shape(z, [None, self.z_dim])
+ if truncation_cutoff is None:
+ truncation_cutoff = self.num_ws
+
+ # Embed, normalize, and concatenate inputs.
+ x = z.to(torch.float32)
+ x = x * (x.square().mean(1, keepdim=True) + 1e-8).rsqrt()
+ if self.c_dim > 0:
+ misc.assert_shape(c, [None, self.c_dim])
+ y = self.embed(c.to(torch.float32))
+ y = y * (y.square().mean(1, keepdim=True) + 1e-8).rsqrt()
+ x = torch.cat([x, y], dim=1) if x is not None else y
+
+ # Execute layers.
+ for idx in range(self.num_layers):
+ x = getattr(self, f'fc{idx}')(x)
+
+ # Update moving average of W.
+ if update_emas:
+ self.w_avg.copy_(x.detach().mean(dim=0).lerp(
+ self.w_avg, self.w_avg_beta))
+
+ # Broadcast and apply truncation.
+ x = x.unsqueeze(1).repeat([1, self.num_ws, 1])
+ if truncation_psi != 1:
+ x[:, :truncation_cutoff] = self.w_avg.lerp(
+ x[:, :truncation_cutoff], truncation_psi)
+ return x
+
+ def extra_repr(self):
+ return f'z_dim={self.z_dim:d}, c_dim={self.c_dim:d}, w_dim={self.w_dim:d}, num_ws={self.num_ws:d}'
+
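+# Truncation sketch (assumed psi): with truncation_psi=0.7, each of the first
+# truncation_cutoff latents is moved 30% of the way toward the tracked average
+# w_avg, trading sample diversity for fidelity, e.g.
+#   ws = mapping(z, c=None, truncation_psi=0.7)  # [N, num_ws, w_dim]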
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class SynthesisInput(torch.nn.Module):
+ def __init__(
+ self,
+ w_dim, # Intermediate latent (W) dimensionality.
+ channels, # Number of output channels.
+ size, # Output spatial size: int or [width, height].
+ sampling_rate, # Output sampling rate.
+ bandwidth, # Output bandwidth.
+ ):
+ super().__init__()
+ self.w_dim = w_dim
+ self.channels = channels
+ self.size = np.broadcast_to(np.asarray(size), [2])
+ self.sampling_rate = sampling_rate
+ self.bandwidth = bandwidth
+
+ # Draw random frequencies from uniform 2D disc.
+ freqs = torch.randn([self.channels, 2])
+ radii = freqs.square().sum(dim=1, keepdim=True).sqrt()
+ freqs /= radii * radii.square().exp().pow(0.25)
+ freqs *= bandwidth
+ phases = torch.rand([self.channels]) - 0.5
+
+ # Setup parameters and buffers.
+ self.weight = torch.nn.Parameter(
+ torch.randn([self.channels, self.channels]))
+ self.affine = FullyConnectedLayer(w_dim,
+ 4,
+ weight_init=0,
+ bias_init=[1, 0, 0, 0])
+ self.register_buffer('transform', torch.eye(
+ 3, 3)) # User-specified inverse transform wrt. resulting image.
+ self.register_buffer('freqs', freqs)
+ self.register_buffer('phases', phases)
+
+ def forward(self, w):
+ # Introduce batch dimension.
+ transforms = self.transform.unsqueeze(0) # [batch, row, col]
+ freqs = self.freqs.unsqueeze(0) # [batch, channel, xy]
+ phases = self.phases.unsqueeze(0) # [batch, channel]
+
+ # Apply learned transformation.
+ t = self.affine(w) # t = (r_c, r_s, t_x, t_y)
+ t = t / t[:, :2].norm(dim=1,
+ keepdim=True) # t' = (r'_c, r'_s, t'_x, t'_y)
+ m_r = torch.eye(3, device=w.device).unsqueeze(0).repeat(
+ [w.shape[0], 1, 1]) # Inverse rotation wrt. resulting image.
+ m_r[:, 0, 0] = t[:, 0] # r'_c
+ m_r[:, 0, 1] = -t[:, 1] # r'_s
+ m_r[:, 1, 0] = t[:, 1] # r'_s
+ m_r[:, 1, 1] = t[:, 0] # r'_c
+ m_t = torch.eye(3, device=w.device).unsqueeze(0).repeat(
+ [w.shape[0], 1, 1]) # Inverse translation wrt. resulting image.
+ m_t[:, 0, 2] = -t[:, 2] # t'_x
+ m_t[:, 1, 2] = -t[:, 3] # t'_y
+ transforms = m_r @ m_t @ transforms # First rotate resulting image, then translate, and finally apply user-specified transform.
+
+ # Transform frequencies.
+ phases = phases + (freqs @ transforms[:, :2, 2:]).squeeze(2)
+ freqs = freqs @ transforms[:, :2, :2]
+
+ # Dampen out-of-band frequencies that may occur due to the user-specified transform.
+ amplitudes = (1 - (freqs.norm(dim=2) - self.bandwidth) /
+ (self.sampling_rate / 2 - self.bandwidth)).clamp(0, 1)
+
+ # Construct sampling grid.
+ theta = torch.eye(2, 3, device=w.device)
+ theta[0, 0] = 0.5 * self.size[0] / self.sampling_rate
+ theta[1, 1] = 0.5 * self.size[1] / self.sampling_rate
+ grids = torch.nn.functional.affine_grid(
+ theta.unsqueeze(0), [1, 1, self.size[1], self.size[0]],
+ align_corners=False)
+
+ # Compute Fourier features.
+ x = (grids.unsqueeze(3) @ freqs.permute(
+ 0, 2, 1).unsqueeze(1).unsqueeze(2)).squeeze(
+ 3) # [batch, height, width, channel]
+ x = x + phases.unsqueeze(1).unsqueeze(2)
+ x = torch.sin(x * (np.pi * 2))
+ x = x * amplitudes.unsqueeze(1).unsqueeze(2)
+
+ # Apply trainable mapping.
+ weight = self.weight / np.sqrt(self.channels)
+ x = x @ weight.t()
+
+ # Ensure correct shape.
+ x = x.permute(0, 3, 1, 2) # [batch, channel, height, width]
+ misc.assert_shape(
+ x,
+ [w.shape[0], self.channels,
+ int(self.size[1]),
+ int(self.size[0])])
+ return x
+
+ def extra_repr(self):
+ return '\n'.join([
+ f'w_dim={self.w_dim:d}, channels={self.channels:d}, size={list(self.size)},',
+ f'sampling_rate={self.sampling_rate:g}, bandwidth={self.bandwidth:g}'
+ ])
+
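+# Note: SynthesisInput replaces StyleGAN2's learned constant input with
+# transformable Fourier features sin(2*pi*(f.x + phi)), so the w-conditioned
+# rotation/translation acts on a continuous signal and sub-pixel motion of the
+# output stays well defined. Frequencies pushed past the bandwidth by the user
+# transform have their amplitude faded linearly to zero at sampling_rate / 2.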
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class SynthesisLayer(torch.nn.Module):
+ def __init__(
+ self,
+ w_dim, # Intermediate latent (W) dimensionality.
+ is_torgb, # Is this the final ToRGB layer?
+ is_critically_sampled, # Does this layer use critical sampling?
+ use_fp16, # Does this layer use FP16?
+
+ # Input & output specifications.
+ in_channels, # Number of input channels.
+ out_channels, # Number of output channels.
+ in_size, # Input spatial size: int or [width, height].
+ out_size, # Output spatial size: int or [width, height].
+ in_sampling_rate, # Input sampling rate (s).
+ out_sampling_rate, # Output sampling rate (s).
+ in_cutoff, # Input cutoff frequency (f_c).
+ out_cutoff, # Output cutoff frequency (f_c).
+ in_half_width, # Input transition band half-width (f_h).
+        out_half_width, # Output transition band half-width (f_h).
+
+ # Hyperparameters.
+        conv_kernel=3, # Convolution kernel size. Ignored for the final ToRGB layer.
+ filter_size=6, # Low-pass filter size relative to the lower resolution when up/downsampling.
+        lrelu_upsampling=2, # Relative sampling rate for leaky ReLU. Ignored for the final ToRGB layer.
+ use_radial_filters=False, # Use radially symmetric downsampling filter? Ignored for critically sampled layers.
+ conv_clamp=256, # Clamp the output to [-X, +X], None = disable clamping.
+ magnitude_ema_beta=0.999, # Decay rate for the moving average of input magnitudes.
+ ):
+ super().__init__()
+ self.w_dim = w_dim
+ self.is_torgb = is_torgb
+ self.is_critically_sampled = is_critically_sampled
+ self.use_fp16 = use_fp16
+ self.in_channels = in_channels
+ self.out_channels = out_channels
+ self.in_size = np.broadcast_to(np.asarray(in_size), [2])
+ self.out_size = np.broadcast_to(np.asarray(out_size), [2])
+ self.in_sampling_rate = in_sampling_rate
+ self.out_sampling_rate = out_sampling_rate
+ self.tmp_sampling_rate = max(in_sampling_rate, out_sampling_rate) * (
+ 1 if is_torgb else lrelu_upsampling)
+ self.in_cutoff = in_cutoff
+ self.out_cutoff = out_cutoff
+ self.in_half_width = in_half_width
+ self.out_half_width = out_half_width
+ self.conv_kernel = 1 if is_torgb else conv_kernel
+ self.conv_clamp = conv_clamp
+ self.magnitude_ema_beta = magnitude_ema_beta
+
+ # Setup parameters and buffers.
+ self.affine = FullyConnectedLayer(self.w_dim,
+ self.in_channels,
+ bias_init=1)
+ self.weight = torch.nn.Parameter(
+ torch.randn([
+ self.out_channels, self.in_channels, self.conv_kernel,
+ self.conv_kernel
+ ]))
+ self.bias = torch.nn.Parameter(torch.zeros([self.out_channels]))
+ self.register_buffer('magnitude_ema', torch.ones([]))
+
+ # Design upsampling filter.
+ self.up_factor = int(
+ np.rint(self.tmp_sampling_rate / self.in_sampling_rate))
+ assert self.in_sampling_rate * self.up_factor == self.tmp_sampling_rate
+ self.up_taps = filter_size * self.up_factor if self.up_factor > 1 and not self.is_torgb else 1
+ self.register_buffer(
+ 'up_filter',
+ self.design_lowpass_filter(numtaps=self.up_taps,
+ cutoff=self.in_cutoff,
+ width=self.in_half_width * 2,
+ fs=self.tmp_sampling_rate))
+
+ # Design downsampling filter.
+ self.down_factor = int(
+ np.rint(self.tmp_sampling_rate / self.out_sampling_rate))
+ assert self.out_sampling_rate * self.down_factor == self.tmp_sampling_rate
+ self.down_taps = filter_size * self.down_factor if self.down_factor > 1 and not self.is_torgb else 1
+ self.down_radial = use_radial_filters and not self.is_critically_sampled
+ self.register_buffer(
+ 'down_filter',
+ self.design_lowpass_filter(numtaps=self.down_taps,
+ cutoff=self.out_cutoff,
+ width=self.out_half_width * 2,
+ fs=self.tmp_sampling_rate,
+ radial=self.down_radial))
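+        # For example (assumed values): filter_size=6 with down_factor=2 yields
+        # a 12-tap Kaiser-windowed sinc (see design_lowpass_filter); layers
+        # that are not critically sampled may instead use the radially
+        # symmetric jinc filter when use_radial_filters is set.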
+
+ # Compute padding.
+ pad_total = (
+ self.out_size - 1
+ ) * self.down_factor + 1 # Desired output size before downsampling.
+ pad_total -= (self.in_size + self.conv_kernel -
+ 1) * self.up_factor # Input size after upsampling.
+ pad_total += self.up_taps + self.down_taps - 2 # Size reduction caused by the filters.
+ pad_lo = (
+ pad_total + self.up_factor
+ ) // 2 # Shift sample locations according to the symmetric interpretation (Appendix C.3).
+ pad_hi = pad_total - pad_lo
+ self.padding = [
+ int(pad_lo[0]),
+ int(pad_hi[0]),
+ int(pad_lo[1]),
+ int(pad_hi[1])
+ ]
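+        # Worked example (assumed values): in_size=36, out_size=36, k=3,
+        # up_factor=down_factor=2 and up_taps=down_taps=12 give
+        # pad_total = (36-1)*2+1 - (36+3-1)*2 + (12+12-2) = 71 - 76 + 22 = 17,
+        # hence pad_lo = (17+2)//2 = 9 and pad_hi = 17-9 = 8 per dimension.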
+
+ def forward(self,
+ x,
+ w,
+ noise_mode='random',
+ force_fp32=False,
+ update_emas=False):
+ assert noise_mode in ['random', 'const', 'none'] # unused
+ misc.assert_shape(x, [
+ None, self.in_channels,
+ int(self.in_size[1]),
+ int(self.in_size[0])
+ ])
+ misc.assert_shape(w, [x.shape[0], self.w_dim])
+
+ # Track input magnitude.
+ if update_emas:
+ with torch.autograd.profiler.record_function(
+ 'update_magnitude_ema'):
+ magnitude_cur = x.detach().to(torch.float32).square().mean()
+ self.magnitude_ema.copy_(
+ magnitude_cur.lerp(self.magnitude_ema,
+ self.magnitude_ema_beta))
+ input_gain = self.magnitude_ema.rsqrt()
+
+ # Execute affine layer.
+ styles = self.affine(w)
+ if self.is_torgb:
+ weight_gain = 1 / np.sqrt(self.in_channels * (self.conv_kernel**2))
+ styles = styles * weight_gain
+
+ # Execute modulated conv2d.
+ dtype = torch.float16 if (self.use_fp16 and not force_fp32 and
+ x.device.type == 'cuda') else torch.float32
+ x = modulated_conv2d(x=x.to(dtype),
+ w=self.weight,
+ s=styles,
+ padding=self.conv_kernel - 1,
+ demodulate=(not self.is_torgb),
+ input_gain=input_gain)
+
+ # Execute bias, filtered leaky ReLU, and clamping.
+ gain = 1 if self.is_torgb else np.sqrt(2)
+ slope = 1 if self.is_torgb else 0.2
+ x = filtered_lrelu.filtered_lrelu(x=x,
+ fu=self.up_filter,
+ fd=self.down_filter,
+ b=self.bias.to(x.dtype),
+ up=self.up_factor,
+ down=self.down_factor,
+ padding=self.padding,
+ gain=gain,
+ slope=slope,
+ clamp=self.conv_clamp)
+
+ # Ensure correct shape and dtype.
+ misc.assert_shape(x, [
+ None, self.out_channels,
+ int(self.out_size[1]),
+ int(self.out_size[0])
+ ])
+ assert x.dtype == dtype
+ return x
+
+ @staticmethod
+ def design_lowpass_filter(numtaps, cutoff, width, fs, radial=False):
+ assert numtaps >= 1
+
+ # Identity filter.
+ if numtaps == 1:
+ return None
+
+ # Separable Kaiser low-pass filter.
+ if not radial:
+ f = scipy.signal.firwin(numtaps=numtaps,
+ cutoff=cutoff,
+ width=width,
+ fs=fs)
+ return torch.as_tensor(f, dtype=torch.float32)
+
+ # Radially symmetric jinc-based filter.
+ x = (np.arange(numtaps) - (numtaps - 1) / 2) / fs
+ r = np.hypot(*np.meshgrid(x, x))
+ f = scipy.special.j1(2 * cutoff * (np.pi * r)) / (np.pi * r)
+ beta = scipy.signal.kaiser_beta(
+ scipy.signal.kaiser_atten(numtaps, width / (fs / 2)))
+ w = np.kaiser(numtaps, beta)
+ f *= np.outer(w, w)
+ f /= np.sum(f)
+ return torch.as_tensor(f, dtype=torch.float32)
+
+ def extra_repr(self):
+ return '\n'.join([
+ f'w_dim={self.w_dim:d}, is_torgb={self.is_torgb},',
+ f'is_critically_sampled={self.is_critically_sampled}, use_fp16={self.use_fp16},',
+ f'in_sampling_rate={self.in_sampling_rate:g}, out_sampling_rate={self.out_sampling_rate:g},',
+ f'in_cutoff={self.in_cutoff:g}, out_cutoff={self.out_cutoff:g},',
+ f'in_half_width={self.in_half_width:g}, out_half_width={self.out_half_width:g},',
+ f'in_size={list(self.in_size)}, out_size={list(self.out_size)},',
+ f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}'
+ ])
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class SynthesisNetwork(torch.nn.Module):
+ def __init__(
+ self,
+ w_dim, # Intermediate latent (W) dimensionality.
+ img_resolution, # Output image resolution.
+ img_channels, # Number of color channels.
+ channel_base=32768, # Overall multiplier for the number of channels.
+ channel_max=512, # Maximum number of channels in any layer.
+ num_layers=14, # Total number of layers, excluding Fourier features and ToRGB.
+ num_critical=2, # Number of critically sampled layers at the end.
+ first_cutoff=2, # Cutoff frequency of the first layer (f_{c,0}).
+        first_stopband=2**2.1, # Minimum stopband of the first layer (f_{t,0}).
+        last_stopband_rel=2**0.3, # Minimum stopband of the last layer, expressed relative to the cutoff.
+ margin_size=10, # Number of additional pixels outside the image.
+ output_scale=0.25, # Scale factor for the output image.
+ num_fp16_res=4, # Use FP16 for the N highest resolutions.
+ **layer_kwargs, # Arguments for SynthesisLayer.
+ ):
+ super().__init__()
+ self.w_dim = w_dim
+ self.num_ws = num_layers + 2
+ self.img_resolution = img_resolution
+ self.img_channels = img_channels
+ self.num_layers = num_layers
+ self.num_critical = num_critical
+ self.margin_size = margin_size
+ self.output_scale = output_scale
+ self.num_fp16_res = num_fp16_res
+
+ # Geometric progression of layer cutoffs and min. stopbands.
+ last_cutoff = self.img_resolution / 2 # f_{c,N}
+ last_stopband = last_cutoff * last_stopband_rel # f_{t,N}
+ exponents = np.minimum(
+ np.arange(self.num_layers + 1) /
+ (self.num_layers - self.num_critical), 1)
+ cutoffs = first_cutoff * (last_cutoff /
+ first_cutoff)**exponents # f_c[i]
+ stopbands = first_stopband * (last_stopband /
+ first_stopband)**exponents # f_t[i]
+
+ # Compute remaining layer parameters.
+ sampling_rates = np.exp2(
+ np.ceil(np.log2(np.minimum(stopbands * 2,
+ self.img_resolution)))) # s[i]
+ half_widths = np.maximum(stopbands,
+ sampling_rates / 2) - cutoffs # f_h[i]
+ sizes = sampling_rates + self.margin_size * 2
+ sizes[-2:] = self.img_resolution
+ channels = np.rint(
+ np.minimum((channel_base / 2) / cutoffs, channel_max))
+ channels[-1] = self.img_channels
+
+ # Construct layers.
+ self.input = SynthesisInput(w_dim=self.w_dim,
+ channels=int(channels[0]),
+ size=int(sizes[0]),
+ sampling_rate=sampling_rates[0],
+ bandwidth=cutoffs[0])
+ self.layer_names = []
+ for idx in range(self.num_layers + 1):
+ prev = max(idx - 1, 0)
+ is_torgb = (idx == self.num_layers)
+ is_critically_sampled = (idx >=
+ self.num_layers - self.num_critical)
+ use_fp16 = (sampling_rates[idx] *
+ (2**self.num_fp16_res) > self.img_resolution)
+ layer = SynthesisLayer(w_dim=self.w_dim,
+ is_torgb=is_torgb,
+ is_critically_sampled=is_critically_sampled,
+ use_fp16=use_fp16,
+ in_channels=int(channels[prev]),
+ out_channels=int(channels[idx]),
+ in_size=int(sizes[prev]),
+ out_size=int(sizes[idx]),
+ in_sampling_rate=int(sampling_rates[prev]),
+ out_sampling_rate=int(sampling_rates[idx]),
+ in_cutoff=cutoffs[prev],
+ out_cutoff=cutoffs[idx],
+ in_half_width=half_widths[prev],
+ out_half_width=half_widths[idx],
+ **layer_kwargs)
+ name = f'L{idx}_{layer.out_size[0]}_{layer.out_channels}'
+ setattr(self, name, layer)
+ self.layer_names.append(name)
+
+ def forward(self, ws, **layer_kwargs):
+ misc.assert_shape(ws, [None, self.num_ws, self.w_dim])
+ ws = ws.to(torch.float32).unbind(dim=1)
+
+ # Execute layers.
+ x = self.input(ws[0])
+ for name, w in zip(self.layer_names, ws[1:]):
+ x = getattr(self, name)(x, w, **layer_kwargs)
+ if self.output_scale != 1:
+ x = x * self.output_scale
+
+ # Ensure correct shape and dtype.
+ misc.assert_shape(x, [
+ None, self.img_channels, self.img_resolution, self.img_resolution
+ ])
+ x = x.to(torch.float32)
+ return x
+
+ def extra_repr(self):
+ return '\n'.join([
+ f'w_dim={self.w_dim:d}, num_ws={self.num_ws:d},',
+ f'img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d},',
+ f'num_layers={self.num_layers:d}, num_critical={self.num_critical:d},',
+ f'margin_size={self.margin_size:d}, num_fp16_res={self.num_fp16_res:d}'
+ ])
+
+
+#----------------------------------------------------------------------------
+
+
+@persistence.persistent_class
+class Generator(torch.nn.Module):
+ def __init__(
+ self,
+ z_dim, # Input latent (Z) dimensionality.
+ c_dim, # Conditioning label (C) dimensionality.
+ w_dim, # Intermediate latent (W) dimensionality.
+ img_resolution, # Output resolution.
+ img_channels, # Number of output color channels.
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ **synthesis_kwargs, # Arguments for SynthesisNetwork.
+ ):
+ super().__init__()
+ self.z_dim = z_dim
+ self.c_dim = c_dim
+ self.w_dim = w_dim
+ self.img_resolution = img_resolution
+ self.img_channels = img_channels
+ self.synthesis = SynthesisNetwork(w_dim=w_dim,
+ img_resolution=img_resolution,
+ img_channels=img_channels,
+ **synthesis_kwargs)
+ self.num_ws = self.synthesis.num_ws
+ self.mapping = MappingNetwork(z_dim=z_dim,
+ c_dim=c_dim,
+ w_dim=w_dim,
+ num_ws=self.num_ws,
+ **mapping_kwargs)
+
+ def forward(self,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False,
+ **synthesis_kwargs):
+ ws = self.mapping(z,
+ c,
+ truncation_psi=truncation_psi,
+ truncation_cutoff=truncation_cutoff,
+ update_emas=update_emas)
+ img = self.synthesis(ws, update_emas=update_emas, **synthesis_kwargs)
+ return img
+
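+# End-to-end usage sketch (assumed dims, unconditional model):
+#   G = Generator(z_dim=512, c_dim=0, w_dim=512, img_resolution=256, img_channels=3)
+#   img = G(torch.randn(4, 512), c=None, truncation_psi=0.7)  # [4, 3, 256, 256]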
+
+#----------------------------------------------------------------------------
diff --git a/nsr/options.py b/nsr/options.py
new file mode 100644
index 0000000000000000000000000000000000000000..f8c75bd4cc1f883cff55cfdae88a39d8c3ca7e4f
--- /dev/null
+++ b/nsr/options.py
@@ -0,0 +1,120 @@
+import tyro
+from dataclasses import dataclass
+from typing import Tuple, Literal, Dict, Optional
+
+
+@dataclass
+class Options:
+ ### model
+ # Unet image input size
+ input_size: int = 256
+ # Unet definition
+ down_channels: Tuple[int, ...] = (64, 128, 256, 512, 1024, 1024)
+ down_attention: Tuple[bool, ...] = (False, False, False, True, True, True)
+ mid_attention: bool = True
+ up_channels: Tuple[int, ...] = (1024, 1024, 512, 256)
+ up_attention: Tuple[bool, ...] = (True, True, True, False)
+ # Unet output size, dependent on the input_size and U-Net structure!
+ splat_size: int = 64
+ # gaussian render size
+ output_size: int = 256
+
+ ### dataset
+ # data mode (only support s3 now)
+ data_mode: Literal['s3'] = 's3'
+ # fovy of the dataset
+ fovy: float = 49.1
+ # camera near plane
+ znear: float = 0.5
+ # camera far plane
+ zfar: float = 2.5
+ # number of all views (input + output)
+ num_views: int = 12
+ # number of views
+ num_input_views: int = 4
+ # camera radius
+ cam_radius: float = 1.5 # to better use [-1, 1]^3 space
+ # num workers
+ num_workers: int = 8
+
+ ### training
+ # workspace
+ workspace: str = './workspace'
+ # resume
+ resume: Optional[str] = None
+ # batch size (per-GPU)
+ batch_size: int = 8
+ # gradient accumulation
+ gradient_accumulation_steps: int = 1
+ # training epochs
+ num_epochs: int = 30
+ # lpips loss weight
+ lambda_lpips: float = 1.0
+ # gradient clip
+ gradient_clip: float = 1.0
+ # mixed precision
+ mixed_precision: str = 'bf16'
+ # learning rate
+ lr: float = 4e-4
+ # augmentation prob for grid distortion
+ prob_grid_distortion: float = 0.5
+ # augmentation prob for camera jitter
+ prob_cam_jitter: float = 0.5
+
+ ### testing
+ # test image path
+ test_path: Optional[str] = None
+
+ ### misc
+ # nvdiffrast backend setting
+ force_cuda_rast: bool = False
+ # render fancy video with gaussian scaling effect
+ fancy_video: bool = False
+
+
+# all the default settings
+config_defaults: Dict[str, Options] = {}
+config_doc: Dict[str, str] = {}
+
+config_doc['lrm'] = 'the default settings for LGM'
+config_defaults['lrm'] = Options()
+
+config_doc['small'] = 'small model with lower resolution Gaussians'
+config_defaults['small'] = Options(
+ input_size=256,
+ splat_size=64,
+ output_size=256,
+ batch_size=8,
+ gradient_accumulation_steps=1,
+ mixed_precision='bf16',
+)
+
+config_doc['big'] = 'big model with higher resolution Gaussians'
+config_defaults['big'] = Options(
+ input_size=256,
+ up_channels=(1024, 1024, 512, 256, 128), # one more decoder
+ up_attention=(True, True, True, False, False),
+ splat_size=128,
+ output_size=512, # render & supervise Gaussians at a higher resolution.
+ batch_size=8,
+ num_views=8,
+ gradient_accumulation_steps=1,
+ mixed_precision='bf16',
+)
+
+config_doc['tiny'] = 'tiny model for ablation'
+config_defaults['tiny'] = Options(
+ input_size=256,
+ down_channels=(32, 64, 128, 256, 512),
+ down_attention=(False, False, False, False, True),
+ up_channels=(512, 256, 128),
+ up_attention=(True, False, False, False),
+ splat_size=64,
+ output_size=256,
+ batch_size=16,
+ num_views=8,
+ gradient_accumulation_steps=1,
+ mixed_precision='bf16',
+)
+
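+# Usage sketch (assumed entry point): tyro exposes each preset as a CLI
+# subcommand, so e.g. `python main.py big --output-size 512` would start from
+# the 'big' preset and override a single field:
+# opt: Options = tyro.cli(AllConfigs)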
+AllConfigs = tyro.extras.subcommand_type_from_defaults(config_defaults, config_doc)
\ No newline at end of file
diff --git a/nsr/script_util.py b/nsr/script_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..082c8205a0964cb4161690425d149f62ffe9ec4a
--- /dev/null
+++ b/nsr/script_util.py
@@ -0,0 +1,1708 @@
+import torch
+from torch import nn
+from nsr.triplane import Triplane_fg_bg_plane
+# import timm
+# from vit.vit_triplane import ViTTriplane, Triplane, ViTTriplaneDecomposed
+from vit.vit_triplane import Triplane, ViTTriplaneDecomposed
+import argparse
+import inspect
+import dnnlib
+from guided_diffusion import dist_util
+
+from pdb import set_trace as st
+
+import vit.vision_transformer as vits
+from guided_diffusion import logger
+from .confnet import ConfNet
+
+from ldm.modules.diffusionmodules.model import Encoder, Decoder, MVEncoder, MVEncoderGS, MVEncoderGSDynamicInp, MVEncoderGSDynamicInp_CA
+from ldm.modules.diffusionmodules.mv_unet import MVUNet, LGM_MVEncoder
+from torch.profiler import profile, record_function, ProfilerActivity
+
+from nsr.gs import GaussianRenderer
+from nsr.gs_surfel import GaussianRenderer2DGS
+# from nsr.srt.encoder import ImprovedSRTEncoderVAE, ImprovedSRTEncoderVAE_L5_vitl, ImprovedSRTEncoderVAE_mlp_ratio4, ImprovedSRTEncoderVAE_L6, ImprovedSRTEncoderVAE_mlp_ratio4_f8, ImprovedSRTEncoderVAE_mlp_ratio4_heavyPatchify, ImprovedSRTEncoderVAE_mlp_ratio4_f8_L6, ImprovedSRTEncoderVAE_mlp_ratio4_L6, HybridEncoder, ImprovedSRTEncoderVAE_mlp_ratio4_decomposed, HybridEncoderPCDStructuredLatent
+from nsr.srt.encoder import *
+
+# from ldm.modules.diffusionmodules.openaimodel import MultiViewUNetModel_Encoder
+
+# * create pre-trained encoder & triplane / other nsr decoder
+
+
+class AE(torch.nn.Module):
+
+ def __init__(self,
+ encoder,
+ decoder,
+ img_size,
+ encoder_cls_token,
+ decoder_cls_token,
+ preprocess,
+ use_clip,
+ dino_version='v1',
+ clip_dtype=None,
+ no_dim_up_mlp=False,
+ dim_up_mlp_as_func=False,
+ uvit_skip_encoder=False,
+ confnet=None) -> None:
+ super().__init__()
+ self.encoder = encoder
+ self.decoder = decoder
+ self.img_size = img_size
+ self.encoder_cls_token = encoder_cls_token
+ self.decoder_cls_token = decoder_cls_token
+ self.use_clip = use_clip
+ self.dino_version = dino_version
+ self.confnet = confnet
+
+ if self.dino_version == 'v2':
+ self.encoder.mask_token = None
+ self.decoder.vit_decoder.mask_token = None
+
+ if 'sd' not in self.dino_version:
+
+ self.uvit_skip_encoder = uvit_skip_encoder
+ if uvit_skip_encoder:
+ logger.log(
+ f'enables uvit: length of vit_encoder.blocks: {len(self.encoder.blocks)}'
+ )
+ for blk in self.encoder.blocks[len(self.encoder.blocks) // 2:]:
+ blk.skip_linear = nn.Linear(2 * self.encoder.embed_dim,
+ self.encoder.embed_dim)
+
+ # trunc_normal_(blk.skip_linear.weight, std=.02)
+ nn.init.constant_(blk.skip_linear.weight, 0)
+ if isinstance(
+ blk.skip_linear,
+ nn.Linear) and blk.skip_linear.bias is not None:
+ nn.init.constant_(blk.skip_linear.bias, 0)
+ else:
+ logger.log(f'disable uvit')
+ else:
+ if 'dit' not in self.dino_version: # dino vit, not dit
+ self.decoder.vit_decoder.cls_token = None
+ self.decoder.vit_decoder.patch_embed.proj = nn.Identity()
+ self.decoder.triplane_decoder.planes = None
+ self.decoder.vit_decoder.mask_token = None
+
+        self.dim_up_mlp_as_func = dim_up_mlp_as_func
+
+        if self.use_clip:
+            self.clip_dtype = clip_dtype  # torch.float16
+
+        else:
+
+            if not no_dim_up_mlp and self.encoder.embed_dim != self.decoder.vit_decoder.embed_dim:
+                self.dim_up_mlp = nn.Linear(
+                    self.encoder.embed_dim,
+                    self.decoder.vit_decoder.embed_dim)
+                logger.log(
+                    f"dim_up_mlp: {self.encoder.embed_dim} -> {self.decoder.vit_decoder.embed_dim}, as_func: {self.dim_up_mlp_as_func}"
+                )
+            else:
+                logger.log('ignore dim_up_mlp: ', no_dim_up_mlp)
+
+        self.preprocess = preprocess
+
+        self.dim_up_mlp = None  # CLIP/B-16; note this overwrites any dim_up_mlp built above
+
+ # * remove certain components to make sure no unused parameters during DDP
+ # self.decoder.vit_decoder.cls_token = nn.Identity()
+ torch.cuda.empty_cache()
+ # self.decoder.vit_decoder.patch_embed.proj.bias = nn.Identity()
+ # self.decoder.vit_decoder.patch_embed.proj.weight = nn.Identity()
+ # self.decoder.vit_decoder.patch_embed.proj.bias = nn.Identity()
+
+ def encode(self, *args, **kwargs):
+ if not self.use_clip:
+ if self.dino_version == 'v1':
+ latent = self.encode_dinov1(*args, **kwargs)
+ elif self.dino_version == 'v2':
+ if self.uvit_skip_encoder:
+ latent = self.encode_dinov2_uvit(*args, **kwargs)
+ else:
+ latent = self.encode_dinov2(*args, **kwargs)
+ else:
+ latent = self.encoder(*args, **kwargs)
+
+ else:
+ latent = self.encode_clip(*args, **kwargs)
+
+ return latent
+
+ def encode_dinov1(self, x):
+ # return self.encoder(img)
+ x = self.encoder.prepare_tokens(x)
+ for blk in self.encoder.blocks:
+ x = blk(x)
+ x = self.encoder.norm(x)
+ if not self.encoder_cls_token:
+ return x[:, 1:]
+
+ return x
+
+ def encode_dinov2(self, x):
+ # return self.encoder(img)
+ x = self.encoder.prepare_tokens_with_masks(x, masks=None)
+ for blk in self.encoder.blocks:
+ x = blk(x)
+ x_norm = self.encoder.norm(x)
+
+ if not self.encoder_cls_token:
+ return x_norm[:, 1:]
+ # else:
+ # return x_norm[:, :1]
+
+ # return {
+ # "x_norm_clstoken": x_norm[:, 0],
+ # "x_norm_patchtokens": x_norm[:, 1:],
+ # }
+
+ return x_norm
+
+ def encode_dinov2_uvit(self, x):
+ # return self.encoder(img)
+ x = self.encoder.prepare_tokens_with_masks(x, masks=None)
+
+ # for blk in self.encoder.blocks:
+ # x = blk(x)
+
+ skips = [x]
+
+ # in blks
+ for blk in self.encoder.blocks[0:len(self.encoder.blocks) // 2 - 1]:
+ x = blk(x) # B 3 N C
+ skips.append(x)
+
+ # mid blks
+ for blk in self.encoder.blocks[len(self.encoder.blocks) // 2 -
+ 1:len(self.encoder.blocks) // 2]:
+ x = blk(x) # B 3 N C
+
+ # out blks
+ for blk in self.encoder.blocks[len(self.encoder.blocks) // 2:]:
+ x = x + blk.skip_linear(torch.cat(
+ [x, skips.pop()], dim=-1)) # long skip connections in uvit
+ x = blk(x) # B 3 N C
+
+ x_norm = self.encoder.norm(x)
+
+ if not self.decoder_cls_token:
+ return x_norm[:, 1:]
+
+ return x_norm
+
+ def encode_clip(self, x):
+ # * replace with CLIP encoding pipeline
+ # return self.encoder(img)
+ # x = x.dtype(self.clip_dtype)
+ x = self.encoder.conv1(x) # shape = [*, width, grid, grid]
+ x = x.reshape(x.shape[0], x.shape[1],
+ -1) # shape = [*, width, grid ** 2]
+ x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width]
+ x = torch.cat([
+ self.encoder.class_embedding.to(x.dtype) + torch.zeros(
+ x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device), x
+ ],
+ dim=1) # shape = [*, grid ** 2 + 1, width]
+ x = x + self.encoder.positional_embedding.to(x.dtype)
+ x = self.encoder.ln_pre(x)
+
+ x = x.permute(1, 0, 2) # NLD -> LND
+ x = self.encoder.transformer(x)
+ x = x.permute(1, 0, 2) # LND -> NLD
+ x = self.encoder.ln_post(x[:, 1:, :]) # * return the spatial tokens
+
+ return x
+
+ # x = self.ln_post(x[:, 0, :]) # * return the spatial tokens
+
+ # if self.proj is not None:
+ # x = x @ self.proj
+
+ # return x
+
+ def decode_wo_triplane(self, latent, c=None, img_size=None):
+ if img_size is None:
+ img_size = self.img_size
+
+ if self.dim_up_mlp is not None:
+ if not self.dim_up_mlp_as_func:
+ latent = self.dim_up_mlp(latent)
+ # return self.decoder.vit_decode(latent, img_size)
+ else:
+ return self.decoder.vit_decode(
+ latent, img_size,
+ dim_up_mlp=self.dim_up_mlp) # used in vae-ldm
+
+ return self.decoder.vit_decode(latent, img_size, c=c)
+
+ def decode(self, latent, c, img_size=None, return_raw_only=False):
+ # if img_size is None:
+ # img_size = self.img_size
+
+ # if self.dim_up_mlp is not None:
+ # latent = self.dim_up_mlp(latent)
+
+ latent = self.decode_wo_triplane(latent, img_size=img_size, c=c)
+ # return self.decoder.triplane_decode(latent, c, return_raw_only=return_raw_only)
+ return self.decoder.triplane_decode(latent, c)
+
+ def decode_after_vae_no_render(
+ self,
+ ret_dict,
+ img_size=None,
+ ):
+
+ if img_size is None:
+ img_size = self.img_size
+
+ assert self.dim_up_mlp is None
+ # if not self.dim_up_mlp_as_func:
+ # latent = self.dim_up_mlp(latent)
+ # return self.decoder.vit_decode(latent, img_size)
+
+ latent = self.decoder.vit_decode_backbone(ret_dict, img_size)
+ ret_dict = self.decoder.vit_decode_postprocess(latent, ret_dict)
+ return ret_dict
+
+ def decode_after_vae_no_render_gs(
+ self,
+ ret_dict,
+ img_size=None,
+ ):
+
+ ret_after_decoder = self.decode_after_vae_no_render(ret_dict, img_size)
+ return self.decoder.forward_gaussians(ret_after_decoder, c=None)
+
+ def decode_after_vae(
+ self,
+ # latent,
+ ret_dict, # vae_dict
+ c,
+ img_size=None,
+ return_raw_only=False):
+ ret_dict = self.decode_after_vae_no_render(ret_dict, img_size)
+ return self.decoder.triplane_decode(ret_dict, c)
+
+ def decode_confmap(self, img):
+ assert self.confnet is not None
+ # https://github.com/elliottwu/unsup3d/blob/dc961410d61684561f19525c2f7e9ee6f4dacb91/unsup3d/model.py#L152
+ # conf_sigma_l1 = self.confnet(img) # Bx2xHxW
+ return self.confnet(img) # Bx1xHxW
+
+ def encode_decode(self, img, c, return_raw_only=False):
+ latent = self.encode(img)
+ pred = self.decode(latent, c, return_raw_only=return_raw_only)
+ if self.confnet is not None:
+ pred.update({
+ 'conf_sigma': self.decode_confmap(img) # 224x224
+ })
+
+ return pred
+
+ def forward(self,
+ img=None,
+ c=None,
+ latent=None,
+ behaviour='enc_dec',
+ coordinates=None,
+ directions=None,
+ return_raw_only=False,
+ *args,
+ **kwargs):
+ """wrap all operations inside forward() for DDP use.
+ """
+
+ if behaviour == 'enc_dec':
+ pred = self.encode_decode(img, c, return_raw_only=return_raw_only)
+ return pred
+
+ elif behaviour == 'enc':
+ latent = self.encode(img)
+ return latent
+
+ elif behaviour == 'dec':
+ assert latent is not None
+ pred: dict = self.decode(latent,
+ c,
+ self.img_size,
+ return_raw_only=return_raw_only)
+ return pred
+
+ elif behaviour == 'dec_wo_triplane':
+ assert latent is not None
+ pred: dict = self.decode_wo_triplane(latent, self.img_size)
+ return pred
+
+ elif behaviour == 'enc_dec_wo_triplane':
+ # with profile(activities=[
+ # ProfilerActivity.CUDA], record_shapes=True) as prof:
+ # with record_function("encoding"):
+ latent = self.encode(img, c=c, **kwargs)
+
+ # print(prof.key_averages().table(sort_by="cuda_time_total", row_limit=10))
+
+ # with profile(activities=[
+ # ProfilerActivity.CUDA], record_shapes=True) as prof:
+ # with record_function("decoding"):
+ pred: dict = self.decode_wo_triplane(latent,
+ img_size=self.img_size,
+ c=c)
+ # print(prof.key_averages().table(sort_by="cuda_time_total", row_limit=10))
+ # st()
+
+ return pred
+
+ elif behaviour == 'encoder_vae':
+ latent = self.encode(img)
+ ret_dict = self.decoder.vae_reparameterization(latent, True)
+ return ret_dict
+
+ elif behaviour == 'decode_after_vae_no_render':
+ pred: dict = self.decode_after_vae_no_render(latent, self.img_size)
+ return pred
+
+ elif behaviour == 'decode_gs_after_vae_no_render':
+ pred: dict = self.decode_after_vae_no_render_gs(latent, self.img_size)
+ return pred
+
+ elif behaviour == 'decode_after_vae':
+ pred: dict = self.decode_after_vae(latent, c, self.img_size)
+ return pred
+
+ # elif behaviour == 'gaussian_dec':
+ # assert latent is not None
+ # pred: dict = self.decoder.triplane_decode(
+ # latent, c, return_raw_only=return_raw_only, **kwargs)
+ # # pred: dict = self.decoder.triplane_decode(latent, c)
+
+ elif behaviour == 'triplane_dec':
+ assert latent is not None
+ pred: dict = self.decoder.triplane_decode(
+ latent, c, return_raw_only=return_raw_only, **kwargs)
+ # pred: dict = self.decoder.triplane_decode(latent, c)
+
+ elif behaviour == 'triplane_decode_grid':
+ assert latent is not None
+ pred: dict = self.decoder.triplane_decode_grid(latent, **kwargs)
+ # pred: dict = self.decoder.triplane_decode(latent, c)
+
+ elif behaviour == 'vit_postprocess_triplane_dec':
+ assert latent is not None
+ latent = self.decoder.vit_decode_postprocess(
+ latent) # translate spatial token from vit-decoder into 2D
+ pred: dict = self.decoder.triplane_decode(
+ latent, c) # render with triplane
+
+ elif behaviour == 'triplane_renderer':
+ assert latent is not None
+ pred: dict = self.decoder.triplane_renderer(
+ latent, coordinates, directions)
+
+ # elif behaviour == 'triplane_SR':
+ # assert latent is not None
+ # pred: dict = self.decoder.triplane_renderer(
+ # latent, coordinates, directions)
+
+ elif behaviour == 'get_rendering_kwargs':
+ pred = self.decoder.triplane_decoder.rendering_kwargs
+
+ return pred
+
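+# AE dispatch sketch (assumed tensors): every AE entry point is routed through
+# forward() so that DDP hooks each code path, e.g.
+#   latent = ae(img, behaviour='enc')
+#   pred = ae(latent=latent, c=cam, behaviour='triplane_dec')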
+
+class AE_CLIPEncoder(AE):
+
+ def __init__(self, encoder, decoder, img_size, cls_token) -> None:
+ super().__init__(encoder, decoder, img_size, cls_token)
+
+
+class AE_with_Diffusion(torch.nn.Module):
+
+ def __init__(self, auto_encoder, denoise_model) -> None:
+ super().__init__()
+ self.auto_encoder = auto_encoder
+ self.denoise_model = denoise_model # simply for easy MPTrainer manipulation
+
+ def forward(self,
+ img,
+ c,
+ behaviour='enc_dec',
+ latent=None,
+ *args,
+ **kwargs):
+        # wrap the auto-encoder and the denoising model inside a single forward() so that DDP (which only hooks forward) and MPTrainer (which expects a single model) are easier to use
+ if behaviour == 'enc_dec':
+ pred = self.auto_encoder(img, c)
+ return pred
+ elif behaviour == 'enc':
+ latent = self.auto_encoder.encode(img)
+ if self.auto_encoder.dim_up_mlp is not None:
+ latent = self.auto_encoder.dim_up_mlp(latent)
+ return latent
+ elif behaviour == 'dec':
+ assert latent is not None
+            pred: dict = self.auto_encoder.decode(latent, c, self.auto_encoder.img_size)
+ return pred
+ elif behaviour == 'denoise':
+ assert latent is not None
+ pred: dict = self.denoise_model(*args, **kwargs)
+ return pred
+
+
+def eg3d_options_default():
+
+ opts = dnnlib.EasyDict(
+ dict(
+ cbase=32768,
+ cmax=512,
+ map_depth=2,
+ g_class_name='nsr.triplane.TriPlaneGenerator', # TODO
+ g_num_fp16_res=0,
+ ))
+
+ return opts
+
+
+def rendering_options_defaults(opts):
+
+ rendering_options = {
+ # 'image_resolution': c.training_set_kwargs.resolution,
+ 'image_resolution': 256,
+ 'disparity_space_sampling': False,
+ 'clamp_mode': 'softplus',
+ 'c_gen_conditioning_zero':
+ True, # if true, fill generator pose conditioning label with dummy zero vector
+ # 'gpc_reg_prob': opts.gpc_reg_prob if opts.gen_pose_cond else None,
+ 'c_scale':
+        opts.c_scale, # multiplier for generator pose conditioning label
+ 'superresolution_noise_mode': 'none',
+ 'density_reg': opts.density_reg, # strength of density regularization
+        'density_reg_p_dist': opts.density_reg_p_dist, # distance at which to sample perturbed points for density regularization
+        'reg_type': opts.reg_type, # for experimenting with variations on density regularization
+ 'decoder_lr_mul': 1,
+ # opts.decoder_lr_mul, # learning rate multiplier for decoder
+ 'decoder_activation': 'sigmoid',
+ 'sr_antialias': True,
+ 'return_triplane_features': False, # for DDF supervision
+ 'return_sampling_details_flag': False,
+
+ # * shape default sr
+
+ # 'superresolution_module': 'nsr.superresolution.SuperresolutionHybrid4X',
+ # 'superresolution_module':
+ # 'torch_utils.components.PixelUnshuffleUpsample',
+ 'superresolution_module': 'torch_utils.components.NearestConvSR',
+ }
+
+ if opts.cfg == 'ffhq':
+ rendering_options.update({
+ 'superresolution_module':
+ 'nsr.superresolution.SuperresolutionHybrid8XDC',
+ 'focal': 2985.29 / 700,
+ 'depth_resolution':
+ 48 - 0, # number of uniform samples to take per ray.
+ 'depth_resolution_importance':
+ 48 - 0, # number of importance samples to take per ray.
+ 'bg_depth_resolution':
+ 16, # 4/14 in stylenerf, https://github.com/facebookresearch/StyleNeRF/blob/7f5610a058f27fcc360c6b972181983d7df794cb/conf/model/stylenerf_ffhq.yaml#L48
+ 'ray_start':
+ 2.25, # near point along each ray to start taking samples.
+ 'ray_end':
+ 3.3, # far point along each ray to stop taking samples.
+ 'box_warp':
+ 1, # the side-length of the bounding box spanned by the tri-planes; box_warp=1 means [-0.5, -0.5, -0.5] -> [0.5, 0.5, 0.5].
+ 'avg_camera_radius':
+ 2.7, # used only in the visualizer to specify camera orbit radius.
+ 'avg_camera_pivot': [
+ 0, 0, 0.2
+ ], # used only in the visualizer to control center of camera rotation.
+ 'superresolution_noise_mode': 'random',
+ })
+ elif opts.cfg == 'afhq':
+ rendering_options.update({
+ 'superresolution_module':
+ 'nsr.superresolution.SuperresolutionHybrid8X',
+ 'superresolution_noise_mode': 'random',
+ 'focal': 4.2647,
+ 'depth_resolution': 48,
+ 'depth_resolution_importance': 48,
+ 'ray_start': 2.25,
+ 'ray_end': 3.3,
+ 'box_warp': 1,
+ 'avg_camera_radius': 2.7,
+ 'avg_camera_pivot': [0, 0, -0.06],
+ })
+ elif opts.cfg == 'shapenet': # TODO, lies in a sphere
+ rendering_options.update({
+ 'depth_resolution': 64,
+ 'depth_resolution_importance': 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start': 0.2,
+ 'ray_end': 2.2,
+ # 'ray_start': opts.ray_start,
+ # 'ray_end': opts.ray_end,
+ 'box_warp': 2, # TODO, how to set this value?
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'eg3d_shapenet_aug_resolution':
+ rendering_options.update({
+ 'depth_resolution': 80,
+ 'depth_resolution_importance': 80,
+ 'ray_start': 0.1,
+ 'ray_end': 1.9, # 2.6/1.7*1.2
+ 'box_warp': 1.1,
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'eg3d_shapenet_aug_resolution_chair':
+ rendering_options.update({
+ 'depth_resolution': 96,
+ 'depth_resolution_importance': 96,
+ 'ray_start': 0.1,
+ 'ray_end': 1.9, # 2.6/1.7*1.2
+ 'box_warp': 1.1,
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'eg3d_shapenet_aug_resolution_chair_128':
+ rendering_options.update({
+ 'depth_resolution': 128,
+ 'depth_resolution_importance': 128,
+ 'ray_start': 0.1,
+ 'ray_end': 1.9, # 2.6/1.7*1.2
+ 'box_warp': 1.1,
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'eg3d_shapenet_aug_resolution_chair_64':
+ rendering_options.update({
+ 'depth_resolution': 64,
+ 'depth_resolution_importance': 64,
+ 'ray_start': 0.1,
+ 'ray_end': 1.9, # 2.6/1.7*1.2
+ 'box_warp': 1.1,
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'srn_shapenet_aug_resolution_chair_128':
+ rendering_options.update({
+ 'depth_resolution': 128,
+ 'depth_resolution_importance': 128,
+ 'ray_start': 1.25,
+ 'ray_end': 2.75,
+ 'box_warp': 1.5,
+ 'white_back': True,
+ 'avg_camera_radius': 2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'eg3d_shapenet_aug_resolution_chair_128_residualSR':
+ rendering_options.update({
+ 'depth_resolution':
+ 128,
+ 'depth_resolution_importance':
+ 128,
+ 'ray_start':
+ 0.1,
+ 'ray_end':
+ 1.9, # 2.6/1.7*1.2
+ 'box_warp':
+ 1.1,
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR_Residual',
+ })
+
+ elif opts.cfg == 'shapenet_tuneray': # TODO, lies in a sphere
+ rendering_options.update({
+ 'depth_resolution': 64,
+ 'depth_resolution_importance': 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start': opts.ray_start,
+ 'ray_end': opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution': 80,
+ 'depth_resolution_importance': 80,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start': opts.ray_start,
+ 'ray_end': opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution': 128,
+ 'depth_resolution_importance': 128,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start': opts.ray_start,
+ 'ray_end': opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_96': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution': 96,
+ 'depth_resolution_importance': 96,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start': opts.ray_start,
+ 'ray_end': opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+ # ! default version
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_96_nearestSR': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 96,
+ 'depth_resolution_importance':
+ 96,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ opts.ray_start,
+ 'ray_end':
+ opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR',
+ })
+
+ # ! 64+64, since ssdnerf adopts this setting
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_64_nearestSR': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 64,
+ 'depth_resolution_importance':
+ 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ opts.ray_start,
+ 'ray_end':
+ opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR',
+ })
+
+ # ! 64+64+patch, since ssdnerf adopts this setting
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_64_nearestSR_patch': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 64,
+ 'depth_resolution_importance':
+ 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ opts.ray_start,
+ 'ray_end':
+ opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR',
+ # patch configs
+ 'PatchRaySampler':
+ True,
+ # 'patch_rendering_resolution': 32,
+ # 'patch_rendering_resolution': 48,
+ 'patch_rendering_resolution':
+ opts.patch_rendering_resolution,
+ })
+
+ elif opts.cfg == 'objverse_tuneray_aug_resolution_64_64_nearestSR': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 64,
+ 'depth_resolution_importance':
+ 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ opts.ray_start,
+ # 'auto',
+ 'ray_end':
+ opts.ray_end,
+ # 'auto',
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ # 2,
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.946, # ?
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR',
+ # patch configs
+ # 'PatchRaySampler': False,
+ # 'patch_rendering_resolution': 32,
+ # 'patch_rendering_resolution': 48,
+ # 'patch_rendering_resolution': opts.patch_rendering_resolution,
+ })
+
+ elif opts.cfg == 'objverse_tuneray_aug_resolution_64_64_auto': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 64,
+ 'depth_resolution_importance':
+ 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ 'auto',
+ 'ray_end':
+ 'auto',
+ 'box_warp':
+ 0.9,
+ 'white_back':
+ True,
+ 'radius_range': [1.5, 2],
+ # 'z_near': 1.5-0.45, # radius in [1.5, 2], https://github.com/modelscope/richdreamer/issues/12#issuecomment-1897734616
+ # 'z_far': 2.0+0.45,
+ 'sampler_bbox_min':
+ -0.45,
+ 'sampler_bbox_max':
+ 0.45,
+ # 'avg_camera_pivot': [0, 0, 0], # not used
+ 'filter_out_of_bbox':
+ True,
+ # 'superresolution_module':
+ # 'torch_utils.components.NearestConvSR',
+ # patch configs
+ 'PatchRaySampler':
+ True,
+ # 'patch_rendering_resolution': 32,
+ # 'patch_rendering_resolution': 48,
+ 'patch_rendering_resolution':
+ opts.patch_rendering_resolution,
+ })
+ rendering_options['z_near'] = rendering_options['radius_range'][
+ 0] + rendering_options['sampler_bbox_min']
+ rendering_options['z_far'] = rendering_options['radius_range'][
+ 1] + rendering_options['sampler_bbox_max']
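+        # With the assumed radius_range [1.5, 2] and bbox half-extent 0.45,
+        # this yields z_near = 1.5 - 0.45 = 1.05 and z_far = 2 + 0.45 = 2.45.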
+
+ elif opts.cfg == 'objverse_tuneray_aug_resolution_56_56_auto': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 56,
+ 'depth_resolution_importance':
+ 56,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ 'auto',
+ 'ray_end':
+ 'auto',
+ 'box_warp':
+ 0.9,
+ 'white_back':
+ True,
+ 'radius_range': [1.5, 2],
+ # 'z_near': 1.5-0.45, # radius in [1.5, 2], https://github.com/modelscope/richdreamer/issues/12#issuecomment-1897734616
+ # 'z_far': 2.0+0.45,
+ 'sampler_bbox_min':
+ -0.45,
+ 'sampler_bbox_max':
+ 0.45,
+ # 'avg_camera_pivot': [0, 0, 0], # not used
+ 'filter_out_of_bbox':
+ True,
+ # 'superresolution_module':
+ # 'torch_utils.components.NearestConvSR',
+ # patch configs
+ 'PatchRaySampler':
+ True,
+ # 'patch_rendering_resolution': 32,
+ # 'patch_rendering_resolution': 48,
+ 'patch_rendering_resolution':
+ opts.patch_rendering_resolution,
+ })
+ rendering_options['z_near'] = rendering_options['radius_range'][
+ 0] + rendering_options['sampler_bbox_min']
+ rendering_options['z_far'] = rendering_options['radius_range'][
+ 1] + rendering_options['sampler_bbox_max']
+
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_96_nearestResidualSR': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 96,
+ 'depth_resolution_importance':
+ 96,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ opts.ray_start,
+ 'ray_end':
+ opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR_Residual',
+ })
+
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_64_nearestResidualSR': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution':
+ 64,
+ 'depth_resolution_importance':
+ 64,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start':
+ opts.ray_start,
+ 'ray_end':
+ opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back':
+ True,
+ 'avg_camera_radius':
+ 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ 'superresolution_module':
+ 'torch_utils.components.NearestConvSR_Residual',
+ })
+
+ elif opts.cfg == 'shapenet_tuneray_aug_resolution_64_104': # to differentiate hwc
+ rendering_options.update({
+ 'depth_resolution': 104,
+ 'depth_resolution_importance': 104,
+ # * radius 1.2 setting, newly rendered images
+ 'ray_start': opts.ray_start,
+ 'ray_end': opts.ray_end,
+ 'box_warp':
+ opts.ray_end - opts.ray_start, # TODO, how to set this value?
+ 'white_back': True,
+ 'avg_camera_radius': 1.2,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+
+ rendering_options.update({'return_sampling_details_flag': True})
+
+ return rendering_options
+
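+# Note on rendering_options_defaults: `opts` must carry at least cfg, c_scale,
+# density_reg, density_reg_p_dist and reg_type (plus ray_start / ray_end and
+# patch_rendering_resolution for the 'tuneray' / 'objverse' presets); the cfg
+# string selects one of the preset blocks above.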
+
+def model_encoder_defaults():
+
+ return dict(
+ use_clip=False,
+ arch_encoder="vits",
+ arch_decoder="vits",
+ load_pretrain_encoder=False,
+ encoder_lr=1e-5,
+        encoder_weight_decay=0.001, # https://github.com/google-research/vision_transformer
+ no_dim_up_mlp=False,
+ dim_up_mlp_as_func=False,
+ decoder_load_pretrained=True,
+ uvit_skip_encoder=False,
+ # vae ldm
+ vae_p=1,
+ ldm_z_channels=4,
+ ldm_embed_dim=4,
+ use_conf_map=False,
+ # sd E, lite version by default
+ sd_E_ch=64,
+ z_channels=3 * 4,
+ latent_num=768,
+ sd_E_num_res_blocks=1,
+ num_frames=4,
+ # vit_decoder
+ arch_dit_decoder='DiT2-B/2',
+ return_all_dit_layers=False,
+ # sd D
+ # sd_D_ch=32,
+ # sd_D_res_blocks=1,
+ # sd_D_res_blocks=1,
+ lrm_decoder=False,
+ gs_rendering=False,
+ surfel_rendering=False,
+ plane_n=3,
+ in_plane_attention=True,
+ vae_dit_token_size=16,
+ )
+
+
+def triplane_decoder_defaults():
+ opts = dict(
+        triplane_fg_bg=False,
+        flexicube_decoder=False,
+ cfg='shapenet',
+ density_reg=0.25,
+ density_reg_p_dist=0.004,
+ reg_type='l1',
+ triplane_decoder_lr=0.0025, # follow eg3d G lr
+ super_resolution_lr=0.0025,
+ # triplane_decoder_wd=0.1,
+ c_scale=1,
+ nsr_lr=0.02,
+ triplane_size=224,
+ decoder_in_chans=32,
+ triplane_in_chans=-1,
+ decoder_output_dim=3,
+ out_chans=96,
+ c_dim=25, # Conditioning label (C) dimensionality.
+ # ray_start=0.2,
+ # ray_end=2.2,
+ ray_start=0.6, # shapenet default
+ ray_end=1.8,
+ rendering_kwargs={},
+ sr_training=False,
+ bcg_synthesis=False, # from panohead
+ bcg_synthesis_kwargs={}, # G_kwargs.copy()
+ #
+ image_size=128, # raw 3D rendering output resolution.
+ patch_rendering_resolution=45,
+ )
+
+ # else:
+ # assert False, "Need to specify config"
+
+ # opts = dict(opts)
+ # opts.pop('cfg')
+
+ return opts
+
+
+def vit_decoder_defaults():
+ res = dict(
+ vit_decoder_lr=1e-5, # follow eg3d G lr
+ vit_decoder_wd=0.001,
+ )
+ return res
+
+
+def nsr_decoder_defaults():
+ res = {
+ 'decomposed': False,
+ } # TODO, add defaults for all nsr
+ res.update(triplane_decoder_defaults()) # triplane by default now
+ res.update(vit_decoder_defaults()) # type: ignore
+ return res
+
+
+def loss_defaults():
+ opt = dict(
+ color_criterion='mse',
+ l2_lambda=1.0,
+ lpips_lambda=0.,
+ lpips_delay_iter=0,
+ sr_delay_iter=0,
+ # kl_anneal=0,
+ kl_anneal=False,
+ latent_lambda=0.,
+ latent_criterion='mse',
+ kl_lambda=0.0,
+ pt_ft_kl=False,
+ ft_kl=False,
+ # pt_kl_lambda=1e-8,
+ # kl_anneal=False,
+ ssim_lambda=0.,
+ l1_lambda=0.,
+ id_lambda=0.0,
+ depth_lambda=0.0, # TODO
+ alpha_lambda=0.0, # TODO
+ fg_mse=False,
+ bg_lamdba=0.0,
+ density_reg=0.0, # tvloss in eg3d
+ density_reg_p_dist=0.004, # 'density regularization strength.'
+ density_reg_every=4, # lazy density reg
+
+ # 3D supervision, ffhq/afhq eg3d warm up
+ shape_uniform_lambda=0.005,
+ shape_importance_lambda=0.01,
+ shape_depth_lambda=0.,
+ xyz_lambda=0.0,
+ emd_lambda=0.0,
+ cd_lambda=0.0,
+ pruning_ot_lambda=0.0,
+ # 2dgs
+ lambda_normal=0.0,
+ lambda_dist=0.0,
+
+ #gghead reg
+ lambda_scale_reg=0.0,
+ lambda_opa_reg=0.0,
+
+ # gan loss
+ rec_cvD_lambda=0.01,
+ nvs_cvD_lambda=0.025,
+ patchgan_disc_factor=0.01,
+ patchgan_disc_g_weight=0.2, #
+ r1_gamma=1.0, # ffhq default value for eg3d
+ sds_lamdba=1.0,
+ nvs_D_lr_mul=1, # compared with 1e-4
+ cano_D_lr_mul=1, # compared with 1e-4
+
+ # lsgm loss
+ ce_balanced_kl=1.,
+ p_eps_lambda=1,
+ # symmetric loss
+ symmetry_loss=False,
+ depth_smoothness_lambda=0.0,
+ ce_lambda=1.0,
+ negative_entropy_lambda=1.0,
+ grad_clip=False,
+ online_mask=False, # in unsup3d
+ fps_sampling=False, # for emd loss
+ subset_fps_sampling=False, # for emd loss
+ subset_half_fps_sampling=False,
+ # gaussian loss
+ commitment_loss_lambda=0.0,
+ rand_aug_bg=False,
+ )
+ return opt
+
+
+def dataset_defaults():
+ res = dict(
+ use_lmdb=False,
+ dataset_size=-1,
+ use_wds=False,
+ use_lmdb_compressed=True,
+ compile=False,
+ interval=1,
+ objv_dataset=False,
+ decode_encode_img_only=False,
+ load_wds_diff=False,
+ load_wds_latent=False,
+ eval_load_wds_instance=True,
+ shards_lst="",
+ eval_shards_lst="",
+ mv_input=False,
+ duplicate_sample=True,
+ orthog_duplicate=False,
+ split_chunk_input=False, # split=8 per chunk
+ load_real=False,
+ load_mv_real=False,
+ load_gso=False,
+ four_view_for_latent=False,
+ single_view_for_i23d=False,
+ shuffle_across_cls=False,
+ load_extra_36_view=False,
+ mv_latent_dir='',
+ append_depth=False,
+ append_xyz=False,
+ read_normal=False,
+ plucker_embedding=False,
+ perturb_pcd_scale=0.0,
+ gs_cam_format=False,
+        frame_0_as_canonical=False, # transform the first pose to a fixed position
+ pcd_path=None,
+ stage_1_output_dir='',
+ load_pcd=False,
+ use_chunk=False, # jpeg chunk
+ split_chunk_size=8,
+ load_caption_dataset=False,
+ load_mv_dataset=False,
+ export_mesh=False,
+ )
+ return res
+
+
+def encoder_and_nsr_defaults():
+ """
+ Defaults for image training.
+ """
+ # ViT configs
+ res = dict(
+ dino_version='v1',
+ encoder_in_channels=3,
+ img_size=[224],
+ patch_size=16, # ViT-S/16
+ in_chans=384,
+ num_classes=0,
+ embed_dim=384, # Check ViT encoder dim
+ depth=6,
+ num_heads=16,
+ mlp_ratio=4.,
+ qkv_bias=False,
+ qk_scale=None,
+ drop_rate=0.1,
+ attn_drop_rate=0.,
+ drop_path_rate=0.,
+ norm_layer='nn.LayerNorm',
+ # img_resolution=128, # Output resolution.
+ cls_token=False,
+ # image_size=128, # rendered output resolution.
+ # img_channels=3, # Number of output color channels.
+ encoder_cls_token=False,
+ decoder_cls_token=False,
+ sr_kwargs={},
+ sr_ratio=2,
+ # sd configs
+ )
+ # Triplane configs
+ res.update(model_encoder_defaults())
+ res.update(nsr_decoder_defaults())
+ res.update(
+ ae_classname='vit.vit_triplane.ViTTriplaneDecomposed') # if add SR
+ return res
+
+
+def create_3DAE_model(
+ arch_encoder,
+ arch_decoder,
+ dino_version='v1',
+ img_size=[224],
+ patch_size=16,
+ in_chans=384,
+ num_classes=0,
+ embed_dim=1024, # Check ViT encoder dim
+ depth=6,
+ num_heads=16,
+ mlp_ratio=4.,
+ qkv_bias=False,
+ qk_scale=None,
+ drop_rate=0.1,
+ attn_drop_rate=0.,
+ drop_path_rate=0.,
+ # norm_layer=nn.LayerNorm,
+ norm_layer='nn.LayerNorm',
+ out_chans=96,
+ decoder_in_chans=32,
+ triplane_in_chans=-1,
+ decoder_output_dim=32,
+ encoder_cls_token=False,
+ decoder_cls_token=False,
+ c_dim=25, # Conditioning label (C) dimensionality.
+ image_size=128, # Output resolution.
+ img_channels=3, # Number of output color channels.
+ rendering_kwargs={},
+ load_pretrain_encoder=False,
+ decomposed=True,
+ triplane_size=224,
+ ae_classname='ViTTriplaneDecomposed',
+ use_clip=False,
+ sr_kwargs={},
+ sr_ratio=2,
+ no_dim_up_mlp=False,
+ dim_up_mlp_as_func=False,
+ decoder_load_pretrained=True,
+ uvit_skip_encoder=False,
+ bcg_synthesis_kwargs={},
+ # decoder params
+ vae_p=1,
+ ldm_z_channels=4,
+ vae_dit_token_size=16,
+ ldm_embed_dim=4,
+ use_conf_map=False,
+ triplane_fg_bg=False,
+ flexicube_decoder=False,
+ encoder_in_channels=3,
+ sd_E_ch=64,
+ z_channels=3 * 4,
+ latent_num=768,
+ sd_E_num_res_blocks=1,
+ num_frames=4,
+ arch_dit_decoder='DiT2-B/2',
+ in_plane_attention=True,
+ lrm_decoder=False,
+ gs_rendering=False,
+ surfel_rendering=False,
+ return_all_dit_layers=False,
+ plane_n=3,
+ *args,
+ **kwargs):
+
+ # TODO, check pre-trained ViT encoder cfgs
+
+ preprocess = None
+ clip_dtype = None
+ if load_pretrain_encoder:
+ if not use_clip:
+ if dino_version == 'v1':
+ encoder = torch.hub.load(
+ 'facebookresearch/dino:main',
+ 'dino_{}{}'.format(arch_encoder, patch_size))
+ logger.log(
+ f'loaded pre-trained dino v1 ViT-S{patch_size} encoder ckpt'
+ )
+ elif dino_version == 'v2':
+ encoder = torch.hub.load(
+ 'facebookresearch/dinov2',
+ 'dinov2_{}{}'.format(arch_encoder, patch_size))
+ logger.log(
+ f'loaded pre-trained dino v2 {arch_encoder}{patch_size} encoder ckpt'
+ )
+ elif 'sd' in dino_version: # just for compat
+
+ if 'mv' in dino_version:
+ if 'lgm' in dino_version:
+ encoder_cls = MVUNet(
+ input_size=256,
+ up_channels=(1024, 1024, 512, 256,
+ 128), # one more decoder
+ up_attention=(True, True, True, False, False),
+ splat_size=128,
+                        output_size=512, # render & supervise Gaussians at a higher resolution.
+ batch_size=8,
+ num_views=8,
+ gradient_accumulation_steps=1,
+ # mixed_precision='bf16',
+ )
+ elif 'gs' in dino_version:
+ encoder_cls = MVEncoder
+ else:
+ encoder_cls = MVEncoder
+
+ else:
+ encoder_cls = Encoder
+
+ encoder = encoder_cls( # mono input
+ double_z=True,
+ resolution=256,
+ in_channels=encoder_in_channels,
+ # ch=128,
+ ch=64, # ! fit in the memory
+ # ch_mult=[1,2,4,4],
+ # num_res_blocks=2,
+ ch_mult=[1, 2, 4, 4],
+ num_res_blocks=1,
+ dropout=0.0,
+ attn_resolutions=[],
+ out_ch=3, # unused
+ z_channels=z_channels,
+ ) # stable diffusion encoder
+ else:
+ raise NotImplementedError()
+
+ else:
+ import clip
+ model, preprocess = clip.load("ViT-B/16", device=dist_util.dev())
+ model.float() # convert weight to float32
+ clip_dtype = model.dtype
+ encoder = getattr(
+ model, 'visual') # only use the CLIP visual encoder here
+ encoder.requires_grad_(False)
+ logger.log(
+ f'loaded pre-trained CLIP ViT-B{patch_size} encoder, fixed.')
+
+ elif 'sd' in dino_version:
+ attn_kwargs = {}
+ attn_type = "mv-vanilla"
+ if 'mv' in dino_version:
+ if 'lgm' in dino_version:
+ encoder = LGM_MVEncoder(
+ in_channels=9,
+ # input_size=256,
+ up_channels=(1024, 1024, 512, 256,
+ 128), # one more decoder
+ up_attention=(True, True, True, False, False),
+ # splat_size=128,
+ # output_size=
+ # 512, # render & supervise Gaussians at a higher resolution.
+ # batch_size=8,
+ # num_views=8,
+ # gradient_accumulation_steps=1,
+ # mixed_precision='bf16',
+ )
+
+ elif 'srt' in dino_version:
+ if 'hybrid' in dino_version:
+ encoder_cls = HybridEncoder # best overall performance
+ # if 'vitl' in dino_version:
+ # encoder_cls = ImprovedSRTEncoderVAE_L5_vitl
+ # elif 'l6' in dino_version:
+ # encoder_cls = ImprovedSRTEncoderVAE_L6
+ # elif 'mlp4' in dino_version:
+ # if 'f8'in dino_version:
+ # # encoder_cls = ImprovedSRTEncoderVAE_mlp_ratio4_f8
+ # encoder_cls = ImprovedSRTEncoderVAE_mlp_ratio4_f8_L6
+ elif 'l6' in dino_version:
+ encoder_cls = ImprovedSRTEncoderVAE_mlp_ratio4_L6
+ elif 'heavy' in dino_version:
+ encoder_cls = ImprovedSRTEncoderVAE_mlp_ratio4_heavyPatchify
+
+ elif 'decomposed' in dino_version:
+ encoder_cls = ImprovedSRTEncoderVAE_mlp_ratio4_decomposed
+ elif 'pcd-structured' in dino_version:
+ attn_kwargs = {
+ 'n_heads': 8,
+ 'd_head': 64,
+ }
+ if 'pc2' in dino_version:
+ encoder_cls = HybridEncoderPCDStructuredLatentSNoPCD_PC2 # pixel-aligned by rasterization projection
+ elif 'nopcd' in dino_version:
+ encoder_cls = HybridEncoderPCDStructuredLatentSNoPCD
+ elif 'uniformfps' in dino_version:
+ encoder_cls = HybridEncoderPCDStructuredLatentUniformFPS
+ elif 'pixelaligned' in dino_version:
+ encoder_cls = HybridEncoderPCDStructuredLatentSNoPCD_PixelAlignedQuery
+ else:
+ encoder_cls = HybridEncoderPCDStructuredLatent
+ else:
+ encoder_cls = ImprovedSRTEncoderVAE_mlp_ratio4 # best overall performance
+ # else: # default version
+ # encoder_cls = ImprovedSRTEncoderVAE
+
+ elif 'gs' in dino_version:
+
+ if 'dynaInp' in dino_version:
+ if 'ca' in dino_version:
+ encoder_cls = MVEncoderGSDynamicInp_CA
+ else:
+ encoder_cls = MVEncoderGSDynamicInp
+ else:
+ encoder_cls = MVEncoderGS
+
+ attn_kwargs = {
+ 'n_heads': 8,
+ 'd_head': 64,
+ }
+
+ else:
+ if 'dynaInp' in dino_version:
+ if 'ca' in dino_version:
+ encoder_cls = MVEncoderGSDynamicInp_CA
+ else:
+ encoder_cls = MVEncoderGSDynamicInp
+ else:
+ encoder_cls = MVEncoder
+ attn_kwargs = {
+ 'n_heads': 8,
+ 'd_head': 64,
+ }
+
+ else:
+ encoder_cls = Encoder
+
+ if 'lgm' not in dino_version: # TODO, for compat now
+ # st()
+ encoder = encoder_cls(
+ double_z=True,
+ resolution=256,
+ in_channels=encoder_in_channels,
+ # ch=128,
+ # ch=64, # ! fit in the memory
+ ch=sd_E_ch,
+ # ch_mult=[1,2,4,4],
+ # num_res_blocks=2,
+ ch_mult=[1, 2, 4, 4],
+ # num_res_blocks=1,
+ num_res_blocks=sd_E_num_res_blocks,
+ num_frames=num_frames,
+ dropout=0.0,
+ attn_resolutions=[],
+ out_ch=3, # unused
+ z_channels=z_channels, # 4 * 3
+ attn_kwargs=attn_kwargs,
+ attn_type=attn_type,
+ latent_num=latent_num,
+ ) # stable diffusion encoder
+
+ else:
+ encoder = vits.__dict__[arch_encoder](
+ patch_size=patch_size,
+ drop_path_rate=drop_path_rate, # stochastic depth
+ img_size=img_size)
+
+ assert decomposed
+ if decomposed:
+
+ if not gs_rendering:
+ if triplane_in_chans == -1:
+ triplane_in_chans = decoder_in_chans
+
+ if triplane_fg_bg:
+ triplane_renderer_cls = Triplane_fg_bg_plane
+ elif flexicube_decoder:
+ triplane_renderer_cls = Triplane # the flexicube path currently falls back to the Triplane renderer
+ else:
+ triplane_renderer_cls = Triplane
+
+ # triplane_decoder = Triplane(
+ triplane_decoder = triplane_renderer_cls(
+ c_dim, # Conditioning label (C) dimensionality.
+ image_size, # Output resolution.
+ img_channels, # Number of output color channels.
+ rendering_kwargs=rendering_kwargs,
+ out_chans=out_chans,
+ # create_triplane=True, # compatability, remove later
+ triplane_size=triplane_size,
+ decoder_in_chans=triplane_in_chans,
+ decoder_output_dim=decoder_output_dim,
+ sr_kwargs=sr_kwargs,
+ bcg_synthesis_kwargs=bcg_synthesis_kwargs,
+ lrm_decoder=lrm_decoder)
+ elif surfel_rendering:
+ triplane_decoder = GaussianRenderer2DGS(
+ image_size, out_chans, rendering_kwargs=rendering_kwargs)
+ else:
+ triplane_decoder = GaussianRenderer(
+ image_size, out_chans, rendering_kwargs=rendering_kwargs)
+
+ if load_pretrain_encoder:
+
+ if dino_version == 'v1':
+ vit_decoder = torch.hub.load(
+ 'facebookresearch/dino:main',
+ 'dino_{}{}'.format(arch_decoder, patch_size))
+ logger.log(
+ 'loaded pre-trained decoder',
+ 'facebookresearch/dino:main, dino_{}{}'.format(
+ arch_decoder, patch_size))
+ else:
+
+ vit_decoder = torch.hub.load(
+ 'facebookresearch/dinov2',
+ # 'dinov2_{}{}'.format(arch_decoder, patch_size))
+ 'dinov2_{}{}'.format(arch_decoder, patch_size),
+ pretrained=decoder_load_pretrained)
+ logger.log(
+ 'loaded pre-trained decoder',
+ 'facebookresearch/dinov2, dinov2_{}{}'.format(
+ arch_decoder,
+ patch_size), 'pretrained=', decoder_load_pretrained)
+
+ elif 'dit' in dino_version:
+ from dit.dit_decoder import DiT2_models, DiTBlock
+
+ # st()
+ vit_decoder = DiT2_models[arch_dit_decoder](
+ input_size=16,
+ num_classes=0,
+ learn_sigma=False,
+ in_channels=embed_dim,
+ mixed_prediction=False,
+ context_dim=None, # add CLIP text embedding
+ roll_out=True,
+ plane_n=4 if ('gs' in dino_version
+ and 'trilatent' not in dino_version) else 3,
+ return_all_layers=return_all_dit_layers,
+ in_plane_attention=in_plane_attention,
+ vit_blk=DiTBlock,
+ )
+
+ else: # has bug on global token, to fix
+ vit_decoder = vits.__dict__[arch_decoder](
+ patch_size=patch_size,
+ drop_path_rate=drop_path_rate, # stochastic depth
+ img_size=img_size)
+
+ # decoder = ViTTriplaneDecomposed(vit_decoder, triplane_decoder)
+ # if True:
+ decoder_kwargs = dict(
+ class_name=ae_classname,
+ vit_decoder=vit_decoder,
+ triplane_decoder=triplane_decoder,
+ # encoder_cls_token=encoder_cls_token,
+ cls_token=decoder_cls_token,
+ sr_ratio=sr_ratio,
+ vae_p=vae_p,
+ ldm_z_channels=ldm_z_channels,
+ ldm_embed_dim=ldm_embed_dim,
+ vae_dit_token_size=vae_dit_token_size,
+ plane_n=plane_n,
+ )
+ decoder = dnnlib.util.construct_class_by_name(**decoder_kwargs)
+ else:
+ # deprecated
+ decoder = ViTTriplane(
+ img_size,
+ patch_size,
+ in_chans,
+ num_classes,
+ embed_dim,
+ depth,
+ num_heads,
+ mlp_ratio,
+ qkv_bias,
+ qk_scale,
+ drop_rate,
+ attn_drop_rate,
+ drop_path_rate,
+ norm_layer,
+ out_chans,
+ cls_token,
+ c_dim, # Conditioning label (C) dimensionality.
+ image_size, # Output resolution.
+ img_channels, # Number of output color channels.
+ # TODO, replace with c
+ rendering_kwargs=rendering_kwargs,
+ )
+ # if return_encoder_decoder:
+ # return encoder, decoder, img_size[0], cls_token
+ # else:
+
+ if use_conf_map:
+ confnet = ConfNet(cin=3, cout=1, nf=64, zdim=128)
+ else:
+ confnet = None
+
+ auto_encoder = AE(
+ encoder,
+ decoder,
+ img_size[0],
+ encoder_cls_token,
+ decoder_cls_token,
+ preprocess,
+ use_clip,
+ dino_version,
+ clip_dtype,
+ no_dim_up_mlp=no_dim_up_mlp,
+ dim_up_mlp_as_func=dim_up_mlp_as_func,
+ uvit_skip_encoder=uvit_skip_encoder,
+ confnet=confnet,
+ )
+
+ logger.log(auto_encoder)
+ torch.cuda.empty_cache()
+
+ return auto_encoder
+
+
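+# A minimal usage sketch (hypothetical kwargs; the shipped bash configs set
+# the real values): build the option dict from encoder_and_nsr_defaults(),
+# override the architecture names, then construct the auto-encoder.
+#
+#   opts = encoder_and_nsr_defaults()
+#   opts.update(arch_encoder='vits', arch_decoder='vitb')
+#   auto_encoder = create_3DAE_model(**opts)
+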
+# def create_3DAE_Diffusion_model(
+# arch_encoder,
+# arch_decoder,
+# img_size=[224],
+# patch_size=16,
+# in_chans=384,
+# num_classes=0,
+# embed_dim=1024, # Check ViT encoder dim
+# depth=6,
+# num_heads=16,
+# mlp_ratio=4.,
+# qkv_bias=False,
+# qk_scale=None,
+# drop_rate=0.1,
+# attn_drop_rate=0.,
+# drop_path_rate=0.,
+# # norm_layer=nn.LayerNorm,
+# norm_layer='nn.LayerNorm',
+# out_chans=96,
+# decoder_in_chans=32,
+# decoder_output_dim=32,
+# cls_token=False,
+# c_dim=25, # Conditioning label (C) dimensionality.
+# img_resolution=128, # Output resolution.
+# img_channels=3, # Number of output color channels.
+# rendering_kwargs={},
+# load_pretrain_encoder=False,
+# decomposed=True,
+# triplane_size=224,
+# ae_classname='ViTTriplaneDecomposed',
+# # return_encoder_decoder=False,
+# *args,
+# **kwargs
+# ):
+
+# # TODO, check pre-trained ViT encoder cfgs
+
+# encoder, decoder, img_size, cls_token = create_3DAE_model(
+# arch_encoder,
+# arch_decoder,
+# img_size,
+# patch_size,
+# in_chans,
+# num_classes,
+# embed_dim, # Check ViT encoder dim
+# depth,
+# num_heads,
+# mlp_ratio,
+# qkv_bias,
+# qk_scale,
+# drop_rate,
+# attn_drop_rate,
+# drop_path_rate,
+# # norm_layer=nn.LayerNorm,
+# norm_layer,
+# out_chans=96,
+# decoder_in_chans=32,
+# decoder_output_dim=32,
+# cls_token=False,
+# c_dim=25, # Conditioning label (C) dimensionality.
+# img_resolution=128, # Output resolution.
+# img_channels=3, # Number of output color channels.
+# rendering_kwargs={},
+# load_pretrain_encoder=False,
+# decomposed=True,
+# triplane_size=224,
+# ae_classname='ViTTriplaneDecomposed',
+# return_encoder_decoder=False,
+# *args,
+# **kwargs
+# ) # type: ignore
+
+
+def create_Triplane(
+ c_dim=25, # Conditioning label (C) dimensionality.
+ img_resolution=128, # Output resolution.
+ img_channels=3, # Number of output color channels.
+ rendering_kwargs={},
+ decoder_output_dim=32,
+ *args,
+ **kwargs):
+
+ decoder = Triplane(
+ c_dim, # Conditioning label (C) dimensionality.
+ img_resolution, # Output resolution.
+ img_channels, # Number of output color channels.
+ # TODO, replace with c
+ rendering_kwargs=rendering_kwargs,
+ create_triplane=True,
+ decoder_output_dim=decoder_output_dim)
+ return decoder
+
+
+def DiT_defaults():
+ return {
+ 'dit_model': "DiT-B/16",
+ 'vae': "ema"
+ # dit_model="DiT-XL/2",
+ # dit_patch_size=8,
+ }
diff --git a/nsr/srt/__init__.py b/nsr/srt/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/nsr/srt/__pycache__/__init__.cpython-39.pyc b/nsr/srt/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c7504e5b42a6355429c60e9c0ef5f8992422e169
Binary files /dev/null and b/nsr/srt/__pycache__/__init__.cpython-39.pyc differ
diff --git a/nsr/srt/__pycache__/encoder.cpython-39.pyc b/nsr/srt/__pycache__/encoder.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d943f1788cdec7c2b013dd910d394ed4b166ee77
Binary files /dev/null and b/nsr/srt/__pycache__/encoder.cpython-39.pyc differ
diff --git a/nsr/srt/__pycache__/layers.cpython-39.pyc b/nsr/srt/__pycache__/layers.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..218dd884c95bf263d84cbdab42b9ba31401bfb9c
Binary files /dev/null and b/nsr/srt/__pycache__/layers.cpython-39.pyc differ
diff --git a/nsr/srt/encoder.py b/nsr/srt/encoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9e0f383da18b72675d8dfb786124f36773e400f
--- /dev/null
+++ b/nsr/srt/encoder.py
@@ -0,0 +1,936 @@
+import imageio
+import torchvision
+import random
+# import einops
+import kornia
+import einops
+import numpy as np
+import torch
+import torch.nn as nn
+from .layers import RayEncoder, Transformer, PreNorm
+from pdb import set_trace as st
+
+from pathlib import Path
+import math
+from ldm.modules.attention import MemoryEfficientCrossAttention
+from timm.models.vision_transformer import PatchEmbed
+from ldm.modules.diffusionmodules.model import Encoder
+from guided_diffusion import dist_util, logger
+import point_cloud_utils as pcu
+
+import pytorch3d.ops
+from pytorch3d.ops.utils import masked_gather
+from pytorch3d.implicitron.dataset.data_loader_map_provider import FrameData
+from pytorch3d.renderer import PointsRasterizationSettings, PointsRasterizer
+from pytorch3d.renderer.cameras import CamerasBase, PerspectiveCameras
+from pytorch3d.structures import Pointclouds
+
+from timm.models.vision_transformer import Mlp
+
+from vit.vit_triplane import XYZPosEmbed
+
+from utils.geometry import index, perspective
+
+
+def approx_gelu():
+ return nn.GELU(approximate="tanh")
+
+
+class SRTConvBlock(nn.Module):
+
+ def __init__(self, idim, hdim=None, odim=None):
+ super().__init__()
+ if hdim is None:
+ hdim = idim
+
+ if odim is None:
+ odim = 2 * hdim
+
+ conv_kwargs = {'bias': False, 'kernel_size': 3, 'padding': 1}
+ self.layers = nn.Sequential(
+ nn.Conv2d(idim, hdim, stride=1, **conv_kwargs), nn.ReLU(),
+ nn.Conv2d(hdim, odim, stride=2, **conv_kwargs), nn.ReLU())
+
+ def forward(self, x):
+ return self.layers(x)
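+
+# Shape sketch: each block halves the spatial resolution (the second conv has
+# stride 2) and, by default, doubles the channel count, e.g.
+#   blk = SRTConvBlock(idim=183, hdim=96)     # 183 -> 96 -> 192 channels
+#   y = blk(torch.randn(1, 183, 240, 320))    # -> [1, 192, 120, 160]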
+
+
+class SRTEncoder(nn.Module):
+ """ Scene Representation Transformer Encoder, as presented in the SRT paper at CVPR 2022 (caveats below)"""
+
+ def __init__(self,
+ num_conv_blocks=4,
+ num_att_blocks=10,
+ pos_start_octave=0,
+ scale_embeddings=False):
+ super().__init__()
+ self.ray_encoder = RayEncoder(pos_octaves=15,
+ pos_start_octave=pos_start_octave,
+ ray_octaves=15)
+
+ conv_blocks = [SRTConvBlock(idim=183, hdim=96)]
+ cur_hdim = 192
+ for i in range(1, num_conv_blocks):
+ conv_blocks.append(SRTConvBlock(idim=cur_hdim, odim=None))
+ cur_hdim *= 2
+
+ self.conv_blocks = nn.Sequential(*conv_blocks)
+
+ self.per_patch_linear = nn.Conv2d(cur_hdim, 768, kernel_size=1)
+
+ # Original SRT initializes with stddev=1/math.sqrt(d). Initialization
+ # likely also differs between torch & jax, but this setting worked in practice.
+ embedding_stdev = (1. / math.sqrt(768)) if scale_embeddings else 1.
+ self.pixel_embedding = nn.Parameter(
+ torch.randn(1, 768, 15, 20) * embedding_stdev)
+ self.canonical_camera_embedding = nn.Parameter(
+ torch.randn(1, 1, 768) * embedding_stdev)
+ self.non_canonical_camera_embedding = nn.Parameter(
+ torch.randn(1, 1, 768) * embedding_stdev)
+
+ # SRT as in the CVPR paper does not use actual self attention, but a special type:
+ # the current features in the Nth layer don't self-attend, but they
+ # always attend into the initial patch embedding (i.e., the output of
+ # the CNN). SRT further used post-normalization rather than
+ # pre-normalization. Since then though, in OSRT, pre-norm and regular
+ # self-attention was found to perform better overall. So that's what
+ # we do here, though it may be less stable under some circumstances.
+ self.transformer = Transformer(768,
+ depth=num_att_blocks,
+ heads=12,
+ dim_head=64,
+ mlp_dim=1536,
+ selfatt=True)
+
+ def forward(self, images, camera_pos, rays):
+ """
+ Args:
+ images: [batch_size, num_images, 3, height, width].
+ Assume the first image is canonical - shuffling happens in the data loader.
+ camera_pos: [batch_size, num_images, 3]
+ rays: [batch_size, num_images, height, width, 3]
+ Returns:
+ scene representation: [batch_size, num_patches, channels_per_patch]
+ """
+
+ batch_size, num_images = images.shape[:2]
+
+ x = images.flatten(0, 1)
+ camera_pos = camera_pos.flatten(0, 1)
+ rays = rays.flatten(0, 1)
+
+ canonical_idxs = torch.zeros(batch_size, num_images)
+ canonical_idxs[:, 0] = 1
+ canonical_idxs = canonical_idxs.flatten(
+ 0, 1).unsqueeze(-1).unsqueeze(-1).to(x)
+ camera_id_embedding = canonical_idxs * self.canonical_camera_embedding + \
+ (1. - canonical_idxs) * self.non_canonical_camera_embedding
+
+ ray_enc = self.ray_encoder(camera_pos, rays)
+ x = torch.cat((x, ray_enc), 1)
+ x = self.conv_blocks(x)
+ x = self.per_patch_linear(x)
+ height, width = x.shape[2:]
+ x = x + self.pixel_embedding[:, :, :height, :width]
+ x = x.flatten(2, 3).permute(0, 2, 1)
+ x = x + camera_id_embedding
+
+ patches_per_image, channels_per_patch = x.shape[1:]
+ x = x.reshape(batch_size, num_images * patches_per_image,
+ channels_per_patch)
+
+ x = self.transformer(x)
+
+ return x
+
+
+class ImprovedSRTEncoder(nn.Module):
+ """
+ Scene Representation Transformer Encoder with the improvements from Appendix A.4 in the OSRT paper.
+ """
+
+ def __init__(self,
+ num_conv_blocks=3,
+ num_att_blocks=5,
+ pos_start_octave=0):
+ super().__init__()
+ self.ray_encoder = RayEncoder(pos_octaves=15,
+ pos_start_octave=pos_start_octave,
+ ray_octaves=15)
+
+ conv_blocks = [SRTConvBlock(idim=183, hdim=96)]
+ cur_hdim = 192
+ for i in range(1, num_conv_blocks):
+ conv_blocks.append(SRTConvBlock(idim=cur_hdim, odim=None))
+ cur_hdim *= 2
+
+ self.conv_blocks = nn.Sequential(*conv_blocks)
+
+ self.per_patch_linear = nn.Conv2d(cur_hdim, 768, kernel_size=1)
+
+ self.transformer = Transformer(768,
+ depth=num_att_blocks,
+ heads=12,
+ dim_head=64,
+ mlp_dim=1536,
+ selfatt=True)
+
+ def forward(self, images, camera_pos, rays):
+ """
+ Args:
+ images: [batch_size, num_images, 3, height, width]. Assume the first image is canonical.
+ camera_pos: [batch_size, num_images, 3]
+ rays: [batch_size, num_images, height, width, 3]
+ Returns:
+ scene representation: [batch_size, num_patches, channels_per_patch]
+ """
+
+ batch_size, num_images = images.shape[:2]
+
+ x = images.flatten(0, 1)
+ camera_pos = camera_pos.flatten(0, 1)
+ rays = rays.flatten(0, 1)
+
+ ray_enc = self.ray_encoder(camera_pos, rays)
+ x = torch.cat((x, ray_enc), 1)
+ x = self.conv_blocks(x)
+ x = self.per_patch_linear(x)
+ x = x.flatten(2, 3).permute(0, 2, 1)
+
+ patches_per_image, channels_per_patch = x.shape[1:]
+ x = x.reshape(batch_size, num_images * patches_per_image,
+ channels_per_patch)
+
+ x = self.transformer(x)
+
+ return x
+
+
+class ImprovedSRTEncoderVAE(nn.Module):
+ """
+ Modified from ImprovedSRTEncoder
+ 1. replace conv_blocks to timm embedder
+ 2. replace ray_PE with Plucker coordinate
+ 3. add xformers/flash for transformer attention
+ """
+
+ def __init__(
+ self,
+ *,
+ ch,
+ out_ch,
+ ch_mult=(1, 2, 4, 8),
+ num_res_blocks,
+ attn_resolutions,
+ dropout=0.0,
+ resamp_with_conv=True,
+ in_channels,
+ resolution,
+ z_channels,
+ double_z=True,
+ num_frames=4,
+ num_att_blocks=5,
+ tx_dim=768,
+ num_heads=12,
+ mlp_ratio=2, # as in SRT
+ patch_size=16,
+ decomposed=False,
+ **kwargs):
+ super().__init__()
+ # self.ray_encoder = RayEncoder(pos_octaves=15, pos_start_octave=pos_start_octave,
+ # ray_octaves=15)
+
+ # conv_blocks = [SRTConvBlock(idim=183, hdim=96)]
+ # cur_hdim = 192
+ # for i in range(1, num_conv_blocks):
+ # conv_blocks.append(SRTConvBlock(idim=cur_hdim, odim=None))
+ # cur_hdim *= 2
+ self.num_frames = num_frames
+ self.embed_dim = tx_dim
+ self.embedder = PatchEmbed(
+ img_size=256,
+ patch_size=patch_size,
+ # patch_size=8, # compare the performance
+ in_chans=in_channels,
+ embed_dim=self.embed_dim,
+ norm_layer=None,
+ flatten=True,
+ bias=True,
+ ) # downsample f=16 here.
+
+ # same configuration as vit-B
+ if not decomposed:
+ self.transformer = Transformer(
+ self.embed_dim, # 12 * 64 = 768
+ depth=num_att_blocks,
+ heads=num_heads,
+ mlp_dim=mlp_ratio * self.embed_dim, # 1536 by default
+ )
+ else:
+ self.transformer_selfattn = Transformer(
+ self.embed_dim, # 12 * 64 = 768
+ depth=1,
+ heads=num_heads,
+ mlp_dim=mlp_ratio * self.embed_dim, # 1536 by default
+ )
+ self.transformer = Transformer(
+ self.embed_dim, # 12 * 64 = 768
+ # depth=num_att_blocks-1,
+ depth=num_att_blocks,
+ heads=num_heads,
+ mlp_dim=mlp_ratio * self.embed_dim, # 1536 by default
+ )
+
+ # to a compact latent, with CA
+ # query_dim = 4*(1+double_z)
+ query_dim = 12 * (1 + double_z) # for high-quality 3D encoding, following Direct3D
+ self.latent_embedding = nn.Parameter(
+ torch.randn(1, 32 * 32 * 3, query_dim))
+ self.readout_ca = MemoryEfficientCrossAttention(
+ query_dim,
+ self.embed_dim,
+ )
+
+ def forward_tx(self, x):
+ x = self.transformer(x) # B VL C
+
+ # ? 3DPE
+ x = self.readout_ca(self.latent_embedding.repeat(x.shape[0], 1, 1), x)
+
+ # ! reshape to 3D latent here. how to make the latent 3D-aware? Later. Performance first.
+ x = einops.rearrange(x, 'B (N H W) C -> B C (N H) W', H=32, W=32, N=3)
+ return x
+
+ def forward(self, x, **kwargs):
+ """
+ Args:
+ images: [batch_size, num_images, 3, height, width]. Assume the first image is canonical.
+ camera_pos: [batch_size, num_images, 3]
+ rays: [batch_size, num_images, height, width, 3]
+ Returns:
+ scene representation: [batch_size, num_patches, channels_per_patch]
+ """
+
+ x = self.embedder(x) # B L C
+ x = einops.rearrange(x, '(B V) L C -> B (V L) C', V=self.num_frames)
+ x = self.forward_tx(x)
+
+ return x
+
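+# Shape walk-through (a sketch, assuming defaults num_frames=4, patch_size=16,
+# tx_dim=768, double_z=True): a (B*4, C_in, 256, 256) input is patchified into
+# 256 tokens per view, the four views fuse into (B, 1024, 768) tokens, and the
+# cross-attention readout maps them onto 3*32*32 = 3072 latent queries of dim
+# 12*(1+1) = 24, finally reshaped to (B, 24, 96, 32).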
+
+# ! ablations of the SRT design
+class ImprovedSRTEncoderVAE_K8(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(patch_size=8, **kwargs)
+
+
+class ImprovedSRTEncoderVAE_L6(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(num_att_blocks=6, **kwargs)
+
+
+class ImprovedSRTEncoderVAE_L5_vitl(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(num_att_blocks=5, tx_dim=1024, num_heads=16, **kwargs)
+
+
+class ImprovedSRTEncoderVAE_mlp_ratio4(ImprovedSRTEncoderVAE): # ! the default now
+
+ def __init__(self, **kwargs):
+ super().__init__(mlp_ratio=4, **kwargs)
+
+
+class ImprovedSRTEncoderVAE_mlp_ratio4_decomposed(
+ ImprovedSRTEncoderVAE_mlp_ratio4):
+
+ def __init__(self, **kwargs):
+ super().__init__(decomposed=True, **kwargs) # just decompose tx
+
+ def forward(self, x, **kwargs):
+ """
+ Args:
+ images: [batch_size, num_images, 3, height, width]. Assume the first image is canonical.
+ camera_pos: [batch_size, num_images, 3]
+ rays: [batch_size, num_images, height, width, 3]
+ Returns:
+ scene representation: [batch_size, num_patches, channels_per_patch]
+ """
+
+ x = self.embedder(x) # B L C
+ # x = einops.rearrange(x, '(B V) L C -> B (V L) C', V=self.num_frames)
+ x = self.transformer_selfattn(x)
+ x = einops.rearrange(x, '(B V) L C -> B (V L) C', V=self.num_frames)
+ x = self.forward_tx(x)
+
+ return x
+
+
+class ImprovedSRTEncoderVAE_mlp_ratio4_f8(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(mlp_ratio=4, patch_size=8, **kwargs)
+
+
+class ImprovedSRTEncoderVAE_mlp_ratio4_f8_L6(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(mlp_ratio=4, patch_size=8, num_att_blocks=6, **kwargs)
+
+
+class ImprovedSRTEncoderVAE_mlp_ratio4_L6(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(mlp_ratio=4, num_att_blocks=6, **kwargs)
+
+
+# ! an SD VAE with one SRT attention + one CA attention for KL
+class HybridEncoder(Encoder):
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ # st()
+ self.srt = ImprovedSRTEncoderVAE(
+ **kwargs,
+ # num_frames=4,
+ num_att_blocks=1, # only one layer required
+ tx_dim=self.conv_out.weight.shape[1],
+ num_heads=8, # 256 / 64
+ mlp_ratio=4, # as in SRT
+ # patch_size=16,
+ )
+ del self.srt.embedder # tokens come from the SD conv trunk instead
+ self.conv_out = nn.Identity()
+
+ def forward(self, x, **kwargs):
+ x = super().forward(x)
+ x = einops.rearrange(x,
+ '(B V) C H W -> B (V H W) C',
+ V=self.srt.num_frames)
+ x = self.srt.forward_tx(x)
+ return x
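+
+# Data-flow sketch (assuming ch=64, ch_mult=[1, 2, 4, 4], num_frames=4, and
+# 256-res inputs): the SD conv trunk yields (B*4, 256, 32, 32) features,
+# flattened to (B, 4096, 256) tokens; one SRT self-attention block and the
+# cross-attention readout then produce the rolled-out 3D latent.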
+
+
+class ImprovedSRTEncoderVAE_mlp_ratio4_heavyPatchify(ImprovedSRTEncoderVAE):
+
+ def __init__(self, **kwargs):
+ super().__init__(mlp_ratio=4, **kwargs)
+ del self.embedder
+
+ conv_blocks = [SRTConvBlock(idim=10, hdim=48)] # match the ViT-B dim
+ cur_hdim = 48 * 2
+ for i in range(1,
+ 4): # f=16 still. could reduce attention layers by one?
+ conv_blocks.append(SRTConvBlock(idim=cur_hdim, odim=None))
+ cur_hdim *= 2
+
+ self.embedder = nn.Sequential(*conv_blocks)
+
+ def forward(self, x, **kwargs):
+ """
+ Args:
+ images: [batch_size, num_images, 3, height, width]. Assume the first image is canonical.
+ camera_pos: [batch_size, num_images, 3]
+ rays: [batch_size, num_images, height, width, 3]
+ Returns:
+ scene representation: [batch_size, num_patches, channels_per_patch]
+ """
+
+ x = self.embedder(x) # B C H W
+ x = einops.rearrange(x,
+ '(B V) C H W -> B (V H W) C',
+ V=self.num_frames)
+ x = self.transformer(x) # B VL C
+
+ # ? 3DPE
+ x = self.readout_ca(self.latent_embedding.repeat(x.shape[0], 1, 1), x)
+
+ # ! reshape to 3D latent here. how to make the latent 3D-aware? Later. Performance first.
+ x = einops.rearrange(x, 'B (N H W) C -> B C (N H) W', H=32, W=32, N=3)
+
+ return x
+
+
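+# Point-cloud-structured latent encoder (a summary of the flow below): the SD
+# conv trunk produces per-view tokens, each token receives a 3D positional
+# embedding at its unprojected xyz, features are aggregated onto
+# farthest-point-sampled surface points by cross-attention, refined by a small
+# transformer, and reduced to VAE moments by Mlp_out.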
+class HybridEncoderPCDStructuredLatent(Encoder):
+
+ def __init__(self, num_frames, latent_num=768, **kwargs):
+ super().__init__(**kwargs)
+ # st()
+ self.num_frames = num_frames
+ tx_dim = self.conv_out.weight.shape[1] # after encoder mid_layers
+ self.srt = ImprovedSRTEncoderVAE(
+ **kwargs,
+ # num_frames=4,
+ num_att_blocks=3, # three attention blocks here
+ tx_dim=tx_dim,
+ num_heads=8, # 256 / 64
+ mlp_ratio=4, # as in SRT
+ )
+ del self.srt.embedder, self.srt.readout_ca, self.srt.latent_embedding # tokens come from the conv trunk; aggregation happens in agg_ca below
+
+ # self.box_pool2d = kornia.filters.BlurPool2D(kernel_size=(8,8), stride=8)
+ self.box_pool2d = kornia.filters.BlurPool2D(kernel_size=(8, 8),
+ stride=8)
+ # self.pool2d = kornia.filters.MedianBlur(kernel_size=(8,8), stride=8)
+ self.agg_ca = MemoryEfficientCrossAttention(
+ tx_dim,
+ tx_dim,
+ qk_norm=True, # as in vit-22B
+ )
+ self.spatial_token_reshape = lambda x: einops.rearrange(
+ x, '(B V) C H W -> B (V H W) C', V=self.num_frames)
+ self.latent_num = latent_num # 768 by default
+ self.xyz_pos_embed = XYZPosEmbed(tx_dim)
+
+ # ! VAE part
+ self.conv_out = nn.Identity()
+ self.Mlp_out = PreNorm(
+ tx_dim, # ! PreNorm before the VAE reduction stabilizes training.
+ Mlp(
+ in_features=tx_dim, # reduce dim
+ hidden_features=tx_dim,
+ out_features=self.z_channels * 2, # double_z
+ act_layer=approx_gelu,
+ drop=0))
+ self.ca_no_pcd = False
+ self.pixel_aligned_query = False
+ # pc2 is switched on by the *_PC2 subclass; its projection branch in
+ # forward() is still unfinished, so it defaults to off here.
+ self.pc2 = False
+ # rasterization settings are prepared unconditionally so a subclass can
+ # flip self.pc2 on after construction.
+ # https://github.com/lukemelas/projection-conditioned-point-cloud-diffusion/blob/64fd55a0d00b52735cf02e11c5112374c7104ece/experiments/model/projection_model.py#L87
+ # Save rasterization settings
+ raster_point_radius: float = 0.0075 # point size
+ image_size = 512 # ? hard coded
+ raster_points_per_pixel: int = 1
+ bin_size: int = 0
+ self.raster_settings = PointsRasterizationSettings(
+ image_size=(image_size, image_size),
+ radius=raster_point_radius,
+ points_per_pixel=raster_points_per_pixel,
+ bin_size=bin_size,
+ )
+ self.scale_factor = 1
+
+
+ # def _process_token_xyz(self, token_xyz, h):
+ # # pad zero xyz points to reasonable value.
+
+ # nonzero_mask = (token_xyz != 0).all(dim=2) # Shape: (B, N)
+ # non_zero_token_xyz = token_xyz[nonzero_mask]
+ # non_zero_token_h = h[nonzero_mask]
+
+ # # for loop to get foreground points of each instance
+ # # TODO, accelerate with vmap
+ # # No, directly use sparse pcd as input as surface points? fps sampling 768 from 4096 points.
+ # # All points here should not have 0 xyz.
+ # # fg_token_xyz = []
+ # # for idx in range(token_xyz.shape[1]):
+
+ # fps_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # non_zero_token_xyz, K=self.latent_num) # B self.latent_num
+ # # pcu.save_mesh_v(f'xyz.ply', xyz[0].float().detach().permute(1,2,0).reshape(-1,3).cpu().numpy(),) # check result first, before fps sampling
+ # # pcu.save_mesh_v(f'fps_xyz.ply', fps_xyz[0].float().detach().reshape(-1,3).cpu().numpy(),) # check result first, before fps sampling
+ # pcu.save_mesh_v(f'token_xyz3.ply', token_xyz[0].float().detach().reshape(-1,3).cpu().numpy(),)
+ # # xyz = self.spatial_token_reshape(xyz)
+ # # pcu.save_mesh_v(f'xyz_new.ply', xyz[0].float().detach().reshape(-1,3).cpu().numpy(),)
+
+ # st()
+ # query_h = masked_gather(non_zero_token_h, fps_idx) # torch.gather with dim expansion
+
+ # return query_h, fps_xyz
+
+ def _process_token_xyz(self, pcd, pcd_h):
+ # ! 16x uniform downsample before FPS.
+ # rand_start_pt = random.randint(0,16)
+ # query_pcd_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # pcd[:, rand_start_pt::16], K=self.latent_num, random_start_point=True) # B self.latent_num
+ # query_pcd_h = masked_gather(pcd_h[:, rand_start_pt::16], fps_idx) # torch.gather with dim expansion
+
+ # ! fps very slow on high-res pcd
+ query_pcd_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ pcd, K=self.latent_num,
+ # random_start_point=False) # B self.latent_num
+ random_start_point=True) # B self.latent_num
+ query_pcd_h = masked_gather(pcd_h,
+ fps_idx) # torch.gather with dim expansion
+
+ # pcu.save_mesh_v(f'xyz.ply', xyz[0].float().detach().permute(1,2,0).reshape(-1,3).cpu().numpy(),) # check result first, before fps sampling
+ # pcu.save_mesh_v(f'fps_xyz.ply', fps_xyz[0].float().detach().reshape(-1,3).cpu().numpy(),) # check result first, before fps sampling
+ # pcu.save_mesh_v(f'query_pcd_xyz.ply', query_pcd_xyz[0].float().detach().reshape(-1,3).cpu().numpy(),)
+ # pcu.save_mesh_v(f'pcd_xyz.ply', pcd[0].float().detach().reshape(-1,3).cpu().numpy(),)
+ # xyz = self.spatial_token_reshape(xyz)
+ # pcu.save_mesh_v(f'xyz_new.ply', xyz[0].float().detach().reshape(-1,3).cpu().numpy(),)
+
+ return query_pcd_h, query_pcd_xyz
+
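+ # For reference (a sketch of the pytorch3d calls above): with pcd of shape
+ # [B, P, 3] and latent_num=768, sample_farthest_points returns the sampled
+ # points [B, 768, 3] plus their indices [B, 768], and masked_gather lifts
+ # the matching features out of pcd_h: [B, P, C] -> [B, 768, C].
+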
+ def forward(self, x, pcd, **kwargs):
+
+ # def forward(self, x, num_frames=None):
+ assert x.shape[1] == 15 # rgb(3),normal(3),plucker_ray(6),xyz(3)
+ xyz = x[:, -3:, ...] # for fps downsampling
+
+ # 0. retrieve VAE tokens
+ h = super().forward(
+ x, num_frames=self.num_frames
+ ) # ! data-aug question: should different FPS samplings of one instance share the same latent?
+
+ # st()
+ # pcu.save_mesh_v(f'{Path(logger.get_dir())}/anchor_all.ply',pcd[0].float().detach().cpu().numpy())
+
+ # ! add 3D PE.
+ # 1. unproj 2D tokens to 3D
+ token_xyz = xyz[..., 4::8, 4::8]
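+ # (the conv trunk downsamples by 8x, so `4::8` reads the xyz at the centre
+ # of the 8x8 pixel patch that each latent token corresponds to)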
+
+ if self.pixel_aligned_query:
+
+ # h = self.spatial_token_reshape(h) # V frames merge to a single latent here.
+ # h = h + self.xyz_pos_embed(token_xyz) # directly add PE to h here.
+
+ # # ! PE over surface fps-pcd
+ # pcd_h = self.xyz_pos_embed(pcd) # directly add PE to h here.
+
+ # 2. fps sampling surface as pcd-structured latent.
+ h, query_pcd_xyz = self._process_token_xyz(
+ pcd, token_xyz, h, c=kwargs.get('c'),
+ x=x) # aggregate with pixel-aligned operation.
+
+ elif self.pc2: # rasterize the point cloud to multi-view feature maps
+ # https://github.com/lukemelas/projection-conditioned-point-cloud-diffusion/blob/64fd55a0d00b52735cf02e11c5112374c7104ece/experiments/model/projection_model.py#L128
+
+ # ! prepare the features before projection
+ token_xyz = self.spatial_token_reshape(token_xyz)
+ h = self.spatial_token_reshape(
+ h) # V frames merge to a single latent here.
+ # directly add PE to h here.
+ h = h + self.xyz_pos_embed(token_xyz) # h: B L C
+
+ # ! prepare pytorch3d camera
+ c = kwargs['c'] # gs_format dict
+ focal_length = c['orig_pose'][..., 16:17] # B V 1
+ img_h, img_w = x.shape[-2:]
+ R, T = c['R'], c['T'] # B V 3 3, B V 3
+
+ # ! bs=1 test. will merge B, V later for parallel compute.
+ V = focal_length.shape[1]
+ principal_point = torch.zeros(V, 2)
+ img_size = torch.Tensor([img_h, img_w]).unsqueeze(0).repeat_interleave(V, 0).to(focal_length)
+ camera = PerspectiveCameras(focal_length=focal_length[0],principal_point=principal_point, R=R[0], T=T[0], image_size=img_size)
+
+ # camera = PerspectiveCameras(focal_length=focal_length, R=R, T=T, image_size=(img_h, img_w))
+ # !Create rasterizer
+ rasterizer = PointsRasterizer(cameras=camera.to(pcd.device), raster_settings=self.raster_settings)
+
+ fragments = rasterizer(Pointclouds(pcd[0:1].repeat_interleave(V, 0))) # (B, H, W, R)
+ fragments_idx: torch.Tensor = fragments.idx.long()
+ visible_pixels = (fragments_idx > -1) # (B, H, W, R)
+
+ view_idx = 0 # Index of the viewpoint
+ # (Pdb) fragments.zbuf.shape
+ # torch.Size([8, 512, 512, 1])
+ # depth_image = fragments.zbuf[0, ..., 0].cpu().numpy() # Take the nearest point's depth
+ # depth_image = (depth_image - depth_image.min()) / (depth_image.max()-depth_image.min())
+ # imageio.imwrite('tmp/depth.jpg', (depth_image*255.0).astype(np.uint8))
+ # st()
+
+ points_to_visible_pixels = fragments_idx[visible_pixels]
+ # ! visualize the results
+
+ # for debug
+ normal = x[:, 3:6, ...]
+ normal_map = (normal * 127.5 + 127.5).float().to(
+ torch.uint8) # BV 3 H W
+
+ st()
+ pass
+
+ else:
+ token_xyz = self.spatial_token_reshape(token_xyz)
+ h = self.spatial_token_reshape(
+ h) # V frames merge to a single latent here.
+ h = h + self.xyz_pos_embed(token_xyz) # directly add PE to h here.
+
+ # ! PE over surface fps-pcd
+ pcd_h = self.xyz_pos_embed(pcd) # directly add PE to h here.
+
+ # 2. fps sampling surface as pcd-structured latent.
+ query_pcd_h, query_pcd_xyz = self._process_token_xyz(pcd, pcd_h)
+
+ # 2.5 Cross attention to aggregate from all tokens.
+ if self.ca_no_pcd:
+ h = self.agg_ca(query_pcd_h, h)
+ else:
+ h = self.agg_ca(
+ query_pcd_h, torch.cat([h, pcd_h], dim=1)
+ ) # cross attend to aggregate info from both vae-h and pcd-h
+
+ # 3. run the SRT transformer blocks (num_att_blocks=3 above; xyz-PE already added)
+ # h = h + self.xyz_pos_embed(fps_xyz) # TODO, add PE of query pts. directly add to h here.
+ h = self.srt.transformer(h) # B L C
+
+ h = self.Mlp_out(h) # equivalent to conv_out, 256 -> 8 in sd-VAE
+ # h = einops.rearrange(h, 'B L C -> B C L') # for VAE compat
+
+ return {
+ 'h': h,
+ 'query_pcd_xyz': query_pcd_xyz
+ } # h_0, point cloud-structured latent space. For VAE later.
+
+
+class HybridEncoderPCDStructuredLatentUniformFPS(
+ HybridEncoderPCDStructuredLatent):
+
+ def __init__(self, num_frames, latent_num=768, **kwargs):
+ super().__init__(num_frames, latent_num, **kwargs)
+ self.ca_no_pcd = True # check speed up ratio
+
+ def _process_token_xyz(self, pcd, pcd_h):
+ # ! 16x uniform downsample before FPS.
+ rand_start_pt = random.randint(0, 16)
+ # rand_start_pt = 0
+ query_pcd_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # pcd[:, rand_start_pt::16], K=self.latent_num, random_start_point=False) # B self.latent_num
+ pcd[:, rand_start_pt::16],
+ K=self.latent_num,
+ random_start_point=True) # B self.latent_num
+ query_pcd_h = masked_gather(pcd_h[:, rand_start_pt::16],
+ fps_idx) # torch.gather with dim expansion
+ # st()
+
+ # ! fps very slow on high-res pcd
+ # query_pcd_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # pcd, K=self.latent_num, random_start_point=True) # B self.latent_num
+ # query_pcd_h = masked_gather(pcd_h, fps_idx) # torch.gather with dim expansion
+
+ # pcu.save_mesh_v(f'xyz.ply', xyz[0].float().detach().permute(1,2,0).reshape(-1,3).cpu().numpy(),) # check result first, before fps sampling
+ # pcu.save_mesh_v(f'fps_xyz.ply', fps_xyz[0].float().detach().reshape(-1,3).cpu().numpy(),) # check result first, before fps sampling
+ # pcu.save_mesh_v(f'query_pcd_xyz.ply', query_pcd_xyz[0].float().detach().reshape(-1,3).cpu().numpy(),)
+ # pcu.save_mesh_v(f'pcd_xyz.ply', pcd[0].float().detach().reshape(-1,3).cpu().numpy(),)
+ # xyz = self.spatial_token_reshape(xyz)
+ # pcu.save_mesh_v(f'xyz_new.ply', xyz[0].float().detach().reshape(-1,3).cpu().numpy(),)
+
+ return query_pcd_h, query_pcd_xyz
+
+
+class HybridEncoderPCDStructuredLatentSNoPCD(HybridEncoderPCDStructuredLatent):
+
+ def __init__(self, num_frames, latent_num=768, **kwargs):
+ super().__init__(num_frames, latent_num, **kwargs)
+ self.ca_no_pcd = True
+
+class HybridEncoderPCDStructuredLatentSNoPCD_PC2(HybridEncoderPCDStructuredLatentSNoPCD):
+
+ def __init__(self, num_frames, latent_num=768, **kwargs):
+ super().__init__(num_frames, latent_num, **kwargs)
+ self.pc2 = True
+
+
+class HybridEncoderPCDStructuredLatentSNoPCD_PixelAlignedQuery(
+ HybridEncoderPCDStructuredLatent):
+
+ def __init__(self, num_frames, latent_num=768, **kwargs):
+ super().__init__(num_frames, latent_num, **kwargs)
+ self.ca_no_pcd = True
+ self.pixel_aligned_query = True
+ self.F = 4 # pixel-aligned query from nearest F views
+
+ del self.agg_ca # for average pooling now.
+
+ def _pcd_to_homo(self, pcd):
+ return torch.cat([pcd, torch.ones_like(pcd[..., 0:1])], -1)
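+ # (appends a column of ones: [B, N, 3] xyz -> [B, N, 4] homogeneous coords)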
+
+ # ! FPS sampling
+ def _process_token_xyz(self, pcd, token_xyz, h, c, x=None):
+ V = c['cam_pos'].shape[1]
+
+ # (Pdb) p c.keys()
+ # dict_keys(['source_cv2wT_quat', 'cam_view', 'cam_view_proj', 'cam_pos', 'tanfov', 'orig_pose', 'orig_c2w', 'orig_w2c'])
+ # (Pdb) p c['cam_view'].shape
+ # torch.Size([8, 9, 4, 4])
+ # (Pdb) p c['cam_pos'].shape
+ # torch.Size([8, 9, 3])
+
+ # ! 16x uniform downsample before FPS.
+ # rand_start_pt = random.randint(0,16)
+ # query_pcd_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ # pcd[:, rand_start_pt::16], K=self.latent_num, random_start_point=True) # B self.latent_num
+ # query_pcd_h = masked_gather(pcd_h[:, rand_start_pt::16], fps_idx) # torch.gather with dim expansion
+
+ # ! fps very slow on high-res pcd, but better.
+ # '''
+ query_pcd_xyz, fps_idx = pytorch3d.ops.sample_farthest_points(
+ pcd, K=self.latent_num, random_start_point=True) # B self.latent_num
+ # query_pcd_h = masked_gather(pcd_h, fps_idx) # torch.gather with dim expansion
+
+ # '''
+ # ! use unprojected xyz for pixel-aligned projection check
+
+ # query_pcd_xyz = self.spatial_token_reshape(token_xyz)
+ B, N = query_pcd_xyz.shape[:2]
+
+ normal = x[:, 3:6, ...]
+ normal_map = (normal * 127.5 + 127.5).float().to(
+ torch.uint8) # BV 3 H W
+
+ normal_map = einops.rearrange(normal_map,
+ '(B V) C H W -> B V C H W',
+ B=B,
+ V=V).detach().cpu() # V C H W
+ img_size = normal_map.shape[-1]
+
+ # ! ====== single-view debug here
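+ # NOTE: debug-only visualization; this loop shadows the outer `V` and the
+ # input `x`, and halts in the debugger before the projection code below runs.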
+ for b in range(c['orig_w2c'].shape[0]):
+ for V in range(c['orig_w2c'].shape[1]):
+ selected_normal = normal_map[b, V]
+ proj_point = c['orig_w2c'][b, V] @ self._pcd_to_homo(query_pcd_xyz[b]).permute(1, 0)
+ proj_point[:2, ...] /= proj_point[2, ...]
+ proj_point[2, ...] = 1 # homo
+
+
+ intrin = c['orig_intrin'][b, V]
+ proj_point = intrin @ proj_point[:3]
+ proj_point = proj_point.permute(1,0)[..., :2] # 768 4
+
+ # st()
+
+ # proj_point = c['cam_view_proj'][b, V] @ self._pcd_to_homo(query_pcd_xyz[b]).permute(1, 0)
+
+ # plot proj_point and save
+ for uv_idx in range(proj_point.shape[0]):
+ # uv = proj_point[uv_idx] * 127.5 + 127.5
+ uv = proj_point[uv_idx] * img_size
+ x, y = int(uv[0].clip(0, img_size)), int(uv[1].clip(0, img_size))
+ selected_normal[:, max(y - 1, 0):min(y + 1, img_size),
+ max(x - 1, 0):min(x + 1, img_size)] = torch.Tensor([
+ 255, 0, 0
+ ]).reshape(3, 1, 1).to(selected_normal) # set to red
+
+ torchvision.utils.save_image(selected_normal.float(),
+ f'tmp/pifu_normal_{b}_{V}.jpg',
+ normalize=True,
+ value_range=(0, 255))
+
+
+ st()
+ pass
+
+ st()
+ # ! ====== single-view debug done
+
+
+ # ! project pcd to each views
+ batched_query_pcd = einops.repeat(self._pcd_to_homo(query_pcd_xyz),
+ 'B N C -> (B V N) C 1',
+ V=V)
+ batched_cam_view_proj = einops.repeat(c['cam_view_proj'],
+ 'B V H W -> (B V N) H W',
+ N=N)
+
+ batched_proj_uv = einops.rearrange(
+ (batched_cam_view_proj @ batched_query_pcd),
+ '(B V N) L 1 -> (B V) L N',
+ B=B,
+ V=V,
+ N=N) # BV 4 N
+ batched_proj_uv = batched_proj_uv[..., :2, :] # BV N 2
+
+ # draw projected UV coordinate on 2d normal map
+ # idx_to_vis = 15 * 32 + 16 # middle of the img
+ # idx_to_vis = 16 * 6 + 15 * 32 + 16 # middle of the img
+ idx_to_vis = 0 # use fps points here
+ # st()
+
+ selected_proj_uv = einops.rearrange(batched_proj_uv,
+ '(B V) C N -> B V C N',
+ B=B,
+ V=V,
+ N=N)[0, ...,
+ idx_to_vis] # V 2 N -> V 2
+ # selected_normal = einops.rearrange(normal_map,
+ # '(B V) C H W -> B V C H W',
+ # B=B,
+ # V=V)[0].detach().cpu() # V C H W
+
+ for uv_idx in range(selected_proj_uv.shape[0]):
+ uv = selected_proj_uv[uv_idx] * 127.5 + 127.5
+ x, y = int(uv[0].clip(0, 255)), int(uv[1].clip(0, 255))
+ selected_normal[uv_idx, :,
+ max(y - 5, 0):min(y + 5, 255),
+ max(x - 5, 0):min(x + 5, 255)] = torch.Tensor([
+ 255, 0, 0
+ ]).reshape(3, 1,
+ 1).to(selected_normal) # set to red
+ # selected_normal[uv_idx, :, max(y-5, 0):min(y+5, 255), max(x-5,0):min(x+5,255)] = torch.Tensor([255,0,0]).to(selected_normal) # set to red
+ # st()
+ torchvision.utils.save_image(selected_normal.float(),
+ 'pifu_normal.jpg',
+ normalize=True,
+ value_range=(0, 255))
+ st()
+ pass
+
+ # ! grid sample
+ query_pcd_h = index(
+ h, batched_proj_uv) # h: (B V) C H W, uv: (B V) N 2 -> BV 256 768
+
+ query_pcd_h_to_gather = einops.rearrange(query_pcd_h,
+ '(B V) C N -> B N V C',
+ B=B,
+ V=V,
+ N=N)
+
+ # ! find nearest F views
+ _, knn_idx, _ = pytorch3d.ops.knn_points(
+ query_pcd_xyz, c['cam_pos'], K=self.F,
+ return_nn=False) # knn_idx: B N F
+ knn_idx_expanded = knn_idx[..., None].expand(
+ -1, -1, -1, query_pcd_h_to_gather.shape[-1]) # B N F -> B N F C
+ knn_pcd_h = torch.gather(
+ query_pcd_h_to_gather, dim=2,
+ index=knn_idx_expanded) # torch.Size([8, 768, 4, 256])
+
+ # average pooling knn feature.
+ query_pcd_h = knn_pcd_h.mean(dim=2)
+
+ # add PE
+ pcd_h = self.xyz_pos_embed(query_pcd_xyz) # pcd_h as PE feature.
+ query_pcd_h = query_pcd_h + pcd_h
+
+ # TODO: QKV aggregation with pcd_h as q, query_pcd_h as kv. Requires gather?
+ '''not used; binary mask for aggregation.
+
+ # * mask idx not used anymore. torch.gather() instead, more flexible.
+ # knn_idx_mask = torch.zeros((B,N,V), device=knn_idx.device)
+ # knn_idx_mask.scatter_(dim=2, index=knn_idx, src=torch.ones_like(knn_idx_mask)) # ! B N V
+
+ # try gather
+ # gather_idx = einops.rearrange(knn_idx_mask, 'B N V -> B N V 1').bool()
+
+ # query_pcd_h = einops.rearrange(query_pcd_h, "(B V) C N -> B N V C", B=pcd_h.shape[0], N=self.latent_num, V=V) # torch.Size([8, 768, 4, 256])
+ # ! apply KNN mask and average the feature.
+ # query_pcd_h = einops.reduce(query_pcd_h * knn_idx_mask.unsqueeze(-1), 'B N V C -> B N C', 'sum') / self.F # B 768 256. average pooling aggregated feature, like in pifu.
+ '''
+ '''
+ # pixel-aligned projection, not efficient enough.
+ knn_cam_view_proj = pytorch3d.ops.knn_gather(einops.rearrange(c['cam_view_proj'], 'B V H W-> B V (H W)'), knn_idx) # get corresponding cam_view_projection matrix (P matrix)
+ knn_cam_view_proj = einops.rearrange(knn_cam_view_proj, 'B N F (H W) -> (B N F) H W', H=4, W=4) # for matmul. H=W=4 here, P matrix.
+
+ batched_query_pcd = einops.repeat(self._pcd_to_homo(query_pcd_xyz), 'B N C -> (B N F) C 1', F=self.F)
+ xyz = knn_cam_view_proj @ batched_query_pcd # BNF 4 1
+
+ # st()
+ knn_spatial_feat = pytorch3d.ops.knn_gather(einops.rearrange(h, '(B V) C H W -> B V (C H W)', V=self.num_frames), knn_idx) # get corresponding feat for grid_sample
+ knn_spatial_feat = einops.rearrange(knn_spatial_feat, 'B N F (C H W) -> (B N F) C H W', C=h.shape[-3], H=h.shape[-2], W=h.shape[-1])
+ '''
+
+ # grid_sample
+ # https://github.com/shunsukesaito/PIFu/blob/f0a9c99ef887e1eb360e865a87aa5f166231980e/lib/geometry.py#L15
+
+ # average pooling multi-view extracted information
+
+ return query_pcd_h, query_pcd_xyz
diff --git a/nsr/srt/layers.py b/nsr/srt/layers.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b26be0995f50b79b9e62057a4e57f9026ccec6b
--- /dev/null
+++ b/nsr/srt/layers.py
@@ -0,0 +1,190 @@
+import torch
+import torch.nn as nn
+import numpy as np
+
+import math
+from einops import rearrange
+from vit.vision_transformer import MemEffAttention, Attention
+# from xformers.triton import FusedLayerNorm as LayerNorm
+from torch.nn import LayerNorm
+from xformers.components.feedforward import fused_mlp
+# from xformers.components.feedforward import mlp
+from xformers.components.activations import Activation
+
+
+class PositionalEncoding(nn.Module):
+
+ def __init__(self, num_octaves=8, start_octave=0):
+ super().__init__()
+ self.num_octaves = num_octaves
+ self.start_octave = start_octave
+
+ def forward(self, coords, rays=None):
+ batch_size, num_points, dim = coords.shape
+
+ octaves = torch.arange(self.start_octave,
+ self.start_octave + self.num_octaves)
+ octaves = octaves.float().to(coords)
+ multipliers = 2**octaves * math.pi
+ coords = coords.unsqueeze(-1)
+ while len(multipliers.shape) < len(coords.shape):
+ multipliers = multipliers.unsqueeze(0)
+
+ scaled_coords = coords * multipliers
+
+ sines = torch.sin(scaled_coords).reshape(batch_size, num_points,
+ dim * self.num_octaves)
+ cosines = torch.cos(scaled_coords).reshape(batch_size, num_points,
+ dim * self.num_octaves)
+
+ result = torch.cat((sines, cosines), -1)
+ return result
+
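+# Example (a sketch): NeRF-style frequency encoding. With num_octaves=15, a
+# 3-d coordinate maps to 3*15 sines + 3*15 cosines = 90 channels per point:
+#   pe = PositionalEncoding(num_octaves=15)
+#   out = pe(torch.randn(2, 100, 3))  # -> [2, 100, 90]
+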
+
+class RayEncoder(nn.Module):
+
+ def __init__(self,
+ pos_octaves=8,
+ pos_start_octave=0,
+ ray_octaves=4,
+ ray_start_octave=0):
+ super().__init__()
+ self.pos_encoding = PositionalEncoding(num_octaves=pos_octaves,
+ start_octave=pos_start_octave)
+ self.ray_encoding = PositionalEncoding(num_octaves=ray_octaves,
+ start_octave=ray_start_octave)
+
+ def forward(self, pos, rays):
+ if len(rays.shape) == 4:
+ batchsize, height, width, dims = rays.shape
+ pos_enc = self.pos_encoding(pos.unsqueeze(1))
+ pos_enc = pos_enc.view(batchsize, pos_enc.shape[-1], 1, 1)
+ pos_enc = pos_enc.repeat(1, 1, height, width)
+ rays = rays.flatten(1, 2)
+
+ ray_enc = self.ray_encoding(rays)
+ ray_enc = ray_enc.view(batchsize, height, width, ray_enc.shape[-1])
+ ray_enc = ray_enc.permute((0, 3, 1, 2))
+ x = torch.cat((pos_enc, ray_enc), 1)
+ else:
+ pos_enc = self.pos_encoding(pos)
+ ray_enc = self.ray_encoding(rays)
+ x = torch.cat((pos_enc, ray_enc), -1)
+
+ return x
+
+
+# Transformer implementation based on ViT
+# https://github.com/lucidrains/vit-pytorch/blob/main/vit_pytorch/vit.py
+
+
+class PreNorm(nn.Module):
+
+ def __init__(self, dim, fn):
+ super().__init__()
+ self.norm = LayerNorm(dim)
+ self.fn = fn
+
+ def forward(self, x, **kwargs):
+ return self.fn(self.norm(x), **kwargs)
+
+
+class FeedForward(nn.Module):
+
+ def __init__(self, dim, hidden_dim, dropout=0.):
+ super().__init__()
+ self.net = nn.Sequential(nn.Linear(dim, hidden_dim), nn.GELU(),
+ nn.Dropout(dropout),
+ nn.Linear(hidden_dim,
+ dim), nn.Dropout(dropout))
+
+ def forward(self, x):
+ return self.net(x)
+
+
+# class Attention(nn.Module):
+# def __init__(self, dim, heads=8, dim_head=64, dropout=0., selfatt=True, kv_dim=None):
+# super().__init__()
+# inner_dim = dim_head * heads
+# project_out = not (heads == 1 and dim_head == dim)
+
+# self.heads = heads
+# self.scale = dim_head ** -0.5
+
+# self.attend = nn.Softmax(dim=-1)
+# if selfatt:
+# self.to_qkv = nn.Linear(dim, inner_dim * 3, bias=False)
+# else:
+# self.to_q = nn.Linear(dim, inner_dim, bias=False)
+# self.to_kv = nn.Linear(kv_dim, inner_dim * 2, bias=False)
+
+# self.to_out = nn.Sequential(
+# nn.Linear(inner_dim, dim),
+# nn.Dropout(dropout)
+# ) if project_out else nn.Identity()
+
+# def forward(self, x, z=None):
+# if z is None:
+# qkv = self.to_qkv(x).chunk(3, dim=-1)
+# else:
+# q = self.to_q(x)
+# k, v = self.to_kv(z).chunk(2, dim=-1)
+# qkv = (q, k, v)
+
+# q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = self.heads), qkv)
+
+# dots = torch.matmul(q, k.transpose(-1, -2)) * self.scale
+
+# attn = self.attend(dots)
+
+# out = torch.matmul(attn, v)
+# out = rearrange(out, 'b h n d -> b n (h d)')
+# return self.to_out(out)
+
+
+class Transformer(nn.Module):
+
+ def __init__(self,
+ dim,
+ depth,
+ heads,
+ mlp_dim,
+ dropout=0.,
+ selfatt=True,
+ kv_dim=None,
+ no_flash_op=False,):
+ super().__init__()
+
+ # if no_flash_op:
+ # attn_cls = Attention # raw torch attention
+ # else:
+ attn_cls = MemEffAttention
+
+ self.layers = nn.ModuleList([])
+ for _ in range(depth):
+ self.layers.append(
+ nn.ModuleList([
+ PreNorm(dim,
+ attn_cls(
+ dim,
+ num_heads=heads,
+ qkv_bias=True,
+ qk_norm=True, # as in vit-22B
+ no_flash_op=no_flash_op,
+ )),
+ PreNorm(
+ dim,
+ fused_mlp.FusedMLP(dim,
+ # mlp.MLP(dim,
+ hidden_layer_multiplier=mlp_dim //
+ dim,
+ dropout=dropout,
+ activation=Activation.GeLU))
+ ]))
+
+ def forward(self, x):
+ for attn, ff in self.layers: # type: ignore
+ x = attn(x) + x
+ x = ff(x) + x
+ return x
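+
+
+# Usage sketch (assuming a CUDA device, since the fused xformers ops generally
+# require GPU tensors): a pre-norm self-attention trunk over 768-d tokens,
+# mirroring the SRT configuration used above (depth=5, heads=12):
+#   tx = Transformer(768, depth=5, heads=12, mlp_dim=1536).cuda()
+#   y = tx(torch.randn(2, 1024, 768, device='cuda'))  # -> [2, 1024, 768]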
diff --git a/nsr/superresolution.py b/nsr/superresolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca286d4ba3b08f03f875b97a35ca91b98baa49b4
--- /dev/null
+++ b/nsr/superresolution.py
@@ -0,0 +1,446 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Superresolution network architectures from the paper
+"Efficient Geometry-aware 3D Generative Adversarial Networks"."""
+
+import torch
+from nsr.networks_stylegan2 import Conv2dLayer, SynthesisLayer, ToRGBLayer
+from torch_utils.ops import upfirdn2d
+from torch_utils import persistence
+from torch_utils import misc
+
+from nsr.networks_stylegan2 import SynthesisBlock
+import numpy as np
+from pdb import set_trace as st
+
+
+@persistence.persistent_class
+class SynthesisBlockNoUp(torch.nn.Module):
+ def __init__(
+ self,
+ in_channels, # Number of input channels, 0 = first block.
+ out_channels, # Number of output channels.
+ w_dim, # Intermediate latent (W) dimensionality.
+ resolution, # Resolution of this block.
+ img_channels, # Number of output color channels.
+ is_last, # Is this the last block?
+ architecture='skip', # Architecture: 'orig', 'skip', 'resnet'.
+ resample_filter=[
+ 1, 3, 3, 1
+ ], # Low-pass filter to apply when resampling activations.
+ conv_clamp=256, # Clamp the output of convolution layers to +-X, None = disable clamping.
+ use_fp16=False, # Use FP16 for this block?
+ fp16_channels_last=False, # Use channels-last memory format with FP16?
+ fused_modconv_default=True, # Default value of fused_modconv. 'inference_only' = True for inference, False for training.
+ **layer_kwargs, # Arguments for SynthesisLayer.
+ ):
+ assert architecture in ['orig', 'skip', 'resnet']
+ super().__init__()
+ self.in_channels = in_channels
+ self.w_dim = w_dim
+ self.resolution = resolution
+ self.img_channels = img_channels
+ self.is_last = is_last
+ self.architecture = architecture
+ self.use_fp16 = use_fp16
+ self.channels_last = (use_fp16 and fp16_channels_last)
+ self.fused_modconv_default = fused_modconv_default
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter(resample_filter))
+ self.num_conv = 0
+ self.num_torgb = 0
+
+ if in_channels == 0:
+ self.const = torch.nn.Parameter(
+ torch.randn([out_channels, resolution, resolution]))
+
+ if in_channels != 0:
+ self.conv0 = SynthesisLayer(in_channels,
+ out_channels,
+ w_dim=w_dim,
+ resolution=resolution,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last,
+ **layer_kwargs)
+ self.num_conv += 1
+
+ self.conv1 = SynthesisLayer(out_channels,
+ out_channels,
+ w_dim=w_dim,
+ resolution=resolution,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last,
+ **layer_kwargs)
+ self.num_conv += 1
+
+ if is_last or architecture == 'skip':
+ self.torgb = ToRGBLayer(out_channels,
+ img_channels,
+ w_dim=w_dim,
+ conv_clamp=conv_clamp,
+ channels_last=self.channels_last)
+ self.num_torgb += 1
+
+ if in_channels != 0 and architecture == 'resnet':
+ self.skip = Conv2dLayer(in_channels,
+ out_channels,
+ kernel_size=1,
+ bias=False,
+ up=2,
+ resample_filter=resample_filter,
+ channels_last=self.channels_last)
+
+ def forward(self,
+ x,
+ img,
+ ws,
+ force_fp32=False,
+ fused_modconv=None,
+ update_emas=False,
+ **layer_kwargs):
+ _ = update_emas # unused
+ misc.assert_shape(ws,
+ [None, self.num_conv + self.num_torgb, self.w_dim])
+ w_iter = iter(ws.unbind(dim=1))
+ if ws.device.type != 'cuda':
+ force_fp32 = True
+ dtype = torch.float16 if self.use_fp16 and not force_fp32 else torch.float32
+ memory_format = torch.channels_last if self.channels_last and not force_fp32 else torch.contiguous_format
+ if fused_modconv is None:
+ fused_modconv = self.fused_modconv_default
+ if fused_modconv == 'inference_only':
+ fused_modconv = (not self.training)
+
+ # Input.
+ if self.in_channels == 0:
+ x = self.const.to(dtype=dtype, memory_format=memory_format)
+ x = x.unsqueeze(0).repeat([ws.shape[0], 1, 1, 1])
+ else:
+ misc.assert_shape(
+ x, [None, self.in_channels, self.resolution, self.resolution])
+ x = x.to(dtype=dtype, memory_format=memory_format)
+
+ # Main layers.
+ if self.in_channels == 0:
+ x = self.conv1(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+ elif self.architecture == 'resnet':
+ y = self.skip(x, gain=np.sqrt(0.5))
+ x = self.conv0(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+ x = self.conv1(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ gain=np.sqrt(0.5),
+ **layer_kwargs)
+ x = y.add_(x)
+ else:
+ x = self.conv0(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+ x = self.conv1(x,
+ next(w_iter),
+ fused_modconv=fused_modconv,
+ **layer_kwargs)
+
+ # ToRGB.
+ # if img is not None:
+ # misc.assert_shape(img, [None, self.img_channels, self.resolution // 2, self.resolution // 2])
+ # img = upfirdn2d.upsample2d(img, self.resample_filter)
+ if self.is_last or self.architecture == 'skip':
+ y = self.torgb(x, next(w_iter), fused_modconv=fused_modconv)
+ y = y.to(dtype=torch.float32,
+ memory_format=torch.contiguous_format)
+ img = img.add_(y) if img is not None else y
+
+ # assert x.dtype == dtype # support AMP in this library
+ assert img is None or img.dtype == torch.float32
+ return x, img
+
+ def extra_repr(self):
+ return f'resolution={self.resolution:d}, architecture={self.architecture:s}'
+
+
+#----------------------------------------------------------------------------
+
+
+# for 512x512 generation
+@persistence.persistent_class
+class SuperresolutionHybrid8X(torch.nn.Module):
+ def __init__(
+ self,
+ channels,
+ img_resolution,
+ sr_num_fp16_res,
+ sr_antialias,
+ num_fp16_res=4,
+ conv_clamp=None,
+ channel_base=None,
+ channel_max=None, # IGNORE
+ **block_kwargs):
+ super().__init__()
+ # assert img_resolution == 512
+
+ use_fp16 = sr_num_fp16_res > 0
+ self.input_resolution = 128
+ self.sr_antialias = sr_antialias
+ self.block0 = SynthesisBlock(channels,
+ 128,
+ w_dim=512,
+ resolution=256,
+ img_channels=3,
+ is_last=False,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.block1 = SynthesisBlock(128,
+ 64,
+ w_dim=512,
+ resolution=512,
+ img_channels=3,
+ is_last=True,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter([1, 3, 3, 1]))
+
+ def forward(self, rgb, x, ws, **block_kwargs):
+ ws = ws[:, -1:, :].repeat(1, 3, 1)
+
+ if x.shape[-1] != self.input_resolution:
+ x = torch.nn.functional.interpolate(x,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+ rgb = torch.nn.functional.interpolate(rgb,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+
+ x, rgb = self.block0(x, rgb, ws, **block_kwargs) # block_kwargs: {'noise_mode': 'none'}
+ x, rgb = self.block1(x, rgb, ws, **block_kwargs)
+ return rgb
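+
+# (block0 upsamples 128 -> 256 and block1 upsamples 256 -> 512; `ws` is
+# broadcast to the three style inputs each SynthesisBlock consumes.)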
+
+
+#----------------------------------------------------------------------------
+
+
+# for 256x256 generation
+@persistence.persistent_class
+class SuperresolutionHybrid4X(torch.nn.Module):
+ def __init__(
+ self,
+ channels,
+ img_resolution,
+ sr_num_fp16_res,
+ sr_antialias,
+ num_fp16_res=4,
+ conv_clamp=None,
+ channel_base=None,
+ channel_max=None, # IGNORE
+ **block_kwargs):
+ super().__init__()
+ # assert img_resolution == 256
+ use_fp16 = sr_num_fp16_res > 0
+ self.sr_antialias = sr_antialias
+ self.input_resolution = 128
+ self.block0 = SynthesisBlockNoUp(
+ channels,
+ 128,
+ w_dim=512,
+ resolution=128,
+ img_channels=3,
+ is_last=False,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.block1 = SynthesisBlock(128,
+ 64,
+ w_dim=512,
+ resolution=256,
+ img_channels=3,
+ is_last=True,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter([1, 3, 3, 1]))
+
+ def forward(self, rgb, x, ws, **block_kwargs):
+ ws = ws[:, -1:, :].repeat(1, 3, 1)
+
+ if x.shape[-1] < self.input_resolution:
+ x = torch.nn.functional.interpolate(x,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+ rgb = torch.nn.functional.interpolate(rgb,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+
+ x, rgb = self.block0(x, rgb, ws, **block_kwargs)
+ x, rgb = self.block1(x, rgb, ws, **block_kwargs)
+ return rgb
+
+
+#----------------------------------------------------------------------------
+
+
+# for 128 x 128 generation
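+# SuperresolutionHybrid2X: inputs are resampled to 64x64, refined by a
+# no-upsampling block, then upsampled once to 128x128.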
+@persistence.persistent_class
+class SuperresolutionHybrid2X(torch.nn.Module):
+ def __init__(
+ self,
+ channels,
+ img_resolution,
+ sr_num_fp16_res,
+ sr_antialias,
+ num_fp16_res=4,
+ conv_clamp=None,
+ channel_base=None,
+ channel_max=None, # IGNORE
+ **block_kwargs):
+ super().__init__()
+ assert img_resolution == 128
+
+ use_fp16 = sr_num_fp16_res > 0
+ self.input_resolution = 64
+ # self.input_resolution = 128
+
+ self.sr_antialias = sr_antialias
+ self.block0 = SynthesisBlockNoUp(
+ channels,
+ 128,
+ w_dim=512,
+ resolution=64,
+ # resolution=128,
+ img_channels=3,
+ is_last=False,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.block1 = SynthesisBlock(128,
+ 64,
+ w_dim=512,
+ resolution=128,
+ # resolution=256,
+ img_channels=3,
+ is_last=True,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.register_buffer('resample_filter',
+ upfirdn2d.setup_filter([1, 3, 3, 1]))
+
+ def forward(self, rgb, x, ws, **block_kwargs):
+ ws = ws[:, -1:, :].repeat(1, 3, 1)
+
+ if x.shape[-1] != self.input_resolution:
+ x = torch.nn.functional.interpolate(x,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+ rgb = torch.nn.functional.interpolate(rgb,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+
+ x, rgb = self.block0(x, rgb, ws, **block_kwargs)
+ x, rgb = self.block1(x, rgb, ws, **block_kwargs)
+ return rgb
+
+
+#----------------------------------------------------------------------------
+
+
+# for 512x512 generation
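+# SuperresolutionHybrid8XDC: same 128 -> 256 -> 512 topology as
+# SuperresolutionHybrid8X above, but with doubled channel widths
+# (256/128); note that no resample_filter buffer is registered here.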
+@persistence.persistent_class
+class SuperresolutionHybrid8XDC(torch.nn.Module):
+ def __init__(
+ self,
+ channels,
+ img_resolution,
+ sr_num_fp16_res,
+ sr_antialias,
+ num_fp16_res=4,
+ conv_clamp=None,
+ channel_base=None,
+ channel_max=None, # IGNORE
+ **block_kwargs):
+ super().__init__()
+ # assert img_resolution == 512
+
+ use_fp16 = sr_num_fp16_res > 0
+ self.input_resolution = 128
+ self.sr_antialias = sr_antialias
+ self.block0 = SynthesisBlock(channels,
+ 256,
+ w_dim=512,
+ resolution=256,
+ img_channels=3,
+ is_last=False,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+ self.block1 = SynthesisBlock(256,
+ 128,
+ w_dim=512,
+ resolution=512,
+ img_channels=3,
+ is_last=True,
+ use_fp16=use_fp16,
+ conv_clamp=(256 if use_fp16 else None),
+ **block_kwargs)
+
+ def forward(self, rgb, x, ws, base_x=None, **block_kwargs):
+ ws = ws[:, -1:, :].repeat(1, 3, 1) # BS 3 512
+
+ # st()
+ if x.shape[-1] != self.input_resolution: # resize 64 => 128
+ x = torch.nn.functional.interpolate(x,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+ rgb = torch.nn.functional.interpolate(rgb,
+ size=(self.input_resolution,
+ self.input_resolution),
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.sr_antialias)
+
+ x, rgb = self.block0(x, rgb, ws, **block_kwargs)
+ # print(f'device={self.block0.conv1.weight.device}')
+ x, rgb = self.block1(x, rgb, ws, **block_kwargs)
+ # print(f'device={self.block1.conv1.weight.device}')
+ return rgb
+
+
+#----------------------------------------------------------------------------
diff --git a/nsr/train_nv_util.py b/nsr/train_nv_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..b98302889e808e9cbc1fe73c2e9b42ce21841fda
--- /dev/null
+++ b/nsr/train_nv_util.py
@@ -0,0 +1,3049 @@
+import copy
+import cv2
+import einops
+from collections import defaultdict
+import matplotlib.pyplot as plt
+import random
+# import emd
+import pytorch3d.loss
+# import imageio.v3
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+from einops import rearrange
+import webdataset as wds
+
+from nsr.camera_utils import generate_input_camera, uni_mesh_path
+import point_cloud_utils as pcu
+import traceback
+import blobfile as bf
+from datasets.g_buffer_objaverse import focal2fov, fov2focal
+import math
+import imageio
+import numpy as np
+# from sympy import O
+import torch
+from torch.autograd import Function
+import torch.nn.functional as F
+import torch as th
+import open3d as o3d
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard import SummaryWriter
+from tqdm import tqdm
+import pytorch3d.ops
+
+from torch.profiler import profile, record_function, ProfilerActivity
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+from guided_diffusion.train_util import (calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+
+from datasets.g_buffer_objaverse import unity2blender, unity2blender_th, PostProcess
+
+from nsr.volumetric_rendering.ray_sampler import RaySampler
+from utils.mesh_util import post_process_mesh, to_cam_open3d_compat, smooth_mesh
+from .camera_utils import LookAtPoseSampler, FOV_to_intrinsics
+from nsr.camera_utils import sample_uniform_cameras_on_sphere
+
+from utils.gs_utils.graphics_utils import getWorld2View2, getProjectionMatrix, getView2World
+from utils.general_utils import matrix_to_quaternion
+
+from .train_util import TrainLoop3DRec
+import kornia
+
+
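+# PSNR is computed with autocast disabled so the metric stays in fp32
+# even when the surrounding training step runs under AMP.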
+@th.autocast(device_type='cuda', dtype=th.float16, enabled=False)
+def psnr(input, target, max_val):
+ return kornia.metrics.psnr(input, target, max_val)
+
+
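+# Earth Mover's Distance between two point clouds; the emd CUDA
+# extension is imported inside the function so this module can be
+# imported without it.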
+def calc_emd(output, gt, eps=0.005, iterations=50):
+ import utils.emd.emd_module as emd
+ emd_loss = emd.emdModule()
+ dist, _ = emd_loss(output, gt, eps, iterations)
+ emd_out = torch.sqrt(dist).mean(1)
+ return emd_out
+
+
+class TrainLoop3DRecNV(TrainLoop3DRec):
+ # supervise the training of novel view
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+ self.rec_cano = True
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # return super().forward_backward(batch, *args, **kwargs)
+
+ self.mp_trainer_rec.zero_grad()
+ batch_size = batch['img_to_encoder'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ # st()
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ # ! TODO (next version): concat novel views; also add self-reconstruction and patch-based losses. Verify novel-view prediction first.
+
+ # wrap forward within amp
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ target_nvs = {}
+ target_cano = {}
+
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ pred = self.rec_model(
+ latent=latent,
+ c=micro['nv_c'], # predict novel view here
+ behaviour='triplane_dec')
+
+ for k, v in micro.items():
+ if k[:2] == 'nv':
+ orig_key = k.replace('nv_', '')
+ target_nvs[orig_key] = v
+ target_cano[orig_key] = micro[orig_key]
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, fg_mask = self.loss_class(
+ pred,
+ target_nvs,
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None)
+ log_rec3d_loss_dict(loss_dict)
+
+ if self.rec_cano:
+
+ pred_cano = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ with self.rec_model.no_sync(): # type: ignore
+
+ fg_mask = target_cano['depth_mask'].unsqueeze(
+ 1).repeat_interleave(3, 1).float()
+
+ loss_cano, loss_cano_dict = self.loss_class.calc_2d_rec_loss(
+ pred_cano['image_raw'],
+ target_cano['img'],
+ fg_mask,
+ step=self.step + self.resume_step,
+ test_mode=False,
+ )
+
+ loss = loss + loss_cano
+
+ # remove redundant log
+ log_rec3d_loss_dict({
+ f'cano_{k}': v
+ for k, v in loss_cano_dict.items()
+ # if "loss" in k
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ if self.rec_cano:
+ self.log_img(micro, pred, pred_cano)
+ else:
+ self.log_img(micro, pred, None)
+
+ @th.inference_mode()
+ def log_img(self, micro, pred, pred_cano):
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
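+ # Grid layout: a GT row (img | depth | zeros) stacked above prediction
+ # rows (img | depth | mask); columns are concatenated along W, rows
+ # along H, then tiled over the batch by make_grid.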
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # infer novel view also
+ # if self.loss_class.opt.symmetry_loss:
+ # pred_nv_img = nvs_pred
+ # else:
+ # ! replace with novel view prediction
+
+ # ! log another novel-view prediction
+ # pred_nv_img = self.rec_model(
+ # img=micro['img_to_encoder'],
+ # c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ fg_mask = pred['image_mask'] * 2 - 1 # [0, 1] -> [-1, 1]
+ input_fg_mask = pred_cano['image_mask'] * 2 - 1 # [0, 1] -> [-1, 1]
+ if 'image_depth' in pred:
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_nv_depth = norm_depth(pred_cano['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_nv_depth = th.zeros_like(gt_depth)
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat([self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat([self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat([self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat([self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat([self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat([self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_64(gt_img)
+ gt_depth = self.pool_64(gt_depth)
+
+ pred_vis = th.cat([
+ pred_img,
+ pred_depth.repeat_interleave(3, dim=1),
+ fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ pred_cano['image_raw'],
+ pred_nv_depth.repeat_interleave(3, dim=1),
+ input_fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat([pred_vis, pred_vis_nv], dim=-2) # cat in H dim
+
+ gt_vis = th.cat([
+ gt_img,
+ gt_depth.repeat_interleave(3, dim=1),
+ th.zeros_like(gt_img)
+ ],
+ dim=-1) # TODO: depth may fail to load; range [0, 1]
+
+ if 'conf_sigma' in pred:
+ gt_vis = th.cat([gt_vis, fg_mask], dim=-1) # placeholder
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+ vis_tensor = torchvision.utils.make_grid(vis, nrow=vis.shape[-1] //
+ 64) # CHW grid
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ value_range=(-1, 1),
+ normalize=True)
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ logger.log('log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ # self.writer.add_image(f'images',
+ # vis,
+ # self.step + self.resume_step,
+ # dataformats='HWC')
+
+
+# return pred
+
+
+class TrainLoop3DRecNVPatch(TrainLoop3DRecNV):
+ # add patch rendering
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+ # the renderer
+ self.eg3d_model = self.rec_model.module.decoder.triplane_decoder # type: ignore
+ # self.rec_cano = False
+ self.rec_cano = True
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # add patch sampling
+
+ self.mp_trainer_rec.zero_grad()
+ batch_size = batch['img_to_encoder'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ # ! sample rendering patch
+ target = {
+ **self.eg3d_model(
+ c=micro['nv_c'], # type: ignore
+ ws=None,
+ planes=None,
+ sample_ray_only=True,
+ fg_bbox=micro['nv_bbox']), # rays o / dir
+ }
+
+ patch_rendering_resolution = self.eg3d_model.rendering_kwargs[
+ 'patch_rendering_resolution'] # type: ignore
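+ # Pre-allocate patch-sized target buffers (empty_like keeps dtype and
+ # device); the per-sample crops below fill them with novel-view pixels.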
+ cropped_target = {
+ k:
+ th.empty_like(v)
+ [..., :patch_rendering_resolution, :patch_rendering_resolution]
+ if k not in [
+ 'ins_idx', 'img_to_encoder', 'img_sr', 'nv_img_to_encoder',
+ 'nv_img_sr', 'c'
+ ] else v
+ for k, v in micro.items()
+ }
+
+ # crop according to uv sampling
+ for j in range(micro['img'].shape[0]):
+ top, left, height, width = target['ray_bboxes'][
+ j] # list of tuple
+ # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ for key in ('img', 'depth_mask', 'depth'): # type: ignore
+ # target[key][i:i+1] = torchvision.transforms.functional.crop(
+ # cropped_target[key][
+ # j:j + 1] = torchvision.transforms.functional.crop(
+ # micro[key][j:j + 1], top, left, height, width)
+
+ cropped_target[f'{key}'][ # ! no nv_ here
+ j:j + 1] = torchvision.transforms.functional.crop(
+ micro[f'nv_{key}'][j:j + 1], top, left, height,
+ width)
+
+ # target.update(cropped_target)
+
+ # wrap forward within amp
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ # target_nvs = {}
+ # target_cano = {}
+
+ latent = self.rec_model(img=micro['img_to_encoder'],
+ behaviour='enc_dec_wo_triplane')
+
+ pred_nv = self.rec_model(
+ latent=latent,
+ c=micro['nv_c'], # predict novel view here
+ behaviour='triplane_dec',
+ ray_origins=target['ray_origins'],
+ ray_directions=target['ray_directions'],
+ )
+
+ # ! directly retrieve from target
+ # for k, v in target.items():
+ # if k[:2] == 'nv':
+ # orig_key = k.replace('nv_', '')
+ # target_nvs[orig_key] = v
+ # target_cano[orig_key] = target[orig_key]
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, _ = self.loss_class(pred_nv,
+ cropped_target,
+ step=self.step +
+ self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None)
+ log_rec3d_loss_dict(loss_dict)
+
+ if self.rec_cano:
+
+ cano_target = {
+ **self.eg3d_model(
+ c=micro['c'], # type: ignore
+ ws=None,
+ planes=None,
+ sample_ray_only=True,
+ fg_bbox=micro['bbox']), # rays o / dir
+ }
+
+ cano_cropped_target = {
+ k: th.empty_like(v)
+ for k, v in cropped_target.items()
+ }
+
+ for j in range(micro['img'].shape[0]):
+ top, left, height, width = cano_target['ray_bboxes'][
+ j] # list of tuple
+ # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ for key in ('img', 'depth_mask',
+ 'depth'): # type: ignore
+ # target[key][i:i+1] = torchvision.transforms.functional.crop(
+ cano_cropped_target[key][
+ j:j +
+ 1] = torchvision.transforms.functional.crop(
+ micro[key][j:j + 1], top, left, height,
+ width)
+
+ # cano_target.update(cano_cropped_target)
+
+ pred_cano = self.rec_model(
+ latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec',
+ ray_origins=cano_target['ray_origins'],
+ ray_directions=cano_target['ray_directions'],
+ )
+
+ with self.rec_model.no_sync(): # type: ignore
+
+ fg_mask = cano_cropped_target['depth_mask'].unsqueeze(
+ 1).repeat_interleave(3, 1).float()
+
+ loss_cano, loss_cano_dict = self.loss_class.calc_2d_rec_loss(
+ pred_cano['image_raw'],
+ cano_cropped_target['img'],
+ fg_mask,
+ step=self.step + self.resume_step,
+ test_mode=False,
+ )
+
+ loss = loss + loss_cano
+
+ # remove redundant log
+ log_rec3d_loss_dict({
+ f'cano_{k}': v
+ for k, v in loss_cano_dict.items()
+ # if "loss" in k
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ self.log_patch_img(cropped_target, pred_nv, pred_cano)
+
+ @th.inference_mode()
+ def log_patch_img(self, micro, pred, pred_cano):
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # infer novel view also
+ # if self.loss_class.opt.symmetry_loss:
+ # pred_nv_img = nvs_pred
+ # else:
+ # ! replace with novel view prediction
+
+ # ! log another novel-view prediction
+ # pred_nv_img = self.rec_model(
+ # img=micro['img_to_encoder'],
+ # c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ fg_mask = pred['image_mask'] * 2 - 1 # [0, 1] -> [-1, 1]
+ input_fg_mask = pred_cano['image_mask'] * 2 - 1 # [0, 1] -> [-1, 1]
+ if 'image_depth' in pred:
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_cano_depth = norm_depth(pred_cano['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_cano_depth = th.zeros_like(gt_depth)
+
+ # if 'image_sr' in pred:
+ # if pred['image_sr'].shape[-1] == 512:
+ # pred_img = th.cat([self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat([self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ # elif pred['image_sr'].shape[-1] == 256:
+ # pred_img = th.cat([self.pool_256(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat([self.pool_256(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_256(pred_depth)
+ # gt_depth = self.pool_256(gt_depth)
+
+ # else:
+ # pred_img = th.cat([self.pool_128(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat([self.pool_128(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # gt_depth = self.pool_128(gt_depth)
+ # pred_depth = self.pool_128(pred_depth)
+ # else:
+ # gt_img = self.pool_64(gt_img)
+ # gt_depth = self.pool_64(gt_depth)
+
+ pred_vis = th.cat([
+ pred_img,
+ pred_depth.repeat_interleave(3, dim=1),
+ fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ pred_cano['image_raw'],
+ pred_cano_depth.repeat_interleave(3, dim=1),
+ input_fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat([pred_vis, pred_vis_nv], dim=-2) # cat in H dim
+
+ gt_vis = th.cat([
+ gt_img,
+ gt_depth.repeat_interleave(3, dim=1),
+ th.zeros_like(gt_img)
+ ],
+ dim=-1) # TODO: depth may fail to load; range [0, 1]
+
+ # if 'conf_sigma' in pred:
+ # gt_vis = th.cat([gt_vis, fg_mask], dim=-1) # placeholder
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # st()
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+ vis_tensor = torchvision.utils.make_grid(vis, nrow=vis.shape[-1] //
+ 64) # CHW grid
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ value_range=(-1, 1),
+ normalize=True)
+
+ logger.log('log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ # self.writer.add_image(f'images',
+ # vis,
+ # self.step + self.resume_step,
+ # dataformats='HWC')
+
+
+class TrainLoop3DRecNVPatchSingleForward(TrainLoop3DRecNVPatch):
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # add patch sampling
+
+ self.mp_trainer_rec.zero_grad()
+ batch_size = batch['img_to_encoder'].shape[0]
+
+ batch.pop('caption') # not required
+ batch.pop('ins') # not required
+ batch.pop('nv_caption') # not required
+ batch.pop('nv_ins') # not required
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v[i:i + self.microbatch]
+ for k, v in batch.items()
+ }
+
+ # ! sample rendering patch
+ target = {
+ **self.eg3d_model(
+ c=micro['nv_c'], # type: ignore
+ ws=None,
+ planes=None,
+ sample_ray_only=True,
+ fg_bbox=micro['nv_bbox']), # rays o / dir
+ }
+
+ patch_rendering_resolution = self.eg3d_model.rendering_kwargs[
+ 'patch_rendering_resolution'] # type: ignore
+ cropped_target = {
+ k:
+ th.empty_like(v)
+ [..., :patch_rendering_resolution, :patch_rendering_resolution]
+ if k not in [
+ 'ins_idx', 'img_to_encoder', 'img_sr', 'nv_img_to_encoder',
+ 'nv_img_sr', 'c', 'caption', 'nv_caption'
+ ] else v
+ for k, v in micro.items()
+ }
+
+ # crop according to uv sampling
+ for j in range(micro['img'].shape[0]):
+ top, left, height, width = target['ray_bboxes'][
+ j] # list of tuple
+ # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ for key in ('img', 'depth_mask', 'depth'): # type: ignore
+ # target[key][i:i+1] = torchvision.transforms.functional.crop(
+ # cropped_target[key][
+ # j:j + 1] = torchvision.transforms.functional.crop(
+ # micro[key][j:j + 1], top, left, height, width)
+
+ cropped_target[f'{key}'][ # ! no nv_ here
+ j:j + 1] = torchvision.transforms.functional.crop(
+ micro[f'nv_{key}'][j:j + 1], top, left, height,
+ width)
+
+ # ! cano view loss
+ # cano_target = {
+ # **self.eg3d_model(
+ # c=micro['c'], # type: ignore
+ # ws=None,
+ # planes=None,
+ # sample_ray_only=True,
+ # fg_bbox=micro['bbox']), # rays o / dir
+ # }
+
+ # cano_cropped_target = {
+ # k: th.empty_like(v)
+ # for k, v in cropped_target.items()
+ # }
+
+ # for j in range(micro['img'].shape[0]):
+ # top, left, height, width = cano_target['ray_bboxes'][
+ # j] # list of tuple
+ # # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ # for key in ('img', 'depth_mask', 'depth'): # type: ignore
+ # # target[key][i:i+1] = torchvision.transforms.functional.crop(
+ # cano_cropped_target[key][
+ # j:j + 1] = torchvision.transforms.functional.crop(
+ # micro[key][j:j + 1], top, left, height, width)
+
+ # ! vit no amp
+ latent = self.rec_model(img=micro['img_to_encoder'].to(self.dtype),
+ behaviour='enc_dec_wo_triplane')
+
+ # wrap forward within amp
+ with th.autocast(device_type='cuda',
+ # dtype=th.float16,
+ dtype=th.bfloat16, # bf16 avoids the NaNs seen with fp16
+ enabled=self.mp_trainer_rec.use_amp):
+
+ # c = th.cat([micro['nv_c'], micro['c']]), # predict novel view here
+ # c = th.cat([micro['nv_c'].repeat(3, 1), micro['c']]), # predict novel view here
+ instance_mv_num = batch_size // 4 # 4 pairs by default
+ # instance_mv_num = 4
+ # ! roll views for multi-view supervision
+ c = th.cat([
+ micro['nv_c'].roll(instance_mv_num * i, dims=0)
+ for i in range(1, 4)
+ ]
+ # + [micro['c']]
+ ) # predict novel view here
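+ # Rolling nv_c by multiples of instance_mv_num re-pairs each latent
+ # with target views from other batch positions, yielding three
+ # supervision views per latent (assumes the batch is laid out as four
+ # view-pairs per instance, matching instance_mv_num above).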
+
+ ray_origins = th.cat(
+ [
+ target['ray_origins'].roll(instance_mv_num * i, dims=0)
+ for i in range(1, 4)
+ ]
+ # + [cano_target['ray_origins'] ]
+ ,
+ 0)
+
+ ray_directions = th.cat([
+ target['ray_directions'].roll(instance_mv_num * i, dims=0)
+ for i in range(1, 4)
+ ]
+ # + [cano_target['ray_directions'] ]
+ )
+
+ pred_nv_cano = self.rec_model(
+ # latent=latent.expand(2,),
+ latent={
+ 'latent_after_vit': # ! triplane for rendering
+ # latent['latent_after_vit'].repeat(2, 1, 1, 1)
+ latent['latent_after_vit'].repeat(3, 1, 1, 1)
+ },
+ c=c,
+ behaviour='triplane_dec',
+ # ray_origins=target['ray_origins'],
+ # ray_directions=target['ray_directions'],
+ ray_origins=ray_origins,
+ ray_directions=ray_directions,
+ )
+
+ pred_nv_cano.update(
+ latent
+ ) # torchvision.utils.save_image(pred_nv_cano['image_raw'], 'pred.png', normalize=True)
+ # gt = {
+ # k: th.cat([v, cano_cropped_target[k]], 0)
+ # for k, v in cropped_target.items()
+ # }
+ gt = {
+ k:
+ th.cat(
+ [
+ v.roll(instance_mv_num * i, dims=0)
+ for i in range(1, 4)
+ ]
+ # + [cano_cropped_target[k] ]
+ ,
+ 0)
+ for k, v in cropped_target.items()
+ } # torchvision.utils.save_image(gt['img'], 'gt.png', normalize=True)
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, _ = self.loss_class(
+ pred_nv_cano,
+ gt, # prepare merged data
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None)
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(loss)
+
+ # for name, p in self.rec_model.named_parameters():
+ # if p.grad is None:
+ # logger.log(f"found rec unused param: {name}")
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ micro_bs = micro['img_to_encoder'].shape[0]
+ self.log_patch_img( # record one cano view and one novel view
+ cropped_target,
+ {
+ k: pred_nv_cano[k][-micro_bs:]
+ for k in ['image_raw', 'image_depth', 'image_mask']
+ },
+ {
+ k: pred_nv_cano[k][:micro_bs]
+ for k in ['image_raw', 'image_depth', 'image_mask']
+ },
+ )
+
+ def eval_loop(self):
+ return super().eval_loop()
+
+ @th.inference_mode()
+ # def eval_loop(self, c_list:list):
+ def eval_novelview_loop_old(self, camera=None):
+ # novel view synthesis given evaluation camera trajectory
+
+ all_loss_dict = []
+ novel_view_micro = {}
+
+ # ! run inference on randomly sampled instances
+
+ export_mesh = True
+ if export_mesh:
+ Path(f'{logger.get_dir()}/FID_Cals/').mkdir(parents=True,
+ exist_ok=True)
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+
+ batch = {}
+ # if camera is not None:
+ # # batch['c'] = camera.to(batch['c'].device())
+ # batch['c'] = camera.clone()
+ # else:
+ # batch =
+
+ for eval_idx, render_reference in enumerate(tqdm(self.eval_data)):
+
+ if eval_idx > 500:
+ break
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}_{eval_idx}.mp4',
+ mode='I',
+ fps=25,
+ codec='libx264')
+
+ with open(
+ f'{logger.get_dir()}/triplane_{self.step+self.resume_step}_{eval_idx}_caption.txt',
+ 'w') as f:
+ f.write(render_reference['caption'])
+
+ for key in ['ins', 'bbox', 'caption']:
+ if key in render_reference:
+ render_reference.pop(key)
+
+ real_flag = False
+ mv_flag = False # TODO: use the full instance for evaluation and compute the metrics?
+ if render_reference['c'].shape[:2] == (1, 40):
+ real_flag = True
+ # real img monocular reconstruction
+ # compat list for enumerate
+ render_reference = [{
+ k: v[0][idx:idx + 1]
+ for k, v in render_reference.items()
+ } for idx in range(40)]
+
+ elif render_reference['c'].shape[0] == 8:
+ mv_flag = True
+
+ render_reference = {
+ k: v[:4]
+ for k, v in render_reference.items()
+ }
+
+ # save gt
+ torchvision.utils.save_image(
+ render_reference[0:4]['img'],
+ logger.get_dir() + '/FID_Cals/{}_inp.png'.format(eval_idx),
+ padding=0,
+ normalize=True,
+ value_range=(-1, 1),
+ )
+ # torchvision.utils.save_image(render_reference[4:8]['img'],
+ # logger.get_dir() + '/FID_Cals/{}_inp2.png'.format(eval_idx),
+ # padding=0,
+ # normalize=True,
+ # value_range=(-1,1),
+ # )
+
+ else:
+ # compat list for enumerate
+ # st()
+ render_reference = [{
+ k: v[idx:idx + 1]
+ for k, v in render_reference.items()
+ } for idx in range(40)]
+
+ # ! single-view version
+ render_reference[0]['img_to_encoder'] = render_reference[14][
+ 'img_to_encoder'] # encode side view
+ render_reference[0]['img'] = render_reference[14][
+ 'img'] # encode side view
+
+ # save gt
+ torchvision.utils.save_image(
+ render_reference[0]['img'],
+ logger.get_dir() + '/FID_Cals/{}_gt.png'.format(eval_idx),
+ padding=0,
+ normalize=True,
+ value_range=(-1, 1))
+
+ # ! TODO, merge with render_video_given_triplane later
+ for i, batch in enumerate(render_reference):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ # st()
+ if i == 0:
+ if mv_flag:
+ novel_view_micro = None
+ else:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ # v[14:15].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0],
+ 0) if isinstance(v, th.Tensor) else v[0:1]
+ for k, v in batch.items()
+ }
+
+ else:
+ if i == 1:
+
+ # ! output mesh
+ if export_mesh:
+
+ # ! get planes first
+ # self.latent_name = 'latent_normalized' # normalized triplane latent
+
+ # ddpm_latent = {
+ # self.latent_name: planes,
+ # }
+ # ddpm_latent.update(self.rec_model(latent=ddpm_latent, behaviour='decode_after_vae_no_render'))
+
+ # mesh_size = 512
+ # mesh_size = 256
+ mesh_size = 384
+ # mesh_size = 320
+ # mesh_thres = 3 # TODO, requires tuning
+ # mesh_thres = 5 # TODO, requires tuning
+ mesh_thres = 10 # TODO, requires tuning
+ import mcubes
+ import trimesh
+ dump_path = f'{logger.get_dir()}/mesh/'
+
+ os.makedirs(dump_path, exist_ok=True)
+
+ grid_out = self.rec_model(
+ latent=pred,
+ grid_size=mesh_size,
+ behaviour='triplane_decode_grid',
+ )
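+ # Run marching cubes on the decoded density grid; vertex coordinates
+ # are rescaled from voxel indices to the [-1, 1] cube below.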
+
+ vtx, faces = mcubes.marching_cubes(
+ grid_out['sigma'].squeeze(0).squeeze(
+ -1).cpu().numpy(), mesh_thres)
+ vtx = vtx / (mesh_size - 1) * 2 - 1
+
+ # vtx_tensor = th.tensor(vtx, dtype=th.float32, device=dist_util.dev()).unsqueeze(0)
+ # vtx_colors = self.model.synthesizer.forward_points(planes, vtx_tensor)['rgb'].squeeze(0).cpu().numpy() # (0, 1)
+ # vtx_colors = (vtx_colors * 255).astype(np.uint8)
+
+ # mesh = trimesh.Trimesh(vertices=vtx, faces=faces, vertex_colors=vtx_colors)
+ mesh = trimesh.Trimesh(
+ vertices=vtx,
+ faces=faces,
+ )
+
+ mesh_dump_path = os.path.join(
+ dump_path, f'{eval_idx}.ply')
+ mesh.export(mesh_dump_path, 'ply')
+
+ print(f"Mesh dumped to {dump_path}")
+ del grid_out, mesh
+ th.cuda.empty_cache()
+ # return
+ # st()
+
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ pred = self.rec_model(img=novel_view_micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ # if not export_mesh:
+ if not real_flag:
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ # pool the depth once up front so the export_mesh dump below always
+ # has pooled_depth defined, whichever branch runs
+ pooled_depth = self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ else:
+ # pred_vis = th.cat([
+ # self.pool_64(micro['img']), pred['image_raw'],
+ # pred_depth.repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat(
+ [
+ # self.pool_128(micro['img']),
+ self.pool_128(novel_view_micro['img']
+ ), # use the input here
+ self.pool_128(pred['image_raw']),
+ pooled_depth,
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ if export_mesh:
+ # save image
+ torchvision.utils.save_image(
+ pred['image_raw'],
+ logger.get_dir() +
+ '/FID_Cals/{}_{}.png'.format(eval_idx, i),
+ padding=0,
+ normalize=True,
+ value_range=(-1, 1))
+
+ torchvision.utils.save_image(
+ pooled_depth,
+ logger.get_dir() +
+ '/FID_Cals/{}_{}_depth.png'.format(eval_idx, i),
+ padding=0,
+ normalize=True,
+ value_range=(0, 1))
+
+ # st()
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ # if not export_mesh:
+ if not real_flag or mv_flag:
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores_novelview.json'),
+ 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/NovelView/{k}', v,
+ self.step + self.resume_step)
+
+ del video_out
+ # del pred_vis
+ # del pred
+
+ th.cuda.empty_cache()
+
+ @th.inference_mode()
+ # def eval_loop(self, c_list:list):
+ def eval_novelview_loop(self, camera=None, save_latent=False):
+ # novel view synthesis given evaluation camera trajectory
+ if save_latent: # for diffusion learning
+ latent_dir = Path(f'{logger.get_dir()}/latent_dir')
+ latent_dir.mkdir(exist_ok=True, parents=True)
+
+ # wds_path = os.path.join(logger.get_dir(), 'latent_dir',
+ # f'wds-%06d.tar')
+ # sink = wds.ShardWriter(wds_path, start_shard=0)
+
+ # eval_batch_size = 20
+ # eval_batch_size = 1
+ eval_batch_size = 40 # ! for i23d
+
+ latent_rec_statistics = False
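+ # When enabled, four reference views are re-rendered from the encoded
+ # latent and a per-instance reconstruction PSNR is logged to json
+ # instead of dumping videos.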
+
+ for eval_idx, micro in enumerate(tqdm(self.eval_data)):
+
+ # if eval_idx > 500:
+ # break
+
+ latent = self.rec_model(
+ img=micro['img_to_encoder'],
+ behaviour='encoder_vae') # pred: (B, 3, 64, 64)
+ # torchvision.utils.save_image(micro['img'], 'inp.jpg')
+ if micro['img'].shape[0] == 40:
+ assert eval_batch_size == 40
+
+ if save_latent:
+ # np.save(f'{logger.get_dir()}/latent_dir/{eval_idx}.npy', latent[self.latent_name].cpu().numpy())
+
+ latent_save_dir = f'{logger.get_dir()}/latent_dir/{micro["ins"][0]}'
+ Path(latent_save_dir).mkdir(parents=True, exist_ok=True)
+
+ np.save(f'{latent_save_dir}/latent.npy',
+ latent[self.latent_name][0].cpu().numpy())
+ assert all([
+ micro['ins'][0] == micro['ins'][i]
+ for i in range(micro['c'].shape[0])
+ ]) # ! assert same instance
+
+ # for i in range(micro['img'].shape[0]):
+
+ # compressed_sample = {
+ # 'latent':latent[self.latent_name][0].cpu().numpy(), # 12 32 32
+ # 'caption': micro['caption'][0].encode('utf-8'),
+ # 'ins': micro['ins'][0].encode('utf-8'),
+ # 'c': micro['c'][i].cpu().numpy(),
+ # 'img': micro['img'][i].cpu().numpy() # 128x128, for diffusion log
+ # }
+
+ # sink.write({
+ # "__key__": f"sample_{eval_idx*eval_batch_size+i:07d}",
+ # 'sample.pyd': compressed_sample
+ # })
+
+ if latent_rec_statistics:
+ gen_imgs = self.render_video_given_triplane(
+ latent[self.latent_name],
+ self.rec_model, # compatible with join_model
+ name_prefix=f'{self.step + self.resume_step}_{eval_idx}',
+ save_img=False,
+ render_reference={'c': micro['c']},
+ save_mesh=False,
+ render_reference_length=4,
+ return_gen_imgs=True)
+ rec_psnr = psnr((micro['img'] / 2 + 0.5),
+ (gen_imgs.cpu() / 2 + 0.5), 1.0)
+ with open(
+ os.path.join(logger.get_dir(),
+ 'four_view_rec_psnr.json'), 'a') as f:
+ json.dump(
+ {
+ f'{eval_idx}': {
+ 'ins': micro["ins"][0],
+ 'psnr': rec_psnr.item(),
+ }
+ }, f)
+ # save to json
+
+ elif eval_idx < 30:
+ # if False:
+ self.render_video_given_triplane(
+ latent[self.latent_name],
+ self.rec_model, # compatible with join_model
+ name_prefix=f'{self.step + self.resume_step}_{micro["ins"][0].split("/")[0]}_{eval_idx}',
+ save_img=False,
+ render_reference={'c': camera},
+ save_mesh=True)
+
+
+class TrainLoop3DRecNVPatchSingleForwardMV(TrainLoop3DRecNVPatchSingleForward):
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+
+ def forward_backward(self, batch, behaviour='g_step', *args, **kwargs):
+ # add patch sampling
+
+ if behaviour == 'g_step':
+ self.mp_trainer_rec.zero_grad()
+ else:
+ self.mp_trainer_disc.zero_grad()
+
+ batch_size = batch['img_to_encoder'].shape[0]
+
+ batch.pop('caption') # not required
+ batch.pop('ins') # not required
+ batch.pop('nv_caption') # not required
+ batch.pop('nv_ins') # not required
+
+ if '__key__' in batch.keys():
+ batch.pop('__key__')
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k:
+ v[i:i + self.microbatch].to(dist_util.dev()) if isinstance(
+ v, th.Tensor) else v[i:i + self.microbatch]
+ for k, v in batch.items()
+ }
+
+ # ! sample rendering patch
+ # nv_c = th.cat([micro['nv_c'], micro['c']])
+ nv_c = th.cat([micro['nv_c'], micro['c']])
+ # nv_c = micro['nv_c']
+ target = {
+ **self.eg3d_model(
+ c=nv_c, # type: ignore
+ ws=None,
+ planes=None,
+ sample_ray_only=True,
+ fg_bbox=th.cat([micro['nv_bbox'], micro['bbox']])), # rays o / dir
+ }
+
+ patch_rendering_resolution = self.eg3d_model.rendering_kwargs[
+ 'patch_rendering_resolution'] # type: ignore
+ cropped_target = {
+ k:
+ th.empty_like(v).repeat_interleave(2, 0)
+ # th.empty_like(v).repeat_interleave(1, 0)
+ [..., :patch_rendering_resolution, :patch_rendering_resolution]
+ if k not in [
+ 'ins_idx', 'img_to_encoder', 'img_sr', 'nv_img_to_encoder',
+ 'nv_img_sr', 'c', 'caption', 'nv_caption'
+ ] else v
+ for k, v in micro.items()
+ }
+
+ # crop according to uv sampling
+ for j in range(2 * self.microbatch):
+ top, left, height, width = target['ray_bboxes'][
+ j] # list of tuple
+ # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ for key in ('img', 'depth_mask', 'depth'): # type: ignore
+
+ if j < self.microbatch:
+ cropped_target[f'{key}'][ # ! no nv_ here
+ j:j + 1] = torchvision.transforms.functional.crop(
+ micro[f'nv_{key}'][j:j + 1], top, left, height,
+ width)
+ else:
+ cropped_target[f'{key}'][ # ! no nv_ here
+ j:j + 1] = torchvision.transforms.functional.crop(
+ micro[f'{key}'][j - self.microbatch:j -
+ self.microbatch + 1], top,
+ left, height, width)
+
+ # for j in range(batch_size, 2*batch_size, 1):
+ # top, left, height, width = target['ray_bboxes'][
+ # j] # list of tuple
+ # # for key in ('img', 'depth_mask', 'depth', 'depth_mask_sr'): # type: ignore
+ # for key in ('img', 'depth_mask', 'depth'): # type: ignore
+
+ # cropped_target[f'{key}'][ # ! no nv_ here
+ # j:j + 1] = torchvision.transforms.functional.crop(
+ # micro[f'{key}'][j-batch_size:j-batch_size + 1], top, left, height,
+ # width)
+
+ # wrap forward within amp
+ with th.autocast(device_type='cuda',
+ dtype=self.dtype,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ # c = th.cat([micro['nv_c'], micro['c']]), # predict novel view here
+ # c = th.cat([micro['nv_c'].repeat(3, 1), micro['c']]), # predict novel view here
+ # instance_mv_num = batch_size // 4 # 4 pairs by default
+ # instance_mv_num = 4
+ # ! roll views for multi-view supervision
+ # c = micro['nv_c']
+
+ # ! vit no amp
+ latent = self.rec_model(img=micro['img_to_encoder'].to(self.dtype),
+ behaviour='enc_dec_wo_triplane')
+
+ # # ! disable amp in rendering and loss
+ # with th.autocast(device_type='cuda',
+ # dtype=th.float16,
+ # enabled=False):
+
+ ray_origins = target['ray_origins']
+ ray_directions = target['ray_directions']
+
+ pred_nv_cano = self.rec_model(
+ # latent=latent.expand(2,),
+ latent={
+ 'latent_after_vit': # ! triplane for rendering
+ # latent['latent_after_vit'].repeat_interleave(4, dim=0).repeat(2,1,1,1) # NV=4
+ latent['latent_after_vit'].repeat_interleave(6, dim=0).repeat(2,1,1,1) # NV=6
+ # latent['latent_after_vit'].repeat_interleave(10, dim=0).repeat(2,1,1,1) # NV=4
+ # latent['latent_after_vit'].repeat_interleave(8, dim=0) # NV=4
+ },
+ c=nv_c,
+ behaviour='triplane_dec',
+ ray_origins=ray_origins,
+ ray_directions=ray_directions,
+ )
+
+ pred_nv_cano.update(
+ latent
+ ) # torchvision.utils.save_image(pred_nv_cano['image_raw'], 'pred.png', normalize=True)
+ gt = cropped_target
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, _ = self.loss_class(
+ pred_nv_cano,
+ gt, # prepare merged data
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ behaviour=behaviour,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None,
+ # dtype=self.dtype
+ )
+ log_rec3d_loss_dict(loss_dict)
+
+ if behaviour == 'g_step':
+ self.mp_trainer_rec.backward(loss)
+ else:
+ self.mp_trainer_disc.backward(loss)
+
+ # for name, p in self.rec_model.named_parameters():
+ # if p.grad is None:
+ # logger.log(f"found rec unused param: {name}")
+ # torchvision.utils.save_image(cropped_target['img'], 'gt.png', normalize=True)
+ # torchvision.utils.save_image( pred_nv_cano['image_raw'], 'pred.png', normalize=True)
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0 and i == 0 and behaviour == 'g_step':
+ try:
+ torchvision.utils.save_image(
+ th.cat(
+ [cropped_target['img'], pred_nv_cano['image_raw']
+ ], ),
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ normalize=True, nrow=6*2)
+
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+ except Exception as e:
+ logger.log(e)
+
+ # micro_bs = micro['img_to_encoder'].shape[0]
+ # self.log_patch_img( # record one cano view and one novel view
+ # cropped_target,
+ # {
+ # k: pred_nv_cano[k][0:1]
+ # for k in ['image_raw', 'image_depth', 'image_mask']
+ # },
+ # {
+ # k: pred_nv_cano[k][1:2]
+ # for k in ['image_raw', 'image_depth', 'image_mask']
+ # },
+ # )
+
+ # def save(self):
+ # return super().save()
+
+
+class TrainLoop3DRecNVPatchSingleForwardMVAdvLoss(
+ TrainLoop3DRecNVPatchSingleForwardMV):
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+
+ # create discriminator
+ disc_params = self.loss_class.get_trainable_parameters()
+
+ self.mp_trainer_disc = MixedPrecisionTrainer(
+ model=self.loss_class.discriminator,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='disc',
+ use_amp=use_amp,
+ model_params=disc_params)
+
+ # st() # check self.lr
+ self.opt_disc = AdamW(
+ self.mp_trainer_disc.master_params,
+ lr=self.lr, # follow sd code base
+ betas=(0, 0.999),
+ eps=1e-8)
+
+ # TODO, is loss cls already in the DDP?
+ if self.use_ddp:
+ self.ddp_disc = DDP(
+ self.loss_class.discriminator,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ self.ddp_disc = self.loss_class.discriminator
+
+ # def run_st
+
+ # def run_step(self, batch, *args):
+ # self.forward_backward(batch)
+ # took_step = self.mp_trainer_rec.optimize(self.opt)
+ # if took_step:
+ # self._update_ema()
+ # self._anneal_lr()
+ # self.log_step()
+
+ def save(self, mp_trainer=None, model_name='rec'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, mp_trainer.master_params)
+
+ dist.barrier()
+
+ # ! load disc
+ def _load_and_sync_parameters(self, submodule_name=''):
+ super()._load_and_sync_parameters(submodule_name)
+ # load disc
+
+ resume_checkpoint = self.resume_checkpoint.replace(
+ 'rec', 'disc') # * default behaviour
+ if os.path.exists(resume_checkpoint):
+ if dist_util.get_rank() == 0:
+ logger.log(
+ f"loading disc model from checkpoint: {resume_checkpoint}..."
+ )
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ model_state_dict = self.loss_class.discriminator.state_dict()
+
+ for k, v in resume_state_dict.items():
+ if k in model_state_dict.keys():
+ if v.size() == model_state_dict[k].size():
+ model_state_dict[k] = v
+ # model_state_dict[k].copy_(v)
+ else:
+ logger.log('!!!! partially load: ', k, ": ",
+ v.size(), "state_dict: ",
+ model_state_dict[k].size())
+
+ if dist_util.get_world_size() > 1:
+ # dist_util.sync_params(self.model.named_parameters())
+ dist_util.sync_params(
+ self.loss_class.get_trainable_parameters())
+ logger.log('synced disc params')
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+
+ if step == 'g_step':
+ self.forward_backward(batch, behaviour='g_step')
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step':
+ self.forward_backward(batch, behaviour='d_step')
+ _ = self.mp_trainer_disc.optimize(self.opt_disc)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self, batch=None):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
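+ # Alternate adversarial training: one generator (reconstruction) step,
+ # then one discriminator step, each on a fresh batch.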
+ batch = next(self.data)
+ self.run_step(batch, 'g_step')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step')
+
+ if self.step % 1000 == 0:
+ dist_util.synchronize()
+ if self.step % 5000 == 0:
+ th.cuda.empty_cache() # avoid memory leak
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ if dist_util.get_rank() == 0:
+ try:
+ self.eval_loop()
+ except Exception as e:
+ logger.log(e)
+ dist_util.synchronize()
+
+ # if self.step % self.save_interval == 0 and self.step != 0:
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_disc,
+ self.mp_trainer_disc.model_name)
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ logger.log('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step -
+ 1) % self.save_interval != 0 and self.step != 1:
+ self.save()
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ # if (self.step - 1) % self.save_interval != 0 and self.step != 1:
+ if (self.step - 1) % self.save_interval != 0:
+ self.save() # save rec
+ self.save(self.mp_trainer_disc, self.mp_trainer_disc.model_name)
+
+
+class TrainLoop3DRecNVPatchSingleForwardMV_NoCrop(
+ TrainLoop3DRecNVPatchSingleForwardMV):
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ num_frames=4,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+
+ self.num_frames = num_frames
+ self.ray_sampler = RaySampler()
+
+ print(self.opt)
+
+ # ! requires tuning
+ N = 768 # hyperparameter; currently set for the overfitting run
+ # self.scale_expected_threshold = (1 / (N/2)) ** 0.5 * 0.45
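+ # empirical per-Gaussian scale threshold, hard-coded in place of the
+ # commented heuristic above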
+ self.scale_expected_threshold = 0.0075
+ self.latent_name = 'latent_normalized' # normalized triplane latent
+
+
+ # to transform to 3dgs
+ self.gs_bg_color=th.tensor([1,1,1], dtype=th.float32, device=dist_util.dev())
+ self.post_process = PostProcess(
+ 384,
+ 384,
+ imgnet_normalize=True,
+ plucker_embedding=True,
+ decode_encode_img_only=False,
+ mv_input=True,
+ split_chunk_input=16,
+ duplicate_sample=True,
+ append_depth=False,
+ append_xyz=False,
+ gs_cam_format=True,
+ orthog_duplicate=False,
+ frame_0_as_canonical=False,
+ pcd_path='pcd_path',
+ load_pcd=True,
+ split_chunk_size=16,
+ )
+
+ self.zfar = 100.0
+ self.znear = 0.01
+
+
+ # def _init_optim_groups(self, kwargs):
+ # return super()._init_optim_groups({**kwargs, 'ignore_encoder': True}) # freeze MVEncoder to accelerate training.
+
+ def forward_backward(self, batch, behaviour='g_step', *args, **kwargs):
+ # add patch sampling
+
+ self.mp_trainer_rec.zero_grad()
+ batch_size = batch['img_to_encoder'].shape[0]
+
+ batch.pop('caption') # not required
+ ins = batch.pop('ins') # not required
+
+ if '__key__' in batch.keys():
+ batch.pop('__key__')
+
+ assert isinstance(batch['c'], dict)
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {}
+
+ for k, v in batch.items(): # grad acc
+ if isinstance(v, th.Tensor):
+ micro[k] = v[i:i + self.microbatch].to(dist_util.dev())
+ elif isinstance(v, list):
+ micro[k] = v[i:i + self.microbatch]
+ elif isinstance(v, dict): #
+ assert k in ['c', 'nv_c']
+ micro[k] = {
+ key:
+ value[i:i + self.microbatch].to(dist_util.dev()) if
+ isinstance(value, th.Tensor) else value # can be float
+ for key, value in v.items()
+ }
+
+ assert micro['img_to_encoder'].shape[1] == 15
+ micro['normal'] = micro['img_to_encoder'][:, 3:6]
+ micro['nv_normal'] = micro['nv_img_to_encoder'][:, 3:6]
+
+ # ! concat nv_c to render N+N views
+
+ indices = np.random.permutation(self.num_frames)
+ indices, indices_nv = indices[:4], indices[-4:] # make sure thorough pose coverage.
+ # indices, indices_nv = indices[:2], indices[-2:] # ! 2+2 views for supervision, as in gs-lrm.
+ # indices_nv = np.random.permutation(self.num_frames)[:6] # randomly pick 4+4 views for supervision.
+
+ # indices = np.arange(self.num_frames)
+ # indices_nv = np.arange(self.num_frames)
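+ # Four input-pose views and four novel views are drawn from one
+ # permutation; with num_frames >= 8 the two index sets are disjoint,
+ # broadening pose coverage per step.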
+
+ nv_c = {}
+ for key in micro['c'].keys():
+ if isinstance(micro['c'][key], th.Tensor):
+ nv_c[key] = th.cat([micro['c'][key][:, indices], micro['nv_c'][key][:, indices_nv]],
+ 1) # B 2V ...
+ else:
+ nv_c[key] = micro['c'][key] # float, will remove later
+
+ target = {}
+
+ for key in ('img', 'depth_mask', 'depth', 'normal',): # type: ignore
+ # st()
+ target[key] = th.cat([
+ rearrange(micro[key], '(B V) ... -> B V ...', V=self.num_frames)[:, indices],
+ rearrange(micro[f'nv_{key}'], '(B V) ... -> B V ...', V=self.num_frames)[:, indices_nv]
+ # rearrange(micro[key][:, indices], '(B V) ... -> B V ...', V=4),
+ # rearrange(micro[f'nv_{key}'][:, indices], '(B V) ... -> B V ...', V=4)
+ ], 1) # B 2*V H W
+ target[key] = rearrange(target[key],
+ 'B V ... -> (B V) ...') # concat
+
+ # st()
+
+ # wrap forward within amp
+ with th.autocast(device_type='cuda',
+ dtype=self.dtype,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ # ! vit no amp
+ # with profile(activities=[
+ # ProfilerActivity.CUDA], record_shapes=True) as prof:
+ # with record_function("get_gs"):
+ latent = self.rec_model(
+ img=micro['img_to_encoder'].to(self.dtype),
+ behaviour='enc_dec_wo_triplane',
+ c=micro['c'],
+ pcd=micro['fps_pcd'], # send in pcd for surface reference.
+ ) # send in input-view C since pixel-aligned gaussians required
+
+ # print(prof.key_averages().table(sort_by="cuda_time_total", row_limit=10))
+
+ gaussians, query_pcd_xyz = latent['gaussians'], latent['query_pcd_xyz']
+ # query_pcd_xyz = latent['query_pcd_xyz']
+
+ # if self.loss_class.opt.rand_aug_bg and random.random()>0.9:
+ if self.loss_class.opt.rand_aug_bg:
+ bg_color=torch.randint(0, 256, (3,), device=dist_util.dev()) / 255.0 # high is exclusive, so 256 keeps white reachable
+ else:
+ bg_color=torch.tensor([1,1,1], dtype=torch.float32, device=dist_util.dev())
+
+ def visualize_latent_activations(latent, b_idx=0, write=False):
+
+ def normalize_latent_plane(latent_plane):
+ avg_p1 = latent_plane.detach().cpu().numpy().mean(0, keepdims=False)
+ avg_p1 = (avg_p1 - avg_p1.min()) / (avg_p1.max() - avg_p1.min())
+ # return avg_p1
+ return ((avg_p1).clip(0,1)*255.0).astype(np.uint8)
+
+ p1, p2, p3 = (normalize_latent_plane(latent_plane) for latent_plane in (latent[b_idx, 0:4], latent[b_idx,4:8], latent[b_idx,8:12]))
+
+ if write:
+ plt.imsave(os.path.join(logger.get_dir(), f'{self.step}_{b_idx}_1.jpg'), p1)
+ plt.imsave(os.path.join(logger.get_dir(), f'{self.step}_{b_idx}_2.jpg'), p2)
+ plt.imsave(os.path.join(logger.get_dir(), f'{self.step}_{b_idx}_3.jpg'), p3)
+ # imageio.imwrite(os.path.join(logger.get_dir(), f'{b_idx}_1.jpg'), p1)
+ # imageio.imwrite(os.path.join(logger.get_dir(), f'{b_idx}_2.jpg'), p2)
+ # imageio.imwrite(os.path.join(logger.get_dir(), f'{b_idx}_3.jpg'), p3)
+
+ return p1, p2, p3
+
+
+ # with profile(activities=[ProfilerActivity.CUDA, ProfilerActivity.CPU,], record_shapes=True) as prof:
+ # # ProfilerActivity.CPU, ProfilerActivity.CUDA], record_shapes=True) as prof:
+ # with record_function("rendering"):
+
+ pred_nv_cano = self.rec_model(
+ latent=latent,
+ # latent={
+ # 'gaussians': latent['gaussians'].repeat_interleave(2,0)
+ # },
+ c=nv_c,
+ behaviour='triplane_dec',
+ bg_color=bg_color,
+ )
+
+ fine_scale_key = list(pred_nv_cano.keys())[-1]
+
+ # st()
+ fine_gaussians = latent[fine_scale_key]
+ fine_gaussians_opa = fine_gaussians[..., 3:4]
+
+ # print(prof.key_averages().table(sort_by="cuda_time_total", row_limit=20))
+
+ # st() # torchvision.utils.save_image(pred_nv_cano['image_raw'][0], 'pred.jpg', normalize=True, value_range=(-1,1))
+
+ if self.loss_class.opt.rand_aug_bg:
+ # composite GT over the random background; images live in [-1, 1], so map bg_color from [0, 1] first
+ alpha_mask = target['depth_mask'].float().unsqueeze(1) # B 1 H W
+ target['img'] = target['img'] * alpha_mask + (bg_color.reshape(1,3,1,1) * 2 - 1) * (1-alpha_mask)
+
+ target['depth_mask'] = target['depth_mask'].unsqueeze(1)
+ target['depth'] = target['depth'].unsqueeze(1)
+
+ multiscale_target = defaultdict(dict)
+ multiscale_pred = defaultdict(dict)
+
+
+ for idx, (gaussian_wavelet_key, gaussian_wavelet) in enumerate(pred_nv_cano.items()):
+ gs_output_size = pred_nv_cano[gaussian_wavelet_key]['image_raw'].shape[-1]
+ for k in gaussian_wavelet.keys():
+ pred_nv_cano[gaussian_wavelet_key][k] = rearrange(
+ gaussian_wavelet[k], 'B V ... -> (B V) ...') # match GT shape order
+
+ # if idx == 0: # only KL calculation in scale 0
+ if gaussian_wavelet_key == fine_scale_key:
+ pred_nv_cano[gaussian_wavelet_key].update(
+ {
+ k: latent[k] for k in ['posterior']
+ }
+ ) # ! for KL supervision
+
+ # ! prepare target according to the wavelet size
+ for k in target.keys():
+
+ if target[k].shape[-1] == gs_output_size: # fix: use the inner loop variable k, not the stale key
+ multiscale_target[gaussian_wavelet_key][k] = target[k]
+ else:
+
+ if k in ('depth', 'normal'):
+ mode = 'nearest'
+ else:
+ mode='bilinear'
+
+ multiscale_target[gaussian_wavelet_key][k] = F.interpolate(target[k], size=(gs_output_size, gs_output_size), mode=mode)
+
+ # st()
+
+ # st()
+ # torchvision.utils.save_image(target['img'], 'gt.jpg', normalize=True, value_range=(-1,1))
+ # torchvision.utils.save_image(pred_nv_cano['image_raw'], 'pred.jpg', normalize=True, value_range=(-1,1))
+ # torchvision.utils.save_image(micro['img'], 'inp_gt.jpg', normalize=True, value_range=(-1,1))
+ # torchvision.utils.save_image(micro['nv_img'], 'nv_gt.jpg', normalize=True, value_range=(-1,1))
+
+ # if self.loss_class.opt.rand_aug_bg:
+ # # bg_color
+ # alpha_mask = target['depth_mask'].float().unsqueeze(1) # B 1 H W
+ # target['img'] = target['img'] * alpha_mask + (bg_color.reshape(1,3,1,1) * 2 - 1) * (1-alpha_mask)
+
+ lod_num = len(pred_nv_cano.keys())
+ random_scale_for_lpips = random.choice(list(pred_nv_cano.keys()))
+
+ with self.rec_model.no_sync(): # type: ignore
+ # with profile(activities=[
+ # ProfilerActivity.CPU, ProfilerActivity.CUDA], record_shapes=True) as prof:
+ # with record_function("loss"):
+
+ loss = th.tensor(0., device=dist_util.dev())
+ loss_dict = {}
+
+ if behaviour == 'd_step':
+
+ loss_scale, loss_dict_scale, _ = self.loss_class(
+ pred_nv_cano[random_scale_for_lpips],
+ multiscale_target[random_scale_for_lpips], # prepare merged data
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ behaviour=behaviour,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None,
+ ignore_kl=True, # KL is handled in the generator step
+ ignore_lpips=True, # LPIPS is not needed for the discriminator step
+ ignore_d_loss=False)
+
+ loss = loss + loss_scale
+ loss_dict.update(
+ {
+ f"{gaussian_wavelet_key.replace('gaussians_', '')}/{loss_key}": loss_v for loss_key, loss_v in loss_dict_scale.items()
+ }
+ )
+
+ else:
+
+ for scale_idx, gaussian_wavelet_key in enumerate(pred_nv_cano.keys()): # ! multi-scale gs rendering supervision
+
+ loss_scale, loss_dict_scale, _ = self.loss_class(
+ pred_nv_cano[gaussian_wavelet_key],
+ multiscale_target[gaussian_wavelet_key], # prepare merged data
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ behaviour=behaviour,
+ conf_sigma_l1=None,
+ conf_sigma_percl=None,
+ ignore_kl=gaussian_wavelet_key!=fine_scale_key, # KL only on the finest scale
+ ignore_lpips=gaussian_wavelet_key!=random_scale_for_lpips, # LPIPS on one randomly chosen LoD per step
+ ignore_d_loss=gaussian_wavelet_key!=fine_scale_key)
+
+ loss = loss + loss_scale
+ loss_dict.update(
+ {
+ f"{gaussian_wavelet_key.replace('gaussians_', '')}/{loss_key}": loss_v for loss_key, loss_v in loss_dict_scale.items()
+ }
+ )
+
+ pos = latent['pos']
+ opacity = gaussians[..., 3:4]
+ scaling = gaussians[..., 4:6] # 2dgs here
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ with th.no_grad(): # save idx 0 here
+ try:
+ self.writer.add_histogram("scene/opacity_hist",
+ opacity[0][:],
+ self.step + self.resume_step)
+ self.writer.add_histogram("scene/scale_hist",
+ scaling[0][:],
+ self.step + self.resume_step)
+ except Exception as e:
+ logger.log(e)
+
+ if behaviour == 'g_step':
+
+ # ! 2dgs loss
+ # gated by the step thresholds below; enable from step 0 when debugging
+ # if (self.step + self.resume_step) >= 2000 and self.loss_class.opt.lambda_normal > 0:
+ surf_normal = multiscale_target[fine_scale_key]['normal'] * multiscale_target[fine_scale_key]['depth_mask'] # foreground supervision only.
+
+ # ! hard-coded
+ # rend_normal = pred_nv_cano['rend_normal'] # ! supervise disk normal with GT normal here instead;
+ # st()
+ rend_normal = pred_nv_cano[fine_scale_key]['rend_normal']
+ rend_dist = pred_nv_cano[fine_scale_key]['dist']
+
+ if self.loss_class.opt.lambda_scale_reg > 0:
+ scale_reg = (scaling-self.scale_expected_threshold).square().mean() * self.loss_class.opt.lambda_scale_reg
+ loss = loss + scale_reg
+ loss_dict.update({'loss_scale_reg': scale_reg})
+
+ if self.loss_class.opt.lambda_opa_reg > 0:
+ # small_base_opa = latent['gaussians_base_opa']
+ opa_reg = (-self.loss_class.beta_mvp_base_dist.log_prob(latent['gaussians_base_opa'].clamp(min=1/255, max=0.99)).mean()) * self.loss_class.opt.lambda_opa_reg
+ # ! also on the fine stage
+ opa_reg_fine = (-self.loss_class.beta_mvp_base_dist.log_prob(fine_gaussians_opa.clamp(min=1/255, max=0.99)).mean()) * self.loss_class.opt.lambda_opa_reg
+ # opa_reg = (1-latent['gaussians_base_opa'].mean() ) * self.loss_class.opt.lambda_opa_reg
+ loss = loss + opa_reg + opa_reg_fine
+ loss_dict.update({'loss_opa_reg': opa_reg, 'loss_opa_reg_fine': opa_reg_fine})
+
+
+ if (self.step + self.resume_step) >= 35000 and self.loss_class.opt.lambda_normal > 0:
+ # if (self.step + self.resume_step) >= 2000 and self.loss_class.opt.lambda_normal > 0:
+ # surf_normal = unity2blender_th(surf_normal) # ! g-buffer normal system is different
+
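+ # 2DGS normal-consistency term: for unit normals the per-pixel error
+ # 1 - <n_rend, n_gt> equals 1 - cos(theta), 0 when aligned.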
+ normal_error = (1 - (rend_normal * surf_normal).sum(dim=1)) # B H W
+ # normal_loss = self.loss_class.opt.lambda_normal * (normal_error.sum() / target['depth_mask'].sum()) # average with fg area ratio
+ normal_loss = self.loss_class.opt.lambda_normal * normal_error.mean()
+
+ loss = loss + normal_loss
+
+ loss_dict.update({'loss_normal': normal_loss})
+
+ # if (self.step + self.resume_step) >= 1500 and self.loss_class.opt.lambda_dist > 0:
+ if (self.step + self.resume_step) >= 15000 and self.loss_class.opt.lambda_dist > 0:
+ # if (self.step + self.resume_step) >= 300 and self.loss_class.opt.lambda_dist > 0:
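+ # 2DGS depth-distortion regularizer: penalizes splats spread out along
+ # each ray, encouraging a thin, surface-like Gaussian distribution.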
+ dist_loss = self.loss_class.opt.lambda_dist * (rend_dist).mean()
+ loss = loss + dist_loss
+ loss_dict.update({'loss_dist': dist_loss})
+
+ if self.loss_class.opt.pruning_ot_lambda > 0:
+ # for now, save and analyze first
+ # selected_pts_mask_scaling = th.where(th.max(scaling, dim=-1).values < 0.01 * 0.9, True, False)
+ selected_pts_mask_scaling = th.where(
+ th.max(scaling, dim=-1).values > 0.05 * 0.9, True,
+ False)
+ # selected_pts_mask_opacity = th.where(opacity[..., 0] < 0.1, True, False) # B N
+ selected_pts_mask_opacity = th.where(
+ opacity[..., 0] < 0.01, True,
+ False) # 0.005 in the original 3dgs setting
+
+ selected_scaling_pts = pos[0][selected_pts_mask_scaling[0]]
+ selected_opacity_pts = pos[0][selected_pts_mask_opacity[0]]
+
+ pcu.save_mesh_v(
+ 'tmp/voxel/cd/10/scaling_masked_pts_0.05.ply',
+ selected_scaling_pts.detach().cpu().numpy(),
+ )
+
+ pcu.save_mesh_v(
+ 'tmp/voxel/cd/10/opacity_masked_pts_0.01.ply',
+ selected_opacity_pts.detach().cpu().numpy(),
+ )
+
+ # st()
+ # pass
+
+ if self.loss_class.opt.cd_lambda > 0:
+ # fuse depth to 3D point cloud to supervise the gaussians
+ B = latent['pos'].shape[0]
+ # c = micro['c']
+ # H = micro['depth'].shape[-1]
+ # V = 4
+ # # ! prepare 3D xyz ground truth
+
+ # cam2world_matrix = c['orig_c2w'][:, :, :16].reshape(
+ # B * V, 4, 4)
+ # intrinsics = c['orig_pose'][:, :,
+ # 16:25].reshape(B * V, 3, 3)
+
+ # # ! already in the world space after ray_sampler()
+ # ray_origins, ray_directions = self.ray_sampler( # shape:
+ # cam2world_matrix, intrinsics, H // 2)[:2]
+
+ # # depth = th.nn.functional.interpolate(micro['depth'].unsqueeze(1), (128,128), mode='nearest')[:, 0] # since each view has 128x128 Gaussians
+ # # depth = th.nn.functional.interpolate(micro['depth'].unsqueeze(1), (128,128), mode='nearest')[:, 0] # since each view has 128x128 Gaussians
+ # depth_128 = th.nn.functional.interpolate(
+ # micro['depth'].unsqueeze(1), (128, 128),
+ # mode='nearest'
+ # )[:, 0] # since each view has 128x128 Gaussians
+ # depth = depth_128.reshape(B * V, -1).unsqueeze(-1)
+ # # depth = micro['depth'].reshape(B*V, -1).unsqueeze(-1)
+
+ # gt_pos = ray_origins + depth * ray_directions # BV HW 3, already in the world space
+ # gt_pos = rearrange(gt_pos,
+ # '(B V) N C -> B (V N) C',
+ # B=B,
+ # V=V)
+ # gt_pos = gt_pos.clip(-0.45, 0.45)
+
+ # TODO
+ gt_pos = micro[
+ 'fps_pcd'] # all the same, will update later.
+
+ # ! use online here
+ # gt_pos = query_pcd_xyz
+
+ cd_loss = pytorch3d.loss.chamfer_distance(
+ gt_pos, latent['pos']
+ )[0] * self.loss_class.opt.cd_lambda # V=4 GT for now. Test with V=8 GT later.
+ # st()
+
+ # for vis
+ if False:
+ torchvision.utils.save_image(micro['img'],
+ 'gt.jpg',
+ value_range=(-1, 1),
+ normalize=True)
+ with th.no_grad():
+ for b in range(B):
+ pcu.save_mesh_v(
+ f'tmp/voxel/cd/10/again_pred-{b}.ply',
+ latent['pos'][b].detach().cpu().numpy(),
+ )
+ # pcu.save_mesh_v(
+ # f'tmp/voxel/cd/10/again-gt-{b}.ply',
+ # gt_pos[b].detach().cpu().numpy(),
+ # )
+ # st()
+
+ loss = loss + cd_loss
+ loss_dict.update({'loss_cd': cd_loss})
+
+ elif self.loss_class.opt.xyz_lambda > 0:
+ '''
+ B = latent['per_view_pos'].shape[0] // 4
+ V = 4
+ c = micro['c']
+ H = micro['depth'].shape[-1]
+ # ! prepare 3D xyz ground truth
+
+ cam2world_matrix = c['orig_c2w'][:, :, :16].reshape(
+ B * V, 4, 4)
+ intrinsics = c['orig_pose'][:, :,
+ 16:25].reshape(B * V, 3, 3)
+
+ # ! already in the world space after ray_sampler()
+ ray_origins, ray_directions = self.ray_sampler( # shape:
+ cam2world_matrix, intrinsics, H // 2)[:2]
+ # self.gs.output_size,)[:2]
+ # depth = rearrange(micro['depth'], '(B V) H W -> ')
+ depth_128 = th.nn.functional.interpolate(
+ micro['depth'].unsqueeze(1), (128, 128),
+ mode='nearest'
+ )[:, 0] # since each view has 128x128 Gaussians
+ depth = depth_128.reshape(B * V, -1).unsqueeze(-1)
+ fg_mask = th.nn.functional.interpolate(
+ micro['depth_mask'].unsqueeze(1).to(th.uint8),
+ (128, 128),
+ mode='nearest').squeeze(1) # B*V H W
+ fg_mask = fg_mask.reshape(B * V, -1).unsqueeze(-1)
+ gt_pos = ray_origins + depth * ray_directions # BV HW 3, already in the world space
+ # st()
+ gt_pos = fg_mask * gt_pos.clip(
+ -0.45, 0.45) # g-buffer objaverse range
+ pred = fg_mask * latent['per_view_pos']
+
+ # for vis
+ if True:
+ torchvision.utils.save_image(micro['img'],
+ 'gt.jpg',
+ value_range=(-1, 1),
+ normalize=True)
+ with th.no_grad():
+ gt_pos_vis = rearrange(gt_pos,
+ '(B V) N C -> B V N C',
+ B=B,
+ V=V)
+ pred_pos_vis = rearrange(pred,
+ '(B V) N C -> B V N C',
+ B=B,
+ V=V)
+ # save
+
+ for b in range(B):
+ for v in range(V):
+ # pcu.save_mesh_v(f'tmp/dust3r/add3dsupp-pred-{b}-{v}.ply',
+ # pred_pos_vis[b][v].detach().cpu().numpy(),)
+ # pcu.save_mesh_v(f'tmp/dust3r/add3dsupp-gt-{b}-{v}.ply',
+ # gt_pos_vis[b][v].detach().cpu().numpy(),)
+ pcu.save_mesh_v(
+ f'tmp/lambda50/no3dsupp-pred-{b}-{v}.ply',
+ pred_pos_vis[b]
+ [v].detach().cpu().numpy(),
+ )
+ pcu.save_mesh_v(
+ f'tmp/lambda50/no3dsupp-gt-{b}-{v}.ply',
+ gt_pos_vis[b]
+ [v].detach().cpu().numpy(),
+ )
+ st()
+
+ xyz_loss = th.nn.functional.mse_loss(
+ gt_pos, pred
+ ) * self.loss_class.opt.xyz_lambda # ! 15% nonzero points
+ loss = loss + xyz_loss
+ '''
+
+ # ! directly gs center supervision with l1 loss, follow LION VAE
+
+ # xyz_loss = th.nn.functional.l1_loss(
+ # query_pcd_xyz, pred
+ # ) * self.loss_class.opt.xyz_lambda # ! 15% nonzero points
+ xyz_loss = self.loss_class.criterion_xyz(query_pcd_xyz, latent['pos']) * self.loss_class.opt.xyz_lambda
+ loss = loss + xyz_loss
+
+ # only calculate foreground gt_pos here?
+ loss_dict.update({'loss_xyz': xyz_loss})
+
+ elif self.loss_class.opt.emd_lambda > 0:
+ # rand_pt_size = 4096 # K value. Input Error! The size of the point clouds should be a multiple of 1024.
+ pred = latent['pos']
+ rand_pt_size = min(2048, max(pred.shape[1], 1024)) # K value. Input Error! The size of the point clouds should be a multiple of 1024.
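+ # EMD compares equal-size point sets, and the CUDA kernel only accepts
+ # sizes that are a multiple of 1024, hence the clamped K above.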
+
+ if micro['fps_pcd'].shape[0] == pred.shape[0]:
+ gt_point = micro['fps_pcd']
+ else: # overfit memory dataset
+ gt_point = micro[
+ 'fps_pcd'][::
+ 4] # consecutive 4 views are from the same ID
+
+ B, gt_point_N = gt_point.shape[:2]
+ # random sample pred points
+ # sampled_pred =
+ # rand_pt_idx = torch.randint(high=pred.shape[1]-gt_point_N, size=(B,))
+
+
+ # pcu.save_mesh_v( f'tmp/voxel/emd/gt-half.ply', gt_point[0, ::4].detach().cpu().numpy(),)
+
+ # for b in range(gt_point.shape[0]):
+ # pcu.save_mesh_v( f'{logger.get_dir()}/gt-{b}.ply', gt_point[b].detach().cpu().numpy(),)
+
+ # pcu.save_mesh_v( f'{logger.get_dir()}/pred-{b}.ply', pred[b].detach().cpu().numpy(),)
+
+ # pcu.save_mesh_v( f'0.ply', latent['pos'][0].detach().cpu().numpy())
+ # st()
+
+ if self.loss_class.opt.fps_sampling: # O(N*K). reduce K later.
+
+ if self.loss_class.opt.subset_fps_sampling:
+ rand_pt_size = 1024 # for faster calculation
+ # ! uniform sampling with randomness
+ # sampled_gt_pts_for_emd_loss = gt_point[:, random.randint(0,9)::9][:, :1024] # direct uniform downsample to the K size
+ # sampled_gt_pts_for_emd_loss = gt_point[:, random.randint(0,9)::4][:, :1024] # direct uniform downsample to the K size
+
+ rand_perm = torch.randperm(
+ gt_point.shape[1]
+ )[:rand_pt_size] # shuffle the xyz before downsample - fps sampling
+ sampled_gt_pts_for_emd_loss = gt_point[:, rand_perm]
+
+ # sampled_gt_pts_for_emd_loss = gt_point[:, ::4]
+ # sampled_pred_pts_for_emd_loss = pred[:, ::32]
+ # sampled_gt_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ # gt_point[:, ::4], K=rand_pt_size)[0] # V4
+
+ if self.loss_class.opt.subset_half_fps_sampling:
+ # sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ # pred, K=rand_pt_size)[0] # V5
+ rand_perm = torch.randperm(
+ pred.shape[1]
+ )[:4096] # shuffle the xyz before downsample - fps sampling
+ sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ pred[:, rand_perm],
+ K=rand_pt_size)[0] # improve randomness
+ else:
+ sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ pred[:, ::4], K=rand_pt_size)[0] # V5
+
+ # rand_perm = torch.randperm(pred.shape[1]) # shuffle the xyz before downsample - fps sampling
+ # sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ # pred[:, rand_perm][:, ::4], K=rand_pt_size)[0] # rand perm before downsampling, V6
+
+ # sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ # pred[:, rand_perm][:, ::8], K=rand_pt_size)[0] # rand perm before downsampling, V7
+
+ # sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ # pred[:, self.step%2::4], K=rand_pt_size)[0] # rand perm before downsampling, V8, based on V50
+
+ else:
+ sampled_gt_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ gt_point, K=rand_pt_size)[0]
+
+ # if self.loss_class.opt.subset_half_fps_sampling:
+ # rand_pt_size = 4096 # K value. Input Error! The size of the point clouds should be a multiple of 1024.
+
+ sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ pred, K=rand_pt_size)[0]
+
+ # else:
+ # sampled_pred_pts_for_emd_loss = pytorch3d.ops.sample_farthest_points(
+ # pred, K=rand_pt_size)[0]
+
+ else: # random sampling
+ rand_pt_idx_pred = torch.randint(high=pred.shape[1] -
+ rand_pt_size,
+ size=(1, ))[0]
+ rand_pt_idx_gt = torch.randint(high=gt_point.shape[1] -
+ rand_pt_size,
+ size=(1, ))[0]
+
+ sampled_pred_pts_for_emd_loss = pred[:,
+ rand_pt_idx_pred:
+ rand_pt_idx_pred +
+ rand_pt_size, ...]
+ sampled_gt_pts_for_emd_loss = gt_point[:,
+ rand_pt_idx_gt:
+ rand_pt_idx_gt +
+ rand_pt_size,
+ ...]
+
+ # only calculate foreground gt_pos here?
+
+ emd_loss = calc_emd(sampled_gt_pts_for_emd_loss,
+ sampled_pred_pts_for_emd_loss).mean(
+ ) * self.loss_class.opt.emd_lambda
+ loss = loss + emd_loss
+ loss_dict.update({'loss_emd': emd_loss})
+
+ if self.loss_class.opt.commitment_loss_lambda > 0:
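+ # commitment loss (intent inferred from the code below): score each splat
+ # by normalized volume * opacity; the 4/3*pi ellipsoid constant cancels
+ # after dividing by the maximum-scale cube. Splats scoring below the
+ # threshold are pushed, via the negated mean, toward a larger contribution.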
+ ellipsoid_vol = torch.prod(scaling, dim=-1, keepdim=True) / ((0.01 * 0.9)**3) # * (4/3*torch.pi). normalized vol
+ commitment = ellipsoid_vol * opacity
+ to_be_pruned_ellipsoid_idx = commitment < (3/4)**3 * 0.9 # those points shall have larger vol*opacity contribution
+ commitment_loss = -commitment[to_be_pruned_ellipsoid_idx].mean() * self.loss_class.opt.commitment_loss_lambda
+
+ loss = loss + commitment_loss
+ loss_dict.update({'loss_commitment': commitment_loss})
+ loss_dict.update({'loss_commitment_opacity': opacity.mean()})
+ loss_dict.update({'loss_commitment_vol': ellipsoid_vol.mean()})
+
+
+ log_rec3d_loss_dict(loss_dict)
+
+ # self.mp_trainer_rec.backward(loss)
+ if behaviour == 'g_step':
+ self.mp_trainer_rec.backward(loss)
+ else:
+ self.mp_trainer_disc.backward(loss)
+
+ # for name, p in self.rec_model.named_parameters():
+ # if p.grad is None:
+ # logger.log(f"found rec unused param: {name}")
+
+ # print(name, p.grad.mean(), p.grad.abs().max())
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0 and i == 0 and behaviour=='g_step':
+ # if dist_util.get_rank() == 0 and self.step % 1 == 0 and i == 0:
+ try:
+ torchvision.utils.save_image(
+ th.cat([target['img'][::1], pred_nv_cano[fine_scale_key]['image_raw'][::1]], ),
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ normalize=True, value_range=(-1,1),nrow=len(indices)*2)
+
+ # save depth and normal and alpha
+ torchvision.utils.save_image(
+ th.cat([surf_normal[::1], rend_normal[::1]], ),
+ f'{logger.get_dir()}/{self.step+self.resume_step}_normal_new.jpg',
+ normalize=True, value_range=(-1,1), nrow=len(indices)*2)
+
+ torchvision.utils.save_image(
+ th.cat([target['depth'][::1], pred_nv_cano[fine_scale_key]['image_depth'][::1]], ),
+ f'{logger.get_dir()}/{self.step+self.resume_step}_depth.jpg',
+ normalize=True, nrow=len(indices)*2)
+
+ # torchvision.utils.save_image( pred_nv_cano['image_depth'][::1], f'{logger.get_dir()}/{self.step+self.resume_step}_depth.jpg', normalize=True, nrow=len(indices)*2)
+
+ torchvision.utils.save_image(
+ th.cat([target['depth_mask'][::1], pred_nv_cano[fine_scale_key]['image_mask'][::1]], ),
+ f'{logger.get_dir()}/{self.step+self.resume_step}_alpha.jpg',
+ normalize=True, value_range=(0,1), nrow=len(indices)*2)
+
+
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ except Exception as e:
+ logger.log('Exception when saving log: ', e)
+
+ # if self.step % 2500 == 0:
+ # th.cuda.empty_cache() # free vram
+
+ @torch.no_grad()
+ def export_mesh_from_2dgs(self, all_rgbs, all_depths, all_alphas, cam_pathes, latent_save_dir):
+ # https://github.com/autonomousvision/LaRa/blob/main/evaluation.py
+ n_thread = 1 # avoid TSDF cpu hanging bug.
+ os.environ["MKL_NUM_THREADS"] = f"{n_thread}"
+ os.environ["NUMEXPR_NUM_THREADS"] = f"{n_thread}"
+ os.environ["OMP_NUM_THREADS"] = f"4"
+ os.environ["VECLIB_MAXIMUM_THREADS"] = f"{n_thread}"
+ os.environ["OPENBLAS_NUM_THREADS"] = f"{n_thread}"
+
+ # copied from: https://github.com/hbb1/2d-gaussian-splatting/blob/19eb5f1e091a582e911b4282fe2832bac4c89f0f/render.py#L23
+ logger.log("exporting mesh ...")
+
+ # for g-objv
+ aabb = [-0.45,-0.45,-0.45,0.45,0.45,0.45]
+ self.aabb = np.array(aabb).reshape(2,3)*1.1
+
+ name = f'{latent_save_dir}/mesh_raw.obj'
+ mesh = self.extract_mesh_bounded(all_rgbs, all_depths, all_alphas, cam_pathes)
+
+ o3d.io.write_triangle_mesh(name, mesh)
+ logger.log("mesh saved at {}".format(name))
+ mesh_post = smooth_mesh(mesh)
+ o3d.io.write_triangle_mesh(name.replace('_raw.obj', '.obj'), mesh_post)
+ logger.log("mesh post processed saved at {}".format(name.replace('.obj', '_post.obj')))
+
+ def get_source_cw2wT(self, source_cameras_view_to_world):
+ return matrix_to_quaternion(
+ source_cameras_view_to_world[:3, :3].transpose(0, 1))
+
+
+ def c_to_3dgs_format(self, pose):
+ # TODO, switch to torch version (batched later)
+
+ c2w = pose[:16].reshape(4, 4) # 4x4 camera-to-world matrix
+
+ # ! load cam
+ w2c = np.linalg.inv(c2w)
+ R = np.transpose(
+ w2c[:3, :3]) # R is stored transposed due to 'glm' in CUDA code
+ T = w2c[:3, 3]
+ fx = pose[16]
+ FovX = focal2fov(fx, 1)
+ FovY = focal2fov(fx, 1)
+
+ tanfovx = math.tan(FovX * 0.5)
+ tanfovy = math.tan(FovY * 0.5)
+
+ assert tanfovx == tanfovy
+
+ trans = np.array([0.0, 0.0, 0.0])
+ scale = 1.0
+
+ world_view_transform = torch.tensor(getWorld2View2(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+ projection_matrix = getProjectionMatrix(znear=self.znear,
+ zfar=self.zfar,
+ fovX=FovX,
+ fovY=FovY).transpose(0, 1)
+ full_proj_transform = (world_view_transform.unsqueeze(0).bmm(
+ projection_matrix.unsqueeze(0))).squeeze(0)
+ camera_center = world_view_transform.inverse()[3, :3]
+
+ view_world_transform = torch.tensor(getView2World(R, T, trans,
+ scale)).transpose(
+ 0, 1)
+
+ # item.update(viewpoint_cam=[viewpoint_cam])
+ c = {}
+ c["source_cv2wT_quat"] = self.get_source_cw2wT(view_world_transform)
+ c.update(
+ projection_matrix=projection_matrix, # K
+ cam_view=world_view_transform, # world_view_transform
+ cam_view_proj=full_proj_transform, # full_proj_transform
+ cam_pos=camera_center,
+ tanfov=tanfovx, # TODO, fix in the renderer
+ # orig_c2w=c2w,
+ # orig_w2c=w2c,
+ orig_pose=torch.from_numpy(pose),
+ orig_c2w=torch.from_numpy(c2w),
+ orig_w2c=torch.from_numpy(w2c),
+ # tanfovy=tanfovy,
+ )
+
+ return c # dict for gs rendering
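+ # Usage sketch (mirroring render_gs_video_given_latent below): convert a
+ # flat 25-dim pose (16 c2w values + 9 intrinsics) into the camera dict,
+ # add batch/view dims, then render:
+ # c = self.c_to_3dgs_format(pose) # pose: (25,) numpy array
+ # pred = rec_model(img=None, c=c, latent=latent, behaviour='triplane_dec')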
+
+
+ @torch.no_grad()
+ def extract_mesh_bounded(self, rgbmaps, depthmaps, alpha_maps, cam_pathes, voxel_size=0.004, sdf_trunc=0.02, depth_trunc=3, alpha_thres=0.08, mask_backgrond=False):
+ """
+ Perform TSDF fusion given a fixed depth range, used in the paper.
+
+ voxel_size: the voxel size of the volume
+ sdf_trunc: truncation value
+ depth_trunc: maximum depth range; should depend on the scene's scale
+ mask_backgrond: whether to mask the background; only works when the dataset provides masks
+
+ return o3d.mesh
+ """
+
+ if self.aabb is not None: # as in lara.
+ center = self.aabb.mean(0)
+ # radius = np.linalg.norm(self.aabb[1] - self.aabb[0]) * 0.5
+ radius = np.linalg.norm(self.aabb[1] - self.aabb[0]) * 0.5
+ # voxel_size = radius / 256
+ voxel_size = radius / 192 # less holes
+ # sdf_trunc = voxel_size * 16 # less holes, slower integration
+ sdf_trunc = voxel_size * 12 #
+ print("using aabb")
+
+ volume = o3d.pipelines.integration.ScalableTSDFVolume(
+ voxel_length= voxel_size,
+ sdf_trunc=sdf_trunc,
+ color_type=o3d.pipelines.integration.TSDFVolumeColorType.RGB8
+ )
+
+ print("Running tsdf volume integration ...")
+ print(f'voxel_size: {voxel_size}')
+ print(f'sdf_trunc: {sdf_trunc}')
+ # print(f'depth_truc: {depth_trunc}')
+
+ # render_reference = th.load('eval_pose.pt', map_location='cpu').numpy()
+
+ # ! use uni_mesh_path, from LaRa (Chen et al., ECCV 2024)
+
+
+ # for i, cam_o3d in tqdm(enumerate(to_cam_open3d(self.viewpoint_stack)), desc="TSDF integration progress"):
+ for i, cam in tqdm(enumerate(cam_pathes), desc="TSDF integration progress"):
+ # rgb = self.rgbmaps[i]
+ # depth = self.depthmaps[i]
+ cam = self.c_to_3dgs_format(cam)
+ cam_o3d = to_cam_open3d_compat(cam)
+
+ rgb = rgbmaps[i][0]
+ depth = depthmaps[i][0]
+ alpha = alpha_maps[i][0]
+
+ # if we have mask provided, use it
+ # if mask_backgrond and (self.viewpoint_stack[i].gt_alpha_mask is not None):
+ # depth[(self.viewpoint_stack[i].gt_alpha_mask < 0.5)] = 0
+
+ depth[(alpha < alpha_thres)] = 0
+ if self.aabb is not None:
+ campos = cam['cam_pos'].cpu().numpy()
+ depth_trunc = np.linalg.norm(campos - center, axis=-1) + radius
+
+ # make open3d rgbd
+ rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(
+ o3d.geometry.Image(np.asarray(np.clip(rgb.permute(1,2,0).cpu().numpy(), 0.0, 1.0) * 255, order="C", dtype=np.uint8)),
+ o3d.geometry.Image(np.asarray(depth.permute(1,2,0).cpu().numpy(), order="C")),
+ depth_trunc = depth_trunc,
+ convert_rgb_to_intensity=False,
+ depth_scale = 1.0
+ )
+
+ volume.integrate(rgbd, intrinsic=cam_o3d.intrinsic, extrinsic=cam_o3d.extrinsic)
+
+ mesh = volume.extract_triangle_mesh()
+ return mesh
+
+
+
+ @th.inference_mode()
+ def eval_novelview_loop(self, camera=None, save_latent=False):
+ # novel view synthesis given evaluation camera trajectory
+ if save_latent: # for diffusion learning
+ latent_dir = Path(f'{logger.get_dir()}/latent_dir')
+ latent_dir.mkdir(exist_ok=True, parents=True)
+
+ render_reference=uni_mesh_path(10)
+
+ for eval_idx, micro in enumerate(tqdm(self.eval_data)):
+
+ latent_save_dir = f'{logger.get_dir()}/latent_dir/{micro["ins"][0]}'
+
+ all_latent_file = sorted(Path(latent_save_dir).glob('*.npz'),
+ key=lambda p: int(p.stem.split('-')[-1])) # files are named latent-{idx}.npz; sort numerically
+ if len(all_latent_file) == 0:
+ save_prefix = 0
+ else:
+ save_prefix = int(all_latent_file[-1].stem.split('-')[-1]) + 1
+
+ Path(latent_save_dir).mkdir(parents=True, exist_ok=True)
+
+ with th.autocast(device_type='cuda',
+ dtype=self.dtype,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ # st() # check whether more c info available
+ latent = self.rec_model(
+ img=micro['img_to_encoder'].to(self.dtype),
+ behaviour='enc_dec_wo_triplane',
+ c=micro['c'],
+ pcd=micro['fps_pcd'], # send in pcd for surface reference.
+ ) # send in input-view C since pixel-aligned gaussians required
+
+ # fine_scale_key = list(pred.keys())[-1]
+ # fine_scale_key = 'gaussians_upsampled_2'
+ fine_scale_key = 'gaussians_upsampled_3'
+ export_mesh = True # for debug
+
+ if True:
+
+ # if eval_idx < 1500 and eval_idx % 3 == 0:
+ if eval_idx < 1500:
+ all_rgbs, all_depths, all_alphas=self.render_gs_video_given_latent(
+ latent,
+ self.rec_model, # compatible with join_model
+ name_prefix=f'{self.step + self.resume_step}_{micro["ins"][0].split("/")[0]}_{eval_idx}',
+ save_img=False,
+ render_reference=render_reference,
+ export_mesh=False)
+
+ if export_mesh:
+ self.export_mesh_from_2dgs(all_rgbs, all_depths, all_alphas, render_reference, latent_save_dir)
+
+ # ! B=2 here
+ np.savez_compressed(f'{latent_save_dir}/latent-{save_prefix}.npz',
+ latent_normalized=latent['latent_normalized'].cpu().numpy(),
+ query_pcd_xyz=latent['query_pcd_xyz'].cpu().numpy()
+ )
+
+ # st()
+ for scale in ['gaussians_upsampled', 'gaussians_base', 'gaussians_upsampled_2', 'gaussians_upsampled_3']:
+ np.save(f'{latent_save_dir}/{scale}.npy', latent[scale].cpu().numpy())
+
+
+ @th.inference_mode()
+ def render_gs_video_given_latent(self,
+ ddpm_latent,
+ rec_model,
+ name_prefix='0',
+ save_img=False,
+ render_reference=None,
+ export_mesh=False):
+
+ all_rgbs, all_depths, all_alphas = [], [], []
+
+ # batch_size, L, C = planes.shape
+
+ # ddpm_latent = { self.latent_name: planes[..., :-3] * self.triplane_scaling_divider, # kl-reg latent
+ # 'query_pcd_xyz': self.pcd_unnormalize_fn(planes[..., -3:]) }
+
+ # ddpm_latent.update(rec_model(latent=ddpm_latent, behaviour='decode_gs_after_vae_no_render'))
+
+
+ # assert render_reference is None
+ # render_reference = self.eval_data # compat
+ # else: # use train_traj
+
+ # for key in ['ins', 'bbox', 'caption']:
+ # if key in render_reference:
+ # render_reference.pop(key)
+
+ # render_reference = [ { k:v[idx:idx+1] for k, v in render_reference.items() } for idx in range(40) ]
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/gs_{name_prefix}.mp4',
+ mode='I',
+ fps=15,
+ codec='libx264')
+
+ # for i, batch in enumerate(tqdm(self.eval_data)):
+ for i, micro_c in enumerate(tqdm(render_reference)):
+ # micro = {
+ # k: v.to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ # for k, v in batch.items()
+ # }
+
+ c = self.post_process.c_to_3dgs_format(micro_c)
+ for k in c.keys(): # to cuda
+ if isinstance(c[k], th.Tensor) and k != 'tanfov':
+ c[k] = c[k].unsqueeze(0).unsqueeze(0).to(dist_util.dev()) # actually, could render 40 views together.
+ c['tanfov'] = th.tensor(c['tanfov']).to(dist_util.dev())
+
+ pred = rec_model(
+ img=None,
+ c=c, # TODO, to dict
+ latent=ddpm_latent, # render gs
+ behaviour='triplane_dec',
+ bg_color=self.gs_bg_color,
+ render_all_scale=True,
+ )
+
+ fine_scale_key = list(pred.keys())[-1]
+
+ all_rgbs.append(einops.rearrange(pred[fine_scale_key]['image'], 'B V ... -> (B V) ...'))
+ all_depths.append(einops.rearrange(pred[fine_scale_key]['depth'], 'B V ... -> (B V) ...'))
+ all_alphas.append(einops.rearrange(pred[fine_scale_key]['alpha'], 'B V ... -> (B V) ...'))
+
+ # st()
+ # fine_scale_key = list(pred.keys())[-1]
+ all_pred_vis = {}
+ for key in pred.keys():
+ pred_scale = pred[key] # collect vis for every scale; concatenated below
+ for k in pred_scale.keys():
+ pred_scale[k] = einops.rearrange(pred_scale[k], 'B V ... -> (B V) ...') # merge
+
+ pred_vis = self._make_vis_img(pred_scale)
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ all_pred_vis[key] = vis
+
+ # all_pred_vis_concat = np.concatenate([cv2.resize(all_pred_vis[k][0], (384*3, 384)) for k in ['gaussians_base', 'gaussians_upsampled', 'gaussians_upsampled_2']], axis=0)
+ # all_pred_vis_concat = np.concatenate([cv2.resize(all_pred_vis[k][0], (256*3, 256)) for k in ['gaussians_base', 'gaussians_upsampled',]], axis=0)
+ # all_pred_vis_concat = np.concatenate([cv2.resize(all_pred_vis[k][0], (384*3, 384)) for k in all_pred_vis.keys()], axis=0)
+ all_pred_vis_concat = np.concatenate([cv2.resize(all_pred_vis[k][0], (512*3, 512)) for k in all_pred_vis.keys()], axis=0)
+
+ # for j in range(vis.shape[0]):
+ video_out.append_data(all_pred_vis_concat)
+
+ video_out.close()
+
+ print('logged video to: ',
+ f'{logger.get_dir()}/gs_{name_prefix}.mp4')
+
+ del video_out, pred, pred_vis, vis
+ return all_rgbs, all_depths, all_alphas
+
+ @th.no_grad()
+ def _make_vis_img(self, pred):
+
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ pred_depth = pred_depth.cpu()[0].permute(1, 2, 0).numpy()
+ pred_depth = (plt.cm.viridis(pred_depth[..., 0])[..., :3]) * 2 - 1
+ pred_depth = th.from_numpy(pred_depth).to(
+ pred['image_raw'].device).permute(2, 0, 1).unsqueeze(0)
+
+ gen_img = pred['image_raw']
+ rend_normal = pred['rend_normal']
+
+ pred_vis = th.cat(
+ [
+ gen_img,
+ rend_normal,
+ pred_depth,
+ ],
+ dim=-1) # B, 3, H, W
+
+ return pred_vis
+
+
+class TrainLoop3DRecNVPatchSingleForwardMV_NoCrop_adv(TrainLoop3DRecNVPatchSingleForwardMV_NoCrop):
+ def __init__(self, *, rec_model, loss_class, data, eval_data, batch_size, microbatch, lr, ema_rate, log_interval, eval_interval, save_interval, resume_checkpoint, use_fp16=False, fp16_scale_growth=0.001, weight_decay=0, lr_anneal_steps=0, iterations=10001, load_submodule_name='', ignore_resume_opt=False, model_name='rec', use_amp=False, num_frames=4, **kwargs):
+ super().__init__(rec_model=rec_model, loss_class=loss_class, data=data, eval_data=eval_data, batch_size=batch_size, microbatch=microbatch, lr=lr, ema_rate=ema_rate, log_interval=log_interval, eval_interval=eval_interval, save_interval=save_interval, resume_checkpoint=resume_checkpoint, use_fp16=use_fp16, fp16_scale_growth=fp16_scale_growth, weight_decay=weight_decay, lr_anneal_steps=lr_anneal_steps, iterations=iterations, load_submodule_name=load_submodule_name, ignore_resume_opt=ignore_resume_opt, model_name=model_name, use_amp=use_amp, num_frames=num_frames, **kwargs)
+
+ # create discriminator
+ # ! copied from ln3diff tri-plane version
+ disc_params = self.loss_class.get_trainable_parameters()
+
+ self.mp_trainer_disc = MixedPrecisionTrainer(
+ model=self.loss_class.discriminator,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='disc',
+ use_amp=use_amp,
+ model_params=disc_params)
+
+ # st() # check self.lr
+ self.opt_disc = AdamW(
+ self.mp_trainer_disc.master_params,
+ lr=self.lr, # follow sd code base
+ betas=(0, 0.999),
+ eps=1e-8)
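+ # beta1=0 disables first-moment momentum on the discriminator, a common
+ # choice for adversarial losses (momentum tends to destabilize D updates).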
+
+ # TODO, is loss cls already in the DDP?
+ if self.use_ddp:
+ self.ddp_disc = DDP(
+ self.loss_class.discriminator,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ self.ddp_disc = self.loss_class.discriminator
+
+ def save(self, mp_trainer=None, model_name='rec'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, mp_trainer.master_params)
+
+ dist.barrier()
+
+ def run_step(self, batch, step='g_step'):
+ # self.forward_backward(batch)
+
+ if step == 'g_step':
+ self.forward_backward(batch, behaviour='g_step')
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step':
+ self.forward_backward(batch, behaviour='d_step')
+ _ = self.mp_trainer_disc.optimize(self.opt_disc)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self, batch=None):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
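+ # alternate one generator (reconstruction) step and one discriminator
+ # step, each on a freshly drawn batch.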
+ batch = next(self.data)
+ self.run_step(batch, 'g_step')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step')
+
+ if self.step % 1000 == 0:
+ dist_util.synchronize()
+ if self.step % 5000 == 0:
+ th.cuda.empty_cache() # avoid memory leak
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ if dist_util.get_rank() == 0:
+ try:
+ self.eval_loop()
+ except Exception as e:
+ logger.log(e)
+ dist_util.synchronize()
+
+ # if self.step % self.save_interval == 0 and self.step != 0:
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_disc,
+ self.mp_trainer_disc.model_name)
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ logger.log('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step -
+ 1) % self.save_interval != 0 and self.step != 1:
+ self.save()
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ # if (self.step - 1) % self.save_interval != 0 and self.step != 1:
+ if (self.step - 1) % self.save_interval != 0:
+ try:
+ self.save() # save rec
+ self.save(self.mp_trainer_disc, self.mp_trainer_disc.model_name)
+ except Exception as e:
+ logger.log(e)
+
+ # ! load disc
+ def _load_and_sync_parameters(self, submodule_name=''):
+ super()._load_and_sync_parameters(submodule_name)
+ # load disc
+
+ resume_checkpoint = self.resume_checkpoint.replace(
+ 'rec', 'disc') # * default behaviour
+ if os.path.exists(resume_checkpoint):
+ if dist_util.get_rank() == 0:
+ logger.log(
+ f"loading disc model from checkpoint: {resume_checkpoint}..."
+ )
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ model_state_dict = self.loss_class.discriminator.state_dict()
+
+ for k, v in resume_state_dict.items():
+ if k in model_state_dict.keys():
+ if v.size() == model_state_dict[k].size():
+ model_state_dict[k] = v
+ # model_state_dict[k].copy_(v)
+ else:
+ logger.log('!!!! partially load: ', k, ": ",
+ v.size(), "state_dict: ",
+ model_state_dict[k].size())
+
+ if dist_util.get_world_size() > 1:
+ # dist_util.sync_params(self.model.named_parameters())
+ dist_util.sync_params(
+ self.loss_class.get_trainable_parameters())
+ logger.log('synced disc params')
+
diff --git a/nsr/train_util.py b/nsr/train_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..d03ae75cd51c3e4c85f771740d0c97a1363af4cd
--- /dev/null
+++ b/nsr/train_util.py
@@ -0,0 +1,1980 @@
+import copy
+import matplotlib.pyplot as plt
+import mcubes
+import trimesh
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+
+import traceback
+from nsr.gs import GaussianRenderer
+from nsr.gs_surfel import GaussianRenderer2DGS
+import blobfile as bf
+import imageio
+import numpy as np
+# from sympy import O
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard import SummaryWriter
+from tqdm import tqdm, trange
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+from guided_diffusion.train_util import (calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+
+from .camera_utils import LookAtPoseSampler, FOV_to_intrinsics
+
+# from ..guided_diffusion.train_util import TrainLoop
+
+
+def flip_yaw(pose_matrix):
+ flipped = pose_matrix.clone()
+ flipped[:, 0, 1] *= -1
+ flipped[:, 0, 2] *= -1
+ flipped[:, 1, 0] *= -1
+ flipped[:, 2, 0] *= -1
+ flipped[:, 0, 3] *= -1
+ # st()
+ return flipped
+
+
+# basic reconstruction model
+class TrainLoopBasic:
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ loss_class,
+ # diffusion,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ # schedule_sampler=None,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ compile=False,
+ **kwargs):
+ self.pool_512 = th.nn.AdaptiveAvgPool2d((512, 512))
+ self.pool_256 = th.nn.AdaptiveAvgPool2d((256, 256))
+ self.pool_128 = th.nn.AdaptiveAvgPool2d((128, 128))
+ self.pool_64 = th.nn.AdaptiveAvgPool2d((64, 64))
+ self.rec_model = rec_model
+ self.loss_class = loss_class
+ # self.diffusion = diffusion
+ # self.schedule_sampler = schedule_sampler or UniformSampler(diffusion)
+ self.data = data
+ self.eval_data = eval_data
+ self.batch_size = batch_size
+ self.microbatch = microbatch if microbatch > 0 else batch_size
+ self.lr = lr
+ self.ema_rate = ([ema_rate] if isinstance(ema_rate, float) else
+ [float(x) for x in ema_rate.split(",")])
+ self.log_interval = log_interval
+ self.eval_interval = eval_interval
+ self.save_interval = save_interval
+ self.iterations = iterations
+ self.resume_checkpoint = resume_checkpoint
+ self.use_fp16 = use_fp16
+ self.fp16_scale_growth = fp16_scale_growth
+ self.weight_decay = weight_decay
+ self.lr_anneal_steps = lr_anneal_steps
+
+ self.step = 0
+ self.resume_step = 0
+ # self.global_batch = self.batch_size * dist.get_world_size()
+ self.global_batch = self.batch_size * dist_util.get_world_size()
+
+ self.sync_cuda = th.cuda.is_available()
+
+ # self._load_and_sync_parameters(load_submodule_name)
+ self._load_and_sync_parameters()
+
+ # ! force bf16
+ # https://zhuanlan.zhihu.com/p/671165275
+ self.dtype = th.float32
+
+ if use_amp:
+ if th.cuda.get_device_capability(0)[0] < 8:
+ self.dtype = th.float16 # e.g., v100
+ else:
+ self.dtype = th.bfloat16 # e.g., a100 / a6000
+
+ self.mp_trainer_rec = MixedPrecisionTrainer(
+ model=self.rec_model,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name=model_name,
+ use_amp=use_amp)
+ self.writer = SummaryWriter(log_dir=f'{logger.get_dir()}/runs')
+
+ self.opt = AdamW(self._init_optim_groups(kwargs))
+
+ if dist_util.get_rank() == 0:
+ logger.log(self.opt)
+
+ if self.resume_step:
+ if not ignore_resume_opt:
+ self._load_optimizer_state()
+ else:
+ logger.warn("Ignoring optimizer state from checkpoint.")
+ # Model was resumed, either due to a restart or a checkpoint
+ # being specified at the command line.
+ # self.ema_params = [
+ # self._load_ema_parameters(rate, load_submodule_name) for rate in self.ema_rate
+ # ]
+
+ self.ema_params = [
+ self._load_ema_parameters(
+ rate,
+ self.rec_model,
+ self.mp_trainer_rec,
+ model_name=self.mp_trainer_rec.model_name)
+ for rate in self.ema_rate
+ ]
+ else:
+ self.ema_params = [
+ copy.deepcopy(self.mp_trainer_rec.master_params)
+ for _ in range(len(self.ema_rate))
+ ]
+
+ # compile
+ self.compile = compile
+ if compile:
+ logger.log('compiling... ignore vit_decoder')
+ self.rec_model = th.compile(self.rec_model) # this class stores the network as rec_model; self.model does not exist
+
+ # # self.rec_model.encoder = th.compile(self.rec_model.encoder)
+ # self.rec_model.decoder.decoder_pred = th.compile(
+ # self.rec_model.decoder.decoder_pred)
+ # # self.rec_model.decoder.triplane_decoder = th.compile(self.rec_model.decoder.triplane_decoder)
+ # for module_k, sub_module in self.rec_model.decoder.superresolution.items(
+ # ):
+ # self.rec_model.decoder.superresolution[module_k] = th.compile(
+ # sub_module)
+
+ if th.cuda.is_available():
+ self.use_ddp = True
+
+ self.rec_model = th.nn.SyncBatchNorm.convert_sync_batchnorm(
+ self.rec_model)
+
+ self.rec_model = DDP(
+ self.rec_model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ if dist_util.get_world_size() > 1:
+ logger.warn("Distributed training requires CUDA. "
+ "Gradients will not be synchronized properly!")
+ self.use_ddp = False
+ self.rec_model = self.rec_model
+
+ self.novel_view_poses = None
+ th.cuda.empty_cache()
+
+ def _init_optim_groups(self, kwargs):
+ raise NotImplementedError('')
+
+ def _load_and_sync_parameters(self, submodule_name=''):
+ # resume_checkpoint, self.resume_step = find_resume_checkpoint() or self.resume_checkpoint
+ resume_checkpoint = self.resume_checkpoint # * default behaviour
+ # logger.log('resume_checkpoint', resume_checkpoint, self.resume_checkpoint)
+
+ if resume_checkpoint:
+ self.resume_step = parse_resume_step_from_filename(
+ resume_checkpoint)
+ if dist_util.get_rank() == 0:
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ if submodule_name != '':
+ model_state_dict = getattr(self.rec_model,
+ submodule_name).state_dict()
+ if dist_util.get_rank() == 0:
+ logger.log('loading submodule: ', submodule_name)
+ else:
+ model_state_dict = self.rec_model.state_dict()
+
+ # model = self.rec_model
+
+ # for k, v in resume_state_dict.items():
+ # if k in model_state_dict.keys() and v.size(
+ # ) == model_state_dict[k].size():
+ # model_state_dict[k] = v
+ # else:
+ # logger.log('!!!! ignore key: ', k, ": ", v.size())
+
+ for k, v in resume_state_dict.items():
+ if '._orig_mod' in k: # prefix in torch.compile
+ k = k.replace('._orig_mod', '')
+ if k in model_state_dict.keys():
+ if v.size() == model_state_dict[k].size():
+ model_state_dict[k] = v
+ # model_state_dict[k].copy_(v)
+ else:
+ # if k == 'encoder.conv_in.weight':
+ if False:
+ model_state_dict[k][:, :v.shape[1]] = v
+ model_state_dict[k][:, v.shape[1]:] = 0
+
+ logger.log('!!!! partially load: ', k, ": ",
+ v.size(), "state_dict: ",
+ model_state_dict[k].size())
+
+ # if v.ndim > 1:
+ # model_state_dict[k][:v.shape[0], :v.shape[1], ...] = v # load the decoder
+ # model_state_dict[k][v.shape[0]:, v.shape[1]:, ...] = 0
+ # else:
+ # model_state_dict[k][:v.shape[0], ...] = v # load the decoder
+ # model_state_dict[k][v.shape[0]:, ...] = 0
+ # logger.log('!!!! size mismatch, partially load: ', k, ": ", v.size(), "state_dict: ", model_state_dict[k].size())
+ else:
+ logger.log('!!!! size mismatch, ignore: ', k,
+ ": ", v.size(), "state_dict: ",
+ model_state_dict[k].size())
+
+ # elif 'decoder.vit_decoder.blocks' in k:
+ # # st()
+ # # load from 2D ViT pre-trained into 3D ViT blocks.
+ # assert len(model.decoder.vit_decoder.blocks[0].vit_blks
+ # ) == 2 # assert depth=2 here.
+ # fusion_ca_depth = len(
+ # model.decoder.vit_decoder.blocks[0].vit_blks)
+ # vit_subblk_index = int(k.split('.')[3])
+ # vit_blk_keyname = ('.').join(k.split('.')[4:])
+ # fusion_blk_index = vit_subblk_index // fusion_ca_depth
+ # fusion_blk_subindex = vit_subblk_index % fusion_ca_depth
+ # model_state_dict[
+ # f'decoder.vit_decoder.blocks.{fusion_blk_index}.vit_blks.{fusion_blk_subindex}.{vit_blk_keyname}'] = v
+ # logger.log('load 2D ViT weight: {}'.format(
+ # f'decoder.vit_decoder.blocks.{fusion_blk_index}.vit_blks.{fusion_blk_subindex}.{vit_blk_keyname}'
+ # ))
+
+ else:
+ logger.log(
+ '!!!! ignore key, not in the model_state_dict: ',
+ k, ": ", v.size())
+
+ logger.log('model loading finished')
+
+ if submodule_name != '':
+ getattr(self.rec_model,
+ submodule_name).load_state_dict(model_state_dict,
+ strict=True)
+ else:
+ self.rec_model.load_state_dict(model_state_dict,
+ strict=False)
+ # strict=True)
+
+ if dist_util.get_world_size() > 1:
+ # dist_util.sync_params(self.model.named_parameters())
+ dist_util.sync_params(self.rec_model.parameters())
+ logger.log('synced params')
+
+ def _load_ema_parameters(self,
+ rate,
+ model=None,
+ mp_trainer=None,
+ model_name='ddpm'):
+
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+ if model is None:
+ model = self.rec_model
+
+ ema_params = copy.deepcopy(mp_trainer.master_params)
+
+ # main_checkpoint, _ = find_resume_checkpoint(
+ # self.resume_checkpoint, model_name) or self.resume_checkpoint
+
+ main_checkpoint = self.resume_checkpoint
+ ema_checkpoint = find_ema_checkpoint(main_checkpoint, self.resume_step,
+ rate, model_name)
+ if ema_checkpoint and model_name == 'ddpm':
+
+ if dist_util.get_rank() == 0:
+
+ if not Path(ema_checkpoint).exists():
+ logger.log(
+ f"failed to load EMA from checkpoint: {ema_checkpoint}, not exist"
+ )
+ return
+
+ logger.log(f"loading EMA from checkpoint: {ema_checkpoint}...")
+
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ state_dict = dist_util.load_state_dict(
+ ema_checkpoint, map_location=map_location)
+
+ model_ema_state_dict = model.state_dict()
+
+ for k, v in state_dict.items():
+ if k in model_ema_state_dict.keys() and v.size(
+ ) == model_ema_state_dict[k].size():
+ model_ema_state_dict[k] = v
+
+ elif 'IN' in k and getattr(model, 'decomposed_IN', False):
+ model_ema_state_dict[k.replace(
+ 'IN', 'IN.IN')] = v # decomposed IN
+
+ else:
+ logger.log('ignore key: ', k, ": ", v.size())
+
+ ema_params = mp_trainer.state_dict_to_master_params(
+ model_ema_state_dict)
+
+ del state_dict
+
+ # logger.log('ema mark 3, ', model_name, )
+
+ # ! debugging, remove to check which key fails.
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(ema_params)
+
+ # logger.log('ema mark 4, ', model_name, )
+ # del ema_params
+ return ema_params
+
+ def _load_optimizer_state(self):
+ if self.resume_checkpoint == '':
+ main_checkpoint, _ = find_resume_checkpoint()
+ else:
+ main_checkpoint = self.resume_checkpoint
+ opt_checkpoint = bf.join(bf.dirname(main_checkpoint),
+ f"opt{self.resume_step:07}.pt")
+ # st()
+ if bf.exists(opt_checkpoint):
+ logger.log(
+ f"loading optimizer state from checkpoint: {opt_checkpoint}")
+
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ try:
+ state_dict = dist_util.load_state_dict(opt_checkpoint,
+ map_location=map_location)
+ self.opt.load_state_dict(state_dict)
+ except Exception as e:
+ logger.log(e)
+
+ # self.opt.load_state_dict({k: v for k, v in state_dict.items() if k in self.opt.state_dict()})
+
+ del state_dict
+ else:
+ logger.log('failed to load optimizer state: {}'.format(opt_checkpoint))
+
+ def run_loop(self, batch=None):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ # if isinstance(self.data, list):
+ # if self.step <= self.data[2]:
+ # batch = next(self.data[1])
+ # else:
+ # batch = next(self.data[0])
+ # else:
+
+ # for _ in trange(10000): # io profiling
+ batch = next(self.data) # | 56/10000 [00:13<40:48, 4.06it/s]
+
+ # batch = next(self.data)
+ # ! comment out
+ # if self.novel_view_poses is None:
+ # self.novel_view_poses = th.roll(batch['c'], 1, 0).to(
+ # dist_util.dev()) # save for eval visualization use
+
+ self.run_step(batch)
+
+ if self.step % 1000 == 0:
+ dist_util.synchronize()
+ th.cuda.empty_cache() # avoid memory leak
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ try:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+ except Exception as e: # disk quota exceeded
+ logger.log(e)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ # if self.step % self.eval_interval == 0 and (self.step +
+ # self.resume_step) != 0:
+ # if self.step % self.eval_interval == 0: # ! for debugging
+ # if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ try:
+ self.eval_loop()
+ except Exception as e:
+ logger.log(e)
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ self.save()
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ logger.log('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step -
+ 1) % self.save_interval != 0 and self.step != 1:
+ self.save()
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0 and self.step != 1:
+ self.save()
+
+ @th.no_grad()
+ def eval_loop(self):
+ raise NotImplementedError('')
+
+ def run_step(self, batch, *args):
+ self.forward_backward(batch)
+ took_step = self.mp_trainer_rec.optimize(self.opt)
+ if took_step:
+ self._update_ema()
+ self._anneal_lr()
+ self.log_step()
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # th.cuda.empty_cache()
+ raise NotImplementedError('')
+
+ def _update_ema(self):
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ update_ema(params, self.mp_trainer_rec.master_params, rate=rate)
+
+ def _anneal_lr(self):
+ if not self.lr_anneal_steps:
+ return
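+ # linear decay: lr(step) = lr * (1 - step / lr_anneal_steps), reaching 0
+ # at lr_anneal_steps.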
+ frac_done = (self.step + self.resume_step) / self.lr_anneal_steps
+ lr = self.lr * (1 - frac_done)
+ for param_group in self.opt.param_groups:
+ param_group["lr"] = lr
+
+ def log_step(self):
+ logger.logkv("step", self.step + self.resume_step)
+ logger.logkv("samples",
+ (self.step + self.resume_step + 1) * self.global_batch)
+
+ def save(self):
+
+ def save_checkpoint(rate, params):
+ state_dict = self.mp_trainer_rec.master_params_to_state_dict(
+ params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {rate}...")
+ if not rate:
+ filename = f"model_rec{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_rec_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ try:
+ save_checkpoint(
+ 0, self.mp_trainer_rec.master_params) # avoid OOM when saving ckpt
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ # ! save optimizer
+ if dist.get_rank() == 0:
+ with bf.BlobFile(
+ bf.join(get_blob_logdir(),
+ f"opt{(self.step+self.resume_step):07d}.pt"),
+ "wb",
+ ) as f:
+ th.save(self.opt.state_dict(), f)
+
+ except Exception as e: # disk quota exceeded
+ logger.log(e)
+
+ th.cuda.empty_cache()
+
+ dist.barrier()
+
+
+class TrainLoop3DRec(TrainLoopBasic):
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ loss_class,
+ # diffusion,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ # schedule_sampler=None,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ compile=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ compile=compile,
+ **kwargs)
+
+ # self.rec_model = self.ddp_model
+ # self._prepare_nvs_pose() # for eval novelview visualization
+ th.cuda.empty_cache()
+
+ self.triplane_scaling_divider = 1.0
+ self.latent_name = 'latent_normalized_2Ddiffusion' # normalized triplane latent
+ self.render_latent_behaviour = 'decode_after_vae' # directly render using triplane operations
+
+ @th.inference_mode()
+ def render_video_given_triplane(self,
+ planes,
+ rec_model,
+ name_prefix='0',
+ save_img=False,
+ render_reference=None,
+ save_mesh=False, render_reference_length=40,
+ return_gen_imgs=False):
+
+ planes *= self.triplane_scaling_divider # when clip_denoised=True, the sampled planes lie in [-1, 1]; without this rescaling, values beyond one std would be clipped away. Consider moving the scaling into IN in later experiments.
+
+ # sr_w_code = getattr(self.ddp_rec_model.module.decoder, 'w_avg', None)
+ # sr_w_code = None
+ batch_size = planes.shape[0]
+
+ # if sr_w_code is not None:
+ # sr_w_code = sr_w_code.reshape(1, 1,
+ # -1).repeat_interleave(batch_size, 0)
+
+ # used during diffusion sampling inference
+ # if not save_img:
+
+ # ! mesh
+
+ if planes.shape[1] == 16: # ffhq/car
+ ddpm_latent = {
+ self.latent_name: planes[:, :12],
+ 'bg_plane': planes[:, 12:16],
+ }
+ else:
+ ddpm_latent = {
+ self.latent_name: planes,
+ }
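+ # for the ffhq/car setting the 16-channel latent packs a 12-channel
+ # foreground triplane plus a 4-channel background plane; other datasets
+ # treat the full latent as a single triplane code.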
+
+ ddpm_latent.update(
+ rec_model(latent=ddpm_latent,
+ behaviour='decode_after_vae_no_render'))
+
+ # if export_mesh:
+ # if True:
+ if save_mesh:
+ # mesh_size = 512
+ mesh_size = 192
+ # mesh_size = 384
+ # mesh_size = 320
+ # mesh_thres = 3 # TODO, requires tuning
+ # mesh_thres = 5 # TODO, requires tuning
+ mesh_thres = 10 # TODO, requires tuning
+ dump_path = f'{logger.get_dir()}/mesh/'
+
+ os.makedirs(dump_path, exist_ok=True)
+
+ grid_out = rec_model(
+ latent=ddpm_latent,
+ grid_size=mesh_size,
+ behaviour='triplane_decode_grid',
+ )
+
+ vtx, faces = mcubes.marching_cubes(
+ grid_out['sigma'].squeeze(0).squeeze(-1).cpu().numpy(),
+ mesh_thres)
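+ # marching_cubes returns vertices in voxel-index coordinates
+ # [0, mesh_size - 1]; the next line rescales them into the renderer's
+ # normalized [-1, 1] cube before export.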
+ vtx = vtx / (mesh_size - 1) * 2 - 1
+
+ # vtx_tensor = th.tensor(vtx, dtype=th.float32, device=dist_util.dev()).unsqueeze(0)
+ # vtx_colors = self.model.synthesizer.forward_points(planes, vtx_tensor)['rgb'].squeeze(0).cpu().numpy() # (0, 1)
+ # vtx_colors = (vtx_colors * 255).astype(np.uint8)
+
+ # mesh = trimesh.Trimesh(vertices=vtx, faces=faces, vertex_colors=vtx_colors)
+ mesh = trimesh.Trimesh(
+ vertices=vtx,
+ faces=faces,
+ )
+
+ mesh_dump_path = os.path.join(dump_path, f'{name_prefix}.ply')
+ mesh.export(mesh_dump_path, 'ply')
+
+ print(f"Mesh dumped to {dump_path}")
+ del grid_out, mesh
+ th.cuda.empty_cache()
+ # return
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4',
+ mode='I',
+ fps=15,
+ codec='libx264')
+
+ # ddpm_latent was already built and decoded above; reuse it here rather
+ # than running the VAE decode a second time.
+
+ # planes = planes.repeat_interleave(micro['c'].shape[0], 0)
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ # micro_batchsize = 2
+ # micro_batchsize = batch_size
+
+ if render_reference is None:
+ render_reference = self.eval_data # compat
+ else: # use train_traj
+ for key in ['ins', 'bbox', 'caption']:
+ if key in render_reference:
+ render_reference.pop(key)
+ # render_reference.pop('bbox')
+ # render_reference.pop('caption')
+
+ # convert the batched trajectory dict into a list of per-frame dicts
+ # so the loop below can enumerate single camera poses
+ render_reference = [{
+ k: v[idx:idx + 1]
+ for k, v in render_reference.items()
+ } for idx in range(render_reference_length)]
+
+ # for i, batch in enumerate(tqdm(self.eval_data)):
+ if return_gen_imgs:
+ gen_imgs = []
+ for i, batch in enumerate(tqdm(render_reference)):
+ micro = {
+ k: v.to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+ # micro = {'c': batch['c'].to(dist_util.dev()).repeat_interleave(batch_size, 0)}
+
+ # all_pred = []
+ pred = rec_model(
+ img=None,
+ c=micro['c'],
+ latent=ddpm_latent,
+ # latent={
+ # # k: v.repeat_interleave(micro['c'].shape[0], 0) if v is not None else None
+ # k: v.repeat_interleave(micro['c'].shape[0], 0) if v is not None else None
+ # for k, v in ddpm_latent.items()
+ # },
+ behaviour='triplane_dec')
+
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ # save viridis_r depth
+ pred_depth = pred_depth.cpu()[0].permute(1, 2, 0).numpy()
+ pred_depth = (plt.cm.viridis(pred_depth[..., 0])[..., :3]) * 2 - 1
+ pred_depth = th.from_numpy(pred_depth).to(
+ pred['image_raw'].device).permute(2, 0, 1).unsqueeze(0)
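+ # the block above colorizes depth: normalize to [0, 1], apply the
+ # viridis colormap, then map the RGB result to [-1, 1] to match the
+ # value range of the rendered images.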
+ # st()
+ # pred_depth =
+
+ if 'image_sr' in pred:
+
+ gen_img = pred['image_sr']
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), gen_img,
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 128:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']), pred['image_sr'],
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ gen_img = pred['image_raw']
+ if return_gen_imgs:
+ gen_imgs.append(gen_img)
+
+ pred_vis = th.cat(
+ [
+ # self.pool_128(micro['img']),
+ self.pool_128(gen_img),
+ # self.pool_128(pred_depth.repeat_interleave(3, dim=1))
+ self.pool_128(pred_depth)
+ ],
+ dim=-1) # B, 3, H, W
+
+ if save_img:
+ for batch_idx in range(gen_img.shape[0]):
+ sampled_img = Image.fromarray(
+ (gen_img[batch_idx].permute(1, 2, 0).cpu().numpy() *
+ 127.5 + 127.5).clip(0, 255).astype(np.uint8))
+ if sampled_img.size != (512, 512):
+ sampled_img = sampled_img.resize(
+ (128, 128), Image.HAMMING) # for shapenet
+ sampled_img.save(logger.get_dir() +
+ '/FID_Cals/{}_{}.png'.format(
+ int(name_prefix) * batch_size +
+ batch_idx, i))
+ # print('FID_Cals/{}_{}.png'.format(int(name_prefix)*batch_size+batch_idx, i))
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # if vis.shape[0] > 1:
+ # vis = np.concatenate(np.split(vis, vis.shape[0], axis=0),
+ # axis=-3)
+
+ # if not save_img:
+ # ! currently only export one plane at a time
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ # if not save_img:
+ video_out.close()
+ del video_out
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4')
+
+ if return_gen_imgs:
+ return th.cat(gen_imgs)
+
+ del vis, pred_vis, micro, pred
+
+ def _init_optim_groups(self, kwargs):
+ optim_groups = []
+ if kwargs.get('decomposed', False): # AE
+ if not kwargs.get('ignore_encoder'):
+ optim_groups += [
+ # vit encoder
+ {
+ 'name': 'encoder',
+ 'params': self.mp_trainer_rec.model.encoder.parameters(),
+ 'lr': kwargs['encoder_lr'],
+ 'weight_decay': kwargs['encoder_weight_decay']
+ },
+ ]
+
+ optim_groups += [
+
+ # vit decoder backbone
+ {
+ 'name':
+ 'decoder.vit_decoder',
+ 'params':
+ self.mp_trainer_rec.model.decoder.vit_decoder.parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ },
+ ]
+
+ # Gaussian-splatting renderers have no MLP decoder, so this group is
+ # only added for non-GS (triplane MLP) renderers
+ if not (
+ isinstance(
+ self.mp_trainer_rec.model.decoder.triplane_decoder,
+ GaussianRenderer) or
+ isinstance(
+ self.mp_trainer_rec.model.decoder.triplane_decoder,
+ GaussianRenderer2DGS)):
+
+ optim_groups.append(
+ # triplane decoder, may include bg synthesis network
+ {
+ 'name':
+ 'decoder.triplane_decoder',
+ 'params':
+ self.mp_trainer_rec.model.decoder.triplane_decoder.
+ parameters(),
+ 'lr':
+ kwargs['triplane_decoder_lr'],
+ # 'weight_decay': self.weight_decay
+ }, )
+
+ if self.mp_trainer_rec.model.decoder.superresolution is not None:
+ optim_groups.append({
+ 'name':
+ 'decoder.superresolution',
+ 'params':
+ self.mp_trainer_rec.model.decoder.superresolution.
+ parameters(),
+ 'lr':
+ kwargs['super_resolution_lr'],
+ })
+
+ if self.mp_trainer_rec.model.dim_up_mlp is not None:
+ optim_groups.append({
+ 'name':
+ 'dim_up_mlp',
+ 'params':
+ self.mp_trainer_rec.model.dim_up_mlp.parameters(),
+ 'lr':
+ kwargs['encoder_lr'],
+ # 'weight_decay':
+ # self.weight_decay
+ })
+
+ # add 3D aware operators
+ if self.mp_trainer_rec.model.decoder.decoder_pred_3d is not None:
+ optim_groups.append({
+ 'name':
+ 'decoder_pred_3d',
+ 'params':
+ self.mp_trainer_rec.model.decoder.decoder_pred_3d.
+ parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ })
+
+ if self.mp_trainer_rec.model.decoder.transformer_3D_blk is not None:
+ optim_groups.append({
+ 'name':
+ 'decoder_transformer_3D_blk',
+ 'params':
+ self.mp_trainer_rec.model.decoder.transformer_3D_blk.
+ parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ })
+
+ if self.mp_trainer_rec.model.decoder.logvar is not None:
+ optim_groups.append({
+ 'name':
+ 'decoder_logvar',
+ 'params':
+ self.mp_trainer_rec.model.decoder.logvar,
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ })
+
+ if self.mp_trainer_rec.model.decoder.decoder_pred is not None:
+ optim_groups.append(
+ # MLP triplane SR
+ {
+ 'name':
+ 'decoder.decoder_pred',
+ 'params':
+ self.mp_trainer_rec.model.decoder.decoder_pred.
+ parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ # 'weight_decay': 0
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ }, )
+
+ if self.mp_trainer_rec.model.confnet is not None:
+ optim_groups.append({
+ 'name':
+ 'confnet',
+ 'params':
+ self.mp_trainer_rec.model.confnet.parameters(),
+ 'lr':
+ 1e-5, # as in unsup3d
+ })
+
+ # self.opt = AdamW(optim_groups)
+
+ if dist_util.get_rank() == 0:
+ logger.log('using an independent optimizer for each component')
+ else:
+ optim_groups = [
+ dict(name='mp_trainer.master_params',
+ params=self.mp_trainer_rec.master_params,
+ lr=self.lr,
+ weight_decay=self.weight_decay)
+ ]
+
+ logger.log(optim_groups)
+
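+ # the groups above feed AdamW directly (e.g.
+ # AdamW(self._init_optim_groups(kwargs)) later in this file), giving
+ # each sub-module its own lr / weight decay.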
+ return optim_groups
+
+ @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ def eval_novelview_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ all_loss_dict = []
+ novel_view_micro = {}
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i == 0:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ if isinstance(v, th.Tensor) else v[0:1]
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ pred = self.rec_model(img=novel_view_micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ # pred_vis = th.cat([
+ # self.pool_64(micro['img']), pred['image_raw'],
+ # pred_depth.repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores_novelview.json'),
+ 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/NovelView/{k}', v,
+ self.step + self.resume_step)
+ del video_out
+ # del pred_vis
+ # del pred
+
+ th.cuda.empty_cache()
+
+ # @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ @th.inference_mode()
+ def eval_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+ all_loss_dict = []
+ self.rec_model.eval()
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {
+ k: v.to(dist_util.dev()) if isinstance(v, th.Tensor) else v
+ for k, v in batch.items()
+ }
+
+ pred = self.rec_model(img=micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ # if last_batch or not self.use_ddp:
+ # loss, loss_dict = self.loss_class(pred, target)
+ # else:
+ # with self.ddp_model.no_sync(): # type: ignore
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+ # gt_vis = th.cat([micro['img'], micro['img']], dim=-1) # TODO, fail to load depth. range [0, 1]
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(1,2,0).cpu().numpy() # ! pred in range[-1, 1]
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores.json'), 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/Rec/{k}', v,
+ self.step + self.resume_step)
+
+ th.cuda.empty_cache()
+ # if 'SuperresolutionHybrid8X' in self.rendering_kwargs: # ffhq/afhq
+ # self.eval_novelview_loop_trajectory()
+ # else:
+ self.eval_novelview_loop()
+ self.rec_model.train()
+
+ @th.inference_mode()
+ def eval_novelview_loop_trajectory(self):
+ # novel view synthesis given evaluation camera trajectory
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}_batch_{i}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ for idx, c in enumerate(self.all_nvs_params):
+ pred = self.rec_model(img=micro['img_to_encoder'],
+ c=c.unsqueeze(0).repeat_interleave(
+ micro['img'].shape[0],
+ 0)) # pred: (B, 3, 64, 64)
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ else:
+
+ # st()
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ # ! concat along the H dim
+ pred_vis = pred_vis.permute(0, 2, 3, 1).flatten(0, 1) # (B*H) W 3
+
+ # vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ # vis = pred_vis.permute(1,2,0).cpu().numpy()
+ vis = pred_vis.cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # for j in range(vis.shape[0]):
+ # video_out.append_data(vis[j])
+ video_out.append_data(vis)
+
+ video_out.close()
+
+ th.cuda.empty_cache()
+
+ def _prepare_nvs_pose(self):
+
+ device = dist_util.dev()
+
+ fov_deg = 18.837 # for ffhq/afhq
+ intrinsics = FOV_to_intrinsics(fov_deg, device=device)
+
+ all_nvs_params = []
+
+ pitch_range = 0.25
+ yaw_range = 0.35
+ num_keyframes = 10 # how many nv poses to sample from
+ w_frames = 1
+
+ cam_pivot = th.Tensor(
+ self.rendering_kwargs.get('avg_camera_pivot')).to(device)
+ cam_radius = self.rendering_kwargs.get('avg_camera_radius')
+
+ for frame_idx in range(num_keyframes):
+
+ cam2world_pose = LookAtPoseSampler.sample(
+ 3.14 / 2 + yaw_range * np.sin(2 * 3.14 * frame_idx /
+ (num_keyframes * w_frames)),
+ 3.14 / 2 - 0.05 +
+ pitch_range * np.cos(2 * 3.14 * frame_idx /
+ (num_keyframes * w_frames)),
+ cam_pivot,
+ radius=cam_radius,
+ device=device)
+
+ camera_params = th.cat(
+ [cam2world_pose.reshape(-1, 16),
+ intrinsics.reshape(-1, 9)], 1)
+
+ all_nvs_params.append(camera_params)
+
+ self.all_nvs_params = th.cat(all_nvs_params, 0)
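+ # each sampled pose is a 25-dim camera vector: the flattened 4x4
+ # cam2world matrix (16) concatenated with the flattened 3x3
+ # intrinsics (9).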
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # th.cuda.empty_cache()
+ self.mp_trainer_rec.zero_grad()
+ batch_size = batch['img_to_encoder'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # wrap forward within amp
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(img=micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ target = micro
+
+ # ! only enable in ffhq dataset
+ conf_sigma_percl = None
+ conf_sigma_percl_flip = None
+ if 'conf_sigma' in pred:
+ # all_conf_sigma_l1, all_conf_sigma_percl = pred['conf_sigma']
+ # all_conf_sigma_l1 = pred['conf_sigma']
+ all_conf_sigma_l1 = th.nn.functional.interpolate(
+ pred['conf_sigma'],
+ size=pred['image_raw'].shape[-2:],
+ mode='bilinear'
+ ) # dynamically resize to target img size
+ conf_sigma_l1 = all_conf_sigma_l1[:, :1]
+ conf_sigma_l1_flip = all_conf_sigma_l1[:, 1:]
+ # conf_sigma_percl = all_conf_sigma_percl[:,:1]
+ # conf_sigma_percl_flip = all_conf_sigma_percl[:,1:]
+ else:
+ conf_sigma = None
+ conf_sigma_l1 = None
+ conf_sigma_l1_flip = None
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict, fg_mask = self.loss_class(
+ pred,
+ target,
+ step=self.step + self.resume_step,
+ test_mode=False,
+ return_fg_mask=True,
+ conf_sigma_l1=conf_sigma_l1,
+ conf_sigma_percl=conf_sigma_percl)
+
+ if self.loss_class.opt.symmetry_loss:
+ loss_dict['conf_sigma_log'] = conf_sigma_l1.log()
+ pose, intrinsics = micro['c'][:, :16].reshape(
+ -1, 4, 4), micro['c'][:, 16:]
+ flipped_pose = flip_yaw(pose)
+ mirror_c = th.cat(
+ [flipped_pose.reshape(-1, 16), intrinsics], -1)
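+ # mirror the camera by flipping its yaw so the horizontally flipped GT
+ # image can serve as pseudo ground truth for the rendered novel view
+ # (symmetry prior, as in unsup3d).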
+
+ nvs_pred = self.rec_model(latent={
+ k: v
+ for k, v in pred.items() if 'latent' in k
+ },
+ c=mirror_c,
+ behaviour='triplane_dec',
+ return_raw_only=True)
+
+ # concat data for supervision
+ nvs_gt = {
+ k: th.flip(target[k], [-1])
+ for k in
+ ['img'] # fliplr leads to wrong color; B 3 H W shape
+ }
+ flipped_fg_mask = th.flip(fg_mask, [-1])
+
+ # if 'conf_sigma' in pred:
+ # conf_sigma = th.flip(pred['conf_sigma'], [-1])
+ # conf_sigma = th.nn.AdaptiveAvgPool2d(fg_mask.shape[-2:])(conf_sigma) # dynamically resize to target img size
+ # else:
+ # conf_sigma=None
+
+ with self.rec_model.no_sync(): # type: ignore
+ loss_symm, loss_dict_symm = self.loss_class.calc_2d_rec_loss(
+ nvs_pred['image_raw'],
+ nvs_gt['img'],
+ flipped_fg_mask,
+ # test_mode=True,
+ test_mode=False,
+ step=self.step + self.resume_step,
+ # conf_sigma=conf_sigma,
+ conf_sigma_l1=conf_sigma_l1_flip,
+ conf_sigma_percl=conf_sigma_percl_flip)
+ # )
+ loss += (loss_symm * 1.0) # as in unsup3d
+ # loss += (loss_symm * 0.5) # as in unsup3d
+ # loss += (loss_symm * 0.01) # as in unsup3d
+ # if conf_sigma is not None:
+ # loss += th.nn.functional.mse_loss(conf_sigma, flipped_fg_mask) * 0.001 # a log that regularizes all confidence to 1
+ for k, v in loss_dict_symm.items():
+ loss_dict[f'{k}_symm'] = v
+ loss_dict[
+ 'flip_conf_sigma_log'] = conf_sigma_l1_flip.log()
+
+ # ! add density-reg in eg3d, tv-loss
+
+ if self.loss_class.opt.density_reg > 0 and self.step % self.loss_class.opt.density_reg_every == 0:
+
+ initial_coordinates = th.rand(
+ (batch_size, 1000, 3),
+ device=dist_util.dev()) * 2 - 1 # [-1, 1]
+ perturbed_coordinates = initial_coordinates + th.randn_like(
+ initial_coordinates
+ ) * self.loss_class.opt.density_reg_p_dist
+ all_coordinates = th.cat(
+ [initial_coordinates, perturbed_coordinates], dim=1)
+
+ sigma = self.rec_model(
+ latent=pred['latent'],
+ coordinates=all_coordinates,
+ directions=th.randn_like(all_coordinates),
+ behaviour='triplane_renderer',
+ )['sigma']
+
+ sigma_initial = sigma[:, :sigma.shape[1] // 2]
+ sigma_perturbed = sigma[:, sigma.shape[1] // 2:]
+
+ TVloss = th.nn.functional.l1_loss(
+ sigma_initial,
+ sigma_perturbed) * self.loss_class.opt.density_reg
+
+ loss_dict.update(dict(tv_loss=TVloss))
+ loss += TVloss
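+ # the block above is EG3D-style density regularization: an L1 penalty
+ # between sigma at random points and at slightly perturbed copies,
+ # encouraging locally smooth density (a TV-like prior).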
+
+ self.mp_trainer_rec.backward(loss)
+ log_rec3d_loss_dict(loss_dict)
+
+ # for name, p in self.rec_model.named_parameters():
+ # if p.grad is None:
+ # logger.log(f"found rec unused param: {name}")
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ def norm_depth(pred_depth): # to [-1,1]
+ # pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ return -(pred_depth * 2 - 1)
+
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # infer novel view also
+ if self.loss_class.opt.symmetry_loss:
+ pred_nv_img = nvs_pred
+ else:
+ pred_nv_img = self.rec_model(
+ img=micro['img_to_encoder'],
+ c=self.novel_view_poses) # pred: (B, 3, 64, 64)
+
+ # if 'depth' in micro:
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = norm_depth(gt_depth)
+ # gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ # gt_depth.min())
+ # if True:
+ fg_mask = pred['image_mask'] * 2 - 1 # [0, 1] -> [-1, 1]
+ nv_fg_mask = pred_nv_img['image_mask'] * 2 - 1 # [0, 1] -> [-1, 1]
+ if 'image_depth' in pred:
+ pred_depth = norm_depth(pred['image_depth'])
+ pred_nv_depth = norm_depth(pred_nv_img['image_depth'])
+ else:
+ pred_depth = th.zeros_like(gt_depth)
+ pred_nv_depth = th.zeros_like(gt_depth)
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+ else:
+ gt_img = self.pool_128(gt_img)
+ gt_depth = self.pool_128(gt_depth)
+
+ pred_vis = th.cat([
+ pred_img,
+ pred_depth.repeat_interleave(3, dim=1),
+ fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ if 'conf_sigma' in pred:
+ conf_sigma_l1 = (1 / (conf_sigma_l1 + 1e-7)
+ ).repeat_interleave(3, dim=1) * 2 - 1
+ pred_vis = th.cat([
+ pred_vis,
+ conf_sigma_l1,
+ ], dim=-1) # B, 3, H, W
+
+ pred_vis_nv = th.cat([
+ pred_nv_img['image_raw'],
+ pred_nv_depth.repeat_interleave(3, dim=1),
+ nv_fg_mask.repeat_interleave(3, dim=1),
+ ],
+ dim=-1) # B, 3, H, W
+
+ if 'conf_sigma' in pred:
+ # conf_sigma_for_vis = (1/conf_sigma).repeat_interleave(3, dim=1)
+ # conf_sigma_for_vis = (conf_sigma_for_vis / conf_sigma_for_vis.max() ) * 2 - 1 # normalize to [-1,1]
+ conf_sigma_for_vis_flip = (
+ 1 / (conf_sigma_l1_flip + 1e-7)).repeat_interleave(
+ 3, dim=1) * 2 - 1
+ pred_vis_nv = th.cat(
+ [
+ pred_vis_nv,
+ conf_sigma_for_vis_flip,
+ # th.cat([conf_sigma_for_vis, flipped_fg_mask*2-1], -1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat([pred_vis, pred_vis_nv],
+ dim=-2) # cat in H dim
+
+ gt_vis = th.cat(
+ [
+ gt_img,
+ gt_depth.repeat_interleave(3, dim=1),
+ th.zeros_like(gt_img)
+ ],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ if 'conf_sigma' in pred:
+ gt_vis = th.cat([gt_vis, fg_mask],
+ dim=-1) # placeholder
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # st()
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+ # .permute(
+ # 0, 2, 3, 1).cpu()
+ vis_tensor = torchvision.utils.make_grid(
+ vis, nrow=vis.shape[-1] // 64) # HWC
+ torchvision.utils.save_image(
+ vis_tensor,
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg',
+ value_range=(-1, 1),
+ normalize=True)
+ # vis = vis.numpy() * 127.5 + 127.5
+ # vis = vis.clip(0, 255).astype(np.uint8)
+
+ # Image.fromarray(vis).save(
+ # f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}.jpg')
+
+ # self.writer.add_image(f'images',
+ # vis,
+ # self.step + self.resume_step,
+ # dataformats='HWC')
+ return pred
+
+
+class TrainLoop3DTriplaneRec(TrainLoop3DRec):
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ compile=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ compile=compile,
+ **kwargs)
+
+ @th.inference_mode()
+ def eval_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ # (the video writer is created once below, after reading the demo pose;
+ # creating it here as well would leak an open writer on the same path)
+ all_loss_dict = []
+ self.rec_model.eval()
+
+ device = dist_util.dev()
+
+ # to get intrinsics
+ demo_pose = next(self.data)
+ intrinsics = demo_pose['c'][0][16:25].to(device)
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=24,
+ bitrate='10M',
+ codec='libx264')
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ # for i, batch in enumerate(tqdm(self.eval_data)):
+
+ cam_pivot = th.tensor([0, 0, 0], device=dist_util.dev())
+ cam_radius = 1.8
+
+ pitch_range = 0.45
+ yaw_range = 3.14 # 0.35
+ frames = 72
+
+ # TODO, use PanoHead trajectory
+ # for frame_idx in range(frames):
+
+ for pose_idx, (angle_y, angle_p) in enumerate(
+ # zip(np.linspace(-0.4, 0.4, 72), [-0.2] * 72)):
+ # zip(np.linspace(-1.57, 1.57, 72), [-1.57] * 72)):
+ # zip(np.linspace(0,3.14, 72), [0] * 72)): # check canonical pose
+ zip([0.2] * 72, np.linspace(-3.14, 3.14, 72))):
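+ # 72-frame orbit: hold the first angle at a fixed 0.2 rad offset while
+ # sweeping the second over a full circle around the pivot.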
+
+ # cam2world_pose = LookAtPoseSampler.sample(3.14/2 + yaw_range * np.cos(2 * 3.14 * frame_idx / (frames)),
+ # 3.14/2 -0.05 + pitch_range * np.sin(2 * 3.14 * frame_idx / (frames)),
+ # cam_pivot,
+ # radius=cam_radius, device=device)
+
+ cam2world_pose = LookAtPoseSampler.sample(
+ np.pi / 2 + angle_y,
+ np.pi / 2 + angle_p,
+ # angle_p,
+ cam_pivot,
+ # horizontal_stddev=0.1, # 0.25
+ # vertical_stddev=0.125, # 0.35,
+ radius=cam_radius,
+ device=device)
+
+ camera_params = th.cat(
+ [cam2world_pose.reshape(-1, 16),
+ intrinsics.reshape(-1, 9)], 1).to(dist_util.dev())
+
+ # micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+ micro = {'c': camera_params}
+
+ pred = self.rec_model(c=micro['c'])
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ pred_vis = th.cat([
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ self.rec_model.train()
+
+
+class TrainLoop3DRecTrajVis(TrainLoop3DRec):
+
+ def __init__(self,
+ *,
+ rec_model,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ model_name='rec',
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ **kwargs)
+ self.rendering_kwargs = self.rec_model.module.decoder.triplane_decoder.rendering_kwargs # type: ignore
+ self._prepare_nvs_pose() # for eval novelview visualization
+
+ @th.inference_mode()
+ def eval_novelview_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}_batch_{i}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ for idx, c in enumerate(self.all_nvs_params):
+ pred = self.rec_model(img=micro['img_to_encoder'],
+ c=c.unsqueeze(0).repeat_interleave(
+ micro['img'].shape[0],
+ 0)) # pred: (B, 3, 64, 64)
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3,
+ dim=1)
+ ],
+ dim=-1)
+
+ else:
+
+ # st()
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ # ! concat along the H dim
+ pred_vis = pred_vis.permute(0, 2, 3, 1).flatten(0, 1) # (B*H) W 3
+
+ # vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ # vis = pred_vis.permute(1,2,0).cpu().numpy()
+ vis = pred_vis.cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # for j in range(vis.shape[0]):
+ # video_out.append_data(vis[j])
+ video_out.append_data(vis)
+
+ video_out.close()
+
+ th.cuda.empty_cache()
+
+ def _prepare_nvs_pose(self):
+
+ device = dist_util.dev()
+
+ fov_deg = 18.837 # for ffhq/afhq
+ intrinsics = FOV_to_intrinsics(fov_deg, device=device)
+
+ all_nvs_params = []
+
+ pitch_range = 0.25
+ yaw_range = 0.35
+ num_keyframes = 10 # how many nv poses to sample from
+ w_frames = 1
+
+ cam_pivot = th.Tensor(
+ self.rendering_kwargs.get('avg_camera_pivot')).to(device)
+ cam_radius = self.rendering_kwargs.get('avg_camera_radius')
+
+ for frame_idx in range(num_keyframes):
+
+ cam2world_pose = LookAtPoseSampler.sample(
+ 3.14 / 2 + yaw_range * np.sin(2 * 3.14 * frame_idx /
+ (num_keyframes * w_frames)),
+ 3.14 / 2 - 0.05 +
+ pitch_range * np.cos(2 * 3.14 * frame_idx /
+ (num_keyframes * w_frames)),
+ cam_pivot,
+ radius=cam_radius,
+ device=device)
+
+ camera_params = th.cat(
+ [cam2world_pose.reshape(-1, 16),
+ intrinsics.reshape(-1, 9)], 1)
+
+ all_nvs_params.append(camera_params)
+
+ self.all_nvs_params = th.cat(all_nvs_params, 0)
diff --git a/nsr/train_util_DiT.py b/nsr/train_util_DiT.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbf5cf53bf44c1e6c2047c2e22d594e0bf9687a6
--- /dev/null
+++ b/nsr/train_util_DiT.py
@@ -0,0 +1,1040 @@
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+# NUM_CLASSES is referenced by the class-conditional sampling paths below;
+# assumed to live in guided_diffusion.script_util, as in upstream guided-diffusion.
+from guided_diffusion.script_util import NUM_CLASSES
+
+# from ..guided_diffusion.train_util import TrainLoop
+
+# use_amp=True
+use_amp = False
+if use_amp:
+ logger.log('DiT using AMP')
+
+from .train_util_diffusion import TrainLoop3DDiffusion
+import dnnlib
+
+
+class TrainLoop3DDiffusionDiT(TrainLoop3DDiffusion):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ **kwargs)
+
+ def eval_ddpm_sample(self):
+
+ args = dnnlib.EasyDict(
+ dict(batch_size=1,
+ image_size=224,
+ denoise_in_channels=24,
+ clip_denoised=True,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+ if args.class_cond:
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+
+ for i in range(1):
+ triplane_sample = sample_fn(
+ self.ddp_model,
+ (args.batch_size, args.denoise_in_channels, args.image_size,
+ args.image_size), #
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ ) # B 8 H W*3
+
+ # print(triplane_sample.shape)
+
+ # B, C, H, W = triplane_sample.shape
+ # triplane_sample = triplane_sample.reshape(B, C, H, W//3, 3).permute(0,1,4,2,3) # c*3 order
+ # triplane_sample.reshape(B, -1, H, W//3) # B 24 H W
+
+ self.render_video_given_triplane(
+ triplane_sample,
+ name_prefix=f'{self.step + self.resume_step}_{i}')
+
+
+class TrainLoop3DDiffusionDiTOverfit(TrainLoop):
+
+ def __init__(
+ self,
+ *,
+ # model,
+ rec_model,
+ denoise_model,
+ diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ **kwargs):
+
+ super().__init__(model=denoise_model,
+ diffusion=diffusion,
+ data=data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ lr_anneal_steps=lr_anneal_steps,
+ weight_decay=weight_decay,
+ use_amp=use_amp)
+
+ # self.accelerator = Accelerator()
+
+ self.pool_512 = th.nn.AdaptiveAvgPool2d((512, 512))
+ self.pool_128 = th.nn.AdaptiveAvgPool2d((128, 128))
+ self.loss_class = loss_class
+ self.rec_model = rec_model
+ self.eval_interval = eval_interval
+ self.eval_data = eval_data
+ self.iterations = iterations
+ # self.triplane_std = 10
+ self.triplane_scaling_divider = triplane_scaling_divider
+
+ self._load_and_sync_parameters(model=self.rec_model, model_name='rec')
+
+ # * for loading EMA
+ self.mp_trainer_rec = MixedPrecisionTrainer(
+ model=self.rec_model,
+ use_fp16=self.use_fp16,
+ use_amp=use_amp,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='rec',
+ )
+ self.denoised_ae = denoised_ae
+ if not freeze_ae:
+ self.opt_rec = AdamW(self._init_optim_groups(kwargs))
+ else:
+ print('!! freezing AE !!')
+
+ if dist_util.get_rank() == 0:
+ self.writer = SummaryWriter(log_dir=f'{logger.get_dir()}/runs')
+ print(self.opt)
+ if not freeze_ae:
+ print(self.opt_rec)
+
+ # if not freeze_ae:
+ if self.resume_step:
+ if not ignore_resume_opt:
+ self._load_optimizer_state()
+ else:
+ logger.warn("Ignoring optimizer state from checkpoint.")
+ # Model was resumed, either due to a restart or a checkpoint
+ # being specified at the command line.
+ # if not freeze_ae:
+ # self.ema_params_rec = [
+ # self._load_ema_parameters(
+ # rate,
+ # self.rec_model,
+ # self.mp_trainer_rec,
+ # model_name=self.mp_trainer_rec.model_name)
+ # for rate in self.ema_rate
+ # ]
+ # else:
+ self.ema_params_rec = [
+ self._load_ema_parameters(
+ rate,
+ self.rec_model,
+ self.mp_trainer_rec,
+ model_name=self.mp_trainer_rec.model_name)
+ for rate in self.ema_rate
+ ]
+ else:
+ if not freeze_ae:
+ self.ema_params_rec = [
+ copy.deepcopy(self.mp_trainer_rec.master_params)
+ for _ in range(len(self.ema_rate))
+ ]
+
+ if self.use_ddp is True:
+ self.rec_model = th.nn.SyncBatchNorm.convert_sync_batchnorm(
+ self.rec_model)
+ self.ddp_rec_model = DDP(
+ self.rec_model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ # find_unused_parameters=True,
+ )
+ else:
+ self.ddp_rec_model = self.rec_model
+
+ if freeze_ae:
+ self.ddp_rec_model.eval()
+ self.ddp_rec_model.requires_grad_(False)
+ self.freeze_ae = freeze_ae
+
+ # if use_amp:
+
+ def _init_optim_groups(self, kwargs):
+ optim_groups = [
+ # vit encoder
+ {
+ 'name': 'vit_encoder',
+ 'params': self.mp_trainer_rec.model.encoder.parameters(),
+ 'lr': kwargs['encoder_lr'],
+ 'weight_decay': kwargs['encoder_weight_decay']
+ },
+ # vit decoder
+ {
+ 'name':
+ 'vit_decoder',
+ 'params':
+ self.mp_trainer_rec.model.decoder.vit_decoder.parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ },
+ {
+ 'name':
+ 'vit_decoder_pred',
+ 'params':
+ self.mp_trainer_rec.model.decoder.decoder_pred.parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ # 'weight_decay': 0
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ },
+
+ # triplane decoder
+ {
+ 'name':
+ 'triplane_decoder',
+ 'params':
+ self.mp_trainer_rec.model.decoder.triplane_decoder.parameters(
+ ),
+ 'lr':
+ kwargs['triplane_decoder_lr'],
+ # 'weight_decay': self.weight_decay
+ },
+ ]
+
+ if self.mp_trainer_rec.model.decoder.superresolution is not None:
+ optim_groups.append({
+ 'name':
+ 'triplane_decoder_superresolution',
+ 'params':
+ self.mp_trainer_rec.model.decoder.superresolution.parameters(),
+ 'lr':
+ kwargs['super_resolution_lr'],
+ })
+
+ return optim_groups
+
+ def run_loop(self, batch=None):
+ th.cuda.empty_cache()
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ batch = next(self.data)
+ self.run_step(batch)
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_ddpm_sample()
+ # continue # TODO, diffusion inference
+ # self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+ th.cuda.empty_cache()
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+ dist_util.synchronize()
+
+ th.cuda.empty_cache()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+
+ def run_step(self, batch, cond=None):
+ self.forward_backward(batch,
+ cond) # type: ignore # * 3D Reconstruction step
+ took_step_ddpm = self.mp_trainer.optimize(self.opt)
+ if took_step_ddpm:
+ self._update_ema()
+
+ if not self.freeze_ae:
+ took_step_rec = self.mp_trainer_rec.optimize(self.opt_rec)
+ if took_step_rec:
+ self._update_ema_rec()
+
+ self._anneal_lr()
+ self.log_step()
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # return super().forward_backward(batch, *args, **kwargs)
+ self.mp_trainer.zero_grad()
+ # all_denoised_out = dict()
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # if not freeze_ae:
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp
+ and not self.freeze_ae):
+ # with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=False,): # ! debugging, no AMP on all the input
+
+ # pred = self.ddp_rec_model(img=micro['img_to_encoder'],
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+ # if not self.freeze_ae:
+ # target = micro
+
+ # if last_batch or not self.use_ddp:
+ # ae_loss, loss_dict = self.loss_class(pred,
+ # target,
+ # test_mode=False)
+ # else:
+ # with self.ddp_model.no_sync(): # type: ignore
+ # ae_loss, loss_dict = self.loss_class(
+ # pred, target, test_mode=False)
+
+ # log_rec3d_loss_dict(loss_dict)
+ # else:
+ # ae_loss = th.tensor(0.0).to(dist_util.dev())
+
+ # micro_to_denoise = micro['img']
+ # micro_to_denoise = micro['img'].repeat_interleave(
+ # 8, dim=1) # B 3*8 H W
+ micro_to_denoise = micro['img'].repeat_interleave(2, dim=1) # B 6 H W, matches denoise_in_channels=6
+ # micro_to_denoise = micro['img'].repeat_interleave(1, dim=1) # B 3*8 H W
+
+ # micro_to_denoise = pred[
+ # 'latent'] / self.triplane_scaling_divider # normalize std to 1
+
+ t, weights = self.schedule_sampler.sample(
+ micro_to_denoise.shape[0], dist_util.dev())
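+ # draw diffusion timesteps (with importance weights) for this
+ # microbatch; a LossAwareSampler reweights timesteps by their running
+ # loss via update_with_local_losses below.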
+
+ # print('!!!', micro_to_denoise.dtype)
+ # =================================== denoised part ===================================
+
+ model_kwargs = {}
+
+ # print(micro_to_denoise.min(), micro_to_denoise.max())
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ micro_to_denoise, # x_start
+ t,
+ model_kwargs=model_kwargs,
+ )
+
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ # denoised_out = denoised_fn()
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ losses = compute_losses()
+ # denoised_out = denoised_fn()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ denoise_loss = (losses["loss"] * weights).mean()
+
+ x_t = losses['x_t']
+ losses.pop('x_t')
+
+ log_loss_dict(self.diffusion, t,
+ {k: v * weights
+ for k, v in losses.items()})
+
+ loss = denoise_loss # ! leave only denoise_loss for debugging
+
+ # exit AMP before backward
+ self.mp_trainer.backward(loss)
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_img = pred['image_raw']
+ # gt_img = micro['img']
+
+ # if 'image_sr' in pred: # TODO
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ # gt_vis = th.cat(
+ # [
+ # # gt_img,
+ # micro['img'],
+ # # gt_depth.repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ # if not self.denoised_ae:
+ # # continue
+
+ # denoised_ae_pred = self.ddp_rec_model(
+ # img=None,
+ # c=micro['c'][0:1],
+ # latent=denoised_out['pred_xstart'][0:1] * self.
+ # triplane_scaling_divider, # TODO, how to define the scale automatically
+ # behaviour='triplane_dec')
+
+ # assert denoised_ae_pred is not None
+
+ # print(pred_img.shape)
+ # print('denoised_ae:', self.denoised_ae)
+
+ # pred_vis = th.cat([
+ # pred_img[0:1], denoised_ae_pred['image_raw'],
+ # pred_depth[0:1].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # x_t = self.diffusion.q_sample(
+ # x_start, t, noise=noise
+ # ) # * add noise according to predefined schedule
+
+ denoised_fn = functools.partial(
+ self.diffusion.p_mean_variance,
+ self.ddp_model,
+ x_t, # the noised sample at timestep t
+ t,
+ model_kwargs=model_kwargs)
+
+ denoised_out = denoised_fn()
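+ # p_mean_variance returns the model posterior at (x_t, t), including
+ # pred_xstart, the denoised estimate of x_0 that is visualized below
+ # next to the noised input.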
+
+ vis = th.cat([
+ micro['img'], x_t[:, :3, ...],
+ denoised_out['pred_xstart'][:, :3, ...]
+ ],
+ dim=-1)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}.jpg'
+ )
+
+ th.cuda.empty_cache()
+
+ @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ def eval_novelview_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ all_loss_dict = []
+ novel_view_micro = {}
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i == 0:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ # use the reconstruction model here; self.model is the denoiser
+ pred = self.ddp_rec_model(img=novel_view_micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ if 'image_sr' in pred:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores_novelview.json'),
+ 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/NovelView/{k}', v,
+ self.step + self.resume_step)
+
+ @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ def eval_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+ all_loss_dict = []
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ # pred = self.model(img=micro['img_to_encoder'],
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ # pred of rec model
+ pred = self.ddp_rec_model(img=micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ # loss_class is never invoked in this overfit eval loop, so guard the
+ # score aggregation against an empty list
+ if all_loss_dict:
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores.json'), 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/Rec/{k}', v,
+ self.step + self.resume_step)
+
+ self.eval_novelview_loop()
+
+ def save(self, mp_trainer=None, model_name='ddpm'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, self.mp_trainer.master_params)
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ dist.barrier()
+
+ def _load_and_sync_parameters(self, model=None, model_name='ddpm'):
+ # find_resume_checkpoint may return nothing; fall back to the raw path
+ # (parsing the step from its filename) so the tuple unpack cannot fail
+ resume_checkpoint, self.resume_step = find_resume_checkpoint(
+ self.resume_checkpoint, model_name) or (
+ self.resume_checkpoint,
+ parse_resume_step_from_filename(self.resume_checkpoint))
+
+ if model is None:
+ model = self.model
+ print(resume_checkpoint)
+
+ if resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+
+ # ! rank 0 return will cause all other ranks to hang
+ # if not Path(resume_checkpoint).exists():
+ # logger.log(
+ # f"failed to load model from checkpoint: {resume_checkpoint}, not exist"
+ # )
+ # return
+
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ print(f'mark {model_name} loading ', flush=True)
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ print(f'mark {model_name} loading finished', flush=True)
+
+ model_state_dict = model.state_dict()
+
+ for k, v in resume_state_dict.items():
+ if k in model_state_dict.keys() and v.size(
+ ) == model_state_dict[k].size():
+ model_state_dict[k] = v
+
+ # elif 'IN' in k and model_name == 'rec' and getattr(model.decoder, 'decomposed_IN', False):
+ # model_state_dict[k.replace('IN', 'superresolution.norm.norm_layer')] = v # decomposed IN
+
+                    else:
+                        print('!!!! ignore key: ', k, ': ', v.size())
+                        # guard the lookup: k may be absent from the model
+                        if k in model_state_dict:
+                            print('shape in model: ',
+                                  model_state_dict[k].size())
+                        else:
+                            print(k, 'not in model_state_dict')
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ print(f'synced {model_name} params')
+
+ def _update_ema_rec(self):
+ for rate, params in zip(self.ema_rate, self.ema_params_rec):
+ update_ema(params, self.mp_trainer_rec.master_params, rate=rate)
+
+ def eval_ddpm_sample(self):
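+        """Draw an unconditional sample from the diffusion model and save the
+        first three channels of the result as a PNG under the log dir.
+        """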
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ image_size=128,
+ # denoise_in_channels=3,
+ # denoise_in_channels=24,
+ denoise_in_channels=6,
+ # denoise_in_channels=6,
+ clip_denoised=True,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+ if args.class_cond:
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+
+ for i in range(1):
+ img_sample = sample_fn(
+ self.ddp_model,
+ (args.batch_size, args.denoise_in_channels, args.image_size,
+ args.image_size),
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ )
+
+ pred_vis = img_sample
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()[0][..., :3]
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step + self.resume_step}_{i}.png')
+
+ # th.cuda.empty_cache()
+ # self.render_video_given_triplane(
+ # triplane_sample,
+ # name_prefix=f'{self.step + self.resume_step}_{i}')
+
+ th.cuda.empty_cache()
+
+ @th.inference_mode()
+ def render_video_given_triplane(self, planes, name_prefix='0'):
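+        """Decode a sampled triplane latent with the reconstruction model
+        along the eval camera trajectory and write the frames to an mp4.
+        """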
+
+        # de-normalize: with clip_denoised=True the sampled planes lie in
+        # [-1, 1], so values beyond +- one std are discarded in this version;
+        # move to IN for later experiments
+        planes *= self.triplane_scaling_divider
+
+ # print(planes.min(), planes.max())
+
+ # used during diffusion sampling inference
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ pred = self.ddp_rec_model(img=None,
+ c=micro['c'],
+ latent=planes,
+ behaviour='triplane_dec')
+
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4')
+
+ @th.inference_mode()
+ def render_video_noise_schedule(self, name_prefix='0'):
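+        """Visualize the forward diffusion: encode an eval view, q_sample it
+        at increasing timesteps t, decode each noised latent and write the
+        resulting image grids to an mp4.
+        """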
+
+ # planes *= self.triplane_std # denormalize for rendering
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_visnoise_{name_prefix}.mp4',
+ mode='I',
+ fps=30,
+ codec='libx264')
+
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i % 10 != 0:
+ continue
+
+ # ========= novel view plane settings ====
+ if i == 0:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ latent = self.ddp_rec_model(
+ img=novel_view_micro['img_to_encoder'],
+ c=micro['c'])['latent'] # pred: (B, 3, 64, 64)
+
+ x_start = latent / self.triplane_scaling_divider # normalize std to 1
+ # x_start = latent
+
+ all_pred_vis = []
+ # for t in th.range(0,
+ # 4001,
+ # 500,
+ # dtype=th.long,
+ # device=dist_util.dev()): # cosine 4k steps
+ for t in th.range(0,
+ 1001,
+ 125,
+ dtype=th.long,
+ device=dist_util.dev()): # cosine 4k steps
+
+ # ========= add noise according to t
+ noise = th.randn_like(x_start) # x_start is the x0 image
+ x_t = self.diffusion.q_sample(
+ x_start, t, noise=noise
+ ) # * add noise according to predefined schedule
+ planes_x_t = (x_t * self.triplane_scaling_divider).clamp(
+ -50, 50) # de-scaling noised x_t
+
+ # planes_x_t = (x_t * 1).clamp(
+ # -50, 50) # de-scaling noised x_t
+
+ # ===== visualize
+ pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'],
+ latent=planes_x_t,
+ behaviour='triplane_dec') # pred: (B, 3, 64, 64)
+
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_vis = th.cat([
+ # # self.pool_128(micro['img']),
+ # pred['image_raw'],
+ # ],
+ # dim=-1) # B, 3, H, W
+ pred_vis = pred['image_raw']
+
+ all_pred_vis.append(pred_vis)
+ # TODO, make grid
+
+ all_pred_vis = torchvision.utils.make_grid(
+ th.cat(all_pred_vis, 0),
+ nrow=len(all_pred_vis),
+ normalize=True,
+ value_range=(-1, 1),
+                scale_each=True)  # maps the (-1, 1) input range to [0, 1]
+
+ vis = all_pred_vis.permute(1, 2, 0).cpu().numpy() # H W 3
+
+ vis = (vis * 255).clip(0, 255).astype(np.uint8)
+
+ video_out.append_data(vis)
+
+ video_out.close()
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_visnoise_{name_prefix}.mp4')
+
+ th.cuda.empty_cache()
diff --git a/nsr/train_util_cvD.py b/nsr/train_util_cvD.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a1a89da6576457ada7d85dac7fc25f216e6888
--- /dev/null
+++ b/nsr/train_util_cvD.py
@@ -0,0 +1,637 @@
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from tqdm import tqdm
+
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion import dist_util, logger
+from guided_diffusion.train_util import (calc_average_loss,
+ log_rec3d_loss_dict,
+ find_resume_checkpoint)
+
+from torch.optim import AdamW
+
+from .train_util import TrainLoopBasic, TrainLoop3DRec
+import vision_aided_loss
+from dnnlib.util import calculate_adaptive_weight
+
+
+def get_blob_logdir():
+ # You can change this to be a separate path to save checkpoints to
+ # a blobstore or some external drive.
+ return logger.get_dir()
+
+
+class TrainLoop3DcvD(TrainLoop3DRec):
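+    """Reconstruction training with an extra vision-aided (CLIP-based)
+    discriminator on rendered views; run_loop() alternates g_step_rec,
+    g_step_nvs and d_step updates.
+    """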
+
+ def __init__(
+ self,
+ *,
+ rec_model,
+ loss_class,
+ # diffusion,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ # schedule_sampler=None,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ load_submodule_name='',
+ ignore_resume_opt=False,
+ use_amp=False,
+ cvD_name='cvD',
+ model_name='rec',
+ # SR_TRAINING=True,
+ SR_TRAINING=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ load_submodule_name=load_submodule_name,
+ ignore_resume_opt=ignore_resume_opt,
+ model_name=model_name,
+ use_amp=use_amp,
+ cvD_name=cvD_name,
+ **kwargs)
+
+ # self.rec_model = self.ddp_model
+
+ # device = loss_class.device
+ device = dist_util.dev()
+ # * create vision aided model
+ # TODO, load model
+ self.nvs_cvD = vision_aided_loss.Discriminator(
+ cv_type='clip', loss_type='multilevel_sigmoid_s',
+ device=device).to(device)
+ self.nvs_cvD.cv_ensemble.requires_grad_(False) # Freeze feature extractor
+ # self.nvs_cvD.train()
+
+ #
+ # SR_TRAINING = False
+        cvD_model_params = list(self.nvs_cvD.decoder.parameters())
+ self.SR_TRAINING = SR_TRAINING
+ # SR_TRAINING = True
+ if SR_TRAINING:
+            # widen the CLIP stem conv to accept 6-channel input (two RGB
+            # images stacked along the channel dim) and make it trainable
+            vision_width, vision_patch_size = [
+                self.nvs_cvD.cv_ensemble.models[0].model.conv1.weight.shape[k]
+                for k in [0, -1]
+            ]
+            self.nvs_cvD.cv_ensemble.models[0].model.conv1 = th.nn.Conv2d(
+                in_channels=6,
+                out_channels=vision_width,
+                kernel_size=vision_patch_size,
+                stride=vision_patch_size,
+                bias=False).to(dist_util.dev())
+            self.nvs_cvD.cv_ensemble.models[0].model.conv1.requires_grad_(True)
+            cvD_model_params += list(
+                self.nvs_cvD.cv_ensemble.models[0].model.conv1.parameters())
+
+            # duplicate the normalization stats to match the 6-channel input
+            self.nvs_cvD.cv_ensemble.models[0].image_mean = \
+                self.nvs_cvD.cv_ensemble.models[0].image_mean.repeat(2)
+            self.nvs_cvD.cv_ensemble.models[0].image_std = \
+                self.nvs_cvD.cv_ensemble.models[0].image_std.repeat(2)
+
+ # logger.log(f'nvs_cvD_model_params: {cvD_model_params}')
+
+ self._load_and_sync_parameters(model=self.nvs_cvD, model_name='cvD')
+
+ self.mp_trainer_cvD = MixedPrecisionTrainer(
+ model=self.nvs_cvD,
+ use_fp16=self.use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name=cvD_name,
+ use_amp=use_amp,
+ model_params=cvD_model_params
+ )
+
+ # cvD_lr = 4e-5*(lr/1e-5)
+ # cvD_lr = 4e-4*(lr/1e-5)
+ cvD_lr = 1e-4*(lr/1e-5) * self.loss_class.opt.nvs_D_lr_mul
+ # cvD_lr = 1e-5*(lr/1e-5)
+        self.opt_cvD = AdamW(
+            self.mp_trainer_cvD.master_params,
+            lr=cvD_lr,
+            betas=(0, 0.999),
+            eps=1e-8)  # discriminator optimizer settings follow the BigGAN config
+
+        logger.log(f'opt_cvD lr: {cvD_lr}')
+
+ if self.use_ddp:
+ self.ddp_nvs_cvD = DDP(
+ self.nvs_cvD,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ )
+ else:
+ self.ddp_nvs_cvD = self.nvs_cvD
+
+ th.cuda.empty_cache()
+
+ def run_step(self, batch, step='g_step'):
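+        """Dispatch one optimization step: 'g_step_rec' updates the generator
+        on the reconstruction objective, 'g_step_nvs' on the adversarial
+        novel-view objective, and 'd_step' updates the discriminator.
+        """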
+ # self.forward_backward(batch)
+
+ if step == 'g_step_rec':
+ self.forward_G_rec(batch)
+ took_step_g_rec = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_rec:
+ self._update_ema() # g_ema
+
+ elif step == 'g_step_nvs':
+ self.forward_G_nvs(batch)
+ took_step_g_nvs = self.mp_trainer_rec.optimize(self.opt)
+
+ if took_step_g_nvs:
+ self._update_ema() # g_ema
+
+ elif step == 'd_step':
+ self.forward_D(batch)
+ _ = self.mp_trainer_cvD.optimize(self.opt_cvD)
+
+ self._anneal_lr()
+ self.log_step()
+
+ def run_loop(self):
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_rec')
+
+ batch = next(self.data)
+ self.run_step(batch, 'g_step_nvs')
+
+ batch = next(self.data)
+ self.run_step(batch, 'd_step')
+
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.eval_interval == 0 and self.step != 0:
+ if dist_util.get_rank() == 0:
+ self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ if self.step % self.save_interval == 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+ dist_util.synchronize()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ logger.log('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ self.save(self.mp_trainer_cvD, 'cvD')
+
+ # def forward_backward(self, batch, *args, **kwargs):
+ # blur_sigma = max(1 - cur_nimg / (self.blur_fade_kimg * 1e3), 0) * self.blur_init_sigma if self.blur_fade_kimg > 0 else 0
+
+ def run_D_Diter(self, real, fake, D=None):
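+        """Combined discriminator loss on a real/fake pair; defaults to the
+        vision-aided novel-view discriminator.
+        """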
+        # discriminator objective: score real samples as real and generated samples as fake
+ if D is None:
+ D = self.ddp_nvs_cvD
+
+ lossD = D(real, for_real=True).mean() + D(
+ fake, for_real=False).mean()
+ return lossD
+
+ def forward_D(self, batch): # update D
+ self.mp_trainer_cvD.zero_grad()
+ self.ddp_nvs_cvD.requires_grad_(True)
+ self.rec_model.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ # * sample a new batch for D training
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_cvD.use_amp):
+
+ # pred = self.rec_model(img=micro['img_to_encoder'],
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'],
+                    c=th.cat([
+                        micro['c'][1:],
+                        micro['c'][:1],
+                    ]))  # roll the cameras by one so each image is rendered from another view
+
+                d_loss_cv = self.run_D_Diter(
+                    real=micro['img_to_encoder'],
+                    fake=pred['image_raw'])  # TODO, add SR for FFHQ
+
+            log_rec3d_loss_dict({'vision_aided_loss/D': d_loss_cv})
+
+            self.mp_trainer_cvD.backward(d_loss_cv)
+
+ def forward_G_rec(self, batch): # update G
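+        """Generator step on the reconstruction objective plus a vision-aided
+        adversarial term, weighted adaptively against the rec loss.
+        """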
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # VQ3D novel view d loss
+ # duplicated_for_nvs = th.cat([
+ # micro['img_to_encoder'][:batch_size - 2],
+ # micro['img_to_encoder'][:2]
+ # ], 0)
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ img=micro['img_to_encoder'], c=micro['c']
+ ) # render novel view for first half of the batch for D loss
+
+ target_for_rec = micro
+ pred_for_rec = pred
+
+ # pred_for_rec = {
+ # k: v[:batch_size - 2] if v is not None else None
+ # for k, v in pred.items()
+ # }
+ # target_for_rec = {
+ # k: v[:batch_size - 2] if v is not None else None
+ # for k, v in target.items()
+ # }
+
+ if last_batch or not self.use_ddp:
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+ else:
+ with self.rec_model.no_sync(): # type: ignore
+ loss, loss_dict = self.loss_class(pred_for_rec,
+ target_for_rec,
+ test_mode=False)
+
+ # add cvD supervision
+ vision_aided_loss = self.ddp_nvs_cvD(
+ pred_for_rec['image_raw'],
+ for_G=True).mean() # [B, 1] shape
+
+                # balance the rec and adversarial losses via gradient norms at
+                # the last decoder layer; unwrap .module only under DDP
+                rec_module = (self.rec_model.module
+                              if self.use_ddp else self.rec_model)  # type: ignore
+                last_layer = rec_module.decoder.triplane_decoder.decoder.net[
+                    -1].weight  # type: ignore
+
+                d_weight = calculate_adaptive_weight(loss,
+                                                     vision_aided_loss,
+                                                     last_layer,
+                                                     disc_weight_max=1)
+ loss += vision_aided_loss * d_weight
+
+ loss_dict.update({
+ 'vision_aided_loss/G_rec': vision_aided_loss,
+ 'd_weight': d_weight
+ })
+
+ log_rec3d_loss_dict(loss_dict)
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ elif pred['image_sr'].shape[-1] == 256:
+ pred_img = th.cat(
+ [self.pool_256(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_256(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_256(pred_depth)
+ gt_depth = self.pool_256(gt_depth)
+
+ else:
+ pred_img = th.cat(
+ [self.pool_128(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_128(micro['img']), micro['img_sr']],
+ dim=-1)
+ gt_depth = self.pool_128(gt_depth)
+ pred_depth = self.pool_128(pred_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_rec.jpg'
+ )
+
+ def forward_G_nvs(self, batch): # update G
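+        """Generator step on novel views: cameras are rolled by one within
+        the batch, and the renderings are supervised only by the vision-aided
+        discriminator.
+        """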
+
+ self.mp_trainer_rec.zero_grad()
+ self.rec_model.requires_grad_(True)
+ self.ddp_nvs_cvD.requires_grad_(False)
+
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev()).contiguous()
+ for k, v in batch.items()
+ }
+
+ # last_batch = (i + self.microbatch) >= batch_size
+
+ # VQ3D novel view d loss
+ # duplicated_for_nvs = th.cat([
+ # micro['img_to_encoder'][batch_size // 2:],
+ # micro['img_to_encoder'][:batch_size // 2]
+ # ], 0)
+
+ with th.autocast(device_type='cuda',
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp):
+
+ pred = self.rec_model(
+ # img=duplicated_for_nvs, c=micro['c']
+ img=micro['img_to_encoder'],
+ c=th.cat([
+ micro['c'][1:],
+ micro['c'][:1],
+ ])
+ ) # render novel view for first half of the batch for D loss
+
+ # add cvD supervision
+ vision_aided_loss = self.ddp_nvs_cvD(
+ pred['image_raw'], for_G=True).mean() # [B, 1] shape
+
+                loss = vision_aided_loss * 0.01  # fixed adversarial weight for the NVS step
+
+ log_rec3d_loss_dict({
+ 'vision_aided_loss/G_nvs':
+ vision_aided_loss,
+ })
+
+ self.mp_trainer_rec.backward(loss)
+
+ # ! move to other places, add tensorboard
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred:
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [gt_img, gt_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # TODO, fail to load depth. range [0, 1]
+
+ pred_vis = th.cat(
+ [pred_img,
+ pred_depth.repeat_interleave(3, dim=1)],
+ dim=-1) # B, 3, H, W
+
+ # vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ vis = th.cat([gt_vis, pred_vis], dim=-2)
+
+ vis = torchvision.utils.make_grid(
+ vis,
+ normalize=True,
+ scale_each=True,
+ value_range=(-1, 1)).cpu().permute(1, 2, 0) # H W 3
+ vis = vis.numpy() * 255
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ # logger.log(vis.shape)
+
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+ logger.log(
+ 'log vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}_nvs.jpg'
+ )
+
+ def save(self, mp_trainer=None, model_name='rec'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer_rec
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, mp_trainer.master_params)
+
+ if model_name == 'ddpm':
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ dist.barrier()
+
+ def _load_and_sync_parameters(self, model=None, model_name='rec'):
+ resume_checkpoint, self.resume_step = find_resume_checkpoint(
+ self.resume_checkpoint, model_name) or self.resume_checkpoint
+
+ if model is None:
+ model = self.rec_model # default model in the parent class
+
+ logger.log(resume_checkpoint)
+
+ if resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ logger.log(f'mark {model_name} loading ', )
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ logger.log(f'mark {model_name} loading finished', )
+
+ model_state_dict = model.state_dict()
+
+ for k, v in resume_state_dict.items():
+
+ if k in model_state_dict.keys() and v.size(
+ ) == model_state_dict[k].size():
+ model_state_dict[k] = v
+
+ # elif 'IN' in k and model_name == 'rec' and getattr(model.decoder, 'decomposed_IN', False):
+ # model_state_dict[k.replace('IN', 'superresolution.norm.norm_layer')] = v # decomposed IN
+ elif 'attn.wk' in k or 'attn.wv' in k: # old qkv
+ logger.log('ignore ', k)
+
+                    elif 'decoder.vit_decoder.blocks' in k:
+                        # remap pre-trained 2D ViT weights into the fused 3D
+                        # ViT blocks
+                        assert len(
+                            model.decoder.vit_decoder.blocks[0].vit_blks
+                        ) == 2  # assumes a fusion-block depth of 2
+                        fusion_ca_depth = len(
+                            model.decoder.vit_decoder.blocks[0].vit_blks)
+                        vit_subblk_index = int(k.split('.')[3])
+                        vit_blk_keyname = '.'.join(k.split('.')[4:])
+                        fusion_blk_index = vit_subblk_index // fusion_ca_depth
+                        fusion_blk_subindex = vit_subblk_index % fusion_ca_depth
+                        model_state_dict[
+                            f'decoder.vit_decoder.blocks.{fusion_blk_index}'
+                            f'.vit_blks.{fusion_blk_subindex}.{vit_blk_keyname}'] = v
+
+ elif 'IN' in k:
+ logger.log('ignore ', k)
+
+ elif 'quant_conv' in k:
+ logger.log('ignore ', k)
+
+ else:
+ logger.log('!!!! ignore key: ', k, ": ", v.size(),)
+ if k in model_state_dict:
+ logger.log('shape in model: ', model_state_dict[k].size())
+ else:
+ logger.log(k, 'not in model_state_dict')
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ logger.log(f'synced {model_name} params')
diff --git a/nsr/train_util_diffusion.py b/nsr/train_util_diffusion.py
new file mode 100644
index 0000000000000000000000000000000000000000..85ddb437dd05eb8e9c4831b9d34be9121584dcd0
--- /dev/null
+++ b/nsr/train_util_diffusion.py
@@ -0,0 +1,1349 @@
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+# from PIL import Image
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion.gaussian_diffusion import _extract_into_tensor
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+                                         parse_resume_step_from_filename)
+# assumed import: NUM_CLASSES (used for class-conditional sampling below) is
+# defined in guided_diffusion.script_util in the upstream guided-diffusion code
+from guided_diffusion.script_util import NUM_CLASSES
+
+import dnnlib
+from huggingface_hub import hf_hub_download
+
+# AMP
+# from accelerate import Accelerator
+
+# from ..guided_diffusion.train_util import TrainLoop
+
+# use_amp = False
+# use_amp = True
+
+
+class TrainLoopDiffusionWithRec(TrainLoop):
+ """an interface with rec_model required apis
+ """
+
+ def __init__(
+ self,
+ *,
+ model,
+ diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ triplane_scaling_divider=1,
+ use_amp=False,
+ diffusion_input_size=224,
+ schedule_sampler=None,
+ model_name='ddpm',
+ train_vae=True,
+ **kwargs,
+ ):
+ super().__init__(
+ model=model,
+ diffusion=diffusion,
+ data=data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ use_amp=use_amp,
+ model_name=model_name,
+ train_vae=train_vae,
+ **kwargs,
+ )
+
+ self.latent_name = 'latent_normalized' # normalized triplane latent
+ self.diffusion_input_size = diffusion_input_size
+ self.render_latent_behaviour = 'triplane_dec' # directly render using triplane operations
+
+ self.loss_class = loss_class
+ # self.rec_model = rec_model
+ self.eval_interval = eval_interval
+ self.eval_data = eval_data
+ self.iterations = iterations
+ # self.triplane_std = 10
+ self.triplane_scaling_divider = triplane_scaling_divider
+
+ if dist_util.get_rank() == 0:
+ self.writer = SummaryWriter(log_dir=f'{logger.get_dir()}/runs')
+
+ # def _init_optim_groups(self, rec_model):
+ # """for initializing the reconstruction model.
+ # """
+ # kwargs = self.kwargs
+ # optim_groups = [
+ # # vit encoder
+ # {
+ # 'name': 'vit_encoder',
+ # 'params': rec_model.encoder.parameters(),
+ # 'lr': kwargs['encoder_lr'],
+ # 'weight_decay': kwargs['encoder_weight_decay']
+ # },
+ # # vit decoder
+ # {
+ # 'name': 'vit_decoder',
+ # 'params': rec_model.decoder.vit_decoder.parameters(),
+ # 'lr': kwargs['vit_decoder_lr'],
+ # 'weight_decay': kwargs['vit_decoder_wd']
+ # },
+ # {
+ # 'name': 'vit_decoder_pred',
+ # 'params': rec_model.decoder.decoder_pred.parameters(),
+ # 'lr': kwargs['vit_decoder_lr'],
+ # # 'weight_decay': 0
+ # 'weight_decay': kwargs['vit_decoder_wd']
+ # },
+
+ # # triplane decoder
+ # {
+ # 'name': 'triplane_decoder',
+ # 'params': rec_model.decoder.triplane_decoder.parameters(),
+ # 'lr': kwargs['triplane_decoder_lr'],
+ # # 'weight_decay': self.weight_decay
+ # },
+ # ]
+
+ # if rec_model.decoder.superresolution is not None:
+ # optim_groups.append({
+ # 'name':
+ # 'triplane_decoder_superresolution',
+ # 'params':
+ # rec_model.decoder.superresolution.parameters(),
+ # 'lr':
+ # kwargs['super_resolution_lr'],
+ # })
+
+ # return optim_groups
+
+ def _init_optim_groups(self, rec_model, freeze_decoder=False):
+ """for initializing the reconstruction model; fixing decoder part.
+ """
+ kwargs = self.kwargs
+ optim_groups = [
+ # vit encoder
+ {
+ 'name': 'vit_encoder',
+ 'params': rec_model.encoder.parameters(),
+ 'lr': kwargs['encoder_lr'],
+ 'weight_decay': kwargs['encoder_weight_decay']
+ },
+ ]
+
+ if not freeze_decoder:
+ optim_groups += [
+ # vit decoder
+ {
+ 'name': 'vit_decoder',
+ 'params': rec_model.decoder.vit_decoder.parameters(),
+ 'lr': kwargs['vit_decoder_lr'],
+ 'weight_decay': kwargs['vit_decoder_wd']
+ },
+ {
+ 'name': 'vit_decoder_pred',
+ 'params': rec_model.decoder.decoder_pred.parameters(),
+ 'lr': kwargs['vit_decoder_lr'],
+ # 'weight_decay': 0
+ 'weight_decay': kwargs['vit_decoder_wd']
+ },
+
+ # triplane decoder
+ {
+ 'name': 'triplane_decoder',
+ 'params': rec_model.decoder.triplane_decoder.parameters(),
+ 'lr': kwargs['triplane_decoder_lr'],
+ # 'weight_decay': self.weight_decay
+ },
+ ]
+
+ if rec_model.decoder.superresolution is not None:
+ optim_groups.append({
+ 'name':
+ 'triplane_decoder_superresolution',
+ 'params':
+ rec_model.decoder.superresolution.parameters(),
+ 'lr':
+ kwargs['super_resolution_lr'],
+ })
+
+ return optim_groups
+
+ @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ def eval_novelview_loop(self, rec_model):
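+        """Novel-view evaluation: the first eval sample is re-rendered under
+        every camera along the eval trajectory; losses are logged and the
+        frames are written to an mp4.
+        """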
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ all_loss_dict = []
+ novel_view_micro = {}
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i == 0:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ pred = rec_model(img=novel_view_micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+ # targe
+
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ if 'image_sr' in pred:
+
+ if pred['image_sr'].shape[-1] == 512:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ elif pred['image_sr'].shape[-1] == 256:
+
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_256(pred['image_raw']), pred['image_sr'],
+ self.pool_256(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred['image_sr']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+
+ else:
+ # pred_vis = th.cat([
+ # self.pool_64(micro['img']), pred['image_raw'],
+ # pred_depth.repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1) # B, 3, H, W
+
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores_novelview.json'),
+ 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/NovelView/{k}', v,
+ self.step + self.resume_step)
+ del video_out
+ # del pred_vis
+ # del pred
+
+ th.cuda.empty_cache()
+
+ @th.no_grad()
+ def eval_loop(self, rec_model):
+        # reconstruction evaluation over the eval camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+ all_loss_dict = []
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ # pred = self.model(img=micro['img_to_encoder'],
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ # pred of rec model
+            pred = rec_model(img=micro['img_to_encoder'],
+                             c=micro['c'])  # pred: (B, 3, 64, 64)
+
+            # accumulate per-batch losses so calc_average_loss() below has
+            # data; otherwise all_loss_dict stays empty
+            _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+            all_loss_dict.append(loss_dict)
+
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+ if pred['image_sr'].shape[-1] == 512:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ assert pred['image_sr'].shape[-1] == 128
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_128(pred['image_raw']), pred['image_sr'],
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']),
+ self.pool_128(pred['image_raw']),
+ self.pool_128(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores.json'), 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/Rec/{k}', v,
+ self.step + self.resume_step)
+
+ del video_out, vis, pred_vis, pred
+ th.cuda.empty_cache()
+ self.eval_novelview_loop(rec_model)
+
+ def save(self, mp_trainer=None, model_name='ddpm'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ # save_checkpoint(0, self.mp_trainer_ddpm.master_params)
+ try:
+ save_checkpoint(0, mp_trainer.master_params)
+ if model_name == 'ddpm':
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+ except Exception as e:
+ logger.log(e)
+
+ th.cuda.empty_cache()
+ dist_util.synchronize()
+
+ def _load_and_sync_parameters(self,
+ model=None,
+ model_name='ddpm',
+ resume_checkpoint=None):
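+        """Load a checkpoint either from a local path or, when
+        resume_checkpoint looks like 'yslan/GaussianAnything/...', from the
+        Hugging Face Hub via hf_hub_download.
+        """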
+        hf_loading = (self.resume_checkpoint is not None
+                      and 'yslan/GaussianAnything' in self.resume_checkpoint)
+
+ if not hf_loading and resume_checkpoint is None:
+ resume_checkpoint, self.resume_step = find_resume_checkpoint(
+ self.resume_checkpoint, model_name) or self.resume_checkpoint
+
+ if model is None:
+ model = self.model
+
+ if hf_loading or resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+ # ! rank 0 return will cause all other ranks to hang
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ if hf_loading:
+ logger.log(f'mark {model_name} loading from hugging face')
+ else:
+ logger.log(f'mark {model_name} loading ')
+
+ if hf_loading:
+ hf_ckpt = self.resume_checkpoint.split('/')
+ repo_id = '/'.join(hf_ckpt[:2])
+ file_name = '/'.join(hf_ckpt[2:])
+ model_path = hf_hub_download(repo_id=repo_id,
+ filename=file_name)
+
+ resume_state_dict = dist_util.load_state_dict(
+ model_path, map_location=map_location)
+
+ else:
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+
+ # logger.log(f'mark {model_name} loading finished')
+
+ model_state_dict = model.state_dict()
+
+ for k, v in resume_state_dict.items():
+ if k in model_state_dict.keys() and v.size(
+ ) == model_state_dict[k].size():
+ model_state_dict[k] = v
+
+ else:
+ print(
+ '!!!! ignore key: ',
+ k,
+ ": ",
+ v.size(),
+ )
+ if k in model_state_dict:
+ print('shape in model: ',
+ model_state_dict[k].size())
+ else:
+ print(k, ' not in model')
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+ else:
+ logger.log(f'{resume_checkpoint} not found.')
+ # print(resume_checkpoint)
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ # dist_util.sync_params(model.named_parameters())
+ print(f'synced {model_name} params')
+
+ @th.inference_mode()
+ def apply_model_inference(self,
+ x_noisy,
+ t,
+ c=None,
+                              model_kwargs=None):  # compatible API for samplers
+        model_kwargs = model_kwargs or {}  # avoid the mutable-default pitfall
+        pred_params = self.ddp_model(x_noisy, t,
+                                     **model_kwargs)  # unconditional model
+        return pred_params
+
+ @th.inference_mode()
+ def eval_ddpm_sample(self, rec_model, **kwargs): # , ddpm_model=None):
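+        """Sample a latent from the diffusion model (model calls are routed
+        through apply_model_inference) and render it to video with the given
+        reconstruction model.
+        """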
+ # rec_model.eval()
+ # self.ddpm_model.eval()
+ self.model.eval()
+
+ # if ddpm_model is None:
+ # ddpm_model = self.ddp_model
+
+ args = dnnlib.EasyDict(
+ dict(
+ batch_size=1,
+ # image_size=224,
+ image_size=self.diffusion_input_size,
+ # ddpm_image_size=224,
+ # denoise_in_channels=self.ddp_rec_model.module.decoder.triplane_decoder.out_chans, # type: ignore
+                denoise_in_channels=self.ddpm_model.in_channels,  # type: ignore
+ clip_denoised=False,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+ if args.class_cond:
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+
+ # for i in range(2):
+ for i in range(1):
+ triplane_sample = sample_fn(
+ # self.ddp_model,
+ self,
+ (args.batch_size, args.denoise_in_channels,
+ self.diffusion_input_size, self.diffusion_input_size),
+ clip_denoised=args.clip_denoised,
+ # model_kwargs=model_kwargs,
+ mixing_normal=True, # !
+ device=dist_util.dev(),
+ # model_kwargs=model_kwargs,
+ **model_kwargs)
+ th.cuda.empty_cache()
+ self.render_video_given_triplane(
+ triplane_sample,
+ rec_model,
+ name_prefix=f'{self.step + self.resume_step}_{i}')
+ th.cuda.empty_cache()
+
+ # rec_model.train()
+ # self.ddpm_model.train()
+ # ddpm_model.train()
+ self.model.train()
+
+
+ @th.inference_mode()
+ def render_video_noise_schedule(self, name_prefix='0'):
+
+ # planes *= self.triplane_std # denormalize for rendering
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_visnoise_{name_prefix}.mp4',
+ mode='I',
+ fps=30,
+ codec='libx264')
+
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i % 10 != 0:
+ continue
+
+ # ========= novel view plane settings ====
+ if i == 0:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k:
+ v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ latent = self.ddp_rec_model(
+ img=novel_view_micro['img_to_encoder'],
+ c=micro['c'])[self.latent_name] # pred: (B, 3, 64, 64)
+
+ x_start = latent / self.triplane_scaling_divider # normalize std to 1
+ # x_start = latent
+
+ all_pred_vis = []
+ # for t in th.range(0,
+ # 4001,
+ # 500,
+ # dtype=th.long,
+ # device=dist_util.dev()): # cosine 4k steps
+ for t in th.range(0,
+ 1001,
+ 125,
+ dtype=th.long,
+ device=dist_util.dev()): # cosine 4k steps
+
+ # ========= add noise according to t
+ noise = th.randn_like(x_start) # x_start is the x0 image
+ x_t = self.diffusion.q_sample(
+ x_start, t, noise=noise
+ ) # * add noise according to predefined schedule
+ planes_x_t = (x_t * self.triplane_scaling_divider).clamp(
+ -50, 50) # de-scaling noised x_t
+
+ # planes_x_t = (x_t * 1).clamp(
+ # -50, 50) # de-scaling noised x_t
+
+ # ===== visualize
+ pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'],
+ latent=planes_x_t,
+ behaviour=self.render_latent_behaviour
+ ) # pred: (B, 3, 64, 64)
+
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_vis = th.cat([
+ # # self.pool_128(micro['img']),
+ # pred['image_raw'],
+ # ],
+ # dim=-1) # B, 3, H, W
+ pred_vis = pred['image_raw']
+
+ all_pred_vis.append(pred_vis)
+ # TODO, make grid
+
+ all_pred_vis = torchvision.utils.make_grid(
+ th.cat(all_pred_vis, 0),
+ nrow=len(all_pred_vis),
+ normalize=True,
+ value_range=(-1, 1),
+                scale_each=True)  # maps the (-1, 1) input range to [0, 1]
+
+ vis = all_pred_vis.permute(1, 2, 0).cpu().numpy() # H W 3
+
+ vis = (vis * 255).clip(0, 255).astype(np.uint8)
+
+ video_out.append_data(vis)
+
+ video_out.close()
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_visnoise_{name_prefix}.mp4')
+
+ th.cuda.empty_cache()
+
+ @th.inference_mode()
+ def plot_noise_nsr_curve(self, name_prefix='0'):
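+        """Record signal-to-noise statistics over the noise schedule: for
+        encoded eval latents, store |signal/noise| at each t together with
+        x_t mean/std, and save the curves as snr_{i}.pt files.
+        """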
+ # planes *= self.triplane_std # denormalize for rendering
+
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i % 10 != 0:
+ continue
+
+ # if i == 0:
+ latent = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='enc_dec_wo_triplane') # pred: (B, 3, 64, 64)
+
+            x_start = latent[self.latent_name] / \
+                self.triplane_scaling_divider  # normalize std to 1
+
+ snr_list = []
+ snr_wo_data_list = []
+ xt_mean = []
+ xt_std = []
+
+ for t in th.range(0,
+ 1001,
+ 5,
+ dtype=th.long,
+ device=dist_util.dev()): # cosine 4k steps
+
+ # ========= add noise according to t
+ noise = th.randn_like(x_start) # x_start is the x0 image
+
+                # note: these hold sqrt(alpha_bar_t) and sqrt(1 - alpha_bar_t)
+                # from the cumulative schedule, not the per-step beta_t values
+                sqrt_alpha_bar_t = _extract_into_tensor(
+                    self.diffusion.sqrt_alphas_cumprod, t, x_start.shape)
+                sqrt_one_minus_alpha_bar_t = _extract_into_tensor(
+                    self.diffusion.sqrt_one_minus_alphas_cumprod, t,
+                    x_start.shape)
+
+                signal_t = sqrt_alpha_bar_t * x_start
+                noise_t = sqrt_one_minus_alpha_bar_t * noise
+
+                x_t = signal_t + noise_t
+
+                snr = signal_t / (noise_t + 1e-6)
+                snr_wo_data = sqrt_alpha_bar_t / (sqrt_one_minus_alpha_bar_t +
+                                                  1e-6)
+
+ snr_list.append(abs(snr).mean().cpu().numpy())
+ snr_wo_data_list.append(abs(snr_wo_data).mean().cpu().numpy())
+ xt_mean.append(x_t.mean().cpu().numpy())
+ xt_std.append(x_t.std().cpu().numpy())
+
+ print('xt_mean', xt_mean)
+ print('xt_std', xt_std)
+ print('snr', snr_list)
+
+ th.save(
+ {
+ 'xt_mean': xt_mean,
+ 'xt_std': xt_std,
+ 'snr': snr_list,
+ 'snr_wo_data': snr_wo_data_list,
+ },
+ Path(logger.get_dir()) / f'snr_{i}.pt')
+
+ th.cuda.empty_cache()
+
+
+# a legacy class for direct diffusion training, not joint.
+class TrainLoop3DDiffusion(TrainLoopDiffusionWithRec):
+
+ def __init__(
+ self,
+ *,
+ # model,
+ rec_model,
+ denoise_model,
+ diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ diffusion_input_size=224,
+ **kwargs):
+
+ super().__init__(
+ model=denoise_model,
+ diffusion=diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ diffusion_input_size=diffusion_input_size,
+ schedule_sampler=schedule_sampler,
+ )
+
+ # self.accelerator = Accelerator()
+
+ self._load_and_sync_parameters(model=self.rec_model, model_name='rec')
+
+ # * for loading EMA
+ self.mp_trainer_rec = MixedPrecisionTrainer(
+ model=self.rec_model,
+ use_fp16=self.use_fp16,
+ use_amp=use_amp,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='rec',
+ )
+ self.denoised_ae = denoised_ae
+
+ if not freeze_ae:
+ self.opt_rec = AdamW(
+ self._init_optim_groups(self.mp_trainer_rec.model))
+ else:
+ print('!! freezing AE !!')
+
+ # if not freeze_ae:
+ if self.resume_step:
+ if not ignore_resume_opt:
+ self._load_optimizer_state()
+ else:
+ logger.warn("Ignoring optimizer state from checkpoint.")
+
+ self.ema_params_rec = [
+ self._load_ema_parameters(
+ rate,
+ self.rec_model,
+ self.mp_trainer_rec,
+ model_name=self.mp_trainer_rec.model_name)
+ for rate in self.ema_rate
+ ] # for sync reconstruction model
+ else:
+ if not freeze_ae:
+ self.ema_params_rec = [
+ copy.deepcopy(self.mp_trainer_rec.master_params)
+ for _ in range(len(self.ema_rate))
+ ]
+
+ if self.use_ddp is True:
+ self.rec_model = th.nn.SyncBatchNorm.convert_sync_batchnorm(
+ self.rec_model)
+ self.ddp_rec_model = DDP(
+ self.rec_model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ # find_unused_parameters=True,
+ )
+ else:
+ self.ddp_rec_model = self.rec_model
+
+ if freeze_ae:
+ self.ddp_rec_model.eval()
+ self.ddp_rec_model.requires_grad_(False)
+ self.freeze_ae = freeze_ae
+
+ # if use_amp:
+
+ def _update_ema_rec(self):
+ for rate, params in zip(self.ema_rate, self.ema_params_rec):
+ update_ema(params, self.mp_trainer_rec.master_params, rate=rate)
+
+ def run_loop(self, batch=None):
+ th.cuda.empty_cache()
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_ddpm_sample(self.ddp_rec_model)
+ # continue # TODO, diffusion inference
+ # self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+ th.cuda.empty_cache()
+
+ batch = next(self.data)
+ self.run_step(batch)
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+ dist_util.synchronize()
+
+ th.cuda.empty_cache()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+
+ def run_step(self, batch, cond=None):
+ self.forward_backward(batch,
+ cond) # type: ignore # * 3D Reconstruction step
+ took_step_ddpm = self.mp_trainer.optimize(self.opt)
+ if took_step_ddpm:
+ self._update_ema()
+
+ if not self.freeze_ae:
+ took_step_rec = self.mp_trainer_rec.optimize(self.opt_rec)
+ if took_step_rec:
+ self._update_ema_rec()
+
+ self._anneal_lr()
+ self.log_step()
+
+ def forward_backward(self, batch, *args, **kwargs):
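+        """One micro-batched forward/backward pass combining three terms: the
+        AE reconstruction loss (when the AE is trainable), the diffusion
+        denoising loss on the normalized latent, and optionally a loss on the
+        denoised AE prediction.
+        """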
+ # return super().forward_backward(batch, *args, **kwargs)
+ self.mp_trainer.zero_grad()
+ # all_denoised_out = dict()
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # if not freeze_ae:
+
+ # =================================== ae part ===================================
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp
+ and not self.freeze_ae):
+ # with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=False,): # ! debugging, no AMP on all the input
+
+ latent = self.ddp_rec_model(
+ img=micro['img_to_encoder'],
+ c=micro['c'],
+ behaviour='enc_dec_wo_triplane') # pred: (B, 3, 64, 64)
+
+ if not self.freeze_ae:
+ target = micro
+ pred = self.rec_model(latent=latent,
+ c=micro['c'],
+ behaviour='triplane_dec')
+
+ if last_batch or not self.use_ddp:
+ ae_loss, loss_dict = self.loss_class(pred,
+ target,
+ test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ ae_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+
+ log_rec3d_loss_dict(loss_dict)
+ else:
+ ae_loss = th.tensor(0.0).to(dist_util.dev())
+
+ # =================================== prepare for ddpm part ===================================
+
+            micro_to_denoise = latent[self.latent_name] / \
+                self.triplane_scaling_divider  # normalize std to 1
+
+ t, weights = self.schedule_sampler.sample(
+ micro_to_denoise.shape[0], dist_util.dev())
+
+ model_kwargs = {}
+
+ # print(micro_to_denoise.min(), micro_to_denoise.max())
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ micro_to_denoise, # x_start
+ t,
+ model_kwargs=model_kwargs,
+ )
+
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ # denoised_out = denoised_fn()
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ losses = compute_losses()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ denoise_loss = (losses["loss"] * weights).mean()
+
+ x_t = losses['x_t']
+ model_output = losses['model_output']
+ losses.pop('x_t')
+ losses.pop('model_output')
+
+ log_loss_dict(self.diffusion, t, {
+ k: v * weights
+ for k, v in losses.items()
+ })
+
+ # self.mp_trainer.backward(denoise_loss)
+            # =================================== denoised ae part ===================================
+            # if self.denoised_ae or self.step % 500 == 0:
+            if self.denoised_ae:
+                with th.cuda.amp.autocast(
+                        dtype=th.float16,
+                        enabled=self.mp_trainer_rec.use_amp
+                        and not self.freeze_ae):
+                    # one-step x0 prediction from the denoiser; must be
+                    # defined before it is called here
+                    denoised_fn = functools.partial(
+                        self.diffusion.p_mean_variance,
+                        self.ddp_model,
+                        x_t,
+                        t,
+                        model_kwargs=model_kwargs)
+                    denoised_out = denoised_fn()
+
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'],
+ latent=denoised_out['pred_xstart'] * self.
+ triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour=self.render_latent_behaviour)
+
+ # if self.denoised_ae:
+
+ if last_batch or not self.use_ddp:
+ denoised_ae_loss, loss_dict = self.loss_class(
+ denoised_ae_pred, micro, test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ denoised_ae_loss, loss_dict = self.loss_class(
+ denoised_ae_pred, micro, test_mode=False)
+
+ # * rename
+ loss_dict_denoise_ae = {}
+ for k, v in loss_dict.items():
+ loss_dict_denoise_ae[f'{k}_denoised'] = v.mean()
+ log_rec3d_loss_dict(loss_dict_denoise_ae)
+
+ else:
+ denoised_ae_loss = th.tensor(0.0).to(dist_util.dev())
+
+ loss = ae_loss + denoise_loss + denoised_ae_loss
+ # self.mp_trainer.backward(denosied_ae_loss)
+ # self.mp_trainer.backward(loss)
+
+ # exit AMP before backward
+ self.mp_trainer.backward(loss)
+ # if self.freeze_ae:
+ # else:
+ # self.mp_trainer.backward(denoise_loss)
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+
+ if self.freeze_ae:
+ latent_micro = {
+ k:
+ v[0:1].to(dist_util.dev()) if v is not None else v
+ for k, v in latent.items()
+ }
+
+ pred = self.rec_model(latent=latent_micro,
+ c=micro['c'][0:1],
+ behaviour='triplane_dec')
+ else:
+ assert pred is not None
+ latent_micro = latent # reuse the full latent dict when the AE is trainable
+
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ # if 'image_sr' in pred: # TODO
+ # pred_img = th.cat(
+ # [self.pool_512(pred_img), pred['image_sr']],
+ # dim=-1)
+ # gt_img = th.cat(
+ # [self.pool_512(micro['img']), micro['img_sr']],
+ # dim=-1)
+ # pred_depth = self.pool_512(pred_depth)
+ # gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'], micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ sr_w_code = latent_micro.get('sr_w_code', None)
+ if sr_w_code is not None:
+ sr_w_code = sr_w_code[0:1]
+
+ noised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent={
+ 'latent_normalized': x_t[0:1] * self.triplane_scaling_divider,
+ # 'sr_w_code': getattr(self.ddp_rec_model.module.decoder,'w_avg').reshape(1,1,-1)
+ 'sr_w_code': sr_w_code
+ }, # TODO, how to define the scale automatically
+ behaviour=self.render_latent_behaviour)
+
+ denoised_fn = functools.partial(
+ self.diffusion.p_mean_variance,
+ self.ddp_model,
+ x_t, # noised latent x_t at timestep t (p_mean_variance input, not x_start)
+ t,
+ model_kwargs=model_kwargs)
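+ # p_mean_variance returns the posterior mean/variance plus 'pred_xstart',
+ # the model's x_0 estimate, which is decoded below for visual comparison.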
+
+ denoised_out = denoised_fn()
+
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent={
+ 'latent_normalized': denoised_out['pred_xstart'][0:1] * self.triplane_scaling_divider, # TODO, how to define the scale automatically
+ # 'sr_w_code': getattr(self.ddp_rec_model.module.decoder,'w_avg').reshape(1,1,-1)
+ 'sr_w_code': sr_w_code
+ },
+ behaviour=self.render_latent_behaviour)
+
+ assert denoised_ae_pred is not None
+
+ # print(pred_img.shape)
+ # print('denoised_ae:', self.denoised_ae)
+
+ pred_vis = th.cat([
+ pred_img[0:1], noised_ae_pred['image_raw'],
+ denoised_ae_pred['image_raw'],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis = th.cat([
+ # self.pool_128(micro['img']), x_t[:, :3, ...],
+ # denoised_out['pred_xstart'][:, :3, ...]
+ # ],
+ # dim=-1)[0].permute(
+ # 1, 2, 0).cpu() # ! pred in range[-1, 1]
+
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}.jpg'
+ )
+
+ th.cuda.empty_cache()
diff --git a/nsr/train_util_diffusion_accelerate.py b/nsr/train_util_diffusion_accelerate.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ec575b5129de44ec45e1427c3a4ca1ccc91e10f
--- /dev/null
+++ b/nsr/train_util_diffusion_accelerate.py
@@ -0,0 +1,938 @@
+import copy
+import functools
+import json
+import os
+from pathlib import Path
+from pdb import set_trace as st
+
+import blobfile as bf
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+import torchvision
+from PIL import Image
+from torch.nn.parallel.distributed import DistributedDataParallel as DDP
+from torch.optim import AdamW
+from torch.utils.tensorboard.writer import SummaryWriter
+from tqdm import tqdm
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.fp16_util import MixedPrecisionTrainer
+from guided_diffusion.nn import update_ema
+from guided_diffusion.resample import LossAwareSampler, UniformSampler
+# from .train_util import TrainLoop3DRec
+from guided_diffusion.train_util import (TrainLoop, calc_average_loss,
+ find_ema_checkpoint,
+ find_resume_checkpoint,
+ get_blob_logdir, log_loss_dict,
+ log_rec3d_loss_dict,
+ parse_resume_step_from_filename)
+# NUM_CLASSES for the (optional) class-conditional sampling branch; assumed to
+# live in script_util, as in OpenAI's guided-diffusion.
+from guided_diffusion.script_util import NUM_CLASSES
+
+import dnnlib
+
+# AMP
+from accelerate import Accelerator
+
+# from ..guided_diffusion.train_util import TrainLoop
+
+# use_amp = False
+# use_amp = True
+
+
+class TrainLoop3DDiffusion(TrainLoop):
+
+ def __init__(
+ self,
+ *,
+ # model,
+ rec_model,
+ denoise_model,
+ diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ **kwargs):
+
+ super().__init__(model=denoise_model,
+ diffusion=diffusion,
+ data=data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ lr_anneal_steps=lr_anneal_steps,
+ weight_decay=weight_decay,
+ use_amp=use_amp)
+
+ self.accelerator = Accelerator()
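+ # NOTE: this Accelerator instance is not consumed elsewhere in this loop;
+ # DDP wrapping and AMP autocast are still managed manually below.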
+
+ self.pool_512 = th.nn.AdaptiveAvgPool2d((512, 512))
+ self.pool_128 = th.nn.AdaptiveAvgPool2d((128, 128))
+ self.loss_class = loss_class
+ self.rec_model = rec_model
+ self.eval_interval = eval_interval
+ self.eval_data = eval_data
+ self.iterations = iterations
+ # self.triplane_std = 10
+ self.triplane_scaling_divider = triplane_scaling_divider
+
+ self._load_and_sync_parameters(model=self.rec_model, model_name='rec')
+
+ # * for loading EMA
+ self.mp_trainer_rec = MixedPrecisionTrainer(
+ model=self.rec_model,
+ use_fp16=self.use_fp16,
+ use_amp=use_amp,
+ fp16_scale_growth=fp16_scale_growth,
+ model_name='rec',
+ )
+ self.denoised_ae = denoised_ae
+ if not freeze_ae:
+ self.opt_rec = AdamW(self._init_optim_groups(kwargs))
+ else:
+ print('!! freezing AE !!')
+
+ if dist_util.get_rank() == 0:
+ self.writer = SummaryWriter(log_dir=f'{logger.get_dir()}/runs')
+ print(self.opt)
+ if not freeze_ae:
+ print(self.opt_rec)
+
+ # if not freeze_ae:
+ if self.resume_step:
+ if not ignore_resume_opt:
+ self._load_optimizer_state()
+ else:
+ logger.warn("Ignoring optimizer state from checkpoint.")
+ # Model was resumed, either due to a restart or a checkpoint
+ # being specified at the command line.
+ # if not freeze_ae:
+ # self.ema_params_rec = [
+ # self._load_ema_parameters(
+ # rate,
+ # self.rec_model,
+ # self.mp_trainer_rec,
+ # model_name=self.mp_trainer_rec.model_name)
+ # for rate in self.ema_rate
+ # ]
+ # else:
+ self.ema_params_rec = [
+ self._load_ema_parameters(
+ rate,
+ self.rec_model,
+ self.mp_trainer_rec,
+ model_name=self.mp_trainer_rec.model_name)
+ for rate in self.ema_rate
+ ]
+ else:
+ if not freeze_ae:
+ self.ema_params_rec = [
+ copy.deepcopy(self.mp_trainer_rec.master_params)
+ for _ in range(len(self.ema_rate))
+ ]
+
+ if self.use_ddp is True:
+ self.rec_model = th.nn.SyncBatchNorm.convert_sync_batchnorm(
+ self.rec_model)
+ self.ddp_rec_model = DDP(
+ self.rec_model,
+ device_ids=[dist_util.dev()],
+ output_device=dist_util.dev(),
+ broadcast_buffers=False,
+ bucket_cap_mb=128,
+ find_unused_parameters=False,
+ # find_unused_parameters=True,
+ )
+ else:
+ self.ddp_rec_model = self.rec_model
+
+ if freeze_ae:
+ self.ddp_rec_model.eval()
+ self.ddp_rec_model.requires_grad_(False)
+ self.freeze_ae = freeze_ae
+
+ # if use_amp:
+
+ def _init_optim_groups(self, kwargs):
+ optim_groups = [
+ # vit encoder
+ {
+ 'name': 'vit_encoder',
+ 'params': self.mp_trainer_rec.model.encoder.parameters(),
+ 'lr': kwargs['encoder_lr'],
+ 'weight_decay': kwargs['encoder_weight_decay']
+ },
+ # vit decoder
+ {
+ 'name':
+ 'vit_decoder',
+ 'params':
+ self.mp_trainer_rec.model.decoder.vit_decoder.parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ },
+ {
+ 'name':
+ 'vit_decoder_pred',
+ 'params':
+ self.mp_trainer_rec.model.decoder.decoder_pred.parameters(),
+ 'lr':
+ kwargs['vit_decoder_lr'],
+ # 'weight_decay': 0
+ 'weight_decay':
+ kwargs['vit_decoder_wd']
+ },
+
+ # triplane decoder
+ {
+ 'name':
+ 'triplane_decoder',
+ 'params':
+ self.mp_trainer_rec.model.decoder.triplane_decoder.parameters(
+ ),
+ 'lr':
+ kwargs['triplane_decoder_lr'],
+ # 'weight_decay': self.weight_decay
+ },
+ ]
+
+ if self.mp_trainer_rec.model.decoder.superresolution is not None:
+ optim_groups.append({
+ 'name':
+ 'triplane_decoder_superresolution',
+ 'params':
+ self.mp_trainer_rec.model.decoder.superresolution.parameters(),
+ 'lr':
+ kwargs['super_resolution_lr'],
+ })
+
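+ # Example (hypothetical values): with encoder_lr=1e-4 and vit_decoder_lr=5e-5,
+ # AdamW(self._init_optim_groups(kwargs)) in __init__ yields one param group
+ # per sub-module, e.g. opt_rec.param_groups[0]['lr'] == 1e-4 (vit_encoder)
+ # and opt_rec.param_groups[1]['lr'] == 5e-5 (vit_decoder).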
+ return optim_groups
+
+ def run_loop(self, batch=None):
+ th.cuda.empty_cache()
+ while (not self.lr_anneal_steps
+ or self.step + self.resume_step < self.lr_anneal_steps):
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # batch, cond = next(self.data)
+ # if batch is None:
+ batch = next(self.data)
+ self.run_step(batch)
+ if self.step % self.log_interval == 0 and dist_util.get_rank(
+ ) == 0:
+ out = logger.dumpkvs()
+ # * log to tensorboard
+ for k, v in out.items():
+ self.writer.add_scalar(f'Loss/{k}', v,
+ self.step + self.resume_step)
+
+ # if self.step % self.eval_interval == 0 and self.step != 0:
+ if self.step % self.eval_interval == 0:
+ if dist_util.get_rank() == 0:
+ self.eval_ddpm_sample()
+ # continue # TODO, diffusion inference
+ # self.eval_loop()
+ # self.eval_novelview_loop()
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+ th.cuda.empty_cache()
+
+ if self.step % self.save_interval == 0 and self.step != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+ dist_util.synchronize()
+
+ th.cuda.empty_cache()
+ # Run for a finite amount of time in integration tests.
+ if os.environ.get("DIFFUSION_TRAINING_TEST",
+ "") and self.step > 0:
+ return
+
+ self.step += 1
+
+ if self.step > self.iterations:
+ print('reached maximum iterations, exiting')
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+
+ exit()
+
+ # Save the last checkpoint if it wasn't already saved.
+ if (self.step - 1) % self.save_interval != 0:
+ self.save()
+ if not self.freeze_ae:
+ self.save(self.mp_trainer_rec, 'rec')
+
+ def run_step(self, batch, cond=None):
+ self.forward_backward(batch,
+ cond) # type: ignore # * 3D Reconstruction step
+ took_step_ddpm = self.mp_trainer.optimize(self.opt)
+ if took_step_ddpm:
+ self._update_ema()
+
+ if not self.freeze_ae:
+ took_step_rec = self.mp_trainer_rec.optimize(self.opt_rec)
+ if took_step_rec:
+ self._update_ema_rec()
+
+ self._anneal_lr()
+ self.log_step()
+
+ def forward_backward(self, batch, *args, **kwargs):
+ # return super().forward_backward(batch, *args, **kwargs)
+ self.mp_trainer.zero_grad()
+ # all_denoised_out = dict()
+ batch_size = batch['img'].shape[0]
+
+ for i in range(0, batch_size, self.microbatch):
+
+ micro = {
+ k: v[i:i + self.microbatch].to(dist_util.dev())
+ for k, v in batch.items()
+ }
+
+ last_batch = (i + self.microbatch) >= batch_size
+
+ # if not freeze_ae:
+
+ # =================================== ae part ===================================
+ # with th.cuda.amp.autocast(dtype=th.float16,
+ # enabled=self.mp_trainer_rec.use_amp
+ # and not self.freeze_ae):
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=False,): # ! debugging, no AMP on all the input
+
+ pred = self.ddp_rec_model(img=micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+ if not self.freeze_ae:
+ target = micro
+
+ if last_batch or not self.use_ddp:
+ ae_loss, loss_dict = self.loss_class(pred,
+ target,
+ test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ ae_loss, loss_dict = self.loss_class(
+ pred, target, test_mode=False)
+
+ log_rec3d_loss_dict(loss_dict)
+ else:
+ ae_loss = th.tensor(0.0).to(dist_util.dev())
+
+ micro_to_denoise = pred[
+ 'latent'] / self.triplane_scaling_divider # normalize std to 1
+
+ t, weights = self.schedule_sampler.sample(
+ micro_to_denoise.shape[0], dist_util.dev())
+
+ # print('!!!', micro_to_denoise.dtype)
+ # =================================== denoised part ===================================
+
+ model_kwargs = {}
+
+ # print(micro_to_denoise.min(), micro_to_denoise.max())
+ compute_losses = functools.partial(
+ self.diffusion.training_losses,
+ self.ddp_model,
+ micro_to_denoise, # x_start
+ t,
+ model_kwargs=model_kwargs,
+ )
+
+ denoised_fn = functools.partial(
+ self.diffusion.p_mean_variance,
+ self.ddp_model,
+ micro_to_denoise, # NB: p_mean_variance expects the noised x_t; passing x_start here is a shortcut kept for this debugging variant
+ t,
+ model_kwargs=model_kwargs)
+
+ with th.cuda.amp.autocast(dtype=th.float16,
+ enabled=self.mp_trainer.use_amp):
+
+ if last_batch or not self.use_ddp:
+ losses = compute_losses()
+ denoised_out = denoised_fn()
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ losses = compute_losses()
+ denoised_out = denoised_fn()
+
+ if isinstance(self.schedule_sampler, LossAwareSampler):
+ self.schedule_sampler.update_with_local_losses(
+ t, losses["loss"].detach())
+
+ denoise_loss = (losses["loss"] * weights).mean()
+ log_loss_dict(self.diffusion, t,
+ {k: v * weights
+ for k, v in losses.items()})
+
+ # self.mp_trainer.backward(denoise_loss)
+ # =================================== denoised ae part ===================================
+ # if self.denoised_ae or self.step % 500 == 0:
+ if self.denoised_ae:
+ with th.cuda.amp.autocast(
+ dtype=th.float16,
+ enabled=self.mp_trainer_rec.use_amp
+ and not self.freeze_ae):
+ # continue
+
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'],
+ latent=denoised_out['pred_xstart'] * self.triplane_scaling_divider, # TODO, how to define the scale automatically?
+ behaviour='triplane_dec')
+
+ # if self.denoised_ae:
+
+ if last_batch or not self.use_ddp:
+ denoised_ae_loss, loss_dict = self.loss_class(
+ denoised_ae_pred, micro, test_mode=False)
+ else:
+ with self.ddp_model.no_sync(): # type: ignore
+ denoised_ae_loss, loss_dict = self.loss_class(
+ denoised_ae_pred, micro, test_mode=False)
+
+ # * rename
+ loss_dict_denoise_ae = {}
+ for k, v in loss_dict.items():
+ loss_dict_denoise_ae[f'{k}_denoised'] = v.mean()
+ log_rec3d_loss_dict(loss_dict_denoise_ae)
+
+ else:
+ denoised_ae_loss = th.tensor(0.0).to(dist_util.dev())
+
+ # loss = ae_loss + denoise_loss + denoised_ae_loss
+ loss = denoise_loss # ! leave only denoise_loss for debugging
+ # loss = ae_loss + denoise_loss
+ # self.mp_trainer.backward(denoised_ae_loss)
+ # if use_amp:
+ # self.mp_trainer.backward(loss)
+ # self.mp_trainer.scaler.scale(loss).backward()
+ # else:
+
+ # exit AMP before backward
+ self.mp_trainer.backward(loss)
+
+ # TODO, merge visualization with original AE
+ # =================================== denoised AE log part ===================================
+
+ if dist_util.get_rank() == 0 and self.step % 500 == 0:
+ with th.no_grad():
+ # gt_vis = th.cat([batch['img'], batch['depth']], dim=-1)
+
+ gt_depth = micro['depth']
+ if gt_depth.ndim == 3:
+ gt_depth = gt_depth.unsqueeze(1)
+ gt_depth = (gt_depth - gt_depth.min()) / (gt_depth.max() -
+ gt_depth.min())
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (
+ pred_depth.max() - pred_depth.min())
+ pred_img = pred['image_raw']
+ gt_img = micro['img']
+
+ if 'image_sr' in pred: # TODO
+ pred_img = th.cat(
+ [self.pool_512(pred_img), pred['image_sr']],
+ dim=-1)
+ gt_img = th.cat(
+ [self.pool_512(micro['img']), micro['img_sr']],
+ dim=-1)
+ pred_depth = self.pool_512(pred_depth)
+ gt_depth = self.pool_512(gt_depth)
+
+ gt_vis = th.cat(
+ [
+ gt_img, micro['img'],
+ gt_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1)[0:1] # TODO, fail to load depth. range [0, 1]
+
+ if not self.denoised_ae:
+ # continue
+
+ denoised_ae_pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'][0:1],
+ latent=denoised_out['pred_xstart'][0:1] * self.triplane_scaling_divider, # TODO, how to define the scale automatically
+ behaviour='triplane_dec')
+
+ # assert denoised_ae_pred is not None
+
+ # print(pred_img.shape)
+ # print('denoised_ae:', self.denoised_ae)
+
+ pred_vis = th.cat([
+ pred_img[0:1], denoised_ae_pred['image_raw'],
+ pred_depth[0:1].repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = th.cat([gt_vis, pred_vis], dim=-2)[0].permute(
+ 1, 2, 0).cpu() # ! pred in range[-1, 1]
+ # vis_grid = torchvision.utils.make_grid(vis) # HWC
+ vis = vis.numpy() * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+ Image.fromarray(vis).save(
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}.jpg'
+ )
+ print(
+ 'log denoised vis to: ',
+ f'{logger.get_dir()}/{self.step+self.resume_step}denoised_{t[0].item()}.jpg'
+ )
+
+ th.cuda.empty_cache()
+
+ @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ def eval_novelview_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_novelview_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ all_loss_dict = []
+ novel_view_micro = {}
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i == 0:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ pred = self.ddp_rec_model(img=novel_view_micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64); use the rec model, self.model is the denoiser
+ # target = {
+ # 'img': micro['img'],
+ # 'depth': micro['depth'],
+ # 'depth_mask': micro['depth_mask']
+ # }
+
+ _, loss_dict = self.loss_class(pred, micro, test_mode=True)
+ all_loss_dict.append(loss_dict)
+
+ # ! move to other places, add tensorboard
+
+ # pred_vis = th.cat([
+ # pred['image_raw'],
+ # -pred['image_depth'].repeat_interleave(3, dim=1)
+ # ],
+ # dim=-1)
+
+ # normalize depth
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+ if 'image_sr' in pred:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores_novelview.json'),
+ 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/NovelView/{k}', v,
+ self.step + self.resume_step)
+
+ @th.no_grad()
+ # def eval_loop(self, c_list:list):
+ def eval_loop(self):
+ # novel view synthesis given evaluation camera trajectory
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/video_{self.step+self.resume_step}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+ all_loss_dict = []
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ # for i in range(0, 8, self.microbatch):
+ # c = c_list[i].to(dist_util.dev()).reshape(1, -1)
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ # pred = self.model(img=micro['img_to_encoder'],
+ # c=micro['c']) # pred: (B, 3, 64, 64)
+
+ # pred of rec model
+ pred = self.ddp_rec_model(img=micro['img_to_encoder'],
+ c=micro['c']) # pred: (B, 3, 64, 64)
+
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+
+ val_scores_for_logging = calc_average_loss(all_loss_dict)
+ with open(os.path.join(logger.get_dir(), 'scores.json'), 'a') as f:
+ json.dump({'step': self.step, **val_scores_for_logging}, f)
+
+ # * log to tensorboard
+ for k, v in val_scores_for_logging.items():
+ self.writer.add_scalar(f'Eval/Rec/{k}', v,
+ self.step + self.resume_step)
+
+ self.eval_novelview_loop()
+
+ def save(self, mp_trainer=None, model_name='ddpm'):
+ if mp_trainer is None:
+ mp_trainer = self.mp_trainer
+
+ def save_checkpoint(rate, params):
+ state_dict = mp_trainer.master_params_to_state_dict(params)
+ if dist_util.get_rank() == 0:
+ logger.log(f"saving model {model_name} {rate}...")
+ if not rate:
+ filename = f"model_{model_name}{(self.step+self.resume_step):07d}.pt"
+ else:
+ filename = f"ema_{model_name}_{rate}_{(self.step+self.resume_step):07d}.pt"
+ with bf.BlobFile(bf.join(get_blob_logdir(), filename),
+ "wb") as f:
+ th.save(state_dict, f)
+
+ save_checkpoint(0, self.mp_trainer.master_params)
+ for rate, params in zip(self.ema_rate, self.ema_params):
+ save_checkpoint(rate, params)
+
+ dist.barrier()
+
+ def _load_and_sync_parameters(self, model=None, model_name='ddpm'):
+ resume_checkpoint, self.resume_step = find_resume_checkpoint(
+ self.resume_checkpoint, model_name) or self.resume_checkpoint
+
+ if model is None:
+ model = self.model
+ print(resume_checkpoint)
+
+ if resume_checkpoint and Path(resume_checkpoint).exists():
+ if dist_util.get_rank() == 0:
+
+ # ! rank 0 return will cause all other ranks to hang
+ # if not Path(resume_checkpoint).exists():
+ # logger.log(
+ # f"failed to load model from checkpoint: {resume_checkpoint}, not exist"
+ # )
+ # return
+
+ logger.log(
+ f"loading model from checkpoint: {resume_checkpoint}...")
+ map_location = {
+ 'cuda:%d' % 0: 'cuda:%d' % dist_util.get_rank()
+ } # configure map_location properly
+
+ print(f'mark {model_name} loading ', flush=True)
+ resume_state_dict = dist_util.load_state_dict(
+ resume_checkpoint, map_location=map_location)
+ print(f'mark {model_name} loading finished', flush=True)
+
+ model_state_dict = model.state_dict()
+
+ for k, v in resume_state_dict.items():
+ if k in model_state_dict and v.size() == model_state_dict[k].size():
+ model_state_dict[k] = v
+ elif k in model_state_dict:
+ print('!!!! ignore key: ', k, ": ", v.size(),
+ 'shape in model: ', model_state_dict[k].size())
+ else:
+ print('!!!! ignore unknown key: ', k, ": ", v.size())
+
+ model.load_state_dict(model_state_dict, strict=True)
+ del model_state_dict
+
+ if dist_util.get_world_size() > 1:
+ dist_util.sync_params(model.parameters())
+ print(f'synced {model_name} params')
+
+ def _update_ema_rec(self):
+ for rate, params in zip(self.ema_rate, self.ema_params_rec):
+ update_ema(params, self.mp_trainer_rec.master_params, rate=rate)
+
+ def eval_ddpm_sample(self):
+
+ args = dnnlib.EasyDict(
+ dict(batch_size=1,
+ image_size=224,
+ denoise_in_channels=24,
+ clip_denoised=True,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+ if args.class_cond:
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
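+ # p_sample_loop / ddim_sample_loop return a tensor of the requested shape
+ # (batch, denoise_in_channels, image_size, image_size); clip_denoised=True
+ # clamps each intermediate x_0 estimate to [-1, 1], hence the re-scaling by
+ # triplane_scaling_divider inside render_video_given_triplane.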
+
+ for i in range(2):
+ triplane_sample = sample_fn(
+ self.ddp_model,
+ (args.batch_size, args.denoise_in_channels, args.image_size,
+ args.image_size),
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ )
+
+ self.render_video_given_triplane(
+ triplane_sample,
+ name_prefix=f'{self.step + self.resume_step}_{i}')
+
+ @th.inference_mode()
+ def render_video_given_triplane(self, planes, name_prefix='0'):
+
+ planes *= self.triplane_scaling_divider # if setting clip_denoised=True, the sampled planes will lie in [-1,1]. Thus, values beyond [+- std] will be abandoned in this version. Move to IN for later experiments.
+
+ # print(planes.min(), planes.max())
+
+ # used during diffusion sampling inference
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4',
+ mode='I',
+ fps=60,
+ codec='libx264')
+
+ # for i in range(0, len(c_list), 1): # TODO, larger batch size for eval
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ pred = self.ddp_rec_model(img=None,
+ c=micro['c'],
+ latent=planes,
+ behaviour='triplane_dec')
+
+ # if True:
+ pred_depth = pred['image_depth']
+ pred_depth = (pred_depth - pred_depth.min()) / (pred_depth.max() -
+ pred_depth.min())
+
+ if 'image_sr' in pred:
+ pred_vis = th.cat([
+ micro['img_sr'],
+ self.pool_512(pred['image_raw']), pred['image_sr'],
+ self.pool_512(pred_depth).repeat_interleave(3, dim=1)
+ ],
+ dim=-1)
+ else:
+ pred_vis = th.cat([
+ self.pool_128(micro['img']), pred['image_raw'],
+ pred_depth.repeat_interleave(3, dim=1)
+ ],
+ dim=-1) # B, 3, H, W
+
+ vis = pred_vis.permute(0, 2, 3, 1).cpu().numpy()
+ vis = vis * 127.5 + 127.5
+ vis = vis.clip(0, 255).astype(np.uint8)
+
+ for j in range(vis.shape[0]):
+ video_out.append_data(vis[j])
+
+ video_out.close()
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_{name_prefix}.mp4')
+
+ @th.inference_mode()
+ def render_video_noise_schedule(self, name_prefix='0'):
+
+ # planes *= self.triplane_std # denormalize for rendering
+
+ video_out = imageio.get_writer(
+ f'{logger.get_dir()}/triplane_visnoise_{name_prefix}.mp4',
+ mode='I',
+ fps=30,
+ codec='libx264')
+
+ for i, batch in enumerate(tqdm(self.eval_data)):
+ micro = {k: v.to(dist_util.dev()) for k, v in batch.items()}
+
+ if i % 10 != 0:
+ continue
+
+ # ========= novel view plane settings ====
+ if i == 0:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in batch.items()
+ }
+ else:
+ # if novel_view_micro['c'].shape[0] < micro['img'].shape[0]:
+ novel_view_micro = {
+ k: v[0:1].to(dist_util.dev()).repeat_interleave(
+ micro['img'].shape[0], 0)
+ for k, v in novel_view_micro.items()
+ }
+
+ latent = self.ddp_rec_model(
+ img=novel_view_micro['img_to_encoder'],
+ c=micro['c'])['latent'] # pred: (B, 3, 64, 64)
+
+ x_start = latent / self.triplane_scaling_divider # normalize std to 1
+ # x_start = latent
+
+ all_pred_vis = []
+ # for t in th.range(0,
+ # 4001,
+ # 500,
+ # dtype=th.long,
+ # device=dist_util.dev()): # cosine 4k steps
+ for t in th.range(0,
+ 1001,
+ 125,
+ dtype=th.long,
+ device=dist_util.dev()): # cosine 4k steps
+
+ # ========= add noise according to t
+ noise = th.randn_like(x_start) # x_start is the x0 image
+ x_t = self.diffusion.q_sample(
+ x_start, t, noise=noise
+ ) # * add noise according to predefined schedule
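+ # q_sample draws x_t ~ q(x_t | x_0) = sqrt(alpha_bar_t) * x_0 +
+ # sqrt(1 - alpha_bar_t) * eps, so later timesteps in this loop render
+ # progressively noisier triplanes.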
+ planes_x_t = (x_t * self.triplane_scaling_divider).clamp(
+ -50, 50) # de-scaling noised x_t
+
+ # planes_x_t = (x_t * 1).clamp(
+ # -50, 50) # de-scaling noised x_t
+
+ # ===== visualize
+ pred = self.ddp_rec_model(
+ img=None,
+ c=micro['c'],
+ latent=planes_x_t,
+ behaviour='triplane_dec') # pred: (B, 3, 64, 64)
+
+ # pred_depth = pred['image_depth']
+ # pred_depth = (pred_depth - pred_depth.min()) / (
+ # pred_depth.max() - pred_depth.min())
+ # pred_vis = th.cat([
+ # # self.pool_128(micro['img']),
+ # pred['image_raw'],
+ # ],
+ # dim=-1) # B, 3, H, W
+ pred_vis = pred['image_raw']
+
+ all_pred_vis.append(pred_vis)
+ # TODO, make grid
+
+ all_pred_vis = torchvision.utils.make_grid(
+ th.cat(all_pred_vis, 0),
+ nrow=len(all_pred_vis),
+ normalize=True,
+ value_range=(-1, 1),
+ scale_each=True) # normalized to [-1,1]
+
+ vis = all_pred_vis.permute(1, 2, 0).cpu().numpy() # H W 3
+
+ vis = (vis * 255).clip(0, 255).astype(np.uint8)
+
+ video_out.append_data(vis)
+
+ video_out.close()
+ print('logged video to: ',
+ f'{logger.get_dir()}/triplane_visnoise_{name_prefix}.mp4')
+
+ th.cuda.empty_cache()
diff --git a/nsr/train_util_diffusion_dit.py b/nsr/train_util_diffusion_dit.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdfeef5cbedb0a837a00d545ebd98d2b32a3fea8
--- /dev/null
+++ b/nsr/train_util_diffusion_dit.py
@@ -0,0 +1,105 @@
+from .train_util_diffusion import TrainLoop3DDiffusion
+from guided_diffusion import dist_util # used by eval_ddpm_sample
+# NUM_CLASSES is assumed to live in script_util, as in OpenAI's guided-diffusion
+from guided_diffusion.script_util import NUM_CLASSES
+import dnnlib
+import torch as th
+
+
+class TrainLoop3DDiffusionDiT(TrainLoop3DDiffusion):
+
+ def __init__(self,
+ *,
+ rec_model,
+ denoise_model,
+ diffusion,
+ loss_class,
+ data,
+ eval_data,
+ batch_size,
+ microbatch,
+ lr,
+ ema_rate,
+ log_interval,
+ eval_interval,
+ save_interval,
+ resume_checkpoint,
+ use_fp16=False,
+ fp16_scale_growth=0.001,
+ schedule_sampler=None,
+ weight_decay=0,
+ lr_anneal_steps=0,
+ iterations=10001,
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ triplane_scaling_divider=10,
+ use_amp=False,
+ **kwargs):
+ super().__init__(rec_model=rec_model,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ batch_size=batch_size,
+ microbatch=microbatch,
+ lr=lr,
+ ema_rate=ema_rate,
+ log_interval=log_interval,
+ eval_interval=eval_interval,
+ save_interval=save_interval,
+ resume_checkpoint=resume_checkpoint,
+ use_fp16=use_fp16,
+ fp16_scale_growth=fp16_scale_growth,
+ schedule_sampler=schedule_sampler,
+ weight_decay=weight_decay,
+ lr_anneal_steps=lr_anneal_steps,
+ iterations=iterations,
+ ignore_resume_opt=ignore_resume_opt,
+ freeze_ae=freeze_ae,
+ denoised_ae=denoised_ae,
+ triplane_scaling_divider=triplane_scaling_divider,
+ use_amp=use_amp,
+ **kwargs)
+
+ self.latent_name = 'latent_from_vit'
+ self.render_latent_behaviour = 'vit_postprocess_triplane_dec' # translate latent into 2D spatial tokens, then triplane render
+
+ def eval_ddpm_sample(self):
+
+ args = dnnlib.EasyDict(
+ dict(batch_size=1,
+ image_size=224,
+ denoise_in_channels=self.ddp_rec_model.module.decoder.triplane_decoder.out_chans, # type: ignore
+ clip_denoised=False,
+ class_cond=False,
+ use_ddim=False))
+
+ model_kwargs = {}
+
+ if args.class_cond:
+ classes = th.randint(low=0,
+ high=NUM_CLASSES,
+ size=(args.batch_size, ),
+ device=dist_util.dev())
+ model_kwargs["y"] = classes
+
+ diffusion = self.diffusion
+ sample_fn = (diffusion.p_sample_loop
+ if not args.use_ddim else diffusion.ddim_sample_loop)
+
+ vit_L = (224//14)**2 # vit sequence length
+
+ if self.ddp_rec_model.module.decoder.vit_decoder.cls_token:
+ vit_L += 1
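+ # The DiT variant denoises ViT tokens rather than triplane pixels: the
+ # sample shape below is (batch, vit_L, embed_dim) with vit_L = (224//14)**2
+ # patch tokens (+1 when the ViT decoder keeps a cls token), and
+ # 'vit_postprocess_triplane_dec' maps tokens back to planes for rendering.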
+
+ for i in range(1):
+ triplane_sample = sample_fn(
+ self.ddp_model,
+ (args.batch_size, vit_L, self.ddp_rec_model.module.decoder.vit_decoder.embed_dim), # vit token size, N L C
+ clip_denoised=args.clip_denoised,
+ model_kwargs=model_kwargs,
+ )
+
+ th.cuda.empty_cache()
+ self.render_video_given_triplane(
+ triplane_sample,
+ name_prefix=f'{self.step + self.resume_step}_{i}')
\ No newline at end of file
diff --git a/nsr/triplane.py b/nsr/triplane.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c4d65f82b0524b6790a30102a9521aac7236348
--- /dev/null
+++ b/nsr/triplane.py
@@ -0,0 +1,1088 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+from threading import local
+import torch
+import torch.nn as nn
+from torch_utils import persistence
+from .networks_stylegan2 import Generator as StyleGAN2Backbone
+from .networks_stylegan2 import ToRGBLayer, SynthesisNetwork, MappingNetwork
+from .volumetric_rendering.renderer import ImportanceRenderer
+from .volumetric_rendering.ray_sampler import RaySampler, PatchRaySampler
+import dnnlib
+from pdb import set_trace as st
+import math
+
+import torch.nn.functional as F
+import itertools
+from ldm.modules.diffusionmodules.model import SimpleDecoder, Decoder
+
+
+@persistence.persistent_class
+class TriPlaneGenerator(torch.nn.Module):
+
+ def __init__(
+ self,
+ z_dim, # Input latent (Z) dimensionality.
+ c_dim, # Conditioning label (C) dimensionality.
+ w_dim, # Intermediate latent (W) dimensionality.
+ img_resolution, # Output resolution.
+ img_channels, # Number of output color channels.
+ sr_num_fp16_res=0,
+ mapping_kwargs={}, # Arguments for MappingNetwork.
+ rendering_kwargs={},
+ sr_kwargs={},
+ bcg_synthesis_kwargs={},
+ # pifu_kwargs={},
+ # ada_kwargs={}, # not used, placeholder
+ **synthesis_kwargs, # Arguments for SynthesisNetwork.
+ ):
+ super().__init__()
+ self.z_dim = z_dim
+ self.c_dim = c_dim
+ self.w_dim = w_dim
+ self.img_resolution = img_resolution
+ self.img_channels = img_channels
+ self.renderer = ImportanceRenderer()
+ # synthesis() below unpacks two values from the ray sampler, so use the
+ # standard RaySampler (PatchRaySampler additionally returns patch bboxes)
+ self.ray_sampler = RaySampler()
+ self.backbone = StyleGAN2Backbone(z_dim,
+ c_dim,
+ w_dim,
+ img_resolution=256,
+ img_channels=32 * 3,
+ mapping_kwargs=mapping_kwargs,
+ **synthesis_kwargs)
+ self.superresolution = dnnlib.util.construct_class_by_name(
+ class_name=rendering_kwargs['superresolution_module'],
+ channels=32,
+ img_resolution=img_resolution,
+ sr_num_fp16_res=sr_num_fp16_res,
+ sr_antialias=rendering_kwargs['sr_antialias'],
+ **sr_kwargs)
+
+ # self.bcg_synthesis = None
+ if rendering_kwargs.get('use_background', False):
+ self.bcg_synthesis = SynthesisNetwork(
+ w_dim,
+ img_resolution=self.superresolution.input_resolution,
+ img_channels=32,
+ **bcg_synthesis_kwargs)
+ self.bcg_mapping = MappingNetwork(z_dim=z_dim,
+ c_dim=c_dim,
+ w_dim=w_dim,
+ num_ws=self.num_ws,
+ **mapping_kwargs)
+ # New mapping network for self-adaptive camera pose, dim = 3
+
+ self.decoder = OSGDecoder(
+ 32, {
+ 'decoder_lr_mul': rendering_kwargs.get('decoder_lr_mul', 1),
+ 'decoder_output_dim': 32
+ })
+ self.neural_rendering_resolution = 64
+ self.rendering_kwargs = rendering_kwargs
+
+ self._last_planes = None
+ self.pool_256 = torch.nn.AdaptiveAvgPool2d((256, 256))
+
+ def mapping(self,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False):
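+ # Pose conditioning can be disabled two ways: c_gen_conditioning_zero
+ # zeroes c outright, and the multiplicative c_scale factor (default 0)
+ # likewise removes the signal before the StyleGAN2 mapping network.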
+ if self.rendering_kwargs['c_gen_conditioning_zero']:
+ c = torch.zeros_like(c)
+ return self.backbone.mapping(z,
+ c *
+ self.rendering_kwargs.get('c_scale', 0),
+ truncation_psi=truncation_psi,
+ truncation_cutoff=truncation_cutoff,
+ update_emas=update_emas)
+
+ def synthesis(self,
+ ws,
+ c,
+ neural_rendering_resolution=None,
+ update_emas=False,
+ cache_backbone=False,
+ use_cached_backbone=False,
+ return_meta=False,
+ return_raw_only=False,
+ **synthesis_kwargs):
+
+ return_sampling_details_flag = self.rendering_kwargs.get(
+ 'return_sampling_details_flag', False)
+
+ if return_sampling_details_flag:
+ return_meta = True
+
+ cam2world_matrix = c[:, :16].view(-1, 4, 4)
+ # cam2world_matrix = torch.eye(4, device=c.device).unsqueeze(0).repeat_interleave(c.shape[0], dim=0)
+ # c[:, :16] = cam2world_matrix.view(-1, 16)
+ intrinsics = c[:, 16:25].view(-1, 3, 3)
+
+ if neural_rendering_resolution is None:
+ neural_rendering_resolution = self.neural_rendering_resolution
+ else:
+ self.neural_rendering_resolution = neural_rendering_resolution
+
+ H = W = self.neural_rendering_resolution
+ # Create a batch of rays for volume rendering
+ ray_origins, ray_directions = self.ray_sampler(
+ cam2world_matrix, intrinsics, neural_rendering_resolution)
+
+ # Create triplanes by running StyleGAN backbone
+ N, M, _ = ray_origins.shape
+ if use_cached_backbone and self._last_planes is not None:
+ planes = self._last_planes
+ else:
+ planes = self.backbone.synthesis(
+ ws[:, :self.backbone.num_ws, :], # ws, BS 14 512
+ update_emas=update_emas,
+ **synthesis_kwargs)
+ if cache_backbone:
+ self._last_planes = planes
+
+ # Reshape output into three 32-channel planes
+ planes = planes.view(len(planes), 3, 32, planes.shape[-2],
+ planes.shape[-1]) # BS 96 256 256
+
+ # Perform volume rendering
+ # st()
+ rendering_details = self.renderer(
+ planes,
+ self.decoder,
+ ray_origins,
+ ray_directions,
+ self.rendering_kwargs,
+ # return_meta=True)
+ return_meta=return_meta)
+
+ # calibs = create_calib_matrix(c)
+ # all_coords = rendering_details['all_coords']
+ # B, num_rays, S, _ = all_coords.shape
+ # all_coords_B3N = all_coords.reshape(B, -1, 3).permute(0,2,1)
+ # homo_coords = torch.cat([all_coords, torch.zeros_like(all_coords[..., :1])], -1)
+ # homo_coords[..., -1] = 1
+ # homo_coords = homo_coords.reshape(homo_coords.shape[0], -1, 4)
+ # homo_coords = homo_coords.permute(0,2,1)
+ # xyz = calibs @ homo_coords
+ # xyz = xyz.permute(0,2,1).reshape(B, H, W, S, 4)
+ # st()
+
+ # xyz_proj = perspective(all_coords_B3N, calibs)
+ # xyz_proj = xyz_proj.permute(0,2,1).reshape(B, H, W, S, 3) # [0,0] - [1,1]
+ # st()
+
+ feature_samples, depth_samples, weights_samples = (
+ rendering_details[k]
+ for k in ['feature_samples', 'depth_samples', 'weights_samples'])
+
+ if return_sampling_details_flag:
+ shape_synthesized = rendering_details['shape_synthesized']
+ else:
+ shape_synthesized = None
+
+ # Reshape into 'raw' neural-rendered image
+ feature_image = feature_samples.permute(0, 2, 1).reshape(
+ N, feature_samples.shape[-1], H, W).contiguous() # B 32 H W
+ depth_image = depth_samples.permute(0, 2, 1).reshape(N, 1, H, W)
+
+ # Run superresolution to get final image
+ rgb_image = feature_image[:, :3] # B 3 H W
+ if not return_raw_only:
+ sr_image = self.superresolution(
+ rgb_image,
+ feature_image,
+ ws[:, -1:, :], # only use the last layer
+ noise_mode=self.rendering_kwargs['superresolution_noise_mode'],
+ **{
+ k: synthesis_kwargs[k]
+ for k in synthesis_kwargs.keys() if k != 'noise_mode'
+ })
+ else:
+ sr_image = rgb_image
+
+ ret_dict = {
+ 'image': sr_image,
+ 'image_raw': rgb_image,
+ 'image_depth': depth_image,
+ 'weights_samples': weights_samples,
+ 'shape_synthesized': shape_synthesized
+ }
+ if return_meta:
+ ret_dict.update({
+ # 'feature_image': feature_image,
+ 'feature_volume':
+ rendering_details['feature_volume'],
+ 'all_coords':
+ rendering_details['all_coords'],
+ 'weights':
+ rendering_details['weights'],
+ })
+
+ return ret_dict
+
+ def sample(self,
+ coordinates,
+ directions,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False,
+ **synthesis_kwargs):
+ # Compute RGB features, density for arbitrary 3D coordinates. Mostly used for extracting shapes.
+ ws = self.mapping(z,
+ c,
+ truncation_psi=truncation_psi,
+ truncation_cutoff=truncation_cutoff,
+ update_emas=update_emas)
+ planes = self.backbone.synthesis(ws,
+ update_emas=update_emas,
+ **synthesis_kwargs)
+ planes = planes.view(len(planes), 3, 32, planes.shape[-2],
+ planes.shape[-1])
+ return self.renderer.run_model(planes, self.decoder, coordinates,
+ directions, self.rendering_kwargs)
+
+ def sample_mixed(self,
+ coordinates,
+ directions,
+ ws,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ update_emas=False,
+ **synthesis_kwargs):
+ # Same as sample, but expects latent vectors 'ws' instead of Gaussian noise 'z'
+ planes = self.backbone.synthesis(ws,
+ update_emas=update_emas,
+ **synthesis_kwargs)
+ planes = planes.view(len(planes), 3, 32, planes.shape[-2],
+ planes.shape[-1])
+ return self.renderer.run_model(planes, self.decoder, coordinates,
+ directions, self.rendering_kwargs)
+
+ def forward(self,
+ z,
+ c,
+ truncation_psi=1,
+ truncation_cutoff=None,
+ neural_rendering_resolution=None,
+ update_emas=False,
+ cache_backbone=False,
+ use_cached_backbone=False,
+ **synthesis_kwargs):
+ # Render a batch of generated images.
+ ws = self.mapping(z,
+ c,
+ truncation_psi=truncation_psi,
+ truncation_cutoff=truncation_cutoff,
+ update_emas=update_emas)
+ return self.synthesis(
+ ws,
+ c,
+ update_emas=update_emas,
+ neural_rendering_resolution=neural_rendering_resolution,
+ cache_backbone=cache_backbone,
+ use_cached_backbone=use_cached_backbone,
+ **synthesis_kwargs)
+
+
+from .networks_stylegan2 import FullyConnectedLayer
+
+# class OSGDecoder(torch.nn.Module):
+
+# def __init__(self, n_features, options):
+# super().__init__()
+# self.hidden_dim = 64
+# self.output_dim = options['decoder_output_dim']
+# self.n_features = n_features
+
+# self.net = torch.nn.Sequential(
+# FullyConnectedLayer(n_features,
+# self.hidden_dim,
+# lr_multiplier=options['decoder_lr_mul']),
+# torch.nn.Softplus(),
+# FullyConnectedLayer(self.hidden_dim,
+# 1 + options['decoder_output_dim'],
+# lr_multiplier=options['decoder_lr_mul']))
+
+# def forward(self, sampled_features, ray_directions):
+# # Aggregate features
+# sampled_features = sampled_features.mean(1)
+# x = sampled_features
+
+# N, M, C = x.shape
+# x = x.view(N * M, C)
+
+# x = self.net(x)
+# x = x.view(N, M, -1)
+# rgb = torch.sigmoid(x[..., 1:]) * (
+# 1 + 2 * 0.001) - 0.001 # Uses sigmoid clamping from MipNeRF
+# sigma = x[..., 0:1]
+# return {'rgb': rgb, 'sigma': sigma}
+
+
+@persistence.persistent_class
+class OSGDecoder(torch.nn.Module):
+
+ def __init__(self, n_features, options):
+ super().__init__()
+ self.hidden_dim = 64
+ self.decoder_output_dim = options['decoder_output_dim']
+
+ self.net = torch.nn.Sequential(
+ FullyConnectedLayer(n_features,
+ self.hidden_dim,
+ lr_multiplier=options['decoder_lr_mul']),
+ torch.nn.Softplus(),
+ FullyConnectedLayer(self.hidden_dim,
+ 1 + options['decoder_output_dim'],
+ lr_multiplier=options['decoder_lr_mul']))
+ self.activation = options.get('decoder_activation', 'sigmoid')
+
+ def forward(self, sampled_features, ray_directions):
+ # Aggregate features
+ sampled_features = sampled_features.mean(1)
+ x = sampled_features
+
+ N, M, C = x.shape
+ x = x.view(N * M, C)
+
+ x = self.net(x)
+ x = x.view(N, M, -1)
+ rgb = x[..., 1:]
+ sigma = x[..., 0:1]
+ if self.activation == "sigmoid":
+ # Original EG3D
+ rgb = torch.sigmoid(rgb) * (1 + 2 * 0.001) - 0.001
+ elif self.activation == "lrelu":
+ # StyleGAN2-style, use with toRGB
+ rgb = torch.nn.functional.leaky_relu(rgb, 0.2,
+ inplace=True) * math.sqrt(2)
+ return {'rgb': rgb, 'sigma': sigma}
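+ # Shape sketch (hypothetical numbers): sampled_features (N, 3, M, 32) is
+ # averaged over the 3 planes, yielding rgb (N, M, decoder_output_dim) and
+ # sigma (N, M, 1); ray_directions is accepted for API compatibility only.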
+
+
+class OSGDecoderFlexicube(OSGDecoder):
+ # https://github.com/TencentARC/InstantMesh/blob/0a64425c6d390afa40128132cec42cd5c6408bbf/src/models/renderer/synthesizer_mesh.py#L15
+ def __init__(self, n_features, options, hidden_dim: int = 64, num_layers: int = 4, activation: nn.Module = nn.ReLU):
+ super().__init__(n_features, options)
+
+ # self.net_sdf = nn.Sequential(
+ # nn.Linear(3 * n_features, hidden_dim),
+ # activation(),
+ # *itertools.chain(*[[
+ # nn.Linear(hidden_dim, hidden_dim),
+ # activation(),
+ # ] for _ in range(num_layers - 2)]),
+ # nn.Linear(hidden_dim, 1),
+ # )
+ # self.net_rgb = nn.Sequential(
+ # nn.Linear(3 * n_features, hidden_dim),
+ # activation(),
+ # *itertools.chain(*[[
+ # nn.Linear(hidden_dim, hidden_dim),
+ # activation(),
+ # ] for _ in range(num_layers - 2)]),
+ # nn.Linear(hidden_dim, 3),
+ # )
+
+ # ! sdf and rgb prediction adopt the old convention (Softplus) here
+ # TODO, load pre-trained model weights
+ self.net_sdf = torch.nn.Sequential(
+ FullyConnectedLayer(n_features,
+ self.hidden_dim,
+ lr_multiplier=options['decoder_lr_mul']),
+ torch.nn.Softplus(),
+ FullyConnectedLayer(self.hidden_dim,
+ 1,
+ lr_multiplier=options['decoder_lr_mul']))
+
+ self.net_rgb = torch.nn.Sequential(
+ FullyConnectedLayer(n_features,
+ self.hidden_dim,
+ lr_multiplier=options['decoder_lr_mul']),
+ torch.nn.Softplus(),
+ FullyConnectedLayer(self.hidden_dim,
+ options['decoder_output_dim'],
+ lr_multiplier=options['decoder_lr_mul']))
+
+
+ # ! for following MLP, use new behaviour
+ self.net_deformation = nn.Sequential(
+ nn.Linear(3 * n_features, hidden_dim),
+ activation(),
+ *itertools.chain(*[[
+ nn.Linear(hidden_dim, hidden_dim),
+ activation(),
+ ] for _ in range(num_layers - 2)]),
+ nn.Linear(hidden_dim, 3),
+ )
+ self.net_weight = nn.Sequential(
+ nn.Linear(8 * 3 * n_features, hidden_dim),
+ activation(),
+ *itertools.chain(*[[
+ nn.Linear(hidden_dim, hidden_dim),
+ activation(),
+ ] for _ in range(num_layers - 2)]),
+ nn.Linear(hidden_dim, 21),
+ )
+
+ # init all bias to zero
+ for m in self.modules():
+ if isinstance(m, nn.Linear):
+ nn.init.zeros_(m.bias)
+
+
+
+ def get_geometry_prediction(self, sampled_features, flexicubes_indices):
+ _N, n_planes, _M, _C = sampled_features.shape
+
+ sdf = self.net_sdf(sampled_features.mean(1)) # for compat issue
+ sampled_features = sampled_features.permute(0, 2, 1, 3).reshape(_N, _M, n_planes*_C)
+ deformation = self.net_deformation(sampled_features)
+
+ grid_features = torch.index_select(input=sampled_features, index=flexicubes_indices.reshape(-1), dim=1)
+ grid_features = grid_features.reshape(
+ sampled_features.shape[0], flexicubes_indices.shape[0], flexicubes_indices.shape[1] * sampled_features.shape[-1])
+ weight = self.net_weight(grid_features) * 0.1
+
+ return sdf, deformation, weight
+
+ def get_texture_prediction(self, sampled_features):
+ _N, n_planes, _M, _C = sampled_features.shape
+ # sampled_features = sampled_features.permute(0, 2, 1, 3).reshape(_N, _M, n_planes*_C)
+ sampled_features = sampled_features.mean(1)
+
+ rgb = self.net_rgb(sampled_features) # sigmoid feat by default
+ rgb = torch.sigmoid(rgb)*(1 + 2*0.001) - 0.001 # Uses sigmoid clamping from MipNeRF
+
+ return rgb
+
+
+class LRMOSGDecoder(nn.Module):
+ """
+ Triplane decoder that gives RGB and sigma values from sampled features.
+ Using ReLU here instead of Softplus in the original implementation.
+
+ Reference:
+ EG3D: https://github.com/NVlabs/eg3d/blob/main/eg3d/training/triplane.py#L112
+ """
+ def __init__(self, n_features: int,
+ hidden_dim: int = 64, num_layers: int = 4, activation: nn.Module = nn.ReLU):
+ super().__init__()
+ self.decoder_output_dim = 3
+ self.net = nn.Sequential(
+ nn.Linear(3 * n_features, hidden_dim),
+ activation(),
+ *itertools.chain(*[[
+ nn.Linear(hidden_dim, hidden_dim),
+ activation(),
+ ] for _ in range(num_layers - 2)]),
+ nn.Linear(hidden_dim, 1 + self.decoder_output_dim),
+ )
+ # init all bias to zero
+ for m in self.modules():
+ if isinstance(m, nn.Linear):
+ nn.init.zeros_(m.bias)
+
+ def forward(self, sampled_features, ray_directions):
+ # Aggregate features by mean
+ # sampled_features = sampled_features.mean(1)
+ # Aggregate features by concatenation
+ _N, n_planes, _M, _C = sampled_features.shape
+ sampled_features = sampled_features.permute(0, 2, 1, 3).reshape(_N, _M, n_planes*_C)
+ x = sampled_features
+
+ N, M, C = x.shape
+ x = x.contiguous().view(N*M, C)
+
+ x = self.net(x)
+ x = x.view(N, M, -1)
+ rgb = torch.sigmoid(x[..., 1:])*(1 + 2*0.001) - 0.001 # Uses sigmoid clamping from MipNeRF
+ sigma = x[..., 0:1]
+
+ return {'rgb': rgb, 'sigma': sigma}
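+ # Unlike OSGDecoder, which averages the three planes, this decoder
+ # concatenates per-plane features, so its first Linear layer expects
+ # 3 * n_features inputs.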
+
+
+class Triplane(torch.nn.Module):
+
+ def __init__(
+ self,
+ c_dim=25, # Conditioning label (C) dimensionality.
+ img_resolution=128, # Output resolution.
+ img_channels=3, # Number of output color channels.
+ out_chans=96,
+ triplane_size=224,
+ rendering_kwargs={},
+ decoder_in_chans=32,
+ decoder_output_dim=32,
+ sr_num_fp16_res=0,
+ sr_kwargs={},
+ create_triplane=False, # for overfitting single instance study
+ bcg_synthesis_kwargs={},
+ lrm_decoder=False,
+ ):
+ super().__init__()
+ self.c_dim = c_dim
+ self.img_resolution = img_resolution # TODO
+ self.img_channels = img_channels
+ self.triplane_size = triplane_size
+
+ self.decoder_in_chans = decoder_in_chans
+ self.out_chans = out_chans
+
+ self.renderer = ImportanceRenderer()
+
+ if 'PatchRaySampler' in rendering_kwargs:
+ self.ray_sampler = PatchRaySampler()
+ else:
+ self.ray_sampler = RaySampler()
+
+ if lrm_decoder:
+ self.decoder = LRMOSGDecoder(
+ decoder_in_chans,)
+ else:
+ self.decoder = OSGDecoder(
+ decoder_in_chans,
+ {
+ 'decoder_lr_mul': rendering_kwargs.get('decoder_lr_mul', 1),
+ # 'decoder_output_dim': 32
+ 'decoder_output_dim': decoder_output_dim
+ })
+
+ self.neural_rendering_resolution = img_resolution # TODO
+ # self.neural_rendering_resolution = 128 # TODO
+ self.rendering_kwargs = rendering_kwargs
+ self.create_triplane = create_triplane
+ if create_triplane:
+ self.planes = nn.Parameter(torch.randn(1, out_chans, 256, 256))
+
+ if bool(sr_kwargs): # check whether empty
+ assert decoder_in_chans == decoder_output_dim, 'tradition'
+ if rendering_kwargs['superresolution_module'] in [
+ 'torch_utils.components.PixelUnshuffleUpsample',
+ 'torch_utils.components.NearestConvSR',
+ 'torch_utils.components.NearestConvSR_Residual'
+ ]:
+ self.superresolution = dnnlib.util.construct_class_by_name(
+ class_name=rendering_kwargs['superresolution_module'],
+ # * for PixelUnshuffleUpsample
+ sr_ratio=2, # 2x SR, 128 -> 256
+ output_dim=decoder_output_dim,
+ num_out_ch=3,
+ )
+ else:
+ self.superresolution = dnnlib.util.construct_class_by_name(
+ class_name=rendering_kwargs['superresolution_module'],
+ # * for stylegan upsample
+ channels=decoder_output_dim,
+ img_resolution=img_resolution,
+ sr_num_fp16_res=sr_num_fp16_res,
+ sr_antialias=rendering_kwargs['sr_antialias'],
+ **sr_kwargs)
+ else:
+ self.superresolution = None
+
+ self.bcg_synthesis = None
+
+ # * pure reconstruction
+ def forward(
+ self,
+ planes=None,
+ # img,
+ c=None,
+ ws=None,
+ ray_origins=None,
+ ray_directions=None,
+ z_bcg=None,
+ neural_rendering_resolution=None,
+ update_emas=False,
+ cache_backbone=False,
+ use_cached_backbone=False,
+ return_meta=False,
+ return_raw_only=False,
+ sample_ray_only=False,
+ fg_bbox=None,
+ **synthesis_kwargs):
+
+ cam2world_matrix = c[:, :16].reshape(-1, 4, 4)
+ # cam2world_matrix = torch.eye(4, device=c.device).unsqueeze(0).repeat_interleave(c.shape[0], dim=0)
+ # c[:, :16] = cam2world_matrix.view(-1, 16)
+ intrinsics = c[:, 16:25].reshape(-1, 3, 3)
+
+ if neural_rendering_resolution is None:
+ neural_rendering_resolution = self.neural_rendering_resolution
+ else:
+ self.neural_rendering_resolution = neural_rendering_resolution
+
+ if ray_directions is None: # when output video
+ H = W = self.neural_rendering_resolution
+ # Create a batch of rays for volume rendering
+ # ray_origins, ray_directions, ray_bboxes = self.ray_sampler(
+ # cam2world_matrix, intrinsics, neural_rendering_resolution)
+
+ if sample_ray_only: # ! for sampling
+ ray_origins, ray_directions, ray_bboxes = self.ray_sampler(
+ cam2world_matrix, intrinsics,
+ self.rendering_kwargs.get('patch_rendering_resolution'),
+ self.neural_rendering_resolution, fg_bbox)
+
+ # for patch supervision
+ ret_dict = {
+ 'ray_origins': ray_origins,
+ 'ray_directions': ray_directions,
+ 'ray_bboxes': ray_bboxes,
+ }
+
+ return ret_dict
+
+ else: # ! for rendering
+ ray_origins, ray_directions, _ = self.ray_sampler(
+ cam2world_matrix, intrinsics, self.neural_rendering_resolution,
+ self.neural_rendering_resolution)
+
+ else:
+ assert ray_origins is not None
+ H = W = int(ray_directions.shape[1]**0.5) # dynamically set patch resolution
+
+ # ! match the batch size, if not returned
+ if planes is None:
+ assert self.planes is not None
+ planes = self.planes.repeat_interleave(c.shape[0], dim=0)
+ return_sampling_details_flag = self.rendering_kwargs.get(
+ 'return_sampling_details_flag', False)
+
+ if return_sampling_details_flag:
+ return_meta = True
+
+ # Create triplanes by running StyleGAN backbone
+ N, M, _ = ray_origins.shape
+
+ # Reshape output into three 32-channel planes
+ if planes.shape[1] == 3 * 2 * self.decoder_in_chans:
+ # if isinstance(planes, tuple):
+ # N *= 2
+ triplane_bg = True
+ # planes = torch.cat(planes, 0) # inference in parallel
+ # ray_origins = ray_origins.repeat(2,1,1)
+ # ray_directions = ray_directions.repeat(2,1,1)
+
+ else:
+ triplane_bg = False
+
+ # assert not triplane_bg
+
+ # ! hard coded, will fix later
+ # if planes.shape[1] == 3 * self.decoder_in_chans:
+ # else:
+
+ # planes = planes.view(len(planes), 3, self.decoder_in_chans,
+ planes = planes.reshape(
+ len(planes),
+ 3,
+ -1, # ! support background plane
+ planes.shape[-2],
+ planes.shape[-1]) # BS 96 256 256
+
+ # Perform volume rendering
+ rendering_details = self.renderer(planes,
+ self.decoder,
+ ray_origins,
+ ray_directions,
+ self.rendering_kwargs,
+ return_meta=return_meta)
+
+ feature_samples, depth_samples, weights_samples = (
+ rendering_details[k]
+ for k in ['feature_samples', 'depth_samples', 'weights_samples'])
+
+ if return_sampling_details_flag:
+ shape_synthesized = rendering_details['shape_synthesized']
+ else:
+ shape_synthesized = None
+
+ # Reshape into 'raw' neural-rendered image
+ feature_image = feature_samples.permute(0, 2, 1).reshape(
+ N, feature_samples.shape[-1], H,
+ W).contiguous() # B 32 H W, in [-1,1]
+ depth_image = depth_samples.permute(0, 2, 1).reshape(N, 1, H, W)
+ weights_samples = weights_samples.permute(0, 2, 1).reshape(N, 1, H, W)
+
+ # Generate Background
+ # if self.bcg_synthesis:
+
+ # # bg composition
+ # # if self.decoder.activation == "sigmoid":
+ # # feature_image = feature_image * 2 - 1 # Scale to (-1, 1), taken from ray marcher
+
+ # assert isinstance(
+ # z_bcg, torch.Tensor
+ # ) # 512 latents after reparameterization, reuse the name
+ # # ws_bcg = ws[:,:self.bcg_synthesis.num_ws] if ws_bcg is None else ws_bcg[:,:self.bcg_synthesis.num_ws]
+
+ # with torch.autocast(device_type='cuda',
+ # dtype=torch.float16,
+ # enabled=False):
+
+ # ws_bcg = self.bcg_mapping(z_bcg, c=None) # reuse the name
+ # if ws_bcg.size(1) < self.bcg_synthesis.num_ws:
+ # ws_bcg = torch.cat([
+ # ws_bcg, ws_bcg[:, -1:].repeat(
+ # 1, self.bcg_synthesis.num_ws - ws_bcg.size(1), 1)
+ # ], 1)
+
+ # bcg_image = self.bcg_synthesis(ws_bcg,
+ # update_emas=update_emas,
+ # **synthesis_kwargs)
+ # bcg_image = torch.nn.functional.interpolate(
+ # bcg_image,
+ # size=feature_image.shape[2:],
+ # mode='bilinear',
+ # align_corners=False,
+ # antialias=self.rendering_kwargs['sr_antialias'])
+ # feature_image = feature_image + (1 - weights_samples) * bcg_image
+
+ # # Generate Raw image
+ # assert self.torgb
+ # rgb_image = self.torgb(feature_image,
+ # ws_bcg[:, -1],
+ # fused_modconv=False)
+ # rgb_image = rgb_image.to(dtype=torch.float32,
+ # memory_format=torch.contiguous_format)
+ # # st()
+ # else:
+
+ mask_image = weights_samples * (1 + 2 * 0.001) - 0.001
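+ # same +/-0.001 stretch as the MipNeRF sigmoid clamping used for rgb, so
+ # the accumulated-alpha mask can actually reach hard 0/1 values.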
+ if triplane_bg:
+ # composite the separately-rendered background behind the foreground,
+ # following PanoHead:
+ # https://github.com/SizheAn/PanoHead/blob/17ad915941c7e2703d5aa3eb5ff12eac47c90e53/training/triplane.py#L127C45-L127C64
+ feature_image = (1 - mask_image) * rendering_details[
+ 'bg_ret_dict']['rgb_final'] + feature_image
+
+ rgb_image = feature_image[:, :3]
+
+ # Run superresolution to get the final image
+ if self.superresolution is not None and not return_raw_only:
+ # assert ws is not None, 'feed in [cls] token here for SR module'
+
+ if ws is not None and ws.ndim == 2:
+ ws = ws.unsqueeze(
+ 1)[:, -1:, :] # follow stylegan tradition, B, N, C
+
+ sr_image = self.superresolution(
+ rgb=rgb_image,
+ x=feature_image,
+ base_x=rgb_image,
+ ws=ws, # only use the last layer
+ noise_mode=self.rendering_kwargs[
+ 'superresolution_noise_mode'], # typically 'none'
+ **{
+ k: synthesis_kwargs[k]
+ for k in synthesis_kwargs.keys() if k != 'noise_mode'
+ })
+ else:
+ # sr_image = rgb_image
+ sr_image = None
+
+ if shape_synthesized is not None:
+ shape_synthesized.update({
+ 'image_depth': depth_image,
+ }) # for 3D loss easy computation, wrap all 3D in a single dict
+
+ ret_dict = {
+ 'feature_image': feature_image,
+ # 'image_raw': feature_image[:, :3],
+ 'image_raw': rgb_image,
+ 'image_depth': depth_image,
+ 'weights_samples': weights_samples,
+ # 'silhouette': mask_image,
+ # 'silhouette_normalized_3channel': (mask_image*2-1).repeat_interleave(3,1), # N 3 H W
+ 'shape_synthesized': shape_synthesized,
+ "image_mask": mask_image,
+ }
+
+ if sr_image is not None:
+ ret_dict.update({
+ 'image_sr': sr_image,
+ })
+
+ if return_meta:
+ ret_dict.update({
+ 'feature_volume':
+ rendering_details['feature_volume'],
+ 'all_coords':
+ rendering_details['all_coords'],
+ 'weights':
+ rendering_details['weights'],
+ })
+
+ return ret_dict
+
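+# A rough shape sketch (illustrative, not from the original file) of the dict
+# returned by the forward pass above, with H = W = neural_rendering_resolution:
+# 'feature_image': (N, C, H, W) raw neural-rendered features in [-1, 1]
+# 'image_raw':     (N, 3, H, W) first three feature channels as RGB
+# 'image_depth':   (N, 1, H, W)
+# 'image_mask':    (N, 1, H, W) accumulated opacity
+# 'image_sr':      only present when a superresolution module is attached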
+
+class Triplane_fg_bg_plane(Triplane):
+ # a separate background plane
+
+ def __init__(self,
+ c_dim=25,
+ img_resolution=128,
+ img_channels=3,
+ out_chans=96,
+ triplane_size=224,
+ rendering_kwargs={},
+ decoder_in_chans=32,
+ decoder_output_dim=32,
+ sr_num_fp16_res=0,
+ sr_kwargs={},
+ bcg_synthesis_kwargs={}):
+ super().__init__(c_dim, img_resolution, img_channels, out_chans,
+ triplane_size, rendering_kwargs, decoder_in_chans,
+ decoder_output_dim, sr_num_fp16_res, sr_kwargs,
+ bcg_synthesis_kwargs)
+
+ self.bcg_decoder = Decoder(
+ ch=64, # half channel size
+ out_ch=32,
+ # ch_mult=(1, 2, 4),
+ ch_mult=(1, 2), # use res=64 for now
+ num_res_blocks=2,
+ dropout=0.0,
+ attn_resolutions=(),
+ z_channels=4,
+ resolution=64,
+ in_channels=3,
+ )
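+ # the background plane is decoded at 64x64 (resolution=64 above) and
+ # bilinearly upsampled to the rendering resolution in forward() below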
+
+ # * pure reconstruction
+ def forward(
+ self,
+ planes,
+ bg_plane,
+ # img,
+ c,
+ ws=None,
+ z_bcg=None,
+ neural_rendering_resolution=None,
+ update_emas=False,
+ cache_backbone=False,
+ use_cached_backbone=False,
+ return_meta=False,
+ return_raw_only=False,
+ **synthesis_kwargs):
+
+ # ! match the batch size
+ if planes is None:
+ assert self.planes is not None
+ planes = self.planes.repeat_interleave(c.shape[0], dim=0)
+ return_sampling_details_flag = self.rendering_kwargs.get(
+ 'return_sampling_details_flag', False)
+
+ if return_sampling_details_flag:
+ return_meta = True
+
+ cam2world_matrix = c[:, :16].reshape(-1, 4, 4)
+ intrinsics = c[:, 16:25].reshape(-1, 3, 3)
+
+ if neural_rendering_resolution is None:
+ neural_rendering_resolution = self.neural_rendering_resolution
+ else:
+ self.neural_rendering_resolution = neural_rendering_resolution
+
+ H = W = self.neural_rendering_resolution
+ # Create a batch of rays for volume rendering
+ ray_origins, ray_directions, _ = self.ray_sampler(
+ cam2world_matrix, intrinsics, neural_rendering_resolution)
+
+ # batch bookkeeping (planes are provided; the background comes from bg_plane)
+ N, M, _ = ray_origins.shape
+
+ planes = planes.view(
+ len(planes),
+ 3,
+ -1, # ! support background plane
+ planes.shape[-2],
+ planes.shape[-1]) # BS 96 256 256
+
+ # Perform volume rendering
+ rendering_details = self.renderer(planes,
+ self.decoder,
+ ray_origins,
+ ray_directions,
+ self.rendering_kwargs,
+ return_meta=return_meta)
+
+ feature_samples, depth_samples, weights_samples = (
+ rendering_details[k]
+ for k in ['feature_samples', 'depth_samples', 'weights_samples'])
+
+ if return_sampling_details_flag:
+ shape_synthesized = rendering_details['shape_synthesized']
+ else:
+ shape_synthesized = None
+
+ # Reshape into 'raw' neural-rendered image
+ feature_image = feature_samples.permute(0, 2, 1).reshape(
+ N, feature_samples.shape[-1], H,
+ W).contiguous() # B 32 H W, in [-1,1]
+ depth_image = depth_samples.permute(0, 2, 1).reshape(N, 1, H, W)
+ weights_samples = weights_samples.permute(0, 2, 1).reshape(N, 1, H, W)
+
+ bcg_image = self.bcg_decoder(bg_plane)
+ bcg_image = torch.nn.functional.interpolate(
+ bcg_image,
+ size=feature_image.shape[2:],
+ mode='bilinear',
+ align_corners=False,
+ antialias=self.rendering_kwargs['sr_antialias'])
+
+ mask_image = weights_samples * (1 + 2 * 0.001) - 0.001
+
+ # ! fuse fg/bg model output
+ feature_image = feature_image + (1 - weights_samples) * bcg_image
+
+ rgb_image = feature_image[:, :3]
+
+ # Run superresolution to get the final image
+ if self.superresolution is not None and not return_raw_only:
+ # assert ws is not None, 'feed in [cls] token here for SR module'
+
+ if ws is not None and ws.ndim == 2:
+ ws = ws.unsqueeze(
+ 1)[:, -1:, :] # follow stylegan tradition, B, N, C
+
+ sr_image = self.superresolution(
+ rgb=rgb_image,
+ x=feature_image,
+ base_x=rgb_image,
+ ws=ws, # only use the last layer
+ noise_mode=self.rendering_kwargs[
+ 'superresolution_noise_mode'], # typically 'none'
+ **{
+ k: synthesis_kwargs[k]
+ for k in synthesis_kwargs.keys() if k != 'noise_mode'
+ })
+ else:
+ # sr_image = rgb_image
+ sr_image = None
+
+ if shape_synthesized is not None:
+ shape_synthesized.update({
+ 'image_depth': depth_image,
+ }) # for 3D loss easy computation, wrap all 3D in a single dict
+
+ ret_dict = {
+ 'feature_image': feature_image,
+ # 'image_raw': feature_image[:, :3],
+ 'image_raw': rgb_image,
+ 'image_depth': depth_image,
+ 'weights_samples': weights_samples,
+ # 'silhouette': mask_image,
+ # 'silhouette_normalized_3channel': (mask_image*2-1).repeat_interleave(3,1), # N 3 H W
+ 'shape_synthesized': shape_synthesized,
+ "image_mask": mask_image,
+ }
+
+ if sr_image is not None:
+ ret_dict.update({
+ 'image_sr': sr_image,
+ })
+
+ if return_meta:
+ ret_dict.update({
+ 'feature_volume':
+ rendering_details['feature_volume'],
+ 'all_coords':
+ rendering_details['all_coords'],
+ 'weights':
+ rendering_details['weights'],
+ })
+
+ return ret_dict
+
+class TriplaneFlexiCube(Triplane):
+ def __init__(self, c_dim=25, img_resolution=128, img_channels=3, out_chans=96, triplane_size=224, rendering_kwargs={}, decoder_in_chans=32, decoder_output_dim=32, sr_num_fp16_res=0, sr_kwargs={},
+ create_triplane=False, # for overfitting single instance study
+ bcg_synthesis_kwargs={},
+ lrm_decoder=False,
+ ):
+ super().__init__(c_dim, img_resolution, img_channels, out_chans, triplane_size, rendering_kwargs, decoder_in_chans, decoder_output_dim, sr_num_fp16_res, sr_kwargs, bcg_synthesis_kwargs, lrm_decoder=lrm_decoder, create_triplane=create_triplane)
+ # https://github.com/TencentARC/InstantMesh/blob/0a64425c6d390afa40128132cec42cd5c6408bbf/src/models/renderer/synthesizer_mesh.py#L93
+
+ def get_geometry_prediction(self, planes, sample_coordinates, flexicubes_indices):
+ plane_axes = self.plane_axes.to(planes.device)
+ sampled_features = sample_from_planes(
+ plane_axes, planes, sample_coordinates, padding_mode='zeros', box_warp=self.rendering_kwargs['box_warp'])
+
+ sdf, deformation, weight = self.decoder.get_geometry_prediction(sampled_features, flexicubes_indices)
+ return sdf, deformation, weight
+
+ def get_texture_prediction(self, planes, sample_coordinates):
+ plane_axes = self.plane_axes.to(planes.device)
+ sampled_features = sample_from_planes(
+ plane_axes, planes, sample_coordinates, padding_mode='zeros', box_warp=self.rendering_kwargs['box_warp'])
+
+ rgb = self.decoder.get_texture_prediction(sampled_features)
+ return rgb
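+
+# Usage sketch (illustrative names, following InstantMesh's synthesizer):
+# sdf, deformation, weight = model.get_geometry_prediction(
+# planes, grid_verts, flexicubes_indices) # per-vertex geometry fields
+# rgb = model.get_texture_prediction(planes, surface_points)
+# FlexiCubes then extracts a mesh from (sdf, deformation, weight).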
\ No newline at end of file
diff --git a/nsr/volumetric_rendering/__init__.py b/nsr/volumetric_rendering/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..daba66567a95beabb103f7996198a9675ab20b4a
--- /dev/null
+++ b/nsr/volumetric_rendering/__init__.py
@@ -0,0 +1,11 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+# empty
\ No newline at end of file
diff --git a/nsr/volumetric_rendering/__pycache__/__init__.cpython-39.pyc b/nsr/volumetric_rendering/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7d359c2530e776e39735111c1fe408f2a4bcf622
Binary files /dev/null and b/nsr/volumetric_rendering/__pycache__/__init__.cpython-39.pyc differ
diff --git a/nsr/volumetric_rendering/__pycache__/math_utils.cpython-39.pyc b/nsr/volumetric_rendering/__pycache__/math_utils.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..393a45ba78b68411e9abb826561d92a27fb94cf7
Binary files /dev/null and b/nsr/volumetric_rendering/__pycache__/math_utils.cpython-39.pyc differ
diff --git a/nsr/volumetric_rendering/__pycache__/ray_marcher.cpython-39.pyc b/nsr/volumetric_rendering/__pycache__/ray_marcher.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..287c0da0adcd9dd33c35592718d8d106b02c8bb6
Binary files /dev/null and b/nsr/volumetric_rendering/__pycache__/ray_marcher.cpython-39.pyc differ
diff --git a/nsr/volumetric_rendering/__pycache__/ray_sampler.cpython-39.pyc b/nsr/volumetric_rendering/__pycache__/ray_sampler.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e839a50129afc2b418813afb363bf5ea0031349a
Binary files /dev/null and b/nsr/volumetric_rendering/__pycache__/ray_sampler.cpython-39.pyc differ
diff --git a/nsr/volumetric_rendering/__pycache__/renderer.cpython-39.pyc b/nsr/volumetric_rendering/__pycache__/renderer.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bd2c7fa3f14698fb3350a5832513befb269a9569
Binary files /dev/null and b/nsr/volumetric_rendering/__pycache__/renderer.cpython-39.pyc differ
diff --git a/nsr/volumetric_rendering/math_utils.py b/nsr/volumetric_rendering/math_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..81378a371d959e0c0b5886f00925651c0caa2124
--- /dev/null
+++ b/nsr/volumetric_rendering/math_utils.py
@@ -0,0 +1,137 @@
+# MIT License
+
+# Copyright (c) 2022 Petr Kellnhofer
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import torch
+from pdb import set_trace as st
+
+
+def transform_vectors(matrix: torch.Tensor,
+ vectors4: torch.Tensor) -> torch.Tensor:
+ """
+ Left-multiplies MxM @ NxM. Returns NxM.
+ """
+ res = torch.matmul(vectors4, matrix.T)
+ return res
+
+
+def normalize_vecs(vectors: torch.Tensor) -> torch.Tensor:
+ """
+ Normalize vector lengths.
+ """
+ return vectors / (torch.norm(vectors, dim=-1, keepdim=True))
+
+
+def torch_dot(x: torch.Tensor, y: torch.Tensor):
+ """
+ Dot product of two tensors.
+ """
+ return (x * y).sum(-1)
+
+
+def get_ray_limits_box(rays_o: torch.Tensor, rays_d: torch.Tensor,
+ box_side_length):
+ """
+ Author: Petr Kellnhofer
+ Intersects rays with the [-1, 1] NDC volume.
+ Returns min and max distance of entry.
+ Returns tmin = -1 and tmax = -2 for rays that miss the box, so a miss can
+ be detected via tmax < tmin.
+ https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-box-intersection
+ """
+ o_shape = rays_o.shape
+ rays_o = rays_o.detach().reshape(-1, 3)
+ rays_d = rays_d.detach().reshape(-1, 3)
+
+ bb_min = [
+ -1 * (box_side_length / 2), -1 * (box_side_length / 2),
+ -1 * (box_side_length / 2)
+ ]
+ bb_max = [
+ 1 * (box_side_length / 2), 1 * (box_side_length / 2),
+ 1 * (box_side_length / 2)
+ ]
+ bounds = torch.tensor([bb_min, bb_max],
+ dtype=rays_o.dtype,
+ device=rays_o.device)
+ is_valid = torch.ones(rays_o.shape[:-1], dtype=bool, device=rays_o.device)
+
+ # Precompute inverse for stability.
+ invdir = 1 / rays_d
+ sign = (invdir < 0).long()
+
+ # Intersect with YZ plane.
+ tmin = (bounds.index_select(0, sign[..., 0])[..., 0] -
+ rays_o[..., 0]) * invdir[..., 0]
+ tmax = (bounds.index_select(0, 1 - sign[..., 0])[..., 0] -
+ rays_o[..., 0]) * invdir[..., 0]
+
+ # Intersect with XZ plane.
+ tymin = (bounds.index_select(0, sign[..., 1])[..., 1] -
+ rays_o[..., 1]) * invdir[..., 1]
+ tymax = (bounds.index_select(0, 1 - sign[..., 1])[..., 1] -
+ rays_o[..., 1]) * invdir[..., 1]
+
+ # Resolve parallel rays.
+ is_valid[torch.logical_or(tmin > tymax, tymin > tmax)] = False
+
+ # Use the shortest intersection.
+ tmin = torch.max(tmin, tymin)
+ tmax = torch.min(tmax, tymax)
+
+ # Intersect with XY plane.
+ tzmin = (bounds.index_select(0, sign[..., 2])[..., 2] -
+ rays_o[..., 2]) * invdir[..., 2]
+ tzmax = (bounds.index_select(0, 1 - sign[..., 2])[..., 2] -
+ rays_o[..., 2]) * invdir[..., 2]
+
+ # Resolve parallel rays.
+ is_valid[torch.logical_or(tmin > tzmax, tzmin > tmax)] = False
+
+ # Use the shortest intersection.
+ tmin = torch.max(tmin, tzmin)
+ tmax = torch.min(tmax, tzmax)
+
+ # Mark invalid.
+ tmin[torch.logical_not(is_valid)] = -1
+ tmax[torch.logical_not(is_valid)] = -2
+
+ return tmin.reshape(*o_shape[:-1], 1), tmax.reshape(*o_shape[:-1], 1)
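+
+# Illustrative example (not part of the original file): a ray starting at the
+# centre of a side-2 box and pointing along +z enters at t = -1 and exits at t = +1:
+# >>> o = torch.zeros(1, 1, 3)
+# >>> d = torch.tensor([[[0., 0., 1.]]])
+# >>> tmin, tmax = get_ray_limits_box(o, d, box_side_length=2.0)
+# >>> tmin.item(), tmax.item() # (-1.0, 1.0)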
+
+
+def linspace(start: torch.Tensor, stop: torch.Tensor, num: int):
+ """
+ Creates a tensor of shape [num, *start.shape] whose values are evenly spaced from start to stop, inclusive.
+ Replicates the multi-dimensional behaviour of numpy.linspace in PyTorch.
+ """
+ # create a tensor of 'num' steps from 0 to 1
+ steps = torch.arange(num, dtype=torch.float32,
+ device=start.device) / (num - 1)
+
+ # reshape the 'steps' tensor to [-1, *([1]*start.ndim)] to allow for broadcasting
+ # - using 'steps.reshape([-1, *([1]*start.ndim)])' would be nice here but torchscript
+ # "cannot statically infer the expected size of a list in this context", hence the code below
+ for i in range(start.ndim):
+ steps = steps.unsqueeze(-1)
+
+ # the output starts at 'start' and increments until 'stop' in each dimension
+ out = start[None] + steps * (stop - start)[None]
+
+ return out
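+
+# Illustrative example (not part of the original file):
+# >>> lo, hi = torch.zeros(2, 3), torch.ones(2, 3)
+# >>> linspace(lo, hi, num=5).shape # torch.Size([5, 2, 3])
+# Each of the 5 slices steps evenly from `start` to `stop` per element.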
diff --git a/nsr/volumetric_rendering/ray_marcher.py b/nsr/volumetric_rendering/ray_marcher.py
new file mode 100644
index 0000000000000000000000000000000000000000..62e4f0cf06a6e7d2256c810a9b83a58f6a2846ce
--- /dev/null
+++ b/nsr/volumetric_rendering/ray_marcher.py
@@ -0,0 +1,74 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""
+The ray marcher takes the raw output of the implicit representation and uses the volume rendering equation to produce composited colors and depths.
+Based off of the implementation in MipNeRF (this one doesn't do any cone tracing though!)
+"""
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from pdb import set_trace as st
+
+
+class MipRayMarcher2(nn.Module):
+
+ def __init__(self):
+ super().__init__()
+
+ def run_forward(self, colors, densities, depths, rendering_options):
+ deltas = depths[:, :, 1:] - depths[:, :, :-1]
+ colors_mid = (colors[:, :, :-1] + colors[:, :, 1:]) / 2
+ densities_mid = (densities[:, :, :-1] + densities[:, :, 1:]) / 2
+ depths_mid = (depths[:, :, :-1] + depths[:, :, 1:]) / 2
+
+ if rendering_options['clamp_mode'] == 'softplus':
+ densities_mid = F.softplus(
+ densities_mid -
+ 1) # activation bias of -1 makes things initialize better
+ else:
+ assert False, "MipRayMarcher only supports `clamp_mode`=`softplus`!"
+
+ density_delta = densities_mid * deltas
+
+ alpha = 1 - torch.exp(-density_delta)
+
+ alpha_shifted = torch.cat(
+ [torch.ones_like(alpha[:, :, :1]), 1 - alpha + 1e-10], -2)
+ T = torch.cumprod(alpha_shifted, -2) # transmittance
+ weights = alpha * T[:, :, :-1]
+ visibility = T[:, :,
+ -1] # bg lambda, https://github.com/Kai-46/nerfplusplus/blob/ebf2f3e75fd6c5dfc8c9d0b533800daaf17bd95f/ddp_model.py#L101
+
+ composite_rgb = torch.sum(weights * colors_mid, -2)
+ weight_total = weights.sum(2)
+ # composite_depth = torch.sum(weights * depths_mid, -2) / weight_total
+ composite_depth = torch.sum(
+ weights * depths_mid,
+ -2) # no weight normalization: ShapeNet renders use a white background
+
+ # clip the composite to min/max range of depths
+ composite_depth = torch.nan_to_num(composite_depth, float('inf'))
+ composite_depth = torch.clamp(composite_depth, torch.min(depths),
+ torch.max(depths))
+
+ if rendering_options.get('white_back', True):
+ composite_rgb = composite_rgb + 1 - weight_total
+
+ composite_rgb = composite_rgb * 2 - 1 # Scale (0,1) to (-1, 1)
+
+ return composite_rgb, composite_depth, visibility, weights
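+
+ # The quantities above implement the standard volume rendering equation:
+ # alpha_i = 1 - exp(-sigma_i * delta_i)
+ # T_i = prod_{j<i} (1 - alpha_j) (transmittance)
+ # w_i = alpha_i * T_i, C = sum_i w_i * c_i
+ # `visibility` is the final transmittance, i.e. the weight left for a background.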
+
+ def forward(self, colors, densities, depths, rendering_options):
+ composite_rgb, composite_depth, visibility, weights = self.run_forward(
+ colors, densities, depths, rendering_options)
+
+ return composite_rgb, composite_depth, visibility, weights
diff --git a/nsr/volumetric_rendering/ray_sampler.py b/nsr/volumetric_rendering/ray_sampler.py
new file mode 100644
index 0000000000000000000000000000000000000000..56f057cc0b3790c10df4bba8c8a7a9557e166055
--- /dev/null
+++ b/nsr/volumetric_rendering/ray_sampler.py
@@ -0,0 +1,345 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""
+The ray sampler is a module that takes in camera matrices and resolution and batches of rays.
+Expects cam2world matrices that use the OpenCV camera coordinate system conventions.
+"""
+
+import torch
+from pdb import set_trace as st
+import random
+
+HUGE_NUMBER = 1e10
+TINY_NUMBER = 1e-6 # float32 only has 7 decimal digits precision
+
+
+######################################################################################
+# wrapper to simplify the use of nerfnet
+######################################################################################
+# https://github.com/Kai-46/nerfplusplus/blob/ebf2f3e75fd6c5dfc8c9d0b533800daaf17bd95f/ddp_model.py#L16
+def depth2pts_outside(ray_o, ray_d, depth):
+ '''
+ ray_o, ray_d: [..., 3]
+ depth: [...]; inverse of distance to sphere origin
+ '''
+ # note: d1 becomes negative if this mid point is behind camera
+ d1 = -torch.sum(ray_d * ray_o, dim=-1) / torch.sum(ray_d * ray_d, dim=-1)
+ p_mid = ray_o + d1.unsqueeze(-1) * ray_d
+ p_mid_norm = torch.norm(p_mid, dim=-1)
+ ray_d_cos = 1. / torch.norm(ray_d, dim=-1)
+ d2 = torch.sqrt(1. - p_mid_norm * p_mid_norm) * ray_d_cos
+ p_sphere = ray_o + (d1 + d2).unsqueeze(-1) * ray_d
+
+ rot_axis = torch.cross(ray_o, p_sphere, dim=-1)
+ rot_axis = rot_axis / torch.norm(rot_axis, dim=-1, keepdim=True)
+ phi = torch.asin(p_mid_norm)
+ theta = torch.asin(p_mid_norm * depth) # depth is inside [0, 1]
+ rot_angle = (phi - theta).unsqueeze(-1) # [..., 1]
+
+ # now rotate p_sphere
+ # Rodrigues formula: https://en.wikipedia.org/wiki/Rodrigues%27_rotation_formula
+ p_sphere_new = p_sphere * torch.cos(rot_angle) + \
+ torch.cross(rot_axis, p_sphere, dim=-1) * torch.sin(rot_angle) + \
+ rot_axis * torch.sum(rot_axis*p_sphere, dim=-1, keepdim=True) * (1.-torch.cos(rot_angle))
+ p_sphere_new = p_sphere_new / torch.norm(
+ p_sphere_new, dim=-1, keepdim=True)
+ pts = torch.cat((p_sphere_new, depth.unsqueeze(-1)), dim=-1)
+
+ # now calculate conventional depth
+ depth_real = 1. / (depth + TINY_NUMBER) * torch.cos(theta) * ray_d_cos + d1
+ return pts, depth_real
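+
+# The NeRF++ "inverted sphere" parameterization: background points are encoded
+# as (x', y', z', 1/r) on the unit sphere, so depth -> 0 maps to infinity and a
+# bounded input range covers the entire exterior scene.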
+
+
+class RaySampler(torch.nn.Module):
+
+ def __init__(self):
+ super().__init__()
+ self.ray_origins_h, self.ray_directions, self.depths, self.image_coords, self.rendering_options = None, None, None, None, None
+
+ def create_patch_uv(self,
+ patch_resolution,
+ resolution,
+ cam2world_matrix,
+ fg_bbox=None):
+
+ def sample_patch_uv(fg_bbox=None):
+ assert patch_resolution <= resolution
+
+ def sample_patch_range():
+ patch_start = random.randint(
+ 0, resolution -
+ patch_resolution) # randint is an alias for randrange(start, stop+1)
+ return patch_start
+
+ def sample_patch_range_oversample_boundary(range_start=None,
+ range_end=None):
+ # oversample past the borders so boundary pixels are not undersampled
+ if range_start is None:
+ range_start = patch_resolution
+ if range_end is None:
+ range_end = resolution + patch_resolution
+
+ patch_end = random.randint(range_start, range_end)
+
+ # clamp to a valid patch end index
+ patch_end = min(max(patch_end, patch_resolution), resolution)
+
+ return patch_end
+
+ if fg_bbox is not None and random.random(
+ ) > 0.025: # mostly sample the foreground; ~2.5% of patches train the background
+ # only return one UV here
+ top_min, left_min = fg_bbox[:, :2].min(dim=0,
+ keepdim=True)[0][0]
+ height_max, width_max = fg_bbox[:, 2:].max(dim=0,
+ keepdim=True)[0][0]
+
+ left_boundary, right_boundary = patch_resolution // 2, resolution - patch_resolution // 2
+ h_mid = random.randint(
+ min(max(top_min, left_boundary), right_boundary),
+ max(min(height_max, right_boundary), left_boundary),
+ )
+
+ w_mid = random.randint(
+ min(max(left_min, left_boundary), right_boundary),
+ max(min(width_max, right_boundary), left_boundary),
+ )
+
+ h_end = h_mid + patch_resolution // 2
+ w_end = w_mid + patch_resolution // 2
+
+ h_start = h_end - patch_resolution
+ w_start = w_end - patch_resolution
+
+ assert h_start >= 0 and w_start >= 0
+
+ else:
+ h_end = sample_patch_range_oversample_boundary()
+ h_start = h_end - patch_resolution
+ w_end = sample_patch_range_oversample_boundary()
+ w_start = w_end - patch_resolution
+
+ assert h_start >= 0 and w_start >= 0
+
+ uv = torch.stack(
+ torch.meshgrid(
+ torch.arange(
+ start=h_start,
+ # end=h_start+patch_resolution,
+ end=h_end,
+ dtype=torch.float32,
+ device=cam2world_matrix.device),
+ torch.arange(
+ start=w_start,
+ # end=w_start + patch_resolution,
+ end=w_end,
+ dtype=torch.float32,
+ device=cam2world_matrix.device),
+ indexing='ij')) * (1. / resolution) + (0.5 / resolution)
+
+ uv = uv.flip(0).reshape(2, -1).transpose(1, 0) # ij -> xy
+
+ return uv, (h_start, w_start, patch_resolution, patch_resolution
+ ) # top: int, left: int, height: int, width: int
+
+ all_uv = []
+ ray_bboxes = []
+ for _ in range(cam2world_matrix.shape[0]):
+ uv, bbox = sample_patch_uv(fg_bbox)
+ all_uv.append(uv)
+ ray_bboxes.append(bbox)
+
+ all_uv = torch.stack(all_uv, 0) # B patch_res**2 2
+ # ray_bboxes = torch.stack(ray_bboxes, 0) # B patch_res**2 2
+
+ return all_uv, ray_bboxes
+
+ def create_uv(self, resolution, cam2world_matrix):
+
+ uv = torch.stack(
+ torch.meshgrid(torch.arange(resolution,
+ dtype=torch.float32,
+ device=cam2world_matrix.device),
+ torch.arange(resolution,
+ dtype=torch.float32,
+ device=cam2world_matrix.device),
+ indexing='ij')) * (1. / resolution) + (0.5 /
+ resolution)
+
+ uv = uv.flip(0).reshape(2, -1).transpose(1, 0) # ij -> xy ordering
+ uv = uv.unsqueeze(0).repeat(cam2world_matrix.shape[0], 1, 1)
+
+ return uv
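+
+ # Illustrative: uv holds pixel-centre coordinates in [0, 1]; for
+ # resolution=2 the per-axis values are (0+.5)/2 = 0.25 and (1+.5)/2 = 0.75.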
+
+ def forward(self, cam2world_matrix, intrinsics, resolution, fg_mask=None):
+ """
+ Create batches of rays and return origins and directions.
+
+ cam2world_matrix: (N, 4, 4)
+ intrinsics: (N, 3, 3)
+ resolution: int
+
+ ray_origins: (N, M, 3)
+ ray_dirs: (N, M, 3)
+ """
+ N, M = cam2world_matrix.shape[0], resolution**2
+ cam_locs_world = cam2world_matrix[:, :3, 3]
+ fx = intrinsics[:, 0, 0]
+ fy = intrinsics[:, 1, 1]
+ cx = intrinsics[:, 0, 2]
+ cy = intrinsics[:, 1, 2]
+ sk = intrinsics[:, 0, 1]
+
+ uv = self.create_uv(
+ resolution,
+ cam2world_matrix,
+ )
+
+ x_cam = uv[:, :, 0].view(N, -1)
+ y_cam = uv[:, :, 1].view(N, -1) # [0,1] range
+ z_cam = torch.ones((N, M), device=cam2world_matrix.device)
+
+ # basically torch.inverse(intrinsics)
+ x_lift = (x_cam - cx.unsqueeze(-1) + cy.unsqueeze(-1) *
+ sk.unsqueeze(-1) / fy.unsqueeze(-1) - sk.unsqueeze(-1) *
+ y_cam / fy.unsqueeze(-1)) / fx.unsqueeze(-1) * z_cam
+ y_lift = (y_cam - cy.unsqueeze(-1)) / fy.unsqueeze(-1) * z_cam
+
+ cam_rel_points = torch.stack(
+ (x_lift, y_lift, z_cam, torch.ones_like(z_cam)), dim=-1)
+
+
+ world_rel_points = torch.bmm(cam2world_matrix,
+ cam_rel_points.permute(0, 2, 1)).permute(
+ 0, 2, 1)[:, :, :3]
+
+ ray_dirs = world_rel_points - cam_locs_world[:, None, :]
+ ray_dirs = torch.nn.functional.normalize(ray_dirs, dim=2)
+
+ ray_origins = cam_locs_world.unsqueeze(1).repeat(
+ 1, ray_dirs.shape[1], 1)
+
+ return ray_origins, ray_dirs, None
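+
+# Usage sketch (illustrative, assuming intrinsics normalized to the [0, 1] uv
+# range used above):
+# >>> sampler = RaySampler()
+# >>> c2w = torch.eye(4).unsqueeze(0) # (1, 4, 4)
+# >>> K = torch.tensor([[[1., 0., .5], [0., 1., .5], [0., 0., 1.]]])
+# >>> origins, dirs, _ = sampler(c2w, K, resolution=64)
+# origins and dirs are both (1, 4096, 3); dirs are unit length.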
+
+
+class PatchRaySampler(RaySampler):
+
+ def forward(self,
+ cam2world_matrix,
+ intrinsics,
+ patch_resolution,
+ resolution,
+ fg_bbox=None):
+ """
+ Create batches of rays and return origins and directions.
+
+ cam2world_matrix: (N, 4, 4)
+ intrinsics: (N, 3, 3)
+ resolution: int
+
+ ray_origins: (N, M, 3)
+ ray_dirs: (N, M, 2)
+ """
+ N, M = cam2world_matrix.shape[0], patch_resolution**2
+ cam_locs_world = cam2world_matrix[:, :3, 3]
+ fx = intrinsics[:, 0, 0]
+ fy = intrinsics[:, 1, 1]
+ cx = intrinsics[:, 0, 2]
+ cy = intrinsics[:, 1, 2]
+ sk = intrinsics[:, 0, 1]
+
+ # sample a patch uv grid (and its bbox) independently per batch element
+ all_uv_list = []
+ ray_bboxes = []
+ for idx in range(N):
+ uv, bboxes = self.create_patch_uv(
+ patch_resolution, resolution, cam2world_matrix[idx:idx + 1],
+ fg_bbox[idx:idx + 1]
+ if fg_bbox is not None else None)
+ all_uv_list.append(uv)
+ ray_bboxes.extend(bboxes)
+ all_uv = torch.cat(all_uv_list, 0)
+
+ x_cam = all_uv[:, :, 0].view(N, -1)
+ y_cam = all_uv[:, :, 1].view(N, -1) # [0,1] range
+ z_cam = torch.ones((N, M), device=cam2world_matrix.device)
+
+ # basically torch.inverse(intrinsics)
+ x_lift = (x_cam - cx.unsqueeze(-1) + cy.unsqueeze(-1) *
+ sk.unsqueeze(-1) / fy.unsqueeze(-1) - sk.unsqueeze(-1) *
+ y_cam / fy.unsqueeze(-1)) / fx.unsqueeze(-1) * z_cam
+ y_lift = (y_cam - cy.unsqueeze(-1)) / fy.unsqueeze(-1) * z_cam
+
+ cam_rel_points = torch.stack(
+ (x_lift, y_lift, z_cam, torch.ones_like(z_cam)), dim=-1)
+
+ world_rel_points = torch.bmm(cam2world_matrix,
+ cam_rel_points.permute(0, 2, 1)).permute(
+ 0, 2, 1)[:, :, :3]
+
+ ray_dirs = world_rel_points - cam_locs_world[:, None, :]
+ ray_dirs = torch.nn.functional.normalize(ray_dirs, dim=2)
+
+ ray_origins = cam_locs_world.unsqueeze(1).repeat(
+ 1, ray_dirs.shape[1], 1)
+
+ return ray_origins, ray_dirs, ray_bboxes
diff --git a/nsr/volumetric_rendering/renderer.py b/nsr/volumetric_rendering/renderer.py
new file mode 100644
index 0000000000000000000000000000000000000000..cee75f5c034af01dcc916c2681f4f7a06cd43b6f
--- /dev/null
+++ b/nsr/volumetric_rendering/renderer.py
@@ -0,0 +1,637 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""
+The renderer is a module that takes in rays, decides where to sample along each
+ray, and computes pixel colors using the volume rendering equation.
+"""
+
+import math
+import torch
+import torch.nn as nn
+import numpy as np
+
+from .ray_marcher import MipRayMarcher2
+from . import math_utils
+from pdb import set_trace as st
+from .ray_sampler import depth2pts_outside, HUGE_NUMBER, TINY_NUMBER
+
+
+def generate_planes():
+ """
+ Defines planes by the three vectors that form the "axes" of the
+ plane. Kept general in principle, but note that the fixed
+ project_onto_planes below hard-codes these three canonical planes.
+ """
+ return torch.tensor(
+ [[[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1, 0, 0], [0, 0, 1], [0, 1, 0]],
+ [[0, 0, 1], [1, 0, 0], [0, 1, 0]]],
+ dtype=torch.float32)
+
+
+
+def project_onto_planes(planes, coordinates):
+ """
+ Does a projection of a 3D point onto a batch of 2D planes,
+ returning 2D plane coordinates.
+
+ Takes plane axes of shape (n_planes, 3, 3) and coordinates of shape
+ (N, M, 3); returns projections of shape (N*n_planes, M, 2).
+ """
+
+ # ORIGINAL (kept for reference; known to be buggy, see note below):
+ # N, M, C = coordinates.shape
+ # xy_coords = coordinates[..., [0, 1]]
+ # xz_coords = coordinates[..., [0, 2]]
+ # zx_coords = coordinates[..., [2, 0]]
+ # return torch.stack([xy_coords, xz_coords, zx_coords], dim=1).reshape(N*3, M, 2)
+
+ # FIXED: project onto the xy/yz/zx planes
+ N, M, _ = coordinates.shape
+ xy_coords = coordinates[..., [0, 1]]
+ yz_coords = coordinates[..., [1, 2]]
+ zx_coords = coordinates[..., [2, 0]]
+ return torch.stack([xy_coords, yz_coords, zx_coords],
+ dim=1).reshape(N * 3, M, 2)
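+
+# Note: the ORIGINAL ordering above used the xz pair twice (xz and zx are just
+# transposes), so two of the three planes effectively saw the same projection;
+# the FIXED ordering gives each plane a distinct axis pair.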
+
+
+def sample_from_planes(plane_axes,
+ plane_features,
+ coordinates,
+ mode='bilinear',
+ padding_mode='zeros',
+ box_warp=None):
+ assert padding_mode == 'zeros'
+ N, n_planes, C, H, W = plane_features.shape
+ _, M, _ = coordinates.shape
+ plane_features = plane_features.view(N * n_planes, C, H, W)
+
+ coordinates = (2 / box_warp) * coordinates # TODO: add specific box bounds
+
+ projected_coordinates = project_onto_planes(plane_axes,
+ coordinates).unsqueeze(1)
+ output_features = torch.nn.functional.grid_sample(
+ plane_features,
+ projected_coordinates.float(),
+ mode=mode,
+ padding_mode=padding_mode,
+ align_corners=False).permute(0, 3, 2, 1).reshape(N, n_planes, M, C)
+ return output_features
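+
+# Shape sketch (illustrative): plane_features (N, 3, C, H, W) with coordinates
+# (N, M, 3) in world units yields features of shape (N, 3, M, C). box_warp
+# rescales world coordinates so the box maps onto grid_sample's [-1, 1] range.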
+
+
+def sample_from_3dgrid(grid, coordinates):
+ """
+ Expects coordinates in shape (batch_size, num_points_per_batch, 3)
+ Expects grid in shape (1, channels, H, W, D)
+ (Also works if grid has batch size)
+ Returns sampled features of shape (batch_size, num_points_per_batch, feature_channels)
+ """
+ batch_size, n_coords, n_dims = coordinates.shape
+ sampled_features = torch.nn.functional.grid_sample(
+ grid.expand(batch_size, -1, -1, -1, -1),
+ coordinates.reshape(batch_size, 1, 1, -1, n_dims),
+ mode='bilinear',
+ padding_mode='zeros',
+ align_corners=False)
+ N, C, H, W, D = sampled_features.shape
+ sampled_features = sampled_features.permute(0, 4, 3, 2,
+ 1).reshape(N, H * W * D, C)
+ return sampled_features
+
+
+class ImportanceRenderer(torch.nn.Module):
+ def __init__(self):
+ super().__init__()
+ self.ray_marcher = MipRayMarcher2()
+ self.plane_axes = generate_planes()
+
+ def forward(self,
+ planes,
+ decoder,
+ ray_origins,
+ ray_directions,
+ rendering_options,
+ return_meta=False):
+ # return_sampling_details_flag=False):
+ self.plane_axes = self.plane_axes.to(ray_origins.device)
+ # if rendering_options.get('return_sampling_details_flag', None) is not None:
+ shape_synthesized = {}
+
+ if rendering_options['ray_start'] == rendering_options[
+ 'ray_end'] == 'auto':
+ ray_start, ray_end = math_utils.get_ray_limits_box(
+ ray_origins,
+ ray_directions,
+ box_side_length=rendering_options['box_warp'])
+ is_ray_valid = ray_end > ray_start
+ if torch.any(is_ray_valid).item():
+ ray_start[~is_ray_valid] = ray_start[is_ray_valid].min()
+ ray_end[~is_ray_valid] = ray_start[is_ray_valid].max()
+ depths_coarse = self.sample_stratified(
+ ray_origins, ray_start, ray_end,
+ rendering_options['depth_resolution'],
+ rendering_options['disparity_space_sampling'])
+ else:
+ # Create stratified depth samples
+ depths_coarse = self.sample_stratified(
+ ray_origins, rendering_options['ray_start'],
+ rendering_options['ray_end'],
+ rendering_options['depth_resolution'],
+ rendering_options['disparity_space_sampling'])
+
+ batch_size, num_rays, samples_per_ray, _ = depths_coarse.shape
+
+ # Coarse Pass
+ sample_coordinates = (
+ ray_origins.unsqueeze(-2) +
+ depths_coarse * ray_directions.unsqueeze(-2)).reshape(
+ batch_size, -1, 3)
+ sample_directions = ray_directions.unsqueeze(-2).expand(
+ -1, -1, samples_per_ray, -1).reshape(batch_size, -1, 3)
+
+ colors_coarse, densities_coarse = self.run_model(
+ planes, decoder, sample_coordinates, sample_directions,
+ rendering_options, batch_size, num_rays, samples_per_ray)
+
+ colors_coarse = colors_coarse.reshape(batch_size, num_rays,
+ samples_per_ray,
+ colors_coarse.shape[-1])
+ densities_coarse = densities_coarse.reshape(batch_size, num_rays,
+ samples_per_ray, 1)
+
+ if rendering_options.get('return_sampling_details_flag', False):
+ shape_synthesized.update({
+ # 'coarse_coords': sample_coordinates.detach().clone(),
+ # 'coarse_densities': densities_coarse.detach()
+ 'coarse_coords':
+ sample_coordinates.reshape(batch_size, num_rays,
+ samples_per_ray, 3),
+ 'coarse_densities':
+ densities_coarse
+ })
+
+ # Fine Pass
+ N_importance = rendering_options['depth_resolution_importance']
+ if N_importance > 0:
+ _, _, _, weights = self.ray_marcher(colors_coarse,
+ densities_coarse,
+ depths_coarse,
+ rendering_options)
+
+ depths_fine = self.sample_importance(depths_coarse, weights,
+ N_importance)
+
+ sample_directions = ray_directions.unsqueeze(-2).expand(
+ -1, -1, N_importance, -1).reshape(batch_size, -1, 3)
+ sample_coordinates = (
+ ray_origins.unsqueeze(-2) +
+ depths_fine * ray_directions.unsqueeze(-2)).reshape(
+ batch_size, -1, 3)
+
+ colors_fine, densities_fine = self.run_model(
+ planes, decoder, sample_coordinates, sample_directions,
+ rendering_options, batch_size, num_rays, N_importance)
+ # colors_fine = out['rgb']
+ # densities_fine = out['sigma']
+ colors_fine = colors_fine.reshape(batch_size, num_rays,
+ N_importance,
+ colors_fine.shape[-1])
+ densities_fine = densities_fine.reshape(batch_size, num_rays,
+ N_importance, 1)
+ if rendering_options.get('return_sampling_details_flag', False):
+ shape_synthesized.update({
+ # 'fine_coords': sample_coordinates.detach(),
+ # 'fine_densities': densities_fine.detach()
+ 'fine_coords': sample_coordinates,
+ # 'fine_coords': sample_coordinates.reshape(batch_size, num_rays, N_importance, 3),
+ 'fine_densities': densities_fine,
+ })
+
+ all_depths, all_colors, all_densities, indices = self.unify_samples(
+ depths_coarse, colors_coarse, densities_coarse, depths_fine,
+ colors_fine, densities_fine)
+
+ # Aggregate
+ rgb_final, depth_final, visibility, weights = self.ray_marcher(
+ all_colors, all_densities, all_depths, rendering_options)
+
+ else:
+ rgb_final, depth_final, visibility, weights = self.ray_marcher(
+ colors_coarse, densities_coarse, depths_coarse,
+ rendering_options)
+
+ if rendering_options.get('return_surface', False):
+ weight_total = weights.sum(2)
+
+ all_coords = torch.cat([
+ shape_synthesized['coarse_coords'],
+ shape_synthesized['fine_coords']
+ ],
+ dim=-2) # B 4096 48+48 3
+ all_coords = torch.gather(all_coords, -2,
+ indices.expand(-1, -1, -1, 3))
+
+ composite_surface = torch.sum(weights * all_coords,
+ -2) / weight_total
+
+ # clip the composite to min/max range of depths
+ composite_surface = torch.nan_to_num(composite_surface,
+ float('inf'))
+ composite_surface = torch.clamp(composite_surface,
+ torch.min(all_coords),
+ torch.max(all_coords))
+ shape_synthesized['surface_coords'] = composite_surface
+
+ shape_synthesized.update({
+ # 'depth': depth_final.detach()
+ 'depth': depth_final
+ })
+
+ ret_dict = {
+ 'feature_samples': rgb_final,
+ 'depth_samples': depth_final,
+ 'weights_samples': weights.sum(2),
+ 'shape_synthesized': shape_synthesized,
+ 'visibility': visibility # T[..., -1]
+ }
+
+ if return_meta: # for pifu
+ all_coords = torch.cat([
+ shape_synthesized['coarse_coords'],
+ shape_synthesized['fine_coords'].reshape(
+ batch_size, num_rays, N_importance, 3)
+ ],
+ dim=-2)
+ # 'fine_coords': sample_coordinates,
+ all_coords = torch.gather(all_coords, -2,
+ indices.expand(-1, -1, -1, 3))
+
+ ret_dict.update({
+ 'all_coords': all_coords,
+ 'feature_volume': all_colors,
+ 'weights': weights
+ })
+
+ if rendering_options.get('return_sampling_details_flag', False):
+ ret_dict.update({'shape_synthesized': shape_synthesized})
+
+ return ret_dict
+
+ # old run_model
+ def _run_model(self, planes, decoder, sample_coordinates,
+ sample_directions, options):
+ sampled_features = sample_from_planes(self.plane_axes,
+ planes,
+ sample_coordinates,
+ padding_mode='zeros',
+ box_warp=options['box_warp'])
+
+ out = decoder(sampled_features, sample_directions)
+ if options.get('density_noise', 0) > 0:
+ out['sigma'] += torch.randn_like(
+ out['sigma']) * options['density_noise']
+ return out
+
+ def run_model(self, planes, decoder, sample_coordinates, sample_directions,
+ rendering_options, batch_size, num_rays, samples_per_ray):
+ """A compatibility wrapper over Objaverse (bbox sampling) and
+ FFHQ/ShapeNet-style rendering (ray-start/end sampling).
+
+ Returns colors and densities.
+ """
+
+ if rendering_options.get('filter_out_of_bbox', False):
+ # Coarse Pass
+ colors, densities = self._forward_pass(
+ sample_coordinates,
+ sample_directions,
+ planes=planes,
+ decoder=decoder,
+ rendering_options=rendering_options,
+ batch_size=batch_size,
+ num_rays=num_rays,
+ samples_per_ray=samples_per_ray,
+ )
+ else:
+ out = self._run_model(planes, decoder, sample_coordinates,
+ sample_directions, rendering_options)
+ colors = out['rgb']
+ densities = out['sigma']
+
+ return colors, densities
+
+ def _forward_pass(
+ self,
+ sample_coordinates,
+ sample_directions,
+ planes: torch.Tensor,
+ decoder: nn.Module,
+ rendering_options: dict,
+ batch_size,
+ num_rays,
+ samples_per_ray):
+ """
+ Additional filtering is applied to filter out-of-box samples.
+ Modifications made by Zexin He.
+ """
+
+ device = sample_coordinates.device
+
+ # filter out-of-box samples
+ mask_inbox = \
+ (rendering_options['sampler_bbox_min'] <= sample_coordinates) & \
+ (sample_coordinates <= rendering_options['sampler_bbox_max'])
+ mask_inbox = mask_inbox.all(-1)
+
+ # forward model according to all samples
+ _out = self._run_model(planes, decoder, sample_coordinates,
+ sample_directions, rendering_options)
+
+ # set out-of-box samples to zeros(rgb) & -inf(sigma)
+ SAFE_GUARD = 3
+ DATA_TYPE = _out['sigma'].dtype
+ colors_pass = torch.zeros(batch_size,
+ num_rays * samples_per_ray,
+ # 3,
+ decoder.decoder_output_dim,
+ device=device,
+ dtype=DATA_TYPE)
+ densities_pass = torch.nan_to_num(
+ torch.full((batch_size, num_rays * samples_per_ray, 1),
+ -float('inf'),
+ device=device,
+ dtype=DATA_TYPE)) / SAFE_GUARD
+ colors_pass[mask_inbox], densities_pass[mask_inbox] = _out['rgb'][
+ mask_inbox], _out['sigma'][mask_inbox]
+
+ # reshape back
+ # colors_pass = colors_pass.reshape(batch_size, num_rays, samples_per_ray, colors_pass.shape[-1])
+ # densities_pass = densities_pass.reshape(batch_size, num_rays, samples_per_ray, densities_pass.shape[-1])
+
+ return colors_pass, densities_pass
+
+ def sort_samples(self, all_depths, all_colors, all_densities):
+ _, indices = torch.sort(all_depths, dim=-2)
+ all_depths = torch.gather(all_depths, -2, indices)
+ all_colors = torch.gather(
+ all_colors, -2, indices.expand(-1, -1, -1, all_colors.shape[-1]))
+ all_densities = torch.gather(all_densities, -2,
+ indices.expand(-1, -1, -1, 1))
+ return all_depths, all_colors, all_densities
+
+ def unify_samples(self, depths1, colors1, densities1, depths2, colors2,
+ densities2):
+ all_depths = torch.cat([depths1, depths2], dim=-2)
+ all_colors = torch.cat([colors1, colors2], dim=-2)
+ all_densities = torch.cat([densities1, densities2], dim=-2)
+
+ _, indices = torch.sort(all_depths, dim=-2)
+ all_depths = torch.gather(all_depths, -2, indices)
+ all_colors = torch.gather(
+ all_colors, -2, indices.expand(-1, -1, -1, all_colors.shape[-1]))
+ all_densities = torch.gather(all_densities, -2,
+ indices.expand(-1, -1, -1, 1))
+
+ return all_depths, all_colors, all_densities, indices
+
+ def sample_stratified(self,
+ ray_origins,
+ ray_start,
+ ray_end,
+ depth_resolution,
+ disparity_space_sampling=False):
+ """
+ Return depths of approximately uniformly spaced samples along rays.
+ """
+ N, M, _ = ray_origins.shape
+ if disparity_space_sampling:
+ depths_coarse = torch.linspace(0,
+ 1,
+ depth_resolution,
+ device=ray_origins.device).reshape(
+ 1, 1, depth_resolution,
+ 1).repeat(N, M, 1, 1)
+ depth_delta = 1 / (depth_resolution - 1)
+ depths_coarse += torch.rand_like(depths_coarse) * depth_delta
+ depths_coarse = 1. / (1. / ray_start * (1. - depths_coarse) +
+ 1. / ray_end * depths_coarse)
+ else:
+ if isinstance(ray_start, torch.Tensor):
+ depths_coarse = math_utils.linspace(ray_start, ray_end,
+ depth_resolution).permute(
+ 1, 2, 0, 3)
+ depth_delta = (ray_end - ray_start) / (depth_resolution - 1)
+ depths_coarse += torch.rand_like(depths_coarse) * depth_delta[
+ ..., None]
+ else:
+ depths_coarse = torch.linspace(
+ ray_start,
+ ray_end,
+ depth_resolution,
+ device=ray_origins.device).reshape(1, 1, depth_resolution,
+ 1).repeat(N, M, 1, 1)
+ depth_delta = (ray_end - ray_start) / (depth_resolution - 1)
+ depths_coarse += torch.rand_like(depths_coarse) * depth_delta
+ # print("ignore normal noise!!! for debugging")
+
+ return depths_coarse
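+
+ # e.g. ray_start=2.0, ray_end=6.0, depth_resolution=64 yields jittered,
+ # roughly uniform depths of shape (N, M, 64, 1) along each ray.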
+
+ def sample_importance(self, z_vals, weights, N_importance):
+ """
+ Return depths of importance sampled points along rays. See NeRF importance sampling for more.
+ """
+ with torch.no_grad():
+ batch_size, num_rays, samples_per_ray, _ = z_vals.shape
+
+ z_vals = z_vals.reshape(batch_size * num_rays, samples_per_ray)
+ weights = weights.reshape(
+ batch_size * num_rays,
+ -1) # -1 to account for loss of 1 sample in MipRayMarcher
+
+ # smooth weights
+ weights = torch.nn.functional.max_pool1d(
+ weights.unsqueeze(1).float(), 2, 1, padding=1)
+ weights = torch.nn.functional.avg_pool1d(
+ weights, 2, 1).squeeze(1) # squeeze only the channel dim
+ weights = weights + 0.01
+
+ z_vals_mid = 0.5 * (z_vals[:, :-1] + z_vals[:, 1:])
+ importance_z_vals = self.sample_pdf(z_vals_mid, weights[:, 1:-1],
+ N_importance).detach().reshape(
+ batch_size, num_rays,
+ N_importance, 1)
+ return importance_z_vals
+
+ def sample_pdf(self, bins, weights, N_importance, det=False, eps=1e-5):
+ """
+ Sample @N_importance samples from @bins with distribution defined by @weights.
+ Inputs:
+ bins: (N_rays, N_samples_+1) where N_samples_ is "the number of coarse samples per ray - 2"
+ weights: (N_rays, N_samples_)
+ N_importance: the number of samples to draw from the distribution
+ det: deterministic or not
+ eps: a small number to prevent division by zero
+ Outputs:
+ samples: (N_rays, N_importance), the sampled depth values
+ """
+ N_rays, N_samples_ = weights.shape
+ weights = weights + eps # prevent division by zero (don't do inplace op!)
+ pdf = weights / torch.sum(weights, -1,
+ keepdim=True) # (N_rays, N_samples_)
+ cdf = torch.cumsum(
+ pdf, -1) # (N_rays, N_samples), cumulative distribution function
+ cdf = torch.cat([torch.zeros_like(cdf[:, :1]), cdf],
+ -1) # (N_rays, N_samples_+1)
+ # padded to 0~1 inclusive
+
+ if det:
+ u = torch.linspace(0, 1, N_importance, device=bins.device)
+ u = u.expand(N_rays, N_importance)
+ else:
+ u = torch.rand(N_rays, N_importance, device=bins.device)
+ u = u.contiguous()
+
+ inds = torch.searchsorted(cdf, u, right=True)
+ below = torch.clamp_min(inds - 1, 0)
+ above = torch.clamp_max(inds, N_samples_)
+
+ inds_sampled = torch.stack([below, above],
+ -1).view(N_rays, 2 * N_importance)
+ cdf_g = torch.gather(cdf, 1,
+ inds_sampled).view(N_rays, N_importance, 2)
+ bins_g = torch.gather(bins, 1,
+ inds_sampled).view(N_rays, N_importance, 2)
+
+ denom = cdf_g[..., 1] - cdf_g[..., 0]
+ # denom == 0 means the bin has weight 0 and will never be sampled anyway,
+ # so any value is fine (set to 1 to avoid division by zero)
+ denom[denom < eps] = 1
+
+ samples = bins_g[..., 0] + (u - cdf_g[..., 0]) / denom * (
+ bins_g[..., 1] - bins_g[..., 0])
+ return samples
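+
+ # Usage sketch (illustrative): draw 16 extra depths per ray, concentrated
+ # where the coarse pass found mass:
+ # >>> bins = torch.linspace(0., 1., 9).expand(4, 9) # (N_rays, N_samples_+1)
+ # >>> w = torch.rand(4, 8) # (N_rays, N_samples_)
+ # >>> renderer.sample_pdf(bins, w, N_importance=16).shape # (4, 16)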
+
+
+class ImportanceRendererfg_bg(ImportanceRenderer):
+ """
+ Renders foreground and background together, using the NeRF++ strategy.
+ """
+ def __init__(self):
+ super().__init__()
+
+ def forward_background(self, bg_planes, decoder, ray_origins,
+ ray_directions, rendering_options):
+ # ! no importance sampling here.
+
+ # # background depth
+ depths_coarse = self.sample_stratified(
+ ray_origins, 0, 1, rendering_options['bg_depth_resolution'],
+ rendering_options['disparity_space_sampling']).squeeze(
+ -1) # remove the last 1 dim, B N S here
+
+ batch_size, num_rays, samples_per_ray = depths_coarse.shape
+
+ sample_directions = ray_directions.unsqueeze(-2).expand(
+ -1, -1, samples_per_ray, -1)
+ sample_origins = ray_origins.unsqueeze(-2).expand(
+ -1, -1, samples_per_ray, -1)
+
+ bg_sample_coordinates, _ = depth2pts_outside(
+ sample_origins, sample_directions,
+ depths_coarse) # [..., N_samples, 4]
+
+ # use _run_model here (dict output); run_model expects extra batch args
+ out = self._run_model(bg_planes, decoder, bg_sample_coordinates,
+ sample_directions.reshape(batch_size, -1, 3),
+ rendering_options)
+
+ colors_coarse = out['rgb']
+ densities_coarse = out['sigma']
+ colors_coarse = colors_coarse.reshape(batch_size, num_rays,
+ samples_per_ray,
+ colors_coarse.shape[-1])
+ densities_coarse = densities_coarse.reshape(batch_size, num_rays,
+ samples_per_ray, 1)
+
+ rgb_final, depth_final, _, weights = self.ray_marcher(
+ colors_coarse, densities_coarse, depths_coarse, rendering_options)
+
+ ret_dict = {
+ 'feature_samples': rgb_final,
+ 'depth_samples': depth_final,
+ 'weights_samples': weights.sum(2),
+ # 'visibility': visibility # T[..., -1]
+ }
+
+ return ret_dict
+
+ def forward(self,
+ planes,
+ decoder,
+ ray_origins,
+ ray_directions,
+ rendering_options,
+ return_meta=False):
+
+ fg_planes, bg_planes = torch.split(
+ planes, planes.shape[2] // 2,
+ dim=2) # concatenated on the Channel side
+
+ # ! composite fg/bg
+ fg_ret_dict = super().forward(fg_planes,
+ decoder,
+ ray_origins,
+ ray_directions,
+ rendering_options,
+ return_meta=False)
+
+ bg_ret_dict = self.forward_background(
+ bg_planes,
+ decoder,
+ ray_origins,
+ ray_directions,
+ rendering_options,
+ )
+
+ ret_dict = {**fg_ret_dict, 'bg_ret_dict': bg_ret_dict} # for compat
+
+ return ret_dict # will composite in the external triplane.py
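+
+# Compositing sketch: triplane.py later blends the two passes as
+# final = fg_features + (1 - fg_mask) * bg_ret_dict['rgb_final'],
+# i.e. the background only fills in where the foreground is transparent.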
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f79791146759880d4e83c36e9451687afcbb4f39
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,44 @@
+torch
+xformers==0.0.26.post1+cu118
+torchvision
+absl-py
+tensorboard
+lpips
+kornia
+opencv-python
+ffmpeg
+einops
+beartype
+imageio[ffmpeg]
+blobfile
+ninja
+pyspng
+psutil
+mrcfile
+lmdb
+webdataset
+point_cloud_utils
+kiui
+plyfile
+open_clip_torch==2.24.0
+openai-clip
+timm
+matplotlib
+omegaconf
+pytorch_lightning
+lz4
+xatlas
+open3d
+trimesh
+huggingface_hub
+safetensors
+PyMCubes
+vision-aided-loss
+transformers
+torchdiffeq==0.2.4
+torchmetrics==1.4.3
+torchtyping==0.1.5
+Imath
+OpenEXR
+onnxscript
+tritonclient
+gradio==4.43.0
+third_party/diff-surfel-rasterization
\ No newline at end of file
diff --git a/scripts/__pycache__/run_0123pp.cpython-39.pyc b/scripts/__pycache__/run_0123pp.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c07eed76d0b4584dc909236b00f174323f146382
Binary files /dev/null and b/scripts/__pycache__/run_0123pp.cpython-39.pyc differ
diff --git a/scripts/gradio_app_stage2.py b/scripts/gradio_app_stage2.py
new file mode 100644
index 0000000000000000000000000000000000000000..127deec81336774df21634fc83bed0a9c0d19f82
--- /dev/null
+++ b/scripts/gradio_app_stage2.py
@@ -0,0 +1,370 @@
+import argparse
+import json
+import sys
+sys.path.append('.')
+import torch
+import torchvision
+from torchvision import transforms
+import numpy as np
+
+import os
+import dnnlib
+from omegaconf import OmegaConf
+from PIL import Image
+from dnnlib.util import EasyDict
+
+import gradio as gr
+
+import rembg
+
+from huggingface_hub import hf_hub_download
+
+
+"""
+Generate a large batch of image samples from a model and save them as a large
+numpy array. This can be used to produce samples for FID evaluation.
+"""
+
+
+from pdb import set_trace as st
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ NUM_CLASSES,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+ add_dict_to_argparser,
+ args_to_dict,
+ continuous_diffusion_defaults,
+ control_net_defaults,
+)
+
+th.backends.cuda.matmul.allow_tf32 = True
+th.backends.cudnn.allow_tf32 = True
+th.backends.cudnn.enabled = True
+
+from pathlib import Path
+
+from tqdm import tqdm, trange
+import dnnlib
+from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, AE_with_Diffusion, rendering_options_defaults, eg3d_options_default, dataset_defaults
+
+from datasets.shapenet import load_eval_data
+from torch.utils.data import Subset
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+from transport.train_utils import parse_transport_args
+
+from utils.infer_utils import remove_background, resize_foreground
+
+SEED = 0
+
+def resize_to_224(img):
+ img = transforms.functional.resize(img, 224,
+ interpolation=transforms.InterpolationMode.LANCZOS)
+ return img
+
+
+def set_white_background(image):
+ image = np.array(image).astype(np.float32) / 255.0
+ mask = image[:, :, 3:4]
+ image = image[:, :, :3] * mask + (1 - mask)
+ image = Image.fromarray((image * 255.0).astype(np.uint8))
+ return image
+
+
+def check_input_image(input_image):
+ if input_image is None:
+ raise gr.Error("No image uploaded!")
+
+
+
+def main(args):
+
+ os.environ['MASTER_ADDR'] = 'localhost'
+ os.environ['MASTER_PORT'] = '12355'
+ os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+ os.environ["RANK"] = "0"
+ os.environ["WORLD_SIZE"] = "1"
+
+ # args.rendering_kwargs = rendering_options_defaults(args)
+
+ dist_util.setup_dist(args)
+ logger.configure(dir=args.logdir)
+
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ # * set denoise model args
+ logger.log("creating model and diffusion...")
+ args.img_size = [args.image_size_encoder]
+ # ! no longer required for LDM
+ # args.denoise_in_channels = args.out_chans
+ # args.denoise_out_channels = args.out_chans
+ args.image_size = args.image_size_encoder # 224, follow the triplane size
+
+ denoise_model, diffusion = create_model_and_diffusion(
+ **args_to_dict(args,
+ model_and_diffusion_defaults().keys()))
+
+ # (ControlNet branch for 'cldm' trainers is disabled in this demo.)
+
+ opts = eg3d_options_default()
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! close noise injection? since noise_mode='none' in eg3d
+
+ # denoise_model.load_state_dict(
+ # dist_util.load_state_dict(args.ddpm_model_path, map_location="cpu"))
+ denoise_model.to(dist_util.dev())
+ denoise_model.eval()
+
+ # * auto-encoder reconstruction model
+ logger.log("creating 3DAE...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ auto_encoder.to(dist_util.dev())
+ auto_encoder.eval()
+
+ # faster inference
+ # denoise_model = denoise_model.to(th.bfloat16)
+ # auto_encoder = auto_encoder.to(th.bfloat16)
+
+ # TODO, how to set the scale?
+ logger.log("create dataset")
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ # load data if i23d
+ if args.i23d:
+ data = load_eval_data(
+ file_path=args.eval_data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True, # for evaluation
+ preprocess=auto_encoder.preprocess,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+ else:
+ data = None # t23d sampling, only caption required
+
+
+ TrainLoop = {
+ 'flow_matching':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine,
+ 'flow_matching_gs':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine_gs, # slightly modified sampling and rendering for gs
+ }[args.trainer_name]
+
+ # continuous
+ sde_diffusion = None
+
+ auto_encoder.decoder.rendering_kwargs = args.rendering_kwargs
+ stage_1_output_dir = args.stage_1_output_dir
+
+ training_loop_class = TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model,
+ control_model=None, # to remove
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=None,
+ data=data,
+ eval_data=None,
+ **args)
+
+ css = """
+ h1 {
+ text-align: center;
+ display:block;
+ }
+ """
+
+
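+    # Image preprocessing for the demo: optionally strip the background with
+    # rembg, recenter the foreground to ~85% of the canvas, composite onto
+    # white, then resize to the 224x224 input the encoder expects.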
+ def preprocess(input_image, preprocess_background=True, foreground_ratio=0.85):
+ if preprocess_background:
+ rembg_session = rembg.new_session()
+ image = input_image.convert("RGB")
+ image = remove_background(image, rembg_session)
+ image = resize_foreground(image, foreground_ratio)
+ image = set_white_background(image)
+ else:
+ image = input_image
+ if image.mode == "RGBA":
+ image = set_white_background(image)
+ image = resize_to_224(image)
+ return image
+
+
+ with gr.Blocks(css=css) as demo:
+ gr.Markdown(
+ """
+ # LN3Diff (Scalable Latent Neural Fields Diffusion for Speedy 3D Generation)
+
+            **LN3Diff (ECCV 2024)** [[code](https://github.com/NIRVANALAN/LN3Diff), [project page](https://nirvanalan.github.io/projects/ln3diff/)] is a scalable 3D latent diffusion model that supports speedy 3D asset generation.
+            It first trains a 3D VAE on **Objaverse**, which compresses each 3D asset into a compact 3D-aware latent. After that, an image/text-conditioned diffusion model is trained following the LDM paradigm.
+            The model used in the demo adopts the DiT-L/2 architecture with a flow-matching framework, and supports single-image conditioning.
+            It is trained on 8 A100 GPUs for 1M iterations with a batch size of 256.
+            Locally, on an NVIDIA A100/A10 GPU, each image-conditioned generation takes 10~20 seconds (the time varies due to the adaptive-step ODE solver used in flow-matching).
+            Upload an image of an object or click one of the provided examples to see how LN3Diff works.
+            The 3D viewer will render a .obj object exported from the triplane; the mesh resolution and iso-surface threshold can be set manually.
+            For best results, run the demo and render locally - to do so, clone the [main repository](https://github.com/NIRVANALAN/LN3Diff).
+ """
+ )
+ with gr.Row(variant="panel"):
+ with gr.Column():
+ with gr.Row():
+ input_image = gr.Image(
+ label="Input Image",
+ image_mode="RGBA",
+ sources="upload",
+ type="pil",
+ elem_id="content_image",
+ )
+ processed_image = gr.Image(label="Processed Image", interactive=False)
+
+ # params
+ with gr.Row():
+ with gr.Column():
+ with gr.Row():
+ # with gr.Group():
+
+ unconditional_guidance_scale = gr.Number(
+ label="CFG-scale", value=4.0, interactive=True,
+ )
+ seed = gr.Number(
+ label="Seed", value=42, interactive=True,
+ )
+
+ num_steps = gr.Number(
+ label="ODE Sampling Steps", value=250, interactive=True,
+ )
+
+ # with gr.Column():
+ with gr.Row():
+ mesh_size = gr.Number(
+ label="Mesh Resolution", value=192, interactive=True,
+ )
+
+ mesh_thres = gr.Number(
+ label="Mesh Iso-surface", value=10, interactive=True,
+ )
+
+ with gr.Row():
+ with gr.Group():
+ preprocess_background = gr.Checkbox(
+ label="Remove Background", value=True
+ )
+ with gr.Row():
+ submit = gr.Button("Generate", elem_id="generate", variant="primary")
+
+ with gr.Row(variant="panel"):
+ gr.Examples(
+ examples=[
+ str(path) for path in sorted(Path('./assets/demo-image-for-i23d/instantmesh').glob('**/*.png'))
+ ],
+ inputs=[input_image],
+ cache_examples=False,
+ label="Examples",
+ examples_per_page=20,
+ )
+
+ with gr.Column():
+ with gr.Row():
+ with gr.Tab("Reconstruction"):
+ with gr.Column():
+ output_video = gr.Video(value=None, width=512, label="Rendered Video", autoplay=True, loop=True)
+ # output_video = gr.Video(value=None, width=256, label="Rendered Video", autoplay=True)
+ output_gs = gr.Model3D(
+ height=256,
+ label="2DGS Center",
+ pan_speed=0.5,
+ clear_color=(1,1,1,1), # loading glb file only.
+ )
+ output_model = gr.Model3D(
+ height=256,
+ label="TSDF Mesh",
+ pan_speed=0.5,
+ clear_color=(1,1,1,1), # loading tsdf ply files.
+ )
+
+ gr.Markdown(
+ """
+ ## Comments:
+            1. The sampling time varies because the ODE-based sampler (dopri5 by default) takes adaptive internal steps, so reducing the requested sampling steps may not reduce the overall sampling time. Sampling steps=250 is an empirical value that works well in most cases.
+            2. The 3D viewer shows a colored .glb mesh extracted from the volumetric tri-plane, which may differ slightly from the volume-rendering result.
+            3. If you find your result unsatisfying, tune the CFG scale and change the random seed; slightly increasing the CFG value usually leads to better quality.
+            4. Known limitations include:
+                - Missing texture details: since our VAE is trained at 192x192 resolution due to resource constraints, the texture details generated by the final 3D-LDM may be blurry. We will keep improving the performance in the future.
+            5. Regarding reconstruction performance, our model is slightly inferior to state-of-the-art multi-view LRM-based methods (e.g. InstantMesh), but offers much better diversity, flexibility and editing potential due to the intrinsic nature of diffusion models.
+
+ ## How does it work?
+
+            LN3Diff is a feedforward 3D latent diffusion model that supports direct 3D asset generation via diffusion sampling.
+            Compared to SDS-based ([DreamFusion](https://dreamfusion3d.github.io/)), multi-view generation-based ([MVDream](https://arxiv.org/abs/2308.16512), [Zero123++](https://github.com/SUDO-AI-3D/zero123plus), [Instant3D](https://instant-3d.github.io/)) and feedforward 3D reconstruction-based ([LRM](https://yiconghong.me/LRM/), [InstantMesh](https://github.com/TencentARC/InstantMesh), [LGM](https://github.com/3DTopia/LGM)) approaches,
+            LN3Diff supports feedforward 3D generation within a unified framework.
+            Like 2D/video AIGC pipelines, LN3Diff first trains a 3D-VAE and then conducts (text/image-conditioned) LDM training on the learned latent space. Related methods from industry ([Shap-E](https://github.com/openai/shap-e), [CLAY](https://github.com/CLAY-3D/OpenCLAY), [Meta 3D Gen](https://arxiv.org/abs/2303.05371)) follow the same paradigm.
+            Though native 3D LDMs currently perform below reconstruction-based methods overall, we believe the proposed approach has much potential, scales better with more data and compute, and may yield better 3D editing performance thanks to its compatibility with diffusion models.
+            For more results see the [project page](https://nirvanalan.github.io/projects/ln3diff/) and the [ECCV article](https://arxiv.org/pdf/2403.12019).
+ """
+ )
+
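+        # Why sampling time varies (illustrative sketch only, not used by the app):
+        # an adaptive solver such as dopri5 chooses its own internal step sizes, so
+        # the requested step count is a hint rather than a fixed budget. Assuming the
+        # `torchdiffeq` package and a learned velocity field `velocity_fn(t, x)`:
+        #
+        #   from torchdiffeq import odeint
+        #   x1 = odeint(velocity_fn, x0, th.linspace(0., 1., 2), method='dopri5')[-1]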
+ submit.click(fn=check_input_image, inputs=[input_image]).success(
+ fn=preprocess,
+ inputs=[input_image, preprocess_background],
+ outputs=[processed_image],
+ ).success(
+ # fn=reconstruct_and_export,
+ # inputs=[processed_image],
+ # outputs=[output_model, output_video],
+ fn=training_loop_class.eval_i23d_and_export_gradio,
+ inputs=[processed_image, seed, mesh_size, mesh_thres, unconditional_guidance_scale],
+ # inputs=[processed_image, num_steps, seed, mesh_size, mesh_thres, unconditional_guidance_scale, args.stage_1_output_dir],
+ outputs=[output_video, output_gs, output_model,],
+ )
+
+ demo.queue(max_size=1)
+ demo.launch(share=True)
+
+if __name__ == "__main__":
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ with open('configs/gradio_i23d_stage2_args.json') as f:
+ args = json.load(f)
+ args = EasyDict(args)
+
+ args.local_rank = 0
+ args.gpus = 1
+
+ main(args)
diff --git a/scripts/run_0123pp.py b/scripts/run_0123pp.py
new file mode 100644
index 0000000000000000000000000000000000000000..d24f30e9ee169518491eed73b415f588bbbf7669
--- /dev/null
+++ b/scripts/run_0123pp.py
@@ -0,0 +1,288 @@
+import os
+import imageio
+import argparse
+from pdb import set_trace as st
+import numpy as np
+import torch
+import rembg
+from PIL import Image
+from torchvision.transforms import v2
+from pytorch_lightning import seed_everything
+from omegaconf import OmegaConf
+from einops import rearrange, repeat
+from tqdm import tqdm
+from huggingface_hub import hf_hub_download
+from diffusers import DiffusionPipeline, EulerAncestralDiscreteScheduler
+
+from src.utils.train_util import instantiate_from_config
+from src.utils.camera_util import (
+ FOV_to_intrinsics,
+ get_zero123plus_input_cameras,
+ get_circular_camera_poses,
+)
+from src.utils.mesh_util import save_obj, save_obj_with_mtl
+from src.utils.infer_util import remove_background, resize_foreground, save_video
+
+
+def get_render_cameras(batch_size=1, M=120, radius=4.0, elevation=20.0, is_flexicubes=False):
+ """
+ Get the rendering camera parameters.
+ """
+ c2ws = get_circular_camera_poses(M=M, radius=radius, elevation=elevation)
+ if is_flexicubes:
+ cameras = torch.linalg.inv(c2ws)
+ cameras = cameras.unsqueeze(0).repeat(batch_size, 1, 1, 1)
+ else:
+ extrinsics = c2ws.flatten(-2)
+ intrinsics = FOV_to_intrinsics(30.0).unsqueeze(0).repeat(M, 1, 1).float().flatten(-2)
+ cameras = torch.cat([extrinsics, intrinsics], dim=-1)
+ cameras = cameras.unsqueeze(0).repeat(batch_size, 1, 1)
+ return cameras
+
+
+def render_frames(model, planes, render_cameras, render_size=512, chunk_size=1, is_flexicubes=False):
+ """
+ Render frames from triplanes.
+ """
+ frames = []
+ for i in tqdm(range(0, render_cameras.shape[1], chunk_size)):
+ if is_flexicubes:
+ frame = model.forward_geometry(
+ planes,
+ render_cameras[:, i:i+chunk_size],
+ render_size=render_size,
+ )['img']
+ else:
+ frame = model.forward_synthesizer(
+ planes,
+ render_cameras[:, i:i+chunk_size],
+ render_size=render_size,
+ )['images_rgb']
+ frames.append(frame)
+
+ frames = torch.cat(frames, dim=1)[0] # we suppose batch size is always 1
+ return frames
+
+
+###############################################################################
+# Arguments.
+###############################################################################
+
+parser = argparse.ArgumentParser()
+parser.add_argument('config', type=str, help='Path to config file.')
+parser.add_argument('input_path', type=str, help='Path to input image or directory.')
+# parser.add_argument('--output_path', type=str, default='outputs/', help='Output directory.')
+parser.add_argument('--output_path', type=str, default='outputs_debug/', help='Output directory.')
+parser.add_argument('--diffusion_steps', type=int, default=75, help='Denoising Sampling steps.')
+parser.add_argument('--seed', type=int, default=42, help='Random seed for sampling.')
+parser.add_argument('--scale', type=float, default=1.0, help='Scale of generated object.')
+parser.add_argument('--distance', type=float, default=4.5, help='Render distance.')
+parser.add_argument('--view', type=int, default=6, choices=[4, 6], help='Number of input views.')
+parser.add_argument('--no_rembg', action='store_true', help='Do not remove input background.')
+parser.add_argument('--export_texmap', action='store_true', help='Export a mesh with texture map.')
+parser.add_argument('--save_video', action='store_true', help='Save a circular-view video.')
+args = parser.parse_args()
+seed_everything(args.seed)
+
+###############################################################################
+# Stage 0: Configuration.
+###############################################################################
+
+config = OmegaConf.load(args.config)
+config_name = os.path.basename(args.config).replace('.yaml', '')
+model_config = config.model_config
+infer_config = config.infer_config
+
+IS_FLEXICUBES = config_name.startswith('instant-mesh')
+
+device = torch.device('cuda')
+
+input_cameras = get_zero123plus_input_cameras(batch_size=1, radius=4.0*args.scale).to(device)
+
+render_cameras = get_render_cameras(
+ batch_size=1,
+ M=120,
+ radius=args.distance,
+ # elevation=20.0,
+ elevation=0,
+ is_flexicubes=IS_FLEXICUBES,
+ ).to(device)
+
+# torch.save(input_cameras.cpu(), 'input_cameras_1.5.pt')
+# torch.save(render_cameras.cpu(), 'render_cameras_1.5.pt')
+
+# st()
+
+# load diffusion model
+print('Loading diffusion model ...')
+pipeline = DiffusionPipeline.from_pretrained(
+ "sudo-ai/zero123plus-v1.2",
+ custom_pipeline="zero123plus",
+ torch_dtype=torch.float16,
+)
+pipeline.scheduler = EulerAncestralDiscreteScheduler.from_config(
+ pipeline.scheduler.config, timestep_spacing='trailing'
+)
+
+# load custom white-background UNet
+print('Loading custom white-background unet ...')
+if os.path.exists(infer_config.unet_path):
+ unet_ckpt_path = infer_config.unet_path
+else:
+ unet_ckpt_path = hf_hub_download(repo_id="TencentARC/InstantMesh", filename="diffusion_pytorch_model.bin", repo_type="model")
+state_dict = torch.load(unet_ckpt_path, map_location='cpu')
+pipeline.unet.load_state_dict(state_dict, strict=True)
+
+pipeline = pipeline.to(device)
+
+# load reconstruction model
+print('Loading reconstruction model ...')
+model = instantiate_from_config(model_config)
+if os.path.exists(infer_config.model_path):
+ model_ckpt_path = infer_config.model_path
+else:
+ model_ckpt_path = hf_hub_download(repo_id="TencentARC/InstantMesh", filename=f"{config_name.replace('-', '_')}.ckpt", repo_type="model")
+state_dict = torch.load(model_ckpt_path, map_location='cpu')['state_dict']
+state_dict = {k[14:]: v for k, v in state_dict.items() if k.startswith('lrm_generator.')}
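+# 14 == len('lrm_generator.'): strip the Lightning module prefix from the keys.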
+model.load_state_dict(state_dict, strict=True)
+
+model = model.to(device)
+if IS_FLEXICUBES:
+ model.init_flexicubes_geometry(device, fovy=30.0)
+model = model.eval()
+
+# make output directories
+image_path = os.path.join(args.output_path, config_name, 'images')
+mesh_path = os.path.join(args.output_path, config_name, 'meshes')
+video_path = os.path.join(args.output_path, config_name, 'videos')
+os.makedirs(image_path, exist_ok=True)
+os.makedirs(mesh_path, exist_ok=True)
+os.makedirs(video_path, exist_ok=True)
+
+# process input files
+if os.path.isdir(args.input_path):
+ input_files = [
+ os.path.join(args.input_path, file)
+ for file in os.listdir(args.input_path)
+ if file.endswith('.png') or file.endswith('.jpg') or file.endswith('.webp')
+ ]
+else:
+ input_files = [args.input_path]
+print(f'Total number of input images: {len(input_files)}')
+
+
+###############################################################################
+# Stage 1: Multiview generation.
+###############################################################################
+
+rembg_session = None if args.no_rembg else rembg.new_session()
+
+outputs = []
+for idx, image_file in enumerate(input_files):
+ name = os.path.basename(image_file).split('.')[0]
+ print(f'[{idx+1}/{len(input_files)}] Imagining {name} ...')
+
+ # remove background optionally
+ input_image = Image.open(image_file)
+ if not args.no_rembg:
+ input_image = remove_background(input_image, rembg_session)
+ input_image = resize_foreground(input_image, 0.85)
+
+ imageio.imwrite(os.path.join(image_path, f'{name}-input.png'), np.array(input_image))
+ # continue
+
+ # sampling
+ output_image = pipeline(
+ input_image,
+ num_inference_steps=args.diffusion_steps,
+ ).images[0]
+
+ output_image.save(os.path.join(image_path, f'{name}.png'))
+ print(f"Image saved to {os.path.join(image_path, f'{name}.png')}")
+
+ images = np.asarray(output_image, dtype=np.float32) / 255.0
+ images = torch.from_numpy(images).permute(2, 0, 1).contiguous().float() # (3, 960, 640)
+ images = rearrange(images, 'c (n h) (m w) -> (n m) c h w', n=3, m=2) # (6, 3, 320, 320)
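+    # zero123plus emits a single 960x640 image tiling 6 views in a 3x2 grid;
+    # the rearrange above splits it into 6 separate 320x320 view tensors.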
+
+    # st()
+
+ outputs.append({'name': name, 'images': images})
+
+# delete pipeline to save memory
+del pipeline
+
+
+###############################################################################
+# Stage 2: Reconstruction.
+###############################################################################
+
+
+# exit()
+chunk_size = 20 if IS_FLEXICUBES else 1
+
+for idx, sample in enumerate(outputs):
+ name = sample['name']
+ print(f'[{idx+1}/{len(outputs)}] Creating {name} ...')
+
+ images = sample['images'].unsqueeze(0).to(device)
+ images = v2.functional.resize(images, 320, interpolation=3, antialias=True).clamp(0, 1)
+
+ if args.view == 4:
+ indices = torch.tensor([0, 2, 4, 5]).long().to(device)
+ images = images[:, indices]
+ input_cameras = input_cameras[:, indices]
+
+ with torch.no_grad():
+ # get triplane
+ planes = model.forward_planes(images, input_cameras)
+
+ # get mesh
+ mesh_path_idx = os.path.join(mesh_path, f'{name}.obj')
+
+ mesh_out = model.extract_mesh(
+ planes,
+ use_texture_map=args.export_texmap,
+ **infer_config,
+ )
+ if args.export_texmap:
+ vertices, faces, uvs, mesh_tex_idx, tex_map = mesh_out
+ save_obj_with_mtl(
+ vertices.data.cpu().numpy(),
+ uvs.data.cpu().numpy(),
+ faces.data.cpu().numpy(),
+ mesh_tex_idx.data.cpu().numpy(),
+ tex_map.permute(1, 2, 0).data.cpu().numpy(),
+ mesh_path_idx,
+ )
+ else:
+ vertices, faces, vertex_colors = mesh_out
+ save_obj(vertices, faces, vertex_colors, mesh_path_idx)
+ print(f"Mesh saved to {mesh_path_idx}")
+
+ # get video
+ if args.save_video:
+ video_path_idx = os.path.join(video_path, f'{name}.mp4')
+ render_size = infer_config.render_resolution
+ # render_cameras = get_render_cameras(
+ # batch_size=1,
+ # M=120,
+ # radius=args.distance,
+ # elevation=20.0,
+ # is_flexicubes=IS_FLEXICUBES,
+ # ).to(device)
+
+ frames = render_frames(
+ model,
+ planes,
+ render_cameras=render_cameras,
+ render_size=render_size,
+ chunk_size=chunk_size,
+ is_flexicubes=IS_FLEXICUBES,
+ )
+
+ save_video(
+ frames,
+ video_path_idx,
+ fps=30,
+ )
+ print(f"Video saved to {video_path_idx}")
diff --git a/scripts/save_pcd.py b/scripts/save_pcd.py
new file mode 100644
index 0000000000000000000000000000000000000000..3430c5a1c8c3be1471f9bea4569ea57a6e1ff9e7
--- /dev/null
+++ b/scripts/save_pcd.py
@@ -0,0 +1,554 @@
+"""
+Train a diffusion model on images.
+"""
+# import imageio
+from pathlib import Path
+import torchvision
+import kornia
+import lz4.frame
+import gzip
+import random
+import json
+import sys
+import os
+import lmdb
+from tqdm import tqdm
+
+sys.path.append('.')
+import torch.distributed as dist
+import pytorch3d.ops
+import pickle
+import traceback
+from PIL import Image
+import torch as th
+if th.cuda.is_available():
+ from xformers.triton import FusedLayerNorm as LayerNorm
+import torch.multiprocessing as mp
+import lzma
+import webdataset as wds
+import numpy as np
+
+import point_cloud_utils as pcu
+from torch.utils.data import DataLoader, Dataset
+import imageio.v3 as iio
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+# from nsr.train_util import TrainLoop3DRec as TrainLoop
+from nsr.train_nv_util import TrainLoop3DRecNV, TrainLoop3DRec, TrainLoop3DRecNVPatch
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default
+from datasets.shapenet import load_data, load_data_for_lmdb, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+from datasets.eg3d_dataset import init_dataset_kwargs
+from nsr.volumetric_rendering.ray_sampler import RaySampler
+
+# from .lmdb_create import encode_and_compress_image
+
+
+def encode_and_compress_image(inp_array, is_image=False, compress=True):
+ # Read the image using imageio
+ # image = imageio.v3.imread(image_path)
+
+ # Convert the image to bytes
+ # with io.BytesIO() as byte_buffer:
+ # imageio.imsave(byte_buffer, image, format="png")
+ # image_bytes = byte_buffer.getvalue()
+ if is_image:
+ inp_bytes = iio.imwrite("", inp_array, extension=".png")
+ else:
+ inp_bytes = inp_array.tobytes()
+
+    # Compress the data using lz4 (the slower gzip variant is kept commented below)
+ if compress:
+ # compressed_data = gzip.compress(inp_bytes)
+ compressed_data = lz4.frame.compress(inp_bytes)
+ return compressed_data
+ else:
+ return inp_bytes
+
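+# Round-trip sketch (illustrative, assuming a float32 array and the default lz4 path):
+#   blob = encode_and_compress_image(arr.astype(np.float32), is_image=False)
+#   back = np.frombuffer(lz4.frame.decompress(blob), dtype=np.float32).reshape(arr.shape)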
+
+from pdb import set_trace as st
+import bz2
+
+# th.backends.cuda.matmul.allow_tf32 = True # https://huggingface.co/docs/diffusers/optimization/fp16
+
+
+def training_loop(args):
+ # def training_loop(args):
+ # dist_util.setup_dist(args)
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # https://blog.csdn.net/qq_41682740/article/details/126304613
+
+ SEED = args.seed
+
+ # dist.init_process_group(backend='nccl', init_method='env://', rank=args.local_rank, world_size=th.cuda.device_count())
+ # logger.log(f"{args.local_rank=} init complete, seed={SEED}")
+ # th.cuda.set_device(args.local_rank)
+ th.cuda.empty_cache()
+
+ # * deterministic algorithms flags
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+ random.seed(SEED)
+
+ ray_sampler = RaySampler()
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating encoder and NSR decoder...")
+ # device = dist_util.dev()
+ # device = th.device("cuda", args.local_rank)
+
+ # shared eg3d opts
+ opts = eg3d_options_default()
+
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! close noise injection? since noise_mode='none' in eg3d
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_data_for_lmdb
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data, load_data_for_lmdb
+
+ # auto_encoder = create_3DAE_model(
+ # **args_to_dict(args,
+ # encoder_and_nsr_defaults().keys()))
+ # auto_encoder.to(device)
+ # auto_encoder.train()
+
+ logger.log("creating data loader...")
+ # data = load_data(
+ # st()
+ # if args.overfitting:
+ # data = load_memory_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # # load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ # )
+ # else:
+ if args.cfg in ('afhq', 'ffhq'):
+ # ! load data
+ logger.log("creating eg3d data loader...")
+ training_set_kwargs, dataset_name = init_dataset_kwargs(
+ data=args.data_dir,
+ class_name='datasets.eg3d_dataset.ImageFolderDatasetLMDB',
+ reso_gt=args.image_size) # only load pose here
+ # if args.cond and not training_set_kwargs.use_labels:
+ # raise Exception('check here')
+
+ # training_set_kwargs.use_labels = args.cond
+ training_set_kwargs.use_labels = True
+ training_set_kwargs.xflip = False
+ training_set_kwargs.random_seed = SEED
+ # training_set_kwargs.max_size = args.dataset_size
+ # desc = f'{args.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}'
+
+ # * construct ffhq/afhq dataset
+ training_set = dnnlib.util.construct_class_by_name(
+ **training_set_kwargs) # subclass of training.dataset.Dataset
+ dataset_size = len(training_set)
+
+ # training_set_sampler = InfiniteSampler(
+ # dataset=training_set,
+ # rank=dist_util.get_rank(),
+ # num_replicas=dist_util.get_world_size(),
+ # seed=SEED)
+
+ data = DataLoader(
+ training_set,
+ shuffle=False,
+ batch_size=1,
+ num_workers=16,
+ drop_last=False,
+ # prefetch_factor=2,
+ pin_memory=True,
+ persistent_workers=True,
+ )
+
+ else:
+ # data, dataset_name, dataset_size, dataset = load_data_for_lmdb(
+ data, dataset_name, dataset_size = load_data_for_lmdb(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True,
+ preprocess=None,
+ dataset_size=args.dataset_size,
+ trainer_name=args.trainer_name,
+ shuffle_across_cls=args.shuffle_across_cls,
+ wds_split=args.wds_split,
+ four_view_for_latent=True
+ # wds_output_path=os.path.join(logger.get_dir(), f'wds-%06d.tar')
+ # load_depth=True # for evaluation
+ )
+ # if args.pose_warm_up_iter > 0:
+ # overfitting_dataset = load_memory_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # # load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ # )
+ # data = [data, overfitting_dataset, args.pose_warm_up_iter]
+ # eval_data = load_eval_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ # preprocess=auto_encoder.preprocess)
+ args.img_size = [args.image_size_encoder]
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ # opt.max_depth, opt.min_depth = args.rendering_kwargs.ray_end, args.rendering_kwargs.ray_start
+ # loss_class = E3DGELossClass(device, opt).to(device)
+
+ # writer = SummaryWriter() # TODO, add log dir
+
+ logger.log("training...")
+
+ # TrainLoop = {
+ # 'input_rec': TrainLoop3DRec,
+ # 'nv_rec': TrainLoop3DRecNV,
+ # 'nv_rec_patch': TrainLoop3DRecNVPatch,
+ # }[args.trainer_name]
+
+ # TrainLoop(rec_model=auto_encoder,
+ # loss_class=loss_class,
+ # data=data,
+ # eval_data=eval_data,
+ # **vars(args)).run_loop() # ! overfitting
+
+ # Function to compress an image using gzip
+ # def compress_image_gzip(image_path):
+ # def encode_and_compress_image(inp_array, is_image=False):
+ # # Read the image using imageio
+ # # image = imageio.v3.imread(image_path)
+
+ # # Convert the image to bytes
+ # # with io.BytesIO() as byte_buffer:
+ # # imageio.imsave(byte_buffer, image, format="png")
+ # # image_bytes = byte_buffer.getvalue()
+ # if is_image:
+ # inp_bytes = iio.imwrite("", inp_array, extension=".png")
+ # else:
+ # inp_bytes = inp_array.tobytes()
+
+ # # Compress the image data using gzip
+ # compressed_data = gzip.compress(inp_bytes)
+
+ # return compressed_data
+
+ def save_pcd_from_depth(dataset_loader, dataset_size, lmdb_path,
+ start_shard, wds_split):
+ """
+ Convert a PyTorch dataset to LMDB format.
+
+ Parameters:
+ - dataset: PyTorch dataset
+ - lmdb_path: Path to store the LMDB database
+ """
+ # env = lmdb.open(lmdb_path, map_size=1024 ** 4, readahead=False) # Adjust map_size based on your dataset size
+ # sink = wds.ShardWriter(lmdb_path, start_shard=start_shard)
+
+ # with env.begin(write=True) as txn:
+
+ # with env.begin(write=True) as txn:
+ # txn.put("length".encode("utf-8"), str(dataset_size).encode("utf-8"))
+
+ # K = 10000 # fps K
+ K = 4096 # fps K
+ # K = 128*128*2 # fps K, 32768
+ # K = 1024*24 # 20480
+ # K = 4096 # fps K
+
+ # if True:
+
+ # try:
+ for idx, sample in enumerate(tqdm(dataset_loader)):
+
+ # pass
+ # remove the batch index of returned dict sample
+
+ sample_ins = sample.pop('ins')
+ # !!! add all()
+        assert all([sample_ins[i] == sample_ins[0] for i in range(len(sample_ins))]), sample_ins  # check that the whole batch comes from the same instance
+
+ img_size = sample['raw_img'].shape[2]
+
+ pcd_path = Path(f'{logger.get_dir()}/fps-pcd/{sample_ins[0]}')
+
+ if (pcd_path / f'fps-{K}.ply').exists():
+ continue
+
+ pcd_path.mkdir(parents=True, exist_ok=True)
+
+ # sample = {
+ # # k:v.squeeze(0).cpu().numpy() if isinstance(v, th.Tensor) else v[0] for k, v in sample.items()
+ # k:v.cpu().numpy() if isinstance(v, th.Tensor) else v for k, v in sample.items()
+ # # k:v.cpu().numpy() if isinstance(v, torch.Tensor) else v for k, v in sample.items()
+ # }
+
+ B = sample['c'].shape[0]
+
+ cam2world_matrix = sample['c'][:, :16].reshape(B, 4, 4)
+ intrinsics = sample['c'][:, 16:25].reshape(B, 3, 3)
+
+ ray_origins, ray_directions = ray_sampler( # shape:
+ cam2world_matrix, intrinsics, img_size)[:2]
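+        # Back-projection: every pixel lifts to ray_origin + depth * ray_direction
+        # in world space; masked / zero-depth pixels are filtered out below before
+        # farthest-point sampling down to K points.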
+
+ micro = sample
+
+ # self.gs.output_size,)[:2]
+ # depth = rearrange(micro['depth'], '(B V) H W -> ')
+ # depth_128 = th.nn.functional.interpolate(
+ # micro['depth'].unsqueeze(1), (128, 128),
+ # mode='nearest'
+ # )[:, 0] # since each view has 128x128 Gaussians
+ # depth = depth_128.reshape(B * V, -1).unsqueeze(-1)
+
+ # fg_mask = (micro['depth'] > 0).unsqueeze(1).float()
+
+ # fg_mask = micro['alpha_mask'].unsqueeze(1).float() # anti-alias? B 1 H W
+ fg_mask = (micro['alpha_mask'] == 1).unsqueeze(1).float() # anti-alias? B 1 H W
+
+ kernel = th.tensor([[0, 1, 0], [1, 1, 1], [0, 1,
+ 0]]).to(fg_mask.device)
+
+ # ! erode. but still some noise...
+ '''
+ erode_mask = kornia.morphology.erosion(fg_mask, kernel) # B 1 H W
+ # torchvision.utils.save_image(fg_mask.float()*2-1,'mask.jpg', value_range=(-1,1), normalize=True)
+ # torchvision.utils.save_image(erode_mask.float()*2-1,'erode_mask.jpg', value_range=(-1,1), normalize=True)
+
+ fg_mask = (erode_mask==1).float().reshape(B, -1).unsqueeze(-1) > 0 #
+ # '''
+ # fg_mask = fg_mask.reshape(B, -1).unsqueeze(-1) == 1 # ! for some failed data
+ # ! no erode:
+ fg_mask = fg_mask.reshape(B, -1).unsqueeze(-1) > 0 # ! for some failed data
+
+ depth = micro['depth'].reshape(B, -1).unsqueeze(-1)
+ depth = th.where(depth < 1.05, 0, depth) # filter outlier
+ depth[depth == 0] = 1e10 # so that rays_o will not appear in the final pcd.
+
+ # fg_mask = depth>0
+
+ # fg_mask = th.nn.functional.interpolate(
+ # micro['depth_mask'].unsqueeze(1).to(th.uint8),
+ # (128, 128),
+ # mode='nearest').squeeze(1) # B*V H W
+ # fg_mask = fg_mask.reshape(B * V, -1).unsqueeze(-1)
+
+
+ # gt_pos = gt_pos[gt_pos.nonzero(as_tuple=True)].reshape(-1, 3) # return non-zero points for fps sampling
+
+ # pcu.save_mesh_v(f'tmp/gt-512.ply', gt_pos.detach().cpu().numpy(),)
+
+ # fps sampling
+ try:
+
+ gt_pos = ray_origins + depth * ray_directions # BV HW 3, already in the world space
+ gt_pos = fg_mask * gt_pos # remove ray_origins when depth=0
+ # gt_pos = gt_pos[[8,16,24,25,26, 27, 31, 35]]
+ # gt_pos = gt_pos[[5,10,15,20,24,25,26]]
+ # gt_pos = gt_pos[[4, 12, 20, 25]]
+ gt_pos = gt_pos[:]
+ # gt_pos = gt_pos[[25,26]]
+ gt_pos = gt_pos.reshape(-1, 3).to(dist_util.dev())
+ gt_pos = gt_pos.clip(-0.45, 0.45)
+ gt_pos = th.where(gt_pos.abs()==0.45, 0, gt_pos) # no boundary here? Yes.
+
+ # ! filter the zero points together here
+
+ nonzero_mask = (gt_pos != 0).all(dim=-1) # Shape: (N, 3)
+ nonzero_gt_pos = gt_pos[nonzero_mask]
+
+ fps_points = pytorch3d.ops.sample_farthest_points(
+ nonzero_gt_pos.unsqueeze(0), K=K)[0]
+
+ pcu.save_mesh_v(
+ str(pcd_path / f'fps-{K}.ply'),
+ fps_points[0].detach().cpu().numpy(),
+ )
+
+ assert (pcd_path / f'fps-{K}.ply').exists()
+
+        except Exception as e:
+            print(pcd_path, 'save failed: ', e)
+            st()  # drop into the debugger to inspect the failing sample
+
+ # ! debug projection matrix
+
+ # def pcd_to_homo(pcd):
+ # return th.cat([pcd, th.ones_like(pcd[..., 0:1])], -1)
+
+ # st()
+
+ # proj_point = th.inverse(cam2world_matrix[0]).to(fps_points) @ pcd_to_homo(fps_points[0]).permute(1, 0)
+ # # proj_point = th.inverse(cam2world_matrix[0]).to(fps_points) @ pcd_to_homo((ray_origins + depth * ray_directions)[0].to(fps_points)).permute(1, 0)
+ # proj_point[:2, ...] /= proj_point[2, ...]
+ # proj_point[2, ...] = 1 # homo
+
+
+ # proj_point = intrinsics[0].to(fps_points) @ proj_point[:3]
+ # proj_point = proj_point.permute(1,0)[..., :2] # 768 4
+ # st()
+
+ # torchvision.utils.save_image(micro['raw_img'][::5].permute(0,3,1,2).float()/127.5-1,'raw.jpg', value_range=(-1,1), normalize=True)
+
+ # # encode batch images/depths/strings? no need to encode ins/fname here; just save the caption
+
+ # # sample = dataset_loader[idx]
+ # compressed_sample = {}
+ # sample['ins'] = sample_ins[0]
+ # sample['caption'] = sample.pop('caption')[0]
+
+ # for k, v in sample.items():
+
+ # # key = f'{idx}-{k}'.encode('utf-8')
+
+ # if 'img' in k: # only bytes required? laod the 512 depth bytes only.
+ # v = encode_and_compress_image(v, is_image=True, compress=True)
+ # # v = encode_and_compress_image(v, is_image=True, compress=False)
+ # # elif 'depth' in k:
+ # elif isinstance(v, str):
+ # v = v.encode('utf-8') # caption / instance name
+ # else: # regular bytes encoding
+ # v = encode_and_compress_image(v.astype(np.float32), is_image=False, compress=True)
+ # # v = encode_and_compress_image(v.astype(np.float32), is_image=False, compress=False)
+
+ # compressed_sample[k] = v
+
+ # # st() # TODO, add .gz for compression after pipeline done
+ # sink.write({
+ # "__key__": f"sample_{wds_split:03d}_{idx:07d}",
+ # # **{f'{k}.pyd': v for k, v in compressed_sample.items()}, # store as pickle, already compressed
+ # 'sample.pyd': compressed_sample
+ # # 'sample.gz': compressed_sample
+ # })
+
+ # break
+ # if idx > 25:
+ # break
+ # except:
+ # continue
+
+ # sink.close()
+
+    # convert_to_lmdb(data, os.path.join(logger.get_dir(), dataset_name))
+    # convert_to_lmdb_compressed(data, os.path.join(logger.get_dir()), dataset_size)
+ save_pcd_from_depth(data, dataset_size,
+ os.path.join(logger.get_dir(), f'wds-%06d.tar'),
+ args.start_shard, args.wds_split)
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ seed=0,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_amp=False,
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ # test warm up pose sampling training
+ objv_dataset=False,
+ pose_warm_up_iter=-1,
+ start_shard=0,
+ shuffle_across_cls=False,
+ wds_split=1, # out of 4
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+ # os.environ["NCCL_DEBUG"]="INFO"
+
+ args = create_argparser().parse_args()
+ # args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ opts = args
+
+ args.rendering_kwargs = rendering_options_defaults(opts)
+
+ # print(args)
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ # Launch processes.
+ print('Launching processes...')
+
+ # try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ # except Exception as e:
+ # # print(e)
+ # traceback.print_exc()
+ # dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/scripts/save_pcd_from_gs.py b/scripts/save_pcd_from_gs.py
new file mode 100644
index 0000000000000000000000000000000000000000..67756318c0c6dae070f838f9c899160c07699112
--- /dev/null
+++ b/scripts/save_pcd_from_gs.py
@@ -0,0 +1,273 @@
+"""
+Train a diffusion model on images.
+"""
+# import imageio
+from pathlib import Path
+import torchvision
+import kornia
+import lz4.frame
+import gzip
+import random
+import json
+import sys
+import os
+import lmdb
+from tqdm import tqdm
+
+sys.path.append('.')
+import torch.distributed as dist
+import pytorch3d.ops
+import pickle
+import traceback
+from PIL import Image
+import torch as th
+if th.cuda.is_available():
+ from xformers.triton import FusedLayerNorm as LayerNorm
+import torch.multiprocessing as mp
+import lzma
+import webdataset as wds
+import numpy as np
+
+import point_cloud_utils as pcu
+from torch.utils.data import DataLoader, Dataset
+import imageio.v3 as iio
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+# from nsr.train_util import TrainLoop3DRec as TrainLoop
+from nsr.train_nv_util import TrainLoop3DRecNV, TrainLoop3DRec, TrainLoop3DRecNVPatch
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default
+from datasets.shapenet import load_data, load_data_for_lmdb, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+from datasets.eg3d_dataset import init_dataset_kwargs
+from nsr.volumetric_rendering.ray_sampler import RaySampler
+
+# from .lmdb_create import encode_and_compress_image
+
+
+def encode_and_compress_image(inp_array, is_image=False, compress=True):
+ # Read the image using imageio
+ # image = imageio.v3.imread(image_path)
+
+ # Convert the image to bytes
+ # with io.BytesIO() as byte_buffer:
+ # imageio.imsave(byte_buffer, image, format="png")
+ # image_bytes = byte_buffer.getvalue()
+ if is_image:
+ inp_bytes = iio.imwrite("", inp_array, extension=".png")
+ else:
+ inp_bytes = inp_array.tobytes()
+
+    # Compress the data using lz4 (the slower gzip variant is kept commented below)
+ if compress:
+ # compressed_data = gzip.compress(inp_bytes)
+ compressed_data = lz4.frame.compress(inp_bytes)
+ return compressed_data
+ else:
+ return inp_bytes
+
+
+from pdb import set_trace as st
+import bz2
+
+# th.backends.cuda.matmul.allow_tf32 = True # https://huggingface.co/docs/diffusers/optimization/fp16
+
+
+def training_loop(args):
+ # def training_loop(args):
+ # dist_util.setup_dist(args)
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # https://blog.csdn.net/qq_41682740/article/details/126304613
+
+ SEED = args.seed
+
+ # dist.init_process_group(backend='nccl', init_method='env://', rank=args.local_rank, world_size=th.cuda.device_count())
+ # logger.log(f"{args.local_rank=} init complete, seed={SEED}")
+ # th.cuda.set_device(args.local_rank)
+ th.cuda.empty_cache()
+
+ # * deterministic algorithms flags
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+ random.seed(SEED)
+
+ ray_sampler = RaySampler()
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating encoder and NSR decoder...")
+ # device = dist_util.dev()
+ # device = th.device("cuda", args.local_rank)
+
+ # shared eg3d opts
+ opts = eg3d_options_default()
+
+ logger.log("creating data loader...")
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ # opt.max_depth, opt.min_depth = args.rendering_kwargs.ray_end, args.rendering_kwargs.ray_start
+ # loss_class = E3DGELossClass(device, opt).to(device)
+
+ # writer = SummaryWriter() # TODO, add log dir
+
+ logger.log("training...")
+
+ def save_pcd_from_gs(lmdb_path, start_shard, wds_split):
+ """
+ Convert a PyTorch dataset to LMDB format.
+
+ Parameters:
+ - dataset: PyTorch dataset
+ - lmdb_path: Path to store the LMDB database
+ """
+
+ # ! read dataset path
+
+ # latent_dir = '/nas/shared/V2V/yslan/logs/nips23/Reconstruction/final/objav/vae/gs/infer-latents/768/8x8/animals/latent_dir/Animals'
+ latent_dir = '/nas/shared/V2V/yslan/logs/nips23/Reconstruction/final/objav/vae/gs/infer-latents/768/8x8/animals-gs-latent-dim=10-fullset/latent_dir'
+
+ ins_list = []
+
+ for class_dir in os.listdir(latent_dir)[:]:
+ for dict_dir in os.listdir(os.path.join(latent_dir, class_dir))[:]:
+ for ins_dir in os.listdir(os.path.join(latent_dir, class_dir, dict_dir)):
+ ins_list.append(os.path.join(class_dir, dict_dir, ins_dir))
+
+ K = 4096 # fps K
+
+ for idx, ins in enumerate(tqdm(ins_list)):
+
+ # sample_ins = sample.pop('ins')
+ pcd_path = Path(f'{logger.get_dir()}/fps-pcd/{ins}')
+
+ if (pcd_path / f'fps-{K}.ply').exists():
+ continue
+
+ # ! load gaussians
+ gaussians = np.load(os.path.join(latent_dir,ins,'gaussians.npy'))
+
+ points = gaussians[0,:, 0:3]
+
+ # load opacity and scale
+ opacity = gaussians[0,:, 3:4]
+ # scale = gaussians[0,:, 4:6]
+ # colors = gaussians[0, :, 10:13]
+
+ opacity_mask = opacity < 0.005 # official threshold
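+            # Keep only Gaussians above the opacity pruning threshold used by the
+            # official 3DGS implementation; FPS then downsamples the survivors to K points.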
+
+ high_opacity_points = points[~opacity_mask[..., 0]]
+ # high_opacity_colors = colors[~opacity_mask[..., 0]]
+ high_opacity_points = th.from_numpy(high_opacity_points).to(dist_util.dev())
+
+
+ pcd_path.mkdir(parents=True, exist_ok=True)
+
+ try:
+
+ fps_points = pytorch3d.ops.sample_farthest_points(
+ high_opacity_points.unsqueeze(0), K=K)[0]
+
+ pcu.save_mesh_v(
+ str(pcd_path / f'fps-{K}.ply'),
+ fps_points[0].detach().cpu().numpy(),
+ )
+
+ assert (pcd_path / f'fps-{K}.ply').exists()
+
+            except Exception as e:
+                print(pcd_path, 'save failed: ', e)
+                continue
+
+
+ save_pcd_from_gs(os.path.join(logger.get_dir(), f'wds-%06d.tar'),
+ args.start_shard, args.wds_split)
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ seed=0,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_amp=False,
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ # test warm up pose sampling training
+ objv_dataset=False,
+ pose_warm_up_iter=-1,
+ start_shard=0,
+ shuffle_across_cls=False,
+ wds_split=1, # out of 4
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+ # os.environ["NCCL_DEBUG"]="INFO"
+
+ args = create_argparser().parse_args()
+ # args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ opts = args
+
+ args.rendering_kwargs = rendering_options_defaults(opts)
+
+ # print(args)
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ # Launch processes.
+ print('Launching processes...')
+
+ # try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ # except Exception as e:
+ # # print(e)
+ # traceback.print_exc()
+ # dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/scripts/split_rgb_normal_vid.py b/scripts/split_rgb_normal_vid.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0c70412e7b81ef6ded8a594db60d6fe7d49f01c
--- /dev/null
+++ b/scripts/split_rgb_normal_vid.py
@@ -0,0 +1,85 @@
+# import imageio.v3 as imageio
+import imageio
+from tqdm import tqdm
+from pdb import set_trace as st
+import glob
+import numpy as np
+import os
+from pathlib import Path
+# ! pip install opencv-python
+import cv2
+import matplotlib.pyplot as plt
+
+
+def save_2dgs_rgb_normal_vid(vid_path, output_dir):
+
+ frames = imageio.v3.imread(vid_path)
+
+ vid_name = Path(vid_path).stem
+
+ # output frames here
+ # output_dir = f'{ga_output_dir}/{index}'
+ # if not os.path.exists(output_dir):
+ # os.mkdir(output_dir)
+ # all_rgb_frames, all_normal_frames = [], []
+
+ rgb_video_out = imageio.get_writer(
+ f'{output_dir}/rgb/{vid_name}-rgb.mp4',
+ mode='I',
+ fps=24,
+ codec='libx264')
+
+ normal_video_out = imageio.get_writer(
+ f'{output_dir}/normal/{vid_name}-normal.mp4',
+ mode='I',
+ fps=24,
+ codec='libx264')
+
+ depth_video_out = imageio.get_writer(
+ f'{output_dir}/normal/{vid_name}-depth.mp4',
+ mode='I',
+ fps=24,
+ codec='libx264')
+
+
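+    # Each source frame concatenates three square panels side by side:
+    # | rgb | normal | depth |. Crop the bottom square of each panel and
+    # resize it to 384x384 before writing to the per-modality videos.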
+ for idx, frame in enumerate(frames[:24]):
+ # frame_size = 512
+ frame_size = frame.shape[1] // 3
+
+ # rgb_video_out.append_data(frame[-384:, :384])
+ # normal_video_out.append_data(frame[-384:, 384*2:384*3])
+
+ rgb = frame[-frame_size:, :frame_size]
+ rgb_video_out.append_data(cv2.resize(rgb, (384, 384)))
+
+ depth = frame[-frame_size:, frame_size*2:frame_size*3]
+ depth_video_out.append_data(cv2.resize(depth, (384, 384)))
+
+ normal = frame[-frame_size:, frame_size*1:frame_size*2]
+ normal_video_out.append_data(cv2.resize(normal, (384, 384)))
+
+ rgb_video_out.close()
+ normal_video_out.close()
+ depth_video_out.close()
+
+
+# output_dir = '/mnt/sfs-common/yslan/Repo/3dgen/GA-logs/demo-video-buffer'
+# vid_input_dir = '/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange'
+
+output_dir = '/mnt/sfs-common/yslan/Repo/3dgen/GA-logs/demo-video-buffer-192'
+vid_input_dir = '/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_768-512-perturb0-debug'
+
+os.makedirs(os.path.join(output_dir, 'rgb'), exist_ok=True)
+os.makedirs(os.path.join(output_dir, 'normal'), exist_ok=True)
+
+
+
+all_vids = glob.glob(os.path.join(vid_input_dir, '*.mp4'))
+
+for vid_path in tqdm(all_vids[:]):
+ # if 'daily-used' in vid_path: # only on fancy categories.
+ # continue
+ try:
+ save_2dgs_rgb_normal_vid(vid_path, output_dir)
+    except Exception as e:
+        print(vid_path, 'failed:', e)
\ No newline at end of file
diff --git a/scripts/triplane_rec_inference.py b/scripts/triplane_rec_inference.py
new file mode 100644
index 0000000000000000000000000000000000000000..3fe44ec7d6df9d3299150df8d3cc65ec594a1a64
--- /dev/null
+++ b/scripts/triplane_rec_inference.py
@@ -0,0 +1,153 @@
+"""
+Train a diffusion model on images.
+"""
+import sys
+import os
+sys.path.append('.')
+
+import torch as th
+import torch.multiprocessing as mp
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+from nsr.train_util import TrainLoop3DRec as TrainLoop
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, create_Triplane, loss_defaults
+from datasets.shapenet import load_eval_rays, load_data, load_eval_data
+from nsr.losses.builder import E3DGELossClass
+
+from pdb import set_trace as st
+
+def inference_loop(rank, master_addr, master_port, args):
+ dist_util.setup_dist(rank, master_addr, master_port, args.gpus)
+
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating eval rays...")
+ # TODO, load shapenet data
+ eval_data = load_eval_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=args.depth_lambda > 0
+ )
+ # c_list = load_eval_rays(
+ # file_path=args.data_dir,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # )
+
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+
+ logger.log("loading encoder and NSR decoder...")
+    auto_encoder = create_Triplane(  # basically an overfitted triplane
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ # auto_encoder = create_3DAE_model(
+ # **args_to_dict(args,
+ # encoder_and_nsr_defaults().keys()))
+ auto_encoder.to(dist_util.dev())
+ auto_encoder.eval()
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()) )
+ loss_class = E3DGELossClass(dist_util.dev(), opt).to(dist_util.dev())
+
+ logger.log("training...")
+ TrainLoop(
+ model=auto_encoder,
+ # encoder,
+ # decoder
+ loss_class=loss_class,
+ # diffusion=diffusion,
+ data=eval_data, # TODO
+ # data=batch,
+ batch_size=args.batch_size,
+ microbatch=args.microbatch,
+ lr=args.lr,
+ ema_rate=args.ema_rate,
+ log_interval=args.log_interval,
+ save_interval=args.save_interval,
+ resume_checkpoint=args.resume_checkpoint,
+ use_fp16=args.use_fp16,
+ fp16_scale_growth=args.fp16_scale_growth,
+ weight_decay=args.weight_decay,
+ lr_anneal_steps=args.lr_anneal_steps,
+ ).eval_loop() # ! overfitting
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ num_workers=4,
+ local_rank=0,
+ gpus=1,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=10,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/eval",
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"
+ # ] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+
+ args = create_argparser().parse_args()
+ # st()
+
+ master_addr = '127.0.0.1'
+ master_port = dist_util._find_free_port()
+
+ # Launch processes.
+ print('Launching processes...')
+ th.multiprocessing.set_start_method('spawn')
+
+ subprocess_fn = inference_loop
+
+ # launch using torch.multiprocessing.spawn
+ if args.gpus == 1:
+ subprocess_fn(rank=0, master_addr=master_addr, master_port=master_port, args=args)
+ else:
+ th.multiprocessing.spawn(fn=subprocess_fn,
+ args=(master_addr, master_port,args),
+ nprocs=args.gpus)
+
diff --git a/scripts/triplane_rec_obaverse_train.py b/scripts/triplane_rec_obaverse_train.py
new file mode 100644
index 0000000000000000000000000000000000000000..733c6bc1e81ca9ab0ac8992d86c2970557405fab
--- /dev/null
+++ b/scripts/triplane_rec_obaverse_train.py
@@ -0,0 +1,180 @@
+"""
+Train a diffusion model on images.
+"""
+import sys
+import os
+sys.path.append('.')
+
+import torch as th
+import torch.multiprocessing as mp
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+from nsr.train_util import TrainLoop3DTriplaneRec as TrainLoop
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, create_Triplane, loss_defaults
+from datasets.g_buffer_objaverse import load_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from pdb import set_trace as st
+
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default
+
+import random
+import json
+import sys
+import os
+
+sys.path.append('.')
+import torch.distributed as dist
+
+import traceback
+
+
+# def training_loop(rank, master_addr, master_port, args):
+def training_loop(args):
+ # dist_util.setup_dist(rank, master_addr, master_port, args.gpus)
+ dist_util.setup_dist(args)
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ # print(args)
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ logger.log("creating data loader...")
+ # TODO, load shapenet data
+ # data = load_data(
+ data = load_memory_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ )
+ eval_data = data
+ # eval_data = load_eval_data(
+ # file_path=args.data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True # for evaluation
+ # )
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+ logger.log("creating encoder and NSR decoder...")
+
+
+    auto_encoder = create_Triplane(  # basically an overfitted triplane
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ # auto_encoder = create_3DAE_model(
+ # **args_to_dict(args,
+ # encoder_and_nsr_defaults().keys()))
+ auto_encoder.to(dist_util.dev())
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()) )
+ loss_class = E3DGELossClass(dist_util.dev(), opt).to(dist_util.dev())
+
+ logger.log("training...")
+ TrainLoop(
+ rec_model=auto_encoder,
+ # encoder,
+ # decoder
+ loss_class=loss_class,
+ # diffusion=diffusion,
+ data=data,
+ eval_data=eval_data,
+ # data=batch,
+ batch_size=args.batch_size,
+ microbatch=args.microbatch,
+ lr=args.lr,
+ ema_rate=args.ema_rate,
+ log_interval=args.log_interval,
+ save_interval=args.save_interval,
+ resume_checkpoint=args.resume_checkpoint,
+ use_fp16=args.use_fp16,
+ fp16_scale_growth=args.fp16_scale_growth,
+ weight_decay=args.weight_decay,
+ lr_anneal_steps=args.lr_anneal_steps,
+ eval_interval=args.eval_interval,
+ ).run_loop() # ! overfitting
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ num_workers=4,
+ local_rank=0,
+ gpus=1,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=8,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=10,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ eval_interval=2500,
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"
+ # ] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ opts = args
+
+ args.rendering_kwargs = rendering_options_defaults(opts)
+
+ # Launch processes.
+ print('Launching processes...')
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+ dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/scripts/triplane_rec_train.py b/scripts/triplane_rec_train.py
new file mode 100644
index 0000000000000000000000000000000000000000..d35d2d5e09fbf2e25e1e1c508901cc8f345f32ba
--- /dev/null
+++ b/scripts/triplane_rec_train.py
@@ -0,0 +1,174 @@
+"""
+Train a diffusion model on images.
+"""
+import sys
+import os
+sys.path.append('.')
+
+import torch as th
+import torch.multiprocessing as mp
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+from nsr.train_util import TrainLoop3DRec as TrainLoop
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, create_Triplane, loss_defaults
+from datasets.shapenet import load_data, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from pdb import set_trace as st
+import json
+
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default
+
+import random
+import traceback
+import json
+import sys
+import os
+
+def training_loop(args):
+ # dist_util.setup_dist(rank, master_addr, master_port, args.gpus)
+ dist_util.setup_dist(args)
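+    # initialise torch.distributed; LOCAL_RANK is supplied by the launcher and read in __main__ below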
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ # print(args)
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ logger.log("creating data loader...")
+ # TODO, load shapenet data
+ # data = load_data(
+ data = load_memory_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ )
+ eval_data = load_eval_data(
+ file_path=args.data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True # for evaluation
+ )
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+ logger.log("creating encoder and NSR decoder...")
+
+
+    auto_encoder = create_Triplane(  # basically overfitting a triplane
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ # auto_encoder = create_3DAE_model(
+ # **args_to_dict(args,
+ # encoder_and_nsr_defaults().keys()))
+ auto_encoder.to(dist_util.dev())
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+
+    opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ loss_class = E3DGELossClass(dist_util.dev(), opt).to(dist_util.dev())
+
+ logger.log("training...")
+ TrainLoop(
+ rec_model=auto_encoder,
+ # encoder,
+ # decoder
+ loss_class=loss_class,
+ # diffusion=diffusion,
+ data=data,
+ eval_data=eval_data,
+ # data=batch,
+ batch_size=args.batch_size,
+ microbatch=args.microbatch,
+ lr=args.lr,
+ ema_rate=args.ema_rate,
+ log_interval=args.log_interval,
+ save_interval=args.save_interval,
+ resume_checkpoint=args.resume_checkpoint,
+ use_fp16=args.use_fp16,
+ fp16_scale_growth=args.fp16_scale_growth,
+ weight_decay=args.weight_decay,
+ lr_anneal_steps=args.lr_anneal_steps,
+ eval_interval=5000
+ ).run_loop() # ! overfitting
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ num_workers=4,
+ local_rank=0,
+ gpus=1,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=8,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=10,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+
+if __name__ == "__main__":
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"
+ # ] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ opts = args
+
+ args.rendering_kwargs = rendering_options_defaults(opts)
+
+ # Launch processes.
+ print('Launching processes...')
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+        dist_util.cleanup()  # clean up the port and socket on Ctrl+C
diff --git a/scripts/vit_triplane_cldm_train.py b/scripts/vit_triplane_cldm_train.py
new file mode 100644
index 0000000000000000000000000000000000000000..35045dbeb5311fe46fb3cae7b3ca3be711679865
--- /dev/null
+++ b/scripts/vit_triplane_cldm_train.py
@@ -0,0 +1,367 @@
+"""
+Train a diffusion model on images.
+"""
+import json
+import sys
+import os
+
+sys.path.append('.')
+
+# from dnnlib import EasyDict
+import traceback
+
+import torch as th
+import torch.multiprocessing as mp
+import torch.distributed as dist
+import numpy as np
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.resample import create_named_schedule_sampler
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+ continuous_diffusion_defaults,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+)
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+# from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default
+from datasets.shapenet import load_data, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from torch_utils import legacy, misc
+from torch.utils.data import Subset
+from pdb import set_trace as st
+
+from dnnlib.util import EasyDict, InfiniteSampler
+# from .vit_triplane_train_FFHQ import init_dataset_kwargs
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+# from torch.utils.tensorboard import SummaryWriter
+
+SEED = 0
+
+
+def training_loop(args):
+ # def training_loop(args):
+ logger.log("dist setup...")
+
+ th.cuda.set_device(
+ args.local_rank) # set this line to avoid extra memory on rank 0
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ dist_util.setup_dist(args)
+
+ # st() # mark
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating ViT encoder and NSR decoder...")
+ # st() # mark
+ device = dist_util.dev()
+
+ args.img_size = [args.image_size_encoder]
+
+ logger.log("creating model and diffusion...")
+ # * set denoise model args
+
+ if args.denoise_in_channels == -1:
+ args.diffusion_input_size = args.image_size_encoder
+ args.denoise_in_channels = args.out_chans
+ args.denoise_out_channels = args.out_chans
+ else:
+ assert args.denoise_out_channels != -1
+
+ # args.image_size = args.image_size_encoder # 224, follow the triplane size
+
+ # if args.diffusion_input_size == -1:
+ # else:
+ # args.image_size = args.diffusion_input_size
+
+ denoise_model, diffusion = create_model_and_diffusion(
+ **args_to_dict(args,
+ model_and_diffusion_defaults().keys()))
+ denoise_model.to(dist_util.dev())
+ denoise_model.train()
+
+ opts = eg3d_options_default()
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! close noise injection? since noise_mode='none' in eg3d
+
+ logger.log("creating encoder and NSR decoder...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ auto_encoder.to(device)
+ auto_encoder.eval()
+
+ # * load G_ema modules into autoencoder
+ # * clone G_ema.decoder to auto_encoder triplane
+ # logger.log("AE triplane decoder reuses G_ema decoder...")
+ # auto_encoder.decoder.register_buffer('w_avg', G_ema.backbone.mapping.w_avg)
+
+ # auto_encoder.decoder.triplane_decoder.decoder.load_state_dict( # type: ignore
+ # G_ema.decoder.state_dict()) # type: ignore
+
+    # setting requires_grad=False this way suppresses the DDP forward no-grad error.
+ logger.log("freeze triplane decoder...")
+ for param in auto_encoder.decoder.triplane_decoder.parameters(
+ ): # type: ignore
+ # for param in auto_encoder.decoder.triplane_decoder.decoder.parameters(): # type: ignore
+ param.requires_grad_(False)
+
+ # if args.sr_training:
+
+ # logger.log("AE triplane decoder reuses G_ema SR module...")
+ # # auto_encoder.decoder.triplane_decoder.superresolution.load_state_dict( # type: ignore
+ # # G_ema.superresolution.state_dict()) # type: ignore
+
+ # # set grad=False in this manner suppresses the DDP forward no grad error.
+ # logger.log("freeze SR module...")
+ # for param in auto_encoder.decoder.superresolution.parameters(): # type: ignore
+ # param.requires_grad_(False)
+
+ # # del G_ema
+ # th.cuda.empty_cache()
+
+ if args.cfg in ('afhq', 'ffhq'):
+
+ if args.sr_training:
+
+ logger.log("AE triplane decoder reuses G_ema SR module...")
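+            # NOTE: assumes a pre-trained EG3D generator G_ema is already in scope; it is not constructed in this script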
+ auto_encoder.decoder.triplane_decoder.superresolution.load_state_dict( # type: ignore
+ G_ema.superresolution.state_dict()) # type: ignore
+
+            # setting requires_grad=False this way suppresses the DDP forward no-grad error.
+ for param in auto_encoder.decoder.triplane_decoder.superresolution.parameters(
+ ): # type: ignore
+ param.requires_grad_(False)
+
+ # ! load data
+ logger.log("creating eg3d data loader...")
+ training_set_kwargs, dataset_name = init_dataset_kwargs(
+ data=args.data_dir,
+ class_name='datasets.eg3d_dataset.ImageFolderDataset'
+ ) # only load pose here
+ # if args.cond and not training_set_kwargs.use_labels:
+ # raise Exception('check here')
+
+ # training_set_kwargs.use_labels = args.cond
+ training_set_kwargs.use_labels = True
+ training_set_kwargs.xflip = True
+ training_set_kwargs.random_seed = SEED
+ # desc = f'{args.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}'
+
+ # * construct ffhq/afhq dataset
+ training_set = dnnlib.util.construct_class_by_name(
+ **training_set_kwargs) # subclass of training.dataset.Dataset
+
+ training_set_sampler = InfiniteSampler(
+ dataset=training_set,
+ rank=dist_util.get_rank(),
+ num_replicas=dist_util.get_world_size(),
+ seed=SEED)
+
+ data = iter(
+ th.utils.data.DataLoader(
+ dataset=training_set,
+ sampler=training_set_sampler,
+ batch_size=args.batch_size,
+ pin_memory=True,
+ num_workers=args.num_workers,
+ ))
+ # prefetch_factor=2))
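+        # iter() lets the trainer pull batches via next(data); InfiniteSampler makes the stream endless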
+
+ eval_data = th.utils.data.DataLoader(dataset=Subset(
+ training_set, np.arange(10)),
+ batch_size=args.eval_batch_size,
+ num_workers=1)
+
+ else:
+
+ logger.log("creating data loader...")
+ # TODO, load shapenet data
+ # data = load_data(
+ # st() mark
+ if args.overfitting:
+ logger.log("create overfitting memory dataset")
+ data = load_memory_data(
+ file_path=args.eval_data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True # for evaluation
+ )
+ else:
+ logger.log("create all instances dataset")
+ # st() mark
+ data = load_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True,
+ preprocess=auto_encoder.preprocess, # clip
+ dataset_size=args.dataset_size,
+ # load_depth=True # for evaluation
+ )
+ # st() mark
+ eval_data = load_eval_data(
+ file_path=args.eval_data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True # for evaluation
+ )
+
+ # let all processes sync up before starting with a new epoch of training
+
+ if dist_util.get_rank() == 0:
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ args.schedule_sampler = create_named_schedule_sampler(
+ args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ loss_class = E3DGELossClass(device, opt).to(device)
+
+ logger.log("training...")
+
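+    # pick the trainer class by name; implementations live in nsr/ and nsr.lsgm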
+ TrainLoop = {
+ 'adm': nsr.TrainLoop3DDiffusion,
+ 'dit': nsr.TrainLoop3DDiffusionDiT,
+ 'ssd': nsr.TrainLoop3DDiffusionSingleStage,
+ # 'ssd_cvD': nsr.TrainLoop3DDiffusionSingleStagecvD,
+ 'ssd_cvD_sds': nsr.TrainLoop3DDiffusionSingleStagecvDSDS,
+ 'ssd_cvd_sds_no_separate_sds_step':
+ nsr.TrainLoop3DDiffusionSingleStagecvDSDS_sdswithrec,
+ 'vpsde_lsgm_noD': nsr.lsgm.TrainLoop3DDiffusionLSGM_noD, # use vpsde
+ # 'vpsde_lsgm': nsr.TrainLoop3DDiffusionLSGM, # use vpsde
+ # 'vpsde': nsr.TrainLoop3DDiffusion_vpsde,
+ }[args.trainer_name]
+
+ if 'vpsde' in args.trainer_name:
+ sde_diffusion = make_sde_diffusion(
+ dnnlib.EasyDict(
+ args_to_dict(args,
+ continuous_diffusion_defaults().keys())))
+ assert args.mixed_prediction, 'enable mixed_prediction by default'
+ logger.log('create VPSDE diffusion.')
+ else:
+ sde_diffusion = None
+
+ dist_util.synchronize()
+
+ TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ **vars(args)).run_loop()
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ dataset_size=-1,
+ diffusion_input_size=-1,
+ trainer_name='adm',
+ use_amp=False,
+ triplane_scaling_divider=1.0, # divide by this value
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ schedule_sampler="uniform",
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ resume_checkpoint_EG3D="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ load_submodule_name='', # for loading pretrained auto_encoder model
+ ignore_resume_opt=False,
+ # freeze_ae=False,
+ denoised_ae=True,
+ )
+
+ defaults.update(model_and_diffusion_defaults())
+ defaults.update(continuous_diffusion_defaults())
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ["TORCH_CPP_LOG_LEVEL"] = "INFO"
+ # os.environ["NCCL_DEBUG"] = "INFO"
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+    # opts = dnnlib.EasyDict(vars(args)) # compatible with the original triplane settings
+ # opts = args
+ args.rendering_kwargs = rendering_options_defaults(args)
+
+ # Launch processes.
+ logger.log('Launching processes...')
+
+ logger.log('Available devices ', th.cuda.device_count())
+ logger.log('Current cuda device ', th.cuda.current_device())
+ # logger.log('GPU Device name:', th.cuda.get_device_name(th.cuda.current_device()))
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+        dist_util.cleanup()  # clean up the port and socket on Ctrl+C
diff --git a/scripts/vit_triplane_diffusion_sample.py b/scripts/vit_triplane_diffusion_sample.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4c2c739bcb26512d86151ea1181dea8faef98c7
--- /dev/null
+++ b/scripts/vit_triplane_diffusion_sample.py
@@ -0,0 +1,387 @@
+"""
+Generate a large batch of image samples from a model and save them as a large
+numpy array. This can be used to produce samples for FID evaluation.
+"""
+
+import argparse
+import json
+import sys
+import os
+
+sys.path.append('.')
+
+from pdb import set_trace as st
+import imageio
+import numpy as np
+import torch as th
+import torch.distributed as dist
+
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ NUM_CLASSES,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+ add_dict_to_argparser,
+ args_to_dict,
+ continuous_diffusion_defaults,
+ control_net_defaults,
+)
+
+th.backends.cuda.matmul.allow_tf32 = True
+th.backends.cudnn.allow_tf32 = True
+th.backends.cudnn.enabled = True
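+# TF32 trades a little precision for large matmul/conv speedups on Ampere+ GPUs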
+
+from pathlib import Path
+
+from tqdm import tqdm, trange
+import dnnlib
+from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, AE_with_Diffusion, rendering_options_defaults, eg3d_options_default, dataset_defaults
+
+from datasets.shapenet import load_eval_data
+from torch.utils.data import Subset
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+SEED = 0
+
+
+def main(args):
+
+ # args.rendering_kwargs = rendering_options_defaults(args)
+
+ dist_util.setup_dist(args)
+ logger.configure(dir=args.logdir)
+
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ # * set denoise model args
+ logger.log("creating model and diffusion...")
+ args.img_size = [args.image_size_encoder]
+ # ! no longer required for LDM
+ # args.denoise_in_channels = args.out_chans
+ # args.denoise_out_channels = args.out_chans
+ args.image_size = args.image_size_encoder # 224, follow the triplane size
+
+ denoise_model, diffusion = create_model_and_diffusion(
+ **args_to_dict(args,
+ model_and_diffusion_defaults().keys()))
+
+ if 'cldm' in args.trainer_name:
+ assert isinstance(denoise_model, tuple)
+ denoise_model, controlNet = denoise_model
+
+ controlNet.to(dist_util.dev())
+ controlNet.train()
+ else:
+ controlNet = None
+
+ opts = eg3d_options_default()
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! close noise injection? since noise_mode='none' in eg3d
+
+ # denoise_model.load_state_dict(
+ # dist_util.load_state_dict(args.ddpm_model_path, map_location="cpu"))
+ denoise_model.to(dist_util.dev())
+ if args.use_fp16:
+ denoise_model.convert_to_fp16()
+ denoise_model.eval()
+
+ # * auto-encoder reconstruction model
+ logger.log("creating 3DAE...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ # logger.log("AE triplane decoder reuses G_ema decoder...")
+ # auto_encoder.decoder.register_buffer('w_avg', G_ema.backbone.mapping.w_avg)
+
+ # print(auto_encoder.decoder.w_avg.shape) # [512]
+
+ # auto_encoder.load_state_dict(
+ # dist_util.load_state_dict(args.rec_model_path, map_location="cpu"))
+
+ auto_encoder.to(dist_util.dev())
+ auto_encoder.eval()
+
+ # TODO, how to set the scale?
+ logger.log("create dataset")
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ if args.cfg in ('afhq', 'ffhq'):
+ # ! load data
+ logger.log("creating eg3d data loader...")
+ training_set_kwargs, dataset_name = init_dataset_kwargs(
+ data=args.data_dir,
+ class_name='datasets.eg3d_dataset.ImageFolderDataset'
+ ) # only load pose here
+ # if args.cond and not training_set_kwargs.use_labels:
+ # raise Exception('check here')
+
+ # training_set_kwargs.use_labels = args.cond
+ training_set_kwargs.use_labels = True
+ training_set_kwargs.xflip = True
+ training_set_kwargs.random_seed = SEED
+ # desc = f'{args.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}'
+
+ # * construct ffhq/afhq dataset
+ training_set = dnnlib.util.construct_class_by_name(
+ **training_set_kwargs) # subclass of training.dataset.Dataset
+
+ # training_set_sampler = InfiniteSampler(
+ # dataset=training_set,
+ # rank=dist_util.get_rank(),
+ # num_replicas=dist_util.get_world_size(),
+ # seed=SEED)
+
+ # data = iter(
+ # th.utils.data.DataLoader(dataset=training_set,
+ # sampler=training_set_sampler,
+ # batch_size=args.batch_size,
+ # pin_memory=True,
+ # num_workers=args.num_workers,))
+ # # prefetch_factor=2))
+
+ eval_data = th.utils.data.DataLoader(dataset=Subset(
+ training_set, np.arange(25)),
+ batch_size=args.eval_batch_size,
+ num_workers=1)
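+        # a fixed 25-instance subset keeps evaluation deterministic across runs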
+
+ else:
+
+ logger.log("creating data loader...")
+
+ if args.use_wds:
+ if args.eval_data_dir == 'NONE':
+ with open(args.eval_shards_lst) as f:
+ eval_shards_lst = [url.strip() for url in f.readlines()]
+ else:
+ eval_shards_lst = args.eval_data_dir # auto expanded
+
+ eval_data = load_wds_data(
+ eval_shards_lst, args.image_size, args.image_size_encoder,
+ args.eval_batch_size, args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ else:
+ eval_data = load_eval_data(
+ file_path=args.eval_data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ TrainLoop = {
+ 'adm': nsr.TrainLoop3DDiffusion,
+ 'dit': nsr.TrainLoop3DDiffusionDiT,
+ # lsgm
+ 'ssd': nsr.TrainLoop3DDiffusionSingleStage,
+ # 'ssd_cvD': nsr.TrainLoop3DDiffusionSingleStagecvD,
+ 'ssd_cvD_sds': nsr.TrainLoop3DDiffusionSingleStagecvDSDS,
+ 'ssd_cvd_sds_no_separate_sds_step':
+ nsr.TrainLoop3DDiffusionSingleStagecvDSDS_sdswithrec,
+ 'vpsde_lsgm_noD': nsr.lsgm.TrainLoop3DDiffusionLSGM_noD, # use vpsde
+ 'vpsde_cldm': nsr.lsgm.TrainLoop3DDiffusionLSGM_Control,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_cvD,
+ 'vpsde_lsgm_joint_noD':
+ nsr.lsgm.TrainLoop3DDiffusionLSGMJointnoD, # use vpsde
+ 'vpsde_lsgm_joint_noD_ponly':
+ nsr.lsgm.TrainLoop3DDiffusionLSGMJointnoD_ponly, # use vpsde
+ 'vpsde_crossattn': nsr.lsgm.TrainLoop3DDiffusionLSGM_crossattn,
+ 'vpsde_ldm': nsr.lsgm.TrainLoop3D_LDM,
+ 'sgm_legacy':
+ nsr.lsgm.sgm_DiffusionEngine.DiffusionEngineLSGM,
+ }[args.trainer_name]
+
+ # continuous
+ if 'vpsde' in args.trainer_name:
+ sde_diffusion = make_sde_diffusion(
+ dnnlib.EasyDict(
+ args_to_dict(args,
+ continuous_diffusion_defaults().keys())))
+ # assert args.mixed_prediction, 'enable mixed_prediction by default'
+ logger.log('create VPSDE diffusion.')
+ else:
+ sde_diffusion = None
+
+ # if 'cldm' in args.trainer_name:
+ # assert isinstance(denoise_model, tuple)
+ # denoise_model, controlNet = denoise_model
+
+ # controlNet.to(dist_util.dev())
+ # controlNet.train()
+ # else:
+ # controlNet = None
+ auto_encoder.decoder.rendering_kwargs = args.rendering_kwargs
+
+ training_loop_class = TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model,
+ control_model=controlNet,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=None,
+ data=None,
+ eval_data=eval_data,
+ **vars(args))
+
+ logger.log("sampling...")
+ dist_util.synchronize()
+
+ # all_images = []
+ # all_labels = []
+ # while len(all_images) * args.batch_size < args.num_samples:
+
+ if dist_util.get_rank() == 0:
+
+ (Path(logger.get_dir()) / 'FID_Cals').mkdir(exist_ok=True,
+ parents=True)
+
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ # ! use pre-saved camera pose
+ camera = th.load('eval_pose.pt', map_location=dist_util.dev())[:]
+
+ # for sample_idx in trange(args.num_samples):
+ model_kwargs = {}
+
+ # if args.class_cond:
+ # classes = th.randint(low=0,
+ # high=NUM_CLASSES,
+ # size=(args.batch_size, ),
+ # device=dist_util.dev())
+ # model_kwargs["y"] = classes
+ # training_loop_class.step = sample_idx # save to different position
+ # if args.create_controlnet or 'crossattn' in args.trainer_name:
+
+ training_loop_class.eval_cldm(
+ prompt=args.prompt,
+            unconditional_guidance_scale=args.unconditional_guidance_scale,
+ use_ddim=args.use_ddim,
+ save_img=args.save_img,
+ use_train_trajectory=args.use_train_trajectory,
+ camera=camera,
+ num_instances=args.num_instances,
+ num_samples=args.num_samples,
+ # training_loop_class.rec_model,
+ # training_loop_class.ddpm_model
+ )
+
+
+ dist.barrier()
+ logger.log("sampling complete")
+
+
+def create_argparser():
+ defaults = dict(
+ image_size_encoder=224,
+ triplane_scaling_divider=1.0, # divide by this value
+ diffusion_input_size=-1,
+ trainer_name='adm',
+ use_amp=False,
+ # triplane_scaling_divider=1.0, # divide by this value
+
+ # * sampling flags
+ clip_denoised=False,
+ num_samples=10,
+ num_instances=10, # for i23d, loop different condition
+ use_ddim=False,
+ ddpm_model_path="",
+ cldm_model_path="",
+ rec_model_path="",
+
+ # * eval logging flags
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ data_dir="",
+ eval_data_dir="",
+ eval_batch_size=1,
+ num_workers=1,
+
+ # * training flags for loading TrainingLoop class
+ overfitting=False,
+ image_size=128,
+ iterations=150000,
+ schedule_sampler="uniform",
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ resume_cldm_checkpoint="",
+ resume_checkpoint_EG3D="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ load_submodule_name='', # for loading pretrained auto_encoder model
+ ignore_resume_opt=False,
+ freeze_ae=False,
+ denoised_ae=True,
+ # inference prompt
+ prompt="a red chair",
+ interval=1,
+ save_img=False,
+        use_train_trajectory=False,  # use train trajectory to sample images for fid calculation
+ unconditional_guidance_scale=1.0,
+ use_eos_feature=False,
+ )
+
+ defaults.update(model_and_diffusion_defaults())
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+ defaults.update(continuous_diffusion_defaults())
+ defaults.update(control_net_defaults())
+ defaults.update(dataset_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+
+ # os.environ["TORCH_CPP_LOG_LEVEL"] = "INFO"
+ # os.environ["NCCL_DEBUG"] = "INFO"
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ args = create_argparser().parse_args()
+
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ args.rendering_kwargs = rendering_options_defaults(args)
+
+ main(args)
diff --git a/scripts/vit_triplane_diffusion_train.py b/scripts/vit_triplane_diffusion_train.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c945f5bae5218ff17ff395bf4f4d4dce3048a71
--- /dev/null
+++ b/scripts/vit_triplane_diffusion_train.py
@@ -0,0 +1,514 @@
+"""
+Train a diffusion model on images.
+"""
+import json
+import sys
+import os
+
+sys.path.append('.')
+
+# from dnnlib import EasyDict
+import traceback
+
+import torch as th
+# from xformers.triton import FusedLayerNorm as LayerNorm  # unused import; commented out to avoid a hard xformers dependency
+import torch.multiprocessing as mp
+import torch.distributed as dist
+import numpy as np
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.resample import create_named_schedule_sampler
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+ continuous_diffusion_defaults,
+ control_net_defaults,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+)
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+# from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+
+from datasets.eg3d_dataset import LMDBDataset_MV_Compressed_eg3d
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default, dataset_defaults
+from datasets.shapenet import load_data, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from torch_utils import legacy, misc
+from torch.utils.data import Subset
+from pdb import set_trace as st
+
+from dnnlib.util import EasyDict, InfiniteSampler
+# from .vit_triplane_train_FFHQ import init_dataset_kwargs
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+th.backends.cudnn.enabled = True # https://zhuanlan.zhihu.com/p/635824460
+th.backends.cudnn.benchmark = True
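+# benchmark mode autotunes conv algorithms; it pays off when input shapes stay fixed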
+
+# from torch.utils.tensorboard import SummaryWriter
+
+SEED = 0
+
+
+def training_loop(args):
+ # def training_loop(args):
+ logger.log("dist setup...")
+ # th.multiprocessing.set_start_method('spawn')
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ # st()
+
+ th.cuda.set_device(
+ args.local_rank) # set this line to avoid extra memory on rank 0
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ dist_util.setup_dist(args)
+
+ # st() # mark
+
+ th.backends.cuda.matmul.allow_tf32 = args.allow_tf32
+ th.backends.cudnn.allow_tf32 = args.allow_tf32
+ # st()
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating ViT encoder and NSR decoder...")
+ # st() # mark
+ device = dist_util.dev()
+
+ args.img_size = [args.image_size_encoder]
+
+ logger.log("creating model and diffusion...")
+ # * set denoise model args
+
+ if args.denoise_in_channels == -1:
+ args.diffusion_input_size = args.image_size_encoder
+ args.denoise_in_channels = args.out_chans
+ args.denoise_out_channels = args.out_chans
+ else:
+ assert args.denoise_out_channels != -1
+
+ # args.image_size = args.image_size_encoder # 224, follow the triplane size
+
+ # if args.diffusion_input_size == -1:
+ # else:
+ # args.image_size = args.diffusion_input_size
+
+ if args.pred_type == 'v': # for lsgm training
+        assert args.predict_v  # required for DDIM sampling
+
+ # if not args.create_dit:
+
+ denoise_model, diffusion = create_model_and_diffusion(
+ **args_to_dict(args,
+ model_and_diffusion_defaults().keys()))
+
+ opts = eg3d_options_default()
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! close noise injection? since noise_mode='none' in eg3d
+
+ logger.log("creating encoder and NSR decoder...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ auto_encoder.to(device)
+ auto_encoder.eval()
+
+ # * load G_ema modules into autoencoder
+ # * clone G_ema.decoder to auto_encoder triplane
+ # logger.log("AE triplane decoder reuses G_ema decoder...")
+ # auto_encoder.decoder.register_buffer('w_avg', G_ema.backbone.mapping.w_avg)
+
+ # auto_encoder.decoder.triplane_decoder.decoder.load_state_dict( # type: ignore
+ # G_ema.decoder.state_dict()) # type: ignore
+
+    # setting requires_grad=False this way suppresses the DDP forward no-grad error.
+
+ # if args.sr_training:
+
+ # logger.log("AE triplane decoder reuses G_ema SR module...")
+ # # auto_encoder.decoder.triplane_decoder.superresolution.load_state_dict( # type: ignore
+ # # G_ema.superresolution.state_dict()) # type: ignore
+
+ # # set grad=False in this manner suppresses the DDP forward no grad error.
+ # logger.log("freeze SR module...")
+ # for param in auto_encoder.decoder.superresolution.parameters(): # type: ignore
+ # param.requires_grad_(False)
+
+ # # del G_ema
+ # th.cuda.empty_cache()
+
+ if args.freeze_triplane_decoder:
+ logger.log("freeze triplane decoder...")
+ for param in auto_encoder.decoder.triplane_decoder.parameters(
+ ): # type: ignore
+ # for param in auto_encoder.decoder.triplane_decoder.decoder.parameters(): # type: ignore
+ param.requires_grad_(False)
+
+ if args.cfg in ('afhq', 'ffhq'):
+
+ if args.sr_training:
+
+ logger.log("AE triplane decoder reuses G_ema SR module...")
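+            # NOTE: assumes a pre-trained EG3D generator G_ema is already in scope; it is not constructed in this script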
+ auto_encoder.decoder.triplane_decoder.superresolution.load_state_dict( # type: ignore
+ G_ema.superresolution.state_dict()) # type: ignore
+
+            # setting requires_grad=False this way suppresses the DDP forward no-grad error.
+ for param in auto_encoder.decoder.triplane_decoder.superresolution.parameters(
+ ): # type: ignore
+ param.requires_grad_(False)
+
+ # ! load data
+ if args.use_lmdb:
+ logger.log("creating LMDB eg3d data loader...")
+ training_set = LMDBDataset_MV_Compressed_eg3d(
+ args.data_dir,
+ args.image_size,
+ args.image_size_encoder,
+ )
+ else:
+ logger.log("creating eg3d data loader...")
+
+ training_set_kwargs, dataset_name = init_dataset_kwargs(
+ data=args.data_dir,
+ class_name='datasets.eg3d_dataset.ImageFolderDataset',
+ reso_gt=args.image_size) # only load pose here
+ # if args.cond and not training_set_kwargs.use_labels:
+ # raise Exception('check here')
+
+ # training_set_kwargs.use_labels = args.cond
+ training_set_kwargs.use_labels = True
+ training_set_kwargs.xflip = False
+ training_set_kwargs.random_seed = SEED
+ training_set_kwargs.max_size = args.dataset_size
+ # desc = f'{args.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}'
+
+ # * construct ffhq/afhq dataset
+ training_set = dnnlib.util.construct_class_by_name(
+ **training_set_kwargs) # subclass of training.dataset.Dataset
+
+ training_set_sampler = InfiniteSampler(
+ dataset=training_set,
+ rank=dist_util.get_rank(),
+ num_replicas=dist_util.get_world_size(),
+ seed=SEED)
+
+ data = iter(
+ th.utils.data.DataLoader(
+ dataset=training_set,
+ sampler=training_set_sampler,
+ batch_size=args.batch_size,
+ pin_memory=True,
+ num_workers=args.num_workers,
+ persistent_workers=args.num_workers > 0,
+ prefetch_factor=max(8 // args.batch_size, 2),
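+                # scale prefetch_factor inversely with batch size to keep a similar number of queued samples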
+ ))
+ # prefetch_factor=2))
+
+ eval_data = th.utils.data.DataLoader(dataset=Subset(
+ training_set, np.arange(8)),
+ batch_size=args.eval_batch_size,
+ num_workers=1)
+
+ else:
+
+ logger.log("creating data loader...")
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ # TODO, load shapenet data
+ # data = load_data(
+ # st() mark
+ # if args.overfitting:
+ # logger.log("create overfitting memory dataset")
+ # data = load_memory_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True # for evaluation
+ # )
+ # else:
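+        # webdataset mode: when data_dir is the sentinel 'NONE', shard URLs are read line-by-line from the shards_lst file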
+ if args.use_wds:
+ if args.data_dir == 'NONE':
+ with open(args.shards_lst) as f:
+ shards_lst = [url.strip() for url in f.readlines()]
+ data = load_wds_data(
+ shards_lst, args.image_size, args.image_size_encoder,
+ args.batch_size, args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ else:
+ data = load_wds_data(
+ args.data_dir, args.image_size, args.image_size_encoder,
+ args.batch_size, args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ # eval_data = load_wds_data(
+ # args.data_dir,
+ # args.image_size,
+ # args.image_size_encoder,
+ # args.eval_batch_size,
+ # args.num_workers,
+ # decode_encode_img_only=args.decode_encode_img_only,
+ # load_wds_diff=args.load_wds_diff)
+
+ if args.eval_data_dir == 'NONE':
+ with open(args.eval_shards_lst) as f:
+ eval_shards_lst = [url.strip() for url in f.readlines()]
+ else:
+ eval_shards_lst = args.eval_data_dir # auto expanded
+
+ eval_data = load_wds_data(
+ eval_shards_lst,
+ args.image_size,
+ args.image_size_encoder,
+ args.eval_batch_size,
+ args.num_workers,
+ plucker_embedding=args.plucker_embedding,
+ decode_encode_img_only=args.decode_encode_img_only,
+ mv_input=args.mv_input,
+ load_wds_diff=False,
+ load_instance=True)
+
+ else:
+ logger.log("create all instances dataset")
+ # st() mark
+ data = load_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys())
+ # load_depth=args.load_depth,
+ # preprocess=auto_encoder.preprocess, # clip
+ # dataset_size=args.dataset_size,
+ # use_lmdb=args.use_lmdb,
+ # trainer_name=args.trainer_name,
+ # load_depth=True # for evaluation
+ )
+ eval_data = data
+ # eval_data = load_eval_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ # interval=args.interval,
+ # use_lmdb=args.use_lmdb,
+ # )
+
+ # let all processes sync up before starting with a new epoch of training
+
+ if dist_util.get_rank() == 0:
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
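+    # swap the string flag for the sampler object that draws diffusion timesteps during training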
+ args.schedule_sampler = create_named_schedule_sampler(
+ args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ loss_class = E3DGELossClass(device, opt).to(device)
+
+ logger.log("training...")
+
+ TrainLoop = {
+ 'adm':
+ nsr.TrainLoop3DDiffusion,
+ 'dit':
+ nsr.TrainLoop3DDiffusionDiT,
+ 'ssd':
+ nsr.TrainLoop3DDiffusionSingleStage,
+ # 'ssd_cvD': nsr.TrainLoop3DDiffusionSingleStagecvD,
+ 'ssd_cvD_sds':
+ nsr.TrainLoop3DDiffusionSingleStagecvDSDS,
+ 'ssd_cvd_sds_no_separate_sds_step':
+ nsr.TrainLoop3DDiffusionSingleStagecvDSDS_sdswithrec,
+ 'vpsde_lsgm_noD':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_noD, # use vpsde
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_cvD,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD_scaling':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_cvD_scaling,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_weightingv0':
+ nsr.lsgm.
+ TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_weightingv0,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED':
+ nsr.lsgm.
+ TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED,
+ 'vpsde_TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED_nv':
+ nsr.lsgm.
+ TrainLoop3DDiffusionLSGM_cvD_scaling_lsgm_unfreezeD_iterativeED_nv,
+ 'vpsde_lsgm_joint_noD':
+ nsr.lsgm.TrainLoop3DDiffusionLSGMJointnoD, # use vpsde
+ 'vpsde_lsgm_joint_noD_ponly':
+ nsr.lsgm.TrainLoop3DDiffusionLSGMJointnoD_ponly, # use vpsde
+ # control
+ 'vpsde_cldm':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_Control,
+ 'vpsde_crossattn':
+ nsr.lsgm.TrainLoop3DDiffusionLSGM_crossattn,
+ 'vpsde_crossattn_cldm':
+ nsr.lsgm.crossattn_cldm.TrainLoop3DDiffusionLSGM_crossattn_controlNet,
+ 'vpsde_ldm':
+ nsr.lsgm.TrainLoop3D_LDM,
+ 'sgm_legacy':
+ nsr.lsgm.sgm_DiffusionEngine.DiffusionEngineLSGM,
+ }[args.trainer_name]
+
+ if 'vpsde' in args.trainer_name:
+ sde_diffusion = make_sde_diffusion(
+ dnnlib.EasyDict(
+ args_to_dict(args,
+ continuous_diffusion_defaults().keys())))
+ # assert args.mixed_prediction, 'enable mixed_prediction by default'
+ logger.log('create VPSDE diffusion.')
+ else:
+ sde_diffusion = None
+
+ if 'cldm' in args.trainer_name:
+ assert isinstance(denoise_model, tuple)
+ denoise_model, controlNet = denoise_model
+
+ controlNet.to(dist_util.dev())
+ controlNet.train()
+ else:
+ controlNet = None
+
+ # st()
+ denoise_model.to(dist_util.dev())
+ denoise_model.train()
+
+ auto_encoder.decoder.rendering_kwargs = args.rendering_kwargs
+ TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model,
+ control_model=controlNet,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ **vars(args)).run_loop()
+
+ dist_util.synchronize()
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ dataset_size=-1,
+ diffusion_input_size=-1,
+ trainer_name='adm',
+ use_amp=False,
+ train_vae=True, # jldm?
+ triplane_scaling_divider=1.0, # divide by this value
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ schedule_sampler="uniform",
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ resume_checkpoint_EG3D="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ load_depth=True, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ load_submodule_name='', # for loading pretrained auto_encoder model
+ ignore_resume_opt=False,
+ # freeze_ae=False,
+ denoised_ae=True,
+ diffusion_ce_anneal=False,
+ use_lmdb=False,
+ interval=1,
+ freeze_triplane_decoder=False,
+ objv_dataset=False,
+ use_eos_feature=False,
+ clip_grad_throld=1.0,
+ allow_tf32=True,
+ )
+
+ defaults.update(model_and_diffusion_defaults())
+ defaults.update(continuous_diffusion_defaults())
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(dataset_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+ defaults.update(control_net_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ["TORCH_CPP_LOG_LEVEL"] = "INFO"
+ # os.environ["NCCL_DEBUG"] = "INFO"
+ th.multiprocessing.set_start_method('spawn')
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+    # opts = dnnlib.EasyDict(vars(args)) # compatible with the original triplane settings
+ # opts = args
+ args.rendering_kwargs = rendering_options_defaults(args)
+
+ # Launch processes.
+ logger.log('Launching processes...')
+
+ logger.log('Available devices ', th.cuda.device_count())
+ logger.log('Current cuda device ', th.cuda.current_device())
+ # logger.log('GPU Device name:', th.cuda.get_device_name(th.cuda.current_device()))
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+        dist_util.cleanup()  # clean up the port and socket on Ctrl+C
diff --git a/scripts/vit_triplane_inference.py b/scripts/vit_triplane_inference.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b752a0734261d85e363b13f8865f095ee86f4d4
--- /dev/null
+++ b/scripts/vit_triplane_inference.py
@@ -0,0 +1,189 @@
+"""
+Evaluate a trained ViT triplane reconstruction model (runs eval_loop only).
+"""
+import sys
+import os
+
+sys.path.append('.')
+import torch.distributed as dist
+
+import torch as th
+import torch.multiprocessing as mp
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+from nsr.train_util import TrainLoop3DRec as TrainLoop
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults
+from datasets.shapenet import load_data, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from pdb import set_trace as st
+
+th.backends.cuda.matmul.allow_tf32 = True
+th.backends.cudnn.allow_tf32 = True
+th.backends.cudnn.enabled = True
+
+SEED = 0
+
+
+def training_loop(args):
+ # def training_loop(args):
+ dist_util.setup_dist(args)
+
+ # dist.init_process_group(backend='nccl', init_method='env://', rank=args.local_rank, world_size=th.cuda.device_count())
+ print(f"{args.local_rank=} init complete")
+ th.cuda.set_device(args.local_rank)
+
+ th.cuda.manual_seed_all(SEED)
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating data loader...")
+ # TODO, load shapenet data
+ # data = load_data(
+ # if args.overfitting:
+ # data = load_memory_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=args.depth_lambda > 0
+ # # load_depth=True # for evaluation
+ # )
+ # else:
+ # data = load_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=args.depth_lambda > 0
+ # # load_depth=True # for evaluation
+ # )
+ eval_data = load_eval_data(
+ file_path=args.data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True # for evaluation
+ )
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+ logger.log("creating encoder and NSR decoder...")
+ # device = dist_util.dev()
+ device = th.device("cuda", args.local_rank)
+
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+ auto_encoder.to(device)
+ auto_encoder.eval()
+
+ # dist_util.sync_params(auto_encoder.named_parameters())
+
+ # auto_encoder.train()
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # noise = th.randn(1, 14 * 14, 384).to(device) # B, L, C
+ # noise = th.randn(1, 3,224,224).to(device)
+ # img = auto_encoder(noise, th.zeros(1, 25).to(device))
+ # print(img['image'].shape)
+
+ # if dist_util.get_rank()==0:
+ # print(auto_encoder)
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ loss_class = E3DGELossClass(device, opt).to(device)
+
+ # logger.log("training...")
+ TrainLoop(
+ rec_model=auto_encoder,
+ loss_class=loss_class,
+ # diffusion=diffusion,
+ data=None,
+ eval_interval=-1,
+ eval_data=eval_data,
+ # data=batch,
+ batch_size=args.batch_size,
+ microbatch=args.microbatch,
+ lr=args.lr,
+ ema_rate=args.ema_rate,
+ log_interval=args.log_interval,
+ save_interval=args.save_interval,
+ resume_checkpoint=args.resume_checkpoint,
+ resume_cldm_checkpoint=args.resume_cldm_checkpoint,
+ use_fp16=args.use_fp16,
+ fp16_scale_growth=args.fp16_scale_growth,
+ weight_decay=args.weight_decay,
+ lr_anneal_steps=args.lr_anneal_steps,
+    ).eval_loop()  # evaluation only; no training steps
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=8,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=10,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+    os.environ["TORCH_CPP_LOG_LEVEL"] = "INFO"
+
+ master_addr = '127.0.0.1'
+ master_port = dist_util._find_free_port()
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+ args.master_addr = master_addr
+ args.master_port = master_port
+
+ # Launch processes.
+ print('Launching processes...')
+ training_loop(args)
diff --git a/scripts/vit_triplane_sit_sample.py b/scripts/vit_triplane_sit_sample.py
new file mode 100644
index 0000000000000000000000000000000000000000..33e8b0e72cb684fea410d4f5183bb03940869b7f
--- /dev/null
+++ b/scripts/vit_triplane_sit_sample.py
@@ -0,0 +1,398 @@
+"""
+Sample 3D assets from a trained flow-matching model and export the results.
+"""
+import json
+import sys
+import os
+
+sys.path.append('.')
+
+# from dnnlib import EasyDict
+import traceback
+
+import torch as th
+# from xformers.triton import FusedLayerNorm as LayerNorm
+import torch.multiprocessing as mp
+import torch.distributed as dist
+import numpy as np
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.resample import create_named_schedule_sampler
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+ continuous_diffusion_defaults,
+ control_net_defaults,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+)
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+# from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+
+from datasets.eg3d_dataset import LMDBDataset_MV_Compressed_eg3d
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default, dataset_defaults
+from datasets.shapenet import load_data, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from torch_utils import legacy, misc
+from torch.utils.data import Subset
+from pdb import set_trace as st
+
+from dnnlib.util import EasyDict, InfiniteSampler
+# from .vit_triplane_train_FFHQ import init_dataset_kwargs
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+th.backends.cudnn.enabled = True # https://zhuanlan.zhihu.com/p/635824460
+th.backends.cudnn.benchmark = True
+
+from transport import create_transport, Sampler
+from transport.train_utils import parse_transport_args
+from nsr.camera_utils import generate_input_camera, uni_mesh_path, sample_uniform_cameras_on_sphere
+
+# from torch.utils.tensorboard import SummaryWriter
+
+SEED = 0
+
+
+
+def training_loop(args):
+ # def training_loop(args):
+ logger.log("dist setup...")
+ # th.multiprocessing.set_start_method('spawn')
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ # st()
+
+ th.cuda.set_device(
+ args.local_rank) # set this line to avoid extra memory on rank 0
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ dist_util.setup_dist(args)
+
+ # st() # mark
+
+ th.backends.cuda.matmul.allow_tf32 = args.allow_tf32
+ th.backends.cudnn.allow_tf32 = args.allow_tf32
+ # st()
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating ViT encoder and NSR decoder...")
+ # st() # mark
+ device = dist_util.dev()
+
+ args.img_size = [args.image_size_encoder]
+
+ logger.log("creating model and diffusion...")
+ # * set denoise model args
+
+ if args.denoise_in_channels == -1:
+ args.diffusion_input_size = args.image_size_encoder
+ args.denoise_in_channels = args.out_chans
+ args.denoise_out_channels = args.out_chans
+ else:
+ assert args.denoise_out_channels != -1
+
+ # args.image_size = args.image_size_encoder # 224, follow the triplane size
+
+ # if args.diffusion_input_size == -1:
+ # else:
+ # args.image_size = args.diffusion_input_size
+
+ if args.pred_type == 'v': # for lsgm training
+        assert args.predict_v  # required for DDIM sampling
+
+ # if not args.create_dit:
+
+ denoise_model, diffusion = create_model_and_diffusion(
+ **args_to_dict(args,
+ model_and_diffusion_defaults().keys()))
+
+ opts = eg3d_options_default()
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! close noise injection? since noise_mode='none' in eg3d
+
+ logger.log("creating encoder and NSR decoder...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ auto_encoder.to(device)
+ auto_encoder.eval()
+
+ logger.log("creating data loader...")
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data, load_data_cls
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ if args.i23d:
+ data = load_eval_data(
+ file_path=args.eval_data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True, # for evaluation
+ preprocess=auto_encoder.preprocess,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+ else:
+ data = None # t23d sampling, only caption required
+
+ # eval_dataset = load_data_cls(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_latent=True,
+ # return_dataset=True,
+ # **args_to_dict(args,
+ # dataset_defaults().keys())
+ # )
+
+ eval_dataset = None
+
+
+ # let all processes sync up before starting with a new epoch of training
+
+ if dist_util.get_rank() == 0:
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ args.schedule_sampler = create_named_schedule_sampler(
+ args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ loss_class = E3DGELossClass(device, opt).to(device)
+
+ logger.log("training...")
+
+ TrainLoop = {
+ 'flow_matching':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine,
+ 'flow_matching_gs':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine_gs, # slightly modified sampling and rendering for gs
+ }[args.trainer_name]
+
+
+ # if 'vpsde' in args.trainer_name:
+ # sde_diffusion = make_sde_diffusion(
+ # dnnlib.EasyDict(
+ # args_to_dict(args,
+ # continuous_diffusion_defaults().keys())))
+ # # assert args.mixed_prediction, 'enable mixed_prediction by default'
+ # logger.log('create VPSDE diffusion.')
+ # else:
+ sde_diffusion = None
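+    # flow-matching trainers manage their own noise schedule, so no VPSDE diffusion object is built here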
+
+ # if 'cldm' in args.trainer_name:
+ # assert isinstance(denoise_model, tuple)
+ # denoise_model, controlNet = denoise_model
+
+ # controlNet.to(dist_util.dev())
+ # controlNet.train()
+ # else:
+ controlNet = None
+
+ # st()
+ denoise_model.to(dist_util.dev())
+ denoise_model.train()
+
+ auto_encoder.decoder.rendering_kwargs = args.rendering_kwargs
+
+ # camera = th.load('eval_pose.pt', map_location=dist_util.dev())[:]
+
+ # if fid
+
+ # '''
+ azimuths = []
+ elevations = []
+ frame_number = 10
+
+ for i in range(frame_number): # 1030 * 5 * 10, for FID 50K
+
+ azi, elevation = sample_uniform_cameras_on_sphere()
+ # azi, elevation = azi[0] / np.pi * 180, elevation[0] / np.pi * 180
+ azi, elevation = azi[0] / np.pi * 180, (elevation[0]-np.pi*0.5) / np.pi * 180 # [-0.5 pi, 0.5 pi]
+ azimuths.append(azi)
+ elevations.append(elevation)
+
+ azimuths = np.array(azimuths)
+ elevations = np.array(elevations)
+
+ # azimuths = np.array(list(range(0,360,30))).astype(float)
+ # frame_number = azimuths.shape[0]
+ # elevations = np.array([10]*azimuths.shape[0]).astype(float)
+
+ zero123pp_pose, _ = generate_input_camera(1.8, [[elevations[i], azimuths[i]] for i in range(frame_number)], fov=30)
+ K = th.Tensor([1.3889, 0.0000, 0.5000, 0.0000, 1.3889, 0.5000, 0.0000, 0.0000, 0.0039]).to(zero123pp_pose) # keeps the same
+ camera = th.cat([zero123pp_pose.reshape(frame_number,-1), K.unsqueeze(0).repeat(frame_number,1)], dim=-1)
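+    # assuming generate_input_camera returns 4x4 poses, each row packs 16 pose values + 9 intrinsics into a 25-dim camera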
+ # '''
+
+ # camera = uni_mesh_path(12, radius=2.0) # ! for exporting mesh
+
+ training_loop_class=TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model,
+ control_model=controlNet,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ # eval_data=None,
+ eval_data=eval_dataset, # return dataset
+ **vars(args))
+
+ if args.i23d:
+ # ! image-conditioned 3D generation
+ training_loop_class.eval_i23d_and_export(
+ prompt='',
+ save_img=args.save_img,
+ use_train_trajectory=args.use_train_trajectory,
+ camera=camera,
+ num_instances=args.num_instances,
+ num_samples=args.num_samples,
+ stage_1_output_dir=args.stage_1_output_dir,
+ export_mesh=args.export_mesh,
+ )
+
+
+ else:
+ # the script used in 3dtopia
+ with open('datasets/caption-forpaper.txt', 'r') as f:
+ all_prompts_available = [caption.strip() for caption in f.readlines()]
+
+ for prompt in all_prompts_available:
+
+ training_loop_class.eval_and_export(
+ prompt=prompt,
+ save_img=args.save_img,
+ use_train_trajectory=args.use_train_trajectory,
+ camera=camera,
+ num_instances=args.num_instances,
+ num_samples=args.num_samples,
+ stage_1_output_dir=args.stage_1_output_dir,
+ export_mesh=args.export_mesh,
+ )
+
+
+ dist_util.synchronize()
+ logger.log('sampling complete')
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ dataset_size=-1,
+ diffusion_input_size=-1,
+ trainer_name='adm',
+ use_amp=False,
+ train_vae=True, # jldm?
+ triplane_scaling_divider=1.0, # divide by this value
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ schedule_sampler="uniform",
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ resume_checkpoint_EG3D="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ load_depth=True, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ load_submodule_name='', # for loading pretrained auto_encoder model
+ ignore_resume_opt=False,
+ # freeze_ae=False,
+ denoised_ae=True,
+ diffusion_ce_anneal=False,
+ use_lmdb=False,
+ interval=1,
+ freeze_triplane_decoder=False,
+ objv_dataset=False,
+ use_eos_feature=False,
+ clip_grad_throld=1.0,
+ allow_tf32=True,
+ save_img=False,
+ use_train_trajectory=False, # use the train trajectory to sample images for fid calculation
+ unconditional_guidance_scale=1.0,
+ num_samples=10,
+ num_instances=10, # for i23d, loop different condition
+ )
+
+ defaults.update(model_and_diffusion_defaults())
+ defaults.update(continuous_diffusion_defaults())
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(dataset_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+ defaults.update(control_net_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ # ! add transport args
+ parse_transport_args(parser)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ["TORCH_CPP_LOG_LEVEL"] = "INFO"
+ # os.environ["NCCL_DEBUG"] = "INFO"
+ th.multiprocessing.set_start_method('spawn')
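+ # 'spawn' is required here: CUDA contexts cannot be safely inherited by fork-ed worker processes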
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ # opts = dnnlib.EasyDict(vars(args)) # compatible with the original triplane settings
+ # opts = args
+ args.rendering_kwargs = rendering_options_defaults(args)
+
+ # Launch processes.
+ logger.log('Launching processes...')
+
+ logger.log('Available devices ', th.cuda.device_count())
+ logger.log('Current cuda device ', th.cuda.current_device())
+ # logger.log('GPU Device name:', th.cuda.get_device_name(th.cuda.current_device()))
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+ dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/scripts/vit_triplane_sit_train.py b/scripts/vit_triplane_sit_train.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb6c142656b8e44b5a1ac4917a5f9d31a0173bfa
--- /dev/null
+++ b/scripts/vit_triplane_sit_train.py
@@ -0,0 +1,535 @@
+"""
+Train a diffusion model on images.
+"""
+import json
+import sys
+import os
+
+sys.path.append('.')
+
+# from dnnlib import EasyDict
+import traceback
+
+import torch as th
+
+# from xformers.triton import FusedLayerNorm as LayerNorm
+
+import torch.multiprocessing as mp
+import torch.distributed as dist
+import numpy as np
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.resample import create_named_schedule_sampler
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+ continuous_diffusion_defaults,
+ control_net_defaults,
+ model_and_diffusion_defaults,
+ create_model_and_diffusion,
+)
+from guided_diffusion.continuous_diffusion import make_diffusion as make_sde_diffusion
+import nsr
+import nsr.lsgm
+# from nsr.train_util_diffusion import TrainLoop3DDiffusion as TrainLoop
+
+from datasets.eg3d_dataset import LMDBDataset_MV_Compressed_eg3d
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default, dataset_defaults
+from datasets.shapenet import load_data, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+
+from torch_utils import legacy, misc
+from torch.utils.data import Subset
+from pdb import set_trace as st
+
+from dnnlib.util import EasyDict, InfiniteSampler
+# from .vit_triplane_train_FFHQ import init_dataset_kwargs
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+th.backends.cudnn.enabled = True # https://zhuanlan.zhihu.com/p/635824460
+th.backends.cudnn.benchmark = True
+
+from transport import create_transport, Sampler
+from transport.train_utils import parse_transport_args
+
+# from torch.utils.tensorboard import SummaryWriter
+
+SEED = 0
+
+
+def training_loop(args):
+ # def training_loop(args):
+ logger.log("dist setup...")
+ # th.multiprocessing.set_start_method('spawn')
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ # st()
+
+ th.cuda.set_device(
+ args.local_rank) # set this line to avoid extra memory on rank 0
+ th.cuda.empty_cache()
+
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+
+ dist_util.setup_dist(args)
+
+ # st() # mark
+
+ th.backends.cuda.matmul.allow_tf32 = args.allow_tf32
+ th.backends.cudnn.allow_tf32 = args.allow_tf32
+ # st()
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating ViT encoder and NSR decoder...")
+ # st() # mark
+ device = dist_util.dev()
+
+ args.img_size = [args.image_size_encoder]
+
+ logger.log("creating model and diffusion...")
+ # * set denoise model args
+
+ if args.denoise_in_channels == -1:
+ args.diffusion_input_size = args.image_size_encoder
+ args.denoise_in_channels = args.out_chans
+ args.denoise_out_channels = args.out_chans
+ else:
+ assert args.denoise_out_channels != -1
+
+ # args.image_size = args.image_size_encoder # 224, follow the triplane size
+
+ # if args.diffusion_input_size == -1:
+ # else:
+ # args.image_size = args.diffusion_input_size
+
+ if args.pred_type == 'v': # for lsgm training
+ assert args.predict_v # for DDIM sampling
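+ # v-parameterization (Salimans & Ho, 2022): the network predicts v = alpha_t * eps - sigma_t * x_0 instead of eps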
+
+ # if not args.create_dit:
+
+ denoise_model, diffusion = create_model_and_diffusion(
+ **args_to_dict(args,
+ model_and_diffusion_defaults().keys()))
+
+ opts = eg3d_options_default()
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! disable noise injection, since noise_mode='none' in eg3d
+
+ logger.log("creating encoder and NSR decoder...")
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+
+ auto_encoder.to(device)
+ auto_encoder.eval()
+
+ # * load G_ema modules into autoencoder
+ # * clone G_ema.decoder to auto_encoder triplane
+ # logger.log("AE triplane decoder reuses G_ema decoder...")
+ # auto_encoder.decoder.register_buffer('w_avg', G_ema.backbone.mapping.w_avg)
+
+ # auto_encoder.decoder.triplane_decoder.decoder.load_state_dict( # type: ignore
+ # G_ema.decoder.state_dict()) # type: ignore
+
+ # set grad=False in this manner suppresses the DDP forward no grad error.
+
+ # if args.sr_training:
+
+ # logger.log("AE triplane decoder reuses G_ema SR module...")
+ # # auto_encoder.decoder.triplane_decoder.superresolution.load_state_dict( # type: ignore
+ # # G_ema.superresolution.state_dict()) # type: ignore
+
+ # # set grad=False in this manner suppresses the DDP forward no grad error.
+ # logger.log("freeze SR module...")
+ # for param in auto_encoder.decoder.superresolution.parameters(): # type: ignore
+ # param.requires_grad_(False)
+
+ # # del G_ema
+ # th.cuda.empty_cache()
+
+ if args.freeze_triplane_decoder:
+ logger.log("freeze triplane decoder...")
+ for param in auto_encoder.decoder.triplane_decoder.parameters(
+ ): # type: ignore
+ # for param in auto_encoder.decoder.triplane_decoder.decoder.parameters(): # type: ignore
+ param.requires_grad_(False)
+
+ if args.cfg in ('afhq', 'ffhq'):
+
+ if args.sr_training:
+
+ logger.log("AE triplane decoder reuses G_ema SR module...")
+ auto_encoder.decoder.triplane_decoder.superresolution.load_state_dict( # type: ignore
+ G_ema.superresolution.state_dict()) # type: ignore
+
+ # setting requires_grad=False this way suppresses the DDP "forward with no grad" error.
+ for param in auto_encoder.decoder.triplane_decoder.superresolution.parameters(
+ ): # type: ignore
+ param.requires_grad_(False)
+
+ # ! load data
+ if args.use_lmdb:
+ logger.log("creating LMDB eg3d data loader...")
+ training_set = LMDBDataset_MV_Compressed_eg3d(
+ args.data_dir,
+ args.image_size,
+ args.image_size_encoder,
+ )
+ else:
+ logger.log("creating eg3d data loader...")
+
+ training_set_kwargs, dataset_name = init_dataset_kwargs(
+ data=args.data_dir,
+ class_name='datasets.eg3d_dataset.ImageFolderDataset',
+ reso_gt=args.image_size) # only load pose here
+ # if args.cond and not training_set_kwargs.use_labels:
+ # raise Exception('check here')
+
+ # training_set_kwargs.use_labels = args.cond
+ training_set_kwargs.use_labels = True
+ training_set_kwargs.xflip = False
+ training_set_kwargs.random_seed = SEED
+ training_set_kwargs.max_size = args.dataset_size
+ # desc = f'{args.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}'
+
+ # * construct ffhq/afhq dataset
+ training_set = dnnlib.util.construct_class_by_name(
+ **training_set_kwargs) # subclass of training.dataset.Dataset
+
+ training_set_sampler = InfiniteSampler(
+ dataset=training_set,
+ rank=dist_util.get_rank(),
+ num_replicas=dist_util.get_world_size(),
+ seed=SEED)
+
+ data = iter(
+ th.utils.data.DataLoader(
+ dataset=training_set,
+ sampler=training_set_sampler,
+ batch_size=args.batch_size,
+ pin_memory=True,
+ num_workers=args.num_workers,
+ persistent_workers=args.num_workers > 0,
+ prefetch_factor=max(8 // args.batch_size, 2),
+ ))
+ # prefetch_factor=2))
+
+ eval_data = th.utils.data.DataLoader(dataset=Subset(
+ training_set, np.arange(8)),
+ batch_size=args.eval_batch_size,
+ num_workers=1)
+
+ else:
+
+ logger.log("creating data loader...")
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data, load_data_cls
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ # TODO, load shapenet data
+ # data = load_data(
+ # st() mark
+ # if args.overfitting:
+ # logger.log("create overfitting memory dataset")
+ # data = load_memory_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True # for evaluation
+ # )
+ # else:
+ if args.use_wds:
+ if args.data_dir == 'NONE':
+ with open(args.shards_lst) as f:
+ shards_lst = [url.strip() for url in f.readlines()]
+ data = load_wds_data(
+ shards_lst, args.image_size, args.image_size_encoder,
+ args.batch_size, args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ else:
+ data = load_wds_data(
+ args.data_dir, args.image_size, args.image_size_encoder,
+ args.batch_size, args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ # eval_data = load_wds_data(
+ # args.data_dir,
+ # args.image_size,
+ # args.image_size_encoder,
+ # args.eval_batch_size,
+ # args.num_workers,
+ # decode_encode_img_only=args.decode_encode_img_only,
+ # load_wds_diff=args.load_wds_diff)
+
+ if args.eval_data_dir == 'NONE':
+ with open(args.eval_shards_lst) as f:
+ eval_shards_lst = [url.strip() for url in f.readlines()]
+ else:
+ eval_shards_lst = args.eval_data_dir # auto expanded
+
+ eval_data = load_wds_data(
+ eval_shards_lst,
+ args.image_size,
+ args.image_size_encoder,
+ args.eval_batch_size,
+ args.num_workers,
+ plucker_embedding=args.plucker_embedding,
+ decode_encode_img_only=args.decode_encode_img_only,
+ mv_input=args.mv_input,
+ load_wds_diff=False,
+ load_instance=True)
+
+ else:
+ logger.log("create all instances dataset")
+ # st() mark
+ # data = load_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=args.load_depth,
+ # preprocess=auto_encoder.preprocess, # clip
+ # dataset_size=args.dataset_size,
+ # use_lmdb=args.use_lmdb,
+ # trainer_name=args.trainer_name,
+ # # load_depth=True # for evaluation
+ # )
+ # eval_data = load_eval_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ # interval=args.interval,
+ # use_lmdb=args.use_lmdb,
+ # )
+
+ data = load_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_latent=True,
+ **args_to_dict(args,
+ dataset_defaults().keys())
+ # load_depth=args.load_depth,
+ # preprocess=auto_encoder.preprocess, # clip
+ # dataset_size=args.dataset_size,
+ # use_lmdb=args.use_lmdb,
+ # trainer_name=args.trainer_name,
+ # load_depth=True # for evaluation
+ )
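+ # load_latent=True: train the flow model directly on pre-extracted latent codes rather than raw multi-view renderings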
+ # eval_dataset = load_data_cls(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_latent=True,
+ # return_dataset=True,
+ # **args_to_dict(args,
+ # dataset_defaults().keys())
+ # # load_depth=args.load_depth,
+ # # preprocess=auto_encoder.preprocess, # clip
+ # # dataset_size=args.dataset_size,
+ # # use_lmdb=args.use_lmdb,
+ # # trainer_name=args.trainer_name,
+ # # load_depth=True # for evaluation
+ # )
+ eval_dataset = None
+
+ # st()
+
+ # eval_data = data
+ # eval_data = load_eval_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ # interval=args.interval,
+ # use_lmdb=args.use_lmdb,
+ # )
+
+
+ # let all processes sync up before starting with a new epoch of training
+
+ if dist_util.get_rank() == 0:
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ args.schedule_sampler = create_named_schedule_sampler(
+ args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ loss_class = E3DGELossClass(device, opt).to(device)
+
+ logger.log("training...")
+
+ TrainLoop = {
+ 'flow_matching':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine,
+ 'flow_matching_gs':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine_gs, # slightly modified sampling and rendering for gs
+ 'flow_matching_gs_clay':
+ nsr.lsgm.flow_matching_trainer.FlowMatchingEngine_gs_clay, # slightly modified sampling and rendering for gs
+ }[args.trainer_name]
+
+ if 'vpsde' in args.trainer_name:
+ sde_diffusion = make_sde_diffusion(
+ dnnlib.EasyDict(
+ args_to_dict(args,
+ continuous_diffusion_defaults().keys())))
+ # assert args.mixed_prediction, 'enable mixed_prediction by default'
+ logger.log('create VPSDE diffusion.')
+ else:
+ sde_diffusion = None
+
+ if 'cldm' in args.trainer_name:
+ assert isinstance(denoise_model, tuple)
+ denoise_model, controlNet = denoise_model
+
+ controlNet.to(dist_util.dev())
+ controlNet.train()
+ else:
+ controlNet = None
+
+ # st()
+ denoise_model.to(dist_util.dev())
+ denoise_model.train()
+
+ auto_encoder.decoder.rendering_kwargs = args.rendering_kwargs
+ TrainLoop(rec_model=auto_encoder,
+ denoise_model=denoise_model,
+ control_model=controlNet,
+ diffusion=diffusion,
+ sde_diffusion=sde_diffusion,
+ loss_class=loss_class,
+ data=data,
+ eval_data=None, # return dataset
+ # eval_data=eval_dataset, # return dataset
+ # eval_data=eval_data, # return dataset
+ **vars(args)).run_loop()
+
+ dist_util.synchronize()
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ dataset_size=-1,
+ diffusion_input_size=-1,
+ trainer_name='adm',
+ use_amp=False,
+ train_vae=True, # jldm?
+ triplane_scaling_divider=1.0, # divide by this value
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ schedule_sampler="uniform",
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ resume_checkpoint_EG3D="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ load_depth=True, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ load_submodule_name='', # for loading pretrained auto_encoder model
+ ignore_resume_opt=False,
+ # freeze_ae=False,
+ denoised_ae=True,
+ diffusion_ce_anneal=False,
+ use_lmdb=False,
+ interval=1,
+ freeze_triplane_decoder=False,
+ objv_dataset=False,
+ use_eos_feature=False,
+ clip_grad_throld=1.0,
+ allow_tf32=True,
+ )
+
+ defaults.update(model_and_diffusion_defaults())
+ defaults.update(continuous_diffusion_defaults())
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(dataset_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+ defaults.update(control_net_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ # ! add transport args
+ parse_transport_args(parser)
+
+ return parser
+
+
+if __name__ == "__main__":
+ # os.environ["TORCH_CPP_LOG_LEVEL"] = "INFO"
+ # os.environ["NCCL_DEBUG"] = "INFO"
+ th.multiprocessing.set_start_method('spawn')
+ th.multiprocessing.set_sharing_strategy('file_system') # avoid "too many open files" errors
+ # https://stackoverflow.com/questions/76300957/multiprocessing-pool-too-many-file-descriptors-using-pytorch
+ # https://stackoverflow.com/questions/48250053/pytorchs-dataloader-too-many-open-files-error-when-no-files-should-be-open
+
+ os.environ[
+ "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ # opts = dnnlib.EasyDict(vars(args)) # compatible with the original triplane settings
+ # opts = args
+ args.rendering_kwargs = rendering_options_defaults(args)
+
+ # Launch processes.
+ logger.log('Launching processes...')
+
+ logger.log('Available devices ', th.cuda.device_count())
+ logger.log('Current cuda device ', th.cuda.current_device())
+ # logger.log('GPU Device name:', th.cuda.get_device_name(th.cuda.current_device()))
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+ dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/scripts/vit_triplane_train.py b/scripts/vit_triplane_train.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4929ecc4cbf332a7798a7358c3c6a60a702a230
--- /dev/null
+++ b/scripts/vit_triplane_train.py
@@ -0,0 +1,351 @@
+"""
+Train a diffusion model on images.
+"""
+from pdb import set_trace as st
+import random
+import json
+import sys
+import os
+
+sys.path.append('.')
+
+import torch.distributed as dist
+
+import traceback
+
+import torch as th
+
+# if th.cuda.is_available(): # FIXME
+# from xformers.triton import FusedLayerNorm as LayerNorm
+
+import torch.multiprocessing as mp
+import numpy as np
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+from nsr.train_nv_util import TrainLoop3DRecNV, TrainLoop3DRec, TrainLoop3DRecNVPatch, TrainLoop3DRecNVPatchSingleForward, TrainLoop3DRecNVPatchSingleForwardMV, TrainLoop3DRecNVPatchSingleForwardMV_NoCrop, TrainLoop3DRecNVPatchSingleForwardMVAdvLoss, TrainLoop3DRecNVPatchSingleForwardMV_NoCrop_adv
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default, dataset_defaults
+# from datasets import g_buffer_objaverse
+from nsr.losses.builder import E3DGELossClass, E3DGE_with_AdvLoss
+
+
+# th.backends.cuda.matmul.allow_tf32 = True # https://huggingface.co/docs/diffusers/optimization/fp16
+th.backends.cuda.matmul.allow_tf32 = True
+th.backends.cudnn.allow_tf32 = True
+th.backends.cudnn.enabled = True
+
+
+def training_loop(args):
+ # def training_loop(args):
+ dist_util.setup_dist(args)
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # https://blog.csdn.net/qq_41682740/article/details/126304613
+
+ SEED = args.seed
+
+ # dist.init_process_group(backend='nccl', init_method='env://', rank=args.local_rank, world_size=th.cuda.device_count())
+ logger.log(f"global_rank={args.global_rank}, local_rank={args.local_rank} init complete, seed={SEED}")
+ th.cuda.set_device(args.local_rank)
+ th.cuda.empty_cache()
+
+ # * deterministic algorithms flags
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+ random.seed(SEED)
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating encoder and NSR decoder...")
+ # device = dist_util.dev()
+ device = th.device("cuda", args.local_rank)
+
+ # shared eg3d opts
+ opts = eg3d_options_default()
+
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! disable noise injection, since noise_mode='none' in eg3d
+
+ auto_encoder = create_3DAE_model(
+ **args_to_dict(args,
+ encoder_and_nsr_defaults().keys()))
+ auto_encoder.to(device)
+ auto_encoder.train()
+
+ logger.log("creating data loader...")
+ # data = load_data(
+ # st()
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_wds_data
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data
+
+ if args.overfitting:
+ data = load_memory_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ # load_depth=args.depth_lambda > 0
+ # load_depth=True, # for evaluation
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+ eval_data = None
+ else:
+ if args.use_wds:
+ # st()
+ if args.data_dir == 'NONE':
+ with open(args.shards_lst) as f:
+ shards_lst = [url.strip() for url in f.readlines()]
+ data = load_wds_data(
+ shards_lst, # type: ignore
+ args.image_size,
+ args.image_size_encoder,
+ args.batch_size,
+ args.num_workers,
+ # plucker_embedding=args.plucker_embedding,
+ # mv_input=args.mv_input,
+ # split_chunk_input=args.split_chunk_input,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ elif not args.inference:
+ data = load_wds_data(args.data_dir,
+ args.image_size,
+ args.image_size_encoder,
+ args.batch_size,
+ args.num_workers,
+ plucker_embedding=args.plucker_embedding,
+ mv_input=args.mv_input,
+ split_chunk_input=args.split_chunk_input)
+ else:
+ data = None
+ # ! load eval
+
+ if args.eval_data_dir == 'NONE':
+ with open(args.eval_shards_lst) as f:
+ eval_shards_lst = [url.strip() for url in f.readlines()]
+ else:
+ eval_shards_lst = args.eval_data_dir # auto expanded
+
+ eval_data = load_wds_data(
+ eval_shards_lst, # type: ignore
+ args.image_size,
+ args.image_size_encoder,
+ args.eval_batch_size,
+ args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+ # load_instance=True) # TODO
+
+ else:
+
+ if args.inference:
+ data = None
+ else:
+ data = load_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ **args_to_dict(args,
+ dataset_defaults().keys())
+ )
+
+ # load_depth=True # for evaluation
+
+ if args.pose_warm_up_iter > 0:
+ overfitting_dataset = load_memory_data(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ # load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+ data = [data, overfitting_dataset, args.pose_warm_up_iter]
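+ # pack (main loader, overfitting loader, warm-up iters); the trainer presumably overfits camera poses on the small set first, then switches to the main loader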
+
+ eval_data = load_eval_data(
+ file_path=args.eval_data_dir,
+ batch_size=args.eval_batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True, # for evaluation
+ preprocess=auto_encoder.preprocess,
+ wds_split=args.wds_split,
+ # interval=args.interval,
+ # use_lmdb=args.use_lmdb,
+ # plucker_embedding=args.plucker_embedding,
+ # load_real=args.load_real,
+ # four_view_for_latent=args.four_view_for_latent,
+ # load_extra_36_view=args.load_extra_36_view,
+ # shuffle_across_cls=args.shuffle_across_cls,
+ **args_to_dict(args,
+ dataset_defaults().keys()))
+
+ logger.log("creating data loader done...")
+
+ args.img_size = [args.image_size_encoder]
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ # opt.max_depth, opt.min_depth = args.rendering_kwargs.ray_end, args.rendering_kwargs.ray_start
+ if 'disc' in args.trainer_name:
+ loss_class = E3DGE_with_AdvLoss(
+ device,
+ opt,
+ # disc_weight=args.patchgan_disc, # rec_cvD_lambda
+ disc_factor=args.patchgan_disc_factor, # reduce D update speed
+ disc_weight=args.patchgan_disc_g_weight).to(device)
+ else:
+ loss_class = E3DGELossClass(device, opt).to(device)
+
+ # writer = SummaryWriter() # TODO, add log dir
+
+ logger.log("training...")
+
+ TrainLoop = {
+ 'input_rec': TrainLoop3DRec,
+ 'nv_rec': TrainLoop3DRecNV,
+ # 'nv_rec_patch': TrainLoop3DRecNVPatch,
+ 'nv_rec_patch': TrainLoop3DRecNVPatchSingleForward,
+ 'nv_rec_patch_mvE': TrainLoop3DRecNVPatchSingleForwardMV,
+ 'nv_rec_patch_mvE_disc': TrainLoop3DRecNVPatchSingleForwardMVAdvLoss,
+ 'nv_rec_patch_mvE_gs': TrainLoop3DRecNVPatchSingleForwardMV_NoCrop,
+ 'nv_rec_patch_mvE_gs_disc': TrainLoop3DRecNVPatchSingleForwardMV_NoCrop_adv,
+ }[args.trainer_name]
+
+ logger.log("creating TrainLoop done...")
+
+ # th._dynamo.config.verbose=True # th212 required
+ # th._dynamo.config.suppress_errors = True
+ auto_encoder.decoder.rendering_kwargs = args.rendering_kwargs
+ train_loop = TrainLoop(
+ rec_model=auto_encoder,
+ loss_class=loss_class,
+ data=data,
+ eval_data=eval_data,
+ # compile=args.compile,
+ **vars(args))
+ # train_loop.rendering_kwargs = args.rendering_kwargs
+
+ if args.inference:
+ camera = th.load('eval_pose.pt', map_location=dist_util.dev())
+ train_loop.eval_novelview_loop(camera=camera,
+ save_latent=args.save_latent)
+ else:
+ train_loop.run_loop()
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ seed=0,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_amp=False,
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ # test warm up pose sampling training
+ pose_warm_up_iter=-1,
+ inference=False,
+ export_latent=False,
+ save_latent=False,
+ wds_split=1, # out of 4
+ )
+
+ defaults.update(dataset_defaults()) # type: ignore
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ th.multiprocessing.set_start_method('spawn')
+
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+ # os.environ["NCCL_DEBUG"]="INFO"
+
+ args = create_argparser().parse_args()
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ # if os.environ['WORLD_SIZE'] > 1:
+
+ # for multi-node training
+ if dist_util.get_world_size() > 1:
+ args.global_rank = int(os.environ["RANK"])
+ else:
+ args.global_rank = 0
+
+ args.gpus = th.cuda.device_count()
+
+ opts = args
+
+ args.rendering_kwargs = rendering_options_defaults(opts)
+
+ # print(args)
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ # Launch processes.
+ print('Launching processes...')
+
+ try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ except Exception as e:
+ # print(e)
+ traceback.print_exc()
+ dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/scripts/wds_create_batch_gz.py b/scripts/wds_create_batch_gz.py
new file mode 100644
index 0000000000000000000000000000000000000000..871d40084736dde416f8adedb5b15def2b21c31f
--- /dev/null
+++ b/scripts/wds_create_batch_gz.py
@@ -0,0 +1,475 @@
+"""
+Train a diffusion model on images.
+"""
+import cv2
+# import imageio
+import lz4.frame
+import gzip
+import random
+import json
+import sys
+import os
+import lmdb
+from tqdm import tqdm
+sys.path.append('.')
+import torch.distributed as dist
+from pathlib import Path
+import pickle
+import traceback
+from PIL import Image
+import torch as th
+if th.cuda.is_available():
+ from xformers.triton import FusedLayerNorm as LayerNorm
+import torch.multiprocessing as mp
+import lzma
+import webdataset as wds
+import numpy as np
+
+from torch.utils.data import DataLoader, Dataset
+import imageio.v3 as iio
+
+import argparse
+import dnnlib
+from guided_diffusion import dist_util, logger
+from guided_diffusion.script_util import (
+ args_to_dict,
+ add_dict_to_argparser,
+)
+# from nsr.train_util import TrainLoop3DRec as TrainLoop
+# from nsr.train_nv_util import TrainLoop3DRecNV, TrainLoop3DRec, TrainLoop3DRecNVPatch
+from nsr.script_util import create_3DAE_model, encoder_and_nsr_defaults, loss_defaults, rendering_options_defaults, eg3d_options_default
+from datasets.shapenet import load_data, load_data_for_lmdb, load_eval_data, load_memory_data
+from nsr.losses.builder import E3DGELossClass
+from datasets.eg3d_dataset import init_dataset_kwargs
+
+# from .lmdb_create import encode_and_compress_image
+
+def encode_and_compress_image(inp_array, is_image=False, compress=True):
+ # Read the image using imageio
+ # image = imageio.v3.imread(image_path)
+
+ # Convert the image to bytes
+ # with io.BytesIO() as byte_buffer:
+ # imageio.imsave(byte_buffer, image, format="png")
+ # image_bytes = byte_buffer.getvalue()
+ if is_image:
+ inp_bytes = iio.imwrite("", inp_array, extension=".png")
+ else:
+ inp_bytes = inp_array.tobytes()
+
+ # Compress the image data using gzip
+ if compress:
+ # compressed_data = gzip.compress(inp_bytes)
+ compressed_data = lz4.frame.compress(inp_bytes)
+ return compressed_data
+ else:
+ return inp_bytes
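+# usage sketch (hypothetical arrays):
+# rgb_bytes = encode_and_compress_image(rgb_uint8, is_image=True, compress=False)   # png-encoded bytes
+# depth_bytes = encode_and_compress_image(depth_f32, is_image=False, compress=True) # lz4-compressed raw bytes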
+
+
+
+
+from pdb import set_trace as st
+import bz2
+
+# th.backends.cuda.matmul.allow_tf32 = True # https://huggingface.co/docs/diffusers/optimization/fp16
+
+
+
+def training_loop(args):
+ # def training_loop(args):
+ # dist_util.setup_dist(args)
+ # th.autograd.set_detect_anomaly(True) # type: ignore
+ th.autograd.set_detect_anomaly(False) # type: ignore
+ # https://blog.csdn.net/qq_41682740/article/details/126304613
+
+ SEED = args.seed
+
+ # dist.init_process_group(backend='nccl', init_method='env://', rank=args.local_rank, world_size=th.cuda.device_count())
+ # logger.log(f"{args.local_rank=} init complete, seed={SEED}")
+ # th.cuda.set_device(args.local_rank)
+ th.cuda.empty_cache()
+
+ # * deterministic algorithms flags
+ th.cuda.manual_seed_all(SEED)
+ np.random.seed(SEED)
+ random.seed(SEED)
+
+ # logger.configure(dir=args.logdir, format_strs=["tensorboard", "csv"])
+ logger.configure(dir=args.logdir)
+
+ logger.log("creating encoder and NSR decoder...")
+ # device = dist_util.dev()
+ # device = th.device("cuda", args.local_rank)
+
+ # shared eg3d opts
+ opts = eg3d_options_default()
+
+ if args.sr_training:
+ args.sr_kwargs = dnnlib.EasyDict(
+ channel_base=opts.cbase,
+ channel_max=opts.cmax,
+ fused_modconv_default='inference_only',
+ use_noise=True
+ ) # ! disable noise injection, since noise_mode='none' in eg3d
+
+
+ if args.objv_dataset:
+ from datasets.g_buffer_objaverse import load_data, load_eval_data, load_memory_data, load_data_for_lmdb
+ else: # shapenet
+ from datasets.shapenet import load_data, load_eval_data, load_memory_data, load_data_for_lmdb
+
+ # auto_encoder = create_3DAE_model(
+ # **args_to_dict(args,
+ # encoder_and_nsr_defaults().keys()))
+ # auto_encoder.to(device)
+ # auto_encoder.train()
+
+ logger.log("creating data loader...")
+ # data = load_data(
+ # st()
+ # if args.overfitting:
+ # data = load_memory_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # # load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ # )
+ # else:
+ if args.cfg in ('afhq', 'ffhq'):
+ # ! load data
+ logger.log("creating eg3d data loader...")
+ training_set_kwargs, dataset_name = init_dataset_kwargs(data=args.data_dir,
+ class_name='datasets.eg3d_dataset.ImageFolderDatasetLMDB',
+ reso_gt=args.image_size) # only load pose here
+ # if args.cond and not training_set_kwargs.use_labels:
+ # raise Exception('check here')
+
+ # training_set_kwargs.use_labels = args.cond
+ training_set_kwargs.use_labels = True
+ training_set_kwargs.xflip = False
+ training_set_kwargs.random_seed = SEED
+ # training_set_kwargs.max_size = args.dataset_size
+ # desc = f'{args.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}'
+
+ # * construct ffhq/afhq dataset
+ training_set = dnnlib.util.construct_class_by_name(
+ **training_set_kwargs) # subclass of training.dataset.Dataset
+ dataset_size = len(training_set)
+
+ # training_set_sampler = InfiniteSampler(
+ # dataset=training_set,
+ # rank=dist_util.get_rank(),
+ # num_replicas=dist_util.get_world_size(),
+ # seed=SEED)
+
+ data = DataLoader(
+ training_set,
+ shuffle=False,
+ batch_size=1,
+ num_workers=16,
+ drop_last=False,
+ # prefetch_factor=2,
+ pin_memory=True,
+ persistent_workers=True,
+ )
+
+ else:
+ # data, dataset_name, dataset_size, dataset = load_data_for_lmdb(
+ data, dataset_name, dataset_size = load_data_for_lmdb(
+ file_path=args.data_dir,
+ batch_size=args.batch_size,
+ reso=args.image_size,
+ reso_encoder=args.image_size_encoder, # 224 -> 128
+ num_workers=args.num_workers,
+ load_depth=True,
+ preprocess=None,
+ dataset_size=args.dataset_size,
+ trainer_name=args.trainer_name,
+ shuffle_across_cls=args.shuffle_across_cls,
+ wds_split=args.wds_split,
+ # wds_output_path=os.path.join(logger.get_dir(), f'wds-%06d.tar')
+ # load_depth=True # for evaluation
+ )
+ # if args.pose_warm_up_iter > 0:
+ # overfitting_dataset = load_memory_data(
+ # file_path=args.data_dir,
+ # batch_size=args.batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # # load_depth=args.depth_lambda > 0
+ # load_depth=True # for evaluation
+ # )
+ # data = [data, overfitting_dataset, args.pose_warm_up_iter]
+ # eval_data = load_eval_data(
+ # file_path=args.eval_data_dir,
+ # batch_size=args.eval_batch_size,
+ # reso=args.image_size,
+ # reso_encoder=args.image_size_encoder, # 224 -> 128
+ # num_workers=args.num_workers,
+ # load_depth=True, # for evaluation
+ # preprocess=auto_encoder.preprocess)
+ args.img_size = [args.image_size_encoder]
+ # try dry run
+ # batch = next(data)
+ # batch = None
+
+ # logger.log("creating model and diffusion...")
+
+ # let all processes sync up before starting with a new epoch of training
+ dist_util.synchronize()
+
+ # schedule_sampler = create_named_schedule_sampler(args.schedule_sampler, diffusion)
+
+ opt = dnnlib.EasyDict(args_to_dict(args, loss_defaults().keys()))
+ # opt.max_depth, opt.min_depth = args.rendering_kwargs.ray_end, args.rendering_kwargs.ray_start
+ # loss_class = E3DGELossClass(device, opt).to(device)
+
+ # writer = SummaryWriter() # TODO, add log dir
+
+ logger.log("training...")
+
+ # TrainLoop = {
+ # 'input_rec': TrainLoop3DRec,
+ # 'nv_rec': TrainLoop3DRecNV,
+ # 'nv_rec_patch': TrainLoop3DRecNVPatch,
+ # }[args.trainer_name]
+
+ # TrainLoop(rec_model=auto_encoder,
+ # loss_class=loss_class,
+ # data=data,
+ # eval_data=eval_data,
+ # **vars(args)).run_loop() # ! overfitting
+
+
+ # Function to compress an image using gzip
+ # def compress_image_gzip(image_path):
+ # def encode_and_compress_image(inp_array, is_image=False):
+ # # Read the image using imageio
+ # # image = imageio.v3.imread(image_path)
+
+ # # Convert the image to bytes
+ # # with io.BytesIO() as byte_buffer:
+ # # imageio.imsave(byte_buffer, image, format="png")
+ # # image_bytes = byte_buffer.getvalue()
+ # if is_image:
+ # inp_bytes = iio.imwrite("", inp_array, extension=".png")
+ # else:
+ # inp_bytes = inp_array.tobytes()
+
+ # # Compress the image data using gzip
+ # compressed_data = gzip.compress(inp_bytes)
+
+ # return compressed_data
+
+
+ def convert_to_wds_compressed(dataset_loader, dataset_size, lmdb_path, start_shard, wds_split):
+ """
+ Convert a PyTorch dataset to LMDB format.
+
+ Parameters:
+ - dataset: PyTorch dataset
+ - lmdb_path: Path to store the LMDB database
+ """
+ # env = lmdb.open(lmdb_path, map_size=1024 ** 4, readahead=False) # Adjust map_size based on your dataset size
+ # sink = wds.ShardWriter(lmdb_path, start_shard=start_shard, compress=True)
+ # sink = wds.ShardWriter(lmdb_path, start_shard=start_shard, compress=False)
+
+ # with env.begin(write=True) as txn:
+
+ # with env.begin(write=True) as txn:
+ # txn.put("length".encode("utf-8"), str(dataset_size).encode("utf-8"))
+
+
+ for idx, sample in enumerate(tqdm(dataset_loader)):
+ # if idx > 10:
+ # break
+
+ # pass
+ # remove the batch index of returned dict sample
+
+
+ # st()
+
+ sample_ins = sample.pop('ins')
+ assert all([sample_ins[i]==sample_ins[0] for i in range(0,len(sample_ins))]), sample_ins # check the whole batch comes from the same instance
+ ins = sample_ins[0]
+
+
+ sample = {
+ # k:v.squeeze(0).cpu().numpy() if isinstance(v, th.Tensor) else v[0] for k, v in sample.items()
+ k:v.cpu().numpy() if isinstance(v, th.Tensor) else v for k, v in sample.items()
+ # k:v.cpu().numpy() if isinstance(v, torch.Tensor) else v for k, v in sample.items()
+ }
+
+ # no need to encode ins/fname here; just save the caption alongside the images/depths
+
+ # sample = dataset_loader[idx]
+ # compressed_sample = {}
+ caption = sample.pop('caption')[0]
+
+ # root_dir = os.path.join(logger.get_dir(), ins.split('/')[0], idx)
+
+ # root_dir = Path(os.path.join(logger.get_dir(), str(idx)) )
+ root_dir = Path(os.path.join(logger.get_dir(), ins) )
+
+ if root_dir.exists(): # instance folder already exists; find the next save slot
+ save_idx = int(sorted(os.listdir(root_dir))[-1])
+ if save_idx >= 3: # keep at most 3 renders per instance; skip likely duplicates
+ continue
+ else:
+ root_dir.mkdir(parents=True, exist_ok=True)
+ save_idx = 0
+
+ root_dir = root_dir / f'{save_idx+1}'
+ # assert not root_dir.exists()
+ root_dir.mkdir(parents=True, exist_ok=False)
+
+ raw_img = sample.pop('raw_img')
+ # depth = sample.pop('depth') # just c and bbox
+ # depth = sample.pop('d_normalized') # just c and bbox
+ depth = sample.pop('depth') # just c and bbox
+ # d_near = sample.pop('d_near') # just c and bbox
+ # d_far = sample.pop('d_far') # just c and bbox
+ # d_near_far = np.stack([d_near, d_far])
+ normal = sample.pop('normal') # shall in [-1,1]
+
+ alpha_mask = (sample.pop('alpha_mask')*255).astype(np.uint8)
+
+ raw_img = np.concatenate([raw_img[i] for i in range(raw_img.shape[0])], axis=1) # concat png in w dim
+ # depth = np.concatenate([depth[i] for i in range(depth.shape[0])], axis=1) # concat png in w dim
+ alpha_mask = np.concatenate([alpha_mask[i] for i in range(alpha_mask.shape[0])], axis=1) # concat png in w dim
+ # depth_alpha = np.concatenate([depth, alpha_mask], axis=0)
+ normal = np.concatenate([normal[i] for i in range(normal.shape[0])], axis=1) # concat normal maps along the width dim
+ # img_depth_alpha = np.concatenate([raw_img, depth, alpha_mask], axis=-1)
+
+ cv2.imwrite(os.path.join(str(root_dir), 'raw_img.jpg'), cv2.cvtColor(raw_img, cv2.COLOR_RGB2BGR), [int(cv2.IMWRITE_JPEG_QUALITY), 95])
+ # cv2.imwrite(os.path.join(str(root_dir), 'raw_img.png'), cv2.cvtColor(raw_img, cv2.COLOR_RGB2BGR),) # ! save a png version, v=5.
+ # cv2.imwrite(str(root_dir / 'depth_alpha.jpg'), depth_alpha, [int(cv2.IMWRITE_JPEG_QUALITY), 95])
+ cv2.imwrite(str(root_dir / 'alpha.jpg'), alpha_mask, [int(cv2.IMWRITE_JPEG_QUALITY), 95])
+ cv2.imwrite(str(root_dir / 'normal.png'), (normal*127.5+127.5)) # png is lossless, so no quantization error beyond the uint8 cast
+
+ # st()
+ np.savez_compressed(str(root_dir / 'depth.npz'), depth=depth)
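+ # depth stays float32 inside a compressed .npz; saving it as jpg/png would quantize it to 8 bits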
+
+ # np.save(root_dir / 'd_near_far.npy', d_near_far) # save high-res depth with npz
+
+ np.save(root_dir / 'c.npy', sample['c'].astype(np.float32))
+ # np.save(root_dir / 'bbox.npy', sample['bbox'].astype(np.uint8))
+
+ with open(root_dir / 'caption.txt', 'w') as f:
+ f.write(caption)
+
+ with open(root_dir / 'ins.txt', 'w') as f:
+ # f.write(ins.encode('utf8'))
+ f.write(ins)
+
+
+ # sink.write({
+ # "__key__": f"sample_{wds_split:03d}_{idx:07d}",
+ # # 'sample.npz': sample, # {str: ndarray}
+
+ # # 'c.npy': sample['c'].astype(np.float32),
+ # # 'bbox.npy': sample['bbox'].astype(np.uint8),
+ # # 'ins.txt': ins,
+ # # 'caption.txt': caption,
+ # # 'd_near.npy': d_near.astype(np.float32),
+ # # 'd_far.npy': d_far.astype(np.float32),
+ # # 'raw_img.jpeg': raw_img,
+ # # # 'raw_img.png': raw_img,
+ # # 'alpha_mask.jpeg': alpha_mask, # lossless compression
+
+ # 'depth_alpha.png': depth_alpha,
+ # 'raw_img.png': raw_img,
+
+ # })
+
+ # sink.close()
+
+
+ # convert_to_wds_compressed(data, dataset_size, os.path.join(logger.get_dir(), f'wds-%06d.tar.gz'), args.start_shard, args.wds_split)
+ convert_to_wds_compressed(data, dataset_size, os.path.join(logger.get_dir(), f'wds-%06d.tar'), args.start_shard, args.wds_split)
+
+
+
+def create_argparser(**kwargs):
+ # defaults.update(model_and_diffusion_defaults())
+
+ defaults = dict(
+ seed=0,
+ dataset_size=-1,
+ trainer_name='input_rec',
+ use_amp=False,
+ overfitting=False,
+ num_workers=4,
+ image_size=128,
+ image_size_encoder=224,
+ iterations=150000,
+ anneal_lr=False,
+ lr=5e-5,
+ weight_decay=0.0,
+ lr_anneal_steps=0,
+ batch_size=1,
+ eval_batch_size=12,
+ microbatch=-1, # -1 disables microbatches
+ ema_rate="0.9999", # comma-separated list of EMA values
+ log_interval=50,
+ eval_interval=2500,
+ save_interval=10000,
+ resume_checkpoint="",
+ use_fp16=False,
+ fp16_scale_growth=1e-3,
+ data_dir="",
+ eval_data_dir="",
+ # load_depth=False, # TODO
+ logdir="/mnt/lustre/yslan/logs/nips23/",
+ # test warm up pose sampling training
+ objv_dataset=False,
+ pose_warm_up_iter=-1,
+ start_shard=0,
+ shuffle_across_cls=False,
+ wds_split=1, # out of 4
+ )
+
+ defaults.update(encoder_and_nsr_defaults()) # type: ignore
+ defaults.update(loss_defaults())
+
+ parser = argparse.ArgumentParser()
+ add_dict_to_argparser(parser, defaults)
+
+ return parser
+
+
+if __name__ == "__main__":
+ th.multiprocessing.set_start_method('spawn')
+ # os.environ[
+ # "TORCH_DISTRIBUTED_DEBUG"] = "DETAIL" # set to DETAIL for runtime logging.
+ # os.environ["TORCH_CPP_LOG_LEVEL"]="INFO"
+ # os.environ["NCCL_DEBUG"]="INFO"
+
+ args = create_argparser().parse_args()
+ # args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.gpus = th.cuda.device_count()
+
+ opts = args
+
+ args.rendering_kwargs = rendering_options_defaults(opts)
+
+ # print(args)
+ with open(os.path.join(args.logdir, 'args.json'), 'w') as f:
+ json.dump(vars(args), f, indent=2)
+
+ # Launch processes.
+ print('Launching processes...')
+
+ # try:
+ training_loop(args)
+ # except KeyboardInterrupt as e:
+ # except Exception as e:
+ # # print(e)
+ # traceback.print_exc()
+ # dist_util.cleanup() # clean port and socket when ctrl+c
diff --git a/sgm/__init__.py b/sgm/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..24bc84af8b1041de34b9816e0507cb1ac207bd13
--- /dev/null
+++ b/sgm/__init__.py
@@ -0,0 +1,4 @@
+from .models import AutoencodingEngine, DiffusionEngine
+from .util import get_configs_path, instantiate_from_config
+
+__version__ = "0.1.0"
diff --git a/sgm/__pycache__/__init__.cpython-39.pyc b/sgm/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..164425bec332c817387b87a72fecb906ef4678ed
Binary files /dev/null and b/sgm/__pycache__/__init__.cpython-39.pyc differ
diff --git a/sgm/__pycache__/util.cpython-39.pyc b/sgm/__pycache__/util.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f7e4c94b5bafb342981a65bb189bbaa3d2491342
Binary files /dev/null and b/sgm/__pycache__/util.cpython-39.pyc differ
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond-clay.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond-clay.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9ad8224db0cd6792a0aac13e357ac782eab034de
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond-clay.yaml
@@ -0,0 +1,72 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: 'img'
+ # ucg_rate: 0.33
+ # # legacy_ucg_value: None
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ # params:
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+ # # version: 'laion2b_s32b_b82k'
+ # freeze: True
+ # output_tokens: True
+ # # inp_size: 364
+
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitg
+ # inp_size: 364
+
+ - is_trainable: False
+ input_key: 'fps-xyz'
+ ucg_rate: 0.0
+ target: sgm.modules.encoders.modules.PCD_Scaler
+ params:
+ scaling_factor: 0.45
+ perturb_pcd_scale: 0.05
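+ # assumption: PCD_Scaler rescales the fps-xyz coordinates by scaling_factor and jitters them with sigma=0.05 noise as augmentation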
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6276d67855b3ad202ac5116e5a11734e9d811fc3
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform-pcdcond.yaml
@@ -0,0 +1,73 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.33
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ # inp_size: 364
+
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.33
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ # inp_size: 364
+
+ - is_trainable: False
+ input_key: 'fps-xyz'
+ ucg_rate: 0.0
+ target: sgm.modules.encoders.modules.PCD_Scaler
+ params:
+ scaling_factor: 0.45
+ # perturb_pcd_scale: 0.05
+ perturb_pcd_scale: 0.0
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..90c0590ae2fad15ba9fe48d2dc5df2914ea99697
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-336-uniform.yaml
@@ -0,0 +1,64 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.33
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ # inp_size: 364
+
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.33
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ # inp_size: 364
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-336.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-336.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5794b75216453fc1f63621fb6d5bcaff917a3af4
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-336.yaml
@@ -0,0 +1,61 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.33
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ # inp_size: 364
+
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.33
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ # inp_size: 364
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: lognorm
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay-dinoonly.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay-dinoonly.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6eb9fd9c12c42150b2cef3f7589f1d7b629ed0f0
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay-dinoonly.yaml
@@ -0,0 +1,80 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: 'img'
+ # # ucg_rate: 0.463
+ # ucg_rate: 0.562 # 0.316 ** 0.5
+ # # legacy_ucg_value: None
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ # params:
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+ # # version: 'laion2b_s32b_b82k'
+ # freeze: True
+ # output_tokens: True
+ # # inp_size: 448
+ # inp_size: 224
+
+ - is_trainable: False
+ input_key: 'img'
+ # ucg_rate: 0.463
+          ucg_rate: 0.1 # drop the image condition 10% of the time for CFG
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ inp_size: 518
+ output_cls: True
+ # inp_size: 224
+
+ # - is_trainable: False
+ # input_key: caption
+ # # ucg_rate: 0.463
+ # ucg_rate: 0.316
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ # params:
+ # always_return_pooled: True
+ # legacy: False
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..32169616d4db0e9b2375a21e79bcdbc71ec1d4dd
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform-clay.yaml
@@ -0,0 +1,80 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: 'img'
+ # # ucg_rate: 0.463
+ # ucg_rate: 0.562 # 0.316 ** 0.5
+ # # legacy_ucg_value: None
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ # params:
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+ # # version: 'laion2b_s32b_b82k'
+ # freeze: True
+ # output_tokens: True
+ # # inp_size: 448
+ # inp_size: 224
+
+ - is_trainable: False
+ input_key: 'img'
+ # ucg_rate: 0.463
+          # ucg_rate: 0.316
+          ucg_rate: 0.15
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ inp_size: 518
+ output_cls: True
+
+ - is_trainable: False
+ input_key: caption
+ # ucg_rate: 0.316
+ ucg_rate: 0.15
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ params:
+ always_return_pooled: True
+ legacy: False
+ arch: 'ViT-L-14'
+ version: 'openai'
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..1b59fa2e9b92d238e04a5005d9f101bcd015e206
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm-480-uniform.yaml
@@ -0,0 +1,66 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.316
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ # inp_size: 448
+ inp_size: 224
+
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.316
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ # inp_size: 448
+ inp_size: 224
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm-lognorm.yaml b/sgm/configs/img23d-clipl-compat-fm-lognorm.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c2ed1d97c1b75d800500b436c858e332c2521d01
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm-lognorm.yaml
@@ -0,0 +1,57 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: lognorm
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat-fm.yaml b/sgm/configs/img23d-clipl-compat-fm.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d2431bc90ea99b9429d433f49d57dc19820c8368
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat-fm.yaml
@@ -0,0 +1,57 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ # params: # all follow default
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/img23d-clipl-compat.yaml b/sgm/configs/img23d-clipl-compat.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..52c691a5fe20ef60c964578faeda403ad85f51a6
--- /dev/null
+++ b/sgm/configs/img23d-clipl-compat.yaml
@@ -0,0 +1,71 @@
+ldm_configs:
+
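+  # Legacy eps-prediction DDPM variant: keeps the LR scheduler, discrete denoiser,
+  # and EulerEDM sampler instead of the flow-matching objective used elsewhere.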
+ scheduler_config:
+ target: sgm.lr_scheduler.LambdaLinearScheduler
+ params:
+ warm_up_steps: [10000]
+ cycle_lengths: [10000000000000]
+ f_start: [1.e-6]
+ f_max: [1.]
+ f_min: [1.]
+
+ denoiser_config:
+ target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ params:
+ num_idx: 1000
+
+ scaling_config:
+ target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+ # version: 'laion2b_s32b_b82k'
+ freeze: True
+ output_tokens: True
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.StandardDiffusionLoss
+ params:
+ loss_weighting_config:
+ target: sgm.modules.diffusionmodules.loss_weighting.EpsWeighting
+ sigma_sampler_config:
+ target: sgm.modules.diffusionmodules.sigma_sampling.DiscreteSampling
+ params:
+ num_idx: 1000
+
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ sampler_config:
+ target: sgm.modules.diffusionmodules.sampling.EulerEDMSampler
+ params:
+ # num_steps: 250
+ num_steps: 50
+
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 6.5
\ No newline at end of file
diff --git a/sgm/configs/mv23d-clipl-compat-fm-lognorm.yaml b/sgm/configs/mv23d-clipl-compat-fm-lognorm.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a0f1f480b82b309e8ee3635f0561f9d5b9d7c92c
--- /dev/null
+++ b/sgm/configs/mv23d-clipl-compat-fm-lognorm.yaml
@@ -0,0 +1,65 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+
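+      # Two image streams: a frozen CLIP-L global embedding plus a trainable
+      # multi-view DINOv2 over the conditioning frames.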
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.32
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageMVEmbedder
+ params:
+ open_clip_embedding_config:
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+              freeze: True # TODO: add ModLN later
+ output_tokens: True
+
+ - is_trainable: True
+ input_key: 'img-c'
+ ucg_rate: 0.32
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMV
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ arch: vitb
+ n_cond_frames: 4 # first 4 views as cond
+ modLN: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: lognorm
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm-noclip.yaml b/sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm-noclip.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..dfa4832e97cb2ff0a4ba4e8932f0687f6f5834c7
--- /dev/null
+++ b/sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm-noclip.yaml
@@ -0,0 +1,67 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+
+ # - is_trainable: False
+ # input_key: 'img'
+ # ucg_rate: 0.32
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPImageMVEmbedder
+ # params:
+ # open_clip_embedding_config:
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ # params:
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+ # freeze: True # TODO, add ModLN later
+ # output_tokens: True
+
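+      # Multi-view DINOv2 conditioned with Plücker ray embeddings for the per-view
+      # cameras; aug_c (assumed: augmentation of the conditioning views) is enabled.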
+ - is_trainable: True
+ input_key: 'img-c'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMVPlucker
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ arch: vitb
+ # n_cond_frames: 4 # first 4 views as cond
+          n_cond_frames: 6 # use all 6 views, to match the InstantMesh MV23D setting
+ modLN: False
+ aug_c: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: lognorm
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm.yaml b/sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..37095be240e40a97efa5470d6a54b368bf0e4ead
--- /dev/null
+++ b/sgm/configs/mv23d-plucker-clipl-compat-fm-lognorm.yaml
@@ -0,0 +1,65 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+
+ - is_trainable: False
+ input_key: 'img'
+ ucg_rate: 0.32
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageMVEmbedder
+ params:
+ open_clip_embedding_config:
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ params:
+ arch: 'ViT-L-14'
+ version: 'openai'
+              freeze: True # TODO: add ModLN later
+ output_tokens: True
+
+ - is_trainable: True
+ input_key: 'img-c'
+ ucg_rate: 0.32
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMVPlucker
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ arch: vitb
+ n_cond_frames: 4 # first 4 views as cond
+ modLN: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: lognorm
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage1-mv23d-i23dpt-noi23d.yaml b/sgm/configs/stage1-mv23d-i23dpt-noi23d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..12c562340d267802a4ef42df01a4f7b7a2314231
--- /dev/null
+++ b/sgm/configs/stage1-mv23d-i23dpt-noi23d.yaml
@@ -0,0 +1,80 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: caption
+ # ucg_rate: 0.316
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ # params:
+ # always_return_pooled: True
+ # legacy: False
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+
+ - is_trainable: True
+ input_key: 'img-c'
+ ucg_rate: 0.1
+ # ucg_rate: 0.316
+ # ucg_rate: 0.167 # overall 0.1 dropout.
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMVPlucker
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ # arch: vitb
+ arch: vits
+ inp_size: 322
+          n_cond_frames: 6 # all 6 views as cond
+ modLN: False
+ aug_c: True
+
+
+ # - is_trainable: False
+ # input_key: 'img'
+ # ucg_rate: 0.6
+ # # legacy_ucg_value: None
+ # target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ # params:
+ # freeze: True
+ # arch: vitl
+ # inp_size: 518
+ # output_cls: True
+ # inp_size: 224
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: uniform
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage1-mv23d-i23dpt.yaml b/sgm/configs/stage1-mv23d-i23dpt.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..704ef41226e68307324b25c9736df16ff3fbdb76
--- /dev/null
+++ b/sgm/configs/stage1-mv23d-i23dpt.yaml
@@ -0,0 +1,91 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: caption
+ # ucg_rate: 0.316
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ # params:
+ # always_return_pooled: True
+ # legacy: False
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+
+ - is_trainable: True
+ input_key: 'img-c'
+ # ucg_rate: 0.1
+ # ucg_rate: 0.316
+          ucg_rate: 0.167 # joint with the 0.6 image drop below: 0.167 * 0.6 ≈ 0.1 overall
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMVPlucker
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ arch: vits
+ inp_size: 308
+          n_cond_frames: 6 # all 6 views as cond
+ modLN: False
+ aug_c: True
+
+
+ - is_trainable: False
+ input_key: 'img'
+ # ucg_rate: 0.463
+ # ucg_rate: 0.1 # 0.316 ** 0.5
+ # ucg_rate: 0.316
+ ucg_rate: 0.6
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ inp_size: 518
+ output_cls: True
+ # inp_size: 224
+
+ # - is_trainable: False
+ # input_key: 'fps-xyz'
+ # ucg_rate: 0.0
+ # target: sgm.modules.encoders.modules.PCD_Scaler
+ # params:
+ # scaling_factor: 0.45
+ # perturb_pcd_scale: 0.015
+ # # perturb_pcd_scale: 0.0
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: uniform
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage1-mv23d-t23dpt.yaml b/sgm/configs/stage1-mv23d-t23dpt.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b1c946266c913b08f50f9d767b93317f109f23da
--- /dev/null
+++ b/sgm/configs/stage1-mv23d-t23dpt.yaml
@@ -0,0 +1,74 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: caption
+ ucg_rate: 0.1
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ params:
+ always_return_pooled: True
+ legacy: False
+ arch: 'ViT-L-14'
+ version: 'openai'
+
+ - is_trainable: True
+ input_key: 'img-c'
+ ucg_rate: 0.1
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMVPlucker
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ # arch: vits
+ arch: vitb
+ inp_size: 280
+ n_cond_frames: 4 # first 4 views as cond
+ modLN: False
+ aug_c: True
+
+ # - is_trainable: False
+ # input_key: 'fps-xyz'
+ # ucg_rate: 0.0
+ # target: sgm.modules.encoders.modules.PCD_Scaler
+ # params:
+ # scaling_factor: 0.45
+ # perturb_pcd_scale: 0.015
+ # # perturb_pcd_scale: 0.0
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: uniform
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage1-t23d.yaml b/sgm/configs/stage1-t23d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f0c4eae6204014f921998cdcb1c219b11d7ef22c
--- /dev/null
+++ b/sgm/configs/stage1-t23d.yaml
@@ -0,0 +1,58 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
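+      # Stage-1 text-to-3D: a single frozen OpenCLIP text encoder conditions the
+      # point-cloud-structured latent flow model.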
+ - is_trainable: False
+ input_key: caption
+ ucg_rate: 0.1
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ params:
+ always_return_pooled: True
+ legacy: False
+ arch: 'ViT-L-14'
+ version: 'openai'
+
+ # - is_trainable: False
+ # input_key: 'fps-xyz'
+ # ucg_rate: 0.0
+ # target: sgm.modules.encoders.modules.PCD_Scaler
+ # params:
+ # scaling_factor: 0.45
+ # perturb_pcd_scale: 0.015
+ # # perturb_pcd_scale: 0.0
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: uniform
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage2-i23d.yaml b/sgm/configs/stage2-i23d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f3306f244bb13208b5d8c489670172fc40da39cc
--- /dev/null
+++ b/sgm/configs/stage2-i23d.yaml
@@ -0,0 +1,80 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: 'img'
+ # # ucg_rate: 0.463
+ # ucg_rate: 0.562 # 0.316 ** 0.5
+ # # legacy_ucg_value: None
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPImageEmbedder
+ # params:
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+ # # version: 'laion2b_s32b_b82k'
+ # freeze: True
+ # output_tokens: True
+ # # inp_size: 448
+ # inp_size: 224
+
+ - is_trainable: False
+ input_key: 'img'
+ # ucg_rate: 0.463
+          ucg_rate: 0.1 # drop the image condition 10% of the time for CFG
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ inp_size: 518
+ output_cls: True
+ # inp_size: 224
+
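+      # Stage-2 additionally conditions on the stage-1 point cloud (fps-xyz),
+      # rescaled by 0.45 and never dropped (ucg_rate: 0.0).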
+ - is_trainable: False
+ input_key: 'fps-xyz'
+ ucg_rate: 0.0
+ target: sgm.modules.encoders.modules.PCD_Scaler
+ params:
+ scaling_factor: 0.45
+ # perturb_pcd_scale: 0.01 # as in VAE
+ # perturb_pcd_scale: 0.015
+ perturb_pcd_scale: 0.0
+
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ # snr_type: lognorm # too noisy?
+ snr_type: uniform
+ # path_type: Linear
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage2-mv23d-i23dpt.yaml b/sgm/configs/stage2-mv23d-i23dpt.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..15eac078fe98763821dfe6c35791968d1fab9fff
--- /dev/null
+++ b/sgm/configs/stage2-mv23d-i23dpt.yaml
@@ -0,0 +1,90 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ # - is_trainable: False
+ # input_key: caption
+ # ucg_rate: 0.316
+ # target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ # params:
+ # always_return_pooled: True
+ # legacy: False
+ # arch: 'ViT-L-14'
+ # version: 'openai'
+
+ - is_trainable: True
+ input_key: 'img-c'
+ # ucg_rate: 0.1
+ # ucg_rate: 0.316
+          ucg_rate: 0.167 # joint with the 0.6 image drop below: 0.167 * 0.6 ≈ 0.1 overall
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedderMVPlucker
+ params:
+ freeze: False
+ enable_bf16: True
+ output_cls: False # return pooling
+ arch: vits
+ inp_size: 308
+          n_cond_frames: 6 # all 6 views as cond
+ modLN: False
+ aug_c: True
+
+
+ - is_trainable: False
+ input_key: 'img'
+ # ucg_rate: 0.463
+ # ucg_rate: 0.1 # 0.316 ** 0.5
+ # ucg_rate: 0.316
+ ucg_rate: 0.6
+ # legacy_ucg_value: None
+ target: sgm.modules.encoders.modules.FrozenDinov2ImageEmbedder
+ params:
+ freeze: True
+ arch: vitl
+ inp_size: 518
+ output_cls: True
+ # inp_size: 224
+
+ - is_trainable: False
+ input_key: 'fps-xyz'
+ ucg_rate: 0.0
+ target: sgm.modules.encoders.modules.PCD_Scaler
+ params:
+ scaling_factor: 0.45
+ perturb_pcd_scale: 0.0
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: uniform
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/stage2-t23d.yaml b/sgm/configs/stage2-t23d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..df4e81672b89c9da38b4410d0f1b7594a8e2549f
--- /dev/null
+++ b/sgm/configs/stage2-t23d.yaml
@@ -0,0 +1,57 @@
+ldm_configs:
+
+ # scheduler_config:
+ # target: sgm.lr_scheduler.LambdaLinearScheduler
+ # params:
+ # warm_up_steps: [10000]
+ # cycle_lengths: [10000000000000]
+ # f_start: [1.e-6]
+ # f_max: [1.]
+ # f_min: [1.]
+
+ # denoiser_config:
+ # target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ # params:
+ # num_idx: 1000
+
+ # scaling_config:
+ # target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ # discretization_config:
+ # target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: caption
+ ucg_rate: 0.1
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ params:
+ always_return_pooled: True
+ legacy: False
+ arch: 'ViT-L-14'
+ version: 'openai'
+
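+      # As in stage2-i23d: condition on the stage-1 point cloud, rescaled by 0.45.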
+ - is_trainable: False
+ input_key: 'fps-xyz'
+ ucg_rate: 0.0
+ target: sgm.modules.encoders.modules.PCD_Scaler
+ params:
+ scaling_factor: 0.45
+ perturb_pcd_scale: 0.0
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: uniform
+ path_type: GVP
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 5.0
\ No newline at end of file
diff --git a/sgm/configs/t23d-clipl-compat-fm.yaml b/sgm/configs/t23d-clipl-compat-fm.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e9da4da9ac3d1d61df3bf2c9c914a06dc37f2cce
--- /dev/null
+++ b/sgm/configs/t23d-clipl-compat-fm.yaml
@@ -0,0 +1,29 @@
+ldm_configs:
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: caption
+ ucg_rate: 0.1
+ target: sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2
+ params:
+ always_return_pooled: True
+ legacy: False
+ arch: 'ViT-L-14'
+ version: 'openai'
+
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.FMLoss
+ params:
+ transport_config:
+ target: transport.create_transport
+ params: # all follow default
+ snr_type: lognorm
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ scale: 4.0
\ No newline at end of file
diff --git a/sgm/configs/txt2img-clipl-compat.yaml b/sgm/configs/txt2img-clipl-compat.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..62ffa17bec26733f49bfc034737bda860a572102
--- /dev/null
+++ b/sgm/configs/txt2img-clipl-compat.yaml
@@ -0,0 +1,60 @@
+ldm_configs:
+
+ scheduler_config:
+ target: sgm.lr_scheduler.LambdaLinearScheduler
+ params:
+ warm_up_steps: [10000]
+ cycle_lengths: [10000000000000]
+ f_start: [1.e-6]
+ f_max: [1.]
+ f_min: [1.]
+
+ denoiser_config:
+ target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ params:
+ num_idx: 1000
+
+ scaling_config:
+ target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: False
+ input_key: caption
+ ucg_rate: 0.1
+ legacy_ucg_value: ""
+ target: sgm.modules.encoders.modules.FrozenCLIPEmbedder
+ params:
+ always_return_pooled: True
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.StandardDiffusionLoss
+ params:
+ loss_weighting_config:
+ target: sgm.modules.diffusionmodules.loss_weighting.EpsWeighting
+ sigma_sampler_config:
+ target: sgm.modules.diffusionmodules.sigma_sampling.DiscreteSampling
+ params:
+ num_idx: 1000
+
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ sampler_config:
+ target: sgm.modules.diffusionmodules.sampling.EulerEDMSampler
+ params:
+ # num_steps: 250
+ num_steps: 50
+
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ # scale: 1.0
+ scale: 6.5
\ No newline at end of file
diff --git a/sgm/configs/txt2img-clipl.yaml b/sgm/configs/txt2img-clipl.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cb66ede901b1aa1acb18d162b88912a2e6eab0ce
--- /dev/null
+++ b/sgm/configs/txt2img-clipl.yaml
@@ -0,0 +1,184 @@
+model:
+ base_learning_rate: 1.0e-4
+ target: sgm.models.diffusion.DiffusionEngine
+ params:
+ scale_factor: 0.13025
+ disable_first_stage_autocast: True
+ log_keys:
+ - txt
+
+ scheduler_config:
+ target: sgm.lr_scheduler.LambdaLinearScheduler
+ params:
+ warm_up_steps: [10000]
+ cycle_lengths: [10000000000000]
+ f_start: [1.e-6]
+ f_max: [1.]
+ f_min: [1.]
+
+ denoiser_config:
+ target: sgm.modules.diffusionmodules.denoiser.DiscreteDenoiser
+ params:
+ num_idx: 1000
+
+ scaling_config:
+ target: sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ network_config:
+ target: sgm.modules.diffusionmodules.openaimodel.UNetModel
+ params:
+ use_checkpoint: True
+ in_channels: 4
+ out_channels: 4
+ model_channels: 320
+ attention_resolutions: [1, 2, 4]
+ num_res_blocks: 2
+ channel_mult: [1, 2, 4, 4]
+ num_head_channels: 64
+ num_classes: sequential
+ adm_in_channels: 1792
+ num_heads: 1
+ transformer_depth: 1
+ context_dim: 768
+ spatial_transformer_attn_type: softmax-xformers
+
+ conditioner_config:
+ target: sgm.modules.GeneralConditioner
+ params:
+ emb_models:
+ - is_trainable: True
+ input_key: txt
+ ucg_rate: 0.1
+ legacy_ucg_value: ""
+ target: sgm.modules.encoders.modules.FrozenCLIPEmbedder
+ params:
+ always_return_pooled: True
+
+ - is_trainable: False
+ ucg_rate: 0.1
+ input_key: original_size_as_tuple
+ target: sgm.modules.encoders.modules.ConcatTimestepEmbedderND
+ params:
+ outdim: 256
+
+ - is_trainable: False
+ input_key: crop_coords_top_left
+ ucg_rate: 0.1
+ target: sgm.modules.encoders.modules.ConcatTimestepEmbedderND
+ params:
+ outdim: 256
+
+ first_stage_config:
+ target: sgm.models.autoencoder.AutoencoderKL
+ params:
+ ckpt_path: CKPT_PATH
+ embed_dim: 4
+ monitor: val/rec_loss
+ ddconfig:
+ attn_type: vanilla-xformers
+ double_z: true
+ z_channels: 4
+ resolution: 256
+ in_channels: 3
+ out_ch: 3
+ ch: 128
+ ch_mult: [1, 2, 4, 4]
+ num_res_blocks: 2
+ attn_resolutions: []
+ dropout: 0.0
+ lossconfig:
+ target: torch.nn.Identity
+
+ loss_fn_config:
+ target: sgm.modules.diffusionmodules.loss.StandardDiffusionLoss
+ params:
+ loss_weighting_config:
+ target: sgm.modules.diffusionmodules.loss_weighting.EpsWeighting
+ sigma_sampler_config:
+ target: sgm.modules.diffusionmodules.sigma_sampling.DiscreteSampling
+ params:
+ num_idx: 1000
+
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ sampler_config:
+ target: sgm.modules.diffusionmodules.sampling.EulerEDMSampler
+ params:
+ num_steps: 50
+
+ discretization_config:
+ target: sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization
+
+ guider_config:
+ target: sgm.modules.diffusionmodules.guiders.VanillaCFG
+ params:
+ scale: 7.5
+
+data:
+ target: sgm.data.dataset.StableDataModuleFromConfig
+ params:
+ train:
+ datapipeline:
+ urls:
+              # USER: adapt this path to the root of your custom dataset
+ - DATA_PATH
+ pipeline_config:
+ shardshuffle: 10000
+ sample_shuffle: 10000
+
+
+ decoders:
+ - pil
+
+ postprocessors:
+ - target: sdata.mappers.TorchVisionImageTransforms
+ params:
+                key: jpg # USER: you may want to adapt this for your custom dataset
+ transforms:
+ - target: torchvision.transforms.Resize
+ params:
+ size: 256
+ interpolation: 3
+ - target: torchvision.transforms.ToTensor
+ - target: sdata.mappers.Rescaler
+              # USER: you may want to use non-default parameters for your custom dataset
+ - target: sdata.mappers.AddOriginalImageSizeAsTupleAndCropToSquare
+              # USER: you may want to use non-default parameters for your custom dataset
+
+ loader:
+ batch_size: 64
+ num_workers: 6
+
+lightning:
+ modelcheckpoint:
+ params:
+ every_n_train_steps: 5000
+
+ callbacks:
+ metrics_over_trainsteps_checkpoint:
+ params:
+ every_n_train_steps: 25000
+
+ image_logger:
+ target: main.ImageLogger
+ params:
+ disabled: False
+ enable_autocast: False
+ batch_frequency: 1000
+ max_images: 8
+ increase_log_steps: True
+ log_first_step: False
+ log_images_kwargs:
+ use_ema_scope: False
+ N: 8
+ n_rows: 2
+
+ trainer:
+ devices: 0,
+ benchmark: True
+ num_sanity_val_steps: 0
+ accumulate_grad_batches: 1
+ max_epochs: 1000
\ No newline at end of file
diff --git a/sgm/data/__init__.py b/sgm/data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7664a25c655c376bd1a7b0ccbaca7b983a2bf9ad
--- /dev/null
+++ b/sgm/data/__init__.py
@@ -0,0 +1 @@
+from .dataset import StableDataModuleFromConfig
diff --git a/sgm/data/cifar10.py b/sgm/data/cifar10.py
new file mode 100644
index 0000000000000000000000000000000000000000..6083646f136bad308a0485843b89234cf7a9d6cd
--- /dev/null
+++ b/sgm/data/cifar10.py
@@ -0,0 +1,67 @@
+import pytorch_lightning as pl
+import torchvision
+from torch.utils.data import DataLoader, Dataset
+from torchvision import transforms
+
+
+class CIFAR10DataDictWrapper(Dataset):
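+    """Wraps a torchvision dataset so each (image, label) sample becomes a dict
+    with the "jpg"/"cls" keys expected by the conditioning pipeline."""
+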
+ def __init__(self, dset):
+ super().__init__()
+ self.dset = dset
+
+ def __getitem__(self, i):
+ x, y = self.dset[i]
+ return {"jpg": x, "cls": y}
+
+ def __len__(self):
+ return len(self.dset)
+
+
+class CIFAR10Loader(pl.LightningDataModule):
+ def __init__(self, batch_size, num_workers=0, shuffle=True):
+ super().__init__()
+
+ transform = transforms.Compose(
+ [transforms.ToTensor(), transforms.Lambda(lambda x: x * 2.0 - 1.0)]
+ )
+
+ self.batch_size = batch_size
+ self.num_workers = num_workers
+ self.shuffle = shuffle
+ self.train_dataset = CIFAR10DataDictWrapper(
+ torchvision.datasets.CIFAR10(
+ root=".data/", train=True, download=True, transform=transform
+ )
+ )
+ self.test_dataset = CIFAR10DataDictWrapper(
+ torchvision.datasets.CIFAR10(
+ root=".data/", train=False, download=True, transform=transform
+ )
+ )
+
+ def prepare_data(self):
+ pass
+
+ def train_dataloader(self):
+ return DataLoader(
+ self.train_dataset,
+ batch_size=self.batch_size,
+ shuffle=self.shuffle,
+ num_workers=self.num_workers,
+ )
+
+ def test_dataloader(self):
+ return DataLoader(
+ self.test_dataset,
+ batch_size=self.batch_size,
+ shuffle=self.shuffle,
+ num_workers=self.num_workers,
+ )
+
+ def val_dataloader(self):
+ return DataLoader(
+ self.test_dataset,
+ batch_size=self.batch_size,
+ shuffle=self.shuffle,
+ num_workers=self.num_workers,
+ )
diff --git a/sgm/data/dataset.py b/sgm/data/dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..b726149996591c6c3db69230e1bb68c07d2faa12
--- /dev/null
+++ b/sgm/data/dataset.py
@@ -0,0 +1,80 @@
+from typing import Optional
+
+import torchdata.datapipes.iter
+import webdataset as wds
+from omegaconf import DictConfig
+from pytorch_lightning import LightningDataModule
+
+try:
+ from sdata import create_dataset, create_dummy_dataset, create_loader
+except ImportError as e:
+ print("#" * 100)
+ print("Datasets not yet available")
+ print("to enable, we need to add stable-datasets as a submodule")
+ print("please use ``git submodule update --init --recursive``")
+ print("and do ``pip install -e stable-datasets/`` from the root of this repo")
+ print("#" * 100)
+ exit(1)
+
+
+class StableDataModuleFromConfig(LightningDataModule):
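+    """LightningDataModule assembled from OmegaConf dicts; every split config
+    must provide `datapipeline` and `loader` sub-configs."""
+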
+ def __init__(
+ self,
+ train: DictConfig,
+ validation: Optional[DictConfig] = None,
+ test: Optional[DictConfig] = None,
+ skip_val_loader: bool = False,
+ dummy: bool = False,
+ ):
+ super().__init__()
+ self.train_config = train
+ assert (
+ "datapipeline" in self.train_config and "loader" in self.train_config
+ ), "train config requires the fields `datapipeline` and `loader`"
+
+ self.val_config = validation
+ if not skip_val_loader:
+ if self.val_config is not None:
+ assert (
+ "datapipeline" in self.val_config and "loader" in self.val_config
+ ), "validation config requires the fields `datapipeline` and `loader`"
+ else:
+ print(
+ "Warning: No Validation datapipeline defined, using that one from training"
+ )
+ self.val_config = train
+
+ self.test_config = test
+ if self.test_config is not None:
+ assert (
+ "datapipeline" in self.test_config and "loader" in self.test_config
+ ), "test config requires the fields `datapipeline` and `loader`"
+
+ self.dummy = dummy
+ if self.dummy:
+ print("#" * 100)
+ print("USING DUMMY DATASET: HOPE YOU'RE DEBUGGING ;)")
+ print("#" * 100)
+
+ def setup(self, stage: str) -> None:
+ print("Preparing datasets")
+ if self.dummy:
+ data_fn = create_dummy_dataset
+ else:
+ data_fn = create_dataset
+
+ self.train_datapipeline = data_fn(**self.train_config.datapipeline)
+ if self.val_config:
+ self.val_datapipeline = data_fn(**self.val_config.datapipeline)
+ if self.test_config:
+ self.test_datapipeline = data_fn(**self.test_config.datapipeline)
+
+ def train_dataloader(self) -> torchdata.datapipes.iter.IterDataPipe:
+ loader = create_loader(self.train_datapipeline, **self.train_config.loader)
+ return loader
+
+ def val_dataloader(self) -> wds.DataPipeline:
+ return create_loader(self.val_datapipeline, **self.val_config.loader)
+
+ def test_dataloader(self) -> wds.DataPipeline:
+ return create_loader(self.test_datapipeline, **self.test_config.loader)
diff --git a/sgm/data/mnist.py b/sgm/data/mnist.py
new file mode 100644
index 0000000000000000000000000000000000000000..dea4d7e670666bec80ecb22aa89603345e173d09
--- /dev/null
+++ b/sgm/data/mnist.py
@@ -0,0 +1,85 @@
+import pytorch_lightning as pl
+import torchvision
+from torch.utils.data import DataLoader, Dataset
+from torchvision import transforms
+
+
+class MNISTDataDictWrapper(Dataset):
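+    """Same dict wrapper as CIFAR10DataDictWrapper, applied to MNIST."""
+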
+ def __init__(self, dset):
+ super().__init__()
+ self.dset = dset
+
+ def __getitem__(self, i):
+ x, y = self.dset[i]
+ return {"jpg": x, "cls": y}
+
+ def __len__(self):
+ return len(self.dset)
+
+
+class MNISTLoader(pl.LightningDataModule):
+ def __init__(self, batch_size, num_workers=0, prefetch_factor=2, shuffle=True):
+ super().__init__()
+
+ transform = transforms.Compose(
+ [transforms.ToTensor(), transforms.Lambda(lambda x: x * 2.0 - 1.0)]
+ )
+
+ self.batch_size = batch_size
+ self.num_workers = num_workers
+        # DataLoader requires prefetch_factor=None (not 0) when num_workers == 0
+        self.prefetch_factor = prefetch_factor if num_workers > 0 else None
+ self.shuffle = shuffle
+ self.train_dataset = MNISTDataDictWrapper(
+ torchvision.datasets.MNIST(
+ root=".data/", train=True, download=True, transform=transform
+ )
+ )
+ self.test_dataset = MNISTDataDictWrapper(
+ torchvision.datasets.MNIST(
+ root=".data/", train=False, download=True, transform=transform
+ )
+ )
+
+ def prepare_data(self):
+ pass
+
+ def train_dataloader(self):
+ return DataLoader(
+ self.train_dataset,
+ batch_size=self.batch_size,
+ shuffle=self.shuffle,
+ num_workers=self.num_workers,
+ prefetch_factor=self.prefetch_factor,
+ )
+
+ def test_dataloader(self):
+ return DataLoader(
+ self.test_dataset,
+ batch_size=self.batch_size,
+ shuffle=self.shuffle,
+ num_workers=self.num_workers,
+ prefetch_factor=self.prefetch_factor,
+ )
+
+ def val_dataloader(self):
+ return DataLoader(
+ self.test_dataset,
+ batch_size=self.batch_size,
+ shuffle=self.shuffle,
+ num_workers=self.num_workers,
+ prefetch_factor=self.prefetch_factor,
+ )
+
+
+if __name__ == "__main__":
+ dset = MNISTDataDictWrapper(
+ torchvision.datasets.MNIST(
+ root=".data/",
+ train=False,
+ download=True,
+ transform=transforms.Compose(
+ [transforms.ToTensor(), transforms.Lambda(lambda x: x * 2.0 - 1.0)]
+ ),
+ )
+ )
+ ex = dset[0]
diff --git a/sgm/inference/api.py b/sgm/inference/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..a359a67bcd9740acc9e320d2f26dc6a3befb36e0
--- /dev/null
+++ b/sgm/inference/api.py
@@ -0,0 +1,385 @@
+import pathlib
+from dataclasses import asdict, dataclass
+from enum import Enum
+from typing import Optional
+
+from omegaconf import OmegaConf
+
+from sgm.inference.helpers import (Img2ImgDiscretizationWrapper, do_img2img,
+ do_sample)
+from sgm.modules.diffusionmodules.sampling import (DPMPP2MSampler,
+ DPMPP2SAncestralSampler,
+ EulerAncestralSampler,
+ EulerEDMSampler,
+ HeunEDMSampler,
+ LinearMultistepSampler)
+from sgm.util import load_model_from_config
+
+
+class ModelArchitecture(str, Enum):
+ SD_2_1 = "stable-diffusion-v2-1"
+ SD_2_1_768 = "stable-diffusion-v2-1-768"
+ SDXL_V0_9_BASE = "stable-diffusion-xl-v0-9-base"
+ SDXL_V0_9_REFINER = "stable-diffusion-xl-v0-9-refiner"
+ SDXL_V1_BASE = "stable-diffusion-xl-v1-base"
+ SDXL_V1_REFINER = "stable-diffusion-xl-v1-refiner"
+
+
+class Sampler(str, Enum):
+ EULER_EDM = "EulerEDMSampler"
+ HEUN_EDM = "HeunEDMSampler"
+ EULER_ANCESTRAL = "EulerAncestralSampler"
+ DPMPP2S_ANCESTRAL = "DPMPP2SAncestralSampler"
+ DPMPP2M = "DPMPP2MSampler"
+ LINEAR_MULTISTEP = "LinearMultistepSampler"
+
+
+class Discretization(str, Enum):
+ LEGACY_DDPM = "LegacyDDPMDiscretization"
+ EDM = "EDMDiscretization"
+
+
+class Guider(str, Enum):
+ VANILLA = "VanillaCFG"
+ IDENTITY = "IdentityGuider"
+
+
+class Thresholder(str, Enum):
+ NONE = "None"
+
+
+@dataclass
+class SamplingParams:
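+    """Sampling hyper-parameters; the defaults target 1024x1024 SDXL-style sampling."""
+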
+ width: int = 1024
+ height: int = 1024
+ steps: int = 50
+ sampler: Sampler = Sampler.DPMPP2M
+ discretization: Discretization = Discretization.LEGACY_DDPM
+ guider: Guider = Guider.VANILLA
+ thresholder: Thresholder = Thresholder.NONE
+ scale: float = 6.0
+ aesthetic_score: float = 5.0
+ negative_aesthetic_score: float = 5.0
+ img2img_strength: float = 1.0
+ orig_width: int = 1024
+ orig_height: int = 1024
+ crop_coords_top: int = 0
+ crop_coords_left: int = 0
+ sigma_min: float = 0.0292
+ sigma_max: float = 14.6146
+ rho: float = 3.0
+ s_churn: float = 0.0
+ s_tmin: float = 0.0
+ s_tmax: float = 999.0
+ s_noise: float = 1.0
+ eta: float = 1.0
+ order: int = 4
+
+
+@dataclass
+class SamplingSpec:
+ width: int
+ height: int
+ channels: int
+ factor: int
+ is_legacy: bool
+ config: str
+ ckpt: str
+ is_guided: bool
+
+
+model_specs = {
+ ModelArchitecture.SD_2_1: SamplingSpec(
+ height=512,
+ width=512,
+ channels=4,
+ factor=8,
+ is_legacy=True,
+ config="sd_2_1.yaml",
+ ckpt="v2-1_512-ema-pruned.safetensors",
+ is_guided=True,
+ ),
+ ModelArchitecture.SD_2_1_768: SamplingSpec(
+ height=768,
+ width=768,
+ channels=4,
+ factor=8,
+ is_legacy=True,
+ config="sd_2_1_768.yaml",
+ ckpt="v2-1_768-ema-pruned.safetensors",
+ is_guided=True,
+ ),
+ ModelArchitecture.SDXL_V0_9_BASE: SamplingSpec(
+ height=1024,
+ width=1024,
+ channels=4,
+ factor=8,
+ is_legacy=False,
+ config="sd_xl_base.yaml",
+ ckpt="sd_xl_base_0.9.safetensors",
+ is_guided=True,
+ ),
+ ModelArchitecture.SDXL_V0_9_REFINER: SamplingSpec(
+ height=1024,
+ width=1024,
+ channels=4,
+ factor=8,
+ is_legacy=True,
+ config="sd_xl_refiner.yaml",
+ ckpt="sd_xl_refiner_0.9.safetensors",
+ is_guided=True,
+ ),
+ ModelArchitecture.SDXL_V1_BASE: SamplingSpec(
+ height=1024,
+ width=1024,
+ channels=4,
+ factor=8,
+ is_legacy=False,
+ config="sd_xl_base.yaml",
+ ckpt="sd_xl_base_1.0.safetensors",
+ is_guided=True,
+ ),
+ ModelArchitecture.SDXL_V1_REFINER: SamplingSpec(
+ height=1024,
+ width=1024,
+ channels=4,
+ factor=8,
+ is_legacy=True,
+ config="sd_xl_refiner.yaml",
+ ckpt="sd_xl_refiner_1.0.safetensors",
+ is_guided=True,
+ ),
+}
+
+
+class SamplingPipeline:
+ def __init__(
+ self,
+ model_id: ModelArchitecture,
+ model_path="checkpoints",
+ config_path="configs/inference",
+ device="cuda",
+ use_fp16=True,
+ ) -> None:
+ if model_id not in model_specs:
+ raise ValueError(f"Model {model_id} not supported")
+ self.model_id = model_id
+ self.specs = model_specs[self.model_id]
+ self.config = str(pathlib.Path(config_path, self.specs.config))
+ self.ckpt = str(pathlib.Path(model_path, self.specs.ckpt))
+ self.device = device
+ self.model = self._load_model(device=device, use_fp16=use_fp16)
+
+ def _load_model(self, device="cuda", use_fp16=True):
+ config = OmegaConf.load(self.config)
+ model = load_model_from_config(config, self.ckpt)
+ if model is None:
+ raise ValueError(f"Model {self.model_id} could not be loaded")
+ model.to(device)
+ if use_fp16:
+ model.conditioner.half()
+ model.model.half()
+ return model
+
+ def text_to_image(
+ self,
+ params: SamplingParams,
+ prompt: str,
+ negative_prompt: str = "",
+ samples: int = 1,
+ return_latents: bool = False,
+ ):
+ sampler = get_sampler_config(params)
+ value_dict = asdict(params)
+ value_dict["prompt"] = prompt
+ value_dict["negative_prompt"] = negative_prompt
+ value_dict["target_width"] = params.width
+ value_dict["target_height"] = params.height
+ return do_sample(
+ self.model,
+ sampler,
+ value_dict,
+ samples,
+ params.height,
+ params.width,
+ self.specs.channels,
+ self.specs.factor,
+ force_uc_zero_embeddings=["txt"] if not self.specs.is_legacy else [],
+ return_latents=return_latents,
+ filter=None,
+ )
+
+ def image_to_image(
+ self,
+ params: SamplingParams,
+ image,
+ prompt: str,
+ negative_prompt: str = "",
+ samples: int = 1,
+ return_latents: bool = False,
+ ):
+ sampler = get_sampler_config(params)
+
+ if params.img2img_strength < 1.0:
+ sampler.discretization = Img2ImgDiscretizationWrapper(
+ sampler.discretization,
+ strength=params.img2img_strength,
+ )
+ height, width = image.shape[2], image.shape[3]
+ value_dict = asdict(params)
+ value_dict["prompt"] = prompt
+ value_dict["negative_prompt"] = negative_prompt
+ value_dict["target_width"] = width
+ value_dict["target_height"] = height
+ return do_img2img(
+ image,
+ self.model,
+ sampler,
+ value_dict,
+ samples,
+ force_uc_zero_embeddings=["txt"] if not self.specs.is_legacy else [],
+ return_latents=return_latents,
+ filter=None,
+ )
+
+ def refiner(
+ self,
+ params: SamplingParams,
+ image,
+ prompt: str,
+ negative_prompt: Optional[str] = None,
+ samples: int = 1,
+ return_latents: bool = False,
+ ):
+ sampler = get_sampler_config(params)
+ value_dict = {
+ "orig_width": image.shape[3] * 8,
+ "orig_height": image.shape[2] * 8,
+ "target_width": image.shape[3] * 8,
+ "target_height": image.shape[2] * 8,
+ "prompt": prompt,
+ "negative_prompt": negative_prompt,
+ "crop_coords_top": 0,
+ "crop_coords_left": 0,
+ "aesthetic_score": 6.0,
+ "negative_aesthetic_score": 2.5,
+ }
+
+ return do_img2img(
+ image,
+ self.model,
+ sampler,
+ value_dict,
+ samples,
+ skip_encode=True,
+ return_latents=return_latents,
+ filter=None,
+ )
+
+
+def get_guider_config(params: SamplingParams):
+ if params.guider == Guider.IDENTITY:
+ guider_config = {
+ "target": "sgm.modules.diffusionmodules.guiders.IdentityGuider"
+ }
+ elif params.guider == Guider.VANILLA:
+ scale = params.scale
+
+ thresholder = params.thresholder
+
+ if thresholder == Thresholder.NONE:
+ dyn_thresh_config = {
+ "target": "sgm.modules.diffusionmodules.sampling_utils.NoDynamicThresholding"
+ }
+ else:
+ raise NotImplementedError
+
+ guider_config = {
+ "target": "sgm.modules.diffusionmodules.guiders.VanillaCFG",
+ "params": {"scale": scale, "dyn_thresh_config": dyn_thresh_config},
+ }
+ else:
+ raise NotImplementedError
+ return guider_config
+
+
+def get_discretization_config(params: SamplingParams):
+ if params.discretization == Discretization.LEGACY_DDPM:
+ discretization_config = {
+ "target": "sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization",
+ }
+ elif params.discretization == Discretization.EDM:
+ discretization_config = {
+ "target": "sgm.modules.diffusionmodules.discretizer.EDMDiscretization",
+ "params": {
+ "sigma_min": params.sigma_min,
+ "sigma_max": params.sigma_max,
+ "rho": params.rho,
+ },
+ }
+ else:
+ raise ValueError(f"unknown discretization {params.discretization}")
+ return discretization_config
+
+
+def get_sampler_config(params: SamplingParams):
+ discretization_config = get_discretization_config(params)
+ guider_config = get_guider_config(params)
+ if params.sampler == Sampler.EULER_EDM:
+ return EulerEDMSampler(
+ num_steps=params.steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ s_churn=params.s_churn,
+ s_tmin=params.s_tmin,
+ s_tmax=params.s_tmax,
+ s_noise=params.s_noise,
+ verbose=True,
+ )
+ if params.sampler == Sampler.HEUN_EDM:
+ return HeunEDMSampler(
+ num_steps=params.steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ s_churn=params.s_churn,
+ s_tmin=params.s_tmin,
+ s_tmax=params.s_tmax,
+ s_noise=params.s_noise,
+ verbose=True,
+ )
+ if params.sampler == Sampler.EULER_ANCESTRAL:
+ return EulerAncestralSampler(
+ num_steps=params.steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ eta=params.eta,
+ s_noise=params.s_noise,
+ verbose=True,
+ )
+ if params.sampler == Sampler.DPMPP2S_ANCESTRAL:
+ return DPMPP2SAncestralSampler(
+ num_steps=params.steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ eta=params.eta,
+ s_noise=params.s_noise,
+ verbose=True,
+ )
+ if params.sampler == Sampler.DPMPP2M:
+ return DPMPP2MSampler(
+ num_steps=params.steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ verbose=True,
+ )
+ if params.sampler == Sampler.LINEAR_MULTISTEP:
+ return LinearMultistepSampler(
+ num_steps=params.steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ order=params.order,
+ verbose=True,
+ )
+
+ raise ValueError(f"unknown sampler {params.sampler}!")
diff --git a/sgm/inference/helpers.py b/sgm/inference/helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..31b0ec3dc414bf522261e35f73805810cd35582d
--- /dev/null
+++ b/sgm/inference/helpers.py
@@ -0,0 +1,305 @@
+import math
+import os
+from typing import Dict, List, Optional, Union
+
+import numpy as np
+import torch
+from einops import rearrange
+from imwatermark import WatermarkEncoder
+from omegaconf import ListConfig
+from PIL import Image
+from torch import autocast
+
+from sgm.util import append_dims
+
+
+class WatermarkEmbedder:
+ def __init__(self, watermark):
+ self.watermark = watermark
+        self.num_bits = len(self.watermark)
+ self.encoder = WatermarkEncoder()
+ self.encoder.set_watermark("bits", self.watermark)
+
+ def __call__(self, image: torch.Tensor) -> torch.Tensor:
+ """
+ Adds a predefined watermark to the input image
+
+ Args:
+ image: ([N,] B, RGB, H, W) in range [0, 1]
+
+ Returns:
+ same as input but watermarked
+ """
+ squeeze = len(image.shape) == 4
+ if squeeze:
+ image = image[None, ...]
+ n = image.shape[0]
+ image_np = rearrange(
+ (255 * image).detach().cpu(), "n b c h w -> (n b) h w c"
+ ).numpy()[:, :, :, ::-1]
+        # torch (b, c, h, w) in [0, 1] -> numpy (b, h, w, c) in [0, 255]
+        # the watermarking library expects input in cv2's BGR channel order
+ for k in range(image_np.shape[0]):
+ image_np[k] = self.encoder.encode(image_np[k], "dwtDct")
+ image = torch.from_numpy(
+ rearrange(image_np[:, :, :, ::-1], "(n b) h w c -> n b c h w", n=n)
+ ).to(image.device)
+ image = torch.clamp(image / 255, min=0.0, max=1.0)
+ if squeeze:
+ image = image[0]
+ return image
+
+
+# A fixed 48-bit message that was chosen at random
+# WATERMARK_MESSAGE = 0xB3EC907BB19E
+WATERMARK_MESSAGE = 0b101100111110110010010000011110111011000110011110
+# bin(x)[2:] gives bits of x as str, use int to convert them to 0/1
+WATERMARK_BITS = [int(bit) for bit in bin(WATERMARK_MESSAGE)[2:]]
+embed_watermark = WatermarkEmbedder(WATERMARK_BITS)
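+
+# Usage sketch (illustrative): the embedder accepts (B, RGB, H, W) or
+# (N, B, RGB, H, W) tensors in [0, 1] and returns a watermarked tensor of the
+# same shape and range.
+#
+#   imgs = torch.rand(2, 3, 512, 512)
+#   watermarked = embed_watermark(imgs)  # still (2, 3, 512, 512) in [0, 1]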
+
+
+def get_unique_embedder_keys_from_conditioner(conditioner):
+ return list({x.input_key for x in conditioner.embedders})
+
+
+def perform_save_locally(save_path, samples):
+ os.makedirs(os.path.join(save_path), exist_ok=True)
+ base_count = len(os.listdir(os.path.join(save_path)))
+ samples = embed_watermark(samples)
+ for sample in samples:
+ sample = 255.0 * rearrange(sample.cpu().numpy(), "c h w -> h w c")
+ Image.fromarray(sample.astype(np.uint8)).save(
+ os.path.join(save_path, f"{base_count:09}.png")
+ )
+ base_count += 1
+
+
+class Img2ImgDiscretizationWrapper:
+    """
+    Wraps a discretization and prunes the sigmas.
+
+    params:
+        strength: float between 0.0 and 1.0; 1.0 means full sampling
+            (all sigmas are returned)
+    """
+
+ def __init__(self, discretization, strength: float = 1.0):
+ self.discretization = discretization
+ self.strength = strength
+ assert 0.0 <= self.strength <= 1.0
+
+    def __call__(self, *args, **kwargs):
+        # sigmas start large and then decrease
+        sigmas = self.discretization(*args, **kwargs)
+        print("sigmas after discretization, before pruning img2img:", sigmas)
+        sigmas = torch.flip(sigmas, (0,))
+        # compute the prune index before slicing so the printed value is the one used
+        prune_index = max(int(self.strength * len(sigmas)), 1)
+        sigmas = sigmas[:prune_index]
+        print("prune index:", prune_index)
+        sigmas = torch.flip(sigmas, (0,))
+        print("sigmas after pruning:", sigmas)
+        return sigmas
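+
+# Example (illustrative): with strength=0.5 only the half of the schedule with
+# the smallest sigmas is kept, so img2img starts from a partially noised image
+# instead of pure noise. `sampler` is any sampler from sampling.py.
+#
+#   sampler.discretization = Img2ImgDiscretizationWrapper(
+#       sampler.discretization, strength=0.5
+#   )
+#   # a 40-step schedule is pruned to its 20 smallest noise levels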
+
+
+def do_sample(
+ model,
+ sampler,
+ value_dict,
+ num_samples,
+ H,
+ W,
+ C,
+ F,
+ force_uc_zero_embeddings: Optional[List] = None,
+ batch2model_input: Optional[List] = None,
+ return_latents=False,
+ filter=None,
+ device="cuda",
+):
+ if force_uc_zero_embeddings is None:
+ force_uc_zero_embeddings = []
+ if batch2model_input is None:
+ batch2model_input = []
+
+ with torch.no_grad():
+ with autocast(device) as precision_scope:
+ with model.ema_scope():
+ num_samples = [num_samples]
+ batch, batch_uc = get_batch(
+ get_unique_embedder_keys_from_conditioner(model.conditioner),
+ value_dict,
+ num_samples,
+ )
+ for key in batch:
+ if isinstance(batch[key], torch.Tensor):
+ print(key, batch[key].shape)
+ elif isinstance(batch[key], list):
+ print(key, [len(l) for l in batch[key]])
+ else:
+ print(key, batch[key])
+ c, uc = model.conditioner.get_unconditional_conditioning(
+ batch,
+ batch_uc=batch_uc,
+ force_uc_zero_embeddings=force_uc_zero_embeddings,
+ )
+
+ for k in c:
+ if not k == "crossattn":
+ c[k], uc[k] = map(
+ lambda y: y[k][: math.prod(num_samples)].to(device), (c, uc)
+ )
+
+ additional_model_inputs = {}
+ for k in batch2model_input:
+ additional_model_inputs[k] = batch[k]
+
+ shape = (math.prod(num_samples), C, H // F, W // F)
+ randn = torch.randn(shape).to(device)
+
+ def denoiser(input, sigma, c):
+ return model.denoiser(
+ model.model, input, sigma, c, **additional_model_inputs
+ )
+
+ samples_z = sampler(denoiser, randn, cond=c, uc=uc)
+ samples_x = model.decode_first_stage(samples_z)
+ samples = torch.clamp((samples_x + 1.0) / 2.0, min=0.0, max=1.0)
+
+ if filter is not None:
+ samples = filter(samples)
+
+ if return_latents:
+ return samples, samples_z
+ return samples
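+
+# Shape note (illustrative): the initial latent drawn above is
+# (num_samples, C, H // F, W // F), where F is the downsampling factor of the
+# first stage. E.g. for an SD-style model with C=4 and F=8, one 1024x1024
+# sample starts from randn of shape (1, 4, 128, 128).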
+
+
+def get_batch(keys, value_dict, N: Union[List, ListConfig], device="cuda"):
+ # Hardcoded demo setups; might undergo some changes in the future
+
+ batch = {}
+ batch_uc = {}
+
+ for key in keys:
+ if key == "txt":
+ batch["txt"] = (
+ np.repeat([value_dict["prompt"]], repeats=math.prod(N))
+ .reshape(N)
+ .tolist()
+ )
+ batch_uc["txt"] = (
+ np.repeat([value_dict["negative_prompt"]], repeats=math.prod(N))
+ .reshape(N)
+ .tolist()
+ )
+ elif key == "original_size_as_tuple":
+ batch["original_size_as_tuple"] = (
+ torch.tensor([value_dict["orig_height"], value_dict["orig_width"]])
+ .to(device)
+ .repeat(*N, 1)
+ )
+ elif key == "crop_coords_top_left":
+ batch["crop_coords_top_left"] = (
+ torch.tensor(
+ [value_dict["crop_coords_top"], value_dict["crop_coords_left"]]
+ )
+ .to(device)
+ .repeat(*N, 1)
+ )
+ elif key == "aesthetic_score":
+ batch["aesthetic_score"] = (
+ torch.tensor([value_dict["aesthetic_score"]]).to(device).repeat(*N, 1)
+ )
+ batch_uc["aesthetic_score"] = (
+ torch.tensor([value_dict["negative_aesthetic_score"]])
+ .to(device)
+ .repeat(*N, 1)
+ )
+
+ elif key == "target_size_as_tuple":
+ batch["target_size_as_tuple"] = (
+ torch.tensor([value_dict["target_height"], value_dict["target_width"]])
+ .to(device)
+ .repeat(*N, 1)
+ )
+ else:
+ batch[key] = value_dict[key]
+
+ for key in batch.keys():
+ if key not in batch_uc and isinstance(batch[key], torch.Tensor):
+ batch_uc[key] = torch.clone(batch[key])
+ return batch, batch_uc
+
+
+def get_input_image_tensor(image: Image.Image, device="cuda"):
+ w, h = image.size
+ print(f"loaded input image of size ({w}, {h})")
+ width, height = map(
+ lambda x: x - x % 64, (w, h)
+ ) # resize to integer multiple of 64
+ image = image.resize((width, height))
+ image_array = np.array(image.convert("RGB"))
+ image_array = image_array[None].transpose(0, 3, 1, 2)
+ image_tensor = torch.from_numpy(image_array).to(dtype=torch.float32) / 127.5 - 1.0
+ return image_tensor.to(device)
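+
+# Usage sketch (illustrative): load an image, snap it to a multiple of 64, and
+# map pixels from [0, 255] to [-1, 1], ready for model.encode_first_stage.
+# The file path is hypothetical.
+#
+#   img = Image.open("input.png")
+#   tensor = get_input_image_tensor(img)  # (1, 3, H, W) float32 in [-1, 1]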
+
+
+def do_img2img(
+ img,
+ model,
+ sampler,
+ value_dict,
+ num_samples,
+    force_uc_zero_embeddings: Optional[List] = None,
+    additional_kwargs: Optional[Dict] = None,
+ offset_noise_level: float = 0.0,
+ return_latents=False,
+ skip_encode=False,
+ filter=None,
+ device="cuda",
+):
+    # avoid mutable default arguments
+    if force_uc_zero_embeddings is None:
+        force_uc_zero_embeddings = []
+    if additional_kwargs is None:
+        additional_kwargs = {}
+    with torch.no_grad():
+ with autocast(device) as precision_scope:
+ with model.ema_scope():
+ batch, batch_uc = get_batch(
+ get_unique_embedder_keys_from_conditioner(model.conditioner),
+ value_dict,
+ [num_samples],
+ )
+ c, uc = model.conditioner.get_unconditional_conditioning(
+ batch,
+ batch_uc=batch_uc,
+ force_uc_zero_embeddings=force_uc_zero_embeddings,
+ )
+
+ for k in c:
+ c[k], uc[k] = map(lambda y: y[k][:num_samples].to(device), (c, uc))
+
+ for k in additional_kwargs:
+ c[k] = uc[k] = additional_kwargs[k]
+ if skip_encode:
+ z = img
+ else:
+ z = model.encode_first_stage(img)
+ noise = torch.randn_like(z)
+ sigmas = sampler.discretization(sampler.num_steps)
+ sigma = sigmas[0].to(z.device)
+
+ if offset_noise_level > 0.0:
+ noise = noise + offset_noise_level * append_dims(
+ torch.randn(z.shape[0], device=z.device), z.ndim
+ )
+ noised_z = z + noise * append_dims(sigma, z.ndim)
+ noised_z = noised_z / torch.sqrt(
+ 1.0 + sigmas[0] ** 2.0
+ ) # Note: hardcoded to DDPM-like scaling. need to generalize later.
+
+ def denoiser(x, sigma, c):
+ return model.denoiser(model.model, x, sigma, c)
+
+ samples_z = sampler(denoiser, noised_z, cond=c, uc=uc)
+ samples_x = model.decode_first_stage(samples_z)
+ samples = torch.clamp((samples_x + 1.0) / 2.0, min=0.0, max=1.0)
+
+ if filter is not None:
+ samples = filter(samples)
+
+ if return_latents:
+ return samples, samples_z
+ return samples
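+
+# Math note (illustrative): the initial noising in do_img2img follows
+# z_sigma = z + sigma * eps and is then rescaled by 1 / sqrt(1 + sigma^2) to
+# match the variance-preserving (DDPM-like) latent scaling the denoiser
+# expects, as flagged by the inline comment above.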
diff --git a/sgm/lr_scheduler.py b/sgm/lr_scheduler.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2f4d384c1fcaff0df13e0564450d3fa972ace42
--- /dev/null
+++ b/sgm/lr_scheduler.py
@@ -0,0 +1,135 @@
+import numpy as np
+
+
+class LambdaWarmUpCosineScheduler:
+ """
+ note: use with a base_lr of 1.0
+ """
+
+ def __init__(
+ self,
+ warm_up_steps,
+ lr_min,
+ lr_max,
+ lr_start,
+ max_decay_steps,
+ verbosity_interval=0,
+ ):
+ self.lr_warm_up_steps = warm_up_steps
+ self.lr_start = lr_start
+ self.lr_min = lr_min
+ self.lr_max = lr_max
+ self.lr_max_decay_steps = max_decay_steps
+ self.last_lr = 0.0
+ self.verbosity_interval = verbosity_interval
+
+ def schedule(self, n, **kwargs):
+ if self.verbosity_interval > 0:
+ if n % self.verbosity_interval == 0:
+ print(f"current step: {n}, recent lr-multiplier: {self.last_lr}")
+ if n < self.lr_warm_up_steps:
+ lr = (
+ self.lr_max - self.lr_start
+ ) / self.lr_warm_up_steps * n + self.lr_start
+ self.last_lr = lr
+ return lr
+ else:
+ t = (n - self.lr_warm_up_steps) / (
+ self.lr_max_decay_steps - self.lr_warm_up_steps
+ )
+ t = min(t, 1.0)
+ lr = self.lr_min + 0.5 * (self.lr_max - self.lr_min) * (
+ 1 + np.cos(t * np.pi)
+ )
+ self.last_lr = lr
+ return lr
+
+ def __call__(self, n, **kwargs):
+ return self.schedule(n, **kwargs)
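+
+# Usage sketch (illustrative): plugging the schedule into torch's LambdaLR.
+# Following the "base_lr of 1.0" note, the schedule's lr_max/lr_min act as the
+# effective learning rates; `model` is a hypothetical nn.Module.
+#
+#   import torch
+#   sched_fn = LambdaWarmUpCosineScheduler(
+#       warm_up_steps=1000, lr_min=1e-6, lr_max=1e-4, lr_start=0.0,
+#       max_decay_steps=100_000,
+#   )
+#   opt = torch.optim.AdamW(model.parameters(), lr=1.0)
+#   lr_sched = torch.optim.lr_scheduler.LambdaLR(opt, lr_lambda=sched_fn)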
+
+
+class LambdaWarmUpCosineScheduler2:
+ """
+ supports repeated iterations, configurable via lists
+ note: use with a base_lr of 1.0.
+ """
+
+ def __init__(
+ self, warm_up_steps, f_min, f_max, f_start, cycle_lengths, verbosity_interval=0
+ ):
+ assert (
+ len(warm_up_steps)
+ == len(f_min)
+ == len(f_max)
+ == len(f_start)
+ == len(cycle_lengths)
+ )
+ self.lr_warm_up_steps = warm_up_steps
+ self.f_start = f_start
+ self.f_min = f_min
+ self.f_max = f_max
+ self.cycle_lengths = cycle_lengths
+ self.cum_cycles = np.cumsum([0] + list(self.cycle_lengths))
+ self.last_f = 0.0
+ self.verbosity_interval = verbosity_interval
+
+ def find_in_interval(self, n):
+ interval = 0
+ for cl in self.cum_cycles[1:]:
+ if n <= cl:
+ return interval
+ interval += 1
+
+ def schedule(self, n, **kwargs):
+ cycle = self.find_in_interval(n)
+ n = n - self.cum_cycles[cycle]
+ if self.verbosity_interval > 0:
+ if n % self.verbosity_interval == 0:
+ print(
+ f"current step: {n}, recent lr-multiplier: {self.last_f}, "
+ f"current cycle {cycle}"
+ )
+ if n < self.lr_warm_up_steps[cycle]:
+ f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[
+ cycle
+ ] * n + self.f_start[cycle]
+ self.last_f = f
+ return f
+ else:
+ t = (n - self.lr_warm_up_steps[cycle]) / (
+ self.cycle_lengths[cycle] - self.lr_warm_up_steps[cycle]
+ )
+ t = min(t, 1.0)
+ f = self.f_min[cycle] + 0.5 * (self.f_max[cycle] - self.f_min[cycle]) * (
+ 1 + np.cos(t * np.pi)
+ )
+ self.last_f = f
+ return f
+
+ def __call__(self, n, **kwargs):
+ return self.schedule(n, **kwargs)
+
+
+class LambdaLinearScheduler(LambdaWarmUpCosineScheduler2):
+ def schedule(self, n, **kwargs):
+ cycle = self.find_in_interval(n)
+ n = n - self.cum_cycles[cycle]
+ if self.verbosity_interval > 0:
+ if n % self.verbosity_interval == 0:
+ print(
+ f"current step: {n}, recent lr-multiplier: {self.last_f}, "
+ f"current cycle {cycle}"
+ )
+
+ if n < self.lr_warm_up_steps[cycle]:
+ f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[
+ cycle
+ ] * n + self.f_start[cycle]
+ self.last_f = f
+ return f
+ else:
+ f = self.f_min[cycle] + (self.f_max[cycle] - self.f_min[cycle]) * (
+ self.cycle_lengths[cycle] - n
+ ) / (self.cycle_lengths[cycle])
+ self.last_f = f
+ return f
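+
+# Worked example (illustrative) for LambdaLinearScheduler with a single cycle,
+# warm_up_steps=[100], f_start=[0.0], f_max=[1.0], f_min=[0.1],
+# cycle_lengths=[1000]:
+#   n = 50  (warm-up): f = (1.0 - 0.0) / 100 * 50 + 0.0            = 0.5
+#   n = 550 (decay):   f = 0.1 + (1.0 - 0.1) * (1000 - 550) / 1000 = 0.505
+# i.e. the multiplier ramps up linearly, then decays linearly to f_min at the
+# end of the cycle.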
diff --git a/sgm/models/__init__.py b/sgm/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c410b3747afc208e4204c8f140170e0a7808eace
--- /dev/null
+++ b/sgm/models/__init__.py
@@ -0,0 +1,2 @@
+from .autoencoder import AutoencodingEngine
+from .diffusion import DiffusionEngine
diff --git a/sgm/models/autoencoder.py b/sgm/models/autoencoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..2949b91011a2be7a6b8ca17ce260812f20ce8b75
--- /dev/null
+++ b/sgm/models/autoencoder.py
@@ -0,0 +1,615 @@
+import logging
+import math
+import re
+from abc import abstractmethod
+from contextlib import contextmanager
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+import pytorch_lightning as pl
+import torch
+import torch.nn as nn
+from einops import rearrange
+from packaging import version
+
+from ..modules.autoencoding.regularizers import AbstractRegularizer
+from ..modules.ema import LitEma
+from ..util import (default, get_nested_attribute, get_obj_from_str,
+ instantiate_from_config)
+
+logpy = logging.getLogger(__name__)
+
+
+class AbstractAutoencoder(pl.LightningModule):
+ """
+ This is the base class for all autoencoders, including image autoencoders, image autoencoders with discriminators,
+ unCLIP models, etc. Hence, it is fairly general, and specific features
+ (e.g. discriminator training, encoding, decoding) must be implemented in subclasses.
+ """
+
+ def __init__(
+ self,
+ ema_decay: Union[None, float] = None,
+ monitor: Union[None, str] = None,
+ input_key: str = "jpg",
+ ):
+ super().__init__()
+
+ self.input_key = input_key
+ self.use_ema = ema_decay is not None
+ if monitor is not None:
+ self.monitor = monitor
+
+ if self.use_ema:
+ self.model_ema = LitEma(self, decay=ema_decay)
+            logpy.info(f"Keeping EMAs of {len(list(self.model_ema.buffers()))} buffers.")
+
+ if version.parse(torch.__version__) >= version.parse("2.0.0"):
+ self.automatic_optimization = False
+
+ def apply_ckpt(self, ckpt: Union[None, str, dict]):
+ if ckpt is None:
+ return
+ if isinstance(ckpt, str):
+ ckpt = {
+ "target": "sgm.modules.checkpoint.CheckpointEngine",
+ "params": {"ckpt_path": ckpt},
+ }
+ engine = instantiate_from_config(ckpt)
+ engine(self)
+
+ @abstractmethod
+ def get_input(self, batch) -> Any:
+ raise NotImplementedError()
+
+ def on_train_batch_end(self, *args, **kwargs):
+ # for EMA computation
+ if self.use_ema:
+ self.model_ema(self)
+
+ @contextmanager
+ def ema_scope(self, context=None):
+ if self.use_ema:
+ self.model_ema.store(self.parameters())
+ self.model_ema.copy_to(self)
+ if context is not None:
+ logpy.info(f"{context}: Switched to EMA weights")
+ try:
+ yield None
+ finally:
+ if self.use_ema:
+ self.model_ema.restore(self.parameters())
+ if context is not None:
+ logpy.info(f"{context}: Restored training weights")
+
+ @abstractmethod
+ def encode(self, *args, **kwargs) -> torch.Tensor:
+ raise NotImplementedError("encode()-method of abstract base class called")
+
+ @abstractmethod
+ def decode(self, *args, **kwargs) -> torch.Tensor:
+ raise NotImplementedError("decode()-method of abstract base class called")
+
+ def instantiate_optimizer_from_config(self, params, lr, cfg):
+ logpy.info(f"loading >>> {cfg['target']} <<< optimizer from config")
+ return get_obj_from_str(cfg["target"])(
+ params, lr=lr, **cfg.get("params", dict())
+ )
+
+ def configure_optimizers(self) -> Any:
+ raise NotImplementedError()
+
+
+class AutoencodingEngine(AbstractAutoencoder):
+ """
+ Base class for all image autoencoders that we train, like VQGAN or AutoencoderKL
+ (we also restore them explicitly as special cases for legacy reasons).
+ Regularizations such as KL or VQ are moved to the regularizer class.
+ """
+
+ def __init__(
+ self,
+ *args,
+ encoder_config: Dict,
+ decoder_config: Dict,
+ loss_config: Dict,
+ regularizer_config: Dict,
+ optimizer_config: Union[Dict, None] = None,
+ lr_g_factor: float = 1.0,
+ trainable_ae_params: Optional[List[List[str]]] = None,
+ ae_optimizer_args: Optional[List[dict]] = None,
+ trainable_disc_params: Optional[List[List[str]]] = None,
+ disc_optimizer_args: Optional[List[dict]] = None,
+ disc_start_iter: int = 0,
+ diff_boost_factor: float = 3.0,
+ ckpt_engine: Union[None, str, dict] = None,
+ ckpt_path: Optional[str] = None,
+ additional_decode_keys: Optional[List[str]] = None,
+ **kwargs,
+ ):
+ super().__init__(*args, **kwargs)
+ self.automatic_optimization = False # pytorch lightning
+
+ self.encoder: torch.nn.Module = instantiate_from_config(encoder_config)
+ self.decoder: torch.nn.Module = instantiate_from_config(decoder_config)
+ self.loss: torch.nn.Module = instantiate_from_config(loss_config)
+ self.regularization: AbstractRegularizer = instantiate_from_config(
+ regularizer_config
+ )
+ self.optimizer_config = default(
+ optimizer_config, {"target": "torch.optim.Adam"}
+ )
+ self.diff_boost_factor = diff_boost_factor
+ self.disc_start_iter = disc_start_iter
+ self.lr_g_factor = lr_g_factor
+ self.trainable_ae_params = trainable_ae_params
+ if self.trainable_ae_params is not None:
+ self.ae_optimizer_args = default(
+ ae_optimizer_args,
+ [{} for _ in range(len(self.trainable_ae_params))],
+ )
+ assert len(self.ae_optimizer_args) == len(self.trainable_ae_params)
+ else:
+            self.ae_optimizer_args = [{}]  # makes type consistent
+
+ self.trainable_disc_params = trainable_disc_params
+ if self.trainable_disc_params is not None:
+ self.disc_optimizer_args = default(
+ disc_optimizer_args,
+ [{} for _ in range(len(self.trainable_disc_params))],
+ )
+ assert len(self.disc_optimizer_args) == len(self.trainable_disc_params)
+ else:
+            self.disc_optimizer_args = [{}]  # makes type consistent
+
+ if ckpt_path is not None:
+ assert ckpt_engine is None, "Can't set ckpt_engine and ckpt_path"
+            logpy.warn("Checkpoint path is deprecated, use `ckpt_engine` instead")
+ self.apply_ckpt(default(ckpt_path, ckpt_engine))
+ self.additional_decode_keys = set(default(additional_decode_keys, []))
+
+ def get_input(self, batch: Dict) -> torch.Tensor:
+ # assuming unified data format, dataloader returns a dict.
+ # image tensors should be scaled to -1 ... 1 and in channels-first
+        # format (e.g., bchw instead of bhwc)
+ return batch[self.input_key]
+
+ def get_autoencoder_params(self) -> list:
+ params = []
+ if hasattr(self.loss, "get_trainable_autoencoder_parameters"):
+ params += list(self.loss.get_trainable_autoencoder_parameters())
+ if hasattr(self.regularization, "get_trainable_parameters"):
+ params += list(self.regularization.get_trainable_parameters())
+ params = params + list(self.encoder.parameters())
+ params = params + list(self.decoder.parameters())
+ return params
+
+ def get_discriminator_params(self) -> list:
+ if hasattr(self.loss, "get_trainable_parameters"):
+ params = list(self.loss.get_trainable_parameters()) # e.g., discriminator
+ else:
+ params = []
+ return params
+
+ def get_last_layer(self):
+ return self.decoder.get_last_layer()
+
+ def encode(
+ self,
+ x: torch.Tensor,
+ return_reg_log: bool = False,
+ unregularized: bool = False,
+ ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]:
+ z = self.encoder(x)
+ if unregularized:
+ return z, dict()
+ z, reg_log = self.regularization(z)
+ if return_reg_log:
+ return z, reg_log
+ return z
+
+ def decode(self, z: torch.Tensor, **kwargs) -> torch.Tensor:
+ x = self.decoder(z, **kwargs)
+ return x
+
+ def forward(
+ self, x: torch.Tensor, **additional_decode_kwargs
+ ) -> Tuple[torch.Tensor, torch.Tensor, dict]:
+ z, reg_log = self.encode(x, return_reg_log=True)
+ dec = self.decode(z, **additional_decode_kwargs)
+ return z, dec, reg_log
+
+ def inner_training_step(
+ self, batch: dict, batch_idx: int, optimizer_idx: int = 0
+ ) -> torch.Tensor:
+ x = self.get_input(batch)
+ additional_decode_kwargs = {
+ key: batch[key] for key in self.additional_decode_keys.intersection(batch)
+ }
+ z, xrec, regularization_log = self(x, **additional_decode_kwargs)
+ if hasattr(self.loss, "forward_keys"):
+ extra_info = {
+ "z": z,
+ "optimizer_idx": optimizer_idx,
+ "global_step": self.global_step,
+ "last_layer": self.get_last_layer(),
+ "split": "train",
+ "regularization_log": regularization_log,
+ "autoencoder": self,
+ }
+ extra_info = {k: extra_info[k] for k in self.loss.forward_keys}
+ else:
+ extra_info = dict()
+
+ if optimizer_idx == 0:
+ # autoencode
+ out_loss = self.loss(x, xrec, **extra_info)
+ if isinstance(out_loss, tuple):
+ aeloss, log_dict_ae = out_loss
+ else:
+ # simple loss function
+ aeloss = out_loss
+ log_dict_ae = {"train/loss/rec": aeloss.detach()}
+
+ self.log_dict(
+ log_dict_ae,
+ prog_bar=False,
+ logger=True,
+ on_step=True,
+ on_epoch=True,
+ sync_dist=False,
+ )
+ self.log(
+ "loss",
+ aeloss.mean().detach(),
+ prog_bar=True,
+ logger=False,
+ on_epoch=False,
+ on_step=True,
+ )
+ return aeloss
+ elif optimizer_idx == 1:
+ # discriminator
+ discloss, log_dict_disc = self.loss(x, xrec, **extra_info)
+ # -> discriminator always needs to return a tuple
+ self.log_dict(
+ log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=True
+ )
+ return discloss
+ else:
+ raise NotImplementedError(f"Unknown optimizer {optimizer_idx}")
+
+ def training_step(self, batch: dict, batch_idx: int):
+ opts = self.optimizers()
+ if not isinstance(opts, list):
+ # Non-adversarial case
+ opts = [opts]
+ optimizer_idx = batch_idx % len(opts)
+ if self.global_step < self.disc_start_iter:
+ optimizer_idx = 0
+ opt = opts[optimizer_idx]
+ opt.zero_grad()
+ with opt.toggle_model():
+ loss = self.inner_training_step(
+ batch, batch_idx, optimizer_idx=optimizer_idx
+ )
+ self.manual_backward(loss)
+ opt.step()
+
+ def validation_step(self, batch: dict, batch_idx: int) -> Dict:
+ log_dict = self._validation_step(batch, batch_idx)
+ with self.ema_scope():
+ log_dict_ema = self._validation_step(batch, batch_idx, postfix="_ema")
+ log_dict.update(log_dict_ema)
+ return log_dict
+
+ def _validation_step(self, batch: dict, batch_idx: int, postfix: str = "") -> Dict:
+ x = self.get_input(batch)
+
+ z, xrec, regularization_log = self(x)
+ if hasattr(self.loss, "forward_keys"):
+ extra_info = {
+ "z": z,
+ "optimizer_idx": 0,
+ "global_step": self.global_step,
+ "last_layer": self.get_last_layer(),
+ "split": "val" + postfix,
+ "regularization_log": regularization_log,
+ "autoencoder": self,
+ }
+ extra_info = {k: extra_info[k] for k in self.loss.forward_keys}
+ else:
+ extra_info = dict()
+ out_loss = self.loss(x, xrec, **extra_info)
+ if isinstance(out_loss, tuple):
+ aeloss, log_dict_ae = out_loss
+ else:
+ # simple loss function
+ aeloss = out_loss
+ log_dict_ae = {f"val{postfix}/loss/rec": aeloss.detach()}
+ full_log_dict = log_dict_ae
+
+ if "optimizer_idx" in extra_info:
+ extra_info["optimizer_idx"] = 1
+ discloss, log_dict_disc = self.loss(x, xrec, **extra_info)
+ full_log_dict.update(log_dict_disc)
+ self.log(
+ f"val{postfix}/loss/rec",
+ log_dict_ae[f"val{postfix}/loss/rec"],
+ sync_dist=True,
+ )
+ self.log_dict(full_log_dict, sync_dist=True)
+ return full_log_dict
+
+ def get_param_groups(
+ self, parameter_names: List[List[str]], optimizer_args: List[dict]
+ ) -> Tuple[List[Dict[str, Any]], int]:
+ groups = []
+ num_params = 0
+ for names, args in zip(parameter_names, optimizer_args):
+ params = []
+ for pattern_ in names:
+ pattern_params = []
+ pattern = re.compile(pattern_)
+ for p_name, param in self.named_parameters():
+ if re.match(pattern, p_name):
+ pattern_params.append(param)
+ num_params += param.numel()
+ if len(pattern_params) == 0:
+ logpy.warn(f"Did not find parameters for pattern {pattern_}")
+ params.extend(pattern_params)
+ groups.append({"params": params, **args})
+ return groups, num_params
+
+ def configure_optimizers(self) -> List[torch.optim.Optimizer]:
+ if self.trainable_ae_params is None:
+ ae_params = self.get_autoencoder_params()
+ else:
+ ae_params, num_ae_params = self.get_param_groups(
+ self.trainable_ae_params, self.ae_optimizer_args
+ )
+ logpy.info(f"Number of trainable autoencoder parameters: {num_ae_params:,}")
+ if self.trainable_disc_params is None:
+ disc_params = self.get_discriminator_params()
+ else:
+ disc_params, num_disc_params = self.get_param_groups(
+ self.trainable_disc_params, self.disc_optimizer_args
+ )
+ logpy.info(
+ f"Number of trainable discriminator parameters: {num_disc_params:,}"
+ )
+ opt_ae = self.instantiate_optimizer_from_config(
+ ae_params,
+ default(self.lr_g_factor, 1.0) * self.learning_rate,
+ self.optimizer_config,
+ )
+ opts = [opt_ae]
+ if len(disc_params) > 0:
+ opt_disc = self.instantiate_optimizer_from_config(
+ disc_params, self.learning_rate, self.optimizer_config
+ )
+ opts.append(opt_disc)
+
+ return opts
+
+ @torch.no_grad()
+ def log_images(
+ self, batch: dict, additional_log_kwargs: Optional[Dict] = None, **kwargs
+ ) -> dict:
+ log = dict()
+ additional_decode_kwargs = {}
+ x = self.get_input(batch)
+ additional_decode_kwargs.update(
+ {key: batch[key] for key in self.additional_decode_keys.intersection(batch)}
+ )
+
+ _, xrec, _ = self(x, **additional_decode_kwargs)
+ log["inputs"] = x
+ log["reconstructions"] = xrec
+ diff = 0.5 * torch.abs(torch.clamp(xrec, -1.0, 1.0) - x)
+ diff.clamp_(0, 1.0)
+ log["diff"] = 2.0 * diff - 1.0
+ # diff_boost shows location of small errors, by boosting their
+ # brightness.
+ log["diff_boost"] = (
+ 2.0 * torch.clamp(self.diff_boost_factor * diff, 0.0, 1.0) - 1
+ )
+ if hasattr(self.loss, "log_images"):
+ log.update(self.loss.log_images(x, xrec))
+ with self.ema_scope():
+ _, xrec_ema, _ = self(x, **additional_decode_kwargs)
+ log["reconstructions_ema"] = xrec_ema
+ diff_ema = 0.5 * torch.abs(torch.clamp(xrec_ema, -1.0, 1.0) - x)
+ diff_ema.clamp_(0, 1.0)
+ log["diff_ema"] = 2.0 * diff_ema - 1.0
+ log["diff_boost_ema"] = (
+ 2.0 * torch.clamp(self.diff_boost_factor * diff_ema, 0.0, 1.0) - 1
+ )
+ if additional_log_kwargs:
+ additional_decode_kwargs.update(additional_log_kwargs)
+ _, xrec_add, _ = self(x, **additional_decode_kwargs)
+ log_str = "reconstructions-" + "-".join(
+ [f"{key}={additional_log_kwargs[key]}" for key in additional_log_kwargs]
+ )
+ log[log_str] = xrec_add
+ return log
+
+
+class AutoencodingEngineLegacy(AutoencodingEngine):
+ def __init__(self, embed_dim: int, **kwargs):
+ self.max_batch_size = kwargs.pop("max_batch_size", None)
+ ddconfig = kwargs.pop("ddconfig")
+ ckpt_path = kwargs.pop("ckpt_path", None)
+ ckpt_engine = kwargs.pop("ckpt_engine", None)
+ super().__init__(
+ encoder_config={
+ "target": "sgm.modules.diffusionmodules.model.Encoder",
+ "params": ddconfig,
+ },
+ decoder_config={
+ "target": "sgm.modules.diffusionmodules.model.Decoder",
+ "params": ddconfig,
+ },
+ **kwargs,
+ )
+ self.quant_conv = torch.nn.Conv2d(
+ (1 + ddconfig["double_z"]) * ddconfig["z_channels"],
+ (1 + ddconfig["double_z"]) * embed_dim,
+ 1,
+ )
+ self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1)
+ self.embed_dim = embed_dim
+
+ self.apply_ckpt(default(ckpt_path, ckpt_engine))
+
+ def get_autoencoder_params(self) -> list:
+ params = super().get_autoencoder_params()
+ return params
+
+ def encode(
+ self, x: torch.Tensor, return_reg_log: bool = False
+ ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]:
+ if self.max_batch_size is None:
+ z = self.encoder(x)
+ z = self.quant_conv(z)
+ else:
+ N = x.shape[0]
+ bs = self.max_batch_size
+ n_batches = int(math.ceil(N / bs))
+ z = list()
+ for i_batch in range(n_batches):
+ z_batch = self.encoder(x[i_batch * bs : (i_batch + 1) * bs])
+ z_batch = self.quant_conv(z_batch)
+ z.append(z_batch)
+ z = torch.cat(z, 0)
+
+ z, reg_log = self.regularization(z)
+ if return_reg_log:
+ return z, reg_log
+ return z
+
+ def decode(self, z: torch.Tensor, **decoder_kwargs) -> torch.Tensor:
+ if self.max_batch_size is None:
+ dec = self.post_quant_conv(z)
+ dec = self.decoder(dec, **decoder_kwargs)
+ else:
+ N = z.shape[0]
+ bs = self.max_batch_size
+ n_batches = int(math.ceil(N / bs))
+ dec = list()
+ for i_batch in range(n_batches):
+ dec_batch = self.post_quant_conv(z[i_batch * bs : (i_batch + 1) * bs])
+ dec_batch = self.decoder(dec_batch, **decoder_kwargs)
+ dec.append(dec_batch)
+ dec = torch.cat(dec, 0)
+
+ return dec
+
+
+class AutoencoderKL(AutoencodingEngineLegacy):
+ def __init__(self, **kwargs):
+ if "lossconfig" in kwargs:
+ kwargs["loss_config"] = kwargs.pop("lossconfig")
+ super().__init__(
+ regularizer_config={
+ "target": (
+ "sgm.modules.autoencoding.regularizers"
+ ".DiagonalGaussianRegularizer"
+ )
+ },
+ **kwargs,
+ )
+
+
+class AutoencoderLegacyVQ(AutoencodingEngineLegacy):
+ def __init__(
+ self,
+ embed_dim: int,
+ n_embed: int,
+ sane_index_shape: bool = False,
+ **kwargs,
+ ):
+ if "lossconfig" in kwargs:
+            logpy.warn("Parameter `lossconfig` is deprecated, use `loss_config`.")
+ kwargs["loss_config"] = kwargs.pop("lossconfig")
+ super().__init__(
+ regularizer_config={
+ "target": (
+ "sgm.modules.autoencoding.regularizers.quantize" ".VectorQuantizer"
+ ),
+ "params": {
+ "n_e": n_embed,
+ "e_dim": embed_dim,
+ "sane_index_shape": sane_index_shape,
+ },
+ },
+ **kwargs,
+ )
+
+
+class IdentityFirstStage(AbstractAutoencoder):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def get_input(self, x: Any) -> Any:
+ return x
+
+ def encode(self, x: Any, *args, **kwargs) -> Any:
+ return x
+
+ def decode(self, x: Any, *args, **kwargs) -> Any:
+ return x
+
+
+class AEIntegerWrapper(nn.Module):
+ def __init__(
+ self,
+ model: nn.Module,
+ shape: Union[None, Tuple[int, int], List[int]] = (16, 16),
+ regularization_key: str = "regularization",
+ encoder_kwargs: Optional[Dict[str, Any]] = None,
+ ):
+ super().__init__()
+ self.model = model
+ assert hasattr(model, "encode") and hasattr(
+ model, "decode"
+ ), "Need AE interface"
+ self.regularization = get_nested_attribute(model, regularization_key)
+ self.shape = shape
+ self.encoder_kwargs = default(encoder_kwargs, {"return_reg_log": True})
+
+ def encode(self, x) -> torch.Tensor:
+ assert (
+ not self.training
+ ), f"{self.__class__.__name__} only supports inference currently"
+ _, log = self.model.encode(x, **self.encoder_kwargs)
+ assert isinstance(log, dict)
+ inds = log["min_encoding_indices"]
+ return rearrange(inds, "b ... -> b (...)")
+
+ def decode(
+ self, inds: torch.Tensor, shape: Union[None, tuple, list] = None
+ ) -> torch.Tensor:
+ # expect inds shape (b, s) with s = h*w
+ shape = default(shape, self.shape) # Optional[(h, w)]
+ if shape is not None:
+            assert len(shape) == 2, f"Unhandled shape {shape}"
+ inds = rearrange(inds, "b (h w) -> b h w", h=shape[0], w=shape[1])
+ h = self.regularization.get_codebook_entry(inds) # (b, h, w, c)
+ h = rearrange(h, "b h w c -> b c h w")
+ return self.model.decode(h)
+
+
+class AutoencoderKLModeOnly(AutoencodingEngineLegacy):
+ def __init__(self, **kwargs):
+ if "lossconfig" in kwargs:
+ kwargs["loss_config"] = kwargs.pop("lossconfig")
+ super().__init__(
+ regularizer_config={
+ "target": (
+ "sgm.modules.autoencoding.regularizers"
+ ".DiagonalGaussianRegularizer"
+ ),
+ "params": {"sample": False},
+ },
+ **kwargs,
+ )
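+
+# Config sketch (illustrative): instantiating the KL autoencoder for
+# inference. The ddconfig values are assumptions in the style of SD-family
+# VAEs, not a checkpoint shipped with this repo.
+#
+#   model = AutoencoderKL(
+#       embed_dim=4,
+#       lossconfig={"target": "torch.nn.Identity"},
+#       ddconfig={
+#           "double_z": True, "z_channels": 4, "resolution": 256,
+#           "in_channels": 3, "out_ch": 3, "ch": 128, "ch_mult": [1, 2, 4, 4],
+#           "num_res_blocks": 2, "attn_resolutions": [], "dropout": 0.0,
+#       },
+#   )
+#   z = model.encode(torch.randn(1, 3, 256, 256))  # -> (1, 4, 32, 32) latent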
diff --git a/sgm/models/diffusion.py b/sgm/models/diffusion.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f3efd3c7e1e37bf4f78673ae72021f1f968e116
--- /dev/null
+++ b/sgm/models/diffusion.py
@@ -0,0 +1,341 @@
+import math
+from contextlib import contextmanager
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+import pytorch_lightning as pl
+import torch
+from omegaconf import ListConfig, OmegaConf
+from safetensors.torch import load_file as load_safetensors
+from torch.optim.lr_scheduler import LambdaLR
+
+from ..modules import UNCONDITIONAL_CONFIG
+from ..modules.autoencoding.temporal_ae import VideoDecoder
+from ..modules.diffusionmodules.wrappers import OPENAIUNETWRAPPER
+from ..modules.ema import LitEma
+from ..util import (default, disabled_train, get_obj_from_str,
+ instantiate_from_config, log_txt_as_img)
+
+
+class DiffusionEngine(pl.LightningModule):
+ def __init__(
+ self,
+ network_config,
+ denoiser_config,
+ first_stage_config,
+ conditioner_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ sampler_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ optimizer_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ scheduler_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ loss_fn_config: Union[None, Dict, ListConfig, OmegaConf] = None,
+ network_wrapper: Union[None, str] = None,
+ ckpt_path: Union[None, str] = None,
+ use_ema: bool = False,
+ ema_decay_rate: float = 0.9999,
+ scale_factor: float = 1.0,
+ disable_first_stage_autocast=False,
+ input_key: str = "jpg",
+ log_keys: Union[List, None] = None,
+ no_cond_log: bool = False,
+ compile_model: bool = False,
+ en_and_decode_n_samples_a_time: Optional[int] = None,
+ ):
+ super().__init__()
+ self.log_keys = log_keys
+ self.input_key = input_key
+ self.optimizer_config = default(
+ optimizer_config, {"target": "torch.optim.AdamW"}
+ )
+ model = instantiate_from_config(network_config)
+ self.model = get_obj_from_str(default(network_wrapper, OPENAIUNETWRAPPER))(
+ model, compile_model=compile_model
+ )
+
+ self.denoiser = instantiate_from_config(denoiser_config)
+ self.sampler = (
+ instantiate_from_config(sampler_config)
+ if sampler_config is not None
+ else None
+ )
+ self.conditioner = instantiate_from_config(
+ default(conditioner_config, UNCONDITIONAL_CONFIG)
+ )
+ self.scheduler_config = scheduler_config
+ self._init_first_stage(first_stage_config)
+
+ self.loss_fn = (
+ instantiate_from_config(loss_fn_config)
+ if loss_fn_config is not None
+ else None
+ )
+
+ self.use_ema = use_ema
+ if self.use_ema:
+ self.model_ema = LitEma(self.model, decay=ema_decay_rate)
+            print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))} buffers.")
+
+ self.scale_factor = scale_factor
+ self.disable_first_stage_autocast = disable_first_stage_autocast
+ self.no_cond_log = no_cond_log
+
+ if ckpt_path is not None:
+ self.init_from_ckpt(ckpt_path)
+
+ self.en_and_decode_n_samples_a_time = en_and_decode_n_samples_a_time
+
+ def init_from_ckpt(
+ self,
+ path: str,
+ ) -> None:
+ if path.endswith("ckpt"):
+ sd = torch.load(path, map_location="cpu")["state_dict"]
+ elif path.endswith("safetensors"):
+ sd = load_safetensors(path)
+ else:
+ raise NotImplementedError
+
+ missing, unexpected = self.load_state_dict(sd, strict=False)
+ print(
+ f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys"
+ )
+ if len(missing) > 0:
+ print(f"Missing Keys: {missing}")
+ if len(unexpected) > 0:
+ print(f"Unexpected Keys: {unexpected}")
+
+ def _init_first_stage(self, config):
+ model = instantiate_from_config(config).eval()
+ model.train = disabled_train
+ for param in model.parameters():
+ param.requires_grad = False
+ self.first_stage_model = model
+
+ def get_input(self, batch):
+ # assuming unified data format, dataloader returns a dict.
+ # image tensors should be scaled to -1 ... 1 and in bchw format
+ return batch[self.input_key]
+
+ @torch.no_grad()
+ def decode_first_stage(self, z):
+ z = 1.0 / self.scale_factor * z
+ n_samples = default(self.en_and_decode_n_samples_a_time, z.shape[0])
+
+ n_rounds = math.ceil(z.shape[0] / n_samples)
+ all_out = []
+ with torch.autocast("cuda", enabled=not self.disable_first_stage_autocast):
+ for n in range(n_rounds):
+ if isinstance(self.first_stage_model.decoder, VideoDecoder):
+ kwargs = {"timesteps": len(z[n * n_samples : (n + 1) * n_samples])}
+ else:
+ kwargs = {}
+ out = self.first_stage_model.decode(
+ z[n * n_samples : (n + 1) * n_samples], **kwargs
+ )
+ all_out.append(out)
+ out = torch.cat(all_out, dim=0)
+ return out
+
+ @torch.no_grad()
+ def encode_first_stage(self, x):
+ n_samples = default(self.en_and_decode_n_samples_a_time, x.shape[0])
+ n_rounds = math.ceil(x.shape[0] / n_samples)
+ all_out = []
+ with torch.autocast("cuda", enabled=not self.disable_first_stage_autocast):
+ for n in range(n_rounds):
+ out = self.first_stage_model.encode(
+ x[n * n_samples : (n + 1) * n_samples]
+ )
+ all_out.append(out)
+ z = torch.cat(all_out, dim=0)
+ z = self.scale_factor * z
+ return z
+
+ def forward(self, x, batch):
+ loss = self.loss_fn(self.model, self.denoiser, self.conditioner, x, batch)
+ loss_mean = loss.mean()
+ loss_dict = {"loss": loss_mean}
+ return loss_mean, loss_dict
+
+ def shared_step(self, batch: Dict) -> Any:
+ x = self.get_input(batch)
+ x = self.encode_first_stage(x)
+ batch["global_step"] = self.global_step
+ loss, loss_dict = self(x, batch)
+ return loss, loss_dict
+
+ def training_step(self, batch, batch_idx):
+ loss, loss_dict = self.shared_step(batch)
+
+ self.log_dict(
+ loss_dict, prog_bar=True, logger=True, on_step=True, on_epoch=False
+ )
+
+ self.log(
+ "global_step",
+ self.global_step,
+ prog_bar=True,
+ logger=True,
+ on_step=True,
+ on_epoch=False,
+ )
+
+ if self.scheduler_config is not None:
+ lr = self.optimizers().param_groups[0]["lr"]
+ self.log(
+ "lr_abs", lr, prog_bar=True, logger=True, on_step=True, on_epoch=False
+ )
+
+ return loss
+
+ def on_train_start(self, *args, **kwargs):
+ if self.sampler is None or self.loss_fn is None:
+ raise ValueError("Sampler and loss function need to be set for training.")
+
+ def on_train_batch_end(self, *args, **kwargs):
+ if self.use_ema:
+ self.model_ema(self.model)
+
+ @contextmanager
+ def ema_scope(self, context=None):
+ if self.use_ema:
+ self.model_ema.store(self.model.parameters())
+ self.model_ema.copy_to(self.model)
+ if context is not None:
+ print(f"{context}: Switched to EMA weights")
+ try:
+ yield None
+ finally:
+ if self.use_ema:
+ self.model_ema.restore(self.model.parameters())
+ if context is not None:
+ print(f"{context}: Restored training weights")
+
+ def instantiate_optimizer_from_config(self, params, lr, cfg):
+ return get_obj_from_str(cfg["target"])(
+ params, lr=lr, **cfg.get("params", dict())
+ )
+
+ def configure_optimizers(self):
+ lr = self.learning_rate
+ params = list(self.model.parameters())
+ for embedder in self.conditioner.embedders:
+ if embedder.is_trainable:
+ params = params + list(embedder.parameters())
+ opt = self.instantiate_optimizer_from_config(params, lr, self.optimizer_config)
+ if self.scheduler_config is not None:
+ scheduler = instantiate_from_config(self.scheduler_config)
+ print("Setting up LambdaLR scheduler...")
+ scheduler = [
+ {
+ "scheduler": LambdaLR(opt, lr_lambda=scheduler.schedule),
+ "interval": "step",
+ "frequency": 1,
+ }
+ ]
+ return [opt], scheduler
+ return opt
+
+ @torch.no_grad()
+ def sample(
+ self,
+ cond: Dict,
+ uc: Union[Dict, None] = None,
+ batch_size: int = 16,
+ shape: Union[None, Tuple, List] = None,
+ **kwargs,
+ ):
+ randn = torch.randn(batch_size, *shape).to(self.device)
+
+ denoiser = lambda input, sigma, c: self.denoiser(
+ self.model, input, sigma, c, **kwargs
+ )
+ samples = self.sampler(denoiser, randn, cond, uc=uc)
+ return samples
+
+ @torch.no_grad()
+ def log_conditionings(self, batch: Dict, n: int) -> Dict:
+ """
+ Defines heuristics to log different conditionings.
+ These can be lists of strings (text-to-image), tensors, ints, ...
+ """
+ image_h, image_w = batch[self.input_key].shape[2:]
+ log = dict()
+
+ for embedder in self.conditioner.embedders:
+ if (
+ (self.log_keys is None) or (embedder.input_key in self.log_keys)
+ ) and not self.no_cond_log:
+ x = batch[embedder.input_key][:n]
+ if isinstance(x, torch.Tensor):
+ if x.dim() == 1:
+ # class-conditional, convert integer to string
+ x = [str(x[i].item()) for i in range(x.shape[0])]
+ xc = log_txt_as_img((image_h, image_w), x, size=image_h // 4)
+ elif x.dim() == 2:
+ # size and crop cond and the like
+ x = [
+ "x".join([str(xx) for xx in x[i].tolist()])
+ for i in range(x.shape[0])
+ ]
+ xc = log_txt_as_img((image_h, image_w), x, size=image_h // 20)
+ else:
+ raise NotImplementedError()
+ elif isinstance(x, (List, ListConfig)):
+ if isinstance(x[0], str):
+ # strings
+ xc = log_txt_as_img((image_h, image_w), x, size=image_h // 20)
+ else:
+ raise NotImplementedError()
+ else:
+ raise NotImplementedError()
+ log[embedder.input_key] = xc
+ return log
+
+ @torch.no_grad()
+ def log_images(
+ self,
+ batch: Dict,
+ N: int = 8,
+ sample: bool = True,
+        ucg_keys: Optional[List[str]] = None,
+ **kwargs,
+ ) -> Dict:
+ conditioner_input_keys = [e.input_key for e in self.conditioner.embedders]
+ if ucg_keys:
+ assert all(map(lambda x: x in conditioner_input_keys, ucg_keys)), (
+                "Each defined ucg key for sampling must be in the provided conditioner input keys, "
+ f"but we have {ucg_keys} vs. {conditioner_input_keys}"
+ )
+ else:
+ ucg_keys = conditioner_input_keys
+ log = dict()
+
+ x = self.get_input(batch)
+
+ c, uc = self.conditioner.get_unconditional_conditioning(
+ batch,
+ force_uc_zero_embeddings=ucg_keys
+ if len(self.conditioner.embedders) > 0
+ else [],
+ )
+
+ sampling_kwargs = {}
+
+ N = min(x.shape[0], N)
+ x = x.to(self.device)[:N]
+ log["inputs"] = x
+ z = self.encode_first_stage(x)
+ log["reconstructions"] = self.decode_first_stage(z)
+ log.update(self.log_conditionings(batch, N))
+
+ for k in c:
+ if isinstance(c[k], torch.Tensor):
+ c[k], uc[k] = map(lambda y: y[k][:N].to(self.device), (c, uc))
+
+ if sample:
+ with self.ema_scope("Plotting"):
+ samples = self.sample(
+ c, shape=z.shape[1:], uc=uc, batch_size=N, **sampling_kwargs
+ )
+ samples = self.decode_first_stage(samples)
+ log["samples"] = samples
+ return log
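+
+# Sampling sketch (illustrative): once c/uc have been prepared (e.g. via
+# conditioner.get_unconditional_conditioning as in log_images above), latents
+# can be drawn and decoded. The latent shape is an assumption for an SD-style
+# first stage (C=4, F=8) at 512x512.
+#
+#   samples_z = engine.sample(c, uc=uc, batch_size=4, shape=(4, 64, 64))
+#   samples = engine.decode_first_stage(samples_z)  # images in [-1, 1]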
diff --git a/sgm/modules/__init__.py b/sgm/modules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0db1d7716a6e48f77b86a4b59c9289d6fb76b50b
--- /dev/null
+++ b/sgm/modules/__init__.py
@@ -0,0 +1,6 @@
+from .encoders.modules import GeneralConditioner
+
+UNCONDITIONAL_CONFIG = {
+ "target": "sgm.modules.GeneralConditioner",
+ "params": {"emb_models": []},
+}
diff --git a/sgm/modules/attention.py b/sgm/modules/attention.py
new file mode 100644
index 0000000000000000000000000000000000000000..52a50b7bd744bea0f0cdca23b0cfd14ad87794be
--- /dev/null
+++ b/sgm/modules/attention.py
@@ -0,0 +1,759 @@
+import logging
+import math
+from inspect import isfunction
+from typing import Any, Optional
+
+import torch
+import torch.nn.functional as F
+from einops import rearrange, repeat
+from packaging import version
+from torch import nn
+from torch.utils.checkpoint import checkpoint
+
+logpy = logging.getLogger(__name__)
+
+if version.parse(torch.__version__) >= version.parse("2.0.0"):
+ SDP_IS_AVAILABLE = True
+ from torch.backends.cuda import SDPBackend, sdp_kernel
+
+ BACKEND_MAP = {
+ SDPBackend.MATH: {
+ "enable_math": True,
+ "enable_flash": False,
+ "enable_mem_efficient": False,
+ },
+ SDPBackend.FLASH_ATTENTION: {
+ "enable_math": False,
+ "enable_flash": True,
+ "enable_mem_efficient": False,
+ },
+ SDPBackend.EFFICIENT_ATTENTION: {
+ "enable_math": False,
+ "enable_flash": False,
+ "enable_mem_efficient": True,
+ },
+ None: {"enable_math": True, "enable_flash": True, "enable_mem_efficient": True},
+ }
+else:
+ from contextlib import nullcontext
+
+ SDP_IS_AVAILABLE = False
+ sdp_kernel = nullcontext
+ BACKEND_MAP = {}
+ logpy.warn(
+ f"No SDP backend available, likely because you are running in pytorch "
+ f"versions < 2.0. In fact, you are using PyTorch {torch.__version__}. "
+ f"You might want to consider upgrading."
+ )
+
+try:
+ import xformers
+ import xformers.ops
+
+ XFORMERS_IS_AVAILABLE = True
+except ImportError:
+ XFORMERS_IS_AVAILABLE = False
+ logpy.warn("no module 'xformers'. Processing without...")
+
+# from .diffusionmodules.util import mixed_checkpoint as checkpoint
+
+
+def exists(val):
+ return val is not None
+
+
+def uniq(arr):
+ return {el: True for el in arr}.keys()
+
+
+def default(val, d):
+ if exists(val):
+ return val
+ return d() if isfunction(d) else d
+
+
+def max_neg_value(t):
+ return -torch.finfo(t.dtype).max
+
+
+def init_(tensor):
+ dim = tensor.shape[-1]
+ std = 1 / math.sqrt(dim)
+ tensor.uniform_(-std, std)
+ return tensor
+
+
+# feedforward
+class GEGLU(nn.Module):
+ def __init__(self, dim_in, dim_out):
+ super().__init__()
+ self.proj = nn.Linear(dim_in, dim_out * 2)
+
+ def forward(self, x):
+ x, gate = self.proj(x).chunk(2, dim=-1)
+ return x * F.gelu(gate)
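+
+# Worked example (illustrative): GEGLU projects to twice the output width,
+# splits the result in half, and gates one half with the GELU of the other.
+#
+#   geglu = GEGLU(dim_in=8, dim_out=16)  # proj maps 8 -> 32, chunked into 2x16
+#   y = geglu(torch.randn(2, 10, 8))     # -> (2, 10, 16)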
+
+
+class FeedForward(nn.Module):
+ def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.0):
+ super().__init__()
+ inner_dim = int(dim * mult)
+ dim_out = default(dim_out, dim)
+ project_in = (
+ nn.Sequential(nn.Linear(dim, inner_dim), nn.GELU())
+ if not glu
+ else GEGLU(dim, inner_dim)
+ )
+
+ self.net = nn.Sequential(
+ project_in, nn.Dropout(dropout), nn.Linear(inner_dim, dim_out)
+ )
+
+ def forward(self, x):
+ return self.net(x)
+
+
+def zero_module(module):
+ """
+ Zero out the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().zero_()
+ return module
+
+
+def Normalize(in_channels):
+ return torch.nn.GroupNorm(
+ num_groups=32, num_channels=in_channels, eps=1e-6, affine=True
+ )
+
+
+class LinearAttention(nn.Module):
+ def __init__(self, dim, heads=4, dim_head=32):
+ super().__init__()
+ self.heads = heads
+ hidden_dim = dim_head * heads
+ self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)
+ self.to_out = nn.Conv2d(hidden_dim, dim, 1)
+
+ def forward(self, x):
+ b, c, h, w = x.shape
+ qkv = self.to_qkv(x)
+ q, k, v = rearrange(
+ qkv, "b (qkv heads c) h w -> qkv b heads c (h w)", heads=self.heads, qkv=3
+ )
+ k = k.softmax(dim=-1)
+ context = torch.einsum("bhdn,bhen->bhde", k, v)
+ out = torch.einsum("bhde,bhdn->bhen", context, q)
+ out = rearrange(
+ out, "b heads c (h w) -> b (heads c) h w", heads=self.heads, h=h, w=w
+ )
+ return self.to_out(out)
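+
+# Complexity note (illustrative): the first einsum contracts k and v over the
+# sequence axis (h * w), producing a per-head (c x c) context matrix, so the
+# cost grows linearly with h * w instead of quadratically as in softmax
+# attention over (h * w) x (h * w) scores.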
+
+
+class SelfAttention(nn.Module):
+ ATTENTION_MODES = ("xformers", "torch", "math")
+
+ def __init__(
+ self,
+ dim: int,
+ num_heads: int = 8,
+ qkv_bias: bool = False,
+ qk_scale: Optional[float] = None,
+ attn_drop: float = 0.0,
+ proj_drop: float = 0.0,
+ attn_mode: str = "xformers",
+ ):
+ super().__init__()
+ self.num_heads = num_heads
+ head_dim = dim // num_heads
+ self.scale = qk_scale or head_dim**-0.5
+
+ self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
+ self.attn_drop = nn.Dropout(attn_drop)
+ self.proj = nn.Linear(dim, dim)
+ self.proj_drop = nn.Dropout(proj_drop)
+ assert attn_mode in self.ATTENTION_MODES
+ self.attn_mode = attn_mode
+
+ def forward(self, x: torch.Tensor) -> torch.Tensor:
+ B, L, C = x.shape
+
+ qkv = self.qkv(x)
+ if self.attn_mode == "torch":
+ qkv = rearrange(
+ qkv, "B L (K H D) -> K B H L D", K=3, H=self.num_heads
+ ).float()
+ q, k, v = qkv[0], qkv[1], qkv[2] # B H L D
+ x = torch.nn.functional.scaled_dot_product_attention(q, k, v)
+ x = rearrange(x, "B H L D -> B L (H D)")
+ elif self.attn_mode == "xformers":
+ qkv = rearrange(qkv, "B L (K H D) -> K B L H D", K=3, H=self.num_heads)
+ q, k, v = qkv[0], qkv[1], qkv[2] # B L H D
+ x = xformers.ops.memory_efficient_attention(q, k, v)
+ x = rearrange(x, "B L H D -> B L (H D)", H=self.num_heads)
+ elif self.attn_mode == "math":
+ qkv = rearrange(qkv, "B L (K H D) -> K B H L D", K=3, H=self.num_heads)
+ q, k, v = qkv[0], qkv[1], qkv[2] # B H L D
+ attn = (q @ k.transpose(-2, -1)) * self.scale
+ attn = attn.softmax(dim=-1)
+ attn = self.attn_drop(attn)
+ x = (attn @ v).transpose(1, 2).reshape(B, L, C)
+ else:
+            raise NotImplementedError
+
+ x = self.proj(x)
+ x = self.proj_drop(x)
+ return x
+
+
+class SpatialSelfAttention(nn.Module):
+ def __init__(self, in_channels):
+ super().__init__()
+ self.in_channels = in_channels
+
+ self.norm = Normalize(in_channels)
+ self.q = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.k = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.v = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.proj_out = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+
+ def forward(self, x):
+ h_ = x
+ h_ = self.norm(h_)
+ q = self.q(h_)
+ k = self.k(h_)
+ v = self.v(h_)
+
+ # compute attention
+ b, c, h, w = q.shape
+ q = rearrange(q, "b c h w -> b (h w) c")
+ k = rearrange(k, "b c h w -> b c (h w)")
+ w_ = torch.einsum("bij,bjk->bik", q, k)
+
+ w_ = w_ * (int(c) ** (-0.5))
+ w_ = torch.nn.functional.softmax(w_, dim=2)
+
+ # attend to values
+ v = rearrange(v, "b c h w -> b c (h w)")
+ w_ = rearrange(w_, "b i j -> b j i")
+ h_ = torch.einsum("bij,bjk->bik", v, w_)
+ h_ = rearrange(h_, "b c (h w) -> b c h w", h=h)
+ h_ = self.proj_out(h_)
+
+ return x + h_
+
+
+class CrossAttention(nn.Module):
+ def __init__(
+ self,
+ query_dim,
+ context_dim=None,
+ heads=8,
+ dim_head=64,
+ dropout=0.0,
+ backend=None,
+ ):
+ super().__init__()
+ inner_dim = dim_head * heads
+ context_dim = default(context_dim, query_dim)
+
+ self.scale = dim_head**-0.5
+ self.heads = heads
+
+ self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
+ self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+ self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+
+ self.to_out = nn.Sequential(
+ nn.Linear(inner_dim, query_dim), nn.Dropout(dropout)
+ )
+ self.backend = backend
+
+ def forward(
+ self,
+ x,
+ context=None,
+ mask=None,
+ additional_tokens=None,
+ n_times_crossframe_attn_in_self=0,
+ ):
+ h = self.heads
+
+ if additional_tokens is not None:
+ # get the number of masked tokens at the beginning of the output sequence
+ n_tokens_to_mask = additional_tokens.shape[1]
+ # add additional token
+ x = torch.cat([additional_tokens, x], dim=1)
+
+ q = self.to_q(x)
+ context = default(context, x)
+ k = self.to_k(context)
+ v = self.to_v(context)
+
+ if n_times_crossframe_attn_in_self:
+ # reprogramming cross-frame attention as in https://arxiv.org/abs/2303.13439
+ assert x.shape[0] % n_times_crossframe_attn_in_self == 0
+ n_cp = x.shape[0] // n_times_crossframe_attn_in_self
+ k = repeat(
+ k[::n_times_crossframe_attn_in_self], "b ... -> (b n) ...", n=n_cp
+ )
+ v = repeat(
+ v[::n_times_crossframe_attn_in_self], "b ... -> (b n) ...", n=n_cp
+ )
+
+ q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=h), (q, k, v))
+
+ ## old
+ """
+ sim = einsum('b i d, b j d -> b i j', q, k) * self.scale
+ del q, k
+
+ if exists(mask):
+ mask = rearrange(mask, 'b ... -> b (...)')
+ max_neg_value = -torch.finfo(sim.dtype).max
+ mask = repeat(mask, 'b j -> (b h) () j', h=h)
+ sim.masked_fill_(~mask, max_neg_value)
+
+ # attention, what we cannot get enough of
+ sim = sim.softmax(dim=-1)
+
+ out = einsum('b i j, b j d -> b i d', sim, v)
+ """
+ ## new
+ with sdp_kernel(**BACKEND_MAP[self.backend]):
+ # print("dispatching into backend", self.backend, "q/k/v shape: ", q.shape, k.shape, v.shape)
+ out = F.scaled_dot_product_attention(
+ q, k, v, attn_mask=mask
+ ) # scale is dim_head ** -0.5 per default
+
+ del q, k, v
+ out = rearrange(out, "b h n d -> b n (h d)", h=h)
+
+ if additional_tokens is not None:
+ # remove additional token
+ out = out[:, n_tokens_to_mask:]
+ return self.to_out(out)
+
+
+class MemoryEfficientCrossAttention(nn.Module):
+ # https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
+ def __init__(
+ self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0, **kwargs
+ ):
+ super().__init__()
+ logpy.debug(
+ f"Setting up {self.__class__.__name__}. Query dim is {query_dim}, "
+ f"context_dim is {context_dim} and using {heads} heads with a "
+ f"dimension of {dim_head}."
+ )
+ inner_dim = dim_head * heads
+ context_dim = default(context_dim, query_dim)
+
+ self.heads = heads
+ self.dim_head = dim_head
+
+ self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
+ self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
+ self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
+
+ self.to_out = nn.Sequential(
+ nn.Linear(inner_dim, query_dim), nn.Dropout(dropout)
+ )
+ self.attention_op: Optional[Any] = None
+
+ def forward(
+ self,
+ x,
+ context=None,
+ mask=None,
+ additional_tokens=None,
+ n_times_crossframe_attn_in_self=0,
+ ):
+ if additional_tokens is not None:
+ # get the number of masked tokens at the beginning of the output sequence
+ n_tokens_to_mask = additional_tokens.shape[1]
+ # add additional token
+ x = torch.cat([additional_tokens, x], dim=1)
+ q = self.to_q(x)
+ context = default(context, x)
+ k = self.to_k(context)
+ v = self.to_v(context)
+
+ if n_times_crossframe_attn_in_self:
+ # reprogramming cross-frame attention as in https://arxiv.org/abs/2303.13439
+ assert x.shape[0] % n_times_crossframe_attn_in_self == 0
+ # n_cp = x.shape[0]//n_times_crossframe_attn_in_self
+ k = repeat(
+ k[::n_times_crossframe_attn_in_self],
+ "b ... -> (b n) ...",
+ n=n_times_crossframe_attn_in_self,
+ )
+ v = repeat(
+ v[::n_times_crossframe_attn_in_self],
+ "b ... -> (b n) ...",
+ n=n_times_crossframe_attn_in_self,
+ )
+
+ b, _, _ = q.shape
+ q, k, v = map(
+ lambda t: t.unsqueeze(3)
+ .reshape(b, t.shape[1], self.heads, self.dim_head)
+ .permute(0, 2, 1, 3)
+ .reshape(b * self.heads, t.shape[1], self.dim_head)
+ .contiguous(),
+ (q, k, v),
+ )
+
+ # actually compute the attention, what we cannot get enough of
+ if version.parse(xformers.__version__) >= version.parse("0.0.21"):
+ # NOTE: workaround for
+ # https://github.com/facebookresearch/xformers/issues/845
+ max_bs = 32768
+ N = q.shape[0]
+ n_batches = math.ceil(N / max_bs)
+ out = list()
+ for i_batch in range(n_batches):
+ batch = slice(i_batch * max_bs, (i_batch + 1) * max_bs)
+ out.append(
+ xformers.ops.memory_efficient_attention(
+ q[batch],
+ k[batch],
+ v[batch],
+ attn_bias=None,
+ op=self.attention_op,
+ )
+ )
+ out = torch.cat(out, 0)
+ else:
+ out = xformers.ops.memory_efficient_attention(
+ q, k, v, attn_bias=None, op=self.attention_op
+ )
+
+ # TODO: Use this directly in the attention operation, as a bias
+ if exists(mask):
+ raise NotImplementedError
+ out = (
+ out.unsqueeze(0)
+ .reshape(b, self.heads, out.shape[1], self.dim_head)
+ .permute(0, 2, 1, 3)
+ .reshape(b, out.shape[1], self.heads * self.dim_head)
+ )
+ if additional_tokens is not None:
+ # remove additional token
+ out = out[:, n_tokens_to_mask:]
+ return self.to_out(out)
+
+
+class BasicTransformerBlock(nn.Module):
+ ATTENTION_MODES = {
+ "softmax": CrossAttention, # vanilla attention
+ "softmax-xformers": MemoryEfficientCrossAttention, # ampere
+ }
+
+ def __init__(
+ self,
+ dim,
+ n_heads,
+ d_head,
+ dropout=0.0,
+ context_dim=None,
+ gated_ff=True,
+ checkpoint=True,
+ disable_self_attn=False,
+ attn_mode="softmax",
+ sdp_backend=None,
+ ):
+ super().__init__()
+ assert attn_mode in self.ATTENTION_MODES
+ if attn_mode != "softmax" and not XFORMERS_IS_AVAILABLE:
+ logpy.warn(
+ f"Attention mode '{attn_mode}' is not available. Falling "
+ f"back to native attention. This is not a problem in "
+ f"Pytorch >= 2.0. FYI, you are running with PyTorch "
+ f"version {torch.__version__}."
+ )
+ attn_mode = "softmax"
+ elif attn_mode == "softmax" and not SDP_IS_AVAILABLE:
+ logpy.warn(
+ "We do not support vanilla attention anymore, as it is too "
+ "expensive. Sorry."
+ )
+ if not XFORMERS_IS_AVAILABLE:
+ assert (
+ False
+ ), "Please install xformers via e.g. 'pip install xformers==0.0.16'"
+ else:
+ logpy.info("Falling back to xformers efficient attention.")
+ attn_mode = "softmax-xformers"
+ attn_cls = self.ATTENTION_MODES[attn_mode]
+ if version.parse(torch.__version__) >= version.parse("2.0.0"):
+ assert sdp_backend is None or isinstance(sdp_backend, SDPBackend)
+ else:
+ assert sdp_backend is None
+ self.disable_self_attn = disable_self_attn
+ self.attn1 = attn_cls(
+ query_dim=dim,
+ heads=n_heads,
+ dim_head=d_head,
+ dropout=dropout,
+ context_dim=context_dim if self.disable_self_attn else None,
+ backend=sdp_backend,
+ ) # is a self-attention if not self.disable_self_attn
+ self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
+ self.attn2 = attn_cls(
+ query_dim=dim,
+ context_dim=context_dim,
+ heads=n_heads,
+ dim_head=d_head,
+ dropout=dropout,
+ backend=sdp_backend,
+ ) # is self-attn if context is none
+ self.norm1 = nn.LayerNorm(dim)
+ self.norm2 = nn.LayerNorm(dim)
+ self.norm3 = nn.LayerNorm(dim)
+ self.checkpoint = checkpoint
+ if self.checkpoint:
+ logpy.debug(f"{self.__class__.__name__} is using checkpointing")
+
+ def forward(
+ self, x, context=None, additional_tokens=None, n_times_crossframe_attn_in_self=0
+ ):
+ kwargs = {"x": x}
+
+ if context is not None:
+ kwargs.update({"context": context})
+
+ if additional_tokens is not None:
+ kwargs.update({"additional_tokens": additional_tokens})
+
+ if n_times_crossframe_attn_in_self:
+ kwargs.update(
+ {"n_times_crossframe_attn_in_self": n_times_crossframe_attn_in_self}
+ )
+
+ # return mixed_checkpoint(self._forward, kwargs, self.parameters(), self.checkpoint)
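+ # NOTE: the checkpointed path below re-runs _forward with only (x, context);
+ # additional_tokens and n_times_crossframe_attn_in_self are dropped there, so
+ # construct the block with checkpoint=False if those arguments matter.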
+ if self.checkpoint:
+ # inputs = {"x": x, "context": context}
+ return checkpoint(self._forward, x, context)
+ # return checkpoint(self._forward, inputs, self.parameters(), self.checkpoint)
+ else:
+ return self._forward(**kwargs)
+
+ def _forward(
+ self, x, context=None, additional_tokens=None, n_times_crossframe_attn_in_self=0
+ ):
+ x = (
+ self.attn1(
+ self.norm1(x),
+ context=context if self.disable_self_attn else None,
+ additional_tokens=additional_tokens,
+ n_times_crossframe_attn_in_self=n_times_crossframe_attn_in_self
+ if not self.disable_self_attn
+ else 0,
+ )
+ + x
+ )
+ x = (
+ self.attn2(
+ self.norm2(x), context=context, additional_tokens=additional_tokens
+ )
+ + x
+ )
+ x = self.ff(self.norm3(x)) + x
+ return x
+
+
+class BasicTransformerSingleLayerBlock(nn.Module):
+ ATTENTION_MODES = {
+ "softmax": CrossAttention, # vanilla attention
+ "softmax-xformers": MemoryEfficientCrossAttention # on the A100s not quite as fast as the above version
+ # (todo might depend on head_dim, check, falls back to semi-optimized kernels for dim!=[16,32,64,128])
+ }
+
+ def __init__(
+ self,
+ dim,
+ n_heads,
+ d_head,
+ dropout=0.0,
+ context_dim=None,
+ gated_ff=True,
+ checkpoint=True,
+ attn_mode="softmax",
+ ):
+ super().__init__()
+ assert attn_mode in self.ATTENTION_MODES
+ attn_cls = self.ATTENTION_MODES[attn_mode]
+ self.attn1 = attn_cls(
+ query_dim=dim,
+ heads=n_heads,
+ dim_head=d_head,
+ dropout=dropout,
+ context_dim=context_dim,
+ )
+ self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
+ self.norm1 = nn.LayerNorm(dim)
+ self.norm2 = nn.LayerNorm(dim)
+ self.checkpoint = checkpoint
+
+ def forward(self, x, context=None):
+ # inputs = {"x": x, "context": context}
+ # return checkpoint(self._forward, inputs, self.parameters(), self.checkpoint)
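+ # NOTE: unlike BasicTransformerBlock, checkpointing here is applied
+ # unconditionally; the stored self.checkpoint flag is not consulted.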
+ return checkpoint(self._forward, x, context)
+
+ def _forward(self, x, context=None):
+ x = self.attn1(self.norm1(x), context=context) + x
+ x = self.ff(self.norm2(x)) + x
+ return x
+
+
+class SpatialTransformer(nn.Module):
+ """
+ Transformer block for image-like data.
+ First, project the input (aka embedding)
+ and reshape to b, t, d.
+ Then apply standard transformer action.
+ Finally, reshape to image
+ NEW: use_linear for more efficiency instead of the 1x1 convs
+ """
+
+ def __init__(
+ self,
+ in_channels,
+ n_heads,
+ d_head,
+ depth=1,
+ dropout=0.0,
+ context_dim=None,
+ disable_self_attn=False,
+ use_linear=False,
+ attn_type="softmax",
+ use_checkpoint=True,
+ # sdp_backend=SDPBackend.FLASH_ATTENTION
+ sdp_backend=None,
+ ):
+ super().__init__()
+ logpy.debug(
+ f"constructing {self.__class__.__name__} of depth {depth} w/ "
+ f"{in_channels} channels and {n_heads} heads."
+ )
+
+ if exists(context_dim) and not isinstance(context_dim, list):
+ context_dim = [context_dim]
+ if exists(context_dim) and isinstance(context_dim, list):
+ if depth != len(context_dim):
+ logpy.warn(
+ f"{self.__class__.__name__}: Found context dims "
+ f"{context_dim} of depth {len(context_dim)}, which does not "
+ f"match the specified 'depth' of {depth}. Setting context_dim "
+ f"to {depth * [context_dim[0]]} now."
+ )
+ # depth does not match context dims.
+ assert all(
+ map(lambda x: x == context_dim[0], context_dim)
+ ), "need homogenous context_dim to match depth automatically"
+ context_dim = depth * [context_dim[0]]
+ elif context_dim is None:
+ context_dim = [None] * depth
+ self.in_channels = in_channels
+ inner_dim = n_heads * d_head
+ self.norm = Normalize(in_channels)
+ if not use_linear:
+ self.proj_in = nn.Conv2d(
+ in_channels, inner_dim, kernel_size=1, stride=1, padding=0
+ )
+ else:
+ self.proj_in = nn.Linear(in_channels, inner_dim)
+
+ self.transformer_blocks = nn.ModuleList(
+ [
+ BasicTransformerBlock(
+ inner_dim,
+ n_heads,
+ d_head,
+ dropout=dropout,
+ context_dim=context_dim[d],
+ disable_self_attn=disable_self_attn,
+ attn_mode=attn_type,
+ checkpoint=use_checkpoint,
+ sdp_backend=sdp_backend,
+ )
+ for d in range(depth)
+ ]
+ )
+ if not use_linear:
+ self.proj_out = zero_module(
+ nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0)
+ )
+ else:
+ # self.proj_out = zero_module(nn.Linear(in_channels, inner_dim))
+ self.proj_out = zero_module(nn.Linear(inner_dim, in_channels))
+ self.use_linear = use_linear
+
+ def forward(self, x, context=None):
+ # note: if no context is given, cross-attention defaults to self-attention
+ if not isinstance(context, list):
+ context = [context]
+ b, c, h, w = x.shape
+ x_in = x
+ x = self.norm(x)
+ if not self.use_linear:
+ x = self.proj_in(x)
+ x = rearrange(x, "b c h w -> b (h w) c").contiguous()
+ if self.use_linear:
+ x = self.proj_in(x)
+ for i, block in enumerate(self.transformer_blocks):
+ if i > 0 and len(context) == 1:
+ i = 0 # use same context for each block
+ x = block(x, context=context[i])
+ if self.use_linear:
+ x = self.proj_out(x)
+ x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w).contiguous()
+ if not self.use_linear:
+ x = self.proj_out(x)
+ return x + x_in
+
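+# Sketch of dropping SpatialTransformer into a UNet stage (channel and head
+# counts are illustrative assumptions):
+#
+#   st = SpatialTransformer(in_channels=320, n_heads=8, d_head=40, depth=1,
+#                           context_dim=768, use_linear=True)
+#   feat = torch.randn(2, 320, 32, 32)   # (b, c, h, w) feature map
+#   cond = torch.randn(2, 77, 768)       # conditioning tokens
+#   out = st(feat, context=cond)         # -> (2, 320, 32, 32), residual added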
+
+class SimpleTransformer(nn.Module):
+ def __init__(
+ self,
+ dim: int,
+ depth: int,
+ heads: int,
+ dim_head: int,
+ context_dim: Optional[int] = None,
+ dropout: float = 0.0,
+ checkpoint: bool = True,
+ ):
+ super().__init__()
+ self.layers = nn.ModuleList([])
+ for _ in range(depth):
+ self.layers.append(
+ BasicTransformerBlock(
+ dim,
+ heads,
+ dim_head,
+ dropout=dropout,
+ context_dim=context_dim,
+ attn_mode="softmax-xformers",
+ checkpoint=checkpoint,
+ )
+ )
+
+ def forward(
+ self,
+ x: torch.Tensor,
+ context: Optional[torch.Tensor] = None,
+ ) -> torch.Tensor:
+ for layer in self.layers:
+ x = layer(x, context)
+ return x
diff --git a/sgm/modules/autoencoding/__init__.py b/sgm/modules/autoencoding/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/autoencoding/losses/__init__.py b/sgm/modules/autoencoding/losses/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b316c7aa6ea1c5e31a58987aa3b37b2933eb7e2
--- /dev/null
+++ b/sgm/modules/autoencoding/losses/__init__.py
@@ -0,0 +1,7 @@
+__all__ = [
+ "GeneralLPIPSWithDiscriminator",
+ "LatentLPIPS",
+]
+
+from .discriminator_loss import GeneralLPIPSWithDiscriminator
+from .lpips import LatentLPIPS
diff --git a/sgm/modules/autoencoding/losses/discriminator_loss.py b/sgm/modules/autoencoding/losses/discriminator_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..09b6829267bf8e4d98c3f29abdc19e58dcbcbe64
--- /dev/null
+++ b/sgm/modules/autoencoding/losses/discriminator_loss.py
@@ -0,0 +1,306 @@
+from typing import Dict, Iterator, List, Optional, Tuple, Union
+
+import numpy as np
+import torch
+import torch.nn as nn
+import torchvision
+from einops import rearrange
+from matplotlib import colormaps
+from matplotlib import pyplot as plt
+
+from ....util import default, instantiate_from_config
+from ..lpips.loss.lpips import LPIPS
+from ..lpips.model.model import weights_init
+from ..lpips.vqperceptual import hinge_d_loss, vanilla_d_loss
+
+
+class GeneralLPIPSWithDiscriminator(nn.Module):
+ def __init__(
+ self,
+ disc_start: int,
+ logvar_init: float = 0.0,
+ disc_num_layers: int = 3,
+ disc_in_channels: int = 3,
+ disc_factor: float = 1.0,
+ disc_weight: float = 1.0,
+ perceptual_weight: float = 1.0,
+ disc_loss: str = "hinge",
+ scale_input_to_tgt_size: bool = False,
+ dims: int = 2,
+ learn_logvar: bool = False,
+ regularization_weights: Union[None, Dict[str, float]] = None,
+ additional_log_keys: Optional[List[str]] = None,
+ discriminator_config: Optional[Dict] = None,
+ ):
+ super().__init__()
+ self.dims = dims
+ if self.dims > 2:
+ print(
+ f"running with dims={dims}. This means that for perceptual loss "
+ f"calculation, the LPIPS loss will be applied to each frame "
+ f"independently."
+ )
+ self.scale_input_to_tgt_size = scale_input_to_tgt_size
+ assert disc_loss in ["hinge", "vanilla"]
+ self.perceptual_loss = LPIPS().eval()
+ self.perceptual_weight = perceptual_weight
+ # output log variance
+ self.logvar = nn.Parameter(
+ torch.full((), logvar_init), requires_grad=learn_logvar
+ )
+ self.learn_logvar = learn_logvar
+
+ discriminator_config = default(
+ discriminator_config,
+ {
+ "target": "sgm.modules.autoencoding.lpips.model.model.NLayerDiscriminator",
+ "params": {
+ "input_nc": disc_in_channels,
+ "n_layers": disc_num_layers,
+ "use_actnorm": False,
+ },
+ },
+ )
+
+ self.discriminator = instantiate_from_config(discriminator_config).apply(
+ weights_init
+ )
+ self.discriminator_iter_start = disc_start
+ self.disc_loss = hinge_d_loss if disc_loss == "hinge" else vanilla_d_loss
+ self.disc_factor = disc_factor
+ self.discriminator_weight = disc_weight
+ self.regularization_weights = default(regularization_weights, {})
+
+ self.forward_keys = [
+ "optimizer_idx",
+ "global_step",
+ "last_layer",
+ "split",
+ "regularization_log",
+ ]
+
+ self.additional_log_keys = set(default(additional_log_keys, []))
+ self.additional_log_keys.update(set(self.regularization_weights.keys()))
+
+ def get_trainable_parameters(self) -> Iterator[nn.Parameter]:
+ return self.discriminator.parameters()
+
+ def get_trainable_autoencoder_parameters(self) -> Iterator[nn.Parameter]:
+ if self.learn_logvar:
+ yield self.logvar
+ yield from ()
+
+ @torch.no_grad()
+ def log_images(
+ self, inputs: torch.Tensor, reconstructions: torch.Tensor
+ ) -> Dict[str, torch.Tensor]:
+ # calc logits of real/fake
+ logits_real = self.discriminator(inputs.contiguous().detach())
+ if len(logits_real.shape) < 4:
+ # Non patch-discriminator
+ return dict()
+ logits_fake = self.discriminator(reconstructions.contiguous().detach())
+ # -> (b, 1, h, w)
+
+ # parameters for colormapping
+ high = max(logits_fake.abs().max(), logits_real.abs().max()).item()
+ cmap = colormaps["PiYG"] # diverging colormap
+
+ def to_colormap(logits: torch.Tensor) -> torch.Tensor:
+ """(b, 1, ...) -> (b, 3, ...)"""
+ logits = (logits + high) / (2 * high)
+ logits_np = cmap(logits.cpu().numpy())[..., :3] # truncate alpha channel
+ # -> (b, 1, ..., 3)
+ logits = torch.from_numpy(logits_np).to(logits.device)
+ return rearrange(logits, "b 1 ... c -> b c ...")
+
+ logits_real = torch.nn.functional.interpolate(
+ logits_real,
+ size=inputs.shape[-2:],
+ mode="nearest",
+ antialias=False,
+ )
+ logits_fake = torch.nn.functional.interpolate(
+ logits_fake,
+ size=reconstructions.shape[-2:],
+ mode="nearest",
+ antialias=False,
+ )
+
+ # alpha value of logits for overlay
+ alpha_real = torch.abs(logits_real) / high
+ alpha_fake = torch.abs(logits_fake) / high
+ # -> (b, 1, h, w) in range [0, 0.5]
+ # the alpha values of the grid lines don't really matter, since the values
+ # are the same for both images and logits anyway
+ grid_alpha_real = torchvision.utils.make_grid(alpha_real, nrow=4)
+ grid_alpha_fake = torchvision.utils.make_grid(alpha_fake, nrow=4)
+ grid_alpha = 0.8 * torch.cat((grid_alpha_real, grid_alpha_fake), dim=1)
+ # -> (1, h, w)
+ # blend logits and images together
+
+ # prepare logits for plotting
+ logits_real = to_colormap(logits_real)
+ logits_fake = to_colormap(logits_fake)
+ # resize logits
+ # -> (b, 3, h, w)
+
+ # make some grids
+ # add all logits to one plot
+ logits_real = torchvision.utils.make_grid(logits_real, nrow=4)
+ logits_fake = torchvision.utils.make_grid(logits_fake, nrow=4)
+ # I just love how torchvision calls the number of columns `nrow`
+ grid_logits = torch.cat((logits_real, logits_fake), dim=1)
+ # -> (3, h, w)
+
+ grid_images_real = torchvision.utils.make_grid(0.5 * inputs + 0.5, nrow=4)
+ grid_images_fake = torchvision.utils.make_grid(
+ 0.5 * reconstructions + 0.5, nrow=4
+ )
+ grid_images = torch.cat((grid_images_real, grid_images_fake), dim=1)
+ # -> (3, h, w) in range [0, 1]
+
+ grid_blend = grid_alpha * grid_logits + (1 - grid_alpha) * grid_images
+
+ # Create labeled colorbar
+ dpi = 100
+ height = 128 / dpi
+ width = grid_logits.shape[2] / dpi
+ fig, ax = plt.subplots(figsize=(width, height), dpi=dpi)
+ img = ax.imshow(np.array([[-high, high]]), cmap=cmap)
+ plt.colorbar(
+ img,
+ cax=ax,
+ orientation="horizontal",
+ fraction=0.9,
+ aspect=width / height,
+ pad=0.0,
+ )
+ img.set_visible(False)
+ fig.tight_layout()
+ fig.canvas.draw()
+ # manually convert figure to numpy
+ cbar_np = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
+ cbar_np = cbar_np.reshape(fig.canvas.get_width_height()[::-1] + (3,))
+ cbar = torch.from_numpy(cbar_np.copy()).to(grid_logits.dtype) / 255.0
+ cbar = rearrange(cbar, "h w c -> c h w").to(grid_logits.device)
+
+ # Add colorbar to plot
+ annotated_grid = torch.cat((grid_logits, cbar), dim=1)
+ blended_grid = torch.cat((grid_blend, cbar), dim=1)
+ return {
+ "vis_logits": 2 * annotated_grid[None, ...] - 1,
+ "vis_logits_blended": 2 * blended_grid[None, ...] - 1,
+ }
+
+ def calculate_adaptive_weight(
+ self, nll_loss: torch.Tensor, g_loss: torch.Tensor, last_layer: torch.Tensor
+ ) -> torch.Tensor:
+ nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]
+ g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]
+
+ d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4)
+ d_weight = torch.clamp(d_weight, 0.0, 1e4).detach()
+ d_weight = d_weight * self.discriminator_weight
+ return d_weight
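+
+ # (The ratio above rescales the GAN gradient so it cannot dominate the
+ # reconstruction objective: d_weight ~ ||grad nll|| / ||grad g_loss||,
+ # both taken at the decoder's last layer, then clamped and detached.)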
+
+ def forward(
+ self,
+ inputs: torch.Tensor,
+ reconstructions: torch.Tensor,
+ *, # added because I changed the order here
+ regularization_log: Dict[str, torch.Tensor],
+ optimizer_idx: int,
+ global_step: int,
+ last_layer: torch.Tensor,
+ split: str = "train",
+ weights: Union[None, float, torch.Tensor] = None,
+ ) -> Tuple[torch.Tensor, dict]:
+ if self.scale_input_to_tgt_size:
+ inputs = torch.nn.functional.interpolate(
+ inputs, reconstructions.shape[2:], mode="bicubic", antialias=True
+ )
+
+ if self.dims > 2:
+ inputs, reconstructions = map(
+ lambda x: rearrange(x, "b c t h w -> (b t) c h w"),
+ (inputs, reconstructions),
+ )
+
+ rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous())
+ if self.perceptual_weight > 0:
+ p_loss = self.perceptual_loss(
+ inputs.contiguous(), reconstructions.contiguous()
+ )
+ rec_loss = rec_loss + self.perceptual_weight * p_loss
+
+ nll_loss, weighted_nll_loss = self.get_nll_loss(rec_loss, weights)
+
+ # now the GAN part
+ if optimizer_idx == 0:
+ # generator update
+ if global_step >= self.discriminator_iter_start or not self.training:
+ logits_fake = self.discriminator(reconstructions.contiguous())
+ g_loss = -torch.mean(logits_fake)
+ if self.training:
+ d_weight = self.calculate_adaptive_weight(
+ nll_loss, g_loss, last_layer=last_layer
+ )
+ else:
+ d_weight = torch.tensor(1.0)
+ else:
+ d_weight = torch.tensor(0.0)
+ g_loss = torch.tensor(0.0, requires_grad=True)
+
+ loss = weighted_nll_loss + d_weight * self.disc_factor * g_loss
+ log = dict()
+ for k in regularization_log:
+ if k in self.regularization_weights:
+ loss = loss + self.regularization_weights[k] * regularization_log[k]
+ if k in self.additional_log_keys:
+ log[f"{split}/{k}"] = regularization_log[k].detach().float().mean()
+
+ log.update(
+ {
+ f"{split}/loss/total": loss.clone().detach().mean(),
+ f"{split}/loss/nll": nll_loss.detach().mean(),
+ f"{split}/loss/rec": rec_loss.detach().mean(),
+ f"{split}/loss/g": g_loss.detach().mean(),
+ f"{split}/scalars/logvar": self.logvar.detach(),
+ f"{split}/scalars/d_weight": d_weight.detach(),
+ }
+ )
+
+ return loss, log
+ elif optimizer_idx == 1:
+ # second pass for discriminator update
+ logits_real = self.discriminator(inputs.contiguous().detach())
+ logits_fake = self.discriminator(reconstructions.contiguous().detach())
+
+ if global_step >= self.discriminator_iter_start or not self.training:
+ d_loss = self.disc_factor * self.disc_loss(logits_real, logits_fake)
+ else:
+ d_loss = torch.tensor(0.0, requires_grad=True)
+
+ log = {
+ f"{split}/loss/disc": d_loss.clone().detach().mean(),
+ f"{split}/logits/real": logits_real.detach().mean(),
+ f"{split}/logits/fake": logits_fake.detach().mean(),
+ }
+ return d_loss, log
+ else:
+ raise NotImplementedError(f"Unknown optimizer_idx {optimizer_idx}")
+
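+ # get_nll_loss: dividing by exp(logvar) and adding logvar implements a learned
+ # (homoscedastic) uncertainty weighting of the reconstruction error, in the
+ # spirit of the sigma-VAE objective; with learn_logvar=False and logvar_init=0
+ # it reduces to the reconstruction error summed per element and averaged over
+ # the batch.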
+ def get_nll_loss(
+ self,
+ rec_loss: torch.Tensor,
+ weights: Optional[Union[float, torch.Tensor]] = None,
+ ) -> Tuple[torch.Tensor, torch.Tensor]:
+ nll_loss = rec_loss / torch.exp(self.logvar) + self.logvar
+ weighted_nll_loss = nll_loss
+ if weights is not None:
+ weighted_nll_loss = weights * nll_loss
+ weighted_nll_loss = torch.sum(weighted_nll_loss) / weighted_nll_loss.shape[0]
+ nll_loss = torch.sum(nll_loss) / nll_loss.shape[0]
+
+ return nll_loss, weighted_nll_loss
diff --git a/sgm/modules/autoencoding/losses/lpips.py b/sgm/modules/autoencoding/losses/lpips.py
new file mode 100644
index 0000000000000000000000000000000000000000..b329fcc2ee9477f0122aa7d066866cdfe71ce521
--- /dev/null
+++ b/sgm/modules/autoencoding/losses/lpips.py
@@ -0,0 +1,73 @@
+import torch
+import torch.nn as nn
+
+from ....util import default, instantiate_from_config
+from ..lpips.loss.lpips import LPIPS
+
+
+class LatentLPIPS(nn.Module):
+ def __init__(
+ self,
+ decoder_config,
+ perceptual_weight=1.0,
+ latent_weight=1.0,
+ scale_input_to_tgt_size=False,
+ scale_tgt_to_input_size=False,
+ perceptual_weight_on_inputs=0.0,
+ ):
+ super().__init__()
+ self.scale_input_to_tgt_size = scale_input_to_tgt_size
+ self.scale_tgt_to_input_size = scale_tgt_to_input_size
+ self.init_decoder(decoder_config)
+ self.perceptual_loss = LPIPS().eval()
+ self.perceptual_weight = perceptual_weight
+ self.latent_weight = latent_weight
+ self.perceptual_weight_on_inputs = perceptual_weight_on_inputs
+
+ def init_decoder(self, config):
+ self.decoder = instantiate_from_config(config)
+ if hasattr(self.decoder, "encoder"):
+ del self.decoder.encoder
+
+ def forward(self, latent_inputs, latent_predictions, image_inputs, split="train"):
+ log = dict()
+ loss = (latent_inputs - latent_predictions) ** 2
+ log[f"{split}/latent_l2_loss"] = loss.mean().detach()
+ image_reconstructions = None
+ if self.perceptual_weight > 0.0:
+ image_reconstructions = self.decoder.decode(latent_predictions)
+ image_targets = self.decoder.decode(latent_inputs)
+ perceptual_loss = self.perceptual_loss(
+ image_targets.contiguous(), image_reconstructions.contiguous()
+ )
+ loss = (
+ self.latent_weight * loss.mean()
+ + self.perceptual_weight * perceptual_loss.mean()
+ )
+ log[f"{split}/perceptual_loss"] = perceptual_loss.mean().detach()
+
+ if self.perceptual_weight_on_inputs > 0.0:
+ image_reconstructions = default(
+ image_reconstructions, self.decoder.decode(latent_predictions)
+ )
+ if self.scale_input_to_tgt_size:
+ image_inputs = torch.nn.functional.interpolate(
+ image_inputs,
+ image_reconstructions.shape[2:],
+ mode="bicubic",
+ antialias=True,
+ )
+ elif self.scale_tgt_to_input_size:
+ image_reconstructions = torch.nn.functional.interpolate(
+ image_reconstructions,
+ image_inputs.shape[2:],
+ mode="bicubic",
+ antialias=True,
+ )
+
+ perceptual_loss2 = self.perceptual_loss(
+ image_inputs.contiguous(), image_reconstructions.contiguous()
+ )
+ loss = loss + self.perceptual_weight_on_inputs * perceptual_loss2.mean()
+ log[f"{split}/perceptual_loss_on_inputs"] = perceptual_loss2.mean().detach()
+ return loss, log
diff --git a/sgm/modules/autoencoding/lpips/__init__.py b/sgm/modules/autoencoding/lpips/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/autoencoding/lpips/loss/.gitignore b/sgm/modules/autoencoding/lpips/loss/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..a92958a1cd4ffe005e1f5448ab3e6fd9c795a43a
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/loss/.gitignore
@@ -0,0 +1 @@
+vgg.pth
\ No newline at end of file
diff --git a/sgm/modules/autoencoding/lpips/loss/LICENSE b/sgm/modules/autoencoding/lpips/loss/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..924cfc85b8d63ef538f5676f830a2a8497932108
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/loss/LICENSE
@@ -0,0 +1,23 @@
+Copyright (c) 2018, Richard Zhang, Phillip Isola, Alexei A. Efros, Eli Shechtman, Oliver Wang
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/sgm/modules/autoencoding/lpips/loss/__init__.py b/sgm/modules/autoencoding/lpips/loss/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/autoencoding/lpips/loss/lpips.py b/sgm/modules/autoencoding/lpips/loss/lpips.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e34f3d083674f675a5ca024e9bd27fb77e2b6b5
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/loss/lpips.py
@@ -0,0 +1,147 @@
+"""Stripped version of https://github.com/richzhang/PerceptualSimilarity/tree/master/models"""
+
+from collections import namedtuple
+
+import torch
+import torch.nn as nn
+from torchvision import models
+
+from ..util import get_ckpt_path
+
+
+class LPIPS(nn.Module):
+ # Learned perceptual metric
+ def __init__(self, use_dropout=True):
+ super().__init__()
+ self.scaling_layer = ScalingLayer()
+ self.chns = [64, 128, 256, 512, 512] # vgg16 features
+ self.net = vgg16(pretrained=True, requires_grad=False)
+ self.lin0 = NetLinLayer(self.chns[0], use_dropout=use_dropout)
+ self.lin1 = NetLinLayer(self.chns[1], use_dropout=use_dropout)
+ self.lin2 = NetLinLayer(self.chns[2], use_dropout=use_dropout)
+ self.lin3 = NetLinLayer(self.chns[3], use_dropout=use_dropout)
+ self.lin4 = NetLinLayer(self.chns[4], use_dropout=use_dropout)
+ self.load_from_pretrained()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def load_from_pretrained(self, name="vgg_lpips"):
+ ckpt = get_ckpt_path(name, "sgm/modules/autoencoding/lpips/loss")
+ self.load_state_dict(
+ torch.load(ckpt, map_location=torch.device("cpu")), strict=False
+ )
+ print("loaded pretrained LPIPS loss from {}".format(ckpt))
+
+ @classmethod
+ def from_pretrained(cls, name="vgg_lpips"):
+ if name != "vgg_lpips":
+ raise NotImplementedError
+ model = cls()
+ ckpt = get_ckpt_path(name)
+ model.load_state_dict(
+ torch.load(ckpt, map_location=torch.device("cpu")), strict=False
+ )
+ return model
+
+ def forward(self, input, target):
+ in0_input, in1_input = (self.scaling_layer(input), self.scaling_layer(target))
+ outs0, outs1 = self.net(in0_input), self.net(in1_input)
+ feats0, feats1, diffs = {}, {}, {}
+ lins = [self.lin0, self.lin1, self.lin2, self.lin3, self.lin4]
+ for kk in range(len(self.chns)):
+ feats0[kk], feats1[kk] = normalize_tensor(outs0[kk]), normalize_tensor(
+ outs1[kk]
+ )
+ diffs[kk] = (feats0[kk] - feats1[kk]) ** 2
+
+ res = [
+ spatial_average(lins[kk].model(diffs[kk]), keepdim=True)
+ for kk in range(len(self.chns))
+ ]
+ val = res[0]
+ for l in range(1, len(self.chns)):
+ val += res[l]
+ return val
+
+
+class ScalingLayer(nn.Module):
+ def __init__(self):
+ super(ScalingLayer, self).__init__()
+ self.register_buffer(
+ "shift", torch.Tensor([-0.030, -0.088, -0.188])[None, :, None, None]
+ )
+ self.register_buffer(
+ "scale", torch.Tensor([0.458, 0.448, 0.450])[None, :, None, None]
+ )
+
+ def forward(self, inp):
+ return (inp - self.shift) / self.scale
+
+
+class NetLinLayer(nn.Module):
+ """A single linear layer which does a 1x1 conv"""
+
+ def __init__(self, chn_in, chn_out=1, use_dropout=False):
+ super(NetLinLayer, self).__init__()
+ layers = (
+ [
+ nn.Dropout(),
+ ]
+ if (use_dropout)
+ else []
+ )
+ layers += [
+ nn.Conv2d(chn_in, chn_out, 1, stride=1, padding=0, bias=False),
+ ]
+ self.model = nn.Sequential(*layers)
+
+
+class vgg16(torch.nn.Module):
+ def __init__(self, requires_grad=False, pretrained=True):
+ super(vgg16, self).__init__()
+ vgg_pretrained_features = models.vgg16(pretrained=pretrained).features
+ self.slice1 = torch.nn.Sequential()
+ self.slice2 = torch.nn.Sequential()
+ self.slice3 = torch.nn.Sequential()
+ self.slice4 = torch.nn.Sequential()
+ self.slice5 = torch.nn.Sequential()
+ self.N_slices = 5
+ for x in range(4):
+ self.slice1.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(4, 9):
+ self.slice2.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(9, 16):
+ self.slice3.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(16, 23):
+ self.slice4.add_module(str(x), vgg_pretrained_features[x])
+ for x in range(23, 30):
+ self.slice5.add_module(str(x), vgg_pretrained_features[x])
+ if not requires_grad:
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def forward(self, X):
+ h = self.slice1(X)
+ h_relu1_2 = h
+ h = self.slice2(h)
+ h_relu2_2 = h
+ h = self.slice3(h)
+ h_relu3_3 = h
+ h = self.slice4(h)
+ h_relu4_3 = h
+ h = self.slice5(h)
+ h_relu5_3 = h
+ vgg_outputs = namedtuple(
+ "VggOutputs", ["relu1_2", "relu2_2", "relu3_3", "relu4_3", "relu5_3"]
+ )
+ out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3, h_relu5_3)
+ return out
+
+
+def normalize_tensor(x, eps=1e-10):
+ norm_factor = torch.sqrt(torch.sum(x**2, dim=1, keepdim=True))
+ return x / (norm_factor + eps)
+
+
+def spatial_average(x, keepdim=True):
+ return x.mean([2, 3], keepdim=keepdim)
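+
+
+# Usage sketch (illustrative): inputs are expected roughly in [-1, 1] (the
+# ScalingLayer maps them onto the VGG input statistics); the result is one
+# scalar distance per sample, returned as a (b, 1, 1, 1) tensor:
+#
+#   lpips = LPIPS().eval()
+#   d = lpips(img_a, img_b)   # img_* of shape (b, 3, h, w) in [-1, 1]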
diff --git a/sgm/modules/autoencoding/lpips/model/LICENSE b/sgm/modules/autoencoding/lpips/model/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..4b356e66b5aa689b339f1a80a9f1b5ba378003bb
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/model/LICENSE
@@ -0,0 +1,58 @@
+Copyright (c) 2017, Jun-Yan Zhu and Taesung Park
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+--------------------------- LICENSE FOR pix2pix --------------------------------
+BSD License
+
+For pix2pix software
+Copyright (c) 2016, Phillip Isola and Jun-Yan Zhu
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+----------------------------- LICENSE FOR DCGAN --------------------------------
+BSD License
+
+For dcgan.torch software
+
+Copyright (c) 2015, Facebook, Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+Neither the name Facebook nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/sgm/modules/autoencoding/lpips/model/__init__.py b/sgm/modules/autoencoding/lpips/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/autoencoding/lpips/model/model.py b/sgm/modules/autoencoding/lpips/model/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..66357d4e627f9a69a5abbbad15546c96fcd758fe
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/model/model.py
@@ -0,0 +1,88 @@
+import functools
+
+import torch.nn as nn
+
+from ..util import ActNorm
+
+
+def weights_init(m):
+ classname = m.__class__.__name__
+ if classname.find("Conv") != -1:
+ nn.init.normal_(m.weight.data, 0.0, 0.02)
+ elif classname.find("BatchNorm") != -1:
+ nn.init.normal_(m.weight.data, 1.0, 0.02)
+ nn.init.constant_(m.bias.data, 0)
+
+
+class NLayerDiscriminator(nn.Module):
+ """Defines a PatchGAN discriminator as in Pix2Pix
+ --> see https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/models/networks.py
+ """
+
+ def __init__(self, input_nc=3, ndf=64, n_layers=3, use_actnorm=False):
+ """Construct a PatchGAN discriminator
+ Parameters:
+ input_nc (int) -- the number of channels in input images
+ ndf (int) -- the number of filters in the last conv layer
+ n_layers (int) -- the number of conv layers in the discriminator
+ norm_layer -- normalization layer
+ """
+ super(NLayerDiscriminator, self).__init__()
+ if not use_actnorm:
+ norm_layer = nn.BatchNorm2d
+ else:
+ norm_layer = ActNorm
+ if (
+ type(norm_layer) == functools.partial
+ ): # no need to use bias as BatchNorm2d has affine parameters
+ use_bias = norm_layer.func != nn.BatchNorm2d
+ else:
+ use_bias = norm_layer != nn.BatchNorm2d
+
+ kw = 4
+ padw = 1
+ sequence = [
+ nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw),
+ nn.LeakyReLU(0.2, True),
+ ]
+ nf_mult = 1
+ nf_mult_prev = 1
+ for n in range(1, n_layers): # gradually increase the number of filters
+ nf_mult_prev = nf_mult
+ nf_mult = min(2**n, 8)
+ sequence += [
+ nn.Conv2d(
+ ndf * nf_mult_prev,
+ ndf * nf_mult,
+ kernel_size=kw,
+ stride=2,
+ padding=padw,
+ bias=use_bias,
+ ),
+ norm_layer(ndf * nf_mult),
+ nn.LeakyReLU(0.2, True),
+ ]
+
+ nf_mult_prev = nf_mult
+ nf_mult = min(2**n_layers, 8)
+ sequence += [
+ nn.Conv2d(
+ ndf * nf_mult_prev,
+ ndf * nf_mult,
+ kernel_size=kw,
+ stride=1,
+ padding=padw,
+ bias=use_bias,
+ ),
+ norm_layer(ndf * nf_mult),
+ nn.LeakyReLU(0.2, True),
+ ]
+
+ sequence += [
+ nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)
+ ] # output 1 channel prediction map
+ self.main = nn.Sequential(*sequence)
+
+ def forward(self, input):
+ """Standard forward."""
+ return self.main(input)
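+
+
+# Shape sketch (illustrative): with the defaults (ndf=64, n_layers=3), a
+# 256x256 RGB batch maps to a 30x30 one-channel patch-logit map:
+#
+#   disc = NLayerDiscriminator(input_nc=3).apply(weights_init)
+#   logits = disc(torch.randn(2, 3, 256, 256))   # -> (2, 1, 30, 30)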
diff --git a/sgm/modules/autoencoding/lpips/util.py b/sgm/modules/autoencoding/lpips/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..49c76e370bf16888ab61f42844b3c9f14ad9014c
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/util.py
@@ -0,0 +1,128 @@
+import hashlib
+import os
+
+import requests
+import torch
+import torch.nn as nn
+from tqdm import tqdm
+
+URL_MAP = {"vgg_lpips": "https://heibox.uni-heidelberg.de/f/607503859c864bc1b30b/?dl=1"}
+
+CKPT_MAP = {"vgg_lpips": "vgg.pth"}
+
+MD5_MAP = {"vgg_lpips": "d507d7349b931f0638a25a48a722f98a"}
+
+
+def download(url, local_path, chunk_size=1024):
+ os.makedirs(os.path.split(local_path)[0], exist_ok=True)
+ with requests.get(url, stream=True) as r:
+ total_size = int(r.headers.get("content-length", 0))
+ with tqdm(total=total_size, unit="B", unit_scale=True) as pbar:
+ with open(local_path, "wb") as f:
+ for data in r.iter_content(chunk_size=chunk_size):
+ if data:
+ f.write(data)
+ pbar.update(chunk_size)
+
+
+def md5_hash(path):
+ with open(path, "rb") as f:
+ content = f.read()
+ return hashlib.md5(content).hexdigest()
+
+
+def get_ckpt_path(name, root, check=False):
+ assert name in URL_MAP
+ path = os.path.join(root, CKPT_MAP[name])
+ if not os.path.exists(path) or (check and not md5_hash(path) == MD5_MAP[name]):
+ print("Downloading {} model from {} to {}".format(name, URL_MAP[name], path))
+ download(URL_MAP[name], path)
+ md5 = md5_hash(path)
+ assert md5 == MD5_MAP[name], md5
+ return path
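+
+# e.g. get_ckpt_path("vgg_lpips", "sgm/modules/autoencoding/lpips/loss") will
+# download vgg.pth next to this module on first use and, when check=True,
+# verify its md5 against MD5_MAP.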
+
+
+class ActNorm(nn.Module):
+ def __init__(
+ self, num_features, logdet=False, affine=True, allow_reverse_init=False
+ ):
+ assert affine
+ super().__init__()
+ self.logdet = logdet
+ self.loc = nn.Parameter(torch.zeros(1, num_features, 1, 1))
+ self.scale = nn.Parameter(torch.ones(1, num_features, 1, 1))
+ self.allow_reverse_init = allow_reverse_init
+
+ self.register_buffer("initialized", torch.tensor(0, dtype=torch.uint8))
+
+ def initialize(self, input):
+ with torch.no_grad():
+ flatten = input.permute(1, 0, 2, 3).contiguous().view(input.shape[1], -1)
+ mean = (
+ flatten.mean(1)
+ .unsqueeze(1)
+ .unsqueeze(2)
+ .unsqueeze(3)
+ .permute(1, 0, 2, 3)
+ )
+ std = (
+ flatten.std(1)
+ .unsqueeze(1)
+ .unsqueeze(2)
+ .unsqueeze(3)
+ .permute(1, 0, 2, 3)
+ )
+
+ self.loc.data.copy_(-mean)
+ self.scale.data.copy_(1 / (std + 1e-6))
+
+ def forward(self, input, reverse=False):
+ if reverse:
+ return self.reverse(input)
+ if len(input.shape) == 2:
+ input = input[:, :, None, None]
+ squeeze = True
+ else:
+ squeeze = False
+
+ _, _, height, width = input.shape
+
+ if self.training and self.initialized.item() == 0:
+ self.initialize(input)
+ self.initialized.fill_(1)
+
+ h = self.scale * (input + self.loc)
+
+ if squeeze:
+ h = h.squeeze(-1).squeeze(-1)
+
+ if self.logdet:
+ log_abs = torch.log(torch.abs(self.scale))
+ logdet = height * width * torch.sum(log_abs)
+ logdet = logdet * torch.ones(input.shape[0]).to(input)
+ return h, logdet
+
+ return h
+
+ def reverse(self, output):
+ if self.training and self.initialized.item() == 0:
+ if not self.allow_reverse_init:
+ raise RuntimeError(
+ "Initializing ActNorm in reverse direction is "
+ "disabled by default. Use allow_reverse_init=True to enable."
+ )
+ else:
+ self.initialize(output)
+ self.initialized.fill_(1)
+
+ if len(output.shape) == 2:
+ output = output[:, :, None, None]
+ squeeze = True
+ else:
+ squeeze = False
+
+ h = output / self.scale - self.loc
+
+ if squeeze:
+ h = h.squeeze(-1).squeeze(-1)
+ return h
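+
+
+# ActNorm sketch (illustrative): the first training-mode forward performs a
+# data-dependent init so every channel of the output has zero mean and unit
+# variance on that batch; subsequent calls just apply the learned affine map.
+#
+#   an = ActNorm(num_features=8)
+#   x = torch.randn(4, 8, 16, 16) * 3 + 1
+#   y = an(x)                     # ~zero mean / unit std per channel
+#   x_rec = an(y, reverse=True)   # inverts the affine map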
diff --git a/sgm/modules/autoencoding/lpips/vqperceptual.py b/sgm/modules/autoencoding/lpips/vqperceptual.py
new file mode 100644
index 0000000000000000000000000000000000000000..6195f0a6ed7ee6fd32c1bccea071e6075e95ee43
--- /dev/null
+++ b/sgm/modules/autoencoding/lpips/vqperceptual.py
@@ -0,0 +1,17 @@
+import torch
+import torch.nn.functional as F
+
+
+def hinge_d_loss(logits_real, logits_fake):
+ loss_real = torch.mean(F.relu(1.0 - logits_real))
+ loss_fake = torch.mean(F.relu(1.0 + logits_fake))
+ d_loss = 0.5 * (loss_real + loss_fake)
+ return d_loss
+
+
+def vanilla_d_loss(logits_real, logits_fake):
+ d_loss = 0.5 * (
+ torch.mean(torch.nn.functional.softplus(-logits_real))
+ + torch.mean(torch.nn.functional.softplus(logits_fake))
+ )
+ return d_loss
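+
+
+# Quick sanity sketch (illustrative, not used by this repo): for a perfectly
+# separating discriminator hinge_d_loss goes to zero, while undecided logits
+# around zero sit at 1.0:
+#
+#   hinge_d_loss(torch.ones(4), -torch.ones(4))   # tensor(0.)
+#   hinge_d_loss(torch.zeros(4), torch.zeros(4))  # tensor(1.)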
diff --git a/sgm/modules/autoencoding/regularizers/__init__.py b/sgm/modules/autoencoding/regularizers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff2b1815a5ba88892375e8ec9bedacea49024113
--- /dev/null
+++ b/sgm/modules/autoencoding/regularizers/__init__.py
@@ -0,0 +1,31 @@
+from abc import abstractmethod
+from typing import Any, Tuple
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from ....modules.distributions.distributions import \
+ DiagonalGaussianDistribution
+from .base import AbstractRegularizer
+
+
+class DiagonalGaussianRegularizer(AbstractRegularizer):
+ def __init__(self, sample: bool = True):
+ super().__init__()
+ self.sample = sample
+
+ def get_trainable_parameters(self) -> Any:
+ yield from ()
+
+ def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]:
+ log = dict()
+ posterior = DiagonalGaussianDistribution(z)
+ if self.sample:
+ z = posterior.sample()
+ else:
+ z = posterior.mode()
+ kl_loss = posterior.kl()
+ kl_loss = torch.sum(kl_loss) / kl_loss.shape[0]
+ log["kl_loss"] = kl_loss
+ return z, log
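+
+
+# Sketch (illustrative; assumes DiagonalGaussianDistribution splits the channel
+# axis into mean and logvar halves): a (b, 2k, h, w) input yields a (b, k, h, w)
+# sample plus a scalar "kl_loss" entry in the log dict:
+#
+#   reg = DiagonalGaussianRegularizer(sample=True)
+#   z_sampled, log = reg(torch.randn(4, 8, 16, 16))   # -> (4, 4, 16, 16)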
diff --git a/sgm/modules/autoencoding/regularizers/base.py b/sgm/modules/autoencoding/regularizers/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..fca681bb3c1f4818b57e956e31b98f76077ccb67
--- /dev/null
+++ b/sgm/modules/autoencoding/regularizers/base.py
@@ -0,0 +1,40 @@
+from abc import abstractmethod
+from typing import Any, Tuple
+
+import torch
+import torch.nn.functional as F
+from torch import nn
+
+
+class AbstractRegularizer(nn.Module):
+ def __init__(self):
+ super().__init__()
+
+ def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]:
+ raise NotImplementedError()
+
+ @abstractmethod
+ def get_trainable_parameters(self) -> Any:
+ raise NotImplementedError()
+
+
+class IdentityRegularizer(AbstractRegularizer):
+ def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]:
+ return z, dict()
+
+ def get_trainable_parameters(self) -> Any:
+ yield from ()
+
+
+def measure_perplexity(
+ predicted_indices: torch.Tensor, num_centroids: int
+) -> Tuple[torch.Tensor, torch.Tensor]:
+ # src: https://github.com/karpathy/deep-vector-quantization/blob/main/model.py
+ # eval cluster perplexity. when perplexity == num_embeddings then all clusters are used exactly equally
+ encodings = (
+ F.one_hot(predicted_indices, num_centroids).float().reshape(-1, num_centroids)
+ )
+ avg_probs = encodings.mean(0)
+ perplexity = (-(avg_probs * torch.log(avg_probs + 1e-10)).sum()).exp()
+ cluster_use = torch.sum(avg_probs > 0)
+ return perplexity, cluster_use
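+
+
+# Worked example (hypothetical numbers): with num_centroids=4 and predicted
+# indices [0, 0, 1, 2], avg_probs = [0.5, 0.25, 0.25, 0.0], so
+# perplexity = exp(entropy) ~= 2.83 and cluster_use = 3; perfectly uniform
+# code usage would give perplexity == 4.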
diff --git a/sgm/modules/autoencoding/regularizers/quantize.py b/sgm/modules/autoencoding/regularizers/quantize.py
new file mode 100644
index 0000000000000000000000000000000000000000..86a4dbdd10101b24f03bba134c4f8d2ab007f0db
--- /dev/null
+++ b/sgm/modules/autoencoding/regularizers/quantize.py
@@ -0,0 +1,487 @@
+import logging
+from abc import abstractmethod
+from typing import Dict, Iterator, Literal, Optional, Tuple, Union
+
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from einops import rearrange
+from torch import einsum
+
+from .base import AbstractRegularizer, measure_perplexity
+
+logpy = logging.getLogger(__name__)
+
+
+class AbstractQuantizer(AbstractRegularizer):
+ def __init__(self):
+ super().__init__()
+ # Define these in your init
+ # shape (N,)
+ self.used: Optional[torch.Tensor]
+ self.re_embed: int
+ self.unknown_index: Union[Literal["random"], int]
+
+ def remap_to_used(self, inds: torch.Tensor) -> torch.Tensor:
+ assert self.used is not None, "You need to define used indices for remap"
+ ishape = inds.shape
+ assert len(ishape) > 1
+ inds = inds.reshape(ishape[0], -1)
+ used = self.used.to(inds)
+ match = (inds[:, :, None] == used[None, None, ...]).long()
+ new = match.argmax(-1)
+ unknown = match.sum(2) < 1
+ if self.unknown_index == "random":
+ new[unknown] = torch.randint(0, self.re_embed, size=new[unknown].shape).to(
+ device=new.device
+ )
+ else:
+ new[unknown] = self.unknown_index
+ return new.reshape(ishape)
+
+ def unmap_to_all(self, inds: torch.Tensor) -> torch.Tensor:
+ assert self.used is not None, "You need to define used indices for remap"
+ ishape = inds.shape
+ assert len(ishape) > 1
+ inds = inds.reshape(ishape[0], -1)
+ used = self.used.to(inds)
+ if self.re_embed > self.used.shape[0]: # extra token
+ inds[inds >= self.used.shape[0]] = 0 # simply set to zero
+ back = torch.gather(used[None, :][inds.shape[0] * [0], :], 1, inds)
+ return back.reshape(ishape)
+
+ @abstractmethod
+ def get_codebook_entry(
+ self, indices: torch.Tensor, shape: Optional[Tuple[int, ...]] = None
+ ) -> torch.Tensor:
+ raise NotImplementedError()
+
+ def get_trainable_parameters(self) -> Iterator[torch.nn.Parameter]:
+ yield from self.parameters()
+
+
+class GumbelQuantizer(AbstractQuantizer):
+ """
+ credit to @karpathy:
+ https://github.com/karpathy/deep-vector-quantization/blob/main/model.py (thanks!)
+ Gumbel Softmax trick quantizer
+ Categorical Reparameterization with Gumbel-Softmax, Jang et al. 2016
+ https://arxiv.org/abs/1611.01144
+ """
+
+ def __init__(
+ self,
+ num_hiddens: int,
+ embedding_dim: int,
+ n_embed: int,
+ straight_through: bool = True,
+ kl_weight: float = 5e-4,
+ temp_init: float = 1.0,
+ remap: Optional[str] = None,
+ unknown_index: str = "random",
+ loss_key: str = "loss/vq",
+ ) -> None:
+ super().__init__()
+
+ self.loss_key = loss_key
+ self.embedding_dim = embedding_dim
+ self.n_embed = n_embed
+
+ self.straight_through = straight_through
+ self.temperature = temp_init
+ self.kl_weight = kl_weight
+
+ self.proj = nn.Conv2d(num_hiddens, n_embed, 1)
+ self.embed = nn.Embedding(n_embed, embedding_dim)
+
+ self.remap = remap
+ if self.remap is not None:
+ self.register_buffer("used", torch.tensor(np.load(self.remap)))
+ self.re_embed = self.used.shape[0]
+ else:
+ self.used = None
+ self.re_embed = n_embed
+ if unknown_index == "extra":
+ self.unknown_index = self.re_embed
+ self.re_embed = self.re_embed + 1
+ else:
+ assert unknown_index == "random" or isinstance(
+ unknown_index, int
+ ), "unknown index needs to be 'random', 'extra' or any integer"
+ self.unknown_index = unknown_index # "random" or "extra" or integer
+ if self.remap is not None:
+ logpy.info(
+ f"Remapping {self.n_embed} indices to {self.re_embed} indices. "
+ f"Using {self.unknown_index} for unknown indices."
+ )
+
+ def forward(
+ self, z: torch.Tensor, temp: Optional[float] = None, return_logits: bool = False
+ ) -> Tuple[torch.Tensor, Dict]:
+ # force hard = True when we are in eval mode, as we must quantize.
+ # actually, always true seems to work
+ hard = self.straight_through if self.training else True
+ temp = self.temperature if temp is None else temp
+ out_dict = {}
+ logits = self.proj(z)
+ if self.remap is not None:
+ # continue only with used logits
+ full_zeros = torch.zeros_like(logits)
+ logits = logits[:, self.used, ...]
+
+ soft_one_hot = F.gumbel_softmax(logits, tau=temp, dim=1, hard=hard)
+ if self.remap is not None:
+ # go back to all entries but unused set to zero
+ full_zeros[:, self.used, ...] = soft_one_hot
+ soft_one_hot = full_zeros
+ z_q = einsum("b n h w, n d -> b d h w", soft_one_hot, self.embed.weight)
+
+ # + kl divergence to the prior loss
+ qy = F.softmax(logits, dim=1)
+ diff = (
+ self.kl_weight
+ * torch.sum(qy * torch.log(qy * self.n_embed + 1e-10), dim=1).mean()
+ )
+ out_dict[self.loss_key] = diff
+
+ ind = soft_one_hot.argmax(dim=1)
+ out_dict["indices"] = ind
+ if self.remap is not None:
+ ind = self.remap_to_used(ind)
+
+ if return_logits:
+ out_dict["logits"] = logits
+
+ return z_q, out_dict
+
+ def get_codebook_entry(self, indices, shape):
+ # TODO: shape not yet optional
+ b, h, w, c = shape
+ assert b * h * w == indices.shape[0]
+ indices = rearrange(indices, "(b h w) -> b h w", b=b, h=h, w=w)
+ if self.remap is not None:
+ indices = self.unmap_to_all(indices)
+ one_hot = (
+ F.one_hot(indices, num_classes=self.n_embed).permute(0, 3, 1, 2).float()
+ )
+ z_q = einsum("b n h w, n d -> b d h w", one_hot, self.embed.weight)
+ return z_q
+
+
+class VectorQuantizer(AbstractQuantizer):
+ """
+ ____________________________________________
+ Discretization bottleneck part of the VQ-VAE.
+ Inputs:
+ - n_e : number of embeddings
+ - e_dim : dimension of embedding
+ - beta : commitment cost used in loss term,
+ beta * ||z_e(x)-sg[e]||^2
+ _____________________________________________
+ """
+
+ def __init__(
+ self,
+ n_e: int,
+ e_dim: int,
+ beta: float = 0.25,
+ remap: Optional[str] = None,
+ unknown_index: str = "random",
+ sane_index_shape: bool = False,
+ log_perplexity: bool = False,
+ embedding_weight_norm: bool = False,
+ loss_key: str = "loss/vq",
+ ):
+ super().__init__()
+ self.n_e = n_e
+ self.e_dim = e_dim
+ self.beta = beta
+ self.loss_key = loss_key
+
+ if not embedding_weight_norm:
+ self.embedding = nn.Embedding(self.n_e, self.e_dim)
+ self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e)
+ else:
+ self.embedding = torch.nn.utils.weight_norm(
+ nn.Embedding(self.n_e, self.e_dim), dim=1
+ )
+
+ self.remap = remap
+ if self.remap is not None:
+ self.register_buffer("used", torch.tensor(np.load(self.remap)))
+ self.re_embed = self.used.shape[0]
+ else:
+ self.used = None
+ self.re_embed = n_e
+ if unknown_index == "extra":
+ self.unknown_index = self.re_embed
+ self.re_embed = self.re_embed + 1
+ else:
+ assert unknown_index == "random" or isinstance(
+ unknown_index, int
+ ), "unknown index needs to be 'random', 'extra' or any integer"
+ self.unknown_index = unknown_index # "random" or "extra" or integer
+ if self.remap is not None:
+ logpy.info(
+ f"Remapping {self.n_e} indices to {self.re_embed} indices. "
+ f"Using {self.unknown_index} for unknown indices."
+ )
+
+ self.sane_index_shape = sane_index_shape
+ self.log_perplexity = log_perplexity
+
+ def forward(
+ self,
+ z: torch.Tensor,
+ ) -> Tuple[torch.Tensor, Dict]:
+ do_reshape = z.ndim == 4
+ if do_reshape:
+ # # reshape z -> (batch, height, width, channel) and flatten
+ z = rearrange(z, "b c h w -> b h w c").contiguous()
+
+ else:
+ assert z.ndim < 4, "No reshaping strategy for inputs > 4 dimensions defined"
+ z = z.contiguous()
+
+ z_flattened = z.view(-1, self.e_dim)
+ # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z
+
+ d = (
+ torch.sum(z_flattened**2, dim=1, keepdim=True)
+ + torch.sum(self.embedding.weight**2, dim=1)
+ - 2
+ * torch.einsum(
+ "bd,dn->bn", z_flattened, rearrange(self.embedding.weight, "n d -> d n")
+ )
+ )
+
+ min_encoding_indices = torch.argmin(d, dim=1)
+ z_q = self.embedding(min_encoding_indices).view(z.shape)
+ loss_dict = {}
+ if self.log_perplexity:
+ perplexity, cluster_usage = measure_perplexity(
+ min_encoding_indices.detach(), self.n_e
+ )
+ loss_dict.update({"perplexity": perplexity, "cluster_usage": cluster_usage})
+
+ # compute loss for embedding
+ loss = self.beta * torch.mean((z_q.detach() - z) ** 2) + torch.mean(
+ (z_q - z.detach()) ** 2
+ )
+ loss_dict[self.loss_key] = loss
+
+ # preserve gradients
+ z_q = z + (z_q - z).detach()
+
+ # reshape back to match original input shape
+ if do_reshape:
+ z_q = rearrange(z_q, "b h w c -> b c h w").contiguous()
+
+ if self.remap is not None:
+ min_encoding_indices = min_encoding_indices.reshape(
+ z.shape[0], -1
+ ) # add batch axis
+ min_encoding_indices = self.remap_to_used(min_encoding_indices)
+ min_encoding_indices = min_encoding_indices.reshape(-1, 1) # flatten
+
+ if self.sane_index_shape:
+ if do_reshape:
+ min_encoding_indices = min_encoding_indices.reshape(
+ z_q.shape[0], z_q.shape[2], z_q.shape[3]
+ )
+ else:
+ min_encoding_indices = rearrange(
+ min_encoding_indices, "(b s) 1 -> b s", b=z_q.shape[0]
+ )
+
+ loss_dict["min_encoding_indices"] = min_encoding_indices
+
+ return z_q, loss_dict
+
+ def get_codebook_entry(
+ self, indices: torch.Tensor, shape: Optional[Tuple[int, ...]] = None
+ ) -> torch.Tensor:
+ # shape specifying (batch, height, width, channel)
+ if self.remap is not None:
+ assert shape is not None, "Need to give shape for remap"
+ indices = indices.reshape(shape[0], -1) # add batch axis
+ indices = self.unmap_to_all(indices)
+ indices = indices.reshape(-1) # flatten again
+
+ # get quantized latent vectors
+ z_q = self.embedding(indices)
+
+ if shape is not None:
+ z_q = z_q.view(shape)
+ # reshape back to match original input shape
+ z_q = z_q.permute(0, 3, 1, 2).contiguous()
+
+ return z_q
+
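+# Minimal sketch (illustrative dims): quantize a (b, c, h, w) latent against a
+# codebook of 512 entries; the straight-through output keeps gradients flowing:
+#
+#   vq = VectorQuantizer(n_e=512, e_dim=4, beta=0.25, sane_index_shape=True)
+#   z_q, out = vq(torch.randn(2, 4, 8, 8))            # z_q: (2, 4, 8, 8)
+#   out["loss/vq"], out["min_encoding_indices"].shape # scalar, (2, 8, 8)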
+
+class EmbeddingEMA(nn.Module):
+ def __init__(self, num_tokens, codebook_dim, decay=0.99, eps=1e-5):
+ super().__init__()
+ self.decay = decay
+ self.eps = eps
+ weight = torch.randn(num_tokens, codebook_dim)
+ self.weight = nn.Parameter(weight, requires_grad=False)
+ self.cluster_size = nn.Parameter(torch.zeros(num_tokens), requires_grad=False)
+ self.embed_avg = nn.Parameter(weight.clone(), requires_grad=False)
+ self.update = True
+
+ def forward(self, embed_id):
+ return F.embedding(embed_id, self.weight)
+
+ def cluster_size_ema_update(self, new_cluster_size):
+ self.cluster_size.data.mul_(self.decay).add_(
+ new_cluster_size, alpha=1 - self.decay
+ )
+
+ def embed_avg_ema_update(self, new_embed_avg):
+ self.embed_avg.data.mul_(self.decay).add_(new_embed_avg, alpha=1 - self.decay)
+
+ def weight_update(self, num_tokens):
+ n = self.cluster_size.sum()
+ smoothed_cluster_size = (
+ (self.cluster_size + self.eps) / (n + num_tokens * self.eps) * n
+ )
+ # normalize embedding average with smoothed cluster size
+ embed_normalized = self.embed_avg / smoothed_cluster_size.unsqueeze(1)
+ self.weight.data.copy_(embed_normalized)
+
+
+class EMAVectorQuantizer(AbstractQuantizer):
+ def __init__(
+ self,
+ n_embed: int,
+ embedding_dim: int,
+ beta: float,
+ decay: float = 0.99,
+ eps: float = 1e-5,
+ remap: Optional[str] = None,
+ unknown_index: str = "random",
+ loss_key: str = "loss/vq",
+ ):
+ super().__init__()
+ self.codebook_dim = embedding_dim
+ self.num_tokens = n_embed
+ self.beta = beta
+ self.loss_key = loss_key
+
+ self.embedding = EmbeddingEMA(self.num_tokens, self.codebook_dim, decay, eps)
+
+ self.remap = remap
+ if self.remap is not None:
+ self.register_buffer("used", torch.tensor(np.load(self.remap)))
+ self.re_embed = self.used.shape[0]
+ else:
+ self.used = None
+ self.re_embed = n_embed
+ if unknown_index == "extra":
+ self.unknown_index = self.re_embed
+ self.re_embed = self.re_embed + 1
+ else:
+ assert unknown_index == "random" or isinstance(
+ unknown_index, int
+ ), "unknown index needs to be 'random', 'extra' or any integer"
+ self.unknown_index = unknown_index # "random" or "extra" or integer
+ if self.remap is not None:
+ logpy.info(
+                f"Remapping {self.num_tokens} indices to {self.re_embed} indices. "
+ f"Using {self.unknown_index} for unknown indices."
+ )
+
+ def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, Dict]:
+ # reshape z -> (batch, height, width, channel) and flatten
+ # z, 'b c h w -> b h w c'
+ z = rearrange(z, "b c h w -> b h w c")
+ z_flattened = z.reshape(-1, self.codebook_dim)
+
+ # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z
+ d = (
+ z_flattened.pow(2).sum(dim=1, keepdim=True)
+ + self.embedding.weight.pow(2).sum(dim=1)
+ - 2 * torch.einsum("bd,nd->bn", z_flattened, self.embedding.weight)
+ ) # 'n d -> d n'
+
+ encoding_indices = torch.argmin(d, dim=1)
+
+ z_q = self.embedding(encoding_indices).view(z.shape)
+ encodings = F.one_hot(encoding_indices, self.num_tokens).type(z.dtype)
+ avg_probs = torch.mean(encodings, dim=0)
+ perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))
+
+ if self.training and self.embedding.update:
+ # EMA cluster size
+ encodings_sum = encodings.sum(0)
+ self.embedding.cluster_size_ema_update(encodings_sum)
+ # EMA embedding average
+ embed_sum = encodings.transpose(0, 1) @ z_flattened
+ self.embedding.embed_avg_ema_update(embed_sum)
+ # normalize embed_avg and update weight
+ self.embedding.weight_update(self.num_tokens)
+
+ # compute loss for embedding
+ loss = self.beta * F.mse_loss(z_q.detach(), z)
+
+ # preserve gradients
+ z_q = z + (z_q - z).detach()
+
+ # reshape back to match original input shape
+ # z_q, 'b h w c -> b c h w'
+ z_q = rearrange(z_q, "b h w c -> b c h w")
+
+ out_dict = {
+ self.loss_key: loss,
+ "encodings": encodings,
+ "encoding_indices": encoding_indices,
+ "perplexity": perplexity,
+ }
+
+ return z_q, out_dict
+
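+# Minimal smoke test (comments only; the sizes are arbitrary assumptions):
+#
+#   q = EMAVectorQuantizer(n_embed=128, embedding_dim=16, beta=0.25)
+#   z_q, aux = q(torch.randn(2, 16, 8, 8))   # z_q: (2, 16, 8, 8)
+#   aux["perplexity"]                        # effective codebook usage, <= 128
+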
+
+class VectorQuantizerWithInputProjection(VectorQuantizer):
+ def __init__(
+ self,
+ input_dim: int,
+ n_codes: int,
+ codebook_dim: int,
+ beta: float = 1.0,
+ output_dim: Optional[int] = None,
+ **kwargs,
+ ):
+ super().__init__(n_codes, codebook_dim, beta, **kwargs)
+ self.proj_in = nn.Linear(input_dim, codebook_dim)
+ self.output_dim = output_dim
+ if output_dim is not None:
+ self.proj_out = nn.Linear(codebook_dim, output_dim)
+ else:
+ self.proj_out = nn.Identity()
+
+ def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, Dict]:
+ rearr = False
+ in_shape = z.shape
+
+ if z.ndim > 3:
+ rearr = self.output_dim is not None
+ z = rearrange(z, "b c ... -> b (...) c")
+ z = self.proj_in(z)
+ z_q, loss_dict = super().forward(z)
+
+ z_q = self.proj_out(z_q)
+ if rearr:
+ if len(in_shape) == 4:
+ z_q = rearrange(z_q, "b (h w) c -> b c h w ", w=in_shape[-1])
+ elif len(in_shape) == 5:
+ z_q = rearrange(
+ z_q, "b (t h w) c -> b c t h w ", w=in_shape[-1], h=in_shape[-2]
+ )
+ else:
+ raise NotImplementedError(
+ f"rearranging not available for {len(in_shape)}-dimensional input."
+ )
+
+ return z_q, loss_dict
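+
+
+if __name__ == "__main__":
+    # Smoke test with illustrative sizes: quantize 512-d tokens through a
+    # 64-d codebook and project back to 512-d. 3-D (b, seq, c) inputs skip
+    # the rearrange branch above.
+    vq = VectorQuantizerWithInputProjection(
+        input_dim=512, n_codes=1024, codebook_dim=64, output_dim=512
+    )
+    q, aux = vq(torch.randn(2, 77, 512))
+    print(q.shape, aux[vq.loss_key].item())  # torch.Size([2, 77, 512]), ...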
diff --git a/sgm/modules/autoencoding/temporal_ae.py b/sgm/modules/autoencoding/temporal_ae.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a17a91163469dbd8cfe2373d0d09554d5e25ed9
--- /dev/null
+++ b/sgm/modules/autoencoding/temporal_ae.py
@@ -0,0 +1,347 @@
+from typing import Callable, Iterable, Union
+
+import torch
+from einops import rearrange, repeat
+
+from sgm.modules.diffusionmodules.model import (XFORMERS_IS_AVAILABLE,
+ AttnBlock, Decoder,
+ MemoryEfficientAttnBlock,
+ ResnetBlock)
+from sgm.modules.diffusionmodules.openaimodel import (ResBlock,
+ timestep_embedding)
+from sgm.modules.video_attention import VideoTransformerBlock
+from sgm.util import partialclass
+
+
+class VideoResBlock(ResnetBlock):
+ def __init__(
+ self,
+ out_channels,
+ *args,
+ dropout=0.0,
+ video_kernel_size=3,
+ alpha=0.0,
+ merge_strategy="learned",
+ **kwargs,
+ ):
+        super().__init__(*args, out_channels=out_channels, dropout=dropout, **kwargs)
+ if video_kernel_size is None:
+ video_kernel_size = [3, 1, 1]
+ self.time_stack = ResBlock(
+ channels=out_channels,
+ emb_channels=0,
+ dropout=dropout,
+ dims=3,
+ use_scale_shift_norm=False,
+ use_conv=False,
+ up=False,
+ down=False,
+ kernel_size=video_kernel_size,
+ use_checkpoint=False,
+ skip_t_emb=True,
+ )
+
+ self.merge_strategy = merge_strategy
+ if self.merge_strategy == "fixed":
+ self.register_buffer("mix_factor", torch.Tensor([alpha]))
+ elif self.merge_strategy == "learned":
+ self.register_parameter(
+ "mix_factor", torch.nn.Parameter(torch.Tensor([alpha]))
+ )
+ else:
+ raise ValueError(f"unknown merge strategy {self.merge_strategy}")
+
+ def get_alpha(self, bs):
+ if self.merge_strategy == "fixed":
+ return self.mix_factor
+ elif self.merge_strategy == "learned":
+ return torch.sigmoid(self.mix_factor)
+ else:
+ raise NotImplementedError()
+
+ def forward(self, x, temb, skip_video=False, timesteps=None):
+        if timesteps is None:
+            # __init__ stores no default here; callers must either pass
+            # `timesteps` or have set a `timesteps` attribute on the module
+            timesteps = self.timesteps
+
+ b, c, h, w = x.shape
+
+ x = super().forward(x, temb)
+
+ if not skip_video:
+ x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps)
+
+ x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps)
+
+ x = self.time_stack(x, temb)
+
+ alpha = self.get_alpha(bs=b // timesteps)
+ x = alpha * x + (1.0 - alpha) * x_mix
+
+ x = rearrange(x, "b c t h w -> (b t) c h w")
+ return x
+
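+# The block above blends its spatial and temporal paths as
+#   out = alpha * temporal(x) + (1 - alpha) * spatial(x),
+# where alpha = sigmoid(mix_factor) for merge_strategy == "learned"; the blend
+# therefore stays in (0, 1) and starts at 0.5 when mix_factor is 0.
+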
+
+class AE3DConv(torch.nn.Conv2d):
+ def __init__(self, in_channels, out_channels, video_kernel_size=3, *args, **kwargs):
+ super().__init__(in_channels, out_channels, *args, **kwargs)
+ if isinstance(video_kernel_size, Iterable):
+ padding = [int(k // 2) for k in video_kernel_size]
+ else:
+ padding = int(video_kernel_size // 2)
+
+ self.time_mix_conv = torch.nn.Conv3d(
+ in_channels=out_channels,
+ out_channels=out_channels,
+ kernel_size=video_kernel_size,
+ padding=padding,
+ )
+
+ def forward(self, input, timesteps, skip_video=False):
+ x = super().forward(input)
+ if skip_video:
+ return x
+ x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps)
+ x = self.time_mix_conv(x)
+ return rearrange(x, "b c t h w -> (b t) c h w")
+
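+# Shape contract (illustrative values; kernel_size and padding are ordinary
+# Conv2d kwargs forwarded through *args/**kwargs):
+#
+#   conv = AE3DConv(64, 64, video_kernel_size=3, kernel_size=3, padding=1)
+#   x = torch.randn(2 * 5, 64, 32, 32)    # (b*t, c, h, w) with t = 5
+#   y = conv(x, timesteps=5)              # y.shape == x.shape
+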
+
+class VideoBlock(AttnBlock):
+ def __init__(
+ self, in_channels: int, alpha: float = 0, merge_strategy: str = "learned"
+ ):
+ super().__init__(in_channels)
+ # no context, single headed, as in base class
+ self.time_mix_block = VideoTransformerBlock(
+ dim=in_channels,
+ n_heads=1,
+ d_head=in_channels,
+ checkpoint=False,
+ ff_in=True,
+ attn_mode="softmax",
+ )
+
+ time_embed_dim = self.in_channels * 4
+ self.video_time_embed = torch.nn.Sequential(
+ torch.nn.Linear(self.in_channels, time_embed_dim),
+ torch.nn.SiLU(),
+ torch.nn.Linear(time_embed_dim, self.in_channels),
+ )
+
+ self.merge_strategy = merge_strategy
+ if self.merge_strategy == "fixed":
+ self.register_buffer("mix_factor", torch.Tensor([alpha]))
+ elif self.merge_strategy == "learned":
+ self.register_parameter(
+ "mix_factor", torch.nn.Parameter(torch.Tensor([alpha]))
+ )
+ else:
+ raise ValueError(f"unknown merge strategy {self.merge_strategy}")
+
+ def forward(self, x, timesteps, skip_video=False):
+ if skip_video:
+ return super().forward(x)
+
+ x_in = x
+ x = self.attention(x)
+ h, w = x.shape[2:]
+ x = rearrange(x, "b c h w -> b (h w) c")
+
+ x_mix = x
+ num_frames = torch.arange(timesteps, device=x.device)
+ num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps)
+ num_frames = rearrange(num_frames, "b t -> (b t)")
+ t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False)
+ emb = self.video_time_embed(t_emb) # b, n_channels
+ emb = emb[:, None, :]
+ x_mix = x_mix + emb
+
+ alpha = self.get_alpha()
+ x_mix = self.time_mix_block(x_mix, timesteps=timesteps)
+ x = alpha * x + (1.0 - alpha) * x_mix # alpha merge
+
+ x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w)
+ x = self.proj_out(x)
+
+ return x_in + x
+
+ def get_alpha(
+ self,
+ ):
+ if self.merge_strategy == "fixed":
+ return self.mix_factor
+ elif self.merge_strategy == "learned":
+ return torch.sigmoid(self.mix_factor)
+ else:
+ raise NotImplementedError(f"unknown merge strategy {self.merge_strategy}")
+
+
+class MemoryEfficientVideoBlock(MemoryEfficientAttnBlock):
+ def __init__(
+ self, in_channels: int, alpha: float = 0, merge_strategy: str = "learned"
+ ):
+ super().__init__(in_channels)
+ # no context, single headed, as in base class
+ self.time_mix_block = VideoTransformerBlock(
+ dim=in_channels,
+ n_heads=1,
+ d_head=in_channels,
+ checkpoint=False,
+ ff_in=True,
+ attn_mode="softmax-xformers",
+ )
+
+ time_embed_dim = self.in_channels * 4
+ self.video_time_embed = torch.nn.Sequential(
+ torch.nn.Linear(self.in_channels, time_embed_dim),
+ torch.nn.SiLU(),
+ torch.nn.Linear(time_embed_dim, self.in_channels),
+ )
+
+ self.merge_strategy = merge_strategy
+ if self.merge_strategy == "fixed":
+ self.register_buffer("mix_factor", torch.Tensor([alpha]))
+ elif self.merge_strategy == "learned":
+ self.register_parameter(
+ "mix_factor", torch.nn.Parameter(torch.Tensor([alpha]))
+ )
+ else:
+ raise ValueError(f"unknown merge strategy {self.merge_strategy}")
+
+ def forward(self, x, timesteps, skip_time_block=False):
+ if skip_time_block:
+ return super().forward(x)
+
+ x_in = x
+ x = self.attention(x)
+ h, w = x.shape[2:]
+ x = rearrange(x, "b c h w -> b (h w) c")
+
+ x_mix = x
+ num_frames = torch.arange(timesteps, device=x.device)
+ num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps)
+ num_frames = rearrange(num_frames, "b t -> (b t)")
+ t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False)
+ emb = self.video_time_embed(t_emb) # b, n_channels
+ emb = emb[:, None, :]
+ x_mix = x_mix + emb
+
+ alpha = self.get_alpha()
+ x_mix = self.time_mix_block(x_mix, timesteps=timesteps)
+ x = alpha * x + (1.0 - alpha) * x_mix # alpha merge
+
+ x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w)
+ x = self.proj_out(x)
+
+ return x_in + x
+
+ def get_alpha(
+ self,
+ ):
+ if self.merge_strategy == "fixed":
+ return self.mix_factor
+ elif self.merge_strategy == "learned":
+ return torch.sigmoid(self.mix_factor)
+ else:
+ raise NotImplementedError(f"unknown merge strategy {self.merge_strategy}")
+
+
+def make_time_attn(
+ in_channels,
+ attn_type="vanilla",
+ attn_kwargs=None,
+ alpha: float = 0,
+ merge_strategy: str = "learned",
+):
+ assert attn_type in [
+ "vanilla",
+ "vanilla-xformers",
+ ], f"attn_type {attn_type} not supported for spatio-temporal attention"
+ print(
+ f"making spatial and temporal attention of type '{attn_type}' with {in_channels} in_channels"
+ )
+ if not XFORMERS_IS_AVAILABLE and attn_type == "vanilla-xformers":
+ print(
+ f"Attention mode '{attn_type}' is not available. Falling back to vanilla attention. "
+            f"This is not a problem in PyTorch >= 2.0. FYI, you are running with PyTorch version {torch.__version__}"
+ )
+ attn_type = "vanilla"
+
+ if attn_type == "vanilla":
+ assert attn_kwargs is None
+ return partialclass(
+ VideoBlock, in_channels, alpha=alpha, merge_strategy=merge_strategy
+ )
+ elif attn_type == "vanilla-xformers":
+ print(f"building MemoryEfficientAttnBlock with {in_channels} in_channels...")
+ return partialclass(
+ MemoryEfficientVideoBlock,
+ in_channels,
+ alpha=alpha,
+ merge_strategy=merge_strategy,
+ )
+ else:
+        raise NotImplementedError()
+
+
+class Conv2DWrapper(torch.nn.Conv2d):
+ def forward(self, input: torch.Tensor, **kwargs) -> torch.Tensor:
+ return super().forward(input)
+
+
+class VideoDecoder(Decoder):
+ available_time_modes = ["all", "conv-only", "attn-only"]
+
+ def __init__(
+ self,
+ *args,
+ video_kernel_size: Union[int, list] = 3,
+ alpha: float = 0.0,
+ merge_strategy: str = "learned",
+ time_mode: str = "conv-only",
+ **kwargs,
+ ):
+ self.video_kernel_size = video_kernel_size
+ self.alpha = alpha
+ self.merge_strategy = merge_strategy
+ self.time_mode = time_mode
+ assert (
+ self.time_mode in self.available_time_modes
+ ), f"time_mode parameter has to be in {self.available_time_modes}"
+ super().__init__(*args, **kwargs)
+
+ def get_last_layer(self, skip_time_mix=False, **kwargs):
+ if self.time_mode == "attn-only":
+ raise NotImplementedError("TODO")
+ else:
+ return (
+ self.conv_out.time_mix_conv.weight
+ if not skip_time_mix
+ else self.conv_out.weight
+ )
+
+ def _make_attn(self) -> Callable:
+ if self.time_mode not in ["conv-only", "only-last-conv"]:
+ return partialclass(
+ make_time_attn,
+ alpha=self.alpha,
+ merge_strategy=self.merge_strategy,
+ )
+ else:
+ return super()._make_attn()
+
+ def _make_conv(self) -> Callable:
+ if self.time_mode != "attn-only":
+ return partialclass(AE3DConv, video_kernel_size=self.video_kernel_size)
+ else:
+ return Conv2DWrapper
+
+ def _make_resblock(self) -> Callable:
+ if self.time_mode not in ["attn-only", "only-last-conv"]:
+ return partialclass(
+ VideoResBlock,
+ video_kernel_size=self.video_kernel_size,
+ alpha=self.alpha,
+ merge_strategy=self.merge_strategy,
+ )
+ else:
+ return super()._make_resblock()
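+
+
+# What each time_mode selects, derived from the three factory methods above:
+#   "all"       -> temporal attention, AE3DConv, VideoResBlock
+#   "conv-only" -> spatial attention, AE3DConv, VideoResBlock
+#   "attn-only" -> temporal attention, Conv2DWrapper, plain ResnetBlock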
diff --git a/sgm/modules/diffusionmodules/__init__.py b/sgm/modules/diffusionmodules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/diffusionmodules/denoiser.py b/sgm/modules/diffusionmodules/denoiser.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f2558ece099a4b37a91d1f4f53d29cd57af57f1
--- /dev/null
+++ b/sgm/modules/diffusionmodules/denoiser.py
@@ -0,0 +1,78 @@
+from typing import Dict, Union
+
+import torch
+import torch.nn as nn
+
+from ...util import append_dims, instantiate_from_config
+from .denoiser_scaling import DenoiserScaling
+from .discretizer import Discretization
+
+
+class Denoiser(nn.Module):
+ def __init__(self, scaling_config: Dict):
+ super().__init__()
+
+ self.scaling: DenoiserScaling = instantiate_from_config(scaling_config)
+
+ def possibly_quantize_sigma(self, sigma: torch.Tensor) -> torch.Tensor:
+ return sigma
+
+ def possibly_quantize_c_noise(self, c_noise: torch.Tensor) -> torch.Tensor:
+ return c_noise
+
+ def forward(
+ self,
+ network: nn.Module,
+ input: torch.Tensor,
+ sigma: torch.Tensor,
+ cond: Dict,
+ **additional_model_inputs,
+ ) -> torch.Tensor:
+ sigma = self.possibly_quantize_sigma(sigma)
+ sigma_shape = sigma.shape
+ sigma = append_dims(sigma, input.ndim)
+ c_skip, c_out, c_in, c_noise = self.scaling(sigma)
+ c_noise = self.possibly_quantize_c_noise(c_noise.reshape(sigma_shape))
+ return (
+ network(input * c_in, c_noise, cond, **additional_model_inputs) * c_out
+ + input * c_skip
+ )
+
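+# The return value above is the standard preconditioned denoiser
+#   D(x, sigma) = c_skip * x + c_out * F(c_in * x, c_noise(sigma), cond),
+# with the four coefficients supplied by the configured DenoiserScaling.
+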
+
+class DiscreteDenoiser(Denoiser):
+ def __init__(
+ self,
+ scaling_config: Dict,
+ num_idx: int,
+ discretization_config: Dict,
+ do_append_zero: bool = False,
+ quantize_c_noise: bool = True,
+ flip: bool = True,
+ ):
+ super().__init__(scaling_config)
+ self.discretization: Discretization = instantiate_from_config(
+ discretization_config
+ )
+ sigmas = self.discretization(num_idx, do_append_zero=do_append_zero, flip=flip)
+ self.register_buffer("sigmas", sigmas)
+ self.quantize_c_noise = quantize_c_noise
+ self.num_idx = num_idx
+
+ def sigma_to_idx(self, sigma: torch.Tensor) -> torch.Tensor:
+ dists = sigma - self.sigmas[:, None]
+ return dists.abs().argmin(dim=0).view(sigma.shape)
+
+ def idx_to_sigma(self, idx: Union[torch.Tensor, int]) -> torch.Tensor:
+ return self.sigmas[idx]
+
+ def possibly_quantize_sigma(self, sigma: torch.Tensor) -> torch.Tensor:
+ return self.idx_to_sigma(self.sigma_to_idx(sigma))
+
+ def possibly_quantize_c_noise(self, c_noise: torch.Tensor) -> torch.Tensor:
+ if self.quantize_c_noise:
+ return self.sigma_to_idx(c_noise)
+ else:
+ return c_noise
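+
+
+# Hypothetical configuration sketch (the targets name modules added in this
+# diff; the numbers are illustrative):
+#
+#   denoiser = DiscreteDenoiser(
+#       scaling_config={
+#           "target": "sgm.modules.diffusionmodules.denoiser_scaling.VScaling"},
+#       num_idx=1000,
+#       discretization_config={"target":
+#           "sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization"},
+#   )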
diff --git a/sgm/modules/diffusionmodules/denoiser_scaling.py b/sgm/modules/diffusionmodules/denoiser_scaling.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4e287bfe8a82839a9a12fbd25c3446f43ab493b
--- /dev/null
+++ b/sgm/modules/diffusionmodules/denoiser_scaling.py
@@ -0,0 +1,59 @@
+from abc import ABC, abstractmethod
+from typing import Tuple
+
+import torch
+
+
+class DenoiserScaling(ABC):
+ @abstractmethod
+ def __call__(
+ self, sigma: torch.Tensor
+ ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
+ pass
+
+
+class EDMScaling(DenoiserScaling):
+ def __init__(self, sigma_data: float = 0.5):
+ self.sigma_data = sigma_data
+
+ def __call__(
+ self, sigma: torch.Tensor
+ ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
+ c_skip = self.sigma_data**2 / (sigma**2 + self.sigma_data**2)
+ c_out = sigma * self.sigma_data / (sigma**2 + self.sigma_data**2) ** 0.5
+ c_in = 1 / (sigma**2 + self.sigma_data**2) ** 0.5
+ c_noise = 0.25 * sigma.log()
+ return c_skip, c_out, c_in, c_noise
+
+
+class EpsScaling(DenoiserScaling):
+ def __call__(
+ self, sigma: torch.Tensor
+ ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
+ c_skip = torch.ones_like(sigma, device=sigma.device)
+ c_out = -sigma
+ c_in = 1 / (sigma**2 + 1.0) ** 0.5
+ c_noise = sigma.clone()
+ return c_skip, c_out, c_in, c_noise
+
+
+class VScaling(DenoiserScaling):
+ def __call__(
+ self, sigma: torch.Tensor
+ ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
+ c_skip = 1.0 / (sigma**2 + 1.0)
+ c_out = -sigma / (sigma**2 + 1.0) ** 0.5
+ c_in = 1.0 / (sigma**2 + 1.0) ** 0.5
+ c_noise = sigma.clone()
+ return c_skip, c_out, c_in, c_noise
+
+
+class VScalingWithEDMcNoise(DenoiserScaling):
+ def __call__(
+ self, sigma: torch.Tensor
+ ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
+ c_skip = 1.0 / (sigma**2 + 1.0)
+ c_out = -sigma / (sigma**2 + 1.0) ** 0.5
+ c_in = 1.0 / (sigma**2 + 1.0) ** 0.5
+ c_noise = 0.25 * sigma.log()
+ return c_skip, c_out, c_in, c_noise
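+
+
+if __name__ == "__main__":
+    # Illustrative sanity check: at sigma == sigma_data the EDM skip and
+    # network branches are weighted equally.
+    s = torch.tensor([0.5])
+    c_skip, c_out, c_in, c_noise = EDMScaling(sigma_data=0.5)(s)
+    print(c_skip, c_out, c_in, c_noise)  # 0.5, ~0.354, ~1.414, ~-0.173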
diff --git a/sgm/modules/diffusionmodules/denoiser_weighting.py b/sgm/modules/diffusionmodules/denoiser_weighting.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8b03ca58f17ea3d7374f4bbb7bf1d2994755e00
--- /dev/null
+++ b/sgm/modules/diffusionmodules/denoiser_weighting.py
@@ -0,0 +1,24 @@
+import torch
+
+
+class UnitWeighting:
+ def __call__(self, sigma):
+ return torch.ones_like(sigma, device=sigma.device)
+
+
+class EDMWeighting:
+ def __init__(self, sigma_data=0.5):
+ self.sigma_data = sigma_data
+
+ def __call__(self, sigma):
+ return (sigma**2 + self.sigma_data**2) / (sigma * self.sigma_data) ** 2
+
+
+class VWeighting(EDMWeighting):
+ def __init__(self):
+ super().__init__(sigma_data=1.0)
+
+
+class EpsWeighting:
+ def __call__(self, sigma):
+ return sigma**-2.0
diff --git a/sgm/modules/diffusionmodules/discretizer.py b/sgm/modules/diffusionmodules/discretizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..02add6081c5e3164d4402619b44d5be235d3ec58
--- /dev/null
+++ b/sgm/modules/diffusionmodules/discretizer.py
@@ -0,0 +1,69 @@
+from abc import abstractmethod
+from functools import partial
+
+import numpy as np
+import torch
+
+from ...modules.diffusionmodules.util import make_beta_schedule
+from ...util import append_zero
+
+
+def generate_roughly_equally_spaced_steps(
+ num_substeps: int, max_step: int
+) -> np.ndarray:
+ return np.linspace(max_step - 1, 0, num_substeps, endpoint=False).astype(int)[::-1]
+
+
+class Discretization:
+ def __call__(self, n, do_append_zero=True, device="cpu", flip=False):
+ sigmas = self.get_sigmas(n, device=device)
+ sigmas = append_zero(sigmas) if do_append_zero else sigmas
+ return sigmas if not flip else torch.flip(sigmas, (0,))
+
+ @abstractmethod
+ def get_sigmas(self, n, device):
+ pass
+
+
+class EDMDiscretization(Discretization):
+ def __init__(self, sigma_min=0.002, sigma_max=80.0, rho=7.0):
+ self.sigma_min = sigma_min
+ self.sigma_max = sigma_max
+ self.rho = rho
+
+ def get_sigmas(self, n, device="cpu"):
+ ramp = torch.linspace(0, 1, n, device=device)
+ min_inv_rho = self.sigma_min ** (1 / self.rho)
+ max_inv_rho = self.sigma_max ** (1 / self.rho)
+ sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** self.rho
+ return sigmas
+
+
+class LegacyDDPMDiscretization(Discretization):
+ def __init__(
+ self,
+ linear_start=0.00085,
+ linear_end=0.0120,
+ num_timesteps=1000,
+ ):
+ super().__init__()
+ self.num_timesteps = num_timesteps
+ betas = make_beta_schedule(
+ "linear", num_timesteps, linear_start=linear_start, linear_end=linear_end
+ )
+ alphas = 1.0 - betas
+ self.alphas_cumprod = np.cumprod(alphas, axis=0)
+ self.to_torch = partial(torch.tensor, dtype=torch.float32)
+
+ def get_sigmas(self, n, device="cpu"):
+ if n < self.num_timesteps:
+ timesteps = generate_roughly_equally_spaced_steps(n, self.num_timesteps)
+ alphas_cumprod = self.alphas_cumprod[timesteps]
+ elif n == self.num_timesteps:
+ alphas_cumprod = self.alphas_cumprod
+ else:
+            raise ValueError(
+                f"n ({n}) must not exceed num_timesteps ({self.num_timesteps})"
+            )
+
+ to_torch = partial(torch.tensor, dtype=torch.float32, device=device)
+ sigmas = to_torch((1 - alphas_cumprod) / alphas_cumprod) ** 0.5
+ return torch.flip(sigmas, (0,))
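+
+
+if __name__ == "__main__":
+    # Five Karras (EDM) noise levels, highest first, plus the appended zero;
+    # for the defaults above the printed values are approximately
+    # [80.00, 17.53, 2.52, 0.17, 0.002, 0.0].
+    print(EDMDiscretization()(5))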
diff --git a/sgm/modules/diffusionmodules/guiders.py b/sgm/modules/diffusionmodules/guiders.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8eca43e8b7b8b7b0e6b9f3e2bddbae6e3456fee
--- /dev/null
+++ b/sgm/modules/diffusionmodules/guiders.py
@@ -0,0 +1,99 @@
+import logging
+from abc import ABC, abstractmethod
+from typing import Dict, List, Optional, Tuple, Union
+
+import torch
+from einops import rearrange, repeat
+
+from ...util import append_dims, default
+
+logpy = logging.getLogger(__name__)
+
+
+class Guider(ABC):
+ @abstractmethod
+ def __call__(self, x: torch.Tensor, sigma: float) -> torch.Tensor:
+ pass
+
+    @abstractmethod
+    def prepare_inputs(
+ self, x: torch.Tensor, s: float, c: Dict, uc: Dict
+ ) -> Tuple[torch.Tensor, float, Dict]:
+ pass
+
+
+class VanillaCFG(Guider):
+ def __init__(self, scale: float):
+ self.scale = scale
+
+ def __call__(self, x: torch.Tensor, sigma: torch.Tensor) -> torch.Tensor:
+ x_u, x_c = x.chunk(2)
+ x_pred = x_u + self.scale * (x_c - x_u)
+ return x_pred
+
+ def prepare_inputs(self, x, s, c, uc):
+ c_out = dict()
+
+ for k in c:
+ if k in ["vector", "crossattn", "concat"]:
+ c_out[k] = torch.cat((uc[k], c[k]), 0)
+ else:
+ assert c[k] == uc[k]
+ c_out[k] = c[k]
+ return torch.cat([x] * 2), torch.cat([s] * 2), c_out
+
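+# VanillaCFG runs the model once on the doubled batch [uncond; cond] built by
+# prepare_inputs, then recombines the two halves as
+#   x_pred = x_u + scale * (x_c - x_u),
+# so scale = 1 recovers the conditional prediction and scale > 1 extrapolates
+# away from the unconditional one.
+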
+
+class IdentityGuider(Guider):
+ def __call__(self, x: torch.Tensor, sigma: float) -> torch.Tensor:
+ return x
+
+ def prepare_inputs(
+ self, x: torch.Tensor, s: float, c: Dict, uc: Dict
+ ) -> Tuple[torch.Tensor, float, Dict]:
+ c_out = dict()
+
+ for k in c:
+ c_out[k] = c[k]
+
+ return x, s, c_out
+
+
+class LinearPredictionGuider(Guider):
+ def __init__(
+ self,
+ max_scale: float,
+ num_frames: int,
+ min_scale: float = 1.0,
+ additional_cond_keys: Optional[Union[List[str], str]] = None,
+ ):
+ self.min_scale = min_scale
+ self.max_scale = max_scale
+ self.num_frames = num_frames
+ self.scale = torch.linspace(min_scale, max_scale, num_frames).unsqueeze(0)
+
+ additional_cond_keys = default(additional_cond_keys, [])
+ if isinstance(additional_cond_keys, str):
+ additional_cond_keys = [additional_cond_keys]
+ self.additional_cond_keys = additional_cond_keys
+
+ def __call__(self, x: torch.Tensor, sigma: torch.Tensor) -> torch.Tensor:
+ x_u, x_c = x.chunk(2)
+
+ x_u = rearrange(x_u, "(b t) ... -> b t ...", t=self.num_frames)
+ x_c = rearrange(x_c, "(b t) ... -> b t ...", t=self.num_frames)
+ scale = repeat(self.scale, "1 t -> b t", b=x_u.shape[0])
+ scale = append_dims(scale, x_u.ndim).to(x_u.device)
+
+ return rearrange(x_u + scale * (x_c - x_u), "b t ... -> (b t) ...")
+
+ def prepare_inputs(
+ self, x: torch.Tensor, s: torch.Tensor, c: dict, uc: dict
+ ) -> Tuple[torch.Tensor, torch.Tensor, dict]:
+ c_out = dict()
+
+ for k in c:
+ if k in ["vector", "crossattn", "concat"] + self.additional_cond_keys:
+ c_out[k] = torch.cat((uc[k], c[k]), 0)
+ else:
+ assert c[k] == uc[k]
+ c_out[k] = c[k]
+ return torch.cat([x] * 2), torch.cat([s] * 2), c_out
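+
+
+if __name__ == "__main__":
+    # Per-frame guidance scales for a hypothetical 5-frame clip: later frames
+    # are guided more strongly than the first.
+    g = LinearPredictionGuider(max_scale=2.5, num_frames=5)
+    print(g.scale)  # tensor([[1.0000, 1.3750, 1.7500, 2.1250, 2.5000]])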
diff --git a/sgm/modules/diffusionmodules/loss.py b/sgm/modules/diffusionmodules/loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..a24bda7e6f31b7b3bc0b1c6ecdea62961b2da943
--- /dev/null
+++ b/sgm/modules/diffusionmodules/loss.py
@@ -0,0 +1,147 @@
+from typing import Dict, List, Optional, Tuple, Union
+
+import torch
+import torch.nn as nn
+
+from ...modules.autoencoding.lpips.loss.lpips import LPIPS
+from ...modules.encoders.modules import GeneralConditioner
+from ...util import append_dims, instantiate_from_config
+from .denoiser import Denoiser
+
+from transport import create_transport, Sampler
+
+class FMLoss(nn.Module):
+ def __init__(self, transport_config):
+ super().__init__()
+ self.transport = instantiate_from_config(transport_config)
+
+ def _forward(
+ self,
+ network: nn.Module,
+ cond: Dict,
+ input: torch.Tensor,
+ batch: Dict,
+ ) -> Tuple[torch.Tensor, Dict]:
+ model_kwargs = dict(context=cond)
+
+ loss_dict = self.transport.training_losses(network, input, model_kwargs)
+ loss = loss_dict["loss"].mean()
+ return loss, loss_dict
+
+ def forward(
+ self,
+ network: nn.Module,
+ # denoiser: Denoiser,
+ conditioner: GeneralConditioner,
+ input: torch.Tensor,
+ batch: Dict,
+    ) -> Tuple[torch.Tensor, Dict]:
+ cond = conditioner(batch)
+ return self._forward(network, cond, input, batch)
+
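+# FMLoss defers the flow-matching objective to the external `transport`
+# package: `training_losses` returns a dict whose per-sample "loss" entry is
+# averaged above. A hypothetical transport_config (exact parameters depend on
+# that library and are not specified here):
+#   {"target": "transport.create_transport", "params": {...}}
+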
+
+class StandardDiffusionLoss(nn.Module):
+ def __init__(
+ self,
+ sigma_sampler_config: dict,
+ loss_weighting_config: dict,
+ loss_type: str = "l2",
+ offset_noise_level: float = 0.0,
+ batch2model_keys: Optional[Union[str, List[str]]] = None,
+ ):
+ super().__init__()
+
+ assert loss_type in ["l2", "l1", "lpips"]
+
+ self.sigma_sampler = instantiate_from_config(sigma_sampler_config)
+ self.loss_weighting = instantiate_from_config(loss_weighting_config)
+
+ self.loss_type = loss_type
+ self.offset_noise_level = offset_noise_level
+
+ if loss_type == "lpips":
+ self.lpips = LPIPS().eval()
+
+ if not batch2model_keys:
+ batch2model_keys = []
+
+ if isinstance(batch2model_keys, str):
+ batch2model_keys = [batch2model_keys]
+
+ self.batch2model_keys = set(batch2model_keys)
+
+ def get_noised_input(
+ self, sigmas_bc: torch.Tensor, noise: torch.Tensor, input: torch.Tensor
+ ) -> torch.Tensor:
+ noised_input = input + noise * sigmas_bc
+ return noised_input
+
+ def forward(
+ self,
+ network: nn.Module,
+ denoiser: Denoiser,
+ conditioner: GeneralConditioner,
+ input: torch.Tensor,
+ batch: Dict,
+    ) -> Tuple[torch.Tensor, Dict]:
+ cond = conditioner(batch)
+ return self._forward(network, denoiser, cond, input, batch)
+
+ def _forward(
+ self,
+ network: nn.Module,
+ denoiser: Denoiser,
+ cond: Dict,
+ input: torch.Tensor,
+ batch: Dict,
+ ) -> Tuple[torch.Tensor, Dict]:
+ additional_model_inputs = {
+ key: batch[key] for key in self.batch2model_keys.intersection(batch)
+ }
+ sigmas = self.sigma_sampler(input.shape[0]).to(input)
+
+ noise = torch.randn_like(input)
+ if self.offset_noise_level > 0.0:
+ offset_shape = (
+ (input.shape[0], 1, input.shape[2])
+                # `n_frames` is not defined by this class itself; fall back to
+                # the non-video offset shape when it is absent
+                if getattr(self, "n_frames", None) is not None
+ else (input.shape[0], input.shape[1])
+ )
+ noise = noise + self.offset_noise_level * append_dims(
+ torch.randn(offset_shape, device=input.device),
+ input.ndim,
+ )
+ sigmas_bc = append_dims(sigmas, input.ndim)
+ noised_input = self.get_noised_input(sigmas_bc, noise, input)
+
+ model_output = denoiser(
+ network, noised_input, sigmas, cond, **additional_model_inputs
+ )
+ w = append_dims(self.loss_weighting(sigmas), input.ndim)
+ return self.get_loss(model_output, input, w), {
+ 'noised_input': noised_input,
+ 'sigmas': sigmas,
+ 'noise': noise,
+ 'model_output': model_output, # x_start
+ }
+
+ def get_loss(self, model_output, target, w):
+ if self.loss_type == "l2":
+ return torch.mean(
+ (w * (model_output - target) ** 2).reshape(target.shape[0], -1), 1
+ )
+ elif self.loss_type == "l1":
+ return torch.mean(
+ (w * (model_output - target).abs()).reshape(target.shape[0], -1), 1
+ )
+ elif self.loss_type == "lpips":
+ loss = self.lpips(model_output, target).reshape(-1)
+ return loss
+ else:
+ raise NotImplementedError(f"Unknown loss type {self.loss_type}")
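+
+
+# Per-sample objective implemented above (for loss_type "l2"):
+#   x_sigma = x + sigma * eps,  eps ~ N(0, I)
+#   loss    = mean( w(sigma) * (D(x_sigma, sigma, cond) - x) ** 2 )
+# with D the preconditioned denoiser and w given by loss_weighting.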
diff --git a/sgm/modules/diffusionmodules/loss_weighting.py b/sgm/modules/diffusionmodules/loss_weighting.py
new file mode 100644
index 0000000000000000000000000000000000000000..e12c0a76635435babd1af33969e82fa284525af8
--- /dev/null
+++ b/sgm/modules/diffusionmodules/loss_weighting.py
@@ -0,0 +1,32 @@
+from abc import ABC, abstractmethod
+
+import torch
+
+
+class DiffusionLossWeighting(ABC):
+ @abstractmethod
+ def __call__(self, sigma: torch.Tensor) -> torch.Tensor:
+ pass
+
+
+class UnitWeighting(DiffusionLossWeighting):
+ def __call__(self, sigma: torch.Tensor) -> torch.Tensor:
+ return torch.ones_like(sigma, device=sigma.device)
+
+
+class EDMWeighting(DiffusionLossWeighting):
+ def __init__(self, sigma_data: float = 0.5):
+ self.sigma_data = sigma_data
+
+ def __call__(self, sigma: torch.Tensor) -> torch.Tensor:
+ return (sigma**2 + self.sigma_data**2) / (sigma * self.sigma_data) ** 2
+
+
+class VWeighting(EDMWeighting):
+ def __init__(self):
+ super().__init__(sigma_data=1.0)
+
+
+class EpsWeighting(DiffusionLossWeighting):
+ def __call__(self, sigma: torch.Tensor) -> torch.Tensor:
+ return sigma**-2.0
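+
+
+if __name__ == "__main__":
+    # Worked values at sigma = 1.0 (illustrative sanity check):
+    s = torch.ones(1)
+    print(UnitWeighting()(s))    # 1.0
+    print(EDMWeighting(0.5)(s))  # (1 + 0.25) / (1 * 0.5) ** 2 = 5.0
+    print(VWeighting()(s))       # (1 + 1) / 1 = 2.0
+    print(EpsWeighting()(s))     # 1.0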
diff --git a/sgm/modules/diffusionmodules/model.py b/sgm/modules/diffusionmodules/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..4cf9d92140dee8443a0ea6b5cf218f2879ad88f4
--- /dev/null
+++ b/sgm/modules/diffusionmodules/model.py
@@ -0,0 +1,748 @@
+# pytorch_diffusion + derived encoder decoder
+import logging
+import math
+from typing import Any, Callable, Optional
+
+import numpy as np
+import torch
+import torch.nn as nn
+from einops import rearrange
+from packaging import version
+
+logpy = logging.getLogger(__name__)
+
+try:
+ import xformers
+ import xformers.ops
+
+ XFORMERS_IS_AVAILABLE = True
+except Exception:
+ XFORMERS_IS_AVAILABLE = False
+ logpy.warning("no module 'xformers'. Processing without...")
+
+from ...modules.attention import LinearAttention, MemoryEfficientCrossAttention
+
+
+def get_timestep_embedding(timesteps, embedding_dim):
+ """
+    Build sinusoidal timestep embeddings. This matches the implementation in
+    Denoising Diffusion Probabilistic Models (via Fairseq / tensor2tensor),
+    but differs slightly from the description in Section 3.5 of
+    "Attention Is All You Need".
+ """
+ assert len(timesteps.shape) == 1
+
+ half_dim = embedding_dim // 2
+ emb = math.log(10000) / (half_dim - 1)
+ emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb)
+ emb = emb.to(device=timesteps.device)
+ emb = timesteps.float()[:, None] * emb[None, :]
+ emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)
+ if embedding_dim % 2 == 1: # zero pad
+ emb = torch.nn.functional.pad(emb, (0, 1, 0, 0))
+ return emb
+
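+# For example, get_timestep_embedding(torch.arange(4), 128) returns a (4, 128)
+# tensor whose first 64 columns are sin(t * 10000 ** (-k / 63)) and whose last
+# 64 columns are the matching cosines, for k = 0..63.
+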
+
+def nonlinearity(x):
+ # swish
+ return x * torch.sigmoid(x)
+
+
+def Normalize(in_channels, num_groups=32):
+ return torch.nn.GroupNorm(
+ num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True
+ )
+
+
+class Upsample(nn.Module):
+ def __init__(self, in_channels, with_conv):
+ super().__init__()
+ self.with_conv = with_conv
+ if self.with_conv:
+ self.conv = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=3, stride=1, padding=1
+ )
+
+ def forward(self, x):
+ x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest")
+ if self.with_conv:
+ x = self.conv(x)
+ return x
+
+
+class Downsample(nn.Module):
+ def __init__(self, in_channels, with_conv):
+ super().__init__()
+ self.with_conv = with_conv
+ if self.with_conv:
+ # no asymmetric padding in torch conv, must do it ourselves
+ self.conv = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=3, stride=2, padding=0
+ )
+
+ def forward(self, x):
+ if self.with_conv:
+ pad = (0, 1, 0, 1)
+ x = torch.nn.functional.pad(x, pad, mode="constant", value=0)
+ x = self.conv(x)
+ else:
+ x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2)
+ return x
+
+
+class ResnetBlock(nn.Module):
+ def __init__(
+ self,
+ *,
+ in_channels,
+ out_channels=None,
+ conv_shortcut=False,
+ dropout,
+ temb_channels=512,
+ ):
+ super().__init__()
+ self.in_channels = in_channels
+ out_channels = in_channels if out_channels is None else out_channels
+ self.out_channels = out_channels
+ self.use_conv_shortcut = conv_shortcut
+
+ self.norm1 = Normalize(in_channels)
+ self.conv1 = torch.nn.Conv2d(
+ in_channels, out_channels, kernel_size=3, stride=1, padding=1
+ )
+ if temb_channels > 0:
+ self.temb_proj = torch.nn.Linear(temb_channels, out_channels)
+ self.norm2 = Normalize(out_channels)
+ self.dropout = torch.nn.Dropout(dropout)
+ self.conv2 = torch.nn.Conv2d(
+ out_channels, out_channels, kernel_size=3, stride=1, padding=1
+ )
+ if self.in_channels != self.out_channels:
+ if self.use_conv_shortcut:
+ self.conv_shortcut = torch.nn.Conv2d(
+ in_channels, out_channels, kernel_size=3, stride=1, padding=1
+ )
+ else:
+ self.nin_shortcut = torch.nn.Conv2d(
+ in_channels, out_channels, kernel_size=1, stride=1, padding=0
+ )
+
+ def forward(self, x, temb):
+ h = x
+ h = self.norm1(h)
+ h = nonlinearity(h)
+ h = self.conv1(h)
+
+ if temb is not None:
+ h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None]
+
+ h = self.norm2(h)
+ h = nonlinearity(h)
+ h = self.dropout(h)
+ h = self.conv2(h)
+
+ if self.in_channels != self.out_channels:
+ if self.use_conv_shortcut:
+ x = self.conv_shortcut(x)
+ else:
+ x = self.nin_shortcut(x)
+
+ return x + h
+
+
+class LinAttnBlock(LinearAttention):
+ """to match AttnBlock usage"""
+
+ def __init__(self, in_channels):
+ super().__init__(dim=in_channels, heads=1, dim_head=in_channels)
+
+
+class AttnBlock(nn.Module):
+ def __init__(self, in_channels):
+ super().__init__()
+ self.in_channels = in_channels
+
+ self.norm = Normalize(in_channels)
+ self.q = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.k = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.v = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.proj_out = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+
+ def attention(self, h_: torch.Tensor) -> torch.Tensor:
+ h_ = self.norm(h_)
+ q = self.q(h_)
+ k = self.k(h_)
+ v = self.v(h_)
+
+ b, c, h, w = q.shape
+ q, k, v = map(
+ lambda x: rearrange(x, "b c h w -> b 1 (h w) c").contiguous(), (q, k, v)
+ )
+ h_ = torch.nn.functional.scaled_dot_product_attention(
+ q, k, v
+ ) # scale is dim ** -0.5 per default
+ # compute attention
+
+ return rearrange(h_, "b 1 (h w) c -> b c h w", h=h, w=w, c=c, b=b)
+
+ def forward(self, x, **kwargs):
+ h_ = x
+ h_ = self.attention(h_)
+ h_ = self.proj_out(h_)
+ return x + h_
+
+
+class MemoryEfficientAttnBlock(nn.Module):
+ """
+ Uses xformers efficient implementation,
+ see https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
+ Note: this is a single-head self-attention operation
+ """
+
+ #
+ def __init__(self, in_channels):
+ super().__init__()
+ self.in_channels = in_channels
+
+ self.norm = Normalize(in_channels)
+ self.q = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.k = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.v = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.proj_out = torch.nn.Conv2d(
+ in_channels, in_channels, kernel_size=1, stride=1, padding=0
+ )
+ self.attention_op: Optional[Any] = None
+
+ def attention(self, h_: torch.Tensor) -> torch.Tensor:
+ h_ = self.norm(h_)
+ q = self.q(h_)
+ k = self.k(h_)
+ v = self.v(h_)
+
+ # compute attention
+ B, C, H, W = q.shape
+ q, k, v = map(lambda x: rearrange(x, "b c h w -> b (h w) c"), (q, k, v))
+
+ q, k, v = map(
+ lambda t: t.unsqueeze(3)
+ .reshape(B, t.shape[1], 1, C)
+ .permute(0, 2, 1, 3)
+ .reshape(B * 1, t.shape[1], C)
+ .contiguous(),
+ (q, k, v),
+ )
+ out = xformers.ops.memory_efficient_attention(
+ q, k, v, attn_bias=None, op=self.attention_op
+ )
+
+ out = (
+ out.unsqueeze(0)
+ .reshape(B, 1, out.shape[1], C)
+ .permute(0, 2, 1, 3)
+ .reshape(B, out.shape[1], C)
+ )
+ return rearrange(out, "b (h w) c -> b c h w", b=B, h=H, w=W, c=C)
+
+ def forward(self, x, **kwargs):
+ h_ = x
+ h_ = self.attention(h_)
+ h_ = self.proj_out(h_)
+ return x + h_
+
+
+class MemoryEfficientCrossAttentionWrapper(MemoryEfficientCrossAttention):
+    def forward(self, x, context=None, mask=None, **unused_kwargs):
+        b, c, h, w = x.shape
+        x_in = x  # keep the (b, c, h, w) input for the residual connection
+        x = rearrange(x, "b c h w -> b (h w) c")
+        out = super().forward(x, context=context, mask=mask)
+        out = rearrange(out, "b (h w) c -> b c h w", h=h, w=w, c=c)
+        return x_in + out
+
+
+def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None):
+ assert attn_type in [
+ "vanilla",
+ "vanilla-xformers",
+ "memory-efficient-cross-attn",
+ "linear",
+ "none",
+ ], f"attn_type {attn_type} unknown"
+ if (
+ version.parse(torch.__version__) < version.parse("2.0.0")
+ and attn_type != "none"
+ ):
+ assert XFORMERS_IS_AVAILABLE, (
+ f"We do not support vanilla attention in {torch.__version__} anymore, "
+ f"as it is too expensive. Please install xformers via e.g. 'pip install xformers==0.0.16'"
+ )
+ attn_type = "vanilla-xformers"
+ logpy.info(f"making attention of type '{attn_type}' with {in_channels} in_channels")
+ if attn_type == "vanilla":
+ assert attn_kwargs is None
+ return AttnBlock(in_channels)
+ elif attn_type == "vanilla-xformers":
+ logpy.info(
+ f"building MemoryEfficientAttnBlock with {in_channels} in_channels..."
+ )
+ return MemoryEfficientAttnBlock(in_channels)
+    elif attn_type == "memory-efficient-cross-attn":
+ attn_kwargs["query_dim"] = in_channels
+ return MemoryEfficientCrossAttentionWrapper(**attn_kwargs)
+ elif attn_type == "none":
+ return nn.Identity(in_channels)
+ else:
+ return LinAttnBlock(in_channels)
+
+
+class Model(nn.Module):
+ def __init__(
+ self,
+ *,
+ ch,
+ out_ch,
+ ch_mult=(1, 2, 4, 8),
+ num_res_blocks,
+ attn_resolutions,
+ dropout=0.0,
+ resamp_with_conv=True,
+ in_channels,
+ resolution,
+ use_timestep=True,
+ use_linear_attn=False,
+ attn_type="vanilla",
+ ):
+ super().__init__()
+ if use_linear_attn:
+ attn_type = "linear"
+ self.ch = ch
+ self.temb_ch = self.ch * 4
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ self.resolution = resolution
+ self.in_channels = in_channels
+
+ self.use_timestep = use_timestep
+ if self.use_timestep:
+ # timestep embedding
+ self.temb = nn.Module()
+ self.temb.dense = nn.ModuleList(
+ [
+ torch.nn.Linear(self.ch, self.temb_ch),
+ torch.nn.Linear(self.temb_ch, self.temb_ch),
+ ]
+ )
+
+ # downsampling
+ self.conv_in = torch.nn.Conv2d(
+ in_channels, self.ch, kernel_size=3, stride=1, padding=1
+ )
+
+ curr_res = resolution
+ in_ch_mult = (1,) + tuple(ch_mult)
+ self.down = nn.ModuleList()
+ for i_level in range(self.num_resolutions):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_in = ch * in_ch_mult[i_level]
+ block_out = ch * ch_mult[i_level]
+ for i_block in range(self.num_res_blocks):
+ block.append(
+ ResnetBlock(
+ in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ )
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type))
+ down = nn.Module()
+ down.block = block
+ down.attn = attn
+ if i_level != self.num_resolutions - 1:
+ down.downsample = Downsample(block_in, resamp_with_conv)
+ curr_res = curr_res // 2
+ self.down.append(down)
+
+ # middle
+ self.mid = nn.Module()
+ self.mid.block_1 = ResnetBlock(
+ in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)
+ self.mid.block_2 = ResnetBlock(
+ in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+
+ # upsampling
+ self.up = nn.ModuleList()
+ for i_level in reversed(range(self.num_resolutions)):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_out = ch * ch_mult[i_level]
+ skip_in = ch * ch_mult[i_level]
+ for i_block in range(self.num_res_blocks + 1):
+ if i_block == self.num_res_blocks:
+ skip_in = ch * in_ch_mult[i_level]
+ block.append(
+ ResnetBlock(
+ in_channels=block_in + skip_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ )
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type))
+ up = nn.Module()
+ up.block = block
+ up.attn = attn
+ if i_level != 0:
+ up.upsample = Upsample(block_in, resamp_with_conv)
+ curr_res = curr_res * 2
+ self.up.insert(0, up) # prepend to get consistent order
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = torch.nn.Conv2d(
+ block_in, out_ch, kernel_size=3, stride=1, padding=1
+ )
+
+ def forward(self, x, t=None, context=None):
+ # assert x.shape[2] == x.shape[3] == self.resolution
+ if context is not None:
+ # assume aligned context, cat along channel axis
+ x = torch.cat((x, context), dim=1)
+ if self.use_timestep:
+ # timestep embedding
+ assert t is not None
+ temb = get_timestep_embedding(t, self.ch)
+ temb = self.temb.dense[0](temb)
+ temb = nonlinearity(temb)
+ temb = self.temb.dense[1](temb)
+ else:
+ temb = None
+
+ # downsampling
+ hs = [self.conv_in(x)]
+ for i_level in range(self.num_resolutions):
+ for i_block in range(self.num_res_blocks):
+ h = self.down[i_level].block[i_block](hs[-1], temb)
+ if len(self.down[i_level].attn) > 0:
+ h = self.down[i_level].attn[i_block](h)
+ hs.append(h)
+ if i_level != self.num_resolutions - 1:
+ hs.append(self.down[i_level].downsample(hs[-1]))
+
+ # middle
+ h = hs[-1]
+ h = self.mid.block_1(h, temb)
+ h = self.mid.attn_1(h)
+ h = self.mid.block_2(h, temb)
+
+ # upsampling
+ for i_level in reversed(range(self.num_resolutions)):
+ for i_block in range(self.num_res_blocks + 1):
+ h = self.up[i_level].block[i_block](
+ torch.cat([h, hs.pop()], dim=1), temb
+ )
+ if len(self.up[i_level].attn) > 0:
+ h = self.up[i_level].attn[i_block](h)
+ if i_level != 0:
+ h = self.up[i_level].upsample(h)
+
+ # end
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h)
+ return h
+
+ def get_last_layer(self):
+ return self.conv_out.weight
+
+
+class Encoder(nn.Module):
+ def __init__(
+ self,
+ *,
+ ch,
+ out_ch,
+ ch_mult=(1, 2, 4, 8),
+ num_res_blocks,
+ attn_resolutions,
+ dropout=0.0,
+ resamp_with_conv=True,
+ in_channels,
+ resolution,
+ z_channels,
+ double_z=True,
+ use_linear_attn=False,
+ attn_type="vanilla",
+ **ignore_kwargs,
+ ):
+ super().__init__()
+ if use_linear_attn:
+ attn_type = "linear"
+ self.ch = ch
+ self.temb_ch = 0
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ self.resolution = resolution
+ self.in_channels = in_channels
+
+ # downsampling
+ self.conv_in = torch.nn.Conv2d(
+ in_channels, self.ch, kernel_size=3, stride=1, padding=1
+ )
+
+ curr_res = resolution
+ in_ch_mult = (1,) + tuple(ch_mult)
+ self.in_ch_mult = in_ch_mult
+ self.down = nn.ModuleList()
+ for i_level in range(self.num_resolutions):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_in = ch * in_ch_mult[i_level]
+ block_out = ch * ch_mult[i_level]
+ for i_block in range(self.num_res_blocks):
+ block.append(
+ ResnetBlock(
+ in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ )
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn(block_in, attn_type=attn_type))
+ down = nn.Module()
+ down.block = block
+ down.attn = attn
+ if i_level != self.num_resolutions - 1:
+ down.downsample = Downsample(block_in, resamp_with_conv)
+ curr_res = curr_res // 2
+ self.down.append(down)
+
+ # middle
+ self.mid = nn.Module()
+ self.mid.block_1 = ResnetBlock(
+ in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)
+ self.mid.block_2 = ResnetBlock(
+ in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = torch.nn.Conv2d(
+ block_in,
+ 2 * z_channels if double_z else z_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1,
+ )
+
+ def forward(self, x):
+ # timestep embedding
+ temb = None
+
+ # downsampling
+ hs = [self.conv_in(x)]
+ for i_level in range(self.num_resolutions):
+ for i_block in range(self.num_res_blocks):
+ h = self.down[i_level].block[i_block](hs[-1], temb)
+ if len(self.down[i_level].attn) > 0:
+ h = self.down[i_level].attn[i_block](h)
+ hs.append(h)
+ if i_level != self.num_resolutions - 1:
+ hs.append(self.down[i_level].downsample(hs[-1]))
+
+ # middle
+ h = hs[-1]
+ h = self.mid.block_1(h, temb)
+ h = self.mid.attn_1(h)
+ h = self.mid.block_2(h, temb)
+
+ # end
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h)
+ return h
+
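+# Illustrative instantiation (sizes are assumptions): a 4x-downsampling
+# encoder for 256x256 RGB images, producing 2 * z_channels output maps for a
+# diagonal Gaussian posterior:
+#
+#   enc = Encoder(ch=64, out_ch=3, ch_mult=(1, 2, 4), num_res_blocks=2,
+#                 attn_resolutions=[], in_channels=3, resolution=256,
+#                 z_channels=4)
+#   h = enc(torch.randn(1, 3, 256, 256))   # (1, 8, 64, 64)
+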
+
+class Decoder(nn.Module):
+ def __init__(
+ self,
+ *,
+ ch,
+ out_ch,
+ ch_mult=(1, 2, 4, 8),
+ num_res_blocks,
+ attn_resolutions,
+ dropout=0.0,
+ resamp_with_conv=True,
+ in_channels,
+ resolution,
+ z_channels,
+ give_pre_end=False,
+ tanh_out=False,
+ use_linear_attn=False,
+ attn_type="vanilla",
+ **ignorekwargs,
+ ):
+ super().__init__()
+ if use_linear_attn:
+ attn_type = "linear"
+ self.ch = ch
+ self.temb_ch = 0
+ self.num_resolutions = len(ch_mult)
+ self.num_res_blocks = num_res_blocks
+ self.resolution = resolution
+ self.in_channels = in_channels
+ self.give_pre_end = give_pre_end
+ self.tanh_out = tanh_out
+
+ # compute in_ch_mult, block_in and curr_res at lowest res
+ in_ch_mult = (1,) + tuple(ch_mult)
+ block_in = ch * ch_mult[self.num_resolutions - 1]
+ curr_res = resolution // 2 ** (self.num_resolutions - 1)
+ self.z_shape = (1, z_channels, curr_res, curr_res)
+ logpy.info(
+ "Working with z of shape {} = {} dimensions.".format(
+ self.z_shape, np.prod(self.z_shape)
+ )
+ )
+
+ make_attn_cls = self._make_attn()
+ make_resblock_cls = self._make_resblock()
+ make_conv_cls = self._make_conv()
+ # z to block_in
+ self.conv_in = torch.nn.Conv2d(
+ z_channels, block_in, kernel_size=3, stride=1, padding=1
+ )
+
+ # middle
+ self.mid = nn.Module()
+ self.mid.block_1 = make_resblock_cls(
+ in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ self.mid.attn_1 = make_attn_cls(block_in, attn_type=attn_type)
+ self.mid.block_2 = make_resblock_cls(
+ in_channels=block_in,
+ out_channels=block_in,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+
+ # upsampling
+ self.up = nn.ModuleList()
+ for i_level in reversed(range(self.num_resolutions)):
+ block = nn.ModuleList()
+ attn = nn.ModuleList()
+ block_out = ch * ch_mult[i_level]
+ for i_block in range(self.num_res_blocks + 1):
+ block.append(
+ make_resblock_cls(
+ in_channels=block_in,
+ out_channels=block_out,
+ temb_channels=self.temb_ch,
+ dropout=dropout,
+ )
+ )
+ block_in = block_out
+ if curr_res in attn_resolutions:
+ attn.append(make_attn_cls(block_in, attn_type=attn_type))
+ up = nn.Module()
+ up.block = block
+ up.attn = attn
+ if i_level != 0:
+ up.upsample = Upsample(block_in, resamp_with_conv)
+ curr_res = curr_res * 2
+ self.up.insert(0, up) # prepend to get consistent order
+
+ # end
+ self.norm_out = Normalize(block_in)
+ self.conv_out = make_conv_cls(
+ block_in, out_ch, kernel_size=3, stride=1, padding=1
+ )
+
+ def _make_attn(self) -> Callable:
+ return make_attn
+
+ def _make_resblock(self) -> Callable:
+ return ResnetBlock
+
+ def _make_conv(self) -> Callable:
+ return torch.nn.Conv2d
+
+ def get_last_layer(self, **kwargs):
+ return self.conv_out.weight
+
+ def forward(self, z, **kwargs):
+ # assert z.shape[1:] == self.z_shape[1:]
+ self.last_z_shape = z.shape
+
+ # timestep embedding
+ temb = None
+
+ # z to block_in
+ h = self.conv_in(z)
+
+ # middle
+ h = self.mid.block_1(h, temb, **kwargs)
+ h = self.mid.attn_1(h, **kwargs)
+ h = self.mid.block_2(h, temb, **kwargs)
+
+ # upsampling
+ for i_level in reversed(range(self.num_resolutions)):
+ for i_block in range(self.num_res_blocks + 1):
+ h = self.up[i_level].block[i_block](h, temb, **kwargs)
+ if len(self.up[i_level].attn) > 0:
+ h = self.up[i_level].attn[i_block](h, **kwargs)
+ if i_level != 0:
+ h = self.up[i_level].upsample(h)
+
+ # end
+ if self.give_pre_end:
+ return h
+
+ h = self.norm_out(h)
+ h = nonlinearity(h)
+ h = self.conv_out(h, **kwargs)
+ if self.tanh_out:
+ h = torch.tanh(h)
+ return h
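+
+
+# Matching decoder sketch (sizes mirror the Encoder example above):
+#
+#   dec = Decoder(ch=64, out_ch=3, ch_mult=(1, 2, 4), num_res_blocks=2,
+#                 attn_resolutions=[], in_channels=3, resolution=256,
+#                 z_channels=4)
+#   x = dec(torch.randn(1, 4, 64, 64))     # (1, 3, 256, 256)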
diff --git a/sgm/modules/diffusionmodules/openaimodel.py b/sgm/modules/diffusionmodules/openaimodel.py
new file mode 100644
index 0000000000000000000000000000000000000000..b58e1b0e9be031cd09803d451fc59f2a5ce88eea
--- /dev/null
+++ b/sgm/modules/diffusionmodules/openaimodel.py
@@ -0,0 +1,853 @@
+import logging
+import math
+from abc import abstractmethod
+from typing import Iterable, List, Optional, Tuple, Union
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+from einops import rearrange
+from torch.utils.checkpoint import checkpoint
+
+from ...modules.attention import SpatialTransformer
+from ...modules.diffusionmodules.util import (avg_pool_nd, conv_nd, linear,
+ normalization,
+ timestep_embedding, zero_module)
+from ...modules.video_attention import SpatialVideoTransformer
+from ...util import exists
+
+logpy = logging.getLogger(__name__)
+
+
+class AttentionPool2d(nn.Module):
+ """
+ Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py
+ """
+
+ def __init__(
+ self,
+ spacial_dim: int,
+ embed_dim: int,
+ num_heads_channels: int,
+ output_dim: Optional[int] = None,
+ ):
+ super().__init__()
+ self.positional_embedding = nn.Parameter(
+ th.randn(embed_dim, spacial_dim**2 + 1) / embed_dim**0.5
+ )
+ self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1)
+ self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1)
+ self.num_heads = embed_dim // num_heads_channels
+ self.attention = QKVAttention(self.num_heads)
+
+ def forward(self, x: th.Tensor) -> th.Tensor:
+ b, c, _ = x.shape
+ x = x.reshape(b, c, -1)
+ x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1)
+ x = x + self.positional_embedding[None, :, :].to(x.dtype)
+ x = self.qkv_proj(x)
+ x = self.attention(x)
+ x = self.c_proj(x)
+ return x[:, :, 0]
+
+
+class TimestepBlock(nn.Module):
+ """
+ Any module where forward() takes timestep embeddings as a second argument.
+ """
+
+ @abstractmethod
+ def forward(self, x: th.Tensor, emb: th.Tensor):
+ """
+ Apply the module to `x` given `emb` timestep embeddings.
+ """
+
+
+class TimestepEmbedSequential(nn.Sequential, TimestepBlock):
+ """
+ A sequential module that passes timestep embeddings to the children that
+ support it as an extra input.
+ """
+
+ def forward(
+ self,
+ x: th.Tensor,
+ emb: th.Tensor,
+ context: Optional[th.Tensor] = None,
+ image_only_indicator: Optional[th.Tensor] = None,
+ time_context: Optional[int] = None,
+ num_video_frames: Optional[int] = None,
+ ):
+        # imported here rather than at module top, presumably to avoid a
+        # circular dependency with video_model
+        from ...modules.diffusionmodules.video_model import VideoResBlock
+
+        for layer in self:
+            if isinstance(layer, TimestepBlock) and not isinstance(
+                layer, VideoResBlock
+            ):
+                x = layer(x, emb)
+            elif isinstance(layer, VideoResBlock):
+                x = layer(x, emb, num_video_frames, image_only_indicator)
+            elif isinstance(layer, SpatialVideoTransformer):
+                x = layer(
+                    x,
+                    context,
+                    time_context,
+                    num_video_frames,
+                    image_only_indicator,
+                )
+            elif isinstance(layer, SpatialTransformer):
+                x = layer(x, context)
+            else:
+                x = layer(x)
+        return x
+
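+# Dispatch summary: TimestepBlocks receive (x, emb); VideoResBlocks also get
+# num_video_frames and image_only_indicator; Spatial(Video)Transformers get
+# the cross-attention context; every other layer is called as layer(x).
+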
+
+class Upsample(nn.Module):
+ """
+ An upsampling layer with an optional convolution.
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ upsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(
+ self,
+ channels: int,
+ use_conv: bool,
+ dims: int = 2,
+ out_channels: Optional[int] = None,
+ padding: int = 1,
+ third_up: bool = False,
+ kernel_size: int = 3,
+ scale_factor: int = 2,
+ ):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ self.third_up = third_up
+ self.scale_factor = scale_factor
+ if use_conv:
+ self.conv = conv_nd(
+ dims, self.channels, self.out_channels, kernel_size, padding=padding
+ )
+
+ def forward(self, x: th.Tensor) -> th.Tensor:
+ assert x.shape[1] == self.channels
+
+ if self.dims == 3:
+ t_factor = 1 if not self.third_up else self.scale_factor
+ x = F.interpolate(
+ x,
+ (
+ t_factor * x.shape[2],
+ x.shape[3] * self.scale_factor,
+ x.shape[4] * self.scale_factor,
+ ),
+ mode="nearest",
+ )
+ else:
+ x = F.interpolate(x, scale_factor=self.scale_factor, mode="nearest")
+ if self.use_conv:
+ x = self.conv(x)
+ return x
+
+
+class Downsample(nn.Module):
+ """
+ A downsampling layer with an optional convolution.
+ :param channels: channels in the inputs and outputs.
+ :param use_conv: a bool determining if a convolution is applied.
+ :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
+ downsampling occurs in the inner-two dimensions.
+ """
+
+ def __init__(
+ self,
+ channels: int,
+ use_conv: bool,
+ dims: int = 2,
+ out_channels: Optional[int] = None,
+ padding: int = 1,
+ third_down: bool = False,
+ ):
+ super().__init__()
+ self.channels = channels
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.dims = dims
+ stride = 2 if dims != 3 else ((1, 2, 2) if not third_down else (2, 2, 2))
+ if use_conv:
+ logpy.info(f"Building a Downsample layer with {dims} dims.")
+ logpy.info(
+ f" --> settings are: \n in-chn: {self.channels}, out-chn: {self.out_channels}, "
+ f"kernel-size: 3, stride: {stride}, padding: {padding}"
+ )
+ if dims == 3:
+ logpy.info(f" --> Downsampling third axis (time): {third_down}")
+ self.op = conv_nd(
+ dims,
+ self.channels,
+ self.out_channels,
+ 3,
+ stride=stride,
+ padding=padding,
+ )
+ else:
+ assert self.channels == self.out_channels
+ self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)
+
+ def forward(self, x: th.Tensor) -> th.Tensor:
+ assert x.shape[1] == self.channels
+
+ return self.op(x)
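+
+# Editor's sketch (not part of the original file): Downsample halves the
+# spatial dims (stride-2 conv or average pooling) and Upsample doubles them
+# (nearest-neighbour 2x, optionally followed by a conv), so with matching
+# channel counts the two compose to a shape-preserving round trip:
+#
+#   x = th.randn(1, 8, 16, 16)
+#   down = Downsample(8, use_conv=True)   # (1, 8, 16, 16) -> (1, 8, 8, 8)
+#   up = Upsample(8, use_conv=True)       # (1, 8, 8, 8) -> (1, 8, 16, 16)
+#   assert up(down(x)).shape == x.shape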
+
+
+class ResBlock(TimestepBlock):
+ """
+ A residual block that can optionally change the number of channels.
+ :param channels: the number of input channels.
+ :param emb_channels: the number of timestep embedding channels.
+ :param dropout: the rate of dropout.
+ :param out_channels: if specified, the number of out channels.
+ :param use_conv: if True and out_channels is specified, use a spatial
+ convolution instead of a smaller 1x1 convolution to change the
+ channels in the skip connection.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param use_checkpoint: if True, use gradient checkpointing on this module.
+ :param up: if True, use this block for upsampling.
+ :param down: if True, use this block for downsampling.
+ """
+
+ def __init__(
+ self,
+ channels: int,
+ emb_channels: int,
+ dropout: float,
+ out_channels: Optional[int] = None,
+ use_conv: bool = False,
+ use_scale_shift_norm: bool = False,
+ dims: int = 2,
+ use_checkpoint: bool = False,
+ up: bool = False,
+ down: bool = False,
+ kernel_size: int = 3,
+ exchange_temb_dims: bool = False,
+ skip_t_emb: bool = False,
+ ):
+ super().__init__()
+ self.channels = channels
+ self.emb_channels = emb_channels
+ self.dropout = dropout
+ self.out_channels = out_channels or channels
+ self.use_conv = use_conv
+ self.use_checkpoint = use_checkpoint
+ self.use_scale_shift_norm = use_scale_shift_norm
+ self.exchange_temb_dims = exchange_temb_dims
+
+ if isinstance(kernel_size, Iterable):
+ padding = [k // 2 for k in kernel_size]
+ else:
+ padding = kernel_size // 2
+
+ self.in_layers = nn.Sequential(
+ normalization(channels),
+ nn.SiLU(),
+ conv_nd(dims, channels, self.out_channels, kernel_size, padding=padding),
+ )
+
+ self.updown = up or down
+
+ if up:
+ self.h_upd = Upsample(channels, False, dims)
+ self.x_upd = Upsample(channels, False, dims)
+ elif down:
+ self.h_upd = Downsample(channels, False, dims)
+ self.x_upd = Downsample(channels, False, dims)
+ else:
+ self.h_upd = self.x_upd = nn.Identity()
+
+ self.skip_t_emb = skip_t_emb
+ self.emb_out_channels = (
+ 2 * self.out_channels if use_scale_shift_norm else self.out_channels
+ )
+ if self.skip_t_emb:
+ logpy.info(f"Skipping timestep embedding in {self.__class__.__name__}")
+ assert not self.use_scale_shift_norm
+ self.emb_layers = None
+ self.exchange_temb_dims = False
+ else:
+ self.emb_layers = nn.Sequential(
+ nn.SiLU(),
+ linear(
+ emb_channels,
+ self.emb_out_channels,
+ ),
+ )
+
+ self.out_layers = nn.Sequential(
+ normalization(self.out_channels),
+ nn.SiLU(),
+ nn.Dropout(p=dropout),
+ zero_module(
+ conv_nd(
+ dims,
+ self.out_channels,
+ self.out_channels,
+ kernel_size,
+ padding=padding,
+ )
+ ),
+ )
+
+ if self.out_channels == channels:
+ self.skip_connection = nn.Identity()
+ elif use_conv:
+ self.skip_connection = conv_nd(
+ dims, channels, self.out_channels, kernel_size, padding=padding
+ )
+ else:
+ self.skip_connection = conv_nd(dims, channels, self.out_channels, 1)
+
+ def forward(self, x: th.Tensor, emb: th.Tensor) -> th.Tensor:
+ """
+ Apply the block to a Tensor, conditioned on a timestep embedding.
+ :param x: an [N x C x ...] Tensor of features.
+ :param emb: an [N x emb_channels] Tensor of timestep embeddings.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+ if self.use_checkpoint:
+ return checkpoint(self._forward, x, emb)
+ else:
+ return self._forward(x, emb)
+
+ def _forward(self, x: th.Tensor, emb: th.Tensor) -> th.Tensor:
+ if self.updown:
+ in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1]
+ h = in_rest(x)
+ h = self.h_upd(h)
+ x = self.x_upd(x)
+ h = in_conv(h)
+ else:
+ h = self.in_layers(x)
+
+ if self.skip_t_emb:
+ emb_out = th.zeros_like(h)
+ else:
+ emb_out = self.emb_layers(emb).type(h.dtype)
+ while len(emb_out.shape) < len(h.shape):
+ emb_out = emb_out[..., None]
+ if self.use_scale_shift_norm:
+ out_norm, out_rest = self.out_layers[0], self.out_layers[1:]
+ scale, shift = th.chunk(emb_out, 2, dim=1)
+ h = out_norm(h) * (1 + scale) + shift
+ h = out_rest(h)
+ else:
+ if self.exchange_temb_dims:
+ emb_out = rearrange(emb_out, "b t c ... -> b c t ...")
+ h = h + emb_out
+ h = self.out_layers(h)
+ return self.skip_connection(x) + h
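+
+# Editor's note: the `use_scale_shift_norm` branch above is FiLM-style
+# conditioning. The timestep embedding is projected to 2 * out_channels,
+# split into (scale, shift), and applied after normalization as
+#
+#   h = norm(h) * (1 + scale) + shift
+#
+# instead of being added to h directly. A minimal sketch with assumed sizes
+# (channels must be divisible by the 32 groups used by `normalization`):
+#
+#   block = ResBlock(channels=64, emb_channels=128, dropout=0.0,
+#                    use_scale_shift_norm=True)
+#   y = block(th.randn(2, 64, 16, 16), th.randn(2, 128))  # -> (2, 64, 16, 16)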
+
+
+class AttentionBlock(nn.Module):
+ """
+ An attention block that allows spatial positions to attend to each other.
+ Originally ported from here, but adapted to the N-d case.
+ https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66.
+ """
+
+ def __init__(
+ self,
+ channels: int,
+ num_heads: int = 1,
+ num_head_channels: int = -1,
+ use_checkpoint: bool = False,
+ use_new_attention_order: bool = False,
+ ):
+ super().__init__()
+ self.channels = channels
+ if num_head_channels == -1:
+ self.num_heads = num_heads
+ else:
+ assert (
+ channels % num_head_channels == 0
+ ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}"
+ self.num_heads = channels // num_head_channels
+ self.use_checkpoint = use_checkpoint
+ self.norm = normalization(channels)
+ self.qkv = conv_nd(1, channels, channels * 3, 1)
+ if use_new_attention_order:
+ # split qkv before split heads
+ self.attention = QKVAttention(self.num_heads)
+ else:
+ # split heads before split qkv
+ self.attention = QKVAttentionLegacy(self.num_heads)
+
+ self.proj_out = zero_module(conv_nd(1, channels, channels, 1))
+
+ def forward(self, x: th.Tensor, **kwargs) -> th.Tensor:
+ return checkpoint(self._forward, x)
+
+ def _forward(self, x: th.Tensor) -> th.Tensor:
+ b, c, *spatial = x.shape
+ x = x.reshape(b, c, -1)
+ qkv = self.qkv(self.norm(x))
+ h = self.attention(qkv)
+ h = self.proj_out(h)
+ return (x + h).reshape(b, c, *spatial)
+
+
+class QKVAttentionLegacy(nn.Module):
+ """
+    A module which performs QKV attention. Matches legacy QKVAttention + input/output heads shaping.
+ """
+
+ def __init__(self, n_heads: int):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv: th.Tensor) -> th.Tensor:
+ """
+ Apply QKV attention.
+ :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts", q * scale, k * scale
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v)
+ return a.reshape(bs, -1, length)
+
+
+class QKVAttention(nn.Module):
+ """
+ A module which performs QKV attention and splits in a different order.
+ """
+
+ def __init__(self, n_heads: int):
+ super().__init__()
+ self.n_heads = n_heads
+
+ def forward(self, qkv: th.Tensor) -> th.Tensor:
+ """
+ Apply QKV attention.
+ :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs.
+ :return: an [N x (H * C) x T] tensor after attention.
+ """
+ bs, width, length = qkv.shape
+ assert width % (3 * self.n_heads) == 0
+ ch = width // (3 * self.n_heads)
+ q, k, v = qkv.chunk(3, dim=1)
+ scale = 1 / math.sqrt(math.sqrt(ch))
+ weight = th.einsum(
+ "bct,bcs->bts",
+ (q * scale).view(bs * self.n_heads, ch, length),
+ (k * scale).view(bs * self.n_heads, ch, length),
+ ) # More stable with f16 than dividing afterwards
+ weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
+ a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length))
+ return a.reshape(bs, -1, length)
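+
+# Editor's sketch (not in the original): both attention modules consume a
+# fused qkv tensor of shape [N, 3*H*C, T] and return [N, H*C, T]; they differ
+# only in whether heads are split before or after the q/k/v split, i.e. in
+# the assumed channel layout of `qkv`.
+#
+#   attn = QKVAttention(n_heads=4)
+#   qkv = th.randn(2, 3 * 4 * 16, 10)   # N=2, H=4, C=16, T=10
+#   out = attn(qkv)                     # -> (2, 64, 10)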
+
+
+class Timestep(nn.Module):
+ def __init__(self, dim: int):
+ super().__init__()
+ self.dim = dim
+
+ def forward(self, t: th.Tensor) -> th.Tensor:
+ return timestep_embedding(t, self.dim)
+
+
+class UNetModel(nn.Module):
+ """
+ The full UNet model with attention and timestep embedding.
+ :param in_channels: channels in the input Tensor.
+ :param model_channels: base channel count for the model.
+ :param out_channels: channels in the output Tensor.
+ :param num_res_blocks: number of residual blocks per downsample.
+ :param attention_resolutions: a collection of downsample rates at which
+ attention will take place. May be a set, list, or tuple.
+ For example, if this contains 4, then at 4x downsampling, attention
+ will be used.
+ :param dropout: the dropout probability.
+ :param channel_mult: channel multiplier for each level of the UNet.
+ :param conv_resample: if True, use learned convolutions for upsampling and
+ downsampling.
+ :param dims: determines if the signal is 1D, 2D, or 3D.
+ :param num_classes: if specified (as an int), then this model will be
+ class-conditional with `num_classes` classes.
+ :param use_checkpoint: use gradient checkpointing to reduce memory usage.
+ :param num_heads: the number of attention heads in each attention layer.
+    :param num_head_channels: if specified, ignore num_heads and instead use
+ a fixed channel width per attention head.
+ :param num_heads_upsample: works with num_heads to set a different number
+ of heads for upsampling. Deprecated.
+ :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.
+ :param resblock_updown: use residual blocks for up/downsampling.
+ :param use_new_attention_order: use a different attention pattern for potentially
+ increased efficiency.
+ """
+
+ def __init__(
+ self,
+ in_channels: int,
+ model_channels: int,
+ out_channels: int,
+ num_res_blocks: int,
+        attention_resolutions: Union[List, Tuple],
+ dropout: float = 0.0,
+ channel_mult: Union[List, Tuple] = (1, 2, 4, 8),
+ conv_resample: bool = True,
+ dims: int = 2,
+ num_classes: Optional[Union[int, str]] = None,
+ use_checkpoint: bool = False,
+ num_heads: int = -1,
+ num_head_channels: int = -1,
+ num_heads_upsample: int = -1,
+ use_scale_shift_norm: bool = False,
+ resblock_updown: bool = False,
+ transformer_depth: int = 1,
+ context_dim: Optional[int] = None,
+ disable_self_attentions: Optional[List[bool]] = None,
+ num_attention_blocks: Optional[List[int]] = None,
+ disable_middle_self_attn: bool = False,
+ disable_middle_transformer: bool = False,
+ use_linear_in_transformer: bool = False,
+ spatial_transformer_attn_type: str = "softmax",
+ adm_in_channels: Optional[int] = None,
+ ):
+ super().__init__()
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ if num_heads == -1:
+ assert (
+ num_head_channels != -1
+ ), "Either num_heads or num_head_channels has to be set"
+
+ if num_head_channels == -1:
+ assert (
+ num_heads != -1
+ ), "Either num_heads or num_head_channels has to be set"
+
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ if isinstance(transformer_depth, int):
+ transformer_depth = len(channel_mult) * [transformer_depth]
+ transformer_depth_middle = transformer_depth[-1]
+
+ if isinstance(num_res_blocks, int):
+ self.num_res_blocks = len(channel_mult) * [num_res_blocks]
+ else:
+ if len(num_res_blocks) != len(channel_mult):
+ raise ValueError(
+ "provide num_res_blocks either as an int (globally constant) or "
+ "as a list/tuple (per-level) with the same length as channel_mult"
+ )
+ self.num_res_blocks = num_res_blocks
+
+ if disable_self_attentions is not None:
+ assert len(disable_self_attentions) == len(channel_mult)
+ if num_attention_blocks is not None:
+ assert len(num_attention_blocks) == len(self.num_res_blocks)
+ assert all(
+ map(
+ lambda i: self.num_res_blocks[i] >= num_attention_blocks[i],
+ range(len(num_attention_blocks)),
+ )
+ )
+ logpy.info(
+ f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. "
+ f"This option has LESS priority than attention_resolutions {attention_resolutions}, "
+ f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, "
+ f"attention will still not be set."
+ )
+
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.num_classes = num_classes
+ self.use_checkpoint = use_checkpoint
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if self.num_classes is not None:
+ if isinstance(self.num_classes, int):
+ self.label_emb = nn.Embedding(num_classes, time_embed_dim)
+ elif self.num_classes == "continuous":
+ logpy.info("setting up linear c_adm embedding layer")
+ self.label_emb = nn.Linear(1, time_embed_dim)
+ elif self.num_classes == "timestep":
+ self.label_emb = nn.Sequential(
+ Timestep(model_channels),
+ nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ ),
+ )
+ elif self.num_classes == "sequential":
+ assert adm_in_channels is not None
+ self.label_emb = nn.Sequential(
+ nn.Sequential(
+ linear(adm_in_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+ )
+ else:
+ raise ValueError
+
+ self.input_blocks = nn.ModuleList(
+ [
+ TimestepEmbedSequential(
+ conv_nd(dims, in_channels, model_channels, 3, padding=1)
+ )
+ ]
+ )
+ self._feature_size = model_channels
+ input_block_chans = [model_channels]
+ ch = model_channels
+ ds = 1
+ for level, mult in enumerate(channel_mult):
+ for nr in range(self.num_res_blocks[level]):
+ layers = [
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=mult * model_channels,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = mult * model_channels
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+
+ if context_dim is not None and exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if (
+ not exists(num_attention_blocks)
+ or nr < num_attention_blocks[level]
+ ):
+ layers.append(
+ SpatialTransformer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=transformer_depth[level],
+ context_dim=context_dim,
+ disable_self_attn=disabled_sa,
+ use_linear=use_linear_in_transformer,
+ attn_type=spatial_transformer_attn_type,
+ use_checkpoint=use_checkpoint,
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch, conv_resample, dims=dims, out_channels=out_ch
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+ ds *= 2
+ self._feature_size += ch
+
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+
+ self.middle_block = TimestepEmbedSequential(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ SpatialTransformer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=transformer_depth_middle,
+ context_dim=context_dim,
+ disable_self_attn=disable_middle_self_attn,
+ use_linear=use_linear_in_transformer,
+ attn_type=spatial_transformer_attn_type,
+ use_checkpoint=use_checkpoint,
+ )
+ if not disable_middle_transformer
+ else th.nn.Identity(),
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+
+ self.output_blocks = nn.ModuleList([])
+ for level, mult in list(enumerate(channel_mult))[::-1]:
+ for i in range(self.num_res_blocks[level] + 1):
+ ich = input_block_chans.pop()
+ layers = [
+ ResBlock(
+ ch + ich,
+ time_embed_dim,
+ dropout,
+ out_channels=model_channels * mult,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = model_channels * mult
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+
+ if exists(disable_self_attentions):
+ disabled_sa = disable_self_attentions[level]
+ else:
+ disabled_sa = False
+
+ if (
+ not exists(num_attention_blocks)
+ or i < num_attention_blocks[level]
+ ):
+ layers.append(
+ SpatialTransformer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=transformer_depth[level],
+ context_dim=context_dim,
+ disable_self_attn=disabled_sa,
+ use_linear=use_linear_in_transformer,
+ attn_type=spatial_transformer_attn_type,
+ use_checkpoint=use_checkpoint,
+ )
+ )
+ if level and i == self.num_res_blocks[level]:
+ out_ch = ch
+ layers.append(
+ ResBlock(
+ ch,
+ time_embed_dim,
+ dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ up=True,
+ )
+ if resblock_updown
+ else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)
+ )
+ ds //= 2
+ self.output_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),
+ )
+
+ def forward(
+ self,
+ x: th.Tensor,
+ timesteps: Optional[th.Tensor] = None,
+ context: Optional[th.Tensor] = None,
+ y: Optional[th.Tensor] = None,
+ **kwargs,
+ ) -> th.Tensor:
+ """
+ Apply the model to an input batch.
+ :param x: an [N x C x ...] Tensor of inputs.
+ :param timesteps: a 1-D batch of timesteps.
+ :param context: conditioning plugged in via crossattn
+ :param y: an [N] Tensor of labels, if class-conditional.
+ :return: an [N x C x ...] Tensor of outputs.
+ """
+ assert (y is not None) == (
+ self.num_classes is not None
+ ), "must specify y if and only if the model is class-conditional"
+ hs = []
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.num_classes is not None:
+ assert y.shape[0] == x.shape[0]
+ emb = emb + self.label_emb(y)
+
+ h = x
+ for module in self.input_blocks:
+ h = module(h, emb, context)
+ hs.append(h)
+ h = self.middle_block(h, emb, context)
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(h, emb, context)
+ h = h.type(x.dtype)
+
+ return self.out(h)
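+
+# Editor's sketch of a minimal instantiation (assumed hyper-parameters, not a
+# configuration shipped with the repo): with an empty `attention_resolutions`
+# and the middle transformer disabled, no SpatialTransformer is built and the
+# model reduces to a plain residual UNet.
+#
+#   unet = UNetModel(in_channels=3, model_channels=32, out_channels=3,
+#                    num_res_blocks=1, attention_resolutions=[],
+#                    num_heads=1, disable_middle_transformer=True)
+#   x = th.randn(2, 3, 32, 32)
+#   eps = unet(x, timesteps=th.randint(0, 1000, (2,)))  # -> (2, 3, 32, 32)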
diff --git a/sgm/modules/diffusionmodules/sampling.py b/sgm/modules/diffusionmodules/sampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..af07566d599fdd6f255f8b9fd4592a962b0d2ace
--- /dev/null
+++ b/sgm/modules/diffusionmodules/sampling.py
@@ -0,0 +1,362 @@
+"""
+ Partially ported from https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/sampling.py
+"""
+
+
+from typing import Dict, Union
+
+import torch
+from omegaconf import ListConfig, OmegaConf
+from tqdm import tqdm
+
+from ...modules.diffusionmodules.sampling_utils import (get_ancestral_step,
+ linear_multistep_coeff,
+ to_d, to_neg_log_sigma,
+ to_sigma)
+from ...util import append_dims, default, instantiate_from_config
+
+DEFAULT_GUIDER = {"target": "sgm.modules.diffusionmodules.guiders.IdentityGuider"}
+
+
+class BaseDiffusionSampler:
+ def __init__(
+ self,
+ discretization_config: Union[Dict, ListConfig, OmegaConf],
+ num_steps: Union[int, None] = None,
+ guider_config: Union[Dict, ListConfig, OmegaConf, None] = None,
+ verbose: bool = False,
+ device: str = "cuda",
+ ):
+ self.num_steps = num_steps
+ self.discretization = instantiate_from_config(discretization_config)
+ self.guider = instantiate_from_config(
+ default(
+ guider_config,
+ DEFAULT_GUIDER,
+ )
+ )
+ self.verbose = verbose
+ self.device = device
+
+ def prepare_sampling_loop(self, x, cond, uc=None, num_steps=None):
+ sigmas = self.discretization(
+ self.num_steps if num_steps is None else num_steps, device=self.device
+ )
+ uc = default(uc, cond)
+
+ x *= torch.sqrt(1.0 + sigmas[0] ** 2.0)
+ num_sigmas = len(sigmas)
+
+ s_in = x.new_ones([x.shape[0]])
+
+ return x, s_in, sigmas, num_sigmas, cond, uc
+
+ def denoise(self, x, denoiser, sigma, cond, uc):
+ denoised = denoiser(*self.guider.prepare_inputs(x, sigma, cond, uc))
+ denoised = self.guider(denoised, sigma)
+ return denoised
+
+ def get_sigma_gen(self, num_sigmas):
+ sigma_generator = range(num_sigmas - 1)
+ if self.verbose:
+ print("#" * 30, " Sampling setting ", "#" * 30)
+ print(f"Sampler: {self.__class__.__name__}")
+ print(f"Discretization: {self.discretization.__class__.__name__}")
+ print(f"Guider: {self.guider.__class__.__name__}")
+ sigma_generator = tqdm(
+ sigma_generator,
+ total=num_sigmas,
+ desc=f"Sampling with {self.__class__.__name__} for {num_sigmas} steps",
+ )
+ return sigma_generator
+
+
+class SingleStepDiffusionSampler(BaseDiffusionSampler):
+ def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc, *args, **kwargs):
+ raise NotImplementedError
+
+ def euler_step(self, x, d, dt):
+ return x + dt * d
+
+
+class EDMSampler(SingleStepDiffusionSampler):
+ def __init__(
+ self, s_churn=0.0, s_tmin=0.0, s_tmax=float("inf"), s_noise=1.0, *args, **kwargs
+ ):
+ super().__init__(*args, **kwargs)
+
+ self.s_churn = s_churn
+ self.s_tmin = s_tmin
+ self.s_tmax = s_tmax
+ self.s_noise = s_noise
+
+ def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc=None, gamma=0.0):
+ sigma_hat = sigma * (gamma + 1.0)
+ if gamma > 0:
+ eps = torch.randn_like(x) * self.s_noise
+ x = x + eps * append_dims(sigma_hat**2 - sigma**2, x.ndim) ** 0.5
+
+ denoised = self.denoise(x, denoiser, sigma_hat, cond, uc)
+ d = to_d(x, sigma_hat, denoised)
+ dt = append_dims(next_sigma - sigma_hat, x.ndim)
+
+ euler_step = self.euler_step(x, d, dt)
+ x = self.possible_correction_step(
+ euler_step, x, d, dt, next_sigma, denoiser, cond, uc
+ )
+ return x
+
+ def __call__(self, denoiser, x, cond, uc=None, num_steps=None):
+ x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop(
+ x, cond, uc, num_steps
+ )
+
+ for i in self.get_sigma_gen(num_sigmas):
+ gamma = (
+ min(self.s_churn / (num_sigmas - 1), 2**0.5 - 1)
+ if self.s_tmin <= sigmas[i] <= self.s_tmax
+ else 0.0
+ )
+ x = self.sampler_step(
+ s_in * sigmas[i],
+ s_in * sigmas[i + 1],
+ denoiser,
+ x,
+ cond,
+ uc,
+ gamma,
+ )
+
+ return x
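+
+# Editor's note: each EDM step may "churn" noise back in. With
+# sigma_hat = sigma * (1 + gamma), fresh noise of std
+# sqrt(sigma_hat^2 - sigma^2) is added so x again sits at noise level
+# sigma_hat before the Euler (plus optional correction) step toward
+# next_sigma; s_churn = 0 gives gamma = 0 and a deterministic sampler.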
+
+
+class AncestralSampler(SingleStepDiffusionSampler):
+ def __init__(self, eta=1.0, s_noise=1.0, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ self.eta = eta
+ self.s_noise = s_noise
+ self.noise_sampler = lambda x: torch.randn_like(x)
+
+ def ancestral_euler_step(self, x, denoised, sigma, sigma_down):
+ d = to_d(x, sigma, denoised)
+ dt = append_dims(sigma_down - sigma, x.ndim)
+
+ return self.euler_step(x, d, dt)
+
+ def ancestral_step(self, x, sigma, next_sigma, sigma_up):
+ x = torch.where(
+ append_dims(next_sigma, x.ndim) > 0.0,
+ x + self.noise_sampler(x) * self.s_noise * append_dims(sigma_up, x.ndim),
+ x,
+ )
+ return x
+
+ def __call__(self, denoiser, x, cond, uc=None, num_steps=None):
+ x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop(
+ x, cond, uc, num_steps
+ )
+
+ for i in self.get_sigma_gen(num_sigmas):
+ x = self.sampler_step(
+ s_in * sigmas[i],
+ s_in * sigmas[i + 1],
+ denoiser,
+ x,
+ cond,
+ uc,
+ )
+
+ return x
+
+
+class LinearMultistepSampler(BaseDiffusionSampler):
+ def __init__(
+ self,
+ order=4,
+ *args,
+ **kwargs,
+ ):
+ super().__init__(*args, **kwargs)
+
+ self.order = order
+
+ def __call__(self, denoiser, x, cond, uc=None, num_steps=None, **kwargs):
+ x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop(
+ x, cond, uc, num_steps
+ )
+
+ ds = []
+ sigmas_cpu = sigmas.detach().cpu().numpy()
+ for i in self.get_sigma_gen(num_sigmas):
+ sigma = s_in * sigmas[i]
+ denoised = denoiser(
+ *self.guider.prepare_inputs(x, sigma, cond, uc), **kwargs
+ )
+ denoised = self.guider(denoised, sigma)
+ d = to_d(x, sigma, denoised)
+ ds.append(d)
+ if len(ds) > self.order:
+ ds.pop(0)
+ cur_order = min(i + 1, self.order)
+ coeffs = [
+ linear_multistep_coeff(cur_order, sigmas_cpu, i, j)
+ for j in range(cur_order)
+ ]
+ x = x + sum(coeff * d for coeff, d in zip(coeffs, reversed(ds)))
+
+ return x
+
+
+class EulerEDMSampler(EDMSampler):
+ def possible_correction_step(
+ self, euler_step, x, d, dt, next_sigma, denoiser, cond, uc
+ ):
+ return euler_step
+
+
+class HeunEDMSampler(EDMSampler):
+ def possible_correction_step(
+ self, euler_step, x, d, dt, next_sigma, denoiser, cond, uc
+ ):
+ if torch.sum(next_sigma) < 1e-14:
+ # Save a network evaluation if all noise levels are 0
+ return euler_step
+ else:
+ denoised = self.denoise(euler_step, denoiser, next_sigma, cond, uc)
+ d_new = to_d(euler_step, next_sigma, denoised)
+ d_prime = (d + d_new) / 2.0
+
+ # apply correction if noise level is not 0
+ x = torch.where(
+ append_dims(next_sigma, x.ndim) > 0.0, x + d_prime * dt, euler_step
+ )
+ return x
+
+
+class EulerAncestralSampler(AncestralSampler):
+ def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc):
+ sigma_down, sigma_up = get_ancestral_step(sigma, next_sigma, eta=self.eta)
+ denoised = self.denoise(x, denoiser, sigma, cond, uc)
+ x = self.ancestral_euler_step(x, denoised, sigma, sigma_down)
+ x = self.ancestral_step(x, sigma, next_sigma, sigma_up)
+
+ return x
+
+
+class DPMPP2SAncestralSampler(AncestralSampler):
+ def get_variables(self, sigma, sigma_down):
+ t, t_next = [to_neg_log_sigma(s) for s in (sigma, sigma_down)]
+ h = t_next - t
+ s = t + 0.5 * h
+ return h, s, t, t_next
+
+ def get_mult(self, h, s, t, t_next):
+ mult1 = to_sigma(s) / to_sigma(t)
+ mult2 = (-0.5 * h).expm1()
+ mult3 = to_sigma(t_next) / to_sigma(t)
+ mult4 = (-h).expm1()
+
+ return mult1, mult2, mult3, mult4
+
+ def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc=None, **kwargs):
+ sigma_down, sigma_up = get_ancestral_step(sigma, next_sigma, eta=self.eta)
+ denoised = self.denoise(x, denoiser, sigma, cond, uc)
+ x_euler = self.ancestral_euler_step(x, denoised, sigma, sigma_down)
+
+ if torch.sum(sigma_down) < 1e-14:
+ # Save a network evaluation if all noise levels are 0
+ x = x_euler
+ else:
+ h, s, t, t_next = self.get_variables(sigma, sigma_down)
+ mult = [
+ append_dims(mult, x.ndim) for mult in self.get_mult(h, s, t, t_next)
+ ]
+
+ x2 = mult[0] * x - mult[1] * denoised
+ denoised2 = self.denoise(x2, denoiser, to_sigma(s), cond, uc)
+ x_dpmpp2s = mult[2] * x - mult[3] * denoised2
+
+ # apply correction if noise level is not 0
+ x = torch.where(append_dims(sigma_down, x.ndim) > 0.0, x_dpmpp2s, x_euler)
+
+ x = self.ancestral_step(x, sigma, next_sigma, sigma_up)
+ return x
+
+
+class DPMPP2MSampler(BaseDiffusionSampler):
+ def get_variables(self, sigma, next_sigma, previous_sigma=None):
+ t, t_next = [to_neg_log_sigma(s) for s in (sigma, next_sigma)]
+ h = t_next - t
+
+ if previous_sigma is not None:
+ h_last = t - to_neg_log_sigma(previous_sigma)
+ r = h_last / h
+ return h, r, t, t_next
+ else:
+ return h, None, t, t_next
+
+ def get_mult(self, h, r, t, t_next, previous_sigma):
+ mult1 = to_sigma(t_next) / to_sigma(t)
+ mult2 = (-h).expm1()
+
+ if previous_sigma is not None:
+ mult3 = 1 + 1 / (2 * r)
+ mult4 = 1 / (2 * r)
+ return mult1, mult2, mult3, mult4
+ else:
+ return mult1, mult2
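+
+    # Editor's note: in log-sigma time t = -log(sigma) with h = t_next - t,
+    # the multipliers above implement the DPM-Solver++(2M) update
+    #   x_next = (sigma_next / sigma) * x - (exp(-h) - 1) * D,
+    # where D is the plain denoised prediction on the first step and the
+    # multistep extrapolation (1 + 1/(2r)) * denoised - 1/(2r) * old_denoised
+    # afterwards.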
+
+ def sampler_step(
+ self,
+ old_denoised,
+ previous_sigma,
+ sigma,
+ next_sigma,
+ denoiser,
+ x,
+ cond,
+ uc=None,
+ ):
+ denoised = self.denoise(x, denoiser, sigma, cond, uc)
+
+ h, r, t, t_next = self.get_variables(sigma, next_sigma, previous_sigma)
+ mult = [
+ append_dims(mult, x.ndim)
+ for mult in self.get_mult(h, r, t, t_next, previous_sigma)
+ ]
+
+ x_standard = mult[0] * x - mult[1] * denoised
+ if old_denoised is None or torch.sum(next_sigma) < 1e-14:
+ # Save a network evaluation if all noise levels are 0 or on the first step
+ return x_standard, denoised
+ else:
+ denoised_d = mult[2] * denoised - mult[3] * old_denoised
+ x_advanced = mult[0] * x - mult[1] * denoised_d
+
+ # apply correction if noise level is not 0 and not first step
+ x = torch.where(
+ append_dims(next_sigma, x.ndim) > 0.0, x_advanced, x_standard
+ )
+
+ return x, denoised
+
+ def __call__(self, denoiser, x, cond, uc=None, num_steps=None, **kwargs):
+ x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop(
+ x, cond, uc, num_steps
+ )
+
+ old_denoised = None
+ for i in self.get_sigma_gen(num_sigmas):
+ x, old_denoised = self.sampler_step(
+ old_denoised,
+ None if i == 0 else s_in * sigmas[i - 1],
+ s_in * sigmas[i],
+ s_in * sigmas[i + 1],
+ denoiser,
+ x,
+ cond,
+ uc=uc,
+ )
+
+ return x
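+
+# Editor's usage sketch (assumed: a `denoiser` callable, conditioning `cond`,
+# and the EDMDiscretization target from the upstream sgm package):
+#
+#   sampler = EulerEDMSampler(
+#       num_steps=30,
+#       discretization_config={
+#           "target": "sgm.modules.diffusionmodules.discretizer.EDMDiscretization"
+#       },
+#   )
+#   samples = sampler(denoiser, torch.randn(4, 4, 64, 64, device="cuda"), cond)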
diff --git a/sgm/modules/diffusionmodules/sampling_utils.py b/sgm/modules/diffusionmodules/sampling_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce78527ea9052a8bfd0856ed2278901516fb9130
--- /dev/null
+++ b/sgm/modules/diffusionmodules/sampling_utils.py
@@ -0,0 +1,43 @@
+import torch
+from scipy import integrate
+
+from ...util import append_dims
+
+
+def linear_multistep_coeff(order, t, i, j, epsrel=1e-4):
+ if order - 1 > i:
+ raise ValueError(f"Order {order} too high for step {i}")
+
+ def fn(tau):
+ prod = 1.0
+ for k in range(order):
+ if j == k:
+ continue
+ prod *= (tau - t[i - k]) / (t[i - j] - t[i - k])
+ return prod
+
+ return integrate.quad(fn, t[i], t[i + 1], epsrel=epsrel)[0]
+
+
+def get_ancestral_step(sigma_from, sigma_to, eta=1.0):
+ if not eta:
+ return sigma_to, 0.0
+ sigma_up = torch.minimum(
+ sigma_to,
+ eta
+ * (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5,
+ )
+ sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5
+ return sigma_down, sigma_up
+
+
+def to_d(x, sigma, denoised):
+ return (x - denoised) / append_dims(sigma, x.ndim)
+
+
+def to_neg_log_sigma(sigma):
+ return sigma.log().neg()
+
+
+def to_sigma(neg_log_sigma):
+ return neg_log_sigma.neg().exp()
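+
+# Editor's sketch: `to_d` turns a denoised prediction into the probability-
+# flow ODE slope d = (x - denoised) / sigma, and `get_ancestral_step` splits
+# a sigma transition into a deterministic part sigma_down plus injected noise
+# sigma_up with sigma_down**2 + sigma_up**2 == sigma_to**2:
+#
+#   s_from, s_to = torch.tensor(1.0), torch.tensor(0.5)
+#   down, up = get_ancestral_step(s_from, s_to)
+#   assert torch.allclose(down**2 + up**2, s_to**2)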
diff --git a/sgm/modules/diffusionmodules/sigma_sampling.py b/sgm/modules/diffusionmodules/sigma_sampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..d54724c6ef6a7b8067784a4192b0fe2f41123063
--- /dev/null
+++ b/sgm/modules/diffusionmodules/sigma_sampling.py
@@ -0,0 +1,31 @@
+import torch
+
+from ...util import default, instantiate_from_config
+
+
+class EDMSampling:
+ def __init__(self, p_mean=-1.2, p_std=1.2):
+ self.p_mean = p_mean
+ self.p_std = p_std
+
+ def __call__(self, n_samples, rand=None):
+ log_sigma = self.p_mean + self.p_std * default(rand, torch.randn((n_samples,)))
+ return log_sigma.exp()
+
+
+class DiscreteSampling:
+ def __init__(self, discretization_config, num_idx, do_append_zero=False, flip=True):
+ self.num_idx = num_idx
+ self.sigmas = instantiate_from_config(discretization_config)(
+ num_idx, do_append_zero=do_append_zero, flip=flip
+ )
+
+ def idx_to_sigma(self, idx):
+ return self.sigmas[idx]
+
+ def __call__(self, n_samples, rand=None):
+ idx = default(
+ rand,
+ torch.randint(0, self.num_idx, (n_samples,)),
+ )
+ return self.idx_to_sigma(idx)
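+
+# Editor's usage sketch (assumed discretization target): DiscreteSampling
+# draws integer timestep indices uniformly and maps them to sigmas, e.g. for
+# sampling training noise levels:
+#
+#   sampling = DiscreteSampling(
+#       {"target": "sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization"},
+#       num_idx=1000,
+#   )
+#   sigmas = sampling(n_samples=8)   # tensor of 8 sigmas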
diff --git a/sgm/modules/diffusionmodules/util.py b/sgm/modules/diffusionmodules/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..389f0e449367b1b628d61dca105343d066dbefff
--- /dev/null
+++ b/sgm/modules/diffusionmodules/util.py
@@ -0,0 +1,369 @@
+"""
+partially adapted from
+https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py
+and
+https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py
+and
+https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py
+
+thanks!
+"""
+
+import math
+from typing import Optional
+
+import torch
+import torch.nn as nn
+from einops import rearrange, repeat
+
+
+def make_beta_schedule(
+ schedule,
+ n_timestep,
+ linear_start=1e-4,
+ linear_end=2e-2,
+):
+ if schedule == "linear":
+ betas = (
+ torch.linspace(
+ linear_start**0.5, linear_end**0.5, n_timestep, dtype=torch.float64
+ )
+ ** 2
+ )
+ return betas.numpy()
+
+
+def extract_into_tensor(a, t, x_shape):
+ b, *_ = t.shape
+ out = a.gather(-1, t)
+ return out.reshape(b, *((1,) * (len(x_shape) - 1)))
+
+
+def mixed_checkpoint(func, inputs: dict, params, flag):
+ """
+ Evaluate a function without caching intermediate activations, allowing for
+    reduced memory at the expense of extra compute in the backward pass. This
+    differs from the original checkpoint function borrowed from
+    https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py
+    in that it also works with non-tensor inputs.
+ :param func: the function to evaluate.
+ :param inputs: the argument dictionary to pass to `func`.
+ :param params: a sequence of parameters `func` depends on but does not
+ explicitly take as arguments.
+ :param flag: if False, disable gradient checkpointing.
+ """
+ if flag:
+ tensor_keys = [key for key in inputs if isinstance(inputs[key], torch.Tensor)]
+ tensor_inputs = [
+ inputs[key] for key in inputs if isinstance(inputs[key], torch.Tensor)
+ ]
+ non_tensor_keys = [
+ key for key in inputs if not isinstance(inputs[key], torch.Tensor)
+ ]
+ non_tensor_inputs = [
+ inputs[key] for key in inputs if not isinstance(inputs[key], torch.Tensor)
+ ]
+ args = tuple(tensor_inputs) + tuple(non_tensor_inputs) + tuple(params)
+ return MixedCheckpointFunction.apply(
+ func,
+ len(tensor_inputs),
+ len(non_tensor_inputs),
+ tensor_keys,
+ non_tensor_keys,
+ *args,
+ )
+ else:
+ return func(**inputs)
+
+
+class MixedCheckpointFunction(torch.autograd.Function):
+ @staticmethod
+ def forward(
+ ctx,
+ run_function,
+ length_tensors,
+ length_non_tensors,
+ tensor_keys,
+ non_tensor_keys,
+ *args,
+ ):
+ ctx.end_tensors = length_tensors
+ ctx.end_non_tensors = length_tensors + length_non_tensors
+ ctx.gpu_autocast_kwargs = {
+ "enabled": torch.is_autocast_enabled(),
+ "dtype": torch.get_autocast_gpu_dtype(),
+ "cache_enabled": torch.is_autocast_cache_enabled(),
+ }
+ assert (
+ len(tensor_keys) == length_tensors
+ and len(non_tensor_keys) == length_non_tensors
+ )
+
+ ctx.input_tensors = {
+ key: val for (key, val) in zip(tensor_keys, list(args[: ctx.end_tensors]))
+ }
+ ctx.input_non_tensors = {
+ key: val
+ for (key, val) in zip(
+ non_tensor_keys, list(args[ctx.end_tensors : ctx.end_non_tensors])
+ )
+ }
+ ctx.run_function = run_function
+ ctx.input_params = list(args[ctx.end_non_tensors :])
+
+ with torch.no_grad():
+ output_tensors = ctx.run_function(
+ **ctx.input_tensors, **ctx.input_non_tensors
+ )
+ return output_tensors
+
+ @staticmethod
+ def backward(ctx, *output_grads):
+ # additional_args = {key: ctx.input_tensors[key] for key in ctx.input_tensors if not isinstance(ctx.input_tensors[key],torch.Tensor)}
+ ctx.input_tensors = {
+ key: ctx.input_tensors[key].detach().requires_grad_(True)
+ for key in ctx.input_tensors
+ }
+
+ with torch.enable_grad(), torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs):
+ # Fixes a bug where the first op in run_function modifies the
+ # Tensor storage in place, which is not allowed for detach()'d
+ # Tensors.
+ shallow_copies = {
+ key: ctx.input_tensors[key].view_as(ctx.input_tensors[key])
+ for key in ctx.input_tensors
+ }
+ # shallow_copies.update(additional_args)
+ output_tensors = ctx.run_function(**shallow_copies, **ctx.input_non_tensors)
+ input_grads = torch.autograd.grad(
+ output_tensors,
+ list(ctx.input_tensors.values()) + ctx.input_params,
+ output_grads,
+ allow_unused=True,
+ )
+ del ctx.input_tensors
+ del ctx.input_params
+ del output_tensors
+ return (
+ (None, None, None, None, None)
+ + input_grads[: ctx.end_tensors]
+ + (None,) * (ctx.end_non_tensors - ctx.end_tensors)
+ + input_grads[ctx.end_tensors :]
+ )
+
+
+def checkpoint(func, inputs, params, flag):
+ """
+ Evaluate a function without caching intermediate activations, allowing for
+ reduced memory at the expense of extra compute in the backward pass.
+ :param func: the function to evaluate.
+ :param inputs: the argument sequence to pass to `func`.
+ :param params: a sequence of parameters `func` depends on but does not
+ explicitly take as arguments.
+ :param flag: if False, disable gradient checkpointing.
+ """
+ if flag:
+ args = tuple(inputs) + tuple(params)
+ return CheckpointFunction.apply(func, len(inputs), *args)
+ else:
+ return func(*inputs)
+
+
+class CheckpointFunction(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, run_function, length, *args):
+ ctx.run_function = run_function
+ ctx.input_tensors = list(args[:length])
+ ctx.input_params = list(args[length:])
+ ctx.gpu_autocast_kwargs = {
+ "enabled": torch.is_autocast_enabled(),
+ "dtype": torch.get_autocast_gpu_dtype(),
+ "cache_enabled": torch.is_autocast_cache_enabled(),
+ }
+ with torch.no_grad():
+ output_tensors = ctx.run_function(*ctx.input_tensors)
+ return output_tensors
+
+ @staticmethod
+ def backward(ctx, *output_grads):
+ ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors]
+ with torch.enable_grad(), torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs):
+ # Fixes a bug where the first op in run_function modifies the
+ # Tensor storage in place, which is not allowed for detach()'d
+ # Tensors.
+ shallow_copies = [x.view_as(x) for x in ctx.input_tensors]
+ output_tensors = ctx.run_function(*shallow_copies)
+ input_grads = torch.autograd.grad(
+ output_tensors,
+ ctx.input_tensors + ctx.input_params,
+ output_grads,
+ allow_unused=True,
+ )
+ del ctx.input_tensors
+ del ctx.input_params
+ del output_tensors
+ return (None, None) + input_grads
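+
+# Editor's sketch: `checkpoint` trades memory for compute -- activations of
+# `func` are discarded in the forward pass and recomputed during backward.
+# `params` lists tensors that `func` closes over (e.g. module weights) so
+# they still receive gradients. For a hypothetical `block`:
+#
+#   out = checkpoint(block._forward, (x, emb), block.parameters(), True)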
+
+
+def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False):
+ """
+ Create sinusoidal timestep embeddings.
+ :param timesteps: a 1-D Tensor of N indices, one per batch element.
+ These may be fractional.
+ :param dim: the dimension of the output.
+ :param max_period: controls the minimum frequency of the embeddings.
+ :return: an [N x dim] Tensor of positional embeddings.
+ """
+ if not repeat_only:
+ half = dim // 2
+ freqs = torch.exp(
+ -math.log(max_period)
+ * torch.arange(start=0, end=half, dtype=torch.float32)
+ / half
+ ).to(device=timesteps.device)
+ args = timesteps[:, None].float() * freqs[None]
+ embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
+ if dim % 2:
+ embedding = torch.cat(
+ [embedding, torch.zeros_like(embedding[:, :1])], dim=-1
+ )
+ else:
+ embedding = repeat(timesteps, "b -> b d", d=dim)
+ return embedding
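+
+# Editor's sketch: this is the standard transformer sinusoidal encoding over
+# (possibly fractional) timesteps; the first dim//2 columns are cosines, the
+# rest the matching sines.
+#
+#   emb = timestep_embedding(torch.arange(4), dim=128)   # -> (4, 128)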
+
+
+def zero_module(module):
+ """
+ Zero out the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().zero_()
+ return module
+
+
+def scale_module(module, scale):
+ """
+ Scale the parameters of a module and return it.
+ """
+ for p in module.parameters():
+ p.detach().mul_(scale)
+ return module
+
+
+def mean_flat(tensor):
+ """
+ Take the mean over all non-batch dimensions.
+ """
+ return tensor.mean(dim=list(range(1, len(tensor.shape))))
+
+
+def normalization(channels):
+ """
+ Make a standard normalization layer.
+ :param channels: number of input channels.
+ :return: an nn.Module for normalization.
+ """
+ return GroupNorm32(32, channels)
+
+
+# PyTorch 1.7 has SiLU, but we support PyTorch 1.5.
+class SiLU(nn.Module):
+ def forward(self, x):
+ return x * torch.sigmoid(x)
+
+
+class GroupNorm32(nn.GroupNorm):
+ def forward(self, x):
+ return super().forward(x.float()).type(x.dtype)
+
+
+def conv_nd(dims, *args, **kwargs):
+ """
+ Create a 1D, 2D, or 3D convolution module.
+ """
+ if dims == 1:
+ return nn.Conv1d(*args, **kwargs)
+ elif dims == 2:
+ return nn.Conv2d(*args, **kwargs)
+ elif dims == 3:
+ return nn.Conv3d(*args, **kwargs)
+ raise ValueError(f"unsupported dimensions: {dims}")
+
+
+def linear(*args, **kwargs):
+ """
+ Create a linear module.
+ """
+ return nn.Linear(*args, **kwargs)
+
+
+def avg_pool_nd(dims, *args, **kwargs):
+ """
+ Create a 1D, 2D, or 3D average pooling module.
+ """
+ if dims == 1:
+ return nn.AvgPool1d(*args, **kwargs)
+ elif dims == 2:
+ return nn.AvgPool2d(*args, **kwargs)
+ elif dims == 3:
+ return nn.AvgPool3d(*args, **kwargs)
+ raise ValueError(f"unsupported dimensions: {dims}")
+
+
+class AlphaBlender(nn.Module):
+ strategies = ["learned", "fixed", "learned_with_images"]
+
+ def __init__(
+ self,
+ alpha: float,
+ merge_strategy: str = "learned_with_images",
+ rearrange_pattern: str = "b t -> (b t) 1 1",
+ ):
+ super().__init__()
+ self.merge_strategy = merge_strategy
+ self.rearrange_pattern = rearrange_pattern
+
+ assert (
+ merge_strategy in self.strategies
+ ), f"merge_strategy needs to be in {self.strategies}"
+
+ if self.merge_strategy == "fixed":
+ self.register_buffer("mix_factor", torch.Tensor([alpha]))
+ elif (
+ self.merge_strategy == "learned"
+ or self.merge_strategy == "learned_with_images"
+ ):
+ self.register_parameter(
+ "mix_factor", torch.nn.Parameter(torch.Tensor([alpha]))
+ )
+ else:
+ raise ValueError(f"unknown merge strategy {self.merge_strategy}")
+
+ def get_alpha(self, image_only_indicator: torch.Tensor) -> torch.Tensor:
+ if self.merge_strategy == "fixed":
+ alpha = self.mix_factor
+ elif self.merge_strategy == "learned":
+ alpha = torch.sigmoid(self.mix_factor)
+ elif self.merge_strategy == "learned_with_images":
+ assert image_only_indicator is not None, "need image_only_indicator ..."
+ alpha = torch.where(
+ image_only_indicator.bool(),
+ torch.ones(1, 1, device=image_only_indicator.device),
+ rearrange(torch.sigmoid(self.mix_factor), "... -> ... 1"),
+ )
+ alpha = rearrange(alpha, self.rearrange_pattern)
+ else:
+ raise NotImplementedError
+ return alpha
+
+ def forward(
+ self,
+ x_spatial: torch.Tensor,
+ x_temporal: torch.Tensor,
+ image_only_indicator: Optional[torch.Tensor] = None,
+ ) -> torch.Tensor:
+ alpha = self.get_alpha(image_only_indicator)
+ x = (
+ alpha.to(x_spatial.dtype) * x_spatial
+ + (1.0 - alpha).to(x_spatial.dtype) * x_temporal
+ )
+ return x
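+
+# Editor's sketch: AlphaBlender gates between two branches with a scalar
+# (optionally learned) mix factor, x = alpha * x_spatial + (1 - alpha) * x_temporal.
+# With the "fixed" strategy no image_only_indicator is needed:
+#
+#   blend = AlphaBlender(alpha=0.3, merge_strategy="fixed")
+#   out = blend(x_spatial, x_temporal)   # 0.3 * spatial + 0.7 * temporal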
diff --git a/sgm/modules/diffusionmodules/video_model.py b/sgm/modules/diffusionmodules/video_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff2d077c7d0c7ed1c4a2c21f14105c266abc4926
--- /dev/null
+++ b/sgm/modules/diffusionmodules/video_model.py
@@ -0,0 +1,493 @@
+from functools import partial
+from typing import List, Optional, Union
+
+from einops import rearrange
+
+from ...modules.diffusionmodules.openaimodel import *
+from ...modules.video_attention import SpatialVideoTransformer
+from ...util import default
+from .util import AlphaBlender
+
+
+class VideoResBlock(ResBlock):
+ def __init__(
+ self,
+ channels: int,
+ emb_channels: int,
+ dropout: float,
+ video_kernel_size: Union[int, List[int]] = 3,
+ merge_strategy: str = "fixed",
+ merge_factor: float = 0.5,
+ out_channels: Optional[int] = None,
+ use_conv: bool = False,
+ use_scale_shift_norm: bool = False,
+ dims: int = 2,
+ use_checkpoint: bool = False,
+ up: bool = False,
+ down: bool = False,
+ ):
+ super().__init__(
+ channels,
+ emb_channels,
+ dropout,
+ out_channels=out_channels,
+ use_conv=use_conv,
+ use_scale_shift_norm=use_scale_shift_norm,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ up=up,
+ down=down,
+ )
+
+ self.time_stack = ResBlock(
+ default(out_channels, channels),
+ emb_channels,
+ dropout=dropout,
+ dims=3,
+ out_channels=default(out_channels, channels),
+ use_scale_shift_norm=False,
+ use_conv=False,
+ up=False,
+ down=False,
+ kernel_size=video_kernel_size,
+ use_checkpoint=use_checkpoint,
+ exchange_temb_dims=True,
+ )
+ self.time_mixer = AlphaBlender(
+ alpha=merge_factor,
+ merge_strategy=merge_strategy,
+ rearrange_pattern="b t -> b 1 t 1 1",
+ )
+
+ def forward(
+ self,
+ x: th.Tensor,
+ emb: th.Tensor,
+ num_video_frames: int,
+ image_only_indicator: Optional[th.Tensor] = None,
+ ) -> th.Tensor:
+ x = super().forward(x, emb)
+
+ x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames)
+ x = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames)
+
+ x = self.time_stack(
+ x, rearrange(emb, "(b t) ... -> b t ...", t=num_video_frames)
+ )
+ x = self.time_mixer(
+ x_spatial=x_mix, x_temporal=x, image_only_indicator=image_only_indicator
+ )
+ x = rearrange(x, "b c t h w -> (b t) c h w")
+ return x
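+
+# Editor's note: VideoResBlock factorizes space and time. The parent ResBlock
+# runs per-frame on (b*t, c, h, w); frames are then folded into a temporal
+# axis (b, c, t, h, w) for a 3D ResBlock over time, and AlphaBlender gates
+# between the purely spatial result (x_mix) and the temporally mixed one.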
+
+
+class VideoUNet(nn.Module):
+ def __init__(
+ self,
+ in_channels: int,
+ model_channels: int,
+ out_channels: int,
+ num_res_blocks: int,
+        attention_resolutions: List[int],
+ dropout: float = 0.0,
+ channel_mult: List[int] = (1, 2, 4, 8),
+ conv_resample: bool = True,
+ dims: int = 2,
+ num_classes: Optional[int] = None,
+ use_checkpoint: bool = False,
+ num_heads: int = -1,
+ num_head_channels: int = -1,
+ num_heads_upsample: int = -1,
+ use_scale_shift_norm: bool = False,
+ resblock_updown: bool = False,
+ transformer_depth: Union[List[int], int] = 1,
+ transformer_depth_middle: Optional[int] = None,
+ context_dim: Optional[int] = None,
+ time_downup: bool = False,
+ time_context_dim: Optional[int] = None,
+ extra_ff_mix_layer: bool = False,
+ use_spatial_context: bool = False,
+ merge_strategy: str = "fixed",
+ merge_factor: float = 0.5,
+ spatial_transformer_attn_type: str = "softmax",
+ video_kernel_size: Union[int, List[int]] = 3,
+ use_linear_in_transformer: bool = False,
+ adm_in_channels: Optional[int] = None,
+ disable_temporal_crossattention: bool = False,
+ max_ddpm_temb_period: int = 10000,
+ ):
+ super().__init__()
+ assert context_dim is not None
+
+ if num_heads_upsample == -1:
+ num_heads_upsample = num_heads
+
+ if num_heads == -1:
+ assert num_head_channels != -1
+
+ if num_head_channels == -1:
+ assert num_heads != -1
+
+ self.in_channels = in_channels
+ self.model_channels = model_channels
+ self.out_channels = out_channels
+ if isinstance(transformer_depth, int):
+ transformer_depth = len(channel_mult) * [transformer_depth]
+ transformer_depth_middle = default(
+ transformer_depth_middle, transformer_depth[-1]
+ )
+
+ self.num_res_blocks = num_res_blocks
+ self.attention_resolutions = attention_resolutions
+ self.dropout = dropout
+ self.channel_mult = channel_mult
+ self.conv_resample = conv_resample
+ self.num_classes = num_classes
+ self.use_checkpoint = use_checkpoint
+ self.num_heads = num_heads
+ self.num_head_channels = num_head_channels
+ self.num_heads_upsample = num_heads_upsample
+
+ time_embed_dim = model_channels * 4
+ self.time_embed = nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+
+ if self.num_classes is not None:
+ if isinstance(self.num_classes, int):
+ self.label_emb = nn.Embedding(num_classes, time_embed_dim)
+ elif self.num_classes == "continuous":
+ print("setting up linear c_adm embedding layer")
+ self.label_emb = nn.Linear(1, time_embed_dim)
+ elif self.num_classes == "timestep":
+ self.label_emb = nn.Sequential(
+ Timestep(model_channels),
+ nn.Sequential(
+ linear(model_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ ),
+ )
+
+ elif self.num_classes == "sequential":
+ assert adm_in_channels is not None
+ self.label_emb = nn.Sequential(
+ nn.Sequential(
+ linear(adm_in_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, time_embed_dim),
+ )
+ )
+ else:
+ raise ValueError()
+
+ self.input_blocks = nn.ModuleList(
+ [
+ TimestepEmbedSequential(
+ conv_nd(dims, in_channels, model_channels, 3, padding=1)
+ )
+ ]
+ )
+ self._feature_size = model_channels
+ input_block_chans = [model_channels]
+ ch = model_channels
+ ds = 1
+
+ def get_attention_layer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=1,
+ context_dim=None,
+ use_checkpoint=False,
+ disabled_sa=False,
+ ):
+ return SpatialVideoTransformer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=depth,
+ context_dim=context_dim,
+ time_context_dim=time_context_dim,
+ dropout=dropout,
+ ff_in=extra_ff_mix_layer,
+ use_spatial_context=use_spatial_context,
+ merge_strategy=merge_strategy,
+ merge_factor=merge_factor,
+ checkpoint=use_checkpoint,
+ use_linear=use_linear_in_transformer,
+ attn_mode=spatial_transformer_attn_type,
+ disable_self_attn=disabled_sa,
+ disable_temporal_crossattention=disable_temporal_crossattention,
+ max_time_embed_period=max_ddpm_temb_period,
+ )
+
+ def get_resblock(
+ merge_factor,
+ merge_strategy,
+ video_kernel_size,
+ ch,
+ time_embed_dim,
+ dropout,
+ out_ch,
+ dims,
+ use_checkpoint,
+ use_scale_shift_norm,
+ down=False,
+ up=False,
+ ):
+ return VideoResBlock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ channels=ch,
+ emb_channels=time_embed_dim,
+ dropout=dropout,
+ out_channels=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=down,
+ up=up,
+ )
+
+ for level, mult in enumerate(channel_mult):
+ for _ in range(num_res_blocks):
+ layers = [
+ get_resblock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ ch=ch,
+ time_embed_dim=time_embed_dim,
+ dropout=dropout,
+ out_ch=mult * model_channels,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = mult * model_channels
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+
+ layers.append(
+ get_attention_layer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=transformer_depth[level],
+ context_dim=context_dim,
+ use_checkpoint=use_checkpoint,
+ disabled_sa=False,
+ )
+ )
+ self.input_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+ input_block_chans.append(ch)
+ if level != len(channel_mult) - 1:
+ ds *= 2
+ out_ch = ch
+ self.input_blocks.append(
+ TimestepEmbedSequential(
+ get_resblock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ ch=ch,
+ time_embed_dim=time_embed_dim,
+ dropout=dropout,
+ out_ch=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ down=True,
+ )
+ if resblock_updown
+ else Downsample(
+ ch,
+ conv_resample,
+ dims=dims,
+ out_channels=out_ch,
+ third_down=time_downup,
+ )
+ )
+ )
+ ch = out_ch
+ input_block_chans.append(ch)
+
+ self._feature_size += ch
+
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+
+ self.middle_block = TimestepEmbedSequential(
+ get_resblock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ ch=ch,
+ time_embed_dim=time_embed_dim,
+ out_ch=None,
+ dropout=dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ get_attention_layer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=transformer_depth_middle,
+ context_dim=context_dim,
+ use_checkpoint=use_checkpoint,
+ ),
+ get_resblock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ ch=ch,
+ out_ch=None,
+ time_embed_dim=time_embed_dim,
+ dropout=dropout,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ ),
+ )
+ self._feature_size += ch
+
+ self.output_blocks = nn.ModuleList([])
+ for level, mult in list(enumerate(channel_mult))[::-1]:
+ for i in range(num_res_blocks + 1):
+ ich = input_block_chans.pop()
+ layers = [
+ get_resblock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ ch=ch + ich,
+ time_embed_dim=time_embed_dim,
+ dropout=dropout,
+ out_ch=model_channels * mult,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ )
+ ]
+ ch = model_channels * mult
+ if ds in attention_resolutions:
+ if num_head_channels == -1:
+ dim_head = ch // num_heads
+ else:
+ num_heads = ch // num_head_channels
+ dim_head = num_head_channels
+
+ layers.append(
+ get_attention_layer(
+ ch,
+ num_heads,
+ dim_head,
+ depth=transformer_depth[level],
+ context_dim=context_dim,
+ use_checkpoint=use_checkpoint,
+ disabled_sa=False,
+ )
+ )
+ if level and i == num_res_blocks:
+ out_ch = ch
+ ds //= 2
+ layers.append(
+ get_resblock(
+ merge_factor=merge_factor,
+ merge_strategy=merge_strategy,
+ video_kernel_size=video_kernel_size,
+ ch=ch,
+ time_embed_dim=time_embed_dim,
+ dropout=dropout,
+ out_ch=out_ch,
+ dims=dims,
+ use_checkpoint=use_checkpoint,
+ use_scale_shift_norm=use_scale_shift_norm,
+ up=True,
+ )
+ if resblock_updown
+ else Upsample(
+ ch,
+ conv_resample,
+ dims=dims,
+ out_channels=out_ch,
+ third_up=time_downup,
+ )
+ )
+
+ self.output_blocks.append(TimestepEmbedSequential(*layers))
+ self._feature_size += ch
+
+ self.out = nn.Sequential(
+ normalization(ch),
+ nn.SiLU(),
+ zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),
+ )
+
+ def forward(
+ self,
+ x: th.Tensor,
+ timesteps: th.Tensor,
+ context: Optional[th.Tensor] = None,
+ y: Optional[th.Tensor] = None,
+ time_context: Optional[th.Tensor] = None,
+ num_video_frames: Optional[int] = None,
+ image_only_indicator: Optional[th.Tensor] = None,
+ ):
+ assert (y is not None) == (
+ self.num_classes is not None
+ ), "must specify y if and only if the model is class-conditional -> no, relax this TODO"
+ hs = []
+ t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
+ emb = self.time_embed(t_emb)
+
+ if self.num_classes is not None:
+ assert y.shape[0] == x.shape[0]
+ emb = emb + self.label_emb(y)
+
+ h = x
+ for module in self.input_blocks:
+ h = module(
+ h,
+ emb,
+ context=context,
+ image_only_indicator=image_only_indicator,
+ time_context=time_context,
+ num_video_frames=num_video_frames,
+ )
+ hs.append(h)
+ h = self.middle_block(
+ h,
+ emb,
+ context=context,
+ image_only_indicator=image_only_indicator,
+ time_context=time_context,
+ num_video_frames=num_video_frames,
+ )
+ for module in self.output_blocks:
+ h = th.cat([h, hs.pop()], dim=1)
+ h = module(
+ h,
+ emb,
+ context=context,
+ image_only_indicator=image_only_indicator,
+ time_context=time_context,
+ num_video_frames=num_video_frames,
+ )
+ h = h.type(x.dtype)
+ return self.out(h)
diff --git a/sgm/modules/diffusionmodules/wrappers.py b/sgm/modules/diffusionmodules/wrappers.py
new file mode 100644
index 0000000000000000000000000000000000000000..37449ea63e992b9f89856f1f47c18ba68be8e334
--- /dev/null
+++ b/sgm/modules/diffusionmodules/wrappers.py
@@ -0,0 +1,34 @@
+import torch
+import torch.nn as nn
+from packaging import version
+
+OPENAIUNETWRAPPER = "sgm.modules.diffusionmodules.wrappers.OpenAIWrapper"
+
+
+class IdentityWrapper(nn.Module):
+ def __init__(self, diffusion_model, compile_model: bool = False):
+ super().__init__()
+ compile = (
+ torch.compile
+ if (version.parse(torch.__version__) >= version.parse("2.0.0"))
+ and compile_model
+ else lambda x: x
+ )
+ self.diffusion_model = compile(diffusion_model)
+
+ def forward(self, *args, **kwargs):
+ return self.diffusion_model(*args, **kwargs)
+
+
+class OpenAIWrapper(IdentityWrapper):
+ def forward(
+ self, x: torch.Tensor, t: torch.Tensor, c: dict, **kwargs
+ ) -> torch.Tensor:
+ x = torch.cat((x, c.get("concat", torch.Tensor([]).type_as(x))), dim=1)
+ return self.diffusion_model(
+ x,
+ timesteps=t,
+ context=c.get("crossattn", None),
+ y=c.get("vector", None),
+ **kwargs,
+ )
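+
+# Editor's usage sketch (assumed conditioning keys, matching those read
+# above): the wrapper adapts the conditioner's output dict to the UNet's
+# keyword interface, concatenating any "concat" conditioning along channels.
+#
+#   model = OpenAIWrapper(unet, compile_model=False)
+#   out = model(x, t, c={"crossattn": ctx, "vector": y})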
diff --git a/sgm/modules/distributions/__init__.py b/sgm/modules/distributions/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/distributions/distributions.py b/sgm/modules/distributions/distributions.py
new file mode 100644
index 0000000000000000000000000000000000000000..016be35523187ea366db9ade391fe8ee276db60b
--- /dev/null
+++ b/sgm/modules/distributions/distributions.py
@@ -0,0 +1,102 @@
+import numpy as np
+import torch
+
+
+class AbstractDistribution:
+ def sample(self):
+ raise NotImplementedError()
+
+ def mode(self):
+ raise NotImplementedError()
+
+
+class DiracDistribution(AbstractDistribution):
+ def __init__(self, value):
+ self.value = value
+
+ def sample(self):
+ return self.value
+
+ def mode(self):
+ return self.value
+
+
+class DiagonalGaussianDistribution(object):
+ def __init__(self, parameters, deterministic=False):
+ self.parameters = parameters
+ self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)
+ self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
+ self.deterministic = deterministic
+ self.std = torch.exp(0.5 * self.logvar)
+ self.var = torch.exp(self.logvar)
+ if self.deterministic:
+ self.var = self.std = torch.zeros_like(self.mean).to(
+ device=self.parameters.device
+ )
+
+ def sample(self):
+ x = self.mean + self.std * torch.randn(self.mean.shape).to(
+ device=self.parameters.device
+ )
+ return x
+
+ def kl(self, other=None):
+ if self.deterministic:
+ return torch.Tensor([0.0])
+ else:
+ if other is None:
+ return 0.5 * torch.sum(
+ torch.pow(self.mean, 2) + self.var - 1.0 - self.logvar,
+ dim=[1, 2, 3],
+ )
+ else:
+ return 0.5 * torch.sum(
+ torch.pow(self.mean - other.mean, 2) / other.var
+ + self.var / other.var
+ - 1.0
+ - self.logvar
+ + other.logvar,
+ dim=[1, 2, 3],
+ )
+
+ def nll(self, sample, dims=[1, 2, 3]):
+ if self.deterministic:
+ return torch.Tensor([0.0])
+ logtwopi = np.log(2.0 * np.pi)
+ return 0.5 * torch.sum(
+ logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var,
+ dim=dims,
+ )
+
+ def mode(self):
+ return self.mean
+
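+# Usage sketch (illustrative): `parameters` packs mean and logvar along
+# channel dim 1, so an 8-channel input yields a 4-channel Gaussian:
+#     dist = DiagonalGaussianDistribution(torch.randn(4, 8, 16, 16))
+#     z = dist.sample()   # (4, 4, 16, 16): mean + std * noise
+#     kl = dist.kl()      # (4,), KL against the standard normal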
+
+def normal_kl(mean1, logvar1, mean2, logvar2):
+ """
+ source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12
+ Compute the KL divergence between two gaussians.
+ Shapes are automatically broadcasted, so batches can be compared to
+ scalars, among other use cases.
+ """
+ tensor = None
+ for obj in (mean1, logvar1, mean2, logvar2):
+ if isinstance(obj, torch.Tensor):
+ tensor = obj
+ break
+ assert tensor is not None, "at least one argument must be a Tensor"
+
+ # Force variances to be Tensors. Broadcasting helps convert scalars to
+ # Tensors, but it does not work for torch.exp().
+ logvar1, logvar2 = [
+ x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)
+ for x in (logvar1, logvar2)
+ ]
+
+ return 0.5 * (
+ -1.0
+ + logvar2
+ - logvar1
+ + torch.exp(logvar1 - logvar2)
+ + ((mean1 - mean2) ** 2) * torch.exp(-logvar2)
+ )
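+
+# Closed form implemented by `normal_kl` (per element):
+#     KL(N(m1, v1) || N(m2, v2)) = 0.5 * (log(v2 / v1)
+#         + (v1 + (m1 - m2) ** 2) / v2 - 1)
+# Sanity check (illustrative): identical Gaussians give zero KL:
+#     normal_kl(torch.zeros(3), torch.zeros(3), 0.0, 0.0)  # -> tensor of zeros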
diff --git a/sgm/modules/ema.py b/sgm/modules/ema.py
new file mode 100644
index 0000000000000000000000000000000000000000..97b5ae2b230f89b4dba57e44c4f851478ad86f68
--- /dev/null
+++ b/sgm/modules/ema.py
@@ -0,0 +1,86 @@
+import torch
+from torch import nn
+
+
+class LitEma(nn.Module):
+ def __init__(self, model, decay=0.9999, use_num_upates=True):
+ super().__init__()
+ if decay < 0.0 or decay > 1.0:
+ raise ValueError("Decay must be between 0 and 1")
+
+ self.m_name2s_name = {}
+ self.register_buffer("decay", torch.tensor(decay, dtype=torch.float32))
+ self.register_buffer(
+ "num_updates",
+ torch.tensor(0, dtype=torch.int)
+ if use_num_upates
+ else torch.tensor(-1, dtype=torch.int),
+ )
+
+ for name, p in model.named_parameters():
+ if p.requires_grad:
+ # strip '.' since it is not allowed in buffer names
+ s_name = name.replace(".", "")
+ self.m_name2s_name.update({name: s_name})
+ self.register_buffer(s_name, p.clone().detach().data)
+
+ self.collected_params = []
+
+ def reset_num_updates(self):
+ del self.num_updates
+ self.register_buffer("num_updates", torch.tensor(0, dtype=torch.int))
+
+ def forward(self, model):
+ decay = self.decay
+
+ if self.num_updates >= 0:
+ self.num_updates += 1
+ decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates))
+
+ one_minus_decay = 1.0 - decay
+
+ with torch.no_grad():
+ m_param = dict(model.named_parameters())
+ shadow_params = dict(self.named_buffers())
+
+ for key in m_param:
+ if m_param[key].requires_grad:
+ sname = self.m_name2s_name[key]
+ shadow_params[sname] = shadow_params[sname].type_as(m_param[key])
+ shadow_params[sname].sub_(
+ one_minus_decay * (shadow_params[sname] - m_param[key])
+ )
+ else:
+ assert key not in self.m_name2s_name
+
+ def copy_to(self, model):
+ m_param = dict(model.named_parameters())
+ shadow_params = dict(self.named_buffers())
+ for key in m_param:
+ if m_param[key].requires_grad:
+ m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data)
+ else:
+ assert key not in self.m_name2s_name
+
+ def store(self, parameters):
+ """
+ Save the current parameters for restoring later.
+ Args:
+ parameters: Iterable of `torch.nn.Parameter`; the parameters to be
+ temporarily stored.
+ """
+ self.collected_params = [param.clone() for param in parameters]
+
+ def restore(self, parameters):
+ """
+ Restore the parameters stored with the `store` method.
+ Useful to validate the model with EMA parameters without affecting the
+ original optimization process. Store the parameters before the
+ `copy_to` method. After validation (or model saving), use this to
+ restore the former parameters.
+ Args:
+ parameters: Iterable of `torch.nn.Parameter`; the parameters to be
+ updated with the stored parameters.
+ """
+ for c_param, param in zip(self.collected_params, parameters):
+ param.data.copy_(c_param.data)
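+
+# Usage sketch (illustrative; `model`, `optimizer`, and the loader are assumed):
+#
+#     ema = LitEma(model)
+#     for batch in loader:
+#         ...
+#         optimizer.step()
+#         ema(model)                      # update the shadow weights
+#     ema.store(model.parameters())       # stash the current weights
+#     ema.copy_to(model)                  # evaluate with EMA weights
+#     ema.restore(model.parameters())     # back to the training weights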
diff --git a/sgm/modules/encoders/__init__.py b/sgm/modules/encoders/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/modules/encoders/modules.py b/sgm/modules/encoders/modules.py
new file mode 100644
index 0000000000000000000000000000000000000000..4cca880453697bc9a1f00090831b81d9baacd103
--- /dev/null
+++ b/sgm/modules/encoders/modules.py
@@ -0,0 +1,1787 @@
+import math
+import random
+import kiui
+from kiui.op import recenter
+import torchvision
+import torchvision.transforms.v2
+from contextlib import nullcontext
+from functools import partial
+from typing import Dict, List, Optional, Tuple, Union
+from pdb import set_trace as st
+
+import kornia
+import numpy as np
+import open_clip
+import torch
+import torch.nn as nn
+from einops import rearrange, repeat
+from omegaconf import ListConfig
+from torch.utils.checkpoint import checkpoint
+from transformers import (ByT5Tokenizer, CLIPTextModel, CLIPTokenizer,
+ T5EncoderModel, T5Tokenizer)
+
+from ...modules.autoencoding.regularizers import DiagonalGaussianRegularizer
+from ...modules.diffusionmodules.model import Encoder
+from ...modules.diffusionmodules.openaimodel import Timestep
+from ...modules.diffusionmodules.util import (extract_into_tensor,
+ make_beta_schedule)
+from ...modules.distributions.distributions import DiagonalGaussianDistribution
+from ...util import (append_dims, autocast, count_params, default,
+ disabled_train, expand_dims_like, instantiate_from_config)
+
+from dit.dit_models_xformers import CaptionEmbedder, approx_gelu, t2i_modulate
+
+
+class AbstractEmbModel(nn.Module):
+
+ def __init__(self):
+ super().__init__()
+ self._is_trainable = None
+ self._ucg_rate = None
+ self._input_key = None
+
+ @property
+ def is_trainable(self) -> bool:
+ return self._is_trainable
+
+ @property
+ def ucg_rate(self) -> Union[float, torch.Tensor]:
+ return self._ucg_rate
+
+ @property
+ def input_key(self) -> str:
+ return self._input_key
+
+ @is_trainable.setter
+ def is_trainable(self, value: bool):
+ self._is_trainable = value
+
+ @ucg_rate.setter
+ def ucg_rate(self, value: Union[float, torch.Tensor]):
+ self._ucg_rate = value
+
+ @input_key.setter
+ def input_key(self, value: str):
+ self._input_key = value
+
+ @is_trainable.deleter
+ def is_trainable(self):
+ del self._is_trainable
+
+ @ucg_rate.deleter
+ def ucg_rate(self):
+ del self._ucg_rate
+
+ @input_key.deleter
+ def input_key(self):
+ del self._input_key
+
+
+class GeneralConditioner(nn.Module):
+ OUTPUT_DIM2KEYS = {2: "vector", 3: "crossattn", 4: "concat", 5: "concat"}
+ KEY2CATDIM = {"vector": 1, "crossattn": 2, "concat": 1}
+
+ def __init__(self, emb_models: Union[List, ListConfig]):
+ super().__init__()
+ embedders = []
+ for n, embconfig in enumerate(emb_models):
+ embedder = instantiate_from_config(embconfig)
+ assert isinstance(
+ embedder, AbstractEmbModel
+ ), f"embedder model {embedder.__class__.__name__} has to inherit from AbstractEmbModel"
+ embedder.is_trainable = embconfig.get("is_trainable", False)
+ embedder.ucg_rate = embconfig.get("ucg_rate", 0.0)
+ if not embedder.is_trainable:
+ embedder.train = disabled_train
+ for param in embedder.parameters():
+ param.requires_grad = False
+ embedder.eval()
+ print(
+ f"Initialized embedder #{n}: {embedder.__class__.__name__} "
+ f"with {count_params(embedder, False)} params. Trainable: {embedder.is_trainable}"
+ )
+
+ if "input_key" in embconfig:
+ embedder.input_key = embconfig["input_key"]
+ elif "input_keys" in embconfig:
+ embedder.input_keys = embconfig["input_keys"]
+ else:
+ raise KeyError(
+ f"need either 'input_key' or 'input_keys' for embedder {embedder.__class__.__name__}"
+ )
+
+ embedder.legacy_ucg_val = embconfig.get("legacy_ucg_value", None)
+ if embedder.legacy_ucg_val is not None:
+ embedder.ucg_prng = np.random.RandomState()
+
+ embedders.append(embedder)
+ self.embedders = nn.ModuleList(embedders)
+
+ def possibly_get_ucg_val(self, embedder: AbstractEmbModel,
+ batch: Dict) -> Dict:
+ assert embedder.legacy_ucg_val is not None
+ p = embedder.ucg_rate
+ val = embedder.legacy_ucg_val
+ for i in range(len(batch[embedder.input_key])):
+ if embedder.ucg_prng.choice(2, p=[1 - p, p]):
+ batch[embedder.input_key][i] = val
+ return batch
+
+ def forward(self,
+ batch: Dict,
+ force_zero_embeddings: Optional[List] = None) -> Dict:
+ output = dict()
+ if force_zero_embeddings is None:
+ force_zero_embeddings = []
+ for embedder in self.embedders:
+ embedding_context = nullcontext if embedder.is_trainable else torch.no_grad
+ with embedding_context():
+ if hasattr(embedder, "input_key") and (embedder.input_key
+ is not None):
+ if embedder.legacy_ucg_val is not None:
+ batch = self.possibly_get_ucg_val(embedder, batch)
+ emb_out = embedder(batch[embedder.input_key])
+ elif hasattr(embedder, "input_keys"):
+ emb_out = embedder(
+ *[batch[k] for k in embedder.input_keys])
+ assert isinstance(
+ emb_out, (torch.Tensor, list, tuple)
+ ), f"encoder outputs must be tensors or a sequence, but got {type(emb_out)}"
+ if not isinstance(emb_out, (list, tuple)):
+ emb_out = [emb_out]
+ for emb in emb_out:
+ if embedder.input_key in ('caption', 'img'):
+ out_key = f'{embedder.input_key}_{self.OUTPUT_DIM2KEYS[emb.dim()]}'
+ elif emb.dim()==3 and emb.shape[-1] == 3:
+ out_key = 'fps-xyz'
+ else:
+ out_key = self.OUTPUT_DIM2KEYS[emb.dim()]
+ if embedder.ucg_rate > 0.0 and embedder.legacy_ucg_val is None:
+ emb = (expand_dims_like(
+ torch.bernoulli(
+ (1.0 - embedder.ucg_rate) *
+ torch.ones(emb.shape[0], device=emb.device)),
+ emb,
+ ) * emb)
+ if (hasattr(embedder, "input_key")
+ and embedder.input_key in force_zero_embeddings):
+ emb = torch.zeros_like(emb)
+ if out_key in output:
+ output[out_key] = torch.cat((output[out_key], emb),
+ self.KEY2CATDIM[out_key.split('_')[1]])
+ else:
+ output[out_key] = emb
+ return output
+
+ def get_unconditional_conditioning(
+ self,
+ batch_c: Dict,
+ batch_uc: Optional[Dict] = None,
+ force_uc_zero_embeddings: Optional[List[str]] = None,
+ force_cond_zero_embeddings: Optional[List[str]] = None,
+ ):
+ if force_uc_zero_embeddings is None:
+ force_uc_zero_embeddings = []
+ ucg_rates = list()
+ for embedder in self.embedders:
+ ucg_rates.append(embedder.ucg_rate)
+ embedder.ucg_rate = 0.0 # ! force no drop during inference
+ c = self(batch_c, force_cond_zero_embeddings)
+ uc = self(batch_c if batch_uc is None else batch_uc,
+ force_uc_zero_embeddings)
+
+ for embedder, rate in zip(self.embedders, ucg_rates):
+ embedder.ucg_rate = rate
+ return c, uc
+
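+# Routing sketch (illustrative): each embedder output is keyed by tensor rank
+# via OUTPUT_DIM2KEYS and concatenated along KEY2CATDIM:
+#   2-D (b, d)        -> "vector"    (cat on dim 1)
+#   3-D (b, n, d)     -> "crossattn" (cat on dim 2)
+#   4/5-D feature map -> "concat"    (cat on dim 1)
+# 'caption'/'img' inputs get prefixed keys such as "caption_crossattn", and a
+# 3-D output whose last dim is 3 is routed to 'fps-xyz' (point coordinates).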
+
+class InceptionV3(nn.Module):
+ """Wrapper around the https://github.com/mseitzer/pytorch-fid inception
+ port with an additional squeeze at the end"""
+
+ def __init__(self, normalize_input=False, **kwargs):
+ super().__init__()
+ from pytorch_fid import inception
+
+ kwargs["resize_input"] = True
+ self.model = inception.InceptionV3(normalize_input=normalize_input,
+ **kwargs)
+
+ def forward(self, inp):
+ outp = self.model(inp)
+
+ if len(outp) == 1:
+ return outp[0].squeeze()
+
+ return outp
+
+
+class IdentityEncoder(AbstractEmbModel):
+
+ def encode(self, x):
+ return x
+
+ def forward(self, x):
+ return x
+
+
+class ClassEmbedder(AbstractEmbModel):
+
+ def __init__(self, embed_dim, n_classes=1000, add_sequence_dim=False, key="cls"):
+ super().__init__()
+ self.embedding = nn.Embedding(n_classes, embed_dim)
+ self.n_classes = n_classes
+ self.add_sequence_dim = add_sequence_dim
+ self.key = key # batch key used by get_unconditional_conditioning
+
+ def forward(self, c):
+ c = self.embedding(c)
+ if self.add_sequence_dim:
+ c = c[:, None, :]
+ return c
+
+ def get_unconditional_conditioning(self, bs, device="cuda"):
+ uc_class = (
+ self.n_classes - 1
+ ) # reuse the last class index (n_classes - 1) as the unconditional (ucg) class
+ uc = torch.ones((bs, ), device=device) * uc_class
+ uc = {self.key: uc.long()}
+ return uc
+
+
+class ClassEmbedderForMultiCond(ClassEmbedder):
+
+ def forward(self, batch, key=None, disable_dropout=False):
+ out = batch
+ key = default(key, self.key)
+ islist = isinstance(batch[key], list)
+ if islist:
+ batch[key] = batch[key][0]
+ c_out = super().forward(batch[key]) # ClassEmbedder.forward takes the class tensor directly
+ out[key] = [c_out] if islist else c_out
+ return out
+
+
+class FrozenT5Embedder(AbstractEmbModel):
+ """Uses the T5 transformer encoder for text"""
+
+ def __init__(self,
+ version="google/t5-v1_1-xxl",
+ device="cuda",
+ max_length=77,
+ freeze=True
+ ): # others are google/t5-v1_1-xl and google/t5-v1_1-xxl
+ super().__init__()
+ self.tokenizer = T5Tokenizer.from_pretrained(version)
+ self.transformer = T5EncoderModel.from_pretrained(version)
+ self.device = device
+ self.max_length = max_length
+ if freeze:
+ self.freeze()
+
+ def freeze(self):
+ self.transformer = self.transformer.eval()
+
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def forward(self, text):
+ batch_encoding = self.tokenizer(
+ text,
+ truncation=True,
+ max_length=self.max_length,
+ return_length=True,
+ return_overflowing_tokens=False,
+ padding="max_length",
+ return_tensors="pt",
+ )
+ tokens = batch_encoding["input_ids"].to(self.device)
+ with torch.autocast("cuda", enabled=False):
+ outputs = self.transformer(input_ids=tokens)
+ z = outputs.last_hidden_state
+ return z
+
+ def encode(self, text):
+ return self(text)
+
+
+class FrozenByT5Embedder(AbstractEmbModel):
+ """
+ Uses the ByT5 transformer encoder for text. Is character-aware.
+ """
+
+ def __init__(self,
+ version="google/byt5-base",
+ device="cuda",
+ max_length=77,
+ freeze=True
+ ): # others are google/t5-v1_1-xl and google/t5-v1_1-xxl
+ super().__init__()
+ self.tokenizer = ByT5Tokenizer.from_pretrained(version)
+ self.transformer = T5EncoderModel.from_pretrained(version)
+ self.device = device
+ self.max_length = max_length
+ if freeze:
+ self.freeze()
+
+ def freeze(self):
+ self.transformer = self.transformer.eval()
+
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def forward(self, text):
+ batch_encoding = self.tokenizer(
+ text,
+ truncation=True,
+ max_length=self.max_length,
+ return_length=True,
+ return_overflowing_tokens=False,
+ padding="max_length",
+ return_tensors="pt",
+ )
+ tokens = batch_encoding["input_ids"].to(self.device)
+ with torch.autocast("cuda", enabled=False):
+ outputs = self.transformer(input_ids=tokens)
+ z = outputs.last_hidden_state
+ return z
+
+ def encode(self, text):
+ return self(text)
+
+
+class FrozenCLIPEmbedder(AbstractEmbModel):
+ """Uses the CLIP transformer encoder for text (from huggingface)"""
+
+ LAYERS = ["last", "pooled", "hidden"]
+
+ def __init__(
+ self,
+ version="openai/clip-vit-large-patch14",
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ layer="last",
+ layer_idx=None,
+ always_return_pooled=False,
+ ): # clip-vit-base-patch32
+ super().__init__()
+ assert layer in self.LAYERS
+ self.tokenizer = CLIPTokenizer.from_pretrained(version)
+ self.transformer = CLIPTextModel.from_pretrained(version)
+ self.device = device
+ self.max_length = max_length
+ if freeze:
+ self.freeze()
+ self.layer = layer
+ self.layer_idx = layer_idx
+ self.return_pooled = always_return_pooled
+ if layer == "hidden":
+ assert layer_idx is not None
+ assert 0 <= abs(layer_idx) <= 12
+
+ def freeze(self):
+ self.transformer = self.transformer.eval()
+
+ for param in self.parameters():
+ param.requires_grad = False
+
+ @autocast
+ def forward(self, text):
+ batch_encoding = self.tokenizer(
+ text,
+ truncation=True,
+ max_length=self.max_length,
+ return_length=True,
+ return_overflowing_tokens=False,
+ padding="max_length",
+ return_tensors="pt",
+ )
+ tokens = batch_encoding["input_ids"].to(self.device)
+ outputs = self.transformer(input_ids=tokens,
+ output_hidden_states=self.layer == "hidden")
+ if self.layer == "last":
+ z = outputs.last_hidden_state
+ elif self.layer == "pooled":
+ z = outputs.pooler_output[:, None, :]
+ else:
+ z = outputs.hidden_states[self.layer_idx]
+ if self.return_pooled:
+ return z, outputs.pooler_output
+ return z
+
+ def encode(self, text):
+ return self(text)
+
+
+class FrozenOpenCLIPEmbedder2(AbstractEmbModel):
+ """
+ Uses the OpenCLIP transformer encoder for text
+ """
+
+ LAYERS = ["pooled", "last", "penultimate"]
+
+ def __init__(
+ self,
+ arch="ViT-H-14",
+ version="laion2b_s32b_b79k",
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ layer="last",
+ always_return_pooled=False,
+ legacy=True,
+ ):
+ super().__init__()
+ assert layer in self.LAYERS
+ model, _, _ = open_clip.create_model_and_transforms(
+ arch,
+ device=torch.device("cpu"),
+ pretrained=version,
+ )
+ del model.visual
+ self.model = model
+
+ self.device = device
+ self.max_length = max_length
+ self.return_pooled = always_return_pooled
+ if freeze:
+ self.freeze()
+ self.layer = layer
+ if self.layer == "last":
+ self.layer_idx = 0
+ elif self.layer == "penultimate":
+ self.layer_idx = 1
+ else:
+ raise NotImplementedError()
+ self.legacy = legacy
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ @autocast
+ def forward(self, text):
+ tokens = open_clip.tokenize(text)
+ z = self.encode_with_transformer(tokens.to(self.device))
+ if not self.return_pooled and self.legacy:
+ return z
+ if self.return_pooled:
+ assert not self.legacy
+ return z[self.layer], z["pooled"]
+ return z[self.layer]
+
+ def encode_with_transformer(self, text):
+ x = self.model.token_embedding(text) # [batch_size, n_ctx, d_model]
+ x = x + self.model.positional_embedding
+ x = x.permute(1, 0, 2) # NLD -> LND
+ x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask)
+ if self.legacy:
+ x = x[self.layer]
+ x = self.model.ln_final(x)
+ return x
+ else:
+ # x is a dict and will stay a dict
+ o = x["last"]
+ o = self.model.ln_final(o)
+ pooled = self.pool(o, text)
+ x["pooled"] = pooled
+ return x
+
+ def pool(self, x, text):
+ # take features from the eot embedding (eot_token is the highest number in each sequence)
+ x = (x[torch.arange(x.shape[0]),
+ text.argmax(dim=-1)] @ self.model.text_projection)
+ return x
+
+ def text_transformer_forward(self, x: torch.Tensor, attn_mask=None):
+ outputs = {}
+ for i, r in enumerate(self.model.transformer.resblocks):
+ if i == len(self.model.transformer.resblocks) - 1:
+ outputs["penultimate"] = x.permute(1, 0, 2) # LND -> NLD
+ if (self.model.transformer.grad_checkpointing
+ and not torch.jit.is_scripting()):
+ x = checkpoint(r, x, attn_mask)
+ else:
+ x = r(x, attn_mask=attn_mask)
+ outputs["last"] = x.permute(1, 0, 2) # LND -> NLD
+ return outputs
+
+ def encode(self, text):
+ return self(text)
+
+
+class FrozenOpenCLIPEmbedder(AbstractEmbModel):
+ LAYERS = [
+ # "pooled",
+ "last",
+ "penultimate",
+ ]
+
+ def __init__(
+ self,
+ arch="ViT-H-14",
+ version="laion2b_s32b_b79k",
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ layer="last",
+ ):
+ super().__init__()
+ assert layer in self.LAYERS
+ model, _, _ = open_clip.create_model_and_transforms(
+ arch, device=torch.device("cpu"), pretrained=version)
+ del model.visual
+ self.model = model
+
+ self.device = device
+ self.max_length = max_length
+ if freeze:
+ self.freeze()
+ self.layer = layer
+ if self.layer == "last":
+ self.layer_idx = 0
+ elif self.layer == "penultimate":
+ self.layer_idx = 1
+ else:
+ raise NotImplementedError()
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def forward(self, text):
+ tokens = open_clip.tokenize(text)
+ z = self.encode_with_transformer(tokens.to(self.device))
+ return z
+
+ def encode_with_transformer(self, text):
+ x = self.model.token_embedding(text) # [batch_size, n_ctx, d_model]
+ x = x + self.model.positional_embedding
+ x = x.permute(1, 0, 2) # NLD -> LND
+ x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask)
+ x = x.permute(1, 0, 2) # LND -> NLD
+ x = self.model.ln_final(x)
+ return x
+
+ def text_transformer_forward(self, x: torch.Tensor, attn_mask=None):
+ for i, r in enumerate(self.model.transformer.resblocks):
+ if i == len(self.model.transformer.resblocks) - self.layer_idx:
+ break
+ if (self.model.transformer.grad_checkpointing
+ and not torch.jit.is_scripting()):
+ x = checkpoint(r, x, attn_mask)
+ else:
+ x = r(x, attn_mask=attn_mask)
+ return x
+
+ def encode(self, text):
+ return self(text)
+
+
+class FrozenOpenCLIPImageEmbedder(AbstractEmbModel):
+ """
+ Uses the OpenCLIP vision transformer encoder for images
+ """
+
+ def __init__(
+ self,
+ # arch="ViT-H-14",
+ # version="laion2b_s32b_b79k",
+ arch="ViT-L-14",
+ # version="laion2b_s32b_b82k",
+ version="openai",
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ antialias=True,
+ ucg_rate=0.0,
+ unsqueeze_dim=False,
+ repeat_to_max_len=False,
+ num_image_crops=0,
+ output_tokens=False,
+ init_device=None,
+ inp_size=224,
+ ):
+ super().__init__()
+ model, _, _ = open_clip.create_model_and_transforms(
+ arch,
+ device=torch.device(default(init_device, "cpu")),
+ pretrained=version,
+ )
+ del model.transformer
+ self.inp_size = inp_size
+ self.model = model
+ self.max_crops = num_image_crops
+ self.pad_to_max_len = self.max_crops > 0
+ self.repeat_to_max_len = repeat_to_max_len and (
+ not self.pad_to_max_len)
+ self.device = device
+ self.max_length = max_length
+ if freeze:
+ self.freeze()
+
+ self.antialias = antialias
+
+ self.register_buffer("mean",
+ torch.Tensor([0.48145466, 0.4578275, 0.40821073]),
+ persistent=False)
+ self.register_buffer("std",
+ torch.Tensor([0.26862954, 0.26130258,
+ 0.27577711]),
+ persistent=False)
+ self.ucg_rate = ucg_rate
+ self.unsqueeze_dim = unsqueeze_dim
+ self.stored_batch = None
+ self.model.visual.output_tokens = output_tokens
+ self.output_tokens = output_tokens
+ self.interpolate_offset = 0.0
+ self.patch_size = 14
+ npatch = (self.inp_size // self.patch_size) ** 2
+ # https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/configs/eval/vitl14_reg4_pretrain.yaml#L5
+ self.interpolate_antialias = True
+
+ if self.inp_size != 224:
+ self.model.visual.positional_embedding = torch.nn.Parameter(self.interpolate_pos_encoding(npatch, self.inp_size, self.inp_size) )
+
+ # https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/models/vision_transformer.py#L179
+ def interpolate_pos_encoding(self, npatch, w, h):
+ dim = self.model.visual.positional_embedding.shape[-1]
+
+ # previous_dtype = x.dtype
+ previous_dtype = torch.float32
+ # npatch = x.shape[1] - 1
+
+ pos_embed = self.model.visual.positional_embedding.float().unsqueeze(0)
+ N = pos_embed.shape[1] - 1
+
+ if npatch == N and w == h:
+ return self.model.visual.positional_embedding
+
+ class_pos_embed = pos_embed[:, 0]
+ patch_pos_embed = pos_embed[:, 1:]
+ # dim = x.shape[-1]
+ w0 = w // self.patch_size
+ h0 = h // self.patch_size
+ M = int(math.sqrt(N)) # Recover the number of patches in each dimension
+ assert N == M * M
+ kwargs = {}
+ if self.interpolate_offset:
+ # Historical kludge: add a small number to avoid floating point error in the interpolation, see https://github.com/facebookresearch/dino/issues/8
+ # Note: still needed for backward-compatibility, the underlying operators are using both output size and scale factors
+ sx = float(w0 + self.interpolate_offset) / M
+ sy = float(h0 + self.interpolate_offset) / M
+ kwargs["scale_factor"] = (sx, sy)
+ else:
+ # Simply specify an output size instead of a scale factor
+ kwargs["size"] = (w0, h0)
+ patch_pos_embed = nn.functional.interpolate(
+ patch_pos_embed.reshape(1, M, M, dim).permute(0, 3, 1, 2),
+ mode="bicubic",
+ antialias=self.interpolate_antialias,
+ **kwargs,
+ )
+ assert (w0, h0) == patch_pos_embed.shape[-2:]
+ patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)
+ return torch.cat((class_pos_embed.unsqueeze(0), patch_pos_embed), dim=1).to(previous_dtype)[0]
+
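+ # Example (illustrative): with patch_size=14, growing the input from 224 to
+ # 448 bicubically resizes the 16x16 patch-position grid to 32x32 while the
+ # class-token embedding is kept as-is:
+ #     pe = self.interpolate_pos_encoding(npatch=(448 // 14) ** 2, w=448, h=448)
+ #     # pe.shape == (1 + 32 * 32, dim)
+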
+ def preprocess(self, x):
+ # normalize to [0,1]
+ x = kornia.geometry.resize(
+ x,
+ (self.inp_size, self.inp_size),
+ interpolation="bicubic",
+ align_corners=True,
+ antialias=self.antialias,
+ )
+ x = (x + 1.0) / 2.0
+ # renormalize according to clip
+ x = kornia.enhance.normalize(x, self.mean, self.std)
+ return x
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ @autocast
+ def forward(self, image, no_dropout=False):
+ z = self.encode_with_vision_transformer(image)
+ tokens = None
+ if self.output_tokens:
+ z, tokens = z[0], z[1]
+ z = z.to(image.dtype)
+ if self.ucg_rate > 0.0 and not no_dropout and not (self.max_crops > 0):
+ z = (torch.bernoulli(
+ (1.0 - self.ucg_rate) *
+ torch.ones(z.shape[0], device=z.device))[:, None] * z)
+ if tokens is not None:
+ tokens = (expand_dims_like(
+ torch.bernoulli(
+ (1.0 - self.ucg_rate) *
+ torch.ones(tokens.shape[0], device=tokens.device)),
+ tokens,
+ ) * tokens)
+ if self.unsqueeze_dim:
+ z = z[:, None, :]
+ if self.output_tokens:
+ assert not self.repeat_to_max_len
+ assert not self.pad_to_max_len
+ return tokens, z
+ if self.repeat_to_max_len:
+ if z.dim() == 2:
+ z_ = z[:, None, :]
+ else:
+ z_ = z
+ return repeat(z_, "b 1 d -> b n d", n=self.max_length), z
+ elif self.pad_to_max_len:
+ assert z.dim() == 3
+ z_pad = torch.cat(
+ (
+ z,
+ torch.zeros(
+ z.shape[0],
+ self.max_length - z.shape[1],
+ z.shape[2],
+ device=z.device,
+ ),
+ ),
+ 1,
+ )
+ return z_pad, z_pad[:, 0, ...]
+ return z
+
+ def encode_with_vision_transformer(self, img):
+ # if self.max_crops > 0:
+ # img = self.preprocess_by_cropping(img)
+ if img.dim() == 5:
+ assert self.max_crops == img.shape[1]
+ img = rearrange(img, "b n c h w -> (b n) c h w")
+ img = self.preprocess(img)
+ if not self.output_tokens:
+ assert not self.model.visual.output_tokens
+ x = self.model.visual(img)
+ tokens = None
+ else:
+ assert self.model.visual.output_tokens
+ x, tokens = self.model.visual(img)
+ if self.max_crops > 0:
+ x = rearrange(x, "(b n) d -> b n d", n=self.max_crops)
+ # independently drop anywhere from none to all crops along the sequence axis
+ x = (torch.bernoulli(
+ (1.0 - self.ucg_rate) *
+ torch.ones(x.shape[0], x.shape[1], 1, device=x.device)) * x)
+ if tokens is not None:
+ tokens = rearrange(tokens,
+ "(b n) t d -> b t (n d)",
+ n=self.max_crops)
+ print(
+ f"You are running very experimental token-concat in {self.__class__.__name__}. "
+ f"Check what you are doing, and then remove this message.")
+ if self.output_tokens:
+ return x, tokens
+ return x
+
+ def encode(self, image):
+ return self(image)
+
+
+# dino-v2 embedder
+class FrozenDinov2ImageEmbedder(AbstractEmbModel):
+ """
+ Uses the Dino-v2 for low-level image embedding
+ """
+
+ def __init__(
+ self,
+ arch="vitl",
+ version="dinov2", # by default
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ antialias=True,
+ ucg_rate=0.0,
+ unsqueeze_dim=False,
+ repeat_to_max_len=False,
+ num_image_crops=0,
+ output_tokens=False,
+ output_cls=False,
+ init_device=None,
+ inp_size=224,
+ ):
+ super().__init__()
+
+ self.model = torch.hub.load(
+ f'facebookresearch/{version}',
+ '{}_{}{}_reg'.format(
+ version, f'{arch}', '14'
+ ), # the registers ('_reg') variant performs better; vitl and vitg are similar, and since the backbone is frozen we load the best one.
+ pretrained=True).to(torch.device(default(init_device, "cpu")))
+
+ # print(self.model)
+
+ # ! frozen
+ # self.tokenizer.requires_grad_(False)
+ # self.tokenizer.eval()
+
+ # assert freeze # add adaLN here
+ self.inp_size = inp_size
+ if freeze:
+ self.freeze()
+
+ # self.model = model
+ self.max_crops = num_image_crops
+ self.pad_to_max_len = self.max_crops > 0
+ self.repeat_to_max_len = repeat_to_max_len and (
+ not self.pad_to_max_len)
+ self.device = device
+ self.max_length = max_length
+
+ self.antialias = antialias
+
+ # https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/data/transforms.py#L41
+ IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
+ IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
+
+ self.register_buffer("mean",
+ torch.Tensor(IMAGENET_DEFAULT_MEAN),
+ persistent=False)
+ self.register_buffer("std",
+ torch.Tensor(IMAGENET_DEFAULT_STD),
+ persistent=False)
+
+ self.ucg_rate = ucg_rate
+ self.unsqueeze_dim = unsqueeze_dim
+ self.stored_batch = None
+ # self.model.visual.output_tokens = output_tokens
+ self.output_tokens = output_tokens # output
+ self.output_cls = output_cls
+ # self.output_tokens = False
+
+ def preprocess(self, x):
+ # normalize to [0,1]
+ x = kornia.geometry.resize(
+ x,
+ # (224, 224),
+ (self.inp_size, self.inp_size),
+ interpolation="bicubic",
+ align_corners=True,
+ antialias=self.antialias,
+ )
+ x = (x + 1.0) / 2.0
+ # renormalize with the ImageNet statistics registered above
+ x = kornia.enhance.normalize(x, self.mean, self.std)
+ return x
+
+ def freeze(self):
+ self.model = self.model.eval()
+ for param in self.parameters():
+ param.requires_grad = False
+
+ def _model_forward(self, *args, **kwargs):
+ return self.model(*args, **kwargs)
+
+ def encode_with_vision_transformer(self, img, **kwargs):
+ # if self.max_crops > 0:
+ # img = self.preprocess_by_cropping(img)
+ if img.dim() == 5:
+ # assert self.max_crops == img.shape[1]
+ img = rearrange(img, "b n c h w -> (b n) c h w")
+ img = self.preprocess(img)
+
+ # https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/models/vision_transformer.py#L326
+ if not self.output_cls:
+ return self._model_forward(
+ img, is_training=True,
+ **kwargs)['x_norm_patchtokens'] # to return spatial tokens
+
+ else:
+ dino_ret_dict = self._model_forward(
+ img, is_training=True) # to return spatial tokens
+ x_patchtokens, x_norm_clstoken = dino_ret_dict[
+ 'x_norm_patchtokens'], dino_ret_dict['x_norm_clstoken']
+
+ return x_norm_clstoken, x_patchtokens
+
+ @autocast
+ def forward(self, image, no_dropout=False, **kwargs):
+ tokens = self.encode_with_vision_transformer(image, **kwargs)
+ z = None
+ if self.output_cls:
+ # z, tokens = z[0], z[1]
+ z, tokens = tokens[0], tokens[1]
+ z = z.to(image.dtype)
+ tokens = tokens.to(image.dtype) # ! return spatial tokens only
+ if self.ucg_rate > 0.0 and not no_dropout and not (self.max_crops > 0):
+ if z is not None:
+ z = (torch.bernoulli(
+ (1.0 - self.ucg_rate) *
+ torch.ones(z.shape[0], device=z.device))[:, None] * z)
+ tokens = (expand_dims_like(
+ torch.bernoulli(
+ (1.0 - self.ucg_rate) *
+ torch.ones(tokens.shape[0], device=tokens.device)),
+ tokens,
+ ) * tokens)
+ if self.output_cls:
+ return tokens, z
+ else:
+ return tokens
+
+
+class FrozenDinov2ImageEmbedderMVPlucker(FrozenDinov2ImageEmbedder):
+
+ def __init__(
+ self,
+ arch="vitl",
+ version="dinov2", # by default
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ antialias=True,
+ ucg_rate=0.0,
+ unsqueeze_dim=False,
+ repeat_to_max_len=False,
+ num_image_crops=0,
+ output_tokens=False,
+ output_cls=False,
+ init_device=None,
+ # mv cond settings
+ n_cond_frames=4, # number of condition views
+ enable_bf16=False,
+ modLN=False,
+ aug_c=False,
+ inp_size=224,
+ ):
+ super().__init__(
+ arch,
+ version,
+ device,
+ max_length,
+ freeze,
+ antialias,
+ ucg_rate,
+ unsqueeze_dim,
+ repeat_to_max_len,
+ num_image_crops,
+ output_tokens,
+ output_cls,
+ init_device,
+ inp_size=inp_size,
+ )
+ self.n_cond_frames = n_cond_frames
+ self.dtype = torch.bfloat16 if enable_bf16 else torch.float32
+ self.enable_bf16 = enable_bf16
+ self.aug_c = aug_c
+
+ # ! proj c_cond to features
+
+ self.reso_encoder = inp_size
+ orig_patch_embed_weight = self.model.patch_embed.state_dict()
+
+ # ! 9-d input
+ with torch.no_grad():
+ new_patch_embed = PatchEmbed(img_size=224,
+ patch_size=14,
+ in_chans=9,
+ embed_dim=self.model.embed_dim)
+ # zero init first
+ nn.init.constant_(new_patch_embed.proj.weight, 0)
+ nn.init.constant_(new_patch_embed.proj.bias, 0)
+ # load pre-trained first 3 layers weights, bias into the new patch_embed
+
+ new_patch_embed.proj.weight[:, :3].copy_(orig_patch_embed_weight['proj.weight'])
+ new_patch_embed.proj.bias[:].copy_(orig_patch_embed_weight['proj.bias'])
+
+ self.model.patch_embed = new_patch_embed # xyz in the front
+ # self.scale_jitter_aug = torchvision.transforms.v2.ScaleJitter(target_size=(self.reso_encoder, self.reso_encoder), scale_range=(0.5, 1.5))
+
+ @autocast
+ def scale_jitter_aug(self, x):
+ inp_size = x.shape[2]
+ # aug_size = torch.randint(low=50, high=100, size=(1,)) / 100 * inp_size
+ aug_size = int(max(0.5, random.random()) * inp_size)
+ # st()
+ x = torch.nn.functional.interpolate(x,
+ size=aug_size,
+ mode='bilinear',
+ antialias=True)
+ x = torch.nn.functional.interpolate(x,size=inp_size,
+ mode='bilinear', antialias=True)
+ return x
+
+ @autocast
+ def gen_rays(self, c):
+ # Generate rays
+ intrinsics, c2w = c[16:], c[:16].reshape(4, 4)
+ self.h = self.reso_encoder
+ self.w = self.reso_encoder
+ yy, xx = torch.meshgrid(
+ torch.arange(self.h, dtype=torch.float32, device=c.device) + 0.5,
+ torch.arange(self.w, dtype=torch.float32, device=c.device) + 0.5,
+ indexing='ij')
+
+ # normalize to 0-1 pixel range
+ yy = yy / self.h
+ xx = xx / self.w
+
+ # K = np.array([f_x, 0, w / 2, 0, f_y, h / 2, 0, 0, 1]).reshape(3, 3)
+ cx, cy, fx, fy = intrinsics[2], intrinsics[5], intrinsics[
+ 0], intrinsics[4]
+ # cx *= self.w
+ # cy *= self.h
+
+ # f_x = f_y = fx * h / res_raw
+ # c2w = torch.from_numpy(c2w).float()
+ c2w = c2w.float()
+
+ xx = (xx - cx) / fx
+ yy = (yy - cy) / fy
+ zz = torch.ones_like(xx)
+ dirs = torch.stack((xx, yy, zz), dim=-1) # OpenCV convention
+ dirs /= torch.norm(dirs, dim=-1, keepdim=True)
+ dirs = dirs.reshape(-1, 3, 1)
+ del xx, yy, zz
+ # st()
+ dirs = (c2w[None, :3, :3] @ dirs)[..., 0]
+
+ origins = c2w[None, :3, 3].expand(self.h * self.w, -1).contiguous()
+ origins = origins.view(self.h, self.w, 3)
+ dirs = dirs.view(self.h, self.w, 3)
+
+ return origins, dirs
+
+ @autocast
+ def get_plucker_ray(self, c):
+ rays_plucker = []
+ for idx in range(c.shape[0]):
+ rays_o, rays_d = self.gen_rays(c[idx])
+ rays_plucker.append(
+ torch.cat([torch.cross(rays_o, rays_d, dim=-1), rays_d],
+ dim=-1).permute(2, 0, 1)) # [h, w, 6] -> 6,h,w
+ rays_plucker = torch.stack(rays_plucker, 0)
+ return rays_plucker
+
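+ # A Plucker ray encodes each pixel's ray as (o x d, d) in R^6, with origin o
+ # and unit direction d, giving one (6, H, W) map per view. Sketch
+ # (illustrative; each row of c packs a flattened 4x4 c2w followed by a 3x3
+ # intrinsics matrix, 25 values per view):
+ #     plucker = self.get_plucker_ray(c)   # (V, 6, H, W)
+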
+ @autocast
+ def _model_forward(self, x, plucker_c, *args, **kwargs):
+
+ with torch.cuda.amp.autocast(dtype=self.dtype, enabled=True):
+ x = torch.cat([x, plucker_c], dim=1).to(self.dtype)
+ return self.model(x, **kwargs)
+
+ def preprocess(self, x):
+ # add gaussian noise and rescale augmentation
+
+ if self.ucg_rate > 0.0:
+
+ # 1 means maintain the input x
+ enable_drop_flag = torch.bernoulli(
+ (1.0 - self.ucg_rate) *
+ torch.ones(x.shape[0], device=x.device))[:, None, None, None] # broadcast to B,1,1,1
+
+ # * add random downsample & upsample
+ # rescaled_x = self.downsample_upsample(x)
+ # torchvision.utils.save_image(x, 'tmp/x.png', normalize=True, value_range=(-1,1))
+ x_aug = self.scale_jitter_aug(x)
+ # torchvision.utils.save_image(x_aug, 'tmp/rescale-x.png', normalize=True, value_range=(-1,1))
+
+ # x_aug = x * enable_drop_flag + (1-enable_drop_flag) * x_aug
+
+ # * Gaussian noise jitter
+ # force linear_weight > 0.24
+ # linear_weight = torch.max(enable_drop_flag, torch.max(torch.rand_like(enable_drop_flag), 0.25 * torch.ones_like(enable_drop_flag), dim=0, keepdim=True), dim=0, keepdim=True)
+ gaussian_jitter_scale, jitter_lb = torch.rand_like(enable_drop_flag), 0.8 * torch.ones_like(enable_drop_flag)
+ gaussian_jitter_scale = torch.where(gaussian_jitter_scale>jitter_lb, gaussian_jitter_scale, jitter_lb)
+
+ # torchvision.utils.save_image(x, 'tmp/aug-x.png', normalize=True, value_range=(-1,1))
+ x_aug = gaussian_jitter_scale * x_aug + (1 - gaussian_jitter_scale) * torch.randn_like(x).clamp(-1,1)
+
+ x_aug = x * enable_drop_flag + (1-enable_drop_flag) * x_aug
+ # torchvision.utils.save_image(x_aug, 'tmp/final-x.png', normalize=True, value_range=(-1,1))
+
+ # st()
+
+ return super().preprocess(x)
+
+ def random_rotate_c(self, c):
+
+ intrinsics, c2ws = c[16:], c[:16].reshape(4, 4)
+
+ # https://github.com/TencentARC/InstantMesh/blob/34c193cc96eebd46deb7c48a76613753ad777122/src/data/objaverse.py#L195
+
+ degree = np.random.uniform(-math.pi * 0.25, math.pi * 0.25)
+
+ # random rotation along z axis
+ if random.random() > 0.5:
+ rot = torch.tensor([
+ [np.cos(degree), -np.sin(degree), 0, 0],
+ [np.sin(degree), np.cos(degree), 0, 0],
+ [0, 0, 1, 0],
+ [0, 0, 0, 1],
+ ]).to(c2ws)
+ else:
+ # random rotation along y axis
+ rot = torch.tensor([
+ [np.cos(degree), 0, np.sin(degree), 0],
+ [0, 1, 0, 0],
+ [-np.sin(degree), 0, np.cos(degree), 0],
+ [0, 0, 0, 1],
+ ]).to(c2ws)
+
+ c2ws = torch.matmul(rot, c2ws)
+
+ return torch.cat([c2ws.reshape(-1), intrinsics])
+
+ @autocast
+ def forward(self, img_c, no_dropout=False):
+
+
+ mv_image, c = img_c['img'], img_c['c']
+
+ if self.aug_c:
+ for idx_b in range(c.shape[0]):
+ for idx_v in range(c.shape[1]):
+ if random.random() > 0.8:
+ c[idx_b, idx_v] = self.random_rotate_c(c[idx_b, idx_v])
+
+ # plucker_c = self.get_plucker_ray(
+ # rearrange(c[:, 1:1 + self.n_cond_frames], "b t ... -> (b t) ..."))
+ plucker_c = self.get_plucker_ray(
+ rearrange(c[:, :self.n_cond_frames], "b t ... -> (b t) ..."))
+
+ # plucker_c = torch.ones_like(plucker_c)
+ # plucker_c = torch.zeros_like(plucker_c)
+
+ # mv_image_tokens = super().forward(mv_image[:, 1:1 + self.n_cond_frames],
+ mv_image_tokens = super().forward(mv_image[:, :self.n_cond_frames],
+ plucker_c=plucker_c,
+ no_dropout=no_dropout)
+
+ mv_image_tokens = rearrange(mv_image_tokens,
+ "(b t) ... -> b t ...",
+ t=self.n_cond_frames)
+
+ return mv_image_tokens
+
+def make_2tuple(x):
+ if isinstance(x, tuple):
+ assert len(x) == 2
+ return x
+
+ assert isinstance(x, int)
+ return (x, x)
+
+
+class PatchEmbed(nn.Module):
+ """
+ 2D image to patch embedding: (B,C,H,W) -> (B,N,D)
+
+ Args:
+ img_size: Image size.
+ patch_size: Patch token size.
+ in_chans: Number of input image channels.
+ embed_dim: Number of linear projection output channels.
+ norm_layer: Normalization layer.
+ """
+
+ def __init__(
+ self,
+ img_size: Union[int, Tuple[int, int]] = 224,
+ patch_size: Union[int, Tuple[int, int]] = 16,
+ in_chans: int = 3,
+ embed_dim: int = 768,
+ norm_layer = None,
+ flatten_embedding: bool = True,
+ ) -> None:
+ super().__init__()
+
+ image_HW = make_2tuple(img_size)
+ patch_HW = make_2tuple(patch_size)
+ patch_grid_size = (
+ image_HW[0] // patch_HW[0],
+ image_HW[1] // patch_HW[1],
+ )
+
+ self.img_size = image_HW
+ self.patch_size = patch_HW
+ self.patches_resolution = patch_grid_size
+ self.num_patches = patch_grid_size[0] * patch_grid_size[1]
+
+ self.in_chans = in_chans
+ self.embed_dim = embed_dim
+
+ self.flatten_embedding = flatten_embedding
+
+ self.proj = nn.Conv2d(in_chans,
+ embed_dim,
+ kernel_size=patch_HW,
+ stride=patch_HW)
+ self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity()
+
+ def forward(self, x):
+ _, _, H, W = x.shape
+ patch_H, patch_W = self.patch_size
+
+ assert H % patch_H == 0, f"Input image height {H} is not a multiple of patch height {patch_H}"
+ assert W % patch_W == 0, f"Input image width {W} is not a multiple of patch width {patch_W}"
+
+ x = self.proj(x) # B C H W
+ H, W = x.size(2), x.size(3)
+ x = x.flatten(2).transpose(1, 2) # B HW C
+ x = self.norm(x)
+ if not self.flatten_embedding:
+ x = x.reshape(-1, H, W, self.embed_dim) # B H W C
+ return x
+
+ def flops(self) -> float:
+ Ho, Wo = self.patches_resolution
+ flops = Ho * Wo * self.embed_dim * self.in_chans * (
+ self.patch_size[0] * self.patch_size[1])
+ if self.norm is not None:
+ flops += Ho * Wo * self.embed_dim
+ return flops
+
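+# Example (illustrative): a 224x224 input with patch_size=14 yields a
+# (224 / 14) ** 2 = 256-token sequence:
+#     pe = PatchEmbed(img_size=224, patch_size=14, in_chans=9, embed_dim=1024)
+#     tokens = pe(torch.randn(1, 9, 224, 224))   # -> (1, 256, 1024)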
+
+class FrozenDinov2ImageEmbedderMV(FrozenDinov2ImageEmbedder):
+
+ def __init__(
+ self,
+ arch="vitl",
+ version="dinov2", # by default
+ device="cuda",
+ max_length=77,
+ freeze=True,
+ antialias=True,
+ ucg_rate=0.0,
+ unsqueeze_dim=False,
+ repeat_to_max_len=False,
+ num_image_crops=0,
+ output_tokens=False,
+ output_cls=False,
+ init_device=None,
+ # mv cond settings
+ n_cond_frames=4, # number of condition views
+ enable_bf16=False,
+ modLN=False,
+ inp_size=224,
+ ):
+ super().__init__(
+ arch,
+ version,
+ device,
+ max_length,
+ freeze,
+ antialias,
+ ucg_rate,
+ unsqueeze_dim,
+ repeat_to_max_len,
+ num_image_crops,
+ output_tokens,
+ output_cls,
+ init_device,
+ inp_size=inp_size,
+ )
+ self.n_cond_frames = n_cond_frames
+ self.dtype = torch.bfloat16 if enable_bf16 else torch.float32
+ self.enable_bf16 = enable_bf16
+
+ # ! proj c_cond to features
+
+ hidden_size = self.model.embed_dim # 768 for vit-b
+
+ # self.cam_proj = CaptionEmbedder(16, hidden_size,
+ self.cam_proj = CaptionEmbedder(25, hidden_size, act_layer=approx_gelu)
+
+ # ! single-modLN
+ self.model.modLN_modulation = nn.Sequential(
+ nn.SiLU(), nn.Linear(hidden_size, 4 * hidden_size, bias=True))
+
+ # zero-init modLN
+ nn.init.constant_(self.model.modLN_modulation[-1].weight, 0)
+ nn.init.constant_(self.model.modLN_modulation[-1].bias, 0)
+
+ # inject modLN to dino block
+ for block in self.model.blocks:
+ block.scale_shift_table = nn.Parameter(torch.zeros(
+ 4, hidden_size)) # zero init also
+
+ # torch.randn(4, hidden_size) / hidden_size**0.5)
+
+ def _model_forward(self, x, *args, **kwargs):
+ # redefine the model forward so the DINOv2 backbone is finetuned with modLN
+ assert self.training
+
+ # ? how to send in camera
+ # c = 0 # placeholder
+ # ret = self.model.forward_features(*args, **kwargs)
+
+ with torch.cuda.amp.autocast(dtype=self.dtype, enabled=True):
+
+ x = self.model.prepare_tokens_with_masks(x, masks=None)
+
+ B, N, C = x.shape
+ # TODO how to send in c
+ # c = torch.ones(B, 25).to(x) # placeholder
+ c = kwargs.get('c')
+ c = self.cam_proj(c)
+ cond = self.model.modLN_modulation(c)
+
+ # https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/layers/block.py#L89
+ for blk in self.model.blocks: # inject modLN
+
+ shift_msa, scale_msa, shift_mlp, scale_mlp = (
+ blk.scale_shift_table[None] +
+ cond.reshape(B, 4, -1)).chunk(4, dim=1)
+
+ def attn_residual_func(x: torch.Tensor) -> torch.Tensor:
+ # return blk.ls1(blk.attn(blk.norm1(x), attn_bias=attn_bias))
+ return blk.ls1(
+ blk.attn(
+ t2i_modulate(blk.norm1(x), shift_msa, scale_msa)))
+
+ def ffn_residual_func(x: torch.Tensor) -> torch.Tensor:
+ # return blk.ls2(blk.mlp(blk.norm2(x)))
+ return blk.ls2(
+ t2i_modulate(blk.mlp(blk.norm2(x)), shift_mlp,
+ scale_mlp))
+
+ x = x + blk.drop_path1(
+ attn_residual_func(x)) # all drop_path identity() here.
+ x = x + blk.drop_path2(ffn_residual_func(x))
+
+ x_norm = self.model.norm(x)
+
+ return {
+ "x_norm_clstoken": x_norm[:, 0],
+ # "x_norm_regtokens": x_norm[:, 1 : self.model.num_register_tokens + 1],
+ "x_norm_patchtokens": x_norm[:,
+ self.model.num_register_tokens + 1:],
+ # "x_prenorm": x,
+ # "masks": masks,
+ }
+
+ @autocast
+ def forward(self, img_c, no_dropout=False):
+
+ # if self.enable_bf16:
+ # with th.cuda.amp.autocast(dtype=self.dtype,
+ # enabled=True):
+ # mv_image = super().forward(mv_image[:, 1:1+self.n_cond_frames].to(torch.bf16))
+ # else:
+ mv_image, c = img_c['img'], img_c['c']
+
+ # ! use zero c here for the ablation; the current version is wrong.
+ # c = torch.zeros_like(c)
+
+ # ! frame-0 as canonical here.
+
+ mv_image = super().forward(mv_image[:, :self.n_cond_frames],
+ c=rearrange(c[:, :self.n_cond_frames],
+ "b t ... -> (b t) ...",
+ t=self.n_cond_frames),
+ no_dropout=no_dropout)
+
+ mv_image = rearrange(mv_image,
+ "(b t) ... -> b t ...",
+ t=self.n_cond_frames)
+
+ return mv_image
+
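+# modLN sketch (illustrative; assumes the PixArt-style definition
+# t2i_modulate(x, shift, scale) = x * (1 + scale) + shift): because the
+# modulation MLP and scale_shift_table above are zero-initialised, every block
+# starts out as the plain pre-trained DINOv2 block:
+#     shift = scale = torch.zeros(1, 1, hidden_size)
+#     assert torch.allclose(t2i_modulate(x, shift, scale), x)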
+
+class FrozenCLIPT5Encoder(AbstractEmbModel):
+
+ def __init__(
+ self,
+ clip_version="openai/clip-vit-large-patch14",
+ t5_version="google/t5-v1_1-xl",
+ device="cuda",
+ clip_max_length=77,
+ t5_max_length=77,
+ ):
+ super().__init__()
+ self.clip_encoder = FrozenCLIPEmbedder(clip_version,
+ device,
+ max_length=clip_max_length)
+ self.t5_encoder = FrozenT5Embedder(t5_version,
+ device,
+ max_length=t5_max_length)
+ print(
+ f"{self.clip_encoder.__class__.__name__} has {count_params(self.clip_encoder) * 1.e-6:.2f} M parameters, "
+ f"{self.t5_encoder.__class__.__name__} comes with {count_params(self.t5_encoder) * 1.e-6:.2f} M params."
+ )
+
+ def encode(self, text):
+ return self(text)
+
+ def forward(self, text):
+ clip_z = self.clip_encoder.encode(text)
+ t5_z = self.t5_encoder.encode(text)
+ return [clip_z, t5_z]
+
+
+class SpatialRescaler(nn.Module):
+
+ def __init__(
+ self,
+ n_stages=1,
+ method="bilinear",
+ multiplier=0.5,
+ in_channels=3,
+ out_channels=None,
+ bias=False,
+ wrap_video=False,
+ kernel_size=1,
+ remap_output=False,
+ ):
+ super().__init__()
+ self.n_stages = n_stages
+ assert self.n_stages >= 0
+ assert method in [
+ "nearest",
+ "linear",
+ "bilinear",
+ "trilinear",
+ "bicubic",
+ "area",
+ ]
+ self.multiplier = multiplier
+ self.interpolator = partial(torch.nn.functional.interpolate,
+ mode=method)
+ self.remap_output = out_channels is not None or remap_output
+ if self.remap_output:
+ print(
+ f"Spatial Rescaler mapping from {in_channels} to {out_channels} channels after resizing."
+ )
+ self.channel_mapper = nn.Conv2d(
+ in_channels,
+ out_channels,
+ kernel_size=kernel_size,
+ bias=bias,
+ padding=kernel_size // 2,
+ )
+ self.wrap_video = wrap_video
+
+ def forward(self, x):
+ if self.wrap_video and x.ndim == 5:
+ B, C, T, H, W = x.shape
+ x = rearrange(x, "b c t h w -> b t c h w")
+ x = rearrange(x, "b t c h w -> (b t) c h w")
+
+ for stage in range(self.n_stages):
+ x = self.interpolator(x, scale_factor=self.multiplier)
+
+ if self.wrap_video:
+ x = rearrange(x, "(b t) c h w -> b t c h w", b=B, t=T, c=C)
+ x = rearrange(x, "b t c h w -> b c t h w")
+ if self.remap_output:
+ x = self.channel_mapper(x)
+ return x
+
+ def encode(self, x):
+ return self(x)
+
+
+class LowScaleEncoder(nn.Module):
+
+ def __init__(
+ self,
+ model_config,
+ linear_start,
+ linear_end,
+ timesteps=1000,
+ max_noise_level=250,
+ output_size=64,
+ scale_factor=1.0,
+ ):
+ super().__init__()
+ self.max_noise_level = max_noise_level
+ self.model = instantiate_from_config(model_config)
+ self.register_schedule( # registers the schedule buffers in place; returns None
+ timesteps=timesteps,
+ linear_start=linear_start,
+ linear_end=linear_end)
+ self.out_size = output_size
+ self.scale_factor = scale_factor
+
+ def register_schedule(
+ self,
+ beta_schedule="linear",
+ timesteps=1000,
+ linear_start=1e-4,
+ linear_end=2e-2,
+ cosine_s=8e-3,
+ ):
+ betas = make_beta_schedule(
+ beta_schedule,
+ timesteps,
+ linear_start=linear_start,
+ linear_end=linear_end,
+ cosine_s=cosine_s,
+ )
+ alphas = 1.0 - betas
+ alphas_cumprod = np.cumprod(alphas, axis=0)
+ alphas_cumprod_prev = np.append(1.0, alphas_cumprod[:-1])
+
+ (timesteps, ) = betas.shape
+ self.num_timesteps = int(timesteps)
+ self.linear_start = linear_start
+ self.linear_end = linear_end
+ assert (alphas_cumprod.shape[0] == self.num_timesteps
+ ), "alphas have to be defined for each timestep"
+
+ to_torch = partial(torch.tensor, dtype=torch.float32)
+
+ self.register_buffer("betas", to_torch(betas))
+ self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod))
+ self.register_buffer("alphas_cumprod_prev",
+ to_torch(alphas_cumprod_prev))
+
+ # calculations for diffusion q(x_t | x_{t-1}) and others
+ self.register_buffer("sqrt_alphas_cumprod",
+ to_torch(np.sqrt(alphas_cumprod)))
+ self.register_buffer("sqrt_one_minus_alphas_cumprod",
+ to_torch(np.sqrt(1.0 - alphas_cumprod)))
+ self.register_buffer("log_one_minus_alphas_cumprod",
+ to_torch(np.log(1.0 - alphas_cumprod)))
+ self.register_buffer("sqrt_recip_alphas_cumprod",
+ to_torch(np.sqrt(1.0 / alphas_cumprod)))
+ self.register_buffer("sqrt_recipm1_alphas_cumprod",
+ to_torch(np.sqrt(1.0 / alphas_cumprod - 1)))
+
+ def q_sample(self, x_start, t, noise=None):
+ noise = default(noise, lambda: torch.randn_like(x_start))
+ return (
+ extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) *
+ x_start + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod,
+ t, x_start.shape) * noise)
+
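+ # Forward noising implemented by `q_sample` (illustrative):
+ #     q(x_t | x_0) = sqrt(alphabar_t) * x_0 + sqrt(1 - alphabar_t) * eps,
+ # with eps ~ N(0, I) and alphabar_t the cumulative product of (1 - beta_t).
+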
+ def forward(self, x):
+ z = self.model.encode(x)
+ if isinstance(z, DiagonalGaussianDistribution):
+ z = z.sample()
+ z = z * self.scale_factor
+ noise_level = torch.randint(0,
+ self.max_noise_level, (x.shape[0], ),
+ device=x.device).long()
+ z = self.q_sample(z, noise_level)
+ if self.out_size is not None:
+ z = torch.nn.functional.interpolate(z,
+ size=self.out_size,
+ mode="nearest")
+ return z, noise_level
+
+ def decode(self, z):
+ z = z / self.scale_factor
+ return self.model.decode(z)
+
+
+class ConcatTimestepEmbedderND(AbstractEmbModel):
+ """embeds each dimension independently and concatenates them"""
+
+ def __init__(self, outdim):
+ super().__init__()
+ self.timestep = Timestep(outdim)
+ self.outdim = outdim
+
+ def forward(self, x):
+ if x.ndim == 1:
+ x = x[:, None]
+ assert len(x.shape) == 2
+ b, dims = x.shape[0], x.shape[1]
+ x = rearrange(x, "b d -> (b d)")
+ emb = self.timestep(x)
+ emb = rearrange(emb,
+ "(b d) d2 -> b (d d2)",
+ b=b,
+ d=dims,
+ d2=self.outdim)
+ return emb
+
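+# Example (illustrative): each scalar dimension gets its own sinusoidal
+# embedding, concatenated per sample:
+#     emb = ConcatTimestepEmbedderND(outdim=256)
+#     out = emb(torch.tensor([[512.0, 512.0]]))   # -> (1, 2 * 256)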
+
+class GaussianEncoder(Encoder, AbstractEmbModel):
+
+ def __init__(self,
+ weight: float = 1.0,
+ flatten_output: bool = True,
+ *args,
+ **kwargs):
+ super().__init__(*args, **kwargs)
+ self.posterior = DiagonalGaussianRegularizer()
+ self.weight = weight
+ self.flatten_output = flatten_output
+
+ def forward(self, x) -> Tuple[Dict, torch.Tensor]:
+ z = super().forward(x)
+ z, log = self.posterior(z)
+ log["loss"] = log["kl_loss"]
+ log["weight"] = self.weight
+ if self.flatten_output:
+ z = rearrange(z, "b c h w -> b (h w ) c")
+ return log, z
+
+
+class VideoPredictionEmbedderWithEncoder(AbstractEmbModel):
+
+ def __init__(
+ self,
+ n_cond_frames: int,
+ n_copies: int,
+ encoder_config: dict,
+ sigma_sampler_config: Optional[dict] = None,
+ sigma_cond_config: Optional[dict] = None,
+ is_ae: bool = False,
+ scale_factor: float = 1.0,
+ disable_encoder_autocast: bool = False,
+ en_and_decode_n_samples_a_time: Optional[int] = None,
+ ):
+ super().__init__()
+
+ self.n_cond_frames = n_cond_frames
+ self.n_copies = n_copies
+ self.encoder = instantiate_from_config(encoder_config)
+ self.sigma_sampler = (instantiate_from_config(sigma_sampler_config)
+ if sigma_sampler_config is not None else None)
+ self.sigma_cond = (instantiate_from_config(sigma_cond_config)
+ if sigma_cond_config is not None else None)
+ self.is_ae = is_ae
+ self.scale_factor = scale_factor
+ self.disable_encoder_autocast = disable_encoder_autocast
+ self.en_and_decode_n_samples_a_time = en_and_decode_n_samples_a_time
+
+ def forward(
+ self, vid: torch.Tensor
+ ) -> Union[
+ torch.Tensor,
+ Tuple[torch.Tensor, torch.Tensor],
+ Tuple[torch.Tensor, dict],
+ Tuple[Tuple[torch.Tensor, torch.Tensor], dict],
+ ]:
+ if self.sigma_sampler is not None:
+ b = vid.shape[0] // self.n_cond_frames
+ sigmas = self.sigma_sampler(b).to(vid.device)
+ if self.sigma_cond is not None:
+ sigma_cond = self.sigma_cond(sigmas)
+ sigma_cond = repeat(sigma_cond,
+ "b d -> (b t) d",
+ t=self.n_copies)
+ sigmas = repeat(sigmas, "b -> (b t)", t=self.n_cond_frames)
+ noise = torch.randn_like(vid)
+ vid = vid + noise * append_dims(sigmas, vid.ndim)
+
+ with torch.autocast("cuda", enabled=not self.disable_encoder_autocast):
+ n_samples = (self.en_and_decode_n_samples_a_time
+ if self.en_and_decode_n_samples_a_time is not None
+ else vid.shape[0])
+ n_rounds = math.ceil(vid.shape[0] / n_samples)
+ all_out = []
+ for n in range(n_rounds):
+ if self.is_ae:
+ out = self.encoder.encode(vid[n * n_samples:(n + 1) *
+ n_samples])
+ else:
+ out = self.encoder(vid[n * n_samples:(n + 1) * n_samples])
+ all_out.append(out)
+
+ vid = torch.cat(all_out, dim=0)
+ vid *= self.scale_factor
+
+ vid = rearrange(vid,
+ "(b t) c h w -> b () (t c) h w",
+ t=self.n_cond_frames)
+ vid = repeat(vid, "b 1 c h w -> (b t) c h w", t=self.n_copies)
+
+ return_val = (vid, sigma_cond) if self.sigma_cond is not None else vid
+
+ return return_val
+
+
+class FrozenOpenCLIPImagePredictionEmbedder(AbstractEmbModel):
+
+ def __init__(
+ self,
+ open_clip_embedding_config: Dict,
+ n_cond_frames: int,
+ n_copies: int,
+ ):
+ super().__init__()
+
+ self.n_cond_frames = n_cond_frames
+ self.n_copies = n_copies
+ self.open_clip = instantiate_from_config(open_clip_embedding_config)
+
+ def forward(self, vid):
+ vid = self.open_clip(vid)
+ vid = rearrange(vid, "(b t) d -> b t d", t=self.n_cond_frames)
+ vid = repeat(vid, "b t d -> (b s) t d", s=self.n_copies)
+
+ return vid
+
+
+class FrozenOpenCLIPImageMVEmbedder(AbstractEmbModel):
+ # for multi-view 3D diffusion conditioning; only extracts the first frame
+ def __init__(
+ self,
+ open_clip_embedding_config: Dict,
+ # n_cond_frames: int,
+ # n_copies: int,
+ ):
+ super().__init__()
+
+ # self.n_cond_frames = n_cond_frames
+ # self.n_copies = n_copies
+ self.open_clip = instantiate_from_config(open_clip_embedding_config)
+
+ def forward(self, vid, no_dropout=False):
+ # st()
+ vid = self.open_clip(vid[:, 0, ...], no_dropout=no_dropout)
+ # vid = rearrange(vid, "(b t) d -> b t d", t=self.n_cond_frames)
+ # vid = repeat(vid, "b t d -> (b s) t d", s=self.n_copies)
+
+ return vid
+
+# process PCD
+
+# raw scaling
+class PCD_Scaler(AbstractEmbModel):
+ """
+ Scale the input point cloud by a fixed factor.
+ TODO: optionally perturb with random noise.
+ """
+
+ def __init__(
+ self,
+ scaling_factor=0.45,
+ perturb_pcd_scale=0.0,
+ ):
+ super().__init__()
+ self.scaling_factor = scaling_factor
+ self.perturb_pcd_scale = perturb_pcd_scale
+
+ @autocast
+ def forward(self, pcd, **kwargs):
+ if self.perturb_pcd_scale > 0:
+ t = torch.rand(pcd.shape[0], 1, 1).to(pcd) * self.perturb_pcd_scale
+ pcd = pcd + t * torch.randn_like(pcd)
+ pcd = pcd.clip(-0.45, 0.45) # avoid scaling xyz too large.
+ pcd = pcd / self.scaling_factor
+ return pcd
+
+
+
+# raw scaling
+class PCD_Scaler_perChannel(AbstractEmbModel):
+ """
+ scale the input pcd to unit std
+ """
+
+ def __init__(
+ self,
+ scaling_factor=[0.14593576, 0.15753542, 0.18873914],
+ ):
+ super().__init__()
+ self.scaling_factor = np.array(scaling_factor)
+
+ @autocast
+ def forward(self, pcd, **kwargs):
+ return pcd / self.scaling_factor
\ No newline at end of file
diff --git a/sgm/modules/video_attention.py b/sgm/modules/video_attention.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d7bd095e3e1c197a3baff21b010bea553c4fa1c
--- /dev/null
+++ b/sgm/modules/video_attention.py
@@ -0,0 +1,302 @@
+import torch
+
+from ..modules.attention import *
+from ..modules.diffusionmodules.util import (AlphaBlender, linear,
+ timestep_embedding)
+
+
+class TimeMixSequential(nn.Sequential):
+ def forward(self, x, context=None, timesteps=None):
+ for layer in self:
+ x = layer(x, context, timesteps)
+
+ return x
+
+
+class VideoTransformerBlock(nn.Module):
+ ATTENTION_MODES = {
+ "softmax": CrossAttention,
+ "softmax-xformers": MemoryEfficientCrossAttention,
+ }
+
+ def __init__(
+ self,
+ dim,
+ n_heads,
+ d_head,
+ dropout=0.0,
+ context_dim=None,
+ gated_ff=True,
+ checkpoint=True,
+ timesteps=None,
+ ff_in=False,
+ inner_dim=None,
+ attn_mode="softmax",
+ disable_self_attn=False,
+ disable_temporal_crossattention=False,
+ switch_temporal_ca_to_sa=False,
+ ):
+ super().__init__()
+
+ attn_cls = self.ATTENTION_MODES[attn_mode]
+
+ self.ff_in = ff_in or inner_dim is not None
+ if inner_dim is None:
+ inner_dim = dim
+
+ assert int(n_heads * d_head) == inner_dim
+
+ self.is_res = inner_dim == dim
+
+ if self.ff_in:
+ self.norm_in = nn.LayerNorm(dim)
+ self.ff_in = FeedForward(
+ dim, dim_out=inner_dim, dropout=dropout, glu=gated_ff
+ )
+
+ self.timesteps = timesteps
+ self.disable_self_attn = disable_self_attn
+ if self.disable_self_attn:
+ self.attn1 = attn_cls(
+ query_dim=inner_dim,
+ heads=n_heads,
+ dim_head=d_head,
+ context_dim=context_dim,
+ dropout=dropout,
+ ) # is a cross-attention
+ else:
+ self.attn1 = attn_cls(
+ query_dim=inner_dim, heads=n_heads, dim_head=d_head, dropout=dropout
+ ) # is a self-attention
+
+ self.ff = FeedForward(inner_dim, dim_out=dim, dropout=dropout, glu=gated_ff)
+
+ if disable_temporal_crossattention:
+            if switch_temporal_ca_to_sa:
+                raise ValueError(
+                    "disable_temporal_crossattention and switch_temporal_ca_to_sa "
+                    "are mutually exclusive")
+ else:
+ self.attn2 = None
+ else:
+ self.norm2 = nn.LayerNorm(inner_dim)
+ if switch_temporal_ca_to_sa:
+ self.attn2 = attn_cls(
+ query_dim=inner_dim, heads=n_heads, dim_head=d_head, dropout=dropout
+ ) # is a self-attention
+ else:
+ self.attn2 = attn_cls(
+ query_dim=inner_dim,
+ context_dim=context_dim,
+ heads=n_heads,
+ dim_head=d_head,
+ dropout=dropout,
+ ) # is self-attn if context is none
+
+ self.norm1 = nn.LayerNorm(inner_dim)
+ self.norm3 = nn.LayerNorm(inner_dim)
+ self.switch_temporal_ca_to_sa = switch_temporal_ca_to_sa
+
+ self.checkpoint = checkpoint
+ if self.checkpoint:
+ print(f"{self.__class__.__name__} is using checkpointing")
+
+ def forward(
+        self, x: torch.Tensor, context: Optional[torch.Tensor] = None, timesteps: Optional[int] = None
+ ) -> torch.Tensor:
+ if self.checkpoint:
+ return checkpoint(self._forward, x, context, timesteps)
+ else:
+ return self._forward(x, context, timesteps=timesteps)
+
+ def _forward(self, x, context=None, timesteps=None):
+ assert self.timesteps or timesteps
+ assert not (self.timesteps and timesteps) or self.timesteps == timesteps
+ timesteps = self.timesteps or timesteps
+ B, S, C = x.shape
+ x = rearrange(x, "(b t) s c -> (b s) t c", t=timesteps)
+
+ if self.ff_in:
+ x_skip = x
+ x = self.ff_in(self.norm_in(x))
+ if self.is_res:
+ x += x_skip
+
+ if self.disable_self_attn:
+ x = self.attn1(self.norm1(x), context=context) + x
+ else:
+ x = self.attn1(self.norm1(x)) + x
+
+ if self.attn2 is not None:
+ if self.switch_temporal_ca_to_sa:
+ x = self.attn2(self.norm2(x)) + x
+ else:
+ x = self.attn2(self.norm2(x), context=context) + x
+ x_skip = x
+ x = self.ff(self.norm3(x))
+ if self.is_res:
+ x += x_skip
+
+ x = rearrange(
+ x, "(b s) t c -> (b t) s c", s=S, b=B // timesteps, c=C, t=timesteps
+ )
+ return x
+
+ def get_last_layer(self):
+ return self.ff.net[-1].weight
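+
+# Instantiation sketch (hypothetical sizes, not from a config):
+#   blk = VideoTransformerBlock(dim=320, n_heads=5, d_head=64,
+#                               timesteps=8, checkpoint=False)
+#   x = torch.randn(2 * 8, 64, 320)   # (b t) s c with b=2, t=8
+#   y = blk(x)                        # same shape, mixed along the time axis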
+
+
+class SpatialVideoTransformer(SpatialTransformer):
+ def __init__(
+ self,
+ in_channels,
+ n_heads,
+ d_head,
+ depth=1,
+ dropout=0.0,
+ use_linear=False,
+ context_dim=None,
+ use_spatial_context=False,
+ timesteps=None,
+ merge_strategy: str = "fixed",
+ merge_factor: float = 0.5,
+ time_context_dim=None,
+ ff_in=False,
+ checkpoint=False,
+ time_depth=1,
+ attn_mode="softmax",
+ disable_self_attn=False,
+ disable_temporal_crossattention=False,
+ max_time_embed_period: int = 10000,
+ ):
+ super().__init__(
+ in_channels,
+ n_heads,
+ d_head,
+ depth=depth,
+ dropout=dropout,
+ attn_type=attn_mode,
+ use_checkpoint=checkpoint,
+ context_dim=context_dim,
+ use_linear=use_linear,
+ disable_self_attn=disable_self_attn,
+ )
+ self.time_depth = time_depth
+ self.depth = depth
+ self.max_time_embed_period = max_time_embed_period
+
+ time_mix_d_head = d_head
+ n_time_mix_heads = n_heads
+
+ time_mix_inner_dim = int(time_mix_d_head * n_time_mix_heads)
+
+ inner_dim = n_heads * d_head
+ if use_spatial_context:
+ time_context_dim = context_dim
+
+ self.time_stack = nn.ModuleList(
+ [
+ VideoTransformerBlock(
+ inner_dim,
+ n_time_mix_heads,
+ time_mix_d_head,
+ dropout=dropout,
+ context_dim=time_context_dim,
+ timesteps=timesteps,
+ checkpoint=checkpoint,
+ ff_in=ff_in,
+ inner_dim=time_mix_inner_dim,
+ attn_mode=attn_mode,
+ disable_self_attn=disable_self_attn,
+ disable_temporal_crossattention=disable_temporal_crossattention,
+ )
+ for _ in range(self.depth)
+ ]
+ )
+
+ assert len(self.time_stack) == len(self.transformer_blocks)
+
+ self.use_spatial_context = use_spatial_context
+ self.in_channels = in_channels
+
+ time_embed_dim = self.in_channels * 4
+ self.time_pos_embed = nn.Sequential(
+ linear(self.in_channels, time_embed_dim),
+ nn.SiLU(),
+ linear(time_embed_dim, self.in_channels),
+ )
+
+ self.time_mixer = AlphaBlender(
+ alpha=merge_factor, merge_strategy=merge_strategy
+ )
+
+ def forward(
+ self,
+ x: torch.Tensor,
+ context: Optional[torch.Tensor] = None,
+ time_context: Optional[torch.Tensor] = None,
+ timesteps: Optional[int] = None,
+ image_only_indicator: Optional[torch.Tensor] = None,
+ ) -> torch.Tensor:
+ _, _, h, w = x.shape
+ x_in = x
+ spatial_context = None
+ if exists(context):
+ spatial_context = context
+
+ if self.use_spatial_context:
+ assert (
+ context.ndim == 3
+ ), f"n dims of spatial context should be 3 but are {context.ndim}"
+
+ time_context = context
+ time_context_first_timestep = time_context[::timesteps]
+ time_context = repeat(
+ time_context_first_timestep, "b ... -> (b n) ...", n=h * w
+ )
+ elif time_context is not None and not self.use_spatial_context:
+ time_context = repeat(time_context, "b ... -> (b n) ...", n=h * w)
+ if time_context.ndim == 2:
+ time_context = rearrange(time_context, "b c -> b 1 c")
+
+ x = self.norm(x)
+ if not self.use_linear:
+ x = self.proj_in(x)
+ x = rearrange(x, "b c h w -> b (h w) c")
+ if self.use_linear:
+ x = self.proj_in(x)
+
+ num_frames = torch.arange(timesteps, device=x.device)
+ num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps)
+ num_frames = rearrange(num_frames, "b t -> (b t)")
+ t_emb = timestep_embedding(
+ num_frames,
+ self.in_channels,
+ repeat_only=False,
+ max_period=self.max_time_embed_period,
+ )
+ emb = self.time_pos_embed(t_emb)
+ emb = emb[:, None, :]
+
+ for it_, (block, mix_block) in enumerate(
+ zip(self.transformer_blocks, self.time_stack)
+ ):
+ x = block(
+ x,
+ context=spatial_context,
+ )
+
+ x_mix = x
+ x_mix = x_mix + emb
+
+ x_mix = mix_block(x_mix, context=time_context, timesteps=timesteps)
+ x = self.time_mixer(
+ x_spatial=x,
+ x_temporal=x_mix,
+ image_only_indicator=image_only_indicator,
+ )
+ if self.use_linear:
+ x = self.proj_out(x)
+ x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w)
+ if not self.use_linear:
+ x = self.proj_out(x)
+ out = x + x_in
+ return out
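+
+# Per depth level, a spatial block and a temporal mix block run back to back;
+# AlphaBlender then fuses them, roughly out = alpha * x_spatial +
+# (1 - alpha) * x_temporal, with alpha fixed or learned per merge_strategy.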
diff --git a/sgm/sampling_utils/__init__.py b/sgm/sampling_utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..71e6591b99a458a239dda8fc553708ad8da57263
--- /dev/null
+++ b/sgm/sampling_utils/__init__.py
@@ -0,0 +1 @@
+# Originally from generative-models/scripts; utilities for sampling.
\ No newline at end of file
diff --git a/sgm/sampling_utils/demo/__init__.py b/sgm/sampling_utils/demo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sgm/sampling_utils/demo/detect.py b/sgm/sampling_utils/demo/detect.py
new file mode 100644
index 0000000000000000000000000000000000000000..96e9f212b868e60602b0b1880a0f3d7c66e16703
--- /dev/null
+++ b/sgm/sampling_utils/demo/detect.py
@@ -0,0 +1,156 @@
+import argparse
+
+import cv2
+import numpy as np
+
+try:
+ from imwatermark import WatermarkDecoder
+except ImportError as e:
+ try:
+        # Assume some other dependencies (such as torch) are missing:
+        # import the module file directly without loading unnecessary libraries.
+ import importlib.util
+ import sys
+
+ spec = importlib.util.find_spec("imwatermark.maxDct")
+ assert spec is not None
+ maxDct = importlib.util.module_from_spec(spec)
+ sys.modules["maxDct"] = maxDct
+ spec.loader.exec_module(maxDct)
+
+ class WatermarkDecoder(object):
+ """A minimal version of
+ https://github.com/ShieldMnt/invisible-watermark/blob/main/imwatermark/watermark.py
+ to only reconstruct bits using dwtDct"""
+
+ def __init__(self, wm_type="bytes", length=0):
+ assert wm_type == "bits", "Only bits defined in minimal import"
+ self._wmType = wm_type
+ self._wmLen = length
+
+ def reconstruct(self, bits):
+ if len(bits) != self._wmLen:
+ raise RuntimeError("bits are not matched with watermark length")
+
+ return bits
+
+ def decode(self, cv2Image, method="dwtDct", **configs):
+ (r, c, channels) = cv2Image.shape
+ if r * c < 256 * 256:
+ raise RuntimeError("image too small, should be larger than 256x256")
+
+ bits = []
+ assert method == "dwtDct"
+ embed = maxDct.EmbedMaxDct(watermarks=[], wmLen=self._wmLen, **configs)
+ bits = embed.decode(cv2Image)
+ return self.reconstruct(bits)
+
+    except Exception:
+ raise e
+
+
+# A fixed 48-bit message that was chosen at random
+# WATERMARK_MESSAGE = 0xB3EC907BB19E
+WATERMARK_MESSAGE = 0b101100111110110010010000011110111011000110011110
+# bin(x)[2:] gives the bits of x as a str; int() converts each one to 0/1
+WATERMARK_BITS = [int(bit) for bit in bin(WATERMARK_MESSAGE)[2:]]
+MATCH_VALUES = [
+ [27, "No watermark detected"],
+ [33, "Partial watermark match. Cannot determine with certainty."],
+ [
+ 35,
+ (
+ "Likely watermarked. In our test 0.02% of real images were "
+ 'falsely detected as "Likely watermarked"'
+ ),
+ ],
+ [
+ 49,
+ (
+ "Very likely watermarked. In our test no real images were "
+ 'falsely detected as "Very likely watermarked"'
+ ),
+ ],
+]
+
+
+class GetWatermarkMatch:
+ def __init__(self, watermark):
+ self.watermark = watermark
+ self.num_bits = len(self.watermark)
+ self.decoder = WatermarkDecoder("bits", self.num_bits)
+
+ def __call__(self, x: np.ndarray) -> np.ndarray:
+ """
+        Detects the number of bits matching the predefined watermark in one
+        or multiple images. Images should be in cv2 format, i.e. h x w x c, BGR.
+
+ Args:
+ x: ([B], h w, c) in range [0, 255]
+
+ Returns:
+ number of matched bits ([B],)
+ """
+ squeeze = len(x.shape) == 3
+ if squeeze:
+ x = x[None, ...]
+
+ bs = x.shape[0]
+ detected = np.empty((bs, self.num_bits), dtype=bool)
+ for k in range(bs):
+ detected[k] = self.decoder.decode(x[k], "dwtDct")
+ result = np.sum(detected == self.watermark, axis=-1)
+ if squeeze:
+ return result[0]
+ else:
+ return result
+
+
+get_watermark_match = GetWatermarkMatch(WATERMARK_BITS)
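+
+# Usage sketch: pass a single (h, w, c) BGR uint8 image or a batch of them;
+# the return value is the number of bits (out of 48) matching WATERMARK_BITS:
+#   img = cv2.imread("some_image.png")  # hypothetical path
+#   n_matched = get_watermark_match(img)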
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "filename",
+ nargs="+",
+ type=str,
+ help="Image files to check for watermarks",
+ )
+ opts = parser.parse_args()
+
+ print(
+ """
+ This script tries to detect watermarked images. Please be aware of
+ the following:
+ - As the watermark is supposed to be invisible, there is the risk that
+ watermarked images may not be detected.
+ - To maximize the chance of detection make sure that the image has the same
+ dimensions as when the watermark was applied (most likely 1024x1024
+ or 512x512).
+ - Specific image manipulation may drastically decrease the chance that
+ watermarks can be detected.
+ - There is also the chance that an image has the characteristics of the
+ watermark by chance.
+ - The watermark script is public, anybody may watermark any images, and
+ could therefore claim it to be generated.
+ - All numbers below are based on a test using 10,000 images without any
+ modifications after applying the watermark.
+ """
+ )
+
+ for fn in opts.filename:
+ image = cv2.imread(fn)
+ if image is None:
+ print(f"Couldn't read {fn}. Skipping")
+ continue
+
+ num_bits = get_watermark_match(image)
+ k = 0
+ while num_bits > MATCH_VALUES[k][0]:
+ k += 1
+ print(
+ f"{fn}: {MATCH_VALUES[k][1]}",
+ f"Bits that matched the watermark {num_bits} from {len(WATERMARK_BITS)}\n",
+ sep="\n\t",
+ )
diff --git a/sgm/sampling_utils/demo/discretization.py b/sgm/sampling_utils/demo/discretization.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7030a229692e25ddf8d2f516aef2aba0e6149b9
--- /dev/null
+++ b/sgm/sampling_utils/demo/discretization.py
@@ -0,0 +1,59 @@
+import torch
+
+from sgm.modules.diffusionmodules.discretizer import Discretization
+
+
+class Img2ImgDiscretizationWrapper:
+ """
+ wraps a discretizer, and prunes the sigmas
+ params:
+ strength: float between 0.0 and 1.0. 1.0 means full sampling (all sigmas are returned)
+ """
+
+ def __init__(self, discretization: Discretization, strength: float = 1.0):
+ self.discretization = discretization
+ self.strength = strength
+ assert 0.0 <= self.strength <= 1.0
+
+    def __call__(self, *args, **kwargs):
+        # sigmas start large and then decrease
+        sigmas = self.discretization(*args, **kwargs)
+        print("sigmas after discretization, before img2img pruning:", sigmas)
+        sigmas = torch.flip(sigmas, (0,))
+        prune_index = max(int(self.strength * len(sigmas)), 1)
+        sigmas = sigmas[:prune_index]
+        print("prune index:", prune_index)
+        sigmas = torch.flip(sigmas, (0,))
+        print("sigmas after pruning:", sigmas)
+ return sigmas
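+
+# Example: with 40 sigmas and strength=0.5, the 20 smallest sigmas are kept,
+# i.e. only the low-noise second half of the schedule is run; this is the
+# usual img2img behaviour of denoising from a partially noised input.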
+
+
+class Txt2NoisyDiscretizationWrapper:
+ """
+ wraps a discretizer, and prunes the sigmas
+ params:
+ strength: float between 0.0 and 1.0. 0.0 means full sampling (all sigmas are returned)
+ """
+
+ def __init__(
+ self, discretization: Discretization, strength: float = 0.0, original_steps=None
+ ):
+ self.discretization = discretization
+ self.strength = strength
+ self.original_steps = original_steps
+ assert 0.0 <= self.strength <= 1.0
+
+    def __call__(self, *args, **kwargs):
+        # sigmas start large and then decrease
+        sigmas = self.discretization(*args, **kwargs)
+        print("sigmas after discretization, before txt2noisy pruning:", sigmas)
+        sigmas = torch.flip(sigmas, (0,))
+        if self.original_steps is None:
+            steps = len(sigmas)
+        else:
+            steps = self.original_steps + 1
+        prune_index = max(min(int(self.strength * steps) - 1, steps - 1), 0)
+        sigmas = sigmas[prune_index:]
+        print("prune index:", prune_index)
+        sigmas = torch.flip(sigmas, (0,))
+        print("sigmas after pruning:", sigmas)
+        return sigmas
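+
+# Example: with strength=0.15 and original_steps=40, steps=41 and the 5
+# smallest sigmas are dropped, so the first stage stops denoising early and
+# hands a still-noisy latent to the refiner (which is wrapped with
+# Img2ImgDiscretizationWrapper to finish it).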
diff --git a/sgm/sampling_utils/demo/gradio_app.py b/sgm/sampling_utils/demo/gradio_app.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab7c9d302d8ac5d18acda713a1814b3bcbc457d9
--- /dev/null
+++ b/sgm/sampling_utils/demo/gradio_app.py
@@ -0,0 +1,308 @@
+# Add this at the very top of app.py to make the 'generative-models' directory discoverable
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "generative-models"))
+
+import math
+import random
+import uuid
+from glob import glob
+from pathlib import Path
+from typing import Optional
+
+import cv2
+import gradio as gr
+import numpy as np
+import torch
+from einops import rearrange, repeat
+from fire import Fire
+from huggingface_hub import hf_hub_download
+from omegaconf import OmegaConf
+from PIL import Image
+from torchvision.transforms import ToTensor
+
+from scripts.sampling.simple_video_sample import (
+ get_batch, get_unique_embedder_keys_from_conditioner, load_model)
+from scripts.util.detection.nsfw_and_watermark_dectection import \
+ DeepFloydDataFiltering
+from sgm.inference.helpers import embed_watermark
+from sgm.util import default, instantiate_from_config
+
+# To download all svd models
+# hf_hub_download(repo_id="stabilityai/stable-video-diffusion-img2vid-xt", filename="svd_xt.safetensors", local_dir="checkpoints")
+# hf_hub_download(repo_id="stabilityai/stable-video-diffusion-img2vid", filename="svd.safetensors", local_dir="checkpoints")
+# hf_hub_download(repo_id="stabilityai/stable-video-diffusion-img2vid-xt-1-1", filename="svd_xt_1_1.safetensors", local_dir="checkpoints")
+
+
+# Define the repo, local directory and filename
+repo_id = "stabilityai/stable-video-diffusion-img2vid-xt-1-1" # replace with "stabilityai/stable-video-diffusion-img2vid-xt" or "stabilityai/stable-video-diffusion-img2vid" for other models
+filename = "svd_xt_1_1.safetensors" # replace with "svd_xt.safetensors" or "svd.safetensors" for other models
+local_dir = "checkpoints"
+local_file_path = os.path.join(local_dir, filename)
+
+# Check if the file already exists
+if not os.path.exists(local_file_path):
+ # If the file doesn't exist, download it
+ hf_hub_download(repo_id=repo_id, filename=filename, local_dir=local_dir)
+ print("File downloaded.")
+else:
+ print("File already exists. No need to download.")
+
+
+version = "svd_xt_1_1" # replace with 'svd_xt' or 'svd' for other models
+device = "cuda"
+max_64_bit_int = 2**63 - 1
+
+if version == "svd_xt_1_1":
+ num_frames = 25
+ num_steps = 30
+ model_config = "scripts/sampling/configs/svd_xt_1_1.yaml"
+else:
+ raise ValueError(f"Version {version} does not exist.")
+
+model, filter = load_model(
+ model_config,
+ device,
+ num_frames,
+ num_steps,
+)
+
+
+def sample(
+ input_path: str = "assets/test_image.png", # Can either be image file or folder with image files
+ seed: Optional[int] = None,
+ randomize_seed: bool = True,
+ motion_bucket_id: int = 127,
+ fps_id: int = 6,
+ version: str = "svd_xt_1_1",
+ cond_aug: float = 0.02,
+ decoding_t: int = 7, # Number of frames decoded at a time! This eats most VRAM. Reduce if necessary.
+ device: str = "cuda",
+ output_folder: str = "outputs",
+ progress=gr.Progress(track_tqdm=True),
+):
+ """
+ Simple script to generate a single sample conditioned on an image `input_path` or multiple images, one for each
+ image file in folder `input_path`. If you run out of VRAM, try decreasing `decoding_t`.
+ """
+    fps_id = int(fps_id)  # cast float slider values to int
+ if randomize_seed:
+ seed = random.randint(0, max_64_bit_int)
+
+ torch.manual_seed(seed)
+
+ path = Path(input_path)
+ all_img_paths = []
+ if path.is_file():
+ if any([input_path.endswith(x) for x in ["jpg", "jpeg", "png"]]):
+ all_img_paths = [input_path]
+ else:
+ raise ValueError("Path is not valid image file.")
+ elif path.is_dir():
+ all_img_paths = sorted(
+ [
+ f
+ for f in path.iterdir()
+ if f.is_file() and f.suffix.lower() in [".jpg", ".jpeg", ".png"]
+ ]
+ )
+ if len(all_img_paths) == 0:
+ raise ValueError("Folder does not contain any images.")
+ else:
+        raise ValueError("Path is neither an image file nor a directory.")
+
+ for input_img_path in all_img_paths:
+ with Image.open(input_img_path) as image:
+ if image.mode == "RGBA":
+ image = image.convert("RGB")
+ w, h = image.size
+
+ if h % 64 != 0 or w % 64 != 0:
+ width, height = map(lambda x: x - x % 64, (w, h))
+ image = image.resize((width, height))
+ print(
+ f"WARNING: Your image is of size {h}x{w} which is not divisible by 64. We are resizing to {height}x{width}!"
+ )
+
+ image = ToTensor()(image)
+ image = image * 2.0 - 1.0
+
+ image = image.unsqueeze(0).to(device)
+ H, W = image.shape[2:]
+ assert image.shape[1] == 3
+ F = 8
+ C = 4
+ shape = (num_frames, C, H // F, W // F)
+ if (H, W) != (576, 1024):
+ print(
+ "WARNING: The conditioning frame you provided is not 576x1024. This leads to suboptimal performance as model was only trained on 576x1024. Consider increasing `cond_aug`."
+ )
+ if motion_bucket_id > 255:
+ print(
+ "WARNING: High motion bucket! This may lead to suboptimal performance."
+ )
+
+ if fps_id < 5:
+ print("WARNING: Small fps value! This may lead to suboptimal performance.")
+
+ if fps_id > 30:
+ print("WARNING: Large fps value! This may lead to suboptimal performance.")
+
+ value_dict = {}
+ value_dict["motion_bucket_id"] = motion_bucket_id
+ value_dict["fps_id"] = fps_id
+ value_dict["cond_aug"] = cond_aug
+ value_dict["cond_frames_without_noise"] = image
+ value_dict["cond_frames"] = image + cond_aug * torch.randn_like(image)
+ value_dict["cond_aug"] = cond_aug
+
+ with torch.no_grad():
+ with torch.autocast(device):
+ batch, batch_uc = get_batch(
+ get_unique_embedder_keys_from_conditioner(model.conditioner),
+ value_dict,
+ [1, num_frames],
+ T=num_frames,
+ device=device,
+ )
+ c, uc = model.conditioner.get_unconditional_conditioning(
+ batch,
+ batch_uc=batch_uc,
+ force_uc_zero_embeddings=[
+ "cond_frames",
+ "cond_frames_without_noise",
+ ],
+ )
+
+ for k in ["crossattn", "concat"]:
+ uc[k] = repeat(uc[k], "b ... -> b t ...", t=num_frames)
+ uc[k] = rearrange(uc[k], "b t ... -> (b t) ...", t=num_frames)
+ c[k] = repeat(c[k], "b ... -> b t ...", t=num_frames)
+ c[k] = rearrange(c[k], "b t ... -> (b t) ...", t=num_frames)
+
+ randn = torch.randn(shape, device=device)
+
+ additional_model_inputs = {}
+ additional_model_inputs["image_only_indicator"] = torch.zeros(
+ 2, num_frames
+ ).to(device)
+ additional_model_inputs["num_video_frames"] = batch["num_video_frames"]
+
+ def denoiser(input, sigma, c):
+ return model.denoiser(
+ model.model, input, sigma, c, **additional_model_inputs
+ )
+
+ samples_z = model.sampler(denoiser, randn, cond=c, uc=uc)
+ model.en_and_decode_n_samples_a_time = decoding_t
+ samples_x = model.decode_first_stage(samples_z)
+ samples = torch.clamp((samples_x + 1.0) / 2.0, min=0.0, max=1.0)
+
+ os.makedirs(output_folder, exist_ok=True)
+ base_count = len(glob(os.path.join(output_folder, "*.mp4")))
+ video_path = os.path.join(output_folder, f"{base_count:06d}.mp4")
+ writer = cv2.VideoWriter(
+ video_path,
+ cv2.VideoWriter_fourcc(*"mp4v"),
+ fps_id + 1,
+ (samples.shape[-1], samples.shape[-2]),
+ )
+
+ samples = embed_watermark(samples)
+ samples = filter(samples)
+ vid = (
+ (rearrange(samples, "t c h w -> t h w c") * 255)
+ .cpu()
+ .numpy()
+ .astype(np.uint8)
+ )
+ for frame in vid:
+ frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
+ writer.write(frame)
+ writer.release()
+
+ return video_path, seed
+
+
+def resize_image(image_path, output_size=(1024, 576)):
+ image = Image.open(image_path)
+ # Calculate aspect ratios
+ target_aspect = output_size[0] / output_size[1] # Aspect ratio of the desired size
+ image_aspect = image.width / image.height # Aspect ratio of the original image
+
+ # Resize then crop if the original image is larger
+ if image_aspect > target_aspect:
+ # Resize the image to match the target height, maintaining aspect ratio
+ new_height = output_size[1]
+ new_width = int(new_height * image_aspect)
+ resized_image = image.resize((new_width, new_height), Image.LANCZOS)
+ # Calculate coordinates for cropping
+ left = (new_width - output_size[0]) / 2
+ top = 0
+ right = (new_width + output_size[0]) / 2
+ bottom = output_size[1]
+ else:
+ # Resize the image to match the target width, maintaining aspect ratio
+ new_width = output_size[0]
+ new_height = int(new_width / image_aspect)
+ resized_image = image.resize((new_width, new_height), Image.LANCZOS)
+ # Calculate coordinates for cropping
+ left = 0
+ top = (new_height - output_size[1]) / 2
+ right = output_size[0]
+ bottom = (new_height + output_size[1]) / 2
+
+ # Crop the image
+ cropped_image = resized_image.crop((left, top, right, bottom))
+
+ return cropped_image
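+
+# e.g. resize_image("assets/test_image.png") yields a 1024x576 PIL image:
+# the source is resized along its limiting dimension to preserve aspect
+# ratio, then center-cropped to the target size.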
+
+
+with gr.Blocks() as demo:
+ gr.Markdown(
+ """# Community demo for Stable Video Diffusion - Img2Vid - XT ([model](https://huggingface.co/stabilityai/stable-video-diffusion-img2vid-xt), [paper](https://stability.ai/research/stable-video-diffusion-scaling-latent-video-diffusion-models-to-large-datasets))
+#### Research release ([_non-commercial_](https://huggingface.co/stabilityai/stable-video-diffusion-img2vid-xt/blob/main/LICENSE)): generate a `4s` video (`25 frames` at `6 fps`) from a single image. Generation takes ~60s on an A100. [Join the waitlist for Stability's upcoming web experience](https://stability.ai/contact).
+ """
+ )
+ with gr.Row():
+ with gr.Column():
+ image = gr.Image(label="Upload your image", type="filepath")
+ generate_btn = gr.Button("Generate")
+ video = gr.Video()
+ with gr.Accordion("Advanced options", open=False):
+ seed = gr.Slider(
+ label="Seed",
+ value=42,
+ randomize=True,
+ minimum=0,
+ maximum=max_64_bit_int,
+ step=1,
+ )
+ randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+ motion_bucket_id = gr.Slider(
+ label="Motion bucket id",
+ info="Controls how much motion to add/remove from the image",
+ value=127,
+ minimum=1,
+ maximum=255,
+ )
+ fps_id = gr.Slider(
+ label="Frames per second",
+ info="The length of your video in seconds will be 25/fps",
+ value=6,
+ minimum=5,
+ maximum=30,
+ )
+
+ image.upload(fn=resize_image, inputs=image, outputs=image, queue=False)
+ generate_btn.click(
+ fn=sample,
+ inputs=[image, seed, randomize_seed, motion_bucket_id, fps_id],
+ outputs=[video, seed],
+ api_name="video",
+ )
+
+if __name__ == "__main__":
+ demo.queue(max_size=20)
+ demo.launch(share=True)
diff --git a/sgm/sampling_utils/demo/sampling.py b/sgm/sampling_utils/demo/sampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..46c20048176aeb0db96114f95d760091a46d49cd
--- /dev/null
+++ b/sgm/sampling_utils/demo/sampling.py
@@ -0,0 +1,364 @@
+from pytorch_lightning import seed_everything
+
+from scripts.demo.streamlit_helpers import *
+
+SAVE_PATH = "outputs/demo/txt2img/"
+
+SD_XL_BASE_RATIOS = {
+ "0.5": (704, 1408),
+ "0.52": (704, 1344),
+ "0.57": (768, 1344),
+ "0.6": (768, 1280),
+ "0.68": (832, 1216),
+ "0.72": (832, 1152),
+ "0.78": (896, 1152),
+ "0.82": (896, 1088),
+ "0.88": (960, 1088),
+ "0.94": (960, 1024),
+ "1.0": (1024, 1024),
+ "1.07": (1024, 960),
+ "1.13": (1088, 960),
+ "1.21": (1088, 896),
+ "1.29": (1152, 896),
+ "1.38": (1152, 832),
+ "1.46": (1216, 832),
+ "1.67": (1280, 768),
+ "1.75": (1344, 768),
+ "1.91": (1344, 704),
+ "2.0": (1408, 704),
+ "2.09": (1472, 704),
+ "2.4": (1536, 640),
+ "2.5": (1600, 640),
+ "2.89": (1664, 576),
+ "3.0": (1728, 576),
+}
+
+VERSION2SPECS = {
+ "SDXL-base-1.0": {
+ "H": 1024,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "is_legacy": False,
+ "config": "configs/inference/sd_xl_base.yaml",
+ "ckpt": "checkpoints/sd_xl_base_1.0.safetensors",
+ },
+ "SDXL-base-0.9": {
+ "H": 1024,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "is_legacy": False,
+ "config": "configs/inference/sd_xl_base.yaml",
+ "ckpt": "checkpoints/sd_xl_base_0.9.safetensors",
+ },
+ "SD-2.1": {
+ "H": 512,
+ "W": 512,
+ "C": 4,
+ "f": 8,
+ "is_legacy": True,
+ "config": "configs/inference/sd_2_1.yaml",
+ "ckpt": "checkpoints/v2-1_512-ema-pruned.safetensors",
+ },
+ "SD-2.1-768": {
+ "H": 768,
+ "W": 768,
+ "C": 4,
+ "f": 8,
+ "is_legacy": True,
+ "config": "configs/inference/sd_2_1_768.yaml",
+ "ckpt": "checkpoints/v2-1_768-ema-pruned.safetensors",
+ },
+ "SDXL-refiner-0.9": {
+ "H": 1024,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "is_legacy": True,
+ "config": "configs/inference/sd_xl_refiner.yaml",
+ "ckpt": "checkpoints/sd_xl_refiner_0.9.safetensors",
+ },
+ "SDXL-refiner-1.0": {
+ "H": 1024,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "is_legacy": True,
+ "config": "configs/inference/sd_xl_refiner.yaml",
+ "ckpt": "checkpoints/sd_xl_refiner_1.0.safetensors",
+ },
+}
+
+
+def load_img(display=True, key=None, device="cuda"):
+ image = get_interactive_image(key=key)
+ if image is None:
+ return None
+ if display:
+ st.image(image)
+ w, h = image.size
+ print(f"loaded input image of size ({w}, {h})")
+ width, height = map(
+ lambda x: x - x % 64, (w, h)
+ ) # resize to integer multiple of 64
+ image = image.resize((width, height))
+ image = np.array(image.convert("RGB"))
+ image = image[None].transpose(0, 3, 1, 2)
+ image = torch.from_numpy(image).to(dtype=torch.float32) / 127.5 - 1.0
+ return image.to(device)
+
+
+def run_txt2img(
+ state,
+ version,
+ version_dict,
+ is_legacy=False,
+ return_latents=False,
+ filter=None,
+ stage2strength=None,
+):
+ if version.startswith("SDXL-base"):
+ W, H = st.selectbox("Resolution:", list(SD_XL_BASE_RATIOS.values()), 10)
+ else:
+ H = st.number_input("H", value=version_dict["H"], min_value=64, max_value=2048)
+ W = st.number_input("W", value=version_dict["W"], min_value=64, max_value=2048)
+ C = version_dict["C"]
+ F = version_dict["f"]
+
+ init_dict = {
+ "orig_width": W,
+ "orig_height": H,
+ "target_width": W,
+ "target_height": H,
+ }
+ value_dict = init_embedder_options(
+ get_unique_embedder_keys_from_conditioner(state["model"].conditioner),
+ init_dict,
+ prompt=prompt,
+ negative_prompt=negative_prompt,
+ )
+ sampler, num_rows, num_cols = init_sampling(stage2strength=stage2strength)
+ num_samples = num_rows * num_cols
+
+ if st.button("Sample"):
+ st.write(f"**Model I:** {version}")
+ out = do_sample(
+ state["model"],
+ sampler,
+ value_dict,
+ num_samples,
+ H,
+ W,
+ C,
+ F,
+ force_uc_zero_embeddings=["txt"] if not is_legacy else [],
+ return_latents=return_latents,
+ filter=filter,
+ )
+ return out
+
+
+def run_img2img(
+ state,
+ version_dict,
+ is_legacy=False,
+ return_latents=False,
+ filter=None,
+ stage2strength=None,
+):
+ img = load_img()
+ if img is None:
+ return None
+ H, W = img.shape[2], img.shape[3]
+
+ init_dict = {
+ "orig_width": W,
+ "orig_height": H,
+ "target_width": W,
+ "target_height": H,
+ }
+ value_dict = init_embedder_options(
+ get_unique_embedder_keys_from_conditioner(state["model"].conditioner),
+ init_dict,
+ prompt=prompt,
+ negative_prompt=negative_prompt,
+ )
+ strength = st.number_input(
+ "**Img2Img Strength**", value=0.75, min_value=0.0, max_value=1.0
+ )
+ sampler, num_rows, num_cols = init_sampling(
+ img2img_strength=strength,
+ stage2strength=stage2strength,
+ )
+ num_samples = num_rows * num_cols
+
+ if st.button("Sample"):
+ out = do_img2img(
+ repeat(img, "1 ... -> n ...", n=num_samples),
+ state["model"],
+ sampler,
+ value_dict,
+ num_samples,
+ force_uc_zero_embeddings=["txt"] if not is_legacy else [],
+ return_latents=return_latents,
+ filter=filter,
+ )
+ return out
+
+
+def apply_refiner(
+ input,
+ state,
+ sampler,
+ num_samples,
+ prompt,
+ negative_prompt,
+ filter=None,
+ finish_denoising=False,
+):
+ init_dict = {
+ "orig_width": input.shape[3] * 8,
+ "orig_height": input.shape[2] * 8,
+ "target_width": input.shape[3] * 8,
+ "target_height": input.shape[2] * 8,
+ }
+
+ value_dict = init_dict
+ value_dict["prompt"] = prompt
+ value_dict["negative_prompt"] = negative_prompt
+
+ value_dict["crop_coords_top"] = 0
+ value_dict["crop_coords_left"] = 0
+
+ value_dict["aesthetic_score"] = 6.0
+ value_dict["negative_aesthetic_score"] = 2.5
+
+ st.warning(f"refiner input shape: {input.shape}")
+ samples = do_img2img(
+ input,
+ state["model"],
+ sampler,
+ value_dict,
+ num_samples,
+ skip_encode=True,
+ filter=filter,
+ add_noise=not finish_denoising,
+ )
+
+ return samples
+
+
+if __name__ == "__main__":
+ st.title("Stable Diffusion")
+ version = st.selectbox("Model Version", list(VERSION2SPECS.keys()), 0)
+ version_dict = VERSION2SPECS[version]
+ if st.checkbox("Load Model"):
+ mode = st.radio("Mode", ("txt2img", "img2img"), 0)
+ else:
+ mode = "skip"
+ st.write("__________________________")
+
+ set_lowvram_mode(st.checkbox("Low vram mode", True))
+
+ if version.startswith("SDXL-base"):
+ add_pipeline = st.checkbox("Load SDXL-refiner?", False)
+ st.write("__________________________")
+ else:
+ add_pipeline = False
+
+ seed = st.sidebar.number_input("seed", value=42, min_value=0, max_value=int(1e9))
+ seed_everything(seed)
+
+ save_locally, save_path = init_save_locally(os.path.join(SAVE_PATH, version))
+
+ if mode != "skip":
+ state = init_st(version_dict, load_filter=True)
+ if state["msg"]:
+ st.info(state["msg"])
+ model = state["model"]
+
+ is_legacy = version_dict["is_legacy"]
+
+ prompt = st.text_input(
+ "prompt",
+ "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k",
+ )
+ if is_legacy:
+ negative_prompt = st.text_input("negative prompt", "")
+ else:
+        negative_prompt = ""  # unused for non-legacy models
+
+ stage2strength = None
+ finish_denoising = False
+
+ if add_pipeline:
+ st.write("__________________________")
+ version2 = st.selectbox("Refiner:", ["SDXL-refiner-1.0", "SDXL-refiner-0.9"])
+ st.warning(
+ f"Running with {version2} as the second stage model. Make sure to provide (V)RAM :) "
+ )
+ st.write("**Refiner Options:**")
+
+ version_dict2 = VERSION2SPECS[version2]
+ state2 = init_st(version_dict2, load_filter=False)
+ st.info(state2["msg"])
+
+ stage2strength = st.number_input(
+ "**Refinement strength**", value=0.15, min_value=0.0, max_value=1.0
+ )
+
+ sampler2, *_ = init_sampling(
+ key=2,
+ img2img_strength=stage2strength,
+ specify_num_samples=False,
+ )
+ st.write("__________________________")
+ finish_denoising = st.checkbox("Finish denoising with refiner.", True)
+ if not finish_denoising:
+ stage2strength = None
+
+ if mode == "txt2img":
+ out = run_txt2img(
+ state,
+ version,
+ version_dict,
+ is_legacy=is_legacy,
+ return_latents=add_pipeline,
+ filter=state.get("filter"),
+ stage2strength=stage2strength,
+ )
+ elif mode == "img2img":
+ out = run_img2img(
+ state,
+ version_dict,
+ is_legacy=is_legacy,
+ return_latents=add_pipeline,
+ filter=state.get("filter"),
+ stage2strength=stage2strength,
+ )
+ elif mode == "skip":
+ out = None
+ else:
+ raise ValueError(f"unknown mode {mode}")
+ if isinstance(out, (tuple, list)):
+ samples, samples_z = out
+ else:
+ samples = out
+ samples_z = None
+
+ if add_pipeline and samples_z is not None:
+ st.write("**Running Refinement Stage**")
+ samples = apply_refiner(
+ samples_z,
+ state2,
+ sampler2,
+ samples_z.shape[0],
+ prompt=prompt,
+ negative_prompt=negative_prompt if is_legacy else "",
+ filter=state.get("filter"),
+ finish_denoising=finish_denoising,
+ )
+
+ if save_locally and samples is not None:
+ perform_save_locally(save_path, samples)
diff --git a/sgm/sampling_utils/demo/streamlit_helpers.py b/sgm/sampling_utils/demo/streamlit_helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c5760e26fcffefc95ddc5435870b2a804257e51
--- /dev/null
+++ b/sgm/sampling_utils/demo/streamlit_helpers.py
@@ -0,0 +1,887 @@
+import copy
+import math
+import os
+from glob import glob
+from typing import Dict, List, Optional, Tuple, Union
+
+import cv2
+import numpy as np
+import streamlit as st
+import torch
+import torch.nn as nn
+import torchvision.transforms as TT
+from einops import rearrange, repeat
+from imwatermark import WatermarkEncoder
+from omegaconf import ListConfig, OmegaConf
+from PIL import Image
+from safetensors.torch import load_file as load_safetensors
+from torch import autocast
+from torchvision import transforms
+from torchvision.utils import make_grid, save_image
+
+from scripts.demo.discretization import (Img2ImgDiscretizationWrapper,
+ Txt2NoisyDiscretizationWrapper)
+from scripts.util.detection.nsfw_and_watermark_dectection import \
+ DeepFloydDataFiltering
+from sgm.inference.helpers import embed_watermark
+from sgm.modules.diffusionmodules.guiders import (LinearPredictionGuider,
+ VanillaCFG)
+from sgm.modules.diffusionmodules.sampling import (DPMPP2MSampler,
+ DPMPP2SAncestralSampler,
+ EulerAncestralSampler,
+ EulerEDMSampler,
+ HeunEDMSampler,
+ LinearMultistepSampler)
+from sgm.util import append_dims, default, instantiate_from_config
+
+
+@st.cache_resource()
+def init_st(version_dict, load_ckpt=True, load_filter=True):
+ state = dict()
+    if "model" not in state:
+ config = version_dict["config"]
+ ckpt = version_dict["ckpt"]
+
+ config = OmegaConf.load(config)
+ model, msg = load_model_from_config(config, ckpt if load_ckpt else None)
+
+ state["msg"] = msg
+ state["model"] = model
+ state["ckpt"] = ckpt if load_ckpt else None
+ state["config"] = config
+ if load_filter:
+ state["filter"] = DeepFloydDataFiltering(verbose=False)
+ return state
+
+
+def load_model(model):
+ model.cuda()
+
+
+lowvram_mode = False
+
+
+def set_lowvram_mode(mode):
+ global lowvram_mode
+ lowvram_mode = mode
+
+
+def initial_model_load(model):
+ global lowvram_mode
+ if lowvram_mode:
+ model.model.half()
+ else:
+ model.cuda()
+ return model
+
+
+def unload_model(model):
+ global lowvram_mode
+ if lowvram_mode:
+ model.cpu()
+ torch.cuda.empty_cache()
+
+
+def load_model_from_config(config, ckpt=None, verbose=True):
+ model = instantiate_from_config(config.model)
+
+ if ckpt is not None:
+ print(f"Loading model from {ckpt}")
+ if ckpt.endswith("ckpt"):
+ pl_sd = torch.load(ckpt, map_location="cpu")
+ if "global_step" in pl_sd:
+ global_step = pl_sd["global_step"]
+ st.info(f"loaded ckpt from global step {global_step}")
+ print(f"Global Step: {pl_sd['global_step']}")
+ sd = pl_sd["state_dict"]
+ elif ckpt.endswith("safetensors"):
+ sd = load_safetensors(ckpt)
+ else:
+ raise NotImplementedError
+
+ msg = None
+
+ m, u = model.load_state_dict(sd, strict=False)
+
+ if len(m) > 0 and verbose:
+ print("missing keys:")
+ print(m)
+ if len(u) > 0 and verbose:
+ print("unexpected keys:")
+ print(u)
+ else:
+ msg = None
+
+ model = initial_model_load(model)
+ model.eval()
+ return model, msg
+
+
+def get_unique_embedder_keys_from_conditioner(conditioner):
+ return list(set([x.input_key for x in conditioner.embedders]))
+
+
+def init_embedder_options(keys, init_dict, prompt=None, negative_prompt=None):
+ # Hardcoded demo settings; might undergo some changes in the future
+
+ value_dict = {}
+ for key in keys:
+ if key == "txt":
+ if prompt is None:
+ prompt = "A professional photograph of an astronaut riding a pig"
+ if negative_prompt is None:
+ negative_prompt = ""
+
+ prompt = st.text_input("Prompt", prompt)
+ negative_prompt = st.text_input("Negative prompt", negative_prompt)
+
+ value_dict["prompt"] = prompt
+ value_dict["negative_prompt"] = negative_prompt
+
+ if key == "original_size_as_tuple":
+ orig_width = st.number_input(
+ "orig_width",
+ value=init_dict["orig_width"],
+ min_value=16,
+ )
+ orig_height = st.number_input(
+ "orig_height",
+ value=init_dict["orig_height"],
+ min_value=16,
+ )
+
+ value_dict["orig_width"] = orig_width
+ value_dict["orig_height"] = orig_height
+
+ if key == "crop_coords_top_left":
+ crop_coord_top = st.number_input("crop_coords_top", value=0, min_value=0)
+ crop_coord_left = st.number_input("crop_coords_left", value=0, min_value=0)
+
+ value_dict["crop_coords_top"] = crop_coord_top
+ value_dict["crop_coords_left"] = crop_coord_left
+
+ if key == "aesthetic_score":
+ value_dict["aesthetic_score"] = 6.0
+ value_dict["negative_aesthetic_score"] = 2.5
+
+ if key == "target_size_as_tuple":
+ value_dict["target_width"] = init_dict["target_width"]
+ value_dict["target_height"] = init_dict["target_height"]
+
+ if key in ["fps_id", "fps"]:
+ fps = st.number_input("fps", value=6, min_value=1)
+
+ value_dict["fps"] = fps
+ value_dict["fps_id"] = fps - 1
+
+ if key == "motion_bucket_id":
+ mb_id = st.number_input("motion bucket id", 0, 511, value=127)
+ value_dict["motion_bucket_id"] = mb_id
+
+ if key == "pool_image":
+ st.text("Image for pool conditioning")
+ image = load_img(
+ key="pool_image_input",
+ size=224,
+ center_crop=True,
+ )
+ if image is None:
+ st.info("Need an image here")
+ image = torch.zeros(1, 3, 224, 224)
+ value_dict["pool_image"] = image
+
+ return value_dict
+
+
+def perform_save_locally(save_path, samples):
+ os.makedirs(os.path.join(save_path), exist_ok=True)
+ base_count = len(os.listdir(os.path.join(save_path)))
+ samples = embed_watermark(samples)
+ for sample in samples:
+ sample = 255.0 * rearrange(sample.cpu().numpy(), "c h w -> h w c")
+ Image.fromarray(sample.astype(np.uint8)).save(
+ os.path.join(save_path, f"{base_count:09}.png")
+ )
+ base_count += 1
+
+
+def init_save_locally(_dir, init_value: bool = False):
+ save_locally = st.sidebar.checkbox("Save images locally", value=init_value)
+ if save_locally:
+ save_path = st.text_input("Save path", value=os.path.join(_dir, "samples"))
+ else:
+ save_path = None
+
+ return save_locally, save_path
+
+
+def get_guider(options, key):
+ guider = st.sidebar.selectbox(
+        f"Guider #{key}",
+ [
+ "VanillaCFG",
+ "IdentityGuider",
+ "LinearPredictionGuider",
+ ],
+ options.get("guider", 0),
+ )
+
+ additional_guider_kwargs = options.pop("additional_guider_kwargs", {})
+
+ if guider == "IdentityGuider":
+ guider_config = {
+ "target": "sgm.modules.diffusionmodules.guiders.IdentityGuider"
+ }
+ elif guider == "VanillaCFG":
+ scale = st.number_input(
+ f"cfg-scale #{key}",
+ value=options.get("cfg", 5.0),
+ min_value=0.0,
+ )
+
+ guider_config = {
+ "target": "sgm.modules.diffusionmodules.guiders.VanillaCFG",
+ "params": {
+ "scale": scale,
+ **additional_guider_kwargs,
+ },
+ }
+ elif guider == "LinearPredictionGuider":
+ max_scale = st.number_input(
+ f"max-cfg-scale #{key}",
+ value=options.get("cfg", 1.5),
+ min_value=1.0,
+ )
+ min_scale = st.number_input(
+            f"min-cfg-scale #{key}",
+ value=options.get("min_cfg", 1.0),
+ min_value=1.0,
+ max_value=10.0,
+ )
+
+ guider_config = {
+ "target": "sgm.modules.diffusionmodules.guiders.LinearPredictionGuider",
+ "params": {
+ "max_scale": max_scale,
+ "min_scale": min_scale,
+ "num_frames": options["num_frames"],
+ **additional_guider_kwargs,
+ },
+ }
+ else:
+ raise NotImplementedError
+ return guider_config
+
+
+def init_sampling(
+ key=1,
+ img2img_strength: Optional[float] = None,
+ specify_num_samples: bool = True,
+ stage2strength: Optional[float] = None,
+ options: Optional[Dict[str, int]] = None,
+):
+ options = {} if options is None else options
+
+ num_rows, num_cols = 1, 1
+ if specify_num_samples:
+ num_cols = st.number_input(
+ f"num cols #{key}", value=num_cols, min_value=1, max_value=10
+ )
+
+ steps = st.sidebar.number_input(
+ f"steps #{key}", value=options.get("num_steps", 40), min_value=1, max_value=1000
+ )
+ sampler = st.sidebar.selectbox(
+ f"Sampler #{key}",
+ [
+ "EulerEDMSampler",
+ "HeunEDMSampler",
+ "EulerAncestralSampler",
+ "DPMPP2SAncestralSampler",
+ "DPMPP2MSampler",
+ "LinearMultistepSampler",
+ ],
+ options.get("sampler", 0),
+ )
+ discretization = st.sidebar.selectbox(
+ f"Discretization #{key}",
+ [
+ "LegacyDDPMDiscretization",
+ "EDMDiscretization",
+ ],
+ options.get("discretization", 0),
+ )
+
+ discretization_config = get_discretization(discretization, options=options, key=key)
+
+ guider_config = get_guider(options=options, key=key)
+
+ sampler = get_sampler(sampler, steps, discretization_config, guider_config, key=key)
+ if img2img_strength is not None:
+ st.warning(
+ f"Wrapping {sampler.__class__.__name__} with Img2ImgDiscretizationWrapper"
+ )
+ sampler.discretization = Img2ImgDiscretizationWrapper(
+ sampler.discretization, strength=img2img_strength
+ )
+ if stage2strength is not None:
+ sampler.discretization = Txt2NoisyDiscretizationWrapper(
+ sampler.discretization, strength=stage2strength, original_steps=steps
+ )
+ return sampler, num_rows, num_cols
+
+
+def get_discretization(discretization, options, key=1):
+ if discretization == "LegacyDDPMDiscretization":
+ discretization_config = {
+ "target": "sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization",
+ }
+ elif discretization == "EDMDiscretization":
+ sigma_min = st.number_input(
+ f"sigma_min #{key}", value=options.get("sigma_min", 0.03)
+ ) # 0.0292
+ sigma_max = st.number_input(
+ f"sigma_max #{key}", value=options.get("sigma_max", 14.61)
+ ) # 14.6146
+ rho = st.number_input(f"rho #{key}", value=options.get("rho", 3.0))
+ discretization_config = {
+ "target": "sgm.modules.diffusionmodules.discretizer.EDMDiscretization",
+ "params": {
+ "sigma_min": sigma_min,
+ "sigma_max": sigma_max,
+ "rho": rho,
+ },
+ }
+    else:
+        raise ValueError(f"unknown discretization {discretization}!")
+
+    return discretization_config
+
+
+def get_sampler(sampler_name, steps, discretization_config, guider_config, key=1):
+ if sampler_name == "EulerEDMSampler" or sampler_name == "HeunEDMSampler":
+ s_churn = st.sidebar.number_input(f"s_churn #{key}", value=0.0, min_value=0.0)
+ s_tmin = st.sidebar.number_input(f"s_tmin #{key}", value=0.0, min_value=0.0)
+ s_tmax = st.sidebar.number_input(f"s_tmax #{key}", value=999.0, min_value=0.0)
+ s_noise = st.sidebar.number_input(f"s_noise #{key}", value=1.0, min_value=0.0)
+
+ if sampler_name == "EulerEDMSampler":
+ sampler = EulerEDMSampler(
+ num_steps=steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ s_churn=s_churn,
+ s_tmin=s_tmin,
+ s_tmax=s_tmax,
+ s_noise=s_noise,
+ verbose=True,
+ )
+ elif sampler_name == "HeunEDMSampler":
+ sampler = HeunEDMSampler(
+ num_steps=steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ s_churn=s_churn,
+ s_tmin=s_tmin,
+ s_tmax=s_tmax,
+ s_noise=s_noise,
+ verbose=True,
+ )
+ elif (
+ sampler_name == "EulerAncestralSampler"
+ or sampler_name == "DPMPP2SAncestralSampler"
+ ):
+ s_noise = st.sidebar.number_input("s_noise", value=1.0, min_value=0.0)
+ eta = st.sidebar.number_input("eta", value=1.0, min_value=0.0)
+
+ if sampler_name == "EulerAncestralSampler":
+ sampler = EulerAncestralSampler(
+ num_steps=steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ eta=eta,
+ s_noise=s_noise,
+ verbose=True,
+ )
+ elif sampler_name == "DPMPP2SAncestralSampler":
+ sampler = DPMPP2SAncestralSampler(
+ num_steps=steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ eta=eta,
+ s_noise=s_noise,
+ verbose=True,
+ )
+ elif sampler_name == "DPMPP2MSampler":
+ sampler = DPMPP2MSampler(
+ num_steps=steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ verbose=True,
+ )
+ elif sampler_name == "LinearMultistepSampler":
+ order = st.sidebar.number_input("order", value=4, min_value=1)
+ sampler = LinearMultistepSampler(
+ num_steps=steps,
+ discretization_config=discretization_config,
+ guider_config=guider_config,
+ order=order,
+ verbose=True,
+ )
+ else:
+ raise ValueError(f"unknown sampler {sampler_name}!")
+
+ return sampler
+
+
+def load_img(
+ display: bool = True,
+ size: Union[None, int, Tuple[int, int]] = None,
+ center_crop: bool = False,
+):
+ image = get_interactive_image()
+ if image is None:
+ return None
+ if display:
+ st.image(image)
+ w, h = image.size
+ print(f"loaded input image of size ({w}, {h})")
+
+ transform = []
+ if size is not None:
+ transform.append(transforms.Resize(size))
+ if center_crop:
+ transform.append(transforms.CenterCrop(size))
+ transform.append(transforms.ToTensor())
+ transform.append(transforms.Lambda(lambda x: 2.0 * x - 1.0))
+
+ transform = transforms.Compose(transform)
+ img = transform(image)[None, ...]
+ st.text(f"input min/max/mean: {img.min():.3f}/{img.max():.3f}/{img.mean():.3f}")
+ return img
+
+
+def get_init_img(batch_size=1, key=None):
+ init_image = load_img(key=key).cuda()
+ init_image = repeat(init_image, "1 ... -> b ...", b=batch_size)
+ return init_image
+
+
+def do_sample(
+ model,
+ sampler,
+ value_dict,
+ num_samples,
+ H,
+ W,
+ C,
+ F,
+ force_uc_zero_embeddings: Optional[List] = None,
+ force_cond_zero_embeddings: Optional[List] = None,
+    batch2model_input: Optional[List] = None,
+ return_latents=False,
+ filter=None,
+ T=None,
+ additional_batch_uc_fields=None,
+ decoding_t=None,
+):
+ force_uc_zero_embeddings = default(force_uc_zero_embeddings, [])
+ batch2model_input = default(batch2model_input, [])
+ additional_batch_uc_fields = default(additional_batch_uc_fields, [])
+
+ st.text("Sampling")
+
+ outputs = st.empty()
+ precision_scope = autocast
+ with torch.no_grad():
+ with precision_scope("cuda"):
+ with model.ema_scope():
+ if T is not None:
+ num_samples = [num_samples, T]
+ else:
+ num_samples = [num_samples]
+
+ load_model(model.conditioner)
+ batch, batch_uc = get_batch(
+ get_unique_embedder_keys_from_conditioner(model.conditioner),
+ value_dict,
+ num_samples,
+ T=T,
+ additional_batch_uc_fields=additional_batch_uc_fields,
+ )
+
+ c, uc = model.conditioner.get_unconditional_conditioning(
+ batch,
+ batch_uc=batch_uc,
+ force_uc_zero_embeddings=force_uc_zero_embeddings,
+ force_cond_zero_embeddings=force_cond_zero_embeddings,
+ )
+ unload_model(model.conditioner)
+
+ for k in c:
+                    if k != "crossattn":
+ c[k], uc[k] = map(
+ lambda y: y[k][: math.prod(num_samples)].to("cuda"), (c, uc)
+ )
+ if k in ["crossattn", "concat"] and T is not None:
+ uc[k] = repeat(uc[k], "b ... -> b t ...", t=T)
+ uc[k] = rearrange(uc[k], "b t ... -> (b t) ...", t=T)
+ c[k] = repeat(c[k], "b ... -> b t ...", t=T)
+ c[k] = rearrange(c[k], "b t ... -> (b t) ...", t=T)
+
+ additional_model_inputs = {}
+ for k in batch2model_input:
+ if k == "image_only_indicator":
+ assert T is not None
+
+ if isinstance(
+ sampler.guider, (VanillaCFG, LinearPredictionGuider)
+ ):
+ additional_model_inputs[k] = torch.zeros(
+ num_samples[0] * 2, num_samples[1]
+ ).to("cuda")
+ else:
+ additional_model_inputs[k] = torch.zeros(num_samples).to(
+ "cuda"
+ )
+ else:
+ additional_model_inputs[k] = batch[k]
+
+ shape = (math.prod(num_samples), C, H // F, W // F)
+ randn = torch.randn(shape).to("cuda")
+
+ def denoiser(input, sigma, c):
+ return model.denoiser(
+ model.model, input, sigma, c, **additional_model_inputs
+ )
+
+ load_model(model.denoiser)
+ load_model(model.model)
+ samples_z = sampler(denoiser, randn, cond=c, uc=uc)
+ unload_model(model.model)
+ unload_model(model.denoiser)
+
+ load_model(model.first_stage_model)
+ model.en_and_decode_n_samples_a_time = (
+ decoding_t # Decode n frames at a time
+ )
+ samples_x = model.decode_first_stage(samples_z)
+ samples = torch.clamp((samples_x + 1.0) / 2.0, min=0.0, max=1.0)
+ unload_model(model.first_stage_model)
+
+ if filter is not None:
+ samples = filter(samples)
+
+ if T is None:
+ grid = torch.stack([samples])
+ grid = rearrange(grid, "n b c h w -> (n h) (b w) c")
+ outputs.image(grid.cpu().numpy())
+ else:
+ as_vids = rearrange(samples, "(b t) c h w -> b t c h w", t=T)
+ for i, vid in enumerate(as_vids):
+ grid = rearrange(make_grid(vid, nrow=4), "c h w -> h w c")
+ st.image(
+ grid.cpu().numpy(),
+ f"Sample #{i} as image",
+ )
+
+ if return_latents:
+ return samples, samples_z
+ return samples
+
+
+def get_batch(
+ keys,
+ value_dict: dict,
+ N: Union[List, ListConfig],
+ device: str = "cuda",
+    T: Optional[int] = None,
+ additional_batch_uc_fields: List[str] = [],
+):
+ # Hardcoded demo setups; might undergo some changes in the future
+
+ batch = {}
+ batch_uc = {}
+
+ for key in keys:
+ if key == "txt":
+ batch["txt"] = [value_dict["prompt"]] * math.prod(N)
+
+ batch_uc["txt"] = [value_dict["negative_prompt"]] * math.prod(N)
+
+ elif key == "original_size_as_tuple":
+ batch["original_size_as_tuple"] = (
+ torch.tensor([value_dict["orig_height"], value_dict["orig_width"]])
+ .to(device)
+ .repeat(math.prod(N), 1)
+ )
+ elif key == "crop_coords_top_left":
+ batch["crop_coords_top_left"] = (
+ torch.tensor(
+ [value_dict["crop_coords_top"], value_dict["crop_coords_left"]]
+ )
+ .to(device)
+ .repeat(math.prod(N), 1)
+ )
+ elif key == "aesthetic_score":
+ batch["aesthetic_score"] = (
+ torch.tensor([value_dict["aesthetic_score"]])
+ .to(device)
+ .repeat(math.prod(N), 1)
+ )
+ batch_uc["aesthetic_score"] = (
+ torch.tensor([value_dict["negative_aesthetic_score"]])
+ .to(device)
+ .repeat(math.prod(N), 1)
+ )
+
+ elif key == "target_size_as_tuple":
+ batch["target_size_as_tuple"] = (
+ torch.tensor([value_dict["target_height"], value_dict["target_width"]])
+ .to(device)
+ .repeat(math.prod(N), 1)
+ )
+ elif key == "fps":
+ batch[key] = (
+ torch.tensor([value_dict["fps"]]).to(device).repeat(math.prod(N))
+ )
+ elif key == "fps_id":
+ batch[key] = (
+ torch.tensor([value_dict["fps_id"]]).to(device).repeat(math.prod(N))
+ )
+ elif key == "motion_bucket_id":
+ batch[key] = (
+ torch.tensor([value_dict["motion_bucket_id"]])
+ .to(device)
+ .repeat(math.prod(N))
+ )
+ elif key == "pool_image":
+ batch[key] = repeat(value_dict[key], "1 ... -> b ...", b=math.prod(N)).to(
+ device, dtype=torch.half
+ )
+ elif key == "cond_aug":
+ batch[key] = repeat(
+ torch.tensor([value_dict["cond_aug"]]).to("cuda"),
+ "1 -> b",
+ b=math.prod(N),
+ )
+ elif key == "cond_frames":
+ batch[key] = repeat(value_dict["cond_frames"], "1 ... -> b ...", b=N[0])
+ elif key == "cond_frames_without_noise":
+ batch[key] = repeat(
+ value_dict["cond_frames_without_noise"], "1 ... -> b ...", b=N[0]
+ )
+ else:
+ batch[key] = value_dict[key]
+
+ if T is not None:
+ batch["num_video_frames"] = T
+
+ for key in batch.keys():
+ if key not in batch_uc and isinstance(batch[key], torch.Tensor):
+ batch_uc[key] = torch.clone(batch[key])
+ elif key in additional_batch_uc_fields and key not in batch_uc:
+ batch_uc[key] = copy.copy(batch[key])
+ return batch, batch_uc
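+
+# Sketch of the resulting batch for an SVD-style conditioner with N = [1, T]
+# (illustrative, not exhaustive):
+#   batch["cond_frames"]:        (1, 3, H, W), the conditioning image
+#   batch["fps_id"], batch["motion_bucket_id"]: (T,) tensors
+#   batch_uc: tensor entries cloned from batch unless explicitly overridden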
+
+
+@torch.no_grad()
+def do_img2img(
+ img,
+ model,
+ sampler,
+ value_dict,
+ num_samples,
+ force_uc_zero_embeddings: Optional[List] = None,
+ force_cond_zero_embeddings: Optional[List] = None,
+ additional_kwargs={},
+    offset_noise_level: float = 0.0,
+ return_latents=False,
+ skip_encode=False,
+ filter=None,
+ add_noise=True,
+):
+ st.text("Sampling")
+
+ outputs = st.empty()
+ precision_scope = autocast
+ with torch.no_grad():
+ with precision_scope("cuda"):
+ with model.ema_scope():
+ load_model(model.conditioner)
+ batch, batch_uc = get_batch(
+ get_unique_embedder_keys_from_conditioner(model.conditioner),
+ value_dict,
+ [num_samples],
+ )
+ c, uc = model.conditioner.get_unconditional_conditioning(
+ batch,
+ batch_uc=batch_uc,
+ force_uc_zero_embeddings=force_uc_zero_embeddings,
+ force_cond_zero_embeddings=force_cond_zero_embeddings,
+ )
+ unload_model(model.conditioner)
+ for k in c:
+ c[k], uc[k] = map(lambda y: y[k][:num_samples].to("cuda"), (c, uc))
+
+ for k in additional_kwargs:
+ c[k] = uc[k] = additional_kwargs[k]
+ if skip_encode:
+ z = img
+ else:
+ load_model(model.first_stage_model)
+ z = model.encode_first_stage(img)
+ unload_model(model.first_stage_model)
+
+ noise = torch.randn_like(z)
+
+ sigmas = sampler.discretization(sampler.num_steps).cuda()
+ sigma = sigmas[0]
+
+ st.info(f"all sigmas: {sigmas}")
+ st.info(f"noising sigma: {sigma}")
+ if offset_noise_level > 0.0:
+ noise = noise + offset_noise_level * append_dims(
+ torch.randn(z.shape[0], device=z.device), z.ndim
+ )
+ if add_noise:
+ noised_z = z + noise * append_dims(sigma, z.ndim).cuda()
+ noised_z = noised_z / torch.sqrt(
+ 1.0 + sigmas[0] ** 2.0
+ ) # Note: hardcoded to DDPM-like scaling. need to generalize later.
+ else:
+ noised_z = z / torch.sqrt(1.0 + sigmas[0] ** 2.0)
+
+ def denoiser(x, sigma, c):
+ return model.denoiser(model.model, x, sigma, c)
+
+ load_model(model.denoiser)
+ load_model(model.model)
+ samples_z = sampler(denoiser, noised_z, cond=c, uc=uc)
+ unload_model(model.model)
+ unload_model(model.denoiser)
+
+ load_model(model.first_stage_model)
+ samples_x = model.decode_first_stage(samples_z)
+ unload_model(model.first_stage_model)
+ samples = torch.clamp((samples_x + 1.0) / 2.0, min=0.0, max=1.0)
+
+ if filter is not None:
+ samples = filter(samples)
+
+                grid = torch.stack([samples])
+                grid = rearrange(grid, "n b c h w -> (n h) (b w) c")
+ outputs.image(grid.cpu().numpy())
+ if return_latents:
+ return samples, samples_z
+ return samples
+
+
+def get_resizing_factor(
+ desired_shape: Tuple[int, int], current_shape: Tuple[int, int]
+) -> float:
+ r_bound = desired_shape[1] / desired_shape[0]
+ aspect_r = current_shape[1] / current_shape[0]
+ if r_bound >= 1.0:
+ if aspect_r >= r_bound:
+ factor = min(desired_shape) / min(current_shape)
+ else:
+ if aspect_r < 1.0:
+ factor = max(desired_shape) / min(current_shape)
+ else:
+ factor = max(desired_shape) / max(current_shape)
+ else:
+ if aspect_r <= r_bound:
+ factor = min(desired_shape) / min(current_shape)
+ else:
+ if aspect_r > 1:
+ factor = max(desired_shape) / min(current_shape)
+ else:
+ factor = max(desired_shape) / max(current_shape)
+
+ return factor
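+
+# Worked example: desired (H, W) = (576, 1024) and current (720, 1280) give
+# r_bound = 1024/576 ~= 1.78 and aspect_r = 1280/720 ~= 1.78 >= r_bound, so
+# factor = min(576, 1024) / min(720, 1280) = 576 / 720 = 0.8.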
+
+
+def get_interactive_image(key=None) -> Image.Image:
+ image = st.file_uploader("Input", type=["jpg", "JPEG", "png"], key=key)
+ if image is not None:
+ image = Image.open(image)
+ if not image.mode == "RGB":
+ image = image.convert("RGB")
+ return image
+
+
+def load_img_for_prediction(
+ W: int, H: int, display=True, key=None, device="cuda"
+) -> torch.Tensor:
+ image = get_interactive_image(key=key)
+ if image is None:
+ return None
+ if display:
+ st.image(image)
+ w, h = image.size
+
+ image = np.array(image).transpose(2, 0, 1)
+ image = torch.from_numpy(image).to(dtype=torch.float32) / 255.0
+ image = image.unsqueeze(0)
+
+ rfs = get_resizing_factor((H, W), (h, w))
+ resize_size = [int(np.ceil(rfs * s)) for s in (h, w)]
+ top = (resize_size[0] - H) // 2
+ left = (resize_size[1] - W) // 2
+
+ image = torch.nn.functional.interpolate(
+ image, resize_size, mode="area", antialias=False
+ )
+ image = TT.functional.crop(image, top=top, left=left, height=H, width=W)
+
+ if display:
+ numpy_img = np.transpose(image[0].numpy(), (1, 2, 0))
+ pil_image = Image.fromarray((numpy_img * 255).astype(np.uint8))
+ st.image(pil_image)
+ return image.to(device) * 2.0 - 1.0
+
+
+def save_video_as_grid_and_mp4(
+ video_batch: torch.Tensor, save_path: str, T: int, fps: int = 5
+):
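+ # Save each video as a PNG frame grid and an mp4, then re-encode the mp4
+ # with libx264 via ffmpeg so st.video can play it back inline.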
+ os.makedirs(save_path, exist_ok=True)
+ base_count = len(glob(os.path.join(save_path, "*.mp4")))
+
+ video_batch = rearrange(video_batch, "(b t) c h w -> b t c h w", t=T)
+ video_batch = embed_watermark(video_batch)
+ for vid in video_batch:
+ save_image(vid, fp=os.path.join(save_path, f"{base_count:06d}.png"), nrow=4)
+
+ video_path = os.path.join(save_path, f"{base_count:06d}.mp4")
+
+ writer = cv2.VideoWriter(
+ video_path,
+ cv2.VideoWriter_fourcc(*"MP4V"),
+ fps,
+ (vid.shape[-1], vid.shape[-2]),
+ )
+
+ vid = (
+ (rearrange(vid, "t c h w -> t h w c") * 255).cpu().numpy().astype(np.uint8)
+ )
+ for frame in vid:
+ frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
+ writer.write(frame)
+
+ writer.release()
+
+ video_path_h264 = video_path[:-4] + "_h264.mp4"
+ os.system(f"ffmpeg -i {video_path} -c:v libx264 {video_path_h264}")
+
+ with open(video_path_h264, "rb") as f:
+ video_bytes = f.read()
+ st.video(video_bytes)
+
+ base_count += 1
diff --git a/sgm/sampling_utils/demo/turbo.py b/sgm/sampling_utils/demo/turbo.py
new file mode 100644
index 0000000000000000000000000000000000000000..91a973aec70eae280ed21044e29141b678e6f693
--- /dev/null
+++ b/sgm/sampling_utils/demo/turbo.py
@@ -0,0 +1,234 @@
+from st_keyup import st_keyup
+from streamlit_helpers import *
+
+from sgm.modules.diffusionmodules.sampling import EulerAncestralSampler
+
+VERSION2SPECS = {
+ "SDXL-Turbo": {
+ "H": 512,
+ "W": 512,
+ "C": 4,
+ "f": 8,
+ "is_legacy": False,
+ "config": "configs/inference/sd_xl_base.yaml",
+ "ckpt": "checkpoints/sd_xl_turbo_1.0.safetensors",
+ },
+ "SD-Turbo": {
+ "H": 512,
+ "W": 512,
+ "C": 4,
+ "f": 8,
+ "is_legacy": False,
+ "config": "configs/inference/sd_2_1.yaml",
+ "ckpt": "checkpoints/sd_turbo.safetensors",
+ },
+}
+
+
+class SubstepSampler(EulerAncestralSampler):
+ def __init__(self, n_sample_steps=1, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.n_sample_steps = n_sample_steps
+ self.steps_subset = [0, 100, 200, 300, 1000]
+
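+ # Few-step sampling: pick n_sample_steps sigmas from fixed indices of the
+ # full 1000-step discretization (plus the terminal sigma), which is how
+ # the Turbo models generate images in 1-4 steps.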
+ def prepare_sampling_loop(self, x, cond, uc=None, num_steps=None):
+ sigmas = self.discretization(
+ self.num_steps if num_steps is None else num_steps, device=self.device
+ )
+ sigmas = sigmas[
+ self.steps_subset[: self.n_sample_steps] + self.steps_subset[-1:]
+ ]
+ uc = cond
+ x *= torch.sqrt(1.0 + sigmas[0] ** 2.0)
+ num_sigmas = len(sigmas)
+ s_in = x.new_ones([x.shape[0]])
+ return x, s_in, sigmas, num_sigmas, cond, uc
+
+
+def seeded_randn(shape, seed):
+ randn = np.random.RandomState(seed).randn(*shape)
+ randn = torch.from_numpy(randn).to(device="cuda", dtype=torch.float32)
+ return randn
+
+
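+# Noise source that is deterministic per seed but advances the seed on every
+# call, so a whole ancestral-sampling trajectory is reproducible.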
+class SeededNoise:
+ def __init__(self, seed):
+ self.seed = seed
+
+ def __call__(self, x):
+ self.seed = self.seed + 1
+ return seeded_randn(x.shape, self.seed)
+
+
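+# Build the value_dict consumed by get_batch: prompt/negative prompt plus
+# size, crop, and aesthetic-score conditioning, depending on which embedder
+# keys the conditioner exposes.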
+def init_embedder_options(keys, init_dict, prompt=None, negative_prompt=None):
+ value_dict = {}
+ for key in keys:
+ if key == "txt":
+ value_dict["prompt"] = prompt
+ value_dict["negative_prompt"] = ""
+
+ if key == "original_size_as_tuple":
+ orig_width = init_dict["orig_width"]
+ orig_height = init_dict["orig_height"]
+
+ value_dict["orig_width"] = orig_width
+ value_dict["orig_height"] = orig_height
+
+ if key == "crop_coords_top_left":
+ crop_coord_top = 0
+ crop_coord_left = 0
+
+ value_dict["crop_coords_top"] = crop_coord_top
+ value_dict["crop_coords_left"] = crop_coord_left
+
+ if key == "aesthetic_score":
+ value_dict["aesthetic_score"] = 6.0
+ value_dict["negative_aesthetic_score"] = 2.5
+
+ if key == "target_size_as_tuple":
+ value_dict["target_width"] = init_dict["target_width"]
+ value_dict["target_height"] = init_dict["target_height"]
+
+ return value_dict
+
+
+def sample(
+ model,
+ sampler,
+ prompt="A lush garden with oversized flowers and vibrant colors, inhabited by miniature animals.",
+ H=1024,
+ W=1024,
+ seed=0,
+ filter=None,
+):
+ F = 8
+ C = 4
+ shape = (1, C, H // F, W // F)
+
+ value_dict = init_embedder_options(
+ keys=get_unique_embedder_keys_from_conditioner(model.conditioner),
+ init_dict={
+ "orig_width": W,
+ "orig_height": H,
+ "target_width": W,
+ "target_height": H,
+ },
+ prompt=prompt,
+ )
+
+ if seed is None:
+ seed = torch.seed()
+ precision_scope = autocast
+ with torch.no_grad():
+ with precision_scope("cuda"):
+ batch, batch_uc = get_batch(
+ get_unique_embedder_keys_from_conditioner(model.conditioner),
+ value_dict,
+ [1],
+ )
+ c = model.conditioner(batch)
+ uc = None
+ randn = seeded_randn(shape, seed)
+
+ def denoiser(input, sigma, c):
+ return model.denoiser(
+ model.model,
+ input,
+ sigma,
+ c,
+ )
+
+ samples_z = sampler(denoiser, randn, cond=c, uc=uc)
+ samples_x = model.decode_first_stage(samples_z)
+ samples = torch.clamp((samples_x + 1.0) / 2.0, min=0.0, max=1.0)
+ if filter is not None:
+ samples = filter(samples)
+ samples = (
+ (255 * samples)
+ .to(dtype=torch.uint8)
+ .permute(0, 2, 3, 1)
+ .detach()
+ .cpu()
+ .numpy()
+ )
+ return samples
+
+
+def v_spacer(height) -> None:
+ for _ in range(height):
+ st.write("\n")
+
+
+if __name__ == "__main__":
+ st.title("Turbo")
+
+ head_cols = st.columns([1, 1, 1])
+ with head_cols[0]:
+ version = st.selectbox("Model Version", list(VERSION2SPECS.keys()), 0)
+ version_dict = VERSION2SPECS[version]
+
+ with head_cols[1]:
+ v_spacer(2)
+ if st.checkbox("Load Model"):
+ mode = "txt2img"
+ else:
+ mode = "skip"
+
+ if mode != "skip":
+ state = init_st(version_dict, load_filter=True)
+ if state["msg"]:
+ st.info(state["msg"])
+ model = state["model"]
+ load_model(model)
+
+ # seed
+ if "seed" not in st.session_state:
+ st.session_state.seed = 0
+
+ def increment_counter():
+ st.session_state.seed += 1
+
+ def decrement_counter():
+ if st.session_state.seed > 0:
+ st.session_state.seed -= 1
+
+ with head_cols[2]:
+ n_steps = st.number_input(label="number of steps", min_value=1, max_value=4)
+
+ sampler = SubstepSampler(
+ n_sample_steps=1,
+ num_steps=1000,
+ eta=1.0,
+ discretization_config=dict(
+ target="sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization"
+ ),
+ )
+ sampler.n_sample_steps = n_steps
+ default_prompt = (
+ "A cinematic shot of a baby racoon wearing an intricate italian priest robe."
+ )
+ prompt = st_keyup(
+ "Enter a value", value=default_prompt, debounce=300, key="interactive_text"
+ )
+
+ cols = st.columns([1, 5, 1])
+ if mode != "skip":
+ with cols[0]:
+ v_spacer(14)
+ st.button("↩", on_click=decrement_counter)
+ with cols[2]:
+ v_spacer(14)
+ st.button("↪", on_click=increment_counter)
+
+ sampler.noise_sampler = SeededNoise(seed=st.session_state.seed)
+ out = sample(
+ model,
+ sampler,
+ H=512,
+ W=512,
+ seed=st.session_state.seed,
+ prompt=prompt,
+ filter=state.get("filter"),
+ )
+ with cols[1]:
+ st.image(out[0])
diff --git a/sgm/sampling_utils/demo/video_sampling.py b/sgm/sampling_utils/demo/video_sampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..95789020110f3abaf6e5a7755a7e5910b864c6df
--- /dev/null
+++ b/sgm/sampling_utils/demo/video_sampling.py
@@ -0,0 +1,200 @@
+import os
+
+from pytorch_lightning import seed_everything
+
+from scripts.demo.streamlit_helpers import *
+
+SAVE_PATH = "outputs/demo/vid/"
+
+VERSION2SPECS = {
+ "svd": {
+ "T": 14,
+ "H": 576,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "config": "configs/inference/svd.yaml",
+ "ckpt": "checkpoints/svd.safetensors",
+ "options": {
+ "discretization": 1,
+ "cfg": 2.5,
+ "sigma_min": 0.002,
+ "sigma_max": 700.0,
+ "rho": 7.0,
+ "guider": 2,
+ "force_uc_zero_embeddings": ["cond_frames", "cond_frames_without_noise"],
+ "num_steps": 25,
+ },
+ },
+ "svd_image_decoder": {
+ "T": 14,
+ "H": 576,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "config": "configs/inference/svd_image_decoder.yaml",
+ "ckpt": "checkpoints/svd_image_decoder.safetensors",
+ "options": {
+ "discretization": 1,
+ "cfg": 2.5,
+ "sigma_min": 0.002,
+ "sigma_max": 700.0,
+ "rho": 7.0,
+ "guider": 2,
+ "force_uc_zero_embeddings": ["cond_frames", "cond_frames_without_noise"],
+ "num_steps": 25,
+ },
+ },
+ "svd_xt": {
+ "T": 25,
+ "H": 576,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "config": "configs/inference/svd.yaml",
+ "ckpt": "checkpoints/svd_xt.safetensors",
+ "options": {
+ "discretization": 1,
+ "cfg": 3.0,
+ "min_cfg": 1.5,
+ "sigma_min": 0.002,
+ "sigma_max": 700.0,
+ "rho": 7.0,
+ "guider": 2,
+ "force_uc_zero_embeddings": ["cond_frames", "cond_frames_without_noise"],
+ "num_steps": 30,
+ "decoding_t": 14,
+ },
+ },
+ "svd_xt_image_decoder": {
+ "T": 25,
+ "H": 576,
+ "W": 1024,
+ "C": 4,
+ "f": 8,
+ "config": "configs/inference/svd_image_decoder.yaml",
+ "ckpt": "checkpoints/svd_xt_image_decoder.safetensors",
+ "options": {
+ "discretization": 1,
+ "cfg": 3.0,
+ "min_cfg": 1.5,
+ "sigma_min": 0.002,
+ "sigma_max": 700.0,
+ "rho": 7.0,
+ "guider": 2,
+ "force_uc_zero_embeddings": ["cond_frames", "cond_frames_without_noise"],
+ "num_steps": 30,
+ "decoding_t": 14,
+ },
+ },
+}
+
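+# Each spec bundles the model config, checkpoint path, frame count T, and the
+# sampler defaults (discretization choice, sigma range, cfg scale, num_steps)
+# used for that SVD variant.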
+
+if __name__ == "__main__":
+ st.title("Stable Video Diffusion")
+ version = st.selectbox(
+ "Model Version",
+ [k for k in VERSION2SPECS.keys()],
+ 0,
+ )
+ version_dict = VERSION2SPECS[version]
+ if st.checkbox("Load Model"):
+ mode = "img2vid"
+ else:
+ mode = "skip"
+
+ H = st.sidebar.number_input(
+ "H", value=version_dict["H"], min_value=64, max_value=2048
+ )
+ W = st.sidebar.number_input(
+ "W", value=version_dict["W"], min_value=64, max_value=2048
+ )
+ T = st.sidebar.number_input(
+ "T", value=version_dict["T"], min_value=0, max_value=128
+ )
+ C = version_dict["C"]
+ F = version_dict["f"]
+ options = version_dict["options"]
+
+ if mode != "skip":
+ state = init_st(version_dict, load_filter=True)
+ if state["msg"]:
+ st.info(state["msg"])
+ model = state["model"]
+
+ ukeys = set(
+ get_unique_embedder_keys_from_conditioner(state["model"].conditioner)
+ )
+
+ value_dict = init_embedder_options(
+ ukeys,
+ {},
+ )
+
+ value_dict["image_only_indicator"] = 0
+
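+ # img2vid conditioning: the model receives the input frame both clean and
+ # with a small amount of noise (cond_aug), following the SVD recipe.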
+ if mode == "img2vid":
+ img = load_img_for_prediction(W, H)
+ cond_aug = st.number_input(
+ "Conditioning augmentation:", value=0.02, min_value=0.0
+ )
+ value_dict["cond_frames_without_noise"] = img
+ value_dict["cond_frames"] = img + cond_aug * torch.randn_like(img)
+ value_dict["cond_aug"] = cond_aug
+
+ seed = st.sidebar.number_input(
+ "seed", value=23, min_value=0, max_value=int(1e9)
+ )
+ seed_everything(seed)
+
+ save_locally, save_path = init_save_locally(
+ os.path.join(SAVE_PATH, version), init_value=True
+ )
+
+ options["num_frames"] = T
+
+ sampler, num_rows, num_cols = init_sampling(options=options)
+ num_samples = num_rows * num_cols
+
+ decoding_t = st.number_input(
+ "Decode t frames at a time (set small if you are low on VRAM)",
+ value=options.get("decoding_t", T),
+ min_value=1,
+ max_value=int(1e9),
+ )
+
+ if st.checkbox("Overwrite fps in mp4 generator", False):
+ saving_fps = st.number_input(
+ f"saving video at fps:", value=value_dict["fps"], min_value=1
+ )
+ else:
+ saving_fps = value_dict["fps"]
+
+ if st.button("Sample"):
+ out = do_sample(
+ model,
+ sampler,
+ value_dict,
+ num_samples,
+ H,
+ W,
+ C,
+ F,
+ T=T,
+ batch2model_input=["num_video_frames", "image_only_indicator"],
+ force_uc_zero_embeddings=options.get("force_uc_zero_embeddings", None),
+ force_cond_zero_embeddings=options.get(
+ "force_cond_zero_embeddings", None
+ ),
+ return_latents=False,
+ decoding_t=decoding_t,
+ )
+
+ if isinstance(out, (tuple, list)):
+ samples, samples_z = out
+ else:
+ samples = out
+ samples_z = None
+
+ if save_locally:
+ save_video_as_grid_and_mp4(samples, save_path, T, fps=saving_fps)
diff --git a/sgm/util.py b/sgm/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..66d9b2a69db2898323cbf2ad26a09ac8b2facd11
--- /dev/null
+++ b/sgm/util.py
@@ -0,0 +1,275 @@
+import functools
+import importlib
+import os
+from functools import partial
+from inspect import isfunction
+
+import fsspec
+import numpy as np
+import torch
+from PIL import Image, ImageDraw, ImageFont
+from safetensors.torch import load_file as load_safetensors
+
+
+def disabled_train(self, mode=True):
+ """Overwrite model.train with this function to make sure train/eval mode
+ does not change anymore."""
+ return self
+
+
+def get_string_from_tuple(s):
+ try:
+ # If the string looks like a printed tuple, e.g. "('foo',)", parse it
+ # and return its first element; otherwise fall through and return s.
+ if s[0] == "(" and s[-1] == ")":
+ t = eval(s)
+ if isinstance(t, tuple):
+ return t[0]
+ except Exception:
+ pass
+ return s
+
+
+def is_power_of_two(n):
+ """
+ chat.openai.com/chat
+ Return True if n is a power of 2, otherwise return False.
+
+ The function is_power_of_two takes an integer n as input and returns True if n is a power of 2, otherwise it returns False.
+ The function works by first checking if n is less than or equal to 0. If n is less than or equal to 0, it can't be a power of 2, so the function returns False.
+ If n is greater than 0, the function checks whether n is a power of 2 by using a bitwise AND operation between n and n-1. If n is a power of 2, then it will have only one bit set to 1 in its binary representation. When we subtract 1 from a power of 2, all the bits to the right of that bit become 1, and the bit itself becomes 0. So, when we perform a bitwise AND between n and n-1, we get 0 if n is a power of 2, and a non-zero value otherwise.
+ Thus, if the result of the bitwise AND operation is 0, then n is a power of 2 and the function returns True. Otherwise, the function returns False.
+
+ """
+ if n <= 0:
+ return False
+ return (n & (n - 1)) == 0
+
+
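+# Decorator: run f under torch.cuda.amp.autocast with the globally configured
+# GPU autocast dtype and cache settings.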
+def autocast(f, enabled=True):
+ def do_autocast(*args, **kwargs):
+ with torch.cuda.amp.autocast(
+ enabled=enabled,
+ dtype=torch.get_autocast_gpu_dtype(),
+ cache_enabled=torch.is_autocast_cache_enabled(),
+ ):
+ return f(*args, **kwargs)
+
+ return do_autocast
+
+
+def load_partial_from_config(config):
+ return partial(get_obj_from_str(config["target"]), **config.get("params", dict()))
+
+
+def log_txt_as_img(wh, xc, size=10):
+ # wh a tuple of (width, height)
+ # xc a list of captions to plot
+ b = len(xc)
+ txts = list()
+ for bi in range(b):
+ txt = Image.new("RGB", wh, color="white")
+ draw = ImageDraw.Draw(txt)
+ font = ImageFont.truetype("data/DejaVuSans.ttf", size=size)
+ nc = int(40 * (wh[0] / 256))
+ if isinstance(xc[bi], list):
+ text_seq = xc[bi][0]
+ else:
+ text_seq = xc[bi]
+ lines = "\n".join(
+ text_seq[start : start + nc] for start in range(0, len(text_seq), nc)
+ )
+
+ try:
+ draw.text((0, 0), lines, fill="black", font=font)
+ except UnicodeEncodeError:
+ print("Cant encode string for logging. Skipping.")
+
+ txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0
+ txts.append(txt)
+ txts = np.stack(txts)
+ txts = torch.tensor(txts)
+ return txts
+
+
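+# Like functools.partial, but for classes: returns a subclass of cls whose
+# __init__ has the given arguments pre-bound.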
+def partialclass(cls, *args, **kwargs):
+ class NewCls(cls):
+ __init__ = functools.partialmethod(cls.__init__, *args, **kwargs)
+
+ return NewCls
+
+
+def make_path_absolute(path):
+ fs, p = fsspec.core.url_to_fs(path)
+ if fs.protocol == "file":
+ return os.path.abspath(p)
+ return path
+
+
+def ismap(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+ return (len(x.shape) == 4) and (x.shape[1] > 3)
+
+
+def isimage(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+ return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1)
+
+
+def isheatmap(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+
+ return x.ndim == 2
+
+
+def isneighbors(x):
+ if not isinstance(x, torch.Tensor):
+ return False
+ return x.ndim == 5 and (x.shape[2] == 3 or x.shape[2] == 1)
+
+
+def exists(x):
+ return x is not None
+
+
+def expand_dims_like(x, y):
+ while x.dim() != y.dim():
+ x = x.unsqueeze(-1)
+ return x
+
+
+def default(val, d):
+ if exists(val):
+ return val
+ return d() if isfunction(d) else d
+
+
+def mean_flat(tensor):
+ """
+ https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86
+ Take the mean over all non-batch dimensions.
+ """
+ return tensor.mean(dim=list(range(1, len(tensor.shape))))
+
+
+def count_params(model, verbose=False):
+ total_params = sum(p.numel() for p in model.parameters())
+ if verbose:
+ print(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.")
+ return total_params
+
+
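+# Config-driven factory: a dict with "target" (a dotted import path) and
+# optional "params" is resolved to an object and instantiated.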
+def instantiate_from_config(config):
+ if not "target" in config:
+ if config == "__is_first_stage__":
+ return None
+ elif config == "__is_unconditional__":
+ return None
+ raise KeyError("Expected key `target` to instantiate.")
+ return get_obj_from_str(config["target"])(**config.get("params", dict()))
+
+
+def get_obj_from_str(string, reload=False, invalidate_cache=True):
+ module, cls = string.rsplit(".", 1)
+ if invalidate_cache:
+ importlib.invalidate_caches()
+ if reload:
+ module_imp = importlib.import_module(module)
+ importlib.reload(module_imp)
+ return getattr(importlib.import_module(module, package=None), cls)
+
+
+def append_zero(x):
+ return torch.cat([x, x.new_zeros([1])])
+
+
+def append_dims(x, target_dims):
+ """Appends dimensions to the end of a tensor until it has target_dims dimensions."""
+ dims_to_append = target_dims - x.ndim
+ if dims_to_append < 0:
+ raise ValueError(
+ f"input has {x.ndim} dims but target_dims is {target_dims}, which is less"
+ )
+ return x[(...,) + (None,) * dims_to_append]
+
+
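+# Instantiate config.model and load weights from a Lightning .ckpt or a
+# .safetensors file (strict=False, reporting missing/unexpected keys);
+# optionally freeze all parameters and switch to eval mode.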
+def load_model_from_config(config, ckpt, verbose=True, freeze=True):
+ print(f"Loading model from {ckpt}")
+ if ckpt.endswith("ckpt"):
+ pl_sd = torch.load(ckpt, map_location="cpu")
+ if "global_step" in pl_sd:
+ print(f"Global Step: {pl_sd['global_step']}")
+ sd = pl_sd["state_dict"]
+ elif ckpt.endswith("safetensors"):
+ sd = load_safetensors(ckpt)
+ else:
+ raise NotImplementedError
+
+ model = instantiate_from_config(config.model)
+
+ m, u = model.load_state_dict(sd, strict=False)
+
+ if len(m) > 0 and verbose:
+ print("missing keys:")
+ print(m)
+ if len(u) > 0 and verbose:
+ print("unexpected keys:")
+ print(u)
+
+ if freeze:
+ for param in model.parameters():
+ param.requires_grad = False
+
+ model.eval()
+ return model
+
+
+def get_configs_path() -> str:
+ """
+ Get the `configs` directory.
+ For a working copy, this is the one in the root of the repository,
+ but for an installed copy, it's in the `sgm` package (see pyproject.toml).
+ """
+ this_dir = os.path.dirname(__file__)
+ candidates = (
+ os.path.join(this_dir, "configs"),
+ os.path.join(this_dir, "..", "configs"),
+ )
+ for candidate in candidates:
+ candidate = os.path.abspath(candidate)
+ if os.path.isdir(candidate):
+ return candidate
+ raise FileNotFoundError(f"Could not find SGM configs in {candidates}")
+
+
+def get_nested_attribute(obj, attribute_path, depth=None, return_key=False):
+ """
+ Will return the result of a recursive get attribute call.
+ E.g.:
+ a.b.c
+ = getattr(getattr(a, "b"), "c")
+ = get_nested_attribute(a, "b.c")
+ If any part of the attribute call is an integer x with current obj a, will
+ try to call a[x] instead of a.x first.
+ """
+ attributes = attribute_path.split(".")
+ if depth is not None and depth > 0:
+ attributes = attributes[:depth]
+ assert len(attributes) > 0, "At least one attribute should be selected"
+ current_attribute = obj
+ current_key = None
+ for level, attribute in enumerate(attributes):
+ current_key = ".".join(attributes[: level + 1])
+ try:
+ id_ = int(attribute)
+ current_attribute = current_attribute[id_]
+ except ValueError:
+ current_attribute = getattr(current_attribute, attribute)
+
+ return (current_attribute, current_key) if return_key else current_attribute
diff --git a/shell_scripts/raw_img_list/Animals.txt b/shell_scripts/raw_img_list/Animals.txt
new file mode 100644
index 0000000000000000000000000000000000000000..06e0a62ea52609b956d23c0424651dd9b2097db1
--- /dev/null
+++ b/shell_scripts/raw_img_list/Animals.txt
@@ -0,0 +1,12487 @@
+0/10017/campos_512_v4
+0/10031/campos_512_v4
+0/10050/campos_512_v4
+0/10075/campos_512_v4
+0/10118/campos_512_v4
+0/10120/campos_512_v4
+0/10142/campos_512_v4
+0/10155/campos_512_v4
+0/10174/campos_512_v4
+0/10178/campos_512_v4
+0/10193/campos_512_v4
+0/10196/campos_512_v4
+0/10218/campos_512_v4
+0/10230/campos_512_v4
+0/10253/campos_512_v4
+0/10271/campos_512_v4
+0/10314/campos_512_v4
+0/10342/campos_512_v4
+0/10422/campos_512_v4
+0/10425/campos_512_v4
+0/10457/campos_512_v4
+0/10462/campos_512_v4
+0/10507/campos_512_v4
+0/10517/campos_512_v4
+0/10529/campos_512_v4
+0/10554/campos_512_v4
+0/10585/campos_512_v4
+0/10619/campos_512_v4
+0/10636/campos_512_v4
+0/10639/campos_512_v4
+0/10664/campos_512_v4
+0/10679/campos_512_v4
+0/10770/campos_512_v4
+0/10772/campos_512_v4
+0/10867/campos_512_v4
+0/10891/campos_512_v4
+0/10955/campos_512_v4
+0/10971/campos_512_v4
+0/10998/campos_512_v4
+0/11068/campos_512_v4
+0/11075/campos_512_v4
+0/11133/campos_512_v4
+0/11300/campos_512_v4
+0/11453/campos_512_v4
+0/11546/campos_512_v4
+0/11649/campos_512_v4
+0/11726/campos_512_v4
+0/11869/campos_512_v4
+0/11959/campos_512_v4
+0/12023/campos_512_v4
+0/12259/campos_512_v4
+0/12495/campos_512_v4
+0/12535/campos_512_v4
+0/12539/campos_512_v4
+0/12600/campos_512_v4
+0/12603/campos_512_v4
+0/12691/campos_512_v4
+0/12706/campos_512_v4
+0/12859/campos_512_v4
+0/12926/campos_512_v4
+0/12962/campos_512_v4
+0/13087/campos_512_v4
+0/13240/campos_512_v4
+0/13277/campos_512_v4
+0/13312/campos_512_v4
+0/13547/campos_512_v4
+0/13677/campos_512_v4
+0/13838/campos_512_v4
+0/13872/campos_512_v4
+0/13887/campos_512_v4
+0/14050/campos_512_v4
+0/14307/campos_512_v4
+0/14359/campos_512_v4
+0/14544/campos_512_v4
+0/14686/campos_512_v4
+0/14781/campos_512_v4
+0/14816/campos_512_v4
+0/14903/campos_512_v4
+0/14911/campos_512_v4
+1/15366/campos_512_v4
+1/15480/campos_512_v4
+1/15563/campos_512_v4
+1/15812/campos_512_v4
+1/15836/campos_512_v4
+1/15943/campos_512_v4
+1/15949/campos_512_v4
+1/15983/campos_512_v4
+1/16108/campos_512_v4
+1/16120/campos_512_v4
+1/16162/campos_512_v4
+1/16251/campos_512_v4
+1/16295/campos_512_v4
+1/16340/campos_512_v4
+1/16496/campos_512_v4
+1/16533/campos_512_v4
+1/16581/campos_512_v4
+1/16694/campos_512_v4
+1/16736/campos_512_v4
+1/16793/campos_512_v4
+1/16841/campos_512_v4
+1/16894/campos_512_v4
+1/16987/campos_512_v4
+1/17001/campos_512_v4
+1/17142/campos_512_v4
+1/17158/campos_512_v4
+1/17168/campos_512_v4
+1/17379/campos_512_v4
+1/17398/campos_512_v4
+1/17531/campos_512_v4
+1/17540/campos_512_v4
+1/17679/campos_512_v4
+1/17762/campos_512_v4
+1/17816/campos_512_v4
+1/17951/campos_512_v4
+1/18155/campos_512_v4
+1/18284/campos_512_v4
+1/18349/campos_512_v4
+1/18617/campos_512_v4
+1/18629/campos_512_v4
+1/18635/campos_512_v4
+1/18701/campos_512_v4
+1/18936/campos_512_v4
+1/18984/campos_512_v4
+1/18997/campos_512_v4
+1/19090/campos_512_v4
+1/19156/campos_512_v4
+1/19236/campos_512_v4
+1/19618/campos_512_v4
+1/19628/campos_512_v4
+1/19745/campos_512_v4
+1/19887/campos_512_v4
+1/19917/campos_512_v4
+10/60279/campos_512_v4
+10/60419/campos_512_v4
+10/60431/campos_512_v4
+10/60511/campos_512_v4
+10/60718/campos_512_v4
+10/60881/campos_512_v4
+10/61050/campos_512_v4
+10/61125/campos_512_v4
+10/61341/campos_512_v4
+10/61526/campos_512_v4
+10/61764/campos_512_v4
+10/61813/campos_512_v4
+10/62175/campos_512_v4
+10/62191/campos_512_v4
+10/62581/campos_512_v4
+10/62867/campos_512_v4
+10/63079/campos_512_v4
+10/63600/campos_512_v4
+10/63728/campos_512_v4
+10/64105/campos_512_v4
+10/64153/campos_512_v4
+10/64363/campos_512_v4
+10/64412/campos_512_v4
+10/64414/campos_512_v4
+100/510010/campos_512_v4
+100/510048/campos_512_v4
+100/510119/campos_512_v4
+100/510298/campos_512_v4
+100/510371/campos_512_v4
+100/510404/campos_512_v4
+100/510521/campos_512_v4
+100/510566/campos_512_v4
+100/510587/campos_512_v4
+100/510642/campos_512_v4
+100/510780/campos_512_v4
+100/510808/campos_512_v4
+100/510819/campos_512_v4
+100/510849/campos_512_v4
+100/510975/campos_512_v4
+100/510998/campos_512_v4
+100/511000/campos_512_v4
+100/511027/campos_512_v4
+100/511048/campos_512_v4
+100/511064/campos_512_v4
+100/511148/campos_512_v4
+100/511172/campos_512_v4
+100/511313/campos_512_v4
+100/511387/campos_512_v4
+100/511442/campos_512_v4
+100/511626/campos_512_v4
+100/511648/campos_512_v4
+100/511714/campos_512_v4
+100/511735/campos_512_v4
+100/511745/campos_512_v4
+100/511797/campos_512_v4
+100/511856/campos_512_v4
+100/511964/campos_512_v4
+100/512008/campos_512_v4
+100/512031/campos_512_v4
+100/512035/campos_512_v4
+100/512041/campos_512_v4
+100/512081/campos_512_v4
+100/512143/campos_512_v4
+100/512196/campos_512_v4
+100/512366/campos_512_v4
+100/512373/campos_512_v4
+100/512456/campos_512_v4
+100/512469/campos_512_v4
+100/512475/campos_512_v4
+100/512484/campos_512_v4
+100/512610/campos_512_v4
+100/512641/campos_512_v4
+100/512710/campos_512_v4
+100/512714/campos_512_v4
+100/512758/campos_512_v4
+100/512765/campos_512_v4
+100/512768/campos_512_v4
+100/512792/campos_512_v4
+100/512822/campos_512_v4
+100/512878/campos_512_v4
+100/512890/campos_512_v4
+100/512941/campos_512_v4
+100/512944/campos_512_v4
+100/512989/campos_512_v4
+100/513071/campos_512_v4
+100/513082/campos_512_v4
+100/513118/campos_512_v4
+100/513201/campos_512_v4
+100/513283/campos_512_v4
+100/513309/campos_512_v4
+100/513320/campos_512_v4
+100/513359/campos_512_v4
+100/513404/campos_512_v4
+100/513496/campos_512_v4
+100/513531/campos_512_v4
+100/513561/campos_512_v4
+100/513580/campos_512_v4
+100/513622/campos_512_v4
+100/513631/campos_512_v4
+100/513714/campos_512_v4
+100/513835/campos_512_v4
+100/513838/campos_512_v4
+100/513869/campos_512_v4
+100/513880/campos_512_v4
+100/513910/campos_512_v4
+100/514008/campos_512_v4
+100/514013/campos_512_v4
+100/514070/campos_512_v4
+100/514071/campos_512_v4
+100/514073/campos_512_v4
+100/514090/campos_512_v4
+100/514186/campos_512_v4
+100/514284/campos_512_v4
+100/514300/campos_512_v4
+100/514364/campos_512_v4
+100/514443/campos_512_v4
+100/514618/campos_512_v4
+100/514622/campos_512_v4
+100/514703/campos_512_v4
+100/514782/campos_512_v4
+100/514799/campos_512_v4
+100/514800/campos_512_v4
+100/514875/campos_512_v4
+100/514886/campos_512_v4
+101/515048/campos_512_v4
+101/515316/campos_512_v4
+101/515502/campos_512_v4
+101/515625/campos_512_v4
+101/515647/campos_512_v4
+101/515663/campos_512_v4
+101/515688/campos_512_v4
+101/515696/campos_512_v4
+101/515725/campos_512_v4
+101/515770/campos_512_v4
+101/515906/campos_512_v4
+101/515983/campos_512_v4
+101/516229/campos_512_v4
+101/516278/campos_512_v4
+101/516321/campos_512_v4
+101/516353/campos_512_v4
+101/516393/campos_512_v4
+101/516449/campos_512_v4
+101/516465/campos_512_v4
+101/516561/campos_512_v4
+101/516664/campos_512_v4
+101/516679/campos_512_v4
+101/516685/campos_512_v4
+101/516686/campos_512_v4
+101/516747/campos_512_v4
+101/516837/campos_512_v4
+101/516838/campos_512_v4
+101/516858/campos_512_v4
+101/516868/campos_512_v4
+101/516991/campos_512_v4
+101/517034/campos_512_v4
+101/517036/campos_512_v4
+101/517049/campos_512_v4
+101/517086/campos_512_v4
+101/517111/campos_512_v4
+101/517128/campos_512_v4
+101/517136/campos_512_v4
+101/517282/campos_512_v4
+101/517524/campos_512_v4
+101/517568/campos_512_v4
+101/517611/campos_512_v4
+101/517653/campos_512_v4
+101/517747/campos_512_v4
+101/517889/campos_512_v4
+101/517897/campos_512_v4
+101/517914/campos_512_v4
+101/517959/campos_512_v4
+101/517990/campos_512_v4
+101/518259/campos_512_v4
+101/518294/campos_512_v4
+101/518312/campos_512_v4
+101/518318/campos_512_v4
+101/518319/campos_512_v4
+101/518380/campos_512_v4
+101/518405/campos_512_v4
+101/518441/campos_512_v4
+101/518479/campos_512_v4
+101/518502/campos_512_v4
+101/518537/campos_512_v4
+101/518560/campos_512_v4
+101/518587/campos_512_v4
+101/518655/campos_512_v4
+101/518745/campos_512_v4
+101/518783/campos_512_v4
+101/518940/campos_512_v4
+101/518941/campos_512_v4
+101/518983/campos_512_v4
+101/519043/campos_512_v4
+101/519111/campos_512_v4
+101/519165/campos_512_v4
+101/519192/campos_512_v4
+101/519249/campos_512_v4
+101/519346/campos_512_v4
+101/519487/campos_512_v4
+101/519574/campos_512_v4
+101/519630/campos_512_v4
+101/519672/campos_512_v4
+101/519770/campos_512_v4
+101/519792/campos_512_v4
+101/519794/campos_512_v4
+101/519815/campos_512_v4
+101/519816/campos_512_v4
+101/519878/campos_512_v4
+102/520051/campos_512_v4
+102/520062/campos_512_v4
+102/520067/campos_512_v4
+102/520110/campos_512_v4
+102/520138/campos_512_v4
+102/520224/campos_512_v4
+102/520233/campos_512_v4
+102/520238/campos_512_v4
+102/520602/campos_512_v4
+102/520654/campos_512_v4
+102/520655/campos_512_v4
+102/520781/campos_512_v4
+102/520784/campos_512_v4
+102/520808/campos_512_v4
+102/520812/campos_512_v4
+102/520854/campos_512_v4
+102/521028/campos_512_v4
+102/521081/campos_512_v4
+102/521192/campos_512_v4
+102/521399/campos_512_v4
+102/521470/campos_512_v4
+102/521768/campos_512_v4
+102/521779/campos_512_v4
+102/521821/campos_512_v4
+102/521892/campos_512_v4
+102/521941/campos_512_v4
+102/521956/campos_512_v4
+102/521963/campos_512_v4
+102/522020/campos_512_v4
+102/522038/campos_512_v4
+102/522140/campos_512_v4
+102/522174/campos_512_v4
+102/522232/campos_512_v4
+102/522290/campos_512_v4
+102/522331/campos_512_v4
+102/522367/campos_512_v4
+102/522434/campos_512_v4
+102/522436/campos_512_v4
+102/522505/campos_512_v4
+102/522857/campos_512_v4
+102/522913/campos_512_v4
+102/522972/campos_512_v4
+102/523060/campos_512_v4
+102/523407/campos_512_v4
+102/523462/campos_512_v4
+102/523514/campos_512_v4
+102/523537/campos_512_v4
+102/523634/campos_512_v4
+102/523775/campos_512_v4
+102/523797/campos_512_v4
+102/523901/campos_512_v4
+102/523937/campos_512_v4
+102/523971/campos_512_v4
+102/524003/campos_512_v4
+102/524039/campos_512_v4
+102/524284/campos_512_v4
+102/524351/campos_512_v4
+102/524400/campos_512_v4
+102/524542/campos_512_v4
+102/524548/campos_512_v4
+102/524550/campos_512_v4
+102/524682/campos_512_v4
+102/524695/campos_512_v4
+102/524728/campos_512_v4
+102/524773/campos_512_v4
+102/524802/campos_512_v4
+102/524876/campos_512_v4
+102/524888/campos_512_v4
+102/524999/campos_512_v4
+103/525124/campos_512_v4
+103/525148/campos_512_v4
+103/525169/campos_512_v4
+103/525178/campos_512_v4
+103/525201/campos_512_v4
+103/525260/campos_512_v4
+103/525342/campos_512_v4
+103/525386/campos_512_v4
+103/525426/campos_512_v4
+103/525501/campos_512_v4
+103/525540/campos_512_v4
+103/525619/campos_512_v4
+103/525879/campos_512_v4
+103/525925/campos_512_v4
+103/526005/campos_512_v4
+103/526099/campos_512_v4
+103/526128/campos_512_v4
+103/526241/campos_512_v4
+103/526326/campos_512_v4
+103/526408/campos_512_v4
+103/526481/campos_512_v4
+103/526497/campos_512_v4
+103/526540/campos_512_v4
+103/526544/campos_512_v4
+103/526610/campos_512_v4
+103/526676/campos_512_v4
+103/526728/campos_512_v4
+103/526753/campos_512_v4
+103/526759/campos_512_v4
+103/526765/campos_512_v4
+103/526776/campos_512_v4
+103/526810/campos_512_v4
+103/526933/campos_512_v4
+103/526971/campos_512_v4
+103/526973/campos_512_v4
+103/527016/campos_512_v4
+103/527050/campos_512_v4
+103/527090/campos_512_v4
+103/527104/campos_512_v4
+103/527111/campos_512_v4
+103/527347/campos_512_v4
+103/527358/campos_512_v4
+103/527363/campos_512_v4
+103/527373/campos_512_v4
+103/527405/campos_512_v4
+103/527413/campos_512_v4
+103/527416/campos_512_v4
+103/527444/campos_512_v4
+103/527467/campos_512_v4
+103/527495/campos_512_v4
+103/527526/campos_512_v4
+103/527544/campos_512_v4
+103/527588/campos_512_v4
+103/527756/campos_512_v4
+103/527780/campos_512_v4
+103/527806/campos_512_v4
+103/527813/campos_512_v4
+103/527843/campos_512_v4
+103/527876/campos_512_v4
+103/527997/campos_512_v4
+103/528061/campos_512_v4
+103/528103/campos_512_v4
+103/528121/campos_512_v4
+103/528146/campos_512_v4
+103/528170/campos_512_v4
+103/528219/campos_512_v4
+103/528282/campos_512_v4
+103/528302/campos_512_v4
+103/528357/campos_512_v4
+103/528381/campos_512_v4
+103/528494/campos_512_v4
+103/528651/campos_512_v4
+103/528704/campos_512_v4
+103/528749/campos_512_v4
+103/528793/campos_512_v4
+103/528849/campos_512_v4
+103/528978/campos_512_v4
+103/528987/campos_512_v4
+103/529053/campos_512_v4
+103/529097/campos_512_v4
+103/529320/campos_512_v4
+103/529404/campos_512_v4
+103/529448/campos_512_v4
+103/529477/campos_512_v4
+103/529499/campos_512_v4
+103/529533/campos_512_v4
+103/529725/campos_512_v4
+103/529801/campos_512_v4
+103/529956/campos_512_v4
+103/529973/campos_512_v4
+103/529976/campos_512_v4
+104/530164/campos_512_v4
+104/530198/campos_512_v4
+104/530205/campos_512_v4
+104/530233/campos_512_v4
+104/530402/campos_512_v4
+104/530570/campos_512_v4
+104/530685/campos_512_v4
+104/530690/campos_512_v4
+104/530767/campos_512_v4
+104/530793/campos_512_v4
+104/530873/campos_512_v4
+104/530919/campos_512_v4
+104/530998/campos_512_v4
+104/531088/campos_512_v4
+104/531162/campos_512_v4
+104/531178/campos_512_v4
+104/531194/campos_512_v4
+104/531201/campos_512_v4
+104/531311/campos_512_v4
+104/531386/campos_512_v4
+104/531446/campos_512_v4
+104/531469/campos_512_v4
+104/531531/campos_512_v4
+104/531585/campos_512_v4
+104/531613/campos_512_v4
+104/531717/campos_512_v4
+104/531756/campos_512_v4
+104/531799/campos_512_v4
+104/531835/campos_512_v4
+104/531908/campos_512_v4
+104/531990/campos_512_v4
+104/532018/campos_512_v4
+104/532171/campos_512_v4
+104/532234/campos_512_v4
+104/532247/campos_512_v4
+104/532313/campos_512_v4
+104/532375/campos_512_v4
+104/532433/campos_512_v4
+104/532454/campos_512_v4
+104/532479/campos_512_v4
+104/532512/campos_512_v4
+104/532554/campos_512_v4
+104/532559/campos_512_v4
+104/532580/campos_512_v4
+104/532680/campos_512_v4
+104/532733/campos_512_v4
+104/532750/campos_512_v4
+104/532851/campos_512_v4
+104/532902/campos_512_v4
+104/532971/campos_512_v4
+104/532992/campos_512_v4
+104/533022/campos_512_v4
+104/533148/campos_512_v4
+104/533283/campos_512_v4
+104/533287/campos_512_v4
+104/533346/campos_512_v4
+104/533445/campos_512_v4
+104/533484/campos_512_v4
+104/533513/campos_512_v4
+104/533643/campos_512_v4
+104/533649/campos_512_v4
+104/533664/campos_512_v4
+104/533714/campos_512_v4
+104/533760/campos_512_v4
+104/533796/campos_512_v4
+104/533887/campos_512_v4
+104/533892/campos_512_v4
+104/533893/campos_512_v4
+104/534056/campos_512_v4
+104/534096/campos_512_v4
+104/534192/campos_512_v4
+104/534207/campos_512_v4
+104/534365/campos_512_v4
+104/534387/campos_512_v4
+104/534430/campos_512_v4
+104/534488/campos_512_v4
+104/534634/campos_512_v4
+104/534683/campos_512_v4
+104/534721/campos_512_v4
+104/534728/campos_512_v4
+104/534742/campos_512_v4
+105/535018/campos_512_v4
+105/535099/campos_512_v4
+105/535115/campos_512_v4
+105/535190/campos_512_v4
+105/535286/campos_512_v4
+105/535339/campos_512_v4
+105/535350/campos_512_v4
+105/535388/campos_512_v4
+105/535391/campos_512_v4
+105/535476/campos_512_v4
+105/535599/campos_512_v4
+105/535613/campos_512_v4
+105/535758/campos_512_v4
+105/535777/campos_512_v4
+105/535897/campos_512_v4
+105/535972/campos_512_v4
+105/535996/campos_512_v4
+105/536046/campos_512_v4
+105/536058/campos_512_v4
+105/536090/campos_512_v4
+105/536156/campos_512_v4
+105/536208/campos_512_v4
+105/536337/campos_512_v4
+105/536444/campos_512_v4
+105/536486/campos_512_v4
+105/536580/campos_512_v4
+105/536594/campos_512_v4
+105/536642/campos_512_v4
+105/536647/campos_512_v4
+105/536679/campos_512_v4
+105/536723/campos_512_v4
+105/536816/campos_512_v4
+105/536840/campos_512_v4
+105/536961/campos_512_v4
+105/537132/campos_512_v4
+105/537151/campos_512_v4
+105/537183/campos_512_v4
+105/537193/campos_512_v4
+105/537224/campos_512_v4
+105/537235/campos_512_v4
+105/537380/campos_512_v4
+105/537620/campos_512_v4
+105/537674/campos_512_v4
+105/537760/campos_512_v4
+105/537881/campos_512_v4
+105/537959/campos_512_v4
+105/537976/campos_512_v4
+105/538044/campos_512_v4
+105/538094/campos_512_v4
+105/538124/campos_512_v4
+105/538192/campos_512_v4
+105/538242/campos_512_v4
+105/538255/campos_512_v4
+105/538275/campos_512_v4
+105/538282/campos_512_v4
+105/538299/campos_512_v4
+105/538343/campos_512_v4
+105/538416/campos_512_v4
+105/538518/campos_512_v4
+105/538550/campos_512_v4
+105/538578/campos_512_v4
+105/538735/campos_512_v4
+105/538800/campos_512_v4
+105/538801/campos_512_v4
+105/538820/campos_512_v4
+105/538830/campos_512_v4
+105/538884/campos_512_v4
+105/538913/campos_512_v4
+105/538964/campos_512_v4
+105/539058/campos_512_v4
+105/539096/campos_512_v4
+105/539132/campos_512_v4
+105/539145/campos_512_v4
+105/539163/campos_512_v4
+105/539278/campos_512_v4
+105/539299/campos_512_v4
+105/539423/campos_512_v4
+105/539445/campos_512_v4
+105/539543/campos_512_v4
+105/539562/campos_512_v4
+105/539595/campos_512_v4
+105/539647/campos_512_v4
+105/539910/campos_512_v4
+105/539926/campos_512_v4
+105/539930/campos_512_v4
+105/539953/campos_512_v4
+105/539976/campos_512_v4
+106/540009/campos_512_v4
+106/540027/campos_512_v4
+106/540041/campos_512_v4
+106/540051/campos_512_v4
+106/540179/campos_512_v4
+106/540203/campos_512_v4
+106/540287/campos_512_v4
+106/540303/campos_512_v4
+106/540342/campos_512_v4
+106/540359/campos_512_v4
+106/540493/campos_512_v4
+106/540542/campos_512_v4
+106/540551/campos_512_v4
+106/540562/campos_512_v4
+106/540579/campos_512_v4
+106/540603/campos_512_v4
+106/540627/campos_512_v4
+106/540683/campos_512_v4
+106/540748/campos_512_v4
+106/540776/campos_512_v4
+106/540827/campos_512_v4
+106/540864/campos_512_v4
+106/540873/campos_512_v4
+106/540918/campos_512_v4
+106/541050/campos_512_v4
+106/541120/campos_512_v4
+106/541179/campos_512_v4
+106/541258/campos_512_v4
+106/541309/campos_512_v4
+106/541312/campos_512_v4
+106/541333/campos_512_v4
+106/541397/campos_512_v4
+106/541462/campos_512_v4
+106/541491/campos_512_v4
+106/541639/campos_512_v4
+106/541707/campos_512_v4
+106/541717/campos_512_v4
+106/541726/campos_512_v4
+106/541744/campos_512_v4
+106/541757/campos_512_v4
+106/541773/campos_512_v4
+106/541860/campos_512_v4
+106/541888/campos_512_v4
+106/541933/campos_512_v4
+106/541934/campos_512_v4
+106/541992/campos_512_v4
+106/542013/campos_512_v4
+106/542049/campos_512_v4
+106/542066/campos_512_v4
+106/542081/campos_512_v4
+106/542105/campos_512_v4
+106/542220/campos_512_v4
+106/542260/campos_512_v4
+106/542262/campos_512_v4
+106/542328/campos_512_v4
+106/542416/campos_512_v4
+106/542441/campos_512_v4
+106/542467/campos_512_v4
+106/542497/campos_512_v4
+106/542535/campos_512_v4
+106/542543/campos_512_v4
+106/542564/campos_512_v4
+106/542669/campos_512_v4
+106/542791/campos_512_v4
+106/542835/campos_512_v4
+106/542841/campos_512_v4
+106/542887/campos_512_v4
+106/542899/campos_512_v4
+106/542978/campos_512_v4
+106/543027/campos_512_v4
+106/543091/campos_512_v4
+106/543092/campos_512_v4
+106/543111/campos_512_v4
+106/543182/campos_512_v4
+106/543233/campos_512_v4
+106/543258/campos_512_v4
+106/543267/campos_512_v4
+106/543276/campos_512_v4
+106/543369/campos_512_v4
+106/543390/campos_512_v4
+106/543448/campos_512_v4
+106/543460/campos_512_v4
+106/543568/campos_512_v4
+106/543705/campos_512_v4
+106/543735/campos_512_v4
+106/543840/campos_512_v4
+106/543844/campos_512_v4
+106/543931/campos_512_v4
+106/544102/campos_512_v4
+106/544359/campos_512_v4
+106/544360/campos_512_v4
+106/544377/campos_512_v4
+106/544413/campos_512_v4
+106/544438/campos_512_v4
+106/544499/campos_512_v4
+106/544519/campos_512_v4
+106/544560/campos_512_v4
+106/544563/campos_512_v4
+106/544632/campos_512_v4
+106/544641/campos_512_v4
+106/544687/campos_512_v4
+106/544769/campos_512_v4
+106/544820/campos_512_v4
+106/544886/campos_512_v4
+107/545069/campos_512_v4
+107/545084/campos_512_v4
+107/545156/campos_512_v4
+107/545181/campos_512_v4
+107/545423/campos_512_v4
+107/545445/campos_512_v4
+107/545546/campos_512_v4
+107/545561/campos_512_v4
+107/545584/campos_512_v4
+107/545632/campos_512_v4
+107/545745/campos_512_v4
+107/545759/campos_512_v4
+107/545773/campos_512_v4
+107/545955/campos_512_v4
+107/546041/campos_512_v4
+107/546047/campos_512_v4
+107/546123/campos_512_v4
+107/546157/campos_512_v4
+107/546172/campos_512_v4
+107/546222/campos_512_v4
+107/546316/campos_512_v4
+107/546400/campos_512_v4
+107/546503/campos_512_v4
+107/546562/campos_512_v4
+107/546632/campos_512_v4
+107/546658/campos_512_v4
+107/546664/campos_512_v4
+107/546691/campos_512_v4
+107/546722/campos_512_v4
+107/546766/campos_512_v4
+107/546805/campos_512_v4
+107/546935/campos_512_v4
+107/546987/campos_512_v4
+107/547135/campos_512_v4
+107/547140/campos_512_v4
+107/547215/campos_512_v4
+107/547361/campos_512_v4
+107/547535/campos_512_v4
+107/547781/campos_512_v4
+107/547809/campos_512_v4
+107/547842/campos_512_v4
+107/547861/campos_512_v4
+107/547950/campos_512_v4
+107/547981/campos_512_v4
+107/547991/campos_512_v4
+107/548008/campos_512_v4
+107/548032/campos_512_v4
+107/548060/campos_512_v4
+107/548107/campos_512_v4
+107/548163/campos_512_v4
+107/548208/campos_512_v4
+107/548218/campos_512_v4
+107/548220/campos_512_v4
+107/548301/campos_512_v4
+107/548351/campos_512_v4
+107/548361/campos_512_v4
+107/548388/campos_512_v4
+107/548412/campos_512_v4
+107/548497/campos_512_v4
+107/548536/campos_512_v4
+107/548537/campos_512_v4
+107/548578/campos_512_v4
+107/548633/campos_512_v4
+107/548733/campos_512_v4
+107/548743/campos_512_v4
+107/548745/campos_512_v4
+107/548847/campos_512_v4
+107/548867/campos_512_v4
+107/548957/campos_512_v4
+107/548976/campos_512_v4
+107/548977/campos_512_v4
+107/549075/campos_512_v4
+107/549153/campos_512_v4
+107/549234/campos_512_v4
+107/549290/campos_512_v4
+107/549299/campos_512_v4
+107/549329/campos_512_v4
+107/549349/campos_512_v4
+107/549428/campos_512_v4
+107/549468/campos_512_v4
+107/549538/campos_512_v4
+107/549645/campos_512_v4
+107/549667/campos_512_v4
+107/549717/campos_512_v4
+107/549761/campos_512_v4
+107/549764/campos_512_v4
+107/549857/campos_512_v4
+107/549949/campos_512_v4
+107/549951/campos_512_v4
+108/550033/campos_512_v4
+108/550085/campos_512_v4
+108/550238/campos_512_v4
+108/550244/campos_512_v4
+108/550270/campos_512_v4
+108/550440/campos_512_v4
+108/550450/campos_512_v4
+108/550523/campos_512_v4
+108/550530/campos_512_v4
+108/550569/campos_512_v4
+108/550601/campos_512_v4
+108/550756/campos_512_v4
+108/550810/campos_512_v4
+108/550857/campos_512_v4
+108/550858/campos_512_v4
+108/550945/campos_512_v4
+108/551004/campos_512_v4
+108/551022/campos_512_v4
+108/551077/campos_512_v4
+108/551146/campos_512_v4
+108/551223/campos_512_v4
+108/551230/campos_512_v4
+108/551333/campos_512_v4
+108/551337/campos_512_v4
+108/551365/campos_512_v4
+108/551367/campos_512_v4
+108/551436/campos_512_v4
+108/551448/campos_512_v4
+108/551540/campos_512_v4
+108/551584/campos_512_v4
+108/551589/campos_512_v4
+108/551806/campos_512_v4
+108/551902/campos_512_v4
+108/551938/campos_512_v4
+108/551954/campos_512_v4
+108/552085/campos_512_v4
+108/552110/campos_512_v4
+108/552111/campos_512_v4
+108/552168/campos_512_v4
+108/552209/campos_512_v4
+108/552256/campos_512_v4
+108/552473/campos_512_v4
+108/552555/campos_512_v4
+108/552577/campos_512_v4
+108/552663/campos_512_v4
+108/552723/campos_512_v4
+108/552736/campos_512_v4
+108/552738/campos_512_v4
+108/552750/campos_512_v4
+108/552766/campos_512_v4
+108/552769/campos_512_v4
+108/552795/campos_512_v4
+108/552800/campos_512_v4
+108/552854/campos_512_v4
+108/552865/campos_512_v4
+108/552881/campos_512_v4
+108/552998/campos_512_v4
+108/553095/campos_512_v4
+108/553109/campos_512_v4
+108/553139/campos_512_v4
+108/553294/campos_512_v4
+108/553303/campos_512_v4
+108/553484/campos_512_v4
+108/553523/campos_512_v4
+108/553635/campos_512_v4
+108/553646/campos_512_v4
+108/553684/campos_512_v4
+108/553761/campos_512_v4
+108/553999/campos_512_v4
+108/554037/campos_512_v4
+108/554074/campos_512_v4
+108/554105/campos_512_v4
+108/554115/campos_512_v4
+108/554122/campos_512_v4
+108/554147/campos_512_v4
+108/554160/campos_512_v4
+108/554199/campos_512_v4
+108/554225/campos_512_v4
+108/554250/campos_512_v4
+108/554284/campos_512_v4
+108/554286/campos_512_v4
+108/554445/campos_512_v4
+108/554488/campos_512_v4
+108/554490/campos_512_v4
+108/554550/campos_512_v4
+108/554599/campos_512_v4
+108/554605/campos_512_v4
+108/554977/campos_512_v4
+109/555008/campos_512_v4
+109/555010/campos_512_v4
+109/555062/campos_512_v4
+109/555086/campos_512_v4
+109/555350/campos_512_v4
+109/555431/campos_512_v4
+109/555522/campos_512_v4
+109/555529/campos_512_v4
+109/555573/campos_512_v4
+109/555578/campos_512_v4
+109/555736/campos_512_v4
+109/555927/campos_512_v4
+109/555976/campos_512_v4
+109/555986/campos_512_v4
+109/556115/campos_512_v4
+109/556205/campos_512_v4
+109/556223/campos_512_v4
+109/556244/campos_512_v4
+109/556326/campos_512_v4
+109/556338/campos_512_v4
+109/556383/campos_512_v4
+109/556423/campos_512_v4
+109/556444/campos_512_v4
+109/556490/campos_512_v4
+109/556538/campos_512_v4
+109/556559/campos_512_v4
+109/556595/campos_512_v4
+109/556736/campos_512_v4
+109/556768/campos_512_v4
+109/556781/campos_512_v4
+109/556785/campos_512_v4
+109/556854/campos_512_v4
+109/556855/campos_512_v4
+109/556864/campos_512_v4
+109/556917/campos_512_v4
+109/556922/campos_512_v4
+109/556982/campos_512_v4
+109/557029/campos_512_v4
+109/557074/campos_512_v4
+109/557206/campos_512_v4
+109/557308/campos_512_v4
+109/557327/campos_512_v4
+109/557514/campos_512_v4
+109/557690/campos_512_v4
+109/557727/campos_512_v4
+109/557737/campos_512_v4
+109/557751/campos_512_v4
+109/557853/campos_512_v4
+109/557908/campos_512_v4
+109/557917/campos_512_v4
+109/557945/campos_512_v4
+109/557963/campos_512_v4
+109/558024/campos_512_v4
+109/558052/campos_512_v4
+109/558056/campos_512_v4
+109/558095/campos_512_v4
+109/558136/campos_512_v4
+109/558155/campos_512_v4
+109/558188/campos_512_v4
+109/558256/campos_512_v4
+109/558285/campos_512_v4
+109/558298/campos_512_v4
+109/558332/campos_512_v4
+109/558386/campos_512_v4
+109/558392/campos_512_v4
+109/558426/campos_512_v4
+109/558434/campos_512_v4
+109/558482/campos_512_v4
+109/558534/campos_512_v4
+109/558560/campos_512_v4
+109/558664/campos_512_v4
+109/558697/campos_512_v4
+109/558702/campos_512_v4
+109/558703/campos_512_v4
+109/558731/campos_512_v4
+109/558879/campos_512_v4
+109/558897/campos_512_v4
+109/558999/campos_512_v4
+109/559059/campos_512_v4
+109/559150/campos_512_v4
+109/559204/campos_512_v4
+109/559207/campos_512_v4
+109/559219/campos_512_v4
+109/559238/campos_512_v4
+109/559251/campos_512_v4
+109/559267/campos_512_v4
+109/559314/campos_512_v4
+109/559364/campos_512_v4
+109/559390/campos_512_v4
+109/559400/campos_512_v4
+109/559459/campos_512_v4
+109/559484/campos_512_v4
+109/559553/campos_512_v4
+109/559664/campos_512_v4
+109/559665/campos_512_v4
+109/559725/campos_512_v4
+109/559765/campos_512_v4
+109/559833/campos_512_v4
+109/559963/campos_512_v4
+109/559983/campos_512_v4
+11/65028/campos_512_v4
+11/65453/campos_512_v4
+11/65595/campos_512_v4
+11/65610/campos_512_v4
+11/65666/campos_512_v4
+11/65684/campos_512_v4
+11/65691/campos_512_v4
+11/65722/campos_512_v4
+11/65889/campos_512_v4
+11/65917/campos_512_v4
+11/65936/campos_512_v4
+11/65962/campos_512_v4
+11/66065/campos_512_v4
+11/66181/campos_512_v4
+11/66262/campos_512_v4
+11/66429/campos_512_v4
+11/66610/campos_512_v4
+11/66977/campos_512_v4
+11/66980/campos_512_v4
+11/67101/campos_512_v4
+11/67327/campos_512_v4
+11/67437/campos_512_v4
+11/67837/campos_512_v4
+11/67852/campos_512_v4
+11/68072/campos_512_v4
+11/68207/campos_512_v4
+11/68242/campos_512_v4
+11/68329/campos_512_v4
+11/68376/campos_512_v4
+11/68389/campos_512_v4
+11/68546/campos_512_v4
+11/68757/campos_512_v4
+11/68857/campos_512_v4
+11/69403/campos_512_v4
+11/69549/campos_512_v4
+11/69559/campos_512_v4
+11/69729/campos_512_v4
+11/69732/campos_512_v4
+11/69746/campos_512_v4
+11/69763/campos_512_v4
+11/69807/campos_512_v4
+11/69858/campos_512_v4
+11/69939/campos_512_v4
+11/69960/campos_512_v4
+11/69963/campos_512_v4
+110/560049/campos_512_v4
+110/560096/campos_512_v4
+110/560130/campos_512_v4
+110/560264/campos_512_v4
+110/560267/campos_512_v4
+110/560270/campos_512_v4
+110/560283/campos_512_v4
+110/560334/campos_512_v4
+110/560412/campos_512_v4
+110/560439/campos_512_v4
+110/560478/campos_512_v4
+110/560577/campos_512_v4
+110/560684/campos_512_v4
+110/560692/campos_512_v4
+110/560708/campos_512_v4
+110/560720/campos_512_v4
+110/560809/campos_512_v4
+110/560811/campos_512_v4
+110/560857/campos_512_v4
+110/560955/campos_512_v4
+110/560987/campos_512_v4
+110/561179/campos_512_v4
+110/561318/campos_512_v4
+110/561345/campos_512_v4
+110/561380/campos_512_v4
+110/561637/campos_512_v4
+110/561716/campos_512_v4
+110/561753/campos_512_v4
+110/561864/campos_512_v4
+110/561875/campos_512_v4
+110/561911/campos_512_v4
+110/561939/campos_512_v4
+110/562130/campos_512_v4
+110/562321/campos_512_v4
+110/562339/campos_512_v4
+110/562491/campos_512_v4
+110/562569/campos_512_v4
+110/562604/campos_512_v4
+110/562605/campos_512_v4
+110/562651/campos_512_v4
+110/562656/campos_512_v4
+110/562662/campos_512_v4
+110/562693/campos_512_v4
+110/562743/campos_512_v4
+110/562853/campos_512_v4
+110/562921/campos_512_v4
+110/563006/campos_512_v4
+110/563012/campos_512_v4
+110/563030/campos_512_v4
+110/563103/campos_512_v4
+110/563190/campos_512_v4
+110/563235/campos_512_v4
+110/563265/campos_512_v4
+110/563267/campos_512_v4
+110/563305/campos_512_v4
+110/563316/campos_512_v4
+110/563377/campos_512_v4
+110/563471/campos_512_v4
+110/563472/campos_512_v4
+110/563484/campos_512_v4
+110/563554/campos_512_v4
+110/563576/campos_512_v4
+110/563579/campos_512_v4
+110/563883/campos_512_v4
+110/563949/campos_512_v4
+110/563983/campos_512_v4
+110/563989/campos_512_v4
+110/564041/campos_512_v4
+110/564047/campos_512_v4
+110/564131/campos_512_v4
+110/564238/campos_512_v4
+110/564293/campos_512_v4
+110/564298/campos_512_v4
+110/564315/campos_512_v4
+110/564632/campos_512_v4
+110/564651/campos_512_v4
+110/564729/campos_512_v4
+110/564824/campos_512_v4
+110/564848/campos_512_v4
+110/564875/campos_512_v4
+110/564945/campos_512_v4
+110/564960/campos_512_v4
+111/565027/campos_512_v4
+111/565052/campos_512_v4
+111/565250/campos_512_v4
+111/565504/campos_512_v4
+111/565520/campos_512_v4
+111/565582/campos_512_v4
+111/565599/campos_512_v4
+111/565633/campos_512_v4
+111/565942/campos_512_v4
+111/565956/campos_512_v4
+111/566006/campos_512_v4
+111/566168/campos_512_v4
+111/566180/campos_512_v4
+111/566191/campos_512_v4
+111/566211/campos_512_v4
+111/566243/campos_512_v4
+111/566266/campos_512_v4
+111/566389/campos_512_v4
+111/566404/campos_512_v4
+111/566527/campos_512_v4
+111/566546/campos_512_v4
+111/566582/campos_512_v4
+111/566648/campos_512_v4
+111/566859/campos_512_v4
+111/566886/campos_512_v4
+111/566942/campos_512_v4
+111/566962/campos_512_v4
+111/566980/campos_512_v4
+111/567007/campos_512_v4
+111/567010/campos_512_v4
+111/567031/campos_512_v4
+111/567195/campos_512_v4
+111/567306/campos_512_v4
+111/567329/campos_512_v4
+111/567343/campos_512_v4
+111/567366/campos_512_v4
+111/567377/campos_512_v4
+111/567427/campos_512_v4
+111/567502/campos_512_v4
+111/567580/campos_512_v4
+111/567774/campos_512_v4
+111/567812/campos_512_v4
+111/567912/campos_512_v4
+111/567942/campos_512_v4
+111/567999/campos_512_v4
+111/568006/campos_512_v4
+111/568012/campos_512_v4
+111/568032/campos_512_v4
+111/568055/campos_512_v4
+111/568246/campos_512_v4
+111/568253/campos_512_v4
+111/568300/campos_512_v4
+111/568384/campos_512_v4
+111/568441/campos_512_v4
+111/568483/campos_512_v4
+111/568586/campos_512_v4
+111/568604/campos_512_v4
+111/568660/campos_512_v4
+111/568661/campos_512_v4
+111/568843/campos_512_v4
+111/568845/campos_512_v4
+111/568993/campos_512_v4
+111/569006/campos_512_v4
+111/569089/campos_512_v4
+111/569166/campos_512_v4
+111/569167/campos_512_v4
+111/569236/campos_512_v4
+111/569419/campos_512_v4
+111/569540/campos_512_v4
+111/569560/campos_512_v4
+111/569680/campos_512_v4
+111/569716/campos_512_v4
+111/569717/campos_512_v4
+111/569752/campos_512_v4
+111/569756/campos_512_v4
+111/569763/campos_512_v4
+111/569776/campos_512_v4
+111/569952/campos_512_v4
+112/570007/campos_512_v4
+112/570018/campos_512_v4
+112/570046/campos_512_v4
+112/570047/campos_512_v4
+112/570297/campos_512_v4
+112/570301/campos_512_v4
+112/570322/campos_512_v4
+112/570359/campos_512_v4
+112/570442/campos_512_v4
+112/570481/campos_512_v4
+112/570501/campos_512_v4
+112/570506/campos_512_v4
+112/570509/campos_512_v4
+112/570550/campos_512_v4
+112/570758/campos_512_v4
+112/570845/campos_512_v4
+112/570904/campos_512_v4
+112/570915/campos_512_v4
+112/571050/campos_512_v4
+112/571061/campos_512_v4
+112/571111/campos_512_v4
+112/571144/campos_512_v4
+112/571295/campos_512_v4
+112/571339/campos_512_v4
+112/571495/campos_512_v4
+112/571612/campos_512_v4
+112/571690/campos_512_v4
+112/571774/campos_512_v4
+112/571829/campos_512_v4
+112/571835/campos_512_v4
+112/571842/campos_512_v4
+112/571899/campos_512_v4
+112/571993/campos_512_v4
+112/572070/campos_512_v4
+112/572177/campos_512_v4
+112/572191/campos_512_v4
+112/572226/campos_512_v4
+112/572277/campos_512_v4
+112/572296/campos_512_v4
+112/572306/campos_512_v4
+112/572318/campos_512_v4
+112/572334/campos_512_v4
+112/572455/campos_512_v4
+112/572525/campos_512_v4
+112/572564/campos_512_v4
+112/572630/campos_512_v4
+112/572657/campos_512_v4
+112/572662/campos_512_v4
+112/572861/campos_512_v4
+112/572911/campos_512_v4
+112/572938/campos_512_v4
+112/573008/campos_512_v4
+112/573108/campos_512_v4
+112/573113/campos_512_v4
+112/573325/campos_512_v4
+112/573338/campos_512_v4
+112/573363/campos_512_v4
+112/573385/campos_512_v4
+112/573393/campos_512_v4
+112/573399/campos_512_v4
+112/573411/campos_512_v4
+112/573532/campos_512_v4
+112/573628/campos_512_v4
+112/573745/campos_512_v4
+112/573862/campos_512_v4
+112/574035/campos_512_v4
+112/574059/campos_512_v4
+112/574087/campos_512_v4
+112/574132/campos_512_v4
+112/574146/campos_512_v4
+112/574193/campos_512_v4
+112/574209/campos_512_v4
+112/574328/campos_512_v4
+112/574482/campos_512_v4
+112/574715/campos_512_v4
+112/574961/campos_512_v4
+112/574969/campos_512_v4
+112/574989/campos_512_v4
+113/575011/campos_512_v4
+113/575086/campos_512_v4
+113/575121/campos_512_v4
+113/575130/campos_512_v4
+113/575273/campos_512_v4
+113/575347/campos_512_v4
+113/575348/campos_512_v4
+113/575368/campos_512_v4
+113/575387/campos_512_v4
+113/575426/campos_512_v4
+113/575613/campos_512_v4
+113/575679/campos_512_v4
+113/575696/campos_512_v4
+113/575706/campos_512_v4
+113/575718/campos_512_v4
+113/575907/campos_512_v4
+113/575924/campos_512_v4
+113/575927/campos_512_v4
+113/575938/campos_512_v4
+113/575955/campos_512_v4
+113/576014/campos_512_v4
+113/576049/campos_512_v4
+113/576072/campos_512_v4
+113/576141/campos_512_v4
+113/576143/campos_512_v4
+113/576223/campos_512_v4
+113/576258/campos_512_v4
+113/576348/campos_512_v4
+113/576378/campos_512_v4
+113/576583/campos_512_v4
+113/576726/campos_512_v4
+113/576739/campos_512_v4
+113/576799/campos_512_v4
+113/576804/campos_512_v4
+113/576838/campos_512_v4
+113/576853/campos_512_v4
+113/576969/campos_512_v4
+113/576988/campos_512_v4
+113/577035/campos_512_v4
+113/577038/campos_512_v4
+113/577179/campos_512_v4
+113/577197/campos_512_v4
+113/577296/campos_512_v4
+113/577307/campos_512_v4
+113/577309/campos_512_v4
+113/577533/campos_512_v4
+113/577575/campos_512_v4
+113/577576/campos_512_v4
+113/577586/campos_512_v4
+113/577587/campos_512_v4
+113/577715/campos_512_v4
+113/577755/campos_512_v4
+113/577782/campos_512_v4
+113/577803/campos_512_v4
+113/577819/campos_512_v4
+113/577859/campos_512_v4
+113/577957/campos_512_v4
+113/578079/campos_512_v4
+113/578147/campos_512_v4
+113/578173/campos_512_v4
+113/578245/campos_512_v4
+113/578251/campos_512_v4
+113/578277/campos_512_v4
+113/578350/campos_512_v4
+113/578442/campos_512_v4
+113/578487/campos_512_v4
+113/578513/campos_512_v4
+113/578522/campos_512_v4
+113/578679/campos_512_v4
+113/578749/campos_512_v4
+113/578801/campos_512_v4
+113/578824/campos_512_v4
+113/578886/campos_512_v4
+113/578941/campos_512_v4
+113/578955/campos_512_v4
+113/578992/campos_512_v4
+113/579087/campos_512_v4
+113/579109/campos_512_v4
+113/579205/campos_512_v4
+113/579237/campos_512_v4
+113/579278/campos_512_v4
+113/579325/campos_512_v4
+113/579347/campos_512_v4
+113/579373/campos_512_v4
+113/579415/campos_512_v4
+113/579530/campos_512_v4
+113/579541/campos_512_v4
+113/579571/campos_512_v4
+113/579773/campos_512_v4
+113/579800/campos_512_v4
+113/579873/campos_512_v4
+113/579908/campos_512_v4
+114/580028/campos_512_v4
+114/580091/campos_512_v4
+114/580162/campos_512_v4
+114/580491/campos_512_v4
+114/580609/campos_512_v4
+114/580611/campos_512_v4
+114/580717/campos_512_v4
+114/580721/campos_512_v4
+114/580747/campos_512_v4
+114/580766/campos_512_v4
+114/580787/campos_512_v4
+114/580819/campos_512_v4
+114/580977/campos_512_v4
+114/581022/campos_512_v4
+114/581055/campos_512_v4
+114/581122/campos_512_v4
+114/581200/campos_512_v4
+114/581271/campos_512_v4
+114/581278/campos_512_v4
+114/581358/campos_512_v4
+114/581515/campos_512_v4
+114/581650/campos_512_v4
+114/581665/campos_512_v4
+114/581674/campos_512_v4
+114/581700/campos_512_v4
+114/581719/campos_512_v4
+114/581729/campos_512_v4
+114/581741/campos_512_v4
+114/581744/campos_512_v4
+114/581756/campos_512_v4
+114/581763/campos_512_v4
+114/581813/campos_512_v4
+114/581939/campos_512_v4
+114/582042/campos_512_v4
+114/582499/campos_512_v4
+114/582529/campos_512_v4
+114/582589/campos_512_v4
+114/582666/campos_512_v4
+114/582732/campos_512_v4
+114/582912/campos_512_v4
+114/582969/campos_512_v4
+114/582975/campos_512_v4
+114/583051/campos_512_v4
+114/583079/campos_512_v4
+114/583161/campos_512_v4
+114/583167/campos_512_v4
+114/583192/campos_512_v4
+114/583292/campos_512_v4
+114/583303/campos_512_v4
+114/583366/campos_512_v4
+114/583372/campos_512_v4
+114/583393/campos_512_v4
+114/583435/campos_512_v4
+114/583518/campos_512_v4
+114/583530/campos_512_v4
+114/583539/campos_512_v4
+114/583666/campos_512_v4
+114/583723/campos_512_v4
+114/583789/campos_512_v4
+114/583957/campos_512_v4
+114/583961/campos_512_v4
+114/584141/campos_512_v4
+114/584311/campos_512_v4
+114/584364/campos_512_v4
+114/584395/campos_512_v4
+114/584470/campos_512_v4
+114/584540/campos_512_v4
+114/584555/campos_512_v4
+114/584623/campos_512_v4
+114/584691/campos_512_v4
+114/584701/campos_512_v4
+114/584709/campos_512_v4
+114/584751/campos_512_v4
+114/584765/campos_512_v4
+114/584795/campos_512_v4
+114/584798/campos_512_v4
+114/584907/campos_512_v4
+114/584980/campos_512_v4
+114/584987/campos_512_v4
+114/584988/campos_512_v4
+115/585066/campos_512_v4
+115/585128/campos_512_v4
+115/585190/campos_512_v4
+115/585268/campos_512_v4
+115/585318/campos_512_v4
+115/585532/campos_512_v4
+115/585545/campos_512_v4
+115/585566/campos_512_v4
+115/585642/campos_512_v4
+115/585673/campos_512_v4
+115/585761/campos_512_v4
+115/585788/campos_512_v4
+115/585920/campos_512_v4
+115/585924/campos_512_v4
+115/585952/campos_512_v4
+115/586046/campos_512_v4
+115/586103/campos_512_v4
+115/586108/campos_512_v4
+115/586128/campos_512_v4
+115/586148/campos_512_v4
+115/586159/campos_512_v4
+115/586201/campos_512_v4
+115/586450/campos_512_v4
+115/586497/campos_512_v4
+115/586501/campos_512_v4
+115/586531/campos_512_v4
+115/586537/campos_512_v4
+115/586570/campos_512_v4
+115/586576/campos_512_v4
+115/586679/campos_512_v4
+115/586763/campos_512_v4
+115/586858/campos_512_v4
+115/586919/campos_512_v4
+115/586981/campos_512_v4
+115/587045/campos_512_v4
+115/587240/campos_512_v4
+115/587249/campos_512_v4
+115/587250/campos_512_v4
+115/587393/campos_512_v4
+115/587583/campos_512_v4
+115/587597/campos_512_v4
+115/587625/campos_512_v4
+115/587653/campos_512_v4
+115/587733/campos_512_v4
+115/587781/campos_512_v4
+115/588042/campos_512_v4
+115/588145/campos_512_v4
+115/588179/campos_512_v4
+115/588221/campos_512_v4
+115/588265/campos_512_v4
+115/588285/campos_512_v4
+115/588332/campos_512_v4
+115/588391/campos_512_v4
+115/588446/campos_512_v4
+115/588525/campos_512_v4
+115/588699/campos_512_v4
+115/588737/campos_512_v4
+115/588900/campos_512_v4
+115/588921/campos_512_v4
+115/588926/campos_512_v4
+115/589001/campos_512_v4
+115/589019/campos_512_v4
+115/589022/campos_512_v4
+115/589066/campos_512_v4
+115/589101/campos_512_v4
+115/589266/campos_512_v4
+115/589299/campos_512_v4
+115/589353/campos_512_v4
+115/589406/campos_512_v4
+115/589442/campos_512_v4
+115/589525/campos_512_v4
+115/589603/campos_512_v4
+115/589688/campos_512_v4
+115/589699/campos_512_v4
+115/589726/campos_512_v4
+115/589861/campos_512_v4
+115/589903/campos_512_v4
+116/590030/campos_512_v4
+116/590053/campos_512_v4
+116/590090/campos_512_v4
+116/590102/campos_512_v4
+116/590234/campos_512_v4
+116/590407/campos_512_v4
+116/590497/campos_512_v4
+116/590500/campos_512_v4
+116/590523/campos_512_v4
+116/590541/campos_512_v4
+116/590627/campos_512_v4
+116/590640/campos_512_v4
+116/590642/campos_512_v4
+116/590746/campos_512_v4
+116/590760/campos_512_v4
+116/590819/campos_512_v4
+116/590853/campos_512_v4
+116/590891/campos_512_v4
+116/590917/campos_512_v4
+116/591107/campos_512_v4
+116/591121/campos_512_v4
+116/591157/campos_512_v4
+116/591249/campos_512_v4
+116/591266/campos_512_v4
+116/591370/campos_512_v4
+116/591379/campos_512_v4
+116/591383/campos_512_v4
+116/591409/campos_512_v4
+116/591449/campos_512_v4
+116/591475/campos_512_v4
+116/591520/campos_512_v4
+116/591672/campos_512_v4
+116/591735/campos_512_v4
+116/591850/campos_512_v4
+116/591994/campos_512_v4
+116/592024/campos_512_v4
+116/592063/campos_512_v4
+116/592181/campos_512_v4
+116/592311/campos_512_v4
+116/592430/campos_512_v4
+116/592442/campos_512_v4
+116/592600/campos_512_v4
+116/592602/campos_512_v4
+116/592663/campos_512_v4
+116/592922/campos_512_v4
+116/592998/campos_512_v4
+116/593016/campos_512_v4
+116/593042/campos_512_v4
+116/593081/campos_512_v4
+116/593108/campos_512_v4
+116/593127/campos_512_v4
+116/593247/campos_512_v4
+116/593416/campos_512_v4
+116/593422/campos_512_v4
+116/593445/campos_512_v4
+116/593587/campos_512_v4
+116/593698/campos_512_v4
+116/593733/campos_512_v4
+116/593838/campos_512_v4
+116/593938/campos_512_v4
+116/594021/campos_512_v4
+116/594065/campos_512_v4
+116/594098/campos_512_v4
+116/594136/campos_512_v4
+116/594158/campos_512_v4
+116/594299/campos_512_v4
+116/594301/campos_512_v4
+116/594328/campos_512_v4
+116/594492/campos_512_v4
+116/594504/campos_512_v4
+116/594520/campos_512_v4
+116/594623/campos_512_v4
+116/594639/campos_512_v4
+116/594671/campos_512_v4
+116/594731/campos_512_v4
+116/594767/campos_512_v4
+116/594888/campos_512_v4
+116/594894/campos_512_v4
+116/594923/campos_512_v4
+116/594943/campos_512_v4
+117/595046/campos_512_v4
+117/595106/campos_512_v4
+117/595174/campos_512_v4
+117/595184/campos_512_v4
+117/595333/campos_512_v4
+117/595354/campos_512_v4
+117/595385/campos_512_v4
+117/595398/campos_512_v4
+117/595399/campos_512_v4
+117/595426/campos_512_v4
+117/595489/campos_512_v4
+117/595587/campos_512_v4
+117/595597/campos_512_v4
+117/595647/campos_512_v4
+117/595751/campos_512_v4
+117/595979/campos_512_v4
+117/596004/campos_512_v4
+117/596057/campos_512_v4
+117/596102/campos_512_v4
+117/596129/campos_512_v4
+117/596137/campos_512_v4
+117/596393/campos_512_v4
+117/596539/campos_512_v4
+117/596612/campos_512_v4
+117/596662/campos_512_v4
+117/596750/campos_512_v4
+117/596787/campos_512_v4
+117/596833/campos_512_v4
+117/596953/campos_512_v4
+117/596967/campos_512_v4
+117/597076/campos_512_v4
+117/597223/campos_512_v4
+117/597281/campos_512_v4
+117/597438/campos_512_v4
+117/597441/campos_512_v4
+117/597458/campos_512_v4
+117/597464/campos_512_v4
+117/597583/campos_512_v4
+117/597628/campos_512_v4
+117/597701/campos_512_v4
+117/597753/campos_512_v4
+117/597834/campos_512_v4
+117/598001/campos_512_v4
+117/598013/campos_512_v4
+117/598014/campos_512_v4
+117/598043/campos_512_v4
+117/598087/campos_512_v4
+117/598109/campos_512_v4
+117/598114/campos_512_v4
+117/598173/campos_512_v4
+117/598239/campos_512_v4
+117/598355/campos_512_v4
+117/598450/campos_512_v4
+117/598584/campos_512_v4
+117/598620/campos_512_v4
+117/598894/campos_512_v4
+117/598934/campos_512_v4
+117/598985/campos_512_v4
+117/599029/campos_512_v4
+117/599049/campos_512_v4
+117/599054/campos_512_v4
+117/599086/campos_512_v4
+117/599141/campos_512_v4
+117/599206/campos_512_v4
+117/599221/campos_512_v4
+117/599245/campos_512_v4
+117/599255/campos_512_v4
+117/599281/campos_512_v4
+117/599286/campos_512_v4
+117/599339/campos_512_v4
+117/599393/campos_512_v4
+117/599411/campos_512_v4
+117/599418/campos_512_v4
+117/599430/campos_512_v4
+117/599439/campos_512_v4
+117/599446/campos_512_v4
+117/599530/campos_512_v4
+117/599689/campos_512_v4
+117/599766/campos_512_v4
+117/599804/campos_512_v4
+117/599818/campos_512_v4
+117/599938/campos_512_v4
+117/599957/campos_512_v4
+118/600018/campos_512_v4
+118/600112/campos_512_v4
+118/600139/campos_512_v4
+118/600208/campos_512_v4
+118/600304/campos_512_v4
+118/600352/campos_512_v4
+118/600397/campos_512_v4
+118/600400/campos_512_v4
+118/600431/campos_512_v4
+118/600437/campos_512_v4
+118/600468/campos_512_v4
+118/600530/campos_512_v4
+118/600534/campos_512_v4
+118/600538/campos_512_v4
+118/600596/campos_512_v4
+118/600678/campos_512_v4
+118/600697/campos_512_v4
+118/600701/campos_512_v4
+118/600752/campos_512_v4
+118/600794/campos_512_v4
+118/600797/campos_512_v4
+118/600800/campos_512_v4
+118/600887/campos_512_v4
+118/601001/campos_512_v4
+118/601028/campos_512_v4
+118/601359/campos_512_v4
+118/601400/campos_512_v4
+118/601419/campos_512_v4
+118/601468/campos_512_v4
+118/601529/campos_512_v4
+118/601542/campos_512_v4
+118/601600/campos_512_v4
+118/601754/campos_512_v4
+118/601786/campos_512_v4
+118/601819/campos_512_v4
+118/601964/campos_512_v4
+118/602000/campos_512_v4
+118/602032/campos_512_v4
+118/602038/campos_512_v4
+118/602081/campos_512_v4
+118/602156/campos_512_v4
+118/602274/campos_512_v4
+118/602367/campos_512_v4
+118/602375/campos_512_v4
+118/602429/campos_512_v4
+118/602491/campos_512_v4
+118/602561/campos_512_v4
+118/602587/campos_512_v4
+118/602685/campos_512_v4
+118/602688/campos_512_v4
+118/602693/campos_512_v4
+118/602752/campos_512_v4
+118/602776/campos_512_v4
+118/602778/campos_512_v4
+118/602844/campos_512_v4
+118/602896/campos_512_v4
+118/602903/campos_512_v4
+118/602926/campos_512_v4
+118/602940/campos_512_v4
+118/603027/campos_512_v4
+118/603182/campos_512_v4
+118/603353/campos_512_v4
+118/603355/campos_512_v4
+118/603547/campos_512_v4
+118/603688/campos_512_v4
+118/603727/campos_512_v4
+118/603791/campos_512_v4
+118/603802/campos_512_v4
+118/603849/campos_512_v4
+118/603881/campos_512_v4
+118/603906/campos_512_v4
+118/603965/campos_512_v4
+118/604003/campos_512_v4
+118/604011/campos_512_v4
+118/604038/campos_512_v4
+118/604040/campos_512_v4
+118/604061/campos_512_v4
+118/604162/campos_512_v4
+118/604351/campos_512_v4
+118/604601/campos_512_v4
+118/604660/campos_512_v4
+118/604687/campos_512_v4
+118/604828/campos_512_v4
+118/604886/campos_512_v4
+118/604995/campos_512_v4
+119/605009/campos_512_v4
+119/605141/campos_512_v4
+119/605149/campos_512_v4
+119/605181/campos_512_v4
+119/605187/campos_512_v4
+119/605223/campos_512_v4
+119/605270/campos_512_v4
+119/605284/campos_512_v4
+119/605285/campos_512_v4
+119/605308/campos_512_v4
+119/605336/campos_512_v4
+119/605510/campos_512_v4
+119/605554/campos_512_v4
+119/605582/campos_512_v4
+119/605591/campos_512_v4
+119/605599/campos_512_v4
+119/605627/campos_512_v4
+119/605659/campos_512_v4
+119/605677/campos_512_v4
+119/605717/campos_512_v4
+119/605725/campos_512_v4
+119/605739/campos_512_v4
+119/605782/campos_512_v4
+119/605854/campos_512_v4
+119/605887/campos_512_v4
+119/605957/campos_512_v4
+119/605960/campos_512_v4
+119/605966/campos_512_v4
+119/606160/campos_512_v4
+119/606180/campos_512_v4
+119/606248/campos_512_v4
+119/606252/campos_512_v4
+119/606287/campos_512_v4
+119/606389/campos_512_v4
+119/606398/campos_512_v4
+119/606492/campos_512_v4
+119/606561/campos_512_v4
+119/606598/campos_512_v4
+119/606691/campos_512_v4
+119/606694/campos_512_v4
+119/606697/campos_512_v4
+119/606708/campos_512_v4
+119/606718/campos_512_v4
+119/606724/campos_512_v4
+119/606747/campos_512_v4
+119/606751/campos_512_v4
+119/606762/campos_512_v4
+119/606804/campos_512_v4
+119/606814/campos_512_v4
+119/606851/campos_512_v4
+119/606857/campos_512_v4
+119/606882/campos_512_v4
+119/606934/campos_512_v4
+119/606946/campos_512_v4
+119/606993/campos_512_v4
+119/607026/campos_512_v4
+119/607146/campos_512_v4
+119/607247/campos_512_v4
+119/607279/campos_512_v4
+119/607347/campos_512_v4
+119/607389/campos_512_v4
+119/607446/campos_512_v4
+119/607457/campos_512_v4
+119/607503/campos_512_v4
+119/607522/campos_512_v4
+119/607539/campos_512_v4
+119/607600/campos_512_v4
+119/607609/campos_512_v4
+119/607616/campos_512_v4
+119/607679/campos_512_v4
+119/607703/campos_512_v4
+119/607747/campos_512_v4
+119/607758/campos_512_v4
+119/607802/campos_512_v4
+119/607829/campos_512_v4
+119/608013/campos_512_v4
+119/608028/campos_512_v4
+119/608154/campos_512_v4
+119/608189/campos_512_v4
+119/608303/campos_512_v4
+119/608421/campos_512_v4
+119/608424/campos_512_v4
+119/608470/campos_512_v4
+119/608489/campos_512_v4
+119/608510/campos_512_v4
+119/608574/campos_512_v4
+119/608582/campos_512_v4
+119/608632/campos_512_v4
+119/608644/campos_512_v4
+119/608674/campos_512_v4
+119/608828/campos_512_v4
+119/608935/campos_512_v4
+119/608995/campos_512_v4
+119/609065/campos_512_v4
+119/609072/campos_512_v4
+119/609084/campos_512_v4
+119/609095/campos_512_v4
+119/609126/campos_512_v4
+119/609324/campos_512_v4
+119/609348/campos_512_v4
+119/609515/campos_512_v4
+119/609522/campos_512_v4
+119/609650/campos_512_v4
+119/609746/campos_512_v4
+119/609835/campos_512_v4
+119/609873/campos_512_v4
+119/609971/campos_512_v4
+12/70239/campos_512_v4
+12/70329/campos_512_v4
+12/70343/campos_512_v4
+12/70350/campos_512_v4
+12/70382/campos_512_v4
+12/70400/campos_512_v4
+12/70551/campos_512_v4
+12/70757/campos_512_v4
+12/70764/campos_512_v4
+12/70781/campos_512_v4
+12/70869/campos_512_v4
+12/71115/campos_512_v4
+12/71207/campos_512_v4
+12/71255/campos_512_v4
+12/71410/campos_512_v4
+12/71467/campos_512_v4
+12/71561/campos_512_v4
+12/71779/campos_512_v4
+12/71781/campos_512_v4
+12/71818/campos_512_v4
+12/71820/campos_512_v4
+12/72021/campos_512_v4
+12/72116/campos_512_v4
+12/72330/campos_512_v4
+12/72581/campos_512_v4
+12/72655/campos_512_v4
+12/72681/campos_512_v4
+12/72714/campos_512_v4
+12/73027/campos_512_v4
+12/73045/campos_512_v4
+12/73072/campos_512_v4
+12/73302/campos_512_v4
+12/73449/campos_512_v4
+12/73599/campos_512_v4
+12/73778/campos_512_v4
+12/73858/campos_512_v4
+12/73902/campos_512_v4
+12/73967/campos_512_v4
+12/74122/campos_512_v4
+12/74155/campos_512_v4
+12/74175/campos_512_v4
+12/74178/campos_512_v4
+12/74769/campos_512_v4
+12/74834/campos_512_v4
+12/74880/campos_512_v4
+12/74972/campos_512_v4
+120/610102/campos_512_v4
+120/610131/campos_512_v4
+120/610192/campos_512_v4
+120/610196/campos_512_v4
+120/610229/campos_512_v4
+120/610322/campos_512_v4
+120/610464/campos_512_v4
+120/610531/campos_512_v4
+120/610548/campos_512_v4
+120/610740/campos_512_v4
+120/610935/campos_512_v4
+120/610960/campos_512_v4
+120/610998/campos_512_v4
+120/611031/campos_512_v4
+120/611183/campos_512_v4
+120/611200/campos_512_v4
+120/611269/campos_512_v4
+120/611335/campos_512_v4
+120/611343/campos_512_v4
+120/611539/campos_512_v4
+120/611564/campos_512_v4
+120/611585/campos_512_v4
+120/611643/campos_512_v4
+120/611698/campos_512_v4
+120/611747/campos_512_v4
+120/611770/campos_512_v4
+120/611802/campos_512_v4
+120/611859/campos_512_v4
+120/611866/campos_512_v4
+120/612032/campos_512_v4
+120/612042/campos_512_v4
+120/612047/campos_512_v4
+120/612050/campos_512_v4
+120/612278/campos_512_v4
+120/612295/campos_512_v4
+120/612316/campos_512_v4
+120/612358/campos_512_v4
+120/612392/campos_512_v4
+120/612639/campos_512_v4
+120/612645/campos_512_v4
+120/612711/campos_512_v4
+120/612838/campos_512_v4
+120/612971/campos_512_v4
+120/613023/campos_512_v4
+120/613064/campos_512_v4
+120/613093/campos_512_v4
+120/613126/campos_512_v4
+120/613175/campos_512_v4
+120/613242/campos_512_v4
+120/613297/campos_512_v4
+120/613466/campos_512_v4
+120/613477/campos_512_v4
+120/613515/campos_512_v4
+120/613525/campos_512_v4
+120/613606/campos_512_v4
+120/613694/campos_512_v4
+120/613763/campos_512_v4
+120/613788/campos_512_v4
+120/613819/campos_512_v4
+120/613853/campos_512_v4
+120/613950/campos_512_v4
+120/613968/campos_512_v4
+120/613980/campos_512_v4
+120/614033/campos_512_v4
+120/614298/campos_512_v4
+120/614323/campos_512_v4
+120/614445/campos_512_v4
+120/614517/campos_512_v4
+120/614537/campos_512_v4
+120/614565/campos_512_v4
+120/614616/campos_512_v4
+120/614636/campos_512_v4
+120/614639/campos_512_v4
+120/614865/campos_512_v4
+121/615007/campos_512_v4
+121/615082/campos_512_v4
+121/615098/campos_512_v4
+121/615102/campos_512_v4
+121/615121/campos_512_v4
+121/615239/campos_512_v4
+121/615289/campos_512_v4
+121/615359/campos_512_v4
+121/615710/campos_512_v4
+121/615784/campos_512_v4
+121/615828/campos_512_v4
+121/615883/campos_512_v4
+121/615898/campos_512_v4
+121/615985/campos_512_v4
+121/616092/campos_512_v4
+121/616135/campos_512_v4
+121/616175/campos_512_v4
+121/616259/campos_512_v4
+121/616356/campos_512_v4
+121/616408/campos_512_v4
+121/616416/campos_512_v4
+121/616473/campos_512_v4
+121/616626/campos_512_v4
+121/616703/campos_512_v4
+121/616704/campos_512_v4
+121/616810/campos_512_v4
+121/616834/campos_512_v4
+121/616943/campos_512_v4
+121/617024/campos_512_v4
+121/617051/campos_512_v4
+121/617065/campos_512_v4
+121/617126/campos_512_v4
+121/617209/campos_512_v4
+121/617214/campos_512_v4
+121/617216/campos_512_v4
+121/617240/campos_512_v4
+121/617332/campos_512_v4
+121/617341/campos_512_v4
+121/617443/campos_512_v4
+121/617472/campos_512_v4
+121/617501/campos_512_v4
+121/617570/campos_512_v4
+121/617647/campos_512_v4
+121/617707/campos_512_v4
+121/617738/campos_512_v4
+121/617744/campos_512_v4
+121/617748/campos_512_v4
+121/617780/campos_512_v4
+121/617814/campos_512_v4
+121/617843/campos_512_v4
+121/617881/campos_512_v4
+121/617952/campos_512_v4
+121/618034/campos_512_v4
+121/618105/campos_512_v4
+121/618107/campos_512_v4
+121/618290/campos_512_v4
+121/618406/campos_512_v4
+121/618507/campos_512_v4
+121/618561/campos_512_v4
+121/618623/campos_512_v4
+121/618649/campos_512_v4
+121/618675/campos_512_v4
+121/618887/campos_512_v4
+121/618931/campos_512_v4
+121/618951/campos_512_v4
+121/619000/campos_512_v4
+121/619099/campos_512_v4
+121/619141/campos_512_v4
+121/619198/campos_512_v4
+121/619264/campos_512_v4
+121/619311/campos_512_v4
+121/619419/campos_512_v4
+121/619473/campos_512_v4
+121/619475/campos_512_v4
+121/619794/campos_512_v4
+121/619970/campos_512_v4
+122/620067/campos_512_v4
+122/620079/campos_512_v4
+122/620138/campos_512_v4
+122/620177/campos_512_v4
+122/620271/campos_512_v4
+122/620288/campos_512_v4
+122/620295/campos_512_v4
+122/620450/campos_512_v4
+122/620477/campos_512_v4
+122/620709/campos_512_v4
+122/620728/campos_512_v4
+122/620745/campos_512_v4
+122/620802/campos_512_v4
+122/620841/campos_512_v4
+122/620886/campos_512_v4
+122/620994/campos_512_v4
+122/621024/campos_512_v4
+122/621035/campos_512_v4
+122/621069/campos_512_v4
+122/621126/campos_512_v4
+122/621166/campos_512_v4
+122/621440/campos_512_v4
+122/621476/campos_512_v4
+122/621542/campos_512_v4
+122/621545/campos_512_v4
+122/621607/campos_512_v4
+122/621630/campos_512_v4
+122/621875/campos_512_v4
+122/621945/campos_512_v4
+122/622062/campos_512_v4
+122/622168/campos_512_v4
+122/622222/campos_512_v4
+122/622236/campos_512_v4
+122/622350/campos_512_v4
+122/622383/campos_512_v4
+122/622462/campos_512_v4
+122/622537/campos_512_v4
+122/622544/campos_512_v4
+122/622685/campos_512_v4
+122/622733/campos_512_v4
+122/622796/campos_512_v4
+122/622809/campos_512_v4
+122/622883/campos_512_v4
+122/622968/campos_512_v4
+122/622996/campos_512_v4
+122/622999/campos_512_v4
+122/623112/campos_512_v4
+122/623145/campos_512_v4
+122/623175/campos_512_v4
+122/623176/campos_512_v4
+122/623183/campos_512_v4
+122/623219/campos_512_v4
+122/623283/campos_512_v4
+122/623338/campos_512_v4
+122/623364/campos_512_v4
+122/623432/campos_512_v4
+122/623555/campos_512_v4
+122/623556/campos_512_v4
+122/623570/campos_512_v4
+122/623598/campos_512_v4
+122/623606/campos_512_v4
+122/623627/campos_512_v4
+122/623637/campos_512_v4
+122/623647/campos_512_v4
+122/623875/campos_512_v4
+122/623979/campos_512_v4
+122/624029/campos_512_v4
+122/624091/campos_512_v4
+122/624120/campos_512_v4
+122/624205/campos_512_v4
+122/624305/campos_512_v4
+122/624469/campos_512_v4
+122/624508/campos_512_v4
+122/624661/campos_512_v4
+122/624795/campos_512_v4
+122/624804/campos_512_v4
+122/624849/campos_512_v4
+122/624851/campos_512_v4
+122/624869/campos_512_v4
+122/624999/campos_512_v4
+123/625056/campos_512_v4
+123/625111/campos_512_v4
+123/625113/campos_512_v4
+123/625251/campos_512_v4
+123/625303/campos_512_v4
+123/625353/campos_512_v4
+123/625571/campos_512_v4
+123/625866/campos_512_v4
+123/625874/campos_512_v4
+123/625926/campos_512_v4
+123/626011/campos_512_v4
+123/626079/campos_512_v4
+123/626082/campos_512_v4
+123/626111/campos_512_v4
+123/626165/campos_512_v4
+123/626394/campos_512_v4
+123/626496/campos_512_v4
+123/626509/campos_512_v4
+123/626522/campos_512_v4
+123/626558/campos_512_v4
+123/626561/campos_512_v4
+123/626689/campos_512_v4
+123/626758/campos_512_v4
+123/626842/campos_512_v4
+123/626892/campos_512_v4
+123/626905/campos_512_v4
+123/626913/campos_512_v4
+123/626920/campos_512_v4
+123/626937/campos_512_v4
+123/626947/campos_512_v4
+123/627088/campos_512_v4
+123/627098/campos_512_v4
+123/627117/campos_512_v4
+123/627192/campos_512_v4
+123/627399/campos_512_v4
+123/627463/campos_512_v4
+123/627570/campos_512_v4
+123/627652/campos_512_v4
+123/627820/campos_512_v4
+123/627839/campos_512_v4
+123/627851/campos_512_v4
+123/627923/campos_512_v4
+123/627981/campos_512_v4
+123/628008/campos_512_v4
+123/628021/campos_512_v4
+123/628023/campos_512_v4
+123/628179/campos_512_v4
+123/628218/campos_512_v4
+123/628239/campos_512_v4
+123/628309/campos_512_v4
+123/628321/campos_512_v4
+123/628369/campos_512_v4
+123/628447/campos_512_v4
+123/628449/campos_512_v4
+123/628507/campos_512_v4
+123/628600/campos_512_v4
+123/628706/campos_512_v4
+123/628767/campos_512_v4
+123/628865/campos_512_v4
+123/628905/campos_512_v4
+123/628929/campos_512_v4
+123/629092/campos_512_v4
+123/629114/campos_512_v4
+123/629117/campos_512_v4
+123/629122/campos_512_v4
+123/629145/campos_512_v4
+123/629302/campos_512_v4
+123/629460/campos_512_v4
+123/629581/campos_512_v4
+123/629598/campos_512_v4
+123/629674/campos_512_v4
+123/629744/campos_512_v4
+123/629759/campos_512_v4
+123/629834/campos_512_v4
+123/629881/campos_512_v4
+123/629925/campos_512_v4
+124/630047/campos_512_v4
+124/630072/campos_512_v4
+124/630192/campos_512_v4
+124/630207/campos_512_v4
+124/630226/campos_512_v4
+124/630230/campos_512_v4
+124/630334/campos_512_v4
+124/630419/campos_512_v4
+124/630487/campos_512_v4
+124/630594/campos_512_v4
+124/630645/campos_512_v4
+124/630848/campos_512_v4
+124/630920/campos_512_v4
+124/631056/campos_512_v4
+124/631076/campos_512_v4
+124/631080/campos_512_v4
+124/631092/campos_512_v4
+124/631184/campos_512_v4
+124/631324/campos_512_v4
+124/631376/campos_512_v4
+124/631450/campos_512_v4
+124/631456/campos_512_v4
+124/631516/campos_512_v4
+124/631642/campos_512_v4
+124/631767/campos_512_v4
+124/631777/campos_512_v4
+124/631829/campos_512_v4
+124/631866/campos_512_v4
+124/631879/campos_512_v4
+124/631916/campos_512_v4
+124/631980/campos_512_v4
+124/632007/campos_512_v4
+124/632066/campos_512_v4
+124/632077/campos_512_v4
+124/632169/campos_512_v4
+124/632191/campos_512_v4
+124/632231/campos_512_v4
+124/632257/campos_512_v4
+124/632316/campos_512_v4
+124/632435/campos_512_v4
+124/632493/campos_512_v4
+124/632554/campos_512_v4
+124/632621/campos_512_v4
+124/632648/campos_512_v4
+124/632738/campos_512_v4
+124/632739/campos_512_v4
+124/632778/campos_512_v4
+124/632806/campos_512_v4
+124/632839/campos_512_v4
+124/632899/campos_512_v4
+124/632952/campos_512_v4
+124/633455/campos_512_v4
+124/633478/campos_512_v4
+124/633567/campos_512_v4
+124/633776/campos_512_v4
+124/633950/campos_512_v4
+124/634104/campos_512_v4
+124/634231/campos_512_v4
+124/634395/campos_512_v4
+124/634419/campos_512_v4
+124/634444/campos_512_v4
+124/634458/campos_512_v4
+124/634551/campos_512_v4
+124/634580/campos_512_v4
+124/634623/campos_512_v4
+124/634949/campos_512_v4
+125/635017/campos_512_v4
+125/635051/campos_512_v4
+125/635133/campos_512_v4
+125/635226/campos_512_v4
+125/635239/campos_512_v4
+125/635281/campos_512_v4
+125/635331/campos_512_v4
+125/635383/campos_512_v4
+125/635473/campos_512_v4
+125/635484/campos_512_v4
+125/635489/campos_512_v4
+125/635499/campos_512_v4
+125/635560/campos_512_v4
+125/635658/campos_512_v4
+125/635745/campos_512_v4
+125/635748/campos_512_v4
+125/635758/campos_512_v4
+125/635946/campos_512_v4
+125/636094/campos_512_v4
+125/636253/campos_512_v4
+125/636270/campos_512_v4
+125/636357/campos_512_v4
+125/636378/campos_512_v4
+125/636442/campos_512_v4
+125/636565/campos_512_v4
+125/636613/campos_512_v4
+125/636619/campos_512_v4
+125/636645/campos_512_v4
+125/636730/campos_512_v4
+125/636743/campos_512_v4
+125/636776/campos_512_v4
+125/636819/campos_512_v4
+125/636836/campos_512_v4
+125/636883/campos_512_v4
+125/636929/campos_512_v4
+125/636962/campos_512_v4
+125/636964/campos_512_v4
+125/636974/campos_512_v4
+125/636999/campos_512_v4
+125/637041/campos_512_v4
+125/637079/campos_512_v4
+125/637084/campos_512_v4
+125/637128/campos_512_v4
+125/637285/campos_512_v4
+125/637437/campos_512_v4
+125/637469/campos_512_v4
+125/637475/campos_512_v4
+125/637641/campos_512_v4
+125/637643/campos_512_v4
+125/637768/campos_512_v4
+125/637993/campos_512_v4
+125/638006/campos_512_v4
+125/638028/campos_512_v4
+125/638177/campos_512_v4
+125/638178/campos_512_v4
+125/638218/campos_512_v4
+125/638373/campos_512_v4
+125/638413/campos_512_v4
+125/638575/campos_512_v4
+125/638603/campos_512_v4
+125/638714/campos_512_v4
+125/638737/campos_512_v4
+125/638752/campos_512_v4
+125/638771/campos_512_v4
+125/638782/campos_512_v4
+125/638788/campos_512_v4
+125/638832/campos_512_v4
+125/638845/campos_512_v4
+125/638903/campos_512_v4
+125/638991/campos_512_v4
+125/639052/campos_512_v4
+125/639079/campos_512_v4
+125/639179/campos_512_v4
+125/639181/campos_512_v4
+125/639216/campos_512_v4
+125/639242/campos_512_v4
+125/639257/campos_512_v4
+125/639300/campos_512_v4
+125/639377/campos_512_v4
+125/639582/campos_512_v4
+125/639813/campos_512_v4
+127/645016/campos_512_v4
+127/645018/campos_512_v4
+127/645083/campos_512_v4
+127/645160/campos_512_v4
+127/645214/campos_512_v4
+127/645238/campos_512_v4
+127/645298/campos_512_v4
+127/645385/campos_512_v4
+127/645398/campos_512_v4
+127/645416/campos_512_v4
+127/645423/campos_512_v4
+127/645539/campos_512_v4
+127/645839/campos_512_v4
+127/646146/campos_512_v4
+127/646442/campos_512_v4
+127/646769/campos_512_v4
+127/646835/campos_512_v4
+127/647076/campos_512_v4
+127/647136/campos_512_v4
+127/647161/campos_512_v4
+127/647232/campos_512_v4
+127/647241/campos_512_v4
+127/647301/campos_512_v4
+127/647341/campos_512_v4
+127/647358/campos_512_v4
+127/647412/campos_512_v4
+127/647417/campos_512_v4
+127/647434/campos_512_v4
+127/647447/campos_512_v4
+127/647518/campos_512_v4
+127/647528/campos_512_v4
+127/647656/campos_512_v4
+127/647669/campos_512_v4
+127/647789/campos_512_v4
+127/647794/campos_512_v4
+127/647806/campos_512_v4
+127/647850/campos_512_v4
+127/647863/campos_512_v4
+127/647921/campos_512_v4
+127/647933/campos_512_v4
+127/647943/campos_512_v4
+127/647944/campos_512_v4
+127/647956/campos_512_v4
+127/647960/campos_512_v4
+127/648138/campos_512_v4
+127/648139/campos_512_v4
+127/648142/campos_512_v4
+127/648201/campos_512_v4
+127/648420/campos_512_v4
+127/648484/campos_512_v4
+127/648545/campos_512_v4
+127/648567/campos_512_v4
+127/648572/campos_512_v4
+127/648584/campos_512_v4
+127/648714/campos_512_v4
+127/648800/campos_512_v4
+127/648829/campos_512_v4
+127/648891/campos_512_v4
+127/648984/campos_512_v4
+127/648988/campos_512_v4
+127/649001/campos_512_v4
+127/649032/campos_512_v4
+127/649182/campos_512_v4
+127/649202/campos_512_v4
+127/649207/campos_512_v4
+127/649241/campos_512_v4
+127/649259/campos_512_v4
+127/649308/campos_512_v4
+127/649327/campos_512_v4
+127/649332/campos_512_v4
+127/649383/campos_512_v4
+127/649403/campos_512_v4
+127/649510/campos_512_v4
+127/649528/campos_512_v4
+127/649556/campos_512_v4
+127/649606/campos_512_v4
+127/649621/campos_512_v4
+127/649686/campos_512_v4
+127/649692/campos_512_v4
+127/649694/campos_512_v4
+127/649805/campos_512_v4
+127/649915/campos_512_v4
+127/649995/campos_512_v4
+128/650116/campos_512_v4
+128/650177/campos_512_v4
+128/650200/campos_512_v4
+128/650353/campos_512_v4
+128/650462/campos_512_v4
+128/650562/campos_512_v4
+128/650586/campos_512_v4
+128/650623/campos_512_v4
+128/650765/campos_512_v4
+128/650814/campos_512_v4
+128/650819/campos_512_v4
+128/650834/campos_512_v4
+128/650849/campos_512_v4
+128/650874/campos_512_v4
+128/650954/campos_512_v4
+128/651017/campos_512_v4
+128/651065/campos_512_v4
+128/651066/campos_512_v4
+128/651084/campos_512_v4
+128/651172/campos_512_v4
+128/651181/campos_512_v4
+128/651217/campos_512_v4
+128/651239/campos_512_v4
+128/651248/campos_512_v4
+128/651261/campos_512_v4
+128/651347/campos_512_v4
+128/651390/campos_512_v4
+128/651454/campos_512_v4
+128/651459/campos_512_v4
+128/651501/campos_512_v4
+128/651510/campos_512_v4
+128/651587/campos_512_v4
+128/651711/campos_512_v4
+128/651847/campos_512_v4
+128/651851/campos_512_v4
+128/651897/campos_512_v4
+128/651933/campos_512_v4
+128/651968/campos_512_v4
+128/651988/campos_512_v4
+128/652165/campos_512_v4
+128/652172/campos_512_v4
+128/652474/campos_512_v4
+128/652502/campos_512_v4
+128/652534/campos_512_v4
+128/652720/campos_512_v4
+128/652767/campos_512_v4
+128/652808/campos_512_v4
+128/652827/campos_512_v4
+128/652906/campos_512_v4
+128/652973/campos_512_v4
+128/653064/campos_512_v4
+128/653072/campos_512_v4
+128/653139/campos_512_v4
+128/653165/campos_512_v4
+128/653185/campos_512_v4
+128/653311/campos_512_v4
+128/653336/campos_512_v4
+128/653349/campos_512_v4
+128/653386/campos_512_v4
+128/653417/campos_512_v4
+128/653542/campos_512_v4
+128/653544/campos_512_v4
+128/653601/campos_512_v4
+128/653693/campos_512_v4
+128/653710/campos_512_v4
+128/653713/campos_512_v4
+128/653772/campos_512_v4
+128/653807/campos_512_v4
+128/653898/campos_512_v4
+128/653910/campos_512_v4
+128/653911/campos_512_v4
+128/653934/campos_512_v4
+128/653939/campos_512_v4
+128/654048/campos_512_v4
+128/654063/campos_512_v4
+128/654067/campos_512_v4
+128/654128/campos_512_v4
+128/654195/campos_512_v4
+128/654252/campos_512_v4
+128/654314/campos_512_v4
+128/654328/campos_512_v4
+128/654380/campos_512_v4
+128/654476/campos_512_v4
+128/654499/campos_512_v4
+128/654649/campos_512_v4
+128/654774/campos_512_v4
+128/654840/campos_512_v4
+128/654849/campos_512_v4
+129/655050/campos_512_v4
+129/655164/campos_512_v4
+129/655165/campos_512_v4
+129/655166/campos_512_v4
+129/655204/campos_512_v4
+129/655266/campos_512_v4
+129/655382/campos_512_v4
+129/655450/campos_512_v4
+129/655485/campos_512_v4
+129/655530/campos_512_v4
+129/655558/campos_512_v4
+129/655775/campos_512_v4
+129/655800/campos_512_v4
+129/655828/campos_512_v4
+129/655832/campos_512_v4
+129/655859/campos_512_v4
+129/655955/campos_512_v4
+129/656094/campos_512_v4
+129/656171/campos_512_v4
+129/656257/campos_512_v4
+129/656572/campos_512_v4
+129/656629/campos_512_v4
+129/656708/campos_512_v4
+129/656891/campos_512_v4
+129/656909/campos_512_v4
+129/657027/campos_512_v4
+129/657078/campos_512_v4
+129/657131/campos_512_v4
+129/657159/campos_512_v4
+129/657226/campos_512_v4
+129/657258/campos_512_v4
+129/657354/campos_512_v4
+129/657647/campos_512_v4
+129/657759/campos_512_v4
+129/657874/campos_512_v4
+129/657928/campos_512_v4
+129/657970/campos_512_v4
+129/658025/campos_512_v4
+129/658063/campos_512_v4
+129/658118/campos_512_v4
+129/658136/campos_512_v4
+129/658199/campos_512_v4
+129/658244/campos_512_v4
+129/658275/campos_512_v4
+129/658363/campos_512_v4
+129/658387/campos_512_v4
+129/658437/campos_512_v4
+129/658523/campos_512_v4
+129/658529/campos_512_v4
+129/658538/campos_512_v4
+129/658631/campos_512_v4
+129/658674/campos_512_v4
+129/658732/campos_512_v4
+129/658778/campos_512_v4
+129/658803/campos_512_v4
+129/658809/campos_512_v4
+129/658829/campos_512_v4
+129/658894/campos_512_v4
+129/658948/campos_512_v4
+129/658988/campos_512_v4
+129/659050/campos_512_v4
+129/659100/campos_512_v4
+129/659163/campos_512_v4
+129/659220/campos_512_v4
+129/659287/campos_512_v4
+129/659322/campos_512_v4
+129/659449/campos_512_v4
+129/659531/campos_512_v4
+129/659656/campos_512_v4
+129/659687/campos_512_v4
+129/659780/campos_512_v4
+129/659884/campos_512_v4
+129/659885/campos_512_v4
+129/660001/campos_512_v4
+13/75002/campos_512_v4
+13/75104/campos_512_v4
+13/75165/campos_512_v4
+13/75368/campos_512_v4
+13/75439/campos_512_v4
+13/75474/campos_512_v4
+13/75702/campos_512_v4
+13/75754/campos_512_v4
+13/75835/campos_512_v4
+13/76174/campos_512_v4
+13/76248/campos_512_v4
+13/76393/campos_512_v4
+13/76407/campos_512_v4
+13/76675/campos_512_v4
+13/76913/campos_512_v4
+13/76919/campos_512_v4
+13/76952/campos_512_v4
+13/77003/campos_512_v4
+13/77218/campos_512_v4
+13/77283/campos_512_v4
+13/77642/campos_512_v4
+13/77683/campos_512_v4
+13/77893/campos_512_v4
+13/78024/campos_512_v4
+13/78082/campos_512_v4
+13/78084/campos_512_v4
+13/78174/campos_512_v4
+13/78329/campos_512_v4
+13/78354/campos_512_v4
+13/78491/campos_512_v4
+13/78556/campos_512_v4
+13/78709/campos_512_v4
+13/78765/campos_512_v4
+13/78791/campos_512_v4
+13/78978/campos_512_v4
+13/79112/campos_512_v4
+13/79211/campos_512_v4
+13/79304/campos_512_v4
+13/79656/campos_512_v4
+13/79910/campos_512_v4
+130/660024/campos_512_v4
+130/660030/campos_512_v4
+130/660052/campos_512_v4
+130/660066/campos_512_v4
+130/660068/campos_512_v4
+130/660220/campos_512_v4
+130/660463/campos_512_v4
+130/660465/campos_512_v4
+130/660535/campos_512_v4
+130/660543/campos_512_v4
+130/660574/campos_512_v4
+130/660710/campos_512_v4
+130/660872/campos_512_v4
+130/660925/campos_512_v4
+130/661079/campos_512_v4
+130/661185/campos_512_v4
+130/661218/campos_512_v4
+130/661220/campos_512_v4
+130/661223/campos_512_v4
+130/661366/campos_512_v4
+130/661406/campos_512_v4
+130/661449/campos_512_v4
+130/661547/campos_512_v4
+130/661583/campos_512_v4
+130/661684/campos_512_v4
+130/661693/campos_512_v4
+130/661700/campos_512_v4
+130/661876/campos_512_v4
+130/661908/campos_512_v4
+130/662293/campos_512_v4
+130/662307/campos_512_v4
+130/662387/campos_512_v4
+130/662432/campos_512_v4
+130/662510/campos_512_v4
+130/662512/campos_512_v4
+130/662531/campos_512_v4
+130/662705/campos_512_v4
+130/662802/campos_512_v4
+130/662821/campos_512_v4
+130/662971/campos_512_v4
+130/662972/campos_512_v4
+130/662981/campos_512_v4
+130/663013/campos_512_v4
+130/663039/campos_512_v4
+130/663053/campos_512_v4
+130/663106/campos_512_v4
+130/663178/campos_512_v4
+130/663262/campos_512_v4
+130/663266/campos_512_v4
+130/663354/campos_512_v4
+130/663391/campos_512_v4
+130/663555/campos_512_v4
+130/663561/campos_512_v4
+130/663601/campos_512_v4
+130/663639/campos_512_v4
+130/663745/campos_512_v4
+130/663795/campos_512_v4
+130/663981/campos_512_v4
+130/663988/campos_512_v4
+130/664048/campos_512_v4
+130/664062/campos_512_v4
+130/664114/campos_512_v4
+130/664180/campos_512_v4
+130/664182/campos_512_v4
+130/664241/campos_512_v4
+130/664321/campos_512_v4
+130/664396/campos_512_v4
+130/664448/campos_512_v4
+130/664460/campos_512_v4
+130/664518/campos_512_v4
+130/664562/campos_512_v4
+130/664648/campos_512_v4
+130/664796/campos_512_v4
+130/664916/campos_512_v4
+130/664994/campos_512_v4
+131/665179/campos_512_v4
+131/665195/campos_512_v4
+131/665199/campos_512_v4
+131/665229/campos_512_v4
+131/665244/campos_512_v4
+131/665272/campos_512_v4
+131/665290/campos_512_v4
+131/665336/campos_512_v4
+131/665366/campos_512_v4
+131/665399/campos_512_v4
+131/665425/campos_512_v4
+131/665445/campos_512_v4
+131/665607/campos_512_v4
+131/665612/campos_512_v4
+131/665741/campos_512_v4
+131/665778/campos_512_v4
+131/665951/campos_512_v4
+131/666010/campos_512_v4
+131/666042/campos_512_v4
+131/666047/campos_512_v4
+131/666087/campos_512_v4
+131/666163/campos_512_v4
+131/666193/campos_512_v4
+131/666245/campos_512_v4
+131/666299/campos_512_v4
+131/666668/campos_512_v4
+131/666794/campos_512_v4
+131/666854/campos_512_v4
+131/666923/campos_512_v4
+131/666929/campos_512_v4
+131/667104/campos_512_v4
+131/667252/campos_512_v4
+131/667308/campos_512_v4
+131/667385/campos_512_v4
+131/667417/campos_512_v4
+131/667467/campos_512_v4
+131/667478/campos_512_v4
+131/667646/campos_512_v4
+131/667785/campos_512_v4
+131/667834/campos_512_v4
+131/667844/campos_512_v4
+131/667916/campos_512_v4
+131/667941/campos_512_v4
+131/667995/campos_512_v4
+131/668090/campos_512_v4
+131/668111/campos_512_v4
+131/668161/campos_512_v4
+131/668623/campos_512_v4
+131/668640/campos_512_v4
+131/668736/campos_512_v4
+131/668832/campos_512_v4
+131/668856/campos_512_v4
+131/668981/campos_512_v4
+131/669025/campos_512_v4
+131/669028/campos_512_v4
+131/669044/campos_512_v4
+131/669266/campos_512_v4
+131/669360/campos_512_v4
+131/669392/campos_512_v4
+131/669463/campos_512_v4
+131/669505/campos_512_v4
+131/669520/campos_512_v4
+131/669573/campos_512_v4
+131/669581/campos_512_v4
+131/669690/campos_512_v4
+131/669724/campos_512_v4
+131/669733/campos_512_v4
+131/669742/campos_512_v4
+131/669763/campos_512_v4
+131/669792/campos_512_v4
+131/669793/campos_512_v4
+131/669918/campos_512_v4
+132/670033/campos_512_v4
+132/670179/campos_512_v4
+132/670257/campos_512_v4
+132/670269/campos_512_v4
+132/670286/campos_512_v4
+132/670302/campos_512_v4
+132/670465/campos_512_v4
+132/670518/campos_512_v4
+132/670526/campos_512_v4
+132/670528/campos_512_v4
+132/670573/campos_512_v4
+132/670593/campos_512_v4
+132/670691/campos_512_v4
+132/670705/campos_512_v4
+132/670934/campos_512_v4
+132/670949/campos_512_v4
+132/670956/campos_512_v4
+132/670965/campos_512_v4
+132/670997/campos_512_v4
+132/671035/campos_512_v4
+132/671051/campos_512_v4
+132/671064/campos_512_v4
+132/671072/campos_512_v4
+132/671164/campos_512_v4
+132/671191/campos_512_v4
+132/671302/campos_512_v4
+132/671305/campos_512_v4
+132/671465/campos_512_v4
+132/671503/campos_512_v4
+132/671532/campos_512_v4
+132/671619/campos_512_v4
+132/671627/campos_512_v4
+132/671632/campos_512_v4
+132/671634/campos_512_v4
+132/671698/campos_512_v4
+132/671753/campos_512_v4
+132/671804/campos_512_v4
+132/671868/campos_512_v4
+132/671964/campos_512_v4
+132/672005/campos_512_v4
+132/672013/campos_512_v4
+132/672041/campos_512_v4
+132/672137/campos_512_v4
+132/672148/campos_512_v4
+132/672193/campos_512_v4
+132/672195/campos_512_v4
+132/672250/campos_512_v4
+132/672297/campos_512_v4
+132/672328/campos_512_v4
+132/672365/campos_512_v4
+132/672384/campos_512_v4
+132/672506/campos_512_v4
+132/672839/campos_512_v4
+132/672884/campos_512_v4
+132/672921/campos_512_v4
+132/672926/campos_512_v4
+132/672964/campos_512_v4
+132/672973/campos_512_v4
+132/672979/campos_512_v4
+132/672991/campos_512_v4
+132/673131/campos_512_v4
+132/673164/campos_512_v4
+132/673350/campos_512_v4
+132/673392/campos_512_v4
+132/673575/campos_512_v4
+132/673656/campos_512_v4
+132/673775/campos_512_v4
+132/673854/campos_512_v4
+132/673886/campos_512_v4
+132/673976/campos_512_v4
+132/673981/campos_512_v4
+132/674021/campos_512_v4
+132/674124/campos_512_v4
+132/674196/campos_512_v4
+132/674224/campos_512_v4
+132/674333/campos_512_v4
+132/674361/campos_512_v4
+132/674365/campos_512_v4
+132/674389/campos_512_v4
+132/674420/campos_512_v4
+132/674523/campos_512_v4
+132/674565/campos_512_v4
+132/674703/campos_512_v4
+132/674800/campos_512_v4
+132/674815/campos_512_v4
+132/674956/campos_512_v4
+132/674990/campos_512_v4
+133/675046/campos_512_v4
+133/675097/campos_512_v4
+133/675140/campos_512_v4
+133/675164/campos_512_v4
+133/675180/campos_512_v4
+133/675261/campos_512_v4
+133/675282/campos_512_v4
+133/675333/campos_512_v4
+133/675436/campos_512_v4
+133/675498/campos_512_v4
+133/675521/campos_512_v4
+133/675545/campos_512_v4
+133/675599/campos_512_v4
+133/675666/campos_512_v4
+133/675918/campos_512_v4
+133/675940/campos_512_v4
+133/676085/campos_512_v4
+133/676124/campos_512_v4
+133/676167/campos_512_v4
+133/676174/campos_512_v4
+133/676180/campos_512_v4
+133/676207/campos_512_v4
+133/676226/campos_512_v4
+133/676245/campos_512_v4
+133/676381/campos_512_v4
+133/676461/campos_512_v4
+133/676462/campos_512_v4
+133/676484/campos_512_v4
+133/676515/campos_512_v4
+133/676688/campos_512_v4
+133/676797/campos_512_v4
+133/676822/campos_512_v4
+133/677132/campos_512_v4
+133/677152/campos_512_v4
+133/677275/campos_512_v4
+133/677276/campos_512_v4
+133/677451/campos_512_v4
+133/677458/campos_512_v4
+133/677460/campos_512_v4
+133/677501/campos_512_v4
+133/677618/campos_512_v4
+133/677749/campos_512_v4
+133/677860/campos_512_v4
+133/677877/campos_512_v4
+133/677929/campos_512_v4
+133/677939/campos_512_v4
+133/677952/campos_512_v4
+133/678044/campos_512_v4
+133/678056/campos_512_v4
+133/678197/campos_512_v4
+133/678212/campos_512_v4
+133/678247/campos_512_v4
+133/678301/campos_512_v4
+133/678328/campos_512_v4
+133/678505/campos_512_v4
+133/678614/campos_512_v4
+133/678742/campos_512_v4
+133/678746/campos_512_v4
+133/679010/campos_512_v4
+133/679056/campos_512_v4
+133/679130/campos_512_v4
+133/679296/campos_512_v4
+133/679354/campos_512_v4
+133/679361/campos_512_v4
+133/679420/campos_512_v4
+133/679426/campos_512_v4
+133/679427/campos_512_v4
+133/679544/campos_512_v4
+133/679546/campos_512_v4
+133/679700/campos_512_v4
+133/679722/campos_512_v4
+134/680094/campos_512_v4
+134/680175/campos_512_v4
+134/680283/campos_512_v4
+134/680298/campos_512_v4
+134/680400/campos_512_v4
+134/680480/campos_512_v4
+134/680498/campos_512_v4
+134/680507/campos_512_v4
+134/680521/campos_512_v4
+134/680618/campos_512_v4
+134/680660/campos_512_v4
+134/680805/campos_512_v4
+134/680810/campos_512_v4
+134/680844/campos_512_v4
+134/680845/campos_512_v4
+134/680861/campos_512_v4
+134/680970/campos_512_v4
+134/681036/campos_512_v4
+134/681103/campos_512_v4
+134/681104/campos_512_v4
+134/681329/campos_512_v4
+134/681341/campos_512_v4
+134/681505/campos_512_v4
+134/681530/campos_512_v4
+134/681563/campos_512_v4
+134/681586/campos_512_v4
+134/681686/campos_512_v4
+134/681759/campos_512_v4
+134/681850/campos_512_v4
+134/681852/campos_512_v4
+134/681986/campos_512_v4
+134/682110/campos_512_v4
+134/682165/campos_512_v4
+134/682229/campos_512_v4
+134/682449/campos_512_v4
+134/682463/campos_512_v4
+134/682623/campos_512_v4
+134/682697/campos_512_v4
+134/682770/campos_512_v4
+134/682799/campos_512_v4
+134/682805/campos_512_v4
+134/682866/campos_512_v4
+134/682934/campos_512_v4
+134/682974/campos_512_v4
+134/683095/campos_512_v4
+134/683428/campos_512_v4
+134/683439/campos_512_v4
+134/683477/campos_512_v4
+134/683485/campos_512_v4
+134/683548/campos_512_v4
+134/683556/campos_512_v4
+134/683570/campos_512_v4
+134/683581/campos_512_v4
+134/683671/campos_512_v4
+134/683674/campos_512_v4
+134/683751/campos_512_v4
+134/683818/campos_512_v4
+134/683841/campos_512_v4
+134/683858/campos_512_v4
+134/683873/campos_512_v4
+134/683962/campos_512_v4
+134/684003/campos_512_v4
+134/684036/campos_512_v4
+134/684061/campos_512_v4
+134/684077/campos_512_v4
+134/684081/campos_512_v4
+134/684251/campos_512_v4
+134/684284/campos_512_v4
+134/684513/campos_512_v4
+134/684525/campos_512_v4
+134/684573/campos_512_v4
+134/684581/campos_512_v4
+134/684657/campos_512_v4
+134/684936/campos_512_v4
+134/684947/campos_512_v4
+135/685131/campos_512_v4
+135/685161/campos_512_v4
+135/685274/campos_512_v4
+135/685275/campos_512_v4
+135/685285/campos_512_v4
+135/685295/campos_512_v4
+135/685305/campos_512_v4
+135/685316/campos_512_v4
+135/685350/campos_512_v4
+135/685371/campos_512_v4
+135/685375/campos_512_v4
+135/685480/campos_512_v4
+135/685493/campos_512_v4
+135/685566/campos_512_v4
+135/685581/campos_512_v4
+135/685707/campos_512_v4
+135/685714/campos_512_v4
+135/685764/campos_512_v4
+135/685773/campos_512_v4
+135/685863/campos_512_v4
+135/686031/campos_512_v4
+135/686082/campos_512_v4
+135/686153/campos_512_v4
+135/686317/campos_512_v4
+135/686375/campos_512_v4
+135/686442/campos_512_v4
+135/686494/campos_512_v4
+135/686509/campos_512_v4
+135/686515/campos_512_v4
+135/686540/campos_512_v4
+135/686565/campos_512_v4
+135/686652/campos_512_v4
+135/686685/campos_512_v4
+135/686700/campos_512_v4
+135/686747/campos_512_v4
+135/686901/campos_512_v4
+135/687026/campos_512_v4
+135/687044/campos_512_v4
+135/687107/campos_512_v4
+135/687122/campos_512_v4
+135/687127/campos_512_v4
+135/687312/campos_512_v4
+135/687415/campos_512_v4
+135/687438/campos_512_v4
+135/687664/campos_512_v4
+135/687702/campos_512_v4
+135/687790/campos_512_v4
+135/687931/campos_512_v4
+135/687985/campos_512_v4
+135/688092/campos_512_v4
+135/688233/campos_512_v4
+135/688312/campos_512_v4
+135/688340/campos_512_v4
+135/688436/campos_512_v4
+135/688449/campos_512_v4
+135/688481/campos_512_v4
+135/688521/campos_512_v4
+135/688641/campos_512_v4
+135/688672/campos_512_v4
+135/688686/campos_512_v4
+135/688705/campos_512_v4
+135/688720/campos_512_v4
+135/688806/campos_512_v4
+135/689016/campos_512_v4
+135/689129/campos_512_v4
+135/689137/campos_512_v4
+135/689139/campos_512_v4
+135/689156/campos_512_v4
+135/689284/campos_512_v4
+135/689323/campos_512_v4
+135/689334/campos_512_v4
+135/689407/campos_512_v4
+135/689451/campos_512_v4
+135/689455/campos_512_v4
+135/689529/campos_512_v4
+135/689683/campos_512_v4
+135/689691/campos_512_v4
+135/689772/campos_512_v4
+135/689811/campos_512_v4
+135/689858/campos_512_v4
+135/689935/campos_512_v4
+136/690028/campos_512_v4
+136/690057/campos_512_v4
+136/690088/campos_512_v4
+136/690107/campos_512_v4
+136/690215/campos_512_v4
+136/690231/campos_512_v4
+136/690285/campos_512_v4
+136/690319/campos_512_v4
+136/690320/campos_512_v4
+136/690490/campos_512_v4
+136/690497/campos_512_v4
+136/690545/campos_512_v4
+136/690623/campos_512_v4
+136/690706/campos_512_v4
+136/690716/campos_512_v4
+136/690732/campos_512_v4
+136/690742/campos_512_v4
+136/690809/campos_512_v4
+136/690826/campos_512_v4
+136/690871/campos_512_v4
+136/691059/campos_512_v4
+136/691061/campos_512_v4
+136/691062/campos_512_v4
+136/691159/campos_512_v4
+136/691195/campos_512_v4
+136/691230/campos_512_v4
+136/691377/campos_512_v4
+136/691404/campos_512_v4
+136/691409/campos_512_v4
+136/691426/campos_512_v4
+136/691448/campos_512_v4
+136/691499/campos_512_v4
+136/691517/campos_512_v4
+136/691596/campos_512_v4
+136/691623/campos_512_v4
+136/691624/campos_512_v4
+136/691640/campos_512_v4
+136/691674/campos_512_v4
+136/691685/campos_512_v4
+136/691768/campos_512_v4
+136/691802/campos_512_v4
+136/691836/campos_512_v4
+136/691856/campos_512_v4
+136/691872/campos_512_v4
+136/691900/campos_512_v4
+136/691983/campos_512_v4
+136/692023/campos_512_v4
+136/692052/campos_512_v4
+136/692069/campos_512_v4
+136/692205/campos_512_v4
+136/692250/campos_512_v4
+136/692363/campos_512_v4
+136/692391/campos_512_v4
+136/692396/campos_512_v4
+136/692414/campos_512_v4
+136/692482/campos_512_v4
+136/692498/campos_512_v4
+136/692546/campos_512_v4
+136/692643/campos_512_v4
+136/692713/campos_512_v4
+136/692803/campos_512_v4
+136/693035/campos_512_v4
+136/693051/campos_512_v4
+136/693164/campos_512_v4
+136/693197/campos_512_v4
+136/693206/campos_512_v4
+136/693229/campos_512_v4
+136/693245/campos_512_v4
+136/693299/campos_512_v4
+136/693313/campos_512_v4
+136/693346/campos_512_v4
+136/693534/campos_512_v4
+136/693543/campos_512_v4
+136/693683/campos_512_v4
+136/693714/campos_512_v4
+136/693725/campos_512_v4
+136/693829/campos_512_v4
+136/693943/campos_512_v4
+136/694060/campos_512_v4
+136/694073/campos_512_v4
+136/694126/campos_512_v4
+136/694258/campos_512_v4
+136/694443/campos_512_v4
+136/694484/campos_512_v4
+136/694500/campos_512_v4
+136/694754/campos_512_v4
+136/694817/campos_512_v4
+136/694821/campos_512_v4
+136/694862/campos_512_v4
+136/694887/campos_512_v4
+136/694998/campos_512_v4
+137/695128/campos_512_v4
+137/695155/campos_512_v4
+137/695188/campos_512_v4
+137/695206/campos_512_v4
+137/695273/campos_512_v4
+137/695302/campos_512_v4
+137/695334/campos_512_v4
+137/695348/campos_512_v4
+137/695350/campos_512_v4
+137/695358/campos_512_v4
+137/695395/campos_512_v4
+137/695480/campos_512_v4
+137/695503/campos_512_v4
+137/695540/campos_512_v4
+137/695556/campos_512_v4
+137/695616/campos_512_v4
+137/695621/campos_512_v4
+137/695721/campos_512_v4
+137/695724/campos_512_v4
+137/695853/campos_512_v4
+137/695854/campos_512_v4
+137/695863/campos_512_v4
+137/695889/campos_512_v4
+137/695929/campos_512_v4
+137/696022/campos_512_v4
+137/696059/campos_512_v4
+137/696076/campos_512_v4
+137/696137/campos_512_v4
+137/696150/campos_512_v4
+137/696187/campos_512_v4
+137/696472/campos_512_v4
+137/696479/campos_512_v4
+137/696569/campos_512_v4
+137/696806/campos_512_v4
+137/696811/campos_512_v4
+137/696826/campos_512_v4
+137/696842/campos_512_v4
+137/697010/campos_512_v4
+137/697028/campos_512_v4
+137/697069/campos_512_v4
+137/697316/campos_512_v4
+137/697317/campos_512_v4
+137/697356/campos_512_v4
+137/697498/campos_512_v4
+137/697574/campos_512_v4
+137/697611/campos_512_v4
+137/697636/campos_512_v4
+137/697644/campos_512_v4
+137/697728/campos_512_v4
+137/697780/campos_512_v4
+137/697859/campos_512_v4
+137/697931/campos_512_v4
+137/697973/campos_512_v4
+137/698079/campos_512_v4
+137/698310/campos_512_v4
+137/698360/campos_512_v4
+137/698633/campos_512_v4
+137/698645/campos_512_v4
+137/698666/campos_512_v4
+137/698793/campos_512_v4
+137/698820/campos_512_v4
+137/698821/campos_512_v4
+137/699270/campos_512_v4
+137/699317/campos_512_v4
+137/699374/campos_512_v4
+137/699441/campos_512_v4
+137/699513/campos_512_v4
+137/699534/campos_512_v4
+137/699627/campos_512_v4
+137/699652/campos_512_v4
+137/699700/campos_512_v4
+137/699703/campos_512_v4
+137/699789/campos_512_v4
+137/699799/campos_512_v4
+137/699801/campos_512_v4
+137/699834/campos_512_v4
+137/699890/campos_512_v4
+137/699894/campos_512_v4
+137/699973/campos_512_v4
+138/700078/campos_512_v4
+138/700260/campos_512_v4
+138/700297/campos_512_v4
+138/700397/campos_512_v4
+138/700632/campos_512_v4
+138/700638/campos_512_v4
+138/700758/campos_512_v4
+138/700924/campos_512_v4
+138/700936/campos_512_v4
+138/700939/campos_512_v4
+138/701043/campos_512_v4
+138/701058/campos_512_v4
+138/701068/campos_512_v4
+138/701088/campos_512_v4
+138/701131/campos_512_v4
+138/701265/campos_512_v4
+138/701337/campos_512_v4
+138/701389/campos_512_v4
+138/701501/campos_512_v4
+138/701655/campos_512_v4
+138/701710/campos_512_v4
+138/701777/campos_512_v4
+138/701825/campos_512_v4
+138/701867/campos_512_v4
+138/701873/campos_512_v4
+138/701879/campos_512_v4
+138/701902/campos_512_v4
+138/701907/campos_512_v4
+138/702053/campos_512_v4
+138/702100/campos_512_v4
+138/702157/campos_512_v4
+138/702191/campos_512_v4
+138/702342/campos_512_v4
+138/702430/campos_512_v4
+138/702469/campos_512_v4
+138/702578/campos_512_v4
+138/702653/campos_512_v4
+138/702712/campos_512_v4
+138/702744/campos_512_v4
+138/702746/campos_512_v4
+138/702841/campos_512_v4
+138/702953/campos_512_v4
+138/702984/campos_512_v4
+138/703013/campos_512_v4
+138/703042/campos_512_v4
+138/703107/campos_512_v4
+138/703128/campos_512_v4
+138/703148/campos_512_v4
+138/703249/campos_512_v4
+138/703257/campos_512_v4
+138/703281/campos_512_v4
+138/703389/campos_512_v4
+138/703529/campos_512_v4
+138/703612/campos_512_v4
+138/703694/campos_512_v4
+138/703708/campos_512_v4
+138/703771/campos_512_v4
+138/703803/campos_512_v4
+138/703854/campos_512_v4
+138/703987/campos_512_v4
+138/703996/campos_512_v4
+138/704018/campos_512_v4
+138/704096/campos_512_v4
+138/704161/campos_512_v4
+138/704300/campos_512_v4
+138/704542/campos_512_v4
+138/704627/campos_512_v4
+138/704701/campos_512_v4
+138/704730/campos_512_v4
+138/704748/campos_512_v4
+138/704778/campos_512_v4
+138/704807/campos_512_v4
+138/704852/campos_512_v4
+138/704918/campos_512_v4
+138/704927/campos_512_v4
+138/704939/campos_512_v4
+138/704972/campos_512_v4
+139/705031/campos_512_v4
+139/705112/campos_512_v4
+139/705120/campos_512_v4
+139/705248/campos_512_v4
+139/705357/campos_512_v4
+139/705527/campos_512_v4
+139/705541/campos_512_v4
+139/705552/campos_512_v4
+139/705615/campos_512_v4
+139/705635/campos_512_v4
+139/705645/campos_512_v4
+139/705702/campos_512_v4
+139/705758/campos_512_v4
+139/705800/campos_512_v4
+139/706022/campos_512_v4
+139/706078/campos_512_v4
+139/706128/campos_512_v4
+139/706156/campos_512_v4
+139/706272/campos_512_v4
+139/706306/campos_512_v4
+139/706363/campos_512_v4
+139/706393/campos_512_v4
+139/706444/campos_512_v4
+139/706495/campos_512_v4
+139/706562/campos_512_v4
+139/706624/campos_512_v4
+139/706651/campos_512_v4
+139/706657/campos_512_v4
+139/706707/campos_512_v4
+139/706768/campos_512_v4
+139/706905/campos_512_v4
+139/706911/campos_512_v4
+139/707016/campos_512_v4
+139/707146/campos_512_v4
+139/707261/campos_512_v4
+139/707482/campos_512_v4
+139/707512/campos_512_v4
+139/707525/campos_512_v4
+139/707610/campos_512_v4
+139/707622/campos_512_v4
+139/707635/campos_512_v4
+139/707658/campos_512_v4
+139/707797/campos_512_v4
+139/707826/campos_512_v4
+139/707842/campos_512_v4
+139/707925/campos_512_v4
+139/707964/campos_512_v4
+139/708015/campos_512_v4
+139/708023/campos_512_v4
+139/708034/campos_512_v4
+139/708072/campos_512_v4
+139/708242/campos_512_v4
+139/708352/campos_512_v4
+139/708387/campos_512_v4
+139/708416/campos_512_v4
+139/708444/campos_512_v4
+139/708526/campos_512_v4
+139/708554/campos_512_v4
+139/708577/campos_512_v4
+139/708610/campos_512_v4
+139/708641/campos_512_v4
+139/708649/campos_512_v4
+139/708673/campos_512_v4
+139/708682/campos_512_v4
+139/708709/campos_512_v4
+139/708732/campos_512_v4
+139/708740/campos_512_v4
+139/708863/campos_512_v4
+139/708969/campos_512_v4
+139/709171/campos_512_v4
+139/709223/campos_512_v4
+139/709271/campos_512_v4
+139/709373/campos_512_v4
+139/709412/campos_512_v4
+139/709419/campos_512_v4
+139/709499/campos_512_v4
+139/709512/campos_512_v4
+139/709552/campos_512_v4
+139/709615/campos_512_v4
+139/709641/campos_512_v4
+139/709655/campos_512_v4
+139/709770/campos_512_v4
+139/709843/campos_512_v4
+139/709966/campos_512_v4
+139/709975/campos_512_v4
+14/80029/campos_512_v4
+14/80053/campos_512_v4
+14/80308/campos_512_v4
+14/80357/campos_512_v4
+14/80429/campos_512_v4
+14/80495/campos_512_v4
+14/80510/campos_512_v4
+14/80593/campos_512_v4
+14/80598/campos_512_v4
+14/80709/campos_512_v4
+14/80991/campos_512_v4
+14/81045/campos_512_v4
+14/81084/campos_512_v4
+14/81193/campos_512_v4
+14/81246/campos_512_v4
+14/81309/campos_512_v4
+14/81321/campos_512_v4
+14/81348/campos_512_v4
+14/81397/campos_512_v4
+14/81425/campos_512_v4
+14/81458/campos_512_v4
+14/81797/campos_512_v4
+14/81839/campos_512_v4
+14/81912/campos_512_v4
+14/81955/campos_512_v4
+14/81982/campos_512_v4
+14/82059/campos_512_v4
+14/82067/campos_512_v4
+14/82253/campos_512_v4
+14/82305/campos_512_v4
+14/82353/campos_512_v4
+14/82472/campos_512_v4
+14/82752/campos_512_v4
+14/82854/campos_512_v4
+14/82932/campos_512_v4
+14/83024/campos_512_v4
+14/83071/campos_512_v4
+14/83104/campos_512_v4
+14/83126/campos_512_v4
+14/83297/campos_512_v4
+14/83300/campos_512_v4
+14/83417/campos_512_v4
+14/83510/campos_512_v4
+14/83514/campos_512_v4
+14/83710/campos_512_v4
+14/83804/campos_512_v4
+14/83831/campos_512_v4
+14/83879/campos_512_v4
+14/84059/campos_512_v4
+14/84225/campos_512_v4
+14/84249/campos_512_v4
+14/84292/campos_512_v4
+14/84312/campos_512_v4
+14/84330/campos_512_v4
+14/84375/campos_512_v4
+14/84486/campos_512_v4
+14/84487/campos_512_v4
+14/84501/campos_512_v4
+14/84723/campos_512_v4
+14/84843/campos_512_v4
+14/84920/campos_512_v4
+14/84941/campos_512_v4
+14/84958/campos_512_v4
+140/710011/campos_512_v4
+140/710136/campos_512_v4
+140/710171/campos_512_v4
+140/710216/campos_512_v4
+140/710257/campos_512_v4
+140/710414/campos_512_v4
+140/710474/campos_512_v4
+140/710518/campos_512_v4
+140/710565/campos_512_v4
+140/710569/campos_512_v4
+140/710577/campos_512_v4
+140/710580/campos_512_v4
+140/710583/campos_512_v4
+140/710708/campos_512_v4
+140/710811/campos_512_v4
+140/711073/campos_512_v4
+140/711138/campos_512_v4
+140/711214/campos_512_v4
+140/711254/campos_512_v4
+140/711338/campos_512_v4
+140/711356/campos_512_v4
+140/711443/campos_512_v4
+140/711479/campos_512_v4
+140/711581/campos_512_v4
+140/711625/campos_512_v4
+140/711708/campos_512_v4
+140/711805/campos_512_v4
+140/711846/campos_512_v4
+140/711890/campos_512_v4
+140/711951/campos_512_v4
+140/712162/campos_512_v4
+140/712173/campos_512_v4
+140/712254/campos_512_v4
+140/712328/campos_512_v4
+140/712448/campos_512_v4
+140/712495/campos_512_v4
+140/712559/campos_512_v4
+140/712583/campos_512_v4
+140/712750/campos_512_v4
+140/712784/campos_512_v4
+140/712811/campos_512_v4
+140/712815/campos_512_v4
+140/712934/campos_512_v4
+140/713026/campos_512_v4
+140/713033/campos_512_v4
+140/713077/campos_512_v4
+140/713104/campos_512_v4
+140/713137/campos_512_v4
+140/713152/campos_512_v4
+140/713327/campos_512_v4
+140/713369/campos_512_v4
+140/713400/campos_512_v4
+140/713453/campos_512_v4
+140/713532/campos_512_v4
+140/713635/campos_512_v4
+140/713640/campos_512_v4
+140/713644/campos_512_v4
+140/713667/campos_512_v4
+140/713747/campos_512_v4
+140/713843/campos_512_v4
+140/714073/campos_512_v4
+140/714093/campos_512_v4
+140/714158/campos_512_v4
+140/714195/campos_512_v4
+140/714227/campos_512_v4
+140/714258/campos_512_v4
+140/714330/campos_512_v4
+140/714373/campos_512_v4
+140/714424/campos_512_v4
+140/714669/campos_512_v4
+140/714677/campos_512_v4
+140/714750/campos_512_v4
+140/714771/campos_512_v4
+140/714820/campos_512_v4
+140/714836/campos_512_v4
+140/714894/campos_512_v4
+140/714905/campos_512_v4
+140/714920/campos_512_v4
+140/714946/campos_512_v4
+140/714947/campos_512_v4
+140/714952/campos_512_v4
+140/714977/campos_512_v4
+141/715027/campos_512_v4
+141/715036/campos_512_v4
+141/715086/campos_512_v4
+141/715108/campos_512_v4
+141/715215/campos_512_v4
+141/715283/campos_512_v4
+141/715310/campos_512_v4
+141/715499/campos_512_v4
+141/715644/campos_512_v4
+141/715793/campos_512_v4
+141/715832/campos_512_v4
+141/715876/campos_512_v4
+141/715981/campos_512_v4
+141/716095/campos_512_v4
+141/716141/campos_512_v4
+141/716202/campos_512_v4
+141/716218/campos_512_v4
+141/716256/campos_512_v4
+141/716262/campos_512_v4
+141/716519/campos_512_v4
+141/716579/campos_512_v4
+141/716670/campos_512_v4
+141/716694/campos_512_v4
+141/716866/campos_512_v4
+141/716915/campos_512_v4
+141/716929/campos_512_v4
+141/716945/campos_512_v4
+141/716949/campos_512_v4
+141/716960/campos_512_v4
+141/717003/campos_512_v4
+141/717055/campos_512_v4
+141/717175/campos_512_v4
+141/717244/campos_512_v4
+141/717291/campos_512_v4
+141/717377/campos_512_v4
+141/717394/campos_512_v4
+141/717483/campos_512_v4
+141/717529/campos_512_v4
+141/717588/campos_512_v4
+141/717649/campos_512_v4
+141/717778/campos_512_v4
+141/717805/campos_512_v4
+141/717830/campos_512_v4
+141/717931/campos_512_v4
+141/717950/campos_512_v4
+141/717997/campos_512_v4
+141/718005/campos_512_v4
+141/718010/campos_512_v4
+141/718029/campos_512_v4
+141/718102/campos_512_v4
+141/718108/campos_512_v4
+141/718112/campos_512_v4
+141/718196/campos_512_v4
+141/718252/campos_512_v4
+141/718688/campos_512_v4
+141/718708/campos_512_v4
+141/718866/campos_512_v4
+141/718878/campos_512_v4
+141/718879/campos_512_v4
+141/718907/campos_512_v4
+141/718937/campos_512_v4
+141/719051/campos_512_v4
+141/719178/campos_512_v4
+141/719208/campos_512_v4
+141/719251/campos_512_v4
+141/719340/campos_512_v4
+141/719347/campos_512_v4
+141/719568/campos_512_v4
+141/719622/campos_512_v4
+141/719652/campos_512_v4
+141/719823/campos_512_v4
+141/719828/campos_512_v4
+141/719985/campos_512_v4
+141/719998/campos_512_v4
+142/720023/campos_512_v4
+142/720168/campos_512_v4
+142/720193/campos_512_v4
+142/720467/campos_512_v4
+142/720484/campos_512_v4
+142/720772/campos_512_v4
+142/720795/campos_512_v4
+142/720800/campos_512_v4
+142/720836/campos_512_v4
+142/720843/campos_512_v4
+142/720868/campos_512_v4
+142/720943/campos_512_v4
+142/721138/campos_512_v4
+142/721143/campos_512_v4
+142/721235/campos_512_v4
+142/721308/campos_512_v4
+142/721395/campos_512_v4
+142/721497/campos_512_v4
+142/721544/campos_512_v4
+142/721723/campos_512_v4
+142/721927/campos_512_v4
+142/722019/campos_512_v4
+142/722070/campos_512_v4
+142/722103/campos_512_v4
+142/722107/campos_512_v4
+142/722143/campos_512_v4
+142/722157/campos_512_v4
+142/722186/campos_512_v4
+142/722334/campos_512_v4
+142/722408/campos_512_v4
+142/722504/campos_512_v4
+142/722642/campos_512_v4
+142/722710/campos_512_v4
+142/722738/campos_512_v4
+142/722779/campos_512_v4
+142/722800/campos_512_v4
+142/722887/campos_512_v4
+142/722943/campos_512_v4
+142/722957/campos_512_v4
+142/722995/campos_512_v4
+142/723008/campos_512_v4
+142/723050/campos_512_v4
+142/723083/campos_512_v4
+142/723140/campos_512_v4
+142/723206/campos_512_v4
+142/723241/campos_512_v4
+142/723310/campos_512_v4
+142/723362/campos_512_v4
+142/723443/campos_512_v4
+142/723463/campos_512_v4
+142/723548/campos_512_v4
+142/723561/campos_512_v4
+142/723597/campos_512_v4
+142/723613/campos_512_v4
+142/723629/campos_512_v4
+142/723749/campos_512_v4
+142/723750/campos_512_v4
+142/724185/campos_512_v4
+142/724289/campos_512_v4
+142/724306/campos_512_v4
+142/724464/campos_512_v4
+142/724535/campos_512_v4
+142/724571/campos_512_v4
+142/724859/campos_512_v4
+142/724887/campos_512_v4
+143/725048/campos_512_v4
+143/725055/campos_512_v4
+143/725080/campos_512_v4
+143/725107/campos_512_v4
+143/725154/campos_512_v4
+143/725211/campos_512_v4
+143/725256/campos_512_v4
+143/725258/campos_512_v4
+143/725400/campos_512_v4
+143/725530/campos_512_v4
+143/725538/campos_512_v4
+143/725546/campos_512_v4
+143/725588/campos_512_v4
+143/725611/campos_512_v4
+143/725628/campos_512_v4
+143/725681/campos_512_v4
+143/725822/campos_512_v4
+143/725974/campos_512_v4
+143/726166/campos_512_v4
+143/726278/campos_512_v4
+143/726301/campos_512_v4
+143/726390/campos_512_v4
+143/726437/campos_512_v4
+143/726509/campos_512_v4
+143/726546/campos_512_v4
+143/726567/campos_512_v4
+143/726616/campos_512_v4
+143/726640/campos_512_v4
+143/726641/campos_512_v4
+143/726717/campos_512_v4
+143/726823/campos_512_v4
+143/726865/campos_512_v4
+143/726952/campos_512_v4
+143/726957/campos_512_v4
+143/727045/campos_512_v4
+143/727058/campos_512_v4
+143/727062/campos_512_v4
+143/727112/campos_512_v4
+143/727161/campos_512_v4
+143/727269/campos_512_v4
+143/727300/campos_512_v4
+143/727526/campos_512_v4
+143/727669/campos_512_v4
+143/727677/campos_512_v4
+143/727686/campos_512_v4
+143/727799/campos_512_v4
+143/728033/campos_512_v4
+143/728046/campos_512_v4
+143/728066/campos_512_v4
+143/728129/campos_512_v4
+143/728172/campos_512_v4
+143/728324/campos_512_v4
+143/728370/campos_512_v4
+143/728388/campos_512_v4
+143/728446/campos_512_v4
+143/728460/campos_512_v4
+143/728468/campos_512_v4
+143/728524/campos_512_v4
+143/728553/campos_512_v4
+143/728795/campos_512_v4
+143/728832/campos_512_v4
+143/728899/campos_512_v4
+143/728929/campos_512_v4
+143/728974/campos_512_v4
+143/729196/campos_512_v4
+143/729274/campos_512_v4
+143/729281/campos_512_v4
+143/729376/campos_512_v4
+143/729381/campos_512_v4
+143/729483/campos_512_v4
+143/729536/campos_512_v4
+143/729686/campos_512_v4
+143/729746/campos_512_v4
+143/729818/campos_512_v4
+143/729823/campos_512_v4
+143/729874/campos_512_v4
+143/729933/campos_512_v4
+143/729951/campos_512_v4
+143/729954/campos_512_v4
+144/730019/campos_512_v4
+144/730049/campos_512_v4
+144/730105/campos_512_v4
+144/730121/campos_512_v4
+144/730159/campos_512_v4
+144/730169/campos_512_v4
+144/730171/campos_512_v4
+144/730286/campos_512_v4
+144/730332/campos_512_v4
+144/730358/campos_512_v4
+144/730378/campos_512_v4
+144/730385/campos_512_v4
+144/730388/campos_512_v4
+144/730430/campos_512_v4
+144/730446/campos_512_v4
+144/730488/campos_512_v4
+144/730517/campos_512_v4
+144/730540/campos_512_v4
+144/730796/campos_512_v4
+144/730798/campos_512_v4
+144/730820/campos_512_v4
+144/730826/campos_512_v4
+144/730898/campos_512_v4
+144/730917/campos_512_v4
+144/730940/campos_512_v4
+144/731021/campos_512_v4
+144/731120/campos_512_v4
+144/731128/campos_512_v4
+144/731168/campos_512_v4
+144/731284/campos_512_v4
+144/731339/campos_512_v4
+144/731357/campos_512_v4
+144/731454/campos_512_v4
+144/731589/campos_512_v4
+144/731590/campos_512_v4
+144/731679/campos_512_v4
+144/731683/campos_512_v4
+144/731712/campos_512_v4
+144/731733/campos_512_v4
+144/731751/campos_512_v4
+144/731858/campos_512_v4
+144/732033/campos_512_v4
+144/732126/campos_512_v4
+144/732160/campos_512_v4
+144/732188/campos_512_v4
+144/732190/campos_512_v4
+144/732325/campos_512_v4
+144/732372/campos_512_v4
+144/732418/campos_512_v4
+144/732445/campos_512_v4
+144/732540/campos_512_v4
+144/732646/campos_512_v4
+144/732695/campos_512_v4
+144/732783/campos_512_v4
+144/732833/campos_512_v4
+144/732856/campos_512_v4
+144/732928/campos_512_v4
+144/732935/campos_512_v4
+144/733085/campos_512_v4
+144/733128/campos_512_v4
+144/733452/campos_512_v4
+144/733458/campos_512_v4
+144/733618/campos_512_v4
+144/733654/campos_512_v4
+144/733781/campos_512_v4
+144/733815/campos_512_v4
+144/733835/campos_512_v4
+144/733842/campos_512_v4
+144/733843/campos_512_v4
+144/733894/campos_512_v4
+144/733940/campos_512_v4
+144/733974/campos_512_v4
+144/734027/campos_512_v4
+144/734133/campos_512_v4
+144/734206/campos_512_v4
+144/734301/campos_512_v4
+144/734312/campos_512_v4
+144/734439/campos_512_v4
+144/734450/campos_512_v4
+144/734514/campos_512_v4
+144/734642/campos_512_v4
+144/734730/campos_512_v4
+144/734767/campos_512_v4
+144/734821/campos_512_v4
+144/734882/campos_512_v4
+145/735051/campos_512_v4
+145/735089/campos_512_v4
+145/735144/campos_512_v4
+145/735147/campos_512_v4
+145/735149/campos_512_v4
+145/735168/campos_512_v4
+145/735248/campos_512_v4
+145/735256/campos_512_v4
+145/735307/campos_512_v4
+145/735337/campos_512_v4
+145/735404/campos_512_v4
+145/735405/campos_512_v4
+145/735418/campos_512_v4
+145/735478/campos_512_v4
+145/735510/campos_512_v4
+145/735526/campos_512_v4
+145/735579/campos_512_v4
+145/735668/campos_512_v4
+145/735741/campos_512_v4
+145/735750/campos_512_v4
+145/735921/campos_512_v4
+145/735981/campos_512_v4
+145/735995/campos_512_v4
+145/736166/campos_512_v4
+145/736205/campos_512_v4
+145/736233/campos_512_v4
+145/736247/campos_512_v4
+145/736295/campos_512_v4
+145/736397/campos_512_v4
+145/736399/campos_512_v4
+145/736441/campos_512_v4
+145/736447/campos_512_v4
+145/736483/campos_512_v4
+145/736490/campos_512_v4
+145/736495/campos_512_v4
+145/736525/campos_512_v4
+145/736587/campos_512_v4
+145/736595/campos_512_v4
+145/736604/campos_512_v4
+145/736621/campos_512_v4
+145/736649/campos_512_v4
+145/736723/campos_512_v4
+145/736809/campos_512_v4
+145/736845/campos_512_v4
+145/736850/campos_512_v4
+145/736944/campos_512_v4
+145/736994/campos_512_v4
+145/737158/campos_512_v4
+145/737172/campos_512_v4
+145/737235/campos_512_v4
+145/737238/campos_512_v4
+145/737325/campos_512_v4
+145/737436/campos_512_v4
+145/737552/campos_512_v4
+145/737560/campos_512_v4
+145/737588/campos_512_v4
+145/737757/campos_512_v4
+145/737764/campos_512_v4
+145/737813/campos_512_v4
+145/737892/campos_512_v4
+145/737902/campos_512_v4
+145/737906/campos_512_v4
+145/737996/campos_512_v4
+145/738002/campos_512_v4
+145/738132/campos_512_v4
+145/738186/campos_512_v4
+145/738220/campos_512_v4
+145/738273/campos_512_v4
+145/738294/campos_512_v4
+145/738334/campos_512_v4
+145/738410/campos_512_v4
+145/738428/campos_512_v4
+145/738510/campos_512_v4
+145/738524/campos_512_v4
+145/738594/campos_512_v4
+145/738678/campos_512_v4
+145/738802/campos_512_v4
+145/738820/campos_512_v4
+145/738933/campos_512_v4
+145/738982/campos_512_v4
+145/738995/campos_512_v4
+145/739051/campos_512_v4
+145/739253/campos_512_v4
+145/739391/campos_512_v4
+145/739464/campos_512_v4
+145/739492/campos_512_v4
+145/739504/campos_512_v4
+145/739557/campos_512_v4
+145/739579/campos_512_v4
+145/739602/campos_512_v4
+145/739636/campos_512_v4
+145/739658/campos_512_v4
+145/739672/campos_512_v4
+145/739697/campos_512_v4
+145/739713/campos_512_v4
+145/739929/campos_512_v4
+145/739979/campos_512_v4
+145/739988/campos_512_v4
+146/740089/campos_512_v4
+146/740111/campos_512_v4
+146/740258/campos_512_v4
+146/740431/campos_512_v4
+146/740483/campos_512_v4
+146/740612/campos_512_v4
+146/740729/campos_512_v4
+146/740737/campos_512_v4
+146/740754/campos_512_v4
+146/740766/campos_512_v4
+146/740795/campos_512_v4
+146/740843/campos_512_v4
+146/740867/campos_512_v4
+146/741018/campos_512_v4
+146/741129/campos_512_v4
+146/741159/campos_512_v4
+146/741220/campos_512_v4
+146/741246/campos_512_v4
+146/741308/campos_512_v4
+146/741354/campos_512_v4
+146/741379/campos_512_v4
+146/741511/campos_512_v4
+146/741617/campos_512_v4
+146/741618/campos_512_v4
+146/741619/campos_512_v4
+146/741645/campos_512_v4
+146/741740/campos_512_v4
+146/741825/campos_512_v4
+146/741996/campos_512_v4
+146/742175/campos_512_v4
+146/742272/campos_512_v4
+146/742318/campos_512_v4
+146/742401/campos_512_v4
+146/742434/campos_512_v4
+146/742436/campos_512_v4
+146/742489/campos_512_v4
+146/742516/campos_512_v4
+146/742530/campos_512_v4
+146/742594/campos_512_v4
+146/742735/campos_512_v4
+146/742759/campos_512_v4
+146/742841/campos_512_v4
+146/742881/campos_512_v4
+146/742889/campos_512_v4
+146/742967/campos_512_v4
+146/743030/campos_512_v4
+146/743069/campos_512_v4
+146/743108/campos_512_v4
+146/743112/campos_512_v4
+146/743146/campos_512_v4
+146/743153/campos_512_v4
+146/743364/campos_512_v4
+146/743431/campos_512_v4
+146/743499/campos_512_v4
+146/743586/campos_512_v4
+146/743589/campos_512_v4
+146/743602/campos_512_v4
+146/743686/campos_512_v4
+146/743711/campos_512_v4
+146/743738/campos_512_v4
+146/743762/campos_512_v4
+146/743767/campos_512_v4
+146/743780/campos_512_v4
+146/743978/campos_512_v4
+146/744001/campos_512_v4
+146/744106/campos_512_v4
+146/744179/campos_512_v4
+146/744301/campos_512_v4
+146/744322/campos_512_v4
+146/744336/campos_512_v4
+146/744376/campos_512_v4
+146/744398/campos_512_v4
+146/744417/campos_512_v4
+146/744460/campos_512_v4
+146/744483/campos_512_v4
+146/744494/campos_512_v4
+146/744496/campos_512_v4
+146/744514/campos_512_v4
+146/744702/campos_512_v4
+146/744707/campos_512_v4
+146/744959/campos_512_v4
+147/745003/campos_512_v4
+147/745013/campos_512_v4
+147/745122/campos_512_v4
+147/745128/campos_512_v4
+147/745159/campos_512_v4
+147/745363/campos_512_v4
+147/745443/campos_512_v4
+147/745809/campos_512_v4
+147/745865/campos_512_v4
+147/745911/campos_512_v4
+147/745987/campos_512_v4
+147/746023/campos_512_v4
+147/746108/campos_512_v4
+147/746130/campos_512_v4
+147/746349/campos_512_v4
+147/746518/campos_512_v4
+147/746559/campos_512_v4
+147/746707/campos_512_v4
+147/746778/campos_512_v4
+147/747063/campos_512_v4
+147/747195/campos_512_v4
+147/747203/campos_512_v4
+147/747220/campos_512_v4
+147/747299/campos_512_v4
+147/747327/campos_512_v4
+147/747334/campos_512_v4
+147/747421/campos_512_v4
+147/747494/campos_512_v4
+147/747629/campos_512_v4
+147/747679/campos_512_v4
+147/747693/campos_512_v4
+147/747840/campos_512_v4
+147/747968/campos_512_v4
+147/748031/campos_512_v4
+147/748066/campos_512_v4
+147/748326/campos_512_v4
+147/748330/campos_512_v4
+147/748386/campos_512_v4
+147/748415/campos_512_v4
+147/748447/campos_512_v4
+147/748458/campos_512_v4
+147/748526/campos_512_v4
+147/748548/campos_512_v4
+147/748560/campos_512_v4
+147/748597/campos_512_v4
+147/748599/campos_512_v4
+147/748686/campos_512_v4
+147/748716/campos_512_v4
+147/748777/campos_512_v4
+147/748812/campos_512_v4
+147/748870/campos_512_v4
+147/749067/campos_512_v4
+147/749076/campos_512_v4
+147/749316/campos_512_v4
+147/749369/campos_512_v4
+147/749392/campos_512_v4
+147/749445/campos_512_v4
+147/749458/campos_512_v4
+147/749530/campos_512_v4
+147/749567/campos_512_v4
+147/749616/campos_512_v4
+147/749765/campos_512_v4
+147/749812/campos_512_v4
+147/749854/campos_512_v4
+147/749857/campos_512_v4
+147/749866/campos_512_v4
+147/749883/campos_512_v4
+147/749903/campos_512_v4
+147/749914/campos_512_v4
+147/749996/campos_512_v4
+148/750120/campos_512_v4
+148/750137/campos_512_v4
+148/750296/campos_512_v4
+148/750335/campos_512_v4
+148/750416/campos_512_v4
+148/750508/campos_512_v4
+148/750556/campos_512_v4
+148/750836/campos_512_v4
+148/750846/campos_512_v4
+148/750942/campos_512_v4
+148/751006/campos_512_v4
+148/751068/campos_512_v4
+148/751112/campos_512_v4
+148/751131/campos_512_v4
+148/751142/campos_512_v4
+148/751144/campos_512_v4
+148/751165/campos_512_v4
+148/751193/campos_512_v4
+148/751211/campos_512_v4
+148/751236/campos_512_v4
+148/751314/campos_512_v4
+148/751508/campos_512_v4
+148/751516/campos_512_v4
+148/751520/campos_512_v4
+148/751643/campos_512_v4
+148/751655/campos_512_v4
+148/751678/campos_512_v4
+148/751685/campos_512_v4
+148/751732/campos_512_v4
+148/751839/campos_512_v4
+148/752124/campos_512_v4
+148/752133/campos_512_v4
+148/752141/campos_512_v4
+148/752292/campos_512_v4
+148/752336/campos_512_v4
+148/752379/campos_512_v4
+148/752560/campos_512_v4
+148/752573/campos_512_v4
+148/752623/campos_512_v4
+148/752637/campos_512_v4
+148/752821/campos_512_v4
+148/752844/campos_512_v4
+148/752845/campos_512_v4
+148/752891/campos_512_v4
+148/752902/campos_512_v4
+148/752923/campos_512_v4
+148/752927/campos_512_v4
+148/752949/campos_512_v4
+148/752992/campos_512_v4
+148/753023/campos_512_v4
+148/753029/campos_512_v4
+148/753049/campos_512_v4
+148/753075/campos_512_v4
+148/753098/campos_512_v4
+148/753171/campos_512_v4
+148/753210/campos_512_v4
+148/753287/campos_512_v4
+148/753309/campos_512_v4
+148/753398/campos_512_v4
+148/753429/campos_512_v4
+148/753513/campos_512_v4
+148/753557/campos_512_v4
+148/753629/campos_512_v4
+148/753660/campos_512_v4
+148/753698/campos_512_v4
+148/753715/campos_512_v4
+148/753750/campos_512_v4
+148/753873/campos_512_v4
+148/753953/campos_512_v4
+148/753967/campos_512_v4
+148/753980/campos_512_v4
+148/754058/campos_512_v4
+148/754321/campos_512_v4
+148/754427/campos_512_v4
+148/754459/campos_512_v4
+148/754532/campos_512_v4
+148/754556/campos_512_v4
+148/754615/campos_512_v4
+148/754634/campos_512_v4
+148/754720/campos_512_v4
+148/754742/campos_512_v4
+148/754977/campos_512_v4
+148/754987/campos_512_v4
+148/754999/campos_512_v4
+149/755313/campos_512_v4
+149/755360/campos_512_v4
+149/755384/campos_512_v4
+149/755428/campos_512_v4
+149/755479/campos_512_v4
+149/755480/campos_512_v4
+149/755559/campos_512_v4
+149/755573/campos_512_v4
+149/755711/campos_512_v4
+149/755944/campos_512_v4
+149/756210/campos_512_v4
+149/756217/campos_512_v4
+149/756233/campos_512_v4
+149/756311/campos_512_v4
+149/756363/campos_512_v4
+149/756390/campos_512_v4
+149/756436/campos_512_v4
+149/756506/campos_512_v4
+149/756515/campos_512_v4
+149/756517/campos_512_v4
+149/756568/campos_512_v4
+149/756577/campos_512_v4
+149/756711/campos_512_v4
+149/756752/campos_512_v4
+149/756758/campos_512_v4
+149/756819/campos_512_v4
+149/756860/campos_512_v4
+149/756935/campos_512_v4
+149/756944/campos_512_v4
+149/757064/campos_512_v4
+149/757065/campos_512_v4
+149/757073/campos_512_v4
+149/757098/campos_512_v4
+149/757109/campos_512_v4
+149/757141/campos_512_v4
+149/757171/campos_512_v4
+149/757295/campos_512_v4
+149/757315/campos_512_v4
+149/757354/campos_512_v4
+149/757372/campos_512_v4
+149/757386/campos_512_v4
+149/757534/campos_512_v4
+149/757536/campos_512_v4
+149/757650/campos_512_v4
+149/757692/campos_512_v4
+149/757698/campos_512_v4
+149/757724/campos_512_v4
+149/757725/campos_512_v4
+149/757765/campos_512_v4
+149/757794/campos_512_v4
+149/757968/campos_512_v4
+149/758000/campos_512_v4
+149/758035/campos_512_v4
+149/758065/campos_512_v4
+149/758163/campos_512_v4
+149/758176/campos_512_v4
+149/758272/campos_512_v4
+149/758340/campos_512_v4
+149/758428/campos_512_v4
+149/758463/campos_512_v4
+149/758579/campos_512_v4
+149/758583/campos_512_v4
+149/758803/campos_512_v4
+149/758947/campos_512_v4
+149/759147/campos_512_v4
+149/759151/campos_512_v4
+149/759193/campos_512_v4
+149/759270/campos_512_v4
+149/759307/campos_512_v4
+149/759329/campos_512_v4
+149/759330/campos_512_v4
+149/759353/campos_512_v4
+149/759463/campos_512_v4
+149/759508/campos_512_v4
+149/759606/campos_512_v4
+149/759665/campos_512_v4
+149/759703/campos_512_v4
+149/759708/campos_512_v4
+149/759716/campos_512_v4
+149/759824/campos_512_v4
+15/85130/campos_512_v4
+15/85175/campos_512_v4
+15/85224/campos_512_v4
+15/85338/campos_512_v4
+15/85411/campos_512_v4
+15/85479/campos_512_v4
+15/85581/campos_512_v4
+15/85657/campos_512_v4
+15/85762/campos_512_v4
+15/85810/campos_512_v4
+15/85916/campos_512_v4
+15/85986/campos_512_v4
+15/86136/campos_512_v4
+15/86298/campos_512_v4
+15/86310/campos_512_v4
+15/86364/campos_512_v4
+15/86424/campos_512_v4
+15/86462/campos_512_v4
+15/86534/campos_512_v4
+15/86555/campos_512_v4
+15/86595/campos_512_v4
+15/86658/campos_512_v4
+15/86754/campos_512_v4
+15/86880/campos_512_v4
+15/87103/campos_512_v4
+15/87169/campos_512_v4
+15/87186/campos_512_v4
+15/87191/campos_512_v4
+15/87300/campos_512_v4
+15/87436/campos_512_v4
+15/87456/campos_512_v4
+15/87513/campos_512_v4
+15/87545/campos_512_v4
+15/87555/campos_512_v4
+15/87571/campos_512_v4
+15/87641/campos_512_v4
+15/87850/campos_512_v4
+15/87925/campos_512_v4
+15/87982/campos_512_v4
+15/88074/campos_512_v4
+15/88216/campos_512_v4
+15/88342/campos_512_v4
+15/88348/campos_512_v4
+15/88421/campos_512_v4
+15/88569/campos_512_v4
+15/88914/campos_512_v4
+15/88969/campos_512_v4
+15/88978/campos_512_v4
+15/88992/campos_512_v4
+15/88993/campos_512_v4
+15/89095/campos_512_v4
+15/89123/campos_512_v4
+15/89251/campos_512_v4
+15/89312/campos_512_v4
+15/89694/campos_512_v4
+15/89754/campos_512_v4
+15/89811/campos_512_v4
+15/89889/campos_512_v4
+150/760076/campos_512_v4
+150/760110/campos_512_v4
+150/760177/campos_512_v4
+150/760240/campos_512_v4
+150/760316/campos_512_v4
+150/760535/campos_512_v4
+150/760577/campos_512_v4
+150/760623/campos_512_v4
+150/760644/campos_512_v4
+150/760661/campos_512_v4
+150/760720/campos_512_v4
+150/760724/campos_512_v4
+150/760766/campos_512_v4
+150/760770/campos_512_v4
+150/760783/campos_512_v4
+150/760799/campos_512_v4
+150/760913/campos_512_v4
+150/760917/campos_512_v4
+150/760919/campos_512_v4
+150/760960/campos_512_v4
+150/760979/campos_512_v4
+150/760987/campos_512_v4
+150/761027/campos_512_v4
+150/761136/campos_512_v4
+150/761163/campos_512_v4
+150/761181/campos_512_v4
+150/761183/campos_512_v4
+150/761259/campos_512_v4
+150/761319/campos_512_v4
+150/761369/campos_512_v4
+150/761372/campos_512_v4
+150/761406/campos_512_v4
+150/761420/campos_512_v4
+150/761467/campos_512_v4
+150/761481/campos_512_v4
+150/761544/campos_512_v4
+150/761572/campos_512_v4
+150/761623/campos_512_v4
+150/761673/campos_512_v4
+150/761700/campos_512_v4
+150/761780/campos_512_v4
+150/761791/campos_512_v4
+150/761795/campos_512_v4
+150/761899/campos_512_v4
+150/761916/campos_512_v4
+150/762071/campos_512_v4
+150/762171/campos_512_v4
+150/762239/campos_512_v4
+150/762253/campos_512_v4
+150/762428/campos_512_v4
+150/762583/campos_512_v4
+150/762679/campos_512_v4
+150/762708/campos_512_v4
+150/762814/campos_512_v4
+150/762826/campos_512_v4
+150/762880/campos_512_v4
+150/763001/campos_512_v4
+150/763032/campos_512_v4
+150/763055/campos_512_v4
+150/763060/campos_512_v4
+150/763061/campos_512_v4
+150/763078/campos_512_v4
+150/763113/campos_512_v4
+150/763192/campos_512_v4
+150/763289/campos_512_v4
+150/763320/campos_512_v4
+150/763370/campos_512_v4
+150/763371/campos_512_v4
+150/763484/campos_512_v4
+150/763494/campos_512_v4
+150/763566/campos_512_v4
+150/763683/campos_512_v4
+150/763807/campos_512_v4
+150/763821/campos_512_v4
+150/763848/campos_512_v4
+150/763890/campos_512_v4
+150/763968/campos_512_v4
+150/763990/campos_512_v4
+150/764032/campos_512_v4
+150/764175/campos_512_v4
+150/764308/campos_512_v4
+150/764382/campos_512_v4
+150/764430/campos_512_v4
+150/764477/campos_512_v4
+150/764641/campos_512_v4
+150/764743/campos_512_v4
+150/764769/campos_512_v4
+150/764771/campos_512_v4
+150/764866/campos_512_v4
+150/764882/campos_512_v4
+151/765385/campos_512_v4
+151/765487/campos_512_v4
+151/765564/campos_512_v4
+151/765675/campos_512_v4
+151/765800/campos_512_v4
+151/765807/campos_512_v4
+151/765825/campos_512_v4
+151/766034/campos_512_v4
+151/766042/campos_512_v4
+151/766169/campos_512_v4
+151/766218/campos_512_v4
+151/766302/campos_512_v4
+151/766321/campos_512_v4
+151/766324/campos_512_v4
+151/766375/campos_512_v4
+151/766475/campos_512_v4
+151/766571/campos_512_v4
+151/766625/campos_512_v4
+151/766627/campos_512_v4
+151/766666/campos_512_v4
+151/766896/campos_512_v4
+151/766897/campos_512_v4
+151/766986/campos_512_v4
+151/766991/campos_512_v4
+151/767008/campos_512_v4
+151/767034/campos_512_v4
+151/767049/campos_512_v4
+151/767055/campos_512_v4
+151/767286/campos_512_v4
+151/767354/campos_512_v4
+151/767406/campos_512_v4
+151/767439/campos_512_v4
+151/767445/campos_512_v4
+151/767538/campos_512_v4
+151/767622/campos_512_v4
+151/767789/campos_512_v4
+151/767828/campos_512_v4
+151/767954/campos_512_v4
+151/767997/campos_512_v4
+151/768043/campos_512_v4
+151/768053/campos_512_v4
+151/768191/campos_512_v4
+151/768197/campos_512_v4
+151/768213/campos_512_v4
+151/768256/campos_512_v4
+151/768294/campos_512_v4
+151/768380/campos_512_v4
+151/768432/campos_512_v4
+151/768488/campos_512_v4
+151/768507/campos_512_v4
+151/768579/campos_512_v4
+151/768604/campos_512_v4
+151/768620/campos_512_v4
+151/768801/campos_512_v4
+151/768830/campos_512_v4
+151/768849/campos_512_v4
+151/768919/campos_512_v4
+151/768954/campos_512_v4
+151/768999/campos_512_v4
+151/769012/campos_512_v4
+151/769050/campos_512_v4
+151/769129/campos_512_v4
+151/769275/campos_512_v4
+151/769581/campos_512_v4
+151/769617/campos_512_v4
+151/769694/campos_512_v4
+151/769820/campos_512_v4
+151/769863/campos_512_v4
+151/769886/campos_512_v4
+151/769992/campos_512_v4
+152/770063/campos_512_v4
+152/770080/campos_512_v4
+152/770104/campos_512_v4
+152/770107/campos_512_v4
+152/770167/campos_512_v4
+152/770175/campos_512_v4
+152/770214/campos_512_v4
+152/770245/campos_512_v4
+152/770372/campos_512_v4
+152/770418/campos_512_v4
+152/770462/campos_512_v4
+152/770499/campos_512_v4
+152/770584/campos_512_v4
+152/770653/campos_512_v4
+152/770711/campos_512_v4
+152/770744/campos_512_v4
+152/770749/campos_512_v4
+152/770811/campos_512_v4
+152/770949/campos_512_v4
+152/770986/campos_512_v4
+152/771045/campos_512_v4
+152/771064/campos_512_v4
+152/771189/campos_512_v4
+152/771194/campos_512_v4
+152/771508/campos_512_v4
+152/771533/campos_512_v4
+152/771576/campos_512_v4
+152/771604/campos_512_v4
+152/771708/campos_512_v4
+152/771714/campos_512_v4
+152/771739/campos_512_v4
+152/771740/campos_512_v4
+152/771796/campos_512_v4
+152/771821/campos_512_v4
+152/771859/campos_512_v4
+152/771875/campos_512_v4
+152/772009/campos_512_v4
+152/772108/campos_512_v4
+152/772164/campos_512_v4
+152/772165/campos_512_v4
+152/772205/campos_512_v4
+152/772215/campos_512_v4
+152/772221/campos_512_v4
+152/772251/campos_512_v4
+152/772320/campos_512_v4
+152/772329/campos_512_v4
+152/772607/campos_512_v4
+152/772644/campos_512_v4
+152/772691/campos_512_v4
+152/772703/campos_512_v4
+152/772902/campos_512_v4
+152/772912/campos_512_v4
+152/773086/campos_512_v4
+152/773087/campos_512_v4
+152/773111/campos_512_v4
+152/773159/campos_512_v4
+152/773204/campos_512_v4
+152/773218/campos_512_v4
+152/773225/campos_512_v4
+152/773334/campos_512_v4
+152/773515/campos_512_v4
+152/773554/campos_512_v4
+152/773568/campos_512_v4
+152/773582/campos_512_v4
+152/773593/campos_512_v4
+152/773615/campos_512_v4
+152/773661/campos_512_v4
+152/773713/campos_512_v4
+152/773714/campos_512_v4
+152/773862/campos_512_v4
+152/773867/campos_512_v4
+152/773886/campos_512_v4
+152/773909/campos_512_v4
+152/773963/campos_512_v4
+152/774021/campos_512_v4
+152/774033/campos_512_v4
+152/774049/campos_512_v4
+152/774090/campos_512_v4
+152/774147/campos_512_v4
+152/774279/campos_512_v4
+152/774289/campos_512_v4
+152/774334/campos_512_v4
+152/774351/campos_512_v4
+152/774455/campos_512_v4
+152/774496/campos_512_v4
+152/774550/campos_512_v4
+152/774567/campos_512_v4
+152/774625/campos_512_v4
+152/774633/campos_512_v4
+152/774644/campos_512_v4
+152/774690/campos_512_v4
+152/774711/campos_512_v4
+152/774736/campos_512_v4
+152/774737/campos_512_v4
+152/774784/campos_512_v4
+152/774844/campos_512_v4
+152/774990/campos_512_v4
+153/775009/campos_512_v4
+153/775044/campos_512_v4
+153/775121/campos_512_v4
+153/775296/campos_512_v4
+153/775305/campos_512_v4
+153/775315/campos_512_v4
+153/775719/campos_512_v4
+153/775759/campos_512_v4
+153/775813/campos_512_v4
+153/775848/campos_512_v4
+153/775866/campos_512_v4
+153/776027/campos_512_v4
+153/776105/campos_512_v4
+153/776234/campos_512_v4
+153/776300/campos_512_v4
+153/776313/campos_512_v4
+153/776320/campos_512_v4
+153/776346/campos_512_v4
+153/776521/campos_512_v4
+153/776533/campos_512_v4
+153/776594/campos_512_v4
+153/776613/campos_512_v4
+153/776617/campos_512_v4
+153/776618/campos_512_v4
+153/776640/campos_512_v4
+153/776721/campos_512_v4
+153/776763/campos_512_v4
+153/776807/campos_512_v4
+153/776872/campos_512_v4
+153/776902/campos_512_v4
+153/777014/campos_512_v4
+153/777129/campos_512_v4
+153/777217/campos_512_v4
+153/777218/campos_512_v4
+153/777380/campos_512_v4
+153/777411/campos_512_v4
+153/777431/campos_512_v4
+153/777441/campos_512_v4
+153/777564/campos_512_v4
+153/777718/campos_512_v4
+153/777843/campos_512_v4
+153/777852/campos_512_v4
+153/778180/campos_512_v4
+153/778182/campos_512_v4
+153/778205/campos_512_v4
+153/778299/campos_512_v4
+153/778380/campos_512_v4
+153/778404/campos_512_v4
+153/778462/campos_512_v4
+153/778636/campos_512_v4
+153/778707/campos_512_v4
+153/778750/campos_512_v4
+153/778853/campos_512_v4
+153/778901/campos_512_v4
+153/778946/campos_512_v4
+153/779049/campos_512_v4
+153/779063/campos_512_v4
+153/779066/campos_512_v4
+153/779092/campos_512_v4
+153/779103/campos_512_v4
+153/779119/campos_512_v4
+153/779156/campos_512_v4
+153/779487/campos_512_v4
+153/779490/campos_512_v4
+153/779551/campos_512_v4
+153/779676/campos_512_v4
+153/779719/campos_512_v4
+153/779725/campos_512_v4
+153/779738/campos_512_v4
+153/779760/campos_512_v4
+153/779806/campos_512_v4
+153/779966/campos_512_v4
+154/780063/campos_512_v4
+154/780067/campos_512_v4
+154/780070/campos_512_v4
+154/780180/campos_512_v4
+154/780187/campos_512_v4
+154/780251/campos_512_v4
+154/780305/campos_512_v4
+154/780340/campos_512_v4
+154/780363/campos_512_v4
+154/780381/campos_512_v4
+154/780452/campos_512_v4
+154/780513/campos_512_v4
+154/780645/campos_512_v4
+154/780661/campos_512_v4
+154/780872/campos_512_v4
+154/780924/campos_512_v4
+154/780934/campos_512_v4
+154/781014/campos_512_v4
+154/781054/campos_512_v4
+154/781060/campos_512_v4
+154/781063/campos_512_v4
+154/781073/campos_512_v4
+154/781108/campos_512_v4
+154/781120/campos_512_v4
+154/781137/campos_512_v4
+154/781230/campos_512_v4
+154/781386/campos_512_v4
+154/781472/campos_512_v4
+154/781625/campos_512_v4
+154/781677/campos_512_v4
+154/781708/campos_512_v4
+154/781718/campos_512_v4
+154/781734/campos_512_v4
+154/781915/campos_512_v4
+154/781916/campos_512_v4
+154/781967/campos_512_v4
+154/782048/campos_512_v4
+154/782105/campos_512_v4
+154/782133/campos_512_v4
+154/782227/campos_512_v4
+154/782231/campos_512_v4
+154/782249/campos_512_v4
+154/782304/campos_512_v4
+154/782306/campos_512_v4
+154/782494/campos_512_v4
+154/782581/campos_512_v4
+154/782586/campos_512_v4
+154/782635/campos_512_v4
+154/782682/campos_512_v4
+154/782685/campos_512_v4
+154/782708/campos_512_v4
+154/782744/campos_512_v4
+154/782782/campos_512_v4
+154/783079/campos_512_v4
+154/783182/campos_512_v4
+154/783299/campos_512_v4
+154/783302/campos_512_v4
+154/783325/campos_512_v4
+154/783440/campos_512_v4
+154/783539/campos_512_v4
+154/783543/campos_512_v4
+154/783548/campos_512_v4
+154/783569/campos_512_v4
+154/783574/campos_512_v4
+154/783603/campos_512_v4
+154/783653/campos_512_v4
+154/783781/campos_512_v4
+154/783787/campos_512_v4
+154/783809/campos_512_v4
+154/783865/campos_512_v4
+154/783870/campos_512_v4
+154/783975/campos_512_v4
+154/783978/campos_512_v4
+154/784183/campos_512_v4
+154/784184/campos_512_v4
+154/784210/campos_512_v4
+154/784221/campos_512_v4
+154/784272/campos_512_v4
+154/784333/campos_512_v4
+154/784361/campos_512_v4
+154/784374/campos_512_v4
+154/784387/campos_512_v4
+154/784403/campos_512_v4
+154/784514/campos_512_v4
+154/784534/campos_512_v4
+154/784542/campos_512_v4
+154/784561/campos_512_v4
+154/784602/campos_512_v4
+154/784609/campos_512_v4
+154/784669/campos_512_v4
+154/784829/campos_512_v4
+154/784851/campos_512_v4
+154/784880/campos_512_v4
+154/784903/campos_512_v4
+154/784923/campos_512_v4
+154/784932/campos_512_v4
+154/784960/campos_512_v4
+154/784978/campos_512_v4
+154/784989/campos_512_v4
+155/785145/campos_512_v4
+155/785294/campos_512_v4
+155/785392/campos_512_v4
+155/785524/campos_512_v4
+155/785550/campos_512_v4
+155/785582/campos_512_v4
+155/785710/campos_512_v4
+155/785743/campos_512_v4
+155/785760/campos_512_v4
+155/785874/campos_512_v4
+155/785878/campos_512_v4
+155/785988/campos_512_v4
+155/786182/campos_512_v4
+155/786212/campos_512_v4
+155/786234/campos_512_v4
+155/786240/campos_512_v4
+155/786246/campos_512_v4
+155/786302/campos_512_v4
+155/786320/campos_512_v4
+155/786333/campos_512_v4
+155/786352/campos_512_v4
+155/786402/campos_512_v4
+155/786432/campos_512_v4
+155/786433/campos_512_v4
+155/786448/campos_512_v4
+155/786507/campos_512_v4
+155/786544/campos_512_v4
+155/786598/campos_512_v4
+155/786646/campos_512_v4
+155/786653/campos_512_v4
+155/786803/campos_512_v4
+155/787010/campos_512_v4
+155/787110/campos_512_v4
+155/787121/campos_512_v4
+155/787234/campos_512_v4
+155/787295/campos_512_v4
+155/787518/campos_512_v4
+155/787532/campos_512_v4
+155/787608/campos_512_v4
+155/787720/campos_512_v4
+155/787801/campos_512_v4
+155/787846/campos_512_v4
+155/787849/campos_512_v4
+155/787856/campos_512_v4
+155/787863/campos_512_v4
+155/787864/campos_512_v4
+155/787893/campos_512_v4
+155/787963/campos_512_v4
+155/787984/campos_512_v4
+155/788012/campos_512_v4
+155/788052/campos_512_v4
+155/788068/campos_512_v4
+155/788145/campos_512_v4
+155/788173/campos_512_v4
+155/788175/campos_512_v4
+155/788225/campos_512_v4
+155/788269/campos_512_v4
+155/788302/campos_512_v4
+155/788431/campos_512_v4
+155/788460/campos_512_v4
+155/788486/campos_512_v4
+155/788499/campos_512_v4
+155/788575/campos_512_v4
+155/788669/campos_512_v4
+155/788774/campos_512_v4
+155/788857/campos_512_v4
+155/788895/campos_512_v4
+155/789036/campos_512_v4
+155/789052/campos_512_v4
+155/789077/campos_512_v4
+155/789099/campos_512_v4
+155/789155/campos_512_v4
+155/789214/campos_512_v4
+155/789226/campos_512_v4
+155/789228/campos_512_v4
+155/789288/campos_512_v4
+155/789433/campos_512_v4
+155/789476/campos_512_v4
+155/789533/campos_512_v4
+155/789545/campos_512_v4
+155/789598/campos_512_v4
+155/789652/campos_512_v4
+155/789702/campos_512_v4
+155/789709/campos_512_v4
+155/789759/campos_512_v4
+155/789864/campos_512_v4
+155/789928/campos_512_v4
+156/790005/campos_512_v4
+156/790274/campos_512_v4
+156/790296/campos_512_v4
+156/790397/campos_512_v4
+156/790552/campos_512_v4
+156/790572/campos_512_v4
+156/790574/campos_512_v4
+156/790583/campos_512_v4
+156/790619/campos_512_v4
+156/790714/campos_512_v4
+156/790895/campos_512_v4
+156/790921/campos_512_v4
+156/790960/campos_512_v4
+156/790967/campos_512_v4
+156/791025/campos_512_v4
+156/791064/campos_512_v4
+156/791084/campos_512_v4
+156/791093/campos_512_v4
+156/791137/campos_512_v4
+156/791211/campos_512_v4
+156/791213/campos_512_v4
+156/791309/campos_512_v4
+156/791317/campos_512_v4
+156/791342/campos_512_v4
+156/791430/campos_512_v4
+156/791452/campos_512_v4
+156/791467/campos_512_v4
+156/791532/campos_512_v4
+156/791576/campos_512_v4
+156/791749/campos_512_v4
+156/791750/campos_512_v4
+156/791857/campos_512_v4
+156/791876/campos_512_v4
+156/791916/campos_512_v4
+156/791940/campos_512_v4
+156/791990/campos_512_v4
+156/792087/campos_512_v4
+156/792117/campos_512_v4
+156/792167/campos_512_v4
+156/792171/campos_512_v4
+156/792186/campos_512_v4
+156/792207/campos_512_v4
+156/792222/campos_512_v4
+156/792320/campos_512_v4
+156/792394/campos_512_v4
+156/792567/campos_512_v4
+156/792600/campos_512_v4
+156/792624/campos_512_v4
+156/792669/campos_512_v4
+156/792763/campos_512_v4
+156/792766/campos_512_v4
+156/792802/campos_512_v4
+156/792906/campos_512_v4
+156/792973/campos_512_v4
+156/793013/campos_512_v4
+156/793025/campos_512_v4
+156/793158/campos_512_v4
+156/793535/campos_512_v4
+156/793597/campos_512_v4
+156/793671/campos_512_v4
+156/793779/campos_512_v4
+156/793909/campos_512_v4
+156/794005/campos_512_v4
+156/794040/campos_512_v4
+156/794170/campos_512_v4
+156/794172/campos_512_v4
+156/794202/campos_512_v4
+156/794215/campos_512_v4
+156/794265/campos_512_v4
+156/794491/campos_512_v4
+156/794497/campos_512_v4
+156/794528/campos_512_v4
+156/794554/campos_512_v4
+156/794579/campos_512_v4
+156/794625/campos_512_v4
+156/794651/campos_512_v4
+156/794652/campos_512_v4
+156/794666/campos_512_v4
+156/794749/campos_512_v4
+156/794796/campos_512_v4
+156/794854/campos_512_v4
+156/794876/campos_512_v4
+156/794916/campos_512_v4
+156/794960/campos_512_v4
+157/795014/campos_512_v4
+157/795108/campos_512_v4
+157/795184/campos_512_v4
+157/795303/campos_512_v4
+157/795582/campos_512_v4
+157/795697/campos_512_v4
+157/795750/campos_512_v4
+157/795758/campos_512_v4
+157/795917/campos_512_v4
+157/795948/campos_512_v4
+157/795971/campos_512_v4
+157/796002/campos_512_v4
+157/796035/campos_512_v4
+157/796199/campos_512_v4
+157/796252/campos_512_v4
+157/796536/campos_512_v4
+157/796584/campos_512_v4
+157/796659/campos_512_v4
+157/796661/campos_512_v4
+157/796677/campos_512_v4
+157/796692/campos_512_v4
+157/796888/campos_512_v4
+157/796933/campos_512_v4
+157/796948/campos_512_v4
+157/797061/campos_512_v4
+157/797099/campos_512_v4
+157/797188/campos_512_v4
+157/797248/campos_512_v4
+157/797305/campos_512_v4
+157/797370/campos_512_v4
+157/797393/campos_512_v4
+157/797418/campos_512_v4
+157/797455/campos_512_v4
+157/797528/campos_512_v4
+157/797530/campos_512_v4
+157/797564/campos_512_v4
+157/797573/campos_512_v4
+157/797642/campos_512_v4
+157/797750/campos_512_v4
+157/797793/campos_512_v4
+157/797822/campos_512_v4
+157/797857/campos_512_v4
+157/797870/campos_512_v4
+157/797937/campos_512_v4
+157/797959/campos_512_v4
+157/797964/campos_512_v4
+157/797979/campos_512_v4
+157/797992/campos_512_v4
+157/798095/campos_512_v4
+157/798133/campos_512_v4
+157/798205/campos_512_v4
+157/798474/campos_512_v4
+157/798668/campos_512_v4
+157/798677/campos_512_v4
+157/798723/campos_512_v4
+157/798790/campos_512_v4
+157/798797/campos_512_v4
+157/798875/campos_512_v4
+157/798881/campos_512_v4
+157/798924/campos_512_v4
+157/798931/campos_512_v4
+157/798953/campos_512_v4
+157/798976/campos_512_v4
+157/799087/campos_512_v4
+157/799112/campos_512_v4
+157/799120/campos_512_v4
+157/799182/campos_512_v4
+157/799253/campos_512_v4
+157/799283/campos_512_v4
+157/799505/campos_512_v4
+157/799537/campos_512_v4
+157/799573/campos_512_v4
+157/799613/campos_512_v4
+157/799651/campos_512_v4
+157/799689/campos_512_v4
+157/799718/campos_512_v4
+157/799726/campos_512_v4
+157/799744/campos_512_v4
+157/799784/campos_512_v4
+157/799812/campos_512_v4
+157/799861/campos_512_v4
+157/799888/campos_512_v4
+157/799925/campos_512_v4
+158/800031/campos_512_v4
+158/800068/campos_512_v4
+158/800092/campos_512_v4
+158/800178/campos_512_v4
+158/800282/campos_512_v4
+158/800285/campos_512_v4
+158/800291/campos_512_v4
+158/800353/campos_512_v4
+158/800499/campos_512_v4
+158/800597/campos_512_v4
+158/800805/campos_512_v4
+158/800862/campos_512_v4
+158/800911/campos_512_v4
+158/801044/campos_512_v4
+158/801103/campos_512_v4
+158/801112/campos_512_v4
+158/801126/campos_512_v4
+158/801166/campos_512_v4
+158/801458/campos_512_v4
+158/801510/campos_512_v4
+158/801562/campos_512_v4
+158/801595/campos_512_v4
+158/801642/campos_512_v4
+158/801682/campos_512_v4
+158/801709/campos_512_v4
+158/801812/campos_512_v4
+158/801829/campos_512_v4
+158/801870/campos_512_v4
+158/801979/campos_512_v4
+158/802118/campos_512_v4
+158/802223/campos_512_v4
+158/802228/campos_512_v4
+158/802259/campos_512_v4
+158/802334/campos_512_v4
+158/802348/campos_512_v4
+158/802391/campos_512_v4
+158/802404/campos_512_v4
+158/802583/campos_512_v4
+158/802675/campos_512_v4
+158/802710/campos_512_v4
+158/802785/campos_512_v4
+158/802896/campos_512_v4
+158/803073/campos_512_v4
+158/803085/campos_512_v4
+158/803231/campos_512_v4
+158/803291/campos_512_v4
+158/803346/campos_512_v4
+158/803380/campos_512_v4
+158/803471/campos_512_v4
+158/803482/campos_512_v4
+158/803492/campos_512_v4
+158/803591/campos_512_v4
+158/803608/campos_512_v4
+158/803691/campos_512_v4
+158/803696/campos_512_v4
+158/803749/campos_512_v4
+158/803934/campos_512_v4
+158/803957/campos_512_v4
+158/803983/campos_512_v4
+158/804005/campos_512_v4
+158/804112/campos_512_v4
+158/804134/campos_512_v4
+158/804193/campos_512_v4
+158/804203/campos_512_v4
+158/804277/campos_512_v4
+158/804311/campos_512_v4
+158/804339/campos_512_v4
+158/804341/campos_512_v4
+158/804460/campos_512_v4
+158/804466/campos_512_v4
+158/804486/campos_512_v4
+158/804504/campos_512_v4
+158/804558/campos_512_v4
+158/804582/campos_512_v4
+158/804604/campos_512_v4
+158/804650/campos_512_v4
+158/804747/campos_512_v4
+158/804822/campos_512_v4
+158/804898/campos_512_v4
+159/805016/campos_512_v4
+159/805050/campos_512_v4
+159/805094/campos_512_v4
+159/805095/campos_512_v4
+159/805109/campos_512_v4
+159/805156/campos_512_v4
+159/805265/campos_512_v4
+159/805304/campos_512_v4
+159/805455/campos_512_v4
+159/805581/campos_512_v4
+159/805621/campos_512_v4
+159/805656/campos_512_v4
+159/805741/campos_512_v4
+159/805854/campos_512_v4
+159/805915/campos_512_v4
+159/806005/campos_512_v4
+159/806122/campos_512_v4
+159/806140/campos_512_v4
+159/806158/campos_512_v4
+159/806179/campos_512_v4
+159/806227/campos_512_v4
+159/806367/campos_512_v4
+159/806378/campos_512_v4
+159/806381/campos_512_v4
+159/806385/campos_512_v4
+159/806454/campos_512_v4
+159/806514/campos_512_v4
+159/806576/campos_512_v4
+159/806582/campos_512_v4
+159/806629/campos_512_v4
+159/806728/campos_512_v4
+159/806781/campos_512_v4
+159/806786/campos_512_v4
+159/806810/campos_512_v4
+159/806815/campos_512_v4
+159/806818/campos_512_v4
+159/806968/campos_512_v4
+159/806986/campos_512_v4
+159/807071/campos_512_v4
+159/807102/campos_512_v4
+159/807243/campos_512_v4
+159/807321/campos_512_v4
+159/807329/campos_512_v4
+159/807341/campos_512_v4
+159/807352/campos_512_v4
+159/807517/campos_512_v4
+159/807632/campos_512_v4
+159/807655/campos_512_v4
+159/807798/campos_512_v4
+159/807825/campos_512_v4
+159/807853/campos_512_v4
+159/808036/campos_512_v4
+159/808049/campos_512_v4
+159/808153/campos_512_v4
+159/808288/campos_512_v4
+159/808390/campos_512_v4
+159/808391/campos_512_v4
+159/808454/campos_512_v4
+159/808482/campos_512_v4
+159/808540/campos_512_v4
+159/808617/campos_512_v4
+159/808654/campos_512_v4
+159/808666/campos_512_v4
+159/808676/campos_512_v4
+16/90221/campos_512_v4
+16/90222/campos_512_v4
+16/90301/campos_512_v4
+16/90339/campos_512_v4
+16/90342/campos_512_v4
+16/90365/campos_512_v4
+16/90377/campos_512_v4
+16/90395/campos_512_v4
+16/90484/campos_512_v4
+16/90519/campos_512_v4
+16/90520/campos_512_v4
+16/90594/campos_512_v4
+16/90716/campos_512_v4
+16/90772/campos_512_v4
+16/90810/campos_512_v4
+16/90845/campos_512_v4
+16/90865/campos_512_v4
+16/91092/campos_512_v4
+16/91134/campos_512_v4
+16/91230/campos_512_v4
+16/91362/campos_512_v4
+16/91376/campos_512_v4
+16/91476/campos_512_v4
+16/91553/campos_512_v4
+16/91559/campos_512_v4
+16/91751/campos_512_v4
+16/92006/campos_512_v4
+16/92128/campos_512_v4
+16/92296/campos_512_v4
+16/92314/campos_512_v4
+16/92387/campos_512_v4
+16/92445/campos_512_v4
+16/92476/campos_512_v4
+16/92531/campos_512_v4
+16/92607/campos_512_v4
+16/92645/campos_512_v4
+16/92877/campos_512_v4
+16/92973/campos_512_v4
+16/92979/campos_512_v4
+16/93095/campos_512_v4
+16/93103/campos_512_v4
+16/93211/campos_512_v4
+16/93269/campos_512_v4
+16/93289/campos_512_v4
+16/93325/campos_512_v4
+16/93395/campos_512_v4
+16/93437/campos_512_v4
+16/93450/campos_512_v4
+16/93745/campos_512_v4
+16/93805/campos_512_v4
+16/93815/campos_512_v4
+16/93841/campos_512_v4
+16/93987/campos_512_v4
+16/94045/campos_512_v4
+16/94050/campos_512_v4
+16/94051/campos_512_v4
+16/94107/campos_512_v4
+16/94155/campos_512_v4
+16/94192/campos_512_v4
+16/94217/campos_512_v4
+16/94424/campos_512_v4
+16/94444/campos_512_v4
+16/94462/campos_512_v4
+16/94563/campos_512_v4
+16/94621/campos_512_v4
+16/94645/campos_512_v4
+16/94668/campos_512_v4
+16/94712/campos_512_v4
+16/94762/campos_512_v4
+16/94788/campos_512_v4
+16/94832/campos_512_v4
+17/95013/campos_512_v4
+17/95167/campos_512_v4
+17/95220/campos_512_v4
+17/95349/campos_512_v4
+17/95371/campos_512_v4
+17/95382/campos_512_v4
+17/95576/campos_512_v4
+17/95642/campos_512_v4
+17/95815/campos_512_v4
+17/95869/campos_512_v4
+17/95940/campos_512_v4
+17/95977/campos_512_v4
+17/96019/campos_512_v4
+17/96117/campos_512_v4
+17/96141/campos_512_v4
+17/96212/campos_512_v4
+17/96302/campos_512_v4
+17/96317/campos_512_v4
+17/96455/campos_512_v4
+17/96501/campos_512_v4
+17/96563/campos_512_v4
+17/96576/campos_512_v4
+17/96776/campos_512_v4
+17/96899/campos_512_v4
+17/97122/campos_512_v4
+17/97211/campos_512_v4
+17/97235/campos_512_v4
+17/97304/campos_512_v4
+17/97449/campos_512_v4
+17/97736/campos_512_v4
+17/97759/campos_512_v4
+17/97766/campos_512_v4
+17/97781/campos_512_v4
+17/97787/campos_512_v4
+17/97791/campos_512_v4
+17/97889/campos_512_v4
+17/98049/campos_512_v4
+17/98079/campos_512_v4
+17/98089/campos_512_v4
+17/98122/campos_512_v4
+17/98204/campos_512_v4
+17/98413/campos_512_v4
+17/98733/campos_512_v4
+17/98790/campos_512_v4
+17/99126/campos_512_v4
+17/99351/campos_512_v4
+17/99356/campos_512_v4
+17/99497/campos_512_v4
+17/99562/campos_512_v4
+17/99593/campos_512_v4
+17/99767/campos_512_v4
+17/99784/campos_512_v4
+17/99807/campos_512_v4
+17/99933/campos_512_v4
+17/99941/campos_512_v4
+2/20105/campos_512_v4
+2/20272/campos_512_v4
+2/20413/campos_512_v4
+2/20500/campos_512_v4
+2/20683/campos_512_v4
+2/20688/campos_512_v4
+2/20691/campos_512_v4
+2/20732/campos_512_v4
+2/21083/campos_512_v4
+2/21177/campos_512_v4
+2/21274/campos_512_v4
+2/21315/campos_512_v4
+2/21439/campos_512_v4
+2/21573/campos_512_v4
+2/21596/campos_512_v4
+2/21692/campos_512_v4
+2/21694/campos_512_v4
+2/21706/campos_512_v4
+2/21707/campos_512_v4
+2/21805/campos_512_v4
+2/21878/campos_512_v4
+2/21926/campos_512_v4
+2/21928/campos_512_v4
+2/22059/campos_512_v4
+2/22202/campos_512_v4
+2/22238/campos_512_v4
+2/22350/campos_512_v4
+2/22351/campos_512_v4
+2/22438/campos_512_v4
+2/22514/campos_512_v4
+2/22698/campos_512_v4
+2/22780/campos_512_v4
+2/22849/campos_512_v4
+2/22867/campos_512_v4
+2/22996/campos_512_v4
+2/23034/campos_512_v4
+2/23340/campos_512_v4
+2/23676/campos_512_v4
+2/23680/campos_512_v4
+2/23825/campos_512_v4
+2/23842/campos_512_v4
+2/23856/campos_512_v4
+2/24197/campos_512_v4
+2/24238/campos_512_v4
+2/24254/campos_512_v4
+2/24393/campos_512_v4
+2/24499/campos_512_v4
+2/24551/campos_512_v4
+2/24709/campos_512_v4
+2/24742/campos_512_v4
+2/24833/campos_512_v4
+2/24858/campos_512_v4
+23/125083/campos_512_v4
+23/125121/campos_512_v4
+23/125254/campos_512_v4
+23/125330/campos_512_v4
+23/125410/campos_512_v4
+23/125565/campos_512_v4
+23/125572/campos_512_v4
+23/125676/campos_512_v4
+23/125806/campos_512_v4
+23/125907/campos_512_v4
+23/126002/campos_512_v4
+23/126127/campos_512_v4
+23/126254/campos_512_v4
+23/126371/campos_512_v4
+23/126399/campos_512_v4
+23/126467/campos_512_v4
+23/126514/campos_512_v4
+23/126546/campos_512_v4
+23/126608/campos_512_v4
+23/126631/campos_512_v4
+23/126714/campos_512_v4
+23/126723/campos_512_v4
+23/126840/campos_512_v4
+23/126977/campos_512_v4
+23/127010/campos_512_v4
+23/127104/campos_512_v4
+23/127123/campos_512_v4
+23/127213/campos_512_v4
+23/127261/campos_512_v4
+23/127496/campos_512_v4
+23/127577/campos_512_v4
+23/127598/campos_512_v4
+23/127634/campos_512_v4
+23/127739/campos_512_v4
+23/127827/campos_512_v4
+23/127905/campos_512_v4
+23/127956/campos_512_v4
+23/128028/campos_512_v4
+23/128140/campos_512_v4
+23/128177/campos_512_v4
+23/128289/campos_512_v4
+23/128323/campos_512_v4
+23/128401/campos_512_v4
+23/128514/campos_512_v4
+23/128542/campos_512_v4
+23/128556/campos_512_v4
+23/128609/campos_512_v4
+23/128737/campos_512_v4
+23/128838/campos_512_v4
+23/128851/campos_512_v4
+23/128899/campos_512_v4
+23/129023/campos_512_v4
+23/129094/campos_512_v4
+23/129216/campos_512_v4
+23/129332/campos_512_v4
+23/129355/campos_512_v4
+23/129358/campos_512_v4
+23/129451/campos_512_v4
+23/129464/campos_512_v4
+23/129607/campos_512_v4
+23/129637/campos_512_v4
+23/129690/campos_512_v4
+23/129796/campos_512_v4
+23/129920/campos_512_v4
+24/130186/campos_512_v4
+24/130187/campos_512_v4
+24/130227/campos_512_v4
+24/130254/campos_512_v4
+24/130356/campos_512_v4
+24/130416/campos_512_v4
+24/130441/campos_512_v4
+24/130478/campos_512_v4
+24/130523/campos_512_v4
+24/130821/campos_512_v4
+24/130839/campos_512_v4
+24/130902/campos_512_v4
+24/131125/campos_512_v4
+24/131135/campos_512_v4
+24/131219/campos_512_v4
+24/131234/campos_512_v4
+24/131262/campos_512_v4
+24/131286/campos_512_v4
+24/131433/campos_512_v4
+24/131436/campos_512_v4
+24/131605/campos_512_v4
+24/131657/campos_512_v4
+24/131661/campos_512_v4
+24/131671/campos_512_v4
+24/131710/campos_512_v4
+24/131830/campos_512_v4
+24/131861/campos_512_v4
+24/131885/campos_512_v4
+24/131898/campos_512_v4
+24/132046/campos_512_v4
+24/132076/campos_512_v4
+24/132121/campos_512_v4
+24/132130/campos_512_v4
+24/132212/campos_512_v4
+24/132232/campos_512_v4
+24/132292/campos_512_v4
+24/132351/campos_512_v4
+24/132388/campos_512_v4
+24/132407/campos_512_v4
+24/132802/campos_512_v4
+24/132811/campos_512_v4
+24/132872/campos_512_v4
+24/132896/campos_512_v4
+24/132996/campos_512_v4
+24/133013/campos_512_v4
+24/133059/campos_512_v4
+24/133101/campos_512_v4
+24/133118/campos_512_v4
+24/133121/campos_512_v4
+24/133151/campos_512_v4
+24/133347/campos_512_v4
+24/133381/campos_512_v4
+24/133391/campos_512_v4
+24/133449/campos_512_v4
+24/133455/campos_512_v4
+24/133501/campos_512_v4
+24/133516/campos_512_v4
+24/133640/campos_512_v4
+24/133666/campos_512_v4
+24/133694/campos_512_v4
+24/133754/campos_512_v4
+24/133773/campos_512_v4
+24/133803/campos_512_v4
+24/133957/campos_512_v4
+24/133983/campos_512_v4
+24/134037/campos_512_v4
+24/134099/campos_512_v4
+24/134118/campos_512_v4
+24/134145/campos_512_v4
+24/134437/campos_512_v4
+24/134555/campos_512_v4
+24/134560/campos_512_v4
+24/134663/campos_512_v4
+24/134670/campos_512_v4
+24/134711/campos_512_v4
+24/134868/campos_512_v4
+24/134873/campos_512_v4
+24/134895/campos_512_v4
+24/134903/campos_512_v4
+24/134911/campos_512_v4
+24/134980/campos_512_v4
+24/134983/campos_512_v4
+25/135034/campos_512_v4
+25/135048/campos_512_v4
+25/135153/campos_512_v4
+25/135198/campos_512_v4
+25/135256/campos_512_v4
+25/135287/campos_512_v4
+25/135501/campos_512_v4
+25/135703/campos_512_v4
+25/135764/campos_512_v4
+25/135826/campos_512_v4
+25/135880/campos_512_v4
+25/135894/campos_512_v4
+25/136068/campos_512_v4
+25/136088/campos_512_v4
+25/136149/campos_512_v4
+25/136280/campos_512_v4
+25/136466/campos_512_v4
+25/136539/campos_512_v4
+25/136546/campos_512_v4
+25/136615/campos_512_v4
+25/136676/campos_512_v4
+25/137227/campos_512_v4
+25/137390/campos_512_v4
+25/137771/campos_512_v4
+25/137977/campos_512_v4
+25/138237/campos_512_v4
+25/138302/campos_512_v4
+25/138517/campos_512_v4
+25/138566/campos_512_v4
+25/138633/campos_512_v4
+25/138645/campos_512_v4
+25/138664/campos_512_v4
+25/138684/campos_512_v4
+25/138712/campos_512_v4
+25/138743/campos_512_v4
+25/138923/campos_512_v4
+25/138929/campos_512_v4
+25/139165/campos_512_v4
+25/139367/campos_512_v4
+25/139551/campos_512_v4
+25/139627/campos_512_v4
+25/139692/campos_512_v4
+25/139799/campos_512_v4
+25/139804/campos_512_v4
+25/139908/campos_512_v4
+26/140061/campos_512_v4
+26/140062/campos_512_v4
+26/140164/campos_512_v4
+26/140298/campos_512_v4
+26/140391/campos_512_v4
+26/140489/campos_512_v4
+26/140523/campos_512_v4
+26/140584/campos_512_v4
+26/140721/campos_512_v4
+26/141133/campos_512_v4
+26/141203/campos_512_v4
+26/141231/campos_512_v4
+26/141310/campos_512_v4
+26/141355/campos_512_v4
+26/141362/campos_512_v4
+26/141395/campos_512_v4
+26/141432/campos_512_v4
+26/141461/campos_512_v4
+26/141480/campos_512_v4
+26/141508/campos_512_v4
+26/141550/campos_512_v4
+26/141571/campos_512_v4
+26/141662/campos_512_v4
+26/141773/campos_512_v4
+26/141874/campos_512_v4
+26/141892/campos_512_v4
+26/141909/campos_512_v4
+26/141953/campos_512_v4
+26/142153/campos_512_v4
+26/142215/campos_512_v4
+26/142355/campos_512_v4
+26/142405/campos_512_v4
+26/142406/campos_512_v4
+26/142466/campos_512_v4
+26/142467/campos_512_v4
+26/142581/campos_512_v4
+26/142588/campos_512_v4
+26/142670/campos_512_v4
+26/142701/campos_512_v4
+26/142766/campos_512_v4
+26/143035/campos_512_v4
+26/143197/campos_512_v4
+26/143259/campos_512_v4
+26/143354/campos_512_v4
+26/143424/campos_512_v4
+26/143431/campos_512_v4
+26/143544/campos_512_v4
+26/143647/campos_512_v4
+26/143726/campos_512_v4
+26/144059/campos_512_v4
+26/144074/campos_512_v4
+26/144294/campos_512_v4
+26/144402/campos_512_v4
+26/144585/campos_512_v4
+26/144715/campos_512_v4
+26/144761/campos_512_v4
+26/144788/campos_512_v4
+26/144805/campos_512_v4
+26/144814/campos_512_v4
+26/144927/campos_512_v4
+27/145159/campos_512_v4
+27/145289/campos_512_v4
+27/145340/campos_512_v4
+27/145537/campos_512_v4
+27/145551/campos_512_v4
+27/145751/campos_512_v4
+27/145807/campos_512_v4
+27/145934/campos_512_v4
+27/146003/campos_512_v4
+27/146157/campos_512_v4
+27/146163/campos_512_v4
+27/146178/campos_512_v4
+27/146183/campos_512_v4
+27/146198/campos_512_v4
+27/146319/campos_512_v4
+27/146323/campos_512_v4
+27/146377/campos_512_v4
+27/146378/campos_512_v4
+27/146420/campos_512_v4
+27/146606/campos_512_v4
+27/146985/campos_512_v4
+27/147061/campos_512_v4
+27/147200/campos_512_v4
+27/147267/campos_512_v4
+27/147409/campos_512_v4
+27/147410/campos_512_v4
+27/147535/campos_512_v4
+27/147651/campos_512_v4
+27/147683/campos_512_v4
+27/147748/campos_512_v4
+27/147768/campos_512_v4
+27/147830/campos_512_v4
+27/148074/campos_512_v4
+27/148202/campos_512_v4
+27/148222/campos_512_v4
+27/148260/campos_512_v4
+27/148342/campos_512_v4
+27/148354/campos_512_v4
+27/148382/campos_512_v4
+27/148440/campos_512_v4
+27/148479/campos_512_v4
+27/148654/campos_512_v4
+27/148769/campos_512_v4
+27/148857/campos_512_v4
+27/148872/campos_512_v4
+27/148894/campos_512_v4
+27/148899/campos_512_v4
+27/148993/campos_512_v4
+27/149132/campos_512_v4
+27/149151/campos_512_v4
+27/149333/campos_512_v4
+27/149360/campos_512_v4
+27/149466/campos_512_v4
+27/149471/campos_512_v4
+27/149679/campos_512_v4
+27/149718/campos_512_v4
+27/149787/campos_512_v4
+27/149975/campos_512_v4
+28/150026/campos_512_v4
+28/150072/campos_512_v4
+28/150137/campos_512_v4
+28/150138/campos_512_v4
+28/150160/campos_512_v4
+28/150219/campos_512_v4
+28/150330/campos_512_v4
+28/150445/campos_512_v4
+28/150457/campos_512_v4
+28/150494/campos_512_v4
+28/150533/campos_512_v4
+28/150546/campos_512_v4
+28/150572/campos_512_v4
+28/150592/campos_512_v4
+28/150681/campos_512_v4
+28/150684/campos_512_v4
+28/150688/campos_512_v4
+28/150690/campos_512_v4
+28/150721/campos_512_v4
+28/150757/campos_512_v4
+28/150821/campos_512_v4
+28/150884/campos_512_v4
+28/150911/campos_512_v4
+28/151022/campos_512_v4
+28/151038/campos_512_v4
+28/151061/campos_512_v4
+28/151076/campos_512_v4
+28/151106/campos_512_v4
+28/151165/campos_512_v4
+28/151209/campos_512_v4
+28/151270/campos_512_v4
+28/151356/campos_512_v4
+28/151382/campos_512_v4
+28/151398/campos_512_v4
+28/151479/campos_512_v4
+28/151710/campos_512_v4
+28/151720/campos_512_v4
+28/151864/campos_512_v4
+28/151937/campos_512_v4
+28/151990/campos_512_v4
+28/152152/campos_512_v4
+28/152230/campos_512_v4
+28/152303/campos_512_v4
+28/152307/campos_512_v4
+28/152340/campos_512_v4
+28/152466/campos_512_v4
+28/152507/campos_512_v4
+28/152511/campos_512_v4
+28/152554/campos_512_v4
+28/152563/campos_512_v4
+28/152581/campos_512_v4
+28/152607/campos_512_v4
+28/152641/campos_512_v4
+28/152724/campos_512_v4
+28/152775/campos_512_v4
+28/152849/campos_512_v4
+28/153091/campos_512_v4
+28/153092/campos_512_v4
+28/153199/campos_512_v4
+28/153214/campos_512_v4
+28/153361/campos_512_v4
+28/153380/campos_512_v4
+28/153404/campos_512_v4
+28/153420/campos_512_v4
+28/153424/campos_512_v4
+28/153661/campos_512_v4
+28/153665/campos_512_v4
+28/153690/campos_512_v4
+28/153692/campos_512_v4
+28/153720/campos_512_v4
+28/153847/campos_512_v4
+28/153855/campos_512_v4
+28/153906/campos_512_v4
+28/153932/campos_512_v4
+28/153951/campos_512_v4
+28/153960/campos_512_v4
+28/153971/campos_512_v4
+28/154006/campos_512_v4
+28/154059/campos_512_v4
+28/154066/campos_512_v4
+28/154162/campos_512_v4
+28/154178/campos_512_v4
+28/154244/campos_512_v4
+28/154256/campos_512_v4
+28/154257/campos_512_v4
+28/154259/campos_512_v4
+28/154445/campos_512_v4
+28/154452/campos_512_v4
+28/154460/campos_512_v4
+28/154494/campos_512_v4
+28/154512/campos_512_v4
+28/154516/campos_512_v4
+28/154576/campos_512_v4
+28/154593/campos_512_v4
+28/154609/campos_512_v4
+28/154654/campos_512_v4
+28/154696/campos_512_v4
+28/154791/campos_512_v4
+28/154861/campos_512_v4
+28/154892/campos_512_v4
+28/154895/campos_512_v4
+28/154901/campos_512_v4
+28/154907/campos_512_v4
+29/155106/campos_512_v4
+29/155132/campos_512_v4
+29/155133/campos_512_v4
+29/155138/campos_512_v4
+29/155172/campos_512_v4
+29/155193/campos_512_v4
+29/155285/campos_512_v4
+29/155307/campos_512_v4
+29/155313/campos_512_v4
+29/155314/campos_512_v4
+29/155327/campos_512_v4
+29/155340/campos_512_v4
+29/155458/campos_512_v4
+29/155463/campos_512_v4
+29/155474/campos_512_v4
+29/155480/campos_512_v4
+29/155716/campos_512_v4
+29/155732/campos_512_v4
+29/155784/campos_512_v4
+29/155824/campos_512_v4
+29/155876/campos_512_v4
+29/155877/campos_512_v4
+29/155950/campos_512_v4
+29/156003/campos_512_v4
+29/156014/campos_512_v4
+29/156202/campos_512_v4
+29/156304/campos_512_v4
+29/156401/campos_512_v4
+29/156532/campos_512_v4
+29/156570/campos_512_v4
+29/156602/campos_512_v4
+29/156608/campos_512_v4
+29/156627/campos_512_v4
+29/156742/campos_512_v4
+29/156746/campos_512_v4
+29/156863/campos_512_v4
+29/156985/campos_512_v4
+29/157053/campos_512_v4
+29/157140/campos_512_v4
+29/157208/campos_512_v4
+29/157396/campos_512_v4
+29/157446/campos_512_v4
+29/157608/campos_512_v4
+29/157618/campos_512_v4
+29/157688/campos_512_v4
+29/157928/campos_512_v4
+29/158022/campos_512_v4
+29/158163/campos_512_v4
+29/158279/campos_512_v4
+29/158289/campos_512_v4
+29/158342/campos_512_v4
+29/158475/campos_512_v4
+29/158547/campos_512_v4
+29/158822/campos_512_v4
+29/158861/campos_512_v4
+29/158887/campos_512_v4
+29/158940/campos_512_v4
+29/159003/campos_512_v4
+29/159010/campos_512_v4
+29/159095/campos_512_v4
+29/159118/campos_512_v4
+29/159198/campos_512_v4
+29/159283/campos_512_v4
+29/159386/campos_512_v4
+29/159432/campos_512_v4
+29/159584/campos_512_v4
+29/159645/campos_512_v4
+29/159806/campos_512_v4
+29/159897/campos_512_v4
+29/159934/campos_512_v4
+30/160013/campos_512_v4
+30/160044/campos_512_v4
+30/160049/campos_512_v4
+30/160189/campos_512_v4
+30/160281/campos_512_v4
+30/160369/campos_512_v4
+30/160389/campos_512_v4
+30/160435/campos_512_v4
+30/160508/campos_512_v4
+30/160641/campos_512_v4
+30/160729/campos_512_v4
+30/160792/campos_512_v4
+30/160806/campos_512_v4
+30/160833/campos_512_v4
+30/160836/campos_512_v4
+30/160846/campos_512_v4
+30/160918/campos_512_v4
+30/160997/campos_512_v4
+30/161004/campos_512_v4
+30/161023/campos_512_v4
+30/161118/campos_512_v4
+30/161264/campos_512_v4
+30/161288/campos_512_v4
+30/161314/campos_512_v4
+30/161335/campos_512_v4
+30/161382/campos_512_v4
+30/161467/campos_512_v4
+30/161547/campos_512_v4
+30/161611/campos_512_v4
+30/161659/campos_512_v4
+30/161703/campos_512_v4
+30/161742/campos_512_v4
+30/161762/campos_512_v4
+30/161822/campos_512_v4
+30/161926/campos_512_v4
+30/161943/campos_512_v4
+30/161945/campos_512_v4
+30/161950/campos_512_v4
+30/161951/campos_512_v4
+30/162001/campos_512_v4
+30/162035/campos_512_v4
+30/162084/campos_512_v4
+30/162128/campos_512_v4
+30/162133/campos_512_v4
+30/162162/campos_512_v4
+30/162173/campos_512_v4
+30/162282/campos_512_v4
+30/162360/campos_512_v4
+30/162468/campos_512_v4
+30/162502/campos_512_v4
+30/162600/campos_512_v4
+30/162648/campos_512_v4
+30/162666/campos_512_v4
+30/162700/campos_512_v4
+30/162751/campos_512_v4
+30/162874/campos_512_v4
+30/162900/campos_512_v4
+30/163051/campos_512_v4
+30/163179/campos_512_v4
+30/163200/campos_512_v4
+30/163235/campos_512_v4
+30/163328/campos_512_v4
+30/163389/campos_512_v4
+30/163413/campos_512_v4
+30/163622/campos_512_v4
+30/163783/campos_512_v4
+30/163789/campos_512_v4
+30/163815/campos_512_v4
+30/163825/campos_512_v4
+30/164010/campos_512_v4
+30/164061/campos_512_v4
+30/164102/campos_512_v4
+30/164341/campos_512_v4
+30/164410/campos_512_v4
+30/164413/campos_512_v4
+30/164442/campos_512_v4
+30/164451/campos_512_v4
+30/164497/campos_512_v4
+30/164561/campos_512_v4
+30/164571/campos_512_v4
+30/164628/campos_512_v4
+30/164683/campos_512_v4
+30/164796/campos_512_v4
+30/164819/campos_512_v4
+30/164861/campos_512_v4
+30/164951/campos_512_v4
+30/164974/campos_512_v4
+30/164988/campos_512_v4
+31/165041/campos_512_v4
+31/165054/campos_512_v4
+31/165065/campos_512_v4
+31/165071/campos_512_v4
+31/165169/campos_512_v4
+31/165698/campos_512_v4
+31/165790/campos_512_v4
+31/165820/campos_512_v4
+31/166043/campos_512_v4
+31/166118/campos_512_v4
+31/166146/campos_512_v4
+31/166160/campos_512_v4
+31/166240/campos_512_v4
+31/166320/campos_512_v4
+31/166471/campos_512_v4
+31/166489/campos_512_v4
+31/166574/campos_512_v4
+31/166587/campos_512_v4
+31/166595/campos_512_v4
+31/166605/campos_512_v4
+31/166676/campos_512_v4
+31/166722/campos_512_v4
+31/166730/campos_512_v4
+31/166747/campos_512_v4
+31/166931/campos_512_v4
+31/166949/campos_512_v4
+31/167134/campos_512_v4
+31/167243/campos_512_v4
+31/167259/campos_512_v4
+31/167261/campos_512_v4
+31/167308/campos_512_v4
+31/167418/campos_512_v4
+31/167680/campos_512_v4
+31/167712/campos_512_v4
+31/167786/campos_512_v4
+31/167825/campos_512_v4
+31/167841/campos_512_v4
+31/167860/campos_512_v4
+31/167975/campos_512_v4
+31/168173/campos_512_v4
+31/168179/campos_512_v4
+31/168288/campos_512_v4
+31/168361/campos_512_v4
+31/168366/campos_512_v4
+31/168397/campos_512_v4
+31/168506/campos_512_v4
+31/168825/campos_512_v4
+31/168892/campos_512_v4
+31/169003/campos_512_v4
+31/169028/campos_512_v4
+31/169037/campos_512_v4
+31/169089/campos_512_v4
+31/169138/campos_512_v4
+31/169158/campos_512_v4
+31/169159/campos_512_v4
+31/169195/campos_512_v4
+31/169422/campos_512_v4
+31/169666/campos_512_v4
+31/169698/campos_512_v4
+31/169759/campos_512_v4
+31/169895/campos_512_v4
+31/169937/campos_512_v4
+31/169973/campos_512_v4
+32/170187/campos_512_v4
+32/170216/campos_512_v4
+32/170221/campos_512_v4
+32/170260/campos_512_v4
+32/170266/campos_512_v4
+32/170354/campos_512_v4
+32/170356/campos_512_v4
+32/170437/campos_512_v4
+32/170481/campos_512_v4
+32/170489/campos_512_v4
+32/170513/campos_512_v4
+32/170553/campos_512_v4
+32/170569/campos_512_v4
+32/170667/campos_512_v4
+32/170676/campos_512_v4
+32/170848/campos_512_v4
+32/170880/campos_512_v4
+32/170968/campos_512_v4
+32/171057/campos_512_v4
+32/171102/campos_512_v4
+32/171124/campos_512_v4
+32/171235/campos_512_v4
+32/171240/campos_512_v4
+32/171243/campos_512_v4
+32/171289/campos_512_v4
+32/171292/campos_512_v4
+32/171343/campos_512_v4
+32/171478/campos_512_v4
+32/171539/campos_512_v4
+32/171568/campos_512_v4
+32/171588/campos_512_v4
+32/171648/campos_512_v4
+32/171763/campos_512_v4
+32/171863/campos_512_v4
+32/171951/campos_512_v4
+32/171975/campos_512_v4
+32/172034/campos_512_v4
+32/172039/campos_512_v4
+32/172055/campos_512_v4
+32/172110/campos_512_v4
+32/172156/campos_512_v4
+32/172217/campos_512_v4
+32/172223/campos_512_v4
+32/172288/campos_512_v4
+32/172541/campos_512_v4
+32/172594/campos_512_v4
+32/172749/campos_512_v4
+32/172845/campos_512_v4
+32/173170/campos_512_v4
+32/173240/campos_512_v4
+32/173407/campos_512_v4
+32/173443/campos_512_v4
+32/173497/campos_512_v4
+32/173515/campos_512_v4
+32/173531/campos_512_v4
+32/173593/campos_512_v4
+32/173609/campos_512_v4
+32/173620/campos_512_v4
+32/173621/campos_512_v4
+32/173662/campos_512_v4
+32/173676/campos_512_v4
+32/173707/campos_512_v4
+32/173737/campos_512_v4
+32/173768/campos_512_v4
+32/173778/campos_512_v4
+32/173835/campos_512_v4
+32/173926/campos_512_v4
+32/173931/campos_512_v4
+32/173937/campos_512_v4
+32/173941/campos_512_v4
+32/173989/campos_512_v4
+32/174065/campos_512_v4
+32/174076/campos_512_v4
+32/174083/campos_512_v4
+32/174098/campos_512_v4
+32/174104/campos_512_v4
+32/174108/campos_512_v4
+32/174174/campos_512_v4
+32/174183/campos_512_v4
+32/174212/campos_512_v4
+32/174294/campos_512_v4
+32/174330/campos_512_v4
+32/174435/campos_512_v4
+32/174498/campos_512_v4
+32/174628/campos_512_v4
+32/174872/campos_512_v4
+32/174893/campos_512_v4
+32/174983/campos_512_v4
+33/175019/campos_512_v4
+33/175063/campos_512_v4
+33/175418/campos_512_v4
+33/175453/campos_512_v4
+33/175471/campos_512_v4
+33/175505/campos_512_v4
+33/175617/campos_512_v4
+33/175622/campos_512_v4
+33/175768/campos_512_v4
+33/175811/campos_512_v4
+33/175815/campos_512_v4
+33/175819/campos_512_v4
+33/175996/campos_512_v4
+33/175997/campos_512_v4
+33/176232/campos_512_v4
+33/176332/campos_512_v4
+33/176391/campos_512_v4
+33/176405/campos_512_v4
+33/176472/campos_512_v4
+33/176550/campos_512_v4
+33/176563/campos_512_v4
+33/176612/campos_512_v4
+33/176626/campos_512_v4
+33/176832/campos_512_v4
+33/176917/campos_512_v4
+33/177086/campos_512_v4
+33/177114/campos_512_v4
+33/177248/campos_512_v4
+33/177287/campos_512_v4
+33/177301/campos_512_v4
+33/177322/campos_512_v4
+33/177330/campos_512_v4
+33/177457/campos_512_v4
+33/177524/campos_512_v4
+33/177600/campos_512_v4
+33/177923/campos_512_v4
+33/178155/campos_512_v4
+33/178324/campos_512_v4
+33/178418/campos_512_v4
+33/178473/campos_512_v4
+33/178529/campos_512_v4
+33/178742/campos_512_v4
+33/178799/campos_512_v4
+33/178830/campos_512_v4
+33/178881/campos_512_v4
+33/178925/campos_512_v4
+33/178930/campos_512_v4
+33/178981/campos_512_v4
+33/178983/campos_512_v4
+33/178989/campos_512_v4
+33/179094/campos_512_v4
+33/179131/campos_512_v4
+33/179205/campos_512_v4
+33/179212/campos_512_v4
+33/179270/campos_512_v4
+33/179299/campos_512_v4
+33/179313/campos_512_v4
+33/179374/campos_512_v4
+33/179432/campos_512_v4
+33/179464/campos_512_v4
+33/179705/campos_512_v4
+33/179765/campos_512_v4
+33/179800/campos_512_v4
+33/179892/campos_512_v4
+33/179908/campos_512_v4
+33/179920/campos_512_v4
+34/180033/campos_512_v4
+34/180455/campos_512_v4
+34/180460/campos_512_v4
+34/180543/campos_512_v4
+34/180568/campos_512_v4
+34/180603/campos_512_v4
+34/180700/campos_512_v4
+34/180737/campos_512_v4
+34/180761/campos_512_v4
+34/180858/campos_512_v4
+34/180900/campos_512_v4
+34/180954/campos_512_v4
+34/181113/campos_512_v4
+34/181115/campos_512_v4
+34/181173/campos_512_v4
+34/181222/campos_512_v4
+34/181240/campos_512_v4
+34/181258/campos_512_v4
+34/181266/campos_512_v4
+34/181379/campos_512_v4
+34/181397/campos_512_v4
+34/181429/campos_512_v4
+34/181437/campos_512_v4
+34/181599/campos_512_v4
+34/181625/campos_512_v4
+34/181632/campos_512_v4
+34/181640/campos_512_v4
+34/181738/campos_512_v4
+34/181812/campos_512_v4
+34/181869/campos_512_v4
+34/181890/campos_512_v4
+34/181905/campos_512_v4
+34/181983/campos_512_v4
+34/182027/campos_512_v4
+34/182045/campos_512_v4
+34/182537/campos_512_v4
+34/182646/campos_512_v4
+34/182656/campos_512_v4
+34/182661/campos_512_v4
+34/182665/campos_512_v4
+34/182778/campos_512_v4
+34/183007/campos_512_v4
+34/183021/campos_512_v4
+34/183097/campos_512_v4
+34/183515/campos_512_v4
+34/183611/campos_512_v4
+34/184011/campos_512_v4
+34/184199/campos_512_v4
+34/184248/campos_512_v4
+34/184283/campos_512_v4
+34/184295/campos_512_v4
+34/184329/campos_512_v4
+34/184366/campos_512_v4
+34/184391/campos_512_v4
+34/184458/campos_512_v4
+34/184479/campos_512_v4
+34/184484/campos_512_v4
+34/184490/campos_512_v4
+34/184628/campos_512_v4
+34/184634/campos_512_v4
+34/184655/campos_512_v4
+34/184701/campos_512_v4
+34/184702/campos_512_v4
+34/184705/campos_512_v4
+34/184745/campos_512_v4
+34/184748/campos_512_v4
+34/184754/campos_512_v4
+34/184913/campos_512_v4
+34/184920/campos_512_v4
+34/184963/campos_512_v4
+35/185094/campos_512_v4
+35/185112/campos_512_v4
+35/185122/campos_512_v4
+35/185285/campos_512_v4
+35/185439/campos_512_v4
+35/185595/campos_512_v4
+35/185622/campos_512_v4
+35/185684/campos_512_v4
+35/185714/campos_512_v4
+35/185740/campos_512_v4
+35/185773/campos_512_v4
+35/185898/campos_512_v4
+35/185918/campos_512_v4
+35/185947/campos_512_v4
+35/186012/campos_512_v4
+35/186100/campos_512_v4
+35/186101/campos_512_v4
+35/186102/campos_512_v4
+35/186137/campos_512_v4
+35/186151/campos_512_v4
+35/186169/campos_512_v4
+35/186237/campos_512_v4
+35/186279/campos_512_v4
+35/186374/campos_512_v4
+35/186484/campos_512_v4
+35/186576/campos_512_v4
+35/186637/campos_512_v4
+35/186927/campos_512_v4
+35/187021/campos_512_v4
+35/187089/campos_512_v4
+35/187109/campos_512_v4
+35/187110/campos_512_v4
+35/187174/campos_512_v4
+35/187176/campos_512_v4
+35/187226/campos_512_v4
+35/187415/campos_512_v4
+35/187452/campos_512_v4
+35/187489/campos_512_v4
+35/187509/campos_512_v4
+35/187521/campos_512_v4
+35/187545/campos_512_v4
+35/187575/campos_512_v4
+35/187633/campos_512_v4
+35/187669/campos_512_v4
+35/187701/campos_512_v4
+35/187717/campos_512_v4
+35/187756/campos_512_v4
+35/187759/campos_512_v4
+35/187760/campos_512_v4
+35/187816/campos_512_v4
+35/187832/campos_512_v4
+35/187948/campos_512_v4
+35/187967/campos_512_v4
+35/187993/campos_512_v4
+35/188100/campos_512_v4
+35/188135/campos_512_v4
+35/188190/campos_512_v4
+35/188239/campos_512_v4
+35/188317/campos_512_v4
+35/188403/campos_512_v4
+35/188517/campos_512_v4
+35/188561/campos_512_v4
+35/188751/campos_512_v4
+35/188888/campos_512_v4
+35/189051/campos_512_v4
+35/189063/campos_512_v4
+35/189123/campos_512_v4
+35/189275/campos_512_v4
+35/189727/campos_512_v4
+35/189729/campos_512_v4
+35/189735/campos_512_v4
+35/189740/campos_512_v4
+35/189757/campos_512_v4
+35/189784/campos_512_v4
+35/189840/campos_512_v4
+35/189871/campos_512_v4
+36/190049/campos_512_v4
+36/190067/campos_512_v4
+36/190297/campos_512_v4
+36/190360/campos_512_v4
+36/190389/campos_512_v4
+36/190395/campos_512_v4
+36/190411/campos_512_v4
+36/190455/campos_512_v4
+36/190498/campos_512_v4
+36/190738/campos_512_v4
+36/190745/campos_512_v4
+36/190787/campos_512_v4
+36/190871/campos_512_v4
+36/190900/campos_512_v4
+36/191017/campos_512_v4
+36/191091/campos_512_v4
+36/191106/campos_512_v4
+36/191324/campos_512_v4
+36/191347/campos_512_v4
+36/191385/campos_512_v4
+36/191437/campos_512_v4
+36/191457/campos_512_v4
+36/191491/campos_512_v4
+36/191528/campos_512_v4
+36/191529/campos_512_v4
+36/191694/campos_512_v4
+36/191821/campos_512_v4
+36/191830/campos_512_v4
+36/191891/campos_512_v4
+36/191995/campos_512_v4
+36/191997/campos_512_v4
+36/192013/campos_512_v4
+36/192041/campos_512_v4
+36/192042/campos_512_v4
+36/192115/campos_512_v4
+36/192128/campos_512_v4
+36/192142/campos_512_v4
+36/192150/campos_512_v4
+36/192205/campos_512_v4
+36/192258/campos_512_v4
+36/192589/campos_512_v4
+36/192604/campos_512_v4
+36/192651/campos_512_v4
+36/192736/campos_512_v4
+36/192923/campos_512_v4
+36/192932/campos_512_v4
+36/192941/campos_512_v4
+36/192992/campos_512_v4
+36/193060/campos_512_v4
+36/193184/campos_512_v4
+36/193241/campos_512_v4
+36/193266/campos_512_v4
+36/193286/campos_512_v4
+36/193313/campos_512_v4
+36/193475/campos_512_v4
+36/193581/campos_512_v4
+36/193749/campos_512_v4
+36/193761/campos_512_v4
+36/193812/campos_512_v4
+36/193822/campos_512_v4
+36/193957/campos_512_v4
+36/193992/campos_512_v4
+36/194068/campos_512_v4
+36/194133/campos_512_v4
+36/194199/campos_512_v4
+36/194219/campos_512_v4
+36/194232/campos_512_v4
+36/194317/campos_512_v4
+36/194388/campos_512_v4
+36/194432/campos_512_v4
+36/194530/campos_512_v4
+36/194597/campos_512_v4
+36/194774/campos_512_v4
+36/194776/campos_512_v4
+36/194814/campos_512_v4
+36/194901/campos_512_v4
+36/194931/campos_512_v4
+36/194943/campos_512_v4
+36/194984/campos_512_v4
+37/195013/campos_512_v4
+37/195385/campos_512_v4
+37/195467/campos_512_v4
+37/195483/campos_512_v4
+37/195509/campos_512_v4
+37/195555/campos_512_v4
+37/195662/campos_512_v4
+37/195739/campos_512_v4
+37/195757/campos_512_v4
+37/195786/campos_512_v4
+37/195949/campos_512_v4
+37/196021/campos_512_v4
+37/196051/campos_512_v4
+37/196210/campos_512_v4
+37/196313/campos_512_v4
+37/196393/campos_512_v4
+37/196396/campos_512_v4
+37/196523/campos_512_v4
+37/196530/campos_512_v4
+37/196639/campos_512_v4
+37/196791/campos_512_v4
+37/196811/campos_512_v4
+37/197017/campos_512_v4
+37/197087/campos_512_v4
+37/197257/campos_512_v4
+37/197260/campos_512_v4
+37/197346/campos_512_v4
+37/197376/campos_512_v4
+37/197390/campos_512_v4
+37/197432/campos_512_v4
+37/197461/campos_512_v4
+37/197521/campos_512_v4
+37/197585/campos_512_v4
+37/197604/campos_512_v4
+37/197997/campos_512_v4
+37/198002/campos_512_v4
+37/198013/campos_512_v4
+37/198032/campos_512_v4
+37/198171/campos_512_v4
+37/198172/campos_512_v4
+37/198185/campos_512_v4
+37/198419/campos_512_v4
+37/198459/campos_512_v4
+37/198483/campos_512_v4
+37/198596/campos_512_v4
+37/198632/campos_512_v4
+37/198635/campos_512_v4
+37/198765/campos_512_v4
+37/198826/campos_512_v4
+37/198835/campos_512_v4
+37/198849/campos_512_v4
+37/198855/campos_512_v4
+37/198863/campos_512_v4
+37/198949/campos_512_v4
+37/198951/campos_512_v4
+37/198957/campos_512_v4
+37/199005/campos_512_v4
+37/199012/campos_512_v4
+37/199112/campos_512_v4
+37/199131/campos_512_v4
+37/199178/campos_512_v4
+37/199193/campos_512_v4
+37/199219/campos_512_v4
+37/199263/campos_512_v4
+37/199300/campos_512_v4
+37/199302/campos_512_v4
+37/199303/campos_512_v4
+37/199323/campos_512_v4
+37/199405/campos_512_v4
+37/199434/campos_512_v4
+37/199449/campos_512_v4
+37/199535/campos_512_v4
+37/199654/campos_512_v4
+37/199659/campos_512_v4
+37/199766/campos_512_v4
+37/199786/campos_512_v4
+37/199788/campos_512_v4
+37/199801/campos_512_v4
+37/199867/campos_512_v4
+37/199869/campos_512_v4
+37/199886/campos_512_v4
+37/199904/campos_512_v4
+38/200246/campos_512_v4
+38/200374/campos_512_v4
+38/200517/campos_512_v4
+38/200527/campos_512_v4
+38/200529/campos_512_v4
+38/200649/campos_512_v4
+38/200761/campos_512_v4
+38/200824/campos_512_v4
+38/200920/campos_512_v4
+38/201080/campos_512_v4
+38/201121/campos_512_v4
+38/201184/campos_512_v4
+38/201293/campos_512_v4
+38/201341/campos_512_v4
+38/201351/campos_512_v4
+38/201374/campos_512_v4
+38/201388/campos_512_v4
+38/201488/campos_512_v4
+38/201661/campos_512_v4
+38/201670/campos_512_v4
+38/201836/campos_512_v4
+38/201840/campos_512_v4
+38/201849/campos_512_v4
+38/201871/campos_512_v4
+38/201908/campos_512_v4
+38/201935/campos_512_v4
+38/202115/campos_512_v4
+38/202191/campos_512_v4
+38/202254/campos_512_v4
+38/202411/campos_512_v4
+38/202417/campos_512_v4
+38/202473/campos_512_v4
+38/202568/campos_512_v4
+38/202571/campos_512_v4
+38/202624/campos_512_v4
+38/202797/campos_512_v4
+38/202829/campos_512_v4
+38/202886/campos_512_v4
+38/202887/campos_512_v4
+38/202959/campos_512_v4
+38/202966/campos_512_v4
+38/203026/campos_512_v4
+38/203062/campos_512_v4
+38/203091/campos_512_v4
+38/203099/campos_512_v4
+38/203134/campos_512_v4
+38/203136/campos_512_v4
+38/203165/campos_512_v4
+38/203231/campos_512_v4
+38/203299/campos_512_v4
+38/203309/campos_512_v4
+38/203311/campos_512_v4
+38/203431/campos_512_v4
+38/203478/campos_512_v4
+38/203628/campos_512_v4
+38/203645/campos_512_v4
+38/203810/campos_512_v4
+38/203885/campos_512_v4
+38/203952/campos_512_v4
+38/203998/campos_512_v4
+38/204026/campos_512_v4
+38/204027/campos_512_v4
+38/204157/campos_512_v4
+38/204184/campos_512_v4
+38/204188/campos_512_v4
+38/204204/campos_512_v4
+38/204208/campos_512_v4
+38/204218/campos_512_v4
+38/204220/campos_512_v4
+38/204271/campos_512_v4
+38/204331/campos_512_v4
+38/204348/campos_512_v4
+38/204356/campos_512_v4
+38/204432/campos_512_v4
+38/204443/campos_512_v4
+38/204494/campos_512_v4
+38/204578/campos_512_v4
+38/204585/campos_512_v4
+38/204596/campos_512_v4
+38/204611/campos_512_v4
+38/204684/campos_512_v4
+38/204697/campos_512_v4
+38/204791/campos_512_v4
+38/204796/campos_512_v4
+38/204811/campos_512_v4
+38/204833/campos_512_v4
+38/204839/campos_512_v4
+38/204869/campos_512_v4
+38/204957/campos_512_v4
+38/204984/campos_512_v4
+38/204999/campos_512_v4
+4/30004/campos_512_v4
+4/30262/campos_512_v4
+4/30326/campos_512_v4
+4/30347/campos_512_v4
+4/30367/campos_512_v4
+4/30439/campos_512_v4
+4/30926/campos_512_v4
+4/30928/campos_512_v4
+4/30979/campos_512_v4
+4/31135/campos_512_v4
+4/31234/campos_512_v4
+4/31292/campos_512_v4
+4/31298/campos_512_v4
+4/31300/campos_512_v4
+4/31515/campos_512_v4
+4/31534/campos_512_v4
+4/31556/campos_512_v4
+4/31561/campos_512_v4
+4/31605/campos_512_v4
+4/31871/campos_512_v4
+4/31961/campos_512_v4
+4/31995/campos_512_v4
+4/32070/campos_512_v4
+4/32089/campos_512_v4
+4/32209/campos_512_v4
+4/32252/campos_512_v4
+4/32343/campos_512_v4
+4/32506/campos_512_v4
+4/32796/campos_512_v4
+4/32813/campos_512_v4
+4/32853/campos_512_v4
+4/32880/campos_512_v4
+4/32890/campos_512_v4
+4/33169/campos_512_v4
+4/33215/campos_512_v4
+4/33217/campos_512_v4
+4/33270/campos_512_v4
+4/33283/campos_512_v4
+4/33318/campos_512_v4
+4/33441/campos_512_v4
+4/33480/campos_512_v4
+4/33624/campos_512_v4
+4/33661/campos_512_v4
+4/33707/campos_512_v4
+4/33731/campos_512_v4
+4/33777/campos_512_v4
+4/33894/campos_512_v4
+4/33916/campos_512_v4
+4/34032/campos_512_v4
+4/34033/campos_512_v4
+4/34052/campos_512_v4
+4/34059/campos_512_v4
+4/34268/campos_512_v4
+4/34357/campos_512_v4
+4/34403/campos_512_v4
+4/34515/campos_512_v4
+4/34954/campos_512_v4
+40/210091/campos_512_v4
+40/210132/campos_512_v4
+40/210229/campos_512_v4
+40/210237/campos_512_v4
+40/210259/campos_512_v4
+40/210287/campos_512_v4
+40/210355/campos_512_v4
+40/210357/campos_512_v4
+40/210395/campos_512_v4
+40/210466/campos_512_v4
+40/210484/campos_512_v4
+40/210598/campos_512_v4
+40/210656/campos_512_v4
+40/210689/campos_512_v4
+40/210713/campos_512_v4
+40/210771/campos_512_v4
+40/210774/campos_512_v4
+40/210853/campos_512_v4
+40/210866/campos_512_v4
+40/210869/campos_512_v4
+40/210883/campos_512_v4
+40/210919/campos_512_v4
+40/210950/campos_512_v4
+40/210979/campos_512_v4
+40/210986/campos_512_v4
+40/211004/campos_512_v4
+40/211008/campos_512_v4
+40/211122/campos_512_v4
+40/211306/campos_512_v4
+40/211312/campos_512_v4
+40/211349/campos_512_v4
+40/211355/campos_512_v4
+40/211430/campos_512_v4
+40/211437/campos_512_v4
+40/211505/campos_512_v4
+40/211524/campos_512_v4
+40/211547/campos_512_v4
+40/211584/campos_512_v4
+40/211642/campos_512_v4
+40/211652/campos_512_v4
+40/211657/campos_512_v4
+40/211665/campos_512_v4
+40/211843/campos_512_v4
+40/211868/campos_512_v4
+40/211869/campos_512_v4
+40/211870/campos_512_v4
+40/211921/campos_512_v4
+40/211925/campos_512_v4
+40/211956/campos_512_v4
+40/211968/campos_512_v4
+40/211988/campos_512_v4
+40/212015/campos_512_v4
+40/212044/campos_512_v4
+40/212118/campos_512_v4
+40/212183/campos_512_v4
+40/212209/campos_512_v4
+40/212218/campos_512_v4
+40/212219/campos_512_v4
+40/212276/campos_512_v4
+40/212330/campos_512_v4
+40/212333/campos_512_v4
+40/212373/campos_512_v4
+40/212391/campos_512_v4
+40/212437/campos_512_v4
+40/212723/campos_512_v4
+40/212758/campos_512_v4
+40/212820/campos_512_v4
+40/212850/campos_512_v4
+40/213041/campos_512_v4
+40/213082/campos_512_v4
+40/213144/campos_512_v4
+40/213154/campos_512_v4
+40/213179/campos_512_v4
+40/213202/campos_512_v4
+40/213263/campos_512_v4
+40/213297/campos_512_v4
+40/213467/campos_512_v4
+40/213589/campos_512_v4
+40/213706/campos_512_v4
+40/213754/campos_512_v4
+40/213814/campos_512_v4
+40/213818/campos_512_v4
+40/213833/campos_512_v4
+40/213835/campos_512_v4
+40/213853/campos_512_v4
+40/213855/campos_512_v4
+40/213883/campos_512_v4
+40/213950/campos_512_v4
+40/213961/campos_512_v4
+40/214029/campos_512_v4
+40/214088/campos_512_v4
+40/214109/campos_512_v4
+40/214203/campos_512_v4
+40/214233/campos_512_v4
+40/214457/campos_512_v4
+40/214513/campos_512_v4
+40/214687/campos_512_v4
+40/214704/campos_512_v4
+40/214716/campos_512_v4
+40/214726/campos_512_v4
+40/214728/campos_512_v4
+40/214762/campos_512_v4
+40/214781/campos_512_v4
+40/214868/campos_512_v4
+40/214938/campos_512_v4
+40/214970/campos_512_v4
+40/214986/campos_512_v4
+41/215014/campos_512_v4
+41/215040/campos_512_v4
+41/215063/campos_512_v4
+41/215074/campos_512_v4
+41/215112/campos_512_v4
+41/215121/campos_512_v4
+41/215157/campos_512_v4
+41/215261/campos_512_v4
+41/215275/campos_512_v4
+41/215365/campos_512_v4
+41/215449/campos_512_v4
+41/215450/campos_512_v4
+41/215469/campos_512_v4
+41/215482/campos_512_v4
+41/215515/campos_512_v4
+41/215587/campos_512_v4
+41/215718/campos_512_v4
+41/215767/campos_512_v4
+41/215801/campos_512_v4
+41/215967/campos_512_v4
+41/215986/campos_512_v4
+41/215995/campos_512_v4
+41/216045/campos_512_v4
+41/216066/campos_512_v4
+41/216088/campos_512_v4
+41/216111/campos_512_v4
+41/216273/campos_512_v4
+41/216382/campos_512_v4
+41/216573/campos_512_v4
+41/216646/campos_512_v4
+41/216729/campos_512_v4
+41/216735/campos_512_v4
+41/216764/campos_512_v4
+41/216923/campos_512_v4
+41/217050/campos_512_v4
+41/217151/campos_512_v4
+41/217281/campos_512_v4
+41/217303/campos_512_v4
+41/217331/campos_512_v4
+41/217362/campos_512_v4
+41/217377/campos_512_v4
+41/217405/campos_512_v4
+41/217503/campos_512_v4
+41/217605/campos_512_v4
+41/217689/campos_512_v4
+41/217706/campos_512_v4
+41/217756/campos_512_v4
+41/217785/campos_512_v4
+41/217817/campos_512_v4
+41/217931/campos_512_v4
+41/217932/campos_512_v4
+41/217960/campos_512_v4
+41/217996/campos_512_v4
+41/218127/campos_512_v4
+41/218142/campos_512_v4
+41/218163/campos_512_v4
+41/218165/campos_512_v4
+41/218231/campos_512_v4
+41/218300/campos_512_v4
+41/218366/campos_512_v4
+41/218411/campos_512_v4
+41/218415/campos_512_v4
+41/218474/campos_512_v4
+41/218478/campos_512_v4
+41/218518/campos_512_v4
+41/218565/campos_512_v4
+41/218585/campos_512_v4
+41/218656/campos_512_v4
+41/218709/campos_512_v4
+41/218736/campos_512_v4
+41/218844/campos_512_v4
+41/218862/campos_512_v4
+41/218870/campos_512_v4
+41/218902/campos_512_v4
+41/219005/campos_512_v4
+41/219006/campos_512_v4
+41/219037/campos_512_v4
+41/219085/campos_512_v4
+41/219109/campos_512_v4
+41/219246/campos_512_v4
+41/219276/campos_512_v4
+41/219280/campos_512_v4
+41/219295/campos_512_v4
+41/219320/campos_512_v4
+41/219328/campos_512_v4
+41/219405/campos_512_v4
+41/219484/campos_512_v4
+41/219536/campos_512_v4
+41/219569/campos_512_v4
+41/219589/campos_512_v4
+41/219718/campos_512_v4
+41/219805/campos_512_v4
+41/219845/campos_512_v4
+41/219958/campos_512_v4
+42/220139/campos_512_v4
+42/220239/campos_512_v4
+42/220240/campos_512_v4
+42/220305/campos_512_v4
+42/220315/campos_512_v4
+42/220323/campos_512_v4
+42/220516/campos_512_v4
+42/220599/campos_512_v4
+42/220689/campos_512_v4
+42/220810/campos_512_v4
+42/220824/campos_512_v4
+42/220965/campos_512_v4
+42/221025/campos_512_v4
+42/221312/campos_512_v4
+42/221339/campos_512_v4
+42/221346/campos_512_v4
+42/221368/campos_512_v4
+42/221381/campos_512_v4
+42/221502/campos_512_v4
+42/221611/campos_512_v4
+42/221824/campos_512_v4
+42/221952/campos_512_v4
+42/222048/campos_512_v4
+42/222118/campos_512_v4
+42/222286/campos_512_v4
+42/222304/campos_512_v4
+42/222402/campos_512_v4
+42/222446/campos_512_v4
+42/222476/campos_512_v4
+42/222534/campos_512_v4
+42/222570/campos_512_v4
+42/222581/campos_512_v4
+42/222638/campos_512_v4
+42/222688/campos_512_v4
+42/222748/campos_512_v4
+42/222775/campos_512_v4
+42/222849/campos_512_v4
+42/222863/campos_512_v4
+42/222921/campos_512_v4
+42/222925/campos_512_v4
+42/222950/campos_512_v4
+42/222951/campos_512_v4
+42/223012/campos_512_v4
+42/223031/campos_512_v4
+42/223090/campos_512_v4
+42/223364/campos_512_v4
+42/223417/campos_512_v4
+42/223514/campos_512_v4
+42/223528/campos_512_v4
+42/223598/campos_512_v4
+42/223621/campos_512_v4
+42/223640/campos_512_v4
+42/223657/campos_512_v4
+42/223680/campos_512_v4
+42/223749/campos_512_v4
+42/223756/campos_512_v4
+42/223857/campos_512_v4
+42/223925/campos_512_v4
+42/224182/campos_512_v4
+42/224297/campos_512_v4
+42/224310/campos_512_v4
+42/224355/campos_512_v4
+42/224386/campos_512_v4
+42/224402/campos_512_v4
+42/224479/campos_512_v4
+42/224539/campos_512_v4
+42/224582/campos_512_v4
+42/224606/campos_512_v4
+42/224639/campos_512_v4
+42/224663/campos_512_v4
+42/224724/campos_512_v4
+42/224804/campos_512_v4
+42/224813/campos_512_v4
+42/224824/campos_512_v4
+42/224829/campos_512_v4
+42/224833/campos_512_v4
+42/224854/campos_512_v4
+42/224962/campos_512_v4
+43/225018/campos_512_v4
+43/225087/campos_512_v4
+43/225117/campos_512_v4
+43/225129/campos_512_v4
+43/225160/campos_512_v4
+43/225192/campos_512_v4
+43/225195/campos_512_v4
+43/225214/campos_512_v4
+43/225269/campos_512_v4
+43/225371/campos_512_v4
+43/225387/campos_512_v4
+43/225464/campos_512_v4
+43/225573/campos_512_v4
+43/225614/campos_512_v4
+43/225659/campos_512_v4
+43/225673/campos_512_v4
+43/225685/campos_512_v4
+43/225704/campos_512_v4
+43/225725/campos_512_v4
+43/225735/campos_512_v4
+43/225925/campos_512_v4
+43/225985/campos_512_v4
+43/226008/campos_512_v4
+43/226017/campos_512_v4
+43/226063/campos_512_v4
+43/226134/campos_512_v4
+43/226146/campos_512_v4
+43/226153/campos_512_v4
+43/226180/campos_512_v4
+43/226248/campos_512_v4
+43/226292/campos_512_v4
+43/226293/campos_512_v4
+43/226301/campos_512_v4
+43/226306/campos_512_v4
+43/226313/campos_512_v4
+43/226426/campos_512_v4
+43/226439/campos_512_v4
+43/226446/campos_512_v4
+43/226518/campos_512_v4
+43/226552/campos_512_v4
+43/226566/campos_512_v4
+43/226688/campos_512_v4
+43/226997/campos_512_v4
+43/227036/campos_512_v4
+43/227251/campos_512_v4
+43/227341/campos_512_v4
+43/227382/campos_512_v4
+43/227393/campos_512_v4
+43/227450/campos_512_v4
+43/227550/campos_512_v4
+43/227556/campos_512_v4
+43/227595/campos_512_v4
+43/227616/campos_512_v4
+43/227617/campos_512_v4
+43/227768/campos_512_v4
+43/227859/campos_512_v4
+43/227915/campos_512_v4
+43/227958/campos_512_v4
+43/227998/campos_512_v4
+43/228088/campos_512_v4
+43/228116/campos_512_v4
+43/228167/campos_512_v4
+43/228245/campos_512_v4
+43/228271/campos_512_v4
+43/228330/campos_512_v4
+43/228439/campos_512_v4
+43/228545/campos_512_v4
+43/228562/campos_512_v4
+43/228694/campos_512_v4
+43/228732/campos_512_v4
+43/228734/campos_512_v4
+43/228736/campos_512_v4
+43/228839/campos_512_v4
+43/228872/campos_512_v4
+43/228890/campos_512_v4
+43/228933/campos_512_v4
+43/228968/campos_512_v4
+43/228989/campos_512_v4
+43/229068/campos_512_v4
+43/229069/campos_512_v4
+43/229076/campos_512_v4
+43/229103/campos_512_v4
+43/229109/campos_512_v4
+43/229123/campos_512_v4
+43/229131/campos_512_v4
+43/229163/campos_512_v4
+43/229190/campos_512_v4
+43/229309/campos_512_v4
+43/229328/campos_512_v4
+43/229558/campos_512_v4
+43/229594/campos_512_v4
+43/229625/campos_512_v4
+43/229658/campos_512_v4
+43/229684/campos_512_v4
+43/229974/campos_512_v4
+44/230018/campos_512_v4
+44/230022/campos_512_v4
+44/230208/campos_512_v4
+44/230219/campos_512_v4
+44/230328/campos_512_v4
+44/230416/campos_512_v4
+44/230439/campos_512_v4
+44/230441/campos_512_v4
+44/230701/campos_512_v4
+44/230771/campos_512_v4
+44/230814/campos_512_v4
+44/230819/campos_512_v4
+44/230841/campos_512_v4
+44/230860/campos_512_v4
+44/230973/campos_512_v4
+44/231028/campos_512_v4
+44/231081/campos_512_v4
+44/231297/campos_512_v4
+44/231325/campos_512_v4
+44/231406/campos_512_v4
+44/231429/campos_512_v4
+44/231437/campos_512_v4
+44/231460/campos_512_v4
+44/231524/campos_512_v4
+44/231608/campos_512_v4
+44/231737/campos_512_v4
+44/231796/campos_512_v4
+44/232078/campos_512_v4
+44/232200/campos_512_v4
+44/232204/campos_512_v4
+44/232205/campos_512_v4
+44/232255/campos_512_v4
+44/232268/campos_512_v4
+44/232274/campos_512_v4
+44/232286/campos_512_v4
+44/232300/campos_512_v4
+44/232383/campos_512_v4
+44/232459/campos_512_v4
+44/232636/campos_512_v4
+44/232680/campos_512_v4
+44/232863/campos_512_v4
+44/232979/campos_512_v4
+44/232986/campos_512_v4
+44/233043/campos_512_v4
+44/233048/campos_512_v4
+44/233217/campos_512_v4
+44/233220/campos_512_v4
+44/233252/campos_512_v4
+44/233304/campos_512_v4
+44/233316/campos_512_v4
+44/233342/campos_512_v4
+44/233393/campos_512_v4
+44/233511/campos_512_v4
+44/233528/campos_512_v4
+44/233626/campos_512_v4
+44/233734/campos_512_v4
+44/233763/campos_512_v4
+44/233888/campos_512_v4
+44/233940/campos_512_v4
+44/233951/campos_512_v4
+44/234106/campos_512_v4
+44/234156/campos_512_v4
+44/234216/campos_512_v4
+44/234233/campos_512_v4
+44/234356/campos_512_v4
+44/234407/campos_512_v4
+44/234482/campos_512_v4
+44/234502/campos_512_v4
+44/234605/campos_512_v4
+44/234630/campos_512_v4
+44/234773/campos_512_v4
+44/234804/campos_512_v4
+44/234817/campos_512_v4
+44/234940/campos_512_v4
+44/234992/campos_512_v4
+45/235109/campos_512_v4
+45/235157/campos_512_v4
+45/235167/campos_512_v4
+45/235191/campos_512_v4
+45/235209/campos_512_v4
+45/235237/campos_512_v4
+45/235252/campos_512_v4
+45/235298/campos_512_v4
+45/235314/campos_512_v4
+45/235356/campos_512_v4
+45/235462/campos_512_v4
+45/235469/campos_512_v4
+45/235488/campos_512_v4
+45/235499/campos_512_v4
+45/235570/campos_512_v4
+45/235604/campos_512_v4
+45/235720/campos_512_v4
+45/235773/campos_512_v4
+45/235898/campos_512_v4
+45/235946/campos_512_v4
+45/236019/campos_512_v4
+45/236101/campos_512_v4
+45/236115/campos_512_v4
+45/236141/campos_512_v4
+45/236143/campos_512_v4
+45/236247/campos_512_v4
+45/236274/campos_512_v4
+45/236287/campos_512_v4
+45/236406/campos_512_v4
+45/236418/campos_512_v4
+45/236429/campos_512_v4
+45/236431/campos_512_v4
+45/236449/campos_512_v4
+45/236493/campos_512_v4
+45/236526/campos_512_v4
+45/236582/campos_512_v4
+45/236662/campos_512_v4
+45/236715/campos_512_v4
+45/236815/campos_512_v4
+45/236842/campos_512_v4
+45/236878/campos_512_v4
+45/236976/campos_512_v4
+45/237000/campos_512_v4
+45/237104/campos_512_v4
+45/237167/campos_512_v4
+45/237359/campos_512_v4
+45/237497/campos_512_v4
+45/237574/campos_512_v4
+45/237625/campos_512_v4
+45/237633/campos_512_v4
+45/237772/campos_512_v4
+45/237810/campos_512_v4
+45/237814/campos_512_v4
+45/237977/campos_512_v4
+45/238015/campos_512_v4
+45/238017/campos_512_v4
+45/238039/campos_512_v4
+45/238041/campos_512_v4
+45/238092/campos_512_v4
+45/238129/campos_512_v4
+45/238190/campos_512_v4
+45/238193/campos_512_v4
+45/238203/campos_512_v4
+45/238287/campos_512_v4
+45/238386/campos_512_v4
+45/238390/campos_512_v4
+45/238411/campos_512_v4
+45/238461/campos_512_v4
+45/238474/campos_512_v4
+45/238491/campos_512_v4
+45/238509/campos_512_v4
+45/238542/campos_512_v4
+45/238550/campos_512_v4
+45/238596/campos_512_v4
+45/238597/campos_512_v4
+45/238658/campos_512_v4
+45/238668/campos_512_v4
+45/238698/campos_512_v4
+45/238774/campos_512_v4
+45/238824/campos_512_v4
+45/238829/campos_512_v4
+45/238859/campos_512_v4
+45/238969/campos_512_v4
+45/238974/campos_512_v4
+45/239025/campos_512_v4
+45/239098/campos_512_v4
+45/239151/campos_512_v4
+45/239229/campos_512_v4
+45/239258/campos_512_v4
+45/239260/campos_512_v4
+45/239339/campos_512_v4
+45/239342/campos_512_v4
+45/239356/campos_512_v4
+45/239377/campos_512_v4
+45/239506/campos_512_v4
+45/239544/campos_512_v4
+45/239742/campos_512_v4
+45/239773/campos_512_v4
+45/239785/campos_512_v4
+45/239801/campos_512_v4
+45/239830/campos_512_v4
+45/239856/campos_512_v4
+45/239873/campos_512_v4
+45/239901/campos_512_v4
+45/239912/campos_512_v4
+45/239931/campos_512_v4
+45/239980/campos_512_v4
+45/239986/campos_512_v4
+45/239993/campos_512_v4
+46/240016/campos_512_v4
+46/240069/campos_512_v4
+46/240108/campos_512_v4
+46/240159/campos_512_v4
+46/240551/campos_512_v4
+46/240599/campos_512_v4
+46/240633/campos_512_v4
+46/240685/campos_512_v4
+46/240715/campos_512_v4
+46/240802/campos_512_v4
+46/240840/campos_512_v4
+46/240930/campos_512_v4
+46/241028/campos_512_v4
+46/241179/campos_512_v4
+46/241184/campos_512_v4
+46/241200/campos_512_v4
+46/241261/campos_512_v4
+46/241378/campos_512_v4
+46/241395/campos_512_v4
+46/241463/campos_512_v4
+46/241585/campos_512_v4
+46/241795/campos_512_v4
+46/242010/campos_512_v4
+46/242033/campos_512_v4
+46/242089/campos_512_v4
+46/242159/campos_512_v4
+46/242282/campos_512_v4
+46/242289/campos_512_v4
+46/242318/campos_512_v4
+46/242390/campos_512_v4
+46/242451/campos_512_v4
+46/242490/campos_512_v4
+46/242536/campos_512_v4
+46/242595/campos_512_v4
+46/242647/campos_512_v4
+46/242687/campos_512_v4
+46/242760/campos_512_v4
+46/242850/campos_512_v4
+46/242870/campos_512_v4
+46/242910/campos_512_v4
+46/242914/campos_512_v4
+46/242943/campos_512_v4
+46/243000/campos_512_v4
+46/243064/campos_512_v4
+46/243116/campos_512_v4
+46/243291/campos_512_v4
+46/243294/campos_512_v4
+46/243334/campos_512_v4
+46/243348/campos_512_v4
+46/243361/campos_512_v4
+46/243409/campos_512_v4
+46/243419/campos_512_v4
+46/243491/campos_512_v4
+46/243537/campos_512_v4
+46/243598/campos_512_v4
+46/243639/campos_512_v4
+46/243658/campos_512_v4
+46/243703/campos_512_v4
+46/243818/campos_512_v4
+46/243819/campos_512_v4
+46/244011/campos_512_v4
+46/244058/campos_512_v4
+46/244068/campos_512_v4
+46/244073/campos_512_v4
+46/244259/campos_512_v4
+46/244295/campos_512_v4
+46/244382/campos_512_v4
+46/244432/campos_512_v4
+46/244448/campos_512_v4
+46/244458/campos_512_v4
+46/244693/campos_512_v4
+46/244801/campos_512_v4
+46/244814/campos_512_v4
+46/244880/campos_512_v4
+46/244914/campos_512_v4
+46/244972/campos_512_v4
+47/245008/campos_512_v4
+47/245149/campos_512_v4
+47/245164/campos_512_v4
+47/245223/campos_512_v4
+47/245225/campos_512_v4
+47/245250/campos_512_v4
+47/245341/campos_512_v4
+47/245526/campos_512_v4
+47/245539/campos_512_v4
+47/245624/campos_512_v4
+47/245725/campos_512_v4
+47/245758/campos_512_v4
+47/245797/campos_512_v4
+47/245873/campos_512_v4
+47/245926/campos_512_v4
+47/245970/campos_512_v4
+47/245995/campos_512_v4
+47/246038/campos_512_v4
+47/246046/campos_512_v4
+47/246155/campos_512_v4
+47/246180/campos_512_v4
+47/246187/campos_512_v4
+47/246344/campos_512_v4
+47/246346/campos_512_v4
+47/246351/campos_512_v4
+47/246394/campos_512_v4
+47/246416/campos_512_v4
+47/246449/campos_512_v4
+47/246455/campos_512_v4
+47/246461/campos_512_v4
+47/246510/campos_512_v4
+47/246644/campos_512_v4
+47/246687/campos_512_v4
+47/246695/campos_512_v4
+47/246811/campos_512_v4
+47/246838/campos_512_v4
+47/247021/campos_512_v4
+47/247154/campos_512_v4
+47/247194/campos_512_v4
+47/247300/campos_512_v4
+47/247351/campos_512_v4
+47/247352/campos_512_v4
+47/247417/campos_512_v4
+47/247513/campos_512_v4
+47/247545/campos_512_v4
+47/247626/campos_512_v4
+47/247630/campos_512_v4
+47/247648/campos_512_v4
+47/247658/campos_512_v4
+47/247827/campos_512_v4
+47/247829/campos_512_v4
+47/247939/campos_512_v4
+47/248020/campos_512_v4
+47/248085/campos_512_v4
+47/248099/campos_512_v4
+47/248101/campos_512_v4
+47/248102/campos_512_v4
+47/248140/campos_512_v4
+47/248150/campos_512_v4
+47/248168/campos_512_v4
+47/248216/campos_512_v4
+47/248241/campos_512_v4
+47/248303/campos_512_v4
+47/248310/campos_512_v4
+47/248327/campos_512_v4
+47/248343/campos_512_v4
+47/248378/campos_512_v4
+47/248400/campos_512_v4
+47/248423/campos_512_v4
+47/248451/campos_512_v4
+47/248481/campos_512_v4
+47/248523/campos_512_v4
+47/248582/campos_512_v4
+47/248663/campos_512_v4
+47/248733/campos_512_v4
+47/248790/campos_512_v4
+47/248796/campos_512_v4
+47/248967/campos_512_v4
+47/248975/campos_512_v4
+47/249074/campos_512_v4
+47/249193/campos_512_v4
+47/249244/campos_512_v4
+47/249257/campos_512_v4
+47/249263/campos_512_v4
+47/249287/campos_512_v4
+47/249393/campos_512_v4
+47/249402/campos_512_v4
+47/249435/campos_512_v4
+47/249491/campos_512_v4
+47/249555/campos_512_v4
+47/249698/campos_512_v4
+47/249711/campos_512_v4
+47/249865/campos_512_v4
+47/249901/campos_512_v4
+47/249904/campos_512_v4
+47/249926/campos_512_v4
+47/249928/campos_512_v4
+47/249938/campos_512_v4
+47/249951/campos_512_v4
+47/249966/campos_512_v4
+47/249994/campos_512_v4
+47/249998/campos_512_v4
+48/250031/campos_512_v4
+48/250126/campos_512_v4
+48/250242/campos_512_v4
+48/250300/campos_512_v4
+48/250330/campos_512_v4
+48/250435/campos_512_v4
+48/250572/campos_512_v4
+48/250591/campos_512_v4
+48/250669/campos_512_v4
+48/250707/campos_512_v4
+48/250759/campos_512_v4
+48/250804/campos_512_v4
+48/250867/campos_512_v4
+48/250946/campos_512_v4
+48/251154/campos_512_v4
+48/251155/campos_512_v4
+48/251180/campos_512_v4
+48/251200/campos_512_v4
+48/251261/campos_512_v4
+48/251368/campos_512_v4
+48/251468/campos_512_v4
+48/251473/campos_512_v4
+48/251547/campos_512_v4
+48/251572/campos_512_v4
+48/251631/campos_512_v4
+48/251633/campos_512_v4
+48/251756/campos_512_v4
+48/251880/campos_512_v4
+48/251929/campos_512_v4
+48/251944/campos_512_v4
+48/251960/campos_512_v4
+48/251969/campos_512_v4
+48/252035/campos_512_v4
+48/252046/campos_512_v4
+48/252057/campos_512_v4
+48/252092/campos_512_v4
+48/252127/campos_512_v4
+48/252133/campos_512_v4
+48/252157/campos_512_v4
+48/252171/campos_512_v4
+48/252203/campos_512_v4
+48/252440/campos_512_v4
+48/252442/campos_512_v4
+48/252481/campos_512_v4
+48/252497/campos_512_v4
+48/252540/campos_512_v4
+48/252558/campos_512_v4
+48/252598/campos_512_v4
+48/252655/campos_512_v4
+48/252662/campos_512_v4
+48/252717/campos_512_v4
+48/252732/campos_512_v4
+48/252737/campos_512_v4
+48/252808/campos_512_v4
+48/252820/campos_512_v4
+48/252821/campos_512_v4
+48/252831/campos_512_v4
+48/252860/campos_512_v4
+48/252916/campos_512_v4
+48/252963/campos_512_v4
+48/253079/campos_512_v4
+48/253143/campos_512_v4
+48/253158/campos_512_v4
+48/253253/campos_512_v4
+48/253356/campos_512_v4
+48/253360/campos_512_v4
+48/253363/campos_512_v4
+48/253517/campos_512_v4
+48/253564/campos_512_v4
+48/253667/campos_512_v4
+48/253742/campos_512_v4
+48/253788/campos_512_v4
+48/253818/campos_512_v4
+48/253847/campos_512_v4
+48/254067/campos_512_v4
+48/254127/campos_512_v4
+48/254200/campos_512_v4
+48/254201/campos_512_v4
+48/254241/campos_512_v4
+48/254254/campos_512_v4
+48/254420/campos_512_v4
+48/254427/campos_512_v4
+48/254454/campos_512_v4
+48/254497/campos_512_v4
+48/254505/campos_512_v4
+48/254518/campos_512_v4
+48/254542/campos_512_v4
+48/254549/campos_512_v4
+48/254610/campos_512_v4
+48/254666/campos_512_v4
+48/254709/campos_512_v4
+48/254747/campos_512_v4
+49/255038/campos_512_v4
+49/255076/campos_512_v4
+49/255138/campos_512_v4
+49/255242/campos_512_v4
+49/255246/campos_512_v4
+49/255294/campos_512_v4
+49/255321/campos_512_v4
+49/255368/campos_512_v4
+49/255560/campos_512_v4
+49/255569/campos_512_v4
+49/255661/campos_512_v4
+49/255731/campos_512_v4
+49/255739/campos_512_v4
+49/255831/campos_512_v4
+49/255854/campos_512_v4
+49/255873/campos_512_v4
+49/255928/campos_512_v4
+49/255929/campos_512_v4
+49/256096/campos_512_v4
+49/256117/campos_512_v4
+49/256188/campos_512_v4
+49/256244/campos_512_v4
+49/256326/campos_512_v4
+49/256358/campos_512_v4
+49/256383/campos_512_v4
+49/256418/campos_512_v4
+49/256421/campos_512_v4
+49/256638/campos_512_v4
+49/256692/campos_512_v4
+49/256694/campos_512_v4
+49/256707/campos_512_v4
+49/256931/campos_512_v4
+49/256949/campos_512_v4
+49/257012/campos_512_v4
+49/257042/campos_512_v4
+49/257072/campos_512_v4
+49/257129/campos_512_v4
+49/257161/campos_512_v4
+49/257256/campos_512_v4
+49/257272/campos_512_v4
+49/257351/campos_512_v4
+49/257373/campos_512_v4
+49/257664/campos_512_v4
+49/257772/campos_512_v4
+49/257925/campos_512_v4
+49/257981/campos_512_v4
+49/258010/campos_512_v4
+49/258117/campos_512_v4
+49/258185/campos_512_v4
+49/258186/campos_512_v4
+49/258209/campos_512_v4
+49/258245/campos_512_v4
+49/258345/campos_512_v4
+49/258354/campos_512_v4
+49/258442/campos_512_v4
+49/258508/campos_512_v4
+49/258649/campos_512_v4
+49/258698/campos_512_v4
+49/258708/campos_512_v4
+49/258758/campos_512_v4
+49/258781/campos_512_v4
+49/258849/campos_512_v4
+49/258931/campos_512_v4
+49/258976/campos_512_v4
+49/259088/campos_512_v4
+49/259174/campos_512_v4
+49/259224/campos_512_v4
+49/259340/campos_512_v4
+49/259382/campos_512_v4
+49/259389/campos_512_v4
+49/259458/campos_512_v4
+49/259478/campos_512_v4
+49/259482/campos_512_v4
+49/259561/campos_512_v4
+49/259582/campos_512_v4
+49/259667/campos_512_v4
+49/259690/campos_512_v4
+49/259760/campos_512_v4
+49/259900/campos_512_v4
+49/259909/campos_512_v4
+49/259939/campos_512_v4
+5/35003/campos_512_v4
+5/35111/campos_512_v4
+5/35115/campos_512_v4
+5/35131/campos_512_v4
+5/35254/campos_512_v4
+5/35342/campos_512_v4
+5/35397/campos_512_v4
+5/35429/campos_512_v4
+5/35441/campos_512_v4
+5/35469/campos_512_v4
+5/35475/campos_512_v4
+5/35625/campos_512_v4
+5/35675/campos_512_v4
+5/35680/campos_512_v4
+5/35719/campos_512_v4
+5/35937/campos_512_v4
+5/35967/campos_512_v4
+5/36114/campos_512_v4
+5/36150/campos_512_v4
+5/36172/campos_512_v4
+5/36221/campos_512_v4
+5/36254/campos_512_v4
+5/36318/campos_512_v4
+5/36322/campos_512_v4
+5/36356/campos_512_v4
+5/36398/campos_512_v4
+5/36419/campos_512_v4
+5/36476/campos_512_v4
+5/36584/campos_512_v4
+5/36814/campos_512_v4
+5/36865/campos_512_v4
+5/36944/campos_512_v4
+5/36967/campos_512_v4
+5/36992/campos_512_v4
+5/37083/campos_512_v4
+5/37129/campos_512_v4
+5/37238/campos_512_v4
+5/37649/campos_512_v4
+5/37656/campos_512_v4
+5/37671/campos_512_v4
+5/37738/campos_512_v4
+5/37783/campos_512_v4
+5/37814/campos_512_v4
+5/37899/campos_512_v4
+5/37933/campos_512_v4
+5/37958/campos_512_v4
+5/38012/campos_512_v4
+5/38075/campos_512_v4
+5/38151/campos_512_v4
+5/38201/campos_512_v4
+5/38255/campos_512_v4
+5/38274/campos_512_v4
+5/38280/campos_512_v4
+5/38282/campos_512_v4
+5/38311/campos_512_v4
+5/38414/campos_512_v4
+5/38484/campos_512_v4
+5/38489/campos_512_v4
+5/38627/campos_512_v4
+5/38660/campos_512_v4
+5/38690/campos_512_v4
+5/38775/campos_512_v4
+5/38800/campos_512_v4
+5/38847/campos_512_v4
+5/38896/campos_512_v4
+5/38914/campos_512_v4
+5/38918/campos_512_v4
+5/38941/campos_512_v4
+5/38996/campos_512_v4
+5/39132/campos_512_v4
+5/39234/campos_512_v4
+5/39551/campos_512_v4
+5/39654/campos_512_v4
+5/39746/campos_512_v4
+5/39759/campos_512_v4
+5/39782/campos_512_v4
+5/39831/campos_512_v4
+5/39839/campos_512_v4
+5/39860/campos_512_v4
+5/39907/campos_512_v4
+50/260002/campos_512_v4
+50/260032/campos_512_v4
+50/260044/campos_512_v4
+50/260059/campos_512_v4
+50/260065/campos_512_v4
+50/260205/campos_512_v4
+50/260232/campos_512_v4
+50/260258/campos_512_v4
+50/260269/campos_512_v4
+50/260270/campos_512_v4
+50/260275/campos_512_v4
+50/260382/campos_512_v4
+50/260514/campos_512_v4
+50/260551/campos_512_v4
+50/260587/campos_512_v4
+50/260608/campos_512_v4
+50/260634/campos_512_v4
+50/260763/campos_512_v4
+50/260794/campos_512_v4
+50/260828/campos_512_v4
+50/260832/campos_512_v4
+50/260905/campos_512_v4
+50/261032/campos_512_v4
+50/261101/campos_512_v4
+50/261158/campos_512_v4
+50/261161/campos_512_v4
+50/261164/campos_512_v4
+50/261187/campos_512_v4
+50/261202/campos_512_v4
+50/261214/campos_512_v4
+50/261242/campos_512_v4
+50/261516/campos_512_v4
+50/261554/campos_512_v4
+50/261565/campos_512_v4
+50/261603/campos_512_v4
+50/261663/campos_512_v4
+50/261818/campos_512_v4
+50/261840/campos_512_v4
+50/261878/campos_512_v4
+50/261881/campos_512_v4
+50/261896/campos_512_v4
+50/261932/campos_512_v4
+50/261965/campos_512_v4
+50/261983/campos_512_v4
+50/262042/campos_512_v4
+50/262089/campos_512_v4
+50/262090/campos_512_v4
+50/262138/campos_512_v4
+50/262171/campos_512_v4
+50/262290/campos_512_v4
+50/262306/campos_512_v4
+50/262312/campos_512_v4
+50/262373/campos_512_v4
+50/262517/campos_512_v4
+50/262575/campos_512_v4
+50/262805/campos_512_v4
+50/262882/campos_512_v4
+50/262899/campos_512_v4
+50/262902/campos_512_v4
+50/263012/campos_512_v4
+50/263101/campos_512_v4
+50/263155/campos_512_v4
+50/263381/campos_512_v4
+50/263407/campos_512_v4
+50/263429/campos_512_v4
+50/263552/campos_512_v4
+50/263559/campos_512_v4
+50/263614/campos_512_v4
+50/263821/campos_512_v4
+50/263873/campos_512_v4
+50/263876/campos_512_v4
+50/263907/campos_512_v4
+50/263961/campos_512_v4
+50/263984/campos_512_v4
+50/263991/campos_512_v4
+50/263996/campos_512_v4
+50/264045/campos_512_v4
+50/264066/campos_512_v4
+50/264067/campos_512_v4
+50/264073/campos_512_v4
+50/264198/campos_512_v4
+50/264386/campos_512_v4
+50/264472/campos_512_v4
+50/264520/campos_512_v4
+50/264557/campos_512_v4
+50/264584/campos_512_v4
+50/264696/campos_512_v4
+50/264731/campos_512_v4
+50/264849/campos_512_v4
+50/264885/campos_512_v4
+50/264951/campos_512_v4
+50/264953/campos_512_v4
+50/264977/campos_512_v4
+51/265117/campos_512_v4
+51/265203/campos_512_v4
+51/265221/campos_512_v4
+51/265408/campos_512_v4
+51/265628/campos_512_v4
+51/265635/campos_512_v4
+51/265740/campos_512_v4
+51/265782/campos_512_v4
+51/265875/campos_512_v4
+51/265893/campos_512_v4
+51/265967/campos_512_v4
+51/265974/campos_512_v4
+51/265995/campos_512_v4
+51/266123/campos_512_v4
+51/266142/campos_512_v4
+51/266146/campos_512_v4
+51/266172/campos_512_v4
+51/266241/campos_512_v4
+51/266270/campos_512_v4
+51/266313/campos_512_v4
+51/266318/campos_512_v4
+51/266339/campos_512_v4
+51/266356/campos_512_v4
+51/266384/campos_512_v4
+51/266453/campos_512_v4
+51/266475/campos_512_v4
+51/266508/campos_512_v4
+51/266684/campos_512_v4
+51/266690/campos_512_v4
+51/266707/campos_512_v4
+51/266729/campos_512_v4
+51/266815/campos_512_v4
+51/266885/campos_512_v4
+51/266939/campos_512_v4
+51/266974/campos_512_v4
+51/267031/campos_512_v4
+51/267032/campos_512_v4
+51/267035/campos_512_v4
+51/267049/campos_512_v4
+51/267123/campos_512_v4
+51/267184/campos_512_v4
+51/267398/campos_512_v4
+51/267447/campos_512_v4
+51/267461/campos_512_v4
+51/267514/campos_512_v4
+51/267535/campos_512_v4
+51/267567/campos_512_v4
+51/267760/campos_512_v4
+51/267822/campos_512_v4
+51/267830/campos_512_v4
+51/267875/campos_512_v4
+51/267908/campos_512_v4
+51/267966/campos_512_v4
+51/268033/campos_512_v4
+51/268110/campos_512_v4
+51/268111/campos_512_v4
+51/268212/campos_512_v4
+51/268268/campos_512_v4
+51/268305/campos_512_v4
+51/268383/campos_512_v4
+51/268409/campos_512_v4
+51/268446/campos_512_v4
+51/268460/campos_512_v4
+51/268580/campos_512_v4
+51/268636/campos_512_v4
+51/268941/campos_512_v4
+51/268964/campos_512_v4
+51/269010/campos_512_v4
+51/269024/campos_512_v4
+51/269035/campos_512_v4
+51/269148/campos_512_v4
+51/269164/campos_512_v4
+51/269186/campos_512_v4
+51/269219/campos_512_v4
+51/269251/campos_512_v4
+51/269257/campos_512_v4
+51/269277/campos_512_v4
+51/269305/campos_512_v4
+51/269399/campos_512_v4
+51/269455/campos_512_v4
+51/269456/campos_512_v4
+51/269473/campos_512_v4
+51/269476/campos_512_v4
+51/269485/campos_512_v4
+51/269486/campos_512_v4
+51/269507/campos_512_v4
+51/269727/campos_512_v4
+51/269742/campos_512_v4
+51/269865/campos_512_v4
+51/269941/campos_512_v4
+51/269978/campos_512_v4
+51/269989/campos_512_v4
+52/270051/campos_512_v4
+52/270075/campos_512_v4
+52/270097/campos_512_v4
+52/270123/campos_512_v4
+52/270161/campos_512_v4
+52/270208/campos_512_v4
+52/270255/campos_512_v4
+52/270346/campos_512_v4
+52/270356/campos_512_v4
+52/270454/campos_512_v4
+52/270527/campos_512_v4
+52/270540/campos_512_v4
+52/270631/campos_512_v4
+52/270634/campos_512_v4
+52/270642/campos_512_v4
+52/270703/campos_512_v4
+52/270730/campos_512_v4
+52/270760/campos_512_v4
+52/270886/campos_512_v4
+52/270962/campos_512_v4
+52/271087/campos_512_v4
+52/271101/campos_512_v4
+52/271295/campos_512_v4
+52/271313/campos_512_v4
+52/271378/campos_512_v4
+52/271473/campos_512_v4
+52/271490/campos_512_v4
+52/271508/campos_512_v4
+52/271612/campos_512_v4
+52/271625/campos_512_v4
+52/271642/campos_512_v4
+52/271697/campos_512_v4
+52/271698/campos_512_v4
+52/271740/campos_512_v4
+52/271827/campos_512_v4
+52/271865/campos_512_v4
+52/271960/campos_512_v4
+52/271985/campos_512_v4
+52/271992/campos_512_v4
+52/272002/campos_512_v4
+52/272052/campos_512_v4
+52/272054/campos_512_v4
+52/272088/campos_512_v4
+52/272154/campos_512_v4
+52/272248/campos_512_v4
+52/272290/campos_512_v4
+52/272304/campos_512_v4
+52/272328/campos_512_v4
+52/272405/campos_512_v4
+52/272493/campos_512_v4
+52/272552/campos_512_v4
+52/272578/campos_512_v4
+52/272640/campos_512_v4
+52/272782/campos_512_v4
+52/272783/campos_512_v4
+52/272825/campos_512_v4
+52/273007/campos_512_v4
+52/273010/campos_512_v4
+52/273045/campos_512_v4
+52/273063/campos_512_v4
+52/273072/campos_512_v4
+52/273087/campos_512_v4
+52/273110/campos_512_v4
+52/273116/campos_512_v4
+52/273170/campos_512_v4
+52/273230/campos_512_v4
+52/273243/campos_512_v4
+52/273277/campos_512_v4
+52/273295/campos_512_v4
+52/273343/campos_512_v4
+52/273351/campos_512_v4
+52/273406/campos_512_v4
+52/273446/campos_512_v4
+52/273538/campos_512_v4
+52/273557/campos_512_v4
+52/273566/campos_512_v4
+52/273571/campos_512_v4
+52/273755/campos_512_v4
+52/273781/campos_512_v4
+52/273785/campos_512_v4
+52/273799/campos_512_v4
+52/273887/campos_512_v4
+52/273900/campos_512_v4
+52/273918/campos_512_v4
+52/274056/campos_512_v4
+52/274093/campos_512_v4
+52/274122/campos_512_v4
+52/274214/campos_512_v4
+52/274306/campos_512_v4
+52/274317/campos_512_v4
+52/274323/campos_512_v4
+52/274335/campos_512_v4
+52/274357/campos_512_v4
+52/274395/campos_512_v4
+52/274399/campos_512_v4
+52/274402/campos_512_v4
+52/274404/campos_512_v4
+52/274407/campos_512_v4
+52/274447/campos_512_v4
+52/274469/campos_512_v4
+52/274640/campos_512_v4
+52/274642/campos_512_v4
+52/274720/campos_512_v4
+52/274802/campos_512_v4
+52/274821/campos_512_v4
+52/274838/campos_512_v4
+52/274924/campos_512_v4
+53/275120/campos_512_v4
+53/275129/campos_512_v4
+53/275297/campos_512_v4
+53/275345/campos_512_v4
+53/275353/campos_512_v4
+53/275442/campos_512_v4
+53/275475/campos_512_v4
+53/275559/campos_512_v4
+53/275593/campos_512_v4
+53/275668/campos_512_v4
+53/275682/campos_512_v4
+53/275886/campos_512_v4
+53/275887/campos_512_v4
+53/275961/campos_512_v4
+53/275973/campos_512_v4
+53/275985/campos_512_v4
+53/276011/campos_512_v4
+53/276040/campos_512_v4
+53/276132/campos_512_v4
+53/276160/campos_512_v4
+53/276201/campos_512_v4
+53/276220/campos_512_v4
+53/276230/campos_512_v4
+53/276259/campos_512_v4
+53/276260/campos_512_v4
+53/276280/campos_512_v4
+53/276435/campos_512_v4
+53/276485/campos_512_v4
+53/276491/campos_512_v4
+53/276572/campos_512_v4
+53/276608/campos_512_v4
+53/276636/campos_512_v4
+53/276649/campos_512_v4
+53/276662/campos_512_v4
+53/276709/campos_512_v4
+53/276736/campos_512_v4
+53/276764/campos_512_v4
+53/276810/campos_512_v4
+53/276843/campos_512_v4
+53/276856/campos_512_v4
+53/276905/campos_512_v4
+53/277033/campos_512_v4
+53/277073/campos_512_v4
+53/277101/campos_512_v4
+53/277102/campos_512_v4
+53/277162/campos_512_v4
+53/277260/campos_512_v4
+53/277266/campos_512_v4
+53/277268/campos_512_v4
+53/277275/campos_512_v4
+53/277279/campos_512_v4
+53/277472/campos_512_v4
+53/277496/campos_512_v4
+53/277600/campos_512_v4
+53/277665/campos_512_v4
+53/277746/campos_512_v4
+53/277764/campos_512_v4
+53/277905/campos_512_v4
+53/277986/campos_512_v4
+53/277994/campos_512_v4
+53/277997/campos_512_v4
+53/278053/campos_512_v4
+53/278066/campos_512_v4
+53/278096/campos_512_v4
+53/278108/campos_512_v4
+53/278278/campos_512_v4
+53/278289/campos_512_v4
+53/278300/campos_512_v4
+53/278344/campos_512_v4
+53/278500/campos_512_v4
+53/278506/campos_512_v4
+53/278554/campos_512_v4
+53/278561/campos_512_v4
+53/278562/campos_512_v4
+53/278615/campos_512_v4
+53/278704/campos_512_v4
+53/278734/campos_512_v4
+53/278853/campos_512_v4
+53/278885/campos_512_v4
+53/278971/campos_512_v4
+53/279076/campos_512_v4
+53/279139/campos_512_v4
+53/279142/campos_512_v4
+53/279267/campos_512_v4
+53/279299/campos_512_v4
+53/279354/campos_512_v4
+53/279357/campos_512_v4
+53/279364/campos_512_v4
+53/279377/campos_512_v4
+53/279428/campos_512_v4
+53/279568/campos_512_v4
+53/279588/campos_512_v4
+53/279605/campos_512_v4
+53/279619/campos_512_v4
+53/279637/campos_512_v4
+53/279663/campos_512_v4
+53/279720/campos_512_v4
+53/279798/campos_512_v4
+53/279828/campos_512_v4
+53/279844/campos_512_v4
+53/279942/campos_512_v4
+53/279959/campos_512_v4
+54/280123/campos_512_v4
+54/280254/campos_512_v4
+54/280281/campos_512_v4
+54/280291/campos_512_v4
+54/280412/campos_512_v4
+54/280437/campos_512_v4
+54/280470/campos_512_v4
+54/280489/campos_512_v4
+54/280531/campos_512_v4
+54/280558/campos_512_v4
+54/280627/campos_512_v4
+54/280664/campos_512_v4
+54/280698/campos_512_v4
+54/280727/campos_512_v4
+54/280730/campos_512_v4
+54/280820/campos_512_v4
+54/280823/campos_512_v4
+54/280845/campos_512_v4
+54/280880/campos_512_v4
+54/280919/campos_512_v4
+54/280940/campos_512_v4
+54/280980/campos_512_v4
+54/281247/campos_512_v4
+54/281381/campos_512_v4
+54/281485/campos_512_v4
+54/281555/campos_512_v4
+54/281634/campos_512_v4
+54/281671/campos_512_v4
+54/281690/campos_512_v4
+54/281801/campos_512_v4
+54/281832/campos_512_v4
+54/282080/campos_512_v4
+54/282143/campos_512_v4
+54/282161/campos_512_v4
+54/282262/campos_512_v4
+54/282412/campos_512_v4
+54/282445/campos_512_v4
+54/282720/campos_512_v4
+54/282726/campos_512_v4
+54/282767/campos_512_v4
+54/282789/campos_512_v4
+54/282842/campos_512_v4
+54/282894/campos_512_v4
+54/282957/campos_512_v4
+54/283048/campos_512_v4
+54/283177/campos_512_v4
+54/283203/campos_512_v4
+54/283257/campos_512_v4
+54/283269/campos_512_v4
+54/283277/campos_512_v4
+54/283293/campos_512_v4
+54/283306/campos_512_v4
+54/283397/campos_512_v4
+54/283475/campos_512_v4
+54/283593/campos_512_v4
+54/283721/campos_512_v4
+54/283776/campos_512_v4
+54/283908/campos_512_v4
+54/283955/campos_512_v4
+54/283996/campos_512_v4
+54/284034/campos_512_v4
+54/284037/campos_512_v4
+54/284041/campos_512_v4
+54/284045/campos_512_v4
+54/284086/campos_512_v4
+54/284186/campos_512_v4
+54/284209/campos_512_v4
+54/284245/campos_512_v4
+54/284325/campos_512_v4
+54/284388/campos_512_v4
+54/284403/campos_512_v4
+54/284429/campos_512_v4
+54/284468/campos_512_v4
+54/284564/campos_512_v4
+54/284568/campos_512_v4
+54/284608/campos_512_v4
+54/284681/campos_512_v4
+54/284827/campos_512_v4
+54/284845/campos_512_v4
+54/284860/campos_512_v4
+54/284888/campos_512_v4
+54/284956/campos_512_v4
+54/284986/campos_512_v4
+55/285011/campos_512_v4
+55/285166/campos_512_v4
+55/285183/campos_512_v4
+55/285216/campos_512_v4
+55/285325/campos_512_v4
+55/285372/campos_512_v4
+55/285428/campos_512_v4
+55/285467/campos_512_v4
+55/285525/campos_512_v4
+55/285635/campos_512_v4
+55/285645/campos_512_v4
+55/285756/campos_512_v4
+55/285810/campos_512_v4
+55/285836/campos_512_v4
+55/285911/campos_512_v4
+55/285923/campos_512_v4
+55/285940/campos_512_v4
+55/285969/campos_512_v4
+55/285993/campos_512_v4
+55/286015/campos_512_v4
+55/286019/campos_512_v4
+55/286098/campos_512_v4
+55/286157/campos_512_v4
+55/286173/campos_512_v4
+55/286177/campos_512_v4
+55/286211/campos_512_v4
+55/286292/campos_512_v4
+55/286325/campos_512_v4
+55/286354/campos_512_v4
+55/286548/campos_512_v4
+55/286634/campos_512_v4
+55/286667/campos_512_v4
+55/286668/campos_512_v4
+55/286713/campos_512_v4
+55/286872/campos_512_v4
+55/286977/campos_512_v4
+55/287188/campos_512_v4
+55/287289/campos_512_v4
+55/287407/campos_512_v4
+55/287437/campos_512_v4
+55/287440/campos_512_v4
+55/287462/campos_512_v4
+55/287467/campos_512_v4
+55/287473/campos_512_v4
+55/287481/campos_512_v4
+55/287560/campos_512_v4
+55/287650/campos_512_v4
+55/287800/campos_512_v4
+55/287838/campos_512_v4
+55/287932/campos_512_v4
+55/287972/campos_512_v4
+55/288089/campos_512_v4
+55/288330/campos_512_v4
+55/288361/campos_512_v4
+55/288371/campos_512_v4
+55/288502/campos_512_v4
+55/288581/campos_512_v4
+55/288694/campos_512_v4
+55/288699/campos_512_v4
+55/288710/campos_512_v4
+55/288732/campos_512_v4
+55/288747/campos_512_v4
+55/288858/campos_512_v4
+55/288868/campos_512_v4
+55/288877/campos_512_v4
+55/289009/campos_512_v4
+55/289041/campos_512_v4
+55/289142/campos_512_v4
+55/289207/campos_512_v4
+55/289251/campos_512_v4
+55/289314/campos_512_v4
+55/289338/campos_512_v4
+55/289361/campos_512_v4
+55/289377/campos_512_v4
+55/289430/campos_512_v4
+55/289458/campos_512_v4
+55/289489/campos_512_v4
+55/289577/campos_512_v4
+55/289622/campos_512_v4
+55/289725/campos_512_v4
+55/289766/campos_512_v4
+55/289829/campos_512_v4
+55/289904/campos_512_v4
+55/289962/campos_512_v4
+56/290155/campos_512_v4
+56/290205/campos_512_v4
+56/290354/campos_512_v4
+56/290395/campos_512_v4
+56/290408/campos_512_v4
+56/290479/campos_512_v4
+56/290973/campos_512_v4
+56/291033/campos_512_v4
+56/291100/campos_512_v4
+56/291237/campos_512_v4
+56/291408/campos_512_v4
+56/291659/campos_512_v4
+56/291705/campos_512_v4
+56/291725/campos_512_v4
+56/291747/campos_512_v4
+56/291798/campos_512_v4
+56/291890/campos_512_v4
+56/291909/campos_512_v4
+56/291946/campos_512_v4
+56/291975/campos_512_v4
+56/291984/campos_512_v4
+56/292009/campos_512_v4
+56/292013/campos_512_v4
+56/292028/campos_512_v4
+56/292053/campos_512_v4
+56/292101/campos_512_v4
+56/292108/campos_512_v4
+56/292158/campos_512_v4
+56/292240/campos_512_v4
+56/292272/campos_512_v4
+56/292324/campos_512_v4
+56/292353/campos_512_v4
+56/292648/campos_512_v4
+56/292734/campos_512_v4
+56/292741/campos_512_v4
+56/292783/campos_512_v4
+56/292961/campos_512_v4
+56/293046/campos_512_v4
+56/293059/campos_512_v4
+56/293116/campos_512_v4
+56/293143/campos_512_v4
+56/293150/campos_512_v4
+56/293162/campos_512_v4
+56/293179/campos_512_v4
+56/293207/campos_512_v4
+56/293210/campos_512_v4
+56/293271/campos_512_v4
+56/293359/campos_512_v4
+56/293362/campos_512_v4
+56/293403/campos_512_v4
+56/293407/campos_512_v4
+56/293451/campos_512_v4
+56/293463/campos_512_v4
+56/293491/campos_512_v4
+56/293552/campos_512_v4
+56/293585/campos_512_v4
+56/293687/campos_512_v4
+56/293836/campos_512_v4
+56/293891/campos_512_v4
+56/293964/campos_512_v4
+56/293988/campos_512_v4
+56/294007/campos_512_v4
+56/294021/campos_512_v4
+56/294326/campos_512_v4
+56/294611/campos_512_v4
+56/294633/campos_512_v4
+56/294731/campos_512_v4
+56/294808/campos_512_v4
+56/294832/campos_512_v4
+56/294864/campos_512_v4
+56/294895/campos_512_v4
+56/294916/campos_512_v4
+56/294943/campos_512_v4
+56/294956/campos_512_v4
+56/294989/campos_512_v4
+57/295030/campos_512_v4
+57/295033/campos_512_v4
+57/295097/campos_512_v4
+57/295134/campos_512_v4
+57/295138/campos_512_v4
+57/295301/campos_512_v4
+57/295320/campos_512_v4
+57/295321/campos_512_v4
+57/295323/campos_512_v4
+57/295357/campos_512_v4
+57/295407/campos_512_v4
+57/295479/campos_512_v4
+57/295502/campos_512_v4
+57/295503/campos_512_v4
+57/295551/campos_512_v4
+57/295637/campos_512_v4
+57/295655/campos_512_v4
+57/295658/campos_512_v4
+57/295659/campos_512_v4
+57/295674/campos_512_v4
+57/295679/campos_512_v4
+57/295805/campos_512_v4
+57/295843/campos_512_v4
+57/295860/campos_512_v4
+57/295900/campos_512_v4
+57/295951/campos_512_v4
+57/296058/campos_512_v4
+57/296081/campos_512_v4
+57/296215/campos_512_v4
+57/296330/campos_512_v4
+57/296446/campos_512_v4
+57/296627/campos_512_v4
+57/296636/campos_512_v4
+57/296697/campos_512_v4
+57/296699/campos_512_v4
+57/296719/campos_512_v4
+57/296808/campos_512_v4
+57/296812/campos_512_v4
+57/296838/campos_512_v4
+57/296840/campos_512_v4
+57/296984/campos_512_v4
+57/297034/campos_512_v4
+57/297044/campos_512_v4
+57/297054/campos_512_v4
+57/297055/campos_512_v4
+57/297056/campos_512_v4
+57/297144/campos_512_v4
+57/297159/campos_512_v4
+57/297214/campos_512_v4
+57/297215/campos_512_v4
+57/297229/campos_512_v4
+57/297266/campos_512_v4
+57/297438/campos_512_v4
+57/297544/campos_512_v4
+57/297579/campos_512_v4
+57/297614/campos_512_v4
+57/297686/campos_512_v4
+57/297687/campos_512_v4
+57/297690/campos_512_v4
+57/297694/campos_512_v4
+57/297696/campos_512_v4
+57/297780/campos_512_v4
+57/297895/campos_512_v4
+57/297924/campos_512_v4
+57/297943/campos_512_v4
+57/297960/campos_512_v4
+57/298052/campos_512_v4
+57/298089/campos_512_v4
+57/298150/campos_512_v4
+57/298432/campos_512_v4
+57/298474/campos_512_v4
+57/298577/campos_512_v4
+57/298589/campos_512_v4
+57/298671/campos_512_v4
+57/298772/campos_512_v4
+57/298780/campos_512_v4
+57/298813/campos_512_v4
+57/298872/campos_512_v4
+57/298912/campos_512_v4
+57/298960/campos_512_v4
+57/299217/campos_512_v4
+57/299258/campos_512_v4
+57/299283/campos_512_v4
+57/299306/campos_512_v4
+57/299331/campos_512_v4
+57/299353/campos_512_v4
+57/299389/campos_512_v4
+57/299393/campos_512_v4
+57/299481/campos_512_v4
+57/299578/campos_512_v4
+57/299648/campos_512_v4
+57/299652/campos_512_v4
+57/299702/campos_512_v4
+57/299707/campos_512_v4
+57/299710/campos_512_v4
+57/299754/campos_512_v4
+57/299794/campos_512_v4
+57/299845/campos_512_v4
+57/299853/campos_512_v4
+57/299940/campos_512_v4
+57/299944/campos_512_v4
+58/300032/campos_512_v4
+58/300098/campos_512_v4
+58/300122/campos_512_v4
+58/300151/campos_512_v4
+58/300154/campos_512_v4
+58/300226/campos_512_v4
+58/300272/campos_512_v4
+58/300321/campos_512_v4
+58/300329/campos_512_v4
+58/300772/campos_512_v4
+58/300798/campos_512_v4
+58/300944/campos_512_v4
+58/300948/campos_512_v4
+58/300975/campos_512_v4
+58/301010/campos_512_v4
+58/301038/campos_512_v4
+58/301101/campos_512_v4
+58/301111/campos_512_v4
+58/301181/campos_512_v4
+58/301333/campos_512_v4
+58/301346/campos_512_v4
+58/301357/campos_512_v4
+58/301390/campos_512_v4
+58/301509/campos_512_v4
+58/301540/campos_512_v4
+58/301549/campos_512_v4
+58/301621/campos_512_v4
+58/301813/campos_512_v4
+58/301888/campos_512_v4
+58/301934/campos_512_v4
+58/301970/campos_512_v4
+58/302021/campos_512_v4
+58/302151/campos_512_v4
+58/302164/campos_512_v4
+58/302176/campos_512_v4
+58/302216/campos_512_v4
+58/302222/campos_512_v4
+58/302454/campos_512_v4
+58/302530/campos_512_v4
+58/302574/campos_512_v4
+58/302671/campos_512_v4
+58/302717/campos_512_v4
+58/302743/campos_512_v4
+58/302816/campos_512_v4
+58/302885/campos_512_v4
+58/302995/campos_512_v4
+58/303040/campos_512_v4
+58/303045/campos_512_v4
+58/303252/campos_512_v4
+58/303265/campos_512_v4
+58/303266/campos_512_v4
+58/303304/campos_512_v4
+58/303332/campos_512_v4
+58/303347/campos_512_v4
+58/303357/campos_512_v4
+58/303361/campos_512_v4
+58/303600/campos_512_v4
+58/303603/campos_512_v4
+58/303668/campos_512_v4
+58/303705/campos_512_v4
+58/303782/campos_512_v4
+58/303836/campos_512_v4
+58/303843/campos_512_v4
+58/304025/campos_512_v4
+58/304166/campos_512_v4
+58/304224/campos_512_v4
+58/304259/campos_512_v4
+58/304294/campos_512_v4
+58/304367/campos_512_v4
+58/304380/campos_512_v4
+58/304520/campos_512_v4
+58/304603/campos_512_v4
+58/304654/campos_512_v4
+58/304708/campos_512_v4
+58/304747/campos_512_v4
+58/304781/campos_512_v4
+58/304907/campos_512_v4
+59/305087/campos_512_v4
+59/305088/campos_512_v4
+59/305093/campos_512_v4
+59/305164/campos_512_v4
+59/305218/campos_512_v4
+59/305255/campos_512_v4
+59/305421/campos_512_v4
+59/305467/campos_512_v4
+59/305487/campos_512_v4
+59/305590/campos_512_v4
+59/305608/campos_512_v4
+59/305795/campos_512_v4
+59/305797/campos_512_v4
+59/305798/campos_512_v4
+59/305799/campos_512_v4
+59/305835/campos_512_v4
+59/305836/campos_512_v4
+59/305872/campos_512_v4
+59/305891/campos_512_v4
+59/305904/campos_512_v4
+59/305989/campos_512_v4
+59/306016/campos_512_v4
+59/306041/campos_512_v4
+59/306161/campos_512_v4
+59/306194/campos_512_v4
+59/306199/campos_512_v4
+59/306211/campos_512_v4
+59/306218/campos_512_v4
+59/306279/campos_512_v4
+59/306320/campos_512_v4
+59/306363/campos_512_v4
+59/306393/campos_512_v4
+59/306398/campos_512_v4
+59/306420/campos_512_v4
+59/306430/campos_512_v4
+59/306457/campos_512_v4
+59/306492/campos_512_v4
+59/306514/campos_512_v4
+59/306550/campos_512_v4
+59/306587/campos_512_v4
+59/306709/campos_512_v4
+59/306739/campos_512_v4
+59/306754/campos_512_v4
+59/306755/campos_512_v4
+59/306800/campos_512_v4
+59/306833/campos_512_v4
+59/306848/campos_512_v4
+59/306880/campos_512_v4
+59/306940/campos_512_v4
+59/306944/campos_512_v4
+59/306953/campos_512_v4
+59/307069/campos_512_v4
+59/307118/campos_512_v4
+59/307126/campos_512_v4
+59/307143/campos_512_v4
+59/307167/campos_512_v4
+59/307183/campos_512_v4
+59/307299/campos_512_v4
+59/307324/campos_512_v4
+59/307333/campos_512_v4
+59/307386/campos_512_v4
+59/307498/campos_512_v4
+59/307569/campos_512_v4
+59/307665/campos_512_v4
+59/307668/campos_512_v4
+59/307684/campos_512_v4
+59/307690/campos_512_v4
+59/307799/campos_512_v4
+59/307833/campos_512_v4
+59/307866/campos_512_v4
+59/307913/campos_512_v4
+59/307998/campos_512_v4
+59/308115/campos_512_v4
+59/308132/campos_512_v4
+59/308343/campos_512_v4
+59/308356/campos_512_v4
+59/308376/campos_512_v4
+59/308435/campos_512_v4
+59/308437/campos_512_v4
+59/308485/campos_512_v4
+59/308577/campos_512_v4
+59/308587/campos_512_v4
+59/308637/campos_512_v4
+59/308790/campos_512_v4
+59/308797/campos_512_v4
+59/308820/campos_512_v4
+59/308971/campos_512_v4
+59/309048/campos_512_v4
+59/309083/campos_512_v4
+59/309101/campos_512_v4
+59/309120/campos_512_v4
+59/309225/campos_512_v4
+59/309310/campos_512_v4
+59/309316/campos_512_v4
+59/309323/campos_512_v4
+59/309446/campos_512_v4
+59/309477/campos_512_v4
+59/309496/campos_512_v4
+59/309503/campos_512_v4
+59/309524/campos_512_v4
+59/309531/campos_512_v4
+59/309567/campos_512_v4
+59/309607/campos_512_v4
+59/309789/campos_512_v4
+59/309808/campos_512_v4
+59/309849/campos_512_v4
+59/309862/campos_512_v4
+59/309943/campos_512_v4
+59/309962/campos_512_v4
+59/309979/campos_512_v4
+6/40059/campos_512_v4
+6/40116/campos_512_v4
+6/40348/campos_512_v4
+6/40432/campos_512_v4
+6/40434/campos_512_v4
+6/40500/campos_512_v4
+6/40686/campos_512_v4
+6/40748/campos_512_v4
+6/40895/campos_512_v4
+6/40916/campos_512_v4
+6/40961/campos_512_v4
+6/41047/campos_512_v4
+6/41052/campos_512_v4
+6/41120/campos_512_v4
+6/41168/campos_512_v4
+6/41319/campos_512_v4
+6/41380/campos_512_v4
+6/41417/campos_512_v4
+6/41452/campos_512_v4
+6/41547/campos_512_v4
+6/41609/campos_512_v4
+6/41610/campos_512_v4
+6/41711/campos_512_v4
+6/41753/campos_512_v4
+6/41903/campos_512_v4
+6/42048/campos_512_v4
+6/42098/campos_512_v4
+6/42118/campos_512_v4
+6/42297/campos_512_v4
+6/42439/campos_512_v4
+6/42535/campos_512_v4
+6/42587/campos_512_v4
+6/42626/campos_512_v4
+6/42657/campos_512_v4
+6/42664/campos_512_v4
+6/42679/campos_512_v4
+6/43061/campos_512_v4
+6/43083/campos_512_v4
+6/43135/campos_512_v4
+6/43146/campos_512_v4
+6/43170/campos_512_v4
+6/43220/campos_512_v4
+6/43374/campos_512_v4
+6/43484/campos_512_v4
+6/43567/campos_512_v4
+6/43603/campos_512_v4
+6/43676/campos_512_v4
+6/43690/campos_512_v4
+6/43802/campos_512_v4
+6/44008/campos_512_v4
+6/44031/campos_512_v4
+6/44039/campos_512_v4
+6/44109/campos_512_v4
+6/44276/campos_512_v4
+6/44316/campos_512_v4
+6/44438/campos_512_v4
+6/44553/campos_512_v4
+6/44555/campos_512_v4
+6/44591/campos_512_v4
+6/44734/campos_512_v4
+6/44791/campos_512_v4
+6/44799/campos_512_v4
+6/44806/campos_512_v4
+6/44823/campos_512_v4
+6/44904/campos_512_v4
+6/44962/campos_512_v4
+6/44969/campos_512_v4
+6/44985/campos_512_v4
+6/44991/campos_512_v4
+60/310010/campos_512_v4
+60/310098/campos_512_v4
+60/310140/campos_512_v4
+60/310215/campos_512_v4
+60/310227/campos_512_v4
+60/310257/campos_512_v4
+60/310265/campos_512_v4
+60/310314/campos_512_v4
+60/310345/campos_512_v4
+60/310378/campos_512_v4
+60/310385/campos_512_v4
+60/310396/campos_512_v4
+60/310485/campos_512_v4
+60/310541/campos_512_v4
+60/310627/campos_512_v4
+60/310680/campos_512_v4
+60/310754/campos_512_v4
+60/310833/campos_512_v4
+60/310838/campos_512_v4
+60/310976/campos_512_v4
+60/310983/campos_512_v4
+60/310988/campos_512_v4
+60/310994/campos_512_v4
+60/311142/campos_512_v4
+60/311187/campos_512_v4
+60/311274/campos_512_v4
+60/311365/campos_512_v4
+60/311501/campos_512_v4
+60/311517/campos_512_v4
+60/311619/campos_512_v4
+60/311727/campos_512_v4
+60/311834/campos_512_v4
+60/311926/campos_512_v4
+60/311936/campos_512_v4
+60/311943/campos_512_v4
+60/311958/campos_512_v4
+60/311963/campos_512_v4
+60/312021/campos_512_v4
+60/312047/campos_512_v4
+60/312138/campos_512_v4
+60/312307/campos_512_v4
+60/312360/campos_512_v4
+60/312380/campos_512_v4
+60/312384/campos_512_v4
+60/312466/campos_512_v4
+60/312500/campos_512_v4
+60/312572/campos_512_v4
+60/312652/campos_512_v4
+60/312671/campos_512_v4
+60/312730/campos_512_v4
+60/312753/campos_512_v4
+60/312804/campos_512_v4
+60/312885/campos_512_v4
+60/312901/campos_512_v4
+60/312983/campos_512_v4
+60/313039/campos_512_v4
+60/313100/campos_512_v4
+60/313116/campos_512_v4
+60/313122/campos_512_v4
+60/313131/campos_512_v4
+60/313139/campos_512_v4
+60/313143/campos_512_v4
+60/313193/campos_512_v4
+60/313258/campos_512_v4
+60/313284/campos_512_v4
+60/313315/campos_512_v4
+60/313353/campos_512_v4
+60/313384/campos_512_v4
+60/313405/campos_512_v4
+60/313494/campos_512_v4
+60/313532/campos_512_v4
+60/313545/campos_512_v4
+60/313555/campos_512_v4
+60/313593/campos_512_v4
+60/313650/campos_512_v4
+60/313669/campos_512_v4
+60/313671/campos_512_v4
+60/313773/campos_512_v4
+60/313807/campos_512_v4
+60/313825/campos_512_v4
+60/313952/campos_512_v4
+60/314318/campos_512_v4
+60/314406/campos_512_v4
+60/314424/campos_512_v4
+60/314580/campos_512_v4
+60/314597/campos_512_v4
+60/314725/campos_512_v4
+60/314792/campos_512_v4
+60/314839/campos_512_v4
+60/314858/campos_512_v4
+60/314885/campos_512_v4
+60/314913/campos_512_v4
+60/314931/campos_512_v4
+61/315003/campos_512_v4
+61/315015/campos_512_v4
+61/315361/campos_512_v4
+61/315447/campos_512_v4
+61/315508/campos_512_v4
+61/315862/campos_512_v4
+61/315869/campos_512_v4
+61/315903/campos_512_v4
+61/315936/campos_512_v4
+61/315945/campos_512_v4
+61/315964/campos_512_v4
+61/315978/campos_512_v4
+61/315991/campos_512_v4
+61/315992/campos_512_v4
+61/316036/campos_512_v4
+61/316073/campos_512_v4
+61/316110/campos_512_v4
+61/316143/campos_512_v4
+61/316195/campos_512_v4
+61/316375/campos_512_v4
+61/316435/campos_512_v4
+61/316436/campos_512_v4
+61/316442/campos_512_v4
+61/316456/campos_512_v4
+61/316497/campos_512_v4
+61/316531/campos_512_v4
+61/316536/campos_512_v4
+61/316553/campos_512_v4
+61/316676/campos_512_v4
+61/316677/campos_512_v4
+61/316733/campos_512_v4
+61/316818/campos_512_v4
+61/316838/campos_512_v4
+61/316858/campos_512_v4
+61/316896/campos_512_v4
+61/317035/campos_512_v4
+61/317060/campos_512_v4
+61/317062/campos_512_v4
+61/317141/campos_512_v4
+61/317142/campos_512_v4
+61/317340/campos_512_v4
+61/317412/campos_512_v4
+61/317527/campos_512_v4
+61/317541/campos_512_v4
+61/317582/campos_512_v4
+61/317584/campos_512_v4
+61/317592/campos_512_v4
+61/317642/campos_512_v4
+61/317652/campos_512_v4
+61/317697/campos_512_v4
+61/317720/campos_512_v4
+61/317731/campos_512_v4
+61/317740/campos_512_v4
+61/317834/campos_512_v4
+61/317842/campos_512_v4
+61/317858/campos_512_v4
+61/317916/campos_512_v4
+61/317927/campos_512_v4
+61/317944/campos_512_v4
+61/317996/campos_512_v4
+61/318066/campos_512_v4
+61/318098/campos_512_v4
+61/318112/campos_512_v4
+61/318238/campos_512_v4
+61/318242/campos_512_v4
+61/318248/campos_512_v4
+61/318261/campos_512_v4
+61/318292/campos_512_v4
+61/318305/campos_512_v4
+61/318335/campos_512_v4
+61/318336/campos_512_v4
+61/318352/campos_512_v4
+61/318355/campos_512_v4
+61/318425/campos_512_v4
+61/318440/campos_512_v4
+61/318444/campos_512_v4
+61/318484/campos_512_v4
+61/318627/campos_512_v4
+61/318690/campos_512_v4
+61/318788/campos_512_v4
+61/318889/campos_512_v4
+61/318944/campos_512_v4
+61/318978/campos_512_v4
+61/319011/campos_512_v4
+61/319054/campos_512_v4
+61/319295/campos_512_v4
+61/319402/campos_512_v4
+61/319463/campos_512_v4
+61/319487/campos_512_v4
+61/319529/campos_512_v4
+61/319535/campos_512_v4
+61/319549/campos_512_v4
+61/319585/campos_512_v4
+61/319727/campos_512_v4
+61/319814/campos_512_v4
+61/319815/campos_512_v4
+61/319856/campos_512_v4
+61/319904/campos_512_v4
+61/319909/campos_512_v4
+61/319919/campos_512_v4
+61/319960/campos_512_v4
+62/320011/campos_512_v4
+62/320021/campos_512_v4
+62/320055/campos_512_v4
+62/320129/campos_512_v4
+62/320196/campos_512_v4
+62/320210/campos_512_v4
+62/320212/campos_512_v4
+62/320260/campos_512_v4
+62/320267/campos_512_v4
+62/320287/campos_512_v4
+62/320369/campos_512_v4
+62/320411/campos_512_v4
+62/320429/campos_512_v4
+62/320439/campos_512_v4
+62/320527/campos_512_v4
+62/320646/campos_512_v4
+62/320704/campos_512_v4
+62/320822/campos_512_v4
+62/320842/campos_512_v4
+62/320917/campos_512_v4
+62/320973/campos_512_v4
+62/321023/campos_512_v4
+62/321069/campos_512_v4
+62/321233/campos_512_v4
+62/321316/campos_512_v4
+62/321332/campos_512_v4
+62/321476/campos_512_v4
+62/321543/campos_512_v4
+62/321577/campos_512_v4
+62/321579/campos_512_v4
+62/321601/campos_512_v4
+62/321654/campos_512_v4
+62/321662/campos_512_v4
+62/321695/campos_512_v4
+62/321718/campos_512_v4
+62/321725/campos_512_v4
+62/321751/campos_512_v4
+62/321876/campos_512_v4
+62/321902/campos_512_v4
+62/321952/campos_512_v4
+62/321970/campos_512_v4
+62/321997/campos_512_v4
+62/322010/campos_512_v4
+62/322014/campos_512_v4
+62/322095/campos_512_v4
+62/322175/campos_512_v4
+62/322273/campos_512_v4
+62/322289/campos_512_v4
+62/322299/campos_512_v4
+62/322303/campos_512_v4
+62/322322/campos_512_v4
+62/322393/campos_512_v4
+62/322484/campos_512_v4
+62/322499/campos_512_v4
+62/322622/campos_512_v4
+62/322674/campos_512_v4
+62/322676/campos_512_v4
+62/322887/campos_512_v4
+62/323070/campos_512_v4
+62/323124/campos_512_v4
+62/323129/campos_512_v4
+62/323136/campos_512_v4
+62/323140/campos_512_v4
+62/323185/campos_512_v4
+62/323220/campos_512_v4
+62/323242/campos_512_v4
+62/323253/campos_512_v4
+62/323261/campos_512_v4
+62/323345/campos_512_v4
+62/323362/campos_512_v4
+62/323447/campos_512_v4
+62/323492/campos_512_v4
+62/323545/campos_512_v4
+62/323570/campos_512_v4
+62/323703/campos_512_v4
+62/323719/campos_512_v4
+62/323751/campos_512_v4
+62/323967/campos_512_v4
+62/324007/campos_512_v4
+62/324014/campos_512_v4
+62/324130/campos_512_v4
+62/324131/campos_512_v4
+62/324232/campos_512_v4
+62/324392/campos_512_v4
+62/324411/campos_512_v4
+62/324440/campos_512_v4
+62/324508/campos_512_v4
+62/324578/campos_512_v4
+62/324630/campos_512_v4
+62/324659/campos_512_v4
+62/324774/campos_512_v4
+62/324797/campos_512_v4
+62/324817/campos_512_v4
+62/324854/campos_512_v4
+62/324857/campos_512_v4
+62/324870/campos_512_v4
+62/324883/campos_512_v4
+62/324929/campos_512_v4
+62/324949/campos_512_v4
+62/324984/campos_512_v4
+63/325075/campos_512_v4
+63/325373/campos_512_v4
+63/325388/campos_512_v4
+63/325393/campos_512_v4
+63/325457/campos_512_v4
+63/325473/campos_512_v4
+63/325477/campos_512_v4
+63/325583/campos_512_v4
+63/325601/campos_512_v4
+63/325618/campos_512_v4
+63/325623/campos_512_v4
+63/325636/campos_512_v4
+63/325745/campos_512_v4
+63/325757/campos_512_v4
+63/325785/campos_512_v4
+63/325838/campos_512_v4
+63/325940/campos_512_v4
+63/325951/campos_512_v4
+63/325987/campos_512_v4
+63/326006/campos_512_v4
+63/326041/campos_512_v4
+63/326064/campos_512_v4
+63/326070/campos_512_v4
+63/326138/campos_512_v4
+63/326140/campos_512_v4
+63/326288/campos_512_v4
+63/326314/campos_512_v4
+63/326445/campos_512_v4
+63/326510/campos_512_v4
+63/326542/campos_512_v4
+63/326618/campos_512_v4
+63/326681/campos_512_v4
+63/326688/campos_512_v4
+63/326691/campos_512_v4
+63/326725/campos_512_v4
+63/326768/campos_512_v4
+63/326801/campos_512_v4
+63/326860/campos_512_v4
+63/326877/campos_512_v4
+63/326901/campos_512_v4
+63/326928/campos_512_v4
+63/327031/campos_512_v4
+63/327051/campos_512_v4
+63/327057/campos_512_v4
+63/327098/campos_512_v4
+63/327202/campos_512_v4
+63/327225/campos_512_v4
+63/327279/campos_512_v4
+63/327297/campos_512_v4
+63/327360/campos_512_v4
+63/327475/campos_512_v4
+63/327663/campos_512_v4
+63/327908/campos_512_v4
+63/327975/campos_512_v4
+63/328006/campos_512_v4
+63/328059/campos_512_v4
+63/328090/campos_512_v4
+63/328146/campos_512_v4
+63/328188/campos_512_v4
+63/328217/campos_512_v4
+63/328242/campos_512_v4
+63/328359/campos_512_v4
+63/328433/campos_512_v4
+63/328485/campos_512_v4
+63/328495/campos_512_v4
+63/328535/campos_512_v4
+63/328570/campos_512_v4
+63/328579/campos_512_v4
+63/328647/campos_512_v4
+63/328660/campos_512_v4
+63/328669/campos_512_v4
+63/328675/campos_512_v4
+63/328679/campos_512_v4
+63/328686/campos_512_v4
+63/328687/campos_512_v4
+63/328706/campos_512_v4
+63/328758/campos_512_v4
+63/328791/campos_512_v4
+63/328795/campos_512_v4
+63/328880/campos_512_v4
+63/328977/campos_512_v4
+63/328983/campos_512_v4
+63/329047/campos_512_v4
+63/329073/campos_512_v4
+63/329093/campos_512_v4
+63/329116/campos_512_v4
+63/329130/campos_512_v4
+63/329160/campos_512_v4
+63/329173/campos_512_v4
+63/329255/campos_512_v4
+63/329363/campos_512_v4
+63/329365/campos_512_v4
+63/329411/campos_512_v4
+63/329445/campos_512_v4
+63/329517/campos_512_v4
+63/329532/campos_512_v4
+63/329554/campos_512_v4
+63/329556/campos_512_v4
+63/329557/campos_512_v4
+63/329584/campos_512_v4
+63/329633/campos_512_v4
+63/329699/campos_512_v4
+63/329789/campos_512_v4
+63/329790/campos_512_v4
+63/329806/campos_512_v4
+63/329857/campos_512_v4
+63/329893/campos_512_v4
+63/329928/campos_512_v4
+63/329937/campos_512_v4
+63/329959/campos_512_v4
+64/330024/campos_512_v4
+64/330126/campos_512_v4
+64/330222/campos_512_v4
+64/330301/campos_512_v4
+64/330400/campos_512_v4
+64/330480/campos_512_v4
+64/330585/campos_512_v4
+64/330615/campos_512_v4
+64/330639/campos_512_v4
+64/330662/campos_512_v4
+64/330740/campos_512_v4
+64/330813/campos_512_v4
+64/330897/campos_512_v4
+64/330964/campos_512_v4
+64/330998/campos_512_v4
+64/331005/campos_512_v4
+64/331048/campos_512_v4
+64/331056/campos_512_v4
+64/331057/campos_512_v4
+64/331094/campos_512_v4
+64/331131/campos_512_v4
+64/331151/campos_512_v4
+64/331250/campos_512_v4
+64/331701/campos_512_v4
+64/331703/campos_512_v4
+64/331721/campos_512_v4
+64/331774/campos_512_v4
+64/331793/campos_512_v4
+64/331805/campos_512_v4
+64/331839/campos_512_v4
+64/331910/campos_512_v4
+64/332016/campos_512_v4
+64/332037/campos_512_v4
+64/332185/campos_512_v4
+64/332266/campos_512_v4
+64/332280/campos_512_v4
+64/332284/campos_512_v4
+64/332302/campos_512_v4
+64/332314/campos_512_v4
+64/332353/campos_512_v4
+64/332556/campos_512_v4
+64/332688/campos_512_v4
+64/332731/campos_512_v4
+64/332886/campos_512_v4
+64/332943/campos_512_v4
+64/332962/campos_512_v4
+64/332994/campos_512_v4
+64/333013/campos_512_v4
+64/333032/campos_512_v4
+64/333064/campos_512_v4
+64/333082/campos_512_v4
+64/333102/campos_512_v4
+64/333191/campos_512_v4
+64/333229/campos_512_v4
+64/333309/campos_512_v4
+64/333323/campos_512_v4
+64/333346/campos_512_v4
+64/333391/campos_512_v4
+64/333398/campos_512_v4
+64/333513/campos_512_v4
+64/333542/campos_512_v4
+64/333544/campos_512_v4
+64/333550/campos_512_v4
+64/333701/campos_512_v4
+64/333760/campos_512_v4
+64/333783/campos_512_v4
+64/333823/campos_512_v4
+64/333834/campos_512_v4
+64/333853/campos_512_v4
+64/333881/campos_512_v4
+64/334014/campos_512_v4
+64/334078/campos_512_v4
+64/334198/campos_512_v4
+64/334262/campos_512_v4
+64/334267/campos_512_v4
+64/334327/campos_512_v4
+64/334400/campos_512_v4
+64/334448/campos_512_v4
+64/334469/campos_512_v4
+64/334478/campos_512_v4
+64/334653/campos_512_v4
+64/334942/campos_512_v4
+64/334993/campos_512_v4
+65/335270/campos_512_v4
+65/335370/campos_512_v4
+65/335378/campos_512_v4
+65/335407/campos_512_v4
+65/335432/campos_512_v4
+65/335465/campos_512_v4
+65/335480/campos_512_v4
+65/335523/campos_512_v4
+65/335558/campos_512_v4
+65/335673/campos_512_v4
+65/335744/campos_512_v4
+65/335759/campos_512_v4
+65/335776/campos_512_v4
+65/335800/campos_512_v4
+65/335816/campos_512_v4
+65/335928/campos_512_v4
+65/335985/campos_512_v4
+65/336092/campos_512_v4
+65/336121/campos_512_v4
+65/336127/campos_512_v4
+65/336176/campos_512_v4
+65/336202/campos_512_v4
+65/336238/campos_512_v4
+65/336440/campos_512_v4
+65/336475/campos_512_v4
+65/336486/campos_512_v4
+65/336611/campos_512_v4
+65/336660/campos_512_v4
+65/336686/campos_512_v4
+65/336876/campos_512_v4
+65/336894/campos_512_v4
+65/337026/campos_512_v4
+65/337092/campos_512_v4
+65/337142/campos_512_v4
+65/337143/campos_512_v4
+65/337173/campos_512_v4
+65/337191/campos_512_v4
+65/337265/campos_512_v4
+65/337269/campos_512_v4
+65/337493/campos_512_v4
+65/337562/campos_512_v4
+65/337612/campos_512_v4
+65/337623/campos_512_v4
+65/337876/campos_512_v4
+65/337959/campos_512_v4
+65/337990/campos_512_v4
+65/338006/campos_512_v4
+65/338020/campos_512_v4
+65/338212/campos_512_v4
+65/338234/campos_512_v4
+65/338307/campos_512_v4
+65/338390/campos_512_v4
+65/338392/campos_512_v4
+65/338525/campos_512_v4
+65/338596/campos_512_v4
+65/338702/campos_512_v4
+65/338735/campos_512_v4
+65/338742/campos_512_v4
+65/338777/campos_512_v4
+65/338815/campos_512_v4
+65/338896/campos_512_v4
+65/338933/campos_512_v4
+65/338939/campos_512_v4
+65/338947/campos_512_v4
+65/338971/campos_512_v4
+65/339234/campos_512_v4
+65/339245/campos_512_v4
+65/339274/campos_512_v4
+65/339296/campos_512_v4
+65/339306/campos_512_v4
+65/339311/campos_512_v4
+65/339433/campos_512_v4
+65/339478/campos_512_v4
+65/339592/campos_512_v4
+65/339611/campos_512_v4
+65/339706/campos_512_v4
+65/339763/campos_512_v4
+65/339870/campos_512_v4
+65/339930/campos_512_v4
+65/339972/campos_512_v4
+65/339998/campos_512_v4
+66/340110/campos_512_v4
+66/340203/campos_512_v4
+66/340321/campos_512_v4
+66/340388/campos_512_v4
+66/340462/campos_512_v4
+66/340481/campos_512_v4
+66/340498/campos_512_v4
+66/340526/campos_512_v4
+66/340610/campos_512_v4
+66/340621/campos_512_v4
+66/340648/campos_512_v4
+66/340788/campos_512_v4
+66/340834/campos_512_v4
+66/340869/campos_512_v4
+66/340991/campos_512_v4
+66/341067/campos_512_v4
+66/341095/campos_512_v4
+66/341099/campos_512_v4
+66/341136/campos_512_v4
+66/341224/campos_512_v4
+66/341406/campos_512_v4
+66/341410/campos_512_v4
+66/341411/campos_512_v4
+66/341490/campos_512_v4
+66/341516/campos_512_v4
+66/341528/campos_512_v4
+66/341919/campos_512_v4
+66/341965/campos_512_v4
+66/341988/campos_512_v4
+66/342122/campos_512_v4
+66/342148/campos_512_v4
+66/342173/campos_512_v4
+66/342337/campos_512_v4
+66/342395/campos_512_v4
+66/342506/campos_512_v4
+66/342523/campos_512_v4
+66/342548/campos_512_v4
+66/342552/campos_512_v4
+66/342596/campos_512_v4
+66/342623/campos_512_v4
+66/342711/campos_512_v4
+66/342736/campos_512_v4
+66/342761/campos_512_v4
+66/342841/campos_512_v4
+66/342882/campos_512_v4
+66/342956/campos_512_v4
+66/342972/campos_512_v4
+66/343012/campos_512_v4
+66/343026/campos_512_v4
+66/343076/campos_512_v4
+66/343083/campos_512_v4
+66/343126/campos_512_v4
+66/343232/campos_512_v4
+66/343277/campos_512_v4
+66/343312/campos_512_v4
+66/343314/campos_512_v4
+66/343323/campos_512_v4
+66/343447/campos_512_v4
+66/343456/campos_512_v4
+66/343568/campos_512_v4
+66/343705/campos_512_v4
+66/343933/campos_512_v4
+66/343959/campos_512_v4
+66/344012/campos_512_v4
+66/344021/campos_512_v4
+66/344099/campos_512_v4
+66/344145/campos_512_v4
+66/344270/campos_512_v4
+66/344283/campos_512_v4
+66/344372/campos_512_v4
+66/344375/campos_512_v4
+66/344523/campos_512_v4
+66/344556/campos_512_v4
+66/344587/campos_512_v4
+66/344616/campos_512_v4
+66/344645/campos_512_v4
+66/344760/campos_512_v4
+66/344774/campos_512_v4
+66/344779/campos_512_v4
+66/344784/campos_512_v4
+66/344869/campos_512_v4
+66/344925/campos_512_v4
+66/344938/campos_512_v4
+66/344977/campos_512_v4
+67/345117/campos_512_v4
+67/345160/campos_512_v4
+67/345185/campos_512_v4
+67/345195/campos_512_v4
+67/345220/campos_512_v4
+67/345323/campos_512_v4
+67/345387/campos_512_v4
+67/345394/campos_512_v4
+67/345465/campos_512_v4
+67/345504/campos_512_v4
+67/345565/campos_512_v4
+67/345585/campos_512_v4
+67/345612/campos_512_v4
+67/345653/campos_512_v4
+67/345655/campos_512_v4
+67/345692/campos_512_v4
+67/345698/campos_512_v4
+67/345762/campos_512_v4
+67/345825/campos_512_v4
+67/345869/campos_512_v4
+67/345904/campos_512_v4
+67/345996/campos_512_v4
+67/346000/campos_512_v4
+67/346002/campos_512_v4
+67/346010/campos_512_v4
+67/346068/campos_512_v4
+67/346093/campos_512_v4
+67/346201/campos_512_v4
+67/346284/campos_512_v4
+67/346345/campos_512_v4
+67/346360/campos_512_v4
+67/346362/campos_512_v4
+67/346398/campos_512_v4
+67/346523/campos_512_v4
+67/346580/campos_512_v4
+67/346607/campos_512_v4
+67/346609/campos_512_v4
+67/346634/campos_512_v4
+67/346643/campos_512_v4
+67/346687/campos_512_v4
+67/346887/campos_512_v4
+67/346894/campos_512_v4
+67/346929/campos_512_v4
+67/346957/campos_512_v4
+67/347000/campos_512_v4
+67/347198/campos_512_v4
+67/347228/campos_512_v4
+67/347277/campos_512_v4
+67/347306/campos_512_v4
+67/347372/campos_512_v4
+67/347422/campos_512_v4
+67/347485/campos_512_v4
+67/347547/campos_512_v4
+67/347583/campos_512_v4
+67/347643/campos_512_v4
+67/347653/campos_512_v4
+67/347665/campos_512_v4
+67/347690/campos_512_v4
+67/347774/campos_512_v4
+67/347787/campos_512_v4
+67/347788/campos_512_v4
+67/347789/campos_512_v4
+67/347927/campos_512_v4
+67/347968/campos_512_v4
+67/348091/campos_512_v4
+67/348117/campos_512_v4
+67/348131/campos_512_v4
+67/348162/campos_512_v4
+67/348319/campos_512_v4
+67/348334/campos_512_v4
+67/348397/campos_512_v4
+67/348398/campos_512_v4
+67/348413/campos_512_v4
+67/348457/campos_512_v4
+67/348498/campos_512_v4
+67/348504/campos_512_v4
+67/348625/campos_512_v4
+67/348648/campos_512_v4
+67/348776/campos_512_v4
+67/348779/campos_512_v4
+67/348816/campos_512_v4
+67/348926/campos_512_v4
+67/348952/campos_512_v4
+67/348977/campos_512_v4
+67/349003/campos_512_v4
+67/349052/campos_512_v4
+67/349129/campos_512_v4
+67/349199/campos_512_v4
+67/349200/campos_512_v4
+67/349207/campos_512_v4
+67/349292/campos_512_v4
+67/349321/campos_512_v4
+67/349361/campos_512_v4
+67/349368/campos_512_v4
+67/349413/campos_512_v4
+67/349442/campos_512_v4
+67/349509/campos_512_v4
+67/349560/campos_512_v4
+67/349568/campos_512_v4
+67/349599/campos_512_v4
+67/349628/campos_512_v4
+67/349716/campos_512_v4
+67/349776/campos_512_v4
+67/349803/campos_512_v4
+67/349859/campos_512_v4
+67/349873/campos_512_v4
+67/349907/campos_512_v4
+67/350001/campos_512_v4
+68/350039/campos_512_v4
+68/350051/campos_512_v4
+68/350059/campos_512_v4
+68/350084/campos_512_v4
+68/350101/campos_512_v4
+68/350110/campos_512_v4
+68/350164/campos_512_v4
+68/350166/campos_512_v4
+68/350232/campos_512_v4
+68/350374/campos_512_v4
+68/350392/campos_512_v4
+68/350398/campos_512_v4
+68/350472/campos_512_v4
+68/350557/campos_512_v4
+68/350565/campos_512_v4
+68/350567/campos_512_v4
+68/350594/campos_512_v4
+68/350622/campos_512_v4
+68/350636/campos_512_v4
+68/350758/campos_512_v4
+68/350810/campos_512_v4
+68/350825/campos_512_v4
+68/350829/campos_512_v4
+68/350926/campos_512_v4
+68/350954/campos_512_v4
+68/350968/campos_512_v4
+68/351001/campos_512_v4
+68/351045/campos_512_v4
+68/351060/campos_512_v4
+68/351106/campos_512_v4
+68/351110/campos_512_v4
+68/351126/campos_512_v4
+68/351141/campos_512_v4
+68/351176/campos_512_v4
+68/351181/campos_512_v4
+68/351200/campos_512_v4
+68/351252/campos_512_v4
+68/351386/campos_512_v4
+68/351391/campos_512_v4
+68/351426/campos_512_v4
+68/351456/campos_512_v4
+68/351560/campos_512_v4
+68/351675/campos_512_v4
+68/351694/campos_512_v4
+68/351714/campos_512_v4
+68/351717/campos_512_v4
+68/351778/campos_512_v4
+68/351782/campos_512_v4
+68/351789/campos_512_v4
+68/351906/campos_512_v4
+68/351921/campos_512_v4
+68/351970/campos_512_v4
+68/352105/campos_512_v4
+68/352148/campos_512_v4
+68/352174/campos_512_v4
+68/352179/campos_512_v4
+68/352281/campos_512_v4
+68/352326/campos_512_v4
+68/352400/campos_512_v4
+68/352447/campos_512_v4
+68/352623/campos_512_v4
+68/352645/campos_512_v4
+68/352662/campos_512_v4
+68/352693/campos_512_v4
+68/352833/campos_512_v4
+68/352847/campos_512_v4
+68/352865/campos_512_v4
+68/352881/campos_512_v4
+68/352925/campos_512_v4
+68/352945/campos_512_v4
+68/353000/campos_512_v4
+68/353060/campos_512_v4
+68/353072/campos_512_v4
+68/353094/campos_512_v4
+68/353141/campos_512_v4
+68/353150/campos_512_v4
+68/353202/campos_512_v4
+68/353206/campos_512_v4
+68/353231/campos_512_v4
+68/353345/campos_512_v4
+68/353361/campos_512_v4
+68/353416/campos_512_v4
+68/353452/campos_512_v4
+68/353459/campos_512_v4
+68/353505/campos_512_v4
+68/353512/campos_512_v4
+68/353613/campos_512_v4
+68/353618/campos_512_v4
+68/353630/campos_512_v4
+68/353660/campos_512_v4
+68/353682/campos_512_v4
+68/353688/campos_512_v4
+68/353712/campos_512_v4
+68/353723/campos_512_v4
+68/353727/campos_512_v4
+68/353848/campos_512_v4
+68/353880/campos_512_v4
+68/353922/campos_512_v4
+68/353932/campos_512_v4
+68/353939/campos_512_v4
+68/353949/campos_512_v4
+68/353958/campos_512_v4
+68/353976/campos_512_v4
+68/354067/campos_512_v4
+68/354087/campos_512_v4
+68/354112/campos_512_v4
+68/354145/campos_512_v4
+68/354146/campos_512_v4
+68/354241/campos_512_v4
+68/354261/campos_512_v4
+68/354357/campos_512_v4
+68/354384/campos_512_v4
+68/354436/campos_512_v4
+68/354466/campos_512_v4
+68/354492/campos_512_v4
+68/354505/campos_512_v4
+68/354510/campos_512_v4
+68/354538/campos_512_v4
+68/354556/campos_512_v4
+68/354591/campos_512_v4
+68/354626/campos_512_v4
+68/354633/campos_512_v4
+68/354637/campos_512_v4
+68/354776/campos_512_v4
+68/354798/campos_512_v4
+68/354807/campos_512_v4
+68/354878/campos_512_v4
+68/354921/campos_512_v4
+68/354947/campos_512_v4
+69/355015/campos_512_v4
+69/355052/campos_512_v4
+69/355056/campos_512_v4
+69/355132/campos_512_v4
+69/355185/campos_512_v4
+69/355224/campos_512_v4
+69/355272/campos_512_v4
+69/355325/campos_512_v4
+69/355401/campos_512_v4
+69/355455/campos_512_v4
+69/355569/campos_512_v4
+69/355610/campos_512_v4
+69/355749/campos_512_v4
+69/355754/campos_512_v4
+69/355768/campos_512_v4
+69/355844/campos_512_v4
+69/355858/campos_512_v4
+69/355964/campos_512_v4
+69/355990/campos_512_v4
+69/356051/campos_512_v4
+69/356126/campos_512_v4
+69/356149/campos_512_v4
+69/356284/campos_512_v4
+69/356285/campos_512_v4
+69/356292/campos_512_v4
+69/356305/campos_512_v4
+69/356309/campos_512_v4
+69/356365/campos_512_v4
+69/356379/campos_512_v4
+69/356393/campos_512_v4
+69/356467/campos_512_v4
+69/356633/campos_512_v4
+69/356723/campos_512_v4
+69/356845/campos_512_v4
+69/356854/campos_512_v4
+69/356992/campos_512_v4
+69/357004/campos_512_v4
+69/357035/campos_512_v4
+69/357064/campos_512_v4
+69/357154/campos_512_v4
+69/357228/campos_512_v4
+69/357537/campos_512_v4
+69/357566/campos_512_v4
+69/357610/campos_512_v4
+69/357669/campos_512_v4
+69/357761/campos_512_v4
+69/357763/campos_512_v4
+69/357822/campos_512_v4
+69/357981/campos_512_v4
+69/358027/campos_512_v4
+69/358059/campos_512_v4
+69/358088/campos_512_v4
+69/358161/campos_512_v4
+69/358169/campos_512_v4
+69/358258/campos_512_v4
+69/358288/campos_512_v4
+69/358292/campos_512_v4
+69/358377/campos_512_v4
+69/358402/campos_512_v4
+69/358513/campos_512_v4
+69/358521/campos_512_v4
+69/358527/campos_512_v4
+69/358539/campos_512_v4
+69/358690/campos_512_v4
+69/358693/campos_512_v4
+69/358908/campos_512_v4
+69/359019/campos_512_v4
+69/359041/campos_512_v4
+69/359064/campos_512_v4
+69/359069/campos_512_v4
+69/359199/campos_512_v4
+69/359200/campos_512_v4
+69/359203/campos_512_v4
+69/359362/campos_512_v4
+69/359403/campos_512_v4
+69/359445/campos_512_v4
+69/359510/campos_512_v4
+69/359599/campos_512_v4
+69/359617/campos_512_v4
+69/359721/campos_512_v4
+69/359798/campos_512_v4
+69/359933/campos_512_v4
+7/45082/campos_512_v4
+7/45175/campos_512_v4
+7/45237/campos_512_v4
+7/45260/campos_512_v4
+7/45280/campos_512_v4
+7/45287/campos_512_v4
+7/45530/campos_512_v4
+7/45532/campos_512_v4
+7/45622/campos_512_v4
+7/45682/campos_512_v4
+7/46000/campos_512_v4
+7/46031/campos_512_v4
+7/46172/campos_512_v4
+7/46369/campos_512_v4
+7/46399/campos_512_v4
+7/46428/campos_512_v4
+7/46443/campos_512_v4
+7/46473/campos_512_v4
+7/46504/campos_512_v4
+7/46551/campos_512_v4
+7/46576/campos_512_v4
+7/46624/campos_512_v4
+7/46967/campos_512_v4
+7/46996/campos_512_v4
+7/47030/campos_512_v4
+7/47250/campos_512_v4
+7/47324/campos_512_v4
+7/47326/campos_512_v4
+7/47339/campos_512_v4
+7/47472/campos_512_v4
+7/47501/campos_512_v4
+7/47527/campos_512_v4
+7/47556/campos_512_v4
+7/47562/campos_512_v4
+7/47597/campos_512_v4
+7/47670/campos_512_v4
+7/47745/campos_512_v4
+7/47849/campos_512_v4
+7/47941/campos_512_v4
+7/48016/campos_512_v4
+7/48073/campos_512_v4
+7/48082/campos_512_v4
+7/48128/campos_512_v4
+7/48177/campos_512_v4
+7/48215/campos_512_v4
+7/48229/campos_512_v4
+7/48272/campos_512_v4
+7/48403/campos_512_v4
+7/48457/campos_512_v4
+7/48482/campos_512_v4
+7/48566/campos_512_v4
+7/48573/campos_512_v4
+7/48612/campos_512_v4
+7/48665/campos_512_v4
+7/48735/campos_512_v4
+7/48971/campos_512_v4
+7/48976/campos_512_v4
+7/49033/campos_512_v4
+7/49141/campos_512_v4
+7/49225/campos_512_v4
+7/49473/campos_512_v4
+7/49528/campos_512_v4
+7/49596/campos_512_v4
+7/49606/campos_512_v4
+7/49630/campos_512_v4
+7/49662/campos_512_v4
+7/49746/campos_512_v4
+7/49977/campos_512_v4
+7/49989/campos_512_v4
+70/360028/campos_512_v4
+70/360057/campos_512_v4
+70/360090/campos_512_v4
+70/360110/campos_512_v4
+70/360119/campos_512_v4
+70/360255/campos_512_v4
+70/360272/campos_512_v4
+70/360348/campos_512_v4
+70/360362/campos_512_v4
+70/360442/campos_512_v4
+70/360455/campos_512_v4
+70/360567/campos_512_v4
+70/360685/campos_512_v4
+70/360704/campos_512_v4
+70/360748/campos_512_v4
+70/360787/campos_512_v4
+70/360821/campos_512_v4
+70/360915/campos_512_v4
+70/360940/campos_512_v4
+70/360970/campos_512_v4
+70/360995/campos_512_v4
+70/361022/campos_512_v4
+70/361110/campos_512_v4
+70/361124/campos_512_v4
+70/361165/campos_512_v4
+70/361232/campos_512_v4
+70/361330/campos_512_v4
+70/361424/campos_512_v4
+70/361461/campos_512_v4
+70/361595/campos_512_v4
+70/361732/campos_512_v4
+70/361794/campos_512_v4
+70/361809/campos_512_v4
+70/361869/campos_512_v4
+70/361899/campos_512_v4
+70/361962/campos_512_v4
+70/362010/campos_512_v4
+70/362057/campos_512_v4
+70/362084/campos_512_v4
+70/362164/campos_512_v4
+70/362250/campos_512_v4
+70/362325/campos_512_v4
+70/362332/campos_512_v4
+70/362349/campos_512_v4
+70/362355/campos_512_v4
+70/362372/campos_512_v4
+70/362400/campos_512_v4
+70/362485/campos_512_v4
+70/362486/campos_512_v4
+70/362516/campos_512_v4
+70/362521/campos_512_v4
+70/362558/campos_512_v4
+70/362579/campos_512_v4
+70/362596/campos_512_v4
+70/362597/campos_512_v4
+70/362670/campos_512_v4
+70/362729/campos_512_v4
+70/362819/campos_512_v4
+70/363002/campos_512_v4
+70/363233/campos_512_v4
+70/363383/campos_512_v4
+70/363409/campos_512_v4
+70/363521/campos_512_v4
+70/363616/campos_512_v4
+70/363671/campos_512_v4
+70/363692/campos_512_v4
+70/363801/campos_512_v4
+70/363859/campos_512_v4
+70/363876/campos_512_v4
+70/363899/campos_512_v4
+70/363939/campos_512_v4
+70/363987/campos_512_v4
+70/364034/campos_512_v4
+70/364092/campos_512_v4
+70/364160/campos_512_v4
+70/364197/campos_512_v4
+70/364242/campos_512_v4
+70/364373/campos_512_v4
+70/364412/campos_512_v4
+70/364434/campos_512_v4
+70/364485/campos_512_v4
+70/364521/campos_512_v4
+70/364653/campos_512_v4
+70/364808/campos_512_v4
+70/364816/campos_512_v4
+70/364877/campos_512_v4
+70/364892/campos_512_v4
+70/364896/campos_512_v4
+70/364924/campos_512_v4
+70/364987/campos_512_v4
+71/365029/campos_512_v4
+71/365148/campos_512_v4
+71/365181/campos_512_v4
+71/365311/campos_512_v4
+71/365338/campos_512_v4
+71/365389/campos_512_v4
+71/365441/campos_512_v4
+71/365456/campos_512_v4
+71/365510/campos_512_v4
+71/365515/campos_512_v4
+71/365567/campos_512_v4
+71/365667/campos_512_v4
+71/365741/campos_512_v4
+71/365844/campos_512_v4
+71/365847/campos_512_v4
+71/365856/campos_512_v4
+71/365901/campos_512_v4
+71/365914/campos_512_v4
+71/365959/campos_512_v4
+71/366011/campos_512_v4
+71/366079/campos_512_v4
+71/366149/campos_512_v4
+71/366176/campos_512_v4
+71/366181/campos_512_v4
+71/366223/campos_512_v4
+71/366285/campos_512_v4
+71/366345/campos_512_v4
+71/366490/campos_512_v4
+71/366499/campos_512_v4
+71/366538/campos_512_v4
+71/366639/campos_512_v4
+71/366667/campos_512_v4
+71/366740/campos_512_v4
+71/366927/campos_512_v4
+71/366933/campos_512_v4
+71/366940/campos_512_v4
+71/367039/campos_512_v4
+71/367064/campos_512_v4
+71/367101/campos_512_v4
+71/367120/campos_512_v4
+71/367141/campos_512_v4
+71/367151/campos_512_v4
+71/367174/campos_512_v4
+71/367219/campos_512_v4
+71/367321/campos_512_v4
+71/367339/campos_512_v4
+71/367345/campos_512_v4
+71/367469/campos_512_v4
+71/367500/campos_512_v4
+71/367553/campos_512_v4
+71/367589/campos_512_v4
+71/367639/campos_512_v4
+71/367650/campos_512_v4
+71/367678/campos_512_v4
+71/367718/campos_512_v4
+71/367796/campos_512_v4
+71/367856/campos_512_v4
+71/367877/campos_512_v4
+71/367917/campos_512_v4
+71/367927/campos_512_v4
+71/368016/campos_512_v4
+71/368139/campos_512_v4
+71/368243/campos_512_v4
+71/368246/campos_512_v4
+71/368274/campos_512_v4
+71/368295/campos_512_v4
+71/368368/campos_512_v4
+71/368418/campos_512_v4
+71/368442/campos_512_v4
+71/368475/campos_512_v4
+71/368555/campos_512_v4
+71/368563/campos_512_v4
+71/368613/campos_512_v4
+71/368634/campos_512_v4
+71/368672/campos_512_v4
+71/368768/campos_512_v4
+71/368872/campos_512_v4
+71/368939/campos_512_v4
+71/368967/campos_512_v4
+71/369063/campos_512_v4
+71/369112/campos_512_v4
+71/369190/campos_512_v4
+71/369193/campos_512_v4
+71/369254/campos_512_v4
+71/369282/campos_512_v4
+71/369294/campos_512_v4
+71/369346/campos_512_v4
+71/369350/campos_512_v4
+71/369367/campos_512_v4
+71/369424/campos_512_v4
+71/369439/campos_512_v4
+71/369499/campos_512_v4
+71/369539/campos_512_v4
+71/369849/campos_512_v4
+72/370257/campos_512_v4
+72/370311/campos_512_v4
+72/370314/campos_512_v4
+72/370339/campos_512_v4
+72/370357/campos_512_v4
+72/370436/campos_512_v4
+72/370459/campos_512_v4
+72/370501/campos_512_v4
+72/370554/campos_512_v4
+72/370580/campos_512_v4
+72/370589/campos_512_v4
+72/370662/campos_512_v4
+72/370709/campos_512_v4
+72/370882/campos_512_v4
+72/370913/campos_512_v4
+72/370924/campos_512_v4
+72/370926/campos_512_v4
+72/371022/campos_512_v4
+72/371023/campos_512_v4
+72/371033/campos_512_v4
+72/371060/campos_512_v4
+72/371098/campos_512_v4
+72/371257/campos_512_v4
+72/371392/campos_512_v4
+72/371489/campos_512_v4
+72/371509/campos_512_v4
+72/371517/campos_512_v4
+72/371572/campos_512_v4
+72/371573/campos_512_v4
+72/371585/campos_512_v4
+72/371679/campos_512_v4
+72/371690/campos_512_v4
+72/371691/campos_512_v4
+72/371698/campos_512_v4
+72/371791/campos_512_v4
+72/372032/campos_512_v4
+72/372076/campos_512_v4
+72/372167/campos_512_v4
+72/372183/campos_512_v4
+72/372194/campos_512_v4
+72/372197/campos_512_v4
+72/372215/campos_512_v4
+72/372226/campos_512_v4
+72/372229/campos_512_v4
+72/372232/campos_512_v4
+72/372376/campos_512_v4
+72/372426/campos_512_v4
+72/372548/campos_512_v4
+72/372557/campos_512_v4
+72/372595/campos_512_v4
+72/372642/campos_512_v4
+72/372681/campos_512_v4
+72/372755/campos_512_v4
+72/372790/campos_512_v4
+72/373236/campos_512_v4
+72/373256/campos_512_v4
+72/373278/campos_512_v4
+72/373487/campos_512_v4
+72/373522/campos_512_v4
+72/373624/campos_512_v4
+72/373675/campos_512_v4
+72/373829/campos_512_v4
+72/373849/campos_512_v4
+72/373989/campos_512_v4
+72/374013/campos_512_v4
+72/374126/campos_512_v4
+72/374147/campos_512_v4
+72/374163/campos_512_v4
+72/374226/campos_512_v4
+72/374304/campos_512_v4
+72/374320/campos_512_v4
+72/374337/campos_512_v4
+72/374351/campos_512_v4
+72/374387/campos_512_v4
+72/374404/campos_512_v4
+72/374405/campos_512_v4
+72/374543/campos_512_v4
+72/374774/campos_512_v4
+72/374840/campos_512_v4
+72/374850/campos_512_v4
+72/374915/campos_512_v4
+73/375016/campos_512_v4
+73/375022/campos_512_v4
+73/375028/campos_512_v4
+73/375031/campos_512_v4
+73/375083/campos_512_v4
+73/375138/campos_512_v4
+73/375150/campos_512_v4
+73/375372/campos_512_v4
+73/375397/campos_512_v4
+73/375657/campos_512_v4
+73/375672/campos_512_v4
+73/375682/campos_512_v4
+73/375707/campos_512_v4
+73/375710/campos_512_v4
+73/375777/campos_512_v4
+73/375789/campos_512_v4
+73/375835/campos_512_v4
+73/375997/campos_512_v4
+73/376083/campos_512_v4
+73/376211/campos_512_v4
+73/376242/campos_512_v4
+73/376265/campos_512_v4
+73/376285/campos_512_v4
+73/376476/campos_512_v4
+73/376519/campos_512_v4
+73/376567/campos_512_v4
+73/376647/campos_512_v4
+73/376701/campos_512_v4
+73/376709/campos_512_v4
+73/376713/campos_512_v4
+73/377012/campos_512_v4
+73/377053/campos_512_v4
+73/377083/campos_512_v4
+73/377145/campos_512_v4
+73/377174/campos_512_v4
+73/377273/campos_512_v4
+73/377493/campos_512_v4
+73/377577/campos_512_v4
+73/377611/campos_512_v4
+73/377612/campos_512_v4
+73/377627/campos_512_v4
+73/377675/campos_512_v4
+73/377721/campos_512_v4
+73/377874/campos_512_v4
+73/377916/campos_512_v4
+73/377987/campos_512_v4
+73/378019/campos_512_v4
+73/378095/campos_512_v4
+73/378116/campos_512_v4
+73/378117/campos_512_v4
+73/378134/campos_512_v4
+73/378207/campos_512_v4
+73/378353/campos_512_v4
+73/378388/campos_512_v4
+73/378432/campos_512_v4
+73/378467/campos_512_v4
+73/378600/campos_512_v4
+73/378611/campos_512_v4
+73/378669/campos_512_v4
+73/378715/campos_512_v4
+73/378726/campos_512_v4
+73/378816/campos_512_v4
+73/378819/campos_512_v4
+73/378848/campos_512_v4
+73/378908/campos_512_v4
+73/378993/campos_512_v4
+73/379013/campos_512_v4
+73/379027/campos_512_v4
+73/379124/campos_512_v4
+73/379163/campos_512_v4
+73/379169/campos_512_v4
+73/379253/campos_512_v4
+73/379526/campos_512_v4
+73/379598/campos_512_v4
+73/379611/campos_512_v4
+73/379633/campos_512_v4
+73/379664/campos_512_v4
+73/379667/campos_512_v4
+73/379685/campos_512_v4
+73/379703/campos_512_v4
+73/379728/campos_512_v4
+73/379843/campos_512_v4
+73/379885/campos_512_v4
+73/379904/campos_512_v4
+73/379993/campos_512_v4
+74/380007/campos_512_v4
+74/380049/campos_512_v4
+74/380062/campos_512_v4
+74/380117/campos_512_v4
+74/380130/campos_512_v4
+74/380207/campos_512_v4
+74/380241/campos_512_v4
+74/380359/campos_512_v4
+74/380368/campos_512_v4
+74/380369/campos_512_v4
+74/380402/campos_512_v4
+74/380409/campos_512_v4
+74/380445/campos_512_v4
+74/380459/campos_512_v4
+74/380523/campos_512_v4
+74/380537/campos_512_v4
+74/380661/campos_512_v4
+74/380668/campos_512_v4
+74/380672/campos_512_v4
+74/380700/campos_512_v4
+74/380752/campos_512_v4
+74/380846/campos_512_v4
+74/380858/campos_512_v4
+74/381017/campos_512_v4
+74/381044/campos_512_v4
+74/381125/campos_512_v4
+74/381128/campos_512_v4
+74/381194/campos_512_v4
+74/381242/campos_512_v4
+74/381262/campos_512_v4
+74/381650/campos_512_v4
+74/381653/campos_512_v4
+74/381730/campos_512_v4
+74/381775/campos_512_v4
+74/381913/campos_512_v4
+74/381925/campos_512_v4
+74/381962/campos_512_v4
+74/382017/campos_512_v4
+74/382037/campos_512_v4
+74/382061/campos_512_v4
+74/382068/campos_512_v4
+74/382071/campos_512_v4
+74/382076/campos_512_v4
+74/382270/campos_512_v4
+74/382421/campos_512_v4
+74/382442/campos_512_v4
+74/382523/campos_512_v4
+74/382594/campos_512_v4
+74/382671/campos_512_v4
+74/382745/campos_512_v4
+74/382855/campos_512_v4
+74/382947/campos_512_v4
+74/382998/campos_512_v4
+74/382999/campos_512_v4
+74/383032/campos_512_v4
+74/383070/campos_512_v4
+74/383127/campos_512_v4
+74/383149/campos_512_v4
+74/383172/campos_512_v4
+74/383204/campos_512_v4
+74/383246/campos_512_v4
+74/383257/campos_512_v4
+74/383337/campos_512_v4
+74/383412/campos_512_v4
+74/383429/campos_512_v4
+74/383613/campos_512_v4
+74/383741/campos_512_v4
+74/383742/campos_512_v4
+74/383799/campos_512_v4
+74/383807/campos_512_v4
+74/383824/campos_512_v4
+74/383828/campos_512_v4
+74/383930/campos_512_v4
+74/384006/campos_512_v4
+74/384040/campos_512_v4
+74/384108/campos_512_v4
+74/384125/campos_512_v4
+74/384128/campos_512_v4
+74/384137/campos_512_v4
+74/384147/campos_512_v4
+74/384231/campos_512_v4
+74/384281/campos_512_v4
+74/384334/campos_512_v4
+74/384341/campos_512_v4
+74/384417/campos_512_v4
+74/384482/campos_512_v4
+74/384593/campos_512_v4
+74/384630/campos_512_v4
+74/384648/campos_512_v4
+74/384654/campos_512_v4
+74/384740/campos_512_v4
+74/384749/campos_512_v4
+74/384783/campos_512_v4
+74/384802/campos_512_v4
+74/384859/campos_512_v4
+74/384861/campos_512_v4
+74/384866/campos_512_v4
+74/384958/campos_512_v4
+75/385056/campos_512_v4
+75/385095/campos_512_v4
+75/385105/campos_512_v4
+75/385128/campos_512_v4
+75/385234/campos_512_v4
+75/385319/campos_512_v4
+75/385342/campos_512_v4
+75/385456/campos_512_v4
+75/385503/campos_512_v4
+75/385505/campos_512_v4
+75/385506/campos_512_v4
+75/385535/campos_512_v4
+75/385688/campos_512_v4
+75/385795/campos_512_v4
+75/385921/campos_512_v4
+75/385972/campos_512_v4
+75/386014/campos_512_v4
+75/386036/campos_512_v4
+75/386039/campos_512_v4
+75/386062/campos_512_v4
+75/386076/campos_512_v4
+75/386192/campos_512_v4
+75/386202/campos_512_v4
+75/386218/campos_512_v4
+75/386230/campos_512_v4
+75/386285/campos_512_v4
+75/386389/campos_512_v4
+75/386436/campos_512_v4
+75/386572/campos_512_v4
+75/386574/campos_512_v4
+75/386628/campos_512_v4
+75/386635/campos_512_v4
+75/386652/campos_512_v4
+75/386675/campos_512_v4
+75/386706/campos_512_v4
+75/386739/campos_512_v4
+75/386752/campos_512_v4
+75/386789/campos_512_v4
+75/386828/campos_512_v4
+75/387007/campos_512_v4
+75/387050/campos_512_v4
+75/387057/campos_512_v4
+75/387121/campos_512_v4
+75/387303/campos_512_v4
+75/387344/campos_512_v4
+75/387351/campos_512_v4
+75/387477/campos_512_v4
+75/387488/campos_512_v4
+75/387513/campos_512_v4
+75/387689/campos_512_v4
+75/387876/campos_512_v4
+75/387892/campos_512_v4
+75/387922/campos_512_v4
+75/387939/campos_512_v4
+75/387944/campos_512_v4
+75/388038/campos_512_v4
+75/388194/campos_512_v4
+75/388242/campos_512_v4
+75/388247/campos_512_v4
+75/388462/campos_512_v4
+75/388491/campos_512_v4
+75/388518/campos_512_v4
+75/388583/campos_512_v4
+75/388802/campos_512_v4
+75/388944/campos_512_v4
+75/389016/campos_512_v4
+75/389034/campos_512_v4
+75/389226/campos_512_v4
+75/389247/campos_512_v4
+75/389295/campos_512_v4
+75/389419/campos_512_v4
+75/389565/campos_512_v4
+75/389622/campos_512_v4
+75/389689/campos_512_v4
+75/389797/campos_512_v4
+75/389860/campos_512_v4
+75/389892/campos_512_v4
+75/389946/campos_512_v4
+75/389954/campos_512_v4
+76/390116/campos_512_v4
+76/390128/campos_512_v4
+76/390129/campos_512_v4
+76/390216/campos_512_v4
+76/390257/campos_512_v4
+76/390284/campos_512_v4
+76/390291/campos_512_v4
+76/390441/campos_512_v4
+76/390631/campos_512_v4
+76/390724/campos_512_v4
+76/390763/campos_512_v4
+76/390767/campos_512_v4
+76/390823/campos_512_v4
+76/390864/campos_512_v4
+76/390886/campos_512_v4
+76/390962/campos_512_v4
+76/391188/campos_512_v4
+76/391215/campos_512_v4
+76/391273/campos_512_v4
+76/391394/campos_512_v4
+76/391426/campos_512_v4
+76/391465/campos_512_v4
+76/391467/campos_512_v4
+76/391476/campos_512_v4
+76/391485/campos_512_v4
+76/391497/campos_512_v4
+76/391569/campos_512_v4
+76/391641/campos_512_v4
+76/391665/campos_512_v4
+76/391682/campos_512_v4
+76/391820/campos_512_v4
+76/391893/campos_512_v4
+76/391927/campos_512_v4
+76/392010/campos_512_v4
+76/392011/campos_512_v4
+76/392074/campos_512_v4
+76/392111/campos_512_v4
+76/392114/campos_512_v4
+76/392162/campos_512_v4
+76/392220/campos_512_v4
+76/392231/campos_512_v4
+76/392233/campos_512_v4
+76/392245/campos_512_v4
+76/392251/campos_512_v4
+76/392265/campos_512_v4
+76/392355/campos_512_v4
+76/392447/campos_512_v4
+76/392515/campos_512_v4
+76/392556/campos_512_v4
+76/392569/campos_512_v4
+76/392574/campos_512_v4
+76/392577/campos_512_v4
+76/392655/campos_512_v4
+76/392668/campos_512_v4
+76/392676/campos_512_v4
+76/392688/campos_512_v4
+76/392826/campos_512_v4
+76/392952/campos_512_v4
+76/393094/campos_512_v4
+76/393168/campos_512_v4
+76/393234/campos_512_v4
+76/393342/campos_512_v4
+76/393388/campos_512_v4
+76/393413/campos_512_v4
+76/393424/campos_512_v4
+76/393510/campos_512_v4
+76/393627/campos_512_v4
+76/393704/campos_512_v4
+76/393749/campos_512_v4
+76/393792/campos_512_v4
+76/393822/campos_512_v4
+76/393938/campos_512_v4
+76/394047/campos_512_v4
+76/394077/campos_512_v4
+76/394122/campos_512_v4
+76/394140/campos_512_v4
+76/394170/campos_512_v4
+76/394191/campos_512_v4
+76/394348/campos_512_v4
+76/394353/campos_512_v4
+76/394387/campos_512_v4
+76/394410/campos_512_v4
+76/394529/campos_512_v4
+76/394633/campos_512_v4
+76/394646/campos_512_v4
+76/394723/campos_512_v4
+76/394730/campos_512_v4
+76/394800/campos_512_v4
+76/394834/campos_512_v4
+76/394843/campos_512_v4
+76/394848/campos_512_v4
+76/394895/campos_512_v4
+76/394902/campos_512_v4
+76/394991/campos_512_v4
+76/394995/campos_512_v4
+76/394998/campos_512_v4
+77/395043/campos_512_v4
+77/395064/campos_512_v4
+77/395125/campos_512_v4
+77/395162/campos_512_v4
+77/395191/campos_512_v4
+77/395241/campos_512_v4
+77/395276/campos_512_v4
+77/395426/campos_512_v4
+77/395540/campos_512_v4
+77/395653/campos_512_v4
+77/395679/campos_512_v4
+77/395764/campos_512_v4
+77/395851/campos_512_v4
+77/395855/campos_512_v4
+77/395973/campos_512_v4
+77/396015/campos_512_v4
+77/396112/campos_512_v4
+77/396128/campos_512_v4
+77/396129/campos_512_v4
+77/396207/campos_512_v4
+77/396223/campos_512_v4
+77/396308/campos_512_v4
+77/396343/campos_512_v4
+77/396379/campos_512_v4
+77/396412/campos_512_v4
+77/396493/campos_512_v4
+77/396495/campos_512_v4
+77/396516/campos_512_v4
+77/396584/campos_512_v4
+77/396596/campos_512_v4
+77/396633/campos_512_v4
+77/396692/campos_512_v4
+77/396870/campos_512_v4
+77/396887/campos_512_v4
+77/396893/campos_512_v4
+77/397040/campos_512_v4
+77/397149/campos_512_v4
+77/397323/campos_512_v4
+77/397334/campos_512_v4
+77/397343/campos_512_v4
+77/397375/campos_512_v4
+77/397449/campos_512_v4
+77/397536/campos_512_v4
+77/397554/campos_512_v4
+77/397581/campos_512_v4
+77/397597/campos_512_v4
+77/397623/campos_512_v4
+77/397632/campos_512_v4
+77/397792/campos_512_v4
+77/397808/campos_512_v4
+77/397829/campos_512_v4
+77/398022/campos_512_v4
+77/398028/campos_512_v4
+77/398036/campos_512_v4
+77/398088/campos_512_v4
+77/398101/campos_512_v4
+77/398128/campos_512_v4
+77/398152/campos_512_v4
+77/398197/campos_512_v4
+77/398237/campos_512_v4
+77/398392/campos_512_v4
+77/398476/campos_512_v4
+77/398482/campos_512_v4
+77/398517/campos_512_v4
+77/398722/campos_512_v4
+77/398724/campos_512_v4
+77/398827/campos_512_v4
+77/399042/campos_512_v4
+77/399070/campos_512_v4
+77/399077/campos_512_v4
+77/399155/campos_512_v4
+77/399181/campos_512_v4
+77/399295/campos_512_v4
+77/399325/campos_512_v4
+77/399400/campos_512_v4
+77/399416/campos_512_v4
+77/399439/campos_512_v4
+77/399479/campos_512_v4
+77/399557/campos_512_v4
+77/399573/campos_512_v4
+77/399615/campos_512_v4
+77/399624/campos_512_v4
+77/399634/campos_512_v4
+77/399637/campos_512_v4
+77/399725/campos_512_v4
+77/399727/campos_512_v4
+77/399761/campos_512_v4
+77/399776/campos_512_v4
+77/399789/campos_512_v4
+77/399904/campos_512_v4
+78/400187/campos_512_v4
+78/400219/campos_512_v4
+78/400264/campos_512_v4
+78/400281/campos_512_v4
+78/400298/campos_512_v4
+78/400359/campos_512_v4
+78/400362/campos_512_v4
+78/400364/campos_512_v4
+78/400370/campos_512_v4
+78/400385/campos_512_v4
+78/400411/campos_512_v4
+78/400458/campos_512_v4
+78/400484/campos_512_v4
+78/400487/campos_512_v4
+78/400493/campos_512_v4
+78/400510/campos_512_v4
+78/400625/campos_512_v4
+78/400634/campos_512_v4
+78/400637/campos_512_v4
+78/400638/campos_512_v4
+78/400673/campos_512_v4
+78/400694/campos_512_v4
+78/400801/campos_512_v4
+78/400847/campos_512_v4
+78/400952/campos_512_v4
+78/401138/campos_512_v4
+78/401159/campos_512_v4
+78/401194/campos_512_v4
+78/401219/campos_512_v4
+78/401228/campos_512_v4
+78/401356/campos_512_v4
+78/401576/campos_512_v4
+78/401665/campos_512_v4
+78/401693/campos_512_v4
+78/401698/campos_512_v4
+78/401777/campos_512_v4
+78/401874/campos_512_v4
+78/401896/campos_512_v4
+78/401923/campos_512_v4
+78/402024/campos_512_v4
+78/402031/campos_512_v4
+78/402071/campos_512_v4
+78/402073/campos_512_v4
+78/402139/campos_512_v4
+78/402141/campos_512_v4
+78/402143/campos_512_v4
+78/402156/campos_512_v4
+78/402216/campos_512_v4
+78/402223/campos_512_v4
+78/402225/campos_512_v4
+78/402227/campos_512_v4
+78/402240/campos_512_v4
+78/402282/campos_512_v4
+78/402359/campos_512_v4
+78/402382/campos_512_v4
+78/402400/campos_512_v4
+78/402447/campos_512_v4
+78/402508/campos_512_v4
+78/402545/campos_512_v4
+78/402589/campos_512_v4
+78/402602/campos_512_v4
+78/402616/campos_512_v4
+78/402618/campos_512_v4
+78/402624/campos_512_v4
+78/402683/campos_512_v4
+78/402686/campos_512_v4
+78/402716/campos_512_v4
+78/402749/campos_512_v4
+78/402766/campos_512_v4
+78/402901/campos_512_v4
+78/402963/campos_512_v4
+78/402995/campos_512_v4
+78/403092/campos_512_v4
+78/403142/campos_512_v4
+78/403163/campos_512_v4
+78/403213/campos_512_v4
+78/403225/campos_512_v4
+78/403228/campos_512_v4
+78/403249/campos_512_v4
+78/403265/campos_512_v4
+78/403348/campos_512_v4
+78/403417/campos_512_v4
+78/403418/campos_512_v4
+78/403421/campos_512_v4
+78/403425/campos_512_v4
+78/403450/campos_512_v4
+78/403460/campos_512_v4
+78/403581/campos_512_v4
+78/403602/campos_512_v4
+78/403684/campos_512_v4
+78/403759/campos_512_v4
+78/403890/campos_512_v4
+78/403920/campos_512_v4
+78/403995/campos_512_v4
+78/404029/campos_512_v4
+78/404052/campos_512_v4
+78/404060/campos_512_v4
+78/404086/campos_512_v4
+78/404089/campos_512_v4
+78/404166/campos_512_v4
+78/404169/campos_512_v4
+78/404213/campos_512_v4
+78/404246/campos_512_v4
+78/404249/campos_512_v4
+78/404286/campos_512_v4
+78/404306/campos_512_v4
+78/404406/campos_512_v4
+78/404443/campos_512_v4
+78/404462/campos_512_v4
+78/404466/campos_512_v4
+78/404660/campos_512_v4
+78/404752/campos_512_v4
+78/404777/campos_512_v4
+78/404819/campos_512_v4
+79/405023/campos_512_v4
+79/405102/campos_512_v4
+79/405140/campos_512_v4
+79/405145/campos_512_v4
+79/405227/campos_512_v4
+79/405282/campos_512_v4
+79/405395/campos_512_v4
+79/405428/campos_512_v4
+79/405443/campos_512_v4
+79/405460/campos_512_v4
+79/405505/campos_512_v4
+79/405606/campos_512_v4
+79/405783/campos_512_v4
+79/405784/campos_512_v4
+79/405804/campos_512_v4
+79/405850/campos_512_v4
+79/405867/campos_512_v4
+79/405868/campos_512_v4
+79/405914/campos_512_v4
+79/405947/campos_512_v4
+79/405986/campos_512_v4
+79/406044/campos_512_v4
+79/406086/campos_512_v4
+79/406094/campos_512_v4
+79/406144/campos_512_v4
+79/406164/campos_512_v4
+79/406383/campos_512_v4
+79/406476/campos_512_v4
+79/406569/campos_512_v4
+79/406638/campos_512_v4
+79/406830/campos_512_v4
+79/406849/campos_512_v4
+79/407029/campos_512_v4
+79/407038/campos_512_v4
+79/407089/campos_512_v4
+79/407157/campos_512_v4
+79/407200/campos_512_v4
+79/407324/campos_512_v4
+79/407392/campos_512_v4
+79/407394/campos_512_v4
+79/407412/campos_512_v4
+79/407413/campos_512_v4
+79/407419/campos_512_v4
+79/407490/campos_512_v4
+79/407541/campos_512_v4
+79/407628/campos_512_v4
+79/407748/campos_512_v4
+79/407788/campos_512_v4
+79/407812/campos_512_v4
+79/407816/campos_512_v4
+79/407967/campos_512_v4
+79/407996/campos_512_v4
+79/408019/campos_512_v4
+79/408186/campos_512_v4
+79/408219/campos_512_v4
+79/408280/campos_512_v4
+79/408327/campos_512_v4
+79/408441/campos_512_v4
+79/408472/campos_512_v4
+79/408659/campos_512_v4
+79/408661/campos_512_v4
+79/408662/campos_512_v4
+79/408673/campos_512_v4
+79/408749/campos_512_v4
+79/408824/campos_512_v4
+79/408880/campos_512_v4
+79/408890/campos_512_v4
+79/408971/campos_512_v4
+79/408982/campos_512_v4
+79/409005/campos_512_v4
+79/409080/campos_512_v4
+79/409165/campos_512_v4
+79/409173/campos_512_v4
+79/409235/campos_512_v4
+79/409277/campos_512_v4
+79/409283/campos_512_v4
+79/409342/campos_512_v4
+79/409452/campos_512_v4
+79/409478/campos_512_v4
+79/409525/campos_512_v4
+79/409590/campos_512_v4
+79/409680/campos_512_v4
+79/409762/campos_512_v4
+79/409815/campos_512_v4
+79/409923/campos_512_v4
+79/409925/campos_512_v4
+8/50017/campos_512_v4
+8/50150/campos_512_v4
+8/50175/campos_512_v4
+8/50221/campos_512_v4
+8/50243/campos_512_v4
+8/50414/campos_512_v4
+8/50500/campos_512_v4
+8/50511/campos_512_v4
+8/50805/campos_512_v4
+8/50808/campos_512_v4
+8/50865/campos_512_v4
+8/51013/campos_512_v4
+8/51054/campos_512_v4
+8/51105/campos_512_v4
+8/51139/campos_512_v4
+8/51316/campos_512_v4
+8/51368/campos_512_v4
+8/51659/campos_512_v4
+8/51796/campos_512_v4
+8/51807/campos_512_v4
+8/51868/campos_512_v4
+8/51973/campos_512_v4
+8/51980/campos_512_v4
+8/52086/campos_512_v4
+8/52310/campos_512_v4
+8/52341/campos_512_v4
+8/52406/campos_512_v4
+8/52507/campos_512_v4
+8/52515/campos_512_v4
+8/52554/campos_512_v4
+8/52595/campos_512_v4
+8/52616/campos_512_v4
+8/52660/campos_512_v4
+8/52684/campos_512_v4
+8/52745/campos_512_v4
+8/52787/campos_512_v4
+8/52818/campos_512_v4
+8/52854/campos_512_v4
+8/52867/campos_512_v4
+8/52870/campos_512_v4
+8/52877/campos_512_v4
+8/52880/campos_512_v4
+8/52908/campos_512_v4
+8/52984/campos_512_v4
+8/53023/campos_512_v4
+8/53105/campos_512_v4
+8/53187/campos_512_v4
+8/53197/campos_512_v4
+8/53259/campos_512_v4
+8/53413/campos_512_v4
+8/53432/campos_512_v4
+8/53472/campos_512_v4
+8/53492/campos_512_v4
+8/53515/campos_512_v4
+8/53609/campos_512_v4
+8/53752/campos_512_v4
+8/53762/campos_512_v4
+8/53798/campos_512_v4
+8/53859/campos_512_v4
+8/54136/campos_512_v4
+8/54318/campos_512_v4
+8/54601/campos_512_v4
+8/54699/campos_512_v4
+8/54849/campos_512_v4
+8/54881/campos_512_v4
+80/410192/campos_512_v4
+80/410198/campos_512_v4
+80/410325/campos_512_v4
+80/410364/campos_512_v4
+80/410406/campos_512_v4
+80/410479/campos_512_v4
+80/410487/campos_512_v4
+80/410550/campos_512_v4
+80/410555/campos_512_v4
+80/410556/campos_512_v4
+80/410577/campos_512_v4
+80/410610/campos_512_v4
+80/410844/campos_512_v4
+80/410886/campos_512_v4
+80/410970/campos_512_v4
+80/410971/campos_512_v4
+80/410991/campos_512_v4
+80/410997/campos_512_v4
+80/411013/campos_512_v4
+80/411047/campos_512_v4
+80/411150/campos_512_v4
+80/411257/campos_512_v4
+80/411285/campos_512_v4
+80/411317/campos_512_v4
+80/411428/campos_512_v4
+80/411476/campos_512_v4
+80/411512/campos_512_v4
+80/411513/campos_512_v4
+80/411515/campos_512_v4
+80/411564/campos_512_v4
+80/411599/campos_512_v4
+80/411694/campos_512_v4
+80/411715/campos_512_v4
+80/411764/campos_512_v4
+80/411768/campos_512_v4
+80/411817/campos_512_v4
+80/411852/campos_512_v4
+80/411872/campos_512_v4
+80/411902/campos_512_v4
+80/411905/campos_512_v4
+80/411925/campos_512_v4
+80/412037/campos_512_v4
+80/412041/campos_512_v4
+80/412219/campos_512_v4
+80/412281/campos_512_v4
+80/412282/campos_512_v4
+80/412305/campos_512_v4
+80/412323/campos_512_v4
+80/412370/campos_512_v4
+80/412445/campos_512_v4
+80/412540/campos_512_v4
+80/412588/campos_512_v4
+80/412652/campos_512_v4
+80/412732/campos_512_v4
+80/412850/campos_512_v4
+80/412900/campos_512_v4
+80/412905/campos_512_v4
+80/412916/campos_512_v4
+80/412987/campos_512_v4
+80/413052/campos_512_v4
+80/413190/campos_512_v4
+80/413222/campos_512_v4
+80/413398/campos_512_v4
+80/413443/campos_512_v4
+80/413482/campos_512_v4
+80/413506/campos_512_v4
+80/413596/campos_512_v4
+80/413605/campos_512_v4
+80/413664/campos_512_v4
+80/413670/campos_512_v4
+80/413782/campos_512_v4
+80/413786/campos_512_v4
+80/413909/campos_512_v4
+80/414036/campos_512_v4
+80/414068/campos_512_v4
+80/414108/campos_512_v4
+80/414157/campos_512_v4
+80/414185/campos_512_v4
+80/414206/campos_512_v4
+80/414219/campos_512_v4
+80/414223/campos_512_v4
+80/414262/campos_512_v4
+80/414380/campos_512_v4
+80/414445/campos_512_v4
+80/414448/campos_512_v4
+80/414530/campos_512_v4
+80/414539/campos_512_v4
+80/414569/campos_512_v4
+80/414666/campos_512_v4
+80/414813/campos_512_v4
+80/414845/campos_512_v4
+80/414979/campos_512_v4
+81/415019/campos_512_v4
+81/415144/campos_512_v4
+81/415381/campos_512_v4
+81/415468/campos_512_v4
+81/415533/campos_512_v4
+81/415579/campos_512_v4
+81/415612/campos_512_v4
+81/415728/campos_512_v4
+81/415739/campos_512_v4
+81/415992/campos_512_v4
+81/416094/campos_512_v4
+81/416158/campos_512_v4
+81/416225/campos_512_v4
+81/416277/campos_512_v4
+81/416294/campos_512_v4
+81/416330/campos_512_v4
+81/416399/campos_512_v4
+81/416497/campos_512_v4
+81/416572/campos_512_v4
+81/416625/campos_512_v4
+81/416640/campos_512_v4
+81/416643/campos_512_v4
+81/416657/campos_512_v4
+81/416692/campos_512_v4
+81/416698/campos_512_v4
+81/416750/campos_512_v4
+81/416814/campos_512_v4
+81/416888/campos_512_v4
+81/416914/campos_512_v4
+81/416931/campos_512_v4
+81/416964/campos_512_v4
+81/417180/campos_512_v4
+81/417188/campos_512_v4
+81/417278/campos_512_v4
+81/417366/campos_512_v4
+81/417371/campos_512_v4
+81/417421/campos_512_v4
+81/417424/campos_512_v4
+81/417477/campos_512_v4
+81/417653/campos_512_v4
+81/417658/campos_512_v4
+81/417662/campos_512_v4
+81/417671/campos_512_v4
+81/417751/campos_512_v4
+81/417753/campos_512_v4
+81/417814/campos_512_v4
+81/417835/campos_512_v4
+81/417942/campos_512_v4
+81/417979/campos_512_v4
+81/417997/campos_512_v4
+81/418001/campos_512_v4
+81/418015/campos_512_v4
+81/418019/campos_512_v4
+81/418026/campos_512_v4
+81/418048/campos_512_v4
+81/418084/campos_512_v4
+81/418152/campos_512_v4
+81/418188/campos_512_v4
+81/418216/campos_512_v4
+81/418231/campos_512_v4
+81/418265/campos_512_v4
+81/418332/campos_512_v4
+81/418357/campos_512_v4
+81/418462/campos_512_v4
+81/418473/campos_512_v4
+81/418530/campos_512_v4
+81/418617/campos_512_v4
+81/419025/campos_512_v4
+81/419113/campos_512_v4
+81/419165/campos_512_v4
+81/419186/campos_512_v4
+81/419275/campos_512_v4
+81/419276/campos_512_v4
+81/419318/campos_512_v4
+81/419613/campos_512_v4
+81/419647/campos_512_v4
+81/419766/campos_512_v4
+81/419820/campos_512_v4
+81/419846/campos_512_v4
+81/419927/campos_512_v4
+82/420277/campos_512_v4
+82/420492/campos_512_v4
+82/420549/campos_512_v4
+82/420557/campos_512_v4
+82/420578/campos_512_v4
+82/420603/campos_512_v4
+82/420718/campos_512_v4
+82/420789/campos_512_v4
+82/420911/campos_512_v4
+82/420919/campos_512_v4
+82/420946/campos_512_v4
+82/421065/campos_512_v4
+82/421129/campos_512_v4
+82/421152/campos_512_v4
+82/421178/campos_512_v4
+82/421223/campos_512_v4
+82/421252/campos_512_v4
+82/421302/campos_512_v4
+82/421307/campos_512_v4
+82/421371/campos_512_v4
+82/421443/campos_512_v4
+82/421462/campos_512_v4
+82/421501/campos_512_v4
+82/421514/campos_512_v4
+82/421862/campos_512_v4
+82/421937/campos_512_v4
+82/421956/campos_512_v4
+82/421979/campos_512_v4
+82/422090/campos_512_v4
+82/422096/campos_512_v4
+82/422136/campos_512_v4
+82/422162/campos_512_v4
+82/422270/campos_512_v4
+82/422317/campos_512_v4
+82/422394/campos_512_v4
+82/422439/campos_512_v4
+82/422464/campos_512_v4
+82/422517/campos_512_v4
+82/422591/campos_512_v4
+82/422733/campos_512_v4
+82/422760/campos_512_v4
+82/422832/campos_512_v4
+82/422933/campos_512_v4
+82/422952/campos_512_v4
+82/422958/campos_512_v4
+82/422982/campos_512_v4
+82/423049/campos_512_v4
+82/423069/campos_512_v4
+82/423104/campos_512_v4
+82/423127/campos_512_v4
+82/423246/campos_512_v4
+82/423277/campos_512_v4
+82/423358/campos_512_v4
+82/423367/campos_512_v4
+82/423521/campos_512_v4
+82/423524/campos_512_v4
+82/423550/campos_512_v4
+82/423555/campos_512_v4
+82/423576/campos_512_v4
+82/423703/campos_512_v4
+82/423707/campos_512_v4
+82/423711/campos_512_v4
+82/423829/campos_512_v4
+82/423862/campos_512_v4
+82/423884/campos_512_v4
+82/423885/campos_512_v4
+82/423912/campos_512_v4
+82/423932/campos_512_v4
+82/423950/campos_512_v4
+82/423964/campos_512_v4
+82/424021/campos_512_v4
+82/424097/campos_512_v4
+82/424098/campos_512_v4
+82/424121/campos_512_v4
+82/424205/campos_512_v4
+82/424285/campos_512_v4
+82/424385/campos_512_v4
+82/424400/campos_512_v4
+82/424410/campos_512_v4
+82/424451/campos_512_v4
+82/424683/campos_512_v4
+82/424791/campos_512_v4
+82/424866/campos_512_v4
+82/424991/campos_512_v4
+82/424997/campos_512_v4
+83/425069/campos_512_v4
+83/425197/campos_512_v4
+83/425348/campos_512_v4
+83/425367/campos_512_v4
+83/425454/campos_512_v4
+83/425472/campos_512_v4
+83/425544/campos_512_v4
+83/425566/campos_512_v4
+83/425729/campos_512_v4
+83/425924/campos_512_v4
+83/425964/campos_512_v4
+83/425978/campos_512_v4
+83/426055/campos_512_v4
+83/426091/campos_512_v4
+83/426348/campos_512_v4
+83/426368/campos_512_v4
+83/426622/campos_512_v4
+83/426711/campos_512_v4
+83/426802/campos_512_v4
+83/426816/campos_512_v4
+83/426829/campos_512_v4
+83/426967/campos_512_v4
+83/427037/campos_512_v4
+83/427048/campos_512_v4
+83/427050/campos_512_v4
+83/427085/campos_512_v4
+83/427172/campos_512_v4
+83/427183/campos_512_v4
+83/427200/campos_512_v4
+83/427249/campos_512_v4
+83/427407/campos_512_v4
+83/427418/campos_512_v4
+83/427519/campos_512_v4
+83/427546/campos_512_v4
+83/427595/campos_512_v4
+83/427753/campos_512_v4
+83/427968/campos_512_v4
+83/428052/campos_512_v4
+83/428160/campos_512_v4
+83/428183/campos_512_v4
+83/428204/campos_512_v4
+83/428296/campos_512_v4
+83/428333/campos_512_v4
+83/428400/campos_512_v4
+83/428411/campos_512_v4
+83/428413/campos_512_v4
+83/428500/campos_512_v4
+83/428612/campos_512_v4
+83/428717/campos_512_v4
+83/428786/campos_512_v4
+83/428957/campos_512_v4
+83/429055/campos_512_v4
+83/429057/campos_512_v4
+83/429072/campos_512_v4
+83/429290/campos_512_v4
+83/429297/campos_512_v4
+83/429307/campos_512_v4
+83/429371/campos_512_v4
+83/429457/campos_512_v4
+83/429510/campos_512_v4
+83/429540/campos_512_v4
+83/429581/campos_512_v4
+83/429623/campos_512_v4
+84/430039/campos_512_v4
+84/430065/campos_512_v4
+84/430067/campos_512_v4
+84/430083/campos_512_v4
+84/430085/campos_512_v4
+84/430158/campos_512_v4
+84/430161/campos_512_v4
+84/430172/campos_512_v4
+84/430209/campos_512_v4
+84/430279/campos_512_v4
+84/430375/campos_512_v4
+84/430422/campos_512_v4
+84/430439/campos_512_v4
+84/430514/campos_512_v4
+84/430727/campos_512_v4
+84/430832/campos_512_v4
+84/430936/campos_512_v4
+84/431057/campos_512_v4
+84/431059/campos_512_v4
+84/431186/campos_512_v4
+84/431240/campos_512_v4
+84/431281/campos_512_v4
+84/431302/campos_512_v4
+84/431391/campos_512_v4
+84/431468/campos_512_v4
+84/431530/campos_512_v4
+84/431589/campos_512_v4
+84/431674/campos_512_v4
+84/431690/campos_512_v4
+84/431721/campos_512_v4
+84/431728/campos_512_v4
+84/431799/campos_512_v4
+84/431821/campos_512_v4
+84/431874/campos_512_v4
+84/432001/campos_512_v4
+84/432060/campos_512_v4
+84/432083/campos_512_v4
+84/432104/campos_512_v4
+84/432109/campos_512_v4
+84/432141/campos_512_v4
+84/432512/campos_512_v4
+84/432514/campos_512_v4
+84/432531/campos_512_v4
+84/432536/campos_512_v4
+84/432537/campos_512_v4
+84/432545/campos_512_v4
+84/432631/campos_512_v4
+84/432644/campos_512_v4
+84/432720/campos_512_v4
+84/432722/campos_512_v4
+84/432743/campos_512_v4
+84/432745/campos_512_v4
+84/432860/campos_512_v4
+84/432978/campos_512_v4
+84/433086/campos_512_v4
+84/433113/campos_512_v4
+84/433141/campos_512_v4
+84/433225/campos_512_v4
+84/433366/campos_512_v4
+84/433373/campos_512_v4
+84/433387/campos_512_v4
+84/433434/campos_512_v4
+84/433466/campos_512_v4
+84/433514/campos_512_v4
+84/433557/campos_512_v4
+84/433575/campos_512_v4
+84/433639/campos_512_v4
+84/433691/campos_512_v4
+84/433700/campos_512_v4
+84/433707/campos_512_v4
+84/433710/campos_512_v4
+84/433842/campos_512_v4
+84/433883/campos_512_v4
+84/433886/campos_512_v4
+84/433940/campos_512_v4
+84/433947/campos_512_v4
+84/434066/campos_512_v4
+84/434090/campos_512_v4
+84/434109/campos_512_v4
+84/434110/campos_512_v4
+84/434226/campos_512_v4
+84/434304/campos_512_v4
+84/434351/campos_512_v4
+84/434502/campos_512_v4
+84/434514/campos_512_v4
+84/434685/campos_512_v4
+84/434696/campos_512_v4
+84/434724/campos_512_v4
+84/434914/campos_512_v4
+84/434924/campos_512_v4
+84/434944/campos_512_v4
+84/434951/campos_512_v4
+85/435020/campos_512_v4
+85/435079/campos_512_v4
+85/435155/campos_512_v4
+85/435303/campos_512_v4
+85/435455/campos_512_v4
+85/435470/campos_512_v4
+85/435474/campos_512_v4
+85/435476/campos_512_v4
+85/435601/campos_512_v4
+85/435658/campos_512_v4
+85/435674/campos_512_v4
+85/435738/campos_512_v4
+85/435744/campos_512_v4
+85/435761/campos_512_v4
+85/435965/campos_512_v4
+85/435995/campos_512_v4
+85/436001/campos_512_v4
+85/436027/campos_512_v4
+85/436089/campos_512_v4
+85/436130/campos_512_v4
+85/436163/campos_512_v4
+85/436168/campos_512_v4
+85/436524/campos_512_v4
+85/436734/campos_512_v4
+85/436785/campos_512_v4
+85/436869/campos_512_v4
+85/436876/campos_512_v4
+85/436878/campos_512_v4
+85/436955/campos_512_v4
+85/436994/campos_512_v4
+85/437050/campos_512_v4
+85/437062/campos_512_v4
+85/437189/campos_512_v4
+85/437195/campos_512_v4
+85/437214/campos_512_v4
+85/437222/campos_512_v4
+85/437237/campos_512_v4
+85/437301/campos_512_v4
+85/437311/campos_512_v4
+85/437340/campos_512_v4
+85/437418/campos_512_v4
+85/437468/campos_512_v4
+85/437532/campos_512_v4
+85/437657/campos_512_v4
+85/437688/campos_512_v4
+85/437774/campos_512_v4
+85/437968/campos_512_v4
+85/437985/campos_512_v4
+85/438011/campos_512_v4
+85/438014/campos_512_v4
+85/438077/campos_512_v4
+85/438106/campos_512_v4
+85/438118/campos_512_v4
+85/438163/campos_512_v4
+85/438181/campos_512_v4
+85/438355/campos_512_v4
+85/438383/campos_512_v4
+85/438446/campos_512_v4
+85/438492/campos_512_v4
+85/438603/campos_512_v4
+85/438646/campos_512_v4
+85/438690/campos_512_v4
+85/438741/campos_512_v4
+85/438858/campos_512_v4
+85/438969/campos_512_v4
+85/439015/campos_512_v4
+85/439056/campos_512_v4
+85/439100/campos_512_v4
+85/439118/campos_512_v4
+85/439129/campos_512_v4
+85/439141/campos_512_v4
+85/439150/campos_512_v4
+85/439153/campos_512_v4
+85/439184/campos_512_v4
+85/439202/campos_512_v4
+85/439235/campos_512_v4
+85/439246/campos_512_v4
+85/439282/campos_512_v4
+85/439283/campos_512_v4
+85/439298/campos_512_v4
+85/439450/campos_512_v4
+85/439478/campos_512_v4
+85/439572/campos_512_v4
+85/439579/campos_512_v4
+85/439583/campos_512_v4
+85/439675/campos_512_v4
+85/439737/campos_512_v4
+85/439780/campos_512_v4
+85/439846/campos_512_v4
+85/439919/campos_512_v4
+86/440003/campos_512_v4
+86/440040/campos_512_v4
+86/440087/campos_512_v4
+86/440128/campos_512_v4
+86/440276/campos_512_v4
+86/440345/campos_512_v4
+86/440369/campos_512_v4
+86/440388/campos_512_v4
+86/440442/campos_512_v4
+86/440490/campos_512_v4
+86/440545/campos_512_v4
+86/440560/campos_512_v4
+86/440564/campos_512_v4
+86/440634/campos_512_v4
+86/440732/campos_512_v4
+86/440785/campos_512_v4
+86/440792/campos_512_v4
+86/440833/campos_512_v4
+86/440862/campos_512_v4
+86/440886/campos_512_v4
+86/440922/campos_512_v4
+86/440957/campos_512_v4
+86/440987/campos_512_v4
+86/441087/campos_512_v4
+86/441202/campos_512_v4
+86/441225/campos_512_v4
+86/441434/campos_512_v4
+86/441447/campos_512_v4
+86/441449/campos_512_v4
+86/441514/campos_512_v4
+86/441542/campos_512_v4
+86/441596/campos_512_v4
+86/441609/campos_512_v4
+86/441656/campos_512_v4
+86/441729/campos_512_v4
+86/441826/campos_512_v4
+86/441832/campos_512_v4
+86/441904/campos_512_v4
+86/441945/campos_512_v4
+86/442065/campos_512_v4
+86/442090/campos_512_v4
+86/442136/campos_512_v4
+86/442169/campos_512_v4
+86/442183/campos_512_v4
+86/442219/campos_512_v4
+86/442271/campos_512_v4
+86/442276/campos_512_v4
+86/442343/campos_512_v4
+86/442395/campos_512_v4
+86/442477/campos_512_v4
+86/442478/campos_512_v4
+86/442489/campos_512_v4
+86/442501/campos_512_v4
+86/442578/campos_512_v4
+86/442593/campos_512_v4
+86/442600/campos_512_v4
+86/442613/campos_512_v4
+86/442695/campos_512_v4
+86/442704/campos_512_v4
+86/442757/campos_512_v4
+86/442825/campos_512_v4
+86/442863/campos_512_v4
+86/442895/campos_512_v4
+86/442946/campos_512_v4
+86/443009/campos_512_v4
+86/443010/campos_512_v4
+86/443029/campos_512_v4
+86/443174/campos_512_v4
+86/443197/campos_512_v4
+86/443246/campos_512_v4
+86/443276/campos_512_v4
+86/443379/campos_512_v4
+86/443455/campos_512_v4
+86/443536/campos_512_v4
+86/443539/campos_512_v4
+86/443643/campos_512_v4
+86/443704/campos_512_v4
+86/443787/campos_512_v4
+86/443833/campos_512_v4
+86/443835/campos_512_v4
+86/443943/campos_512_v4
+86/443977/campos_512_v4
+86/444010/campos_512_v4
+86/444025/campos_512_v4
+86/444118/campos_512_v4
+86/444305/campos_512_v4
+86/444371/campos_512_v4
+86/444374/campos_512_v4
+86/444412/campos_512_v4
+86/444447/campos_512_v4
+86/444549/campos_512_v4
+86/444564/campos_512_v4
+86/444630/campos_512_v4
+86/444716/campos_512_v4
+86/444729/campos_512_v4
+86/444860/campos_512_v4
+86/444861/campos_512_v4
+86/444877/campos_512_v4
+86/444879/campos_512_v4
+86/444922/campos_512_v4
+86/444939/campos_512_v4
+86/444945/campos_512_v4
+86/444968/campos_512_v4
+87/445003/campos_512_v4
+87/445026/campos_512_v4
+87/445106/campos_512_v4
+87/445187/campos_512_v4
+87/445193/campos_512_v4
+87/445206/campos_512_v4
+87/445210/campos_512_v4
+87/445241/campos_512_v4
+87/445318/campos_512_v4
+87/445387/campos_512_v4
+87/445437/campos_512_v4
+87/445580/campos_512_v4
+87/445783/campos_512_v4
+87/445787/campos_512_v4
+87/445827/campos_512_v4
+87/445908/campos_512_v4
+87/445938/campos_512_v4
+87/445960/campos_512_v4
+87/446131/campos_512_v4
+87/446153/campos_512_v4
+87/446171/campos_512_v4
+87/446234/campos_512_v4
+87/446302/campos_512_v4
+87/446431/campos_512_v4
+87/446432/campos_512_v4
+87/446517/campos_512_v4
+87/446531/campos_512_v4
+87/446545/campos_512_v4
+87/446600/campos_512_v4
+87/446629/campos_512_v4
+87/446665/campos_512_v4
+87/446773/campos_512_v4
+87/446812/campos_512_v4
+87/446960/campos_512_v4
+87/447045/campos_512_v4
+87/447072/campos_512_v4
+87/447103/campos_512_v4
+87/447108/campos_512_v4
+87/447114/campos_512_v4
+87/447152/campos_512_v4
+87/447231/campos_512_v4
+87/447437/campos_512_v4
+87/447438/campos_512_v4
+87/447511/campos_512_v4
+87/447575/campos_512_v4
+87/447604/campos_512_v4
+87/447616/campos_512_v4
+87/447633/campos_512_v4
+87/447709/campos_512_v4
+87/447718/campos_512_v4
+87/447734/campos_512_v4
+87/447768/campos_512_v4
+87/447861/campos_512_v4
+87/447934/campos_512_v4
+87/447951/campos_512_v4
+87/448016/campos_512_v4
+87/448024/campos_512_v4
+87/448041/campos_512_v4
+87/448126/campos_512_v4
+87/448182/campos_512_v4
+87/448240/campos_512_v4
+87/448493/campos_512_v4
+87/448551/campos_512_v4
+87/448594/campos_512_v4
+87/448602/campos_512_v4
+87/448646/campos_512_v4
+87/448756/campos_512_v4
+87/448792/campos_512_v4
+87/448853/campos_512_v4
+87/448902/campos_512_v4
+87/448968/campos_512_v4
+87/449047/campos_512_v4
+87/449065/campos_512_v4
+87/449079/campos_512_v4
+87/449093/campos_512_v4
+87/449174/campos_512_v4
+87/449185/campos_512_v4
+87/449202/campos_512_v4
+87/449292/campos_512_v4
+87/449313/campos_512_v4
+87/449316/campos_512_v4
+87/449424/campos_512_v4
+87/449547/campos_512_v4
+87/449593/campos_512_v4
+87/449637/campos_512_v4
+87/449704/campos_512_v4
+87/449737/campos_512_v4
+88/450051/campos_512_v4
+88/450196/campos_512_v4
+88/450231/campos_512_v4
+88/450237/campos_512_v4
+88/450310/campos_512_v4
+88/450330/campos_512_v4
+88/450341/campos_512_v4
+88/450358/campos_512_v4
+88/450374/campos_512_v4
+88/450383/campos_512_v4
+88/450395/campos_512_v4
+88/450435/campos_512_v4
+88/450479/campos_512_v4
+88/450566/campos_512_v4
+88/450607/campos_512_v4
+88/450773/campos_512_v4
+88/450797/campos_512_v4
+88/450827/campos_512_v4
+88/450833/campos_512_v4
+88/450884/campos_512_v4
+88/451055/campos_512_v4
+88/451067/campos_512_v4
+88/451112/campos_512_v4
+88/451166/campos_512_v4
+88/451170/campos_512_v4
+88/451209/campos_512_v4
+88/451256/campos_512_v4
+88/451305/campos_512_v4
+88/451321/campos_512_v4
+88/451405/campos_512_v4
+88/451461/campos_512_v4
+88/451510/campos_512_v4
+88/451525/campos_512_v4
+88/451528/campos_512_v4
+88/451540/campos_512_v4
+88/451569/campos_512_v4
+88/451655/campos_512_v4
+88/451823/campos_512_v4
+88/451999/campos_512_v4
+88/452057/campos_512_v4
+88/452131/campos_512_v4
+88/452140/campos_512_v4
+88/452169/campos_512_v4
+88/452200/campos_512_v4
+88/452211/campos_512_v4
+88/452248/campos_512_v4
+88/452265/campos_512_v4
+88/452298/campos_512_v4
+88/452308/campos_512_v4
+88/452322/campos_512_v4
+88/452629/campos_512_v4
+88/452668/campos_512_v4
+88/452698/campos_512_v4
+88/452751/campos_512_v4
+88/452805/campos_512_v4
+88/452950/campos_512_v4
+88/453035/campos_512_v4
+88/453090/campos_512_v4
+88/453134/campos_512_v4
+88/453212/campos_512_v4
+88/453281/campos_512_v4
+88/453314/campos_512_v4
+88/453324/campos_512_v4
+88/453365/campos_512_v4
+88/453383/campos_512_v4
+88/453406/campos_512_v4
+88/453420/campos_512_v4
+88/453451/campos_512_v4
+88/453773/campos_512_v4
+88/453933/campos_512_v4
+88/453991/campos_512_v4
+88/454043/campos_512_v4
+88/454056/campos_512_v4
+88/454093/campos_512_v4
+88/454170/campos_512_v4
+88/454194/campos_512_v4
+88/454246/campos_512_v4
+88/454328/campos_512_v4
+88/454358/campos_512_v4
+88/454538/campos_512_v4
+88/454568/campos_512_v4
+88/454596/campos_512_v4
+88/454625/campos_512_v4
+88/454708/campos_512_v4
+88/454742/campos_512_v4
+88/454751/campos_512_v4
+88/454789/campos_512_v4
+88/454878/campos_512_v4
+88/454922/campos_512_v4
+88/454954/campos_512_v4
+88/454981/campos_512_v4
+89/455100/campos_512_v4
+89/455145/campos_512_v4
+89/455170/campos_512_v4
+89/455226/campos_512_v4
+89/455265/campos_512_v4
+89/455348/campos_512_v4
+89/455364/campos_512_v4
+89/455411/campos_512_v4
+89/455413/campos_512_v4
+89/455420/campos_512_v4
+89/455425/campos_512_v4
+89/455497/campos_512_v4
+89/455574/campos_512_v4
+89/455589/campos_512_v4
+89/455606/campos_512_v4
+89/455652/campos_512_v4
+89/455662/campos_512_v4
+89/455688/campos_512_v4
+89/455709/campos_512_v4
+89/455835/campos_512_v4
+89/455842/campos_512_v4
+89/455854/campos_512_v4
+89/455876/campos_512_v4
+89/455932/campos_512_v4
+89/455946/campos_512_v4
+89/456107/campos_512_v4
+89/456121/campos_512_v4
+89/456191/campos_512_v4
+89/456208/campos_512_v4
+89/456336/campos_512_v4
+89/456345/campos_512_v4
+89/456510/campos_512_v4
+89/456554/campos_512_v4
+89/456594/campos_512_v4
+89/456633/campos_512_v4
+89/456869/campos_512_v4
+89/456887/campos_512_v4
+89/456913/campos_512_v4
+89/457058/campos_512_v4
+89/457078/campos_512_v4
+89/457159/campos_512_v4
+89/457175/campos_512_v4
+89/457178/campos_512_v4
+89/457183/campos_512_v4
+89/457214/campos_512_v4
+89/457224/campos_512_v4
+89/457239/campos_512_v4
+89/457322/campos_512_v4
+89/457349/campos_512_v4
+89/457387/campos_512_v4
+89/457396/campos_512_v4
+89/457423/campos_512_v4
+89/457498/campos_512_v4
+89/457629/campos_512_v4
+89/457645/campos_512_v4
+89/457661/campos_512_v4
+89/457728/campos_512_v4
+89/457733/campos_512_v4
+89/457814/campos_512_v4
+89/457849/campos_512_v4
+89/457872/campos_512_v4
+89/457891/campos_512_v4
+89/457921/campos_512_v4
+89/457943/campos_512_v4
+89/457959/campos_512_v4
+89/458102/campos_512_v4
+89/458148/campos_512_v4
+89/458167/campos_512_v4
+89/458226/campos_512_v4
+89/458310/campos_512_v4
+89/458311/campos_512_v4
+89/458332/campos_512_v4
+89/458375/campos_512_v4
+89/458404/campos_512_v4
+89/458441/campos_512_v4
+89/458522/campos_512_v4
+89/458543/campos_512_v4
+89/458627/campos_512_v4
+89/458635/campos_512_v4
+89/458657/campos_512_v4
+89/458776/campos_512_v4
+89/458789/campos_512_v4
+89/458795/campos_512_v4
+89/459154/campos_512_v4
+89/459245/campos_512_v4
+89/459303/campos_512_v4
+89/459319/campos_512_v4
+89/459347/campos_512_v4
+89/459411/campos_512_v4
+89/459473/campos_512_v4
+89/459534/campos_512_v4
+89/459543/campos_512_v4
+89/459621/campos_512_v4
+89/459630/campos_512_v4
+89/459670/campos_512_v4
+89/459681/campos_512_v4
+89/459706/campos_512_v4
+89/459811/campos_512_v4
+9/55015/campos_512_v4
+9/55146/campos_512_v4
+9/55157/campos_512_v4
+9/55204/campos_512_v4
+9/55330/campos_512_v4
+9/55523/campos_512_v4
+9/55707/campos_512_v4
+9/55719/campos_512_v4
+9/55883/campos_512_v4
+9/55897/campos_512_v4
+9/55954/campos_512_v4
+9/56023/campos_512_v4
+9/56024/campos_512_v4
+9/56245/campos_512_v4
+9/56354/campos_512_v4
+9/56479/campos_512_v4
+9/56698/campos_512_v4
+9/56762/campos_512_v4
+9/56879/campos_512_v4
+9/56922/campos_512_v4
+9/56979/campos_512_v4
+9/56986/campos_512_v4
+9/57059/campos_512_v4
+9/57306/campos_512_v4
+9/57482/campos_512_v4
+9/57713/campos_512_v4
+9/57840/campos_512_v4
+9/58030/campos_512_v4
+9/58816/campos_512_v4
+9/58824/campos_512_v4
+9/58837/campos_512_v4
+9/59248/campos_512_v4
+9/59442/campos_512_v4
+9/59670/campos_512_v4
+9/59763/campos_512_v4
+9/59820/campos_512_v4
+9/59861/campos_512_v4
+9/59900/campos_512_v4
+90/460125/campos_512_v4
+90/460222/campos_512_v4
+90/460269/campos_512_v4
+90/460334/campos_512_v4
+90/460400/campos_512_v4
+90/460486/campos_512_v4
+90/460567/campos_512_v4
+90/460625/campos_512_v4
+90/460687/campos_512_v4
+90/460745/campos_512_v4
+90/460768/campos_512_v4
+90/460778/campos_512_v4
+90/460825/campos_512_v4
+90/460834/campos_512_v4
+90/460839/campos_512_v4
+90/460841/campos_512_v4
+90/460848/campos_512_v4
+90/460856/campos_512_v4
+90/460902/campos_512_v4
+90/460953/campos_512_v4
+90/461046/campos_512_v4
+90/461145/campos_512_v4
+90/461180/campos_512_v4
+90/461217/campos_512_v4
+90/461284/campos_512_v4
+90/461346/campos_512_v4
+90/461426/campos_512_v4
+90/461558/campos_512_v4
+90/461641/campos_512_v4
+90/461694/campos_512_v4
+90/461704/campos_512_v4
+90/461830/campos_512_v4
+90/461959/campos_512_v4
+90/461985/campos_512_v4
+90/462047/campos_512_v4
+90/462103/campos_512_v4
+90/462201/campos_512_v4
+90/462224/campos_512_v4
+90/462326/campos_512_v4
+90/462351/campos_512_v4
+90/462357/campos_512_v4
+90/462394/campos_512_v4
+90/462434/campos_512_v4
+90/462580/campos_512_v4
+90/462626/campos_512_v4
+90/462660/campos_512_v4
+90/462697/campos_512_v4
+90/462713/campos_512_v4
+90/462740/campos_512_v4
+90/462824/campos_512_v4
+90/462878/campos_512_v4
+90/462905/campos_512_v4
+90/463205/campos_512_v4
+90/463206/campos_512_v4
+90/463272/campos_512_v4
+90/463302/campos_512_v4
+90/463318/campos_512_v4
+90/463332/campos_512_v4
+90/463338/campos_512_v4
+90/463393/campos_512_v4
+90/463425/campos_512_v4
+90/463427/campos_512_v4
+90/463478/campos_512_v4
+90/463505/campos_512_v4
+90/463513/campos_512_v4
+90/463580/campos_512_v4
+90/463591/campos_512_v4
+90/463613/campos_512_v4
+90/463706/campos_512_v4
+90/463803/campos_512_v4
+90/463958/campos_512_v4
+90/463978/campos_512_v4
+90/464024/campos_512_v4
+90/464169/campos_512_v4
+90/464210/campos_512_v4
+90/464223/campos_512_v4
+90/464298/campos_512_v4
+90/464455/campos_512_v4
+90/464512/campos_512_v4
+90/464516/campos_512_v4
+90/464562/campos_512_v4
+90/464801/campos_512_v4
+90/464805/campos_512_v4
+90/464895/campos_512_v4
+90/464913/campos_512_v4
+90/464914/campos_512_v4
+91/465022/campos_512_v4
+91/465037/campos_512_v4
+91/465072/campos_512_v4
+91/465470/campos_512_v4
+91/465476/campos_512_v4
+91/465528/campos_512_v4
+91/465548/campos_512_v4
+91/465577/campos_512_v4
+91/465649/campos_512_v4
+91/465659/campos_512_v4
+91/465669/campos_512_v4
+91/465721/campos_512_v4
+91/465731/campos_512_v4
+91/465822/campos_512_v4
+91/465835/campos_512_v4
+91/466008/campos_512_v4
+91/466122/campos_512_v4
+91/466382/campos_512_v4
+91/466404/campos_512_v4
+91/466435/campos_512_v4
+91/466578/campos_512_v4
+91/466616/campos_512_v4
+91/466664/campos_512_v4
+91/466703/campos_512_v4
+91/466709/campos_512_v4
+91/466816/campos_512_v4
+91/466920/campos_512_v4
+91/466948/campos_512_v4
+91/467079/campos_512_v4
+91/467281/campos_512_v4
+91/467304/campos_512_v4
+91/467325/campos_512_v4
+91/467406/campos_512_v4
+91/467475/campos_512_v4
+91/467517/campos_512_v4
+91/467780/campos_512_v4
+91/467883/campos_512_v4
+91/467902/campos_512_v4
+91/467916/campos_512_v4
+91/467921/campos_512_v4
+91/467981/campos_512_v4
+91/468117/campos_512_v4
+91/468175/campos_512_v4
+91/468177/campos_512_v4
+91/468190/campos_512_v4
+91/468271/campos_512_v4
+91/468285/campos_512_v4
+91/468314/campos_512_v4
+91/468324/campos_512_v4
+91/468430/campos_512_v4
+91/468510/campos_512_v4
+91/468557/campos_512_v4
+91/468572/campos_512_v4
+91/468716/campos_512_v4
+91/468752/campos_512_v4
+91/468890/campos_512_v4
+91/468974/campos_512_v4
+91/469039/campos_512_v4
+91/469088/campos_512_v4
+91/469156/campos_512_v4
+91/469171/campos_512_v4
+91/469212/campos_512_v4
+91/469222/campos_512_v4
+91/469340/campos_512_v4
+91/469365/campos_512_v4
+91/469419/campos_512_v4
+91/469468/campos_512_v4
+91/469487/campos_512_v4
+91/469504/campos_512_v4
+91/469646/campos_512_v4
+91/469651/campos_512_v4
+91/469752/campos_512_v4
+91/469803/campos_512_v4
+91/469823/campos_512_v4
+91/469900/campos_512_v4
+91/469941/campos_512_v4
+91/469944/campos_512_v4
+91/469983/campos_512_v4
+91/470001/campos_512_v4
+92/470070/campos_512_v4
+92/470098/campos_512_v4
+92/470101/campos_512_v4
+92/470120/campos_512_v4
+92/470187/campos_512_v4
+92/470293/campos_512_v4
+92/470307/campos_512_v4
+92/470382/campos_512_v4
+92/470389/campos_512_v4
+92/470406/campos_512_v4
+92/470435/campos_512_v4
+92/470454/campos_512_v4
+92/470477/campos_512_v4
+92/470580/campos_512_v4
+92/470593/campos_512_v4
+92/470602/campos_512_v4
+92/470607/campos_512_v4
+92/470610/campos_512_v4
+92/470748/campos_512_v4
+92/470767/campos_512_v4
+92/470891/campos_512_v4
+92/471003/campos_512_v4
+92/471042/campos_512_v4
+92/471043/campos_512_v4
+92/471087/campos_512_v4
+92/471117/campos_512_v4
+92/471201/campos_512_v4
+92/471202/campos_512_v4
+92/471205/campos_512_v4
+92/471224/campos_512_v4
+92/471344/campos_512_v4
+92/471411/campos_512_v4
+92/471483/campos_512_v4
+92/471559/campos_512_v4
+92/471606/campos_512_v4
+92/471624/campos_512_v4
+92/471627/campos_512_v4
+92/471722/campos_512_v4
+92/471735/campos_512_v4
+92/471777/campos_512_v4
+92/471891/campos_512_v4
+92/471899/campos_512_v4
+92/471923/campos_512_v4
+92/472077/campos_512_v4
+92/472110/campos_512_v4
+92/472139/campos_512_v4
+92/472258/campos_512_v4
+92/472266/campos_512_v4
+92/472298/campos_512_v4
+92/472348/campos_512_v4
+92/472386/campos_512_v4
+92/472450/campos_512_v4
+92/472542/campos_512_v4
+92/472636/campos_512_v4
+92/472692/campos_512_v4
+92/472717/campos_512_v4
+92/472785/campos_512_v4
+92/472792/campos_512_v4
+92/472987/campos_512_v4
+92/473001/campos_512_v4
+92/473011/campos_512_v4
+92/473019/campos_512_v4
+92/473030/campos_512_v4
+92/473059/campos_512_v4
+92/473081/campos_512_v4
+92/473103/campos_512_v4
+92/473225/campos_512_v4
+92/473270/campos_512_v4
+92/473290/campos_512_v4
+92/473303/campos_512_v4
+92/473323/campos_512_v4
+92/473472/campos_512_v4
+92/473477/campos_512_v4
+92/473482/campos_512_v4
+92/473578/campos_512_v4
+92/473837/campos_512_v4
+92/473909/campos_512_v4
+92/474060/campos_512_v4
+92/474066/campos_512_v4
+92/474083/campos_512_v4
+92/474090/campos_512_v4
+92/474123/campos_512_v4
+92/474245/campos_512_v4
+92/474269/campos_512_v4
+92/474281/campos_512_v4
+92/474395/campos_512_v4
+92/474446/campos_512_v4
+92/474450/campos_512_v4
+92/474631/campos_512_v4
+92/474688/campos_512_v4
+92/474712/campos_512_v4
+92/474798/campos_512_v4
+92/474801/campos_512_v4
+92/474912/campos_512_v4
+92/474954/campos_512_v4
+93/475166/campos_512_v4
+93/475181/campos_512_v4
+93/475193/campos_512_v4
+93/475263/campos_512_v4
+93/475312/campos_512_v4
+93/475322/campos_512_v4
+93/475347/campos_512_v4
+93/475409/campos_512_v4
+93/475467/campos_512_v4
+93/475470/campos_512_v4
+93/475482/campos_512_v4
+93/475490/campos_512_v4
+93/475503/campos_512_v4
+93/475702/campos_512_v4
+93/475726/campos_512_v4
+93/475745/campos_512_v4
+93/475758/campos_512_v4
+93/475798/campos_512_v4
+93/475810/campos_512_v4
+93/475861/campos_512_v4
+93/475930/campos_512_v4
+93/475983/campos_512_v4
+93/476051/campos_512_v4
+93/476071/campos_512_v4
+93/476076/campos_512_v4
+93/476122/campos_512_v4
+93/476211/campos_512_v4
+93/476263/campos_512_v4
+93/476347/campos_512_v4
+93/476528/campos_512_v4
+93/476529/campos_512_v4
+93/476557/campos_512_v4
+93/476579/campos_512_v4
+93/476662/campos_512_v4
+93/476681/campos_512_v4
+93/476706/campos_512_v4
+93/476763/campos_512_v4
+93/476797/campos_512_v4
+93/476879/campos_512_v4
+93/476967/campos_512_v4
+93/476969/campos_512_v4
+93/477015/campos_512_v4
+93/477047/campos_512_v4
+93/477052/campos_512_v4
+93/477076/campos_512_v4
+93/477096/campos_512_v4
+93/477121/campos_512_v4
+93/477306/campos_512_v4
+93/477333/campos_512_v4
+93/477540/campos_512_v4
+93/477561/campos_512_v4
+93/477652/campos_512_v4
+93/477788/campos_512_v4
+93/477803/campos_512_v4
+93/477828/campos_512_v4
+93/477844/campos_512_v4
+93/477890/campos_512_v4
+93/477904/campos_512_v4
+93/477981/campos_512_v4
+93/477984/campos_512_v4
+93/478010/campos_512_v4
+93/478045/campos_512_v4
+93/478072/campos_512_v4
+93/478198/campos_512_v4
+93/478266/campos_512_v4
+93/478316/campos_512_v4
+93/478396/campos_512_v4
+93/478462/campos_512_v4
+93/478515/campos_512_v4
+93/478539/campos_512_v4
+93/478559/campos_512_v4
+93/478602/campos_512_v4
+93/478642/campos_512_v4
+93/478679/campos_512_v4
+93/478731/campos_512_v4
+93/478820/campos_512_v4
+93/478861/campos_512_v4
+93/478921/campos_512_v4
+93/479125/campos_512_v4
+93/479206/campos_512_v4
+93/479330/campos_512_v4
+93/479420/campos_512_v4
+93/479632/campos_512_v4
+93/479873/campos_512_v4
+93/479880/campos_512_v4
+94/480010/campos_512_v4
+94/480045/campos_512_v4
+94/480167/campos_512_v4
+94/480240/campos_512_v4
+94/480352/campos_512_v4
+94/480408/campos_512_v4
+94/480494/campos_512_v4
+94/480550/campos_512_v4
+94/480696/campos_512_v4
+94/480706/campos_512_v4
+94/480991/campos_512_v4
+94/480999/campos_512_v4
+94/481078/campos_512_v4
+94/481082/campos_512_v4
+94/481206/campos_512_v4
+94/481216/campos_512_v4
+94/481270/campos_512_v4
+94/481530/campos_512_v4
+94/481576/campos_512_v4
+94/481657/campos_512_v4
+94/481666/campos_512_v4
+94/481684/campos_512_v4
+94/481721/campos_512_v4
+94/481778/campos_512_v4
+94/481806/campos_512_v4
+94/481871/campos_512_v4
+94/481872/campos_512_v4
+94/481922/campos_512_v4
+94/481936/campos_512_v4
+94/482233/campos_512_v4
+94/482234/campos_512_v4
+94/482406/campos_512_v4
+94/482415/campos_512_v4
+94/482433/campos_512_v4
+94/482443/campos_512_v4
+94/482534/campos_512_v4
+94/482580/campos_512_v4
+94/482644/campos_512_v4
+94/482753/campos_512_v4
+94/482789/campos_512_v4
+94/482899/campos_512_v4
+94/482929/campos_512_v4
+94/483008/campos_512_v4
+94/483050/campos_512_v4
+94/483100/campos_512_v4
+94/483149/campos_512_v4
+94/483152/campos_512_v4
+94/483159/campos_512_v4
+94/483183/campos_512_v4
+94/483212/campos_512_v4
+94/483232/campos_512_v4
+94/483248/campos_512_v4
+94/483258/campos_512_v4
+94/483312/campos_512_v4
+94/483400/campos_512_v4
+94/483436/campos_512_v4
+94/483449/campos_512_v4
+94/483451/campos_512_v4
+94/483486/campos_512_v4
+94/483642/campos_512_v4
+94/483662/campos_512_v4
+94/483735/campos_512_v4
+94/483778/campos_512_v4
+94/483809/campos_512_v4
+94/483828/campos_512_v4
+94/483929/campos_512_v4
+94/483973/campos_512_v4
+94/483995/campos_512_v4
+94/484058/campos_512_v4
+94/484090/campos_512_v4
+94/484094/campos_512_v4
+94/484138/campos_512_v4
+94/484179/campos_512_v4
+94/484227/campos_512_v4
+94/484232/campos_512_v4
+94/484284/campos_512_v4
+94/484293/campos_512_v4
+94/484429/campos_512_v4
+94/484445/campos_512_v4
+94/484480/campos_512_v4
+94/484712/campos_512_v4
+94/484774/campos_512_v4
+94/484888/campos_512_v4
+94/484909/campos_512_v4
+94/484945/campos_512_v4
+95/485009/campos_512_v4
+95/485026/campos_512_v4
+95/485065/campos_512_v4
+95/485071/campos_512_v4
+95/485072/campos_512_v4
+95/485074/campos_512_v4
+95/485089/campos_512_v4
+95/485104/campos_512_v4
+95/485137/campos_512_v4
+95/485141/campos_512_v4
+95/485168/campos_512_v4
+95/485174/campos_512_v4
+95/485196/campos_512_v4
+95/485219/campos_512_v4
+95/485286/campos_512_v4
+95/485382/campos_512_v4
+95/485411/campos_512_v4
+95/485436/campos_512_v4
+95/485546/campos_512_v4
+95/485576/campos_512_v4
+95/485739/campos_512_v4
+95/485744/campos_512_v4
+95/485790/campos_512_v4
+95/485844/campos_512_v4
+95/485868/campos_512_v4
+95/485895/campos_512_v4
+95/485915/campos_512_v4
+95/486047/campos_512_v4
+95/486101/campos_512_v4
+95/486127/campos_512_v4
+95/486255/campos_512_v4
+95/486319/campos_512_v4
+95/486402/campos_512_v4
+95/486420/campos_512_v4
+95/486471/campos_512_v4
+95/486488/campos_512_v4
+95/486507/campos_512_v4
+95/486539/campos_512_v4
+95/486552/campos_512_v4
+95/486694/campos_512_v4
+95/486701/campos_512_v4
+95/486935/campos_512_v4
+95/487020/campos_512_v4
+95/487095/campos_512_v4
+95/487349/campos_512_v4
+95/487365/campos_512_v4
+95/487421/campos_512_v4
+95/487453/campos_512_v4
+95/487566/campos_512_v4
+95/487579/campos_512_v4
+95/487619/campos_512_v4
+95/487681/campos_512_v4
+95/487966/campos_512_v4
+95/488052/campos_512_v4
+95/488060/campos_512_v4
+95/488153/campos_512_v4
+95/488385/campos_512_v4
+95/488448/campos_512_v4
+95/488456/campos_512_v4
+95/488461/campos_512_v4
+95/488480/campos_512_v4
+95/488573/campos_512_v4
+95/488588/campos_512_v4
+95/488731/campos_512_v4
+95/488769/campos_512_v4
+95/488817/campos_512_v4
+95/488842/campos_512_v4
+95/488901/campos_512_v4
+95/488914/campos_512_v4
+95/489056/campos_512_v4
+95/489154/campos_512_v4
+95/489365/campos_512_v4
+95/489372/campos_512_v4
+95/489418/campos_512_v4
+95/489426/campos_512_v4
+95/489535/campos_512_v4
+95/489578/campos_512_v4
+95/489627/campos_512_v4
+95/489773/campos_512_v4
+95/489793/campos_512_v4
+95/489805/campos_512_v4
+95/489893/campos_512_v4
+95/489954/campos_512_v4
+96/490010/campos_512_v4
+96/490074/campos_512_v4
+96/490128/campos_512_v4
+96/490162/campos_512_v4
+96/490221/campos_512_v4
+96/490277/campos_512_v4
+96/490288/campos_512_v4
+96/490326/campos_512_v4
+96/490344/campos_512_v4
+96/490379/campos_512_v4
+96/490437/campos_512_v4
+96/490499/campos_512_v4
+96/490541/campos_512_v4
+96/490564/campos_512_v4
+96/490628/campos_512_v4
+96/490645/campos_512_v4
+96/490648/campos_512_v4
+96/490663/campos_512_v4
+96/490691/campos_512_v4
+96/490728/campos_512_v4
+96/490761/campos_512_v4
+96/490966/campos_512_v4
+96/491052/campos_512_v4
+96/491058/campos_512_v4
+96/491067/campos_512_v4
+96/491069/campos_512_v4
+96/491100/campos_512_v4
+96/491101/campos_512_v4
+96/491198/campos_512_v4
+96/491216/campos_512_v4
+96/491239/campos_512_v4
+96/491343/campos_512_v4
+96/491402/campos_512_v4
+96/491490/campos_512_v4
+96/491508/campos_512_v4
+96/491549/campos_512_v4
+96/491624/campos_512_v4
+96/491637/campos_512_v4
+96/491663/campos_512_v4
+96/491734/campos_512_v4
+96/491748/campos_512_v4
+96/491759/campos_512_v4
+96/491777/campos_512_v4
+96/491897/campos_512_v4
+96/491954/campos_512_v4
+96/491975/campos_512_v4
+96/492094/campos_512_v4
+96/492123/campos_512_v4
+96/492180/campos_512_v4
+96/492204/campos_512_v4
+96/492251/campos_512_v4
+96/492295/campos_512_v4
+96/492363/campos_512_v4
+96/492386/campos_512_v4
+96/492399/campos_512_v4
+96/492401/campos_512_v4
+96/492482/campos_512_v4
+96/492519/campos_512_v4
+96/492608/campos_512_v4
+96/493011/campos_512_v4
+96/493032/campos_512_v4
+96/493038/campos_512_v4
+96/493065/campos_512_v4
+96/493069/campos_512_v4
+96/493083/campos_512_v4
+96/493158/campos_512_v4
+96/493349/campos_512_v4
+96/493427/campos_512_v4
+96/493435/campos_512_v4
+96/493450/campos_512_v4
+96/493470/campos_512_v4
+96/493485/campos_512_v4
+96/493506/campos_512_v4
+96/493552/campos_512_v4
+96/493593/campos_512_v4
+96/493613/campos_512_v4
+96/493655/campos_512_v4
+96/493687/campos_512_v4
+96/493796/campos_512_v4
+96/493837/campos_512_v4
+96/493973/campos_512_v4
+96/493994/campos_512_v4
+96/493999/campos_512_v4
+96/494033/campos_512_v4
+96/494060/campos_512_v4
+96/494118/campos_512_v4
+96/494124/campos_512_v4
+96/494131/campos_512_v4
+96/494133/campos_512_v4
+96/494149/campos_512_v4
+96/494191/campos_512_v4
+96/494202/campos_512_v4
+96/494212/campos_512_v4
+96/494261/campos_512_v4
+96/494280/campos_512_v4
+96/494314/campos_512_v4
+96/494322/campos_512_v4
+96/494410/campos_512_v4
+96/494416/campos_512_v4
+96/494486/campos_512_v4
+96/494651/campos_512_v4
+96/494667/campos_512_v4
+96/494696/campos_512_v4
+96/494762/campos_512_v4
+96/494849/campos_512_v4
+96/494866/campos_512_v4
+96/494867/campos_512_v4
+96/494890/campos_512_v4
+97/495015/campos_512_v4
+97/495045/campos_512_v4
+97/495142/campos_512_v4
+97/495161/campos_512_v4
+97/495276/campos_512_v4
+97/495280/campos_512_v4
+97/495302/campos_512_v4
+97/495356/campos_512_v4
+97/495360/campos_512_v4
+97/495361/campos_512_v4
+97/495394/campos_512_v4
+97/495437/campos_512_v4
+97/495503/campos_512_v4
+97/495614/campos_512_v4
+97/495645/campos_512_v4
+97/495705/campos_512_v4
+97/495717/campos_512_v4
+97/495850/campos_512_v4
+97/495893/campos_512_v4
+97/495920/campos_512_v4
+97/495931/campos_512_v4
+97/496007/campos_512_v4
+97/496039/campos_512_v4
+97/496176/campos_512_v4
+97/496210/campos_512_v4
+97/496232/campos_512_v4
+97/496267/campos_512_v4
+97/496335/campos_512_v4
+97/496358/campos_512_v4
+97/496404/campos_512_v4
+97/496443/campos_512_v4
+97/496493/campos_512_v4
+97/496592/campos_512_v4
+97/496727/campos_512_v4
+97/496743/campos_512_v4
+97/496795/campos_512_v4
+97/496796/campos_512_v4
+97/496830/campos_512_v4
+97/496857/campos_512_v4
+97/496871/campos_512_v4
+97/496892/campos_512_v4
+97/496912/campos_512_v4
+97/496929/campos_512_v4
+97/496951/campos_512_v4
+97/496967/campos_512_v4
+97/497065/campos_512_v4
+97/497200/campos_512_v4
+97/497257/campos_512_v4
+97/497266/campos_512_v4
+97/497337/campos_512_v4
+97/497425/campos_512_v4
+97/497482/campos_512_v4
+97/497492/campos_512_v4
+97/497542/campos_512_v4
+97/497559/campos_512_v4
+97/497681/campos_512_v4
+97/497755/campos_512_v4
+97/497803/campos_512_v4
+97/497829/campos_512_v4
+97/497976/campos_512_v4
+97/497977/campos_512_v4
+97/498097/campos_512_v4
+97/498122/campos_512_v4
+97/498145/campos_512_v4
+97/498174/campos_512_v4
+97/498222/campos_512_v4
+97/498253/campos_512_v4
+97/498317/campos_512_v4
+97/498350/campos_512_v4
+97/498367/campos_512_v4
+97/498447/campos_512_v4
+97/498495/campos_512_v4
+97/498544/campos_512_v4
+97/498545/campos_512_v4
+97/498552/campos_512_v4
+97/498671/campos_512_v4
+97/498673/campos_512_v4
+97/498676/campos_512_v4
+97/498708/campos_512_v4
+97/498748/campos_512_v4
+97/498763/campos_512_v4
+97/498783/campos_512_v4
+97/498784/campos_512_v4
+97/498822/campos_512_v4
+97/499032/campos_512_v4
+97/499052/campos_512_v4
+97/499081/campos_512_v4
+97/499083/campos_512_v4
+97/499204/campos_512_v4
+97/499236/campos_512_v4
+97/499320/campos_512_v4
+97/499325/campos_512_v4
+97/499349/campos_512_v4
+97/499372/campos_512_v4
+97/499377/campos_512_v4
+97/499408/campos_512_v4
+97/499421/campos_512_v4
+97/499464/campos_512_v4
+97/499682/campos_512_v4
+97/499688/campos_512_v4
+97/499783/campos_512_v4
+97/499789/campos_512_v4
+97/499840/campos_512_v4
+97/499902/campos_512_v4
+97/499917/campos_512_v4
+97/499960/campos_512_v4
+98/500295/campos_512_v4
+98/500308/campos_512_v4
+98/500398/campos_512_v4
+98/500441/campos_512_v4
+98/500490/campos_512_v4
+98/500562/campos_512_v4
+98/500582/campos_512_v4
+98/500641/campos_512_v4
+98/500649/campos_512_v4
+98/500716/campos_512_v4
+98/500725/campos_512_v4
+98/500774/campos_512_v4
+98/500867/campos_512_v4
+98/500883/campos_512_v4
+98/501123/campos_512_v4
+98/501130/campos_512_v4
+98/501132/campos_512_v4
+98/501150/campos_512_v4
+98/501161/campos_512_v4
+98/501311/campos_512_v4
+98/501312/campos_512_v4
+98/501348/campos_512_v4
+98/501439/campos_512_v4
+98/501448/campos_512_v4
+98/501475/campos_512_v4
+98/501595/campos_512_v4
+98/501626/campos_512_v4
+98/501633/campos_512_v4
+98/501788/campos_512_v4
+98/501799/campos_512_v4
+98/501862/campos_512_v4
+98/501926/campos_512_v4
+98/502085/campos_512_v4
+98/502120/campos_512_v4
+98/502179/campos_512_v4
+98/502286/campos_512_v4
+98/502317/campos_512_v4
+98/502351/campos_512_v4
+98/502390/campos_512_v4
+98/502666/campos_512_v4
+98/502668/campos_512_v4
+98/502682/campos_512_v4
+98/502712/campos_512_v4
+98/502808/campos_512_v4
+98/502819/campos_512_v4
+98/502828/campos_512_v4
+98/502924/campos_512_v4
+98/502942/campos_512_v4
+98/502956/campos_512_v4
+98/502974/campos_512_v4
+98/503039/campos_512_v4
+98/503057/campos_512_v4
+98/503059/campos_512_v4
+98/503194/campos_512_v4
+98/503208/campos_512_v4
+98/503238/campos_512_v4
+98/503285/campos_512_v4
+98/503474/campos_512_v4
+98/503518/campos_512_v4
+98/503573/campos_512_v4
+98/503664/campos_512_v4
+98/503665/campos_512_v4
+98/503677/campos_512_v4
+98/503698/campos_512_v4
+98/503729/campos_512_v4
+98/503736/campos_512_v4
+98/503753/campos_512_v4
+98/503796/campos_512_v4
+98/503823/campos_512_v4
+98/503884/campos_512_v4
+98/504123/campos_512_v4
+98/504138/campos_512_v4
+98/504144/campos_512_v4
+98/504204/campos_512_v4
+98/504309/campos_512_v4
+98/504316/campos_512_v4
+98/504497/campos_512_v4
+98/504529/campos_512_v4
+98/504571/campos_512_v4
+98/504636/campos_512_v4
+98/504638/campos_512_v4
+98/504704/campos_512_v4
+98/504736/campos_512_v4
+98/504825/campos_512_v4
+98/504846/campos_512_v4
+98/504852/campos_512_v4
+98/504908/campos_512_v4
+98/504921/campos_512_v4
+98/504986/campos_512_v4
+98/504996/campos_512_v4
+99/505103/campos_512_v4
+99/505122/campos_512_v4
+99/505123/campos_512_v4
+99/505191/campos_512_v4
+99/505256/campos_512_v4
+99/505299/campos_512_v4
+99/505488/campos_512_v4
+99/505499/campos_512_v4
+99/505522/campos_512_v4
+99/505580/campos_512_v4
+99/505603/campos_512_v4
+99/505725/campos_512_v4
+99/505744/campos_512_v4
+99/505768/campos_512_v4
+99/505802/campos_512_v4
+99/505908/campos_512_v4
+99/506100/campos_512_v4
+99/506115/campos_512_v4
+99/506261/campos_512_v4
+99/506271/campos_512_v4
+99/506392/campos_512_v4
+99/506413/campos_512_v4
+99/506488/campos_512_v4
+99/506526/campos_512_v4
+99/506765/campos_512_v4
+99/506831/campos_512_v4
+99/506993/campos_512_v4
+99/507045/campos_512_v4
+99/507173/campos_512_v4
+99/507210/campos_512_v4
+99/507327/campos_512_v4
+99/507334/campos_512_v4
+99/507355/campos_512_v4
+99/507408/campos_512_v4
+99/507505/campos_512_v4
+99/507554/campos_512_v4
+99/507571/campos_512_v4
+99/507652/campos_512_v4
+99/507688/campos_512_v4
+99/507793/campos_512_v4
+99/507889/campos_512_v4
+99/507933/campos_512_v4
+99/508060/campos_512_v4
+99/508081/campos_512_v4
+99/508111/campos_512_v4
+99/508156/campos_512_v4
+99/508172/campos_512_v4
+99/508264/campos_512_v4
+99/508277/campos_512_v4
+99/508337/campos_512_v4
+99/508380/campos_512_v4
+99/508395/campos_512_v4
+99/508474/campos_512_v4
+99/508522/campos_512_v4
+99/508550/campos_512_v4
+99/508619/campos_512_v4
+99/508773/campos_512_v4
+99/508889/campos_512_v4
+99/509127/campos_512_v4
+99/509184/campos_512_v4
+99/509208/campos_512_v4
+99/509230/campos_512_v4
+99/509301/campos_512_v4
+99/509348/campos_512_v4
+99/509395/campos_512_v4
+99/509400/campos_512_v4
+99/509534/campos_512_v4
+99/509597/campos_512_v4
+99/509650/campos_512_v4
+99/509651/campos_512_v4
+99/509663/campos_512_v4
+99/509716/campos_512_v4
+99/509813/campos_512_v4
+99/509836/campos_512_v4
+99/509889/campos_512_v4
diff --git a/shell_scripts/raw_img_list/BuildingsOutdoor.txt b/shell_scripts/raw_img_list/BuildingsOutdoor.txt
new file mode 100644
index 0000000000000000000000000000000000000000..fb010df1f96095a9794f8c9f7e697690b62fc9ff
--- /dev/null
+++ b/shell_scripts/raw_img_list/BuildingsOutdoor.txt
@@ -0,0 +1,25681 @@
+0/10039/campos_512_v4
+0/10060/campos_512_v4
+0/10165/campos_512_v4
+0/10267/campos_512_v4
+0/10300/campos_512_v4
+0/10340/campos_512_v4
+0/10525/campos_512_v4
+0/10580/campos_512_v4
+0/10609/campos_512_v4
+0/10736/campos_512_v4
+0/10860/campos_512_v4
+0/10932/campos_512_v4
+0/11069/campos_512_v4
+0/11079/campos_512_v4
+0/11182/campos_512_v4
+0/11189/campos_512_v4
+0/11212/campos_512_v4
+0/11237/campos_512_v4
+0/11242/campos_512_v4
+0/11243/campos_512_v4
+0/11251/campos_512_v4
+0/11254/campos_512_v4
+0/11316/campos_512_v4
+0/11351/campos_512_v4
+0/11372/campos_512_v4
+0/11382/campos_512_v4
+0/11413/campos_512_v4
+0/11428/campos_512_v4
+0/11445/campos_512_v4
+0/11449/campos_512_v4
+0/11457/campos_512_v4
+0/11467/campos_512_v4
+0/11471/campos_512_v4
+0/11487/campos_512_v4
+0/11537/campos_512_v4
+0/11570/campos_512_v4
+0/11639/campos_512_v4
+0/11663/campos_512_v4
+0/11727/campos_512_v4
+0/11839/campos_512_v4
+0/11868/campos_512_v4
+0/11904/campos_512_v4
+0/11912/campos_512_v4
+0/11913/campos_512_v4
+0/11920/campos_512_v4
+0/11963/campos_512_v4
+0/12045/campos_512_v4
+0/12067/campos_512_v4
+0/12092/campos_512_v4
+0/12148/campos_512_v4
+0/12219/campos_512_v4
+0/12240/campos_512_v4
+0/12242/campos_512_v4
+0/12243/campos_512_v4
+0/12333/campos_512_v4
+0/12335/campos_512_v4
+0/12402/campos_512_v4
+0/12416/campos_512_v4
+0/12457/campos_512_v4
+0/12464/campos_512_v4
+0/12468/campos_512_v4
+0/12574/campos_512_v4
+0/12606/campos_512_v4
+0/12712/campos_512_v4
+0/12732/campos_512_v4
+0/12769/campos_512_v4
+0/12770/campos_512_v4
+0/12787/campos_512_v4
+0/12790/campos_512_v4
+0/12800/campos_512_v4
+0/12807/campos_512_v4
+0/12854/campos_512_v4
+0/12948/campos_512_v4
+0/12953/campos_512_v4
+0/12971/campos_512_v4
+0/13000/campos_512_v4
+0/13007/campos_512_v4
+0/13008/campos_512_v4
+0/13038/campos_512_v4
+0/13109/campos_512_v4
+0/13110/campos_512_v4
+0/13135/campos_512_v4
+0/13153/campos_512_v4
+0/13205/campos_512_v4
+0/13232/campos_512_v4
+0/13272/campos_512_v4
+0/13280/campos_512_v4
+0/13284/campos_512_v4
+0/13300/campos_512_v4
+0/13305/campos_512_v4
+0/13326/campos_512_v4
+0/13388/campos_512_v4
+0/13394/campos_512_v4
+0/13426/campos_512_v4
+0/13444/campos_512_v4
+0/13449/campos_512_v4
+0/13450/campos_512_v4
+0/13482/campos_512_v4
+0/13488/campos_512_v4
+0/13517/campos_512_v4
+0/13529/campos_512_v4
+0/13541/campos_512_v4
+0/13549/campos_512_v4
+0/13564/campos_512_v4
+0/13568/campos_512_v4
+0/13575/campos_512_v4
+0/13592/campos_512_v4
+0/13595/campos_512_v4
+0/13615/campos_512_v4
+0/13617/campos_512_v4
+0/13636/campos_512_v4
+0/13664/campos_512_v4
+0/13673/campos_512_v4
+0/13679/campos_512_v4
+0/13745/campos_512_v4
+0/13764/campos_512_v4
+0/13778/campos_512_v4
+0/13789/campos_512_v4
+0/13859/campos_512_v4
+0/13882/campos_512_v4
+0/13891/campos_512_v4
+0/13968/campos_512_v4
+0/13970/campos_512_v4
+0/14014/campos_512_v4
+0/14057/campos_512_v4
+0/14091/campos_512_v4
+0/14120/campos_512_v4
+0/14164/campos_512_v4
+0/14165/campos_512_v4
+0/14177/campos_512_v4
+0/14183/campos_512_v4
+0/14185/campos_512_v4
+0/14196/campos_512_v4
+0/14248/campos_512_v4
+0/14279/campos_512_v4
+0/14287/campos_512_v4
+0/14328/campos_512_v4
+0/14337/campos_512_v4
+0/14388/campos_512_v4
+0/14394/campos_512_v4
+0/14408/campos_512_v4
+0/14422/campos_512_v4
+0/14479/campos_512_v4
+0/14482/campos_512_v4
+0/14486/campos_512_v4
+0/14609/campos_512_v4
+0/14673/campos_512_v4
+0/14706/campos_512_v4
+0/14751/campos_512_v4
+0/14800/campos_512_v4
+0/14804/campos_512_v4
+0/14809/campos_512_v4
+0/14812/campos_512_v4
+0/14821/campos_512_v4
+0/14837/campos_512_v4
+0/14838/campos_512_v4
+0/14900/campos_512_v4
+0/14928/campos_512_v4
+0/14938/campos_512_v4
+0/14948/campos_512_v4
+0/14956/campos_512_v4
+0/14981/campos_512_v4
+0/14990/campos_512_v4
+1/15013/campos_512_v4
+1/15034/campos_512_v4
+1/15049/campos_512_v4
+1/15069/campos_512_v4
+1/15079/campos_512_v4
+1/15109/campos_512_v4
+1/15120/campos_512_v4
+1/15162/campos_512_v4
+1/15172/campos_512_v4
+1/15207/campos_512_v4
+1/15211/campos_512_v4
+1/15218/campos_512_v4
+1/15292/campos_512_v4
+1/15296/campos_512_v4
+1/15325/campos_512_v4
+1/15353/campos_512_v4
+1/15355/campos_512_v4
+1/15369/campos_512_v4
+1/15380/campos_512_v4
+1/15400/campos_512_v4
+1/15433/campos_512_v4
+1/15444/campos_512_v4
+1/15537/campos_512_v4
+1/15545/campos_512_v4
+1/15547/campos_512_v4
+1/15576/campos_512_v4
+1/15582/campos_512_v4
+1/15606/campos_512_v4
+1/15716/campos_512_v4
+1/15718/campos_512_v4
+1/15722/campos_512_v4
+1/15839/campos_512_v4
+1/15863/campos_512_v4
+1/15867/campos_512_v4
+1/15916/campos_512_v4
+1/15946/campos_512_v4
+1/15970/campos_512_v4
+1/15974/campos_512_v4
+1/15994/campos_512_v4
+1/16061/campos_512_v4
+1/16169/campos_512_v4
+1/16179/campos_512_v4
+1/16234/campos_512_v4
+1/16247/campos_512_v4
+1/16262/campos_512_v4
+1/16264/campos_512_v4
+1/16284/campos_512_v4
+1/16296/campos_512_v4
+1/16327/campos_512_v4
+1/16376/campos_512_v4
+1/16397/campos_512_v4
+1/16422/campos_512_v4
+1/16443/campos_512_v4
+1/16465/campos_512_v4
+1/16472/campos_512_v4
+1/16499/campos_512_v4
+1/16538/campos_512_v4
+1/16548/campos_512_v4
+1/16561/campos_512_v4
+1/16602/campos_512_v4
+1/16635/campos_512_v4
+1/16641/campos_512_v4
+1/16663/campos_512_v4
+1/16714/campos_512_v4
+1/16715/campos_512_v4
+1/16718/campos_512_v4
+1/16750/campos_512_v4
+1/16821/campos_512_v4
+1/16836/campos_512_v4
+1/16920/campos_512_v4
+1/16947/campos_512_v4
+1/16966/campos_512_v4
+1/16980/campos_512_v4
+1/17076/campos_512_v4
+1/17088/campos_512_v4
+1/17165/campos_512_v4
+1/17178/campos_512_v4
+1/17227/campos_512_v4
+1/17241/campos_512_v4
+1/17244/campos_512_v4
+1/17259/campos_512_v4
+1/17285/campos_512_v4
+1/17295/campos_512_v4
+1/17328/campos_512_v4
+1/17334/campos_512_v4
+1/17361/campos_512_v4
+1/17446/campos_512_v4
+1/17467/campos_512_v4
+1/17478/campos_512_v4
+1/17490/campos_512_v4
+1/17593/campos_512_v4
+1/17664/campos_512_v4
+1/17687/campos_512_v4
+1/17712/campos_512_v4
+1/17738/campos_512_v4
+1/17744/campos_512_v4
+1/17797/campos_512_v4
+1/17862/campos_512_v4
+1/17900/campos_512_v4
+1/17943/campos_512_v4
+1/18001/campos_512_v4
+1/18022/campos_512_v4
+1/18037/campos_512_v4
+1/18085/campos_512_v4
+1/18118/campos_512_v4
+1/18125/campos_512_v4
+1/18191/campos_512_v4
+1/18228/campos_512_v4
+1/18242/campos_512_v4
+1/18290/campos_512_v4
+1/18336/campos_512_v4
+1/18343/campos_512_v4
+1/18346/campos_512_v4
+1/18388/campos_512_v4
+1/18440/campos_512_v4
+1/18454/campos_512_v4
+1/18455/campos_512_v4
+1/18468/campos_512_v4
+1/18495/campos_512_v4
+1/18640/campos_512_v4
+1/18674/campos_512_v4
+1/18707/campos_512_v4
+1/18710/campos_512_v4
+1/18718/campos_512_v4
+1/18749/campos_512_v4
+1/18800/campos_512_v4
+1/18804/campos_512_v4
+1/18903/campos_512_v4
+1/18911/campos_512_v4
+1/18919/campos_512_v4
+1/18925/campos_512_v4
+1/18985/campos_512_v4
+1/18991/campos_512_v4
+1/19000/campos_512_v4
+1/19019/campos_512_v4
+1/19147/campos_512_v4
+1/19199/campos_512_v4
+1/19275/campos_512_v4
+1/19349/campos_512_v4
+1/19351/campos_512_v4
+1/19365/campos_512_v4
+1/19446/campos_512_v4
+1/19531/campos_512_v4
+1/19556/campos_512_v4
+1/19562/campos_512_v4
+1/19564/campos_512_v4
+1/19629/campos_512_v4
+1/19640/campos_512_v4
+1/19683/campos_512_v4
+1/19684/campos_512_v4
+1/19723/campos_512_v4
+1/19736/campos_512_v4
+1/19750/campos_512_v4
+1/19756/campos_512_v4
+1/19837/campos_512_v4
+1/19840/campos_512_v4
+1/19848/campos_512_v4
+1/19903/campos_512_v4
+1/19929/campos_512_v4
+1/19944/campos_512_v4
+1/19950/campos_512_v4
+1/19956/campos_512_v4
+1/19963/campos_512_v4
+1/19966/campos_512_v4
+1/19997/campos_512_v4
+10/60012/campos_512_v4
+10/60043/campos_512_v4
+10/60046/campos_512_v4
+10/60186/campos_512_v4
+10/60204/campos_512_v4
+10/60229/campos_512_v4
+10/60294/campos_512_v4
+10/60330/campos_512_v4
+10/60359/campos_512_v4
+10/60400/campos_512_v4
+10/60403/campos_512_v4
+10/60425/campos_512_v4
+10/60468/campos_512_v4
+10/60492/campos_512_v4
+10/60527/campos_512_v4
+10/60530/campos_512_v4
+10/60546/campos_512_v4
+10/60556/campos_512_v4
+10/60595/campos_512_v4
+10/60598/campos_512_v4
+10/60608/campos_512_v4
+10/60625/campos_512_v4
+10/60640/campos_512_v4
+10/60648/campos_512_v4
+10/60653/campos_512_v4
+10/60717/campos_512_v4
+10/60721/campos_512_v4
+10/60743/campos_512_v4
+10/60749/campos_512_v4
+10/60767/campos_512_v4
+10/60780/campos_512_v4
+10/60799/campos_512_v4
+10/60807/campos_512_v4
+10/60810/campos_512_v4
+10/60834/campos_512_v4
+10/60906/campos_512_v4
+10/60919/campos_512_v4
+10/61002/campos_512_v4
+10/61022/campos_512_v4
+10/61049/campos_512_v4
+10/61063/campos_512_v4
+10/61065/campos_512_v4
+10/61078/campos_512_v4
+10/61080/campos_512_v4
+10/61085/campos_512_v4
+10/61090/campos_512_v4
+10/61114/campos_512_v4
+10/61134/campos_512_v4
+10/61155/campos_512_v4
+10/61197/campos_512_v4
+10/61247/campos_512_v4
+10/61286/campos_512_v4
+10/61291/campos_512_v4
+10/61314/campos_512_v4
+10/61321/campos_512_v4
+10/61352/campos_512_v4
+10/61370/campos_512_v4
+10/61416/campos_512_v4
+10/61419/campos_512_v4
+10/61446/campos_512_v4
+10/61477/campos_512_v4
+10/61487/campos_512_v4
+10/61503/campos_512_v4
+10/61523/campos_512_v4
+10/61533/campos_512_v4
+10/61538/campos_512_v4
+10/61539/campos_512_v4
+10/61574/campos_512_v4
+10/61593/campos_512_v4
+10/61600/campos_512_v4
+10/61611/campos_512_v4
+10/61612/campos_512_v4
+10/61717/campos_512_v4
+10/61724/campos_512_v4
+10/61736/campos_512_v4
+10/61742/campos_512_v4
+10/61830/campos_512_v4
+10/61831/campos_512_v4
+10/61889/campos_512_v4
+10/61920/campos_512_v4
+10/61931/campos_512_v4
+10/61949/campos_512_v4
+10/61976/campos_512_v4
+10/62005/campos_512_v4
+10/62007/campos_512_v4
+10/62025/campos_512_v4
+10/62034/campos_512_v4
+10/62036/campos_512_v4
+10/62039/campos_512_v4
+10/62095/campos_512_v4
+10/62131/campos_512_v4
+10/62212/campos_512_v4
+10/62230/campos_512_v4
+10/62254/campos_512_v4
+10/62267/campos_512_v4
+10/62283/campos_512_v4
+10/62382/campos_512_v4
+10/62401/campos_512_v4
+10/62454/campos_512_v4
+10/62467/campos_512_v4
+10/62490/campos_512_v4
+10/62511/campos_512_v4
+10/62517/campos_512_v4
+10/62579/campos_512_v4
+10/62615/campos_512_v4
+10/62634/campos_512_v4
+10/62654/campos_512_v4
+10/62665/campos_512_v4
+10/62674/campos_512_v4
+10/62677/campos_512_v4
+10/62680/campos_512_v4
+10/62686/campos_512_v4
+10/62695/campos_512_v4
+10/62712/campos_512_v4
+10/62723/campos_512_v4
+10/62725/campos_512_v4
+10/62731/campos_512_v4
+10/62732/campos_512_v4
+10/62759/campos_512_v4
+10/62761/campos_512_v4
+10/62776/campos_512_v4
+10/62827/campos_512_v4
+10/62829/campos_512_v4
+10/62857/campos_512_v4
+10/62861/campos_512_v4
+10/62880/campos_512_v4
+10/62914/campos_512_v4
+10/62920/campos_512_v4
+10/62923/campos_512_v4
+10/62975/campos_512_v4
+10/63010/campos_512_v4
+10/63013/campos_512_v4
+10/63033/campos_512_v4
+10/63037/campos_512_v4
+10/63059/campos_512_v4
+10/63127/campos_512_v4
+10/63138/campos_512_v4
+10/63175/campos_512_v4
+10/63215/campos_512_v4
+10/63219/campos_512_v4
+10/63240/campos_512_v4
+10/63270/campos_512_v4
+10/63319/campos_512_v4
+10/63329/campos_512_v4
+10/63335/campos_512_v4
+10/63374/campos_512_v4
+10/63388/campos_512_v4
+10/63403/campos_512_v4
+10/63410/campos_512_v4
+10/63429/campos_512_v4
+10/63434/campos_512_v4
+10/63501/campos_512_v4
+10/63503/campos_512_v4
+10/63511/campos_512_v4
+10/63552/campos_512_v4
+10/63554/campos_512_v4
+10/63564/campos_512_v4
+10/63641/campos_512_v4
+10/63669/campos_512_v4
+10/63673/campos_512_v4
+10/63674/campos_512_v4
+10/63713/campos_512_v4
+10/63800/campos_512_v4
+10/63818/campos_512_v4
+10/63832/campos_512_v4
+10/63937/campos_512_v4
+10/63938/campos_512_v4
+10/63975/campos_512_v4
+10/63984/campos_512_v4
+10/64006/campos_512_v4
+10/64042/campos_512_v4
+10/64043/campos_512_v4
+10/64052/campos_512_v4
+10/64066/campos_512_v4
+10/64115/campos_512_v4
+10/64134/campos_512_v4
+10/64216/campos_512_v4
+10/64224/campos_512_v4
+10/64241/campos_512_v4
+10/64249/campos_512_v4
+10/64254/campos_512_v4
+10/64292/campos_512_v4
+10/64293/campos_512_v4
+10/64304/campos_512_v4
+10/64317/campos_512_v4
+10/64321/campos_512_v4
+10/64340/campos_512_v4
+10/64394/campos_512_v4
+10/64406/campos_512_v4
+10/64416/campos_512_v4
+10/64419/campos_512_v4
+10/64435/campos_512_v4
+10/64489/campos_512_v4
+10/64539/campos_512_v4
+10/64560/campos_512_v4
+10/64617/campos_512_v4
+10/64627/campos_512_v4
+10/64676/campos_512_v4
+10/64684/campos_512_v4
+10/64713/campos_512_v4
+10/64714/campos_512_v4
+10/64724/campos_512_v4
+10/64738/campos_512_v4
+10/64745/campos_512_v4
+10/64746/campos_512_v4
+10/64753/campos_512_v4
+10/64761/campos_512_v4
+10/64790/campos_512_v4
+10/64838/campos_512_v4
+10/64912/campos_512_v4
+10/64959/campos_512_v4
+10/64977/campos_512_v4
+100/510117/campos_512_v4
+100/510183/campos_512_v4
+100/510188/campos_512_v4
+100/510196/campos_512_v4
+100/510200/campos_512_v4
+100/510236/campos_512_v4
+100/510305/campos_512_v4
+100/510338/campos_512_v4
+100/510362/campos_512_v4
+100/510365/campos_512_v4
+100/510376/campos_512_v4
+100/510400/campos_512_v4
+100/510434/campos_512_v4
+100/510443/campos_512_v4
+100/510486/campos_512_v4
+100/510503/campos_512_v4
+100/510522/campos_512_v4
+100/510541/campos_512_v4
+100/510578/campos_512_v4
+100/510591/campos_512_v4
+100/510706/campos_512_v4
+100/510762/campos_512_v4
+100/510764/campos_512_v4
+100/510768/campos_512_v4
+100/510827/campos_512_v4
+100/510828/campos_512_v4
+100/510866/campos_512_v4
+100/510876/campos_512_v4
+100/510946/campos_512_v4
+100/510948/campos_512_v4
+100/510991/campos_512_v4
+100/510993/campos_512_v4
+100/511014/campos_512_v4
+100/511016/campos_512_v4
+100/511037/campos_512_v4
+100/511071/campos_512_v4
+100/511075/campos_512_v4
+100/511173/campos_512_v4
+100/511176/campos_512_v4
+100/511177/campos_512_v4
+100/511186/campos_512_v4
+100/511187/campos_512_v4
+100/511210/campos_512_v4
+100/511274/campos_512_v4
+100/511283/campos_512_v4
+100/511303/campos_512_v4
+100/511321/campos_512_v4
+100/511326/campos_512_v4
+100/511375/campos_512_v4
+100/511424/campos_512_v4
+100/511457/campos_512_v4
+100/511461/campos_512_v4
+100/511483/campos_512_v4
+100/511487/campos_512_v4
+100/511521/campos_512_v4
+100/511532/campos_512_v4
+100/511536/campos_512_v4
+100/511550/campos_512_v4
+100/511557/campos_512_v4
+100/511591/campos_512_v4
+100/511596/campos_512_v4
+100/511618/campos_512_v4
+100/511691/campos_512_v4
+100/511761/campos_512_v4
+100/511784/campos_512_v4
+100/511801/campos_512_v4
+100/511810/campos_512_v4
+100/511829/campos_512_v4
+100/511831/campos_512_v4
+100/511864/campos_512_v4
+100/511930/campos_512_v4
+100/511937/campos_512_v4
+100/511973/campos_512_v4
+100/511980/campos_512_v4
+100/511987/campos_512_v4
+100/512179/campos_512_v4
+100/512223/campos_512_v4
+100/512251/campos_512_v4
+100/512275/campos_512_v4
+100/512320/campos_512_v4
+100/512367/campos_512_v4
+100/512379/campos_512_v4
+100/512436/campos_512_v4
+100/512453/campos_512_v4
+100/512560/campos_512_v4
+100/512562/campos_512_v4
+100/512636/campos_512_v4
+100/512660/campos_512_v4
+100/512690/campos_512_v4
+100/512697/campos_512_v4
+100/512699/campos_512_v4
+100/512704/campos_512_v4
+100/512711/campos_512_v4
+100/512732/campos_512_v4
+100/512767/campos_512_v4
+100/512770/campos_512_v4
+100/512780/campos_512_v4
+100/512781/campos_512_v4
+100/512783/campos_512_v4
+100/512791/campos_512_v4
+100/512826/campos_512_v4
+100/512840/campos_512_v4
+100/513024/campos_512_v4
+100/513049/campos_512_v4
+100/513196/campos_512_v4
+100/513200/campos_512_v4
+100/513204/campos_512_v4
+100/513274/campos_512_v4
+100/513394/campos_512_v4
+100/513450/campos_512_v4
+100/513501/campos_512_v4
+100/513520/campos_512_v4
+100/513655/campos_512_v4
+100/513715/campos_512_v4
+100/513729/campos_512_v4
+100/513736/campos_512_v4
+100/513737/campos_512_v4
+100/513787/campos_512_v4
+100/513948/campos_512_v4
+100/514003/campos_512_v4
+100/514045/campos_512_v4
+100/514050/campos_512_v4
+100/514057/campos_512_v4
+100/514128/campos_512_v4
+100/514138/campos_512_v4
+100/514178/campos_512_v4
+100/514242/campos_512_v4
+100/514253/campos_512_v4
+100/514265/campos_512_v4
+100/514270/campos_512_v4
+100/514327/campos_512_v4
+100/514373/campos_512_v4
+100/514395/campos_512_v4
+100/514461/campos_512_v4
+100/514467/campos_512_v4
+100/514519/campos_512_v4
+100/514570/campos_512_v4
+100/514577/campos_512_v4
+100/514601/campos_512_v4
+100/514637/campos_512_v4
+100/514680/campos_512_v4
+100/514685/campos_512_v4
+100/514705/campos_512_v4
+100/514713/campos_512_v4
+100/514725/campos_512_v4
+100/514827/campos_512_v4
+100/514828/campos_512_v4
+100/514833/campos_512_v4
+100/514894/campos_512_v4
+100/514943/campos_512_v4
+100/514952/campos_512_v4
+100/514960/campos_512_v4
+100/514976/campos_512_v4
+101/515011/campos_512_v4
+101/515027/campos_512_v4
+101/515097/campos_512_v4
+101/515134/campos_512_v4
+101/515138/campos_512_v4
+101/515159/campos_512_v4
+101/515197/campos_512_v4
+101/515222/campos_512_v4
+101/515237/campos_512_v4
+101/515243/campos_512_v4
+101/515328/campos_512_v4
+101/515330/campos_512_v4
+101/515355/campos_512_v4
+101/515380/campos_512_v4
+101/515382/campos_512_v4
+101/515386/campos_512_v4
+101/515389/campos_512_v4
+101/515394/campos_512_v4
+101/515408/campos_512_v4
+101/515410/campos_512_v4
+101/515457/campos_512_v4
+101/515488/campos_512_v4
+101/515516/campos_512_v4
+101/515579/campos_512_v4
+101/515619/campos_512_v4
+101/515630/campos_512_v4
+101/515642/campos_512_v4
+101/515765/campos_512_v4
+101/515785/campos_512_v4
+101/515896/campos_512_v4
+101/515933/campos_512_v4
+101/515937/campos_512_v4
+101/515950/campos_512_v4
+101/515968/campos_512_v4
+101/516033/campos_512_v4
+101/516035/campos_512_v4
+101/516046/campos_512_v4
+101/516217/campos_512_v4
+101/516228/campos_512_v4
+101/516354/campos_512_v4
+101/516360/campos_512_v4
+101/516428/campos_512_v4
+101/516438/campos_512_v4
+101/516474/campos_512_v4
+101/516507/campos_512_v4
+101/516513/campos_512_v4
+101/516549/campos_512_v4
+101/516557/campos_512_v4
+101/516560/campos_512_v4
+101/516603/campos_512_v4
+101/516683/campos_512_v4
+101/516733/campos_512_v4
+101/516735/campos_512_v4
+101/516765/campos_512_v4
+101/516778/campos_512_v4
+101/516807/campos_512_v4
+101/516829/campos_512_v4
+101/516833/campos_512_v4
+101/516851/campos_512_v4
+101/516859/campos_512_v4
+101/516863/campos_512_v4
+101/516883/campos_512_v4
+101/516909/campos_512_v4
+101/516975/campos_512_v4
+101/516977/campos_512_v4
+101/517018/campos_512_v4
+101/517094/campos_512_v4
+101/517097/campos_512_v4
+101/517104/campos_512_v4
+101/517106/campos_512_v4
+101/517171/campos_512_v4
+101/517182/campos_512_v4
+101/517204/campos_512_v4
+101/517276/campos_512_v4
+101/517292/campos_512_v4
+101/517399/campos_512_v4
+101/517419/campos_512_v4
+101/517460/campos_512_v4
+101/517488/campos_512_v4
+101/517541/campos_512_v4
+101/517555/campos_512_v4
+101/517566/campos_512_v4
+101/517586/campos_512_v4
+101/517596/campos_512_v4
+101/517615/campos_512_v4
+101/517645/campos_512_v4
+101/517743/campos_512_v4
+101/517754/campos_512_v4
+101/517756/campos_512_v4
+101/517779/campos_512_v4
+101/517810/campos_512_v4
+101/517843/campos_512_v4
+101/517862/campos_512_v4
+101/517879/campos_512_v4
+101/517882/campos_512_v4
+101/517919/campos_512_v4
+101/517938/campos_512_v4
+101/517943/campos_512_v4
+101/517952/campos_512_v4
+101/517994/campos_512_v4
+101/518077/campos_512_v4
+101/518094/campos_512_v4
+101/518132/campos_512_v4
+101/518152/campos_512_v4
+101/518200/campos_512_v4
+101/518205/campos_512_v4
+101/518218/campos_512_v4
+101/518296/campos_512_v4
+101/518306/campos_512_v4
+101/518316/campos_512_v4
+101/518359/campos_512_v4
+101/518466/campos_512_v4
+101/518470/campos_512_v4
+101/518542/campos_512_v4
+101/518547/campos_512_v4
+101/518622/campos_512_v4
+101/518644/campos_512_v4
+101/518650/campos_512_v4
+101/518661/campos_512_v4
+101/518665/campos_512_v4
+101/518715/campos_512_v4
+101/518734/campos_512_v4
+101/518738/campos_512_v4
+101/518763/campos_512_v4
+101/518815/campos_512_v4
+101/518916/campos_512_v4
+101/518929/campos_512_v4
+101/518971/campos_512_v4
+101/519092/campos_512_v4
+101/519174/campos_512_v4
+101/519226/campos_512_v4
+101/519227/campos_512_v4
+101/519241/campos_512_v4
+101/519282/campos_512_v4
+101/519315/campos_512_v4
+101/519326/campos_512_v4
+101/519355/campos_512_v4
+101/519388/campos_512_v4
+101/519403/campos_512_v4
+101/519433/campos_512_v4
+101/519435/campos_512_v4
+101/519446/campos_512_v4
+101/519502/campos_512_v4
+101/519519/campos_512_v4
+101/519559/campos_512_v4
+101/519622/campos_512_v4
+101/519661/campos_512_v4
+101/519664/campos_512_v4
+101/519668/campos_512_v4
+101/519673/campos_512_v4
+101/519680/campos_512_v4
+101/519748/campos_512_v4
+101/519756/campos_512_v4
+101/519766/campos_512_v4
+101/519772/campos_512_v4
+101/519879/campos_512_v4
+101/519925/campos_512_v4
+101/519955/campos_512_v4
+101/519958/campos_512_v4
+101/519970/campos_512_v4
+101/519999/campos_512_v4
+102/520017/campos_512_v4
+102/520054/campos_512_v4
+102/520075/campos_512_v4
+102/520100/campos_512_v4
+102/520111/campos_512_v4
+102/520134/campos_512_v4
+102/520170/campos_512_v4
+102/520181/campos_512_v4
+102/520220/campos_512_v4
+102/520251/campos_512_v4
+102/520280/campos_512_v4
+102/520284/campos_512_v4
+102/520318/campos_512_v4
+102/520323/campos_512_v4
+102/520354/campos_512_v4
+102/520361/campos_512_v4
+102/520383/campos_512_v4
+102/520384/campos_512_v4
+102/520398/campos_512_v4
+102/520408/campos_512_v4
+102/520419/campos_512_v4
+102/520477/campos_512_v4
+102/520487/campos_512_v4
+102/520509/campos_512_v4
+102/520537/campos_512_v4
+102/520604/campos_512_v4
+102/520647/campos_512_v4
+102/520656/campos_512_v4
+102/520676/campos_512_v4
+102/520715/campos_512_v4
+102/520733/campos_512_v4
+102/520737/campos_512_v4
+102/520753/campos_512_v4
+102/520767/campos_512_v4
+102/520779/campos_512_v4
+102/520819/campos_512_v4
+102/520825/campos_512_v4
+102/520843/campos_512_v4
+102/520858/campos_512_v4
+102/520888/campos_512_v4
+102/520900/campos_512_v4
+102/520919/campos_512_v4
+102/520921/campos_512_v4
+102/520922/campos_512_v4
+102/520977/campos_512_v4
+102/521049/campos_512_v4
+102/521126/campos_512_v4
+102/521129/campos_512_v4
+102/521134/campos_512_v4
+102/521159/campos_512_v4
+102/521219/campos_512_v4
+102/521229/campos_512_v4
+102/521338/campos_512_v4
+102/521352/campos_512_v4
+102/521358/campos_512_v4
+102/521416/campos_512_v4
+102/521459/campos_512_v4
+102/521537/campos_512_v4
+102/521538/campos_512_v4
+102/521556/campos_512_v4
+102/521558/campos_512_v4
+102/521560/campos_512_v4
+102/521645/campos_512_v4
+102/521669/campos_512_v4
+102/521677/campos_512_v4
+102/521742/campos_512_v4
+102/521751/campos_512_v4
+102/521763/campos_512_v4
+102/521789/campos_512_v4
+102/521816/campos_512_v4
+102/521820/campos_512_v4
+102/521824/campos_512_v4
+102/521836/campos_512_v4
+102/521840/campos_512_v4
+102/521856/campos_512_v4
+102/521867/campos_512_v4
+102/521908/campos_512_v4
+102/521953/campos_512_v4
+102/521968/campos_512_v4
+102/522024/campos_512_v4
+102/522028/campos_512_v4
+102/522031/campos_512_v4
+102/522048/campos_512_v4
+102/522090/campos_512_v4
+102/522116/campos_512_v4
+102/522157/campos_512_v4
+102/522217/campos_512_v4
+102/522236/campos_512_v4
+102/522285/campos_512_v4
+102/522297/campos_512_v4
+102/522419/campos_512_v4
+102/522437/campos_512_v4
+102/522458/campos_512_v4
+102/522489/campos_512_v4
+102/522515/campos_512_v4
+102/522537/campos_512_v4
+102/522566/campos_512_v4
+102/522598/campos_512_v4
+102/522640/campos_512_v4
+102/522663/campos_512_v4
+102/522682/campos_512_v4
+102/522738/campos_512_v4
+102/522805/campos_512_v4
+102/522873/campos_512_v4
+102/522888/campos_512_v4
+102/522900/campos_512_v4
+102/522930/campos_512_v4
+102/522931/campos_512_v4
+102/522954/campos_512_v4
+102/522982/campos_512_v4
+102/522983/campos_512_v4
+102/522989/campos_512_v4
+102/523023/campos_512_v4
+102/523068/campos_512_v4
+102/523108/campos_512_v4
+102/523134/campos_512_v4
+102/523166/campos_512_v4
+102/523267/campos_512_v4
+102/523339/campos_512_v4
+102/523422/campos_512_v4
+102/523427/campos_512_v4
+102/523436/campos_512_v4
+102/523439/campos_512_v4
+102/523465/campos_512_v4
+102/523508/campos_512_v4
+102/523580/campos_512_v4
+102/523602/campos_512_v4
+102/523662/campos_512_v4
+102/523722/campos_512_v4
+102/523723/campos_512_v4
+102/523827/campos_512_v4
+102/523838/campos_512_v4
+102/523873/campos_512_v4
+102/523914/campos_512_v4
+102/523921/campos_512_v4
+102/524007/campos_512_v4
+102/524033/campos_512_v4
+102/524042/campos_512_v4
+102/524048/campos_512_v4
+102/524104/campos_512_v4
+102/524125/campos_512_v4
+102/524199/campos_512_v4
+102/524221/campos_512_v4
+102/524233/campos_512_v4
+102/524249/campos_512_v4
+102/524336/campos_512_v4
+102/524462/campos_512_v4
+102/524539/campos_512_v4
+102/524540/campos_512_v4
+102/524552/campos_512_v4
+102/524747/campos_512_v4
+102/524749/campos_512_v4
+102/524809/campos_512_v4
+102/524873/campos_512_v4
+102/524879/campos_512_v4
+102/524928/campos_512_v4
+102/524952/campos_512_v4
+102/524980/campos_512_v4
+103/525007/campos_512_v4
+103/525010/campos_512_v4
+103/525041/campos_512_v4
+103/525057/campos_512_v4
+103/525102/campos_512_v4
+103/525127/campos_512_v4
+103/525161/campos_512_v4
+103/525170/campos_512_v4
+103/525173/campos_512_v4
+103/525177/campos_512_v4
+103/525203/campos_512_v4
+103/525284/campos_512_v4
+103/525290/campos_512_v4
+103/525401/campos_512_v4
+103/525420/campos_512_v4
+103/525424/campos_512_v4
+103/525463/campos_512_v4
+103/525478/campos_512_v4
+103/525493/campos_512_v4
+103/525551/campos_512_v4
+103/525573/campos_512_v4
+103/525577/campos_512_v4
+103/525636/campos_512_v4
+103/525681/campos_512_v4
+103/525811/campos_512_v4
+103/525820/campos_512_v4
+103/525824/campos_512_v4
+103/525825/campos_512_v4
+103/525860/campos_512_v4
+103/525866/campos_512_v4
+103/525895/campos_512_v4
+103/525916/campos_512_v4
+103/525919/campos_512_v4
+103/525961/campos_512_v4
+103/526002/campos_512_v4
+103/526011/campos_512_v4
+103/526015/campos_512_v4
+103/526030/campos_512_v4
+103/526032/campos_512_v4
+103/526116/campos_512_v4
+103/526157/campos_512_v4
+103/526224/campos_512_v4
+103/526259/campos_512_v4
+103/526314/campos_512_v4
+103/526439/campos_512_v4
+103/526454/campos_512_v4
+103/526455/campos_512_v4
+103/526456/campos_512_v4
+103/526482/campos_512_v4
+103/526525/campos_512_v4
+103/526559/campos_512_v4
+103/526662/campos_512_v4
+103/526669/campos_512_v4
+103/526685/campos_512_v4
+103/526702/campos_512_v4
+103/526730/campos_512_v4
+103/526780/campos_512_v4
+103/526786/campos_512_v4
+103/526850/campos_512_v4
+103/526901/campos_512_v4
+103/526911/campos_512_v4
+103/526916/campos_512_v4
+103/526934/campos_512_v4
+103/526965/campos_512_v4
+103/526983/campos_512_v4
+103/526996/campos_512_v4
+103/527013/campos_512_v4
+103/527017/campos_512_v4
+103/527057/campos_512_v4
+103/527083/campos_512_v4
+103/527086/campos_512_v4
+103/527089/campos_512_v4
+103/527092/campos_512_v4
+103/527120/campos_512_v4
+103/527150/campos_512_v4
+103/527177/campos_512_v4
+103/527181/campos_512_v4
+103/527186/campos_512_v4
+103/527228/campos_512_v4
+103/527236/campos_512_v4
+103/527250/campos_512_v4
+103/527286/campos_512_v4
+103/527290/campos_512_v4
+103/527295/campos_512_v4
+103/527299/campos_512_v4
+103/527361/campos_512_v4
+103/527371/campos_512_v4
+103/527375/campos_512_v4
+103/527386/campos_512_v4
+103/527387/campos_512_v4
+103/527411/campos_512_v4
+103/527415/campos_512_v4
+103/527450/campos_512_v4
+103/527465/campos_512_v4
+103/527550/campos_512_v4
+103/527562/campos_512_v4
+103/527567/campos_512_v4
+103/527568/campos_512_v4
+103/527571/campos_512_v4
+103/527581/campos_512_v4
+103/527611/campos_512_v4
+103/527630/campos_512_v4
+103/527668/campos_512_v4
+103/527695/campos_512_v4
+103/527752/campos_512_v4
+103/527768/campos_512_v4
+103/527786/campos_512_v4
+103/527909/campos_512_v4
+103/527933/campos_512_v4
+103/527981/campos_512_v4
+103/527992/campos_512_v4
+103/528001/campos_512_v4
+103/528041/campos_512_v4
+103/528058/campos_512_v4
+103/528064/campos_512_v4
+103/528086/campos_512_v4
+103/528135/campos_512_v4
+103/528245/campos_512_v4
+103/528281/campos_512_v4
+103/528292/campos_512_v4
+103/528306/campos_512_v4
+103/528318/campos_512_v4
+103/528349/campos_512_v4
+103/528458/campos_512_v4
+103/528459/campos_512_v4
+103/528474/campos_512_v4
+103/528523/campos_512_v4
+103/528577/campos_512_v4
+103/528658/campos_512_v4
+103/528683/campos_512_v4
+103/528699/campos_512_v4
+103/528757/campos_512_v4
+103/528760/campos_512_v4
+103/528762/campos_512_v4
+103/528775/campos_512_v4
+103/528930/campos_512_v4
+103/528976/campos_512_v4
+103/528977/campos_512_v4
+103/529032/campos_512_v4
+103/529054/campos_512_v4
+103/529093/campos_512_v4
+103/529108/campos_512_v4
+103/529126/campos_512_v4
+103/529153/campos_512_v4
+103/529200/campos_512_v4
+103/529227/campos_512_v4
+103/529254/campos_512_v4
+103/529287/campos_512_v4
+103/529340/campos_512_v4
+103/529388/campos_512_v4
+103/529395/campos_512_v4
+103/529405/campos_512_v4
+103/529423/campos_512_v4
+103/529472/campos_512_v4
+103/529473/campos_512_v4
+103/529484/campos_512_v4
+103/529513/campos_512_v4
+103/529566/campos_512_v4
+103/529642/campos_512_v4
+103/529644/campos_512_v4
+103/529654/campos_512_v4
+103/529665/campos_512_v4
+103/529744/campos_512_v4
+103/529762/campos_512_v4
+103/529765/campos_512_v4
+103/529770/campos_512_v4
+103/529813/campos_512_v4
+103/529877/campos_512_v4
+103/529887/campos_512_v4
+103/529919/campos_512_v4
+103/529944/campos_512_v4
+103/529980/campos_512_v4
+104/530020/campos_512_v4
+104/530064/campos_512_v4
+104/530073/campos_512_v4
+104/530076/campos_512_v4
+104/530079/campos_512_v4
+104/530085/campos_512_v4
+104/530108/campos_512_v4
+104/530113/campos_512_v4
+104/530132/campos_512_v4
+104/530143/campos_512_v4
+104/530159/campos_512_v4
+104/530161/campos_512_v4
+104/530173/campos_512_v4
+104/530253/campos_512_v4
+104/530291/campos_512_v4
+104/530337/campos_512_v4
+104/530353/campos_512_v4
+104/530365/campos_512_v4
+104/530403/campos_512_v4
+104/530405/campos_512_v4
+104/530449/campos_512_v4
+104/530487/campos_512_v4
+104/530600/campos_512_v4
+104/530603/campos_512_v4
+104/530605/campos_512_v4
+104/530617/campos_512_v4
+104/530631/campos_512_v4
+104/530645/campos_512_v4
+104/530678/campos_512_v4
+104/530684/campos_512_v4
+104/530701/campos_512_v4
+104/530735/campos_512_v4
+104/530753/campos_512_v4
+104/530804/campos_512_v4
+104/530821/campos_512_v4
+104/530838/campos_512_v4
+104/530862/campos_512_v4
+104/530872/campos_512_v4
+104/530927/campos_512_v4
+104/530932/campos_512_v4
+104/530944/campos_512_v4
+104/530954/campos_512_v4
+104/530997/campos_512_v4
+104/531005/campos_512_v4
+104/531028/campos_512_v4
+104/531148/campos_512_v4
+104/531149/campos_512_v4
+104/531172/campos_512_v4
+104/531174/campos_512_v4
+104/531186/campos_512_v4
+104/531216/campos_512_v4
+104/531220/campos_512_v4
+104/531234/campos_512_v4
+104/531250/campos_512_v4
+104/531265/campos_512_v4
+104/531267/campos_512_v4
+104/531279/campos_512_v4
+104/531281/campos_512_v4
+104/531287/campos_512_v4
+104/531319/campos_512_v4
+104/531348/campos_512_v4
+104/531361/campos_512_v4
+104/531363/campos_512_v4
+104/531420/campos_512_v4
+104/531424/campos_512_v4
+104/531432/campos_512_v4
+104/531435/campos_512_v4
+104/531468/campos_512_v4
+104/531501/campos_512_v4
+104/531519/campos_512_v4
+104/531525/campos_512_v4
+104/531596/campos_512_v4
+104/531627/campos_512_v4
+104/531660/campos_512_v4
+104/531666/campos_512_v4
+104/531682/campos_512_v4
+104/531687/campos_512_v4
+104/531713/campos_512_v4
+104/531715/campos_512_v4
+104/531737/campos_512_v4
+104/531745/campos_512_v4
+104/531754/campos_512_v4
+104/531770/campos_512_v4
+104/531802/campos_512_v4
+104/531855/campos_512_v4
+104/531859/campos_512_v4
+104/531888/campos_512_v4
+104/531890/campos_512_v4
+104/531911/campos_512_v4
+104/531934/campos_512_v4
+104/531956/campos_512_v4
+104/531966/campos_512_v4
+104/532031/campos_512_v4
+104/532035/campos_512_v4
+104/532102/campos_512_v4
+104/532112/campos_512_v4
+104/532150/campos_512_v4
+104/532222/campos_512_v4
+104/532237/campos_512_v4
+104/532245/campos_512_v4
+104/532260/campos_512_v4
+104/532272/campos_512_v4
+104/532281/campos_512_v4
+104/532287/campos_512_v4
+104/532302/campos_512_v4
+104/532339/campos_512_v4
+104/532354/campos_512_v4
+104/532370/campos_512_v4
+104/532417/campos_512_v4
+104/532431/campos_512_v4
+104/532448/campos_512_v4
+104/532450/campos_512_v4
+104/532470/campos_512_v4
+104/532486/campos_512_v4
+104/532508/campos_512_v4
+104/532525/campos_512_v4
+104/532539/campos_512_v4
+104/532560/campos_512_v4
+104/532589/campos_512_v4
+104/532598/campos_512_v4
+104/532632/campos_512_v4
+104/532656/campos_512_v4
+104/532665/campos_512_v4
+104/532671/campos_512_v4
+104/532738/campos_512_v4
+104/532743/campos_512_v4
+104/532780/campos_512_v4
+104/532883/campos_512_v4
+104/532897/campos_512_v4
+104/532906/campos_512_v4
+104/532910/campos_512_v4
+104/532937/campos_512_v4
+104/532942/campos_512_v4
+104/532965/campos_512_v4
+104/533062/campos_512_v4
+104/533133/campos_512_v4
+104/533155/campos_512_v4
+104/533169/campos_512_v4
+104/533178/campos_512_v4
+104/533187/campos_512_v4
+104/533239/campos_512_v4
+104/533251/campos_512_v4
+104/533381/campos_512_v4
+104/533412/campos_512_v4
+104/533453/campos_512_v4
+104/533531/campos_512_v4
+104/533592/campos_512_v4
+104/533623/campos_512_v4
+104/533635/campos_512_v4
+104/533640/campos_512_v4
+104/533675/campos_512_v4
+104/533684/campos_512_v4
+104/533698/campos_512_v4
+104/533707/campos_512_v4
+104/533711/campos_512_v4
+104/533712/campos_512_v4
+104/533759/campos_512_v4
+104/533762/campos_512_v4
+104/533763/campos_512_v4
+104/533777/campos_512_v4
+104/533814/campos_512_v4
+104/533819/campos_512_v4
+104/533821/campos_512_v4
+104/533847/campos_512_v4
+104/533875/campos_512_v4
+104/533902/campos_512_v4
+104/533943/campos_512_v4
+104/533996/campos_512_v4
+104/534025/campos_512_v4
+104/534032/campos_512_v4
+104/534047/campos_512_v4
+104/534052/campos_512_v4
+104/534064/campos_512_v4
+104/534072/campos_512_v4
+104/534117/campos_512_v4
+104/534168/campos_512_v4
+104/534204/campos_512_v4
+104/534268/campos_512_v4
+104/534340/campos_512_v4
+104/534370/campos_512_v4
+104/534431/campos_512_v4
+104/534457/campos_512_v4
+104/534475/campos_512_v4
+104/534477/campos_512_v4
+104/534483/campos_512_v4
+104/534490/campos_512_v4
+104/534528/campos_512_v4
+104/534544/campos_512_v4
+104/534594/campos_512_v4
+104/534629/campos_512_v4
+104/534636/campos_512_v4
+104/534638/campos_512_v4
+104/534705/campos_512_v4
+104/534740/campos_512_v4
+104/534763/campos_512_v4
+104/534791/campos_512_v4
+104/534859/campos_512_v4
+104/534869/campos_512_v4
+104/534890/campos_512_v4
+104/534924/campos_512_v4
+104/534928/campos_512_v4
+104/534969/campos_512_v4
+104/534996/campos_512_v4
+105/535014/campos_512_v4
+105/535037/campos_512_v4
+105/535079/campos_512_v4
+105/535143/campos_512_v4
+105/535163/campos_512_v4
+105/535234/campos_512_v4
+105/535255/campos_512_v4
+105/535259/campos_512_v4
+105/535280/campos_512_v4
+105/535320/campos_512_v4
+105/535342/campos_512_v4
+105/535362/campos_512_v4
+105/535406/campos_512_v4
+105/535416/campos_512_v4
+105/535445/campos_512_v4
+105/535504/campos_512_v4
+105/535555/campos_512_v4
+105/535592/campos_512_v4
+105/535604/campos_512_v4
+105/535683/campos_512_v4
+105/535765/campos_512_v4
+105/535771/campos_512_v4
+105/535788/campos_512_v4
+105/535804/campos_512_v4
+105/535828/campos_512_v4
+105/535858/campos_512_v4
+105/535877/campos_512_v4
+105/535929/campos_512_v4
+105/535971/campos_512_v4
+105/536045/campos_512_v4
+105/536052/campos_512_v4
+105/536074/campos_512_v4
+105/536115/campos_512_v4
+105/536151/campos_512_v4
+105/536177/campos_512_v4
+105/536265/campos_512_v4
+105/536292/campos_512_v4
+105/536313/campos_512_v4
+105/536327/campos_512_v4
+105/536355/campos_512_v4
+105/536363/campos_512_v4
+105/536399/campos_512_v4
+105/536401/campos_512_v4
+105/536436/campos_512_v4
+105/536457/campos_512_v4
+105/536466/campos_512_v4
+105/536468/campos_512_v4
+105/536473/campos_512_v4
+105/536506/campos_512_v4
+105/536514/campos_512_v4
+105/536533/campos_512_v4
+105/536569/campos_512_v4
+105/536579/campos_512_v4
+105/536583/campos_512_v4
+105/536603/campos_512_v4
+105/536608/campos_512_v4
+105/536680/campos_512_v4
+105/536683/campos_512_v4
+105/536721/campos_512_v4
+105/536746/campos_512_v4
+105/536749/campos_512_v4
+105/536781/campos_512_v4
+105/536797/campos_512_v4
+105/536803/campos_512_v4
+105/536902/campos_512_v4
+105/536936/campos_512_v4
+105/536942/campos_512_v4
+105/537016/campos_512_v4
+105/537019/campos_512_v4
+105/537084/campos_512_v4
+105/537086/campos_512_v4
+105/537089/campos_512_v4
+105/537108/campos_512_v4
+105/537124/campos_512_v4
+105/537142/campos_512_v4
+105/537162/campos_512_v4
+105/537177/campos_512_v4
+105/537186/campos_512_v4
+105/537194/campos_512_v4
+105/537196/campos_512_v4
+105/537241/campos_512_v4
+105/537259/campos_512_v4
+105/537366/campos_512_v4
+105/537370/campos_512_v4
+105/537372/campos_512_v4
+105/537440/campos_512_v4
+105/537450/campos_512_v4
+105/537506/campos_512_v4
+105/537517/campos_512_v4
+105/537533/campos_512_v4
+105/537543/campos_512_v4
+105/537564/campos_512_v4
+105/537605/campos_512_v4
+105/537626/campos_512_v4
+105/537678/campos_512_v4
+105/537681/campos_512_v4
+105/537743/campos_512_v4
+105/537764/campos_512_v4
+105/537771/campos_512_v4
+105/537822/campos_512_v4
+105/537830/campos_512_v4
+105/537869/campos_512_v4
+105/537899/campos_512_v4
+105/537955/campos_512_v4
+105/538032/campos_512_v4
+105/538081/campos_512_v4
+105/538086/campos_512_v4
+105/538090/campos_512_v4
+105/538095/campos_512_v4
+105/538109/campos_512_v4
+105/538139/campos_512_v4
+105/538141/campos_512_v4
+105/538185/campos_512_v4
+105/538210/campos_512_v4
+105/538220/campos_512_v4
+105/538244/campos_512_v4
+105/538253/campos_512_v4
+105/538288/campos_512_v4
+105/538310/campos_512_v4
+105/538346/campos_512_v4
+105/538348/campos_512_v4
+105/538394/campos_512_v4
+105/538418/campos_512_v4
+105/538420/campos_512_v4
+105/538443/campos_512_v4
+105/538451/campos_512_v4
+105/538464/campos_512_v4
+105/538493/campos_512_v4
+105/538516/campos_512_v4
+105/538563/campos_512_v4
+105/538587/campos_512_v4
+105/538640/campos_512_v4
+105/538683/campos_512_v4
+105/538689/campos_512_v4
+105/538707/campos_512_v4
+105/538715/campos_512_v4
+105/538716/campos_512_v4
+105/538728/campos_512_v4
+105/538762/campos_512_v4
+105/538832/campos_512_v4
+105/538901/campos_512_v4
+105/538924/campos_512_v4
+105/538969/campos_512_v4
+105/538998/campos_512_v4
+105/539001/campos_512_v4
+105/539012/campos_512_v4
+105/539082/campos_512_v4
+105/539104/campos_512_v4
+105/539133/campos_512_v4
+105/539160/campos_512_v4
+105/539165/campos_512_v4
+105/539177/campos_512_v4
+105/539205/campos_512_v4
+105/539223/campos_512_v4
+105/539227/campos_512_v4
+105/539341/campos_512_v4
+105/539469/campos_512_v4
+105/539551/campos_512_v4
+105/539587/campos_512_v4
+105/539604/campos_512_v4
+105/539621/campos_512_v4
+105/539630/campos_512_v4
+105/539670/campos_512_v4
+105/539671/campos_512_v4
+105/539719/campos_512_v4
+105/539726/campos_512_v4
+105/539740/campos_512_v4
+105/539751/campos_512_v4
+105/539771/campos_512_v4
+105/539850/campos_512_v4
+105/539937/campos_512_v4
+105/539964/campos_512_v4
+105/539973/campos_512_v4
+105/539983/campos_512_v4
+105/540001/campos_512_v4
+106/540119/campos_512_v4
+106/540131/campos_512_v4
+106/540175/campos_512_v4
+106/540184/campos_512_v4
+106/540213/campos_512_v4
+106/540214/campos_512_v4
+106/540216/campos_512_v4
+106/540269/campos_512_v4
+106/540286/campos_512_v4
+106/540321/campos_512_v4
+106/540324/campos_512_v4
+106/540372/campos_512_v4
+106/540402/campos_512_v4
+106/540416/campos_512_v4
+106/540419/campos_512_v4
+106/540427/campos_512_v4
+106/540433/campos_512_v4
+106/540459/campos_512_v4
+106/540540/campos_512_v4
+106/540546/campos_512_v4
+106/540547/campos_512_v4
+106/540560/campos_512_v4
+106/540574/campos_512_v4
+106/540589/campos_512_v4
+106/540602/campos_512_v4
+106/540622/campos_512_v4
+106/540638/campos_512_v4
+106/540657/campos_512_v4
+106/540671/campos_512_v4
+106/540691/campos_512_v4
+106/540713/campos_512_v4
+106/540770/campos_512_v4
+106/540837/campos_512_v4
+106/540886/campos_512_v4
+106/540910/campos_512_v4
+106/540926/campos_512_v4
+106/540928/campos_512_v4
+106/540983/campos_512_v4
+106/541070/campos_512_v4
+106/541136/campos_512_v4
+106/541143/campos_512_v4
+106/541202/campos_512_v4
+106/541325/campos_512_v4
+106/541366/campos_512_v4
+106/541368/campos_512_v4
+106/541371/campos_512_v4
+106/541435/campos_512_v4
+106/541437/campos_512_v4
+106/541439/campos_512_v4
+106/541456/campos_512_v4
+106/541465/campos_512_v4
+106/541486/campos_512_v4
+106/541532/campos_512_v4
+106/541538/campos_512_v4
+106/541598/campos_512_v4
+106/541644/campos_512_v4
+106/541664/campos_512_v4
+106/541689/campos_512_v4
+106/541702/campos_512_v4
+106/541750/campos_512_v4
+106/541789/campos_512_v4
+106/541792/campos_512_v4
+106/541809/campos_512_v4
+106/541838/campos_512_v4
+106/541886/campos_512_v4
+106/541900/campos_512_v4
+106/541909/campos_512_v4
+106/542009/campos_512_v4
+106/542043/campos_512_v4
+106/542046/campos_512_v4
+106/542050/campos_512_v4
+106/542064/campos_512_v4
+106/542092/campos_512_v4
+106/542096/campos_512_v4
+106/542148/campos_512_v4
+106/542172/campos_512_v4
+106/542217/campos_512_v4
+106/542222/campos_512_v4
+106/542224/campos_512_v4
+106/542256/campos_512_v4
+106/542276/campos_512_v4
+106/542299/campos_512_v4
+106/542329/campos_512_v4
+106/542353/campos_512_v4
+106/542470/campos_512_v4
+106/542532/campos_512_v4
+106/542542/campos_512_v4
+106/542545/campos_512_v4
+106/542577/campos_512_v4
+106/542676/campos_512_v4
+106/542701/campos_512_v4
+106/542740/campos_512_v4
+106/542747/campos_512_v4
+106/542761/campos_512_v4
+106/542834/campos_512_v4
+106/542883/campos_512_v4
+106/542924/campos_512_v4
+106/542946/campos_512_v4
+106/542974/campos_512_v4
+106/542989/campos_512_v4
+106/543042/campos_512_v4
+106/543062/campos_512_v4
+106/543094/campos_512_v4
+106/543112/campos_512_v4
+106/543133/campos_512_v4
+106/543136/campos_512_v4
+106/543176/campos_512_v4
+106/543238/campos_512_v4
+106/543256/campos_512_v4
+106/543286/campos_512_v4
+106/543336/campos_512_v4
+106/543343/campos_512_v4
+106/543373/campos_512_v4
+106/543378/campos_512_v4
+106/543396/campos_512_v4
+106/543403/campos_512_v4
+106/543452/campos_512_v4
+106/543484/campos_512_v4
+106/543501/campos_512_v4
+106/543515/campos_512_v4
+106/543555/campos_512_v4
+106/543607/campos_512_v4
+106/543610/campos_512_v4
+106/543673/campos_512_v4
+106/543676/campos_512_v4
+106/543678/campos_512_v4
+106/543681/campos_512_v4
+106/543710/campos_512_v4
+106/543720/campos_512_v4
+106/543761/campos_512_v4
+106/543793/campos_512_v4
+106/543850/campos_512_v4
+106/543851/campos_512_v4
+106/543889/campos_512_v4
+106/543901/campos_512_v4
+106/543908/campos_512_v4
+106/543933/campos_512_v4
+106/543955/campos_512_v4
+106/544025/campos_512_v4
+106/544042/campos_512_v4
+106/544047/campos_512_v4
+106/544079/campos_512_v4
+106/544167/campos_512_v4
+106/544179/campos_512_v4
+106/544222/campos_512_v4
+106/544351/campos_512_v4
+106/544361/campos_512_v4
+106/544383/campos_512_v4
+106/544397/campos_512_v4
+106/544399/campos_512_v4
+106/544402/campos_512_v4
+106/544421/campos_512_v4
+106/544439/campos_512_v4
+106/544446/campos_512_v4
+106/544462/campos_512_v4
+106/544503/campos_512_v4
+106/544513/campos_512_v4
+106/544538/campos_512_v4
+106/544580/campos_512_v4
+106/544582/campos_512_v4
+106/544590/campos_512_v4
+106/544605/campos_512_v4
+106/544616/campos_512_v4
+106/544621/campos_512_v4
+106/544655/campos_512_v4
+106/544685/campos_512_v4
+106/544692/campos_512_v4
+106/544705/campos_512_v4
+106/544719/campos_512_v4
+106/544725/campos_512_v4
+106/544768/campos_512_v4
+106/544778/campos_512_v4
+106/544781/campos_512_v4
+106/544963/campos_512_v4
+106/544965/campos_512_v4
+106/544982/campos_512_v4
+106/544992/campos_512_v4
+107/545091/campos_512_v4
+107/545123/campos_512_v4
+107/545164/campos_512_v4
+107/545222/campos_512_v4
+107/545235/campos_512_v4
+107/545245/campos_512_v4
+107/545359/campos_512_v4
+107/545362/campos_512_v4
+107/545364/campos_512_v4
+107/545367/campos_512_v4
+107/545384/campos_512_v4
+107/545401/campos_512_v4
+107/545434/campos_512_v4
+107/545438/campos_512_v4
+107/545475/campos_512_v4
+107/545481/campos_512_v4
+107/545501/campos_512_v4
+107/545509/campos_512_v4
+107/545550/campos_512_v4
+107/545556/campos_512_v4
+107/545588/campos_512_v4
+107/545609/campos_512_v4
+107/545616/campos_512_v4
+107/545633/campos_512_v4
+107/545636/campos_512_v4
+107/545658/campos_512_v4
+107/545671/campos_512_v4
+107/545685/campos_512_v4
+107/545686/campos_512_v4
+107/545712/campos_512_v4
+107/545733/campos_512_v4
+107/545806/campos_512_v4
+107/545844/campos_512_v4
+107/545846/campos_512_v4
+107/545862/campos_512_v4
+107/545879/campos_512_v4
+107/545936/campos_512_v4
+107/545991/campos_512_v4
+107/545998/campos_512_v4
+107/546012/campos_512_v4
+107/546044/campos_512_v4
+107/546062/campos_512_v4
+107/546067/campos_512_v4
+107/546100/campos_512_v4
+107/546101/campos_512_v4
+107/546124/campos_512_v4
+107/546128/campos_512_v4
+107/546190/campos_512_v4
+107/546203/campos_512_v4
+107/546206/campos_512_v4
+107/546232/campos_512_v4
+107/546236/campos_512_v4
+107/546261/campos_512_v4
+107/546263/campos_512_v4
+107/546271/campos_512_v4
+107/546282/campos_512_v4
+107/546383/campos_512_v4
+107/546384/campos_512_v4
+107/546410/campos_512_v4
+107/546416/campos_512_v4
+107/546417/campos_512_v4
+107/546436/campos_512_v4
+107/546445/campos_512_v4
+107/546457/campos_512_v4
+107/546550/campos_512_v4
+107/546607/campos_512_v4
+107/546629/campos_512_v4
+107/546645/campos_512_v4
+107/546693/campos_512_v4
+107/546776/campos_512_v4
+107/546847/campos_512_v4
+107/546884/campos_512_v4
+107/546892/campos_512_v4
+107/546909/campos_512_v4
+107/546924/campos_512_v4
+107/546927/campos_512_v4
+107/546941/campos_512_v4
+107/546966/campos_512_v4
+107/547007/campos_512_v4
+107/547014/campos_512_v4
+107/547029/campos_512_v4
+107/547055/campos_512_v4
+107/547123/campos_512_v4
+107/547131/campos_512_v4
+107/547161/campos_512_v4
+107/547171/campos_512_v4
+107/547176/campos_512_v4
+107/547241/campos_512_v4
+107/547319/campos_512_v4
+107/547333/campos_512_v4
+107/547358/campos_512_v4
+107/547365/campos_512_v4
+107/547368/campos_512_v4
+107/547406/campos_512_v4
+107/547407/campos_512_v4
+107/547475/campos_512_v4
+107/547479/campos_512_v4
+107/547483/campos_512_v4
+107/547496/campos_512_v4
+107/547558/campos_512_v4
+107/547610/campos_512_v4
+107/547632/campos_512_v4
+107/547662/campos_512_v4
+107/547821/campos_512_v4
+107/547840/campos_512_v4
+107/547892/campos_512_v4
+107/547919/campos_512_v4
+107/547929/campos_512_v4
+107/547945/campos_512_v4
+107/547955/campos_512_v4
+107/547982/campos_512_v4
+107/548006/campos_512_v4
+107/548050/campos_512_v4
+107/548125/campos_512_v4
+107/548205/campos_512_v4
+107/548239/campos_512_v4
+107/548241/campos_512_v4
+107/548244/campos_512_v4
+107/548273/campos_512_v4
+107/548369/campos_512_v4
+107/548371/campos_512_v4
+107/548392/campos_512_v4
+107/548398/campos_512_v4
+107/548435/campos_512_v4
+107/548448/campos_512_v4
+107/548513/campos_512_v4
+107/548518/campos_512_v4
+107/548538/campos_512_v4
+107/548574/campos_512_v4
+107/548583/campos_512_v4
+107/548587/campos_512_v4
+107/548644/campos_512_v4
+107/548648/campos_512_v4
+107/548653/campos_512_v4
+107/548713/campos_512_v4
+107/548724/campos_512_v4
+107/548729/campos_512_v4
+107/548739/campos_512_v4
+107/548762/campos_512_v4
+107/548799/campos_512_v4
+107/548802/campos_512_v4
+107/548982/campos_512_v4
+107/549083/campos_512_v4
+107/549111/campos_512_v4
+107/549169/campos_512_v4
+107/549170/campos_512_v4
+107/549178/campos_512_v4
+107/549187/campos_512_v4
+107/549238/campos_512_v4
+107/549244/campos_512_v4
+107/549271/campos_512_v4
+107/549274/campos_512_v4
+107/549291/campos_512_v4
+107/549336/campos_512_v4
+107/549350/campos_512_v4
+107/549375/campos_512_v4
+107/549422/campos_512_v4
+107/549435/campos_512_v4
+107/549520/campos_512_v4
+107/549709/campos_512_v4
+107/549732/campos_512_v4
+107/549783/campos_512_v4
+107/549789/campos_512_v4
+107/549814/campos_512_v4
+107/549841/campos_512_v4
+107/549847/campos_512_v4
+107/549887/campos_512_v4
+107/549910/campos_512_v4
+107/549922/campos_512_v4
+107/549931/campos_512_v4
+107/549937/campos_512_v4
+107/549955/campos_512_v4
+107/549964/campos_512_v4
+107/549974/campos_512_v4
+107/549979/campos_512_v4
+108/550007/campos_512_v4
+108/550008/campos_512_v4
+108/550016/campos_512_v4
+108/550018/campos_512_v4
+108/550051/campos_512_v4
+108/550070/campos_512_v4
+108/550081/campos_512_v4
+108/550100/campos_512_v4
+108/550192/campos_512_v4
+108/550204/campos_512_v4
+108/550222/campos_512_v4
+108/550313/campos_512_v4
+108/550349/campos_512_v4
+108/550371/campos_512_v4
+108/550431/campos_512_v4
+108/550454/campos_512_v4
+108/550464/campos_512_v4
+108/550469/campos_512_v4
+108/550534/campos_512_v4
+108/550618/campos_512_v4
+108/550712/campos_512_v4
+108/550734/campos_512_v4
+108/550748/campos_512_v4
+108/550768/campos_512_v4
+108/550786/campos_512_v4
+108/550798/campos_512_v4
+108/550819/campos_512_v4
+108/550897/campos_512_v4
+108/550906/campos_512_v4
+108/550919/campos_512_v4
+108/550921/campos_512_v4
+108/550924/campos_512_v4
+108/550933/campos_512_v4
+108/550950/campos_512_v4
+108/550951/campos_512_v4
+108/550956/campos_512_v4
+108/551026/campos_512_v4
+108/551032/campos_512_v4
+108/551047/campos_512_v4
+108/551097/campos_512_v4
+108/551136/campos_512_v4
+108/551142/campos_512_v4
+108/551169/campos_512_v4
+108/551239/campos_512_v4
+108/551253/campos_512_v4
+108/551264/campos_512_v4
+108/551276/campos_512_v4
+108/551302/campos_512_v4
+108/551311/campos_512_v4
+108/551366/campos_512_v4
+108/551419/campos_512_v4
+108/551431/campos_512_v4
+108/551529/campos_512_v4
+108/551551/campos_512_v4
+108/551552/campos_512_v4
+108/551557/campos_512_v4
+108/551573/campos_512_v4
+108/551608/campos_512_v4
+108/551609/campos_512_v4
+108/551849/campos_512_v4
+108/552028/campos_512_v4
+108/552067/campos_512_v4
+108/552118/campos_512_v4
+108/552131/campos_512_v4
+108/552140/campos_512_v4
+108/552163/campos_512_v4
+108/552171/campos_512_v4
+108/552174/campos_512_v4
+108/552188/campos_512_v4
+108/552205/campos_512_v4
+108/552214/campos_512_v4
+108/552284/campos_512_v4
+108/552291/campos_512_v4
+108/552301/campos_512_v4
+108/552311/campos_512_v4
+108/552366/campos_512_v4
+108/552396/campos_512_v4
+108/552397/campos_512_v4
+108/552406/campos_512_v4
+108/552418/campos_512_v4
+108/552497/campos_512_v4
+108/552499/campos_512_v4
+108/552509/campos_512_v4
+108/552521/campos_512_v4
+108/552579/campos_512_v4
+108/552662/campos_512_v4
+108/552666/campos_512_v4
+108/552681/campos_512_v4
+108/552699/campos_512_v4
+108/552727/campos_512_v4
+108/552754/campos_512_v4
+108/552756/campos_512_v4
+108/552783/campos_512_v4
+108/552802/campos_512_v4
+108/552921/campos_512_v4
+108/552949/campos_512_v4
+108/552963/campos_512_v4
+108/553035/campos_512_v4
+108/553043/campos_512_v4
+108/553054/campos_512_v4
+108/553096/campos_512_v4
+108/553107/campos_512_v4
+108/553142/campos_512_v4
+108/553226/campos_512_v4
+108/553235/campos_512_v4
+108/553254/campos_512_v4
+108/553256/campos_512_v4
+108/553272/campos_512_v4
+108/553369/campos_512_v4
+108/553373/campos_512_v4
+108/553400/campos_512_v4
+108/553483/campos_512_v4
+108/553504/campos_512_v4
+108/553539/campos_512_v4
+108/553615/campos_512_v4
+108/553627/campos_512_v4
+108/553654/campos_512_v4
+108/553708/campos_512_v4
+108/553724/campos_512_v4
+108/553827/campos_512_v4
+108/553867/campos_512_v4
+108/553870/campos_512_v4
+108/553871/campos_512_v4
+108/553898/campos_512_v4
+108/553919/campos_512_v4
+108/553936/campos_512_v4
+108/553956/campos_512_v4
+108/553978/campos_512_v4
+108/553988/campos_512_v4
+108/554003/campos_512_v4
+108/554031/campos_512_v4
+108/554061/campos_512_v4
+108/554063/campos_512_v4
+108/554082/campos_512_v4
+108/554144/campos_512_v4
+108/554172/campos_512_v4
+108/554204/campos_512_v4
+108/554217/campos_512_v4
+108/554230/campos_512_v4
+108/554283/campos_512_v4
+108/554308/campos_512_v4
+108/554311/campos_512_v4
+108/554319/campos_512_v4
+108/554359/campos_512_v4
+108/554367/campos_512_v4
+108/554370/campos_512_v4
+108/554386/campos_512_v4
+108/554405/campos_512_v4
+108/554412/campos_512_v4
+108/554414/campos_512_v4
+108/554425/campos_512_v4
+108/554443/campos_512_v4
+108/554449/campos_512_v4
+108/554461/campos_512_v4
+108/554492/campos_512_v4
+108/554528/campos_512_v4
+108/554543/campos_512_v4
+108/554547/campos_512_v4
+108/554559/campos_512_v4
+108/554563/campos_512_v4
+108/554604/campos_512_v4
+108/554625/campos_512_v4
+108/554633/campos_512_v4
+108/554676/campos_512_v4
+108/554698/campos_512_v4
+108/554723/campos_512_v4
+108/554731/campos_512_v4
+108/554866/campos_512_v4
+108/554935/campos_512_v4
+108/554974/campos_512_v4
+109/555004/campos_512_v4
+109/555015/campos_512_v4
+109/555078/campos_512_v4
+109/555137/campos_512_v4
+109/555194/campos_512_v4
+109/555196/campos_512_v4
+109/555224/campos_512_v4
+109/555243/campos_512_v4
+109/555272/campos_512_v4
+109/555289/campos_512_v4
+109/555338/campos_512_v4
+109/555343/campos_512_v4
+109/555415/campos_512_v4
+109/555433/campos_512_v4
+109/555451/campos_512_v4
+109/555500/campos_512_v4
+109/555562/campos_512_v4
+109/555564/campos_512_v4
+109/555594/campos_512_v4
+109/555665/campos_512_v4
+109/555706/campos_512_v4
+109/555711/campos_512_v4
+109/555744/campos_512_v4
+109/555787/campos_512_v4
+109/555797/campos_512_v4
+109/555799/campos_512_v4
+109/555808/campos_512_v4
+109/555812/campos_512_v4
+109/555817/campos_512_v4
+109/555822/campos_512_v4
+109/555903/campos_512_v4
+109/555925/campos_512_v4
+109/555967/campos_512_v4
+109/555973/campos_512_v4
+109/555977/campos_512_v4
+109/555981/campos_512_v4
+109/555992/campos_512_v4
+109/556011/campos_512_v4
+109/556055/campos_512_v4
+109/556101/campos_512_v4
+109/556111/campos_512_v4
+109/556114/campos_512_v4
+109/556116/campos_512_v4
+109/556129/campos_512_v4
+109/556132/campos_512_v4
+109/556251/campos_512_v4
+109/556324/campos_512_v4
+109/556325/campos_512_v4
+109/556370/campos_512_v4
+109/556371/campos_512_v4
+109/556384/campos_512_v4
+109/556425/campos_512_v4
+109/556432/campos_512_v4
+109/556468/campos_512_v4
+109/556481/campos_512_v4
+109/556519/campos_512_v4
+109/556532/campos_512_v4
+109/556560/campos_512_v4
+109/556583/campos_512_v4
+109/556611/campos_512_v4
+109/556622/campos_512_v4
+109/556633/campos_512_v4
+109/556643/campos_512_v4
+109/556680/campos_512_v4
+109/556687/campos_512_v4
+109/556688/campos_512_v4
+109/556718/campos_512_v4
+109/556758/campos_512_v4
+109/556788/campos_512_v4
+109/556789/campos_512_v4
+109/556798/campos_512_v4
+109/556799/campos_512_v4
+109/556902/campos_512_v4
+109/556913/campos_512_v4
+109/556921/campos_512_v4
+109/557053/campos_512_v4
+109/557099/campos_512_v4
+109/557107/campos_512_v4
+109/557118/campos_512_v4
+109/557122/campos_512_v4
+109/557159/campos_512_v4
+109/557161/campos_512_v4
+109/557188/campos_512_v4
+109/557221/campos_512_v4
+109/557295/campos_512_v4
+109/557301/campos_512_v4
+109/557309/campos_512_v4
+109/557313/campos_512_v4
+109/557319/campos_512_v4
+109/557381/campos_512_v4
+109/557416/campos_512_v4
+109/557423/campos_512_v4
+109/557448/campos_512_v4
+109/557449/campos_512_v4
+109/557456/campos_512_v4
+109/557462/campos_512_v4
+109/557475/campos_512_v4
+109/557543/campos_512_v4
+109/557555/campos_512_v4
+109/557583/campos_512_v4
+109/557597/campos_512_v4
+109/557621/campos_512_v4
+109/557633/campos_512_v4
+109/557712/campos_512_v4
+109/557748/campos_512_v4
+109/557757/campos_512_v4
+109/557867/campos_512_v4
+109/557878/campos_512_v4
+109/557883/campos_512_v4
+109/557909/campos_512_v4
+109/557914/campos_512_v4
+109/557927/campos_512_v4
+109/557980/campos_512_v4
+109/557986/campos_512_v4
+109/558041/campos_512_v4
+109/558060/campos_512_v4
+109/558064/campos_512_v4
+109/558087/campos_512_v4
+109/558101/campos_512_v4
+109/558104/campos_512_v4
+109/558122/campos_512_v4
+109/558124/campos_512_v4
+109/558128/campos_512_v4
+109/558133/campos_512_v4
+109/558163/campos_512_v4
+109/558213/campos_512_v4
+109/558239/campos_512_v4
+109/558262/campos_512_v4
+109/558264/campos_512_v4
+109/558320/campos_512_v4
+109/558322/campos_512_v4
+109/558334/campos_512_v4
+109/558335/campos_512_v4
+109/558365/campos_512_v4
+109/558369/campos_512_v4
+109/558385/campos_512_v4
+109/558441/campos_512_v4
+109/558504/campos_512_v4
+109/558517/campos_512_v4
+109/558576/campos_512_v4
+109/558611/campos_512_v4
+109/558615/campos_512_v4
+109/558616/campos_512_v4
+109/558619/campos_512_v4
+109/558643/campos_512_v4
+109/558648/campos_512_v4
+109/558665/campos_512_v4
+109/558771/campos_512_v4
+109/558805/campos_512_v4
+109/558868/campos_512_v4
+109/558895/campos_512_v4
+109/558946/campos_512_v4
+109/558977/campos_512_v4
+109/558986/campos_512_v4
+109/558991/campos_512_v4
+109/559044/campos_512_v4
+109/559084/campos_512_v4
+109/559107/campos_512_v4
+109/559114/campos_512_v4
+109/559159/campos_512_v4
+109/559170/campos_512_v4
+109/559234/campos_512_v4
+109/559244/campos_512_v4
+109/559256/campos_512_v4
+109/559327/campos_512_v4
+109/559383/campos_512_v4
+109/559470/campos_512_v4
+109/559517/campos_512_v4
+109/559528/campos_512_v4
+109/559577/campos_512_v4
+109/559597/campos_512_v4
+109/559658/campos_512_v4
+109/559709/campos_512_v4
+109/559739/campos_512_v4
+109/559744/campos_512_v4
+109/559747/campos_512_v4
+109/559749/campos_512_v4
+109/559751/campos_512_v4
+109/559771/campos_512_v4
+109/559802/campos_512_v4
+109/559824/campos_512_v4
+109/559872/campos_512_v4
+109/559892/campos_512_v4
+109/559897/campos_512_v4
+109/559954/campos_512_v4
+11/65013/campos_512_v4
+11/65023/campos_512_v4
+11/65052/campos_512_v4
+11/65064/campos_512_v4
+11/65072/campos_512_v4
+11/65074/campos_512_v4
+11/65100/campos_512_v4
+11/65116/campos_512_v4
+11/65187/campos_512_v4
+11/65305/campos_512_v4
+11/65325/campos_512_v4
+11/65344/campos_512_v4
+11/65345/campos_512_v4
+11/65367/campos_512_v4
+11/65375/campos_512_v4
+11/65384/campos_512_v4
+11/65427/campos_512_v4
+11/65535/campos_512_v4
+11/65562/campos_512_v4
+11/65630/campos_512_v4
+11/65644/campos_512_v4
+11/65653/campos_512_v4
+11/65658/campos_512_v4
+11/65690/campos_512_v4
+11/65742/campos_512_v4
+11/65746/campos_512_v4
+11/65766/campos_512_v4
+11/65812/campos_512_v4
+11/65848/campos_512_v4
+11/65855/campos_512_v4
+11/65871/campos_512_v4
+11/65891/campos_512_v4
+11/65944/campos_512_v4
+11/65984/campos_512_v4
+11/65991/campos_512_v4
+11/66015/campos_512_v4
+11/66059/campos_512_v4
+11/66075/campos_512_v4
+11/66158/campos_512_v4
+11/66173/campos_512_v4
+11/66217/campos_512_v4
+11/66344/campos_512_v4
+11/66347/campos_512_v4
+11/66362/campos_512_v4
+11/66387/campos_512_v4
+11/66394/campos_512_v4
+11/66398/campos_512_v4
+11/66447/campos_512_v4
+11/66451/campos_512_v4
+11/66453/campos_512_v4
+11/66513/campos_512_v4
+11/66522/campos_512_v4
+11/66523/campos_512_v4
+11/66539/campos_512_v4
+11/66616/campos_512_v4
+11/66647/campos_512_v4
+11/66678/campos_512_v4
+11/66690/campos_512_v4
+11/66705/campos_512_v4
+11/66718/campos_512_v4
+11/66747/campos_512_v4
+11/66756/campos_512_v4
+11/66758/campos_512_v4
+11/66764/campos_512_v4
+11/66792/campos_512_v4
+11/66817/campos_512_v4
+11/66826/campos_512_v4
+11/66834/campos_512_v4
+11/66844/campos_512_v4
+11/66868/campos_512_v4
+11/66888/campos_512_v4
+11/66911/campos_512_v4
+11/66923/campos_512_v4
+11/66936/campos_512_v4
+11/66954/campos_512_v4
+11/66963/campos_512_v4
+11/66979/campos_512_v4
+11/67142/campos_512_v4
+11/67176/campos_512_v4
+11/67188/campos_512_v4
+11/67205/campos_512_v4
+11/67268/campos_512_v4
+11/67296/campos_512_v4
+11/67301/campos_512_v4
+11/67323/campos_512_v4
+11/67330/campos_512_v4
+11/67408/campos_512_v4
+11/67439/campos_512_v4
+11/67444/campos_512_v4
+11/67462/campos_512_v4
+11/67470/campos_512_v4
+11/67471/campos_512_v4
+11/67472/campos_512_v4
+11/67516/campos_512_v4
+11/67522/campos_512_v4
+11/67533/campos_512_v4
+11/67548/campos_512_v4
+11/67590/campos_512_v4
+11/67605/campos_512_v4
+11/67643/campos_512_v4
+11/67676/campos_512_v4
+11/67719/campos_512_v4
+11/67752/campos_512_v4
+11/67792/campos_512_v4
+11/67807/campos_512_v4
+11/67809/campos_512_v4
+11/67818/campos_512_v4
+11/67825/campos_512_v4
+11/67839/campos_512_v4
+11/67883/campos_512_v4
+11/67887/campos_512_v4
+11/67912/campos_512_v4
+11/67916/campos_512_v4
+11/67940/campos_512_v4
+11/67947/campos_512_v4
+11/67952/campos_512_v4
+11/67961/campos_512_v4
+11/67971/campos_512_v4
+11/67977/campos_512_v4
+11/68009/campos_512_v4
+11/68015/campos_512_v4
+11/68095/campos_512_v4
+11/68121/campos_512_v4
+11/68162/campos_512_v4
+11/68176/campos_512_v4
+11/68179/campos_512_v4
+11/68193/campos_512_v4
+11/68258/campos_512_v4
+11/68333/campos_512_v4
+11/68399/campos_512_v4
+11/68414/campos_512_v4
+11/68438/campos_512_v4
+11/68441/campos_512_v4
+11/68446/campos_512_v4
+11/68456/campos_512_v4
+11/68481/campos_512_v4
+11/68482/campos_512_v4
+11/68526/campos_512_v4
+11/68568/campos_512_v4
+11/68626/campos_512_v4
+11/68640/campos_512_v4
+11/68644/campos_512_v4
+11/68647/campos_512_v4
+11/68648/campos_512_v4
+11/68660/campos_512_v4
+11/68666/campos_512_v4
+11/68679/campos_512_v4
+11/68718/campos_512_v4
+11/68729/campos_512_v4
+11/68750/campos_512_v4
+11/68784/campos_512_v4
+11/68816/campos_512_v4
+11/68817/campos_512_v4
+11/68846/campos_512_v4
+11/68866/campos_512_v4
+11/68889/campos_512_v4
+11/69070/campos_512_v4
+11/69086/campos_512_v4
+11/69101/campos_512_v4
+11/69163/campos_512_v4
+11/69191/campos_512_v4
+11/69225/campos_512_v4
+11/69261/campos_512_v4
+11/69349/campos_512_v4
+11/69355/campos_512_v4
+11/69372/campos_512_v4
+11/69373/campos_512_v4
+11/69375/campos_512_v4
+11/69406/campos_512_v4
+11/69520/campos_512_v4
+11/69524/campos_512_v4
+11/69534/campos_512_v4
+11/69553/campos_512_v4
+11/69557/campos_512_v4
+11/69581/campos_512_v4
+11/69585/campos_512_v4
+11/69603/campos_512_v4
+11/69619/campos_512_v4
+11/69626/campos_512_v4
+11/69644/campos_512_v4
+11/69657/campos_512_v4
+11/69660/campos_512_v4
+11/69696/campos_512_v4
+11/69706/campos_512_v4
+11/69716/campos_512_v4
+11/69733/campos_512_v4
+11/69735/campos_512_v4
+11/69737/campos_512_v4
+11/69748/campos_512_v4
+11/69758/campos_512_v4
+11/69800/campos_512_v4
+11/69897/campos_512_v4
+11/69898/campos_512_v4
+11/69972/campos_512_v4
+110/560016/campos_512_v4
+110/560101/campos_512_v4
+110/560108/campos_512_v4
+110/560134/campos_512_v4
+110/560136/campos_512_v4
+110/560156/campos_512_v4
+110/560161/campos_512_v4
+110/560175/campos_512_v4
+110/560251/campos_512_v4
+110/560328/campos_512_v4
+110/560352/campos_512_v4
+110/560453/campos_512_v4
+110/560476/campos_512_v4
+110/560550/campos_512_v4
+110/560568/campos_512_v4
+110/560584/campos_512_v4
+110/560612/campos_512_v4
+110/560623/campos_512_v4
+110/560636/campos_512_v4
+110/560656/campos_512_v4
+110/560657/campos_512_v4
+110/560664/campos_512_v4
+110/560695/campos_512_v4
+110/560745/campos_512_v4
+110/560750/campos_512_v4
+110/560754/campos_512_v4
+110/560791/campos_512_v4
+110/560827/campos_512_v4
+110/560892/campos_512_v4
+110/561056/campos_512_v4
+110/561074/campos_512_v4
+110/561093/campos_512_v4
+110/561112/campos_512_v4
+110/561119/campos_512_v4
+110/561129/campos_512_v4
+110/561165/campos_512_v4
+110/561182/campos_512_v4
+110/561194/campos_512_v4
+110/561217/campos_512_v4
+110/561237/campos_512_v4
+110/561269/campos_512_v4
+110/561286/campos_512_v4
+110/561290/campos_512_v4
+110/561302/campos_512_v4
+110/561309/campos_512_v4
+110/561326/campos_512_v4
+110/561360/campos_512_v4
+110/561401/campos_512_v4
+110/561487/campos_512_v4
+110/561493/campos_512_v4
+110/561516/campos_512_v4
+110/561535/campos_512_v4
+110/561550/campos_512_v4
+110/561562/campos_512_v4
+110/561595/campos_512_v4
+110/561621/campos_512_v4
+110/561625/campos_512_v4
+110/561661/campos_512_v4
+110/561687/campos_512_v4
+110/561692/campos_512_v4
+110/561730/campos_512_v4
+110/561731/campos_512_v4
+110/561764/campos_512_v4
+110/561778/campos_512_v4
+110/561796/campos_512_v4
+110/561829/campos_512_v4
+110/561941/campos_512_v4
+110/561972/campos_512_v4
+110/562021/campos_512_v4
+110/562040/campos_512_v4
+110/562042/campos_512_v4
+110/562043/campos_512_v4
+110/562095/campos_512_v4
+110/562129/campos_512_v4
+110/562181/campos_512_v4
+110/562210/campos_512_v4
+110/562214/campos_512_v4
+110/562265/campos_512_v4
+110/562295/campos_512_v4
+110/562311/campos_512_v4
+110/562345/campos_512_v4
+110/562417/campos_512_v4
+110/562564/campos_512_v4
+110/562573/campos_512_v4
+110/562581/campos_512_v4
+110/562619/campos_512_v4
+110/562686/campos_512_v4
+110/562695/campos_512_v4
+110/562731/campos_512_v4
+110/562793/campos_512_v4
+110/562811/campos_512_v4
+110/562830/campos_512_v4
+110/562859/campos_512_v4
+110/562869/campos_512_v4
+110/562890/campos_512_v4
+110/562910/campos_512_v4
+110/562940/campos_512_v4
+110/562954/campos_512_v4
+110/562962/campos_512_v4
+110/562963/campos_512_v4
+110/562971/campos_512_v4
+110/562978/campos_512_v4
+110/563016/campos_512_v4
+110/563020/campos_512_v4
+110/563024/campos_512_v4
+110/563035/campos_512_v4
+110/563042/campos_512_v4
+110/563052/campos_512_v4
+110/563089/campos_512_v4
+110/563098/campos_512_v4
+110/563104/campos_512_v4
+110/563133/campos_512_v4
+110/563138/campos_512_v4
+110/563180/campos_512_v4
+110/563217/campos_512_v4
+110/563219/campos_512_v4
+110/563273/campos_512_v4
+110/563280/campos_512_v4
+110/563319/campos_512_v4
+110/563372/campos_512_v4
+110/563406/campos_512_v4
+110/563498/campos_512_v4
+110/563509/campos_512_v4
+110/563532/campos_512_v4
+110/563547/campos_512_v4
+110/563557/campos_512_v4
+110/563563/campos_512_v4
+110/563584/campos_512_v4
+110/563592/campos_512_v4
+110/563615/campos_512_v4
+110/563716/campos_512_v4
+110/563756/campos_512_v4
+110/563827/campos_512_v4
+110/563842/campos_512_v4
+110/563930/campos_512_v4
+110/563933/campos_512_v4
+110/564026/campos_512_v4
+110/564045/campos_512_v4
+110/564072/campos_512_v4
+110/564133/campos_512_v4
+110/564153/campos_512_v4
+110/564192/campos_512_v4
+110/564195/campos_512_v4
+110/564203/campos_512_v4
+110/564233/campos_512_v4
+110/564265/campos_512_v4
+110/564282/campos_512_v4
+110/564327/campos_512_v4
+110/564414/campos_512_v4
+110/564471/campos_512_v4
+110/564494/campos_512_v4
+110/564497/campos_512_v4
+110/564512/campos_512_v4
+110/564523/campos_512_v4
+110/564541/campos_512_v4
+110/564545/campos_512_v4
+110/564587/campos_512_v4
+110/564637/campos_512_v4
+110/564640/campos_512_v4
+110/564645/campos_512_v4
+110/564669/campos_512_v4
+110/564723/campos_512_v4
+110/564919/campos_512_v4
+111/565008/campos_512_v4
+111/565022/campos_512_v4
+111/565069/campos_512_v4
+111/565078/campos_512_v4
+111/565098/campos_512_v4
+111/565108/campos_512_v4
+111/565117/campos_512_v4
+111/565140/campos_512_v4
+111/565225/campos_512_v4
+111/565277/campos_512_v4
+111/565304/campos_512_v4
+111/565305/campos_512_v4
+111/565315/campos_512_v4
+111/565347/campos_512_v4
+111/565385/campos_512_v4
+111/565396/campos_512_v4
+111/565426/campos_512_v4
+111/565454/campos_512_v4
+111/565463/campos_512_v4
+111/565467/campos_512_v4
+111/565477/campos_512_v4
+111/565598/campos_512_v4
+111/565601/campos_512_v4
+111/565605/campos_512_v4
+111/565755/campos_512_v4
+111/565764/campos_512_v4
+111/565806/campos_512_v4
+111/565809/campos_512_v4
+111/565946/campos_512_v4
+111/566012/campos_512_v4
+111/566022/campos_512_v4
+111/566042/campos_512_v4
+111/566129/campos_512_v4
+111/566144/campos_512_v4
+111/566154/campos_512_v4
+111/566193/campos_512_v4
+111/566252/campos_512_v4
+111/566263/campos_512_v4
+111/566279/campos_512_v4
+111/566316/campos_512_v4
+111/566317/campos_512_v4
+111/566319/campos_512_v4
+111/566378/campos_512_v4
+111/566477/campos_512_v4
+111/566506/campos_512_v4
+111/566530/campos_512_v4
+111/566560/campos_512_v4
+111/566606/campos_512_v4
+111/566608/campos_512_v4
+111/566609/campos_512_v4
+111/566614/campos_512_v4
+111/566653/campos_512_v4
+111/566689/campos_512_v4
+111/566693/campos_512_v4
+111/566742/campos_512_v4
+111/566776/campos_512_v4
+111/566817/campos_512_v4
+111/566844/campos_512_v4
+111/566891/campos_512_v4
+111/566892/campos_512_v4
+111/566894/campos_512_v4
+111/566900/campos_512_v4
+111/566927/campos_512_v4
+111/567004/campos_512_v4
+111/567030/campos_512_v4
+111/567034/campos_512_v4
+111/567056/campos_512_v4
+111/567057/campos_512_v4
+111/567061/campos_512_v4
+111/567087/campos_512_v4
+111/567108/campos_512_v4
+111/567109/campos_512_v4
+111/567198/campos_512_v4
+111/567206/campos_512_v4
+111/567214/campos_512_v4
+111/567224/campos_512_v4
+111/567252/campos_512_v4
+111/567265/campos_512_v4
+111/567270/campos_512_v4
+111/567274/campos_512_v4
+111/567340/campos_512_v4
+111/567378/campos_512_v4
+111/567395/campos_512_v4
+111/567461/campos_512_v4
+111/567484/campos_512_v4
+111/567613/campos_512_v4
+111/567647/campos_512_v4
+111/567661/campos_512_v4
+111/567705/campos_512_v4
+111/567823/campos_512_v4
+111/567831/campos_512_v4
+111/567832/campos_512_v4
+111/567897/campos_512_v4
+111/567901/campos_512_v4
+111/568000/campos_512_v4
+111/568004/campos_512_v4
+111/568007/campos_512_v4
+111/568121/campos_512_v4
+111/568176/campos_512_v4
+111/568177/campos_512_v4
+111/568191/campos_512_v4
+111/568197/campos_512_v4
+111/568310/campos_512_v4
+111/568329/campos_512_v4
+111/568335/campos_512_v4
+111/568351/campos_512_v4
+111/568359/campos_512_v4
+111/568466/campos_512_v4
+111/568477/campos_512_v4
+111/568489/campos_512_v4
+111/568512/campos_512_v4
+111/568529/campos_512_v4
+111/568577/campos_512_v4
+111/568590/campos_512_v4
+111/568591/campos_512_v4
+111/568605/campos_512_v4
+111/568609/campos_512_v4
+111/568636/campos_512_v4
+111/568751/campos_512_v4
+111/568775/campos_512_v4
+111/568848/campos_512_v4
+111/568864/campos_512_v4
+111/568902/campos_512_v4
+111/568929/campos_512_v4
+111/568950/campos_512_v4
+111/568953/campos_512_v4
+111/568982/campos_512_v4
+111/568988/campos_512_v4
+111/568994/campos_512_v4
+111/569057/campos_512_v4
+111/569062/campos_512_v4
+111/569118/campos_512_v4
+111/569137/campos_512_v4
+111/569154/campos_512_v4
+111/569193/campos_512_v4
+111/569238/campos_512_v4
+111/569259/campos_512_v4
+111/569294/campos_512_v4
+111/569308/campos_512_v4
+111/569367/campos_512_v4
+111/569370/campos_512_v4
+111/569475/campos_512_v4
+111/569488/campos_512_v4
+111/569492/campos_512_v4
+111/569527/campos_512_v4
+111/569692/campos_512_v4
+111/569702/campos_512_v4
+111/569724/campos_512_v4
+111/569771/campos_512_v4
+111/569774/campos_512_v4
+111/569814/campos_512_v4
+111/569898/campos_512_v4
+111/569903/campos_512_v4
+111/569942/campos_512_v4
+111/569959/campos_512_v4
+111/570001/campos_512_v4
+112/570053/campos_512_v4
+112/570148/campos_512_v4
+112/570180/campos_512_v4
+112/570213/campos_512_v4
+112/570230/campos_512_v4
+112/570234/campos_512_v4
+112/570244/campos_512_v4
+112/570531/campos_512_v4
+112/570565/campos_512_v4
+112/570632/campos_512_v4
+112/570651/campos_512_v4
+112/570827/campos_512_v4
+112/570829/campos_512_v4
+112/570868/campos_512_v4
+112/570893/campos_512_v4
+112/570937/campos_512_v4
+112/571012/campos_512_v4
+112/571025/campos_512_v4
+112/571028/campos_512_v4
+112/571079/campos_512_v4
+112/571087/campos_512_v4
+112/571131/campos_512_v4
+112/571162/campos_512_v4
+112/571188/campos_512_v4
+112/571199/campos_512_v4
+112/571226/campos_512_v4
+112/571260/campos_512_v4
+112/571292/campos_512_v4
+112/571414/campos_512_v4
+112/571488/campos_512_v4
+112/571510/campos_512_v4
+112/571599/campos_512_v4
+112/571613/campos_512_v4
+112/571659/campos_512_v4
+112/571681/campos_512_v4
+112/571732/campos_512_v4
+112/571920/campos_512_v4
+112/571992/campos_512_v4
+112/572006/campos_512_v4
+112/572161/campos_512_v4
+112/572182/campos_512_v4
+112/572209/campos_512_v4
+112/572242/campos_512_v4
+112/572365/campos_512_v4
+112/572404/campos_512_v4
+112/572493/campos_512_v4
+112/572500/campos_512_v4
+112/572534/campos_512_v4
+112/572576/campos_512_v4
+112/572637/campos_512_v4
+112/572638/campos_512_v4
+112/572694/campos_512_v4
+112/572708/campos_512_v4
+112/572796/campos_512_v4
+112/572851/campos_512_v4
+112/572856/campos_512_v4
+112/572902/campos_512_v4
+112/572910/campos_512_v4
+112/572931/campos_512_v4
+112/572949/campos_512_v4
+112/572951/campos_512_v4
+112/572954/campos_512_v4
+112/572994/campos_512_v4
+112/573001/campos_512_v4
+112/573056/campos_512_v4
+112/573063/campos_512_v4
+112/573096/campos_512_v4
+112/573110/campos_512_v4
+112/573131/campos_512_v4
+112/573156/campos_512_v4
+112/573198/campos_512_v4
+112/573230/campos_512_v4
+112/573254/campos_512_v4
+112/573274/campos_512_v4
+112/573278/campos_512_v4
+112/573283/campos_512_v4
+112/573285/campos_512_v4
+112/573349/campos_512_v4
+112/573351/campos_512_v4
+112/573357/campos_512_v4
+112/573364/campos_512_v4
+112/573372/campos_512_v4
+112/573374/campos_512_v4
+112/573382/campos_512_v4
+112/573405/campos_512_v4
+112/573438/campos_512_v4
+112/573454/campos_512_v4
+112/573483/campos_512_v4
+112/573492/campos_512_v4
+112/573505/campos_512_v4
+112/573534/campos_512_v4
+112/573548/campos_512_v4
+112/573581/campos_512_v4
+112/573586/campos_512_v4
+112/573589/campos_512_v4
+112/573593/campos_512_v4
+112/573616/campos_512_v4
+112/573617/campos_512_v4
+112/573661/campos_512_v4
+112/573662/campos_512_v4
+112/573720/campos_512_v4
+112/573760/campos_512_v4
+112/573766/campos_512_v4
+112/573770/campos_512_v4
+112/573833/campos_512_v4
+112/573908/campos_512_v4
+112/573909/campos_512_v4
+112/573920/campos_512_v4
+112/573938/campos_512_v4
+112/573990/campos_512_v4
+112/574008/campos_512_v4
+112/574074/campos_512_v4
+112/574077/campos_512_v4
+112/574082/campos_512_v4
+112/574084/campos_512_v4
+112/574101/campos_512_v4
+112/574137/campos_512_v4
+112/574167/campos_512_v4
+112/574181/campos_512_v4
+112/574187/campos_512_v4
+112/574208/campos_512_v4
+112/574230/campos_512_v4
+112/574236/campos_512_v4
+112/574238/campos_512_v4
+112/574244/campos_512_v4
+112/574263/campos_512_v4
+112/574267/campos_512_v4
+112/574286/campos_512_v4
+112/574330/campos_512_v4
+112/574346/campos_512_v4
+112/574369/campos_512_v4
+112/574409/campos_512_v4
+112/574453/campos_512_v4
+112/574460/campos_512_v4
+112/574504/campos_512_v4
+112/574529/campos_512_v4
+112/574543/campos_512_v4
+112/574564/campos_512_v4
+112/574578/campos_512_v4
+112/574618/campos_512_v4
+112/574656/campos_512_v4
+112/574698/campos_512_v4
+112/574729/campos_512_v4
+112/574741/campos_512_v4
+112/574780/campos_512_v4
+112/574826/campos_512_v4
+112/574856/campos_512_v4
+112/574874/campos_512_v4
+112/574905/campos_512_v4
+113/575004/campos_512_v4
+113/575012/campos_512_v4
+113/575013/campos_512_v4
+113/575100/campos_512_v4
+113/575134/campos_512_v4
+113/575166/campos_512_v4
+113/575170/campos_512_v4
+113/575193/campos_512_v4
+113/575199/campos_512_v4
+113/575200/campos_512_v4
+113/575202/campos_512_v4
+113/575224/campos_512_v4
+113/575254/campos_512_v4
+113/575259/campos_512_v4
+113/575292/campos_512_v4
+113/575362/campos_512_v4
+113/575371/campos_512_v4
+113/575383/campos_512_v4
+113/575385/campos_512_v4
+113/575412/campos_512_v4
+113/575433/campos_512_v4
+113/575438/campos_512_v4
+113/575525/campos_512_v4
+113/575544/campos_512_v4
+113/575552/campos_512_v4
+113/575559/campos_512_v4
+113/575562/campos_512_v4
+113/575574/campos_512_v4
+113/575585/campos_512_v4
+113/575624/campos_512_v4
+113/575684/campos_512_v4
+113/575709/campos_512_v4
+113/575725/campos_512_v4
+113/575778/campos_512_v4
+113/575786/campos_512_v4
+113/575801/campos_512_v4
+113/575823/campos_512_v4
+113/575914/campos_512_v4
+113/575925/campos_512_v4
+113/575929/campos_512_v4
+113/575939/campos_512_v4
+113/575964/campos_512_v4
+113/575971/campos_512_v4
+113/575999/campos_512_v4
+113/576012/campos_512_v4
+113/576020/campos_512_v4
+113/576028/campos_512_v4
+113/576033/campos_512_v4
+113/576100/campos_512_v4
+113/576108/campos_512_v4
+113/576114/campos_512_v4
+113/576117/campos_512_v4
+113/576149/campos_512_v4
+113/576170/campos_512_v4
+113/576295/campos_512_v4
+113/576305/campos_512_v4
+113/576333/campos_512_v4
+113/576351/campos_512_v4
+113/576404/campos_512_v4
+113/576406/campos_512_v4
+113/576455/campos_512_v4
+113/576472/campos_512_v4
+113/576490/campos_512_v4
+113/576506/campos_512_v4
+113/576520/campos_512_v4
+113/576599/campos_512_v4
+113/576681/campos_512_v4
+113/576725/campos_512_v4
+113/576752/campos_512_v4
+113/576773/campos_512_v4
+113/576782/campos_512_v4
+113/576808/campos_512_v4
+113/576865/campos_512_v4
+113/576948/campos_512_v4
+113/576960/campos_512_v4
+113/576963/campos_512_v4
+113/577063/campos_512_v4
+113/577096/campos_512_v4
+113/577119/campos_512_v4
+113/577128/campos_512_v4
+113/577148/campos_512_v4
+113/577174/campos_512_v4
+113/577224/campos_512_v4
+113/577272/campos_512_v4
+113/577302/campos_512_v4
+113/577390/campos_512_v4
+113/577494/campos_512_v4
+113/577508/campos_512_v4
+113/577517/campos_512_v4
+113/577554/campos_512_v4
+113/577607/campos_512_v4
+113/577614/campos_512_v4
+113/577623/campos_512_v4
+113/577704/campos_512_v4
+113/577720/campos_512_v4
+113/577741/campos_512_v4
+113/577746/campos_512_v4
+113/577787/campos_512_v4
+113/577791/campos_512_v4
+113/577836/campos_512_v4
+113/577845/campos_512_v4
+113/577862/campos_512_v4
+113/577881/campos_512_v4
+113/577888/campos_512_v4
+113/577953/campos_512_v4
+113/577974/campos_512_v4
+113/578019/campos_512_v4
+113/578058/campos_512_v4
+113/578103/campos_512_v4
+113/578120/campos_512_v4
+113/578126/campos_512_v4
+113/578130/campos_512_v4
+113/578235/campos_512_v4
+113/578260/campos_512_v4
+113/578266/campos_512_v4
+113/578364/campos_512_v4
+113/578417/campos_512_v4
+113/578419/campos_512_v4
+113/578457/campos_512_v4
+113/578558/campos_512_v4
+113/578562/campos_512_v4
+113/578574/campos_512_v4
+113/578648/campos_512_v4
+113/578667/campos_512_v4
+113/578676/campos_512_v4
+113/578689/campos_512_v4
+113/578754/campos_512_v4
+113/578775/campos_512_v4
+113/578794/campos_512_v4
+113/578835/campos_512_v4
+113/578863/campos_512_v4
+113/578957/campos_512_v4
+113/578985/campos_512_v4
+113/578996/campos_512_v4
+113/578997/campos_512_v4
+113/579016/campos_512_v4
+113/579067/campos_512_v4
+113/579085/campos_512_v4
+113/579128/campos_512_v4
+113/579297/campos_512_v4
+113/579312/campos_512_v4
+113/579359/campos_512_v4
+113/579387/campos_512_v4
+113/579396/campos_512_v4
+113/579405/campos_512_v4
+113/579409/campos_512_v4
+113/579430/campos_512_v4
+113/579443/campos_512_v4
+113/579447/campos_512_v4
+113/579466/campos_512_v4
+113/579474/campos_512_v4
+113/579522/campos_512_v4
+113/579555/campos_512_v4
+113/579556/campos_512_v4
+113/579610/campos_512_v4
+113/579621/campos_512_v4
+113/579622/campos_512_v4
+113/579630/campos_512_v4
+113/579653/campos_512_v4
+113/579665/campos_512_v4
+113/579676/campos_512_v4
+113/579763/campos_512_v4
+113/579785/campos_512_v4
+113/579788/campos_512_v4
+113/579813/campos_512_v4
+113/579830/campos_512_v4
+113/579853/campos_512_v4
+113/579874/campos_512_v4
+113/579897/campos_512_v4
+113/579943/campos_512_v4
+113/579968/campos_512_v4
+113/579985/campos_512_v4
+113/579995/campos_512_v4
+114/580049/campos_512_v4
+114/580104/campos_512_v4
+114/580136/campos_512_v4
+114/580171/campos_512_v4
+114/580200/campos_512_v4
+114/580202/campos_512_v4
+114/580227/campos_512_v4
+114/580229/campos_512_v4
+114/580248/campos_512_v4
+114/580277/campos_512_v4
+114/580287/campos_512_v4
+114/580293/campos_512_v4
+114/580339/campos_512_v4
+114/580362/campos_512_v4
+114/580375/campos_512_v4
+114/580406/campos_512_v4
+114/580425/campos_512_v4
+114/580465/campos_512_v4
+114/580492/campos_512_v4
+114/580496/campos_512_v4
+114/580501/campos_512_v4
+114/580508/campos_512_v4
+114/580531/campos_512_v4
+114/580619/campos_512_v4
+114/580635/campos_512_v4
+114/580642/campos_512_v4
+114/580696/campos_512_v4
+114/580698/campos_512_v4
+114/580730/campos_512_v4
+114/580749/campos_512_v4
+114/580865/campos_512_v4
+114/580872/campos_512_v4
+114/580883/campos_512_v4
+114/580889/campos_512_v4
+114/580923/campos_512_v4
+114/580971/campos_512_v4
+114/581002/campos_512_v4
+114/581025/campos_512_v4
+114/581034/campos_512_v4
+114/581046/campos_512_v4
+114/581064/campos_512_v4
+114/581105/campos_512_v4
+114/581116/campos_512_v4
+114/581174/campos_512_v4
+114/581191/campos_512_v4
+114/581203/campos_512_v4
+114/581247/campos_512_v4
+114/581257/campos_512_v4
+114/581282/campos_512_v4
+114/581310/campos_512_v4
+114/581476/campos_512_v4
+114/581492/campos_512_v4
+114/581513/campos_512_v4
+114/581611/campos_512_v4
+114/581640/campos_512_v4
+114/581661/campos_512_v4
+114/581668/campos_512_v4
+114/581691/campos_512_v4
+114/581693/campos_512_v4
+114/581722/campos_512_v4
+114/581735/campos_512_v4
+114/581749/campos_512_v4
+114/581765/campos_512_v4
+114/581770/campos_512_v4
+114/581783/campos_512_v4
+114/581788/campos_512_v4
+114/581804/campos_512_v4
+114/581858/campos_512_v4
+114/581869/campos_512_v4
+114/581871/campos_512_v4
+114/581917/campos_512_v4
+114/581968/campos_512_v4
+114/581989/campos_512_v4
+114/582003/campos_512_v4
+114/582039/campos_512_v4
+114/582047/campos_512_v4
+114/582056/campos_512_v4
+114/582063/campos_512_v4
+114/582064/campos_512_v4
+114/582093/campos_512_v4
+114/582117/campos_512_v4
+114/582223/campos_512_v4
+114/582233/campos_512_v4
+114/582235/campos_512_v4
+114/582239/campos_512_v4
+114/582242/campos_512_v4
+114/582283/campos_512_v4
+114/582315/campos_512_v4
+114/582428/campos_512_v4
+114/582489/campos_512_v4
+114/582514/campos_512_v4
+114/582539/campos_512_v4
+114/582635/campos_512_v4
+114/582689/campos_512_v4
+114/582701/campos_512_v4
+114/582763/campos_512_v4
+114/582785/campos_512_v4
+114/582791/campos_512_v4
+114/582797/campos_512_v4
+114/582825/campos_512_v4
+114/582840/campos_512_v4
+114/582930/campos_512_v4
+114/583046/campos_512_v4
+114/583153/campos_512_v4
+114/583177/campos_512_v4
+114/583262/campos_512_v4
+114/583274/campos_512_v4
+114/583335/campos_512_v4
+114/583361/campos_512_v4
+114/583367/campos_512_v4
+114/583378/campos_512_v4
+114/583404/campos_512_v4
+114/583437/campos_512_v4
+114/583440/campos_512_v4
+114/583457/campos_512_v4
+114/583506/campos_512_v4
+114/583621/campos_512_v4
+114/583629/campos_512_v4
+114/583656/campos_512_v4
+114/583669/campos_512_v4
+114/583684/campos_512_v4
+114/583699/campos_512_v4
+114/583718/campos_512_v4
+114/583801/campos_512_v4
+114/583839/campos_512_v4
+114/583849/campos_512_v4
+114/583858/campos_512_v4
+114/583920/campos_512_v4
+114/583938/campos_512_v4
+114/583955/campos_512_v4
+114/583962/campos_512_v4
+114/583977/campos_512_v4
+114/584034/campos_512_v4
+114/584045/campos_512_v4
+114/584049/campos_512_v4
+114/584114/campos_512_v4
+114/584124/campos_512_v4
+114/584133/campos_512_v4
+114/584178/campos_512_v4
+114/584204/campos_512_v4
+114/584208/campos_512_v4
+114/584237/campos_512_v4
+114/584251/campos_512_v4
+114/584277/campos_512_v4
+114/584288/campos_512_v4
+114/584298/campos_512_v4
+114/584407/campos_512_v4
+114/584414/campos_512_v4
+114/584459/campos_512_v4
+114/584480/campos_512_v4
+114/584495/campos_512_v4
+114/584515/campos_512_v4
+114/584679/campos_512_v4
+114/584739/campos_512_v4
+114/584756/campos_512_v4
+114/584785/campos_512_v4
+114/584806/campos_512_v4
+114/584859/campos_512_v4
+114/584875/campos_512_v4
+114/584882/campos_512_v4
+114/584909/campos_512_v4
+114/584919/campos_512_v4
+114/584923/campos_512_v4
+114/584943/campos_512_v4
+114/584957/campos_512_v4
+114/584991/campos_512_v4
+114/584996/campos_512_v4
+115/585024/campos_512_v4
+115/585058/campos_512_v4
+115/585070/campos_512_v4
+115/585170/campos_512_v4
+115/585197/campos_512_v4
+115/585201/campos_512_v4
+115/585206/campos_512_v4
+115/585217/campos_512_v4
+115/585298/campos_512_v4
+115/585325/campos_512_v4
+115/585336/campos_512_v4
+115/585353/campos_512_v4
+115/585406/campos_512_v4
+115/585420/campos_512_v4
+115/585438/campos_512_v4
+115/585439/campos_512_v4
+115/585468/campos_512_v4
+115/585486/campos_512_v4
+115/585509/campos_512_v4
+115/585531/campos_512_v4
+115/585541/campos_512_v4
+115/585562/campos_512_v4
+115/585650/campos_512_v4
+115/585670/campos_512_v4
+115/585687/campos_512_v4
+115/585706/campos_512_v4
+115/585737/campos_512_v4
+115/585753/campos_512_v4
+115/585767/campos_512_v4
+115/585785/campos_512_v4
+115/585914/campos_512_v4
+115/585989/campos_512_v4
+115/586008/campos_512_v4
+115/586029/campos_512_v4
+115/586042/campos_512_v4
+115/586068/campos_512_v4
+115/586096/campos_512_v4
+115/586140/campos_512_v4
+115/586156/campos_512_v4
+115/586191/campos_512_v4
+115/586247/campos_512_v4
+115/586255/campos_512_v4
+115/586325/campos_512_v4
+115/586355/campos_512_v4
+115/586528/campos_512_v4
+115/586597/campos_512_v4
+115/586674/campos_512_v4
+115/586684/campos_512_v4
+115/586700/campos_512_v4
+115/586702/campos_512_v4
+115/586728/campos_512_v4
+115/586729/campos_512_v4
+115/586737/campos_512_v4
+115/586811/campos_512_v4
+115/586846/campos_512_v4
+115/586861/campos_512_v4
+115/586972/campos_512_v4
+115/586974/campos_512_v4
+115/587064/campos_512_v4
+115/587071/campos_512_v4
+115/587077/campos_512_v4
+115/587155/campos_512_v4
+115/587171/campos_512_v4
+115/587179/campos_512_v4
+115/587185/campos_512_v4
+115/587197/campos_512_v4
+115/587266/campos_512_v4
+115/587292/campos_512_v4
+115/587324/campos_512_v4
+115/587328/campos_512_v4
+115/587335/campos_512_v4
+115/587344/campos_512_v4
+115/587349/campos_512_v4
+115/587373/campos_512_v4
+115/587385/campos_512_v4
+115/587429/campos_512_v4
+115/587467/campos_512_v4
+115/587520/campos_512_v4
+115/587535/campos_512_v4
+115/587550/campos_512_v4
+115/587641/campos_512_v4
+115/587658/campos_512_v4
+115/587734/campos_512_v4
+115/587759/campos_512_v4
+115/587835/campos_512_v4
+115/587854/campos_512_v4
+115/587899/campos_512_v4
+115/587913/campos_512_v4
+115/587933/campos_512_v4
+115/587943/campos_512_v4
+115/587950/campos_512_v4
+115/587993/campos_512_v4
+115/588000/campos_512_v4
+115/588022/campos_512_v4
+115/588106/campos_512_v4
+115/588134/campos_512_v4
+115/588181/campos_512_v4
+115/588212/campos_512_v4
+115/588243/campos_512_v4
+115/588301/campos_512_v4
+115/588333/campos_512_v4
+115/588369/campos_512_v4
+115/588371/campos_512_v4
+115/588387/campos_512_v4
+115/588426/campos_512_v4
+115/588469/campos_512_v4
+115/588476/campos_512_v4
+115/588498/campos_512_v4
+115/588504/campos_512_v4
+115/588549/campos_512_v4
+115/588568/campos_512_v4
+115/588582/campos_512_v4
+115/588651/campos_512_v4
+115/588668/campos_512_v4
+115/588716/campos_512_v4
+115/588751/campos_512_v4
+115/588775/campos_512_v4
+115/588787/campos_512_v4
+115/588799/campos_512_v4
+115/588824/campos_512_v4
+115/588827/campos_512_v4
+115/588846/campos_512_v4
+115/588856/campos_512_v4
+115/588902/campos_512_v4
+115/588920/campos_512_v4
+115/588951/campos_512_v4
+115/588988/campos_512_v4
+115/588993/campos_512_v4
+115/588995/campos_512_v4
+115/589096/campos_512_v4
+115/589110/campos_512_v4
+115/589125/campos_512_v4
+115/589208/campos_512_v4
+115/589215/campos_512_v4
+115/589247/campos_512_v4
+115/589290/campos_512_v4
+115/589307/campos_512_v4
+115/589400/campos_512_v4
+115/589465/campos_512_v4
+115/589514/campos_512_v4
+115/589543/campos_512_v4
+115/589544/campos_512_v4
+115/589549/campos_512_v4
+115/589552/campos_512_v4
+115/589567/campos_512_v4
+115/589580/campos_512_v4
+115/589592/campos_512_v4
+115/589624/campos_512_v4
+115/589719/campos_512_v4
+115/589759/campos_512_v4
+115/589776/campos_512_v4
+115/589807/campos_512_v4
+115/589826/campos_512_v4
+115/589833/campos_512_v4
+115/589850/campos_512_v4
+115/589854/campos_512_v4
+115/589912/campos_512_v4
+115/589930/campos_512_v4
+115/589952/campos_512_v4
+115/589960/campos_512_v4
+115/589968/campos_512_v4
+116/590006/campos_512_v4
+116/590056/campos_512_v4
+116/590075/campos_512_v4
+116/590098/campos_512_v4
+116/590099/campos_512_v4
+116/590139/campos_512_v4
+116/590172/campos_512_v4
+116/590225/campos_512_v4
+116/590244/campos_512_v4
+116/590261/campos_512_v4
+116/590262/campos_512_v4
+116/590267/campos_512_v4
+116/590270/campos_512_v4
+116/590271/campos_512_v4
+116/590281/campos_512_v4
+116/590289/campos_512_v4
+116/590290/campos_512_v4
+116/590301/campos_512_v4
+116/590321/campos_512_v4
+116/590341/campos_512_v4
+116/590344/campos_512_v4
+116/590384/campos_512_v4
+116/590400/campos_512_v4
+116/590452/campos_512_v4
+116/590480/campos_512_v4
+116/590524/campos_512_v4
+116/590550/campos_512_v4
+116/590646/campos_512_v4
+116/590678/campos_512_v4
+116/590718/campos_512_v4
+116/590785/campos_512_v4
+116/590793/campos_512_v4
+116/590794/campos_512_v4
+116/590798/campos_512_v4
+116/590865/campos_512_v4
+116/590874/campos_512_v4
+116/590934/campos_512_v4
+116/590963/campos_512_v4
+116/591035/campos_512_v4
+116/591044/campos_512_v4
+116/591072/campos_512_v4
+116/591146/campos_512_v4
+116/591220/campos_512_v4
+116/591287/campos_512_v4
+116/591357/campos_512_v4
+116/591400/campos_512_v4
+116/591455/campos_512_v4
+116/591464/campos_512_v4
+116/591487/campos_512_v4
+116/591534/campos_512_v4
+116/591539/campos_512_v4
+116/591580/campos_512_v4
+116/591593/campos_512_v4
+116/591597/campos_512_v4
+116/591615/campos_512_v4
+116/591638/campos_512_v4
+116/591645/campos_512_v4
+116/591650/campos_512_v4
+116/591667/campos_512_v4
+116/591698/campos_512_v4
+116/591749/campos_512_v4
+116/591772/campos_512_v4
+116/591773/campos_512_v4
+116/591801/campos_512_v4
+116/591815/campos_512_v4
+116/591825/campos_512_v4
+116/591832/campos_512_v4
+116/591849/campos_512_v4
+116/591864/campos_512_v4
+116/591872/campos_512_v4
+116/591876/campos_512_v4
+116/591896/campos_512_v4
+116/591943/campos_512_v4
+116/591981/campos_512_v4
+116/591991/campos_512_v4
+116/592013/campos_512_v4
+116/592052/campos_512_v4
+116/592073/campos_512_v4
+116/592111/campos_512_v4
+116/592127/campos_512_v4
+116/592129/campos_512_v4
+116/592135/campos_512_v4
+116/592196/campos_512_v4
+116/592214/campos_512_v4
+116/592215/campos_512_v4
+116/592220/campos_512_v4
+116/592244/campos_512_v4
+116/592257/campos_512_v4
+116/592303/campos_512_v4
+116/592329/campos_512_v4
+116/592358/campos_512_v4
+116/592375/campos_512_v4
+116/592423/campos_512_v4
+116/592461/campos_512_v4
+116/592469/campos_512_v4
+116/592477/campos_512_v4
+116/592519/campos_512_v4
+116/592542/campos_512_v4
+116/592552/campos_512_v4
+116/592567/campos_512_v4
+116/592584/campos_512_v4
+116/592672/campos_512_v4
+116/592721/campos_512_v4
+116/592756/campos_512_v4
+116/592759/campos_512_v4
+116/592778/campos_512_v4
+116/592793/campos_512_v4
+116/592817/campos_512_v4
+116/592837/campos_512_v4
+116/592865/campos_512_v4
+116/592908/campos_512_v4
+116/592963/campos_512_v4
+116/593062/campos_512_v4
+116/593073/campos_512_v4
+116/593125/campos_512_v4
+116/593133/campos_512_v4
+116/593164/campos_512_v4
+116/593189/campos_512_v4
+116/593196/campos_512_v4
+116/593200/campos_512_v4
+116/593209/campos_512_v4
+116/593244/campos_512_v4
+116/593270/campos_512_v4
+116/593273/campos_512_v4
+116/593324/campos_512_v4
+116/593386/campos_512_v4
+116/593409/campos_512_v4
+116/593464/campos_512_v4
+116/593472/campos_512_v4
+116/593479/campos_512_v4
+116/593497/campos_512_v4
+116/593499/campos_512_v4
+116/593514/campos_512_v4
+116/593539/campos_512_v4
+116/593563/campos_512_v4
+116/593613/campos_512_v4
+116/593629/campos_512_v4
+116/593691/campos_512_v4
+116/593723/campos_512_v4
+116/593784/campos_512_v4
+116/593820/campos_512_v4
+116/593834/campos_512_v4
+116/593919/campos_512_v4
+116/593946/campos_512_v4
+116/593973/campos_512_v4
+116/593996/campos_512_v4
+116/593997/campos_512_v4
+116/594067/campos_512_v4
+116/594102/campos_512_v4
+116/594109/campos_512_v4
+116/594137/campos_512_v4
+116/594145/campos_512_v4
+116/594177/campos_512_v4
+116/594188/campos_512_v4
+116/594191/campos_512_v4
+116/594225/campos_512_v4
+116/594228/campos_512_v4
+116/594231/campos_512_v4
+116/594279/campos_512_v4
+116/594353/campos_512_v4
+116/594366/campos_512_v4
+116/594400/campos_512_v4
+116/594467/campos_512_v4
+116/594477/campos_512_v4
+116/594484/campos_512_v4
+116/594563/campos_512_v4
+116/594630/campos_512_v4
+116/594688/campos_512_v4
+116/594724/campos_512_v4
+116/594743/campos_512_v4
+116/594769/campos_512_v4
+116/594774/campos_512_v4
+116/594789/campos_512_v4
+116/594792/campos_512_v4
+116/594853/campos_512_v4
+116/594862/campos_512_v4
+116/594905/campos_512_v4
+116/594912/campos_512_v4
+116/594917/campos_512_v4
+116/594931/campos_512_v4
+116/594935/campos_512_v4
+117/595008/campos_512_v4
+117/595071/campos_512_v4
+117/595117/campos_512_v4
+117/595223/campos_512_v4
+117/595230/campos_512_v4
+117/595241/campos_512_v4
+117/595259/campos_512_v4
+117/595345/campos_512_v4
+117/595437/campos_512_v4
+117/595552/campos_512_v4
+117/595596/campos_512_v4
+117/595631/campos_512_v4
+117/595643/campos_512_v4
+117/595651/campos_512_v4
+117/595666/campos_512_v4
+117/595692/campos_512_v4
+117/595708/campos_512_v4
+117/595730/campos_512_v4
+117/595742/campos_512_v4
+117/595828/campos_512_v4
+117/595860/campos_512_v4
+117/595880/campos_512_v4
+117/596003/campos_512_v4
+117/596053/campos_512_v4
+117/596098/campos_512_v4
+117/596103/campos_512_v4
+117/596127/campos_512_v4
+117/596159/campos_512_v4
+117/596174/campos_512_v4
+117/596206/campos_512_v4
+117/596207/campos_512_v4
+117/596222/campos_512_v4
+117/596224/campos_512_v4
+117/596257/campos_512_v4
+117/596300/campos_512_v4
+117/596307/campos_512_v4
+117/596411/campos_512_v4
+117/596501/campos_512_v4
+117/596512/campos_512_v4
+117/596532/campos_512_v4
+117/596552/campos_512_v4
+117/596601/campos_512_v4
+117/596643/campos_512_v4
+117/596670/campos_512_v4
+117/596734/campos_512_v4
+117/596735/campos_512_v4
+117/596767/campos_512_v4
+117/596779/campos_512_v4
+117/596850/campos_512_v4
+117/596913/campos_512_v4
+117/596921/campos_512_v4
+117/596954/campos_512_v4
+117/596993/campos_512_v4
+117/597022/campos_512_v4
+117/597039/campos_512_v4
+117/597051/campos_512_v4
+117/597068/campos_512_v4
+117/597092/campos_512_v4
+117/597097/campos_512_v4
+117/597169/campos_512_v4
+117/597243/campos_512_v4
+117/597251/campos_512_v4
+117/597263/campos_512_v4
+117/597280/campos_512_v4
+117/597297/campos_512_v4
+117/597333/campos_512_v4
+117/597338/campos_512_v4
+117/597353/campos_512_v4
+117/597354/campos_512_v4
+117/597361/campos_512_v4
+117/597377/campos_512_v4
+117/597389/campos_512_v4
+117/597412/campos_512_v4
+117/597415/campos_512_v4
+117/597419/campos_512_v4
+117/597443/campos_512_v4
+117/597444/campos_512_v4
+117/597455/campos_512_v4
+117/597527/campos_512_v4
+117/597567/campos_512_v4
+117/597575/campos_512_v4
+117/597580/campos_512_v4
+117/597670/campos_512_v4
+117/597729/campos_512_v4
+117/597731/campos_512_v4
+117/597791/campos_512_v4
+117/597796/campos_512_v4
+117/597828/campos_512_v4
+117/597835/campos_512_v4
+117/597857/campos_512_v4
+117/597894/campos_512_v4
+117/597903/campos_512_v4
+117/597913/campos_512_v4
+117/597951/campos_512_v4
+117/597973/campos_512_v4
+117/597981/campos_512_v4
+117/597989/campos_512_v4
+117/597992/campos_512_v4
+117/598003/campos_512_v4
+117/598016/campos_512_v4
+117/598024/campos_512_v4
+117/598039/campos_512_v4
+117/598069/campos_512_v4
+117/598101/campos_512_v4
+117/598115/campos_512_v4
+117/598142/campos_512_v4
+117/598208/campos_512_v4
+117/598222/campos_512_v4
+117/598224/campos_512_v4
+117/598238/campos_512_v4
+117/598253/campos_512_v4
+117/598269/campos_512_v4
+117/598324/campos_512_v4
+117/598413/campos_512_v4
+117/598465/campos_512_v4
+117/598466/campos_512_v4
+117/598531/campos_512_v4
+117/598540/campos_512_v4
+117/598550/campos_512_v4
+117/598593/campos_512_v4
+117/598659/campos_512_v4
+117/598681/campos_512_v4
+117/598705/campos_512_v4
+117/598710/campos_512_v4
+117/598713/campos_512_v4
+117/598753/campos_512_v4
+117/598810/campos_512_v4
+117/598850/campos_512_v4
+117/598877/campos_512_v4
+117/598888/campos_512_v4
+117/598898/campos_512_v4
+117/599005/campos_512_v4
+117/599010/campos_512_v4
+117/599022/campos_512_v4
+117/599058/campos_512_v4
+117/599079/campos_512_v4
+117/599153/campos_512_v4
+117/599177/campos_512_v4
+117/599179/campos_512_v4
+117/599357/campos_512_v4
+117/599378/campos_512_v4
+117/599399/campos_512_v4
+117/599495/campos_512_v4
+117/599550/campos_512_v4
+117/599553/campos_512_v4
+117/599571/campos_512_v4
+117/599627/campos_512_v4
+117/599636/campos_512_v4
+117/599691/campos_512_v4
+117/599722/campos_512_v4
+117/599777/campos_512_v4
+117/599840/campos_512_v4
+117/599879/campos_512_v4
+117/599884/campos_512_v4
+117/599903/campos_512_v4
+117/599966/campos_512_v4
+117/599996/campos_512_v4
+117/600000/campos_512_v4
+118/600007/campos_512_v4
+118/600016/campos_512_v4
+118/600022/campos_512_v4
+118/600105/campos_512_v4
+118/600151/campos_512_v4
+118/600180/campos_512_v4
+118/600198/campos_512_v4
+118/600212/campos_512_v4
+118/600215/campos_512_v4
+118/600313/campos_512_v4
+118/600336/campos_512_v4
+118/600452/campos_512_v4
+118/600463/campos_512_v4
+118/600466/campos_512_v4
+118/600486/campos_512_v4
+118/600493/campos_512_v4
+118/600507/campos_512_v4
+118/600515/campos_512_v4
+118/600541/campos_512_v4
+118/600548/campos_512_v4
+118/600550/campos_512_v4
+118/600569/campos_512_v4
+118/600575/campos_512_v4
+118/600700/campos_512_v4
+118/600718/campos_512_v4
+118/600742/campos_512_v4
+118/600748/campos_512_v4
+118/600796/campos_512_v4
+118/600837/campos_512_v4
+118/600852/campos_512_v4
+118/600855/campos_512_v4
+118/600907/campos_512_v4
+118/600948/campos_512_v4
+118/600966/campos_512_v4
+118/601007/campos_512_v4
+118/601086/campos_512_v4
+118/601195/campos_512_v4
+118/601218/campos_512_v4
+118/601238/campos_512_v4
+118/601239/campos_512_v4
+118/601300/campos_512_v4
+118/601307/campos_512_v4
+118/601323/campos_512_v4
+118/601330/campos_512_v4
+118/601366/campos_512_v4
+118/601368/campos_512_v4
+118/601369/campos_512_v4
+118/601410/campos_512_v4
+118/601459/campos_512_v4
+118/601536/campos_512_v4
+118/601550/campos_512_v4
+118/601583/campos_512_v4
+118/601585/campos_512_v4
+118/601610/campos_512_v4
+118/601624/campos_512_v4
+118/601649/campos_512_v4
+118/601654/campos_512_v4
+118/601670/campos_512_v4
+118/601702/campos_512_v4
+118/601744/campos_512_v4
+118/601763/campos_512_v4
+118/601778/campos_512_v4
+118/601810/campos_512_v4
+118/601813/campos_512_v4
+118/601816/campos_512_v4
+118/601853/campos_512_v4
+118/601896/campos_512_v4
+118/601902/campos_512_v4
+118/601935/campos_512_v4
+118/601952/campos_512_v4
+118/602004/campos_512_v4
+118/602010/campos_512_v4
+118/602095/campos_512_v4
+118/602098/campos_512_v4
+118/602101/campos_512_v4
+118/602114/campos_512_v4
+118/602116/campos_512_v4
+118/602230/campos_512_v4
+118/602252/campos_512_v4
+118/602348/campos_512_v4
+118/602358/campos_512_v4
+118/602405/campos_512_v4
+118/602443/campos_512_v4
+118/602536/campos_512_v4
+118/602544/campos_512_v4
+118/602568/campos_512_v4
+118/602574/campos_512_v4
+118/602656/campos_512_v4
+118/602703/campos_512_v4
+118/602729/campos_512_v4
+118/602768/campos_512_v4
+118/602784/campos_512_v4
+118/602795/campos_512_v4
+118/602802/campos_512_v4
+118/602824/campos_512_v4
+118/602827/campos_512_v4
+118/602848/campos_512_v4
+118/602873/campos_512_v4
+118/602902/campos_512_v4
+118/602910/campos_512_v4
+118/602942/campos_512_v4
+118/602961/campos_512_v4
+118/602973/campos_512_v4
+118/602974/campos_512_v4
+118/602989/campos_512_v4
+118/602992/campos_512_v4
+118/603022/campos_512_v4
+118/603170/campos_512_v4
+118/603233/campos_512_v4
+118/603255/campos_512_v4
+118/603303/campos_512_v4
+118/603352/campos_512_v4
+118/603365/campos_512_v4
+118/603465/campos_512_v4
+118/603478/campos_512_v4
+118/603521/campos_512_v4
+118/603573/campos_512_v4
+118/603585/campos_512_v4
+118/603606/campos_512_v4
+118/603614/campos_512_v4
+118/603626/campos_512_v4
+118/603633/campos_512_v4
+118/603634/campos_512_v4
+118/603661/campos_512_v4
+118/603666/campos_512_v4
+118/603675/campos_512_v4
+118/603746/campos_512_v4
+118/603787/campos_512_v4
+118/603810/campos_512_v4
+118/603867/campos_512_v4
+118/603943/campos_512_v4
+118/603944/campos_512_v4
+118/603949/campos_512_v4
+118/603998/campos_512_v4
+118/604000/campos_512_v4
+118/604018/campos_512_v4
+118/604071/campos_512_v4
+118/604080/campos_512_v4
+118/604124/campos_512_v4
+118/604131/campos_512_v4
+118/604157/campos_512_v4
+118/604251/campos_512_v4
+118/604255/campos_512_v4
+118/604317/campos_512_v4
+118/604334/campos_512_v4
+118/604376/campos_512_v4
+118/604428/campos_512_v4
+118/604475/campos_512_v4
+118/604524/campos_512_v4
+118/604529/campos_512_v4
+118/604584/campos_512_v4
+118/604607/campos_512_v4
+118/604623/campos_512_v4
+118/604627/campos_512_v4
+118/604657/campos_512_v4
+118/604670/campos_512_v4
+118/604689/campos_512_v4
+118/604692/campos_512_v4
+118/604721/campos_512_v4
+118/604780/campos_512_v4
+118/604824/campos_512_v4
+118/604905/campos_512_v4
+118/604928/campos_512_v4
+118/604930/campos_512_v4
+119/605005/campos_512_v4
+119/605011/campos_512_v4
+119/605014/campos_512_v4
+119/605023/campos_512_v4
+119/605024/campos_512_v4
+119/605052/campos_512_v4
+119/605097/campos_512_v4
+119/605131/campos_512_v4
+119/605155/campos_512_v4
+119/605165/campos_512_v4
+119/605231/campos_512_v4
+119/605253/campos_512_v4
+119/605259/campos_512_v4
+119/605295/campos_512_v4
+119/605330/campos_512_v4
+119/605339/campos_512_v4
+119/605361/campos_512_v4
+119/605378/campos_512_v4
+119/605379/campos_512_v4
+119/605398/campos_512_v4
+119/605428/campos_512_v4
+119/605436/campos_512_v4
+119/605438/campos_512_v4
+119/605454/campos_512_v4
+119/605469/campos_512_v4
+119/605502/campos_512_v4
+119/605535/campos_512_v4
+119/605555/campos_512_v4
+119/605602/campos_512_v4
+119/605618/campos_512_v4
+119/605631/campos_512_v4
+119/605654/campos_512_v4
+119/605670/campos_512_v4
+119/605713/campos_512_v4
+119/605727/campos_512_v4
+119/605733/campos_512_v4
+119/605810/campos_512_v4
+119/605828/campos_512_v4
+119/605841/campos_512_v4
+119/605879/campos_512_v4
+119/605903/campos_512_v4
+119/605927/campos_512_v4
+119/605933/campos_512_v4
+119/605953/campos_512_v4
+119/606002/campos_512_v4
+119/606036/campos_512_v4
+119/606125/campos_512_v4
+119/606130/campos_512_v4
+119/606151/campos_512_v4
+119/606156/campos_512_v4
+119/606164/campos_512_v4
+119/606179/campos_512_v4
+119/606194/campos_512_v4
+119/606197/campos_512_v4
+119/606204/campos_512_v4
+119/606260/campos_512_v4
+119/606339/campos_512_v4
+119/606344/campos_512_v4
+119/606359/campos_512_v4
+119/606377/campos_512_v4
+119/606405/campos_512_v4
+119/606420/campos_512_v4
+119/606450/campos_512_v4
+119/606473/campos_512_v4
+119/606502/campos_512_v4
+119/606539/campos_512_v4
+119/606575/campos_512_v4
+119/606617/campos_512_v4
+119/606618/campos_512_v4
+119/606641/campos_512_v4
+119/606644/campos_512_v4
+119/606655/campos_512_v4
+119/606664/campos_512_v4
+119/606665/campos_512_v4
+119/606671/campos_512_v4
+119/606774/campos_512_v4
+119/606867/campos_512_v4
+119/606900/campos_512_v4
+119/606906/campos_512_v4
+119/606937/campos_512_v4
+119/606985/campos_512_v4
+119/607034/campos_512_v4
+119/607112/campos_512_v4
+119/607151/campos_512_v4
+119/607161/campos_512_v4
+119/607193/campos_512_v4
+119/607249/campos_512_v4
+119/607336/campos_512_v4
+119/607355/campos_512_v4
+119/607371/campos_512_v4
+119/607388/campos_512_v4
+119/607404/campos_512_v4
+119/607475/campos_512_v4
+119/607479/campos_512_v4
+119/607495/campos_512_v4
+119/607560/campos_512_v4
+119/607562/campos_512_v4
+119/607603/campos_512_v4
+119/607647/campos_512_v4
+119/607697/campos_512_v4
+119/607715/campos_512_v4
+119/607781/campos_512_v4
+119/607800/campos_512_v4
+119/607817/campos_512_v4
+119/607841/campos_512_v4
+119/607901/campos_512_v4
+119/607915/campos_512_v4
+119/607925/campos_512_v4
+119/607951/campos_512_v4
+119/608008/campos_512_v4
+119/608023/campos_512_v4
+119/608078/campos_512_v4
+119/608095/campos_512_v4
+119/608126/campos_512_v4
+119/608188/campos_512_v4
+119/608241/campos_512_v4
+119/608283/campos_512_v4
+119/608327/campos_512_v4
+119/608348/campos_512_v4
+119/608357/campos_512_v4
+119/608360/campos_512_v4
+119/608377/campos_512_v4
+119/608387/campos_512_v4
+119/608409/campos_512_v4
+119/608414/campos_512_v4
+119/608428/campos_512_v4
+119/608455/campos_512_v4
+119/608463/campos_512_v4
+119/608467/campos_512_v4
+119/608473/campos_512_v4
+119/608480/campos_512_v4
+119/608503/campos_512_v4
+119/608540/campos_512_v4
+119/608543/campos_512_v4
+119/608551/campos_512_v4
+119/608564/campos_512_v4
+119/608565/campos_512_v4
+119/608573/campos_512_v4
+119/608581/campos_512_v4
+119/608591/campos_512_v4
+119/608601/campos_512_v4
+119/608608/campos_512_v4
+119/608693/campos_512_v4
+119/608717/campos_512_v4
+119/608736/campos_512_v4
+119/608741/campos_512_v4
+119/608778/campos_512_v4
+119/608846/campos_512_v4
+119/608872/campos_512_v4
+119/608907/campos_512_v4
+119/608917/campos_512_v4
+119/608923/campos_512_v4
+119/608939/campos_512_v4
+119/609007/campos_512_v4
+119/609050/campos_512_v4
+119/609160/campos_512_v4
+119/609189/campos_512_v4
+119/609194/campos_512_v4
+119/609247/campos_512_v4
+119/609273/campos_512_v4
+119/609286/campos_512_v4
+119/609297/campos_512_v4
+119/609413/campos_512_v4
+119/609471/campos_512_v4
+119/609501/campos_512_v4
+119/609523/campos_512_v4
+119/609548/campos_512_v4
+119/609570/campos_512_v4
+119/609589/campos_512_v4
+119/609619/campos_512_v4
+119/609636/campos_512_v4
+119/609660/campos_512_v4
+119/609715/campos_512_v4
+119/609738/campos_512_v4
+119/609745/campos_512_v4
+119/609765/campos_512_v4
+119/609804/campos_512_v4
+119/609844/campos_512_v4
+119/609847/campos_512_v4
+119/609862/campos_512_v4
+119/609882/campos_512_v4
+119/609908/campos_512_v4
+119/609946/campos_512_v4
+119/609969/campos_512_v4
+119/609988/campos_512_v4
+12/70014/campos_512_v4
+12/70035/campos_512_v4
+12/70040/campos_512_v4
+12/70070/campos_512_v4
+12/70077/campos_512_v4
+12/70081/campos_512_v4
+12/70107/campos_512_v4
+12/70108/campos_512_v4
+12/70141/campos_512_v4
+12/70142/campos_512_v4
+12/70150/campos_512_v4
+12/70193/campos_512_v4
+12/70282/campos_512_v4
+12/70381/campos_512_v4
+12/70418/campos_512_v4
+12/70442/campos_512_v4
+12/70475/campos_512_v4
+12/70496/campos_512_v4
+12/70506/campos_512_v4
+12/70531/campos_512_v4
+12/70540/campos_512_v4
+12/70567/campos_512_v4
+12/70580/campos_512_v4
+12/70620/campos_512_v4
+12/70655/campos_512_v4
+12/70669/campos_512_v4
+12/70715/campos_512_v4
+12/70732/campos_512_v4
+12/70740/campos_512_v4
+12/70760/campos_512_v4
+12/70765/campos_512_v4
+12/70858/campos_512_v4
+12/70880/campos_512_v4
+12/70901/campos_512_v4
+12/70922/campos_512_v4
+12/70926/campos_512_v4
+12/70927/campos_512_v4
+12/70968/campos_512_v4
+12/71007/campos_512_v4
+12/71009/campos_512_v4
+12/71027/campos_512_v4
+12/71031/campos_512_v4
+12/71032/campos_512_v4
+12/71034/campos_512_v4
+12/71068/campos_512_v4
+12/71075/campos_512_v4
+12/71079/campos_512_v4
+12/71123/campos_512_v4
+12/71132/campos_512_v4
+12/71152/campos_512_v4
+12/71162/campos_512_v4
+12/71200/campos_512_v4
+12/71237/campos_512_v4
+12/71264/campos_512_v4
+12/71332/campos_512_v4
+12/71353/campos_512_v4
+12/71360/campos_512_v4
+12/71385/campos_512_v4
+12/71395/campos_512_v4
+12/71468/campos_512_v4
+12/71517/campos_512_v4
+12/71519/campos_512_v4
+12/71530/campos_512_v4
+12/71551/campos_512_v4
+12/71581/campos_512_v4
+12/71627/campos_512_v4
+12/71629/campos_512_v4
+12/71660/campos_512_v4
+12/71676/campos_512_v4
+12/71704/campos_512_v4
+12/71712/campos_512_v4
+12/71730/campos_512_v4
+12/71735/campos_512_v4
+12/71765/campos_512_v4
+12/71780/campos_512_v4
+12/71835/campos_512_v4
+12/71870/campos_512_v4
+12/71879/campos_512_v4
+12/71895/campos_512_v4
+12/71897/campos_512_v4
+12/71928/campos_512_v4
+12/71941/campos_512_v4
+12/71942/campos_512_v4
+12/71943/campos_512_v4
+12/71954/campos_512_v4
+12/71962/campos_512_v4
+12/71968/campos_512_v4
+12/72024/campos_512_v4
+12/72048/campos_512_v4
+12/72054/campos_512_v4
+12/72070/campos_512_v4
+12/72072/campos_512_v4
+12/72098/campos_512_v4
+12/72150/campos_512_v4
+12/72154/campos_512_v4
+12/72184/campos_512_v4
+12/72212/campos_512_v4
+12/72245/campos_512_v4
+12/72249/campos_512_v4
+12/72266/campos_512_v4
+12/72274/campos_512_v4
+12/72296/campos_512_v4
+12/72298/campos_512_v4
+12/72321/campos_512_v4
+12/72337/campos_512_v4
+12/72362/campos_512_v4
+12/72421/campos_512_v4
+12/72457/campos_512_v4
+12/72462/campos_512_v4
+12/72467/campos_512_v4
+12/72495/campos_512_v4
+12/72533/campos_512_v4
+12/72539/campos_512_v4
+12/72597/campos_512_v4
+12/72640/campos_512_v4
+12/72665/campos_512_v4
+12/72666/campos_512_v4
+12/72672/campos_512_v4
+12/72702/campos_512_v4
+12/72797/campos_512_v4
+12/72848/campos_512_v4
+12/72886/campos_512_v4
+12/72974/campos_512_v4
+12/72994/campos_512_v4
+12/73121/campos_512_v4
+12/73137/campos_512_v4
+12/73199/campos_512_v4
+12/73208/campos_512_v4
+12/73231/campos_512_v4
+12/73232/campos_512_v4
+12/73241/campos_512_v4
+12/73277/campos_512_v4
+12/73295/campos_512_v4
+12/73303/campos_512_v4
+12/73323/campos_512_v4
+12/73334/campos_512_v4
+12/73348/campos_512_v4
+12/73361/campos_512_v4
+12/73388/campos_512_v4
+12/73412/campos_512_v4
+12/73427/campos_512_v4
+12/73463/campos_512_v4
+12/73520/campos_512_v4
+12/73534/campos_512_v4
+12/73596/campos_512_v4
+12/73601/campos_512_v4
+12/73607/campos_512_v4
+12/73628/campos_512_v4
+12/73699/campos_512_v4
+12/73709/campos_512_v4
+12/73723/campos_512_v4
+12/73787/campos_512_v4
+12/73909/campos_512_v4
+12/73927/campos_512_v4
+12/73974/campos_512_v4
+12/74013/campos_512_v4
+12/74026/campos_512_v4
+12/74028/campos_512_v4
+12/74039/campos_512_v4
+12/74147/campos_512_v4
+12/74169/campos_512_v4
+12/74181/campos_512_v4
+12/74324/campos_512_v4
+12/74338/campos_512_v4
+12/74365/campos_512_v4
+12/74451/campos_512_v4
+12/74469/campos_512_v4
+12/74475/campos_512_v4
+12/74477/campos_512_v4
+12/74480/campos_512_v4
+12/74572/campos_512_v4
+12/74595/campos_512_v4
+12/74676/campos_512_v4
+12/74685/campos_512_v4
+12/74701/campos_512_v4
+12/74718/campos_512_v4
+12/74721/campos_512_v4
+12/74744/campos_512_v4
+12/74790/campos_512_v4
+12/74850/campos_512_v4
+12/74858/campos_512_v4
+12/74862/campos_512_v4
+12/74873/campos_512_v4
+12/74913/campos_512_v4
+12/74929/campos_512_v4
+12/74954/campos_512_v4
+12/74956/campos_512_v4
+12/74958/campos_512_v4
+12/74982/campos_512_v4
+12/74984/campos_512_v4
+12/75000/campos_512_v4
+120/610076/campos_512_v4
+120/610091/campos_512_v4
+120/610125/campos_512_v4
+120/610137/campos_512_v4
+120/610151/campos_512_v4
+120/610165/campos_512_v4
+120/610170/campos_512_v4
+120/610180/campos_512_v4
+120/610183/campos_512_v4
+120/610184/campos_512_v4
+120/610304/campos_512_v4
+120/610319/campos_512_v4
+120/610334/campos_512_v4
+120/610337/campos_512_v4
+120/610343/campos_512_v4
+120/610359/campos_512_v4
+120/610430/campos_512_v4
+120/610482/campos_512_v4
+120/610521/campos_512_v4
+120/610526/campos_512_v4
+120/610530/campos_512_v4
+120/610580/campos_512_v4
+120/610734/campos_512_v4
+120/610743/campos_512_v4
+120/610747/campos_512_v4
+120/610777/campos_512_v4
+120/610837/campos_512_v4
+120/610868/campos_512_v4
+120/610869/campos_512_v4
+120/610872/campos_512_v4
+120/610917/campos_512_v4
+120/610920/campos_512_v4
+120/610932/campos_512_v4
+120/611059/campos_512_v4
+120/611063/campos_512_v4
+120/611113/campos_512_v4
+120/611129/campos_512_v4
+120/611213/campos_512_v4
+120/611217/campos_512_v4
+120/611220/campos_512_v4
+120/611234/campos_512_v4
+120/611250/campos_512_v4
+120/611281/campos_512_v4
+120/611314/campos_512_v4
+120/611380/campos_512_v4
+120/611388/campos_512_v4
+120/611407/campos_512_v4
+120/611425/campos_512_v4
+120/611485/campos_512_v4
+120/611486/campos_512_v4
+120/611514/campos_512_v4
+120/611517/campos_512_v4
+120/611603/campos_512_v4
+120/611618/campos_512_v4
+120/611620/campos_512_v4
+120/611628/campos_512_v4
+120/611696/campos_512_v4
+120/611750/campos_512_v4
+120/611765/campos_512_v4
+120/611901/campos_512_v4
+120/611950/campos_512_v4
+120/611955/campos_512_v4
+120/611985/campos_512_v4
+120/612036/campos_512_v4
+120/612252/campos_512_v4
+120/612277/campos_512_v4
+120/612297/campos_512_v4
+120/612421/campos_512_v4
+120/612465/campos_512_v4
+120/612468/campos_512_v4
+120/612522/campos_512_v4
+120/612542/campos_512_v4
+120/612599/campos_512_v4
+120/612622/campos_512_v4
+120/612629/campos_512_v4
+120/612750/campos_512_v4
+120/612798/campos_512_v4
+120/612801/campos_512_v4
+120/612810/campos_512_v4
+120/612829/campos_512_v4
+120/612850/campos_512_v4
+120/612955/campos_512_v4
+120/612985/campos_512_v4
+120/613012/campos_512_v4
+120/613021/campos_512_v4
+120/613101/campos_512_v4
+120/613163/campos_512_v4
+120/613233/campos_512_v4
+120/613344/campos_512_v4
+120/613347/campos_512_v4
+120/613385/campos_512_v4
+120/613396/campos_512_v4
+120/613433/campos_512_v4
+120/613449/campos_512_v4
+120/613484/campos_512_v4
+120/613521/campos_512_v4
+120/613533/campos_512_v4
+120/613602/campos_512_v4
+120/613615/campos_512_v4
+120/613634/campos_512_v4
+120/613645/campos_512_v4
+120/613648/campos_512_v4
+120/613654/campos_512_v4
+120/613733/campos_512_v4
+120/613735/campos_512_v4
+120/613755/campos_512_v4
+120/613758/campos_512_v4
+120/613769/campos_512_v4
+120/613791/campos_512_v4
+120/613816/campos_512_v4
+120/613849/campos_512_v4
+120/613880/campos_512_v4
+120/613883/campos_512_v4
+120/613917/campos_512_v4
+120/613936/campos_512_v4
+120/613965/campos_512_v4
+120/613996/campos_512_v4
+120/613997/campos_512_v4
+120/614063/campos_512_v4
+120/614074/campos_512_v4
+120/614126/campos_512_v4
+120/614153/campos_512_v4
+120/614155/campos_512_v4
+120/614174/campos_512_v4
+120/614320/campos_512_v4
+120/614349/campos_512_v4
+120/614371/campos_512_v4
+120/614416/campos_512_v4
+120/614426/campos_512_v4
+120/614427/campos_512_v4
+120/614430/campos_512_v4
+120/614482/campos_512_v4
+120/614492/campos_512_v4
+120/614648/campos_512_v4
+120/614682/campos_512_v4
+120/614734/campos_512_v4
+120/614750/campos_512_v4
+120/614825/campos_512_v4
+120/614875/campos_512_v4
+120/614878/campos_512_v4
+121/615010/campos_512_v4
+121/615011/campos_512_v4
+121/615027/campos_512_v4
+121/615029/campos_512_v4
+121/615140/campos_512_v4
+121/615148/campos_512_v4
+121/615236/campos_512_v4
+121/615280/campos_512_v4
+121/615350/campos_512_v4
+121/615462/campos_512_v4
+121/615481/campos_512_v4
+121/615492/campos_512_v4
+121/615501/campos_512_v4
+121/615508/campos_512_v4
+121/615523/campos_512_v4
+121/615535/campos_512_v4
+121/615564/campos_512_v4
+121/615662/campos_512_v4
+121/615665/campos_512_v4
+121/615669/campos_512_v4
+121/615692/campos_512_v4
+121/615709/campos_512_v4
+121/615719/campos_512_v4
+121/615724/campos_512_v4
+121/615778/campos_512_v4
+121/615817/campos_512_v4
+121/615851/campos_512_v4
+121/615894/campos_512_v4
+121/615968/campos_512_v4
+121/615969/campos_512_v4
+121/616019/campos_512_v4
+121/616053/campos_512_v4
+121/616124/campos_512_v4
+121/616160/campos_512_v4
+121/616167/campos_512_v4
+121/616188/campos_512_v4
+121/616244/campos_512_v4
+121/616396/campos_512_v4
+121/616415/campos_512_v4
+121/616462/campos_512_v4
+121/616469/campos_512_v4
+121/616499/campos_512_v4
+121/616554/campos_512_v4
+121/616560/campos_512_v4
+121/616570/campos_512_v4
+121/616620/campos_512_v4
+121/616663/campos_512_v4
+121/616756/campos_512_v4
+121/616828/campos_512_v4
+121/616856/campos_512_v4
+121/616871/campos_512_v4
+121/616872/campos_512_v4
+121/616902/campos_512_v4
+121/616970/campos_512_v4
+121/617018/campos_512_v4
+121/617029/campos_512_v4
+121/617030/campos_512_v4
+121/617041/campos_512_v4
+121/617045/campos_512_v4
+121/617050/campos_512_v4
+121/617077/campos_512_v4
+121/617079/campos_512_v4
+121/617116/campos_512_v4
+121/617231/campos_512_v4
+121/617323/campos_512_v4
+121/617328/campos_512_v4
+121/617360/campos_512_v4
+121/617428/campos_512_v4
+121/617457/campos_512_v4
+121/617467/campos_512_v4
+121/617482/campos_512_v4
+121/617488/campos_512_v4
+121/617489/campos_512_v4
+121/617538/campos_512_v4
+121/617587/campos_512_v4
+121/617590/campos_512_v4
+121/617609/campos_512_v4
+121/617624/campos_512_v4
+121/617629/campos_512_v4
+121/617658/campos_512_v4
+121/617668/campos_512_v4
+121/617672/campos_512_v4
+121/617683/campos_512_v4
+121/617706/campos_512_v4
+121/617851/campos_512_v4
+121/617912/campos_512_v4
+121/617933/campos_512_v4
+121/617937/campos_512_v4
+121/617971/campos_512_v4
+121/617983/campos_512_v4
+121/618026/campos_512_v4
+121/618045/campos_512_v4
+121/618054/campos_512_v4
+121/618081/campos_512_v4
+121/618104/campos_512_v4
+121/618110/campos_512_v4
+121/618152/campos_512_v4
+121/618156/campos_512_v4
+121/618164/campos_512_v4
+121/618176/campos_512_v4
+121/618185/campos_512_v4
+121/618221/campos_512_v4
+121/618239/campos_512_v4
+121/618275/campos_512_v4
+121/618282/campos_512_v4
+121/618289/campos_512_v4
+121/618331/campos_512_v4
+121/618351/campos_512_v4
+121/618367/campos_512_v4
+121/618400/campos_512_v4
+121/618496/campos_512_v4
+121/618513/campos_512_v4
+121/618526/campos_512_v4
+121/618562/campos_512_v4
+121/618564/campos_512_v4
+121/618594/campos_512_v4
+121/618633/campos_512_v4
+121/618664/campos_512_v4
+121/618754/campos_512_v4
+121/618785/campos_512_v4
+121/618788/campos_512_v4
+121/618790/campos_512_v4
+121/618799/campos_512_v4
+121/618852/campos_512_v4
+121/618933/campos_512_v4
+121/618977/campos_512_v4
+121/619037/campos_512_v4
+121/619067/campos_512_v4
+121/619094/campos_512_v4
+121/619188/campos_512_v4
+121/619196/campos_512_v4
+121/619274/campos_512_v4
+121/619290/campos_512_v4
+121/619324/campos_512_v4
+121/619330/campos_512_v4
+121/619367/campos_512_v4
+121/619372/campos_512_v4
+121/619387/campos_512_v4
+121/619429/campos_512_v4
+121/619457/campos_512_v4
+121/619466/campos_512_v4
+121/619559/campos_512_v4
+121/619567/campos_512_v4
+121/619568/campos_512_v4
+121/619600/campos_512_v4
+121/619621/campos_512_v4
+121/619627/campos_512_v4
+121/619712/campos_512_v4
+121/619728/campos_512_v4
+121/619774/campos_512_v4
+121/619790/campos_512_v4
+121/619931/campos_512_v4
+121/619987/campos_512_v4
+121/619998/campos_512_v4
+122/620051/campos_512_v4
+122/620101/campos_512_v4
+122/620159/campos_512_v4
+122/620184/campos_512_v4
+122/620219/campos_512_v4
+122/620221/campos_512_v4
+122/620269/campos_512_v4
+122/620293/campos_512_v4
+122/620341/campos_512_v4
+122/620364/campos_512_v4
+122/620366/campos_512_v4
+122/620436/campos_512_v4
+122/620500/campos_512_v4
+122/620541/campos_512_v4
+122/620585/campos_512_v4
+122/620606/campos_512_v4
+122/620671/campos_512_v4
+122/620678/campos_512_v4
+122/620698/campos_512_v4
+122/620717/campos_512_v4
+122/620797/campos_512_v4
+122/620807/campos_512_v4
+122/620813/campos_512_v4
+122/620840/campos_512_v4
+122/620843/campos_512_v4
+122/620849/campos_512_v4
+122/620925/campos_512_v4
+122/620979/campos_512_v4
+122/620997/campos_512_v4
+122/621014/campos_512_v4
+122/621050/campos_512_v4
+122/621054/campos_512_v4
+122/621070/campos_512_v4
+122/621092/campos_512_v4
+122/621097/campos_512_v4
+122/621132/campos_512_v4
+122/621146/campos_512_v4
+122/621221/campos_512_v4
+122/621222/campos_512_v4
+122/621237/campos_512_v4
+122/621253/campos_512_v4
+122/621259/campos_512_v4
+122/621268/campos_512_v4
+122/621311/campos_512_v4
+122/621329/campos_512_v4
+122/621337/campos_512_v4
+122/621347/campos_512_v4
+122/621363/campos_512_v4
+122/621425/campos_512_v4
+122/621437/campos_512_v4
+122/621463/campos_512_v4
+122/621482/campos_512_v4
+122/621576/campos_512_v4
+122/621578/campos_512_v4
+122/621594/campos_512_v4
+122/621691/campos_512_v4
+122/621704/campos_512_v4
+122/621726/campos_512_v4
+122/621766/campos_512_v4
+122/621770/campos_512_v4
+122/621834/campos_512_v4
+122/621841/campos_512_v4
+122/621868/campos_512_v4
+122/621990/campos_512_v4
+122/622004/campos_512_v4
+122/622177/campos_512_v4
+122/622181/campos_512_v4
+122/622225/campos_512_v4
+122/622261/campos_512_v4
+122/622295/campos_512_v4
+122/622332/campos_512_v4
+122/622335/campos_512_v4
+122/622378/campos_512_v4
+122/622418/campos_512_v4
+122/622419/campos_512_v4
+122/622424/campos_512_v4
+122/622425/campos_512_v4
+122/622436/campos_512_v4
+122/622453/campos_512_v4
+122/622465/campos_512_v4
+122/622466/campos_512_v4
+122/622471/campos_512_v4
+122/622490/campos_512_v4
+122/622504/campos_512_v4
+122/622510/campos_512_v4
+122/622518/campos_512_v4
+122/622677/campos_512_v4
+122/622724/campos_512_v4
+122/622748/campos_512_v4
+122/622757/campos_512_v4
+122/622791/campos_512_v4
+122/622844/campos_512_v4
+122/622848/campos_512_v4
+122/622869/campos_512_v4
+122/622895/campos_512_v4
+122/622896/campos_512_v4
+122/622926/campos_512_v4
+122/622936/campos_512_v4
+122/622959/campos_512_v4
+122/622990/campos_512_v4
+122/623014/campos_512_v4
+122/623017/campos_512_v4
+122/623062/campos_512_v4
+122/623119/campos_512_v4
+122/623135/campos_512_v4
+122/623142/campos_512_v4
+122/623148/campos_512_v4
+122/623226/campos_512_v4
+122/623242/campos_512_v4
+122/623247/campos_512_v4
+122/623269/campos_512_v4
+122/623277/campos_512_v4
+122/623321/campos_512_v4
+122/623346/campos_512_v4
+122/623382/campos_512_v4
+122/623389/campos_512_v4
+122/623407/campos_512_v4
+122/623410/campos_512_v4
+122/623465/campos_512_v4
+122/623466/campos_512_v4
+122/623473/campos_512_v4
+122/623506/campos_512_v4
+122/623582/campos_512_v4
+122/623625/campos_512_v4
+122/623721/campos_512_v4
+122/623724/campos_512_v4
+122/623740/campos_512_v4
+122/623832/campos_512_v4
+122/623879/campos_512_v4
+122/623882/campos_512_v4
+122/623916/campos_512_v4
+122/623999/campos_512_v4
+122/624000/campos_512_v4
+122/624001/campos_512_v4
+122/624020/campos_512_v4
+122/624086/campos_512_v4
+122/624107/campos_512_v4
+122/624162/campos_512_v4
+122/624171/campos_512_v4
+122/624175/campos_512_v4
+122/624203/campos_512_v4
+122/624208/campos_512_v4
+122/624222/campos_512_v4
+122/624265/campos_512_v4
+122/624304/campos_512_v4
+122/624335/campos_512_v4
+122/624426/campos_512_v4
+122/624434/campos_512_v4
+122/624449/campos_512_v4
+122/624479/campos_512_v4
+122/624523/campos_512_v4
+122/624533/campos_512_v4
+122/624536/campos_512_v4
+122/624571/campos_512_v4
+122/624574/campos_512_v4
+122/624585/campos_512_v4
+122/624613/campos_512_v4
+122/624646/campos_512_v4
+122/624686/campos_512_v4
+122/624727/campos_512_v4
+122/624760/campos_512_v4
+122/624775/campos_512_v4
+122/624841/campos_512_v4
+122/624857/campos_512_v4
+122/624879/campos_512_v4
+122/624967/campos_512_v4
+122/624982/campos_512_v4
+123/625045/campos_512_v4
+123/625049/campos_512_v4
+123/625076/campos_512_v4
+123/625101/campos_512_v4
+123/625114/campos_512_v4
+123/625127/campos_512_v4
+123/625163/campos_512_v4
+123/625188/campos_512_v4
+123/625191/campos_512_v4
+123/625262/campos_512_v4
+123/625335/campos_512_v4
+123/625348/campos_512_v4
+123/625374/campos_512_v4
+123/625377/campos_512_v4
+123/625460/campos_512_v4
+123/625543/campos_512_v4
+123/625597/campos_512_v4
+123/625602/campos_512_v4
+123/625603/campos_512_v4
+123/625622/campos_512_v4
+123/625660/campos_512_v4
+123/625685/campos_512_v4
+123/625688/campos_512_v4
+123/625703/campos_512_v4
+123/625721/campos_512_v4
+123/625726/campos_512_v4
+123/625741/campos_512_v4
+123/625796/campos_512_v4
+123/625799/campos_512_v4
+123/625835/campos_512_v4
+123/625882/campos_512_v4
+123/626235/campos_512_v4
+123/626283/campos_512_v4
+123/626286/campos_512_v4
+123/626478/campos_512_v4
+123/626480/campos_512_v4
+123/626492/campos_512_v4
+123/626510/campos_512_v4
+123/626517/campos_512_v4
+123/626531/campos_512_v4
+123/626568/campos_512_v4
+123/626607/campos_512_v4
+123/626656/campos_512_v4
+123/626686/campos_512_v4
+123/626705/campos_512_v4
+123/626757/campos_512_v4
+123/626817/campos_512_v4
+123/626836/campos_512_v4
+123/626904/campos_512_v4
+123/627012/campos_512_v4
+123/627017/campos_512_v4
+123/627019/campos_512_v4
+123/627031/campos_512_v4
+123/627037/campos_512_v4
+123/627055/campos_512_v4
+123/627062/campos_512_v4
+123/627115/campos_512_v4
+123/627133/campos_512_v4
+123/627160/campos_512_v4
+123/627168/campos_512_v4
+123/627202/campos_512_v4
+123/627212/campos_512_v4
+123/627229/campos_512_v4
+123/627244/campos_512_v4
+123/627247/campos_512_v4
+123/627263/campos_512_v4
+123/627267/campos_512_v4
+123/627283/campos_512_v4
+123/627287/campos_512_v4
+123/627293/campos_512_v4
+123/627320/campos_512_v4
+123/627363/campos_512_v4
+123/627370/campos_512_v4
+123/627375/campos_512_v4
+123/627438/campos_512_v4
+123/627499/campos_512_v4
+123/627646/campos_512_v4
+123/627766/campos_512_v4
+123/627790/campos_512_v4
+123/627805/campos_512_v4
+123/627836/campos_512_v4
+123/627848/campos_512_v4
+123/627850/campos_512_v4
+123/627859/campos_512_v4
+123/627864/campos_512_v4
+123/627874/campos_512_v4
+123/627885/campos_512_v4
+123/627896/campos_512_v4
+123/627900/campos_512_v4
+123/627902/campos_512_v4
+123/627908/campos_512_v4
+123/627982/campos_512_v4
+123/628062/campos_512_v4
+123/628101/campos_512_v4
+123/628128/campos_512_v4
+123/628129/campos_512_v4
+123/628164/campos_512_v4
+123/628176/campos_512_v4
+123/628178/campos_512_v4
+123/628197/campos_512_v4
+123/628200/campos_512_v4
+123/628202/campos_512_v4
+123/628224/campos_512_v4
+123/628276/campos_512_v4
+123/628295/campos_512_v4
+123/628338/campos_512_v4
+123/628401/campos_512_v4
+123/628406/campos_512_v4
+123/628424/campos_512_v4
+123/628496/campos_512_v4
+123/628548/campos_512_v4
+123/628561/campos_512_v4
+123/628624/campos_512_v4
+123/628695/campos_512_v4
+123/628704/campos_512_v4
+123/628710/campos_512_v4
+123/628732/campos_512_v4
+123/628853/campos_512_v4
+123/628854/campos_512_v4
+123/628873/campos_512_v4
+123/628909/campos_512_v4
+123/628938/campos_512_v4
+123/628951/campos_512_v4
+123/629023/campos_512_v4
+123/629026/campos_512_v4
+123/629178/campos_512_v4
+123/629215/campos_512_v4
+123/629230/campos_512_v4
+123/629233/campos_512_v4
+123/629239/campos_512_v4
+123/629251/campos_512_v4
+123/629270/campos_512_v4
+123/629271/campos_512_v4
+123/629283/campos_512_v4
+123/629287/campos_512_v4
+123/629388/campos_512_v4
+123/629389/campos_512_v4
+123/629446/campos_512_v4
+123/629455/campos_512_v4
+123/629482/campos_512_v4
+123/629483/campos_512_v4
+123/629517/campos_512_v4
+123/629531/campos_512_v4
+123/629658/campos_512_v4
+123/629701/campos_512_v4
+123/629708/campos_512_v4
+123/629713/campos_512_v4
+123/629741/campos_512_v4
+123/629756/campos_512_v4
+123/629878/campos_512_v4
+123/629900/campos_512_v4
+123/629904/campos_512_v4
+123/629950/campos_512_v4
+123/629978/campos_512_v4
+123/629984/campos_512_v4
+123/629991/campos_512_v4
+124/630008/campos_512_v4
+124/630022/campos_512_v4
+124/630035/campos_512_v4
+124/630065/campos_512_v4
+124/630076/campos_512_v4
+124/630094/campos_512_v4
+124/630104/campos_512_v4
+124/630111/campos_512_v4
+124/630117/campos_512_v4
+124/630188/campos_512_v4
+124/630239/campos_512_v4
+124/630283/campos_512_v4
+124/630290/campos_512_v4
+124/630293/campos_512_v4
+124/630294/campos_512_v4
+124/630305/campos_512_v4
+124/630323/campos_512_v4
+124/630335/campos_512_v4
+124/630341/campos_512_v4
+124/630346/campos_512_v4
+124/630384/campos_512_v4
+124/630385/campos_512_v4
+124/630387/campos_512_v4
+124/630410/campos_512_v4
+124/630420/campos_512_v4
+124/630438/campos_512_v4
+124/630439/campos_512_v4
+124/630513/campos_512_v4
+124/630530/campos_512_v4
+124/630550/campos_512_v4
+124/630565/campos_512_v4
+124/630592/campos_512_v4
+124/630612/campos_512_v4
+124/630619/campos_512_v4
+124/630637/campos_512_v4
+124/630653/campos_512_v4
+124/630663/campos_512_v4
+124/630679/campos_512_v4
+124/630817/campos_512_v4
+124/630831/campos_512_v4
+124/630852/campos_512_v4
+124/630861/campos_512_v4
+124/630897/campos_512_v4
+124/630911/campos_512_v4
+124/630916/campos_512_v4
+124/630932/campos_512_v4
+124/630946/campos_512_v4
+124/630951/campos_512_v4
+124/630968/campos_512_v4
+124/630981/campos_512_v4
+124/630986/campos_512_v4
+124/631006/campos_512_v4
+124/631011/campos_512_v4
+124/631051/campos_512_v4
+124/631122/campos_512_v4
+124/631129/campos_512_v4
+124/631143/campos_512_v4
+124/631169/campos_512_v4
+124/631286/campos_512_v4
+124/631301/campos_512_v4
+124/631349/campos_512_v4
+124/631379/campos_512_v4
+124/631392/campos_512_v4
+124/631396/campos_512_v4
+124/631405/campos_512_v4
+124/631424/campos_512_v4
+124/631442/campos_512_v4
+124/631478/campos_512_v4
+124/631479/campos_512_v4
+124/631495/campos_512_v4
+124/631521/campos_512_v4
+124/631560/campos_512_v4
+124/631576/campos_512_v4
+124/631579/campos_512_v4
+124/631610/campos_512_v4
+124/631834/campos_512_v4
+124/631907/campos_512_v4
+124/631998/campos_512_v4
+124/632026/campos_512_v4
+124/632067/campos_512_v4
+124/632118/campos_512_v4
+124/632130/campos_512_v4
+124/632134/campos_512_v4
+124/632183/campos_512_v4
+124/632211/campos_512_v4
+124/632217/campos_512_v4
+124/632301/campos_512_v4
+124/632303/campos_512_v4
+124/632334/campos_512_v4
+124/632367/campos_512_v4
+124/632374/campos_512_v4
+124/632395/campos_512_v4
+124/632415/campos_512_v4
+124/632419/campos_512_v4
+124/632583/campos_512_v4
+124/632675/campos_512_v4
+124/632787/campos_512_v4
+124/632804/campos_512_v4
+124/632815/campos_512_v4
+124/632889/campos_512_v4
+124/632939/campos_512_v4
+124/632949/campos_512_v4
+124/632958/campos_512_v4
+124/633035/campos_512_v4
+124/633064/campos_512_v4
+124/633080/campos_512_v4
+124/633102/campos_512_v4
+124/633130/campos_512_v4
+124/633139/campos_512_v4
+124/633294/campos_512_v4
+124/633344/campos_512_v4
+124/633369/campos_512_v4
+124/633403/campos_512_v4
+124/633428/campos_512_v4
+124/633572/campos_512_v4
+124/633623/campos_512_v4
+124/633672/campos_512_v4
+124/633713/campos_512_v4
+124/633734/campos_512_v4
+124/633736/campos_512_v4
+124/633737/campos_512_v4
+124/633760/campos_512_v4
+124/633772/campos_512_v4
+124/633777/campos_512_v4
+124/633822/campos_512_v4
+124/633831/campos_512_v4
+124/633838/campos_512_v4
+124/633893/campos_512_v4
+124/633907/campos_512_v4
+124/633932/campos_512_v4
+124/633951/campos_512_v4
+124/633984/campos_512_v4
+124/634125/campos_512_v4
+124/634146/campos_512_v4
+124/634165/campos_512_v4
+124/634236/campos_512_v4
+124/634237/campos_512_v4
+124/634244/campos_512_v4
+124/634257/campos_512_v4
+124/634277/campos_512_v4
+124/634324/campos_512_v4
+124/634325/campos_512_v4
+124/634335/campos_512_v4
+124/634346/campos_512_v4
+124/634374/campos_512_v4
+124/634381/campos_512_v4
+124/634387/campos_512_v4
+124/634429/campos_512_v4
+124/634474/campos_512_v4
+124/634484/campos_512_v4
+124/634513/campos_512_v4
+124/634515/campos_512_v4
+124/634524/campos_512_v4
+124/634545/campos_512_v4
+124/634556/campos_512_v4
+124/634612/campos_512_v4
+124/634617/campos_512_v4
+124/634619/campos_512_v4
+124/634648/campos_512_v4
+124/634668/campos_512_v4
+124/634717/campos_512_v4
+124/634735/campos_512_v4
+124/634800/campos_512_v4
+124/634804/campos_512_v4
+124/634820/campos_512_v4
+124/634869/campos_512_v4
+124/634875/campos_512_v4
+124/634955/campos_512_v4
+124/634957/campos_512_v4
+125/635028/campos_512_v4
+125/635043/campos_512_v4
+125/635068/campos_512_v4
+125/635116/campos_512_v4
+125/635120/campos_512_v4
+125/635173/campos_512_v4
+125/635186/campos_512_v4
+125/635237/campos_512_v4
+125/635275/campos_512_v4
+125/635280/campos_512_v4
+125/635285/campos_512_v4
+125/635288/campos_512_v4
+125/635307/campos_512_v4
+125/635317/campos_512_v4
+125/635325/campos_512_v4
+125/635326/campos_512_v4
+125/635362/campos_512_v4
+125/635364/campos_512_v4
+125/635366/campos_512_v4
+125/635378/campos_512_v4
+125/635407/campos_512_v4
+125/635498/campos_512_v4
+125/635563/campos_512_v4
+125/635596/campos_512_v4
+125/635706/campos_512_v4
+125/635708/campos_512_v4
+125/635751/campos_512_v4
+125/635790/campos_512_v4
+125/635824/campos_512_v4
+125/635832/campos_512_v4
+125/635864/campos_512_v4
+125/635889/campos_512_v4
+125/635915/campos_512_v4
+125/635917/campos_512_v4
+125/635957/campos_512_v4
+125/635991/campos_512_v4
+125/636024/campos_512_v4
+125/636030/campos_512_v4
+125/636039/campos_512_v4
+125/636040/campos_512_v4
+125/636044/campos_512_v4
+125/636190/campos_512_v4
+125/636237/campos_512_v4
+125/636266/campos_512_v4
+125/636269/campos_512_v4
+125/636325/campos_512_v4
+125/636451/campos_512_v4
+125/636464/campos_512_v4
+125/636479/campos_512_v4
+125/636488/campos_512_v4
+125/636492/campos_512_v4
+125/636493/campos_512_v4
+125/636500/campos_512_v4
+125/636543/campos_512_v4
+125/636562/campos_512_v4
+125/636572/campos_512_v4
+125/636582/campos_512_v4
+125/636583/campos_512_v4
+125/636589/campos_512_v4
+125/636602/campos_512_v4
+125/636604/campos_512_v4
+125/636612/campos_512_v4
+125/636643/campos_512_v4
+125/636689/campos_512_v4
+125/636705/campos_512_v4
+125/636708/campos_512_v4
+125/636712/campos_512_v4
+125/636733/campos_512_v4
+125/636762/campos_512_v4
+125/636778/campos_512_v4
+125/636786/campos_512_v4
+125/636792/campos_512_v4
+125/636802/campos_512_v4
+125/636834/campos_512_v4
+125/636860/campos_512_v4
+125/636871/campos_512_v4
+125/636875/campos_512_v4
+125/636877/campos_512_v4
+125/636965/campos_512_v4
+125/637069/campos_512_v4
+125/637091/campos_512_v4
+125/637098/campos_512_v4
+125/637205/campos_512_v4
+125/637211/campos_512_v4
+125/637235/campos_512_v4
+125/637370/campos_512_v4
+125/637371/campos_512_v4
+125/637388/campos_512_v4
+125/637421/campos_512_v4
+125/637480/campos_512_v4
+125/637546/campos_512_v4
+125/637557/campos_512_v4
+125/637588/campos_512_v4
+125/637593/campos_512_v4
+125/637631/campos_512_v4
+125/637645/campos_512_v4
+125/637670/campos_512_v4
+125/637688/campos_512_v4
+125/637704/campos_512_v4
+125/637715/campos_512_v4
+125/637739/campos_512_v4
+125/637747/campos_512_v4
+125/637763/campos_512_v4
+125/637796/campos_512_v4
+125/637798/campos_512_v4
+125/637876/campos_512_v4
+125/637930/campos_512_v4
+125/637946/campos_512_v4
+125/638001/campos_512_v4
+125/638045/campos_512_v4
+125/638063/campos_512_v4
+125/638079/campos_512_v4
+125/638092/campos_512_v4
+125/638102/campos_512_v4
+125/638126/campos_512_v4
+125/638141/campos_512_v4
+125/638188/campos_512_v4
+125/638192/campos_512_v4
+125/638204/campos_512_v4
+125/638236/campos_512_v4
+125/638277/campos_512_v4
+125/638346/campos_512_v4
+125/638362/campos_512_v4
+125/638368/campos_512_v4
+125/638382/campos_512_v4
+125/638425/campos_512_v4
+125/638428/campos_512_v4
+125/638442/campos_512_v4
+125/638444/campos_512_v4
+125/638522/campos_512_v4
+125/638561/campos_512_v4
+125/638650/campos_512_v4
+125/638678/campos_512_v4
+125/638726/campos_512_v4
+125/638731/campos_512_v4
+125/638800/campos_512_v4
+125/638806/campos_512_v4
+125/638876/campos_512_v4
+125/638879/campos_512_v4
+125/638969/campos_512_v4
+125/638987/campos_512_v4
+125/638997/campos_512_v4
+125/639087/campos_512_v4
+125/639130/campos_512_v4
+125/639158/campos_512_v4
+125/639166/campos_512_v4
+125/639189/campos_512_v4
+125/639200/campos_512_v4
+125/639210/campos_512_v4
+125/639237/campos_512_v4
+125/639243/campos_512_v4
+125/639303/campos_512_v4
+125/639360/campos_512_v4
+125/639414/campos_512_v4
+125/639437/campos_512_v4
+125/639507/campos_512_v4
+125/639669/campos_512_v4
+125/639706/campos_512_v4
+125/639736/campos_512_v4
+125/639792/campos_512_v4
+125/639903/campos_512_v4
+125/639932/campos_512_v4
+125/639980/campos_512_v4
+127/645048/campos_512_v4
+127/645110/campos_512_v4
+127/645126/campos_512_v4
+127/645173/campos_512_v4
+127/645182/campos_512_v4
+127/645197/campos_512_v4
+127/645255/campos_512_v4
+127/645266/campos_512_v4
+127/645281/campos_512_v4
+127/645282/campos_512_v4
+127/645310/campos_512_v4
+127/645366/campos_512_v4
+127/645392/campos_512_v4
+127/645466/campos_512_v4
+127/645478/campos_512_v4
+127/645506/campos_512_v4
+127/645536/campos_512_v4
+127/645544/campos_512_v4
+127/645556/campos_512_v4
+127/645608/campos_512_v4
+127/645637/campos_512_v4
+127/645652/campos_512_v4
+127/645695/campos_512_v4
+127/645761/campos_512_v4
+127/645843/campos_512_v4
+127/645860/campos_512_v4
+127/645889/campos_512_v4
+127/645951/campos_512_v4
+127/646036/campos_512_v4
+127/646054/campos_512_v4
+127/646058/campos_512_v4
+127/646070/campos_512_v4
+127/646074/campos_512_v4
+127/646083/campos_512_v4
+127/646374/campos_512_v4
+127/646376/campos_512_v4
+127/646439/campos_512_v4
+127/646467/campos_512_v4
+127/646474/campos_512_v4
+127/646556/campos_512_v4
+127/646612/campos_512_v4
+127/646617/campos_512_v4
+127/646667/campos_512_v4
+127/646705/campos_512_v4
+127/646816/campos_512_v4
+127/646833/campos_512_v4
+127/646851/campos_512_v4
+127/646898/campos_512_v4
+127/646950/campos_512_v4
+127/646996/campos_512_v4
+127/646998/campos_512_v4
+127/647022/campos_512_v4
+127/647052/campos_512_v4
+127/647066/campos_512_v4
+127/647104/campos_512_v4
+127/647109/campos_512_v4
+127/647110/campos_512_v4
+127/647114/campos_512_v4
+127/647116/campos_512_v4
+127/647205/campos_512_v4
+127/647230/campos_512_v4
+127/647233/campos_512_v4
+127/647306/campos_512_v4
+127/647330/campos_512_v4
+127/647389/campos_512_v4
+127/647397/campos_512_v4
+127/647413/campos_512_v4
+127/647459/campos_512_v4
+127/647536/campos_512_v4
+127/647558/campos_512_v4
+127/647621/campos_512_v4
+127/647663/campos_512_v4
+127/647712/campos_512_v4
+127/647766/campos_512_v4
+127/647771/campos_512_v4
+127/647825/campos_512_v4
+127/647853/campos_512_v4
+127/647871/campos_512_v4
+127/647920/campos_512_v4
+127/647929/campos_512_v4
+127/648034/campos_512_v4
+127/648071/campos_512_v4
+127/648125/campos_512_v4
+127/648144/campos_512_v4
+127/648151/campos_512_v4
+127/648153/campos_512_v4
+127/648184/campos_512_v4
+127/648196/campos_512_v4
+127/648332/campos_512_v4
+127/648353/campos_512_v4
+127/648358/campos_512_v4
+127/648426/campos_512_v4
+127/648445/campos_512_v4
+127/648450/campos_512_v4
+127/648458/campos_512_v4
+127/648486/campos_512_v4
+127/648518/campos_512_v4
+127/648537/campos_512_v4
+127/648552/campos_512_v4
+127/648566/campos_512_v4
+127/648604/campos_512_v4
+127/648698/campos_512_v4
+127/648729/campos_512_v4
+127/648749/campos_512_v4
+127/648770/campos_512_v4
+127/648792/campos_512_v4
+127/648814/campos_512_v4
+127/648852/campos_512_v4
+127/648860/campos_512_v4
+127/648901/campos_512_v4
+127/648903/campos_512_v4
+127/648911/campos_512_v4
+127/648928/campos_512_v4
+127/648940/campos_512_v4
+127/648949/campos_512_v4
+127/648952/campos_512_v4
+127/648957/campos_512_v4
+127/649012/campos_512_v4
+127/649024/campos_512_v4
+127/649033/campos_512_v4
+127/649081/campos_512_v4
+127/649109/campos_512_v4
+127/649112/campos_512_v4
+127/649119/campos_512_v4
+127/649135/campos_512_v4
+127/649166/campos_512_v4
+127/649220/campos_512_v4
+127/649261/campos_512_v4
+127/649264/campos_512_v4
+127/649269/campos_512_v4
+127/649280/campos_512_v4
+127/649286/campos_512_v4
+127/649355/campos_512_v4
+127/649360/campos_512_v4
+127/649436/campos_512_v4
+127/649437/campos_512_v4
+127/649443/campos_512_v4
+127/649457/campos_512_v4
+127/649473/campos_512_v4
+127/649492/campos_512_v4
+127/649529/campos_512_v4
+127/649539/campos_512_v4
+127/649569/campos_512_v4
+127/649610/campos_512_v4
+127/649660/campos_512_v4
+127/649682/campos_512_v4
+127/649705/campos_512_v4
+127/649709/campos_512_v4
+127/649711/campos_512_v4
+127/649722/campos_512_v4
+127/649725/campos_512_v4
+127/649798/campos_512_v4
+127/649803/campos_512_v4
+127/649820/campos_512_v4
+127/649822/campos_512_v4
+127/649823/campos_512_v4
+127/649839/campos_512_v4
+127/649866/campos_512_v4
+127/649933/campos_512_v4
+127/649956/campos_512_v4
+128/650036/campos_512_v4
+128/650050/campos_512_v4
+128/650082/campos_512_v4
+128/650086/campos_512_v4
+128/650173/campos_512_v4
+128/650174/campos_512_v4
+128/650262/campos_512_v4
+128/650274/campos_512_v4
+128/650277/campos_512_v4
+128/650280/campos_512_v4
+128/650297/campos_512_v4
+128/650329/campos_512_v4
+128/650330/campos_512_v4
+128/650352/campos_512_v4
+128/650468/campos_512_v4
+128/650471/campos_512_v4
+128/650525/campos_512_v4
+128/650564/campos_512_v4
+128/650571/campos_512_v4
+128/650581/campos_512_v4
+128/650642/campos_512_v4
+128/650644/campos_512_v4
+128/650661/campos_512_v4
+128/650678/campos_512_v4
+128/650710/campos_512_v4
+128/650711/campos_512_v4
+128/650735/campos_512_v4
+128/650744/campos_512_v4
+128/650764/campos_512_v4
+128/650767/campos_512_v4
+128/650782/campos_512_v4
+128/650837/campos_512_v4
+128/650847/campos_512_v4
+128/650886/campos_512_v4
+128/650887/campos_512_v4
+128/650890/campos_512_v4
+128/651011/campos_512_v4
+128/651022/campos_512_v4
+128/651023/campos_512_v4
+128/651029/campos_512_v4
+128/651060/campos_512_v4
+128/651100/campos_512_v4
+128/651103/campos_512_v4
+128/651175/campos_512_v4
+128/651219/campos_512_v4
+128/651249/campos_512_v4
+128/651260/campos_512_v4
+128/651413/campos_512_v4
+128/651529/campos_512_v4
+128/651537/campos_512_v4
+128/651545/campos_512_v4
+128/651641/campos_512_v4
+128/651746/campos_512_v4
+128/651765/campos_512_v4
+128/651791/campos_512_v4
+128/651808/campos_512_v4
+128/651832/campos_512_v4
+128/651873/campos_512_v4
+128/651938/campos_512_v4
+128/651990/campos_512_v4
+128/652048/campos_512_v4
+128/652050/campos_512_v4
+128/652083/campos_512_v4
+128/652096/campos_512_v4
+128/652125/campos_512_v4
+128/652163/campos_512_v4
+128/652174/campos_512_v4
+128/652201/campos_512_v4
+128/652217/campos_512_v4
+128/652221/campos_512_v4
+128/652245/campos_512_v4
+128/652255/campos_512_v4
+128/652282/campos_512_v4
+128/652294/campos_512_v4
+128/652308/campos_512_v4
+128/652351/campos_512_v4
+128/652352/campos_512_v4
+128/652392/campos_512_v4
+128/652404/campos_512_v4
+128/652407/campos_512_v4
+128/652409/campos_512_v4
+128/652426/campos_512_v4
+128/652431/campos_512_v4
+128/652442/campos_512_v4
+128/652465/campos_512_v4
+128/652503/campos_512_v4
+128/652550/campos_512_v4
+128/652588/campos_512_v4
+128/652614/campos_512_v4
+128/652623/campos_512_v4
+128/652660/campos_512_v4
+128/652685/campos_512_v4
+128/652756/campos_512_v4
+128/652814/campos_512_v4
+128/652823/campos_512_v4
+128/652859/campos_512_v4
+128/652873/campos_512_v4
+128/652883/campos_512_v4
+128/652907/campos_512_v4
+128/652910/campos_512_v4
+128/652934/campos_512_v4
+128/652938/campos_512_v4
+128/652951/campos_512_v4
+128/652976/campos_512_v4
+128/652986/campos_512_v4
+128/653016/campos_512_v4
+128/653040/campos_512_v4
+128/653041/campos_512_v4
+128/653082/campos_512_v4
+128/653129/campos_512_v4
+128/653150/campos_512_v4
+128/653159/campos_512_v4
+128/653169/campos_512_v4
+128/653173/campos_512_v4
+128/653239/campos_512_v4
+128/653244/campos_512_v4
+128/653270/campos_512_v4
+128/653408/campos_512_v4
+128/653418/campos_512_v4
+128/653487/campos_512_v4
+128/653497/campos_512_v4
+128/653547/campos_512_v4
+128/653554/campos_512_v4
+128/653599/campos_512_v4
+128/653613/campos_512_v4
+128/653661/campos_512_v4
+128/653702/campos_512_v4
+128/653733/campos_512_v4
+128/653765/campos_512_v4
+128/653803/campos_512_v4
+128/653890/campos_512_v4
+128/653904/campos_512_v4
+128/653908/campos_512_v4
+128/653933/campos_512_v4
+128/654024/campos_512_v4
+128/654026/campos_512_v4
+128/654031/campos_512_v4
+128/654056/campos_512_v4
+128/654068/campos_512_v4
+128/654152/campos_512_v4
+128/654161/campos_512_v4
+128/654184/campos_512_v4
+128/654185/campos_512_v4
+128/654198/campos_512_v4
+128/654244/campos_512_v4
+128/654247/campos_512_v4
+128/654363/campos_512_v4
+128/654468/campos_512_v4
+128/654473/campos_512_v4
+128/654514/campos_512_v4
+128/654548/campos_512_v4
+128/654576/campos_512_v4
+128/654586/campos_512_v4
+128/654599/campos_512_v4
+128/654672/campos_512_v4
+128/654707/campos_512_v4
+128/654800/campos_512_v4
+128/654842/campos_512_v4
+128/654959/campos_512_v4
+128/654996/campos_512_v4
+129/655040/campos_512_v4
+129/655062/campos_512_v4
+129/655078/campos_512_v4
+129/655091/campos_512_v4
+129/655099/campos_512_v4
+129/655115/campos_512_v4
+129/655117/campos_512_v4
+129/655134/campos_512_v4
+129/655158/campos_512_v4
+129/655174/campos_512_v4
+129/655183/campos_512_v4
+129/655188/campos_512_v4
+129/655220/campos_512_v4
+129/655221/campos_512_v4
+129/655242/campos_512_v4
+129/655254/campos_512_v4
+129/655262/campos_512_v4
+129/655323/campos_512_v4
+129/655352/campos_512_v4
+129/655394/campos_512_v4
+129/655397/campos_512_v4
+129/655421/campos_512_v4
+129/655436/campos_512_v4
+129/655483/campos_512_v4
+129/655542/campos_512_v4
+129/655553/campos_512_v4
+129/655573/campos_512_v4
+129/655601/campos_512_v4
+129/655653/campos_512_v4
+129/655657/campos_512_v4
+129/655853/campos_512_v4
+129/655906/campos_512_v4
+129/655908/campos_512_v4
+129/655927/campos_512_v4
+129/655947/campos_512_v4
+129/655964/campos_512_v4
+129/655965/campos_512_v4
+129/656062/campos_512_v4
+129/656153/campos_512_v4
+129/656161/campos_512_v4
+129/656175/campos_512_v4
+129/656269/campos_512_v4
+129/656278/campos_512_v4
+129/656343/campos_512_v4
+129/656344/campos_512_v4
+129/656367/campos_512_v4
+129/656377/campos_512_v4
+129/656390/campos_512_v4
+129/656395/campos_512_v4
+129/656405/campos_512_v4
+129/656409/campos_512_v4
+129/656472/campos_512_v4
+129/656499/campos_512_v4
+129/656535/campos_512_v4
+129/656539/campos_512_v4
+129/656591/campos_512_v4
+129/656625/campos_512_v4
+129/656656/campos_512_v4
+129/656658/campos_512_v4
+129/656715/campos_512_v4
+129/656731/campos_512_v4
+129/656735/campos_512_v4
+129/656752/campos_512_v4
+129/656778/campos_512_v4
+129/656848/campos_512_v4
+129/656863/campos_512_v4
+129/656882/campos_512_v4
+129/656883/campos_512_v4
+129/656935/campos_512_v4
+129/656975/campos_512_v4
+129/657045/campos_512_v4
+129/657138/campos_512_v4
+129/657187/campos_512_v4
+129/657207/campos_512_v4
+129/657222/campos_512_v4
+129/657242/campos_512_v4
+129/657246/campos_512_v4
+129/657256/campos_512_v4
+129/657269/campos_512_v4
+129/657272/campos_512_v4
+129/657297/campos_512_v4
+129/657312/campos_512_v4
+129/657356/campos_512_v4
+129/657378/campos_512_v4
+129/657391/campos_512_v4
+129/657399/campos_512_v4
+129/657435/campos_512_v4
+129/657446/campos_512_v4
+129/657464/campos_512_v4
+129/657489/campos_512_v4
+129/657490/campos_512_v4
+129/657502/campos_512_v4
+129/657554/campos_512_v4
+129/657582/campos_512_v4
+129/657626/campos_512_v4
+129/657673/campos_512_v4
+129/657701/campos_512_v4
+129/657750/campos_512_v4
+129/657814/campos_512_v4
+129/657829/campos_512_v4
+129/657840/campos_512_v4
+129/657878/campos_512_v4
+129/657906/campos_512_v4
+129/657936/campos_512_v4
+129/657965/campos_512_v4
+129/657982/campos_512_v4
+129/657989/campos_512_v4
+129/657995/campos_512_v4
+129/658026/campos_512_v4
+129/658079/campos_512_v4
+129/658080/campos_512_v4
+129/658125/campos_512_v4
+129/658132/campos_512_v4
+129/658191/campos_512_v4
+129/658195/campos_512_v4
+129/658237/campos_512_v4
+129/658245/campos_512_v4
+129/658259/campos_512_v4
+129/658339/campos_512_v4
+129/658342/campos_512_v4
+129/658365/campos_512_v4
+129/658377/campos_512_v4
+129/658429/campos_512_v4
+129/658446/campos_512_v4
+129/658455/campos_512_v4
+129/658468/campos_512_v4
+129/658506/campos_512_v4
+129/658564/campos_512_v4
+129/658644/campos_512_v4
+129/658667/campos_512_v4
+129/658685/campos_512_v4
+129/658717/campos_512_v4
+129/658735/campos_512_v4
+129/658738/campos_512_v4
+129/658765/campos_512_v4
+129/658772/campos_512_v4
+129/658788/campos_512_v4
+129/658843/campos_512_v4
+129/658848/campos_512_v4
+129/658928/campos_512_v4
+129/658955/campos_512_v4
+129/658957/campos_512_v4
+129/659006/campos_512_v4
+129/659096/campos_512_v4
+129/659114/campos_512_v4
+129/659144/campos_512_v4
+129/659197/campos_512_v4
+129/659266/campos_512_v4
+129/659316/campos_512_v4
+129/659332/campos_512_v4
+129/659433/campos_512_v4
+129/659437/campos_512_v4
+129/659503/campos_512_v4
+129/659564/campos_512_v4
+129/659593/campos_512_v4
+129/659606/campos_512_v4
+129/659611/campos_512_v4
+129/659618/campos_512_v4
+129/659628/campos_512_v4
+129/659849/campos_512_v4
+129/659860/campos_512_v4
+129/659882/campos_512_v4
+129/659920/campos_512_v4
+129/659940/campos_512_v4
+129/659962/campos_512_v4
+129/659974/campos_512_v4
+129/659982/campos_512_v4
+13/75050/campos_512_v4
+13/75153/campos_512_v4
+13/75194/campos_512_v4
+13/75208/campos_512_v4
+13/75212/campos_512_v4
+13/75277/campos_512_v4
+13/75284/campos_512_v4
+13/75363/campos_512_v4
+13/75365/campos_512_v4
+13/75425/campos_512_v4
+13/75436/campos_512_v4
+13/75458/campos_512_v4
+13/75519/campos_512_v4
+13/75533/campos_512_v4
+13/75580/campos_512_v4
+13/75611/campos_512_v4
+13/75629/campos_512_v4
+13/75661/campos_512_v4
+13/75662/campos_512_v4
+13/75679/campos_512_v4
+13/75705/campos_512_v4
+13/75723/campos_512_v4
+13/75744/campos_512_v4
+13/75745/campos_512_v4
+13/75792/campos_512_v4
+13/75815/campos_512_v4
+13/75816/campos_512_v4
+13/75869/campos_512_v4
+13/75871/campos_512_v4
+13/75885/campos_512_v4
+13/75939/campos_512_v4
+13/75968/campos_512_v4
+13/75972/campos_512_v4
+13/75973/campos_512_v4
+13/75997/campos_512_v4
+13/76052/campos_512_v4
+13/76080/campos_512_v4
+13/76231/campos_512_v4
+13/76256/campos_512_v4
+13/76274/campos_512_v4
+13/76324/campos_512_v4
+13/76326/campos_512_v4
+13/76380/campos_512_v4
+13/76411/campos_512_v4
+13/76423/campos_512_v4
+13/76453/campos_512_v4
+13/76465/campos_512_v4
+13/76479/campos_512_v4
+13/76528/campos_512_v4
+13/76540/campos_512_v4
+13/76586/campos_512_v4
+13/76587/campos_512_v4
+13/76622/campos_512_v4
+13/76657/campos_512_v4
+13/76672/campos_512_v4
+13/76676/campos_512_v4
+13/76758/campos_512_v4
+13/76828/campos_512_v4
+13/76849/campos_512_v4
+13/76941/campos_512_v4
+13/76970/campos_512_v4
+13/77046/campos_512_v4
+13/77110/campos_512_v4
+13/77115/campos_512_v4
+13/77123/campos_512_v4
+13/77193/campos_512_v4
+13/77234/campos_512_v4
+13/77369/campos_512_v4
+13/77394/campos_512_v4
+13/77410/campos_512_v4
+13/77412/campos_512_v4
+13/77415/campos_512_v4
+13/77446/campos_512_v4
+13/77447/campos_512_v4
+13/77479/campos_512_v4
+13/77536/campos_512_v4
+13/77550/campos_512_v4
+13/77576/campos_512_v4
+13/77597/campos_512_v4
+13/77621/campos_512_v4
+13/77649/campos_512_v4
+13/77651/campos_512_v4
+13/77724/campos_512_v4
+13/77736/campos_512_v4
+13/77761/campos_512_v4
+13/77763/campos_512_v4
+13/77767/campos_512_v4
+13/77829/campos_512_v4
+13/77877/campos_512_v4
+13/77922/campos_512_v4
+13/77927/campos_512_v4
+13/77993/campos_512_v4
+13/78048/campos_512_v4
+13/78054/campos_512_v4
+13/78062/campos_512_v4
+13/78069/campos_512_v4
+13/78127/campos_512_v4
+13/78149/campos_512_v4
+13/78156/campos_512_v4
+13/78223/campos_512_v4
+13/78230/campos_512_v4
+13/78248/campos_512_v4
+13/78253/campos_512_v4
+13/78260/campos_512_v4
+13/78295/campos_512_v4
+13/78317/campos_512_v4
+13/78323/campos_512_v4
+13/78335/campos_512_v4
+13/78374/campos_512_v4
+13/78540/campos_512_v4
+13/78543/campos_512_v4
+13/78624/campos_512_v4
+13/78632/campos_512_v4
+13/78692/campos_512_v4
+13/78745/campos_512_v4
+13/78761/campos_512_v4
+13/78824/campos_512_v4
+13/78891/campos_512_v4
+13/78942/campos_512_v4
+13/78967/campos_512_v4
+13/79032/campos_512_v4
+13/79033/campos_512_v4
+13/79058/campos_512_v4
+13/79062/campos_512_v4
+13/79066/campos_512_v4
+13/79091/campos_512_v4
+13/79107/campos_512_v4
+13/79150/campos_512_v4
+13/79171/campos_512_v4
+13/79218/campos_512_v4
+13/79227/campos_512_v4
+13/79237/campos_512_v4
+13/79268/campos_512_v4
+13/79269/campos_512_v4
+13/79401/campos_512_v4
+13/79413/campos_512_v4
+13/79429/campos_512_v4
+13/79441/campos_512_v4
+13/79455/campos_512_v4
+13/79498/campos_512_v4
+13/79577/campos_512_v4
+13/79578/campos_512_v4
+13/79607/campos_512_v4
+13/79612/campos_512_v4
+13/79741/campos_512_v4
+13/79791/campos_512_v4
+13/79802/campos_512_v4
+13/79831/campos_512_v4
+13/79970/campos_512_v4
+13/79972/campos_512_v4
+13/79995/campos_512_v4
+130/660035/campos_512_v4
+130/660095/campos_512_v4
+130/660102/campos_512_v4
+130/660130/campos_512_v4
+130/660245/campos_512_v4
+130/660274/campos_512_v4
+130/660289/campos_512_v4
+130/660308/campos_512_v4
+130/660316/campos_512_v4
+130/660357/campos_512_v4
+130/660366/campos_512_v4
+130/660367/campos_512_v4
+130/660370/campos_512_v4
+130/660399/campos_512_v4
+130/660447/campos_512_v4
+130/660450/campos_512_v4
+130/660464/campos_512_v4
+130/660513/campos_512_v4
+130/660524/campos_512_v4
+130/660567/campos_512_v4
+130/660572/campos_512_v4
+130/660578/campos_512_v4
+130/660632/campos_512_v4
+130/660664/campos_512_v4
+130/660666/campos_512_v4
+130/660721/campos_512_v4
+130/660777/campos_512_v4
+130/660809/campos_512_v4
+130/660855/campos_512_v4
+130/660901/campos_512_v4
+130/660910/campos_512_v4
+130/660944/campos_512_v4
+130/660945/campos_512_v4
+130/660951/campos_512_v4
+130/660958/campos_512_v4
+130/660960/campos_512_v4
+130/661162/campos_512_v4
+130/661214/campos_512_v4
+130/661303/campos_512_v4
+130/661308/campos_512_v4
+130/661309/campos_512_v4
+130/661316/campos_512_v4
+130/661397/campos_512_v4
+130/661436/campos_512_v4
+130/661452/campos_512_v4
+130/661545/campos_512_v4
+130/661590/campos_512_v4
+130/661602/campos_512_v4
+130/661646/campos_512_v4
+130/661687/campos_512_v4
+130/661752/campos_512_v4
+130/661783/campos_512_v4
+130/661827/campos_512_v4
+130/661878/campos_512_v4
+130/661951/campos_512_v4
+130/662001/campos_512_v4
+130/662050/campos_512_v4
+130/662070/campos_512_v4
+130/662076/campos_512_v4
+130/662136/campos_512_v4
+130/662338/campos_512_v4
+130/662357/campos_512_v4
+130/662363/campos_512_v4
+130/662366/campos_512_v4
+130/662412/campos_512_v4
+130/662416/campos_512_v4
+130/662493/campos_512_v4
+130/662511/campos_512_v4
+130/662550/campos_512_v4
+130/662572/campos_512_v4
+130/662593/campos_512_v4
+130/662600/campos_512_v4
+130/662605/campos_512_v4
+130/662659/campos_512_v4
+130/662801/campos_512_v4
+130/662860/campos_512_v4
+130/662868/campos_512_v4
+130/662890/campos_512_v4
+130/662904/campos_512_v4
+130/662910/campos_512_v4
+130/662939/campos_512_v4
+130/662954/campos_512_v4
+130/662957/campos_512_v4
+130/662986/campos_512_v4
+130/663016/campos_512_v4
+130/663092/campos_512_v4
+130/663098/campos_512_v4
+130/663108/campos_512_v4
+130/663203/campos_512_v4
+130/663247/campos_512_v4
+130/663288/campos_512_v4
+130/663303/campos_512_v4
+130/663353/campos_512_v4
+130/663397/campos_512_v4
+130/663473/campos_512_v4
+130/663498/campos_512_v4
+130/663511/campos_512_v4
+130/663534/campos_512_v4
+130/663562/campos_512_v4
+130/663771/campos_512_v4
+130/663778/campos_512_v4
+130/663789/campos_512_v4
+130/663839/campos_512_v4
+130/663998/campos_512_v4
+130/664051/campos_512_v4
+130/664070/campos_512_v4
+130/664097/campos_512_v4
+130/664154/campos_512_v4
+130/664162/campos_512_v4
+130/664176/campos_512_v4
+130/664255/campos_512_v4
+130/664262/campos_512_v4
+130/664269/campos_512_v4
+130/664308/campos_512_v4
+130/664331/campos_512_v4
+130/664366/campos_512_v4
+130/664378/campos_512_v4
+130/664455/campos_512_v4
+130/664470/campos_512_v4
+130/664494/campos_512_v4
+130/664537/campos_512_v4
+130/664572/campos_512_v4
+130/664606/campos_512_v4
+130/664639/campos_512_v4
+130/664667/campos_512_v4
+130/664669/campos_512_v4
+130/664722/campos_512_v4
+130/664724/campos_512_v4
+130/664752/campos_512_v4
+130/664770/campos_512_v4
+130/664782/campos_512_v4
+130/664792/campos_512_v4
+130/664838/campos_512_v4
+130/664845/campos_512_v4
+130/664874/campos_512_v4
+130/664889/campos_512_v4
+130/664896/campos_512_v4
+130/664904/campos_512_v4
+130/664937/campos_512_v4
+130/664987/campos_512_v4
+130/664991/campos_512_v4
+131/665091/campos_512_v4
+131/665098/campos_512_v4
+131/665114/campos_512_v4
+131/665187/campos_512_v4
+131/665239/campos_512_v4
+131/665273/campos_512_v4
+131/665274/campos_512_v4
+131/665301/campos_512_v4
+131/665324/campos_512_v4
+131/665341/campos_512_v4
+131/665392/campos_512_v4
+131/665394/campos_512_v4
+131/665397/campos_512_v4
+131/665403/campos_512_v4
+131/665412/campos_512_v4
+131/665420/campos_512_v4
+131/665443/campos_512_v4
+131/665483/campos_512_v4
+131/665538/campos_512_v4
+131/665555/campos_512_v4
+131/665590/campos_512_v4
+131/665597/campos_512_v4
+131/665598/campos_512_v4
+131/665626/campos_512_v4
+131/665651/campos_512_v4
+131/665666/campos_512_v4
+131/665696/campos_512_v4
+131/665719/campos_512_v4
+131/665735/campos_512_v4
+131/665756/campos_512_v4
+131/665777/campos_512_v4
+131/665803/campos_512_v4
+131/665823/campos_512_v4
+131/665831/campos_512_v4
+131/665838/campos_512_v4
+131/665868/campos_512_v4
+131/665909/campos_512_v4
+131/666003/campos_512_v4
+131/666008/campos_512_v4
+131/666020/campos_512_v4
+131/666025/campos_512_v4
+131/666071/campos_512_v4
+131/666081/campos_512_v4
+131/666112/campos_512_v4
+131/666114/campos_512_v4
+131/666167/campos_512_v4
+131/666211/campos_512_v4
+131/666228/campos_512_v4
+131/666290/campos_512_v4
+131/666314/campos_512_v4
+131/666333/campos_512_v4
+131/666379/campos_512_v4
+131/666444/campos_512_v4
+131/666451/campos_512_v4
+131/666480/campos_512_v4
+131/666481/campos_512_v4
+131/666509/campos_512_v4
+131/666513/campos_512_v4
+131/666515/campos_512_v4
+131/666525/campos_512_v4
+131/666593/campos_512_v4
+131/666605/campos_512_v4
+131/666649/campos_512_v4
+131/666731/campos_512_v4
+131/666829/campos_512_v4
+131/666852/campos_512_v4
+131/666898/campos_512_v4
+131/666947/campos_512_v4
+131/666948/campos_512_v4
+131/666949/campos_512_v4
+131/666993/campos_512_v4
+131/667089/campos_512_v4
+131/667109/campos_512_v4
+131/667178/campos_512_v4
+131/667231/campos_512_v4
+131/667241/campos_512_v4
+131/667349/campos_512_v4
+131/667404/campos_512_v4
+131/667405/campos_512_v4
+131/667462/campos_512_v4
+131/667482/campos_512_v4
+131/667484/campos_512_v4
+131/667506/campos_512_v4
+131/667512/campos_512_v4
+131/667522/campos_512_v4
+131/667588/campos_512_v4
+131/667695/campos_512_v4
+131/667712/campos_512_v4
+131/667753/campos_512_v4
+131/667779/campos_512_v4
+131/667809/campos_512_v4
+131/667882/campos_512_v4
+131/667964/campos_512_v4
+131/667983/campos_512_v4
+131/667987/campos_512_v4
+131/668019/campos_512_v4
+131/668036/campos_512_v4
+131/668052/campos_512_v4
+131/668160/campos_512_v4
+131/668175/campos_512_v4
+131/668182/campos_512_v4
+131/668205/campos_512_v4
+131/668211/campos_512_v4
+131/668261/campos_512_v4
+131/668312/campos_512_v4
+131/668342/campos_512_v4
+131/668402/campos_512_v4
+131/668409/campos_512_v4
+131/668422/campos_512_v4
+131/668440/campos_512_v4
+131/668477/campos_512_v4
+131/668519/campos_512_v4
+131/668541/campos_512_v4
+131/668555/campos_512_v4
+131/668683/campos_512_v4
+131/668716/campos_512_v4
+131/668757/campos_512_v4
+131/668787/campos_512_v4
+131/668809/campos_512_v4
+131/668841/campos_512_v4
+131/668864/campos_512_v4
+131/668897/campos_512_v4
+131/668929/campos_512_v4
+131/668936/campos_512_v4
+131/668950/campos_512_v4
+131/668958/campos_512_v4
+131/668963/campos_512_v4
+131/668974/campos_512_v4
+131/669005/campos_512_v4
+131/669014/campos_512_v4
+131/669075/campos_512_v4
+131/669226/campos_512_v4
+131/669235/campos_512_v4
+131/669238/campos_512_v4
+131/669247/campos_512_v4
+131/669264/campos_512_v4
+131/669448/campos_512_v4
+131/669450/campos_512_v4
+131/669452/campos_512_v4
+131/669458/campos_512_v4
+131/669515/campos_512_v4
+131/669522/campos_512_v4
+131/669540/campos_512_v4
+131/669575/campos_512_v4
+131/669602/campos_512_v4
+131/669643/campos_512_v4
+131/669649/campos_512_v4
+131/669650/campos_512_v4
+131/669663/campos_512_v4
+131/669686/campos_512_v4
+131/669695/campos_512_v4
+131/669714/campos_512_v4
+131/669732/campos_512_v4
+131/669748/campos_512_v4
+131/669806/campos_512_v4
+131/669821/campos_512_v4
+131/669826/campos_512_v4
+131/669828/campos_512_v4
+131/669899/campos_512_v4
+131/669910/campos_512_v4
+132/670012/campos_512_v4
+132/670053/campos_512_v4
+132/670073/campos_512_v4
+132/670136/campos_512_v4
+132/670187/campos_512_v4
+132/670208/campos_512_v4
+132/670292/campos_512_v4
+132/670472/campos_512_v4
+132/670483/campos_512_v4
+132/670506/campos_512_v4
+132/670530/campos_512_v4
+132/670581/campos_512_v4
+132/670673/campos_512_v4
+132/670688/campos_512_v4
+132/670708/campos_512_v4
+132/670777/campos_512_v4
+132/670828/campos_512_v4
+132/670845/campos_512_v4
+132/670848/campos_512_v4
+132/670851/campos_512_v4
+132/670903/campos_512_v4
+132/670920/campos_512_v4
+132/670938/campos_512_v4
+132/671002/campos_512_v4
+132/671011/campos_512_v4
+132/671071/campos_512_v4
+132/671093/campos_512_v4
+132/671097/campos_512_v4
+132/671130/campos_512_v4
+132/671171/campos_512_v4
+132/671183/campos_512_v4
+132/671187/campos_512_v4
+132/671193/campos_512_v4
+132/671197/campos_512_v4
+132/671241/campos_512_v4
+132/671343/campos_512_v4
+132/671352/campos_512_v4
+132/671363/campos_512_v4
+132/671364/campos_512_v4
+132/671422/campos_512_v4
+132/671428/campos_512_v4
+132/671475/campos_512_v4
+132/671485/campos_512_v4
+132/671525/campos_512_v4
+132/671556/campos_512_v4
+132/671583/campos_512_v4
+132/671656/campos_512_v4
+132/671682/campos_512_v4
+132/671781/campos_512_v4
+132/671783/campos_512_v4
+132/671813/campos_512_v4
+132/671817/campos_512_v4
+132/671824/campos_512_v4
+132/671862/campos_512_v4
+132/671867/campos_512_v4
+132/672035/campos_512_v4
+132/672054/campos_512_v4
+132/672065/campos_512_v4
+132/672090/campos_512_v4
+132/672124/campos_512_v4
+132/672130/campos_512_v4
+132/672201/campos_512_v4
+132/672227/campos_512_v4
+132/672295/campos_512_v4
+132/672313/campos_512_v4
+132/672314/campos_512_v4
+132/672320/campos_512_v4
+132/672332/campos_512_v4
+132/672337/campos_512_v4
+132/672346/campos_512_v4
+132/672386/campos_512_v4
+132/672406/campos_512_v4
+132/672433/campos_512_v4
+132/672439/campos_512_v4
+132/672517/campos_512_v4
+132/672585/campos_512_v4
+132/672599/campos_512_v4
+132/672646/campos_512_v4
+132/672696/campos_512_v4
+132/672775/campos_512_v4
+132/672809/campos_512_v4
+132/672910/campos_512_v4
+132/672916/campos_512_v4
+132/672957/campos_512_v4
+132/672993/campos_512_v4
+132/673059/campos_512_v4
+132/673076/campos_512_v4
+132/673141/campos_512_v4
+132/673178/campos_512_v4
+132/673181/campos_512_v4
+132/673259/campos_512_v4
+132/673308/campos_512_v4
+132/673330/campos_512_v4
+132/673366/campos_512_v4
+132/673382/campos_512_v4
+132/673440/campos_512_v4
+132/673458/campos_512_v4
+132/673465/campos_512_v4
+132/673484/campos_512_v4
+132/673556/campos_512_v4
+132/673568/campos_512_v4
+132/673573/campos_512_v4
+132/673627/campos_512_v4
+132/673633/campos_512_v4
+132/673696/campos_512_v4
+132/673766/campos_512_v4
+132/673780/campos_512_v4
+132/673794/campos_512_v4
+132/673809/campos_512_v4
+132/673885/campos_512_v4
+132/673888/campos_512_v4
+132/673908/campos_512_v4
+132/673925/campos_512_v4
+132/673936/campos_512_v4
+132/674013/campos_512_v4
+132/674057/campos_512_v4
+132/674113/campos_512_v4
+132/674256/campos_512_v4
+132/674261/campos_512_v4
+132/674294/campos_512_v4
+132/674335/campos_512_v4
+132/674343/campos_512_v4
+132/674378/campos_512_v4
+132/674383/campos_512_v4
+132/674406/campos_512_v4
+132/674438/campos_512_v4
+132/674508/campos_512_v4
+132/674562/campos_512_v4
+132/674629/campos_512_v4
+132/674657/campos_512_v4
+132/674664/campos_512_v4
+132/674704/campos_512_v4
+132/674738/campos_512_v4
+132/674780/campos_512_v4
+132/674816/campos_512_v4
+132/674817/campos_512_v4
+132/674846/campos_512_v4
+132/674903/campos_512_v4
+132/674909/campos_512_v4
+132/674935/campos_512_v4
+132/674951/campos_512_v4
+132/674975/campos_512_v4
+133/675004/campos_512_v4
+133/675055/campos_512_v4
+133/675075/campos_512_v4
+133/675082/campos_512_v4
+133/675096/campos_512_v4
+133/675101/campos_512_v4
+133/675105/campos_512_v4
+133/675166/campos_512_v4
+133/675195/campos_512_v4
+133/675297/campos_512_v4
+133/675335/campos_512_v4
+133/675350/campos_512_v4
+133/675358/campos_512_v4
+133/675368/campos_512_v4
+133/675381/campos_512_v4
+133/675405/campos_512_v4
+133/675409/campos_512_v4
+133/675412/campos_512_v4
+133/675488/campos_512_v4
+133/675506/campos_512_v4
+133/675536/campos_512_v4
+133/675557/campos_512_v4
+133/675566/campos_512_v4
+133/675679/campos_512_v4
+133/675705/campos_512_v4
+133/675706/campos_512_v4
+133/675715/campos_512_v4
+133/675740/campos_512_v4
+133/675742/campos_512_v4
+133/675804/campos_512_v4
+133/675809/campos_512_v4
+133/675874/campos_512_v4
+133/675920/campos_512_v4
+133/675926/campos_512_v4
+133/675970/campos_512_v4
+133/675986/campos_512_v4
+133/676015/campos_512_v4
+133/676016/campos_512_v4
+133/676078/campos_512_v4
+133/676109/campos_512_v4
+133/676201/campos_512_v4
+133/676317/campos_512_v4
+133/676326/campos_512_v4
+133/676416/campos_512_v4
+133/676476/campos_512_v4
+133/676501/campos_512_v4
+133/676537/campos_512_v4
+133/676553/campos_512_v4
+133/676586/campos_512_v4
+133/676616/campos_512_v4
+133/676648/campos_512_v4
+133/676657/campos_512_v4
+133/676683/campos_512_v4
+133/676690/campos_512_v4
+133/676704/campos_512_v4
+133/676706/campos_512_v4
+133/676711/campos_512_v4
+133/676743/campos_512_v4
+133/676746/campos_512_v4
+133/676768/campos_512_v4
+133/676824/campos_512_v4
+133/676846/campos_512_v4
+133/676859/campos_512_v4
+133/676860/campos_512_v4
+133/676862/campos_512_v4
+133/676934/campos_512_v4
+133/676943/campos_512_v4
+133/677044/campos_512_v4
+133/677045/campos_512_v4
+133/677065/campos_512_v4
+133/677083/campos_512_v4
+133/677103/campos_512_v4
+133/677104/campos_512_v4
+133/677106/campos_512_v4
+133/677141/campos_512_v4
+133/677142/campos_512_v4
+133/677213/campos_512_v4
+133/677237/campos_512_v4
+133/677251/campos_512_v4
+133/677317/campos_512_v4
+133/677367/campos_512_v4
+133/677391/campos_512_v4
+133/677414/campos_512_v4
+133/677433/campos_512_v4
+133/677462/campos_512_v4
+133/677464/campos_512_v4
+133/677486/campos_512_v4
+133/677554/campos_512_v4
+133/677563/campos_512_v4
+133/677568/campos_512_v4
+133/677674/campos_512_v4
+133/677720/campos_512_v4
+133/677839/campos_512_v4
+133/677840/campos_512_v4
+133/677843/campos_512_v4
+133/677884/campos_512_v4
+133/677914/campos_512_v4
+133/677942/campos_512_v4
+133/678008/campos_512_v4
+133/678066/campos_512_v4
+133/678157/campos_512_v4
+133/678271/campos_512_v4
+133/678289/campos_512_v4
+133/678299/campos_512_v4
+133/678340/campos_512_v4
+133/678367/campos_512_v4
+133/678392/campos_512_v4
+133/678420/campos_512_v4
+133/678507/campos_512_v4
+133/678541/campos_512_v4
+133/678563/campos_512_v4
+133/678615/campos_512_v4
+133/678716/campos_512_v4
+133/678727/campos_512_v4
+133/678772/campos_512_v4
+133/678825/campos_512_v4
+133/678830/campos_512_v4
+133/678840/campos_512_v4
+133/678914/campos_512_v4
+133/678964/campos_512_v4
+133/678977/campos_512_v4
+133/678982/campos_512_v4
+133/679044/campos_512_v4
+133/679102/campos_512_v4
+133/679166/campos_512_v4
+133/679168/campos_512_v4
+133/679175/campos_512_v4
+133/679179/campos_512_v4
+133/679204/campos_512_v4
+133/679205/campos_512_v4
+133/679212/campos_512_v4
+133/679248/campos_512_v4
+133/679279/campos_512_v4
+133/679338/campos_512_v4
+133/679414/campos_512_v4
+133/679431/campos_512_v4
+133/679437/campos_512_v4
+133/679510/campos_512_v4
+133/679573/campos_512_v4
+133/679584/campos_512_v4
+133/679649/campos_512_v4
+133/679669/campos_512_v4
+133/679709/campos_512_v4
+133/679717/campos_512_v4
+133/679718/campos_512_v4
+133/679726/campos_512_v4
+133/679756/campos_512_v4
+133/679892/campos_512_v4
+133/679897/campos_512_v4
+133/679901/campos_512_v4
+134/680011/campos_512_v4
+134/680025/campos_512_v4
+134/680026/campos_512_v4
+134/680035/campos_512_v4
+134/680052/campos_512_v4
+134/680123/campos_512_v4
+134/680237/campos_512_v4
+134/680295/campos_512_v4
+134/680305/campos_512_v4
+134/680320/campos_512_v4
+134/680346/campos_512_v4
+134/680373/campos_512_v4
+134/680425/campos_512_v4
+134/680438/campos_512_v4
+134/680490/campos_512_v4
+134/680493/campos_512_v4
+134/680512/campos_512_v4
+134/680528/campos_512_v4
+134/680529/campos_512_v4
+134/680587/campos_512_v4
+134/680629/campos_512_v4
+134/680692/campos_512_v4
+134/680697/campos_512_v4
+134/680860/campos_512_v4
+134/680973/campos_512_v4
+134/680987/campos_512_v4
+134/681128/campos_512_v4
+134/681137/campos_512_v4
+134/681162/campos_512_v4
+134/681169/campos_512_v4
+134/681232/campos_512_v4
+134/681240/campos_512_v4
+134/681255/campos_512_v4
+134/681384/campos_512_v4
+134/681447/campos_512_v4
+134/681455/campos_512_v4
+134/681472/campos_512_v4
+134/681540/campos_512_v4
+134/681544/campos_512_v4
+134/681580/campos_512_v4
+134/681593/campos_512_v4
+134/681680/campos_512_v4
+134/681746/campos_512_v4
+134/681815/campos_512_v4
+134/681863/campos_512_v4
+134/681883/campos_512_v4
+134/681887/campos_512_v4
+134/681926/campos_512_v4
+134/681958/campos_512_v4
+134/682040/campos_512_v4
+134/682067/campos_512_v4
+134/682148/campos_512_v4
+134/682166/campos_512_v4
+134/682204/campos_512_v4
+134/682291/campos_512_v4
+134/682306/campos_512_v4
+134/682310/campos_512_v4
+134/682326/campos_512_v4
+134/682355/campos_512_v4
+134/682401/campos_512_v4
+134/682405/campos_512_v4
+134/682418/campos_512_v4
+134/682423/campos_512_v4
+134/682430/campos_512_v4
+134/682445/campos_512_v4
+134/682471/campos_512_v4
+134/682516/campos_512_v4
+134/682523/campos_512_v4
+134/682651/campos_512_v4
+134/682663/campos_512_v4
+134/682682/campos_512_v4
+134/682740/campos_512_v4
+134/682778/campos_512_v4
+134/682802/campos_512_v4
+134/682823/campos_512_v4
+134/682825/campos_512_v4
+134/682827/campos_512_v4
+134/682856/campos_512_v4
+134/682897/campos_512_v4
+134/682967/campos_512_v4
+134/682981/campos_512_v4
+134/683004/campos_512_v4
+134/683012/campos_512_v4
+134/683015/campos_512_v4
+134/683038/campos_512_v4
+134/683042/campos_512_v4
+134/683073/campos_512_v4
+134/683082/campos_512_v4
+134/683152/campos_512_v4
+134/683161/campos_512_v4
+134/683173/campos_512_v4
+134/683211/campos_512_v4
+134/683227/campos_512_v4
+134/683241/campos_512_v4
+134/683254/campos_512_v4
+134/683302/campos_512_v4
+134/683324/campos_512_v4
+134/683326/campos_512_v4
+134/683342/campos_512_v4
+134/683407/campos_512_v4
+134/683509/campos_512_v4
+134/683517/campos_512_v4
+134/683528/campos_512_v4
+134/683560/campos_512_v4
+134/683679/campos_512_v4
+134/683713/campos_512_v4
+134/683719/campos_512_v4
+134/683798/campos_512_v4
+134/683802/campos_512_v4
+134/683821/campos_512_v4
+134/683839/campos_512_v4
+134/683842/campos_512_v4
+134/683937/campos_512_v4
+134/683938/campos_512_v4
+134/684004/campos_512_v4
+134/684098/campos_512_v4
+134/684142/campos_512_v4
+134/684186/campos_512_v4
+134/684192/campos_512_v4
+134/684283/campos_512_v4
+134/684332/campos_512_v4
+134/684376/campos_512_v4
+134/684403/campos_512_v4
+134/684426/campos_512_v4
+134/684450/campos_512_v4
+134/684458/campos_512_v4
+134/684475/campos_512_v4
+134/684496/campos_512_v4
+134/684572/campos_512_v4
+134/684643/campos_512_v4
+134/684659/campos_512_v4
+134/684678/campos_512_v4
+134/684683/campos_512_v4
+134/684701/campos_512_v4
+134/684731/campos_512_v4
+134/684815/campos_512_v4
+134/684826/campos_512_v4
+134/684860/campos_512_v4
+134/684868/campos_512_v4
+134/684875/campos_512_v4
+134/684922/campos_512_v4
+134/684929/campos_512_v4
+134/684961/campos_512_v4
+135/685007/campos_512_v4
+135/685029/campos_512_v4
+135/685062/campos_512_v4
+135/685072/campos_512_v4
+135/685093/campos_512_v4
+135/685104/campos_512_v4
+135/685106/campos_512_v4
+135/685122/campos_512_v4
+135/685162/campos_512_v4
+135/685256/campos_512_v4
+135/685271/campos_512_v4
+135/685292/campos_512_v4
+135/685299/campos_512_v4
+135/685329/campos_512_v4
+135/685347/campos_512_v4
+135/685381/campos_512_v4
+135/685413/campos_512_v4
+135/685507/campos_512_v4
+135/685540/campos_512_v4
+135/685560/campos_512_v4
+135/685590/campos_512_v4
+135/685603/campos_512_v4
+135/685606/campos_512_v4
+135/685610/campos_512_v4
+135/685653/campos_512_v4
+135/685657/campos_512_v4
+135/685750/campos_512_v4
+135/685794/campos_512_v4
+135/685840/campos_512_v4
+135/685917/campos_512_v4
+135/685919/campos_512_v4
+135/685934/campos_512_v4
+135/685938/campos_512_v4
+135/685952/campos_512_v4
+135/685969/campos_512_v4
+135/685974/campos_512_v4
+135/686050/campos_512_v4
+135/686060/campos_512_v4
+135/686094/campos_512_v4
+135/686157/campos_512_v4
+135/686174/campos_512_v4
+135/686211/campos_512_v4
+135/686217/campos_512_v4
+135/686243/campos_512_v4
+135/686263/campos_512_v4
+135/686291/campos_512_v4
+135/686292/campos_512_v4
+135/686302/campos_512_v4
+135/686305/campos_512_v4
+135/686347/campos_512_v4
+135/686440/campos_512_v4
+135/686459/campos_512_v4
+135/686522/campos_512_v4
+135/686574/campos_512_v4
+135/686600/campos_512_v4
+135/686696/campos_512_v4
+135/686769/campos_512_v4
+135/686785/campos_512_v4
+135/686803/campos_512_v4
+135/686819/campos_512_v4
+135/686839/campos_512_v4
+135/686894/campos_512_v4
+135/686923/campos_512_v4
+135/686941/campos_512_v4
+135/686946/campos_512_v4
+135/687116/campos_512_v4
+135/687149/campos_512_v4
+135/687164/campos_512_v4
+135/687200/campos_512_v4
+135/687233/campos_512_v4
+135/687236/campos_512_v4
+135/687279/campos_512_v4
+135/687284/campos_512_v4
+135/687344/campos_512_v4
+135/687360/campos_512_v4
+135/687386/campos_512_v4
+135/687413/campos_512_v4
+135/687433/campos_512_v4
+135/687451/campos_512_v4
+135/687461/campos_512_v4
+135/687473/campos_512_v4
+135/687506/campos_512_v4
+135/687510/campos_512_v4
+135/687540/campos_512_v4
+135/687555/campos_512_v4
+135/687556/campos_512_v4
+135/687575/campos_512_v4
+135/687689/campos_512_v4
+135/687692/campos_512_v4
+135/687696/campos_512_v4
+135/687767/campos_512_v4
+135/687781/campos_512_v4
+135/687808/campos_512_v4
+135/687856/campos_512_v4
+135/687873/campos_512_v4
+135/687887/campos_512_v4
+135/687909/campos_512_v4
+135/687919/campos_512_v4
+135/687933/campos_512_v4
+135/687947/campos_512_v4
+135/688039/campos_512_v4
+135/688041/campos_512_v4
+135/688052/campos_512_v4
+135/688056/campos_512_v4
+135/688080/campos_512_v4
+135/688181/campos_512_v4
+135/688190/campos_512_v4
+135/688235/campos_512_v4
+135/688267/campos_512_v4
+135/688270/campos_512_v4
+135/688281/campos_512_v4
+135/688309/campos_512_v4
+135/688333/campos_512_v4
+135/688385/campos_512_v4
+135/688393/campos_512_v4
+135/688463/campos_512_v4
+135/688509/campos_512_v4
+135/688567/campos_512_v4
+135/688578/campos_512_v4
+135/688592/campos_512_v4
+135/688635/campos_512_v4
+135/688669/campos_512_v4
+135/688673/campos_512_v4
+135/688680/campos_512_v4
+135/688692/campos_512_v4
+135/688698/campos_512_v4
+135/688701/campos_512_v4
+135/688713/campos_512_v4
+135/688742/campos_512_v4
+135/688815/campos_512_v4
+135/688844/campos_512_v4
+135/688847/campos_512_v4
+135/688899/campos_512_v4
+135/688907/campos_512_v4
+135/688919/campos_512_v4
+135/688924/campos_512_v4
+135/689022/campos_512_v4
+135/689133/campos_512_v4
+135/689136/campos_512_v4
+135/689144/campos_512_v4
+135/689153/campos_512_v4
+135/689164/campos_512_v4
+135/689174/campos_512_v4
+135/689189/campos_512_v4
+135/689248/campos_512_v4
+135/689267/campos_512_v4
+135/689285/campos_512_v4
+135/689367/campos_512_v4
+135/689403/campos_512_v4
+135/689416/campos_512_v4
+135/689436/campos_512_v4
+135/689443/campos_512_v4
+135/689480/campos_512_v4
+135/689490/campos_512_v4
+135/689491/campos_512_v4
+135/689504/campos_512_v4
+135/689526/campos_512_v4
+135/689544/campos_512_v4
+135/689608/campos_512_v4
+135/689652/campos_512_v4
+135/689677/campos_512_v4
+135/689679/campos_512_v4
+135/689720/campos_512_v4
+135/689736/campos_512_v4
+135/689759/campos_512_v4
+135/689764/campos_512_v4
+135/689765/campos_512_v4
+135/689835/campos_512_v4
+135/689867/campos_512_v4
+135/689869/campos_512_v4
+135/689970/campos_512_v4
+136/690002/campos_512_v4
+136/690094/campos_512_v4
+136/690115/campos_512_v4
+136/690121/campos_512_v4
+136/690130/campos_512_v4
+136/690184/campos_512_v4
+136/690186/campos_512_v4
+136/690248/campos_512_v4
+136/690304/campos_512_v4
+136/690384/campos_512_v4
+136/690533/campos_512_v4
+136/690537/campos_512_v4
+136/690575/campos_512_v4
+136/690603/campos_512_v4
+136/690707/campos_512_v4
+136/690740/campos_512_v4
+136/690752/campos_512_v4
+136/690783/campos_512_v4
+136/690807/campos_512_v4
+136/691009/campos_512_v4
+136/691017/campos_512_v4
+136/691028/campos_512_v4
+136/691041/campos_512_v4
+136/691140/campos_512_v4
+136/691151/campos_512_v4
+136/691174/campos_512_v4
+136/691182/campos_512_v4
+136/691203/campos_512_v4
+136/691228/campos_512_v4
+136/691249/campos_512_v4
+136/691259/campos_512_v4
+136/691263/campos_512_v4
+136/691304/campos_512_v4
+136/691319/campos_512_v4
+136/691360/campos_512_v4
+136/691361/campos_512_v4
+136/691422/campos_512_v4
+136/691428/campos_512_v4
+136/691461/campos_512_v4
+136/691510/campos_512_v4
+136/691603/campos_512_v4
+136/691642/campos_512_v4
+136/691659/campos_512_v4
+136/691752/campos_512_v4
+136/691795/campos_512_v4
+136/691808/campos_512_v4
+136/691810/campos_512_v4
+136/691825/campos_512_v4
+136/691845/campos_512_v4
+136/691877/campos_512_v4
+136/691922/campos_512_v4
+136/691930/campos_512_v4
+136/691982/campos_512_v4
+136/691997/campos_512_v4
+136/692044/campos_512_v4
+136/692089/campos_512_v4
+136/692112/campos_512_v4
+136/692183/campos_512_v4
+136/692191/campos_512_v4
+136/692224/campos_512_v4
+136/692240/campos_512_v4
+136/692322/campos_512_v4
+136/692354/campos_512_v4
+136/692355/campos_512_v4
+136/692409/campos_512_v4
+136/692410/campos_512_v4
+136/692431/campos_512_v4
+136/692502/campos_512_v4
+136/692505/campos_512_v4
+136/692513/campos_512_v4
+136/692552/campos_512_v4
+136/692557/campos_512_v4
+136/692585/campos_512_v4
+136/692650/campos_512_v4
+136/692655/campos_512_v4
+136/692672/campos_512_v4
+136/692702/campos_512_v4
+136/692715/campos_512_v4
+136/692734/campos_512_v4
+136/692742/campos_512_v4
+136/692756/campos_512_v4
+136/692801/campos_512_v4
+136/692865/campos_512_v4
+136/692921/campos_512_v4
+136/692959/campos_512_v4
+136/692988/campos_512_v4
+136/693117/campos_512_v4
+136/693126/campos_512_v4
+136/693188/campos_512_v4
+136/693191/campos_512_v4
+136/693248/campos_512_v4
+136/693263/campos_512_v4
+136/693294/campos_512_v4
+136/693314/campos_512_v4
+136/693409/campos_512_v4
+136/693412/campos_512_v4
+136/693423/campos_512_v4
+136/693474/campos_512_v4
+136/693477/campos_512_v4
+136/693497/campos_512_v4
+136/693505/campos_512_v4
+136/693517/campos_512_v4
+136/693539/campos_512_v4
+136/693548/campos_512_v4
+136/693549/campos_512_v4
+136/693681/campos_512_v4
+136/693685/campos_512_v4
+136/693686/campos_512_v4
+136/693694/campos_512_v4
+136/693738/campos_512_v4
+136/693785/campos_512_v4
+136/693852/campos_512_v4
+136/693907/campos_512_v4
+136/693920/campos_512_v4
+136/693961/campos_512_v4
+136/693999/campos_512_v4
+136/694012/campos_512_v4
+136/694017/campos_512_v4
+136/694022/campos_512_v4
+136/694035/campos_512_v4
+136/694057/campos_512_v4
+136/694088/campos_512_v4
+136/694125/campos_512_v4
+136/694228/campos_512_v4
+136/694291/campos_512_v4
+136/694295/campos_512_v4
+136/694336/campos_512_v4
+136/694349/campos_512_v4
+136/694378/campos_512_v4
+136/694420/campos_512_v4
+136/694424/campos_512_v4
+136/694592/campos_512_v4
+136/694622/campos_512_v4
+136/694670/campos_512_v4
+136/694689/campos_512_v4
+136/694693/campos_512_v4
+136/694734/campos_512_v4
+136/694741/campos_512_v4
+136/694768/campos_512_v4
+136/694771/campos_512_v4
+136/694788/campos_512_v4
+136/694807/campos_512_v4
+136/694825/campos_512_v4
+136/694833/campos_512_v4
+136/694921/campos_512_v4
+137/695012/campos_512_v4
+137/695030/campos_512_v4
+137/695091/campos_512_v4
+137/695105/campos_512_v4
+137/695143/campos_512_v4
+137/695145/campos_512_v4
+137/695253/campos_512_v4
+137/695293/campos_512_v4
+137/695307/campos_512_v4
+137/695372/campos_512_v4
+137/695420/campos_512_v4
+137/695469/campos_512_v4
+137/695483/campos_512_v4
+137/695505/campos_512_v4
+137/695514/campos_512_v4
+137/695516/campos_512_v4
+137/695524/campos_512_v4
+137/695526/campos_512_v4
+137/695531/campos_512_v4
+137/695604/campos_512_v4
+137/695622/campos_512_v4
+137/695654/campos_512_v4
+137/695667/campos_512_v4
+137/695668/campos_512_v4
+137/695672/campos_512_v4
+137/695694/campos_512_v4
+137/695715/campos_512_v4
+137/695725/campos_512_v4
+137/695729/campos_512_v4
+137/695834/campos_512_v4
+137/695855/campos_512_v4
+137/695873/campos_512_v4
+137/695879/campos_512_v4
+137/695883/campos_512_v4
+137/695897/campos_512_v4
+137/695918/campos_512_v4
+137/695926/campos_512_v4
+137/695948/campos_512_v4
+137/695954/campos_512_v4
+137/695986/campos_512_v4
+137/696053/campos_512_v4
+137/696075/campos_512_v4
+137/696101/campos_512_v4
+137/696175/campos_512_v4
+137/696225/campos_512_v4
+137/696250/campos_512_v4
+137/696253/campos_512_v4
+137/696301/campos_512_v4
+137/696322/campos_512_v4
+137/696344/campos_512_v4
+137/696408/campos_512_v4
+137/696430/campos_512_v4
+137/696452/campos_512_v4
+137/696466/campos_512_v4
+137/696488/campos_512_v4
+137/696490/campos_512_v4
+137/696500/campos_512_v4
+137/696542/campos_512_v4
+137/696600/campos_512_v4
+137/696612/campos_512_v4
+137/696647/campos_512_v4
+137/696677/campos_512_v4
+137/696688/campos_512_v4
+137/696758/campos_512_v4
+137/696767/campos_512_v4
+137/696782/campos_512_v4
+137/696801/campos_512_v4
+137/696816/campos_512_v4
+137/696820/campos_512_v4
+137/696835/campos_512_v4
+137/696880/campos_512_v4
+137/696929/campos_512_v4
+137/697178/campos_512_v4
+137/697196/campos_512_v4
+137/697225/campos_512_v4
+137/697233/campos_512_v4
+137/697252/campos_512_v4
+137/697272/campos_512_v4
+137/697299/campos_512_v4
+137/697396/campos_512_v4
+137/697409/campos_512_v4
+137/697418/campos_512_v4
+137/697437/campos_512_v4
+137/697451/campos_512_v4
+137/697475/campos_512_v4
+137/697477/campos_512_v4
+137/697488/campos_512_v4
+137/697497/campos_512_v4
+137/697511/campos_512_v4
+137/697517/campos_512_v4
+137/697570/campos_512_v4
+137/697589/campos_512_v4
+137/697627/campos_512_v4
+137/697634/campos_512_v4
+137/697685/campos_512_v4
+137/697703/campos_512_v4
+137/697730/campos_512_v4
+137/697733/campos_512_v4
+137/697752/campos_512_v4
+137/697767/campos_512_v4
+137/697806/campos_512_v4
+137/697823/campos_512_v4
+137/697832/campos_512_v4
+137/697866/campos_512_v4
+137/697871/campos_512_v4
+137/697895/campos_512_v4
+137/697905/campos_512_v4
+137/697972/campos_512_v4
+137/698022/campos_512_v4
+137/698023/campos_512_v4
+137/698040/campos_512_v4
+137/698055/campos_512_v4
+137/698066/campos_512_v4
+137/698119/campos_512_v4
+137/698131/campos_512_v4
+137/698175/campos_512_v4
+137/698207/campos_512_v4
+137/698209/campos_512_v4
+137/698248/campos_512_v4
+137/698256/campos_512_v4
+137/698264/campos_512_v4
+137/698283/campos_512_v4
+137/698285/campos_512_v4
+137/698344/campos_512_v4
+137/698374/campos_512_v4
+137/698376/campos_512_v4
+137/698391/campos_512_v4
+137/698395/campos_512_v4
+137/698418/campos_512_v4
+137/698455/campos_512_v4
+137/698458/campos_512_v4
+137/698519/campos_512_v4
+137/698597/campos_512_v4
+137/698601/campos_512_v4
+137/698619/campos_512_v4
+137/698629/campos_512_v4
+137/698630/campos_512_v4
+137/698631/campos_512_v4
+137/698636/campos_512_v4
+137/698642/campos_512_v4
+137/698670/campos_512_v4
+137/698690/campos_512_v4
+137/698694/campos_512_v4
+137/698707/campos_512_v4
+137/698720/campos_512_v4
+137/698844/campos_512_v4
+137/698852/campos_512_v4
+137/698865/campos_512_v4
+137/699000/campos_512_v4
+137/699019/campos_512_v4
+137/699045/campos_512_v4
+137/699060/campos_512_v4
+137/699069/campos_512_v4
+137/699173/campos_512_v4
+137/699332/campos_512_v4
+137/699347/campos_512_v4
+137/699365/campos_512_v4
+137/699410/campos_512_v4
+137/699443/campos_512_v4
+137/699446/campos_512_v4
+137/699454/campos_512_v4
+137/699477/campos_512_v4
+137/699480/campos_512_v4
+137/699503/campos_512_v4
+137/699510/campos_512_v4
+137/699523/campos_512_v4
+137/699529/campos_512_v4
+137/699535/campos_512_v4
+137/699564/campos_512_v4
+137/699565/campos_512_v4
+137/699616/campos_512_v4
+137/699632/campos_512_v4
+137/699672/campos_512_v4
+137/699685/campos_512_v4
+137/699755/campos_512_v4
+137/699763/campos_512_v4
+137/699768/campos_512_v4
+137/699798/campos_512_v4
+137/699840/campos_512_v4
+137/699912/campos_512_v4
+137/699966/campos_512_v4
+137/699983/campos_512_v4
+138/700110/campos_512_v4
+138/700121/campos_512_v4
+138/700146/campos_512_v4
+138/700234/campos_512_v4
+138/700303/campos_512_v4
+138/700331/campos_512_v4
+138/700421/campos_512_v4
+138/700436/campos_512_v4
+138/700489/campos_512_v4
+138/700506/campos_512_v4
+138/700521/campos_512_v4
+138/700522/campos_512_v4
+138/700527/campos_512_v4
+138/700540/campos_512_v4
+138/700546/campos_512_v4
+138/700704/campos_512_v4
+138/700763/campos_512_v4
+138/700770/campos_512_v4
+138/700803/campos_512_v4
+138/700822/campos_512_v4
+138/700824/campos_512_v4
+138/700865/campos_512_v4
+138/700937/campos_512_v4
+138/701038/campos_512_v4
+138/701092/campos_512_v4
+138/701112/campos_512_v4
+138/701210/campos_512_v4
+138/701296/campos_512_v4
+138/701322/campos_512_v4
+138/701384/campos_512_v4
+138/701413/campos_512_v4
+138/701460/campos_512_v4
+138/701467/campos_512_v4
+138/701527/campos_512_v4
+138/701533/campos_512_v4
+138/701579/campos_512_v4
+138/701627/campos_512_v4
+138/701697/campos_512_v4
+138/701718/campos_512_v4
+138/701724/campos_512_v4
+138/701726/campos_512_v4
+138/701734/campos_512_v4
+138/701744/campos_512_v4
+138/701770/campos_512_v4
+138/701791/campos_512_v4
+138/701799/campos_512_v4
+138/701809/campos_512_v4
+138/701823/campos_512_v4
+138/701842/campos_512_v4
+138/701928/campos_512_v4
+138/701997/campos_512_v4
+138/702004/campos_512_v4
+138/702027/campos_512_v4
+138/702107/campos_512_v4
+138/702146/campos_512_v4
+138/702232/campos_512_v4
+138/702288/campos_512_v4
+138/702299/campos_512_v4
+138/702386/campos_512_v4
+138/702403/campos_512_v4
+138/702404/campos_512_v4
+138/702414/campos_512_v4
+138/702436/campos_512_v4
+138/702437/campos_512_v4
+138/702503/campos_512_v4
+138/702598/campos_512_v4
+138/702664/campos_512_v4
+138/702764/campos_512_v4
+138/702784/campos_512_v4
+138/702792/campos_512_v4
+138/702793/campos_512_v4
+138/702816/campos_512_v4
+138/702951/campos_512_v4
+138/703014/campos_512_v4
+138/703018/campos_512_v4
+138/703026/campos_512_v4
+138/703055/campos_512_v4
+138/703108/campos_512_v4
+138/703186/campos_512_v4
+138/703250/campos_512_v4
+138/703287/campos_512_v4
+138/703300/campos_512_v4
+138/703320/campos_512_v4
+138/703459/campos_512_v4
+138/703468/campos_512_v4
+138/703538/campos_512_v4
+138/703541/campos_512_v4
+138/703569/campos_512_v4
+138/703618/campos_512_v4
+138/703620/campos_512_v4
+138/703629/campos_512_v4
+138/703696/campos_512_v4
+138/703716/campos_512_v4
+138/703736/campos_512_v4
+138/703757/campos_512_v4
+138/703776/campos_512_v4
+138/703778/campos_512_v4
+138/703807/campos_512_v4
+138/703817/campos_512_v4
+138/703834/campos_512_v4
+138/703848/campos_512_v4
+138/703952/campos_512_v4
+138/703994/campos_512_v4
+138/703995/campos_512_v4
+138/703997/campos_512_v4
+138/704005/campos_512_v4
+138/704065/campos_512_v4
+138/704120/campos_512_v4
+138/704122/campos_512_v4
+138/704202/campos_512_v4
+138/704226/campos_512_v4
+138/704266/campos_512_v4
+138/704321/campos_512_v4
+138/704343/campos_512_v4
+138/704349/campos_512_v4
+138/704375/campos_512_v4
+138/704394/campos_512_v4
+138/704406/campos_512_v4
+138/704408/campos_512_v4
+138/704435/campos_512_v4
+138/704453/campos_512_v4
+138/704531/campos_512_v4
+138/704536/campos_512_v4
+138/704678/campos_512_v4
+138/704723/campos_512_v4
+138/704783/campos_512_v4
+138/704824/campos_512_v4
+138/704830/campos_512_v4
+138/704833/campos_512_v4
+138/704890/campos_512_v4
+138/704929/campos_512_v4
+138/704953/campos_512_v4
+138/704992/campos_512_v4
+139/705080/campos_512_v4
+139/705089/campos_512_v4
+139/705134/campos_512_v4
+139/705146/campos_512_v4
+139/705160/campos_512_v4
+139/705201/campos_512_v4
+139/705216/campos_512_v4
+139/705218/campos_512_v4
+139/705227/campos_512_v4
+139/705264/campos_512_v4
+139/705271/campos_512_v4
+139/705278/campos_512_v4
+139/705324/campos_512_v4
+139/705331/campos_512_v4
+139/705342/campos_512_v4
+139/705444/campos_512_v4
+139/705460/campos_512_v4
+139/705580/campos_512_v4
+139/705587/campos_512_v4
+139/705604/campos_512_v4
+139/705611/campos_512_v4
+139/705760/campos_512_v4
+139/705805/campos_512_v4
+139/705822/campos_512_v4
+139/705872/campos_512_v4
+139/705888/campos_512_v4
+139/705934/campos_512_v4
+139/705942/campos_512_v4
+139/705960/campos_512_v4
+139/706015/campos_512_v4
+139/706018/campos_512_v4
+139/706021/campos_512_v4
+139/706033/campos_512_v4
+139/706144/campos_512_v4
+139/706155/campos_512_v4
+139/706172/campos_512_v4
+139/706200/campos_512_v4
+139/706373/campos_512_v4
+139/706380/campos_512_v4
+139/706412/campos_512_v4
+139/706434/campos_512_v4
+139/706474/campos_512_v4
+139/706511/campos_512_v4
+139/706565/campos_512_v4
+139/706620/campos_512_v4
+139/706664/campos_512_v4
+139/706677/campos_512_v4
+139/706679/campos_512_v4
+139/706717/campos_512_v4
+139/706748/campos_512_v4
+139/706749/campos_512_v4
+139/706798/campos_512_v4
+139/706890/campos_512_v4
+139/706891/campos_512_v4
+139/706983/campos_512_v4
+139/706994/campos_512_v4
+139/707003/campos_512_v4
+139/707021/campos_512_v4
+139/707035/campos_512_v4
+139/707041/campos_512_v4
+139/707099/campos_512_v4
+139/707249/campos_512_v4
+139/707267/campos_512_v4
+139/707341/campos_512_v4
+139/707344/campos_512_v4
+139/707345/campos_512_v4
+139/707383/campos_512_v4
+139/707391/campos_512_v4
+139/707435/campos_512_v4
+139/707447/campos_512_v4
+139/707460/campos_512_v4
+139/707474/campos_512_v4
+139/707476/campos_512_v4
+139/707487/campos_512_v4
+139/707498/campos_512_v4
+139/707539/campos_512_v4
+139/707540/campos_512_v4
+139/707569/campos_512_v4
+139/707585/campos_512_v4
+139/707642/campos_512_v4
+139/707653/campos_512_v4
+139/707735/campos_512_v4
+139/707787/campos_512_v4
+139/707848/campos_512_v4
+139/707888/campos_512_v4
+139/707889/campos_512_v4
+139/707945/campos_512_v4
+139/707995/campos_512_v4
+139/708003/campos_512_v4
+139/708025/campos_512_v4
+139/708061/campos_512_v4
+139/708063/campos_512_v4
+139/708064/campos_512_v4
+139/708079/campos_512_v4
+139/708129/campos_512_v4
+139/708154/campos_512_v4
+139/708173/campos_512_v4
+139/708176/campos_512_v4
+139/708185/campos_512_v4
+139/708194/campos_512_v4
+139/708222/campos_512_v4
+139/708265/campos_512_v4
+139/708282/campos_512_v4
+139/708401/campos_512_v4
+139/708432/campos_512_v4
+139/708504/campos_512_v4
+139/708568/campos_512_v4
+139/708607/campos_512_v4
+139/708615/campos_512_v4
+139/708616/campos_512_v4
+139/708669/campos_512_v4
+139/708714/campos_512_v4
+139/708755/campos_512_v4
+139/708785/campos_512_v4
+139/708814/campos_512_v4
+139/708818/campos_512_v4
+139/708829/campos_512_v4
+139/708846/campos_512_v4
+139/708856/campos_512_v4
+139/708896/campos_512_v4
+139/708933/campos_512_v4
+139/708938/campos_512_v4
+139/709172/campos_512_v4
+139/709179/campos_512_v4
+139/709205/campos_512_v4
+139/709325/campos_512_v4
+139/709333/campos_512_v4
+139/709377/campos_512_v4
+139/709449/campos_512_v4
+139/709469/campos_512_v4
+139/709482/campos_512_v4
+139/709491/campos_512_v4
+139/709514/campos_512_v4
+139/709519/campos_512_v4
+139/709541/campos_512_v4
+139/709579/campos_512_v4
+139/709630/campos_512_v4
+139/709644/campos_512_v4
+139/709686/campos_512_v4
+139/709729/campos_512_v4
+139/709730/campos_512_v4
+139/709807/campos_512_v4
+139/709895/campos_512_v4
+139/709908/campos_512_v4
+139/709913/campos_512_v4
+139/709960/campos_512_v4
+139/709987/campos_512_v4
+14/80006/campos_512_v4
+14/80015/campos_512_v4
+14/80022/campos_512_v4
+14/80067/campos_512_v4
+14/80104/campos_512_v4
+14/80148/campos_512_v4
+14/80193/campos_512_v4
+14/80198/campos_512_v4
+14/80202/campos_512_v4
+14/80232/campos_512_v4
+14/80257/campos_512_v4
+14/80277/campos_512_v4
+14/80320/campos_512_v4
+14/80325/campos_512_v4
+14/80336/campos_512_v4
+14/80354/campos_512_v4
+14/80365/campos_512_v4
+14/80381/campos_512_v4
+14/80430/campos_512_v4
+14/80464/campos_512_v4
+14/80481/campos_512_v4
+14/80549/campos_512_v4
+14/80662/campos_512_v4
+14/80671/campos_512_v4
+14/80694/campos_512_v4
+14/80747/campos_512_v4
+14/80794/campos_512_v4
+14/80856/campos_512_v4
+14/80938/campos_512_v4
+14/81002/campos_512_v4
+14/81060/campos_512_v4
+14/81063/campos_512_v4
+14/81079/campos_512_v4
+14/81094/campos_512_v4
+14/81100/campos_512_v4
+14/81215/campos_512_v4
+14/81227/campos_512_v4
+14/81244/campos_512_v4
+14/81307/campos_512_v4
+14/81337/campos_512_v4
+14/81361/campos_512_v4
+14/81411/campos_512_v4
+14/81451/campos_512_v4
+14/81517/campos_512_v4
+14/81524/campos_512_v4
+14/81529/campos_512_v4
+14/81545/campos_512_v4
+14/81580/campos_512_v4
+14/81607/campos_512_v4
+14/81614/campos_512_v4
+14/81617/campos_512_v4
+14/81716/campos_512_v4
+14/81726/campos_512_v4
+14/81737/campos_512_v4
+14/81750/campos_512_v4
+14/81790/campos_512_v4
+14/81835/campos_512_v4
+14/81836/campos_512_v4
+14/81840/campos_512_v4
+14/81866/campos_512_v4
+14/81965/campos_512_v4
+14/82112/campos_512_v4
+14/82122/campos_512_v4
+14/82204/campos_512_v4
+14/82215/campos_512_v4
+14/82231/campos_512_v4
+14/82250/campos_512_v4
+14/82292/campos_512_v4
+14/82356/campos_512_v4
+14/82413/campos_512_v4
+14/82432/campos_512_v4
+14/82520/campos_512_v4
+14/82523/campos_512_v4
+14/82544/campos_512_v4
+14/82563/campos_512_v4
+14/82609/campos_512_v4
+14/82610/campos_512_v4
+14/82639/campos_512_v4
+14/82721/campos_512_v4
+14/82766/campos_512_v4
+14/82791/campos_512_v4
+14/82797/campos_512_v4
+14/82803/campos_512_v4
+14/82834/campos_512_v4
+14/82887/campos_512_v4
+14/82918/campos_512_v4
+14/82920/campos_512_v4
+14/82921/campos_512_v4
+14/82946/campos_512_v4
+14/82949/campos_512_v4
+14/82972/campos_512_v4
+14/82981/campos_512_v4
+14/83023/campos_512_v4
+14/83033/campos_512_v4
+14/83048/campos_512_v4
+14/83091/campos_512_v4
+14/83105/campos_512_v4
+14/83145/campos_512_v4
+14/83146/campos_512_v4
+14/83180/campos_512_v4
+14/83185/campos_512_v4
+14/83195/campos_512_v4
+14/83261/campos_512_v4
+14/83277/campos_512_v4
+14/83296/campos_512_v4
+14/83301/campos_512_v4
+14/83346/campos_512_v4
+14/83368/campos_512_v4
+14/83391/campos_512_v4
+14/83410/campos_512_v4
+14/83422/campos_512_v4
+14/83460/campos_512_v4
+14/83543/campos_512_v4
+14/83547/campos_512_v4
+14/83555/campos_512_v4
+14/83569/campos_512_v4
+14/83571/campos_512_v4
+14/83590/campos_512_v4
+14/83595/campos_512_v4
+14/83613/campos_512_v4
+14/83622/campos_512_v4
+14/83689/campos_512_v4
+14/83744/campos_512_v4
+14/83757/campos_512_v4
+14/83775/campos_512_v4
+14/83822/campos_512_v4
+14/83829/campos_512_v4
+14/83853/campos_512_v4
+14/83979/campos_512_v4
+14/83998/campos_512_v4
+14/84028/campos_512_v4
+14/84048/campos_512_v4
+14/84060/campos_512_v4
+14/84101/campos_512_v4
+14/84123/campos_512_v4
+14/84140/campos_512_v4
+14/84145/campos_512_v4
+14/84178/campos_512_v4
+14/84241/campos_512_v4
+14/84322/campos_512_v4
+14/84336/campos_512_v4
+14/84395/campos_512_v4
+14/84398/campos_512_v4
+14/84418/campos_512_v4
+14/84433/campos_512_v4
+14/84535/campos_512_v4
+14/84574/campos_512_v4
+14/84593/campos_512_v4
+14/84637/campos_512_v4
+14/84646/campos_512_v4
+14/84649/campos_512_v4
+14/84709/campos_512_v4
+14/84743/campos_512_v4
+14/84750/campos_512_v4
+14/84751/campos_512_v4
+14/84785/campos_512_v4
+14/84792/campos_512_v4
+14/84818/campos_512_v4
+14/84879/campos_512_v4
+14/84951/campos_512_v4
+140/710027/campos_512_v4
+140/710045/campos_512_v4
+140/710051/campos_512_v4
+140/710087/campos_512_v4
+140/710116/campos_512_v4
+140/710134/campos_512_v4
+140/710172/campos_512_v4
+140/710196/campos_512_v4
+140/710242/campos_512_v4
+140/710283/campos_512_v4
+140/710330/campos_512_v4
+140/710391/campos_512_v4
+140/710435/campos_512_v4
+140/710440/campos_512_v4
+140/710454/campos_512_v4
+140/710516/campos_512_v4
+140/710575/campos_512_v4
+140/710613/campos_512_v4
+140/710721/campos_512_v4
+140/710753/campos_512_v4
+140/710787/campos_512_v4
+140/710803/campos_512_v4
+140/710804/campos_512_v4
+140/710831/campos_512_v4
+140/710875/campos_512_v4
+140/710910/campos_512_v4
+140/710915/campos_512_v4
+140/710916/campos_512_v4
+140/710925/campos_512_v4
+140/710933/campos_512_v4
+140/710947/campos_512_v4
+140/710985/campos_512_v4
+140/710998/campos_512_v4
+140/711121/campos_512_v4
+140/711148/campos_512_v4
+140/711222/campos_512_v4
+140/711230/campos_512_v4
+140/711264/campos_512_v4
+140/711276/campos_512_v4
+140/711317/campos_512_v4
+140/711334/campos_512_v4
+140/711377/campos_512_v4
+140/711397/campos_512_v4
+140/711399/campos_512_v4
+140/711401/campos_512_v4
+140/711419/campos_512_v4
+140/711444/campos_512_v4
+140/711448/campos_512_v4
+140/711543/campos_512_v4
+140/711638/campos_512_v4
+140/711682/campos_512_v4
+140/711753/campos_512_v4
+140/711849/campos_512_v4
+140/711885/campos_512_v4
+140/711920/campos_512_v4
+140/711926/campos_512_v4
+140/711950/campos_512_v4
+140/712034/campos_512_v4
+140/712099/campos_512_v4
+140/712123/campos_512_v4
+140/712169/campos_512_v4
+140/712179/campos_512_v4
+140/712188/campos_512_v4
+140/712239/campos_512_v4
+140/712240/campos_512_v4
+140/712269/campos_512_v4
+140/712315/campos_512_v4
+140/712326/campos_512_v4
+140/712363/campos_512_v4
+140/712389/campos_512_v4
+140/712459/campos_512_v4
+140/712488/campos_512_v4
+140/712494/campos_512_v4
+140/712513/campos_512_v4
+140/712521/campos_512_v4
+140/712530/campos_512_v4
+140/712544/campos_512_v4
+140/712610/campos_512_v4
+140/712638/campos_512_v4
+140/712657/campos_512_v4
+140/712690/campos_512_v4
+140/712693/campos_512_v4
+140/712722/campos_512_v4
+140/712734/campos_512_v4
+140/712821/campos_512_v4
+140/712882/campos_512_v4
+140/712883/campos_512_v4
+140/712918/campos_512_v4
+140/712925/campos_512_v4
+140/712967/campos_512_v4
+140/713030/campos_512_v4
+140/713088/campos_512_v4
+140/713151/campos_512_v4
+140/713242/campos_512_v4
+140/713243/campos_512_v4
+140/713255/campos_512_v4
+140/713301/campos_512_v4
+140/713305/campos_512_v4
+140/713309/campos_512_v4
+140/713336/campos_512_v4
+140/713338/campos_512_v4
+140/713351/campos_512_v4
+140/713353/campos_512_v4
+140/713393/campos_512_v4
+140/713403/campos_512_v4
+140/713440/campos_512_v4
+140/713470/campos_512_v4
+140/713476/campos_512_v4
+140/713494/campos_512_v4
+140/713496/campos_512_v4
+140/713514/campos_512_v4
+140/713527/campos_512_v4
+140/713551/campos_512_v4
+140/713578/campos_512_v4
+140/713595/campos_512_v4
+140/713616/campos_512_v4
+140/713650/campos_512_v4
+140/713692/campos_512_v4
+140/713724/campos_512_v4
+140/713778/campos_512_v4
+140/713781/campos_512_v4
+140/713783/campos_512_v4
+140/713785/campos_512_v4
+140/713796/campos_512_v4
+140/713814/campos_512_v4
+140/713822/campos_512_v4
+140/713847/campos_512_v4
+140/713852/campos_512_v4
+140/713853/campos_512_v4
+140/713857/campos_512_v4
+140/713944/campos_512_v4
+140/713964/campos_512_v4
+140/714020/campos_512_v4
+140/714025/campos_512_v4
+140/714036/campos_512_v4
+140/714054/campos_512_v4
+140/714080/campos_512_v4
+140/714086/campos_512_v4
+140/714112/campos_512_v4
+140/714120/campos_512_v4
+140/714172/campos_512_v4
+140/714176/campos_512_v4
+140/714184/campos_512_v4
+140/714245/campos_512_v4
+140/714322/campos_512_v4
+140/714366/campos_512_v4
+140/714368/campos_512_v4
+140/714406/campos_512_v4
+140/714407/campos_512_v4
+140/714409/campos_512_v4
+140/714463/campos_512_v4
+140/714467/campos_512_v4
+140/714516/campos_512_v4
+140/714528/campos_512_v4
+140/714570/campos_512_v4
+140/714592/campos_512_v4
+140/714630/campos_512_v4
+140/714654/campos_512_v4
+140/714691/campos_512_v4
+140/714708/campos_512_v4
+140/714714/campos_512_v4
+140/714809/campos_512_v4
+140/714827/campos_512_v4
+140/714857/campos_512_v4
+140/714865/campos_512_v4
+140/714895/campos_512_v4
+140/714963/campos_512_v4
+141/715009/campos_512_v4
+141/715020/campos_512_v4
+141/715031/campos_512_v4
+141/715092/campos_512_v4
+141/715121/campos_512_v4
+141/715189/campos_512_v4
+141/715226/campos_512_v4
+141/715269/campos_512_v4
+141/715292/campos_512_v4
+141/715293/campos_512_v4
+141/715309/campos_512_v4
+141/715375/campos_512_v4
+141/715409/campos_512_v4
+141/715434/campos_512_v4
+141/715481/campos_512_v4
+141/715537/campos_512_v4
+141/715577/campos_512_v4
+141/715583/campos_512_v4
+141/715624/campos_512_v4
+141/715725/campos_512_v4
+141/715729/campos_512_v4
+141/715738/campos_512_v4
+141/715758/campos_512_v4
+141/715822/campos_512_v4
+141/715872/campos_512_v4
+141/715873/campos_512_v4
+141/715889/campos_512_v4
+141/715918/campos_512_v4
+141/715967/campos_512_v4
+141/716023/campos_512_v4
+141/716026/campos_512_v4
+141/716079/campos_512_v4
+141/716133/campos_512_v4
+141/716235/campos_512_v4
+141/716287/campos_512_v4
+141/716288/campos_512_v4
+141/716319/campos_512_v4
+141/716507/campos_512_v4
+141/716527/campos_512_v4
+141/716533/campos_512_v4
+141/716541/campos_512_v4
+141/716583/campos_512_v4
+141/716637/campos_512_v4
+141/716639/campos_512_v4
+141/716675/campos_512_v4
+141/716739/campos_512_v4
+141/716831/campos_512_v4
+141/716966/campos_512_v4
+141/716978/campos_512_v4
+141/717062/campos_512_v4
+141/717069/campos_512_v4
+141/717079/campos_512_v4
+141/717089/campos_512_v4
+141/717129/campos_512_v4
+141/717189/campos_512_v4
+141/717235/campos_512_v4
+141/717263/campos_512_v4
+141/717266/campos_512_v4
+141/717277/campos_512_v4
+141/717331/campos_512_v4
+141/717336/campos_512_v4
+141/717355/campos_512_v4
+141/717397/campos_512_v4
+141/717462/campos_512_v4
+141/717468/campos_512_v4
+141/717473/campos_512_v4
+141/717513/campos_512_v4
+141/717564/campos_512_v4
+141/717594/campos_512_v4
+141/717614/campos_512_v4
+141/717676/campos_512_v4
+141/717717/campos_512_v4
+141/717726/campos_512_v4
+141/717744/campos_512_v4
+141/717752/campos_512_v4
+141/717753/campos_512_v4
+141/717794/campos_512_v4
+141/717835/campos_512_v4
+141/717851/campos_512_v4
+141/717906/campos_512_v4
+141/717951/campos_512_v4
+141/718051/campos_512_v4
+141/718056/campos_512_v4
+141/718182/campos_512_v4
+141/718243/campos_512_v4
+141/718284/campos_512_v4
+141/718298/campos_512_v4
+141/718324/campos_512_v4
+141/718367/campos_512_v4
+141/718408/campos_512_v4
+141/718498/campos_512_v4
+141/718551/campos_512_v4
+141/718566/campos_512_v4
+141/718602/campos_512_v4
+141/718637/campos_512_v4
+141/718655/campos_512_v4
+141/718662/campos_512_v4
+141/718682/campos_512_v4
+141/718720/campos_512_v4
+141/718741/campos_512_v4
+141/718823/campos_512_v4
+141/718829/campos_512_v4
+141/718830/campos_512_v4
+141/718855/campos_512_v4
+141/718881/campos_512_v4
+141/718897/campos_512_v4
+141/718901/campos_512_v4
+141/718949/campos_512_v4
+141/718954/campos_512_v4
+141/718959/campos_512_v4
+141/718962/campos_512_v4
+141/718968/campos_512_v4
+141/718980/campos_512_v4
+141/718990/campos_512_v4
+141/719021/campos_512_v4
+141/719089/campos_512_v4
+141/719113/campos_512_v4
+141/719163/campos_512_v4
+141/719198/campos_512_v4
+141/719204/campos_512_v4
+141/719209/campos_512_v4
+141/719228/campos_512_v4
+141/719298/campos_512_v4
+141/719305/campos_512_v4
+141/719308/campos_512_v4
+141/719311/campos_512_v4
+141/719364/campos_512_v4
+141/719374/campos_512_v4
+141/719421/campos_512_v4
+141/719478/campos_512_v4
+141/719572/campos_512_v4
+141/719581/campos_512_v4
+141/719756/campos_512_v4
+141/719782/campos_512_v4
+141/719854/campos_512_v4
+141/719890/campos_512_v4
+141/719902/campos_512_v4
+141/719904/campos_512_v4
+142/720013/campos_512_v4
+142/720019/campos_512_v4
+142/720038/campos_512_v4
+142/720086/campos_512_v4
+142/720088/campos_512_v4
+142/720098/campos_512_v4
+142/720144/campos_512_v4
+142/720150/campos_512_v4
+142/720191/campos_512_v4
+142/720202/campos_512_v4
+142/720214/campos_512_v4
+142/720218/campos_512_v4
+142/720247/campos_512_v4
+142/720260/campos_512_v4
+142/720364/campos_512_v4
+142/720371/campos_512_v4
+142/720372/campos_512_v4
+142/720379/campos_512_v4
+142/720517/campos_512_v4
+142/720530/campos_512_v4
+142/720556/campos_512_v4
+142/720565/campos_512_v4
+142/720586/campos_512_v4
+142/720595/campos_512_v4
+142/720604/campos_512_v4
+142/720626/campos_512_v4
+142/720658/campos_512_v4
+142/720706/campos_512_v4
+142/720722/campos_512_v4
+142/720756/campos_512_v4
+142/720764/campos_512_v4
+142/720783/campos_512_v4
+142/720837/campos_512_v4
+142/720871/campos_512_v4
+142/720873/campos_512_v4
+142/720882/campos_512_v4
+142/720906/campos_512_v4
+142/720924/campos_512_v4
+142/720970/campos_512_v4
+142/720981/campos_512_v4
+142/721054/campos_512_v4
+142/721101/campos_512_v4
+142/721129/campos_512_v4
+142/721163/campos_512_v4
+142/721193/campos_512_v4
+142/721202/campos_512_v4
+142/721206/campos_512_v4
+142/721226/campos_512_v4
+142/721229/campos_512_v4
+142/721230/campos_512_v4
+142/721241/campos_512_v4
+142/721263/campos_512_v4
+142/721275/campos_512_v4
+142/721303/campos_512_v4
+142/721402/campos_512_v4
+142/721489/campos_512_v4
+142/721491/campos_512_v4
+142/721546/campos_512_v4
+142/721620/campos_512_v4
+142/721704/campos_512_v4
+142/721756/campos_512_v4
+142/721827/campos_512_v4
+142/721837/campos_512_v4
+142/721841/campos_512_v4
+142/721845/campos_512_v4
+142/721848/campos_512_v4
+142/721919/campos_512_v4
+142/721923/campos_512_v4
+142/721928/campos_512_v4
+142/721981/campos_512_v4
+142/722010/campos_512_v4
+142/722023/campos_512_v4
+142/722026/campos_512_v4
+142/722055/campos_512_v4
+142/722110/campos_512_v4
+142/722122/campos_512_v4
+142/722216/campos_512_v4
+142/722232/campos_512_v4
+142/722298/campos_512_v4
+142/722306/campos_512_v4
+142/722307/campos_512_v4
+142/722312/campos_512_v4
+142/722432/campos_512_v4
+142/722448/campos_512_v4
+142/722451/campos_512_v4
+142/722457/campos_512_v4
+142/722458/campos_512_v4
+142/722465/campos_512_v4
+142/722467/campos_512_v4
+142/722513/campos_512_v4
+142/722524/campos_512_v4
+142/722529/campos_512_v4
+142/722574/campos_512_v4
+142/722653/campos_512_v4
+142/722666/campos_512_v4
+142/722681/campos_512_v4
+142/722685/campos_512_v4
+142/722705/campos_512_v4
+142/722731/campos_512_v4
+142/722759/campos_512_v4
+142/722771/campos_512_v4
+142/722790/campos_512_v4
+142/722815/campos_512_v4
+142/722833/campos_512_v4
+142/722934/campos_512_v4
+142/722948/campos_512_v4
+142/722952/campos_512_v4
+142/722982/campos_512_v4
+142/723023/campos_512_v4
+142/723048/campos_512_v4
+142/723049/campos_512_v4
+142/723079/campos_512_v4
+142/723110/campos_512_v4
+142/723150/campos_512_v4
+142/723162/campos_512_v4
+142/723229/campos_512_v4
+142/723263/campos_512_v4
+142/723272/campos_512_v4
+142/723275/campos_512_v4
+142/723302/campos_512_v4
+142/723315/campos_512_v4
+142/723358/campos_512_v4
+142/723369/campos_512_v4
+142/723395/campos_512_v4
+142/723427/campos_512_v4
+142/723483/campos_512_v4
+142/723489/campos_512_v4
+142/723491/campos_512_v4
+142/723514/campos_512_v4
+142/723527/campos_512_v4
+142/723545/campos_512_v4
+142/723549/campos_512_v4
+142/723571/campos_512_v4
+142/723584/campos_512_v4
+142/723604/campos_512_v4
+142/723625/campos_512_v4
+142/723636/campos_512_v4
+142/723668/campos_512_v4
+142/723678/campos_512_v4
+142/723691/campos_512_v4
+142/723701/campos_512_v4
+142/723708/campos_512_v4
+142/723712/campos_512_v4
+142/723763/campos_512_v4
+142/723857/campos_512_v4
+142/723937/campos_512_v4
+142/723945/campos_512_v4
+142/723970/campos_512_v4
+142/723993/campos_512_v4
+142/724007/campos_512_v4
+142/724029/campos_512_v4
+142/724036/campos_512_v4
+142/724171/campos_512_v4
+142/724212/campos_512_v4
+142/724251/campos_512_v4
+142/724293/campos_512_v4
+142/724300/campos_512_v4
+142/724310/campos_512_v4
+142/724367/campos_512_v4
+142/724373/campos_512_v4
+142/724429/campos_512_v4
+142/724526/campos_512_v4
+142/724589/campos_512_v4
+142/724620/campos_512_v4
+142/724644/campos_512_v4
+142/724686/campos_512_v4
+142/724741/campos_512_v4
+142/724743/campos_512_v4
+142/724808/campos_512_v4
+142/724831/campos_512_v4
+142/724931/campos_512_v4
+142/724992/campos_512_v4
+143/725020/campos_512_v4
+143/725047/campos_512_v4
+143/725103/campos_512_v4
+143/725110/campos_512_v4
+143/725115/campos_512_v4
+143/725126/campos_512_v4
+143/725144/campos_512_v4
+143/725251/campos_512_v4
+143/725272/campos_512_v4
+143/725283/campos_512_v4
+143/725368/campos_512_v4
+143/725416/campos_512_v4
+143/725429/campos_512_v4
+143/725430/campos_512_v4
+143/725479/campos_512_v4
+143/725492/campos_512_v4
+143/725509/campos_512_v4
+143/725552/campos_512_v4
+143/725600/campos_512_v4
+143/725664/campos_512_v4
+143/725683/campos_512_v4
+143/725712/campos_512_v4
+143/725735/campos_512_v4
+143/725794/campos_512_v4
+143/725813/campos_512_v4
+143/725836/campos_512_v4
+143/725855/campos_512_v4
+143/725876/campos_512_v4
+143/725913/campos_512_v4
+143/725922/campos_512_v4
+143/725925/campos_512_v4
+143/725949/campos_512_v4
+143/725959/campos_512_v4
+143/725961/campos_512_v4
+143/725997/campos_512_v4
+143/725999/campos_512_v4
+143/726048/campos_512_v4
+143/726078/campos_512_v4
+143/726088/campos_512_v4
+143/726091/campos_512_v4
+143/726103/campos_512_v4
+143/726143/campos_512_v4
+143/726236/campos_512_v4
+143/726266/campos_512_v4
+143/726307/campos_512_v4
+143/726309/campos_512_v4
+143/726356/campos_512_v4
+143/726361/campos_512_v4
+143/726395/campos_512_v4
+143/726455/campos_512_v4
+143/726502/campos_512_v4
+143/726543/campos_512_v4
+143/726575/campos_512_v4
+143/726603/campos_512_v4
+143/726606/campos_512_v4
+143/726624/campos_512_v4
+143/726657/campos_512_v4
+143/726686/campos_512_v4
+143/726696/campos_512_v4
+143/726709/campos_512_v4
+143/726712/campos_512_v4
+143/726743/campos_512_v4
+143/726763/campos_512_v4
+143/726784/campos_512_v4
+143/726789/campos_512_v4
+143/726812/campos_512_v4
+143/726848/campos_512_v4
+143/726869/campos_512_v4
+143/726877/campos_512_v4
+143/726891/campos_512_v4
+143/726893/campos_512_v4
+143/726910/campos_512_v4
+143/726941/campos_512_v4
+143/727039/campos_512_v4
+143/727053/campos_512_v4
+143/727100/campos_512_v4
+143/727139/campos_512_v4
+143/727169/campos_512_v4
+143/727200/campos_512_v4
+143/727211/campos_512_v4
+143/727213/campos_512_v4
+143/727221/campos_512_v4
+143/727252/campos_512_v4
+143/727254/campos_512_v4
+143/727258/campos_512_v4
+143/727275/campos_512_v4
+143/727334/campos_512_v4
+143/727342/campos_512_v4
+143/727356/campos_512_v4
+143/727414/campos_512_v4
+143/727441/campos_512_v4
+143/727450/campos_512_v4
+143/727465/campos_512_v4
+143/727494/campos_512_v4
+143/727498/campos_512_v4
+143/727505/campos_512_v4
+143/727546/campos_512_v4
+143/727557/campos_512_v4
+143/727558/campos_512_v4
+143/727682/campos_512_v4
+143/727689/campos_512_v4
+143/727696/campos_512_v4
+143/727748/campos_512_v4
+143/727819/campos_512_v4
+143/727858/campos_512_v4
+143/727886/campos_512_v4
+143/727937/campos_512_v4
+143/727950/campos_512_v4
+143/727966/campos_512_v4
+143/727995/campos_512_v4
+143/727997/campos_512_v4
+143/728043/campos_512_v4
+143/728065/campos_512_v4
+143/728128/campos_512_v4
+143/728136/campos_512_v4
+143/728149/campos_512_v4
+143/728150/campos_512_v4
+143/728191/campos_512_v4
+143/728229/campos_512_v4
+143/728233/campos_512_v4
+143/728244/campos_512_v4
+143/728246/campos_512_v4
+143/728251/campos_512_v4
+143/728261/campos_512_v4
+143/728268/campos_512_v4
+143/728311/campos_512_v4
+143/728317/campos_512_v4
+143/728318/campos_512_v4
+143/728347/campos_512_v4
+143/728395/campos_512_v4
+143/728399/campos_512_v4
+143/728415/campos_512_v4
+143/728434/campos_512_v4
+143/728450/campos_512_v4
+143/728465/campos_512_v4
+143/728506/campos_512_v4
+143/728519/campos_512_v4
+143/728571/campos_512_v4
+143/728581/campos_512_v4
+143/728606/campos_512_v4
+143/728647/campos_512_v4
+143/728706/campos_512_v4
+143/728716/campos_512_v4
+143/728743/campos_512_v4
+143/728780/campos_512_v4
+143/728783/campos_512_v4
+143/728804/campos_512_v4
+143/728810/campos_512_v4
+143/728813/campos_512_v4
+143/728828/campos_512_v4
+143/728862/campos_512_v4
+143/728915/campos_512_v4
+143/728917/campos_512_v4
+143/728961/campos_512_v4
+143/728972/campos_512_v4
+143/729024/campos_512_v4
+143/729051/campos_512_v4
+143/729093/campos_512_v4
+143/729113/campos_512_v4
+143/729131/campos_512_v4
+143/729133/campos_512_v4
+143/729145/campos_512_v4
+143/729169/campos_512_v4
+143/729172/campos_512_v4
+143/729217/campos_512_v4
+143/729225/campos_512_v4
+143/729229/campos_512_v4
+143/729237/campos_512_v4
+143/729276/campos_512_v4
+143/729284/campos_512_v4
+143/729295/campos_512_v4
+143/729297/campos_512_v4
+143/729371/campos_512_v4
+143/729423/campos_512_v4
+143/729451/campos_512_v4
+143/729547/campos_512_v4
+143/729559/campos_512_v4
+143/729651/campos_512_v4
+143/729680/campos_512_v4
+143/729684/campos_512_v4
+143/729730/campos_512_v4
+143/729739/campos_512_v4
+143/729742/campos_512_v4
+143/729760/campos_512_v4
+143/729766/campos_512_v4
+143/729774/campos_512_v4
+143/729794/campos_512_v4
+143/729797/campos_512_v4
+143/729800/campos_512_v4
+143/729808/campos_512_v4
+143/729813/campos_512_v4
+143/729907/campos_512_v4
+143/729927/campos_512_v4
+143/729996/campos_512_v4
+144/730068/campos_512_v4
+144/730089/campos_512_v4
+144/730114/campos_512_v4
+144/730228/campos_512_v4
+144/730283/campos_512_v4
+144/730367/campos_512_v4
+144/730371/campos_512_v4
+144/730397/campos_512_v4
+144/730425/campos_512_v4
+144/730495/campos_512_v4
+144/730505/campos_512_v4
+144/730673/campos_512_v4
+144/730681/campos_512_v4
+144/730746/campos_512_v4
+144/730751/campos_512_v4
+144/730752/campos_512_v4
+144/730834/campos_512_v4
+144/730839/campos_512_v4
+144/730895/campos_512_v4
+144/730899/campos_512_v4
+144/730912/campos_512_v4
+144/730980/campos_512_v4
+144/730981/campos_512_v4
+144/730986/campos_512_v4
+144/730987/campos_512_v4
+144/731045/campos_512_v4
+144/731153/campos_512_v4
+144/731170/campos_512_v4
+144/731223/campos_512_v4
+144/731260/campos_512_v4
+144/731289/campos_512_v4
+144/731318/campos_512_v4
+144/731436/campos_512_v4
+144/731457/campos_512_v4
+144/731517/campos_512_v4
+144/731536/campos_512_v4
+144/731578/campos_512_v4
+144/731625/campos_512_v4
+144/731680/campos_512_v4
+144/731758/campos_512_v4
+144/731787/campos_512_v4
+144/731790/campos_512_v4
+144/731798/campos_512_v4
+144/731856/campos_512_v4
+144/731875/campos_512_v4
+144/731886/campos_512_v4
+144/731900/campos_512_v4
+144/731919/campos_512_v4
+144/731933/campos_512_v4
+144/731955/campos_512_v4
+144/731973/campos_512_v4
+144/732007/campos_512_v4
+144/732073/campos_512_v4
+144/732084/campos_512_v4
+144/732146/campos_512_v4
+144/732204/campos_512_v4
+144/732229/campos_512_v4
+144/732233/campos_512_v4
+144/732252/campos_512_v4
+144/732275/campos_512_v4
+144/732330/campos_512_v4
+144/732353/campos_512_v4
+144/732366/campos_512_v4
+144/732465/campos_512_v4
+144/732538/campos_512_v4
+144/732628/campos_512_v4
+144/732642/campos_512_v4
+144/732663/campos_512_v4
+144/732687/campos_512_v4
+144/732689/campos_512_v4
+144/732710/campos_512_v4
+144/732724/campos_512_v4
+144/732742/campos_512_v4
+144/732794/campos_512_v4
+144/732861/campos_512_v4
+144/732907/campos_512_v4
+144/732913/campos_512_v4
+144/732934/campos_512_v4
+144/732960/campos_512_v4
+144/732966/campos_512_v4
+144/733046/campos_512_v4
+144/733047/campos_512_v4
+144/733048/campos_512_v4
+144/733105/campos_512_v4
+144/733200/campos_512_v4
+144/733218/campos_512_v4
+144/733357/campos_512_v4
+144/733382/campos_512_v4
+144/733398/campos_512_v4
+144/733442/campos_512_v4
+144/733462/campos_512_v4
+144/733472/campos_512_v4
+144/733485/campos_512_v4
+144/733552/campos_512_v4
+144/733554/campos_512_v4
+144/733564/campos_512_v4
+144/733574/campos_512_v4
+144/733664/campos_512_v4
+144/733665/campos_512_v4
+144/733690/campos_512_v4
+144/733787/campos_512_v4
+144/733807/campos_512_v4
+144/733824/campos_512_v4
+144/733826/campos_512_v4
+144/733840/campos_512_v4
+144/733859/campos_512_v4
+144/733868/campos_512_v4
+144/733879/campos_512_v4
+144/733892/campos_512_v4
+144/733923/campos_512_v4
+144/733966/campos_512_v4
+144/733993/campos_512_v4
+144/734003/campos_512_v4
+144/734005/campos_512_v4
+144/734090/campos_512_v4
+144/734173/campos_512_v4
+144/734190/campos_512_v4
+144/734219/campos_512_v4
+144/734245/campos_512_v4
+144/734281/campos_512_v4
+144/734416/campos_512_v4
+144/734420/campos_512_v4
+144/734461/campos_512_v4
+144/734496/campos_512_v4
+144/734593/campos_512_v4
+144/734636/campos_512_v4
+144/734638/campos_512_v4
+144/734644/campos_512_v4
+144/734649/campos_512_v4
+144/734667/campos_512_v4
+144/734687/campos_512_v4
+144/734729/campos_512_v4
+144/734742/campos_512_v4
+144/734771/campos_512_v4
+144/734775/campos_512_v4
+144/734786/campos_512_v4
+144/734796/campos_512_v4
+144/734858/campos_512_v4
+144/734892/campos_512_v4
+144/734899/campos_512_v4
+144/734930/campos_512_v4
+145/735042/campos_512_v4
+145/735073/campos_512_v4
+145/735080/campos_512_v4
+145/735117/campos_512_v4
+145/735139/campos_512_v4
+145/735182/campos_512_v4
+145/735255/campos_512_v4
+145/735267/campos_512_v4
+145/735272/campos_512_v4
+145/735289/campos_512_v4
+145/735315/campos_512_v4
+145/735352/campos_512_v4
+145/735382/campos_512_v4
+145/735402/campos_512_v4
+145/735461/campos_512_v4
+145/735485/campos_512_v4
+145/735496/campos_512_v4
+145/735507/campos_512_v4
+145/735514/campos_512_v4
+145/735520/campos_512_v4
+145/735527/campos_512_v4
+145/735539/campos_512_v4
+145/735562/campos_512_v4
+145/735602/campos_512_v4
+145/735638/campos_512_v4
+145/735698/campos_512_v4
+145/735725/campos_512_v4
+145/735761/campos_512_v4
+145/735902/campos_512_v4
+145/735905/campos_512_v4
+145/735937/campos_512_v4
+145/735976/campos_512_v4
+145/736032/campos_512_v4
+145/736057/campos_512_v4
+145/736110/campos_512_v4
+145/736112/campos_512_v4
+145/736174/campos_512_v4
+145/736195/campos_512_v4
+145/736212/campos_512_v4
+145/736324/campos_512_v4
+145/736377/campos_512_v4
+145/736378/campos_512_v4
+145/736405/campos_512_v4
+145/736406/campos_512_v4
+145/736421/campos_512_v4
+145/736449/campos_512_v4
+145/736459/campos_512_v4
+145/736479/campos_512_v4
+145/736531/campos_512_v4
+145/736670/campos_512_v4
+145/736743/campos_512_v4
+145/736765/campos_512_v4
+145/736773/campos_512_v4
+145/736829/campos_512_v4
+145/736837/campos_512_v4
+145/736857/campos_512_v4
+145/736864/campos_512_v4
+145/736894/campos_512_v4
+145/736902/campos_512_v4
+145/736904/campos_512_v4
+145/737156/campos_512_v4
+145/737189/campos_512_v4
+145/737202/campos_512_v4
+145/737263/campos_512_v4
+145/737275/campos_512_v4
+145/737319/campos_512_v4
+145/737368/campos_512_v4
+145/737374/campos_512_v4
+145/737393/campos_512_v4
+145/737394/campos_512_v4
+145/737409/campos_512_v4
+145/737416/campos_512_v4
+145/737445/campos_512_v4
+145/737457/campos_512_v4
+145/737465/campos_512_v4
+145/737506/campos_512_v4
+145/737517/campos_512_v4
+145/737568/campos_512_v4
+145/737577/campos_512_v4
+145/737600/campos_512_v4
+145/737602/campos_512_v4
+145/737671/campos_512_v4
+145/737693/campos_512_v4
+145/737706/campos_512_v4
+145/737707/campos_512_v4
+145/737749/campos_512_v4
+145/737787/campos_512_v4
+145/737809/campos_512_v4
+145/737811/campos_512_v4
+145/737829/campos_512_v4
+145/737845/campos_512_v4
+145/737888/campos_512_v4
+145/737914/campos_512_v4
+145/737923/campos_512_v4
+145/737994/campos_512_v4
+145/738061/campos_512_v4
+145/738113/campos_512_v4
+145/738152/campos_512_v4
+145/738225/campos_512_v4
+145/738228/campos_512_v4
+145/738250/campos_512_v4
+145/738288/campos_512_v4
+145/738318/campos_512_v4
+145/738325/campos_512_v4
+145/738329/campos_512_v4
+145/738357/campos_512_v4
+145/738366/campos_512_v4
+145/738394/campos_512_v4
+145/738432/campos_512_v4
+145/738457/campos_512_v4
+145/738469/campos_512_v4
+145/738480/campos_512_v4
+145/738542/campos_512_v4
+145/738551/campos_512_v4
+145/738565/campos_512_v4
+145/738566/campos_512_v4
+145/738824/campos_512_v4
+145/738880/campos_512_v4
+145/738888/campos_512_v4
+145/738900/campos_512_v4
+145/738910/campos_512_v4
+145/738959/campos_512_v4
+145/738961/campos_512_v4
+145/738979/campos_512_v4
+145/738997/campos_512_v4
+145/739058/campos_512_v4
+145/739089/campos_512_v4
+145/739112/campos_512_v4
+145/739151/campos_512_v4
+145/739192/campos_512_v4
+145/739234/campos_512_v4
+145/739278/campos_512_v4
+145/739294/campos_512_v4
+145/739298/campos_512_v4
+145/739351/campos_512_v4
+145/739366/campos_512_v4
+145/739408/campos_512_v4
+145/739444/campos_512_v4
+145/739479/campos_512_v4
+145/739511/campos_512_v4
+145/739533/campos_512_v4
+145/739555/campos_512_v4
+145/739572/campos_512_v4
+145/739601/campos_512_v4
+145/739611/campos_512_v4
+145/739629/campos_512_v4
+145/739637/campos_512_v4
+145/739683/campos_512_v4
+145/739715/campos_512_v4
+145/739729/campos_512_v4
+145/739739/campos_512_v4
+145/739755/campos_512_v4
+145/739768/campos_512_v4
+145/739769/campos_512_v4
+145/739770/campos_512_v4
+145/739783/campos_512_v4
+145/739795/campos_512_v4
+145/739802/campos_512_v4
+145/739828/campos_512_v4
+145/739842/campos_512_v4
+145/739864/campos_512_v4
+145/739913/campos_512_v4
+145/739960/campos_512_v4
+145/739993/campos_512_v4
+146/740004/campos_512_v4
+146/740010/campos_512_v4
+146/740035/campos_512_v4
+146/740050/campos_512_v4
+146/740077/campos_512_v4
+146/740079/campos_512_v4
+146/740124/campos_512_v4
+146/740152/campos_512_v4
+146/740160/campos_512_v4
+146/740199/campos_512_v4
+146/740202/campos_512_v4
+146/740207/campos_512_v4
+146/740239/campos_512_v4
+146/740249/campos_512_v4
+146/740309/campos_512_v4
+146/740334/campos_512_v4
+146/740370/campos_512_v4
+146/740384/campos_512_v4
+146/740409/campos_512_v4
+146/740411/campos_512_v4
+146/740442/campos_512_v4
+146/740484/campos_512_v4
+146/740489/campos_512_v4
+146/740513/campos_512_v4
+146/740545/campos_512_v4
+146/740573/campos_512_v4
+146/740608/campos_512_v4
+146/740642/campos_512_v4
+146/740648/campos_512_v4
+146/740688/campos_512_v4
+146/740708/campos_512_v4
+146/740751/campos_512_v4
+146/740770/campos_512_v4
+146/740773/campos_512_v4
+146/740834/campos_512_v4
+146/740874/campos_512_v4
+146/740875/campos_512_v4
+146/740942/campos_512_v4
+146/741011/campos_512_v4
+146/741036/campos_512_v4
+146/741047/campos_512_v4
+146/741052/campos_512_v4
+146/741097/campos_512_v4
+146/741140/campos_512_v4
+146/741184/campos_512_v4
+146/741202/campos_512_v4
+146/741244/campos_512_v4
+146/741261/campos_512_v4
+146/741306/campos_512_v4
+146/741315/campos_512_v4
+146/741316/campos_512_v4
+146/741363/campos_512_v4
+146/741386/campos_512_v4
+146/741514/campos_512_v4
+146/741519/campos_512_v4
+146/741553/campos_512_v4
+146/741612/campos_512_v4
+146/741648/campos_512_v4
+146/741708/campos_512_v4
+146/741755/campos_512_v4
+146/741801/campos_512_v4
+146/741821/campos_512_v4
+146/741874/campos_512_v4
+146/741881/campos_512_v4
+146/741883/campos_512_v4
+146/741901/campos_512_v4
+146/741904/campos_512_v4
+146/741978/campos_512_v4
+146/742063/campos_512_v4
+146/742125/campos_512_v4
+146/742134/campos_512_v4
+146/742166/campos_512_v4
+146/742180/campos_512_v4
+146/742212/campos_512_v4
+146/742219/campos_512_v4
+146/742241/campos_512_v4
+146/742260/campos_512_v4
+146/742286/campos_512_v4
+146/742295/campos_512_v4
+146/742324/campos_512_v4
+146/742345/campos_512_v4
+146/742389/campos_512_v4
+146/742439/campos_512_v4
+146/742452/campos_512_v4
+146/742490/campos_512_v4
+146/742517/campos_512_v4
+146/742597/campos_512_v4
+146/742604/campos_512_v4
+146/742659/campos_512_v4
+146/742743/campos_512_v4
+146/742792/campos_512_v4
+146/742814/campos_512_v4
+146/742830/campos_512_v4
+146/742863/campos_512_v4
+146/742868/campos_512_v4
+146/742901/campos_512_v4
+146/742959/campos_512_v4
+146/743035/campos_512_v4
+146/743036/campos_512_v4
+146/743096/campos_512_v4
+146/743127/campos_512_v4
+146/743135/campos_512_v4
+146/743196/campos_512_v4
+146/743239/campos_512_v4
+146/743274/campos_512_v4
+146/743281/campos_512_v4
+146/743288/campos_512_v4
+146/743308/campos_512_v4
+146/743318/campos_512_v4
+146/743323/campos_512_v4
+146/743370/campos_512_v4
+146/743386/campos_512_v4
+146/743428/campos_512_v4
+146/743444/campos_512_v4
+146/743454/campos_512_v4
+146/743465/campos_512_v4
+146/743474/campos_512_v4
+146/743487/campos_512_v4
+146/743493/campos_512_v4
+146/743508/campos_512_v4
+146/743593/campos_512_v4
+146/743763/campos_512_v4
+146/743866/campos_512_v4
+146/743875/campos_512_v4
+146/743918/campos_512_v4
+146/743950/campos_512_v4
+146/743953/campos_512_v4
+146/743982/campos_512_v4
+146/743991/campos_512_v4
+146/744019/campos_512_v4
+146/744063/campos_512_v4
+146/744084/campos_512_v4
+146/744108/campos_512_v4
+146/744126/campos_512_v4
+146/744135/campos_512_v4
+146/744191/campos_512_v4
+146/744318/campos_512_v4
+146/744324/campos_512_v4
+146/744362/campos_512_v4
+146/744391/campos_512_v4
+146/744402/campos_512_v4
+146/744508/campos_512_v4
+146/744509/campos_512_v4
+146/744567/campos_512_v4
+146/744574/campos_512_v4
+146/744644/campos_512_v4
+146/744657/campos_512_v4
+146/744692/campos_512_v4
+146/744710/campos_512_v4
+146/744712/campos_512_v4
+146/744786/campos_512_v4
+146/744801/campos_512_v4
+146/744826/campos_512_v4
+146/744832/campos_512_v4
+146/744858/campos_512_v4
+146/744909/campos_512_v4
+146/744919/campos_512_v4
+146/744940/campos_512_v4
+146/744967/campos_512_v4
+146/744981/campos_512_v4
+146/744986/campos_512_v4
+147/745008/campos_512_v4
+147/745015/campos_512_v4
+147/745041/campos_512_v4
+147/745042/campos_512_v4
+147/745045/campos_512_v4
+147/745050/campos_512_v4
+147/745071/campos_512_v4
+147/745078/campos_512_v4
+147/745126/campos_512_v4
+147/745154/campos_512_v4
+147/745191/campos_512_v4
+147/745202/campos_512_v4
+147/745218/campos_512_v4
+147/745238/campos_512_v4
+147/745265/campos_512_v4
+147/745305/campos_512_v4
+147/745357/campos_512_v4
+147/745371/campos_512_v4
+147/745398/campos_512_v4
+147/745407/campos_512_v4
+147/745442/campos_512_v4
+147/745458/campos_512_v4
+147/745495/campos_512_v4
+147/745521/campos_512_v4
+147/745528/campos_512_v4
+147/745556/campos_512_v4
+147/745594/campos_512_v4
+147/745596/campos_512_v4
+147/745628/campos_512_v4
+147/745650/campos_512_v4
+147/745662/campos_512_v4
+147/745687/campos_512_v4
+147/745715/campos_512_v4
+147/745727/campos_512_v4
+147/745770/campos_512_v4
+147/745811/campos_512_v4
+147/745899/campos_512_v4
+147/746010/campos_512_v4
+147/746034/campos_512_v4
+147/746047/campos_512_v4
+147/746071/campos_512_v4
+147/746137/campos_512_v4
+147/746148/campos_512_v4
+147/746167/campos_512_v4
+147/746192/campos_512_v4
+147/746298/campos_512_v4
+147/746405/campos_512_v4
+147/746418/campos_512_v4
+147/746530/campos_512_v4
+147/746577/campos_512_v4
+147/746585/campos_512_v4
+147/746596/campos_512_v4
+147/746627/campos_512_v4
+147/746629/campos_512_v4
+147/746699/campos_512_v4
+147/746733/campos_512_v4
+147/746764/campos_512_v4
+147/746779/campos_512_v4
+147/746824/campos_512_v4
+147/746843/campos_512_v4
+147/746863/campos_512_v4
+147/746865/campos_512_v4
+147/746905/campos_512_v4
+147/746917/campos_512_v4
+147/746939/campos_512_v4
+147/746947/campos_512_v4
+147/746995/campos_512_v4
+147/747224/campos_512_v4
+147/747243/campos_512_v4
+147/747257/campos_512_v4
+147/747268/campos_512_v4
+147/747346/campos_512_v4
+147/747379/campos_512_v4
+147/747399/campos_512_v4
+147/747417/campos_512_v4
+147/747437/campos_512_v4
+147/747469/campos_512_v4
+147/747492/campos_512_v4
+147/747497/campos_512_v4
+147/747536/campos_512_v4
+147/747539/campos_512_v4
+147/747554/campos_512_v4
+147/747669/campos_512_v4
+147/747671/campos_512_v4
+147/747682/campos_512_v4
+147/747789/campos_512_v4
+147/747809/campos_512_v4
+147/747880/campos_512_v4
+147/747906/campos_512_v4
+147/747908/campos_512_v4
+147/747940/campos_512_v4
+147/747959/campos_512_v4
+147/747978/campos_512_v4
+147/748009/campos_512_v4
+147/748013/campos_512_v4
+147/748123/campos_512_v4
+147/748145/campos_512_v4
+147/748162/campos_512_v4
+147/748222/campos_512_v4
+147/748324/campos_512_v4
+147/748353/campos_512_v4
+147/748476/campos_512_v4
+147/748489/campos_512_v4
+147/748496/campos_512_v4
+147/748546/campos_512_v4
+147/748575/campos_512_v4
+147/748589/campos_512_v4
+147/748649/campos_512_v4
+147/748703/campos_512_v4
+147/748728/campos_512_v4
+147/748729/campos_512_v4
+147/748782/campos_512_v4
+147/748836/campos_512_v4
+147/748895/campos_512_v4
+147/748916/campos_512_v4
+147/748964/campos_512_v4
+147/748980/campos_512_v4
+147/749091/campos_512_v4
+147/749217/campos_512_v4
+147/749221/campos_512_v4
+147/749266/campos_512_v4
+147/749284/campos_512_v4
+147/749287/campos_512_v4
+147/749288/campos_512_v4
+147/749299/campos_512_v4
+147/749352/campos_512_v4
+147/749383/campos_512_v4
+147/749406/campos_512_v4
+147/749468/campos_512_v4
+147/749493/campos_512_v4
+147/749538/campos_512_v4
+147/749566/campos_512_v4
+147/749581/campos_512_v4
+147/749605/campos_512_v4
+147/749636/campos_512_v4
+147/749652/campos_512_v4
+147/749657/campos_512_v4
+147/749702/campos_512_v4
+147/749744/campos_512_v4
+147/749823/campos_512_v4
+147/749896/campos_512_v4
+147/749912/campos_512_v4
+148/750050/campos_512_v4
+148/750059/campos_512_v4
+148/750060/campos_512_v4
+148/750069/campos_512_v4
+148/750130/campos_512_v4
+148/750135/campos_512_v4
+148/750149/campos_512_v4
+148/750202/campos_512_v4
+148/750272/campos_512_v4
+148/750290/campos_512_v4
+148/750304/campos_512_v4
+148/750345/campos_512_v4
+148/750346/campos_512_v4
+148/750372/campos_512_v4
+148/750425/campos_512_v4
+148/750514/campos_512_v4
+148/750547/campos_512_v4
+148/750559/campos_512_v4
+148/750563/campos_512_v4
+148/750572/campos_512_v4
+148/750608/campos_512_v4
+148/750615/campos_512_v4
+148/750685/campos_512_v4
+148/750720/campos_512_v4
+148/750730/campos_512_v4
+148/750763/campos_512_v4
+148/750785/campos_512_v4
+148/750819/campos_512_v4
+148/750831/campos_512_v4
+148/750856/campos_512_v4
+148/750880/campos_512_v4
+148/750881/campos_512_v4
+148/750901/campos_512_v4
+148/750918/campos_512_v4
+148/750938/campos_512_v4
+148/750944/campos_512_v4
+148/751053/campos_512_v4
+148/751061/campos_512_v4
+148/751175/campos_512_v4
+148/751276/campos_512_v4
+148/751283/campos_512_v4
+148/751292/campos_512_v4
+148/751300/campos_512_v4
+148/751315/campos_512_v4
+148/751326/campos_512_v4
+148/751339/campos_512_v4
+148/751345/campos_512_v4
+148/751395/campos_512_v4
+148/751410/campos_512_v4
+148/751414/campos_512_v4
+148/751421/campos_512_v4
+148/751453/campos_512_v4
+148/751579/campos_512_v4
+148/751591/campos_512_v4
+148/751630/campos_512_v4
+148/751653/campos_512_v4
+148/751690/campos_512_v4
+148/751697/campos_512_v4
+148/751767/campos_512_v4
+148/751852/campos_512_v4
+148/751862/campos_512_v4
+148/751945/campos_512_v4
+148/751954/campos_512_v4
+148/752036/campos_512_v4
+148/752076/campos_512_v4
+148/752115/campos_512_v4
+148/752125/campos_512_v4
+148/752176/campos_512_v4
+148/752184/campos_512_v4
+148/752202/campos_512_v4
+148/752265/campos_512_v4
+148/752316/campos_512_v4
+148/752339/campos_512_v4
+148/752353/campos_512_v4
+148/752386/campos_512_v4
+148/752387/campos_512_v4
+148/752406/campos_512_v4
+148/752433/campos_512_v4
+148/752441/campos_512_v4
+148/752464/campos_512_v4
+148/752514/campos_512_v4
+148/752559/campos_512_v4
+148/752639/campos_512_v4
+148/752675/campos_512_v4
+148/752698/campos_512_v4
+148/752699/campos_512_v4
+148/752719/campos_512_v4
+148/752729/campos_512_v4
+148/752763/campos_512_v4
+148/752804/campos_512_v4
+148/752824/campos_512_v4
+148/752868/campos_512_v4
+148/752879/campos_512_v4
+148/752905/campos_512_v4
+148/752980/campos_512_v4
+148/753019/campos_512_v4
+148/753033/campos_512_v4
+148/753106/campos_512_v4
+148/753125/campos_512_v4
+148/753127/campos_512_v4
+148/753161/campos_512_v4
+148/753177/campos_512_v4
+148/753214/campos_512_v4
+148/753225/campos_512_v4
+148/753262/campos_512_v4
+148/753283/campos_512_v4
+148/753343/campos_512_v4
+148/753373/campos_512_v4
+148/753377/campos_512_v4
+148/753434/campos_512_v4
+148/753443/campos_512_v4
+148/753450/campos_512_v4
+148/753456/campos_512_v4
+148/753462/campos_512_v4
+148/753476/campos_512_v4
+148/753553/campos_512_v4
+148/753556/campos_512_v4
+148/753581/campos_512_v4
+148/753606/campos_512_v4
+148/753617/campos_512_v4
+148/753625/campos_512_v4
+148/753659/campos_512_v4
+148/753673/campos_512_v4
+148/753681/campos_512_v4
+148/753684/campos_512_v4
+148/753689/campos_512_v4
+148/753732/campos_512_v4
+148/753786/campos_512_v4
+148/753870/campos_512_v4
+148/753878/campos_512_v4
+148/753962/campos_512_v4
+148/753965/campos_512_v4
+148/754029/campos_512_v4
+148/754129/campos_512_v4
+148/754132/campos_512_v4
+148/754157/campos_512_v4
+148/754220/campos_512_v4
+148/754239/campos_512_v4
+148/754269/campos_512_v4
+148/754309/campos_512_v4
+148/754338/campos_512_v4
+148/754388/campos_512_v4
+148/754457/campos_512_v4
+148/754483/campos_512_v4
+148/754487/campos_512_v4
+148/754505/campos_512_v4
+148/754515/campos_512_v4
+148/754542/campos_512_v4
+148/754550/campos_512_v4
+148/754551/campos_512_v4
+148/754569/campos_512_v4
+148/754572/campos_512_v4
+148/754605/campos_512_v4
+148/754611/campos_512_v4
+148/754619/campos_512_v4
+148/754642/campos_512_v4
+148/754643/campos_512_v4
+148/754652/campos_512_v4
+148/754666/campos_512_v4
+148/754784/campos_512_v4
+148/754802/campos_512_v4
+148/754808/campos_512_v4
+148/754811/campos_512_v4
+148/754856/campos_512_v4
+148/754864/campos_512_v4
+148/754871/campos_512_v4
+148/754922/campos_512_v4
+148/754986/campos_512_v4
+149/755009/campos_512_v4
+149/755024/campos_512_v4
+149/755046/campos_512_v4
+149/755057/campos_512_v4
+149/755062/campos_512_v4
+149/755076/campos_512_v4
+149/755085/campos_512_v4
+149/755169/campos_512_v4
+149/755225/campos_512_v4
+149/755240/campos_512_v4
+149/755299/campos_512_v4
+149/755328/campos_512_v4
+149/755330/campos_512_v4
+149/755336/campos_512_v4
+149/755382/campos_512_v4
+149/755409/campos_512_v4
+149/755437/campos_512_v4
+149/755485/campos_512_v4
+149/755517/campos_512_v4
+149/755518/campos_512_v4
+149/755530/campos_512_v4
+149/755541/campos_512_v4
+149/755561/campos_512_v4
+149/755745/campos_512_v4
+149/755768/campos_512_v4
+149/755792/campos_512_v4
+149/755807/campos_512_v4
+149/755840/campos_512_v4
+149/755856/campos_512_v4
+149/755911/campos_512_v4
+149/755972/campos_512_v4
+149/755997/campos_512_v4
+149/756015/campos_512_v4
+149/756053/campos_512_v4
+149/756114/campos_512_v4
+149/756139/campos_512_v4
+149/756178/campos_512_v4
+149/756251/campos_512_v4
+149/756299/campos_512_v4
+149/756307/campos_512_v4
+149/756334/campos_512_v4
+149/756357/campos_512_v4
+149/756367/campos_512_v4
+149/756368/campos_512_v4
+149/756377/campos_512_v4
+149/756381/campos_512_v4
+149/756395/campos_512_v4
+149/756453/campos_512_v4
+149/756457/campos_512_v4
+149/756460/campos_512_v4
+149/756486/campos_512_v4
+149/756496/campos_512_v4
+149/756507/campos_512_v4
+149/756524/campos_512_v4
+149/756528/campos_512_v4
+149/756541/campos_512_v4
+149/756550/campos_512_v4
+149/756596/campos_512_v4
+149/756636/campos_512_v4
+149/756659/campos_512_v4
+149/756673/campos_512_v4
+149/756701/campos_512_v4
+149/756721/campos_512_v4
+149/756736/campos_512_v4
+149/756740/campos_512_v4
+149/756748/campos_512_v4
+149/756778/campos_512_v4
+149/756813/campos_512_v4
+149/756831/campos_512_v4
+149/756834/campos_512_v4
+149/756857/campos_512_v4
+149/756868/campos_512_v4
+149/756871/campos_512_v4
+149/756878/campos_512_v4
+149/756880/campos_512_v4
+149/756888/campos_512_v4
+149/756929/campos_512_v4
+149/756945/campos_512_v4
+149/756959/campos_512_v4
+149/757114/campos_512_v4
+149/757135/campos_512_v4
+149/757146/campos_512_v4
+149/757157/campos_512_v4
+149/757180/campos_512_v4
+149/757232/campos_512_v4
+149/757276/campos_512_v4
+149/757329/campos_512_v4
+149/757355/campos_512_v4
+149/757374/campos_512_v4
+149/757398/campos_512_v4
+149/757420/campos_512_v4
+149/757425/campos_512_v4
+149/757474/campos_512_v4
+149/757501/campos_512_v4
+149/757502/campos_512_v4
+149/757522/campos_512_v4
+149/757531/campos_512_v4
+149/757544/campos_512_v4
+149/757547/campos_512_v4
+149/757549/campos_512_v4
+149/757550/campos_512_v4
+149/757608/campos_512_v4
+149/757609/campos_512_v4
+149/757705/campos_512_v4
+149/757785/campos_512_v4
+149/757886/campos_512_v4
+149/757911/campos_512_v4
+149/757913/campos_512_v4
+149/757963/campos_512_v4
+149/757964/campos_512_v4
+149/757992/campos_512_v4
+149/758030/campos_512_v4
+149/758052/campos_512_v4
+149/758055/campos_512_v4
+149/758089/campos_512_v4
+149/758114/campos_512_v4
+149/758118/campos_512_v4
+149/758133/campos_512_v4
+149/758160/campos_512_v4
+149/758188/campos_512_v4
+149/758243/campos_512_v4
+149/758271/campos_512_v4
+149/758283/campos_512_v4
+149/758289/campos_512_v4
+149/758413/campos_512_v4
+149/758429/campos_512_v4
+149/758459/campos_512_v4
+149/758460/campos_512_v4
+149/758484/campos_512_v4
+149/758516/campos_512_v4
+149/758537/campos_512_v4
+149/758593/campos_512_v4
+149/758644/campos_512_v4
+149/758661/campos_512_v4
+149/758674/campos_512_v4
+149/758752/campos_512_v4
+149/758761/campos_512_v4
+149/758784/campos_512_v4
+149/758794/campos_512_v4
+149/758801/campos_512_v4
+149/758820/campos_512_v4
+149/758821/campos_512_v4
+149/758860/campos_512_v4
+149/758898/campos_512_v4
+149/758919/campos_512_v4
+149/758945/campos_512_v4
+149/758956/campos_512_v4
+149/759037/campos_512_v4
+149/759046/campos_512_v4
+149/759083/campos_512_v4
+149/759087/campos_512_v4
+149/759088/campos_512_v4
+149/759090/campos_512_v4
+149/759109/campos_512_v4
+149/759118/campos_512_v4
+149/759155/campos_512_v4
+149/759164/campos_512_v4
+149/759250/campos_512_v4
+149/759281/campos_512_v4
+149/759282/campos_512_v4
+149/759314/campos_512_v4
+149/759367/campos_512_v4
+149/759393/campos_512_v4
+149/759411/campos_512_v4
+149/759412/campos_512_v4
+149/759433/campos_512_v4
+149/759440/campos_512_v4
+149/759456/campos_512_v4
+149/759535/campos_512_v4
+149/759542/campos_512_v4
+149/759568/campos_512_v4
+149/759579/campos_512_v4
+149/759585/campos_512_v4
+149/759588/campos_512_v4
+149/759617/campos_512_v4
+149/759627/campos_512_v4
+149/759634/campos_512_v4
+149/759672/campos_512_v4
+149/759698/campos_512_v4
+149/759742/campos_512_v4
+149/759877/campos_512_v4
+149/759953/campos_512_v4
+149/759959/campos_512_v4
+149/759969/campos_512_v4
+149/759974/campos_512_v4
+149/759978/campos_512_v4
+149/759999/campos_512_v4
+15/85065/campos_512_v4
+15/85092/campos_512_v4
+15/85113/campos_512_v4
+15/85114/campos_512_v4
+15/85134/campos_512_v4
+15/85142/campos_512_v4
+15/85166/campos_512_v4
+15/85185/campos_512_v4
+15/85243/campos_512_v4
+15/85267/campos_512_v4
+15/85271/campos_512_v4
+15/85317/campos_512_v4
+15/85334/campos_512_v4
+15/85357/campos_512_v4
+15/85393/campos_512_v4
+15/85406/campos_512_v4
+15/85419/campos_512_v4
+15/85470/campos_512_v4
+15/85476/campos_512_v4
+15/85515/campos_512_v4
+15/85523/campos_512_v4
+15/85559/campos_512_v4
+15/85582/campos_512_v4
+15/85641/campos_512_v4
+15/85734/campos_512_v4
+15/85737/campos_512_v4
+15/85750/campos_512_v4
+15/85760/campos_512_v4
+15/85770/campos_512_v4
+15/85859/campos_512_v4
+15/85862/campos_512_v4
+15/85886/campos_512_v4
+15/85967/campos_512_v4
+15/85991/campos_512_v4
+15/86005/campos_512_v4
+15/86075/campos_512_v4
+15/86120/campos_512_v4
+15/86201/campos_512_v4
+15/86271/campos_512_v4
+15/86292/campos_512_v4
+15/86307/campos_512_v4
+15/86340/campos_512_v4
+15/86352/campos_512_v4
+15/86361/campos_512_v4
+15/86375/campos_512_v4
+15/86401/campos_512_v4
+15/86437/campos_512_v4
+15/86443/campos_512_v4
+15/86465/campos_512_v4
+15/86478/campos_512_v4
+15/86544/campos_512_v4
+15/86576/campos_512_v4
+15/86577/campos_512_v4
+15/86578/campos_512_v4
+15/86579/campos_512_v4
+15/86600/campos_512_v4
+15/86618/campos_512_v4
+15/86647/campos_512_v4
+15/86748/campos_512_v4
+15/86752/campos_512_v4
+15/86753/campos_512_v4
+15/86758/campos_512_v4
+15/86762/campos_512_v4
+15/86807/campos_512_v4
+15/86814/campos_512_v4
+15/86833/campos_512_v4
+15/86864/campos_512_v4
+15/86903/campos_512_v4
+15/86904/campos_512_v4
+15/86935/campos_512_v4
+15/87009/campos_512_v4
+15/87043/campos_512_v4
+15/87051/campos_512_v4
+15/87057/campos_512_v4
+15/87063/campos_512_v4
+15/87066/campos_512_v4
+15/87067/campos_512_v4
+15/87080/campos_512_v4
+15/87124/campos_512_v4
+15/87126/campos_512_v4
+15/87177/campos_512_v4
+15/87181/campos_512_v4
+15/87206/campos_512_v4
+15/87224/campos_512_v4
+15/87226/campos_512_v4
+15/87261/campos_512_v4
+15/87270/campos_512_v4
+15/87272/campos_512_v4
+15/87278/campos_512_v4
+15/87338/campos_512_v4
+15/87357/campos_512_v4
+15/87381/campos_512_v4
+15/87387/campos_512_v4
+15/87396/campos_512_v4
+15/87405/campos_512_v4
+15/87408/campos_512_v4
+15/87438/campos_512_v4
+15/87481/campos_512_v4
+15/87482/campos_512_v4
+15/87509/campos_512_v4
+15/87522/campos_512_v4
+15/87535/campos_512_v4
+15/87538/campos_512_v4
+15/87539/campos_512_v4
+15/87553/campos_512_v4
+15/87577/campos_512_v4
+15/87584/campos_512_v4
+15/87593/campos_512_v4
+15/87611/campos_512_v4
+15/87645/campos_512_v4
+15/87648/campos_512_v4
+15/87654/campos_512_v4
+15/87663/campos_512_v4
+15/87693/campos_512_v4
+15/87740/campos_512_v4
+15/87754/campos_512_v4
+15/87757/campos_512_v4
+15/87795/campos_512_v4
+15/87856/campos_512_v4
+15/87857/campos_512_v4
+15/87900/campos_512_v4
+15/87917/campos_512_v4
+15/87932/campos_512_v4
+15/88045/campos_512_v4
+15/88053/campos_512_v4
+15/88094/campos_512_v4
+15/88117/campos_512_v4
+15/88176/campos_512_v4
+15/88194/campos_512_v4
+15/88201/campos_512_v4
+15/88209/campos_512_v4
+15/88283/campos_512_v4
+15/88295/campos_512_v4
+15/88322/campos_512_v4
+15/88353/campos_512_v4
+15/88388/campos_512_v4
+15/88389/campos_512_v4
+15/88410/campos_512_v4
+15/88469/campos_512_v4
+15/88488/campos_512_v4
+15/88491/campos_512_v4
+15/88559/campos_512_v4
+15/88585/campos_512_v4
+15/88601/campos_512_v4
+15/88608/campos_512_v4
+15/88609/campos_512_v4
+15/88625/campos_512_v4
+15/88694/campos_512_v4
+15/88696/campos_512_v4
+15/88720/campos_512_v4
+15/88734/campos_512_v4
+15/88742/campos_512_v4
+15/88756/campos_512_v4
+15/88759/campos_512_v4
+15/88763/campos_512_v4
+15/88767/campos_512_v4
+15/88868/campos_512_v4
+15/88931/campos_512_v4
+15/88971/campos_512_v4
+15/88972/campos_512_v4
+15/88976/campos_512_v4
+15/88999/campos_512_v4
+15/89007/campos_512_v4
+15/89035/campos_512_v4
+15/89045/campos_512_v4
+15/89057/campos_512_v4
+15/89075/campos_512_v4
+15/89121/campos_512_v4
+15/89131/campos_512_v4
+15/89136/campos_512_v4
+15/89159/campos_512_v4
+15/89160/campos_512_v4
+15/89164/campos_512_v4
+15/89177/campos_512_v4
+15/89181/campos_512_v4
+15/89192/campos_512_v4
+15/89196/campos_512_v4
+15/89276/campos_512_v4
+15/89304/campos_512_v4
+15/89354/campos_512_v4
+15/89363/campos_512_v4
+15/89364/campos_512_v4
+15/89381/campos_512_v4
+15/89391/campos_512_v4
+15/89406/campos_512_v4
+15/89452/campos_512_v4
+15/89468/campos_512_v4
+15/89490/campos_512_v4
+15/89518/campos_512_v4
+15/89566/campos_512_v4
+15/89585/campos_512_v4
+15/89628/campos_512_v4
+15/89688/campos_512_v4
+15/89693/campos_512_v4
+15/89704/campos_512_v4
+15/89739/campos_512_v4
+15/89769/campos_512_v4
+15/89777/campos_512_v4
+15/89800/campos_512_v4
+15/89829/campos_512_v4
+15/89835/campos_512_v4
+15/89886/campos_512_v4
+15/89897/campos_512_v4
+15/89899/campos_512_v4
+15/89910/campos_512_v4
+15/89931/campos_512_v4
+15/89953/campos_512_v4
+15/89959/campos_512_v4
+15/89961/campos_512_v4
+150/760163/campos_512_v4
+150/760174/campos_512_v4
+150/760184/campos_512_v4
+150/760202/campos_512_v4
+150/760244/campos_512_v4
+150/760270/campos_512_v4
+150/760324/campos_512_v4
+150/760405/campos_512_v4
+150/760409/campos_512_v4
+150/760415/campos_512_v4
+150/760429/campos_512_v4
+150/760437/campos_512_v4
+150/760485/campos_512_v4
+150/760512/campos_512_v4
+150/760513/campos_512_v4
+150/760533/campos_512_v4
+150/760546/campos_512_v4
+150/760547/campos_512_v4
+150/760609/campos_512_v4
+150/760611/campos_512_v4
+150/760620/campos_512_v4
+150/760652/campos_512_v4
+150/760665/campos_512_v4
+150/760687/campos_512_v4
+150/760706/campos_512_v4
+150/760809/campos_512_v4
+150/760853/campos_512_v4
+150/760858/campos_512_v4
+150/760863/campos_512_v4
+150/760934/campos_512_v4
+150/760942/campos_512_v4
+150/760944/campos_512_v4
+150/760964/campos_512_v4
+150/760974/campos_512_v4
+150/760988/campos_512_v4
+150/760997/campos_512_v4
+150/761016/campos_512_v4
+150/761031/campos_512_v4
+150/761032/campos_512_v4
+150/761056/campos_512_v4
+150/761057/campos_512_v4
+150/761133/campos_512_v4
+150/761135/campos_512_v4
+150/761142/campos_512_v4
+150/761165/campos_512_v4
+150/761175/campos_512_v4
+150/761205/campos_512_v4
+150/761210/campos_512_v4
+150/761231/campos_512_v4
+150/761284/campos_512_v4
+150/761288/campos_512_v4
+150/761327/campos_512_v4
+150/761332/campos_512_v4
+150/761339/campos_512_v4
+150/761341/campos_512_v4
+150/761403/campos_512_v4
+150/761432/campos_512_v4
+150/761515/campos_512_v4
+150/761526/campos_512_v4
+150/761532/campos_512_v4
+150/761584/campos_512_v4
+150/761615/campos_512_v4
+150/761703/campos_512_v4
+150/761717/campos_512_v4
+150/761732/campos_512_v4
+150/761736/campos_512_v4
+150/761764/campos_512_v4
+150/761766/campos_512_v4
+150/761875/campos_512_v4
+150/761882/campos_512_v4
+150/761892/campos_512_v4
+150/761896/campos_512_v4
+150/761917/campos_512_v4
+150/761927/campos_512_v4
+150/761940/campos_512_v4
+150/761959/campos_512_v4
+150/761988/campos_512_v4
+150/762044/campos_512_v4
+150/762055/campos_512_v4
+150/762061/campos_512_v4
+150/762080/campos_512_v4
+150/762151/campos_512_v4
+150/762241/campos_512_v4
+150/762260/campos_512_v4
+150/762292/campos_512_v4
+150/762315/campos_512_v4
+150/762318/campos_512_v4
+150/762322/campos_512_v4
+150/762338/campos_512_v4
+150/762363/campos_512_v4
+150/762366/campos_512_v4
+150/762391/campos_512_v4
+150/762407/campos_512_v4
+150/762446/campos_512_v4
+150/762465/campos_512_v4
+150/762529/campos_512_v4
+150/762580/campos_512_v4
+150/762581/campos_512_v4
+150/762673/campos_512_v4
+150/762735/campos_512_v4
+150/762751/campos_512_v4
+150/762811/campos_512_v4
+150/762812/campos_512_v4
+150/762852/campos_512_v4
+150/762986/campos_512_v4
+150/763007/campos_512_v4
+150/763009/campos_512_v4
+150/763013/campos_512_v4
+150/763018/campos_512_v4
+150/763114/campos_512_v4
+150/763118/campos_512_v4
+150/763121/campos_512_v4
+150/763126/campos_512_v4
+150/763131/campos_512_v4
+150/763154/campos_512_v4
+150/763164/campos_512_v4
+150/763167/campos_512_v4
+150/763276/campos_512_v4
+150/763280/campos_512_v4
+150/763293/campos_512_v4
+150/763347/campos_512_v4
+150/763387/campos_512_v4
+150/763414/campos_512_v4
+150/763440/campos_512_v4
+150/763486/campos_512_v4
+150/763513/campos_512_v4
+150/763515/campos_512_v4
+150/763528/campos_512_v4
+150/763563/campos_512_v4
+150/763645/campos_512_v4
+150/763656/campos_512_v4
+150/763680/campos_512_v4
+150/763715/campos_512_v4
+150/763815/campos_512_v4
+150/763824/campos_512_v4
+150/763895/campos_512_v4
+150/763911/campos_512_v4
+150/763925/campos_512_v4
+150/763976/campos_512_v4
+150/764025/campos_512_v4
+150/764029/campos_512_v4
+150/764152/campos_512_v4
+150/764188/campos_512_v4
+150/764199/campos_512_v4
+150/764210/campos_512_v4
+150/764218/campos_512_v4
+150/764245/campos_512_v4
+150/764272/campos_512_v4
+150/764279/campos_512_v4
+150/764300/campos_512_v4
+150/764316/campos_512_v4
+150/764317/campos_512_v4
+150/764358/campos_512_v4
+150/764369/campos_512_v4
+150/764395/campos_512_v4
+150/764411/campos_512_v4
+150/764426/campos_512_v4
+150/764429/campos_512_v4
+150/764460/campos_512_v4
+150/764471/campos_512_v4
+150/764633/campos_512_v4
+150/764665/campos_512_v4
+150/764685/campos_512_v4
+150/764688/campos_512_v4
+150/764710/campos_512_v4
+150/764718/campos_512_v4
+150/764789/campos_512_v4
+150/764842/campos_512_v4
+150/764855/campos_512_v4
+150/764861/campos_512_v4
+150/764949/campos_512_v4
+150/764978/campos_512_v4
+150/764990/campos_512_v4
+150/764999/campos_512_v4
+151/765023/campos_512_v4
+151/765031/campos_512_v4
+151/765100/campos_512_v4
+151/765170/campos_512_v4
+151/765193/campos_512_v4
+151/765251/campos_512_v4
+151/765266/campos_512_v4
+151/765284/campos_512_v4
+151/765300/campos_512_v4
+151/765302/campos_512_v4
+151/765324/campos_512_v4
+151/765357/campos_512_v4
+151/765359/campos_512_v4
+151/765363/campos_512_v4
+151/765404/campos_512_v4
+151/765435/campos_512_v4
+151/765460/campos_512_v4
+151/765523/campos_512_v4
+151/765568/campos_512_v4
+151/765587/campos_512_v4
+151/765592/campos_512_v4
+151/765616/campos_512_v4
+151/765644/campos_512_v4
+151/765669/campos_512_v4
+151/765702/campos_512_v4
+151/765854/campos_512_v4
+151/765907/campos_512_v4
+151/765910/campos_512_v4
+151/765950/campos_512_v4
+151/765992/campos_512_v4
+151/765993/campos_512_v4
+151/766000/campos_512_v4
+151/766063/campos_512_v4
+151/766092/campos_512_v4
+151/766107/campos_512_v4
+151/766132/campos_512_v4
+151/766167/campos_512_v4
+151/766201/campos_512_v4
+151/766209/campos_512_v4
+151/766279/campos_512_v4
+151/766359/campos_512_v4
+151/766415/campos_512_v4
+151/766418/campos_512_v4
+151/766433/campos_512_v4
+151/766466/campos_512_v4
+151/766480/campos_512_v4
+151/766616/campos_512_v4
+151/766633/campos_512_v4
+151/766661/campos_512_v4
+151/766698/campos_512_v4
+151/766709/campos_512_v4
+151/766752/campos_512_v4
+151/766759/campos_512_v4
+151/766867/campos_512_v4
+151/766872/campos_512_v4
+151/766959/campos_512_v4
+151/767073/campos_512_v4
+151/767102/campos_512_v4
+151/767196/campos_512_v4
+151/767259/campos_512_v4
+151/767281/campos_512_v4
+151/767295/campos_512_v4
+151/767316/campos_512_v4
+151/767330/campos_512_v4
+151/767572/campos_512_v4
+151/767623/campos_512_v4
+151/767628/campos_512_v4
+151/767765/campos_512_v4
+151/767782/campos_512_v4
+151/767835/campos_512_v4
+151/767838/campos_512_v4
+151/767840/campos_512_v4
+151/767848/campos_512_v4
+151/767864/campos_512_v4
+151/767907/campos_512_v4
+151/767916/campos_512_v4
+151/767919/campos_512_v4
+151/767956/campos_512_v4
+151/767999/campos_512_v4
+151/768017/campos_512_v4
+151/768032/campos_512_v4
+151/768036/campos_512_v4
+151/768057/campos_512_v4
+151/768086/campos_512_v4
+151/768103/campos_512_v4
+151/768104/campos_512_v4
+151/768173/campos_512_v4
+151/768198/campos_512_v4
+151/768200/campos_512_v4
+151/768201/campos_512_v4
+151/768209/campos_512_v4
+151/768254/campos_512_v4
+151/768269/campos_512_v4
+151/768320/campos_512_v4
+151/768349/campos_512_v4
+151/768364/campos_512_v4
+151/768426/campos_512_v4
+151/768451/campos_512_v4
+151/768534/campos_512_v4
+151/768541/campos_512_v4
+151/768546/campos_512_v4
+151/768550/campos_512_v4
+151/768569/campos_512_v4
+151/768621/campos_512_v4
+151/768623/campos_512_v4
+151/768634/campos_512_v4
+151/768717/campos_512_v4
+151/768756/campos_512_v4
+151/768769/campos_512_v4
+151/768833/campos_512_v4
+151/768894/campos_512_v4
+151/768917/campos_512_v4
+151/768928/campos_512_v4
+151/768978/campos_512_v4
+151/768982/campos_512_v4
+151/768997/campos_512_v4
+151/769002/campos_512_v4
+151/769007/campos_512_v4
+151/769088/campos_512_v4
+151/769108/campos_512_v4
+151/769118/campos_512_v4
+151/769191/campos_512_v4
+151/769212/campos_512_v4
+151/769228/campos_512_v4
+151/769252/campos_512_v4
+151/769272/campos_512_v4
+151/769285/campos_512_v4
+151/769299/campos_512_v4
+151/769325/campos_512_v4
+151/769332/campos_512_v4
+151/769333/campos_512_v4
+151/769395/campos_512_v4
+151/769415/campos_512_v4
+151/769431/campos_512_v4
+151/769478/campos_512_v4
+151/769479/campos_512_v4
+151/769541/campos_512_v4
+151/769545/campos_512_v4
+151/769555/campos_512_v4
+151/769661/campos_512_v4
+151/769680/campos_512_v4
+151/769697/campos_512_v4
+151/769720/campos_512_v4
+151/769767/campos_512_v4
+151/769768/campos_512_v4
+151/769778/campos_512_v4
+151/769809/campos_512_v4
+151/769825/campos_512_v4
+151/769828/campos_512_v4
+151/769878/campos_512_v4
+151/769879/campos_512_v4
+151/769884/campos_512_v4
+151/769908/campos_512_v4
+151/769920/campos_512_v4
+151/769942/campos_512_v4
+152/770014/campos_512_v4
+152/770024/campos_512_v4
+152/770082/campos_512_v4
+152/770106/campos_512_v4
+152/770129/campos_512_v4
+152/770150/campos_512_v4
+152/770164/campos_512_v4
+152/770203/campos_512_v4
+152/770205/campos_512_v4
+152/770235/campos_512_v4
+152/770244/campos_512_v4
+152/770256/campos_512_v4
+152/770383/campos_512_v4
+152/770398/campos_512_v4
+152/770431/campos_512_v4
+152/770446/campos_512_v4
+152/770466/campos_512_v4
+152/770536/campos_512_v4
+152/770575/campos_512_v4
+152/770587/campos_512_v4
+152/770701/campos_512_v4
+152/770719/campos_512_v4
+152/770728/campos_512_v4
+152/770746/campos_512_v4
+152/770821/campos_512_v4
+152/770865/campos_512_v4
+152/770898/campos_512_v4
+152/770935/campos_512_v4
+152/770942/campos_512_v4
+152/770968/campos_512_v4
+152/770976/campos_512_v4
+152/771025/campos_512_v4
+152/771053/campos_512_v4
+152/771118/campos_512_v4
+152/771135/campos_512_v4
+152/771158/campos_512_v4
+152/771209/campos_512_v4
+152/771235/campos_512_v4
+152/771283/campos_512_v4
+152/771285/campos_512_v4
+152/771350/campos_512_v4
+152/771404/campos_512_v4
+152/771409/campos_512_v4
+152/771444/campos_512_v4
+152/771470/campos_512_v4
+152/771480/campos_512_v4
+152/771496/campos_512_v4
+152/771530/campos_512_v4
+152/771542/campos_512_v4
+152/771572/campos_512_v4
+152/771610/campos_512_v4
+152/771623/campos_512_v4
+152/771665/campos_512_v4
+152/771673/campos_512_v4
+152/771681/campos_512_v4
+152/771711/campos_512_v4
+152/771733/campos_512_v4
+152/771757/campos_512_v4
+152/771784/campos_512_v4
+152/771791/campos_512_v4
+152/771797/campos_512_v4
+152/771809/campos_512_v4
+152/771826/campos_512_v4
+152/771861/campos_512_v4
+152/771903/campos_512_v4
+152/771913/campos_512_v4
+152/771941/campos_512_v4
+152/771961/campos_512_v4
+152/771971/campos_512_v4
+152/772010/campos_512_v4
+152/772029/campos_512_v4
+152/772051/campos_512_v4
+152/772174/campos_512_v4
+152/772190/campos_512_v4
+152/772204/campos_512_v4
+152/772225/campos_512_v4
+152/772229/campos_512_v4
+152/772262/campos_512_v4
+152/772304/campos_512_v4
+152/772305/campos_512_v4
+152/772352/campos_512_v4
+152/772355/campos_512_v4
+152/772400/campos_512_v4
+152/772452/campos_512_v4
+152/772482/campos_512_v4
+152/772489/campos_512_v4
+152/772536/campos_512_v4
+152/772537/campos_512_v4
+152/772542/campos_512_v4
+152/772548/campos_512_v4
+152/772578/campos_512_v4
+152/772580/campos_512_v4
+152/772619/campos_512_v4
+152/772622/campos_512_v4
+152/772625/campos_512_v4
+152/772656/campos_512_v4
+152/772661/campos_512_v4
+152/772708/campos_512_v4
+152/772717/campos_512_v4
+152/772736/campos_512_v4
+152/772838/campos_512_v4
+152/772865/campos_512_v4
+152/772906/campos_512_v4
+152/772908/campos_512_v4
+152/772913/campos_512_v4
+152/772946/campos_512_v4
+152/772956/campos_512_v4
+152/772970/campos_512_v4
+152/772980/campos_512_v4
+152/772987/campos_512_v4
+152/773002/campos_512_v4
+152/773046/campos_512_v4
+152/773136/campos_512_v4
+152/773140/campos_512_v4
+152/773166/campos_512_v4
+152/773185/campos_512_v4
+152/773188/campos_512_v4
+152/773234/campos_512_v4
+152/773291/campos_512_v4
+152/773314/campos_512_v4
+152/773354/campos_512_v4
+152/773361/campos_512_v4
+152/773365/campos_512_v4
+152/773409/campos_512_v4
+152/773463/campos_512_v4
+152/773499/campos_512_v4
+152/773521/campos_512_v4
+152/773527/campos_512_v4
+152/773592/campos_512_v4
+152/773638/campos_512_v4
+152/773660/campos_512_v4
+152/773663/campos_512_v4
+152/773678/campos_512_v4
+152/773721/campos_512_v4
+152/773722/campos_512_v4
+152/773723/campos_512_v4
+152/773724/campos_512_v4
+152/773780/campos_512_v4
+152/773782/campos_512_v4
+152/773999/campos_512_v4
+152/774015/campos_512_v4
+152/774082/campos_512_v4
+152/774124/campos_512_v4
+152/774149/campos_512_v4
+152/774183/campos_512_v4
+152/774274/campos_512_v4
+152/774374/campos_512_v4
+152/774397/campos_512_v4
+152/774434/campos_512_v4
+152/774438/campos_512_v4
+152/774440/campos_512_v4
+152/774502/campos_512_v4
+152/774507/campos_512_v4
+152/774518/campos_512_v4
+152/774604/campos_512_v4
+152/774614/campos_512_v4
+152/774665/campos_512_v4
+152/774668/campos_512_v4
+152/774671/campos_512_v4
+152/774689/campos_512_v4
+152/774779/campos_512_v4
+152/774790/campos_512_v4
+152/774811/campos_512_v4
+152/774874/campos_512_v4
+152/774906/campos_512_v4
+152/774985/campos_512_v4
+153/775003/campos_512_v4
+153/775010/campos_512_v4
+153/775060/campos_512_v4
+153/775065/campos_512_v4
+153/775098/campos_512_v4
+153/775134/campos_512_v4
+153/775143/campos_512_v4
+153/775166/campos_512_v4
+153/775324/campos_512_v4
+153/775395/campos_512_v4
+153/775403/campos_512_v4
+153/775404/campos_512_v4
+153/775418/campos_512_v4
+153/775446/campos_512_v4
+153/775455/campos_512_v4
+153/775492/campos_512_v4
+153/775498/campos_512_v4
+153/775586/campos_512_v4
+153/775608/campos_512_v4
+153/775665/campos_512_v4
+153/775668/campos_512_v4
+153/775695/campos_512_v4
+153/775707/campos_512_v4
+153/775756/campos_512_v4
+153/775773/campos_512_v4
+153/775792/campos_512_v4
+153/775812/campos_512_v4
+153/775820/campos_512_v4
+153/775843/campos_512_v4
+153/775862/campos_512_v4
+153/775869/campos_512_v4
+153/775901/campos_512_v4
+153/775935/campos_512_v4
+153/776002/campos_512_v4
+153/776040/campos_512_v4
+153/776050/campos_512_v4
+153/776077/campos_512_v4
+153/776130/campos_512_v4
+153/776156/campos_512_v4
+153/776164/campos_512_v4
+153/776178/campos_512_v4
+153/776209/campos_512_v4
+153/776240/campos_512_v4
+153/776291/campos_512_v4
+153/776323/campos_512_v4
+153/776366/campos_512_v4
+153/776367/campos_512_v4
+153/776383/campos_512_v4
+153/776431/campos_512_v4
+153/776510/campos_512_v4
+153/776518/campos_512_v4
+153/776550/campos_512_v4
+153/776566/campos_512_v4
+153/776649/campos_512_v4
+153/776650/campos_512_v4
+153/776664/campos_512_v4
+153/776676/campos_512_v4
+153/776726/campos_512_v4
+153/776738/campos_512_v4
+153/776744/campos_512_v4
+153/776756/campos_512_v4
+153/776781/campos_512_v4
+153/776782/campos_512_v4
+153/776786/campos_512_v4
+153/776801/campos_512_v4
+153/776885/campos_512_v4
+153/776925/campos_512_v4
+153/776931/campos_512_v4
+153/776952/campos_512_v4
+153/776980/campos_512_v4
+153/776985/campos_512_v4
+153/777003/campos_512_v4
+153/777007/campos_512_v4
+153/777057/campos_512_v4
+153/777072/campos_512_v4
+153/777084/campos_512_v4
+153/777121/campos_512_v4
+153/777163/campos_512_v4
+153/777168/campos_512_v4
+153/777203/campos_512_v4
+153/777305/campos_512_v4
+153/777316/campos_512_v4
+153/777346/campos_512_v4
+153/777508/campos_512_v4
+153/777509/campos_512_v4
+153/777673/campos_512_v4
+153/777729/campos_512_v4
+153/777738/campos_512_v4
+153/777751/campos_512_v4
+153/777770/campos_512_v4
+153/777775/campos_512_v4
+153/777849/campos_512_v4
+153/777867/campos_512_v4
+153/777873/campos_512_v4
+153/777877/campos_512_v4
+153/777900/campos_512_v4
+153/777965/campos_512_v4
+153/777983/campos_512_v4
+153/778074/campos_512_v4
+153/778140/campos_512_v4
+153/778166/campos_512_v4
+153/778192/campos_512_v4
+153/778258/campos_512_v4
+153/778285/campos_512_v4
+153/778294/campos_512_v4
+153/778318/campos_512_v4
+153/778331/campos_512_v4
+153/778355/campos_512_v4
+153/778401/campos_512_v4
+153/778402/campos_512_v4
+153/778411/campos_512_v4
+153/778413/campos_512_v4
+153/778420/campos_512_v4
+153/778445/campos_512_v4
+153/778451/campos_512_v4
+153/778464/campos_512_v4
+153/778483/campos_512_v4
+153/778496/campos_512_v4
+153/778554/campos_512_v4
+153/778574/campos_512_v4
+153/778594/campos_512_v4
+153/778621/campos_512_v4
+153/778715/campos_512_v4
+153/778741/campos_512_v4
+153/778748/campos_512_v4
+153/778800/campos_512_v4
+153/778840/campos_512_v4
+153/778845/campos_512_v4
+153/778878/campos_512_v4
+153/778903/campos_512_v4
+153/778958/campos_512_v4
+153/779058/campos_512_v4
+153/779266/campos_512_v4
+153/779385/campos_512_v4
+153/779406/campos_512_v4
+153/779410/campos_512_v4
+153/779451/campos_512_v4
+153/779458/campos_512_v4
+153/779494/campos_512_v4
+153/779528/campos_512_v4
+153/779532/campos_512_v4
+153/779537/campos_512_v4
+153/779556/campos_512_v4
+153/779562/campos_512_v4
+153/779564/campos_512_v4
+153/779569/campos_512_v4
+153/779598/campos_512_v4
+153/779615/campos_512_v4
+153/779619/campos_512_v4
+153/779654/campos_512_v4
+153/779755/campos_512_v4
+153/779757/campos_512_v4
+153/779777/campos_512_v4
+153/779779/campos_512_v4
+153/779792/campos_512_v4
+153/779822/campos_512_v4
+153/779867/campos_512_v4
+153/779869/campos_512_v4
+153/779876/campos_512_v4
+153/779928/campos_512_v4
+154/780010/campos_512_v4
+154/780052/campos_512_v4
+154/780081/campos_512_v4
+154/780154/campos_512_v4
+154/780188/campos_512_v4
+154/780191/campos_512_v4
+154/780230/campos_512_v4
+154/780258/campos_512_v4
+154/780295/campos_512_v4
+154/780323/campos_512_v4
+154/780338/campos_512_v4
+154/780380/campos_512_v4
+154/780410/campos_512_v4
+154/780447/campos_512_v4
+154/780463/campos_512_v4
+154/780518/campos_512_v4
+154/780586/campos_512_v4
+154/780634/campos_512_v4
+154/780639/campos_512_v4
+154/780678/campos_512_v4
+154/780696/campos_512_v4
+154/780720/campos_512_v4
+154/780742/campos_512_v4
+154/780754/campos_512_v4
+154/780795/campos_512_v4
+154/780803/campos_512_v4
+154/780854/campos_512_v4
+154/780875/campos_512_v4
+154/780881/campos_512_v4
+154/780882/campos_512_v4
+154/780952/campos_512_v4
+154/780961/campos_512_v4
+154/780971/campos_512_v4
+154/781039/campos_512_v4
+154/781061/campos_512_v4
+154/781062/campos_512_v4
+154/781088/campos_512_v4
+154/781159/campos_512_v4
+154/781173/campos_512_v4
+154/781195/campos_512_v4
+154/781243/campos_512_v4
+154/781281/campos_512_v4
+154/781302/campos_512_v4
+154/781364/campos_512_v4
+154/781373/campos_512_v4
+154/781381/campos_512_v4
+154/781395/campos_512_v4
+154/781407/campos_512_v4
+154/781459/campos_512_v4
+154/781468/campos_512_v4
+154/781517/campos_512_v4
+154/781533/campos_512_v4
+154/781593/campos_512_v4
+154/781624/campos_512_v4
+154/781636/campos_512_v4
+154/781681/campos_512_v4
+154/781698/campos_512_v4
+154/781790/campos_512_v4
+154/781836/campos_512_v4
+154/781864/campos_512_v4
+154/781917/campos_512_v4
+154/781923/campos_512_v4
+154/781961/campos_512_v4
+154/781997/campos_512_v4
+154/782004/campos_512_v4
+154/782069/campos_512_v4
+154/782079/campos_512_v4
+154/782087/campos_512_v4
+154/782106/campos_512_v4
+154/782118/campos_512_v4
+154/782121/campos_512_v4
+154/782143/campos_512_v4
+154/782179/campos_512_v4
+154/782188/campos_512_v4
+154/782219/campos_512_v4
+154/782221/campos_512_v4
+154/782225/campos_512_v4
+154/782247/campos_512_v4
+154/782270/campos_512_v4
+154/782338/campos_512_v4
+154/782370/campos_512_v4
+154/782398/campos_512_v4
+154/782421/campos_512_v4
+154/782481/campos_512_v4
+154/782561/campos_512_v4
+154/782595/campos_512_v4
+154/782691/campos_512_v4
+154/782698/campos_512_v4
+154/782707/campos_512_v4
+154/782738/campos_512_v4
+154/782739/campos_512_v4
+154/782764/campos_512_v4
+154/782769/campos_512_v4
+154/782810/campos_512_v4
+154/782835/campos_512_v4
+154/782870/campos_512_v4
+154/782917/campos_512_v4
+154/782924/campos_512_v4
+154/782942/campos_512_v4
+154/782947/campos_512_v4
+154/782975/campos_512_v4
+154/782979/campos_512_v4
+154/783045/campos_512_v4
+154/783054/campos_512_v4
+154/783061/campos_512_v4
+154/783129/campos_512_v4
+154/783140/campos_512_v4
+154/783146/campos_512_v4
+154/783154/campos_512_v4
+154/783176/campos_512_v4
+154/783207/campos_512_v4
+154/783218/campos_512_v4
+154/783297/campos_512_v4
+154/783321/campos_512_v4
+154/783336/campos_512_v4
+154/783344/campos_512_v4
+154/783350/campos_512_v4
+154/783388/campos_512_v4
+154/783408/campos_512_v4
+154/783413/campos_512_v4
+154/783428/campos_512_v4
+154/783451/campos_512_v4
+154/783471/campos_512_v4
+154/783479/campos_512_v4
+154/783530/campos_512_v4
+154/783622/campos_512_v4
+154/783657/campos_512_v4
+154/783669/campos_512_v4
+154/783678/campos_512_v4
+154/783707/campos_512_v4
+154/783709/campos_512_v4
+154/783734/campos_512_v4
+154/783737/campos_512_v4
+154/783767/campos_512_v4
+154/783862/campos_512_v4
+154/783932/campos_512_v4
+154/783940/campos_512_v4
+154/783945/campos_512_v4
+154/783976/campos_512_v4
+154/784000/campos_512_v4
+154/784083/campos_512_v4
+154/784087/campos_512_v4
+154/784151/campos_512_v4
+154/784253/campos_512_v4
+154/784319/campos_512_v4
+154/784354/campos_512_v4
+154/784393/campos_512_v4
+154/784399/campos_512_v4
+154/784415/campos_512_v4
+154/784532/campos_512_v4
+154/784603/campos_512_v4
+154/784611/campos_512_v4
+154/784642/campos_512_v4
+154/784660/campos_512_v4
+154/784674/campos_512_v4
+154/784680/campos_512_v4
+154/784690/campos_512_v4
+154/784696/campos_512_v4
+154/784699/campos_512_v4
+154/784703/campos_512_v4
+154/784744/campos_512_v4
+154/784785/campos_512_v4
+154/784846/campos_512_v4
+154/784871/campos_512_v4
+154/784888/campos_512_v4
+154/784895/campos_512_v4
+154/784910/campos_512_v4
+154/784924/campos_512_v4
+154/784947/campos_512_v4
+154/784962/campos_512_v4
+155/785061/campos_512_v4
+155/785075/campos_512_v4
+155/785171/campos_512_v4
+155/785185/campos_512_v4
+155/785231/campos_512_v4
+155/785238/campos_512_v4
+155/785322/campos_512_v4
+155/785338/campos_512_v4
+155/785342/campos_512_v4
+155/785364/campos_512_v4
+155/785377/campos_512_v4
+155/785432/campos_512_v4
+155/785448/campos_512_v4
+155/785590/campos_512_v4
+155/785605/campos_512_v4
+155/785608/campos_512_v4
+155/785630/campos_512_v4
+155/785639/campos_512_v4
+155/785640/campos_512_v4
+155/785671/campos_512_v4
+155/785697/campos_512_v4
+155/785709/campos_512_v4
+155/785748/campos_512_v4
+155/785755/campos_512_v4
+155/785788/campos_512_v4
+155/785841/campos_512_v4
+155/785901/campos_512_v4
+155/785938/campos_512_v4
+155/785944/campos_512_v4
+155/786080/campos_512_v4
+155/786104/campos_512_v4
+155/786113/campos_512_v4
+155/786191/campos_512_v4
+155/786206/campos_512_v4
+155/786209/campos_512_v4
+155/786224/campos_512_v4
+155/786237/campos_512_v4
+155/786247/campos_512_v4
+155/786263/campos_512_v4
+155/786282/campos_512_v4
+155/786284/campos_512_v4
+155/786288/campos_512_v4
+155/786294/campos_512_v4
+155/786317/campos_512_v4
+155/786398/campos_512_v4
+155/786427/campos_512_v4
+155/786464/campos_512_v4
+155/786523/campos_512_v4
+155/786542/campos_512_v4
+155/786555/campos_512_v4
+155/786577/campos_512_v4
+155/786606/campos_512_v4
+155/786759/campos_512_v4
+155/786822/campos_512_v4
+155/786830/campos_512_v4
+155/786844/campos_512_v4
+155/786907/campos_512_v4
+155/786943/campos_512_v4
+155/786967/campos_512_v4
+155/786969/campos_512_v4
+155/786972/campos_512_v4
+155/787074/campos_512_v4
+155/787084/campos_512_v4
+155/787104/campos_512_v4
+155/787154/campos_512_v4
+155/787156/campos_512_v4
+155/787240/campos_512_v4
+155/787294/campos_512_v4
+155/787312/campos_512_v4
+155/787363/campos_512_v4
+155/787392/campos_512_v4
+155/787397/campos_512_v4
+155/787407/campos_512_v4
+155/787412/campos_512_v4
+155/787442/campos_512_v4
+155/787456/campos_512_v4
+155/787457/campos_512_v4
+155/787529/campos_512_v4
+155/787696/campos_512_v4
+155/787706/campos_512_v4
+155/787711/campos_512_v4
+155/787732/campos_512_v4
+155/787738/campos_512_v4
+155/787773/campos_512_v4
+155/787788/campos_512_v4
+155/787889/campos_512_v4
+155/787907/campos_512_v4
+155/787989/campos_512_v4
+155/788054/campos_512_v4
+155/788097/campos_512_v4
+155/788137/campos_512_v4
+155/788182/campos_512_v4
+155/788304/campos_512_v4
+155/788316/campos_512_v4
+155/788320/campos_512_v4
+155/788336/campos_512_v4
+155/788360/campos_512_v4
+155/788369/campos_512_v4
+155/788389/campos_512_v4
+155/788446/campos_512_v4
+155/788514/campos_512_v4
+155/788564/campos_512_v4
+155/788599/campos_512_v4
+155/788600/campos_512_v4
+155/788601/campos_512_v4
+155/788619/campos_512_v4
+155/788659/campos_512_v4
+155/788706/campos_512_v4
+155/788737/campos_512_v4
+155/788751/campos_512_v4
+155/788776/campos_512_v4
+155/788824/campos_512_v4
+155/788836/campos_512_v4
+155/788850/campos_512_v4
+155/788885/campos_512_v4
+155/788896/campos_512_v4
+155/788948/campos_512_v4
+155/788974/campos_512_v4
+155/789043/campos_512_v4
+155/789111/campos_512_v4
+155/789143/campos_512_v4
+155/789203/campos_512_v4
+155/789237/campos_512_v4
+155/789238/campos_512_v4
+155/789250/campos_512_v4
+155/789268/campos_512_v4
+155/789298/campos_512_v4
+155/789332/campos_512_v4
+155/789357/campos_512_v4
+155/789432/campos_512_v4
+155/789434/campos_512_v4
+155/789439/campos_512_v4
+155/789516/campos_512_v4
+155/789534/campos_512_v4
+155/789550/campos_512_v4
+155/789581/campos_512_v4
+155/789582/campos_512_v4
+155/789686/campos_512_v4
+155/789710/campos_512_v4
+155/789747/campos_512_v4
+155/789895/campos_512_v4
+155/789910/campos_512_v4
+155/789914/campos_512_v4
+155/789929/campos_512_v4
+155/789943/campos_512_v4
+155/789963/campos_512_v4
+156/790115/campos_512_v4
+156/790118/campos_512_v4
+156/790121/campos_512_v4
+156/790186/campos_512_v4
+156/790211/campos_512_v4
+156/790246/campos_512_v4
+156/790279/campos_512_v4
+156/790290/campos_512_v4
+156/790399/campos_512_v4
+156/790452/campos_512_v4
+156/790483/campos_512_v4
+156/790500/campos_512_v4
+156/790543/campos_512_v4
+156/790544/campos_512_v4
+156/790566/campos_512_v4
+156/790575/campos_512_v4
+156/790598/campos_512_v4
+156/790605/campos_512_v4
+156/790628/campos_512_v4
+156/790634/campos_512_v4
+156/790748/campos_512_v4
+156/790753/campos_512_v4
+156/790805/campos_512_v4
+156/790841/campos_512_v4
+156/790843/campos_512_v4
+156/790884/campos_512_v4
+156/790944/campos_512_v4
+156/790954/campos_512_v4
+156/790975/campos_512_v4
+156/790996/campos_512_v4
+156/791043/campos_512_v4
+156/791056/campos_512_v4
+156/791168/campos_512_v4
+156/791182/campos_512_v4
+156/791200/campos_512_v4
+156/791284/campos_512_v4
+156/791381/campos_512_v4
+156/791382/campos_512_v4
+156/791435/campos_512_v4
+156/791454/campos_512_v4
+156/791517/campos_512_v4
+156/791561/campos_512_v4
+156/791637/campos_512_v4
+156/791650/campos_512_v4
+156/791654/campos_512_v4
+156/791827/campos_512_v4
+156/791854/campos_512_v4
+156/791873/campos_512_v4
+156/791885/campos_512_v4
+156/791890/campos_512_v4
+156/791931/campos_512_v4
+156/791959/campos_512_v4
+156/791969/campos_512_v4
+156/792055/campos_512_v4
+156/792059/campos_512_v4
+156/792085/campos_512_v4
+156/792102/campos_512_v4
+156/792114/campos_512_v4
+156/792121/campos_512_v4
+156/792142/campos_512_v4
+156/792174/campos_512_v4
+156/792181/campos_512_v4
+156/792195/campos_512_v4
+156/792238/campos_512_v4
+156/792239/campos_512_v4
+156/792268/campos_512_v4
+156/792341/campos_512_v4
+156/792387/campos_512_v4
+156/792390/campos_512_v4
+156/792408/campos_512_v4
+156/792496/campos_512_v4
+156/792524/campos_512_v4
+156/792540/campos_512_v4
+156/792604/campos_512_v4
+156/792623/campos_512_v4
+156/792640/campos_512_v4
+156/792679/campos_512_v4
+156/792711/campos_512_v4
+156/792735/campos_512_v4
+156/792754/campos_512_v4
+156/792789/campos_512_v4
+156/792867/campos_512_v4
+156/792914/campos_512_v4
+156/792949/campos_512_v4
+156/793033/campos_512_v4
+156/793099/campos_512_v4
+156/793129/campos_512_v4
+156/793153/campos_512_v4
+156/793244/campos_512_v4
+156/793273/campos_512_v4
+156/793281/campos_512_v4
+156/793323/campos_512_v4
+156/793326/campos_512_v4
+156/793334/campos_512_v4
+156/793335/campos_512_v4
+156/793364/campos_512_v4
+156/793376/campos_512_v4
+156/793450/campos_512_v4
+156/793537/campos_512_v4
+156/793593/campos_512_v4
+156/793613/campos_512_v4
+156/793622/campos_512_v4
+156/793628/campos_512_v4
+156/793652/campos_512_v4
+156/793692/campos_512_v4
+156/793705/campos_512_v4
+156/793723/campos_512_v4
+156/793742/campos_512_v4
+156/793797/campos_512_v4
+156/793815/campos_512_v4
+156/793834/campos_512_v4
+156/793876/campos_512_v4
+156/793877/campos_512_v4
+156/793901/campos_512_v4
+156/793911/campos_512_v4
+156/793921/campos_512_v4
+156/793979/campos_512_v4
+156/794022/campos_512_v4
+156/794037/campos_512_v4
+156/794065/campos_512_v4
+156/794074/campos_512_v4
+156/794154/campos_512_v4
+156/794205/campos_512_v4
+156/794246/campos_512_v4
+156/794289/campos_512_v4
+156/794338/campos_512_v4
+156/794377/campos_512_v4
+156/794383/campos_512_v4
+156/794426/campos_512_v4
+156/794465/campos_512_v4
+156/794549/campos_512_v4
+156/794592/campos_512_v4
+156/794596/campos_512_v4
+156/794601/campos_512_v4
+156/794609/campos_512_v4
+156/794626/campos_512_v4
+156/794627/campos_512_v4
+156/794656/campos_512_v4
+156/794703/campos_512_v4
+156/794731/campos_512_v4
+156/794766/campos_512_v4
+156/794773/campos_512_v4
+156/794812/campos_512_v4
+156/794848/campos_512_v4
+156/794932/campos_512_v4
+156/794987/campos_512_v4
+156/794988/campos_512_v4
+157/795009/campos_512_v4
+157/795018/campos_512_v4
+157/795030/campos_512_v4
+157/795065/campos_512_v4
+157/795068/campos_512_v4
+157/795149/campos_512_v4
+157/795208/campos_512_v4
+157/795361/campos_512_v4
+157/795384/campos_512_v4
+157/795430/campos_512_v4
+157/795439/campos_512_v4
+157/795479/campos_512_v4
+157/795482/campos_512_v4
+157/795494/campos_512_v4
+157/795507/campos_512_v4
+157/795523/campos_512_v4
+157/795545/campos_512_v4
+157/795653/campos_512_v4
+157/795695/campos_512_v4
+157/795745/campos_512_v4
+157/795790/campos_512_v4
+157/795854/campos_512_v4
+157/795886/campos_512_v4
+157/795945/campos_512_v4
+157/795959/campos_512_v4
+157/795981/campos_512_v4
+157/796053/campos_512_v4
+157/796069/campos_512_v4
+157/796074/campos_512_v4
+157/796078/campos_512_v4
+157/796102/campos_512_v4
+157/796192/campos_512_v4
+157/796193/campos_512_v4
+157/796242/campos_512_v4
+157/796328/campos_512_v4
+157/796394/campos_512_v4
+157/796413/campos_512_v4
+157/796446/campos_512_v4
+157/796479/campos_512_v4
+157/796549/campos_512_v4
+157/796552/campos_512_v4
+157/796594/campos_512_v4
+157/796605/campos_512_v4
+157/796650/campos_512_v4
+157/796674/campos_512_v4
+157/796679/campos_512_v4
+157/796693/campos_512_v4
+157/796699/campos_512_v4
+157/796714/campos_512_v4
+157/796796/campos_512_v4
+157/796853/campos_512_v4
+157/796867/campos_512_v4
+157/796882/campos_512_v4
+157/796922/campos_512_v4
+157/796988/campos_512_v4
+157/797040/campos_512_v4
+157/797056/campos_512_v4
+157/797070/campos_512_v4
+157/797121/campos_512_v4
+157/797134/campos_512_v4
+157/797162/campos_512_v4
+157/797177/campos_512_v4
+157/797179/campos_512_v4
+157/797209/campos_512_v4
+157/797212/campos_512_v4
+157/797295/campos_512_v4
+157/797325/campos_512_v4
+157/797424/campos_512_v4
+157/797450/campos_512_v4
+157/797558/campos_512_v4
+157/797572/campos_512_v4
+157/797576/campos_512_v4
+157/797601/campos_512_v4
+157/797614/campos_512_v4
+157/797657/campos_512_v4
+157/797717/campos_512_v4
+157/797782/campos_512_v4
+157/797824/campos_512_v4
+157/797833/campos_512_v4
+157/797850/campos_512_v4
+157/797854/campos_512_v4
+157/797949/campos_512_v4
+157/797974/campos_512_v4
+157/797977/campos_512_v4
+157/797985/campos_512_v4
+157/798016/campos_512_v4
+157/798068/campos_512_v4
+157/798069/campos_512_v4
+157/798076/campos_512_v4
+157/798084/campos_512_v4
+157/798086/campos_512_v4
+157/798111/campos_512_v4
+157/798134/campos_512_v4
+157/798257/campos_512_v4
+157/798338/campos_512_v4
+157/798343/campos_512_v4
+157/798350/campos_512_v4
+157/798361/campos_512_v4
+157/798401/campos_512_v4
+157/798439/campos_512_v4
+157/798517/campos_512_v4
+157/798526/campos_512_v4
+157/798545/campos_512_v4
+157/798626/campos_512_v4
+157/798791/campos_512_v4
+157/798803/campos_512_v4
+157/798841/campos_512_v4
+157/798862/campos_512_v4
+157/798913/campos_512_v4
+157/798944/campos_512_v4
+157/798998/campos_512_v4
+157/799049/campos_512_v4
+157/799057/campos_512_v4
+157/799088/campos_512_v4
+157/799104/campos_512_v4
+157/799159/campos_512_v4
+157/799196/campos_512_v4
+157/799219/campos_512_v4
+157/799265/campos_512_v4
+157/799290/campos_512_v4
+157/799300/campos_512_v4
+157/799327/campos_512_v4
+157/799508/campos_512_v4
+157/799515/campos_512_v4
+157/799522/campos_512_v4
+157/799538/campos_512_v4
+157/799589/campos_512_v4
+157/799638/campos_512_v4
+157/799717/campos_512_v4
+157/799736/campos_512_v4
+157/799745/campos_512_v4
+157/799793/campos_512_v4
+157/799798/campos_512_v4
+157/799838/campos_512_v4
+157/799950/campos_512_v4
+157/799951/campos_512_v4
+157/799974/campos_512_v4
+158/800034/campos_512_v4
+158/800051/campos_512_v4
+158/800104/campos_512_v4
+158/800138/campos_512_v4
+158/800208/campos_512_v4
+158/800297/campos_512_v4
+158/800299/campos_512_v4
+158/800317/campos_512_v4
+158/800318/campos_512_v4
+158/800378/campos_512_v4
+158/800523/campos_512_v4
+158/800527/campos_512_v4
+158/800538/campos_512_v4
+158/800577/campos_512_v4
+158/800629/campos_512_v4
+158/800696/campos_512_v4
+158/800720/campos_512_v4
+158/800775/campos_512_v4
+158/800795/campos_512_v4
+158/800798/campos_512_v4
+158/800824/campos_512_v4
+158/800849/campos_512_v4
+158/800864/campos_512_v4
+158/800930/campos_512_v4
+158/800936/campos_512_v4
+158/800975/campos_512_v4
+158/801008/campos_512_v4
+158/801034/campos_512_v4
+158/801068/campos_512_v4
+158/801097/campos_512_v4
+158/801101/campos_512_v4
+158/801218/campos_512_v4
+158/801255/campos_512_v4
+158/801301/campos_512_v4
+158/801359/campos_512_v4
+158/801360/campos_512_v4
+158/801368/campos_512_v4
+158/801381/campos_512_v4
+158/801384/campos_512_v4
+158/801391/campos_512_v4
+158/801394/campos_512_v4
+158/801433/campos_512_v4
+158/801455/campos_512_v4
+158/801460/campos_512_v4
+158/801468/campos_512_v4
+158/801481/campos_512_v4
+158/801501/campos_512_v4
+158/801528/campos_512_v4
+158/801540/campos_512_v4
+158/801546/campos_512_v4
+158/801560/campos_512_v4
+158/801568/campos_512_v4
+158/801620/campos_512_v4
+158/801632/campos_512_v4
+158/801655/campos_512_v4
+158/801685/campos_512_v4
+158/801686/campos_512_v4
+158/801731/campos_512_v4
+158/801754/campos_512_v4
+158/801778/campos_512_v4
+158/801867/campos_512_v4
+158/801912/campos_512_v4
+158/801913/campos_512_v4
+158/801963/campos_512_v4
+158/802041/campos_512_v4
+158/802059/campos_512_v4
+158/802120/campos_512_v4
+158/802176/campos_512_v4
+158/802190/campos_512_v4
+158/802210/campos_512_v4
+158/802230/campos_512_v4
+158/802324/campos_512_v4
+158/802397/campos_512_v4
+158/802456/campos_512_v4
+158/802458/campos_512_v4
+158/802466/campos_512_v4
+158/802510/campos_512_v4
+158/802511/campos_512_v4
+158/802526/campos_512_v4
+158/802540/campos_512_v4
+158/802554/campos_512_v4
+158/802557/campos_512_v4
+158/802684/campos_512_v4
+158/802685/campos_512_v4
+158/802699/campos_512_v4
+158/802709/campos_512_v4
+158/802885/campos_512_v4
+158/802900/campos_512_v4
+158/802954/campos_512_v4
+158/803009/campos_512_v4
+158/803041/campos_512_v4
+158/803046/campos_512_v4
+158/803047/campos_512_v4
+158/803066/campos_512_v4
+158/803084/campos_512_v4
+158/803101/campos_512_v4
+158/803110/campos_512_v4
+158/803122/campos_512_v4
+158/803130/campos_512_v4
+158/803181/campos_512_v4
+158/803182/campos_512_v4
+158/803197/campos_512_v4
+158/803210/campos_512_v4
+158/803248/campos_512_v4
+158/803260/campos_512_v4
+158/803340/campos_512_v4
+158/803341/campos_512_v4
+158/803390/campos_512_v4
+158/803474/campos_512_v4
+158/803499/campos_512_v4
+158/803543/campos_512_v4
+158/803550/campos_512_v4
+158/803581/campos_512_v4
+158/803636/campos_512_v4
+158/803641/campos_512_v4
+158/803651/campos_512_v4
+158/803662/campos_512_v4
+158/803674/campos_512_v4
+158/803695/campos_512_v4
+158/803711/campos_512_v4
+158/803712/campos_512_v4
+158/803714/campos_512_v4
+158/803718/campos_512_v4
+158/803719/campos_512_v4
+158/803794/campos_512_v4
+158/803816/campos_512_v4
+158/803848/campos_512_v4
+158/803881/campos_512_v4
+158/803894/campos_512_v4
+158/803931/campos_512_v4
+158/803951/campos_512_v4
+158/804031/campos_512_v4
+158/804061/campos_512_v4
+158/804062/campos_512_v4
+158/804075/campos_512_v4
+158/804080/campos_512_v4
+158/804154/campos_512_v4
+158/804198/campos_512_v4
+158/804245/campos_512_v4
+158/804264/campos_512_v4
+158/804271/campos_512_v4
+158/804297/campos_512_v4
+158/804350/campos_512_v4
+158/804439/campos_512_v4
+158/804442/campos_512_v4
+158/804458/campos_512_v4
+158/804515/campos_512_v4
+158/804542/campos_512_v4
+158/804589/campos_512_v4
+158/804666/campos_512_v4
+158/804760/campos_512_v4
+158/804794/campos_512_v4
+158/804845/campos_512_v4
+158/804854/campos_512_v4
+158/804855/campos_512_v4
+158/804874/campos_512_v4
+158/804875/campos_512_v4
+158/804901/campos_512_v4
+158/804906/campos_512_v4
+158/804921/campos_512_v4
+158/804943/campos_512_v4
+158/804966/campos_512_v4
+158/804970/campos_512_v4
+159/805038/campos_512_v4
+159/805055/campos_512_v4
+159/805068/campos_512_v4
+159/805072/campos_512_v4
+159/805112/campos_512_v4
+159/805126/campos_512_v4
+159/805200/campos_512_v4
+159/805218/campos_512_v4
+159/805233/campos_512_v4
+159/805236/campos_512_v4
+159/805269/campos_512_v4
+159/805307/campos_512_v4
+159/805318/campos_512_v4
+159/805331/campos_512_v4
+159/805397/campos_512_v4
+159/805403/campos_512_v4
+159/805409/campos_512_v4
+159/805411/campos_512_v4
+159/805457/campos_512_v4
+159/805475/campos_512_v4
+159/805480/campos_512_v4
+159/805513/campos_512_v4
+159/805525/campos_512_v4
+159/805572/campos_512_v4
+159/805602/campos_512_v4
+159/805615/campos_512_v4
+159/805624/campos_512_v4
+159/805630/campos_512_v4
+159/805654/campos_512_v4
+159/805708/campos_512_v4
+159/805778/campos_512_v4
+159/805784/campos_512_v4
+159/805793/campos_512_v4
+159/805843/campos_512_v4
+159/806012/campos_512_v4
+159/806021/campos_512_v4
+159/806041/campos_512_v4
+159/806091/campos_512_v4
+159/806097/campos_512_v4
+159/806103/campos_512_v4
+159/806127/campos_512_v4
+159/806205/campos_512_v4
+159/806214/campos_512_v4
+159/806221/campos_512_v4
+159/806253/campos_512_v4
+159/806258/campos_512_v4
+159/806264/campos_512_v4
+159/806282/campos_512_v4
+159/806299/campos_512_v4
+159/806311/campos_512_v4
+159/806331/campos_512_v4
+159/806336/campos_512_v4
+159/806341/campos_512_v4
+159/806351/campos_512_v4
+159/806374/campos_512_v4
+159/806437/campos_512_v4
+159/806516/campos_512_v4
+159/806519/campos_512_v4
+159/806521/campos_512_v4
+159/806535/campos_512_v4
+159/806536/campos_512_v4
+159/806550/campos_512_v4
+159/806551/campos_512_v4
+159/806562/campos_512_v4
+159/806583/campos_512_v4
+159/806644/campos_512_v4
+159/806709/campos_512_v4
+159/806711/campos_512_v4
+159/806798/campos_512_v4
+159/806809/campos_512_v4
+159/806862/campos_512_v4
+159/806870/campos_512_v4
+159/806880/campos_512_v4
+159/806884/campos_512_v4
+159/806913/campos_512_v4
+159/806939/campos_512_v4
+159/806967/campos_512_v4
+159/806988/campos_512_v4
+159/807075/campos_512_v4
+159/807094/campos_512_v4
+159/807096/campos_512_v4
+159/807166/campos_512_v4
+159/807293/campos_512_v4
+159/807295/campos_512_v4
+159/807339/campos_512_v4
+159/807343/campos_512_v4
+159/807378/campos_512_v4
+159/807394/campos_512_v4
+159/807399/campos_512_v4
+159/807409/campos_512_v4
+159/807448/campos_512_v4
+159/807464/campos_512_v4
+159/807528/campos_512_v4
+159/807615/campos_512_v4
+159/807630/campos_512_v4
+159/807635/campos_512_v4
+159/807694/campos_512_v4
+159/807735/campos_512_v4
+159/807796/campos_512_v4
+159/807818/campos_512_v4
+159/807933/campos_512_v4
+159/807936/campos_512_v4
+159/807971/campos_512_v4
+159/808109/campos_512_v4
+159/808185/campos_512_v4
+159/808321/campos_512_v4
+159/808416/campos_512_v4
+159/808422/campos_512_v4
+159/808443/campos_512_v4
+159/808461/campos_512_v4
+159/808466/campos_512_v4
+159/808483/campos_512_v4
+159/808491/campos_512_v4
+159/808530/campos_512_v4
+159/808534/campos_512_v4
+159/808538/campos_512_v4
+159/808637/campos_512_v4
+159/808655/campos_512_v4
+159/808665/campos_512_v4
+159/808703/campos_512_v4
+159/808710/campos_512_v4
+159/808744/campos_512_v4
+16/90058/campos_512_v4
+16/90098/campos_512_v4
+16/90116/campos_512_v4
+16/90190/campos_512_v4
+16/90207/campos_512_v4
+16/90225/campos_512_v4
+16/90227/campos_512_v4
+16/90232/campos_512_v4
+16/90259/campos_512_v4
+16/90270/campos_512_v4
+16/90272/campos_512_v4
+16/90293/campos_512_v4
+16/90299/campos_512_v4
+16/90386/campos_512_v4
+16/90387/campos_512_v4
+16/90404/campos_512_v4
+16/90416/campos_512_v4
+16/90468/campos_512_v4
+16/90528/campos_512_v4
+16/90547/campos_512_v4
+16/90548/campos_512_v4
+16/90564/campos_512_v4
+16/90617/campos_512_v4
+16/90623/campos_512_v4
+16/90633/campos_512_v4
+16/90637/campos_512_v4
+16/90695/campos_512_v4
+16/90749/campos_512_v4
+16/90758/campos_512_v4
+16/90813/campos_512_v4
+16/90818/campos_512_v4
+16/90837/campos_512_v4
+16/90936/campos_512_v4
+16/90942/campos_512_v4
+16/90943/campos_512_v4
+16/90973/campos_512_v4
+16/90978/campos_512_v4
+16/91005/campos_512_v4
+16/91021/campos_512_v4
+16/91022/campos_512_v4
+16/91028/campos_512_v4
+16/91117/campos_512_v4
+16/91124/campos_512_v4
+16/91136/campos_512_v4
+16/91138/campos_512_v4
+16/91160/campos_512_v4
+16/91166/campos_512_v4
+16/91176/campos_512_v4
+16/91217/campos_512_v4
+16/91266/campos_512_v4
+16/91287/campos_512_v4
+16/91309/campos_512_v4
+16/91356/campos_512_v4
+16/91361/campos_512_v4
+16/91377/campos_512_v4
+16/91388/campos_512_v4
+16/91431/campos_512_v4
+16/91454/campos_512_v4
+16/91464/campos_512_v4
+16/91474/campos_512_v4
+16/91490/campos_512_v4
+16/91509/campos_512_v4
+16/91518/campos_512_v4
+16/91533/campos_512_v4
+16/91568/campos_512_v4
+16/91575/campos_512_v4
+16/91585/campos_512_v4
+16/91612/campos_512_v4
+16/91613/campos_512_v4
+16/91614/campos_512_v4
+16/91619/campos_512_v4
+16/91620/campos_512_v4
+16/91626/campos_512_v4
+16/91663/campos_512_v4
+16/91715/campos_512_v4
+16/91728/campos_512_v4
+16/91735/campos_512_v4
+16/91736/campos_512_v4
+16/91790/campos_512_v4
+16/91795/campos_512_v4
+16/91824/campos_512_v4
+16/91854/campos_512_v4
+16/91869/campos_512_v4
+16/91887/campos_512_v4
+16/91899/campos_512_v4
+16/91940/campos_512_v4
+16/91941/campos_512_v4
+16/91956/campos_512_v4
+16/91964/campos_512_v4
+16/91966/campos_512_v4
+16/91975/campos_512_v4
+16/91989/campos_512_v4
+16/92016/campos_512_v4
+16/92026/campos_512_v4
+16/92036/campos_512_v4
+16/92060/campos_512_v4
+16/92150/campos_512_v4
+16/92168/campos_512_v4
+16/92208/campos_512_v4
+16/92217/campos_512_v4
+16/92221/campos_512_v4
+16/92228/campos_512_v4
+16/92235/campos_512_v4
+16/92236/campos_512_v4
+16/92259/campos_512_v4
+16/92279/campos_512_v4
+16/92284/campos_512_v4
+16/92308/campos_512_v4
+16/92337/campos_512_v4
+16/92356/campos_512_v4
+16/92370/campos_512_v4
+16/92403/campos_512_v4
+16/92407/campos_512_v4
+16/92466/campos_512_v4
+16/92502/campos_512_v4
+16/92516/campos_512_v4
+16/92562/campos_512_v4
+16/92570/campos_512_v4
+16/92582/campos_512_v4
+16/92643/campos_512_v4
+16/92646/campos_512_v4
+16/92700/campos_512_v4
+16/92714/campos_512_v4
+16/92722/campos_512_v4
+16/92808/campos_512_v4
+16/92821/campos_512_v4
+16/92827/campos_512_v4
+16/92847/campos_512_v4
+16/92851/campos_512_v4
+16/92890/campos_512_v4
+16/92903/campos_512_v4
+16/92904/campos_512_v4
+16/92922/campos_512_v4
+16/92928/campos_512_v4
+16/92986/campos_512_v4
+16/93004/campos_512_v4
+16/93016/campos_512_v4
+16/93067/campos_512_v4
+16/93072/campos_512_v4
+16/93081/campos_512_v4
+16/93120/campos_512_v4
+16/93126/campos_512_v4
+16/93144/campos_512_v4
+16/93148/campos_512_v4
+16/93162/campos_512_v4
+16/93169/campos_512_v4
+16/93175/campos_512_v4
+16/93177/campos_512_v4
+16/93196/campos_512_v4
+16/93207/campos_512_v4
+16/93214/campos_512_v4
+16/93231/campos_512_v4
+16/93234/campos_512_v4
+16/93284/campos_512_v4
+16/93288/campos_512_v4
+16/93330/campos_512_v4
+16/93361/campos_512_v4
+16/93384/campos_512_v4
+16/93429/campos_512_v4
+16/93440/campos_512_v4
+16/93446/campos_512_v4
+16/93474/campos_512_v4
+16/93479/campos_512_v4
+16/93535/campos_512_v4
+16/93570/campos_512_v4
+16/93585/campos_512_v4
+16/93608/campos_512_v4
+16/93610/campos_512_v4
+16/93628/campos_512_v4
+16/93652/campos_512_v4
+16/93699/campos_512_v4
+16/93774/campos_512_v4
+16/93783/campos_512_v4
+16/93855/campos_512_v4
+16/93880/campos_512_v4
+16/93934/campos_512_v4
+16/93947/campos_512_v4
+16/93952/campos_512_v4
+16/93962/campos_512_v4
+16/94048/campos_512_v4
+16/94049/campos_512_v4
+16/94073/campos_512_v4
+16/94079/campos_512_v4
+16/94159/campos_512_v4
+16/94214/campos_512_v4
+16/94228/campos_512_v4
+16/94241/campos_512_v4
+16/94257/campos_512_v4
+16/94274/campos_512_v4
+16/94278/campos_512_v4
+16/94287/campos_512_v4
+16/94372/campos_512_v4
+16/94396/campos_512_v4
+16/94420/campos_512_v4
+16/94453/campos_512_v4
+16/94457/campos_512_v4
+16/94494/campos_512_v4
+16/94509/campos_512_v4
+16/94540/campos_512_v4
+16/94543/campos_512_v4
+16/94571/campos_512_v4
+16/94580/campos_512_v4
+16/94605/campos_512_v4
+16/94617/campos_512_v4
+16/94650/campos_512_v4
+16/94683/campos_512_v4
+16/94687/campos_512_v4
+16/94693/campos_512_v4
+16/94700/campos_512_v4
+16/94710/campos_512_v4
+16/94718/campos_512_v4
+16/94753/campos_512_v4
+16/94790/campos_512_v4
+16/94795/campos_512_v4
+16/94845/campos_512_v4
+16/94846/campos_512_v4
+16/94848/campos_512_v4
+16/94855/campos_512_v4
+16/94866/campos_512_v4
+16/94892/campos_512_v4
+16/94899/campos_512_v4
+16/94912/campos_512_v4
+16/94932/campos_512_v4
+16/94939/campos_512_v4
+16/94971/campos_512_v4
+16/94974/campos_512_v4
+16/94977/campos_512_v4
+17/95015/campos_512_v4
+17/95129/campos_512_v4
+17/95136/campos_512_v4
+17/95161/campos_512_v4
+17/95236/campos_512_v4
+17/95285/campos_512_v4
+17/95287/campos_512_v4
+17/95301/campos_512_v4
+17/95308/campos_512_v4
+17/95313/campos_512_v4
+17/95322/campos_512_v4
+17/95358/campos_512_v4
+17/95359/campos_512_v4
+17/95380/campos_512_v4
+17/95387/campos_512_v4
+17/95389/campos_512_v4
+17/95399/campos_512_v4
+17/95402/campos_512_v4
+17/95430/campos_512_v4
+17/95467/campos_512_v4
+17/95488/campos_512_v4
+17/95501/campos_512_v4
+17/95525/campos_512_v4
+17/95530/campos_512_v4
+17/95552/campos_512_v4
+17/95671/campos_512_v4
+17/95688/campos_512_v4
+17/95690/campos_512_v4
+17/95750/campos_512_v4
+17/95757/campos_512_v4
+17/95759/campos_512_v4
+17/95761/campos_512_v4
+17/95861/campos_512_v4
+17/95892/campos_512_v4
+17/95902/campos_512_v4
+17/95956/campos_512_v4
+17/95988/campos_512_v4
+17/95999/campos_512_v4
+17/96018/campos_512_v4
+17/96020/campos_512_v4
+17/96024/campos_512_v4
+17/96044/campos_512_v4
+17/96062/campos_512_v4
+17/96111/campos_512_v4
+17/96133/campos_512_v4
+17/96147/campos_512_v4
+17/96161/campos_512_v4
+17/96175/campos_512_v4
+17/96214/campos_512_v4
+17/96365/campos_512_v4
+17/96463/campos_512_v4
+17/96479/campos_512_v4
+17/96499/campos_512_v4
+17/96544/campos_512_v4
+17/96602/campos_512_v4
+17/96613/campos_512_v4
+17/96646/campos_512_v4
+17/96661/campos_512_v4
+17/96709/campos_512_v4
+17/96710/campos_512_v4
+17/96715/campos_512_v4
+17/96752/campos_512_v4
+17/96775/campos_512_v4
+17/96787/campos_512_v4
+17/96916/campos_512_v4
+17/96941/campos_512_v4
+17/96966/campos_512_v4
+17/96970/campos_512_v4
+17/97010/campos_512_v4
+17/97034/campos_512_v4
+17/97065/campos_512_v4
+17/97067/campos_512_v4
+17/97072/campos_512_v4
+17/97075/campos_512_v4
+17/97102/campos_512_v4
+17/97132/campos_512_v4
+17/97157/campos_512_v4
+17/97204/campos_512_v4
+17/97250/campos_512_v4
+17/97314/campos_512_v4
+17/97326/campos_512_v4
+17/97336/campos_512_v4
+17/97343/campos_512_v4
+17/97346/campos_512_v4
+17/97363/campos_512_v4
+17/97397/campos_512_v4
+17/97398/campos_512_v4
+17/97519/campos_512_v4
+17/97536/campos_512_v4
+17/97582/campos_512_v4
+17/97602/campos_512_v4
+17/97620/campos_512_v4
+17/97637/campos_512_v4
+17/97658/campos_512_v4
+17/97662/campos_512_v4
+17/97672/campos_512_v4
+17/97683/campos_512_v4
+17/97797/campos_512_v4
+17/97908/campos_512_v4
+17/97912/campos_512_v4
+17/97917/campos_512_v4
+17/97985/campos_512_v4
+17/98027/campos_512_v4
+17/98038/campos_512_v4
+17/98055/campos_512_v4
+17/98070/campos_512_v4
+17/98109/campos_512_v4
+17/98177/campos_512_v4
+17/98237/campos_512_v4
+17/98261/campos_512_v4
+17/98339/campos_512_v4
+17/98400/campos_512_v4
+17/98435/campos_512_v4
+17/98496/campos_512_v4
+17/98501/campos_512_v4
+17/98522/campos_512_v4
+17/98579/campos_512_v4
+17/98586/campos_512_v4
+17/98590/campos_512_v4
+17/98594/campos_512_v4
+17/98654/campos_512_v4
+17/98679/campos_512_v4
+17/98697/campos_512_v4
+17/98735/campos_512_v4
+17/98776/campos_512_v4
+17/98868/campos_512_v4
+17/98886/campos_512_v4
+17/98910/campos_512_v4
+17/98980/campos_512_v4
+17/99006/campos_512_v4
+17/99032/campos_512_v4
+17/99055/campos_512_v4
+17/99074/campos_512_v4
+17/99092/campos_512_v4
+17/99121/campos_512_v4
+17/99194/campos_512_v4
+17/99209/campos_512_v4
+17/99267/campos_512_v4
+17/99272/campos_512_v4
+17/99291/campos_512_v4
+17/99312/campos_512_v4
+17/99368/campos_512_v4
+17/99407/campos_512_v4
+17/99456/campos_512_v4
+17/99458/campos_512_v4
+17/99533/campos_512_v4
+17/99535/campos_512_v4
+17/99635/campos_512_v4
+17/99678/campos_512_v4
+17/99680/campos_512_v4
+17/99703/campos_512_v4
+17/99795/campos_512_v4
+17/99798/campos_512_v4
+17/99802/campos_512_v4
+17/99809/campos_512_v4
+17/99841/campos_512_v4
+17/99845/campos_512_v4
+17/99854/campos_512_v4
+17/99895/campos_512_v4
+17/99927/campos_512_v4
+17/99985/campos_512_v4
+2/20007/campos_512_v4
+2/20009/campos_512_v4
+2/20078/campos_512_v4
+2/20079/campos_512_v4
+2/20106/campos_512_v4
+2/20111/campos_512_v4
+2/20119/campos_512_v4
+2/20205/campos_512_v4
+2/20225/campos_512_v4
+2/20253/campos_512_v4
+2/20256/campos_512_v4
+2/20259/campos_512_v4
+2/20280/campos_512_v4
+2/20289/campos_512_v4
+2/20335/campos_512_v4
+2/20338/campos_512_v4
+2/20353/campos_512_v4
+2/20390/campos_512_v4
+2/20394/campos_512_v4
+2/20406/campos_512_v4
+2/20427/campos_512_v4
+2/20510/campos_512_v4
+2/20513/campos_512_v4
+2/20528/campos_512_v4
+2/20555/campos_512_v4
+2/20574/campos_512_v4
+2/20586/campos_512_v4
+2/20604/campos_512_v4
+2/20614/campos_512_v4
+2/20624/campos_512_v4
+2/20627/campos_512_v4
+2/20636/campos_512_v4
+2/20637/campos_512_v4
+2/20648/campos_512_v4
+2/20653/campos_512_v4
+2/20671/campos_512_v4
+2/20712/campos_512_v4
+2/20777/campos_512_v4
+2/20810/campos_512_v4
+2/20815/campos_512_v4
+2/20834/campos_512_v4
+2/20876/campos_512_v4
+2/20893/campos_512_v4
+2/20898/campos_512_v4
+2/20925/campos_512_v4
+2/20961/campos_512_v4
+2/20983/campos_512_v4
+2/21115/campos_512_v4
+2/21161/campos_512_v4
+2/21187/campos_512_v4
+2/21250/campos_512_v4
+2/21254/campos_512_v4
+2/21282/campos_512_v4
+2/21339/campos_512_v4
+2/21357/campos_512_v4
+2/21367/campos_512_v4
+2/21379/campos_512_v4
+2/21408/campos_512_v4
+2/21454/campos_512_v4
+2/21488/campos_512_v4
+2/21501/campos_512_v4
+2/21503/campos_512_v4
+2/21504/campos_512_v4
+2/21578/campos_512_v4
+2/21705/campos_512_v4
+2/21715/campos_512_v4
+2/21724/campos_512_v4
+2/21733/campos_512_v4
+2/21734/campos_512_v4
+2/21751/campos_512_v4
+2/21792/campos_512_v4
+2/21794/campos_512_v4
+2/21857/campos_512_v4
+2/21865/campos_512_v4
+2/21873/campos_512_v4
+2/21917/campos_512_v4
+2/21938/campos_512_v4
+2/21941/campos_512_v4
+2/21962/campos_512_v4
+2/22001/campos_512_v4
+2/22029/campos_512_v4
+2/22060/campos_512_v4
+2/22080/campos_512_v4
+2/22113/campos_512_v4
+2/22150/campos_512_v4
+2/22188/campos_512_v4
+2/22207/campos_512_v4
+2/22230/campos_512_v4
+2/22287/campos_512_v4
+2/22401/campos_512_v4
+2/22415/campos_512_v4
+2/22482/campos_512_v4
+2/22513/campos_512_v4
+2/22570/campos_512_v4
+2/22614/campos_512_v4
+2/22618/campos_512_v4
+2/22723/campos_512_v4
+2/22788/campos_512_v4
+2/22864/campos_512_v4
+2/22871/campos_512_v4
+2/22902/campos_512_v4
+2/22918/campos_512_v4
+2/22940/campos_512_v4
+2/22942/campos_512_v4
+2/23053/campos_512_v4
+2/23107/campos_512_v4
+2/23110/campos_512_v4
+2/23143/campos_512_v4
+2/23190/campos_512_v4
+2/23205/campos_512_v4
+2/23210/campos_512_v4
+2/23298/campos_512_v4
+2/23302/campos_512_v4
+2/23307/campos_512_v4
+2/23358/campos_512_v4
+2/23380/campos_512_v4
+2/23383/campos_512_v4
+2/23401/campos_512_v4
+2/23412/campos_512_v4
+2/23416/campos_512_v4
+2/23425/campos_512_v4
+2/23465/campos_512_v4
+2/23485/campos_512_v4
+2/23490/campos_512_v4
+2/23508/campos_512_v4
+2/23577/campos_512_v4
+2/23606/campos_512_v4
+2/23609/campos_512_v4
+2/23636/campos_512_v4
+2/23643/campos_512_v4
+2/23660/campos_512_v4
+2/23670/campos_512_v4
+2/23698/campos_512_v4
+2/23806/campos_512_v4
+2/23808/campos_512_v4
+2/23852/campos_512_v4
+2/23866/campos_512_v4
+2/23901/campos_512_v4
+2/23954/campos_512_v4
+2/23966/campos_512_v4
+2/23971/campos_512_v4
+2/24029/campos_512_v4
+2/24040/campos_512_v4
+2/24056/campos_512_v4
+2/24125/campos_512_v4
+2/24163/campos_512_v4
+2/24204/campos_512_v4
+2/24226/campos_512_v4
+2/24227/campos_512_v4
+2/24252/campos_512_v4
+2/24272/campos_512_v4
+2/24277/campos_512_v4
+2/24314/campos_512_v4
+2/24358/campos_512_v4
+2/24413/campos_512_v4
+2/24416/campos_512_v4
+2/24427/campos_512_v4
+2/24454/campos_512_v4
+2/24457/campos_512_v4
+2/24502/campos_512_v4
+2/24505/campos_512_v4
+2/24509/campos_512_v4
+2/24533/campos_512_v4
+2/24534/campos_512_v4
+2/24562/campos_512_v4
+2/24627/campos_512_v4
+2/24664/campos_512_v4
+2/24676/campos_512_v4
+2/24689/campos_512_v4
+2/24701/campos_512_v4
+2/24720/campos_512_v4
+2/24759/campos_512_v4
+2/24830/campos_512_v4
+2/24849/campos_512_v4
+2/24884/campos_512_v4
+2/24918/campos_512_v4
+2/24969/campos_512_v4
+23/125056/campos_512_v4
+23/125060/campos_512_v4
+23/125078/campos_512_v4
+23/125082/campos_512_v4
+23/125147/campos_512_v4
+23/125160/campos_512_v4
+23/125170/campos_512_v4
+23/125178/campos_512_v4
+23/125181/campos_512_v4
+23/125219/campos_512_v4
+23/125222/campos_512_v4
+23/125244/campos_512_v4
+23/125247/campos_512_v4
+23/125250/campos_512_v4
+23/125269/campos_512_v4
+23/125270/campos_512_v4
+23/125271/campos_512_v4
+23/125281/campos_512_v4
+23/125288/campos_512_v4
+23/125296/campos_512_v4
+23/125302/campos_512_v4
+23/125308/campos_512_v4
+23/125324/campos_512_v4
+23/125337/campos_512_v4
+23/125379/campos_512_v4
+23/125389/campos_512_v4
+23/125398/campos_512_v4
+23/125422/campos_512_v4
+23/125423/campos_512_v4
+23/125494/campos_512_v4
+23/125517/campos_512_v4
+23/125623/campos_512_v4
+23/125626/campos_512_v4
+23/125644/campos_512_v4
+23/125660/campos_512_v4
+23/125715/campos_512_v4
+23/125731/campos_512_v4
+23/125751/campos_512_v4
+23/125754/campos_512_v4
+23/125763/campos_512_v4
+23/125800/campos_512_v4
+23/125812/campos_512_v4
+23/125854/campos_512_v4
+23/125861/campos_512_v4
+23/125869/campos_512_v4
+23/125872/campos_512_v4
+23/125879/campos_512_v4
+23/125883/campos_512_v4
+23/125917/campos_512_v4
+23/125931/campos_512_v4
+23/125973/campos_512_v4
+23/126044/campos_512_v4
+23/126051/campos_512_v4
+23/126108/campos_512_v4
+23/126112/campos_512_v4
+23/126153/campos_512_v4
+23/126204/campos_512_v4
+23/126208/campos_512_v4
+23/126217/campos_512_v4
+23/126227/campos_512_v4
+23/126240/campos_512_v4
+23/126263/campos_512_v4
+23/126264/campos_512_v4
+23/126266/campos_512_v4
+23/126271/campos_512_v4
+23/126280/campos_512_v4
+23/126331/campos_512_v4
+23/126336/campos_512_v4
+23/126346/campos_512_v4
+23/126352/campos_512_v4
+23/126355/campos_512_v4
+23/126364/campos_512_v4
+23/126378/campos_512_v4
+23/126397/campos_512_v4
+23/126442/campos_512_v4
+23/126449/campos_512_v4
+23/126507/campos_512_v4
+23/126543/campos_512_v4
+23/126565/campos_512_v4
+23/126584/campos_512_v4
+23/126602/campos_512_v4
+23/126660/campos_512_v4
+23/126679/campos_512_v4
+23/126706/campos_512_v4
+23/126725/campos_512_v4
+23/126730/campos_512_v4
+23/126757/campos_512_v4
+23/126890/campos_512_v4
+23/126933/campos_512_v4
+23/126978/campos_512_v4
+23/127021/campos_512_v4
+23/127042/campos_512_v4
+23/127043/campos_512_v4
+23/127065/campos_512_v4
+23/127068/campos_512_v4
+23/127076/campos_512_v4
+23/127094/campos_512_v4
+23/127096/campos_512_v4
+23/127103/campos_512_v4
+23/127117/campos_512_v4
+23/127126/campos_512_v4
+23/127133/campos_512_v4
+23/127156/campos_512_v4
+23/127166/campos_512_v4
+23/127183/campos_512_v4
+23/127194/campos_512_v4
+23/127203/campos_512_v4
+23/127218/campos_512_v4
+23/127225/campos_512_v4
+23/127307/campos_512_v4
+23/127420/campos_512_v4
+23/127436/campos_512_v4
+23/127444/campos_512_v4
+23/127479/campos_512_v4
+23/127489/campos_512_v4
+23/127500/campos_512_v4
+23/127507/campos_512_v4
+23/127512/campos_512_v4
+23/127513/campos_512_v4
+23/127521/campos_512_v4
+23/127536/campos_512_v4
+23/127552/campos_512_v4
+23/127560/campos_512_v4
+23/127590/campos_512_v4
+23/127601/campos_512_v4
+23/127609/campos_512_v4
+23/127767/campos_512_v4
+23/127781/campos_512_v4
+23/127786/campos_512_v4
+23/127813/campos_512_v4
+23/127815/campos_512_v4
+23/127835/campos_512_v4
+23/127838/campos_512_v4
+23/127850/campos_512_v4
+23/127883/campos_512_v4
+23/127904/campos_512_v4
+23/127914/campos_512_v4
+23/127923/campos_512_v4
+23/127935/campos_512_v4
+23/127942/campos_512_v4
+23/127945/campos_512_v4
+23/127951/campos_512_v4
+23/127955/campos_512_v4
+23/127959/campos_512_v4
+23/127963/campos_512_v4
+23/127981/campos_512_v4
+23/127991/campos_512_v4
+23/127996/campos_512_v4
+23/128050/campos_512_v4
+23/128078/campos_512_v4
+23/128087/campos_512_v4
+23/128171/campos_512_v4
+23/128183/campos_512_v4
+23/128188/campos_512_v4
+23/128194/campos_512_v4
+23/128201/campos_512_v4
+23/128240/campos_512_v4
+23/128251/campos_512_v4
+23/128277/campos_512_v4
+23/128298/campos_512_v4
+23/128301/campos_512_v4
+23/128303/campos_512_v4
+23/128312/campos_512_v4
+23/128406/campos_512_v4
+23/128429/campos_512_v4
+23/128453/campos_512_v4
+23/128455/campos_512_v4
+23/128464/campos_512_v4
+23/128466/campos_512_v4
+23/128476/campos_512_v4
+23/128520/campos_512_v4
+23/128524/campos_512_v4
+23/128531/campos_512_v4
+23/128545/campos_512_v4
+23/128557/campos_512_v4
+23/128559/campos_512_v4
+23/128560/campos_512_v4
+23/128591/campos_512_v4
+23/128608/campos_512_v4
+23/128615/campos_512_v4
+23/128617/campos_512_v4
+23/128619/campos_512_v4
+23/128632/campos_512_v4
+23/128649/campos_512_v4
+23/128655/campos_512_v4
+23/128656/campos_512_v4
+23/128666/campos_512_v4
+23/128668/campos_512_v4
+23/128731/campos_512_v4
+23/128739/campos_512_v4
+23/128742/campos_512_v4
+23/128774/campos_512_v4
+23/128784/campos_512_v4
+23/128813/campos_512_v4
+23/128825/campos_512_v4
+23/128853/campos_512_v4
+23/128859/campos_512_v4
+23/128872/campos_512_v4
+23/128881/campos_512_v4
+23/128890/campos_512_v4
+23/128920/campos_512_v4
+23/129014/campos_512_v4
+23/129064/campos_512_v4
+23/129065/campos_512_v4
+23/129074/campos_512_v4
+23/129098/campos_512_v4
+23/129113/campos_512_v4
+23/129139/campos_512_v4
+23/129155/campos_512_v4
+23/129196/campos_512_v4
+23/129208/campos_512_v4
+23/129225/campos_512_v4
+23/129236/campos_512_v4
+23/129247/campos_512_v4
+23/129294/campos_512_v4
+23/129296/campos_512_v4
+23/129320/campos_512_v4
+23/129364/campos_512_v4
+23/129386/campos_512_v4
+23/129392/campos_512_v4
+23/129428/campos_512_v4
+23/129431/campos_512_v4
+23/129449/campos_512_v4
+23/129480/campos_512_v4
+23/129507/campos_512_v4
+23/129528/campos_512_v4
+23/129544/campos_512_v4
+23/129552/campos_512_v4
+23/129622/campos_512_v4
+23/129672/campos_512_v4
+23/129688/campos_512_v4
+23/129724/campos_512_v4
+23/129760/campos_512_v4
+23/129806/campos_512_v4
+23/129821/campos_512_v4
+23/129913/campos_512_v4
+23/129935/campos_512_v4
+23/129949/campos_512_v4
+23/129955/campos_512_v4
+23/129964/campos_512_v4
+23/129970/campos_512_v4
+24/130012/campos_512_v4
+24/130016/campos_512_v4
+24/130028/campos_512_v4
+24/130031/campos_512_v4
+24/130047/campos_512_v4
+24/130057/campos_512_v4
+24/130064/campos_512_v4
+24/130070/campos_512_v4
+24/130096/campos_512_v4
+24/130117/campos_512_v4
+24/130163/campos_512_v4
+24/130271/campos_512_v4
+24/130349/campos_512_v4
+24/130428/campos_512_v4
+24/130436/campos_512_v4
+24/130444/campos_512_v4
+24/130459/campos_512_v4
+24/130468/campos_512_v4
+24/130518/campos_512_v4
+24/130532/campos_512_v4
+24/130543/campos_512_v4
+24/130584/campos_512_v4
+24/130605/campos_512_v4
+24/130620/campos_512_v4
+24/130631/campos_512_v4
+24/130636/campos_512_v4
+24/130665/campos_512_v4
+24/130687/campos_512_v4
+24/130697/campos_512_v4
+24/130723/campos_512_v4
+24/130766/campos_512_v4
+24/130767/campos_512_v4
+24/130788/campos_512_v4
+24/130838/campos_512_v4
+24/130863/campos_512_v4
+24/130887/campos_512_v4
+24/130915/campos_512_v4
+24/130970/campos_512_v4
+24/131158/campos_512_v4
+24/131181/campos_512_v4
+24/131215/campos_512_v4
+24/131276/campos_512_v4
+24/131285/campos_512_v4
+24/131303/campos_512_v4
+24/131367/campos_512_v4
+24/131377/campos_512_v4
+24/131380/campos_512_v4
+24/131401/campos_512_v4
+24/131412/campos_512_v4
+24/131497/campos_512_v4
+24/131538/campos_512_v4
+24/131539/campos_512_v4
+24/131585/campos_512_v4
+24/131603/campos_512_v4
+24/131676/campos_512_v4
+24/131716/campos_512_v4
+24/131818/campos_512_v4
+24/131834/campos_512_v4
+24/131892/campos_512_v4
+24/132054/campos_512_v4
+24/132074/campos_512_v4
+24/132083/campos_512_v4
+24/132084/campos_512_v4
+24/132099/campos_512_v4
+24/132134/campos_512_v4
+24/132149/campos_512_v4
+24/132218/campos_512_v4
+24/132227/campos_512_v4
+24/132238/campos_512_v4
+24/132267/campos_512_v4
+24/132317/campos_512_v4
+24/132336/campos_512_v4
+24/132361/campos_512_v4
+24/132421/campos_512_v4
+24/132427/campos_512_v4
+24/132429/campos_512_v4
+24/132438/campos_512_v4
+24/132465/campos_512_v4
+24/132471/campos_512_v4
+24/132491/campos_512_v4
+24/132560/campos_512_v4
+24/132667/campos_512_v4
+24/132683/campos_512_v4
+24/132709/campos_512_v4
+24/132738/campos_512_v4
+24/132763/campos_512_v4
+24/132769/campos_512_v4
+24/132771/campos_512_v4
+24/132773/campos_512_v4
+24/132814/campos_512_v4
+24/132825/campos_512_v4
+24/132862/campos_512_v4
+24/132865/campos_512_v4
+24/132909/campos_512_v4
+24/132910/campos_512_v4
+24/132950/campos_512_v4
+24/132958/campos_512_v4
+24/132966/campos_512_v4
+24/132990/campos_512_v4
+24/133025/campos_512_v4
+24/133027/campos_512_v4
+24/133068/campos_512_v4
+24/133070/campos_512_v4
+24/133071/campos_512_v4
+24/133075/campos_512_v4
+24/133152/campos_512_v4
+24/133171/campos_512_v4
+24/133205/campos_512_v4
+24/133235/campos_512_v4
+24/133248/campos_512_v4
+24/133275/campos_512_v4
+24/133291/campos_512_v4
+24/133316/campos_512_v4
+24/133319/campos_512_v4
+24/133382/campos_512_v4
+24/133400/campos_512_v4
+24/133419/campos_512_v4
+24/133463/campos_512_v4
+24/133477/campos_512_v4
+24/133479/campos_512_v4
+24/133483/campos_512_v4
+24/133522/campos_512_v4
+24/133537/campos_512_v4
+24/133544/campos_512_v4
+24/133562/campos_512_v4
+24/133690/campos_512_v4
+24/133719/campos_512_v4
+24/133757/campos_512_v4
+24/133759/campos_512_v4
+24/133777/campos_512_v4
+24/133794/campos_512_v4
+24/133808/campos_512_v4
+24/133911/campos_512_v4
+24/133922/campos_512_v4
+24/133924/campos_512_v4
+24/133945/campos_512_v4
+24/133960/campos_512_v4
+24/133985/campos_512_v4
+24/134001/campos_512_v4
+24/134002/campos_512_v4
+24/134104/campos_512_v4
+24/134122/campos_512_v4
+24/134127/campos_512_v4
+24/134173/campos_512_v4
+24/134220/campos_512_v4
+24/134230/campos_512_v4
+24/134242/campos_512_v4
+24/134251/campos_512_v4
+24/134254/campos_512_v4
+24/134293/campos_512_v4
+24/134303/campos_512_v4
+24/134359/campos_512_v4
+24/134370/campos_512_v4
+24/134377/campos_512_v4
+24/134419/campos_512_v4
+24/134445/campos_512_v4
+24/134455/campos_512_v4
+24/134465/campos_512_v4
+24/134494/campos_512_v4
+24/134543/campos_512_v4
+24/134550/campos_512_v4
+24/134624/campos_512_v4
+24/134635/campos_512_v4
+24/134642/campos_512_v4
+24/134655/campos_512_v4
+24/134684/campos_512_v4
+24/134707/campos_512_v4
+24/134723/campos_512_v4
+24/134739/campos_512_v4
+24/134794/campos_512_v4
+24/134808/campos_512_v4
+24/134813/campos_512_v4
+24/134859/campos_512_v4
+24/134867/campos_512_v4
+25/135019/campos_512_v4
+25/135030/campos_512_v4
+25/135041/campos_512_v4
+25/135053/campos_512_v4
+25/135054/campos_512_v4
+25/135055/campos_512_v4
+25/135060/campos_512_v4
+25/135084/campos_512_v4
+25/135110/campos_512_v4
+25/135129/campos_512_v4
+25/135147/campos_512_v4
+25/135166/campos_512_v4
+25/135184/campos_512_v4
+25/135192/campos_512_v4
+25/135193/campos_512_v4
+25/135201/campos_512_v4
+25/135202/campos_512_v4
+25/135244/campos_512_v4
+25/135261/campos_512_v4
+25/135302/campos_512_v4
+25/135328/campos_512_v4
+25/135364/campos_512_v4
+25/135384/campos_512_v4
+25/135404/campos_512_v4
+25/135412/campos_512_v4
+25/135446/campos_512_v4
+25/135455/campos_512_v4
+25/135571/campos_512_v4
+25/135576/campos_512_v4
+25/135587/campos_512_v4
+25/135593/campos_512_v4
+25/135607/campos_512_v4
+25/135653/campos_512_v4
+25/135676/campos_512_v4
+25/135716/campos_512_v4
+25/135753/campos_512_v4
+25/135798/campos_512_v4
+25/135806/campos_512_v4
+25/135878/campos_512_v4
+25/135904/campos_512_v4
+25/135932/campos_512_v4
+25/135966/campos_512_v4
+25/135968/campos_512_v4
+25/135974/campos_512_v4
+25/135992/campos_512_v4
+25/136054/campos_512_v4
+25/136056/campos_512_v4
+25/136097/campos_512_v4
+25/136104/campos_512_v4
+25/136137/campos_512_v4
+25/136153/campos_512_v4
+25/136166/campos_512_v4
+25/136169/campos_512_v4
+25/136171/campos_512_v4
+25/136174/campos_512_v4
+25/136175/campos_512_v4
+25/136185/campos_512_v4
+25/136189/campos_512_v4
+25/136211/campos_512_v4
+25/136251/campos_512_v4
+25/136270/campos_512_v4
+25/136284/campos_512_v4
+25/136314/campos_512_v4
+25/136319/campos_512_v4
+25/136348/campos_512_v4
+25/136393/campos_512_v4
+25/136394/campos_512_v4
+25/136401/campos_512_v4
+25/136410/campos_512_v4
+25/136434/campos_512_v4
+25/136471/campos_512_v4
+25/136509/campos_512_v4
+25/136528/campos_512_v4
+25/136556/campos_512_v4
+25/136559/campos_512_v4
+25/136560/campos_512_v4
+25/136570/campos_512_v4
+25/136594/campos_512_v4
+25/136614/campos_512_v4
+25/136663/campos_512_v4
+25/136668/campos_512_v4
+25/136674/campos_512_v4
+25/136677/campos_512_v4
+25/136686/campos_512_v4
+25/136701/campos_512_v4
+25/136718/campos_512_v4
+25/136769/campos_512_v4
+25/136845/campos_512_v4
+25/136850/campos_512_v4
+25/136940/campos_512_v4
+25/136947/campos_512_v4
+25/137038/campos_512_v4
+25/137049/campos_512_v4
+25/137069/campos_512_v4
+25/137073/campos_512_v4
+25/137097/campos_512_v4
+25/137143/campos_512_v4
+25/137156/campos_512_v4
+25/137157/campos_512_v4
+25/137160/campos_512_v4
+25/137183/campos_512_v4
+25/137186/campos_512_v4
+25/137237/campos_512_v4
+25/137249/campos_512_v4
+25/137265/campos_512_v4
+25/137275/campos_512_v4
+25/137283/campos_512_v4
+25/137287/campos_512_v4
+25/137306/campos_512_v4
+25/137314/campos_512_v4
+25/137317/campos_512_v4
+25/137322/campos_512_v4
+25/137326/campos_512_v4
+25/137329/campos_512_v4
+25/137333/campos_512_v4
+25/137335/campos_512_v4
+25/137343/campos_512_v4
+25/137387/campos_512_v4
+25/137396/campos_512_v4
+25/137399/campos_512_v4
+25/137416/campos_512_v4
+25/137442/campos_512_v4
+25/137494/campos_512_v4
+25/137514/campos_512_v4
+25/137553/campos_512_v4
+25/137556/campos_512_v4
+25/137622/campos_512_v4
+25/137635/campos_512_v4
+25/137636/campos_512_v4
+25/137647/campos_512_v4
+25/137693/campos_512_v4
+25/137702/campos_512_v4
+25/137715/campos_512_v4
+25/137716/campos_512_v4
+25/137718/campos_512_v4
+25/137732/campos_512_v4
+25/137780/campos_512_v4
+25/137786/campos_512_v4
+25/137799/campos_512_v4
+25/137824/campos_512_v4
+25/137831/campos_512_v4
+25/137835/campos_512_v4
+25/137838/campos_512_v4
+25/137840/campos_512_v4
+25/137842/campos_512_v4
+25/137877/campos_512_v4
+25/137892/campos_512_v4
+25/137911/campos_512_v4
+25/137921/campos_512_v4
+25/137934/campos_512_v4
+25/137955/campos_512_v4
+25/137984/campos_512_v4
+25/137995/campos_512_v4
+25/138037/campos_512_v4
+25/138058/campos_512_v4
+25/138073/campos_512_v4
+25/138090/campos_512_v4
+25/138101/campos_512_v4
+25/138104/campos_512_v4
+25/138111/campos_512_v4
+25/138134/campos_512_v4
+25/138141/campos_512_v4
+25/138150/campos_512_v4
+25/138208/campos_512_v4
+25/138211/campos_512_v4
+25/138282/campos_512_v4
+25/138297/campos_512_v4
+25/138354/campos_512_v4
+25/138359/campos_512_v4
+25/138369/campos_512_v4
+25/138411/campos_512_v4
+25/138422/campos_512_v4
+25/138424/campos_512_v4
+25/138436/campos_512_v4
+25/138446/campos_512_v4
+25/138483/campos_512_v4
+25/138530/campos_512_v4
+25/138548/campos_512_v4
+25/138558/campos_512_v4
+25/138640/campos_512_v4
+25/138695/campos_512_v4
+25/138709/campos_512_v4
+25/138785/campos_512_v4
+25/138792/campos_512_v4
+25/138828/campos_512_v4
+25/138851/campos_512_v4
+25/138854/campos_512_v4
+25/138883/campos_512_v4
+25/138920/campos_512_v4
+25/138926/campos_512_v4
+25/138938/campos_512_v4
+25/138958/campos_512_v4
+25/138968/campos_512_v4
+25/138970/campos_512_v4
+25/138971/campos_512_v4
+25/138988/campos_512_v4
+25/139011/campos_512_v4
+25/139032/campos_512_v4
+25/139069/campos_512_v4
+25/139079/campos_512_v4
+25/139080/campos_512_v4
+25/139081/campos_512_v4
+25/139084/campos_512_v4
+25/139087/campos_512_v4
+25/139094/campos_512_v4
+25/139105/campos_512_v4
+25/139123/campos_512_v4
+25/139143/campos_512_v4
+25/139144/campos_512_v4
+25/139178/campos_512_v4
+25/139186/campos_512_v4
+25/139215/campos_512_v4
+25/139224/campos_512_v4
+25/139230/campos_512_v4
+25/139236/campos_512_v4
+25/139245/campos_512_v4
+25/139254/campos_512_v4
+25/139261/campos_512_v4
+25/139272/campos_512_v4
+25/139274/campos_512_v4
+25/139285/campos_512_v4
+25/139293/campos_512_v4
+25/139325/campos_512_v4
+25/139340/campos_512_v4
+25/139345/campos_512_v4
+25/139349/campos_512_v4
+25/139371/campos_512_v4
+25/139434/campos_512_v4
+25/139472/campos_512_v4
+25/139490/campos_512_v4
+25/139515/campos_512_v4
+25/139583/campos_512_v4
+25/139596/campos_512_v4
+25/139610/campos_512_v4
+25/139611/campos_512_v4
+25/139614/campos_512_v4
+25/139619/campos_512_v4
+25/139649/campos_512_v4
+25/139654/campos_512_v4
+25/139664/campos_512_v4
+25/139669/campos_512_v4
+25/139681/campos_512_v4
+25/139726/campos_512_v4
+25/139733/campos_512_v4
+25/139735/campos_512_v4
+25/139747/campos_512_v4
+25/139754/campos_512_v4
+25/139757/campos_512_v4
+25/139780/campos_512_v4
+25/139841/campos_512_v4
+25/139842/campos_512_v4
+25/139843/campos_512_v4
+25/139860/campos_512_v4
+25/139891/campos_512_v4
+25/139894/campos_512_v4
+25/139934/campos_512_v4
+25/139945/campos_512_v4
+25/139987/campos_512_v4
+25/139992/campos_512_v4
+26/140032/campos_512_v4
+26/140041/campos_512_v4
+26/140067/campos_512_v4
+26/140082/campos_512_v4
+26/140123/campos_512_v4
+26/140142/campos_512_v4
+26/140173/campos_512_v4
+26/140201/campos_512_v4
+26/140211/campos_512_v4
+26/140221/campos_512_v4
+26/140237/campos_512_v4
+26/140282/campos_512_v4
+26/140318/campos_512_v4
+26/140327/campos_512_v4
+26/140359/campos_512_v4
+26/140369/campos_512_v4
+26/140396/campos_512_v4
+26/140482/campos_512_v4
+26/140487/campos_512_v4
+26/140507/campos_512_v4
+26/140509/campos_512_v4
+26/140519/campos_512_v4
+26/140529/campos_512_v4
+26/140536/campos_512_v4
+26/140560/campos_512_v4
+26/140595/campos_512_v4
+26/140604/campos_512_v4
+26/140632/campos_512_v4
+26/140633/campos_512_v4
+26/140764/campos_512_v4
+26/140802/campos_512_v4
+26/140832/campos_512_v4
+26/140840/campos_512_v4
+26/140898/campos_512_v4
+26/140923/campos_512_v4
+26/140926/campos_512_v4
+26/140990/campos_512_v4
+26/141001/campos_512_v4
+26/141044/campos_512_v4
+26/141080/campos_512_v4
+26/141117/campos_512_v4
+26/141145/campos_512_v4
+26/141188/campos_512_v4
+26/141212/campos_512_v4
+26/141261/campos_512_v4
+26/141266/campos_512_v4
+26/141293/campos_512_v4
+26/141345/campos_512_v4
+26/141354/campos_512_v4
+26/141386/campos_512_v4
+26/141420/campos_512_v4
+26/141465/campos_512_v4
+26/141503/campos_512_v4
+26/141504/campos_512_v4
+26/141506/campos_512_v4
+26/141532/campos_512_v4
+26/141534/campos_512_v4
+26/141535/campos_512_v4
+26/141542/campos_512_v4
+26/141545/campos_512_v4
+26/141592/campos_512_v4
+26/141623/campos_512_v4
+26/141657/campos_512_v4
+26/141681/campos_512_v4
+26/141692/campos_512_v4
+26/141734/campos_512_v4
+26/141809/campos_512_v4
+26/141846/campos_512_v4
+26/141864/campos_512_v4
+26/141891/campos_512_v4
+26/141895/campos_512_v4
+26/141952/campos_512_v4
+26/141979/campos_512_v4
+26/141987/campos_512_v4
+26/142002/campos_512_v4
+26/142003/campos_512_v4
+26/142006/campos_512_v4
+26/142050/campos_512_v4
+26/142059/campos_512_v4
+26/142100/campos_512_v4
+26/142122/campos_512_v4
+26/142132/campos_512_v4
+26/142140/campos_512_v4
+26/142142/campos_512_v4
+26/142166/campos_512_v4
+26/142259/campos_512_v4
+26/142296/campos_512_v4
+26/142418/campos_512_v4
+26/142481/campos_512_v4
+26/142496/campos_512_v4
+26/142535/campos_512_v4
+26/142582/campos_512_v4
+26/142651/campos_512_v4
+26/142672/campos_512_v4
+26/142688/campos_512_v4
+26/142719/campos_512_v4
+26/142747/campos_512_v4
+26/142765/campos_512_v4
+26/142792/campos_512_v4
+26/142800/campos_512_v4
+26/142810/campos_512_v4
+26/142852/campos_512_v4
+26/142859/campos_512_v4
+26/142892/campos_512_v4
+26/142929/campos_512_v4
+26/142944/campos_512_v4
+26/142980/campos_512_v4
+26/142983/campos_512_v4
+26/142984/campos_512_v4
+26/143020/campos_512_v4
+26/143093/campos_512_v4
+26/143098/campos_512_v4
+26/143104/campos_512_v4
+26/143125/campos_512_v4
+26/143142/campos_512_v4
+26/143206/campos_512_v4
+26/143213/campos_512_v4
+26/143215/campos_512_v4
+26/143253/campos_512_v4
+26/143282/campos_512_v4
+26/143325/campos_512_v4
+26/143334/campos_512_v4
+26/143360/campos_512_v4
+26/143364/campos_512_v4
+26/143375/campos_512_v4
+26/143380/campos_512_v4
+26/143418/campos_512_v4
+26/143423/campos_512_v4
+26/143456/campos_512_v4
+26/143484/campos_512_v4
+26/143496/campos_512_v4
+26/143590/campos_512_v4
+26/143617/campos_512_v4
+26/143663/campos_512_v4
+26/143683/campos_512_v4
+26/143688/campos_512_v4
+26/143690/campos_512_v4
+26/143760/campos_512_v4
+26/143831/campos_512_v4
+26/143832/campos_512_v4
+26/143841/campos_512_v4
+26/143875/campos_512_v4
+26/143899/campos_512_v4
+26/143923/campos_512_v4
+26/143931/campos_512_v4
+26/143942/campos_512_v4
+26/143998/campos_512_v4
+26/144009/campos_512_v4
+26/144033/campos_512_v4
+26/144039/campos_512_v4
+26/144056/campos_512_v4
+26/144089/campos_512_v4
+26/144091/campos_512_v4
+26/144103/campos_512_v4
+26/144128/campos_512_v4
+26/144168/campos_512_v4
+26/144190/campos_512_v4
+26/144211/campos_512_v4
+26/144226/campos_512_v4
+26/144257/campos_512_v4
+26/144265/campos_512_v4
+26/144271/campos_512_v4
+26/144308/campos_512_v4
+26/144326/campos_512_v4
+26/144341/campos_512_v4
+26/144342/campos_512_v4
+26/144368/campos_512_v4
+26/144442/campos_512_v4
+26/144445/campos_512_v4
+26/144458/campos_512_v4
+26/144478/campos_512_v4
+26/144491/campos_512_v4
+26/144500/campos_512_v4
+26/144510/campos_512_v4
+26/144516/campos_512_v4
+26/144593/campos_512_v4
+26/144606/campos_512_v4
+26/144613/campos_512_v4
+26/144635/campos_512_v4
+26/144729/campos_512_v4
+26/144789/campos_512_v4
+26/144825/campos_512_v4
+26/144826/campos_512_v4
+26/144884/campos_512_v4
+26/144902/campos_512_v4
+26/144934/campos_512_v4
+26/144945/campos_512_v4
+26/144952/campos_512_v4
+26/144983/campos_512_v4
+26/144984/campos_512_v4
+26/144985/campos_512_v4
+27/145009/campos_512_v4
+27/145014/campos_512_v4
+27/145033/campos_512_v4
+27/145065/campos_512_v4
+27/145116/campos_512_v4
+27/145117/campos_512_v4
+27/145143/campos_512_v4
+27/145208/campos_512_v4
+27/145242/campos_512_v4
+27/145271/campos_512_v4
+27/145301/campos_512_v4
+27/145304/campos_512_v4
+27/145329/campos_512_v4
+27/145337/campos_512_v4
+27/145353/campos_512_v4
+27/145368/campos_512_v4
+27/145381/campos_512_v4
+27/145406/campos_512_v4
+27/145414/campos_512_v4
+27/145433/campos_512_v4
+27/145439/campos_512_v4
+27/145443/campos_512_v4
+27/145451/campos_512_v4
+27/145552/campos_512_v4
+27/145590/campos_512_v4
+27/145602/campos_512_v4
+27/145662/campos_512_v4
+27/145695/campos_512_v4
+27/145726/campos_512_v4
+27/145743/campos_512_v4
+27/145764/campos_512_v4
+27/145781/campos_512_v4
+27/145902/campos_512_v4
+27/145990/campos_512_v4
+27/146058/campos_512_v4
+27/146079/campos_512_v4
+27/146080/campos_512_v4
+27/146096/campos_512_v4
+27/146104/campos_512_v4
+27/146109/campos_512_v4
+27/146129/campos_512_v4
+27/146139/campos_512_v4
+27/146140/campos_512_v4
+27/146199/campos_512_v4
+27/146267/campos_512_v4
+27/146268/campos_512_v4
+27/146279/campos_512_v4
+27/146288/campos_512_v4
+27/146291/campos_512_v4
+27/146296/campos_512_v4
+27/146300/campos_512_v4
+27/146301/campos_512_v4
+27/146312/campos_512_v4
+27/146314/campos_512_v4
+27/146318/campos_512_v4
+27/146330/campos_512_v4
+27/146339/campos_512_v4
+27/146348/campos_512_v4
+27/146396/campos_512_v4
+27/146399/campos_512_v4
+27/146439/campos_512_v4
+27/146446/campos_512_v4
+27/146447/campos_512_v4
+27/146462/campos_512_v4
+27/146463/campos_512_v4
+27/146473/campos_512_v4
+27/146475/campos_512_v4
+27/146485/campos_512_v4
+27/146503/campos_512_v4
+27/146575/campos_512_v4
+27/146587/campos_512_v4
+27/146593/campos_512_v4
+27/146611/campos_512_v4
+27/146683/campos_512_v4
+27/146711/campos_512_v4
+27/146723/campos_512_v4
+27/146743/campos_512_v4
+27/146747/campos_512_v4
+27/146748/campos_512_v4
+27/146789/campos_512_v4
+27/146887/campos_512_v4
+27/146897/campos_512_v4
+27/146899/campos_512_v4
+27/146936/campos_512_v4
+27/146957/campos_512_v4
+27/146959/campos_512_v4
+27/147039/campos_512_v4
+27/147053/campos_512_v4
+27/147066/campos_512_v4
+27/147099/campos_512_v4
+27/147129/campos_512_v4
+27/147133/campos_512_v4
+27/147187/campos_512_v4
+27/147207/campos_512_v4
+27/147243/campos_512_v4
+27/147271/campos_512_v4
+27/147279/campos_512_v4
+27/147420/campos_512_v4
+27/147457/campos_512_v4
+27/147468/campos_512_v4
+27/147483/campos_512_v4
+27/147504/campos_512_v4
+27/147509/campos_512_v4
+27/147510/campos_512_v4
+27/147528/campos_512_v4
+27/147540/campos_512_v4
+27/147567/campos_512_v4
+27/147574/campos_512_v4
+27/147575/campos_512_v4
+27/147579/campos_512_v4
+27/147598/campos_512_v4
+27/147607/campos_512_v4
+27/147624/campos_512_v4
+27/147652/campos_512_v4
+27/147678/campos_512_v4
+27/147685/campos_512_v4
+27/147702/campos_512_v4
+27/147707/campos_512_v4
+27/147755/campos_512_v4
+27/147801/campos_512_v4
+27/147804/campos_512_v4
+27/147805/campos_512_v4
+27/147821/campos_512_v4
+27/147822/campos_512_v4
+27/147872/campos_512_v4
+27/147873/campos_512_v4
+27/147920/campos_512_v4
+27/147921/campos_512_v4
+27/147976/campos_512_v4
+27/147989/campos_512_v4
+27/148011/campos_512_v4
+27/148036/campos_512_v4
+27/148047/campos_512_v4
+27/148083/campos_512_v4
+27/148084/campos_512_v4
+27/148095/campos_512_v4
+27/148120/campos_512_v4
+27/148127/campos_512_v4
+27/148134/campos_512_v4
+27/148146/campos_512_v4
+27/148154/campos_512_v4
+27/148207/campos_512_v4
+27/148221/campos_512_v4
+27/148248/campos_512_v4
+27/148249/campos_512_v4
+27/148255/campos_512_v4
+27/148274/campos_512_v4
+27/148306/campos_512_v4
+27/148310/campos_512_v4
+27/148337/campos_512_v4
+27/148343/campos_512_v4
+27/148366/campos_512_v4
+27/148369/campos_512_v4
+27/148388/campos_512_v4
+27/148435/campos_512_v4
+27/148445/campos_512_v4
+27/148477/campos_512_v4
+27/148487/campos_512_v4
+27/148509/campos_512_v4
+27/148557/campos_512_v4
+27/148603/campos_512_v4
+27/148622/campos_512_v4
+27/148657/campos_512_v4
+27/148658/campos_512_v4
+27/148691/campos_512_v4
+27/148721/campos_512_v4
+27/148733/campos_512_v4
+27/148759/campos_512_v4
+27/148801/campos_512_v4
+27/148850/campos_512_v4
+27/148884/campos_512_v4
+27/148925/campos_512_v4
+27/148941/campos_512_v4
+27/148943/campos_512_v4
+27/148953/campos_512_v4
+27/148957/campos_512_v4
+27/148965/campos_512_v4
+27/148992/campos_512_v4
+27/149065/campos_512_v4
+27/149073/campos_512_v4
+27/149098/campos_512_v4
+27/149142/campos_512_v4
+27/149155/campos_512_v4
+27/149166/campos_512_v4
+27/149178/campos_512_v4
+27/149196/campos_512_v4
+27/149205/campos_512_v4
+27/149225/campos_512_v4
+27/149283/campos_512_v4
+27/149305/campos_512_v4
+27/149341/campos_512_v4
+27/149362/campos_512_v4
+27/149397/campos_512_v4
+27/149404/campos_512_v4
+27/149450/campos_512_v4
+27/149465/campos_512_v4
+27/149479/campos_512_v4
+27/149493/campos_512_v4
+27/149502/campos_512_v4
+27/149506/campos_512_v4
+27/149535/campos_512_v4
+27/149591/campos_512_v4
+27/149600/campos_512_v4
+27/149612/campos_512_v4
+27/149619/campos_512_v4
+27/149663/campos_512_v4
+27/149682/campos_512_v4
+27/149703/campos_512_v4
+27/149736/campos_512_v4
+27/149745/campos_512_v4
+27/149747/campos_512_v4
+27/149765/campos_512_v4
+27/149810/campos_512_v4
+27/149825/campos_512_v4
+27/149834/campos_512_v4
+27/149866/campos_512_v4
+27/149869/campos_512_v4
+27/149912/campos_512_v4
+27/149920/campos_512_v4
+28/150022/campos_512_v4
+28/150030/campos_512_v4
+28/150044/campos_512_v4
+28/150124/campos_512_v4
+28/150132/campos_512_v4
+28/150196/campos_512_v4
+28/150229/campos_512_v4
+28/150256/campos_512_v4
+28/150312/campos_512_v4
+28/150329/campos_512_v4
+28/150366/campos_512_v4
+28/150372/campos_512_v4
+28/150374/campos_512_v4
+28/150384/campos_512_v4
+28/150407/campos_512_v4
+28/150412/campos_512_v4
+28/150436/campos_512_v4
+28/150472/campos_512_v4
+28/150493/campos_512_v4
+28/150503/campos_512_v4
+28/150520/campos_512_v4
+28/150532/campos_512_v4
+28/150555/campos_512_v4
+28/150622/campos_512_v4
+28/150639/campos_512_v4
+28/150643/campos_512_v4
+28/150669/campos_512_v4
+28/150751/campos_512_v4
+28/150755/campos_512_v4
+28/150756/campos_512_v4
+28/150759/campos_512_v4
+28/150815/campos_512_v4
+28/150823/campos_512_v4
+28/150835/campos_512_v4
+28/150838/campos_512_v4
+28/150855/campos_512_v4
+28/150872/campos_512_v4
+28/150898/campos_512_v4
+28/150900/campos_512_v4
+28/150909/campos_512_v4
+28/150959/campos_512_v4
+28/151003/campos_512_v4
+28/151020/campos_512_v4
+28/151026/campos_512_v4
+28/151027/campos_512_v4
+28/151045/campos_512_v4
+28/151080/campos_512_v4
+28/151093/campos_512_v4
+28/151155/campos_512_v4
+28/151193/campos_512_v4
+28/151215/campos_512_v4
+28/151228/campos_512_v4
+28/151250/campos_512_v4
+28/151269/campos_512_v4
+28/151386/campos_512_v4
+28/151475/campos_512_v4
+28/151504/campos_512_v4
+28/151545/campos_512_v4
+28/151586/campos_512_v4
+28/151617/campos_512_v4
+28/151626/campos_512_v4
+28/151634/campos_512_v4
+28/151698/campos_512_v4
+28/151783/campos_512_v4
+28/151800/campos_512_v4
+28/151814/campos_512_v4
+28/151822/campos_512_v4
+28/151870/campos_512_v4
+28/151901/campos_512_v4
+28/151908/campos_512_v4
+28/151953/campos_512_v4
+28/151956/campos_512_v4
+28/151957/campos_512_v4
+28/151984/campos_512_v4
+28/151999/campos_512_v4
+28/152005/campos_512_v4
+28/152017/campos_512_v4
+28/152028/campos_512_v4
+28/152036/campos_512_v4
+28/152070/campos_512_v4
+28/152099/campos_512_v4
+28/152109/campos_512_v4
+28/152113/campos_512_v4
+28/152142/campos_512_v4
+28/152176/campos_512_v4
+28/152248/campos_512_v4
+28/152258/campos_512_v4
+28/152275/campos_512_v4
+28/152285/campos_512_v4
+28/152304/campos_512_v4
+28/152331/campos_512_v4
+28/152335/campos_512_v4
+28/152349/campos_512_v4
+28/152350/campos_512_v4
+28/152377/campos_512_v4
+28/152389/campos_512_v4
+28/152408/campos_512_v4
+28/152416/campos_512_v4
+28/152418/campos_512_v4
+28/152565/campos_512_v4
+28/152571/campos_512_v4
+28/152666/campos_512_v4
+28/152709/campos_512_v4
+28/152726/campos_512_v4
+28/152752/campos_512_v4
+28/152773/campos_512_v4
+28/152774/campos_512_v4
+28/152792/campos_512_v4
+28/152813/campos_512_v4
+28/152940/campos_512_v4
+28/152949/campos_512_v4
+28/152960/campos_512_v4
+28/152988/campos_512_v4
+28/153001/campos_512_v4
+28/153013/campos_512_v4
+28/153034/campos_512_v4
+28/153041/campos_512_v4
+28/153042/campos_512_v4
+28/153057/campos_512_v4
+28/153102/campos_512_v4
+28/153112/campos_512_v4
+28/153268/campos_512_v4
+28/153297/campos_512_v4
+28/153314/campos_512_v4
+28/153318/campos_512_v4
+28/153330/campos_512_v4
+28/153340/campos_512_v4
+28/153370/campos_512_v4
+28/153390/campos_512_v4
+28/153393/campos_512_v4
+28/153437/campos_512_v4
+28/153464/campos_512_v4
+28/153548/campos_512_v4
+28/153570/campos_512_v4
+28/153614/campos_512_v4
+28/153667/campos_512_v4
+28/153754/campos_512_v4
+28/153766/campos_512_v4
+28/153767/campos_512_v4
+28/153817/campos_512_v4
+28/153839/campos_512_v4
+28/153851/campos_512_v4
+28/153885/campos_512_v4
+28/153904/campos_512_v4
+28/153992/campos_512_v4
+28/154004/campos_512_v4
+28/154005/campos_512_v4
+28/154035/campos_512_v4
+28/154087/campos_512_v4
+28/154129/campos_512_v4
+28/154132/campos_512_v4
+28/154134/campos_512_v4
+28/154199/campos_512_v4
+28/154214/campos_512_v4
+28/154253/campos_512_v4
+28/154275/campos_512_v4
+28/154311/campos_512_v4
+28/154328/campos_512_v4
+28/154343/campos_512_v4
+28/154366/campos_512_v4
+28/154372/campos_512_v4
+28/154391/campos_512_v4
+28/154435/campos_512_v4
+28/154453/campos_512_v4
+28/154462/campos_512_v4
+28/154476/campos_512_v4
+28/154491/campos_512_v4
+28/154511/campos_512_v4
+28/154520/campos_512_v4
+28/154526/campos_512_v4
+28/154535/campos_512_v4
+28/154548/campos_512_v4
+28/154618/campos_512_v4
+28/154627/campos_512_v4
+28/154648/campos_512_v4
+28/154672/campos_512_v4
+28/154681/campos_512_v4
+28/154733/campos_512_v4
+28/154744/campos_512_v4
+28/154770/campos_512_v4
+28/154789/campos_512_v4
+28/154830/campos_512_v4
+28/154897/campos_512_v4
+28/154903/campos_512_v4
+28/154981/campos_512_v4
+29/155010/campos_512_v4
+29/155013/campos_512_v4
+29/155037/campos_512_v4
+29/155066/campos_512_v4
+29/155070/campos_512_v4
+29/155092/campos_512_v4
+29/155100/campos_512_v4
+29/155122/campos_512_v4
+29/155141/campos_512_v4
+29/155142/campos_512_v4
+29/155162/campos_512_v4
+29/155175/campos_512_v4
+29/155180/campos_512_v4
+29/155192/campos_512_v4
+29/155196/campos_512_v4
+29/155214/campos_512_v4
+29/155241/campos_512_v4
+29/155262/campos_512_v4
+29/155269/campos_512_v4
+29/155274/campos_512_v4
+29/155275/campos_512_v4
+29/155277/campos_512_v4
+29/155283/campos_512_v4
+29/155292/campos_512_v4
+29/155312/campos_512_v4
+29/155338/campos_512_v4
+29/155402/campos_512_v4
+29/155517/campos_512_v4
+29/155569/campos_512_v4
+29/155629/campos_512_v4
+29/155663/campos_512_v4
+29/155704/campos_512_v4
+29/155761/campos_512_v4
+29/155775/campos_512_v4
+29/155790/campos_512_v4
+29/155798/campos_512_v4
+29/155799/campos_512_v4
+29/155810/campos_512_v4
+29/155811/campos_512_v4
+29/155818/campos_512_v4
+29/155850/campos_512_v4
+29/155871/campos_512_v4
+29/155881/campos_512_v4
+29/155884/campos_512_v4
+29/155888/campos_512_v4
+29/155907/campos_512_v4
+29/155929/campos_512_v4
+29/155946/campos_512_v4
+29/155955/campos_512_v4
+29/155987/campos_512_v4
+29/155992/campos_512_v4
+29/156012/campos_512_v4
+29/156041/campos_512_v4
+29/156054/campos_512_v4
+29/156063/campos_512_v4
+29/156097/campos_512_v4
+29/156161/campos_512_v4
+29/156196/campos_512_v4
+29/156208/campos_512_v4
+29/156244/campos_512_v4
+29/156315/campos_512_v4
+29/156378/campos_512_v4
+29/156400/campos_512_v4
+29/156440/campos_512_v4
+29/156527/campos_512_v4
+29/156541/campos_512_v4
+29/156604/campos_512_v4
+29/156611/campos_512_v4
+29/156616/campos_512_v4
+29/156621/campos_512_v4
+29/156623/campos_512_v4
+29/156667/campos_512_v4
+29/156669/campos_512_v4
+29/156691/campos_512_v4
+29/156703/campos_512_v4
+29/156713/campos_512_v4
+29/156718/campos_512_v4
+29/156720/campos_512_v4
+29/156751/campos_512_v4
+29/156768/campos_512_v4
+29/156770/campos_512_v4
+29/156793/campos_512_v4
+29/156796/campos_512_v4
+29/156802/campos_512_v4
+29/156803/campos_512_v4
+29/156813/campos_512_v4
+29/156837/campos_512_v4
+29/156843/campos_512_v4
+29/156845/campos_512_v4
+29/156907/campos_512_v4
+29/156908/campos_512_v4
+29/156920/campos_512_v4
+29/156951/campos_512_v4
+29/156969/campos_512_v4
+29/156997/campos_512_v4
+29/157008/campos_512_v4
+29/157015/campos_512_v4
+29/157027/campos_512_v4
+29/157044/campos_512_v4
+29/157056/campos_512_v4
+29/157058/campos_512_v4
+29/157092/campos_512_v4
+29/157112/campos_512_v4
+29/157117/campos_512_v4
+29/157127/campos_512_v4
+29/157167/campos_512_v4
+29/157183/campos_512_v4
+29/157184/campos_512_v4
+29/157198/campos_512_v4
+29/157227/campos_512_v4
+29/157236/campos_512_v4
+29/157237/campos_512_v4
+29/157247/campos_512_v4
+29/157250/campos_512_v4
+29/157268/campos_512_v4
+29/157296/campos_512_v4
+29/157302/campos_512_v4
+29/157306/campos_512_v4
+29/157322/campos_512_v4
+29/157344/campos_512_v4
+29/157348/campos_512_v4
+29/157363/campos_512_v4
+29/157372/campos_512_v4
+29/157401/campos_512_v4
+29/157414/campos_512_v4
+29/157424/campos_512_v4
+29/157489/campos_512_v4
+29/157496/campos_512_v4
+29/157513/campos_512_v4
+29/157555/campos_512_v4
+29/157577/campos_512_v4
+29/157606/campos_512_v4
+29/157612/campos_512_v4
+29/157622/campos_512_v4
+29/157641/campos_512_v4
+29/157674/campos_512_v4
+29/157687/campos_512_v4
+29/157734/campos_512_v4
+29/157745/campos_512_v4
+29/157763/campos_512_v4
+29/157776/campos_512_v4
+29/157795/campos_512_v4
+29/157797/campos_512_v4
+29/157806/campos_512_v4
+29/157837/campos_512_v4
+29/157845/campos_512_v4
+29/157857/campos_512_v4
+29/157859/campos_512_v4
+29/157860/campos_512_v4
+29/157877/campos_512_v4
+29/157884/campos_512_v4
+29/157893/campos_512_v4
+29/157896/campos_512_v4
+29/157913/campos_512_v4
+29/158006/campos_512_v4
+29/158012/campos_512_v4
+29/158024/campos_512_v4
+29/158047/campos_512_v4
+29/158071/campos_512_v4
+29/158079/campos_512_v4
+29/158089/campos_512_v4
+29/158096/campos_512_v4
+29/158127/campos_512_v4
+29/158131/campos_512_v4
+29/158137/campos_512_v4
+29/158144/campos_512_v4
+29/158157/campos_512_v4
+29/158201/campos_512_v4
+29/158219/campos_512_v4
+29/158220/campos_512_v4
+29/158237/campos_512_v4
+29/158257/campos_512_v4
+29/158314/campos_512_v4
+29/158363/campos_512_v4
+29/158372/campos_512_v4
+29/158398/campos_512_v4
+29/158465/campos_512_v4
+29/158467/campos_512_v4
+29/158478/campos_512_v4
+29/158528/campos_512_v4
+29/158588/campos_512_v4
+29/158598/campos_512_v4
+29/158603/campos_512_v4
+29/158610/campos_512_v4
+29/158670/campos_512_v4
+29/158671/campos_512_v4
+29/158684/campos_512_v4
+29/158686/campos_512_v4
+29/158715/campos_512_v4
+29/158730/campos_512_v4
+29/158743/campos_512_v4
+29/158772/campos_512_v4
+29/158782/campos_512_v4
+29/158860/campos_512_v4
+29/158875/campos_512_v4
+29/158889/campos_512_v4
+29/158893/campos_512_v4
+29/158895/campos_512_v4
+29/158908/campos_512_v4
+29/158925/campos_512_v4
+29/158953/campos_512_v4
+29/158955/campos_512_v4
+29/158995/campos_512_v4
+29/158998/campos_512_v4
+29/159005/campos_512_v4
+29/159047/campos_512_v4
+29/159060/campos_512_v4
+29/159152/campos_512_v4
+29/159167/campos_512_v4
+29/159171/campos_512_v4
+29/159188/campos_512_v4
+29/159203/campos_512_v4
+29/159226/campos_512_v4
+29/159227/campos_512_v4
+29/159230/campos_512_v4
+29/159242/campos_512_v4
+29/159320/campos_512_v4
+29/159336/campos_512_v4
+29/159357/campos_512_v4
+29/159359/campos_512_v4
+29/159367/campos_512_v4
+29/159439/campos_512_v4
+29/159441/campos_512_v4
+29/159488/campos_512_v4
+29/159498/campos_512_v4
+29/159502/campos_512_v4
+29/159506/campos_512_v4
+29/159516/campos_512_v4
+29/159538/campos_512_v4
+29/159546/campos_512_v4
+29/159564/campos_512_v4
+29/159588/campos_512_v4
+29/159594/campos_512_v4
+29/159600/campos_512_v4
+29/159603/campos_512_v4
+29/159617/campos_512_v4
+29/159628/campos_512_v4
+29/159631/campos_512_v4
+29/159635/campos_512_v4
+29/159639/campos_512_v4
+29/159646/campos_512_v4
+29/159652/campos_512_v4
+29/159657/campos_512_v4
+29/159666/campos_512_v4
+29/159678/campos_512_v4
+29/159680/campos_512_v4
+29/159688/campos_512_v4
+29/159692/campos_512_v4
+29/159699/campos_512_v4
+29/159703/campos_512_v4
+29/159729/campos_512_v4
+29/159753/campos_512_v4
+29/159756/campos_512_v4
+29/159794/campos_512_v4
+29/159795/campos_512_v4
+29/159799/campos_512_v4
+29/159856/campos_512_v4
+29/159885/campos_512_v4
+29/159944/campos_512_v4
+29/159947/campos_512_v4
+29/159954/campos_512_v4
+30/160029/campos_512_v4
+30/160064/campos_512_v4
+30/160112/campos_512_v4
+30/160136/campos_512_v4
+30/160165/campos_512_v4
+30/160188/campos_512_v4
+30/160199/campos_512_v4
+30/160239/campos_512_v4
+30/160240/campos_512_v4
+30/160245/campos_512_v4
+30/160302/campos_512_v4
+30/160322/campos_512_v4
+30/160378/campos_512_v4
+30/160400/campos_512_v4
+30/160453/campos_512_v4
+30/160465/campos_512_v4
+30/160514/campos_512_v4
+30/160515/campos_512_v4
+30/160523/campos_512_v4
+30/160529/campos_512_v4
+30/160548/campos_512_v4
+30/160560/campos_512_v4
+30/160624/campos_512_v4
+30/160675/campos_512_v4
+30/160692/campos_512_v4
+30/160715/campos_512_v4
+30/160751/campos_512_v4
+30/160754/campos_512_v4
+30/160757/campos_512_v4
+30/160801/campos_512_v4
+30/160817/campos_512_v4
+30/160881/campos_512_v4
+30/160933/campos_512_v4
+30/160952/campos_512_v4
+30/160969/campos_512_v4
+30/160993/campos_512_v4
+30/161006/campos_512_v4
+30/161007/campos_512_v4
+30/161014/campos_512_v4
+30/161051/campos_512_v4
+30/161058/campos_512_v4
+30/161094/campos_512_v4
+30/161099/campos_512_v4
+30/161100/campos_512_v4
+30/161139/campos_512_v4
+30/161143/campos_512_v4
+30/161155/campos_512_v4
+30/161174/campos_512_v4
+30/161176/campos_512_v4
+30/161177/campos_512_v4
+30/161194/campos_512_v4
+30/161196/campos_512_v4
+30/161248/campos_512_v4
+30/161269/campos_512_v4
+30/161368/campos_512_v4
+30/161396/campos_512_v4
+30/161436/campos_512_v4
+30/161450/campos_512_v4
+30/161456/campos_512_v4
+30/161472/campos_512_v4
+30/161539/campos_512_v4
+30/161605/campos_512_v4
+30/161797/campos_512_v4
+30/161805/campos_512_v4
+30/161806/campos_512_v4
+30/161812/campos_512_v4
+30/161818/campos_512_v4
+30/161825/campos_512_v4
+30/161861/campos_512_v4
+30/161915/campos_512_v4
+30/161918/campos_512_v4
+30/161937/campos_512_v4
+30/161938/campos_512_v4
+30/162009/campos_512_v4
+30/162027/campos_512_v4
+30/162057/campos_512_v4
+30/162111/campos_512_v4
+30/162113/campos_512_v4
+30/162136/campos_512_v4
+30/162159/campos_512_v4
+30/162194/campos_512_v4
+30/162217/campos_512_v4
+30/162252/campos_512_v4
+30/162292/campos_512_v4
+30/162294/campos_512_v4
+30/162295/campos_512_v4
+30/162323/campos_512_v4
+30/162372/campos_512_v4
+30/162426/campos_512_v4
+30/162431/campos_512_v4
+30/162433/campos_512_v4
+30/162496/campos_512_v4
+30/162511/campos_512_v4
+30/162578/campos_512_v4
+30/162601/campos_512_v4
+30/162608/campos_512_v4
+30/162621/campos_512_v4
+30/162687/campos_512_v4
+30/162724/campos_512_v4
+30/162738/campos_512_v4
+30/162748/campos_512_v4
+30/162757/campos_512_v4
+30/162764/campos_512_v4
+30/162766/campos_512_v4
+30/162780/campos_512_v4
+30/162787/campos_512_v4
+30/162790/campos_512_v4
+30/162822/campos_512_v4
+30/162836/campos_512_v4
+30/162840/campos_512_v4
+30/162852/campos_512_v4
+30/162882/campos_512_v4
+30/162891/campos_512_v4
+30/162903/campos_512_v4
+30/162946/campos_512_v4
+30/162957/campos_512_v4
+30/162961/campos_512_v4
+30/162990/campos_512_v4
+30/162994/campos_512_v4
+30/163000/campos_512_v4
+30/163012/campos_512_v4
+30/163025/campos_512_v4
+30/163043/campos_512_v4
+30/163070/campos_512_v4
+30/163177/campos_512_v4
+30/163231/campos_512_v4
+30/163233/campos_512_v4
+30/163342/campos_512_v4
+30/163354/campos_512_v4
+30/163428/campos_512_v4
+30/163451/campos_512_v4
+30/163520/campos_512_v4
+30/163595/campos_512_v4
+30/163618/campos_512_v4
+30/163620/campos_512_v4
+30/163667/campos_512_v4
+30/163681/campos_512_v4
+30/163700/campos_512_v4
+30/163714/campos_512_v4
+30/163761/campos_512_v4
+30/163790/campos_512_v4
+30/163837/campos_512_v4
+30/163857/campos_512_v4
+30/163876/campos_512_v4
+30/163911/campos_512_v4
+30/163959/campos_512_v4
+30/163991/campos_512_v4
+30/163995/campos_512_v4
+30/163996/campos_512_v4
+30/164011/campos_512_v4
+30/164012/campos_512_v4
+30/164083/campos_512_v4
+30/164099/campos_512_v4
+30/164106/campos_512_v4
+30/164113/campos_512_v4
+30/164155/campos_512_v4
+30/164202/campos_512_v4
+30/164223/campos_512_v4
+30/164268/campos_512_v4
+30/164275/campos_512_v4
+30/164328/campos_512_v4
+30/164390/campos_512_v4
+30/164407/campos_512_v4
+30/164477/campos_512_v4
+30/164481/campos_512_v4
+30/164485/campos_512_v4
+30/164499/campos_512_v4
+30/164536/campos_512_v4
+30/164570/campos_512_v4
+30/164578/campos_512_v4
+30/164586/campos_512_v4
+30/164619/campos_512_v4
+30/164631/campos_512_v4
+30/164636/campos_512_v4
+30/164668/campos_512_v4
+30/164679/campos_512_v4
+30/164694/campos_512_v4
+30/164703/campos_512_v4
+30/164704/campos_512_v4
+30/164794/campos_512_v4
+30/164903/campos_512_v4
+30/164906/campos_512_v4
+30/164916/campos_512_v4
+30/164999/campos_512_v4
+31/165034/campos_512_v4
+31/165036/campos_512_v4
+31/165064/campos_512_v4
+31/165133/campos_512_v4
+31/165138/campos_512_v4
+31/165142/campos_512_v4
+31/165150/campos_512_v4
+31/165174/campos_512_v4
+31/165179/campos_512_v4
+31/165212/campos_512_v4
+31/165213/campos_512_v4
+31/165232/campos_512_v4
+31/165236/campos_512_v4
+31/165257/campos_512_v4
+31/165260/campos_512_v4
+31/165300/campos_512_v4
+31/165301/campos_512_v4
+31/165309/campos_512_v4
+31/165332/campos_512_v4
+31/165339/campos_512_v4
+31/165367/campos_512_v4
+31/165385/campos_512_v4
+31/165394/campos_512_v4
+31/165397/campos_512_v4
+31/165418/campos_512_v4
+31/165424/campos_512_v4
+31/165443/campos_512_v4
+31/165487/campos_512_v4
+31/165501/campos_512_v4
+31/165516/campos_512_v4
+31/165535/campos_512_v4
+31/165580/campos_512_v4
+31/165590/campos_512_v4
+31/165596/campos_512_v4
+31/165609/campos_512_v4
+31/165637/campos_512_v4
+31/165643/campos_512_v4
+31/165653/campos_512_v4
+31/165659/campos_512_v4
+31/165661/campos_512_v4
+31/165674/campos_512_v4
+31/165684/campos_512_v4
+31/165715/campos_512_v4
+31/165726/campos_512_v4
+31/165730/campos_512_v4
+31/165734/campos_512_v4
+31/165745/campos_512_v4
+31/165749/campos_512_v4
+31/165752/campos_512_v4
+31/165760/campos_512_v4
+31/165762/campos_512_v4
+31/165763/campos_512_v4
+31/165777/campos_512_v4
+31/165788/campos_512_v4
+31/165890/campos_512_v4
+31/165902/campos_512_v4
+31/165904/campos_512_v4
+31/165946/campos_512_v4
+31/165957/campos_512_v4
+31/165962/campos_512_v4
+31/165981/campos_512_v4
+31/165990/campos_512_v4
+31/166012/campos_512_v4
+31/166039/campos_512_v4
+31/166087/campos_512_v4
+31/166124/campos_512_v4
+31/166128/campos_512_v4
+31/166140/campos_512_v4
+31/166142/campos_512_v4
+31/166148/campos_512_v4
+31/166152/campos_512_v4
+31/166159/campos_512_v4
+31/166163/campos_512_v4
+31/166168/campos_512_v4
+31/166169/campos_512_v4
+31/166181/campos_512_v4
+31/166187/campos_512_v4
+31/166209/campos_512_v4
+31/166225/campos_512_v4
+31/166252/campos_512_v4
+31/166258/campos_512_v4
+31/166321/campos_512_v4
+31/166360/campos_512_v4
+31/166373/campos_512_v4
+31/166437/campos_512_v4
+31/166464/campos_512_v4
+31/166505/campos_512_v4
+31/166523/campos_512_v4
+31/166557/campos_512_v4
+31/166569/campos_512_v4
+31/166603/campos_512_v4
+31/166617/campos_512_v4
+31/166634/campos_512_v4
+31/166646/campos_512_v4
+31/166660/campos_512_v4
+31/166662/campos_512_v4
+31/166687/campos_512_v4
+31/166690/campos_512_v4
+31/166699/campos_512_v4
+31/166710/campos_512_v4
+31/166712/campos_512_v4
+31/166714/campos_512_v4
+31/166717/campos_512_v4
+31/166718/campos_512_v4
+31/166719/campos_512_v4
+31/166768/campos_512_v4
+31/166774/campos_512_v4
+31/166782/campos_512_v4
+31/166787/campos_512_v4
+31/166813/campos_512_v4
+31/166822/campos_512_v4
+31/166866/campos_512_v4
+31/166870/campos_512_v4
+31/166909/campos_512_v4
+31/166947/campos_512_v4
+31/166952/campos_512_v4
+31/166957/campos_512_v4
+31/166969/campos_512_v4
+31/167019/campos_512_v4
+31/167047/campos_512_v4
+31/167063/campos_512_v4
+31/167085/campos_512_v4
+31/167147/campos_512_v4
+31/167162/campos_512_v4
+31/167163/campos_512_v4
+31/167189/campos_512_v4
+31/167202/campos_512_v4
+31/167219/campos_512_v4
+31/167247/campos_512_v4
+31/167263/campos_512_v4
+31/167292/campos_512_v4
+31/167317/campos_512_v4
+31/167331/campos_512_v4
+31/167362/campos_512_v4
+31/167409/campos_512_v4
+31/167436/campos_512_v4
+31/167480/campos_512_v4
+31/167525/campos_512_v4
+31/167536/campos_512_v4
+31/167585/campos_512_v4
+31/167605/campos_512_v4
+31/167623/campos_512_v4
+31/167643/campos_512_v4
+31/167655/campos_512_v4
+31/167661/campos_512_v4
+31/167678/campos_512_v4
+31/167689/campos_512_v4
+31/167707/campos_512_v4
+31/167708/campos_512_v4
+31/167734/campos_512_v4
+31/167741/campos_512_v4
+31/167768/campos_512_v4
+31/167770/campos_512_v4
+31/167822/campos_512_v4
+31/167830/campos_512_v4
+31/167853/campos_512_v4
+31/167871/campos_512_v4
+31/167878/campos_512_v4
+31/167895/campos_512_v4
+31/167907/campos_512_v4
+31/167911/campos_512_v4
+31/167917/campos_512_v4
+31/167921/campos_512_v4
+31/167947/campos_512_v4
+31/167953/campos_512_v4
+31/167985/campos_512_v4
+31/167991/campos_512_v4
+31/167995/campos_512_v4
+31/168007/campos_512_v4
+31/168021/campos_512_v4
+31/168046/campos_512_v4
+31/168064/campos_512_v4
+31/168066/campos_512_v4
+31/168078/campos_512_v4
+31/168082/campos_512_v4
+31/168090/campos_512_v4
+31/168127/campos_512_v4
+31/168136/campos_512_v4
+31/168155/campos_512_v4
+31/168191/campos_512_v4
+31/168215/campos_512_v4
+31/168231/campos_512_v4
+31/168258/campos_512_v4
+31/168259/campos_512_v4
+31/168260/campos_512_v4
+31/168265/campos_512_v4
+31/168296/campos_512_v4
+31/168318/campos_512_v4
+31/168319/campos_512_v4
+31/168347/campos_512_v4
+31/168367/campos_512_v4
+31/168381/campos_512_v4
+31/168392/campos_512_v4
+31/168400/campos_512_v4
+31/168407/campos_512_v4
+31/168462/campos_512_v4
+31/168465/campos_512_v4
+31/168475/campos_512_v4
+31/168477/campos_512_v4
+31/168498/campos_512_v4
+31/168535/campos_512_v4
+31/168559/campos_512_v4
+31/168564/campos_512_v4
+31/168576/campos_512_v4
+31/168608/campos_512_v4
+31/168611/campos_512_v4
+31/168644/campos_512_v4
+31/168647/campos_512_v4
+31/168652/campos_512_v4
+31/168691/campos_512_v4
+31/168692/campos_512_v4
+31/168700/campos_512_v4
+31/168743/campos_512_v4
+31/168751/campos_512_v4
+31/168768/campos_512_v4
+31/168776/campos_512_v4
+31/168826/campos_512_v4
+31/168861/campos_512_v4
+31/168937/campos_512_v4
+31/168951/campos_512_v4
+31/168967/campos_512_v4
+31/168994/campos_512_v4
+31/169026/campos_512_v4
+31/169042/campos_512_v4
+31/169052/campos_512_v4
+31/169057/campos_512_v4
+31/169058/campos_512_v4
+31/169061/campos_512_v4
+31/169087/campos_512_v4
+31/169091/campos_512_v4
+31/169092/campos_512_v4
+31/169113/campos_512_v4
+31/169134/campos_512_v4
+31/169151/campos_512_v4
+31/169186/campos_512_v4
+31/169194/campos_512_v4
+31/169202/campos_512_v4
+31/169238/campos_512_v4
+31/169244/campos_512_v4
+31/169249/campos_512_v4
+31/169251/campos_512_v4
+31/169253/campos_512_v4
+31/169257/campos_512_v4
+31/169265/campos_512_v4
+31/169274/campos_512_v4
+31/169289/campos_512_v4
+31/169305/campos_512_v4
+31/169314/campos_512_v4
+31/169317/campos_512_v4
+31/169318/campos_512_v4
+31/169320/campos_512_v4
+31/169322/campos_512_v4
+31/169323/campos_512_v4
+31/169345/campos_512_v4
+31/169378/campos_512_v4
+31/169387/campos_512_v4
+31/169391/campos_512_v4
+31/169421/campos_512_v4
+31/169427/campos_512_v4
+31/169447/campos_512_v4
+31/169485/campos_512_v4
+31/169505/campos_512_v4
+31/169524/campos_512_v4
+31/169528/campos_512_v4
+31/169529/campos_512_v4
+31/169575/campos_512_v4
+31/169627/campos_512_v4
+31/169639/campos_512_v4
+31/169662/campos_512_v4
+31/169676/campos_512_v4
+31/169691/campos_512_v4
+31/169705/campos_512_v4
+31/169730/campos_512_v4
+31/169774/campos_512_v4
+31/169775/campos_512_v4
+31/169777/campos_512_v4
+31/169779/campos_512_v4
+31/169792/campos_512_v4
+31/169795/campos_512_v4
+31/169801/campos_512_v4
+31/169814/campos_512_v4
+31/169817/campos_512_v4
+31/169823/campos_512_v4
+31/169831/campos_512_v4
+31/169857/campos_512_v4
+31/169871/campos_512_v4
+31/169873/campos_512_v4
+31/169889/campos_512_v4
+31/169896/campos_512_v4
+31/169911/campos_512_v4
+31/169951/campos_512_v4
+31/169961/campos_512_v4
+31/169968/campos_512_v4
+32/170030/campos_512_v4
+32/170082/campos_512_v4
+32/170142/campos_512_v4
+32/170161/campos_512_v4
+32/170220/campos_512_v4
+32/170241/campos_512_v4
+32/170253/campos_512_v4
+32/170268/campos_512_v4
+32/170302/campos_512_v4
+32/170304/campos_512_v4
+32/170319/campos_512_v4
+32/170360/campos_512_v4
+32/170401/campos_512_v4
+32/170404/campos_512_v4
+32/170415/campos_512_v4
+32/170421/campos_512_v4
+32/170425/campos_512_v4
+32/170458/campos_512_v4
+32/170498/campos_512_v4
+32/170505/campos_512_v4
+32/170532/campos_512_v4
+32/170534/campos_512_v4
+32/170590/campos_512_v4
+32/170624/campos_512_v4
+32/170652/campos_512_v4
+32/170669/campos_512_v4
+32/170674/campos_512_v4
+32/170690/campos_512_v4
+32/170708/campos_512_v4
+32/170719/campos_512_v4
+32/170721/campos_512_v4
+32/170747/campos_512_v4
+32/170755/campos_512_v4
+32/170758/campos_512_v4
+32/170767/campos_512_v4
+32/170771/campos_512_v4
+32/170790/campos_512_v4
+32/170795/campos_512_v4
+32/170796/campos_512_v4
+32/170922/campos_512_v4
+32/170944/campos_512_v4
+32/170956/campos_512_v4
+32/170973/campos_512_v4
+32/170977/campos_512_v4
+32/170990/campos_512_v4
+32/170995/campos_512_v4
+32/171018/campos_512_v4
+32/171061/campos_512_v4
+32/171067/campos_512_v4
+32/171103/campos_512_v4
+32/171108/campos_512_v4
+32/171132/campos_512_v4
+32/171148/campos_512_v4
+32/171171/campos_512_v4
+32/171186/campos_512_v4
+32/171203/campos_512_v4
+32/171213/campos_512_v4
+32/171226/campos_512_v4
+32/171251/campos_512_v4
+32/171455/campos_512_v4
+32/171488/campos_512_v4
+32/171580/campos_512_v4
+32/171590/campos_512_v4
+32/171593/campos_512_v4
+32/171599/campos_512_v4
+32/171647/campos_512_v4
+32/171657/campos_512_v4
+32/171665/campos_512_v4
+32/171704/campos_512_v4
+32/171742/campos_512_v4
+32/171780/campos_512_v4
+32/171789/campos_512_v4
+32/171845/campos_512_v4
+32/171848/campos_512_v4
+32/171850/campos_512_v4
+32/171854/campos_512_v4
+32/171872/campos_512_v4
+32/171987/campos_512_v4
+32/172028/campos_512_v4
+32/172045/campos_512_v4
+32/172058/campos_512_v4
+32/172077/campos_512_v4
+32/172089/campos_512_v4
+32/172091/campos_512_v4
+32/172160/campos_512_v4
+32/172175/campos_512_v4
+32/172197/campos_512_v4
+32/172212/campos_512_v4
+32/172297/campos_512_v4
+32/172313/campos_512_v4
+32/172323/campos_512_v4
+32/172430/campos_512_v4
+32/172473/campos_512_v4
+32/172487/campos_512_v4
+32/172495/campos_512_v4
+32/172502/campos_512_v4
+32/172514/campos_512_v4
+32/172596/campos_512_v4
+32/172622/campos_512_v4
+32/172635/campos_512_v4
+32/172641/campos_512_v4
+32/172658/campos_512_v4
+32/172664/campos_512_v4
+32/172670/campos_512_v4
+32/172692/campos_512_v4
+32/172716/campos_512_v4
+32/172803/campos_512_v4
+32/172894/campos_512_v4
+32/172948/campos_512_v4
+32/172962/campos_512_v4
+32/172977/campos_512_v4
+32/173027/campos_512_v4
+32/173029/campos_512_v4
+32/173055/campos_512_v4
+32/173084/campos_512_v4
+32/173085/campos_512_v4
+32/173128/campos_512_v4
+32/173157/campos_512_v4
+32/173183/campos_512_v4
+32/173225/campos_512_v4
+32/173261/campos_512_v4
+32/173264/campos_512_v4
+32/173279/campos_512_v4
+32/173285/campos_512_v4
+32/173295/campos_512_v4
+32/173313/campos_512_v4
+32/173316/campos_512_v4
+32/173358/campos_512_v4
+32/173359/campos_512_v4
+32/173441/campos_512_v4
+32/173463/campos_512_v4
+32/173468/campos_512_v4
+32/173472/campos_512_v4
+32/173485/campos_512_v4
+32/173500/campos_512_v4
+32/173512/campos_512_v4
+32/173533/campos_512_v4
+32/173548/campos_512_v4
+32/173560/campos_512_v4
+32/173584/campos_512_v4
+32/173601/campos_512_v4
+32/173605/campos_512_v4
+32/173615/campos_512_v4
+32/173638/campos_512_v4
+32/173660/campos_512_v4
+32/173666/campos_512_v4
+32/173672/campos_512_v4
+32/173686/campos_512_v4
+32/173793/campos_512_v4
+32/173802/campos_512_v4
+32/173858/campos_512_v4
+32/173913/campos_512_v4
+32/173920/campos_512_v4
+32/173932/campos_512_v4
+32/173963/campos_512_v4
+32/173990/campos_512_v4
+32/174023/campos_512_v4
+32/174029/campos_512_v4
+32/174038/campos_512_v4
+32/174041/campos_512_v4
+32/174058/campos_512_v4
+32/174074/campos_512_v4
+32/174100/campos_512_v4
+32/174131/campos_512_v4
+32/174140/campos_512_v4
+32/174192/campos_512_v4
+32/174194/campos_512_v4
+32/174213/campos_512_v4
+32/174221/campos_512_v4
+32/174227/campos_512_v4
+32/174247/campos_512_v4
+32/174260/campos_512_v4
+32/174273/campos_512_v4
+32/174303/campos_512_v4
+32/174355/campos_512_v4
+32/174391/campos_512_v4
+32/174395/campos_512_v4
+32/174443/campos_512_v4
+32/174457/campos_512_v4
+32/174502/campos_512_v4
+32/174529/campos_512_v4
+32/174537/campos_512_v4
+32/174541/campos_512_v4
+32/174550/campos_512_v4
+32/174555/campos_512_v4
+32/174589/campos_512_v4
+32/174686/campos_512_v4
+32/174709/campos_512_v4
+32/174740/campos_512_v4
+32/174752/campos_512_v4
+32/174764/campos_512_v4
+32/174781/campos_512_v4
+32/174795/campos_512_v4
+32/174827/campos_512_v4
+32/174831/campos_512_v4
+32/174864/campos_512_v4
+32/174882/campos_512_v4
+32/174934/campos_512_v4
+32/174954/campos_512_v4
+32/174992/campos_512_v4
+33/175010/campos_512_v4
+33/175054/campos_512_v4
+33/175081/campos_512_v4
+33/175088/campos_512_v4
+33/175126/campos_512_v4
+33/175128/campos_512_v4
+33/175129/campos_512_v4
+33/175175/campos_512_v4
+33/175204/campos_512_v4
+33/175205/campos_512_v4
+33/175236/campos_512_v4
+33/175246/campos_512_v4
+33/175269/campos_512_v4
+33/175277/campos_512_v4
+33/175329/campos_512_v4
+33/175370/campos_512_v4
+33/175402/campos_512_v4
+33/175406/campos_512_v4
+33/175407/campos_512_v4
+33/175409/campos_512_v4
+33/175411/campos_512_v4
+33/175446/campos_512_v4
+33/175459/campos_512_v4
+33/175460/campos_512_v4
+33/175466/campos_512_v4
+33/175490/campos_512_v4
+33/175510/campos_512_v4
+33/175513/campos_512_v4
+33/175519/campos_512_v4
+33/175521/campos_512_v4
+33/175522/campos_512_v4
+33/175544/campos_512_v4
+33/175567/campos_512_v4
+33/175599/campos_512_v4
+33/175602/campos_512_v4
+33/175603/campos_512_v4
+33/175604/campos_512_v4
+33/175612/campos_512_v4
+33/175627/campos_512_v4
+33/175632/campos_512_v4
+33/175644/campos_512_v4
+33/175660/campos_512_v4
+33/175677/campos_512_v4
+33/175706/campos_512_v4
+33/175720/campos_512_v4
+33/175724/campos_512_v4
+33/175726/campos_512_v4
+33/175750/campos_512_v4
+33/175770/campos_512_v4
+33/175773/campos_512_v4
+33/175801/campos_512_v4
+33/175817/campos_512_v4
+33/175847/campos_512_v4
+33/175858/campos_512_v4
+33/175882/campos_512_v4
+33/175891/campos_512_v4
+33/175909/campos_512_v4
+33/175923/campos_512_v4
+33/175939/campos_512_v4
+33/175953/campos_512_v4
+33/175963/campos_512_v4
+33/175993/campos_512_v4
+33/176011/campos_512_v4
+33/176037/campos_512_v4
+33/176042/campos_512_v4
+33/176072/campos_512_v4
+33/176073/campos_512_v4
+33/176083/campos_512_v4
+33/176117/campos_512_v4
+33/176130/campos_512_v4
+33/176168/campos_512_v4
+33/176185/campos_512_v4
+33/176201/campos_512_v4
+33/176236/campos_512_v4
+33/176241/campos_512_v4
+33/176248/campos_512_v4
+33/176249/campos_512_v4
+33/176285/campos_512_v4
+33/176324/campos_512_v4
+33/176329/campos_512_v4
+33/176334/campos_512_v4
+33/176374/campos_512_v4
+33/176410/campos_512_v4
+33/176415/campos_512_v4
+33/176423/campos_512_v4
+33/176430/campos_512_v4
+33/176460/campos_512_v4
+33/176465/campos_512_v4
+33/176476/campos_512_v4
+33/176478/campos_512_v4
+33/176519/campos_512_v4
+33/176534/campos_512_v4
+33/176535/campos_512_v4
+33/176561/campos_512_v4
+33/176585/campos_512_v4
+33/176602/campos_512_v4
+33/176610/campos_512_v4
+33/176637/campos_512_v4
+33/176638/campos_512_v4
+33/176643/campos_512_v4
+33/176695/campos_512_v4
+33/176699/campos_512_v4
+33/176763/campos_512_v4
+33/176768/campos_512_v4
+33/176817/campos_512_v4
+33/176836/campos_512_v4
+33/176837/campos_512_v4
+33/176840/campos_512_v4
+33/176856/campos_512_v4
+33/176918/campos_512_v4
+33/176923/campos_512_v4
+33/176936/campos_512_v4
+33/176965/campos_512_v4
+33/176968/campos_512_v4
+33/177005/campos_512_v4
+33/177020/campos_512_v4
+33/177064/campos_512_v4
+33/177089/campos_512_v4
+33/177094/campos_512_v4
+33/177111/campos_512_v4
+33/177172/campos_512_v4
+33/177215/campos_512_v4
+33/177221/campos_512_v4
+33/177247/campos_512_v4
+33/177271/campos_512_v4
+33/177276/campos_512_v4
+33/177280/campos_512_v4
+33/177299/campos_512_v4
+33/177316/campos_512_v4
+33/177331/campos_512_v4
+33/177369/campos_512_v4
+33/177427/campos_512_v4
+33/177429/campos_512_v4
+33/177459/campos_512_v4
+33/177472/campos_512_v4
+33/177491/campos_512_v4
+33/177495/campos_512_v4
+33/177505/campos_512_v4
+33/177516/campos_512_v4
+33/177523/campos_512_v4
+33/177530/campos_512_v4
+33/177534/campos_512_v4
+33/177536/campos_512_v4
+33/177548/campos_512_v4
+33/177564/campos_512_v4
+33/177566/campos_512_v4
+33/177570/campos_512_v4
+33/177572/campos_512_v4
+33/177581/campos_512_v4
+33/177583/campos_512_v4
+33/177590/campos_512_v4
+33/177601/campos_512_v4
+33/177613/campos_512_v4
+33/177627/campos_512_v4
+33/177664/campos_512_v4
+33/177679/campos_512_v4
+33/177680/campos_512_v4
+33/177694/campos_512_v4
+33/177754/campos_512_v4
+33/177768/campos_512_v4
+33/177787/campos_512_v4
+33/177788/campos_512_v4
+33/177840/campos_512_v4
+33/177852/campos_512_v4
+33/177855/campos_512_v4
+33/177867/campos_512_v4
+33/177889/campos_512_v4
+33/177941/campos_512_v4
+33/177943/campos_512_v4
+33/177969/campos_512_v4
+33/177980/campos_512_v4
+33/177981/campos_512_v4
+33/178003/campos_512_v4
+33/178075/campos_512_v4
+33/178107/campos_512_v4
+33/178120/campos_512_v4
+33/178177/campos_512_v4
+33/178245/campos_512_v4
+33/178340/campos_512_v4
+33/178341/campos_512_v4
+33/178383/campos_512_v4
+33/178403/campos_512_v4
+33/178439/campos_512_v4
+33/178474/campos_512_v4
+33/178498/campos_512_v4
+33/178504/campos_512_v4
+33/178508/campos_512_v4
+33/178526/campos_512_v4
+33/178551/campos_512_v4
+33/178578/campos_512_v4
+33/178630/campos_512_v4
+33/178632/campos_512_v4
+33/178666/campos_512_v4
+33/178667/campos_512_v4
+33/178699/campos_512_v4
+33/178723/campos_512_v4
+33/178740/campos_512_v4
+33/178774/campos_512_v4
+33/178812/campos_512_v4
+33/178818/campos_512_v4
+33/178821/campos_512_v4
+33/178901/campos_512_v4
+33/178929/campos_512_v4
+33/178965/campos_512_v4
+33/178992/campos_512_v4
+33/179019/campos_512_v4
+33/179022/campos_512_v4
+33/179024/campos_512_v4
+33/179063/campos_512_v4
+33/179073/campos_512_v4
+33/179074/campos_512_v4
+33/179079/campos_512_v4
+33/179101/campos_512_v4
+33/179103/campos_512_v4
+33/179106/campos_512_v4
+33/179113/campos_512_v4
+33/179132/campos_512_v4
+33/179141/campos_512_v4
+33/179148/campos_512_v4
+33/179153/campos_512_v4
+33/179157/campos_512_v4
+33/179166/campos_512_v4
+33/179237/campos_512_v4
+33/179240/campos_512_v4
+33/179267/campos_512_v4
+33/179276/campos_512_v4
+33/179287/campos_512_v4
+33/179323/campos_512_v4
+33/179354/campos_512_v4
+33/179367/campos_512_v4
+33/179413/campos_512_v4
+33/179417/campos_512_v4
+33/179421/campos_512_v4
+33/179457/campos_512_v4
+33/179460/campos_512_v4
+33/179461/campos_512_v4
+33/179535/campos_512_v4
+33/179547/campos_512_v4
+33/179557/campos_512_v4
+33/179562/campos_512_v4
+33/179646/campos_512_v4
+33/179661/campos_512_v4
+33/179683/campos_512_v4
+33/179731/campos_512_v4
+33/179739/campos_512_v4
+33/179786/campos_512_v4
+33/179789/campos_512_v4
+33/179801/campos_512_v4
+33/179835/campos_512_v4
+33/179889/campos_512_v4
+33/179909/campos_512_v4
+33/179953/campos_512_v4
+33/179965/campos_512_v4
+33/179966/campos_512_v4
+34/180078/campos_512_v4
+34/180129/campos_512_v4
+34/180133/campos_512_v4
+34/180165/campos_512_v4
+34/180189/campos_512_v4
+34/180204/campos_512_v4
+34/180243/campos_512_v4
+34/180260/campos_512_v4
+34/180262/campos_512_v4
+34/180353/campos_512_v4
+34/180373/campos_512_v4
+34/180383/campos_512_v4
+34/180391/campos_512_v4
+34/180409/campos_512_v4
+34/180429/campos_512_v4
+34/180485/campos_512_v4
+34/180486/campos_512_v4
+34/180494/campos_512_v4
+34/180509/campos_512_v4
+34/180546/campos_512_v4
+34/180557/campos_512_v4
+34/180581/campos_512_v4
+34/180606/campos_512_v4
+34/180610/campos_512_v4
+34/180625/campos_512_v4
+34/180651/campos_512_v4
+34/180673/campos_512_v4
+34/180690/campos_512_v4
+34/180711/campos_512_v4
+34/180719/campos_512_v4
+34/180730/campos_512_v4
+34/180743/campos_512_v4
+34/180808/campos_512_v4
+34/180814/campos_512_v4
+34/180844/campos_512_v4
+34/180869/campos_512_v4
+34/180915/campos_512_v4
+34/180916/campos_512_v4
+34/180920/campos_512_v4
+34/180957/campos_512_v4
+34/180959/campos_512_v4
+34/180965/campos_512_v4
+34/180968/campos_512_v4
+34/181031/campos_512_v4
+34/181055/campos_512_v4
+34/181067/campos_512_v4
+34/181085/campos_512_v4
+34/181120/campos_512_v4
+34/181189/campos_512_v4
+34/181216/campos_512_v4
+34/181226/campos_512_v4
+34/181265/campos_512_v4
+34/181317/campos_512_v4
+34/181362/campos_512_v4
+34/181384/campos_512_v4
+34/181401/campos_512_v4
+34/181408/campos_512_v4
+34/181426/campos_512_v4
+34/181450/campos_512_v4
+34/181463/campos_512_v4
+34/181466/campos_512_v4
+34/181467/campos_512_v4
+34/181500/campos_512_v4
+34/181504/campos_512_v4
+34/181519/campos_512_v4
+34/181547/campos_512_v4
+34/181549/campos_512_v4
+34/181559/campos_512_v4
+34/181580/campos_512_v4
+34/181589/campos_512_v4
+34/181619/campos_512_v4
+34/181627/campos_512_v4
+34/181655/campos_512_v4
+34/181665/campos_512_v4
+34/181713/campos_512_v4
+34/181730/campos_512_v4
+34/181745/campos_512_v4
+34/181775/campos_512_v4
+34/181779/campos_512_v4
+34/181783/campos_512_v4
+34/181790/campos_512_v4
+34/181802/campos_512_v4
+34/181803/campos_512_v4
+34/181838/campos_512_v4
+34/181860/campos_512_v4
+34/181870/campos_512_v4
+34/181901/campos_512_v4
+34/181920/campos_512_v4
+34/181950/campos_512_v4
+34/181968/campos_512_v4
+34/182023/campos_512_v4
+34/182043/campos_512_v4
+34/182564/campos_512_v4
+34/182618/campos_512_v4
+34/182622/campos_512_v4
+34/182635/campos_512_v4
+34/182637/campos_512_v4
+34/182671/campos_512_v4
+34/182684/campos_512_v4
+34/182705/campos_512_v4
+34/182743/campos_512_v4
+34/182752/campos_512_v4
+34/182804/campos_512_v4
+34/182857/campos_512_v4
+34/182877/campos_512_v4
+34/182890/campos_512_v4
+34/182911/campos_512_v4
+34/182967/campos_512_v4
+34/182991/campos_512_v4
+34/183028/campos_512_v4
+34/183029/campos_512_v4
+34/183058/campos_512_v4
+34/183069/campos_512_v4
+34/183080/campos_512_v4
+34/183098/campos_512_v4
+34/183505/campos_512_v4
+34/183522/campos_512_v4
+34/183563/campos_512_v4
+34/183590/campos_512_v4
+34/184002/campos_512_v4
+34/184008/campos_512_v4
+34/184042/campos_512_v4
+34/184048/campos_512_v4
+34/184065/campos_512_v4
+34/184067/campos_512_v4
+34/184110/campos_512_v4
+34/184144/campos_512_v4
+34/184151/campos_512_v4
+34/184238/campos_512_v4
+34/184254/campos_512_v4
+34/184266/campos_512_v4
+34/184277/campos_512_v4
+34/184322/campos_512_v4
+34/184338/campos_512_v4
+34/184355/campos_512_v4
+34/184362/campos_512_v4
+34/184370/campos_512_v4
+34/184395/campos_512_v4
+34/184431/campos_512_v4
+34/184433/campos_512_v4
+34/184439/campos_512_v4
+34/184450/campos_512_v4
+34/184452/campos_512_v4
+34/184455/campos_512_v4
+34/184456/campos_512_v4
+34/184526/campos_512_v4
+34/184540/campos_512_v4
+34/184547/campos_512_v4
+34/184564/campos_512_v4
+34/184696/campos_512_v4
+34/184706/campos_512_v4
+34/184755/campos_512_v4
+34/184766/campos_512_v4
+34/184767/campos_512_v4
+34/184782/campos_512_v4
+34/184785/campos_512_v4
+34/184798/campos_512_v4
+34/184846/campos_512_v4
+34/184869/campos_512_v4
+34/184910/campos_512_v4
+34/184917/campos_512_v4
+34/184925/campos_512_v4
+34/184930/campos_512_v4
+34/184950/campos_512_v4
+34/184953/campos_512_v4
+34/184971/campos_512_v4
+35/185032/campos_512_v4
+35/185045/campos_512_v4
+35/185065/campos_512_v4
+35/185095/campos_512_v4
+35/185114/campos_512_v4
+35/185118/campos_512_v4
+35/185139/campos_512_v4
+35/185144/campos_512_v4
+35/185167/campos_512_v4
+35/185377/campos_512_v4
+35/185381/campos_512_v4
+35/185383/campos_512_v4
+35/185447/campos_512_v4
+35/185451/campos_512_v4
+35/185484/campos_512_v4
+35/185611/campos_512_v4
+35/185646/campos_512_v4
+35/185729/campos_512_v4
+35/185733/campos_512_v4
+35/185734/campos_512_v4
+35/185743/campos_512_v4
+35/185748/campos_512_v4
+35/185776/campos_512_v4
+35/185806/campos_512_v4
+35/185891/campos_512_v4
+35/185942/campos_512_v4
+35/185943/campos_512_v4
+35/185969/campos_512_v4
+35/186058/campos_512_v4
+35/186062/campos_512_v4
+35/186070/campos_512_v4
+35/186095/campos_512_v4
+35/186132/campos_512_v4
+35/186201/campos_512_v4
+35/186223/campos_512_v4
+35/186229/campos_512_v4
+35/186247/campos_512_v4
+35/186369/campos_512_v4
+35/186395/campos_512_v4
+35/186412/campos_512_v4
+35/186416/campos_512_v4
+35/186417/campos_512_v4
+35/186438/campos_512_v4
+35/186451/campos_512_v4
+35/186503/campos_512_v4
+35/186519/campos_512_v4
+35/186527/campos_512_v4
+35/186532/campos_512_v4
+35/186545/campos_512_v4
+35/186548/campos_512_v4
+35/186562/campos_512_v4
+35/186590/campos_512_v4
+35/186728/campos_512_v4
+35/186733/campos_512_v4
+35/186797/campos_512_v4
+35/186831/campos_512_v4
+35/186846/campos_512_v4
+35/186937/campos_512_v4
+35/186954/campos_512_v4
+35/187010/campos_512_v4
+35/187015/campos_512_v4
+35/187024/campos_512_v4
+35/187026/campos_512_v4
+35/187052/campos_512_v4
+35/187071/campos_512_v4
+35/187093/campos_512_v4
+35/187116/campos_512_v4
+35/187131/campos_512_v4
+35/187148/campos_512_v4
+35/187169/campos_512_v4
+35/187200/campos_512_v4
+35/187232/campos_512_v4
+35/187241/campos_512_v4
+35/187243/campos_512_v4
+35/187284/campos_512_v4
+35/187296/campos_512_v4
+35/187311/campos_512_v4
+35/187324/campos_512_v4
+35/187349/campos_512_v4
+35/187474/campos_512_v4
+35/187492/campos_512_v4
+35/187543/campos_512_v4
+35/187618/campos_512_v4
+35/187649/campos_512_v4
+35/187676/campos_512_v4
+35/187683/campos_512_v4
+35/187702/campos_512_v4
+35/187710/campos_512_v4
+35/187731/campos_512_v4
+35/187743/campos_512_v4
+35/187770/campos_512_v4
+35/187966/campos_512_v4
+35/188003/campos_512_v4
+35/188033/campos_512_v4
+35/188057/campos_512_v4
+35/188067/campos_512_v4
+35/188071/campos_512_v4
+35/188151/campos_512_v4
+35/188159/campos_512_v4
+35/188216/campos_512_v4
+35/188308/campos_512_v4
+35/188326/campos_512_v4
+35/188365/campos_512_v4
+35/188442/campos_512_v4
+35/188458/campos_512_v4
+35/188460/campos_512_v4
+35/188472/campos_512_v4
+35/188557/campos_512_v4
+35/188564/campos_512_v4
+35/188586/campos_512_v4
+35/188610/campos_512_v4
+35/188618/campos_512_v4
+35/188623/campos_512_v4
+35/188689/campos_512_v4
+35/188726/campos_512_v4
+35/188757/campos_512_v4
+35/188783/campos_512_v4
+35/188856/campos_512_v4
+35/188863/campos_512_v4
+35/188883/campos_512_v4
+35/188902/campos_512_v4
+35/188927/campos_512_v4
+35/189084/campos_512_v4
+35/189090/campos_512_v4
+35/189097/campos_512_v4
+35/189122/campos_512_v4
+35/189152/campos_512_v4
+35/189186/campos_512_v4
+35/189212/campos_512_v4
+35/189213/campos_512_v4
+35/189263/campos_512_v4
+35/189264/campos_512_v4
+35/189290/campos_512_v4
+35/189295/campos_512_v4
+35/189297/campos_512_v4
+35/189333/campos_512_v4
+35/189334/campos_512_v4
+35/189368/campos_512_v4
+35/189467/campos_512_v4
+35/189474/campos_512_v4
+35/189549/campos_512_v4
+35/189554/campos_512_v4
+35/189593/campos_512_v4
+35/189600/campos_512_v4
+35/189635/campos_512_v4
+35/189662/campos_512_v4
+35/189667/campos_512_v4
+35/189704/campos_512_v4
+35/189779/campos_512_v4
+35/189811/campos_512_v4
+35/189820/campos_512_v4
+35/189879/campos_512_v4
+35/189883/campos_512_v4
+35/189884/campos_512_v4
+35/189928/campos_512_v4
+35/189995/campos_512_v4
+35/189996/campos_512_v4
+36/190032/campos_512_v4
+36/190079/campos_512_v4
+36/190081/campos_512_v4
+36/190123/campos_512_v4
+36/190140/campos_512_v4
+36/190143/campos_512_v4
+36/190171/campos_512_v4
+36/190181/campos_512_v4
+36/190205/campos_512_v4
+36/190231/campos_512_v4
+36/190302/campos_512_v4
+36/190325/campos_512_v4
+36/190336/campos_512_v4
+36/190342/campos_512_v4
+36/190352/campos_512_v4
+36/190355/campos_512_v4
+36/190371/campos_512_v4
+36/190408/campos_512_v4
+36/190434/campos_512_v4
+36/190454/campos_512_v4
+36/190480/campos_512_v4
+36/190531/campos_512_v4
+36/190546/campos_512_v4
+36/190560/campos_512_v4
+36/190574/campos_512_v4
+36/190599/campos_512_v4
+36/190628/campos_512_v4
+36/190639/campos_512_v4
+36/190679/campos_512_v4
+36/190685/campos_512_v4
+36/190691/campos_512_v4
+36/190719/campos_512_v4
+36/190725/campos_512_v4
+36/190751/campos_512_v4
+36/190808/campos_512_v4
+36/190814/campos_512_v4
+36/190826/campos_512_v4
+36/190838/campos_512_v4
+36/190849/campos_512_v4
+36/190878/campos_512_v4
+36/190890/campos_512_v4
+36/190965/campos_512_v4
+36/190967/campos_512_v4
+36/191015/campos_512_v4
+36/191045/campos_512_v4
+36/191064/campos_512_v4
+36/191119/campos_512_v4
+36/191127/campos_512_v4
+36/191142/campos_512_v4
+36/191161/campos_512_v4
+36/191200/campos_512_v4
+36/191208/campos_512_v4
+36/191280/campos_512_v4
+36/191299/campos_512_v4
+36/191367/campos_512_v4
+36/191375/campos_512_v4
+36/191480/campos_512_v4
+36/191482/campos_512_v4
+36/191519/campos_512_v4
+36/191617/campos_512_v4
+36/191634/campos_512_v4
+36/191674/campos_512_v4
+36/191741/campos_512_v4
+36/191748/campos_512_v4
+36/191764/campos_512_v4
+36/191767/campos_512_v4
+36/191794/campos_512_v4
+36/191832/campos_512_v4
+36/191863/campos_512_v4
+36/191871/campos_512_v4
+36/191881/campos_512_v4
+36/191908/campos_512_v4
+36/192014/campos_512_v4
+36/192018/campos_512_v4
+36/192027/campos_512_v4
+36/192032/campos_512_v4
+36/192045/campos_512_v4
+36/192057/campos_512_v4
+36/192061/campos_512_v4
+36/192096/campos_512_v4
+36/192108/campos_512_v4
+36/192130/campos_512_v4
+36/192135/campos_512_v4
+36/192200/campos_512_v4
+36/192239/campos_512_v4
+36/192250/campos_512_v4
+36/192272/campos_512_v4
+36/192292/campos_512_v4
+36/192317/campos_512_v4
+36/192321/campos_512_v4
+36/192324/campos_512_v4
+36/192335/campos_512_v4
+36/192337/campos_512_v4
+36/192339/campos_512_v4
+36/192348/campos_512_v4
+36/192351/campos_512_v4
+36/192359/campos_512_v4
+36/192378/campos_512_v4
+36/192422/campos_512_v4
+36/192438/campos_512_v4
+36/192511/campos_512_v4
+36/192528/campos_512_v4
+36/192613/campos_512_v4
+36/192635/campos_512_v4
+36/192652/campos_512_v4
+36/192677/campos_512_v4
+36/192686/campos_512_v4
+36/192723/campos_512_v4
+36/192789/campos_512_v4
+36/192888/campos_512_v4
+36/192901/campos_512_v4
+36/192909/campos_512_v4
+36/192920/campos_512_v4
+36/192927/campos_512_v4
+36/192937/campos_512_v4
+36/192953/campos_512_v4
+36/192957/campos_512_v4
+36/193044/campos_512_v4
+36/193064/campos_512_v4
+36/193072/campos_512_v4
+36/193085/campos_512_v4
+36/193104/campos_512_v4
+36/193140/campos_512_v4
+36/193236/campos_512_v4
+36/193326/campos_512_v4
+36/193363/campos_512_v4
+36/193481/campos_512_v4
+36/193558/campos_512_v4
+36/193559/campos_512_v4
+36/193573/campos_512_v4
+36/193592/campos_512_v4
+36/193596/campos_512_v4
+36/193597/campos_512_v4
+36/193600/campos_512_v4
+36/193618/campos_512_v4
+36/193630/campos_512_v4
+36/193636/campos_512_v4
+36/193691/campos_512_v4
+36/193694/campos_512_v4
+36/193731/campos_512_v4
+36/193740/campos_512_v4
+36/193763/campos_512_v4
+36/193809/campos_512_v4
+36/193814/campos_512_v4
+36/193828/campos_512_v4
+36/193829/campos_512_v4
+36/193863/campos_512_v4
+36/193864/campos_512_v4
+36/193866/campos_512_v4
+36/193893/campos_512_v4
+36/193901/campos_512_v4
+36/193961/campos_512_v4
+36/193983/campos_512_v4
+36/194014/campos_512_v4
+36/194026/campos_512_v4
+36/194033/campos_512_v4
+36/194035/campos_512_v4
+36/194056/campos_512_v4
+36/194077/campos_512_v4
+36/194160/campos_512_v4
+36/194166/campos_512_v4
+36/194169/campos_512_v4
+36/194175/campos_512_v4
+36/194201/campos_512_v4
+36/194222/campos_512_v4
+36/194249/campos_512_v4
+36/194260/campos_512_v4
+36/194294/campos_512_v4
+36/194300/campos_512_v4
+36/194329/campos_512_v4
+36/194346/campos_512_v4
+36/194392/campos_512_v4
+36/194403/campos_512_v4
+36/194461/campos_512_v4
+36/194516/campos_512_v4
+36/194538/campos_512_v4
+36/194577/campos_512_v4
+36/194595/campos_512_v4
+36/194599/campos_512_v4
+36/194602/campos_512_v4
+36/194608/campos_512_v4
+36/194615/campos_512_v4
+36/194662/campos_512_v4
+36/194699/campos_512_v4
+36/194718/campos_512_v4
+36/194844/campos_512_v4
+36/194848/campos_512_v4
+36/194855/campos_512_v4
+36/194911/campos_512_v4
+36/194926/campos_512_v4
+36/194947/campos_512_v4
+37/195039/campos_512_v4
+37/195050/campos_512_v4
+37/195077/campos_512_v4
+37/195096/campos_512_v4
+37/195129/campos_512_v4
+37/195134/campos_512_v4
+37/195142/campos_512_v4
+37/195183/campos_512_v4
+37/195187/campos_512_v4
+37/195200/campos_512_v4
+37/195290/campos_512_v4
+37/195335/campos_512_v4
+37/195374/campos_512_v4
+37/195395/campos_512_v4
+37/195433/campos_512_v4
+37/195439/campos_512_v4
+37/195449/campos_512_v4
+37/195490/campos_512_v4
+37/195577/campos_512_v4
+37/195602/campos_512_v4
+37/195630/campos_512_v4
+37/195707/campos_512_v4
+37/195776/campos_512_v4
+37/195784/campos_512_v4
+37/195888/campos_512_v4
+37/195900/campos_512_v4
+37/195984/campos_512_v4
+37/196042/campos_512_v4
+37/196047/campos_512_v4
+37/196082/campos_512_v4
+37/196135/campos_512_v4
+37/196156/campos_512_v4
+37/196187/campos_512_v4
+37/196216/campos_512_v4
+37/196219/campos_512_v4
+37/196236/campos_512_v4
+37/196240/campos_512_v4
+37/196249/campos_512_v4
+37/196332/campos_512_v4
+37/196376/campos_512_v4
+37/196419/campos_512_v4
+37/196424/campos_512_v4
+37/196452/campos_512_v4
+37/196512/campos_512_v4
+37/196526/campos_512_v4
+37/196541/campos_512_v4
+37/196563/campos_512_v4
+37/196593/campos_512_v4
+37/196604/campos_512_v4
+37/196648/campos_512_v4
+37/196656/campos_512_v4
+37/196727/campos_512_v4
+37/196734/campos_512_v4
+37/196743/campos_512_v4
+37/196760/campos_512_v4
+37/196784/campos_512_v4
+37/196819/campos_512_v4
+37/196917/campos_512_v4
+37/197016/campos_512_v4
+37/197018/campos_512_v4
+37/197026/campos_512_v4
+37/197061/campos_512_v4
+37/197125/campos_512_v4
+37/197206/campos_512_v4
+37/197218/campos_512_v4
+37/197227/campos_512_v4
+37/197240/campos_512_v4
+37/197423/campos_512_v4
+37/197498/campos_512_v4
+37/197503/campos_512_v4
+37/197560/campos_512_v4
+37/197605/campos_512_v4
+37/197648/campos_512_v4
+37/197673/campos_512_v4
+37/197720/campos_512_v4
+37/197747/campos_512_v4
+37/197761/campos_512_v4
+37/197770/campos_512_v4
+37/197781/campos_512_v4
+37/197807/campos_512_v4
+37/197818/campos_512_v4
+37/197939/campos_512_v4
+37/197958/campos_512_v4
+37/197995/campos_512_v4
+37/198094/campos_512_v4
+37/198102/campos_512_v4
+37/198109/campos_512_v4
+37/198112/campos_512_v4
+37/198137/campos_512_v4
+37/198157/campos_512_v4
+37/198193/campos_512_v4
+37/198211/campos_512_v4
+37/198212/campos_512_v4
+37/198229/campos_512_v4
+37/198264/campos_512_v4
+37/198279/campos_512_v4
+37/198317/campos_512_v4
+37/198341/campos_512_v4
+37/198397/campos_512_v4
+37/198414/campos_512_v4
+37/198436/campos_512_v4
+37/198480/campos_512_v4
+37/198511/campos_512_v4
+37/198530/campos_512_v4
+37/198540/campos_512_v4
+37/198613/campos_512_v4
+37/198616/campos_512_v4
+37/198663/campos_512_v4
+37/198758/campos_512_v4
+37/198789/campos_512_v4
+37/198822/campos_512_v4
+37/198830/campos_512_v4
+37/198857/campos_512_v4
+37/198868/campos_512_v4
+37/198888/campos_512_v4
+37/198911/campos_512_v4
+37/198924/campos_512_v4
+37/198929/campos_512_v4
+37/198963/campos_512_v4
+37/198973/campos_512_v4
+37/198980/campos_512_v4
+37/199030/campos_512_v4
+37/199039/campos_512_v4
+37/199108/campos_512_v4
+37/199123/campos_512_v4
+37/199160/campos_512_v4
+37/199169/campos_512_v4
+37/199218/campos_512_v4
+37/199226/campos_512_v4
+37/199271/campos_512_v4
+37/199274/campos_512_v4
+37/199312/campos_512_v4
+37/199313/campos_512_v4
+37/199318/campos_512_v4
+37/199348/campos_512_v4
+37/199385/campos_512_v4
+37/199413/campos_512_v4
+37/199432/campos_512_v4
+37/199438/campos_512_v4
+37/199457/campos_512_v4
+37/199528/campos_512_v4
+37/199574/campos_512_v4
+37/199608/campos_512_v4
+37/199629/campos_512_v4
+37/199638/campos_512_v4
+37/199723/campos_512_v4
+37/199740/campos_512_v4
+37/199810/campos_512_v4
+37/199820/campos_512_v4
+37/199842/campos_512_v4
+37/199844/campos_512_v4
+37/199857/campos_512_v4
+37/199872/campos_512_v4
+37/199889/campos_512_v4
+37/199911/campos_512_v4
+37/199934/campos_512_v4
+37/199938/campos_512_v4
+37/199941/campos_512_v4
+38/200004/campos_512_v4
+38/200016/campos_512_v4
+38/200043/campos_512_v4
+38/200052/campos_512_v4
+38/200074/campos_512_v4
+38/200083/campos_512_v4
+38/200180/campos_512_v4
+38/200190/campos_512_v4
+38/200221/campos_512_v4
+38/200238/campos_512_v4
+38/200250/campos_512_v4
+38/200264/campos_512_v4
+38/200272/campos_512_v4
+38/200287/campos_512_v4
+38/200321/campos_512_v4
+38/200355/campos_512_v4
+38/200375/campos_512_v4
+38/200406/campos_512_v4
+38/200407/campos_512_v4
+38/200417/campos_512_v4
+38/200429/campos_512_v4
+38/200483/campos_512_v4
+38/200511/campos_512_v4
+38/200528/campos_512_v4
+38/200543/campos_512_v4
+38/200557/campos_512_v4
+38/200582/campos_512_v4
+38/200589/campos_512_v4
+38/200599/campos_512_v4
+38/200622/campos_512_v4
+38/200651/campos_512_v4
+38/200709/campos_512_v4
+38/200754/campos_512_v4
+38/200767/campos_512_v4
+38/200798/campos_512_v4
+38/200847/campos_512_v4
+38/200870/campos_512_v4
+38/200874/campos_512_v4
+38/200909/campos_512_v4
+38/200917/campos_512_v4
+38/200970/campos_512_v4
+38/200974/campos_512_v4
+38/200984/campos_512_v4
+38/201017/campos_512_v4
+38/201027/campos_512_v4
+38/201030/campos_512_v4
+38/201034/campos_512_v4
+38/201035/campos_512_v4
+38/201041/campos_512_v4
+38/201050/campos_512_v4
+38/201079/campos_512_v4
+38/201102/campos_512_v4
+38/201165/campos_512_v4
+38/201180/campos_512_v4
+38/201188/campos_512_v4
+38/201191/campos_512_v4
+38/201199/campos_512_v4
+38/201205/campos_512_v4
+38/201216/campos_512_v4
+38/201228/campos_512_v4
+38/201321/campos_512_v4
+38/201343/campos_512_v4
+38/201352/campos_512_v4
+38/201412/campos_512_v4
+38/201415/campos_512_v4
+38/201448/campos_512_v4
+38/201490/campos_512_v4
+38/201522/campos_512_v4
+38/201525/campos_512_v4
+38/201536/campos_512_v4
+38/201547/campos_512_v4
+38/201604/campos_512_v4
+38/201617/campos_512_v4
+38/201658/campos_512_v4
+38/201671/campos_512_v4
+38/201705/campos_512_v4
+38/201742/campos_512_v4
+38/201828/campos_512_v4
+38/201831/campos_512_v4
+38/201924/campos_512_v4
+38/201929/campos_512_v4
+38/201944/campos_512_v4
+38/201978/campos_512_v4
+38/201995/campos_512_v4
+38/201998/campos_512_v4
+38/202006/campos_512_v4
+38/202045/campos_512_v4
+38/202049/campos_512_v4
+38/202077/campos_512_v4
+38/202102/campos_512_v4
+38/202175/campos_512_v4
+38/202235/campos_512_v4
+38/202344/campos_512_v4
+38/202349/campos_512_v4
+38/202376/campos_512_v4
+38/202482/campos_512_v4
+38/202489/campos_512_v4
+38/202535/campos_512_v4
+38/202560/campos_512_v4
+38/202640/campos_512_v4
+38/202647/campos_512_v4
+38/202662/campos_512_v4
+38/202744/campos_512_v4
+38/202782/campos_512_v4
+38/202793/campos_512_v4
+38/202820/campos_512_v4
+38/202826/campos_512_v4
+38/202837/campos_512_v4
+38/202869/campos_512_v4
+38/202874/campos_512_v4
+38/202936/campos_512_v4
+38/202960/campos_512_v4
+38/203000/campos_512_v4
+38/203073/campos_512_v4
+38/203126/campos_512_v4
+38/203128/campos_512_v4
+38/203131/campos_512_v4
+38/203141/campos_512_v4
+38/203147/campos_512_v4
+38/203151/campos_512_v4
+38/203153/campos_512_v4
+38/203186/campos_512_v4
+38/203210/campos_512_v4
+38/203242/campos_512_v4
+38/203410/campos_512_v4
+38/203420/campos_512_v4
+38/203471/campos_512_v4
+38/203555/campos_512_v4
+38/203571/campos_512_v4
+38/203577/campos_512_v4
+38/203638/campos_512_v4
+38/203646/campos_512_v4
+38/203649/campos_512_v4
+38/203665/campos_512_v4
+38/203702/campos_512_v4
+38/203710/campos_512_v4
+38/203725/campos_512_v4
+38/203728/campos_512_v4
+38/203773/campos_512_v4
+38/203792/campos_512_v4
+38/203797/campos_512_v4
+38/203876/campos_512_v4
+38/203891/campos_512_v4
+38/203898/campos_512_v4
+38/203950/campos_512_v4
+38/203962/campos_512_v4
+38/203975/campos_512_v4
+38/203977/campos_512_v4
+38/204008/campos_512_v4
+38/204061/campos_512_v4
+38/204065/campos_512_v4
+38/204068/campos_512_v4
+38/204077/campos_512_v4
+38/204094/campos_512_v4
+38/204096/campos_512_v4
+38/204098/campos_512_v4
+38/204105/campos_512_v4
+38/204112/campos_512_v4
+38/204116/campos_512_v4
+38/204145/campos_512_v4
+38/204239/campos_512_v4
+38/204286/campos_512_v4
+38/204308/campos_512_v4
+38/204382/campos_512_v4
+38/204387/campos_512_v4
+38/204395/campos_512_v4
+38/204413/campos_512_v4
+38/204440/campos_512_v4
+38/204470/campos_512_v4
+38/204477/campos_512_v4
+38/204495/campos_512_v4
+38/204526/campos_512_v4
+38/204548/campos_512_v4
+38/204550/campos_512_v4
+38/204561/campos_512_v4
+38/204628/campos_512_v4
+38/204634/campos_512_v4
+38/204641/campos_512_v4
+38/204654/campos_512_v4
+38/204672/campos_512_v4
+38/204705/campos_512_v4
+38/204727/campos_512_v4
+38/204729/campos_512_v4
+38/204778/campos_512_v4
+38/204831/campos_512_v4
+38/204841/campos_512_v4
+38/204852/campos_512_v4
+38/204910/campos_512_v4
+38/204931/campos_512_v4
+38/204983/campos_512_v4
+38/204998/campos_512_v4
+4/30003/campos_512_v4
+4/30026/campos_512_v4
+4/30049/campos_512_v4
+4/30094/campos_512_v4
+4/30101/campos_512_v4
+4/30169/campos_512_v4
+4/30186/campos_512_v4
+4/30208/campos_512_v4
+4/30222/campos_512_v4
+4/30245/campos_512_v4
+4/30247/campos_512_v4
+4/30265/campos_512_v4
+4/30267/campos_512_v4
+4/30304/campos_512_v4
+4/30315/campos_512_v4
+4/30317/campos_512_v4
+4/30454/campos_512_v4
+4/30480/campos_512_v4
+4/30489/campos_512_v4
+4/30491/campos_512_v4
+4/30495/campos_512_v4
+4/30513/campos_512_v4
+4/30530/campos_512_v4
+4/30539/campos_512_v4
+4/30541/campos_512_v4
+4/30546/campos_512_v4
+4/30556/campos_512_v4
+4/30563/campos_512_v4
+4/30579/campos_512_v4
+4/30590/campos_512_v4
+4/30651/campos_512_v4
+4/30660/campos_512_v4
+4/30700/campos_512_v4
+4/30728/campos_512_v4
+4/30774/campos_512_v4
+4/30803/campos_512_v4
+4/30804/campos_512_v4
+4/30834/campos_512_v4
+4/30844/campos_512_v4
+4/30847/campos_512_v4
+4/30864/campos_512_v4
+4/30897/campos_512_v4
+4/30921/campos_512_v4
+4/30973/campos_512_v4
+4/30986/campos_512_v4
+4/31040/campos_512_v4
+4/31060/campos_512_v4
+4/31109/campos_512_v4
+4/31118/campos_512_v4
+4/31125/campos_512_v4
+4/31127/campos_512_v4
+4/31137/campos_512_v4
+4/31154/campos_512_v4
+4/31170/campos_512_v4
+4/31171/campos_512_v4
+4/31174/campos_512_v4
+4/31184/campos_512_v4
+4/31198/campos_512_v4
+4/31249/campos_512_v4
+4/31291/campos_512_v4
+4/31299/campos_512_v4
+4/31309/campos_512_v4
+4/31326/campos_512_v4
+4/31344/campos_512_v4
+4/31373/campos_512_v4
+4/31393/campos_512_v4
+4/31427/campos_512_v4
+4/31438/campos_512_v4
+4/31454/campos_512_v4
+4/31456/campos_512_v4
+4/31468/campos_512_v4
+4/31521/campos_512_v4
+4/31588/campos_512_v4
+4/31595/campos_512_v4
+4/31597/campos_512_v4
+4/31600/campos_512_v4
+4/31613/campos_512_v4
+4/31722/campos_512_v4
+4/31723/campos_512_v4
+4/31724/campos_512_v4
+4/31728/campos_512_v4
+4/31745/campos_512_v4
+4/31760/campos_512_v4
+4/31761/campos_512_v4
+4/31770/campos_512_v4
+4/31776/campos_512_v4
+4/31796/campos_512_v4
+4/31815/campos_512_v4
+4/31840/campos_512_v4
+4/31845/campos_512_v4
+4/31878/campos_512_v4
+4/31936/campos_512_v4
+4/31940/campos_512_v4
+4/31942/campos_512_v4
+4/31955/campos_512_v4
+4/31974/campos_512_v4
+4/32010/campos_512_v4
+4/32063/campos_512_v4
+4/32064/campos_512_v4
+4/32158/campos_512_v4
+4/32215/campos_512_v4
+4/32240/campos_512_v4
+4/32247/campos_512_v4
+4/32272/campos_512_v4
+4/32277/campos_512_v4
+4/32279/campos_512_v4
+4/32290/campos_512_v4
+4/32325/campos_512_v4
+4/32344/campos_512_v4
+4/32366/campos_512_v4
+4/32381/campos_512_v4
+4/32394/campos_512_v4
+4/32445/campos_512_v4
+4/32447/campos_512_v4
+4/32488/campos_512_v4
+4/32491/campos_512_v4
+4/32518/campos_512_v4
+4/32541/campos_512_v4
+4/32546/campos_512_v4
+4/32556/campos_512_v4
+4/32576/campos_512_v4
+4/32587/campos_512_v4
+4/32592/campos_512_v4
+4/32637/campos_512_v4
+4/32653/campos_512_v4
+4/32663/campos_512_v4
+4/32683/campos_512_v4
+4/32713/campos_512_v4
+4/32725/campos_512_v4
+4/32747/campos_512_v4
+4/32769/campos_512_v4
+4/32772/campos_512_v4
+4/32778/campos_512_v4
+4/32807/campos_512_v4
+4/32810/campos_512_v4
+4/32839/campos_512_v4
+4/32861/campos_512_v4
+4/32864/campos_512_v4
+4/32878/campos_512_v4
+4/32991/campos_512_v4
+4/33048/campos_512_v4
+4/33072/campos_512_v4
+4/33132/campos_512_v4
+4/33213/campos_512_v4
+4/33216/campos_512_v4
+4/33227/campos_512_v4
+4/33230/campos_512_v4
+4/33251/campos_512_v4
+4/33275/campos_512_v4
+4/33277/campos_512_v4
+4/33281/campos_512_v4
+4/33295/campos_512_v4
+4/33306/campos_512_v4
+4/33309/campos_512_v4
+4/33313/campos_512_v4
+4/33322/campos_512_v4
+4/33361/campos_512_v4
+4/33388/campos_512_v4
+4/33390/campos_512_v4
+4/33440/campos_512_v4
+4/33502/campos_512_v4
+4/33514/campos_512_v4
+4/33645/campos_512_v4
+4/33659/campos_512_v4
+4/33671/campos_512_v4
+4/33724/campos_512_v4
+4/33740/campos_512_v4
+4/33757/campos_512_v4
+4/33799/campos_512_v4
+4/33815/campos_512_v4
+4/33842/campos_512_v4
+4/33848/campos_512_v4
+4/33880/campos_512_v4
+4/33906/campos_512_v4
+4/33936/campos_512_v4
+4/33948/campos_512_v4
+4/33992/campos_512_v4
+4/34016/campos_512_v4
+4/34068/campos_512_v4
+4/34090/campos_512_v4
+4/34104/campos_512_v4
+4/34110/campos_512_v4
+4/34132/campos_512_v4
+4/34157/campos_512_v4
+4/34180/campos_512_v4
+4/34214/campos_512_v4
+4/34267/campos_512_v4
+4/34308/campos_512_v4
+4/34313/campos_512_v4
+4/34328/campos_512_v4
+4/34335/campos_512_v4
+4/34388/campos_512_v4
+4/34419/campos_512_v4
+4/34438/campos_512_v4
+4/34444/campos_512_v4
+4/34465/campos_512_v4
+4/34470/campos_512_v4
+4/34500/campos_512_v4
+4/34509/campos_512_v4
+4/34514/campos_512_v4
+4/34552/campos_512_v4
+4/34599/campos_512_v4
+4/34619/campos_512_v4
+4/34627/campos_512_v4
+4/34635/campos_512_v4
+4/34639/campos_512_v4
+4/34643/campos_512_v4
+4/34644/campos_512_v4
+4/34688/campos_512_v4
+4/34691/campos_512_v4
+4/34693/campos_512_v4
+4/34711/campos_512_v4
+4/34720/campos_512_v4
+4/34742/campos_512_v4
+4/34778/campos_512_v4
+4/34792/campos_512_v4
+4/34796/campos_512_v4
+4/34820/campos_512_v4
+4/34824/campos_512_v4
+4/34836/campos_512_v4
+4/34844/campos_512_v4
+4/34860/campos_512_v4
+4/34873/campos_512_v4
+4/34951/campos_512_v4
+4/34971/campos_512_v4
+4/34978/campos_512_v4
+4/34994/campos_512_v4
+40/210007/campos_512_v4
+40/210054/campos_512_v4
+40/210153/campos_512_v4
+40/210158/campos_512_v4
+40/210176/campos_512_v4
+40/210177/campos_512_v4
+40/210232/campos_512_v4
+40/210284/campos_512_v4
+40/210288/campos_512_v4
+40/210565/campos_512_v4
+40/210599/campos_512_v4
+40/210625/campos_512_v4
+40/210663/campos_512_v4
+40/210676/campos_512_v4
+40/210680/campos_512_v4
+40/210682/campos_512_v4
+40/210684/campos_512_v4
+40/210691/campos_512_v4
+40/210696/campos_512_v4
+40/210718/campos_512_v4
+40/210751/campos_512_v4
+40/210757/campos_512_v4
+40/210823/campos_512_v4
+40/210833/campos_512_v4
+40/210834/campos_512_v4
+40/210849/campos_512_v4
+40/210858/campos_512_v4
+40/210917/campos_512_v4
+40/210969/campos_512_v4
+40/211015/campos_512_v4
+40/211038/campos_512_v4
+40/211040/campos_512_v4
+40/211085/campos_512_v4
+40/211097/campos_512_v4
+40/211163/campos_512_v4
+40/211167/campos_512_v4
+40/211169/campos_512_v4
+40/211172/campos_512_v4
+40/211182/campos_512_v4
+40/211207/campos_512_v4
+40/211318/campos_512_v4
+40/211325/campos_512_v4
+40/211387/campos_512_v4
+40/211397/campos_512_v4
+40/211422/campos_512_v4
+40/211449/campos_512_v4
+40/211491/campos_512_v4
+40/211566/campos_512_v4
+40/211608/campos_512_v4
+40/211646/campos_512_v4
+40/211677/campos_512_v4
+40/211695/campos_512_v4
+40/211726/campos_512_v4
+40/211734/campos_512_v4
+40/211737/campos_512_v4
+40/211741/campos_512_v4
+40/211747/campos_512_v4
+40/211842/campos_512_v4
+40/211955/campos_512_v4
+40/211983/campos_512_v4
+40/211989/campos_512_v4
+40/212056/campos_512_v4
+40/212066/campos_512_v4
+40/212117/campos_512_v4
+40/212163/campos_512_v4
+40/212196/campos_512_v4
+40/212211/campos_512_v4
+40/212214/campos_512_v4
+40/212254/campos_512_v4
+40/212300/campos_512_v4
+40/212340/campos_512_v4
+40/212361/campos_512_v4
+40/212405/campos_512_v4
+40/212453/campos_512_v4
+40/212469/campos_512_v4
+40/212483/campos_512_v4
+40/212501/campos_512_v4
+40/212534/campos_512_v4
+40/212547/campos_512_v4
+40/212554/campos_512_v4
+40/212678/campos_512_v4
+40/212757/campos_512_v4
+40/212777/campos_512_v4
+40/212809/campos_512_v4
+40/212846/campos_512_v4
+40/212958/campos_512_v4
+40/212994/campos_512_v4
+40/213056/campos_512_v4
+40/213111/campos_512_v4
+40/213153/campos_512_v4
+40/213331/campos_512_v4
+40/213333/campos_512_v4
+40/213481/campos_512_v4
+40/213503/campos_512_v4
+40/213528/campos_512_v4
+40/213532/campos_512_v4
+40/213617/campos_512_v4
+40/213648/campos_512_v4
+40/213683/campos_512_v4
+40/213704/campos_512_v4
+40/213716/campos_512_v4
+40/213743/campos_512_v4
+40/213751/campos_512_v4
+40/213767/campos_512_v4
+40/213776/campos_512_v4
+40/213783/campos_512_v4
+40/213784/campos_512_v4
+40/213791/campos_512_v4
+40/213798/campos_512_v4
+40/213810/campos_512_v4
+40/213824/campos_512_v4
+40/213839/campos_512_v4
+40/213911/campos_512_v4
+40/213913/campos_512_v4
+40/213915/campos_512_v4
+40/213952/campos_512_v4
+40/213994/campos_512_v4
+40/214041/campos_512_v4
+40/214065/campos_512_v4
+40/214083/campos_512_v4
+40/214091/campos_512_v4
+40/214154/campos_512_v4
+40/214155/campos_512_v4
+40/214163/campos_512_v4
+40/214242/campos_512_v4
+40/214352/campos_512_v4
+40/214360/campos_512_v4
+40/214418/campos_512_v4
+40/214460/campos_512_v4
+40/214547/campos_512_v4
+40/214563/campos_512_v4
+40/214595/campos_512_v4
+40/214638/campos_512_v4
+40/214663/campos_512_v4
+40/214723/campos_512_v4
+40/214776/campos_512_v4
+40/214852/campos_512_v4
+40/214857/campos_512_v4
+40/214865/campos_512_v4
+40/214951/campos_512_v4
+41/215008/campos_512_v4
+41/215105/campos_512_v4
+41/215166/campos_512_v4
+41/215206/campos_512_v4
+41/215213/campos_512_v4
+41/215228/campos_512_v4
+41/215231/campos_512_v4
+41/215276/campos_512_v4
+41/215331/campos_512_v4
+41/215395/campos_512_v4
+41/215416/campos_512_v4
+41/215479/campos_512_v4
+41/215506/campos_512_v4
+41/215534/campos_512_v4
+41/215569/campos_512_v4
+41/215617/campos_512_v4
+41/215655/campos_512_v4
+41/215660/campos_512_v4
+41/215670/campos_512_v4
+41/215688/campos_512_v4
+41/215698/campos_512_v4
+41/215713/campos_512_v4
+41/215724/campos_512_v4
+41/215734/campos_512_v4
+41/215753/campos_512_v4
+41/215754/campos_512_v4
+41/215761/campos_512_v4
+41/215823/campos_512_v4
+41/215884/campos_512_v4
+41/215927/campos_512_v4
+41/215993/campos_512_v4
+41/216012/campos_512_v4
+41/216014/campos_512_v4
+41/216019/campos_512_v4
+41/216029/campos_512_v4
+41/216040/campos_512_v4
+41/216124/campos_512_v4
+41/216139/campos_512_v4
+41/216141/campos_512_v4
+41/216172/campos_512_v4
+41/216208/campos_512_v4
+41/216240/campos_512_v4
+41/216245/campos_512_v4
+41/216307/campos_512_v4
+41/216345/campos_512_v4
+41/216363/campos_512_v4
+41/216453/campos_512_v4
+41/216467/campos_512_v4
+41/216503/campos_512_v4
+41/216527/campos_512_v4
+41/216535/campos_512_v4
+41/216590/campos_512_v4
+41/216620/campos_512_v4
+41/216658/campos_512_v4
+41/216663/campos_512_v4
+41/216676/campos_512_v4
+41/216694/campos_512_v4
+41/216700/campos_512_v4
+41/216717/campos_512_v4
+41/216754/campos_512_v4
+41/216850/campos_512_v4
+41/216876/campos_512_v4
+41/216882/campos_512_v4
+41/217004/campos_512_v4
+41/217030/campos_512_v4
+41/217091/campos_512_v4
+41/217092/campos_512_v4
+41/217120/campos_512_v4
+41/217146/campos_512_v4
+41/217154/campos_512_v4
+41/217183/campos_512_v4
+41/217199/campos_512_v4
+41/217203/campos_512_v4
+41/217218/campos_512_v4
+41/217277/campos_512_v4
+41/217383/campos_512_v4
+41/217436/campos_512_v4
+41/217443/campos_512_v4
+41/217453/campos_512_v4
+41/217466/campos_512_v4
+41/217508/campos_512_v4
+41/217509/campos_512_v4
+41/217522/campos_512_v4
+41/217527/campos_512_v4
+41/217531/campos_512_v4
+41/217545/campos_512_v4
+41/217548/campos_512_v4
+41/217628/campos_512_v4
+41/217663/campos_512_v4
+41/217687/campos_512_v4
+41/217814/campos_512_v4
+41/217816/campos_512_v4
+41/217836/campos_512_v4
+41/217839/campos_512_v4
+41/217844/campos_512_v4
+41/217860/campos_512_v4
+41/217876/campos_512_v4
+41/217889/campos_512_v4
+41/217982/campos_512_v4
+41/217989/campos_512_v4
+41/218032/campos_512_v4
+41/218047/campos_512_v4
+41/218057/campos_512_v4
+41/218087/campos_512_v4
+41/218088/campos_512_v4
+41/218158/campos_512_v4
+41/218203/campos_512_v4
+41/218241/campos_512_v4
+41/218242/campos_512_v4
+41/218287/campos_512_v4
+41/218303/campos_512_v4
+41/218326/campos_512_v4
+41/218333/campos_512_v4
+41/218361/campos_512_v4
+41/218375/campos_512_v4
+41/218381/campos_512_v4
+41/218392/campos_512_v4
+41/218436/campos_512_v4
+41/218455/campos_512_v4
+41/218457/campos_512_v4
+41/218461/campos_512_v4
+41/218475/campos_512_v4
+41/218481/campos_512_v4
+41/218527/campos_512_v4
+41/218558/campos_512_v4
+41/218606/campos_512_v4
+41/218607/campos_512_v4
+41/218644/campos_512_v4
+41/218671/campos_512_v4
+41/218703/campos_512_v4
+41/218726/campos_512_v4
+41/218732/campos_512_v4
+41/218747/campos_512_v4
+41/218768/campos_512_v4
+41/218783/campos_512_v4
+41/218793/campos_512_v4
+41/218804/campos_512_v4
+41/218816/campos_512_v4
+41/218880/campos_512_v4
+41/218899/campos_512_v4
+41/218915/campos_512_v4
+41/218967/campos_512_v4
+41/219021/campos_512_v4
+41/219057/campos_512_v4
+41/219088/campos_512_v4
+41/219168/campos_512_v4
+41/219186/campos_512_v4
+41/219274/campos_512_v4
+41/219283/campos_512_v4
+41/219351/campos_512_v4
+41/219377/campos_512_v4
+41/219389/campos_512_v4
+41/219415/campos_512_v4
+41/219438/campos_512_v4
+41/219622/campos_512_v4
+41/219626/campos_512_v4
+41/219662/campos_512_v4
+41/219705/campos_512_v4
+41/219730/campos_512_v4
+41/219852/campos_512_v4
+41/219858/campos_512_v4
+41/219892/campos_512_v4
+41/219898/campos_512_v4
+41/219946/campos_512_v4
+41/219947/campos_512_v4
+41/219962/campos_512_v4
+41/219966/campos_512_v4
+41/219974/campos_512_v4
+42/220026/campos_512_v4
+42/220099/campos_512_v4
+42/220125/campos_512_v4
+42/220184/campos_512_v4
+42/220201/campos_512_v4
+42/220217/campos_512_v4
+42/220220/campos_512_v4
+42/220265/campos_512_v4
+42/220272/campos_512_v4
+42/220279/campos_512_v4
+42/220342/campos_512_v4
+42/220433/campos_512_v4
+42/220484/campos_512_v4
+42/220485/campos_512_v4
+42/220567/campos_512_v4
+42/220604/campos_512_v4
+42/220654/campos_512_v4
+42/220730/campos_512_v4
+42/220773/campos_512_v4
+42/220832/campos_512_v4
+42/220901/campos_512_v4
+42/220920/campos_512_v4
+42/220922/campos_512_v4
+42/220929/campos_512_v4
+42/220943/campos_512_v4
+42/220986/campos_512_v4
+42/220995/campos_512_v4
+42/221005/campos_512_v4
+42/221019/campos_512_v4
+42/221020/campos_512_v4
+42/221076/campos_512_v4
+42/221091/campos_512_v4
+42/221105/campos_512_v4
+42/221107/campos_512_v4
+42/221177/campos_512_v4
+42/221279/campos_512_v4
+42/221298/campos_512_v4
+42/221333/campos_512_v4
+42/221354/campos_512_v4
+42/221366/campos_512_v4
+42/221405/campos_512_v4
+42/221448/campos_512_v4
+42/221470/campos_512_v4
+42/221481/campos_512_v4
+42/221487/campos_512_v4
+42/221493/campos_512_v4
+42/221521/campos_512_v4
+42/221609/campos_512_v4
+42/221620/campos_512_v4
+42/221634/campos_512_v4
+42/221678/campos_512_v4
+42/221686/campos_512_v4
+42/221702/campos_512_v4
+42/221737/campos_512_v4
+42/221761/campos_512_v4
+42/221860/campos_512_v4
+42/221983/campos_512_v4
+42/221991/campos_512_v4
+42/222004/campos_512_v4
+42/222009/campos_512_v4
+42/222054/campos_512_v4
+42/222057/campos_512_v4
+42/222090/campos_512_v4
+42/222091/campos_512_v4
+42/222094/campos_512_v4
+42/222102/campos_512_v4
+42/222197/campos_512_v4
+42/222207/campos_512_v4
+42/222229/campos_512_v4
+42/222269/campos_512_v4
+42/222299/campos_512_v4
+42/222310/campos_512_v4
+42/222358/campos_512_v4
+42/222372/campos_512_v4
+42/222410/campos_512_v4
+42/222507/campos_512_v4
+42/222516/campos_512_v4
+42/222518/campos_512_v4
+42/222524/campos_512_v4
+42/222543/campos_512_v4
+42/222568/campos_512_v4
+42/222578/campos_512_v4
+42/222604/campos_512_v4
+42/222616/campos_512_v4
+42/222661/campos_512_v4
+42/222665/campos_512_v4
+42/222696/campos_512_v4
+42/222707/campos_512_v4
+42/222718/campos_512_v4
+42/222750/campos_512_v4
+42/222758/campos_512_v4
+42/222812/campos_512_v4
+42/222838/campos_512_v4
+42/222879/campos_512_v4
+42/222913/campos_512_v4
+42/222915/campos_512_v4
+42/223005/campos_512_v4
+42/223154/campos_512_v4
+42/223160/campos_512_v4
+42/223223/campos_512_v4
+42/223231/campos_512_v4
+42/223234/campos_512_v4
+42/223297/campos_512_v4
+42/223308/campos_512_v4
+42/223318/campos_512_v4
+42/223356/campos_512_v4
+42/223383/campos_512_v4
+42/223432/campos_512_v4
+42/223433/campos_512_v4
+42/223468/campos_512_v4
+42/223487/campos_512_v4
+42/223492/campos_512_v4
+42/223516/campos_512_v4
+42/223560/campos_512_v4
+42/223565/campos_512_v4
+42/223719/campos_512_v4
+42/223791/campos_512_v4
+42/223796/campos_512_v4
+42/223806/campos_512_v4
+42/223833/campos_512_v4
+42/223864/campos_512_v4
+42/223980/campos_512_v4
+42/223985/campos_512_v4
+42/223988/campos_512_v4
+42/224002/campos_512_v4
+42/224033/campos_512_v4
+42/224054/campos_512_v4
+42/224056/campos_512_v4
+42/224066/campos_512_v4
+42/224091/campos_512_v4
+42/224094/campos_512_v4
+42/224124/campos_512_v4
+42/224147/campos_512_v4
+42/224217/campos_512_v4
+42/224294/campos_512_v4
+42/224312/campos_512_v4
+42/224313/campos_512_v4
+42/224327/campos_512_v4
+42/224341/campos_512_v4
+42/224358/campos_512_v4
+42/224468/campos_512_v4
+42/224507/campos_512_v4
+42/224527/campos_512_v4
+42/224548/campos_512_v4
+42/224564/campos_512_v4
+42/224571/campos_512_v4
+42/224574/campos_512_v4
+42/224618/campos_512_v4
+42/224630/campos_512_v4
+42/224642/campos_512_v4
+42/224673/campos_512_v4
+42/224734/campos_512_v4
+42/224751/campos_512_v4
+42/224831/campos_512_v4
+42/224852/campos_512_v4
+42/224910/campos_512_v4
+43/225003/campos_512_v4
+43/225032/campos_512_v4
+43/225044/campos_512_v4
+43/225045/campos_512_v4
+43/225059/campos_512_v4
+43/225070/campos_512_v4
+43/225163/campos_512_v4
+43/225227/campos_512_v4
+43/225327/campos_512_v4
+43/225348/campos_512_v4
+43/225350/campos_512_v4
+43/225351/campos_512_v4
+43/225353/campos_512_v4
+43/225392/campos_512_v4
+43/225395/campos_512_v4
+43/225410/campos_512_v4
+43/225424/campos_512_v4
+43/225429/campos_512_v4
+43/225437/campos_512_v4
+43/225485/campos_512_v4
+43/225506/campos_512_v4
+43/225551/campos_512_v4
+43/225554/campos_512_v4
+43/225586/campos_512_v4
+43/225662/campos_512_v4
+43/225675/campos_512_v4
+43/225738/campos_512_v4
+43/225765/campos_512_v4
+43/225840/campos_512_v4
+43/225844/campos_512_v4
+43/225858/campos_512_v4
+43/225942/campos_512_v4
+43/226002/campos_512_v4
+43/226016/campos_512_v4
+43/226028/campos_512_v4
+43/226033/campos_512_v4
+43/226046/campos_512_v4
+43/226062/campos_512_v4
+43/226067/campos_512_v4
+43/226072/campos_512_v4
+43/226152/campos_512_v4
+43/226161/campos_512_v4
+43/226173/campos_512_v4
+43/226242/campos_512_v4
+43/226243/campos_512_v4
+43/226245/campos_512_v4
+43/226279/campos_512_v4
+43/226289/campos_512_v4
+43/226348/campos_512_v4
+43/226369/campos_512_v4
+43/226380/campos_512_v4
+43/226393/campos_512_v4
+43/226417/campos_512_v4
+43/226455/campos_512_v4
+43/226585/campos_512_v4
+43/226614/campos_512_v4
+43/226619/campos_512_v4
+43/226692/campos_512_v4
+43/226693/campos_512_v4
+43/226727/campos_512_v4
+43/226745/campos_512_v4
+43/226827/campos_512_v4
+43/226841/campos_512_v4
+43/227015/campos_512_v4
+43/227050/campos_512_v4
+43/227056/campos_512_v4
+43/227120/campos_512_v4
+43/227181/campos_512_v4
+43/227186/campos_512_v4
+43/227190/campos_512_v4
+43/227244/campos_512_v4
+43/227269/campos_512_v4
+43/227271/campos_512_v4
+43/227281/campos_512_v4
+43/227283/campos_512_v4
+43/227286/campos_512_v4
+43/227295/campos_512_v4
+43/227334/campos_512_v4
+43/227401/campos_512_v4
+43/227420/campos_512_v4
+43/227454/campos_512_v4
+43/227505/campos_512_v4
+43/227526/campos_512_v4
+43/227609/campos_512_v4
+43/227635/campos_512_v4
+43/227671/campos_512_v4
+43/227683/campos_512_v4
+43/227746/campos_512_v4
+43/227827/campos_512_v4
+43/227833/campos_512_v4
+43/227906/campos_512_v4
+43/227956/campos_512_v4
+43/227970/campos_512_v4
+43/228024/campos_512_v4
+43/228026/campos_512_v4
+43/228036/campos_512_v4
+43/228051/campos_512_v4
+43/228060/campos_512_v4
+43/228072/campos_512_v4
+43/228085/campos_512_v4
+43/228096/campos_512_v4
+43/228108/campos_512_v4
+43/228138/campos_512_v4
+43/228203/campos_512_v4
+43/228211/campos_512_v4
+43/228236/campos_512_v4
+43/228254/campos_512_v4
+43/228257/campos_512_v4
+43/228266/campos_512_v4
+43/228300/campos_512_v4
+43/228318/campos_512_v4
+43/228325/campos_512_v4
+43/228329/campos_512_v4
+43/228351/campos_512_v4
+43/228378/campos_512_v4
+43/228388/campos_512_v4
+43/228404/campos_512_v4
+43/228416/campos_512_v4
+43/228432/campos_512_v4
+43/228454/campos_512_v4
+43/228482/campos_512_v4
+43/228557/campos_512_v4
+43/228614/campos_512_v4
+43/228674/campos_512_v4
+43/228677/campos_512_v4
+43/228693/campos_512_v4
+43/228700/campos_512_v4
+43/228707/campos_512_v4
+43/228717/campos_512_v4
+43/228754/campos_512_v4
+43/228771/campos_512_v4
+43/228777/campos_512_v4
+43/228789/campos_512_v4
+43/228873/campos_512_v4
+43/228885/campos_512_v4
+43/228889/campos_512_v4
+43/228900/campos_512_v4
+43/228918/campos_512_v4
+43/228945/campos_512_v4
+43/228957/campos_512_v4
+43/229045/campos_512_v4
+43/229052/campos_512_v4
+43/229061/campos_512_v4
+43/229079/campos_512_v4
+43/229114/campos_512_v4
+43/229119/campos_512_v4
+43/229139/campos_512_v4
+43/229157/campos_512_v4
+43/229183/campos_512_v4
+43/229195/campos_512_v4
+43/229261/campos_512_v4
+43/229339/campos_512_v4
+43/229385/campos_512_v4
+43/229413/campos_512_v4
+43/229433/campos_512_v4
+43/229442/campos_512_v4
+43/229451/campos_512_v4
+43/229581/campos_512_v4
+43/229583/campos_512_v4
+43/229591/campos_512_v4
+43/229623/campos_512_v4
+43/229631/campos_512_v4
+43/229641/campos_512_v4
+43/229652/campos_512_v4
+43/229653/campos_512_v4
+43/229656/campos_512_v4
+43/229664/campos_512_v4
+43/229694/campos_512_v4
+43/229787/campos_512_v4
+43/229873/campos_512_v4
+43/229881/campos_512_v4
+43/229885/campos_512_v4
+43/229910/campos_512_v4
+43/229961/campos_512_v4
+43/229976/campos_512_v4
+43/229979/campos_512_v4
+44/230026/campos_512_v4
+44/230047/campos_512_v4
+44/230056/campos_512_v4
+44/230091/campos_512_v4
+44/230111/campos_512_v4
+44/230157/campos_512_v4
+44/230159/campos_512_v4
+44/230181/campos_512_v4
+44/230202/campos_512_v4
+44/230207/campos_512_v4
+44/230317/campos_512_v4
+44/230327/campos_512_v4
+44/230368/campos_512_v4
+44/230461/campos_512_v4
+44/230471/campos_512_v4
+44/230590/campos_512_v4
+44/230617/campos_512_v4
+44/230632/campos_512_v4
+44/230672/campos_512_v4
+44/230700/campos_512_v4
+44/230769/campos_512_v4
+44/230803/campos_512_v4
+44/230826/campos_512_v4
+44/230919/campos_512_v4
+44/230935/campos_512_v4
+44/230936/campos_512_v4
+44/230971/campos_512_v4
+44/231026/campos_512_v4
+44/231038/campos_512_v4
+44/231163/campos_512_v4
+44/231195/campos_512_v4
+44/231210/campos_512_v4
+44/231296/campos_512_v4
+44/231300/campos_512_v4
+44/231302/campos_512_v4
+44/231309/campos_512_v4
+44/231311/campos_512_v4
+44/231339/campos_512_v4
+44/231358/campos_512_v4
+44/231371/campos_512_v4
+44/231387/campos_512_v4
+44/231410/campos_512_v4
+44/231432/campos_512_v4
+44/231488/campos_512_v4
+44/231609/campos_512_v4
+44/231673/campos_512_v4
+44/231688/campos_512_v4
+44/231798/campos_512_v4
+44/231817/campos_512_v4
+44/231868/campos_512_v4
+44/231875/campos_512_v4
+44/231907/campos_512_v4
+44/231927/campos_512_v4
+44/231933/campos_512_v4
+44/231954/campos_512_v4
+44/232028/campos_512_v4
+44/232046/campos_512_v4
+44/232071/campos_512_v4
+44/232089/campos_512_v4
+44/232106/campos_512_v4
+44/232112/campos_512_v4
+44/232126/campos_512_v4
+44/232138/campos_512_v4
+44/232172/campos_512_v4
+44/232211/campos_512_v4
+44/232230/campos_512_v4
+44/232271/campos_512_v4
+44/232306/campos_512_v4
+44/232349/campos_512_v4
+44/232350/campos_512_v4
+44/232386/campos_512_v4
+44/232391/campos_512_v4
+44/232423/campos_512_v4
+44/232424/campos_512_v4
+44/232438/campos_512_v4
+44/232462/campos_512_v4
+44/232478/campos_512_v4
+44/232515/campos_512_v4
+44/232533/campos_512_v4
+44/232711/campos_512_v4
+44/232726/campos_512_v4
+44/232727/campos_512_v4
+44/232735/campos_512_v4
+44/232742/campos_512_v4
+44/232765/campos_512_v4
+44/232870/campos_512_v4
+44/232883/campos_512_v4
+44/232927/campos_512_v4
+44/232954/campos_512_v4
+44/232974/campos_512_v4
+44/233053/campos_512_v4
+44/233088/campos_512_v4
+44/233092/campos_512_v4
+44/233104/campos_512_v4
+44/233141/campos_512_v4
+44/233153/campos_512_v4
+44/233175/campos_512_v4
+44/233202/campos_512_v4
+44/233214/campos_512_v4
+44/233218/campos_512_v4
+44/233294/campos_512_v4
+44/233320/campos_512_v4
+44/233385/campos_512_v4
+44/233401/campos_512_v4
+44/233404/campos_512_v4
+44/233460/campos_512_v4
+44/233468/campos_512_v4
+44/233483/campos_512_v4
+44/233503/campos_512_v4
+44/233553/campos_512_v4
+44/233570/campos_512_v4
+44/233616/campos_512_v4
+44/233621/campos_512_v4
+44/233645/campos_512_v4
+44/233666/campos_512_v4
+44/233686/campos_512_v4
+44/233752/campos_512_v4
+44/233759/campos_512_v4
+44/233805/campos_512_v4
+44/233910/campos_512_v4
+44/233987/campos_512_v4
+44/233992/campos_512_v4
+44/234018/campos_512_v4
+44/234033/campos_512_v4
+44/234037/campos_512_v4
+44/234038/campos_512_v4
+44/234039/campos_512_v4
+44/234044/campos_512_v4
+44/234069/campos_512_v4
+44/234115/campos_512_v4
+44/234121/campos_512_v4
+44/234138/campos_512_v4
+44/234169/campos_512_v4
+44/234179/campos_512_v4
+44/234192/campos_512_v4
+44/234193/campos_512_v4
+44/234204/campos_512_v4
+44/234308/campos_512_v4
+44/234559/campos_512_v4
+44/234571/campos_512_v4
+44/234660/campos_512_v4
+44/234785/campos_512_v4
+44/234796/campos_512_v4
+44/234837/campos_512_v4
+44/234844/campos_512_v4
+44/234850/campos_512_v4
+44/234885/campos_512_v4
+44/234896/campos_512_v4
+44/234907/campos_512_v4
+44/234917/campos_512_v4
+44/234930/campos_512_v4
+44/234957/campos_512_v4
+44/234967/campos_512_v4
+44/234985/campos_512_v4
+45/235075/campos_512_v4
+45/235079/campos_512_v4
+45/235126/campos_512_v4
+45/235130/campos_512_v4
+45/235202/campos_512_v4
+45/235214/campos_512_v4
+45/235340/campos_512_v4
+45/235397/campos_512_v4
+45/235517/campos_512_v4
+45/235550/campos_512_v4
+45/235587/campos_512_v4
+45/235591/campos_512_v4
+45/235602/campos_512_v4
+45/235668/campos_512_v4
+45/235669/campos_512_v4
+45/235809/campos_512_v4
+45/235817/campos_512_v4
+45/235855/campos_512_v4
+45/235944/campos_512_v4
+45/235948/campos_512_v4
+45/235950/campos_512_v4
+45/235964/campos_512_v4
+45/235974/campos_512_v4
+45/236013/campos_512_v4
+45/236119/campos_512_v4
+45/236132/campos_512_v4
+45/236264/campos_512_v4
+45/236285/campos_512_v4
+45/236306/campos_512_v4
+45/236348/campos_512_v4
+45/236373/campos_512_v4
+45/236417/campos_512_v4
+45/236423/campos_512_v4
+45/236425/campos_512_v4
+45/236540/campos_512_v4
+45/236591/campos_512_v4
+45/236593/campos_512_v4
+45/236603/campos_512_v4
+45/236634/campos_512_v4
+45/236663/campos_512_v4
+45/236747/campos_512_v4
+45/236755/campos_512_v4
+45/236874/campos_512_v4
+45/236907/campos_512_v4
+45/236932/campos_512_v4
+45/236934/campos_512_v4
+45/236955/campos_512_v4
+45/236967/campos_512_v4
+45/236972/campos_512_v4
+45/236981/campos_512_v4
+45/236990/campos_512_v4
+45/237179/campos_512_v4
+45/237194/campos_512_v4
+45/237221/campos_512_v4
+45/237225/campos_512_v4
+45/237244/campos_512_v4
+45/237284/campos_512_v4
+45/237313/campos_512_v4
+45/237330/campos_512_v4
+45/237350/campos_512_v4
+45/237356/campos_512_v4
+45/237401/campos_512_v4
+45/237423/campos_512_v4
+45/237436/campos_512_v4
+45/237446/campos_512_v4
+45/237481/campos_512_v4
+45/237492/campos_512_v4
+45/237498/campos_512_v4
+45/237503/campos_512_v4
+45/237511/campos_512_v4
+45/237553/campos_512_v4
+45/237632/campos_512_v4
+45/237702/campos_512_v4
+45/237720/campos_512_v4
+45/237757/campos_512_v4
+45/237782/campos_512_v4
+45/237841/campos_512_v4
+45/237877/campos_512_v4
+45/237892/campos_512_v4
+45/237913/campos_512_v4
+45/237914/campos_512_v4
+45/237919/campos_512_v4
+45/237946/campos_512_v4
+45/237990/campos_512_v4
+45/238000/campos_512_v4
+45/238003/campos_512_v4
+45/238006/campos_512_v4
+45/238052/campos_512_v4
+45/238089/campos_512_v4
+45/238093/campos_512_v4
+45/238103/campos_512_v4
+45/238119/campos_512_v4
+45/238134/campos_512_v4
+45/238174/campos_512_v4
+45/238179/campos_512_v4
+45/238243/campos_512_v4
+45/238301/campos_512_v4
+45/238349/campos_512_v4
+45/238374/campos_512_v4
+45/238379/campos_512_v4
+45/238389/campos_512_v4
+45/238415/campos_512_v4
+45/238416/campos_512_v4
+45/238427/campos_512_v4
+45/238459/campos_512_v4
+45/238489/campos_512_v4
+45/238574/campos_512_v4
+45/238631/campos_512_v4
+45/238676/campos_512_v4
+45/238690/campos_512_v4
+45/238702/campos_512_v4
+45/238711/campos_512_v4
+45/238744/campos_512_v4
+45/238811/campos_512_v4
+45/238832/campos_512_v4
+45/238893/campos_512_v4
+45/238895/campos_512_v4
+45/238917/campos_512_v4
+45/238921/campos_512_v4
+45/238931/campos_512_v4
+45/238932/campos_512_v4
+45/238979/campos_512_v4
+45/238982/campos_512_v4
+45/239082/campos_512_v4
+45/239117/campos_512_v4
+45/239118/campos_512_v4
+45/239205/campos_512_v4
+45/239224/campos_512_v4
+45/239236/campos_512_v4
+45/239238/campos_512_v4
+45/239267/campos_512_v4
+45/239407/campos_512_v4
+45/239445/campos_512_v4
+45/239464/campos_512_v4
+45/239480/campos_512_v4
+45/239524/campos_512_v4
+45/239554/campos_512_v4
+45/239577/campos_512_v4
+45/239600/campos_512_v4
+45/239626/campos_512_v4
+45/239637/campos_512_v4
+45/239667/campos_512_v4
+45/239689/campos_512_v4
+45/239690/campos_512_v4
+45/239704/campos_512_v4
+45/239752/campos_512_v4
+45/239800/campos_512_v4
+45/239855/campos_512_v4
+45/239958/campos_512_v4
+45/239962/campos_512_v4
+45/239977/campos_512_v4
+45/239998/campos_512_v4
+46/240010/campos_512_v4
+46/240033/campos_512_v4
+46/240054/campos_512_v4
+46/240127/campos_512_v4
+46/240160/campos_512_v4
+46/240165/campos_512_v4
+46/240179/campos_512_v4
+46/240214/campos_512_v4
+46/240228/campos_512_v4
+46/240266/campos_512_v4
+46/240296/campos_512_v4
+46/240383/campos_512_v4
+46/240387/campos_512_v4
+46/240467/campos_512_v4
+46/240514/campos_512_v4
+46/240545/campos_512_v4
+46/240601/campos_512_v4
+46/240627/campos_512_v4
+46/240640/campos_512_v4
+46/240664/campos_512_v4
+46/240675/campos_512_v4
+46/240694/campos_512_v4
+46/240738/campos_512_v4
+46/240741/campos_512_v4
+46/240775/campos_512_v4
+46/240811/campos_512_v4
+46/240884/campos_512_v4
+46/240987/campos_512_v4
+46/241026/campos_512_v4
+46/241078/campos_512_v4
+46/241102/campos_512_v4
+46/241132/campos_512_v4
+46/241140/campos_512_v4
+46/241167/campos_512_v4
+46/241233/campos_512_v4
+46/241343/campos_512_v4
+46/241416/campos_512_v4
+46/241427/campos_512_v4
+46/241454/campos_512_v4
+46/241455/campos_512_v4
+46/241500/campos_512_v4
+46/241506/campos_512_v4
+46/241582/campos_512_v4
+46/241598/campos_512_v4
+46/241632/campos_512_v4
+46/241659/campos_512_v4
+46/241683/campos_512_v4
+46/241769/campos_512_v4
+46/241777/campos_512_v4
+46/241780/campos_512_v4
+46/241811/campos_512_v4
+46/241819/campos_512_v4
+46/241911/campos_512_v4
+46/241952/campos_512_v4
+46/241967/campos_512_v4
+46/241995/campos_512_v4
+46/242003/campos_512_v4
+46/242039/campos_512_v4
+46/242082/campos_512_v4
+46/242085/campos_512_v4
+46/242094/campos_512_v4
+46/242125/campos_512_v4
+46/242133/campos_512_v4
+46/242144/campos_512_v4
+46/242155/campos_512_v4
+46/242160/campos_512_v4
+46/242164/campos_512_v4
+46/242271/campos_512_v4
+46/242302/campos_512_v4
+46/242303/campos_512_v4
+46/242306/campos_512_v4
+46/242356/campos_512_v4
+46/242363/campos_512_v4
+46/242395/campos_512_v4
+46/242445/campos_512_v4
+46/242449/campos_512_v4
+46/242459/campos_512_v4
+46/242531/campos_512_v4
+46/242545/campos_512_v4
+46/242587/campos_512_v4
+46/242607/campos_512_v4
+46/242630/campos_512_v4
+46/242659/campos_512_v4
+46/242675/campos_512_v4
+46/242722/campos_512_v4
+46/242727/campos_512_v4
+46/242767/campos_512_v4
+46/242771/campos_512_v4
+46/242812/campos_512_v4
+46/242816/campos_512_v4
+46/242818/campos_512_v4
+46/242864/campos_512_v4
+46/242868/campos_512_v4
+46/242874/campos_512_v4
+46/242929/campos_512_v4
+46/242945/campos_512_v4
+46/242953/campos_512_v4
+46/242965/campos_512_v4
+46/243034/campos_512_v4
+46/243036/campos_512_v4
+46/243072/campos_512_v4
+46/243093/campos_512_v4
+46/243106/campos_512_v4
+46/243158/campos_512_v4
+46/243165/campos_512_v4
+46/243221/campos_512_v4
+46/243267/campos_512_v4
+46/243289/campos_512_v4
+46/243295/campos_512_v4
+46/243303/campos_512_v4
+46/243323/campos_512_v4
+46/243363/campos_512_v4
+46/243488/campos_512_v4
+46/243524/campos_512_v4
+46/243587/campos_512_v4
+46/243590/campos_512_v4
+46/243604/campos_512_v4
+46/243632/campos_512_v4
+46/243651/campos_512_v4
+46/243661/campos_512_v4
+46/243716/campos_512_v4
+46/243776/campos_512_v4
+46/243791/campos_512_v4
+46/243812/campos_512_v4
+46/243834/campos_512_v4
+46/244005/campos_512_v4
+46/244091/campos_512_v4
+46/244116/campos_512_v4
+46/244117/campos_512_v4
+46/244167/campos_512_v4
+46/244177/campos_512_v4
+46/244182/campos_512_v4
+46/244276/campos_512_v4
+46/244320/campos_512_v4
+46/244345/campos_512_v4
+46/244351/campos_512_v4
+46/244385/campos_512_v4
+46/244423/campos_512_v4
+46/244499/campos_512_v4
+46/244519/campos_512_v4
+46/244525/campos_512_v4
+46/244529/campos_512_v4
+46/244563/campos_512_v4
+46/244611/campos_512_v4
+46/244666/campos_512_v4
+46/244687/campos_512_v4
+46/244721/campos_512_v4
+46/244748/campos_512_v4
+46/244751/campos_512_v4
+46/244756/campos_512_v4
+46/244761/campos_512_v4
+46/244771/campos_512_v4
+46/244894/campos_512_v4
+46/244910/campos_512_v4
+47/245011/campos_512_v4
+47/245052/campos_512_v4
+47/245085/campos_512_v4
+47/245106/campos_512_v4
+47/245123/campos_512_v4
+47/245159/campos_512_v4
+47/245211/campos_512_v4
+47/245249/campos_512_v4
+47/245253/campos_512_v4
+47/245261/campos_512_v4
+47/245290/campos_512_v4
+47/245307/campos_512_v4
+47/245328/campos_512_v4
+47/245347/campos_512_v4
+47/245350/campos_512_v4
+47/245380/campos_512_v4
+47/245386/campos_512_v4
+47/245392/campos_512_v4
+47/245438/campos_512_v4
+47/245443/campos_512_v4
+47/245509/campos_512_v4
+47/245548/campos_512_v4
+47/245594/campos_512_v4
+47/245705/campos_512_v4
+47/245721/campos_512_v4
+47/245766/campos_512_v4
+47/245778/campos_512_v4
+47/245810/campos_512_v4
+47/245811/campos_512_v4
+47/245845/campos_512_v4
+47/245852/campos_512_v4
+47/245869/campos_512_v4
+47/245877/campos_512_v4
+47/245885/campos_512_v4
+47/245893/campos_512_v4
+47/245941/campos_512_v4
+47/245943/campos_512_v4
+47/245991/campos_512_v4
+47/246025/campos_512_v4
+47/246028/campos_512_v4
+47/246072/campos_512_v4
+47/246109/campos_512_v4
+47/246146/campos_512_v4
+47/246186/campos_512_v4
+47/246255/campos_512_v4
+47/246298/campos_512_v4
+47/246326/campos_512_v4
+47/246375/campos_512_v4
+47/246392/campos_512_v4
+47/246469/campos_512_v4
+47/246476/campos_512_v4
+47/246538/campos_512_v4
+47/246542/campos_512_v4
+47/246563/campos_512_v4
+47/246565/campos_512_v4
+47/246568/campos_512_v4
+47/246585/campos_512_v4
+47/246587/campos_512_v4
+47/246591/campos_512_v4
+47/246599/campos_512_v4
+47/246634/campos_512_v4
+47/246648/campos_512_v4
+47/246722/campos_512_v4
+47/246724/campos_512_v4
+47/246725/campos_512_v4
+47/246741/campos_512_v4
+47/246777/campos_512_v4
+47/246780/campos_512_v4
+47/246868/campos_512_v4
+47/246869/campos_512_v4
+47/246880/campos_512_v4
+47/246891/campos_512_v4
+47/246920/campos_512_v4
+47/246945/campos_512_v4
+47/246949/campos_512_v4
+47/246954/campos_512_v4
+47/246969/campos_512_v4
+47/246978/campos_512_v4
+47/246982/campos_512_v4
+47/247009/campos_512_v4
+47/247019/campos_512_v4
+47/247023/campos_512_v4
+47/247068/campos_512_v4
+47/247085/campos_512_v4
+47/247120/campos_512_v4
+47/247124/campos_512_v4
+47/247137/campos_512_v4
+47/247147/campos_512_v4
+47/247159/campos_512_v4
+47/247162/campos_512_v4
+47/247177/campos_512_v4
+47/247211/campos_512_v4
+47/247228/campos_512_v4
+47/247236/campos_512_v4
+47/247278/campos_512_v4
+47/247282/campos_512_v4
+47/247290/campos_512_v4
+47/247347/campos_512_v4
+47/247395/campos_512_v4
+47/247465/campos_512_v4
+47/247469/campos_512_v4
+47/247471/campos_512_v4
+47/247502/campos_512_v4
+47/247662/campos_512_v4
+47/247674/campos_512_v4
+47/247677/campos_512_v4
+47/247697/campos_512_v4
+47/247707/campos_512_v4
+47/247729/campos_512_v4
+47/247755/campos_512_v4
+47/247804/campos_512_v4
+47/247992/campos_512_v4
+47/248040/campos_512_v4
+47/248070/campos_512_v4
+47/248111/campos_512_v4
+47/248136/campos_512_v4
+47/248193/campos_512_v4
+47/248201/campos_512_v4
+47/248224/campos_512_v4
+47/248245/campos_512_v4
+47/248255/campos_512_v4
+47/248273/campos_512_v4
+47/248277/campos_512_v4
+47/248293/campos_512_v4
+47/248430/campos_512_v4
+47/248502/campos_512_v4
+47/248527/campos_512_v4
+47/248602/campos_512_v4
+47/248646/campos_512_v4
+47/248650/campos_512_v4
+47/248668/campos_512_v4
+47/248688/campos_512_v4
+47/248763/campos_512_v4
+47/248797/campos_512_v4
+47/248865/campos_512_v4
+47/248887/campos_512_v4
+47/248893/campos_512_v4
+47/248902/campos_512_v4
+47/248905/campos_512_v4
+47/248919/campos_512_v4
+47/248964/campos_512_v4
+47/248982/campos_512_v4
+47/249019/campos_512_v4
+47/249025/campos_512_v4
+47/249092/campos_512_v4
+47/249097/campos_512_v4
+47/249124/campos_512_v4
+47/249151/campos_512_v4
+47/249168/campos_512_v4
+47/249200/campos_512_v4
+47/249207/campos_512_v4
+47/249299/campos_512_v4
+47/249319/campos_512_v4
+47/249325/campos_512_v4
+47/249395/campos_512_v4
+47/249420/campos_512_v4
+47/249466/campos_512_v4
+47/249523/campos_512_v4
+47/249524/campos_512_v4
+47/249550/campos_512_v4
+47/249557/campos_512_v4
+47/249657/campos_512_v4
+47/249658/campos_512_v4
+47/249668/campos_512_v4
+47/249718/campos_512_v4
+47/249737/campos_512_v4
+47/249780/campos_512_v4
+47/249787/campos_512_v4
+47/249788/campos_512_v4
+47/249803/campos_512_v4
+47/249813/campos_512_v4
+47/249856/campos_512_v4
+47/249861/campos_512_v4
+47/249898/campos_512_v4
+47/249962/campos_512_v4
+47/250001/campos_512_v4
+48/250029/campos_512_v4
+48/250155/campos_512_v4
+48/250264/campos_512_v4
+48/250290/campos_512_v4
+48/250302/campos_512_v4
+48/250315/campos_512_v4
+48/250345/campos_512_v4
+48/250456/campos_512_v4
+48/250483/campos_512_v4
+48/250519/campos_512_v4
+48/250529/campos_512_v4
+48/250536/campos_512_v4
+48/250570/campos_512_v4
+48/250607/campos_512_v4
+48/250642/campos_512_v4
+48/250654/campos_512_v4
+48/250706/campos_512_v4
+48/250720/campos_512_v4
+48/250722/campos_512_v4
+48/250763/campos_512_v4
+48/250776/campos_512_v4
+48/250791/campos_512_v4
+48/250805/campos_512_v4
+48/250808/campos_512_v4
+48/250854/campos_512_v4
+48/250879/campos_512_v4
+48/250882/campos_512_v4
+48/250922/campos_512_v4
+48/250926/campos_512_v4
+48/250995/campos_512_v4
+48/251000/campos_512_v4
+48/251046/campos_512_v4
+48/251054/campos_512_v4
+48/251101/campos_512_v4
+48/251149/campos_512_v4
+48/251157/campos_512_v4
+48/251174/campos_512_v4
+48/251234/campos_512_v4
+48/251239/campos_512_v4
+48/251280/campos_512_v4
+48/251302/campos_512_v4
+48/251305/campos_512_v4
+48/251336/campos_512_v4
+48/251380/campos_512_v4
+48/251400/campos_512_v4
+48/251488/campos_512_v4
+48/251558/campos_512_v4
+48/251644/campos_512_v4
+48/251658/campos_512_v4
+48/251846/campos_512_v4
+48/251898/campos_512_v4
+48/251970/campos_512_v4
+48/252025/campos_512_v4
+48/252097/campos_512_v4
+48/252201/campos_512_v4
+48/252211/campos_512_v4
+48/252230/campos_512_v4
+48/252244/campos_512_v4
+48/252270/campos_512_v4
+48/252278/campos_512_v4
+48/252320/campos_512_v4
+48/252339/campos_512_v4
+48/252369/campos_512_v4
+48/252396/campos_512_v4
+48/252405/campos_512_v4
+48/252429/campos_512_v4
+48/252460/campos_512_v4
+48/252500/campos_512_v4
+48/252527/campos_512_v4
+48/252544/campos_512_v4
+48/252588/campos_512_v4
+48/252609/campos_512_v4
+48/252616/campos_512_v4
+48/252626/campos_512_v4
+48/252681/campos_512_v4
+48/252694/campos_512_v4
+48/252742/campos_512_v4
+48/252745/campos_512_v4
+48/252788/campos_512_v4
+48/252792/campos_512_v4
+48/252793/campos_512_v4
+48/252815/campos_512_v4
+48/252833/campos_512_v4
+48/252914/campos_512_v4
+48/253008/campos_512_v4
+48/253017/campos_512_v4
+48/253033/campos_512_v4
+48/253075/campos_512_v4
+48/253084/campos_512_v4
+48/253120/campos_512_v4
+48/253134/campos_512_v4
+48/253142/campos_512_v4
+48/253147/campos_512_v4
+48/253149/campos_512_v4
+48/253205/campos_512_v4
+48/253211/campos_512_v4
+48/253276/campos_512_v4
+48/253290/campos_512_v4
+48/253294/campos_512_v4
+48/253395/campos_512_v4
+48/253406/campos_512_v4
+48/253530/campos_512_v4
+48/253538/campos_512_v4
+48/253589/campos_512_v4
+48/253616/campos_512_v4
+48/253618/campos_512_v4
+48/253633/campos_512_v4
+48/253665/campos_512_v4
+48/253696/campos_512_v4
+48/253709/campos_512_v4
+48/253749/campos_512_v4
+48/253760/campos_512_v4
+48/253784/campos_512_v4
+48/253815/campos_512_v4
+48/253883/campos_512_v4
+48/253910/campos_512_v4
+48/253911/campos_512_v4
+48/253916/campos_512_v4
+48/253947/campos_512_v4
+48/253956/campos_512_v4
+48/253989/campos_512_v4
+48/253990/campos_512_v4
+48/253993/campos_512_v4
+48/254004/campos_512_v4
+48/254016/campos_512_v4
+48/254019/campos_512_v4
+48/254042/campos_512_v4
+48/254045/campos_512_v4
+48/254117/campos_512_v4
+48/254175/campos_512_v4
+48/254193/campos_512_v4
+48/254212/campos_512_v4
+48/254218/campos_512_v4
+48/254235/campos_512_v4
+48/254250/campos_512_v4
+48/254308/campos_512_v4
+48/254343/campos_512_v4
+48/254389/campos_512_v4
+48/254398/campos_512_v4
+48/254404/campos_512_v4
+48/254441/campos_512_v4
+48/254488/campos_512_v4
+48/254529/campos_512_v4
+48/254531/campos_512_v4
+48/254558/campos_512_v4
+48/254602/campos_512_v4
+48/254643/campos_512_v4
+48/254657/campos_512_v4
+48/254765/campos_512_v4
+48/254782/campos_512_v4
+48/254796/campos_512_v4
+48/254810/campos_512_v4
+48/254837/campos_512_v4
+48/254918/campos_512_v4
+48/254933/campos_512_v4
+48/254946/campos_512_v4
+48/254978/campos_512_v4
+49/255029/campos_512_v4
+49/255104/campos_512_v4
+49/255110/campos_512_v4
+49/255142/campos_512_v4
+49/255151/campos_512_v4
+49/255158/campos_512_v4
+49/255191/campos_512_v4
+49/255222/campos_512_v4
+49/255223/campos_512_v4
+49/255235/campos_512_v4
+49/255241/campos_512_v4
+49/255245/campos_512_v4
+49/255335/campos_512_v4
+49/255339/campos_512_v4
+49/255343/campos_512_v4
+49/255346/campos_512_v4
+49/255363/campos_512_v4
+49/255403/campos_512_v4
+49/255434/campos_512_v4
+49/255455/campos_512_v4
+49/255460/campos_512_v4
+49/255544/campos_512_v4
+49/255583/campos_512_v4
+49/255605/campos_512_v4
+49/255632/campos_512_v4
+49/255778/campos_512_v4
+49/255828/campos_512_v4
+49/255836/campos_512_v4
+49/255901/campos_512_v4
+49/255939/campos_512_v4
+49/255940/campos_512_v4
+49/256082/campos_512_v4
+49/256122/campos_512_v4
+49/256123/campos_512_v4
+49/256178/campos_512_v4
+49/256220/campos_512_v4
+49/256308/campos_512_v4
+49/256363/campos_512_v4
+49/256397/campos_512_v4
+49/256402/campos_512_v4
+49/256487/campos_512_v4
+49/256501/campos_512_v4
+49/256504/campos_512_v4
+49/256544/campos_512_v4
+49/256572/campos_512_v4
+49/256594/campos_512_v4
+49/256616/campos_512_v4
+49/256666/campos_512_v4
+49/256674/campos_512_v4
+49/256693/campos_512_v4
+49/256704/campos_512_v4
+49/256706/campos_512_v4
+49/256727/campos_512_v4
+49/256731/campos_512_v4
+49/256736/campos_512_v4
+49/256751/campos_512_v4
+49/256798/campos_512_v4
+49/256860/campos_512_v4
+49/256869/campos_512_v4
+49/256901/campos_512_v4
+49/256909/campos_512_v4
+49/256917/campos_512_v4
+49/256942/campos_512_v4
+49/257016/campos_512_v4
+49/257028/campos_512_v4
+49/257031/campos_512_v4
+49/257174/campos_512_v4
+49/257179/campos_512_v4
+49/257189/campos_512_v4
+49/257243/campos_512_v4
+49/257266/campos_512_v4
+49/257320/campos_512_v4
+49/257345/campos_512_v4
+49/257423/campos_512_v4
+49/257446/campos_512_v4
+49/257448/campos_512_v4
+49/257481/campos_512_v4
+49/257499/campos_512_v4
+49/257510/campos_512_v4
+49/257522/campos_512_v4
+49/257589/campos_512_v4
+49/257607/campos_512_v4
+49/257665/campos_512_v4
+49/257683/campos_512_v4
+49/257716/campos_512_v4
+49/257726/campos_512_v4
+49/257744/campos_512_v4
+49/257795/campos_512_v4
+49/257815/campos_512_v4
+49/257823/campos_512_v4
+49/257834/campos_512_v4
+49/257867/campos_512_v4
+49/257897/campos_512_v4
+49/257901/campos_512_v4
+49/257947/campos_512_v4
+49/257952/campos_512_v4
+49/257977/campos_512_v4
+49/258110/campos_512_v4
+49/258151/campos_512_v4
+49/258238/campos_512_v4
+49/258287/campos_512_v4
+49/258297/campos_512_v4
+49/258308/campos_512_v4
+49/258342/campos_512_v4
+49/258362/campos_512_v4
+49/258371/campos_512_v4
+49/258398/campos_512_v4
+49/258451/campos_512_v4
+49/258461/campos_512_v4
+49/258496/campos_512_v4
+49/258502/campos_512_v4
+49/258527/campos_512_v4
+49/258578/campos_512_v4
+49/258598/campos_512_v4
+49/258601/campos_512_v4
+49/258623/campos_512_v4
+49/258647/campos_512_v4
+49/258681/campos_512_v4
+49/258705/campos_512_v4
+49/258711/campos_512_v4
+49/258715/campos_512_v4
+49/258732/campos_512_v4
+49/258767/campos_512_v4
+49/258788/campos_512_v4
+49/258821/campos_512_v4
+49/258839/campos_512_v4
+49/258841/campos_512_v4
+49/258870/campos_512_v4
+49/258893/campos_512_v4
+49/258905/campos_512_v4
+49/258921/campos_512_v4
+49/258930/campos_512_v4
+49/258970/campos_512_v4
+49/258974/campos_512_v4
+49/258979/campos_512_v4
+49/258980/campos_512_v4
+49/258988/campos_512_v4
+49/259042/campos_512_v4
+49/259114/campos_512_v4
+49/259122/campos_512_v4
+49/259183/campos_512_v4
+49/259190/campos_512_v4
+49/259204/campos_512_v4
+49/259248/campos_512_v4
+49/259255/campos_512_v4
+49/259274/campos_512_v4
+49/259278/campos_512_v4
+49/259295/campos_512_v4
+49/259299/campos_512_v4
+49/259314/campos_512_v4
+49/259338/campos_512_v4
+49/259377/campos_512_v4
+49/259392/campos_512_v4
+49/259409/campos_512_v4
+49/259439/campos_512_v4
+49/259475/campos_512_v4
+49/259495/campos_512_v4
+49/259535/campos_512_v4
+49/259543/campos_512_v4
+49/259562/campos_512_v4
+49/259583/campos_512_v4
+49/259638/campos_512_v4
+49/259663/campos_512_v4
+49/259697/campos_512_v4
+49/259721/campos_512_v4
+49/259756/campos_512_v4
+49/259757/campos_512_v4
+49/259761/campos_512_v4
+49/259791/campos_512_v4
+49/259792/campos_512_v4
+49/259793/campos_512_v4
+49/259821/campos_512_v4
+49/259834/campos_512_v4
+49/259907/campos_512_v4
+49/259915/campos_512_v4
+49/259924/campos_512_v4
+49/259933/campos_512_v4
+49/259936/campos_512_v4
+49/259960/campos_512_v4
+5/35039/campos_512_v4
+5/35083/campos_512_v4
+5/35086/campos_512_v4
+5/35095/campos_512_v4
+5/35121/campos_512_v4
+5/35167/campos_512_v4
+5/35188/campos_512_v4
+5/35210/campos_512_v4
+5/35212/campos_512_v4
+5/35304/campos_512_v4
+5/35306/campos_512_v4
+5/35394/campos_512_v4
+5/35435/campos_512_v4
+5/35443/campos_512_v4
+5/35462/campos_512_v4
+5/35480/campos_512_v4
+5/35482/campos_512_v4
+5/35510/campos_512_v4
+5/35535/campos_512_v4
+5/35559/campos_512_v4
+5/35560/campos_512_v4
+5/35591/campos_512_v4
+5/35619/campos_512_v4
+5/35650/campos_512_v4
+5/35742/campos_512_v4
+5/35788/campos_512_v4
+5/35810/campos_512_v4
+5/35813/campos_512_v4
+5/35825/campos_512_v4
+5/35907/campos_512_v4
+5/35920/campos_512_v4
+5/35954/campos_512_v4
+5/35956/campos_512_v4
+5/35958/campos_512_v4
+5/35998/campos_512_v4
+5/36014/campos_512_v4
+5/36031/campos_512_v4
+5/36079/campos_512_v4
+5/36130/campos_512_v4
+5/36159/campos_512_v4
+5/36194/campos_512_v4
+5/36201/campos_512_v4
+5/36252/campos_512_v4
+5/36285/campos_512_v4
+5/36296/campos_512_v4
+5/36314/campos_512_v4
+5/36327/campos_512_v4
+5/36351/campos_512_v4
+5/36434/campos_512_v4
+5/36436/campos_512_v4
+5/36451/campos_512_v4
+5/36456/campos_512_v4
+5/36497/campos_512_v4
+5/36513/campos_512_v4
+5/36537/campos_512_v4
+5/36610/campos_512_v4
+5/36672/campos_512_v4
+5/36683/campos_512_v4
+5/36699/campos_512_v4
+5/36711/campos_512_v4
+5/36750/campos_512_v4
+5/36755/campos_512_v4
+5/36776/campos_512_v4
+5/36823/campos_512_v4
+5/36824/campos_512_v4
+5/36908/campos_512_v4
+5/36965/campos_512_v4
+5/36972/campos_512_v4
+5/36993/campos_512_v4
+5/37031/campos_512_v4
+5/37032/campos_512_v4
+5/37069/campos_512_v4
+5/37075/campos_512_v4
+5/37080/campos_512_v4
+5/37084/campos_512_v4
+5/37088/campos_512_v4
+5/37107/campos_512_v4
+5/37157/campos_512_v4
+5/37175/campos_512_v4
+5/37226/campos_512_v4
+5/37254/campos_512_v4
+5/37283/campos_512_v4
+5/37284/campos_512_v4
+5/37316/campos_512_v4
+5/37318/campos_512_v4
+5/37324/campos_512_v4
+5/37378/campos_512_v4
+5/37423/campos_512_v4
+5/37524/campos_512_v4
+5/37547/campos_512_v4
+5/37610/campos_512_v4
+5/37611/campos_512_v4
+5/37632/campos_512_v4
+5/37675/campos_512_v4
+5/37693/campos_512_v4
+5/37695/campos_512_v4
+5/37698/campos_512_v4
+5/37709/campos_512_v4
+5/37718/campos_512_v4
+5/37732/campos_512_v4
+5/37741/campos_512_v4
+5/37745/campos_512_v4
+5/37748/campos_512_v4
+5/37749/campos_512_v4
+5/37763/campos_512_v4
+5/37769/campos_512_v4
+5/37802/campos_512_v4
+5/37820/campos_512_v4
+5/37909/campos_512_v4
+5/37921/campos_512_v4
+5/37922/campos_512_v4
+5/37950/campos_512_v4
+5/37952/campos_512_v4
+5/37962/campos_512_v4
+5/37988/campos_512_v4
+5/38002/campos_512_v4
+5/38162/campos_512_v4
+5/38189/campos_512_v4
+5/38192/campos_512_v4
+5/38218/campos_512_v4
+5/38231/campos_512_v4
+5/38244/campos_512_v4
+5/38292/campos_512_v4
+5/38328/campos_512_v4
+5/38334/campos_512_v4
+5/38348/campos_512_v4
+5/38364/campos_512_v4
+5/38367/campos_512_v4
+5/38384/campos_512_v4
+5/38401/campos_512_v4
+5/38408/campos_512_v4
+5/38423/campos_512_v4
+5/38426/campos_512_v4
+5/38475/campos_512_v4
+5/38510/campos_512_v4
+5/38511/campos_512_v4
+5/38513/campos_512_v4
+5/38559/campos_512_v4
+5/38565/campos_512_v4
+5/38616/campos_512_v4
+5/38628/campos_512_v4
+5/38637/campos_512_v4
+5/38669/campos_512_v4
+5/38687/campos_512_v4
+5/38723/campos_512_v4
+5/38762/campos_512_v4
+5/38764/campos_512_v4
+5/38772/campos_512_v4
+5/38774/campos_512_v4
+5/38781/campos_512_v4
+5/38797/campos_512_v4
+5/38851/campos_512_v4
+5/38925/campos_512_v4
+5/38933/campos_512_v4
+5/38981/campos_512_v4
+5/39036/campos_512_v4
+5/39056/campos_512_v4
+5/39140/campos_512_v4
+5/39170/campos_512_v4
+5/39197/campos_512_v4
+5/39221/campos_512_v4
+5/39232/campos_512_v4
+5/39267/campos_512_v4
+5/39308/campos_512_v4
+5/39314/campos_512_v4
+5/39331/campos_512_v4
+5/39335/campos_512_v4
+5/39352/campos_512_v4
+5/39367/campos_512_v4
+5/39407/campos_512_v4
+5/39462/campos_512_v4
+5/39470/campos_512_v4
+5/39472/campos_512_v4
+5/39480/campos_512_v4
+5/39493/campos_512_v4
+5/39503/campos_512_v4
+5/39604/campos_512_v4
+5/39626/campos_512_v4
+5/39645/campos_512_v4
+5/39648/campos_512_v4
+5/39655/campos_512_v4
+5/39703/campos_512_v4
+5/39727/campos_512_v4
+5/39749/campos_512_v4
+5/39793/campos_512_v4
+5/39798/campos_512_v4
+5/39828/campos_512_v4
+5/39861/campos_512_v4
+5/39920/campos_512_v4
+5/39963/campos_512_v4
+5/39980/campos_512_v4
+50/260040/campos_512_v4
+50/260101/campos_512_v4
+50/260106/campos_512_v4
+50/260124/campos_512_v4
+50/260132/campos_512_v4
+50/260138/campos_512_v4
+50/260161/campos_512_v4
+50/260164/campos_512_v4
+50/260167/campos_512_v4
+50/260172/campos_512_v4
+50/260182/campos_512_v4
+50/260216/campos_512_v4
+50/260271/campos_512_v4
+50/260321/campos_512_v4
+50/260357/campos_512_v4
+50/260374/campos_512_v4
+50/260379/campos_512_v4
+50/260399/campos_512_v4
+50/260400/campos_512_v4
+50/260428/campos_512_v4
+50/260486/campos_512_v4
+50/260523/campos_512_v4
+50/260526/campos_512_v4
+50/260528/campos_512_v4
+50/260545/campos_512_v4
+50/260615/campos_512_v4
+50/260641/campos_512_v4
+50/260649/campos_512_v4
+50/260659/campos_512_v4
+50/260664/campos_512_v4
+50/260665/campos_512_v4
+50/260724/campos_512_v4
+50/260782/campos_512_v4
+50/260786/campos_512_v4
+50/260791/campos_512_v4
+50/260846/campos_512_v4
+50/260904/campos_512_v4
+50/260909/campos_512_v4
+50/260924/campos_512_v4
+50/260939/campos_512_v4
+50/260960/campos_512_v4
+50/260981/campos_512_v4
+50/261019/campos_512_v4
+50/261043/campos_512_v4
+50/261059/campos_512_v4
+50/261151/campos_512_v4
+50/261231/campos_512_v4
+50/261272/campos_512_v4
+50/261328/campos_512_v4
+50/261398/campos_512_v4
+50/261403/campos_512_v4
+50/261463/campos_512_v4
+50/261467/campos_512_v4
+50/261501/campos_512_v4
+50/261506/campos_512_v4
+50/261508/campos_512_v4
+50/261527/campos_512_v4
+50/261534/campos_512_v4
+50/261541/campos_512_v4
+50/261570/campos_512_v4
+50/261620/campos_512_v4
+50/261637/campos_512_v4
+50/261662/campos_512_v4
+50/261664/campos_512_v4
+50/261717/campos_512_v4
+50/261718/campos_512_v4
+50/261736/campos_512_v4
+50/261753/campos_512_v4
+50/261754/campos_512_v4
+50/261760/campos_512_v4
+50/261761/campos_512_v4
+50/261775/campos_512_v4
+50/261792/campos_512_v4
+50/261806/campos_512_v4
+50/261823/campos_512_v4
+50/261845/campos_512_v4
+50/261864/campos_512_v4
+50/261898/campos_512_v4
+50/261937/campos_512_v4
+50/261941/campos_512_v4
+50/262001/campos_512_v4
+50/262031/campos_512_v4
+50/262045/campos_512_v4
+50/262097/campos_512_v4
+50/262112/campos_512_v4
+50/262149/campos_512_v4
+50/262238/campos_512_v4
+50/262262/campos_512_v4
+50/262298/campos_512_v4
+50/262318/campos_512_v4
+50/262344/campos_512_v4
+50/262346/campos_512_v4
+50/262374/campos_512_v4
+50/262389/campos_512_v4
+50/262402/campos_512_v4
+50/262407/campos_512_v4
+50/262483/campos_512_v4
+50/262501/campos_512_v4
+50/262529/campos_512_v4
+50/262610/campos_512_v4
+50/262628/campos_512_v4
+50/262649/campos_512_v4
+50/262659/campos_512_v4
+50/262779/campos_512_v4
+50/262858/campos_512_v4
+50/262886/campos_512_v4
+50/262894/campos_512_v4
+50/262907/campos_512_v4
+50/262952/campos_512_v4
+50/262998/campos_512_v4
+50/263042/campos_512_v4
+50/263048/campos_512_v4
+50/263050/campos_512_v4
+50/263075/campos_512_v4
+50/263088/campos_512_v4
+50/263121/campos_512_v4
+50/263129/campos_512_v4
+50/263133/campos_512_v4
+50/263139/campos_512_v4
+50/263189/campos_512_v4
+50/263198/campos_512_v4
+50/263220/campos_512_v4
+50/263265/campos_512_v4
+50/263314/campos_512_v4
+50/263387/campos_512_v4
+50/263388/campos_512_v4
+50/263399/campos_512_v4
+50/263484/campos_512_v4
+50/263523/campos_512_v4
+50/263619/campos_512_v4
+50/263625/campos_512_v4
+50/263657/campos_512_v4
+50/263694/campos_512_v4
+50/263770/campos_512_v4
+50/263795/campos_512_v4
+50/263797/campos_512_v4
+50/263837/campos_512_v4
+50/263950/campos_512_v4
+50/263958/campos_512_v4
+50/264043/campos_512_v4
+50/264044/campos_512_v4
+50/264098/campos_512_v4
+50/264109/campos_512_v4
+50/264173/campos_512_v4
+50/264182/campos_512_v4
+50/264317/campos_512_v4
+50/264346/campos_512_v4
+50/264351/campos_512_v4
+50/264357/campos_512_v4
+50/264365/campos_512_v4
+50/264375/campos_512_v4
+50/264408/campos_512_v4
+50/264444/campos_512_v4
+50/264475/campos_512_v4
+50/264541/campos_512_v4
+50/264635/campos_512_v4
+50/264667/campos_512_v4
+50/264705/campos_512_v4
+50/264717/campos_512_v4
+50/264755/campos_512_v4
+50/264818/campos_512_v4
+50/264851/campos_512_v4
+50/264860/campos_512_v4
+50/264909/campos_512_v4
+50/264945/campos_512_v4
+50/264952/campos_512_v4
+50/264965/campos_512_v4
+51/265015/campos_512_v4
+51/265046/campos_512_v4
+51/265088/campos_512_v4
+51/265123/campos_512_v4
+51/265190/campos_512_v4
+51/265209/campos_512_v4
+51/265239/campos_512_v4
+51/265248/campos_512_v4
+51/265250/campos_512_v4
+51/265312/campos_512_v4
+51/265326/campos_512_v4
+51/265385/campos_512_v4
+51/265406/campos_512_v4
+51/265420/campos_512_v4
+51/265423/campos_512_v4
+51/265427/campos_512_v4
+51/265455/campos_512_v4
+51/265516/campos_512_v4
+51/265520/campos_512_v4
+51/265547/campos_512_v4
+51/265597/campos_512_v4
+51/265609/campos_512_v4
+51/265625/campos_512_v4
+51/265678/campos_512_v4
+51/265713/campos_512_v4
+51/265719/campos_512_v4
+51/265721/campos_512_v4
+51/265788/campos_512_v4
+51/265799/campos_512_v4
+51/265809/campos_512_v4
+51/265828/campos_512_v4
+51/265840/campos_512_v4
+51/265858/campos_512_v4
+51/265863/campos_512_v4
+51/265900/campos_512_v4
+51/265918/campos_512_v4
+51/265932/campos_512_v4
+51/265948/campos_512_v4
+51/266005/campos_512_v4
+51/266036/campos_512_v4
+51/266078/campos_512_v4
+51/266093/campos_512_v4
+51/266204/campos_512_v4
+51/266210/campos_512_v4
+51/266218/campos_512_v4
+51/266229/campos_512_v4
+51/266242/campos_512_v4
+51/266300/campos_512_v4
+51/266303/campos_512_v4
+51/266367/campos_512_v4
+51/266477/campos_512_v4
+51/266523/campos_512_v4
+51/266541/campos_512_v4
+51/266549/campos_512_v4
+51/266590/campos_512_v4
+51/266643/campos_512_v4
+51/266666/campos_512_v4
+51/266699/campos_512_v4
+51/266709/campos_512_v4
+51/266730/campos_512_v4
+51/266799/campos_512_v4
+51/266835/campos_512_v4
+51/266840/campos_512_v4
+51/266874/campos_512_v4
+51/266877/campos_512_v4
+51/266966/campos_512_v4
+51/266967/campos_512_v4
+51/266970/campos_512_v4
+51/267036/campos_512_v4
+51/267044/campos_512_v4
+51/267081/campos_512_v4
+51/267091/campos_512_v4
+51/267111/campos_512_v4
+51/267133/campos_512_v4
+51/267158/campos_512_v4
+51/267196/campos_512_v4
+51/267254/campos_512_v4
+51/267274/campos_512_v4
+51/267288/campos_512_v4
+51/267329/campos_512_v4
+51/267330/campos_512_v4
+51/267336/campos_512_v4
+51/267362/campos_512_v4
+51/267413/campos_512_v4
+51/267426/campos_512_v4
+51/267477/campos_512_v4
+51/267516/campos_512_v4
+51/267558/campos_512_v4
+51/267562/campos_512_v4
+51/267636/campos_512_v4
+51/267648/campos_512_v4
+51/267717/campos_512_v4
+51/267762/campos_512_v4
+51/267766/campos_512_v4
+51/267775/campos_512_v4
+51/267783/campos_512_v4
+51/267786/campos_512_v4
+51/267820/campos_512_v4
+51/267840/campos_512_v4
+51/267874/campos_512_v4
+51/267881/campos_512_v4
+51/267896/campos_512_v4
+51/267972/campos_512_v4
+51/267975/campos_512_v4
+51/267988/campos_512_v4
+51/268048/campos_512_v4
+51/268086/campos_512_v4
+51/268092/campos_512_v4
+51/268099/campos_512_v4
+51/268106/campos_512_v4
+51/268108/campos_512_v4
+51/268129/campos_512_v4
+51/268131/campos_512_v4
+51/268190/campos_512_v4
+51/268272/campos_512_v4
+51/268281/campos_512_v4
+51/268372/campos_512_v4
+51/268427/campos_512_v4
+51/268582/campos_512_v4
+51/268614/campos_512_v4
+51/268641/campos_512_v4
+51/268643/campos_512_v4
+51/268676/campos_512_v4
+51/268690/campos_512_v4
+51/268745/campos_512_v4
+51/268749/campos_512_v4
+51/268753/campos_512_v4
+51/268760/campos_512_v4
+51/268771/campos_512_v4
+51/268777/campos_512_v4
+51/268813/campos_512_v4
+51/268818/campos_512_v4
+51/268905/campos_512_v4
+51/268930/campos_512_v4
+51/268956/campos_512_v4
+51/268976/campos_512_v4
+51/268995/campos_512_v4
+51/269004/campos_512_v4
+51/269064/campos_512_v4
+51/269078/campos_512_v4
+51/269109/campos_512_v4
+51/269124/campos_512_v4
+51/269143/campos_512_v4
+51/269178/campos_512_v4
+51/269241/campos_512_v4
+51/269253/campos_512_v4
+51/269267/campos_512_v4
+51/269314/campos_512_v4
+51/269326/campos_512_v4
+51/269381/campos_512_v4
+51/269395/campos_512_v4
+51/269426/campos_512_v4
+51/269468/campos_512_v4
+51/269475/campos_512_v4
+51/269495/campos_512_v4
+51/269564/campos_512_v4
+51/269596/campos_512_v4
+51/269625/campos_512_v4
+51/269628/campos_512_v4
+51/269638/campos_512_v4
+51/269659/campos_512_v4
+51/269662/campos_512_v4
+51/269666/campos_512_v4
+51/269763/campos_512_v4
+51/269802/campos_512_v4
+51/269812/campos_512_v4
+51/269813/campos_512_v4
+51/269830/campos_512_v4
+51/269831/campos_512_v4
+51/269883/campos_512_v4
+51/269890/campos_512_v4
+51/269906/campos_512_v4
+51/269911/campos_512_v4
+51/269948/campos_512_v4
+51/269983/campos_512_v4
+51/269996/campos_512_v4
+52/270020/campos_512_v4
+52/270022/campos_512_v4
+52/270035/campos_512_v4
+52/270042/campos_512_v4
+52/270048/campos_512_v4
+52/270083/campos_512_v4
+52/270122/campos_512_v4
+52/270147/campos_512_v4
+52/270167/campos_512_v4
+52/270304/campos_512_v4
+52/270319/campos_512_v4
+52/270351/campos_512_v4
+52/270366/campos_512_v4
+52/270433/campos_512_v4
+52/270444/campos_512_v4
+52/270461/campos_512_v4
+52/270494/campos_512_v4
+52/270534/campos_512_v4
+52/270595/campos_512_v4
+52/270616/campos_512_v4
+52/270630/campos_512_v4
+52/270700/campos_512_v4
+52/270719/campos_512_v4
+52/270868/campos_512_v4
+52/270917/campos_512_v4
+52/270937/campos_512_v4
+52/270938/campos_512_v4
+52/270946/campos_512_v4
+52/270983/campos_512_v4
+52/271013/campos_512_v4
+52/271046/campos_512_v4
+52/271060/campos_512_v4
+52/271134/campos_512_v4
+52/271153/campos_512_v4
+52/271179/campos_512_v4
+52/271210/campos_512_v4
+52/271236/campos_512_v4
+52/271241/campos_512_v4
+52/271251/campos_512_v4
+52/271272/campos_512_v4
+52/271308/campos_512_v4
+52/271309/campos_512_v4
+52/271359/campos_512_v4
+52/271362/campos_512_v4
+52/271384/campos_512_v4
+52/271395/campos_512_v4
+52/271404/campos_512_v4
+52/271455/campos_512_v4
+52/271462/campos_512_v4
+52/271538/campos_512_v4
+52/271541/campos_512_v4
+52/271542/campos_512_v4
+52/271605/campos_512_v4
+52/271653/campos_512_v4
+52/271679/campos_512_v4
+52/271688/campos_512_v4
+52/271730/campos_512_v4
+52/271745/campos_512_v4
+52/271785/campos_512_v4
+52/271862/campos_512_v4
+52/271895/campos_512_v4
+52/271910/campos_512_v4
+52/271964/campos_512_v4
+52/271968/campos_512_v4
+52/271983/campos_512_v4
+52/272009/campos_512_v4
+52/272034/campos_512_v4
+52/272051/campos_512_v4
+52/272081/campos_512_v4
+52/272084/campos_512_v4
+52/272115/campos_512_v4
+52/272169/campos_512_v4
+52/272180/campos_512_v4
+52/272212/campos_512_v4
+52/272226/campos_512_v4
+52/272237/campos_512_v4
+52/272254/campos_512_v4
+52/272259/campos_512_v4
+52/272276/campos_512_v4
+52/272305/campos_512_v4
+52/272317/campos_512_v4
+52/272346/campos_512_v4
+52/272363/campos_512_v4
+52/272368/campos_512_v4
+52/272391/campos_512_v4
+52/272439/campos_512_v4
+52/272447/campos_512_v4
+52/272471/campos_512_v4
+52/272491/campos_512_v4
+52/272502/campos_512_v4
+52/272556/campos_512_v4
+52/272573/campos_512_v4
+52/272597/campos_512_v4
+52/272602/campos_512_v4
+52/272615/campos_512_v4
+52/272651/campos_512_v4
+52/272667/campos_512_v4
+52/272668/campos_512_v4
+52/272670/campos_512_v4
+52/272758/campos_512_v4
+52/272784/campos_512_v4
+52/272787/campos_512_v4
+52/272835/campos_512_v4
+52/272887/campos_512_v4
+52/272901/campos_512_v4
+52/272956/campos_512_v4
+52/272960/campos_512_v4
+52/272980/campos_512_v4
+52/273024/campos_512_v4
+52/273053/campos_512_v4
+52/273100/campos_512_v4
+52/273118/campos_512_v4
+52/273160/campos_512_v4
+52/273178/campos_512_v4
+52/273215/campos_512_v4
+52/273232/campos_512_v4
+52/273246/campos_512_v4
+52/273263/campos_512_v4
+52/273279/campos_512_v4
+52/273448/campos_512_v4
+52/273485/campos_512_v4
+52/273508/campos_512_v4
+52/273597/campos_512_v4
+52/273610/campos_512_v4
+52/273648/campos_512_v4
+52/273705/campos_512_v4
+52/273728/campos_512_v4
+52/273737/campos_512_v4
+52/273745/campos_512_v4
+52/273756/campos_512_v4
+52/273775/campos_512_v4
+52/273801/campos_512_v4
+52/273853/campos_512_v4
+52/273890/campos_512_v4
+52/273927/campos_512_v4
+52/273951/campos_512_v4
+52/274015/campos_512_v4
+52/274047/campos_512_v4
+52/274062/campos_512_v4
+52/274073/campos_512_v4
+52/274135/campos_512_v4
+52/274142/campos_512_v4
+52/274199/campos_512_v4
+52/274265/campos_512_v4
+52/274271/campos_512_v4
+52/274275/campos_512_v4
+52/274362/campos_512_v4
+52/274446/campos_512_v4
+52/274467/campos_512_v4
+52/274473/campos_512_v4
+52/274505/campos_512_v4
+52/274548/campos_512_v4
+52/274607/campos_512_v4
+52/274657/campos_512_v4
+52/274688/campos_512_v4
+52/274699/campos_512_v4
+52/274711/campos_512_v4
+52/274714/campos_512_v4
+52/274731/campos_512_v4
+52/274780/campos_512_v4
+52/274798/campos_512_v4
+52/274900/campos_512_v4
+52/274927/campos_512_v4
+52/274952/campos_512_v4
+53/275040/campos_512_v4
+53/275060/campos_512_v4
+53/275126/campos_512_v4
+53/275128/campos_512_v4
+53/275180/campos_512_v4
+53/275214/campos_512_v4
+53/275219/campos_512_v4
+53/275222/campos_512_v4
+53/275234/campos_512_v4
+53/275300/campos_512_v4
+53/275315/campos_512_v4
+53/275418/campos_512_v4
+53/275434/campos_512_v4
+53/275441/campos_512_v4
+53/275478/campos_512_v4
+53/275487/campos_512_v4
+53/275500/campos_512_v4
+53/275529/campos_512_v4
+53/275531/campos_512_v4
+53/275551/campos_512_v4
+53/275560/campos_512_v4
+53/275600/campos_512_v4
+53/275606/campos_512_v4
+53/275672/campos_512_v4
+53/275723/campos_512_v4
+53/275788/campos_512_v4
+53/275802/campos_512_v4
+53/275825/campos_512_v4
+53/275827/campos_512_v4
+53/275870/campos_512_v4
+53/275878/campos_512_v4
+53/275883/campos_512_v4
+53/275912/campos_512_v4
+53/276041/campos_512_v4
+53/276063/campos_512_v4
+53/276071/campos_512_v4
+53/276095/campos_512_v4
+53/276117/campos_512_v4
+53/276196/campos_512_v4
+53/276207/campos_512_v4
+53/276211/campos_512_v4
+53/276227/campos_512_v4
+53/276233/campos_512_v4
+53/276341/campos_512_v4
+53/276342/campos_512_v4
+53/276360/campos_512_v4
+53/276409/campos_512_v4
+53/276422/campos_512_v4
+53/276437/campos_512_v4
+53/276511/campos_512_v4
+53/276536/campos_512_v4
+53/276589/campos_512_v4
+53/276616/campos_512_v4
+53/276617/campos_512_v4
+53/276661/campos_512_v4
+53/276669/campos_512_v4
+53/276673/campos_512_v4
+53/276711/campos_512_v4
+53/276721/campos_512_v4
+53/276757/campos_512_v4
+53/276787/campos_512_v4
+53/276860/campos_512_v4
+53/276882/campos_512_v4
+53/276898/campos_512_v4
+53/276900/campos_512_v4
+53/276921/campos_512_v4
+53/276936/campos_512_v4
+53/276976/campos_512_v4
+53/276984/campos_512_v4
+53/277008/campos_512_v4
+53/277023/campos_512_v4
+53/277070/campos_512_v4
+53/277097/campos_512_v4
+53/277115/campos_512_v4
+53/277123/campos_512_v4
+53/277177/campos_512_v4
+53/277247/campos_512_v4
+53/277324/campos_512_v4
+53/277331/campos_512_v4
+53/277345/campos_512_v4
+53/277348/campos_512_v4
+53/277412/campos_512_v4
+53/277426/campos_512_v4
+53/277504/campos_512_v4
+53/277537/campos_512_v4
+53/277559/campos_512_v4
+53/277639/campos_512_v4
+53/277723/campos_512_v4
+53/277724/campos_512_v4
+53/277774/campos_512_v4
+53/277848/campos_512_v4
+53/277893/campos_512_v4
+53/278023/campos_512_v4
+53/278045/campos_512_v4
+53/278168/campos_512_v4
+53/278185/campos_512_v4
+53/278186/campos_512_v4
+53/278230/campos_512_v4
+53/278250/campos_512_v4
+53/278276/campos_512_v4
+53/278315/campos_512_v4
+53/278319/campos_512_v4
+53/278321/campos_512_v4
+53/278389/campos_512_v4
+53/278400/campos_512_v4
+53/278446/campos_512_v4
+53/278460/campos_512_v4
+53/278510/campos_512_v4
+53/278529/campos_512_v4
+53/278537/campos_512_v4
+53/278575/campos_512_v4
+53/278590/campos_512_v4
+53/278689/campos_512_v4
+53/278727/campos_512_v4
+53/278747/campos_512_v4
+53/278774/campos_512_v4
+53/278779/campos_512_v4
+53/278833/campos_512_v4
+53/278838/campos_512_v4
+53/278867/campos_512_v4
+53/278923/campos_512_v4
+53/278924/campos_512_v4
+53/278991/campos_512_v4
+53/279009/campos_512_v4
+53/279020/campos_512_v4
+53/279042/campos_512_v4
+53/279053/campos_512_v4
+53/279077/campos_512_v4
+53/279102/campos_512_v4
+53/279107/campos_512_v4
+53/279115/campos_512_v4
+53/279159/campos_512_v4
+53/279161/campos_512_v4
+53/279166/campos_512_v4
+53/279201/campos_512_v4
+53/279204/campos_512_v4
+53/279215/campos_512_v4
+53/279226/campos_512_v4
+53/279253/campos_512_v4
+53/279259/campos_512_v4
+53/279272/campos_512_v4
+53/279400/campos_512_v4
+53/279405/campos_512_v4
+53/279470/campos_512_v4
+53/279514/campos_512_v4
+53/279547/campos_512_v4
+53/279570/campos_512_v4
+53/279579/campos_512_v4
+53/279584/campos_512_v4
+53/279586/campos_512_v4
+53/279591/campos_512_v4
+53/279680/campos_512_v4
+53/279688/campos_512_v4
+53/279728/campos_512_v4
+53/279739/campos_512_v4
+53/279751/campos_512_v4
+53/279826/campos_512_v4
+53/279872/campos_512_v4
+53/279886/campos_512_v4
+53/279907/campos_512_v4
+53/279935/campos_512_v4
+53/279938/campos_512_v4
+53/279977/campos_512_v4
+53/279991/campos_512_v4
+53/279992/campos_512_v4
+54/280022/campos_512_v4
+54/280048/campos_512_v4
+54/280053/campos_512_v4
+54/280088/campos_512_v4
+54/280097/campos_512_v4
+54/280107/campos_512_v4
+54/280145/campos_512_v4
+54/280149/campos_512_v4
+54/280220/campos_512_v4
+54/280257/campos_512_v4
+54/280296/campos_512_v4
+54/280299/campos_512_v4
+54/280303/campos_512_v4
+54/280363/campos_512_v4
+54/280381/campos_512_v4
+54/280467/campos_512_v4
+54/280526/campos_512_v4
+54/280534/campos_512_v4
+54/280557/campos_512_v4
+54/280565/campos_512_v4
+54/280583/campos_512_v4
+54/280717/campos_512_v4
+54/280722/campos_512_v4
+54/280787/campos_512_v4
+54/280805/campos_512_v4
+54/280814/campos_512_v4
+54/280840/campos_512_v4
+54/280973/campos_512_v4
+54/281006/campos_512_v4
+54/281020/campos_512_v4
+54/281034/campos_512_v4
+54/281061/campos_512_v4
+54/281067/campos_512_v4
+54/281080/campos_512_v4
+54/281126/campos_512_v4
+54/281175/campos_512_v4
+54/281212/campos_512_v4
+54/281233/campos_512_v4
+54/281321/campos_512_v4
+54/281375/campos_512_v4
+54/281379/campos_512_v4
+54/281387/campos_512_v4
+54/281460/campos_512_v4
+54/281486/campos_512_v4
+54/281506/campos_512_v4
+54/281516/campos_512_v4
+54/281528/campos_512_v4
+54/281566/campos_512_v4
+54/281581/campos_512_v4
+54/281593/campos_512_v4
+54/281635/campos_512_v4
+54/281664/campos_512_v4
+54/281674/campos_512_v4
+54/281680/campos_512_v4
+54/281682/campos_512_v4
+54/281688/campos_512_v4
+54/281727/campos_512_v4
+54/281766/campos_512_v4
+54/281815/campos_512_v4
+54/281912/campos_512_v4
+54/281966/campos_512_v4
+54/281969/campos_512_v4
+54/282136/campos_512_v4
+54/282142/campos_512_v4
+54/282155/campos_512_v4
+54/282159/campos_512_v4
+54/282197/campos_512_v4
+54/282214/campos_512_v4
+54/282242/campos_512_v4
+54/282324/campos_512_v4
+54/282346/campos_512_v4
+54/282363/campos_512_v4
+54/282381/campos_512_v4
+54/282424/campos_512_v4
+54/282450/campos_512_v4
+54/282530/campos_512_v4
+54/282537/campos_512_v4
+54/282556/campos_512_v4
+54/282575/campos_512_v4
+54/282667/campos_512_v4
+54/282745/campos_512_v4
+54/282746/campos_512_v4
+54/282799/campos_512_v4
+54/282923/campos_512_v4
+54/282926/campos_512_v4
+54/282931/campos_512_v4
+54/283074/campos_512_v4
+54/283087/campos_512_v4
+54/283095/campos_512_v4
+54/283143/campos_512_v4
+54/283166/campos_512_v4
+54/283187/campos_512_v4
+54/283235/campos_512_v4
+54/283243/campos_512_v4
+54/283355/campos_512_v4
+54/283455/campos_512_v4
+54/283479/campos_512_v4
+54/283484/campos_512_v4
+54/283487/campos_512_v4
+54/283514/campos_512_v4
+54/283526/campos_512_v4
+54/283548/campos_512_v4
+54/283587/campos_512_v4
+54/283614/campos_512_v4
+54/283637/campos_512_v4
+54/283674/campos_512_v4
+54/283695/campos_512_v4
+54/283736/campos_512_v4
+54/283743/campos_512_v4
+54/283784/campos_512_v4
+54/283789/campos_512_v4
+54/283807/campos_512_v4
+54/283880/campos_512_v4
+54/283951/campos_512_v4
+54/283990/campos_512_v4
+54/283998/campos_512_v4
+54/284022/campos_512_v4
+54/284134/campos_512_v4
+54/284147/campos_512_v4
+54/284150/campos_512_v4
+54/284176/campos_512_v4
+54/284178/campos_512_v4
+54/284228/campos_512_v4
+54/284239/campos_512_v4
+54/284260/campos_512_v4
+54/284265/campos_512_v4
+54/284289/campos_512_v4
+54/284293/campos_512_v4
+54/284322/campos_512_v4
+54/284387/campos_512_v4
+54/284408/campos_512_v4
+54/284434/campos_512_v4
+54/284443/campos_512_v4
+54/284446/campos_512_v4
+54/284462/campos_512_v4
+54/284489/campos_512_v4
+54/284584/campos_512_v4
+54/284587/campos_512_v4
+54/284595/campos_512_v4
+54/284596/campos_512_v4
+54/284625/campos_512_v4
+54/284629/campos_512_v4
+54/284645/campos_512_v4
+54/284662/campos_512_v4
+54/284711/campos_512_v4
+54/284735/campos_512_v4
+54/284757/campos_512_v4
+54/284785/campos_512_v4
+54/284794/campos_512_v4
+54/284883/campos_512_v4
+54/284890/campos_512_v4
+54/284934/campos_512_v4
+54/284952/campos_512_v4
+54/284961/campos_512_v4
+54/284980/campos_512_v4
+54/284995/campos_512_v4
+55/285006/campos_512_v4
+55/285018/campos_512_v4
+55/285063/campos_512_v4
+55/285064/campos_512_v4
+55/285079/campos_512_v4
+55/285095/campos_512_v4
+55/285101/campos_512_v4
+55/285133/campos_512_v4
+55/285144/campos_512_v4
+55/285154/campos_512_v4
+55/285160/campos_512_v4
+55/285264/campos_512_v4
+55/285422/campos_512_v4
+55/285434/campos_512_v4
+55/285451/campos_512_v4
+55/285469/campos_512_v4
+55/285483/campos_512_v4
+55/285498/campos_512_v4
+55/285512/campos_512_v4
+55/285516/campos_512_v4
+55/285523/campos_512_v4
+55/285526/campos_512_v4
+55/285586/campos_512_v4
+55/285589/campos_512_v4
+55/285592/campos_512_v4
+55/285619/campos_512_v4
+55/285628/campos_512_v4
+55/285648/campos_512_v4
+55/285652/campos_512_v4
+55/285660/campos_512_v4
+55/285699/campos_512_v4
+55/285702/campos_512_v4
+55/285705/campos_512_v4
+55/285732/campos_512_v4
+55/285766/campos_512_v4
+55/285784/campos_512_v4
+55/285803/campos_512_v4
+55/285821/campos_512_v4
+55/285838/campos_512_v4
+55/285889/campos_512_v4
+55/285894/campos_512_v4
+55/285899/campos_512_v4
+55/285902/campos_512_v4
+55/285917/campos_512_v4
+55/285926/campos_512_v4
+55/285947/campos_512_v4
+55/285966/campos_512_v4
+55/285983/campos_512_v4
+55/286053/campos_512_v4
+55/286058/campos_512_v4
+55/286080/campos_512_v4
+55/286185/campos_512_v4
+55/286195/campos_512_v4
+55/286202/campos_512_v4
+55/286216/campos_512_v4
+55/286217/campos_512_v4
+55/286328/campos_512_v4
+55/286339/campos_512_v4
+55/286366/campos_512_v4
+55/286371/campos_512_v4
+55/286385/campos_512_v4
+55/286431/campos_512_v4
+55/286480/campos_512_v4
+55/286514/campos_512_v4
+55/286525/campos_512_v4
+55/286564/campos_512_v4
+55/286571/campos_512_v4
+55/286599/campos_512_v4
+55/286630/campos_512_v4
+55/286637/campos_512_v4
+55/286665/campos_512_v4
+55/286740/campos_512_v4
+55/286748/campos_512_v4
+55/286776/campos_512_v4
+55/286794/campos_512_v4
+55/286801/campos_512_v4
+55/286810/campos_512_v4
+55/286827/campos_512_v4
+55/286919/campos_512_v4
+55/286939/campos_512_v4
+55/286950/campos_512_v4
+55/286965/campos_512_v4
+55/286978/campos_512_v4
+55/287026/campos_512_v4
+55/287063/campos_512_v4
+55/287164/campos_512_v4
+55/287248/campos_512_v4
+55/287279/campos_512_v4
+55/287295/campos_512_v4
+55/287308/campos_512_v4
+55/287422/campos_512_v4
+55/287453/campos_512_v4
+55/287521/campos_512_v4
+55/287598/campos_512_v4
+55/287601/campos_512_v4
+55/287661/campos_512_v4
+55/287703/campos_512_v4
+55/287726/campos_512_v4
+55/287728/campos_512_v4
+55/287747/campos_512_v4
+55/287757/campos_512_v4
+55/287774/campos_512_v4
+55/287779/campos_512_v4
+55/287799/campos_512_v4
+55/287814/campos_512_v4
+55/287819/campos_512_v4
+55/287834/campos_512_v4
+55/287848/campos_512_v4
+55/287879/campos_512_v4
+55/287979/campos_512_v4
+55/287987/campos_512_v4
+55/288015/campos_512_v4
+55/288043/campos_512_v4
+55/288091/campos_512_v4
+55/288117/campos_512_v4
+55/288129/campos_512_v4
+55/288151/campos_512_v4
+55/288159/campos_512_v4
+55/288222/campos_512_v4
+55/288241/campos_512_v4
+55/288255/campos_512_v4
+55/288285/campos_512_v4
+55/288333/campos_512_v4
+55/288357/campos_512_v4
+55/288406/campos_512_v4
+55/288449/campos_512_v4
+55/288523/campos_512_v4
+55/288562/campos_512_v4
+55/288648/campos_512_v4
+55/288721/campos_512_v4
+55/288730/campos_512_v4
+55/288735/campos_512_v4
+55/288780/campos_512_v4
+55/288834/campos_512_v4
+55/288859/campos_512_v4
+55/288870/campos_512_v4
+55/288888/campos_512_v4
+55/288892/campos_512_v4
+55/288893/campos_512_v4
+55/288898/campos_512_v4
+55/288920/campos_512_v4
+55/288986/campos_512_v4
+55/289008/campos_512_v4
+55/289099/campos_512_v4
+55/289127/campos_512_v4
+55/289143/campos_512_v4
+55/289170/campos_512_v4
+55/289181/campos_512_v4
+55/289228/campos_512_v4
+55/289255/campos_512_v4
+55/289258/campos_512_v4
+55/289318/campos_512_v4
+55/289326/campos_512_v4
+55/289359/campos_512_v4
+55/289360/campos_512_v4
+55/289371/campos_512_v4
+55/289372/campos_512_v4
+55/289392/campos_512_v4
+55/289420/campos_512_v4
+55/289429/campos_512_v4
+55/289470/campos_512_v4
+55/289478/campos_512_v4
+55/289500/campos_512_v4
+55/289528/campos_512_v4
+55/289541/campos_512_v4
+55/289563/campos_512_v4
+55/289572/campos_512_v4
+55/289579/campos_512_v4
+55/289624/campos_512_v4
+55/289686/campos_512_v4
+55/289692/campos_512_v4
+55/289727/campos_512_v4
+55/289824/campos_512_v4
+55/289835/campos_512_v4
+55/289837/campos_512_v4
+55/289846/campos_512_v4
+55/289860/campos_512_v4
+55/289879/campos_512_v4
+55/289884/campos_512_v4
+55/289899/campos_512_v4
+55/289912/campos_512_v4
+55/289930/campos_512_v4
+55/289936/campos_512_v4
+55/289939/campos_512_v4
+55/289957/campos_512_v4
+55/289966/campos_512_v4
+55/289969/campos_512_v4
+56/290018/campos_512_v4
+56/290035/campos_512_v4
+56/290089/campos_512_v4
+56/290107/campos_512_v4
+56/290117/campos_512_v4
+56/290190/campos_512_v4
+56/290228/campos_512_v4
+56/290257/campos_512_v4
+56/290278/campos_512_v4
+56/290331/campos_512_v4
+56/290376/campos_512_v4
+56/290465/campos_512_v4
+56/290501/campos_512_v4
+56/290545/campos_512_v4
+56/290577/campos_512_v4
+56/290597/campos_512_v4
+56/290598/campos_512_v4
+56/290602/campos_512_v4
+56/290622/campos_512_v4
+56/290630/campos_512_v4
+56/290641/campos_512_v4
+56/290655/campos_512_v4
+56/290659/campos_512_v4
+56/290662/campos_512_v4
+56/290670/campos_512_v4
+56/290671/campos_512_v4
+56/290729/campos_512_v4
+56/290730/campos_512_v4
+56/290745/campos_512_v4
+56/290779/campos_512_v4
+56/290797/campos_512_v4
+56/290805/campos_512_v4
+56/290820/campos_512_v4
+56/290860/campos_512_v4
+56/290894/campos_512_v4
+56/290969/campos_512_v4
+56/290975/campos_512_v4
+56/290991/campos_512_v4
+56/291029/campos_512_v4
+56/291030/campos_512_v4
+56/291032/campos_512_v4
+56/291057/campos_512_v4
+56/291088/campos_512_v4
+56/291101/campos_512_v4
+56/291118/campos_512_v4
+56/291128/campos_512_v4
+56/291154/campos_512_v4
+56/291163/campos_512_v4
+56/291278/campos_512_v4
+56/291321/campos_512_v4
+56/291358/campos_512_v4
+56/291396/campos_512_v4
+56/291424/campos_512_v4
+56/291440/campos_512_v4
+56/291464/campos_512_v4
+56/291480/campos_512_v4
+56/291486/campos_512_v4
+56/291509/campos_512_v4
+56/291573/campos_512_v4
+56/291596/campos_512_v4
+56/291624/campos_512_v4
+56/291667/campos_512_v4
+56/291672/campos_512_v4
+56/291678/campos_512_v4
+56/291804/campos_512_v4
+56/291813/campos_512_v4
+56/291851/campos_512_v4
+56/291853/campos_512_v4
+56/291928/campos_512_v4
+56/291930/campos_512_v4
+56/291982/campos_512_v4
+56/292019/campos_512_v4
+56/292065/campos_512_v4
+56/292081/campos_512_v4
+56/292085/campos_512_v4
+56/292097/campos_512_v4
+56/292180/campos_512_v4
+56/292184/campos_512_v4
+56/292206/campos_512_v4
+56/292210/campos_512_v4
+56/292226/campos_512_v4
+56/292287/campos_512_v4
+56/292296/campos_512_v4
+56/292299/campos_512_v4
+56/292302/campos_512_v4
+56/292305/campos_512_v4
+56/292329/campos_512_v4
+56/292366/campos_512_v4
+56/292418/campos_512_v4
+56/292424/campos_512_v4
+56/292432/campos_512_v4
+56/292450/campos_512_v4
+56/292517/campos_512_v4
+56/292518/campos_512_v4
+56/292551/campos_512_v4
+56/292574/campos_512_v4
+56/292575/campos_512_v4
+56/292589/campos_512_v4
+56/292634/campos_512_v4
+56/292673/campos_512_v4
+56/292679/campos_512_v4
+56/292806/campos_512_v4
+56/292829/campos_512_v4
+56/292867/campos_512_v4
+56/292907/campos_512_v4
+56/292930/campos_512_v4
+56/292963/campos_512_v4
+56/292998/campos_512_v4
+56/293009/campos_512_v4
+56/293026/campos_512_v4
+56/293058/campos_512_v4
+56/293078/campos_512_v4
+56/293115/campos_512_v4
+56/293144/campos_512_v4
+56/293201/campos_512_v4
+56/293242/campos_512_v4
+56/293273/campos_512_v4
+56/293276/campos_512_v4
+56/293306/campos_512_v4
+56/293316/campos_512_v4
+56/293348/campos_512_v4
+56/293400/campos_512_v4
+56/293434/campos_512_v4
+56/293436/campos_512_v4
+56/293474/campos_512_v4
+56/293536/campos_512_v4
+56/293555/campos_512_v4
+56/293558/campos_512_v4
+56/293576/campos_512_v4
+56/293582/campos_512_v4
+56/293635/campos_512_v4
+56/293636/campos_512_v4
+56/293675/campos_512_v4
+56/293680/campos_512_v4
+56/293691/campos_512_v4
+56/293714/campos_512_v4
+56/293729/campos_512_v4
+56/293774/campos_512_v4
+56/293790/campos_512_v4
+56/293846/campos_512_v4
+56/293916/campos_512_v4
+56/294025/campos_512_v4
+56/294180/campos_512_v4
+56/294233/campos_512_v4
+56/294261/campos_512_v4
+56/294352/campos_512_v4
+56/294357/campos_512_v4
+56/294424/campos_512_v4
+56/294479/campos_512_v4
+56/294513/campos_512_v4
+56/294604/campos_512_v4
+56/294661/campos_512_v4
+56/294680/campos_512_v4
+56/294685/campos_512_v4
+56/294721/campos_512_v4
+56/294754/campos_512_v4
+56/294801/campos_512_v4
+56/294839/campos_512_v4
+56/294959/campos_512_v4
+57/295088/campos_512_v4
+57/295089/campos_512_v4
+57/295124/campos_512_v4
+57/295159/campos_512_v4
+57/295160/campos_512_v4
+57/295168/campos_512_v4
+57/295184/campos_512_v4
+57/295425/campos_512_v4
+57/295446/campos_512_v4
+57/295476/campos_512_v4
+57/295533/campos_512_v4
+57/295547/campos_512_v4
+57/295556/campos_512_v4
+57/295602/campos_512_v4
+57/295610/campos_512_v4
+57/295652/campos_512_v4
+57/295680/campos_512_v4
+57/295684/campos_512_v4
+57/295721/campos_512_v4
+57/295766/campos_512_v4
+57/295861/campos_512_v4
+57/295871/campos_512_v4
+57/295921/campos_512_v4
+57/295963/campos_512_v4
+57/296003/campos_512_v4
+57/296023/campos_512_v4
+57/296036/campos_512_v4
+57/296095/campos_512_v4
+57/296153/campos_512_v4
+57/296202/campos_512_v4
+57/296238/campos_512_v4
+57/296247/campos_512_v4
+57/296267/campos_512_v4
+57/296285/campos_512_v4
+57/296302/campos_512_v4
+57/296347/campos_512_v4
+57/296361/campos_512_v4
+57/296523/campos_512_v4
+57/296571/campos_512_v4
+57/296601/campos_512_v4
+57/296608/campos_512_v4
+57/296610/campos_512_v4
+57/296614/campos_512_v4
+57/296634/campos_512_v4
+57/296678/campos_512_v4
+57/296729/campos_512_v4
+57/296748/campos_512_v4
+57/296897/campos_512_v4
+57/296909/campos_512_v4
+57/296951/campos_512_v4
+57/296954/campos_512_v4
+57/296956/campos_512_v4
+57/296987/campos_512_v4
+57/297067/campos_512_v4
+57/297114/campos_512_v4
+57/297132/campos_512_v4
+57/297326/campos_512_v4
+57/297332/campos_512_v4
+57/297352/campos_512_v4
+57/297385/campos_512_v4
+57/297423/campos_512_v4
+57/297510/campos_512_v4
+57/297567/campos_512_v4
+57/297573/campos_512_v4
+57/297582/campos_512_v4
+57/297584/campos_512_v4
+57/297634/campos_512_v4
+57/297700/campos_512_v4
+57/297708/campos_512_v4
+57/297711/campos_512_v4
+57/297725/campos_512_v4
+57/297741/campos_512_v4
+57/297751/campos_512_v4
+57/297794/campos_512_v4
+57/297807/campos_512_v4
+57/297851/campos_512_v4
+57/297875/campos_512_v4
+57/297887/campos_512_v4
+57/297893/campos_512_v4
+57/297903/campos_512_v4
+57/297916/campos_512_v4
+57/297917/campos_512_v4
+57/297942/campos_512_v4
+57/297950/campos_512_v4
+57/297957/campos_512_v4
+57/297993/campos_512_v4
+57/298017/campos_512_v4
+57/298029/campos_512_v4
+57/298124/campos_512_v4
+57/298144/campos_512_v4
+57/298177/campos_512_v4
+57/298192/campos_512_v4
+57/298218/campos_512_v4
+57/298248/campos_512_v4
+57/298262/campos_512_v4
+57/298264/campos_512_v4
+57/298300/campos_512_v4
+57/298318/campos_512_v4
+57/298384/campos_512_v4
+57/298403/campos_512_v4
+57/298407/campos_512_v4
+57/298409/campos_512_v4
+57/298526/campos_512_v4
+57/298546/campos_512_v4
+57/298550/campos_512_v4
+57/298552/campos_512_v4
+57/298563/campos_512_v4
+57/298566/campos_512_v4
+57/298571/campos_512_v4
+57/298575/campos_512_v4
+57/298610/campos_512_v4
+57/298625/campos_512_v4
+57/298647/campos_512_v4
+57/298764/campos_512_v4
+57/298802/campos_512_v4
+57/298851/campos_512_v4
+57/298907/campos_512_v4
+57/298933/campos_512_v4
+57/298951/campos_512_v4
+57/298954/campos_512_v4
+57/298978/campos_512_v4
+57/299041/campos_512_v4
+57/299152/campos_512_v4
+57/299191/campos_512_v4
+57/299192/campos_512_v4
+57/299212/campos_512_v4
+57/299241/campos_512_v4
+57/299260/campos_512_v4
+57/299268/campos_512_v4
+57/299308/campos_512_v4
+57/299326/campos_512_v4
+57/299330/campos_512_v4
+57/299419/campos_512_v4
+57/299420/campos_512_v4
+57/299438/campos_512_v4
+57/299463/campos_512_v4
+57/299480/campos_512_v4
+57/299485/campos_512_v4
+57/299538/campos_512_v4
+57/299548/campos_512_v4
+57/299549/campos_512_v4
+57/299572/campos_512_v4
+57/299638/campos_512_v4
+57/299639/campos_512_v4
+57/299647/campos_512_v4
+57/299658/campos_512_v4
+57/299667/campos_512_v4
+57/299671/campos_512_v4
+57/299733/campos_512_v4
+57/299751/campos_512_v4
+57/299756/campos_512_v4
+57/299762/campos_512_v4
+57/299798/campos_512_v4
+57/299818/campos_512_v4
+57/299833/campos_512_v4
+57/299855/campos_512_v4
+57/299861/campos_512_v4
+57/299869/campos_512_v4
+57/299916/campos_512_v4
+57/299918/campos_512_v4
+57/299972/campos_512_v4
+58/300053/campos_512_v4
+58/300089/campos_512_v4
+58/300148/campos_512_v4
+58/300153/campos_512_v4
+58/300166/campos_512_v4
+58/300187/campos_512_v4
+58/300228/campos_512_v4
+58/300330/campos_512_v4
+58/300395/campos_512_v4
+58/300432/campos_512_v4
+58/300490/campos_512_v4
+58/300503/campos_512_v4
+58/300504/campos_512_v4
+58/300529/campos_512_v4
+58/300544/campos_512_v4
+58/300563/campos_512_v4
+58/300564/campos_512_v4
+58/300585/campos_512_v4
+58/300625/campos_512_v4
+58/300646/campos_512_v4
+58/300691/campos_512_v4
+58/300738/campos_512_v4
+58/300794/campos_512_v4
+58/300803/campos_512_v4
+58/300828/campos_512_v4
+58/300848/campos_512_v4
+58/300894/campos_512_v4
+58/300934/campos_512_v4
+58/300978/campos_512_v4
+58/301015/campos_512_v4
+58/301021/campos_512_v4
+58/301045/campos_512_v4
+58/301062/campos_512_v4
+58/301171/campos_512_v4
+58/301190/campos_512_v4
+58/301199/campos_512_v4
+58/301275/campos_512_v4
+58/301329/campos_512_v4
+58/301406/campos_512_v4
+58/301412/campos_512_v4
+58/301424/campos_512_v4
+58/301518/campos_512_v4
+58/301558/campos_512_v4
+58/301561/campos_512_v4
+58/301564/campos_512_v4
+58/301579/campos_512_v4
+58/301592/campos_512_v4
+58/301627/campos_512_v4
+58/301683/campos_512_v4
+58/301700/campos_512_v4
+58/301703/campos_512_v4
+58/301723/campos_512_v4
+58/301772/campos_512_v4
+58/301889/campos_512_v4
+58/301894/campos_512_v4
+58/301902/campos_512_v4
+58/301904/campos_512_v4
+58/301988/campos_512_v4
+58/302026/campos_512_v4
+58/302116/campos_512_v4
+58/302161/campos_512_v4
+58/302172/campos_512_v4
+58/302280/campos_512_v4
+58/302293/campos_512_v4
+58/302299/campos_512_v4
+58/302305/campos_512_v4
+58/302314/campos_512_v4
+58/302325/campos_512_v4
+58/302360/campos_512_v4
+58/302366/campos_512_v4
+58/302370/campos_512_v4
+58/302404/campos_512_v4
+58/302442/campos_512_v4
+58/302452/campos_512_v4
+58/302569/campos_512_v4
+58/302600/campos_512_v4
+58/302620/campos_512_v4
+58/302715/campos_512_v4
+58/302735/campos_512_v4
+58/302771/campos_512_v4
+58/302834/campos_512_v4
+58/302838/campos_512_v4
+58/302843/campos_512_v4
+58/302849/campos_512_v4
+58/302878/campos_512_v4
+58/302903/campos_512_v4
+58/302942/campos_512_v4
+58/302956/campos_512_v4
+58/302968/campos_512_v4
+58/302969/campos_512_v4
+58/302994/campos_512_v4
+58/303107/campos_512_v4
+58/303117/campos_512_v4
+58/303215/campos_512_v4
+58/303227/campos_512_v4
+58/303262/campos_512_v4
+58/303285/campos_512_v4
+58/303321/campos_512_v4
+58/303405/campos_512_v4
+58/303437/campos_512_v4
+58/303493/campos_512_v4
+58/303501/campos_512_v4
+58/303532/campos_512_v4
+58/303537/campos_512_v4
+58/303569/campos_512_v4
+58/303572/campos_512_v4
+58/303586/campos_512_v4
+58/303611/campos_512_v4
+58/303630/campos_512_v4
+58/303638/campos_512_v4
+58/303639/campos_512_v4
+58/303669/campos_512_v4
+58/303688/campos_512_v4
+58/303713/campos_512_v4
+58/303742/campos_512_v4
+58/303770/campos_512_v4
+58/303778/campos_512_v4
+58/303908/campos_512_v4
+58/303924/campos_512_v4
+58/303997/campos_512_v4
+58/304002/campos_512_v4
+58/304026/campos_512_v4
+58/304050/campos_512_v4
+58/304113/campos_512_v4
+58/304121/campos_512_v4
+58/304174/campos_512_v4
+58/304186/campos_512_v4
+58/304219/campos_512_v4
+58/304221/campos_512_v4
+58/304232/campos_512_v4
+58/304248/campos_512_v4
+58/304257/campos_512_v4
+58/304264/campos_512_v4
+58/304272/campos_512_v4
+58/304276/campos_512_v4
+58/304287/campos_512_v4
+58/304295/campos_512_v4
+58/304336/campos_512_v4
+58/304345/campos_512_v4
+58/304366/campos_512_v4
+58/304392/campos_512_v4
+58/304419/campos_512_v4
+58/304428/campos_512_v4
+58/304500/campos_512_v4
+58/304507/campos_512_v4
+58/304512/campos_512_v4
+58/304519/campos_512_v4
+58/304537/campos_512_v4
+58/304585/campos_512_v4
+58/304638/campos_512_v4
+58/304760/campos_512_v4
+58/304773/campos_512_v4
+58/304797/campos_512_v4
+58/304801/campos_512_v4
+58/304859/campos_512_v4
+58/304913/campos_512_v4
+58/304933/campos_512_v4
+58/304941/campos_512_v4
+58/304980/campos_512_v4
+58/304990/campos_512_v4
+58/304991/campos_512_v4
+59/305016/campos_512_v4
+59/305106/campos_512_v4
+59/305112/campos_512_v4
+59/305135/campos_512_v4
+59/305146/campos_512_v4
+59/305248/campos_512_v4
+59/305325/campos_512_v4
+59/305345/campos_512_v4
+59/305384/campos_512_v4
+59/305389/campos_512_v4
+59/305415/campos_512_v4
+59/305436/campos_512_v4
+59/305440/campos_512_v4
+59/305447/campos_512_v4
+59/305483/campos_512_v4
+59/305484/campos_512_v4
+59/305489/campos_512_v4
+59/305490/campos_512_v4
+59/305584/campos_512_v4
+59/305650/campos_512_v4
+59/305688/campos_512_v4
+59/305691/campos_512_v4
+59/305694/campos_512_v4
+59/305709/campos_512_v4
+59/305716/campos_512_v4
+59/305728/campos_512_v4
+59/305879/campos_512_v4
+59/305885/campos_512_v4
+59/305926/campos_512_v4
+59/305955/campos_512_v4
+59/305967/campos_512_v4
+59/306080/campos_512_v4
+59/306113/campos_512_v4
+59/306133/campos_512_v4
+59/306134/campos_512_v4
+59/306139/campos_512_v4
+59/306193/campos_512_v4
+59/306305/campos_512_v4
+59/306311/campos_512_v4
+59/306351/campos_512_v4
+59/306378/campos_512_v4
+59/306382/campos_512_v4
+59/306397/campos_512_v4
+59/306463/campos_512_v4
+59/306491/campos_512_v4
+59/306502/campos_512_v4
+59/306506/campos_512_v4
+59/306539/campos_512_v4
+59/306542/campos_512_v4
+59/306551/campos_512_v4
+59/306566/campos_512_v4
+59/306622/campos_512_v4
+59/306658/campos_512_v4
+59/306664/campos_512_v4
+59/306691/campos_512_v4
+59/306701/campos_512_v4
+59/306798/campos_512_v4
+59/306823/campos_512_v4
+59/306830/campos_512_v4
+59/306870/campos_512_v4
+59/306938/campos_512_v4
+59/306985/campos_512_v4
+59/306999/campos_512_v4
+59/307007/campos_512_v4
+59/307009/campos_512_v4
+59/307020/campos_512_v4
+59/307034/campos_512_v4
+59/307072/campos_512_v4
+59/307146/campos_512_v4
+59/307151/campos_512_v4
+59/307172/campos_512_v4
+59/307176/campos_512_v4
+59/307210/campos_512_v4
+59/307217/campos_512_v4
+59/307229/campos_512_v4
+59/307265/campos_512_v4
+59/307321/campos_512_v4
+59/307373/campos_512_v4
+59/307388/campos_512_v4
+59/307400/campos_512_v4
+59/307490/campos_512_v4
+59/307518/campos_512_v4
+59/307604/campos_512_v4
+59/307609/campos_512_v4
+59/307622/campos_512_v4
+59/307635/campos_512_v4
+59/307682/campos_512_v4
+59/307693/campos_512_v4
+59/307699/campos_512_v4
+59/307715/campos_512_v4
+59/307723/campos_512_v4
+59/307730/campos_512_v4
+59/307732/campos_512_v4
+59/307784/campos_512_v4
+59/307805/campos_512_v4
+59/307835/campos_512_v4
+59/307855/campos_512_v4
+59/307879/campos_512_v4
+59/307884/campos_512_v4
+59/307885/campos_512_v4
+59/307919/campos_512_v4
+59/307925/campos_512_v4
+59/307929/campos_512_v4
+59/307972/campos_512_v4
+59/308042/campos_512_v4
+59/308082/campos_512_v4
+59/308120/campos_512_v4
+59/308126/campos_512_v4
+59/308210/campos_512_v4
+59/308211/campos_512_v4
+59/308253/campos_512_v4
+59/308255/campos_512_v4
+59/308258/campos_512_v4
+59/308284/campos_512_v4
+59/308298/campos_512_v4
+59/308309/campos_512_v4
+59/308310/campos_512_v4
+59/308333/campos_512_v4
+59/308378/campos_512_v4
+59/308392/campos_512_v4
+59/308394/campos_512_v4
+59/308431/campos_512_v4
+59/308455/campos_512_v4
+59/308468/campos_512_v4
+59/308496/campos_512_v4
+59/308518/campos_512_v4
+59/308539/campos_512_v4
+59/308547/campos_512_v4
+59/308555/campos_512_v4
+59/308665/campos_512_v4
+59/308769/campos_512_v4
+59/308802/campos_512_v4
+59/308813/campos_512_v4
+59/308869/campos_512_v4
+59/308871/campos_512_v4
+59/308952/campos_512_v4
+59/308955/campos_512_v4
+59/308985/campos_512_v4
+59/309006/campos_512_v4
+59/309110/campos_512_v4
+59/309131/campos_512_v4
+59/309171/campos_512_v4
+59/309264/campos_512_v4
+59/309275/campos_512_v4
+59/309286/campos_512_v4
+59/309297/campos_512_v4
+59/309311/campos_512_v4
+59/309324/campos_512_v4
+59/309345/campos_512_v4
+59/309355/campos_512_v4
+59/309357/campos_512_v4
+59/309362/campos_512_v4
+59/309379/campos_512_v4
+59/309387/campos_512_v4
+59/309445/campos_512_v4
+59/309449/campos_512_v4
+59/309497/campos_512_v4
+59/309544/campos_512_v4
+59/309572/campos_512_v4
+59/309633/campos_512_v4
+59/309654/campos_512_v4
+59/309660/campos_512_v4
+59/309672/campos_512_v4
+59/309680/campos_512_v4
+59/309696/campos_512_v4
+59/309715/campos_512_v4
+59/309735/campos_512_v4
+59/309753/campos_512_v4
+59/309758/campos_512_v4
+59/309761/campos_512_v4
+59/309810/campos_512_v4
+59/309816/campos_512_v4
+59/309853/campos_512_v4
+59/309864/campos_512_v4
+59/309894/campos_512_v4
+59/309920/campos_512_v4
+59/309977/campos_512_v4
+59/309992/campos_512_v4
+6/40069/campos_512_v4
+6/40078/campos_512_v4
+6/40143/campos_512_v4
+6/40173/campos_512_v4
+6/40181/campos_512_v4
+6/40190/campos_512_v4
+6/40222/campos_512_v4
+6/40258/campos_512_v4
+6/40302/campos_512_v4
+6/40308/campos_512_v4
+6/40336/campos_512_v4
+6/40346/campos_512_v4
+6/40366/campos_512_v4
+6/40386/campos_512_v4
+6/40387/campos_512_v4
+6/40409/campos_512_v4
+6/40413/campos_512_v4
+6/40465/campos_512_v4
+6/40512/campos_512_v4
+6/40515/campos_512_v4
+6/40537/campos_512_v4
+6/40579/campos_512_v4
+6/40589/campos_512_v4
+6/40645/campos_512_v4
+6/40660/campos_512_v4
+6/40681/campos_512_v4
+6/40698/campos_512_v4
+6/40699/campos_512_v4
+6/40724/campos_512_v4
+6/40753/campos_512_v4
+6/40770/campos_512_v4
+6/40771/campos_512_v4
+6/40776/campos_512_v4
+6/40806/campos_512_v4
+6/40832/campos_512_v4
+6/40843/campos_512_v4
+6/40844/campos_512_v4
+6/40846/campos_512_v4
+6/40913/campos_512_v4
+6/40955/campos_512_v4
+6/41060/campos_512_v4
+6/41076/campos_512_v4
+6/41084/campos_512_v4
+6/41121/campos_512_v4
+6/41123/campos_512_v4
+6/41157/campos_512_v4
+6/41170/campos_512_v4
+6/41196/campos_512_v4
+6/41201/campos_512_v4
+6/41261/campos_512_v4
+6/41299/campos_512_v4
+6/41370/campos_512_v4
+6/41465/campos_512_v4
+6/41488/campos_512_v4
+6/41489/campos_512_v4
+6/41524/campos_512_v4
+6/41542/campos_512_v4
+6/41545/campos_512_v4
+6/41554/campos_512_v4
+6/41573/campos_512_v4
+6/41585/campos_512_v4
+6/41604/campos_512_v4
+6/41608/campos_512_v4
+6/41627/campos_512_v4
+6/41652/campos_512_v4
+6/41653/campos_512_v4
+6/41679/campos_512_v4
+6/41738/campos_512_v4
+6/41758/campos_512_v4
+6/41773/campos_512_v4
+6/41780/campos_512_v4
+6/41794/campos_512_v4
+6/41806/campos_512_v4
+6/41854/campos_512_v4
+6/41882/campos_512_v4
+6/41906/campos_512_v4
+6/41918/campos_512_v4
+6/41919/campos_512_v4
+6/41929/campos_512_v4
+6/41934/campos_512_v4
+6/41937/campos_512_v4
+6/41941/campos_512_v4
+6/41948/campos_512_v4
+6/41988/campos_512_v4
+6/42001/campos_512_v4
+6/42018/campos_512_v4
+6/42057/campos_512_v4
+6/42072/campos_512_v4
+6/42094/campos_512_v4
+6/42096/campos_512_v4
+6/42100/campos_512_v4
+6/42102/campos_512_v4
+6/42176/campos_512_v4
+6/42208/campos_512_v4
+6/42214/campos_512_v4
+6/42230/campos_512_v4
+6/42273/campos_512_v4
+6/42276/campos_512_v4
+6/42277/campos_512_v4
+6/42280/campos_512_v4
+6/42293/campos_512_v4
+6/42294/campos_512_v4
+6/42322/campos_512_v4
+6/42326/campos_512_v4
+6/42330/campos_512_v4
+6/42346/campos_512_v4
+6/42391/campos_512_v4
+6/42452/campos_512_v4
+6/42456/campos_512_v4
+6/42465/campos_512_v4
+6/42561/campos_512_v4
+6/42606/campos_512_v4
+6/42653/campos_512_v4
+6/42681/campos_512_v4
+6/42685/campos_512_v4
+6/42722/campos_512_v4
+6/42737/campos_512_v4
+6/42757/campos_512_v4
+6/42759/campos_512_v4
+6/42812/campos_512_v4
+6/42813/campos_512_v4
+6/42879/campos_512_v4
+6/42886/campos_512_v4
+6/42897/campos_512_v4
+6/42939/campos_512_v4
+6/42977/campos_512_v4
+6/43039/campos_512_v4
+6/43052/campos_512_v4
+6/43092/campos_512_v4
+6/43300/campos_512_v4
+6/43383/campos_512_v4
+6/43390/campos_512_v4
+6/43428/campos_512_v4
+6/43434/campos_512_v4
+6/43446/campos_512_v4
+6/43512/campos_512_v4
+6/43536/campos_512_v4
+6/43544/campos_512_v4
+6/43559/campos_512_v4
+6/43592/campos_512_v4
+6/43602/campos_512_v4
+6/43644/campos_512_v4
+6/43674/campos_512_v4
+6/43675/campos_512_v4
+6/43726/campos_512_v4
+6/43750/campos_512_v4
+6/43796/campos_512_v4
+6/43826/campos_512_v4
+6/43856/campos_512_v4
+6/43875/campos_512_v4
+6/43940/campos_512_v4
+6/43968/campos_512_v4
+6/44029/campos_512_v4
+6/44068/campos_512_v4
+6/44091/campos_512_v4
+6/44111/campos_512_v4
+6/44144/campos_512_v4
+6/44187/campos_512_v4
+6/44197/campos_512_v4
+6/44199/campos_512_v4
+6/44231/campos_512_v4
+6/44235/campos_512_v4
+6/44238/campos_512_v4
+6/44242/campos_512_v4
+6/44267/campos_512_v4
+6/44305/campos_512_v4
+6/44338/campos_512_v4
+6/44339/campos_512_v4
+6/44342/campos_512_v4
+6/44377/campos_512_v4
+6/44392/campos_512_v4
+6/44394/campos_512_v4
+6/44396/campos_512_v4
+6/44401/campos_512_v4
+6/44402/campos_512_v4
+6/44433/campos_512_v4
+6/44437/campos_512_v4
+6/44451/campos_512_v4
+6/44470/campos_512_v4
+6/44503/campos_512_v4
+6/44521/campos_512_v4
+6/44578/campos_512_v4
+6/44626/campos_512_v4
+6/44636/campos_512_v4
+6/44651/campos_512_v4
+6/44684/campos_512_v4
+6/44710/campos_512_v4
+6/44726/campos_512_v4
+6/44729/campos_512_v4
+6/44779/campos_512_v4
+6/44783/campos_512_v4
+6/44808/campos_512_v4
+6/44818/campos_512_v4
+6/44834/campos_512_v4
+6/44861/campos_512_v4
+6/44872/campos_512_v4
+6/44940/campos_512_v4
+60/310027/campos_512_v4
+60/310037/campos_512_v4
+60/310067/campos_512_v4
+60/310121/campos_512_v4
+60/310139/campos_512_v4
+60/310174/campos_512_v4
+60/310189/campos_512_v4
+60/310209/campos_512_v4
+60/310231/campos_512_v4
+60/310233/campos_512_v4
+60/310237/campos_512_v4
+60/310350/campos_512_v4
+60/310482/campos_512_v4
+60/310486/campos_512_v4
+60/310487/campos_512_v4
+60/310492/campos_512_v4
+60/310499/campos_512_v4
+60/310509/campos_512_v4
+60/310516/campos_512_v4
+60/310525/campos_512_v4
+60/310535/campos_512_v4
+60/310540/campos_512_v4
+60/310560/campos_512_v4
+60/310591/campos_512_v4
+60/310607/campos_512_v4
+60/310618/campos_512_v4
+60/310645/campos_512_v4
+60/310701/campos_512_v4
+60/310746/campos_512_v4
+60/310755/campos_512_v4
+60/310771/campos_512_v4
+60/310839/campos_512_v4
+60/310877/campos_512_v4
+60/310902/campos_512_v4
+60/310914/campos_512_v4
+60/310926/campos_512_v4
+60/310978/campos_512_v4
+60/311002/campos_512_v4
+60/311013/campos_512_v4
+60/311036/campos_512_v4
+60/311050/campos_512_v4
+60/311068/campos_512_v4
+60/311116/campos_512_v4
+60/311121/campos_512_v4
+60/311164/campos_512_v4
+60/311173/campos_512_v4
+60/311287/campos_512_v4
+60/311308/campos_512_v4
+60/311324/campos_512_v4
+60/311361/campos_512_v4
+60/311385/campos_512_v4
+60/311492/campos_512_v4
+60/311642/campos_512_v4
+60/311693/campos_512_v4
+60/311713/campos_512_v4
+60/311717/campos_512_v4
+60/311758/campos_512_v4
+60/311772/campos_512_v4
+60/311781/campos_512_v4
+60/311792/campos_512_v4
+60/311811/campos_512_v4
+60/311812/campos_512_v4
+60/311826/campos_512_v4
+60/311841/campos_512_v4
+60/311870/campos_512_v4
+60/311896/campos_512_v4
+60/311951/campos_512_v4
+60/311953/campos_512_v4
+60/311966/campos_512_v4
+60/311980/campos_512_v4
+60/311989/campos_512_v4
+60/312019/campos_512_v4
+60/312038/campos_512_v4
+60/312064/campos_512_v4
+60/312098/campos_512_v4
+60/312104/campos_512_v4
+60/312173/campos_512_v4
+60/312206/campos_512_v4
+60/312209/campos_512_v4
+60/312225/campos_512_v4
+60/312239/campos_512_v4
+60/312252/campos_512_v4
+60/312259/campos_512_v4
+60/312262/campos_512_v4
+60/312329/campos_512_v4
+60/312333/campos_512_v4
+60/312401/campos_512_v4
+60/312439/campos_512_v4
+60/312449/campos_512_v4
+60/312485/campos_512_v4
+60/312522/campos_512_v4
+60/312551/campos_512_v4
+60/312605/campos_512_v4
+60/312625/campos_512_v4
+60/312659/campos_512_v4
+60/312669/campos_512_v4
+60/312725/campos_512_v4
+60/312731/campos_512_v4
+60/312756/campos_512_v4
+60/312818/campos_512_v4
+60/312883/campos_512_v4
+60/312884/campos_512_v4
+60/312888/campos_512_v4
+60/312899/campos_512_v4
+60/312910/campos_512_v4
+60/312913/campos_512_v4
+60/312930/campos_512_v4
+60/312999/campos_512_v4
+60/313038/campos_512_v4
+60/313218/campos_512_v4
+60/313302/campos_512_v4
+60/313312/campos_512_v4
+60/313331/campos_512_v4
+60/313332/campos_512_v4
+60/313408/campos_512_v4
+60/313451/campos_512_v4
+60/313454/campos_512_v4
+60/313533/campos_512_v4
+60/313581/campos_512_v4
+60/313610/campos_512_v4
+60/313647/campos_512_v4
+60/313657/campos_512_v4
+60/313683/campos_512_v4
+60/313702/campos_512_v4
+60/313703/campos_512_v4
+60/313724/campos_512_v4
+60/313749/campos_512_v4
+60/313840/campos_512_v4
+60/313877/campos_512_v4
+60/313886/campos_512_v4
+60/313908/campos_512_v4
+60/313925/campos_512_v4
+60/313930/campos_512_v4
+60/313936/campos_512_v4
+60/314005/campos_512_v4
+60/314021/campos_512_v4
+60/314071/campos_512_v4
+60/314098/campos_512_v4
+60/314112/campos_512_v4
+60/314207/campos_512_v4
+60/314212/campos_512_v4
+60/314214/campos_512_v4
+60/314216/campos_512_v4
+60/314218/campos_512_v4
+60/314228/campos_512_v4
+60/314248/campos_512_v4
+60/314253/campos_512_v4
+60/314273/campos_512_v4
+60/314325/campos_512_v4
+60/314331/campos_512_v4
+60/314336/campos_512_v4
+60/314346/campos_512_v4
+60/314360/campos_512_v4
+60/314389/campos_512_v4
+60/314480/campos_512_v4
+60/314503/campos_512_v4
+60/314510/campos_512_v4
+60/314553/campos_512_v4
+60/314588/campos_512_v4
+60/314592/campos_512_v4
+60/314669/campos_512_v4
+60/314687/campos_512_v4
+60/314714/campos_512_v4
+60/314758/campos_512_v4
+60/314768/campos_512_v4
+60/314771/campos_512_v4
+60/314787/campos_512_v4
+60/314823/campos_512_v4
+60/314870/campos_512_v4
+60/314873/campos_512_v4
+60/314890/campos_512_v4
+60/315000/campos_512_v4
+61/315040/campos_512_v4
+61/315066/campos_512_v4
+61/315073/campos_512_v4
+61/315074/campos_512_v4
+61/315098/campos_512_v4
+61/315125/campos_512_v4
+61/315161/campos_512_v4
+61/315173/campos_512_v4
+61/315175/campos_512_v4
+61/315181/campos_512_v4
+61/315187/campos_512_v4
+61/315259/campos_512_v4
+61/315343/campos_512_v4
+61/315407/campos_512_v4
+61/315436/campos_512_v4
+61/315442/campos_512_v4
+61/315469/campos_512_v4
+61/315504/campos_512_v4
+61/315556/campos_512_v4
+61/315583/campos_512_v4
+61/315587/campos_512_v4
+61/315608/campos_512_v4
+61/315621/campos_512_v4
+61/315632/campos_512_v4
+61/315675/campos_512_v4
+61/315679/campos_512_v4
+61/315696/campos_512_v4
+61/315720/campos_512_v4
+61/315765/campos_512_v4
+61/315768/campos_512_v4
+61/315835/campos_512_v4
+61/315842/campos_512_v4
+61/315854/campos_512_v4
+61/315870/campos_512_v4
+61/315883/campos_512_v4
+61/315896/campos_512_v4
+61/315970/campos_512_v4
+61/316042/campos_512_v4
+61/316102/campos_512_v4
+61/316192/campos_512_v4
+61/316280/campos_512_v4
+61/316293/campos_512_v4
+61/316302/campos_512_v4
+61/316415/campos_512_v4
+61/316455/campos_512_v4
+61/316491/campos_512_v4
+61/316512/campos_512_v4
+61/316549/campos_512_v4
+61/316568/campos_512_v4
+61/316576/campos_512_v4
+61/316594/campos_512_v4
+61/316602/campos_512_v4
+61/316662/campos_512_v4
+61/316684/campos_512_v4
+61/316714/campos_512_v4
+61/316826/campos_512_v4
+61/316878/campos_512_v4
+61/316911/campos_512_v4
+61/316936/campos_512_v4
+61/316958/campos_512_v4
+61/316989/campos_512_v4
+61/316994/campos_512_v4
+61/317002/campos_512_v4
+61/317007/campos_512_v4
+61/317020/campos_512_v4
+61/317024/campos_512_v4
+61/317039/campos_512_v4
+61/317109/campos_512_v4
+61/317221/campos_512_v4
+61/317231/campos_512_v4
+61/317249/campos_512_v4
+61/317269/campos_512_v4
+61/317273/campos_512_v4
+61/317286/campos_512_v4
+61/317289/campos_512_v4
+61/317305/campos_512_v4
+61/317316/campos_512_v4
+61/317352/campos_512_v4
+61/317358/campos_512_v4
+61/317362/campos_512_v4
+61/317478/campos_512_v4
+61/317523/campos_512_v4
+61/317553/campos_512_v4
+61/317556/campos_512_v4
+61/317617/campos_512_v4
+61/317653/campos_512_v4
+61/317656/campos_512_v4
+61/317659/campos_512_v4
+61/317671/campos_512_v4
+61/317674/campos_512_v4
+61/317761/campos_512_v4
+61/317762/campos_512_v4
+61/317784/campos_512_v4
+61/317831/campos_512_v4
+61/317840/campos_512_v4
+61/317936/campos_512_v4
+61/318051/campos_512_v4
+61/318065/campos_512_v4
+61/318106/campos_512_v4
+61/318126/campos_512_v4
+61/318133/campos_512_v4
+61/318151/campos_512_v4
+61/318170/campos_512_v4
+61/318194/campos_512_v4
+61/318308/campos_512_v4
+61/318314/campos_512_v4
+61/318360/campos_512_v4
+61/318363/campos_512_v4
+61/318392/campos_512_v4
+61/318397/campos_512_v4
+61/318423/campos_512_v4
+61/318439/campos_512_v4
+61/318445/campos_512_v4
+61/318450/campos_512_v4
+61/318452/campos_512_v4
+61/318501/campos_512_v4
+61/318523/campos_512_v4
+61/318555/campos_512_v4
+61/318575/campos_512_v4
+61/318687/campos_512_v4
+61/318694/campos_512_v4
+61/318703/campos_512_v4
+61/318715/campos_512_v4
+61/318719/campos_512_v4
+61/318723/campos_512_v4
+61/318732/campos_512_v4
+61/318754/campos_512_v4
+61/318768/campos_512_v4
+61/318772/campos_512_v4
+61/318799/campos_512_v4
+61/318820/campos_512_v4
+61/318912/campos_512_v4
+61/318951/campos_512_v4
+61/318971/campos_512_v4
+61/319017/campos_512_v4
+61/319116/campos_512_v4
+61/319139/campos_512_v4
+61/319175/campos_512_v4
+61/319181/campos_512_v4
+61/319250/campos_512_v4
+61/319261/campos_512_v4
+61/319387/campos_512_v4
+61/319408/campos_512_v4
+61/319411/campos_512_v4
+61/319425/campos_512_v4
+61/319444/campos_512_v4
+61/319483/campos_512_v4
+61/319493/campos_512_v4
+61/319513/campos_512_v4
+61/319518/campos_512_v4
+61/319595/campos_512_v4
+61/319597/campos_512_v4
+61/319624/campos_512_v4
+61/319655/campos_512_v4
+61/319692/campos_512_v4
+61/319716/campos_512_v4
+61/319736/campos_512_v4
+61/319779/campos_512_v4
+61/319833/campos_512_v4
+61/319838/campos_512_v4
+61/319867/campos_512_v4
+61/319874/campos_512_v4
+61/319947/campos_512_v4
+61/319966/campos_512_v4
+61/319995/campos_512_v4
+61/319999/campos_512_v4
+62/320037/campos_512_v4
+62/320148/campos_512_v4
+62/320184/campos_512_v4
+62/320281/campos_512_v4
+62/320296/campos_512_v4
+62/320313/campos_512_v4
+62/320320/campos_512_v4
+62/320342/campos_512_v4
+62/320346/campos_512_v4
+62/320378/campos_512_v4
+62/320432/campos_512_v4
+62/320530/campos_512_v4
+62/320563/campos_512_v4
+62/320567/campos_512_v4
+62/320576/campos_512_v4
+62/320663/campos_512_v4
+62/320679/campos_512_v4
+62/320688/campos_512_v4
+62/320726/campos_512_v4
+62/320808/campos_512_v4
+62/320823/campos_512_v4
+62/320866/campos_512_v4
+62/320892/campos_512_v4
+62/320902/campos_512_v4
+62/320967/campos_512_v4
+62/321154/campos_512_v4
+62/321191/campos_512_v4
+62/321217/campos_512_v4
+62/321327/campos_512_v4
+62/321369/campos_512_v4
+62/321408/campos_512_v4
+62/321409/campos_512_v4
+62/321423/campos_512_v4
+62/321446/campos_512_v4
+62/321566/campos_512_v4
+62/321591/campos_512_v4
+62/321599/campos_512_v4
+62/321643/campos_512_v4
+62/321646/campos_512_v4
+62/321656/campos_512_v4
+62/321711/campos_512_v4
+62/321719/campos_512_v4
+62/321753/campos_512_v4
+62/321793/campos_512_v4
+62/321860/campos_512_v4
+62/321872/campos_512_v4
+62/321918/campos_512_v4
+62/322041/campos_512_v4
+62/322048/campos_512_v4
+62/322053/campos_512_v4
+62/322069/campos_512_v4
+62/322091/campos_512_v4
+62/322162/campos_512_v4
+62/322165/campos_512_v4
+62/322225/campos_512_v4
+62/322248/campos_512_v4
+62/322282/campos_512_v4
+62/322304/campos_512_v4
+62/322310/campos_512_v4
+62/322330/campos_512_v4
+62/322335/campos_512_v4
+62/322396/campos_512_v4
+62/322486/campos_512_v4
+62/322551/campos_512_v4
+62/322601/campos_512_v4
+62/322604/campos_512_v4
+62/322611/campos_512_v4
+62/322621/campos_512_v4
+62/322628/campos_512_v4
+62/322631/campos_512_v4
+62/322643/campos_512_v4
+62/322754/campos_512_v4
+62/322879/campos_512_v4
+62/322892/campos_512_v4
+62/322927/campos_512_v4
+62/322966/campos_512_v4
+62/322978/campos_512_v4
+62/323028/campos_512_v4
+62/323067/campos_512_v4
+62/323101/campos_512_v4
+62/323141/campos_512_v4
+62/323143/campos_512_v4
+62/323176/campos_512_v4
+62/323222/campos_512_v4
+62/323223/campos_512_v4
+62/323263/campos_512_v4
+62/323274/campos_512_v4
+62/323285/campos_512_v4
+62/323319/campos_512_v4
+62/323338/campos_512_v4
+62/323340/campos_512_v4
+62/323347/campos_512_v4
+62/323373/campos_512_v4
+62/323399/campos_512_v4
+62/323442/campos_512_v4
+62/323443/campos_512_v4
+62/323473/campos_512_v4
+62/323491/campos_512_v4
+62/323495/campos_512_v4
+62/323523/campos_512_v4
+62/323563/campos_512_v4
+62/323601/campos_512_v4
+62/323659/campos_512_v4
+62/323697/campos_512_v4
+62/323774/campos_512_v4
+62/323803/campos_512_v4
+62/323808/campos_512_v4
+62/323966/campos_512_v4
+62/323984/campos_512_v4
+62/324088/campos_512_v4
+62/324113/campos_512_v4
+62/324114/campos_512_v4
+62/324144/campos_512_v4
+62/324159/campos_512_v4
+62/324165/campos_512_v4
+62/324205/campos_512_v4
+62/324212/campos_512_v4
+62/324215/campos_512_v4
+62/324220/campos_512_v4
+62/324230/campos_512_v4
+62/324253/campos_512_v4
+62/324300/campos_512_v4
+62/324313/campos_512_v4
+62/324350/campos_512_v4
+62/324370/campos_512_v4
+62/324374/campos_512_v4
+62/324427/campos_512_v4
+62/324430/campos_512_v4
+62/324431/campos_512_v4
+62/324455/campos_512_v4
+62/324492/campos_512_v4
+62/324525/campos_512_v4
+62/324552/campos_512_v4
+62/324554/campos_512_v4
+62/324599/campos_512_v4
+62/324624/campos_512_v4
+62/324716/campos_512_v4
+62/324726/campos_512_v4
+62/324768/campos_512_v4
+62/324788/campos_512_v4
+62/324847/campos_512_v4
+62/324868/campos_512_v4
+62/324872/campos_512_v4
+62/324877/campos_512_v4
+62/324913/campos_512_v4
+62/324974/campos_512_v4
+62/324981/campos_512_v4
+63/325044/campos_512_v4
+63/325059/campos_512_v4
+63/325089/campos_512_v4
+63/325095/campos_512_v4
+63/325116/campos_512_v4
+63/325151/campos_512_v4
+63/325155/campos_512_v4
+63/325227/campos_512_v4
+63/325239/campos_512_v4
+63/325317/campos_512_v4
+63/325327/campos_512_v4
+63/325328/campos_512_v4
+63/325345/campos_512_v4
+63/325346/campos_512_v4
+63/325451/campos_512_v4
+63/325465/campos_512_v4
+63/325495/campos_512_v4
+63/325498/campos_512_v4
+63/325531/campos_512_v4
+63/325574/campos_512_v4
+63/325597/campos_512_v4
+63/325624/campos_512_v4
+63/325676/campos_512_v4
+63/325680/campos_512_v4
+63/325732/campos_512_v4
+63/325750/campos_512_v4
+63/325774/campos_512_v4
+63/325836/campos_512_v4
+63/325847/campos_512_v4
+63/325858/campos_512_v4
+63/325867/campos_512_v4
+63/325885/campos_512_v4
+63/325918/campos_512_v4
+63/325982/campos_512_v4
+63/326025/campos_512_v4
+63/326030/campos_512_v4
+63/326063/campos_512_v4
+63/326129/campos_512_v4
+63/326143/campos_512_v4
+63/326196/campos_512_v4
+63/326211/campos_512_v4
+63/326249/campos_512_v4
+63/326322/campos_512_v4
+63/326349/campos_512_v4
+63/326352/campos_512_v4
+63/326381/campos_512_v4
+63/326435/campos_512_v4
+63/326440/campos_512_v4
+63/326450/campos_512_v4
+63/326466/campos_512_v4
+63/326497/campos_512_v4
+63/326507/campos_512_v4
+63/326531/campos_512_v4
+63/326595/campos_512_v4
+63/326596/campos_512_v4
+63/326617/campos_512_v4
+63/326632/campos_512_v4
+63/326634/campos_512_v4
+63/326680/campos_512_v4
+63/326685/campos_512_v4
+63/326699/campos_512_v4
+63/326705/campos_512_v4
+63/326722/campos_512_v4
+63/326740/campos_512_v4
+63/326777/campos_512_v4
+63/326784/campos_512_v4
+63/326794/campos_512_v4
+63/326822/campos_512_v4
+63/326864/campos_512_v4
+63/326952/campos_512_v4
+63/327044/campos_512_v4
+63/327067/campos_512_v4
+63/327132/campos_512_v4
+63/327141/campos_512_v4
+63/327180/campos_512_v4
+63/327226/campos_512_v4
+63/327229/campos_512_v4
+63/327268/campos_512_v4
+63/327271/campos_512_v4
+63/327292/campos_512_v4
+63/327296/campos_512_v4
+63/327338/campos_512_v4
+63/327343/campos_512_v4
+63/327430/campos_512_v4
+63/327457/campos_512_v4
+63/327459/campos_512_v4
+63/327460/campos_512_v4
+63/327464/campos_512_v4
+63/327472/campos_512_v4
+63/327482/campos_512_v4
+63/327509/campos_512_v4
+63/327527/campos_512_v4
+63/327555/campos_512_v4
+63/327599/campos_512_v4
+63/327607/campos_512_v4
+63/327610/campos_512_v4
+63/327668/campos_512_v4
+63/327709/campos_512_v4
+63/327727/campos_512_v4
+63/327759/campos_512_v4
+63/327763/campos_512_v4
+63/327917/campos_512_v4
+63/327958/campos_512_v4
+63/328011/campos_512_v4
+63/328053/campos_512_v4
+63/328077/campos_512_v4
+63/328082/campos_512_v4
+63/328083/campos_512_v4
+63/328206/campos_512_v4
+63/328237/campos_512_v4
+63/328319/campos_512_v4
+63/328342/campos_512_v4
+63/328370/campos_512_v4
+63/328378/campos_512_v4
+63/328380/campos_512_v4
+63/328395/campos_512_v4
+63/328397/campos_512_v4
+63/328398/campos_512_v4
+63/328425/campos_512_v4
+63/328426/campos_512_v4
+63/328460/campos_512_v4
+63/328461/campos_512_v4
+63/328502/campos_512_v4
+63/328508/campos_512_v4
+63/328523/campos_512_v4
+63/328555/campos_512_v4
+63/328571/campos_512_v4
+63/328639/campos_512_v4
+63/328714/campos_512_v4
+63/328722/campos_512_v4
+63/328776/campos_512_v4
+63/328903/campos_512_v4
+63/328905/campos_512_v4
+63/328920/campos_512_v4
+63/328924/campos_512_v4
+63/328940/campos_512_v4
+63/328942/campos_512_v4
+63/328987/campos_512_v4
+63/329001/campos_512_v4
+63/329033/campos_512_v4
+63/329075/campos_512_v4
+63/329138/campos_512_v4
+63/329153/campos_512_v4
+63/329161/campos_512_v4
+63/329174/campos_512_v4
+63/329200/campos_512_v4
+63/329210/campos_512_v4
+63/329217/campos_512_v4
+63/329221/campos_512_v4
+63/329230/campos_512_v4
+63/329234/campos_512_v4
+63/329249/campos_512_v4
+63/329250/campos_512_v4
+63/329297/campos_512_v4
+63/329350/campos_512_v4
+63/329381/campos_512_v4
+63/329429/campos_512_v4
+63/329436/campos_512_v4
+63/329447/campos_512_v4
+63/329469/campos_512_v4
+63/329474/campos_512_v4
+63/329523/campos_512_v4
+63/329575/campos_512_v4
+63/329586/campos_512_v4
+63/329628/campos_512_v4
+63/329635/campos_512_v4
+63/329640/campos_512_v4
+63/329643/campos_512_v4
+63/329671/campos_512_v4
+63/329689/campos_512_v4
+63/329713/campos_512_v4
+63/329740/campos_512_v4
+63/329768/campos_512_v4
+63/329775/campos_512_v4
+63/329778/campos_512_v4
+63/329848/campos_512_v4
+63/329912/campos_512_v4
+63/329968/campos_512_v4
+63/329999/campos_512_v4
+64/330004/campos_512_v4
+64/330052/campos_512_v4
+64/330058/campos_512_v4
+64/330065/campos_512_v4
+64/330067/campos_512_v4
+64/330071/campos_512_v4
+64/330087/campos_512_v4
+64/330119/campos_512_v4
+64/330125/campos_512_v4
+64/330132/campos_512_v4
+64/330146/campos_512_v4
+64/330313/campos_512_v4
+64/330333/campos_512_v4
+64/330343/campos_512_v4
+64/330403/campos_512_v4
+64/330407/campos_512_v4
+64/330554/campos_512_v4
+64/330560/campos_512_v4
+64/330566/campos_512_v4
+64/330596/campos_512_v4
+64/330613/campos_512_v4
+64/330702/campos_512_v4
+64/330732/campos_512_v4
+64/330805/campos_512_v4
+64/330817/campos_512_v4
+64/330819/campos_512_v4
+64/330886/campos_512_v4
+64/330900/campos_512_v4
+64/330923/campos_512_v4
+64/330924/campos_512_v4
+64/330968/campos_512_v4
+64/330977/campos_512_v4
+64/330992/campos_512_v4
+64/330996/campos_512_v4
+64/331015/campos_512_v4
+64/331063/campos_512_v4
+64/331092/campos_512_v4
+64/331143/campos_512_v4
+64/331150/campos_512_v4
+64/331208/campos_512_v4
+64/331246/campos_512_v4
+64/331269/campos_512_v4
+64/331326/campos_512_v4
+64/331387/campos_512_v4
+64/331426/campos_512_v4
+64/331437/campos_512_v4
+64/331466/campos_512_v4
+64/331467/campos_512_v4
+64/331492/campos_512_v4
+64/331558/campos_512_v4
+64/331559/campos_512_v4
+64/331566/campos_512_v4
+64/331569/campos_512_v4
+64/331641/campos_512_v4
+64/331674/campos_512_v4
+64/331681/campos_512_v4
+64/331693/campos_512_v4
+64/331698/campos_512_v4
+64/331728/campos_512_v4
+64/331738/campos_512_v4
+64/331755/campos_512_v4
+64/331760/campos_512_v4
+64/331771/campos_512_v4
+64/331797/campos_512_v4
+64/331803/campos_512_v4
+64/331840/campos_512_v4
+64/331855/campos_512_v4
+64/331867/campos_512_v4
+64/331869/campos_512_v4
+64/331885/campos_512_v4
+64/331945/campos_512_v4
+64/331964/campos_512_v4
+64/331999/campos_512_v4
+64/332022/campos_512_v4
+64/332033/campos_512_v4
+64/332058/campos_512_v4
+64/332068/campos_512_v4
+64/332088/campos_512_v4
+64/332117/campos_512_v4
+64/332150/campos_512_v4
+64/332211/campos_512_v4
+64/332224/campos_512_v4
+64/332254/campos_512_v4
+64/332282/campos_512_v4
+64/332390/campos_512_v4
+64/332442/campos_512_v4
+64/332458/campos_512_v4
+64/332463/campos_512_v4
+64/332485/campos_512_v4
+64/332568/campos_512_v4
+64/332595/campos_512_v4
+64/332600/campos_512_v4
+64/332634/campos_512_v4
+64/332671/campos_512_v4
+64/332694/campos_512_v4
+64/332697/campos_512_v4
+64/332718/campos_512_v4
+64/332720/campos_512_v4
+64/332721/campos_512_v4
+64/332761/campos_512_v4
+64/332791/campos_512_v4
+64/332814/campos_512_v4
+64/332920/campos_512_v4
+64/333008/campos_512_v4
+64/333009/campos_512_v4
+64/333015/campos_512_v4
+64/333090/campos_512_v4
+64/333173/campos_512_v4
+64/333227/campos_512_v4
+64/333237/campos_512_v4
+64/333252/campos_512_v4
+64/333265/campos_512_v4
+64/333336/campos_512_v4
+64/333344/campos_512_v4
+64/333354/campos_512_v4
+64/333421/campos_512_v4
+64/333429/campos_512_v4
+64/333519/campos_512_v4
+64/333530/campos_512_v4
+64/333590/campos_512_v4
+64/333591/campos_512_v4
+64/333617/campos_512_v4
+64/333634/campos_512_v4
+64/333691/campos_512_v4
+64/333724/campos_512_v4
+64/333827/campos_512_v4
+64/333837/campos_512_v4
+64/333887/campos_512_v4
+64/333890/campos_512_v4
+64/333907/campos_512_v4
+64/334007/campos_512_v4
+64/334039/campos_512_v4
+64/334041/campos_512_v4
+64/334043/campos_512_v4
+64/334126/campos_512_v4
+64/334167/campos_512_v4
+64/334226/campos_512_v4
+64/334249/campos_512_v4
+64/334261/campos_512_v4
+64/334292/campos_512_v4
+64/334409/campos_512_v4
+64/334429/campos_512_v4
+64/334468/campos_512_v4
+64/334485/campos_512_v4
+64/334505/campos_512_v4
+64/334518/campos_512_v4
+64/334579/campos_512_v4
+64/334581/campos_512_v4
+64/334599/campos_512_v4
+64/334701/campos_512_v4
+64/334709/campos_512_v4
+64/334808/campos_512_v4
+64/334814/campos_512_v4
+64/334819/campos_512_v4
+64/334866/campos_512_v4
+64/334875/campos_512_v4
+64/334883/campos_512_v4
+64/334892/campos_512_v4
+64/334904/campos_512_v4
+64/334907/campos_512_v4
+64/334917/campos_512_v4
+64/334920/campos_512_v4
+64/334939/campos_512_v4
+64/334944/campos_512_v4
+64/334958/campos_512_v4
+64/334985/campos_512_v4
+64/334994/campos_512_v4
+65/335015/campos_512_v4
+65/335256/campos_512_v4
+65/335275/campos_512_v4
+65/335338/campos_512_v4
+65/335340/campos_512_v4
+65/335348/campos_512_v4
+65/335441/campos_512_v4
+65/335551/campos_512_v4
+65/335596/campos_512_v4
+65/335599/campos_512_v4
+65/335601/campos_512_v4
+65/335605/campos_512_v4
+65/335676/campos_512_v4
+65/335678/campos_512_v4
+65/335748/campos_512_v4
+65/335758/campos_512_v4
+65/335762/campos_512_v4
+65/335766/campos_512_v4
+65/335794/campos_512_v4
+65/335855/campos_512_v4
+65/335882/campos_512_v4
+65/335915/campos_512_v4
+65/335965/campos_512_v4
+65/335988/campos_512_v4
+65/336115/campos_512_v4
+65/336147/campos_512_v4
+65/336240/campos_512_v4
+65/336244/campos_512_v4
+65/336269/campos_512_v4
+65/336313/campos_512_v4
+65/336339/campos_512_v4
+65/336350/campos_512_v4
+65/336359/campos_512_v4
+65/336371/campos_512_v4
+65/336411/campos_512_v4
+65/336436/campos_512_v4
+65/336526/campos_512_v4
+65/336567/campos_512_v4
+65/336604/campos_512_v4
+65/336624/campos_512_v4
+65/336684/campos_512_v4
+65/336705/campos_512_v4
+65/336732/campos_512_v4
+65/336758/campos_512_v4
+65/336830/campos_512_v4
+65/336840/campos_512_v4
+65/336852/campos_512_v4
+65/336861/campos_512_v4
+65/336885/campos_512_v4
+65/336888/campos_512_v4
+65/336941/campos_512_v4
+65/337005/campos_512_v4
+65/337015/campos_512_v4
+65/337028/campos_512_v4
+65/337080/campos_512_v4
+65/337081/campos_512_v4
+65/337083/campos_512_v4
+65/337093/campos_512_v4
+65/337115/campos_512_v4
+65/337128/campos_512_v4
+65/337231/campos_512_v4
+65/337238/campos_512_v4
+65/337247/campos_512_v4
+65/337273/campos_512_v4
+65/337316/campos_512_v4
+65/337338/campos_512_v4
+65/337382/campos_512_v4
+65/337398/campos_512_v4
+65/337484/campos_512_v4
+65/337486/campos_512_v4
+65/337528/campos_512_v4
+65/337589/campos_512_v4
+65/337622/campos_512_v4
+65/337657/campos_512_v4
+65/337661/campos_512_v4
+65/337674/campos_512_v4
+65/337682/campos_512_v4
+65/337687/campos_512_v4
+65/337712/campos_512_v4
+65/337730/campos_512_v4
+65/337742/campos_512_v4
+65/337751/campos_512_v4
+65/337789/campos_512_v4
+65/337798/campos_512_v4
+65/337805/campos_512_v4
+65/337885/campos_512_v4
+65/337927/campos_512_v4
+65/337999/campos_512_v4
+65/338037/campos_512_v4
+65/338094/campos_512_v4
+65/338095/campos_512_v4
+65/338109/campos_512_v4
+65/338111/campos_512_v4
+65/338139/campos_512_v4
+65/338152/campos_512_v4
+65/338160/campos_512_v4
+65/338224/campos_512_v4
+65/338259/campos_512_v4
+65/338269/campos_512_v4
+65/338272/campos_512_v4
+65/338274/campos_512_v4
+65/338309/campos_512_v4
+65/338312/campos_512_v4
+65/338325/campos_512_v4
+65/338340/campos_512_v4
+65/338391/campos_512_v4
+65/338394/campos_512_v4
+65/338467/campos_512_v4
+65/338472/campos_512_v4
+65/338473/campos_512_v4
+65/338518/campos_512_v4
+65/338635/campos_512_v4
+65/338694/campos_512_v4
+65/338721/campos_512_v4
+65/338744/campos_512_v4
+65/338748/campos_512_v4
+65/338793/campos_512_v4
+65/338821/campos_512_v4
+65/338871/campos_512_v4
+65/338898/campos_512_v4
+65/338909/campos_512_v4
+65/338922/campos_512_v4
+65/338938/campos_512_v4
+65/338953/campos_512_v4
+65/338978/campos_512_v4
+65/339025/campos_512_v4
+65/339061/campos_512_v4
+65/339081/campos_512_v4
+65/339120/campos_512_v4
+65/339180/campos_512_v4
+65/339219/campos_512_v4
+65/339249/campos_512_v4
+65/339250/campos_512_v4
+65/339338/campos_512_v4
+65/339380/campos_512_v4
+65/339549/campos_512_v4
+65/339552/campos_512_v4
+65/339579/campos_512_v4
+65/339607/campos_512_v4
+65/339655/campos_512_v4
+65/339716/campos_512_v4
+65/339766/campos_512_v4
+65/339771/campos_512_v4
+65/339786/campos_512_v4
+65/339856/campos_512_v4
+65/339888/campos_512_v4
+65/339890/campos_512_v4
+65/339906/campos_512_v4
+65/339939/campos_512_v4
+65/339985/campos_512_v4
+66/340004/campos_512_v4
+66/340006/campos_512_v4
+66/340010/campos_512_v4
+66/340053/campos_512_v4
+66/340054/campos_512_v4
+66/340078/campos_512_v4
+66/340268/campos_512_v4
+66/340271/campos_512_v4
+66/340294/campos_512_v4
+66/340302/campos_512_v4
+66/340313/campos_512_v4
+66/340346/campos_512_v4
+66/340362/campos_512_v4
+66/340392/campos_512_v4
+66/340408/campos_512_v4
+66/340415/campos_512_v4
+66/340470/campos_512_v4
+66/340497/campos_512_v4
+66/340513/campos_512_v4
+66/340549/campos_512_v4
+66/340561/campos_512_v4
+66/340591/campos_512_v4
+66/340618/campos_512_v4
+66/340636/campos_512_v4
+66/340663/campos_512_v4
+66/340688/campos_512_v4
+66/340708/campos_512_v4
+66/340716/campos_512_v4
+66/340726/campos_512_v4
+66/340753/campos_512_v4
+66/340769/campos_512_v4
+66/340779/campos_512_v4
+66/340783/campos_512_v4
+66/340813/campos_512_v4
+66/340925/campos_512_v4
+66/340975/campos_512_v4
+66/341005/campos_512_v4
+66/341009/campos_512_v4
+66/341028/campos_512_v4
+66/341059/campos_512_v4
+66/341082/campos_512_v4
+66/341183/campos_512_v4
+66/341222/campos_512_v4
+66/341277/campos_512_v4
+66/341304/campos_512_v4
+66/341347/campos_512_v4
+66/341364/campos_512_v4
+66/341368/campos_512_v4
+66/341424/campos_512_v4
+66/341436/campos_512_v4
+66/341457/campos_512_v4
+66/341481/campos_512_v4
+66/341482/campos_512_v4
+66/341495/campos_512_v4
+66/341512/campos_512_v4
+66/341588/campos_512_v4
+66/341597/campos_512_v4
+66/341619/campos_512_v4
+66/341657/campos_512_v4
+66/341704/campos_512_v4
+66/341709/campos_512_v4
+66/341711/campos_512_v4
+66/341715/campos_512_v4
+66/341742/campos_512_v4
+66/341745/campos_512_v4
+66/341772/campos_512_v4
+66/341838/campos_512_v4
+66/341881/campos_512_v4
+66/341887/campos_512_v4
+66/341891/campos_512_v4
+66/341909/campos_512_v4
+66/341929/campos_512_v4
+66/341970/campos_512_v4
+66/342009/campos_512_v4
+66/342029/campos_512_v4
+66/342033/campos_512_v4
+66/342056/campos_512_v4
+66/342103/campos_512_v4
+66/342109/campos_512_v4
+66/342158/campos_512_v4
+66/342161/campos_512_v4
+66/342196/campos_512_v4
+66/342236/campos_512_v4
+66/342278/campos_512_v4
+66/342286/campos_512_v4
+66/342287/campos_512_v4
+66/342322/campos_512_v4
+66/342330/campos_512_v4
+66/342342/campos_512_v4
+66/342355/campos_512_v4
+66/342379/campos_512_v4
+66/342386/campos_512_v4
+66/342436/campos_512_v4
+66/342460/campos_512_v4
+66/342461/campos_512_v4
+66/342503/campos_512_v4
+66/342505/campos_512_v4
+66/342528/campos_512_v4
+66/342565/campos_512_v4
+66/342580/campos_512_v4
+66/342589/campos_512_v4
+66/342599/campos_512_v4
+66/342654/campos_512_v4
+66/342667/campos_512_v4
+66/342719/campos_512_v4
+66/342730/campos_512_v4
+66/342753/campos_512_v4
+66/342851/campos_512_v4
+66/342943/campos_512_v4
+66/342952/campos_512_v4
+66/342958/campos_512_v4
+66/343096/campos_512_v4
+66/343115/campos_512_v4
+66/343141/campos_512_v4
+66/343143/campos_512_v4
+66/343200/campos_512_v4
+66/343227/campos_512_v4
+66/343259/campos_512_v4
+66/343262/campos_512_v4
+66/343324/campos_512_v4
+66/343393/campos_512_v4
+66/343413/campos_512_v4
+66/343423/campos_512_v4
+66/343464/campos_512_v4
+66/343488/campos_512_v4
+66/343521/campos_512_v4
+66/343523/campos_512_v4
+66/343622/campos_512_v4
+66/343635/campos_512_v4
+66/343673/campos_512_v4
+66/343744/campos_512_v4
+66/343758/campos_512_v4
+66/343771/campos_512_v4
+66/343795/campos_512_v4
+66/343820/campos_512_v4
+66/343836/campos_512_v4
+66/343872/campos_512_v4
+66/343879/campos_512_v4
+66/343906/campos_512_v4
+66/343925/campos_512_v4
+66/343929/campos_512_v4
+66/343950/campos_512_v4
+66/343978/campos_512_v4
+66/344039/campos_512_v4
+66/344050/campos_512_v4
+66/344090/campos_512_v4
+66/344117/campos_512_v4
+66/344152/campos_512_v4
+66/344164/campos_512_v4
+66/344290/campos_512_v4
+66/344292/campos_512_v4
+66/344387/campos_512_v4
+66/344401/campos_512_v4
+66/344471/campos_512_v4
+66/344489/campos_512_v4
+66/344508/campos_512_v4
+66/344530/campos_512_v4
+66/344567/campos_512_v4
+66/344571/campos_512_v4
+66/344580/campos_512_v4
+66/344613/campos_512_v4
+66/344644/campos_512_v4
+66/344658/campos_512_v4
+66/344667/campos_512_v4
+66/344671/campos_512_v4
+66/344712/campos_512_v4
+66/344736/campos_512_v4
+66/344754/campos_512_v4
+66/344782/campos_512_v4
+66/344866/campos_512_v4
+66/344873/campos_512_v4
+66/344891/campos_512_v4
+66/344919/campos_512_v4
+66/344930/campos_512_v4
+67/345002/campos_512_v4
+67/345006/campos_512_v4
+67/345009/campos_512_v4
+67/345026/campos_512_v4
+67/345068/campos_512_v4
+67/345072/campos_512_v4
+67/345086/campos_512_v4
+67/345091/campos_512_v4
+67/345105/campos_512_v4
+67/345144/campos_512_v4
+67/345154/campos_512_v4
+67/345187/campos_512_v4
+67/345240/campos_512_v4
+67/345253/campos_512_v4
+67/345284/campos_512_v4
+67/345322/campos_512_v4
+67/345354/campos_512_v4
+67/345438/campos_512_v4
+67/345473/campos_512_v4
+67/345486/campos_512_v4
+67/345539/campos_512_v4
+67/345540/campos_512_v4
+67/345569/campos_512_v4
+67/345639/campos_512_v4
+67/345711/campos_512_v4
+67/345744/campos_512_v4
+67/345746/campos_512_v4
+67/345785/campos_512_v4
+67/345809/campos_512_v4
+67/345847/campos_512_v4
+67/345863/campos_512_v4
+67/345865/campos_512_v4
+67/345963/campos_512_v4
+67/345970/campos_512_v4
+67/345973/campos_512_v4
+67/345989/campos_512_v4
+67/346021/campos_512_v4
+67/346045/campos_512_v4
+67/346107/campos_512_v4
+67/346127/campos_512_v4
+67/346131/campos_512_v4
+67/346136/campos_512_v4
+67/346159/campos_512_v4
+67/346200/campos_512_v4
+67/346212/campos_512_v4
+67/346220/campos_512_v4
+67/346221/campos_512_v4
+67/346240/campos_512_v4
+67/346245/campos_512_v4
+67/346264/campos_512_v4
+67/346277/campos_512_v4
+67/346339/campos_512_v4
+67/346355/campos_512_v4
+67/346382/campos_512_v4
+67/346399/campos_512_v4
+67/346405/campos_512_v4
+67/346449/campos_512_v4
+67/346492/campos_512_v4
+67/346498/campos_512_v4
+67/346515/campos_512_v4
+67/346524/campos_512_v4
+67/346564/campos_512_v4
+67/346578/campos_512_v4
+67/346621/campos_512_v4
+67/346665/campos_512_v4
+67/346680/campos_512_v4
+67/346684/campos_512_v4
+67/346721/campos_512_v4
+67/346774/campos_512_v4
+67/346777/campos_512_v4
+67/346778/campos_512_v4
+67/346815/campos_512_v4
+67/346819/campos_512_v4
+67/346860/campos_512_v4
+67/346913/campos_512_v4
+67/346968/campos_512_v4
+67/347020/campos_512_v4
+67/347074/campos_512_v4
+67/347090/campos_512_v4
+67/347144/campos_512_v4
+67/347201/campos_512_v4
+67/347206/campos_512_v4
+67/347232/campos_512_v4
+67/347247/campos_512_v4
+67/347279/campos_512_v4
+67/347295/campos_512_v4
+67/347376/campos_512_v4
+67/347378/campos_512_v4
+67/347433/campos_512_v4
+67/347502/campos_512_v4
+67/347545/campos_512_v4
+67/347553/campos_512_v4
+67/347582/campos_512_v4
+67/347585/campos_512_v4
+67/347595/campos_512_v4
+67/347612/campos_512_v4
+67/347642/campos_512_v4
+67/347676/campos_512_v4
+67/347736/campos_512_v4
+67/347741/campos_512_v4
+67/347864/campos_512_v4
+67/347875/campos_512_v4
+67/347905/campos_512_v4
+67/347957/campos_512_v4
+67/347962/campos_512_v4
+67/347967/campos_512_v4
+67/348065/campos_512_v4
+67/348169/campos_512_v4
+67/348205/campos_512_v4
+67/348217/campos_512_v4
+67/348273/campos_512_v4
+67/348285/campos_512_v4
+67/348318/campos_512_v4
+67/348353/campos_512_v4
+67/348418/campos_512_v4
+67/348454/campos_512_v4
+67/348476/campos_512_v4
+67/348550/campos_512_v4
+67/348580/campos_512_v4
+67/348592/campos_512_v4
+67/348596/campos_512_v4
+67/348614/campos_512_v4
+67/348723/campos_512_v4
+67/348726/campos_512_v4
+67/348813/campos_512_v4
+67/348849/campos_512_v4
+67/348859/campos_512_v4
+67/348875/campos_512_v4
+67/348959/campos_512_v4
+67/348965/campos_512_v4
+67/348979/campos_512_v4
+67/349004/campos_512_v4
+67/349026/campos_512_v4
+67/349118/campos_512_v4
+67/349124/campos_512_v4
+67/349127/campos_512_v4
+67/349138/campos_512_v4
+67/349153/campos_512_v4
+67/349155/campos_512_v4
+67/349211/campos_512_v4
+67/349212/campos_512_v4
+67/349214/campos_512_v4
+67/349266/campos_512_v4
+67/349283/campos_512_v4
+67/349291/campos_512_v4
+67/349337/campos_512_v4
+67/349356/campos_512_v4
+67/349385/campos_512_v4
+67/349397/campos_512_v4
+67/349411/campos_512_v4
+67/349439/campos_512_v4
+67/349491/campos_512_v4
+67/349495/campos_512_v4
+67/349545/campos_512_v4
+67/349563/campos_512_v4
+67/349603/campos_512_v4
+67/349657/campos_512_v4
+67/349669/campos_512_v4
+67/349696/campos_512_v4
+67/349706/campos_512_v4
+67/349732/campos_512_v4
+67/349883/campos_512_v4
+67/349960/campos_512_v4
+67/349986/campos_512_v4
+68/350005/campos_512_v4
+68/350073/campos_512_v4
+68/350115/campos_512_v4
+68/350137/campos_512_v4
+68/350138/campos_512_v4
+68/350202/campos_512_v4
+68/350206/campos_512_v4
+68/350224/campos_512_v4
+68/350255/campos_512_v4
+68/350280/campos_512_v4
+68/350282/campos_512_v4
+68/350314/campos_512_v4
+68/350336/campos_512_v4
+68/350380/campos_512_v4
+68/350459/campos_512_v4
+68/350462/campos_512_v4
+68/350475/campos_512_v4
+68/350518/campos_512_v4
+68/350551/campos_512_v4
+68/350570/campos_512_v4
+68/350578/campos_512_v4
+68/350634/campos_512_v4
+68/350649/campos_512_v4
+68/350677/campos_512_v4
+68/350678/campos_512_v4
+68/350693/campos_512_v4
+68/350706/campos_512_v4
+68/350778/campos_512_v4
+68/350840/campos_512_v4
+68/350861/campos_512_v4
+68/350863/campos_512_v4
+68/350867/campos_512_v4
+68/350868/campos_512_v4
+68/350904/campos_512_v4
+68/350910/campos_512_v4
+68/350965/campos_512_v4
+68/351005/campos_512_v4
+68/351031/campos_512_v4
+68/351050/campos_512_v4
+68/351082/campos_512_v4
+68/351090/campos_512_v4
+68/351111/campos_512_v4
+68/351118/campos_512_v4
+68/351134/campos_512_v4
+68/351153/campos_512_v4
+68/351165/campos_512_v4
+68/351188/campos_512_v4
+68/351211/campos_512_v4
+68/351243/campos_512_v4
+68/351263/campos_512_v4
+68/351309/campos_512_v4
+68/351326/campos_512_v4
+68/351342/campos_512_v4
+68/351345/campos_512_v4
+68/351373/campos_512_v4
+68/351403/campos_512_v4
+68/351417/campos_512_v4
+68/351439/campos_512_v4
+68/351509/campos_512_v4
+68/351589/campos_512_v4
+68/351590/campos_512_v4
+68/351667/campos_512_v4
+68/351684/campos_512_v4
+68/351689/campos_512_v4
+68/351695/campos_512_v4
+68/351707/campos_512_v4
+68/351737/campos_512_v4
+68/351746/campos_512_v4
+68/351781/campos_512_v4
+68/351793/campos_512_v4
+68/351806/campos_512_v4
+68/351814/campos_512_v4
+68/351824/campos_512_v4
+68/351855/campos_512_v4
+68/351859/campos_512_v4
+68/351900/campos_512_v4
+68/351904/campos_512_v4
+68/351919/campos_512_v4
+68/351924/campos_512_v4
+68/351947/campos_512_v4
+68/351978/campos_512_v4
+68/351984/campos_512_v4
+68/351991/campos_512_v4
+68/352033/campos_512_v4
+68/352061/campos_512_v4
+68/352160/campos_512_v4
+68/352193/campos_512_v4
+68/352259/campos_512_v4
+68/352268/campos_512_v4
+68/352299/campos_512_v4
+68/352308/campos_512_v4
+68/352312/campos_512_v4
+68/352332/campos_512_v4
+68/352369/campos_512_v4
+68/352460/campos_512_v4
+68/352509/campos_512_v4
+68/352530/campos_512_v4
+68/352531/campos_512_v4
+68/352546/campos_512_v4
+68/352570/campos_512_v4
+68/352582/campos_512_v4
+68/352644/campos_512_v4
+68/352681/campos_512_v4
+68/352738/campos_512_v4
+68/352793/campos_512_v4
+68/352805/campos_512_v4
+68/352808/campos_512_v4
+68/352902/campos_512_v4
+68/352951/campos_512_v4
+68/352984/campos_512_v4
+68/352992/campos_512_v4
+68/353013/campos_512_v4
+68/353090/campos_512_v4
+68/353130/campos_512_v4
+68/353166/campos_512_v4
+68/353169/campos_512_v4
+68/353170/campos_512_v4
+68/353174/campos_512_v4
+68/353223/campos_512_v4
+68/353235/campos_512_v4
+68/353291/campos_512_v4
+68/353395/campos_512_v4
+68/353408/campos_512_v4
+68/353420/campos_512_v4
+68/353423/campos_512_v4
+68/353486/campos_512_v4
+68/353569/campos_512_v4
+68/353607/campos_512_v4
+68/353637/campos_512_v4
+68/353639/campos_512_v4
+68/353672/campos_512_v4
+68/353702/campos_512_v4
+68/353710/campos_512_v4
+68/353728/campos_512_v4
+68/353731/campos_512_v4
+68/353739/campos_512_v4
+68/353797/campos_512_v4
+68/353877/campos_512_v4
+68/353928/campos_512_v4
+68/353991/campos_512_v4
+68/354014/campos_512_v4
+68/354021/campos_512_v4
+68/354071/campos_512_v4
+68/354168/campos_512_v4
+68/354209/campos_512_v4
+68/354214/campos_512_v4
+68/354231/campos_512_v4
+68/354277/campos_512_v4
+68/354342/campos_512_v4
+68/354361/campos_512_v4
+68/354387/campos_512_v4
+68/354483/campos_512_v4
+68/354509/campos_512_v4
+68/354519/campos_512_v4
+68/354598/campos_512_v4
+68/354607/campos_512_v4
+68/354630/campos_512_v4
+68/354666/campos_512_v4
+68/354675/campos_512_v4
+68/354700/campos_512_v4
+68/354764/campos_512_v4
+68/354773/campos_512_v4
+68/354786/campos_512_v4
+68/354855/campos_512_v4
+68/354886/campos_512_v4
+68/354913/campos_512_v4
+68/354923/campos_512_v4
+68/354932/campos_512_v4
+68/354983/campos_512_v4
+69/355088/campos_512_v4
+69/355125/campos_512_v4
+69/355137/campos_512_v4
+69/355169/campos_512_v4
+69/355172/campos_512_v4
+69/355174/campos_512_v4
+69/355181/campos_512_v4
+69/355228/campos_512_v4
+69/355251/campos_512_v4
+69/355271/campos_512_v4
+69/355284/campos_512_v4
+69/355309/campos_512_v4
+69/355341/campos_512_v4
+69/355406/campos_512_v4
+69/355413/campos_512_v4
+69/355453/campos_512_v4
+69/355456/campos_512_v4
+69/355469/campos_512_v4
+69/355496/campos_512_v4
+69/355510/campos_512_v4
+69/355551/campos_512_v4
+69/355553/campos_512_v4
+69/355570/campos_512_v4
+69/355577/campos_512_v4
+69/355609/campos_512_v4
+69/355611/campos_512_v4
+69/355646/campos_512_v4
+69/355663/campos_512_v4
+69/355699/campos_512_v4
+69/355719/campos_512_v4
+69/355727/campos_512_v4
+69/355740/campos_512_v4
+69/355833/campos_512_v4
+69/355834/campos_512_v4
+69/355918/campos_512_v4
+69/355925/campos_512_v4
+69/355943/campos_512_v4
+69/355946/campos_512_v4
+69/355965/campos_512_v4
+69/355984/campos_512_v4
+69/356001/campos_512_v4
+69/356013/campos_512_v4
+69/356016/campos_512_v4
+69/356024/campos_512_v4
+69/356058/campos_512_v4
+69/356060/campos_512_v4
+69/356062/campos_512_v4
+69/356071/campos_512_v4
+69/356155/campos_512_v4
+69/356194/campos_512_v4
+69/356211/campos_512_v4
+69/356296/campos_512_v4
+69/356310/campos_512_v4
+69/356316/campos_512_v4
+69/356353/campos_512_v4
+69/356355/campos_512_v4
+69/356366/campos_512_v4
+69/356404/campos_512_v4
+69/356440/campos_512_v4
+69/356484/campos_512_v4
+69/356494/campos_512_v4
+69/356507/campos_512_v4
+69/356508/campos_512_v4
+69/356538/campos_512_v4
+69/356632/campos_512_v4
+69/356643/campos_512_v4
+69/356647/campos_512_v4
+69/356648/campos_512_v4
+69/356670/campos_512_v4
+69/356676/campos_512_v4
+69/356721/campos_512_v4
+69/356772/campos_512_v4
+69/356776/campos_512_v4
+69/356781/campos_512_v4
+69/356852/campos_512_v4
+69/356899/campos_512_v4
+69/356926/campos_512_v4
+69/356948/campos_512_v4
+69/356961/campos_512_v4
+69/356968/campos_512_v4
+69/357103/campos_512_v4
+69/357120/campos_512_v4
+69/357153/campos_512_v4
+69/357156/campos_512_v4
+69/357162/campos_512_v4
+69/357181/campos_512_v4
+69/357191/campos_512_v4
+69/357202/campos_512_v4
+69/357230/campos_512_v4
+69/357286/campos_512_v4
+69/357533/campos_512_v4
+69/357541/campos_512_v4
+69/357597/campos_512_v4
+69/357603/campos_512_v4
+69/357672/campos_512_v4
+69/357696/campos_512_v4
+69/357698/campos_512_v4
+69/357712/campos_512_v4
+69/357725/campos_512_v4
+69/357806/campos_512_v4
+69/357816/campos_512_v4
+69/357832/campos_512_v4
+69/357920/campos_512_v4
+69/357961/campos_512_v4
+69/357978/campos_512_v4
+69/358025/campos_512_v4
+69/358073/campos_512_v4
+69/358101/campos_512_v4
+69/358135/campos_512_v4
+69/358151/campos_512_v4
+69/358187/campos_512_v4
+69/358188/campos_512_v4
+69/358204/campos_512_v4
+69/358257/campos_512_v4
+69/358260/campos_512_v4
+69/358322/campos_512_v4
+69/358328/campos_512_v4
+69/358337/campos_512_v4
+69/358416/campos_512_v4
+69/358422/campos_512_v4
+69/358424/campos_512_v4
+69/358440/campos_512_v4
+69/358480/campos_512_v4
+69/358595/campos_512_v4
+69/358742/campos_512_v4
+69/358773/campos_512_v4
+69/358828/campos_512_v4
+69/358837/campos_512_v4
+69/358844/campos_512_v4
+69/358879/campos_512_v4
+69/358898/campos_512_v4
+69/358910/campos_512_v4
+69/358911/campos_512_v4
+69/358912/campos_512_v4
+69/358938/campos_512_v4
+69/358962/campos_512_v4
+69/359016/campos_512_v4
+69/359056/campos_512_v4
+69/359084/campos_512_v4
+69/359093/campos_512_v4
+69/359113/campos_512_v4
+69/359136/campos_512_v4
+69/359163/campos_512_v4
+69/359201/campos_512_v4
+69/359209/campos_512_v4
+69/359229/campos_512_v4
+69/359232/campos_512_v4
+69/359236/campos_512_v4
+69/359238/campos_512_v4
+69/359243/campos_512_v4
+69/359251/campos_512_v4
+69/359261/campos_512_v4
+69/359263/campos_512_v4
+69/359288/campos_512_v4
+69/359357/campos_512_v4
+69/359384/campos_512_v4
+69/359392/campos_512_v4
+69/359446/campos_512_v4
+69/359470/campos_512_v4
+69/359521/campos_512_v4
+69/359560/campos_512_v4
+69/359602/campos_512_v4
+69/359715/campos_512_v4
+69/359728/campos_512_v4
+69/359744/campos_512_v4
+69/359758/campos_512_v4
+69/359808/campos_512_v4
+69/359810/campos_512_v4
+69/359848/campos_512_v4
+69/359859/campos_512_v4
+69/359879/campos_512_v4
+69/359910/campos_512_v4
+69/359954/campos_512_v4
+69/359960/campos_512_v4
+69/359982/campos_512_v4
+7/45011/campos_512_v4
+7/45014/campos_512_v4
+7/45057/campos_512_v4
+7/45059/campos_512_v4
+7/45095/campos_512_v4
+7/45096/campos_512_v4
+7/45121/campos_512_v4
+7/45198/campos_512_v4
+7/45204/campos_512_v4
+7/45247/campos_512_v4
+7/45349/campos_512_v4
+7/45371/campos_512_v4
+7/45381/campos_512_v4
+7/45383/campos_512_v4
+7/45444/campos_512_v4
+7/45450/campos_512_v4
+7/45461/campos_512_v4
+7/45462/campos_512_v4
+7/45554/campos_512_v4
+7/45555/campos_512_v4
+7/45600/campos_512_v4
+7/45606/campos_512_v4
+7/45641/campos_512_v4
+7/45696/campos_512_v4
+7/45741/campos_512_v4
+7/45754/campos_512_v4
+7/45786/campos_512_v4
+7/45792/campos_512_v4
+7/45795/campos_512_v4
+7/45815/campos_512_v4
+7/45837/campos_512_v4
+7/45851/campos_512_v4
+7/45870/campos_512_v4
+7/45871/campos_512_v4
+7/45893/campos_512_v4
+7/45895/campos_512_v4
+7/45901/campos_512_v4
+7/45946/campos_512_v4
+7/45971/campos_512_v4
+7/45973/campos_512_v4
+7/46006/campos_512_v4
+7/46012/campos_512_v4
+7/46019/campos_512_v4
+7/46021/campos_512_v4
+7/46051/campos_512_v4
+7/46055/campos_512_v4
+7/46062/campos_512_v4
+7/46066/campos_512_v4
+7/46090/campos_512_v4
+7/46189/campos_512_v4
+7/46210/campos_512_v4
+7/46285/campos_512_v4
+7/46305/campos_512_v4
+7/46315/campos_512_v4
+7/46329/campos_512_v4
+7/46334/campos_512_v4
+7/46337/campos_512_v4
+7/46355/campos_512_v4
+7/46372/campos_512_v4
+7/46425/campos_512_v4
+7/46436/campos_512_v4
+7/46464/campos_512_v4
+7/46486/campos_512_v4
+7/46565/campos_512_v4
+7/46579/campos_512_v4
+7/46593/campos_512_v4
+7/46595/campos_512_v4
+7/46605/campos_512_v4
+7/46619/campos_512_v4
+7/46631/campos_512_v4
+7/46744/campos_512_v4
+7/46751/campos_512_v4
+7/46762/campos_512_v4
+7/46780/campos_512_v4
+7/46783/campos_512_v4
+7/46800/campos_512_v4
+7/46821/campos_512_v4
+7/46874/campos_512_v4
+7/46895/campos_512_v4
+7/46943/campos_512_v4
+7/46959/campos_512_v4
+7/46994/campos_512_v4
+7/47000/campos_512_v4
+7/47022/campos_512_v4
+7/47028/campos_512_v4
+7/47039/campos_512_v4
+7/47097/campos_512_v4
+7/47103/campos_512_v4
+7/47133/campos_512_v4
+7/47172/campos_512_v4
+7/47258/campos_512_v4
+7/47291/campos_512_v4
+7/47332/campos_512_v4
+7/47333/campos_512_v4
+7/47340/campos_512_v4
+7/47367/campos_512_v4
+7/47370/campos_512_v4
+7/47378/campos_512_v4
+7/47422/campos_512_v4
+7/47430/campos_512_v4
+7/47442/campos_512_v4
+7/47471/campos_512_v4
+7/47550/campos_512_v4
+7/47558/campos_512_v4
+7/47560/campos_512_v4
+7/47569/campos_512_v4
+7/47595/campos_512_v4
+7/47619/campos_512_v4
+7/47654/campos_512_v4
+7/47672/campos_512_v4
+7/47704/campos_512_v4
+7/47792/campos_512_v4
+7/47800/campos_512_v4
+7/47813/campos_512_v4
+7/47816/campos_512_v4
+7/47817/campos_512_v4
+7/47852/campos_512_v4
+7/47924/campos_512_v4
+7/47936/campos_512_v4
+7/47973/campos_512_v4
+7/47993/campos_512_v4
+7/48027/campos_512_v4
+7/48040/campos_512_v4
+7/48144/campos_512_v4
+7/48151/campos_512_v4
+7/48166/campos_512_v4
+7/48248/campos_512_v4
+7/48264/campos_512_v4
+7/48266/campos_512_v4
+7/48270/campos_512_v4
+7/48271/campos_512_v4
+7/48291/campos_512_v4
+7/48314/campos_512_v4
+7/48374/campos_512_v4
+7/48384/campos_512_v4
+7/48385/campos_512_v4
+7/48388/campos_512_v4
+7/48394/campos_512_v4
+7/48396/campos_512_v4
+7/48407/campos_512_v4
+7/48426/campos_512_v4
+7/48437/campos_512_v4
+7/48449/campos_512_v4
+7/48464/campos_512_v4
+7/48525/campos_512_v4
+7/48528/campos_512_v4
+7/48534/campos_512_v4
+7/48564/campos_512_v4
+7/48587/campos_512_v4
+7/48599/campos_512_v4
+7/48624/campos_512_v4
+7/48647/campos_512_v4
+7/48695/campos_512_v4
+7/48702/campos_512_v4
+7/48750/campos_512_v4
+7/48751/campos_512_v4
+7/48753/campos_512_v4
+7/48787/campos_512_v4
+7/48814/campos_512_v4
+7/48872/campos_512_v4
+7/48927/campos_512_v4
+7/48979/campos_512_v4
+7/49026/campos_512_v4
+7/49034/campos_512_v4
+7/49037/campos_512_v4
+7/49045/campos_512_v4
+7/49087/campos_512_v4
+7/49149/campos_512_v4
+7/49164/campos_512_v4
+7/49168/campos_512_v4
+7/49179/campos_512_v4
+7/49191/campos_512_v4
+7/49194/campos_512_v4
+7/49218/campos_512_v4
+7/49267/campos_512_v4
+7/49273/campos_512_v4
+7/49283/campos_512_v4
+7/49344/campos_512_v4
+7/49346/campos_512_v4
+7/49372/campos_512_v4
+7/49381/campos_512_v4
+7/49390/campos_512_v4
+7/49425/campos_512_v4
+7/49426/campos_512_v4
+7/49442/campos_512_v4
+7/49485/campos_512_v4
+7/49512/campos_512_v4
+7/49550/campos_512_v4
+7/49559/campos_512_v4
+7/49652/campos_512_v4
+7/49656/campos_512_v4
+7/49664/campos_512_v4
+7/49680/campos_512_v4
+7/49804/campos_512_v4
+7/49805/campos_512_v4
+7/49806/campos_512_v4
+7/49835/campos_512_v4
+7/49846/campos_512_v4
+7/49887/campos_512_v4
+7/49918/campos_512_v4
+7/49938/campos_512_v4
+7/49939/campos_512_v4
+7/49945/campos_512_v4
+7/49958/campos_512_v4
+7/49976/campos_512_v4
+7/49990/campos_512_v4
+7/49993/campos_512_v4
+70/360041/campos_512_v4
+70/360079/campos_512_v4
+70/360080/campos_512_v4
+70/360129/campos_512_v4
+70/360140/campos_512_v4
+70/360253/campos_512_v4
+70/360270/campos_512_v4
+70/360284/campos_512_v4
+70/360380/campos_512_v4
+70/360389/campos_512_v4
+70/360431/campos_512_v4
+70/360444/campos_512_v4
+70/360453/campos_512_v4
+70/360459/campos_512_v4
+70/360478/campos_512_v4
+70/360546/campos_512_v4
+70/360562/campos_512_v4
+70/360593/campos_512_v4
+70/360606/campos_512_v4
+70/360631/campos_512_v4
+70/360637/campos_512_v4
+70/360706/campos_512_v4
+70/360728/campos_512_v4
+70/360780/campos_512_v4
+70/360796/campos_512_v4
+70/360820/campos_512_v4
+70/360825/campos_512_v4
+70/360855/campos_512_v4
+70/360977/campos_512_v4
+70/361008/campos_512_v4
+70/361068/campos_512_v4
+70/361087/campos_512_v4
+70/361170/campos_512_v4
+70/361191/campos_512_v4
+70/361199/campos_512_v4
+70/361208/campos_512_v4
+70/361233/campos_512_v4
+70/361301/campos_512_v4
+70/361346/campos_512_v4
+70/361363/campos_512_v4
+70/361365/campos_512_v4
+70/361463/campos_512_v4
+70/361473/campos_512_v4
+70/361494/campos_512_v4
+70/361507/campos_512_v4
+70/361545/campos_512_v4
+70/361562/campos_512_v4
+70/361576/campos_512_v4
+70/361714/campos_512_v4
+70/361715/campos_512_v4
+70/361724/campos_512_v4
+70/361734/campos_512_v4
+70/361742/campos_512_v4
+70/361746/campos_512_v4
+70/361756/campos_512_v4
+70/361797/campos_512_v4
+70/361815/campos_512_v4
+70/361832/campos_512_v4
+70/361835/campos_512_v4
+70/361848/campos_512_v4
+70/361876/campos_512_v4
+70/361883/campos_512_v4
+70/361906/campos_512_v4
+70/361915/campos_512_v4
+70/361928/campos_512_v4
+70/362074/campos_512_v4
+70/362119/campos_512_v4
+70/362135/campos_512_v4
+70/362160/campos_512_v4
+70/362177/campos_512_v4
+70/362240/campos_512_v4
+70/362271/campos_512_v4
+70/362283/campos_512_v4
+70/362360/campos_512_v4
+70/362399/campos_512_v4
+70/362468/campos_512_v4
+70/362627/campos_512_v4
+70/362638/campos_512_v4
+70/362733/campos_512_v4
+70/362738/campos_512_v4
+70/362791/campos_512_v4
+70/362796/campos_512_v4
+70/362844/campos_512_v4
+70/362846/campos_512_v4
+70/362861/campos_512_v4
+70/362862/campos_512_v4
+70/362878/campos_512_v4
+70/362905/campos_512_v4
+70/362964/campos_512_v4
+70/362972/campos_512_v4
+70/362981/campos_512_v4
+70/362985/campos_512_v4
+70/363057/campos_512_v4
+70/363119/campos_512_v4
+70/363121/campos_512_v4
+70/363132/campos_512_v4
+70/363135/campos_512_v4
+70/363141/campos_512_v4
+70/363223/campos_512_v4
+70/363239/campos_512_v4
+70/363249/campos_512_v4
+70/363273/campos_512_v4
+70/363410/campos_512_v4
+70/363447/campos_512_v4
+70/363449/campos_512_v4
+70/363528/campos_512_v4
+70/363532/campos_512_v4
+70/363586/campos_512_v4
+70/363708/campos_512_v4
+70/363751/campos_512_v4
+70/363752/campos_512_v4
+70/363772/campos_512_v4
+70/363793/campos_512_v4
+70/363857/campos_512_v4
+70/363862/campos_512_v4
+70/363897/campos_512_v4
+70/364006/campos_512_v4
+70/364043/campos_512_v4
+70/364086/campos_512_v4
+70/364093/campos_512_v4
+70/364102/campos_512_v4
+70/364155/campos_512_v4
+70/364171/campos_512_v4
+70/364202/campos_512_v4
+70/364206/campos_512_v4
+70/364216/campos_512_v4
+70/364238/campos_512_v4
+70/364272/campos_512_v4
+70/364275/campos_512_v4
+70/364302/campos_512_v4
+70/364307/campos_512_v4
+70/364309/campos_512_v4
+70/364362/campos_512_v4
+70/364414/campos_512_v4
+70/364452/campos_512_v4
+70/364469/campos_512_v4
+70/364525/campos_512_v4
+70/364603/campos_512_v4
+70/364611/campos_512_v4
+70/364650/campos_512_v4
+70/364673/campos_512_v4
+70/364682/campos_512_v4
+70/364686/campos_512_v4
+70/364704/campos_512_v4
+70/364805/campos_512_v4
+70/364826/campos_512_v4
+70/364867/campos_512_v4
+70/364879/campos_512_v4
+70/364959/campos_512_v4
+70/364968/campos_512_v4
+70/364977/campos_512_v4
+71/365012/campos_512_v4
+71/365032/campos_512_v4
+71/365069/campos_512_v4
+71/365100/campos_512_v4
+71/365111/campos_512_v4
+71/365115/campos_512_v4
+71/365132/campos_512_v4
+71/365174/campos_512_v4
+71/365180/campos_512_v4
+71/365205/campos_512_v4
+71/365215/campos_512_v4
+71/365244/campos_512_v4
+71/365248/campos_512_v4
+71/365260/campos_512_v4
+71/365307/campos_512_v4
+71/365326/campos_512_v4
+71/365374/campos_512_v4
+71/365425/campos_512_v4
+71/365493/campos_512_v4
+71/365522/campos_512_v4
+71/365532/campos_512_v4
+71/365533/campos_512_v4
+71/365535/campos_512_v4
+71/365599/campos_512_v4
+71/365618/campos_512_v4
+71/365632/campos_512_v4
+71/365644/campos_512_v4
+71/365690/campos_512_v4
+71/365808/campos_512_v4
+71/365809/campos_512_v4
+71/365825/campos_512_v4
+71/365905/campos_512_v4
+71/365966/campos_512_v4
+71/365992/campos_512_v4
+71/366016/campos_512_v4
+71/366029/campos_512_v4
+71/366053/campos_512_v4
+71/366056/campos_512_v4
+71/366063/campos_512_v4
+71/366148/campos_512_v4
+71/366154/campos_512_v4
+71/366170/campos_512_v4
+71/366197/campos_512_v4
+71/366198/campos_512_v4
+71/366227/campos_512_v4
+71/366290/campos_512_v4
+71/366308/campos_512_v4
+71/366309/campos_512_v4
+71/366361/campos_512_v4
+71/366368/campos_512_v4
+71/366369/campos_512_v4
+71/366522/campos_512_v4
+71/366525/campos_512_v4
+71/366542/campos_512_v4
+71/366564/campos_512_v4
+71/366577/campos_512_v4
+71/366582/campos_512_v4
+71/366658/campos_512_v4
+71/366683/campos_512_v4
+71/366716/campos_512_v4
+71/366748/campos_512_v4
+71/366788/campos_512_v4
+71/366800/campos_512_v4
+71/366869/campos_512_v4
+71/366975/campos_512_v4
+71/367006/campos_512_v4
+71/367034/campos_512_v4
+71/367047/campos_512_v4
+71/367108/campos_512_v4
+71/367117/campos_512_v4
+71/367167/campos_512_v4
+71/367252/campos_512_v4
+71/367256/campos_512_v4
+71/367261/campos_512_v4
+71/367309/campos_512_v4
+71/367410/campos_512_v4
+71/367434/campos_512_v4
+71/367435/campos_512_v4
+71/367476/campos_512_v4
+71/367494/campos_512_v4
+71/367495/campos_512_v4
+71/367506/campos_512_v4
+71/367635/campos_512_v4
+71/367642/campos_512_v4
+71/367648/campos_512_v4
+71/367663/campos_512_v4
+71/367677/campos_512_v4
+71/367698/campos_512_v4
+71/367771/campos_512_v4
+71/367781/campos_512_v4
+71/367940/campos_512_v4
+71/367952/campos_512_v4
+71/367974/campos_512_v4
+71/367989/campos_512_v4
+71/368022/campos_512_v4
+71/368056/campos_512_v4
+71/368151/campos_512_v4
+71/368165/campos_512_v4
+71/368186/campos_512_v4
+71/368196/campos_512_v4
+71/368213/campos_512_v4
+71/368220/campos_512_v4
+71/368240/campos_512_v4
+71/368252/campos_512_v4
+71/368312/campos_512_v4
+71/368348/campos_512_v4
+71/368366/campos_512_v4
+71/368380/campos_512_v4
+71/368432/campos_512_v4
+71/368537/campos_512_v4
+71/368582/campos_512_v4
+71/368637/campos_512_v4
+71/368648/campos_512_v4
+71/368650/campos_512_v4
+71/368730/campos_512_v4
+71/368731/campos_512_v4
+71/368749/campos_512_v4
+71/368806/campos_512_v4
+71/368816/campos_512_v4
+71/368864/campos_512_v4
+71/368881/campos_512_v4
+71/368929/campos_512_v4
+71/368943/campos_512_v4
+71/368957/campos_512_v4
+71/368959/campos_512_v4
+71/368968/campos_512_v4
+71/368996/campos_512_v4
+71/369003/campos_512_v4
+71/369020/campos_512_v4
+71/369030/campos_512_v4
+71/369033/campos_512_v4
+71/369074/campos_512_v4
+71/369092/campos_512_v4
+71/369123/campos_512_v4
+71/369251/campos_512_v4
+71/369258/campos_512_v4
+71/369263/campos_512_v4
+71/369280/campos_512_v4
+71/369297/campos_512_v4
+71/369317/campos_512_v4
+71/369357/campos_512_v4
+71/369371/campos_512_v4
+71/369372/campos_512_v4
+71/369505/campos_512_v4
+71/369530/campos_512_v4
+71/369558/campos_512_v4
+71/369571/campos_512_v4
+71/369667/campos_512_v4
+71/369684/campos_512_v4
+71/369711/campos_512_v4
+71/369729/campos_512_v4
+71/369777/campos_512_v4
+71/369780/campos_512_v4
+71/369811/campos_512_v4
+71/369868/campos_512_v4
+72/370236/campos_512_v4
+72/370258/campos_512_v4
+72/370294/campos_512_v4
+72/370298/campos_512_v4
+72/370328/campos_512_v4
+72/370377/campos_512_v4
+72/370434/campos_512_v4
+72/370513/campos_512_v4
+72/370529/campos_512_v4
+72/370605/campos_512_v4
+72/370699/campos_512_v4
+72/370745/campos_512_v4
+72/370758/campos_512_v4
+72/370792/campos_512_v4
+72/370795/campos_512_v4
+72/370799/campos_512_v4
+72/370826/campos_512_v4
+72/370848/campos_512_v4
+72/370861/campos_512_v4
+72/370905/campos_512_v4
+72/370958/campos_512_v4
+72/370961/campos_512_v4
+72/370966/campos_512_v4
+72/371081/campos_512_v4
+72/371096/campos_512_v4
+72/371114/campos_512_v4
+72/371192/campos_512_v4
+72/371239/campos_512_v4
+72/371292/campos_512_v4
+72/371381/campos_512_v4
+72/371383/campos_512_v4
+72/371422/campos_512_v4
+72/371432/campos_512_v4
+72/371491/campos_512_v4
+72/371499/campos_512_v4
+72/371512/campos_512_v4
+72/371519/campos_512_v4
+72/371521/campos_512_v4
+72/371523/campos_512_v4
+72/371525/campos_512_v4
+72/371539/campos_512_v4
+72/371553/campos_512_v4
+72/371562/campos_512_v4
+72/371602/campos_512_v4
+72/371626/campos_512_v4
+72/371643/campos_512_v4
+72/371655/campos_512_v4
+72/371657/campos_512_v4
+72/371658/campos_512_v4
+72/371693/campos_512_v4
+72/371696/campos_512_v4
+72/371714/campos_512_v4
+72/371744/campos_512_v4
+72/371753/campos_512_v4
+72/371768/campos_512_v4
+72/371804/campos_512_v4
+72/371858/campos_512_v4
+72/371875/campos_512_v4
+72/371910/campos_512_v4
+72/371952/campos_512_v4
+72/371955/campos_512_v4
+72/371983/campos_512_v4
+72/372038/campos_512_v4
+72/372040/campos_512_v4
+72/372172/campos_512_v4
+72/372239/campos_512_v4
+72/372265/campos_512_v4
+72/372270/campos_512_v4
+72/372271/campos_512_v4
+72/372288/campos_512_v4
+72/372321/campos_512_v4
+72/372324/campos_512_v4
+72/372383/campos_512_v4
+72/372394/campos_512_v4
+72/372424/campos_512_v4
+72/372484/campos_512_v4
+72/372486/campos_512_v4
+72/372496/campos_512_v4
+72/372520/campos_512_v4
+72/372593/campos_512_v4
+72/372599/campos_512_v4
+72/372604/campos_512_v4
+72/372674/campos_512_v4
+72/372801/campos_512_v4
+72/372821/campos_512_v4
+72/372882/campos_512_v4
+72/372890/campos_512_v4
+72/372927/campos_512_v4
+72/372939/campos_512_v4
+72/372948/campos_512_v4
+72/373012/campos_512_v4
+72/373034/campos_512_v4
+72/373051/campos_512_v4
+72/373100/campos_512_v4
+72/373233/campos_512_v4
+72/373283/campos_512_v4
+72/373306/campos_512_v4
+72/373316/campos_512_v4
+72/373330/campos_512_v4
+72/373439/campos_512_v4
+72/373490/campos_512_v4
+72/373494/campos_512_v4
+72/373544/campos_512_v4
+72/373617/campos_512_v4
+72/373632/campos_512_v4
+72/373639/campos_512_v4
+72/373642/campos_512_v4
+72/373645/campos_512_v4
+72/373670/campos_512_v4
+72/373693/campos_512_v4
+72/373712/campos_512_v4
+72/373733/campos_512_v4
+72/373769/campos_512_v4
+72/373791/campos_512_v4
+72/373814/campos_512_v4
+72/373891/campos_512_v4
+72/373902/campos_512_v4
+72/374095/campos_512_v4
+72/374105/campos_512_v4
+72/374106/campos_512_v4
+72/374135/campos_512_v4
+72/374150/campos_512_v4
+72/374206/campos_512_v4
+72/374261/campos_512_v4
+72/374285/campos_512_v4
+72/374298/campos_512_v4
+72/374311/campos_512_v4
+72/374316/campos_512_v4
+72/374326/campos_512_v4
+72/374425/campos_512_v4
+72/374495/campos_512_v4
+72/374514/campos_512_v4
+72/374538/campos_512_v4
+72/374581/campos_512_v4
+72/374701/campos_512_v4
+72/374704/campos_512_v4
+72/374719/campos_512_v4
+72/374724/campos_512_v4
+72/374794/campos_512_v4
+73/375020/campos_512_v4
+73/375021/campos_512_v4
+73/375036/campos_512_v4
+73/375047/campos_512_v4
+73/375063/campos_512_v4
+73/375113/campos_512_v4
+73/375116/campos_512_v4
+73/375164/campos_512_v4
+73/375201/campos_512_v4
+73/375218/campos_512_v4
+73/375224/campos_512_v4
+73/375317/campos_512_v4
+73/375343/campos_512_v4
+73/375370/campos_512_v4
+73/375394/campos_512_v4
+73/375433/campos_512_v4
+73/375492/campos_512_v4
+73/375515/campos_512_v4
+73/375547/campos_512_v4
+73/375558/campos_512_v4
+73/375596/campos_512_v4
+73/375654/campos_512_v4
+73/375688/campos_512_v4
+73/375800/campos_512_v4
+73/375845/campos_512_v4
+73/375847/campos_512_v4
+73/375911/campos_512_v4
+73/375913/campos_512_v4
+73/375952/campos_512_v4
+73/375971/campos_512_v4
+73/375992/campos_512_v4
+73/376127/campos_512_v4
+73/376134/campos_512_v4
+73/376143/campos_512_v4
+73/376166/campos_512_v4
+73/376189/campos_512_v4
+73/376291/campos_512_v4
+73/376419/campos_512_v4
+73/376464/campos_512_v4
+73/376473/campos_512_v4
+73/376510/campos_512_v4
+73/376527/campos_512_v4
+73/376538/campos_512_v4
+73/376547/campos_512_v4
+73/376555/campos_512_v4
+73/376621/campos_512_v4
+73/376628/campos_512_v4
+73/376685/campos_512_v4
+73/376712/campos_512_v4
+73/376715/campos_512_v4
+73/376717/campos_512_v4
+73/376721/campos_512_v4
+73/376729/campos_512_v4
+73/376736/campos_512_v4
+73/376741/campos_512_v4
+73/376759/campos_512_v4
+73/376822/campos_512_v4
+73/376847/campos_512_v4
+73/376853/campos_512_v4
+73/376897/campos_512_v4
+73/376927/campos_512_v4
+73/376954/campos_512_v4
+73/376967/campos_512_v4
+73/376989/campos_512_v4
+73/377005/campos_512_v4
+73/377025/campos_512_v4
+73/377084/campos_512_v4
+73/377088/campos_512_v4
+73/377092/campos_512_v4
+73/377102/campos_512_v4
+73/377117/campos_512_v4
+73/377143/campos_512_v4
+73/377185/campos_512_v4
+73/377212/campos_512_v4
+73/377215/campos_512_v4
+73/377292/campos_512_v4
+73/377303/campos_512_v4
+73/377405/campos_512_v4
+73/377447/campos_512_v4
+73/377458/campos_512_v4
+73/377475/campos_512_v4
+73/377480/campos_512_v4
+73/377502/campos_512_v4
+73/377524/campos_512_v4
+73/377530/campos_512_v4
+73/377540/campos_512_v4
+73/377671/campos_512_v4
+73/377672/campos_512_v4
+73/377677/campos_512_v4
+73/377704/campos_512_v4
+73/377747/campos_512_v4
+73/377800/campos_512_v4
+73/377876/campos_512_v4
+73/377878/campos_512_v4
+73/377879/campos_512_v4
+73/377882/campos_512_v4
+73/377917/campos_512_v4
+73/377928/campos_512_v4
+73/377930/campos_512_v4
+73/377931/campos_512_v4
+73/377932/campos_512_v4
+73/377936/campos_512_v4
+73/378026/campos_512_v4
+73/378035/campos_512_v4
+73/378052/campos_512_v4
+73/378057/campos_512_v4
+73/378077/campos_512_v4
+73/378103/campos_512_v4
+73/378124/campos_512_v4
+73/378202/campos_512_v4
+73/378229/campos_512_v4
+73/378249/campos_512_v4
+73/378322/campos_512_v4
+73/378437/campos_512_v4
+73/378482/campos_512_v4
+73/378556/campos_512_v4
+73/378612/campos_512_v4
+73/378637/campos_512_v4
+73/378661/campos_512_v4
+73/378665/campos_512_v4
+73/378670/campos_512_v4
+73/378807/campos_512_v4
+73/378869/campos_512_v4
+73/378881/campos_512_v4
+73/378957/campos_512_v4
+73/378973/campos_512_v4
+73/378988/campos_512_v4
+73/378998/campos_512_v4
+73/379112/campos_512_v4
+73/379156/campos_512_v4
+73/379162/campos_512_v4
+73/379222/campos_512_v4
+73/379245/campos_512_v4
+73/379516/campos_512_v4
+73/379522/campos_512_v4
+73/379642/campos_512_v4
+73/379655/campos_512_v4
+73/379677/campos_512_v4
+73/379679/campos_512_v4
+73/379719/campos_512_v4
+73/379726/campos_512_v4
+73/379732/campos_512_v4
+73/379828/campos_512_v4
+73/379832/campos_512_v4
+73/379852/campos_512_v4
+73/379878/campos_512_v4
+73/379886/campos_512_v4
+73/379938/campos_512_v4
+73/379953/campos_512_v4
+73/379991/campos_512_v4
+74/380096/campos_512_v4
+74/380098/campos_512_v4
+74/380122/campos_512_v4
+74/380126/campos_512_v4
+74/380132/campos_512_v4
+74/380165/campos_512_v4
+74/380175/campos_512_v4
+74/380210/campos_512_v4
+74/380246/campos_512_v4
+74/380490/campos_512_v4
+74/380586/campos_512_v4
+74/380759/campos_512_v4
+74/380763/campos_512_v4
+74/380765/campos_512_v4
+74/380850/campos_512_v4
+74/380853/campos_512_v4
+74/380890/campos_512_v4
+74/380923/campos_512_v4
+74/380926/campos_512_v4
+74/380931/campos_512_v4
+74/380961/campos_512_v4
+74/380983/campos_512_v4
+74/381004/campos_512_v4
+74/381084/campos_512_v4
+74/381088/campos_512_v4
+74/381090/campos_512_v4
+74/381094/campos_512_v4
+74/381111/campos_512_v4
+74/381115/campos_512_v4
+74/381168/campos_512_v4
+74/381264/campos_512_v4
+74/381307/campos_512_v4
+74/381331/campos_512_v4
+74/381398/campos_512_v4
+74/381407/campos_512_v4
+74/381468/campos_512_v4
+74/381471/campos_512_v4
+74/381555/campos_512_v4
+74/381597/campos_512_v4
+74/381610/campos_512_v4
+74/381634/campos_512_v4
+74/381646/campos_512_v4
+74/381657/campos_512_v4
+74/381678/campos_512_v4
+74/381727/campos_512_v4
+74/381764/campos_512_v4
+74/381932/campos_512_v4
+74/381934/campos_512_v4
+74/381957/campos_512_v4
+74/382003/campos_512_v4
+74/382021/campos_512_v4
+74/382041/campos_512_v4
+74/382044/campos_512_v4
+74/382058/campos_512_v4
+74/382124/campos_512_v4
+74/382128/campos_512_v4
+74/382129/campos_512_v4
+74/382182/campos_512_v4
+74/382249/campos_512_v4
+74/382321/campos_512_v4
+74/382399/campos_512_v4
+74/382422/campos_512_v4
+74/382423/campos_512_v4
+74/382427/campos_512_v4
+74/382441/campos_512_v4
+74/382569/campos_512_v4
+74/382572/campos_512_v4
+74/382686/campos_512_v4
+74/382688/campos_512_v4
+74/382755/campos_512_v4
+74/382790/campos_512_v4
+74/382798/campos_512_v4
+74/382853/campos_512_v4
+74/382864/campos_512_v4
+74/382896/campos_512_v4
+74/382902/campos_512_v4
+74/382981/campos_512_v4
+74/383004/campos_512_v4
+74/383016/campos_512_v4
+74/383071/campos_512_v4
+74/383123/campos_512_v4
+74/383206/campos_512_v4
+74/383260/campos_512_v4
+74/383304/campos_512_v4
+74/383417/campos_512_v4
+74/383428/campos_512_v4
+74/383434/campos_512_v4
+74/383440/campos_512_v4
+74/383469/campos_512_v4
+74/383488/campos_512_v4
+74/383537/campos_512_v4
+74/383543/campos_512_v4
+74/383560/campos_512_v4
+74/383602/campos_512_v4
+74/383609/campos_512_v4
+74/383657/campos_512_v4
+74/383677/campos_512_v4
+74/383735/campos_512_v4
+74/383747/campos_512_v4
+74/383791/campos_512_v4
+74/383827/campos_512_v4
+74/383845/campos_512_v4
+74/383865/campos_512_v4
+74/383904/campos_512_v4
+74/383907/campos_512_v4
+74/383955/campos_512_v4
+74/383961/campos_512_v4
+74/384013/campos_512_v4
+74/384061/campos_512_v4
+74/384070/campos_512_v4
+74/384132/campos_512_v4
+74/384141/campos_512_v4
+74/384151/campos_512_v4
+74/384159/campos_512_v4
+74/384178/campos_512_v4
+74/384204/campos_512_v4
+74/384240/campos_512_v4
+74/384284/campos_512_v4
+74/384329/campos_512_v4
+74/384376/campos_512_v4
+74/384407/campos_512_v4
+74/384478/campos_512_v4
+74/384513/campos_512_v4
+74/384536/campos_512_v4
+74/384559/campos_512_v4
+74/384635/campos_512_v4
+74/384642/campos_512_v4
+74/384680/campos_512_v4
+74/384719/campos_512_v4
+74/384831/campos_512_v4
+74/384853/campos_512_v4
+74/384873/campos_512_v4
+74/384882/campos_512_v4
+74/384894/campos_512_v4
+74/384992/campos_512_v4
+74/384995/campos_512_v4
+75/385026/campos_512_v4
+75/385039/campos_512_v4
+75/385109/campos_512_v4
+75/385176/campos_512_v4
+75/385185/campos_512_v4
+75/385236/campos_512_v4
+75/385459/campos_512_v4
+75/385462/campos_512_v4
+75/385499/campos_512_v4
+75/385541/campos_512_v4
+75/385546/campos_512_v4
+75/385572/campos_512_v4
+75/385610/campos_512_v4
+75/385611/campos_512_v4
+75/385620/campos_512_v4
+75/385635/campos_512_v4
+75/385640/campos_512_v4
+75/385703/campos_512_v4
+75/385722/campos_512_v4
+75/385779/campos_512_v4
+75/385807/campos_512_v4
+75/385825/campos_512_v4
+75/385885/campos_512_v4
+75/385933/campos_512_v4
+75/385991/campos_512_v4
+75/386025/campos_512_v4
+75/386028/campos_512_v4
+75/386059/campos_512_v4
+75/386064/campos_512_v4
+75/386075/campos_512_v4
+75/386187/campos_512_v4
+75/386249/campos_512_v4
+75/386265/campos_512_v4
+75/386317/campos_512_v4
+75/386376/campos_512_v4
+75/386425/campos_512_v4
+75/386478/campos_512_v4
+75/386506/campos_512_v4
+75/386535/campos_512_v4
+75/386581/campos_512_v4
+75/386590/campos_512_v4
+75/386608/campos_512_v4
+75/386614/campos_512_v4
+75/386640/campos_512_v4
+75/386669/campos_512_v4
+75/386715/campos_512_v4
+75/386721/campos_512_v4
+75/386778/campos_512_v4
+75/386815/campos_512_v4
+75/386819/campos_512_v4
+75/386830/campos_512_v4
+75/386863/campos_512_v4
+75/386902/campos_512_v4
+75/386947/campos_512_v4
+75/386988/campos_512_v4
+75/387021/campos_512_v4
+75/387053/campos_512_v4
+75/387056/campos_512_v4
+75/387139/campos_512_v4
+75/387154/campos_512_v4
+75/387172/campos_512_v4
+75/387189/campos_512_v4
+75/387214/campos_512_v4
+75/387221/campos_512_v4
+75/387223/campos_512_v4
+75/387306/campos_512_v4
+75/387321/campos_512_v4
+75/387340/campos_512_v4
+75/387350/campos_512_v4
+75/387391/campos_512_v4
+75/387406/campos_512_v4
+75/387565/campos_512_v4
+75/387573/campos_512_v4
+75/387682/campos_512_v4
+75/387694/campos_512_v4
+75/387721/campos_512_v4
+75/387724/campos_512_v4
+75/387731/campos_512_v4
+75/387744/campos_512_v4
+75/387749/campos_512_v4
+75/387765/campos_512_v4
+75/387795/campos_512_v4
+75/387829/campos_512_v4
+75/387831/campos_512_v4
+75/387912/campos_512_v4
+75/387943/campos_512_v4
+75/387999/campos_512_v4
+75/388023/campos_512_v4
+75/388034/campos_512_v4
+75/388161/campos_512_v4
+75/388232/campos_512_v4
+75/388250/campos_512_v4
+75/388293/campos_512_v4
+75/388348/campos_512_v4
+75/388356/campos_512_v4
+75/388417/campos_512_v4
+75/388470/campos_512_v4
+75/388484/campos_512_v4
+75/388505/campos_512_v4
+75/388517/campos_512_v4
+75/388584/campos_512_v4
+75/388600/campos_512_v4
+75/388617/campos_512_v4
+75/388667/campos_512_v4
+75/388685/campos_512_v4
+75/388693/campos_512_v4
+75/388709/campos_512_v4
+75/388785/campos_512_v4
+75/388796/campos_512_v4
+75/388804/campos_512_v4
+75/388809/campos_512_v4
+75/388844/campos_512_v4
+75/388862/campos_512_v4
+75/388879/campos_512_v4
+75/388920/campos_512_v4
+75/388958/campos_512_v4
+75/388975/campos_512_v4
+75/388977/campos_512_v4
+75/389044/campos_512_v4
+75/389059/campos_512_v4
+75/389123/campos_512_v4
+75/389162/campos_512_v4
+75/389165/campos_512_v4
+75/389252/campos_512_v4
+75/389256/campos_512_v4
+75/389262/campos_512_v4
+75/389274/campos_512_v4
+75/389286/campos_512_v4
+75/389331/campos_512_v4
+75/389385/campos_512_v4
+75/389439/campos_512_v4
+75/389454/campos_512_v4
+75/389457/campos_512_v4
+75/389575/campos_512_v4
+75/389671/campos_512_v4
+75/389731/campos_512_v4
+75/389776/campos_512_v4
+75/389795/campos_512_v4
+75/389799/campos_512_v4
+75/389804/campos_512_v4
+75/389817/campos_512_v4
+75/389870/campos_512_v4
+75/389990/campos_512_v4
+76/390076/campos_512_v4
+76/390104/campos_512_v4
+76/390123/campos_512_v4
+76/390153/campos_512_v4
+76/390224/campos_512_v4
+76/390270/campos_512_v4
+76/390287/campos_512_v4
+76/390319/campos_512_v4
+76/390369/campos_512_v4
+76/390398/campos_512_v4
+76/390432/campos_512_v4
+76/390455/campos_512_v4
+76/390461/campos_512_v4
+76/390486/campos_512_v4
+76/390516/campos_512_v4
+76/390518/campos_512_v4
+76/390520/campos_512_v4
+76/390527/campos_512_v4
+76/390562/campos_512_v4
+76/390575/campos_512_v4
+76/390587/campos_512_v4
+76/390635/campos_512_v4
+76/390666/campos_512_v4
+76/390668/campos_512_v4
+76/390756/campos_512_v4
+76/390765/campos_512_v4
+76/390852/campos_512_v4
+76/390858/campos_512_v4
+76/390861/campos_512_v4
+76/390865/campos_512_v4
+76/390869/campos_512_v4
+76/390877/campos_512_v4
+76/390971/campos_512_v4
+76/390994/campos_512_v4
+76/391006/campos_512_v4
+76/391024/campos_512_v4
+76/391217/campos_512_v4
+76/391221/campos_512_v4
+76/391258/campos_512_v4
+76/391262/campos_512_v4
+76/391291/campos_512_v4
+76/391297/campos_512_v4
+76/391316/campos_512_v4
+76/391318/campos_512_v4
+76/391323/campos_512_v4
+76/391348/campos_512_v4
+76/391423/campos_512_v4
+76/391463/campos_512_v4
+76/391469/campos_512_v4
+76/391481/campos_512_v4
+76/391494/campos_512_v4
+76/391561/campos_512_v4
+76/391566/campos_512_v4
+76/391568/campos_512_v4
+76/391592/campos_512_v4
+76/391632/campos_512_v4
+76/391633/campos_512_v4
+76/391658/campos_512_v4
+76/391752/campos_512_v4
+76/391767/campos_512_v4
+76/391781/campos_512_v4
+76/391819/campos_512_v4
+76/391858/campos_512_v4
+76/391881/campos_512_v4
+76/391901/campos_512_v4
+76/391903/campos_512_v4
+76/391961/campos_512_v4
+76/391969/campos_512_v4
+76/392040/campos_512_v4
+76/392130/campos_512_v4
+76/392209/campos_512_v4
+76/392228/campos_512_v4
+76/392343/campos_512_v4
+76/392399/campos_512_v4
+76/392424/campos_512_v4
+76/392452/campos_512_v4
+76/392555/campos_512_v4
+76/392558/campos_512_v4
+76/392581/campos_512_v4
+76/392711/campos_512_v4
+76/392733/campos_512_v4
+76/392774/campos_512_v4
+76/392817/campos_512_v4
+76/392831/campos_512_v4
+76/392840/campos_512_v4
+76/392856/campos_512_v4
+76/392884/campos_512_v4
+76/392891/campos_512_v4
+76/392904/campos_512_v4
+76/392905/campos_512_v4
+76/392927/campos_512_v4
+76/392932/campos_512_v4
+76/392957/campos_512_v4
+76/392960/campos_512_v4
+76/393005/campos_512_v4
+76/393034/campos_512_v4
+76/393074/campos_512_v4
+76/393123/campos_512_v4
+76/393148/campos_512_v4
+76/393174/campos_512_v4
+76/393178/campos_512_v4
+76/393222/campos_512_v4
+76/393290/campos_512_v4
+76/393334/campos_512_v4
+76/393371/campos_512_v4
+76/393374/campos_512_v4
+76/393395/campos_512_v4
+76/393470/campos_512_v4
+76/393474/campos_512_v4
+76/393482/campos_512_v4
+76/393496/campos_512_v4
+76/393545/campos_512_v4
+76/393554/campos_512_v4
+76/393557/campos_512_v4
+76/393571/campos_512_v4
+76/393591/campos_512_v4
+76/393596/campos_512_v4
+76/393612/campos_512_v4
+76/393658/campos_512_v4
+76/393756/campos_512_v4
+76/393769/campos_512_v4
+76/393776/campos_512_v4
+76/393781/campos_512_v4
+76/393801/campos_512_v4
+76/393819/campos_512_v4
+76/393919/campos_512_v4
+76/393928/campos_512_v4
+76/393944/campos_512_v4
+76/393947/campos_512_v4
+76/393953/campos_512_v4
+76/394013/campos_512_v4
+76/394016/campos_512_v4
+76/394020/campos_512_v4
+76/394112/campos_512_v4
+76/394133/campos_512_v4
+76/394134/campos_512_v4
+76/394164/campos_512_v4
+76/394174/campos_512_v4
+76/394181/campos_512_v4
+76/394204/campos_512_v4
+76/394228/campos_512_v4
+76/394268/campos_512_v4
+76/394284/campos_512_v4
+76/394289/campos_512_v4
+76/394294/campos_512_v4
+76/394301/campos_512_v4
+76/394321/campos_512_v4
+76/394331/campos_512_v4
+76/394341/campos_512_v4
+76/394361/campos_512_v4
+76/394430/campos_512_v4
+76/394444/campos_512_v4
+76/394539/campos_512_v4
+76/394555/campos_512_v4
+76/394595/campos_512_v4
+76/394600/campos_512_v4
+76/394683/campos_512_v4
+76/394708/campos_512_v4
+76/394735/campos_512_v4
+76/394755/campos_512_v4
+76/394774/campos_512_v4
+76/394781/campos_512_v4
+76/394802/campos_512_v4
+76/394811/campos_512_v4
+76/394888/campos_512_v4
+76/394923/campos_512_v4
+76/394951/campos_512_v4
+76/394973/campos_512_v4
+77/395060/campos_512_v4
+77/395081/campos_512_v4
+77/395094/campos_512_v4
+77/395110/campos_512_v4
+77/395147/campos_512_v4
+77/395148/campos_512_v4
+77/395181/campos_512_v4
+77/395194/campos_512_v4
+77/395213/campos_512_v4
+77/395233/campos_512_v4
+77/395245/campos_512_v4
+77/395258/campos_512_v4
+77/395267/campos_512_v4
+77/395272/campos_512_v4
+77/395289/campos_512_v4
+77/395309/campos_512_v4
+77/395328/campos_512_v4
+77/395355/campos_512_v4
+77/395440/campos_512_v4
+77/395480/campos_512_v4
+77/395526/campos_512_v4
+77/395543/campos_512_v4
+77/395573/campos_512_v4
+77/395618/campos_512_v4
+77/395631/campos_512_v4
+77/395632/campos_512_v4
+77/395651/campos_512_v4
+77/395706/campos_512_v4
+77/395904/campos_512_v4
+77/395905/campos_512_v4
+77/395913/campos_512_v4
+77/395921/campos_512_v4
+77/395944/campos_512_v4
+77/395955/campos_512_v4
+77/395964/campos_512_v4
+77/396002/campos_512_v4
+77/396016/campos_512_v4
+77/396053/campos_512_v4
+77/396183/campos_512_v4
+77/396195/campos_512_v4
+77/396201/campos_512_v4
+77/396210/campos_512_v4
+77/396211/campos_512_v4
+77/396219/campos_512_v4
+77/396221/campos_512_v4
+77/396235/campos_512_v4
+77/396238/campos_512_v4
+77/396331/campos_512_v4
+77/396360/campos_512_v4
+77/396392/campos_512_v4
+77/396396/campos_512_v4
+77/396439/campos_512_v4
+77/396487/campos_512_v4
+77/396488/campos_512_v4
+77/396492/campos_512_v4
+77/396497/campos_512_v4
+77/396522/campos_512_v4
+77/396551/campos_512_v4
+77/396590/campos_512_v4
+77/396603/campos_512_v4
+77/396638/campos_512_v4
+77/396645/campos_512_v4
+77/396665/campos_512_v4
+77/396698/campos_512_v4
+77/396706/campos_512_v4
+77/396727/campos_512_v4
+77/396735/campos_512_v4
+77/396765/campos_512_v4
+77/396809/campos_512_v4
+77/396892/campos_512_v4
+77/396912/campos_512_v4
+77/396955/campos_512_v4
+77/396994/campos_512_v4
+77/396999/campos_512_v4
+77/397022/campos_512_v4
+77/397039/campos_512_v4
+77/397086/campos_512_v4
+77/397098/campos_512_v4
+77/397112/campos_512_v4
+77/397138/campos_512_v4
+77/397197/campos_512_v4
+77/397208/campos_512_v4
+77/397224/campos_512_v4
+77/397231/campos_512_v4
+77/397265/campos_512_v4
+77/397355/campos_512_v4
+77/397356/campos_512_v4
+77/397379/campos_512_v4
+77/397444/campos_512_v4
+77/397489/campos_512_v4
+77/397503/campos_512_v4
+77/397550/campos_512_v4
+77/397604/campos_512_v4
+77/397620/campos_512_v4
+77/397621/campos_512_v4
+77/397622/campos_512_v4
+77/397777/campos_512_v4
+77/397778/campos_512_v4
+77/397823/campos_512_v4
+77/397832/campos_512_v4
+77/397841/campos_512_v4
+77/397915/campos_512_v4
+77/397936/campos_512_v4
+77/397944/campos_512_v4
+77/397985/campos_512_v4
+77/398001/campos_512_v4
+77/398004/campos_512_v4
+77/398026/campos_512_v4
+77/398070/campos_512_v4
+77/398210/campos_512_v4
+77/398226/campos_512_v4
+77/398230/campos_512_v4
+77/398238/campos_512_v4
+77/398240/campos_512_v4
+77/398272/campos_512_v4
+77/398308/campos_512_v4
+77/398346/campos_512_v4
+77/398394/campos_512_v4
+77/398398/campos_512_v4
+77/398410/campos_512_v4
+77/398415/campos_512_v4
+77/398512/campos_512_v4
+77/398606/campos_512_v4
+77/398637/campos_512_v4
+77/398649/campos_512_v4
+77/398670/campos_512_v4
+77/398698/campos_512_v4
+77/398708/campos_512_v4
+77/398737/campos_512_v4
+77/398786/campos_512_v4
+77/398836/campos_512_v4
+77/398837/campos_512_v4
+77/398906/campos_512_v4
+77/399053/campos_512_v4
+77/399054/campos_512_v4
+77/399107/campos_512_v4
+77/399111/campos_512_v4
+77/399186/campos_512_v4
+77/399191/campos_512_v4
+77/399199/campos_512_v4
+77/399246/campos_512_v4
+77/399279/campos_512_v4
+77/399322/campos_512_v4
+77/399370/campos_512_v4
+77/399371/campos_512_v4
+77/399401/campos_512_v4
+77/399475/campos_512_v4
+77/399482/campos_512_v4
+77/399553/campos_512_v4
+77/399600/campos_512_v4
+77/399641/campos_512_v4
+77/399692/campos_512_v4
+77/399769/campos_512_v4
+77/399874/campos_512_v4
+77/399905/campos_512_v4
+77/399906/campos_512_v4
+77/399955/campos_512_v4
+77/399994/campos_512_v4
+78/400034/campos_512_v4
+78/400058/campos_512_v4
+78/400140/campos_512_v4
+78/400162/campos_512_v4
+78/400164/campos_512_v4
+78/400192/campos_512_v4
+78/400220/campos_512_v4
+78/400229/campos_512_v4
+78/400235/campos_512_v4
+78/400250/campos_512_v4
+78/400268/campos_512_v4
+78/400308/campos_512_v4
+78/400335/campos_512_v4
+78/400354/campos_512_v4
+78/400395/campos_512_v4
+78/400407/campos_512_v4
+78/400419/campos_512_v4
+78/400448/campos_512_v4
+78/400483/campos_512_v4
+78/400520/campos_512_v4
+78/400524/campos_512_v4
+78/400534/campos_512_v4
+78/400562/campos_512_v4
+78/400564/campos_512_v4
+78/400629/campos_512_v4
+78/400642/campos_512_v4
+78/400647/campos_512_v4
+78/400693/campos_512_v4
+78/400711/campos_512_v4
+78/400712/campos_512_v4
+78/400740/campos_512_v4
+78/400758/campos_512_v4
+78/400769/campos_512_v4
+78/400798/campos_512_v4
+78/400800/campos_512_v4
+78/400864/campos_512_v4
+78/400937/campos_512_v4
+78/400953/campos_512_v4
+78/401037/campos_512_v4
+78/401116/campos_512_v4
+78/401143/campos_512_v4
+78/401147/campos_512_v4
+78/401223/campos_512_v4
+78/401254/campos_512_v4
+78/401291/campos_512_v4
+78/401376/campos_512_v4
+78/401407/campos_512_v4
+78/401445/campos_512_v4
+78/401471/campos_512_v4
+78/401482/campos_512_v4
+78/401484/campos_512_v4
+78/401517/campos_512_v4
+78/401526/campos_512_v4
+78/401554/campos_512_v4
+78/401613/campos_512_v4
+78/401614/campos_512_v4
+78/401645/campos_512_v4
+78/401658/campos_512_v4
+78/401661/campos_512_v4
+78/401688/campos_512_v4
+78/401714/campos_512_v4
+78/401715/campos_512_v4
+78/401752/campos_512_v4
+78/401779/campos_512_v4
+78/401786/campos_512_v4
+78/401799/campos_512_v4
+78/401884/campos_512_v4
+78/401893/campos_512_v4
+78/401930/campos_512_v4
+78/401944/campos_512_v4
+78/401960/campos_512_v4
+78/401968/campos_512_v4
+78/401974/campos_512_v4
+78/401999/campos_512_v4
+78/402061/campos_512_v4
+78/402081/campos_512_v4
+78/402093/campos_512_v4
+78/402098/campos_512_v4
+78/402106/campos_512_v4
+78/402189/campos_512_v4
+78/402204/campos_512_v4
+78/402259/campos_512_v4
+78/402265/campos_512_v4
+78/402284/campos_512_v4
+78/402289/campos_512_v4
+78/402351/campos_512_v4
+78/402362/campos_512_v4
+78/402363/campos_512_v4
+78/402389/campos_512_v4
+78/402454/campos_512_v4
+78/402467/campos_512_v4
+78/402468/campos_512_v4
+78/402476/campos_512_v4
+78/402529/campos_512_v4
+78/402544/campos_512_v4
+78/402552/campos_512_v4
+78/402559/campos_512_v4
+78/402560/campos_512_v4
+78/402579/campos_512_v4
+78/402596/campos_512_v4
+78/402705/campos_512_v4
+78/402833/campos_512_v4
+78/402865/campos_512_v4
+78/402881/campos_512_v4
+78/402913/campos_512_v4
+78/402969/campos_512_v4
+78/403027/campos_512_v4
+78/403042/campos_512_v4
+78/403076/campos_512_v4
+78/403097/campos_512_v4
+78/403166/campos_512_v4
+78/403185/campos_512_v4
+78/403209/campos_512_v4
+78/403275/campos_512_v4
+78/403321/campos_512_v4
+78/403351/campos_512_v4
+78/403371/campos_512_v4
+78/403375/campos_512_v4
+78/403377/campos_512_v4
+78/403384/campos_512_v4
+78/403399/campos_512_v4
+78/403411/campos_512_v4
+78/403471/campos_512_v4
+78/403484/campos_512_v4
+78/403518/campos_512_v4
+78/403567/campos_512_v4
+78/403584/campos_512_v4
+78/403619/campos_512_v4
+78/403635/campos_512_v4
+78/403700/campos_512_v4
+78/403742/campos_512_v4
+78/403751/campos_512_v4
+78/403782/campos_512_v4
+78/403786/campos_512_v4
+78/403803/campos_512_v4
+78/403843/campos_512_v4
+78/403948/campos_512_v4
+78/403959/campos_512_v4
+78/403969/campos_512_v4
+78/403985/campos_512_v4
+78/403989/campos_512_v4
+78/404001/campos_512_v4
+78/404013/campos_512_v4
+78/404019/campos_512_v4
+78/404047/campos_512_v4
+78/404106/campos_512_v4
+78/404114/campos_512_v4
+78/404115/campos_512_v4
+78/404163/campos_512_v4
+78/404206/campos_512_v4
+78/404215/campos_512_v4
+78/404253/campos_512_v4
+78/404278/campos_512_v4
+78/404282/campos_512_v4
+78/404320/campos_512_v4
+78/404323/campos_512_v4
+78/404326/campos_512_v4
+78/404329/campos_512_v4
+78/404341/campos_512_v4
+78/404441/campos_512_v4
+78/404515/campos_512_v4
+78/404563/campos_512_v4
+78/404642/campos_512_v4
+78/404655/campos_512_v4
+78/404665/campos_512_v4
+78/404708/campos_512_v4
+78/404747/campos_512_v4
+78/404769/campos_512_v4
+78/404776/campos_512_v4
+78/404805/campos_512_v4
+78/404808/campos_512_v4
+78/404840/campos_512_v4
+78/404845/campos_512_v4
+78/404853/campos_512_v4
+78/404906/campos_512_v4
+78/404933/campos_512_v4
+78/404977/campos_512_v4
+78/404986/campos_512_v4
+79/405003/campos_512_v4
+79/405151/campos_512_v4
+79/405212/campos_512_v4
+79/405219/campos_512_v4
+79/405241/campos_512_v4
+79/405254/campos_512_v4
+79/405340/campos_512_v4
+79/405367/campos_512_v4
+79/405406/campos_512_v4
+79/405416/campos_512_v4
+79/405425/campos_512_v4
+79/405477/campos_512_v4
+79/405541/campos_512_v4
+79/405581/campos_512_v4
+79/405623/campos_512_v4
+79/405671/campos_512_v4
+79/405675/campos_512_v4
+79/405694/campos_512_v4
+79/405722/campos_512_v4
+79/405747/campos_512_v4
+79/405824/campos_512_v4
+79/405847/campos_512_v4
+79/405849/campos_512_v4
+79/405899/campos_512_v4
+79/405961/campos_512_v4
+79/405972/campos_512_v4
+79/405979/campos_512_v4
+79/405990/campos_512_v4
+79/405993/campos_512_v4
+79/406028/campos_512_v4
+79/406216/campos_512_v4
+79/406218/campos_512_v4
+79/406267/campos_512_v4
+79/406268/campos_512_v4
+79/406368/campos_512_v4
+79/406376/campos_512_v4
+79/406380/campos_512_v4
+79/406443/campos_512_v4
+79/406454/campos_512_v4
+79/406504/campos_512_v4
+79/406516/campos_512_v4
+79/406532/campos_512_v4
+79/406543/campos_512_v4
+79/406567/campos_512_v4
+79/406584/campos_512_v4
+79/406604/campos_512_v4
+79/406606/campos_512_v4
+79/406616/campos_512_v4
+79/406719/campos_512_v4
+79/406841/campos_512_v4
+79/406856/campos_512_v4
+79/406879/campos_512_v4
+79/407017/campos_512_v4
+79/407025/campos_512_v4
+79/407040/campos_512_v4
+79/407068/campos_512_v4
+79/407086/campos_512_v4
+79/407116/campos_512_v4
+79/407145/campos_512_v4
+79/407153/campos_512_v4
+79/407154/campos_512_v4
+79/407205/campos_512_v4
+79/407217/campos_512_v4
+79/407231/campos_512_v4
+79/407238/campos_512_v4
+79/407263/campos_512_v4
+79/407265/campos_512_v4
+79/407379/campos_512_v4
+79/407425/campos_512_v4
+79/407463/campos_512_v4
+79/407465/campos_512_v4
+79/407473/campos_512_v4
+79/407515/campos_512_v4
+79/407549/campos_512_v4
+79/407551/campos_512_v4
+79/407669/campos_512_v4
+79/407698/campos_512_v4
+79/407715/campos_512_v4
+79/407730/campos_512_v4
+79/407757/campos_512_v4
+79/407768/campos_512_v4
+79/407769/campos_512_v4
+79/407773/campos_512_v4
+79/407774/campos_512_v4
+79/407791/campos_512_v4
+79/407819/campos_512_v4
+79/407842/campos_512_v4
+79/407864/campos_512_v4
+79/407887/campos_512_v4
+79/407899/campos_512_v4
+79/407931/campos_512_v4
+79/407946/campos_512_v4
+79/407978/campos_512_v4
+79/407993/campos_512_v4
+79/408032/campos_512_v4
+79/408048/campos_512_v4
+79/408055/campos_512_v4
+79/408102/campos_512_v4
+79/408168/campos_512_v4
+79/408183/campos_512_v4
+79/408192/campos_512_v4
+79/408202/campos_512_v4
+79/408209/campos_512_v4
+79/408230/campos_512_v4
+79/408252/campos_512_v4
+79/408255/campos_512_v4
+79/408266/campos_512_v4
+79/408335/campos_512_v4
+79/408340/campos_512_v4
+79/408394/campos_512_v4
+79/408405/campos_512_v4
+79/408417/campos_512_v4
+79/408457/campos_512_v4
+79/408477/campos_512_v4
+79/408543/campos_512_v4
+79/408573/campos_512_v4
+79/408581/campos_512_v4
+79/408600/campos_512_v4
+79/408716/campos_512_v4
+79/408720/campos_512_v4
+79/408729/campos_512_v4
+79/408743/campos_512_v4
+79/408758/campos_512_v4
+79/408831/campos_512_v4
+79/408841/campos_512_v4
+79/408844/campos_512_v4
+79/408861/campos_512_v4
+79/408918/campos_512_v4
+79/408939/campos_512_v4
+79/408956/campos_512_v4
+79/408992/campos_512_v4
+79/409010/campos_512_v4
+79/409011/campos_512_v4
+79/409029/campos_512_v4
+79/409048/campos_512_v4
+79/409054/campos_512_v4
+79/409058/campos_512_v4
+79/409072/campos_512_v4
+79/409110/campos_512_v4
+79/409175/campos_512_v4
+79/409176/campos_512_v4
+79/409202/campos_512_v4
+79/409237/campos_512_v4
+79/409238/campos_512_v4
+79/409276/campos_512_v4
+79/409298/campos_512_v4
+79/409313/campos_512_v4
+79/409408/campos_512_v4
+79/409427/campos_512_v4
+79/409454/campos_512_v4
+79/409476/campos_512_v4
+79/409499/campos_512_v4
+79/409569/campos_512_v4
+79/409572/campos_512_v4
+79/409585/campos_512_v4
+79/409591/campos_512_v4
+79/409611/campos_512_v4
+79/409634/campos_512_v4
+79/409671/campos_512_v4
+79/409688/campos_512_v4
+79/409730/campos_512_v4
+79/409764/campos_512_v4
+79/409794/campos_512_v4
+79/409842/campos_512_v4
+79/409875/campos_512_v4
+79/409889/campos_512_v4
+79/409904/campos_512_v4
+79/409910/campos_512_v4
+79/409914/campos_512_v4
+79/409919/campos_512_v4
+79/409995/campos_512_v4
+8/50040/campos_512_v4
+8/50062/campos_512_v4
+8/50063/campos_512_v4
+8/50096/campos_512_v4
+8/50112/campos_512_v4
+8/50159/campos_512_v4
+8/50177/campos_512_v4
+8/50181/campos_512_v4
+8/50185/campos_512_v4
+8/50197/campos_512_v4
+8/50198/campos_512_v4
+8/50204/campos_512_v4
+8/50206/campos_512_v4
+8/50212/campos_512_v4
+8/50234/campos_512_v4
+8/50260/campos_512_v4
+8/50263/campos_512_v4
+8/50272/campos_512_v4
+8/50277/campos_512_v4
+8/50290/campos_512_v4
+8/50292/campos_512_v4
+8/50331/campos_512_v4
+8/50335/campos_512_v4
+8/50352/campos_512_v4
+8/50436/campos_512_v4
+8/50462/campos_512_v4
+8/50477/campos_512_v4
+8/50495/campos_512_v4
+8/50528/campos_512_v4
+8/50538/campos_512_v4
+8/50574/campos_512_v4
+8/50598/campos_512_v4
+8/50623/campos_512_v4
+8/50624/campos_512_v4
+8/50655/campos_512_v4
+8/50662/campos_512_v4
+8/50667/campos_512_v4
+8/50694/campos_512_v4
+8/50730/campos_512_v4
+8/50774/campos_512_v4
+8/50798/campos_512_v4
+8/50841/campos_512_v4
+8/50890/campos_512_v4
+8/50906/campos_512_v4
+8/50924/campos_512_v4
+8/50925/campos_512_v4
+8/50926/campos_512_v4
+8/50936/campos_512_v4
+8/50940/campos_512_v4
+8/50956/campos_512_v4
+8/51029/campos_512_v4
+8/51055/campos_512_v4
+8/51078/campos_512_v4
+8/51092/campos_512_v4
+8/51093/campos_512_v4
+8/51098/campos_512_v4
+8/51106/campos_512_v4
+8/51183/campos_512_v4
+8/51207/campos_512_v4
+8/51230/campos_512_v4
+8/51246/campos_512_v4
+8/51251/campos_512_v4
+8/51252/campos_512_v4
+8/51294/campos_512_v4
+8/51305/campos_512_v4
+8/51315/campos_512_v4
+8/51323/campos_512_v4
+8/51328/campos_512_v4
+8/51357/campos_512_v4
+8/51361/campos_512_v4
+8/51363/campos_512_v4
+8/51371/campos_512_v4
+8/51377/campos_512_v4
+8/51455/campos_512_v4
+8/51459/campos_512_v4
+8/51477/campos_512_v4
+8/51480/campos_512_v4
+8/51511/campos_512_v4
+8/51520/campos_512_v4
+8/51527/campos_512_v4
+8/51528/campos_512_v4
+8/51549/campos_512_v4
+8/51591/campos_512_v4
+8/51614/campos_512_v4
+8/51636/campos_512_v4
+8/51650/campos_512_v4
+8/51683/campos_512_v4
+8/51745/campos_512_v4
+8/51805/campos_512_v4
+8/51850/campos_512_v4
+8/51866/campos_512_v4
+8/51871/campos_512_v4
+8/51970/campos_512_v4
+8/51988/campos_512_v4
+8/52020/campos_512_v4
+8/52046/campos_512_v4
+8/52058/campos_512_v4
+8/52067/campos_512_v4
+8/52073/campos_512_v4
+8/52088/campos_512_v4
+8/52103/campos_512_v4
+8/52108/campos_512_v4
+8/52125/campos_512_v4
+8/52172/campos_512_v4
+8/52175/campos_512_v4
+8/52195/campos_512_v4
+8/52212/campos_512_v4
+8/52224/campos_512_v4
+8/52265/campos_512_v4
+8/52285/campos_512_v4
+8/52293/campos_512_v4
+8/52314/campos_512_v4
+8/52321/campos_512_v4
+8/52326/campos_512_v4
+8/52357/campos_512_v4
+8/52380/campos_512_v4
+8/52430/campos_512_v4
+8/52445/campos_512_v4
+8/52455/campos_512_v4
+8/52572/campos_512_v4
+8/52576/campos_512_v4
+8/52585/campos_512_v4
+8/52597/campos_512_v4
+8/52601/campos_512_v4
+8/52610/campos_512_v4
+8/52611/campos_512_v4
+8/52629/campos_512_v4
+8/52636/campos_512_v4
+8/52637/campos_512_v4
+8/52643/campos_512_v4
+8/52669/campos_512_v4
+8/52691/campos_512_v4
+8/52706/campos_512_v4
+8/52721/campos_512_v4
+8/52731/campos_512_v4
+8/52743/campos_512_v4
+8/52772/campos_512_v4
+8/52881/campos_512_v4
+8/52887/campos_512_v4
+8/52899/campos_512_v4
+8/52946/campos_512_v4
+8/52962/campos_512_v4
+8/52987/campos_512_v4
+8/52988/campos_512_v4
+8/52994/campos_512_v4
+8/53065/campos_512_v4
+8/53081/campos_512_v4
+8/53133/campos_512_v4
+8/53202/campos_512_v4
+8/53213/campos_512_v4
+8/53251/campos_512_v4
+8/53261/campos_512_v4
+8/53263/campos_512_v4
+8/53269/campos_512_v4
+8/53283/campos_512_v4
+8/53340/campos_512_v4
+8/53367/campos_512_v4
+8/53415/campos_512_v4
+8/53420/campos_512_v4
+8/53431/campos_512_v4
+8/53469/campos_512_v4
+8/53513/campos_512_v4
+8/53534/campos_512_v4
+8/53551/campos_512_v4
+8/53566/campos_512_v4
+8/53573/campos_512_v4
+8/53595/campos_512_v4
+8/53651/campos_512_v4
+8/53660/campos_512_v4
+8/53699/campos_512_v4
+8/53713/campos_512_v4
+8/53731/campos_512_v4
+8/53746/campos_512_v4
+8/53760/campos_512_v4
+8/53774/campos_512_v4
+8/53792/campos_512_v4
+8/53837/campos_512_v4
+8/53843/campos_512_v4
+8/53866/campos_512_v4
+8/53869/campos_512_v4
+8/53911/campos_512_v4
+8/53921/campos_512_v4
+8/53925/campos_512_v4
+8/53928/campos_512_v4
+8/53949/campos_512_v4
+8/53953/campos_512_v4
+8/53959/campos_512_v4
+8/54007/campos_512_v4
+8/54048/campos_512_v4
+8/54066/campos_512_v4
+8/54085/campos_512_v4
+8/54087/campos_512_v4
+8/54122/campos_512_v4
+8/54131/campos_512_v4
+8/54132/campos_512_v4
+8/54142/campos_512_v4
+8/54155/campos_512_v4
+8/54157/campos_512_v4
+8/54187/campos_512_v4
+8/54255/campos_512_v4
+8/54263/campos_512_v4
+8/54276/campos_512_v4
+8/54288/campos_512_v4
+8/54305/campos_512_v4
+8/54306/campos_512_v4
+8/54335/campos_512_v4
+8/54382/campos_512_v4
+8/54384/campos_512_v4
+8/54451/campos_512_v4
+8/54506/campos_512_v4
+8/54545/campos_512_v4
+8/54556/campos_512_v4
+8/54611/campos_512_v4
+8/54635/campos_512_v4
+8/54637/campos_512_v4
+8/54639/campos_512_v4
+8/54641/campos_512_v4
+8/54644/campos_512_v4
+8/54701/campos_512_v4
+8/54749/campos_512_v4
+8/54760/campos_512_v4
+8/54768/campos_512_v4
+8/54802/campos_512_v4
+8/54839/campos_512_v4
+8/54841/campos_512_v4
+8/54862/campos_512_v4
+8/54885/campos_512_v4
+8/54915/campos_512_v4
+8/54918/campos_512_v4
+8/54929/campos_512_v4
+8/54933/campos_512_v4
+8/54936/campos_512_v4
+8/54944/campos_512_v4
+8/54946/campos_512_v4
+8/54958/campos_512_v4
+80/410018/campos_512_v4
+80/410025/campos_512_v4
+80/410032/campos_512_v4
+80/410054/campos_512_v4
+80/410064/campos_512_v4
+80/410104/campos_512_v4
+80/410155/campos_512_v4
+80/410208/campos_512_v4
+80/410226/campos_512_v4
+80/410233/campos_512_v4
+80/410237/campos_512_v4
+80/410283/campos_512_v4
+80/410295/campos_512_v4
+80/410319/campos_512_v4
+80/410322/campos_512_v4
+80/410395/campos_512_v4
+80/410433/campos_512_v4
+80/410442/campos_512_v4
+80/410469/campos_512_v4
+80/410477/campos_512_v4
+80/410480/campos_512_v4
+80/410490/campos_512_v4
+80/410511/campos_512_v4
+80/410567/campos_512_v4
+80/410585/campos_512_v4
+80/410591/campos_512_v4
+80/410609/campos_512_v4
+80/410651/campos_512_v4
+80/410701/campos_512_v4
+80/410711/campos_512_v4
+80/410781/campos_512_v4
+80/410785/campos_512_v4
+80/410807/campos_512_v4
+80/410845/campos_512_v4
+80/410879/campos_512_v4
+80/410888/campos_512_v4
+80/410940/campos_512_v4
+80/410964/campos_512_v4
+80/410996/campos_512_v4
+80/411055/campos_512_v4
+80/411094/campos_512_v4
+80/411147/campos_512_v4
+80/411149/campos_512_v4
+80/411153/campos_512_v4
+80/411167/campos_512_v4
+80/411186/campos_512_v4
+80/411197/campos_512_v4
+80/411207/campos_512_v4
+80/411232/campos_512_v4
+80/411286/campos_512_v4
+80/411369/campos_512_v4
+80/411384/campos_512_v4
+80/411394/campos_512_v4
+80/411454/campos_512_v4
+80/411459/campos_512_v4
+80/411498/campos_512_v4
+80/411507/campos_512_v4
+80/411580/campos_512_v4
+80/411587/campos_512_v4
+80/411645/campos_512_v4
+80/411690/campos_512_v4
+80/411760/campos_512_v4
+80/411804/campos_512_v4
+80/411820/campos_512_v4
+80/411839/campos_512_v4
+80/411877/campos_512_v4
+80/411880/campos_512_v4
+80/411886/campos_512_v4
+80/411936/campos_512_v4
+80/411941/campos_512_v4
+80/411956/campos_512_v4
+80/411965/campos_512_v4
+80/411978/campos_512_v4
+80/411983/campos_512_v4
+80/411990/campos_512_v4
+80/411999/campos_512_v4
+80/412022/campos_512_v4
+80/412028/campos_512_v4
+80/412061/campos_512_v4
+80/412063/campos_512_v4
+80/412073/campos_512_v4
+80/412087/campos_512_v4
+80/412128/campos_512_v4
+80/412157/campos_512_v4
+80/412158/campos_512_v4
+80/412184/campos_512_v4
+80/412257/campos_512_v4
+80/412262/campos_512_v4
+80/412338/campos_512_v4
+80/412362/campos_512_v4
+80/412386/campos_512_v4
+80/412387/campos_512_v4
+80/412398/campos_512_v4
+80/412421/campos_512_v4
+80/412425/campos_512_v4
+80/412434/campos_512_v4
+80/412437/campos_512_v4
+80/412496/campos_512_v4
+80/412518/campos_512_v4
+80/412527/campos_512_v4
+80/412535/campos_512_v4
+80/412563/campos_512_v4
+80/412575/campos_512_v4
+80/412596/campos_512_v4
+80/412614/campos_512_v4
+80/412857/campos_512_v4
+80/412875/campos_512_v4
+80/412910/campos_512_v4
+80/412922/campos_512_v4
+80/413010/campos_512_v4
+80/413029/campos_512_v4
+80/413031/campos_512_v4
+80/413056/campos_512_v4
+80/413083/campos_512_v4
+80/413154/campos_512_v4
+80/413201/campos_512_v4
+80/413209/campos_512_v4
+80/413236/campos_512_v4
+80/413300/campos_512_v4
+80/413317/campos_512_v4
+80/413323/campos_512_v4
+80/413388/campos_512_v4
+80/413428/campos_512_v4
+80/413454/campos_512_v4
+80/413539/campos_512_v4
+80/413549/campos_512_v4
+80/413555/campos_512_v4
+80/413572/campos_512_v4
+80/413595/campos_512_v4
+80/413628/campos_512_v4
+80/413634/campos_512_v4
+80/413636/campos_512_v4
+80/413672/campos_512_v4
+80/413676/campos_512_v4
+80/413702/campos_512_v4
+80/413703/campos_512_v4
+80/413746/campos_512_v4
+80/413754/campos_512_v4
+80/413762/campos_512_v4
+80/413794/campos_512_v4
+80/413800/campos_512_v4
+80/413824/campos_512_v4
+80/413891/campos_512_v4
+80/413925/campos_512_v4
+80/413926/campos_512_v4
+80/413940/campos_512_v4
+80/414019/campos_512_v4
+80/414045/campos_512_v4
+80/414071/campos_512_v4
+80/414132/campos_512_v4
+80/414138/campos_512_v4
+80/414172/campos_512_v4
+80/414197/campos_512_v4
+80/414266/campos_512_v4
+80/414275/campos_512_v4
+80/414283/campos_512_v4
+80/414298/campos_512_v4
+80/414347/campos_512_v4
+80/414396/campos_512_v4
+80/414469/campos_512_v4
+80/414483/campos_512_v4
+80/414503/campos_512_v4
+80/414561/campos_512_v4
+80/414573/campos_512_v4
+80/414614/campos_512_v4
+80/414657/campos_512_v4
+80/414671/campos_512_v4
+80/414741/campos_512_v4
+80/414748/campos_512_v4
+80/414819/campos_512_v4
+80/414821/campos_512_v4
+80/414842/campos_512_v4
+80/414849/campos_512_v4
+80/414850/campos_512_v4
+80/414851/campos_512_v4
+80/414889/campos_512_v4
+80/414897/campos_512_v4
+80/414900/campos_512_v4
+80/414934/campos_512_v4
+80/414935/campos_512_v4
+80/414939/campos_512_v4
+80/414970/campos_512_v4
+80/414999/campos_512_v4
+80/415000/campos_512_v4
+81/415069/campos_512_v4
+81/415077/campos_512_v4
+81/415136/campos_512_v4
+81/415143/campos_512_v4
+81/415178/campos_512_v4
+81/415271/campos_512_v4
+81/415370/campos_512_v4
+81/415384/campos_512_v4
+81/415402/campos_512_v4
+81/415438/campos_512_v4
+81/415442/campos_512_v4
+81/415471/campos_512_v4
+81/415511/campos_512_v4
+81/415688/campos_512_v4
+81/415720/campos_512_v4
+81/415727/campos_512_v4
+81/415736/campos_512_v4
+81/415749/campos_512_v4
+81/415784/campos_512_v4
+81/415818/campos_512_v4
+81/415898/campos_512_v4
+81/415916/campos_512_v4
+81/415918/campos_512_v4
+81/415921/campos_512_v4
+81/415925/campos_512_v4
+81/415941/campos_512_v4
+81/416050/campos_512_v4
+81/416062/campos_512_v4
+81/416194/campos_512_v4
+81/416207/campos_512_v4
+81/416227/campos_512_v4
+81/416253/campos_512_v4
+81/416305/campos_512_v4
+81/416349/campos_512_v4
+81/416353/campos_512_v4
+81/416419/campos_512_v4
+81/416453/campos_512_v4
+81/416457/campos_512_v4
+81/416462/campos_512_v4
+81/416493/campos_512_v4
+81/416494/campos_512_v4
+81/416522/campos_512_v4
+81/416577/campos_512_v4
+81/416594/campos_512_v4
+81/416609/campos_512_v4
+81/416651/campos_512_v4
+81/416675/campos_512_v4
+81/416699/campos_512_v4
+81/416771/campos_512_v4
+81/416786/campos_512_v4
+81/416796/campos_512_v4
+81/416845/campos_512_v4
+81/416864/campos_512_v4
+81/416895/campos_512_v4
+81/416907/campos_512_v4
+81/416945/campos_512_v4
+81/417078/campos_512_v4
+81/417080/campos_512_v4
+81/417090/campos_512_v4
+81/417106/campos_512_v4
+81/417109/campos_512_v4
+81/417132/campos_512_v4
+81/417205/campos_512_v4
+81/417215/campos_512_v4
+81/417251/campos_512_v4
+81/417315/campos_512_v4
+81/417365/campos_512_v4
+81/417381/campos_512_v4
+81/417406/campos_512_v4
+81/417449/campos_512_v4
+81/417464/campos_512_v4
+81/417485/campos_512_v4
+81/417510/campos_512_v4
+81/417526/campos_512_v4
+81/417655/campos_512_v4
+81/417763/campos_512_v4
+81/417786/campos_512_v4
+81/417811/campos_512_v4
+81/417812/campos_512_v4
+81/417864/campos_512_v4
+81/417878/campos_512_v4
+81/417920/campos_512_v4
+81/417976/campos_512_v4
+81/417981/campos_512_v4
+81/418014/campos_512_v4
+81/418144/campos_512_v4
+81/418162/campos_512_v4
+81/418192/campos_512_v4
+81/418205/campos_512_v4
+81/418263/campos_512_v4
+81/418271/campos_512_v4
+81/418277/campos_512_v4
+81/418418/campos_512_v4
+81/418437/campos_512_v4
+81/418446/campos_512_v4
+81/418503/campos_512_v4
+81/418570/campos_512_v4
+81/418714/campos_512_v4
+81/419021/campos_512_v4
+81/419022/campos_512_v4
+81/419054/campos_512_v4
+81/419069/campos_512_v4
+81/419074/campos_512_v4
+81/419090/campos_512_v4
+81/419128/campos_512_v4
+81/419170/campos_512_v4
+81/419217/campos_512_v4
+81/419247/campos_512_v4
+81/419270/campos_512_v4
+81/419310/campos_512_v4
+81/419317/campos_512_v4
+81/419356/campos_512_v4
+81/419384/campos_512_v4
+81/419448/campos_512_v4
+81/419484/campos_512_v4
+81/419516/campos_512_v4
+81/419520/campos_512_v4
+81/419541/campos_512_v4
+81/419548/campos_512_v4
+81/419599/campos_512_v4
+81/419642/campos_512_v4
+81/419653/campos_512_v4
+81/419678/campos_512_v4
+81/419720/campos_512_v4
+81/419751/campos_512_v4
+81/419791/campos_512_v4
+81/419825/campos_512_v4
+81/419862/campos_512_v4
+81/419940/campos_512_v4
+81/419942/campos_512_v4
+81/420001/campos_512_v4
+82/420004/campos_512_v4
+82/420049/campos_512_v4
+82/420058/campos_512_v4
+82/420082/campos_512_v4
+82/420089/campos_512_v4
+82/420117/campos_512_v4
+82/420129/campos_512_v4
+82/420186/campos_512_v4
+82/420343/campos_512_v4
+82/420412/campos_512_v4
+82/420442/campos_512_v4
+82/420472/campos_512_v4
+82/420585/campos_512_v4
+82/420591/campos_512_v4
+82/420642/campos_512_v4
+82/420689/campos_512_v4
+82/420714/campos_512_v4
+82/420906/campos_512_v4
+82/420977/campos_512_v4
+82/420995/campos_512_v4
+82/421067/campos_512_v4
+82/421107/campos_512_v4
+82/421112/campos_512_v4
+82/421128/campos_512_v4
+82/421135/campos_512_v4
+82/421161/campos_512_v4
+82/421165/campos_512_v4
+82/421182/campos_512_v4
+82/421233/campos_512_v4
+82/421261/campos_512_v4
+82/421285/campos_512_v4
+82/421313/campos_512_v4
+82/421348/campos_512_v4
+82/421353/campos_512_v4
+82/421406/campos_512_v4
+82/421434/campos_512_v4
+82/421440/campos_512_v4
+82/421447/campos_512_v4
+82/421571/campos_512_v4
+82/421643/campos_512_v4
+82/421738/campos_512_v4
+82/421752/campos_512_v4
+82/421764/campos_512_v4
+82/421770/campos_512_v4
+82/421783/campos_512_v4
+82/421849/campos_512_v4
+82/421949/campos_512_v4
+82/421962/campos_512_v4
+82/421976/campos_512_v4
+82/422011/campos_512_v4
+82/422026/campos_512_v4
+82/422042/campos_512_v4
+82/422065/campos_512_v4
+82/422102/campos_512_v4
+82/422143/campos_512_v4
+82/422215/campos_512_v4
+82/422239/campos_512_v4
+82/422344/campos_512_v4
+82/422380/campos_512_v4
+82/422445/campos_512_v4
+82/422458/campos_512_v4
+82/422498/campos_512_v4
+82/422538/campos_512_v4
+82/422596/campos_512_v4
+82/422598/campos_512_v4
+82/422602/campos_512_v4
+82/422674/campos_512_v4
+82/422706/campos_512_v4
+82/422819/campos_512_v4
+82/422843/campos_512_v4
+82/422844/campos_512_v4
+82/422856/campos_512_v4
+82/422895/campos_512_v4
+82/422934/campos_512_v4
+82/422941/campos_512_v4
+82/423015/campos_512_v4
+82/423055/campos_512_v4
+82/423061/campos_512_v4
+82/423076/campos_512_v4
+82/423084/campos_512_v4
+82/423147/campos_512_v4
+82/423159/campos_512_v4
+82/423160/campos_512_v4
+82/423190/campos_512_v4
+82/423205/campos_512_v4
+82/423319/campos_512_v4
+82/423349/campos_512_v4
+82/423390/campos_512_v4
+82/423401/campos_512_v4
+82/423402/campos_512_v4
+82/423446/campos_512_v4
+82/423563/campos_512_v4
+82/423620/campos_512_v4
+82/423645/campos_512_v4
+82/423723/campos_512_v4
+82/423777/campos_512_v4
+82/423812/campos_512_v4
+82/423831/campos_512_v4
+82/423959/campos_512_v4
+82/423983/campos_512_v4
+82/424005/campos_512_v4
+82/424031/campos_512_v4
+82/424033/campos_512_v4
+82/424103/campos_512_v4
+82/424137/campos_512_v4
+82/424212/campos_512_v4
+82/424221/campos_512_v4
+82/424222/campos_512_v4
+82/424225/campos_512_v4
+82/424260/campos_512_v4
+82/424294/campos_512_v4
+82/424318/campos_512_v4
+82/424360/campos_512_v4
+82/424378/campos_512_v4
+82/424465/campos_512_v4
+82/424466/campos_512_v4
+82/424483/campos_512_v4
+82/424533/campos_512_v4
+82/424590/campos_512_v4
+82/424621/campos_512_v4
+82/424623/campos_512_v4
+82/424630/campos_512_v4
+82/424677/campos_512_v4
+82/424768/campos_512_v4
+82/424774/campos_512_v4
+82/424797/campos_512_v4
+82/424826/campos_512_v4
+82/424896/campos_512_v4
+82/424914/campos_512_v4
+82/424968/campos_512_v4
+82/424988/campos_512_v4
+82/424994/campos_512_v4
+83/425013/campos_512_v4
+83/425054/campos_512_v4
+83/425118/campos_512_v4
+83/425128/campos_512_v4
+83/425208/campos_512_v4
+83/425320/campos_512_v4
+83/425328/campos_512_v4
+83/425338/campos_512_v4
+83/425368/campos_512_v4
+83/425375/campos_512_v4
+83/425379/campos_512_v4
+83/425428/campos_512_v4
+83/425482/campos_512_v4
+83/425547/campos_512_v4
+83/425579/campos_512_v4
+83/425590/campos_512_v4
+83/425634/campos_512_v4
+83/425704/campos_512_v4
+83/425708/campos_512_v4
+83/425835/campos_512_v4
+83/425836/campos_512_v4
+83/425886/campos_512_v4
+83/425890/campos_512_v4
+83/425948/campos_512_v4
+83/425950/campos_512_v4
+83/426017/campos_512_v4
+83/426048/campos_512_v4
+83/426062/campos_512_v4
+83/426066/campos_512_v4
+83/426094/campos_512_v4
+83/426125/campos_512_v4
+83/426133/campos_512_v4
+83/426195/campos_512_v4
+83/426228/campos_512_v4
+83/426256/campos_512_v4
+83/426281/campos_512_v4
+83/426295/campos_512_v4
+83/426298/campos_512_v4
+83/426324/campos_512_v4
+83/426360/campos_512_v4
+83/426455/campos_512_v4
+83/426668/campos_512_v4
+83/426719/campos_512_v4
+83/426810/campos_512_v4
+83/426812/campos_512_v4
+83/426839/campos_512_v4
+83/426842/campos_512_v4
+83/426864/campos_512_v4
+83/426868/campos_512_v4
+83/426915/campos_512_v4
+83/426962/campos_512_v4
+83/426971/campos_512_v4
+83/427008/campos_512_v4
+83/427017/campos_512_v4
+83/427025/campos_512_v4
+83/427053/campos_512_v4
+83/427109/campos_512_v4
+83/427137/campos_512_v4
+83/427218/campos_512_v4
+83/427221/campos_512_v4
+83/427227/campos_512_v4
+83/427241/campos_512_v4
+83/427251/campos_512_v4
+83/427260/campos_512_v4
+83/427263/campos_512_v4
+83/427310/campos_512_v4
+83/427334/campos_512_v4
+83/427486/campos_512_v4
+83/427492/campos_512_v4
+83/427520/campos_512_v4
+83/427831/campos_512_v4
+83/427844/campos_512_v4
+83/427899/campos_512_v4
+83/427900/campos_512_v4
+83/427942/campos_512_v4
+83/428036/campos_512_v4
+83/428071/campos_512_v4
+83/428118/campos_512_v4
+83/428157/campos_512_v4
+83/428163/campos_512_v4
+83/428228/campos_512_v4
+83/428275/campos_512_v4
+83/428297/campos_512_v4
+83/428302/campos_512_v4
+83/428307/campos_512_v4
+83/428435/campos_512_v4
+83/428488/campos_512_v4
+83/428504/campos_512_v4
+83/428525/campos_512_v4
+83/428570/campos_512_v4
+83/428594/campos_512_v4
+83/428652/campos_512_v4
+83/428674/campos_512_v4
+83/428686/campos_512_v4
+83/428727/campos_512_v4
+83/428745/campos_512_v4
+83/428779/campos_512_v4
+83/428843/campos_512_v4
+83/428844/campos_512_v4
+83/428848/campos_512_v4
+83/428863/campos_512_v4
+83/428941/campos_512_v4
+83/429011/campos_512_v4
+83/429046/campos_512_v4
+83/429059/campos_512_v4
+83/429080/campos_512_v4
+83/429092/campos_512_v4
+83/429114/campos_512_v4
+83/429135/campos_512_v4
+83/429191/campos_512_v4
+83/429211/campos_512_v4
+83/429245/campos_512_v4
+83/429255/campos_512_v4
+83/429279/campos_512_v4
+83/429284/campos_512_v4
+83/429294/campos_512_v4
+83/429357/campos_512_v4
+83/429376/campos_512_v4
+83/429382/campos_512_v4
+83/429496/campos_512_v4
+83/429564/campos_512_v4
+83/429566/campos_512_v4
+83/429576/campos_512_v4
+83/429586/campos_512_v4
+83/429665/campos_512_v4
+83/429677/campos_512_v4
+83/429689/campos_512_v4
+83/429691/campos_512_v4
+83/429727/campos_512_v4
+84/430017/campos_512_v4
+84/430021/campos_512_v4
+84/430031/campos_512_v4
+84/430035/campos_512_v4
+84/430084/campos_512_v4
+84/430095/campos_512_v4
+84/430198/campos_512_v4
+84/430215/campos_512_v4
+84/430217/campos_512_v4
+84/430372/campos_512_v4
+84/430432/campos_512_v4
+84/430443/campos_512_v4
+84/430458/campos_512_v4
+84/430493/campos_512_v4
+84/430507/campos_512_v4
+84/430510/campos_512_v4
+84/430544/campos_512_v4
+84/430559/campos_512_v4
+84/430606/campos_512_v4
+84/430755/campos_512_v4
+84/430790/campos_512_v4
+84/430863/campos_512_v4
+84/430866/campos_512_v4
+84/430879/campos_512_v4
+84/430923/campos_512_v4
+84/430953/campos_512_v4
+84/430999/campos_512_v4
+84/431012/campos_512_v4
+84/431022/campos_512_v4
+84/431050/campos_512_v4
+84/431053/campos_512_v4
+84/431121/campos_512_v4
+84/431127/campos_512_v4
+84/431133/campos_512_v4
+84/431157/campos_512_v4
+84/431164/campos_512_v4
+84/431187/campos_512_v4
+84/431266/campos_512_v4
+84/431268/campos_512_v4
+84/431292/campos_512_v4
+84/431355/campos_512_v4
+84/431423/campos_512_v4
+84/431431/campos_512_v4
+84/431538/campos_512_v4
+84/431551/campos_512_v4
+84/431624/campos_512_v4
+84/431724/campos_512_v4
+84/431731/campos_512_v4
+84/431735/campos_512_v4
+84/431793/campos_512_v4
+84/431803/campos_512_v4
+84/431806/campos_512_v4
+84/431840/campos_512_v4
+84/431903/campos_512_v4
+84/431933/campos_512_v4
+84/432059/campos_512_v4
+84/432117/campos_512_v4
+84/432123/campos_512_v4
+84/432188/campos_512_v4
+84/432200/campos_512_v4
+84/432203/campos_512_v4
+84/432241/campos_512_v4
+84/432311/campos_512_v4
+84/432339/campos_512_v4
+84/432376/campos_512_v4
+84/432588/campos_512_v4
+84/432594/campos_512_v4
+84/432651/campos_512_v4
+84/432674/campos_512_v4
+84/432730/campos_512_v4
+84/432852/campos_512_v4
+84/432893/campos_512_v4
+84/433004/campos_512_v4
+84/433007/campos_512_v4
+84/433008/campos_512_v4
+84/433089/campos_512_v4
+84/433142/campos_512_v4
+84/433179/campos_512_v4
+84/433190/campos_512_v4
+84/433223/campos_512_v4
+84/433407/campos_512_v4
+84/433454/campos_512_v4
+84/433517/campos_512_v4
+84/433548/campos_512_v4
+84/433565/campos_512_v4
+84/433628/campos_512_v4
+84/433663/campos_512_v4
+84/433705/campos_512_v4
+84/433743/campos_512_v4
+84/433749/campos_512_v4
+84/433799/campos_512_v4
+84/433857/campos_512_v4
+84/433862/campos_512_v4
+84/433871/campos_512_v4
+84/433908/campos_512_v4
+84/433963/campos_512_v4
+84/433968/campos_512_v4
+84/434007/campos_512_v4
+84/434023/campos_512_v4
+84/434049/campos_512_v4
+84/434062/campos_512_v4
+84/434063/campos_512_v4
+84/434070/campos_512_v4
+84/434104/campos_512_v4
+84/434130/campos_512_v4
+84/434151/campos_512_v4
+84/434181/campos_512_v4
+84/434213/campos_512_v4
+84/434269/campos_512_v4
+84/434292/campos_512_v4
+84/434314/campos_512_v4
+84/434348/campos_512_v4
+84/434357/campos_512_v4
+84/434411/campos_512_v4
+84/434454/campos_512_v4
+84/434527/campos_512_v4
+84/434558/campos_512_v4
+84/434561/campos_512_v4
+84/434583/campos_512_v4
+84/434587/campos_512_v4
+84/434593/campos_512_v4
+84/434663/campos_512_v4
+84/434672/campos_512_v4
+84/434746/campos_512_v4
+84/434758/campos_512_v4
+84/434813/campos_512_v4
+84/434853/campos_512_v4
+84/434908/campos_512_v4
+84/434987/campos_512_v4
+84/434997/campos_512_v4
+85/435018/campos_512_v4
+85/435057/campos_512_v4
+85/435085/campos_512_v4
+85/435158/campos_512_v4
+85/435166/campos_512_v4
+85/435195/campos_512_v4
+85/435197/campos_512_v4
+85/435210/campos_512_v4
+85/435286/campos_512_v4
+85/435292/campos_512_v4
+85/435387/campos_512_v4
+85/435483/campos_512_v4
+85/435517/campos_512_v4
+85/435589/campos_512_v4
+85/435616/campos_512_v4
+85/435621/campos_512_v4
+85/435640/campos_512_v4
+85/435650/campos_512_v4
+85/435683/campos_512_v4
+85/435762/campos_512_v4
+85/435766/campos_512_v4
+85/435769/campos_512_v4
+85/435812/campos_512_v4
+85/435915/campos_512_v4
+85/435942/campos_512_v4
+85/436008/campos_512_v4
+85/436120/campos_512_v4
+85/436173/campos_512_v4
+85/436177/campos_512_v4
+85/436302/campos_512_v4
+85/436303/campos_512_v4
+85/436364/campos_512_v4
+85/436377/campos_512_v4
+85/436384/campos_512_v4
+85/436451/campos_512_v4
+85/436471/campos_512_v4
+85/436482/campos_512_v4
+85/436506/campos_512_v4
+85/436540/campos_512_v4
+85/436667/campos_512_v4
+85/436670/campos_512_v4
+85/436778/campos_512_v4
+85/436838/campos_512_v4
+85/436860/campos_512_v4
+85/436932/campos_512_v4
+85/436936/campos_512_v4
+85/437057/campos_512_v4
+85/437093/campos_512_v4
+85/437238/campos_512_v4
+85/437250/campos_512_v4
+85/437271/campos_512_v4
+85/437312/campos_512_v4
+85/437466/campos_512_v4
+85/437474/campos_512_v4
+85/437481/campos_512_v4
+85/437482/campos_512_v4
+85/437494/campos_512_v4
+85/437499/campos_512_v4
+85/437520/campos_512_v4
+85/437562/campos_512_v4
+85/437564/campos_512_v4
+85/437616/campos_512_v4
+85/437648/campos_512_v4
+85/437670/campos_512_v4
+85/437676/campos_512_v4
+85/437698/campos_512_v4
+85/437723/campos_512_v4
+85/437735/campos_512_v4
+85/437793/campos_512_v4
+85/437821/campos_512_v4
+85/437934/campos_512_v4
+85/438016/campos_512_v4
+85/438041/campos_512_v4
+85/438082/campos_512_v4
+85/438088/campos_512_v4
+85/438161/campos_512_v4
+85/438166/campos_512_v4
+85/438185/campos_512_v4
+85/438253/campos_512_v4
+85/438290/campos_512_v4
+85/438300/campos_512_v4
+85/438332/campos_512_v4
+85/438368/campos_512_v4
+85/438397/campos_512_v4
+85/438427/campos_512_v4
+85/438432/campos_512_v4
+85/438444/campos_512_v4
+85/438455/campos_512_v4
+85/438461/campos_512_v4
+85/438489/campos_512_v4
+85/438534/campos_512_v4
+85/438547/campos_512_v4
+85/438580/campos_512_v4
+85/438604/campos_512_v4
+85/438627/campos_512_v4
+85/438851/campos_512_v4
+85/438868/campos_512_v4
+85/438925/campos_512_v4
+85/438946/campos_512_v4
+85/438984/campos_512_v4
+85/438988/campos_512_v4
+85/439011/campos_512_v4
+85/439182/campos_512_v4
+85/439191/campos_512_v4
+85/439310/campos_512_v4
+85/439318/campos_512_v4
+85/439357/campos_512_v4
+85/439419/campos_512_v4
+85/439442/campos_512_v4
+85/439448/campos_512_v4
+85/439463/campos_512_v4
+85/439515/campos_512_v4
+85/439526/campos_512_v4
+85/439563/campos_512_v4
+85/439565/campos_512_v4
+85/439567/campos_512_v4
+85/439588/campos_512_v4
+85/439615/campos_512_v4
+85/439624/campos_512_v4
+85/439626/campos_512_v4
+85/439640/campos_512_v4
+85/439671/campos_512_v4
+85/439864/campos_512_v4
+85/439872/campos_512_v4
+85/439905/campos_512_v4
+85/439914/campos_512_v4
+85/439936/campos_512_v4
+85/439974/campos_512_v4
+86/440043/campos_512_v4
+86/440063/campos_512_v4
+86/440084/campos_512_v4
+86/440107/campos_512_v4
+86/440145/campos_512_v4
+86/440193/campos_512_v4
+86/440203/campos_512_v4
+86/440235/campos_512_v4
+86/440247/campos_512_v4
+86/440268/campos_512_v4
+86/440299/campos_512_v4
+86/440306/campos_512_v4
+86/440348/campos_512_v4
+86/440375/campos_512_v4
+86/440385/campos_512_v4
+86/440391/campos_512_v4
+86/440453/campos_512_v4
+86/440457/campos_512_v4
+86/440525/campos_512_v4
+86/440537/campos_512_v4
+86/440542/campos_512_v4
+86/440566/campos_512_v4
+86/440577/campos_512_v4
+86/440587/campos_512_v4
+86/440620/campos_512_v4
+86/440640/campos_512_v4
+86/440690/campos_512_v4
+86/440729/campos_512_v4
+86/440742/campos_512_v4
+86/440832/campos_512_v4
+86/440875/campos_512_v4
+86/440885/campos_512_v4
+86/440937/campos_512_v4
+86/440958/campos_512_v4
+86/440961/campos_512_v4
+86/440976/campos_512_v4
+86/440999/campos_512_v4
+86/441007/campos_512_v4
+86/441034/campos_512_v4
+86/441059/campos_512_v4
+86/441063/campos_512_v4
+86/441124/campos_512_v4
+86/441203/campos_512_v4
+86/441250/campos_512_v4
+86/441287/campos_512_v4
+86/441345/campos_512_v4
+86/441379/campos_512_v4
+86/441390/campos_512_v4
+86/441413/campos_512_v4
+86/441481/campos_512_v4
+86/441494/campos_512_v4
+86/441507/campos_512_v4
+86/441548/campos_512_v4
+86/441569/campos_512_v4
+86/441668/campos_512_v4
+86/441699/campos_512_v4
+86/441708/campos_512_v4
+86/441711/campos_512_v4
+86/441762/campos_512_v4
+86/441829/campos_512_v4
+86/441917/campos_512_v4
+86/441919/campos_512_v4
+86/441920/campos_512_v4
+86/441940/campos_512_v4
+86/441959/campos_512_v4
+86/442001/campos_512_v4
+86/442049/campos_512_v4
+86/442087/campos_512_v4
+86/442113/campos_512_v4
+86/442214/campos_512_v4
+86/442217/campos_512_v4
+86/442239/campos_512_v4
+86/442248/campos_512_v4
+86/442250/campos_512_v4
+86/442272/campos_512_v4
+86/442331/campos_512_v4
+86/442340/campos_512_v4
+86/442351/campos_512_v4
+86/442353/campos_512_v4
+86/442411/campos_512_v4
+86/442529/campos_512_v4
+86/442550/campos_512_v4
+86/442606/campos_512_v4
+86/442658/campos_512_v4
+86/442699/campos_512_v4
+86/442701/campos_512_v4
+86/442705/campos_512_v4
+86/442709/campos_512_v4
+86/442716/campos_512_v4
+86/442720/campos_512_v4
+86/442741/campos_512_v4
+86/442775/campos_512_v4
+86/442808/campos_512_v4
+86/442839/campos_512_v4
+86/442968/campos_512_v4
+86/443033/campos_512_v4
+86/443049/campos_512_v4
+86/443054/campos_512_v4
+86/443059/campos_512_v4
+86/443157/campos_512_v4
+86/443166/campos_512_v4
+86/443209/campos_512_v4
+86/443215/campos_512_v4
+86/443236/campos_512_v4
+86/443270/campos_512_v4
+86/443310/campos_512_v4
+86/443323/campos_512_v4
+86/443478/campos_512_v4
+86/443563/campos_512_v4
+86/443567/campos_512_v4
+86/443571/campos_512_v4
+86/443572/campos_512_v4
+86/443650/campos_512_v4
+86/443676/campos_512_v4
+86/443700/campos_512_v4
+86/443715/campos_512_v4
+86/443741/campos_512_v4
+86/443793/campos_512_v4
+86/443888/campos_512_v4
+86/443903/campos_512_v4
+86/443910/campos_512_v4
+86/443986/campos_512_v4
+86/443993/campos_512_v4
+86/444006/campos_512_v4
+86/444016/campos_512_v4
+86/444045/campos_512_v4
+86/444048/campos_512_v4
+86/444053/campos_512_v4
+86/444075/campos_512_v4
+86/444115/campos_512_v4
+86/444282/campos_512_v4
+86/444313/campos_512_v4
+86/444330/campos_512_v4
+86/444341/campos_512_v4
+86/444387/campos_512_v4
+86/444395/campos_512_v4
+86/444402/campos_512_v4
+86/444532/campos_512_v4
+86/444556/campos_512_v4
+86/444573/campos_512_v4
+86/444580/campos_512_v4
+86/444598/campos_512_v4
+86/444683/campos_512_v4
+86/444704/campos_512_v4
+86/444763/campos_512_v4
+86/444772/campos_512_v4
+86/444858/campos_512_v4
+86/444866/campos_512_v4
+87/445016/campos_512_v4
+87/445057/campos_512_v4
+87/445066/campos_512_v4
+87/445088/campos_512_v4
+87/445142/campos_512_v4
+87/445200/campos_512_v4
+87/445299/campos_512_v4
+87/445323/campos_512_v4
+87/445328/campos_512_v4
+87/445388/campos_512_v4
+87/445412/campos_512_v4
+87/445417/campos_512_v4
+87/445425/campos_512_v4
+87/445470/campos_512_v4
+87/445486/campos_512_v4
+87/445492/campos_512_v4
+87/445523/campos_512_v4
+87/445628/campos_512_v4
+87/445650/campos_512_v4
+87/445657/campos_512_v4
+87/445722/campos_512_v4
+87/445764/campos_512_v4
+87/445778/campos_512_v4
+87/445871/campos_512_v4
+87/445892/campos_512_v4
+87/445901/campos_512_v4
+87/445927/campos_512_v4
+87/445980/campos_512_v4
+87/446002/campos_512_v4
+87/446193/campos_512_v4
+87/446224/campos_512_v4
+87/446232/campos_512_v4
+87/446280/campos_512_v4
+87/446306/campos_512_v4
+87/446314/campos_512_v4
+87/446421/campos_512_v4
+87/446430/campos_512_v4
+87/446443/campos_512_v4
+87/446464/campos_512_v4
+87/446469/campos_512_v4
+87/446497/campos_512_v4
+87/446516/campos_512_v4
+87/446535/campos_512_v4
+87/446549/campos_512_v4
+87/446554/campos_512_v4
+87/446565/campos_512_v4
+87/446595/campos_512_v4
+87/446597/campos_512_v4
+87/446708/campos_512_v4
+87/446734/campos_512_v4
+87/446742/campos_512_v4
+87/446744/campos_512_v4
+87/446779/campos_512_v4
+87/446866/campos_512_v4
+87/446898/campos_512_v4
+87/446938/campos_512_v4
+87/446952/campos_512_v4
+87/446981/campos_512_v4
+87/447004/campos_512_v4
+87/447052/campos_512_v4
+87/447119/campos_512_v4
+87/447185/campos_512_v4
+87/447290/campos_512_v4
+87/447337/campos_512_v4
+87/447350/campos_512_v4
+87/447427/campos_512_v4
+87/447520/campos_512_v4
+87/447662/campos_512_v4
+87/447687/campos_512_v4
+87/447695/campos_512_v4
+87/447706/campos_512_v4
+87/447708/campos_512_v4
+87/447739/campos_512_v4
+87/447769/campos_512_v4
+87/447903/campos_512_v4
+87/447919/campos_512_v4
+87/448008/campos_512_v4
+87/448029/campos_512_v4
+87/448046/campos_512_v4
+87/448062/campos_512_v4
+87/448090/campos_512_v4
+87/448134/campos_512_v4
+87/448141/campos_512_v4
+87/448150/campos_512_v4
+87/448203/campos_512_v4
+87/448243/campos_512_v4
+87/448247/campos_512_v4
+87/448278/campos_512_v4
+87/448311/campos_512_v4
+87/448382/campos_512_v4
+87/448400/campos_512_v4
+87/448411/campos_512_v4
+87/448475/campos_512_v4
+87/448500/campos_512_v4
+87/448521/campos_512_v4
+87/448547/campos_512_v4
+87/448557/campos_512_v4
+87/448626/campos_512_v4
+87/448631/campos_512_v4
+87/448704/campos_512_v4
+87/448739/campos_512_v4
+87/448740/campos_512_v4
+87/448746/campos_512_v4
+87/448858/campos_512_v4
+87/448884/campos_512_v4
+87/448888/campos_512_v4
+87/448932/campos_512_v4
+87/448962/campos_512_v4
+87/449013/campos_512_v4
+87/449064/campos_512_v4
+87/449146/campos_512_v4
+87/449303/campos_512_v4
+87/449312/campos_512_v4
+87/449406/campos_512_v4
+87/449419/campos_512_v4
+87/449505/campos_512_v4
+87/449541/campos_512_v4
+87/449549/campos_512_v4
+87/449589/campos_512_v4
+87/449616/campos_512_v4
+87/449641/campos_512_v4
+87/449693/campos_512_v4
+87/449716/campos_512_v4
+87/449728/campos_512_v4
+87/449766/campos_512_v4
+88/450048/campos_512_v4
+88/450059/campos_512_v4
+88/450070/campos_512_v4
+88/450133/campos_512_v4
+88/450154/campos_512_v4
+88/450367/campos_512_v4
+88/450431/campos_512_v4
+88/450452/campos_512_v4
+88/450482/campos_512_v4
+88/450487/campos_512_v4
+88/450492/campos_512_v4
+88/450541/campos_512_v4
+88/450572/campos_512_v4
+88/450582/campos_512_v4
+88/450661/campos_512_v4
+88/450678/campos_512_v4
+88/450693/campos_512_v4
+88/450694/campos_512_v4
+88/450753/campos_512_v4
+88/450769/campos_512_v4
+88/450789/campos_512_v4
+88/450803/campos_512_v4
+88/450887/campos_512_v4
+88/450896/campos_512_v4
+88/450907/campos_512_v4
+88/450917/campos_512_v4
+88/450928/campos_512_v4
+88/450938/campos_512_v4
+88/450956/campos_512_v4
+88/450967/campos_512_v4
+88/451047/campos_512_v4
+88/451058/campos_512_v4
+88/451072/campos_512_v4
+88/451128/campos_512_v4
+88/451235/campos_512_v4
+88/451254/campos_512_v4
+88/451338/campos_512_v4
+88/451348/campos_512_v4
+88/451385/campos_512_v4
+88/451523/campos_512_v4
+88/451527/campos_512_v4
+88/451571/campos_512_v4
+88/451602/campos_512_v4
+88/451711/campos_512_v4
+88/451728/campos_512_v4
+88/451734/campos_512_v4
+88/451816/campos_512_v4
+88/451844/campos_512_v4
+88/451848/campos_512_v4
+88/451937/campos_512_v4
+88/452029/campos_512_v4
+88/452059/campos_512_v4
+88/452115/campos_512_v4
+88/452135/campos_512_v4
+88/452168/campos_512_v4
+88/452237/campos_512_v4
+88/452241/campos_512_v4
+88/452290/campos_512_v4
+88/452490/campos_512_v4
+88/452537/campos_512_v4
+88/452557/campos_512_v4
+88/452570/campos_512_v4
+88/452592/campos_512_v4
+88/452651/campos_512_v4
+88/452688/campos_512_v4
+88/452706/campos_512_v4
+88/452727/campos_512_v4
+88/452736/campos_512_v4
+88/452737/campos_512_v4
+88/452757/campos_512_v4
+88/452784/campos_512_v4
+88/452829/campos_512_v4
+88/452971/campos_512_v4
+88/452995/campos_512_v4
+88/453019/campos_512_v4
+88/453063/campos_512_v4
+88/453083/campos_512_v4
+88/453084/campos_512_v4
+88/453096/campos_512_v4
+88/453142/campos_512_v4
+88/453165/campos_512_v4
+88/453178/campos_512_v4
+88/453262/campos_512_v4
+88/453272/campos_512_v4
+88/453352/campos_512_v4
+88/453402/campos_512_v4
+88/453410/campos_512_v4
+88/453482/campos_512_v4
+88/453515/campos_512_v4
+88/453518/campos_512_v4
+88/453530/campos_512_v4
+88/453556/campos_512_v4
+88/453584/campos_512_v4
+88/453599/campos_512_v4
+88/453601/campos_512_v4
+88/453614/campos_512_v4
+88/453621/campos_512_v4
+88/453666/campos_512_v4
+88/453677/campos_512_v4
+88/453729/campos_512_v4
+88/453750/campos_512_v4
+88/453765/campos_512_v4
+88/453777/campos_512_v4
+88/453780/campos_512_v4
+88/453806/campos_512_v4
+88/453825/campos_512_v4
+88/453869/campos_512_v4
+88/453899/campos_512_v4
+88/453983/campos_512_v4
+88/454057/campos_512_v4
+88/454065/campos_512_v4
+88/454094/campos_512_v4
+88/454097/campos_512_v4
+88/454098/campos_512_v4
+88/454128/campos_512_v4
+88/454169/campos_512_v4
+88/454171/campos_512_v4
+88/454237/campos_512_v4
+88/454271/campos_512_v4
+88/454288/campos_512_v4
+88/454299/campos_512_v4
+88/454303/campos_512_v4
+88/454311/campos_512_v4
+88/454315/campos_512_v4
+88/454332/campos_512_v4
+88/454396/campos_512_v4
+88/454446/campos_512_v4
+88/454485/campos_512_v4
+88/454530/campos_512_v4
+88/454559/campos_512_v4
+88/454615/campos_512_v4
+88/454651/campos_512_v4
+88/454652/campos_512_v4
+88/454687/campos_512_v4
+88/454696/campos_512_v4
+88/454841/campos_512_v4
+88/454862/campos_512_v4
+88/454896/campos_512_v4
+88/454963/campos_512_v4
+88/454973/campos_512_v4
+88/454977/campos_512_v4
+88/454979/campos_512_v4
+88/454980/campos_512_v4
+89/455028/campos_512_v4
+89/455034/campos_512_v4
+89/455062/campos_512_v4
+89/455143/campos_512_v4
+89/455188/campos_512_v4
+89/455230/campos_512_v4
+89/455240/campos_512_v4
+89/455257/campos_512_v4
+89/455290/campos_512_v4
+89/455294/campos_512_v4
+89/455407/campos_512_v4
+89/455459/campos_512_v4
+89/455488/campos_512_v4
+89/455501/campos_512_v4
+89/455520/campos_512_v4
+89/455525/campos_512_v4
+89/455533/campos_512_v4
+89/455544/campos_512_v4
+89/455552/campos_512_v4
+89/455573/campos_512_v4
+89/455577/campos_512_v4
+89/455587/campos_512_v4
+89/455628/campos_512_v4
+89/455656/campos_512_v4
+89/455667/campos_512_v4
+89/455702/campos_512_v4
+89/455714/campos_512_v4
+89/455715/campos_512_v4
+89/455781/campos_512_v4
+89/455792/campos_512_v4
+89/455793/campos_512_v4
+89/455802/campos_512_v4
+89/455819/campos_512_v4
+89/455838/campos_512_v4
+89/455849/campos_512_v4
+89/455851/campos_512_v4
+89/455853/campos_512_v4
+89/455875/campos_512_v4
+89/455879/campos_512_v4
+89/455983/campos_512_v4
+89/456014/campos_512_v4
+89/456064/campos_512_v4
+89/456084/campos_512_v4
+89/456143/campos_512_v4
+89/456279/campos_512_v4
+89/456288/campos_512_v4
+89/456347/campos_512_v4
+89/456369/campos_512_v4
+89/456391/campos_512_v4
+89/456406/campos_512_v4
+89/456425/campos_512_v4
+89/456467/campos_512_v4
+89/456494/campos_512_v4
+89/456500/campos_512_v4
+89/456515/campos_512_v4
+89/456528/campos_512_v4
+89/456599/campos_512_v4
+89/456606/campos_512_v4
+89/456614/campos_512_v4
+89/456642/campos_512_v4
+89/456644/campos_512_v4
+89/456652/campos_512_v4
+89/456671/campos_512_v4
+89/456674/campos_512_v4
+89/456682/campos_512_v4
+89/456706/campos_512_v4
+89/456815/campos_512_v4
+89/456856/campos_512_v4
+89/456905/campos_512_v4
+89/456997/campos_512_v4
+89/457022/campos_512_v4
+89/457032/campos_512_v4
+89/457043/campos_512_v4
+89/457049/campos_512_v4
+89/457073/campos_512_v4
+89/457147/campos_512_v4
+89/457190/campos_512_v4
+89/457286/campos_512_v4
+89/457313/campos_512_v4
+89/457320/campos_512_v4
+89/457348/campos_512_v4
+89/457382/campos_512_v4
+89/457449/campos_512_v4
+89/457557/campos_512_v4
+89/457628/campos_512_v4
+89/457649/campos_512_v4
+89/457668/campos_512_v4
+89/457738/campos_512_v4
+89/457816/campos_512_v4
+89/457835/campos_512_v4
+89/457861/campos_512_v4
+89/457890/campos_512_v4
+89/458004/campos_512_v4
+89/458012/campos_512_v4
+89/458060/campos_512_v4
+89/458076/campos_512_v4
+89/458090/campos_512_v4
+89/458110/campos_512_v4
+89/458140/campos_512_v4
+89/458158/campos_512_v4
+89/458181/campos_512_v4
+89/458183/campos_512_v4
+89/458221/campos_512_v4
+89/458303/campos_512_v4
+89/458445/campos_512_v4
+89/458451/campos_512_v4
+89/458482/campos_512_v4
+89/458566/campos_512_v4
+89/458588/campos_512_v4
+89/458607/campos_512_v4
+89/458608/campos_512_v4
+89/458616/campos_512_v4
+89/458637/campos_512_v4
+89/458642/campos_512_v4
+89/458769/campos_512_v4
+89/458822/campos_512_v4
+89/458884/campos_512_v4
+89/458938/campos_512_v4
+89/458992/campos_512_v4
+89/458994/campos_512_v4
+89/458998/campos_512_v4
+89/459045/campos_512_v4
+89/459060/campos_512_v4
+89/459085/campos_512_v4
+89/459094/campos_512_v4
+89/459148/campos_512_v4
+89/459198/campos_512_v4
+89/459212/campos_512_v4
+89/459242/campos_512_v4
+89/459369/campos_512_v4
+89/459423/campos_512_v4
+89/459427/campos_512_v4
+89/459496/campos_512_v4
+89/459541/campos_512_v4
+89/459570/campos_512_v4
+89/459592/campos_512_v4
+89/459598/campos_512_v4
+89/459614/campos_512_v4
+89/459690/campos_512_v4
+89/459700/campos_512_v4
+89/459714/campos_512_v4
+89/459734/campos_512_v4
+89/459784/campos_512_v4
+89/459792/campos_512_v4
+89/459823/campos_512_v4
+89/459829/campos_512_v4
+89/459848/campos_512_v4
+89/459852/campos_512_v4
+89/459854/campos_512_v4
+89/459875/campos_512_v4
+89/459977/campos_512_v4
+89/459982/campos_512_v4
+9/55052/campos_512_v4
+9/55053/campos_512_v4
+9/55089/campos_512_v4
+9/55092/campos_512_v4
+9/55119/campos_512_v4
+9/55158/campos_512_v4
+9/55166/campos_512_v4
+9/55167/campos_512_v4
+9/55173/campos_512_v4
+9/55189/campos_512_v4
+9/55205/campos_512_v4
+9/55229/campos_512_v4
+9/55239/campos_512_v4
+9/55241/campos_512_v4
+9/55258/campos_512_v4
+9/55305/campos_512_v4
+9/55313/campos_512_v4
+9/55316/campos_512_v4
+9/55318/campos_512_v4
+9/55355/campos_512_v4
+9/55401/campos_512_v4
+9/55409/campos_512_v4
+9/55418/campos_512_v4
+9/55421/campos_512_v4
+9/55424/campos_512_v4
+9/55425/campos_512_v4
+9/55509/campos_512_v4
+9/55519/campos_512_v4
+9/55562/campos_512_v4
+9/55566/campos_512_v4
+9/55598/campos_512_v4
+9/55656/campos_512_v4
+9/55670/campos_512_v4
+9/55672/campos_512_v4
+9/55717/campos_512_v4
+9/55718/campos_512_v4
+9/55720/campos_512_v4
+9/55722/campos_512_v4
+9/55723/campos_512_v4
+9/55864/campos_512_v4
+9/55886/campos_512_v4
+9/55900/campos_512_v4
+9/55937/campos_512_v4
+9/55961/campos_512_v4
+9/55974/campos_512_v4
+9/55980/campos_512_v4
+9/55985/campos_512_v4
+9/55996/campos_512_v4
+9/56002/campos_512_v4
+9/56021/campos_512_v4
+9/56022/campos_512_v4
+9/56029/campos_512_v4
+9/56061/campos_512_v4
+9/56078/campos_512_v4
+9/56081/campos_512_v4
+9/56094/campos_512_v4
+9/56125/campos_512_v4
+9/56151/campos_512_v4
+9/56186/campos_512_v4
+9/56188/campos_512_v4
+9/56189/campos_512_v4
+9/56190/campos_512_v4
+9/56204/campos_512_v4
+9/56214/campos_512_v4
+9/56216/campos_512_v4
+9/56223/campos_512_v4
+9/56258/campos_512_v4
+9/56273/campos_512_v4
+9/56285/campos_512_v4
+9/56308/campos_512_v4
+9/56316/campos_512_v4
+9/56322/campos_512_v4
+9/56325/campos_512_v4
+9/56333/campos_512_v4
+9/56352/campos_512_v4
+9/56373/campos_512_v4
+9/56400/campos_512_v4
+9/56415/campos_512_v4
+9/56421/campos_512_v4
+9/56423/campos_512_v4
+9/56452/campos_512_v4
+9/56468/campos_512_v4
+9/56476/campos_512_v4
+9/56587/campos_512_v4
+9/56595/campos_512_v4
+9/56615/campos_512_v4
+9/56631/campos_512_v4
+9/56642/campos_512_v4
+9/56650/campos_512_v4
+9/56672/campos_512_v4
+9/56678/campos_512_v4
+9/56690/campos_512_v4
+9/56744/campos_512_v4
+9/56747/campos_512_v4
+9/56749/campos_512_v4
+9/56765/campos_512_v4
+9/56775/campos_512_v4
+9/56782/campos_512_v4
+9/56793/campos_512_v4
+9/56794/campos_512_v4
+9/56811/campos_512_v4
+9/56815/campos_512_v4
+9/56851/campos_512_v4
+9/56882/campos_512_v4
+9/56890/campos_512_v4
+9/56903/campos_512_v4
+9/56932/campos_512_v4
+9/56955/campos_512_v4
+9/56968/campos_512_v4
+9/57030/campos_512_v4
+9/57043/campos_512_v4
+9/57044/campos_512_v4
+9/57046/campos_512_v4
+9/57048/campos_512_v4
+9/57112/campos_512_v4
+9/57124/campos_512_v4
+9/57132/campos_512_v4
+9/57182/campos_512_v4
+9/57199/campos_512_v4
+9/57224/campos_512_v4
+9/57261/campos_512_v4
+9/57281/campos_512_v4
+9/57317/campos_512_v4
+9/57363/campos_512_v4
+9/57377/campos_512_v4
+9/57379/campos_512_v4
+9/57393/campos_512_v4
+9/57404/campos_512_v4
+9/57411/campos_512_v4
+9/57437/campos_512_v4
+9/57468/campos_512_v4
+9/57470/campos_512_v4
+9/57533/campos_512_v4
+9/57534/campos_512_v4
+9/57611/campos_512_v4
+9/57616/campos_512_v4
+9/57650/campos_512_v4
+9/57652/campos_512_v4
+9/57654/campos_512_v4
+9/57672/campos_512_v4
+9/57716/campos_512_v4
+9/57718/campos_512_v4
+9/57723/campos_512_v4
+9/57724/campos_512_v4
+9/57748/campos_512_v4
+9/57802/campos_512_v4
+9/57820/campos_512_v4
+9/57841/campos_512_v4
+9/57851/campos_512_v4
+9/57863/campos_512_v4
+9/57887/campos_512_v4
+9/57926/campos_512_v4
+9/57937/campos_512_v4
+9/57938/campos_512_v4
+9/57942/campos_512_v4
+9/57964/campos_512_v4
+9/57966/campos_512_v4
+9/57973/campos_512_v4
+9/57979/campos_512_v4
+9/57980/campos_512_v4
+9/58006/campos_512_v4
+9/58052/campos_512_v4
+9/58054/campos_512_v4
+9/58093/campos_512_v4
+9/58113/campos_512_v4
+9/58157/campos_512_v4
+9/58167/campos_512_v4
+9/58175/campos_512_v4
+9/58189/campos_512_v4
+9/58217/campos_512_v4
+9/58277/campos_512_v4
+9/58358/campos_512_v4
+9/58375/campos_512_v4
+9/58378/campos_512_v4
+9/58385/campos_512_v4
+9/58390/campos_512_v4
+9/58400/campos_512_v4
+9/58437/campos_512_v4
+9/58444/campos_512_v4
+9/58447/campos_512_v4
+9/58455/campos_512_v4
+9/58459/campos_512_v4
+9/58485/campos_512_v4
+9/58487/campos_512_v4
+9/58495/campos_512_v4
+9/58502/campos_512_v4
+9/58520/campos_512_v4
+9/58549/campos_512_v4
+9/58552/campos_512_v4
+9/58565/campos_512_v4
+9/58582/campos_512_v4
+9/58593/campos_512_v4
+9/58607/campos_512_v4
+9/58627/campos_512_v4
+9/58686/campos_512_v4
+9/58689/campos_512_v4
+9/58690/campos_512_v4
+9/58695/campos_512_v4
+9/58702/campos_512_v4
+9/58790/campos_512_v4
+9/58813/campos_512_v4
+9/58815/campos_512_v4
+9/58874/campos_512_v4
+9/58892/campos_512_v4
+9/58896/campos_512_v4
+9/58901/campos_512_v4
+9/58906/campos_512_v4
+9/58929/campos_512_v4
+9/58931/campos_512_v4
+9/58936/campos_512_v4
+9/58938/campos_512_v4
+9/58942/campos_512_v4
+9/58953/campos_512_v4
+9/58971/campos_512_v4
+9/58980/campos_512_v4
+9/58992/campos_512_v4
+9/58994/campos_512_v4
+9/58996/campos_512_v4
+9/58998/campos_512_v4
+9/59004/campos_512_v4
+9/59031/campos_512_v4
+9/59053/campos_512_v4
+9/59077/campos_512_v4
+9/59081/campos_512_v4
+9/59084/campos_512_v4
+9/59098/campos_512_v4
+9/59156/campos_512_v4
+9/59163/campos_512_v4
+9/59255/campos_512_v4
+9/59279/campos_512_v4
+9/59298/campos_512_v4
+9/59328/campos_512_v4
+9/59358/campos_512_v4
+9/59374/campos_512_v4
+9/59386/campos_512_v4
+9/59402/campos_512_v4
+9/59409/campos_512_v4
+9/59414/campos_512_v4
+9/59430/campos_512_v4
+9/59471/campos_512_v4
+9/59486/campos_512_v4
+9/59494/campos_512_v4
+9/59531/campos_512_v4
+9/59533/campos_512_v4
+9/59545/campos_512_v4
+9/59589/campos_512_v4
+9/59592/campos_512_v4
+9/59594/campos_512_v4
+9/59615/campos_512_v4
+9/59640/campos_512_v4
+9/59669/campos_512_v4
+9/59703/campos_512_v4
+9/59707/campos_512_v4
+9/59744/campos_512_v4
+9/59771/campos_512_v4
+9/59774/campos_512_v4
+9/59776/campos_512_v4
+9/59801/campos_512_v4
+9/59806/campos_512_v4
+9/59828/campos_512_v4
+9/59873/campos_512_v4
+9/59880/campos_512_v4
+9/59882/campos_512_v4
+9/59893/campos_512_v4
+9/59896/campos_512_v4
+9/59907/campos_512_v4
+9/59940/campos_512_v4
+9/59948/campos_512_v4
+90/460004/campos_512_v4
+90/460008/campos_512_v4
+90/460019/campos_512_v4
+90/460032/campos_512_v4
+90/460050/campos_512_v4
+90/460056/campos_512_v4
+90/460117/campos_512_v4
+90/460159/campos_512_v4
+90/460195/campos_512_v4
+90/460199/campos_512_v4
+90/460209/campos_512_v4
+90/460215/campos_512_v4
+90/460259/campos_512_v4
+90/460290/campos_512_v4
+90/460304/campos_512_v4
+90/460306/campos_512_v4
+90/460415/campos_512_v4
+90/460439/campos_512_v4
+90/460441/campos_512_v4
+90/460473/campos_512_v4
+90/460505/campos_512_v4
+90/460531/campos_512_v4
+90/460541/campos_512_v4
+90/460558/campos_512_v4
+90/460598/campos_512_v4
+90/460618/campos_512_v4
+90/460624/campos_512_v4
+90/460660/campos_512_v4
+90/460691/campos_512_v4
+90/460714/campos_512_v4
+90/460731/campos_512_v4
+90/460736/campos_512_v4
+90/460769/campos_512_v4
+90/460800/campos_512_v4
+90/460812/campos_512_v4
+90/460813/campos_512_v4
+90/460817/campos_512_v4
+90/460904/campos_512_v4
+90/460910/campos_512_v4
+90/460934/campos_512_v4
+90/460957/campos_512_v4
+90/460992/campos_512_v4
+90/461057/campos_512_v4
+90/461070/campos_512_v4
+90/461155/campos_512_v4
+90/461174/campos_512_v4
+90/461179/campos_512_v4
+90/461192/campos_512_v4
+90/461193/campos_512_v4
+90/461228/campos_512_v4
+90/461250/campos_512_v4
+90/461336/campos_512_v4
+90/461489/campos_512_v4
+90/461519/campos_512_v4
+90/461534/campos_512_v4
+90/461557/campos_512_v4
+90/461601/campos_512_v4
+90/461649/campos_512_v4
+90/461650/campos_512_v4
+90/461696/campos_512_v4
+90/461718/campos_512_v4
+90/461724/campos_512_v4
+90/461728/campos_512_v4
+90/461739/campos_512_v4
+90/461761/campos_512_v4
+90/461764/campos_512_v4
+90/461896/campos_512_v4
+90/461903/campos_512_v4
+90/461908/campos_512_v4
+90/461914/campos_512_v4
+90/461989/campos_512_v4
+90/462001/campos_512_v4
+90/462015/campos_512_v4
+90/462077/campos_512_v4
+90/462097/campos_512_v4
+90/462131/campos_512_v4
+90/462136/campos_512_v4
+90/462149/campos_512_v4
+90/462162/campos_512_v4
+90/462206/campos_512_v4
+90/462347/campos_512_v4
+90/462374/campos_512_v4
+90/462405/campos_512_v4
+90/462425/campos_512_v4
+90/462552/campos_512_v4
+90/462562/campos_512_v4
+90/462578/campos_512_v4
+90/462636/campos_512_v4
+90/462651/campos_512_v4
+90/462733/campos_512_v4
+90/462736/campos_512_v4
+90/462777/campos_512_v4
+90/462783/campos_512_v4
+90/462784/campos_512_v4
+90/462815/campos_512_v4
+90/462847/campos_512_v4
+90/462874/campos_512_v4
+90/462947/campos_512_v4
+90/462955/campos_512_v4
+90/463019/campos_512_v4
+90/463022/campos_512_v4
+90/463114/campos_512_v4
+90/463136/campos_512_v4
+90/463163/campos_512_v4
+90/463165/campos_512_v4
+90/463183/campos_512_v4
+90/463198/campos_512_v4
+90/463250/campos_512_v4
+90/463266/campos_512_v4
+90/463271/campos_512_v4
+90/463278/campos_512_v4
+90/463343/campos_512_v4
+90/463352/campos_512_v4
+90/463388/campos_512_v4
+90/463441/campos_512_v4
+90/463447/campos_512_v4
+90/463492/campos_512_v4
+90/463496/campos_512_v4
+90/463500/campos_512_v4
+90/463516/campos_512_v4
+90/463530/campos_512_v4
+90/463550/campos_512_v4
+90/463589/campos_512_v4
+90/463605/campos_512_v4
+90/463647/campos_512_v4
+90/463658/campos_512_v4
+90/463675/campos_512_v4
+90/463682/campos_512_v4
+90/463695/campos_512_v4
+90/463772/campos_512_v4
+90/463784/campos_512_v4
+90/463837/campos_512_v4
+90/463928/campos_512_v4
+90/463998/campos_512_v4
+90/464016/campos_512_v4
+90/464040/campos_512_v4
+90/464069/campos_512_v4
+90/464119/campos_512_v4
+90/464156/campos_512_v4
+90/464165/campos_512_v4
+90/464191/campos_512_v4
+90/464218/campos_512_v4
+90/464233/campos_512_v4
+90/464238/campos_512_v4
+90/464245/campos_512_v4
+90/464247/campos_512_v4
+90/464272/campos_512_v4
+90/464285/campos_512_v4
+90/464308/campos_512_v4
+90/464459/campos_512_v4
+90/464464/campos_512_v4
+90/464529/campos_512_v4
+90/464535/campos_512_v4
+90/464550/campos_512_v4
+90/464572/campos_512_v4
+90/464604/campos_512_v4
+90/464612/campos_512_v4
+90/464756/campos_512_v4
+90/464786/campos_512_v4
+90/464796/campos_512_v4
+90/464857/campos_512_v4
+90/464932/campos_512_v4
+90/464962/campos_512_v4
+90/464981/campos_512_v4
+90/464990/campos_512_v4
+90/464993/campos_512_v4
+91/465008/campos_512_v4
+91/465028/campos_512_v4
+91/465046/campos_512_v4
+91/465050/campos_512_v4
+91/465104/campos_512_v4
+91/465109/campos_512_v4
+91/465150/campos_512_v4
+91/465182/campos_512_v4
+91/465309/campos_512_v4
+91/465360/campos_512_v4
+91/465375/campos_512_v4
+91/465390/campos_512_v4
+91/465396/campos_512_v4
+91/465429/campos_512_v4
+91/465450/campos_512_v4
+91/465573/campos_512_v4
+91/465618/campos_512_v4
+91/465663/campos_512_v4
+91/465672/campos_512_v4
+91/465865/campos_512_v4
+91/465869/campos_512_v4
+91/465964/campos_512_v4
+91/465999/campos_512_v4
+91/466000/campos_512_v4
+91/466004/campos_512_v4
+91/466046/campos_512_v4
+91/466051/campos_512_v4
+91/466066/campos_512_v4
+91/466087/campos_512_v4
+91/466188/campos_512_v4
+91/466204/campos_512_v4
+91/466219/campos_512_v4
+91/466223/campos_512_v4
+91/466225/campos_512_v4
+91/466310/campos_512_v4
+91/466358/campos_512_v4
+91/466376/campos_512_v4
+91/466405/campos_512_v4
+91/466418/campos_512_v4
+91/466444/campos_512_v4
+91/466466/campos_512_v4
+91/466482/campos_512_v4
+91/466483/campos_512_v4
+91/466490/campos_512_v4
+91/466510/campos_512_v4
+91/466524/campos_512_v4
+91/466554/campos_512_v4
+91/466563/campos_512_v4
+91/466565/campos_512_v4
+91/466642/campos_512_v4
+91/466796/campos_512_v4
+91/466818/campos_512_v4
+91/466819/campos_512_v4
+91/466893/campos_512_v4
+91/466993/campos_512_v4
+91/467112/campos_512_v4
+91/467188/campos_512_v4
+91/467196/campos_512_v4
+91/467203/campos_512_v4
+91/467209/campos_512_v4
+91/467224/campos_512_v4
+91/467225/campos_512_v4
+91/467227/campos_512_v4
+91/467234/campos_512_v4
+91/467237/campos_512_v4
+91/467257/campos_512_v4
+91/467316/campos_512_v4
+91/467334/campos_512_v4
+91/467341/campos_512_v4
+91/467388/campos_512_v4
+91/467413/campos_512_v4
+91/467460/campos_512_v4
+91/467536/campos_512_v4
+91/467559/campos_512_v4
+91/467612/campos_512_v4
+91/467615/campos_512_v4
+91/467632/campos_512_v4
+91/467728/campos_512_v4
+91/467745/campos_512_v4
+91/467748/campos_512_v4
+91/467786/campos_512_v4
+91/467802/campos_512_v4
+91/467823/campos_512_v4
+91/467866/campos_512_v4
+91/467869/campos_512_v4
+91/467898/campos_512_v4
+91/467903/campos_512_v4
+91/467911/campos_512_v4
+91/467968/campos_512_v4
+91/468011/campos_512_v4
+91/468054/campos_512_v4
+91/468059/campos_512_v4
+91/468067/campos_512_v4
+91/468092/campos_512_v4
+91/468093/campos_512_v4
+91/468111/campos_512_v4
+91/468138/campos_512_v4
+91/468139/campos_512_v4
+91/468159/campos_512_v4
+91/468171/campos_512_v4
+91/468261/campos_512_v4
+91/468274/campos_512_v4
+91/468284/campos_512_v4
+91/468300/campos_512_v4
+91/468305/campos_512_v4
+91/468306/campos_512_v4
+91/468354/campos_512_v4
+91/468461/campos_512_v4
+91/468479/campos_512_v4
+91/468500/campos_512_v4
+91/468570/campos_512_v4
+91/468580/campos_512_v4
+91/468604/campos_512_v4
+91/468648/campos_512_v4
+91/468653/campos_512_v4
+91/468677/campos_512_v4
+91/468711/campos_512_v4
+91/468748/campos_512_v4
+91/468750/campos_512_v4
+91/468861/campos_512_v4
+91/468930/campos_512_v4
+91/469007/campos_512_v4
+91/469024/campos_512_v4
+91/469038/campos_512_v4
+91/469060/campos_512_v4
+91/469116/campos_512_v4
+91/469190/campos_512_v4
+91/469226/campos_512_v4
+91/469240/campos_512_v4
+91/469280/campos_512_v4
+91/469377/campos_512_v4
+91/469405/campos_512_v4
+91/469424/campos_512_v4
+91/469482/campos_512_v4
+91/469513/campos_512_v4
+91/469534/campos_512_v4
+91/469591/campos_512_v4
+91/469597/campos_512_v4
+91/469611/campos_512_v4
+91/469640/campos_512_v4
+91/469656/campos_512_v4
+91/469667/campos_512_v4
+91/469788/campos_512_v4
+91/469837/campos_512_v4
+91/469846/campos_512_v4
+91/469859/campos_512_v4
+91/469953/campos_512_v4
+91/469975/campos_512_v4
+91/469979/campos_512_v4
+91/469986/campos_512_v4
+92/470006/campos_512_v4
+92/470051/campos_512_v4
+92/470112/campos_512_v4
+92/470165/campos_512_v4
+92/470245/campos_512_v4
+92/470248/campos_512_v4
+92/470250/campos_512_v4
+92/470261/campos_512_v4
+92/470268/campos_512_v4
+92/470280/campos_512_v4
+92/470375/campos_512_v4
+92/470411/campos_512_v4
+92/470422/campos_512_v4
+92/470432/campos_512_v4
+92/470438/campos_512_v4
+92/470467/campos_512_v4
+92/470491/campos_512_v4
+92/470494/campos_512_v4
+92/470525/campos_512_v4
+92/470558/campos_512_v4
+92/470596/campos_512_v4
+92/470622/campos_512_v4
+92/470684/campos_512_v4
+92/470697/campos_512_v4
+92/470727/campos_512_v4
+92/470746/campos_512_v4
+92/470750/campos_512_v4
+92/470758/campos_512_v4
+92/470760/campos_512_v4
+92/470792/campos_512_v4
+92/470820/campos_512_v4
+92/470832/campos_512_v4
+92/470837/campos_512_v4
+92/471005/campos_512_v4
+92/471013/campos_512_v4
+92/471034/campos_512_v4
+92/471047/campos_512_v4
+92/471165/campos_512_v4
+92/471309/campos_512_v4
+92/471351/campos_512_v4
+92/471352/campos_512_v4
+92/471405/campos_512_v4
+92/471415/campos_512_v4
+92/471442/campos_512_v4
+92/471445/campos_512_v4
+92/471478/campos_512_v4
+92/471513/campos_512_v4
+92/471560/campos_512_v4
+92/471574/campos_512_v4
+92/471578/campos_512_v4
+92/471600/campos_512_v4
+92/471690/campos_512_v4
+92/471716/campos_512_v4
+92/471723/campos_512_v4
+92/471829/campos_512_v4
+92/471833/campos_512_v4
+92/471865/campos_512_v4
+92/471883/campos_512_v4
+92/471952/campos_512_v4
+92/472051/campos_512_v4
+92/472058/campos_512_v4
+92/472109/campos_512_v4
+92/472118/campos_512_v4
+92/472126/campos_512_v4
+92/472207/campos_512_v4
+92/472221/campos_512_v4
+92/472245/campos_512_v4
+92/472249/campos_512_v4
+92/472296/campos_512_v4
+92/472408/campos_512_v4
+92/472495/campos_512_v4
+92/472511/campos_512_v4
+92/472539/campos_512_v4
+92/472559/campos_512_v4
+92/472588/campos_512_v4
+92/472615/campos_512_v4
+92/472711/campos_512_v4
+92/472729/campos_512_v4
+92/472733/campos_512_v4
+92/472777/campos_512_v4
+92/472789/campos_512_v4
+92/472793/campos_512_v4
+92/472855/campos_512_v4
+92/472982/campos_512_v4
+92/473013/campos_512_v4
+92/473047/campos_512_v4
+92/473086/campos_512_v4
+92/473116/campos_512_v4
+92/473204/campos_512_v4
+92/473253/campos_512_v4
+92/473261/campos_512_v4
+92/473281/campos_512_v4
+92/473363/campos_512_v4
+92/473365/campos_512_v4
+92/473369/campos_512_v4
+92/473413/campos_512_v4
+92/473455/campos_512_v4
+92/473463/campos_512_v4
+92/473489/campos_512_v4
+92/473500/campos_512_v4
+92/473504/campos_512_v4
+92/473520/campos_512_v4
+92/473532/campos_512_v4
+92/473594/campos_512_v4
+92/473611/campos_512_v4
+92/473622/campos_512_v4
+92/473648/campos_512_v4
+92/473655/campos_512_v4
+92/473667/campos_512_v4
+92/473670/campos_512_v4
+92/473715/campos_512_v4
+92/473773/campos_512_v4
+92/473804/campos_512_v4
+92/473805/campos_512_v4
+92/473842/campos_512_v4
+92/473884/campos_512_v4
+92/473924/campos_512_v4
+92/473961/campos_512_v4
+92/474091/campos_512_v4
+92/474143/campos_512_v4
+92/474145/campos_512_v4
+92/474170/campos_512_v4
+92/474199/campos_512_v4
+92/474261/campos_512_v4
+92/474318/campos_512_v4
+92/474332/campos_512_v4
+92/474336/campos_512_v4
+92/474505/campos_512_v4
+92/474535/campos_512_v4
+92/474547/campos_512_v4
+92/474548/campos_512_v4
+92/474575/campos_512_v4
+92/474577/campos_512_v4
+92/474580/campos_512_v4
+92/474581/campos_512_v4
+92/474615/campos_512_v4
+92/474626/campos_512_v4
+92/474642/campos_512_v4
+92/474665/campos_512_v4
+92/474706/campos_512_v4
+92/474713/campos_512_v4
+92/474742/campos_512_v4
+92/474747/campos_512_v4
+92/474751/campos_512_v4
+92/474778/campos_512_v4
+92/474796/campos_512_v4
+92/474834/campos_512_v4
+92/474897/campos_512_v4
+92/474935/campos_512_v4
+92/474970/campos_512_v4
+92/474991/campos_512_v4
+93/475023/campos_512_v4
+93/475028/campos_512_v4
+93/475035/campos_512_v4
+93/475059/campos_512_v4
+93/475080/campos_512_v4
+93/475160/campos_512_v4
+93/475170/campos_512_v4
+93/475219/campos_512_v4
+93/475246/campos_512_v4
+93/475249/campos_512_v4
+93/475272/campos_512_v4
+93/475302/campos_512_v4
+93/475329/campos_512_v4
+93/475335/campos_512_v4
+93/475374/campos_512_v4
+93/475392/campos_512_v4
+93/475436/campos_512_v4
+93/475468/campos_512_v4
+93/475481/campos_512_v4
+93/475487/campos_512_v4
+93/475560/campos_512_v4
+93/475585/campos_512_v4
+93/475612/campos_512_v4
+93/475657/campos_512_v4
+93/475767/campos_512_v4
+93/475782/campos_512_v4
+93/475891/campos_512_v4
+93/475897/campos_512_v4
+93/475913/campos_512_v4
+93/475975/campos_512_v4
+93/475977/campos_512_v4
+93/476021/campos_512_v4
+93/476093/campos_512_v4
+93/476171/campos_512_v4
+93/476196/campos_512_v4
+93/476248/campos_512_v4
+93/476267/campos_512_v4
+93/476326/campos_512_v4
+93/476346/campos_512_v4
+93/476382/campos_512_v4
+93/476403/campos_512_v4
+93/476405/campos_512_v4
+93/476428/campos_512_v4
+93/476457/campos_512_v4
+93/476506/campos_512_v4
+93/476643/campos_512_v4
+93/476645/campos_512_v4
+93/476743/campos_512_v4
+93/476799/campos_512_v4
+93/476835/campos_512_v4
+93/476839/campos_512_v4
+93/476862/campos_512_v4
+93/476869/campos_512_v4
+93/476920/campos_512_v4
+93/476928/campos_512_v4
+93/476936/campos_512_v4
+93/476980/campos_512_v4
+93/477013/campos_512_v4
+93/477028/campos_512_v4
+93/477058/campos_512_v4
+93/477109/campos_512_v4
+93/477146/campos_512_v4
+93/477167/campos_512_v4
+93/477188/campos_512_v4
+93/477203/campos_512_v4
+93/477241/campos_512_v4
+93/477259/campos_512_v4
+93/477274/campos_512_v4
+93/477388/campos_512_v4
+93/477409/campos_512_v4
+93/477412/campos_512_v4
+93/477419/campos_512_v4
+93/477441/campos_512_v4
+93/477493/campos_512_v4
+93/477498/campos_512_v4
+93/477515/campos_512_v4
+93/477565/campos_512_v4
+93/477566/campos_512_v4
+93/477567/campos_512_v4
+93/477573/campos_512_v4
+93/477592/campos_512_v4
+93/477656/campos_512_v4
+93/477689/campos_512_v4
+93/477731/campos_512_v4
+93/477749/campos_512_v4
+93/477774/campos_512_v4
+93/477778/campos_512_v4
+93/477779/campos_512_v4
+93/477791/campos_512_v4
+93/477797/campos_512_v4
+93/477808/campos_512_v4
+93/477823/campos_512_v4
+93/477863/campos_512_v4
+93/477914/campos_512_v4
+93/477941/campos_512_v4
+93/477958/campos_512_v4
+93/478002/campos_512_v4
+93/478012/campos_512_v4
+93/478014/campos_512_v4
+93/478111/campos_512_v4
+93/478125/campos_512_v4
+93/478147/campos_512_v4
+93/478161/campos_512_v4
+93/478200/campos_512_v4
+93/478277/campos_512_v4
+93/478317/campos_512_v4
+93/478422/campos_512_v4
+93/478448/campos_512_v4
+93/478450/campos_512_v4
+93/478488/campos_512_v4
+93/478513/campos_512_v4
+93/478537/campos_512_v4
+93/478543/campos_512_v4
+93/478556/campos_512_v4
+93/478597/campos_512_v4
+93/478675/campos_512_v4
+93/478725/campos_512_v4
+93/478734/campos_512_v4
+93/478788/campos_512_v4
+93/478899/campos_512_v4
+93/478901/campos_512_v4
+93/478942/campos_512_v4
+93/478961/campos_512_v4
+93/478965/campos_512_v4
+93/478966/campos_512_v4
+93/478977/campos_512_v4
+93/479023/campos_512_v4
+93/479038/campos_512_v4
+93/479074/campos_512_v4
+93/479166/campos_512_v4
+93/479186/campos_512_v4
+93/479192/campos_512_v4
+93/479271/campos_512_v4
+93/479291/campos_512_v4
+93/479343/campos_512_v4
+93/479376/campos_512_v4
+93/479407/campos_512_v4
+93/479427/campos_512_v4
+93/479486/campos_512_v4
+93/479494/campos_512_v4
+93/479527/campos_512_v4
+93/479542/campos_512_v4
+93/479649/campos_512_v4
+93/479683/campos_512_v4
+93/479696/campos_512_v4
+93/479703/campos_512_v4
+93/479706/campos_512_v4
+93/479728/campos_512_v4
+93/479769/campos_512_v4
+93/479826/campos_512_v4
+93/479836/campos_512_v4
+93/479838/campos_512_v4
+93/479857/campos_512_v4
+93/479884/campos_512_v4
+93/479889/campos_512_v4
+93/479906/campos_512_v4
+93/479935/campos_512_v4
+93/479976/campos_512_v4
+93/479983/campos_512_v4
+94/480035/campos_512_v4
+94/480039/campos_512_v4
+94/480051/campos_512_v4
+94/480072/campos_512_v4
+94/480118/campos_512_v4
+94/480124/campos_512_v4
+94/480219/campos_512_v4
+94/480227/campos_512_v4
+94/480236/campos_512_v4
+94/480248/campos_512_v4
+94/480268/campos_512_v4
+94/480292/campos_512_v4
+94/480304/campos_512_v4
+94/480329/campos_512_v4
+94/480340/campos_512_v4
+94/480342/campos_512_v4
+94/480346/campos_512_v4
+94/480358/campos_512_v4
+94/480478/campos_512_v4
+94/480493/campos_512_v4
+94/480496/campos_512_v4
+94/480572/campos_512_v4
+94/480586/campos_512_v4
+94/480646/campos_512_v4
+94/480667/campos_512_v4
+94/480669/campos_512_v4
+94/480709/campos_512_v4
+94/480716/campos_512_v4
+94/480729/campos_512_v4
+94/480756/campos_512_v4
+94/480760/campos_512_v4
+94/480847/campos_512_v4
+94/480866/campos_512_v4
+94/480918/campos_512_v4
+94/480981/campos_512_v4
+94/480992/campos_512_v4
+94/480998/campos_512_v4
+94/481020/campos_512_v4
+94/481038/campos_512_v4
+94/481042/campos_512_v4
+94/481093/campos_512_v4
+94/481094/campos_512_v4
+94/481109/campos_512_v4
+94/481133/campos_512_v4
+94/481134/campos_512_v4
+94/481193/campos_512_v4
+94/481211/campos_512_v4
+94/481247/campos_512_v4
+94/481272/campos_512_v4
+94/481282/campos_512_v4
+94/481283/campos_512_v4
+94/481335/campos_512_v4
+94/481368/campos_512_v4
+94/481432/campos_512_v4
+94/481476/campos_512_v4
+94/481504/campos_512_v4
+94/481560/campos_512_v4
+94/481596/campos_512_v4
+94/481614/campos_512_v4
+94/481629/campos_512_v4
+94/481659/campos_512_v4
+94/481678/campos_512_v4
+94/481718/campos_512_v4
+94/481748/campos_512_v4
+94/481790/campos_512_v4
+94/481810/campos_512_v4
+94/481812/campos_512_v4
+94/481955/campos_512_v4
+94/481956/campos_512_v4
+94/481980/campos_512_v4
+94/482032/campos_512_v4
+94/482056/campos_512_v4
+94/482083/campos_512_v4
+94/482090/campos_512_v4
+94/482094/campos_512_v4
+94/482152/campos_512_v4
+94/482200/campos_512_v4
+94/482206/campos_512_v4
+94/482218/campos_512_v4
+94/482219/campos_512_v4
+94/482230/campos_512_v4
+94/482239/campos_512_v4
+94/482252/campos_512_v4
+94/482267/campos_512_v4
+94/482269/campos_512_v4
+94/482286/campos_512_v4
+94/482306/campos_512_v4
+94/482354/campos_512_v4
+94/482356/campos_512_v4
+94/482376/campos_512_v4
+94/482432/campos_512_v4
+94/482441/campos_512_v4
+94/482452/campos_512_v4
+94/482558/campos_512_v4
+94/482561/campos_512_v4
+94/482573/campos_512_v4
+94/482585/campos_512_v4
+94/482586/campos_512_v4
+94/482602/campos_512_v4
+94/482607/campos_512_v4
+94/482631/campos_512_v4
+94/482668/campos_512_v4
+94/482684/campos_512_v4
+94/482698/campos_512_v4
+94/482700/campos_512_v4
+94/482761/campos_512_v4
+94/482771/campos_512_v4
+94/482788/campos_512_v4
+94/482794/campos_512_v4
+94/482799/campos_512_v4
+94/482819/campos_512_v4
+94/482820/campos_512_v4
+94/482876/campos_512_v4
+94/482881/campos_512_v4
+94/482890/campos_512_v4
+94/482900/campos_512_v4
+94/482923/campos_512_v4
+94/482931/campos_512_v4
+94/482953/campos_512_v4
+94/483105/campos_512_v4
+94/483113/campos_512_v4
+94/483124/campos_512_v4
+94/483136/campos_512_v4
+94/483169/campos_512_v4
+94/483174/campos_512_v4
+94/483209/campos_512_v4
+94/483238/campos_512_v4
+94/483252/campos_512_v4
+94/483313/campos_512_v4
+94/483333/campos_512_v4
+94/483361/campos_512_v4
+94/483430/campos_512_v4
+94/483448/campos_512_v4
+94/483501/campos_512_v4
+94/483518/campos_512_v4
+94/483633/campos_512_v4
+94/483641/campos_512_v4
+94/483655/campos_512_v4
+94/483671/campos_512_v4
+94/483673/campos_512_v4
+94/483681/campos_512_v4
+94/483704/campos_512_v4
+94/483727/campos_512_v4
+94/483757/campos_512_v4
+94/483791/campos_512_v4
+94/483816/campos_512_v4
+94/483846/campos_512_v4
+94/483901/campos_512_v4
+94/483922/campos_512_v4
+94/483953/campos_512_v4
+94/483988/campos_512_v4
+94/484017/campos_512_v4
+94/484032/campos_512_v4
+94/484033/campos_512_v4
+94/484040/campos_512_v4
+94/484046/campos_512_v4
+94/484093/campos_512_v4
+94/484155/campos_512_v4
+94/484169/campos_512_v4
+94/484177/campos_512_v4
+94/484214/campos_512_v4
+94/484234/campos_512_v4
+94/484245/campos_512_v4
+94/484322/campos_512_v4
+94/484381/campos_512_v4
+94/484463/campos_512_v4
+94/484475/campos_512_v4
+94/484499/campos_512_v4
+94/484507/campos_512_v4
+94/484516/campos_512_v4
+94/484518/campos_512_v4
+94/484590/campos_512_v4
+94/484635/campos_512_v4
+94/484639/campos_512_v4
+94/484681/campos_512_v4
+94/484759/campos_512_v4
+94/484767/campos_512_v4
+94/484813/campos_512_v4
+94/484837/campos_512_v4
+94/484856/campos_512_v4
+94/484870/campos_512_v4
+94/484917/campos_512_v4
+94/484934/campos_512_v4
+94/484960/campos_512_v4
+95/485029/campos_512_v4
+95/485048/campos_512_v4
+95/485056/campos_512_v4
+95/485064/campos_512_v4
+95/485081/campos_512_v4
+95/485107/campos_512_v4
+95/485115/campos_512_v4
+95/485157/campos_512_v4
+95/485175/campos_512_v4
+95/485188/campos_512_v4
+95/485206/campos_512_v4
+95/485297/campos_512_v4
+95/485371/campos_512_v4
+95/485383/campos_512_v4
+95/485387/campos_512_v4
+95/485407/campos_512_v4
+95/485458/campos_512_v4
+95/485483/campos_512_v4
+95/485556/campos_512_v4
+95/485645/campos_512_v4
+95/485660/campos_512_v4
+95/485665/campos_512_v4
+95/485703/campos_512_v4
+95/485709/campos_512_v4
+95/485710/campos_512_v4
+95/485722/campos_512_v4
+95/485740/campos_512_v4
+95/485773/campos_512_v4
+95/485776/campos_512_v4
+95/485777/campos_512_v4
+95/485831/campos_512_v4
+95/485839/campos_512_v4
+95/485911/campos_512_v4
+95/485922/campos_512_v4
+95/485961/campos_512_v4
+95/486010/campos_512_v4
+95/486039/campos_512_v4
+95/486071/campos_512_v4
+95/486074/campos_512_v4
+95/486088/campos_512_v4
+95/486092/campos_512_v4
+95/486102/campos_512_v4
+95/486117/campos_512_v4
+95/486143/campos_512_v4
+95/486212/campos_512_v4
+95/486335/campos_512_v4
+95/486338/campos_512_v4
+95/486356/campos_512_v4
+95/486363/campos_512_v4
+95/486365/campos_512_v4
+95/486502/campos_512_v4
+95/486515/campos_512_v4
+95/486522/campos_512_v4
+95/486527/campos_512_v4
+95/486542/campos_512_v4
+95/486580/campos_512_v4
+95/486608/campos_512_v4
+95/486632/campos_512_v4
+95/486670/campos_512_v4
+95/486707/campos_512_v4
+95/486723/campos_512_v4
+95/486727/campos_512_v4
+95/486739/campos_512_v4
+95/486753/campos_512_v4
+95/486784/campos_512_v4
+95/486817/campos_512_v4
+95/486833/campos_512_v4
+95/486857/campos_512_v4
+95/486894/campos_512_v4
+95/486909/campos_512_v4
+95/486973/campos_512_v4
+95/487010/campos_512_v4
+95/487090/campos_512_v4
+95/487177/campos_512_v4
+95/487234/campos_512_v4
+95/487257/campos_512_v4
+95/487285/campos_512_v4
+95/487313/campos_512_v4
+95/487354/campos_512_v4
+95/487368/campos_512_v4
+95/487380/campos_512_v4
+95/487419/campos_512_v4
+95/487451/campos_512_v4
+95/487478/campos_512_v4
+95/487479/campos_512_v4
+95/487484/campos_512_v4
+95/487532/campos_512_v4
+95/487551/campos_512_v4
+95/487559/campos_512_v4
+95/487575/campos_512_v4
+95/487598/campos_512_v4
+95/487621/campos_512_v4
+95/487625/campos_512_v4
+95/487709/campos_512_v4
+95/487729/campos_512_v4
+95/487801/campos_512_v4
+95/487817/campos_512_v4
+95/487926/campos_512_v4
+95/488004/campos_512_v4
+95/488005/campos_512_v4
+95/488068/campos_512_v4
+95/488109/campos_512_v4
+95/488166/campos_512_v4
+95/488210/campos_512_v4
+95/488230/campos_512_v4
+95/488266/campos_512_v4
+95/488294/campos_512_v4
+95/488336/campos_512_v4
+95/488359/campos_512_v4
+95/488384/campos_512_v4
+95/488407/campos_512_v4
+95/488484/campos_512_v4
+95/488512/campos_512_v4
+95/488565/campos_512_v4
+95/488577/campos_512_v4
+95/488645/campos_512_v4
+95/488651/campos_512_v4
+95/488661/campos_512_v4
+95/488718/campos_512_v4
+95/488735/campos_512_v4
+95/488793/campos_512_v4
+95/488829/campos_512_v4
+95/488833/campos_512_v4
+95/488846/campos_512_v4
+95/488886/campos_512_v4
+95/488915/campos_512_v4
+95/488923/campos_512_v4
+95/488999/campos_512_v4
+95/489013/campos_512_v4
+95/489030/campos_512_v4
+95/489034/campos_512_v4
+95/489071/campos_512_v4
+95/489098/campos_512_v4
+95/489116/campos_512_v4
+95/489279/campos_512_v4
+95/489286/campos_512_v4
+95/489300/campos_512_v4
+95/489322/campos_512_v4
+95/489358/campos_512_v4
+95/489377/campos_512_v4
+95/489434/campos_512_v4
+95/489455/campos_512_v4
+95/489460/campos_512_v4
+95/489484/campos_512_v4
+95/489485/campos_512_v4
+95/489562/campos_512_v4
+95/489566/campos_512_v4
+95/489613/campos_512_v4
+95/489643/campos_512_v4
+95/489658/campos_512_v4
+95/489664/campos_512_v4
+95/489744/campos_512_v4
+95/489802/campos_512_v4
+95/489817/campos_512_v4
+95/489910/campos_512_v4
+95/489928/campos_512_v4
+96/490024/campos_512_v4
+96/490034/campos_512_v4
+96/490038/campos_512_v4
+96/490046/campos_512_v4
+96/490085/campos_512_v4
+96/490111/campos_512_v4
+96/490113/campos_512_v4
+96/490118/campos_512_v4
+96/490169/campos_512_v4
+96/490230/campos_512_v4
+96/490238/campos_512_v4
+96/490253/campos_512_v4
+96/490255/campos_512_v4
+96/490301/campos_512_v4
+96/490363/campos_512_v4
+96/490402/campos_512_v4
+96/490403/campos_512_v4
+96/490509/campos_512_v4
+96/490542/campos_512_v4
+96/490565/campos_512_v4
+96/490627/campos_512_v4
+96/490631/campos_512_v4
+96/490635/campos_512_v4
+96/490684/campos_512_v4
+96/490688/campos_512_v4
+96/490713/campos_512_v4
+96/490714/campos_512_v4
+96/490735/campos_512_v4
+96/490793/campos_512_v4
+96/490803/campos_512_v4
+96/490807/campos_512_v4
+96/490809/campos_512_v4
+96/490823/campos_512_v4
+96/490906/campos_512_v4
+96/490940/campos_512_v4
+96/490964/campos_512_v4
+96/490992/campos_512_v4
+96/490993/campos_512_v4
+96/491010/campos_512_v4
+96/491057/campos_512_v4
+96/491089/campos_512_v4
+96/491130/campos_512_v4
+96/491154/campos_512_v4
+96/491264/campos_512_v4
+96/491265/campos_512_v4
+96/491272/campos_512_v4
+96/491288/campos_512_v4
+96/491289/campos_512_v4
+96/491323/campos_512_v4
+96/491336/campos_512_v4
+96/491346/campos_512_v4
+96/491378/campos_512_v4
+96/491385/campos_512_v4
+96/491406/campos_512_v4
+96/491444/campos_512_v4
+96/491465/campos_512_v4
+96/491583/campos_512_v4
+96/491584/campos_512_v4
+96/491633/campos_512_v4
+96/491648/campos_512_v4
+96/491704/campos_512_v4
+96/491705/campos_512_v4
+96/491714/campos_512_v4
+96/491722/campos_512_v4
+96/491790/campos_512_v4
+96/491843/campos_512_v4
+96/491853/campos_512_v4
+96/491885/campos_512_v4
+96/491898/campos_512_v4
+96/491940/campos_512_v4
+96/491942/campos_512_v4
+96/491953/campos_512_v4
+96/491993/campos_512_v4
+96/492005/campos_512_v4
+96/492070/campos_512_v4
+96/492087/campos_512_v4
+96/492088/campos_512_v4
+96/492105/campos_512_v4
+96/492118/campos_512_v4
+96/492128/campos_512_v4
+96/492278/campos_512_v4
+96/492321/campos_512_v4
+96/492325/campos_512_v4
+96/492370/campos_512_v4
+96/492413/campos_512_v4
+96/492538/campos_512_v4
+96/492542/campos_512_v4
+96/492553/campos_512_v4
+96/492619/campos_512_v4
+96/492623/campos_512_v4
+96/492665/campos_512_v4
+96/493031/campos_512_v4
+96/493092/campos_512_v4
+96/493207/campos_512_v4
+96/493229/campos_512_v4
+96/493237/campos_512_v4
+96/493244/campos_512_v4
+96/493341/campos_512_v4
+96/493344/campos_512_v4
+96/493383/campos_512_v4
+96/493398/campos_512_v4
+96/493402/campos_512_v4
+96/493405/campos_512_v4
+96/493430/campos_512_v4
+96/493502/campos_512_v4
+96/493520/campos_512_v4
+96/493521/campos_512_v4
+96/493548/campos_512_v4
+96/493592/campos_512_v4
+96/493619/campos_512_v4
+96/493649/campos_512_v4
+96/493682/campos_512_v4
+96/493721/campos_512_v4
+96/493761/campos_512_v4
+96/493766/campos_512_v4
+96/493794/campos_512_v4
+96/493822/campos_512_v4
+96/493840/campos_512_v4
+96/493853/campos_512_v4
+96/493879/campos_512_v4
+96/493946/campos_512_v4
+96/493947/campos_512_v4
+96/494006/campos_512_v4
+96/494067/campos_512_v4
+96/494099/campos_512_v4
+96/494103/campos_512_v4
+96/494106/campos_512_v4
+96/494107/campos_512_v4
+96/494170/campos_512_v4
+96/494176/campos_512_v4
+96/494231/campos_512_v4
+96/494234/campos_512_v4
+96/494247/campos_512_v4
+96/494249/campos_512_v4
+96/494260/campos_512_v4
+96/494287/campos_512_v4
+96/494292/campos_512_v4
+96/494369/campos_512_v4
+96/494392/campos_512_v4
+96/494395/campos_512_v4
+96/494420/campos_512_v4
+96/494446/campos_512_v4
+96/494491/campos_512_v4
+96/494505/campos_512_v4
+96/494514/campos_512_v4
+96/494516/campos_512_v4
+96/494533/campos_512_v4
+96/494563/campos_512_v4
+96/494571/campos_512_v4
+96/494620/campos_512_v4
+96/494710/campos_512_v4
+96/494716/campos_512_v4
+96/494722/campos_512_v4
+96/494744/campos_512_v4
+96/494746/campos_512_v4
+96/494757/campos_512_v4
+96/494775/campos_512_v4
+96/494777/campos_512_v4
+96/494829/campos_512_v4
+96/494931/campos_512_v4
+96/494941/campos_512_v4
+96/494986/campos_512_v4
+97/495061/campos_512_v4
+97/495070/campos_512_v4
+97/495077/campos_512_v4
+97/495163/campos_512_v4
+97/495171/campos_512_v4
+97/495212/campos_512_v4
+97/495255/campos_512_v4
+97/495290/campos_512_v4
+97/495308/campos_512_v4
+97/495313/campos_512_v4
+97/495322/campos_512_v4
+97/495364/campos_512_v4
+97/495380/campos_512_v4
+97/495404/campos_512_v4
+97/495488/campos_512_v4
+97/495506/campos_512_v4
+97/495544/campos_512_v4
+97/495553/campos_512_v4
+97/495561/campos_512_v4
+97/495565/campos_512_v4
+97/495604/campos_512_v4
+97/495675/campos_512_v4
+97/495781/campos_512_v4
+97/495782/campos_512_v4
+97/495799/campos_512_v4
+97/495801/campos_512_v4
+97/495826/campos_512_v4
+97/495897/campos_512_v4
+97/495926/campos_512_v4
+97/495939/campos_512_v4
+97/496018/campos_512_v4
+97/496042/campos_512_v4
+97/496077/campos_512_v4
+97/496088/campos_512_v4
+97/496093/campos_512_v4
+97/496126/campos_512_v4
+97/496137/campos_512_v4
+97/496167/campos_512_v4
+97/496258/campos_512_v4
+97/496296/campos_512_v4
+97/496297/campos_512_v4
+97/496390/campos_512_v4
+97/496438/campos_512_v4
+97/496470/campos_512_v4
+97/496482/campos_512_v4
+97/496490/campos_512_v4
+97/496524/campos_512_v4
+97/496559/campos_512_v4
+97/496591/campos_512_v4
+97/496594/campos_512_v4
+97/496599/campos_512_v4
+97/496601/campos_512_v4
+97/496623/campos_512_v4
+97/496658/campos_512_v4
+97/496690/campos_512_v4
+97/496714/campos_512_v4
+97/496732/campos_512_v4
+97/496734/campos_512_v4
+97/496735/campos_512_v4
+97/496756/campos_512_v4
+97/496776/campos_512_v4
+97/496809/campos_512_v4
+97/496812/campos_512_v4
+97/496848/campos_512_v4
+97/496851/campos_512_v4
+97/496884/campos_512_v4
+97/496934/campos_512_v4
+97/496948/campos_512_v4
+97/497010/campos_512_v4
+97/497052/campos_512_v4
+97/497088/campos_512_v4
+97/497102/campos_512_v4
+97/497158/campos_512_v4
+97/497161/campos_512_v4
+97/497188/campos_512_v4
+97/497213/campos_512_v4
+97/497264/campos_512_v4
+97/497273/campos_512_v4
+97/497279/campos_512_v4
+97/497372/campos_512_v4
+97/497374/campos_512_v4
+97/497375/campos_512_v4
+97/497429/campos_512_v4
+97/497440/campos_512_v4
+97/497453/campos_512_v4
+97/497461/campos_512_v4
+97/497534/campos_512_v4
+97/497544/campos_512_v4
+97/497591/campos_512_v4
+97/497600/campos_512_v4
+97/497614/campos_512_v4
+97/497657/campos_512_v4
+97/497667/campos_512_v4
+97/497688/campos_512_v4
+97/497696/campos_512_v4
+97/497711/campos_512_v4
+97/497725/campos_512_v4
+97/497749/campos_512_v4
+97/497776/campos_512_v4
+97/497789/campos_512_v4
+97/497831/campos_512_v4
+97/497837/campos_512_v4
+97/497861/campos_512_v4
+97/497933/campos_512_v4
+97/497959/campos_512_v4
+97/497967/campos_512_v4
+97/498047/campos_512_v4
+97/498072/campos_512_v4
+97/498086/campos_512_v4
+97/498087/campos_512_v4
+97/498127/campos_512_v4
+97/498131/campos_512_v4
+97/498141/campos_512_v4
+97/498165/campos_512_v4
+97/498315/campos_512_v4
+97/498359/campos_512_v4
+97/498389/campos_512_v4
+97/498395/campos_512_v4
+97/498431/campos_512_v4
+97/498444/campos_512_v4
+97/498540/campos_512_v4
+97/498572/campos_512_v4
+97/498581/campos_512_v4
+97/498584/campos_512_v4
+97/498636/campos_512_v4
+97/498646/campos_512_v4
+97/498654/campos_512_v4
+97/498664/campos_512_v4
+97/498674/campos_512_v4
+97/498683/campos_512_v4
+97/498774/campos_512_v4
+97/498780/campos_512_v4
+97/498844/campos_512_v4
+97/498846/campos_512_v4
+97/498978/campos_512_v4
+97/498993/campos_512_v4
+97/499014/campos_512_v4
+97/499028/campos_512_v4
+97/499105/campos_512_v4
+97/499157/campos_512_v4
+97/499167/campos_512_v4
+97/499261/campos_512_v4
+97/499270/campos_512_v4
+97/499291/campos_512_v4
+97/499308/campos_512_v4
+97/499318/campos_512_v4
+97/499321/campos_512_v4
+97/499344/campos_512_v4
+97/499439/campos_512_v4
+97/499462/campos_512_v4
+97/499481/campos_512_v4
+97/499496/campos_512_v4
+97/499600/campos_512_v4
+97/499606/campos_512_v4
+97/499626/campos_512_v4
+97/499627/campos_512_v4
+97/499635/campos_512_v4
+97/499723/campos_512_v4
+97/499889/campos_512_v4
+97/499893/campos_512_v4
+97/499897/campos_512_v4
+97/499904/campos_512_v4
+97/499913/campos_512_v4
+97/499919/campos_512_v4
+97/499937/campos_512_v4
+97/499971/campos_512_v4
+98/500017/campos_512_v4
+98/500019/campos_512_v4
+98/500067/campos_512_v4
+98/500081/campos_512_v4
+98/500124/campos_512_v4
+98/500188/campos_512_v4
+98/500197/campos_512_v4
+98/500223/campos_512_v4
+98/500248/campos_512_v4
+98/500311/campos_512_v4
+98/500313/campos_512_v4
+98/500323/campos_512_v4
+98/500373/campos_512_v4
+98/500474/campos_512_v4
+98/500491/campos_512_v4
+98/500518/campos_512_v4
+98/500526/campos_512_v4
+98/500568/campos_512_v4
+98/500580/campos_512_v4
+98/500648/campos_512_v4
+98/500663/campos_512_v4
+98/500680/campos_512_v4
+98/500729/campos_512_v4
+98/500761/campos_512_v4
+98/500766/campos_512_v4
+98/500777/campos_512_v4
+98/500795/campos_512_v4
+98/500836/campos_512_v4
+98/500851/campos_512_v4
+98/500968/campos_512_v4
+98/501029/campos_512_v4
+98/501048/campos_512_v4
+98/501057/campos_512_v4
+98/501074/campos_512_v4
+98/501108/campos_512_v4
+98/501135/campos_512_v4
+98/501254/campos_512_v4
+98/501258/campos_512_v4
+98/501263/campos_512_v4
+98/501301/campos_512_v4
+98/501351/campos_512_v4
+98/501353/campos_512_v4
+98/501364/campos_512_v4
+98/501382/campos_512_v4
+98/501413/campos_512_v4
+98/501423/campos_512_v4
+98/501487/campos_512_v4
+98/501491/campos_512_v4
+98/501545/campos_512_v4
+98/501600/campos_512_v4
+98/501621/campos_512_v4
+98/501663/campos_512_v4
+98/501670/campos_512_v4
+98/501709/campos_512_v4
+98/501733/campos_512_v4
+98/501738/campos_512_v4
+98/501771/campos_512_v4
+98/501790/campos_512_v4
+98/501846/campos_512_v4
+98/501868/campos_512_v4
+98/501947/campos_512_v4
+98/501949/campos_512_v4
+98/501952/campos_512_v4
+98/502001/campos_512_v4
+98/502023/campos_512_v4
+98/502036/campos_512_v4
+98/502054/campos_512_v4
+98/502057/campos_512_v4
+98/502062/campos_512_v4
+98/502096/campos_512_v4
+98/502097/campos_512_v4
+98/502136/campos_512_v4
+98/502141/campos_512_v4
+98/502158/campos_512_v4
+98/502178/campos_512_v4
+98/502223/campos_512_v4
+98/502271/campos_512_v4
+98/502285/campos_512_v4
+98/502364/campos_512_v4
+98/502385/campos_512_v4
+98/502465/campos_512_v4
+98/502467/campos_512_v4
+98/502567/campos_512_v4
+98/502594/campos_512_v4
+98/502613/campos_512_v4
+98/502674/campos_512_v4
+98/502699/campos_512_v4
+98/502704/campos_512_v4
+98/502771/campos_512_v4
+98/502810/campos_512_v4
+98/502817/campos_512_v4
+98/502846/campos_512_v4
+98/502882/campos_512_v4
+98/502891/campos_512_v4
+98/502932/campos_512_v4
+98/502963/campos_512_v4
+98/503020/campos_512_v4
+98/503134/campos_512_v4
+98/503149/campos_512_v4
+98/503202/campos_512_v4
+98/503210/campos_512_v4
+98/503212/campos_512_v4
+98/503234/campos_512_v4
+98/503292/campos_512_v4
+98/503435/campos_512_v4
+98/503497/campos_512_v4
+98/503501/campos_512_v4
+98/503652/campos_512_v4
+98/503686/campos_512_v4
+98/503715/campos_512_v4
+98/503720/campos_512_v4
+98/503721/campos_512_v4
+98/503744/campos_512_v4
+98/503795/campos_512_v4
+98/503801/campos_512_v4
+98/503869/campos_512_v4
+98/503879/campos_512_v4
+98/503894/campos_512_v4
+98/503916/campos_512_v4
+98/503936/campos_512_v4
+98/503967/campos_512_v4
+98/504013/campos_512_v4
+98/504057/campos_512_v4
+98/504079/campos_512_v4
+98/504113/campos_512_v4
+98/504133/campos_512_v4
+98/504143/campos_512_v4
+98/504183/campos_512_v4
+98/504228/campos_512_v4
+98/504230/campos_512_v4
+98/504266/campos_512_v4
+98/504267/campos_512_v4
+98/504400/campos_512_v4
+98/504463/campos_512_v4
+98/504486/campos_512_v4
+98/504508/campos_512_v4
+98/504516/campos_512_v4
+98/504542/campos_512_v4
+98/504577/campos_512_v4
+98/504726/campos_512_v4
+98/504775/campos_512_v4
+98/504792/campos_512_v4
+98/504819/campos_512_v4
+98/504824/campos_512_v4
+98/504858/campos_512_v4
+98/504922/campos_512_v4
+98/504940/campos_512_v4
+98/504943/campos_512_v4
+98/504953/campos_512_v4
+99/505025/campos_512_v4
+99/505051/campos_512_v4
+99/505055/campos_512_v4
+99/505132/campos_512_v4
+99/505137/campos_512_v4
+99/505196/campos_512_v4
+99/505206/campos_512_v4
+99/505226/campos_512_v4
+99/505241/campos_512_v4
+99/505292/campos_512_v4
+99/505363/campos_512_v4
+99/505364/campos_512_v4
+99/505429/campos_512_v4
+99/505458/campos_512_v4
+99/505483/campos_512_v4
+99/505516/campos_512_v4
+99/505534/campos_512_v4
+99/505665/campos_512_v4
+99/505670/campos_512_v4
+99/505708/campos_512_v4
+99/505729/campos_512_v4
+99/505746/campos_512_v4
+99/505810/campos_512_v4
+99/505851/campos_512_v4
+99/505932/campos_512_v4
+99/505942/campos_512_v4
+99/505949/campos_512_v4
+99/506063/campos_512_v4
+99/506105/campos_512_v4
+99/506137/campos_512_v4
+99/506186/campos_512_v4
+99/506208/campos_512_v4
+99/506229/campos_512_v4
+99/506249/campos_512_v4
+99/506250/campos_512_v4
+99/506321/campos_512_v4
+99/506323/campos_512_v4
+99/506330/campos_512_v4
+99/506389/campos_512_v4
+99/506410/campos_512_v4
+99/506438/campos_512_v4
+99/506478/campos_512_v4
+99/506480/campos_512_v4
+99/506495/campos_512_v4
+99/506546/campos_512_v4
+99/506573/campos_512_v4
+99/506613/campos_512_v4
+99/506618/campos_512_v4
+99/506621/campos_512_v4
+99/506661/campos_512_v4
+99/506686/campos_512_v4
+99/506718/campos_512_v4
+99/506723/campos_512_v4
+99/506789/campos_512_v4
+99/506803/campos_512_v4
+99/506841/campos_512_v4
+99/506854/campos_512_v4
+99/506862/campos_512_v4
+99/506880/campos_512_v4
+99/506905/campos_512_v4
+99/506909/campos_512_v4
+99/506920/campos_512_v4
+99/506948/campos_512_v4
+99/506967/campos_512_v4
+99/506973/campos_512_v4
+99/506978/campos_512_v4
+99/507078/campos_512_v4
+99/507166/campos_512_v4
+99/507223/campos_512_v4
+99/507240/campos_512_v4
+99/507321/campos_512_v4
+99/507340/campos_512_v4
+99/507352/campos_512_v4
+99/507369/campos_512_v4
+99/507396/campos_512_v4
+99/507431/campos_512_v4
+99/507436/campos_512_v4
+99/507452/campos_512_v4
+99/507475/campos_512_v4
+99/507476/campos_512_v4
+99/507524/campos_512_v4
+99/507528/campos_512_v4
+99/507591/campos_512_v4
+99/507592/campos_512_v4
+99/507599/campos_512_v4
+99/507628/campos_512_v4
+99/507657/campos_512_v4
+99/507697/campos_512_v4
+99/507701/campos_512_v4
+99/507764/campos_512_v4
+99/507776/campos_512_v4
+99/507783/campos_512_v4
+99/507818/campos_512_v4
+99/507826/campos_512_v4
+99/507835/campos_512_v4
+99/507853/campos_512_v4
+99/507900/campos_512_v4
+99/507902/campos_512_v4
+99/507915/campos_512_v4
+99/507928/campos_512_v4
+99/507930/campos_512_v4
+99/507944/campos_512_v4
+99/508020/campos_512_v4
+99/508026/campos_512_v4
+99/508061/campos_512_v4
+99/508086/campos_512_v4
+99/508120/campos_512_v4
+99/508121/campos_512_v4
+99/508122/campos_512_v4
+99/508133/campos_512_v4
+99/508195/campos_512_v4
+99/508201/campos_512_v4
+99/508230/campos_512_v4
+99/508242/campos_512_v4
+99/508247/campos_512_v4
+99/508267/campos_512_v4
+99/508318/campos_512_v4
+99/508336/campos_512_v4
+99/508389/campos_512_v4
+99/508524/campos_512_v4
+99/508536/campos_512_v4
+99/508538/campos_512_v4
+99/508540/campos_512_v4
+99/508569/campos_512_v4
+99/508608/campos_512_v4
+99/508650/campos_512_v4
+99/508654/campos_512_v4
+99/508691/campos_512_v4
+99/508696/campos_512_v4
+99/508712/campos_512_v4
+99/508746/campos_512_v4
+99/508771/campos_512_v4
+99/508815/campos_512_v4
+99/508831/campos_512_v4
+99/508881/campos_512_v4
+99/508888/campos_512_v4
+99/508924/campos_512_v4
+99/508959/campos_512_v4
+99/508987/campos_512_v4
+99/509003/campos_512_v4
+99/509012/campos_512_v4
+99/509017/campos_512_v4
+99/509174/campos_512_v4
+99/509210/campos_512_v4
+99/509222/campos_512_v4
+99/509253/campos_512_v4
+99/509287/campos_512_v4
+99/509303/campos_512_v4
+99/509351/campos_512_v4
+99/509440/campos_512_v4
+99/509450/campos_512_v4
+99/509465/campos_512_v4
+99/509478/campos_512_v4
+99/509519/campos_512_v4
+99/509598/campos_512_v4
+99/509624/campos_512_v4
+99/509628/campos_512_v4
+99/509673/campos_512_v4
+99/509799/campos_512_v4
+99/509805/campos_512_v4
+99/509806/campos_512_v4
+99/509820/campos_512_v4
+99/509826/campos_512_v4
+99/509830/campos_512_v4
+99/509863/campos_512_v4
+99/509867/campos_512_v4
+99/509917/campos_512_v4
+99/509982/campos_512_v4
+99/509989/campos_512_v4
diff --git a/shell_scripts/raw_img_list/Electronics.txt b/shell_scripts/raw_img_list/Electronics.txt
new file mode 100644
index 0000000000000000000000000000000000000000..711b8f07d9aa02f2379513c6021254313ba97f31
--- /dev/null
+++ b/shell_scripts/raw_img_list/Electronics.txt
@@ -0,0 +1,6216 @@
+0/10007/campos_512_v4
+0/10347/campos_512_v4
+0/10363/campos_512_v4
+0/10374/campos_512_v4
+0/10387/campos_512_v4
+0/10395/campos_512_v4
+0/10649/campos_512_v4
+0/10668/campos_512_v4
+0/10689/campos_512_v4
+0/10809/campos_512_v4
+0/10880/campos_512_v4
+0/11290/campos_512_v4
+0/11359/campos_512_v4
+0/11523/campos_512_v4
+0/11594/campos_512_v4
+0/11711/campos_512_v4
+0/12317/campos_512_v4
+0/12740/campos_512_v4
+0/13074/campos_512_v4
+0/13849/campos_512_v4
+0/13912/campos_512_v4
+0/14158/campos_512_v4
+0/14162/campos_512_v4
+0/14400/campos_512_v4
+0/14566/campos_512_v4
+0/14805/campos_512_v4
+0/14884/campos_512_v4
+0/14925/campos_512_v4
+1/15002/campos_512_v4
+1/15081/campos_512_v4
+1/15119/campos_512_v4
+1/15429/campos_512_v4
+1/15809/campos_512_v4
+1/15975/campos_512_v4
+1/16018/campos_512_v4
+1/16229/campos_512_v4
+1/16332/campos_512_v4
+1/16588/campos_512_v4
+1/17395/campos_512_v4
+1/17615/campos_512_v4
+1/17677/campos_512_v4
+1/17929/campos_512_v4
+1/18036/campos_512_v4
+1/18172/campos_512_v4
+1/18590/campos_512_v4
+1/19009/campos_512_v4
+1/19259/campos_512_v4
+1/19460/campos_512_v4
+1/19480/campos_512_v4
+1/19613/campos_512_v4
+10/60888/campos_512_v4
+10/61072/campos_512_v4
+10/61452/campos_512_v4
+10/61872/campos_512_v4
+10/62950/campos_512_v4
+10/63222/campos_512_v4
+10/63253/campos_512_v4
+10/63367/campos_512_v4
+10/63402/campos_512_v4
+10/63461/campos_512_v4
+10/63634/campos_512_v4
+10/64691/campos_512_v4
+100/510123/campos_512_v4
+100/510248/campos_512_v4
+100/510514/campos_512_v4
+100/510653/campos_512_v4
+100/510707/campos_512_v4
+100/510739/campos_512_v4
+100/510839/campos_512_v4
+100/510841/campos_512_v4
+100/511021/campos_512_v4
+100/511062/campos_512_v4
+100/511262/campos_512_v4
+100/511291/campos_512_v4
+100/511430/campos_512_v4
+100/511555/campos_512_v4
+100/511572/campos_512_v4
+100/511595/campos_512_v4
+100/511607/campos_512_v4
+100/511713/campos_512_v4
+100/511786/campos_512_v4
+100/511848/campos_512_v4
+100/512010/campos_512_v4
+100/512054/campos_512_v4
+100/512099/campos_512_v4
+100/512193/campos_512_v4
+100/512316/campos_512_v4
+100/512725/campos_512_v4
+100/512793/campos_512_v4
+100/512984/campos_512_v4
+100/513151/campos_512_v4
+100/513353/campos_512_v4
+100/513369/campos_512_v4
+100/513611/campos_512_v4
+100/513678/campos_512_v4
+100/513690/campos_512_v4
+100/513821/campos_512_v4
+100/513885/campos_512_v4
+100/513905/campos_512_v4
+100/514009/campos_512_v4
+100/514026/campos_512_v4
+100/514104/campos_512_v4
+100/514153/campos_512_v4
+100/514162/campos_512_v4
+100/514232/campos_512_v4
+100/514320/campos_512_v4
+100/514478/campos_512_v4
+100/514537/campos_512_v4
+100/514629/campos_512_v4
+100/514788/campos_512_v4
+100/514817/campos_512_v4
+100/514884/campos_512_v4
+100/514893/campos_512_v4
+101/515249/campos_512_v4
+101/515435/campos_512_v4
+101/515574/campos_512_v4
+101/515669/campos_512_v4
+101/515676/campos_512_v4
+101/516114/campos_512_v4
+101/516183/campos_512_v4
+101/516233/campos_512_v4
+101/516283/campos_512_v4
+101/516331/campos_512_v4
+101/516464/campos_512_v4
+101/516468/campos_512_v4
+101/516551/campos_512_v4
+101/516704/campos_512_v4
+101/516889/campos_512_v4
+101/517080/campos_512_v4
+101/517098/campos_512_v4
+101/517151/campos_512_v4
+101/517283/campos_512_v4
+101/517381/campos_512_v4
+101/517395/campos_512_v4
+101/517617/campos_512_v4
+101/517786/campos_512_v4
+101/517981/campos_512_v4
+101/518000/campos_512_v4
+101/518086/campos_512_v4
+101/518090/campos_512_v4
+101/518142/campos_512_v4
+101/518186/campos_512_v4
+101/518224/campos_512_v4
+101/518274/campos_512_v4
+101/518279/campos_512_v4
+101/518826/campos_512_v4
+101/518850/campos_512_v4
+101/518867/campos_512_v4
+101/518899/campos_512_v4
+101/519006/campos_512_v4
+101/519231/campos_512_v4
+101/519399/campos_512_v4
+101/519589/campos_512_v4
+101/519693/campos_512_v4
+101/519845/campos_512_v4
+101/519941/campos_512_v4
+102/520092/campos_512_v4
+102/520179/campos_512_v4
+102/520272/campos_512_v4
+102/520317/campos_512_v4
+102/520401/campos_512_v4
+102/520593/campos_512_v4
+102/521073/campos_512_v4
+102/521113/campos_512_v4
+102/521127/campos_512_v4
+102/521171/campos_512_v4
+102/521505/campos_512_v4
+102/521652/campos_512_v4
+102/521673/campos_512_v4
+102/521795/campos_512_v4
+102/522283/campos_512_v4
+102/522662/campos_512_v4
+102/522702/campos_512_v4
+102/522736/campos_512_v4
+102/522875/campos_512_v4
+102/523036/campos_512_v4
+102/523037/campos_512_v4
+102/523065/campos_512_v4
+102/523215/campos_512_v4
+102/523323/campos_512_v4
+102/523365/campos_512_v4
+102/523609/campos_512_v4
+102/523725/campos_512_v4
+102/523747/campos_512_v4
+102/523765/campos_512_v4
+102/523806/campos_512_v4
+102/523959/campos_512_v4
+102/524034/campos_512_v4
+102/524292/campos_512_v4
+102/524479/campos_512_v4
+102/524531/campos_512_v4
+102/524636/campos_512_v4
+102/524991/campos_512_v4
+103/525008/campos_512_v4
+103/525018/campos_512_v4
+103/525034/campos_512_v4
+103/525402/campos_512_v4
+103/525474/campos_512_v4
+103/525504/campos_512_v4
+103/525507/campos_512_v4
+103/525805/campos_512_v4
+103/525834/campos_512_v4
+103/525953/campos_512_v4
+103/525959/campos_512_v4
+103/526024/campos_512_v4
+103/526167/campos_512_v4
+103/526526/campos_512_v4
+103/527468/campos_512_v4
+103/527506/campos_512_v4
+103/527509/campos_512_v4
+103/527727/campos_512_v4
+103/527738/campos_512_v4
+103/527910/campos_512_v4
+103/528204/campos_512_v4
+103/528221/campos_512_v4
+103/528354/campos_512_v4
+103/528678/campos_512_v4
+103/528738/campos_512_v4
+103/528776/campos_512_v4
+103/528806/campos_512_v4
+103/529023/campos_512_v4
+103/529203/campos_512_v4
+103/529476/campos_512_v4
+103/529549/campos_512_v4
+103/529713/campos_512_v4
+103/529845/campos_512_v4
+104/530123/campos_512_v4
+104/530598/campos_512_v4
+104/530693/campos_512_v4
+104/530775/campos_512_v4
+104/530950/campos_512_v4
+104/531029/campos_512_v4
+104/531032/campos_512_v4
+104/531129/campos_512_v4
+104/531296/campos_512_v4
+104/531494/campos_512_v4
+104/531631/campos_512_v4
+104/531718/campos_512_v4
+104/531762/campos_512_v4
+104/531852/campos_512_v4
+104/531986/campos_512_v4
+104/532078/campos_512_v4
+104/532213/campos_512_v4
+104/532432/campos_512_v4
+104/532492/campos_512_v4
+104/532493/campos_512_v4
+104/532503/campos_512_v4
+104/532531/campos_512_v4
+104/532542/campos_512_v4
+104/532550/campos_512_v4
+104/532636/campos_512_v4
+104/532637/campos_512_v4
+104/532689/campos_512_v4
+104/532760/campos_512_v4
+104/532813/campos_512_v4
+104/533284/campos_512_v4
+104/533355/campos_512_v4
+104/533503/campos_512_v4
+104/533886/campos_512_v4
+104/533977/campos_512_v4
+104/533983/campos_512_v4
+104/533984/campos_512_v4
+104/534121/campos_512_v4
+104/534140/campos_512_v4
+104/534147/campos_512_v4
+104/534150/campos_512_v4
+104/534156/campos_512_v4
+104/534190/campos_512_v4
+104/534200/campos_512_v4
+104/534418/campos_512_v4
+104/534460/campos_512_v4
+104/534532/campos_512_v4
+104/534649/campos_512_v4
+104/534679/campos_512_v4
+104/534977/campos_512_v4
+105/535374/campos_512_v4
+105/535436/campos_512_v4
+105/535490/campos_512_v4
+105/535533/campos_512_v4
+105/535540/campos_512_v4
+105/535578/campos_512_v4
+105/535590/campos_512_v4
+105/535591/campos_512_v4
+105/535593/campos_512_v4
+105/535658/campos_512_v4
+105/535676/campos_512_v4
+105/535799/campos_512_v4
+105/535808/campos_512_v4
+105/535849/campos_512_v4
+105/535967/campos_512_v4
+105/536121/campos_512_v4
+105/536218/campos_512_v4
+105/536380/campos_512_v4
+105/536424/campos_512_v4
+105/536482/campos_512_v4
+105/536540/campos_512_v4
+105/536581/campos_512_v4
+105/536690/campos_512_v4
+105/536895/campos_512_v4
+105/536966/campos_512_v4
+105/536967/campos_512_v4
+105/537305/campos_512_v4
+105/537339/campos_512_v4
+105/537560/campos_512_v4
+105/537583/campos_512_v4
+105/537628/campos_512_v4
+105/537631/campos_512_v4
+105/537722/campos_512_v4
+105/537806/campos_512_v4
+105/537842/campos_512_v4
+105/537864/campos_512_v4
+105/538153/campos_512_v4
+105/538589/campos_512_v4
+105/538654/campos_512_v4
+105/538672/campos_512_v4
+105/538729/campos_512_v4
+105/538747/campos_512_v4
+105/538935/campos_512_v4
+105/538962/campos_512_v4
+105/539022/campos_512_v4
+105/539148/campos_512_v4
+105/539240/campos_512_v4
+105/539444/campos_512_v4
+105/539639/campos_512_v4
+105/539701/campos_512_v4
+105/539716/campos_512_v4
+105/539759/campos_512_v4
+105/539957/campos_512_v4
+105/539985/campos_512_v4
+106/540040/campos_512_v4
+106/540224/campos_512_v4
+106/540237/campos_512_v4
+106/540254/campos_512_v4
+106/540280/campos_512_v4
+106/540504/campos_512_v4
+106/540584/campos_512_v4
+106/540782/campos_512_v4
+106/540908/campos_512_v4
+106/541056/campos_512_v4
+106/541077/campos_512_v4
+106/541351/campos_512_v4
+106/541529/campos_512_v4
+106/541612/campos_512_v4
+106/541628/campos_512_v4
+106/541753/campos_512_v4
+106/541955/campos_512_v4
+106/541971/campos_512_v4
+106/542023/campos_512_v4
+106/542111/campos_512_v4
+106/542115/campos_512_v4
+106/542351/campos_512_v4
+106/542493/campos_512_v4
+106/542583/campos_512_v4
+106/542642/campos_512_v4
+106/542731/campos_512_v4
+106/542795/campos_512_v4
+106/543360/campos_512_v4
+106/543566/campos_512_v4
+106/543623/campos_512_v4
+106/543714/campos_512_v4
+106/543858/campos_512_v4
+106/543869/campos_512_v4
+106/543895/campos_512_v4
+106/544286/campos_512_v4
+106/544559/campos_512_v4
+106/544695/campos_512_v4
+106/544738/campos_512_v4
+106/544930/campos_512_v4
+107/545125/campos_512_v4
+107/545229/campos_512_v4
+107/545232/campos_512_v4
+107/545326/campos_512_v4
+107/545345/campos_512_v4
+107/545602/campos_512_v4
+107/545881/campos_512_v4
+107/546030/campos_512_v4
+107/546038/campos_512_v4
+107/546060/campos_512_v4
+107/546187/campos_512_v4
+107/546196/campos_512_v4
+107/546243/campos_512_v4
+107/546621/campos_512_v4
+107/546741/campos_512_v4
+107/546803/campos_512_v4
+107/546821/campos_512_v4
+107/546830/campos_512_v4
+107/546849/campos_512_v4
+107/546962/campos_512_v4
+107/546993/campos_512_v4
+107/547154/campos_512_v4
+107/547239/campos_512_v4
+107/547679/campos_512_v4
+107/547719/campos_512_v4
+107/547835/campos_512_v4
+107/547853/campos_512_v4
+107/547886/campos_512_v4
+107/547897/campos_512_v4
+107/547905/campos_512_v4
+107/548015/campos_512_v4
+107/548115/campos_512_v4
+107/548171/campos_512_v4
+107/548257/campos_512_v4
+107/548295/campos_512_v4
+107/548299/campos_512_v4
+107/548328/campos_512_v4
+107/548335/campos_512_v4
+107/548372/campos_512_v4
+107/548516/campos_512_v4
+107/548553/campos_512_v4
+107/548555/campos_512_v4
+107/548630/campos_512_v4
+107/548702/campos_512_v4
+107/548707/campos_512_v4
+107/548890/campos_512_v4
+107/549035/campos_512_v4
+107/549118/campos_512_v4
+107/549192/campos_512_v4
+107/549217/campos_512_v4
+107/549312/campos_512_v4
+107/549691/campos_512_v4
+107/549705/campos_512_v4
+107/549757/campos_512_v4
+108/550086/campos_512_v4
+108/550098/campos_512_v4
+108/550125/campos_512_v4
+108/550315/campos_512_v4
+108/550357/campos_512_v4
+108/550365/campos_512_v4
+108/550437/campos_512_v4
+108/550482/campos_512_v4
+108/550495/campos_512_v4
+108/550498/campos_512_v4
+108/550815/campos_512_v4
+108/550844/campos_512_v4
+108/550853/campos_512_v4
+108/550913/campos_512_v4
+108/550925/campos_512_v4
+108/550947/campos_512_v4
+108/551020/campos_512_v4
+108/551101/campos_512_v4
+108/551109/campos_512_v4
+108/551349/campos_512_v4
+108/551382/campos_512_v4
+108/551397/campos_512_v4
+108/551413/campos_512_v4
+108/551417/campos_512_v4
+108/551802/campos_512_v4
+108/552010/campos_512_v4
+108/552040/campos_512_v4
+108/552070/campos_512_v4
+108/552097/campos_512_v4
+108/552226/campos_512_v4
+108/552538/campos_512_v4
+108/552767/campos_512_v4
+108/552787/campos_512_v4
+108/552907/campos_512_v4
+108/552910/campos_512_v4
+108/553091/campos_512_v4
+108/553174/campos_512_v4
+108/553548/campos_512_v4
+108/553738/campos_512_v4
+108/553899/campos_512_v4
+108/553953/campos_512_v4
+108/554005/campos_512_v4
+108/554040/campos_512_v4
+108/554117/campos_512_v4
+108/554174/campos_512_v4
+108/554184/campos_512_v4
+108/554233/campos_512_v4
+108/554393/campos_512_v4
+108/554398/campos_512_v4
+108/554648/campos_512_v4
+108/554706/campos_512_v4
+108/554871/campos_512_v4
+109/555161/campos_512_v4
+109/555324/campos_512_v4
+109/555381/campos_512_v4
+109/555780/campos_512_v4
+109/555971/campos_512_v4
+109/556030/campos_512_v4
+109/556169/campos_512_v4
+109/556941/campos_512_v4
+109/556980/campos_512_v4
+109/557005/campos_512_v4
+109/557042/campos_512_v4
+109/557130/campos_512_v4
+109/557269/campos_512_v4
+109/557278/campos_512_v4
+109/557334/campos_512_v4
+109/557388/campos_512_v4
+109/557389/campos_512_v4
+109/557606/campos_512_v4
+109/557666/campos_512_v4
+109/557721/campos_512_v4
+109/557816/campos_512_v4
+109/558232/campos_512_v4
+109/558477/campos_512_v4
+109/558497/campos_512_v4
+109/558519/campos_512_v4
+109/558888/campos_512_v4
+109/558899/campos_512_v4
+109/559032/campos_512_v4
+109/559047/campos_512_v4
+109/559344/campos_512_v4
+109/559368/campos_512_v4
+109/559537/campos_512_v4
+109/559775/campos_512_v4
+109/559921/campos_512_v4
+109/559923/campos_512_v4
+11/65169/campos_512_v4
+11/65394/campos_512_v4
+11/65604/campos_512_v4
+11/65607/campos_512_v4
+11/65659/campos_512_v4
+11/65856/campos_512_v4
+11/65859/campos_512_v4
+11/66205/campos_512_v4
+11/66479/campos_512_v4
+11/66650/campos_512_v4
+11/67190/campos_512_v4
+11/67551/campos_512_v4
+11/67554/campos_512_v4
+11/67619/campos_512_v4
+11/67620/campos_512_v4
+11/67632/campos_512_v4
+11/67769/campos_512_v4
+11/68245/campos_512_v4
+11/68354/campos_512_v4
+11/68882/campos_512_v4
+11/69107/campos_512_v4
+11/69253/campos_512_v4
+11/69504/campos_512_v4
+11/69926/campos_512_v4
+110/560565/campos_512_v4
+110/560678/campos_512_v4
+110/560865/campos_512_v4
+110/560998/campos_512_v4
+110/561052/campos_512_v4
+110/561446/campos_512_v4
+110/561456/campos_512_v4
+110/561473/campos_512_v4
+110/561719/campos_512_v4
+110/561910/campos_512_v4
+110/561916/campos_512_v4
+110/561923/campos_512_v4
+110/561927/campos_512_v4
+110/561933/campos_512_v4
+110/561975/campos_512_v4
+110/561991/campos_512_v4
+110/562030/campos_512_v4
+110/562159/campos_512_v4
+110/562378/campos_512_v4
+110/562379/campos_512_v4
+110/562395/campos_512_v4
+110/562423/campos_512_v4
+110/562502/campos_512_v4
+110/562511/campos_512_v4
+110/562512/campos_512_v4
+110/562642/campos_512_v4
+110/562643/campos_512_v4
+110/562666/campos_512_v4
+110/562898/campos_512_v4
+110/562998/campos_512_v4
+110/563122/campos_512_v4
+110/563169/campos_512_v4
+110/563194/campos_512_v4
+110/563815/campos_512_v4
+110/564009/campos_512_v4
+110/564010/campos_512_v4
+110/564030/campos_512_v4
+110/564042/campos_512_v4
+110/564204/campos_512_v4
+110/564241/campos_512_v4
+110/564268/campos_512_v4
+110/564289/campos_512_v4
+110/564295/campos_512_v4
+110/564434/campos_512_v4
+110/564479/campos_512_v4
+110/564716/campos_512_v4
+110/564922/campos_512_v4
+111/565089/campos_512_v4
+111/565356/campos_512_v4
+111/565647/campos_512_v4
+111/565776/campos_512_v4
+111/565777/campos_512_v4
+111/565990/campos_512_v4
+111/566029/campos_512_v4
+111/566050/campos_512_v4
+111/566337/campos_512_v4
+111/566434/campos_512_v4
+111/566443/campos_512_v4
+111/566629/campos_512_v4
+111/566654/campos_512_v4
+111/566680/campos_512_v4
+111/566787/campos_512_v4
+111/566841/campos_512_v4
+111/567019/campos_512_v4
+111/567050/campos_512_v4
+111/567251/campos_512_v4
+111/567411/campos_512_v4
+111/567440/campos_512_v4
+111/567724/campos_512_v4
+111/567778/campos_512_v4
+111/567830/campos_512_v4
+111/567927/campos_512_v4
+111/568085/campos_512_v4
+111/568105/campos_512_v4
+111/568416/campos_512_v4
+111/568599/campos_512_v4
+111/569120/campos_512_v4
+111/569334/campos_512_v4
+111/569399/campos_512_v4
+111/569440/campos_512_v4
+111/569556/campos_512_v4
+111/569600/campos_512_v4
+111/569950/campos_512_v4
+111/569971/campos_512_v4
+111/569977/campos_512_v4
+112/570302/campos_512_v4
+112/570410/campos_512_v4
+112/570564/campos_512_v4
+112/570843/campos_512_v4
+112/571096/campos_512_v4
+112/571106/campos_512_v4
+112/571204/campos_512_v4
+112/571563/campos_512_v4
+112/571630/campos_512_v4
+112/571647/campos_512_v4
+112/571881/campos_512_v4
+112/571964/campos_512_v4
+112/572019/campos_512_v4
+112/572026/campos_512_v4
+112/572038/campos_512_v4
+112/572339/campos_512_v4
+112/572383/campos_512_v4
+112/572476/campos_512_v4
+112/572497/campos_512_v4
+112/572575/campos_512_v4
+112/572608/campos_512_v4
+112/572685/campos_512_v4
+112/572740/campos_512_v4
+112/572850/campos_512_v4
+112/572904/campos_512_v4
+112/572916/campos_512_v4
+112/573286/campos_512_v4
+112/573567/campos_512_v4
+112/573580/campos_512_v4
+112/573828/campos_512_v4
+112/574515/campos_512_v4
+112/574625/campos_512_v4
+112/574820/campos_512_v4
+112/574854/campos_512_v4
+112/574870/campos_512_v4
+113/575107/campos_512_v4
+113/575115/campos_512_v4
+113/575164/campos_512_v4
+113/575204/campos_512_v4
+113/575264/campos_512_v4
+113/575309/campos_512_v4
+113/575686/campos_512_v4
+113/575720/campos_512_v4
+113/575820/campos_512_v4
+113/575822/campos_512_v4
+113/575842/campos_512_v4
+113/575885/campos_512_v4
+113/576148/campos_512_v4
+113/576466/campos_512_v4
+113/576481/campos_512_v4
+113/576691/campos_512_v4
+113/576735/campos_512_v4
+113/576779/campos_512_v4
+113/576802/campos_512_v4
+113/576973/campos_512_v4
+113/576992/campos_512_v4
+113/577032/campos_512_v4
+113/577305/campos_512_v4
+113/577340/campos_512_v4
+113/577394/campos_512_v4
+113/577433/campos_512_v4
+113/577472/campos_512_v4
+113/577522/campos_512_v4
+113/577688/campos_512_v4
+113/577752/campos_512_v4
+113/577849/campos_512_v4
+113/578224/campos_512_v4
+113/578451/campos_512_v4
+113/578707/campos_512_v4
+113/578763/campos_512_v4
+113/578890/campos_512_v4
+113/578965/campos_512_v4
+113/578972/campos_512_v4
+113/579173/campos_512_v4
+113/579199/campos_512_v4
+113/579203/campos_512_v4
+113/579283/campos_512_v4
+113/579367/campos_512_v4
+113/579484/campos_512_v4
+113/579668/campos_512_v4
+113/579859/campos_512_v4
+113/579905/campos_512_v4
+114/580345/campos_512_v4
+114/580581/campos_512_v4
+114/580595/campos_512_v4
+114/580634/campos_512_v4
+114/580648/campos_512_v4
+114/580695/campos_512_v4
+114/581216/campos_512_v4
+114/581304/campos_512_v4
+114/581827/campos_512_v4
+114/581851/campos_512_v4
+114/581922/campos_512_v4
+114/581936/campos_512_v4
+114/581995/campos_512_v4
+114/582112/campos_512_v4
+114/582417/campos_512_v4
+114/582680/campos_512_v4
+114/582681/campos_512_v4
+114/582956/campos_512_v4
+114/582966/campos_512_v4
+114/582995/campos_512_v4
+114/583126/campos_512_v4
+114/583302/campos_512_v4
+114/583388/campos_512_v4
+114/583444/campos_512_v4
+114/583464/campos_512_v4
+114/583602/campos_512_v4
+114/583673/campos_512_v4
+114/583943/campos_512_v4
+114/584066/campos_512_v4
+114/584067/campos_512_v4
+114/584071/campos_512_v4
+114/584096/campos_512_v4
+114/584215/campos_512_v4
+114/584372/campos_512_v4
+114/584413/campos_512_v4
+114/584740/campos_512_v4
+114/584969/campos_512_v4
+114/584984/campos_512_v4
+114/584993/campos_512_v4
+115/585223/campos_512_v4
+115/585291/campos_512_v4
+115/585410/campos_512_v4
+115/585575/campos_512_v4
+115/585593/campos_512_v4
+115/585691/campos_512_v4
+115/585744/campos_512_v4
+115/585917/campos_512_v4
+115/585967/campos_512_v4
+115/586063/campos_512_v4
+115/586277/campos_512_v4
+115/586351/campos_512_v4
+115/586412/campos_512_v4
+115/586473/campos_512_v4
+115/586488/campos_512_v4
+115/586977/campos_512_v4
+115/587141/campos_512_v4
+115/587218/campos_512_v4
+115/587265/campos_512_v4
+115/587273/campos_512_v4
+115/587339/campos_512_v4
+115/587465/campos_512_v4
+115/587478/campos_512_v4
+115/587917/campos_512_v4
+115/588169/campos_512_v4
+115/588227/campos_512_v4
+115/588351/campos_512_v4
+115/588420/campos_512_v4
+115/588579/campos_512_v4
+115/588580/campos_512_v4
+115/588671/campos_512_v4
+115/588723/campos_512_v4
+115/589152/campos_512_v4
+115/589162/campos_512_v4
+115/589259/campos_512_v4
+115/589296/campos_512_v4
+115/589391/campos_512_v4
+115/589430/campos_512_v4
+115/589651/campos_512_v4
+115/589709/campos_512_v4
+115/589880/campos_512_v4
+115/589991/campos_512_v4
+116/590003/campos_512_v4
+116/590148/campos_512_v4
+116/590160/campos_512_v4
+116/590256/campos_512_v4
+116/590485/campos_512_v4
+116/590555/campos_512_v4
+116/590647/campos_512_v4
+116/590941/campos_512_v4
+116/591091/campos_512_v4
+116/591214/campos_512_v4
+116/591389/campos_512_v4
+116/591466/campos_512_v4
+116/591500/campos_512_v4
+116/591677/campos_512_v4
+116/591692/campos_512_v4
+116/591725/campos_512_v4
+116/591831/campos_512_v4
+116/591947/campos_512_v4
+116/591979/campos_512_v4
+116/592022/campos_512_v4
+116/592035/campos_512_v4
+116/592381/campos_512_v4
+116/592482/campos_512_v4
+116/592638/campos_512_v4
+116/592811/campos_512_v4
+116/593011/campos_512_v4
+116/593088/campos_512_v4
+116/593495/campos_512_v4
+116/593527/campos_512_v4
+116/593685/campos_512_v4
+116/593695/campos_512_v4
+116/593699/campos_512_v4
+116/594036/campos_512_v4
+116/594062/campos_512_v4
+116/594090/campos_512_v4
+116/594409/campos_512_v4
+116/594413/campos_512_v4
+116/594496/campos_512_v4
+116/594625/campos_512_v4
+116/594823/campos_512_v4
+117/595226/campos_512_v4
+117/595253/campos_512_v4
+117/595391/campos_512_v4
+117/595418/campos_512_v4
+117/595458/campos_512_v4
+117/595843/campos_512_v4
+117/596058/campos_512_v4
+117/596079/campos_512_v4
+117/596142/campos_512_v4
+117/596153/campos_512_v4
+117/596202/campos_512_v4
+117/596204/campos_512_v4
+117/596377/campos_512_v4
+117/596404/campos_512_v4
+117/596654/campos_512_v4
+117/596669/campos_512_v4
+117/596729/campos_512_v4
+117/597086/campos_512_v4
+117/597179/campos_512_v4
+117/597196/campos_512_v4
+117/597202/campos_512_v4
+117/597454/campos_512_v4
+117/597768/campos_512_v4
+117/597912/campos_512_v4
+117/597964/campos_512_v4
+117/598040/campos_512_v4
+117/598181/campos_512_v4
+117/598344/campos_512_v4
+117/598389/campos_512_v4
+117/598395/campos_512_v4
+117/598472/campos_512_v4
+117/598624/campos_512_v4
+117/598650/campos_512_v4
+117/598816/campos_512_v4
+117/598873/campos_512_v4
+117/598904/campos_512_v4
+117/599261/campos_512_v4
+117/599312/campos_512_v4
+117/599352/campos_512_v4
+117/599440/campos_512_v4
+117/599514/campos_512_v4
+117/599521/campos_512_v4
+117/599568/campos_512_v4
+117/599651/campos_512_v4
+117/599827/campos_512_v4
+117/599959/campos_512_v4
+117/599992/campos_512_v4
+118/600004/campos_512_v4
+118/600098/campos_512_v4
+118/600100/campos_512_v4
+118/600107/campos_512_v4
+118/600140/campos_512_v4
+118/600188/campos_512_v4
+118/600267/campos_512_v4
+118/600274/campos_512_v4
+118/600474/campos_512_v4
+118/600561/campos_512_v4
+118/600598/campos_512_v4
+118/600682/campos_512_v4
+118/600711/campos_512_v4
+118/600910/campos_512_v4
+118/600994/campos_512_v4
+118/601024/campos_512_v4
+118/601146/campos_512_v4
+118/601149/campos_512_v4
+118/601173/campos_512_v4
+118/601327/campos_512_v4
+118/601414/campos_512_v4
+118/601480/campos_512_v4
+118/601527/campos_512_v4
+118/601662/campos_512_v4
+118/601994/campos_512_v4
+118/602131/campos_512_v4
+118/602249/campos_512_v4
+118/602299/campos_512_v4
+118/602493/campos_512_v4
+118/602721/campos_512_v4
+118/602757/campos_512_v4
+118/602758/campos_512_v4
+118/602906/campos_512_v4
+118/602977/campos_512_v4
+118/603078/campos_512_v4
+118/603116/campos_512_v4
+118/603253/campos_512_v4
+118/603434/campos_512_v4
+118/603503/campos_512_v4
+118/603692/campos_512_v4
+118/603706/campos_512_v4
+118/603713/campos_512_v4
+118/603773/campos_512_v4
+118/603859/campos_512_v4
+118/603869/campos_512_v4
+118/603898/campos_512_v4
+118/604062/campos_512_v4
+118/604064/campos_512_v4
+118/604358/campos_512_v4
+118/604478/campos_512_v4
+118/604491/campos_512_v4
+118/604509/campos_512_v4
+118/604511/campos_512_v4
+118/604717/campos_512_v4
+118/604734/campos_512_v4
+118/604750/campos_512_v4
+118/604867/campos_512_v4
+118/604960/campos_512_v4
+119/605017/campos_512_v4
+119/605251/campos_512_v4
+119/605338/campos_512_v4
+119/605507/campos_512_v4
+119/605534/campos_512_v4
+119/605914/campos_512_v4
+119/605920/campos_512_v4
+119/605921/campos_512_v4
+119/605924/campos_512_v4
+119/606029/campos_512_v4
+119/606031/campos_512_v4
+119/606094/campos_512_v4
+119/606223/campos_512_v4
+119/606380/campos_512_v4
+119/606409/campos_512_v4
+119/606547/campos_512_v4
+119/606950/campos_512_v4
+119/607113/campos_512_v4
+119/607142/campos_512_v4
+119/607149/campos_512_v4
+119/607170/campos_512_v4
+119/607430/campos_512_v4
+119/607468/campos_512_v4
+119/607818/campos_512_v4
+119/607869/campos_512_v4
+119/607875/campos_512_v4
+119/607898/campos_512_v4
+119/608001/campos_512_v4
+119/608091/campos_512_v4
+119/608251/campos_512_v4
+119/608349/campos_512_v4
+119/608829/campos_512_v4
+119/608892/campos_512_v4
+119/608964/campos_512_v4
+119/609106/campos_512_v4
+119/609270/campos_512_v4
+119/609839/campos_512_v4
+12/70041/campos_512_v4
+12/70109/campos_512_v4
+12/70253/campos_512_v4
+12/70477/campos_512_v4
+12/70928/campos_512_v4
+12/71094/campos_512_v4
+12/71296/campos_512_v4
+12/71439/campos_512_v4
+12/71544/campos_512_v4
+12/71998/campos_512_v4
+12/72060/campos_512_v4
+12/72228/campos_512_v4
+12/72922/campos_512_v4
+12/73391/campos_512_v4
+12/73474/campos_512_v4
+12/73809/campos_512_v4
+12/73867/campos_512_v4
+12/73878/campos_512_v4
+12/73941/campos_512_v4
+12/73960/campos_512_v4
+12/73986/campos_512_v4
+12/74177/campos_512_v4
+12/74556/campos_512_v4
+12/74777/campos_512_v4
+120/610173/campos_512_v4
+120/610253/campos_512_v4
+120/610457/campos_512_v4
+120/611597/campos_512_v4
+120/611688/campos_512_v4
+120/611820/campos_512_v4
+120/612236/campos_512_v4
+120/612269/campos_512_v4
+120/612296/campos_512_v4
+120/612649/campos_512_v4
+120/613071/campos_512_v4
+120/613173/campos_512_v4
+120/613216/campos_512_v4
+120/613546/campos_512_v4
+120/613586/campos_512_v4
+120/613722/campos_512_v4
+120/613805/campos_512_v4
+120/614471/campos_512_v4
+120/614500/campos_512_v4
+120/614695/campos_512_v4
+121/615461/campos_512_v4
+121/615463/campos_512_v4
+121/615698/campos_512_v4
+121/615776/campos_512_v4
+121/615879/campos_512_v4
+121/616226/campos_512_v4
+121/616240/campos_512_v4
+121/616565/campos_512_v4
+121/616571/campos_512_v4
+121/616706/campos_512_v4
+121/616804/campos_512_v4
+121/617057/campos_512_v4
+121/617368/campos_512_v4
+121/618030/campos_512_v4
+121/618103/campos_512_v4
+121/618243/campos_512_v4
+121/618380/campos_512_v4
+121/618755/campos_512_v4
+121/619563/campos_512_v4
+121/619659/campos_512_v4
+121/619675/campos_512_v4
+121/619995/campos_512_v4
+122/620116/campos_512_v4
+122/620179/campos_512_v4
+122/620361/campos_512_v4
+122/620438/campos_512_v4
+122/620738/campos_512_v4
+122/620780/campos_512_v4
+122/620918/campos_512_v4
+122/621189/campos_512_v4
+122/621431/campos_512_v4
+122/621633/campos_512_v4
+122/622834/campos_512_v4
+122/623488/campos_512_v4
+122/623571/campos_512_v4
+122/623716/campos_512_v4
+122/624194/campos_512_v4
+122/624321/campos_512_v4
+123/625063/campos_512_v4
+123/625087/campos_512_v4
+123/625450/campos_512_v4
+123/625619/campos_512_v4
+123/625710/campos_512_v4
+123/626266/campos_512_v4
+123/626299/campos_512_v4
+123/626345/campos_512_v4
+123/626813/campos_512_v4
+123/627452/campos_512_v4
+123/627707/campos_512_v4
+123/627721/campos_512_v4
+123/628435/campos_512_v4
+123/628663/campos_512_v4
+123/628784/campos_512_v4
+123/628787/campos_512_v4
+123/628936/campos_512_v4
+123/628937/campos_512_v4
+123/629312/campos_512_v4
+123/629714/campos_512_v4
+123/629823/campos_512_v4
+123/629992/campos_512_v4
+124/630349/campos_512_v4
+124/631007/campos_512_v4
+124/631075/campos_512_v4
+124/631245/campos_512_v4
+124/631365/campos_512_v4
+124/631936/campos_512_v4
+124/632452/campos_512_v4
+124/632586/campos_512_v4
+124/632699/campos_512_v4
+124/632809/campos_512_v4
+124/632817/campos_512_v4
+124/633066/campos_512_v4
+124/633306/campos_512_v4
+124/633435/campos_512_v4
+124/633509/campos_512_v4
+124/633541/campos_512_v4
+124/633616/campos_512_v4
+124/633995/campos_512_v4
+124/634266/campos_512_v4
+124/634774/campos_512_v4
+125/635184/campos_512_v4
+125/635316/campos_512_v4
+125/635650/campos_512_v4
+125/635717/campos_512_v4
+125/635930/campos_512_v4
+125/635961/campos_512_v4
+125/636041/campos_512_v4
+125/636298/campos_512_v4
+125/636611/campos_512_v4
+125/637416/campos_512_v4
+125/637442/campos_512_v4
+125/637655/campos_512_v4
+125/637911/campos_512_v4
+125/638478/campos_512_v4
+125/638486/campos_512_v4
+125/638994/campos_512_v4
+125/639119/campos_512_v4
+125/639143/campos_512_v4
+125/639312/campos_512_v4
+125/639533/campos_512_v4
+125/639610/campos_512_v4
+125/639644/campos_512_v4
+125/639900/campos_512_v4
+125/639933/campos_512_v4
+127/645279/campos_512_v4
+127/645414/campos_512_v4
+127/645625/campos_512_v4
+127/645707/campos_512_v4
+127/646103/campos_512_v4
+127/646423/campos_512_v4
+127/646498/campos_512_v4
+127/646594/campos_512_v4
+127/646608/campos_512_v4
+127/646901/campos_512_v4
+127/646941/campos_512_v4
+127/646997/campos_512_v4
+127/647362/campos_512_v4
+127/647740/campos_512_v4
+127/648101/campos_512_v4
+127/648221/campos_512_v4
+127/648603/campos_512_v4
+127/648919/campos_512_v4
+127/649111/campos_512_v4
+127/649614/campos_512_v4
+127/649827/campos_512_v4
+128/650187/campos_512_v4
+128/650265/campos_512_v4
+128/650521/campos_512_v4
+128/650554/campos_512_v4
+128/650818/campos_512_v4
+128/650934/campos_512_v4
+128/651049/campos_512_v4
+128/651466/campos_512_v4
+128/651715/campos_512_v4
+128/651789/campos_512_v4
+128/652018/campos_512_v4
+128/652051/campos_512_v4
+128/652601/campos_512_v4
+128/652779/campos_512_v4
+128/653046/campos_512_v4
+128/653071/campos_512_v4
+128/653155/campos_512_v4
+128/653327/campos_512_v4
+128/653943/campos_512_v4
+128/654356/campos_512_v4
+128/654616/campos_512_v4
+128/654633/campos_512_v4
+129/655053/campos_512_v4
+129/655284/campos_512_v4
+129/655300/campos_512_v4
+129/655351/campos_512_v4
+129/655533/campos_512_v4
+129/655580/campos_512_v4
+129/656294/campos_512_v4
+129/656438/campos_512_v4
+129/656654/campos_512_v4
+129/656829/campos_512_v4
+129/657054/campos_512_v4
+129/657129/campos_512_v4
+129/657247/campos_512_v4
+129/657777/campos_512_v4
+129/657917/campos_512_v4
+129/658465/campos_512_v4
+129/658722/campos_512_v4
+129/658758/campos_512_v4
+129/659809/campos_512_v4
+129/659859/campos_512_v4
+129/659950/campos_512_v4
+13/75713/campos_512_v4
+13/75956/campos_512_v4
+13/76658/campos_512_v4
+13/77090/campos_512_v4
+13/77167/campos_512_v4
+13/77689/campos_512_v4
+13/77865/campos_512_v4
+13/77884/campos_512_v4
+13/78162/campos_512_v4
+13/78420/campos_512_v4
+13/78579/campos_512_v4
+13/79246/campos_512_v4
+13/79309/campos_512_v4
+13/79474/campos_512_v4
+13/79699/campos_512_v4
+130/660213/campos_512_v4
+130/660510/campos_512_v4
+130/660602/campos_512_v4
+130/660929/campos_512_v4
+130/661246/campos_512_v4
+130/661248/campos_512_v4
+130/661793/campos_512_v4
+130/661928/campos_512_v4
+130/661941/campos_512_v4
+130/662279/campos_512_v4
+130/662400/campos_512_v4
+130/662424/campos_512_v4
+130/662743/campos_512_v4
+130/663038/campos_512_v4
+130/663089/campos_512_v4
+130/663148/campos_512_v4
+130/663238/campos_512_v4
+130/663735/campos_512_v4
+130/663819/campos_512_v4
+130/664023/campos_512_v4
+130/664190/campos_512_v4
+130/664369/campos_512_v4
+130/664867/campos_512_v4
+130/664924/campos_512_v4
+131/665497/campos_512_v4
+131/665977/campos_512_v4
+131/667092/campos_512_v4
+131/667224/campos_512_v4
+131/667352/campos_512_v4
+131/667365/campos_512_v4
+131/667613/campos_512_v4
+131/667793/campos_512_v4
+131/667963/campos_512_v4
+131/667972/campos_512_v4
+131/668300/campos_512_v4
+131/668401/campos_512_v4
+131/668530/campos_512_v4
+131/668995/campos_512_v4
+131/669150/campos_512_v4
+131/669750/campos_512_v4
+132/670460/campos_512_v4
+132/670549/campos_512_v4
+132/670557/campos_512_v4
+132/670785/campos_512_v4
+132/671063/campos_512_v4
+132/671158/campos_512_v4
+132/671381/campos_512_v4
+132/671386/campos_512_v4
+132/671589/campos_512_v4
+132/671717/campos_512_v4
+132/671820/campos_512_v4
+132/671851/campos_512_v4
+132/671872/campos_512_v4
+132/671894/campos_512_v4
+132/672544/campos_512_v4
+132/672651/campos_512_v4
+132/672903/campos_512_v4
+132/673001/campos_512_v4
+132/673107/campos_512_v4
+132/673302/campos_512_v4
+132/673500/campos_512_v4
+132/673558/campos_512_v4
+132/673711/campos_512_v4
+132/673756/campos_512_v4
+132/673843/campos_512_v4
+132/674089/campos_512_v4
+132/674312/campos_512_v4
+132/674402/campos_512_v4
+133/675014/campos_512_v4
+133/675058/campos_512_v4
+133/675248/campos_512_v4
+133/675509/campos_512_v4
+133/675819/campos_512_v4
+133/675878/campos_512_v4
+133/676114/campos_512_v4
+133/676195/campos_512_v4
+133/676429/campos_512_v4
+133/676594/campos_512_v4
+133/676897/campos_512_v4
+133/676974/campos_512_v4
+133/677240/campos_512_v4
+133/677401/campos_512_v4
+133/678020/campos_512_v4
+133/678058/campos_512_v4
+133/678083/campos_512_v4
+133/678088/campos_512_v4
+133/678205/campos_512_v4
+133/678315/campos_512_v4
+133/678348/campos_512_v4
+133/678366/campos_512_v4
+133/678402/campos_512_v4
+133/678621/campos_512_v4
+133/678683/campos_512_v4
+133/678809/campos_512_v4
+133/678818/campos_512_v4
+133/679113/campos_512_v4
+133/679308/campos_512_v4
+133/679411/campos_512_v4
+133/679535/campos_512_v4
+133/679648/campos_512_v4
+133/679670/campos_512_v4
+133/679719/campos_512_v4
+133/679856/campos_512_v4
+133/679905/campos_512_v4
+133/679918/campos_512_v4
+133/679943/campos_512_v4
+134/680125/campos_512_v4
+134/680643/campos_512_v4
+134/680759/campos_512_v4
+134/681859/campos_512_v4
+134/682011/campos_512_v4
+134/682173/campos_512_v4
+134/682505/campos_512_v4
+134/682812/campos_512_v4
+134/682815/campos_512_v4
+134/683779/campos_512_v4
+134/683899/campos_512_v4
+134/683922/campos_512_v4
+134/684157/campos_512_v4
+134/684243/campos_512_v4
+134/684252/campos_512_v4
+134/684852/campos_512_v4
+135/685143/campos_512_v4
+135/685290/campos_512_v4
+135/685469/campos_512_v4
+135/685740/campos_512_v4
+135/685859/campos_512_v4
+135/686040/campos_512_v4
+135/686081/campos_512_v4
+135/686107/campos_512_v4
+135/686171/campos_512_v4
+135/686179/campos_512_v4
+135/686836/campos_512_v4
+135/686895/campos_512_v4
+135/686914/campos_512_v4
+135/686927/campos_512_v4
+135/686934/campos_512_v4
+135/687045/campos_512_v4
+135/687120/campos_512_v4
+135/687142/campos_512_v4
+135/687273/campos_512_v4
+135/687342/campos_512_v4
+135/687486/campos_512_v4
+135/687688/campos_512_v4
+135/687730/campos_512_v4
+135/687826/campos_512_v4
+135/687958/campos_512_v4
+135/687961/campos_512_v4
+135/688536/campos_512_v4
+135/688653/campos_512_v4
+135/688661/campos_512_v4
+135/688799/campos_512_v4
+135/689197/campos_512_v4
+135/689642/campos_512_v4
+135/689986/campos_512_v4
+136/690081/campos_512_v4
+136/690393/campos_512_v4
+136/690414/campos_512_v4
+136/690446/campos_512_v4
+136/690477/campos_512_v4
+136/690535/campos_512_v4
+136/690947/campos_512_v4
+136/691025/campos_512_v4
+136/691148/campos_512_v4
+136/692249/campos_512_v4
+136/692374/campos_512_v4
+136/692385/campos_512_v4
+136/692511/campos_512_v4
+136/692750/campos_512_v4
+136/693000/campos_512_v4
+136/693202/campos_512_v4
+136/693535/campos_512_v4
+136/693863/campos_512_v4
+136/694055/campos_512_v4
+136/694268/campos_512_v4
+136/694271/campos_512_v4
+136/694364/campos_512_v4
+136/694706/campos_512_v4
+136/694927/campos_512_v4
+137/695059/campos_512_v4
+137/695080/campos_512_v4
+137/695109/campos_512_v4
+137/695166/campos_512_v4
+137/695461/campos_512_v4
+137/695488/campos_512_v4
+137/695677/campos_512_v4
+137/696353/campos_512_v4
+137/696355/campos_512_v4
+137/696548/campos_512_v4
+137/696588/campos_512_v4
+137/696615/campos_512_v4
+137/696622/campos_512_v4
+137/696777/campos_512_v4
+137/696815/campos_512_v4
+137/696991/campos_512_v4
+137/697140/campos_512_v4
+137/697508/campos_512_v4
+137/697533/campos_512_v4
+137/697543/campos_512_v4
+137/697550/campos_512_v4
+137/697747/campos_512_v4
+137/698086/campos_512_v4
+137/698132/campos_512_v4
+137/698355/campos_512_v4
+137/699676/campos_512_v4
+137/699739/campos_512_v4
+138/700389/campos_512_v4
+138/700642/campos_512_v4
+138/700830/campos_512_v4
+138/700884/campos_512_v4
+138/701797/campos_512_v4
+138/701800/campos_512_v4
+138/702001/campos_512_v4
+138/702431/campos_512_v4
+138/702444/campos_512_v4
+138/702950/campos_512_v4
+138/703429/campos_512_v4
+138/703687/campos_512_v4
+138/703931/campos_512_v4
+138/704191/campos_512_v4
+138/704316/campos_512_v4
+138/704722/campos_512_v4
+138/704749/campos_512_v4
+138/704752/campos_512_v4
+139/705202/campos_512_v4
+139/705578/campos_512_v4
+139/705864/campos_512_v4
+139/705927/campos_512_v4
+139/706641/campos_512_v4
+139/706766/campos_512_v4
+139/706836/campos_512_v4
+139/706900/campos_512_v4
+139/707368/campos_512_v4
+139/707395/campos_512_v4
+139/707480/campos_512_v4
+139/707575/campos_512_v4
+139/707837/campos_512_v4
+139/707894/campos_512_v4
+139/708035/campos_512_v4
+139/708078/campos_512_v4
+139/708097/campos_512_v4
+139/708174/campos_512_v4
+139/708230/campos_512_v4
+139/708452/campos_512_v4
+139/708484/campos_512_v4
+139/708589/campos_512_v4
+139/708716/campos_512_v4
+139/708945/campos_512_v4
+139/708954/campos_512_v4
+139/709022/campos_512_v4
+139/709430/campos_512_v4
+139/709622/campos_512_v4
+139/709628/campos_512_v4
+139/709671/campos_512_v4
+139/709798/campos_512_v4
+139/709923/campos_512_v4
+139/709946/campos_512_v4
+14/80163/campos_512_v4
+14/80370/campos_512_v4
+14/80640/campos_512_v4
+14/81190/campos_512_v4
+14/81259/campos_512_v4
+14/81293/campos_512_v4
+14/81908/campos_512_v4
+14/81947/campos_512_v4
+14/82285/campos_512_v4
+14/82425/campos_512_v4
+14/82458/campos_512_v4
+14/82616/campos_512_v4
+14/82782/campos_512_v4
+14/82908/campos_512_v4
+14/83461/campos_512_v4
+14/83465/campos_512_v4
+14/83812/campos_512_v4
+14/84219/campos_512_v4
+14/84397/campos_512_v4
+14/84437/campos_512_v4
+14/84672/campos_512_v4
+14/84765/campos_512_v4
+140/710039/campos_512_v4
+140/710062/campos_512_v4
+140/710262/campos_512_v4
+140/710280/campos_512_v4
+140/710632/campos_512_v4
+140/710857/campos_512_v4
+140/710956/campos_512_v4
+140/711237/campos_512_v4
+140/711337/campos_512_v4
+140/711366/campos_512_v4
+140/711416/campos_512_v4
+140/711429/campos_512_v4
+140/711473/campos_512_v4
+140/711645/campos_512_v4
+140/711665/campos_512_v4
+140/711707/campos_512_v4
+140/711734/campos_512_v4
+140/711959/campos_512_v4
+140/711992/campos_512_v4
+140/712316/campos_512_v4
+140/712569/campos_512_v4
+140/712727/campos_512_v4
+140/712740/campos_512_v4
+140/712843/campos_512_v4
+140/713103/campos_512_v4
+140/713303/campos_512_v4
+140/713575/campos_512_v4
+140/713678/campos_512_v4
+140/713684/campos_512_v4
+140/713718/campos_512_v4
+140/713756/campos_512_v4
+140/713786/campos_512_v4
+140/713795/campos_512_v4
+140/713905/campos_512_v4
+140/714303/campos_512_v4
+140/714334/campos_512_v4
+140/714389/campos_512_v4
+140/714443/campos_512_v4
+140/714447/campos_512_v4
+140/714484/campos_512_v4
+140/714547/campos_512_v4
+140/714562/campos_512_v4
+140/714638/campos_512_v4
+140/714674/campos_512_v4
+140/714901/campos_512_v4
+141/715013/campos_512_v4
+141/715014/campos_512_v4
+141/715074/campos_512_v4
+141/715151/campos_512_v4
+141/715202/campos_512_v4
+141/715265/campos_512_v4
+141/715442/campos_512_v4
+141/715604/campos_512_v4
+141/715680/campos_512_v4
+141/715804/campos_512_v4
+141/715827/campos_512_v4
+141/716061/campos_512_v4
+141/716294/campos_512_v4
+141/716503/campos_512_v4
+141/716635/campos_512_v4
+141/716660/campos_512_v4
+141/716993/campos_512_v4
+141/717262/campos_512_v4
+141/717285/campos_512_v4
+141/717641/campos_512_v4
+141/717677/campos_512_v4
+141/717723/campos_512_v4
+141/718037/campos_512_v4
+141/718039/campos_512_v4
+141/718061/campos_512_v4
+141/718147/campos_512_v4
+141/718165/campos_512_v4
+141/718255/campos_512_v4
+141/718256/campos_512_v4
+141/718274/campos_512_v4
+141/718403/campos_512_v4
+141/718526/campos_512_v4
+141/718546/campos_512_v4
+141/718645/campos_512_v4
+141/718737/campos_512_v4
+141/718805/campos_512_v4
+141/718831/campos_512_v4
+141/718875/campos_512_v4
+141/719302/campos_512_v4
+141/719480/campos_512_v4
+141/719500/campos_512_v4
+141/719634/campos_512_v4
+141/719641/campos_512_v4
+141/719738/campos_512_v4
+141/719912/campos_512_v4
+141/719936/campos_512_v4
+142/720006/campos_512_v4
+142/720255/campos_512_v4
+142/720275/campos_512_v4
+142/720497/campos_512_v4
+142/720717/campos_512_v4
+142/720786/campos_512_v4
+142/720895/campos_512_v4
+142/721183/campos_512_v4
+142/721199/campos_512_v4
+142/721250/campos_512_v4
+142/721444/campos_512_v4
+142/721512/campos_512_v4
+142/721734/campos_512_v4
+142/721809/campos_512_v4
+142/721886/campos_512_v4
+142/722036/campos_512_v4
+142/722332/campos_512_v4
+142/722335/campos_512_v4
+142/722398/campos_512_v4
+142/722426/campos_512_v4
+142/722453/campos_512_v4
+142/722625/campos_512_v4
+142/722663/campos_512_v4
+142/722678/campos_512_v4
+142/722682/campos_512_v4
+142/722848/campos_512_v4
+142/722875/campos_512_v4
+142/723076/campos_512_v4
+142/723122/campos_512_v4
+142/723151/campos_512_v4
+142/723213/campos_512_v4
+142/723265/campos_512_v4
+142/723303/campos_512_v4
+142/723308/campos_512_v4
+142/723498/campos_512_v4
+142/723576/campos_512_v4
+142/723615/campos_512_v4
+142/723654/campos_512_v4
+142/723695/campos_512_v4
+142/723809/campos_512_v4
+142/723849/campos_512_v4
+142/723866/campos_512_v4
+142/723912/campos_512_v4
+142/723922/campos_512_v4
+142/723934/campos_512_v4
+142/724000/campos_512_v4
+142/724030/campos_512_v4
+142/724054/campos_512_v4
+142/724860/campos_512_v4
+142/724972/campos_512_v4
+143/725084/campos_512_v4
+143/725160/campos_512_v4
+143/725173/campos_512_v4
+143/725206/campos_512_v4
+143/725247/campos_512_v4
+143/725358/campos_512_v4
+143/725403/campos_512_v4
+143/725518/campos_512_v4
+143/725535/campos_512_v4
+143/725539/campos_512_v4
+143/726157/campos_512_v4
+143/726172/campos_512_v4
+143/726205/campos_512_v4
+143/726218/campos_512_v4
+143/726239/campos_512_v4
+143/726275/campos_512_v4
+143/726385/campos_512_v4
+143/726406/campos_512_v4
+143/726412/campos_512_v4
+143/726419/campos_512_v4
+143/726468/campos_512_v4
+143/726635/campos_512_v4
+143/726820/campos_512_v4
+143/726936/campos_512_v4
+143/726995/campos_512_v4
+143/727037/campos_512_v4
+143/727195/campos_512_v4
+143/727212/campos_512_v4
+143/727370/campos_512_v4
+143/727402/campos_512_v4
+143/727432/campos_512_v4
+143/727509/campos_512_v4
+143/727511/campos_512_v4
+143/727705/campos_512_v4
+143/727735/campos_512_v4
+143/727954/campos_512_v4
+143/727973/campos_512_v4
+143/727985/campos_512_v4
+143/728002/campos_512_v4
+143/728097/campos_512_v4
+143/728115/campos_512_v4
+143/728550/campos_512_v4
+143/728565/campos_512_v4
+143/728595/campos_512_v4
+143/728641/campos_512_v4
+143/728815/campos_512_v4
+143/728924/campos_512_v4
+143/729066/campos_512_v4
+143/729068/campos_512_v4
+143/729287/campos_512_v4
+143/729387/campos_512_v4
+143/729456/campos_512_v4
+143/729525/campos_512_v4
+143/729532/campos_512_v4
+143/729615/campos_512_v4
+143/729791/campos_512_v4
+143/729834/campos_512_v4
+143/729920/campos_512_v4
+143/729923/campos_512_v4
+144/730047/campos_512_v4
+144/730214/campos_512_v4
+144/730287/campos_512_v4
+144/730293/campos_512_v4
+144/730307/campos_512_v4
+144/730475/campos_512_v4
+144/730548/campos_512_v4
+144/730615/campos_512_v4
+144/730626/campos_512_v4
+144/731064/campos_512_v4
+144/731069/campos_512_v4
+144/731299/campos_512_v4
+144/731375/campos_512_v4
+144/731387/campos_512_v4
+144/731437/campos_512_v4
+144/731456/campos_512_v4
+144/731647/campos_512_v4
+144/731671/campos_512_v4
+144/731732/campos_512_v4
+144/731945/campos_512_v4
+144/731957/campos_512_v4
+144/731997/campos_512_v4
+144/732014/campos_512_v4
+144/732015/campos_512_v4
+144/732377/campos_512_v4
+144/732477/campos_512_v4
+144/732503/campos_512_v4
+144/732515/campos_512_v4
+144/732569/campos_512_v4
+144/732704/campos_512_v4
+144/733026/campos_512_v4
+144/733062/campos_512_v4
+144/733066/campos_512_v4
+144/733078/campos_512_v4
+144/733117/campos_512_v4
+144/733190/campos_512_v4
+144/733240/campos_512_v4
+144/733311/campos_512_v4
+144/733323/campos_512_v4
+144/733338/campos_512_v4
+144/733430/campos_512_v4
+144/733440/campos_512_v4
+144/733534/campos_512_v4
+144/733673/campos_512_v4
+144/734012/campos_512_v4
+144/734104/campos_512_v4
+144/734474/campos_512_v4
+144/734564/campos_512_v4
+144/734691/campos_512_v4
+144/734755/campos_512_v4
+144/734781/campos_512_v4
+145/735163/campos_512_v4
+145/735203/campos_512_v4
+145/735453/campos_512_v4
+145/735459/campos_512_v4
+145/735684/campos_512_v4
+145/735688/campos_512_v4
+145/735696/campos_512_v4
+145/735721/campos_512_v4
+145/735751/campos_512_v4
+145/735839/campos_512_v4
+145/735911/campos_512_v4
+145/735927/campos_512_v4
+145/735936/campos_512_v4
+145/735990/campos_512_v4
+145/736053/campos_512_v4
+145/736155/campos_512_v4
+145/736343/campos_512_v4
+145/736477/campos_512_v4
+145/736480/campos_512_v4
+145/736627/campos_512_v4
+145/736673/campos_512_v4
+145/736678/campos_512_v4
+145/736851/campos_512_v4
+145/737287/campos_512_v4
+145/737302/campos_512_v4
+145/737348/campos_512_v4
+145/737353/campos_512_v4
+145/737484/campos_512_v4
+145/737570/campos_512_v4
+145/737608/campos_512_v4
+145/737746/campos_512_v4
+145/738142/campos_512_v4
+145/738166/campos_512_v4
+145/738218/campos_512_v4
+145/738354/campos_512_v4
+145/738389/campos_512_v4
+145/738605/campos_512_v4
+145/738631/campos_512_v4
+145/738847/campos_512_v4
+145/738990/campos_512_v4
+145/739196/campos_512_v4
+145/739232/campos_512_v4
+145/739245/campos_512_v4
+145/739600/campos_512_v4
+145/739685/campos_512_v4
+145/739970/campos_512_v4
+146/740074/campos_512_v4
+146/740082/campos_512_v4
+146/740099/campos_512_v4
+146/740126/campos_512_v4
+146/740146/campos_512_v4
+146/740354/campos_512_v4
+146/740391/campos_512_v4
+146/740782/campos_512_v4
+146/740832/campos_512_v4
+146/741077/campos_512_v4
+146/741118/campos_512_v4
+146/741236/campos_512_v4
+146/741277/campos_512_v4
+146/741552/campos_512_v4
+146/741556/campos_512_v4
+146/741613/campos_512_v4
+146/741752/campos_512_v4
+146/741776/campos_512_v4
+146/741835/campos_512_v4
+146/741908/campos_512_v4
+146/742243/campos_512_v4
+146/742285/campos_512_v4
+146/742317/campos_512_v4
+146/742364/campos_512_v4
+146/742438/campos_512_v4
+146/742616/campos_512_v4
+146/742664/campos_512_v4
+146/742670/campos_512_v4
+146/742883/campos_512_v4
+146/742915/campos_512_v4
+146/743314/campos_512_v4
+146/743383/campos_512_v4
+146/743614/campos_512_v4
+146/743679/campos_512_v4
+146/743719/campos_512_v4
+146/743909/campos_512_v4
+146/744234/campos_512_v4
+146/744243/campos_512_v4
+146/744434/campos_512_v4
+146/744444/campos_512_v4
+146/744546/campos_512_v4
+146/744667/campos_512_v4
+146/744797/campos_512_v4
+147/745084/campos_512_v4
+147/745275/campos_512_v4
+147/745294/campos_512_v4
+147/745675/campos_512_v4
+147/745742/campos_512_v4
+147/745751/campos_512_v4
+147/746066/campos_512_v4
+147/746190/campos_512_v4
+147/746296/campos_512_v4
+147/746718/campos_512_v4
+147/746832/campos_512_v4
+147/746890/campos_512_v4
+147/746916/campos_512_v4
+147/746946/campos_512_v4
+147/746961/campos_512_v4
+147/747060/campos_512_v4
+147/747121/campos_512_v4
+147/747172/campos_512_v4
+147/747173/campos_512_v4
+147/747378/campos_512_v4
+147/747415/campos_512_v4
+147/747526/campos_512_v4
+147/747627/campos_512_v4
+147/747814/campos_512_v4
+147/747941/campos_512_v4
+147/747944/campos_512_v4
+147/748128/campos_512_v4
+147/748149/campos_512_v4
+147/748182/campos_512_v4
+147/748185/campos_512_v4
+147/748250/campos_512_v4
+147/748269/campos_512_v4
+147/748383/campos_512_v4
+147/748482/campos_512_v4
+147/748528/campos_512_v4
+147/748565/campos_512_v4
+147/748920/campos_512_v4
+147/748970/campos_512_v4
+147/748992/campos_512_v4
+147/749022/campos_512_v4
+147/749026/campos_512_v4
+147/749044/campos_512_v4
+147/749050/campos_512_v4
+147/749356/campos_512_v4
+147/749395/campos_512_v4
+147/749434/campos_512_v4
+147/749550/campos_512_v4
+147/749553/campos_512_v4
+147/749737/campos_512_v4
+147/749762/campos_512_v4
+147/749798/campos_512_v4
+147/749829/campos_512_v4
+147/749979/campos_512_v4
+148/750152/campos_512_v4
+148/750275/campos_512_v4
+148/750295/campos_512_v4
+148/750320/campos_512_v4
+148/750397/campos_512_v4
+148/750637/campos_512_v4
+148/750719/campos_512_v4
+148/750735/campos_512_v4
+148/750745/campos_512_v4
+148/750789/campos_512_v4
+148/751092/campos_512_v4
+148/751141/campos_512_v4
+148/751280/campos_512_v4
+148/751358/campos_512_v4
+148/751947/campos_512_v4
+148/751969/campos_512_v4
+148/751998/campos_512_v4
+148/752185/campos_512_v4
+148/752195/campos_512_v4
+148/752239/campos_512_v4
+148/752277/campos_512_v4
+148/752428/campos_512_v4
+148/752444/campos_512_v4
+148/752452/campos_512_v4
+148/752525/campos_512_v4
+148/752531/campos_512_v4
+148/752788/campos_512_v4
+148/752791/campos_512_v4
+148/752888/campos_512_v4
+148/752928/campos_512_v4
+148/752931/campos_512_v4
+148/752970/campos_512_v4
+148/753115/campos_512_v4
+148/753571/campos_512_v4
+148/753572/campos_512_v4
+148/753575/campos_512_v4
+148/753896/campos_512_v4
+148/753945/campos_512_v4
+148/754260/campos_512_v4
+148/754412/campos_512_v4
+148/754473/campos_512_v4
+148/754525/campos_512_v4
+148/754587/campos_512_v4
+148/754631/campos_512_v4
+148/754645/campos_512_v4
+148/754791/campos_512_v4
+149/755038/campos_512_v4
+149/755042/campos_512_v4
+149/755060/campos_512_v4
+149/755202/campos_512_v4
+149/755266/campos_512_v4
+149/755316/campos_512_v4
+149/755676/campos_512_v4
+149/755713/campos_512_v4
+149/755733/campos_512_v4
+149/755749/campos_512_v4
+149/755793/campos_512_v4
+149/755981/campos_512_v4
+149/756148/campos_512_v4
+149/756149/campos_512_v4
+149/756446/campos_512_v4
+149/756463/campos_512_v4
+149/756483/campos_512_v4
+149/756495/campos_512_v4
+149/756509/campos_512_v4
+149/756527/campos_512_v4
+149/756714/campos_512_v4
+149/756719/campos_512_v4
+149/756725/campos_512_v4
+149/756767/campos_512_v4
+149/756771/campos_512_v4
+149/756911/campos_512_v4
+149/756915/campos_512_v4
+149/756920/campos_512_v4
+149/756926/campos_512_v4
+149/756939/campos_512_v4
+149/756977/campos_512_v4
+149/757028/campos_512_v4
+149/757031/campos_512_v4
+149/757183/campos_512_v4
+149/757325/campos_512_v4
+149/757438/campos_512_v4
+149/757687/campos_512_v4
+149/757704/campos_512_v4
+149/757727/campos_512_v4
+149/757778/campos_512_v4
+149/757797/campos_512_v4
+149/757826/campos_512_v4
+149/757980/campos_512_v4
+149/758043/campos_512_v4
+149/758279/campos_512_v4
+149/758500/campos_512_v4
+149/758505/campos_512_v4
+149/758512/campos_512_v4
+149/758517/campos_512_v4
+149/758745/campos_512_v4
+149/759251/campos_512_v4
+149/759320/campos_512_v4
+149/759351/campos_512_v4
+149/759400/campos_512_v4
+149/759496/campos_512_v4
+149/759774/campos_512_v4
+149/759920/campos_512_v4
+149/759977/campos_512_v4
+15/85837/campos_512_v4
+15/85992/campos_512_v4
+15/86060/campos_512_v4
+15/86347/campos_512_v4
+15/86390/campos_512_v4
+15/86439/campos_512_v4
+15/86732/campos_512_v4
+15/87180/campos_512_v4
+15/87428/campos_512_v4
+15/87698/campos_512_v4
+15/87716/campos_512_v4
+15/87858/campos_512_v4
+15/87890/campos_512_v4
+15/88002/campos_512_v4
+15/88060/campos_512_v4
+15/88144/campos_512_v4
+15/88267/campos_512_v4
+15/88288/campos_512_v4
+15/88615/campos_512_v4
+15/88703/campos_512_v4
+15/88893/campos_512_v4
+15/89692/campos_512_v4
+15/89784/campos_512_v4
+15/89794/campos_512_v4
+15/89806/campos_512_v4
+150/760045/campos_512_v4
+150/760147/campos_512_v4
+150/760370/campos_512_v4
+150/760385/campos_512_v4
+150/760572/campos_512_v4
+150/760642/campos_512_v4
+150/760837/campos_512_v4
+150/761005/campos_512_v4
+150/761015/campos_512_v4
+150/761065/campos_512_v4
+150/761092/campos_512_v4
+150/761104/campos_512_v4
+150/761118/campos_512_v4
+150/761184/campos_512_v4
+150/761416/campos_512_v4
+150/761556/campos_512_v4
+150/761776/campos_512_v4
+150/762085/campos_512_v4
+150/762261/campos_512_v4
+150/762356/campos_512_v4
+150/762394/campos_512_v4
+150/762575/campos_512_v4
+150/762728/campos_512_v4
+150/762790/campos_512_v4
+150/762809/campos_512_v4
+150/762926/campos_512_v4
+150/762979/campos_512_v4
+150/763064/campos_512_v4
+150/763084/campos_512_v4
+150/763285/campos_512_v4
+150/763495/campos_512_v4
+150/763562/campos_512_v4
+150/763640/campos_512_v4
+150/763801/campos_512_v4
+150/763927/campos_512_v4
+150/763944/campos_512_v4
+150/764136/campos_512_v4
+150/764198/campos_512_v4
+150/764289/campos_512_v4
+150/764304/campos_512_v4
+150/764400/campos_512_v4
+150/764433/campos_512_v4
+150/764555/campos_512_v4
+150/764744/campos_512_v4
+150/764954/campos_512_v4
+150/764956/campos_512_v4
+150/764975/campos_512_v4
+150/765001/campos_512_v4
+151/765296/campos_512_v4
+151/765309/campos_512_v4
+151/765450/campos_512_v4
+151/765532/campos_512_v4
+151/765577/campos_512_v4
+151/765940/campos_512_v4
+151/765946/campos_512_v4
+151/766036/campos_512_v4
+151/766112/campos_512_v4
+151/766268/campos_512_v4
+151/766585/campos_512_v4
+151/766679/campos_512_v4
+151/766975/campos_512_v4
+151/767573/campos_512_v4
+151/767589/campos_512_v4
+151/767605/campos_512_v4
+151/767697/campos_512_v4
+151/767901/campos_512_v4
+151/767913/campos_512_v4
+151/767953/campos_512_v4
+151/767973/campos_512_v4
+151/768063/campos_512_v4
+151/768143/campos_512_v4
+151/768149/campos_512_v4
+151/768222/campos_512_v4
+151/768246/campos_512_v4
+151/768434/campos_512_v4
+151/768444/campos_512_v4
+151/768478/campos_512_v4
+151/768521/campos_512_v4
+151/768528/campos_512_v4
+151/768599/campos_512_v4
+151/768740/campos_512_v4
+151/768826/campos_512_v4
+151/768835/campos_512_v4
+151/768936/campos_512_v4
+151/768947/campos_512_v4
+151/768987/campos_512_v4
+151/769168/campos_512_v4
+151/769195/campos_512_v4
+151/769240/campos_512_v4
+151/769274/campos_512_v4
+151/769315/campos_512_v4
+151/769337/campos_512_v4
+151/769606/campos_512_v4
+151/769676/campos_512_v4
+151/769974/campos_512_v4
+152/770126/campos_512_v4
+152/770208/campos_512_v4
+152/770230/campos_512_v4
+152/770320/campos_512_v4
+152/770445/campos_512_v4
+152/770537/campos_512_v4
+152/770565/campos_512_v4
+152/770593/campos_512_v4
+152/770861/campos_512_v4
+152/770938/campos_512_v4
+152/770971/campos_512_v4
+152/771009/campos_512_v4
+152/771212/campos_512_v4
+152/771240/campos_512_v4
+152/771245/campos_512_v4
+152/771303/campos_512_v4
+152/771390/campos_512_v4
+152/771509/campos_512_v4
+152/771555/campos_512_v4
+152/771575/campos_512_v4
+152/771637/campos_512_v4
+152/771657/campos_512_v4
+152/771705/campos_512_v4
+152/771939/campos_512_v4
+152/771982/campos_512_v4
+152/772390/campos_512_v4
+152/772501/campos_512_v4
+152/772526/campos_512_v4
+152/772714/campos_512_v4
+152/772716/campos_512_v4
+152/772786/campos_512_v4
+152/772819/campos_512_v4
+152/772907/campos_512_v4
+152/773077/campos_512_v4
+152/773119/campos_512_v4
+152/773304/campos_512_v4
+152/773331/campos_512_v4
+152/773342/campos_512_v4
+152/773384/campos_512_v4
+152/773407/campos_512_v4
+152/773434/campos_512_v4
+152/773549/campos_512_v4
+152/773676/campos_512_v4
+152/773878/campos_512_v4
+152/773920/campos_512_v4
+152/773952/campos_512_v4
+152/773984/campos_512_v4
+152/774038/campos_512_v4
+152/774074/campos_512_v4
+152/774100/campos_512_v4
+152/774126/campos_512_v4
+152/774204/campos_512_v4
+152/774259/campos_512_v4
+152/774260/campos_512_v4
+152/774366/campos_512_v4
+152/774579/campos_512_v4
+152/774680/campos_512_v4
+152/774746/campos_512_v4
+152/774863/campos_512_v4
+152/774942/campos_512_v4
+153/775027/campos_512_v4
+153/775191/campos_512_v4
+153/775247/campos_512_v4
+153/775366/campos_512_v4
+153/775456/campos_512_v4
+153/775519/campos_512_v4
+153/775531/campos_512_v4
+153/775533/campos_512_v4
+153/775565/campos_512_v4
+153/775657/campos_512_v4
+153/775757/campos_512_v4
+153/775766/campos_512_v4
+153/775853/campos_512_v4
+153/775918/campos_512_v4
+153/775953/campos_512_v4
+153/775962/campos_512_v4
+153/776160/campos_512_v4
+153/776195/campos_512_v4
+153/776205/campos_512_v4
+153/776333/campos_512_v4
+153/776425/campos_512_v4
+153/776555/campos_512_v4
+153/776641/campos_512_v4
+153/776659/campos_512_v4
+153/776692/campos_512_v4
+153/776710/campos_512_v4
+153/776836/campos_512_v4
+153/776838/campos_512_v4
+153/776896/campos_512_v4
+153/777061/campos_512_v4
+153/777063/campos_512_v4
+153/777094/campos_512_v4
+153/777150/campos_512_v4
+153/777153/campos_512_v4
+153/777246/campos_512_v4
+153/777266/campos_512_v4
+153/777371/campos_512_v4
+153/777510/campos_512_v4
+153/777521/campos_512_v4
+153/777698/campos_512_v4
+153/777707/campos_512_v4
+153/777786/campos_512_v4
+153/777833/campos_512_v4
+153/777939/campos_512_v4
+153/777961/campos_512_v4
+153/777993/campos_512_v4
+153/778014/campos_512_v4
+153/778045/campos_512_v4
+153/778053/campos_512_v4
+153/778232/campos_512_v4
+153/778251/campos_512_v4
+153/778287/campos_512_v4
+153/778323/campos_512_v4
+153/778599/campos_512_v4
+153/778613/campos_512_v4
+153/778629/campos_512_v4
+153/778703/campos_512_v4
+153/778862/campos_512_v4
+153/778872/campos_512_v4
+153/778920/campos_512_v4
+153/778932/campos_512_v4
+153/779160/campos_512_v4
+153/779174/campos_512_v4
+153/779255/campos_512_v4
+153/779276/campos_512_v4
+153/779315/campos_512_v4
+153/779411/campos_512_v4
+153/779434/campos_512_v4
+153/779687/campos_512_v4
+153/779704/campos_512_v4
+153/779743/campos_512_v4
+153/779885/campos_512_v4
+153/779973/campos_512_v4
+153/779999/campos_512_v4
+154/780017/campos_512_v4
+154/780107/campos_512_v4
+154/780125/campos_512_v4
+154/780129/campos_512_v4
+154/780136/campos_512_v4
+154/780247/campos_512_v4
+154/780312/campos_512_v4
+154/780420/campos_512_v4
+154/780558/campos_512_v4
+154/780698/campos_512_v4
+154/780736/campos_512_v4
+154/780965/campos_512_v4
+154/781247/campos_512_v4
+154/781267/campos_512_v4
+154/781344/campos_512_v4
+154/781434/campos_512_v4
+154/781442/campos_512_v4
+154/781586/campos_512_v4
+154/781789/campos_512_v4
+154/781831/campos_512_v4
+154/781863/campos_512_v4
+154/782022/campos_512_v4
+154/782051/campos_512_v4
+154/782077/campos_512_v4
+154/782198/campos_512_v4
+154/782220/campos_512_v4
+154/782308/campos_512_v4
+154/782320/campos_512_v4
+154/782342/campos_512_v4
+154/782473/campos_512_v4
+154/782476/campos_512_v4
+154/782499/campos_512_v4
+154/782500/campos_512_v4
+154/782624/campos_512_v4
+154/782632/campos_512_v4
+154/782653/campos_512_v4
+154/782656/campos_512_v4
+154/782998/campos_512_v4
+154/783003/campos_512_v4
+154/783159/campos_512_v4
+154/783211/campos_512_v4
+154/783282/campos_512_v4
+154/783356/campos_512_v4
+154/783493/campos_512_v4
+154/783528/campos_512_v4
+154/783559/campos_512_v4
+154/783651/campos_512_v4
+154/783742/campos_512_v4
+154/783744/campos_512_v4
+154/783748/campos_512_v4
+154/783752/campos_512_v4
+154/783794/campos_512_v4
+154/783844/campos_512_v4
+154/783878/campos_512_v4
+154/783977/campos_512_v4
+154/784141/campos_512_v4
+154/784364/campos_512_v4
+154/784383/campos_512_v4
+154/784564/campos_512_v4
+154/784666/campos_512_v4
+154/784752/campos_512_v4
+154/784779/campos_512_v4
+154/784942/campos_512_v4
+154/784972/campos_512_v4
+155/785020/campos_512_v4
+155/785049/campos_512_v4
+155/785178/campos_512_v4
+155/785190/campos_512_v4
+155/785197/campos_512_v4
+155/785335/campos_512_v4
+155/785434/campos_512_v4
+155/785436/campos_512_v4
+155/785562/campos_512_v4
+155/785603/campos_512_v4
+155/785606/campos_512_v4
+155/785622/campos_512_v4
+155/785648/campos_512_v4
+155/785672/campos_512_v4
+155/785675/campos_512_v4
+155/785728/campos_512_v4
+155/785816/campos_512_v4
+155/785830/campos_512_v4
+155/785918/campos_512_v4
+155/785943/campos_512_v4
+155/785953/campos_512_v4
+155/786266/campos_512_v4
+155/786300/campos_512_v4
+155/786322/campos_512_v4
+155/786354/campos_512_v4
+155/786381/campos_512_v4
+155/786410/campos_512_v4
+155/786450/campos_512_v4
+155/786481/campos_512_v4
+155/786515/campos_512_v4
+155/786616/campos_512_v4
+155/787000/campos_512_v4
+155/787106/campos_512_v4
+155/787214/campos_512_v4
+155/787249/campos_512_v4
+155/787270/campos_512_v4
+155/787423/campos_512_v4
+155/787482/campos_512_v4
+155/787484/campos_512_v4
+155/787530/campos_512_v4
+155/787537/campos_512_v4
+155/787638/campos_512_v4
+155/787670/campos_512_v4
+155/787916/campos_512_v4
+155/788007/campos_512_v4
+155/788151/campos_512_v4
+155/788187/campos_512_v4
+155/788229/campos_512_v4
+155/788294/campos_512_v4
+155/788425/campos_512_v4
+155/788566/campos_512_v4
+155/788799/campos_512_v4
+155/789223/campos_512_v4
+155/789316/campos_512_v4
+155/789323/campos_512_v4
+155/789366/campos_512_v4
+155/789617/campos_512_v4
+155/789805/campos_512_v4
+155/789870/campos_512_v4
+155/789891/campos_512_v4
+155/789979/campos_512_v4
+155/789987/campos_512_v4
+156/790021/campos_512_v4
+156/790029/campos_512_v4
+156/790036/campos_512_v4
+156/790147/campos_512_v4
+156/790254/campos_512_v4
+156/790255/campos_512_v4
+156/790257/campos_512_v4
+156/790297/campos_512_v4
+156/790334/campos_512_v4
+156/790386/campos_512_v4
+156/790454/campos_512_v4
+156/790472/campos_512_v4
+156/790510/campos_512_v4
+156/790580/campos_512_v4
+156/790706/campos_512_v4
+156/790920/campos_512_v4
+156/790929/campos_512_v4
+156/790958/campos_512_v4
+156/790963/campos_512_v4
+156/790983/campos_512_v4
+156/791026/campos_512_v4
+156/791038/campos_512_v4
+156/791082/campos_512_v4
+156/791305/campos_512_v4
+156/791519/campos_512_v4
+156/791655/campos_512_v4
+156/791759/campos_512_v4
+156/791836/campos_512_v4
+156/792032/campos_512_v4
+156/792115/campos_512_v4
+156/792168/campos_512_v4
+156/792313/campos_512_v4
+156/792423/campos_512_v4
+156/792495/campos_512_v4
+156/792497/campos_512_v4
+156/792701/campos_512_v4
+156/792749/campos_512_v4
+156/792806/campos_512_v4
+156/792843/campos_512_v4
+156/792853/campos_512_v4
+156/792934/campos_512_v4
+156/793086/campos_512_v4
+156/793147/campos_512_v4
+156/793242/campos_512_v4
+156/793434/campos_512_v4
+156/793474/campos_512_v4
+156/793477/campos_512_v4
+156/793545/campos_512_v4
+156/793609/campos_512_v4
+156/793659/campos_512_v4
+156/793822/campos_512_v4
+156/793900/campos_512_v4
+156/794000/campos_512_v4
+156/794099/campos_512_v4
+156/794198/campos_512_v4
+156/794247/campos_512_v4
+156/794372/campos_512_v4
+156/794407/campos_512_v4
+156/794448/campos_512_v4
+156/794561/campos_512_v4
+156/794754/campos_512_v4
+156/794800/campos_512_v4
+156/794836/campos_512_v4
+156/794958/campos_512_v4
+156/794969/campos_512_v4
+156/794972/campos_512_v4
+156/794977/campos_512_v4
+157/795097/campos_512_v4
+157/795136/campos_512_v4
+157/795458/campos_512_v4
+157/795611/campos_512_v4
+157/795661/campos_512_v4
+157/795751/campos_512_v4
+157/795777/campos_512_v4
+157/795805/campos_512_v4
+157/795812/campos_512_v4
+157/795897/campos_512_v4
+157/796095/campos_512_v4
+157/796175/campos_512_v4
+157/796248/campos_512_v4
+157/796300/campos_512_v4
+157/796439/campos_512_v4
+157/796456/campos_512_v4
+157/796501/campos_512_v4
+157/796642/campos_512_v4
+157/796727/campos_512_v4
+157/796769/campos_512_v4
+157/796926/campos_512_v4
+157/796953/campos_512_v4
+157/797058/campos_512_v4
+157/797092/campos_512_v4
+157/797103/campos_512_v4
+157/797317/campos_512_v4
+157/797326/campos_512_v4
+157/797506/campos_512_v4
+157/797933/campos_512_v4
+157/797944/campos_512_v4
+157/797972/campos_512_v4
+157/797990/campos_512_v4
+157/798100/campos_512_v4
+157/798314/campos_512_v4
+157/798396/campos_512_v4
+157/798494/campos_512_v4
+157/798497/campos_512_v4
+157/798617/campos_512_v4
+157/798847/campos_512_v4
+157/798972/campos_512_v4
+157/799067/campos_512_v4
+157/799073/campos_512_v4
+157/799099/campos_512_v4
+157/799143/campos_512_v4
+157/799148/campos_512_v4
+157/799319/campos_512_v4
+157/799530/campos_512_v4
+157/799842/campos_512_v4
+158/800027/campos_512_v4
+158/800150/campos_512_v4
+158/800227/campos_512_v4
+158/800320/campos_512_v4
+158/800409/campos_512_v4
+158/800441/campos_512_v4
+158/800495/campos_512_v4
+158/800611/campos_512_v4
+158/800682/campos_512_v4
+158/800812/campos_512_v4
+158/800898/campos_512_v4
+158/800907/campos_512_v4
+158/801003/campos_512_v4
+158/801039/campos_512_v4
+158/801197/campos_512_v4
+158/801262/campos_512_v4
+158/801364/campos_512_v4
+158/801374/campos_512_v4
+158/801380/campos_512_v4
+158/801547/campos_512_v4
+158/801712/campos_512_v4
+158/801750/campos_512_v4
+158/801866/campos_512_v4
+158/801888/campos_512_v4
+158/801892/campos_512_v4
+158/801975/campos_512_v4
+158/801981/campos_512_v4
+158/801988/campos_512_v4
+158/802014/campos_512_v4
+158/802133/campos_512_v4
+158/802159/campos_512_v4
+158/802171/campos_512_v4
+158/802405/campos_512_v4
+158/802443/campos_512_v4
+158/802504/campos_512_v4
+158/802692/campos_512_v4
+158/802905/campos_512_v4
+158/802932/campos_512_v4
+158/802941/campos_512_v4
+158/803032/campos_512_v4
+158/803171/campos_512_v4
+158/803199/campos_512_v4
+158/803456/campos_512_v4
+158/803496/campos_512_v4
+158/803501/campos_512_v4
+158/803682/campos_512_v4
+158/803728/campos_512_v4
+158/803921/campos_512_v4
+158/803972/campos_512_v4
+158/804011/campos_512_v4
+158/804074/campos_512_v4
+158/804467/campos_512_v4
+158/804578/campos_512_v4
+158/804661/campos_512_v4
+158/804698/campos_512_v4
+158/804706/campos_512_v4
+158/804756/campos_512_v4
+158/804912/campos_512_v4
+158/804953/campos_512_v4
+159/805190/campos_512_v4
+159/805229/campos_512_v4
+159/805332/campos_512_v4
+159/805368/campos_512_v4
+159/805384/campos_512_v4
+159/805760/campos_512_v4
+159/805823/campos_512_v4
+159/805829/campos_512_v4
+159/805834/campos_512_v4
+159/805920/campos_512_v4
+159/805961/campos_512_v4
+159/806216/campos_512_v4
+159/806218/campos_512_v4
+159/806278/campos_512_v4
+159/806494/campos_512_v4
+159/806646/campos_512_v4
+159/806650/campos_512_v4
+159/806758/campos_512_v4
+159/806831/campos_512_v4
+159/806938/campos_512_v4
+159/807000/campos_512_v4
+159/807121/campos_512_v4
+159/807172/campos_512_v4
+159/807264/campos_512_v4
+159/807310/campos_512_v4
+159/807396/campos_512_v4
+159/807579/campos_512_v4
+159/807696/campos_512_v4
+159/807785/campos_512_v4
+159/807812/campos_512_v4
+159/808056/campos_512_v4
+159/808110/campos_512_v4
+159/808187/campos_512_v4
+159/808439/campos_512_v4
+159/808446/campos_512_v4
+159/808448/campos_512_v4
+159/808467/campos_512_v4
+159/808701/campos_512_v4
+16/90131/campos_512_v4
+16/90158/campos_512_v4
+16/90162/campos_512_v4
+16/90302/campos_512_v4
+16/90403/campos_512_v4
+16/90610/campos_512_v4
+16/90641/campos_512_v4
+16/90893/campos_512_v4
+16/91665/campos_512_v4
+16/91670/campos_512_v4
+16/91709/campos_512_v4
+16/92522/campos_512_v4
+16/92529/campos_512_v4
+16/93356/campos_512_v4
+16/93932/campos_512_v4
+16/94395/campos_512_v4
+16/94400/campos_512_v4
+16/94637/campos_512_v4
+16/94934/campos_512_v4
+17/95393/campos_512_v4
+17/95432/campos_512_v4
+17/95600/campos_512_v4
+17/95928/campos_512_v4
+17/96241/campos_512_v4
+17/96632/campos_512_v4
+17/96898/campos_512_v4
+17/96909/campos_512_v4
+17/96930/campos_512_v4
+17/97064/campos_512_v4
+17/97089/campos_512_v4
+17/97337/campos_512_v4
+17/97583/campos_512_v4
+17/97675/campos_512_v4
+17/97720/campos_512_v4
+17/97782/campos_512_v4
+17/97868/campos_512_v4
+17/98162/campos_512_v4
+17/98186/campos_512_v4
+17/98614/campos_512_v4
+17/98749/campos_512_v4
+17/99015/campos_512_v4
+17/99394/campos_512_v4
+17/99503/campos_512_v4
+17/99723/campos_512_v4
+17/99827/campos_512_v4
+17/99869/campos_512_v4
+2/20227/campos_512_v4
+2/21035/campos_512_v4
+2/21496/campos_512_v4
+2/21777/campos_512_v4
+2/22043/campos_512_v4
+2/22167/campos_512_v4
+2/22246/campos_512_v4
+2/22260/campos_512_v4
+2/22406/campos_512_v4
+2/22462/campos_512_v4
+2/22808/campos_512_v4
+2/22894/campos_512_v4
+2/23148/campos_512_v4
+2/23164/campos_512_v4
+2/23187/campos_512_v4
+2/23255/campos_512_v4
+2/23460/campos_512_v4
+2/23694/campos_512_v4
+2/24181/campos_512_v4
+2/24717/campos_512_v4
+23/125127/campos_512_v4
+23/125179/campos_512_v4
+23/125185/campos_512_v4
+23/125268/campos_512_v4
+23/125829/campos_512_v4
+23/125848/campos_512_v4
+23/125920/campos_512_v4
+23/125980/campos_512_v4
+23/126075/campos_512_v4
+23/126316/campos_512_v4
+23/126701/campos_512_v4
+23/128079/campos_512_v4
+23/128160/campos_512_v4
+23/128334/campos_512_v4
+23/128518/campos_512_v4
+23/129038/campos_512_v4
+23/129164/campos_512_v4
+23/129997/campos_512_v4
+24/130005/campos_512_v4
+24/131030/campos_512_v4
+24/131776/campos_512_v4
+24/132487/campos_512_v4
+24/132504/campos_512_v4
+24/133008/campos_512_v4
+24/133926/campos_512_v4
+24/134054/campos_512_v4
+24/134267/campos_512_v4
+24/134908/campos_512_v4
+25/137145/campos_512_v4
+25/138435/campos_512_v4
+25/138489/campos_512_v4
+25/138531/campos_512_v4
+25/138580/campos_512_v4
+25/138847/campos_512_v4
+25/139425/campos_512_v4
+25/139480/campos_512_v4
+25/139903/campos_512_v4
+25/139923/campos_512_v4
+26/140500/campos_512_v4
+26/140784/campos_512_v4
+26/141036/campos_512_v4
+26/141168/campos_512_v4
+26/141443/campos_512_v4
+26/141636/campos_512_v4
+26/141795/campos_512_v4
+26/142302/campos_512_v4
+26/142613/campos_512_v4
+26/142785/campos_512_v4
+26/142793/campos_512_v4
+26/143440/campos_512_v4
+26/143512/campos_512_v4
+26/143868/campos_512_v4
+26/143918/campos_512_v4
+26/144082/campos_512_v4
+26/144085/campos_512_v4
+26/144360/campos_512_v4
+26/144413/campos_512_v4
+26/144642/campos_512_v4
+27/145260/campos_512_v4
+27/145364/campos_512_v4
+27/145513/campos_512_v4
+27/145715/campos_512_v4
+27/146086/campos_512_v4
+27/146206/campos_512_v4
+27/146382/campos_512_v4
+27/146449/campos_512_v4
+27/147201/campos_512_v4
+27/148073/campos_512_v4
+27/148283/campos_512_v4
+27/148541/campos_512_v4
+27/148807/campos_512_v4
+27/149461/campos_512_v4
+27/149727/campos_512_v4
+28/150117/campos_512_v4
+28/150242/campos_512_v4
+28/150261/campos_512_v4
+28/150464/campos_512_v4
+28/150600/campos_512_v4
+28/150842/campos_512_v4
+28/151052/campos_512_v4
+28/151095/campos_512_v4
+28/151564/campos_512_v4
+28/151797/campos_512_v4
+28/151859/campos_512_v4
+28/151909/campos_512_v4
+28/152082/campos_512_v4
+28/152279/campos_512_v4
+28/152492/campos_512_v4
+28/152509/campos_512_v4
+28/152614/campos_512_v4
+28/152665/campos_512_v4
+28/152889/campos_512_v4
+28/153788/campos_512_v4
+28/153852/campos_512_v4
+28/153903/campos_512_v4
+28/153970/campos_512_v4
+28/153995/campos_512_v4
+28/154191/campos_512_v4
+28/154399/campos_512_v4
+28/154687/campos_512_v4
+28/154868/campos_512_v4
+28/154879/campos_512_v4
+28/154905/campos_512_v4
+29/155086/campos_512_v4
+29/155639/campos_512_v4
+29/155801/campos_512_v4
+29/156033/campos_512_v4
+29/156060/campos_512_v4
+29/156130/campos_512_v4
+29/156176/campos_512_v4
+29/156221/campos_512_v4
+29/156435/campos_512_v4
+29/157007/campos_512_v4
+29/157482/campos_512_v4
+29/157931/campos_512_v4
+29/158000/campos_512_v4
+29/158690/campos_512_v4
+29/158935/campos_512_v4
+29/158963/campos_512_v4
+29/159373/campos_512_v4
+30/160065/campos_512_v4
+30/160627/campos_512_v4
+30/160671/campos_512_v4
+30/161018/campos_512_v4
+30/161137/campos_512_v4
+30/161146/campos_512_v4
+30/161832/campos_512_v4
+30/162015/campos_512_v4
+30/162025/campos_512_v4
+30/162255/campos_512_v4
+30/162555/campos_512_v4
+30/162558/campos_512_v4
+30/162906/campos_512_v4
+30/163585/campos_512_v4
+30/163772/campos_512_v4
+30/163867/campos_512_v4
+30/164081/campos_512_v4
+30/164314/campos_512_v4
+30/164443/campos_512_v4
+30/164491/campos_512_v4
+31/165021/campos_512_v4
+31/165864/campos_512_v4
+31/165916/campos_512_v4
+31/166044/campos_512_v4
+31/166056/campos_512_v4
+31/166388/campos_512_v4
+31/166702/campos_512_v4
+31/166727/campos_512_v4
+31/167939/campos_512_v4
+31/167948/campos_512_v4
+31/168013/campos_512_v4
+31/168786/campos_512_v4
+31/168797/campos_512_v4
+31/168921/campos_512_v4
+31/168945/campos_512_v4
+31/169619/campos_512_v4
+32/170387/campos_512_v4
+32/170557/campos_512_v4
+32/171183/campos_512_v4
+32/171522/campos_512_v4
+32/171552/campos_512_v4
+32/171646/campos_512_v4
+32/171808/campos_512_v4
+32/171911/campos_512_v4
+32/171914/campos_512_v4
+32/172023/campos_512_v4
+32/172333/campos_512_v4
+32/172605/campos_512_v4
+32/172639/campos_512_v4
+32/173219/campos_512_v4
+32/173536/campos_512_v4
+32/173659/campos_512_v4
+32/173866/campos_512_v4
+32/174084/campos_512_v4
+33/175698/campos_512_v4
+33/175856/campos_512_v4
+33/176424/campos_512_v4
+33/177142/campos_512_v4
+33/177545/campos_512_v4
+33/177684/campos_512_v4
+33/178108/campos_512_v4
+33/178831/campos_512_v4
+33/178939/campos_512_v4
+33/179318/campos_512_v4
+33/179385/campos_512_v4
+33/179698/campos_512_v4
+33/179702/campos_512_v4
+34/180079/campos_512_v4
+34/180656/campos_512_v4
+34/180723/campos_512_v4
+34/180740/campos_512_v4
+34/181200/campos_512_v4
+34/181679/campos_512_v4
+34/181759/campos_512_v4
+34/181776/campos_512_v4
+34/181881/campos_512_v4
+34/181948/campos_512_v4
+34/182097/campos_512_v4
+34/184390/campos_512_v4
+34/184403/campos_512_v4
+34/184436/campos_512_v4
+34/184720/campos_512_v4
+35/185082/campos_512_v4
+35/185607/campos_512_v4
+35/186020/campos_512_v4
+35/186104/campos_512_v4
+35/186147/campos_512_v4
+35/186391/campos_512_v4
+35/186465/campos_512_v4
+35/186598/campos_512_v4
+35/186704/campos_512_v4
+35/187371/campos_512_v4
+35/187560/campos_512_v4
+35/187913/campos_512_v4
+35/188548/campos_512_v4
+35/188985/campos_512_v4
+35/189111/campos_512_v4
+35/189427/campos_512_v4
+35/189493/campos_512_v4
+35/189671/campos_512_v4
+35/189744/campos_512_v4
+36/190175/campos_512_v4
+36/190785/campos_512_v4
+36/191159/campos_512_v4
+36/192097/campos_512_v4
+36/193120/campos_512_v4
+36/193209/campos_512_v4
+36/193741/campos_512_v4
+36/194116/campos_512_v4
+36/194545/campos_512_v4
+37/195259/campos_512_v4
+37/195655/campos_512_v4
+37/195813/campos_512_v4
+37/195870/campos_512_v4
+37/196431/campos_512_v4
+37/197033/campos_512_v4
+37/197457/campos_512_v4
+37/197473/campos_512_v4
+37/197846/campos_512_v4
+37/198219/campos_512_v4
+37/198994/campos_512_v4
+37/199704/campos_512_v4
+38/200237/campos_512_v4
+38/200513/campos_512_v4
+38/200704/campos_512_v4
+38/201217/campos_512_v4
+38/201408/campos_512_v4
+38/201461/campos_512_v4
+38/202029/campos_512_v4
+38/202060/campos_512_v4
+38/202663/campos_512_v4
+38/202721/campos_512_v4
+38/202840/campos_512_v4
+38/203270/campos_512_v4
+38/203361/campos_512_v4
+38/203417/campos_512_v4
+38/203699/campos_512_v4
+38/203879/campos_512_v4
+38/204000/campos_512_v4
+38/204306/campos_512_v4
+38/204458/campos_512_v4
+38/204462/campos_512_v4
+38/204615/campos_512_v4
+38/204723/campos_512_v4
+38/204813/campos_512_v4
+38/204826/campos_512_v4
+4/30042/campos_512_v4
+4/30200/campos_512_v4
+4/30217/campos_512_v4
+4/30273/campos_512_v4
+4/30373/campos_512_v4
+4/31601/campos_512_v4
+4/31696/campos_512_v4
+4/31862/campos_512_v4
+4/31986/campos_512_v4
+4/32050/campos_512_v4
+4/32156/campos_512_v4
+4/32945/campos_512_v4
+4/32960/campos_512_v4
+4/33004/campos_512_v4
+4/33046/campos_512_v4
+4/33146/campos_512_v4
+4/33180/campos_512_v4
+4/33618/campos_512_v4
+4/34029/campos_512_v4
+4/34036/campos_512_v4
+4/34178/campos_512_v4
+4/34592/campos_512_v4
+4/34741/campos_512_v4
+4/34839/campos_512_v4
+4/34889/campos_512_v4
+4/34987/campos_512_v4
+40/210062/campos_512_v4
+40/210211/campos_512_v4
+40/210214/campos_512_v4
+40/210236/campos_512_v4
+40/210291/campos_512_v4
+40/210311/campos_512_v4
+40/210359/campos_512_v4
+40/210488/campos_512_v4
+40/210632/campos_512_v4
+40/210641/campos_512_v4
+40/210938/campos_512_v4
+40/210972/campos_512_v4
+40/211054/campos_512_v4
+40/211106/campos_512_v4
+40/211221/campos_512_v4
+40/211249/campos_512_v4
+40/211308/campos_512_v4
+40/211379/campos_512_v4
+40/211477/campos_512_v4
+40/211500/campos_512_v4
+40/211514/campos_512_v4
+40/211598/campos_512_v4
+40/211746/campos_512_v4
+40/211790/campos_512_v4
+40/211803/campos_512_v4
+40/211943/campos_512_v4
+40/212085/campos_512_v4
+40/212152/campos_512_v4
+40/212279/campos_512_v4
+40/212389/campos_512_v4
+40/212402/campos_512_v4
+40/212527/campos_512_v4
+40/212571/campos_512_v4
+40/212601/campos_512_v4
+40/212631/campos_512_v4
+40/212667/campos_512_v4
+40/212767/campos_512_v4
+40/212860/campos_512_v4
+40/212881/campos_512_v4
+40/212904/campos_512_v4
+40/212978/campos_512_v4
+40/213049/campos_512_v4
+40/213114/campos_512_v4
+40/213135/campos_512_v4
+40/213181/campos_512_v4
+40/213244/campos_512_v4
+40/213350/campos_512_v4
+40/213440/campos_512_v4
+40/213469/campos_512_v4
+40/213512/campos_512_v4
+40/213592/campos_512_v4
+40/213672/campos_512_v4
+40/213723/campos_512_v4
+40/213729/campos_512_v4
+40/213737/campos_512_v4
+40/213815/campos_512_v4
+40/214089/campos_512_v4
+40/214274/campos_512_v4
+40/214293/campos_512_v4
+40/214392/campos_512_v4
+40/214490/campos_512_v4
+40/214707/campos_512_v4
+40/214759/campos_512_v4
+40/214885/campos_512_v4
+41/215137/campos_512_v4
+41/215175/campos_512_v4
+41/215181/campos_512_v4
+41/215410/campos_512_v4
+41/215414/campos_512_v4
+41/215428/campos_512_v4
+41/215430/campos_512_v4
+41/215594/campos_512_v4
+41/215621/campos_512_v4
+41/215623/campos_512_v4
+41/215665/campos_512_v4
+41/215737/campos_512_v4
+41/215743/campos_512_v4
+41/215825/campos_512_v4
+41/215874/campos_512_v4
+41/215897/campos_512_v4
+41/215939/campos_512_v4
+41/216024/campos_512_v4
+41/216107/campos_512_v4
+41/216173/campos_512_v4
+41/216210/campos_512_v4
+41/216286/campos_512_v4
+41/216357/campos_512_v4
+41/216434/campos_512_v4
+41/216493/campos_512_v4
+41/216565/campos_512_v4
+41/216611/campos_512_v4
+41/216674/campos_512_v4
+41/216693/campos_512_v4
+41/216736/campos_512_v4
+41/216750/campos_512_v4
+41/216834/campos_512_v4
+41/216922/campos_512_v4
+41/217082/campos_512_v4
+41/217118/campos_512_v4
+41/217228/campos_512_v4
+41/217268/campos_512_v4
+41/217339/campos_512_v4
+41/217341/campos_512_v4
+41/217393/campos_512_v4
+41/217411/campos_512_v4
+41/217652/campos_512_v4
+41/217737/campos_512_v4
+41/217767/campos_512_v4
+41/217780/campos_512_v4
+41/217857/campos_512_v4
+41/218049/campos_512_v4
+41/218063/campos_512_v4
+41/218118/campos_512_v4
+41/218129/campos_512_v4
+41/218180/campos_512_v4
+41/218286/campos_512_v4
+41/218331/campos_512_v4
+41/218566/campos_512_v4
+41/218570/campos_512_v4
+41/218699/campos_512_v4
+41/218756/campos_512_v4
+41/218761/campos_512_v4
+41/218778/campos_512_v4
+41/218858/campos_512_v4
+41/218963/campos_512_v4
+41/219030/campos_512_v4
+41/219166/campos_512_v4
+41/219325/campos_512_v4
+41/219397/campos_512_v4
+41/219574/campos_512_v4
+41/219766/campos_512_v4
+41/219860/campos_512_v4
+41/219918/campos_512_v4
+41/219931/campos_512_v4
+41/219963/campos_512_v4
+41/219968/campos_512_v4
+41/219986/campos_512_v4
+42/220020/campos_512_v4
+42/220029/campos_512_v4
+42/220132/campos_512_v4
+42/220504/campos_512_v4
+42/220525/campos_512_v4
+42/220579/campos_512_v4
+42/220617/campos_512_v4
+42/220657/campos_512_v4
+42/220727/campos_512_v4
+42/220817/campos_512_v4
+42/221003/campos_512_v4
+42/221012/campos_512_v4
+42/221046/campos_512_v4
+42/221198/campos_512_v4
+42/221200/campos_512_v4
+42/221540/campos_512_v4
+42/221593/campos_512_v4
+42/221605/campos_512_v4
+42/221667/campos_512_v4
+42/221707/campos_512_v4
+42/221749/campos_512_v4
+42/221776/campos_512_v4
+42/221904/campos_512_v4
+42/222019/campos_512_v4
+42/222089/campos_512_v4
+42/222112/campos_512_v4
+42/222194/campos_512_v4
+42/222437/campos_512_v4
+42/222461/campos_512_v4
+42/222697/campos_512_v4
+42/222833/campos_512_v4
+42/222987/campos_512_v4
+42/223171/campos_512_v4
+42/223174/campos_512_v4
+42/223190/campos_512_v4
+42/223315/campos_512_v4
+42/223350/campos_512_v4
+42/223379/campos_512_v4
+42/223389/campos_512_v4
+42/223466/campos_512_v4
+42/223475/campos_512_v4
+42/223624/campos_512_v4
+42/223957/campos_512_v4
+42/224185/campos_512_v4
+42/224230/campos_512_v4
+42/224433/campos_512_v4
+42/224439/campos_512_v4
+42/224472/campos_512_v4
+42/224534/campos_512_v4
+42/224603/campos_512_v4
+42/224610/campos_512_v4
+42/224644/campos_512_v4
+42/224659/campos_512_v4
+42/224662/campos_512_v4
+42/224722/campos_512_v4
+42/224758/campos_512_v4
+42/224770/campos_512_v4
+42/224827/campos_512_v4
+42/224828/campos_512_v4
+42/224858/campos_512_v4
+42/224890/campos_512_v4
+42/224895/campos_512_v4
+42/224957/campos_512_v4
+42/224960/campos_512_v4
+43/225049/campos_512_v4
+43/225171/campos_512_v4
+43/225179/campos_512_v4
+43/225221/campos_512_v4
+43/225299/campos_512_v4
+43/225326/campos_512_v4
+43/225399/campos_512_v4
+43/225444/campos_512_v4
+43/225527/campos_512_v4
+43/225833/campos_512_v4
+43/226220/campos_512_v4
+43/226381/campos_512_v4
+43/226419/campos_512_v4
+43/226438/campos_512_v4
+43/226462/campos_512_v4
+43/226529/campos_512_v4
+43/226540/campos_512_v4
+43/226571/campos_512_v4
+43/226611/campos_512_v4
+43/226677/campos_512_v4
+43/226697/campos_512_v4
+43/226780/campos_512_v4
+43/226975/campos_512_v4
+43/227126/campos_512_v4
+43/227128/campos_512_v4
+43/227140/campos_512_v4
+43/227276/campos_512_v4
+43/227315/campos_512_v4
+43/227384/campos_512_v4
+43/227412/campos_512_v4
+43/227507/campos_512_v4
+43/227585/campos_512_v4
+43/227787/campos_512_v4
+43/227846/campos_512_v4
+43/227927/campos_512_v4
+43/228056/campos_512_v4
+43/228102/campos_512_v4
+43/228206/campos_512_v4
+43/228222/campos_512_v4
+43/228344/campos_512_v4
+43/228390/campos_512_v4
+43/228478/campos_512_v4
+43/228502/campos_512_v4
+43/228666/campos_512_v4
+43/228716/campos_512_v4
+43/228884/campos_512_v4
+43/228891/campos_512_v4
+43/229031/campos_512_v4
+43/229057/campos_512_v4
+43/229074/campos_512_v4
+43/229075/campos_512_v4
+43/229399/campos_512_v4
+43/229467/campos_512_v4
+43/229468/campos_512_v4
+43/229519/campos_512_v4
+43/229721/campos_512_v4
+43/229768/campos_512_v4
+43/229788/campos_512_v4
+43/229800/campos_512_v4
+43/229801/campos_512_v4
+43/229806/campos_512_v4
+43/229856/campos_512_v4
+43/229977/campos_512_v4
+44/230124/campos_512_v4
+44/230236/campos_512_v4
+44/230344/campos_512_v4
+44/230369/campos_512_v4
+44/230456/campos_512_v4
+44/230562/campos_512_v4
+44/230677/campos_512_v4
+44/230702/campos_512_v4
+44/230756/campos_512_v4
+44/230890/campos_512_v4
+44/230940/campos_512_v4
+44/230954/campos_512_v4
+44/231003/campos_512_v4
+44/231027/campos_512_v4
+44/231033/campos_512_v4
+44/231087/campos_512_v4
+44/231344/campos_512_v4
+44/231348/campos_512_v4
+44/231492/campos_512_v4
+44/231515/campos_512_v4
+44/231526/campos_512_v4
+44/231651/campos_512_v4
+44/231759/campos_512_v4
+44/231998/campos_512_v4
+44/232022/campos_512_v4
+44/232207/campos_512_v4
+44/232264/campos_512_v4
+44/232545/campos_512_v4
+44/232576/campos_512_v4
+44/232610/campos_512_v4
+44/232964/campos_512_v4
+44/233035/campos_512_v4
+44/233204/campos_512_v4
+44/233226/campos_512_v4
+44/233315/campos_512_v4
+44/233343/campos_512_v4
+44/233433/campos_512_v4
+44/233448/campos_512_v4
+44/233732/campos_512_v4
+44/233747/campos_512_v4
+44/233825/campos_512_v4
+44/233909/campos_512_v4
+44/233969/campos_512_v4
+44/234007/campos_512_v4
+44/234365/campos_512_v4
+44/234402/campos_512_v4
+44/234425/campos_512_v4
+44/234452/campos_512_v4
+44/234485/campos_512_v4
+44/234541/campos_512_v4
+44/234825/campos_512_v4
+44/234903/campos_512_v4
+44/234945/campos_512_v4
+44/234949/campos_512_v4
+44/234964/campos_512_v4
+45/235035/campos_512_v4
+45/235193/campos_512_v4
+45/235254/campos_512_v4
+45/235359/campos_512_v4
+45/235381/campos_512_v4
+45/235753/campos_512_v4
+45/235783/campos_512_v4
+45/235798/campos_512_v4
+45/235870/campos_512_v4
+45/235888/campos_512_v4
+45/235907/campos_512_v4
+45/235932/campos_512_v4
+45/236031/campos_512_v4
+45/236136/campos_512_v4
+45/236180/campos_512_v4
+45/236317/campos_512_v4
+45/236398/campos_512_v4
+45/236427/campos_512_v4
+45/236480/campos_512_v4
+45/236519/campos_512_v4
+45/236611/campos_512_v4
+45/236667/campos_512_v4
+45/236680/campos_512_v4
+45/236725/campos_512_v4
+45/236861/campos_512_v4
+45/236929/campos_512_v4
+45/236991/campos_512_v4
+45/237024/campos_512_v4
+45/237064/campos_512_v4
+45/237246/campos_512_v4
+45/237326/campos_512_v4
+45/237470/campos_512_v4
+45/237522/campos_512_v4
+45/237671/campos_512_v4
+45/237742/campos_512_v4
+45/237819/campos_512_v4
+45/237867/campos_512_v4
+45/238065/campos_512_v4
+45/238196/campos_512_v4
+45/238211/campos_512_v4
+45/238218/campos_512_v4
+45/238392/campos_512_v4
+45/238456/campos_512_v4
+45/238458/campos_512_v4
+45/238534/campos_512_v4
+45/238554/campos_512_v4
+45/238613/campos_512_v4
+45/238645/campos_512_v4
+45/238701/campos_512_v4
+45/238714/campos_512_v4
+45/238894/campos_512_v4
+45/238976/campos_512_v4
+45/239012/campos_512_v4
+45/239034/campos_512_v4
+45/239122/campos_512_v4
+45/239213/campos_512_v4
+45/239271/campos_512_v4
+45/239343/campos_512_v4
+45/239500/campos_512_v4
+45/239583/campos_512_v4
+45/239671/campos_512_v4
+45/239764/campos_512_v4
+45/239819/campos_512_v4
+45/239876/campos_512_v4
+45/239896/campos_512_v4
+45/239971/campos_512_v4
+45/239984/campos_512_v4
+45/239989/campos_512_v4
+46/240116/campos_512_v4
+46/240122/campos_512_v4
+46/240141/campos_512_v4
+46/240356/campos_512_v4
+46/240365/campos_512_v4
+46/240423/campos_512_v4
+46/240425/campos_512_v4
+46/240451/campos_512_v4
+46/240530/campos_512_v4
+46/240550/campos_512_v4
+46/240584/campos_512_v4
+46/240619/campos_512_v4
+46/240655/campos_512_v4
+46/240678/campos_512_v4
+46/240682/campos_512_v4
+46/240765/campos_512_v4
+46/240771/campos_512_v4
+46/240793/campos_512_v4
+46/240865/campos_512_v4
+46/240915/campos_512_v4
+46/241093/campos_512_v4
+46/241138/campos_512_v4
+46/241148/campos_512_v4
+46/241149/campos_512_v4
+46/241180/campos_512_v4
+46/241205/campos_512_v4
+46/241230/campos_512_v4
+46/241374/campos_512_v4
+46/241534/campos_512_v4
+46/241642/campos_512_v4
+46/241646/campos_512_v4
+46/241660/campos_512_v4
+46/241697/campos_512_v4
+46/241736/campos_512_v4
+46/241802/campos_512_v4
+46/241950/campos_512_v4
+46/242100/campos_512_v4
+46/242124/campos_512_v4
+46/242212/campos_512_v4
+46/242225/campos_512_v4
+46/242241/campos_512_v4
+46/242249/campos_512_v4
+46/242256/campos_512_v4
+46/242266/campos_512_v4
+46/242403/campos_512_v4
+46/242559/campos_512_v4
+46/242618/campos_512_v4
+46/242733/campos_512_v4
+46/242752/campos_512_v4
+46/242787/campos_512_v4
+46/243023/campos_512_v4
+46/243218/campos_512_v4
+46/243338/campos_512_v4
+46/243399/campos_512_v4
+46/243517/campos_512_v4
+46/243589/campos_512_v4
+46/243708/campos_512_v4
+46/243829/campos_512_v4
+46/243842/campos_512_v4
+46/244134/campos_512_v4
+46/244143/campos_512_v4
+46/244166/campos_512_v4
+46/244190/campos_512_v4
+46/244207/campos_512_v4
+46/244454/campos_512_v4
+46/244480/campos_512_v4
+46/244574/campos_512_v4
+46/244608/campos_512_v4
+46/244650/campos_512_v4
+46/244681/campos_512_v4
+46/244710/campos_512_v4
+46/244716/campos_512_v4
+46/244723/campos_512_v4
+46/244737/campos_512_v4
+46/244829/campos_512_v4
+46/244876/campos_512_v4
+46/244881/campos_512_v4
+46/245000/campos_512_v4
+47/245060/campos_512_v4
+47/245078/campos_512_v4
+47/245206/campos_512_v4
+47/245207/campos_512_v4
+47/245226/campos_512_v4
+47/245280/campos_512_v4
+47/245465/campos_512_v4
+47/245497/campos_512_v4
+47/245630/campos_512_v4
+47/245662/campos_512_v4
+47/245722/campos_512_v4
+47/245726/campos_512_v4
+47/245728/campos_512_v4
+47/245881/campos_512_v4
+47/245894/campos_512_v4
+47/246004/campos_512_v4
+47/246096/campos_512_v4
+47/246447/campos_512_v4
+47/246448/campos_512_v4
+47/246555/campos_512_v4
+47/246943/campos_512_v4
+47/246971/campos_512_v4
+47/247000/campos_512_v4
+47/247025/campos_512_v4
+47/247075/campos_512_v4
+47/247190/campos_512_v4
+47/247350/campos_512_v4
+47/247362/campos_512_v4
+47/247560/campos_512_v4
+47/247570/campos_512_v4
+47/247608/campos_512_v4
+47/247840/campos_512_v4
+47/247879/campos_512_v4
+47/247896/campos_512_v4
+47/247957/campos_512_v4
+47/248044/campos_512_v4
+47/248090/campos_512_v4
+47/248145/campos_512_v4
+47/248244/campos_512_v4
+47/248455/campos_512_v4
+47/248472/campos_512_v4
+47/248497/campos_512_v4
+47/248593/campos_512_v4
+47/248601/campos_512_v4
+47/248656/campos_512_v4
+47/248779/campos_512_v4
+47/248830/campos_512_v4
+47/248891/campos_512_v4
+47/248997/campos_512_v4
+47/249088/campos_512_v4
+47/249118/campos_512_v4
+47/249146/campos_512_v4
+47/249236/campos_512_v4
+47/249302/campos_512_v4
+47/249403/campos_512_v4
+47/249521/campos_512_v4
+47/249674/campos_512_v4
+47/249846/campos_512_v4
+47/249952/campos_512_v4
+48/250080/campos_512_v4
+48/250136/campos_512_v4
+48/250148/campos_512_v4
+48/250150/campos_512_v4
+48/250209/campos_512_v4
+48/250243/campos_512_v4
+48/250257/campos_512_v4
+48/250341/campos_512_v4
+48/250401/campos_512_v4
+48/250551/campos_512_v4
+48/250561/campos_512_v4
+48/250582/campos_512_v4
+48/250694/campos_512_v4
+48/250738/campos_512_v4
+48/250935/campos_512_v4
+48/251056/campos_512_v4
+48/251281/campos_512_v4
+48/251321/campos_512_v4
+48/251334/campos_512_v4
+48/251381/campos_512_v4
+48/251540/campos_512_v4
+48/251608/campos_512_v4
+48/251672/campos_512_v4
+48/251791/campos_512_v4
+48/251865/campos_512_v4
+48/251881/campos_512_v4
+48/251951/campos_512_v4
+48/251959/campos_512_v4
+48/251962/campos_512_v4
+48/252062/campos_512_v4
+48/252112/campos_512_v4
+48/252185/campos_512_v4
+48/252191/campos_512_v4
+48/252231/campos_512_v4
+48/252321/campos_512_v4
+48/252356/campos_512_v4
+48/252445/campos_512_v4
+48/252471/campos_512_v4
+48/252486/campos_512_v4
+48/252666/campos_512_v4
+48/252692/campos_512_v4
+48/253038/campos_512_v4
+48/253088/campos_512_v4
+48/253132/campos_512_v4
+48/253203/campos_512_v4
+48/253315/campos_512_v4
+48/253442/campos_512_v4
+48/253483/campos_512_v4
+48/253501/campos_512_v4
+48/253683/campos_512_v4
+48/253774/campos_512_v4
+48/253780/campos_512_v4
+48/253811/campos_512_v4
+48/253873/campos_512_v4
+48/253894/campos_512_v4
+48/253978/campos_512_v4
+48/253985/campos_512_v4
+48/253992/campos_512_v4
+48/254122/campos_512_v4
+48/254457/campos_512_v4
+48/254641/campos_512_v4
+48/254650/campos_512_v4
+48/254868/campos_512_v4
+48/254949/campos_512_v4
+49/255027/campos_512_v4
+49/255060/campos_512_v4
+49/255085/campos_512_v4
+49/255135/campos_512_v4
+49/255219/campos_512_v4
+49/255297/campos_512_v4
+49/255497/campos_512_v4
+49/255749/campos_512_v4
+49/255805/campos_512_v4
+49/255815/campos_512_v4
+49/255921/campos_512_v4
+49/256014/campos_512_v4
+49/256347/campos_512_v4
+49/256369/campos_512_v4
+49/256462/campos_512_v4
+49/256586/campos_512_v4
+49/256779/campos_512_v4
+49/256831/campos_512_v4
+49/256857/campos_512_v4
+49/256873/campos_512_v4
+49/256883/campos_512_v4
+49/256921/campos_512_v4
+49/256934/campos_512_v4
+49/256936/campos_512_v4
+49/256988/campos_512_v4
+49/257002/campos_512_v4
+49/257109/campos_512_v4
+49/257125/campos_512_v4
+49/257211/campos_512_v4
+49/257302/campos_512_v4
+49/257386/campos_512_v4
+49/257461/campos_512_v4
+49/257572/campos_512_v4
+49/257721/campos_512_v4
+49/257754/campos_512_v4
+49/257781/campos_512_v4
+49/257802/campos_512_v4
+49/257857/campos_512_v4
+49/257894/campos_512_v4
+49/257928/campos_512_v4
+49/257933/campos_512_v4
+49/258006/campos_512_v4
+49/258060/campos_512_v4
+49/258076/campos_512_v4
+49/258165/campos_512_v4
+49/258260/campos_512_v4
+49/258263/campos_512_v4
+49/258289/campos_512_v4
+49/258315/campos_512_v4
+49/258802/campos_512_v4
+49/258827/campos_512_v4
+49/258945/campos_512_v4
+49/259008/campos_512_v4
+49/259048/campos_512_v4
+49/259103/campos_512_v4
+49/259116/campos_512_v4
+49/259245/campos_512_v4
+49/259343/campos_512_v4
+49/259456/campos_512_v4
+49/259457/campos_512_v4
+49/259472/campos_512_v4
+49/259527/campos_512_v4
+49/259536/campos_512_v4
+49/259598/campos_512_v4
+49/259617/campos_512_v4
+49/259649/campos_512_v4
+49/259808/campos_512_v4
+49/259888/campos_512_v4
+49/259925/campos_512_v4
+5/35217/campos_512_v4
+5/35298/campos_512_v4
+5/35503/campos_512_v4
+5/35547/campos_512_v4
+5/35711/campos_512_v4
+5/35734/campos_512_v4
+5/35887/campos_512_v4
+5/36077/campos_512_v4
+5/36080/campos_512_v4
+5/36213/campos_512_v4
+5/36426/campos_512_v4
+5/36581/campos_512_v4
+5/36795/campos_512_v4
+5/36950/campos_512_v4
+5/37118/campos_512_v4
+5/37127/campos_512_v4
+5/37348/campos_512_v4
+5/37351/campos_512_v4
+5/37430/campos_512_v4
+5/38054/campos_512_v4
+5/38262/campos_512_v4
+5/38266/campos_512_v4
+5/38284/campos_512_v4
+5/38719/campos_512_v4
+5/39123/campos_512_v4
+5/39286/campos_512_v4
+5/39300/campos_512_v4
+5/39491/campos_512_v4
+5/39874/campos_512_v4
+5/39936/campos_512_v4
+50/260223/campos_512_v4
+50/260265/campos_512_v4
+50/260300/campos_512_v4
+50/260305/campos_512_v4
+50/260422/campos_512_v4
+50/260575/campos_512_v4
+50/260577/campos_512_v4
+50/260738/campos_512_v4
+50/260746/campos_512_v4
+50/260897/campos_512_v4
+50/260957/campos_512_v4
+50/260959/campos_512_v4
+50/261104/campos_512_v4
+50/261250/campos_512_v4
+50/261478/campos_512_v4
+50/261695/campos_512_v4
+50/261698/campos_512_v4
+50/261795/campos_512_v4
+50/261852/campos_512_v4
+50/261938/campos_512_v4
+50/261947/campos_512_v4
+50/262013/campos_512_v4
+50/262026/campos_512_v4
+50/262083/campos_512_v4
+50/262223/campos_512_v4
+50/262250/campos_512_v4
+50/262326/campos_512_v4
+50/262348/campos_512_v4
+50/262365/campos_512_v4
+50/262485/campos_512_v4
+50/262540/campos_512_v4
+50/262834/campos_512_v4
+50/263030/campos_512_v4
+50/263183/campos_512_v4
+50/263229/campos_512_v4
+50/263338/campos_512_v4
+50/263390/campos_512_v4
+50/263430/campos_512_v4
+50/263550/campos_512_v4
+50/263629/campos_512_v4
+50/263630/campos_512_v4
+50/263758/campos_512_v4
+50/263772/campos_512_v4
+50/263790/campos_512_v4
+50/263810/campos_512_v4
+50/263819/campos_512_v4
+50/264151/campos_512_v4
+50/264220/campos_512_v4
+50/264233/campos_512_v4
+50/264291/campos_512_v4
+50/264416/campos_512_v4
+50/264578/campos_512_v4
+50/264580/campos_512_v4
+50/264896/campos_512_v4
+50/264989/campos_512_v4
+51/265023/campos_512_v4
+51/265078/campos_512_v4
+51/265094/campos_512_v4
+51/265130/campos_512_v4
+51/265137/campos_512_v4
+51/265201/campos_512_v4
+51/265225/campos_512_v4
+51/265226/campos_512_v4
+51/265267/campos_512_v4
+51/265270/campos_512_v4
+51/265490/campos_512_v4
+51/265544/campos_512_v4
+51/265989/campos_512_v4
+51/266030/campos_512_v4
+51/266119/campos_512_v4
+51/266228/campos_512_v4
+51/266324/campos_512_v4
+51/266338/campos_512_v4
+51/266344/campos_512_v4
+51/266439/campos_512_v4
+51/266481/campos_512_v4
+51/266492/campos_512_v4
+51/266594/campos_512_v4
+51/266623/campos_512_v4
+51/266663/campos_512_v4
+51/266689/campos_512_v4
+51/266762/campos_512_v4
+51/266794/campos_512_v4
+51/266860/campos_512_v4
+51/266921/campos_512_v4
+51/266956/campos_512_v4
+51/267106/campos_512_v4
+51/267117/campos_512_v4
+51/267160/campos_512_v4
+51/267463/campos_512_v4
+51/267550/campos_512_v4
+51/267565/campos_512_v4
+51/267596/campos_512_v4
+51/267637/campos_512_v4
+51/267753/campos_512_v4
+51/267811/campos_512_v4
+51/267829/campos_512_v4
+51/267933/campos_512_v4
+51/268017/campos_512_v4
+51/268195/campos_512_v4
+51/268282/campos_512_v4
+51/268308/campos_512_v4
+51/268447/campos_512_v4
+51/268602/campos_512_v4
+51/268867/campos_512_v4
+51/268920/campos_512_v4
+51/268925/campos_512_v4
+51/268998/campos_512_v4
+51/269076/campos_512_v4
+51/269101/campos_512_v4
+51/269144/campos_512_v4
+51/269236/campos_512_v4
+51/269239/campos_512_v4
+51/269283/campos_512_v4
+51/269427/campos_512_v4
+51/269508/campos_512_v4
+51/269524/campos_512_v4
+51/269584/campos_512_v4
+51/269600/campos_512_v4
+51/269750/campos_512_v4
+51/269853/campos_512_v4
+51/269921/campos_512_v4
+51/269923/campos_512_v4
+52/270058/campos_512_v4
+52/270064/campos_512_v4
+52/270073/campos_512_v4
+52/270173/campos_512_v4
+52/270360/campos_512_v4
+52/270411/campos_512_v4
+52/270502/campos_512_v4
+52/270692/campos_512_v4
+52/270895/campos_512_v4
+52/271082/campos_512_v4
+52/271158/campos_512_v4
+52/271171/campos_512_v4
+52/271180/campos_512_v4
+52/271420/campos_512_v4
+52/271481/campos_512_v4
+52/271499/campos_512_v4
+52/271630/campos_512_v4
+52/271654/campos_512_v4
+52/271709/campos_512_v4
+52/271854/campos_512_v4
+52/272060/campos_512_v4
+52/272083/campos_512_v4
+52/272114/campos_512_v4
+52/272147/campos_512_v4
+52/272164/campos_512_v4
+52/272185/campos_512_v4
+52/272230/campos_512_v4
+52/272280/campos_512_v4
+52/272499/campos_512_v4
+52/272507/campos_512_v4
+52/272559/campos_512_v4
+52/272652/campos_512_v4
+52/272657/campos_512_v4
+52/272675/campos_512_v4
+52/272682/campos_512_v4
+52/272859/campos_512_v4
+52/272890/campos_512_v4
+52/272949/campos_512_v4
+52/272955/campos_512_v4
+52/272972/campos_512_v4
+52/273227/campos_512_v4
+52/273229/campos_512_v4
+52/273322/campos_512_v4
+52/273376/campos_512_v4
+52/273418/campos_512_v4
+52/273419/campos_512_v4
+52/273425/campos_512_v4
+52/273430/campos_512_v4
+52/273458/campos_512_v4
+52/273491/campos_512_v4
+52/273726/campos_512_v4
+52/273727/campos_512_v4
+52/273936/campos_512_v4
+52/273946/campos_512_v4
+52/273972/campos_512_v4
+52/274035/campos_512_v4
+52/274043/campos_512_v4
+52/274098/campos_512_v4
+52/274147/campos_512_v4
+52/274204/campos_512_v4
+52/274351/campos_512_v4
+52/274367/campos_512_v4
+52/274431/campos_512_v4
+52/274522/campos_512_v4
+52/274527/campos_512_v4
+53/275036/campos_512_v4
+53/275186/campos_512_v4
+53/275223/campos_512_v4
+53/275285/campos_512_v4
+53/275293/campos_512_v4
+53/275372/campos_512_v4
+53/275408/campos_512_v4
+53/275521/campos_512_v4
+53/275626/campos_512_v4
+53/275917/campos_512_v4
+53/276002/campos_512_v4
+53/276164/campos_512_v4
+53/276188/campos_512_v4
+53/276292/campos_512_v4
+53/276321/campos_512_v4
+53/276323/campos_512_v4
+53/276397/campos_512_v4
+53/276440/campos_512_v4
+53/276678/campos_512_v4
+53/276732/campos_512_v4
+53/276793/campos_512_v4
+53/276799/campos_512_v4
+53/276816/campos_512_v4
+53/276826/campos_512_v4
+53/276868/campos_512_v4
+53/276897/campos_512_v4
+53/277026/campos_512_v4
+53/277130/campos_512_v4
+53/277149/campos_512_v4
+53/277425/campos_512_v4
+53/277641/campos_512_v4
+53/277834/campos_512_v4
+53/277836/campos_512_v4
+53/278022/campos_512_v4
+53/278123/campos_512_v4
+53/278261/campos_512_v4
+53/278264/campos_512_v4
+53/278312/campos_512_v4
+53/278339/campos_512_v4
+53/278462/campos_512_v4
+53/278681/campos_512_v4
+53/278698/campos_512_v4
+53/278748/campos_512_v4
+53/278784/campos_512_v4
+53/278810/campos_512_v4
+53/278855/campos_512_v4
+53/278925/campos_512_v4
+53/278952/campos_512_v4
+53/278998/campos_512_v4
+53/279036/campos_512_v4
+53/279040/campos_512_v4
+53/279066/campos_512_v4
+53/279093/campos_512_v4
+53/279108/campos_512_v4
+53/279273/campos_512_v4
+53/279280/campos_512_v4
+53/279285/campos_512_v4
+53/279447/campos_512_v4
+53/279553/campos_512_v4
+53/279676/campos_512_v4
+53/279744/campos_512_v4
+53/279783/campos_512_v4
+53/279887/campos_512_v4
+53/279919/campos_512_v4
+53/279932/campos_512_v4
+53/279933/campos_512_v4
+54/280041/campos_512_v4
+54/280049/campos_512_v4
+54/280306/campos_512_v4
+54/280403/campos_512_v4
+54/280431/campos_512_v4
+54/280527/campos_512_v4
+54/280718/campos_512_v4
+54/280835/campos_512_v4
+54/281027/campos_512_v4
+54/281101/campos_512_v4
+54/281133/campos_512_v4
+54/281285/campos_512_v4
+54/281302/campos_512_v4
+54/281401/campos_512_v4
+54/281518/campos_512_v4
+54/281539/campos_512_v4
+54/281685/campos_512_v4
+54/281796/campos_512_v4
+54/281836/campos_512_v4
+54/281890/campos_512_v4
+54/282066/campos_512_v4
+54/282341/campos_512_v4
+54/282482/campos_512_v4
+54/282653/campos_512_v4
+54/282690/campos_512_v4
+54/282783/campos_512_v4
+54/282823/campos_512_v4
+54/282873/campos_512_v4
+54/283040/campos_512_v4
+54/283147/campos_512_v4
+54/283178/campos_512_v4
+54/283305/campos_512_v4
+54/283326/campos_512_v4
+54/283342/campos_512_v4
+54/283440/campos_512_v4
+54/283442/campos_512_v4
+54/283450/campos_512_v4
+54/283520/campos_512_v4
+54/283803/campos_512_v4
+54/283840/campos_512_v4
+54/283915/campos_512_v4
+54/283949/campos_512_v4
+54/283954/campos_512_v4
+54/283972/campos_512_v4
+54/284028/campos_512_v4
+54/284159/campos_512_v4
+54/284197/campos_512_v4
+54/284316/campos_512_v4
+54/284382/campos_512_v4
+54/284432/campos_512_v4
+54/284637/campos_512_v4
+54/284694/campos_512_v4
+54/284742/campos_512_v4
+54/284807/campos_512_v4
+54/284859/campos_512_v4
+54/284893/campos_512_v4
+54/284910/campos_512_v4
+54/284987/campos_512_v4
+55/285049/campos_512_v4
+55/285212/campos_512_v4
+55/285315/campos_512_v4
+55/285332/campos_512_v4
+55/285333/campos_512_v4
+55/285577/campos_512_v4
+55/285637/campos_512_v4
+55/285643/campos_512_v4
+55/285689/campos_512_v4
+55/286073/campos_512_v4
+55/286234/campos_512_v4
+55/286258/campos_512_v4
+55/286284/campos_512_v4
+55/286340/campos_512_v4
+55/286353/campos_512_v4
+55/286391/campos_512_v4
+55/286434/campos_512_v4
+55/286461/campos_512_v4
+55/286605/campos_512_v4
+55/286641/campos_512_v4
+55/286761/campos_512_v4
+55/286908/campos_512_v4
+55/287013/campos_512_v4
+55/287029/campos_512_v4
+55/287041/campos_512_v4
+55/287105/campos_512_v4
+55/287153/campos_512_v4
+55/287163/campos_512_v4
+55/287212/campos_512_v4
+55/287335/campos_512_v4
+55/287458/campos_512_v4
+55/287548/campos_512_v4
+55/287962/campos_512_v4
+55/288120/campos_512_v4
+55/288164/campos_512_v4
+55/288332/campos_512_v4
+55/288450/campos_512_v4
+55/288678/campos_512_v4
+55/288857/campos_512_v4
+55/289027/campos_512_v4
+55/289044/campos_512_v4
+55/289067/campos_512_v4
+55/289243/campos_512_v4
+55/289291/campos_512_v4
+55/289394/campos_512_v4
+55/289512/campos_512_v4
+55/289616/campos_512_v4
+55/289668/campos_512_v4
+55/289970/campos_512_v4
+55/289991/campos_512_v4
+56/290243/campos_512_v4
+56/290246/campos_512_v4
+56/290319/campos_512_v4
+56/290499/campos_512_v4
+56/290534/campos_512_v4
+56/290578/campos_512_v4
+56/290593/campos_512_v4
+56/290678/campos_512_v4
+56/290683/campos_512_v4
+56/290802/campos_512_v4
+56/290842/campos_512_v4
+56/291203/campos_512_v4
+56/291431/campos_512_v4
+56/291604/campos_512_v4
+56/291873/campos_512_v4
+56/291954/campos_512_v4
+56/292044/campos_512_v4
+56/292070/campos_512_v4
+56/292277/campos_512_v4
+56/292384/campos_512_v4
+56/292460/campos_512_v4
+56/292577/campos_512_v4
+56/292623/campos_512_v4
+56/292849/campos_512_v4
+56/292923/campos_512_v4
+56/292933/campos_512_v4
+56/292995/campos_512_v4
+56/293122/campos_512_v4
+56/293182/campos_512_v4
+56/293244/campos_512_v4
+56/293246/campos_512_v4
+56/293278/campos_512_v4
+56/293405/campos_512_v4
+56/293475/campos_512_v4
+56/293479/campos_512_v4
+56/293520/campos_512_v4
+56/293580/campos_512_v4
+56/293762/campos_512_v4
+56/293946/campos_512_v4
+56/294041/campos_512_v4
+56/294147/campos_512_v4
+56/294279/campos_512_v4
+56/294329/campos_512_v4
+56/294348/campos_512_v4
+56/294355/campos_512_v4
+56/294432/campos_512_v4
+56/294463/campos_512_v4
+56/294569/campos_512_v4
+56/294815/campos_512_v4
+57/295427/campos_512_v4
+57/295472/campos_512_v4
+57/295599/campos_512_v4
+57/295631/campos_512_v4
+57/295724/campos_512_v4
+57/295955/campos_512_v4
+57/296028/campos_512_v4
+57/296169/campos_512_v4
+57/296392/campos_512_v4
+57/296458/campos_512_v4
+57/296597/campos_512_v4
+57/296635/campos_512_v4
+57/296876/campos_512_v4
+57/296899/campos_512_v4
+57/296903/campos_512_v4
+57/296923/campos_512_v4
+57/297121/campos_512_v4
+57/297127/campos_512_v4
+57/297267/campos_512_v4
+57/297319/campos_512_v4
+57/297395/campos_512_v4
+57/297451/campos_512_v4
+57/297456/campos_512_v4
+57/297457/campos_512_v4
+57/297557/campos_512_v4
+57/297588/campos_512_v4
+57/297646/campos_512_v4
+57/298179/campos_512_v4
+57/298489/campos_512_v4
+57/298866/campos_512_v4
+57/298886/campos_512_v4
+57/299097/campos_512_v4
+57/299295/campos_512_v4
+57/299317/campos_512_v4
+57/299448/campos_512_v4
+57/299583/campos_512_v4
+57/299585/campos_512_v4
+57/299649/campos_512_v4
+57/299676/campos_512_v4
+57/299750/campos_512_v4
+57/299941/campos_512_v4
+58/300049/campos_512_v4
+58/300201/campos_512_v4
+58/300265/campos_512_v4
+58/300276/campos_512_v4
+58/300291/campos_512_v4
+58/300315/campos_512_v4
+58/300561/campos_512_v4
+58/300789/campos_512_v4
+58/300834/campos_512_v4
+58/300987/campos_512_v4
+58/301183/campos_512_v4
+58/301274/campos_512_v4
+58/301400/campos_512_v4
+58/301498/campos_512_v4
+58/301556/campos_512_v4
+58/301578/campos_512_v4
+58/301593/campos_512_v4
+58/301655/campos_512_v4
+58/301669/campos_512_v4
+58/301746/campos_512_v4
+58/301990/campos_512_v4
+58/302159/campos_512_v4
+58/302173/campos_512_v4
+58/302229/campos_512_v4
+58/302343/campos_512_v4
+58/302369/campos_512_v4
+58/302384/campos_512_v4
+58/302398/campos_512_v4
+58/302445/campos_512_v4
+58/302578/campos_512_v4
+58/302586/campos_512_v4
+58/302826/campos_512_v4
+58/302872/campos_512_v4
+58/302913/campos_512_v4
+58/302933/campos_512_v4
+58/302965/campos_512_v4
+58/303013/campos_512_v4
+58/303047/campos_512_v4
+58/303094/campos_512_v4
+58/303105/campos_512_v4
+58/303120/campos_512_v4
+58/303264/campos_512_v4
+58/303488/campos_512_v4
+58/303511/campos_512_v4
+58/303529/campos_512_v4
+58/303567/campos_512_v4
+58/303931/campos_512_v4
+58/303992/campos_512_v4
+58/304059/campos_512_v4
+58/304090/campos_512_v4
+58/304261/campos_512_v4
+58/304518/campos_512_v4
+58/304558/campos_512_v4
+58/304590/campos_512_v4
+58/304704/campos_512_v4
+58/304796/campos_512_v4
+58/304862/campos_512_v4
+58/304903/campos_512_v4
+58/304960/campos_512_v4
+58/304978/campos_512_v4
+58/304995/campos_512_v4
+59/305043/campos_512_v4
+59/305173/campos_512_v4
+59/305515/campos_512_v4
+59/305537/campos_512_v4
+59/305569/campos_512_v4
+59/305820/campos_512_v4
+59/305982/campos_512_v4
+59/306177/campos_512_v4
+59/306338/campos_512_v4
+59/306425/campos_512_v4
+59/306607/campos_512_v4
+59/306631/campos_512_v4
+59/306688/campos_512_v4
+59/306850/campos_512_v4
+59/306982/campos_512_v4
+59/307050/campos_512_v4
+59/307109/campos_512_v4
+59/307139/campos_512_v4
+59/307222/campos_512_v4
+59/307251/campos_512_v4
+59/307362/campos_512_v4
+59/307412/campos_512_v4
+59/307460/campos_512_v4
+59/307593/campos_512_v4
+59/307648/campos_512_v4
+59/307667/campos_512_v4
+59/307712/campos_512_v4
+59/307836/campos_512_v4
+59/307892/campos_512_v4
+59/308022/campos_512_v4
+59/308056/campos_512_v4
+59/308073/campos_512_v4
+59/308104/campos_512_v4
+59/308239/campos_512_v4
+59/308307/campos_512_v4
+59/308395/campos_512_v4
+59/308514/campos_512_v4
+59/308618/campos_512_v4
+59/308824/campos_512_v4
+59/308918/campos_512_v4
+59/308964/campos_512_v4
+59/309027/campos_512_v4
+59/309180/campos_512_v4
+59/309349/campos_512_v4
+59/309353/campos_512_v4
+59/309377/campos_512_v4
+59/309437/campos_512_v4
+59/309458/campos_512_v4
+59/309579/campos_512_v4
+59/309580/campos_512_v4
+59/309682/campos_512_v4
+59/309785/campos_512_v4
+59/309821/campos_512_v4
+59/309844/campos_512_v4
+59/309931/campos_512_v4
+59/310000/campos_512_v4
+6/40094/campos_512_v4
+6/40148/campos_512_v4
+6/40541/campos_512_v4
+6/40547/campos_512_v4
+6/40631/campos_512_v4
+6/41169/campos_512_v4
+6/41343/campos_512_v4
+6/41446/campos_512_v4
+6/41705/campos_512_v4
+6/41750/campos_512_v4
+6/42140/campos_512_v4
+6/42309/campos_512_v4
+6/42520/campos_512_v4
+6/42596/campos_512_v4
+6/42625/campos_512_v4
+6/42750/campos_512_v4
+6/42877/campos_512_v4
+6/42901/campos_512_v4
+6/43137/campos_512_v4
+6/43248/campos_512_v4
+6/43565/campos_512_v4
+6/43828/campos_512_v4
+6/43911/campos_512_v4
+6/43943/campos_512_v4
+6/43966/campos_512_v4
+6/44071/campos_512_v4
+6/44220/campos_512_v4
+6/44281/campos_512_v4
+6/44301/campos_512_v4
+6/44695/campos_512_v4
+6/44717/campos_512_v4
+60/310008/campos_512_v4
+60/310082/campos_512_v4
+60/310115/campos_512_v4
+60/310198/campos_512_v4
+60/310280/campos_512_v4
+60/310303/campos_512_v4
+60/310425/campos_512_v4
+60/310653/campos_512_v4
+60/310724/campos_512_v4
+60/310869/campos_512_v4
+60/311047/campos_512_v4
+60/311058/campos_512_v4
+60/311080/campos_512_v4
+60/311089/campos_512_v4
+60/311143/campos_512_v4
+60/311180/campos_512_v4
+60/311239/campos_512_v4
+60/311340/campos_512_v4
+60/311404/campos_512_v4
+60/311494/campos_512_v4
+60/311500/campos_512_v4
+60/311621/campos_512_v4
+60/311879/campos_512_v4
+60/311881/campos_512_v4
+60/312441/campos_512_v4
+60/312453/campos_512_v4
+60/312592/campos_512_v4
+60/312789/campos_512_v4
+60/312810/campos_512_v4
+60/312860/campos_512_v4
+60/312869/campos_512_v4
+60/312877/campos_512_v4
+60/312970/campos_512_v4
+60/312975/campos_512_v4
+60/312979/campos_512_v4
+60/313011/campos_512_v4
+60/313571/campos_512_v4
+60/313573/campos_512_v4
+60/313792/campos_512_v4
+60/313987/campos_512_v4
+60/314130/campos_512_v4
+60/314135/campos_512_v4
+60/314315/campos_512_v4
+60/314439/campos_512_v4
+60/314470/campos_512_v4
+60/314484/campos_512_v4
+60/314584/campos_512_v4
+60/314586/campos_512_v4
+60/314665/campos_512_v4
+60/314679/campos_512_v4
+60/314955/campos_512_v4
+60/314999/campos_512_v4
+61/315012/campos_512_v4
+61/315070/campos_512_v4
+61/315119/campos_512_v4
+61/315163/campos_512_v4
+61/315435/campos_512_v4
+61/315591/campos_512_v4
+61/315617/campos_512_v4
+61/315645/campos_512_v4
+61/315656/campos_512_v4
+61/315830/campos_512_v4
+61/315848/campos_512_v4
+61/315924/campos_512_v4
+61/316121/campos_512_v4
+61/316283/campos_512_v4
+61/316338/campos_512_v4
+61/316420/campos_512_v4
+61/316433/campos_512_v4
+61/316461/campos_512_v4
+61/316540/campos_512_v4
+61/316621/campos_512_v4
+61/316656/campos_512_v4
+61/316789/campos_512_v4
+61/316879/campos_512_v4
+61/317012/campos_512_v4
+61/317076/campos_512_v4
+61/317100/campos_512_v4
+61/317206/campos_512_v4
+61/317279/campos_512_v4
+61/317311/campos_512_v4
+61/317428/campos_512_v4
+61/317515/campos_512_v4
+61/317535/campos_512_v4
+61/317586/campos_512_v4
+61/317609/campos_512_v4
+61/317977/campos_512_v4
+61/318142/campos_512_v4
+61/318224/campos_512_v4
+61/318225/campos_512_v4
+61/318404/campos_512_v4
+61/318662/campos_512_v4
+61/318712/campos_512_v4
+61/318779/campos_512_v4
+61/318783/campos_512_v4
+61/318853/campos_512_v4
+61/318939/campos_512_v4
+61/318995/campos_512_v4
+61/319267/campos_512_v4
+61/319351/campos_512_v4
+61/319366/campos_512_v4
+61/319485/campos_512_v4
+61/319509/campos_512_v4
+61/319592/campos_512_v4
+61/319713/campos_512_v4
+61/319738/campos_512_v4
+61/319924/campos_512_v4
+62/320041/campos_512_v4
+62/320092/campos_512_v4
+62/320141/campos_512_v4
+62/320259/campos_512_v4
+62/320309/campos_512_v4
+62/320459/campos_512_v4
+62/320507/campos_512_v4
+62/320582/campos_512_v4
+62/320594/campos_512_v4
+62/320698/campos_512_v4
+62/320700/campos_512_v4
+62/320703/campos_512_v4
+62/320932/campos_512_v4
+62/320936/campos_512_v4
+62/321045/campos_512_v4
+62/321109/campos_512_v4
+62/321112/campos_512_v4
+62/321264/campos_512_v4
+62/321273/campos_512_v4
+62/321468/campos_512_v4
+62/321687/campos_512_v4
+62/321707/campos_512_v4
+62/321717/campos_512_v4
+62/321737/campos_512_v4
+62/321794/campos_512_v4
+62/321827/campos_512_v4
+62/321884/campos_512_v4
+62/321926/campos_512_v4
+62/322019/campos_512_v4
+62/322139/campos_512_v4
+62/322334/campos_512_v4
+62/322498/campos_512_v4
+62/322525/campos_512_v4
+62/322724/campos_512_v4
+62/322740/campos_512_v4
+62/322749/campos_512_v4
+62/322785/campos_512_v4
+62/322858/campos_512_v4
+62/322942/campos_512_v4
+62/323063/campos_512_v4
+62/323120/campos_512_v4
+62/323215/campos_512_v4
+62/323227/campos_512_v4
+62/323238/campos_512_v4
+62/323275/campos_512_v4
+62/323279/campos_512_v4
+62/323295/campos_512_v4
+62/323333/campos_512_v4
+62/323498/campos_512_v4
+62/323615/campos_512_v4
+62/323616/campos_512_v4
+62/323756/campos_512_v4
+62/323801/campos_512_v4
+62/323927/campos_512_v4
+62/323950/campos_512_v4
+62/323956/campos_512_v4
+62/324054/campos_512_v4
+62/324116/campos_512_v4
+62/324438/campos_512_v4
+62/324612/campos_512_v4
+62/324712/campos_512_v4
+62/324994/campos_512_v4
+63/325053/campos_512_v4
+63/325061/campos_512_v4
+63/325237/campos_512_v4
+63/325421/campos_512_v4
+63/325464/campos_512_v4
+63/325568/campos_512_v4
+63/325592/campos_512_v4
+63/325607/campos_512_v4
+63/325662/campos_512_v4
+63/325764/campos_512_v4
+63/325795/campos_512_v4
+63/325840/campos_512_v4
+63/325939/campos_512_v4
+63/326037/campos_512_v4
+63/326116/campos_512_v4
+63/326122/campos_512_v4
+63/326154/campos_512_v4
+63/326224/campos_512_v4
+63/326461/campos_512_v4
+63/326473/campos_512_v4
+63/326605/campos_512_v4
+63/326665/campos_512_v4
+63/326667/campos_512_v4
+63/326677/campos_512_v4
+63/326686/campos_512_v4
+63/326776/campos_512_v4
+63/326823/campos_512_v4
+63/326824/campos_512_v4
+63/326833/campos_512_v4
+63/326867/campos_512_v4
+63/327092/campos_512_v4
+63/327166/campos_512_v4
+63/327217/campos_512_v4
+63/327260/campos_512_v4
+63/327269/campos_512_v4
+63/327404/campos_512_v4
+63/327485/campos_512_v4
+63/327528/campos_512_v4
+63/327530/campos_512_v4
+63/327660/campos_512_v4
+63/327740/campos_512_v4
+63/327767/campos_512_v4
+63/327827/campos_512_v4
+63/327899/campos_512_v4
+63/327906/campos_512_v4
+63/327945/campos_512_v4
+63/327990/campos_512_v4
+63/328128/campos_512_v4
+63/328177/campos_512_v4
+63/328768/campos_512_v4
+63/328844/campos_512_v4
+63/328850/campos_512_v4
+63/328902/campos_512_v4
+63/328952/campos_512_v4
+63/329105/campos_512_v4
+63/329211/campos_512_v4
+63/329235/campos_512_v4
+63/329322/campos_512_v4
+63/329360/campos_512_v4
+63/329403/campos_512_v4
+63/329434/campos_512_v4
+63/329511/campos_512_v4
+63/329637/campos_512_v4
+63/329657/campos_512_v4
+63/329721/campos_512_v4
+63/329827/campos_512_v4
+63/329834/campos_512_v4
+63/329845/campos_512_v4
+63/329897/campos_512_v4
+63/330000/campos_512_v4
+64/330050/campos_512_v4
+64/330128/campos_512_v4
+64/330134/campos_512_v4
+64/330145/campos_512_v4
+64/330161/campos_512_v4
+64/330439/campos_512_v4
+64/330459/campos_512_v4
+64/330490/campos_512_v4
+64/330695/campos_512_v4
+64/330837/campos_512_v4
+64/330857/campos_512_v4
+64/330912/campos_512_v4
+64/330952/campos_512_v4
+64/331047/campos_512_v4
+64/331302/campos_512_v4
+64/331324/campos_512_v4
+64/331335/campos_512_v4
+64/331464/campos_512_v4
+64/331472/campos_512_v4
+64/331621/campos_512_v4
+64/331666/campos_512_v4
+64/331779/campos_512_v4
+64/331815/campos_512_v4
+64/331827/campos_512_v4
+64/331852/campos_512_v4
+64/331914/campos_512_v4
+64/332094/campos_512_v4
+64/332139/campos_512_v4
+64/332212/campos_512_v4
+64/332309/campos_512_v4
+64/332393/campos_512_v4
+64/332432/campos_512_v4
+64/332448/campos_512_v4
+64/332478/campos_512_v4
+64/332484/campos_512_v4
+64/332544/campos_512_v4
+64/332558/campos_512_v4
+64/332639/campos_512_v4
+64/332926/campos_512_v4
+64/333072/campos_512_v4
+64/333203/campos_512_v4
+64/333206/campos_512_v4
+64/333454/campos_512_v4
+64/333486/campos_512_v4
+64/333507/campos_512_v4
+64/333562/campos_512_v4
+64/333619/campos_512_v4
+64/333742/campos_512_v4
+64/333854/campos_512_v4
+64/333941/campos_512_v4
+64/333961/campos_512_v4
+64/334040/campos_512_v4
+64/334055/campos_512_v4
+64/334342/campos_512_v4
+64/334574/campos_512_v4
+64/334591/campos_512_v4
+64/334657/campos_512_v4
+65/335063/campos_512_v4
+65/335065/campos_512_v4
+65/335098/campos_512_v4
+65/335178/campos_512_v4
+65/335195/campos_512_v4
+65/335236/campos_512_v4
+65/335291/campos_512_v4
+65/335299/campos_512_v4
+65/335305/campos_512_v4
+65/335385/campos_512_v4
+65/335533/campos_512_v4
+65/335566/campos_512_v4
+65/335588/campos_512_v4
+65/335592/campos_512_v4
+65/335659/campos_512_v4
+65/335695/campos_512_v4
+65/335767/campos_512_v4
+65/335785/campos_512_v4
+65/335837/campos_512_v4
+65/335845/campos_512_v4
+65/335927/campos_512_v4
+65/335963/campos_512_v4
+65/335969/campos_512_v4
+65/335991/campos_512_v4
+65/336084/campos_512_v4
+65/336213/campos_512_v4
+65/336243/campos_512_v4
+65/336370/campos_512_v4
+65/336410/campos_512_v4
+65/336428/campos_512_v4
+65/336430/campos_512_v4
+65/336521/campos_512_v4
+65/336528/campos_512_v4
+65/336713/campos_512_v4
+65/336739/campos_512_v4
+65/336831/campos_512_v4
+65/336881/campos_512_v4
+65/337147/campos_512_v4
+65/337292/campos_512_v4
+65/337368/campos_512_v4
+65/337510/campos_512_v4
+65/337525/campos_512_v4
+65/337647/campos_512_v4
+65/337790/campos_512_v4
+65/337818/campos_512_v4
+65/337832/campos_512_v4
+65/337863/campos_512_v4
+65/338127/campos_512_v4
+65/338140/campos_512_v4
+65/338257/campos_512_v4
+65/338280/campos_512_v4
+65/338655/campos_512_v4
+65/338675/campos_512_v4
+65/338743/campos_512_v4
+65/338745/campos_512_v4
+65/338786/campos_512_v4
+65/338801/campos_512_v4
+65/338888/campos_512_v4
+65/338918/campos_512_v4
+65/338960/campos_512_v4
+65/339002/campos_512_v4
+65/339090/campos_512_v4
+65/339132/campos_512_v4
+65/339220/campos_512_v4
+65/339452/campos_512_v4
+65/339546/campos_512_v4
+65/339567/campos_512_v4
+65/339650/campos_512_v4
+65/339662/campos_512_v4
+65/339978/campos_512_v4
+65/339987/campos_512_v4
+66/340167/campos_512_v4
+66/340269/campos_512_v4
+66/340287/campos_512_v4
+66/340384/campos_512_v4
+66/340452/campos_512_v4
+66/340459/campos_512_v4
+66/340521/campos_512_v4
+66/340631/campos_512_v4
+66/340666/campos_512_v4
+66/340994/campos_512_v4
+66/341014/campos_512_v4
+66/341043/campos_512_v4
+66/341232/campos_512_v4
+66/341246/campos_512_v4
+66/341357/campos_512_v4
+66/341367/campos_512_v4
+66/341465/campos_512_v4
+66/341510/campos_512_v4
+66/341530/campos_512_v4
+66/341610/campos_512_v4
+66/341843/campos_512_v4
+66/341908/campos_512_v4
+66/341939/campos_512_v4
+66/342041/campos_512_v4
+66/342049/campos_512_v4
+66/342121/campos_512_v4
+66/342302/campos_512_v4
+66/342394/campos_512_v4
+66/342430/campos_512_v4
+66/342605/campos_512_v4
+66/342635/campos_512_v4
+66/342710/campos_512_v4
+66/342824/campos_512_v4
+66/343002/campos_512_v4
+66/343176/campos_512_v4
+66/343191/campos_512_v4
+66/343342/campos_512_v4
+66/343357/campos_512_v4
+66/343365/campos_512_v4
+66/343368/campos_512_v4
+66/343507/campos_512_v4
+66/343613/campos_512_v4
+66/343818/campos_512_v4
+66/343902/campos_512_v4
+66/344093/campos_512_v4
+66/344280/campos_512_v4
+66/344303/campos_512_v4
+66/344445/campos_512_v4
+66/344448/campos_512_v4
+66/344643/campos_512_v4
+66/344696/campos_512_v4
+66/344700/campos_512_v4
+66/344702/campos_512_v4
+66/344827/campos_512_v4
+66/344828/campos_512_v4
+67/345189/campos_512_v4
+67/345282/campos_512_v4
+67/345315/campos_512_v4
+67/345391/campos_512_v4
+67/345448/campos_512_v4
+67/345727/campos_512_v4
+67/345768/campos_512_v4
+67/345840/campos_512_v4
+67/345881/campos_512_v4
+67/346116/campos_512_v4
+67/346227/campos_512_v4
+67/346247/campos_512_v4
+67/346298/campos_512_v4
+67/346318/campos_512_v4
+67/346364/campos_512_v4
+67/346385/campos_512_v4
+67/346482/campos_512_v4
+67/346504/campos_512_v4
+67/346584/campos_512_v4
+67/346632/campos_512_v4
+67/346649/campos_512_v4
+67/346773/campos_512_v4
+67/346946/campos_512_v4
+67/347069/campos_512_v4
+67/347126/campos_512_v4
+67/347349/campos_512_v4
+67/347420/campos_512_v4
+67/347428/campos_512_v4
+67/347878/campos_512_v4
+67/347890/campos_512_v4
+67/348188/campos_512_v4
+67/348210/campos_512_v4
+67/348568/campos_512_v4
+67/348576/campos_512_v4
+67/348618/campos_512_v4
+67/348833/campos_512_v4
+67/349038/campos_512_v4
+67/349050/campos_512_v4
+67/349133/campos_512_v4
+67/349278/campos_512_v4
+67/349297/campos_512_v4
+67/349378/campos_512_v4
+67/349379/campos_512_v4
+67/349389/campos_512_v4
+67/349443/campos_512_v4
+67/349502/campos_512_v4
+67/349550/campos_512_v4
+67/349586/campos_512_v4
+67/349616/campos_512_v4
+67/349653/campos_512_v4
+67/349753/campos_512_v4
+67/349783/campos_512_v4
+67/349811/campos_512_v4
+67/349920/campos_512_v4
+67/349939/campos_512_v4
+68/350135/campos_512_v4
+68/350262/campos_512_v4
+68/350306/campos_512_v4
+68/350325/campos_512_v4
+68/350399/campos_512_v4
+68/350463/campos_512_v4
+68/350476/campos_512_v4
+68/350511/campos_512_v4
+68/350621/campos_512_v4
+68/350747/campos_512_v4
+68/350749/campos_512_v4
+68/350824/campos_512_v4
+68/350838/campos_512_v4
+68/350876/campos_512_v4
+68/350901/campos_512_v4
+68/351030/campos_512_v4
+68/351066/campos_512_v4
+68/351070/campos_512_v4
+68/351073/campos_512_v4
+68/351145/campos_512_v4
+68/351152/campos_512_v4
+68/351179/campos_512_v4
+68/351284/campos_512_v4
+68/351304/campos_512_v4
+68/351377/campos_512_v4
+68/351628/campos_512_v4
+68/351697/campos_512_v4
+68/351880/campos_512_v4
+68/351881/campos_512_v4
+68/351958/campos_512_v4
+68/351959/campos_512_v4
+68/352186/campos_512_v4
+68/352508/campos_512_v4
+68/352539/campos_512_v4
+68/352549/campos_512_v4
+68/352646/campos_512_v4
+68/352849/campos_512_v4
+68/352998/campos_512_v4
+68/353008/campos_512_v4
+68/353160/campos_512_v4
+68/353450/campos_512_v4
+68/353479/campos_512_v4
+68/353572/campos_512_v4
+68/353581/campos_512_v4
+68/353595/campos_512_v4
+68/353599/campos_512_v4
+68/353612/campos_512_v4
+68/353649/campos_512_v4
+68/353705/campos_512_v4
+68/353729/campos_512_v4
+68/353779/campos_512_v4
+68/353831/campos_512_v4
+68/353979/campos_512_v4
+68/354025/campos_512_v4
+68/354029/campos_512_v4
+68/354118/campos_512_v4
+68/354119/campos_512_v4
+68/354167/campos_512_v4
+68/354187/campos_512_v4
+68/354218/campos_512_v4
+68/354381/campos_512_v4
+68/354438/campos_512_v4
+68/354471/campos_512_v4
+68/354588/campos_512_v4
+68/354639/campos_512_v4
+68/354758/campos_512_v4
+68/354768/campos_512_v4
+69/355180/campos_512_v4
+69/355190/campos_512_v4
+69/355241/campos_512_v4
+69/355266/campos_512_v4
+69/355319/campos_512_v4
+69/355335/campos_512_v4
+69/355361/campos_512_v4
+69/355680/campos_512_v4
+69/355725/campos_512_v4
+69/355847/campos_512_v4
+69/355997/campos_512_v4
+69/356002/campos_512_v4
+69/356014/campos_512_v4
+69/356037/campos_512_v4
+69/356301/campos_512_v4
+69/356471/campos_512_v4
+69/356589/campos_512_v4
+69/356640/campos_512_v4
+69/356689/campos_512_v4
+69/356787/campos_512_v4
+69/356797/campos_512_v4
+69/356833/campos_512_v4
+69/356871/campos_512_v4
+69/356902/campos_512_v4
+69/357113/campos_512_v4
+69/357133/campos_512_v4
+69/357138/campos_512_v4
+69/357206/campos_512_v4
+69/357225/campos_512_v4
+69/357520/campos_512_v4
+69/357608/campos_512_v4
+69/357713/campos_512_v4
+69/357767/campos_512_v4
+69/357803/campos_512_v4
+69/357828/campos_512_v4
+69/357908/campos_512_v4
+69/358175/campos_512_v4
+69/358247/campos_512_v4
+69/358279/campos_512_v4
+69/358371/campos_512_v4
+69/358386/campos_512_v4
+69/358582/campos_512_v4
+69/358620/campos_512_v4
+69/358793/campos_512_v4
+69/358937/campos_512_v4
+69/358959/campos_512_v4
+69/358997/campos_512_v4
+69/359264/campos_512_v4
+69/359309/campos_512_v4
+69/359356/campos_512_v4
+69/359360/campos_512_v4
+69/359376/campos_512_v4
+69/359454/campos_512_v4
+69/359745/campos_512_v4
+69/359770/campos_512_v4
+69/359870/campos_512_v4
+7/45228/campos_512_v4
+7/45346/campos_512_v4
+7/45368/campos_512_v4
+7/45369/campos_512_v4
+7/45484/campos_512_v4
+7/45510/campos_512_v4
+7/45573/campos_512_v4
+7/45577/campos_512_v4
+7/45589/campos_512_v4
+7/45637/campos_512_v4
+7/45673/campos_512_v4
+7/45820/campos_512_v4
+7/46052/campos_512_v4
+7/46195/campos_512_v4
+7/46320/campos_512_v4
+7/46403/campos_512_v4
+7/46667/campos_512_v4
+7/46698/campos_512_v4
+7/46969/campos_512_v4
+7/47084/campos_512_v4
+7/47104/campos_512_v4
+7/47193/campos_512_v4
+7/47305/campos_512_v4
+7/47477/campos_512_v4
+7/47533/campos_512_v4
+7/47540/campos_512_v4
+7/47618/campos_512_v4
+7/47779/campos_512_v4
+7/47870/campos_512_v4
+7/47932/campos_512_v4
+7/47998/campos_512_v4
+7/48067/campos_512_v4
+7/48115/campos_512_v4
+7/48533/campos_512_v4
+7/48660/campos_512_v4
+7/49030/campos_512_v4
+7/49056/campos_512_v4
+7/49147/campos_512_v4
+7/49175/campos_512_v4
+7/49182/campos_512_v4
+7/49239/campos_512_v4
+7/49250/campos_512_v4
+7/49278/campos_512_v4
+7/49340/campos_512_v4
+7/49373/campos_512_v4
+7/49566/campos_512_v4
+7/49628/campos_512_v4
+7/49857/campos_512_v4
+7/49877/campos_512_v4
+7/49988/campos_512_v4
+70/360056/campos_512_v4
+70/360111/campos_512_v4
+70/360234/campos_512_v4
+70/360250/campos_512_v4
+70/360282/campos_512_v4
+70/360386/campos_512_v4
+70/360392/campos_512_v4
+70/360429/campos_512_v4
+70/360577/campos_512_v4
+70/360813/campos_512_v4
+70/360921/campos_512_v4
+70/360948/campos_512_v4
+70/360958/campos_512_v4
+70/360979/campos_512_v4
+70/360999/campos_512_v4
+70/361040/campos_512_v4
+70/361106/campos_512_v4
+70/361164/campos_512_v4
+70/361223/campos_512_v4
+70/361252/campos_512_v4
+70/361259/campos_512_v4
+70/361266/campos_512_v4
+70/361295/campos_512_v4
+70/361635/campos_512_v4
+70/361682/campos_512_v4
+70/361686/campos_512_v4
+70/361721/campos_512_v4
+70/361772/campos_512_v4
+70/361886/campos_512_v4
+70/362354/campos_512_v4
+70/362426/campos_512_v4
+70/362457/campos_512_v4
+70/362471/campos_512_v4
+70/362519/campos_512_v4
+70/362574/campos_512_v4
+70/362580/campos_512_v4
+70/362649/campos_512_v4
+70/362712/campos_512_v4
+70/362799/campos_512_v4
+70/362807/campos_512_v4
+70/362849/campos_512_v4
+70/362893/campos_512_v4
+70/363161/campos_512_v4
+70/363167/campos_512_v4
+70/363421/campos_512_v4
+70/363512/campos_512_v4
+70/363538/campos_512_v4
+70/363673/campos_512_v4
+70/363898/campos_512_v4
+70/363956/campos_512_v4
+70/363978/campos_512_v4
+70/364025/campos_512_v4
+70/364268/campos_512_v4
+70/364288/campos_512_v4
+70/364574/campos_512_v4
+70/364703/campos_512_v4
+70/364730/campos_512_v4
+70/364732/campos_512_v4
+70/364739/campos_512_v4
+70/364764/campos_512_v4
+70/364771/campos_512_v4
+70/364862/campos_512_v4
+70/364917/campos_512_v4
+71/365019/campos_512_v4
+71/365322/campos_512_v4
+71/365376/campos_512_v4
+71/365516/campos_512_v4
+71/365598/campos_512_v4
+71/365619/campos_512_v4
+71/365726/campos_512_v4
+71/365796/campos_512_v4
+71/365881/campos_512_v4
+71/365918/campos_512_v4
+71/365967/campos_512_v4
+71/366012/campos_512_v4
+71/366131/campos_512_v4
+71/366249/campos_512_v4
+71/366307/campos_512_v4
+71/366325/campos_512_v4
+71/366468/campos_512_v4
+71/366536/campos_512_v4
+71/366654/campos_512_v4
+71/366961/campos_512_v4
+71/366977/campos_512_v4
+71/366993/campos_512_v4
+71/367032/campos_512_v4
+71/367112/campos_512_v4
+71/367588/campos_512_v4
+71/367767/campos_512_v4
+71/367871/campos_512_v4
+71/367921/campos_512_v4
+71/367945/campos_512_v4
+71/367983/campos_512_v4
+71/368082/campos_512_v4
+71/368189/campos_512_v4
+71/368200/campos_512_v4
+71/368230/campos_512_v4
+71/368360/campos_512_v4
+71/368545/campos_512_v4
+71/368564/campos_512_v4
+71/368755/campos_512_v4
+71/368784/campos_512_v4
+71/368813/campos_512_v4
+71/368904/campos_512_v4
+71/368961/campos_512_v4
+71/369125/campos_512_v4
+71/369341/campos_512_v4
+71/369369/campos_512_v4
+71/369526/campos_512_v4
+71/369541/campos_512_v4
+71/369603/campos_512_v4
+71/369787/campos_512_v4
+71/369842/campos_512_v4
+72/370413/campos_512_v4
+72/370461/campos_512_v4
+72/370576/campos_512_v4
+72/370608/campos_512_v4
+72/370801/campos_512_v4
+72/370814/campos_512_v4
+72/370921/campos_512_v4
+72/370960/campos_512_v4
+72/370988/campos_512_v4
+72/371181/campos_512_v4
+72/371277/campos_512_v4
+72/371359/campos_512_v4
+72/371376/campos_512_v4
+72/371474/campos_512_v4
+72/371529/campos_512_v4
+72/371584/campos_512_v4
+72/371747/campos_512_v4
+72/372071/campos_512_v4
+72/372209/campos_512_v4
+72/372372/campos_512_v4
+72/372378/campos_512_v4
+72/372474/campos_512_v4
+72/372490/campos_512_v4
+72/372512/campos_512_v4
+72/372515/campos_512_v4
+72/372563/campos_512_v4
+72/372588/campos_512_v4
+72/372631/campos_512_v4
+72/372690/campos_512_v4
+72/372694/campos_512_v4
+72/372731/campos_512_v4
+72/372806/campos_512_v4
+72/373082/campos_512_v4
+72/373153/campos_512_v4
+72/373176/campos_512_v4
+72/373222/campos_512_v4
+72/373229/campos_512_v4
+72/373284/campos_512_v4
+72/373286/campos_512_v4
+72/373419/campos_512_v4
+72/373614/campos_512_v4
+72/373648/campos_512_v4
+72/373681/campos_512_v4
+72/373737/campos_512_v4
+72/373752/campos_512_v4
+72/373837/campos_512_v4
+72/373946/campos_512_v4
+72/374132/campos_512_v4
+72/374161/campos_512_v4
+72/374180/campos_512_v4
+72/374323/campos_512_v4
+72/374422/campos_512_v4
+72/374433/campos_512_v4
+72/374648/campos_512_v4
+72/374864/campos_512_v4
+73/375065/campos_512_v4
+73/375151/campos_512_v4
+73/375163/campos_512_v4
+73/375233/campos_512_v4
+73/375267/campos_512_v4
+73/375327/campos_512_v4
+73/375384/campos_512_v4
+73/375457/campos_512_v4
+73/375630/campos_512_v4
+73/375643/campos_512_v4
+73/375668/campos_512_v4
+73/375676/campos_512_v4
+73/375887/campos_512_v4
+73/376023/campos_512_v4
+73/376024/campos_512_v4
+73/376244/campos_512_v4
+73/376330/campos_512_v4
+73/376460/campos_512_v4
+73/376477/campos_512_v4
+73/376548/campos_512_v4
+73/376807/campos_512_v4
+73/376818/campos_512_v4
+73/376996/campos_512_v4
+73/377098/campos_512_v4
+73/377270/campos_512_v4
+73/377316/campos_512_v4
+73/377825/campos_512_v4
+73/377854/campos_512_v4
+73/378042/campos_512_v4
+73/378153/campos_512_v4
+73/378156/campos_512_v4
+73/378178/campos_512_v4
+73/378233/campos_512_v4
+73/378235/campos_512_v4
+73/378315/campos_512_v4
+73/378417/campos_512_v4
+73/378602/campos_512_v4
+73/378634/campos_512_v4
+73/378697/campos_512_v4
+73/378774/campos_512_v4
+73/378830/campos_512_v4
+73/379188/campos_512_v4
+73/379216/campos_512_v4
+73/379254/campos_512_v4
+73/379509/campos_512_v4
+73/379533/campos_512_v4
+73/379547/campos_512_v4
+73/379630/campos_512_v4
+73/379645/campos_512_v4
+73/379700/campos_512_v4
+73/379724/campos_512_v4
+73/379774/campos_512_v4
+73/379793/campos_512_v4
+73/379836/campos_512_v4
+73/379853/campos_512_v4
+73/379864/campos_512_v4
+73/379882/campos_512_v4
+73/379980/campos_512_v4
+74/380146/campos_512_v4
+74/380199/campos_512_v4
+74/380201/campos_512_v4
+74/380205/campos_512_v4
+74/380442/campos_512_v4
+74/380518/campos_512_v4
+74/380660/campos_512_v4
+74/380750/campos_512_v4
+74/380773/campos_512_v4
+74/380785/campos_512_v4
+74/380839/campos_512_v4
+74/380868/campos_512_v4
+74/381070/campos_512_v4
+74/381185/campos_512_v4
+74/381220/campos_512_v4
+74/381362/campos_512_v4
+74/381370/campos_512_v4
+74/381503/campos_512_v4
+74/381507/campos_512_v4
+74/381513/campos_512_v4
+74/381613/campos_512_v4
+74/381633/campos_512_v4
+74/381688/campos_512_v4
+74/381788/campos_512_v4
+74/381812/campos_512_v4
+74/381978/campos_512_v4
+74/381994/campos_512_v4
+74/381996/campos_512_v4
+74/382034/campos_512_v4
+74/382054/campos_512_v4
+74/382224/campos_512_v4
+74/382255/campos_512_v4
+74/382570/campos_512_v4
+74/382587/campos_512_v4
+74/382652/campos_512_v4
+74/383028/campos_512_v4
+74/383115/campos_512_v4
+74/383497/campos_512_v4
+74/383504/campos_512_v4
+74/383583/campos_512_v4
+74/383597/campos_512_v4
+74/383832/campos_512_v4
+74/383888/campos_512_v4
+74/383992/campos_512_v4
+74/384115/campos_512_v4
+74/384233/campos_512_v4
+74/384347/campos_512_v4
+74/384483/campos_512_v4
+74/384492/campos_512_v4
+74/384504/campos_512_v4
+74/384572/campos_512_v4
+74/384695/campos_512_v4
+74/384757/campos_512_v4
+74/384871/campos_512_v4
+75/385011/campos_512_v4
+75/385036/campos_512_v4
+75/385145/campos_512_v4
+75/385155/campos_512_v4
+75/385194/campos_512_v4
+75/385235/campos_512_v4
+75/385458/campos_512_v4
+75/385471/campos_512_v4
+75/385680/campos_512_v4
+75/385770/campos_512_v4
+75/385945/campos_512_v4
+75/386024/campos_512_v4
+75/386048/campos_512_v4
+75/386079/campos_512_v4
+75/386135/campos_512_v4
+75/386275/campos_512_v4
+75/386369/campos_512_v4
+75/386621/campos_512_v4
+75/386698/campos_512_v4
+75/386982/campos_512_v4
+75/387060/campos_512_v4
+75/387173/campos_512_v4
+75/387204/campos_512_v4
+75/387252/campos_512_v4
+75/387293/campos_512_v4
+75/387370/campos_512_v4
+75/387541/campos_512_v4
+75/387560/campos_512_v4
+75/387629/campos_512_v4
+75/387778/campos_512_v4
+75/387884/campos_512_v4
+75/387960/campos_512_v4
+75/387963/campos_512_v4
+75/388215/campos_512_v4
+75/388290/campos_512_v4
+75/388413/campos_512_v4
+75/388452/campos_512_v4
+75/388573/campos_512_v4
+75/388602/campos_512_v4
+75/388707/campos_512_v4
+75/388787/campos_512_v4
+75/388842/campos_512_v4
+75/388914/campos_512_v4
+75/388995/campos_512_v4
+75/389091/campos_512_v4
+75/389221/campos_512_v4
+75/389512/campos_512_v4
+75/389851/campos_512_v4
+76/390048/campos_512_v4
+76/390223/campos_512_v4
+76/390332/campos_512_v4
+76/390642/campos_512_v4
+76/390697/campos_512_v4
+76/390942/campos_512_v4
+76/391072/campos_512_v4
+76/391132/campos_512_v4
+76/391195/campos_512_v4
+76/391216/campos_512_v4
+76/391276/campos_512_v4
+76/391715/campos_512_v4
+76/391800/campos_512_v4
+76/391809/campos_512_v4
+76/391817/campos_512_v4
+76/392068/campos_512_v4
+76/392293/campos_512_v4
+76/392526/campos_512_v4
+76/392604/campos_512_v4
+76/392608/campos_512_v4
+76/392861/campos_512_v4
+76/392934/campos_512_v4
+76/392936/campos_512_v4
+76/392968/campos_512_v4
+76/393018/campos_512_v4
+76/393200/campos_512_v4
+76/393582/campos_512_v4
+76/393710/campos_512_v4
+76/393738/campos_512_v4
+76/393767/campos_512_v4
+76/393789/campos_512_v4
+76/393805/campos_512_v4
+76/393840/campos_512_v4
+76/393841/campos_512_v4
+76/393898/campos_512_v4
+76/393945/campos_512_v4
+76/394017/campos_512_v4
+76/394042/campos_512_v4
+76/394064/campos_512_v4
+76/394071/campos_512_v4
+76/394076/campos_512_v4
+76/394163/campos_512_v4
+76/394186/campos_512_v4
+76/394315/campos_512_v4
+76/394350/campos_512_v4
+76/394831/campos_512_v4
+76/394905/campos_512_v4
+76/394979/campos_512_v4
+76/395001/campos_512_v4
+77/395111/campos_512_v4
+77/395208/campos_512_v4
+77/395237/campos_512_v4
+77/395249/campos_512_v4
+77/395422/campos_512_v4
+77/395660/campos_512_v4
+77/395688/campos_512_v4
+77/395715/campos_512_v4
+77/395732/campos_512_v4
+77/395931/campos_512_v4
+77/396008/campos_512_v4
+77/396098/campos_512_v4
+77/396297/campos_512_v4
+77/396307/campos_512_v4
+77/396333/campos_512_v4
+77/396676/campos_512_v4
+77/396712/campos_512_v4
+77/396760/campos_512_v4
+77/396828/campos_512_v4
+77/396998/campos_512_v4
+77/397253/campos_512_v4
+77/397327/campos_512_v4
+77/397357/campos_512_v4
+77/397551/campos_512_v4
+77/397552/campos_512_v4
+77/397654/campos_512_v4
+77/397880/campos_512_v4
+77/398008/campos_512_v4
+77/398072/campos_512_v4
+77/398182/campos_512_v4
+77/398553/campos_512_v4
+77/398571/campos_512_v4
+77/398830/campos_512_v4
+77/398841/campos_512_v4
+77/398926/campos_512_v4
+77/399225/campos_512_v4
+77/399363/campos_512_v4
+77/399577/campos_512_v4
+77/399609/campos_512_v4
+77/399999/campos_512_v4
+78/400112/campos_512_v4
+78/400163/campos_512_v4
+78/400305/campos_512_v4
+78/400332/campos_512_v4
+78/400376/campos_512_v4
+78/400380/campos_512_v4
+78/400437/campos_512_v4
+78/400519/campos_512_v4
+78/400531/campos_512_v4
+78/400717/campos_512_v4
+78/400903/campos_512_v4
+78/401036/campos_512_v4
+78/401142/campos_512_v4
+78/401149/campos_512_v4
+78/401601/campos_512_v4
+78/401671/campos_512_v4
+78/401742/campos_512_v4
+78/401744/campos_512_v4
+78/401771/campos_512_v4
+78/401844/campos_512_v4
+78/401847/campos_512_v4
+78/402168/campos_512_v4
+78/402210/campos_512_v4
+78/402247/campos_512_v4
+78/402341/campos_512_v4
+78/402346/campos_512_v4
+78/402392/campos_512_v4
+78/402426/campos_512_v4
+78/402451/campos_512_v4
+78/402503/campos_512_v4
+78/402520/campos_512_v4
+78/402580/campos_512_v4
+78/402647/campos_512_v4
+78/402677/campos_512_v4
+78/402701/campos_512_v4
+78/402764/campos_512_v4
+78/402774/campos_512_v4
+78/402883/campos_512_v4
+78/403126/campos_512_v4
+78/403305/campos_512_v4
+78/403357/campos_512_v4
+78/403390/campos_512_v4
+78/403490/campos_512_v4
+78/403534/campos_512_v4
+78/403557/campos_512_v4
+78/403563/campos_512_v4
+78/403645/campos_512_v4
+78/403750/campos_512_v4
+78/403908/campos_512_v4
+78/403937/campos_512_v4
+78/404007/campos_512_v4
+78/404057/campos_512_v4
+78/404207/campos_512_v4
+78/404427/campos_512_v4
+78/404429/campos_512_v4
+78/404468/campos_512_v4
+78/404569/campos_512_v4
+78/404618/campos_512_v4
+78/404804/campos_512_v4
+78/404823/campos_512_v4
+78/404826/campos_512_v4
+78/404886/campos_512_v4
+78/404895/campos_512_v4
+79/405129/campos_512_v4
+79/405215/campos_512_v4
+79/405281/campos_512_v4
+79/405292/campos_512_v4
+79/405566/campos_512_v4
+79/405580/campos_512_v4
+79/405706/campos_512_v4
+79/405709/campos_512_v4
+79/405740/campos_512_v4
+79/405878/campos_512_v4
+79/405887/campos_512_v4
+79/405956/campos_512_v4
+79/406038/campos_512_v4
+79/406068/campos_512_v4
+79/406117/campos_512_v4
+79/406146/campos_512_v4
+79/406149/campos_512_v4
+79/406292/campos_512_v4
+79/406303/campos_512_v4
+79/406313/campos_512_v4
+79/406464/campos_512_v4
+79/406580/campos_512_v4
+79/406582/campos_512_v4
+79/406823/campos_512_v4
+79/407048/campos_512_v4
+79/407098/campos_512_v4
+79/407118/campos_512_v4
+79/407285/campos_512_v4
+79/407366/campos_512_v4
+79/407417/campos_512_v4
+79/407437/campos_512_v4
+79/407594/campos_512_v4
+79/407605/campos_512_v4
+79/407640/campos_512_v4
+79/407721/campos_512_v4
+79/407745/campos_512_v4
+79/407821/campos_512_v4
+79/407827/campos_512_v4
+79/407872/campos_512_v4
+79/407998/campos_512_v4
+79/408108/campos_512_v4
+79/408143/campos_512_v4
+79/408153/campos_512_v4
+79/408220/campos_512_v4
+79/408259/campos_512_v4
+79/408493/campos_512_v4
+79/408551/campos_512_v4
+79/408698/campos_512_v4
+79/408753/campos_512_v4
+79/408913/campos_512_v4
+79/408995/campos_512_v4
+79/409008/campos_512_v4
+79/409129/campos_512_v4
+79/409264/campos_512_v4
+79/409291/campos_512_v4
+79/409300/campos_512_v4
+79/409553/campos_512_v4
+79/409571/campos_512_v4
+79/409655/campos_512_v4
+79/409708/campos_512_v4
+79/409711/campos_512_v4
+79/409753/campos_512_v4
+79/409882/campos_512_v4
+8/50036/campos_512_v4
+8/50077/campos_512_v4
+8/50104/campos_512_v4
+8/50186/campos_512_v4
+8/50350/campos_512_v4
+8/50585/campos_512_v4
+8/50753/campos_512_v4
+8/50766/campos_512_v4
+8/50860/campos_512_v4
+8/51273/campos_512_v4
+8/51436/campos_512_v4
+8/51622/campos_512_v4
+8/51654/campos_512_v4
+8/51719/campos_512_v4
+8/51825/campos_512_v4
+8/51928/campos_512_v4
+8/52155/campos_512_v4
+8/52201/campos_512_v4
+8/52493/campos_512_v4
+8/52510/campos_512_v4
+8/52703/campos_512_v4
+8/53180/campos_512_v4
+8/53898/campos_512_v4
+8/53917/campos_512_v4
+8/53922/campos_512_v4
+8/54023/campos_512_v4
+8/54070/campos_512_v4
+8/54088/campos_512_v4
+8/54321/campos_512_v4
+8/54365/campos_512_v4
+8/54366/campos_512_v4
+8/54389/campos_512_v4
+8/54420/campos_512_v4
+8/54492/campos_512_v4
+8/54561/campos_512_v4
+8/54719/campos_512_v4
+8/54726/campos_512_v4
+8/54843/campos_512_v4
+8/54853/campos_512_v4
+8/54941/campos_512_v4
+80/410121/campos_512_v4
+80/410177/campos_512_v4
+80/410381/campos_512_v4
+80/410475/campos_512_v4
+80/410613/campos_512_v4
+80/410685/campos_512_v4
+80/411107/campos_512_v4
+80/411252/campos_512_v4
+80/411386/campos_512_v4
+80/411401/campos_512_v4
+80/411494/campos_512_v4
+80/411668/campos_512_v4
+80/411889/campos_512_v4
+80/412164/campos_512_v4
+80/412326/campos_512_v4
+80/412369/campos_512_v4
+80/412376/campos_512_v4
+80/412397/campos_512_v4
+80/412474/campos_512_v4
+80/412512/campos_512_v4
+80/412748/campos_512_v4
+80/412771/campos_512_v4
+80/412929/campos_512_v4
+80/412961/campos_512_v4
+80/413000/campos_512_v4
+80/413012/campos_512_v4
+80/413014/campos_512_v4
+80/413184/campos_512_v4
+80/413321/campos_512_v4
+80/413337/campos_512_v4
+80/413412/campos_512_v4
+80/413496/campos_512_v4
+80/413576/campos_512_v4
+80/413629/campos_512_v4
+80/413812/campos_512_v4
+80/413992/campos_512_v4
+80/414246/campos_512_v4
+80/414271/campos_512_v4
+80/414315/campos_512_v4
+80/414400/campos_512_v4
+80/414420/campos_512_v4
+80/414764/campos_512_v4
+81/415218/campos_512_v4
+81/415538/campos_512_v4
+81/415565/campos_512_v4
+81/415673/campos_512_v4
+81/415690/campos_512_v4
+81/415767/campos_512_v4
+81/416416/campos_512_v4
+81/416452/campos_512_v4
+81/416661/campos_512_v4
+81/416956/campos_512_v4
+81/417187/campos_512_v4
+81/417576/campos_512_v4
+81/417620/campos_512_v4
+81/417678/campos_512_v4
+81/417967/campos_512_v4
+81/417970/campos_512_v4
+81/418020/campos_512_v4
+81/418333/campos_512_v4
+81/419414/campos_512_v4
+81/419559/campos_512_v4
+81/419562/campos_512_v4
+81/419777/campos_512_v4
+81/419799/campos_512_v4
+81/419895/campos_512_v4
+81/419909/campos_512_v4
+81/419965/campos_512_v4
+81/419971/campos_512_v4
+82/420073/campos_512_v4
+82/420145/campos_512_v4
+82/420156/campos_512_v4
+82/420168/campos_512_v4
+82/420215/campos_512_v4
+82/420334/campos_512_v4
+82/420336/campos_512_v4
+82/420392/campos_512_v4
+82/420462/campos_512_v4
+82/420502/campos_512_v4
+82/420622/campos_512_v4
+82/420756/campos_512_v4
+82/420840/campos_512_v4
+82/421287/campos_512_v4
+82/421359/campos_512_v4
+82/421469/campos_512_v4
+82/421648/campos_512_v4
+82/421716/campos_512_v4
+82/421781/campos_512_v4
+82/421995/campos_512_v4
+82/422081/campos_512_v4
+82/422176/campos_512_v4
+82/422271/campos_512_v4
+82/423020/campos_512_v4
+82/423045/campos_512_v4
+82/423048/campos_512_v4
+82/423098/campos_512_v4
+82/423362/campos_512_v4
+82/423780/campos_512_v4
+82/424101/campos_512_v4
+82/424406/campos_512_v4
+82/424484/campos_512_v4
+82/424783/campos_512_v4
+82/424947/campos_512_v4
+83/425389/campos_512_v4
+83/425479/campos_512_v4
+83/425739/campos_512_v4
+83/425951/campos_512_v4
+83/426128/campos_512_v4
+83/426264/campos_512_v4
+83/426355/campos_512_v4
+83/426550/campos_512_v4
+83/426600/campos_512_v4
+83/426741/campos_512_v4
+83/426808/campos_512_v4
+83/428239/campos_512_v4
+83/428241/campos_512_v4
+83/428395/campos_512_v4
+83/428476/campos_512_v4
+83/428554/campos_512_v4
+83/428734/campos_512_v4
+83/428814/campos_512_v4
+83/429085/campos_512_v4
+83/429106/campos_512_v4
+83/429355/campos_512_v4
+83/429477/campos_512_v4
+83/429491/campos_512_v4
+83/429521/campos_512_v4
+83/429618/campos_512_v4
+83/429699/campos_512_v4
+83/429700/campos_512_v4
+84/430097/campos_512_v4
+84/430313/campos_512_v4
+84/430531/campos_512_v4
+84/430607/campos_512_v4
+84/430761/campos_512_v4
+84/430834/campos_512_v4
+84/430836/campos_512_v4
+84/430908/campos_512_v4
+84/431078/campos_512_v4
+84/431151/campos_512_v4
+84/431283/campos_512_v4
+84/431330/campos_512_v4
+84/431369/campos_512_v4
+84/431593/campos_512_v4
+84/431789/campos_512_v4
+84/432168/campos_512_v4
+84/432224/campos_512_v4
+84/432238/campos_512_v4
+84/432426/campos_512_v4
+84/432918/campos_512_v4
+84/432949/campos_512_v4
+84/433242/campos_512_v4
+84/433272/campos_512_v4
+84/433315/campos_512_v4
+84/433472/campos_512_v4
+84/433679/campos_512_v4
+84/433687/campos_512_v4
+84/434039/campos_512_v4
+84/434142/campos_512_v4
+84/434208/campos_512_v4
+84/434440/campos_512_v4
+84/434584/campos_512_v4
+84/434733/campos_512_v4
+84/434865/campos_512_v4
+84/434927/campos_512_v4
+84/435001/campos_512_v4
+85/435114/campos_512_v4
+85/435295/campos_512_v4
+85/436355/campos_512_v4
+85/436411/campos_512_v4
+85/436758/campos_512_v4
+85/436799/campos_512_v4
+85/436865/campos_512_v4
+85/436899/campos_512_v4
+85/436976/campos_512_v4
+85/437140/campos_512_v4
+85/437235/campos_512_v4
+85/437295/campos_512_v4
+85/437348/campos_512_v4
+85/437456/campos_512_v4
+85/437566/campos_512_v4
+85/437627/campos_512_v4
+85/437720/campos_512_v4
+85/437758/campos_512_v4
+85/437827/campos_512_v4
+85/437859/campos_512_v4
+85/438029/campos_512_v4
+85/438351/campos_512_v4
+85/438395/campos_512_v4
+85/438677/campos_512_v4
+85/438949/campos_512_v4
+85/439259/campos_512_v4
+85/439347/campos_512_v4
+85/439447/campos_512_v4
+86/440098/campos_512_v4
+86/440168/campos_512_v4
+86/440414/campos_512_v4
+86/441147/campos_512_v4
+86/441215/campos_512_v4
+86/441247/campos_512_v4
+86/441751/campos_512_v4
+86/441915/campos_512_v4
+86/441977/campos_512_v4
+86/442103/campos_512_v4
+86/442230/campos_512_v4
+86/442438/campos_512_v4
+86/442441/campos_512_v4
+86/442455/campos_512_v4
+86/442876/campos_512_v4
+86/443102/campos_512_v4
+86/443292/campos_512_v4
+86/443336/campos_512_v4
+86/443384/campos_512_v4
+86/443395/campos_512_v4
+86/443476/campos_512_v4
+86/443930/campos_512_v4
+86/443962/campos_512_v4
+86/444137/campos_512_v4
+86/444231/campos_512_v4
+86/444468/campos_512_v4
+86/444481/campos_512_v4
+86/444679/campos_512_v4
+86/444701/campos_512_v4
+86/444738/campos_512_v4
+87/445467/campos_512_v4
+87/445624/campos_512_v4
+87/445663/campos_512_v4
+87/445712/campos_512_v4
+87/445742/campos_512_v4
+87/445770/campos_512_v4
+87/445775/campos_512_v4
+87/446028/campos_512_v4
+87/446475/campos_512_v4
+87/446539/campos_512_v4
+87/446869/campos_512_v4
+87/446989/campos_512_v4
+87/446999/campos_512_v4
+87/447006/campos_512_v4
+87/447046/campos_512_v4
+87/447275/campos_512_v4
+87/447361/campos_512_v4
+87/447389/campos_512_v4
+87/447669/campos_512_v4
+87/447840/campos_512_v4
+87/448051/campos_512_v4
+87/448149/campos_512_v4
+87/448161/campos_512_v4
+87/448285/campos_512_v4
+87/448320/campos_512_v4
+87/448481/campos_512_v4
+87/448668/campos_512_v4
+87/448723/campos_512_v4
+87/448735/campos_512_v4
+87/448994/campos_512_v4
+87/449078/campos_512_v4
+87/449311/campos_512_v4
+87/449380/campos_512_v4
+87/449479/campos_512_v4
+87/449530/campos_512_v4
+87/449586/campos_512_v4
+87/449760/campos_512_v4
+88/450087/campos_512_v4
+88/450159/campos_512_v4
+88/450174/campos_512_v4
+88/450185/campos_512_v4
+88/450249/campos_512_v4
+88/450449/campos_512_v4
+88/450456/campos_512_v4
+88/450600/campos_512_v4
+88/450839/campos_512_v4
+88/451081/campos_512_v4
+88/451153/campos_512_v4
+88/451317/campos_512_v4
+88/451715/campos_512_v4
+88/452362/campos_512_v4
+88/452671/campos_512_v4
+88/452839/campos_512_v4
+88/452943/campos_512_v4
+88/452944/campos_512_v4
+88/452959/campos_512_v4
+88/453195/campos_512_v4
+88/454038/campos_512_v4
+88/454132/campos_512_v4
+88/454360/campos_512_v4
+88/454543/campos_512_v4
+88/454885/campos_512_v4
+89/455070/campos_512_v4
+89/455263/campos_512_v4
+89/455354/campos_512_v4
+89/455398/campos_512_v4
+89/455400/campos_512_v4
+89/455469/campos_512_v4
+89/455527/campos_512_v4
+89/455730/campos_512_v4
+89/455774/campos_512_v4
+89/455822/campos_512_v4
+89/456138/campos_512_v4
+89/456567/campos_512_v4
+89/456617/campos_512_v4
+89/456865/campos_512_v4
+89/456996/campos_512_v4
+89/457475/campos_512_v4
+89/457555/campos_512_v4
+89/457583/campos_512_v4
+89/458216/campos_512_v4
+89/458257/campos_512_v4
+89/458463/campos_512_v4
+89/458549/campos_512_v4
+89/458969/campos_512_v4
+89/459028/campos_512_v4
+89/459089/campos_512_v4
+89/459316/campos_512_v4
+89/459346/campos_512_v4
+89/459562/campos_512_v4
+89/459677/campos_512_v4
+89/459697/campos_512_v4
+89/459817/campos_512_v4
+89/459834/campos_512_v4
+89/459979/campos_512_v4
+9/55393/campos_512_v4
+9/55405/campos_512_v4
+9/55412/campos_512_v4
+9/55741/campos_512_v4
+9/55774/campos_512_v4
+9/55932/campos_512_v4
+9/56016/campos_512_v4
+9/56180/campos_512_v4
+9/56482/campos_512_v4
+9/56608/campos_512_v4
+9/56687/campos_512_v4
+9/56840/campos_512_v4
+9/57211/campos_512_v4
+9/57416/campos_512_v4
+9/57517/campos_512_v4
+9/57621/campos_512_v4
+9/57759/campos_512_v4
+9/57907/campos_512_v4
+9/58110/campos_512_v4
+9/58410/campos_512_v4
+9/58647/campos_512_v4
+9/58682/campos_512_v4
+9/59155/campos_512_v4
+9/59185/campos_512_v4
+9/59353/campos_512_v4
+9/59671/campos_512_v4
+9/59804/campos_512_v4
+9/59944/campos_512_v4
+9/59946/campos_512_v4
+90/460060/campos_512_v4
+90/460206/campos_512_v4
+90/460207/campos_512_v4
+90/460357/campos_512_v4
+90/460379/campos_512_v4
+90/460385/campos_512_v4
+90/460484/campos_512_v4
+90/460826/campos_512_v4
+90/460845/campos_512_v4
+90/461028/campos_512_v4
+90/461075/campos_512_v4
+90/461239/campos_512_v4
+90/461315/campos_512_v4
+90/461393/campos_512_v4
+90/461418/campos_512_v4
+90/461613/campos_512_v4
+90/462157/campos_512_v4
+90/462182/campos_512_v4
+90/462216/campos_512_v4
+90/462262/campos_512_v4
+90/462574/campos_512_v4
+90/462719/campos_512_v4
+90/462916/campos_512_v4
+90/462997/campos_512_v4
+90/463062/campos_512_v4
+90/463408/campos_512_v4
+90/463506/campos_512_v4
+90/463557/campos_512_v4
+90/463827/campos_512_v4
+90/463830/campos_512_v4
+90/463874/campos_512_v4
+90/463890/campos_512_v4
+90/463939/campos_512_v4
+90/463949/campos_512_v4
+90/464066/campos_512_v4
+90/464133/campos_512_v4
+90/464232/campos_512_v4
+90/464383/campos_512_v4
+90/464402/campos_512_v4
+90/464404/campos_512_v4
+90/464501/campos_512_v4
+90/464686/campos_512_v4
+90/464728/campos_512_v4
+90/464749/campos_512_v4
+90/464787/campos_512_v4
+90/464819/campos_512_v4
+91/465161/campos_512_v4
+91/465227/campos_512_v4
+91/465322/campos_512_v4
+91/465440/campos_512_v4
+91/465554/campos_512_v4
+91/465559/campos_512_v4
+91/465849/campos_512_v4
+91/466091/campos_512_v4
+91/466208/campos_512_v4
+91/466221/campos_512_v4
+91/466722/campos_512_v4
+91/466861/campos_512_v4
+91/466975/campos_512_v4
+91/467083/campos_512_v4
+91/467314/campos_512_v4
+91/467553/campos_512_v4
+91/467562/campos_512_v4
+91/467684/campos_512_v4
+91/467875/campos_512_v4
+91/468074/campos_512_v4
+91/468107/campos_512_v4
+91/468129/campos_512_v4
+91/468277/campos_512_v4
+91/468429/campos_512_v4
+91/468549/campos_512_v4
+91/468647/campos_512_v4
+91/469061/campos_512_v4
+91/469364/campos_512_v4
+91/469372/campos_512_v4
+91/469429/campos_512_v4
+91/469553/campos_512_v4
+91/469664/campos_512_v4
+91/469747/campos_512_v4
+91/469908/campos_512_v4
+92/470198/campos_512_v4
+92/470244/campos_512_v4
+92/470323/campos_512_v4
+92/470541/campos_512_v4
+92/470568/campos_512_v4
+92/470747/campos_512_v4
+92/470947/campos_512_v4
+92/471190/campos_512_v4
+92/471249/campos_512_v4
+92/471345/campos_512_v4
+92/471347/campos_512_v4
+92/471364/campos_512_v4
+92/471678/campos_512_v4
+92/471718/campos_512_v4
+92/471858/campos_512_v4
+92/472030/campos_512_v4
+92/472136/campos_512_v4
+92/472262/campos_512_v4
+92/472394/campos_512_v4
+92/472497/campos_512_v4
+92/472618/campos_512_v4
+92/472712/campos_512_v4
+92/472715/campos_512_v4
+92/472769/campos_512_v4
+92/473003/campos_512_v4
+92/473080/campos_512_v4
+92/473168/campos_512_v4
+92/473315/campos_512_v4
+92/473373/campos_512_v4
+92/473564/campos_512_v4
+92/473581/campos_512_v4
+92/473601/campos_512_v4
+92/473609/campos_512_v4
+92/473735/campos_512_v4
+92/473798/campos_512_v4
+92/473871/campos_512_v4
+92/473931/campos_512_v4
+92/474216/campos_512_v4
+92/474250/campos_512_v4
+92/474347/campos_512_v4
+92/474542/campos_512_v4
+92/474640/campos_512_v4
+92/474648/campos_512_v4
+92/474659/campos_512_v4
+92/474794/campos_512_v4
+92/474999/campos_512_v4
+93/475039/campos_512_v4
+93/475158/campos_512_v4
+93/475165/campos_512_v4
+93/475265/campos_512_v4
+93/475502/campos_512_v4
+93/475524/campos_512_v4
+93/475605/campos_512_v4
+93/475724/campos_512_v4
+93/475971/campos_512_v4
+93/476188/campos_512_v4
+93/476240/campos_512_v4
+93/476253/campos_512_v4
+93/476339/campos_512_v4
+93/476359/campos_512_v4
+93/476578/campos_512_v4
+93/476692/campos_512_v4
+93/476705/campos_512_v4
+93/476729/campos_512_v4
+93/476730/campos_512_v4
+93/476777/campos_512_v4
+93/476814/campos_512_v4
+93/477245/campos_512_v4
+93/477393/campos_512_v4
+93/477394/campos_512_v4
+93/477575/campos_512_v4
+93/477588/campos_512_v4
+93/477739/campos_512_v4
+93/477796/campos_512_v4
+93/478184/campos_512_v4
+93/478233/campos_512_v4
+93/478476/campos_512_v4
+93/478795/campos_512_v4
+93/478877/campos_512_v4
+93/478883/campos_512_v4
+93/478907/campos_512_v4
+93/479332/campos_512_v4
+93/479431/campos_512_v4
+93/479460/campos_512_v4
+93/479658/campos_512_v4
+93/479674/campos_512_v4
+93/479690/campos_512_v4
+93/479788/campos_512_v4
+93/479865/campos_512_v4
+93/479891/campos_512_v4
+93/479932/campos_512_v4
+94/480254/campos_512_v4
+94/480390/campos_512_v4
+94/480453/campos_512_v4
+94/480485/campos_512_v4
+94/480511/campos_512_v4
+94/480607/campos_512_v4
+94/480641/campos_512_v4
+94/480733/campos_512_v4
+94/480834/campos_512_v4
+94/480966/campos_512_v4
+94/481069/campos_512_v4
+94/481145/campos_512_v4
+94/481167/campos_512_v4
+94/481271/campos_512_v4
+94/481350/campos_512_v4
+94/481425/campos_512_v4
+94/481444/campos_512_v4
+94/481510/campos_512_v4
+94/481522/campos_512_v4
+94/481561/campos_512_v4
+94/481688/campos_512_v4
+94/481713/campos_512_v4
+94/481725/campos_512_v4
+94/481789/campos_512_v4
+94/481829/campos_512_v4
+94/481920/campos_512_v4
+94/482044/campos_512_v4
+94/482098/campos_512_v4
+94/482110/campos_512_v4
+94/482247/campos_512_v4
+94/482384/campos_512_v4
+94/482386/campos_512_v4
+94/482430/campos_512_v4
+94/482592/campos_512_v4
+94/482650/campos_512_v4
+94/482784/campos_512_v4
+94/482829/campos_512_v4
+94/482861/campos_512_v4
+94/483029/campos_512_v4
+94/483030/campos_512_v4
+94/483141/campos_512_v4
+94/483181/campos_512_v4
+94/483468/campos_512_v4
+94/483474/campos_512_v4
+94/483491/campos_512_v4
+94/483502/campos_512_v4
+94/483503/campos_512_v4
+94/483525/campos_512_v4
+94/483696/campos_512_v4
+94/483932/campos_512_v4
+94/484116/campos_512_v4
+94/484161/campos_512_v4
+94/484316/campos_512_v4
+94/484328/campos_512_v4
+94/484512/campos_512_v4
+94/484611/campos_512_v4
+94/484968/campos_512_v4
+95/485234/campos_512_v4
+95/485524/campos_512_v4
+95/485569/campos_512_v4
+95/485788/campos_512_v4
+95/485870/campos_512_v4
+95/485937/campos_512_v4
+95/486149/campos_512_v4
+95/486259/campos_512_v4
+95/486442/campos_512_v4
+95/486523/campos_512_v4
+95/486642/campos_512_v4
+95/486800/campos_512_v4
+95/486927/campos_512_v4
+95/487022/campos_512_v4
+95/487104/campos_512_v4
+95/487263/campos_512_v4
+95/487395/campos_512_v4
+95/487444/campos_512_v4
+95/487949/campos_512_v4
+95/487967/campos_512_v4
+95/488157/campos_512_v4
+95/488173/campos_512_v4
+95/488371/campos_512_v4
+95/488415/campos_512_v4
+95/488419/campos_512_v4
+95/488439/campos_512_v4
+95/488710/campos_512_v4
+95/488840/campos_512_v4
+95/488857/campos_512_v4
+95/489156/campos_512_v4
+95/489436/campos_512_v4
+95/489442/campos_512_v4
+95/489509/campos_512_v4
+95/489545/campos_512_v4
+95/489682/campos_512_v4
+95/489683/campos_512_v4
+95/489947/campos_512_v4
+96/490188/campos_512_v4
+96/490299/campos_512_v4
+96/490330/campos_512_v4
+96/490415/campos_512_v4
+96/490607/campos_512_v4
+96/490646/campos_512_v4
+96/490671/campos_512_v4
+96/490785/campos_512_v4
+96/490876/campos_512_v4
+96/491097/campos_512_v4
+96/491105/campos_512_v4
+96/491133/campos_512_v4
+96/491191/campos_512_v4
+96/491296/campos_512_v4
+96/491386/campos_512_v4
+96/491590/campos_512_v4
+96/491658/campos_512_v4
+96/491685/campos_512_v4
+96/491686/campos_512_v4
+96/491785/campos_512_v4
+96/491815/campos_512_v4
+96/491854/campos_512_v4
+96/491974/campos_512_v4
+96/492181/campos_512_v4
+96/492220/campos_512_v4
+96/492242/campos_512_v4
+96/492293/campos_512_v4
+96/492380/campos_512_v4
+96/493024/campos_512_v4
+96/493025/campos_512_v4
+96/493027/campos_512_v4
+96/493085/campos_512_v4
+96/493128/campos_512_v4
+96/493205/campos_512_v4
+96/493263/campos_512_v4
+96/493313/campos_512_v4
+96/493476/campos_512_v4
+96/493696/campos_512_v4
+96/493706/campos_512_v4
+96/493723/campos_512_v4
+96/493728/campos_512_v4
+96/493758/campos_512_v4
+96/494032/campos_512_v4
+96/494088/campos_512_v4
+96/494246/campos_512_v4
+96/494482/campos_512_v4
+96/494634/campos_512_v4
+96/494665/campos_512_v4
+96/494788/campos_512_v4
+96/494815/campos_512_v4
+96/494972/campos_512_v4
+97/495451/campos_512_v4
+97/495463/campos_512_v4
+97/495605/campos_512_v4
+97/495837/campos_512_v4
+97/496092/campos_512_v4
+97/496244/campos_512_v4
+97/496748/campos_512_v4
+97/496880/campos_512_v4
+97/496930/campos_512_v4
+97/496950/campos_512_v4
+97/496996/campos_512_v4
+97/497772/campos_512_v4
+97/497964/campos_512_v4
+97/498020/campos_512_v4
+97/498250/campos_512_v4
+97/498254/campos_512_v4
+97/498490/campos_512_v4
+97/498549/campos_512_v4
+97/498749/campos_512_v4
+97/498860/campos_512_v4
+97/498902/campos_512_v4
+97/498934/campos_512_v4
+97/499144/campos_512_v4
+97/499347/campos_512_v4
+97/499389/campos_512_v4
+97/499442/campos_512_v4
+97/499676/campos_512_v4
+97/499731/campos_512_v4
+97/499861/campos_512_v4
+97/499912/campos_512_v4
+98/500032/campos_512_v4
+98/500174/campos_512_v4
+98/500339/campos_512_v4
+98/500379/campos_512_v4
+98/500415/campos_512_v4
+98/500514/campos_512_v4
+98/500570/campos_512_v4
+98/500604/campos_512_v4
+98/500765/campos_512_v4
+98/501119/campos_512_v4
+98/501124/campos_512_v4
+98/501209/campos_512_v4
+98/501278/campos_512_v4
+98/501297/campos_512_v4
+98/501376/campos_512_v4
+98/501471/campos_512_v4
+98/501576/campos_512_v4
+98/501863/campos_512_v4
+98/501871/campos_512_v4
+98/501930/campos_512_v4
+98/502052/campos_512_v4
+98/502106/campos_512_v4
+98/502139/campos_512_v4
+98/502189/campos_512_v4
+98/502227/campos_512_v4
+98/502270/campos_512_v4
+98/502411/campos_512_v4
+98/502514/campos_512_v4
+98/502551/campos_512_v4
+98/502574/campos_512_v4
+98/502598/campos_512_v4
+98/502930/campos_512_v4
+98/502988/campos_512_v4
+98/503077/campos_512_v4
+98/503092/campos_512_v4
+98/503113/campos_512_v4
+98/503170/campos_512_v4
+98/503220/campos_512_v4
+98/503310/campos_512_v4
+98/503700/campos_512_v4
+98/503723/campos_512_v4
+98/503764/campos_512_v4
+98/503843/campos_512_v4
+98/503952/campos_512_v4
+98/503957/campos_512_v4
+98/504004/campos_512_v4
+98/504078/campos_512_v4
+98/504203/campos_512_v4
+98/504300/campos_512_v4
+98/504421/campos_512_v4
+98/504481/campos_512_v4
+98/504509/campos_512_v4
+98/504956/campos_512_v4
+99/505036/campos_512_v4
+99/505178/campos_512_v4
+99/505373/campos_512_v4
+99/505676/campos_512_v4
+99/505730/campos_512_v4
+99/505739/campos_512_v4
+99/505857/campos_512_v4
+99/505935/campos_512_v4
+99/506016/campos_512_v4
+99/506118/campos_512_v4
+99/506295/campos_512_v4
+99/506429/campos_512_v4
+99/506598/campos_512_v4
+99/506653/campos_512_v4
+99/506735/campos_512_v4
+99/507027/campos_512_v4
+99/507063/campos_512_v4
+99/507348/campos_512_v4
+99/507459/campos_512_v4
+99/507606/campos_512_v4
+99/507751/campos_512_v4
+99/508045/campos_512_v4
+99/508217/campos_512_v4
+99/508279/campos_512_v4
+99/508303/campos_512_v4
+99/508306/campos_512_v4
+99/508419/campos_512_v4
+99/508559/campos_512_v4
+99/508926/campos_512_v4
+99/508952/campos_512_v4
+99/509100/campos_512_v4
+99/509607/campos_512_v4
+99/509616/campos_512_v4
diff --git a/shell_scripts/raw_img_list/Food.txt b/shell_scripts/raw_img_list/Food.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a6a0938c529463743277a25561782ada8e58a72d
--- /dev/null
+++ b/shell_scripts/raw_img_list/Food.txt
@@ -0,0 +1,2575 @@
+0/10006/campos_512_v4
+0/10032/campos_512_v4
+0/10054/campos_512_v4
+0/10067/campos_512_v4
+0/10153/campos_512_v4
+0/10169/campos_512_v4
+0/10176/campos_512_v4
+0/10283/campos_512_v4
+0/10310/campos_512_v4
+0/10368/campos_512_v4
+0/10510/campos_512_v4
+0/10735/campos_512_v4
+0/10760/campos_512_v4
+0/10827/campos_512_v4
+0/10845/campos_512_v4
+0/10876/campos_512_v4
+0/10907/campos_512_v4
+0/10913/campos_512_v4
+0/10927/campos_512_v4
+0/10929/campos_512_v4
+0/10949/campos_512_v4
+0/10952/campos_512_v4
+0/10962/campos_512_v4
+0/11278/campos_512_v4
+0/12090/campos_512_v4
+0/12153/campos_512_v4
+0/12181/campos_512_v4
+0/12328/campos_512_v4
+0/12839/campos_512_v4
+0/12844/campos_512_v4
+0/13226/campos_512_v4
+0/13818/campos_512_v4
+0/13841/campos_512_v4
+0/14056/campos_512_v4
+0/14326/campos_512_v4
+0/14428/campos_512_v4
+1/15826/campos_512_v4
+1/16249/campos_512_v4
+1/17277/campos_512_v4
+1/17305/campos_512_v4
+1/17410/campos_512_v4
+1/17431/campos_512_v4
+1/17603/campos_512_v4
+1/17994/campos_512_v4
+1/18003/campos_512_v4
+1/18301/campos_512_v4
+1/18535/campos_512_v4
+1/18706/campos_512_v4
+1/18994/campos_512_v4
+1/19046/campos_512_v4
+1/19144/campos_512_v4
+1/19455/campos_512_v4
+1/19622/campos_512_v4
+1/19938/campos_512_v4
+10/60388/campos_512_v4
+10/60488/campos_512_v4
+10/60629/campos_512_v4
+10/61227/campos_512_v4
+10/61410/campos_512_v4
+10/62181/campos_512_v4
+10/62202/campos_512_v4
+10/62269/campos_512_v4
+10/63069/campos_512_v4
+10/63197/campos_512_v4
+10/63830/campos_512_v4
+10/64332/campos_512_v4
+10/64883/campos_512_v4
+100/510673/campos_512_v4
+100/511501/campos_512_v4
+100/511718/campos_512_v4
+100/511866/campos_512_v4
+100/511922/campos_512_v4
+100/511924/campos_512_v4
+100/511963/campos_512_v4
+100/511986/campos_512_v4
+100/512069/campos_512_v4
+100/512241/campos_512_v4
+100/512287/campos_512_v4
+100/512352/campos_512_v4
+100/513008/campos_512_v4
+100/513461/campos_512_v4
+100/513593/campos_512_v4
+100/513914/campos_512_v4
+100/514172/campos_512_v4
+100/514381/campos_512_v4
+100/514633/campos_512_v4
+100/514876/campos_512_v4
+100/514908/campos_512_v4
+101/515034/campos_512_v4
+101/515234/campos_512_v4
+101/515254/campos_512_v4
+101/515678/campos_512_v4
+101/516294/campos_512_v4
+101/516365/campos_512_v4
+101/516565/campos_512_v4
+101/517100/campos_512_v4
+101/517102/campos_512_v4
+101/517119/campos_512_v4
+101/517538/campos_512_v4
+101/517699/campos_512_v4
+101/518089/campos_512_v4
+101/518145/campos_512_v4
+101/518353/campos_512_v4
+101/519554/campos_512_v4
+101/519587/campos_512_v4
+101/519597/campos_512_v4
+101/519675/campos_512_v4
+101/519695/campos_512_v4
+102/520315/campos_512_v4
+102/520642/campos_512_v4
+102/521389/campos_512_v4
+102/521576/campos_512_v4
+102/522742/campos_512_v4
+102/523137/campos_512_v4
+102/523338/campos_512_v4
+102/523792/campos_512_v4
+102/524161/campos_512_v4
+102/524239/campos_512_v4
+102/524594/campos_512_v4
+102/524867/campos_512_v4
+103/525117/campos_512_v4
+103/525990/campos_512_v4
+103/526205/campos_512_v4
+103/526494/campos_512_v4
+103/526768/campos_512_v4
+103/527149/campos_512_v4
+103/527445/campos_512_v4
+103/527503/campos_512_v4
+103/527886/campos_512_v4
+103/528315/campos_512_v4
+103/528344/campos_512_v4
+103/528493/campos_512_v4
+103/528743/campos_512_v4
+103/528897/campos_512_v4
+103/529242/campos_512_v4
+103/529263/campos_512_v4
+103/529747/campos_512_v4
+104/530386/campos_512_v4
+104/530847/campos_512_v4
+104/530902/campos_512_v4
+104/530908/campos_512_v4
+104/531445/campos_512_v4
+104/531701/campos_512_v4
+104/531772/campos_512_v4
+104/532092/campos_512_v4
+104/532116/campos_512_v4
+104/532229/campos_512_v4
+104/532296/campos_512_v4
+104/532301/campos_512_v4
+104/532647/campos_512_v4
+104/532846/campos_512_v4
+104/533204/campos_512_v4
+104/533750/campos_512_v4
+104/534092/campos_512_v4
+104/534777/campos_512_v4
+105/535617/campos_512_v4
+105/535660/campos_512_v4
+105/536069/campos_512_v4
+105/536124/campos_512_v4
+105/536315/campos_512_v4
+105/536658/campos_512_v4
+105/537004/campos_512_v4
+105/537096/campos_512_v4
+105/537524/campos_512_v4
+105/537682/campos_512_v4
+105/537813/campos_512_v4
+105/538401/campos_512_v4
+105/538501/campos_512_v4
+105/538559/campos_512_v4
+105/538736/campos_512_v4
+105/538829/campos_512_v4
+105/539119/campos_512_v4
+105/539127/campos_512_v4
+105/539513/campos_512_v4
+105/539612/campos_512_v4
+105/539686/campos_512_v4
+105/539956/campos_512_v4
+106/540129/campos_512_v4
+106/540316/campos_512_v4
+106/540396/campos_512_v4
+106/541049/campos_512_v4
+106/541524/campos_512_v4
+106/541678/campos_512_v4
+106/541768/campos_512_v4
+106/542045/campos_512_v4
+106/542431/campos_512_v4
+106/542464/campos_512_v4
+106/542483/campos_512_v4
+106/542510/campos_512_v4
+106/542826/campos_512_v4
+106/542936/campos_512_v4
+106/542985/campos_512_v4
+106/543137/campos_512_v4
+106/543624/campos_512_v4
+106/543855/campos_512_v4
+106/543991/campos_512_v4
+106/544657/campos_512_v4
+106/544699/campos_512_v4
+106/544823/campos_512_v4
+106/544983/campos_512_v4
+106/544986/campos_512_v4
+107/545230/campos_512_v4
+107/545301/campos_512_v4
+107/545417/campos_512_v4
+107/545536/campos_512_v4
+107/545683/campos_512_v4
+107/545792/campos_512_v4
+107/545811/campos_512_v4
+107/545818/campos_512_v4
+107/546163/campos_512_v4
+107/546428/campos_512_v4
+107/546732/campos_512_v4
+107/547071/campos_512_v4
+107/547819/campos_512_v4
+107/548161/campos_512_v4
+107/548289/campos_512_v4
+107/548471/campos_512_v4
+107/549161/campos_512_v4
+107/549270/campos_512_v4
+107/549420/campos_512_v4
+107/549600/campos_512_v4
+108/550319/campos_512_v4
+108/550509/campos_512_v4
+108/551657/campos_512_v4
+108/551688/campos_512_v4
+108/551845/campos_512_v4
+108/551852/campos_512_v4
+108/552459/campos_512_v4
+108/552607/campos_512_v4
+108/552786/campos_512_v4
+108/552807/campos_512_v4
+108/552920/campos_512_v4
+108/553281/campos_512_v4
+108/553493/campos_512_v4
+108/553535/campos_512_v4
+108/553836/campos_512_v4
+108/553960/campos_512_v4
+108/554106/campos_512_v4
+108/554295/campos_512_v4
+108/554682/campos_512_v4
+108/554827/campos_512_v4
+108/554929/campos_512_v4
+109/555351/campos_512_v4
+109/555485/campos_512_v4
+109/555612/campos_512_v4
+109/556377/campos_512_v4
+109/557039/campos_512_v4
+109/557218/campos_512_v4
+109/557243/campos_512_v4
+109/557401/campos_512_v4
+109/557829/campos_512_v4
+109/557866/campos_512_v4
+109/558673/campos_512_v4
+109/558677/campos_512_v4
+109/559181/campos_512_v4
+109/559284/campos_512_v4
+109/559403/campos_512_v4
+11/65537/campos_512_v4
+11/65631/campos_512_v4
+11/65645/campos_512_v4
+11/65899/campos_512_v4
+11/66159/campos_512_v4
+11/66437/campos_512_v4
+11/66495/campos_512_v4
+11/66554/campos_512_v4
+11/66797/campos_512_v4
+11/66891/campos_512_v4
+11/67222/campos_512_v4
+11/67594/campos_512_v4
+11/68365/campos_512_v4
+11/68512/campos_512_v4
+11/68851/campos_512_v4
+11/69081/campos_512_v4
+11/69161/campos_512_v4
+110/560652/campos_512_v4
+110/561246/campos_512_v4
+110/561627/campos_512_v4
+110/561657/campos_512_v4
+110/561791/campos_512_v4
+110/563005/campos_512_v4
+110/563011/campos_512_v4
+110/563706/campos_512_v4
+110/564106/campos_512_v4
+110/564207/campos_512_v4
+110/564643/campos_512_v4
+111/565263/campos_512_v4
+111/565382/campos_512_v4
+111/565427/campos_512_v4
+111/565796/campos_512_v4
+111/565804/campos_512_v4
+111/566201/campos_512_v4
+111/566305/campos_512_v4
+111/566358/campos_512_v4
+111/566764/campos_512_v4
+111/567236/campos_512_v4
+111/567334/campos_512_v4
+111/567762/campos_512_v4
+111/568738/campos_512_v4
+111/568768/campos_512_v4
+111/569051/campos_512_v4
+111/569382/campos_512_v4
+112/570536/campos_512_v4
+112/570677/campos_512_v4
+112/571128/campos_512_v4
+112/571462/campos_512_v4
+112/571777/campos_512_v4
+112/571937/campos_512_v4
+112/571948/campos_512_v4
+112/572173/campos_512_v4
+112/572300/campos_512_v4
+112/572677/campos_512_v4
+112/572819/campos_512_v4
+112/572892/campos_512_v4
+112/573070/campos_512_v4
+112/573099/campos_512_v4
+112/573321/campos_512_v4
+112/573342/campos_512_v4
+112/573421/campos_512_v4
+112/573555/campos_512_v4
+112/573767/campos_512_v4
+112/574321/campos_512_v4
+112/574500/campos_512_v4
+112/574958/campos_512_v4
+113/575230/campos_512_v4
+113/575322/campos_512_v4
+113/575384/campos_512_v4
+113/575940/campos_512_v4
+113/577300/campos_512_v4
+113/577628/campos_512_v4
+113/578156/campos_512_v4
+113/578196/campos_512_v4
+113/578714/campos_512_v4
+113/579389/campos_512_v4
+113/579417/campos_512_v4
+113/579817/campos_512_v4
+114/580236/campos_512_v4
+114/580382/campos_512_v4
+114/580384/campos_512_v4
+114/580490/campos_512_v4
+114/580530/campos_512_v4
+114/581428/campos_512_v4
+114/581473/campos_512_v4
+114/581812/campos_512_v4
+114/582865/campos_512_v4
+114/582898/campos_512_v4
+114/583231/campos_512_v4
+114/583284/campos_512_v4
+114/583320/campos_512_v4
+114/583446/campos_512_v4
+114/583548/campos_512_v4
+114/583648/campos_512_v4
+114/584032/campos_512_v4
+114/584378/campos_512_v4
+114/584496/campos_512_v4
+114/584893/campos_512_v4
+115/585877/campos_512_v4
+115/586939/campos_512_v4
+115/587672/campos_512_v4
+115/587812/campos_512_v4
+115/587888/campos_512_v4
+115/588497/campos_512_v4
+115/588555/campos_512_v4
+115/588605/campos_512_v4
+115/588915/campos_512_v4
+115/588925/campos_512_v4
+115/589242/campos_512_v4
+115/589764/campos_512_v4
+116/590138/campos_512_v4
+116/591111/campos_512_v4
+116/591225/campos_512_v4
+116/591284/campos_512_v4
+116/591468/campos_512_v4
+116/591624/campos_512_v4
+116/591664/campos_512_v4
+116/592139/campos_512_v4
+116/592491/campos_512_v4
+116/592535/campos_512_v4
+116/592664/campos_512_v4
+116/592686/campos_512_v4
+116/592709/campos_512_v4
+116/593313/campos_512_v4
+116/593505/campos_512_v4
+116/593620/campos_512_v4
+116/593775/campos_512_v4
+116/594411/campos_512_v4
+116/594648/campos_512_v4
+116/594985/campos_512_v4
+117/595042/campos_512_v4
+117/595131/campos_512_v4
+117/595271/campos_512_v4
+117/595342/campos_512_v4
+117/595507/campos_512_v4
+117/595538/campos_512_v4
+117/596272/campos_512_v4
+117/596311/campos_512_v4
+117/596866/campos_512_v4
+117/597126/campos_512_v4
+117/597754/campos_512_v4
+117/597788/campos_512_v4
+117/598058/campos_512_v4
+117/598893/campos_512_v4
+117/599392/campos_512_v4
+117/599505/campos_512_v4
+117/599978/campos_512_v4
+118/600444/campos_512_v4
+118/600653/campos_512_v4
+118/600784/campos_512_v4
+118/601004/campos_512_v4
+118/601023/campos_512_v4
+118/601134/campos_512_v4
+118/601334/campos_512_v4
+118/601809/campos_512_v4
+118/601859/campos_512_v4
+118/602185/campos_512_v4
+118/602242/campos_512_v4
+118/602613/campos_512_v4
+118/602639/campos_512_v4
+118/602996/campos_512_v4
+118/603462/campos_512_v4
+118/604052/campos_512_v4
+118/604240/campos_512_v4
+118/604245/campos_512_v4
+118/604314/campos_512_v4
+118/604457/campos_512_v4
+118/604632/campos_512_v4
+118/604712/campos_512_v4
+118/604753/campos_512_v4
+119/605035/campos_512_v4
+119/605411/campos_512_v4
+119/606134/campos_512_v4
+119/606485/campos_512_v4
+119/606566/campos_512_v4
+119/606845/campos_512_v4
+119/607641/campos_512_v4
+119/607653/campos_512_v4
+119/608118/campos_512_v4
+119/608299/campos_512_v4
+119/608350/campos_512_v4
+119/608617/campos_512_v4
+119/609023/campos_512_v4
+119/609681/campos_512_v4
+119/609699/campos_512_v4
+119/609806/campos_512_v4
+12/70671/campos_512_v4
+12/70948/campos_512_v4
+12/71532/campos_512_v4
+12/71751/campos_512_v4
+12/72213/campos_512_v4
+12/72401/campos_512_v4
+12/72600/campos_512_v4
+12/72935/campos_512_v4
+12/73360/campos_512_v4
+12/73781/campos_512_v4
+12/73842/campos_512_v4
+12/73929/campos_512_v4
+12/74463/campos_512_v4
+12/74699/campos_512_v4
+12/74917/campos_512_v4
+120/610338/campos_512_v4
+120/611150/campos_512_v4
+120/611434/campos_512_v4
+120/611662/campos_512_v4
+120/611943/campos_512_v4
+120/613444/campos_512_v4
+120/613461/campos_512_v4
+120/613678/campos_512_v4
+120/613754/campos_512_v4
+120/613878/campos_512_v4
+120/613890/campos_512_v4
+120/614227/campos_512_v4
+120/614725/campos_512_v4
+120/614821/campos_512_v4
+120/614858/campos_512_v4
+120/614925/campos_512_v4
+120/614963/campos_512_v4
+121/615381/campos_512_v4
+121/615395/campos_512_v4
+121/616103/campos_512_v4
+121/616327/campos_512_v4
+121/616522/campos_512_v4
+121/616935/campos_512_v4
+121/616938/campos_512_v4
+121/617055/campos_512_v4
+121/617340/campos_512_v4
+121/617990/campos_512_v4
+121/618525/campos_512_v4
+121/618651/campos_512_v4
+121/619304/campos_512_v4
+121/619350/campos_512_v4
+121/619601/campos_512_v4
+122/620389/campos_512_v4
+122/620396/campos_512_v4
+122/620403/campos_512_v4
+122/620416/campos_512_v4
+122/620694/campos_512_v4
+122/620771/campos_512_v4
+122/621661/campos_512_v4
+122/621747/campos_512_v4
+122/622525/campos_512_v4
+122/622839/campos_512_v4
+122/622937/campos_512_v4
+122/623045/campos_512_v4
+122/623081/campos_512_v4
+122/623240/campos_512_v4
+122/623653/campos_512_v4
+122/623960/campos_512_v4
+122/624130/campos_512_v4
+122/624351/campos_512_v4
+122/624462/campos_512_v4
+122/624766/campos_512_v4
+123/625197/campos_512_v4
+123/626506/campos_512_v4
+123/626850/campos_512_v4
+123/627286/campos_512_v4
+123/627303/campos_512_v4
+123/627554/campos_512_v4
+123/627571/campos_512_v4
+123/627736/campos_512_v4
+123/628323/campos_512_v4
+123/629908/campos_512_v4
+124/630026/campos_512_v4
+124/630596/campos_512_v4
+124/630749/campos_512_v4
+124/630964/campos_512_v4
+124/631603/campos_512_v4
+124/631990/campos_512_v4
+124/632508/campos_512_v4
+124/632526/campos_512_v4
+124/632600/campos_512_v4
+124/632864/campos_512_v4
+124/633051/campos_512_v4
+124/633383/campos_512_v4
+124/633860/campos_512_v4
+124/634029/campos_512_v4
+124/634040/campos_512_v4
+124/634463/campos_512_v4
+124/634634/campos_512_v4
+124/634695/campos_512_v4
+124/634762/campos_512_v4
+125/635251/campos_512_v4
+125/636038/campos_512_v4
+125/636126/campos_512_v4
+125/636463/campos_512_v4
+125/636880/campos_512_v4
+125/637183/campos_512_v4
+125/637206/campos_512_v4
+125/637668/campos_512_v4
+125/638067/campos_512_v4
+125/638104/campos_512_v4
+125/638140/campos_512_v4
+125/638266/campos_512_v4
+125/638530/campos_512_v4
+125/638559/campos_512_v4
+125/638636/campos_512_v4
+125/638699/campos_512_v4
+125/638774/campos_512_v4
+125/638779/campos_512_v4
+125/638901/campos_512_v4
+127/645161/campos_512_v4
+127/646133/campos_512_v4
+127/646189/campos_512_v4
+127/646325/campos_512_v4
+127/646435/campos_512_v4
+127/646529/campos_512_v4
+127/647248/campos_512_v4
+127/648247/campos_512_v4
+127/648798/campos_512_v4
+127/648861/campos_512_v4
+127/649557/campos_512_v4
+127/649930/campos_512_v4
+128/650519/campos_512_v4
+128/650998/campos_512_v4
+128/651513/campos_512_v4
+128/651730/campos_512_v4
+128/651816/campos_512_v4
+128/651856/campos_512_v4
+128/652452/campos_512_v4
+128/652647/campos_512_v4
+128/652809/campos_512_v4
+128/652818/campos_512_v4
+128/652981/campos_512_v4
+128/653003/campos_512_v4
+128/653029/campos_512_v4
+128/653230/campos_512_v4
+128/653455/campos_512_v4
+128/653695/campos_512_v4
+128/654502/campos_512_v4
+128/654981/campos_512_v4
+129/655334/campos_512_v4
+129/655914/campos_512_v4
+129/656303/campos_512_v4
+129/656705/campos_512_v4
+129/656993/campos_512_v4
+129/657373/campos_512_v4
+129/657933/campos_512_v4
+129/658492/campos_512_v4
+129/658556/campos_512_v4
+129/658561/campos_512_v4
+129/658632/campos_512_v4
+129/659261/campos_512_v4
+129/659474/campos_512_v4
+129/659569/campos_512_v4
+129/659643/campos_512_v4
+13/75587/campos_512_v4
+13/75637/campos_512_v4
+13/76190/campos_512_v4
+13/76297/campos_512_v4
+13/76441/campos_512_v4
+13/76733/campos_512_v4
+13/76859/campos_512_v4
+13/77021/campos_512_v4
+13/78345/campos_512_v4
+13/78346/campos_512_v4
+13/78370/campos_512_v4
+13/79109/campos_512_v4
+13/79873/campos_512_v4
+130/660403/campos_512_v4
+130/660732/campos_512_v4
+130/661281/campos_512_v4
+130/661378/campos_512_v4
+130/661447/campos_512_v4
+130/661514/campos_512_v4
+130/662284/campos_512_v4
+130/662532/campos_512_v4
+130/662724/campos_512_v4
+130/662858/campos_512_v4
+130/663720/campos_512_v4
+130/663822/campos_512_v4
+130/664011/campos_512_v4
+130/664052/campos_512_v4
+130/664200/campos_512_v4
+130/664352/campos_512_v4
+130/664679/campos_512_v4
+130/664737/campos_512_v4
+130/664852/campos_512_v4
+130/664986/campos_512_v4
+131/665081/campos_512_v4
+131/665232/campos_512_v4
+131/665606/campos_512_v4
+131/666039/campos_512_v4
+131/666369/campos_512_v4
+131/666377/campos_512_v4
+131/666539/campos_512_v4
+131/666632/campos_512_v4
+131/666724/campos_512_v4
+131/666744/campos_512_v4
+131/666842/campos_512_v4
+131/667050/campos_512_v4
+131/667672/campos_512_v4
+131/667902/campos_512_v4
+131/668070/campos_512_v4
+131/668491/campos_512_v4
+131/668669/campos_512_v4
+131/668699/campos_512_v4
+131/668704/campos_512_v4
+131/668795/campos_512_v4
+131/669189/campos_512_v4
+132/671102/campos_512_v4
+132/671177/campos_512_v4
+132/671449/campos_512_v4
+132/671618/campos_512_v4
+132/672046/campos_512_v4
+132/672643/campos_512_v4
+132/672796/campos_512_v4
+132/673121/campos_512_v4
+132/673183/campos_512_v4
+132/673612/campos_512_v4
+132/673752/campos_512_v4
+132/674267/campos_512_v4
+132/674833/campos_512_v4
+133/675054/campos_512_v4
+133/675388/campos_512_v4
+133/675640/campos_512_v4
+133/675688/campos_512_v4
+133/675893/campos_512_v4
+133/676509/campos_512_v4
+133/676640/campos_512_v4
+133/676701/campos_512_v4
+133/676961/campos_512_v4
+133/677049/campos_512_v4
+133/677257/campos_512_v4
+133/677672/campos_512_v4
+133/678026/campos_512_v4
+133/678373/campos_512_v4
+133/678599/campos_512_v4
+133/678671/campos_512_v4
+133/679029/campos_512_v4
+133/679159/campos_512_v4
+133/679371/campos_512_v4
+133/679385/campos_512_v4
+133/680000/campos_512_v4
+134/680003/campos_512_v4
+134/680820/campos_512_v4
+134/680859/campos_512_v4
+134/680916/campos_512_v4
+134/681389/campos_512_v4
+134/681733/campos_512_v4
+134/681779/campos_512_v4
+134/681827/campos_512_v4
+134/681971/campos_512_v4
+134/682167/campos_512_v4
+134/682318/campos_512_v4
+134/682814/campos_512_v4
+134/683314/campos_512_v4
+134/683542/campos_512_v4
+134/684607/campos_512_v4
+134/684735/campos_512_v4
+134/684969/campos_512_v4
+135/685158/campos_512_v4
+135/685287/campos_512_v4
+135/685549/campos_512_v4
+135/685703/campos_512_v4
+135/686103/campos_512_v4
+135/686300/campos_512_v4
+135/686562/campos_512_v4
+135/686725/campos_512_v4
+135/686812/campos_512_v4
+135/687034/campos_512_v4
+135/687066/campos_512_v4
+135/687367/campos_512_v4
+135/687405/campos_512_v4
+135/687724/campos_512_v4
+135/688062/campos_512_v4
+135/688144/campos_512_v4
+135/688511/campos_512_v4
+135/688616/campos_512_v4
+135/689026/campos_512_v4
+135/689191/campos_512_v4
+135/689539/campos_512_v4
+135/689570/campos_512_v4
+135/689602/campos_512_v4
+136/690608/campos_512_v4
+136/690803/campos_512_v4
+136/691034/campos_512_v4
+136/691729/campos_512_v4
+136/692330/campos_512_v4
+136/693094/campos_512_v4
+136/693406/campos_512_v4
+136/693433/campos_512_v4
+136/693503/campos_512_v4
+136/693987/campos_512_v4
+136/694244/campos_512_v4
+136/694564/campos_512_v4
+136/694936/campos_512_v4
+137/695282/campos_512_v4
+137/695648/campos_512_v4
+137/695870/campos_512_v4
+137/695924/campos_512_v4
+137/696019/campos_512_v4
+137/696117/campos_512_v4
+137/696449/campos_512_v4
+137/696456/campos_512_v4
+137/697095/campos_512_v4
+137/697121/campos_512_v4
+137/697314/campos_512_v4
+137/698027/campos_512_v4
+137/698158/campos_512_v4
+137/698185/campos_512_v4
+137/698187/campos_512_v4
+137/698254/campos_512_v4
+137/698321/campos_512_v4
+137/698414/campos_512_v4
+137/698499/campos_512_v4
+137/698544/campos_512_v4
+137/698832/campos_512_v4
+137/699051/campos_512_v4
+137/699574/campos_512_v4
+137/699794/campos_512_v4
+137/699933/campos_512_v4
+138/700898/campos_512_v4
+138/701172/campos_512_v4
+138/701224/campos_512_v4
+138/701236/campos_512_v4
+138/701569/campos_512_v4
+138/702223/campos_512_v4
+138/702561/campos_512_v4
+138/703034/campos_512_v4
+138/703344/campos_512_v4
+138/703900/campos_512_v4
+138/704795/campos_512_v4
+138/704829/campos_512_v4
+139/705807/campos_512_v4
+139/705873/campos_512_v4
+139/705996/campos_512_v4
+139/706067/campos_512_v4
+139/706951/campos_512_v4
+139/707015/campos_512_v4
+139/707165/campos_512_v4
+139/707347/campos_512_v4
+139/707378/campos_512_v4
+139/707392/campos_512_v4
+139/707393/campos_512_v4
+139/708423/campos_512_v4
+139/708505/campos_512_v4
+139/708617/campos_512_v4
+139/709093/campos_512_v4
+139/709261/campos_512_v4
+139/709291/campos_512_v4
+14/80261/campos_512_v4
+14/81049/campos_512_v4
+14/81155/campos_512_v4
+14/81161/campos_512_v4
+14/81347/campos_512_v4
+14/81405/campos_512_v4
+14/81598/campos_512_v4
+14/81905/campos_512_v4
+14/82441/campos_512_v4
+14/82786/campos_512_v4
+14/83267/campos_512_v4
+14/83762/campos_512_v4
+14/84683/campos_512_v4
+140/710513/campos_512_v4
+140/710622/campos_512_v4
+140/710981/campos_512_v4
+140/711857/campos_512_v4
+140/712068/campos_512_v4
+140/712147/campos_512_v4
+140/712652/campos_512_v4
+140/714435/campos_512_v4
+141/715115/campos_512_v4
+141/715294/campos_512_v4
+141/715550/campos_512_v4
+141/715708/campos_512_v4
+141/715978/campos_512_v4
+141/716273/campos_512_v4
+141/716721/campos_512_v4
+141/716839/campos_512_v4
+141/717465/campos_512_v4
+141/717630/campos_512_v4
+141/717758/campos_512_v4
+141/718047/campos_512_v4
+141/718083/campos_512_v4
+141/718994/campos_512_v4
+141/719203/campos_512_v4
+141/719779/campos_512_v4
+141/719892/campos_512_v4
+141/719989/campos_512_v4
+141/719993/campos_512_v4
+142/720230/campos_512_v4
+142/720290/campos_512_v4
+142/720539/campos_512_v4
+142/720540/campos_512_v4
+142/720638/campos_512_v4
+142/720909/campos_512_v4
+142/721094/campos_512_v4
+142/721147/campos_512_v4
+142/721451/campos_512_v4
+142/721802/campos_512_v4
+142/722170/campos_512_v4
+142/722207/campos_512_v4
+142/722367/campos_512_v4
+142/723778/campos_512_v4
+142/724280/campos_512_v4
+142/724308/campos_512_v4
+142/724383/campos_512_v4
+142/724684/campos_512_v4
+142/724736/campos_512_v4
+143/725065/campos_512_v4
+143/725475/campos_512_v4
+143/725745/campos_512_v4
+143/726030/campos_512_v4
+143/726060/campos_512_v4
+143/726232/campos_512_v4
+143/726594/campos_512_v4
+143/726660/campos_512_v4
+143/726863/campos_512_v4
+143/726909/campos_512_v4
+143/727186/campos_512_v4
+143/727453/campos_512_v4
+143/727539/campos_512_v4
+143/727659/campos_512_v4
+143/727667/campos_512_v4
+143/728610/campos_512_v4
+143/728705/campos_512_v4
+143/728755/campos_512_v4
+143/729520/campos_512_v4
+143/729522/campos_512_v4
+143/729717/campos_512_v4
+143/729741/campos_512_v4
+143/729819/campos_512_v4
+143/729964/campos_512_v4
+144/731292/campos_512_v4
+144/732286/campos_512_v4
+144/732502/campos_512_v4
+144/732535/campos_512_v4
+144/733316/campos_512_v4
+144/734097/campos_512_v4
+144/734154/campos_512_v4
+144/734172/campos_512_v4
+144/734529/campos_512_v4
+144/734547/campos_512_v4
+144/734648/campos_512_v4
+144/734965/campos_512_v4
+145/735298/campos_512_v4
+145/735634/campos_512_v4
+145/735804/campos_512_v4
+145/736067/campos_512_v4
+145/736139/campos_512_v4
+145/737097/campos_512_v4
+145/737124/campos_512_v4
+145/737208/campos_512_v4
+145/737488/campos_512_v4
+145/738355/campos_512_v4
+145/738624/campos_512_v4
+145/738881/campos_512_v4
+145/738905/campos_512_v4
+145/739020/campos_512_v4
+145/739413/campos_512_v4
+145/739850/campos_512_v4
+146/740188/campos_512_v4
+146/740687/campos_512_v4
+146/740703/campos_512_v4
+146/740755/campos_512_v4
+146/741194/campos_512_v4
+146/741284/campos_512_v4
+146/741970/campos_512_v4
+146/742058/campos_512_v4
+146/742086/campos_512_v4
+146/742107/campos_512_v4
+146/742524/campos_512_v4
+146/742551/campos_512_v4
+146/742736/campos_512_v4
+146/742981/campos_512_v4
+146/743453/campos_512_v4
+146/743464/campos_512_v4
+146/743633/campos_512_v4
+146/743839/campos_512_v4
+146/743985/campos_512_v4
+146/744346/campos_512_v4
+146/744641/campos_512_v4
+146/744711/campos_512_v4
+147/745448/campos_512_v4
+147/745876/campos_512_v4
+147/746276/campos_512_v4
+147/746521/campos_512_v4
+147/747095/campos_512_v4
+147/747462/campos_512_v4
+147/747707/campos_512_v4
+147/747935/campos_512_v4
+147/748759/campos_512_v4
+147/749375/campos_512_v4
+147/749382/campos_512_v4
+147/749511/campos_512_v4
+147/749847/campos_512_v4
+147/749923/campos_512_v4
+148/750645/campos_512_v4
+148/750705/campos_512_v4
+148/750904/campos_512_v4
+148/751968/campos_512_v4
+148/752105/campos_512_v4
+148/752311/campos_512_v4
+148/752595/campos_512_v4
+148/753334/campos_512_v4
+148/753539/campos_512_v4
+148/754470/campos_512_v4
+148/754910/campos_512_v4
+149/755700/campos_512_v4
+149/756001/campos_512_v4
+149/756019/campos_512_v4
+149/756355/campos_512_v4
+149/756643/campos_512_v4
+149/756720/campos_512_v4
+149/756921/campos_512_v4
+149/756954/campos_512_v4
+149/756979/campos_512_v4
+149/757070/campos_512_v4
+149/757143/campos_512_v4
+149/757193/campos_512_v4
+149/757222/campos_512_v4
+149/757244/campos_512_v4
+149/758481/campos_512_v4
+149/758708/campos_512_v4
+149/759596/campos_512_v4
+149/759709/campos_512_v4
+149/759789/campos_512_v4
+149/759925/campos_512_v4
+15/85758/campos_512_v4
+15/85959/campos_512_v4
+15/86712/campos_512_v4
+15/86856/campos_512_v4
+15/87054/campos_512_v4
+15/87496/campos_512_v4
+15/87800/campos_512_v4
+15/88238/campos_512_v4
+15/88254/campos_512_v4
+15/88427/campos_512_v4
+15/88589/campos_512_v4
+15/88663/campos_512_v4
+15/89052/campos_512_v4
+15/89301/campos_512_v4
+15/89454/campos_512_v4
+150/761010/campos_512_v4
+150/761995/campos_512_v4
+150/762014/campos_512_v4
+150/762175/campos_512_v4
+150/762510/campos_512_v4
+150/763587/campos_512_v4
+150/763742/campos_512_v4
+150/764183/campos_512_v4
+150/764762/campos_512_v4
+151/765137/campos_512_v4
+151/765425/campos_512_v4
+151/765426/campos_512_v4
+151/765467/campos_512_v4
+151/765526/campos_512_v4
+151/766097/campos_512_v4
+151/766651/campos_512_v4
+151/767879/campos_512_v4
+151/767887/campos_512_v4
+151/768774/campos_512_v4
+151/769662/campos_512_v4
+152/770684/campos_512_v4
+152/770929/campos_512_v4
+152/770955/campos_512_v4
+152/770965/campos_512_v4
+152/771546/campos_512_v4
+152/771617/campos_512_v4
+152/772440/campos_512_v4
+152/772446/campos_512_v4
+152/772663/campos_512_v4
+152/773213/campos_512_v4
+152/773684/campos_512_v4
+152/774278/campos_512_v4
+152/774283/campos_512_v4
+152/774410/campos_512_v4
+152/774450/campos_512_v4
+152/774629/campos_512_v4
+152/774937/campos_512_v4
+153/775617/campos_512_v4
+153/775675/campos_512_v4
+153/776127/campos_512_v4
+153/776448/campos_512_v4
+153/776451/campos_512_v4
+153/776480/campos_512_v4
+153/776661/campos_512_v4
+153/776824/campos_512_v4
+153/776994/campos_512_v4
+153/777486/campos_512_v4
+153/778203/campos_512_v4
+153/778433/campos_512_v4
+153/778457/campos_512_v4
+153/778898/campos_512_v4
+153/779702/campos_512_v4
+154/780023/campos_512_v4
+154/780759/campos_512_v4
+154/780955/campos_512_v4
+154/781104/campos_512_v4
+154/781187/campos_512_v4
+154/781318/campos_512_v4
+154/781458/campos_512_v4
+154/781951/campos_512_v4
+154/782697/campos_512_v4
+154/783414/campos_512_v4
+154/783853/campos_512_v4
+154/783900/campos_512_v4
+154/784230/campos_512_v4
+155/785148/campos_512_v4
+155/785642/campos_512_v4
+155/786793/campos_512_v4
+155/787007/campos_512_v4
+155/787101/campos_512_v4
+155/787722/campos_512_v4
+155/788174/campos_512_v4
+155/788178/campos_512_v4
+155/788267/campos_512_v4
+155/789059/campos_512_v4
+155/789098/campos_512_v4
+155/789282/campos_512_v4
+155/789358/campos_512_v4
+155/789665/campos_512_v4
+156/790479/campos_512_v4
+156/790486/campos_512_v4
+156/790972/campos_512_v4
+156/791295/campos_512_v4
+156/791492/campos_512_v4
+156/791681/campos_512_v4
+156/792003/campos_512_v4
+156/792004/campos_512_v4
+156/792134/campos_512_v4
+156/792296/campos_512_v4
+156/792708/campos_512_v4
+156/793314/campos_512_v4
+156/794082/campos_512_v4
+156/794397/campos_512_v4
+156/794718/campos_512_v4
+157/795595/campos_512_v4
+157/795613/campos_512_v4
+157/796633/campos_512_v4
+157/796723/campos_512_v4
+157/796993/campos_512_v4
+157/797195/campos_512_v4
+157/797369/campos_512_v4
+157/798107/campos_512_v4
+157/798306/campos_512_v4
+157/798598/campos_512_v4
+157/798627/campos_512_v4
+157/798919/campos_512_v4
+158/800567/campos_512_v4
+158/800924/campos_512_v4
+158/801048/campos_512_v4
+158/802168/campos_512_v4
+158/802623/campos_512_v4
+158/803189/campos_512_v4
+158/803830/campos_512_v4
+158/803833/campos_512_v4
+158/804223/campos_512_v4
+158/804389/campos_512_v4
+159/805789/campos_512_v4
+159/805809/campos_512_v4
+159/805875/campos_512_v4
+159/806042/campos_512_v4
+159/806372/campos_512_v4
+159/806438/campos_512_v4
+159/807351/campos_512_v4
+159/807512/campos_512_v4
+159/807535/campos_512_v4
+159/807574/campos_512_v4
+159/807754/campos_512_v4
+159/807843/campos_512_v4
+159/808177/campos_512_v4
+159/808368/campos_512_v4
+16/90141/campos_512_v4
+16/90147/campos_512_v4
+16/90341/campos_512_v4
+16/90436/campos_512_v4
+16/90642/campos_512_v4
+16/93026/campos_512_v4
+16/93549/campos_512_v4
+16/93898/campos_512_v4
+16/94055/campos_512_v4
+16/94823/campos_512_v4
+17/95177/campos_512_v4
+17/95247/campos_512_v4
+17/95749/campos_512_v4
+17/96027/campos_512_v4
+17/96157/campos_512_v4
+17/96307/campos_512_v4
+17/96326/campos_512_v4
+17/96363/campos_512_v4
+17/96434/campos_512_v4
+17/96650/campos_512_v4
+17/97571/campos_512_v4
+17/97573/campos_512_v4
+17/97698/campos_512_v4
+17/97983/campos_512_v4
+17/98002/campos_512_v4
+17/98622/campos_512_v4
+17/99713/campos_512_v4
+17/99958/campos_512_v4
+2/20578/campos_512_v4
+2/21095/campos_512_v4
+2/21182/campos_512_v4
+2/21252/campos_512_v4
+2/21647/campos_512_v4
+2/22172/campos_512_v4
+2/22206/campos_512_v4
+2/22688/campos_512_v4
+2/23192/campos_512_v4
+2/23339/campos_512_v4
+2/23499/campos_512_v4
+2/23665/campos_512_v4
+2/24423/campos_512_v4
+2/24656/campos_512_v4
+23/125103/campos_512_v4
+23/125479/campos_512_v4
+23/125586/campos_512_v4
+23/125637/campos_512_v4
+23/126138/campos_512_v4
+23/126197/campos_512_v4
+23/126260/campos_512_v4
+23/127175/campos_512_v4
+23/127259/campos_512_v4
+23/127441/campos_512_v4
+23/127688/campos_512_v4
+23/128147/campos_512_v4
+23/128223/campos_512_v4
+23/128357/campos_512_v4
+23/128471/campos_512_v4
+23/128811/campos_512_v4
+23/129016/campos_512_v4
+23/129404/campos_512_v4
+23/129826/campos_512_v4
+24/130153/campos_512_v4
+24/130402/campos_512_v4
+24/130578/campos_512_v4
+24/130704/campos_512_v4
+24/130845/campos_512_v4
+24/130859/campos_512_v4
+24/130975/campos_512_v4
+24/131034/campos_512_v4
+24/131623/campos_512_v4
+24/131915/campos_512_v4
+24/132033/campos_512_v4
+24/132161/campos_512_v4
+24/132245/campos_512_v4
+24/132400/campos_512_v4
+24/132445/campos_512_v4
+24/132826/campos_512_v4
+24/133254/campos_512_v4
+24/133268/campos_512_v4
+24/133350/campos_512_v4
+24/133401/campos_512_v4
+24/133569/campos_512_v4
+24/134164/campos_512_v4
+24/134431/campos_512_v4
+24/134484/campos_512_v4
+24/134538/campos_512_v4
+24/134823/campos_512_v4
+25/135007/campos_512_v4
+25/135462/campos_512_v4
+25/136033/campos_512_v4
+25/136140/campos_512_v4
+25/136586/campos_512_v4
+25/137460/campos_512_v4
+25/137501/campos_512_v4
+25/138096/campos_512_v4
+25/138457/campos_512_v4
+25/138639/campos_512_v4
+25/138787/campos_512_v4
+25/139253/campos_512_v4
+25/139501/campos_512_v4
+25/139811/campos_512_v4
+25/139814/campos_512_v4
+26/140314/campos_512_v4
+26/140685/campos_512_v4
+26/140774/campos_512_v4
+26/141263/campos_512_v4
+26/141448/campos_512_v4
+26/141539/campos_512_v4
+26/141704/campos_512_v4
+26/141836/campos_512_v4
+26/141921/campos_512_v4
+26/143324/campos_512_v4
+26/143552/campos_512_v4
+26/143720/campos_512_v4
+26/144474/campos_512_v4
+26/144540/campos_512_v4
+26/144683/campos_512_v4
+26/144953/campos_512_v4
+27/145028/campos_512_v4
+27/145219/campos_512_v4
+27/145456/campos_512_v4
+27/145541/campos_512_v4
+27/145740/campos_512_v4
+27/145803/campos_512_v4
+27/146190/campos_512_v4
+27/146257/campos_512_v4
+27/146549/campos_512_v4
+27/146898/campos_512_v4
+27/146938/campos_512_v4
+27/148395/campos_512_v4
+27/148880/campos_512_v4
+27/148912/campos_512_v4
+27/149739/campos_512_v4
+27/149884/campos_512_v4
+28/150481/campos_512_v4
+28/150531/campos_512_v4
+28/151031/campos_512_v4
+28/151262/campos_512_v4
+28/151367/campos_512_v4
+28/151606/campos_512_v4
+28/152276/campos_512_v4
+28/152608/campos_512_v4
+28/153466/campos_512_v4
+28/153531/campos_512_v4
+28/153539/campos_512_v4
+28/154677/campos_512_v4
+28/154815/campos_512_v4
+28/154876/campos_512_v4
+29/155096/campos_512_v4
+29/155137/campos_512_v4
+29/155518/campos_512_v4
+29/155951/campos_512_v4
+29/155968/campos_512_v4
+29/156456/campos_512_v4
+29/156832/campos_512_v4
+29/158337/campos_512_v4
+29/158731/campos_512_v4
+29/159511/campos_512_v4
+29/159528/campos_512_v4
+29/159670/campos_512_v4
+30/160010/campos_512_v4
+30/160085/campos_512_v4
+30/160257/campos_512_v4
+30/160483/campos_512_v4
+30/160505/campos_512_v4
+30/160530/campos_512_v4
+30/160665/campos_512_v4
+30/161218/campos_512_v4
+30/161552/campos_512_v4
+30/161572/campos_512_v4
+30/161866/campos_512_v4
+30/163154/campos_512_v4
+30/163535/campos_512_v4
+30/163779/campos_512_v4
+30/163903/campos_512_v4
+30/164513/campos_512_v4
+30/164789/campos_512_v4
+31/165177/campos_512_v4
+31/165374/campos_512_v4
+31/165515/campos_512_v4
+31/166299/campos_512_v4
+31/166338/campos_512_v4
+31/166537/campos_512_v4
+31/166693/campos_512_v4
+31/167005/campos_512_v4
+31/167216/campos_512_v4
+31/167836/campos_512_v4
+31/167932/campos_512_v4
+31/168072/campos_512_v4
+31/168247/campos_512_v4
+31/168837/campos_512_v4
+31/168864/campos_512_v4
+31/168963/campos_512_v4
+31/169996/campos_512_v4
+32/170074/campos_512_v4
+32/170078/campos_512_v4
+32/170088/campos_512_v4
+32/170407/campos_512_v4
+32/170488/campos_512_v4
+32/170692/campos_512_v4
+32/171630/campos_512_v4
+32/171949/campos_512_v4
+32/172106/campos_512_v4
+32/172565/campos_512_v4
+32/172589/campos_512_v4
+32/172767/campos_512_v4
+32/172777/campos_512_v4
+32/172868/campos_512_v4
+32/173761/campos_512_v4
+32/174018/campos_512_v4
+32/174080/campos_512_v4
+32/174137/campos_512_v4
+32/174481/campos_512_v4
+32/174666/campos_512_v4
+32/174888/campos_512_v4
+32/174904/campos_512_v4
+32/174979/campos_512_v4
+33/175017/campos_512_v4
+33/175787/campos_512_v4
+33/175825/campos_512_v4
+33/175874/campos_512_v4
+33/175877/campos_512_v4
+33/176364/campos_512_v4
+33/176609/campos_512_v4
+33/176670/campos_512_v4
+33/176957/campos_512_v4
+33/177382/campos_512_v4
+33/177407/campos_512_v4
+33/177456/campos_512_v4
+33/177561/campos_512_v4
+33/177625/campos_512_v4
+33/178501/campos_512_v4
+33/178542/campos_512_v4
+33/178594/campos_512_v4
+33/178747/campos_512_v4
+33/179078/campos_512_v4
+33/179127/campos_512_v4
+33/179161/campos_512_v4
+33/179503/campos_512_v4
+33/179807/campos_512_v4
+33/179957/campos_512_v4
+34/180305/campos_512_v4
+34/180375/campos_512_v4
+34/181230/campos_512_v4
+34/181360/campos_512_v4
+34/181672/campos_512_v4
+34/181975/campos_512_v4
+34/183514/campos_512_v4
+34/184003/campos_512_v4
+34/184216/campos_512_v4
+34/184285/campos_512_v4
+34/184291/campos_512_v4
+34/184423/campos_512_v4
+34/184453/campos_512_v4
+34/184473/campos_512_v4
+34/184538/campos_512_v4
+34/184684/campos_512_v4
+34/184847/campos_512_v4
+34/184850/campos_512_v4
+35/185096/campos_512_v4
+35/185774/campos_512_v4
+35/186131/campos_512_v4
+35/186167/campos_512_v4
+35/186180/campos_512_v4
+35/186280/campos_512_v4
+35/186429/campos_512_v4
+35/186658/campos_512_v4
+35/187323/campos_512_v4
+35/189136/campos_512_v4
+35/189221/campos_512_v4
+35/189252/campos_512_v4
+35/189496/campos_512_v4
+35/189957/campos_512_v4
+36/190041/campos_512_v4
+36/191024/campos_512_v4
+36/191141/campos_512_v4
+36/191907/campos_512_v4
+36/191993/campos_512_v4
+36/192103/campos_512_v4
+36/192203/campos_512_v4
+36/192294/campos_512_v4
+36/192493/campos_512_v4
+36/193026/campos_512_v4
+36/193309/campos_512_v4
+36/193379/campos_512_v4
+36/193852/campos_512_v4
+36/194854/campos_512_v4
+37/195152/campos_512_v4
+37/195681/campos_512_v4
+37/195711/campos_512_v4
+37/195993/campos_512_v4
+37/196002/campos_512_v4
+37/196275/campos_512_v4
+37/196334/campos_512_v4
+37/196613/campos_512_v4
+37/196696/campos_512_v4
+37/196754/campos_512_v4
+37/196899/campos_512_v4
+37/197340/campos_512_v4
+37/197398/campos_512_v4
+37/197627/campos_512_v4
+37/197806/campos_512_v4
+37/199310/campos_512_v4
+37/199399/campos_512_v4
+37/199589/campos_512_v4
+37/199720/campos_512_v4
+37/199721/campos_512_v4
+38/200077/campos_512_v4
+38/200197/campos_512_v4
+38/200656/campos_512_v4
+38/201779/campos_512_v4
+38/202017/campos_512_v4
+38/202111/campos_512_v4
+38/202211/campos_512_v4
+38/202248/campos_512_v4
+38/203356/campos_512_v4
+38/203666/campos_512_v4
+38/203707/campos_512_v4
+38/203863/campos_512_v4
+38/203957/campos_512_v4
+38/204017/campos_512_v4
+38/204057/campos_512_v4
+38/204546/campos_512_v4
+38/204556/campos_512_v4
+4/30303/campos_512_v4
+4/30575/campos_512_v4
+4/30708/campos_512_v4
+4/31409/campos_512_v4
+4/31626/campos_512_v4
+4/31783/campos_512_v4
+4/32095/campos_512_v4
+4/32438/campos_512_v4
+4/32736/campos_512_v4
+4/33076/campos_512_v4
+4/33584/campos_512_v4
+4/33895/campos_512_v4
+4/34210/campos_512_v4
+4/34479/campos_512_v4
+40/210420/campos_512_v4
+40/210772/campos_512_v4
+40/211206/campos_512_v4
+40/211281/campos_512_v4
+40/211481/campos_512_v4
+40/211651/campos_512_v4
+40/211660/campos_512_v4
+40/211931/campos_512_v4
+40/212017/campos_512_v4
+40/212182/campos_512_v4
+40/212191/campos_512_v4
+40/212390/campos_512_v4
+40/212736/campos_512_v4
+40/213100/campos_512_v4
+40/213684/campos_512_v4
+40/214015/campos_512_v4
+40/214113/campos_512_v4
+40/214703/campos_512_v4
+40/214829/campos_512_v4
+41/215226/campos_512_v4
+41/215232/campos_512_v4
+41/215242/campos_512_v4
+41/215459/campos_512_v4
+41/215485/campos_512_v4
+41/215520/campos_512_v4
+41/215551/campos_512_v4
+41/216119/campos_512_v4
+41/216239/campos_512_v4
+41/216751/campos_512_v4
+41/216928/campos_512_v4
+41/217406/campos_512_v4
+41/217611/campos_512_v4
+41/217959/campos_512_v4
+41/218162/campos_512_v4
+41/218409/campos_512_v4
+41/218557/campos_512_v4
+41/218772/campos_512_v4
+41/219359/campos_512_v4
+41/219423/campos_512_v4
+41/219446/campos_512_v4
+41/219465/campos_512_v4
+41/219591/campos_512_v4
+41/219685/campos_512_v4
+42/220162/campos_512_v4
+42/220164/campos_512_v4
+42/220725/campos_512_v4
+42/220944/campos_512_v4
+42/221126/campos_512_v4
+42/221319/campos_512_v4
+42/221526/campos_512_v4
+42/222141/campos_512_v4
+42/222159/campos_512_v4
+42/222416/campos_512_v4
+42/222828/campos_512_v4
+42/222928/campos_512_v4
+42/222977/campos_512_v4
+42/223077/campos_512_v4
+42/223131/campos_512_v4
+42/223261/campos_512_v4
+42/223599/campos_512_v4
+42/223745/campos_512_v4
+42/223995/campos_512_v4
+42/224342/campos_512_v4
+42/224536/campos_512_v4
+42/224798/campos_512_v4
+42/224942/campos_512_v4
+43/225046/campos_512_v4
+43/225666/campos_512_v4
+43/225684/campos_512_v4
+43/225730/campos_512_v4
+43/225734/campos_512_v4
+43/225767/campos_512_v4
+43/225984/campos_512_v4
+43/226122/campos_512_v4
+43/226155/campos_512_v4
+43/226168/campos_512_v4
+43/226249/campos_512_v4
+43/226362/campos_512_v4
+43/227145/campos_512_v4
+43/227999/campos_512_v4
+43/228455/campos_512_v4
+43/228465/campos_512_v4
+43/228583/campos_512_v4
+43/228624/campos_512_v4
+43/228784/campos_512_v4
+43/229588/campos_512_v4
+44/230049/campos_512_v4
+44/230197/campos_512_v4
+44/230419/campos_512_v4
+44/230679/campos_512_v4
+44/231512/campos_512_v4
+44/231718/campos_512_v4
+44/232302/campos_512_v4
+44/232633/campos_512_v4
+44/232665/campos_512_v4
+44/232779/campos_512_v4
+44/233354/campos_512_v4
+44/233713/campos_512_v4
+44/233929/campos_512_v4
+44/233997/campos_512_v4
+44/234058/campos_512_v4
+44/234063/campos_512_v4
+44/234163/campos_512_v4
+44/234277/campos_512_v4
+45/235300/campos_512_v4
+45/235838/campos_512_v4
+45/236000/campos_512_v4
+45/236159/campos_512_v4
+45/236374/campos_512_v4
+45/236404/campos_512_v4
+45/236413/campos_512_v4
+45/236895/campos_512_v4
+45/237188/campos_512_v4
+45/237390/campos_512_v4
+45/237417/campos_512_v4
+45/237786/campos_512_v4
+45/237937/campos_512_v4
+45/238097/campos_512_v4
+45/238126/campos_512_v4
+45/238342/campos_512_v4
+45/238445/campos_512_v4
+45/238680/campos_512_v4
+45/238720/campos_512_v4
+45/238993/campos_512_v4
+45/239041/campos_512_v4
+45/239338/campos_512_v4
+46/240237/campos_512_v4
+46/240319/campos_512_v4
+46/240331/campos_512_v4
+46/240438/campos_512_v4
+46/240969/campos_512_v4
+46/241250/campos_512_v4
+46/241999/campos_512_v4
+46/242257/campos_512_v4
+46/242372/campos_512_v4
+46/242873/campos_512_v4
+46/243555/campos_512_v4
+46/243592/campos_512_v4
+46/244026/campos_512_v4
+46/244453/campos_512_v4
+46/244541/campos_512_v4
+46/244588/campos_512_v4
+47/245233/campos_512_v4
+47/245330/campos_512_v4
+47/245349/campos_512_v4
+47/245753/campos_512_v4
+47/246055/campos_512_v4
+47/246203/campos_512_v4
+47/246287/campos_512_v4
+47/246860/campos_512_v4
+47/247399/campos_512_v4
+47/247583/campos_512_v4
+47/247985/campos_512_v4
+47/248530/campos_512_v4
+47/248914/campos_512_v4
+47/249111/campos_512_v4
+47/249262/campos_512_v4
+47/249266/campos_512_v4
+47/249518/campos_512_v4
+47/249607/campos_512_v4
+47/249789/campos_512_v4
+47/249895/campos_512_v4
+47/249911/campos_512_v4
+48/250144/campos_512_v4
+48/250743/campos_512_v4
+48/251243/campos_512_v4
+48/251379/campos_512_v4
+48/251386/campos_512_v4
+48/251491/campos_512_v4
+48/251601/campos_512_v4
+48/252032/campos_512_v4
+48/252084/campos_512_v4
+48/252539/campos_512_v4
+48/252991/campos_512_v4
+48/253513/campos_512_v4
+48/253816/campos_512_v4
+48/253871/campos_512_v4
+48/253904/campos_512_v4
+48/253968/campos_512_v4
+48/254306/campos_512_v4
+48/254354/campos_512_v4
+48/254481/campos_512_v4
+48/254932/campos_512_v4
+49/255426/campos_512_v4
+49/255523/campos_512_v4
+49/255576/campos_512_v4
+49/255638/campos_512_v4
+49/255673/campos_512_v4
+49/255730/campos_512_v4
+49/255884/campos_512_v4
+49/256078/campos_512_v4
+49/256085/campos_512_v4
+49/256569/campos_512_v4
+49/256796/campos_512_v4
+49/256827/campos_512_v4
+49/257224/campos_512_v4
+49/257250/campos_512_v4
+49/257844/campos_512_v4
+49/257888/campos_512_v4
+49/258033/campos_512_v4
+49/258083/campos_512_v4
+49/258240/campos_512_v4
+49/258355/campos_512_v4
+49/258418/campos_512_v4
+49/258456/campos_512_v4
+49/258479/campos_512_v4
+49/258614/campos_512_v4
+49/258810/campos_512_v4
+49/259000/campos_512_v4
+49/259217/campos_512_v4
+49/259374/campos_512_v4
+49/259505/campos_512_v4
+49/259549/campos_512_v4
+49/259653/campos_512_v4
+49/259706/campos_512_v4
+5/35420/campos_512_v4
+5/36189/campos_512_v4
+5/36617/campos_512_v4
+5/37597/campos_512_v4
+5/37811/campos_512_v4
+5/37937/campos_512_v4
+5/38213/campos_512_v4
+5/38317/campos_512_v4
+5/38409/campos_512_v4
+5/38446/campos_512_v4
+5/39028/campos_512_v4
+5/39168/campos_512_v4
+5/39381/campos_512_v4
+5/39408/campos_512_v4
+5/39739/campos_512_v4
+50/260047/campos_512_v4
+50/260440/campos_512_v4
+50/260448/campos_512_v4
+50/260529/campos_512_v4
+50/260819/campos_512_v4
+50/260971/campos_512_v4
+50/261154/campos_512_v4
+50/261391/campos_512_v4
+50/261450/campos_512_v4
+50/261668/campos_512_v4
+50/261963/campos_512_v4
+50/261973/campos_512_v4
+50/262338/campos_512_v4
+50/262356/campos_512_v4
+50/263051/campos_512_v4
+50/263227/campos_512_v4
+50/263272/campos_512_v4
+50/263515/campos_512_v4
+50/264680/campos_512_v4
+50/264765/campos_512_v4
+51/265154/campos_512_v4
+51/265424/campos_512_v4
+51/265692/campos_512_v4
+51/265856/campos_512_v4
+51/265985/campos_512_v4
+51/266220/campos_512_v4
+51/266558/campos_512_v4
+51/266599/campos_512_v4
+51/266683/campos_512_v4
+51/268127/campos_512_v4
+51/268300/campos_512_v4
+51/268314/campos_512_v4
+51/268449/campos_512_v4
+51/268842/campos_512_v4
+51/269007/campos_512_v4
+51/269270/campos_512_v4
+51/269552/campos_512_v4
+52/270213/campos_512_v4
+52/270776/campos_512_v4
+52/270779/campos_512_v4
+52/270932/campos_512_v4
+52/271430/campos_512_v4
+52/271741/campos_512_v4
+52/272063/campos_512_v4
+52/272140/campos_512_v4
+52/272240/campos_512_v4
+52/272371/campos_512_v4
+52/272413/campos_512_v4
+52/272510/campos_512_v4
+52/272605/campos_512_v4
+52/272690/campos_512_v4
+52/272896/campos_512_v4
+52/273012/campos_512_v4
+52/273120/campos_512_v4
+52/273186/campos_512_v4
+52/273292/campos_512_v4
+52/273321/campos_512_v4
+52/273352/campos_512_v4
+52/273356/campos_512_v4
+52/273368/campos_512_v4
+52/273718/campos_512_v4
+52/273772/campos_512_v4
+52/273954/campos_512_v4
+52/274018/campos_512_v4
+52/274314/campos_512_v4
+52/274411/campos_512_v4
+53/275009/campos_512_v4
+53/276141/campos_512_v4
+53/277138/campos_512_v4
+53/277391/campos_512_v4
+53/277410/campos_512_v4
+53/277565/campos_512_v4
+53/277781/campos_512_v4
+53/277786/campos_512_v4
+53/277814/campos_512_v4
+53/278143/campos_512_v4
+53/278199/campos_512_v4
+53/278322/campos_512_v4
+53/278432/campos_512_v4
+53/278619/campos_512_v4
+53/278947/campos_512_v4
+53/279279/campos_512_v4
+53/279342/campos_512_v4
+53/279631/campos_512_v4
+53/279711/campos_512_v4
+54/280290/campos_512_v4
+54/280435/campos_512_v4
+54/280631/campos_512_v4
+54/281393/campos_512_v4
+54/281397/campos_512_v4
+54/281653/campos_512_v4
+54/282269/campos_512_v4
+54/282553/campos_512_v4
+54/282951/campos_512_v4
+54/283020/campos_512_v4
+54/283174/campos_512_v4
+54/283340/campos_512_v4
+54/283546/campos_512_v4
+54/283822/campos_512_v4
+54/283945/campos_512_v4
+54/284340/campos_512_v4
+54/284410/campos_512_v4
+54/284465/campos_512_v4
+55/285026/campos_512_v4
+55/285476/campos_512_v4
+55/285812/campos_512_v4
+55/286192/campos_512_v4
+55/286835/campos_512_v4
+55/287107/campos_512_v4
+55/287170/campos_512_v4
+55/287710/campos_512_v4
+55/287785/campos_512_v4
+55/287817/campos_512_v4
+55/288137/campos_512_v4
+55/288335/campos_512_v4
+55/288616/campos_512_v4
+55/288628/campos_512_v4
+55/288766/campos_512_v4
+55/288864/campos_512_v4
+55/288988/campos_512_v4
+55/289066/campos_512_v4
+55/289157/campos_512_v4
+55/289250/campos_512_v4
+55/289386/campos_512_v4
+55/289573/campos_512_v4
+55/289620/campos_512_v4
+56/290397/campos_512_v4
+56/290401/campos_512_v4
+56/290895/campos_512_v4
+56/290910/campos_512_v4
+56/291232/campos_512_v4
+56/291800/campos_512_v4
+56/292362/campos_512_v4
+56/292370/campos_512_v4
+56/292971/campos_512_v4
+56/293261/campos_512_v4
+56/293525/campos_512_v4
+56/293686/campos_512_v4
+56/294003/campos_512_v4
+56/294093/campos_512_v4
+56/294195/campos_512_v4
+56/294400/campos_512_v4
+57/295163/campos_512_v4
+57/295172/campos_512_v4
+57/295386/campos_512_v4
+57/295560/campos_512_v4
+57/295565/campos_512_v4
+57/296050/campos_512_v4
+57/296180/campos_512_v4
+57/296192/campos_512_v4
+57/296344/campos_512_v4
+57/296350/campos_512_v4
+57/296466/campos_512_v4
+57/296889/campos_512_v4
+57/297197/campos_512_v4
+57/297253/campos_512_v4
+57/297416/campos_512_v4
+57/297631/campos_512_v4
+57/297896/campos_512_v4
+57/297925/campos_512_v4
+57/298082/campos_512_v4
+57/298128/campos_512_v4
+57/298137/campos_512_v4
+57/298412/campos_512_v4
+57/298516/campos_512_v4
+57/298741/campos_512_v4
+57/299036/campos_512_v4
+57/299243/campos_512_v4
+57/299459/campos_512_v4
+58/300263/campos_512_v4
+58/300453/campos_512_v4
+58/301257/campos_512_v4
+58/301313/campos_512_v4
+58/301321/campos_512_v4
+58/301651/campos_512_v4
+58/301910/campos_512_v4
+58/301966/campos_512_v4
+58/301986/campos_512_v4
+58/302289/campos_512_v4
+58/302579/campos_512_v4
+58/303323/campos_512_v4
+58/303352/campos_512_v4
+58/303558/campos_512_v4
+58/303560/campos_512_v4
+58/303592/campos_512_v4
+58/304335/campos_512_v4
+58/304970/campos_512_v4
+59/305021/campos_512_v4
+59/305124/campos_512_v4
+59/306010/campos_512_v4
+59/306078/campos_512_v4
+59/306347/campos_512_v4
+59/306716/campos_512_v4
+59/306721/campos_512_v4
+59/307556/campos_512_v4
+59/307633/campos_512_v4
+59/308127/campos_512_v4
+59/308857/campos_512_v4
+59/308902/campos_512_v4
+59/309105/campos_512_v4
+59/309238/campos_512_v4
+59/309582/campos_512_v4
+59/309647/campos_512_v4
+59/309846/campos_512_v4
+6/41071/campos_512_v4
+6/41607/campos_512_v4
+6/41672/campos_512_v4
+6/42175/campos_512_v4
+6/42194/campos_512_v4
+6/42361/campos_512_v4
+6/42804/campos_512_v4
+6/42993/campos_512_v4
+6/43561/campos_512_v4
+6/44391/campos_512_v4
+6/44434/campos_512_v4
+6/44805/campos_512_v4
+60/310222/campos_512_v4
+60/310800/campos_512_v4
+60/311718/campos_512_v4
+60/311742/campos_512_v4
+60/311911/campos_512_v4
+60/312066/campos_512_v4
+60/312724/campos_512_v4
+60/312887/campos_512_v4
+60/313378/campos_512_v4
+60/313428/campos_512_v4
+60/313670/campos_512_v4
+60/313937/campos_512_v4
+60/314124/campos_512_v4
+60/314311/campos_512_v4
+60/314383/campos_512_v4
+60/314453/campos_512_v4
+61/315331/campos_512_v4
+61/315356/campos_512_v4
+61/315384/campos_512_v4
+61/315433/campos_512_v4
+61/315562/campos_512_v4
+61/316065/campos_512_v4
+61/316075/campos_512_v4
+61/316469/campos_512_v4
+61/316671/campos_512_v4
+61/317011/campos_512_v4
+61/317155/campos_512_v4
+61/317168/campos_512_v4
+61/317242/campos_512_v4
+61/317283/campos_512_v4
+61/317345/campos_512_v4
+61/317458/campos_512_v4
+61/317861/campos_512_v4
+61/317912/campos_512_v4
+61/318544/campos_512_v4
+61/318576/campos_512_v4
+61/318877/campos_512_v4
+61/318886/campos_512_v4
+61/319154/campos_512_v4
+61/319224/campos_512_v4
+61/319864/campos_512_v4
+62/320575/campos_512_v4
+62/321946/campos_512_v4
+62/322008/campos_512_v4
+62/322209/campos_512_v4
+62/322533/campos_512_v4
+62/322538/campos_512_v4
+62/323418/campos_512_v4
+62/323948/campos_512_v4
+62/324077/campos_512_v4
+62/324138/campos_512_v4
+62/324654/campos_512_v4
+62/324794/campos_512_v4
+63/325501/campos_512_v4
+63/325729/campos_512_v4
+63/326089/campos_512_v4
+63/326124/campos_512_v4
+63/326316/campos_512_v4
+63/326899/campos_512_v4
+63/327455/campos_512_v4
+63/328878/campos_512_v4
+63/328970/campos_512_v4
+63/329320/campos_512_v4
+63/329703/campos_512_v4
+63/329868/campos_512_v4
+64/330160/campos_512_v4
+64/330168/campos_512_v4
+64/330266/campos_512_v4
+64/330598/campos_512_v4
+64/330677/campos_512_v4
+64/331365/campos_512_v4
+64/331678/campos_512_v4
+64/332012/campos_512_v4
+64/333979/campos_512_v4
+64/334344/campos_512_v4
+64/334395/campos_512_v4
+65/335046/campos_512_v4
+65/335168/campos_512_v4
+65/335343/campos_512_v4
+65/335421/campos_512_v4
+65/335463/campos_512_v4
+65/335844/campos_512_v4
+65/336616/campos_512_v4
+65/337106/campos_512_v4
+65/337266/campos_512_v4
+65/337344/campos_512_v4
+65/337948/campos_512_v4
+65/338040/campos_512_v4
+65/338199/campos_512_v4
+65/338710/campos_512_v4
+65/338850/campos_512_v4
+65/338920/campos_512_v4
+65/338954/campos_512_v4
+65/339553/campos_512_v4
+66/340111/campos_512_v4
+66/340229/campos_512_v4
+66/340261/campos_512_v4
+66/340403/campos_512_v4
+66/340582/campos_512_v4
+66/341702/campos_512_v4
+66/342153/campos_512_v4
+66/342326/campos_512_v4
+66/342645/campos_512_v4
+66/344228/campos_512_v4
+66/344353/campos_512_v4
+66/344583/campos_512_v4
+67/345404/campos_512_v4
+67/345734/campos_512_v4
+67/345896/campos_512_v4
+67/345969/campos_512_v4
+67/346036/campos_512_v4
+67/346302/campos_512_v4
+67/346320/campos_512_v4
+67/346344/campos_512_v4
+67/346461/campos_512_v4
+67/346521/campos_512_v4
+67/346755/campos_512_v4
+67/347668/campos_512_v4
+67/347798/campos_512_v4
+67/347931/campos_512_v4
+67/348027/campos_512_v4
+67/348153/campos_512_v4
+67/348664/campos_512_v4
+67/349431/campos_512_v4
+67/349447/campos_512_v4
+67/349967/campos_512_v4
+68/350419/campos_512_v4
+68/350506/campos_512_v4
+68/350569/campos_512_v4
+68/351021/campos_512_v4
+68/351105/campos_512_v4
+68/351119/campos_512_v4
+68/351611/campos_512_v4
+68/351848/campos_512_v4
+68/352071/campos_512_v4
+68/352199/campos_512_v4
+68/352923/campos_512_v4
+68/353385/campos_512_v4
+68/353846/campos_512_v4
+68/353890/campos_512_v4
+68/353982/campos_512_v4
+68/354282/campos_512_v4
+68/354611/campos_512_v4
+68/354665/campos_512_v4
+69/355009/campos_512_v4
+69/355292/campos_512_v4
+69/355746/campos_512_v4
+69/356513/campos_512_v4
+69/356599/campos_512_v4
+69/356674/campos_512_v4
+69/356785/campos_512_v4
+69/357575/campos_512_v4
+69/357665/campos_512_v4
+69/358062/campos_512_v4
+69/358178/campos_512_v4
+69/358339/campos_512_v4
+69/358384/campos_512_v4
+69/358465/campos_512_v4
+69/359062/campos_512_v4
+69/359790/campos_512_v4
+7/45321/campos_512_v4
+7/45380/campos_512_v4
+7/45619/campos_512_v4
+7/45812/campos_512_v4
+7/45825/campos_512_v4
+7/46648/campos_512_v4
+7/46747/campos_512_v4
+7/46929/campos_512_v4
+7/47065/campos_512_v4
+7/47106/campos_512_v4
+7/47541/campos_512_v4
+7/47548/campos_512_v4
+7/47648/campos_512_v4
+7/48005/campos_512_v4
+7/48284/campos_512_v4
+7/48608/campos_512_v4
+7/48904/campos_512_v4
+7/49423/campos_512_v4
+7/49480/campos_512_v4
+70/360965/campos_512_v4
+70/361338/campos_512_v4
+70/361549/campos_512_v4
+70/361776/campos_512_v4
+70/361937/campos_512_v4
+70/362166/campos_512_v4
+70/362328/campos_512_v4
+70/362760/campos_512_v4
+70/363115/campos_512_v4
+70/363717/campos_512_v4
+70/364076/campos_512_v4
+70/364184/campos_512_v4
+70/364341/campos_512_v4
+70/364588/campos_512_v4
+71/365026/campos_512_v4
+71/365062/campos_512_v4
+71/365751/campos_512_v4
+71/365768/campos_512_v4
+71/365925/campos_512_v4
+71/366244/campos_512_v4
+71/366298/campos_512_v4
+71/366472/campos_512_v4
+71/366549/campos_512_v4
+71/366557/campos_512_v4
+71/367819/campos_512_v4
+71/367830/campos_512_v4
+71/367866/campos_512_v4
+71/367972/campos_512_v4
+71/368020/campos_512_v4
+71/368120/campos_512_v4
+71/368235/campos_512_v4
+71/368292/campos_512_v4
+71/368723/campos_512_v4
+71/369002/campos_512_v4
+71/369085/campos_512_v4
+71/369237/campos_512_v4
+71/369379/campos_512_v4
+72/370692/campos_512_v4
+72/370940/campos_512_v4
+72/371057/campos_512_v4
+72/371642/campos_512_v4
+72/371649/campos_512_v4
+72/371660/campos_512_v4
+72/371864/campos_512_v4
+72/372088/campos_512_v4
+72/372199/campos_512_v4
+72/372377/campos_512_v4
+72/372591/campos_512_v4
+72/373131/campos_512_v4
+72/373804/campos_512_v4
+72/373844/campos_512_v4
+72/373871/campos_512_v4
+72/373961/campos_512_v4
+72/374470/campos_512_v4
+72/374544/campos_512_v4
+72/374549/campos_512_v4
+72/374888/campos_512_v4
+73/375627/campos_512_v4
+73/375833/campos_512_v4
+73/376011/campos_512_v4
+73/376029/campos_512_v4
+73/376128/campos_512_v4
+73/376203/campos_512_v4
+73/376698/campos_512_v4
+73/376780/campos_512_v4
+73/377295/campos_512_v4
+73/377915/campos_512_v4
+73/378626/campos_512_v4
+73/378990/campos_512_v4
+73/379116/campos_512_v4
+73/379820/campos_512_v4
+74/381875/campos_512_v4
+74/381967/campos_512_v4
+74/382028/campos_512_v4
+74/382774/campos_512_v4
+74/382803/campos_512_v4
+74/383870/campos_512_v4
+74/384238/campos_512_v4
+74/384467/campos_512_v4
+74/384929/campos_512_v4
+74/384999/campos_512_v4
+75/385005/campos_512_v4
+75/385074/campos_512_v4
+75/385705/campos_512_v4
+75/386106/campos_512_v4
+75/388705/campos_512_v4
+75/388812/campos_512_v4
+75/388856/campos_512_v4
+75/389656/campos_512_v4
+76/390167/campos_512_v4
+76/390630/campos_512_v4
+76/390651/campos_512_v4
+76/391238/campos_512_v4
+76/391429/campos_512_v4
+76/391588/campos_512_v4
+76/391982/campos_512_v4
+76/392215/campos_512_v4
+76/392342/campos_512_v4
+76/392742/campos_512_v4
+76/392944/campos_512_v4
+76/394050/campos_512_v4
+76/394196/campos_512_v4
+76/394606/campos_512_v4
+76/394655/campos_512_v4
+76/394772/campos_512_v4
+77/395069/campos_512_v4
+77/395353/campos_512_v4
+77/395957/campos_512_v4
+77/396162/campos_512_v4
+77/396541/campos_512_v4
+77/396930/campos_512_v4
+77/397183/campos_512_v4
+77/397313/campos_512_v4
+77/397374/campos_512_v4
+77/397923/campos_512_v4
+77/398684/campos_512_v4
+77/398771/campos_512_v4
+77/399049/campos_512_v4
+77/399399/campos_512_v4
+78/400128/campos_512_v4
+78/400606/campos_512_v4
+78/400615/campos_512_v4
+78/400714/campos_512_v4
+78/400910/campos_512_v4
+78/401689/campos_512_v4
+78/401729/campos_512_v4
+78/401842/campos_512_v4
+78/402054/campos_512_v4
+78/402396/campos_512_v4
+78/402694/campos_512_v4
+78/402854/campos_512_v4
+78/402897/campos_512_v4
+78/403420/campos_512_v4
+78/403694/campos_512_v4
+78/403723/campos_512_v4
+78/403731/campos_512_v4
+78/404130/campos_512_v4
+78/404276/campos_512_v4
+78/404544/campos_512_v4
+78/404742/campos_512_v4
+79/405181/campos_512_v4
+79/405249/campos_512_v4
+79/405330/campos_512_v4
+79/405365/campos_512_v4
+79/405369/campos_512_v4
+79/405564/campos_512_v4
+79/405584/campos_512_v4
+79/405800/campos_512_v4
+79/405832/campos_512_v4
+79/406167/campos_512_v4
+79/406420/campos_512_v4
+79/407062/campos_512_v4
+79/407656/campos_512_v4
+79/408562/campos_512_v4
+79/409150/campos_512_v4
+8/50256/campos_512_v4
+8/50264/campos_512_v4
+8/50440/campos_512_v4
+8/50517/campos_512_v4
+8/51088/campos_512_v4
+8/51257/campos_512_v4
+8/51388/campos_512_v4
+8/51513/campos_512_v4
+8/51944/campos_512_v4
+8/52004/campos_512_v4
+8/52131/campos_512_v4
+8/52742/campos_512_v4
+8/52998/campos_512_v4
+8/53060/campos_512_v4
+8/53112/campos_512_v4
+8/53161/campos_512_v4
+8/53496/campos_512_v4
+8/53614/campos_512_v4
+8/53826/campos_512_v4
+8/53918/campos_512_v4
+8/54324/campos_512_v4
+8/54472/campos_512_v4
+8/54600/campos_512_v4
+80/410050/campos_512_v4
+80/410171/campos_512_v4
+80/411023/campos_512_v4
+80/411203/campos_512_v4
+80/411504/campos_512_v4
+80/411785/campos_512_v4
+80/411916/campos_512_v4
+80/412062/campos_512_v4
+80/412189/campos_512_v4
+80/412417/campos_512_v4
+80/412430/campos_512_v4
+80/412432/campos_512_v4
+80/412489/campos_512_v4
+80/412977/campos_512_v4
+80/413087/campos_512_v4
+80/413089/campos_512_v4
+80/413946/campos_512_v4
+80/414583/campos_512_v4
+80/414785/campos_512_v4
+81/415242/campos_512_v4
+81/415516/campos_512_v4
+81/415625/campos_512_v4
+81/415636/campos_512_v4
+81/415653/campos_512_v4
+81/416000/campos_512_v4
+81/416007/campos_512_v4
+81/416058/campos_512_v4
+81/416173/campos_512_v4
+81/416204/campos_512_v4
+81/416245/campos_512_v4
+81/416266/campos_512_v4
+81/416293/campos_512_v4
+81/416372/campos_512_v4
+81/416745/campos_512_v4
+81/417096/campos_512_v4
+81/417579/campos_512_v4
+81/418174/campos_512_v4
+81/418178/campos_512_v4
+81/418417/campos_512_v4
+81/419120/campos_512_v4
+81/419182/campos_512_v4
+82/420040/campos_512_v4
+82/420274/campos_512_v4
+82/420832/campos_512_v4
+82/422047/campos_512_v4
+82/422254/campos_512_v4
+82/422622/campos_512_v4
+82/423434/campos_512_v4
+82/423905/campos_512_v4
+82/423955/campos_512_v4
+82/424667/campos_512_v4
+83/425568/campos_512_v4
+83/425674/campos_512_v4
+83/425923/campos_512_v4
+83/426490/campos_512_v4
+83/426609/campos_512_v4
+83/427040/campos_512_v4
+83/428439/campos_512_v4
+83/429124/campos_512_v4
+84/430776/campos_512_v4
+84/430779/campos_512_v4
+84/431822/campos_512_v4
+84/431863/campos_512_v4
+84/431937/campos_512_v4
+84/432736/campos_512_v4
+84/433667/campos_512_v4
+84/433848/campos_512_v4
+84/434406/campos_512_v4
+84/434590/campos_512_v4
+84/434753/campos_512_v4
+84/434889/campos_512_v4
+85/435225/campos_512_v4
+85/435388/campos_512_v4
+85/435635/campos_512_v4
+85/436293/campos_512_v4
+85/436709/campos_512_v4
+85/436711/campos_512_v4
+85/437432/campos_512_v4
+85/437632/campos_512_v4
+85/437644/campos_512_v4
+85/437872/campos_512_v4
+85/438354/campos_512_v4
+85/438643/campos_512_v4
+85/439223/campos_512_v4
+85/439639/campos_512_v4
+86/440319/campos_512_v4
+86/440944/campos_512_v4
+86/441101/campos_512_v4
+86/441444/campos_512_v4
+86/441922/campos_512_v4
+86/442157/campos_512_v4
+86/442534/campos_512_v4
+86/442642/campos_512_v4
+86/442987/campos_512_v4
+86/443007/campos_512_v4
+86/443105/campos_512_v4
+86/443216/campos_512_v4
+86/443277/campos_512_v4
+86/443927/campos_512_v4
+86/444439/campos_512_v4
+86/444721/campos_512_v4
+86/444804/campos_512_v4
+86/445000/campos_512_v4
+87/445032/campos_512_v4
+87/445327/campos_512_v4
+87/445465/campos_512_v4
+87/445507/campos_512_v4
+87/445543/campos_512_v4
+87/445866/campos_512_v4
+87/446541/campos_512_v4
+87/446685/campos_512_v4
+87/446830/campos_512_v4
+87/446988/campos_512_v4
+87/447340/campos_512_v4
+87/447455/campos_512_v4
+87/447523/campos_512_v4
+87/447644/campos_512_v4
+87/447916/campos_512_v4
+87/448184/campos_512_v4
+87/448186/campos_512_v4
+87/448520/campos_512_v4
+87/448679/campos_512_v4
+87/448867/campos_512_v4
+87/449005/campos_512_v4
+88/450038/campos_512_v4
+88/450357/campos_512_v4
+88/450781/campos_512_v4
+88/452049/campos_512_v4
+88/452293/campos_512_v4
+88/453419/campos_512_v4
+88/453521/campos_512_v4
+88/453721/campos_512_v4
+88/454007/campos_512_v4
+88/454512/campos_512_v4
+88/454513/campos_512_v4
+88/454945/campos_512_v4
+88/454969/campos_512_v4
+89/455066/campos_512_v4
+89/456171/campos_512_v4
+89/456216/campos_512_v4
+89/456366/campos_512_v4
+89/456373/campos_512_v4
+89/456696/campos_512_v4
+89/456715/campos_512_v4
+89/456971/campos_512_v4
+89/457238/campos_512_v4
+89/457307/campos_512_v4
+89/457308/campos_512_v4
+89/457712/campos_512_v4
+89/458349/campos_512_v4
+89/458594/campos_512_v4
+89/458674/campos_512_v4
+89/459527/campos_512_v4
+89/459613/campos_512_v4
+89/459770/campos_512_v4
+9/55164/campos_512_v4
+9/56623/campos_512_v4
+9/57100/campos_512_v4
+9/57237/campos_512_v4
+9/58819/campos_512_v4
+9/59045/campos_512_v4
+9/59113/campos_512_v4
+9/59395/campos_512_v4
+90/460169/campos_512_v4
+90/460226/campos_512_v4
+90/460408/campos_512_v4
+90/460455/campos_512_v4
+90/460752/campos_512_v4
+90/460898/campos_512_v4
+90/461674/campos_512_v4
+90/461730/campos_512_v4
+90/461799/campos_512_v4
+90/462069/campos_512_v4
+90/462519/campos_512_v4
+90/462565/campos_512_v4
+90/462670/campos_512_v4
+90/462860/campos_512_v4
+90/462967/campos_512_v4
+90/462987/campos_512_v4
+90/463862/campos_512_v4
+90/464213/campos_512_v4
+91/465369/campos_512_v4
+91/465506/campos_512_v4
+91/465989/campos_512_v4
+91/467995/campos_512_v4
+91/468015/campos_512_v4
+91/468102/campos_512_v4
+91/468527/campos_512_v4
+91/468892/campos_512_v4
+91/469021/campos_512_v4
+91/469094/campos_512_v4
+91/469307/campos_512_v4
+91/469718/campos_512_v4
+92/471197/campos_512_v4
+92/471316/campos_512_v4
+92/471433/campos_512_v4
+92/471789/campos_512_v4
+92/471809/campos_512_v4
+92/472868/campos_512_v4
+92/473240/campos_512_v4
+92/473612/campos_512_v4
+92/473767/campos_512_v4
+92/473900/campos_512_v4
+92/474382/campos_512_v4
+92/474413/campos_512_v4
+92/474563/campos_512_v4
+92/474832/campos_512_v4
+92/474866/campos_512_v4
+92/474960/campos_512_v4
+93/475284/campos_512_v4
+93/475336/campos_512_v4
+93/475356/campos_512_v4
+93/475399/campos_512_v4
+93/475684/campos_512_v4
+93/475858/campos_512_v4
+93/475863/campos_512_v4
+93/476184/campos_512_v4
+93/476192/campos_512_v4
+93/476247/campos_512_v4
+93/476955/campos_512_v4
+93/477679/campos_512_v4
+93/478017/campos_512_v4
+93/478308/campos_512_v4
+93/478313/campos_512_v4
+93/478417/campos_512_v4
+93/478831/campos_512_v4
+93/479085/campos_512_v4
+93/479651/campos_512_v4
+94/480103/campos_512_v4
+94/481441/campos_512_v4
+94/481767/campos_512_v4
+94/482075/campos_512_v4
+94/482976/campos_512_v4
+94/483156/campos_512_v4
+94/483610/campos_512_v4
+94/483811/campos_512_v4
+94/483891/campos_512_v4
+94/484342/campos_512_v4
+94/484537/campos_512_v4
+94/484794/campos_512_v4
+95/485068/campos_512_v4
+95/485775/campos_512_v4
+95/485827/campos_512_v4
+95/485845/campos_512_v4
+95/486514/campos_512_v4
+95/486840/campos_512_v4
+95/487204/campos_512_v4
+95/487735/campos_512_v4
+95/487804/campos_512_v4
+95/488467/campos_512_v4
+95/488604/campos_512_v4
+95/488834/campos_512_v4
+95/489385/campos_512_v4
+95/489763/campos_512_v4
+96/490916/campos_512_v4
+96/490921/campos_512_v4
+96/491715/campos_512_v4
+96/492311/campos_512_v4
+96/492475/campos_512_v4
+96/492684/campos_512_v4
+96/493143/campos_512_v4
+96/493419/campos_512_v4
+96/493523/campos_512_v4
+96/493928/campos_512_v4
+96/494142/campos_512_v4
+96/494498/campos_512_v4
+96/494553/campos_512_v4
+96/494574/campos_512_v4
+96/494828/campos_512_v4
+97/495156/campos_512_v4
+97/495254/campos_512_v4
+97/495265/campos_512_v4
+97/495625/campos_512_v4
+97/495676/campos_512_v4
+97/495754/campos_512_v4
+97/495877/campos_512_v4
+97/495988/campos_512_v4
+97/496058/campos_512_v4
+97/496370/campos_512_v4
+97/496540/campos_512_v4
+97/497152/campos_512_v4
+97/497804/campos_512_v4
+97/498136/campos_512_v4
+97/498491/campos_512_v4
+97/498525/campos_512_v4
+97/498647/campos_512_v4
+97/498753/campos_512_v4
+97/499677/campos_512_v4
+98/500046/campos_512_v4
+98/500316/campos_512_v4
+98/501419/campos_512_v4
+98/501583/campos_512_v4
+98/501658/campos_512_v4
+98/502345/campos_512_v4
+98/502419/campos_512_v4
+98/502753/campos_512_v4
+98/503188/campos_512_v4
+98/503440/campos_512_v4
+98/503590/campos_512_v4
+98/503969/campos_512_v4
+98/504258/campos_512_v4
+98/504341/campos_512_v4
+99/505109/campos_512_v4
+99/505171/campos_512_v4
+99/505741/campos_512_v4
+99/505982/campos_512_v4
+99/506380/campos_512_v4
+99/507394/campos_512_v4
+99/507959/campos_512_v4
+99/508131/campos_512_v4
+99/508793/campos_512_v4
+99/509057/campos_512_v4
+99/509268/campos_512_v4
diff --git a/shell_scripts/raw_img_list/Furnitures.txt b/shell_scripts/raw_img_list/Furnitures.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b0a77e7122af7f41939297785896ec99dd123e41
--- /dev/null
+++ b/shell_scripts/raw_img_list/Furnitures.txt
@@ -0,0 +1,7242 @@
+0/10041/campos_512_v4
+0/10088/campos_512_v4
+0/10195/campos_512_v4
+0/10199/campos_512_v4
+0/10220/campos_512_v4
+0/10243/campos_512_v4
+0/10292/campos_512_v4
+0/10317/campos_512_v4
+0/10329/campos_512_v4
+0/10377/campos_512_v4
+0/10400/campos_512_v4
+0/10424/campos_512_v4
+0/10427/campos_512_v4
+0/10491/campos_512_v4
+0/10595/campos_512_v4
+0/10623/campos_512_v4
+0/10678/campos_512_v4
+0/10692/campos_512_v4
+0/10725/campos_512_v4
+0/10733/campos_512_v4
+0/10759/campos_512_v4
+0/10861/campos_512_v4
+0/10873/campos_512_v4
+0/11164/campos_512_v4
+0/11355/campos_512_v4
+0/11397/campos_512_v4
+0/11498/campos_512_v4
+0/11566/campos_512_v4
+0/11811/campos_512_v4
+0/11889/campos_512_v4
+0/11903/campos_512_v4
+0/11978/campos_512_v4
+0/11998/campos_512_v4
+0/12137/campos_512_v4
+0/12164/campos_512_v4
+0/12213/campos_512_v4
+0/12404/campos_512_v4
+0/12426/campos_512_v4
+0/12557/campos_512_v4
+0/12596/campos_512_v4
+0/12789/campos_512_v4
+0/13245/campos_512_v4
+0/13269/campos_512_v4
+0/13370/campos_512_v4
+0/13465/campos_512_v4
+0/13484/campos_512_v4
+0/13601/campos_512_v4
+0/13791/campos_512_v4
+0/13917/campos_512_v4
+0/14076/campos_512_v4
+0/14139/campos_512_v4
+0/14145/campos_512_v4
+0/14500/campos_512_v4
+0/14558/campos_512_v4
+0/14679/campos_512_v4
+0/14725/campos_512_v4
+0/14729/campos_512_v4
+0/14747/campos_512_v4
+0/14811/campos_512_v4
+1/15018/campos_512_v4
+1/15083/campos_512_v4
+1/15204/campos_512_v4
+1/15483/campos_512_v4
+1/15539/campos_512_v4
+1/15680/campos_512_v4
+1/15719/campos_512_v4
+1/15756/campos_512_v4
+1/15887/campos_512_v4
+1/16058/campos_512_v4
+1/16066/campos_512_v4
+1/16100/campos_512_v4
+1/16157/campos_512_v4
+1/16171/campos_512_v4
+1/16303/campos_512_v4
+1/16634/campos_512_v4
+1/16796/campos_512_v4
+1/17021/campos_512_v4
+1/17080/campos_512_v4
+1/17104/campos_512_v4
+1/17146/campos_512_v4
+1/17256/campos_512_v4
+1/17327/campos_512_v4
+1/17426/campos_512_v4
+1/17512/campos_512_v4
+1/17518/campos_512_v4
+1/17519/campos_512_v4
+1/17527/campos_512_v4
+1/17605/campos_512_v4
+1/17669/campos_512_v4
+1/17808/campos_512_v4
+1/17884/campos_512_v4
+1/18097/campos_512_v4
+1/18377/campos_512_v4
+1/18396/campos_512_v4
+1/18401/campos_512_v4
+1/18476/campos_512_v4
+1/18661/campos_512_v4
+1/18714/campos_512_v4
+1/18774/campos_512_v4
+1/18816/campos_512_v4
+1/18817/campos_512_v4
+1/18969/campos_512_v4
+1/19045/campos_512_v4
+1/19067/campos_512_v4
+1/19266/campos_512_v4
+1/19321/campos_512_v4
+1/19331/campos_512_v4
+1/19415/campos_512_v4
+1/19435/campos_512_v4
+1/19563/campos_512_v4
+1/19633/campos_512_v4
+1/19646/campos_512_v4
+1/19659/campos_512_v4
+1/19897/campos_512_v4
+1/19908/campos_512_v4
+10/60601/campos_512_v4
+10/60971/campos_512_v4
+10/61117/campos_512_v4
+10/61156/campos_512_v4
+10/61345/campos_512_v4
+10/61389/campos_512_v4
+10/61444/campos_512_v4
+10/61629/campos_512_v4
+10/61713/campos_512_v4
+10/61765/campos_512_v4
+10/61780/campos_512_v4
+10/61781/campos_512_v4
+10/61810/campos_512_v4
+10/61836/campos_512_v4
+10/61890/campos_512_v4
+10/61980/campos_512_v4
+10/62090/campos_512_v4
+10/62227/campos_512_v4
+10/62291/campos_512_v4
+10/62292/campos_512_v4
+10/62368/campos_512_v4
+10/62445/campos_512_v4
+10/62492/campos_512_v4
+10/62743/campos_512_v4
+10/62931/campos_512_v4
+10/62970/campos_512_v4
+10/63017/campos_512_v4
+10/63190/campos_512_v4
+10/63245/campos_512_v4
+10/63289/campos_512_v4
+10/63303/campos_512_v4
+10/63341/campos_512_v4
+10/63408/campos_512_v4
+10/63475/campos_512_v4
+10/63508/campos_512_v4
+10/63572/campos_512_v4
+10/63625/campos_512_v4
+10/63804/campos_512_v4
+10/63987/campos_512_v4
+10/64112/campos_512_v4
+10/64276/campos_512_v4
+10/64396/campos_512_v4
+10/64420/campos_512_v4
+10/64467/campos_512_v4
+10/64701/campos_512_v4
+10/64854/campos_512_v4
+100/510031/campos_512_v4
+100/510102/campos_512_v4
+100/510126/campos_512_v4
+100/510574/campos_512_v4
+100/510671/campos_512_v4
+100/510737/campos_512_v4
+100/510832/campos_512_v4
+100/510883/campos_512_v4
+100/510926/campos_512_v4
+100/510976/campos_512_v4
+100/511193/campos_512_v4
+100/511360/campos_512_v4
+100/511421/campos_512_v4
+100/511599/campos_512_v4
+100/511615/campos_512_v4
+100/511697/campos_512_v4
+100/511763/campos_512_v4
+100/511921/campos_512_v4
+100/512026/campos_512_v4
+100/512155/campos_512_v4
+100/512221/campos_512_v4
+100/512240/campos_512_v4
+100/512353/campos_512_v4
+100/512455/campos_512_v4
+100/512481/campos_512_v4
+100/512844/campos_512_v4
+100/512955/campos_512_v4
+100/512995/campos_512_v4
+100/513235/campos_512_v4
+100/513257/campos_512_v4
+100/513281/campos_512_v4
+100/513291/campos_512_v4
+100/513318/campos_512_v4
+100/513355/campos_512_v4
+100/514268/campos_512_v4
+100/514278/campos_512_v4
+100/514511/campos_512_v4
+100/514694/campos_512_v4
+100/514712/campos_512_v4
+100/514756/campos_512_v4
+100/514813/campos_512_v4
+100/514854/campos_512_v4
+100/514897/campos_512_v4
+100/514998/campos_512_v4
+101/515135/campos_512_v4
+101/515196/campos_512_v4
+101/515334/campos_512_v4
+101/515418/campos_512_v4
+101/515559/campos_512_v4
+101/515707/campos_512_v4
+101/515720/campos_512_v4
+101/515741/campos_512_v4
+101/516013/campos_512_v4
+101/516076/campos_512_v4
+101/516097/campos_512_v4
+101/516101/campos_512_v4
+101/516219/campos_512_v4
+101/516232/campos_512_v4
+101/516643/campos_512_v4
+101/516718/campos_512_v4
+101/516744/campos_512_v4
+101/516763/campos_512_v4
+101/516923/campos_512_v4
+101/516999/campos_512_v4
+101/517053/campos_512_v4
+101/517146/campos_512_v4
+101/517307/campos_512_v4
+101/517329/campos_512_v4
+101/517365/campos_512_v4
+101/517482/campos_512_v4
+101/517604/campos_512_v4
+101/517642/campos_512_v4
+101/517664/campos_512_v4
+101/517703/campos_512_v4
+101/517719/campos_512_v4
+101/517721/campos_512_v4
+101/518284/campos_512_v4
+101/518285/campos_512_v4
+101/518388/campos_512_v4
+101/518415/campos_512_v4
+101/518518/campos_512_v4
+101/518698/campos_512_v4
+101/518884/campos_512_v4
+101/518895/campos_512_v4
+101/518950/campos_512_v4
+101/519056/campos_512_v4
+101/519074/campos_512_v4
+101/519125/campos_512_v4
+101/519179/campos_512_v4
+101/519196/campos_512_v4
+101/519573/campos_512_v4
+101/519650/campos_512_v4
+101/519655/campos_512_v4
+101/519783/campos_512_v4
+101/519805/campos_512_v4
+101/519895/campos_512_v4
+101/519918/campos_512_v4
+101/519950/campos_512_v4
+101/519996/campos_512_v4
+102/520108/campos_512_v4
+102/520136/campos_512_v4
+102/520194/campos_512_v4
+102/520839/campos_512_v4
+102/520927/campos_512_v4
+102/520949/campos_512_v4
+102/520957/campos_512_v4
+102/521075/campos_512_v4
+102/521282/campos_512_v4
+102/521375/campos_512_v4
+102/521428/campos_512_v4
+102/521657/campos_512_v4
+102/521982/campos_512_v4
+102/522029/campos_512_v4
+102/522084/campos_512_v4
+102/522091/campos_512_v4
+102/522205/campos_512_v4
+102/522214/campos_512_v4
+102/522616/campos_512_v4
+102/522691/campos_512_v4
+102/522696/campos_512_v4
+102/522776/campos_512_v4
+102/522842/campos_512_v4
+102/522960/campos_512_v4
+102/523093/campos_512_v4
+102/523110/campos_512_v4
+102/523429/campos_512_v4
+102/523496/campos_512_v4
+102/523503/campos_512_v4
+102/523677/campos_512_v4
+102/523781/campos_512_v4
+102/523855/campos_512_v4
+102/523896/campos_512_v4
+102/523946/campos_512_v4
+102/524098/campos_512_v4
+102/524188/campos_512_v4
+102/524362/campos_512_v4
+102/524674/campos_512_v4
+102/524676/campos_512_v4
+102/524718/campos_512_v4
+102/524806/campos_512_v4
+102/524997/campos_512_v4
+103/525046/campos_512_v4
+103/525058/campos_512_v4
+103/525105/campos_512_v4
+103/525419/campos_512_v4
+103/525434/campos_512_v4
+103/525575/campos_512_v4
+103/525579/campos_512_v4
+103/525650/campos_512_v4
+103/525677/campos_512_v4
+103/525716/campos_512_v4
+103/525790/campos_512_v4
+103/525796/campos_512_v4
+103/525818/campos_512_v4
+103/526085/campos_512_v4
+103/526140/campos_512_v4
+103/526222/campos_512_v4
+103/526234/campos_512_v4
+103/526238/campos_512_v4
+103/526245/campos_512_v4
+103/526329/campos_512_v4
+103/526351/campos_512_v4
+103/526369/campos_512_v4
+103/526444/campos_512_v4
+103/526457/campos_512_v4
+103/526561/campos_512_v4
+103/526570/campos_512_v4
+103/526604/campos_512_v4
+103/526710/campos_512_v4
+103/526711/campos_512_v4
+103/526752/campos_512_v4
+103/526844/campos_512_v4
+103/526981/campos_512_v4
+103/527194/campos_512_v4
+103/527294/campos_512_v4
+103/527305/campos_512_v4
+103/527326/campos_512_v4
+103/527547/campos_512_v4
+103/527617/campos_512_v4
+103/527684/campos_512_v4
+103/527707/campos_512_v4
+103/527951/campos_512_v4
+103/527967/campos_512_v4
+103/528017/campos_512_v4
+103/528048/campos_512_v4
+103/528159/campos_512_v4
+103/528169/campos_512_v4
+103/528286/campos_512_v4
+103/528345/campos_512_v4
+103/528456/campos_512_v4
+103/528585/campos_512_v4
+103/528655/campos_512_v4
+103/528739/campos_512_v4
+103/528796/campos_512_v4
+103/529110/campos_512_v4
+103/529198/campos_512_v4
+103/529363/campos_512_v4
+103/529368/campos_512_v4
+103/529593/campos_512_v4
+103/529641/campos_512_v4
+103/529912/campos_512_v4
+104/530129/campos_512_v4
+104/530691/campos_512_v4
+104/530704/campos_512_v4
+104/530983/campos_512_v4
+104/531075/campos_512_v4
+104/531236/campos_512_v4
+104/531292/campos_512_v4
+104/531345/campos_512_v4
+104/531366/campos_512_v4
+104/531490/campos_512_v4
+104/531516/campos_512_v4
+104/531543/campos_512_v4
+104/531580/campos_512_v4
+104/531969/campos_512_v4
+104/532109/campos_512_v4
+104/532111/campos_512_v4
+104/532145/campos_512_v4
+104/532179/campos_512_v4
+104/532228/campos_512_v4
+104/532518/campos_512_v4
+104/532640/campos_512_v4
+104/532708/campos_512_v4
+104/532762/campos_512_v4
+104/532836/campos_512_v4
+104/532838/campos_512_v4
+104/532884/campos_512_v4
+104/532919/campos_512_v4
+104/533139/campos_512_v4
+104/533195/campos_512_v4
+104/533312/campos_512_v4
+104/533369/campos_512_v4
+104/533389/campos_512_v4
+104/533526/campos_512_v4
+104/533825/campos_512_v4
+104/533845/campos_512_v4
+104/533850/campos_512_v4
+104/534185/campos_512_v4
+104/534399/campos_512_v4
+104/534436/campos_512_v4
+104/534458/campos_512_v4
+104/534463/campos_512_v4
+104/534703/campos_512_v4
+105/535123/campos_512_v4
+105/535266/campos_512_v4
+105/535274/campos_512_v4
+105/535285/campos_512_v4
+105/535290/campos_512_v4
+105/535301/campos_512_v4
+105/535397/campos_512_v4
+105/535657/campos_512_v4
+105/535759/campos_512_v4
+105/535812/campos_512_v4
+105/535841/campos_512_v4
+105/535925/campos_512_v4
+105/536010/campos_512_v4
+105/536092/campos_512_v4
+105/536132/campos_512_v4
+105/536147/campos_512_v4
+105/536233/campos_512_v4
+105/536271/campos_512_v4
+105/536290/campos_512_v4
+105/536406/campos_512_v4
+105/536428/campos_512_v4
+105/536434/campos_512_v4
+105/536531/campos_512_v4
+105/536621/campos_512_v4
+105/536739/campos_512_v4
+105/536758/campos_512_v4
+105/536808/campos_512_v4
+105/536830/campos_512_v4
+105/536866/campos_512_v4
+105/536945/campos_512_v4
+105/537061/campos_512_v4
+105/537284/campos_512_v4
+105/537292/campos_512_v4
+105/537528/campos_512_v4
+105/537711/campos_512_v4
+105/537929/campos_512_v4
+105/538223/campos_512_v4
+105/538309/campos_512_v4
+105/538362/campos_512_v4
+105/538649/campos_512_v4
+105/538742/campos_512_v4
+105/538960/campos_512_v4
+105/539027/campos_512_v4
+105/539114/campos_512_v4
+105/539263/campos_512_v4
+105/539425/campos_512_v4
+105/539429/campos_512_v4
+105/539521/campos_512_v4
+105/539744/campos_512_v4
+105/539770/campos_512_v4
+105/539802/campos_512_v4
+105/539994/campos_512_v4
+106/540133/campos_512_v4
+106/540138/campos_512_v4
+106/540140/campos_512_v4
+106/540271/campos_512_v4
+106/540360/campos_512_v4
+106/540362/campos_512_v4
+106/540855/campos_512_v4
+106/541058/campos_512_v4
+106/541149/campos_512_v4
+106/541190/campos_512_v4
+106/541191/campos_512_v4
+106/541237/campos_512_v4
+106/541476/campos_512_v4
+106/541638/campos_512_v4
+106/541767/campos_512_v4
+106/541921/campos_512_v4
+106/541929/campos_512_v4
+106/542180/campos_512_v4
+106/542196/campos_512_v4
+106/542275/campos_512_v4
+106/542349/campos_512_v4
+106/542466/campos_512_v4
+106/542624/campos_512_v4
+106/542654/campos_512_v4
+106/542954/campos_512_v4
+106/542957/campos_512_v4
+106/543065/campos_512_v4
+106/543119/campos_512_v4
+106/543139/campos_512_v4
+106/543146/campos_512_v4
+106/543206/campos_512_v4
+106/543312/campos_512_v4
+106/543319/campos_512_v4
+106/543415/campos_512_v4
+106/543661/campos_512_v4
+106/543679/campos_512_v4
+106/543734/campos_512_v4
+106/543753/campos_512_v4
+106/543770/campos_512_v4
+106/543966/campos_512_v4
+106/544003/campos_512_v4
+106/544201/campos_512_v4
+106/544217/campos_512_v4
+106/544288/campos_512_v4
+106/544448/campos_512_v4
+106/544609/campos_512_v4
+106/544626/campos_512_v4
+106/544669/campos_512_v4
+106/544754/campos_512_v4
+106/544809/campos_512_v4
+106/544838/campos_512_v4
+106/544922/campos_512_v4
+107/545139/campos_512_v4
+107/545258/campos_512_v4
+107/545570/campos_512_v4
+107/545591/campos_512_v4
+107/545801/campos_512_v4
+107/545876/campos_512_v4
+107/545989/campos_512_v4
+107/546032/campos_512_v4
+107/546283/campos_512_v4
+107/546354/campos_512_v4
+107/546409/campos_512_v4
+107/546427/campos_512_v4
+107/546505/campos_512_v4
+107/546512/campos_512_v4
+107/546599/campos_512_v4
+107/546877/campos_512_v4
+107/546948/campos_512_v4
+107/546992/campos_512_v4
+107/547258/campos_512_v4
+107/547511/campos_512_v4
+107/547559/campos_512_v4
+107/547574/campos_512_v4
+107/547743/campos_512_v4
+107/548034/campos_512_v4
+107/548036/campos_512_v4
+107/548070/campos_512_v4
+107/548086/campos_512_v4
+107/548120/campos_512_v4
+107/548147/campos_512_v4
+107/548253/campos_512_v4
+107/548274/campos_512_v4
+107/548339/campos_512_v4
+107/548432/campos_512_v4
+107/548462/campos_512_v4
+107/548522/campos_512_v4
+107/548539/campos_512_v4
+107/548556/campos_512_v4
+107/548584/campos_512_v4
+107/549078/campos_512_v4
+107/549193/campos_512_v4
+107/549259/campos_512_v4
+107/549260/campos_512_v4
+107/549389/campos_512_v4
+107/549459/campos_512_v4
+107/549475/campos_512_v4
+107/549598/campos_512_v4
+108/550039/campos_512_v4
+108/550074/campos_512_v4
+108/550109/campos_512_v4
+108/550118/campos_512_v4
+108/550193/campos_512_v4
+108/550247/campos_512_v4
+108/550283/campos_512_v4
+108/550351/campos_512_v4
+108/550595/campos_512_v4
+108/550627/campos_512_v4
+108/550708/campos_512_v4
+108/550725/campos_512_v4
+108/550770/campos_512_v4
+108/551173/campos_512_v4
+108/551234/campos_512_v4
+108/551250/campos_512_v4
+108/551270/campos_512_v4
+108/551414/campos_512_v4
+108/551531/campos_512_v4
+108/551798/campos_512_v4
+108/551824/campos_512_v4
+108/551992/campos_512_v4
+108/552119/campos_512_v4
+108/552199/campos_512_v4
+108/552294/campos_512_v4
+108/552333/campos_512_v4
+108/552474/campos_512_v4
+108/552546/campos_512_v4
+108/552617/campos_512_v4
+108/552737/campos_512_v4
+108/552905/campos_512_v4
+108/552939/campos_512_v4
+108/552950/campos_512_v4
+108/552986/campos_512_v4
+108/553049/campos_512_v4
+108/553308/campos_512_v4
+108/553498/campos_512_v4
+108/553545/campos_512_v4
+108/553622/campos_512_v4
+108/553653/campos_512_v4
+108/553668/campos_512_v4
+108/553679/campos_512_v4
+108/553744/campos_512_v4
+108/554000/campos_512_v4
+108/554004/campos_512_v4
+108/554030/campos_512_v4
+108/554033/campos_512_v4
+108/554090/campos_512_v4
+108/554185/campos_512_v4
+108/554246/campos_512_v4
+108/554285/campos_512_v4
+108/554432/campos_512_v4
+108/554519/campos_512_v4
+108/554544/campos_512_v4
+108/554548/campos_512_v4
+108/554551/campos_512_v4
+108/554553/campos_512_v4
+108/554611/campos_512_v4
+108/554757/campos_512_v4
+108/554772/campos_512_v4
+108/554818/campos_512_v4
+108/554967/campos_512_v4
+109/555204/campos_512_v4
+109/555250/campos_512_v4
+109/555275/campos_512_v4
+109/555282/campos_512_v4
+109/555435/campos_512_v4
+109/555488/campos_512_v4
+109/555579/campos_512_v4
+109/555626/campos_512_v4
+109/555724/campos_512_v4
+109/555727/campos_512_v4
+109/555778/campos_512_v4
+109/555798/campos_512_v4
+109/556134/campos_512_v4
+109/556141/campos_512_v4
+109/556426/campos_512_v4
+109/556543/campos_512_v4
+109/556590/campos_512_v4
+109/556662/campos_512_v4
+109/556767/campos_512_v4
+109/556932/campos_512_v4
+109/556968/campos_512_v4
+109/557303/campos_512_v4
+109/557377/campos_512_v4
+109/557394/campos_512_v4
+109/557568/campos_512_v4
+109/557692/campos_512_v4
+109/557730/campos_512_v4
+109/557739/campos_512_v4
+109/557850/campos_512_v4
+109/557944/campos_512_v4
+109/558174/campos_512_v4
+109/558196/campos_512_v4
+109/558257/campos_512_v4
+109/558272/campos_512_v4
+109/558338/campos_512_v4
+109/558400/campos_512_v4
+109/558484/campos_512_v4
+109/558683/campos_512_v4
+109/558739/campos_512_v4
+109/558838/campos_512_v4
+109/558954/campos_512_v4
+109/559095/campos_512_v4
+109/559203/campos_512_v4
+109/559300/campos_512_v4
+109/559362/campos_512_v4
+109/559451/campos_512_v4
+109/559536/campos_512_v4
+109/559602/campos_512_v4
+109/559682/campos_512_v4
+109/559692/campos_512_v4
+109/559957/campos_512_v4
+109/559970/campos_512_v4
+109/559978/campos_512_v4
+11/65075/campos_512_v4
+11/65253/campos_512_v4
+11/65314/campos_512_v4
+11/65543/campos_512_v4
+11/65729/campos_512_v4
+11/65796/campos_512_v4
+11/65820/campos_512_v4
+11/66080/campos_512_v4
+11/66238/campos_512_v4
+11/66433/campos_512_v4
+11/66488/campos_512_v4
+11/66617/campos_512_v4
+11/66707/campos_512_v4
+11/66763/campos_512_v4
+11/66809/campos_512_v4
+11/66953/campos_512_v4
+11/67244/campos_512_v4
+11/67269/campos_512_v4
+11/67279/campos_512_v4
+11/67483/campos_512_v4
+11/67699/campos_512_v4
+11/67816/campos_512_v4
+11/67905/campos_512_v4
+11/68093/campos_512_v4
+11/68186/campos_512_v4
+11/68340/campos_512_v4
+11/68356/campos_512_v4
+11/68383/campos_512_v4
+11/68545/campos_512_v4
+11/68963/campos_512_v4
+11/68977/campos_512_v4
+11/69084/campos_512_v4
+11/69171/campos_512_v4
+11/69222/campos_512_v4
+11/69340/campos_512_v4
+11/69386/campos_512_v4
+11/69529/campos_512_v4
+11/69648/campos_512_v4
+11/69655/campos_512_v4
+11/69739/campos_512_v4
+11/69751/campos_512_v4
+11/69761/campos_512_v4
+11/69790/campos_512_v4
+11/69791/campos_512_v4
+11/69853/campos_512_v4
+11/69977/campos_512_v4
+110/560225/campos_512_v4
+110/560226/campos_512_v4
+110/560417/campos_512_v4
+110/560421/campos_512_v4
+110/560443/campos_512_v4
+110/560547/campos_512_v4
+110/560557/campos_512_v4
+110/560816/campos_512_v4
+110/560899/campos_512_v4
+110/561113/campos_512_v4
+110/561118/campos_512_v4
+110/561121/campos_512_v4
+110/561199/campos_512_v4
+110/561210/campos_512_v4
+110/561540/campos_512_v4
+110/561607/campos_512_v4
+110/561761/campos_512_v4
+110/561981/campos_512_v4
+110/561989/campos_512_v4
+110/562160/campos_512_v4
+110/562182/campos_512_v4
+110/562383/campos_512_v4
+110/562411/campos_512_v4
+110/562508/campos_512_v4
+110/562525/campos_512_v4
+110/562653/campos_512_v4
+110/562670/campos_512_v4
+110/562804/campos_512_v4
+110/562936/campos_512_v4
+110/562995/campos_512_v4
+110/563109/campos_512_v4
+110/563115/campos_512_v4
+110/563121/campos_512_v4
+110/563146/campos_512_v4
+110/563604/campos_512_v4
+110/563702/campos_512_v4
+110/563957/campos_512_v4
+110/564020/campos_512_v4
+110/564023/campos_512_v4
+110/564354/campos_512_v4
+110/564426/campos_512_v4
+110/564665/campos_512_v4
+110/564680/campos_512_v4
+110/564784/campos_512_v4
+110/564842/campos_512_v4
+110/564870/campos_512_v4
+111/565345/campos_512_v4
+111/565484/campos_512_v4
+111/565636/campos_512_v4
+111/565795/campos_512_v4
+111/565845/campos_512_v4
+111/566117/campos_512_v4
+111/566422/campos_512_v4
+111/566490/campos_512_v4
+111/566574/campos_512_v4
+111/566626/campos_512_v4
+111/566635/campos_512_v4
+111/566981/campos_512_v4
+111/566988/campos_512_v4
+111/567046/campos_512_v4
+111/567223/campos_512_v4
+111/567312/campos_512_v4
+111/567428/campos_512_v4
+111/567591/campos_512_v4
+111/567646/campos_512_v4
+111/567669/campos_512_v4
+111/567813/campos_512_v4
+111/567926/campos_512_v4
+111/567945/campos_512_v4
+111/567962/campos_512_v4
+111/568047/campos_512_v4
+111/568063/campos_512_v4
+111/568184/campos_512_v4
+111/568237/campos_512_v4
+111/568244/campos_512_v4
+111/568261/campos_512_v4
+111/568347/campos_512_v4
+111/568507/campos_512_v4
+111/568558/campos_512_v4
+111/568637/campos_512_v4
+111/568690/campos_512_v4
+111/568812/campos_512_v4
+111/568986/campos_512_v4
+111/569094/campos_512_v4
+111/569134/campos_512_v4
+111/569181/campos_512_v4
+111/569221/campos_512_v4
+111/569398/campos_512_v4
+111/569487/campos_512_v4
+111/569797/campos_512_v4
+111/569833/campos_512_v4
+111/569889/campos_512_v4
+112/570035/campos_512_v4
+112/570529/campos_512_v4
+112/570594/campos_512_v4
+112/571229/campos_512_v4
+112/571230/campos_512_v4
+112/571249/campos_512_v4
+112/571312/campos_512_v4
+112/571352/campos_512_v4
+112/571443/campos_512_v4
+112/571448/campos_512_v4
+112/571558/campos_512_v4
+112/571650/campos_512_v4
+112/571705/campos_512_v4
+112/571807/campos_512_v4
+112/571827/campos_512_v4
+112/571876/campos_512_v4
+112/571883/campos_512_v4
+112/571888/campos_512_v4
+112/572003/campos_512_v4
+112/572113/campos_512_v4
+112/572192/campos_512_v4
+112/572326/campos_512_v4
+112/572390/campos_512_v4
+112/572399/campos_512_v4
+112/572424/campos_512_v4
+112/572470/campos_512_v4
+112/572562/campos_512_v4
+112/572634/campos_512_v4
+112/572674/campos_512_v4
+112/572720/campos_512_v4
+112/572865/campos_512_v4
+112/572936/campos_512_v4
+112/573127/campos_512_v4
+112/573248/campos_512_v4
+112/573294/campos_512_v4
+112/573339/campos_512_v4
+112/573362/campos_512_v4
+112/573423/campos_512_v4
+112/573507/campos_512_v4
+112/573536/campos_512_v4
+112/573656/campos_512_v4
+112/573663/campos_512_v4
+112/573672/campos_512_v4
+112/574037/campos_512_v4
+112/574388/campos_512_v4
+112/574600/campos_512_v4
+112/574661/campos_512_v4
+112/574669/campos_512_v4
+112/574745/campos_512_v4
+112/574822/campos_512_v4
+112/574884/campos_512_v4
+112/574908/campos_512_v4
+112/574945/campos_512_v4
+112/574981/campos_512_v4
+113/575310/campos_512_v4
+113/575759/campos_512_v4
+113/575928/campos_512_v4
+113/575970/campos_512_v4
+113/576300/campos_512_v4
+113/576319/campos_512_v4
+113/576340/campos_512_v4
+113/576342/campos_512_v4
+113/576365/campos_512_v4
+113/576370/campos_512_v4
+113/576891/campos_512_v4
+113/576909/campos_512_v4
+113/576926/campos_512_v4
+113/576995/campos_512_v4
+113/577028/campos_512_v4
+113/577049/campos_512_v4
+113/577199/campos_512_v4
+113/577572/campos_512_v4
+113/578024/campos_512_v4
+113/578057/campos_512_v4
+113/578155/campos_512_v4
+113/578209/campos_512_v4
+113/578368/campos_512_v4
+113/578418/campos_512_v4
+113/578452/campos_512_v4
+113/578597/campos_512_v4
+113/578762/campos_512_v4
+113/578961/campos_512_v4
+113/579028/campos_512_v4
+113/579167/campos_512_v4
+113/579218/campos_512_v4
+113/579268/campos_512_v4
+113/579476/campos_512_v4
+113/579842/campos_512_v4
+114/580014/campos_512_v4
+114/580246/campos_512_v4
+114/580264/campos_512_v4
+114/580316/campos_512_v4
+114/580336/campos_512_v4
+114/580450/campos_512_v4
+114/580483/campos_512_v4
+114/580514/campos_512_v4
+114/580606/campos_512_v4
+114/580720/campos_512_v4
+114/580734/campos_512_v4
+114/580754/campos_512_v4
+114/580823/campos_512_v4
+114/580826/campos_512_v4
+114/580863/campos_512_v4
+114/580868/campos_512_v4
+114/580957/campos_512_v4
+114/580964/campos_512_v4
+114/581000/campos_512_v4
+114/581042/campos_512_v4
+114/581237/campos_512_v4
+114/581268/campos_512_v4
+114/581610/campos_512_v4
+114/581631/campos_512_v4
+114/581670/campos_512_v4
+114/581699/campos_512_v4
+114/582145/campos_512_v4
+114/582211/campos_512_v4
+114/582214/campos_512_v4
+114/582302/campos_512_v4
+114/582304/campos_512_v4
+114/582365/campos_512_v4
+114/582413/campos_512_v4
+114/582420/campos_512_v4
+114/582478/campos_512_v4
+114/582494/campos_512_v4
+114/582613/campos_512_v4
+114/582646/campos_512_v4
+114/582744/campos_512_v4
+114/582756/campos_512_v4
+114/582759/campos_512_v4
+114/582786/campos_512_v4
+114/582801/campos_512_v4
+114/582808/campos_512_v4
+114/582824/campos_512_v4
+114/583486/campos_512_v4
+114/583562/campos_512_v4
+114/583630/campos_512_v4
+114/583752/campos_512_v4
+114/583949/campos_512_v4
+114/583966/campos_512_v4
+114/583970/campos_512_v4
+114/583999/campos_512_v4
+114/584013/campos_512_v4
+114/584039/campos_512_v4
+114/584041/campos_512_v4
+114/584064/campos_512_v4
+114/584219/campos_512_v4
+114/584231/campos_512_v4
+114/584317/campos_512_v4
+114/584341/campos_512_v4
+114/584362/campos_512_v4
+114/584574/campos_512_v4
+114/584596/campos_512_v4
+114/584826/campos_512_v4
+115/585025/campos_512_v4
+115/585088/campos_512_v4
+115/585120/campos_512_v4
+115/585149/campos_512_v4
+115/585157/campos_512_v4
+115/585162/campos_512_v4
+115/585205/campos_512_v4
+115/585215/campos_512_v4
+115/585301/campos_512_v4
+115/585651/campos_512_v4
+115/585685/campos_512_v4
+115/585901/campos_512_v4
+115/585953/campos_512_v4
+115/585980/campos_512_v4
+115/586009/campos_512_v4
+115/586413/campos_512_v4
+115/586555/campos_512_v4
+115/586658/campos_512_v4
+115/586719/campos_512_v4
+115/586734/campos_512_v4
+115/586793/campos_512_v4
+115/586802/campos_512_v4
+115/586832/campos_512_v4
+115/587016/campos_512_v4
+115/587044/campos_512_v4
+115/587055/campos_512_v4
+115/587066/campos_512_v4
+115/587187/campos_512_v4
+115/587279/campos_512_v4
+115/587313/campos_512_v4
+115/587499/campos_512_v4
+115/587521/campos_512_v4
+115/587639/campos_512_v4
+115/587825/campos_512_v4
+115/587932/campos_512_v4
+115/587949/campos_512_v4
+115/588198/campos_512_v4
+115/588201/campos_512_v4
+115/588617/campos_512_v4
+115/588649/campos_512_v4
+115/588780/campos_512_v4
+115/589029/campos_512_v4
+115/589034/campos_512_v4
+115/589095/campos_512_v4
+115/589311/campos_512_v4
+115/589417/campos_512_v4
+115/589538/campos_512_v4
+115/589598/campos_512_v4
+116/590168/campos_512_v4
+116/590316/campos_512_v4
+116/590772/campos_512_v4
+116/590857/campos_512_v4
+116/590951/campos_512_v4
+116/591024/campos_512_v4
+116/591105/campos_512_v4
+116/591149/campos_512_v4
+116/591260/campos_512_v4
+116/591302/campos_512_v4
+116/591458/campos_512_v4
+116/591528/campos_512_v4
+116/591575/campos_512_v4
+116/591627/campos_512_v4
+116/591688/campos_512_v4
+116/591775/campos_512_v4
+116/591925/campos_512_v4
+116/592028/campos_512_v4
+116/592260/campos_512_v4
+116/592286/campos_512_v4
+116/592289/campos_512_v4
+116/592335/campos_512_v4
+116/592790/campos_512_v4
+116/592851/campos_512_v4
+116/592874/campos_512_v4
+116/593041/campos_512_v4
+116/593149/campos_512_v4
+116/593272/campos_512_v4
+116/593537/campos_512_v4
+116/593556/campos_512_v4
+116/593633/campos_512_v4
+116/593841/campos_512_v4
+116/593883/campos_512_v4
+116/594106/campos_512_v4
+116/594107/campos_512_v4
+116/594157/campos_512_v4
+116/594338/campos_512_v4
+116/594428/campos_512_v4
+116/594431/campos_512_v4
+116/594588/campos_512_v4
+116/594717/campos_512_v4
+116/594771/campos_512_v4
+116/594865/campos_512_v4
+117/595048/campos_512_v4
+117/595069/campos_512_v4
+117/595143/campos_512_v4
+117/595334/campos_512_v4
+117/595442/campos_512_v4
+117/595502/campos_512_v4
+117/595902/campos_512_v4
+117/596039/campos_512_v4
+117/596087/campos_512_v4
+117/596180/campos_512_v4
+117/596303/campos_512_v4
+117/596356/campos_512_v4
+117/596432/campos_512_v4
+117/596445/campos_512_v4
+117/596452/campos_512_v4
+117/596523/campos_512_v4
+117/596605/campos_512_v4
+117/596680/campos_512_v4
+117/596755/campos_512_v4
+117/597060/campos_512_v4
+117/597077/campos_512_v4
+117/597414/campos_512_v4
+117/597703/campos_512_v4
+117/597800/campos_512_v4
+117/597844/campos_512_v4
+117/597918/campos_512_v4
+117/597943/campos_512_v4
+117/598079/campos_512_v4
+117/598082/campos_512_v4
+117/598105/campos_512_v4
+117/598207/campos_512_v4
+117/598633/campos_512_v4
+117/598845/campos_512_v4
+117/599072/campos_512_v4
+117/599081/campos_512_v4
+117/599098/campos_512_v4
+117/599368/campos_512_v4
+117/599574/campos_512_v4
+117/599580/campos_512_v4
+117/599633/campos_512_v4
+117/599682/campos_512_v4
+117/599963/campos_512_v4
+117/599998/campos_512_v4
+118/600271/campos_512_v4
+118/600318/campos_512_v4
+118/600323/campos_512_v4
+118/600624/campos_512_v4
+118/600696/campos_512_v4
+118/600843/campos_512_v4
+118/601178/campos_512_v4
+118/601183/campos_512_v4
+118/601252/campos_512_v4
+118/601320/campos_512_v4
+118/601376/campos_512_v4
+118/601501/campos_512_v4
+118/601514/campos_512_v4
+118/601520/campos_512_v4
+118/601604/campos_512_v4
+118/601643/campos_512_v4
+118/601660/campos_512_v4
+118/601686/campos_512_v4
+118/601708/campos_512_v4
+118/601857/campos_512_v4
+118/601860/campos_512_v4
+118/601907/campos_512_v4
+118/602100/campos_512_v4
+118/602436/campos_512_v4
+118/602514/campos_512_v4
+118/602554/campos_512_v4
+118/602615/campos_512_v4
+118/602658/campos_512_v4
+118/602674/campos_512_v4
+118/602779/campos_512_v4
+118/602801/campos_512_v4
+118/602912/campos_512_v4
+118/603206/campos_512_v4
+118/603277/campos_512_v4
+118/603325/campos_512_v4
+118/603556/campos_512_v4
+118/603580/campos_512_v4
+118/603587/campos_512_v4
+118/603621/campos_512_v4
+118/603624/campos_512_v4
+118/603649/campos_512_v4
+118/603660/campos_512_v4
+118/603718/campos_512_v4
+118/603780/campos_512_v4
+118/603878/campos_512_v4
+118/603882/campos_512_v4
+118/604007/campos_512_v4
+118/604142/campos_512_v4
+118/604161/campos_512_v4
+118/604399/campos_512_v4
+118/604465/campos_512_v4
+118/604495/campos_512_v4
+118/604535/campos_512_v4
+118/604654/campos_512_v4
+118/604659/campos_512_v4
+118/604829/campos_512_v4
+118/604879/campos_512_v4
+119/605050/campos_512_v4
+119/605056/campos_512_v4
+119/605325/campos_512_v4
+119/605419/campos_512_v4
+119/605509/campos_512_v4
+119/605704/campos_512_v4
+119/605718/campos_512_v4
+119/605963/campos_512_v4
+119/606008/campos_512_v4
+119/606597/campos_512_v4
+119/606631/campos_512_v4
+119/606677/campos_512_v4
+119/606881/campos_512_v4
+119/607315/campos_512_v4
+119/607937/campos_512_v4
+119/607981/campos_512_v4
+119/608226/campos_512_v4
+119/608359/campos_512_v4
+119/608390/campos_512_v4
+119/608418/campos_512_v4
+119/608448/campos_512_v4
+119/608464/campos_512_v4
+119/608508/campos_512_v4
+119/608713/campos_512_v4
+119/608720/campos_512_v4
+119/608871/campos_512_v4
+119/608905/campos_512_v4
+119/608961/campos_512_v4
+119/609140/campos_512_v4
+119/609153/campos_512_v4
+119/609261/campos_512_v4
+119/609303/campos_512_v4
+119/609415/campos_512_v4
+119/609418/campos_512_v4
+119/609761/campos_512_v4
+119/609814/campos_512_v4
+119/609841/campos_512_v4
+119/609965/campos_512_v4
+12/70179/campos_512_v4
+12/70252/campos_512_v4
+12/70302/campos_512_v4
+12/70637/campos_512_v4
+12/70702/campos_512_v4
+12/70911/campos_512_v4
+12/71125/campos_512_v4
+12/71172/campos_512_v4
+12/71177/campos_512_v4
+12/71321/campos_512_v4
+12/71355/campos_512_v4
+12/71469/campos_512_v4
+12/71481/campos_512_v4
+12/71603/campos_512_v4
+12/71640/campos_512_v4
+12/71979/campos_512_v4
+12/71988/campos_512_v4
+12/72264/campos_512_v4
+12/72268/campos_512_v4
+12/72391/campos_512_v4
+12/72699/campos_512_v4
+12/72738/campos_512_v4
+12/72874/campos_512_v4
+12/72958/campos_512_v4
+12/73020/campos_512_v4
+12/73032/campos_512_v4
+12/73048/campos_512_v4
+12/73062/campos_512_v4
+12/73081/campos_512_v4
+12/73157/campos_512_v4
+12/73180/campos_512_v4
+12/73294/campos_512_v4
+12/73315/campos_512_v4
+12/73369/campos_512_v4
+12/73404/campos_512_v4
+12/73710/campos_512_v4
+12/73814/campos_512_v4
+12/73873/campos_512_v4
+12/73875/campos_512_v4
+12/73879/campos_512_v4
+12/73983/campos_512_v4
+12/74005/campos_512_v4
+12/74056/campos_512_v4
+12/74084/campos_512_v4
+12/74098/campos_512_v4
+12/74116/campos_512_v4
+12/74531/campos_512_v4
+12/74681/campos_512_v4
+12/74742/campos_512_v4
+12/74897/campos_512_v4
+12/74932/campos_512_v4
+12/74997/campos_512_v4
+120/610059/campos_512_v4
+120/610163/campos_512_v4
+120/610559/campos_512_v4
+120/610801/campos_512_v4
+120/610862/campos_512_v4
+120/610962/campos_512_v4
+120/611013/campos_512_v4
+120/611095/campos_512_v4
+120/611182/campos_512_v4
+120/611357/campos_512_v4
+120/611404/campos_512_v4
+120/611441/campos_512_v4
+120/611453/campos_512_v4
+120/611497/campos_512_v4
+120/611575/campos_512_v4
+120/611668/campos_512_v4
+120/611720/campos_512_v4
+120/611824/campos_512_v4
+120/612000/campos_512_v4
+120/612012/campos_512_v4
+120/612231/campos_512_v4
+120/612272/campos_512_v4
+120/612301/campos_512_v4
+120/612510/campos_512_v4
+120/612576/campos_512_v4
+120/612601/campos_512_v4
+120/612620/campos_512_v4
+120/613094/campos_512_v4
+120/613294/campos_512_v4
+120/613366/campos_512_v4
+120/613425/campos_512_v4
+120/613620/campos_512_v4
+120/613859/campos_512_v4
+120/613975/campos_512_v4
+120/614177/campos_512_v4
+120/614326/campos_512_v4
+120/614370/campos_512_v4
+120/614629/campos_512_v4
+120/614660/campos_512_v4
+120/614780/campos_512_v4
+120/614910/campos_512_v4
+120/614914/campos_512_v4
+121/615068/campos_512_v4
+121/615344/campos_512_v4
+121/615743/campos_512_v4
+121/615785/campos_512_v4
+121/615804/campos_512_v4
+121/615903/campos_512_v4
+121/615963/campos_512_v4
+121/615979/campos_512_v4
+121/615989/campos_512_v4
+121/616176/campos_512_v4
+121/616312/campos_512_v4
+121/616449/campos_512_v4
+121/616496/campos_512_v4
+121/616519/campos_512_v4
+121/616578/campos_512_v4
+121/616668/campos_512_v4
+121/616861/campos_512_v4
+121/617104/campos_512_v4
+121/617292/campos_512_v4
+121/617315/campos_512_v4
+121/617396/campos_512_v4
+121/617515/campos_512_v4
+121/617516/campos_512_v4
+121/617567/campos_512_v4
+121/617690/campos_512_v4
+121/617704/campos_512_v4
+121/617752/campos_512_v4
+121/617783/campos_512_v4
+121/617889/campos_512_v4
+121/617903/campos_512_v4
+121/617924/campos_512_v4
+121/618061/campos_512_v4
+121/618179/campos_512_v4
+121/618186/campos_512_v4
+121/618215/campos_512_v4
+121/618261/campos_512_v4
+121/618336/campos_512_v4
+121/618397/campos_512_v4
+121/618443/campos_512_v4
+121/618482/campos_512_v4
+121/618603/campos_512_v4
+121/619100/campos_512_v4
+121/619336/campos_512_v4
+121/619351/campos_512_v4
+121/619440/campos_512_v4
+121/619516/campos_512_v4
+121/619555/campos_512_v4
+121/619626/campos_512_v4
+121/619628/campos_512_v4
+121/619784/campos_512_v4
+121/619921/campos_512_v4
+122/620072/campos_512_v4
+122/620400/campos_512_v4
+122/620421/campos_512_v4
+122/620426/campos_512_v4
+122/620537/campos_512_v4
+122/620614/campos_512_v4
+122/620665/campos_512_v4
+122/620775/campos_512_v4
+122/620922/campos_512_v4
+122/620989/campos_512_v4
+122/621015/campos_512_v4
+122/621141/campos_512_v4
+122/621415/campos_512_v4
+122/621449/campos_512_v4
+122/621472/campos_512_v4
+122/621512/campos_512_v4
+122/621523/campos_512_v4
+122/621663/campos_512_v4
+122/621664/campos_512_v4
+122/621826/campos_512_v4
+122/621941/campos_512_v4
+122/621976/campos_512_v4
+122/621997/campos_512_v4
+122/622068/campos_512_v4
+122/622165/campos_512_v4
+122/622245/campos_512_v4
+122/622278/campos_512_v4
+122/622280/campos_512_v4
+122/622310/campos_512_v4
+122/622336/campos_512_v4
+122/622337/campos_512_v4
+122/622421/campos_512_v4
+122/622659/campos_512_v4
+122/622808/campos_512_v4
+122/623158/campos_512_v4
+122/623413/campos_512_v4
+122/623484/campos_512_v4
+122/623530/campos_512_v4
+122/623563/campos_512_v4
+122/623579/campos_512_v4
+122/623656/campos_512_v4
+122/623797/campos_512_v4
+122/623841/campos_512_v4
+122/623898/campos_512_v4
+122/623942/campos_512_v4
+122/623989/campos_512_v4
+122/624132/campos_512_v4
+122/624201/campos_512_v4
+122/624283/campos_512_v4
+122/624366/campos_512_v4
+122/624424/campos_512_v4
+122/624550/campos_512_v4
+122/624738/campos_512_v4
+122/624752/campos_512_v4
+122/624774/campos_512_v4
+123/625139/campos_512_v4
+123/625212/campos_512_v4
+123/625253/campos_512_v4
+123/625579/campos_512_v4
+123/625740/campos_512_v4
+123/625781/campos_512_v4
+123/625934/campos_512_v4
+123/626090/campos_512_v4
+123/626156/campos_512_v4
+123/626275/campos_512_v4
+123/626417/campos_512_v4
+123/626599/campos_512_v4
+123/626776/campos_512_v4
+123/626803/campos_512_v4
+123/627121/campos_512_v4
+123/627462/campos_512_v4
+123/627509/campos_512_v4
+123/627566/campos_512_v4
+123/627597/campos_512_v4
+123/627806/campos_512_v4
+123/628049/campos_512_v4
+123/628099/campos_512_v4
+123/628228/campos_512_v4
+123/628230/campos_512_v4
+123/628262/campos_512_v4
+123/628265/campos_512_v4
+123/628367/campos_512_v4
+123/628981/campos_512_v4
+123/629033/campos_512_v4
+123/629037/campos_512_v4
+123/629547/campos_512_v4
+123/629683/campos_512_v4
+123/629850/campos_512_v4
+123/630000/campos_512_v4
+124/630004/campos_512_v4
+124/630009/campos_512_v4
+124/630013/campos_512_v4
+124/630100/campos_512_v4
+124/630198/campos_512_v4
+124/630227/campos_512_v4
+124/630247/campos_512_v4
+124/630527/campos_512_v4
+124/630538/campos_512_v4
+124/630613/campos_512_v4
+124/630628/campos_512_v4
+124/630705/campos_512_v4
+124/630765/campos_512_v4
+124/630803/campos_512_v4
+124/630814/campos_512_v4
+124/630819/campos_512_v4
+124/631004/campos_512_v4
+124/631176/campos_512_v4
+124/631268/campos_512_v4
+124/631366/campos_512_v4
+124/631394/campos_512_v4
+124/631418/campos_512_v4
+124/631502/campos_512_v4
+124/631569/campos_512_v4
+124/631587/campos_512_v4
+124/631598/campos_512_v4
+124/631627/campos_512_v4
+124/631775/campos_512_v4
+124/631813/campos_512_v4
+124/631844/campos_512_v4
+124/631951/campos_512_v4
+124/632259/campos_512_v4
+124/632460/campos_512_v4
+124/632711/campos_512_v4
+124/632751/campos_512_v4
+124/632830/campos_512_v4
+124/632882/campos_512_v4
+124/632938/campos_512_v4
+124/632948/campos_512_v4
+124/633047/campos_512_v4
+124/633235/campos_512_v4
+124/633295/campos_512_v4
+124/633309/campos_512_v4
+124/633316/campos_512_v4
+124/633465/campos_512_v4
+124/633476/campos_512_v4
+124/633529/campos_512_v4
+124/633555/campos_512_v4
+124/633561/campos_512_v4
+124/633723/campos_512_v4
+124/633898/campos_512_v4
+124/633969/campos_512_v4
+124/633978/campos_512_v4
+124/634045/campos_512_v4
+124/634085/campos_512_v4
+124/634367/campos_512_v4
+124/634417/campos_512_v4
+124/634508/campos_512_v4
+124/634611/campos_512_v4
+124/634706/campos_512_v4
+124/634815/campos_512_v4
+124/634830/campos_512_v4
+124/634891/campos_512_v4
+125/635259/campos_512_v4
+125/635260/campos_512_v4
+125/635291/campos_512_v4
+125/635309/campos_512_v4
+125/635360/campos_512_v4
+125/635416/campos_512_v4
+125/635732/campos_512_v4
+125/635933/campos_512_v4
+125/635985/campos_512_v4
+125/636012/campos_512_v4
+125/636390/campos_512_v4
+125/636394/campos_512_v4
+125/636404/campos_512_v4
+125/636409/campos_512_v4
+125/636435/campos_512_v4
+125/636443/campos_512_v4
+125/636470/campos_512_v4
+125/636471/campos_512_v4
+125/636494/campos_512_v4
+125/637214/campos_512_v4
+125/637294/campos_512_v4
+125/637315/campos_512_v4
+125/637465/campos_512_v4
+125/637495/campos_512_v4
+125/637511/campos_512_v4
+125/637696/campos_512_v4
+125/637766/campos_512_v4
+125/637893/campos_512_v4
+125/637960/campos_512_v4
+125/637970/campos_512_v4
+125/638075/campos_512_v4
+125/638323/campos_512_v4
+125/638333/campos_512_v4
+125/638338/campos_512_v4
+125/638372/campos_512_v4
+125/638598/campos_512_v4
+125/638626/campos_512_v4
+125/638986/campos_512_v4
+125/639042/campos_512_v4
+125/639393/campos_512_v4
+125/639752/campos_512_v4
+125/639799/campos_512_v4
+125/639968/campos_512_v4
+125/639999/campos_512_v4
+127/645311/campos_512_v4
+127/645349/campos_512_v4
+127/645374/campos_512_v4
+127/645422/campos_512_v4
+127/645592/campos_512_v4
+127/645653/campos_512_v4
+127/645664/campos_512_v4
+127/645751/campos_512_v4
+127/645821/campos_512_v4
+127/645945/campos_512_v4
+127/646089/campos_512_v4
+127/646147/campos_512_v4
+127/646257/campos_512_v4
+127/646320/campos_512_v4
+127/646417/campos_512_v4
+127/646488/campos_512_v4
+127/646572/campos_512_v4
+127/646666/campos_512_v4
+127/646672/campos_512_v4
+127/646706/campos_512_v4
+127/646742/campos_512_v4
+127/646825/campos_512_v4
+127/646928/campos_512_v4
+127/647093/campos_512_v4
+127/647131/campos_512_v4
+127/647471/campos_512_v4
+127/647496/campos_512_v4
+127/647685/campos_512_v4
+127/647737/campos_512_v4
+127/647805/campos_512_v4
+127/647818/campos_512_v4
+127/647931/campos_512_v4
+127/648022/campos_512_v4
+127/648033/campos_512_v4
+127/648172/campos_512_v4
+127/648418/campos_512_v4
+127/648505/campos_512_v4
+127/648831/campos_512_v4
+127/648971/campos_512_v4
+127/648995/campos_512_v4
+127/649015/campos_512_v4
+127/649017/campos_512_v4
+127/649052/campos_512_v4
+127/649107/campos_512_v4
+127/649125/campos_512_v4
+127/649552/campos_512_v4
+127/649698/campos_512_v4
+127/649729/campos_512_v4
+127/649737/campos_512_v4
+127/649746/campos_512_v4
+127/649970/campos_512_v4
+128/650010/campos_512_v4
+128/650183/campos_512_v4
+128/650223/campos_512_v4
+128/650309/campos_512_v4
+128/650327/campos_512_v4
+128/650423/campos_512_v4
+128/650517/campos_512_v4
+128/650762/campos_512_v4
+128/650784/campos_512_v4
+128/650787/campos_512_v4
+128/650797/campos_512_v4
+128/650823/campos_512_v4
+128/650843/campos_512_v4
+128/650889/campos_512_v4
+128/650928/campos_512_v4
+128/650989/campos_512_v4
+128/651121/campos_512_v4
+128/651223/campos_512_v4
+128/651399/campos_512_v4
+128/651404/campos_512_v4
+128/651439/campos_512_v4
+128/651440/campos_512_v4
+128/651470/campos_512_v4
+128/651549/campos_512_v4
+128/651603/campos_512_v4
+128/651635/campos_512_v4
+128/651757/campos_512_v4
+128/651879/campos_512_v4
+128/651970/campos_512_v4
+128/652005/campos_512_v4
+128/652063/campos_512_v4
+128/652091/campos_512_v4
+128/652340/campos_512_v4
+128/652641/campos_512_v4
+128/652652/campos_512_v4
+128/652730/campos_512_v4
+128/652741/campos_512_v4
+128/652860/campos_512_v4
+128/653104/campos_512_v4
+128/653175/campos_512_v4
+128/653252/campos_512_v4
+128/653276/campos_512_v4
+128/653285/campos_512_v4
+128/653451/campos_512_v4
+128/653526/campos_512_v4
+128/653563/campos_512_v4
+128/653715/campos_512_v4
+128/653788/campos_512_v4
+128/653936/campos_512_v4
+128/654362/campos_512_v4
+128/654411/campos_512_v4
+128/654503/campos_512_v4
+128/654559/campos_512_v4
+128/654665/campos_512_v4
+128/654716/campos_512_v4
+128/654810/campos_512_v4
+128/654918/campos_512_v4
+129/655080/campos_512_v4
+129/655261/campos_512_v4
+129/655302/campos_512_v4
+129/655336/campos_512_v4
+129/655343/campos_512_v4
+129/655411/campos_512_v4
+129/655500/campos_512_v4
+129/655506/campos_512_v4
+129/655532/campos_512_v4
+129/655594/campos_512_v4
+129/655759/campos_512_v4
+129/655785/campos_512_v4
+129/655821/campos_512_v4
+129/655936/campos_512_v4
+129/655995/campos_512_v4
+129/656024/campos_512_v4
+129/656180/campos_512_v4
+129/656237/campos_512_v4
+129/656530/campos_512_v4
+129/656576/campos_512_v4
+129/656610/campos_512_v4
+129/656619/campos_512_v4
+129/656660/campos_512_v4
+129/656730/campos_512_v4
+129/656751/campos_512_v4
+129/657335/campos_512_v4
+129/658019/campos_512_v4
+129/658062/campos_512_v4
+129/658273/campos_512_v4
+129/658326/campos_512_v4
+129/658620/campos_512_v4
+129/658713/campos_512_v4
+129/658799/campos_512_v4
+129/659015/campos_512_v4
+129/659401/campos_512_v4
+129/659411/campos_512_v4
+129/659459/campos_512_v4
+129/659666/campos_512_v4
+129/659692/campos_512_v4
+129/659973/campos_512_v4
+129/659984/campos_512_v4
+13/75005/campos_512_v4
+13/75048/campos_512_v4
+13/75053/campos_512_v4
+13/75081/campos_512_v4
+13/75157/campos_512_v4
+13/75236/campos_512_v4
+13/75294/campos_512_v4
+13/75378/campos_512_v4
+13/75423/campos_512_v4
+13/75441/campos_512_v4
+13/75594/campos_512_v4
+13/75635/campos_512_v4
+13/75642/campos_512_v4
+13/76024/campos_512_v4
+13/76057/campos_512_v4
+13/76144/campos_512_v4
+13/76215/campos_512_v4
+13/76240/campos_512_v4
+13/76343/campos_512_v4
+13/76347/campos_512_v4
+13/76520/campos_512_v4
+13/76765/campos_512_v4
+13/76995/campos_512_v4
+13/77037/campos_512_v4
+13/77357/campos_512_v4
+13/77671/campos_512_v4
+13/78023/campos_512_v4
+13/78103/campos_512_v4
+13/78243/campos_512_v4
+13/78256/campos_512_v4
+13/78321/campos_512_v4
+13/78362/campos_512_v4
+13/78458/campos_512_v4
+13/78511/campos_512_v4
+13/78542/campos_512_v4
+13/78552/campos_512_v4
+13/78764/campos_512_v4
+13/78799/campos_512_v4
+13/79006/campos_512_v4
+13/79044/campos_512_v4
+13/79087/campos_512_v4
+13/79144/campos_512_v4
+13/79223/campos_512_v4
+13/79276/campos_512_v4
+13/79298/campos_512_v4
+13/79642/campos_512_v4
+13/79682/campos_512_v4
+13/79785/campos_512_v4
+13/79815/campos_512_v4
+13/79954/campos_512_v4
+13/79959/campos_512_v4
+13/79997/campos_512_v4
+130/660140/campos_512_v4
+130/660142/campos_512_v4
+130/660194/campos_512_v4
+130/660492/campos_512_v4
+130/661019/campos_512_v4
+130/661181/campos_512_v4
+130/661270/campos_512_v4
+130/661460/campos_512_v4
+130/661462/campos_512_v4
+130/661492/campos_512_v4
+130/661521/campos_512_v4
+130/661548/campos_512_v4
+130/661765/campos_512_v4
+130/661916/campos_512_v4
+130/662069/campos_512_v4
+130/662203/campos_512_v4
+130/662287/campos_512_v4
+130/662473/campos_512_v4
+130/662541/campos_512_v4
+130/663147/campos_512_v4
+130/663208/campos_512_v4
+130/663220/campos_512_v4
+130/663242/campos_512_v4
+130/663413/campos_512_v4
+130/663469/campos_512_v4
+130/663548/campos_512_v4
+130/663612/campos_512_v4
+130/663860/campos_512_v4
+130/663907/campos_512_v4
+130/664020/campos_512_v4
+130/664080/campos_512_v4
+130/664304/campos_512_v4
+130/664318/campos_512_v4
+130/664334/campos_512_v4
+130/664713/campos_512_v4
+130/664804/campos_512_v4
+130/664873/campos_512_v4
+130/664915/campos_512_v4
+131/665009/campos_512_v4
+131/665070/campos_512_v4
+131/665157/campos_512_v4
+131/665259/campos_512_v4
+131/665321/campos_512_v4
+131/665342/campos_512_v4
+131/665360/campos_512_v4
+131/665417/campos_512_v4
+131/665421/campos_512_v4
+131/665506/campos_512_v4
+131/665585/campos_512_v4
+131/665611/campos_512_v4
+131/665624/campos_512_v4
+131/665637/campos_512_v4
+131/665647/campos_512_v4
+131/665725/campos_512_v4
+131/665925/campos_512_v4
+131/666028/campos_512_v4
+131/666144/campos_512_v4
+131/666166/campos_512_v4
+131/666186/campos_512_v4
+131/666254/campos_512_v4
+131/666374/campos_512_v4
+131/666424/campos_512_v4
+131/666438/campos_512_v4
+131/666458/campos_512_v4
+131/666642/campos_512_v4
+131/666673/campos_512_v4
+131/666810/campos_512_v4
+131/667006/campos_512_v4
+131/667096/campos_512_v4
+131/667108/campos_512_v4
+131/667185/campos_512_v4
+131/667263/campos_512_v4
+131/667408/campos_512_v4
+131/667422/campos_512_v4
+131/667650/campos_512_v4
+131/667833/campos_512_v4
+131/667892/campos_512_v4
+131/667896/campos_512_v4
+131/667927/campos_512_v4
+131/668665/campos_512_v4
+131/668764/campos_512_v4
+131/668825/campos_512_v4
+131/668851/campos_512_v4
+131/668871/campos_512_v4
+131/669093/campos_512_v4
+131/669423/campos_512_v4
+131/669538/campos_512_v4
+131/669604/campos_512_v4
+131/669745/campos_512_v4
+131/669761/campos_512_v4
+131/669785/campos_512_v4
+132/670070/campos_512_v4
+132/670126/campos_512_v4
+132/670151/campos_512_v4
+132/670210/campos_512_v4
+132/670303/campos_512_v4
+132/670322/campos_512_v4
+132/670669/campos_512_v4
+132/670948/campos_512_v4
+132/670990/campos_512_v4
+132/671182/campos_512_v4
+132/671591/campos_512_v4
+132/671735/campos_512_v4
+132/671770/campos_512_v4
+132/671878/campos_512_v4
+132/671909/campos_512_v4
+132/672024/campos_512_v4
+132/672283/campos_512_v4
+132/672338/campos_512_v4
+132/672382/campos_512_v4
+132/672410/campos_512_v4
+132/672411/campos_512_v4
+132/672579/campos_512_v4
+132/672632/campos_512_v4
+132/672882/campos_512_v4
+132/672972/campos_512_v4
+132/673097/campos_512_v4
+132/673331/campos_512_v4
+132/673593/campos_512_v4
+132/673632/campos_512_v4
+132/673641/campos_512_v4
+132/673828/campos_512_v4
+132/673831/campos_512_v4
+132/673969/campos_512_v4
+132/674025/campos_512_v4
+132/674035/campos_512_v4
+132/674091/campos_512_v4
+132/674116/campos_512_v4
+132/674219/campos_512_v4
+132/674283/campos_512_v4
+132/674323/campos_512_v4
+132/674334/campos_512_v4
+132/674412/campos_512_v4
+132/674413/campos_512_v4
+132/674423/campos_512_v4
+132/674439/campos_512_v4
+132/674510/campos_512_v4
+132/674623/campos_512_v4
+132/674772/campos_512_v4
+132/674893/campos_512_v4
+132/674945/campos_512_v4
+133/675098/campos_512_v4
+133/675113/campos_512_v4
+133/675313/campos_512_v4
+133/675375/campos_512_v4
+133/675519/campos_512_v4
+133/675839/campos_512_v4
+133/675859/campos_512_v4
+133/675870/campos_512_v4
+133/675876/campos_512_v4
+133/675935/campos_512_v4
+133/676034/campos_512_v4
+133/676037/campos_512_v4
+133/676067/campos_512_v4
+133/676219/campos_512_v4
+133/676504/campos_512_v4
+133/676536/campos_512_v4
+133/676539/campos_512_v4
+133/676633/campos_512_v4
+133/676901/campos_512_v4
+133/676940/campos_512_v4
+133/677019/campos_512_v4
+133/677155/campos_512_v4
+133/677169/campos_512_v4
+133/677303/campos_512_v4
+133/677332/campos_512_v4
+133/677350/campos_512_v4
+133/677379/campos_512_v4
+133/677476/campos_512_v4
+133/677725/campos_512_v4
+133/677788/campos_512_v4
+133/677847/campos_512_v4
+133/677911/campos_512_v4
+133/677958/campos_512_v4
+133/677998/campos_512_v4
+133/678023/campos_512_v4
+133/678102/campos_512_v4
+133/678120/campos_512_v4
+133/678168/campos_512_v4
+133/678182/campos_512_v4
+133/678462/campos_512_v4
+133/678509/campos_512_v4
+133/678518/campos_512_v4
+133/678635/campos_512_v4
+133/678670/campos_512_v4
+133/678775/campos_512_v4
+133/678836/campos_512_v4
+133/678913/campos_512_v4
+133/679255/campos_512_v4
+133/679275/campos_512_v4
+133/679399/campos_512_v4
+133/679486/campos_512_v4
+133/679647/campos_512_v4
+133/679712/campos_512_v4
+133/679784/campos_512_v4
+134/680097/campos_512_v4
+134/680132/campos_512_v4
+134/680250/campos_512_v4
+134/680315/campos_512_v4
+134/680357/campos_512_v4
+134/680492/campos_512_v4
+134/680625/campos_512_v4
+134/680655/campos_512_v4
+134/680808/campos_512_v4
+134/680847/campos_512_v4
+134/680910/campos_512_v4
+134/681610/campos_512_v4
+134/681616/campos_512_v4
+134/681687/campos_512_v4
+134/681712/campos_512_v4
+134/681837/campos_512_v4
+134/681870/campos_512_v4
+134/681906/campos_512_v4
+134/682078/campos_512_v4
+134/682271/campos_512_v4
+134/682443/campos_512_v4
+134/682470/campos_512_v4
+134/682550/campos_512_v4
+134/682555/campos_512_v4
+134/682621/campos_512_v4
+134/682657/campos_512_v4
+134/682702/campos_512_v4
+134/682905/campos_512_v4
+134/682938/campos_512_v4
+134/683009/campos_512_v4
+134/683016/campos_512_v4
+134/683068/campos_512_v4
+134/683224/campos_512_v4
+134/683605/campos_512_v4
+134/683616/campos_512_v4
+134/683653/campos_512_v4
+134/683714/campos_512_v4
+134/683978/campos_512_v4
+134/684029/campos_512_v4
+134/684104/campos_512_v4
+134/684124/campos_512_v4
+134/684267/campos_512_v4
+134/684408/campos_512_v4
+134/684447/campos_512_v4
+135/685016/campos_512_v4
+135/685187/campos_512_v4
+135/685236/campos_512_v4
+135/685303/campos_512_v4
+135/685559/campos_512_v4
+135/685721/campos_512_v4
+135/685771/campos_512_v4
+135/685792/campos_512_v4
+135/685830/campos_512_v4
+135/685868/campos_512_v4
+135/685981/campos_512_v4
+135/686087/campos_512_v4
+135/686215/campos_512_v4
+135/686400/campos_512_v4
+135/686539/campos_512_v4
+135/686554/campos_512_v4
+135/686595/campos_512_v4
+135/686622/campos_512_v4
+135/686791/campos_512_v4
+135/686848/campos_512_v4
+135/687307/campos_512_v4
+135/687463/campos_512_v4
+135/687472/campos_512_v4
+135/687624/campos_512_v4
+135/687665/campos_512_v4
+135/687756/campos_512_v4
+135/687860/campos_512_v4
+135/687902/campos_512_v4
+135/687959/campos_512_v4
+135/687962/campos_512_v4
+135/687984/campos_512_v4
+135/688006/campos_512_v4
+135/688390/campos_512_v4
+135/688396/campos_512_v4
+135/688520/campos_512_v4
+135/688526/campos_512_v4
+135/688539/campos_512_v4
+135/688750/campos_512_v4
+135/688792/campos_512_v4
+135/688848/campos_512_v4
+135/688894/campos_512_v4
+135/689005/campos_512_v4
+135/689078/campos_512_v4
+135/689187/campos_512_v4
+135/689515/campos_512_v4
+135/689666/campos_512_v4
+136/690158/campos_512_v4
+136/690181/campos_512_v4
+136/690203/campos_512_v4
+136/690236/campos_512_v4
+136/690239/campos_512_v4
+136/690488/campos_512_v4
+136/690736/campos_512_v4
+136/691247/campos_512_v4
+136/691254/campos_512_v4
+136/691429/campos_512_v4
+136/691660/campos_512_v4
+136/691661/campos_512_v4
+136/691712/campos_512_v4
+136/691966/campos_512_v4
+136/691988/campos_512_v4
+136/691996/campos_512_v4
+136/692174/campos_512_v4
+136/692184/campos_512_v4
+136/692407/campos_512_v4
+136/692606/campos_512_v4
+136/692675/campos_512_v4
+136/692925/campos_512_v4
+136/693205/campos_512_v4
+136/693232/campos_512_v4
+136/693446/campos_512_v4
+136/693522/campos_512_v4
+136/693608/campos_512_v4
+136/693835/campos_512_v4
+136/693866/campos_512_v4
+136/693886/campos_512_v4
+136/693913/campos_512_v4
+136/693965/campos_512_v4
+136/694009/campos_512_v4
+136/694394/campos_512_v4
+136/694486/campos_512_v4
+136/694572/campos_512_v4
+136/694653/campos_512_v4
+136/694677/campos_512_v4
+136/694824/campos_512_v4
+136/694979/campos_512_v4
+137/695159/campos_512_v4
+137/695237/campos_512_v4
+137/695349/campos_512_v4
+137/695373/campos_512_v4
+137/695430/campos_512_v4
+137/695662/campos_512_v4
+137/695921/campos_512_v4
+137/696015/campos_512_v4
+137/696144/campos_512_v4
+137/696190/campos_512_v4
+137/696241/campos_512_v4
+137/696324/campos_512_v4
+137/696487/campos_512_v4
+137/696619/campos_512_v4
+137/696623/campos_512_v4
+137/696659/campos_512_v4
+137/696735/campos_512_v4
+137/696812/campos_512_v4
+137/696850/campos_512_v4
+137/696862/campos_512_v4
+137/697098/campos_512_v4
+137/697236/campos_512_v4
+137/697293/campos_512_v4
+137/697403/campos_512_v4
+137/697552/campos_512_v4
+137/697580/campos_512_v4
+137/697613/campos_512_v4
+137/697620/campos_512_v4
+137/697650/campos_512_v4
+137/697841/campos_512_v4
+137/697894/campos_512_v4
+137/697927/campos_512_v4
+137/697929/campos_512_v4
+137/698044/campos_512_v4
+137/698225/campos_512_v4
+137/698272/campos_512_v4
+137/698308/campos_512_v4
+137/698323/campos_512_v4
+137/698339/campos_512_v4
+137/698386/campos_512_v4
+137/698523/campos_512_v4
+137/698532/campos_512_v4
+137/698564/campos_512_v4
+137/698782/campos_512_v4
+137/698783/campos_512_v4
+137/698791/campos_512_v4
+137/698930/campos_512_v4
+137/698979/campos_512_v4
+137/699085/campos_512_v4
+137/699098/campos_512_v4
+137/699110/campos_512_v4
+137/699119/campos_512_v4
+137/699151/campos_512_v4
+137/699187/campos_512_v4
+137/699199/campos_512_v4
+137/699245/campos_512_v4
+137/699464/campos_512_v4
+137/699560/campos_512_v4
+137/699588/campos_512_v4
+137/699751/campos_512_v4
+137/699905/campos_512_v4
+137/699948/campos_512_v4
+137/699959/campos_512_v4
+138/700028/campos_512_v4
+138/700086/campos_512_v4
+138/700239/campos_512_v4
+138/700288/campos_512_v4
+138/700435/campos_512_v4
+138/700580/campos_512_v4
+138/700587/campos_512_v4
+138/700759/campos_512_v4
+138/700793/campos_512_v4
+138/700845/campos_512_v4
+138/700948/campos_512_v4
+138/700978/campos_512_v4
+138/701299/campos_512_v4
+138/701302/campos_512_v4
+138/701390/campos_512_v4
+138/701638/campos_512_v4
+138/701698/campos_512_v4
+138/701796/campos_512_v4
+138/701847/campos_512_v4
+138/701910/campos_512_v4
+138/701934/campos_512_v4
+138/701954/campos_512_v4
+138/702045/campos_512_v4
+138/702117/campos_512_v4
+138/702237/campos_512_v4
+138/702410/campos_512_v4
+138/702479/campos_512_v4
+138/702529/campos_512_v4
+138/702675/campos_512_v4
+138/702683/campos_512_v4
+138/702723/campos_512_v4
+138/702914/campos_512_v4
+138/703009/campos_512_v4
+138/703109/campos_512_v4
+138/703203/campos_512_v4
+138/703268/campos_512_v4
+138/703322/campos_512_v4
+138/703404/campos_512_v4
+138/703463/campos_512_v4
+138/703472/campos_512_v4
+138/703677/campos_512_v4
+138/703748/campos_512_v4
+138/704023/campos_512_v4
+138/704037/campos_512_v4
+138/704438/campos_512_v4
+138/704582/campos_512_v4
+138/704631/campos_512_v4
+138/704675/campos_512_v4
+138/704960/campos_512_v4
+139/705035/campos_512_v4
+139/705056/campos_512_v4
+139/705186/campos_512_v4
+139/705240/campos_512_v4
+139/705261/campos_512_v4
+139/705545/campos_512_v4
+139/705599/campos_512_v4
+139/705675/campos_512_v4
+139/705683/campos_512_v4
+139/705780/campos_512_v4
+139/705787/campos_512_v4
+139/706083/campos_512_v4
+139/706100/campos_512_v4
+139/706246/campos_512_v4
+139/706465/campos_512_v4
+139/706558/campos_512_v4
+139/706964/campos_512_v4
+139/707007/campos_512_v4
+139/707030/campos_512_v4
+139/707036/campos_512_v4
+139/707065/campos_512_v4
+139/707087/campos_512_v4
+139/707123/campos_512_v4
+139/707219/campos_512_v4
+139/707266/campos_512_v4
+139/707496/campos_512_v4
+139/707571/campos_512_v4
+139/707576/campos_512_v4
+139/707614/campos_512_v4
+139/707670/campos_512_v4
+139/707711/campos_512_v4
+139/707774/campos_512_v4
+139/707895/campos_512_v4
+139/707911/campos_512_v4
+139/708030/campos_512_v4
+139/708197/campos_512_v4
+139/708317/campos_512_v4
+139/708410/campos_512_v4
+139/708541/campos_512_v4
+139/708565/campos_512_v4
+139/708646/campos_512_v4
+139/708816/campos_512_v4
+139/708827/campos_512_v4
+139/708838/campos_512_v4
+139/708930/campos_512_v4
+139/709057/campos_512_v4
+139/709378/campos_512_v4
+139/709650/campos_512_v4
+139/709716/campos_512_v4
+139/709830/campos_512_v4
+139/709934/campos_512_v4
+14/80004/campos_512_v4
+14/80008/campos_512_v4
+14/80122/campos_512_v4
+14/80128/campos_512_v4
+14/80312/campos_512_v4
+14/80389/campos_512_v4
+14/80425/campos_512_v4
+14/80441/campos_512_v4
+14/80674/campos_512_v4
+14/80743/campos_512_v4
+14/80921/campos_512_v4
+14/81028/campos_512_v4
+14/81129/campos_512_v4
+14/81202/campos_512_v4
+14/81228/campos_512_v4
+14/81247/campos_512_v4
+14/81254/campos_512_v4
+14/81326/campos_512_v4
+14/81392/campos_512_v4
+14/81427/campos_512_v4
+14/81549/campos_512_v4
+14/81676/campos_512_v4
+14/81742/campos_512_v4
+14/81976/campos_512_v4
+14/82141/campos_512_v4
+14/82247/campos_512_v4
+14/82321/campos_512_v4
+14/82372/campos_512_v4
+14/82385/campos_512_v4
+14/82466/campos_512_v4
+14/82486/campos_512_v4
+14/82559/campos_512_v4
+14/82578/campos_512_v4
+14/82751/campos_512_v4
+14/82790/campos_512_v4
+14/82792/campos_512_v4
+14/82794/campos_512_v4
+14/82939/campos_512_v4
+14/83094/campos_512_v4
+14/83241/campos_512_v4
+14/83350/campos_512_v4
+14/83388/campos_512_v4
+14/83454/campos_512_v4
+14/83482/campos_512_v4
+14/83506/campos_512_v4
+14/83856/campos_512_v4
+14/84064/campos_512_v4
+14/84122/campos_512_v4
+14/84143/campos_512_v4
+14/84164/campos_512_v4
+14/84284/campos_512_v4
+14/84497/campos_512_v4
+14/84689/campos_512_v4
+14/84698/campos_512_v4
+14/84856/campos_512_v4
+14/84986/campos_512_v4
+140/710144/campos_512_v4
+140/710178/campos_512_v4
+140/710241/campos_512_v4
+140/710246/campos_512_v4
+140/710366/campos_512_v4
+140/710554/campos_512_v4
+140/710799/campos_512_v4
+140/710825/campos_512_v4
+140/710844/campos_512_v4
+140/710877/campos_512_v4
+140/710902/campos_512_v4
+140/711061/campos_512_v4
+140/711263/campos_512_v4
+140/711279/campos_512_v4
+140/711285/campos_512_v4
+140/711491/campos_512_v4
+140/711592/campos_512_v4
+140/711660/campos_512_v4
+140/711666/campos_512_v4
+140/711703/campos_512_v4
+140/711803/campos_512_v4
+140/712103/campos_512_v4
+140/712451/campos_512_v4
+140/712575/campos_512_v4
+140/712632/campos_512_v4
+140/712651/campos_512_v4
+140/712788/campos_512_v4
+140/712906/campos_512_v4
+140/712986/campos_512_v4
+140/713023/campos_512_v4
+140/713043/campos_512_v4
+140/713121/campos_512_v4
+140/713231/campos_512_v4
+140/713250/campos_512_v4
+140/713364/campos_512_v4
+140/713367/campos_512_v4
+140/713368/campos_512_v4
+140/713602/campos_512_v4
+140/713707/campos_512_v4
+140/713846/campos_512_v4
+140/713982/campos_512_v4
+140/714005/campos_512_v4
+140/714251/campos_512_v4
+140/714703/campos_512_v4
+140/714705/campos_512_v4
+140/714776/campos_512_v4
+140/714898/campos_512_v4
+140/714910/campos_512_v4
+141/715062/campos_512_v4
+141/715096/campos_512_v4
+141/715547/campos_512_v4
+141/715559/campos_512_v4
+141/715561/campos_512_v4
+141/715579/campos_512_v4
+141/715739/campos_512_v4
+141/715980/campos_512_v4
+141/715986/campos_512_v4
+141/716600/campos_512_v4
+141/716610/campos_512_v4
+141/716933/campos_512_v4
+141/716938/campos_512_v4
+141/716992/campos_512_v4
+141/717012/campos_512_v4
+141/717054/campos_512_v4
+141/717063/campos_512_v4
+141/717145/campos_512_v4
+141/717187/campos_512_v4
+141/717207/campos_512_v4
+141/717219/campos_512_v4
+141/717445/campos_512_v4
+141/717512/campos_512_v4
+141/717555/campos_512_v4
+141/717640/campos_512_v4
+141/717653/campos_512_v4
+141/717678/campos_512_v4
+141/718388/campos_512_v4
+141/718424/campos_512_v4
+141/718516/campos_512_v4
+141/718572/campos_512_v4
+141/718622/campos_512_v4
+141/718715/campos_512_v4
+141/718748/campos_512_v4
+141/718870/campos_512_v4
+141/718957/campos_512_v4
+141/719104/campos_512_v4
+141/719223/campos_512_v4
+141/719245/campos_512_v4
+141/719324/campos_512_v4
+141/719371/campos_512_v4
+141/719467/campos_512_v4
+141/719625/campos_512_v4
+141/719721/campos_512_v4
+141/719746/campos_512_v4
+141/719764/campos_512_v4
+141/719830/campos_512_v4
+141/719846/campos_512_v4
+142/720221/campos_512_v4
+142/720264/campos_512_v4
+142/720331/campos_512_v4
+142/720542/campos_512_v4
+142/720701/campos_512_v4
+142/720724/campos_512_v4
+142/720938/campos_512_v4
+142/720963/campos_512_v4
+142/721084/campos_512_v4
+142/721150/campos_512_v4
+142/721171/campos_512_v4
+142/721256/campos_512_v4
+142/721358/campos_512_v4
+142/721461/campos_512_v4
+142/721487/campos_512_v4
+142/721567/campos_512_v4
+142/721806/campos_512_v4
+142/722088/campos_512_v4
+142/722094/campos_512_v4
+142/722135/campos_512_v4
+142/722279/campos_512_v4
+142/722400/campos_512_v4
+142/722433/campos_512_v4
+142/722477/campos_512_v4
+142/722598/campos_512_v4
+142/722675/campos_512_v4
+142/722768/campos_512_v4
+142/722873/campos_512_v4
+142/722896/campos_512_v4
+142/723013/campos_512_v4
+142/723040/campos_512_v4
+142/723144/campos_512_v4
+142/723330/campos_512_v4
+142/723392/campos_512_v4
+142/723408/campos_512_v4
+142/723586/campos_512_v4
+142/723632/campos_512_v4
+142/723660/campos_512_v4
+142/723780/campos_512_v4
+142/723837/campos_512_v4
+142/723838/campos_512_v4
+142/723868/campos_512_v4
+142/723954/campos_512_v4
+142/723966/campos_512_v4
+142/724034/campos_512_v4
+142/724386/campos_512_v4
+142/724416/campos_512_v4
+142/724435/campos_512_v4
+142/724451/campos_512_v4
+142/724515/campos_512_v4
+142/724576/campos_512_v4
+142/724618/campos_512_v4
+142/724759/campos_512_v4
+142/724915/campos_512_v4
+142/724967/campos_512_v4
+143/725005/campos_512_v4
+143/725101/campos_512_v4
+143/725280/campos_512_v4
+143/725426/campos_512_v4
+143/725652/campos_512_v4
+143/725703/campos_512_v4
+143/725723/campos_512_v4
+143/725861/campos_512_v4
+143/725884/campos_512_v4
+143/725909/campos_512_v4
+143/726099/campos_512_v4
+143/726303/campos_512_v4
+143/726335/campos_512_v4
+143/726476/campos_512_v4
+143/726479/campos_512_v4
+143/726496/campos_512_v4
+143/726617/campos_512_v4
+143/726631/campos_512_v4
+143/726654/campos_512_v4
+143/726668/campos_512_v4
+143/726809/campos_512_v4
+143/726810/campos_512_v4
+143/726864/campos_512_v4
+143/726974/campos_512_v4
+143/726989/campos_512_v4
+143/727020/campos_512_v4
+143/727032/campos_512_v4
+143/727067/campos_512_v4
+143/727124/campos_512_v4
+143/727158/campos_512_v4
+143/727172/campos_512_v4
+143/727360/campos_512_v4
+143/727540/campos_512_v4
+143/727625/campos_512_v4
+143/727695/campos_512_v4
+143/727700/campos_512_v4
+143/727885/campos_512_v4
+143/728012/campos_512_v4
+143/728019/campos_512_v4
+143/728114/campos_512_v4
+143/728196/campos_512_v4
+143/728217/campos_512_v4
+143/728296/campos_512_v4
+143/728365/campos_512_v4
+143/728494/campos_512_v4
+143/728497/campos_512_v4
+143/729438/campos_512_v4
+143/729468/campos_512_v4
+143/729724/campos_512_v4
+143/729775/campos_512_v4
+143/729969/campos_512_v4
+144/730477/campos_512_v4
+144/730555/campos_512_v4
+144/730628/campos_512_v4
+144/730713/campos_512_v4
+144/730847/campos_512_v4
+144/731341/campos_512_v4
+144/731613/campos_512_v4
+144/731832/campos_512_v4
+144/731922/campos_512_v4
+144/731969/campos_512_v4
+144/732001/campos_512_v4
+144/732150/campos_512_v4
+144/732192/campos_512_v4
+144/732253/campos_512_v4
+144/732356/campos_512_v4
+144/732459/campos_512_v4
+144/732504/campos_512_v4
+144/732653/campos_512_v4
+144/732699/campos_512_v4
+144/732706/campos_512_v4
+144/733111/campos_512_v4
+144/733195/campos_512_v4
+144/733221/campos_512_v4
+144/733260/campos_512_v4
+144/733275/campos_512_v4
+144/733469/campos_512_v4
+144/733905/campos_512_v4
+144/734128/campos_512_v4
+144/734183/campos_512_v4
+144/734314/campos_512_v4
+144/734424/campos_512_v4
+144/734473/campos_512_v4
+145/735146/campos_512_v4
+145/735172/campos_512_v4
+145/735353/campos_512_v4
+145/735367/campos_512_v4
+145/735480/campos_512_v4
+145/735564/campos_512_v4
+145/735629/campos_512_v4
+145/735640/campos_512_v4
+145/735642/campos_512_v4
+145/735729/campos_512_v4
+145/735872/campos_512_v4
+145/736145/campos_512_v4
+145/736309/campos_512_v4
+145/736401/campos_512_v4
+145/736642/campos_512_v4
+145/736751/campos_512_v4
+145/736849/campos_512_v4
+145/737004/campos_512_v4
+145/737034/campos_512_v4
+145/737168/campos_512_v4
+145/737182/campos_512_v4
+145/737288/campos_512_v4
+145/737439/campos_512_v4
+145/737612/campos_512_v4
+145/737648/campos_512_v4
+145/737655/campos_512_v4
+145/737830/campos_512_v4
+145/737835/campos_512_v4
+145/737973/campos_512_v4
+145/738029/campos_512_v4
+145/738089/campos_512_v4
+145/738312/campos_512_v4
+145/738333/campos_512_v4
+145/738552/campos_512_v4
+145/738612/campos_512_v4
+145/738999/campos_512_v4
+145/739013/campos_512_v4
+145/739124/campos_512_v4
+145/739222/campos_512_v4
+145/739325/campos_512_v4
+145/739375/campos_512_v4
+145/739526/campos_512_v4
+145/739536/campos_512_v4
+145/739568/campos_512_v4
+145/739682/campos_512_v4
+145/739845/campos_512_v4
+145/739973/campos_512_v4
+146/740281/campos_512_v4
+146/740576/campos_512_v4
+146/740793/campos_512_v4
+146/741045/campos_512_v4
+146/741071/campos_512_v4
+146/741106/campos_512_v4
+146/741630/campos_512_v4
+146/742221/campos_512_v4
+146/742231/campos_512_v4
+146/742610/campos_512_v4
+146/742845/campos_512_v4
+146/742905/campos_512_v4
+146/742910/campos_512_v4
+146/742961/campos_512_v4
+146/743003/campos_512_v4
+146/743095/campos_512_v4
+146/743389/campos_512_v4
+146/743462/campos_512_v4
+146/743728/campos_512_v4
+146/743741/campos_512_v4
+146/743926/campos_512_v4
+146/744085/campos_512_v4
+146/744319/campos_512_v4
+146/744447/campos_512_v4
+146/744596/campos_512_v4
+146/744643/campos_512_v4
+146/744841/campos_512_v4
+146/744911/campos_512_v4
+146/744956/campos_512_v4
+147/745035/campos_512_v4
+147/745080/campos_512_v4
+147/745161/campos_512_v4
+147/745187/campos_512_v4
+147/745282/campos_512_v4
+147/745474/campos_512_v4
+147/745526/campos_512_v4
+147/745564/campos_512_v4
+147/745606/campos_512_v4
+147/745702/campos_512_v4
+147/745950/campos_512_v4
+147/745999/campos_512_v4
+147/746163/campos_512_v4
+147/746187/campos_512_v4
+147/746189/campos_512_v4
+147/746272/campos_512_v4
+147/746322/campos_512_v4
+147/746436/campos_512_v4
+147/746450/campos_512_v4
+147/746504/campos_512_v4
+147/746555/campos_512_v4
+147/746642/campos_512_v4
+147/746782/campos_512_v4
+147/747100/campos_512_v4
+147/747211/campos_512_v4
+147/747240/campos_512_v4
+147/747272/campos_512_v4
+147/747306/campos_512_v4
+147/747310/campos_512_v4
+147/747460/campos_512_v4
+147/747560/campos_512_v4
+147/747614/campos_512_v4
+147/747644/campos_512_v4
+147/747753/campos_512_v4
+147/747764/campos_512_v4
+147/747766/campos_512_v4
+147/747775/campos_512_v4
+147/747851/campos_512_v4
+147/747954/campos_512_v4
+147/747969/campos_512_v4
+147/748001/campos_512_v4
+147/748131/campos_512_v4
+147/748295/campos_512_v4
+147/748731/campos_512_v4
+147/748876/campos_512_v4
+147/749071/campos_512_v4
+147/749166/campos_512_v4
+147/749280/campos_512_v4
+147/749344/campos_512_v4
+147/749855/campos_512_v4
+147/749884/campos_512_v4
+147/749921/campos_512_v4
+148/750071/campos_512_v4
+148/750265/campos_512_v4
+148/750350/campos_512_v4
+148/750546/campos_512_v4
+148/750667/campos_512_v4
+148/750808/campos_512_v4
+148/751254/campos_512_v4
+148/751349/campos_512_v4
+148/751381/campos_512_v4
+148/751412/campos_512_v4
+148/751580/campos_512_v4
+148/751756/campos_512_v4
+148/751769/campos_512_v4
+148/752005/campos_512_v4
+148/752011/campos_512_v4
+148/752258/campos_512_v4
+148/752361/campos_512_v4
+148/752366/campos_512_v4
+148/752625/campos_512_v4
+148/752674/campos_512_v4
+148/752782/campos_512_v4
+148/752807/campos_512_v4
+148/752836/campos_512_v4
+148/752893/campos_512_v4
+148/753025/campos_512_v4
+148/753356/campos_512_v4
+148/753508/campos_512_v4
+148/753536/campos_512_v4
+148/753564/campos_512_v4
+148/753702/campos_512_v4
+148/753925/campos_512_v4
+148/754395/campos_512_v4
+148/754397/campos_512_v4
+148/754446/campos_512_v4
+148/754504/campos_512_v4
+148/754536/campos_512_v4
+148/754798/campos_512_v4
+148/754806/campos_512_v4
+148/754972/campos_512_v4
+149/755047/campos_512_v4
+149/755128/campos_512_v4
+149/755142/campos_512_v4
+149/755311/campos_512_v4
+149/755514/campos_512_v4
+149/755591/campos_512_v4
+149/755653/campos_512_v4
+149/755743/campos_512_v4
+149/755949/campos_512_v4
+149/756011/campos_512_v4
+149/756024/campos_512_v4
+149/756185/campos_512_v4
+149/756325/campos_512_v4
+149/756326/campos_512_v4
+149/756361/campos_512_v4
+149/756464/campos_512_v4
+149/756558/campos_512_v4
+149/756660/campos_512_v4
+149/756738/campos_512_v4
+149/756784/campos_512_v4
+149/756839/campos_512_v4
+149/756940/campos_512_v4
+149/756994/campos_512_v4
+149/757029/campos_512_v4
+149/757121/campos_512_v4
+149/757262/campos_512_v4
+149/757268/campos_512_v4
+149/757274/campos_512_v4
+149/757465/campos_512_v4
+149/757498/campos_512_v4
+149/757509/campos_512_v4
+149/757517/campos_512_v4
+149/757526/campos_512_v4
+149/757922/campos_512_v4
+149/758009/campos_512_v4
+149/758031/campos_512_v4
+149/758295/campos_512_v4
+149/758401/campos_512_v4
+149/758474/campos_512_v4
+149/758575/campos_512_v4
+149/758665/campos_512_v4
+149/758683/campos_512_v4
+149/758698/campos_512_v4
+149/758777/campos_512_v4
+149/758903/campos_512_v4
+149/759041/campos_512_v4
+149/759673/campos_512_v4
+149/759725/campos_512_v4
+149/759838/campos_512_v4
+149/759881/campos_512_v4
+15/85048/campos_512_v4
+15/85184/campos_512_v4
+15/85249/campos_512_v4
+15/85481/campos_512_v4
+15/85579/campos_512_v4
+15/85609/campos_512_v4
+15/85624/campos_512_v4
+15/85654/campos_512_v4
+15/85827/campos_512_v4
+15/85847/campos_512_v4
+15/85945/campos_512_v4
+15/86050/campos_512_v4
+15/86145/campos_512_v4
+15/86153/campos_512_v4
+15/86160/campos_512_v4
+15/86407/campos_512_v4
+15/86535/campos_512_v4
+15/86561/campos_512_v4
+15/86936/campos_512_v4
+15/87133/campos_512_v4
+15/87155/campos_512_v4
+15/87189/campos_512_v4
+15/87319/campos_512_v4
+15/87394/campos_512_v4
+15/87407/campos_512_v4
+15/87414/campos_512_v4
+15/87417/campos_512_v4
+15/87773/campos_512_v4
+15/87812/campos_512_v4
+15/87869/campos_512_v4
+15/87933/campos_512_v4
+15/88008/campos_512_v4
+15/88024/campos_512_v4
+15/88212/campos_512_v4
+15/88245/campos_512_v4
+15/88255/campos_512_v4
+15/88284/campos_512_v4
+15/88333/campos_512_v4
+15/88349/campos_512_v4
+15/88385/campos_512_v4
+15/89020/campos_512_v4
+15/89096/campos_512_v4
+15/89142/campos_512_v4
+15/89162/campos_512_v4
+15/89172/campos_512_v4
+15/89216/campos_512_v4
+15/89241/campos_512_v4
+15/89244/campos_512_v4
+15/89361/campos_512_v4
+15/89378/campos_512_v4
+15/89763/campos_512_v4
+15/89900/campos_512_v4
+150/760033/campos_512_v4
+150/760049/campos_512_v4
+150/760556/campos_512_v4
+150/760583/campos_512_v4
+150/760659/campos_512_v4
+150/760906/campos_512_v4
+150/760929/campos_512_v4
+150/761192/campos_512_v4
+150/761439/campos_512_v4
+150/761445/campos_512_v4
+150/761586/campos_512_v4
+150/761599/campos_512_v4
+150/761604/campos_512_v4
+150/761710/campos_512_v4
+150/761718/campos_512_v4
+150/762136/campos_512_v4
+150/762147/campos_512_v4
+150/762212/campos_512_v4
+150/762246/campos_512_v4
+150/762328/campos_512_v4
+150/762349/campos_512_v4
+150/762431/campos_512_v4
+150/762706/campos_512_v4
+150/762937/campos_512_v4
+150/763257/campos_512_v4
+150/763269/campos_512_v4
+150/763325/campos_512_v4
+150/763444/campos_512_v4
+150/763461/campos_512_v4
+150/763465/campos_512_v4
+150/763521/campos_512_v4
+150/763615/campos_512_v4
+150/763758/campos_512_v4
+150/763799/campos_512_v4
+150/763832/campos_512_v4
+150/764112/campos_512_v4
+150/764251/campos_512_v4
+150/764269/campos_512_v4
+150/764346/campos_512_v4
+150/764378/campos_512_v4
+150/764452/campos_512_v4
+150/764457/campos_512_v4
+150/764465/campos_512_v4
+150/764748/campos_512_v4
+150/764862/campos_512_v4
+151/765035/campos_512_v4
+151/765163/campos_512_v4
+151/765440/campos_512_v4
+151/765447/campos_512_v4
+151/765449/campos_512_v4
+151/765810/campos_512_v4
+151/765818/campos_512_v4
+151/765835/campos_512_v4
+151/766273/campos_512_v4
+151/766300/campos_512_v4
+151/766301/campos_512_v4
+151/766554/campos_512_v4
+151/766587/campos_512_v4
+151/766607/campos_512_v4
+151/766680/campos_512_v4
+151/766885/campos_512_v4
+151/766930/campos_512_v4
+151/766951/campos_512_v4
+151/767022/campos_512_v4
+151/767078/campos_512_v4
+151/767545/campos_512_v4
+151/767629/campos_512_v4
+151/767678/campos_512_v4
+151/767681/campos_512_v4
+151/767755/campos_512_v4
+151/767834/campos_512_v4
+151/767928/campos_512_v4
+151/768037/campos_512_v4
+151/768125/campos_512_v4
+151/768288/campos_512_v4
+151/768405/campos_512_v4
+151/768720/campos_512_v4
+151/769010/campos_512_v4
+151/769206/campos_512_v4
+151/769530/campos_512_v4
+151/769559/campos_512_v4
+151/769580/campos_512_v4
+151/769647/campos_512_v4
+151/769655/campos_512_v4
+151/769725/campos_512_v4
+151/769817/campos_512_v4
+151/769860/campos_512_v4
+152/770236/campos_512_v4
+152/770238/campos_512_v4
+152/770560/campos_512_v4
+152/770649/campos_512_v4
+152/771084/campos_512_v4
+152/771398/campos_512_v4
+152/771402/campos_512_v4
+152/771669/campos_512_v4
+152/771723/campos_512_v4
+152/771743/campos_512_v4
+152/771772/campos_512_v4
+152/771885/campos_512_v4
+152/771919/campos_512_v4
+152/772017/campos_512_v4
+152/772072/campos_512_v4
+152/772206/campos_512_v4
+152/772274/campos_512_v4
+152/772417/campos_512_v4
+152/772480/campos_512_v4
+152/772557/campos_512_v4
+152/772617/campos_512_v4
+152/772867/campos_512_v4
+152/772910/campos_512_v4
+152/773105/campos_512_v4
+152/773401/campos_512_v4
+152/773428/campos_512_v4
+152/773567/campos_512_v4
+152/773626/campos_512_v4
+152/773795/campos_512_v4
+152/773933/campos_512_v4
+152/774184/campos_512_v4
+152/774532/campos_512_v4
+152/774534/campos_512_v4
+152/774837/campos_512_v4
+152/774966/campos_512_v4
+153/775050/campos_512_v4
+153/775148/campos_512_v4
+153/775423/campos_512_v4
+153/775467/campos_512_v4
+153/775485/campos_512_v4
+153/775499/campos_512_v4
+153/775569/campos_512_v4
+153/775632/campos_512_v4
+153/775697/campos_512_v4
+153/775763/campos_512_v4
+153/775790/campos_512_v4
+153/775828/campos_512_v4
+153/776133/campos_512_v4
+153/776135/campos_512_v4
+153/776198/campos_512_v4
+153/776216/campos_512_v4
+153/776358/campos_512_v4
+153/776779/campos_512_v4
+153/776819/campos_512_v4
+153/776974/campos_512_v4
+153/777002/campos_512_v4
+153/777281/campos_512_v4
+153/777344/campos_512_v4
+153/777438/campos_512_v4
+153/777485/campos_512_v4
+153/777728/campos_512_v4
+153/778204/campos_512_v4
+153/778284/campos_512_v4
+153/778746/campos_512_v4
+153/778795/campos_512_v4
+153/778806/campos_512_v4
+153/778820/campos_512_v4
+153/778843/campos_512_v4
+153/779079/campos_512_v4
+153/779127/campos_512_v4
+153/779149/campos_512_v4
+153/779250/campos_512_v4
+153/779331/campos_512_v4
+153/779382/campos_512_v4
+153/779522/campos_512_v4
+153/779824/campos_512_v4
+153/779871/campos_512_v4
+153/779975/campos_512_v4
+154/780044/campos_512_v4
+154/780057/campos_512_v4
+154/780134/campos_512_v4
+154/780486/campos_512_v4
+154/780502/campos_512_v4
+154/780508/campos_512_v4
+154/780557/campos_512_v4
+154/780829/campos_512_v4
+154/781036/campos_512_v4
+154/781251/campos_512_v4
+154/781278/campos_512_v4
+154/781428/campos_512_v4
+154/781443/campos_512_v4
+154/781479/campos_512_v4
+154/781701/campos_512_v4
+154/781807/campos_512_v4
+154/782074/campos_512_v4
+154/782216/campos_512_v4
+154/782254/campos_512_v4
+154/782257/campos_512_v4
+154/782671/campos_512_v4
+154/782702/campos_512_v4
+154/782836/campos_512_v4
+154/782843/campos_512_v4
+154/782872/campos_512_v4
+154/782887/campos_512_v4
+154/782973/campos_512_v4
+154/783018/campos_512_v4
+154/783032/campos_512_v4
+154/783427/campos_512_v4
+154/783553/campos_512_v4
+154/783652/campos_512_v4
+154/783880/campos_512_v4
+154/783881/campos_512_v4
+154/783904/campos_512_v4
+154/784061/campos_512_v4
+154/784064/campos_512_v4
+154/784163/campos_512_v4
+154/784328/campos_512_v4
+154/784422/campos_512_v4
+154/784862/campos_512_v4
+154/784868/campos_512_v4
+154/784986/campos_512_v4
+155/785142/campos_512_v4
+155/785182/campos_512_v4
+155/785766/campos_512_v4
+155/785850/campos_512_v4
+155/786193/campos_512_v4
+155/786275/campos_512_v4
+155/786681/campos_512_v4
+155/786861/campos_512_v4
+155/786914/campos_512_v4
+155/787019/campos_512_v4
+155/787061/campos_512_v4
+155/787088/campos_512_v4
+155/787213/campos_512_v4
+155/787238/campos_512_v4
+155/787417/campos_512_v4
+155/787590/campos_512_v4
+155/787761/campos_512_v4
+155/787866/campos_512_v4
+155/788220/campos_512_v4
+155/788306/campos_512_v4
+155/788313/campos_512_v4
+155/788337/campos_512_v4
+155/788532/campos_512_v4
+155/788666/campos_512_v4
+155/789095/campos_512_v4
+155/789365/campos_512_v4
+155/789522/campos_512_v4
+155/789877/campos_512_v4
+155/789955/campos_512_v4
+156/790368/campos_512_v4
+156/790422/campos_512_v4
+156/790652/campos_512_v4
+156/790785/campos_512_v4
+156/790821/campos_512_v4
+156/790949/campos_512_v4
+156/791006/campos_512_v4
+156/791245/campos_512_v4
+156/791357/campos_512_v4
+156/791370/campos_512_v4
+156/791445/campos_512_v4
+156/791461/campos_512_v4
+156/791468/campos_512_v4
+156/791538/campos_512_v4
+156/791579/campos_512_v4
+156/791589/campos_512_v4
+156/791594/campos_512_v4
+156/791608/campos_512_v4
+156/791627/campos_512_v4
+156/791831/campos_512_v4
+156/791956/campos_512_v4
+156/791994/campos_512_v4
+156/792101/campos_512_v4
+156/792161/campos_512_v4
+156/792293/campos_512_v4
+156/792536/campos_512_v4
+156/792560/campos_512_v4
+156/792605/campos_512_v4
+156/792626/campos_512_v4
+156/792689/campos_512_v4
+156/792707/campos_512_v4
+156/792715/campos_512_v4
+156/792775/campos_512_v4
+156/792826/campos_512_v4
+156/792883/campos_512_v4
+156/792959/campos_512_v4
+156/792991/campos_512_v4
+156/793078/campos_512_v4
+156/793228/campos_512_v4
+156/793306/campos_512_v4
+156/793640/campos_512_v4
+156/793773/campos_512_v4
+156/793845/campos_512_v4
+156/793959/campos_512_v4
+156/794051/campos_512_v4
+156/794118/campos_512_v4
+156/794219/campos_512_v4
+156/794284/campos_512_v4
+156/794393/campos_512_v4
+156/794508/campos_512_v4
+156/794694/campos_512_v4
+156/794852/campos_512_v4
+156/794928/campos_512_v4
+157/795249/campos_512_v4
+157/795261/campos_512_v4
+157/795464/campos_512_v4
+157/795512/campos_512_v4
+157/795733/campos_512_v4
+157/795814/campos_512_v4
+157/795941/campos_512_v4
+157/796093/campos_512_v4
+157/796099/campos_512_v4
+157/796190/campos_512_v4
+157/796251/campos_512_v4
+157/796385/campos_512_v4
+157/796502/campos_512_v4
+157/796616/campos_512_v4
+157/796639/campos_512_v4
+157/796664/campos_512_v4
+157/796675/campos_512_v4
+157/796936/campos_512_v4
+157/797050/campos_512_v4
+157/797073/campos_512_v4
+157/797328/campos_512_v4
+157/797585/campos_512_v4
+157/797841/campos_512_v4
+157/797957/campos_512_v4
+157/797962/campos_512_v4
+157/797975/campos_512_v4
+157/798123/campos_512_v4
+157/798228/campos_512_v4
+157/798382/campos_512_v4
+157/798522/campos_512_v4
+157/798620/campos_512_v4
+157/798621/campos_512_v4
+157/798983/campos_512_v4
+157/799053/campos_512_v4
+157/799158/campos_512_v4
+157/799177/campos_512_v4
+157/799644/campos_512_v4
+157/799660/campos_512_v4
+157/799703/campos_512_v4
+157/799723/campos_512_v4
+157/799779/campos_512_v4
+157/799813/campos_512_v4
+158/800063/campos_512_v4
+158/800187/campos_512_v4
+158/800228/campos_512_v4
+158/800284/campos_512_v4
+158/800419/campos_512_v4
+158/800438/campos_512_v4
+158/800598/campos_512_v4
+158/800678/campos_512_v4
+158/800713/campos_512_v4
+158/800835/campos_512_v4
+158/801162/campos_512_v4
+158/801268/campos_512_v4
+158/801271/campos_512_v4
+158/801273/campos_512_v4
+158/801286/campos_512_v4
+158/801290/campos_512_v4
+158/801476/campos_512_v4
+158/801836/campos_512_v4
+158/802072/campos_512_v4
+158/802121/campos_512_v4
+158/802122/campos_512_v4
+158/802232/campos_512_v4
+158/802347/campos_512_v4
+158/802423/campos_512_v4
+158/802445/campos_512_v4
+158/802459/campos_512_v4
+158/802686/campos_512_v4
+158/802831/campos_512_v4
+158/803326/campos_512_v4
+158/803375/campos_512_v4
+158/803478/campos_512_v4
+158/803494/campos_512_v4
+158/803606/campos_512_v4
+158/803607/campos_512_v4
+158/803722/campos_512_v4
+158/803797/campos_512_v4
+158/803838/campos_512_v4
+158/803927/campos_512_v4
+158/803937/campos_512_v4
+158/804143/campos_512_v4
+158/804360/campos_512_v4
+158/804428/campos_512_v4
+158/804450/campos_512_v4
+158/804628/campos_512_v4
+158/804764/campos_512_v4
+158/804809/campos_512_v4
+158/804815/campos_512_v4
+159/805042/campos_512_v4
+159/805283/campos_512_v4
+159/805530/campos_512_v4
+159/805636/campos_512_v4
+159/806008/campos_512_v4
+159/806190/campos_512_v4
+159/806262/campos_512_v4
+159/806320/campos_512_v4
+159/806348/campos_512_v4
+159/806442/campos_512_v4
+159/806729/campos_512_v4
+159/806865/campos_512_v4
+159/807099/campos_512_v4
+159/807110/campos_512_v4
+159/807113/campos_512_v4
+159/807424/campos_512_v4
+159/807466/campos_512_v4
+159/807602/campos_512_v4
+159/807941/campos_512_v4
+159/808082/campos_512_v4
+159/808487/campos_512_v4
+159/808489/campos_512_v4
+159/808689/campos_512_v4
+16/90067/campos_512_v4
+16/90187/campos_512_v4
+16/90452/campos_512_v4
+16/90679/campos_512_v4
+16/90727/campos_512_v4
+16/90953/campos_512_v4
+16/90984/campos_512_v4
+16/91012/campos_512_v4
+16/91061/campos_512_v4
+16/91120/campos_512_v4
+16/91281/campos_512_v4
+16/91335/campos_512_v4
+16/91368/campos_512_v4
+16/91447/campos_512_v4
+16/91485/campos_512_v4
+16/91554/campos_512_v4
+16/92027/campos_512_v4
+16/92152/campos_512_v4
+16/92170/campos_512_v4
+16/92318/campos_512_v4
+16/92412/campos_512_v4
+16/92689/campos_512_v4
+16/93079/campos_512_v4
+16/93111/campos_512_v4
+16/93420/campos_512_v4
+16/93484/campos_512_v4
+16/93883/campos_512_v4
+16/93925/campos_512_v4
+16/93936/campos_512_v4
+16/93986/campos_512_v4
+16/94343/campos_512_v4
+16/94523/campos_512_v4
+16/94553/campos_512_v4
+16/94627/campos_512_v4
+16/94649/campos_512_v4
+16/94852/campos_512_v4
+16/94985/campos_512_v4
+16/95001/campos_512_v4
+17/95061/campos_512_v4
+17/95135/campos_512_v4
+17/95195/campos_512_v4
+17/95205/campos_512_v4
+17/95273/campos_512_v4
+17/95354/campos_512_v4
+17/95390/campos_512_v4
+17/95553/campos_512_v4
+17/95571/campos_512_v4
+17/95591/campos_512_v4
+17/95644/campos_512_v4
+17/95723/campos_512_v4
+17/95832/campos_512_v4
+17/95997/campos_512_v4
+17/96089/campos_512_v4
+17/96346/campos_512_v4
+17/96375/campos_512_v4
+17/96385/campos_512_v4
+17/96423/campos_512_v4
+17/96481/campos_512_v4
+17/96672/campos_512_v4
+17/96785/campos_512_v4
+17/96811/campos_512_v4
+17/96892/campos_512_v4
+17/96959/campos_512_v4
+17/97001/campos_512_v4
+17/97032/campos_512_v4
+17/97049/campos_512_v4
+17/97128/campos_512_v4
+17/97158/campos_512_v4
+17/97265/campos_512_v4
+17/97291/campos_512_v4
+17/97317/campos_512_v4
+17/97437/campos_512_v4
+17/97656/campos_512_v4
+17/97880/campos_512_v4
+17/97923/campos_512_v4
+17/97982/campos_512_v4
+17/98033/campos_512_v4
+17/98235/campos_512_v4
+17/98264/campos_512_v4
+17/98311/campos_512_v4
+17/98774/campos_512_v4
+17/98829/campos_512_v4
+17/98924/campos_512_v4
+17/99484/campos_512_v4
+17/99662/campos_512_v4
+17/99701/campos_512_v4
+17/99757/campos_512_v4
+17/99770/campos_512_v4
+17/99786/campos_512_v4
+17/99968/campos_512_v4
+2/20070/campos_512_v4
+2/20072/campos_512_v4
+2/20107/campos_512_v4
+2/20134/campos_512_v4
+2/20496/campos_512_v4
+2/20509/campos_512_v4
+2/20597/campos_512_v4
+2/20654/campos_512_v4
+2/20679/campos_512_v4
+2/20942/campos_512_v4
+2/21010/campos_512_v4
+2/21297/campos_512_v4
+2/21435/campos_512_v4
+2/21543/campos_512_v4
+2/21752/campos_512_v4
+2/22006/campos_512_v4
+2/22075/campos_512_v4
+2/22242/campos_512_v4
+2/22284/campos_512_v4
+2/22479/campos_512_v4
+2/22821/campos_512_v4
+2/22915/campos_512_v4
+2/23029/campos_512_v4
+2/23105/campos_512_v4
+2/23161/campos_512_v4
+2/23175/campos_512_v4
+2/23202/campos_512_v4
+2/23363/campos_512_v4
+2/23504/campos_512_v4
+2/23525/campos_512_v4
+2/23624/campos_512_v4
+2/23752/campos_512_v4
+2/23840/campos_512_v4
+2/23892/campos_512_v4
+2/23908/campos_512_v4
+2/24018/campos_512_v4
+2/24019/campos_512_v4
+2/24194/campos_512_v4
+2/24303/campos_512_v4
+2/24304/campos_512_v4
+2/24406/campos_512_v4
+2/24544/campos_512_v4
+2/24638/campos_512_v4
+2/24838/campos_512_v4
+2/24936/campos_512_v4
+23/125057/campos_512_v4
+23/125233/campos_512_v4
+23/125277/campos_512_v4
+23/125496/campos_512_v4
+23/125610/campos_512_v4
+23/125633/campos_512_v4
+23/125730/campos_512_v4
+23/125764/campos_512_v4
+23/125881/campos_512_v4
+23/125924/campos_512_v4
+23/125950/campos_512_v4
+23/126434/campos_512_v4
+23/126488/campos_512_v4
+23/126509/campos_512_v4
+23/127003/campos_512_v4
+23/127044/campos_512_v4
+23/127128/campos_512_v4
+23/127285/campos_512_v4
+23/127303/campos_512_v4
+23/127412/campos_512_v4
+23/127443/campos_512_v4
+23/127514/campos_512_v4
+23/127615/campos_512_v4
+23/127668/campos_512_v4
+23/127706/campos_512_v4
+23/127780/campos_512_v4
+23/128375/campos_512_v4
+23/128509/campos_512_v4
+23/128854/campos_512_v4
+23/128898/campos_512_v4
+23/128983/campos_512_v4
+23/129041/campos_512_v4
+23/129175/campos_512_v4
+23/129177/campos_512_v4
+23/129219/campos_512_v4
+23/129352/campos_512_v4
+23/129496/campos_512_v4
+23/129563/campos_512_v4
+23/129577/campos_512_v4
+23/129588/campos_512_v4
+23/129725/campos_512_v4
+23/129850/campos_512_v4
+23/129878/campos_512_v4
+23/129965/campos_512_v4
+24/130125/campos_512_v4
+24/130148/campos_512_v4
+24/130251/campos_512_v4
+24/130329/campos_512_v4
+24/130413/campos_512_v4
+24/130422/campos_512_v4
+24/130476/campos_512_v4
+24/130510/campos_512_v4
+24/130733/campos_512_v4
+24/130916/campos_512_v4
+24/130935/campos_512_v4
+24/130936/campos_512_v4
+24/130969/campos_512_v4
+24/130995/campos_512_v4
+24/131163/campos_512_v4
+24/131197/campos_512_v4
+24/131325/campos_512_v4
+24/131332/campos_512_v4
+24/131545/campos_512_v4
+24/131571/campos_512_v4
+24/131620/campos_512_v4
+24/131725/campos_512_v4
+24/131893/campos_512_v4
+24/131932/campos_512_v4
+24/131993/campos_512_v4
+24/131998/campos_512_v4
+24/132061/campos_512_v4
+24/132249/campos_512_v4
+24/132258/campos_512_v4
+24/132293/campos_512_v4
+24/132638/campos_512_v4
+24/132674/campos_512_v4
+24/132679/campos_512_v4
+24/132741/campos_512_v4
+24/132756/campos_512_v4
+24/132758/campos_512_v4
+24/132842/campos_512_v4
+24/132948/campos_512_v4
+24/132949/campos_512_v4
+24/132967/campos_512_v4
+24/132971/campos_512_v4
+24/133050/campos_512_v4
+24/133089/campos_512_v4
+24/133091/campos_512_v4
+24/133167/campos_512_v4
+24/133237/campos_512_v4
+24/133452/campos_512_v4
+24/133578/campos_512_v4
+24/133807/campos_512_v4
+24/133819/campos_512_v4
+24/133974/campos_512_v4
+24/134210/campos_512_v4
+24/134235/campos_512_v4
+24/134249/campos_512_v4
+24/134464/campos_512_v4
+24/134566/campos_512_v4
+24/134680/campos_512_v4
+24/134699/campos_512_v4
+24/134941/campos_512_v4
+24/134991/campos_512_v4
+25/135181/campos_512_v4
+25/135211/campos_512_v4
+25/135333/campos_512_v4
+25/135718/campos_512_v4
+25/135732/campos_512_v4
+25/135848/campos_512_v4
+25/135999/campos_512_v4
+25/136023/campos_512_v4
+25/136310/campos_512_v4
+25/136329/campos_512_v4
+25/136357/campos_512_v4
+25/136608/campos_512_v4
+25/136815/campos_512_v4
+25/137070/campos_512_v4
+25/137173/campos_512_v4
+25/137255/campos_512_v4
+25/137334/campos_512_v4
+25/137968/campos_512_v4
+25/137975/campos_512_v4
+25/138027/campos_512_v4
+25/138032/campos_512_v4
+25/138053/campos_512_v4
+25/138258/campos_512_v4
+25/138401/campos_512_v4
+25/138418/campos_512_v4
+25/138516/campos_512_v4
+25/138699/campos_512_v4
+25/138735/campos_512_v4
+25/138949/campos_512_v4
+25/139056/campos_512_v4
+25/139212/campos_512_v4
+25/139311/campos_512_v4
+25/139745/campos_512_v4
+25/139746/campos_512_v4
+25/139817/campos_512_v4
+25/139826/campos_512_v4
+25/139928/campos_512_v4
+25/139994/campos_512_v4
+26/140107/campos_512_v4
+26/140323/campos_512_v4
+26/140544/campos_512_v4
+26/140600/campos_512_v4
+26/140626/campos_512_v4
+26/140822/campos_512_v4
+26/140837/campos_512_v4
+26/141094/campos_512_v4
+26/141121/campos_512_v4
+26/141144/campos_512_v4
+26/141150/campos_512_v4
+26/141205/campos_512_v4
+26/141268/campos_512_v4
+26/141289/campos_512_v4
+26/141316/campos_512_v4
+26/141341/campos_512_v4
+26/141344/campos_512_v4
+26/141381/campos_512_v4
+26/141401/campos_512_v4
+26/141444/campos_512_v4
+26/141558/campos_512_v4
+26/141596/campos_512_v4
+26/141627/campos_512_v4
+26/141889/campos_512_v4
+26/141971/campos_512_v4
+26/142034/campos_512_v4
+26/142197/campos_512_v4
+26/142279/campos_512_v4
+26/142394/campos_512_v4
+26/142712/campos_512_v4
+26/142993/campos_512_v4
+26/143199/campos_512_v4
+26/143216/campos_512_v4
+26/143295/campos_512_v4
+26/143311/campos_512_v4
+26/143356/campos_512_v4
+26/143405/campos_512_v4
+26/143445/campos_512_v4
+26/143636/campos_512_v4
+26/143686/campos_512_v4
+26/143700/campos_512_v4
+26/143892/campos_512_v4
+26/143975/campos_512_v4
+26/143995/campos_512_v4
+26/144099/campos_512_v4
+26/144172/campos_512_v4
+26/144407/campos_512_v4
+26/144472/campos_512_v4
+26/144586/campos_512_v4
+26/144693/campos_512_v4
+26/144752/campos_512_v4
+26/144780/campos_512_v4
+26/144796/campos_512_v4
+26/144820/campos_512_v4
+27/145252/campos_512_v4
+27/145410/campos_512_v4
+27/145471/campos_512_v4
+27/145724/campos_512_v4
+27/145736/campos_512_v4
+27/145856/campos_512_v4
+27/145888/campos_512_v4
+27/145909/campos_512_v4
+27/146017/campos_512_v4
+27/146102/campos_512_v4
+27/146813/campos_512_v4
+27/146913/campos_512_v4
+27/147010/campos_512_v4
+27/147024/campos_512_v4
+27/147134/campos_512_v4
+27/147148/campos_512_v4
+27/147179/campos_512_v4
+27/147315/campos_512_v4
+27/147522/campos_512_v4
+27/147684/campos_512_v4
+27/147829/campos_512_v4
+27/147917/campos_512_v4
+27/147942/campos_512_v4
+27/148082/campos_512_v4
+27/148161/campos_512_v4
+27/148357/campos_512_v4
+27/148508/campos_512_v4
+27/148548/campos_512_v4
+27/148585/campos_512_v4
+27/148680/campos_512_v4
+27/149120/campos_512_v4
+27/149130/campos_512_v4
+27/149356/campos_512_v4
+27/149384/campos_512_v4
+27/149497/campos_512_v4
+27/149626/campos_512_v4
+27/149698/campos_512_v4
+27/149792/campos_512_v4
+27/149812/campos_512_v4
+28/150004/campos_512_v4
+28/150008/campos_512_v4
+28/150086/campos_512_v4
+28/150135/campos_512_v4
+28/150178/campos_512_v4
+28/150275/campos_512_v4
+28/150597/campos_512_v4
+28/150937/campos_512_v4
+28/151010/campos_512_v4
+28/151123/campos_512_v4
+28/151210/campos_512_v4
+28/151222/campos_512_v4
+28/151328/campos_512_v4
+28/151378/campos_512_v4
+28/151433/campos_512_v4
+28/151461/campos_512_v4
+28/151481/campos_512_v4
+28/151559/campos_512_v4
+28/151654/campos_512_v4
+28/151812/campos_512_v4
+28/151880/campos_512_v4
+28/151884/campos_512_v4
+28/151905/campos_512_v4
+28/151986/campos_512_v4
+28/152003/campos_512_v4
+28/152068/campos_512_v4
+28/152076/campos_512_v4
+28/152110/campos_512_v4
+28/152112/campos_512_v4
+28/152119/campos_512_v4
+28/152122/campos_512_v4
+28/152241/campos_512_v4
+28/152273/campos_512_v4
+28/152365/campos_512_v4
+28/152398/campos_512_v4
+28/152543/campos_512_v4
+28/152595/campos_512_v4
+28/152701/campos_512_v4
+28/152747/campos_512_v4
+28/153181/campos_512_v4
+28/153306/campos_512_v4
+28/153385/campos_512_v4
+28/153528/campos_512_v4
+28/153840/campos_512_v4
+28/153967/campos_512_v4
+28/153996/campos_512_v4
+28/154064/campos_512_v4
+28/154092/campos_512_v4
+28/154180/campos_512_v4
+28/154212/campos_512_v4
+28/154379/campos_512_v4
+28/154390/campos_512_v4
+28/154561/campos_512_v4
+28/154788/campos_512_v4
+28/154793/campos_512_v4
+28/154816/campos_512_v4
+28/154969/campos_512_v4
+28/154976/campos_512_v4
+29/155298/campos_512_v4
+29/155414/campos_512_v4
+29/155522/campos_512_v4
+29/155578/campos_512_v4
+29/155635/campos_512_v4
+29/155643/campos_512_v4
+29/155826/campos_512_v4
+29/155863/campos_512_v4
+29/156121/campos_512_v4
+29/156128/campos_512_v4
+29/156191/campos_512_v4
+29/156215/campos_512_v4
+29/156219/campos_512_v4
+29/156319/campos_512_v4
+29/156348/campos_512_v4
+29/156542/campos_512_v4
+29/156615/campos_512_v4
+29/156645/campos_512_v4
+29/156708/campos_512_v4
+29/156719/campos_512_v4
+29/156771/campos_512_v4
+29/156853/campos_512_v4
+29/157189/campos_512_v4
+29/157370/campos_512_v4
+29/157375/campos_512_v4
+29/157812/campos_512_v4
+29/157990/campos_512_v4
+29/158056/campos_512_v4
+29/158083/campos_512_v4
+29/158175/campos_512_v4
+29/158326/campos_512_v4
+29/158498/campos_512_v4
+29/158560/campos_512_v4
+29/158585/campos_512_v4
+29/158590/campos_512_v4
+29/158688/campos_512_v4
+29/158720/campos_512_v4
+29/158828/campos_512_v4
+29/159115/campos_512_v4
+29/159134/campos_512_v4
+29/159163/campos_512_v4
+29/159238/campos_512_v4
+29/159647/campos_512_v4
+29/159758/campos_512_v4
+29/159803/campos_512_v4
+29/159894/campos_512_v4
+29/159958/campos_512_v4
+30/160057/campos_512_v4
+30/160197/campos_512_v4
+30/160230/campos_512_v4
+30/160253/campos_512_v4
+30/160278/campos_512_v4
+30/160331/campos_512_v4
+30/160339/campos_512_v4
+30/160596/campos_512_v4
+30/160800/campos_512_v4
+30/160818/campos_512_v4
+30/160943/campos_512_v4
+30/161025/campos_512_v4
+30/161086/campos_512_v4
+30/161138/campos_512_v4
+30/161147/campos_512_v4
+30/161285/campos_512_v4
+30/161289/campos_512_v4
+30/161494/campos_512_v4
+30/161600/campos_512_v4
+30/161609/campos_512_v4
+30/161610/campos_512_v4
+30/161788/campos_512_v4
+30/161845/campos_512_v4
+30/162088/campos_512_v4
+30/162126/campos_512_v4
+30/162380/campos_512_v4
+30/162384/campos_512_v4
+30/162443/campos_512_v4
+30/162446/campos_512_v4
+30/162723/campos_512_v4
+30/162749/campos_512_v4
+30/162755/campos_512_v4
+30/162802/campos_512_v4
+30/162909/campos_512_v4
+30/162978/campos_512_v4
+30/163026/campos_512_v4
+30/163282/campos_512_v4
+30/163366/campos_512_v4
+30/163610/campos_512_v4
+30/163678/campos_512_v4
+30/163769/campos_512_v4
+30/163855/campos_512_v4
+30/163858/campos_512_v4
+30/163869/campos_512_v4
+30/163916/campos_512_v4
+30/163993/campos_512_v4
+30/164088/campos_512_v4
+30/164094/campos_512_v4
+30/164145/campos_512_v4
+30/164256/campos_512_v4
+30/164305/campos_512_v4
+30/164425/campos_512_v4
+30/164559/campos_512_v4
+30/164577/campos_512_v4
+30/164664/campos_512_v4
+30/164852/campos_512_v4
+30/164875/campos_512_v4
+31/165356/campos_512_v4
+31/165431/campos_512_v4
+31/165589/campos_512_v4
+31/165748/campos_512_v4
+31/165964/campos_512_v4
+31/166205/campos_512_v4
+31/166214/campos_512_v4
+31/166274/campos_512_v4
+31/166566/campos_512_v4
+31/166570/campos_512_v4
+31/166797/campos_512_v4
+31/166864/campos_512_v4
+31/167036/campos_512_v4
+31/167131/campos_512_v4
+31/167149/campos_512_v4
+31/167182/campos_512_v4
+31/167537/campos_512_v4
+31/167602/campos_512_v4
+31/168042/campos_512_v4
+31/168374/campos_512_v4
+31/168417/campos_512_v4
+31/168515/campos_512_v4
+31/168905/campos_512_v4
+31/169088/campos_512_v4
+31/169095/campos_512_v4
+31/169481/campos_512_v4
+31/169513/campos_512_v4
+31/169664/campos_512_v4
+31/169725/campos_512_v4
+31/169810/campos_512_v4
+31/169830/campos_512_v4
+32/170013/campos_512_v4
+32/170163/campos_512_v4
+32/170239/campos_512_v4
+32/170244/campos_512_v4
+32/170382/campos_512_v4
+32/170393/campos_512_v4
+32/170435/campos_512_v4
+32/170584/campos_512_v4
+32/170761/campos_512_v4
+32/170784/campos_512_v4
+32/170927/campos_512_v4
+32/171011/campos_512_v4
+32/171022/campos_512_v4
+32/171042/campos_512_v4
+32/171056/campos_512_v4
+32/171153/campos_512_v4
+32/171177/campos_512_v4
+32/171215/campos_512_v4
+32/171342/campos_512_v4
+32/171349/campos_512_v4
+32/171461/campos_512_v4
+32/171559/campos_512_v4
+32/171584/campos_512_v4
+32/171825/campos_512_v4
+32/171982/campos_512_v4
+32/172088/campos_512_v4
+32/172122/campos_512_v4
+32/172213/campos_512_v4
+32/172262/campos_512_v4
+32/172442/campos_512_v4
+32/172587/campos_512_v4
+32/172614/campos_512_v4
+32/172813/campos_512_v4
+32/172821/campos_512_v4
+32/172899/campos_512_v4
+32/173078/campos_512_v4
+32/173134/campos_512_v4
+32/173238/campos_512_v4
+32/173250/campos_512_v4
+32/173308/campos_512_v4
+32/173326/campos_512_v4
+32/173550/campos_512_v4
+32/173675/campos_512_v4
+32/173821/campos_512_v4
+32/173938/campos_512_v4
+32/173983/campos_512_v4
+32/174067/campos_512_v4
+32/174078/campos_512_v4
+32/174195/campos_512_v4
+32/174245/campos_512_v4
+32/174437/campos_512_v4
+32/174574/campos_512_v4
+32/174672/campos_512_v4
+32/174836/campos_512_v4
+32/174995/campos_512_v4
+33/175178/campos_512_v4
+33/175305/campos_512_v4
+33/175350/campos_512_v4
+33/175441/campos_512_v4
+33/175473/campos_512_v4
+33/175725/campos_512_v4
+33/175735/campos_512_v4
+33/176065/campos_512_v4
+33/176145/campos_512_v4
+33/176240/campos_512_v4
+33/176413/campos_512_v4
+33/176457/campos_512_v4
+33/176598/campos_512_v4
+33/176614/campos_512_v4
+33/176673/campos_512_v4
+33/176843/campos_512_v4
+33/177024/campos_512_v4
+33/177029/campos_512_v4
+33/177209/campos_512_v4
+33/177366/campos_512_v4
+33/177732/campos_512_v4
+33/177849/campos_512_v4
+33/177853/campos_512_v4
+33/177909/campos_512_v4
+33/178033/campos_512_v4
+33/178090/campos_512_v4
+33/178201/campos_512_v4
+33/178219/campos_512_v4
+33/178290/campos_512_v4
+33/178388/campos_512_v4
+33/178745/campos_512_v4
+33/178813/campos_512_v4
+33/178861/campos_512_v4
+33/178910/campos_512_v4
+33/178990/campos_512_v4
+33/179158/campos_512_v4
+33/179187/campos_512_v4
+33/179342/campos_512_v4
+33/179489/campos_512_v4
+33/179500/campos_512_v4
+33/179533/campos_512_v4
+33/179713/campos_512_v4
+33/179743/campos_512_v4
+33/179756/campos_512_v4
+33/179780/campos_512_v4
+33/179961/campos_512_v4
+33/179967/campos_512_v4
+34/180018/campos_512_v4
+34/180087/campos_512_v4
+34/180148/campos_512_v4
+34/180205/campos_512_v4
+34/180273/campos_512_v4
+34/180475/campos_512_v4
+34/180540/campos_512_v4
+34/180620/campos_512_v4
+34/180630/campos_512_v4
+34/180665/campos_512_v4
+34/180781/campos_512_v4
+34/180838/campos_512_v4
+34/180935/campos_512_v4
+34/181234/campos_512_v4
+34/181241/campos_512_v4
+34/181736/campos_512_v4
+34/181780/campos_512_v4
+34/181813/campos_512_v4
+34/181848/campos_512_v4
+34/181994/campos_512_v4
+34/182077/campos_512_v4
+34/182502/campos_512_v4
+34/182513/campos_512_v4
+34/182556/campos_512_v4
+34/182580/campos_512_v4
+34/182591/campos_512_v4
+34/182835/campos_512_v4
+34/182856/campos_512_v4
+34/182920/campos_512_v4
+34/182929/campos_512_v4
+34/183115/campos_512_v4
+34/183556/campos_512_v4
+34/183568/campos_512_v4
+34/184121/campos_512_v4
+34/184343/campos_512_v4
+34/184575/campos_512_v4
+34/184725/campos_512_v4
+34/184859/campos_512_v4
+34/184914/campos_512_v4
+34/184942/campos_512_v4
+34/184966/campos_512_v4
+35/185101/campos_512_v4
+35/185396/campos_512_v4
+35/185479/campos_512_v4
+35/185527/campos_512_v4
+35/185557/campos_512_v4
+35/185562/campos_512_v4
+35/185606/campos_512_v4
+35/185755/campos_512_v4
+35/186225/campos_512_v4
+35/186228/campos_512_v4
+35/186233/campos_512_v4
+35/186372/campos_512_v4
+35/186456/campos_512_v4
+35/186676/campos_512_v4
+35/186692/campos_512_v4
+35/186859/campos_512_v4
+35/186932/campos_512_v4
+35/186979/campos_512_v4
+35/187080/campos_512_v4
+35/187491/campos_512_v4
+35/187496/campos_512_v4
+35/187677/campos_512_v4
+35/187769/campos_512_v4
+35/187820/campos_512_v4
+35/187929/campos_512_v4
+35/188013/campos_512_v4
+35/188142/campos_512_v4
+35/188204/campos_512_v4
+35/188236/campos_512_v4
+35/188292/campos_512_v4
+35/188303/campos_512_v4
+35/188329/campos_512_v4
+35/188330/campos_512_v4
+35/188395/campos_512_v4
+35/188401/campos_512_v4
+35/188439/campos_512_v4
+35/188476/campos_512_v4
+35/188560/campos_512_v4
+35/188614/campos_512_v4
+35/188738/campos_512_v4
+35/188742/campos_512_v4
+35/188905/campos_512_v4
+35/189159/campos_512_v4
+35/189241/campos_512_v4
+35/189301/campos_512_v4
+35/189402/campos_512_v4
+35/189498/campos_512_v4
+35/189612/campos_512_v4
+35/189634/campos_512_v4
+35/189754/campos_512_v4
+35/189935/campos_512_v4
+35/189987/campos_512_v4
+36/190047/campos_512_v4
+36/190188/campos_512_v4
+36/190221/campos_512_v4
+36/190367/campos_512_v4
+36/190438/campos_512_v4
+36/190780/campos_512_v4
+36/190978/campos_512_v4
+36/191067/campos_512_v4
+36/191088/campos_512_v4
+36/191100/campos_512_v4
+36/191250/campos_512_v4
+36/191462/campos_512_v4
+36/191494/campos_512_v4
+36/191550/campos_512_v4
+36/191708/campos_512_v4
+36/191796/campos_512_v4
+36/191811/campos_512_v4
+36/191996/campos_512_v4
+36/192119/campos_512_v4
+36/192233/campos_512_v4
+36/192333/campos_512_v4
+36/192445/campos_512_v4
+36/192481/campos_512_v4
+36/192603/campos_512_v4
+36/192675/campos_512_v4
+36/192684/campos_512_v4
+36/192689/campos_512_v4
+36/192760/campos_512_v4
+36/192858/campos_512_v4
+36/192879/campos_512_v4
+36/192933/campos_512_v4
+36/193029/campos_512_v4
+36/193229/campos_512_v4
+36/193335/campos_512_v4
+36/193342/campos_512_v4
+36/193420/campos_512_v4
+36/193428/campos_512_v4
+36/193503/campos_512_v4
+36/193548/campos_512_v4
+36/193675/campos_512_v4
+36/193676/campos_512_v4
+36/193759/campos_512_v4
+36/193766/campos_512_v4
+36/193785/campos_512_v4
+36/193887/campos_512_v4
+36/193948/campos_512_v4
+36/193963/campos_512_v4
+36/194144/campos_512_v4
+36/194180/campos_512_v4
+36/194318/campos_512_v4
+36/194478/campos_512_v4
+36/194946/campos_512_v4
+36/194970/campos_512_v4
+37/195032/campos_512_v4
+37/195085/campos_512_v4
+37/195220/campos_512_v4
+37/195265/campos_512_v4
+37/195282/campos_512_v4
+37/195404/campos_512_v4
+37/195539/campos_512_v4
+37/195650/campos_512_v4
+37/195652/campos_512_v4
+37/195749/campos_512_v4
+37/195761/campos_512_v4
+37/195822/campos_512_v4
+37/196016/campos_512_v4
+37/196048/campos_512_v4
+37/196069/campos_512_v4
+37/196112/campos_512_v4
+37/196186/campos_512_v4
+37/196205/campos_512_v4
+37/196232/campos_512_v4
+37/196483/campos_512_v4
+37/196596/campos_512_v4
+37/196729/campos_512_v4
+37/196741/campos_512_v4
+37/196749/campos_512_v4
+37/196831/campos_512_v4
+37/197040/campos_512_v4
+37/197063/campos_512_v4
+37/197128/campos_512_v4
+37/197293/campos_512_v4
+37/197387/campos_512_v4
+37/197458/campos_512_v4
+37/197741/campos_512_v4
+37/197994/campos_512_v4
+37/198040/campos_512_v4
+37/198054/campos_512_v4
+37/198199/campos_512_v4
+37/198306/campos_512_v4
+37/198379/campos_512_v4
+37/198456/campos_512_v4
+37/198485/campos_512_v4
+37/198665/campos_512_v4
+37/198701/campos_512_v4
+37/198796/campos_512_v4
+37/199282/campos_512_v4
+37/199441/campos_512_v4
+37/199516/campos_512_v4
+37/199669/campos_512_v4
+37/199703/campos_512_v4
+38/200118/campos_512_v4
+38/200228/campos_512_v4
+38/200516/campos_512_v4
+38/200846/campos_512_v4
+38/200864/campos_512_v4
+38/200989/campos_512_v4
+38/201223/campos_512_v4
+38/201266/campos_512_v4
+38/201330/campos_512_v4
+38/201366/campos_512_v4
+38/201395/campos_512_v4
+38/201467/campos_512_v4
+38/201610/campos_512_v4
+38/201939/campos_512_v4
+38/201943/campos_512_v4
+38/202089/campos_512_v4
+38/202205/campos_512_v4
+38/202239/campos_512_v4
+38/202501/campos_512_v4
+38/202512/campos_512_v4
+38/202519/campos_512_v4
+38/202634/campos_512_v4
+38/202685/campos_512_v4
+38/202779/campos_512_v4
+38/202817/campos_512_v4
+38/202949/campos_512_v4
+38/202965/campos_512_v4
+38/202969/campos_512_v4
+38/203025/campos_512_v4
+38/203119/campos_512_v4
+38/203227/campos_512_v4
+38/203262/campos_512_v4
+38/203271/campos_512_v4
+38/203701/campos_512_v4
+38/203757/campos_512_v4
+38/203805/campos_512_v4
+38/203958/campos_512_v4
+38/204071/campos_512_v4
+38/204109/campos_512_v4
+38/204127/campos_512_v4
+38/204482/campos_512_v4
+38/204500/campos_512_v4
+38/204622/campos_512_v4
+38/204713/campos_512_v4
+38/204941/campos_512_v4
+4/30030/campos_512_v4
+4/30052/campos_512_v4
+4/30182/campos_512_v4
+4/30389/campos_512_v4
+4/30432/campos_512_v4
+4/30477/campos_512_v4
+4/30486/campos_512_v4
+4/30505/campos_512_v4
+4/30923/campos_512_v4
+4/30991/campos_512_v4
+4/31004/campos_512_v4
+4/31053/campos_512_v4
+4/31145/campos_512_v4
+4/31197/campos_512_v4
+4/31238/campos_512_v4
+4/31270/campos_512_v4
+4/31293/campos_512_v4
+4/31791/campos_512_v4
+4/31831/campos_512_v4
+4/31899/campos_512_v4
+4/31968/campos_512_v4
+4/32093/campos_512_v4
+4/32278/campos_512_v4
+4/32322/campos_512_v4
+4/32360/campos_512_v4
+4/32470/campos_512_v4
+4/32526/campos_512_v4
+4/32595/campos_512_v4
+4/32717/campos_512_v4
+4/32793/campos_512_v4
+4/32930/campos_512_v4
+4/33053/campos_512_v4
+4/33067/campos_512_v4
+4/33172/campos_512_v4
+4/33302/campos_512_v4
+4/33381/campos_512_v4
+4/33439/campos_512_v4
+4/33508/campos_512_v4
+4/33542/campos_512_v4
+4/33650/campos_512_v4
+4/33656/campos_512_v4
+4/33752/campos_512_v4
+4/33761/campos_512_v4
+4/33995/campos_512_v4
+4/34000/campos_512_v4
+4/34113/campos_512_v4
+4/34203/campos_512_v4
+4/34226/campos_512_v4
+4/34228/campos_512_v4
+4/34248/campos_512_v4
+4/34368/campos_512_v4
+4/34391/campos_512_v4
+4/34706/campos_512_v4
+4/34738/campos_512_v4
+4/34739/campos_512_v4
+4/34775/campos_512_v4
+4/34907/campos_512_v4
+40/210142/campos_512_v4
+40/210192/campos_512_v4
+40/210307/campos_512_v4
+40/210407/campos_512_v4
+40/210409/campos_512_v4
+40/210412/campos_512_v4
+40/210419/campos_512_v4
+40/210491/campos_512_v4
+40/210568/campos_512_v4
+40/210634/campos_512_v4
+40/211007/campos_512_v4
+40/211044/campos_512_v4
+40/211065/campos_512_v4
+40/211389/campos_512_v4
+40/211432/campos_512_v4
+40/211451/campos_512_v4
+40/211613/campos_512_v4
+40/211615/campos_512_v4
+40/211920/campos_512_v4
+40/211999/campos_512_v4
+40/212052/campos_512_v4
+40/212161/campos_512_v4
+40/212284/campos_512_v4
+40/212408/campos_512_v4
+40/212423/campos_512_v4
+40/212487/campos_512_v4
+40/212515/campos_512_v4
+40/212691/campos_512_v4
+40/212713/campos_512_v4
+40/212869/campos_512_v4
+40/212964/campos_512_v4
+40/213013/campos_512_v4
+40/213045/campos_512_v4
+40/213341/campos_512_v4
+40/213722/campos_512_v4
+40/213724/campos_512_v4
+40/213850/campos_512_v4
+40/214076/campos_512_v4
+40/214097/campos_512_v4
+40/214127/campos_512_v4
+40/214267/campos_512_v4
+40/214269/campos_512_v4
+40/214396/campos_512_v4
+40/214405/campos_512_v4
+40/214407/campos_512_v4
+40/214507/campos_512_v4
+40/214533/campos_512_v4
+40/214548/campos_512_v4
+40/214580/campos_512_v4
+40/214813/campos_512_v4
+40/214824/campos_512_v4
+40/214867/campos_512_v4
+41/215335/campos_512_v4
+41/215399/campos_512_v4
+41/215481/campos_512_v4
+41/215603/campos_512_v4
+41/215729/campos_512_v4
+41/215768/campos_512_v4
+41/215810/campos_512_v4
+41/215813/campos_512_v4
+41/216037/campos_512_v4
+41/216083/campos_512_v4
+41/216109/campos_512_v4
+41/216225/campos_512_v4
+41/216276/campos_512_v4
+41/216283/campos_512_v4
+41/216337/campos_512_v4
+41/216427/campos_512_v4
+41/216504/campos_512_v4
+41/216511/campos_512_v4
+41/216592/campos_512_v4
+41/216677/campos_512_v4
+41/216863/campos_512_v4
+41/216998/campos_512_v4
+41/217005/campos_512_v4
+41/217027/campos_512_v4
+41/217133/campos_512_v4
+41/217148/campos_512_v4
+41/217345/campos_512_v4
+41/217397/campos_512_v4
+41/217431/campos_512_v4
+41/217476/campos_512_v4
+41/217488/campos_512_v4
+41/217515/campos_512_v4
+41/217627/campos_512_v4
+41/217673/campos_512_v4
+41/217833/campos_512_v4
+41/217867/campos_512_v4
+41/218310/campos_512_v4
+41/218367/campos_512_v4
+41/218408/campos_512_v4
+41/218659/campos_512_v4
+41/218687/campos_512_v4
+41/218759/campos_512_v4
+41/218797/campos_512_v4
+41/219023/campos_512_v4
+41/219524/campos_512_v4
+41/219533/campos_512_v4
+41/219580/campos_512_v4
+41/219697/campos_512_v4
+41/219715/campos_512_v4
+41/219720/campos_512_v4
+41/219722/campos_512_v4
+41/219814/campos_512_v4
+41/219891/campos_512_v4
+41/219979/campos_512_v4
+42/220143/campos_512_v4
+42/220229/campos_512_v4
+42/220244/campos_512_v4
+42/220270/campos_512_v4
+42/220482/campos_512_v4
+42/220512/campos_512_v4
+42/220740/campos_512_v4
+42/220816/campos_512_v4
+42/220963/campos_512_v4
+42/220978/campos_512_v4
+42/221007/campos_512_v4
+42/221081/campos_512_v4
+42/221110/campos_512_v4
+42/221330/campos_512_v4
+42/221431/campos_512_v4
+42/221477/campos_512_v4
+42/221697/campos_512_v4
+42/221701/campos_512_v4
+42/221752/campos_512_v4
+42/221833/campos_512_v4
+42/221908/campos_512_v4
+42/222025/campos_512_v4
+42/222067/campos_512_v4
+42/222303/campos_512_v4
+42/222684/campos_512_v4
+42/222719/campos_512_v4
+42/222845/campos_512_v4
+42/222846/campos_512_v4
+42/222930/campos_512_v4
+42/222943/campos_512_v4
+42/222945/campos_512_v4
+42/222978/campos_512_v4
+42/223030/campos_512_v4
+42/223115/campos_512_v4
+42/223130/campos_512_v4
+42/223253/campos_512_v4
+42/223515/campos_512_v4
+42/223606/campos_512_v4
+42/223714/campos_512_v4
+42/223778/campos_512_v4
+42/223874/campos_512_v4
+42/223921/campos_512_v4
+42/224072/campos_512_v4
+42/224097/campos_512_v4
+42/224111/campos_512_v4
+42/224289/campos_512_v4
+42/224519/campos_512_v4
+42/224754/campos_512_v4
+42/224761/campos_512_v4
+42/224772/campos_512_v4
+42/224786/campos_512_v4
+42/224870/campos_512_v4
+43/225065/campos_512_v4
+43/225115/campos_512_v4
+43/225141/campos_512_v4
+43/225169/campos_512_v4
+43/225202/campos_512_v4
+43/225414/campos_512_v4
+43/225635/campos_512_v4
+43/225643/campos_512_v4
+43/225652/campos_512_v4
+43/225680/campos_512_v4
+43/225707/campos_512_v4
+43/225968/campos_512_v4
+43/226193/campos_512_v4
+43/226334/campos_512_v4
+43/226412/campos_512_v4
+43/226563/campos_512_v4
+43/226597/campos_512_v4
+43/226601/campos_512_v4
+43/226762/campos_512_v4
+43/226987/campos_512_v4
+43/226992/campos_512_v4
+43/227180/campos_512_v4
+43/227220/campos_512_v4
+43/227396/campos_512_v4
+43/227545/campos_512_v4
+43/227716/campos_512_v4
+43/227730/campos_512_v4
+43/227759/campos_512_v4
+43/227862/campos_512_v4
+43/227868/campos_512_v4
+43/227875/campos_512_v4
+43/227881/campos_512_v4
+43/227930/campos_512_v4
+43/228020/campos_512_v4
+43/228045/campos_512_v4
+43/228048/campos_512_v4
+43/228287/campos_512_v4
+43/228289/campos_512_v4
+43/228413/campos_512_v4
+43/228418/campos_512_v4
+43/228631/campos_512_v4
+43/228749/campos_512_v4
+43/228757/campos_512_v4
+43/228817/campos_512_v4
+43/228905/campos_512_v4
+43/229067/campos_512_v4
+43/229167/campos_512_v4
+43/229432/campos_512_v4
+43/229598/campos_512_v4
+43/229673/campos_512_v4
+43/229790/campos_512_v4
+43/229851/campos_512_v4
+44/230080/campos_512_v4
+44/230165/campos_512_v4
+44/230220/campos_512_v4
+44/230228/campos_512_v4
+44/230288/campos_512_v4
+44/230500/campos_512_v4
+44/230513/campos_512_v4
+44/230774/campos_512_v4
+44/230821/campos_512_v4
+44/230869/campos_512_v4
+44/230930/campos_512_v4
+44/230932/campos_512_v4
+44/230985/campos_512_v4
+44/230998/campos_512_v4
+44/231049/campos_512_v4
+44/231247/campos_512_v4
+44/231329/campos_512_v4
+44/231398/campos_512_v4
+44/231556/campos_512_v4
+44/231695/campos_512_v4
+44/231771/campos_512_v4
+44/231795/campos_512_v4
+44/231977/campos_512_v4
+44/232129/campos_512_v4
+44/232310/campos_512_v4
+44/232325/campos_512_v4
+44/232343/campos_512_v4
+44/232414/campos_512_v4
+44/232475/campos_512_v4
+44/232661/campos_512_v4
+44/232671/campos_512_v4
+44/232903/campos_512_v4
+44/232915/campos_512_v4
+44/233177/campos_512_v4
+44/233383/campos_512_v4
+44/233389/campos_512_v4
+44/233465/campos_512_v4
+44/233472/campos_512_v4
+44/233500/campos_512_v4
+44/233504/campos_512_v4
+44/233601/campos_512_v4
+44/233643/campos_512_v4
+44/233949/campos_512_v4
+44/234077/campos_512_v4
+44/234086/campos_512_v4
+44/234111/campos_512_v4
+44/234224/campos_512_v4
+44/234316/campos_512_v4
+44/234370/campos_512_v4
+44/234454/campos_512_v4
+44/234549/campos_512_v4
+44/234698/campos_512_v4
+44/234760/campos_512_v4
+44/234875/campos_512_v4
+44/234878/campos_512_v4
+44/234946/campos_512_v4
+45/235028/campos_512_v4
+45/235072/campos_512_v4
+45/235197/campos_512_v4
+45/235243/campos_512_v4
+45/235368/campos_512_v4
+45/235501/campos_512_v4
+45/235572/campos_512_v4
+45/235582/campos_512_v4
+45/235801/campos_512_v4
+45/235992/campos_512_v4
+45/236175/campos_512_v4
+45/236188/campos_512_v4
+45/236214/campos_512_v4
+45/236218/campos_512_v4
+45/236278/campos_512_v4
+45/236372/campos_512_v4
+45/236386/campos_512_v4
+45/236470/campos_512_v4
+45/236513/campos_512_v4
+45/236554/campos_512_v4
+45/236802/campos_512_v4
+45/236837/campos_512_v4
+45/236855/campos_512_v4
+45/237071/campos_512_v4
+45/237296/campos_512_v4
+45/237324/campos_512_v4
+45/237365/campos_512_v4
+45/237384/campos_512_v4
+45/237460/campos_512_v4
+45/237566/campos_512_v4
+45/237700/campos_512_v4
+45/237713/campos_512_v4
+45/237725/campos_512_v4
+45/237963/campos_512_v4
+45/237984/campos_512_v4
+45/238357/campos_512_v4
+45/238361/campos_512_v4
+45/238590/campos_512_v4
+45/238624/campos_512_v4
+45/238865/campos_512_v4
+45/239265/campos_512_v4
+45/239380/campos_512_v4
+45/239401/campos_512_v4
+45/239564/campos_512_v4
+45/239616/campos_512_v4
+45/239648/campos_512_v4
+45/239769/campos_512_v4
+45/239797/campos_512_v4
+46/240164/campos_512_v4
+46/240248/campos_512_v4
+46/240352/campos_512_v4
+46/241058/campos_512_v4
+46/241079/campos_512_v4
+46/241087/campos_512_v4
+46/241129/campos_512_v4
+46/241192/campos_512_v4
+46/241215/campos_512_v4
+46/241229/campos_512_v4
+46/241234/campos_512_v4
+46/241251/campos_512_v4
+46/241308/campos_512_v4
+46/241355/campos_512_v4
+46/241399/campos_512_v4
+46/241417/campos_512_v4
+46/241656/campos_512_v4
+46/241692/campos_512_v4
+46/241824/campos_512_v4
+46/241833/campos_512_v4
+46/241838/campos_512_v4
+46/241970/campos_512_v4
+46/242099/campos_512_v4
+46/242182/campos_512_v4
+46/242214/campos_512_v4
+46/242357/campos_512_v4
+46/242444/campos_512_v4
+46/242489/campos_512_v4
+46/242627/campos_512_v4
+46/242709/campos_512_v4
+46/242855/campos_512_v4
+46/242921/campos_512_v4
+46/242974/campos_512_v4
+46/242980/campos_512_v4
+46/243136/campos_512_v4
+46/243154/campos_512_v4
+46/243290/campos_512_v4
+46/243553/campos_512_v4
+46/243615/campos_512_v4
+46/244146/campos_512_v4
+46/244313/campos_512_v4
+46/244668/campos_512_v4
+46/244746/campos_512_v4
+46/244755/campos_512_v4
+47/245161/campos_512_v4
+47/245450/campos_512_v4
+47/245789/campos_512_v4
+47/245837/campos_512_v4
+47/245878/campos_512_v4
+47/246309/campos_512_v4
+47/246388/campos_512_v4
+47/246486/campos_512_v4
+47/246624/campos_512_v4
+47/246674/campos_512_v4
+47/246990/campos_512_v4
+47/247036/campos_512_v4
+47/247122/campos_512_v4
+47/247140/campos_512_v4
+47/247227/campos_512_v4
+47/247448/campos_512_v4
+47/247463/campos_512_v4
+47/247472/campos_512_v4
+47/247633/campos_512_v4
+47/247671/campos_512_v4
+47/247995/campos_512_v4
+47/248033/campos_512_v4
+47/248054/campos_512_v4
+47/248061/campos_512_v4
+47/248309/campos_512_v4
+47/248351/campos_512_v4
+47/248387/campos_512_v4
+47/248415/campos_512_v4
+47/248477/campos_512_v4
+47/248636/campos_512_v4
+47/248645/campos_512_v4
+47/248677/campos_512_v4
+47/248727/campos_512_v4
+47/249415/campos_512_v4
+47/249430/campos_512_v4
+47/249502/campos_512_v4
+47/249542/campos_512_v4
+47/249701/campos_512_v4
+47/249751/campos_512_v4
+47/249805/campos_512_v4
+47/249838/campos_512_v4
+47/249894/campos_512_v4
+48/250217/campos_512_v4
+48/250389/campos_512_v4
+48/250468/campos_512_v4
+48/250655/campos_512_v4
+48/250691/campos_512_v4
+48/250729/campos_512_v4
+48/250737/campos_512_v4
+48/250793/campos_512_v4
+48/250797/campos_512_v4
+48/250953/campos_512_v4
+48/251032/campos_512_v4
+48/251052/campos_512_v4
+48/251249/campos_512_v4
+48/251456/campos_512_v4
+48/251530/campos_512_v4
+48/251531/campos_512_v4
+48/251569/campos_512_v4
+48/251647/campos_512_v4
+48/251762/campos_512_v4
+48/251842/campos_512_v4
+48/251932/campos_512_v4
+48/252243/campos_512_v4
+48/252498/campos_512_v4
+48/252803/campos_512_v4
+48/252869/campos_512_v4
+48/252954/campos_512_v4
+48/253074/campos_512_v4
+48/253094/campos_512_v4
+48/253389/campos_512_v4
+48/253494/campos_512_v4
+48/253512/campos_512_v4
+48/253522/campos_512_v4
+48/253698/campos_512_v4
+48/253869/campos_512_v4
+48/253970/campos_512_v4
+48/254006/campos_512_v4
+48/254106/campos_512_v4
+48/254115/campos_512_v4
+48/254146/campos_512_v4
+48/254174/campos_512_v4
+48/254184/campos_512_v4
+48/254341/campos_512_v4
+48/254581/campos_512_v4
+48/254677/campos_512_v4
+48/254756/campos_512_v4
+48/254779/campos_512_v4
+48/254794/campos_512_v4
+48/254834/campos_512_v4
+48/254849/campos_512_v4
+48/254899/campos_512_v4
+49/255077/campos_512_v4
+49/255265/campos_512_v4
+49/255588/campos_512_v4
+49/255634/campos_512_v4
+49/255771/campos_512_v4
+49/255870/campos_512_v4
+49/256236/campos_512_v4
+49/256317/campos_512_v4
+49/256341/campos_512_v4
+49/256444/campos_512_v4
+49/256512/campos_512_v4
+49/256532/campos_512_v4
+49/256933/campos_512_v4
+49/256950/campos_512_v4
+49/257036/campos_512_v4
+49/257157/campos_512_v4
+49/257160/campos_512_v4
+49/257213/campos_512_v4
+49/257261/campos_512_v4
+49/257291/campos_512_v4
+49/257490/campos_512_v4
+49/257506/campos_512_v4
+49/257569/campos_512_v4
+49/257610/campos_512_v4
+49/257642/campos_512_v4
+49/257854/campos_512_v4
+49/257912/campos_512_v4
+49/258124/campos_512_v4
+49/258171/campos_512_v4
+49/258423/campos_512_v4
+49/258437/campos_512_v4
+49/258696/campos_512_v4
+49/258716/campos_512_v4
+49/258818/campos_512_v4
+49/258865/campos_512_v4
+49/258871/campos_512_v4
+49/258911/campos_512_v4
+49/258918/campos_512_v4
+49/258964/campos_512_v4
+49/259163/campos_512_v4
+49/259259/campos_512_v4
+49/259402/campos_512_v4
+49/259427/campos_512_v4
+49/259489/campos_512_v4
+49/259501/campos_512_v4
+49/259857/campos_512_v4
+49/259968/campos_512_v4
+5/35093/campos_512_v4
+5/35110/campos_512_v4
+5/35446/campos_512_v4
+5/35453/campos_512_v4
+5/35499/campos_512_v4
+5/35593/campos_512_v4
+5/35768/campos_512_v4
+5/36043/campos_512_v4
+5/36225/campos_512_v4
+5/36382/campos_512_v4
+5/36386/campos_512_v4
+5/36418/campos_512_v4
+5/36435/campos_512_v4
+5/36608/campos_512_v4
+5/36793/campos_512_v4
+5/36901/campos_512_v4
+5/36921/campos_512_v4
+5/37086/campos_512_v4
+5/37115/campos_512_v4
+5/37119/campos_512_v4
+5/37329/campos_512_v4
+5/37342/campos_512_v4
+5/37424/campos_512_v4
+5/37536/campos_512_v4
+5/37567/campos_512_v4
+5/37594/campos_512_v4
+5/37601/campos_512_v4
+5/37680/campos_512_v4
+5/37688/campos_512_v4
+5/37771/campos_512_v4
+5/37828/campos_512_v4
+5/37953/campos_512_v4
+5/37984/campos_512_v4
+5/38007/campos_512_v4
+5/38083/campos_512_v4
+5/38178/campos_512_v4
+5/38272/campos_512_v4
+5/38403/campos_512_v4
+5/38429/campos_512_v4
+5/38631/campos_512_v4
+5/38816/campos_512_v4
+5/38846/campos_512_v4
+5/39083/campos_512_v4
+5/39111/campos_512_v4
+5/39179/campos_512_v4
+5/39581/campos_512_v4
+5/39623/campos_512_v4
+5/39629/campos_512_v4
+5/39745/campos_512_v4
+5/39761/campos_512_v4
+5/39897/campos_512_v4
+5/39987/campos_512_v4
+50/260105/campos_512_v4
+50/260118/campos_512_v4
+50/260127/campos_512_v4
+50/260134/campos_512_v4
+50/260170/campos_512_v4
+50/260346/campos_512_v4
+50/260389/campos_512_v4
+50/260624/campos_512_v4
+50/260688/campos_512_v4
+50/260749/campos_512_v4
+50/260978/campos_512_v4
+50/261086/campos_512_v4
+50/261116/campos_512_v4
+50/261324/campos_512_v4
+50/261333/campos_512_v4
+50/261334/campos_512_v4
+50/261415/campos_512_v4
+50/261964/campos_512_v4
+50/261984/campos_512_v4
+50/262170/campos_512_v4
+50/262379/campos_512_v4
+50/262391/campos_512_v4
+50/262436/campos_512_v4
+50/262664/campos_512_v4
+50/262685/campos_512_v4
+50/262691/campos_512_v4
+50/262694/campos_512_v4
+50/262765/campos_512_v4
+50/262892/campos_512_v4
+50/263098/campos_512_v4
+50/263116/campos_512_v4
+50/263230/campos_512_v4
+50/263524/campos_512_v4
+50/263543/campos_512_v4
+50/263677/campos_512_v4
+50/263746/campos_512_v4
+50/263862/campos_512_v4
+50/263866/campos_512_v4
+50/263901/campos_512_v4
+50/263909/campos_512_v4
+50/263968/campos_512_v4
+50/264097/campos_512_v4
+50/264139/campos_512_v4
+50/264201/campos_512_v4
+50/264213/campos_512_v4
+50/264339/campos_512_v4
+50/264356/campos_512_v4
+50/264390/campos_512_v4
+50/264417/campos_512_v4
+50/264438/campos_512_v4
+50/264513/campos_512_v4
+50/264694/campos_512_v4
+50/264794/campos_512_v4
+51/265069/campos_512_v4
+51/265089/campos_512_v4
+51/265131/campos_512_v4
+51/265146/campos_512_v4
+51/265238/campos_512_v4
+51/265246/campos_512_v4
+51/265550/campos_512_v4
+51/265556/campos_512_v4
+51/265558/campos_512_v4
+51/265599/campos_512_v4
+51/265604/campos_512_v4
+51/265638/campos_512_v4
+51/265784/campos_512_v4
+51/266002/campos_512_v4
+51/266028/campos_512_v4
+51/266185/campos_512_v4
+51/266254/campos_512_v4
+51/266271/campos_512_v4
+51/266290/campos_512_v4
+51/266328/campos_512_v4
+51/266330/campos_512_v4
+51/266455/campos_512_v4
+51/266665/campos_512_v4
+51/266682/campos_512_v4
+51/266861/campos_512_v4
+51/267188/campos_512_v4
+51/267368/campos_512_v4
+51/267387/campos_512_v4
+51/267671/campos_512_v4
+51/267755/campos_512_v4
+51/267772/campos_512_v4
+51/267932/campos_512_v4
+51/268001/campos_512_v4
+51/268052/campos_512_v4
+51/268066/campos_512_v4
+51/268220/campos_512_v4
+51/268255/campos_512_v4
+51/268341/campos_512_v4
+51/268363/campos_512_v4
+51/268377/campos_512_v4
+51/268416/campos_512_v4
+51/268531/campos_512_v4
+51/268640/campos_512_v4
+51/268731/campos_512_v4
+51/268847/campos_512_v4
+51/268885/campos_512_v4
+51/268929/campos_512_v4
+51/268945/campos_512_v4
+51/269011/campos_512_v4
+51/269065/campos_512_v4
+51/269249/campos_512_v4
+51/269275/campos_512_v4
+51/269602/campos_512_v4
+51/269642/campos_512_v4
+51/269902/campos_512_v4
+52/270170/campos_512_v4
+52/270315/campos_512_v4
+52/270324/campos_512_v4
+52/270325/campos_512_v4
+52/270352/campos_512_v4
+52/270622/campos_512_v4
+52/270817/campos_512_v4
+52/270902/campos_512_v4
+52/271002/campos_512_v4
+52/271024/campos_512_v4
+52/271119/campos_512_v4
+52/271152/campos_512_v4
+52/271206/campos_512_v4
+52/271317/campos_512_v4
+52/271535/campos_512_v4
+52/271553/campos_512_v4
+52/271626/campos_512_v4
+52/271828/campos_512_v4
+52/271966/campos_512_v4
+52/272129/campos_512_v4
+52/272343/campos_512_v4
+52/272352/campos_512_v4
+52/272474/campos_512_v4
+52/272565/campos_512_v4
+52/272669/campos_512_v4
+52/272843/campos_512_v4
+52/272865/campos_512_v4
+52/272902/campos_512_v4
+52/272928/campos_512_v4
+52/272942/campos_512_v4
+52/273013/campos_512_v4
+52/273533/campos_512_v4
+52/273578/campos_512_v4
+52/273702/campos_512_v4
+52/273878/campos_512_v4
+52/273962/campos_512_v4
+52/274365/campos_512_v4
+52/274459/campos_512_v4
+52/274535/campos_512_v4
+52/274542/campos_512_v4
+52/274616/campos_512_v4
+52/274738/campos_512_v4
+52/274978/campos_512_v4
+53/275134/campos_512_v4
+53/275189/campos_512_v4
+53/275289/campos_512_v4
+53/275413/campos_512_v4
+53/275584/campos_512_v4
+53/275693/campos_512_v4
+53/275729/campos_512_v4
+53/276027/campos_512_v4
+53/276056/campos_512_v4
+53/276135/campos_512_v4
+53/276205/campos_512_v4
+53/276461/campos_512_v4
+53/276497/campos_512_v4
+53/276654/campos_512_v4
+53/276708/campos_512_v4
+53/276771/campos_512_v4
+53/276842/campos_512_v4
+53/277029/campos_512_v4
+53/277143/campos_512_v4
+53/277409/campos_512_v4
+53/277750/campos_512_v4
+53/277948/campos_512_v4
+53/278112/campos_512_v4
+53/278151/campos_512_v4
+53/278172/campos_512_v4
+53/278425/campos_512_v4
+53/278631/campos_512_v4
+53/278647/campos_512_v4
+53/278695/campos_512_v4
+53/279049/campos_512_v4
+53/279057/campos_512_v4
+53/279155/campos_512_v4
+53/279231/campos_512_v4
+53/279430/campos_512_v4
+53/279552/campos_512_v4
+53/279712/campos_512_v4
+53/279931/campos_512_v4
+54/280058/campos_512_v4
+54/280243/campos_512_v4
+54/280301/campos_512_v4
+54/280333/campos_512_v4
+54/280356/campos_512_v4
+54/280487/campos_512_v4
+54/280528/campos_512_v4
+54/280577/campos_512_v4
+54/280694/campos_512_v4
+54/280768/campos_512_v4
+54/280878/campos_512_v4
+54/280956/campos_512_v4
+54/281145/campos_512_v4
+54/281318/campos_512_v4
+54/281344/campos_512_v4
+54/281352/campos_512_v4
+54/281416/campos_512_v4
+54/281570/campos_512_v4
+54/281591/campos_512_v4
+54/281693/campos_512_v4
+54/281811/campos_512_v4
+54/281874/campos_512_v4
+54/282192/campos_512_v4
+54/282267/campos_512_v4
+54/282270/campos_512_v4
+54/282361/campos_512_v4
+54/282500/campos_512_v4
+54/282629/campos_512_v4
+54/282772/campos_512_v4
+54/282810/campos_512_v4
+54/283283/campos_512_v4
+54/283446/campos_512_v4
+54/283452/campos_512_v4
+54/283456/campos_512_v4
+54/283491/campos_512_v4
+54/283627/campos_512_v4
+54/283654/campos_512_v4
+54/283916/campos_512_v4
+54/283946/campos_512_v4
+54/283994/campos_512_v4
+54/284066/campos_512_v4
+54/284067/campos_512_v4
+54/284233/campos_512_v4
+54/284269/campos_512_v4
+54/284509/campos_512_v4
+54/284535/campos_512_v4
+54/284624/campos_512_v4
+54/284642/campos_512_v4
+54/284668/campos_512_v4
+54/284774/campos_512_v4
+54/284854/campos_512_v4
+54/284904/campos_512_v4
+54/284951/campos_512_v4
+54/284983/campos_512_v4
+55/285185/campos_512_v4
+55/285226/campos_512_v4
+55/285310/campos_512_v4
+55/285350/campos_512_v4
+55/285770/campos_512_v4
+55/285849/campos_512_v4
+55/285955/campos_512_v4
+55/286132/campos_512_v4
+55/286158/campos_512_v4
+55/286252/campos_512_v4
+55/286293/campos_512_v4
+55/286312/campos_512_v4
+55/286698/campos_512_v4
+55/286733/campos_512_v4
+55/286751/campos_512_v4
+55/286874/campos_512_v4
+55/286975/campos_512_v4
+55/286998/campos_512_v4
+55/287047/campos_512_v4
+55/287247/campos_512_v4
+55/287277/campos_512_v4
+55/287563/campos_512_v4
+55/287647/campos_512_v4
+55/287755/campos_512_v4
+55/288044/campos_512_v4
+55/288054/campos_512_v4
+55/288219/campos_512_v4
+55/288367/campos_512_v4
+55/288654/campos_512_v4
+55/288871/campos_512_v4
+55/288976/campos_512_v4
+55/289016/campos_512_v4
+55/289096/campos_512_v4
+55/289239/campos_512_v4
+55/289260/campos_512_v4
+55/289306/campos_512_v4
+55/289335/campos_512_v4
+55/289463/campos_512_v4
+55/289519/campos_512_v4
+55/289590/campos_512_v4
+55/289636/campos_512_v4
+55/289638/campos_512_v4
+55/289687/campos_512_v4
+55/289793/campos_512_v4
+56/290033/campos_512_v4
+56/290053/campos_512_v4
+56/290054/campos_512_v4
+56/290152/campos_512_v4
+56/290265/campos_512_v4
+56/290324/campos_512_v4
+56/290402/campos_512_v4
+56/290613/campos_512_v4
+56/290756/campos_512_v4
+56/290835/campos_512_v4
+56/290933/campos_512_v4
+56/290954/campos_512_v4
+56/291008/campos_512_v4
+56/291051/campos_512_v4
+56/291140/campos_512_v4
+56/291767/campos_512_v4
+56/291783/campos_512_v4
+56/291806/campos_512_v4
+56/291908/campos_512_v4
+56/292049/campos_512_v4
+56/292223/campos_512_v4
+56/292314/campos_512_v4
+56/292536/campos_512_v4
+56/292549/campos_512_v4
+56/292596/campos_512_v4
+56/292636/campos_512_v4
+56/292717/campos_512_v4
+56/292882/campos_512_v4
+56/293149/campos_512_v4
+56/293349/campos_512_v4
+56/293391/campos_512_v4
+56/293573/campos_512_v4
+56/293639/campos_512_v4
+56/293833/campos_512_v4
+56/293948/campos_512_v4
+56/293974/campos_512_v4
+56/294014/campos_512_v4
+56/294150/campos_512_v4
+56/294242/campos_512_v4
+56/294303/campos_512_v4
+56/294312/campos_512_v4
+56/294397/campos_512_v4
+56/294505/campos_512_v4
+56/294642/campos_512_v4
+56/294746/campos_512_v4
+56/294888/campos_512_v4
+56/294954/campos_512_v4
+57/295014/campos_512_v4
+57/295017/campos_512_v4
+57/295028/campos_512_v4
+57/295219/campos_512_v4
+57/295325/campos_512_v4
+57/295519/campos_512_v4
+57/295600/campos_512_v4
+57/295649/campos_512_v4
+57/295841/campos_512_v4
+57/295907/campos_512_v4
+57/295912/campos_512_v4
+57/296040/campos_512_v4
+57/296246/campos_512_v4
+57/296259/campos_512_v4
+57/296346/campos_512_v4
+57/296367/campos_512_v4
+57/296425/campos_512_v4
+57/296439/campos_512_v4
+57/296650/campos_512_v4
+57/296895/campos_512_v4
+57/296938/campos_512_v4
+57/296957/campos_512_v4
+57/297089/campos_512_v4
+57/297182/campos_512_v4
+57/297228/campos_512_v4
+57/297427/campos_512_v4
+57/297577/campos_512_v4
+57/297805/campos_512_v4
+57/298227/campos_512_v4
+57/298270/campos_512_v4
+57/298271/campos_512_v4
+57/298381/campos_512_v4
+57/298427/campos_512_v4
+57/298578/campos_512_v4
+57/298714/campos_512_v4
+57/298794/campos_512_v4
+57/298932/campos_512_v4
+57/299094/campos_512_v4
+57/299116/campos_512_v4
+57/299122/campos_512_v4
+57/299145/campos_512_v4
+57/299263/campos_512_v4
+57/299357/campos_512_v4
+57/299377/campos_512_v4
+57/299564/campos_512_v4
+57/299868/campos_512_v4
+58/300142/campos_512_v4
+58/300216/campos_512_v4
+58/300229/campos_512_v4
+58/300472/campos_512_v4
+58/300540/campos_512_v4
+58/300621/campos_512_v4
+58/300640/campos_512_v4
+58/300652/campos_512_v4
+58/300758/campos_512_v4
+58/300781/campos_512_v4
+58/300807/campos_512_v4
+58/300985/campos_512_v4
+58/301166/campos_512_v4
+58/301353/campos_512_v4
+58/301359/campos_512_v4
+58/301422/campos_512_v4
+58/301604/campos_512_v4
+58/301818/campos_512_v4
+58/301906/campos_512_v4
+58/301919/campos_512_v4
+58/302041/campos_512_v4
+58/302158/campos_512_v4
+58/302231/campos_512_v4
+58/302526/campos_512_v4
+58/302538/campos_512_v4
+58/302767/campos_512_v4
+58/302778/campos_512_v4
+58/302836/campos_512_v4
+58/303035/campos_512_v4
+58/303286/campos_512_v4
+58/303612/campos_512_v4
+58/303634/campos_512_v4
+58/303654/campos_512_v4
+58/303658/campos_512_v4
+58/303870/campos_512_v4
+58/304068/campos_512_v4
+58/304119/campos_512_v4
+58/304252/campos_512_v4
+58/304299/campos_512_v4
+58/304340/campos_512_v4
+58/304351/campos_512_v4
+58/304395/campos_512_v4
+58/304470/campos_512_v4
+58/304608/campos_512_v4
+58/304690/campos_512_v4
+58/304958/campos_512_v4
+59/305058/campos_512_v4
+59/305245/campos_512_v4
+59/305273/campos_512_v4
+59/305275/campos_512_v4
+59/305427/campos_512_v4
+59/305528/campos_512_v4
+59/305534/campos_512_v4
+59/305535/campos_512_v4
+59/305637/campos_512_v4
+59/305708/campos_512_v4
+59/305805/campos_512_v4
+59/305881/campos_512_v4
+59/306079/campos_512_v4
+59/306179/campos_512_v4
+59/306312/campos_512_v4
+59/306422/campos_512_v4
+59/306543/campos_512_v4
+59/306611/campos_512_v4
+59/306654/campos_512_v4
+59/306717/campos_512_v4
+59/306760/campos_512_v4
+59/306871/campos_512_v4
+59/306945/campos_512_v4
+59/307015/campos_512_v4
+59/307106/campos_512_v4
+59/307190/campos_512_v4
+59/307392/campos_512_v4
+59/307428/campos_512_v4
+59/307532/campos_512_v4
+59/307558/campos_512_v4
+59/307773/campos_512_v4
+59/307961/campos_512_v4
+59/308121/campos_512_v4
+59/308201/campos_512_v4
+59/308292/campos_512_v4
+59/308415/campos_512_v4
+59/308744/campos_512_v4
+59/308807/campos_512_v4
+59/309144/campos_512_v4
+59/309176/campos_512_v4
+59/309186/campos_512_v4
+59/309354/campos_512_v4
+59/309406/campos_512_v4
+59/309416/campos_512_v4
+59/309636/campos_512_v4
+59/309670/campos_512_v4
+59/309677/campos_512_v4
+59/309824/campos_512_v4
+59/309891/campos_512_v4
+6/40165/campos_512_v4
+6/40200/campos_512_v4
+6/40272/campos_512_v4
+6/40452/campos_512_v4
+6/40791/campos_512_v4
+6/40879/campos_512_v4
+6/40943/campos_512_v4
+6/41111/campos_512_v4
+6/41194/campos_512_v4
+6/41329/campos_512_v4
+6/41339/campos_512_v4
+6/41353/campos_512_v4
+6/41538/campos_512_v4
+6/41582/campos_512_v4
+6/41897/campos_512_v4
+6/42092/campos_512_v4
+6/42185/campos_512_v4
+6/42233/campos_512_v4
+6/42254/campos_512_v4
+6/42377/campos_512_v4
+6/42702/campos_512_v4
+6/42767/campos_512_v4
+6/42808/campos_512_v4
+6/42854/campos_512_v4
+6/42880/campos_512_v4
+6/42905/campos_512_v4
+6/42919/campos_512_v4
+6/43020/campos_512_v4
+6/43036/campos_512_v4
+6/43064/campos_512_v4
+6/43120/campos_512_v4
+6/43291/campos_512_v4
+6/43354/campos_512_v4
+6/43358/campos_512_v4
+6/43367/campos_512_v4
+6/43368/campos_512_v4
+6/43440/campos_512_v4
+6/43513/campos_512_v4
+6/43534/campos_512_v4
+6/43656/campos_512_v4
+6/43705/campos_512_v4
+6/43811/campos_512_v4
+6/43953/campos_512_v4
+6/43977/campos_512_v4
+6/43984/campos_512_v4
+6/44214/campos_512_v4
+6/44407/campos_512_v4
+6/44613/campos_512_v4
+6/44759/campos_512_v4
+6/44809/campos_512_v4
+6/44836/campos_512_v4
+60/310013/campos_512_v4
+60/310046/campos_512_v4
+60/310058/campos_512_v4
+60/310116/campos_512_v4
+60/310292/campos_512_v4
+60/310354/campos_512_v4
+60/310366/campos_512_v4
+60/310474/campos_512_v4
+60/310630/campos_512_v4
+60/310665/campos_512_v4
+60/310875/campos_512_v4
+60/310904/campos_512_v4
+60/311149/campos_512_v4
+60/311209/campos_512_v4
+60/311243/campos_512_v4
+60/311366/campos_512_v4
+60/311376/campos_512_v4
+60/311463/campos_512_v4
+60/311646/campos_512_v4
+60/311647/campos_512_v4
+60/312022/campos_512_v4
+60/312146/campos_512_v4
+60/312211/campos_512_v4
+60/312303/campos_512_v4
+60/312491/campos_512_v4
+60/312510/campos_512_v4
+60/312628/campos_512_v4
+60/312636/campos_512_v4
+60/312684/campos_512_v4
+60/312843/campos_512_v4
+60/312858/campos_512_v4
+60/312966/campos_512_v4
+60/313065/campos_512_v4
+60/313171/campos_512_v4
+60/313187/campos_512_v4
+60/313282/campos_512_v4
+60/313340/campos_512_v4
+60/313355/campos_512_v4
+60/313360/campos_512_v4
+60/313511/campos_512_v4
+60/313585/campos_512_v4
+60/313679/campos_512_v4
+60/313954/campos_512_v4
+60/313962/campos_512_v4
+60/314051/campos_512_v4
+60/314078/campos_512_v4
+60/314148/campos_512_v4
+60/314174/campos_512_v4
+60/314198/campos_512_v4
+60/314372/campos_512_v4
+60/314415/campos_512_v4
+60/314511/campos_512_v4
+60/314613/campos_512_v4
+60/314701/campos_512_v4
+60/314751/campos_512_v4
+60/314798/campos_512_v4
+60/314803/campos_512_v4
+60/314899/campos_512_v4
+60/314937/campos_512_v4
+61/315069/campos_512_v4
+61/315174/campos_512_v4
+61/315197/campos_512_v4
+61/315292/campos_512_v4
+61/315338/campos_512_v4
+61/315690/campos_512_v4
+61/315804/campos_512_v4
+61/315922/campos_512_v4
+61/315932/campos_512_v4
+61/316000/campos_512_v4
+61/316003/campos_512_v4
+61/316005/campos_512_v4
+61/316131/campos_512_v4
+61/316262/campos_512_v4
+61/316543/campos_512_v4
+61/316555/campos_512_v4
+61/316614/campos_512_v4
+61/316626/campos_512_v4
+61/316642/campos_512_v4
+61/317101/campos_512_v4
+61/317223/campos_512_v4
+61/317225/campos_512_v4
+61/317239/campos_512_v4
+61/317241/campos_512_v4
+61/317284/campos_512_v4
+61/317290/campos_512_v4
+61/317304/campos_512_v4
+61/317370/campos_512_v4
+61/317371/campos_512_v4
+61/317561/campos_512_v4
+61/317575/campos_512_v4
+61/317759/campos_512_v4
+61/317787/campos_512_v4
+61/317810/campos_512_v4
+61/318086/campos_512_v4
+61/318101/campos_512_v4
+61/318121/campos_512_v4
+61/318410/campos_512_v4
+61/318848/campos_512_v4
+61/319028/campos_512_v4
+61/319111/campos_512_v4
+61/319228/campos_512_v4
+61/319265/campos_512_v4
+61/319270/campos_512_v4
+61/319283/campos_512_v4
+61/319379/campos_512_v4
+61/319581/campos_512_v4
+61/319680/campos_512_v4
+61/319687/campos_512_v4
+61/319719/campos_512_v4
+61/319803/campos_512_v4
+62/320137/campos_512_v4
+62/320354/campos_512_v4
+62/320424/campos_512_v4
+62/320489/campos_512_v4
+62/320602/campos_512_v4
+62/320631/campos_512_v4
+62/320640/campos_512_v4
+62/320682/campos_512_v4
+62/320725/campos_512_v4
+62/320766/campos_512_v4
+62/320857/campos_512_v4
+62/320930/campos_512_v4
+62/320964/campos_512_v4
+62/321012/campos_512_v4
+62/321482/campos_512_v4
+62/321547/campos_512_v4
+62/321819/campos_512_v4
+62/321941/campos_512_v4
+62/321947/campos_512_v4
+62/322093/campos_512_v4
+62/322336/campos_512_v4
+62/322470/campos_512_v4
+62/322563/campos_512_v4
+62/322679/campos_512_v4
+62/322687/campos_512_v4
+62/322913/campos_512_v4
+62/322998/campos_512_v4
+62/323037/campos_512_v4
+62/323071/campos_512_v4
+62/323267/campos_512_v4
+62/323289/campos_512_v4
+62/323304/campos_512_v4
+62/323334/campos_512_v4
+62/323360/campos_512_v4
+62/323635/campos_512_v4
+62/323731/campos_512_v4
+62/323857/campos_512_v4
+62/323874/campos_512_v4
+62/323914/campos_512_v4
+62/323924/campos_512_v4
+62/323993/campos_512_v4
+62/324287/campos_512_v4
+62/324365/campos_512_v4
+62/324375/campos_512_v4
+62/324645/campos_512_v4
+62/324786/campos_512_v4
+62/324958/campos_512_v4
+62/324963/campos_512_v4
+63/325082/campos_512_v4
+63/325101/campos_512_v4
+63/325233/campos_512_v4
+63/325281/campos_512_v4
+63/325325/campos_512_v4
+63/325496/campos_512_v4
+63/325527/campos_512_v4
+63/325617/campos_512_v4
+63/325690/campos_512_v4
+63/325758/campos_512_v4
+63/325810/campos_512_v4
+63/325889/campos_512_v4
+63/326033/campos_512_v4
+63/326279/campos_512_v4
+63/326606/campos_512_v4
+63/326642/campos_512_v4
+63/326672/campos_512_v4
+63/326718/campos_512_v4
+63/326830/campos_512_v4
+63/327023/campos_512_v4
+63/327029/campos_512_v4
+63/327055/campos_512_v4
+63/327150/campos_512_v4
+63/327181/campos_512_v4
+63/327432/campos_512_v4
+63/327539/campos_512_v4
+63/327630/campos_512_v4
+63/327810/campos_512_v4
+63/328021/campos_512_v4
+63/328141/campos_512_v4
+63/328185/campos_512_v4
+63/328229/campos_512_v4
+63/328293/campos_512_v4
+63/328332/campos_512_v4
+63/328424/campos_512_v4
+63/328450/campos_512_v4
+63/328457/campos_512_v4
+63/328473/campos_512_v4
+63/328520/campos_512_v4
+63/328927/campos_512_v4
+63/329025/campos_512_v4
+63/329223/campos_512_v4
+63/329247/campos_512_v4
+63/329357/campos_512_v4
+63/329359/campos_512_v4
+63/329379/campos_512_v4
+63/329528/campos_512_v4
+63/329565/campos_512_v4
+63/329611/campos_512_v4
+63/329658/campos_512_v4
+63/329879/campos_512_v4
+63/329997/campos_512_v4
+64/330074/campos_512_v4
+64/330236/campos_512_v4
+64/330492/campos_512_v4
+64/330661/campos_512_v4
+64/330802/campos_512_v4
+64/330928/campos_512_v4
+64/331110/campos_512_v4
+64/331278/campos_512_v4
+64/331298/campos_512_v4
+64/331316/campos_512_v4
+64/331501/campos_512_v4
+64/331870/campos_512_v4
+64/331916/campos_512_v4
+64/331923/campos_512_v4
+64/331979/campos_512_v4
+64/332054/campos_512_v4
+64/332727/campos_512_v4
+64/332833/campos_512_v4
+64/333128/campos_512_v4
+64/333269/campos_512_v4
+64/333358/campos_512_v4
+64/333902/campos_512_v4
+64/334058/campos_512_v4
+64/334087/campos_512_v4
+64/334240/campos_512_v4
+64/334299/campos_512_v4
+64/334388/campos_512_v4
+64/334512/campos_512_v4
+64/334556/campos_512_v4
+64/334659/campos_512_v4
+64/334682/campos_512_v4
+64/334720/campos_512_v4
+64/334795/campos_512_v4
+65/335170/campos_512_v4
+65/335545/campos_512_v4
+65/335773/campos_512_v4
+65/335929/campos_512_v4
+65/336273/campos_512_v4
+65/336302/campos_512_v4
+65/336478/campos_512_v4
+65/336565/campos_512_v4
+65/336603/campos_512_v4
+65/336722/campos_512_v4
+65/336776/campos_512_v4
+65/336784/campos_512_v4
+65/336916/campos_512_v4
+65/336920/campos_512_v4
+65/337016/campos_512_v4
+65/337032/campos_512_v4
+65/337325/campos_512_v4
+65/337392/campos_512_v4
+65/337448/campos_512_v4
+65/337467/campos_512_v4
+65/337515/campos_512_v4
+65/337567/campos_512_v4
+65/337903/campos_512_v4
+65/337968/campos_512_v4
+65/337989/campos_512_v4
+65/338087/campos_512_v4
+65/338090/campos_512_v4
+65/338238/campos_512_v4
+65/338338/campos_512_v4
+65/338420/campos_512_v4
+65/338441/campos_512_v4
+65/338490/campos_512_v4
+65/338503/campos_512_v4
+65/338562/campos_512_v4
+65/338643/campos_512_v4
+65/338687/campos_512_v4
+65/338709/campos_512_v4
+65/339049/campos_512_v4
+65/339066/campos_512_v4
+65/339077/campos_512_v4
+65/339243/campos_512_v4
+65/339288/campos_512_v4
+65/339294/campos_512_v4
+65/339340/campos_512_v4
+65/339390/campos_512_v4
+65/339499/campos_512_v4
+65/339696/campos_512_v4
+65/339768/campos_512_v4
+65/339845/campos_512_v4
+65/339898/campos_512_v4
+65/339933/campos_512_v4
+66/340122/campos_512_v4
+66/340266/campos_512_v4
+66/340279/campos_512_v4
+66/340396/campos_512_v4
+66/340534/campos_512_v4
+66/340704/campos_512_v4
+66/340870/campos_512_v4
+66/340961/campos_512_v4
+66/340972/campos_512_v4
+66/341156/campos_512_v4
+66/341229/campos_512_v4
+66/341390/campos_512_v4
+66/341638/campos_512_v4
+66/341675/campos_512_v4
+66/341692/campos_512_v4
+66/341795/campos_512_v4
+66/341911/campos_512_v4
+66/341961/campos_512_v4
+66/341968/campos_512_v4
+66/341990/campos_512_v4
+66/342052/campos_512_v4
+66/342069/campos_512_v4
+66/342136/campos_512_v4
+66/342256/campos_512_v4
+66/342356/campos_512_v4
+66/342675/campos_512_v4
+66/342724/campos_512_v4
+66/342752/campos_512_v4
+66/343018/campos_512_v4
+66/343187/campos_512_v4
+66/343265/campos_512_v4
+66/343451/campos_512_v4
+66/343660/campos_512_v4
+66/343699/campos_512_v4
+66/343711/campos_512_v4
+66/343953/campos_512_v4
+66/344155/campos_512_v4
+66/344325/campos_512_v4
+66/344414/campos_512_v4
+66/344708/campos_512_v4
+66/344975/campos_512_v4
+67/345044/campos_512_v4
+67/345055/campos_512_v4
+67/345067/campos_512_v4
+67/345281/campos_512_v4
+67/345389/campos_512_v4
+67/345395/campos_512_v4
+67/345440/campos_512_v4
+67/345466/campos_512_v4
+67/345826/campos_512_v4
+67/345830/campos_512_v4
+67/345910/campos_512_v4
+67/345966/campos_512_v4
+67/346029/campos_512_v4
+67/346237/campos_512_v4
+67/346348/campos_512_v4
+67/346402/campos_512_v4
+67/346657/campos_512_v4
+67/346699/campos_512_v4
+67/346898/campos_512_v4
+67/347053/campos_512_v4
+67/347256/campos_512_v4
+67/347669/campos_512_v4
+67/347969/campos_512_v4
+67/348059/campos_512_v4
+67/348227/campos_512_v4
+67/348246/campos_512_v4
+67/348282/campos_512_v4
+67/348371/campos_512_v4
+67/348641/campos_512_v4
+67/348655/campos_512_v4
+67/348672/campos_512_v4
+67/348840/campos_512_v4
+67/349218/campos_512_v4
+67/349559/campos_512_v4
+67/349698/campos_512_v4
+67/349785/campos_512_v4
+67/349828/campos_512_v4
+67/349898/campos_512_v4
+67/349990/campos_512_v4
+68/350155/campos_512_v4
+68/350194/campos_512_v4
+68/350340/campos_512_v4
+68/350384/campos_512_v4
+68/350468/campos_512_v4
+68/350674/campos_512_v4
+68/350689/campos_512_v4
+68/350711/campos_512_v4
+68/350771/campos_512_v4
+68/350814/campos_512_v4
+68/350909/campos_512_v4
+68/350957/campos_512_v4
+68/351112/campos_512_v4
+68/351186/campos_512_v4
+68/351215/campos_512_v4
+68/351277/campos_512_v4
+68/351280/campos_512_v4
+68/351307/campos_512_v4
+68/351587/campos_512_v4
+68/351641/campos_512_v4
+68/351669/campos_512_v4
+68/351725/campos_512_v4
+68/351786/campos_512_v4
+68/351827/campos_512_v4
+68/352106/campos_512_v4
+68/352153/campos_512_v4
+68/352457/campos_512_v4
+68/352695/campos_512_v4
+68/352751/campos_512_v4
+68/352870/campos_512_v4
+68/352928/campos_512_v4
+68/352989/campos_512_v4
+68/353107/campos_512_v4
+68/353138/campos_512_v4
+68/353184/campos_512_v4
+68/353189/campos_512_v4
+68/353215/campos_512_v4
+68/353241/campos_512_v4
+68/353256/campos_512_v4
+68/353268/campos_512_v4
+68/353343/campos_512_v4
+68/353346/campos_512_v4
+68/353489/campos_512_v4
+68/353555/campos_512_v4
+68/353593/campos_512_v4
+68/353621/campos_512_v4
+68/353732/campos_512_v4
+68/353878/campos_512_v4
+68/353888/campos_512_v4
+68/353919/campos_512_v4
+68/354130/campos_512_v4
+68/354194/campos_512_v4
+68/354307/campos_512_v4
+68/354430/campos_512_v4
+68/354499/campos_512_v4
+68/354572/campos_512_v4
+68/354638/campos_512_v4
+69/355004/campos_512_v4
+69/355010/campos_512_v4
+69/355147/campos_512_v4
+69/355376/campos_512_v4
+69/355491/campos_512_v4
+69/355548/campos_512_v4
+69/355684/campos_512_v4
+69/355822/campos_512_v4
+69/355848/campos_512_v4
+69/356008/campos_512_v4
+69/356066/campos_512_v4
+69/356129/campos_512_v4
+69/356216/campos_512_v4
+69/356233/campos_512_v4
+69/356288/campos_512_v4
+69/356477/campos_512_v4
+69/356487/campos_512_v4
+69/356644/campos_512_v4
+69/356657/campos_512_v4
+69/356688/campos_512_v4
+69/357250/campos_512_v4
+69/357262/campos_512_v4
+69/357311/campos_512_v4
+69/357561/campos_512_v4
+69/357636/campos_512_v4
+69/357706/campos_512_v4
+69/357789/campos_512_v4
+69/357823/campos_512_v4
+69/357848/campos_512_v4
+69/357905/campos_512_v4
+69/357993/campos_512_v4
+69/358041/campos_512_v4
+69/358145/campos_512_v4
+69/358280/campos_512_v4
+69/358389/campos_512_v4
+69/358446/campos_512_v4
+69/358653/campos_512_v4
+69/358673/campos_512_v4
+69/358850/campos_512_v4
+69/358956/campos_512_v4
+69/359119/campos_512_v4
+69/359422/campos_512_v4
+69/359449/campos_512_v4
+69/359455/campos_512_v4
+69/359456/campos_512_v4
+69/359492/campos_512_v4
+69/359493/campos_512_v4
+69/359616/campos_512_v4
+69/359660/campos_512_v4
+69/359702/campos_512_v4
+69/359720/campos_512_v4
+69/359822/campos_512_v4
+69/360001/campos_512_v4
+7/45002/campos_512_v4
+7/45188/campos_512_v4
+7/45463/campos_512_v4
+7/45808/campos_512_v4
+7/45984/campos_512_v4
+7/46124/campos_512_v4
+7/46170/campos_512_v4
+7/46243/campos_512_v4
+7/46457/campos_512_v4
+7/46511/campos_512_v4
+7/46642/campos_512_v4
+7/46693/campos_512_v4
+7/46717/campos_512_v4
+7/46816/campos_512_v4
+7/46882/campos_512_v4
+7/46906/campos_512_v4
+7/46960/campos_512_v4
+7/46973/campos_512_v4
+7/47019/campos_512_v4
+7/47026/campos_512_v4
+7/47056/campos_512_v4
+7/47128/campos_512_v4
+7/47456/campos_512_v4
+7/47574/campos_512_v4
+7/47764/campos_512_v4
+7/47819/campos_512_v4
+7/47869/campos_512_v4
+7/47900/campos_512_v4
+7/47985/campos_512_v4
+7/48167/campos_512_v4
+7/48198/campos_512_v4
+7/48225/campos_512_v4
+7/48237/campos_512_v4
+7/48267/campos_512_v4
+7/48324/campos_512_v4
+7/48746/campos_512_v4
+7/48775/campos_512_v4
+7/48804/campos_512_v4
+7/48863/campos_512_v4
+7/49067/campos_512_v4
+7/49070/campos_512_v4
+7/49105/campos_512_v4
+7/49318/campos_512_v4
+7/49502/campos_512_v4
+7/49579/campos_512_v4
+7/49731/campos_512_v4
+7/49745/campos_512_v4
+7/49793/campos_512_v4
+7/49948/campos_512_v4
+7/49963/campos_512_v4
+70/360397/campos_512_v4
+70/360547/campos_512_v4
+70/360588/campos_512_v4
+70/360695/campos_512_v4
+70/360725/campos_512_v4
+70/360852/campos_512_v4
+70/360878/campos_512_v4
+70/360937/campos_512_v4
+70/361151/campos_512_v4
+70/361184/campos_512_v4
+70/361430/campos_512_v4
+70/361498/campos_512_v4
+70/361803/campos_512_v4
+70/361958/campos_512_v4
+70/362228/campos_512_v4
+70/362279/campos_512_v4
+70/362466/campos_512_v4
+70/362850/campos_512_v4
+70/362984/campos_512_v4
+70/363137/campos_512_v4
+70/363160/campos_512_v4
+70/363212/campos_512_v4
+70/363385/campos_512_v4
+70/363504/campos_512_v4
+70/363632/campos_512_v4
+70/363668/campos_512_v4
+70/363745/campos_512_v4
+70/364079/campos_512_v4
+70/364378/campos_512_v4
+70/364476/campos_512_v4
+70/364489/campos_512_v4
+70/364515/campos_512_v4
+70/364719/campos_512_v4
+70/364798/campos_512_v4
+70/364833/campos_512_v4
+70/364936/campos_512_v4
+71/365063/campos_512_v4
+71/365065/campos_512_v4
+71/365252/campos_512_v4
+71/365457/campos_512_v4
+71/365545/campos_512_v4
+71/365610/campos_512_v4
+71/365661/campos_512_v4
+71/365817/campos_512_v4
+71/365843/campos_512_v4
+71/365962/campos_512_v4
+71/366133/campos_512_v4
+71/366141/campos_512_v4
+71/366306/campos_512_v4
+71/366315/campos_512_v4
+71/366355/campos_512_v4
+71/366571/campos_512_v4
+71/366624/campos_512_v4
+71/366734/campos_512_v4
+71/366762/campos_512_v4
+71/366804/campos_512_v4
+71/366860/campos_512_v4
+71/367062/campos_512_v4
+71/367088/campos_512_v4
+71/367486/campos_512_v4
+71/367493/campos_512_v4
+71/367711/campos_512_v4
+71/367736/campos_512_v4
+71/367880/campos_512_v4
+71/367948/campos_512_v4
+71/367996/campos_512_v4
+71/368025/campos_512_v4
+71/368346/campos_512_v4
+71/368551/campos_512_v4
+71/368853/campos_512_v4
+71/368930/campos_512_v4
+71/369184/campos_512_v4
+71/369384/campos_512_v4
+71/369398/campos_512_v4
+71/369462/campos_512_v4
+71/369611/campos_512_v4
+71/369663/campos_512_v4
+71/369683/campos_512_v4
+71/369725/campos_512_v4
+71/369774/campos_512_v4
+71/369814/campos_512_v4
+72/370297/campos_512_v4
+72/370354/campos_512_v4
+72/370583/campos_512_v4
+72/370754/campos_512_v4
+72/370794/campos_512_v4
+72/370860/campos_512_v4
+72/370955/campos_512_v4
+72/371062/campos_512_v4
+72/371209/campos_512_v4
+72/371327/campos_512_v4
+72/371429/campos_512_v4
+72/371522/campos_512_v4
+72/371600/campos_512_v4
+72/371707/campos_512_v4
+72/371751/campos_512_v4
+72/371873/campos_512_v4
+72/372160/campos_512_v4
+72/372289/campos_512_v4
+72/372292/campos_512_v4
+72/372354/campos_512_v4
+72/372375/campos_512_v4
+72/372473/campos_512_v4
+72/372505/campos_512_v4
+72/372613/campos_512_v4
+72/372840/campos_512_v4
+72/372850/campos_512_v4
+72/372926/campos_512_v4
+72/372977/campos_512_v4
+72/373336/campos_512_v4
+72/373402/campos_512_v4
+72/373443/campos_512_v4
+72/373479/campos_512_v4
+72/373591/campos_512_v4
+72/373604/campos_512_v4
+72/373841/campos_512_v4
+72/373882/campos_512_v4
+72/373990/campos_512_v4
+72/373995/campos_512_v4
+72/374329/campos_512_v4
+72/374395/campos_512_v4
+72/374396/campos_512_v4
+72/374513/campos_512_v4
+72/374539/campos_512_v4
+72/374585/campos_512_v4
+72/374594/campos_512_v4
+72/374842/campos_512_v4
+72/374882/campos_512_v4
+72/374928/campos_512_v4
+72/374976/campos_512_v4
+73/375096/campos_512_v4
+73/375639/campos_512_v4
+73/375895/campos_512_v4
+73/375912/campos_512_v4
+73/376000/campos_512_v4
+73/376013/campos_512_v4
+73/376093/campos_512_v4
+73/376284/campos_512_v4
+73/376300/campos_512_v4
+73/376432/campos_512_v4
+73/376489/campos_512_v4
+73/376516/campos_512_v4
+73/376655/campos_512_v4
+73/376659/campos_512_v4
+73/376708/campos_512_v4
+73/376797/campos_512_v4
+73/376801/campos_512_v4
+73/376903/campos_512_v4
+73/377002/campos_512_v4
+73/377067/campos_512_v4
+73/377103/campos_512_v4
+73/377221/campos_512_v4
+73/377244/campos_512_v4
+73/377263/campos_512_v4
+73/377308/campos_512_v4
+73/377552/campos_512_v4
+73/377724/campos_512_v4
+73/378062/campos_512_v4
+73/378119/campos_512_v4
+73/378316/campos_512_v4
+73/378615/campos_512_v4
+73/378713/campos_512_v4
+73/378735/campos_512_v4
+73/379583/campos_512_v4
+74/380380/campos_512_v4
+74/380579/campos_512_v4
+74/380674/campos_512_v4
+74/380720/campos_512_v4
+74/381109/campos_512_v4
+74/381116/campos_512_v4
+74/381123/campos_512_v4
+74/381212/campos_512_v4
+74/381475/campos_512_v4
+74/381530/campos_512_v4
+74/381571/campos_512_v4
+74/381616/campos_512_v4
+74/381731/campos_512_v4
+74/381778/campos_512_v4
+74/381809/campos_512_v4
+74/381874/campos_512_v4
+74/381897/campos_512_v4
+74/381959/campos_512_v4
+74/382101/campos_512_v4
+74/382467/campos_512_v4
+74/382551/campos_512_v4
+74/382560/campos_512_v4
+74/382566/campos_512_v4
+74/382577/campos_512_v4
+74/382680/campos_512_v4
+74/382684/campos_512_v4
+74/382944/campos_512_v4
+74/383184/campos_512_v4
+74/383553/campos_512_v4
+74/383698/campos_512_v4
+74/383743/campos_512_v4
+74/384065/campos_512_v4
+74/384081/campos_512_v4
+74/384251/campos_512_v4
+74/384292/campos_512_v4
+74/384345/campos_512_v4
+74/384446/campos_512_v4
+74/384494/campos_512_v4
+74/384542/campos_512_v4
+74/384624/campos_512_v4
+74/384704/campos_512_v4
+74/384720/campos_512_v4
+74/384795/campos_512_v4
+75/385007/campos_512_v4
+75/385110/campos_512_v4
+75/385181/campos_512_v4
+75/385198/campos_512_v4
+75/385273/campos_512_v4
+75/385339/campos_512_v4
+75/385345/campos_512_v4
+75/385663/campos_512_v4
+75/385696/campos_512_v4
+75/385710/campos_512_v4
+75/385794/campos_512_v4
+75/386037/campos_512_v4
+75/386156/campos_512_v4
+75/386259/campos_512_v4
+75/386262/campos_512_v4
+75/386396/campos_512_v4
+75/386457/campos_512_v4
+75/386482/campos_512_v4
+75/386517/campos_512_v4
+75/386821/campos_512_v4
+75/386945/campos_512_v4
+75/386978/campos_512_v4
+75/387318/campos_512_v4
+75/387434/campos_512_v4
+75/387435/campos_512_v4
+75/387516/campos_512_v4
+75/387661/campos_512_v4
+75/387679/campos_512_v4
+75/387781/campos_512_v4
+75/387791/campos_512_v4
+75/387906/campos_512_v4
+75/387954/campos_512_v4
+75/387971/campos_512_v4
+75/387998/campos_512_v4
+75/388009/campos_512_v4
+75/388056/campos_512_v4
+75/388110/campos_512_v4
+75/388181/campos_512_v4
+75/388278/campos_512_v4
+75/388285/campos_512_v4
+75/388469/campos_512_v4
+75/388510/campos_512_v4
+75/388524/campos_512_v4
+75/388783/campos_512_v4
+75/388814/campos_512_v4
+75/388900/campos_512_v4
+75/389003/campos_512_v4
+75/389159/campos_512_v4
+75/389225/campos_512_v4
+75/389311/campos_512_v4
+75/389355/campos_512_v4
+75/389503/campos_512_v4
+75/389564/campos_512_v4
+75/389703/campos_512_v4
+75/389707/campos_512_v4
+75/389714/campos_512_v4
+75/389787/campos_512_v4
+75/389942/campos_512_v4
+75/389947/campos_512_v4
+75/389953/campos_512_v4
+75/389995/campos_512_v4
+76/390011/campos_512_v4
+76/390423/campos_512_v4
+76/390491/campos_512_v4
+76/390559/campos_512_v4
+76/390889/campos_512_v4
+76/390895/campos_512_v4
+76/391052/campos_512_v4
+76/391272/campos_512_v4
+76/391296/campos_512_v4
+76/391391/campos_512_v4
+76/391734/campos_512_v4
+76/391748/campos_512_v4
+76/391775/campos_512_v4
+76/391801/campos_512_v4
+76/391928/campos_512_v4
+76/392091/campos_512_v4
+76/392219/campos_512_v4
+76/392379/campos_512_v4
+76/393066/campos_512_v4
+76/393100/campos_512_v4
+76/393256/campos_512_v4
+76/393259/campos_512_v4
+76/393370/campos_512_v4
+76/393667/campos_512_v4
+76/393720/campos_512_v4
+76/393741/campos_512_v4
+76/393889/campos_512_v4
+76/393932/campos_512_v4
+76/393952/campos_512_v4
+76/393963/campos_512_v4
+76/394060/campos_512_v4
+76/394075/campos_512_v4
+76/394099/campos_512_v4
+76/394469/campos_512_v4
+76/394494/campos_512_v4
+76/394502/campos_512_v4
+76/394508/campos_512_v4
+76/394566/campos_512_v4
+76/394621/campos_512_v4
+76/394823/campos_512_v4
+76/394852/campos_512_v4
+76/394880/campos_512_v4
+77/395003/campos_512_v4
+77/395113/campos_512_v4
+77/395118/campos_512_v4
+77/395207/campos_512_v4
+77/395324/campos_512_v4
+77/395338/campos_512_v4
+77/395941/campos_512_v4
+77/396072/campos_512_v4
+77/396166/campos_512_v4
+77/396216/campos_512_v4
+77/396314/campos_512_v4
+77/396472/campos_512_v4
+77/396755/campos_512_v4
+77/396856/campos_512_v4
+77/396958/campos_512_v4
+77/396990/campos_512_v4
+77/397077/campos_512_v4
+77/397354/campos_512_v4
+77/397360/campos_512_v4
+77/397580/campos_512_v4
+77/397616/campos_512_v4
+77/397626/campos_512_v4
+77/397659/campos_512_v4
+77/397717/campos_512_v4
+77/397734/campos_512_v4
+77/397757/campos_512_v4
+77/397902/campos_512_v4
+77/398033/campos_512_v4
+77/398035/campos_512_v4
+77/398103/campos_512_v4
+77/398106/campos_512_v4
+77/398150/campos_512_v4
+77/398192/campos_512_v4
+77/398250/campos_512_v4
+77/398356/campos_512_v4
+77/398462/campos_512_v4
+77/398567/campos_512_v4
+77/398586/campos_512_v4
+77/398641/campos_512_v4
+77/398925/campos_512_v4
+77/399044/campos_512_v4
+77/399172/campos_512_v4
+77/399179/campos_512_v4
+77/399200/campos_512_v4
+77/399204/campos_512_v4
+77/399265/campos_512_v4
+77/399273/campos_512_v4
+77/399417/campos_512_v4
+77/399941/campos_512_v4
+78/400021/campos_512_v4
+78/400221/campos_512_v4
+78/400239/campos_512_v4
+78/400307/campos_512_v4
+78/400518/campos_512_v4
+78/400607/campos_512_v4
+78/400653/campos_512_v4
+78/400720/campos_512_v4
+78/400853/campos_512_v4
+78/400901/campos_512_v4
+78/400999/campos_512_v4
+78/401099/campos_512_v4
+78/401112/campos_512_v4
+78/401188/campos_512_v4
+78/401229/campos_512_v4
+78/401241/campos_512_v4
+78/401386/campos_512_v4
+78/401556/campos_512_v4
+78/401582/campos_512_v4
+78/401678/campos_512_v4
+78/401681/campos_512_v4
+78/401832/campos_512_v4
+78/402205/campos_512_v4
+78/402241/campos_512_v4
+78/402255/campos_512_v4
+78/402502/campos_512_v4
+78/402536/campos_512_v4
+78/402577/campos_512_v4
+78/402630/campos_512_v4
+78/402966/campos_512_v4
+78/403178/campos_512_v4
+78/403526/campos_512_v4
+78/403528/campos_512_v4
+78/403586/campos_512_v4
+78/403637/campos_512_v4
+78/403781/campos_512_v4
+78/403850/campos_512_v4
+78/403909/campos_512_v4
+78/404200/campos_512_v4
+78/404262/campos_512_v4
+78/404288/campos_512_v4
+78/404340/campos_512_v4
+78/404402/campos_512_v4
+78/404585/campos_512_v4
+78/404657/campos_512_v4
+78/404683/campos_512_v4
+78/404894/campos_512_v4
+78/404980/campos_512_v4
+79/405005/campos_512_v4
+79/405051/campos_512_v4
+79/405060/campos_512_v4
+79/405157/campos_512_v4
+79/405239/campos_512_v4
+79/405453/campos_512_v4
+79/405550/campos_512_v4
+79/405635/campos_512_v4
+79/405658/campos_512_v4
+79/405673/campos_512_v4
+79/405846/campos_512_v4
+79/406071/campos_512_v4
+79/406326/campos_512_v4
+79/406350/campos_512_v4
+79/406423/campos_512_v4
+79/406428/campos_512_v4
+79/406433/campos_512_v4
+79/406595/campos_512_v4
+79/406861/campos_512_v4
+79/407055/campos_512_v4
+79/407096/campos_512_v4
+79/407334/campos_512_v4
+79/407384/campos_512_v4
+79/407659/campos_512_v4
+79/407686/campos_512_v4
+79/407830/campos_512_v4
+79/408059/campos_512_v4
+79/408127/campos_512_v4
+79/408316/campos_512_v4
+79/408325/campos_512_v4
+79/408418/campos_512_v4
+79/408430/campos_512_v4
+79/408433/campos_512_v4
+79/408449/campos_512_v4
+79/408459/campos_512_v4
+79/408560/campos_512_v4
+79/408582/campos_512_v4
+79/408645/campos_512_v4
+79/408707/campos_512_v4
+79/408828/campos_512_v4
+79/409151/campos_512_v4
+79/409415/campos_512_v4
+79/409481/campos_512_v4
+79/409538/campos_512_v4
+79/409555/campos_512_v4
+79/409725/campos_512_v4
+79/409833/campos_512_v4
+79/409869/campos_512_v4
+8/50316/campos_512_v4
+8/50409/campos_512_v4
+8/50442/campos_512_v4
+8/50480/campos_512_v4
+8/50483/campos_512_v4
+8/50496/campos_512_v4
+8/50610/campos_512_v4
+8/50611/campos_512_v4
+8/51006/campos_512_v4
+8/51166/campos_512_v4
+8/51173/campos_512_v4
+8/51228/campos_512_v4
+8/51293/campos_512_v4
+8/51405/campos_512_v4
+8/51420/campos_512_v4
+8/51424/campos_512_v4
+8/51572/campos_512_v4
+8/51778/campos_512_v4
+8/51779/campos_512_v4
+8/51831/campos_512_v4
+8/51857/campos_512_v4
+8/52038/campos_512_v4
+8/52042/campos_512_v4
+8/52096/campos_512_v4
+8/52128/campos_512_v4
+8/52161/campos_512_v4
+8/52171/campos_512_v4
+8/52282/campos_512_v4
+8/52343/campos_512_v4
+8/52401/campos_512_v4
+8/52538/campos_512_v4
+8/52565/campos_512_v4
+8/52775/campos_512_v4
+8/52917/campos_512_v4
+8/52949/campos_512_v4
+8/53026/campos_512_v4
+8/53077/campos_512_v4
+8/53172/campos_512_v4
+8/53243/campos_512_v4
+8/53349/campos_512_v4
+8/53425/campos_512_v4
+8/53516/campos_512_v4
+8/53587/campos_512_v4
+8/53681/campos_512_v4
+8/53821/campos_512_v4
+8/53856/campos_512_v4
+8/54006/campos_512_v4
+8/54230/campos_512_v4
+8/54377/campos_512_v4
+8/54424/campos_512_v4
+8/54560/campos_512_v4
+8/54677/campos_512_v4
+8/54682/campos_512_v4
+8/54779/campos_512_v4
+80/410023/campos_512_v4
+80/410261/campos_512_v4
+80/410365/campos_512_v4
+80/410392/campos_512_v4
+80/410662/campos_512_v4
+80/410761/campos_512_v4
+80/410899/campos_512_v4
+80/411038/campos_512_v4
+80/411139/campos_512_v4
+80/411198/campos_512_v4
+80/411290/campos_512_v4
+80/411313/campos_512_v4
+80/411399/campos_512_v4
+80/411407/campos_512_v4
+80/411465/campos_512_v4
+80/411538/campos_512_v4
+80/411540/campos_512_v4
+80/411592/campos_512_v4
+80/411609/campos_512_v4
+80/411631/campos_512_v4
+80/411741/campos_512_v4
+80/411753/campos_512_v4
+80/411763/campos_512_v4
+80/411849/campos_512_v4
+80/412030/campos_512_v4
+80/412227/campos_512_v4
+80/412302/campos_512_v4
+80/412343/campos_512_v4
+80/412433/campos_512_v4
+80/412615/campos_512_v4
+80/412634/campos_512_v4
+80/412876/campos_512_v4
+80/412895/campos_512_v4
+80/412898/campos_512_v4
+80/412925/campos_512_v4
+80/412926/campos_512_v4
+80/412974/campos_512_v4
+80/413107/campos_512_v4
+80/413205/campos_512_v4
+80/413217/campos_512_v4
+80/413247/campos_512_v4
+80/413806/campos_512_v4
+80/413880/campos_512_v4
+80/414053/campos_512_v4
+80/414102/campos_512_v4
+80/414116/campos_512_v4
+80/414457/campos_512_v4
+80/414536/campos_512_v4
+80/414566/campos_512_v4
+80/414705/campos_512_v4
+81/415233/campos_512_v4
+81/415416/campos_512_v4
+81/415451/campos_512_v4
+81/415530/campos_512_v4
+81/415595/campos_512_v4
+81/415606/campos_512_v4
+81/415734/campos_512_v4
+81/415867/campos_512_v4
+81/415884/campos_512_v4
+81/416334/campos_512_v4
+81/416549/campos_512_v4
+81/416591/campos_512_v4
+81/416794/campos_512_v4
+81/417253/campos_512_v4
+81/417326/campos_512_v4
+81/417356/campos_512_v4
+81/417522/campos_512_v4
+81/417532/campos_512_v4
+81/417592/campos_512_v4
+81/417707/campos_512_v4
+81/417867/campos_512_v4
+81/417904/campos_512_v4
+81/417912/campos_512_v4
+81/418103/campos_512_v4
+81/418200/campos_512_v4
+81/418406/campos_512_v4
+81/418486/campos_512_v4
+81/418679/campos_512_v4
+81/419162/campos_512_v4
+81/419224/campos_512_v4
+81/419243/campos_512_v4
+81/419539/campos_512_v4
+81/419870/campos_512_v4
+81/419918/campos_512_v4
+82/420191/campos_512_v4
+82/420216/campos_512_v4
+82/420313/campos_512_v4
+82/420347/campos_512_v4
+82/420523/campos_512_v4
+82/420650/campos_512_v4
+82/420837/campos_512_v4
+82/420838/campos_512_v4
+82/420862/campos_512_v4
+82/421008/campos_512_v4
+82/421014/campos_512_v4
+82/421389/campos_512_v4
+82/421582/campos_512_v4
+82/421748/campos_512_v4
+82/421828/campos_512_v4
+82/421988/campos_512_v4
+82/422053/campos_512_v4
+82/422255/campos_512_v4
+82/422273/campos_512_v4
+82/422322/campos_512_v4
+82/422331/campos_512_v4
+82/422433/campos_512_v4
+82/422540/campos_512_v4
+82/422568/campos_512_v4
+82/422613/campos_512_v4
+82/422644/campos_512_v4
+82/422899/campos_512_v4
+82/423285/campos_512_v4
+82/423307/campos_512_v4
+82/423424/campos_512_v4
+82/423425/campos_512_v4
+82/423453/campos_512_v4
+82/423536/campos_512_v4
+82/423720/campos_512_v4
+82/423735/campos_512_v4
+82/423763/campos_512_v4
+82/423939/campos_512_v4
+82/424136/campos_512_v4
+82/424149/campos_512_v4
+82/424213/campos_512_v4
+82/424224/campos_512_v4
+82/424256/campos_512_v4
+82/424387/campos_512_v4
+82/424430/campos_512_v4
+82/424478/campos_512_v4
+82/424525/campos_512_v4
+82/424545/campos_512_v4
+82/424610/campos_512_v4
+82/424709/campos_512_v4
+83/425038/campos_512_v4
+83/425120/campos_512_v4
+83/425191/campos_512_v4
+83/425299/campos_512_v4
+83/425622/campos_512_v4
+83/425786/campos_512_v4
+83/425810/campos_512_v4
+83/425822/campos_512_v4
+83/425942/campos_512_v4
+83/426014/campos_512_v4
+83/426064/campos_512_v4
+83/426307/campos_512_v4
+83/426365/campos_512_v4
+83/426429/campos_512_v4
+83/426487/campos_512_v4
+83/426498/campos_512_v4
+83/426732/campos_512_v4
+83/427091/campos_512_v4
+83/427191/campos_512_v4
+83/427267/campos_512_v4
+83/427607/campos_512_v4
+83/427635/campos_512_v4
+83/427654/campos_512_v4
+83/427710/campos_512_v4
+83/427809/campos_512_v4
+83/427888/campos_512_v4
+83/428042/campos_512_v4
+83/428083/campos_512_v4
+83/428130/campos_512_v4
+83/428184/campos_512_v4
+83/428221/campos_512_v4
+83/428226/campos_512_v4
+83/428626/campos_512_v4
+83/428724/campos_512_v4
+83/428855/campos_512_v4
+83/429189/campos_512_v4
+83/429199/campos_512_v4
+83/429228/campos_512_v4
+83/429304/campos_512_v4
+83/429637/campos_512_v4
+84/430003/campos_512_v4
+84/430233/campos_512_v4
+84/430274/campos_512_v4
+84/430339/campos_512_v4
+84/430366/campos_512_v4
+84/430453/campos_512_v4
+84/430463/campos_512_v4
+84/430623/campos_512_v4
+84/430734/campos_512_v4
+84/430822/campos_512_v4
+84/430918/campos_512_v4
+84/430958/campos_512_v4
+84/430998/campos_512_v4
+84/431246/campos_512_v4
+84/431388/campos_512_v4
+84/431424/campos_512_v4
+84/431451/campos_512_v4
+84/431638/campos_512_v4
+84/431701/campos_512_v4
+84/431733/campos_512_v4
+84/431958/campos_512_v4
+84/431960/campos_512_v4
+84/432040/campos_512_v4
+84/432043/campos_512_v4
+84/432364/campos_512_v4
+84/432392/campos_512_v4
+84/432434/campos_512_v4
+84/432675/campos_512_v4
+84/432713/campos_512_v4
+84/432731/campos_512_v4
+84/432945/campos_512_v4
+84/433054/campos_512_v4
+84/433057/campos_512_v4
+84/433161/campos_512_v4
+84/433209/campos_512_v4
+84/433311/campos_512_v4
+84/433367/campos_512_v4
+84/433372/campos_512_v4
+84/433406/campos_512_v4
+84/433468/campos_512_v4
+84/433776/campos_512_v4
+84/433810/campos_512_v4
+84/433817/campos_512_v4
+84/433916/campos_512_v4
+84/434060/campos_512_v4
+84/434084/campos_512_v4
+84/434201/campos_512_v4
+84/434554/campos_512_v4
+84/434678/campos_512_v4
+84/434806/campos_512_v4
+85/435026/campos_512_v4
+85/435144/campos_512_v4
+85/435186/campos_512_v4
+85/435316/campos_512_v4
+85/435354/campos_512_v4
+85/435384/campos_512_v4
+85/435473/campos_512_v4
+85/435522/campos_512_v4
+85/435535/campos_512_v4
+85/435560/campos_512_v4
+85/435615/campos_512_v4
+85/435742/campos_512_v4
+85/435855/campos_512_v4
+85/435952/campos_512_v4
+85/436149/campos_512_v4
+85/436152/campos_512_v4
+85/436191/campos_512_v4
+85/436375/campos_512_v4
+85/436422/campos_512_v4
+85/436527/campos_512_v4
+85/436562/campos_512_v4
+85/436806/campos_512_v4
+85/436848/campos_512_v4
+85/437124/campos_512_v4
+85/437274/campos_512_v4
+85/437507/campos_512_v4
+85/437611/campos_512_v4
+85/437693/campos_512_v4
+85/437753/campos_512_v4
+85/438072/campos_512_v4
+85/438096/campos_512_v4
+85/438130/campos_512_v4
+85/438172/campos_512_v4
+85/438221/campos_512_v4
+85/438229/campos_512_v4
+85/438231/campos_512_v4
+85/438270/campos_512_v4
+85/438352/campos_512_v4
+85/438398/campos_512_v4
+85/438460/campos_512_v4
+85/438710/campos_512_v4
+85/438957/campos_512_v4
+85/439477/campos_512_v4
+85/439631/campos_512_v4
+85/439633/campos_512_v4
+85/439677/campos_512_v4
+85/439685/campos_512_v4
+85/439707/campos_512_v4
+86/440115/campos_512_v4
+86/440127/campos_512_v4
+86/440156/campos_512_v4
+86/440248/campos_512_v4
+86/440310/campos_512_v4
+86/440350/campos_512_v4
+86/440452/campos_512_v4
+86/440456/campos_512_v4
+86/440461/campos_512_v4
+86/440562/campos_512_v4
+86/440694/campos_512_v4
+86/440707/campos_512_v4
+86/440719/campos_512_v4
+86/440893/campos_512_v4
+86/441234/campos_512_v4
+86/441268/campos_512_v4
+86/441283/campos_512_v4
+86/441373/campos_512_v4
+86/441534/campos_512_v4
+86/441537/campos_512_v4
+86/441556/campos_512_v4
+86/441703/campos_512_v4
+86/441754/campos_512_v4
+86/441822/campos_512_v4
+86/441941/campos_512_v4
+86/442016/campos_512_v4
+86/442259/campos_512_v4
+86/442334/campos_512_v4
+86/442335/campos_512_v4
+86/442389/campos_512_v4
+86/442521/campos_512_v4
+86/442680/campos_512_v4
+86/442747/campos_512_v4
+86/442766/campos_512_v4
+86/442974/campos_512_v4
+86/443024/campos_512_v4
+86/443193/campos_512_v4
+86/443311/campos_512_v4
+86/443376/campos_512_v4
+86/443490/campos_512_v4
+86/443495/campos_512_v4
+86/443507/campos_512_v4
+86/443553/campos_512_v4
+86/443642/campos_512_v4
+86/443649/campos_512_v4
+86/443734/campos_512_v4
+86/443868/campos_512_v4
+86/443955/campos_512_v4
+86/443990/campos_512_v4
+86/444023/campos_512_v4
+86/444125/campos_512_v4
+86/444165/campos_512_v4
+86/444273/campos_512_v4
+86/444501/campos_512_v4
+86/444648/campos_512_v4
+86/444731/campos_512_v4
+86/444835/campos_512_v4
+86/444856/campos_512_v4
+86/444878/campos_512_v4
+87/445018/campos_512_v4
+87/445065/campos_512_v4
+87/445078/campos_512_v4
+87/445217/campos_512_v4
+87/445459/campos_512_v4
+87/445704/campos_512_v4
+87/445836/campos_512_v4
+87/445914/campos_512_v4
+87/445993/campos_512_v4
+87/446005/campos_512_v4
+87/446096/campos_512_v4
+87/446108/campos_512_v4
+87/446291/campos_512_v4
+87/446358/campos_512_v4
+87/446676/campos_512_v4
+87/446705/campos_512_v4
+87/446731/campos_512_v4
+87/446924/campos_512_v4
+87/447064/campos_512_v4
+87/447159/campos_512_v4
+87/447199/campos_512_v4
+87/447559/campos_512_v4
+87/448043/campos_512_v4
+87/448168/campos_512_v4
+87/448204/campos_512_v4
+87/448347/campos_512_v4
+87/448361/campos_512_v4
+87/448424/campos_512_v4
+87/448537/campos_512_v4
+87/448540/campos_512_v4
+87/448663/campos_512_v4
+87/449021/campos_512_v4
+87/449159/campos_512_v4
+87/449194/campos_512_v4
+87/449251/campos_512_v4
+87/449411/campos_512_v4
+87/449494/campos_512_v4
+87/449581/campos_512_v4
+87/449595/campos_512_v4
+87/449695/campos_512_v4
+87/449767/campos_512_v4
+88/450067/campos_512_v4
+88/450354/campos_512_v4
+88/450355/campos_512_v4
+88/450484/campos_512_v4
+88/451130/campos_512_v4
+88/451142/campos_512_v4
+88/451244/campos_512_v4
+88/451353/campos_512_v4
+88/451369/campos_512_v4
+88/451475/campos_512_v4
+88/451487/campos_512_v4
+88/451710/campos_512_v4
+88/451787/campos_512_v4
+88/451831/campos_512_v4
+88/451980/campos_512_v4
+88/452039/campos_512_v4
+88/452160/campos_512_v4
+88/452220/campos_512_v4
+88/452463/campos_512_v4
+88/452476/campos_512_v4
+88/452493/campos_512_v4
+88/452507/campos_512_v4
+88/452520/campos_512_v4
+88/452571/campos_512_v4
+88/452593/campos_512_v4
+88/452701/campos_512_v4
+88/452763/campos_512_v4
+88/453186/campos_512_v4
+88/453292/campos_512_v4
+88/453489/campos_512_v4
+88/453733/campos_512_v4
+88/453857/campos_512_v4
+88/453893/campos_512_v4
+88/453920/campos_512_v4
+88/454130/campos_512_v4
+88/454162/campos_512_v4
+88/454231/campos_512_v4
+88/454310/campos_512_v4
+88/454523/campos_512_v4
+88/454662/campos_512_v4
+88/454731/campos_512_v4
+89/455212/campos_512_v4
+89/455311/campos_512_v4
+89/455392/campos_512_v4
+89/455542/campos_512_v4
+89/455706/campos_512_v4
+89/456072/campos_512_v4
+89/456089/campos_512_v4
+89/456111/campos_512_v4
+89/456147/campos_512_v4
+89/456446/campos_512_v4
+89/456575/campos_512_v4
+89/456669/campos_512_v4
+89/456752/campos_512_v4
+89/456800/campos_512_v4
+89/456839/campos_512_v4
+89/456854/campos_512_v4
+89/456878/campos_512_v4
+89/457248/campos_512_v4
+89/457327/campos_512_v4
+89/457435/campos_512_v4
+89/457483/campos_512_v4
+89/457544/campos_512_v4
+89/457653/campos_512_v4
+89/457696/campos_512_v4
+89/457736/campos_512_v4
+89/457931/campos_512_v4
+89/458202/campos_512_v4
+89/458228/campos_512_v4
+89/458299/campos_512_v4
+89/458324/campos_512_v4
+89/458569/campos_512_v4
+89/458707/campos_512_v4
+89/458867/campos_512_v4
+89/458993/campos_512_v4
+89/459038/campos_512_v4
+89/459093/campos_512_v4
+89/459289/campos_512_v4
+89/459693/campos_512_v4
+89/459799/campos_512_v4
+89/459845/campos_512_v4
+89/459906/campos_512_v4
+89/459920/campos_512_v4
+89/459938/campos_512_v4
+89/459955/campos_512_v4
+89/459969/campos_512_v4
+89/459985/campos_512_v4
+9/55004/campos_512_v4
+9/55103/campos_512_v4
+9/55186/campos_512_v4
+9/55232/campos_512_v4
+9/55413/campos_512_v4
+9/55499/campos_512_v4
+9/55547/campos_512_v4
+9/55621/campos_512_v4
+9/55665/campos_512_v4
+9/55675/campos_512_v4
+9/55731/campos_512_v4
+9/55775/campos_512_v4
+9/55843/campos_512_v4
+9/56047/campos_512_v4
+9/56153/campos_512_v4
+9/56234/campos_512_v4
+9/56301/campos_512_v4
+9/56521/campos_512_v4
+9/56547/campos_512_v4
+9/56613/campos_512_v4
+9/56865/campos_512_v4
+9/56954/campos_512_v4
+9/57103/campos_512_v4
+9/57338/campos_512_v4
+9/57355/campos_512_v4
+9/57364/campos_512_v4
+9/57430/campos_512_v4
+9/57498/campos_512_v4
+9/57649/campos_512_v4
+9/57687/campos_512_v4
+9/57843/campos_512_v4
+9/58025/campos_512_v4
+9/58061/campos_512_v4
+9/58070/campos_512_v4
+9/58099/campos_512_v4
+9/58126/campos_512_v4
+9/58162/campos_512_v4
+9/58237/campos_512_v4
+9/58404/campos_512_v4
+9/58436/campos_512_v4
+9/58550/campos_512_v4
+9/58553/campos_512_v4
+9/58660/campos_512_v4
+9/58683/campos_512_v4
+9/58727/campos_512_v4
+9/58774/campos_512_v4
+9/58921/campos_512_v4
+9/58983/campos_512_v4
+9/59044/campos_512_v4
+9/59186/campos_512_v4
+9/59379/campos_512_v4
+90/460021/campos_512_v4
+90/460149/campos_512_v4
+90/460423/campos_512_v4
+90/460424/campos_512_v4
+90/460539/campos_512_v4
+90/460568/campos_512_v4
+90/460590/campos_512_v4
+90/460592/campos_512_v4
+90/460594/campos_512_v4
+90/460748/campos_512_v4
+90/460865/campos_512_v4
+90/460916/campos_512_v4
+90/460931/campos_512_v4
+90/461127/campos_512_v4
+90/461326/campos_512_v4
+90/461384/campos_512_v4
+90/461433/campos_512_v4
+90/461860/campos_512_v4
+90/461885/campos_512_v4
+90/461910/campos_512_v4
+90/461933/campos_512_v4
+90/462139/campos_512_v4
+90/462146/campos_512_v4
+90/462297/campos_512_v4
+90/462389/campos_512_v4
+90/462455/campos_512_v4
+90/462528/campos_512_v4
+90/462546/campos_512_v4
+90/462591/campos_512_v4
+90/462611/campos_512_v4
+90/462678/campos_512_v4
+90/462835/campos_512_v4
+90/462867/campos_512_v4
+90/463153/campos_512_v4
+90/463229/campos_512_v4
+90/463399/campos_512_v4
+90/463537/campos_512_v4
+90/463539/campos_512_v4
+90/463633/campos_512_v4
+90/463636/campos_512_v4
+90/463653/campos_512_v4
+90/463680/campos_512_v4
+90/463813/campos_512_v4
+90/463944/campos_512_v4
+90/463991/campos_512_v4
+90/464001/campos_512_v4
+90/464049/campos_512_v4
+90/464080/campos_512_v4
+90/464180/campos_512_v4
+90/464209/campos_512_v4
+90/464234/campos_512_v4
+90/464254/campos_512_v4
+90/464347/campos_512_v4
+90/464372/campos_512_v4
+90/464449/campos_512_v4
+90/464507/campos_512_v4
+90/464561/campos_512_v4
+90/464697/campos_512_v4
+90/464740/campos_512_v4
+90/464772/campos_512_v4
+90/464950/campos_512_v4
+91/465158/campos_512_v4
+91/465294/campos_512_v4
+91/465407/campos_512_v4
+91/465463/campos_512_v4
+91/465485/campos_512_v4
+91/465509/campos_512_v4
+91/465592/campos_512_v4
+91/465627/campos_512_v4
+91/465799/campos_512_v4
+91/465807/campos_512_v4
+91/465831/campos_512_v4
+91/465870/campos_512_v4
+91/465954/campos_512_v4
+91/465959/campos_512_v4
+91/465968/campos_512_v4
+91/466038/campos_512_v4
+91/466140/campos_512_v4
+91/466559/campos_512_v4
+91/466773/campos_512_v4
+91/467062/campos_512_v4
+91/467200/campos_512_v4
+91/467382/campos_512_v4
+91/467566/campos_512_v4
+91/467598/campos_512_v4
+91/467625/campos_512_v4
+91/467865/campos_512_v4
+91/467888/campos_512_v4
+91/467980/campos_512_v4
+91/468002/campos_512_v4
+91/468046/campos_512_v4
+91/468158/campos_512_v4
+91/468205/campos_512_v4
+91/468241/campos_512_v4
+91/468257/campos_512_v4
+91/468294/campos_512_v4
+91/468357/campos_512_v4
+91/468361/campos_512_v4
+91/468660/campos_512_v4
+91/468662/campos_512_v4
+91/468880/campos_512_v4
+91/468914/campos_512_v4
+91/469284/campos_512_v4
+91/469449/campos_512_v4
+91/469529/campos_512_v4
+91/469586/campos_512_v4
+91/469784/campos_512_v4
+91/469935/campos_512_v4
+92/470010/campos_512_v4
+92/470160/campos_512_v4
+92/470218/campos_512_v4
+92/470269/campos_512_v4
+92/470291/campos_512_v4
+92/470317/campos_512_v4
+92/470423/campos_512_v4
+92/470545/campos_512_v4
+92/470699/campos_512_v4
+92/471006/campos_512_v4
+92/471234/campos_512_v4
+92/471283/campos_512_v4
+92/471317/campos_512_v4
+92/471350/campos_512_v4
+92/471668/campos_512_v4
+92/471714/campos_512_v4
+92/471750/campos_512_v4
+92/471947/campos_512_v4
+92/472032/campos_512_v4
+92/472106/campos_512_v4
+92/472198/campos_512_v4
+92/472366/campos_512_v4
+92/472382/campos_512_v4
+92/472452/campos_512_v4
+92/472501/campos_512_v4
+92/472548/campos_512_v4
+92/472554/campos_512_v4
+92/472572/campos_512_v4
+92/472751/campos_512_v4
+92/472798/campos_512_v4
+92/472901/campos_512_v4
+92/472932/campos_512_v4
+92/472949/campos_512_v4
+92/473095/campos_512_v4
+92/473126/campos_512_v4
+92/473163/campos_512_v4
+92/473350/campos_512_v4
+92/473597/campos_512_v4
+92/473748/campos_512_v4
+92/473897/campos_512_v4
+92/473903/campos_512_v4
+92/473993/campos_512_v4
+92/474156/campos_512_v4
+92/474212/campos_512_v4
+92/474475/campos_512_v4
+92/474739/campos_512_v4
+92/474784/campos_512_v4
+92/474881/campos_512_v4
+92/474894/campos_512_v4
+93/475034/campos_512_v4
+93/475067/campos_512_v4
+93/475119/campos_512_v4
+93/475238/campos_512_v4
+93/475535/campos_512_v4
+93/475699/campos_512_v4
+93/475742/campos_512_v4
+93/475779/campos_512_v4
+93/476049/campos_512_v4
+93/476130/campos_512_v4
+93/476150/campos_512_v4
+93/476278/campos_512_v4
+93/476295/campos_512_v4
+93/476494/campos_512_v4
+93/476589/campos_512_v4
+93/476592/campos_512_v4
+93/476689/campos_512_v4
+93/476861/campos_512_v4
+93/477136/campos_512_v4
+93/477195/campos_512_v4
+93/477496/campos_512_v4
+93/477509/campos_512_v4
+93/477639/campos_512_v4
+93/478091/campos_512_v4
+93/478112/campos_512_v4
+93/478324/campos_512_v4
+93/478593/campos_512_v4
+93/478665/campos_512_v4
+93/478924/campos_512_v4
+93/479365/campos_512_v4
+93/479742/campos_512_v4
+93/479764/campos_512_v4
+93/479797/campos_512_v4
+93/479977/campos_512_v4
+93/479997/campos_512_v4
+94/480224/campos_512_v4
+94/480392/campos_512_v4
+94/480512/campos_512_v4
+94/480825/campos_512_v4
+94/480983/campos_512_v4
+94/481068/campos_512_v4
+94/481073/campos_512_v4
+94/481095/campos_512_v4
+94/481213/campos_512_v4
+94/481343/campos_512_v4
+94/481392/campos_512_v4
+94/481443/campos_512_v4
+94/481569/campos_512_v4
+94/481731/campos_512_v4
+94/481783/campos_512_v4
+94/482047/campos_512_v4
+94/482298/campos_512_v4
+94/482340/campos_512_v4
+94/482375/campos_512_v4
+94/482393/campos_512_v4
+94/482482/campos_512_v4
+94/482681/campos_512_v4
+94/482888/campos_512_v4
+94/482898/campos_512_v4
+94/483053/campos_512_v4
+94/483140/campos_512_v4
+94/483184/campos_512_v4
+94/483198/campos_512_v4
+94/483236/campos_512_v4
+94/483307/campos_512_v4
+94/483368/campos_512_v4
+94/483434/campos_512_v4
+94/483460/campos_512_v4
+94/483521/campos_512_v4
+94/483599/campos_512_v4
+94/483620/campos_512_v4
+94/483938/campos_512_v4
+94/483958/campos_512_v4
+94/483990/campos_512_v4
+94/484077/campos_512_v4
+94/484176/campos_512_v4
+94/484440/campos_512_v4
+94/484453/campos_512_v4
+94/484626/campos_512_v4
+94/484642/campos_512_v4
+94/484706/campos_512_v4
+94/484765/campos_512_v4
+94/484852/campos_512_v4
+94/484918/campos_512_v4
+94/484956/campos_512_v4
+95/485040/campos_512_v4
+95/485079/campos_512_v4
+95/485203/campos_512_v4
+95/485264/campos_512_v4
+95/485345/campos_512_v4
+95/485441/campos_512_v4
+95/485638/campos_512_v4
+95/486042/campos_512_v4
+95/486196/campos_512_v4
+95/486307/campos_512_v4
+95/486375/campos_512_v4
+95/486645/campos_512_v4
+95/486955/campos_512_v4
+95/486999/campos_512_v4
+95/487011/campos_512_v4
+95/487245/campos_512_v4
+95/487597/campos_512_v4
+95/487667/campos_512_v4
+95/487849/campos_512_v4
+95/487852/campos_512_v4
+95/487951/campos_512_v4
+95/488031/campos_512_v4
+95/488180/campos_512_v4
+95/488212/campos_512_v4
+95/488226/campos_512_v4
+95/488275/campos_512_v4
+95/488311/campos_512_v4
+95/488396/campos_512_v4
+95/488465/campos_512_v4
+95/488525/campos_512_v4
+95/488867/campos_512_v4
+95/488957/campos_512_v4
+95/489254/campos_512_v4
+95/489402/campos_512_v4
+95/489542/campos_512_v4
+95/489650/campos_512_v4
+95/489755/campos_512_v4
+95/489779/campos_512_v4
+95/489834/campos_512_v4
+96/490223/campos_512_v4
+96/490254/campos_512_v4
+96/490345/campos_512_v4
+96/490694/campos_512_v4
+96/490702/campos_512_v4
+96/490725/campos_512_v4
+96/490773/campos_512_v4
+96/491000/campos_512_v4
+96/491124/campos_512_v4
+96/491211/campos_512_v4
+96/491234/campos_512_v4
+96/491505/campos_512_v4
+96/491688/campos_512_v4
+96/491709/campos_512_v4
+96/491736/campos_512_v4
+96/491826/campos_512_v4
+96/491999/campos_512_v4
+96/492065/campos_512_v4
+96/492205/campos_512_v4
+96/492267/campos_512_v4
+96/492371/campos_512_v4
+96/492406/campos_512_v4
+96/492511/campos_512_v4
+96/492548/campos_512_v4
+96/492659/campos_512_v4
+96/493224/campos_512_v4
+96/493233/campos_512_v4
+96/493492/campos_512_v4
+96/493578/campos_512_v4
+96/493606/campos_512_v4
+96/493733/campos_512_v4
+96/493960/campos_512_v4
+96/494178/campos_512_v4
+96/494182/campos_512_v4
+96/494497/campos_512_v4
+96/494507/campos_512_v4
+96/494886/campos_512_v4
+96/494925/campos_512_v4
+97/495187/campos_512_v4
+97/495198/campos_512_v4
+97/495321/campos_512_v4
+97/495331/campos_512_v4
+97/495344/campos_512_v4
+97/495388/campos_512_v4
+97/495399/campos_512_v4
+97/495442/campos_512_v4
+97/495842/campos_512_v4
+97/496041/campos_512_v4
+97/496054/campos_512_v4
+97/496427/campos_512_v4
+97/496464/campos_512_v4
+97/496613/campos_512_v4
+97/496841/campos_512_v4
+97/496879/campos_512_v4
+97/496898/campos_512_v4
+97/496955/campos_512_v4
+97/497245/campos_512_v4
+97/497293/campos_512_v4
+97/497321/campos_512_v4
+97/497409/campos_512_v4
+97/497546/campos_512_v4
+97/497570/campos_512_v4
+97/497593/campos_512_v4
+97/497697/campos_512_v4
+97/498016/campos_512_v4
+97/498024/campos_512_v4
+97/498075/campos_512_v4
+97/498139/campos_512_v4
+97/498159/campos_512_v4
+97/498223/campos_512_v4
+97/498259/campos_512_v4
+97/498274/campos_512_v4
+97/498292/campos_512_v4
+97/498555/campos_512_v4
+97/498568/campos_512_v4
+97/498592/campos_512_v4
+97/498755/campos_512_v4
+97/498823/campos_512_v4
+97/498894/campos_512_v4
+97/498957/campos_512_v4
+97/499018/campos_512_v4
+97/499058/campos_512_v4
+97/499138/campos_512_v4
+97/499217/campos_512_v4
+97/499231/campos_512_v4
+97/499248/campos_512_v4
+97/499403/campos_512_v4
+97/499460/campos_512_v4
+97/499480/campos_512_v4
+97/499636/campos_512_v4
+97/499654/campos_512_v4
+97/499890/campos_512_v4
+98/500089/campos_512_v4
+98/500113/campos_512_v4
+98/500182/campos_512_v4
+98/500255/campos_512_v4
+98/500322/campos_512_v4
+98/500427/campos_512_v4
+98/500447/campos_512_v4
+98/500502/campos_512_v4
+98/500542/campos_512_v4
+98/501035/campos_512_v4
+98/501281/campos_512_v4
+98/501316/campos_512_v4
+98/501485/campos_512_v4
+98/502161/campos_512_v4
+98/502165/campos_512_v4
+98/502237/campos_512_v4
+98/502320/campos_512_v4
+98/502437/campos_512_v4
+98/502470/campos_512_v4
+98/502539/campos_512_v4
+98/502615/campos_512_v4
+98/502641/campos_512_v4
+98/502809/campos_512_v4
+98/502847/campos_512_v4
+98/502849/campos_512_v4
+98/502889/campos_512_v4
+98/502907/campos_512_v4
+98/503090/campos_512_v4
+98/503235/campos_512_v4
+98/503295/campos_512_v4
+98/503350/campos_512_v4
+98/503420/campos_512_v4
+98/503458/campos_512_v4
+98/503469/campos_512_v4
+98/503545/campos_512_v4
+98/503606/campos_512_v4
+98/503655/campos_512_v4
+98/503792/campos_512_v4
+98/503835/campos_512_v4
+98/503914/campos_512_v4
+98/503927/campos_512_v4
+98/504035/campos_512_v4
+98/504084/campos_512_v4
+98/504110/campos_512_v4
+98/504370/campos_512_v4
+98/504537/campos_512_v4
+98/504634/campos_512_v4
+98/504640/campos_512_v4
+98/504679/campos_512_v4
+98/504745/campos_512_v4
+98/504755/campos_512_v4
+98/504770/campos_512_v4
+98/504872/campos_512_v4
+98/504967/campos_512_v4
+99/505077/campos_512_v4
+99/505120/campos_512_v4
+99/505135/campos_512_v4
+99/505231/campos_512_v4
+99/505272/campos_512_v4
+99/505314/campos_512_v4
+99/505341/campos_512_v4
+99/505400/campos_512_v4
+99/505471/campos_512_v4
+99/505719/campos_512_v4
+99/505727/campos_512_v4
+99/505967/campos_512_v4
+99/505969/campos_512_v4
+99/506018/campos_512_v4
+99/506088/campos_512_v4
+99/506539/campos_512_v4
+99/506644/campos_512_v4
+99/506884/campos_512_v4
+99/507033/campos_512_v4
+99/507254/campos_512_v4
+99/507320/campos_512_v4
+99/507412/campos_512_v4
+99/507788/campos_512_v4
+99/507811/campos_512_v4
+99/507908/campos_512_v4
+99/507929/campos_512_v4
+99/507976/campos_512_v4
+99/508025/campos_512_v4
+99/508161/campos_512_v4
+99/508359/campos_512_v4
+99/508564/campos_512_v4
+99/508810/campos_512_v4
+99/508968/campos_512_v4
+99/509066/campos_512_v4
+99/509099/campos_512_v4
+99/509196/campos_512_v4
+99/509243/campos_512_v4
+99/509255/campos_512_v4
+99/509279/campos_512_v4
+99/509282/campos_512_v4
+99/509293/campos_512_v4
+99/509297/campos_512_v4
+99/509326/campos_512_v4
+99/509431/campos_512_v4
+99/509708/campos_512_v4
+99/509735/campos_512_v4
+99/509828/campos_512_v4
+99/509983/campos_512_v4
diff --git a/shell_scripts/raw_img_list/Human-Shape.txt b/shell_scripts/raw_img_list/Human-Shape.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b860e75a3bfb34c5e1ea07d64a7129a62c4999e6
--- /dev/null
+++ b/shell_scripts/raw_img_list/Human-Shape.txt
@@ -0,0 +1,10208 @@
+0/10133/campos_512_v4
+0/10210/campos_512_v4
+0/10240/campos_512_v4
+0/10523/campos_512_v4
+0/10528/campos_512_v4
+0/10632/campos_512_v4
+0/10863/campos_512_v4
+0/11047/campos_512_v4
+0/11110/campos_512_v4
+0/11130/campos_512_v4
+0/11236/campos_512_v4
+0/11295/campos_512_v4
+0/11330/campos_512_v4
+0/11448/campos_512_v4
+0/11564/campos_512_v4
+0/11635/campos_512_v4
+0/11742/campos_512_v4
+0/11841/campos_512_v4
+0/11906/campos_512_v4
+0/11930/campos_512_v4
+0/11935/campos_512_v4
+0/11964/campos_512_v4
+0/12009/campos_512_v4
+0/12280/campos_512_v4
+0/12471/campos_512_v4
+0/12474/campos_512_v4
+0/12563/campos_512_v4
+0/12696/campos_512_v4
+0/12779/campos_512_v4
+0/12783/campos_512_v4
+0/12974/campos_512_v4
+0/13094/campos_512_v4
+0/13105/campos_512_v4
+0/13107/campos_512_v4
+0/13139/campos_512_v4
+0/13301/campos_512_v4
+0/13315/campos_512_v4
+0/13350/campos_512_v4
+0/13396/campos_512_v4
+0/13409/campos_512_v4
+0/13486/campos_512_v4
+0/13589/campos_512_v4
+0/13609/campos_512_v4
+0/13672/campos_512_v4
+0/13687/campos_512_v4
+0/13735/campos_512_v4
+0/13757/campos_512_v4
+0/13759/campos_512_v4
+0/13794/campos_512_v4
+0/13801/campos_512_v4
+0/13847/campos_512_v4
+0/13897/campos_512_v4
+0/13919/campos_512_v4
+0/13959/campos_512_v4
+0/14047/campos_512_v4
+0/14125/campos_512_v4
+0/14168/campos_512_v4
+0/14191/campos_512_v4
+0/14308/campos_512_v4
+0/14431/campos_512_v4
+0/14437/campos_512_v4
+0/14532/campos_512_v4
+0/14613/campos_512_v4
+0/14663/campos_512_v4
+1/15087/campos_512_v4
+1/15354/campos_512_v4
+1/15567/campos_512_v4
+1/15573/campos_512_v4
+1/15575/campos_512_v4
+1/15603/campos_512_v4
+1/15726/campos_512_v4
+1/15746/campos_512_v4
+1/15840/campos_512_v4
+1/16047/campos_512_v4
+1/16115/campos_512_v4
+1/16218/campos_512_v4
+1/16240/campos_512_v4
+1/16580/campos_512_v4
+1/16930/campos_512_v4
+1/16984/campos_512_v4
+1/17081/campos_512_v4
+1/17272/campos_512_v4
+1/17575/campos_512_v4
+1/17695/campos_512_v4
+1/17696/campos_512_v4
+1/17698/campos_512_v4
+1/17815/campos_512_v4
+1/18020/campos_512_v4
+1/18108/campos_512_v4
+1/18130/campos_512_v4
+1/18149/campos_512_v4
+1/18202/campos_512_v4
+1/18223/campos_512_v4
+1/18246/campos_512_v4
+1/18513/campos_512_v4
+1/18531/campos_512_v4
+1/18552/campos_512_v4
+1/18622/campos_512_v4
+1/18639/campos_512_v4
+1/18738/campos_512_v4
+1/18810/campos_512_v4
+1/18928/campos_512_v4
+1/19003/campos_512_v4
+1/19240/campos_512_v4
+1/19282/campos_512_v4
+1/19345/campos_512_v4
+1/19485/campos_512_v4
+1/19506/campos_512_v4
+1/19524/campos_512_v4
+1/19686/campos_512_v4
+1/19689/campos_512_v4
+1/19921/campos_512_v4
+10/60272/campos_512_v4
+10/60879/campos_512_v4
+10/61053/campos_512_v4
+10/61101/campos_512_v4
+10/61212/campos_512_v4
+10/61245/campos_512_v4
+10/61299/campos_512_v4
+10/62108/campos_512_v4
+10/62124/campos_512_v4
+10/62196/campos_512_v4
+10/62472/campos_512_v4
+10/62582/campos_512_v4
+10/62643/campos_512_v4
+10/62704/campos_512_v4
+10/62862/campos_512_v4
+10/63046/campos_512_v4
+10/63176/campos_512_v4
+10/63556/campos_512_v4
+10/63588/campos_512_v4
+10/63601/campos_512_v4
+10/63644/campos_512_v4
+10/63695/campos_512_v4
+10/63947/campos_512_v4
+10/64014/campos_512_v4
+10/64022/campos_512_v4
+10/64125/campos_512_v4
+10/64164/campos_512_v4
+10/64341/campos_512_v4
+10/64495/campos_512_v4
+10/64502/campos_512_v4
+10/64523/campos_512_v4
+10/64533/campos_512_v4
+10/64996/campos_512_v4
+100/510085/campos_512_v4
+100/510088/campos_512_v4
+100/510241/campos_512_v4
+100/510280/campos_512_v4
+100/510286/campos_512_v4
+100/510287/campos_512_v4
+100/510342/campos_512_v4
+100/510389/campos_512_v4
+100/510435/campos_512_v4
+100/510460/campos_512_v4
+100/510640/campos_512_v4
+100/510674/campos_512_v4
+100/510690/campos_512_v4
+100/510744/campos_512_v4
+100/510959/campos_512_v4
+100/511137/campos_512_v4
+100/511200/campos_512_v4
+100/511242/campos_512_v4
+100/511467/campos_512_v4
+100/511472/campos_512_v4
+100/511508/campos_512_v4
+100/511646/campos_512_v4
+100/511890/campos_512_v4
+100/511956/campos_512_v4
+100/511989/campos_512_v4
+100/512299/campos_512_v4
+100/512399/campos_512_v4
+100/512508/campos_512_v4
+100/512561/campos_512_v4
+100/512598/campos_512_v4
+100/512627/campos_512_v4
+100/512679/campos_512_v4
+100/512712/campos_512_v4
+100/512815/campos_512_v4
+100/512924/campos_512_v4
+100/512927/campos_512_v4
+100/512951/campos_512_v4
+100/512975/campos_512_v4
+100/513120/campos_512_v4
+100/513224/campos_512_v4
+100/513278/campos_512_v4
+100/513634/campos_512_v4
+100/513686/campos_512_v4
+100/513728/campos_512_v4
+100/513772/campos_512_v4
+100/513824/campos_512_v4
+100/513872/campos_512_v4
+100/513918/campos_512_v4
+100/513966/campos_512_v4
+100/514037/campos_512_v4
+100/514084/campos_512_v4
+100/514168/campos_512_v4
+100/514256/campos_512_v4
+100/514321/campos_512_v4
+100/514418/campos_512_v4
+100/514522/campos_512_v4
+100/514559/campos_512_v4
+100/514602/campos_512_v4
+100/514678/campos_512_v4
+100/514744/campos_512_v4
+100/514891/campos_512_v4
+100/514957/campos_512_v4
+100/514986/campos_512_v4
+101/515009/campos_512_v4
+101/515013/campos_512_v4
+101/515281/campos_512_v4
+101/515291/campos_512_v4
+101/515412/campos_512_v4
+101/515415/campos_512_v4
+101/515421/campos_512_v4
+101/515442/campos_512_v4
+101/515445/campos_512_v4
+101/515451/campos_512_v4
+101/515481/campos_512_v4
+101/515483/campos_512_v4
+101/515499/campos_512_v4
+101/515513/campos_512_v4
+101/515517/campos_512_v4
+101/515541/campos_512_v4
+101/515675/campos_512_v4
+101/515726/campos_512_v4
+101/515744/campos_512_v4
+101/515921/campos_512_v4
+101/515946/campos_512_v4
+101/516036/campos_512_v4
+101/516105/campos_512_v4
+101/516129/campos_512_v4
+101/516197/campos_512_v4
+101/516242/campos_512_v4
+101/516338/campos_512_v4
+101/516440/campos_512_v4
+101/516472/campos_512_v4
+101/516749/campos_512_v4
+101/516881/campos_512_v4
+101/516892/campos_512_v4
+101/517084/campos_512_v4
+101/517291/campos_512_v4
+101/517425/campos_512_v4
+101/517702/campos_512_v4
+101/517869/campos_512_v4
+101/517993/campos_512_v4
+101/518069/campos_512_v4
+101/518088/campos_512_v4
+101/518156/campos_512_v4
+101/518165/campos_512_v4
+101/518209/campos_512_v4
+101/518300/campos_512_v4
+101/518375/campos_512_v4
+101/518435/campos_512_v4
+101/518519/campos_512_v4
+101/518532/campos_512_v4
+101/518852/campos_512_v4
+101/519051/campos_512_v4
+101/519283/campos_512_v4
+101/519415/campos_512_v4
+101/519620/campos_512_v4
+101/519789/campos_512_v4
+101/519975/campos_512_v4
+102/520041/campos_512_v4
+102/520093/campos_512_v4
+102/520339/campos_512_v4
+102/520397/campos_512_v4
+102/520439/campos_512_v4
+102/520483/campos_512_v4
+102/520833/campos_512_v4
+102/521047/campos_512_v4
+102/521070/campos_512_v4
+102/521084/campos_512_v4
+102/521118/campos_512_v4
+102/521160/campos_512_v4
+102/521182/campos_512_v4
+102/521368/campos_512_v4
+102/521509/campos_512_v4
+102/521635/campos_512_v4
+102/521682/campos_512_v4
+102/521723/campos_512_v4
+102/521784/campos_512_v4
+102/521803/campos_512_v4
+102/521904/campos_512_v4
+102/521928/campos_512_v4
+102/521929/campos_512_v4
+102/522010/campos_512_v4
+102/522104/campos_512_v4
+102/522197/campos_512_v4
+102/522223/campos_512_v4
+102/522227/campos_512_v4
+102/522260/campos_512_v4
+102/522303/campos_512_v4
+102/522360/campos_512_v4
+102/522502/campos_512_v4
+102/522592/campos_512_v4
+102/522761/campos_512_v4
+102/522994/campos_512_v4
+102/523027/campos_512_v4
+102/523096/campos_512_v4
+102/523181/campos_512_v4
+102/523234/campos_512_v4
+102/523284/campos_512_v4
+102/523301/campos_512_v4
+102/523411/campos_512_v4
+102/523421/campos_512_v4
+102/523545/campos_512_v4
+102/523563/campos_512_v4
+102/523588/campos_512_v4
+102/523593/campos_512_v4
+102/523695/campos_512_v4
+102/523700/campos_512_v4
+102/523787/campos_512_v4
+102/523822/campos_512_v4
+102/523892/campos_512_v4
+102/523909/campos_512_v4
+102/524081/campos_512_v4
+102/524088/campos_512_v4
+102/524089/campos_512_v4
+102/524101/campos_512_v4
+102/524130/campos_512_v4
+102/524238/campos_512_v4
+102/524528/campos_512_v4
+102/524653/campos_512_v4
+102/524829/campos_512_v4
+102/524847/campos_512_v4
+102/524865/campos_512_v4
+102/524929/campos_512_v4
+102/524977/campos_512_v4
+103/525059/campos_512_v4
+103/525085/campos_512_v4
+103/525146/campos_512_v4
+103/525244/campos_512_v4
+103/525278/campos_512_v4
+103/525309/campos_512_v4
+103/525430/campos_512_v4
+103/525687/campos_512_v4
+103/525793/campos_512_v4
+103/526028/campos_512_v4
+103/526076/campos_512_v4
+103/526100/campos_512_v4
+103/526149/campos_512_v4
+103/526279/campos_512_v4
+103/526281/campos_512_v4
+103/526397/campos_512_v4
+103/526492/campos_512_v4
+103/526558/campos_512_v4
+103/526598/campos_512_v4
+103/526603/campos_512_v4
+103/527054/campos_512_v4
+103/527221/campos_512_v4
+103/527224/campos_512_v4
+103/527293/campos_512_v4
+103/527348/campos_512_v4
+103/527389/campos_512_v4
+103/527475/campos_512_v4
+103/527479/campos_512_v4
+103/527655/campos_512_v4
+103/527732/campos_512_v4
+103/527751/campos_512_v4
+103/527837/campos_512_v4
+103/527863/campos_512_v4
+103/527973/campos_512_v4
+103/528062/campos_512_v4
+103/528119/campos_512_v4
+103/528250/campos_512_v4
+103/528313/campos_512_v4
+103/528331/campos_512_v4
+103/528450/campos_512_v4
+103/528558/campos_512_v4
+103/528763/campos_512_v4
+103/528846/campos_512_v4
+103/528891/campos_512_v4
+103/528905/campos_512_v4
+103/528934/campos_512_v4
+103/528974/campos_512_v4
+103/528982/campos_512_v4
+103/529099/campos_512_v4
+103/529123/campos_512_v4
+103/529191/campos_512_v4
+103/529225/campos_512_v4
+103/529252/campos_512_v4
+103/529578/campos_512_v4
+103/529602/campos_512_v4
+103/529820/campos_512_v4
+103/529864/campos_512_v4
+103/529909/campos_512_v4
+104/530088/campos_512_v4
+104/530091/campos_512_v4
+104/530111/campos_512_v4
+104/530140/campos_512_v4
+104/530171/campos_512_v4
+104/530257/campos_512_v4
+104/530419/campos_512_v4
+104/530447/campos_512_v4
+104/530466/campos_512_v4
+104/530519/campos_512_v4
+104/530620/campos_512_v4
+104/530656/campos_512_v4
+104/530692/campos_512_v4
+104/530738/campos_512_v4
+104/530768/campos_512_v4
+104/530887/campos_512_v4
+104/530892/campos_512_v4
+104/530953/campos_512_v4
+104/530984/campos_512_v4
+104/531068/campos_512_v4
+104/531138/campos_512_v4
+104/531163/campos_512_v4
+104/531164/campos_512_v4
+104/531210/campos_512_v4
+104/531232/campos_512_v4
+104/531618/campos_512_v4
+104/531707/campos_512_v4
+104/531789/campos_512_v4
+104/531914/campos_512_v4
+104/531939/campos_512_v4
+104/532085/campos_512_v4
+104/532129/campos_512_v4
+104/532133/campos_512_v4
+104/532136/campos_512_v4
+104/532160/campos_512_v4
+104/532161/campos_512_v4
+104/532241/campos_512_v4
+104/532392/campos_512_v4
+104/532415/campos_512_v4
+104/532490/campos_512_v4
+104/532555/campos_512_v4
+104/532592/campos_512_v4
+104/532611/campos_512_v4
+104/532623/campos_512_v4
+104/532658/campos_512_v4
+104/532765/campos_512_v4
+104/532767/campos_512_v4
+104/532781/campos_512_v4
+104/532806/campos_512_v4
+104/532831/campos_512_v4
+104/532943/campos_512_v4
+104/532963/campos_512_v4
+104/532989/campos_512_v4
+104/533005/campos_512_v4
+104/533323/campos_512_v4
+104/533328/campos_512_v4
+104/533341/campos_512_v4
+104/533360/campos_512_v4
+104/533399/campos_512_v4
+104/533456/campos_512_v4
+104/533514/campos_512_v4
+104/533578/campos_512_v4
+104/533727/campos_512_v4
+104/533863/campos_512_v4
+104/533993/campos_512_v4
+104/534081/campos_512_v4
+104/534223/campos_512_v4
+104/534235/campos_512_v4
+104/534245/campos_512_v4
+104/534295/campos_512_v4
+104/534366/campos_512_v4
+104/534461/campos_512_v4
+104/534474/campos_512_v4
+104/534597/campos_512_v4
+104/534749/campos_512_v4
+104/534839/campos_512_v4
+104/534892/campos_512_v4
+104/534902/campos_512_v4
+105/535208/campos_512_v4
+105/535293/campos_512_v4
+105/535440/campos_512_v4
+105/535595/campos_512_v4
+105/535730/campos_512_v4
+105/535755/campos_512_v4
+105/535823/campos_512_v4
+105/535840/campos_512_v4
+105/535913/campos_512_v4
+105/535921/campos_512_v4
+105/535923/campos_512_v4
+105/535952/campos_512_v4
+105/535995/campos_512_v4
+105/536040/campos_512_v4
+105/536059/campos_512_v4
+105/536122/campos_512_v4
+105/536407/campos_512_v4
+105/536541/campos_512_v4
+105/536640/campos_512_v4
+105/536641/campos_512_v4
+105/536699/campos_512_v4
+105/536712/campos_512_v4
+105/536715/campos_512_v4
+105/536852/campos_512_v4
+105/536960/campos_512_v4
+105/536972/campos_512_v4
+105/537047/campos_512_v4
+105/537387/campos_512_v4
+105/537403/campos_512_v4
+105/537497/campos_512_v4
+105/537588/campos_512_v4
+105/537611/campos_512_v4
+105/537783/campos_512_v4
+105/537788/campos_512_v4
+105/538006/campos_512_v4
+105/538174/campos_512_v4
+105/538256/campos_512_v4
+105/538424/campos_512_v4
+105/538458/campos_512_v4
+105/538475/campos_512_v4
+105/538496/campos_512_v4
+105/538755/campos_512_v4
+105/538763/campos_512_v4
+105/539244/campos_512_v4
+105/539330/campos_512_v4
+105/539456/campos_512_v4
+105/539504/campos_512_v4
+105/539525/campos_512_v4
+105/539605/campos_512_v4
+105/539756/campos_512_v4
+105/539758/campos_512_v4
+105/539794/campos_512_v4
+105/539894/campos_512_v4
+105/539906/campos_512_v4
+105/539966/campos_512_v4
+106/540030/campos_512_v4
+106/540206/campos_512_v4
+106/540234/campos_512_v4
+106/540238/campos_512_v4
+106/540296/campos_512_v4
+106/540335/campos_512_v4
+106/540405/campos_512_v4
+106/540545/campos_512_v4
+106/540674/campos_512_v4
+106/540693/campos_512_v4
+106/540720/campos_512_v4
+106/540904/campos_512_v4
+106/541064/campos_512_v4
+106/541078/campos_512_v4
+106/541231/campos_512_v4
+106/541253/campos_512_v4
+106/541303/campos_512_v4
+106/541353/campos_512_v4
+106/541595/campos_512_v4
+106/541641/campos_512_v4
+106/541683/campos_512_v4
+106/541817/campos_512_v4
+106/541919/campos_512_v4
+106/542087/campos_512_v4
+106/542496/campos_512_v4
+106/542538/campos_512_v4
+106/542683/campos_512_v4
+106/542696/campos_512_v4
+106/542779/campos_512_v4
+106/542873/campos_512_v4
+106/542916/campos_512_v4
+106/542961/campos_512_v4
+106/542971/campos_512_v4
+106/543141/campos_512_v4
+106/543354/campos_512_v4
+106/543367/campos_512_v4
+106/543382/campos_512_v4
+106/543488/campos_512_v4
+106/543801/campos_512_v4
+106/544014/campos_512_v4
+106/544097/campos_512_v4
+106/544115/campos_512_v4
+106/544190/campos_512_v4
+106/544209/campos_512_v4
+106/544246/campos_512_v4
+106/544334/campos_512_v4
+106/544473/campos_512_v4
+106/544543/campos_512_v4
+106/544550/campos_512_v4
+106/544638/campos_512_v4
+106/544802/campos_512_v4
+106/544866/campos_512_v4
+107/545231/campos_512_v4
+107/545374/campos_512_v4
+107/545513/campos_512_v4
+107/545549/campos_512_v4
+107/545603/campos_512_v4
+107/545782/campos_512_v4
+107/545793/campos_512_v4
+107/545970/campos_512_v4
+107/546055/campos_512_v4
+107/546117/campos_512_v4
+107/546140/campos_512_v4
+107/546194/campos_512_v4
+107/546268/campos_512_v4
+107/546309/campos_512_v4
+107/546357/campos_512_v4
+107/546402/campos_512_v4
+107/546567/campos_512_v4
+107/546696/campos_512_v4
+107/546744/campos_512_v4
+107/546958/campos_512_v4
+107/547026/campos_512_v4
+107/547098/campos_512_v4
+107/547159/campos_512_v4
+107/547181/campos_512_v4
+107/547237/campos_512_v4
+107/547340/campos_512_v4
+107/547458/campos_512_v4
+107/547509/campos_512_v4
+107/547601/campos_512_v4
+107/547746/campos_512_v4
+107/547873/campos_512_v4
+107/548023/campos_512_v4
+107/548077/campos_512_v4
+107/548109/campos_512_v4
+107/548116/campos_512_v4
+107/548173/campos_512_v4
+107/548183/campos_512_v4
+107/548250/campos_512_v4
+107/548260/campos_512_v4
+107/548286/campos_512_v4
+107/548320/campos_512_v4
+107/548464/campos_512_v4
+107/548525/campos_512_v4
+107/548625/campos_512_v4
+107/548634/campos_512_v4
+107/548655/campos_512_v4
+107/548678/campos_512_v4
+107/548686/campos_512_v4
+107/548706/campos_512_v4
+107/548821/campos_512_v4
+107/548822/campos_512_v4
+107/548846/campos_512_v4
+107/548934/campos_512_v4
+107/548997/campos_512_v4
+107/549128/campos_512_v4
+107/549160/campos_512_v4
+107/549211/campos_512_v4
+107/549396/campos_512_v4
+107/549400/campos_512_v4
+107/549407/campos_512_v4
+107/549724/campos_512_v4
+107/549850/campos_512_v4
+107/549889/campos_512_v4
+107/549946/campos_512_v4
+108/550013/campos_512_v4
+108/550020/campos_512_v4
+108/550045/campos_512_v4
+108/550154/campos_512_v4
+108/550206/campos_512_v4
+108/550239/campos_512_v4
+108/550353/campos_512_v4
+108/550426/campos_512_v4
+108/550455/campos_512_v4
+108/550630/campos_512_v4
+108/550674/campos_512_v4
+108/550678/campos_512_v4
+108/550759/campos_512_v4
+108/550824/campos_512_v4
+108/550833/campos_512_v4
+108/550907/campos_512_v4
+108/551011/campos_512_v4
+108/551063/campos_512_v4
+108/551091/campos_512_v4
+108/551291/campos_512_v4
+108/551605/campos_512_v4
+108/551670/campos_512_v4
+108/551679/campos_512_v4
+108/551753/campos_512_v4
+108/551775/campos_512_v4
+108/551841/campos_512_v4
+108/551863/campos_512_v4
+108/551914/campos_512_v4
+108/551973/campos_512_v4
+108/552063/campos_512_v4
+108/552092/campos_512_v4
+108/552260/campos_512_v4
+108/552407/campos_512_v4
+108/552517/campos_512_v4
+108/552573/campos_512_v4
+108/552684/campos_512_v4
+108/552797/campos_512_v4
+108/552902/campos_512_v4
+108/552931/campos_512_v4
+108/552961/campos_512_v4
+108/553038/campos_512_v4
+108/553077/campos_512_v4
+108/553078/campos_512_v4
+108/553099/campos_512_v4
+108/553185/campos_512_v4
+108/553199/campos_512_v4
+108/553339/campos_512_v4
+108/553391/campos_512_v4
+108/553428/campos_512_v4
+108/553487/campos_512_v4
+108/553601/campos_512_v4
+108/553690/campos_512_v4
+108/553754/campos_512_v4
+108/553888/campos_512_v4
+108/553932/campos_512_v4
+108/554026/campos_512_v4
+108/554270/campos_512_v4
+108/554365/campos_512_v4
+108/554410/campos_512_v4
+108/554491/campos_512_v4
+108/554554/campos_512_v4
+108/554694/campos_512_v4
+108/554714/campos_512_v4
+108/554765/campos_512_v4
+108/554817/campos_512_v4
+108/554857/campos_512_v4
+108/554969/campos_512_v4
+108/554981/campos_512_v4
+109/555045/campos_512_v4
+109/555107/campos_512_v4
+109/555141/campos_512_v4
+109/555207/campos_512_v4
+109/555552/campos_512_v4
+109/555598/campos_512_v4
+109/555604/campos_512_v4
+109/555619/campos_512_v4
+109/556026/campos_512_v4
+109/556057/campos_512_v4
+109/556285/campos_512_v4
+109/556291/campos_512_v4
+109/556360/campos_512_v4
+109/556380/campos_512_v4
+109/556465/campos_512_v4
+109/556624/campos_512_v4
+109/556816/campos_512_v4
+109/556826/campos_512_v4
+109/556890/campos_512_v4
+109/556906/campos_512_v4
+109/556945/campos_512_v4
+109/557073/campos_512_v4
+109/557144/campos_512_v4
+109/557194/campos_512_v4
+109/557342/campos_512_v4
+109/557427/campos_512_v4
+109/557439/campos_512_v4
+109/557441/campos_512_v4
+109/557604/campos_512_v4
+109/557624/campos_512_v4
+109/557649/campos_512_v4
+109/557662/campos_512_v4
+109/557708/campos_512_v4
+109/557714/campos_512_v4
+109/557859/campos_512_v4
+109/557928/campos_512_v4
+109/557934/campos_512_v4
+109/557993/campos_512_v4
+109/558035/campos_512_v4
+109/558173/campos_512_v4
+109/558201/campos_512_v4
+109/558254/campos_512_v4
+109/558288/campos_512_v4
+109/558340/campos_512_v4
+109/558367/campos_512_v4
+109/558380/campos_512_v4
+109/558403/campos_512_v4
+109/558410/campos_512_v4
+109/558473/campos_512_v4
+109/558486/campos_512_v4
+109/558762/campos_512_v4
+109/558768/campos_512_v4
+109/558775/campos_512_v4
+109/558806/campos_512_v4
+109/558867/campos_512_v4
+109/559220/campos_512_v4
+109/559589/campos_512_v4
+109/559621/campos_512_v4
+109/559637/campos_512_v4
+109/559723/campos_512_v4
+109/559738/campos_512_v4
+109/559798/campos_512_v4
+109/559927/campos_512_v4
+109/559972/campos_512_v4
+109/559993/campos_512_v4
+11/65275/campos_512_v4
+11/65363/campos_512_v4
+11/65549/campos_512_v4
+11/65650/campos_512_v4
+11/65976/campos_512_v4
+11/66001/campos_512_v4
+11/66081/campos_512_v4
+11/67144/campos_512_v4
+11/67529/campos_512_v4
+11/67570/campos_512_v4
+11/67702/campos_512_v4
+11/67856/campos_512_v4
+11/67960/campos_512_v4
+11/68077/campos_512_v4
+11/68149/campos_512_v4
+11/68241/campos_512_v4
+11/68353/campos_512_v4
+11/68508/campos_512_v4
+11/68525/campos_512_v4
+11/68558/campos_512_v4
+11/68621/campos_512_v4
+11/68667/campos_512_v4
+11/68830/campos_512_v4
+11/69097/campos_512_v4
+11/69196/campos_512_v4
+11/69213/campos_512_v4
+11/69330/campos_512_v4
+11/69400/campos_512_v4
+11/69453/campos_512_v4
+11/69474/campos_512_v4
+11/69634/campos_512_v4
+11/69872/campos_512_v4
+11/69907/campos_512_v4
+11/69932/campos_512_v4
+110/560035/campos_512_v4
+110/560133/campos_512_v4
+110/560145/campos_512_v4
+110/560164/campos_512_v4
+110/560173/campos_512_v4
+110/560337/campos_512_v4
+110/560341/campos_512_v4
+110/560344/campos_512_v4
+110/560374/campos_512_v4
+110/560515/campos_512_v4
+110/560545/campos_512_v4
+110/560650/campos_512_v4
+110/560904/campos_512_v4
+110/560909/campos_512_v4
+110/560944/campos_512_v4
+110/561024/campos_512_v4
+110/561053/campos_512_v4
+110/561114/campos_512_v4
+110/561156/campos_512_v4
+110/561167/campos_512_v4
+110/561339/campos_512_v4
+110/561351/campos_512_v4
+110/561357/campos_512_v4
+110/561374/campos_512_v4
+110/561379/campos_512_v4
+110/561589/campos_512_v4
+110/561926/campos_512_v4
+110/561966/campos_512_v4
+110/561968/campos_512_v4
+110/562011/campos_512_v4
+110/562105/campos_512_v4
+110/562117/campos_512_v4
+110/562135/campos_512_v4
+110/562164/campos_512_v4
+110/562190/campos_512_v4
+110/562200/campos_512_v4
+110/562227/campos_512_v4
+110/562259/campos_512_v4
+110/562396/campos_512_v4
+110/562739/campos_512_v4
+110/562753/campos_512_v4
+110/562895/campos_512_v4
+110/562947/campos_512_v4
+110/563041/campos_512_v4
+110/563061/campos_512_v4
+110/563136/campos_512_v4
+110/563224/campos_512_v4
+110/563473/campos_512_v4
+110/563492/campos_512_v4
+110/563689/campos_512_v4
+110/563742/campos_512_v4
+110/563978/campos_512_v4
+110/564050/campos_512_v4
+110/564167/campos_512_v4
+110/564178/campos_512_v4
+110/564182/campos_512_v4
+110/564230/campos_512_v4
+110/564311/campos_512_v4
+110/564378/campos_512_v4
+110/564468/campos_512_v4
+110/564693/campos_512_v4
+110/564711/campos_512_v4
+110/564753/campos_512_v4
+110/564790/campos_512_v4
+111/565035/campos_512_v4
+111/565101/campos_512_v4
+111/565229/campos_512_v4
+111/565281/campos_512_v4
+111/565393/campos_512_v4
+111/565678/campos_512_v4
+111/565737/campos_512_v4
+111/565751/campos_512_v4
+111/565786/campos_512_v4
+111/565959/campos_512_v4
+111/566015/campos_512_v4
+111/566053/campos_512_v4
+111/566095/campos_512_v4
+111/566657/campos_512_v4
+111/567547/campos_512_v4
+111/567607/campos_512_v4
+111/567638/campos_512_v4
+111/567695/campos_512_v4
+111/567827/campos_512_v4
+111/567972/campos_512_v4
+111/567981/campos_512_v4
+111/567982/campos_512_v4
+111/568029/campos_512_v4
+111/568106/campos_512_v4
+111/568134/campos_512_v4
+111/568396/campos_512_v4
+111/568460/campos_512_v4
+111/568462/campos_512_v4
+111/568471/campos_512_v4
+111/568622/campos_512_v4
+111/568683/campos_512_v4
+111/568784/campos_512_v4
+111/568961/campos_512_v4
+111/569119/campos_512_v4
+111/569210/campos_512_v4
+111/569232/campos_512_v4
+111/569273/campos_512_v4
+111/569384/campos_512_v4
+111/569403/campos_512_v4
+111/569457/campos_512_v4
+111/569509/campos_512_v4
+111/569537/campos_512_v4
+111/569564/campos_512_v4
+111/569586/campos_512_v4
+111/569762/campos_512_v4
+111/569888/campos_512_v4
+111/569941/campos_512_v4
+111/569945/campos_512_v4
+111/569987/campos_512_v4
+112/570002/campos_512_v4
+112/570077/campos_512_v4
+112/570088/campos_512_v4
+112/570094/campos_512_v4
+112/570117/campos_512_v4
+112/570165/campos_512_v4
+112/570450/campos_512_v4
+112/570493/campos_512_v4
+112/570643/campos_512_v4
+112/570902/campos_512_v4
+112/571115/campos_512_v4
+112/571314/campos_512_v4
+112/571450/campos_512_v4
+112/571507/campos_512_v4
+112/571564/campos_512_v4
+112/571680/campos_512_v4
+112/571821/campos_512_v4
+112/571833/campos_512_v4
+112/571887/campos_512_v4
+112/571931/campos_512_v4
+112/571994/campos_512_v4
+112/572073/campos_512_v4
+112/572079/campos_512_v4
+112/572145/campos_512_v4
+112/572214/campos_512_v4
+112/572219/campos_512_v4
+112/572267/campos_512_v4
+112/572537/campos_512_v4
+112/572545/campos_512_v4
+112/572559/campos_512_v4
+112/572673/campos_512_v4
+112/572852/campos_512_v4
+112/572957/campos_512_v4
+112/572966/campos_512_v4
+112/573388/campos_512_v4
+112/573395/campos_512_v4
+112/573659/campos_512_v4
+112/573680/campos_512_v4
+112/573845/campos_512_v4
+112/573912/campos_512_v4
+112/573981/campos_512_v4
+112/574254/campos_512_v4
+112/574299/campos_512_v4
+112/574343/campos_512_v4
+112/574478/campos_512_v4
+112/574526/campos_512_v4
+112/574584/campos_512_v4
+112/574617/campos_512_v4
+112/574665/campos_512_v4
+112/574748/campos_512_v4
+112/574842/campos_512_v4
+112/574896/campos_512_v4
+113/575319/campos_512_v4
+113/575353/campos_512_v4
+113/575356/campos_512_v4
+113/575394/campos_512_v4
+113/575427/campos_512_v4
+113/575606/campos_512_v4
+113/575745/campos_512_v4
+113/575899/campos_512_v4
+113/575910/campos_512_v4
+113/575968/campos_512_v4
+113/575983/campos_512_v4
+113/576018/campos_512_v4
+113/576038/campos_512_v4
+113/576089/campos_512_v4
+113/576190/campos_512_v4
+113/576191/campos_512_v4
+113/576251/campos_512_v4
+113/576299/campos_512_v4
+113/576357/campos_512_v4
+113/576381/campos_512_v4
+113/576426/campos_512_v4
+113/576446/campos_512_v4
+113/576460/campos_512_v4
+113/576462/campos_512_v4
+113/576515/campos_512_v4
+113/576581/campos_512_v4
+113/576653/campos_512_v4
+113/576668/campos_512_v4
+113/576736/campos_512_v4
+113/576754/campos_512_v4
+113/576760/campos_512_v4
+113/576791/campos_512_v4
+113/576912/campos_512_v4
+113/576987/campos_512_v4
+113/577196/campos_512_v4
+113/577325/campos_512_v4
+113/577373/campos_512_v4
+113/577509/campos_512_v4
+113/577537/campos_512_v4
+113/577594/campos_512_v4
+113/577598/campos_512_v4
+113/577609/campos_512_v4
+113/577636/campos_512_v4
+113/577644/campos_512_v4
+113/577670/campos_512_v4
+113/578068/campos_512_v4
+113/578076/campos_512_v4
+113/578104/campos_512_v4
+113/578114/campos_512_v4
+113/578261/campos_512_v4
+113/578353/campos_512_v4
+113/578373/campos_512_v4
+113/578406/campos_512_v4
+113/578432/campos_512_v4
+113/578441/campos_512_v4
+113/578496/campos_512_v4
+113/578527/campos_512_v4
+113/578571/campos_512_v4
+113/578758/campos_512_v4
+113/578810/campos_512_v4
+113/578946/campos_512_v4
+113/579015/campos_512_v4
+113/579232/campos_512_v4
+113/579243/campos_512_v4
+113/579253/campos_512_v4
+113/579333/campos_512_v4
+113/579362/campos_512_v4
+113/579372/campos_512_v4
+113/579380/campos_512_v4
+113/579401/campos_512_v4
+113/579419/campos_512_v4
+113/579748/campos_512_v4
+113/579793/campos_512_v4
+113/579822/campos_512_v4
+113/579862/campos_512_v4
+113/579876/campos_512_v4
+113/579924/campos_512_v4
+113/579987/campos_512_v4
+114/580045/campos_512_v4
+114/580128/campos_512_v4
+114/580253/campos_512_v4
+114/580294/campos_512_v4
+114/580544/campos_512_v4
+114/580671/campos_512_v4
+114/580678/campos_512_v4
+114/580680/campos_512_v4
+114/580683/campos_512_v4
+114/580790/campos_512_v4
+114/580851/campos_512_v4
+114/580918/campos_512_v4
+114/580939/campos_512_v4
+114/581031/campos_512_v4
+114/581045/campos_512_v4
+114/581051/campos_512_v4
+114/581061/campos_512_v4
+114/581071/campos_512_v4
+114/581091/campos_512_v4
+114/581138/campos_512_v4
+114/581170/campos_512_v4
+114/581264/campos_512_v4
+114/581300/campos_512_v4
+114/581315/campos_512_v4
+114/581320/campos_512_v4
+114/581372/campos_512_v4
+114/581381/campos_512_v4
+114/581422/campos_512_v4
+114/581461/campos_512_v4
+114/581491/campos_512_v4
+114/581622/campos_512_v4
+114/581703/campos_512_v4
+114/581791/campos_512_v4
+114/581831/campos_512_v4
+114/581953/campos_512_v4
+114/581988/campos_512_v4
+114/582133/campos_512_v4
+114/582234/campos_512_v4
+114/582249/campos_512_v4
+114/582380/campos_512_v4
+114/582401/campos_512_v4
+114/582473/campos_512_v4
+114/582575/campos_512_v4
+114/582632/campos_512_v4
+114/582717/campos_512_v4
+114/582832/campos_512_v4
+114/582884/campos_512_v4
+114/582934/campos_512_v4
+114/582963/campos_512_v4
+114/583089/campos_512_v4
+114/583296/campos_512_v4
+114/583375/campos_512_v4
+114/583396/campos_512_v4
+114/583442/campos_512_v4
+114/583452/campos_512_v4
+114/583547/campos_512_v4
+114/583579/campos_512_v4
+114/583627/campos_512_v4
+114/583687/campos_512_v4
+114/583803/campos_512_v4
+114/583951/campos_512_v4
+114/584019/campos_512_v4
+114/584055/campos_512_v4
+114/584073/campos_512_v4
+114/584286/campos_512_v4
+114/584289/campos_512_v4
+114/584359/campos_512_v4
+114/584366/campos_512_v4
+114/584449/campos_512_v4
+114/584489/campos_512_v4
+114/584557/campos_512_v4
+114/584606/campos_512_v4
+114/584686/campos_512_v4
+114/584742/campos_512_v4
+114/584842/campos_512_v4
+114/584979/campos_512_v4
+115/585028/campos_512_v4
+115/585250/campos_512_v4
+115/585288/campos_512_v4
+115/585385/campos_512_v4
+115/585390/campos_512_v4
+115/585563/campos_512_v4
+115/585574/campos_512_v4
+115/585580/campos_512_v4
+115/585690/campos_512_v4
+115/585818/campos_512_v4
+115/585828/campos_512_v4
+115/585918/campos_512_v4
+115/585990/campos_512_v4
+115/586001/campos_512_v4
+115/586033/campos_512_v4
+115/586138/campos_512_v4
+115/586171/campos_512_v4
+115/586248/campos_512_v4
+115/586297/campos_512_v4
+115/586338/campos_512_v4
+115/586342/campos_512_v4
+115/586475/campos_512_v4
+115/586495/campos_512_v4
+115/586953/campos_512_v4
+115/587149/campos_512_v4
+115/587318/campos_512_v4
+115/587337/campos_512_v4
+115/587614/campos_512_v4
+115/587740/campos_512_v4
+115/587751/campos_512_v4
+115/587811/campos_512_v4
+115/587817/campos_512_v4
+115/587823/campos_512_v4
+115/587861/campos_512_v4
+115/588011/campos_512_v4
+115/588186/campos_512_v4
+115/588254/campos_512_v4
+115/588472/campos_512_v4
+115/588650/campos_512_v4
+115/588738/campos_512_v4
+115/588779/campos_512_v4
+115/588908/campos_512_v4
+115/588928/campos_512_v4
+115/589153/campos_512_v4
+115/589467/campos_512_v4
+115/589558/campos_512_v4
+115/589643/campos_512_v4
+115/589668/campos_512_v4
+115/589672/campos_512_v4
+115/589679/campos_512_v4
+115/589708/campos_512_v4
+115/589919/campos_512_v4
+115/589992/campos_512_v4
+116/590163/campos_512_v4
+116/590216/campos_512_v4
+116/590336/campos_512_v4
+116/590376/campos_512_v4
+116/590378/campos_512_v4
+116/590441/campos_512_v4
+116/590545/campos_512_v4
+116/590547/campos_512_v4
+116/590564/campos_512_v4
+116/590592/campos_512_v4
+116/590800/campos_512_v4
+116/590844/campos_512_v4
+116/590935/campos_512_v4
+116/590958/campos_512_v4
+116/590991/campos_512_v4
+116/591279/campos_512_v4
+116/591355/campos_512_v4
+116/591365/campos_512_v4
+116/591415/campos_512_v4
+116/591598/campos_512_v4
+116/591599/campos_512_v4
+116/591658/campos_512_v4
+116/591816/campos_512_v4
+116/591875/campos_512_v4
+116/592121/campos_512_v4
+116/592141/campos_512_v4
+116/592164/campos_512_v4
+116/592179/campos_512_v4
+116/592202/campos_512_v4
+116/592337/campos_512_v4
+116/592361/campos_512_v4
+116/592383/campos_512_v4
+116/592410/campos_512_v4
+116/592474/campos_512_v4
+116/592520/campos_512_v4
+116/592525/campos_512_v4
+116/592631/campos_512_v4
+116/592729/campos_512_v4
+116/592762/campos_512_v4
+116/592769/campos_512_v4
+116/592873/campos_512_v4
+116/592885/campos_512_v4
+116/593053/campos_512_v4
+116/593061/campos_512_v4
+116/593271/campos_512_v4
+116/593304/campos_512_v4
+116/593344/campos_512_v4
+116/593385/campos_512_v4
+116/593395/campos_512_v4
+116/593567/campos_512_v4
+116/593573/campos_512_v4
+116/593583/campos_512_v4
+116/593652/campos_512_v4
+116/593660/campos_512_v4
+116/593720/campos_512_v4
+116/593772/campos_512_v4
+116/593778/campos_512_v4
+116/593805/campos_512_v4
+116/593807/campos_512_v4
+116/593830/campos_512_v4
+116/594088/campos_512_v4
+116/594129/campos_512_v4
+116/594213/campos_512_v4
+116/594230/campos_512_v4
+116/594378/campos_512_v4
+116/594399/campos_512_v4
+116/594501/campos_512_v4
+116/594516/campos_512_v4
+117/595151/campos_512_v4
+117/595393/campos_512_v4
+117/595539/campos_512_v4
+117/595870/campos_512_v4
+117/595948/campos_512_v4
+117/596012/campos_512_v4
+117/596073/campos_512_v4
+117/596209/campos_512_v4
+117/596214/campos_512_v4
+117/596343/campos_512_v4
+117/596351/campos_512_v4
+117/596372/campos_512_v4
+117/596593/campos_512_v4
+117/596615/campos_512_v4
+117/596627/campos_512_v4
+117/596740/campos_512_v4
+117/596776/campos_512_v4
+117/596857/campos_512_v4
+117/596864/campos_512_v4
+117/596874/campos_512_v4
+117/596876/campos_512_v4
+117/596895/campos_512_v4
+117/596905/campos_512_v4
+117/596919/campos_512_v4
+117/596922/campos_512_v4
+117/596926/campos_512_v4
+117/596963/campos_512_v4
+117/597029/campos_512_v4
+117/597037/campos_512_v4
+117/597142/campos_512_v4
+117/597192/campos_512_v4
+117/597194/campos_512_v4
+117/597226/campos_512_v4
+117/597484/campos_512_v4
+117/597551/campos_512_v4
+117/597699/campos_512_v4
+117/597792/campos_512_v4
+117/597819/campos_512_v4
+117/597883/campos_512_v4
+117/597905/campos_512_v4
+117/597940/campos_512_v4
+117/597952/campos_512_v4
+117/597960/campos_512_v4
+117/598008/campos_512_v4
+117/598132/campos_512_v4
+117/598174/campos_512_v4
+117/598246/campos_512_v4
+117/598276/campos_512_v4
+117/598325/campos_512_v4
+117/598337/campos_512_v4
+117/598346/campos_512_v4
+117/598397/campos_512_v4
+117/598416/campos_512_v4
+117/598451/campos_512_v4
+117/598529/campos_512_v4
+117/598607/campos_512_v4
+117/598632/campos_512_v4
+117/598662/campos_512_v4
+117/598770/campos_512_v4
+117/598974/campos_512_v4
+117/598976/campos_512_v4
+117/599076/campos_512_v4
+117/599277/campos_512_v4
+117/599434/campos_512_v4
+117/599477/campos_512_v4
+117/599546/campos_512_v4
+117/599666/campos_512_v4
+117/599685/campos_512_v4
+117/599764/campos_512_v4
+117/599767/campos_512_v4
+117/599908/campos_512_v4
+117/599942/campos_512_v4
+118/600034/campos_512_v4
+118/600051/campos_512_v4
+118/600079/campos_512_v4
+118/600172/campos_512_v4
+118/600368/campos_512_v4
+118/600436/campos_512_v4
+118/600498/campos_512_v4
+118/600547/campos_512_v4
+118/600549/campos_512_v4
+118/601050/campos_512_v4
+118/601060/campos_512_v4
+118/601130/campos_512_v4
+118/601153/campos_512_v4
+118/601267/campos_512_v4
+118/601293/campos_512_v4
+118/601333/campos_512_v4
+118/601431/campos_512_v4
+118/601473/campos_512_v4
+118/601575/campos_512_v4
+118/601627/campos_512_v4
+118/601824/campos_512_v4
+118/601999/campos_512_v4
+118/602033/campos_512_v4
+118/602041/campos_512_v4
+118/602093/campos_512_v4
+118/602378/campos_512_v4
+118/602387/campos_512_v4
+118/602480/campos_512_v4
+118/602492/campos_512_v4
+118/602588/campos_512_v4
+118/602590/campos_512_v4
+118/602628/campos_512_v4
+118/602705/campos_512_v4
+118/602708/campos_512_v4
+118/602870/campos_512_v4
+118/602878/campos_512_v4
+118/602900/campos_512_v4
+118/602933/campos_512_v4
+118/602949/campos_512_v4
+118/603089/campos_512_v4
+118/603391/campos_512_v4
+118/603399/campos_512_v4
+118/603476/campos_512_v4
+118/603479/campos_512_v4
+118/603558/campos_512_v4
+118/603622/campos_512_v4
+118/603642/campos_512_v4
+118/603850/campos_512_v4
+118/603910/campos_512_v4
+118/604032/campos_512_v4
+118/604041/campos_512_v4
+118/604054/campos_512_v4
+118/604356/campos_512_v4
+118/604374/campos_512_v4
+118/604406/campos_512_v4
+118/604433/campos_512_v4
+118/604481/campos_512_v4
+118/604578/campos_512_v4
+118/604598/campos_512_v4
+118/604662/campos_512_v4
+118/604697/campos_512_v4
+118/604700/campos_512_v4
+118/604708/campos_512_v4
+118/604735/campos_512_v4
+118/604827/campos_512_v4
+118/604898/campos_512_v4
+119/605334/campos_512_v4
+119/605369/campos_512_v4
+119/605557/campos_512_v4
+119/605574/campos_512_v4
+119/605676/campos_512_v4
+119/605808/campos_512_v4
+119/605860/campos_512_v4
+119/605998/campos_512_v4
+119/606003/campos_512_v4
+119/606046/campos_512_v4
+119/606106/campos_512_v4
+119/606127/campos_512_v4
+119/606150/campos_512_v4
+119/606279/campos_512_v4
+119/606394/campos_512_v4
+119/606414/campos_512_v4
+119/606419/campos_512_v4
+119/606798/campos_512_v4
+119/606980/campos_512_v4
+119/607080/campos_512_v4
+119/607167/campos_512_v4
+119/607519/campos_512_v4
+119/607559/campos_512_v4
+119/607613/campos_512_v4
+119/607650/campos_512_v4
+119/607655/campos_512_v4
+119/607878/campos_512_v4
+119/607923/campos_512_v4
+119/607930/campos_512_v4
+119/607969/campos_512_v4
+119/607996/campos_512_v4
+119/608081/campos_512_v4
+119/608106/campos_512_v4
+119/608139/campos_512_v4
+119/608172/campos_512_v4
+119/608578/campos_512_v4
+119/608676/campos_512_v4
+119/608688/campos_512_v4
+119/608689/campos_512_v4
+119/608942/campos_512_v4
+119/609053/campos_512_v4
+119/609054/campos_512_v4
+119/609114/campos_512_v4
+119/609230/campos_512_v4
+119/609233/campos_512_v4
+119/609275/campos_512_v4
+119/609502/campos_512_v4
+119/609721/campos_512_v4
+119/609742/campos_512_v4
+119/609770/campos_512_v4
+119/609824/campos_512_v4
+119/609912/campos_512_v4
+119/609948/campos_512_v4
+119/609977/campos_512_v4
+119/609978/campos_512_v4
+119/609996/campos_512_v4
+12/70220/campos_512_v4
+12/70314/campos_512_v4
+12/70322/campos_512_v4
+12/70352/campos_512_v4
+12/70375/campos_512_v4
+12/70389/campos_512_v4
+12/70514/campos_512_v4
+12/70536/campos_512_v4
+12/70566/campos_512_v4
+12/70636/campos_512_v4
+12/70654/campos_512_v4
+12/70687/campos_512_v4
+12/70730/campos_512_v4
+12/70942/campos_512_v4
+12/71014/campos_512_v4
+12/71326/campos_512_v4
+12/71406/campos_512_v4
+12/71436/campos_512_v4
+12/71441/campos_512_v4
+12/71746/campos_512_v4
+12/71842/campos_512_v4
+12/72149/campos_512_v4
+12/72378/campos_512_v4
+12/72589/campos_512_v4
+12/72638/campos_512_v4
+12/72704/campos_512_v4
+12/72719/campos_512_v4
+12/72899/campos_512_v4
+12/72919/campos_512_v4
+12/72921/campos_512_v4
+12/72970/campos_512_v4
+12/73078/campos_512_v4
+12/73142/campos_512_v4
+12/73196/campos_512_v4
+12/73198/campos_512_v4
+12/73505/campos_512_v4
+12/73565/campos_512_v4
+12/73589/campos_512_v4
+12/73662/campos_512_v4
+12/73768/campos_512_v4
+12/73846/campos_512_v4
+12/74220/campos_512_v4
+12/74325/campos_512_v4
+12/74461/campos_512_v4
+12/74522/campos_512_v4
+12/74838/campos_512_v4
+12/74855/campos_512_v4
+12/74861/campos_512_v4
+12/74927/campos_512_v4
+12/74933/campos_512_v4
+120/610037/campos_512_v4
+120/610194/campos_512_v4
+120/610263/campos_512_v4
+120/610301/campos_512_v4
+120/610428/campos_512_v4
+120/610431/campos_512_v4
+120/610517/campos_512_v4
+120/610746/campos_512_v4
+120/610771/campos_512_v4
+120/610974/campos_512_v4
+120/611231/campos_512_v4
+120/611298/campos_512_v4
+120/611402/campos_512_v4
+120/611504/campos_512_v4
+120/611608/campos_512_v4
+120/611623/campos_512_v4
+120/611756/campos_512_v4
+120/611778/campos_512_v4
+120/611908/campos_512_v4
+120/611911/campos_512_v4
+120/612054/campos_512_v4
+120/612241/campos_512_v4
+120/612369/campos_512_v4
+120/612422/campos_512_v4
+120/612435/campos_512_v4
+120/612461/campos_512_v4
+120/612501/campos_512_v4
+120/612583/campos_512_v4
+120/612644/campos_512_v4
+120/612647/campos_512_v4
+120/612707/campos_512_v4
+120/612720/campos_512_v4
+120/612825/campos_512_v4
+120/612944/campos_512_v4
+120/613022/campos_512_v4
+120/613048/campos_512_v4
+120/613070/campos_512_v4
+120/613082/campos_512_v4
+120/613179/campos_512_v4
+120/613226/campos_512_v4
+120/613342/campos_512_v4
+120/613503/campos_512_v4
+120/613539/campos_512_v4
+120/613714/campos_512_v4
+120/613743/campos_512_v4
+120/613865/campos_512_v4
+120/613868/campos_512_v4
+120/613998/campos_512_v4
+120/614224/campos_512_v4
+120/614234/campos_512_v4
+120/614238/campos_512_v4
+120/614283/campos_512_v4
+120/614352/campos_512_v4
+120/614547/campos_512_v4
+120/614628/campos_512_v4
+120/614632/campos_512_v4
+120/614677/campos_512_v4
+120/614704/campos_512_v4
+121/615064/campos_512_v4
+121/615229/campos_512_v4
+121/615273/campos_512_v4
+121/615531/campos_512_v4
+121/615571/campos_512_v4
+121/615653/campos_512_v4
+121/615663/campos_512_v4
+121/615993/campos_512_v4
+121/616010/campos_512_v4
+121/616035/campos_512_v4
+121/616120/campos_512_v4
+121/616219/campos_512_v4
+121/616229/campos_512_v4
+121/616286/campos_512_v4
+121/616294/campos_512_v4
+121/616328/campos_512_v4
+121/616406/campos_512_v4
+121/616553/campos_512_v4
+121/616557/campos_512_v4
+121/616651/campos_512_v4
+121/616661/campos_512_v4
+121/616674/campos_512_v4
+121/616675/campos_512_v4
+121/616692/campos_512_v4
+121/616744/campos_512_v4
+121/616783/campos_512_v4
+121/616805/campos_512_v4
+121/616905/campos_512_v4
+121/616969/campos_512_v4
+121/617014/campos_512_v4
+121/617070/campos_512_v4
+121/617086/campos_512_v4
+121/617108/campos_512_v4
+121/617271/campos_512_v4
+121/617273/campos_512_v4
+121/617288/campos_512_v4
+121/617314/campos_512_v4
+121/617447/campos_512_v4
+121/617499/campos_512_v4
+121/617514/campos_512_v4
+121/617600/campos_512_v4
+121/617641/campos_512_v4
+121/617646/campos_512_v4
+121/617801/campos_512_v4
+121/617815/campos_512_v4
+121/618009/campos_512_v4
+121/618307/campos_512_v4
+121/618476/campos_512_v4
+121/618484/campos_512_v4
+121/618658/campos_512_v4
+121/618666/campos_512_v4
+121/618688/campos_512_v4
+121/618814/campos_512_v4
+121/618932/campos_512_v4
+121/619057/campos_512_v4
+121/619199/campos_512_v4
+121/619388/campos_512_v4
+121/619400/campos_512_v4
+121/619655/campos_512_v4
+121/619674/campos_512_v4
+121/619677/campos_512_v4
+121/619746/campos_512_v4
+121/619747/campos_512_v4
+122/620020/campos_512_v4
+122/620054/campos_512_v4
+122/620139/campos_512_v4
+122/620186/campos_512_v4
+122/620334/campos_512_v4
+122/620385/campos_512_v4
+122/620470/campos_512_v4
+122/620487/campos_512_v4
+122/620520/campos_512_v4
+122/620553/campos_512_v4
+122/620590/campos_512_v4
+122/620636/campos_512_v4
+122/620736/campos_512_v4
+122/620740/campos_512_v4
+122/620880/campos_512_v4
+122/621145/campos_512_v4
+122/621164/campos_512_v4
+122/621446/campos_512_v4
+122/621574/campos_512_v4
+122/621583/campos_512_v4
+122/621639/campos_512_v4
+122/621646/campos_512_v4
+122/621716/campos_512_v4
+122/621763/campos_512_v4
+122/621792/campos_512_v4
+122/621796/campos_512_v4
+122/621818/campos_512_v4
+122/621894/campos_512_v4
+122/621963/campos_512_v4
+122/621977/campos_512_v4
+122/621985/campos_512_v4
+122/622157/campos_512_v4
+122/622273/campos_512_v4
+122/622414/campos_512_v4
+122/622437/campos_512_v4
+122/622499/campos_512_v4
+122/622542/campos_512_v4
+122/622585/campos_512_v4
+122/622631/campos_512_v4
+122/622727/campos_512_v4
+122/622731/campos_512_v4
+122/622829/campos_512_v4
+122/622846/campos_512_v4
+122/622994/campos_512_v4
+122/623224/campos_512_v4
+122/623641/campos_512_v4
+122/623751/campos_512_v4
+122/623771/campos_512_v4
+122/623854/campos_512_v4
+122/624079/campos_512_v4
+122/624112/campos_512_v4
+122/624485/campos_512_v4
+122/624486/campos_512_v4
+122/624499/campos_512_v4
+122/624652/campos_512_v4
+122/624668/campos_512_v4
+122/624720/campos_512_v4
+122/624801/campos_512_v4
+122/624932/campos_512_v4
+122/624954/campos_512_v4
+122/624984/campos_512_v4
+123/625220/campos_512_v4
+123/625230/campos_512_v4
+123/625312/campos_512_v4
+123/625717/campos_512_v4
+123/625839/campos_512_v4
+123/625904/campos_512_v4
+123/626010/campos_512_v4
+123/626104/campos_512_v4
+123/626125/campos_512_v4
+123/626188/campos_512_v4
+123/626195/campos_512_v4
+123/626411/campos_512_v4
+123/626440/campos_512_v4
+123/626545/campos_512_v4
+123/626565/campos_512_v4
+123/626598/campos_512_v4
+123/626683/campos_512_v4
+123/626697/campos_512_v4
+123/626759/campos_512_v4
+123/626895/campos_512_v4
+123/627016/campos_512_v4
+123/627036/campos_512_v4
+123/627179/campos_512_v4
+123/627183/campos_512_v4
+123/627264/campos_512_v4
+123/627302/campos_512_v4
+123/627443/campos_512_v4
+123/627450/campos_512_v4
+123/627610/campos_512_v4
+123/627695/campos_512_v4
+123/627785/campos_512_v4
+123/627830/campos_512_v4
+123/627835/campos_512_v4
+123/627867/campos_512_v4
+123/627962/campos_512_v4
+123/627984/campos_512_v4
+123/628077/campos_512_v4
+123/628126/campos_512_v4
+123/628245/campos_512_v4
+123/628263/campos_512_v4
+123/628281/campos_512_v4
+123/628286/campos_512_v4
+123/628336/campos_512_v4
+123/628421/campos_512_v4
+123/628512/campos_512_v4
+123/628562/campos_512_v4
+123/628564/campos_512_v4
+123/628649/campos_512_v4
+123/628765/campos_512_v4
+123/628778/campos_512_v4
+123/628782/campos_512_v4
+123/628808/campos_512_v4
+123/629042/campos_512_v4
+123/629100/campos_512_v4
+123/629177/campos_512_v4
+123/629355/campos_512_v4
+123/629476/campos_512_v4
+123/629507/campos_512_v4
+123/629644/campos_512_v4
+123/629680/campos_512_v4
+123/629691/campos_512_v4
+123/629717/campos_512_v4
+123/629724/campos_512_v4
+123/629753/campos_512_v4
+123/629767/campos_512_v4
+123/629860/campos_512_v4
+123/629947/campos_512_v4
+123/629953/campos_512_v4
+123/629986/campos_512_v4
+124/630018/campos_512_v4
+124/630061/campos_512_v4
+124/630143/campos_512_v4
+124/630154/campos_512_v4
+124/630164/campos_512_v4
+124/630344/campos_512_v4
+124/630395/campos_512_v4
+124/630404/campos_512_v4
+124/630542/campos_512_v4
+124/630563/campos_512_v4
+124/630751/campos_512_v4
+124/630798/campos_512_v4
+124/630820/campos_512_v4
+124/630841/campos_512_v4
+124/630869/campos_512_v4
+124/630898/campos_512_v4
+124/630947/campos_512_v4
+124/631014/campos_512_v4
+124/631055/campos_512_v4
+124/631065/campos_512_v4
+124/631128/campos_512_v4
+124/631139/campos_512_v4
+124/631170/campos_512_v4
+124/631287/campos_512_v4
+124/631486/campos_512_v4
+124/631534/campos_512_v4
+124/631577/campos_512_v4
+124/631616/campos_512_v4
+124/631662/campos_512_v4
+124/631766/campos_512_v4
+124/631966/campos_512_v4
+124/631971/campos_512_v4
+124/632040/campos_512_v4
+124/632085/campos_512_v4
+124/632119/campos_512_v4
+124/632235/campos_512_v4
+124/632392/campos_512_v4
+124/632462/campos_512_v4
+124/632464/campos_512_v4
+124/632534/campos_512_v4
+124/632723/campos_512_v4
+124/632741/campos_512_v4
+124/632744/campos_512_v4
+124/632746/campos_512_v4
+124/632822/campos_512_v4
+124/632947/campos_512_v4
+124/633000/campos_512_v4
+124/633149/campos_512_v4
+124/633166/campos_512_v4
+124/633205/campos_512_v4
+124/633259/campos_512_v4
+124/633399/campos_512_v4
+124/633400/campos_512_v4
+124/633413/campos_512_v4
+124/633438/campos_512_v4
+124/633480/campos_512_v4
+124/633535/campos_512_v4
+124/633863/campos_512_v4
+124/634120/campos_512_v4
+124/634130/campos_512_v4
+124/634301/campos_512_v4
+124/634318/campos_512_v4
+124/634464/campos_512_v4
+124/634479/campos_512_v4
+124/634592/campos_512_v4
+124/634712/campos_512_v4
+124/634733/campos_512_v4
+124/634778/campos_512_v4
+124/634948/campos_512_v4
+124/634953/campos_512_v4
+125/635219/campos_512_v4
+125/635351/campos_512_v4
+125/635535/campos_512_v4
+125/635569/campos_512_v4
+125/635578/campos_512_v4
+125/635672/campos_512_v4
+125/635705/campos_512_v4
+125/635737/campos_512_v4
+125/635740/campos_512_v4
+125/635773/campos_512_v4
+125/635911/campos_512_v4
+125/636113/campos_512_v4
+125/636163/campos_512_v4
+125/636184/campos_512_v4
+125/636230/campos_512_v4
+125/636342/campos_512_v4
+125/636393/campos_512_v4
+125/636711/campos_512_v4
+125/636739/campos_512_v4
+125/636783/campos_512_v4
+125/637022/campos_512_v4
+125/637131/campos_512_v4
+125/637197/campos_512_v4
+125/637212/campos_512_v4
+125/637280/campos_512_v4
+125/637296/campos_512_v4
+125/637340/campos_512_v4
+125/637487/campos_512_v4
+125/637510/campos_512_v4
+125/637538/campos_512_v4
+125/637851/campos_512_v4
+125/637959/campos_512_v4
+125/637990/campos_512_v4
+125/638203/campos_512_v4
+125/638238/campos_512_v4
+125/638298/campos_512_v4
+125/638334/campos_512_v4
+125/638404/campos_512_v4
+125/638534/campos_512_v4
+125/638566/campos_512_v4
+125/638586/campos_512_v4
+125/638595/campos_512_v4
+125/638683/campos_512_v4
+125/638797/campos_512_v4
+125/638808/campos_512_v4
+125/638844/campos_512_v4
+125/638846/campos_512_v4
+125/638861/campos_512_v4
+125/638919/campos_512_v4
+125/639029/campos_512_v4
+125/639311/campos_512_v4
+125/639315/campos_512_v4
+125/639333/campos_512_v4
+125/639365/campos_512_v4
+125/639489/campos_512_v4
+125/639492/campos_512_v4
+125/639502/campos_512_v4
+125/639517/campos_512_v4
+125/639594/campos_512_v4
+125/639634/campos_512_v4
+125/639723/campos_512_v4
+125/639789/campos_512_v4
+125/639970/campos_512_v4
+127/645015/campos_512_v4
+127/645042/campos_512_v4
+127/645052/campos_512_v4
+127/645124/campos_512_v4
+127/645308/campos_512_v4
+127/645494/campos_512_v4
+127/645639/campos_512_v4
+127/645643/campos_512_v4
+127/645897/campos_512_v4
+127/645959/campos_512_v4
+127/645983/campos_512_v4
+127/645985/campos_512_v4
+127/646082/campos_512_v4
+127/646203/campos_512_v4
+127/646267/campos_512_v4
+127/646421/campos_512_v4
+127/646679/campos_512_v4
+127/646845/campos_512_v4
+127/646865/campos_512_v4
+127/646909/campos_512_v4
+127/646921/campos_512_v4
+127/646947/campos_512_v4
+127/646967/campos_512_v4
+127/647101/campos_512_v4
+127/647393/campos_512_v4
+127/647435/campos_512_v4
+127/647456/campos_512_v4
+127/647527/campos_512_v4
+127/647534/campos_512_v4
+127/647802/campos_512_v4
+127/647967/campos_512_v4
+127/647978/campos_512_v4
+127/648043/campos_512_v4
+127/648070/campos_512_v4
+127/648185/campos_512_v4
+127/648403/campos_512_v4
+127/648432/campos_512_v4
+127/648462/campos_512_v4
+127/648512/campos_512_v4
+127/648593/campos_512_v4
+127/648612/campos_512_v4
+127/648625/campos_512_v4
+127/648746/campos_512_v4
+127/648848/campos_512_v4
+127/648926/campos_512_v4
+127/648972/campos_512_v4
+127/649031/campos_512_v4
+127/649073/campos_512_v4
+127/649085/campos_512_v4
+127/649150/campos_512_v4
+127/649279/campos_512_v4
+127/649358/campos_512_v4
+127/649540/campos_512_v4
+127/649551/campos_512_v4
+127/649883/campos_512_v4
+127/649916/campos_512_v4
+127/649918/campos_512_v4
+127/649998/campos_512_v4
+128/650025/campos_512_v4
+128/650085/campos_512_v4
+128/650136/campos_512_v4
+128/650196/campos_512_v4
+128/650334/campos_512_v4
+128/650515/campos_512_v4
+128/650705/campos_512_v4
+128/650758/campos_512_v4
+128/650760/campos_512_v4
+128/650803/campos_512_v4
+128/650811/campos_512_v4
+128/650860/campos_512_v4
+128/650878/campos_512_v4
+128/650985/campos_512_v4
+128/650987/campos_512_v4
+128/651045/campos_512_v4
+128/651115/campos_512_v4
+128/651131/campos_512_v4
+128/651155/campos_512_v4
+128/651266/campos_512_v4
+128/651291/campos_512_v4
+128/651316/campos_512_v4
+128/651360/campos_512_v4
+128/651379/campos_512_v4
+128/651504/campos_512_v4
+128/651596/campos_512_v4
+128/651648/campos_512_v4
+128/651821/campos_512_v4
+128/651846/campos_512_v4
+128/651978/campos_512_v4
+128/652327/campos_512_v4
+128/652403/campos_512_v4
+128/652412/campos_512_v4
+128/652413/campos_512_v4
+128/652437/campos_512_v4
+128/652473/campos_512_v4
+128/652501/campos_512_v4
+128/652693/campos_512_v4
+128/652740/campos_512_v4
+128/652760/campos_512_v4
+128/652826/campos_512_v4
+128/652894/campos_512_v4
+128/652954/campos_512_v4
+128/653073/campos_512_v4
+128/653099/campos_512_v4
+128/653142/campos_512_v4
+128/653143/campos_512_v4
+128/653182/campos_512_v4
+128/653238/campos_512_v4
+128/653288/campos_512_v4
+128/653388/campos_512_v4
+128/653449/campos_512_v4
+128/653570/campos_512_v4
+128/653631/campos_512_v4
+128/653729/campos_512_v4
+128/653793/campos_512_v4
+128/653833/campos_512_v4
+128/653944/campos_512_v4
+128/654025/campos_512_v4
+128/654057/campos_512_v4
+128/654072/campos_512_v4
+128/654174/campos_512_v4
+128/654187/campos_512_v4
+128/654190/campos_512_v4
+128/654232/campos_512_v4
+128/654267/campos_512_v4
+128/654376/campos_512_v4
+128/654491/campos_512_v4
+128/654495/campos_512_v4
+128/654539/campos_512_v4
+128/654591/campos_512_v4
+128/654777/campos_512_v4
+128/654865/campos_512_v4
+128/654883/campos_512_v4
+128/654885/campos_512_v4
+128/654948/campos_512_v4
+128/654973/campos_512_v4
+128/654988/campos_512_v4
+129/655123/campos_512_v4
+129/655147/campos_512_v4
+129/655148/campos_512_v4
+129/655364/campos_512_v4
+129/655554/campos_512_v4
+129/655578/campos_512_v4
+129/655765/campos_512_v4
+129/655780/campos_512_v4
+129/656075/campos_512_v4
+129/656155/campos_512_v4
+129/656272/campos_512_v4
+129/656336/campos_512_v4
+129/656346/campos_512_v4
+129/656487/campos_512_v4
+129/656536/campos_512_v4
+129/656620/campos_512_v4
+129/656696/campos_512_v4
+129/656702/campos_512_v4
+129/656738/campos_512_v4
+129/656762/campos_512_v4
+129/656775/campos_512_v4
+129/656944/campos_512_v4
+129/656978/campos_512_v4
+129/656982/campos_512_v4
+129/657067/campos_512_v4
+129/657385/campos_512_v4
+129/657390/campos_512_v4
+129/657455/campos_512_v4
+129/657543/campos_512_v4
+129/657648/campos_512_v4
+129/657649/campos_512_v4
+129/657724/campos_512_v4
+129/657807/campos_512_v4
+129/657817/campos_512_v4
+129/657940/campos_512_v4
+129/657946/campos_512_v4
+129/658046/campos_512_v4
+129/658064/campos_512_v4
+129/658072/campos_512_v4
+129/658205/campos_512_v4
+129/658304/campos_512_v4
+129/658315/campos_512_v4
+129/658414/campos_512_v4
+129/658439/campos_512_v4
+129/658484/campos_512_v4
+129/658675/campos_512_v4
+129/658693/campos_512_v4
+129/658734/campos_512_v4
+129/658811/campos_512_v4
+129/658989/campos_512_v4
+129/658999/campos_512_v4
+129/659033/campos_512_v4
+129/659069/campos_512_v4
+129/659089/campos_512_v4
+129/659182/campos_512_v4
+129/659422/campos_512_v4
+129/659543/campos_512_v4
+129/659567/campos_512_v4
+129/659734/campos_512_v4
+129/659785/campos_512_v4
+129/659794/campos_512_v4
+129/659838/campos_512_v4
+129/659905/campos_512_v4
+129/659953/campos_512_v4
+13/75022/campos_512_v4
+13/75201/campos_512_v4
+13/75215/campos_512_v4
+13/75217/campos_512_v4
+13/75309/campos_512_v4
+13/75329/campos_512_v4
+13/75405/campos_512_v4
+13/75461/campos_512_v4
+13/75624/campos_512_v4
+13/75971/campos_512_v4
+13/76045/campos_512_v4
+13/76178/campos_512_v4
+13/76225/campos_512_v4
+13/76278/campos_512_v4
+13/76428/campos_512_v4
+13/76477/campos_512_v4
+13/76766/campos_512_v4
+13/77065/campos_512_v4
+13/77205/campos_512_v4
+13/77367/campos_512_v4
+13/77368/campos_512_v4
+13/77595/campos_512_v4
+13/77741/campos_512_v4
+13/77796/campos_512_v4
+13/78037/campos_512_v4
+13/78120/campos_512_v4
+13/78163/campos_512_v4
+13/78206/campos_512_v4
+13/78271/campos_512_v4
+13/78554/campos_512_v4
+13/78695/campos_512_v4
+13/78830/campos_512_v4
+13/78882/campos_512_v4
+13/78947/campos_512_v4
+13/78951/campos_512_v4
+13/78977/campos_512_v4
+13/79020/campos_512_v4
+13/79069/campos_512_v4
+13/79081/campos_512_v4
+13/79240/campos_512_v4
+13/79436/campos_512_v4
+13/79585/campos_512_v4
+13/79638/campos_512_v4
+13/79740/campos_512_v4
+13/79829/campos_512_v4
+13/79857/campos_512_v4
+130/660067/campos_512_v4
+130/660207/campos_512_v4
+130/660350/campos_512_v4
+130/660392/campos_512_v4
+130/660439/campos_512_v4
+130/660554/campos_512_v4
+130/660742/campos_512_v4
+130/660764/campos_512_v4
+130/660833/campos_512_v4
+130/660948/campos_512_v4
+130/660950/campos_512_v4
+130/661121/campos_512_v4
+130/661138/campos_512_v4
+130/661215/campos_512_v4
+130/661291/campos_512_v4
+130/661385/campos_512_v4
+130/661387/campos_512_v4
+130/661489/campos_512_v4
+130/661526/campos_512_v4
+130/661539/campos_512_v4
+130/661564/campos_512_v4
+130/661776/campos_512_v4
+130/661779/campos_512_v4
+130/661869/campos_512_v4
+130/661891/campos_512_v4
+130/662236/campos_512_v4
+130/662422/campos_512_v4
+130/662678/campos_512_v4
+130/662694/campos_512_v4
+130/662702/campos_512_v4
+130/662729/campos_512_v4
+130/662759/campos_512_v4
+130/662830/campos_512_v4
+130/662861/campos_512_v4
+130/662872/campos_512_v4
+130/662946/campos_512_v4
+130/662970/campos_512_v4
+130/663120/campos_512_v4
+130/663128/campos_512_v4
+130/663135/campos_512_v4
+130/663246/campos_512_v4
+130/663260/campos_512_v4
+130/663279/campos_512_v4
+130/663358/campos_512_v4
+130/663364/campos_512_v4
+130/663493/campos_512_v4
+130/663559/campos_512_v4
+130/663570/campos_512_v4
+130/663664/campos_512_v4
+130/663666/campos_512_v4
+130/663710/campos_512_v4
+130/663751/campos_512_v4
+130/663781/campos_512_v4
+130/663790/campos_512_v4
+130/663809/campos_512_v4
+130/663863/campos_512_v4
+130/663958/campos_512_v4
+130/663991/campos_512_v4
+130/664095/campos_512_v4
+130/664327/campos_512_v4
+130/664337/campos_512_v4
+130/664405/campos_512_v4
+130/664467/campos_512_v4
+130/664634/campos_512_v4
+130/664858/campos_512_v4
+130/664903/campos_512_v4
+130/664929/campos_512_v4
+130/664983/campos_512_v4
+130/664993/campos_512_v4
+130/665000/campos_512_v4
+131/665085/campos_512_v4
+131/665547/campos_512_v4
+131/665749/campos_512_v4
+131/665751/campos_512_v4
+131/665835/campos_512_v4
+131/665837/campos_512_v4
+131/665847/campos_512_v4
+131/665866/campos_512_v4
+131/665919/campos_512_v4
+131/665932/campos_512_v4
+131/666260/campos_512_v4
+131/666714/campos_512_v4
+131/666758/campos_512_v4
+131/666778/campos_512_v4
+131/666844/campos_512_v4
+131/666857/campos_512_v4
+131/666883/campos_512_v4
+131/667073/campos_512_v4
+131/667131/campos_512_v4
+131/667167/campos_512_v4
+131/667168/campos_512_v4
+131/667181/campos_512_v4
+131/667207/campos_512_v4
+131/667256/campos_512_v4
+131/667530/campos_512_v4
+131/667552/campos_512_v4
+131/667594/campos_512_v4
+131/667802/campos_512_v4
+131/667841/campos_512_v4
+131/667853/campos_512_v4
+131/667925/campos_512_v4
+131/668011/campos_512_v4
+131/668309/campos_512_v4
+131/668313/campos_512_v4
+131/668415/campos_512_v4
+131/668468/campos_512_v4
+131/668552/campos_512_v4
+131/668565/campos_512_v4
+131/668587/campos_512_v4
+131/668622/campos_512_v4
+131/668626/campos_512_v4
+131/668659/campos_512_v4
+131/668793/campos_512_v4
+131/668800/campos_512_v4
+131/668884/campos_512_v4
+131/669119/campos_512_v4
+131/669194/campos_512_v4
+131/669457/campos_512_v4
+131/669557/campos_512_v4
+131/669613/campos_512_v4
+131/669625/campos_512_v4
+131/669668/campos_512_v4
+131/669777/campos_512_v4
+131/669837/campos_512_v4
+131/669845/campos_512_v4
+131/669882/campos_512_v4
+131/669911/campos_512_v4
+132/670006/campos_512_v4
+132/670082/campos_512_v4
+132/670084/campos_512_v4
+132/670090/campos_512_v4
+132/670095/campos_512_v4
+132/670123/campos_512_v4
+132/670241/campos_512_v4
+132/670469/campos_512_v4
+132/670480/campos_512_v4
+132/670498/campos_512_v4
+132/670731/campos_512_v4
+132/670847/campos_512_v4
+132/670950/campos_512_v4
+132/670953/campos_512_v4
+132/671017/campos_512_v4
+132/671150/campos_512_v4
+132/671195/campos_512_v4
+132/671222/campos_512_v4
+132/671243/campos_512_v4
+132/671311/campos_512_v4
+132/671474/campos_512_v4
+132/671572/campos_512_v4
+132/671700/campos_512_v4
+132/671764/campos_512_v4
+132/671857/campos_512_v4
+132/672059/campos_512_v4
+132/672060/campos_512_v4
+132/672082/campos_512_v4
+132/672109/campos_512_v4
+132/672254/campos_512_v4
+132/672501/campos_512_v4
+132/672532/campos_512_v4
+132/672595/campos_512_v4
+132/672597/campos_512_v4
+132/672624/campos_512_v4
+132/672639/campos_512_v4
+132/672719/campos_512_v4
+132/673103/campos_512_v4
+132/673173/campos_512_v4
+132/673309/campos_512_v4
+132/673437/campos_512_v4
+132/673452/campos_512_v4
+132/673592/campos_512_v4
+132/673616/campos_512_v4
+132/673624/campos_512_v4
+132/673648/campos_512_v4
+132/673666/campos_512_v4
+132/673703/campos_512_v4
+132/673712/campos_512_v4
+132/673713/campos_512_v4
+132/673863/campos_512_v4
+132/673871/campos_512_v4
+132/674076/campos_512_v4
+132/674108/campos_512_v4
+132/674167/campos_512_v4
+132/674503/campos_512_v4
+132/674569/campos_512_v4
+132/674617/campos_512_v4
+132/674643/campos_512_v4
+132/674693/campos_512_v4
+132/674808/campos_512_v4
+132/674818/campos_512_v4
+132/674845/campos_512_v4
+132/674921/campos_512_v4
+133/675038/campos_512_v4
+133/675047/campos_512_v4
+133/675196/campos_512_v4
+133/675322/campos_512_v4
+133/675349/campos_512_v4
+133/675379/campos_512_v4
+133/675617/campos_512_v4
+133/675621/campos_512_v4
+133/675672/campos_512_v4
+133/675771/campos_512_v4
+133/675873/campos_512_v4
+133/675921/campos_512_v4
+133/675983/campos_512_v4
+133/676026/campos_512_v4
+133/676033/campos_512_v4
+133/676183/campos_512_v4
+133/676439/campos_512_v4
+133/676622/campos_512_v4
+133/676623/campos_512_v4
+133/676656/campos_512_v4
+133/676700/campos_512_v4
+133/676830/campos_512_v4
+133/676919/campos_512_v4
+133/677117/campos_512_v4
+133/677124/campos_512_v4
+133/677212/campos_512_v4
+133/677233/campos_512_v4
+133/677234/campos_512_v4
+133/677278/campos_512_v4
+133/677425/campos_512_v4
+133/677668/campos_512_v4
+133/677677/campos_512_v4
+133/677701/campos_512_v4
+133/677774/campos_512_v4
+133/677800/campos_512_v4
+133/677818/campos_512_v4
+133/677863/campos_512_v4
+133/677947/campos_512_v4
+133/678034/campos_512_v4
+133/678165/campos_512_v4
+133/678177/campos_512_v4
+133/678184/campos_512_v4
+133/678305/campos_512_v4
+133/678312/campos_512_v4
+133/678330/campos_512_v4
+133/678361/campos_512_v4
+133/678443/campos_512_v4
+133/678478/campos_512_v4
+133/678612/campos_512_v4
+133/678793/campos_512_v4
+133/678794/campos_512_v4
+133/678855/campos_512_v4
+133/678870/campos_512_v4
+133/678896/campos_512_v4
+133/678938/campos_512_v4
+133/678959/campos_512_v4
+133/679057/campos_512_v4
+133/679094/campos_512_v4
+133/679106/campos_512_v4
+133/679147/campos_512_v4
+133/679160/campos_512_v4
+133/679195/campos_512_v4
+133/679586/campos_512_v4
+133/679667/campos_512_v4
+133/679867/campos_512_v4
+133/679880/campos_512_v4
+133/680001/campos_512_v4
+134/680062/campos_512_v4
+134/680127/campos_512_v4
+134/680206/campos_512_v4
+134/680399/campos_512_v4
+134/680432/campos_512_v4
+134/680522/campos_512_v4
+134/680658/campos_512_v4
+134/680682/campos_512_v4
+134/680742/campos_512_v4
+134/680778/campos_512_v4
+134/680855/campos_512_v4
+134/680895/campos_512_v4
+134/681021/campos_512_v4
+134/681050/campos_512_v4
+134/681055/campos_512_v4
+134/681328/campos_512_v4
+134/681400/campos_512_v4
+134/681417/campos_512_v4
+134/681446/campos_512_v4
+134/681458/campos_512_v4
+134/681486/campos_512_v4
+134/681637/campos_512_v4
+134/681750/campos_512_v4
+134/681763/campos_512_v4
+134/681835/campos_512_v4
+134/682054/campos_512_v4
+134/682055/campos_512_v4
+134/682058/campos_512_v4
+134/682101/campos_512_v4
+134/682153/campos_512_v4
+134/682181/campos_512_v4
+134/682252/campos_512_v4
+134/682268/campos_512_v4
+134/682343/campos_512_v4
+134/682362/campos_512_v4
+134/682704/campos_512_v4
+134/682712/campos_512_v4
+134/682782/campos_512_v4
+134/682813/campos_512_v4
+134/682828/campos_512_v4
+134/682965/campos_512_v4
+134/683034/campos_512_v4
+134/683060/campos_512_v4
+134/683105/campos_512_v4
+134/683126/campos_512_v4
+134/683262/campos_512_v4
+134/683362/campos_512_v4
+134/683437/campos_512_v4
+134/683572/campos_512_v4
+134/683647/campos_512_v4
+134/683738/campos_512_v4
+134/683817/campos_512_v4
+134/683893/campos_512_v4
+134/684016/campos_512_v4
+134/684097/campos_512_v4
+134/684105/campos_512_v4
+134/684113/campos_512_v4
+134/684193/campos_512_v4
+134/684360/campos_512_v4
+134/684497/campos_512_v4
+134/684562/campos_512_v4
+134/684698/campos_512_v4
+134/684740/campos_512_v4
+134/684864/campos_512_v4
+134/684882/campos_512_v4
+134/684885/campos_512_v4
+134/684934/campos_512_v4
+135/685009/campos_512_v4
+135/685039/campos_512_v4
+135/685168/campos_512_v4
+135/685269/campos_512_v4
+135/685376/campos_512_v4
+135/685440/campos_512_v4
+135/685472/campos_512_v4
+135/685481/campos_512_v4
+135/685482/campos_512_v4
+135/685631/campos_512_v4
+135/685652/campos_512_v4
+135/685733/campos_512_v4
+135/685832/campos_512_v4
+135/685986/campos_512_v4
+135/685992/campos_512_v4
+135/686004/campos_512_v4
+135/686061/campos_512_v4
+135/686077/campos_512_v4
+135/686085/campos_512_v4
+135/686244/campos_512_v4
+135/686258/campos_512_v4
+135/686294/campos_512_v4
+135/686359/campos_512_v4
+135/686452/campos_512_v4
+135/686491/campos_512_v4
+135/686559/campos_512_v4
+135/686579/campos_512_v4
+135/686713/campos_512_v4
+135/686758/campos_512_v4
+135/686817/campos_512_v4
+135/686891/campos_512_v4
+135/686978/campos_512_v4
+135/687074/campos_512_v4
+135/687140/campos_512_v4
+135/687298/campos_512_v4
+135/687313/campos_512_v4
+135/687376/campos_512_v4
+135/687517/campos_512_v4
+135/687701/campos_512_v4
+135/687813/campos_512_v4
+135/687999/campos_512_v4
+135/688073/campos_512_v4
+135/688175/campos_512_v4
+135/688209/campos_512_v4
+135/688382/campos_512_v4
+135/688391/campos_512_v4
+135/688411/campos_512_v4
+135/688478/campos_512_v4
+135/688501/campos_512_v4
+135/688707/campos_512_v4
+135/688745/campos_512_v4
+135/688770/campos_512_v4
+135/688849/campos_512_v4
+135/688861/campos_512_v4
+135/688874/campos_512_v4
+135/688979/campos_512_v4
+135/689195/campos_512_v4
+135/689235/campos_512_v4
+135/689239/campos_512_v4
+135/689262/campos_512_v4
+135/689458/campos_512_v4
+135/689519/campos_512_v4
+135/689621/campos_512_v4
+135/689748/campos_512_v4
+135/689810/campos_512_v4
+135/689828/campos_512_v4
+135/689848/campos_512_v4
+135/689902/campos_512_v4
+135/689934/campos_512_v4
+136/690113/campos_512_v4
+136/690182/campos_512_v4
+136/690296/campos_512_v4
+136/690407/campos_512_v4
+136/690511/campos_512_v4
+136/690633/campos_512_v4
+136/690636/campos_512_v4
+136/690644/campos_512_v4
+136/690781/campos_512_v4
+136/690901/campos_512_v4
+136/690933/campos_512_v4
+136/690965/campos_512_v4
+136/691021/campos_512_v4
+136/691121/campos_512_v4
+136/691252/campos_512_v4
+136/691286/campos_512_v4
+136/691331/campos_512_v4
+136/691379/campos_512_v4
+136/691446/campos_512_v4
+136/691453/campos_512_v4
+136/691470/campos_512_v4
+136/691482/campos_512_v4
+136/691529/campos_512_v4
+136/691609/campos_512_v4
+136/691630/campos_512_v4
+136/691634/campos_512_v4
+136/691635/campos_512_v4
+136/691654/campos_512_v4
+136/691667/campos_512_v4
+136/691718/campos_512_v4
+136/691923/campos_512_v4
+136/691931/campos_512_v4
+136/691940/campos_512_v4
+136/691998/campos_512_v4
+136/692162/campos_512_v4
+136/692194/campos_512_v4
+136/692237/campos_512_v4
+136/692238/campos_512_v4
+136/692272/campos_512_v4
+136/692307/campos_512_v4
+136/692394/campos_512_v4
+136/692433/campos_512_v4
+136/692523/campos_512_v4
+136/692629/campos_512_v4
+136/692645/campos_512_v4
+136/692647/campos_512_v4
+136/692651/campos_512_v4
+136/692671/campos_512_v4
+136/692678/campos_512_v4
+136/692688/campos_512_v4
+136/692805/campos_512_v4
+136/692850/campos_512_v4
+136/692867/campos_512_v4
+136/692961/campos_512_v4
+136/692989/campos_512_v4
+136/693049/campos_512_v4
+136/693143/campos_512_v4
+136/693144/campos_512_v4
+136/693241/campos_512_v4
+136/693260/campos_512_v4
+136/693266/campos_512_v4
+136/693312/campos_512_v4
+136/693458/campos_512_v4
+136/693621/campos_512_v4
+136/693647/campos_512_v4
+136/693799/campos_512_v4
+136/693827/campos_512_v4
+136/693902/campos_512_v4
+136/693928/campos_512_v4
+136/693997/campos_512_v4
+136/694048/campos_512_v4
+136/694076/campos_512_v4
+136/694166/campos_512_v4
+136/694234/campos_512_v4
+136/694239/campos_512_v4
+136/694272/campos_512_v4
+136/694319/campos_512_v4
+136/694329/campos_512_v4
+136/694351/campos_512_v4
+136/694483/campos_512_v4
+136/694661/campos_512_v4
+136/694701/campos_512_v4
+136/694740/campos_512_v4
+136/694753/campos_512_v4
+136/694875/campos_512_v4
+137/695161/campos_512_v4
+137/695225/campos_512_v4
+137/695278/campos_512_v4
+137/695371/campos_512_v4
+137/695424/campos_512_v4
+137/695593/campos_512_v4
+137/695640/campos_512_v4
+137/695765/campos_512_v4
+137/695820/campos_512_v4
+137/695836/campos_512_v4
+137/695906/campos_512_v4
+137/695916/campos_512_v4
+137/696020/campos_512_v4
+137/696026/campos_512_v4
+137/696132/campos_512_v4
+137/696159/campos_512_v4
+137/696202/campos_512_v4
+137/696296/campos_512_v4
+137/696310/campos_512_v4
+137/696343/campos_512_v4
+137/696375/campos_512_v4
+137/696376/campos_512_v4
+137/696445/campos_512_v4
+137/696527/campos_512_v4
+137/696610/campos_512_v4
+137/696616/campos_512_v4
+137/696617/campos_512_v4
+137/696754/campos_512_v4
+137/696840/campos_512_v4
+137/696863/campos_512_v4
+137/696885/campos_512_v4
+137/697075/campos_512_v4
+137/697082/campos_512_v4
+137/697137/campos_512_v4
+137/697139/campos_512_v4
+137/697144/campos_512_v4
+137/697170/campos_512_v4
+137/697175/campos_512_v4
+137/697271/campos_512_v4
+137/697324/campos_512_v4
+137/697363/campos_512_v4
+137/697431/campos_512_v4
+137/697482/campos_512_v4
+137/697540/campos_512_v4
+137/697794/campos_512_v4
+137/697901/campos_512_v4
+137/697994/campos_512_v4
+137/698006/campos_512_v4
+137/698026/campos_512_v4
+137/698028/campos_512_v4
+137/698058/campos_512_v4
+137/698157/campos_512_v4
+137/698278/campos_512_v4
+137/698357/campos_512_v4
+137/698432/campos_512_v4
+137/698502/campos_512_v4
+137/698552/campos_512_v4
+137/698593/campos_512_v4
+137/698649/campos_512_v4
+137/698880/campos_512_v4
+137/698887/campos_512_v4
+137/698923/campos_512_v4
+137/699144/campos_512_v4
+137/699192/campos_512_v4
+137/699240/campos_512_v4
+137/699329/campos_512_v4
+137/699494/campos_512_v4
+137/699602/campos_512_v4
+137/699653/campos_512_v4
+137/699677/campos_512_v4
+137/699807/campos_512_v4
+137/699833/campos_512_v4
+137/699853/campos_512_v4
+138/700063/campos_512_v4
+138/700312/campos_512_v4
+138/700382/campos_512_v4
+138/700479/campos_512_v4
+138/700493/campos_512_v4
+138/700647/campos_512_v4
+138/700650/campos_512_v4
+138/700694/campos_512_v4
+138/700730/campos_512_v4
+138/700756/campos_512_v4
+138/700809/campos_512_v4
+138/700994/campos_512_v4
+138/701005/campos_512_v4
+138/701135/campos_512_v4
+138/701139/campos_512_v4
+138/701164/campos_512_v4
+138/701165/campos_512_v4
+138/701255/campos_512_v4
+138/701269/campos_512_v4
+138/701406/campos_512_v4
+138/701428/campos_512_v4
+138/701572/campos_512_v4
+138/701590/campos_512_v4
+138/701614/campos_512_v4
+138/701882/campos_512_v4
+138/701992/campos_512_v4
+138/702225/campos_512_v4
+138/702273/campos_512_v4
+138/702382/campos_512_v4
+138/702614/campos_512_v4
+138/702625/campos_512_v4
+138/702633/campos_512_v4
+138/702681/campos_512_v4
+138/702695/campos_512_v4
+138/702726/campos_512_v4
+138/702947/campos_512_v4
+138/703060/campos_512_v4
+138/703229/campos_512_v4
+138/703262/campos_512_v4
+138/703293/campos_512_v4
+138/703486/campos_512_v4
+138/703571/campos_512_v4
+138/703577/campos_512_v4
+138/703604/campos_512_v4
+138/703702/campos_512_v4
+138/703721/campos_512_v4
+138/703772/campos_512_v4
+138/704061/campos_512_v4
+138/704105/campos_512_v4
+138/704180/campos_512_v4
+138/704190/campos_512_v4
+138/704389/campos_512_v4
+138/704402/campos_512_v4
+138/704442/campos_512_v4
+138/704547/campos_512_v4
+138/704672/campos_512_v4
+138/704690/campos_512_v4
+138/704698/campos_512_v4
+138/704716/campos_512_v4
+138/704746/campos_512_v4
+138/704793/campos_512_v4
+138/704863/campos_512_v4
+138/704887/campos_512_v4
+138/704937/campos_512_v4
+139/705062/campos_512_v4
+139/705091/campos_512_v4
+139/705093/campos_512_v4
+139/705373/campos_512_v4
+139/705574/campos_512_v4
+139/705715/campos_512_v4
+139/705743/campos_512_v4
+139/705821/campos_512_v4
+139/705826/campos_512_v4
+139/705885/campos_512_v4
+139/706020/campos_512_v4
+139/706079/campos_512_v4
+139/706492/campos_512_v4
+139/706515/campos_512_v4
+139/706808/campos_512_v4
+139/706821/campos_512_v4
+139/706853/campos_512_v4
+139/706860/campos_512_v4
+139/707130/campos_512_v4
+139/707211/campos_512_v4
+139/707240/campos_512_v4
+139/707354/campos_512_v4
+139/707373/campos_512_v4
+139/707382/campos_512_v4
+139/707434/campos_512_v4
+139/707561/campos_512_v4
+139/707587/campos_512_v4
+139/707612/campos_512_v4
+139/707935/campos_512_v4
+139/708146/campos_512_v4
+139/708214/campos_512_v4
+139/708249/campos_512_v4
+139/708503/campos_512_v4
+139/708622/campos_512_v4
+139/708658/campos_512_v4
+139/708686/campos_512_v4
+139/708850/campos_512_v4
+139/708994/campos_512_v4
+139/709001/campos_512_v4
+139/709003/campos_512_v4
+139/709043/campos_512_v4
+139/709068/campos_512_v4
+139/709177/campos_512_v4
+139/709220/campos_512_v4
+139/709247/campos_512_v4
+139/709322/campos_512_v4
+139/709349/campos_512_v4
+139/709591/campos_512_v4
+139/709600/campos_512_v4
+139/709606/campos_512_v4
+139/709640/campos_512_v4
+139/709662/campos_512_v4
+139/709744/campos_512_v4
+139/709763/campos_512_v4
+139/709878/campos_512_v4
+139/709956/campos_512_v4
+139/710001/campos_512_v4
+14/80313/campos_512_v4
+14/80356/campos_512_v4
+14/80414/campos_512_v4
+14/80477/campos_512_v4
+14/80483/campos_512_v4
+14/80509/campos_512_v4
+14/80773/campos_512_v4
+14/80866/campos_512_v4
+14/80939/campos_512_v4
+14/81388/campos_512_v4
+14/81400/campos_512_v4
+14/81420/campos_512_v4
+14/81845/campos_512_v4
+14/81906/campos_512_v4
+14/81973/campos_512_v4
+14/82089/campos_512_v4
+14/82120/campos_512_v4
+14/82266/campos_512_v4
+14/82405/campos_512_v4
+14/82467/campos_512_v4
+14/82573/campos_512_v4
+14/82678/campos_512_v4
+14/82801/campos_512_v4
+14/83012/campos_512_v4
+14/83085/campos_512_v4
+14/83112/campos_512_v4
+14/83244/campos_512_v4
+14/83504/campos_512_v4
+14/83533/campos_512_v4
+14/83549/campos_512_v4
+14/83644/campos_512_v4
+14/83711/campos_512_v4
+14/83855/campos_512_v4
+14/83880/campos_512_v4
+14/83964/campos_512_v4
+14/83999/campos_512_v4
+14/84175/campos_512_v4
+14/84251/campos_512_v4
+14/84270/campos_512_v4
+14/84279/campos_512_v4
+14/84519/campos_512_v4
+14/84580/campos_512_v4
+14/84701/campos_512_v4
+14/84775/campos_512_v4
+14/84949/campos_512_v4
+14/84955/campos_512_v4
+140/710098/campos_512_v4
+140/710102/campos_512_v4
+140/710368/campos_512_v4
+140/710410/campos_512_v4
+140/710555/campos_512_v4
+140/710609/campos_512_v4
+140/710726/campos_512_v4
+140/710731/campos_512_v4
+140/710869/campos_512_v4
+140/711050/campos_512_v4
+140/711057/campos_512_v4
+140/711066/campos_512_v4
+140/711125/campos_512_v4
+140/711595/campos_512_v4
+140/711616/campos_512_v4
+140/711763/campos_512_v4
+140/712003/campos_512_v4
+140/712122/campos_512_v4
+140/712151/campos_512_v4
+140/712183/campos_512_v4
+140/712224/campos_512_v4
+140/712277/campos_512_v4
+140/712282/campos_512_v4
+140/712286/campos_512_v4
+140/712485/campos_512_v4
+140/712567/campos_512_v4
+140/712573/campos_512_v4
+140/712603/campos_512_v4
+140/712670/campos_512_v4
+140/712676/campos_512_v4
+140/712778/campos_512_v4
+140/712831/campos_512_v4
+140/712874/campos_512_v4
+140/712908/campos_512_v4
+140/712942/campos_512_v4
+140/713021/campos_512_v4
+140/713111/campos_512_v4
+140/713112/campos_512_v4
+140/713384/campos_512_v4
+140/713389/campos_512_v4
+140/713464/campos_512_v4
+140/713633/campos_512_v4
+140/713715/campos_512_v4
+140/713746/campos_512_v4
+140/713938/campos_512_v4
+140/714031/campos_512_v4
+140/714109/campos_512_v4
+140/714140/campos_512_v4
+140/714320/campos_512_v4
+140/714481/campos_512_v4
+140/714504/campos_512_v4
+140/714589/campos_512_v4
+140/714609/campos_512_v4
+140/714621/campos_512_v4
+140/714659/campos_512_v4
+140/714729/campos_512_v4
+140/714778/campos_512_v4
+140/714792/campos_512_v4
+140/714800/campos_512_v4
+140/714896/campos_512_v4
+140/714970/campos_512_v4
+141/715066/campos_512_v4
+141/715125/campos_512_v4
+141/715393/campos_512_v4
+141/715487/campos_512_v4
+141/715505/campos_512_v4
+141/715525/campos_512_v4
+141/715706/campos_512_v4
+141/715802/campos_512_v4
+141/715850/campos_512_v4
+141/715874/campos_512_v4
+141/715948/campos_512_v4
+141/715969/campos_512_v4
+141/715970/campos_512_v4
+141/715984/campos_512_v4
+141/716033/campos_512_v4
+141/716112/campos_512_v4
+141/716236/campos_512_v4
+141/716274/campos_512_v4
+141/716631/campos_512_v4
+141/716636/campos_512_v4
+141/716661/campos_512_v4
+141/716785/campos_512_v4
+141/716814/campos_512_v4
+141/716858/campos_512_v4
+141/716918/campos_512_v4
+141/716964/campos_512_v4
+141/717074/campos_512_v4
+141/717099/campos_512_v4
+141/717161/campos_512_v4
+141/717166/campos_512_v4
+141/717345/campos_512_v4
+141/717379/campos_512_v4
+141/717470/campos_512_v4
+141/717682/campos_512_v4
+141/717783/campos_512_v4
+141/717806/campos_512_v4
+141/717817/campos_512_v4
+141/718008/campos_512_v4
+141/718059/campos_512_v4
+141/718173/campos_512_v4
+141/718191/campos_512_v4
+141/718440/campos_512_v4
+141/718562/campos_512_v4
+141/718563/campos_512_v4
+141/718751/campos_512_v4
+141/718783/campos_512_v4
+141/718851/campos_512_v4
+141/718926/campos_512_v4
+141/718981/campos_512_v4
+141/718993/campos_512_v4
+141/719138/campos_512_v4
+141/719148/campos_512_v4
+141/719156/campos_512_v4
+141/719186/campos_512_v4
+141/719190/campos_512_v4
+141/719277/campos_512_v4
+141/719418/campos_512_v4
+141/719457/campos_512_v4
+141/719505/campos_512_v4
+141/719506/campos_512_v4
+141/719538/campos_512_v4
+141/719557/campos_512_v4
+141/719754/campos_512_v4
+141/719773/campos_512_v4
+141/719788/campos_512_v4
+141/719790/campos_512_v4
+141/719795/campos_512_v4
+141/719836/campos_512_v4
+141/719990/campos_512_v4
+142/720033/campos_512_v4
+142/720037/campos_512_v4
+142/720114/campos_512_v4
+142/720153/campos_512_v4
+142/720171/campos_512_v4
+142/720317/campos_512_v4
+142/720325/campos_512_v4
+142/720370/campos_512_v4
+142/720444/campos_512_v4
+142/720473/campos_512_v4
+142/720486/campos_512_v4
+142/720619/campos_512_v4
+142/720669/campos_512_v4
+142/720707/campos_512_v4
+142/720744/campos_512_v4
+142/720794/campos_512_v4
+142/720864/campos_512_v4
+142/720865/campos_512_v4
+142/720889/campos_512_v4
+142/720964/campos_512_v4
+142/721123/campos_512_v4
+142/721180/campos_512_v4
+142/721284/campos_512_v4
+142/721316/campos_512_v4
+142/721341/campos_512_v4
+142/721469/campos_512_v4
+142/721514/campos_512_v4
+142/721550/campos_512_v4
+142/721553/campos_512_v4
+142/721585/campos_512_v4
+142/721712/campos_512_v4
+142/721724/campos_512_v4
+142/721742/campos_512_v4
+142/721749/campos_512_v4
+142/721925/campos_512_v4
+142/721953/campos_512_v4
+142/721970/campos_512_v4
+142/722011/campos_512_v4
+142/722079/campos_512_v4
+142/722134/campos_512_v4
+142/722179/campos_512_v4
+142/722185/campos_512_v4
+142/722354/campos_512_v4
+142/722500/campos_512_v4
+142/722571/campos_512_v4
+142/722591/campos_512_v4
+142/722664/campos_512_v4
+142/722818/campos_512_v4
+142/723152/campos_512_v4
+142/723176/campos_512_v4
+142/723180/campos_512_v4
+142/723434/campos_512_v4
+142/723453/campos_512_v4
+142/723567/campos_512_v4
+142/723644/campos_512_v4
+142/723831/campos_512_v4
+142/723861/campos_512_v4
+142/723894/campos_512_v4
+142/724043/campos_512_v4
+142/724149/campos_512_v4
+142/724347/campos_512_v4
+142/724374/campos_512_v4
+142/724387/campos_512_v4
+142/724396/campos_512_v4
+142/724524/campos_512_v4
+142/724578/campos_512_v4
+142/724724/campos_512_v4
+142/724791/campos_512_v4
+142/724795/campos_512_v4
+142/724833/campos_512_v4
+142/724878/campos_512_v4
+142/724888/campos_512_v4
+142/724952/campos_512_v4
+142/724970/campos_512_v4
+143/725117/campos_512_v4
+143/725220/campos_512_v4
+143/725242/campos_512_v4
+143/725286/campos_512_v4
+143/725304/campos_512_v4
+143/725371/campos_512_v4
+143/725393/campos_512_v4
+143/725435/campos_512_v4
+143/725436/campos_512_v4
+143/725511/campos_512_v4
+143/725521/campos_512_v4
+143/725542/campos_512_v4
+143/725619/campos_512_v4
+143/725682/campos_512_v4
+143/725733/campos_512_v4
+143/725880/campos_512_v4
+143/725941/campos_512_v4
+143/726065/campos_512_v4
+143/726206/campos_512_v4
+143/726242/campos_512_v4
+143/726315/campos_512_v4
+143/726340/campos_512_v4
+143/726349/campos_512_v4
+143/726351/campos_512_v4
+143/726495/campos_512_v4
+143/726535/campos_512_v4
+143/726549/campos_512_v4
+143/726571/campos_512_v4
+143/726615/campos_512_v4
+143/726620/campos_512_v4
+143/726630/campos_512_v4
+143/726723/campos_512_v4
+143/726972/campos_512_v4
+143/726984/campos_512_v4
+143/727010/campos_512_v4
+143/727023/campos_512_v4
+143/727064/campos_512_v4
+143/727137/campos_512_v4
+143/727167/campos_512_v4
+143/727189/campos_512_v4
+143/727201/campos_512_v4
+143/727219/campos_512_v4
+143/727259/campos_512_v4
+143/727365/campos_512_v4
+143/727483/campos_512_v4
+143/727499/campos_512_v4
+143/727590/campos_512_v4
+143/727647/campos_512_v4
+143/727665/campos_512_v4
+143/727692/campos_512_v4
+143/727766/campos_512_v4
+143/727879/campos_512_v4
+143/727947/campos_512_v4
+143/727976/campos_512_v4
+143/727983/campos_512_v4
+143/728234/campos_512_v4
+143/728392/campos_512_v4
+143/728430/campos_512_v4
+143/728687/campos_512_v4
+143/728730/campos_512_v4
+143/728772/campos_512_v4
+143/728827/campos_512_v4
+143/728891/campos_512_v4
+143/728938/campos_512_v4
+143/728939/campos_512_v4
+143/729121/campos_512_v4
+143/729166/campos_512_v4
+143/729206/campos_512_v4
+143/729260/campos_512_v4
+143/729342/campos_512_v4
+143/729351/campos_512_v4
+143/729357/campos_512_v4
+143/729368/campos_512_v4
+143/729462/campos_512_v4
+143/729498/campos_512_v4
+143/729501/campos_512_v4
+143/729585/campos_512_v4
+143/729671/campos_512_v4
+143/729831/campos_512_v4
+143/729881/campos_512_v4
+144/730057/campos_512_v4
+144/730183/campos_512_v4
+144/730268/campos_512_v4
+144/730298/campos_512_v4
+144/730364/campos_512_v4
+144/730383/campos_512_v4
+144/730486/campos_512_v4
+144/730625/campos_512_v4
+144/730710/campos_512_v4
+144/730715/campos_512_v4
+144/730929/campos_512_v4
+144/730985/campos_512_v4
+144/731018/campos_512_v4
+144/731025/campos_512_v4
+144/731071/campos_512_v4
+144/731113/campos_512_v4
+144/731127/campos_512_v4
+144/731200/campos_512_v4
+144/731275/campos_512_v4
+144/731324/campos_512_v4
+144/731406/campos_512_v4
+144/731609/campos_512_v4
+144/731695/campos_512_v4
+144/731789/campos_512_v4
+144/731792/campos_512_v4
+144/731836/campos_512_v4
+144/731896/campos_512_v4
+144/731901/campos_512_v4
+144/731932/campos_512_v4
+144/731952/campos_512_v4
+144/731966/campos_512_v4
+144/732162/campos_512_v4
+144/732296/campos_512_v4
+144/732308/campos_512_v4
+144/732369/campos_512_v4
+144/732516/campos_512_v4
+144/732544/campos_512_v4
+144/732707/campos_512_v4
+144/732787/campos_512_v4
+144/732889/campos_512_v4
+144/733135/campos_512_v4
+144/733161/campos_512_v4
+144/733169/campos_512_v4
+144/733171/campos_512_v4
+144/733289/campos_512_v4
+144/733309/campos_512_v4
+144/733443/campos_512_v4
+144/733499/campos_512_v4
+144/733589/campos_512_v4
+144/733598/campos_512_v4
+144/733669/campos_512_v4
+144/733711/campos_512_v4
+144/733731/campos_512_v4
+144/733762/campos_512_v4
+144/733916/campos_512_v4
+144/733919/campos_512_v4
+144/733933/campos_512_v4
+144/733973/campos_512_v4
+144/734077/campos_512_v4
+144/734144/campos_512_v4
+144/734170/campos_512_v4
+144/734241/campos_512_v4
+144/734331/campos_512_v4
+144/734397/campos_512_v4
+144/734401/campos_512_v4
+144/734427/campos_512_v4
+144/734469/campos_512_v4
+144/734523/campos_512_v4
+144/734674/campos_512_v4
+145/735024/campos_512_v4
+145/735038/campos_512_v4
+145/735114/campos_512_v4
+145/735228/campos_512_v4
+145/735442/campos_512_v4
+145/735516/campos_512_v4
+145/735730/campos_512_v4
+145/735767/campos_512_v4
+145/735892/campos_512_v4
+145/736144/campos_512_v4
+145/736162/campos_512_v4
+145/736350/campos_512_v4
+145/736392/campos_512_v4
+145/736551/campos_512_v4
+145/736572/campos_512_v4
+145/736575/campos_512_v4
+145/736636/campos_512_v4
+145/736690/campos_512_v4
+145/736706/campos_512_v4
+145/736750/campos_512_v4
+145/736840/campos_512_v4
+145/736999/campos_512_v4
+145/737278/campos_512_v4
+145/737366/campos_512_v4
+145/737446/campos_512_v4
+145/737493/campos_512_v4
+145/737514/campos_512_v4
+145/737521/campos_512_v4
+145/737619/campos_512_v4
+145/737696/campos_512_v4
+145/737703/campos_512_v4
+145/737772/campos_512_v4
+145/737799/campos_512_v4
+145/737834/campos_512_v4
+145/737873/campos_512_v4
+145/737932/campos_512_v4
+145/737942/campos_512_v4
+145/738050/campos_512_v4
+145/738271/campos_512_v4
+145/738300/campos_512_v4
+145/738313/campos_512_v4
+145/738374/campos_512_v4
+145/738377/campos_512_v4
+145/738415/campos_512_v4
+145/738427/campos_512_v4
+145/738559/campos_512_v4
+145/738574/campos_512_v4
+145/738667/campos_512_v4
+145/738732/campos_512_v4
+145/738749/campos_512_v4
+145/738821/campos_512_v4
+145/738833/campos_512_v4
+145/738896/campos_512_v4
+145/738926/campos_512_v4
+145/738934/campos_512_v4
+145/739018/campos_512_v4
+145/739070/campos_512_v4
+145/739077/campos_512_v4
+145/739145/campos_512_v4
+145/739239/campos_512_v4
+145/739316/campos_512_v4
+145/739352/campos_512_v4
+145/739365/campos_512_v4
+145/739443/campos_512_v4
+145/739445/campos_512_v4
+145/739456/campos_512_v4
+145/739457/campos_512_v4
+145/739493/campos_512_v4
+145/739565/campos_512_v4
+145/739855/campos_512_v4
+145/739882/campos_512_v4
+146/740084/campos_512_v4
+146/740170/campos_512_v4
+146/740263/campos_512_v4
+146/740300/campos_512_v4
+146/740350/campos_512_v4
+146/740367/campos_512_v4
+146/740408/campos_512_v4
+146/740506/campos_512_v4
+146/740538/campos_512_v4
+146/740560/campos_512_v4
+146/740674/campos_512_v4
+146/740731/campos_512_v4
+146/740752/campos_512_v4
+146/740778/campos_512_v4
+146/741010/campos_512_v4
+146/741075/campos_512_v4
+146/741112/campos_512_v4
+146/741344/campos_512_v4
+146/741449/campos_512_v4
+146/741547/campos_512_v4
+146/741573/campos_512_v4
+146/741601/campos_512_v4
+146/741649/campos_512_v4
+146/741660/campos_512_v4
+146/741845/campos_512_v4
+146/741896/campos_512_v4
+146/741932/campos_512_v4
+146/741945/campos_512_v4
+146/741957/campos_512_v4
+146/741975/campos_512_v4
+146/742223/campos_512_v4
+146/742224/campos_512_v4
+146/742353/campos_512_v4
+146/742542/campos_512_v4
+146/742592/campos_512_v4
+146/742595/campos_512_v4
+146/742744/campos_512_v4
+146/742748/campos_512_v4
+146/742757/campos_512_v4
+146/742764/campos_512_v4
+146/742774/campos_512_v4
+146/742785/campos_512_v4
+146/742809/campos_512_v4
+146/742944/campos_512_v4
+146/742963/campos_512_v4
+146/743059/campos_512_v4
+146/743225/campos_512_v4
+146/743228/campos_512_v4
+146/743325/campos_512_v4
+146/743338/campos_512_v4
+146/743393/campos_512_v4
+146/743418/campos_512_v4
+146/743425/campos_512_v4
+146/743610/campos_512_v4
+146/743659/campos_512_v4
+146/743681/campos_512_v4
+146/743714/campos_512_v4
+146/743725/campos_512_v4
+146/743788/campos_512_v4
+146/743853/campos_512_v4
+146/743887/campos_512_v4
+146/744025/campos_512_v4
+146/744100/campos_512_v4
+146/744284/campos_512_v4
+146/744291/campos_512_v4
+146/744375/campos_512_v4
+146/744384/campos_512_v4
+146/744637/campos_512_v4
+146/744721/campos_512_v4
+146/744740/campos_512_v4
+146/744811/campos_512_v4
+146/744827/campos_512_v4
+146/744855/campos_512_v4
+146/744899/campos_512_v4
+146/744910/campos_512_v4
+146/744933/campos_512_v4
+146/744977/campos_512_v4
+147/745089/campos_512_v4
+147/745227/campos_512_v4
+147/745229/campos_512_v4
+147/745377/campos_512_v4
+147/745378/campos_512_v4
+147/745524/campos_512_v4
+147/745657/campos_512_v4
+147/745719/campos_512_v4
+147/745728/campos_512_v4
+147/746146/campos_512_v4
+147/746157/campos_512_v4
+147/746259/campos_512_v4
+147/746277/campos_512_v4
+147/746345/campos_512_v4
+147/746444/campos_512_v4
+147/746516/campos_512_v4
+147/746572/campos_512_v4
+147/746620/campos_512_v4
+147/746774/campos_512_v4
+147/746875/campos_512_v4
+147/746918/campos_512_v4
+147/747045/campos_512_v4
+147/747149/campos_512_v4
+147/747275/campos_512_v4
+147/747444/campos_512_v4
+147/747484/campos_512_v4
+147/747590/campos_512_v4
+147/747628/campos_512_v4
+147/747651/campos_512_v4
+147/747657/campos_512_v4
+147/747659/campos_512_v4
+147/747684/campos_512_v4
+147/747743/campos_512_v4
+147/747829/campos_512_v4
+147/747853/campos_512_v4
+147/747931/campos_512_v4
+147/747982/campos_512_v4
+147/747989/campos_512_v4
+147/748152/campos_512_v4
+147/748201/campos_512_v4
+147/748291/campos_512_v4
+147/748303/campos_512_v4
+147/748503/campos_512_v4
+147/748614/campos_512_v4
+147/748633/campos_512_v4
+147/748654/campos_512_v4
+147/748657/campos_512_v4
+147/748680/campos_512_v4
+147/748763/campos_512_v4
+147/748823/campos_512_v4
+147/748862/campos_512_v4
+147/748921/campos_512_v4
+147/748929/campos_512_v4
+147/748931/campos_512_v4
+147/748937/campos_512_v4
+147/748971/campos_512_v4
+147/749164/campos_512_v4
+147/749245/campos_512_v4
+147/749276/campos_512_v4
+147/749278/campos_512_v4
+147/749307/campos_512_v4
+147/749462/campos_512_v4
+147/749624/campos_512_v4
+147/749681/campos_512_v4
+147/749797/campos_512_v4
+147/749870/campos_512_v4
+147/749902/campos_512_v4
+148/750040/campos_512_v4
+148/750176/campos_512_v4
+148/750310/campos_512_v4
+148/750334/campos_512_v4
+148/750367/campos_512_v4
+148/750398/campos_512_v4
+148/750475/campos_512_v4
+148/750579/campos_512_v4
+148/750591/campos_512_v4
+148/750654/campos_512_v4
+148/751024/campos_512_v4
+148/751226/campos_512_v4
+148/751363/campos_512_v4
+148/751371/campos_512_v4
+148/751398/campos_512_v4
+148/751429/campos_512_v4
+148/751510/campos_512_v4
+148/751522/campos_512_v4
+148/751532/campos_512_v4
+148/751876/campos_512_v4
+148/752052/campos_512_v4
+148/752127/campos_512_v4
+148/752183/campos_512_v4
+148/752223/campos_512_v4
+148/752256/campos_512_v4
+148/752362/campos_512_v4
+148/752370/campos_512_v4
+148/752426/campos_512_v4
+148/752437/campos_512_v4
+148/752503/campos_512_v4
+148/752613/campos_512_v4
+148/752617/campos_512_v4
+148/752628/campos_512_v4
+148/752762/campos_512_v4
+148/752768/campos_512_v4
+148/752855/campos_512_v4
+148/752986/campos_512_v4
+148/753041/campos_512_v4
+148/753057/campos_512_v4
+148/753204/campos_512_v4
+148/753370/campos_512_v4
+148/753448/campos_512_v4
+148/753574/campos_512_v4
+148/753640/campos_512_v4
+148/753746/campos_512_v4
+148/753755/campos_512_v4
+148/753875/campos_512_v4
+148/753907/campos_512_v4
+148/753982/campos_512_v4
+148/754074/campos_512_v4
+148/754076/campos_512_v4
+148/754091/campos_512_v4
+148/754167/campos_512_v4
+148/754248/campos_512_v4
+148/754293/campos_512_v4
+148/754506/campos_512_v4
+148/754598/campos_512_v4
+148/754690/campos_512_v4
+148/754747/campos_512_v4
+148/754847/campos_512_v4
+148/754942/campos_512_v4
+149/755012/campos_512_v4
+149/755097/campos_512_v4
+149/755326/campos_512_v4
+149/755364/campos_512_v4
+149/755473/campos_512_v4
+149/755533/campos_512_v4
+149/755535/campos_512_v4
+149/755634/campos_512_v4
+149/755648/campos_512_v4
+149/755797/campos_512_v4
+149/755920/campos_512_v4
+149/755937/campos_512_v4
+149/756098/campos_512_v4
+149/756118/campos_512_v4
+149/756175/campos_512_v4
+149/756197/campos_512_v4
+149/756242/campos_512_v4
+149/756330/campos_512_v4
+149/756336/campos_512_v4
+149/756354/campos_512_v4
+149/756485/campos_512_v4
+149/756511/campos_512_v4
+149/756512/campos_512_v4
+149/756564/campos_512_v4
+149/756588/campos_512_v4
+149/756629/campos_512_v4
+149/756702/campos_512_v4
+149/756707/campos_512_v4
+149/756756/campos_512_v4
+149/756918/campos_512_v4
+149/757021/campos_512_v4
+149/757043/campos_512_v4
+149/757195/campos_512_v4
+149/757208/campos_512_v4
+149/757240/campos_512_v4
+149/757246/campos_512_v4
+149/757277/campos_512_v4
+149/757389/campos_512_v4
+149/757530/campos_512_v4
+149/757689/campos_512_v4
+149/757714/campos_512_v4
+149/757851/campos_512_v4
+149/757855/campos_512_v4
+149/757918/campos_512_v4
+149/757987/campos_512_v4
+149/758066/campos_512_v4
+149/758080/campos_512_v4
+149/758195/campos_512_v4
+149/758222/campos_512_v4
+149/758225/campos_512_v4
+149/758281/campos_512_v4
+149/758364/campos_512_v4
+149/758471/campos_512_v4
+149/758511/campos_512_v4
+149/758605/campos_512_v4
+149/758689/campos_512_v4
+149/758719/campos_512_v4
+149/758734/campos_512_v4
+149/758856/campos_512_v4
+149/758867/campos_512_v4
+149/758881/campos_512_v4
+149/758897/campos_512_v4
+149/758999/campos_512_v4
+149/759084/campos_512_v4
+149/759136/campos_512_v4
+149/759293/campos_512_v4
+149/759451/campos_512_v4
+149/759470/campos_512_v4
+149/759511/campos_512_v4
+149/759544/campos_512_v4
+149/759555/campos_512_v4
+149/759723/campos_512_v4
+15/85053/campos_512_v4
+15/85169/campos_512_v4
+15/85310/campos_512_v4
+15/85337/campos_512_v4
+15/85373/campos_512_v4
+15/85622/campos_512_v4
+15/85632/campos_512_v4
+15/85712/campos_512_v4
+15/85925/campos_512_v4
+15/86044/campos_512_v4
+15/86046/campos_512_v4
+15/86189/campos_512_v4
+15/86191/campos_512_v4
+15/86342/campos_512_v4
+15/86626/campos_512_v4
+15/86687/campos_512_v4
+15/86812/campos_512_v4
+15/86990/campos_512_v4
+15/87002/campos_512_v4
+15/87021/campos_512_v4
+15/87050/campos_512_v4
+15/87082/campos_512_v4
+15/87137/campos_512_v4
+15/87201/campos_512_v4
+15/87454/campos_512_v4
+15/87557/campos_512_v4
+15/87564/campos_512_v4
+15/87638/campos_512_v4
+15/87685/campos_512_v4
+15/87689/campos_512_v4
+15/87766/campos_512_v4
+15/87924/campos_512_v4
+15/87960/campos_512_v4
+15/87966/campos_512_v4
+15/88126/campos_512_v4
+15/88159/campos_512_v4
+15/88450/campos_512_v4
+15/88700/campos_512_v4
+15/88712/campos_512_v4
+15/88717/campos_512_v4
+15/88853/campos_512_v4
+15/88863/campos_512_v4
+15/88954/campos_512_v4
+15/89053/campos_512_v4
+15/89182/campos_512_v4
+15/89294/campos_512_v4
+15/89328/campos_512_v4
+15/89339/campos_512_v4
+15/89536/campos_512_v4
+15/89655/campos_512_v4
+15/89726/campos_512_v4
+15/89786/campos_512_v4
+15/89821/campos_512_v4
+15/89978/campos_512_v4
+15/90000/campos_512_v4
+150/760029/campos_512_v4
+150/760069/campos_512_v4
+150/760209/campos_512_v4
+150/760280/campos_512_v4
+150/760338/campos_512_v4
+150/760395/campos_512_v4
+150/760549/campos_512_v4
+150/760629/campos_512_v4
+150/760725/campos_512_v4
+150/760739/campos_512_v4
+150/760819/campos_512_v4
+150/760865/campos_512_v4
+150/761042/campos_512_v4
+150/761075/campos_512_v4
+150/761178/campos_512_v4
+150/761182/campos_512_v4
+150/761287/campos_512_v4
+150/761367/campos_512_v4
+150/761552/campos_512_v4
+150/761562/campos_512_v4
+150/761579/campos_512_v4
+150/761646/campos_512_v4
+150/761690/campos_512_v4
+150/761807/campos_512_v4
+150/761867/campos_512_v4
+150/761912/campos_512_v4
+150/762027/campos_512_v4
+150/762040/campos_512_v4
+150/762052/campos_512_v4
+150/762262/campos_512_v4
+150/762339/campos_512_v4
+150/762388/campos_512_v4
+150/762426/campos_512_v4
+150/762451/campos_512_v4
+150/762492/campos_512_v4
+150/762527/campos_512_v4
+150/762664/campos_512_v4
+150/762740/campos_512_v4
+150/762755/campos_512_v4
+150/762914/campos_512_v4
+150/762924/campos_512_v4
+150/763015/campos_512_v4
+150/763133/campos_512_v4
+150/763151/campos_512_v4
+150/763168/campos_512_v4
+150/763213/campos_512_v4
+150/763237/campos_512_v4
+150/763441/campos_512_v4
+150/763532/campos_512_v4
+150/763546/campos_512_v4
+150/763576/campos_512_v4
+150/763687/campos_512_v4
+150/763782/campos_512_v4
+150/763814/campos_512_v4
+150/763847/campos_512_v4
+150/763928/campos_512_v4
+150/763970/campos_512_v4
+150/763997/campos_512_v4
+150/764082/campos_512_v4
+150/764084/campos_512_v4
+150/764127/campos_512_v4
+150/764187/campos_512_v4
+150/764291/campos_512_v4
+150/764315/campos_512_v4
+150/764338/campos_512_v4
+150/764396/campos_512_v4
+150/764435/campos_512_v4
+150/764504/campos_512_v4
+150/764537/campos_512_v4
+150/764546/campos_512_v4
+150/764584/campos_512_v4
+150/764640/campos_512_v4
+151/765033/campos_512_v4
+151/765088/campos_512_v4
+151/765107/campos_512_v4
+151/765117/campos_512_v4
+151/765210/campos_512_v4
+151/765415/campos_512_v4
+151/765464/campos_512_v4
+151/765485/campos_512_v4
+151/765491/campos_512_v4
+151/765565/campos_512_v4
+151/765650/campos_512_v4
+151/765684/campos_512_v4
+151/765712/campos_512_v4
+151/765812/campos_512_v4
+151/765919/campos_512_v4
+151/765975/campos_512_v4
+151/765977/campos_512_v4
+151/766021/campos_512_v4
+151/766120/campos_512_v4
+151/766185/campos_512_v4
+151/766194/campos_512_v4
+151/766230/campos_512_v4
+151/766247/campos_512_v4
+151/766435/campos_512_v4
+151/766495/campos_512_v4
+151/766524/campos_512_v4
+151/766547/campos_512_v4
+151/766745/campos_512_v4
+151/766974/campos_512_v4
+151/767017/campos_512_v4
+151/767119/campos_512_v4
+151/767145/campos_512_v4
+151/767146/campos_512_v4
+151/767262/campos_512_v4
+151/767338/campos_512_v4
+151/767401/campos_512_v4
+151/767430/campos_512_v4
+151/767450/campos_512_v4
+151/767493/campos_512_v4
+151/767626/campos_512_v4
+151/767687/campos_512_v4
+151/767745/campos_512_v4
+151/767786/campos_512_v4
+151/767909/campos_512_v4
+151/767923/campos_512_v4
+151/767994/campos_512_v4
+151/768124/campos_512_v4
+151/768357/campos_512_v4
+151/768443/campos_512_v4
+151/768508/campos_512_v4
+151/768527/campos_512_v4
+151/768563/campos_512_v4
+151/768645/campos_512_v4
+151/768792/campos_512_v4
+151/768823/campos_512_v4
+151/768829/campos_512_v4
+151/768996/campos_512_v4
+151/769017/campos_512_v4
+151/769038/campos_512_v4
+151/769046/campos_512_v4
+151/769162/campos_512_v4
+151/769188/campos_512_v4
+151/769271/campos_512_v4
+151/769346/campos_512_v4
+151/769362/campos_512_v4
+151/769464/campos_512_v4
+151/769491/campos_512_v4
+151/769640/campos_512_v4
+151/769799/campos_512_v4
+151/769873/campos_512_v4
+152/770015/campos_512_v4
+152/770059/campos_512_v4
+152/770267/campos_512_v4
+152/770324/campos_512_v4
+152/770358/campos_512_v4
+152/770454/campos_512_v4
+152/770554/campos_512_v4
+152/770567/campos_512_v4
+152/770654/campos_512_v4
+152/770736/campos_512_v4
+152/770860/campos_512_v4
+152/771058/campos_512_v4
+152/771089/campos_512_v4
+152/771180/campos_512_v4
+152/771197/campos_512_v4
+152/771202/campos_512_v4
+152/771287/campos_512_v4
+152/771446/campos_512_v4
+152/771472/campos_512_v4
+152/771502/campos_512_v4
+152/771625/campos_512_v4
+152/771692/campos_512_v4
+152/771857/campos_512_v4
+152/771888/campos_512_v4
+152/771899/campos_512_v4
+152/771963/campos_512_v4
+152/772022/campos_512_v4
+152/772041/campos_512_v4
+152/772281/campos_512_v4
+152/772370/campos_512_v4
+152/772598/campos_512_v4
+152/772713/campos_512_v4
+152/772752/campos_512_v4
+152/772753/campos_512_v4
+152/772793/campos_512_v4
+152/772936/campos_512_v4
+152/773098/campos_512_v4
+152/773151/campos_512_v4
+152/773320/campos_512_v4
+152/773408/campos_512_v4
+152/773468/campos_512_v4
+152/773479/campos_512_v4
+152/773486/campos_512_v4
+152/773498/campos_512_v4
+152/773631/campos_512_v4
+152/773633/campos_512_v4
+152/773740/campos_512_v4
+152/773899/campos_512_v4
+152/773978/campos_512_v4
+152/774057/campos_512_v4
+152/774115/campos_512_v4
+152/774261/campos_512_v4
+152/774347/campos_512_v4
+152/774411/campos_512_v4
+152/774419/campos_512_v4
+152/774523/campos_512_v4
+152/774575/campos_512_v4
+152/774580/campos_512_v4
+152/774699/campos_512_v4
+152/774869/campos_512_v4
+152/774929/campos_512_v4
+152/774940/campos_512_v4
+152/774987/campos_512_v4
+153/775064/campos_512_v4
+153/775138/campos_512_v4
+153/775163/campos_512_v4
+153/775188/campos_512_v4
+153/775210/campos_512_v4
+153/775229/campos_512_v4
+153/775308/campos_512_v4
+153/775335/campos_512_v4
+153/775486/campos_512_v4
+153/775577/campos_512_v4
+153/775802/campos_512_v4
+153/775825/campos_512_v4
+153/775830/campos_512_v4
+153/775896/campos_512_v4
+153/776093/campos_512_v4
+153/776290/campos_512_v4
+153/776304/campos_512_v4
+153/776305/campos_512_v4
+153/776532/campos_512_v4
+153/776539/campos_512_v4
+153/776622/campos_512_v4
+153/776667/campos_512_v4
+153/776752/campos_512_v4
+153/776802/campos_512_v4
+153/776806/campos_512_v4
+153/776950/campos_512_v4
+153/777038/campos_512_v4
+153/777358/campos_512_v4
+153/777394/campos_512_v4
+153/777474/campos_512_v4
+153/777535/campos_512_v4
+153/777571/campos_512_v4
+153/777595/campos_512_v4
+153/777606/campos_512_v4
+153/777617/campos_512_v4
+153/777652/campos_512_v4
+153/777653/campos_512_v4
+153/777840/campos_512_v4
+153/778035/campos_512_v4
+153/778112/campos_512_v4
+153/778118/campos_512_v4
+153/778121/campos_512_v4
+153/778311/campos_512_v4
+153/778461/campos_512_v4
+153/778581/campos_512_v4
+153/778597/campos_512_v4
+153/778618/campos_512_v4
+153/778639/campos_512_v4
+153/778716/campos_512_v4
+153/778724/campos_512_v4
+153/778764/campos_512_v4
+153/779019/campos_512_v4
+153/779124/campos_512_v4
+153/779133/campos_512_v4
+153/779330/campos_512_v4
+153/779368/campos_512_v4
+153/779575/campos_512_v4
+153/779688/campos_512_v4
+153/779768/campos_512_v4
+153/779834/campos_512_v4
+153/779853/campos_512_v4
+153/779866/campos_512_v4
+153/779950/campos_512_v4
+153/779967/campos_512_v4
+154/780018/campos_512_v4
+154/780123/campos_512_v4
+154/780182/campos_512_v4
+154/780194/campos_512_v4
+154/780300/campos_512_v4
+154/780302/campos_512_v4
+154/780324/campos_512_v4
+154/780398/campos_512_v4
+154/780594/campos_512_v4
+154/780595/campos_512_v4
+154/780623/campos_512_v4
+154/780821/campos_512_v4
+154/780835/campos_512_v4
+154/780889/campos_512_v4
+154/781017/campos_512_v4
+154/781242/campos_512_v4
+154/781284/campos_512_v4
+154/781306/campos_512_v4
+154/781370/campos_512_v4
+154/781693/campos_512_v4
+154/781719/campos_512_v4
+154/781776/campos_512_v4
+154/781922/campos_512_v4
+154/781933/campos_512_v4
+154/782038/campos_512_v4
+154/782080/campos_512_v4
+154/782114/campos_512_v4
+154/782171/campos_512_v4
+154/782213/campos_512_v4
+154/782332/campos_512_v4
+154/782339/campos_512_v4
+154/782351/campos_512_v4
+154/782463/campos_512_v4
+154/782487/campos_512_v4
+154/782498/campos_512_v4
+154/782534/campos_512_v4
+154/782719/campos_512_v4
+154/782730/campos_512_v4
+154/782760/campos_512_v4
+154/782926/campos_512_v4
+154/783000/campos_512_v4
+154/783133/campos_512_v4
+154/783262/campos_512_v4
+154/783324/campos_512_v4
+154/783501/campos_512_v4
+154/783713/campos_512_v4
+154/783721/campos_512_v4
+154/783749/campos_512_v4
+154/783903/campos_512_v4
+154/783943/campos_512_v4
+154/784033/campos_512_v4
+154/784112/campos_512_v4
+154/784144/campos_512_v4
+154/784202/campos_512_v4
+154/784266/campos_512_v4
+154/784300/campos_512_v4
+154/784336/campos_512_v4
+154/784338/campos_512_v4
+154/784428/campos_512_v4
+154/784445/campos_512_v4
+154/784450/campos_512_v4
+154/784469/campos_512_v4
+154/784486/campos_512_v4
+154/784499/campos_512_v4
+154/784589/campos_512_v4
+154/784618/campos_512_v4
+154/784831/campos_512_v4
+154/784938/campos_512_v4
+154/784980/campos_512_v4
+155/785079/campos_512_v4
+155/785084/campos_512_v4
+155/785116/campos_512_v4
+155/785173/campos_512_v4
+155/785223/campos_512_v4
+155/785309/campos_512_v4
+155/785345/campos_512_v4
+155/785401/campos_512_v4
+155/785504/campos_512_v4
+155/785572/campos_512_v4
+155/785634/campos_512_v4
+155/785679/campos_512_v4
+155/785725/campos_512_v4
+155/785747/campos_512_v4
+155/785893/campos_512_v4
+155/785958/campos_512_v4
+155/786055/campos_512_v4
+155/786090/campos_512_v4
+155/786095/campos_512_v4
+155/786361/campos_512_v4
+155/786532/campos_512_v4
+155/786561/campos_512_v4
+155/786572/campos_512_v4
+155/786603/campos_512_v4
+155/786628/campos_512_v4
+155/786680/campos_512_v4
+155/786717/campos_512_v4
+155/786881/campos_512_v4
+155/786901/campos_512_v4
+155/786965/campos_512_v4
+155/787025/campos_512_v4
+155/787049/campos_512_v4
+155/787344/campos_512_v4
+155/787451/campos_512_v4
+155/787556/campos_512_v4
+155/787559/campos_512_v4
+155/787596/campos_512_v4
+155/787840/campos_512_v4
+155/787852/campos_512_v4
+155/787905/campos_512_v4
+155/787987/campos_512_v4
+155/788035/campos_512_v4
+155/788041/campos_512_v4
+155/788059/campos_512_v4
+155/788075/campos_512_v4
+155/788123/campos_512_v4
+155/788144/campos_512_v4
+155/788505/campos_512_v4
+155/788528/campos_512_v4
+155/788533/campos_512_v4
+155/788537/campos_512_v4
+155/788621/campos_512_v4
+155/788735/campos_512_v4
+155/788750/campos_512_v4
+155/788874/campos_512_v4
+155/788959/campos_512_v4
+155/789168/campos_512_v4
+155/789205/campos_512_v4
+155/789258/campos_512_v4
+155/789296/campos_512_v4
+155/789428/campos_512_v4
+155/789528/campos_512_v4
+155/789541/campos_512_v4
+155/789560/campos_512_v4
+155/789721/campos_512_v4
+155/789748/campos_512_v4
+155/789796/campos_512_v4
+155/789831/campos_512_v4
+155/789837/campos_512_v4
+155/789946/campos_512_v4
+156/790366/campos_512_v4
+156/790458/campos_512_v4
+156/790517/campos_512_v4
+156/790528/campos_512_v4
+156/790639/campos_512_v4
+156/790675/campos_512_v4
+156/790737/campos_512_v4
+156/790943/campos_512_v4
+156/791071/campos_512_v4
+156/791104/campos_512_v4
+156/791260/campos_512_v4
+156/791426/campos_512_v4
+156/791518/campos_512_v4
+156/791685/campos_512_v4
+156/791739/campos_512_v4
+156/791914/campos_512_v4
+156/791922/campos_512_v4
+156/792078/campos_512_v4
+156/792188/campos_512_v4
+156/792402/campos_512_v4
+156/792464/campos_512_v4
+156/792638/campos_512_v4
+156/792738/campos_512_v4
+156/792820/campos_512_v4
+156/792837/campos_512_v4
+156/792896/campos_512_v4
+156/792954/campos_512_v4
+156/793067/campos_512_v4
+156/793222/campos_512_v4
+156/793249/campos_512_v4
+156/793293/campos_512_v4
+156/793373/campos_512_v4
+156/793410/campos_512_v4
+156/793419/campos_512_v4
+156/793507/campos_512_v4
+156/793527/campos_512_v4
+156/793718/campos_512_v4
+156/793766/campos_512_v4
+156/793768/campos_512_v4
+156/793806/campos_512_v4
+156/793842/campos_512_v4
+156/793925/campos_512_v4
+156/793942/campos_512_v4
+156/793981/campos_512_v4
+156/794097/campos_512_v4
+156/794233/campos_512_v4
+156/794280/campos_512_v4
+156/794285/campos_512_v4
+156/794296/campos_512_v4
+156/794334/campos_512_v4
+156/794340/campos_512_v4
+156/794342/campos_512_v4
+156/794394/campos_512_v4
+156/794400/campos_512_v4
+156/794500/campos_512_v4
+156/794552/campos_512_v4
+156/794869/campos_512_v4
+156/794870/campos_512_v4
+156/794943/campos_512_v4
+156/794953/campos_512_v4
+157/795054/campos_512_v4
+157/795076/campos_512_v4
+157/795130/campos_512_v4
+157/795156/campos_512_v4
+157/795270/campos_512_v4
+157/795393/campos_512_v4
+157/795447/campos_512_v4
+157/795465/campos_512_v4
+157/795497/campos_512_v4
+157/795596/campos_512_v4
+157/795677/campos_512_v4
+157/795709/campos_512_v4
+157/795736/campos_512_v4
+157/795778/campos_512_v4
+157/795860/campos_512_v4
+157/795874/campos_512_v4
+157/795974/campos_512_v4
+157/795994/campos_512_v4
+157/796276/campos_512_v4
+157/796357/campos_512_v4
+157/796404/campos_512_v4
+157/796464/campos_512_v4
+157/796631/campos_512_v4
+157/796742/campos_512_v4
+157/796784/campos_512_v4
+157/796909/campos_512_v4
+157/796930/campos_512_v4
+157/796966/campos_512_v4
+157/796990/campos_512_v4
+157/797111/campos_512_v4
+157/797298/campos_512_v4
+157/797320/campos_512_v4
+157/797690/campos_512_v4
+157/797753/campos_512_v4
+157/797777/campos_512_v4
+157/797894/campos_512_v4
+157/797908/campos_512_v4
+157/797918/campos_512_v4
+157/797936/campos_512_v4
+157/798216/campos_512_v4
+157/798294/campos_512_v4
+157/798395/campos_512_v4
+157/798434/campos_512_v4
+157/798493/campos_512_v4
+157/798713/campos_512_v4
+157/798918/campos_512_v4
+157/798937/campos_512_v4
+157/798951/campos_512_v4
+157/798997/campos_512_v4
+157/799044/campos_512_v4
+157/799135/campos_512_v4
+157/799160/campos_512_v4
+157/799200/campos_512_v4
+157/799212/campos_512_v4
+157/799558/campos_512_v4
+157/799615/campos_512_v4
+157/799690/campos_512_v4
+157/799795/campos_512_v4
+157/799874/campos_512_v4
+158/800002/campos_512_v4
+158/800085/campos_512_v4
+158/800098/campos_512_v4
+158/800119/campos_512_v4
+158/800120/campos_512_v4
+158/800194/campos_512_v4
+158/800212/campos_512_v4
+158/800362/campos_512_v4
+158/800440/campos_512_v4
+158/800482/campos_512_v4
+158/800537/campos_512_v4
+158/800788/campos_512_v4
+158/800834/campos_512_v4
+158/801152/campos_512_v4
+158/801169/campos_512_v4
+158/801353/campos_512_v4
+158/801420/campos_512_v4
+158/801529/campos_512_v4
+158/801742/campos_512_v4
+158/801748/campos_512_v4
+158/801824/campos_512_v4
+158/801865/campos_512_v4
+158/801898/campos_512_v4
+158/801995/campos_512_v4
+158/802027/campos_512_v4
+158/802061/campos_512_v4
+158/802112/campos_512_v4
+158/802123/campos_512_v4
+158/802173/campos_512_v4
+158/802247/campos_512_v4
+158/802341/campos_512_v4
+158/802514/campos_512_v4
+158/802593/campos_512_v4
+158/802687/campos_512_v4
+158/802762/campos_512_v4
+158/802809/campos_512_v4
+158/802848/campos_512_v4
+158/802883/campos_512_v4
+158/802952/campos_512_v4
+158/802955/campos_512_v4
+158/803093/campos_512_v4
+158/803192/campos_512_v4
+158/803195/campos_512_v4
+158/803239/campos_512_v4
+158/803311/campos_512_v4
+158/803438/campos_512_v4
+158/803558/campos_512_v4
+158/803560/campos_512_v4
+158/803731/campos_512_v4
+158/803768/campos_512_v4
+158/803784/campos_512_v4
+158/803806/campos_512_v4
+158/804007/campos_512_v4
+158/804118/campos_512_v4
+158/804336/campos_512_v4
+158/804371/campos_512_v4
+158/804456/campos_512_v4
+158/804521/campos_512_v4
+158/804586/campos_512_v4
+158/804711/campos_512_v4
+158/804859/campos_512_v4
+158/804911/campos_512_v4
+159/805088/campos_512_v4
+159/805111/campos_512_v4
+159/805289/campos_512_v4
+159/805296/campos_512_v4
+159/805441/campos_512_v4
+159/805810/campos_512_v4
+159/805815/campos_512_v4
+159/806059/campos_512_v4
+159/806060/campos_512_v4
+159/806281/campos_512_v4
+159/806412/campos_512_v4
+159/806423/campos_512_v4
+159/806486/campos_512_v4
+159/806813/campos_512_v4
+159/807013/campos_512_v4
+159/807062/campos_512_v4
+159/807142/campos_512_v4
+159/807211/campos_512_v4
+159/807272/campos_512_v4
+159/807340/campos_512_v4
+159/807529/campos_512_v4
+159/807656/campos_512_v4
+159/807715/campos_512_v4
+159/807961/campos_512_v4
+159/808014/campos_512_v4
+159/808089/campos_512_v4
+159/808099/campos_512_v4
+159/808111/campos_512_v4
+159/808116/campos_512_v4
+159/808119/campos_512_v4
+159/808217/campos_512_v4
+159/808309/campos_512_v4
+159/808311/campos_512_v4
+159/808418/campos_512_v4
+159/808592/campos_512_v4
+159/808629/campos_512_v4
+159/808691/campos_512_v4
+16/90085/campos_512_v4
+16/90119/campos_512_v4
+16/90142/campos_512_v4
+16/90177/campos_512_v4
+16/90219/campos_512_v4
+16/90233/campos_512_v4
+16/90357/campos_512_v4
+16/90448/campos_512_v4
+16/90622/campos_512_v4
+16/90629/campos_512_v4
+16/90670/campos_512_v4
+16/90710/campos_512_v4
+16/90928/campos_512_v4
+16/90963/campos_512_v4
+16/91126/campos_512_v4
+16/91308/campos_512_v4
+16/91422/campos_512_v4
+16/91480/campos_512_v4
+16/91634/campos_512_v4
+16/91640/campos_512_v4
+16/91685/campos_512_v4
+16/91690/campos_512_v4
+16/91775/campos_512_v4
+16/92382/campos_512_v4
+16/92427/campos_512_v4
+16/92438/campos_512_v4
+16/92581/campos_512_v4
+16/93050/campos_512_v4
+16/93141/campos_512_v4
+16/93302/campos_512_v4
+16/93452/campos_512_v4
+16/93490/campos_512_v4
+16/93552/campos_512_v4
+16/93599/campos_512_v4
+16/93612/campos_512_v4
+16/93739/campos_512_v4
+16/93751/campos_512_v4
+16/93757/campos_512_v4
+16/93921/campos_512_v4
+16/93992/campos_512_v4
+16/94121/campos_512_v4
+16/94212/campos_512_v4
+16/94307/campos_512_v4
+16/94437/campos_512_v4
+16/94443/campos_512_v4
+16/94502/campos_512_v4
+16/94555/campos_512_v4
+16/94598/campos_512_v4
+16/94629/campos_512_v4
+16/94658/campos_512_v4
+16/94701/campos_512_v4
+16/94842/campos_512_v4
+16/94887/campos_512_v4
+16/94942/campos_512_v4
+16/94967/campos_512_v4
+16/94999/campos_512_v4
+17/95009/campos_512_v4
+17/95027/campos_512_v4
+17/95081/campos_512_v4
+17/95094/campos_512_v4
+17/95096/campos_512_v4
+17/95185/campos_512_v4
+17/95407/campos_512_v4
+17/95497/campos_512_v4
+17/95543/campos_512_v4
+17/95559/campos_512_v4
+17/95623/campos_512_v4
+17/95748/campos_512_v4
+17/95772/campos_512_v4
+17/95843/campos_512_v4
+17/96049/campos_512_v4
+17/96119/campos_512_v4
+17/96200/campos_512_v4
+17/96407/campos_512_v4
+17/96426/campos_512_v4
+17/96700/campos_512_v4
+17/96741/campos_512_v4
+17/96770/campos_512_v4
+17/96875/campos_512_v4
+17/97083/campos_512_v4
+17/97305/campos_512_v4
+17/97322/campos_512_v4
+17/97440/campos_512_v4
+17/97479/campos_512_v4
+17/97522/campos_512_v4
+17/97623/campos_512_v4
+17/97626/campos_512_v4
+17/97634/campos_512_v4
+17/97744/campos_512_v4
+17/97977/campos_512_v4
+17/98167/campos_512_v4
+17/98210/campos_512_v4
+17/98277/campos_512_v4
+17/98328/campos_512_v4
+17/98333/campos_512_v4
+17/98401/campos_512_v4
+17/98419/campos_512_v4
+17/98564/campos_512_v4
+17/98602/campos_512_v4
+17/98711/campos_512_v4
+17/98742/campos_512_v4
+17/98743/campos_512_v4
+17/98909/campos_512_v4
+17/98994/campos_512_v4
+17/99300/campos_512_v4
+17/99500/campos_512_v4
+17/99557/campos_512_v4
+17/99566/campos_512_v4
+17/99571/campos_512_v4
+17/99585/campos_512_v4
+17/99609/campos_512_v4
+17/99687/campos_512_v4
+17/99877/campos_512_v4
+2/20062/campos_512_v4
+2/20258/campos_512_v4
+2/20397/campos_512_v4
+2/20479/campos_512_v4
+2/20721/campos_512_v4
+2/20813/campos_512_v4
+2/20895/campos_512_v4
+2/21118/campos_512_v4
+2/21216/campos_512_v4
+2/21406/campos_512_v4
+2/21461/campos_512_v4
+2/21463/campos_512_v4
+2/21483/campos_512_v4
+2/21638/campos_512_v4
+2/21660/campos_512_v4
+2/21713/campos_512_v4
+2/21719/campos_512_v4
+2/21731/campos_512_v4
+2/21768/campos_512_v4
+2/21809/campos_512_v4
+2/21943/campos_512_v4
+2/22013/campos_512_v4
+2/22108/campos_512_v4
+2/22123/campos_512_v4
+2/22199/campos_512_v4
+2/22236/campos_512_v4
+2/22321/campos_512_v4
+2/22328/campos_512_v4
+2/22343/campos_512_v4
+2/22450/campos_512_v4
+2/22517/campos_512_v4
+2/22606/campos_512_v4
+2/22758/campos_512_v4
+2/22760/campos_512_v4
+2/23024/campos_512_v4
+2/23025/campos_512_v4
+2/23076/campos_512_v4
+2/23155/campos_512_v4
+2/23172/campos_512_v4
+2/23300/campos_512_v4
+2/23420/campos_512_v4
+2/23443/campos_512_v4
+2/23587/campos_512_v4
+2/23650/campos_512_v4
+2/23792/campos_512_v4
+2/23903/campos_512_v4
+2/23929/campos_512_v4
+2/23941/campos_512_v4
+2/24143/campos_512_v4
+2/24224/campos_512_v4
+2/24311/campos_512_v4
+2/24349/campos_512_v4
+2/24356/campos_512_v4
+2/24426/campos_512_v4
+2/24443/campos_512_v4
+2/24525/campos_512_v4
+2/24580/campos_512_v4
+2/24907/campos_512_v4
+2/24968/campos_512_v4
+2/24995/campos_512_v4
+23/125029/campos_512_v4
+23/125076/campos_512_v4
+23/125098/campos_512_v4
+23/125199/campos_512_v4
+23/125439/campos_512_v4
+23/125647/campos_512_v4
+23/125675/campos_512_v4
+23/125748/campos_512_v4
+23/125835/campos_512_v4
+23/125880/campos_512_v4
+23/126036/campos_512_v4
+23/126040/campos_512_v4
+23/126285/campos_512_v4
+23/126297/campos_512_v4
+23/126430/campos_512_v4
+23/126457/campos_512_v4
+23/126468/campos_512_v4
+23/126495/campos_512_v4
+23/126535/campos_512_v4
+23/126541/campos_512_v4
+23/126562/campos_512_v4
+23/126926/campos_512_v4
+23/126967/campos_512_v4
+23/127029/campos_512_v4
+23/127191/campos_512_v4
+23/127217/campos_512_v4
+23/127284/campos_512_v4
+23/127294/campos_512_v4
+23/127348/campos_512_v4
+23/127353/campos_512_v4
+23/127360/campos_512_v4
+23/127415/campos_512_v4
+23/127802/campos_512_v4
+23/127933/campos_512_v4
+23/127946/campos_512_v4
+23/128414/campos_512_v4
+23/128540/campos_512_v4
+23/128558/campos_512_v4
+23/128628/campos_512_v4
+23/128679/campos_512_v4
+23/128706/campos_512_v4
+23/128754/campos_512_v4
+23/128757/campos_512_v4
+23/129313/campos_512_v4
+23/129444/campos_512_v4
+23/129450/campos_512_v4
+23/129454/campos_512_v4
+23/129478/campos_512_v4
+23/129510/campos_512_v4
+23/129514/campos_512_v4
+23/129554/campos_512_v4
+23/129619/campos_512_v4
+23/129745/campos_512_v4
+23/129747/campos_512_v4
+23/129846/campos_512_v4
+23/129895/campos_512_v4
+23/129930/campos_512_v4
+23/129934/campos_512_v4
+24/130114/campos_512_v4
+24/130188/campos_512_v4
+24/130300/campos_512_v4
+24/130354/campos_512_v4
+24/130525/campos_512_v4
+24/130530/campos_512_v4
+24/130598/campos_512_v4
+24/130883/campos_512_v4
+24/130913/campos_512_v4
+24/130919/campos_512_v4
+24/131172/campos_512_v4
+24/131212/campos_512_v4
+24/131235/campos_512_v4
+24/131336/campos_512_v4
+24/131404/campos_512_v4
+24/131659/campos_512_v4
+24/131810/campos_512_v4
+24/131966/campos_512_v4
+24/131968/campos_512_v4
+24/131989/campos_512_v4
+24/132012/campos_512_v4
+24/132437/campos_512_v4
+24/132566/campos_512_v4
+24/132640/campos_512_v4
+24/132702/campos_512_v4
+24/132718/campos_512_v4
+24/132835/campos_512_v4
+24/132965/campos_512_v4
+24/133000/campos_512_v4
+24/133053/campos_512_v4
+24/133060/campos_512_v4
+24/133225/campos_512_v4
+24/133259/campos_512_v4
+24/133447/campos_512_v4
+24/133458/campos_512_v4
+24/133500/campos_512_v4
+24/133523/campos_512_v4
+24/133580/campos_512_v4
+24/133590/campos_512_v4
+24/133693/campos_512_v4
+24/133716/campos_512_v4
+24/133828/campos_512_v4
+24/133916/campos_512_v4
+24/134285/campos_512_v4
+24/134369/campos_512_v4
+24/134383/campos_512_v4
+24/134425/campos_512_v4
+24/134536/campos_512_v4
+24/134551/campos_512_v4
+24/134718/campos_512_v4
+24/134975/campos_512_v4
+25/135104/campos_512_v4
+25/135191/campos_512_v4
+25/135239/campos_512_v4
+25/135276/campos_512_v4
+25/135402/campos_512_v4
+25/135419/campos_512_v4
+25/135521/campos_512_v4
+25/135630/campos_512_v4
+25/135679/campos_512_v4
+25/135751/campos_512_v4
+25/135780/campos_512_v4
+25/135902/campos_512_v4
+25/136010/campos_512_v4
+25/136228/campos_512_v4
+25/136274/campos_512_v4
+25/136376/campos_512_v4
+25/136464/campos_512_v4
+25/136519/campos_512_v4
+25/136661/campos_512_v4
+25/136730/campos_512_v4
+25/136740/campos_512_v4
+25/137168/campos_512_v4
+25/137189/campos_512_v4
+25/137207/campos_512_v4
+25/137298/campos_512_v4
+25/137391/campos_512_v4
+25/137410/campos_512_v4
+25/137480/campos_512_v4
+25/137624/campos_512_v4
+25/137644/campos_512_v4
+25/137690/campos_512_v4
+25/137752/campos_512_v4
+25/137860/campos_512_v4
+25/137909/campos_512_v4
+25/138117/campos_512_v4
+25/138245/campos_512_v4
+25/138287/campos_512_v4
+25/138290/campos_512_v4
+25/138415/campos_512_v4
+25/138629/campos_512_v4
+25/138705/campos_512_v4
+25/138865/campos_512_v4
+25/138888/campos_512_v4
+25/139196/campos_512_v4
+25/139226/campos_512_v4
+25/139275/campos_512_v4
+25/139507/campos_512_v4
+25/139646/campos_512_v4
+25/139658/campos_512_v4
+25/139744/campos_512_v4
+26/140060/campos_512_v4
+26/140091/campos_512_v4
+26/140117/campos_512_v4
+26/140145/campos_512_v4
+26/140185/campos_512_v4
+26/140217/campos_512_v4
+26/140278/campos_512_v4
+26/140308/campos_512_v4
+26/140437/campos_512_v4
+26/140477/campos_512_v4
+26/140549/campos_512_v4
+26/140556/campos_512_v4
+26/140634/campos_512_v4
+26/140656/campos_512_v4
+26/140763/campos_512_v4
+26/140775/campos_512_v4
+26/140780/campos_512_v4
+26/140790/campos_512_v4
+26/140793/campos_512_v4
+26/140815/campos_512_v4
+26/140823/campos_512_v4
+26/140859/campos_512_v4
+26/140984/campos_512_v4
+26/140995/campos_512_v4
+26/141021/campos_512_v4
+26/141025/campos_512_v4
+26/141193/campos_512_v4
+26/141267/campos_512_v4
+26/141404/campos_512_v4
+26/141774/campos_512_v4
+26/141805/campos_512_v4
+26/141837/campos_512_v4
+26/141842/campos_512_v4
+26/141873/campos_512_v4
+26/141878/campos_512_v4
+26/141963/campos_512_v4
+26/142274/campos_512_v4
+26/142280/campos_512_v4
+26/142298/campos_512_v4
+26/142310/campos_512_v4
+26/142348/campos_512_v4
+26/142437/campos_512_v4
+26/142716/campos_512_v4
+26/142816/campos_512_v4
+26/142898/campos_512_v4
+26/142902/campos_512_v4
+26/142909/campos_512_v4
+26/142991/campos_512_v4
+26/143154/campos_512_v4
+26/143155/campos_512_v4
+26/143228/campos_512_v4
+26/143292/campos_512_v4
+26/143541/campos_512_v4
+26/143556/campos_512_v4
+26/143587/campos_512_v4
+26/143785/campos_512_v4
+26/143824/campos_512_v4
+26/143854/campos_512_v4
+26/144498/campos_512_v4
+26/144588/campos_512_v4
+26/144596/campos_512_v4
+26/144801/campos_512_v4
+26/144803/campos_512_v4
+26/144808/campos_512_v4
+26/144809/campos_512_v4
+26/144975/campos_512_v4
+26/144991/campos_512_v4
+27/145144/campos_512_v4
+27/145382/campos_512_v4
+27/145483/campos_512_v4
+27/145544/campos_512_v4
+27/145948/campos_512_v4
+27/146077/campos_512_v4
+27/146207/campos_512_v4
+27/146347/campos_512_v4
+27/146404/campos_512_v4
+27/146526/campos_512_v4
+27/146934/campos_512_v4
+27/146960/campos_512_v4
+27/146967/campos_512_v4
+27/147164/campos_512_v4
+27/147294/campos_512_v4
+27/147328/campos_512_v4
+27/147546/campos_512_v4
+27/147841/campos_512_v4
+27/147859/campos_512_v4
+27/147889/campos_512_v4
+27/147898/campos_512_v4
+27/148030/campos_512_v4
+27/148402/campos_512_v4
+27/148662/campos_512_v4
+27/148799/campos_512_v4
+27/148848/campos_512_v4
+27/148914/campos_512_v4
+27/149095/campos_512_v4
+27/149315/campos_512_v4
+27/149613/campos_512_v4
+27/149705/campos_512_v4
+27/149763/campos_512_v4
+27/149795/campos_512_v4
+27/149911/campos_512_v4
+28/150107/campos_512_v4
+28/150128/campos_512_v4
+28/150526/campos_512_v4
+28/150547/campos_512_v4
+28/150638/campos_512_v4
+28/150683/campos_512_v4
+28/150715/campos_512_v4
+28/150962/campos_512_v4
+28/150994/campos_512_v4
+28/151110/campos_512_v4
+28/151162/campos_512_v4
+28/151214/campos_512_v4
+28/151414/campos_512_v4
+28/151431/campos_512_v4
+28/151443/campos_512_v4
+28/151472/campos_512_v4
+28/151558/campos_512_v4
+28/151589/campos_512_v4
+28/151700/campos_512_v4
+28/151752/campos_512_v4
+28/151823/campos_512_v4
+28/151980/campos_512_v4
+28/151989/campos_512_v4
+28/152126/campos_512_v4
+28/152174/campos_512_v4
+28/152464/campos_512_v4
+28/152525/campos_512_v4
+28/152550/campos_512_v4
+28/152551/campos_512_v4
+28/152591/campos_512_v4
+28/152593/campos_512_v4
+28/152668/campos_512_v4
+28/152720/campos_512_v4
+28/152912/campos_512_v4
+28/152961/campos_512_v4
+28/152969/campos_512_v4
+28/153707/campos_512_v4
+28/153759/campos_512_v4
+28/153806/campos_512_v4
+28/153821/campos_512_v4
+28/153892/campos_512_v4
+28/153913/campos_512_v4
+28/153924/campos_512_v4
+28/153938/campos_512_v4
+28/154187/campos_512_v4
+28/154301/campos_512_v4
+28/154358/campos_512_v4
+28/154432/campos_512_v4
+28/154746/campos_512_v4
+28/154758/campos_512_v4
+28/154915/campos_512_v4
+29/155077/campos_512_v4
+29/155194/campos_512_v4
+29/155219/campos_512_v4
+29/155226/campos_512_v4
+29/155261/campos_512_v4
+29/155359/campos_512_v4
+29/155410/campos_512_v4
+29/155477/campos_512_v4
+29/155562/campos_512_v4
+29/155592/campos_512_v4
+29/155623/campos_512_v4
+29/155819/campos_512_v4
+29/155859/campos_512_v4
+29/155861/campos_512_v4
+29/155996/campos_512_v4
+29/156029/campos_512_v4
+29/156212/campos_512_v4
+29/156281/campos_512_v4
+29/156375/campos_512_v4
+29/156486/campos_512_v4
+29/156494/campos_512_v4
+29/156558/campos_512_v4
+29/156593/campos_512_v4
+29/156867/campos_512_v4
+29/156974/campos_512_v4
+29/157160/campos_512_v4
+29/157270/campos_512_v4
+29/157307/campos_512_v4
+29/157350/campos_512_v4
+29/157359/campos_512_v4
+29/157379/campos_512_v4
+29/157380/campos_512_v4
+29/157381/campos_512_v4
+29/157503/campos_512_v4
+29/157570/campos_512_v4
+29/157780/campos_512_v4
+29/157895/campos_512_v4
+29/158057/campos_512_v4
+29/158215/campos_512_v4
+29/158230/campos_512_v4
+29/158276/campos_512_v4
+29/158298/campos_512_v4
+29/158435/campos_512_v4
+29/158542/campos_512_v4
+29/158561/campos_512_v4
+29/158612/campos_512_v4
+29/158620/campos_512_v4
+29/158648/campos_512_v4
+29/158657/campos_512_v4
+29/158833/campos_512_v4
+29/158835/campos_512_v4
+29/158939/campos_512_v4
+29/159084/campos_512_v4
+29/159244/campos_512_v4
+29/159268/campos_512_v4
+29/159525/campos_512_v4
+29/159636/campos_512_v4
+29/159855/campos_512_v4
+29/159895/campos_512_v4
+30/160012/campos_512_v4
+30/160031/campos_512_v4
+30/160042/campos_512_v4
+30/160063/campos_512_v4
+30/160084/campos_512_v4
+30/160250/campos_512_v4
+30/160314/campos_512_v4
+30/160658/campos_512_v4
+30/160686/campos_512_v4
+30/160697/campos_512_v4
+30/160812/campos_512_v4
+30/160879/campos_512_v4
+30/160957/campos_512_v4
+30/160970/campos_512_v4
+30/160982/campos_512_v4
+30/161072/campos_512_v4
+30/161087/campos_512_v4
+30/161273/campos_512_v4
+30/161291/campos_512_v4
+30/161473/campos_512_v4
+30/161498/campos_512_v4
+30/161591/campos_512_v4
+30/161853/campos_512_v4
+30/161961/campos_512_v4
+30/161966/campos_512_v4
+30/161988/campos_512_v4
+30/162142/campos_512_v4
+30/162165/campos_512_v4
+30/162211/campos_512_v4
+30/162270/campos_512_v4
+30/162338/campos_512_v4
+30/162349/campos_512_v4
+30/162517/campos_512_v4
+30/162541/campos_512_v4
+30/162580/campos_512_v4
+30/163056/campos_512_v4
+30/163201/campos_512_v4
+30/163293/campos_512_v4
+30/163894/campos_512_v4
+30/163910/campos_512_v4
+30/164140/campos_512_v4
+30/164269/campos_512_v4
+30/164279/campos_512_v4
+30/164371/campos_512_v4
+30/164484/campos_512_v4
+30/164590/campos_512_v4
+30/164841/campos_512_v4
+30/164955/campos_512_v4
+31/165052/campos_512_v4
+31/165104/campos_512_v4
+31/165112/campos_512_v4
+31/165158/campos_512_v4
+31/165159/campos_512_v4
+31/165308/campos_512_v4
+31/166325/campos_512_v4
+31/166416/campos_512_v4
+31/166573/campos_512_v4
+31/166589/campos_512_v4
+31/166692/campos_512_v4
+31/166805/campos_512_v4
+31/166844/campos_512_v4
+31/166881/campos_512_v4
+31/166885/campos_512_v4
+31/166983/campos_512_v4
+31/167060/campos_512_v4
+31/167116/campos_512_v4
+31/167204/campos_512_v4
+31/167843/campos_512_v4
+31/167876/campos_512_v4
+31/168154/campos_512_v4
+31/168314/campos_512_v4
+31/168368/campos_512_v4
+31/168386/campos_512_v4
+31/168835/campos_512_v4
+31/168925/campos_512_v4
+31/168959/campos_512_v4
+31/169165/campos_512_v4
+31/169209/campos_512_v4
+31/169217/campos_512_v4
+31/169259/campos_512_v4
+31/169270/campos_512_v4
+31/169273/campos_512_v4
+31/169347/campos_512_v4
+31/169410/campos_512_v4
+31/169508/campos_512_v4
+31/169566/campos_512_v4
+31/169584/campos_512_v4
+31/169601/campos_512_v4
+31/169693/campos_512_v4
+31/169726/campos_512_v4
+31/169727/campos_512_v4
+31/169803/campos_512_v4
+31/169829/campos_512_v4
+31/169931/campos_512_v4
+31/169943/campos_512_v4
+31/169974/campos_512_v4
+32/170018/campos_512_v4
+32/170175/campos_512_v4
+32/170232/campos_512_v4
+32/170348/campos_512_v4
+32/170355/campos_512_v4
+32/170408/campos_512_v4
+32/170770/campos_512_v4
+32/170940/campos_512_v4
+32/170951/campos_512_v4
+32/171167/campos_512_v4
+32/171308/campos_512_v4
+32/171368/campos_512_v4
+32/171398/campos_512_v4
+32/171948/campos_512_v4
+32/172000/campos_512_v4
+32/172065/campos_512_v4
+32/172268/campos_512_v4
+32/172302/campos_512_v4
+32/172380/campos_512_v4
+32/172485/campos_512_v4
+32/172497/campos_512_v4
+32/172866/campos_512_v4
+32/173030/campos_512_v4
+32/173168/campos_512_v4
+32/173445/campos_512_v4
+32/173535/campos_512_v4
+32/173588/campos_512_v4
+32/173828/campos_512_v4
+32/173896/campos_512_v4
+32/173944/campos_512_v4
+32/173999/campos_512_v4
+32/174171/campos_512_v4
+32/174219/campos_512_v4
+32/174235/campos_512_v4
+32/174350/campos_512_v4
+32/174461/campos_512_v4
+32/174505/campos_512_v4
+32/174527/campos_512_v4
+32/174718/campos_512_v4
+32/174729/campos_512_v4
+32/174754/campos_512_v4
+32/174808/campos_512_v4
+32/174968/campos_512_v4
+33/175003/campos_512_v4
+33/175011/campos_512_v4
+33/175073/campos_512_v4
+33/175155/campos_512_v4
+33/175179/campos_512_v4
+33/175207/campos_512_v4
+33/175303/campos_512_v4
+33/175413/campos_512_v4
+33/175488/campos_512_v4
+33/175523/campos_512_v4
+33/175728/campos_512_v4
+33/175843/campos_512_v4
+33/175910/campos_512_v4
+33/175930/campos_512_v4
+33/175951/campos_512_v4
+33/175965/campos_512_v4
+33/175981/campos_512_v4
+33/176028/campos_512_v4
+33/176223/campos_512_v4
+33/176265/campos_512_v4
+33/176277/campos_512_v4
+33/176313/campos_512_v4
+33/176543/campos_512_v4
+33/176694/campos_512_v4
+33/176887/campos_512_v4
+33/177032/campos_512_v4
+33/177052/campos_512_v4
+33/177158/campos_512_v4
+33/177201/campos_512_v4
+33/177375/campos_512_v4
+33/177451/campos_512_v4
+33/177458/campos_512_v4
+33/177473/campos_512_v4
+33/177563/campos_512_v4
+33/177593/campos_512_v4
+33/177619/campos_512_v4
+33/177681/campos_512_v4
+33/177696/campos_512_v4
+33/177734/campos_512_v4
+33/177836/campos_512_v4
+33/177875/campos_512_v4
+33/177958/campos_512_v4
+33/178313/campos_512_v4
+33/178446/campos_512_v4
+33/178574/campos_512_v4
+33/178661/campos_512_v4
+33/178756/campos_512_v4
+33/178801/campos_512_v4
+33/178850/campos_512_v4
+33/178865/campos_512_v4
+33/178928/campos_512_v4
+33/179245/campos_512_v4
+33/179253/campos_512_v4
+33/179281/campos_512_v4
+33/179467/campos_512_v4
+33/179620/campos_512_v4
+33/179710/campos_512_v4
+33/179738/campos_512_v4
+33/179794/campos_512_v4
+33/179831/campos_512_v4
+33/179919/campos_512_v4
+34/180134/campos_512_v4
+34/180196/campos_512_v4
+34/180238/campos_512_v4
+34/180297/campos_512_v4
+34/180361/campos_512_v4
+34/180366/campos_512_v4
+34/180449/campos_512_v4
+34/180451/campos_512_v4
+34/180533/campos_512_v4
+34/180534/campos_512_v4
+34/180873/campos_512_v4
+34/181040/campos_512_v4
+34/181131/campos_512_v4
+34/181252/campos_512_v4
+34/181293/campos_512_v4
+34/181312/campos_512_v4
+34/181365/campos_512_v4
+34/181368/campos_512_v4
+34/181395/campos_512_v4
+34/181428/campos_512_v4
+34/181529/campos_512_v4
+34/181787/campos_512_v4
+34/181810/campos_512_v4
+34/181833/campos_512_v4
+34/181926/campos_512_v4
+34/181931/campos_512_v4
+34/181957/campos_512_v4
+34/182028/campos_512_v4
+34/182104/campos_512_v4
+34/182530/campos_512_v4
+34/182559/campos_512_v4
+34/182615/campos_512_v4
+34/182888/campos_512_v4
+34/183539/campos_512_v4
+34/184107/campos_512_v4
+34/184146/campos_512_v4
+34/184173/campos_512_v4
+34/184211/campos_512_v4
+34/184214/campos_512_v4
+34/184336/campos_512_v4
+34/184376/campos_512_v4
+34/184430/campos_512_v4
+34/184454/campos_512_v4
+34/184462/campos_512_v4
+34/184471/campos_512_v4
+34/184477/campos_512_v4
+34/184555/campos_512_v4
+34/184762/campos_512_v4
+34/184826/campos_512_v4
+34/184836/campos_512_v4
+34/184857/campos_512_v4
+34/184998/campos_512_v4
+35/185036/campos_512_v4
+35/185164/campos_512_v4
+35/185382/campos_512_v4
+35/185393/campos_512_v4
+35/185415/campos_512_v4
+35/185531/campos_512_v4
+35/185702/campos_512_v4
+35/185709/campos_512_v4
+35/185779/campos_512_v4
+35/185834/campos_512_v4
+35/185847/campos_512_v4
+35/186154/campos_512_v4
+35/186642/campos_512_v4
+35/186644/campos_512_v4
+35/186669/campos_512_v4
+35/187118/campos_512_v4
+35/187188/campos_512_v4
+35/187397/campos_512_v4
+35/187894/campos_512_v4
+35/187983/campos_512_v4
+35/188002/campos_512_v4
+35/188009/campos_512_v4
+35/188153/campos_512_v4
+35/188192/campos_512_v4
+35/188566/campos_512_v4
+35/188682/campos_512_v4
+35/188687/campos_512_v4
+35/188697/campos_512_v4
+35/188860/campos_512_v4
+35/188970/campos_512_v4
+35/188998/campos_512_v4
+35/189204/campos_512_v4
+35/189219/campos_512_v4
+35/189299/campos_512_v4
+35/189414/campos_512_v4
+35/189592/campos_512_v4
+35/189748/campos_512_v4
+35/189762/campos_512_v4
+35/189924/campos_512_v4
+36/190127/campos_512_v4
+36/190163/campos_512_v4
+36/190234/campos_512_v4
+36/190248/campos_512_v4
+36/190293/campos_512_v4
+36/190369/campos_512_v4
+36/190472/campos_512_v4
+36/190533/campos_512_v4
+36/190571/campos_512_v4
+36/190657/campos_512_v4
+36/190758/campos_512_v4
+36/191037/campos_512_v4
+36/191038/campos_512_v4
+36/191117/campos_512_v4
+36/191154/campos_512_v4
+36/191197/campos_512_v4
+36/191290/campos_512_v4
+36/191356/campos_512_v4
+36/191358/campos_512_v4
+36/191379/campos_512_v4
+36/191396/campos_512_v4
+36/191468/campos_512_v4
+36/191649/campos_512_v4
+36/191808/campos_512_v4
+36/191849/campos_512_v4
+36/191926/campos_512_v4
+36/192090/campos_512_v4
+36/192179/campos_512_v4
+36/192295/campos_512_v4
+36/192330/campos_512_v4
+36/192338/campos_512_v4
+36/192374/campos_512_v4
+36/192415/campos_512_v4
+36/192449/campos_512_v4
+36/192469/campos_512_v4
+36/192872/campos_512_v4
+36/192877/campos_512_v4
+36/193111/campos_512_v4
+36/193194/campos_512_v4
+36/193219/campos_512_v4
+36/193243/campos_512_v4
+36/193260/campos_512_v4
+36/193269/campos_512_v4
+36/193432/campos_512_v4
+36/193532/campos_512_v4
+36/193552/campos_512_v4
+36/193631/campos_512_v4
+36/193663/campos_512_v4
+36/193771/campos_512_v4
+36/193824/campos_512_v4
+36/193918/campos_512_v4
+36/194058/campos_512_v4
+36/194114/campos_512_v4
+36/194208/campos_512_v4
+36/194285/campos_512_v4
+36/194490/campos_512_v4
+36/194594/campos_512_v4
+36/194603/campos_512_v4
+36/194610/campos_512_v4
+36/194672/campos_512_v4
+36/194688/campos_512_v4
+36/194698/campos_512_v4
+36/194796/campos_512_v4
+36/194806/campos_512_v4
+36/194834/campos_512_v4
+36/194837/campos_512_v4
+36/194845/campos_512_v4
+36/195001/campos_512_v4
+37/195363/campos_512_v4
+37/195528/campos_512_v4
+37/195633/campos_512_v4
+37/195747/campos_512_v4
+37/195867/campos_512_v4
+37/196117/campos_512_v4
+37/196242/campos_512_v4
+37/196308/campos_512_v4
+37/196395/campos_512_v4
+37/196466/campos_512_v4
+37/196547/campos_512_v4
+37/196566/campos_512_v4
+37/196598/campos_512_v4
+37/196660/campos_512_v4
+37/196662/campos_512_v4
+37/196964/campos_512_v4
+37/196972/campos_512_v4
+37/197054/campos_512_v4
+37/197079/campos_512_v4
+37/197122/campos_512_v4
+37/197139/campos_512_v4
+37/197400/campos_512_v4
+37/197517/campos_512_v4
+37/197574/campos_512_v4
+37/197649/campos_512_v4
+37/197651/campos_512_v4
+37/197668/campos_512_v4
+37/197694/campos_512_v4
+37/197722/campos_512_v4
+37/197784/campos_512_v4
+37/197787/campos_512_v4
+37/197824/campos_512_v4
+37/197949/campos_512_v4
+37/197991/campos_512_v4
+37/198061/campos_512_v4
+37/198121/campos_512_v4
+37/198177/campos_512_v4
+37/198215/campos_512_v4
+37/198412/campos_512_v4
+37/198427/campos_512_v4
+37/198475/campos_512_v4
+37/198477/campos_512_v4
+37/198523/campos_512_v4
+37/198610/campos_512_v4
+37/198658/campos_512_v4
+37/198669/campos_512_v4
+37/198730/campos_512_v4
+37/199063/campos_512_v4
+37/199281/campos_512_v4
+37/199339/campos_512_v4
+37/199346/campos_512_v4
+37/199377/campos_512_v4
+37/199523/campos_512_v4
+37/199588/campos_512_v4
+37/199673/campos_512_v4
+37/199757/campos_512_v4
+37/199833/campos_512_v4
+37/199893/campos_512_v4
+38/200002/campos_512_v4
+38/200139/campos_512_v4
+38/200157/campos_512_v4
+38/200218/campos_512_v4
+38/200460/campos_512_v4
+38/200663/campos_512_v4
+38/200675/campos_512_v4
+38/200773/campos_512_v4
+38/200819/campos_512_v4
+38/200928/campos_512_v4
+38/200953/campos_512_v4
+38/200972/campos_512_v4
+38/201166/campos_512_v4
+38/201177/campos_512_v4
+38/201313/campos_512_v4
+38/201317/campos_512_v4
+38/201515/campos_512_v4
+38/201568/campos_512_v4
+38/201829/campos_512_v4
+38/201857/campos_512_v4
+38/201926/campos_512_v4
+38/202083/campos_512_v4
+38/202330/campos_512_v4
+38/202404/campos_512_v4
+38/202412/campos_512_v4
+38/202759/campos_512_v4
+38/202771/campos_512_v4
+38/202846/campos_512_v4
+38/202947/campos_512_v4
+38/203072/campos_512_v4
+38/203156/campos_512_v4
+38/203176/campos_512_v4
+38/203221/campos_512_v4
+38/203240/campos_512_v4
+38/203289/campos_512_v4
+38/203339/campos_512_v4
+38/203349/campos_512_v4
+38/203447/campos_512_v4
+38/203466/campos_512_v4
+38/203715/campos_512_v4
+38/203787/campos_512_v4
+38/204255/campos_512_v4
+38/204399/campos_512_v4
+38/204422/campos_512_v4
+38/204555/campos_512_v4
+38/204730/campos_512_v4
+38/204820/campos_512_v4
+38/204857/campos_512_v4
+4/30012/campos_512_v4
+4/30051/campos_512_v4
+4/30067/campos_512_v4
+4/30144/campos_512_v4
+4/30175/campos_512_v4
+4/30272/campos_512_v4
+4/30418/campos_512_v4
+4/30451/campos_512_v4
+4/30628/campos_512_v4
+4/30784/campos_512_v4
+4/30839/campos_512_v4
+4/30898/campos_512_v4
+4/31026/campos_512_v4
+4/31106/campos_512_v4
+4/31172/campos_512_v4
+4/31431/campos_512_v4
+4/31443/campos_512_v4
+4/31620/campos_512_v4
+4/31631/campos_512_v4
+4/31682/campos_512_v4
+4/31767/campos_512_v4
+4/31839/campos_512_v4
+4/31885/campos_512_v4
+4/31890/campos_512_v4
+4/31898/campos_512_v4
+4/32129/campos_512_v4
+4/32191/campos_512_v4
+4/32267/campos_512_v4
+4/32335/campos_512_v4
+4/32511/campos_512_v4
+4/32586/campos_512_v4
+4/32604/campos_512_v4
+4/32612/campos_512_v4
+4/32684/campos_512_v4
+4/32763/campos_512_v4
+4/32838/campos_512_v4
+4/33021/campos_512_v4
+4/33396/campos_512_v4
+4/33469/campos_512_v4
+4/33511/campos_512_v4
+4/33531/campos_512_v4
+4/33553/campos_512_v4
+4/33576/campos_512_v4
+4/33685/campos_512_v4
+4/33695/campos_512_v4
+4/33711/campos_512_v4
+4/33728/campos_512_v4
+4/33831/campos_512_v4
+4/33900/campos_512_v4
+4/33929/campos_512_v4
+4/33968/campos_512_v4
+4/34030/campos_512_v4
+4/34054/campos_512_v4
+4/34158/campos_512_v4
+4/34274/campos_512_v4
+4/34369/campos_512_v4
+4/34468/campos_512_v4
+4/34522/campos_512_v4
+4/34640/campos_512_v4
+4/34676/campos_512_v4
+4/34770/campos_512_v4
+4/34988/campos_512_v4
+40/210069/campos_512_v4
+40/210450/campos_512_v4
+40/210498/campos_512_v4
+40/210617/campos_512_v4
+40/210706/campos_512_v4
+40/210719/campos_512_v4
+40/210730/campos_512_v4
+40/210813/campos_512_v4
+40/210977/campos_512_v4
+40/211132/campos_512_v4
+40/211160/campos_512_v4
+40/211275/campos_512_v4
+40/211286/campos_512_v4
+40/211510/campos_512_v4
+40/211544/campos_512_v4
+40/211607/campos_512_v4
+40/211667/campos_512_v4
+40/211730/campos_512_v4
+40/211748/campos_512_v4
+40/211859/campos_512_v4
+40/212016/campos_512_v4
+40/212037/campos_512_v4
+40/212040/campos_512_v4
+40/212089/campos_512_v4
+40/212145/campos_512_v4
+40/212476/campos_512_v4
+40/212492/campos_512_v4
+40/212668/campos_512_v4
+40/212839/campos_512_v4
+40/212990/campos_512_v4
+40/213029/campos_512_v4
+40/213065/campos_512_v4
+40/213069/campos_512_v4
+40/213104/campos_512_v4
+40/213110/campos_512_v4
+40/213139/campos_512_v4
+40/213162/campos_512_v4
+40/213172/campos_512_v4
+40/213177/campos_512_v4
+40/213351/campos_512_v4
+40/213446/campos_512_v4
+40/213533/campos_512_v4
+40/213551/campos_512_v4
+40/213606/campos_512_v4
+40/213670/campos_512_v4
+40/213694/campos_512_v4
+40/213702/campos_512_v4
+40/213735/campos_512_v4
+40/213813/campos_512_v4
+40/213830/campos_512_v4
+40/213844/campos_512_v4
+40/213880/campos_512_v4
+40/213957/campos_512_v4
+40/214101/campos_512_v4
+40/214161/campos_512_v4
+40/214261/campos_512_v4
+40/214278/campos_512_v4
+40/214299/campos_512_v4
+40/214333/campos_512_v4
+40/214377/campos_512_v4
+40/214387/campos_512_v4
+40/214474/campos_512_v4
+40/214604/campos_512_v4
+40/214756/campos_512_v4
+40/214804/campos_512_v4
+40/214845/campos_512_v4
+40/214877/campos_512_v4
+41/215048/campos_512_v4
+41/215104/campos_512_v4
+41/215241/campos_512_v4
+41/215303/campos_512_v4
+41/215355/campos_512_v4
+41/215394/campos_512_v4
+41/215458/campos_512_v4
+41/215814/campos_512_v4
+41/215855/campos_512_v4
+41/215996/campos_512_v4
+41/216044/campos_512_v4
+41/216154/campos_512_v4
+41/216179/campos_512_v4
+41/216195/campos_512_v4
+41/216224/campos_512_v4
+41/216229/campos_512_v4
+41/216356/campos_512_v4
+41/216436/campos_512_v4
+41/216553/campos_512_v4
+41/216568/campos_512_v4
+41/216577/campos_512_v4
+41/216603/campos_512_v4
+41/216656/campos_512_v4
+41/216680/campos_512_v4
+41/216776/campos_512_v4
+41/216777/campos_512_v4
+41/216801/campos_512_v4
+41/216868/campos_512_v4
+41/217081/campos_512_v4
+41/217108/campos_512_v4
+41/217153/campos_512_v4
+41/217177/campos_512_v4
+41/217259/campos_512_v4
+41/217323/campos_512_v4
+41/217451/campos_512_v4
+41/217460/campos_512_v4
+41/217524/campos_512_v4
+41/217569/campos_512_v4
+41/217579/campos_512_v4
+41/217624/campos_512_v4
+41/217639/campos_512_v4
+41/217648/campos_512_v4
+41/217659/campos_512_v4
+41/217690/campos_512_v4
+41/217745/campos_512_v4
+41/217779/campos_512_v4
+41/217840/campos_512_v4
+41/217868/campos_512_v4
+41/217874/campos_512_v4
+41/217881/campos_512_v4
+41/217975/campos_512_v4
+41/218112/campos_512_v4
+41/218131/campos_512_v4
+41/218164/campos_512_v4
+41/218256/campos_512_v4
+41/218340/campos_512_v4
+41/218403/campos_512_v4
+41/218479/campos_512_v4
+41/218613/campos_512_v4
+41/218684/campos_512_v4
+41/218901/campos_512_v4
+41/218952/campos_512_v4
+41/219000/campos_512_v4
+41/219034/campos_512_v4
+41/219041/campos_512_v4
+41/219047/campos_512_v4
+41/219097/campos_512_v4
+41/219140/campos_512_v4
+41/219198/campos_512_v4
+41/219385/campos_512_v4
+41/219387/campos_512_v4
+41/219499/campos_512_v4
+41/219557/campos_512_v4
+41/219590/campos_512_v4
+41/219594/campos_512_v4
+41/219713/campos_512_v4
+41/219714/campos_512_v4
+41/219723/campos_512_v4
+41/219818/campos_512_v4
+41/219851/campos_512_v4
+41/219972/campos_512_v4
+41/219983/campos_512_v4
+42/220011/campos_512_v4
+42/220056/campos_512_v4
+42/220070/campos_512_v4
+42/220073/campos_512_v4
+42/220202/campos_512_v4
+42/220249/campos_512_v4
+42/220303/campos_512_v4
+42/220362/campos_512_v4
+42/220384/campos_512_v4
+42/220460/campos_512_v4
+42/220476/campos_512_v4
+42/220541/campos_512_v4
+42/220586/campos_512_v4
+42/220709/campos_512_v4
+42/220813/campos_512_v4
+42/220847/campos_512_v4
+42/220860/campos_512_v4
+42/220878/campos_512_v4
+42/220879/campos_512_v4
+42/220906/campos_512_v4
+42/221004/campos_512_v4
+42/221221/campos_512_v4
+42/221274/campos_512_v4
+42/221365/campos_512_v4
+42/221509/campos_512_v4
+42/221579/campos_512_v4
+42/221716/campos_512_v4
+42/221811/campos_512_v4
+42/221839/campos_512_v4
+42/222017/campos_512_v4
+42/222051/campos_512_v4
+42/222096/campos_512_v4
+42/222113/campos_512_v4
+42/222131/campos_512_v4
+42/222139/campos_512_v4
+42/222164/campos_512_v4
+42/222199/campos_512_v4
+42/222210/campos_512_v4
+42/222764/campos_512_v4
+42/222783/campos_512_v4
+42/222835/campos_512_v4
+42/222890/campos_512_v4
+42/222924/campos_512_v4
+42/222953/campos_512_v4
+42/222956/campos_512_v4
+42/223051/campos_512_v4
+42/223254/campos_512_v4
+42/223351/campos_512_v4
+42/223375/campos_512_v4
+42/223382/campos_512_v4
+42/223450/campos_512_v4
+42/223486/campos_512_v4
+42/223612/campos_512_v4
+42/223620/campos_512_v4
+42/223651/campos_512_v4
+42/223654/campos_512_v4
+42/223736/campos_512_v4
+42/223797/campos_512_v4
+42/223851/campos_512_v4
+42/223888/campos_512_v4
+42/223981/campos_512_v4
+42/224014/campos_512_v4
+42/224024/campos_512_v4
+42/224093/campos_512_v4
+42/224144/campos_512_v4
+42/224238/campos_512_v4
+42/224299/campos_512_v4
+42/224301/campos_512_v4
+42/224335/campos_512_v4
+42/224369/campos_512_v4
+42/224381/campos_512_v4
+42/224388/campos_512_v4
+42/224398/campos_512_v4
+42/224418/campos_512_v4
+42/224452/campos_512_v4
+42/224501/campos_512_v4
+42/224529/campos_512_v4
+42/224608/campos_512_v4
+42/224801/campos_512_v4
+42/224834/campos_512_v4
+43/225015/campos_512_v4
+43/225061/campos_512_v4
+43/225074/campos_512_v4
+43/225101/campos_512_v4
+43/225107/campos_512_v4
+43/225243/campos_512_v4
+43/225314/campos_512_v4
+43/225419/campos_512_v4
+43/225455/campos_512_v4
+43/225580/campos_512_v4
+43/225654/campos_512_v4
+43/225842/campos_512_v4
+43/225854/campos_512_v4
+43/225941/campos_512_v4
+43/226051/campos_512_v4
+43/226061/campos_512_v4
+43/226107/campos_512_v4
+43/226142/campos_512_v4
+43/226222/campos_512_v4
+43/226329/campos_512_v4
+43/226379/campos_512_v4
+43/226526/campos_512_v4
+43/226940/campos_512_v4
+43/226959/campos_512_v4
+43/226961/campos_512_v4
+43/227051/campos_512_v4
+43/227063/campos_512_v4
+43/227204/campos_512_v4
+43/227252/campos_512_v4
+43/227264/campos_512_v4
+43/227319/campos_512_v4
+43/227410/campos_512_v4
+43/227429/campos_512_v4
+43/227488/campos_512_v4
+43/227548/campos_512_v4
+43/227560/campos_512_v4
+43/227567/campos_512_v4
+43/227645/campos_512_v4
+43/227718/campos_512_v4
+43/227777/campos_512_v4
+43/227963/campos_512_v4
+43/228197/campos_512_v4
+43/228290/campos_512_v4
+43/228475/campos_512_v4
+43/228697/campos_512_v4
+43/228756/campos_512_v4
+43/228797/campos_512_v4
+43/228823/campos_512_v4
+43/228829/campos_512_v4
+43/228913/campos_512_v4
+43/228987/campos_512_v4
+43/229009/campos_512_v4
+43/229086/campos_512_v4
+43/229206/campos_512_v4
+43/229270/campos_512_v4
+43/229277/campos_512_v4
+43/229319/campos_512_v4
+43/229422/campos_512_v4
+43/229457/campos_512_v4
+43/229536/campos_512_v4
+43/229675/campos_512_v4
+43/229682/campos_512_v4
+43/229690/campos_512_v4
+43/229700/campos_512_v4
+43/229725/campos_512_v4
+43/229735/campos_512_v4
+43/229880/campos_512_v4
+43/229888/campos_512_v4
+43/229895/campos_512_v4
+43/229958/campos_512_v4
+44/230005/campos_512_v4
+44/230102/campos_512_v4
+44/230185/campos_512_v4
+44/230356/campos_512_v4
+44/230428/campos_512_v4
+44/230459/campos_512_v4
+44/230470/campos_512_v4
+44/230541/campos_512_v4
+44/230544/campos_512_v4
+44/230598/campos_512_v4
+44/230723/campos_512_v4
+44/230755/campos_512_v4
+44/230811/campos_512_v4
+44/230827/campos_512_v4
+44/231022/campos_512_v4
+44/231126/campos_512_v4
+44/231176/campos_512_v4
+44/231187/campos_512_v4
+44/231205/campos_512_v4
+44/231206/campos_512_v4
+44/231340/campos_512_v4
+44/231350/campos_512_v4
+44/231510/campos_512_v4
+44/231530/campos_512_v4
+44/231535/campos_512_v4
+44/231540/campos_512_v4
+44/231555/campos_512_v4
+44/231574/campos_512_v4
+44/231583/campos_512_v4
+44/231617/campos_512_v4
+44/231679/campos_512_v4
+44/231741/campos_512_v4
+44/231778/campos_512_v4
+44/231793/campos_512_v4
+44/231882/campos_512_v4
+44/231921/campos_512_v4
+44/231972/campos_512_v4
+44/231973/campos_512_v4
+44/232081/campos_512_v4
+44/232164/campos_512_v4
+44/232234/campos_512_v4
+44/232323/campos_512_v4
+44/232354/campos_512_v4
+44/232375/campos_512_v4
+44/232456/campos_512_v4
+44/232704/campos_512_v4
+44/232728/campos_512_v4
+44/232763/campos_512_v4
+44/232767/campos_512_v4
+44/232811/campos_512_v4
+44/232849/campos_512_v4
+44/232866/campos_512_v4
+44/232876/campos_512_v4
+44/233049/campos_512_v4
+44/233319/campos_512_v4
+44/233399/campos_512_v4
+44/233782/campos_512_v4
+44/233818/campos_512_v4
+44/233939/campos_512_v4
+44/234042/campos_512_v4
+44/234110/campos_512_v4
+44/234134/campos_512_v4
+44/234140/campos_512_v4
+44/234197/campos_512_v4
+44/234228/campos_512_v4
+44/234249/campos_512_v4
+44/234252/campos_512_v4
+44/234496/campos_512_v4
+44/234751/campos_512_v4
+44/234855/campos_512_v4
+44/234895/campos_512_v4
+45/235047/campos_512_v4
+45/235078/campos_512_v4
+45/235121/campos_512_v4
+45/235208/campos_512_v4
+45/235328/campos_512_v4
+45/235415/campos_512_v4
+45/235452/campos_512_v4
+45/235510/campos_512_v4
+45/235513/campos_512_v4
+45/235546/campos_512_v4
+45/235625/campos_512_v4
+45/235662/campos_512_v4
+45/235764/campos_512_v4
+45/235778/campos_512_v4
+45/235787/campos_512_v4
+45/235849/campos_512_v4
+45/235877/campos_512_v4
+45/235923/campos_512_v4
+45/235976/campos_512_v4
+45/236011/campos_512_v4
+45/236149/campos_512_v4
+45/236283/campos_512_v4
+45/236292/campos_512_v4
+45/236341/campos_512_v4
+45/236365/campos_512_v4
+45/236391/campos_512_v4
+45/236393/campos_512_v4
+45/236448/campos_512_v4
+45/236473/campos_512_v4
+45/236514/campos_512_v4
+45/236570/campos_512_v4
+45/236588/campos_512_v4
+45/236595/campos_512_v4
+45/236648/campos_512_v4
+45/236701/campos_512_v4
+45/236708/campos_512_v4
+45/236741/campos_512_v4
+45/236849/campos_512_v4
+45/236893/campos_512_v4
+45/236962/campos_512_v4
+45/237114/campos_512_v4
+45/237119/campos_512_v4
+45/237130/campos_512_v4
+45/237137/campos_512_v4
+45/237145/campos_512_v4
+45/237164/campos_512_v4
+45/237183/campos_512_v4
+45/237237/campos_512_v4
+45/237253/campos_512_v4
+45/237288/campos_512_v4
+45/237540/campos_512_v4
+45/237551/campos_512_v4
+45/237580/campos_512_v4
+45/237676/campos_512_v4
+45/237692/campos_512_v4
+45/237696/campos_512_v4
+45/237754/campos_512_v4
+45/237762/campos_512_v4
+45/237842/campos_512_v4
+45/237875/campos_512_v4
+45/237893/campos_512_v4
+45/237976/campos_512_v4
+45/238080/campos_512_v4
+45/238084/campos_512_v4
+45/238268/campos_512_v4
+45/238304/campos_512_v4
+45/238311/campos_512_v4
+45/238581/campos_512_v4
+45/238741/campos_512_v4
+45/238747/campos_512_v4
+45/238798/campos_512_v4
+45/238821/campos_512_v4
+45/238964/campos_512_v4
+45/239109/campos_512_v4
+45/239144/campos_512_v4
+45/239176/campos_512_v4
+45/239323/campos_512_v4
+45/239363/campos_512_v4
+45/239532/campos_512_v4
+45/239541/campos_512_v4
+45/239591/campos_512_v4
+45/239717/campos_512_v4
+45/239869/campos_512_v4
+46/240197/campos_512_v4
+46/240232/campos_512_v4
+46/240249/campos_512_v4
+46/240259/campos_512_v4
+46/240349/campos_512_v4
+46/240417/campos_512_v4
+46/240420/campos_512_v4
+46/240473/campos_512_v4
+46/240487/campos_512_v4
+46/240525/campos_512_v4
+46/240539/campos_512_v4
+46/240596/campos_512_v4
+46/240668/campos_512_v4
+46/240728/campos_512_v4
+46/240734/campos_512_v4
+46/240801/campos_512_v4
+46/240809/campos_512_v4
+46/240897/campos_512_v4
+46/240942/campos_512_v4
+46/241105/campos_512_v4
+46/241164/campos_512_v4
+46/241170/campos_512_v4
+46/241224/campos_512_v4
+46/241231/campos_512_v4
+46/241275/campos_512_v4
+46/241278/campos_512_v4
+46/241290/campos_512_v4
+46/241318/campos_512_v4
+46/241348/campos_512_v4
+46/241360/campos_512_v4
+46/241510/campos_512_v4
+46/241521/campos_512_v4
+46/241563/campos_512_v4
+46/241611/campos_512_v4
+46/241662/campos_512_v4
+46/241680/campos_512_v4
+46/241883/campos_512_v4
+46/241924/campos_512_v4
+46/242062/campos_512_v4
+46/242070/campos_512_v4
+46/242115/campos_512_v4
+46/242210/campos_512_v4
+46/242244/campos_512_v4
+46/242250/campos_512_v4
+46/242362/campos_512_v4
+46/242384/campos_512_v4
+46/242416/campos_512_v4
+46/242591/campos_512_v4
+46/242685/campos_512_v4
+46/242778/campos_512_v4
+46/242803/campos_512_v4
+46/242848/campos_512_v4
+46/242894/campos_512_v4
+46/242955/campos_512_v4
+46/242957/campos_512_v4
+46/243010/campos_512_v4
+46/243170/campos_512_v4
+46/243480/campos_512_v4
+46/243511/campos_512_v4
+46/243672/campos_512_v4
+46/243726/campos_512_v4
+46/243740/campos_512_v4
+46/243742/campos_512_v4
+46/243778/campos_512_v4
+46/243808/campos_512_v4
+46/244083/campos_512_v4
+46/244150/campos_512_v4
+46/244152/campos_512_v4
+46/244188/campos_512_v4
+46/244347/campos_512_v4
+46/244475/campos_512_v4
+46/244609/campos_512_v4
+46/244610/campos_512_v4
+46/244632/campos_512_v4
+46/244645/campos_512_v4
+46/244682/campos_512_v4
+46/244724/campos_512_v4
+46/244739/campos_512_v4
+46/244765/campos_512_v4
+46/244888/campos_512_v4
+47/245005/campos_512_v4
+47/245015/campos_512_v4
+47/245026/campos_512_v4
+47/245094/campos_512_v4
+47/245112/campos_512_v4
+47/245180/campos_512_v4
+47/245242/campos_512_v4
+47/245294/campos_512_v4
+47/245295/campos_512_v4
+47/245424/campos_512_v4
+47/245520/campos_512_v4
+47/245571/campos_512_v4
+47/245581/campos_512_v4
+47/245621/campos_512_v4
+47/245663/campos_512_v4
+47/245847/campos_512_v4
+47/245860/campos_512_v4
+47/245887/campos_512_v4
+47/245920/campos_512_v4
+47/245944/campos_512_v4
+47/246002/campos_512_v4
+47/246066/campos_512_v4
+47/246073/campos_512_v4
+47/246174/campos_512_v4
+47/246254/campos_512_v4
+47/246279/campos_512_v4
+47/246307/campos_512_v4
+47/246315/campos_512_v4
+47/246366/campos_512_v4
+47/246368/campos_512_v4
+47/246383/campos_512_v4
+47/246511/campos_512_v4
+47/246523/campos_512_v4
+47/246731/campos_512_v4
+47/246735/campos_512_v4
+47/246800/campos_512_v4
+47/246832/campos_512_v4
+47/246840/campos_512_v4
+47/246850/campos_512_v4
+47/246922/campos_512_v4
+47/246950/campos_512_v4
+47/247293/campos_512_v4
+47/247315/campos_512_v4
+47/247334/campos_512_v4
+47/247404/campos_512_v4
+47/247512/campos_512_v4
+47/247536/campos_512_v4
+47/247544/campos_512_v4
+47/247600/campos_512_v4
+47/247681/campos_512_v4
+47/247739/campos_512_v4
+47/247749/campos_512_v4
+47/247865/campos_512_v4
+47/247897/campos_512_v4
+47/247930/campos_512_v4
+47/247981/campos_512_v4
+47/248052/campos_512_v4
+47/248104/campos_512_v4
+47/248122/campos_512_v4
+47/248138/campos_512_v4
+47/248212/campos_512_v4
+47/248475/campos_512_v4
+47/248491/campos_512_v4
+47/248560/campos_512_v4
+47/248605/campos_512_v4
+47/248657/campos_512_v4
+47/248681/campos_512_v4
+47/248755/campos_512_v4
+47/248927/campos_512_v4
+47/248948/campos_512_v4
+47/248957/campos_512_v4
+47/249050/campos_512_v4
+47/249164/campos_512_v4
+47/249221/campos_512_v4
+47/249453/campos_512_v4
+47/249529/campos_512_v4
+47/249532/campos_512_v4
+47/249560/campos_512_v4
+47/249683/campos_512_v4
+47/249704/campos_512_v4
+47/249761/campos_512_v4
+47/249765/campos_512_v4
+47/249833/campos_512_v4
+47/249991/campos_512_v4
+48/250079/campos_512_v4
+48/250116/campos_512_v4
+48/250122/campos_512_v4
+48/250245/campos_512_v4
+48/250322/campos_512_v4
+48/250376/campos_512_v4
+48/250470/campos_512_v4
+48/250527/campos_512_v4
+48/250559/campos_512_v4
+48/250712/campos_512_v4
+48/250754/campos_512_v4
+48/250767/campos_512_v4
+48/250908/campos_512_v4
+48/250945/campos_512_v4
+48/250987/campos_512_v4
+48/251126/campos_512_v4
+48/251211/campos_512_v4
+48/251242/campos_512_v4
+48/251474/campos_512_v4
+48/251486/campos_512_v4
+48/251614/campos_512_v4
+48/251649/campos_512_v4
+48/251668/campos_512_v4
+48/251771/campos_512_v4
+48/251785/campos_512_v4
+48/251868/campos_512_v4
+48/251930/campos_512_v4
+48/251972/campos_512_v4
+48/252015/campos_512_v4
+48/252103/campos_512_v4
+48/252195/campos_512_v4
+48/252291/campos_512_v4
+48/252309/campos_512_v4
+48/252363/campos_512_v4
+48/252375/campos_512_v4
+48/252520/campos_512_v4
+48/252583/campos_512_v4
+48/252684/campos_512_v4
+48/253013/campos_512_v4
+48/253135/campos_512_v4
+48/253145/campos_512_v4
+48/253154/campos_512_v4
+48/253186/campos_512_v4
+48/253220/campos_512_v4
+48/253281/campos_512_v4
+48/253316/campos_512_v4
+48/253326/campos_512_v4
+48/253347/campos_512_v4
+48/253375/campos_512_v4
+48/253430/campos_512_v4
+48/253596/campos_512_v4
+48/253673/campos_512_v4
+48/253750/campos_512_v4
+48/253793/campos_512_v4
+48/253797/campos_512_v4
+48/253866/campos_512_v4
+48/253949/campos_512_v4
+48/254047/campos_512_v4
+48/254053/campos_512_v4
+48/254078/campos_512_v4
+48/254147/campos_512_v4
+48/254267/campos_512_v4
+48/254287/campos_512_v4
+48/254324/campos_512_v4
+48/254430/campos_512_v4
+48/254449/campos_512_v4
+48/254574/campos_512_v4
+48/254580/campos_512_v4
+48/254681/campos_512_v4
+48/254749/campos_512_v4
+48/254784/campos_512_v4
+48/254819/campos_512_v4
+48/254846/campos_512_v4
+48/254916/campos_512_v4
+48/254960/campos_512_v4
+49/255099/campos_512_v4
+49/255118/campos_512_v4
+49/255243/campos_512_v4
+49/255394/campos_512_v4
+49/255443/campos_512_v4
+49/255464/campos_512_v4
+49/255592/campos_512_v4
+49/255679/campos_512_v4
+49/255755/campos_512_v4
+49/255774/campos_512_v4
+49/255796/campos_512_v4
+49/255861/campos_512_v4
+49/255875/campos_512_v4
+49/255900/campos_512_v4
+49/255947/campos_512_v4
+49/256021/campos_512_v4
+49/256033/campos_512_v4
+49/256150/campos_512_v4
+49/256201/campos_512_v4
+49/256212/campos_512_v4
+49/256240/campos_512_v4
+49/256250/campos_512_v4
+49/256269/campos_512_v4
+49/256299/campos_512_v4
+49/256457/campos_512_v4
+49/256463/campos_512_v4
+49/256502/campos_512_v4
+49/256542/campos_512_v4
+49/256584/campos_512_v4
+49/256762/campos_512_v4
+49/256803/campos_512_v4
+49/256992/campos_512_v4
+49/257015/campos_512_v4
+49/257063/campos_512_v4
+49/257076/campos_512_v4
+49/257335/campos_512_v4
+49/257416/campos_512_v4
+49/257528/campos_512_v4
+49/257732/campos_512_v4
+49/257779/campos_512_v4
+49/257826/campos_512_v4
+49/257902/campos_512_v4
+49/257978/campos_512_v4
+49/258027/campos_512_v4
+49/258093/campos_512_v4
+49/258098/campos_512_v4
+49/258213/campos_512_v4
+49/258216/campos_512_v4
+49/258233/campos_512_v4
+49/258403/campos_512_v4
+49/258426/campos_512_v4
+49/258486/campos_512_v4
+49/258570/campos_512_v4
+49/258585/campos_512_v4
+49/258610/campos_512_v4
+49/258611/campos_512_v4
+49/258634/campos_512_v4
+49/258906/campos_512_v4
+49/258942/campos_512_v4
+49/258973/campos_512_v4
+49/259089/campos_512_v4
+49/259112/campos_512_v4
+49/259127/campos_512_v4
+49/259128/campos_512_v4
+49/259172/campos_512_v4
+49/259189/campos_512_v4
+49/259254/campos_512_v4
+49/259265/campos_512_v4
+49/259269/campos_512_v4
+49/259406/campos_512_v4
+49/259557/campos_512_v4
+49/259578/campos_512_v4
+49/259585/campos_512_v4
+49/259655/campos_512_v4
+49/259665/campos_512_v4
+49/259671/campos_512_v4
+49/259687/campos_512_v4
+49/259747/campos_512_v4
+49/259783/campos_512_v4
+49/259784/campos_512_v4
+49/259873/campos_512_v4
+49/259884/campos_512_v4
+49/259891/campos_512_v4
+5/35071/campos_512_v4
+5/35105/campos_512_v4
+5/35107/campos_512_v4
+5/35123/campos_512_v4
+5/35134/campos_512_v4
+5/35152/campos_512_v4
+5/35154/campos_512_v4
+5/35186/campos_512_v4
+5/35312/campos_512_v4
+5/35314/campos_512_v4
+5/35527/campos_512_v4
+5/35594/campos_512_v4
+5/35615/campos_512_v4
+5/35835/campos_512_v4
+5/35844/campos_512_v4
+5/36012/campos_512_v4
+5/36035/campos_512_v4
+5/36078/campos_512_v4
+5/36100/campos_512_v4
+5/36209/campos_512_v4
+5/36416/campos_512_v4
+5/36446/campos_512_v4
+5/36471/campos_512_v4
+5/36494/campos_512_v4
+5/36500/campos_512_v4
+5/36572/campos_512_v4
+5/36599/campos_512_v4
+5/36643/campos_512_v4
+5/36668/campos_512_v4
+5/36817/campos_512_v4
+5/36838/campos_512_v4
+5/36888/campos_512_v4
+5/36966/campos_512_v4
+5/36991/campos_512_v4
+5/36998/campos_512_v4
+5/37067/campos_512_v4
+5/37165/campos_512_v4
+5/37195/campos_512_v4
+5/37219/campos_512_v4
+5/37270/campos_512_v4
+5/37411/campos_512_v4
+5/37426/campos_512_v4
+5/37456/campos_512_v4
+5/37526/campos_512_v4
+5/37561/campos_512_v4
+5/37716/campos_512_v4
+5/37796/campos_512_v4
+5/37853/campos_512_v4
+5/37897/campos_512_v4
+5/37908/campos_512_v4
+5/37940/campos_512_v4
+5/38001/campos_512_v4
+5/38060/campos_512_v4
+5/38125/campos_512_v4
+5/38270/campos_512_v4
+5/38300/campos_512_v4
+5/38377/campos_512_v4
+5/38442/campos_512_v4
+5/38590/campos_512_v4
+5/38607/campos_512_v4
+5/38646/campos_512_v4
+5/38735/campos_512_v4
+5/38964/campos_512_v4
+5/38982/campos_512_v4
+5/39158/campos_512_v4
+5/39204/campos_512_v4
+5/39260/campos_512_v4
+5/39338/campos_512_v4
+5/39355/campos_512_v4
+5/39485/campos_512_v4
+5/39501/campos_512_v4
+5/39570/campos_512_v4
+5/39603/campos_512_v4
+5/39606/campos_512_v4
+5/39628/campos_512_v4
+5/39752/campos_512_v4
+5/39775/campos_512_v4
+5/39854/campos_512_v4
+5/39893/campos_512_v4
+50/260008/campos_512_v4
+50/260024/campos_512_v4
+50/260135/campos_512_v4
+50/260154/campos_512_v4
+50/260229/campos_512_v4
+50/260367/campos_512_v4
+50/260397/campos_512_v4
+50/260451/campos_512_v4
+50/260513/campos_512_v4
+50/260588/campos_512_v4
+50/260604/campos_512_v4
+50/260622/campos_512_v4
+50/260653/campos_512_v4
+50/260798/campos_512_v4
+50/260820/campos_512_v4
+50/260826/campos_512_v4
+50/260859/campos_512_v4
+50/260881/campos_512_v4
+50/261033/campos_512_v4
+50/261074/campos_512_v4
+50/261103/campos_512_v4
+50/261228/campos_512_v4
+50/261299/campos_512_v4
+50/261350/campos_512_v4
+50/261434/campos_512_v4
+50/261575/campos_512_v4
+50/261576/campos_512_v4
+50/261706/campos_512_v4
+50/261731/campos_512_v4
+50/261786/campos_512_v4
+50/261839/campos_512_v4
+50/261958/campos_512_v4
+50/261980/campos_512_v4
+50/261989/campos_512_v4
+50/262002/campos_512_v4
+50/262104/campos_512_v4
+50/262169/campos_512_v4
+50/262186/campos_512_v4
+50/262254/campos_512_v4
+50/262289/campos_512_v4
+50/262395/campos_512_v4
+50/262489/campos_512_v4
+50/262607/campos_512_v4
+50/262630/campos_512_v4
+50/262679/campos_512_v4
+50/262766/campos_512_v4
+50/262824/campos_512_v4
+50/262869/campos_512_v4
+50/262939/campos_512_v4
+50/262975/campos_512_v4
+50/263045/campos_512_v4
+50/263074/campos_512_v4
+50/263083/campos_512_v4
+50/263091/campos_512_v4
+50/263218/campos_512_v4
+50/263322/campos_512_v4
+50/263377/campos_512_v4
+50/263434/campos_512_v4
+50/263689/campos_512_v4
+50/263741/campos_512_v4
+50/263783/campos_512_v4
+50/263789/campos_512_v4
+50/263943/campos_512_v4
+50/264013/campos_512_v4
+50/264078/campos_512_v4
+50/264132/campos_512_v4
+50/264309/campos_512_v4
+50/264427/campos_512_v4
+50/264455/campos_512_v4
+50/264462/campos_512_v4
+50/264487/campos_512_v4
+50/264544/campos_512_v4
+50/264592/campos_512_v4
+50/264685/campos_512_v4
+50/264709/campos_512_v4
+50/264719/campos_512_v4
+50/264723/campos_512_v4
+50/264865/campos_512_v4
+50/264875/campos_512_v4
+50/264915/campos_512_v4
+50/264926/campos_512_v4
+50/264942/campos_512_v4
+51/265010/campos_512_v4
+51/265042/campos_512_v4
+51/265096/campos_512_v4
+51/265112/campos_512_v4
+51/265143/campos_512_v4
+51/265345/campos_512_v4
+51/265371/campos_512_v4
+51/265433/campos_512_v4
+51/265448/campos_512_v4
+51/265464/campos_512_v4
+51/265470/campos_512_v4
+51/265523/campos_512_v4
+51/265560/campos_512_v4
+51/265592/campos_512_v4
+51/265617/campos_512_v4
+51/265760/campos_512_v4
+51/265795/campos_512_v4
+51/265847/campos_512_v4
+51/265903/campos_512_v4
+51/265908/campos_512_v4
+51/266091/campos_512_v4
+51/266168/campos_512_v4
+51/266267/campos_512_v4
+51/266374/campos_512_v4
+51/266400/campos_512_v4
+51/266419/campos_512_v4
+51/266472/campos_512_v4
+51/266490/campos_512_v4
+51/266502/campos_512_v4
+51/266529/campos_512_v4
+51/266598/campos_512_v4
+51/266617/campos_512_v4
+51/266754/campos_512_v4
+51/266803/campos_512_v4
+51/266836/campos_512_v4
+51/266845/campos_512_v4
+51/266872/campos_512_v4
+51/266898/campos_512_v4
+51/266961/campos_512_v4
+51/267051/campos_512_v4
+51/267065/campos_512_v4
+51/267206/campos_512_v4
+51/267212/campos_512_v4
+51/267314/campos_512_v4
+51/267363/campos_512_v4
+51/267399/campos_512_v4
+51/267528/campos_512_v4
+51/267653/campos_512_v4
+51/267723/campos_512_v4
+51/267746/campos_512_v4
+51/267841/campos_512_v4
+51/267899/campos_512_v4
+51/267925/campos_512_v4
+51/267957/campos_512_v4
+51/267998/campos_512_v4
+51/268029/campos_512_v4
+51/268034/campos_512_v4
+51/268043/campos_512_v4
+51/268046/campos_512_v4
+51/268067/campos_512_v4
+51/268141/campos_512_v4
+51/268189/campos_512_v4
+51/268213/campos_512_v4
+51/268231/campos_512_v4
+51/268253/campos_512_v4
+51/268356/campos_512_v4
+51/268382/campos_512_v4
+51/268441/campos_512_v4
+51/268569/campos_512_v4
+51/268751/campos_512_v4
+51/268839/campos_512_v4
+51/268854/campos_512_v4
+51/268897/campos_512_v4
+51/268940/campos_512_v4
+51/269027/campos_512_v4
+51/269096/campos_512_v4
+51/269170/campos_512_v4
+51/269258/campos_512_v4
+51/269321/campos_512_v4
+51/269430/campos_512_v4
+51/269503/campos_512_v4
+51/269633/campos_512_v4
+51/269637/campos_512_v4
+51/269672/campos_512_v4
+51/269766/campos_512_v4
+51/269805/campos_512_v4
+51/269810/campos_512_v4
+51/269856/campos_512_v4
+51/269924/campos_512_v4
+52/270041/campos_512_v4
+52/270046/campos_512_v4
+52/270077/campos_512_v4
+52/270152/campos_512_v4
+52/270180/campos_512_v4
+52/270187/campos_512_v4
+52/270197/campos_512_v4
+52/270237/campos_512_v4
+52/270309/campos_512_v4
+52/270377/campos_512_v4
+52/270385/campos_512_v4
+52/270439/campos_512_v4
+52/270443/campos_512_v4
+52/270460/campos_512_v4
+52/270568/campos_512_v4
+52/270601/campos_512_v4
+52/270612/campos_512_v4
+52/270615/campos_512_v4
+52/270625/campos_512_v4
+52/270708/campos_512_v4
+52/270762/campos_512_v4
+52/270793/campos_512_v4
+52/270825/campos_512_v4
+52/270953/campos_512_v4
+52/271003/campos_512_v4
+52/271154/campos_512_v4
+52/271204/campos_512_v4
+52/271256/campos_512_v4
+52/271283/campos_512_v4
+52/271312/campos_512_v4
+52/271356/campos_512_v4
+52/271369/campos_512_v4
+52/271401/campos_512_v4
+52/271402/campos_512_v4
+52/271482/campos_512_v4
+52/271572/campos_512_v4
+52/271613/campos_512_v4
+52/271712/campos_512_v4
+52/271912/campos_512_v4
+52/271923/campos_512_v4
+52/271961/campos_512_v4
+52/272027/campos_512_v4
+52/272039/campos_512_v4
+52/272093/campos_512_v4
+52/272187/campos_512_v4
+52/272287/campos_512_v4
+52/272487/campos_512_v4
+52/272490/campos_512_v4
+52/272536/campos_512_v4
+52/272818/campos_512_v4
+52/272900/campos_512_v4
+52/273011/campos_512_v4
+52/273048/campos_512_v4
+52/273060/campos_512_v4
+52/273074/campos_512_v4
+52/273128/campos_512_v4
+52/273152/campos_512_v4
+52/273165/campos_512_v4
+52/273372/campos_512_v4
+52/273375/campos_512_v4
+52/273488/campos_512_v4
+52/273586/campos_512_v4
+52/273618/campos_512_v4
+52/273717/campos_512_v4
+52/273822/campos_512_v4
+52/273849/campos_512_v4
+52/273915/campos_512_v4
+52/273959/campos_512_v4
+52/273983/campos_512_v4
+52/274049/campos_512_v4
+52/274099/campos_512_v4
+52/274192/campos_512_v4
+52/274304/campos_512_v4
+52/274337/campos_512_v4
+52/274600/campos_512_v4
+52/274612/campos_512_v4
+52/274695/campos_512_v4
+52/274747/campos_512_v4
+52/274833/campos_512_v4
+52/274846/campos_512_v4
+52/274884/campos_512_v4
+52/274890/campos_512_v4
+52/274920/campos_512_v4
+52/274932/campos_512_v4
+52/274986/campos_512_v4
+53/275019/campos_512_v4
+53/275028/campos_512_v4
+53/275077/campos_512_v4
+53/275104/campos_512_v4
+53/275108/campos_512_v4
+53/275151/campos_512_v4
+53/275172/campos_512_v4
+53/275317/campos_512_v4
+53/275459/campos_512_v4
+53/275466/campos_512_v4
+53/275566/campos_512_v4
+53/275601/campos_512_v4
+53/275663/campos_512_v4
+53/275778/campos_512_v4
+53/275795/campos_512_v4
+53/275835/campos_512_v4
+53/275900/campos_512_v4
+53/275904/campos_512_v4
+53/276067/campos_512_v4
+53/276142/campos_512_v4
+53/276163/campos_512_v4
+53/276236/campos_512_v4
+53/276327/campos_512_v4
+53/276459/campos_512_v4
+53/276462/campos_512_v4
+53/276474/campos_512_v4
+53/276583/campos_512_v4
+53/276591/campos_512_v4
+53/276621/campos_512_v4
+53/276628/campos_512_v4
+53/276664/campos_512_v4
+53/276696/campos_512_v4
+53/276743/campos_512_v4
+53/276813/campos_512_v4
+53/276836/campos_512_v4
+53/276906/campos_512_v4
+53/276932/campos_512_v4
+53/276968/campos_512_v4
+53/277027/campos_512_v4
+53/277047/campos_512_v4
+53/277092/campos_512_v4
+53/277173/campos_512_v4
+53/277176/campos_512_v4
+53/277222/campos_512_v4
+53/277241/campos_512_v4
+53/277433/campos_512_v4
+53/277461/campos_512_v4
+53/277525/campos_512_v4
+53/277638/campos_512_v4
+53/277643/campos_512_v4
+53/277816/campos_512_v4
+53/277843/campos_512_v4
+53/277894/campos_512_v4
+53/277934/campos_512_v4
+53/277959/campos_512_v4
+53/277962/campos_512_v4
+53/277974/campos_512_v4
+53/278005/campos_512_v4
+53/278055/campos_512_v4
+53/278072/campos_512_v4
+53/278079/campos_512_v4
+53/278110/campos_512_v4
+53/278129/campos_512_v4
+53/278260/campos_512_v4
+53/278263/campos_512_v4
+53/278404/campos_512_v4
+53/278412/campos_512_v4
+53/278450/campos_512_v4
+53/278621/campos_512_v4
+53/278636/campos_512_v4
+53/278694/campos_512_v4
+53/278804/campos_512_v4
+53/278843/campos_512_v4
+53/278956/campos_512_v4
+53/279045/campos_512_v4
+53/279046/campos_512_v4
+53/279089/campos_512_v4
+53/279127/campos_512_v4
+53/279134/campos_512_v4
+53/279313/campos_512_v4
+53/279371/campos_512_v4
+53/279460/campos_512_v4
+53/279463/campos_512_v4
+53/279549/campos_512_v4
+53/279558/campos_512_v4
+53/279564/campos_512_v4
+53/279686/campos_512_v4
+53/279782/campos_512_v4
+53/279842/campos_512_v4
+53/279951/campos_512_v4
+53/279973/campos_512_v4
+53/279985/campos_512_v4
+54/280086/campos_512_v4
+54/280100/campos_512_v4
+54/280120/campos_512_v4
+54/280267/campos_512_v4
+54/280309/campos_512_v4
+54/280368/campos_512_v4
+54/280461/campos_512_v4
+54/280653/campos_512_v4
+54/280921/campos_512_v4
+54/281152/campos_512_v4
+54/281216/campos_512_v4
+54/281350/campos_512_v4
+54/281378/campos_512_v4
+54/281430/campos_512_v4
+54/281527/campos_512_v4
+54/281560/campos_512_v4
+54/281643/campos_512_v4
+54/281741/campos_512_v4
+54/281781/campos_512_v4
+54/281794/campos_512_v4
+54/281844/campos_512_v4
+54/282013/campos_512_v4
+54/282050/campos_512_v4
+54/282062/campos_512_v4
+54/282070/campos_512_v4
+54/282083/campos_512_v4
+54/282153/campos_512_v4
+54/282389/campos_512_v4
+54/282454/campos_512_v4
+54/282669/campos_512_v4
+54/282683/campos_512_v4
+54/282724/campos_512_v4
+54/282813/campos_512_v4
+54/282868/campos_512_v4
+54/282893/campos_512_v4
+54/282916/campos_512_v4
+54/282998/campos_512_v4
+54/283028/campos_512_v4
+54/283039/campos_512_v4
+54/283046/campos_512_v4
+54/283097/campos_512_v4
+54/283194/campos_512_v4
+54/283242/campos_512_v4
+54/283296/campos_512_v4
+54/283301/campos_512_v4
+54/283345/campos_512_v4
+54/283349/campos_512_v4
+54/283535/campos_512_v4
+54/283571/campos_512_v4
+54/283608/campos_512_v4
+54/283719/campos_512_v4
+54/283720/campos_512_v4
+54/283748/campos_512_v4
+54/283775/campos_512_v4
+54/283869/campos_512_v4
+54/283871/campos_512_v4
+54/283878/campos_512_v4
+54/283895/campos_512_v4
+54/283953/campos_512_v4
+54/284200/campos_512_v4
+54/284224/campos_512_v4
+54/284291/campos_512_v4
+54/284349/campos_512_v4
+54/284395/campos_512_v4
+54/284456/campos_512_v4
+54/284506/campos_512_v4
+54/284605/campos_512_v4
+54/284606/campos_512_v4
+54/284651/campos_512_v4
+54/284710/campos_512_v4
+54/284730/campos_512_v4
+54/284770/campos_512_v4
+54/284891/campos_512_v4
+54/284969/campos_512_v4
+55/285073/campos_512_v4
+55/285102/campos_512_v4
+55/285109/campos_512_v4
+55/285141/campos_512_v4
+55/285305/campos_512_v4
+55/285518/campos_512_v4
+55/285609/campos_512_v4
+55/285667/campos_512_v4
+55/285684/campos_512_v4
+55/285802/campos_512_v4
+55/285872/campos_512_v4
+55/285882/campos_512_v4
+55/285910/campos_512_v4
+55/285936/campos_512_v4
+55/285938/campos_512_v4
+55/286042/campos_512_v4
+55/286044/campos_512_v4
+55/286220/campos_512_v4
+55/286222/campos_512_v4
+55/286351/campos_512_v4
+55/286404/campos_512_v4
+55/286443/campos_512_v4
+55/286470/campos_512_v4
+55/286541/campos_512_v4
+55/286607/campos_512_v4
+55/286760/campos_512_v4
+55/286876/campos_512_v4
+55/286923/campos_512_v4
+55/286934/campos_512_v4
+55/286976/campos_512_v4
+55/287052/campos_512_v4
+55/287080/campos_512_v4
+55/287127/campos_512_v4
+55/287225/campos_512_v4
+55/287340/campos_512_v4
+55/287344/campos_512_v4
+55/287382/campos_512_v4
+55/287427/campos_512_v4
+55/287459/campos_512_v4
+55/287571/campos_512_v4
+55/287592/campos_512_v4
+55/287644/campos_512_v4
+55/287659/campos_512_v4
+55/287706/campos_512_v4
+55/287712/campos_512_v4
+55/287759/campos_512_v4
+55/287805/campos_512_v4
+55/287829/campos_512_v4
+55/287859/campos_512_v4
+55/287895/campos_512_v4
+55/287900/campos_512_v4
+55/287915/campos_512_v4
+55/287941/campos_512_v4
+55/287999/campos_512_v4
+55/288041/campos_512_v4
+55/288103/campos_512_v4
+55/288303/campos_512_v4
+55/288316/campos_512_v4
+55/288359/campos_512_v4
+55/288416/campos_512_v4
+55/288457/campos_512_v4
+55/288619/campos_512_v4
+55/288689/campos_512_v4
+55/288748/campos_512_v4
+55/288784/campos_512_v4
+55/288802/campos_512_v4
+55/288823/campos_512_v4
+55/288851/campos_512_v4
+55/288927/campos_512_v4
+55/288944/campos_512_v4
+55/288950/campos_512_v4
+55/288971/campos_512_v4
+55/289059/campos_512_v4
+55/289161/campos_512_v4
+55/289169/campos_512_v4
+55/289218/campos_512_v4
+55/289257/campos_512_v4
+55/289259/campos_512_v4
+55/289432/campos_512_v4
+55/289455/campos_512_v4
+55/289456/campos_512_v4
+55/289465/campos_512_v4
+55/289634/campos_512_v4
+55/289653/campos_512_v4
+55/289831/campos_512_v4
+55/289945/campos_512_v4
+55/289979/campos_512_v4
+56/290026/campos_512_v4
+56/290055/campos_512_v4
+56/290071/campos_512_v4
+56/290110/campos_512_v4
+56/290119/campos_512_v4
+56/290128/campos_512_v4
+56/290203/campos_512_v4
+56/290229/campos_512_v4
+56/290286/campos_512_v4
+56/290292/campos_512_v4
+56/290326/campos_512_v4
+56/290421/campos_512_v4
+56/290455/campos_512_v4
+56/290505/campos_512_v4
+56/290621/campos_512_v4
+56/290689/campos_512_v4
+56/290694/campos_512_v4
+56/290704/campos_512_v4
+56/290722/campos_512_v4
+56/290733/campos_512_v4
+56/290758/campos_512_v4
+56/290799/campos_512_v4
+56/290844/campos_512_v4
+56/290922/campos_512_v4
+56/291158/campos_512_v4
+56/291161/campos_512_v4
+56/291401/campos_512_v4
+56/291455/campos_512_v4
+56/291836/campos_512_v4
+56/291896/campos_512_v4
+56/291903/campos_512_v4
+56/291965/campos_512_v4
+56/292006/campos_512_v4
+56/292051/campos_512_v4
+56/292168/campos_512_v4
+56/292227/campos_512_v4
+56/292379/campos_512_v4
+56/292472/campos_512_v4
+56/292553/campos_512_v4
+56/292586/campos_512_v4
+56/292637/campos_512_v4
+56/292641/campos_512_v4
+56/292655/campos_512_v4
+56/292742/campos_512_v4
+56/292754/campos_512_v4
+56/292878/campos_512_v4
+56/292944/campos_512_v4
+56/293034/campos_512_v4
+56/293173/campos_512_v4
+56/293176/campos_512_v4
+56/293418/campos_512_v4
+56/293481/campos_512_v4
+56/293541/campos_512_v4
+56/293642/campos_512_v4
+56/293668/campos_512_v4
+56/293728/campos_512_v4
+56/293823/campos_512_v4
+56/293863/campos_512_v4
+56/293945/campos_512_v4
+56/294026/campos_512_v4
+56/294092/campos_512_v4
+56/294110/campos_512_v4
+56/294131/campos_512_v4
+56/294135/campos_512_v4
+56/294187/campos_512_v4
+56/294196/campos_512_v4
+56/294259/campos_512_v4
+56/294359/campos_512_v4
+56/294534/campos_512_v4
+56/294558/campos_512_v4
+56/294933/campos_512_v4
+56/294950/campos_512_v4
+56/294952/campos_512_v4
+56/294990/campos_512_v4
+57/295037/campos_512_v4
+57/295053/campos_512_v4
+57/295147/campos_512_v4
+57/295214/campos_512_v4
+57/295312/campos_512_v4
+57/295345/campos_512_v4
+57/295576/campos_512_v4
+57/295622/campos_512_v4
+57/295694/campos_512_v4
+57/295794/campos_512_v4
+57/295795/campos_512_v4
+57/295890/campos_512_v4
+57/295917/campos_512_v4
+57/296001/campos_512_v4
+57/296062/campos_512_v4
+57/296147/campos_512_v4
+57/296424/campos_512_v4
+57/296497/campos_512_v4
+57/296525/campos_512_v4
+57/296546/campos_512_v4
+57/296568/campos_512_v4
+57/296706/campos_512_v4
+57/296789/campos_512_v4
+57/296973/campos_512_v4
+57/297080/campos_512_v4
+57/297116/campos_512_v4
+57/297147/campos_512_v4
+57/297220/campos_512_v4
+57/297271/campos_512_v4
+57/297296/campos_512_v4
+57/297341/campos_512_v4
+57/297346/campos_512_v4
+57/297357/campos_512_v4
+57/297406/campos_512_v4
+57/297455/campos_512_v4
+57/297473/campos_512_v4
+57/297487/campos_512_v4
+57/297621/campos_512_v4
+57/297642/campos_512_v4
+57/297720/campos_512_v4
+57/297753/campos_512_v4
+57/297801/campos_512_v4
+57/297968/campos_512_v4
+57/298015/campos_512_v4
+57/298215/campos_512_v4
+57/298226/campos_512_v4
+57/298274/campos_512_v4
+57/298324/campos_512_v4
+57/298341/campos_512_v4
+57/298394/campos_512_v4
+57/298473/campos_512_v4
+57/298513/campos_512_v4
+57/298540/campos_512_v4
+57/298545/campos_512_v4
+57/298603/campos_512_v4
+57/298605/campos_512_v4
+57/298722/campos_512_v4
+57/298766/campos_512_v4
+57/298811/campos_512_v4
+57/298885/campos_512_v4
+57/298970/campos_512_v4
+57/299038/campos_512_v4
+57/299080/campos_512_v4
+57/299087/campos_512_v4
+57/299092/campos_512_v4
+57/299103/campos_512_v4
+57/299105/campos_512_v4
+57/299230/campos_512_v4
+57/299280/campos_512_v4
+57/299329/campos_512_v4
+57/299362/campos_512_v4
+57/299603/campos_512_v4
+57/299617/campos_512_v4
+57/299688/campos_512_v4
+57/299720/campos_512_v4
+57/299801/campos_512_v4
+57/299878/campos_512_v4
+58/300209/campos_512_v4
+58/300401/campos_512_v4
+58/300433/campos_512_v4
+58/300471/campos_512_v4
+58/300527/campos_512_v4
+58/300536/campos_512_v4
+58/300552/campos_512_v4
+58/300576/campos_512_v4
+58/300645/campos_512_v4
+58/300653/campos_512_v4
+58/300669/campos_512_v4
+58/300715/campos_512_v4
+58/300736/campos_512_v4
+58/300742/campos_512_v4
+58/300786/campos_512_v4
+58/300865/campos_512_v4
+58/300912/campos_512_v4
+58/300916/campos_512_v4
+58/301055/campos_512_v4
+58/301113/campos_512_v4
+58/301155/campos_512_v4
+58/301168/campos_512_v4
+58/301218/campos_512_v4
+58/301316/campos_512_v4
+58/301428/campos_512_v4
+58/301502/campos_512_v4
+58/301597/campos_512_v4
+58/301608/campos_512_v4
+58/301647/campos_512_v4
+58/301682/campos_512_v4
+58/301684/campos_512_v4
+58/301739/campos_512_v4
+58/301781/campos_512_v4
+58/301856/campos_512_v4
+58/301857/campos_512_v4
+58/302208/campos_512_v4
+58/302284/campos_512_v4
+58/302463/campos_512_v4
+58/302472/campos_512_v4
+58/302642/campos_512_v4
+58/302759/campos_512_v4
+58/302769/campos_512_v4
+58/302867/campos_512_v4
+58/302908/campos_512_v4
+58/302910/campos_512_v4
+58/302985/campos_512_v4
+58/303039/campos_512_v4
+58/303067/campos_512_v4
+58/303078/campos_512_v4
+58/303188/campos_512_v4
+58/303205/campos_512_v4
+58/303295/campos_512_v4
+58/303298/campos_512_v4
+58/303299/campos_512_v4
+58/303378/campos_512_v4
+58/303441/campos_512_v4
+58/303524/campos_512_v4
+58/303599/campos_512_v4
+58/303629/campos_512_v4
+58/303679/campos_512_v4
+58/303743/campos_512_v4
+58/303819/campos_512_v4
+58/303887/campos_512_v4
+58/303938/campos_512_v4
+58/304084/campos_512_v4
+58/304117/campos_512_v4
+58/304124/campos_512_v4
+58/304334/campos_512_v4
+58/304348/campos_512_v4
+58/304382/campos_512_v4
+58/304426/campos_512_v4
+58/304496/campos_512_v4
+58/304498/campos_512_v4
+58/304545/campos_512_v4
+58/304563/campos_512_v4
+58/304578/campos_512_v4
+58/304630/campos_512_v4
+58/304684/campos_512_v4
+58/304771/campos_512_v4
+58/304865/campos_512_v4
+58/304959/campos_512_v4
+59/305002/campos_512_v4
+59/305045/campos_512_v4
+59/305071/campos_512_v4
+59/305110/campos_512_v4
+59/305281/campos_512_v4
+59/305312/campos_512_v4
+59/305399/campos_512_v4
+59/305556/campos_512_v4
+59/305580/campos_512_v4
+59/305593/campos_512_v4
+59/305604/campos_512_v4
+59/305620/campos_512_v4
+59/305701/campos_512_v4
+59/305778/campos_512_v4
+59/305976/campos_512_v4
+59/306004/campos_512_v4
+59/306059/campos_512_v4
+59/306170/campos_512_v4
+59/306175/campos_512_v4
+59/306367/campos_512_v4
+59/306555/campos_512_v4
+59/306585/campos_512_v4
+59/306874/campos_512_v4
+59/306930/campos_512_v4
+59/306979/campos_512_v4
+59/307150/campos_512_v4
+59/307154/campos_512_v4
+59/307200/campos_512_v4
+59/307253/campos_512_v4
+59/307277/campos_512_v4
+59/307430/campos_512_v4
+59/307527/campos_512_v4
+59/307655/campos_512_v4
+59/307688/campos_512_v4
+59/307711/campos_512_v4
+59/307760/campos_512_v4
+59/307813/campos_512_v4
+59/307946/campos_512_v4
+59/308000/campos_512_v4
+59/308038/campos_512_v4
+59/308098/campos_512_v4
+59/308108/campos_512_v4
+59/308401/campos_512_v4
+59/308500/campos_512_v4
+59/308588/campos_512_v4
+59/308777/campos_512_v4
+59/308873/campos_512_v4
+59/308920/campos_512_v4
+59/309001/campos_512_v4
+59/309014/campos_512_v4
+59/309152/campos_512_v4
+59/309195/campos_512_v4
+59/309204/campos_512_v4
+59/309233/campos_512_v4
+59/309291/campos_512_v4
+59/309319/campos_512_v4
+59/309376/campos_512_v4
+59/309549/campos_512_v4
+59/309625/campos_512_v4
+59/309695/campos_512_v4
+59/309740/campos_512_v4
+59/309781/campos_512_v4
+59/309822/campos_512_v4
+59/309830/campos_512_v4
+59/309860/campos_512_v4
+59/309875/campos_512_v4
+59/309939/campos_512_v4
+59/309967/campos_512_v4
+59/309969/campos_512_v4
+59/309994/campos_512_v4
+6/40291/campos_512_v4
+6/40327/campos_512_v4
+6/40329/campos_512_v4
+6/40370/campos_512_v4
+6/40410/campos_512_v4
+6/40556/campos_512_v4
+6/40598/campos_512_v4
+6/40652/campos_512_v4
+6/40667/campos_512_v4
+6/40804/campos_512_v4
+6/41005/campos_512_v4
+6/41027/campos_512_v4
+6/41081/campos_512_v4
+6/41150/campos_512_v4
+6/41617/campos_512_v4
+6/41784/campos_512_v4
+6/41809/campos_512_v4
+6/41872/campos_512_v4
+6/42074/campos_512_v4
+6/42113/campos_512_v4
+6/42422/campos_512_v4
+6/42437/campos_512_v4
+6/42591/campos_512_v4
+6/42612/campos_512_v4
+6/42641/campos_512_v4
+6/42775/campos_512_v4
+6/42794/campos_512_v4
+6/43043/campos_512_v4
+6/43119/campos_512_v4
+6/43275/campos_512_v4
+6/43508/campos_512_v4
+6/43593/campos_512_v4
+6/43881/campos_512_v4
+6/43898/campos_512_v4
+6/43909/campos_512_v4
+6/43914/campos_512_v4
+6/43950/campos_512_v4
+6/44001/campos_512_v4
+6/44157/campos_512_v4
+6/44270/campos_512_v4
+6/44418/campos_512_v4
+6/44490/campos_512_v4
+6/44507/campos_512_v4
+6/44549/campos_512_v4
+6/44670/campos_512_v4
+6/44796/campos_512_v4
+6/44841/campos_512_v4
+6/44947/campos_512_v4
+6/44949/campos_512_v4
+6/44974/campos_512_v4
+60/310026/campos_512_v4
+60/310038/campos_512_v4
+60/310064/campos_512_v4
+60/310164/campos_512_v4
+60/310242/campos_512_v4
+60/310355/campos_512_v4
+60/310369/campos_512_v4
+60/310370/campos_512_v4
+60/310445/campos_512_v4
+60/310513/campos_512_v4
+60/310527/campos_512_v4
+60/310563/campos_512_v4
+60/310577/campos_512_v4
+60/310649/campos_512_v4
+60/310675/campos_512_v4
+60/310980/campos_512_v4
+60/310995/campos_512_v4
+60/311172/campos_512_v4
+60/311258/campos_512_v4
+60/311289/campos_512_v4
+60/311334/campos_512_v4
+60/311486/campos_512_v4
+60/311584/campos_512_v4
+60/311651/campos_512_v4
+60/311684/campos_512_v4
+60/311734/campos_512_v4
+60/311783/campos_512_v4
+60/311899/campos_512_v4
+60/311912/campos_512_v4
+60/311988/campos_512_v4
+60/312085/campos_512_v4
+60/312093/campos_512_v4
+60/312134/campos_512_v4
+60/312182/campos_512_v4
+60/312192/campos_512_v4
+60/312322/campos_512_v4
+60/312428/campos_512_v4
+60/312435/campos_512_v4
+60/312550/campos_512_v4
+60/312563/campos_512_v4
+60/312834/campos_512_v4
+60/312881/campos_512_v4
+60/312891/campos_512_v4
+60/312945/campos_512_v4
+60/313098/campos_512_v4
+60/313157/campos_512_v4
+60/313215/campos_512_v4
+60/313320/campos_512_v4
+60/313381/campos_512_v4
+60/313472/campos_512_v4
+60/313501/campos_512_v4
+60/313514/campos_512_v4
+60/313521/campos_512_v4
+60/313534/campos_512_v4
+60/313578/campos_512_v4
+60/313615/campos_512_v4
+60/313668/campos_512_v4
+60/313826/campos_512_v4
+60/313917/campos_512_v4
+60/314081/campos_512_v4
+60/314119/campos_512_v4
+60/314259/campos_512_v4
+60/314294/campos_512_v4
+60/314525/campos_512_v4
+60/314562/campos_512_v4
+60/314660/campos_512_v4
+60/314760/campos_512_v4
+60/314785/campos_512_v4
+60/314802/campos_512_v4
+60/314902/campos_512_v4
+60/314928/campos_512_v4
+60/314991/campos_512_v4
+61/315095/campos_512_v4
+61/315135/campos_512_v4
+61/315170/campos_512_v4
+61/315229/campos_512_v4
+61/315235/campos_512_v4
+61/315305/campos_512_v4
+61/315335/campos_512_v4
+61/315373/campos_512_v4
+61/315561/campos_512_v4
+61/315639/campos_512_v4
+61/315684/campos_512_v4
+61/315722/campos_512_v4
+61/315734/campos_512_v4
+61/315800/campos_512_v4
+61/315824/campos_512_v4
+61/315906/campos_512_v4
+61/315914/campos_512_v4
+61/315956/campos_512_v4
+61/316044/campos_512_v4
+61/316118/campos_512_v4
+61/316124/campos_512_v4
+61/316140/campos_512_v4
+61/316235/campos_512_v4
+61/316337/campos_512_v4
+61/316421/campos_512_v4
+61/316467/campos_512_v4
+61/316593/campos_512_v4
+61/316599/campos_512_v4
+61/316615/campos_512_v4
+61/316623/campos_512_v4
+61/316674/campos_512_v4
+61/316675/campos_512_v4
+61/316702/campos_512_v4
+61/316803/campos_512_v4
+61/316969/campos_512_v4
+61/316978/campos_512_v4
+61/317004/campos_512_v4
+61/317195/campos_512_v4
+61/317229/campos_512_v4
+61/317237/campos_512_v4
+61/317243/campos_512_v4
+61/317274/campos_512_v4
+61/317330/campos_512_v4
+61/317376/campos_512_v4
+61/317378/campos_512_v4
+61/317415/campos_512_v4
+61/317430/campos_512_v4
+61/317533/campos_512_v4
+61/317636/campos_512_v4
+61/317676/campos_512_v4
+61/317683/campos_512_v4
+61/317691/campos_512_v4
+61/317812/campos_512_v4
+61/318154/campos_512_v4
+61/318199/campos_512_v4
+61/318227/campos_512_v4
+61/318320/campos_512_v4
+61/318390/campos_512_v4
+61/318428/campos_512_v4
+61/318556/campos_512_v4
+61/318724/campos_512_v4
+61/318821/campos_512_v4
+61/318836/campos_512_v4
+61/318847/campos_512_v4
+61/318879/campos_512_v4
+61/318940/campos_512_v4
+61/319042/campos_512_v4
+61/319230/campos_512_v4
+61/319294/campos_512_v4
+61/319342/campos_512_v4
+61/319447/campos_512_v4
+61/319503/campos_512_v4
+61/319550/campos_512_v4
+61/319626/campos_512_v4
+61/319650/campos_512_v4
+61/319654/campos_512_v4
+61/319950/campos_512_v4
+61/319971/campos_512_v4
+61/319992/campos_512_v4
+62/320009/campos_512_v4
+62/320022/campos_512_v4
+62/320109/campos_512_v4
+62/320290/campos_512_v4
+62/320413/campos_512_v4
+62/320414/campos_512_v4
+62/320611/campos_512_v4
+62/320796/campos_512_v4
+62/320844/campos_512_v4
+62/320855/campos_512_v4
+62/320955/campos_512_v4
+62/321003/campos_512_v4
+62/321074/campos_512_v4
+62/321138/campos_512_v4
+62/321163/campos_512_v4
+62/321181/campos_512_v4
+62/321253/campos_512_v4
+62/321347/campos_512_v4
+62/321354/campos_512_v4
+62/321404/campos_512_v4
+62/321411/campos_512_v4
+62/321564/campos_512_v4
+62/321631/campos_512_v4
+62/321677/campos_512_v4
+62/321712/campos_512_v4
+62/321739/campos_512_v4
+62/321799/campos_512_v4
+62/321985/campos_512_v4
+62/322007/campos_512_v4
+62/322280/campos_512_v4
+62/322291/campos_512_v4
+62/322384/campos_512_v4
+62/322433/campos_512_v4
+62/322437/campos_512_v4
+62/322639/campos_512_v4
+62/322657/campos_512_v4
+62/322746/campos_512_v4
+62/322824/campos_512_v4
+62/322828/campos_512_v4
+62/322830/campos_512_v4
+62/322877/campos_512_v4
+62/322958/campos_512_v4
+62/323085/campos_512_v4
+62/323243/campos_512_v4
+62/323258/campos_512_v4
+62/323343/campos_512_v4
+62/323488/campos_512_v4
+62/323528/campos_512_v4
+62/323667/campos_512_v4
+62/323743/campos_512_v4
+62/323769/campos_512_v4
+62/323833/campos_512_v4
+62/323843/campos_512_v4
+62/324125/campos_512_v4
+62/324252/campos_512_v4
+62/324316/campos_512_v4
+62/324521/campos_512_v4
+62/324642/campos_512_v4
+62/324773/campos_512_v4
+62/324850/campos_512_v4
+62/324882/campos_512_v4
+62/324936/campos_512_v4
+63/325127/campos_512_v4
+63/325217/campos_512_v4
+63/325344/campos_512_v4
+63/325461/campos_512_v4
+63/325528/campos_512_v4
+63/325611/campos_512_v4
+63/325631/campos_512_v4
+63/325778/campos_512_v4
+63/325959/campos_512_v4
+63/325962/campos_512_v4
+63/326019/campos_512_v4
+63/326049/campos_512_v4
+63/326274/campos_512_v4
+63/326334/campos_512_v4
+63/326339/campos_512_v4
+63/326534/campos_512_v4
+63/326621/campos_512_v4
+63/326758/campos_512_v4
+63/326790/campos_512_v4
+63/326845/campos_512_v4
+63/326893/campos_512_v4
+63/326996/campos_512_v4
+63/327028/campos_512_v4
+63/327071/campos_512_v4
+63/327112/campos_512_v4
+63/327165/campos_512_v4
+63/327284/campos_512_v4
+63/327365/campos_512_v4
+63/327529/campos_512_v4
+63/327762/campos_512_v4
+63/327824/campos_512_v4
+63/327830/campos_512_v4
+63/327897/campos_512_v4
+63/327942/campos_512_v4
+63/327947/campos_512_v4
+63/327962/campos_512_v4
+63/327980/campos_512_v4
+63/327995/campos_512_v4
+63/328040/campos_512_v4
+63/328079/campos_512_v4
+63/328109/campos_512_v4
+63/328158/campos_512_v4
+63/328367/campos_512_v4
+63/328449/campos_512_v4
+63/328487/campos_512_v4
+63/328613/campos_512_v4
+63/328664/campos_512_v4
+63/328746/campos_512_v4
+63/328763/campos_512_v4
+63/328802/campos_512_v4
+63/328813/campos_512_v4
+63/328925/campos_512_v4
+63/328945/campos_512_v4
+63/329078/campos_512_v4
+63/329113/campos_512_v4
+63/329139/campos_512_v4
+63/329162/campos_512_v4
+63/329185/campos_512_v4
+63/329206/campos_512_v4
+63/329278/campos_512_v4
+63/329460/campos_512_v4
+63/329480/campos_512_v4
+63/329536/campos_512_v4
+63/329672/campos_512_v4
+63/329701/campos_512_v4
+63/329704/campos_512_v4
+63/329934/campos_512_v4
+64/330046/campos_512_v4
+64/330176/campos_512_v4
+64/330279/campos_512_v4
+64/330744/campos_512_v4
+64/330783/campos_512_v4
+64/330851/campos_512_v4
+64/331123/campos_512_v4
+64/331136/campos_512_v4
+64/331153/campos_512_v4
+64/331232/campos_512_v4
+64/331304/campos_512_v4
+64/331332/campos_512_v4
+64/331404/campos_512_v4
+64/331534/campos_512_v4
+64/331579/campos_512_v4
+64/331900/campos_512_v4
+64/331969/campos_512_v4
+64/332007/campos_512_v4
+64/332026/campos_512_v4
+64/332028/campos_512_v4
+64/332029/campos_512_v4
+64/332137/campos_512_v4
+64/332318/campos_512_v4
+64/332385/campos_512_v4
+64/332465/campos_512_v4
+64/332510/campos_512_v4
+64/332529/campos_512_v4
+64/332627/campos_512_v4
+64/332719/campos_512_v4
+64/332741/campos_512_v4
+64/332807/campos_512_v4
+64/332868/campos_512_v4
+64/332877/campos_512_v4
+64/332891/campos_512_v4
+64/332947/campos_512_v4
+64/332954/campos_512_v4
+64/333039/campos_512_v4
+64/333057/campos_512_v4
+64/333130/campos_512_v4
+64/333169/campos_512_v4
+64/333172/campos_512_v4
+64/333179/campos_512_v4
+64/333225/campos_512_v4
+64/333294/campos_512_v4
+64/333300/campos_512_v4
+64/333455/campos_512_v4
+64/333586/campos_512_v4
+64/333611/campos_512_v4
+64/333654/campos_512_v4
+64/333685/campos_512_v4
+64/333797/campos_512_v4
+64/333942/campos_512_v4
+64/334035/campos_512_v4
+64/334053/campos_512_v4
+64/334064/campos_512_v4
+64/334076/campos_512_v4
+64/334084/campos_512_v4
+64/334118/campos_512_v4
+64/334150/campos_512_v4
+64/334152/campos_512_v4
+64/334241/campos_512_v4
+64/334243/campos_512_v4
+64/334308/campos_512_v4
+64/334399/campos_512_v4
+64/334411/campos_512_v4
+64/334470/campos_512_v4
+64/334489/campos_512_v4
+64/334532/campos_512_v4
+64/334544/campos_512_v4
+64/334583/campos_512_v4
+64/334620/campos_512_v4
+64/334645/campos_512_v4
+64/334845/campos_512_v4
+64/334909/campos_512_v4
+65/335034/campos_512_v4
+65/335078/campos_512_v4
+65/335183/campos_512_v4
+65/335283/campos_512_v4
+65/335329/campos_512_v4
+65/335383/campos_512_v4
+65/335429/campos_512_v4
+65/335473/campos_512_v4
+65/335478/campos_512_v4
+65/335579/campos_512_v4
+65/335614/campos_512_v4
+65/335698/campos_512_v4
+65/335778/campos_512_v4
+65/335964/campos_512_v4
+65/336016/campos_512_v4
+65/336201/campos_512_v4
+65/336216/campos_512_v4
+65/336346/campos_512_v4
+65/336360/campos_512_v4
+65/336366/campos_512_v4
+65/336392/campos_512_v4
+65/336399/campos_512_v4
+65/336402/campos_512_v4
+65/336615/campos_512_v4
+65/336707/campos_512_v4
+65/336748/campos_512_v4
+65/336817/campos_512_v4
+65/336899/campos_512_v4
+65/337072/campos_512_v4
+65/337188/campos_512_v4
+65/337337/campos_512_v4
+65/337447/campos_512_v4
+65/337501/campos_512_v4
+65/337551/campos_512_v4
+65/337569/campos_512_v4
+65/337577/campos_512_v4
+65/337660/campos_512_v4
+65/337858/campos_512_v4
+65/338012/campos_512_v4
+65/338036/campos_512_v4
+65/338061/campos_512_v4
+65/338069/campos_512_v4
+65/338124/campos_512_v4
+65/338179/campos_512_v4
+65/338271/campos_512_v4
+65/338436/campos_512_v4
+65/338517/campos_512_v4
+65/338611/campos_512_v4
+65/338623/campos_512_v4
+65/338720/campos_512_v4
+65/338831/campos_512_v4
+65/338964/campos_512_v4
+65/339070/campos_512_v4
+65/339094/campos_512_v4
+65/339121/campos_512_v4
+65/339172/campos_512_v4
+65/339343/campos_512_v4
+65/339576/campos_512_v4
+65/339598/campos_512_v4
+65/339641/campos_512_v4
+65/339645/campos_512_v4
+65/339678/campos_512_v4
+65/339821/campos_512_v4
+65/339826/campos_512_v4
+65/339844/campos_512_v4
+65/339973/campos_512_v4
+66/340007/campos_512_v4
+66/340114/campos_512_v4
+66/340175/campos_512_v4
+66/340214/campos_512_v4
+66/340215/campos_512_v4
+66/340430/campos_512_v4
+66/340637/campos_512_v4
+66/340645/campos_512_v4
+66/340654/campos_512_v4
+66/340676/campos_512_v4
+66/340781/campos_512_v4
+66/341020/campos_512_v4
+66/341048/campos_512_v4
+66/341153/campos_512_v4
+66/341417/campos_512_v4
+66/341541/campos_512_v4
+66/341556/campos_512_v4
+66/341609/campos_512_v4
+66/341620/campos_512_v4
+66/341668/campos_512_v4
+66/341695/campos_512_v4
+66/341758/campos_512_v4
+66/341819/campos_512_v4
+66/341875/campos_512_v4
+66/341948/campos_512_v4
+66/342025/campos_512_v4
+66/342047/campos_512_v4
+66/342083/campos_512_v4
+66/342141/campos_512_v4
+66/342190/campos_512_v4
+66/342250/campos_512_v4
+66/342268/campos_512_v4
+66/342362/campos_512_v4
+66/342454/campos_512_v4
+66/342520/campos_512_v4
+66/342625/campos_512_v4
+66/342691/campos_512_v4
+66/342743/campos_512_v4
+66/342787/campos_512_v4
+66/342876/campos_512_v4
+66/343003/campos_512_v4
+66/343024/campos_512_v4
+66/343090/campos_512_v4
+66/343151/campos_512_v4
+66/343175/campos_512_v4
+66/343203/campos_512_v4
+66/343248/campos_512_v4
+66/343264/campos_512_v4
+66/343291/campos_512_v4
+66/343316/campos_512_v4
+66/343446/campos_512_v4
+66/343461/campos_512_v4
+66/343642/campos_512_v4
+66/343745/campos_512_v4
+66/343762/campos_512_v4
+66/343769/campos_512_v4
+66/343774/campos_512_v4
+66/343794/campos_512_v4
+66/343814/campos_512_v4
+66/343815/campos_512_v4
+66/343897/campos_512_v4
+66/343927/campos_512_v4
+66/343938/campos_512_v4
+66/344020/campos_512_v4
+66/344097/campos_512_v4
+66/344227/campos_512_v4
+66/344277/campos_512_v4
+66/344328/campos_512_v4
+66/344428/campos_512_v4
+66/344553/campos_512_v4
+66/344633/campos_512_v4
+66/344725/campos_512_v4
+66/344747/campos_512_v4
+66/344750/campos_512_v4
+66/344785/campos_512_v4
+66/344857/campos_512_v4
+66/344862/campos_512_v4
+66/344864/campos_512_v4
+66/344994/campos_512_v4
+67/345121/campos_512_v4
+67/345131/campos_512_v4
+67/345159/campos_512_v4
+67/345269/campos_512_v4
+67/345299/campos_512_v4
+67/345454/campos_512_v4
+67/345552/campos_512_v4
+67/345657/campos_512_v4
+67/345689/campos_512_v4
+67/345707/campos_512_v4
+67/345713/campos_512_v4
+67/345758/campos_512_v4
+67/345777/campos_512_v4
+67/345897/campos_512_v4
+67/345944/campos_512_v4
+67/345992/campos_512_v4
+67/346032/campos_512_v4
+67/346042/campos_512_v4
+67/346053/campos_512_v4
+67/346091/campos_512_v4
+67/346114/campos_512_v4
+67/346208/campos_512_v4
+67/346250/campos_512_v4
+67/346328/campos_512_v4
+67/346464/campos_512_v4
+67/346560/campos_512_v4
+67/346645/campos_512_v4
+67/346673/campos_512_v4
+67/346726/campos_512_v4
+67/346881/campos_512_v4
+67/346882/campos_512_v4
+67/346922/campos_512_v4
+67/346944/campos_512_v4
+67/347082/campos_512_v4
+67/347102/campos_512_v4
+67/347111/campos_512_v4
+67/347131/campos_512_v4
+67/347180/campos_512_v4
+67/347288/campos_512_v4
+67/347305/campos_512_v4
+67/347470/campos_512_v4
+67/347635/campos_512_v4
+67/347639/campos_512_v4
+67/347678/campos_512_v4
+67/347829/campos_512_v4
+67/347944/campos_512_v4
+67/348002/campos_512_v4
+67/348128/campos_512_v4
+67/348175/campos_512_v4
+67/348181/campos_512_v4
+67/348368/campos_512_v4
+67/348531/campos_512_v4
+67/348626/campos_512_v4
+67/348790/campos_512_v4
+67/348809/campos_512_v4
+67/348810/campos_512_v4
+67/348888/campos_512_v4
+67/348918/campos_512_v4
+67/348986/campos_512_v4
+67/349032/campos_512_v4
+67/349067/campos_512_v4
+67/349098/campos_512_v4
+67/349237/campos_512_v4
+67/349363/campos_512_v4
+67/349392/campos_512_v4
+67/349402/campos_512_v4
+67/349512/campos_512_v4
+67/349554/campos_512_v4
+67/349579/campos_512_v4
+67/349715/campos_512_v4
+67/349893/campos_512_v4
+68/350018/campos_512_v4
+68/350165/campos_512_v4
+68/350167/campos_512_v4
+68/350245/campos_512_v4
+68/350253/campos_512_v4
+68/350266/campos_512_v4
+68/350357/campos_512_v4
+68/350359/campos_512_v4
+68/350470/campos_512_v4
+68/350547/campos_512_v4
+68/350558/campos_512_v4
+68/350620/campos_512_v4
+68/350731/campos_512_v4
+68/350750/campos_512_v4
+68/350869/campos_512_v4
+68/350917/campos_512_v4
+68/350939/campos_512_v4
+68/350955/campos_512_v4
+68/350964/campos_512_v4
+68/351053/campos_512_v4
+68/351275/campos_512_v4
+68/351283/campos_512_v4
+68/351325/campos_512_v4
+68/351372/campos_512_v4
+68/351410/campos_512_v4
+68/351449/campos_512_v4
+68/351748/campos_512_v4
+68/351760/campos_512_v4
+68/351942/campos_512_v4
+68/351977/campos_512_v4
+68/352189/campos_512_v4
+68/352421/campos_512_v4
+68/352568/campos_512_v4
+68/352619/campos_512_v4
+68/352761/campos_512_v4
+68/352765/campos_512_v4
+68/353037/campos_512_v4
+68/353273/campos_512_v4
+68/353309/campos_512_v4
+68/353472/campos_512_v4
+68/353560/campos_512_v4
+68/353615/campos_512_v4
+68/353647/campos_512_v4
+68/353707/campos_512_v4
+68/353752/campos_512_v4
+68/353761/campos_512_v4
+68/353835/campos_512_v4
+68/353867/campos_512_v4
+68/353941/campos_512_v4
+68/354058/campos_512_v4
+68/354066/campos_512_v4
+68/354081/campos_512_v4
+68/354172/campos_512_v4
+68/354308/campos_512_v4
+68/354426/campos_512_v4
+68/354498/campos_512_v4
+68/354553/campos_512_v4
+68/354594/campos_512_v4
+68/354606/campos_512_v4
+68/354806/campos_512_v4
+68/354877/campos_512_v4
+68/354920/campos_512_v4
+68/354934/campos_512_v4
+69/355008/campos_512_v4
+69/355110/campos_512_v4
+69/355194/campos_512_v4
+69/355202/campos_512_v4
+69/355242/campos_512_v4
+69/355256/campos_512_v4
+69/355316/campos_512_v4
+69/355320/campos_512_v4
+69/355382/campos_512_v4
+69/355415/campos_512_v4
+69/355517/campos_512_v4
+69/355568/campos_512_v4
+69/355591/campos_512_v4
+69/355765/campos_512_v4
+69/355818/campos_512_v4
+69/355963/campos_512_v4
+69/356032/campos_512_v4
+69/356073/campos_512_v4
+69/356281/campos_512_v4
+69/356343/campos_512_v4
+69/356344/campos_512_v4
+69/356392/campos_512_v4
+69/356396/campos_512_v4
+69/356407/campos_512_v4
+69/356409/campos_512_v4
+69/356433/campos_512_v4
+69/356463/campos_512_v4
+69/356626/campos_512_v4
+69/356806/campos_512_v4
+69/356836/campos_512_v4
+69/356856/campos_512_v4
+69/356872/campos_512_v4
+69/357001/campos_512_v4
+69/357090/campos_512_v4
+69/357166/campos_512_v4
+69/357506/campos_512_v4
+69/357609/campos_512_v4
+69/357629/campos_512_v4
+69/357639/campos_512_v4
+69/357702/campos_512_v4
+69/357812/campos_512_v4
+69/357842/campos_512_v4
+69/357902/campos_512_v4
+69/357986/campos_512_v4
+69/358012/campos_512_v4
+69/358023/campos_512_v4
+69/358049/campos_512_v4
+69/358102/campos_512_v4
+69/358222/campos_512_v4
+69/358259/campos_512_v4
+69/358298/campos_512_v4
+69/358347/campos_512_v4
+69/358352/campos_512_v4
+69/358481/campos_512_v4
+69/358525/campos_512_v4
+69/358748/campos_512_v4
+69/358749/campos_512_v4
+69/358987/campos_512_v4
+69/359036/campos_512_v4
+69/359099/campos_512_v4
+69/359118/campos_512_v4
+69/359207/campos_512_v4
+69/359287/campos_512_v4
+69/359348/campos_512_v4
+69/359508/campos_512_v4
+69/359516/campos_512_v4
+69/359623/campos_512_v4
+69/359647/campos_512_v4
+69/359731/campos_512_v4
+69/359783/campos_512_v4
+69/359860/campos_512_v4
+69/359914/campos_512_v4
+69/359942/campos_512_v4
+69/359961/campos_512_v4
+69/359974/campos_512_v4
+7/45063/campos_512_v4
+7/45071/campos_512_v4
+7/45205/campos_512_v4
+7/45216/campos_512_v4
+7/45304/campos_512_v4
+7/45574/campos_512_v4
+7/45654/campos_512_v4
+7/45769/campos_512_v4
+7/45788/campos_512_v4
+7/45801/campos_512_v4
+7/45832/campos_512_v4
+7/45876/campos_512_v4
+7/45892/campos_512_v4
+7/45997/campos_512_v4
+7/46092/campos_512_v4
+7/46514/campos_512_v4
+7/46549/campos_512_v4
+7/46590/campos_512_v4
+7/46596/campos_512_v4
+7/46789/campos_512_v4
+7/46845/campos_512_v4
+7/46885/campos_512_v4
+7/46934/campos_512_v4
+7/46997/campos_512_v4
+7/47008/campos_512_v4
+7/47113/campos_512_v4
+7/47237/campos_512_v4
+7/47513/campos_512_v4
+7/47661/campos_512_v4
+7/47834/campos_512_v4
+7/47835/campos_512_v4
+7/47921/campos_512_v4
+7/47966/campos_512_v4
+7/47980/campos_512_v4
+7/48044/campos_512_v4
+7/48118/campos_512_v4
+7/48196/campos_512_v4
+7/48236/campos_512_v4
+7/48315/campos_512_v4
+7/48393/campos_512_v4
+7/48424/campos_512_v4
+7/48606/campos_512_v4
+7/48614/campos_512_v4
+7/48689/campos_512_v4
+7/48819/campos_512_v4
+7/48973/campos_512_v4
+7/49126/campos_512_v4
+7/49188/campos_512_v4
+7/49228/campos_512_v4
+7/49589/campos_512_v4
+7/49643/campos_512_v4
+7/49665/campos_512_v4
+7/49721/campos_512_v4
+7/49783/campos_512_v4
+7/49810/campos_512_v4
+7/49843/campos_512_v4
+7/49919/campos_512_v4
+7/49986/campos_512_v4
+70/360124/campos_512_v4
+70/360222/campos_512_v4
+70/360241/campos_512_v4
+70/360279/campos_512_v4
+70/360320/campos_512_v4
+70/360464/campos_512_v4
+70/360472/campos_512_v4
+70/360483/campos_512_v4
+70/360556/campos_512_v4
+70/360672/campos_512_v4
+70/360954/campos_512_v4
+70/360963/campos_512_v4
+70/360997/campos_512_v4
+70/361006/campos_512_v4
+70/361037/campos_512_v4
+70/361115/campos_512_v4
+70/361243/campos_512_v4
+70/361355/campos_512_v4
+70/361368/campos_512_v4
+70/361392/campos_512_v4
+70/361478/campos_512_v4
+70/361543/campos_512_v4
+70/361647/campos_512_v4
+70/361753/campos_512_v4
+70/361813/campos_512_v4
+70/361843/campos_512_v4
+70/361903/campos_512_v4
+70/362115/campos_512_v4
+70/362299/campos_512_v4
+70/362321/campos_512_v4
+70/362380/campos_512_v4
+70/362384/campos_512_v4
+70/362476/campos_512_v4
+70/362500/campos_512_v4
+70/362622/campos_512_v4
+70/362688/campos_512_v4
+70/362783/campos_512_v4
+70/362808/campos_512_v4
+70/362810/campos_512_v4
+70/362813/campos_512_v4
+70/362890/campos_512_v4
+70/362955/campos_512_v4
+70/362990/campos_512_v4
+70/363030/campos_512_v4
+70/363033/campos_512_v4
+70/363037/campos_512_v4
+70/363061/campos_512_v4
+70/363179/campos_512_v4
+70/363198/campos_512_v4
+70/363237/campos_512_v4
+70/363242/campos_512_v4
+70/363310/campos_512_v4
+70/363319/campos_512_v4
+70/363453/campos_512_v4
+70/363468/campos_512_v4
+70/363557/campos_512_v4
+70/363568/campos_512_v4
+70/363641/campos_512_v4
+70/363685/campos_512_v4
+70/363703/campos_512_v4
+70/363730/campos_512_v4
+70/363746/campos_512_v4
+70/364005/campos_512_v4
+70/364091/campos_512_v4
+70/364124/campos_512_v4
+70/364173/campos_512_v4
+70/364183/campos_512_v4
+70/364257/campos_512_v4
+70/364266/campos_512_v4
+70/364271/campos_512_v4
+70/364656/campos_512_v4
+70/364842/campos_512_v4
+70/364923/campos_512_v4
+70/365000/campos_512_v4
+71/365071/campos_512_v4
+71/365237/campos_512_v4
+71/365299/campos_512_v4
+71/365340/campos_512_v4
+71/365348/campos_512_v4
+71/365365/campos_512_v4
+71/365484/campos_512_v4
+71/365519/campos_512_v4
+71/365551/campos_512_v4
+71/365577/campos_512_v4
+71/365588/campos_512_v4
+71/365669/campos_512_v4
+71/365721/campos_512_v4
+71/365781/campos_512_v4
+71/365804/campos_512_v4
+71/365923/campos_512_v4
+71/365990/campos_512_v4
+71/366037/campos_512_v4
+71/366055/campos_512_v4
+71/366059/campos_512_v4
+71/366084/campos_512_v4
+71/366233/campos_512_v4
+71/366436/campos_512_v4
+71/366474/campos_512_v4
+71/366534/campos_512_v4
+71/366561/campos_512_v4
+71/366644/campos_512_v4
+71/366662/campos_512_v4
+71/366792/campos_512_v4
+71/366840/campos_512_v4
+71/366859/campos_512_v4
+71/366911/campos_512_v4
+71/367017/campos_512_v4
+71/367023/campos_512_v4
+71/367033/campos_512_v4
+71/367104/campos_512_v4
+71/367155/campos_512_v4
+71/367283/campos_512_v4
+71/367395/campos_512_v4
+71/367511/campos_512_v4
+71/367626/campos_512_v4
+71/367658/campos_512_v4
+71/367714/campos_512_v4
+71/367929/campos_512_v4
+71/367994/campos_512_v4
+71/368053/campos_512_v4
+71/368104/campos_512_v4
+71/368140/campos_512_v4
+71/368255/campos_512_v4
+71/368268/campos_512_v4
+71/368334/campos_512_v4
+71/368335/campos_512_v4
+71/368378/campos_512_v4
+71/368451/campos_512_v4
+71/368622/campos_512_v4
+71/368623/campos_512_v4
+71/368626/campos_512_v4
+71/368636/campos_512_v4
+71/368700/campos_512_v4
+71/369009/campos_512_v4
+71/369140/campos_512_v4
+71/369321/campos_512_v4
+71/369365/campos_512_v4
+71/369381/campos_512_v4
+71/369436/campos_512_v4
+71/369468/campos_512_v4
+71/369513/campos_512_v4
+71/369635/campos_512_v4
+71/369643/campos_512_v4
+71/369718/campos_512_v4
+71/369746/campos_512_v4
+71/369759/campos_512_v4
+72/370345/campos_512_v4
+72/370474/campos_512_v4
+72/370665/campos_512_v4
+72/370959/campos_512_v4
+72/371118/campos_512_v4
+72/371135/campos_512_v4
+72/371175/campos_512_v4
+72/371221/campos_512_v4
+72/371248/campos_512_v4
+72/371271/campos_512_v4
+72/371440/campos_512_v4
+72/371540/campos_512_v4
+72/371610/campos_512_v4
+72/371662/campos_512_v4
+72/371700/campos_512_v4
+72/371706/campos_512_v4
+72/371951/campos_512_v4
+72/371976/campos_512_v4
+72/371977/campos_512_v4
+72/372122/campos_512_v4
+72/372253/campos_512_v4
+72/372261/campos_512_v4
+72/372323/campos_512_v4
+72/372355/campos_512_v4
+72/372504/campos_512_v4
+72/372514/campos_512_v4
+72/372547/campos_512_v4
+72/372565/campos_512_v4
+72/372582/campos_512_v4
+72/372605/campos_512_v4
+72/372606/campos_512_v4
+72/372665/campos_512_v4
+72/372684/campos_512_v4
+72/372723/campos_512_v4
+72/372744/campos_512_v4
+72/372765/campos_512_v4
+72/372824/campos_512_v4
+72/372845/campos_512_v4
+72/372848/campos_512_v4
+72/372934/campos_512_v4
+72/372998/campos_512_v4
+72/373052/campos_512_v4
+72/373085/campos_512_v4
+72/373117/campos_512_v4
+72/373154/campos_512_v4
+72/373239/campos_512_v4
+72/373339/campos_512_v4
+72/373383/campos_512_v4
+72/373462/campos_512_v4
+72/373516/campos_512_v4
+72/373526/campos_512_v4
+72/373552/campos_512_v4
+72/373553/campos_512_v4
+72/373579/campos_512_v4
+72/373627/campos_512_v4
+72/373683/campos_512_v4
+72/373740/campos_512_v4
+72/373756/campos_512_v4
+72/373770/campos_512_v4
+72/373772/campos_512_v4
+72/373872/campos_512_v4
+72/373957/campos_512_v4
+72/373998/campos_512_v4
+72/374038/campos_512_v4
+72/374049/campos_512_v4
+72/374069/campos_512_v4
+72/374155/campos_512_v4
+72/374156/campos_512_v4
+72/374181/campos_512_v4
+72/374256/campos_512_v4
+72/374371/campos_512_v4
+72/374443/campos_512_v4
+72/374448/campos_512_v4
+72/374451/campos_512_v4
+72/374508/campos_512_v4
+72/374537/campos_512_v4
+72/374587/campos_512_v4
+72/374611/campos_512_v4
+72/374725/campos_512_v4
+72/374730/campos_512_v4
+72/374792/campos_512_v4
+73/375058/campos_512_v4
+73/375111/campos_512_v4
+73/375134/campos_512_v4
+73/375230/campos_512_v4
+73/375300/campos_512_v4
+73/375347/campos_512_v4
+73/375659/campos_512_v4
+73/375792/campos_512_v4
+73/376025/campos_512_v4
+73/376072/campos_512_v4
+73/376427/campos_512_v4
+73/376458/campos_512_v4
+73/376468/campos_512_v4
+73/376564/campos_512_v4
+73/376735/campos_512_v4
+73/376743/campos_512_v4
+73/376952/campos_512_v4
+73/377020/campos_512_v4
+73/377028/campos_512_v4
+73/377194/campos_512_v4
+73/377339/campos_512_v4
+73/377442/campos_512_v4
+73/377477/campos_512_v4
+73/377535/campos_512_v4
+73/377592/campos_512_v4
+73/377596/campos_512_v4
+73/377684/campos_512_v4
+73/377696/campos_512_v4
+73/377699/campos_512_v4
+73/377706/campos_512_v4
+73/377708/campos_512_v4
+73/377741/campos_512_v4
+73/377781/campos_512_v4
+73/377793/campos_512_v4
+73/377841/campos_512_v4
+73/377858/campos_512_v4
+73/377986/campos_512_v4
+73/378040/campos_512_v4
+73/378192/campos_512_v4
+73/378196/campos_512_v4
+73/378246/campos_512_v4
+73/378272/campos_512_v4
+73/378281/campos_512_v4
+73/378293/campos_512_v4
+73/378410/campos_512_v4
+73/378444/campos_512_v4
+73/378462/campos_512_v4
+73/378532/campos_512_v4
+73/378562/campos_512_v4
+73/378566/campos_512_v4
+73/378618/campos_512_v4
+73/378680/campos_512_v4
+73/378719/campos_512_v4
+73/378934/campos_512_v4
+73/378951/campos_512_v4
+73/379016/campos_512_v4
+73/379025/campos_512_v4
+73/379050/campos_512_v4
+73/379102/campos_512_v4
+73/379123/campos_512_v4
+73/379227/campos_512_v4
+73/379505/campos_512_v4
+73/379644/campos_512_v4
+73/379717/campos_512_v4
+73/379811/campos_512_v4
+73/379926/campos_512_v4
+73/379952/campos_512_v4
+73/379963/campos_512_v4
+74/380180/campos_512_v4
+74/380230/campos_512_v4
+74/380235/campos_512_v4
+74/380263/campos_512_v4
+74/380283/campos_512_v4
+74/380345/campos_512_v4
+74/380378/campos_512_v4
+74/380386/campos_512_v4
+74/380391/campos_512_v4
+74/380398/campos_512_v4
+74/380531/campos_512_v4
+74/380708/campos_512_v4
+74/380728/campos_512_v4
+74/380818/campos_512_v4
+74/380860/campos_512_v4
+74/380884/campos_512_v4
+74/381207/campos_512_v4
+74/381244/campos_512_v4
+74/381278/campos_512_v4
+74/381284/campos_512_v4
+74/381293/campos_512_v4
+74/381333/campos_512_v4
+74/381351/campos_512_v4
+74/381479/campos_512_v4
+74/381556/campos_512_v4
+74/381892/campos_512_v4
+74/381952/campos_512_v4
+74/381966/campos_512_v4
+74/382087/campos_512_v4
+74/382110/campos_512_v4
+74/382122/campos_512_v4
+74/382159/campos_512_v4
+74/382184/campos_512_v4
+74/382305/campos_512_v4
+74/382443/campos_512_v4
+74/382465/campos_512_v4
+74/382488/campos_512_v4
+74/382593/campos_512_v4
+74/382631/campos_512_v4
+74/382667/campos_512_v4
+74/382690/campos_512_v4
+74/382697/campos_512_v4
+74/382752/campos_512_v4
+74/382807/campos_512_v4
+74/382830/campos_512_v4
+74/383023/campos_512_v4
+74/383037/campos_512_v4
+74/383077/campos_512_v4
+74/383170/campos_512_v4
+74/383286/campos_512_v4
+74/383358/campos_512_v4
+74/383482/campos_512_v4
+74/383485/campos_512_v4
+74/383556/campos_512_v4
+74/383647/campos_512_v4
+74/383673/campos_512_v4
+74/383693/campos_512_v4
+74/383783/campos_512_v4
+74/383808/campos_512_v4
+74/383840/campos_512_v4
+74/384042/campos_512_v4
+74/384093/campos_512_v4
+74/384202/campos_512_v4
+74/384237/campos_512_v4
+74/384543/campos_512_v4
+74/384560/campos_512_v4
+74/384586/campos_512_v4
+74/384734/campos_512_v4
+74/384746/campos_512_v4
+74/384781/campos_512_v4
+74/384791/campos_512_v4
+74/384798/campos_512_v4
+74/384803/campos_512_v4
+74/384933/campos_512_v4
+74/384966/campos_512_v4
+74/384967/campos_512_v4
+75/385012/campos_512_v4
+75/385029/campos_512_v4
+75/385086/campos_512_v4
+75/385170/campos_512_v4
+75/385196/campos_512_v4
+75/385202/campos_512_v4
+75/385222/campos_512_v4
+75/385223/campos_512_v4
+75/385335/campos_512_v4
+75/385447/campos_512_v4
+75/385543/campos_512_v4
+75/385589/campos_512_v4
+75/385622/campos_512_v4
+75/385633/campos_512_v4
+75/385700/campos_512_v4
+75/385733/campos_512_v4
+75/385759/campos_512_v4
+75/385775/campos_512_v4
+75/386040/campos_512_v4
+75/386201/campos_512_v4
+75/386227/campos_512_v4
+75/386280/campos_512_v4
+75/386329/campos_512_v4
+75/386338/campos_512_v4
+75/386364/campos_512_v4
+75/386510/campos_512_v4
+75/386552/campos_512_v4
+75/386607/campos_512_v4
+75/386829/campos_512_v4
+75/386837/campos_512_v4
+75/386958/campos_512_v4
+75/387026/campos_512_v4
+75/387104/campos_512_v4
+75/387134/campos_512_v4
+75/387150/campos_512_v4
+75/387162/campos_512_v4
+75/387187/campos_512_v4
+75/387260/campos_512_v4
+75/387271/campos_512_v4
+75/387574/campos_512_v4
+75/387610/campos_512_v4
+75/387644/campos_512_v4
+75/387646/campos_512_v4
+75/387752/campos_512_v4
+75/387767/campos_512_v4
+75/387788/campos_512_v4
+75/387928/campos_512_v4
+75/387961/campos_512_v4
+75/387987/campos_512_v4
+75/388068/campos_512_v4
+75/388188/campos_512_v4
+75/388263/campos_512_v4
+75/388265/campos_512_v4
+75/388381/campos_512_v4
+75/388387/campos_512_v4
+75/388403/campos_512_v4
+75/388438/campos_512_v4
+75/388495/campos_512_v4
+75/388618/campos_512_v4
+75/388728/campos_512_v4
+75/388817/campos_512_v4
+75/388821/campos_512_v4
+75/389000/campos_512_v4
+75/389036/campos_512_v4
+75/389338/campos_512_v4
+75/389416/campos_512_v4
+75/389479/campos_512_v4
+75/389517/campos_512_v4
+75/389570/campos_512_v4
+75/389585/campos_512_v4
+75/389654/campos_512_v4
+75/389658/campos_512_v4
+75/389678/campos_512_v4
+75/389713/campos_512_v4
+75/389771/campos_512_v4
+75/389924/campos_512_v4
+76/390106/campos_512_v4
+76/390172/campos_512_v4
+76/390175/campos_512_v4
+76/390194/campos_512_v4
+76/390203/campos_512_v4
+76/390208/campos_512_v4
+76/390211/campos_512_v4
+76/390318/campos_512_v4
+76/390335/campos_512_v4
+76/390373/campos_512_v4
+76/390382/campos_512_v4
+76/390385/campos_512_v4
+76/390424/campos_512_v4
+76/390440/campos_512_v4
+76/390510/campos_512_v4
+76/390676/campos_512_v4
+76/390749/campos_512_v4
+76/390755/campos_512_v4
+76/390949/campos_512_v4
+76/390950/campos_512_v4
+76/390978/campos_512_v4
+76/391040/campos_512_v4
+76/391086/campos_512_v4
+76/391138/campos_512_v4
+76/391313/campos_512_v4
+76/391409/campos_512_v4
+76/391435/campos_512_v4
+76/391492/campos_512_v4
+76/391493/campos_512_v4
+76/391593/campos_512_v4
+76/391625/campos_512_v4
+76/391652/campos_512_v4
+76/391713/campos_512_v4
+76/391727/campos_512_v4
+76/391750/campos_512_v4
+76/391879/campos_512_v4
+76/391891/campos_512_v4
+76/391911/campos_512_v4
+76/391940/campos_512_v4
+76/392035/campos_512_v4
+76/392037/campos_512_v4
+76/392122/campos_512_v4
+76/392146/campos_512_v4
+76/392161/campos_512_v4
+76/392227/campos_512_v4
+76/392298/campos_512_v4
+76/392320/campos_512_v4
+76/392339/campos_512_v4
+76/392373/campos_512_v4
+76/392505/campos_512_v4
+76/392506/campos_512_v4
+76/392523/campos_512_v4
+76/392546/campos_512_v4
+76/392565/campos_512_v4
+76/392734/campos_512_v4
+76/392809/campos_512_v4
+76/392855/campos_512_v4
+76/392871/campos_512_v4
+76/392913/campos_512_v4
+76/392992/campos_512_v4
+76/393061/campos_512_v4
+76/393104/campos_512_v4
+76/393109/campos_512_v4
+76/393229/campos_512_v4
+76/393277/campos_512_v4
+76/393407/campos_512_v4
+76/393646/campos_512_v4
+76/393701/campos_512_v4
+76/393785/campos_512_v4
+76/393877/campos_512_v4
+76/393976/campos_512_v4
+76/394000/campos_512_v4
+76/394007/campos_512_v4
+76/394014/campos_512_v4
+76/394034/campos_512_v4
+76/394194/campos_512_v4
+76/394195/campos_512_v4
+76/394277/campos_512_v4
+76/394287/campos_512_v4
+76/394402/campos_512_v4
+76/394422/campos_512_v4
+76/394450/campos_512_v4
+76/394507/campos_512_v4
+76/394540/campos_512_v4
+76/394584/campos_512_v4
+76/394614/campos_512_v4
+76/394752/campos_512_v4
+76/394765/campos_512_v4
+76/394775/campos_512_v4
+76/394780/campos_512_v4
+76/394835/campos_512_v4
+76/394946/campos_512_v4
+76/394980/campos_512_v4
+77/395007/campos_512_v4
+77/395019/campos_512_v4
+77/395089/campos_512_v4
+77/395230/campos_512_v4
+77/395326/campos_512_v4
+77/395359/campos_512_v4
+77/395441/campos_512_v4
+77/395490/campos_512_v4
+77/395669/campos_512_v4
+77/395671/campos_512_v4
+77/395683/campos_512_v4
+77/395796/campos_512_v4
+77/395896/campos_512_v4
+77/395970/campos_512_v4
+77/395989/campos_512_v4
+77/396089/campos_512_v4
+77/396163/campos_512_v4
+77/396231/campos_512_v4
+77/396473/campos_512_v4
+77/396512/campos_512_v4
+77/396531/campos_512_v4
+77/396678/campos_512_v4
+77/396700/campos_512_v4
+77/396709/campos_512_v4
+77/396710/campos_512_v4
+77/396753/campos_512_v4
+77/396822/campos_512_v4
+77/396834/campos_512_v4
+77/397032/campos_512_v4
+77/397035/campos_512_v4
+77/397053/campos_512_v4
+77/397139/campos_512_v4
+77/397319/campos_512_v4
+77/397422/campos_512_v4
+77/397496/campos_512_v4
+77/397521/campos_512_v4
+77/397587/campos_512_v4
+77/397593/campos_512_v4
+77/397637/campos_512_v4
+77/397677/campos_512_v4
+77/397783/campos_512_v4
+77/397833/campos_512_v4
+77/397853/campos_512_v4
+77/397858/campos_512_v4
+77/397900/campos_512_v4
+77/397951/campos_512_v4
+77/397958/campos_512_v4
+77/397986/campos_512_v4
+77/397994/campos_512_v4
+77/398109/campos_512_v4
+77/398165/campos_512_v4
+77/398179/campos_512_v4
+77/398242/campos_512_v4
+77/398256/campos_512_v4
+77/398330/campos_512_v4
+77/398337/campos_512_v4
+77/398417/campos_512_v4
+77/398496/campos_512_v4
+77/398688/campos_512_v4
+77/398729/campos_512_v4
+77/398822/campos_512_v4
+77/398834/campos_512_v4
+77/398991/campos_512_v4
+77/399084/campos_512_v4
+77/399123/campos_512_v4
+77/399136/campos_512_v4
+77/399139/campos_512_v4
+77/399180/campos_512_v4
+77/399367/campos_512_v4
+77/399384/campos_512_v4
+77/399409/campos_512_v4
+77/399595/campos_512_v4
+77/399612/campos_512_v4
+77/399822/campos_512_v4
+77/399856/campos_512_v4
+77/399882/campos_512_v4
+77/399954/campos_512_v4
+78/400052/campos_512_v4
+78/400481/campos_512_v4
+78/400619/campos_512_v4
+78/400658/campos_512_v4
+78/400878/campos_512_v4
+78/400944/campos_512_v4
+78/400958/campos_512_v4
+78/400987/campos_512_v4
+78/401201/campos_512_v4
+78/401305/campos_512_v4
+78/401325/campos_512_v4
+78/401374/campos_512_v4
+78/401496/campos_512_v4
+78/401553/campos_512_v4
+78/401570/campos_512_v4
+78/401862/campos_512_v4
+78/401954/campos_512_v4
+78/402066/campos_512_v4
+78/402088/campos_512_v4
+78/402114/campos_512_v4
+78/402137/campos_512_v4
+78/402343/campos_512_v4
+78/402356/campos_512_v4
+78/402408/campos_512_v4
+78/402431/campos_512_v4
+78/402452/campos_512_v4
+78/402479/campos_512_v4
+78/402565/campos_512_v4
+78/402852/campos_512_v4
+78/402873/campos_512_v4
+78/402892/campos_512_v4
+78/402968/campos_512_v4
+78/402975/campos_512_v4
+78/403140/campos_512_v4
+78/403157/campos_512_v4
+78/403158/campos_512_v4
+78/403219/campos_512_v4
+78/403237/campos_512_v4
+78/403253/campos_512_v4
+78/403272/campos_512_v4
+78/403414/campos_512_v4
+78/403434/campos_512_v4
+78/403441/campos_512_v4
+78/403457/campos_512_v4
+78/403596/campos_512_v4
+78/403609/campos_512_v4
+78/403658/campos_512_v4
+78/403799/campos_512_v4
+78/403808/campos_512_v4
+78/403855/campos_512_v4
+78/403928/campos_512_v4
+78/403970/campos_512_v4
+78/404152/campos_512_v4
+78/404197/campos_512_v4
+78/404450/campos_512_v4
+78/404557/campos_512_v4
+78/404592/campos_512_v4
+78/404666/campos_512_v4
+78/404810/campos_512_v4
+78/404924/campos_512_v4
+78/404937/campos_512_v4
+78/404988/campos_512_v4
+79/405095/campos_512_v4
+79/405120/campos_512_v4
+79/405133/campos_512_v4
+79/405138/campos_512_v4
+79/405213/campos_512_v4
+79/405327/campos_512_v4
+79/405427/campos_512_v4
+79/405482/campos_512_v4
+79/405526/campos_512_v4
+79/405599/campos_512_v4
+79/405614/campos_512_v4
+79/405636/campos_512_v4
+79/405736/campos_512_v4
+79/405769/campos_512_v4
+79/405799/campos_512_v4
+79/405838/campos_512_v4
+79/405844/campos_512_v4
+79/405864/campos_512_v4
+79/405874/campos_512_v4
+79/405954/campos_512_v4
+79/405969/campos_512_v4
+79/405981/campos_512_v4
+79/406041/campos_512_v4
+79/406069/campos_512_v4
+79/406072/campos_512_v4
+79/406186/campos_512_v4
+79/406248/campos_512_v4
+79/406253/campos_512_v4
+79/406465/campos_512_v4
+79/406493/campos_512_v4
+79/406544/campos_512_v4
+79/406630/campos_512_v4
+79/406679/campos_512_v4
+79/406784/campos_512_v4
+79/406896/campos_512_v4
+79/407011/campos_512_v4
+79/407018/campos_512_v4
+79/407243/campos_512_v4
+79/407329/campos_512_v4
+79/407492/campos_512_v4
+79/407590/campos_512_v4
+79/407726/campos_512_v4
+79/407765/campos_512_v4
+79/407824/campos_512_v4
+79/408068/campos_512_v4
+79/408225/campos_512_v4
+79/408257/campos_512_v4
+79/408315/campos_512_v4
+79/408370/campos_512_v4
+79/408511/campos_512_v4
+79/408585/campos_512_v4
+79/408602/campos_512_v4
+79/408617/campos_512_v4
+79/408618/campos_512_v4
+79/408764/campos_512_v4
+79/408772/campos_512_v4
+79/408796/campos_512_v4
+79/408806/campos_512_v4
+79/408878/campos_512_v4
+79/408934/campos_512_v4
+79/408941/campos_512_v4
+79/408942/campos_512_v4
+79/408951/campos_512_v4
+79/409163/campos_512_v4
+79/409227/campos_512_v4
+79/409229/campos_512_v4
+79/409307/campos_512_v4
+79/409330/campos_512_v4
+79/409357/campos_512_v4
+79/409673/campos_512_v4
+79/409799/campos_512_v4
+79/409832/campos_512_v4
+79/409895/campos_512_v4
+79/409896/campos_512_v4
+79/409913/campos_512_v4
+79/409955/campos_512_v4
+79/409981/campos_512_v4
+8/50094/campos_512_v4
+8/50258/campos_512_v4
+8/50282/campos_512_v4
+8/50326/campos_512_v4
+8/50429/campos_512_v4
+8/50505/campos_512_v4
+8/50553/campos_512_v4
+8/50632/campos_512_v4
+8/50804/campos_512_v4
+8/50849/campos_512_v4
+8/50869/campos_512_v4
+8/50908/campos_512_v4
+8/50944/campos_512_v4
+8/51044/campos_512_v4
+8/51107/campos_512_v4
+8/51111/campos_512_v4
+8/51158/campos_512_v4
+8/51215/campos_512_v4
+8/51341/campos_512_v4
+8/51370/campos_512_v4
+8/51469/campos_512_v4
+8/51482/campos_512_v4
+8/51486/campos_512_v4
+8/51576/campos_512_v4
+8/51590/campos_512_v4
+8/51665/campos_512_v4
+8/51666/campos_512_v4
+8/51773/campos_512_v4
+8/51804/campos_512_v4
+8/51811/campos_512_v4
+8/51821/campos_512_v4
+8/51852/campos_512_v4
+8/51906/campos_512_v4
+8/51910/campos_512_v4
+8/51979/campos_512_v4
+8/52113/campos_512_v4
+8/52164/campos_512_v4
+8/52192/campos_512_v4
+8/52303/campos_512_v4
+8/52308/campos_512_v4
+8/52623/campos_512_v4
+8/52657/campos_512_v4
+8/52659/campos_512_v4
+8/52678/campos_512_v4
+8/52760/campos_512_v4
+8/52778/campos_512_v4
+8/52796/campos_512_v4
+8/52808/campos_512_v4
+8/52862/campos_512_v4
+8/52948/campos_512_v4
+8/52983/campos_512_v4
+8/53013/campos_512_v4
+8/53035/campos_512_v4
+8/53080/campos_512_v4
+8/53126/campos_512_v4
+8/53166/campos_512_v4
+8/53182/campos_512_v4
+8/53210/campos_512_v4
+8/53274/campos_512_v4
+8/53395/campos_512_v4
+8/53409/campos_512_v4
+8/53440/campos_512_v4
+8/53444/campos_512_v4
+8/53449/campos_512_v4
+8/53542/campos_512_v4
+8/53586/campos_512_v4
+8/53661/campos_512_v4
+8/53714/campos_512_v4
+8/53783/campos_512_v4
+8/53952/campos_512_v4
+8/54018/campos_512_v4
+8/54133/campos_512_v4
+8/54183/campos_512_v4
+8/54214/campos_512_v4
+8/54412/campos_512_v4
+8/54585/campos_512_v4
+8/54607/campos_512_v4
+8/54700/campos_512_v4
+8/54746/campos_512_v4
+8/54811/campos_512_v4
+8/54824/campos_512_v4
+8/54837/campos_512_v4
+8/54847/campos_512_v4
+80/410051/campos_512_v4
+80/410082/campos_512_v4
+80/410228/campos_512_v4
+80/410267/campos_512_v4
+80/410311/campos_512_v4
+80/410510/campos_512_v4
+80/410537/campos_512_v4
+80/410547/campos_512_v4
+80/410548/campos_512_v4
+80/410571/campos_512_v4
+80/410666/campos_512_v4
+80/410707/campos_512_v4
+80/410716/campos_512_v4
+80/410729/campos_512_v4
+80/410751/campos_512_v4
+80/410836/campos_512_v4
+80/410992/campos_512_v4
+80/411161/campos_512_v4
+80/411276/campos_512_v4
+80/411370/campos_512_v4
+80/411676/campos_512_v4
+80/411746/campos_512_v4
+80/411917/campos_512_v4
+80/411931/campos_512_v4
+80/411977/campos_512_v4
+80/412033/campos_512_v4
+80/412108/campos_512_v4
+80/412216/campos_512_v4
+80/412230/campos_512_v4
+80/412239/campos_512_v4
+80/412307/campos_512_v4
+80/412322/campos_512_v4
+80/412365/campos_512_v4
+80/412372/campos_512_v4
+80/412423/campos_512_v4
+80/412688/campos_512_v4
+80/412781/campos_512_v4
+80/412792/campos_512_v4
+80/412994/campos_512_v4
+80/412996/campos_512_v4
+80/413037/campos_512_v4
+80/413058/campos_512_v4
+80/413082/campos_512_v4
+80/413131/campos_512_v4
+80/413157/campos_512_v4
+80/413258/campos_512_v4
+80/413264/campos_512_v4
+80/413267/campos_512_v4
+80/413279/campos_512_v4
+80/413315/campos_512_v4
+80/413501/campos_512_v4
+80/413727/campos_512_v4
+80/413838/campos_512_v4
+80/413873/campos_512_v4
+80/413976/campos_512_v4
+80/414096/campos_512_v4
+80/414145/campos_512_v4
+80/414237/campos_512_v4
+80/414239/campos_512_v4
+80/414335/campos_512_v4
+80/414340/campos_512_v4
+80/414358/campos_512_v4
+80/414360/campos_512_v4
+80/414492/campos_512_v4
+80/414537/campos_512_v4
+80/414543/campos_512_v4
+80/414588/campos_512_v4
+80/414606/campos_512_v4
+80/414617/campos_512_v4
+80/414673/campos_512_v4
+80/414688/campos_512_v4
+80/414697/campos_512_v4
+80/414909/campos_512_v4
+80/414973/campos_512_v4
+81/415032/campos_512_v4
+81/415252/campos_512_v4
+81/415640/campos_512_v4
+81/415646/campos_512_v4
+81/415677/campos_512_v4
+81/415721/campos_512_v4
+81/415801/campos_512_v4
+81/415840/campos_512_v4
+81/415880/campos_512_v4
+81/415896/campos_512_v4
+81/415927/campos_512_v4
+81/415993/campos_512_v4
+81/416075/campos_512_v4
+81/416132/campos_512_v4
+81/416161/campos_512_v4
+81/416270/campos_512_v4
+81/416332/campos_512_v4
+81/416336/campos_512_v4
+81/416384/campos_512_v4
+81/416420/campos_512_v4
+81/416484/campos_512_v4
+81/416655/campos_512_v4
+81/416751/campos_512_v4
+81/416808/campos_512_v4
+81/416852/campos_512_v4
+81/416869/campos_512_v4
+81/416876/campos_512_v4
+81/416916/campos_512_v4
+81/416924/campos_512_v4
+81/416957/campos_512_v4
+81/416990/campos_512_v4
+81/417034/campos_512_v4
+81/417164/campos_512_v4
+81/417245/campos_512_v4
+81/417259/campos_512_v4
+81/417337/campos_512_v4
+81/417340/campos_512_v4
+81/417342/campos_512_v4
+81/417419/campos_512_v4
+81/417459/campos_512_v4
+81/417466/campos_512_v4
+81/417549/campos_512_v4
+81/417736/campos_512_v4
+81/417752/campos_512_v4
+81/417792/campos_512_v4
+81/417828/campos_512_v4
+81/417948/campos_512_v4
+81/418028/campos_512_v4
+81/418171/campos_512_v4
+81/418183/campos_512_v4
+81/418226/campos_512_v4
+81/418355/campos_512_v4
+81/418428/campos_512_v4
+81/418550/campos_512_v4
+81/418563/campos_512_v4
+81/418583/campos_512_v4
+81/418646/campos_512_v4
+81/419061/campos_512_v4
+81/419078/campos_512_v4
+81/419114/campos_512_v4
+81/419141/campos_512_v4
+81/419192/campos_512_v4
+81/419206/campos_512_v4
+81/419297/campos_512_v4
+81/419498/campos_512_v4
+81/419658/campos_512_v4
+81/419668/campos_512_v4
+81/419910/campos_512_v4
+82/420029/campos_512_v4
+82/420036/campos_512_v4
+82/420038/campos_512_v4
+82/420112/campos_512_v4
+82/420207/campos_512_v4
+82/420254/campos_512_v4
+82/420263/campos_512_v4
+82/420322/campos_512_v4
+82/420385/campos_512_v4
+82/420507/campos_512_v4
+82/420522/campos_512_v4
+82/420625/campos_512_v4
+82/420644/campos_512_v4
+82/420659/campos_512_v4
+82/420743/campos_512_v4
+82/420949/campos_512_v4
+82/420964/campos_512_v4
+82/420985/campos_512_v4
+82/420998/campos_512_v4
+82/420999/campos_512_v4
+82/421023/campos_512_v4
+82/421130/campos_512_v4
+82/421158/campos_512_v4
+82/421220/campos_512_v4
+82/421266/campos_512_v4
+82/421542/campos_512_v4
+82/421627/campos_512_v4
+82/421638/campos_512_v4
+82/421697/campos_512_v4
+82/421749/campos_512_v4
+82/421801/campos_512_v4
+82/421838/campos_512_v4
+82/421890/campos_512_v4
+82/422030/campos_512_v4
+82/422050/campos_512_v4
+82/422241/campos_512_v4
+82/422268/campos_512_v4
+82/422275/campos_512_v4
+82/422327/campos_512_v4
+82/422481/campos_512_v4
+82/422573/campos_512_v4
+82/422661/campos_512_v4
+82/422675/campos_512_v4
+82/422864/campos_512_v4
+82/422879/campos_512_v4
+82/422917/campos_512_v4
+82/422997/campos_512_v4
+82/423132/campos_512_v4
+82/423236/campos_512_v4
+82/423337/campos_512_v4
+82/423377/campos_512_v4
+82/423394/campos_512_v4
+82/423540/campos_512_v4
+82/423545/campos_512_v4
+82/423613/campos_512_v4
+82/423663/campos_512_v4
+82/423700/campos_512_v4
+82/423744/campos_512_v4
+82/423855/campos_512_v4
+82/423894/campos_512_v4
+82/423960/campos_512_v4
+82/424013/campos_512_v4
+82/424074/campos_512_v4
+82/424302/campos_512_v4
+82/424307/campos_512_v4
+82/424344/campos_512_v4
+82/424495/campos_512_v4
+82/424505/campos_512_v4
+82/424570/campos_512_v4
+82/424686/campos_512_v4
+82/424695/campos_512_v4
+82/424736/campos_512_v4
+82/424950/campos_512_v4
+82/424981/campos_512_v4
+83/425023/campos_512_v4
+83/425105/campos_512_v4
+83/425111/campos_512_v4
+83/425116/campos_512_v4
+83/425155/campos_512_v4
+83/425164/campos_512_v4
+83/425205/campos_512_v4
+83/425277/campos_512_v4
+83/425279/campos_512_v4
+83/425286/campos_512_v4
+83/425356/campos_512_v4
+83/425420/campos_512_v4
+83/425494/campos_512_v4
+83/425595/campos_512_v4
+83/425664/campos_512_v4
+83/425713/campos_512_v4
+83/425788/campos_512_v4
+83/425935/campos_512_v4
+83/426193/campos_512_v4
+83/426209/campos_512_v4
+83/426314/campos_512_v4
+83/426507/campos_512_v4
+83/426682/campos_512_v4
+83/426754/campos_512_v4
+83/426760/campos_512_v4
+83/426763/campos_512_v4
+83/426826/campos_512_v4
+83/426865/campos_512_v4
+83/426887/campos_512_v4
+83/426954/campos_512_v4
+83/426972/campos_512_v4
+83/426988/campos_512_v4
+83/427061/campos_512_v4
+83/427075/campos_512_v4
+83/427132/campos_512_v4
+83/427173/campos_512_v4
+83/427246/campos_512_v4
+83/427247/campos_512_v4
+83/427255/campos_512_v4
+83/427309/campos_512_v4
+83/427423/campos_512_v4
+83/427464/campos_512_v4
+83/427467/campos_512_v4
+83/427470/campos_512_v4
+83/427491/campos_512_v4
+83/427715/campos_512_v4
+83/427725/campos_512_v4
+83/427749/campos_512_v4
+83/427784/campos_512_v4
+83/427902/campos_512_v4
+83/427998/campos_512_v4
+83/428026/campos_512_v4
+83/428076/campos_512_v4
+83/428113/campos_512_v4
+83/428156/campos_512_v4
+83/428176/campos_512_v4
+83/428180/campos_512_v4
+83/428256/campos_512_v4
+83/428388/campos_512_v4
+83/428509/campos_512_v4
+83/428527/campos_512_v4
+83/428549/campos_512_v4
+83/428558/campos_512_v4
+83/428610/campos_512_v4
+83/428682/campos_512_v4
+83/428850/campos_512_v4
+83/428854/campos_512_v4
+83/428942/campos_512_v4
+83/428947/campos_512_v4
+83/429069/campos_512_v4
+83/429084/campos_512_v4
+83/429175/campos_512_v4
+83/429206/campos_512_v4
+83/429225/campos_512_v4
+83/429253/campos_512_v4
+83/429394/campos_512_v4
+83/429493/campos_512_v4
+83/429520/campos_512_v4
+83/429671/campos_512_v4
+83/429719/campos_512_v4
+83/429729/campos_512_v4
+84/430005/campos_512_v4
+84/430108/campos_512_v4
+84/430125/campos_512_v4
+84/430234/campos_512_v4
+84/430251/campos_512_v4
+84/430268/campos_512_v4
+84/430314/campos_512_v4
+84/430380/campos_512_v4
+84/430416/campos_512_v4
+84/430418/campos_512_v4
+84/430474/campos_512_v4
+84/430475/campos_512_v4
+84/430533/campos_512_v4
+84/430552/campos_512_v4
+84/430599/campos_512_v4
+84/430665/campos_512_v4
+84/430683/campos_512_v4
+84/430897/campos_512_v4
+84/430948/campos_512_v4
+84/431070/campos_512_v4
+84/431077/campos_512_v4
+84/431088/campos_512_v4
+84/431173/campos_512_v4
+84/431254/campos_512_v4
+84/431290/campos_512_v4
+84/431320/campos_512_v4
+84/431385/campos_512_v4
+84/431457/campos_512_v4
+84/431514/campos_512_v4
+84/431528/campos_512_v4
+84/431579/campos_512_v4
+84/431655/campos_512_v4
+84/431671/campos_512_v4
+84/431772/campos_512_v4
+84/431843/campos_512_v4
+84/431907/campos_512_v4
+84/431910/campos_512_v4
+84/431944/campos_512_v4
+84/432163/campos_512_v4
+84/432187/campos_512_v4
+84/432247/campos_512_v4
+84/432259/campos_512_v4
+84/432413/campos_512_v4
+84/432425/campos_512_v4
+84/432473/campos_512_v4
+84/432636/campos_512_v4
+84/432654/campos_512_v4
+84/432775/campos_512_v4
+84/432973/campos_512_v4
+84/433010/campos_512_v4
+84/433121/campos_512_v4
+84/433262/campos_512_v4
+84/433305/campos_512_v4
+84/433389/campos_512_v4
+84/433424/campos_512_v4
+84/433439/campos_512_v4
+84/433450/campos_512_v4
+84/433484/campos_512_v4
+84/433507/campos_512_v4
+84/433590/campos_512_v4
+84/433623/campos_512_v4
+84/433646/campos_512_v4
+84/433664/campos_512_v4
+84/433697/campos_512_v4
+84/434126/campos_512_v4
+84/434249/campos_512_v4
+84/434288/campos_512_v4
+84/434338/campos_512_v4
+84/434350/campos_512_v4
+84/434366/campos_512_v4
+84/434388/campos_512_v4
+84/434435/campos_512_v4
+84/434449/campos_512_v4
+84/434504/campos_512_v4
+84/434511/campos_512_v4
+84/434642/campos_512_v4
+84/434644/campos_512_v4
+84/434707/campos_512_v4
+84/434755/campos_512_v4
+84/434869/campos_512_v4
+85/435029/campos_512_v4
+85/435068/campos_512_v4
+85/435153/campos_512_v4
+85/435213/campos_512_v4
+85/435249/campos_512_v4
+85/435317/campos_512_v4
+85/435390/campos_512_v4
+85/435428/campos_512_v4
+85/435463/campos_512_v4
+85/435527/campos_512_v4
+85/435554/campos_512_v4
+85/435698/campos_512_v4
+85/435743/campos_512_v4
+85/435757/campos_512_v4
+85/435829/campos_512_v4
+85/435866/campos_512_v4
+85/435880/campos_512_v4
+85/436073/campos_512_v4
+85/436092/campos_512_v4
+85/436110/campos_512_v4
+85/436136/campos_512_v4
+85/436190/campos_512_v4
+85/436192/campos_512_v4
+85/436298/campos_512_v4
+85/436572/campos_512_v4
+85/436658/campos_512_v4
+85/436815/campos_512_v4
+85/436849/campos_512_v4
+85/436969/campos_512_v4
+85/436973/campos_512_v4
+85/437002/campos_512_v4
+85/437092/campos_512_v4
+85/437132/campos_512_v4
+85/437285/campos_512_v4
+85/437457/campos_512_v4
+85/437492/campos_512_v4
+85/437539/campos_512_v4
+85/437709/campos_512_v4
+85/437767/campos_512_v4
+85/437841/campos_512_v4
+85/437869/campos_512_v4
+85/438006/campos_512_v4
+85/438101/campos_512_v4
+85/438114/campos_512_v4
+85/438193/campos_512_v4
+85/438196/campos_512_v4
+85/438284/campos_512_v4
+85/438325/campos_512_v4
+85/438340/campos_512_v4
+85/438366/campos_512_v4
+85/438373/campos_512_v4
+85/438451/campos_512_v4
+85/438511/campos_512_v4
+85/438521/campos_512_v4
+85/438544/campos_512_v4
+85/438591/campos_512_v4
+85/438667/campos_512_v4
+85/438681/campos_512_v4
+85/438700/campos_512_v4
+85/438826/campos_512_v4
+85/438863/campos_512_v4
+85/438916/campos_512_v4
+85/439006/campos_512_v4
+85/439022/campos_512_v4
+85/439105/campos_512_v4
+85/439137/campos_512_v4
+85/439154/campos_512_v4
+85/439175/campos_512_v4
+85/439208/campos_512_v4
+85/439325/campos_512_v4
+85/439587/campos_512_v4
+85/439592/campos_512_v4
+85/439656/campos_512_v4
+85/439750/campos_512_v4
+85/439754/campos_512_v4
+85/439843/campos_512_v4
+85/439897/campos_512_v4
+86/440015/campos_512_v4
+86/440050/campos_512_v4
+86/440096/campos_512_v4
+86/440382/campos_512_v4
+86/440432/campos_512_v4
+86/440446/campos_512_v4
+86/440505/campos_512_v4
+86/440530/campos_512_v4
+86/440594/campos_512_v4
+86/440604/campos_512_v4
+86/440609/campos_512_v4
+86/440727/campos_512_v4
+86/440753/campos_512_v4
+86/440920/campos_512_v4
+86/440954/campos_512_v4
+86/441046/campos_512_v4
+86/441263/campos_512_v4
+86/441275/campos_512_v4
+86/441307/campos_512_v4
+86/441324/campos_512_v4
+86/441337/campos_512_v4
+86/441356/campos_512_v4
+86/441463/campos_512_v4
+86/441592/campos_512_v4
+86/441719/campos_512_v4
+86/441761/campos_512_v4
+86/441785/campos_512_v4
+86/441833/campos_512_v4
+86/441928/campos_512_v4
+86/442007/campos_512_v4
+86/442099/campos_512_v4
+86/442156/campos_512_v4
+86/442168/campos_512_v4
+86/442277/campos_512_v4
+86/442379/campos_512_v4
+86/442397/campos_512_v4
+86/442427/campos_512_v4
+86/442565/campos_512_v4
+86/442572/campos_512_v4
+86/442585/campos_512_v4
+86/442628/campos_512_v4
+86/442693/campos_512_v4
+86/442708/campos_512_v4
+86/442718/campos_512_v4
+86/442857/campos_512_v4
+86/442947/campos_512_v4
+86/443094/campos_512_v4
+86/443181/campos_512_v4
+86/443233/campos_512_v4
+86/443275/campos_512_v4
+86/443418/campos_512_v4
+86/443502/campos_512_v4
+86/443607/campos_512_v4
+86/443640/campos_512_v4
+86/443662/campos_512_v4
+86/443840/campos_512_v4
+86/443932/campos_512_v4
+86/443963/campos_512_v4
+86/444071/campos_512_v4
+86/444271/campos_512_v4
+86/444424/campos_512_v4
+86/444427/campos_512_v4
+86/444602/campos_512_v4
+86/444608/campos_512_v4
+86/444665/campos_512_v4
+86/444734/campos_512_v4
+86/444924/campos_512_v4
+87/445052/campos_512_v4
+87/445063/campos_512_v4
+87/445085/campos_512_v4
+87/445126/campos_512_v4
+87/445135/campos_512_v4
+87/445189/campos_512_v4
+87/445260/campos_512_v4
+87/445291/campos_512_v4
+87/445553/campos_512_v4
+87/445591/campos_512_v4
+87/445594/campos_512_v4
+87/445776/campos_512_v4
+87/445782/campos_512_v4
+87/445853/campos_512_v4
+87/445894/campos_512_v4
+87/446044/campos_512_v4
+87/446063/campos_512_v4
+87/446072/campos_512_v4
+87/446085/campos_512_v4
+87/446213/campos_512_v4
+87/446238/campos_512_v4
+87/446246/campos_512_v4
+87/446287/campos_512_v4
+87/446300/campos_512_v4
+87/446304/campos_512_v4
+87/446309/campos_512_v4
+87/446348/campos_512_v4
+87/446368/campos_512_v4
+87/446408/campos_512_v4
+87/446416/campos_512_v4
+87/446420/campos_512_v4
+87/446601/campos_512_v4
+87/446618/campos_512_v4
+87/446691/campos_512_v4
+87/446692/campos_512_v4
+87/446709/campos_512_v4
+87/446711/campos_512_v4
+87/446720/campos_512_v4
+87/446740/campos_512_v4
+87/446825/campos_512_v4
+87/446882/campos_512_v4
+87/446914/campos_512_v4
+87/447112/campos_512_v4
+87/447201/campos_512_v4
+87/447399/campos_512_v4
+87/447408/campos_512_v4
+87/447501/campos_512_v4
+87/447620/campos_512_v4
+87/447636/campos_512_v4
+87/447638/campos_512_v4
+87/447725/campos_512_v4
+87/447782/campos_512_v4
+87/447851/campos_512_v4
+87/447891/campos_512_v4
+87/447908/campos_512_v4
+87/447997/campos_512_v4
+87/448438/campos_512_v4
+87/448451/campos_512_v4
+87/448567/campos_512_v4
+87/448608/campos_512_v4
+87/448695/campos_512_v4
+87/448698/campos_512_v4
+87/448786/campos_512_v4
+87/448817/campos_512_v4
+87/448836/campos_512_v4
+87/448846/campos_512_v4
+87/448878/campos_512_v4
+87/448919/campos_512_v4
+87/449100/campos_512_v4
+87/449101/campos_512_v4
+87/449110/campos_512_v4
+87/449134/campos_512_v4
+87/449138/campos_512_v4
+87/449166/campos_512_v4
+87/449191/campos_512_v4
+87/449201/campos_512_v4
+87/449206/campos_512_v4
+87/449224/campos_512_v4
+87/449249/campos_512_v4
+87/449379/campos_512_v4
+87/449414/campos_512_v4
+87/449465/campos_512_v4
+87/449491/campos_512_v4
+87/449514/campos_512_v4
+87/449566/campos_512_v4
+87/449601/campos_512_v4
+87/449645/campos_512_v4
+87/449650/campos_512_v4
+88/450014/campos_512_v4
+88/450046/campos_512_v4
+88/450084/campos_512_v4
+88/450088/campos_512_v4
+88/450136/campos_512_v4
+88/450195/campos_512_v4
+88/450248/campos_512_v4
+88/450250/campos_512_v4
+88/450338/campos_512_v4
+88/450459/campos_512_v4
+88/450533/campos_512_v4
+88/450540/campos_512_v4
+88/450543/campos_512_v4
+88/450614/campos_512_v4
+88/450674/campos_512_v4
+88/450795/campos_512_v4
+88/450846/campos_512_v4
+88/450916/campos_512_v4
+88/450986/campos_512_v4
+88/451007/campos_512_v4
+88/451025/campos_512_v4
+88/451041/campos_512_v4
+88/451127/campos_512_v4
+88/451132/campos_512_v4
+88/451138/campos_512_v4
+88/451373/campos_512_v4
+88/451494/campos_512_v4
+88/451506/campos_512_v4
+88/451720/campos_512_v4
+88/451721/campos_512_v4
+88/451766/campos_512_v4
+88/451778/campos_512_v4
+88/451811/campos_512_v4
+88/451916/campos_512_v4
+88/451970/campos_512_v4
+88/451981/campos_512_v4
+88/452009/campos_512_v4
+88/452011/campos_512_v4
+88/452141/campos_512_v4
+88/452175/campos_512_v4
+88/452246/campos_512_v4
+88/452257/campos_512_v4
+88/452421/campos_512_v4
+88/452547/campos_512_v4
+88/452602/campos_512_v4
+88/452630/campos_512_v4
+88/452700/campos_512_v4
+88/452730/campos_512_v4
+88/453053/campos_512_v4
+88/453076/campos_512_v4
+88/453301/campos_512_v4
+88/453302/campos_512_v4
+88/453334/campos_512_v4
+88/453519/campos_512_v4
+88/453594/campos_512_v4
+88/453618/campos_512_v4
+88/453653/campos_512_v4
+88/453671/campos_512_v4
+88/453724/campos_512_v4
+88/453756/campos_512_v4
+88/453801/campos_512_v4
+88/453969/campos_512_v4
+88/454013/campos_512_v4
+88/454141/campos_512_v4
+88/454172/campos_512_v4
+88/454204/campos_512_v4
+88/454225/campos_512_v4
+88/454253/campos_512_v4
+88/454293/campos_512_v4
+88/454300/campos_512_v4
+88/454322/campos_512_v4
+88/454338/campos_512_v4
+88/454341/campos_512_v4
+88/454370/campos_512_v4
+88/454391/campos_512_v4
+88/454501/campos_512_v4
+88/454514/campos_512_v4
+88/454533/campos_512_v4
+88/454609/campos_512_v4
+88/454630/campos_512_v4
+88/454649/campos_512_v4
+88/454674/campos_512_v4
+88/454698/campos_512_v4
+88/454710/campos_512_v4
+88/454755/campos_512_v4
+88/454796/campos_512_v4
+88/454829/campos_512_v4
+88/454876/campos_512_v4
+88/454920/campos_512_v4
+89/455332/campos_512_v4
+89/455349/campos_512_v4
+89/455395/campos_512_v4
+89/455405/campos_512_v4
+89/455511/campos_512_v4
+89/455575/campos_512_v4
+89/455929/campos_512_v4
+89/455991/campos_512_v4
+89/456011/campos_512_v4
+89/456036/campos_512_v4
+89/456097/campos_512_v4
+89/456244/campos_512_v4
+89/456282/campos_512_v4
+89/456568/campos_512_v4
+89/456584/campos_512_v4
+89/456613/campos_512_v4
+89/456631/campos_512_v4
+89/456853/campos_512_v4
+89/456904/campos_512_v4
+89/456948/campos_512_v4
+89/456980/campos_512_v4
+89/457031/campos_512_v4
+89/457056/campos_512_v4
+89/457179/campos_512_v4
+89/457252/campos_512_v4
+89/457331/campos_512_v4
+89/457359/campos_512_v4
+89/457362/campos_512_v4
+89/457581/campos_512_v4
+89/457667/campos_512_v4
+89/457683/campos_512_v4
+89/457702/campos_512_v4
+89/457752/campos_512_v4
+89/457769/campos_512_v4
+89/457822/campos_512_v4
+89/457873/campos_512_v4
+89/457912/campos_512_v4
+89/457942/campos_512_v4
+89/458023/campos_512_v4
+89/458080/campos_512_v4
+89/458177/campos_512_v4
+89/458224/campos_512_v4
+89/458333/campos_512_v4
+89/458504/campos_512_v4
+89/458539/campos_512_v4
+89/458576/campos_512_v4
+89/458584/campos_512_v4
+89/458589/campos_512_v4
+89/458612/campos_512_v4
+89/458660/campos_512_v4
+89/458693/campos_512_v4
+89/458728/campos_512_v4
+89/458831/campos_512_v4
+89/458836/campos_512_v4
+89/458886/campos_512_v4
+89/458989/campos_512_v4
+89/459081/campos_512_v4
+89/459145/campos_512_v4
+89/459214/campos_512_v4
+89/459264/campos_512_v4
+89/459273/campos_512_v4
+89/459343/campos_512_v4
+89/459462/campos_512_v4
+89/459522/campos_512_v4
+89/459702/campos_512_v4
+89/459813/campos_512_v4
+89/459915/campos_512_v4
+89/459933/campos_512_v4
+89/460000/campos_512_v4
+9/55056/campos_512_v4
+9/55233/campos_512_v4
+9/55238/campos_512_v4
+9/55303/campos_512_v4
+9/55452/campos_512_v4
+9/55471/campos_512_v4
+9/55571/campos_512_v4
+9/55745/campos_512_v4
+9/55758/campos_512_v4
+9/55767/campos_512_v4
+9/55801/campos_512_v4
+9/56163/campos_512_v4
+9/56247/campos_512_v4
+9/56433/campos_512_v4
+9/56523/campos_512_v4
+9/56726/campos_512_v4
+9/56880/campos_512_v4
+9/56887/campos_512_v4
+9/57018/campos_512_v4
+9/57231/campos_512_v4
+9/57352/campos_512_v4
+9/57438/campos_512_v4
+9/57502/campos_512_v4
+9/57509/campos_512_v4
+9/57670/campos_512_v4
+9/57891/campos_512_v4
+9/57946/campos_512_v4
+9/58117/campos_512_v4
+9/58239/campos_512_v4
+9/58415/campos_512_v4
+9/58821/campos_512_v4
+9/58959/campos_512_v4
+9/59027/campos_512_v4
+9/59127/campos_512_v4
+9/59141/campos_512_v4
+9/59181/campos_512_v4
+9/59290/campos_512_v4
+9/59383/campos_512_v4
+9/59422/campos_512_v4
+9/59480/campos_512_v4
+9/59584/campos_512_v4
+9/59614/campos_512_v4
+9/59620/campos_512_v4
+90/460053/campos_512_v4
+90/460080/campos_512_v4
+90/460268/campos_512_v4
+90/460277/campos_512_v4
+90/460309/campos_512_v4
+90/460314/campos_512_v4
+90/460322/campos_512_v4
+90/460494/campos_512_v4
+90/460586/campos_512_v4
+90/460654/campos_512_v4
+90/460680/campos_512_v4
+90/460756/campos_512_v4
+90/460777/campos_512_v4
+90/460779/campos_512_v4
+90/460814/campos_512_v4
+90/460818/campos_512_v4
+90/460899/campos_512_v4
+90/460911/campos_512_v4
+90/461098/campos_512_v4
+90/461247/campos_512_v4
+90/461313/campos_512_v4
+90/461439/campos_512_v4
+90/461475/campos_512_v4
+90/461606/campos_512_v4
+90/461627/campos_512_v4
+90/461666/campos_512_v4
+90/461725/campos_512_v4
+90/461752/campos_512_v4
+90/461795/campos_512_v4
+90/461929/campos_512_v4
+90/461934/campos_512_v4
+90/461954/campos_512_v4
+90/462124/campos_512_v4
+90/462137/campos_512_v4
+90/462152/campos_512_v4
+90/462198/campos_512_v4
+90/462309/campos_512_v4
+90/462314/campos_512_v4
+90/462340/campos_512_v4
+90/462411/campos_512_v4
+90/462487/campos_512_v4
+90/462711/campos_512_v4
+90/462837/campos_512_v4
+90/462859/campos_512_v4
+90/463238/campos_512_v4
+90/463268/campos_512_v4
+90/463307/campos_512_v4
+90/463381/campos_512_v4
+90/463394/campos_512_v4
+90/463426/campos_512_v4
+90/463448/campos_512_v4
+90/463509/campos_512_v4
+90/463643/campos_512_v4
+90/463665/campos_512_v4
+90/463681/campos_512_v4
+90/463722/campos_512_v4
+90/463842/campos_512_v4
+90/463884/campos_512_v4
+90/464110/campos_512_v4
+90/464137/campos_512_v4
+90/464160/campos_512_v4
+90/464190/campos_512_v4
+90/464244/campos_512_v4
+90/464297/campos_512_v4
+90/464620/campos_512_v4
+90/464660/campos_512_v4
+90/464661/campos_512_v4
+90/464666/campos_512_v4
+90/464716/campos_512_v4
+90/464724/campos_512_v4
+90/464774/campos_512_v4
+90/464777/campos_512_v4
+90/464975/campos_512_v4
+90/464983/campos_512_v4
+91/465051/campos_512_v4
+91/465193/campos_512_v4
+91/465234/campos_512_v4
+91/465329/campos_512_v4
+91/465384/campos_512_v4
+91/465400/campos_512_v4
+91/465474/campos_512_v4
+91/465494/campos_512_v4
+91/465521/campos_512_v4
+91/465527/campos_512_v4
+91/465552/campos_512_v4
+91/465597/campos_512_v4
+91/465762/campos_512_v4
+91/465843/campos_512_v4
+91/465872/campos_512_v4
+91/465902/campos_512_v4
+91/465942/campos_512_v4
+91/466143/campos_512_v4
+91/466161/campos_512_v4
+91/466163/campos_512_v4
+91/466217/campos_512_v4
+91/466281/campos_512_v4
+91/466384/campos_512_v4
+91/466449/campos_512_v4
+91/466528/campos_512_v4
+91/466589/campos_512_v4
+91/466600/campos_512_v4
+91/466610/campos_512_v4
+91/466726/campos_512_v4
+91/466733/campos_512_v4
+91/466767/campos_512_v4
+91/466784/campos_512_v4
+91/466812/campos_512_v4
+91/466877/campos_512_v4
+91/466922/campos_512_v4
+91/467023/campos_512_v4
+91/467043/campos_512_v4
+91/467048/campos_512_v4
+91/467070/campos_512_v4
+91/467162/campos_512_v4
+91/467165/campos_512_v4
+91/467266/campos_512_v4
+91/467290/campos_512_v4
+91/467403/campos_512_v4
+91/467456/campos_512_v4
+91/467467/campos_512_v4
+91/467498/campos_512_v4
+91/467679/campos_512_v4
+91/467685/campos_512_v4
+91/467827/campos_512_v4
+91/467890/campos_512_v4
+91/467943/campos_512_v4
+91/468041/campos_512_v4
+91/468062/campos_512_v4
+91/468115/campos_512_v4
+91/468150/campos_512_v4
+91/468210/campos_512_v4
+91/468218/campos_512_v4
+91/468252/campos_512_v4
+91/468283/campos_512_v4
+91/468291/campos_512_v4
+91/468355/campos_512_v4
+91/468358/campos_512_v4
+91/468370/campos_512_v4
+91/468406/campos_512_v4
+91/468576/campos_512_v4
+91/468601/campos_512_v4
+91/468738/campos_512_v4
+91/468740/campos_512_v4
+91/468807/campos_512_v4
+91/468947/campos_512_v4
+91/468950/campos_512_v4
+91/468986/campos_512_v4
+91/469020/campos_512_v4
+91/469062/campos_512_v4
+91/469215/campos_512_v4
+91/469303/campos_512_v4
+91/469327/campos_512_v4
+91/469475/campos_512_v4
+91/469477/campos_512_v4
+91/469514/campos_512_v4
+91/469600/campos_512_v4
+91/469613/campos_512_v4
+91/469653/campos_512_v4
+91/469661/campos_512_v4
+91/469705/campos_512_v4
+91/469801/campos_512_v4
+91/469885/campos_512_v4
+92/470008/campos_512_v4
+92/470062/campos_512_v4
+92/470214/campos_512_v4
+92/470332/campos_512_v4
+92/470369/campos_512_v4
+92/470434/campos_512_v4
+92/470489/campos_512_v4
+92/470579/campos_512_v4
+92/470624/campos_512_v4
+92/470635/campos_512_v4
+92/470829/campos_512_v4
+92/470838/campos_512_v4
+92/470852/campos_512_v4
+92/470925/campos_512_v4
+92/470945/campos_512_v4
+92/470999/campos_512_v4
+92/471035/campos_512_v4
+92/471120/campos_512_v4
+92/471262/campos_512_v4
+92/471394/campos_512_v4
+92/471447/campos_512_v4
+92/471456/campos_512_v4
+92/471504/campos_512_v4
+92/471507/campos_512_v4
+92/471525/campos_512_v4
+92/471531/campos_512_v4
+92/471713/campos_512_v4
+92/471797/campos_512_v4
+92/471815/campos_512_v4
+92/471821/campos_512_v4
+92/471879/campos_512_v4
+92/471893/campos_512_v4
+92/471935/campos_512_v4
+92/471943/campos_512_v4
+92/471960/campos_512_v4
+92/472168/campos_512_v4
+92/472178/campos_512_v4
+92/472193/campos_512_v4
+92/472268/campos_512_v4
+92/472324/campos_512_v4
+92/472392/campos_512_v4
+92/472433/campos_512_v4
+92/472436/campos_512_v4
+92/472437/campos_512_v4
+92/472508/campos_512_v4
+92/472686/campos_512_v4
+92/472695/campos_512_v4
+92/472871/campos_512_v4
+92/472891/campos_512_v4
+92/473021/campos_512_v4
+92/473032/campos_512_v4
+92/473045/campos_512_v4
+92/473062/campos_512_v4
+92/473093/campos_512_v4
+92/473138/campos_512_v4
+92/473149/campos_512_v4
+92/473214/campos_512_v4
+92/473220/campos_512_v4
+92/473242/campos_512_v4
+92/473300/campos_512_v4
+92/473430/campos_512_v4
+92/473443/campos_512_v4
+92/473544/campos_512_v4
+92/473583/campos_512_v4
+92/473672/campos_512_v4
+92/473738/campos_512_v4
+92/473802/campos_512_v4
+92/473852/campos_512_v4
+92/473889/campos_512_v4
+92/473891/campos_512_v4
+92/473919/campos_512_v4
+92/473926/campos_512_v4
+92/473949/campos_512_v4
+92/473960/campos_512_v4
+92/474025/campos_512_v4
+92/474032/campos_512_v4
+92/474051/campos_512_v4
+92/474058/campos_512_v4
+92/474108/campos_512_v4
+92/474121/campos_512_v4
+92/474183/campos_512_v4
+92/474341/campos_512_v4
+92/474352/campos_512_v4
+92/474359/campos_512_v4
+92/474437/campos_512_v4
+92/474477/campos_512_v4
+92/474532/campos_512_v4
+92/474558/campos_512_v4
+92/474593/campos_512_v4
+92/474601/campos_512_v4
+92/474621/campos_512_v4
+92/474721/campos_512_v4
+92/474730/campos_512_v4
+92/474749/campos_512_v4
+92/474763/campos_512_v4
+92/474809/campos_512_v4
+92/474871/campos_512_v4
+92/474965/campos_512_v4
+92/474966/campos_512_v4
+92/474974/campos_512_v4
+93/475077/campos_512_v4
+93/475078/campos_512_v4
+93/475144/campos_512_v4
+93/475232/campos_512_v4
+93/475240/campos_512_v4
+93/475294/campos_512_v4
+93/475295/campos_512_v4
+93/475466/campos_512_v4
+93/475513/campos_512_v4
+93/475527/campos_512_v4
+93/475545/campos_512_v4
+93/475709/campos_512_v4
+93/475754/campos_512_v4
+93/475840/campos_512_v4
+93/475893/campos_512_v4
+93/475982/campos_512_v4
+93/476084/campos_512_v4
+93/476104/campos_512_v4
+93/476127/campos_512_v4
+93/476152/campos_512_v4
+93/476233/campos_512_v4
+93/476413/campos_512_v4
+93/476415/campos_512_v4
+93/476441/campos_512_v4
+93/476683/campos_512_v4
+93/476768/campos_512_v4
+93/476847/campos_512_v4
+93/476875/campos_512_v4
+93/476934/campos_512_v4
+93/477009/campos_512_v4
+93/477053/campos_512_v4
+93/477101/campos_512_v4
+93/477104/campos_512_v4
+93/477148/campos_512_v4
+93/477268/campos_512_v4
+93/477308/campos_512_v4
+93/477341/campos_512_v4
+93/477354/campos_512_v4
+93/477424/campos_512_v4
+93/477429/campos_512_v4
+93/477530/campos_512_v4
+93/477641/campos_512_v4
+93/477824/campos_512_v4
+93/478016/campos_512_v4
+93/478129/campos_512_v4
+93/478191/campos_512_v4
+93/478250/campos_512_v4
+93/478315/campos_512_v4
+93/478329/campos_512_v4
+93/478331/campos_512_v4
+93/478411/campos_512_v4
+93/478426/campos_512_v4
+93/478492/campos_512_v4
+93/478495/campos_512_v4
+93/478496/campos_512_v4
+93/478619/campos_512_v4
+93/478622/campos_512_v4
+93/478683/campos_512_v4
+93/478706/campos_512_v4
+93/478763/campos_512_v4
+93/478839/campos_512_v4
+93/478852/campos_512_v4
+93/478882/campos_512_v4
+93/478932/campos_512_v4
+93/478978/campos_512_v4
+93/479071/campos_512_v4
+93/479099/campos_512_v4
+93/479133/campos_512_v4
+93/479181/campos_512_v4
+93/479185/campos_512_v4
+93/479187/campos_512_v4
+93/479277/campos_512_v4
+93/479302/campos_512_v4
+93/479353/campos_512_v4
+93/479435/campos_512_v4
+93/479521/campos_512_v4
+93/479523/campos_512_v4
+93/479597/campos_512_v4
+93/479633/campos_512_v4
+93/479644/campos_512_v4
+93/479732/campos_512_v4
+93/479734/campos_512_v4
+93/479754/campos_512_v4
+93/479785/campos_512_v4
+93/479794/campos_512_v4
+93/479813/campos_512_v4
+93/479903/campos_512_v4
+93/479924/campos_512_v4
+94/480026/campos_512_v4
+94/480027/campos_512_v4
+94/480049/campos_512_v4
+94/480065/campos_512_v4
+94/480082/campos_512_v4
+94/480101/campos_512_v4
+94/480154/campos_512_v4
+94/480169/campos_512_v4
+94/480218/campos_512_v4
+94/480414/campos_512_v4
+94/480421/campos_512_v4
+94/480531/campos_512_v4
+94/480537/campos_512_v4
+94/480625/campos_512_v4
+94/480634/campos_512_v4
+94/480666/campos_512_v4
+94/480703/campos_512_v4
+94/480783/campos_512_v4
+94/480798/campos_512_v4
+94/480867/campos_512_v4
+94/480882/campos_512_v4
+94/480910/campos_512_v4
+94/480913/campos_512_v4
+94/480944/campos_512_v4
+94/481032/campos_512_v4
+94/481054/campos_512_v4
+94/481084/campos_512_v4
+94/481117/campos_512_v4
+94/481129/campos_512_v4
+94/481140/campos_512_v4
+94/481226/campos_512_v4
+94/481231/campos_512_v4
+94/481285/campos_512_v4
+94/481383/campos_512_v4
+94/481389/campos_512_v4
+94/481414/campos_512_v4
+94/481611/campos_512_v4
+94/481693/campos_512_v4
+94/481865/campos_512_v4
+94/481916/campos_512_v4
+94/482116/campos_512_v4
+94/482138/campos_512_v4
+94/482176/campos_512_v4
+94/482353/campos_512_v4
+94/482400/campos_512_v4
+94/482597/campos_512_v4
+94/482611/campos_512_v4
+94/482617/campos_512_v4
+94/482633/campos_512_v4
+94/482782/campos_512_v4
+94/482818/campos_512_v4
+94/482846/campos_512_v4
+94/482863/campos_512_v4
+94/482866/campos_512_v4
+94/482939/campos_512_v4
+94/483041/campos_512_v4
+94/483117/campos_512_v4
+94/483170/campos_512_v4
+94/483197/campos_512_v4
+94/483287/campos_512_v4
+94/483317/campos_512_v4
+94/483396/campos_512_v4
+94/483403/campos_512_v4
+94/483443/campos_512_v4
+94/483573/campos_512_v4
+94/483598/campos_512_v4
+94/483613/campos_512_v4
+94/483685/campos_512_v4
+94/483687/campos_512_v4
+94/483689/campos_512_v4
+94/483709/campos_512_v4
+94/483783/campos_512_v4
+94/483915/campos_512_v4
+94/484019/campos_512_v4
+94/484111/campos_512_v4
+94/484159/campos_512_v4
+94/484173/campos_512_v4
+94/484367/campos_512_v4
+94/484409/campos_512_v4
+94/484483/campos_512_v4
+94/484555/campos_512_v4
+94/484655/campos_512_v4
+94/484721/campos_512_v4
+94/484798/campos_512_v4
+94/484804/campos_512_v4
+94/484810/campos_512_v4
+94/484853/campos_512_v4
+94/484875/campos_512_v4
+95/485042/campos_512_v4
+95/485129/campos_512_v4
+95/485192/campos_512_v4
+95/485205/campos_512_v4
+95/485263/campos_512_v4
+95/485298/campos_512_v4
+95/485361/campos_512_v4
+95/485452/campos_512_v4
+95/485456/campos_512_v4
+95/485577/campos_512_v4
+95/485656/campos_512_v4
+95/485727/campos_512_v4
+95/485834/campos_512_v4
+95/485867/campos_512_v4
+95/485887/campos_512_v4
+95/485994/campos_512_v4
+95/486116/campos_512_v4
+95/486139/campos_512_v4
+95/486257/campos_512_v4
+95/486343/campos_512_v4
+95/486419/campos_512_v4
+95/486453/campos_512_v4
+95/486508/campos_512_v4
+95/486738/campos_512_v4
+95/486987/campos_512_v4
+95/487041/campos_512_v4
+95/487047/campos_512_v4
+95/487050/campos_512_v4
+95/487217/campos_512_v4
+95/487292/campos_512_v4
+95/487298/campos_512_v4
+95/487371/campos_512_v4
+95/487525/campos_512_v4
+95/487628/campos_512_v4
+95/487727/campos_512_v4
+95/487758/campos_512_v4
+95/487777/campos_512_v4
+95/487836/campos_512_v4
+95/487871/campos_512_v4
+95/487912/campos_512_v4
+95/487920/campos_512_v4
+95/487927/campos_512_v4
+95/487944/campos_512_v4
+95/487975/campos_512_v4
+95/488083/campos_512_v4
+95/488098/campos_512_v4
+95/488160/campos_512_v4
+95/488176/campos_512_v4
+95/488177/campos_512_v4
+95/488277/campos_512_v4
+95/488290/campos_512_v4
+95/488372/campos_512_v4
+95/488481/campos_512_v4
+95/488533/campos_512_v4
+95/488597/campos_512_v4
+95/488620/campos_512_v4
+95/488720/campos_512_v4
+95/488743/campos_512_v4
+95/488786/campos_512_v4
+95/488787/campos_512_v4
+95/488794/campos_512_v4
+95/488892/campos_512_v4
+95/488949/campos_512_v4
+95/489096/campos_512_v4
+95/489112/campos_512_v4
+95/489142/campos_512_v4
+95/489262/campos_512_v4
+95/489380/campos_512_v4
+95/489495/campos_512_v4
+95/489582/campos_512_v4
+95/489628/campos_512_v4
+95/489635/campos_512_v4
+95/489644/campos_512_v4
+95/489685/campos_512_v4
+95/489723/campos_512_v4
+95/489789/campos_512_v4
+95/489842/campos_512_v4
+95/489885/campos_512_v4
+95/489909/campos_512_v4
+95/489917/campos_512_v4
+95/489943/campos_512_v4
+95/489944/campos_512_v4
+96/490016/campos_512_v4
+96/490027/campos_512_v4
+96/490041/campos_512_v4
+96/490054/campos_512_v4
+96/490067/campos_512_v4
+96/490115/campos_512_v4
+96/490265/campos_512_v4
+96/490436/campos_512_v4
+96/490536/campos_512_v4
+96/490682/campos_512_v4
+96/490689/campos_512_v4
+96/490747/campos_512_v4
+96/490748/campos_512_v4
+96/490867/campos_512_v4
+96/490878/campos_512_v4
+96/490882/campos_512_v4
+96/490932/campos_512_v4
+96/491007/campos_512_v4
+96/491061/campos_512_v4
+96/491079/campos_512_v4
+96/491149/campos_512_v4
+96/491180/campos_512_v4
+96/491257/campos_512_v4
+96/491316/campos_512_v4
+96/491524/campos_512_v4
+96/491598/campos_512_v4
+96/491616/campos_512_v4
+96/491639/campos_512_v4
+96/491643/campos_512_v4
+96/491670/campos_512_v4
+96/491710/campos_512_v4
+96/491757/campos_512_v4
+96/491760/campos_512_v4
+96/491804/campos_512_v4
+96/491903/campos_512_v4
+96/491987/campos_512_v4
+96/492031/campos_512_v4
+96/492035/campos_512_v4
+96/492051/campos_512_v4
+96/492245/campos_512_v4
+96/492266/campos_512_v4
+96/492309/campos_512_v4
+96/492341/campos_512_v4
+96/492437/campos_512_v4
+96/492537/campos_512_v4
+96/492635/campos_512_v4
+96/492650/campos_512_v4
+96/493129/campos_512_v4
+96/493219/campos_512_v4
+96/493245/campos_512_v4
+96/493316/campos_512_v4
+96/493342/campos_512_v4
+96/493412/campos_512_v4
+96/493460/campos_512_v4
+96/493536/campos_512_v4
+96/493546/campos_512_v4
+96/493562/campos_512_v4
+96/493717/campos_512_v4
+96/493779/campos_512_v4
+96/493795/campos_512_v4
+96/493834/campos_512_v4
+96/493882/campos_512_v4
+96/493913/campos_512_v4
+96/493945/campos_512_v4
+96/493985/campos_512_v4
+96/493989/campos_512_v4
+96/494119/campos_512_v4
+96/494148/campos_512_v4
+96/494189/campos_512_v4
+96/494199/campos_512_v4
+96/494255/campos_512_v4
+96/494332/campos_512_v4
+96/494397/campos_512_v4
+96/494579/campos_512_v4
+96/494782/campos_512_v4
+96/494799/campos_512_v4
+96/494843/campos_512_v4
+96/494863/campos_512_v4
+96/494877/campos_512_v4
+96/494887/campos_512_v4
+96/494977/campos_512_v4
+97/495059/campos_512_v4
+97/495295/campos_512_v4
+97/495348/campos_512_v4
+97/495386/campos_512_v4
+97/495432/campos_512_v4
+97/495656/campos_512_v4
+97/495706/campos_512_v4
+97/495854/campos_512_v4
+97/496057/campos_512_v4
+97/496087/campos_512_v4
+97/496209/campos_512_v4
+97/496284/campos_512_v4
+97/496311/campos_512_v4
+97/496367/campos_512_v4
+97/496417/campos_512_v4
+97/496433/campos_512_v4
+97/496520/campos_512_v4
+97/496547/campos_512_v4
+97/496565/campos_512_v4
+97/496632/campos_512_v4
+97/496678/campos_512_v4
+97/496717/campos_512_v4
+97/496825/campos_512_v4
+97/497086/campos_512_v4
+97/497109/campos_512_v4
+97/497191/campos_512_v4
+97/497220/campos_512_v4
+97/497248/campos_512_v4
+97/497317/campos_512_v4
+97/497379/campos_512_v4
+97/497632/campos_512_v4
+97/497634/campos_512_v4
+97/497707/campos_512_v4
+97/497767/campos_512_v4
+97/497819/campos_512_v4
+97/497914/campos_512_v4
+97/498022/campos_512_v4
+97/498247/campos_512_v4
+97/498249/campos_512_v4
+97/498550/campos_512_v4
+97/498623/campos_512_v4
+97/498650/campos_512_v4
+97/498661/campos_512_v4
+97/498719/campos_512_v4
+97/498726/campos_512_v4
+97/498798/campos_512_v4
+97/498848/campos_512_v4
+97/498859/campos_512_v4
+97/498909/campos_512_v4
+97/499130/campos_512_v4
+97/499221/campos_512_v4
+97/499222/campos_512_v4
+97/499478/campos_512_v4
+97/499503/campos_512_v4
+97/499591/campos_512_v4
+97/499640/campos_512_v4
+97/499942/campos_512_v4
+98/500013/campos_512_v4
+98/500098/campos_512_v4
+98/500123/campos_512_v4
+98/500155/campos_512_v4
+98/500224/campos_512_v4
+98/500347/campos_512_v4
+98/500414/campos_512_v4
+98/500468/campos_512_v4
+98/500519/campos_512_v4
+98/500573/campos_512_v4
+98/500609/campos_512_v4
+98/500617/campos_512_v4
+98/500713/campos_512_v4
+98/500780/campos_512_v4
+98/500837/campos_512_v4
+98/500863/campos_512_v4
+98/500908/campos_512_v4
+98/500917/campos_512_v4
+98/500935/campos_512_v4
+98/501061/campos_512_v4
+98/501072/campos_512_v4
+98/501114/campos_512_v4
+98/501171/campos_512_v4
+98/501241/campos_512_v4
+98/501300/campos_512_v4
+98/501333/campos_512_v4
+98/501361/campos_512_v4
+98/501417/campos_512_v4
+98/501459/campos_512_v4
+98/501464/campos_512_v4
+98/501568/campos_512_v4
+98/501582/campos_512_v4
+98/501757/campos_512_v4
+98/501896/campos_512_v4
+98/501959/campos_512_v4
+98/502051/campos_512_v4
+98/502132/campos_512_v4
+98/502230/campos_512_v4
+98/502380/campos_512_v4
+98/502462/campos_512_v4
+98/502670/campos_512_v4
+98/502854/campos_512_v4
+98/502965/campos_512_v4
+98/503016/campos_512_v4
+98/503161/campos_512_v4
+98/503225/campos_512_v4
+98/503255/campos_512_v4
+98/503328/campos_512_v4
+98/503380/campos_512_v4
+98/503404/campos_512_v4
+98/503483/campos_512_v4
+98/503486/campos_512_v4
+98/503644/campos_512_v4
+98/503926/campos_512_v4
+98/503983/campos_512_v4
+98/504088/campos_512_v4
+98/504165/campos_512_v4
+98/504190/campos_512_v4
+98/504191/campos_512_v4
+98/504216/campos_512_v4
+98/504296/campos_512_v4
+98/504323/campos_512_v4
+98/504386/campos_512_v4
+98/504403/campos_512_v4
+98/504462/campos_512_v4
+98/504620/campos_512_v4
+98/504749/campos_512_v4
+98/504847/campos_512_v4
+98/504930/campos_512_v4
+98/505000/campos_512_v4
+99/505058/campos_512_v4
+99/505070/campos_512_v4
+99/505092/campos_512_v4
+99/505093/campos_512_v4
+99/505108/campos_512_v4
+99/505344/campos_512_v4
+99/505367/campos_512_v4
+99/505463/campos_512_v4
+99/505500/campos_512_v4
+99/505528/campos_512_v4
+99/505595/campos_512_v4
+99/505709/campos_512_v4
+99/505726/campos_512_v4
+99/505756/campos_512_v4
+99/505954/campos_512_v4
+99/506040/campos_512_v4
+99/506078/campos_512_v4
+99/506258/campos_512_v4
+99/506372/campos_512_v4
+99/506535/campos_512_v4
+99/506560/campos_512_v4
+99/506615/campos_512_v4
+99/506643/campos_512_v4
+99/506669/campos_512_v4
+99/506903/campos_512_v4
+99/507026/campos_512_v4
+99/507199/campos_512_v4
+99/507201/campos_512_v4
+99/507481/campos_512_v4
+99/507566/campos_512_v4
+99/507576/campos_512_v4
+99/507672/campos_512_v4
+99/507861/campos_512_v4
+99/507885/campos_512_v4
+99/507919/campos_512_v4
+99/507925/campos_512_v4
+99/508038/campos_512_v4
+99/508066/campos_512_v4
+99/508197/campos_512_v4
+99/508227/campos_512_v4
+99/508251/campos_512_v4
+99/508299/campos_512_v4
+99/508364/campos_512_v4
+99/508513/campos_512_v4
+99/508734/campos_512_v4
+99/508772/campos_512_v4
+99/508802/campos_512_v4
+99/508835/campos_512_v4
+99/508859/campos_512_v4
+99/508868/campos_512_v4
+99/508985/campos_512_v4
+99/509039/campos_512_v4
+99/509097/campos_512_v4
+99/509194/campos_512_v4
+99/509310/campos_512_v4
+99/509345/campos_512_v4
+99/509371/campos_512_v4
+99/509608/campos_512_v4
+99/509630/campos_512_v4
+99/509798/campos_512_v4
+99/509835/campos_512_v4
+99/509898/campos_512_v4
diff --git a/shell_scripts/raw_img_list/Plants.txt b/shell_scripts/raw_img_list/Plants.txt
new file mode 100644
index 0000000000000000000000000000000000000000..027c84e9ffc235eb01fc4cc2ad9e6bc860ebaf4e
--- /dev/null
+++ b/shell_scripts/raw_img_list/Plants.txt
@@ -0,0 +1,3316 @@
+0/10005/campos_512_v4
+0/10206/campos_512_v4
+0/10546/campos_512_v4
+0/10556/campos_512_v4
+0/10618/campos_512_v4
+0/10862/campos_512_v4
+0/10898/campos_512_v4
+0/11697/campos_512_v4
+0/11908/campos_512_v4
+0/11951/campos_512_v4
+0/12191/campos_512_v4
+0/12726/campos_512_v4
+0/12835/campos_512_v4
+0/13120/campos_512_v4
+0/13958/campos_512_v4
+0/13994/campos_512_v4
+0/14152/campos_512_v4
+0/14347/campos_512_v4
+0/14748/campos_512_v4
+0/14891/campos_512_v4
+1/15972/campos_512_v4
+1/16857/campos_512_v4
+1/17526/campos_512_v4
+1/17551/campos_512_v4
+1/17563/campos_512_v4
+1/17660/campos_512_v4
+1/17672/campos_512_v4
+1/18252/campos_512_v4
+1/18319/campos_512_v4
+1/18322/campos_512_v4
+1/18534/campos_512_v4
+1/19279/campos_512_v4
+1/19427/campos_512_v4
+10/60843/campos_512_v4
+10/60893/campos_512_v4
+10/60962/campos_512_v4
+10/60997/campos_512_v4
+10/61042/campos_512_v4
+10/61200/campos_512_v4
+10/61205/campos_512_v4
+10/61396/campos_512_v4
+10/61652/campos_512_v4
+10/61878/campos_512_v4
+10/61924/campos_512_v4
+10/63324/campos_512_v4
+10/63352/campos_512_v4
+10/64206/campos_512_v4
+10/64393/campos_512_v4
+10/64403/campos_512_v4
+10/64608/campos_512_v4
+10/64907/campos_512_v4
+100/510935/campos_512_v4
+100/510940/campos_512_v4
+100/511003/campos_512_v4
+100/511415/campos_512_v4
+100/511420/campos_512_v4
+100/511473/campos_512_v4
+100/511612/campos_512_v4
+100/512341/campos_512_v4
+100/512680/campos_512_v4
+100/513053/campos_512_v4
+100/513068/campos_512_v4
+100/513107/campos_512_v4
+100/513129/campos_512_v4
+100/513317/campos_512_v4
+100/513364/campos_512_v4
+100/513418/campos_512_v4
+100/513492/campos_512_v4
+100/513619/campos_512_v4
+100/513707/campos_512_v4
+100/513759/campos_512_v4
+100/514053/campos_512_v4
+100/514189/campos_512_v4
+100/514298/campos_512_v4
+100/514597/campos_512_v4
+100/514663/campos_512_v4
+100/514715/campos_512_v4
+100/514947/campos_512_v4
+101/515078/campos_512_v4
+101/515116/campos_512_v4
+101/515406/campos_512_v4
+101/516188/campos_512_v4
+101/516414/campos_512_v4
+101/516621/campos_512_v4
+101/516824/campos_512_v4
+101/516937/campos_512_v4
+101/516981/campos_512_v4
+101/517058/campos_512_v4
+101/517253/campos_512_v4
+101/517346/campos_512_v4
+101/518776/campos_512_v4
+101/518878/campos_512_v4
+101/519009/campos_512_v4
+101/519210/campos_512_v4
+101/519269/campos_512_v4
+101/519327/campos_512_v4
+101/519443/campos_512_v4
+101/519562/campos_512_v4
+101/519699/campos_512_v4
+101/519863/campos_512_v4
+102/520147/campos_512_v4
+102/520273/campos_512_v4
+102/520573/campos_512_v4
+102/520960/campos_512_v4
+102/521132/campos_512_v4
+102/521188/campos_512_v4
+102/521798/campos_512_v4
+102/522008/campos_512_v4
+102/522069/campos_512_v4
+102/522359/campos_512_v4
+102/522716/campos_512_v4
+102/522843/campos_512_v4
+102/522975/campos_512_v4
+102/523440/campos_512_v4
+102/523474/campos_512_v4
+102/523719/campos_512_v4
+102/524061/campos_512_v4
+102/524193/campos_512_v4
+102/524201/campos_512_v4
+102/524293/campos_512_v4
+102/524301/campos_512_v4
+102/524608/campos_512_v4
+102/524737/campos_512_v4
+103/525186/campos_512_v4
+103/525256/campos_512_v4
+103/525287/campos_512_v4
+103/525566/campos_512_v4
+103/525766/campos_512_v4
+103/526130/campos_512_v4
+103/526475/campos_512_v4
+103/526638/campos_512_v4
+103/526653/campos_512_v4
+103/526741/campos_512_v4
+103/526890/campos_512_v4
+103/526919/campos_512_v4
+103/526943/campos_512_v4
+103/527383/campos_512_v4
+103/527427/campos_512_v4
+103/527548/campos_512_v4
+103/527624/campos_512_v4
+103/527653/campos_512_v4
+103/527724/campos_512_v4
+103/528124/campos_512_v4
+103/528549/campos_512_v4
+103/528779/campos_512_v4
+103/529132/campos_512_v4
+103/529370/campos_512_v4
+103/529407/campos_512_v4
+103/529441/campos_512_v4
+103/529950/campos_512_v4
+103/529978/campos_512_v4
+104/530436/campos_512_v4
+104/530857/campos_512_v4
+104/530863/campos_512_v4
+104/530942/campos_512_v4
+104/531054/campos_512_v4
+104/531512/campos_512_v4
+104/531632/campos_512_v4
+104/531886/campos_512_v4
+104/531931/campos_512_v4
+104/532046/campos_512_v4
+104/532097/campos_512_v4
+104/532384/campos_512_v4
+104/532412/campos_512_v4
+104/532506/campos_512_v4
+104/532582/campos_512_v4
+104/533175/campos_512_v4
+104/534126/campos_512_v4
+104/534232/campos_512_v4
+104/534249/campos_512_v4
+104/534350/campos_512_v4
+104/534829/campos_512_v4
+105/535089/campos_512_v4
+105/535164/campos_512_v4
+105/535306/campos_512_v4
+105/535502/campos_512_v4
+105/535635/campos_512_v4
+105/536066/campos_512_v4
+105/536368/campos_512_v4
+105/536849/campos_512_v4
+105/537208/campos_512_v4
+105/537405/campos_512_v4
+105/537451/campos_512_v4
+105/537537/campos_512_v4
+105/537793/campos_512_v4
+105/537879/campos_512_v4
+105/538524/campos_512_v4
+105/538574/campos_512_v4
+105/538974/campos_512_v4
+105/539117/campos_512_v4
+105/539271/campos_512_v4
+105/539346/campos_512_v4
+105/539715/campos_512_v4
+105/539782/campos_512_v4
+105/539893/campos_512_v4
+105/539902/campos_512_v4
+106/540183/campos_512_v4
+106/540394/campos_512_v4
+106/540400/campos_512_v4
+106/540617/campos_512_v4
+106/540730/campos_512_v4
+106/541228/campos_512_v4
+106/541259/campos_512_v4
+106/541820/campos_512_v4
+106/541841/campos_512_v4
+106/541864/campos_512_v4
+106/542288/campos_512_v4
+106/542744/campos_512_v4
+106/543054/campos_512_v4
+106/543171/campos_512_v4
+106/543834/campos_512_v4
+106/543872/campos_512_v4
+106/543975/campos_512_v4
+106/544170/campos_512_v4
+106/544236/campos_512_v4
+106/544253/campos_512_v4
+106/544304/campos_512_v4
+106/544407/campos_512_v4
+106/544648/campos_512_v4
+107/545320/campos_512_v4
+107/545393/campos_512_v4
+107/545483/campos_512_v4
+107/545785/campos_512_v4
+107/545873/campos_512_v4
+107/546020/campos_512_v4
+107/546078/campos_512_v4
+107/546419/campos_512_v4
+107/546568/campos_512_v4
+107/546754/campos_512_v4
+107/546760/campos_512_v4
+107/547151/campos_512_v4
+107/547729/campos_512_v4
+107/548054/campos_512_v4
+107/548134/campos_512_v4
+107/548345/campos_512_v4
+107/548346/campos_512_v4
+107/548911/campos_512_v4
+107/549092/campos_512_v4
+107/549115/campos_512_v4
+107/549138/campos_512_v4
+107/549223/campos_512_v4
+107/549314/campos_512_v4
+107/549523/campos_512_v4
+108/550235/campos_512_v4
+108/550274/campos_512_v4
+108/550308/campos_512_v4
+108/550646/campos_512_v4
+108/550690/campos_512_v4
+108/550696/campos_512_v4
+108/551071/campos_512_v4
+108/551418/campos_512_v4
+108/551651/campos_512_v4
+108/552124/campos_512_v4
+108/552282/campos_512_v4
+108/552409/campos_512_v4
+108/552410/campos_512_v4
+108/552514/campos_512_v4
+108/552534/campos_512_v4
+108/552565/campos_512_v4
+108/552826/campos_512_v4
+108/553249/campos_512_v4
+108/553378/campos_512_v4
+108/553443/campos_512_v4
+108/553489/campos_512_v4
+108/553595/campos_512_v4
+108/553658/campos_512_v4
+108/554431/campos_512_v4
+108/554657/campos_512_v4
+108/554679/campos_512_v4
+109/555006/campos_512_v4
+109/555270/campos_512_v4
+109/555610/campos_512_v4
+109/555616/campos_512_v4
+109/555712/campos_512_v4
+109/555916/campos_512_v4
+109/556166/campos_512_v4
+109/556501/campos_512_v4
+109/556576/campos_512_v4
+109/556591/campos_512_v4
+109/556602/campos_512_v4
+109/556834/campos_512_v4
+109/556986/campos_512_v4
+109/557556/campos_512_v4
+109/557576/campos_512_v4
+109/557640/campos_512_v4
+109/557700/campos_512_v4
+109/557722/campos_512_v4
+109/558003/campos_512_v4
+109/558017/campos_512_v4
+109/558143/campos_512_v4
+109/558284/campos_512_v4
+109/558374/campos_512_v4
+109/559255/campos_512_v4
+109/559309/campos_512_v4
+109/559313/campos_512_v4
+11/65377/campos_512_v4
+11/65412/campos_512_v4
+11/65568/campos_512_v4
+11/65985/campos_512_v4
+11/66090/campos_512_v4
+11/66356/campos_512_v4
+11/67169/campos_512_v4
+11/67266/campos_512_v4
+11/67540/campos_512_v4
+11/68089/campos_512_v4
+11/68172/campos_512_v4
+11/68332/campos_512_v4
+11/69099/campos_512_v4
+11/69178/campos_512_v4
+11/69378/campos_512_v4
+11/69496/campos_512_v4
+11/69708/campos_512_v4
+11/69721/campos_512_v4
+11/69838/campos_512_v4
+11/69955/campos_512_v4
+110/560307/campos_512_v4
+110/560864/campos_512_v4
+110/560979/campos_512_v4
+110/561140/campos_512_v4
+110/561176/campos_512_v4
+110/561213/campos_512_v4
+110/561322/campos_512_v4
+110/561686/campos_512_v4
+110/561805/campos_512_v4
+110/561830/campos_512_v4
+110/562035/campos_512_v4
+110/562058/campos_512_v4
+110/562143/campos_512_v4
+110/562174/campos_512_v4
+110/562603/campos_512_v4
+110/562633/campos_512_v4
+110/562787/campos_512_v4
+110/563000/campos_512_v4
+110/563025/campos_512_v4
+110/563149/campos_512_v4
+110/563885/campos_512_v4
+110/564090/campos_512_v4
+110/564093/campos_512_v4
+110/564357/campos_512_v4
+110/564362/campos_512_v4
+110/564403/campos_512_v4
+110/564840/campos_512_v4
+110/564852/campos_512_v4
+110/564952/campos_512_v4
+111/565386/campos_512_v4
+111/565443/campos_512_v4
+111/565994/campos_512_v4
+111/566159/campos_512_v4
+111/566353/campos_512_v4
+111/566429/campos_512_v4
+111/566634/campos_512_v4
+111/566651/campos_512_v4
+111/567005/campos_512_v4
+111/567099/campos_512_v4
+111/567133/campos_512_v4
+111/567147/campos_512_v4
+111/567166/campos_512_v4
+111/567296/campos_512_v4
+111/567389/campos_512_v4
+111/567565/campos_512_v4
+111/567602/campos_512_v4
+111/568054/campos_512_v4
+111/568383/campos_512_v4
+111/568428/campos_512_v4
+111/568827/campos_512_v4
+111/568903/campos_512_v4
+111/569190/campos_512_v4
+111/569551/campos_512_v4
+111/569704/campos_512_v4
+111/569784/campos_512_v4
+112/570089/campos_512_v4
+112/570657/campos_512_v4
+112/570691/campos_512_v4
+112/570694/campos_512_v4
+112/570929/campos_512_v4
+112/571118/campos_512_v4
+112/571196/campos_512_v4
+112/571237/campos_512_v4
+112/571291/campos_512_v4
+112/571530/campos_512_v4
+112/571539/campos_512_v4
+112/571754/campos_512_v4
+112/572201/campos_512_v4
+112/573245/campos_512_v4
+112/573458/campos_512_v4
+112/573491/campos_512_v4
+112/573526/campos_512_v4
+112/573755/campos_512_v4
+112/573784/campos_512_v4
+112/573951/campos_512_v4
+112/573962/campos_512_v4
+112/574118/campos_512_v4
+112/574176/campos_512_v4
+112/574186/campos_512_v4
+112/574712/campos_512_v4
+112/574731/campos_512_v4
+112/574869/campos_512_v4
+112/575000/campos_512_v4
+113/575120/campos_512_v4
+113/575179/campos_512_v4
+113/575209/campos_512_v4
+113/575306/campos_512_v4
+113/575344/campos_512_v4
+113/575543/campos_512_v4
+113/575678/campos_512_v4
+113/576001/campos_512_v4
+113/576763/campos_512_v4
+113/577171/campos_512_v4
+113/577209/campos_512_v4
+113/577354/campos_512_v4
+113/577463/campos_512_v4
+113/577647/campos_512_v4
+113/577771/campos_512_v4
+113/577858/campos_512_v4
+113/578150/campos_512_v4
+113/578207/campos_512_v4
+113/578396/campos_512_v4
+113/578701/campos_512_v4
+113/578727/campos_512_v4
+113/578731/campos_512_v4
+113/578988/campos_512_v4
+113/579031/campos_512_v4
+113/579280/campos_512_v4
+113/579377/campos_512_v4
+113/579988/campos_512_v4
+114/580349/campos_512_v4
+114/580507/campos_512_v4
+114/580566/campos_512_v4
+114/580650/campos_512_v4
+114/580710/campos_512_v4
+114/581017/campos_512_v4
+114/581439/campos_512_v4
+114/582588/campos_512_v4
+114/582910/campos_512_v4
+114/583354/campos_512_v4
+114/583807/campos_512_v4
+114/583811/campos_512_v4
+114/584374/campos_512_v4
+114/584382/campos_512_v4
+114/584402/campos_512_v4
+115/585084/campos_512_v4
+115/585211/campos_512_v4
+115/585303/campos_512_v4
+115/585334/campos_512_v4
+115/585703/campos_512_v4
+115/586095/campos_512_v4
+115/586187/campos_512_v4
+115/586521/campos_512_v4
+115/586551/campos_512_v4
+115/586553/campos_512_v4
+115/586646/campos_512_v4
+115/586701/campos_512_v4
+115/586805/campos_512_v4
+115/586916/campos_512_v4
+115/587233/campos_512_v4
+115/587338/campos_512_v4
+115/587394/campos_512_v4
+115/587505/campos_512_v4
+115/587529/campos_512_v4
+115/587643/campos_512_v4
+115/589002/campos_512_v4
+115/589067/campos_512_v4
+115/589269/campos_512_v4
+115/589659/campos_512_v4
+115/589747/campos_512_v4
+116/590106/campos_512_v4
+116/590241/campos_512_v4
+116/590659/campos_512_v4
+116/590898/campos_512_v4
+116/590983/campos_512_v4
+116/591131/campos_512_v4
+116/591442/campos_512_v4
+116/591707/campos_512_v4
+116/591783/campos_512_v4
+116/591926/campos_512_v4
+116/592183/campos_512_v4
+116/592250/campos_512_v4
+116/592596/campos_512_v4
+116/592677/campos_512_v4
+116/592717/campos_512_v4
+116/592726/campos_512_v4
+116/592765/campos_512_v4
+116/592810/campos_512_v4
+116/592830/campos_512_v4
+116/592887/campos_512_v4
+116/593069/campos_512_v4
+116/593392/campos_512_v4
+116/593515/campos_512_v4
+116/594162/campos_512_v4
+116/594252/campos_512_v4
+116/594453/campos_512_v4
+117/595317/campos_512_v4
+117/595367/campos_512_v4
+117/595711/campos_512_v4
+117/595733/campos_512_v4
+117/595800/campos_512_v4
+117/595980/campos_512_v4
+117/596384/campos_512_v4
+117/596578/campos_512_v4
+117/596890/campos_512_v4
+117/596973/campos_512_v4
+117/598022/campos_512_v4
+117/598161/campos_512_v4
+117/598274/campos_512_v4
+117/598320/campos_512_v4
+117/598384/campos_512_v4
+117/598432/campos_512_v4
+117/598711/campos_512_v4
+117/598761/campos_512_v4
+117/599020/campos_512_v4
+117/599347/campos_512_v4
+117/599365/campos_512_v4
+117/599500/campos_512_v4
+117/599995/campos_512_v4
+118/600067/campos_512_v4
+118/600883/campos_512_v4
+118/601084/campos_512_v4
+118/601795/campos_512_v4
+118/602024/campos_512_v4
+118/602141/campos_512_v4
+118/602486/campos_512_v4
+118/602814/campos_512_v4
+118/602851/campos_512_v4
+118/602869/campos_512_v4
+118/602877/campos_512_v4
+118/602909/campos_512_v4
+118/603572/campos_512_v4
+118/604346/campos_512_v4
+118/604585/campos_512_v4
+118/604778/campos_512_v4
+118/604817/campos_512_v4
+118/604901/campos_512_v4
+119/605302/campos_512_v4
+119/605412/campos_512_v4
+119/605567/campos_512_v4
+119/606132/campos_512_v4
+119/606203/campos_512_v4
+119/606457/campos_512_v4
+119/606643/campos_512_v4
+119/606921/campos_512_v4
+119/607232/campos_512_v4
+119/607342/campos_512_v4
+119/607486/campos_512_v4
+119/607763/campos_512_v4
+119/608655/campos_512_v4
+119/608723/campos_512_v4
+119/608877/campos_512_v4
+119/608910/campos_512_v4
+119/609294/campos_512_v4
+119/609591/campos_512_v4
+119/609805/campos_512_v4
+119/609846/campos_512_v4
+12/70745/campos_512_v4
+12/70785/campos_512_v4
+12/70931/campos_512_v4
+12/71181/campos_512_v4
+12/71495/campos_512_v4
+12/71731/campos_512_v4
+12/72008/campos_512_v4
+12/72499/campos_512_v4
+12/72910/campos_512_v4
+12/72991/campos_512_v4
+12/73025/campos_512_v4
+12/73136/campos_512_v4
+12/73371/campos_512_v4
+12/73594/campos_512_v4
+12/73920/campos_512_v4
+12/74182/campos_512_v4
+12/74317/campos_512_v4
+12/74554/campos_512_v4
+12/74605/campos_512_v4
+12/74806/campos_512_v4
+12/74817/campos_512_v4
+120/610371/campos_512_v4
+120/610384/campos_512_v4
+120/610811/campos_512_v4
+120/610938/campos_512_v4
+120/611960/campos_512_v4
+120/611979/campos_512_v4
+120/611996/campos_512_v4
+120/612191/campos_512_v4
+120/612728/campos_512_v4
+120/612837/campos_512_v4
+120/613194/campos_512_v4
+120/613335/campos_512_v4
+120/613409/campos_512_v4
+120/613467/campos_512_v4
+120/613842/campos_512_v4
+120/614045/campos_512_v4
+120/614077/campos_512_v4
+120/614221/campos_512_v4
+120/614233/campos_512_v4
+120/614257/campos_512_v4
+120/614497/campos_512_v4
+120/614732/campos_512_v4
+120/614770/campos_512_v4
+120/614937/campos_512_v4
+120/614999/campos_512_v4
+121/615172/campos_512_v4
+121/615514/campos_512_v4
+121/615844/campos_512_v4
+121/616233/campos_512_v4
+121/616270/campos_512_v4
+121/616276/campos_512_v4
+121/617049/campos_512_v4
+121/617266/campos_512_v4
+121/617943/campos_512_v4
+121/618019/campos_512_v4
+121/618319/campos_512_v4
+121/618461/campos_512_v4
+121/618503/campos_512_v4
+121/618635/campos_512_v4
+121/618792/campos_512_v4
+121/618848/campos_512_v4
+121/619116/campos_512_v4
+121/619130/campos_512_v4
+121/619149/campos_512_v4
+121/619756/campos_512_v4
+122/620298/campos_512_v4
+122/620362/campos_512_v4
+122/620533/campos_512_v4
+122/620628/campos_512_v4
+122/620870/campos_512_v4
+122/621017/campos_512_v4
+122/621198/campos_512_v4
+122/621698/campos_512_v4
+122/622560/campos_512_v4
+122/622794/campos_512_v4
+122/623477/campos_512_v4
+122/624070/campos_512_v4
+122/624177/campos_512_v4
+122/624218/campos_512_v4
+122/624423/campos_512_v4
+122/624992/campos_512_v4
+123/625271/campos_512_v4
+123/625992/campos_512_v4
+123/626391/campos_512_v4
+123/626426/campos_512_v4
+123/626828/campos_512_v4
+123/627140/campos_512_v4
+123/627477/campos_512_v4
+123/627607/campos_512_v4
+123/628044/campos_512_v4
+123/628096/campos_512_v4
+123/628182/campos_512_v4
+123/628280/campos_512_v4
+123/628418/campos_512_v4
+123/628509/campos_512_v4
+123/629432/campos_512_v4
+123/629522/campos_512_v4
+123/629586/campos_512_v4
+123/629970/campos_512_v4
+124/630126/campos_512_v4
+124/630322/campos_512_v4
+124/630736/campos_512_v4
+124/630791/campos_512_v4
+124/630913/campos_512_v4
+124/630950/campos_512_v4
+124/631110/campos_512_v4
+124/631459/campos_512_v4
+124/631833/campos_512_v4
+124/632101/campos_512_v4
+124/632129/campos_512_v4
+124/632174/campos_512_v4
+124/632432/campos_512_v4
+124/632434/campos_512_v4
+124/632951/campos_512_v4
+124/633398/campos_512_v4
+124/633803/campos_512_v4
+124/633979/campos_512_v4
+124/634233/campos_512_v4
+124/634544/campos_512_v4
+124/634645/campos_512_v4
+124/634703/campos_512_v4
+124/634895/campos_512_v4
+125/635497/campos_512_v4
+125/635511/campos_512_v4
+125/635526/campos_512_v4
+125/635649/campos_512_v4
+125/635853/campos_512_v4
+125/636160/campos_512_v4
+125/636183/campos_512_v4
+125/636318/campos_512_v4
+125/636696/campos_512_v4
+125/636820/campos_512_v4
+125/636951/campos_512_v4
+125/637056/campos_512_v4
+125/637074/campos_512_v4
+125/637081/campos_512_v4
+125/637506/campos_512_v4
+125/637603/campos_512_v4
+125/637758/campos_512_v4
+125/638077/campos_512_v4
+125/638644/campos_512_v4
+125/639162/campos_512_v4
+125/639170/campos_512_v4
+125/639424/campos_512_v4
+125/639447/campos_512_v4
+125/639573/campos_512_v4
+125/639693/campos_512_v4
+125/639828/campos_512_v4
+127/645149/campos_512_v4
+127/645307/campos_512_v4
+127/645429/campos_512_v4
+127/645434/campos_512_v4
+127/645914/campos_512_v4
+127/646530/campos_512_v4
+127/646609/campos_512_v4
+127/646654/campos_512_v4
+127/646807/campos_512_v4
+127/646819/campos_512_v4
+127/646870/campos_512_v4
+127/647034/campos_512_v4
+127/647354/campos_512_v4
+127/647368/campos_512_v4
+127/647560/campos_512_v4
+127/647858/campos_512_v4
+127/647888/campos_512_v4
+127/648515/campos_512_v4
+127/648617/campos_512_v4
+127/648671/campos_512_v4
+127/648690/campos_512_v4
+127/648748/campos_512_v4
+127/648822/campos_512_v4
+127/649165/campos_512_v4
+127/649188/campos_512_v4
+127/649342/campos_512_v4
+127/649949/campos_512_v4
+128/650089/campos_512_v4
+128/650150/campos_512_v4
+128/650303/campos_512_v4
+128/650657/campos_512_v4
+128/650769/campos_512_v4
+128/650835/campos_512_v4
+128/651072/campos_512_v4
+128/651145/campos_512_v4
+128/651680/campos_512_v4
+128/652175/campos_512_v4
+128/652194/campos_512_v4
+128/652219/campos_512_v4
+128/652339/campos_512_v4
+128/652822/campos_512_v4
+128/653255/campos_512_v4
+128/653597/campos_512_v4
+128/653805/campos_512_v4
+128/654196/campos_512_v4
+128/654642/campos_512_v4
+128/654894/campos_512_v4
+128/654954/campos_512_v4
+129/655215/campos_512_v4
+129/656006/campos_512_v4
+129/656391/campos_512_v4
+129/657134/campos_512_v4
+129/657361/campos_512_v4
+129/657713/campos_512_v4
+129/658059/campos_512_v4
+129/658174/campos_512_v4
+129/658323/campos_512_v4
+129/658483/campos_512_v4
+129/658878/campos_512_v4
+129/659099/campos_512_v4
+129/659912/campos_512_v4
+13/75763/campos_512_v4
+13/76023/campos_512_v4
+13/76255/campos_512_v4
+13/76429/campos_512_v4
+13/76591/campos_512_v4
+13/76942/campos_512_v4
+13/76955/campos_512_v4
+13/77173/campos_512_v4
+13/77540/campos_512_v4
+13/77712/campos_512_v4
+13/77790/campos_512_v4
+13/77861/campos_512_v4
+13/78020/campos_512_v4
+13/78547/campos_512_v4
+13/78655/campos_512_v4
+13/78940/campos_512_v4
+13/79560/campos_512_v4
+13/79609/campos_512_v4
+13/79886/campos_512_v4
+130/660460/campos_512_v4
+130/660906/campos_512_v4
+130/662148/campos_512_v4
+130/662171/campos_512_v4
+130/662670/campos_512_v4
+130/663162/campos_512_v4
+130/663396/campos_512_v4
+130/664084/campos_512_v4
+130/664196/campos_512_v4
+131/665206/campos_512_v4
+131/665409/campos_512_v4
+131/665452/campos_512_v4
+131/665764/campos_512_v4
+131/665914/campos_512_v4
+131/665936/campos_512_v4
+131/666044/campos_512_v4
+131/666149/campos_512_v4
+131/666201/campos_512_v4
+131/666486/campos_512_v4
+131/666700/campos_512_v4
+131/667076/campos_512_v4
+131/667419/campos_512_v4
+131/667454/campos_512_v4
+131/667678/campos_512_v4
+131/667838/campos_512_v4
+131/667962/campos_512_v4
+131/668141/campos_512_v4
+131/668426/campos_512_v4
+131/668747/campos_512_v4
+131/668789/campos_512_v4
+131/668940/campos_512_v4
+131/669259/campos_512_v4
+131/669276/campos_512_v4
+131/669377/campos_512_v4
+131/669795/campos_512_v4
+131/669796/campos_512_v4
+131/669983/campos_512_v4
+132/670378/campos_512_v4
+132/670677/campos_512_v4
+132/670916/campos_512_v4
+132/670919/campos_512_v4
+132/670963/campos_512_v4
+132/671056/campos_512_v4
+132/671494/campos_512_v4
+132/671640/campos_512_v4
+132/672367/campos_512_v4
+132/672578/campos_512_v4
+132/672657/campos_512_v4
+132/672786/campos_512_v4
+132/672959/campos_512_v4
+132/673272/campos_512_v4
+132/673370/campos_512_v4
+132/673792/campos_512_v4
+132/674322/campos_512_v4
+132/674371/campos_512_v4
+132/674681/campos_512_v4
+132/674854/campos_512_v4
+132/674979/campos_512_v4
+133/675185/campos_512_v4
+133/675550/campos_512_v4
+133/675573/campos_512_v4
+133/675619/campos_512_v4
+133/675952/campos_512_v4
+133/675990/campos_512_v4
+133/676252/campos_512_v4
+133/676444/campos_512_v4
+133/676448/campos_512_v4
+133/676680/campos_512_v4
+133/676816/campos_512_v4
+133/676977/campos_512_v4
+133/677220/campos_512_v4
+133/677518/campos_512_v4
+133/677609/campos_512_v4
+133/677664/campos_512_v4
+133/677819/campos_512_v4
+133/677849/campos_512_v4
+133/678730/campos_512_v4
+133/679509/campos_512_v4
+133/679701/campos_512_v4
+133/679757/campos_512_v4
+133/679807/campos_512_v4
+133/679820/campos_512_v4
+134/680251/campos_512_v4
+134/680317/campos_512_v4
+134/680331/campos_512_v4
+134/680380/campos_512_v4
+134/680457/campos_512_v4
+134/680589/campos_512_v4
+134/680762/campos_512_v4
+134/681038/campos_512_v4
+134/681058/campos_512_v4
+134/681097/campos_512_v4
+134/681551/campos_512_v4
+134/681592/campos_512_v4
+134/681788/campos_512_v4
+134/681854/campos_512_v4
+134/681908/campos_512_v4
+134/682111/campos_512_v4
+134/682410/campos_512_v4
+134/682494/campos_512_v4
+134/682611/campos_512_v4
+134/682796/campos_512_v4
+134/682847/campos_512_v4
+134/682966/campos_512_v4
+134/683092/campos_512_v4
+134/683444/campos_512_v4
+134/683722/campos_512_v4
+134/684030/campos_512_v4
+134/684178/campos_512_v4
+134/684217/campos_512_v4
+134/684336/campos_512_v4
+134/684406/campos_512_v4
+134/684738/campos_512_v4
+135/685010/campos_512_v4
+135/685218/campos_512_v4
+135/685279/campos_512_v4
+135/685437/campos_512_v4
+135/685514/campos_512_v4
+135/686163/campos_512_v4
+135/686310/campos_512_v4
+135/686397/campos_512_v4
+135/686431/campos_512_v4
+135/686436/campos_512_v4
+135/686653/campos_512_v4
+135/686996/campos_512_v4
+135/687212/campos_512_v4
+135/687350/campos_512_v4
+135/687509/campos_512_v4
+135/687607/campos_512_v4
+135/687728/campos_512_v4
+135/688681/campos_512_v4
+135/688771/campos_512_v4
+135/689028/campos_512_v4
+135/689151/campos_512_v4
+135/689309/campos_512_v4
+135/689725/campos_512_v4
+136/690046/campos_512_v4
+136/690254/campos_512_v4
+136/690312/campos_512_v4
+136/690578/campos_512_v4
+136/690637/campos_512_v4
+136/690804/campos_512_v4
+136/691295/campos_512_v4
+136/691839/campos_512_v4
+136/692236/campos_512_v4
+136/692634/campos_512_v4
+136/692849/campos_512_v4
+136/692984/campos_512_v4
+136/693085/campos_512_v4
+136/693285/campos_512_v4
+136/693398/campos_512_v4
+136/693482/campos_512_v4
+136/693574/campos_512_v4
+136/693629/campos_512_v4
+136/693904/campos_512_v4
+136/694718/campos_512_v4
+136/694750/campos_512_v4
+136/694984/campos_512_v4
+137/695176/campos_512_v4
+137/695387/campos_512_v4
+137/695835/campos_512_v4
+137/695927/campos_512_v4
+137/696046/campos_512_v4
+137/696517/campos_512_v4
+137/696698/campos_512_v4
+137/696847/campos_512_v4
+137/697353/campos_512_v4
+137/697712/campos_512_v4
+137/698075/campos_512_v4
+137/698390/campos_512_v4
+137/698643/campos_512_v4
+137/698659/campos_512_v4
+137/699048/campos_512_v4
+137/699178/campos_512_v4
+137/699629/campos_512_v4
+138/700033/campos_512_v4
+138/700156/campos_512_v4
+138/700564/campos_512_v4
+138/700832/campos_512_v4
+138/701108/campos_512_v4
+138/701548/campos_512_v4
+138/701863/campos_512_v4
+138/702210/campos_512_v4
+138/702335/campos_512_v4
+138/702397/campos_512_v4
+138/702508/campos_512_v4
+138/702574/campos_512_v4
+138/702845/campos_512_v4
+138/702934/campos_512_v4
+138/703020/campos_512_v4
+138/703070/campos_512_v4
+138/703355/campos_512_v4
+138/703388/campos_512_v4
+138/704199/campos_512_v4
+138/704227/campos_512_v4
+138/704405/campos_512_v4
+138/704530/campos_512_v4
+138/704581/campos_512_v4
+138/704768/campos_512_v4
+139/705455/campos_512_v4
+139/705569/campos_512_v4
+139/705802/campos_512_v4
+139/706049/campos_512_v4
+139/706073/campos_512_v4
+139/706560/campos_512_v4
+139/706775/campos_512_v4
+139/707051/campos_512_v4
+139/707655/campos_512_v4
+139/707875/campos_512_v4
+139/707930/campos_512_v4
+139/707941/campos_512_v4
+139/707943/campos_512_v4
+139/708049/campos_512_v4
+139/708100/campos_512_v4
+139/708343/campos_512_v4
+139/708535/campos_512_v4
+139/708559/campos_512_v4
+139/709200/campos_512_v4
+139/709415/campos_512_v4
+139/709480/campos_512_v4
+14/80095/campos_512_v4
+14/80216/campos_512_v4
+14/80330/campos_512_v4
+14/80427/campos_512_v4
+14/80502/campos_512_v4
+14/80512/campos_512_v4
+14/80691/campos_512_v4
+14/81134/campos_512_v4
+14/81350/campos_512_v4
+14/81441/campos_512_v4
+14/81579/campos_512_v4
+14/81707/campos_512_v4
+14/82197/campos_512_v4
+14/82223/campos_512_v4
+14/82761/campos_512_v4
+14/82833/campos_512_v4
+14/82884/campos_512_v4
+14/83232/campos_512_v4
+14/83473/campos_512_v4
+14/83629/campos_512_v4
+14/83758/campos_512_v4
+14/83783/campos_512_v4
+14/84412/campos_512_v4
+14/84878/campos_512_v4
+140/710215/campos_512_v4
+140/710470/campos_512_v4
+140/710534/campos_512_v4
+140/710588/campos_512_v4
+140/710594/campos_512_v4
+140/710957/campos_512_v4
+140/711124/campos_512_v4
+140/711380/campos_512_v4
+140/711435/campos_512_v4
+140/711752/campos_512_v4
+140/711764/campos_512_v4
+140/711879/campos_512_v4
+140/712199/campos_512_v4
+140/712212/campos_512_v4
+140/712331/campos_512_v4
+140/712359/campos_512_v4
+140/712480/campos_512_v4
+140/712605/campos_512_v4
+140/712719/campos_512_v4
+140/713174/campos_512_v4
+140/713224/campos_512_v4
+140/713450/campos_512_v4
+140/713518/campos_512_v4
+140/713699/campos_512_v4
+140/713709/campos_512_v4
+140/714277/campos_512_v4
+140/714587/campos_512_v4
+140/714981/campos_512_v4
+140/714985/campos_512_v4
+141/715214/campos_512_v4
+141/715228/campos_512_v4
+141/715411/campos_512_v4
+141/715522/campos_512_v4
+141/715528/campos_512_v4
+141/715600/campos_512_v4
+141/716113/campos_512_v4
+141/716585/campos_512_v4
+141/717435/campos_512_v4
+141/717439/campos_512_v4
+141/719080/campos_512_v4
+141/719295/campos_512_v4
+141/719372/campos_512_v4
+141/719463/campos_512_v4
+141/719696/campos_512_v4
+141/719835/campos_512_v4
+142/720437/campos_512_v4
+142/721326/campos_512_v4
+142/721333/campos_512_v4
+142/721443/campos_512_v4
+142/722112/campos_512_v4
+142/722247/campos_512_v4
+142/722283/campos_512_v4
+142/722341/campos_512_v4
+142/722393/campos_512_v4
+142/722511/campos_512_v4
+142/722907/campos_512_v4
+142/722972/campos_512_v4
+142/723103/campos_512_v4
+142/723337/campos_512_v4
+142/723339/campos_512_v4
+142/723423/campos_512_v4
+142/723425/campos_512_v4
+142/723506/campos_512_v4
+142/723599/campos_512_v4
+142/723897/campos_512_v4
+142/723989/campos_512_v4
+142/724197/campos_512_v4
+142/724486/campos_512_v4
+142/724601/campos_512_v4
+142/724610/campos_512_v4
+142/724655/campos_512_v4
+142/724761/campos_512_v4
+143/725039/campos_512_v4
+143/725263/campos_512_v4
+143/725558/campos_512_v4
+143/725583/campos_512_v4
+143/725850/campos_512_v4
+143/726045/campos_512_v4
+143/726168/campos_512_v4
+143/726445/campos_512_v4
+143/726494/campos_512_v4
+143/726497/campos_512_v4
+143/726713/campos_512_v4
+143/726776/campos_512_v4
+143/726845/campos_512_v4
+143/726880/campos_512_v4
+143/726967/campos_512_v4
+143/726971/campos_512_v4
+143/727532/campos_512_v4
+143/727553/campos_512_v4
+143/727854/campos_512_v4
+143/728017/campos_512_v4
+143/728077/campos_512_v4
+143/728209/campos_512_v4
+143/728562/campos_512_v4
+143/728742/campos_512_v4
+143/729110/campos_512_v4
+143/729505/campos_512_v4
+143/729865/campos_512_v4
+144/730060/campos_512_v4
+144/730091/campos_512_v4
+144/730223/campos_512_v4
+144/730264/campos_512_v4
+144/730510/campos_512_v4
+144/730576/campos_512_v4
+144/730764/campos_512_v4
+144/731080/campos_512_v4
+144/731087/campos_512_v4
+144/731111/campos_512_v4
+144/731277/campos_512_v4
+144/731288/campos_512_v4
+144/731382/campos_512_v4
+144/731398/campos_512_v4
+144/731407/campos_512_v4
+144/731645/campos_512_v4
+144/731729/campos_512_v4
+144/732520/campos_512_v4
+144/732806/campos_512_v4
+144/733142/campos_512_v4
+144/733181/campos_512_v4
+144/733302/campos_512_v4
+144/733479/campos_512_v4
+144/733616/campos_512_v4
+144/733638/campos_512_v4
+144/733663/campos_512_v4
+144/733866/campos_512_v4
+144/733924/campos_512_v4
+144/734044/campos_512_v4
+144/734371/campos_512_v4
+144/734713/campos_512_v4
+144/734726/campos_512_v4
+144/734798/campos_512_v4
+144/734921/campos_512_v4
+145/735276/campos_512_v4
+145/735717/campos_512_v4
+145/736008/campos_512_v4
+145/736191/campos_512_v4
+145/736408/campos_512_v4
+145/736569/campos_512_v4
+145/736782/campos_512_v4
+145/737311/campos_512_v4
+145/737529/campos_512_v4
+145/737550/campos_512_v4
+145/737675/campos_512_v4
+145/738489/campos_512_v4
+145/738684/campos_512_v4
+145/738963/campos_512_v4
+146/740025/campos_512_v4
+146/740508/campos_512_v4
+146/740522/campos_512_v4
+146/741433/campos_512_v4
+146/741502/campos_512_v4
+146/741671/campos_512_v4
+146/741673/campos_512_v4
+146/741688/campos_512_v4
+146/741701/campos_512_v4
+146/742023/campos_512_v4
+146/742230/campos_512_v4
+146/742344/campos_512_v4
+146/742444/campos_512_v4
+146/742716/campos_512_v4
+146/742872/campos_512_v4
+146/742920/campos_512_v4
+146/743189/campos_512_v4
+146/743362/campos_512_v4
+146/743420/campos_512_v4
+146/744008/campos_512_v4
+146/744074/campos_512_v4
+146/744236/campos_512_v4
+146/744259/campos_512_v4
+146/744928/campos_512_v4
+147/745068/campos_512_v4
+147/745085/campos_512_v4
+147/745783/campos_512_v4
+147/745857/campos_512_v4
+147/745934/campos_512_v4
+147/746125/campos_512_v4
+147/746536/campos_512_v4
+147/746597/campos_512_v4
+147/746616/campos_512_v4
+147/747147/campos_512_v4
+147/747373/campos_512_v4
+147/747441/campos_512_v4
+147/747985/campos_512_v4
+147/748187/campos_512_v4
+147/748478/campos_512_v4
+147/749242/campos_512_v4
+147/749315/campos_512_v4
+148/750390/campos_512_v4
+148/750679/campos_512_v4
+148/751136/campos_512_v4
+148/751365/campos_512_v4
+148/751606/campos_512_v4
+148/751826/campos_512_v4
+148/752306/campos_512_v4
+148/752689/campos_512_v4
+148/752983/campos_512_v4
+148/753199/campos_512_v4
+148/753346/campos_512_v4
+148/753985/campos_512_v4
+148/754000/campos_512_v4
+148/754361/campos_512_v4
+148/754957/campos_512_v4
+149/755090/campos_512_v4
+149/755206/campos_512_v4
+149/755332/campos_512_v4
+149/755569/campos_512_v4
+149/755672/campos_512_v4
+149/755837/campos_512_v4
+149/756160/campos_512_v4
+149/756254/campos_512_v4
+149/756645/campos_512_v4
+149/756723/campos_512_v4
+149/756779/campos_512_v4
+149/756783/campos_512_v4
+149/756804/campos_512_v4
+149/756823/campos_512_v4
+149/757086/campos_512_v4
+149/757107/campos_512_v4
+149/757532/campos_512_v4
+149/757644/campos_512_v4
+149/758797/campos_512_v4
+149/758814/campos_512_v4
+149/758870/campos_512_v4
+149/758906/campos_512_v4
+149/759024/campos_512_v4
+149/759326/campos_512_v4
+149/759445/campos_512_v4
+149/759623/campos_512_v4
+149/759892/campos_512_v4
+15/85012/campos_512_v4
+15/85285/campos_512_v4
+15/85614/campos_512_v4
+15/85739/campos_512_v4
+15/86029/campos_512_v4
+15/86204/campos_512_v4
+15/86289/campos_512_v4
+15/86844/campos_512_v4
+15/87275/campos_512_v4
+15/87511/campos_512_v4
+15/87894/campos_512_v4
+15/88496/campos_512_v4
+15/88800/campos_512_v4
+15/88829/campos_512_v4
+15/89432/campos_512_v4
+15/89438/campos_512_v4
+15/89744/campos_512_v4
+150/760248/campos_512_v4
+150/760312/campos_512_v4
+150/760462/campos_512_v4
+150/760482/campos_512_v4
+150/760579/campos_512_v4
+150/760689/campos_512_v4
+150/761749/campos_512_v4
+150/761753/campos_512_v4
+150/762082/campos_512_v4
+150/762148/campos_512_v4
+150/762519/campos_512_v4
+150/762562/campos_512_v4
+150/763556/campos_512_v4
+150/763731/campos_512_v4
+150/763741/campos_512_v4
+151/765024/campos_512_v4
+151/765115/campos_512_v4
+151/765125/campos_512_v4
+151/765293/campos_512_v4
+151/765477/campos_512_v4
+151/766150/campos_512_v4
+151/766373/campos_512_v4
+151/766420/campos_512_v4
+151/766421/campos_512_v4
+151/766900/campos_512_v4
+151/766901/campos_512_v4
+151/767095/campos_512_v4
+151/767631/campos_512_v4
+151/767768/campos_512_v4
+151/767883/campos_512_v4
+151/767900/campos_512_v4
+151/768192/campos_512_v4
+151/768436/campos_512_v4
+151/768637/campos_512_v4
+151/769154/campos_512_v4
+151/769210/campos_512_v4
+151/769861/campos_512_v4
+151/769989/campos_512_v4
+152/770312/campos_512_v4
+152/770379/campos_512_v4
+152/770726/campos_512_v4
+152/770768/campos_512_v4
+152/771242/campos_512_v4
+152/771290/campos_512_v4
+152/771391/campos_512_v4
+152/771702/campos_512_v4
+152/772509/campos_512_v4
+152/773079/campos_512_v4
+152/773184/campos_512_v4
+152/773411/campos_512_v4
+152/773542/campos_512_v4
+152/773603/campos_512_v4
+152/773943/campos_512_v4
+152/773947/campos_512_v4
+152/774297/campos_512_v4
+152/774367/campos_512_v4
+152/774628/campos_512_v4
+152/774872/campos_512_v4
+152/774898/campos_512_v4
+153/775207/campos_512_v4
+153/775391/campos_512_v4
+153/775416/campos_512_v4
+153/775548/campos_512_v4
+153/775558/campos_512_v4
+153/775941/campos_512_v4
+153/776091/campos_512_v4
+153/776228/campos_512_v4
+153/776914/campos_512_v4
+153/777402/campos_512_v4
+153/777549/campos_512_v4
+153/777591/campos_512_v4
+153/777650/campos_512_v4
+153/777696/campos_512_v4
+153/778416/campos_512_v4
+153/778511/campos_512_v4
+153/778630/campos_512_v4
+153/778642/campos_512_v4
+153/778976/campos_512_v4
+153/779129/campos_512_v4
+153/779199/campos_512_v4
+153/779301/campos_512_v4
+153/779400/campos_512_v4
+153/779445/campos_512_v4
+153/779819/campos_512_v4
+154/780040/campos_512_v4
+154/780568/campos_512_v4
+154/780745/campos_512_v4
+154/780807/campos_512_v4
+154/781068/campos_512_v4
+154/781181/campos_512_v4
+154/781385/campos_512_v4
+154/781556/campos_512_v4
+154/781620/campos_512_v4
+154/781622/campos_512_v4
+154/781833/campos_512_v4
+154/781998/campos_512_v4
+154/782052/campos_512_v4
+154/782150/campos_512_v4
+154/782406/campos_512_v4
+154/783008/campos_512_v4
+154/783275/campos_512_v4
+154/783289/campos_512_v4
+154/783291/campos_512_v4
+154/783556/campos_512_v4
+154/783591/campos_512_v4
+154/783833/campos_512_v4
+154/784070/campos_512_v4
+154/784490/campos_512_v4
+154/784502/campos_512_v4
+155/785005/campos_512_v4
+155/785158/campos_512_v4
+155/785263/campos_512_v4
+155/785554/campos_512_v4
+155/786112/campos_512_v4
+155/786136/campos_512_v4
+155/786727/campos_512_v4
+155/787202/campos_512_v4
+155/787287/campos_512_v4
+155/787293/campos_512_v4
+155/787902/campos_512_v4
+155/789150/campos_512_v4
+155/789256/campos_512_v4
+155/789637/campos_512_v4
+156/790156/campos_512_v4
+156/790184/campos_512_v4
+156/790529/campos_512_v4
+156/790679/campos_512_v4
+156/791088/campos_512_v4
+156/791195/campos_512_v4
+156/791294/campos_512_v4
+156/791585/campos_512_v4
+156/792252/campos_512_v4
+156/792319/campos_512_v4
+156/792988/campos_512_v4
+156/793012/campos_512_v4
+156/793127/campos_512_v4
+156/793324/campos_512_v4
+156/793669/campos_512_v4
+156/793858/campos_512_v4
+156/793903/campos_512_v4
+156/793969/campos_512_v4
+156/794510/campos_512_v4
+157/795768/campos_512_v4
+157/795822/campos_512_v4
+157/796218/campos_512_v4
+157/796393/campos_512_v4
+157/796442/campos_512_v4
+157/796991/campos_512_v4
+157/797003/campos_512_v4
+157/797132/campos_512_v4
+157/797178/campos_512_v4
+157/797337/campos_512_v4
+157/797349/campos_512_v4
+157/797492/campos_512_v4
+157/797943/campos_512_v4
+157/798174/campos_512_v4
+157/798318/campos_512_v4
+157/798629/campos_512_v4
+157/799165/campos_512_v4
+157/799273/campos_512_v4
+157/799657/campos_512_v4
+157/799834/campos_512_v4
+158/800181/campos_512_v4
+158/800216/campos_512_v4
+158/800271/campos_512_v4
+158/800488/campos_512_v4
+158/800592/campos_512_v4
+158/800610/campos_512_v4
+158/800969/campos_512_v4
+158/801765/campos_512_v4
+158/802110/campos_512_v4
+158/802269/campos_512_v4
+158/802431/campos_512_v4
+158/802863/campos_512_v4
+158/802870/campos_512_v4
+158/803014/campos_512_v4
+158/803514/campos_512_v4
+158/803745/campos_512_v4
+158/803788/campos_512_v4
+158/803814/campos_512_v4
+158/803856/campos_512_v4
+158/804100/campos_512_v4
+158/804627/campos_512_v4
+158/804634/campos_512_v4
+158/804673/campos_512_v4
+158/804676/campos_512_v4
+158/804700/campos_512_v4
+158/804942/campos_512_v4
+158/804998/campos_512_v4
+159/805058/campos_512_v4
+159/805222/campos_512_v4
+159/805964/campos_512_v4
+159/806048/campos_512_v4
+159/806061/campos_512_v4
+159/806184/campos_512_v4
+159/806202/campos_512_v4
+159/806203/campos_512_v4
+159/806491/campos_512_v4
+159/806713/campos_512_v4
+159/806838/campos_512_v4
+159/807746/campos_512_v4
+159/807979/campos_512_v4
+159/808034/campos_512_v4
+159/808363/campos_512_v4
+159/808584/campos_512_v4
+16/90202/campos_512_v4
+16/90493/campos_512_v4
+16/90632/campos_512_v4
+16/91259/campos_512_v4
+16/91452/campos_512_v4
+16/91821/campos_512_v4
+16/91968/campos_512_v4
+16/92366/campos_512_v4
+16/92595/campos_512_v4
+16/92745/campos_512_v4
+16/92946/campos_512_v4
+16/93319/campos_512_v4
+16/93616/campos_512_v4
+16/93818/campos_512_v4
+16/94127/campos_512_v4
+16/94199/campos_512_v4
+16/94412/campos_512_v4
+16/94484/campos_512_v4
+16/94600/campos_512_v4
+16/94655/campos_512_v4
+16/94689/campos_512_v4
+16/94991/campos_512_v4
+17/95307/campos_512_v4
+17/95374/campos_512_v4
+17/95398/campos_512_v4
+17/95603/campos_512_v4
+17/95668/campos_512_v4
+17/95711/campos_512_v4
+17/95829/campos_512_v4
+17/95969/campos_512_v4
+17/96008/campos_512_v4
+17/96201/campos_512_v4
+17/96557/campos_512_v4
+17/96676/campos_512_v4
+17/96697/campos_512_v4
+17/96714/campos_512_v4
+17/96863/campos_512_v4
+17/97148/campos_512_v4
+17/97301/campos_512_v4
+17/97664/campos_512_v4
+17/97682/campos_512_v4
+17/97839/campos_512_v4
+17/98294/campos_512_v4
+17/98635/campos_512_v4
+17/99073/campos_512_v4
+17/99637/campos_512_v4
+17/99665/campos_512_v4
+17/99847/campos_512_v4
+17/99873/campos_512_v4
+2/20661/campos_512_v4
+2/20776/campos_512_v4
+2/21047/campos_512_v4
+2/21132/campos_512_v4
+2/21198/campos_512_v4
+2/21303/campos_512_v4
+2/21716/campos_512_v4
+2/21806/campos_512_v4
+2/22605/campos_512_v4
+2/23137/campos_512_v4
+2/23228/campos_512_v4
+2/23634/campos_512_v4
+2/23927/campos_512_v4
+2/24223/campos_512_v4
+2/24793/campos_512_v4
+2/24794/campos_512_v4
+2/24815/campos_512_v4
+23/125008/campos_512_v4
+23/125069/campos_512_v4
+23/125100/campos_512_v4
+23/125327/campos_512_v4
+23/125341/campos_512_v4
+23/125561/campos_512_v4
+23/126045/campos_512_v4
+23/126232/campos_512_v4
+23/126257/campos_512_v4
+23/126322/campos_512_v4
+23/126821/campos_512_v4
+23/126833/campos_512_v4
+23/126984/campos_512_v4
+23/127009/campos_512_v4
+23/127990/campos_512_v4
+23/128081/campos_512_v4
+23/128541/campos_512_v4
+23/128701/campos_512_v4
+23/128792/campos_512_v4
+23/128918/campos_512_v4
+23/128938/campos_512_v4
+23/128951/campos_512_v4
+23/129108/campos_512_v4
+24/130110/campos_512_v4
+24/130136/campos_512_v4
+24/130235/campos_512_v4
+24/130264/campos_512_v4
+24/130297/campos_512_v4
+24/130304/campos_512_v4
+24/130520/campos_512_v4
+24/130613/campos_512_v4
+24/131106/campos_512_v4
+24/131226/campos_512_v4
+24/131288/campos_512_v4
+24/132104/campos_512_v4
+24/132105/campos_512_v4
+24/132613/campos_512_v4
+24/132689/campos_512_v4
+24/132834/campos_512_v4
+24/132959/campos_512_v4
+24/133247/campos_512_v4
+24/133420/campos_512_v4
+24/133589/campos_512_v4
+24/133863/campos_512_v4
+24/133887/campos_512_v4
+24/134072/campos_512_v4
+24/134274/campos_512_v4
+24/134433/campos_512_v4
+24/134752/campos_512_v4
+24/134847/campos_512_v4
+25/135252/campos_512_v4
+25/135271/campos_512_v4
+25/136636/campos_512_v4
+25/136813/campos_512_v4
+25/136992/campos_512_v4
+25/137744/campos_512_v4
+25/137927/campos_512_v4
+25/137960/campos_512_v4
+25/138033/campos_512_v4
+25/138127/campos_512_v4
+25/138129/campos_512_v4
+25/138135/campos_512_v4
+25/138384/campos_512_v4
+25/138542/campos_512_v4
+25/138815/campos_512_v4
+25/139122/campos_512_v4
+25/139216/campos_512_v4
+25/139479/campos_512_v4
+25/139545/campos_512_v4
+25/139813/campos_512_v4
+26/140120/campos_512_v4
+26/140279/campos_512_v4
+26/140317/campos_512_v4
+26/141473/campos_512_v4
+26/141655/campos_512_v4
+26/141901/campos_512_v4
+26/141939/campos_512_v4
+26/142033/campos_512_v4
+26/142176/campos_512_v4
+26/142186/campos_512_v4
+26/142284/campos_512_v4
+26/142521/campos_512_v4
+26/142541/campos_512_v4
+26/142545/campos_512_v4
+26/142593/campos_512_v4
+26/142644/campos_512_v4
+26/143850/campos_512_v4
+26/144340/campos_512_v4
+26/144455/campos_512_v4
+26/144943/campos_512_v4
+27/145050/campos_512_v4
+27/145099/campos_512_v4
+27/145146/campos_512_v4
+27/145812/campos_512_v4
+27/145845/campos_512_v4
+27/146013/campos_512_v4
+27/146442/campos_512_v4
+27/146590/campos_512_v4
+27/147480/campos_512_v4
+27/147526/campos_512_v4
+27/147534/campos_512_v4
+27/148279/campos_512_v4
+27/148327/campos_512_v4
+27/148515/campos_512_v4
+27/148826/campos_512_v4
+27/148907/campos_512_v4
+27/148970/campos_512_v4
+27/149381/campos_512_v4
+27/149430/campos_512_v4
+28/150385/campos_512_v4
+28/150432/campos_512_v4
+28/150990/campos_512_v4
+28/151507/campos_512_v4
+28/151711/campos_512_v4
+28/152336/campos_512_v4
+28/152426/campos_512_v4
+28/152578/campos_512_v4
+28/153902/campos_512_v4
+28/154177/campos_512_v4
+28/154427/campos_512_v4
+28/154728/campos_512_v4
+28/154967/campos_512_v4
+29/155065/campos_512_v4
+29/155143/campos_512_v4
+29/155273/campos_512_v4
+29/155353/campos_512_v4
+29/155366/campos_512_v4
+29/155557/campos_512_v4
+29/155832/campos_512_v4
+29/156947/campos_512_v4
+29/157030/campos_512_v4
+29/157319/campos_512_v4
+29/157943/campos_512_v4
+29/158494/campos_512_v4
+29/158668/campos_512_v4
+29/158932/campos_512_v4
+29/159421/campos_512_v4
+29/159425/campos_512_v4
+29/159545/campos_512_v4
+29/159575/campos_512_v4
+30/160002/campos_512_v4
+30/160021/campos_512_v4
+30/160352/campos_512_v4
+30/160438/campos_512_v4
+30/160565/campos_512_v4
+30/160611/campos_512_v4
+30/160617/campos_512_v4
+30/160958/campos_512_v4
+30/161735/campos_512_v4
+30/161964/campos_512_v4
+30/162200/campos_512_v4
+30/162203/campos_512_v4
+30/162389/campos_512_v4
+30/162736/campos_512_v4
+30/162989/campos_512_v4
+30/163299/campos_512_v4
+30/163615/campos_512_v4
+30/163908/campos_512_v4
+30/164243/campos_512_v4
+30/164302/campos_512_v4
+31/165074/campos_512_v4
+31/165205/campos_512_v4
+31/165341/campos_512_v4
+31/165485/campos_512_v4
+31/166189/campos_512_v4
+31/166352/campos_512_v4
+31/166453/campos_512_v4
+31/166711/campos_512_v4
+31/166869/campos_512_v4
+31/167272/campos_512_v4
+31/167646/campos_512_v4
+31/167651/campos_512_v4
+31/167805/campos_512_v4
+31/168425/campos_512_v4
+31/168428/campos_512_v4
+31/168452/campos_512_v4
+31/168725/campos_512_v4
+31/169297/campos_512_v4
+31/169353/campos_512_v4
+31/169483/campos_512_v4
+31/169611/campos_512_v4
+32/170496/campos_512_v4
+32/170523/campos_512_v4
+32/170778/campos_512_v4
+32/170847/campos_512_v4
+32/171078/campos_512_v4
+32/171229/campos_512_v4
+32/171272/campos_512_v4
+32/171422/campos_512_v4
+32/171632/campos_512_v4
+32/171901/campos_512_v4
+32/171915/campos_512_v4
+32/172264/campos_512_v4
+32/172289/campos_512_v4
+32/172351/campos_512_v4
+32/172394/campos_512_v4
+32/172441/campos_512_v4
+32/173092/campos_512_v4
+32/173311/campos_512_v4
+32/173378/campos_512_v4
+32/173556/campos_512_v4
+32/173691/campos_512_v4
+32/173995/campos_512_v4
+32/174142/campos_512_v4
+32/174374/campos_512_v4
+32/174865/campos_512_v4
+33/175482/campos_512_v4
+33/175635/campos_512_v4
+33/175657/campos_512_v4
+33/176229/campos_512_v4
+33/176455/campos_512_v4
+33/176634/campos_512_v4
+33/177157/campos_512_v4
+33/177298/campos_512_v4
+33/177502/campos_512_v4
+33/178285/campos_512_v4
+33/178448/campos_512_v4
+33/178825/campos_512_v4
+33/178863/campos_512_v4
+33/178917/campos_512_v4
+33/179191/campos_512_v4
+33/179396/campos_512_v4
+34/180172/campos_512_v4
+34/181136/campos_512_v4
+34/181261/campos_512_v4
+34/181844/campos_512_v4
+34/182012/campos_512_v4
+34/182727/campos_512_v4
+34/182805/campos_512_v4
+34/184054/campos_512_v4
+34/184200/campos_512_v4
+34/184554/campos_512_v4
+34/184587/campos_512_v4
+34/184716/campos_512_v4
+34/184928/campos_512_v4
+35/185432/campos_512_v4
+35/186055/campos_512_v4
+35/186059/campos_512_v4
+35/186318/campos_512_v4
+35/186387/campos_512_v4
+35/186487/campos_512_v4
+35/186627/campos_512_v4
+35/186739/campos_512_v4
+35/186894/campos_512_v4
+35/186968/campos_512_v4
+35/187207/campos_512_v4
+35/187348/campos_512_v4
+35/187591/campos_512_v4
+35/187904/campos_512_v4
+35/188683/campos_512_v4
+35/188748/campos_512_v4
+35/188843/campos_512_v4
+35/189375/campos_512_v4
+35/189550/campos_512_v4
+36/190298/campos_512_v4
+36/190405/campos_512_v4
+36/190530/campos_512_v4
+36/190568/campos_512_v4
+36/190686/campos_512_v4
+36/191113/campos_512_v4
+36/191565/campos_512_v4
+36/191595/campos_512_v4
+36/191843/campos_512_v4
+36/191864/campos_512_v4
+36/192201/campos_512_v4
+36/192276/campos_512_v4
+36/192349/campos_512_v4
+36/192601/campos_512_v4
+36/192618/campos_512_v4
+36/193214/campos_512_v4
+36/193522/campos_512_v4
+36/193629/campos_512_v4
+36/193991/campos_512_v4
+36/194790/campos_512_v4
+37/196250/campos_512_v4
+37/196439/campos_512_v4
+37/196854/campos_512_v4
+37/196939/campos_512_v4
+37/197238/campos_512_v4
+37/197773/campos_512_v4
+37/197924/campos_512_v4
+37/198083/campos_512_v4
+37/198210/campos_512_v4
+37/198268/campos_512_v4
+37/198462/campos_512_v4
+37/198641/campos_512_v4
+37/198892/campos_512_v4
+37/198981/campos_512_v4
+37/199223/campos_512_v4
+37/199231/campos_512_v4
+37/199789/campos_512_v4
+37/199929/campos_512_v4
+38/200094/campos_512_v4
+38/200209/campos_512_v4
+38/200245/campos_512_v4
+38/200379/campos_512_v4
+38/200602/campos_512_v4
+38/200737/campos_512_v4
+38/201147/campos_512_v4
+38/201225/campos_512_v4
+38/201340/campos_512_v4
+38/201534/campos_512_v4
+38/201598/campos_512_v4
+38/201843/campos_512_v4
+38/202048/campos_512_v4
+38/202147/campos_512_v4
+38/202184/campos_512_v4
+38/202190/campos_512_v4
+38/202380/campos_512_v4
+38/202823/campos_512_v4
+38/203343/campos_512_v4
+38/203545/campos_512_v4
+38/203745/campos_512_v4
+38/203819/campos_512_v4
+38/203948/campos_512_v4
+38/204155/campos_512_v4
+38/204249/campos_512_v4
+38/204397/campos_512_v4
+38/204441/campos_512_v4
+4/30023/campos_512_v4
+4/30035/campos_512_v4
+4/30300/campos_512_v4
+4/30538/campos_512_v4
+4/30637/campos_512_v4
+4/30662/campos_512_v4
+4/31553/campos_512_v4
+4/31674/campos_512_v4
+4/32034/campos_512_v4
+4/33054/campos_512_v4
+4/33253/campos_512_v4
+4/33539/campos_512_v4
+4/33946/campos_512_v4
+4/34201/campos_512_v4
+4/34375/campos_512_v4
+4/34846/campos_512_v4
+40/210049/campos_512_v4
+40/210573/campos_512_v4
+40/210871/campos_512_v4
+40/210976/campos_512_v4
+40/211733/campos_512_v4
+40/211775/campos_512_v4
+40/211862/campos_512_v4
+40/212064/campos_512_v4
+40/212590/campos_512_v4
+40/213182/campos_512_v4
+40/213183/campos_512_v4
+40/213360/campos_512_v4
+40/213385/campos_512_v4
+40/213546/campos_512_v4
+40/213963/campos_512_v4
+40/213975/campos_512_v4
+40/214415/campos_512_v4
+40/214434/campos_512_v4
+41/215004/campos_512_v4
+41/215024/campos_512_v4
+41/215109/campos_512_v4
+41/215314/campos_512_v4
+41/215476/campos_512_v4
+41/215560/campos_512_v4
+41/216129/campos_512_v4
+41/216158/campos_512_v4
+41/216232/campos_512_v4
+41/216452/campos_512_v4
+41/216506/campos_512_v4
+41/216705/campos_512_v4
+41/216799/campos_512_v4
+41/216859/campos_512_v4
+41/216968/campos_512_v4
+41/217069/campos_512_v4
+41/217143/campos_512_v4
+41/217242/campos_512_v4
+41/217657/campos_512_v4
+41/217712/campos_512_v4
+41/218194/campos_512_v4
+41/218394/campos_512_v4
+41/218735/campos_512_v4
+41/218810/campos_512_v4
+41/218974/campos_512_v4
+41/219646/campos_512_v4
+42/221085/campos_512_v4
+42/221194/campos_512_v4
+42/221270/campos_512_v4
+42/221282/campos_512_v4
+42/221434/campos_512_v4
+42/221462/campos_512_v4
+42/221841/campos_512_v4
+42/221851/campos_512_v4
+42/222635/campos_512_v4
+42/222729/campos_512_v4
+42/222903/campos_512_v4
+42/222905/campos_512_v4
+42/223054/campos_512_v4
+42/223074/campos_512_v4
+42/223361/campos_512_v4
+42/223559/campos_512_v4
+42/223747/campos_512_v4
+42/223820/campos_512_v4
+42/224146/campos_512_v4
+42/224419/campos_512_v4
+42/224939/campos_512_v4
+43/225054/campos_512_v4
+43/225678/campos_512_v4
+43/225681/campos_512_v4
+43/226077/campos_512_v4
+43/226200/campos_512_v4
+43/226646/campos_512_v4
+43/226723/campos_512_v4
+43/227012/campos_512_v4
+43/227370/campos_512_v4
+43/227706/campos_512_v4
+43/227901/campos_512_v4
+43/228241/campos_512_v4
+43/228449/campos_512_v4
+43/228761/campos_512_v4
+43/228769/campos_512_v4
+43/229063/campos_512_v4
+43/229407/campos_512_v4
+43/229440/campos_512_v4
+43/229638/campos_512_v4
+43/229797/campos_512_v4
+43/229978/campos_512_v4
+43/229995/campos_512_v4
+44/230257/campos_512_v4
+44/230464/campos_512_v4
+44/230848/campos_512_v4
+44/231270/campos_512_v4
+44/231275/campos_512_v4
+44/231284/campos_512_v4
+44/231919/campos_512_v4
+44/232013/campos_512_v4
+44/232295/campos_512_v4
+44/232463/campos_512_v4
+44/232528/campos_512_v4
+44/232565/campos_512_v4
+44/232682/campos_512_v4
+44/233254/campos_512_v4
+44/233403/campos_512_v4
+44/233585/campos_512_v4
+44/233746/campos_512_v4
+44/233802/campos_512_v4
+44/233838/campos_512_v4
+44/234303/campos_512_v4
+44/234800/campos_512_v4
+45/235113/campos_512_v4
+45/235869/campos_512_v4
+45/236067/campos_512_v4
+45/236461/campos_512_v4
+45/236464/campos_512_v4
+45/236613/campos_512_v4
+45/237002/campos_512_v4
+45/237195/campos_512_v4
+45/237996/campos_512_v4
+45/238162/campos_512_v4
+45/238209/campos_512_v4
+45/238384/campos_512_v4
+45/238663/campos_512_v4
+45/239203/campos_512_v4
+45/239325/campos_512_v4
+45/239545/campos_512_v4
+45/239697/campos_512_v4
+45/239903/campos_512_v4
+46/240031/campos_512_v4
+46/240085/campos_512_v4
+46/240270/campos_512_v4
+46/240308/campos_512_v4
+46/240317/campos_512_v4
+46/240431/campos_512_v4
+46/240843/campos_512_v4
+46/240933/campos_512_v4
+46/241119/campos_512_v4
+46/241155/campos_512_v4
+46/241223/campos_512_v4
+46/241409/campos_512_v4
+46/241603/campos_512_v4
+46/242098/campos_512_v4
+46/242186/campos_512_v4
+46/242284/campos_512_v4
+46/242396/campos_512_v4
+46/242840/campos_512_v4
+46/243015/campos_512_v4
+46/243176/campos_512_v4
+46/243219/campos_512_v4
+46/243756/campos_512_v4
+46/244239/campos_512_v4
+46/244510/campos_512_v4
+46/244581/campos_512_v4
+46/244674/campos_512_v4
+46/244819/campos_512_v4
+46/244836/campos_512_v4
+46/244861/campos_512_v4
+46/244867/campos_512_v4
+46/244961/campos_512_v4
+47/245092/campos_512_v4
+47/245399/campos_512_v4
+47/245640/campos_512_v4
+47/245708/campos_512_v4
+47/245967/campos_512_v4
+47/246062/campos_512_v4
+47/246528/campos_512_v4
+47/246985/campos_512_v4
+47/247538/campos_512_v4
+47/247547/campos_512_v4
+47/247625/campos_512_v4
+47/248120/campos_512_v4
+47/248221/campos_512_v4
+47/248268/campos_512_v4
+47/248603/campos_512_v4
+47/248666/campos_512_v4
+47/249261/campos_512_v4
+47/249326/campos_512_v4
+47/249535/campos_512_v4
+47/249664/campos_512_v4
+47/249896/campos_512_v4
+48/250543/campos_512_v4
+48/250567/campos_512_v4
+48/250727/campos_512_v4
+48/250870/campos_512_v4
+48/250887/campos_512_v4
+48/250909/campos_512_v4
+48/250927/campos_512_v4
+48/250989/campos_512_v4
+48/251206/campos_512_v4
+48/251231/campos_512_v4
+48/251244/campos_512_v4
+48/251408/campos_512_v4
+48/252781/campos_512_v4
+48/252787/campos_512_v4
+48/252829/campos_512_v4
+48/252874/campos_512_v4
+48/252997/campos_512_v4
+48/253332/campos_512_v4
+48/253546/campos_512_v4
+48/253726/campos_512_v4
+48/253972/campos_512_v4
+48/254092/campos_512_v4
+48/254633/campos_512_v4
+49/255371/campos_512_v4
+49/255509/campos_512_v4
+49/255968/campos_512_v4
+49/256138/campos_512_v4
+49/256378/campos_512_v4
+49/256386/campos_512_v4
+49/256773/campos_512_v4
+49/256850/campos_512_v4
+49/256970/campos_512_v4
+49/257198/campos_512_v4
+49/258141/campos_512_v4
+49/258303/campos_512_v4
+49/258328/campos_512_v4
+49/258718/campos_512_v4
+49/258846/campos_512_v4
+49/258897/campos_512_v4
+49/259028/campos_512_v4
+49/259201/campos_512_v4
+49/259471/campos_512_v4
+49/259822/campos_512_v4
+5/35031/campos_512_v4
+5/35459/campos_512_v4
+5/36336/campos_512_v4
+5/36432/campos_512_v4
+5/36453/campos_512_v4
+5/36596/campos_512_v4
+5/36731/campos_512_v4
+5/37321/campos_512_v4
+5/38533/campos_512_v4
+5/38686/campos_512_v4
+5/39159/campos_512_v4
+5/39213/campos_512_v4
+5/39905/campos_512_v4
+5/39977/campos_512_v4
+50/260115/campos_512_v4
+50/260503/campos_512_v4
+50/260906/campos_512_v4
+50/261165/campos_512_v4
+50/261246/campos_512_v4
+50/261323/campos_512_v4
+50/261747/campos_512_v4
+50/262154/campos_512_v4
+50/262192/campos_512_v4
+50/262229/campos_512_v4
+50/262510/campos_512_v4
+50/262850/campos_512_v4
+50/262914/campos_512_v4
+50/263005/campos_512_v4
+50/263306/campos_512_v4
+50/263421/campos_512_v4
+50/263706/campos_512_v4
+50/263922/campos_512_v4
+50/263969/campos_512_v4
+50/264284/campos_512_v4
+50/264307/campos_512_v4
+50/264449/campos_512_v4
+50/264467/campos_512_v4
+50/264992/campos_512_v4
+51/265341/campos_512_v4
+51/265507/campos_512_v4
+51/265733/campos_512_v4
+51/265827/campos_512_v4
+51/266554/campos_512_v4
+51/266672/campos_512_v4
+51/266908/campos_512_v4
+51/267046/campos_512_v4
+51/267204/campos_512_v4
+51/267320/campos_512_v4
+51/267449/campos_512_v4
+51/267678/campos_512_v4
+51/267771/campos_512_v4
+51/267788/campos_512_v4
+51/267799/campos_512_v4
+51/267900/campos_512_v4
+51/268084/campos_512_v4
+51/268203/campos_512_v4
+51/268236/campos_512_v4
+51/268318/campos_512_v4
+51/268364/campos_512_v4
+51/268376/campos_512_v4
+51/268380/campos_512_v4
+51/268621/campos_512_v4
+51/268893/campos_512_v4
+51/269044/campos_512_v4
+51/269069/campos_512_v4
+51/269133/campos_512_v4
+51/269603/campos_512_v4
+51/269618/campos_512_v4
+51/269806/campos_512_v4
+52/270231/campos_512_v4
+52/270303/campos_512_v4
+52/270323/campos_512_v4
+52/270695/campos_512_v4
+52/270835/campos_512_v4
+52/270950/campos_512_v4
+52/272075/campos_512_v4
+52/272221/campos_512_v4
+52/272291/campos_512_v4
+52/272620/campos_512_v4
+52/272847/campos_512_v4
+52/272868/campos_512_v4
+52/272891/campos_512_v4
+52/273272/campos_512_v4
+52/273304/campos_512_v4
+52/273475/campos_512_v4
+52/273568/campos_512_v4
+52/273836/campos_512_v4
+52/273953/campos_512_v4
+52/274050/campos_512_v4
+52/274276/campos_512_v4
+52/274324/campos_512_v4
+52/274375/campos_512_v4
+52/274550/campos_512_v4
+52/274700/campos_512_v4
+52/274836/campos_512_v4
+52/274905/campos_512_v4
+53/275111/campos_512_v4
+53/275247/campos_512_v4
+53/275616/campos_512_v4
+53/277069/campos_512_v4
+53/277178/campos_512_v4
+53/277193/campos_512_v4
+53/277819/campos_512_v4
+53/277945/campos_512_v4
+53/278565/campos_512_v4
+53/278776/campos_512_v4
+53/278837/campos_512_v4
+53/279052/campos_512_v4
+53/279175/campos_512_v4
+53/279963/campos_512_v4
+54/280038/campos_512_v4
+54/280195/campos_512_v4
+54/280320/campos_512_v4
+54/280457/campos_512_v4
+54/280458/campos_512_v4
+54/280519/campos_512_v4
+54/280961/campos_512_v4
+54/281254/campos_512_v4
+54/281298/campos_512_v4
+54/281491/campos_512_v4
+54/281649/campos_512_v4
+54/282024/campos_512_v4
+54/282096/campos_512_v4
+54/282168/campos_512_v4
+54/282405/campos_512_v4
+54/282538/campos_512_v4
+54/282979/campos_512_v4
+54/283148/campos_512_v4
+54/283360/campos_512_v4
+54/283483/campos_512_v4
+54/283971/campos_512_v4
+54/284123/campos_512_v4
+54/284442/campos_512_v4
+54/284569/campos_512_v4
+54/284871/campos_512_v4
+55/285007/campos_512_v4
+55/285173/campos_512_v4
+55/285299/campos_512_v4
+55/285371/campos_512_v4
+55/285439/campos_512_v4
+55/285479/campos_512_v4
+55/285495/campos_512_v4
+55/286271/campos_512_v4
+55/286458/campos_512_v4
+55/286578/campos_512_v4
+55/286592/campos_512_v4
+55/286721/campos_512_v4
+55/286972/campos_512_v4
+55/287046/campos_512_v4
+55/287155/campos_512_v4
+55/287455/campos_512_v4
+55/287645/campos_512_v4
+55/287685/campos_512_v4
+55/287969/campos_512_v4
+55/288177/campos_512_v4
+55/288229/campos_512_v4
+55/288276/campos_512_v4
+55/288624/campos_512_v4
+55/289095/campos_512_v4
+55/289172/campos_512_v4
+55/289201/campos_512_v4
+55/289641/campos_512_v4
+55/289779/campos_512_v4
+55/289900/campos_512_v4
+55/289937/campos_512_v4
+56/290004/campos_512_v4
+56/290008/campos_512_v4
+56/290267/campos_512_v4
+56/290373/campos_512_v4
+56/290587/campos_512_v4
+56/290861/campos_512_v4
+56/290911/campos_512_v4
+56/291146/campos_512_v4
+56/291240/campos_512_v4
+56/291245/campos_512_v4
+56/291858/campos_512_v4
+56/292041/campos_512_v4
+56/292149/campos_512_v4
+56/292340/campos_512_v4
+56/292483/campos_512_v4
+56/292576/campos_512_v4
+56/293328/campos_512_v4
+56/293476/campos_512_v4
+56/293526/campos_512_v4
+56/293698/campos_512_v4
+56/294005/campos_512_v4
+56/294343/campos_512_v4
+56/294539/campos_512_v4
+57/295240/campos_512_v4
+57/295243/campos_512_v4
+57/295355/campos_512_v4
+57/295504/campos_512_v4
+57/295719/campos_512_v4
+57/296096/campos_512_v4
+57/296177/campos_512_v4
+57/296201/campos_512_v4
+57/296487/campos_512_v4
+57/296630/campos_512_v4
+57/296778/campos_512_v4
+57/296820/campos_512_v4
+57/297025/campos_512_v4
+57/297150/campos_512_v4
+57/297169/campos_512_v4
+57/297199/campos_512_v4
+57/297289/campos_512_v4
+57/297356/campos_512_v4
+57/297498/campos_512_v4
+57/297948/campos_512_v4
+57/297969/campos_512_v4
+57/298537/campos_512_v4
+57/298654/campos_512_v4
+57/299216/campos_512_v4
+58/300085/campos_512_v4
+58/300091/campos_512_v4
+58/300177/campos_512_v4
+58/300307/campos_512_v4
+58/300468/campos_512_v4
+58/300481/campos_512_v4
+58/300556/campos_512_v4
+58/300729/campos_512_v4
+58/300821/campos_512_v4
+58/301150/campos_512_v4
+58/301215/campos_512_v4
+58/301263/campos_512_v4
+58/301438/campos_512_v4
+58/301875/campos_512_v4
+58/302307/campos_512_v4
+58/302474/campos_512_v4
+58/302877/campos_512_v4
+58/302900/campos_512_v4
+58/303011/campos_512_v4
+58/303138/campos_512_v4
+58/303297/campos_512_v4
+58/303301/campos_512_v4
+58/303673/campos_512_v4
+58/303721/campos_512_v4
+58/304525/campos_512_v4
+58/304802/campos_512_v4
+58/304982/campos_512_v4
+59/305201/campos_512_v4
+59/305240/campos_512_v4
+59/305729/campos_512_v4
+59/305786/campos_512_v4
+59/305801/campos_512_v4
+59/306020/campos_512_v4
+59/306098/campos_512_v4
+59/306552/campos_512_v4
+59/307114/campos_512_v4
+59/307132/campos_512_v4
+59/307196/campos_512_v4
+59/307396/campos_512_v4
+59/307410/campos_512_v4
+59/307656/campos_512_v4
+59/307659/campos_512_v4
+59/307877/campos_512_v4
+59/307936/campos_512_v4
+59/307990/campos_512_v4
+59/308142/campos_512_v4
+59/308531/campos_512_v4
+59/308595/campos_512_v4
+59/308633/campos_512_v4
+59/308910/campos_512_v4
+59/308974/campos_512_v4
+59/309261/campos_512_v4
+59/309375/campos_512_v4
+59/309557/campos_512_v4
+59/309838/campos_512_v4
+59/309930/campos_512_v4
+6/40103/campos_512_v4
+6/40527/campos_512_v4
+6/40730/campos_512_v4
+6/41253/campos_512_v4
+6/41255/campos_512_v4
+6/41273/campos_512_v4
+6/41899/campos_512_v4
+6/42215/campos_512_v4
+6/42253/campos_512_v4
+6/42607/campos_512_v4
+6/43451/campos_512_v4
+6/43862/campos_512_v4
+6/43896/campos_512_v4
+6/43978/campos_512_v4
+6/44114/campos_512_v4
+6/44147/campos_512_v4
+6/44346/campos_512_v4
+6/44624/campos_512_v4
+60/310036/campos_512_v4
+60/310073/campos_512_v4
+60/310136/campos_512_v4
+60/310344/campos_512_v4
+60/310677/campos_512_v4
+60/310829/campos_512_v4
+60/310853/campos_512_v4
+60/311025/campos_512_v4
+60/311061/campos_512_v4
+60/311148/campos_512_v4
+60/311271/campos_512_v4
+60/311644/campos_512_v4
+60/311800/campos_512_v4
+60/311871/campos_512_v4
+60/312076/campos_512_v4
+60/312197/campos_512_v4
+60/312247/campos_512_v4
+60/312454/campos_512_v4
+60/312643/campos_512_v4
+60/313005/campos_512_v4
+60/313063/campos_512_v4
+60/313221/campos_512_v4
+60/313260/campos_512_v4
+60/313300/campos_512_v4
+60/313775/campos_512_v4
+60/313805/campos_512_v4
+60/313861/campos_512_v4
+60/313867/campos_512_v4
+60/314663/campos_512_v4
+60/314734/campos_512_v4
+60/314872/campos_512_v4
+61/315151/campos_512_v4
+61/315395/campos_512_v4
+61/315507/campos_512_v4
+61/315585/campos_512_v4
+61/315731/campos_512_v4
+61/315840/campos_512_v4
+61/315886/campos_512_v4
+61/315919/campos_512_v4
+61/316228/campos_512_v4
+61/317569/campos_512_v4
+61/317816/campos_512_v4
+61/317911/campos_512_v4
+61/317928/campos_512_v4
+61/318091/campos_512_v4
+61/318118/campos_512_v4
+61/318165/campos_512_v4
+61/318373/campos_512_v4
+61/318512/campos_512_v4
+61/318547/campos_512_v4
+61/319587/campos_512_v4
+62/320028/campos_512_v4
+62/320286/campos_512_v4
+62/320421/campos_512_v4
+62/320431/campos_512_v4
+62/320894/campos_512_v4
+62/321211/campos_512_v4
+62/321663/campos_512_v4
+62/321938/campos_512_v4
+62/322146/campos_512_v4
+62/322190/campos_512_v4
+62/322347/campos_512_v4
+62/322449/campos_512_v4
+62/322783/campos_512_v4
+62/322856/campos_512_v4
+62/322893/campos_512_v4
+62/322926/campos_512_v4
+62/322955/campos_512_v4
+62/323298/campos_512_v4
+62/323379/campos_512_v4
+62/323825/campos_512_v4
+62/324030/campos_512_v4
+62/324196/campos_512_v4
+62/324891/campos_512_v4
+63/325409/campos_512_v4
+63/325450/campos_512_v4
+63/325525/campos_512_v4
+63/325693/campos_512_v4
+63/325737/campos_512_v4
+63/325852/campos_512_v4
+63/326051/campos_512_v4
+63/326094/campos_512_v4
+63/326203/campos_512_v4
+63/326332/campos_512_v4
+63/326336/campos_512_v4
+63/326358/campos_512_v4
+63/326724/campos_512_v4
+63/326938/campos_512_v4
+63/327313/campos_512_v4
+63/327319/campos_512_v4
+63/327326/campos_512_v4
+63/327389/campos_512_v4
+63/327507/campos_512_v4
+63/327808/campos_512_v4
+63/328576/campos_512_v4
+63/328998/campos_512_v4
+63/329486/campos_512_v4
+63/329724/campos_512_v4
+64/330290/campos_512_v4
+64/330402/campos_512_v4
+64/330474/campos_512_v4
+64/330600/campos_512_v4
+64/330622/campos_512_v4
+64/331264/campos_512_v4
+64/331535/campos_512_v4
+64/332263/campos_512_v4
+64/333412/campos_512_v4
+64/333569/campos_512_v4
+64/333892/campos_512_v4
+64/334049/campos_512_v4
+64/334200/campos_512_v4
+64/334314/campos_512_v4
+64/334538/campos_512_v4
+64/334899/campos_512_v4
+64/334962/campos_512_v4
+64/334978/campos_512_v4
+65/335543/campos_512_v4
+65/336120/campos_512_v4
+65/336123/campos_512_v4
+65/336297/campos_512_v4
+65/336459/campos_512_v4
+65/336519/campos_512_v4
+65/336605/campos_512_v4
+65/336827/campos_512_v4
+65/337088/campos_512_v4
+65/337394/campos_512_v4
+65/337825/campos_512_v4
+65/337894/campos_512_v4
+65/337949/campos_512_v4
+65/338214/campos_512_v4
+65/338560/campos_512_v4
+65/339624/campos_512_v4
+66/340332/campos_512_v4
+66/340387/campos_512_v4
+66/340421/campos_512_v4
+66/340488/campos_512_v4
+66/341085/campos_512_v4
+66/341128/campos_512_v4
+66/341167/campos_512_v4
+66/341408/campos_512_v4
+66/341766/campos_512_v4
+66/342344/campos_512_v4
+66/342496/campos_512_v4
+66/342502/campos_512_v4
+66/342720/campos_512_v4
+66/342822/campos_512_v4
+66/343108/campos_512_v4
+66/343153/campos_512_v4
+66/343596/campos_512_v4
+66/343604/campos_512_v4
+66/343618/campos_512_v4
+66/343639/campos_512_v4
+66/344314/campos_512_v4
+66/344335/campos_512_v4
+66/344670/campos_512_v4
+66/344953/campos_512_v4
+67/345184/campos_512_v4
+67/345201/campos_512_v4
+67/345275/campos_512_v4
+67/345951/campos_512_v4
+67/346106/campos_512_v4
+67/346780/campos_512_v4
+67/347048/campos_512_v4
+67/347401/campos_512_v4
+67/347705/campos_512_v4
+67/347945/campos_512_v4
+67/348284/campos_512_v4
+67/348427/campos_512_v4
+67/348558/campos_512_v4
+67/348828/campos_512_v4
+67/349059/campos_512_v4
+67/349190/campos_512_v4
+67/349311/campos_512_v4
+67/349382/campos_512_v4
+67/349514/campos_512_v4
+67/349881/campos_512_v4
+67/349958/campos_512_v4
+67/349976/campos_512_v4
+68/350229/campos_512_v4
+68/350264/campos_512_v4
+68/350275/campos_512_v4
+68/350318/campos_512_v4
+68/351231/campos_512_v4
+68/351461/campos_512_v4
+68/351850/campos_512_v4
+68/352198/campos_512_v4
+68/352438/campos_512_v4
+68/352513/campos_512_v4
+68/352803/campos_512_v4
+68/353002/campos_512_v4
+68/353396/campos_512_v4
+68/353563/campos_512_v4
+68/353670/campos_512_v4
+68/353893/campos_512_v4
+68/354356/campos_512_v4
+68/354480/campos_512_v4
+68/354649/campos_512_v4
+68/354667/campos_512_v4
+68/354745/campos_512_v4
+69/355030/campos_512_v4
+69/355094/campos_512_v4
+69/355533/campos_512_v4
+69/356047/campos_512_v4
+69/356330/campos_512_v4
+69/356782/campos_512_v4
+69/356890/campos_512_v4
+69/356946/campos_512_v4
+69/357116/campos_512_v4
+69/357226/campos_512_v4
+69/357269/campos_512_v4
+69/357544/campos_512_v4
+69/357596/campos_512_v4
+69/358131/campos_512_v4
+69/358668/campos_512_v4
+69/358946/campos_512_v4
+69/359015/campos_512_v4
+69/359018/campos_512_v4
+69/359166/campos_512_v4
+69/359385/campos_512_v4
+7/45074/campos_512_v4
+7/45083/campos_512_v4
+7/45400/campos_512_v4
+7/45457/campos_512_v4
+7/46144/campos_512_v4
+7/46298/campos_512_v4
+7/46556/campos_512_v4
+7/46577/campos_512_v4
+7/47075/campos_512_v4
+7/47176/campos_512_v4
+7/47231/campos_512_v4
+7/47380/campos_512_v4
+7/47406/campos_512_v4
+7/47676/campos_512_v4
+7/47843/campos_512_v4
+7/47898/campos_512_v4
+7/47956/campos_512_v4
+7/48140/campos_512_v4
+7/48211/campos_512_v4
+7/48711/campos_512_v4
+7/48835/campos_512_v4
+7/49286/campos_512_v4
+70/360243/campos_512_v4
+70/360275/campos_512_v4
+70/360452/campos_512_v4
+70/360639/campos_512_v4
+70/360692/campos_512_v4
+70/361062/campos_512_v4
+70/361329/campos_512_v4
+70/361528/campos_512_v4
+70/361951/campos_512_v4
+70/361986/campos_512_v4
+70/362523/campos_512_v4
+70/362542/campos_512_v4
+70/363014/campos_512_v4
+70/363039/campos_512_v4
+70/363084/campos_512_v4
+70/363390/campos_512_v4
+70/363732/campos_512_v4
+70/364067/campos_512_v4
+70/364099/campos_512_v4
+70/364483/campos_512_v4
+70/364546/campos_512_v4
+70/364548/campos_512_v4
+70/364930/campos_512_v4
+71/365022/campos_512_v4
+71/365151/campos_512_v4
+71/365674/campos_512_v4
+71/365756/campos_512_v4
+71/366299/campos_512_v4
+71/366300/campos_512_v4
+71/366529/campos_512_v4
+71/367249/campos_512_v4
+71/367984/campos_512_v4
+71/368152/campos_512_v4
+71/368226/campos_512_v4
+71/368389/campos_512_v4
+71/368466/campos_512_v4
+71/368696/campos_512_v4
+71/369070/campos_512_v4
+71/369150/campos_512_v4
+71/369272/campos_512_v4
+71/369476/campos_512_v4
+72/370430/campos_512_v4
+72/370460/campos_512_v4
+72/370507/campos_512_v4
+72/370648/campos_512_v4
+72/370671/campos_512_v4
+72/370762/campos_512_v4
+72/371027/campos_512_v4
+72/372181/campos_512_v4
+72/372322/campos_512_v4
+72/372365/campos_512_v4
+72/372539/campos_512_v4
+72/372677/campos_512_v4
+72/372708/campos_512_v4
+72/372775/campos_512_v4
+72/373581/campos_512_v4
+72/373810/campos_512_v4
+72/373886/campos_512_v4
+72/374002/campos_512_v4
+72/374042/campos_512_v4
+72/374099/campos_512_v4
+72/374566/campos_512_v4
+72/374772/campos_512_v4
+72/374814/campos_512_v4
+73/375061/campos_512_v4
+73/375255/campos_512_v4
+73/375606/campos_512_v4
+73/375649/campos_512_v4
+73/375983/campos_512_v4
+73/376030/campos_512_v4
+73/376032/campos_512_v4
+73/376180/campos_512_v4
+73/376446/campos_512_v4
+73/376453/campos_512_v4
+73/376816/campos_512_v4
+73/376883/campos_512_v4
+73/377116/campos_512_v4
+73/377387/campos_512_v4
+73/377965/campos_512_v4
+73/378205/campos_512_v4
+73/378218/campos_512_v4
+73/378362/campos_512_v4
+73/378740/campos_512_v4
+73/378756/campos_512_v4
+73/379011/campos_512_v4
+73/379844/campos_512_v4
+74/380111/campos_512_v4
+74/380362/campos_512_v4
+74/380429/campos_512_v4
+74/380582/campos_512_v4
+74/381252/campos_512_v4
+74/381303/campos_512_v4
+74/382133/campos_512_v4
+74/383122/campos_512_v4
+74/383407/campos_512_v4
+74/383746/campos_512_v4
+74/383752/campos_512_v4
+74/383938/campos_512_v4
+74/384242/campos_512_v4
+74/384394/campos_512_v4
+74/384955/campos_512_v4
+75/385093/campos_512_v4
+75/385148/campos_512_v4
+75/385277/campos_512_v4
+75/385698/campos_512_v4
+75/385799/campos_512_v4
+75/385853/campos_512_v4
+75/386588/campos_512_v4
+75/386595/campos_512_v4
+75/386645/campos_512_v4
+75/386667/campos_512_v4
+75/386768/campos_512_v4
+75/386951/campos_512_v4
+75/387079/campos_512_v4
+75/387090/campos_512_v4
+75/387421/campos_512_v4
+75/387855/campos_512_v4
+75/388015/campos_512_v4
+75/388605/campos_512_v4
+75/388644/campos_512_v4
+75/388772/campos_512_v4
+75/388794/campos_512_v4
+75/388909/campos_512_v4
+75/389038/campos_512_v4
+75/389427/campos_512_v4
+75/389579/campos_512_v4
+75/389944/campos_512_v4
+75/389971/campos_512_v4
+76/390063/campos_512_v4
+76/390087/campos_512_v4
+76/390199/campos_512_v4
+76/390957/campos_512_v4
+76/391331/campos_512_v4
+76/391825/campos_512_v4
+76/391971/campos_512_v4
+76/392142/campos_512_v4
+76/392160/campos_512_v4
+76/392381/campos_512_v4
+76/392732/campos_512_v4
+76/393006/campos_512_v4
+76/393207/campos_512_v4
+76/393260/campos_512_v4
+76/393339/campos_512_v4
+76/393463/campos_512_v4
+76/393900/campos_512_v4
+76/393931/campos_512_v4
+76/394108/campos_512_v4
+76/394138/campos_512_v4
+76/394254/campos_512_v4
+76/394408/campos_512_v4
+76/394675/campos_512_v4
+76/394866/campos_512_v4
+77/395075/campos_512_v4
+77/395150/campos_512_v4
+77/395714/campos_512_v4
+77/395770/campos_512_v4
+77/396123/campos_512_v4
+77/396279/campos_512_v4
+77/396647/campos_512_v4
+77/396731/campos_512_v4
+77/396924/campos_512_v4
+77/397047/campos_512_v4
+77/397072/campos_512_v4
+77/397198/campos_512_v4
+77/397526/campos_512_v4
+77/397596/campos_512_v4
+77/397689/campos_512_v4
+77/398074/campos_512_v4
+77/398373/campos_512_v4
+77/398403/campos_512_v4
+77/398499/campos_512_v4
+77/398534/campos_512_v4
+77/398892/campos_512_v4
+77/398932/campos_512_v4
+77/399140/campos_512_v4
+77/399216/campos_512_v4
+77/399233/campos_512_v4
+77/399306/campos_512_v4
+77/399410/campos_512_v4
+77/399552/campos_512_v4
+77/399605/campos_512_v4
+77/399775/campos_512_v4
+78/400133/campos_512_v4
+78/400908/campos_512_v4
+78/401013/campos_512_v4
+78/401364/campos_512_v4
+78/402187/campos_512_v4
+78/402229/campos_512_v4
+78/402698/campos_512_v4
+78/402769/campos_512_v4
+78/403344/campos_512_v4
+78/403634/campos_512_v4
+78/403753/campos_512_v4
+78/404073/campos_512_v4
+78/404971/campos_512_v4
+79/405243/campos_512_v4
+79/405632/campos_512_v4
+79/406275/campos_512_v4
+79/406358/campos_512_v4
+79/406361/campos_512_v4
+79/406560/campos_512_v4
+79/406587/campos_512_v4
+79/406632/campos_512_v4
+79/406681/campos_512_v4
+79/406715/campos_512_v4
+79/407380/campos_512_v4
+79/407564/campos_512_v4
+79/407606/campos_512_v4
+79/407894/campos_512_v4
+79/408080/campos_512_v4
+79/408422/campos_512_v4
+79/408595/campos_512_v4
+79/408870/campos_512_v4
+79/409130/campos_512_v4
+79/409140/campos_512_v4
+79/409305/campos_512_v4
+79/409809/campos_512_v4
+8/50399/campos_512_v4
+8/50594/campos_512_v4
+8/50714/campos_512_v4
+8/51079/campos_512_v4
+8/51355/campos_512_v4
+8/51460/campos_512_v4
+8/51471/campos_512_v4
+8/51601/campos_512_v4
+8/51680/campos_512_v4
+8/51898/campos_512_v4
+8/52178/campos_512_v4
+8/52429/campos_512_v4
+8/52670/campos_512_v4
+8/52719/campos_512_v4
+8/52802/campos_512_v4
+8/52850/campos_512_v4
+8/53015/campos_512_v4
+8/53278/campos_512_v4
+8/53754/campos_512_v4
+8/53819/campos_512_v4
+8/53893/campos_512_v4
+8/53992/campos_512_v4
+8/54510/campos_512_v4
+80/410063/campos_512_v4
+80/410372/campos_512_v4
+80/410643/campos_512_v4
+80/410762/campos_512_v4
+80/411145/campos_512_v4
+80/411294/campos_512_v4
+80/411470/campos_512_v4
+80/411894/campos_512_v4
+80/412009/campos_512_v4
+80/412292/campos_512_v4
+80/412612/campos_512_v4
+80/413150/campos_512_v4
+80/413167/campos_512_v4
+80/413200/campos_512_v4
+80/413423/campos_512_v4
+80/413718/campos_512_v4
+80/413850/campos_512_v4
+80/414496/campos_512_v4
+80/414643/campos_512_v4
+81/415014/campos_512_v4
+81/415020/campos_512_v4
+81/415262/campos_512_v4
+81/415581/campos_512_v4
+81/415610/campos_512_v4
+81/415774/campos_512_v4
+81/416424/campos_512_v4
+81/416746/campos_512_v4
+81/417377/campos_512_v4
+81/417504/campos_512_v4
+81/417778/campos_512_v4
+81/417871/campos_512_v4
+81/418071/campos_512_v4
+81/418090/campos_512_v4
+81/418110/campos_512_v4
+81/418166/campos_512_v4
+81/418702/campos_512_v4
+81/419044/campos_512_v4
+81/419409/campos_512_v4
+82/420017/campos_512_v4
+82/420702/campos_512_v4
+82/421280/campos_512_v4
+82/421295/campos_512_v4
+82/421474/campos_512_v4
+82/421836/campos_512_v4
+82/422020/campos_512_v4
+82/422037/campos_512_v4
+82/422635/campos_512_v4
+82/422665/campos_512_v4
+82/423008/campos_512_v4
+82/423026/campos_512_v4
+82/423135/campos_512_v4
+82/423331/campos_512_v4
+82/423598/campos_512_v4
+82/423641/campos_512_v4
+82/423759/campos_512_v4
+82/424245/campos_512_v4
+82/424314/campos_512_v4
+82/424471/campos_512_v4
+82/424587/campos_512_v4
+82/424773/campos_512_v4
+83/425165/campos_512_v4
+83/425641/campos_512_v4
+83/425888/campos_512_v4
+83/426303/campos_512_v4
+83/426616/campos_512_v4
+83/426690/campos_512_v4
+83/426718/campos_512_v4
+83/427463/campos_512_v4
+83/428023/campos_512_v4
+83/428263/campos_512_v4
+83/428301/campos_512_v4
+83/428544/campos_512_v4
+83/428761/campos_512_v4
+83/428878/campos_512_v4
+83/429238/campos_512_v4
+83/429258/campos_512_v4
+84/430092/campos_512_v4
+84/430205/campos_512_v4
+84/430440/campos_512_v4
+84/430982/campos_512_v4
+84/431025/campos_512_v4
+84/431076/campos_512_v4
+84/431111/campos_512_v4
+84/431206/campos_512_v4
+84/431209/campos_512_v4
+84/431298/campos_512_v4
+84/431368/campos_512_v4
+84/431686/campos_512_v4
+84/432142/campos_512_v4
+84/432244/campos_512_v4
+84/432488/campos_512_v4
+84/432690/campos_512_v4
+84/432990/campos_512_v4
+84/433128/campos_512_v4
+84/433151/campos_512_v4
+84/433363/campos_512_v4
+84/433626/campos_512_v4
+84/433959/campos_512_v4
+84/434119/campos_512_v4
+84/434230/campos_512_v4
+84/434310/campos_512_v4
+84/434877/campos_512_v4
+84/434977/campos_512_v4
+85/435215/campos_512_v4
+85/435469/campos_512_v4
+85/435670/campos_512_v4
+85/436028/campos_512_v4
+85/436055/campos_512_v4
+85/436386/campos_512_v4
+85/436472/campos_512_v4
+85/436678/campos_512_v4
+85/437181/campos_512_v4
+85/437267/campos_512_v4
+85/437459/campos_512_v4
+85/437957/campos_512_v4
+85/438501/campos_512_v4
+85/438658/campos_512_v4
+85/438758/campos_512_v4
+85/438980/campos_512_v4
+85/439256/campos_512_v4
+85/439293/campos_512_v4
+85/439340/campos_512_v4
+85/439345/campos_512_v4
+85/439554/campos_512_v4
+85/439765/campos_512_v4
+86/440644/campos_512_v4
+86/441032/campos_512_v4
+86/441262/campos_512_v4
+86/441338/campos_512_v4
+86/442019/campos_512_v4
+86/442023/campos_512_v4
+86/442240/campos_512_v4
+86/442364/campos_512_v4
+86/442419/campos_512_v4
+86/442437/campos_512_v4
+86/443767/campos_512_v4
+86/443839/campos_512_v4
+86/443860/campos_512_v4
+86/443897/campos_512_v4
+86/444605/campos_512_v4
+87/445219/campos_512_v4
+87/445247/campos_512_v4
+87/445283/campos_512_v4
+87/445333/campos_512_v4
+87/445340/campos_512_v4
+87/445558/campos_512_v4
+87/445625/campos_512_v4
+87/446069/campos_512_v4
+87/446489/campos_512_v4
+87/446580/campos_512_v4
+87/446697/campos_512_v4
+87/446817/campos_512_v4
+87/447034/campos_512_v4
+87/447761/campos_512_v4
+87/447824/campos_512_v4
+87/448064/campos_512_v4
+87/448131/campos_512_v4
+87/448192/campos_512_v4
+87/448383/campos_512_v4
+87/448408/campos_512_v4
+87/448781/campos_512_v4
+87/448923/campos_512_v4
+87/448938/campos_512_v4
+87/448972/campos_512_v4
+87/449142/campos_512_v4
+87/449564/campos_512_v4
+87/449596/campos_512_v4
+87/449676/campos_512_v4
+88/450221/campos_512_v4
+88/450327/campos_512_v4
+88/450709/campos_512_v4
+88/451143/campos_512_v4
+88/451379/campos_512_v4
+88/451454/campos_512_v4
+88/451604/campos_512_v4
+88/451619/campos_512_v4
+88/452467/campos_512_v4
+88/452717/campos_512_v4
+88/452842/campos_512_v4
+88/452975/campos_512_v4
+88/453232/campos_512_v4
+88/453350/campos_512_v4
+88/453953/campos_512_v4
+88/454040/campos_512_v4
+88/454505/campos_512_v4
+88/454573/campos_512_v4
+88/454775/campos_512_v4
+89/455171/campos_512_v4
+89/455275/campos_512_v4
+89/455317/campos_512_v4
+89/456199/campos_512_v4
+89/456333/campos_512_v4
+89/456375/campos_512_v4
+89/456476/campos_512_v4
+89/456546/campos_512_v4
+89/456713/campos_512_v4
+89/457076/campos_512_v4
+89/457126/campos_512_v4
+89/457567/campos_512_v4
+89/457614/campos_512_v4
+89/457823/campos_512_v4
+89/458338/campos_512_v4
+89/458351/campos_512_v4
+89/458440/campos_512_v4
+89/458670/campos_512_v4
+89/459147/campos_512_v4
+89/459334/campos_512_v4
+89/459474/campos_512_v4
+89/459548/campos_512_v4
+89/459586/campos_512_v4
+89/459838/campos_512_v4
+89/459902/campos_512_v4
+9/55050/campos_512_v4
+9/55856/campos_512_v4
+9/55860/campos_512_v4
+9/56114/campos_512_v4
+9/56160/campos_512_v4
+9/56284/campos_512_v4
+9/57230/campos_512_v4
+9/57323/campos_512_v4
+9/57356/campos_512_v4
+9/58766/campos_512_v4
+9/58796/campos_512_v4
+9/59205/campos_512_v4
+9/59441/campos_512_v4
+9/59877/campos_512_v4
+90/460003/campos_512_v4
+90/460208/campos_512_v4
+90/460470/campos_512_v4
+90/460474/campos_512_v4
+90/460630/campos_512_v4
+90/460997/campos_512_v4
+90/461120/campos_512_v4
+90/461142/campos_512_v4
+90/461380/campos_512_v4
+90/461394/campos_512_v4
+90/461754/campos_512_v4
+90/462221/campos_512_v4
+90/462938/campos_512_v4
+90/463009/campos_512_v4
+90/463349/campos_512_v4
+90/464276/campos_512_v4
+90/464332/campos_512_v4
+90/464336/campos_512_v4
+90/464351/campos_512_v4
+90/464784/campos_512_v4
+90/464804/campos_512_v4
+90/464833/campos_512_v4
+90/464845/campos_512_v4
+90/464885/campos_512_v4
+91/465118/campos_512_v4
+91/465277/campos_512_v4
+91/465498/campos_512_v4
+91/465568/campos_512_v4
+91/465784/campos_512_v4
+91/465911/campos_512_v4
+91/465962/campos_512_v4
+91/466607/campos_512_v4
+91/466617/campos_512_v4
+91/466686/campos_512_v4
+91/466769/campos_512_v4
+91/466932/campos_512_v4
+91/467084/campos_512_v4
+91/467118/campos_512_v4
+91/467327/campos_512_v4
+91/467502/campos_512_v4
+91/468058/campos_512_v4
+91/468071/campos_512_v4
+91/468094/campos_512_v4
+91/468199/campos_512_v4
+91/468247/campos_512_v4
+91/468690/campos_512_v4
+91/468791/campos_512_v4
+91/468820/campos_512_v4
+91/468916/campos_512_v4
+91/469379/campos_512_v4
+91/469415/campos_512_v4
+91/469633/campos_512_v4
+91/469809/campos_512_v4
+91/469917/campos_512_v4
+92/470197/campos_512_v4
+92/470424/campos_512_v4
+92/470650/campos_512_v4
+92/471069/campos_512_v4
+92/471095/campos_512_v4
+92/471801/campos_512_v4
+92/472133/campos_512_v4
+92/472260/campos_512_v4
+92/472312/campos_512_v4
+92/472585/campos_512_v4
+92/472714/campos_512_v4
+92/472731/campos_512_v4
+92/472924/campos_512_v4
+92/473055/campos_512_v4
+92/473287/campos_512_v4
+92/473354/campos_512_v4
+92/473398/campos_512_v4
+92/473753/campos_512_v4
+92/473757/campos_512_v4
+92/473787/campos_512_v4
+92/474030/campos_512_v4
+92/474031/campos_512_v4
+92/474037/campos_512_v4
+92/474220/campos_512_v4
+92/474254/campos_512_v4
+92/474864/campos_512_v4
+93/475169/campos_512_v4
+93/475607/campos_512_v4
+93/475661/campos_512_v4
+93/475718/campos_512_v4
+93/475725/campos_512_v4
+93/476062/campos_512_v4
+93/476148/campos_512_v4
+93/476200/campos_512_v4
+93/477057/campos_512_v4
+93/477344/campos_512_v4
+93/477507/campos_512_v4
+93/477580/campos_512_v4
+93/478333/campos_512_v4
+93/478379/campos_512_v4
+93/478722/campos_512_v4
+93/478740/campos_512_v4
+93/479006/campos_512_v4
+93/479594/campos_512_v4
+94/480044/campos_512_v4
+94/480280/campos_512_v4
+94/480557/campos_512_v4
+94/480661/campos_512_v4
+94/481198/campos_512_v4
+94/481340/campos_512_v4
+94/481795/campos_512_v4
+94/482006/campos_512_v4
+94/482614/campos_512_v4
+94/482675/campos_512_v4
+94/482676/campos_512_v4
+94/482687/campos_512_v4
+94/482913/campos_512_v4
+94/483127/campos_512_v4
+94/483537/campos_512_v4
+94/483614/campos_512_v4
+94/483674/campos_512_v4
+94/483916/campos_512_v4
+95/485485/campos_512_v4
+95/485499/campos_512_v4
+95/485996/campos_512_v4
+95/486275/campos_512_v4
+95/486314/campos_512_v4
+95/486480/campos_512_v4
+95/486521/campos_512_v4
+95/487009/campos_512_v4
+95/487170/campos_512_v4
+95/487327/campos_512_v4
+95/487564/campos_512_v4
+95/487590/campos_512_v4
+95/487808/campos_512_v4
+95/487953/campos_512_v4
+95/488137/campos_512_v4
+95/488430/campos_512_v4
+95/488556/campos_512_v4
+95/488822/campos_512_v4
+95/488853/campos_512_v4
+95/489093/campos_512_v4
+95/489250/campos_512_v4
+95/489392/campos_512_v4
+95/489539/campos_512_v4
+95/489751/campos_512_v4
+95/489932/campos_512_v4
+96/490625/campos_512_v4
+96/490681/campos_512_v4
+96/490745/campos_512_v4
+96/491246/campos_512_v4
+96/491269/campos_512_v4
+96/491331/campos_512_v4
+96/491342/campos_512_v4
+96/491435/campos_512_v4
+96/491825/campos_512_v4
+96/491955/campos_512_v4
+96/492086/campos_512_v4
+96/492574/campos_512_v4
+96/492636/campos_512_v4
+96/494082/campos_512_v4
+96/494193/campos_512_v4
+96/494419/campos_512_v4
+96/494503/campos_512_v4
+96/494989/campos_512_v4
+97/495006/campos_512_v4
+97/495726/campos_512_v4
+97/495766/campos_512_v4
+97/495894/campos_512_v4
+97/496073/campos_512_v4
+97/496286/campos_512_v4
+97/496426/campos_512_v4
+97/496484/campos_512_v4
+97/496499/campos_512_v4
+97/496667/campos_512_v4
+97/496669/campos_512_v4
+97/496693/campos_512_v4
+97/497084/campos_512_v4
+97/497208/campos_512_v4
+97/497318/campos_512_v4
+97/497860/campos_512_v4
+97/498094/campos_512_v4
+97/498118/campos_512_v4
+97/498735/campos_512_v4
+97/498940/campos_512_v4
+97/499611/campos_512_v4
+97/499615/campos_512_v4
+98/500036/campos_512_v4
+98/500411/campos_512_v4
+98/500629/campos_512_v4
+98/500635/campos_512_v4
+98/500773/campos_512_v4
+98/501304/campos_512_v4
+98/501795/campos_512_v4
+98/501822/campos_512_v4
+98/501853/campos_512_v4
+98/501886/campos_512_v4
+98/502163/campos_512_v4
+98/502236/campos_512_v4
+98/502493/campos_512_v4
+98/502610/campos_512_v4
+98/502980/campos_512_v4
+98/503038/campos_512_v4
+98/503241/campos_512_v4
+98/503539/campos_512_v4
+98/503613/campos_512_v4
+98/504102/campos_512_v4
+98/504340/campos_512_v4
+98/504455/campos_512_v4
+99/505217/campos_512_v4
+99/505459/campos_512_v4
+99/505664/campos_512_v4
+99/505831/campos_512_v4
+99/506363/campos_512_v4
+99/506821/campos_512_v4
+99/506996/campos_512_v4
+99/507067/campos_512_v4
+99/507301/campos_512_v4
+99/507454/campos_512_v4
+99/507569/campos_512_v4
+99/507630/campos_512_v4
+99/507723/campos_512_v4
+99/507979/campos_512_v4
+99/508435/campos_512_v4
+99/508607/campos_512_v4
+99/508832/campos_512_v4
+99/508843/campos_512_v4
+99/508849/campos_512_v4
+99/509278/campos_512_v4
+99/509637/campos_512_v4
diff --git a/shell_scripts/raw_img_list/Transportations_tar.txt b/shell_scripts/raw_img_list/Transportations_tar.txt
new file mode 100644
index 0000000000000000000000000000000000000000..cc5c8b484367af63676572fda2990b162ac4fab1
--- /dev/null
+++ b/shell_scripts/raw_img_list/Transportations_tar.txt
@@ -0,0 +1,8827 @@
+0/10298/campos_512_v4
+0/10450/campos_512_v4
+0/10460/campos_512_v4
+0/10533/campos_512_v4
+0/10538/campos_512_v4
+0/10586/campos_512_v4
+0/10590/campos_512_v4
+0/10592/campos_512_v4
+0/10621/campos_512_v4
+0/10645/campos_512_v4
+0/10708/campos_512_v4
+0/10716/campos_512_v4
+0/10764/campos_512_v4
+0/10768/campos_512_v4
+0/10855/campos_512_v4
+0/10864/campos_512_v4
+0/10892/campos_512_v4
+0/10951/campos_512_v4
+0/10959/campos_512_v4
+0/10980/campos_512_v4
+0/10989/campos_512_v4
+0/11023/campos_512_v4
+0/11190/campos_512_v4
+0/11284/campos_512_v4
+0/11292/campos_512_v4
+0/11334/campos_512_v4
+0/11434/campos_512_v4
+0/11488/campos_512_v4
+0/11638/campos_512_v4
+0/11928/campos_512_v4
+0/11960/campos_512_v4
+0/12382/campos_512_v4
+0/13194/campos_512_v4
+0/13234/campos_512_v4
+0/13332/campos_512_v4
+0/13427/campos_512_v4
+0/13768/campos_512_v4
+0/13845/campos_512_v4
+0/13888/campos_512_v4
+0/13890/campos_512_v4
+0/14115/campos_512_v4
+0/14313/campos_512_v4
+0/14451/campos_512_v4
+0/14485/campos_512_v4
+0/14656/campos_512_v4
+0/14678/campos_512_v4
+1/15108/campos_512_v4
+1/15189/campos_512_v4
+1/15236/campos_512_v4
+1/15257/campos_512_v4
+1/15428/campos_512_v4
+1/15570/campos_512_v4
+1/15660/campos_512_v4
+1/15689/campos_512_v4
+1/15720/campos_512_v4
+1/15810/campos_512_v4
+1/15912/campos_512_v4
+1/16043/campos_512_v4
+1/16125/campos_512_v4
+1/16236/campos_512_v4
+1/16268/campos_512_v4
+1/16645/campos_512_v4
+1/16678/campos_512_v4
+1/16748/campos_512_v4
+1/17364/campos_512_v4
+1/17420/campos_512_v4
+1/17510/campos_512_v4
+1/17549/campos_512_v4
+1/17555/campos_512_v4
+1/17566/campos_512_v4
+1/17588/campos_512_v4
+1/17854/campos_512_v4
+1/17920/campos_512_v4
+1/18039/campos_512_v4
+1/18044/campos_512_v4
+1/18117/campos_512_v4
+1/18419/campos_512_v4
+1/18608/campos_512_v4
+1/18830/campos_512_v4
+1/18889/campos_512_v4
+1/18977/campos_512_v4
+1/19029/campos_512_v4
+1/19069/campos_512_v4
+1/19139/campos_512_v4
+1/19261/campos_512_v4
+1/19452/campos_512_v4
+1/19864/campos_512_v4
+10/60059/campos_512_v4
+10/60193/campos_512_v4
+10/60260/campos_512_v4
+10/60842/campos_512_v4
+10/60982/campos_512_v4
+10/61083/campos_512_v4
+10/61127/campos_512_v4
+10/61229/campos_512_v4
+10/61323/campos_512_v4
+10/61597/campos_512_v4
+10/61750/campos_512_v4
+10/61925/campos_512_v4
+10/62052/campos_512_v4
+10/62301/campos_512_v4
+10/62330/campos_512_v4
+10/62333/campos_512_v4
+10/62427/campos_512_v4
+10/62506/campos_512_v4
+10/62513/campos_512_v4
+10/62531/campos_512_v4
+10/62594/campos_512_v4
+10/62617/campos_512_v4
+10/62622/campos_512_v4
+10/63074/campos_512_v4
+10/63177/campos_512_v4
+10/63283/campos_512_v4
+10/63373/campos_512_v4
+10/63828/campos_512_v4
+10/63861/campos_512_v4
+10/64227/campos_512_v4
+10/64330/campos_512_v4
+10/64506/campos_512_v4
+10/64530/campos_512_v4
+10/64546/campos_512_v4
+10/64579/campos_512_v4
+10/64601/campos_512_v4
+10/64703/campos_512_v4
+10/64750/campos_512_v4
+10/64888/campos_512_v4
+100/510149/campos_512_v4
+100/510167/campos_512_v4
+100/510334/campos_512_v4
+100/510360/campos_512_v4
+100/510429/campos_512_v4
+100/510512/campos_512_v4
+100/510603/campos_512_v4
+100/510756/campos_512_v4
+100/510845/campos_512_v4
+100/510967/campos_512_v4
+100/511036/campos_512_v4
+100/511126/campos_512_v4
+100/511170/campos_512_v4
+100/511174/campos_512_v4
+100/511245/campos_512_v4
+100/511371/campos_512_v4
+100/511408/campos_512_v4
+100/511527/campos_512_v4
+100/511537/campos_512_v4
+100/511560/campos_512_v4
+100/511860/campos_512_v4
+100/511934/campos_512_v4
+100/511935/campos_512_v4
+100/512009/campos_512_v4
+100/512116/campos_512_v4
+100/512152/campos_512_v4
+100/512321/campos_512_v4
+100/512369/campos_512_v4
+100/512388/campos_512_v4
+100/512501/campos_512_v4
+100/512546/campos_512_v4
+100/512827/campos_512_v4
+100/512851/campos_512_v4
+100/513104/campos_512_v4
+100/513170/campos_512_v4
+100/513185/campos_512_v4
+100/513191/campos_512_v4
+100/513259/campos_512_v4
+100/513279/campos_512_v4
+100/513730/campos_512_v4
+100/513911/campos_512_v4
+100/513949/campos_512_v4
+100/514041/campos_512_v4
+100/514348/campos_512_v4
+100/514472/campos_512_v4
+100/514635/campos_512_v4
+101/515039/campos_512_v4
+101/515119/campos_512_v4
+101/515184/campos_512_v4
+101/515462/campos_512_v4
+101/515466/campos_512_v4
+101/515866/campos_512_v4
+101/515910/campos_512_v4
+101/516053/campos_512_v4
+101/516243/campos_512_v4
+101/516248/campos_512_v4
+101/516305/campos_512_v4
+101/516318/campos_512_v4
+101/516334/campos_512_v4
+101/516369/campos_512_v4
+101/516550/campos_512_v4
+101/516770/campos_512_v4
+101/516861/campos_512_v4
+101/516898/campos_512_v4
+101/516968/campos_512_v4
+101/517077/campos_512_v4
+101/517116/campos_512_v4
+101/517148/campos_512_v4
+101/517312/campos_512_v4
+101/517313/campos_512_v4
+101/517372/campos_512_v4
+101/517544/campos_512_v4
+101/517680/campos_512_v4
+101/517760/campos_512_v4
+101/517896/campos_512_v4
+101/517900/campos_512_v4
+101/518170/campos_512_v4
+101/518221/campos_512_v4
+101/518237/campos_512_v4
+101/518257/campos_512_v4
+101/518336/campos_512_v4
+101/518345/campos_512_v4
+101/518369/campos_512_v4
+101/518454/campos_512_v4
+101/518576/campos_512_v4
+101/518691/campos_512_v4
+101/518707/campos_512_v4
+101/518739/campos_512_v4
+101/518886/campos_512_v4
+101/519126/campos_512_v4
+101/519131/campos_512_v4
+101/519215/campos_512_v4
+101/519262/campos_512_v4
+101/519335/campos_512_v4
+101/519368/campos_512_v4
+101/519482/campos_512_v4
+101/519530/campos_512_v4
+101/519856/campos_512_v4
+101/519936/campos_512_v4
+101/519942/campos_512_v4
+101/519978/campos_512_v4
+102/520103/campos_512_v4
+102/520151/campos_512_v4
+102/520234/campos_512_v4
+102/520545/campos_512_v4
+102/520630/campos_512_v4
+102/520652/campos_512_v4
+102/520895/campos_512_v4
+102/520903/campos_512_v4
+102/520937/campos_512_v4
+102/520958/campos_512_v4
+102/521035/campos_512_v4
+102/521048/campos_512_v4
+102/521097/campos_512_v4
+102/521243/campos_512_v4
+102/521263/campos_512_v4
+102/521512/campos_512_v4
+102/521539/campos_512_v4
+102/521638/campos_512_v4
+102/521777/campos_512_v4
+102/521810/campos_512_v4
+102/521923/campos_512_v4
+102/521996/campos_512_v4
+102/522042/campos_512_v4
+102/522108/campos_512_v4
+102/522339/campos_512_v4
+102/522420/campos_512_v4
+102/522625/campos_512_v4
+102/522653/campos_512_v4
+102/522845/campos_512_v4
+102/522846/campos_512_v4
+102/522856/campos_512_v4
+102/523083/campos_512_v4
+102/523177/campos_512_v4
+102/523225/campos_512_v4
+102/523330/campos_512_v4
+102/523333/campos_512_v4
+102/523420/campos_512_v4
+102/523501/campos_512_v4
+102/523571/campos_512_v4
+102/523574/campos_512_v4
+102/523598/campos_512_v4
+102/523632/campos_512_v4
+102/523726/campos_512_v4
+102/523798/campos_512_v4
+102/523811/campos_512_v4
+102/524105/campos_512_v4
+102/524141/campos_512_v4
+102/524158/campos_512_v4
+102/524162/campos_512_v4
+102/524169/campos_512_v4
+102/524196/campos_512_v4
+102/524204/campos_512_v4
+102/524228/campos_512_v4
+102/524266/campos_512_v4
+102/524316/campos_512_v4
+102/524327/campos_512_v4
+102/524565/campos_512_v4
+102/524836/campos_512_v4
+102/524891/campos_512_v4
+103/525086/campos_512_v4
+103/525111/campos_512_v4
+103/525185/campos_512_v4
+103/525193/campos_512_v4
+103/525232/campos_512_v4
+103/525293/campos_512_v4
+103/525409/campos_512_v4
+103/525499/campos_512_v4
+103/525500/campos_512_v4
+103/525701/campos_512_v4
+103/525815/campos_512_v4
+103/525823/campos_512_v4
+103/525876/campos_512_v4
+103/525904/campos_512_v4
+103/525909/campos_512_v4
+103/526058/campos_512_v4
+103/526106/campos_512_v4
+103/526117/campos_512_v4
+103/526151/campos_512_v4
+103/526201/campos_512_v4
+103/526258/campos_512_v4
+103/526262/campos_512_v4
+103/526264/campos_512_v4
+103/526298/campos_512_v4
+103/526343/campos_512_v4
+103/526513/campos_512_v4
+103/526884/campos_512_v4
+103/526974/campos_512_v4
+103/527031/campos_512_v4
+103/527134/campos_512_v4
+103/527163/campos_512_v4
+103/527198/campos_512_v4
+103/527207/campos_512_v4
+103/527285/campos_512_v4
+103/527400/campos_512_v4
+103/527471/campos_512_v4
+103/527501/campos_512_v4
+103/527559/campos_512_v4
+103/527574/campos_512_v4
+103/527613/campos_512_v4
+103/527691/campos_512_v4
+103/527721/campos_512_v4
+103/527928/campos_512_v4
+103/527945/campos_512_v4
+103/528075/campos_512_v4
+103/528144/campos_512_v4
+103/528234/campos_512_v4
+103/528407/campos_512_v4
+103/528441/campos_512_v4
+103/528607/campos_512_v4
+103/528662/campos_512_v4
+103/528712/campos_512_v4
+103/528809/campos_512_v4
+103/528964/campos_512_v4
+103/528985/campos_512_v4
+103/529015/campos_512_v4
+103/529188/campos_512_v4
+103/529234/campos_512_v4
+103/529296/campos_512_v4
+103/529342/campos_512_v4
+103/529439/campos_512_v4
+103/529673/campos_512_v4
+103/529875/campos_512_v4
+103/529929/campos_512_v4
+103/529934/campos_512_v4
+104/530026/campos_512_v4
+104/530072/campos_512_v4
+104/530209/campos_512_v4
+104/530270/campos_512_v4
+104/530475/campos_512_v4
+104/530501/campos_512_v4
+104/530528/campos_512_v4
+104/530568/campos_512_v4
+104/530607/campos_512_v4
+104/530714/campos_512_v4
+104/530736/campos_512_v4
+104/530776/campos_512_v4
+104/530798/campos_512_v4
+104/530884/campos_512_v4
+104/531010/campos_512_v4
+104/531117/campos_512_v4
+104/531190/campos_512_v4
+104/531191/campos_512_v4
+104/531218/campos_512_v4
+104/531224/campos_512_v4
+104/531297/campos_512_v4
+104/531326/campos_512_v4
+104/531372/campos_512_v4
+104/531467/campos_512_v4
+104/531520/campos_512_v4
+104/531549/campos_512_v4
+104/531555/campos_512_v4
+104/531788/campos_512_v4
+104/531813/campos_512_v4
+104/532013/campos_512_v4
+104/532061/campos_512_v4
+104/532153/campos_512_v4
+104/532261/campos_512_v4
+104/532264/campos_512_v4
+104/532400/campos_512_v4
+104/532444/campos_512_v4
+104/532529/campos_512_v4
+104/532600/campos_512_v4
+104/532618/campos_512_v4
+104/532667/campos_512_v4
+104/532772/campos_512_v4
+104/532783/campos_512_v4
+104/532817/campos_512_v4
+104/532858/campos_512_v4
+104/532911/campos_512_v4
+104/533125/campos_512_v4
+104/533127/campos_512_v4
+104/533211/campos_512_v4
+104/533212/campos_512_v4
+104/533361/campos_512_v4
+104/533373/campos_512_v4
+104/533392/campos_512_v4
+104/533403/campos_512_v4
+104/533508/campos_512_v4
+104/533527/campos_512_v4
+104/533582/campos_512_v4
+104/533915/campos_512_v4
+104/533969/campos_512_v4
+104/534003/campos_512_v4
+104/534008/campos_512_v4
+104/534055/campos_512_v4
+104/534065/campos_512_v4
+104/534111/campos_512_v4
+104/534143/campos_512_v4
+104/534211/campos_512_v4
+104/534246/campos_512_v4
+104/534251/campos_512_v4
+104/534255/campos_512_v4
+104/534265/campos_512_v4
+104/534270/campos_512_v4
+104/534349/campos_512_v4
+104/534509/campos_512_v4
+104/534673/campos_512_v4
+104/534735/campos_512_v4
+104/534837/campos_512_v4
+104/534851/campos_512_v4
+104/534893/campos_512_v4
+104/534921/campos_512_v4
+104/534940/campos_512_v4
+104/534959/campos_512_v4
+105/535030/campos_512_v4
+105/535135/campos_512_v4
+105/535219/campos_512_v4
+105/535417/campos_512_v4
+105/535452/campos_512_v4
+105/535488/campos_512_v4
+105/535530/campos_512_v4
+105/535688/campos_512_v4
+105/535715/campos_512_v4
+105/535729/campos_512_v4
+105/536002/campos_512_v4
+105/536257/campos_512_v4
+105/536497/campos_512_v4
+105/536793/campos_512_v4
+105/536825/campos_512_v4
+105/536859/campos_512_v4
+105/536938/campos_512_v4
+105/536981/campos_512_v4
+105/536989/campos_512_v4
+105/537002/campos_512_v4
+105/537056/campos_512_v4
+105/537326/campos_512_v4
+105/537332/campos_512_v4
+105/537423/campos_512_v4
+105/537430/campos_512_v4
+105/537527/campos_512_v4
+105/537618/campos_512_v4
+105/537625/campos_512_v4
+105/537632/campos_512_v4
+105/537675/campos_512_v4
+105/537901/campos_512_v4
+105/538052/campos_512_v4
+105/538091/campos_512_v4
+105/538164/campos_512_v4
+105/538272/campos_512_v4
+105/538463/campos_512_v4
+105/538488/campos_512_v4
+105/538625/campos_512_v4
+105/538667/campos_512_v4
+105/538757/campos_512_v4
+105/538788/campos_512_v4
+105/538958/campos_512_v4
+105/538963/campos_512_v4
+105/539000/campos_512_v4
+105/539015/campos_512_v4
+105/539093/campos_512_v4
+105/539180/campos_512_v4
+105/539198/campos_512_v4
+105/539225/campos_512_v4
+105/539319/campos_512_v4
+105/539421/campos_512_v4
+105/539518/campos_512_v4
+105/539602/campos_512_v4
+105/539625/campos_512_v4
+105/539717/campos_512_v4
+105/539800/campos_512_v4
+106/540084/campos_512_v4
+106/540212/campos_512_v4
+106/540292/campos_512_v4
+106/540351/campos_512_v4
+106/540413/campos_512_v4
+106/540607/campos_512_v4
+106/540618/campos_512_v4
+106/540664/campos_512_v4
+106/540821/campos_512_v4
+106/540847/campos_512_v4
+106/540935/campos_512_v4
+106/540959/campos_512_v4
+106/541013/campos_512_v4
+106/541047/campos_512_v4
+106/541079/campos_512_v4
+106/541135/campos_512_v4
+106/541209/campos_512_v4
+106/541275/campos_512_v4
+106/541332/campos_512_v4
+106/541593/campos_512_v4
+106/541604/campos_512_v4
+106/541808/campos_512_v4
+106/541857/campos_512_v4
+106/541893/campos_512_v4
+106/541924/campos_512_v4
+106/542036/campos_512_v4
+106/542188/campos_512_v4
+106/542199/campos_512_v4
+106/542443/campos_512_v4
+106/542544/campos_512_v4
+106/542572/campos_512_v4
+106/542757/campos_512_v4
+106/542778/campos_512_v4
+106/542831/campos_512_v4
+106/542869/campos_512_v4
+106/542880/campos_512_v4
+106/543203/campos_512_v4
+106/543308/campos_512_v4
+106/543342/campos_512_v4
+106/543348/campos_512_v4
+106/543401/campos_512_v4
+106/543487/campos_512_v4
+106/543490/campos_512_v4
+106/543606/campos_512_v4
+106/543785/campos_512_v4
+106/543985/campos_512_v4
+106/544038/campos_512_v4
+106/544096/campos_512_v4
+106/544123/campos_512_v4
+106/544226/campos_512_v4
+106/544232/campos_512_v4
+106/544316/campos_512_v4
+106/544362/campos_512_v4
+106/544393/campos_512_v4
+106/544530/campos_512_v4
+106/544703/campos_512_v4
+106/544748/campos_512_v4
+106/544829/campos_512_v4
+106/544872/campos_512_v4
+107/545039/campos_512_v4
+107/545198/campos_512_v4
+107/545207/campos_512_v4
+107/545332/campos_512_v4
+107/545397/campos_512_v4
+107/545447/campos_512_v4
+107/545646/campos_512_v4
+107/545713/campos_512_v4
+107/545824/campos_512_v4
+107/545830/campos_512_v4
+107/545894/campos_512_v4
+107/545961/campos_512_v4
+107/546110/campos_512_v4
+107/546246/campos_512_v4
+107/546250/campos_512_v4
+107/546256/campos_512_v4
+107/546392/campos_512_v4
+107/546422/campos_512_v4
+107/546535/campos_512_v4
+107/546557/campos_512_v4
+107/546650/campos_512_v4
+107/546656/campos_512_v4
+107/546770/campos_512_v4
+107/546850/campos_512_v4
+107/546905/campos_512_v4
+107/546939/campos_512_v4
+107/546953/campos_512_v4
+107/546979/campos_512_v4
+107/547008/campos_512_v4
+107/547019/campos_512_v4
+107/547039/campos_512_v4
+107/547070/campos_512_v4
+107/547126/campos_512_v4
+107/547130/campos_512_v4
+107/547375/campos_512_v4
+107/547421/campos_512_v4
+107/547426/campos_512_v4
+107/547500/campos_512_v4
+107/547505/campos_512_v4
+107/547589/campos_512_v4
+107/547777/campos_512_v4
+107/547867/campos_512_v4
+107/547933/campos_512_v4
+107/547936/campos_512_v4
+107/548037/campos_512_v4
+107/548247/campos_512_v4
+107/548258/campos_512_v4
+107/548362/campos_512_v4
+107/548422/campos_512_v4
+107/548437/campos_512_v4
+107/548618/campos_512_v4
+107/548816/campos_512_v4
+107/548897/campos_512_v4
+107/549093/campos_512_v4
+107/549095/campos_512_v4
+107/549282/campos_512_v4
+107/549286/campos_512_v4
+107/549359/campos_512_v4
+107/549385/campos_512_v4
+107/549592/campos_512_v4
+107/549622/campos_512_v4
+107/549670/campos_512_v4
+107/549728/campos_512_v4
+107/549765/campos_512_v4
+107/549790/campos_512_v4
+107/549837/campos_512_v4
+108/550172/campos_512_v4
+108/550175/campos_512_v4
+108/550359/campos_512_v4
+108/550377/campos_512_v4
+108/550501/campos_512_v4
+108/550758/campos_512_v4
+108/550806/campos_512_v4
+108/550837/campos_512_v4
+108/551160/campos_512_v4
+108/551165/campos_512_v4
+108/551259/campos_512_v4
+108/551355/campos_512_v4
+108/551563/campos_512_v4
+108/551578/campos_512_v4
+108/551581/campos_512_v4
+108/551701/campos_512_v4
+108/551755/campos_512_v4
+108/551898/campos_512_v4
+108/551982/campos_512_v4
+108/552008/campos_512_v4
+108/552015/campos_512_v4
+108/552155/campos_512_v4
+108/552213/campos_512_v4
+108/552217/campos_512_v4
+108/552306/campos_512_v4
+108/552316/campos_512_v4
+108/552376/campos_512_v4
+108/552380/campos_512_v4
+108/552446/campos_512_v4
+108/552455/campos_512_v4
+108/552457/campos_512_v4
+108/552543/campos_512_v4
+108/552589/campos_512_v4
+108/552618/campos_512_v4
+108/552985/campos_512_v4
+108/553007/campos_512_v4
+108/553108/campos_512_v4
+108/553127/campos_512_v4
+108/553222/campos_512_v4
+108/553326/campos_512_v4
+108/553327/campos_512_v4
+108/553343/campos_512_v4
+108/553346/campos_512_v4
+108/553360/campos_512_v4
+108/553433/campos_512_v4
+108/553467/campos_512_v4
+108/553624/campos_512_v4
+108/553775/campos_512_v4
+108/553792/campos_512_v4
+108/553851/campos_512_v4
+108/554142/campos_512_v4
+108/554334/campos_512_v4
+108/554555/campos_512_v4
+108/554575/campos_512_v4
+108/554695/campos_512_v4
+108/554837/campos_512_v4
+108/554903/campos_512_v4
+108/555000/campos_512_v4
+109/555018/campos_512_v4
+109/555089/campos_512_v4
+109/555108/campos_512_v4
+109/555229/campos_512_v4
+109/555259/campos_512_v4
+109/555265/campos_512_v4
+109/555301/campos_512_v4
+109/555327/campos_512_v4
+109/555474/campos_512_v4
+109/555512/campos_512_v4
+109/555609/campos_512_v4
+109/555723/campos_512_v4
+109/555827/campos_512_v4
+109/555941/campos_512_v4
+109/556075/campos_512_v4
+109/556149/campos_512_v4
+109/556153/campos_512_v4
+109/556199/campos_512_v4
+109/556267/campos_512_v4
+109/556306/campos_512_v4
+109/556452/campos_512_v4
+109/556464/campos_512_v4
+109/556573/campos_512_v4
+109/556575/campos_512_v4
+109/556617/campos_512_v4
+109/556908/campos_512_v4
+109/556920/campos_512_v4
+109/557158/campos_512_v4
+109/557354/campos_512_v4
+109/557379/campos_512_v4
+109/557382/campos_512_v4
+109/557585/campos_512_v4
+109/557630/campos_512_v4
+109/557857/campos_512_v4
+109/557939/campos_512_v4
+109/558084/campos_512_v4
+109/558102/campos_512_v4
+109/558203/campos_512_v4
+109/558250/campos_512_v4
+109/558423/campos_512_v4
+109/558457/campos_512_v4
+109/558467/campos_512_v4
+109/558680/campos_512_v4
+109/558695/campos_512_v4
+109/558794/campos_512_v4
+109/558930/campos_512_v4
+109/558970/campos_512_v4
+109/559014/campos_512_v4
+109/559112/campos_512_v4
+109/559174/campos_512_v4
+109/559257/campos_512_v4
+109/559365/campos_512_v4
+109/559393/campos_512_v4
+109/559405/campos_512_v4
+109/559527/campos_512_v4
+109/559941/campos_512_v4
+11/65082/campos_512_v4
+11/65202/campos_512_v4
+11/65262/campos_512_v4
+11/65347/campos_512_v4
+11/65521/campos_512_v4
+11/65661/campos_512_v4
+11/65712/campos_512_v4
+11/65869/campos_512_v4
+11/66264/campos_512_v4
+11/66295/campos_512_v4
+11/66322/campos_512_v4
+11/66428/campos_512_v4
+11/66577/campos_512_v4
+11/66621/campos_512_v4
+11/66846/campos_512_v4
+11/66976/campos_512_v4
+11/67100/campos_512_v4
+11/67215/campos_512_v4
+11/67385/campos_512_v4
+11/67403/campos_512_v4
+11/67422/campos_512_v4
+11/67552/campos_512_v4
+11/67681/campos_512_v4
+11/67697/campos_512_v4
+11/67721/campos_512_v4
+11/67728/campos_512_v4
+11/67747/campos_512_v4
+11/67902/campos_512_v4
+11/67921/campos_512_v4
+11/68249/campos_512_v4
+11/68418/campos_512_v4
+11/68587/campos_512_v4
+11/68930/campos_512_v4
+11/68995/campos_512_v4
+11/69460/campos_512_v4
+11/69714/campos_512_v4
+11/69810/campos_512_v4
+11/69812/campos_512_v4
+11/69863/campos_512_v4
+11/69952/campos_512_v4
+11/70001/campos_512_v4
+110/560371/campos_512_v4
+110/560432/campos_512_v4
+110/560460/campos_512_v4
+110/560647/campos_512_v4
+110/560660/campos_512_v4
+110/560667/campos_512_v4
+110/560762/campos_512_v4
+110/560765/campos_512_v4
+110/560776/campos_512_v4
+110/560858/campos_512_v4
+110/560870/campos_512_v4
+110/560954/campos_512_v4
+110/560966/campos_512_v4
+110/561122/campos_512_v4
+110/561187/campos_512_v4
+110/561255/campos_512_v4
+110/561306/campos_512_v4
+110/561320/campos_512_v4
+110/561367/campos_512_v4
+110/561375/campos_512_v4
+110/561425/campos_512_v4
+110/561459/campos_512_v4
+110/561528/campos_512_v4
+110/561557/campos_512_v4
+110/561592/campos_512_v4
+110/561694/campos_512_v4
+110/561727/campos_512_v4
+110/561732/campos_512_v4
+110/561856/campos_512_v4
+110/561862/campos_512_v4
+110/562033/campos_512_v4
+110/562168/campos_512_v4
+110/562208/campos_512_v4
+110/562212/campos_512_v4
+110/562320/campos_512_v4
+110/562360/campos_512_v4
+110/562381/campos_512_v4
+110/562433/campos_512_v4
+110/562538/campos_512_v4
+110/562609/campos_512_v4
+110/562858/campos_512_v4
+110/562956/campos_512_v4
+110/563009/campos_512_v4
+110/563311/campos_512_v4
+110/563354/campos_512_v4
+110/563368/campos_512_v4
+110/563396/campos_512_v4
+110/563458/campos_512_v4
+110/563774/campos_512_v4
+110/563831/campos_512_v4
+110/563891/campos_512_v4
+110/563904/campos_512_v4
+110/564118/campos_512_v4
+110/564229/campos_512_v4
+110/564256/campos_512_v4
+110/564277/campos_512_v4
+110/564280/campos_512_v4
+110/564314/campos_512_v4
+110/564355/campos_512_v4
+110/564374/campos_512_v4
+110/564451/campos_512_v4
+110/564460/campos_512_v4
+110/564496/campos_512_v4
+110/564502/campos_512_v4
+110/564560/campos_512_v4
+110/564662/campos_512_v4
+110/564746/campos_512_v4
+110/564764/campos_512_v4
+110/564792/campos_512_v4
+110/564804/campos_512_v4
+110/564850/campos_512_v4
+110/564913/campos_512_v4
+110/564915/campos_512_v4
+111/565020/campos_512_v4
+111/565041/campos_512_v4
+111/565179/campos_512_v4
+111/565248/campos_512_v4
+111/565729/campos_512_v4
+111/565837/campos_512_v4
+111/565843/campos_512_v4
+111/565846/campos_512_v4
+111/565876/campos_512_v4
+111/565924/campos_512_v4
+111/566214/campos_512_v4
+111/566253/campos_512_v4
+111/566320/campos_512_v4
+111/566435/campos_512_v4
+111/566542/campos_512_v4
+111/566585/campos_512_v4
+111/566613/campos_512_v4
+111/566692/campos_512_v4
+111/566820/campos_512_v4
+111/566976/campos_512_v4
+111/567024/campos_512_v4
+111/567196/campos_512_v4
+111/567204/campos_512_v4
+111/567228/campos_512_v4
+111/567277/campos_512_v4
+111/567288/campos_512_v4
+111/567332/campos_512_v4
+111/567359/campos_512_v4
+111/567664/campos_512_v4
+111/567691/campos_512_v4
+111/567821/campos_512_v4
+111/567876/campos_512_v4
+111/567947/campos_512_v4
+111/567955/campos_512_v4
+111/567970/campos_512_v4
+111/567971/campos_512_v4
+111/568062/campos_512_v4
+111/568094/campos_512_v4
+111/568122/campos_512_v4
+111/568178/campos_512_v4
+111/568213/campos_512_v4
+111/568220/campos_512_v4
+111/568409/campos_512_v4
+111/568434/campos_512_v4
+111/568514/campos_512_v4
+111/568633/campos_512_v4
+111/568662/campos_512_v4
+111/568700/campos_512_v4
+111/568840/campos_512_v4
+111/569035/campos_512_v4
+111/569091/campos_512_v4
+111/569103/campos_512_v4
+111/569261/campos_512_v4
+111/569298/campos_512_v4
+111/569426/campos_512_v4
+111/569432/campos_512_v4
+111/569461/campos_512_v4
+111/569463/campos_512_v4
+111/569567/campos_512_v4
+111/569597/campos_512_v4
+111/569652/campos_512_v4
+111/569666/campos_512_v4
+111/569798/campos_512_v4
+111/569801/campos_512_v4
+111/569813/campos_512_v4
+111/569979/campos_512_v4
+112/570031/campos_512_v4
+112/570266/campos_512_v4
+112/570289/campos_512_v4
+112/570351/campos_512_v4
+112/570508/campos_512_v4
+112/570606/campos_512_v4
+112/570647/campos_512_v4
+112/570682/campos_512_v4
+112/570704/campos_512_v4
+112/570751/campos_512_v4
+112/570775/campos_512_v4
+112/570851/campos_512_v4
+112/570860/campos_512_v4
+112/571039/campos_512_v4
+112/571116/campos_512_v4
+112/571180/campos_512_v4
+112/571190/campos_512_v4
+112/571269/campos_512_v4
+112/571278/campos_512_v4
+112/571283/campos_512_v4
+112/571437/campos_512_v4
+112/571469/campos_512_v4
+112/571618/campos_512_v4
+112/571655/campos_512_v4
+112/571758/campos_512_v4
+112/571918/campos_512_v4
+112/571967/campos_512_v4
+112/572099/campos_512_v4
+112/572158/campos_512_v4
+112/572323/campos_512_v4
+112/572370/campos_512_v4
+112/572486/campos_512_v4
+112/572836/campos_512_v4
+112/573111/campos_512_v4
+112/573181/campos_512_v4
+112/573182/campos_512_v4
+112/573204/campos_512_v4
+112/573261/campos_512_v4
+112/573287/campos_512_v4
+112/573315/campos_512_v4
+112/573379/campos_512_v4
+112/573652/campos_512_v4
+112/573867/campos_512_v4
+112/573986/campos_512_v4
+112/574047/campos_512_v4
+112/574147/campos_512_v4
+112/574204/campos_512_v4
+112/574270/campos_512_v4
+112/574503/campos_512_v4
+112/574547/campos_512_v4
+112/574594/campos_512_v4
+112/574607/campos_512_v4
+112/574620/campos_512_v4
+112/574621/campos_512_v4
+112/574668/campos_512_v4
+112/574708/campos_512_v4
+112/574809/campos_512_v4
+112/574821/campos_512_v4
+112/574927/campos_512_v4
+112/574954/campos_512_v4
+112/574968/campos_512_v4
+112/574983/campos_512_v4
+113/575022/campos_512_v4
+113/575064/campos_512_v4
+113/575143/campos_512_v4
+113/575261/campos_512_v4
+113/575280/campos_512_v4
+113/575491/campos_512_v4
+113/575519/campos_512_v4
+113/575546/campos_512_v4
+113/575627/campos_512_v4
+113/575714/campos_512_v4
+113/575763/campos_512_v4
+113/575775/campos_512_v4
+113/575846/campos_512_v4
+113/575873/campos_512_v4
+113/575978/campos_512_v4
+113/576047/campos_512_v4
+113/576075/campos_512_v4
+113/576128/campos_512_v4
+113/576232/campos_512_v4
+113/576410/campos_512_v4
+113/576524/campos_512_v4
+113/576543/campos_512_v4
+113/576647/campos_512_v4
+113/576729/campos_512_v4
+113/576732/campos_512_v4
+113/576780/campos_512_v4
+113/576867/campos_512_v4
+113/576958/campos_512_v4
+113/577002/campos_512_v4
+113/577043/campos_512_v4
+113/577118/campos_512_v4
+113/577138/campos_512_v4
+113/577190/campos_512_v4
+113/577239/campos_512_v4
+113/577240/campos_512_v4
+113/577566/campos_512_v4
+113/577610/campos_512_v4
+113/577724/campos_512_v4
+113/577816/campos_512_v4
+113/577829/campos_512_v4
+113/577868/campos_512_v4
+113/577940/campos_512_v4
+113/577987/campos_512_v4
+113/578035/campos_512_v4
+113/578043/campos_512_v4
+113/578055/campos_512_v4
+113/578060/campos_512_v4
+113/578117/campos_512_v4
+113/578143/campos_512_v4
+113/578220/campos_512_v4
+113/578461/campos_512_v4
+113/578588/campos_512_v4
+113/578611/campos_512_v4
+113/578690/campos_512_v4
+113/578711/campos_512_v4
+113/578723/campos_512_v4
+113/578804/campos_512_v4
+113/578859/campos_512_v4
+113/578963/campos_512_v4
+113/578994/campos_512_v4
+113/579000/campos_512_v4
+113/579058/campos_512_v4
+113/579082/campos_512_v4
+113/579092/campos_512_v4
+113/579107/campos_512_v4
+113/579151/campos_512_v4
+113/579195/campos_512_v4
+113/579202/campos_512_v4
+113/579224/campos_512_v4
+113/579291/campos_512_v4
+113/579578/campos_512_v4
+113/579605/campos_512_v4
+113/579739/campos_512_v4
+113/579825/campos_512_v4
+113/579864/campos_512_v4
+113/579890/campos_512_v4
+113/579914/campos_512_v4
+113/579980/campos_512_v4
+113/580001/campos_512_v4
+114/580057/campos_512_v4
+114/580444/campos_512_v4
+114/580529/campos_512_v4
+114/580646/campos_512_v4
+114/580685/campos_512_v4
+114/580731/campos_512_v4
+114/580745/campos_512_v4
+114/580809/campos_512_v4
+114/580857/campos_512_v4
+114/580897/campos_512_v4
+114/581109/campos_512_v4
+114/581126/campos_512_v4
+114/581196/campos_512_v4
+114/581244/campos_512_v4
+114/581302/campos_512_v4
+114/581426/campos_512_v4
+114/581488/campos_512_v4
+114/581694/campos_512_v4
+114/581706/campos_512_v4
+114/581723/campos_512_v4
+114/581926/campos_512_v4
+114/582055/campos_512_v4
+114/582447/campos_512_v4
+114/582527/campos_512_v4
+114/582760/campos_512_v4
+114/582858/campos_512_v4
+114/582950/campos_512_v4
+114/583004/campos_512_v4
+114/583121/campos_512_v4
+114/583277/campos_512_v4
+114/583319/campos_512_v4
+114/583357/campos_512_v4
+114/583429/campos_512_v4
+114/583555/campos_512_v4
+114/583581/campos_512_v4
+114/583837/campos_512_v4
+114/583876/campos_512_v4
+114/583899/campos_512_v4
+114/584008/campos_512_v4
+114/584116/campos_512_v4
+114/584150/campos_512_v4
+114/584183/campos_512_v4
+114/584285/campos_512_v4
+114/584305/campos_512_v4
+114/584352/campos_512_v4
+114/584411/campos_512_v4
+114/584554/campos_512_v4
+114/584579/campos_512_v4
+114/584610/campos_512_v4
+114/584634/campos_512_v4
+114/584700/campos_512_v4
+114/584752/campos_512_v4
+114/584771/campos_512_v4
+114/584805/campos_512_v4
+114/584810/campos_512_v4
+114/584841/campos_512_v4
+114/584880/campos_512_v4
+115/585046/campos_512_v4
+115/585056/campos_512_v4
+115/585126/campos_512_v4
+115/585341/campos_512_v4
+115/585379/campos_512_v4
+115/585414/campos_512_v4
+115/585491/campos_512_v4
+115/585571/campos_512_v4
+115/585628/campos_512_v4
+115/585758/campos_512_v4
+115/585816/campos_512_v4
+115/585835/campos_512_v4
+115/586040/campos_512_v4
+115/586155/campos_512_v4
+115/586252/campos_512_v4
+115/586256/campos_512_v4
+115/586452/campos_512_v4
+115/586522/campos_512_v4
+115/586575/campos_512_v4
+115/586718/campos_512_v4
+115/586841/campos_512_v4
+115/586877/campos_512_v4
+115/586951/campos_512_v4
+115/586976/campos_512_v4
+115/587081/campos_512_v4
+115/587114/campos_512_v4
+115/587206/campos_512_v4
+115/587276/campos_512_v4
+115/587468/campos_512_v4
+115/587605/campos_512_v4
+115/587966/campos_512_v4
+115/587969/campos_512_v4
+115/587978/campos_512_v4
+115/588154/campos_512_v4
+115/588178/campos_512_v4
+115/588190/campos_512_v4
+115/588240/campos_512_v4
+115/588404/campos_512_v4
+115/588483/campos_512_v4
+115/588511/campos_512_v4
+115/588514/campos_512_v4
+115/588743/campos_512_v4
+115/588989/campos_512_v4
+115/589036/campos_512_v4
+115/589169/campos_512_v4
+115/589172/campos_512_v4
+115/589236/campos_512_v4
+115/589289/campos_512_v4
+115/589324/campos_512_v4
+115/589376/campos_512_v4
+115/589399/campos_512_v4
+115/589452/campos_512_v4
+115/589774/campos_512_v4
+115/589876/campos_512_v4
+115/589898/campos_512_v4
+115/589988/campos_512_v4
+116/590035/campos_512_v4
+116/590065/campos_512_v4
+116/590096/campos_512_v4
+116/590110/campos_512_v4
+116/590200/campos_512_v4
+116/590310/campos_512_v4
+116/590393/campos_512_v4
+116/590405/campos_512_v4
+116/590496/campos_512_v4
+116/590499/campos_512_v4
+116/590597/campos_512_v4
+116/590603/campos_512_v4
+116/590839/campos_512_v4
+116/591118/campos_512_v4
+116/591162/campos_512_v4
+116/591226/campos_512_v4
+116/591269/campos_512_v4
+116/591341/campos_512_v4
+116/591388/campos_512_v4
+116/591503/campos_512_v4
+116/591608/campos_512_v4
+116/591626/campos_512_v4
+116/591902/campos_512_v4
+116/591975/campos_512_v4
+116/592185/campos_512_v4
+116/592234/campos_512_v4
+116/592384/campos_512_v4
+116/592511/campos_512_v4
+116/592583/campos_512_v4
+116/592633/campos_512_v4
+116/592687/campos_512_v4
+116/592888/campos_512_v4
+116/592971/campos_512_v4
+116/593064/campos_512_v4
+116/593182/campos_512_v4
+116/593186/campos_512_v4
+116/593287/campos_512_v4
+116/593362/campos_512_v4
+116/593593/campos_512_v4
+116/593617/campos_512_v4
+116/593646/campos_512_v4
+116/593672/campos_512_v4
+116/593824/campos_512_v4
+116/593831/campos_512_v4
+116/593845/campos_512_v4
+116/593848/campos_512_v4
+116/593879/campos_512_v4
+116/593933/campos_512_v4
+116/593949/campos_512_v4
+116/594009/campos_512_v4
+116/594019/campos_512_v4
+116/594026/campos_512_v4
+116/594122/campos_512_v4
+116/594276/campos_512_v4
+116/594465/campos_512_v4
+116/594498/campos_512_v4
+116/594528/campos_512_v4
+116/594531/campos_512_v4
+116/594538/campos_512_v4
+116/594562/campos_512_v4
+116/594570/campos_512_v4
+116/594581/campos_512_v4
+116/594664/campos_512_v4
+116/594725/campos_512_v4
+116/594749/campos_512_v4
+116/594753/campos_512_v4
+116/594773/campos_512_v4
+116/594805/campos_512_v4
+116/594971/campos_512_v4
+117/595148/campos_512_v4
+117/595227/campos_512_v4
+117/595288/campos_512_v4
+117/595296/campos_512_v4
+117/595353/campos_512_v4
+117/595355/campos_512_v4
+117/595357/campos_512_v4
+117/595610/campos_512_v4
+117/595676/campos_512_v4
+117/595772/campos_512_v4
+117/595866/campos_512_v4
+117/595907/campos_512_v4
+117/595931/campos_512_v4
+117/596055/campos_512_v4
+117/596109/campos_512_v4
+117/596156/campos_512_v4
+117/596217/campos_512_v4
+117/596248/campos_512_v4
+117/596329/campos_512_v4
+117/596333/campos_512_v4
+117/596638/campos_512_v4
+117/596782/campos_512_v4
+117/596910/campos_512_v4
+117/596933/campos_512_v4
+117/596962/campos_512_v4
+117/597067/campos_512_v4
+117/597166/campos_512_v4
+117/597323/campos_512_v4
+117/597324/campos_512_v4
+117/597457/campos_512_v4
+117/597536/campos_512_v4
+117/597563/campos_512_v4
+117/597626/campos_512_v4
+117/597656/campos_512_v4
+117/597662/campos_512_v4
+117/597763/campos_512_v4
+117/597770/campos_512_v4
+117/597787/campos_512_v4
+117/598026/campos_512_v4
+117/598035/campos_512_v4
+117/598047/campos_512_v4
+117/598073/campos_512_v4
+117/598078/campos_512_v4
+117/598128/campos_512_v4
+117/598152/campos_512_v4
+117/598220/campos_512_v4
+117/598233/campos_512_v4
+117/598285/campos_512_v4
+117/598419/campos_512_v4
+117/598434/campos_512_v4
+117/598687/campos_512_v4
+117/598693/campos_512_v4
+117/598702/campos_512_v4
+117/598708/campos_512_v4
+117/598838/campos_512_v4
+117/598869/campos_512_v4
+117/598899/campos_512_v4
+117/598930/campos_512_v4
+117/598939/campos_512_v4
+117/598967/campos_512_v4
+117/599007/campos_512_v4
+117/599056/campos_512_v4
+117/599222/campos_512_v4
+117/599256/campos_512_v4
+117/599315/campos_512_v4
+117/599319/campos_512_v4
+117/599376/campos_512_v4
+117/599501/campos_512_v4
+117/599606/campos_512_v4
+117/599612/campos_512_v4
+117/599643/campos_512_v4
+117/599683/campos_512_v4
+117/599915/campos_512_v4
+117/599976/campos_512_v4
+117/599991/campos_512_v4
+118/600183/campos_512_v4
+118/600327/campos_512_v4
+118/600365/campos_512_v4
+118/600421/campos_512_v4
+118/600488/campos_512_v4
+118/600551/campos_512_v4
+118/600755/campos_512_v4
+118/600846/campos_512_v4
+118/600871/campos_512_v4
+118/600890/campos_512_v4
+118/600902/campos_512_v4
+118/600967/campos_512_v4
+118/601136/campos_512_v4
+118/601194/campos_512_v4
+118/601528/campos_512_v4
+118/601560/campos_512_v4
+118/601622/campos_512_v4
+118/601650/campos_512_v4
+118/601695/campos_512_v4
+118/601874/campos_512_v4
+118/601910/campos_512_v4
+118/601969/campos_512_v4
+118/602022/campos_512_v4
+118/602103/campos_512_v4
+118/602151/campos_512_v4
+118/602159/campos_512_v4
+118/602640/campos_512_v4
+118/602722/campos_512_v4
+118/602767/campos_512_v4
+118/602866/campos_512_v4
+118/602943/campos_512_v4
+118/602962/campos_512_v4
+118/603040/campos_512_v4
+118/603072/campos_512_v4
+118/603123/campos_512_v4
+118/603180/campos_512_v4
+118/603280/campos_512_v4
+118/603403/campos_512_v4
+118/603426/campos_512_v4
+118/603439/campos_512_v4
+118/603527/campos_512_v4
+118/603596/campos_512_v4
+118/603643/campos_512_v4
+118/603731/campos_512_v4
+118/603750/campos_512_v4
+118/603870/campos_512_v4
+118/603871/campos_512_v4
+118/603885/campos_512_v4
+118/604010/campos_512_v4
+118/604081/campos_512_v4
+118/604082/campos_512_v4
+118/604092/campos_512_v4
+118/604204/campos_512_v4
+118/604674/campos_512_v4
+118/604800/campos_512_v4
+118/604929/campos_512_v4
+118/605001/campos_512_v4
+119/605171/campos_512_v4
+119/605176/campos_512_v4
+119/605233/campos_512_v4
+119/605551/campos_512_v4
+119/605857/campos_512_v4
+119/605889/campos_512_v4
+119/605890/campos_512_v4
+119/605962/campos_512_v4
+119/606174/campos_512_v4
+119/606367/campos_512_v4
+119/606407/campos_512_v4
+119/606425/campos_512_v4
+119/606578/campos_512_v4
+119/606596/campos_512_v4
+119/606626/campos_512_v4
+119/606653/campos_512_v4
+119/606662/campos_512_v4
+119/606672/campos_512_v4
+119/606683/campos_512_v4
+119/606768/campos_512_v4
+119/606781/campos_512_v4
+119/606790/campos_512_v4
+119/606897/campos_512_v4
+119/607303/campos_512_v4
+119/607400/campos_512_v4
+119/607419/campos_512_v4
+119/607520/campos_512_v4
+119/607551/campos_512_v4
+119/607698/campos_512_v4
+119/607706/campos_512_v4
+119/607766/campos_512_v4
+119/607814/campos_512_v4
+119/607866/campos_512_v4
+119/607920/campos_512_v4
+119/608365/campos_512_v4
+119/608469/campos_512_v4
+119/608546/campos_512_v4
+119/608555/campos_512_v4
+119/608623/campos_512_v4
+119/609035/campos_512_v4
+119/609096/campos_512_v4
+119/609285/campos_512_v4
+119/609359/campos_512_v4
+119/609448/campos_512_v4
+119/609524/campos_512_v4
+119/609572/campos_512_v4
+119/609776/campos_512_v4
+119/609798/campos_512_v4
+119/609991/campos_512_v4
+12/70056/campos_512_v4
+12/70071/campos_512_v4
+12/70187/campos_512_v4
+12/70331/campos_512_v4
+12/70402/campos_512_v4
+12/70678/campos_512_v4
+12/70699/campos_512_v4
+12/70863/campos_512_v4
+12/71041/campos_512_v4
+12/71055/campos_512_v4
+12/71076/campos_512_v4
+12/71248/campos_512_v4
+12/71345/campos_512_v4
+12/71667/campos_512_v4
+12/71748/campos_512_v4
+12/71829/campos_512_v4
+12/71843/campos_512_v4
+12/71852/campos_512_v4
+12/72000/campos_512_v4
+12/72002/campos_512_v4
+12/72146/campos_512_v4
+12/72170/campos_512_v4
+12/72415/campos_512_v4
+12/72458/campos_512_v4
+12/72490/campos_512_v4
+12/72561/campos_512_v4
+12/72592/campos_512_v4
+12/72835/campos_512_v4
+12/72966/campos_512_v4
+12/73022/campos_512_v4
+12/73092/campos_512_v4
+12/73442/campos_512_v4
+12/73548/campos_512_v4
+12/73794/campos_512_v4
+12/73883/campos_512_v4
+12/74003/campos_512_v4
+12/74281/campos_512_v4
+12/74292/campos_512_v4
+12/74311/campos_512_v4
+12/74508/campos_512_v4
+12/74619/campos_512_v4
+12/74756/campos_512_v4
+120/610031/campos_512_v4
+120/610288/campos_512_v4
+120/610342/campos_512_v4
+120/610393/campos_512_v4
+120/610478/campos_512_v4
+120/610575/campos_512_v4
+120/610594/campos_512_v4
+120/610784/campos_512_v4
+120/610922/campos_512_v4
+120/611021/campos_512_v4
+120/611151/campos_512_v4
+120/611186/campos_512_v4
+120/611332/campos_512_v4
+120/611788/campos_512_v4
+120/611846/campos_512_v4
+120/612208/campos_512_v4
+120/612342/campos_512_v4
+120/612642/campos_512_v4
+120/612646/campos_512_v4
+120/612732/campos_512_v4
+120/612739/campos_512_v4
+120/612844/campos_512_v4
+120/612883/campos_512_v4
+120/612888/campos_512_v4
+120/612921/campos_512_v4
+120/613351/campos_512_v4
+120/613359/campos_512_v4
+120/613514/campos_512_v4
+120/613578/campos_512_v4
+120/613684/campos_512_v4
+120/613745/campos_512_v4
+120/613776/campos_512_v4
+120/613850/campos_512_v4
+120/613851/campos_512_v4
+120/613966/campos_512_v4
+120/614115/campos_512_v4
+120/614191/campos_512_v4
+120/614243/campos_512_v4
+120/614332/campos_512_v4
+120/614365/campos_512_v4
+120/614453/campos_512_v4
+120/614479/campos_512_v4
+120/614506/campos_512_v4
+120/614579/campos_512_v4
+120/614756/campos_512_v4
+120/614772/campos_512_v4
+120/614806/campos_512_v4
+120/614808/campos_512_v4
+120/614926/campos_512_v4
+120/614939/campos_512_v4
+121/615014/campos_512_v4
+121/615144/campos_512_v4
+121/615286/campos_512_v4
+121/615318/campos_512_v4
+121/615341/campos_512_v4
+121/615423/campos_512_v4
+121/615530/campos_512_v4
+121/615892/campos_512_v4
+121/616052/campos_512_v4
+121/616081/campos_512_v4
+121/616185/campos_512_v4
+121/616426/campos_512_v4
+121/616634/campos_512_v4
+121/616693/campos_512_v4
+121/616965/campos_512_v4
+121/617032/campos_512_v4
+121/617132/campos_512_v4
+121/617274/campos_512_v4
+121/617350/campos_512_v4
+121/617381/campos_512_v4
+121/617440/campos_512_v4
+121/617494/campos_512_v4
+121/617605/campos_512_v4
+121/617770/campos_512_v4
+121/617810/campos_512_v4
+121/617865/campos_512_v4
+121/617908/campos_512_v4
+121/617950/campos_512_v4
+121/618211/campos_512_v4
+121/618366/campos_512_v4
+121/618558/campos_512_v4
+121/618592/campos_512_v4
+121/618643/campos_512_v4
+121/618647/campos_512_v4
+121/618760/campos_512_v4
+121/618824/campos_512_v4
+121/618853/campos_512_v4
+121/618885/campos_512_v4
+121/618956/campos_512_v4
+121/619038/campos_512_v4
+121/619108/campos_512_v4
+121/619288/campos_512_v4
+121/619420/campos_512_v4
+121/619480/campos_512_v4
+121/619584/campos_512_v4
+121/619664/campos_512_v4
+121/619780/campos_512_v4
+121/619917/campos_512_v4
+121/619935/campos_512_v4
+121/619974/campos_512_v4
+122/620112/campos_512_v4
+122/620119/campos_512_v4
+122/620153/campos_512_v4
+122/620167/campos_512_v4
+122/620208/campos_512_v4
+122/620234/campos_512_v4
+122/620337/campos_512_v4
+122/620342/campos_512_v4
+122/620355/campos_512_v4
+122/620572/campos_512_v4
+122/620584/campos_512_v4
+122/620627/campos_512_v4
+122/620756/campos_512_v4
+122/620761/campos_512_v4
+122/620781/campos_512_v4
+122/620782/campos_512_v4
+122/620816/campos_512_v4
+122/620982/campos_512_v4
+122/621252/campos_512_v4
+122/621266/campos_512_v4
+122/621379/campos_512_v4
+122/621393/campos_512_v4
+122/621461/campos_512_v4
+122/621496/campos_512_v4
+122/621744/campos_512_v4
+122/621781/campos_512_v4
+122/621923/campos_512_v4
+122/621991/campos_512_v4
+122/622045/campos_512_v4
+122/622065/campos_512_v4
+122/622163/campos_512_v4
+122/622166/campos_512_v4
+122/622318/campos_512_v4
+122/622323/campos_512_v4
+122/622371/campos_512_v4
+122/622474/campos_512_v4
+122/622579/campos_512_v4
+122/622605/campos_512_v4
+122/622662/campos_512_v4
+122/622701/campos_512_v4
+122/622703/campos_512_v4
+122/622710/campos_512_v4
+122/622743/campos_512_v4
+122/623022/campos_512_v4
+122/623091/campos_512_v4
+122/623205/campos_512_v4
+122/623481/campos_512_v4
+122/623512/campos_512_v4
+122/623581/campos_512_v4
+122/623639/campos_512_v4
+122/623652/campos_512_v4
+122/623808/campos_512_v4
+122/623829/campos_512_v4
+122/623912/campos_512_v4
+122/624021/campos_512_v4
+122/624123/campos_512_v4
+122/624189/campos_512_v4
+122/624206/campos_512_v4
+122/624343/campos_512_v4
+122/624390/campos_512_v4
+122/624395/campos_512_v4
+122/624489/campos_512_v4
+123/625039/campos_512_v4
+123/625060/campos_512_v4
+123/625209/campos_512_v4
+123/625279/campos_512_v4
+123/625281/campos_512_v4
+123/625411/campos_512_v4
+123/625759/campos_512_v4
+123/625795/campos_512_v4
+123/625833/campos_512_v4
+123/625837/campos_512_v4
+123/625888/campos_512_v4
+123/626050/campos_512_v4
+123/626137/campos_512_v4
+123/626323/campos_512_v4
+123/626398/campos_512_v4
+123/626404/campos_512_v4
+123/626420/campos_512_v4
+123/626422/campos_512_v4
+123/626723/campos_512_v4
+123/626790/campos_512_v4
+123/626844/campos_512_v4
+123/627138/campos_512_v4
+123/627149/campos_512_v4
+123/627167/campos_512_v4
+123/627274/campos_512_v4
+123/627622/campos_512_v4
+123/627641/campos_512_v4
+123/627697/campos_512_v4
+123/627725/campos_512_v4
+123/628121/campos_512_v4
+123/628305/campos_512_v4
+123/628364/campos_512_v4
+123/628411/campos_512_v4
+123/628856/campos_512_v4
+123/628968/campos_512_v4
+123/628973/campos_512_v4
+123/629171/campos_512_v4
+123/629242/campos_512_v4
+123/629344/campos_512_v4
+123/629554/campos_512_v4
+123/629580/campos_512_v4
+123/629584/campos_512_v4
+123/629634/campos_512_v4
+123/629736/campos_512_v4
+123/629795/campos_512_v4
+124/630024/campos_512_v4
+124/630136/campos_512_v4
+124/630139/campos_512_v4
+124/630457/campos_512_v4
+124/630543/campos_512_v4
+124/630598/campos_512_v4
+124/630699/campos_512_v4
+124/630727/campos_512_v4
+124/630730/campos_512_v4
+124/630754/campos_512_v4
+124/630773/campos_512_v4
+124/630904/campos_512_v4
+124/631017/campos_512_v4
+124/631026/campos_512_v4
+124/631048/campos_512_v4
+124/631203/campos_512_v4
+124/631295/campos_512_v4
+124/631323/campos_512_v4
+124/631414/campos_512_v4
+124/631531/campos_512_v4
+124/631600/campos_512_v4
+124/631686/campos_512_v4
+124/631912/campos_512_v4
+124/631926/campos_512_v4
+124/631982/campos_512_v4
+124/631993/campos_512_v4
+124/631996/campos_512_v4
+124/632022/campos_512_v4
+124/632027/campos_512_v4
+124/632126/campos_512_v4
+124/632327/campos_512_v4
+124/632375/campos_512_v4
+124/632377/campos_512_v4
+124/632516/campos_512_v4
+124/632531/campos_512_v4
+124/632653/campos_512_v4
+124/632921/campos_512_v4
+124/632934/campos_512_v4
+124/632988/campos_512_v4
+124/633155/campos_512_v4
+124/633208/campos_512_v4
+124/633352/campos_512_v4
+124/633356/campos_512_v4
+124/633427/campos_512_v4
+124/633658/campos_512_v4
+124/633706/campos_512_v4
+124/633731/campos_512_v4
+124/633940/campos_512_v4
+124/633977/campos_512_v4
+124/634036/campos_512_v4
+124/634254/campos_512_v4
+124/634443/campos_512_v4
+124/634473/campos_512_v4
+124/634478/campos_512_v4
+124/634502/campos_512_v4
+124/634523/campos_512_v4
+124/634652/campos_512_v4
+124/634700/campos_512_v4
+124/634784/campos_512_v4
+125/635101/campos_512_v4
+125/635122/campos_512_v4
+125/635163/campos_512_v4
+125/635286/campos_512_v4
+125/635444/campos_512_v4
+125/635506/campos_512_v4
+125/635564/campos_512_v4
+125/635640/campos_512_v4
+125/635908/campos_512_v4
+125/636034/campos_512_v4
+125/636117/campos_512_v4
+125/636168/campos_512_v4
+125/636208/campos_512_v4
+125/636224/campos_512_v4
+125/636521/campos_512_v4
+125/636546/campos_512_v4
+125/636840/campos_512_v4
+125/636879/campos_512_v4
+125/636887/campos_512_v4
+125/637083/campos_512_v4
+125/637100/campos_512_v4
+125/637187/campos_512_v4
+125/637251/campos_512_v4
+125/637260/campos_512_v4
+125/637269/campos_512_v4
+125/637390/campos_512_v4
+125/637392/campos_512_v4
+125/637974/campos_512_v4
+125/637980/campos_512_v4
+125/637985/campos_512_v4
+125/638101/campos_512_v4
+125/638414/campos_512_v4
+125/638432/campos_512_v4
+125/638437/campos_512_v4
+125/638474/campos_512_v4
+125/638482/campos_512_v4
+125/638543/campos_512_v4
+125/638697/campos_512_v4
+125/638721/campos_512_v4
+125/638848/campos_512_v4
+125/639032/campos_512_v4
+125/639046/campos_512_v4
+125/639351/campos_512_v4
+125/639379/campos_512_v4
+125/639456/campos_512_v4
+125/639496/campos_512_v4
+125/639629/campos_512_v4
+125/639709/campos_512_v4
+125/639979/campos_512_v4
+127/645273/campos_512_v4
+127/645480/campos_512_v4
+127/645502/campos_512_v4
+127/645557/campos_512_v4
+127/645590/campos_512_v4
+127/645932/campos_512_v4
+127/646042/campos_512_v4
+127/646182/campos_512_v4
+127/646187/campos_512_v4
+127/646240/campos_512_v4
+127/646361/campos_512_v4
+127/646367/campos_512_v4
+127/646386/campos_512_v4
+127/646426/campos_512_v4
+127/646481/campos_512_v4
+127/646507/campos_512_v4
+127/646535/campos_512_v4
+127/646557/campos_512_v4
+127/646802/campos_512_v4
+127/646824/campos_512_v4
+127/647042/campos_512_v4
+127/647084/campos_512_v4
+127/647087/campos_512_v4
+127/647137/campos_512_v4
+127/647184/campos_512_v4
+127/647270/campos_512_v4
+127/647280/campos_512_v4
+127/647463/campos_512_v4
+127/647505/campos_512_v4
+127/647704/campos_512_v4
+127/647795/campos_512_v4
+127/648170/campos_512_v4
+127/648205/campos_512_v4
+127/648232/campos_512_v4
+127/648304/campos_512_v4
+127/648473/campos_512_v4
+127/648493/campos_512_v4
+127/648519/campos_512_v4
+127/648575/campos_512_v4
+127/648605/campos_512_v4
+127/648815/campos_512_v4
+127/648873/campos_512_v4
+127/649023/campos_512_v4
+127/649045/campos_512_v4
+127/649072/campos_512_v4
+127/649162/campos_512_v4
+127/649341/campos_512_v4
+127/649520/campos_512_v4
+127/649586/campos_512_v4
+127/649588/campos_512_v4
+127/649612/campos_512_v4
+127/649747/campos_512_v4
+127/649818/campos_512_v4
+127/649906/campos_512_v4
+127/649983/campos_512_v4
+128/650018/campos_512_v4
+128/650117/campos_512_v4
+128/650319/campos_512_v4
+128/650486/campos_512_v4
+128/650502/campos_512_v4
+128/650534/campos_512_v4
+128/650840/campos_512_v4
+128/650899/campos_512_v4
+128/651015/campos_512_v4
+128/651047/campos_512_v4
+128/651273/campos_512_v4
+128/651452/campos_512_v4
+128/651577/campos_512_v4
+128/651747/campos_512_v4
+128/651774/campos_512_v4
+128/651797/campos_512_v4
+128/651812/campos_512_v4
+128/651977/campos_512_v4
+128/652112/campos_512_v4
+128/652156/campos_512_v4
+128/652208/campos_512_v4
+128/652239/campos_512_v4
+128/652243/campos_512_v4
+128/652322/campos_512_v4
+128/652415/campos_512_v4
+128/652416/campos_512_v4
+128/652520/campos_512_v4
+128/652530/campos_512_v4
+128/652581/campos_512_v4
+128/652599/campos_512_v4
+128/652881/campos_512_v4
+128/652882/campos_512_v4
+128/653109/campos_512_v4
+128/653134/campos_512_v4
+128/653178/campos_512_v4
+128/653290/campos_512_v4
+128/653365/campos_512_v4
+128/653435/campos_512_v4
+128/653489/campos_512_v4
+128/653607/campos_512_v4
+128/653674/campos_512_v4
+128/654156/campos_512_v4
+128/654384/campos_512_v4
+128/654410/campos_512_v4
+128/654413/campos_512_v4
+128/654485/campos_512_v4
+128/654758/campos_512_v4
+128/654801/campos_512_v4
+128/654826/campos_512_v4
+128/654912/campos_512_v4
+128/654962/campos_512_v4
+129/655113/campos_512_v4
+129/655295/campos_512_v4
+129/655329/campos_512_v4
+129/655398/campos_512_v4
+129/655452/campos_512_v4
+129/655525/campos_512_v4
+129/655540/campos_512_v4
+129/655576/campos_512_v4
+129/655970/campos_512_v4
+129/655972/campos_512_v4
+129/655988/campos_512_v4
+129/656128/campos_512_v4
+129/656232/campos_512_v4
+129/656244/campos_512_v4
+129/656273/campos_512_v4
+129/656406/campos_512_v4
+129/656623/campos_512_v4
+129/656709/campos_512_v4
+129/656796/campos_512_v4
+129/656834/campos_512_v4
+129/656847/campos_512_v4
+129/656854/campos_512_v4
+129/656884/campos_512_v4
+129/656895/campos_512_v4
+129/657082/campos_512_v4
+129/657191/campos_512_v4
+129/657261/campos_512_v4
+129/657414/campos_512_v4
+129/657445/campos_512_v4
+129/657450/campos_512_v4
+129/657452/campos_512_v4
+129/657491/campos_512_v4
+129/657526/campos_512_v4
+129/657613/campos_512_v4
+129/657710/campos_512_v4
+129/657722/campos_512_v4
+129/657826/campos_512_v4
+129/657835/campos_512_v4
+129/657926/campos_512_v4
+129/657990/campos_512_v4
+129/657991/campos_512_v4
+129/658041/campos_512_v4
+129/658071/campos_512_v4
+129/658206/campos_512_v4
+129/658218/campos_512_v4
+129/658392/campos_512_v4
+129/658490/campos_512_v4
+129/658491/campos_512_v4
+129/658535/campos_512_v4
+129/658592/campos_512_v4
+129/658658/campos_512_v4
+129/658703/campos_512_v4
+129/659045/campos_512_v4
+129/659067/campos_512_v4
+129/659137/campos_512_v4
+129/659235/campos_512_v4
+129/659272/campos_512_v4
+129/659284/campos_512_v4
+129/659344/campos_512_v4
+129/659464/campos_512_v4
+129/659590/campos_512_v4
+129/659659/campos_512_v4
+129/659725/campos_512_v4
+13/75293/campos_512_v4
+13/75338/campos_512_v4
+13/75417/campos_512_v4
+13/75527/campos_512_v4
+13/75547/campos_512_v4
+13/75550/campos_512_v4
+13/75614/campos_512_v4
+13/75654/campos_512_v4
+13/75672/campos_512_v4
+13/75755/campos_512_v4
+13/75833/campos_512_v4
+13/75979/campos_512_v4
+13/76131/campos_512_v4
+13/76140/campos_512_v4
+13/76232/campos_512_v4
+13/76249/campos_512_v4
+13/76275/campos_512_v4
+13/76398/campos_512_v4
+13/76408/campos_512_v4
+13/76494/campos_512_v4
+13/76534/campos_512_v4
+13/76571/campos_512_v4
+13/76720/campos_512_v4
+13/76798/campos_512_v4
+13/76939/campos_512_v4
+13/76943/campos_512_v4
+13/76967/campos_512_v4
+13/76982/campos_512_v4
+13/76996/campos_512_v4
+13/77017/campos_512_v4
+13/77235/campos_512_v4
+13/77351/campos_512_v4
+13/77402/campos_512_v4
+13/77618/campos_512_v4
+13/77655/campos_512_v4
+13/77662/campos_512_v4
+13/77769/campos_512_v4
+13/77821/campos_512_v4
+13/77905/campos_512_v4
+13/77955/campos_512_v4
+13/77986/campos_512_v4
+13/78175/campos_512_v4
+13/78182/campos_512_v4
+13/78294/campos_512_v4
+13/78436/campos_512_v4
+13/78802/campos_512_v4
+13/78827/campos_512_v4
+13/78874/campos_512_v4
+13/78881/campos_512_v4
+13/78904/campos_512_v4
+13/79074/campos_512_v4
+13/79220/campos_512_v4
+13/79701/campos_512_v4
+13/79714/campos_512_v4
+13/79854/campos_512_v4
+13/79858/campos_512_v4
+13/79992/campos_512_v4
+130/660074/campos_512_v4
+130/660298/campos_512_v4
+130/660310/campos_512_v4
+130/660433/campos_512_v4
+130/660538/campos_512_v4
+130/660540/campos_512_v4
+130/660776/campos_512_v4
+130/660813/campos_512_v4
+130/660977/campos_512_v4
+130/661045/campos_512_v4
+130/661077/campos_512_v4
+130/661102/campos_512_v4
+130/661163/campos_512_v4
+130/661277/campos_512_v4
+130/661371/campos_512_v4
+130/661482/campos_512_v4
+130/661613/campos_512_v4
+130/662012/campos_512_v4
+130/662225/campos_512_v4
+130/662230/campos_512_v4
+130/662278/campos_512_v4
+130/662337/campos_512_v4
+130/662389/campos_512_v4
+130/662411/campos_512_v4
+130/662425/campos_512_v4
+130/662456/campos_512_v4
+130/662570/campos_512_v4
+130/662637/campos_512_v4
+130/663034/campos_512_v4
+130/663180/campos_512_v4
+130/663252/campos_512_v4
+130/663286/campos_512_v4
+130/663345/campos_512_v4
+130/663357/campos_512_v4
+130/663621/campos_512_v4
+130/663637/campos_512_v4
+130/663652/campos_512_v4
+130/663838/campos_512_v4
+130/663865/campos_512_v4
+130/663897/campos_512_v4
+130/663969/campos_512_v4
+130/664186/campos_512_v4
+130/664199/campos_512_v4
+130/664461/campos_512_v4
+130/664553/campos_512_v4
+130/664602/campos_512_v4
+130/664636/campos_512_v4
+130/664719/campos_512_v4
+130/664940/campos_512_v4
+130/664969/campos_512_v4
+131/665044/campos_512_v4
+131/665150/campos_512_v4
+131/665248/campos_512_v4
+131/665255/campos_512_v4
+131/665320/campos_512_v4
+131/665325/campos_512_v4
+131/665357/campos_512_v4
+131/665489/campos_512_v4
+131/665570/campos_512_v4
+131/665618/campos_512_v4
+131/665638/campos_512_v4
+131/665655/campos_512_v4
+131/665846/campos_512_v4
+131/665887/campos_512_v4
+131/666033/campos_512_v4
+131/666231/campos_512_v4
+131/666402/campos_512_v4
+131/666628/campos_512_v4
+131/666760/campos_512_v4
+131/666814/campos_512_v4
+131/666843/campos_512_v4
+131/666860/campos_512_v4
+131/666865/campos_512_v4
+131/666886/campos_512_v4
+131/667022/campos_512_v4
+131/667033/campos_512_v4
+131/667161/campos_512_v4
+131/667280/campos_512_v4
+131/667344/campos_512_v4
+131/667345/campos_512_v4
+131/667375/campos_512_v4
+131/667384/campos_512_v4
+131/667387/campos_512_v4
+131/667469/campos_512_v4
+131/667720/campos_512_v4
+131/667766/campos_512_v4
+131/667842/campos_512_v4
+131/667931/campos_512_v4
+131/667934/campos_512_v4
+131/667950/campos_512_v4
+131/667960/campos_512_v4
+131/668140/campos_512_v4
+131/668314/campos_512_v4
+131/668502/campos_512_v4
+131/668523/campos_512_v4
+131/668564/campos_512_v4
+131/668666/campos_512_v4
+131/668752/campos_512_v4
+131/668948/campos_512_v4
+131/669010/campos_512_v4
+131/669035/campos_512_v4
+131/669037/campos_512_v4
+131/669077/campos_512_v4
+131/669273/campos_512_v4
+131/669594/campos_512_v4
+131/669617/campos_512_v4
+131/669803/campos_512_v4
+131/669993/campos_512_v4
+132/670122/campos_512_v4
+132/670300/campos_512_v4
+132/670320/campos_512_v4
+132/670384/campos_512_v4
+132/670386/campos_512_v4
+132/670484/campos_512_v4
+132/670762/campos_512_v4
+132/671110/campos_512_v4
+132/671511/campos_512_v4
+132/671716/campos_512_v4
+132/671797/campos_512_v4
+132/671830/campos_512_v4
+132/671939/campos_512_v4
+132/672050/campos_512_v4
+132/672067/campos_512_v4
+132/672107/campos_512_v4
+132/672174/campos_512_v4
+132/672242/campos_512_v4
+132/672315/campos_512_v4
+132/672430/campos_512_v4
+132/672801/campos_512_v4
+132/672968/campos_512_v4
+132/673169/campos_512_v4
+132/673201/campos_512_v4
+132/673212/campos_512_v4
+132/673237/campos_512_v4
+132/673243/campos_512_v4
+132/673253/campos_512_v4
+132/673282/campos_512_v4
+132/673496/campos_512_v4
+132/673564/campos_512_v4
+132/673591/campos_512_v4
+132/673595/campos_512_v4
+132/673605/campos_512_v4
+132/673691/campos_512_v4
+132/673755/campos_512_v4
+132/673777/campos_512_v4
+132/673979/campos_512_v4
+132/674097/campos_512_v4
+132/674142/campos_512_v4
+132/674559/campos_512_v4
+132/674735/campos_512_v4
+132/674761/campos_512_v4
+132/674768/campos_512_v4
+132/674901/campos_512_v4
+133/675020/campos_512_v4
+133/675027/campos_512_v4
+133/675031/campos_512_v4
+133/675165/campos_512_v4
+133/675213/campos_512_v4
+133/675278/campos_512_v4
+133/675408/campos_512_v4
+133/675418/campos_512_v4
+133/675472/campos_512_v4
+133/675582/campos_512_v4
+133/675832/campos_512_v4
+133/675855/campos_512_v4
+133/675923/campos_512_v4
+133/676208/campos_512_v4
+133/676224/campos_512_v4
+133/676237/campos_512_v4
+133/676319/campos_512_v4
+133/676324/campos_512_v4
+133/676528/campos_512_v4
+133/676770/campos_512_v4
+133/676909/campos_512_v4
+133/676938/campos_512_v4
+133/676964/campos_512_v4
+133/677131/campos_512_v4
+133/677191/campos_512_v4
+133/677293/campos_512_v4
+133/677382/campos_512_v4
+133/677524/campos_512_v4
+133/677607/campos_512_v4
+133/677636/campos_512_v4
+133/677641/campos_512_v4
+133/677842/campos_512_v4
+133/677945/campos_512_v4
+133/677946/campos_512_v4
+133/678078/campos_512_v4
+133/678121/campos_512_v4
+133/678189/campos_512_v4
+133/678199/campos_512_v4
+133/678424/campos_512_v4
+133/678543/campos_512_v4
+133/678576/campos_512_v4
+133/678589/campos_512_v4
+133/678594/campos_512_v4
+133/678616/campos_512_v4
+133/678646/campos_512_v4
+133/678686/campos_512_v4
+133/678688/campos_512_v4
+133/678779/campos_512_v4
+133/678844/campos_512_v4
+133/678999/campos_512_v4
+133/679033/campos_512_v4
+133/679042/campos_512_v4
+133/679224/campos_512_v4
+133/679235/campos_512_v4
+133/679237/campos_512_v4
+133/679240/campos_512_v4
+133/679271/campos_512_v4
+133/679342/campos_512_v4
+133/679357/campos_512_v4
+133/679419/campos_512_v4
+133/679455/campos_512_v4
+133/679641/campos_512_v4
+133/679821/campos_512_v4
+133/679858/campos_512_v4
+133/679874/campos_512_v4
+133/679904/campos_512_v4
+133/679921/campos_512_v4
+134/680056/campos_512_v4
+134/680122/campos_512_v4
+134/680244/campos_512_v4
+134/680299/campos_512_v4
+134/680359/campos_512_v4
+134/680398/campos_512_v4
+134/680435/campos_512_v4
+134/680786/campos_512_v4
+134/681017/campos_512_v4
+134/681093/campos_512_v4
+134/681250/campos_512_v4
+134/681284/campos_512_v4
+134/681375/campos_512_v4
+134/681708/campos_512_v4
+134/681728/campos_512_v4
+134/682100/campos_512_v4
+134/682130/campos_512_v4
+134/682194/campos_512_v4
+134/682295/campos_512_v4
+134/682403/campos_512_v4
+134/682446/campos_512_v4
+134/682546/campos_512_v4
+134/682732/campos_512_v4
+134/682747/campos_512_v4
+134/682921/campos_512_v4
+134/683051/campos_512_v4
+134/683135/campos_512_v4
+134/683226/campos_512_v4
+134/683334/campos_512_v4
+134/683347/campos_512_v4
+134/683446/campos_512_v4
+134/683461/campos_512_v4
+134/683468/campos_512_v4
+134/683526/campos_512_v4
+134/683537/campos_512_v4
+134/683872/campos_512_v4
+134/683972/campos_512_v4
+134/684082/campos_512_v4
+134/684090/campos_512_v4
+134/684102/campos_512_v4
+134/684271/campos_512_v4
+134/684273/campos_512_v4
+134/684308/campos_512_v4
+134/684405/campos_512_v4
+134/684565/campos_512_v4
+134/684612/campos_512_v4
+134/684774/campos_512_v4
+134/684873/campos_512_v4
+134/684906/campos_512_v4
+135/685139/campos_512_v4
+135/685154/campos_512_v4
+135/685248/campos_512_v4
+135/685272/campos_512_v4
+135/685308/campos_512_v4
+135/685468/campos_512_v4
+135/685546/campos_512_v4
+135/685548/campos_512_v4
+135/685651/campos_512_v4
+135/685724/campos_512_v4
+135/685797/campos_512_v4
+135/686009/campos_512_v4
+135/686052/campos_512_v4
+135/686140/campos_512_v4
+135/686168/campos_512_v4
+135/686182/campos_512_v4
+135/686199/campos_512_v4
+135/686202/campos_512_v4
+135/686270/campos_512_v4
+135/686297/campos_512_v4
+135/686377/campos_512_v4
+135/686446/campos_512_v4
+135/686589/campos_512_v4
+135/686592/campos_512_v4
+135/686746/campos_512_v4
+135/686754/campos_512_v4
+135/686814/campos_512_v4
+135/686965/campos_512_v4
+135/687135/campos_512_v4
+135/687143/campos_512_v4
+135/687228/campos_512_v4
+135/687269/campos_512_v4
+135/687292/campos_512_v4
+135/687600/campos_512_v4
+135/687831/campos_512_v4
+135/688043/campos_512_v4
+135/688171/campos_512_v4
+135/688352/campos_512_v4
+135/688432/campos_512_v4
+135/688435/campos_512_v4
+135/688473/campos_512_v4
+135/688575/campos_512_v4
+135/688593/campos_512_v4
+135/688596/campos_512_v4
+135/688843/campos_512_v4
+135/688938/campos_512_v4
+135/688950/campos_512_v4
+135/689025/campos_512_v4
+135/689053/campos_512_v4
+135/689138/campos_512_v4
+135/689263/campos_512_v4
+135/689373/campos_512_v4
+135/689395/campos_512_v4
+135/689545/campos_512_v4
+135/689606/campos_512_v4
+135/689885/campos_512_v4
+135/689890/campos_512_v4
+136/690077/campos_512_v4
+136/690120/campos_512_v4
+136/690152/campos_512_v4
+136/690364/campos_512_v4
+136/690428/campos_512_v4
+136/690784/campos_512_v4
+136/690828/campos_512_v4
+136/690861/campos_512_v4
+136/690940/campos_512_v4
+136/691032/campos_512_v4
+136/691083/campos_512_v4
+136/691102/campos_512_v4
+136/691318/campos_512_v4
+136/691402/campos_512_v4
+136/691410/campos_512_v4
+136/691472/campos_512_v4
+136/691557/campos_512_v4
+136/691627/campos_512_v4
+136/691629/campos_512_v4
+136/691797/campos_512_v4
+136/691855/campos_512_v4
+136/692055/campos_512_v4
+136/692457/campos_512_v4
+136/692516/campos_512_v4
+136/692533/campos_512_v4
+136/692556/campos_512_v4
+136/692590/campos_512_v4
+136/692716/campos_512_v4
+136/692797/campos_512_v4
+136/692830/campos_512_v4
+136/692883/campos_512_v4
+136/692955/campos_512_v4
+136/693185/campos_512_v4
+136/693371/campos_512_v4
+136/693422/campos_512_v4
+136/693424/campos_512_v4
+136/693429/campos_512_v4
+136/693453/campos_512_v4
+136/693471/campos_512_v4
+136/693509/campos_512_v4
+136/693577/campos_512_v4
+136/693783/campos_512_v4
+136/693945/campos_512_v4
+136/694064/campos_512_v4
+136/694287/campos_512_v4
+136/694368/campos_512_v4
+136/694444/campos_512_v4
+136/694458/campos_512_v4
+136/694498/campos_512_v4
+136/694537/campos_512_v4
+136/694566/campos_512_v4
+136/694654/campos_512_v4
+136/694710/campos_512_v4
+136/694924/campos_512_v4
+137/695003/campos_512_v4
+137/695018/campos_512_v4
+137/695050/campos_512_v4
+137/695164/campos_512_v4
+137/695458/campos_512_v4
+137/695572/campos_512_v4
+137/695598/campos_512_v4
+137/695642/campos_512_v4
+137/695690/campos_512_v4
+137/695962/campos_512_v4
+137/696073/campos_512_v4
+137/696198/campos_512_v4
+137/696203/campos_512_v4
+137/696281/campos_512_v4
+137/696354/campos_512_v4
+137/696422/campos_512_v4
+137/696433/campos_512_v4
+137/696478/campos_512_v4
+137/696484/campos_512_v4
+137/696530/campos_512_v4
+137/696748/campos_512_v4
+137/696858/campos_512_v4
+137/696945/campos_512_v4
+137/697022/campos_512_v4
+137/697176/campos_512_v4
+137/697296/campos_512_v4
+137/697399/campos_512_v4
+137/697438/campos_512_v4
+137/697447/campos_512_v4
+137/697450/campos_512_v4
+137/697491/campos_512_v4
+137/697532/campos_512_v4
+137/697538/campos_512_v4
+137/697591/campos_512_v4
+137/697612/campos_512_v4
+137/697646/campos_512_v4
+137/697755/campos_512_v4
+137/697864/campos_512_v4
+137/698213/campos_512_v4
+137/698216/campos_512_v4
+137/698456/campos_512_v4
+137/698559/campos_512_v4
+137/698717/campos_512_v4
+137/698767/campos_512_v4
+137/698790/campos_512_v4
+137/698842/campos_512_v4
+137/698854/campos_512_v4
+137/698952/campos_512_v4
+137/699181/campos_512_v4
+137/699217/campos_512_v4
+137/699224/campos_512_v4
+137/699313/campos_512_v4
+137/699584/campos_512_v4
+137/699639/campos_512_v4
+137/699669/campos_512_v4
+137/699767/campos_512_v4
+137/699800/campos_512_v4
+137/699828/campos_512_v4
+138/700026/campos_512_v4
+138/700031/campos_512_v4
+138/700073/campos_512_v4
+138/700159/campos_512_v4
+138/700475/campos_512_v4
+138/700533/campos_512_v4
+138/700605/campos_512_v4
+138/700624/campos_512_v4
+138/700630/campos_512_v4
+138/700669/campos_512_v4
+138/700795/campos_512_v4
+138/701025/campos_512_v4
+138/701076/campos_512_v4
+138/701134/campos_512_v4
+138/701199/campos_512_v4
+138/701318/campos_512_v4
+138/701768/campos_512_v4
+138/701779/campos_512_v4
+138/701781/campos_512_v4
+138/701849/campos_512_v4
+138/701906/campos_512_v4
+138/701922/campos_512_v4
+138/701929/campos_512_v4
+138/702263/campos_512_v4
+138/702283/campos_512_v4
+138/702330/campos_512_v4
+138/702388/campos_512_v4
+138/702619/campos_512_v4
+138/702674/campos_512_v4
+138/702703/campos_512_v4
+138/702751/campos_512_v4
+138/702832/campos_512_v4
+138/702858/campos_512_v4
+138/702888/campos_512_v4
+138/702952/campos_512_v4
+138/702990/campos_512_v4
+138/703023/campos_512_v4
+138/703062/campos_512_v4
+138/703075/campos_512_v4
+138/703139/campos_512_v4
+138/703158/campos_512_v4
+138/703258/campos_512_v4
+138/703299/campos_512_v4
+138/703359/campos_512_v4
+138/703392/campos_512_v4
+138/703423/campos_512_v4
+138/703425/campos_512_v4
+138/703682/campos_512_v4
+138/703760/campos_512_v4
+138/703797/campos_512_v4
+138/703818/campos_512_v4
+138/703831/campos_512_v4
+138/703872/campos_512_v4
+138/703956/campos_512_v4
+138/704048/campos_512_v4
+138/704050/campos_512_v4
+138/704068/campos_512_v4
+138/704156/campos_512_v4
+138/704172/campos_512_v4
+138/704308/campos_512_v4
+138/704381/campos_512_v4
+138/704397/campos_512_v4
+138/704423/campos_512_v4
+138/704517/campos_512_v4
+138/704527/campos_512_v4
+138/704558/campos_512_v4
+138/704562/campos_512_v4
+138/704697/campos_512_v4
+138/704967/campos_512_v4
+139/705161/campos_512_v4
+139/705449/campos_512_v4
+139/705526/campos_512_v4
+139/705539/campos_512_v4
+139/705633/campos_512_v4
+139/705668/campos_512_v4
+139/705687/campos_512_v4
+139/705708/campos_512_v4
+139/705779/campos_512_v4
+139/705893/campos_512_v4
+139/705898/campos_512_v4
+139/705997/campos_512_v4
+139/706026/campos_512_v4
+139/706346/campos_512_v4
+139/706421/campos_512_v4
+139/706680/campos_512_v4
+139/706757/campos_512_v4
+139/706793/campos_512_v4
+139/707083/campos_512_v4
+139/707157/campos_512_v4
+139/707172/campos_512_v4
+139/707204/campos_512_v4
+139/707320/campos_512_v4
+139/707374/campos_512_v4
+139/707518/campos_512_v4
+139/707759/campos_512_v4
+139/707792/campos_512_v4
+139/707856/campos_512_v4
+139/707899/campos_512_v4
+139/708062/campos_512_v4
+139/708103/campos_512_v4
+139/708149/campos_512_v4
+139/708239/campos_512_v4
+139/708303/campos_512_v4
+139/708308/campos_512_v4
+139/708471/campos_512_v4
+139/708688/campos_512_v4
+139/708713/campos_512_v4
+139/708909/campos_512_v4
+139/709024/campos_512_v4
+139/709075/campos_512_v4
+139/709107/campos_512_v4
+139/709144/campos_512_v4
+139/709196/campos_512_v4
+139/709466/campos_512_v4
+139/709803/campos_512_v4
+139/709859/campos_512_v4
+139/709932/campos_512_v4
+14/80375/campos_512_v4
+14/80467/campos_512_v4
+14/80653/campos_512_v4
+14/81062/campos_512_v4
+14/81220/campos_512_v4
+14/81317/campos_512_v4
+14/81376/campos_512_v4
+14/81522/campos_512_v4
+14/81593/campos_512_v4
+14/81941/campos_512_v4
+14/81977/campos_512_v4
+14/82146/campos_512_v4
+14/82241/campos_512_v4
+14/82255/campos_512_v4
+14/82258/campos_512_v4
+14/82289/campos_512_v4
+14/82354/campos_512_v4
+14/82367/campos_512_v4
+14/82499/campos_512_v4
+14/82550/campos_512_v4
+14/82634/campos_512_v4
+14/82646/campos_512_v4
+14/82680/campos_512_v4
+14/82709/campos_512_v4
+14/82718/campos_512_v4
+14/82735/campos_512_v4
+14/82742/campos_512_v4
+14/82747/campos_512_v4
+14/82749/campos_512_v4
+14/82885/campos_512_v4
+14/82897/campos_512_v4
+14/83022/campos_512_v4
+14/83082/campos_512_v4
+14/83088/campos_512_v4
+14/83097/campos_512_v4
+14/83238/campos_512_v4
+14/83354/campos_512_v4
+14/83439/campos_512_v4
+14/83563/campos_512_v4
+14/83632/campos_512_v4
+14/84018/campos_512_v4
+14/84078/campos_512_v4
+14/84161/campos_512_v4
+14/84203/campos_512_v4
+14/84303/campos_512_v4
+14/84368/campos_512_v4
+14/84576/campos_512_v4
+14/84684/campos_512_v4
+140/710060/campos_512_v4
+140/710068/campos_512_v4
+140/710265/campos_512_v4
+140/710266/campos_512_v4
+140/710290/campos_512_v4
+140/710399/campos_512_v4
+140/710508/campos_512_v4
+140/710523/campos_512_v4
+140/710529/campos_512_v4
+140/710557/campos_512_v4
+140/710745/campos_512_v4
+140/710943/campos_512_v4
+140/711080/campos_512_v4
+140/711104/campos_512_v4
+140/711178/campos_512_v4
+140/711200/campos_512_v4
+140/711241/campos_512_v4
+140/711329/campos_512_v4
+140/711370/campos_512_v4
+140/711392/campos_512_v4
+140/711395/campos_512_v4
+140/711468/campos_512_v4
+140/711671/campos_512_v4
+140/711755/campos_512_v4
+140/711778/campos_512_v4
+140/711834/campos_512_v4
+140/711931/campos_512_v4
+140/711958/campos_512_v4
+140/712000/campos_512_v4
+140/712041/campos_512_v4
+140/712079/campos_512_v4
+140/712176/campos_512_v4
+140/712216/campos_512_v4
+140/712291/campos_512_v4
+140/712441/campos_512_v4
+140/712516/campos_512_v4
+140/712538/campos_512_v4
+140/712550/campos_512_v4
+140/712770/campos_512_v4
+140/712913/campos_512_v4
+140/713020/campos_512_v4
+140/713078/campos_512_v4
+140/713089/campos_512_v4
+140/713147/campos_512_v4
+140/713157/campos_512_v4
+140/713189/campos_512_v4
+140/713225/campos_512_v4
+140/713226/campos_512_v4
+140/713277/campos_512_v4
+140/713294/campos_512_v4
+140/713302/campos_512_v4
+140/713355/campos_512_v4
+140/713431/campos_512_v4
+140/713658/campos_512_v4
+140/713887/campos_512_v4
+140/713954/campos_512_v4
+140/713978/campos_512_v4
+140/714116/campos_512_v4
+140/714190/campos_512_v4
+140/714229/campos_512_v4
+140/714273/campos_512_v4
+140/714326/campos_512_v4
+140/714379/campos_512_v4
+140/714573/campos_512_v4
+140/714614/campos_512_v4
+140/714636/campos_512_v4
+140/714713/campos_512_v4
+140/714918/campos_512_v4
+140/714932/campos_512_v4
+141/715077/campos_512_v4
+141/715160/campos_512_v4
+141/715241/campos_512_v4
+141/715260/campos_512_v4
+141/715273/campos_512_v4
+141/715284/campos_512_v4
+141/715330/campos_512_v4
+141/715335/campos_512_v4
+141/715405/campos_512_v4
+141/715443/campos_512_v4
+141/715456/campos_512_v4
+141/715497/campos_512_v4
+141/715519/campos_512_v4
+141/715540/campos_512_v4
+141/715606/campos_512_v4
+141/715672/campos_512_v4
+141/715898/campos_512_v4
+141/716116/campos_512_v4
+141/716241/campos_512_v4
+141/716289/campos_512_v4
+141/716549/campos_512_v4
+141/716791/campos_512_v4
+141/716844/campos_512_v4
+141/716895/campos_512_v4
+141/716939/campos_512_v4
+141/716997/campos_512_v4
+141/717077/campos_512_v4
+141/717201/campos_512_v4
+141/717284/campos_512_v4
+141/717330/campos_512_v4
+141/717494/campos_512_v4
+141/717572/campos_512_v4
+141/717672/campos_512_v4
+141/717705/campos_512_v4
+141/717750/campos_512_v4
+141/717862/campos_512_v4
+141/717972/campos_512_v4
+141/717976/campos_512_v4
+141/717981/campos_512_v4
+141/718052/campos_512_v4
+141/718326/campos_512_v4
+141/718351/campos_512_v4
+141/718369/campos_512_v4
+141/718421/campos_512_v4
+141/718451/campos_512_v4
+141/718489/campos_512_v4
+141/718530/campos_512_v4
+141/718614/campos_512_v4
+141/718618/campos_512_v4
+141/718727/campos_512_v4
+141/718857/campos_512_v4
+141/718858/campos_512_v4
+141/718942/campos_512_v4
+141/718958/campos_512_v4
+141/719233/campos_512_v4
+141/719342/campos_512_v4
+141/719528/campos_512_v4
+141/719532/campos_512_v4
+141/719556/campos_512_v4
+141/719575/campos_512_v4
+141/719584/campos_512_v4
+141/719672/campos_512_v4
+141/719720/campos_512_v4
+141/719783/campos_512_v4
+141/719864/campos_512_v4
+141/719880/campos_512_v4
+141/719992/campos_512_v4
+141/720000/campos_512_v4
+142/720025/campos_512_v4
+142/720029/campos_512_v4
+142/720133/campos_512_v4
+142/720158/campos_512_v4
+142/720162/campos_512_v4
+142/720200/campos_512_v4
+142/720227/campos_512_v4
+142/720258/campos_512_v4
+142/720357/campos_512_v4
+142/720368/campos_512_v4
+142/720376/campos_512_v4
+142/720386/campos_512_v4
+142/720409/campos_512_v4
+142/720695/campos_512_v4
+142/720739/campos_512_v4
+142/720755/campos_512_v4
+142/720835/campos_512_v4
+142/720912/campos_512_v4
+142/720936/campos_512_v4
+142/721068/campos_512_v4
+142/721356/campos_512_v4
+142/721462/campos_512_v4
+142/721475/campos_512_v4
+142/721555/campos_512_v4
+142/721811/campos_512_v4
+142/721831/campos_512_v4
+142/721838/campos_512_v4
+142/721860/campos_512_v4
+142/721877/campos_512_v4
+142/721907/campos_512_v4
+142/721950/campos_512_v4
+142/722191/campos_512_v4
+142/722252/campos_512_v4
+142/722277/campos_512_v4
+142/722357/campos_512_v4
+142/722403/campos_512_v4
+142/722466/campos_512_v4
+142/722557/campos_512_v4
+142/722637/campos_512_v4
+142/722641/campos_512_v4
+142/722687/campos_512_v4
+142/722719/campos_512_v4
+142/722787/campos_512_v4
+142/722861/campos_512_v4
+142/722897/campos_512_v4
+142/722940/campos_512_v4
+142/722988/campos_512_v4
+142/723060/campos_512_v4
+142/723128/campos_512_v4
+142/723450/campos_512_v4
+142/723515/campos_512_v4
+142/723594/campos_512_v4
+142/723684/campos_512_v4
+142/723902/campos_512_v4
+142/723964/campos_512_v4
+142/723978/campos_512_v4
+142/724013/campos_512_v4
+142/724099/campos_512_v4
+142/724145/campos_512_v4
+142/724388/campos_512_v4
+142/724549/campos_512_v4
+142/724707/campos_512_v4
+142/724781/campos_512_v4
+142/724798/campos_512_v4
+142/724867/campos_512_v4
+142/724921/campos_512_v4
+142/724929/campos_512_v4
+142/724947/campos_512_v4
+142/724948/campos_512_v4
+143/725029/campos_512_v4
+143/725070/campos_512_v4
+143/725106/campos_512_v4
+143/725178/campos_512_v4
+143/725246/campos_512_v4
+143/725323/campos_512_v4
+143/725605/campos_512_v4
+143/725685/campos_512_v4
+143/725774/campos_512_v4
+143/726258/campos_512_v4
+143/726370/campos_512_v4
+143/726403/campos_512_v4
+143/726449/campos_512_v4
+143/726536/campos_512_v4
+143/726626/campos_512_v4
+143/726629/campos_512_v4
+143/726681/campos_512_v4
+143/726908/campos_512_v4
+143/727104/campos_512_v4
+143/727108/campos_512_v4
+143/727125/campos_512_v4
+143/727203/campos_512_v4
+143/727243/campos_512_v4
+143/727607/campos_512_v4
+143/727706/campos_512_v4
+143/727716/campos_512_v4
+143/727780/campos_512_v4
+143/727952/campos_512_v4
+143/728180/campos_512_v4
+143/728310/campos_512_v4
+143/728375/campos_512_v4
+143/728490/campos_512_v4
+143/728496/campos_512_v4
+143/728525/campos_512_v4
+143/728546/campos_512_v4
+143/728555/campos_512_v4
+143/728624/campos_512_v4
+143/728849/campos_512_v4
+143/728894/campos_512_v4
+143/728923/campos_512_v4
+143/728978/campos_512_v4
+143/728984/campos_512_v4
+143/729144/campos_512_v4
+143/729151/campos_512_v4
+143/729157/campos_512_v4
+143/729234/campos_512_v4
+143/729257/campos_512_v4
+143/729262/campos_512_v4
+143/729314/campos_512_v4
+143/729337/campos_512_v4
+143/729339/campos_512_v4
+143/729346/campos_512_v4
+143/729393/campos_512_v4
+143/729433/campos_512_v4
+143/729457/campos_512_v4
+143/729528/campos_512_v4
+143/729611/campos_512_v4
+143/729660/campos_512_v4
+143/729830/campos_512_v4
+143/729848/campos_512_v4
+143/729882/campos_512_v4
+143/729980/campos_512_v4
+144/730042/campos_512_v4
+144/730064/campos_512_v4
+144/730104/campos_512_v4
+144/730127/campos_512_v4
+144/730423/campos_512_v4
+144/730526/campos_512_v4
+144/730573/campos_512_v4
+144/730627/campos_512_v4
+144/730639/campos_512_v4
+144/730668/campos_512_v4
+144/730669/campos_512_v4
+144/730711/campos_512_v4
+144/730740/campos_512_v4
+144/730836/campos_512_v4
+144/731144/campos_512_v4
+144/731473/campos_512_v4
+144/731573/campos_512_v4
+144/731713/campos_512_v4
+144/731773/campos_512_v4
+144/731785/campos_512_v4
+144/732443/campos_512_v4
+144/732467/campos_512_v4
+144/732530/campos_512_v4
+144/732656/campos_512_v4
+144/732702/campos_512_v4
+144/732751/campos_512_v4
+144/733101/campos_512_v4
+144/733204/campos_512_v4
+144/733213/campos_512_v4
+144/733228/campos_512_v4
+144/733248/campos_512_v4
+144/733313/campos_512_v4
+144/733363/campos_512_v4
+144/733445/campos_512_v4
+144/733536/campos_512_v4
+144/733557/campos_512_v4
+144/733596/campos_512_v4
+144/733632/campos_512_v4
+144/733727/campos_512_v4
+144/733730/campos_512_v4
+144/733864/campos_512_v4
+144/733971/campos_512_v4
+144/734029/campos_512_v4
+144/734189/campos_512_v4
+144/734212/campos_512_v4
+144/734268/campos_512_v4
+144/734296/campos_512_v4
+144/734805/campos_512_v4
+144/734855/campos_512_v4
+144/734913/campos_512_v4
+144/734999/campos_512_v4
+144/735001/campos_512_v4
+145/735034/campos_512_v4
+145/735072/campos_512_v4
+145/735387/campos_512_v4
+145/735415/campos_512_v4
+145/735588/campos_512_v4
+145/735645/campos_512_v4
+145/735659/campos_512_v4
+145/735707/campos_512_v4
+145/735723/campos_512_v4
+145/735756/campos_512_v4
+145/735791/campos_512_v4
+145/735815/campos_512_v4
+145/735845/campos_512_v4
+145/735883/campos_512_v4
+145/735884/campos_512_v4
+145/735926/campos_512_v4
+145/736062/campos_512_v4
+145/736105/campos_512_v4
+145/736143/campos_512_v4
+145/736158/campos_512_v4
+145/736176/campos_512_v4
+145/736254/campos_512_v4
+145/736339/campos_512_v4
+145/736419/campos_512_v4
+145/736584/campos_512_v4
+145/736710/campos_512_v4
+145/736738/campos_512_v4
+145/736780/campos_512_v4
+145/736881/campos_512_v4
+145/736908/campos_512_v4
+145/736941/campos_512_v4
+145/737066/campos_512_v4
+145/737132/campos_512_v4
+145/737201/campos_512_v4
+145/737242/campos_512_v4
+145/737473/campos_512_v4
+145/737549/campos_512_v4
+145/737699/campos_512_v4
+145/737765/campos_512_v4
+145/738021/campos_512_v4
+145/738052/campos_512_v4
+145/738056/campos_512_v4
+145/738079/campos_512_v4
+145/738226/campos_512_v4
+145/738277/campos_512_v4
+145/738326/campos_512_v4
+145/738340/campos_512_v4
+145/738436/campos_512_v4
+145/738632/campos_512_v4
+145/738685/campos_512_v4
+145/738753/campos_512_v4
+145/738787/campos_512_v4
+145/738823/campos_512_v4
+145/738840/campos_512_v4
+145/738975/campos_512_v4
+145/739305/campos_512_v4
+145/739517/campos_512_v4
+145/739521/campos_512_v4
+145/739604/campos_512_v4
+145/739699/campos_512_v4
+145/739745/campos_512_v4
+145/739753/campos_512_v4
+145/739756/campos_512_v4
+145/739999/campos_512_v4
+146/740020/campos_512_v4
+146/740100/campos_512_v4
+146/740187/campos_512_v4
+146/740196/campos_512_v4
+146/740248/campos_512_v4
+146/740276/campos_512_v4
+146/740383/campos_512_v4
+146/740456/campos_512_v4
+146/740570/campos_512_v4
+146/740595/campos_512_v4
+146/740603/campos_512_v4
+146/740605/campos_512_v4
+146/740684/campos_512_v4
+146/740705/campos_512_v4
+146/740725/campos_512_v4
+146/740739/campos_512_v4
+146/740776/campos_512_v4
+146/740972/campos_512_v4
+146/740994/campos_512_v4
+146/741022/campos_512_v4
+146/741050/campos_512_v4
+146/741070/campos_512_v4
+146/741088/campos_512_v4
+146/741285/campos_512_v4
+146/741325/campos_512_v4
+146/741513/campos_512_v4
+146/741526/campos_512_v4
+146/741566/campos_512_v4
+146/741674/campos_512_v4
+146/741732/campos_512_v4
+146/741993/campos_512_v4
+146/742051/campos_512_v4
+146/742164/campos_512_v4
+146/742169/campos_512_v4
+146/742213/campos_512_v4
+146/742328/campos_512_v4
+146/742564/campos_512_v4
+146/742606/campos_512_v4
+146/742627/campos_512_v4
+146/742637/campos_512_v4
+146/742687/campos_512_v4
+146/742703/campos_512_v4
+146/742713/campos_512_v4
+146/742874/campos_512_v4
+146/743060/campos_512_v4
+146/743551/campos_512_v4
+146/743740/campos_512_v4
+146/743971/campos_512_v4
+146/743972/campos_512_v4
+146/743989/campos_512_v4
+146/744111/campos_512_v4
+146/744159/campos_512_v4
+146/744276/campos_512_v4
+146/744337/campos_512_v4
+146/744525/campos_512_v4
+146/744552/campos_512_v4
+146/744655/campos_512_v4
+146/744816/campos_512_v4
+146/744820/campos_512_v4
+146/744903/campos_512_v4
+146/744930/campos_512_v4
+146/744966/campos_512_v4
+146/745001/campos_512_v4
+147/745143/campos_512_v4
+147/745288/campos_512_v4
+147/745439/campos_512_v4
+147/745455/campos_512_v4
+147/745508/campos_512_v4
+147/745511/campos_512_v4
+147/745536/campos_512_v4
+147/745585/campos_512_v4
+147/745653/campos_512_v4
+147/745827/campos_512_v4
+147/745854/campos_512_v4
+147/745900/campos_512_v4
+147/745981/campos_512_v4
+147/746179/campos_512_v4
+147/746201/campos_512_v4
+147/746268/campos_512_v4
+147/746486/campos_512_v4
+147/746582/campos_512_v4
+147/746654/campos_512_v4
+147/746738/campos_512_v4
+147/746884/campos_512_v4
+147/747231/campos_512_v4
+147/747294/campos_512_v4
+147/747304/campos_512_v4
+147/747312/campos_512_v4
+147/747395/campos_512_v4
+147/747520/campos_512_v4
+147/747569/campos_512_v4
+147/747621/campos_512_v4
+147/747723/campos_512_v4
+147/747856/campos_512_v4
+147/747938/campos_512_v4
+147/747965/campos_512_v4
+147/748040/campos_512_v4
+147/748068/campos_512_v4
+147/748165/campos_512_v4
+147/748354/campos_512_v4
+147/748473/campos_512_v4
+147/748549/campos_512_v4
+147/748672/campos_512_v4
+147/748688/campos_512_v4
+147/748696/campos_512_v4
+147/748712/campos_512_v4
+147/748713/campos_512_v4
+147/748746/campos_512_v4
+147/748976/campos_512_v4
+147/749027/campos_512_v4
+147/749034/campos_512_v4
+147/749073/campos_512_v4
+147/749100/campos_512_v4
+147/749115/campos_512_v4
+147/749239/campos_512_v4
+147/749247/campos_512_v4
+147/749427/campos_512_v4
+147/749446/campos_512_v4
+147/749597/campos_512_v4
+147/749672/campos_512_v4
+147/749872/campos_512_v4
+147/749931/campos_512_v4
+147/749943/campos_512_v4
+147/750001/campos_512_v4
+148/750201/campos_512_v4
+148/750217/campos_512_v4
+148/750279/campos_512_v4
+148/750285/campos_512_v4
+148/750287/campos_512_v4
+148/750699/campos_512_v4
+148/750721/campos_512_v4
+148/750962/campos_512_v4
+148/751003/campos_512_v4
+148/751169/campos_512_v4
+148/751275/campos_512_v4
+148/751294/campos_512_v4
+148/751303/campos_512_v4
+148/751332/campos_512_v4
+148/751390/campos_512_v4
+148/751460/campos_512_v4
+148/751558/campos_512_v4
+148/751645/campos_512_v4
+148/751725/campos_512_v4
+148/751749/campos_512_v4
+148/751809/campos_512_v4
+148/751878/campos_512_v4
+148/751965/campos_512_v4
+148/752008/campos_512_v4
+148/752020/campos_512_v4
+148/752389/campos_512_v4
+148/752544/campos_512_v4
+148/752563/campos_512_v4
+148/752564/campos_512_v4
+148/752590/campos_512_v4
+148/752592/campos_512_v4
+148/752657/campos_512_v4
+148/752925/campos_512_v4
+148/752926/campos_512_v4
+148/753058/campos_512_v4
+148/753280/campos_512_v4
+148/753297/campos_512_v4
+148/753300/campos_512_v4
+148/753341/campos_512_v4
+148/753352/campos_512_v4
+148/753375/campos_512_v4
+148/753414/campos_512_v4
+148/753460/campos_512_v4
+148/753466/campos_512_v4
+148/753615/campos_512_v4
+148/753716/campos_512_v4
+148/753864/campos_512_v4
+148/753931/campos_512_v4
+148/753947/campos_512_v4
+148/753988/campos_512_v4
+148/754061/campos_512_v4
+148/754126/campos_512_v4
+148/754136/campos_512_v4
+148/754337/campos_512_v4
+148/754349/campos_512_v4
+148/754478/campos_512_v4
+148/754493/campos_512_v4
+148/754520/campos_512_v4
+148/754586/campos_512_v4
+148/754592/campos_512_v4
+148/754597/campos_512_v4
+148/754607/campos_512_v4
+148/754670/campos_512_v4
+148/754699/campos_512_v4
+148/754888/campos_512_v4
+148/755000/campos_512_v4
+149/755043/campos_512_v4
+149/755176/campos_512_v4
+149/755297/campos_512_v4
+149/755343/campos_512_v4
+149/755578/campos_512_v4
+149/755582/campos_512_v4
+149/755686/campos_512_v4
+149/755688/campos_512_v4
+149/755811/campos_512_v4
+149/755876/campos_512_v4
+149/755916/campos_512_v4
+149/756127/campos_512_v4
+149/756134/campos_512_v4
+149/756198/campos_512_v4
+149/756213/campos_512_v4
+149/756214/campos_512_v4
+149/756451/campos_512_v4
+149/756570/campos_512_v4
+149/756597/campos_512_v4
+149/756599/campos_512_v4
+149/756625/campos_512_v4
+149/756683/campos_512_v4
+149/756745/campos_512_v4
+149/756816/campos_512_v4
+149/756822/campos_512_v4
+149/756909/campos_512_v4
+149/756910/campos_512_v4
+149/756966/campos_512_v4
+149/757102/campos_512_v4
+149/757270/campos_512_v4
+149/757282/campos_512_v4
+149/757327/campos_512_v4
+149/757409/campos_512_v4
+149/757478/campos_512_v4
+149/757506/campos_512_v4
+149/757508/campos_512_v4
+149/757546/campos_512_v4
+149/757557/campos_512_v4
+149/757562/campos_512_v4
+149/757587/campos_512_v4
+149/757682/campos_512_v4
+149/757686/campos_512_v4
+149/757822/campos_512_v4
+149/757831/campos_512_v4
+149/757843/campos_512_v4
+149/758291/campos_512_v4
+149/758418/campos_512_v4
+149/758458/campos_512_v4
+149/758561/campos_512_v4
+149/758705/campos_512_v4
+149/758710/campos_512_v4
+149/758774/campos_512_v4
+149/758841/campos_512_v4
+149/758925/campos_512_v4
+149/759029/campos_512_v4
+149/759043/campos_512_v4
+149/759107/campos_512_v4
+149/759220/campos_512_v4
+149/759236/campos_512_v4
+149/759343/campos_512_v4
+149/759369/campos_512_v4
+149/759392/campos_512_v4
+149/759414/campos_512_v4
+149/759426/campos_512_v4
+149/759540/campos_512_v4
+149/759633/campos_512_v4
+149/759760/campos_512_v4
+149/759819/campos_512_v4
+149/759835/campos_512_v4
+149/759874/campos_512_v4
+15/85086/campos_512_v4
+15/85122/campos_512_v4
+15/85174/campos_512_v4
+15/85440/campos_512_v4
+15/85528/campos_512_v4
+15/85563/campos_512_v4
+15/85621/campos_512_v4
+15/85801/campos_512_v4
+15/85850/campos_512_v4
+15/85908/campos_512_v4
+15/85919/campos_512_v4
+15/86301/campos_512_v4
+15/86426/campos_512_v4
+15/86863/campos_512_v4
+15/86879/campos_512_v4
+15/87052/campos_512_v4
+15/87068/campos_512_v4
+15/87285/campos_512_v4
+15/87425/campos_512_v4
+15/87549/campos_512_v4
+15/87633/campos_512_v4
+15/87665/campos_512_v4
+15/87674/campos_512_v4
+15/87723/campos_512_v4
+15/87801/campos_512_v4
+15/87833/campos_512_v4
+15/87955/campos_512_v4
+15/87972/campos_512_v4
+15/88062/campos_512_v4
+15/88104/campos_512_v4
+15/88241/campos_512_v4
+15/88298/campos_512_v4
+15/88582/campos_512_v4
+15/88600/campos_512_v4
+15/88724/campos_512_v4
+15/88772/campos_512_v4
+15/88784/campos_512_v4
+15/89259/campos_512_v4
+15/89264/campos_512_v4
+15/89322/campos_512_v4
+15/89401/campos_512_v4
+15/89435/campos_512_v4
+15/89502/campos_512_v4
+15/89669/campos_512_v4
+15/89707/campos_512_v4
+150/760023/campos_512_v4
+150/760064/campos_512_v4
+150/760130/campos_512_v4
+150/760194/campos_512_v4
+150/760236/campos_512_v4
+150/760362/campos_512_v4
+150/760414/campos_512_v4
+150/760435/campos_512_v4
+150/760443/campos_512_v4
+150/760743/campos_512_v4
+150/760878/campos_512_v4
+150/760936/campos_512_v4
+150/760955/campos_512_v4
+150/760963/campos_512_v4
+150/761008/campos_512_v4
+150/761071/campos_512_v4
+150/761261/campos_512_v4
+150/761448/campos_512_v4
+150/761616/campos_512_v4
+150/761644/campos_512_v4
+150/761680/campos_512_v4
+150/761693/campos_512_v4
+150/761756/campos_512_v4
+150/762075/campos_512_v4
+150/762142/campos_512_v4
+150/762186/campos_512_v4
+150/762435/campos_512_v4
+150/762516/campos_512_v4
+150/762524/campos_512_v4
+150/762676/campos_512_v4
+150/762694/campos_512_v4
+150/762722/campos_512_v4
+150/762738/campos_512_v4
+150/762819/campos_512_v4
+150/762821/campos_512_v4
+150/762869/campos_512_v4
+150/762871/campos_512_v4
+150/762934/campos_512_v4
+150/762938/campos_512_v4
+150/762939/campos_512_v4
+150/762942/campos_512_v4
+150/763099/campos_512_v4
+150/763169/campos_512_v4
+150/763258/campos_512_v4
+150/763327/campos_512_v4
+150/763363/campos_512_v4
+150/763366/campos_512_v4
+150/763439/campos_512_v4
+150/763516/campos_512_v4
+150/763565/campos_512_v4
+150/763725/campos_512_v4
+150/763767/campos_512_v4
+150/763813/campos_512_v4
+150/763867/campos_512_v4
+150/763880/campos_512_v4
+150/764081/campos_512_v4
+150/764130/campos_512_v4
+150/764169/campos_512_v4
+150/764208/campos_512_v4
+150/764273/campos_512_v4
+150/764309/campos_512_v4
+150/764360/campos_512_v4
+150/764434/campos_512_v4
+150/764473/campos_512_v4
+150/764515/campos_512_v4
+150/764544/campos_512_v4
+150/764600/campos_512_v4
+150/764624/campos_512_v4
+150/764634/campos_512_v4
+150/764692/campos_512_v4
+150/764872/campos_512_v4
+151/765059/campos_512_v4
+151/765095/campos_512_v4
+151/765143/campos_512_v4
+151/765190/campos_512_v4
+151/765259/campos_512_v4
+151/765314/campos_512_v4
+151/765394/campos_512_v4
+151/765451/campos_512_v4
+151/765543/campos_512_v4
+151/765708/campos_512_v4
+151/765808/campos_512_v4
+151/766028/campos_512_v4
+151/766214/campos_512_v4
+151/766328/campos_512_v4
+151/766430/campos_512_v4
+151/766533/campos_512_v4
+151/766632/campos_512_v4
+151/766811/campos_512_v4
+151/766847/campos_512_v4
+151/766999/campos_512_v4
+151/767053/campos_512_v4
+151/767097/campos_512_v4
+151/767165/campos_512_v4
+151/767244/campos_512_v4
+151/767475/campos_512_v4
+151/767539/campos_512_v4
+151/767585/campos_512_v4
+151/767624/campos_512_v4
+151/767727/campos_512_v4
+151/767728/campos_512_v4
+151/768011/campos_512_v4
+151/768013/campos_512_v4
+151/768167/campos_512_v4
+151/768178/campos_512_v4
+151/768224/campos_512_v4
+151/768233/campos_512_v4
+151/768313/campos_512_v4
+151/768333/campos_512_v4
+151/768340/campos_512_v4
+151/768360/campos_512_v4
+151/768367/campos_512_v4
+151/768408/campos_512_v4
+151/768420/campos_512_v4
+151/768925/campos_512_v4
+151/768964/campos_512_v4
+151/769011/campos_512_v4
+151/769068/campos_512_v4
+151/769076/campos_512_v4
+151/769194/campos_512_v4
+151/769250/campos_512_v4
+151/769411/campos_512_v4
+151/769484/campos_512_v4
+151/769589/campos_512_v4
+151/769622/campos_512_v4
+151/769626/campos_512_v4
+151/769689/campos_512_v4
+151/769692/campos_512_v4
+151/769941/campos_512_v4
+151/769975/campos_512_v4
+152/770021/campos_512_v4
+152/770065/campos_512_v4
+152/770137/campos_512_v4
+152/770148/campos_512_v4
+152/770279/campos_512_v4
+152/770345/campos_512_v4
+152/770376/campos_512_v4
+152/770571/campos_512_v4
+152/770595/campos_512_v4
+152/770627/campos_512_v4
+152/770652/campos_512_v4
+152/770757/campos_512_v4
+152/770853/campos_512_v4
+152/770887/campos_512_v4
+152/770957/campos_512_v4
+152/770989/campos_512_v4
+152/770997/campos_512_v4
+152/771143/campos_512_v4
+152/771148/campos_512_v4
+152/771155/campos_512_v4
+152/771159/campos_512_v4
+152/771216/campos_512_v4
+152/771218/campos_512_v4
+152/771264/campos_512_v4
+152/771265/campos_512_v4
+152/771270/campos_512_v4
+152/771330/campos_512_v4
+152/771608/campos_512_v4
+152/771793/campos_512_v4
+152/771820/campos_512_v4
+152/771907/campos_512_v4
+152/772059/campos_512_v4
+152/772139/campos_512_v4
+152/772393/campos_512_v4
+152/772399/campos_512_v4
+152/772590/campos_512_v4
+152/772758/campos_512_v4
+152/772785/campos_512_v4
+152/772803/campos_512_v4
+152/772899/campos_512_v4
+152/772950/campos_512_v4
+152/773021/campos_512_v4
+152/773042/campos_512_v4
+152/773242/campos_512_v4
+152/773299/campos_512_v4
+152/773371/campos_512_v4
+152/773585/campos_512_v4
+152/773694/campos_512_v4
+152/773729/campos_512_v4
+152/773776/campos_512_v4
+152/773802/campos_512_v4
+152/773888/campos_512_v4
+152/773928/campos_512_v4
+152/774003/campos_512_v4
+152/774081/campos_512_v4
+152/774152/campos_512_v4
+152/774298/campos_512_v4
+152/774318/campos_512_v4
+152/774479/campos_512_v4
+152/774529/campos_512_v4
+152/774572/campos_512_v4
+152/774627/campos_512_v4
+152/774652/campos_512_v4
+152/774675/campos_512_v4
+152/774691/campos_512_v4
+152/774724/campos_512_v4
+153/775058/campos_512_v4
+153/775203/campos_512_v4
+153/775245/campos_512_v4
+153/775271/campos_512_v4
+153/775273/campos_512_v4
+153/775463/campos_512_v4
+153/775540/campos_512_v4
+153/775543/campos_512_v4
+153/775546/campos_512_v4
+153/775570/campos_512_v4
+153/775877/campos_512_v4
+153/776035/campos_512_v4
+153/776120/campos_512_v4
+153/776184/campos_512_v4
+153/776247/campos_512_v4
+153/776446/campos_512_v4
+153/776586/campos_512_v4
+153/776600/campos_512_v4
+153/776682/campos_512_v4
+153/776700/campos_512_v4
+153/776759/campos_512_v4
+153/776907/campos_512_v4
+153/777050/campos_512_v4
+153/777184/campos_512_v4
+153/777210/campos_512_v4
+153/777407/campos_512_v4
+153/777442/campos_512_v4
+153/777471/campos_512_v4
+153/777502/campos_512_v4
+153/777550/campos_512_v4
+153/777567/campos_512_v4
+153/777694/campos_512_v4
+153/777795/campos_512_v4
+153/777798/campos_512_v4
+153/777871/campos_512_v4
+153/777921/campos_512_v4
+153/778150/campos_512_v4
+153/778283/campos_512_v4
+153/778312/campos_512_v4
+153/778317/campos_512_v4
+153/778704/campos_512_v4
+153/778827/campos_512_v4
+153/778865/campos_512_v4
+153/778905/campos_512_v4
+153/779014/campos_512_v4
+153/779022/campos_512_v4
+153/779153/campos_512_v4
+153/779189/campos_512_v4
+153/779340/campos_512_v4
+153/779496/campos_512_v4
+153/779762/campos_512_v4
+153/779788/campos_512_v4
+153/779828/campos_512_v4
+153/779889/campos_512_v4
+153/779890/campos_512_v4
+154/780030/campos_512_v4
+154/780156/campos_512_v4
+154/780198/campos_512_v4
+154/780201/campos_512_v4
+154/780442/campos_512_v4
+154/780471/campos_512_v4
+154/780480/campos_512_v4
+154/780495/campos_512_v4
+154/780499/campos_512_v4
+154/780605/campos_512_v4
+154/780619/campos_512_v4
+154/780691/campos_512_v4
+154/780776/campos_512_v4
+154/780818/campos_512_v4
+154/780837/campos_512_v4
+154/780869/campos_512_v4
+154/781067/campos_512_v4
+154/781179/campos_512_v4
+154/781210/campos_512_v4
+154/781236/campos_512_v4
+154/781387/campos_512_v4
+154/781420/campos_512_v4
+154/781462/campos_512_v4
+154/781512/campos_512_v4
+154/781527/campos_512_v4
+154/781528/campos_512_v4
+154/781569/campos_512_v4
+154/781576/campos_512_v4
+154/781582/campos_512_v4
+154/781793/campos_512_v4
+154/781848/campos_512_v4
+154/781853/campos_512_v4
+154/781900/campos_512_v4
+154/782047/campos_512_v4
+154/782288/campos_512_v4
+154/782330/campos_512_v4
+154/782543/campos_512_v4
+154/782587/campos_512_v4
+154/782610/campos_512_v4
+154/782751/campos_512_v4
+154/782873/campos_512_v4
+154/782912/campos_512_v4
+154/783057/campos_512_v4
+154/783130/campos_512_v4
+154/783346/campos_512_v4
+154/783386/campos_512_v4
+154/783407/campos_512_v4
+154/783464/campos_512_v4
+154/783605/campos_512_v4
+154/783624/campos_512_v4
+154/783625/campos_512_v4
+154/783662/campos_512_v4
+154/783672/campos_512_v4
+154/783710/campos_512_v4
+154/783732/campos_512_v4
+154/783741/campos_512_v4
+154/783852/campos_512_v4
+154/783869/campos_512_v4
+154/783899/campos_512_v4
+154/783984/campos_512_v4
+154/783991/campos_512_v4
+154/784009/campos_512_v4
+154/784067/campos_512_v4
+154/784122/campos_512_v4
+154/784259/campos_512_v4
+154/784324/campos_512_v4
+154/784360/campos_512_v4
+154/784471/campos_512_v4
+154/784717/campos_512_v4
+154/784991/campos_512_v4
+155/785222/campos_512_v4
+155/785262/campos_512_v4
+155/785298/campos_512_v4
+155/785334/campos_512_v4
+155/785347/campos_512_v4
+155/785390/campos_512_v4
+155/785487/campos_512_v4
+155/785601/campos_512_v4
+155/785611/campos_512_v4
+155/785706/campos_512_v4
+155/785764/campos_512_v4
+155/785989/campos_512_v4
+155/786048/campos_512_v4
+155/786054/campos_512_v4
+155/786075/campos_512_v4
+155/786131/campos_512_v4
+155/786219/campos_512_v4
+155/786232/campos_512_v4
+155/786388/campos_512_v4
+155/786483/campos_512_v4
+155/786517/campos_512_v4
+155/786585/campos_512_v4
+155/786697/campos_512_v4
+155/786719/campos_512_v4
+155/787030/campos_512_v4
+155/787092/campos_512_v4
+155/787093/campos_512_v4
+155/787229/campos_512_v4
+155/787336/campos_512_v4
+155/787404/campos_512_v4
+155/787511/campos_512_v4
+155/787522/campos_512_v4
+155/787574/campos_512_v4
+155/787610/campos_512_v4
+155/787686/campos_512_v4
+155/787782/campos_512_v4
+155/787784/campos_512_v4
+155/787792/campos_512_v4
+155/787809/campos_512_v4
+155/787894/campos_512_v4
+155/788015/campos_512_v4
+155/788152/campos_512_v4
+155/788235/campos_512_v4
+155/788240/campos_512_v4
+155/788244/campos_512_v4
+155/788283/campos_512_v4
+155/788423/campos_512_v4
+155/788439/campos_512_v4
+155/788464/campos_512_v4
+155/788510/campos_512_v4
+155/788556/campos_512_v4
+155/788560/campos_512_v4
+155/788584/campos_512_v4
+155/788605/campos_512_v4
+155/788644/campos_512_v4
+155/788682/campos_512_v4
+155/788684/campos_512_v4
+155/788747/campos_512_v4
+155/788920/campos_512_v4
+155/788929/campos_512_v4
+155/789015/campos_512_v4
+155/789141/campos_512_v4
+155/789211/campos_512_v4
+155/789297/campos_512_v4
+155/789429/campos_512_v4
+155/789616/campos_512_v4
+155/789752/campos_512_v4
+155/789825/campos_512_v4
+155/789903/campos_512_v4
+155/789913/campos_512_v4
+155/789975/campos_512_v4
+155/789985/campos_512_v4
+155/790001/campos_512_v4
+156/790012/campos_512_v4
+156/790075/campos_512_v4
+156/790119/campos_512_v4
+156/790133/campos_512_v4
+156/790181/campos_512_v4
+156/790269/campos_512_v4
+156/790451/campos_512_v4
+156/790477/campos_512_v4
+156/790547/campos_512_v4
+156/790765/campos_512_v4
+156/790834/campos_512_v4
+156/791022/campos_512_v4
+156/791208/campos_512_v4
+156/791324/campos_512_v4
+156/791431/campos_512_v4
+156/791437/campos_512_v4
+156/791462/campos_512_v4
+156/791478/campos_512_v4
+156/791568/campos_512_v4
+156/791574/campos_512_v4
+156/791645/campos_512_v4
+156/791796/campos_512_v4
+156/791877/campos_512_v4
+156/791933/campos_512_v4
+156/792021/campos_512_v4
+156/792173/campos_512_v4
+156/792196/campos_512_v4
+156/792249/campos_512_v4
+156/792357/campos_512_v4
+156/792424/campos_512_v4
+156/792520/campos_512_v4
+156/792563/campos_512_v4
+156/792768/campos_512_v4
+156/792939/campos_512_v4
+156/793132/campos_512_v4
+156/793283/campos_512_v4
+156/793356/campos_512_v4
+156/793463/campos_512_v4
+156/793509/campos_512_v4
+156/793516/campos_512_v4
+156/793529/campos_512_v4
+156/793717/campos_512_v4
+156/793722/campos_512_v4
+156/793804/campos_512_v4
+156/793823/campos_512_v4
+156/793831/campos_512_v4
+156/793857/campos_512_v4
+156/793864/campos_512_v4
+156/793875/campos_512_v4
+156/793896/campos_512_v4
+156/793997/campos_512_v4
+156/794085/campos_512_v4
+156/794104/campos_512_v4
+156/794162/campos_512_v4
+156/794301/campos_512_v4
+156/794352/campos_512_v4
+156/794358/campos_512_v4
+156/794391/campos_512_v4
+156/794413/campos_512_v4
+156/794450/campos_512_v4
+156/794560/campos_512_v4
+157/795023/campos_512_v4
+157/795311/campos_512_v4
+157/795353/campos_512_v4
+157/795517/campos_512_v4
+157/795550/campos_512_v4
+157/795597/campos_512_v4
+157/795708/campos_512_v4
+157/795834/campos_512_v4
+157/795838/campos_512_v4
+157/795866/campos_512_v4
+157/795988/campos_512_v4
+157/796024/campos_512_v4
+157/796033/campos_512_v4
+157/796372/campos_512_v4
+157/796410/campos_512_v4
+157/796426/campos_512_v4
+157/796470/campos_512_v4
+157/796577/campos_512_v4
+157/796652/campos_512_v4
+157/796711/campos_512_v4
+157/796760/campos_512_v4
+157/797028/campos_512_v4
+157/797048/campos_512_v4
+157/797069/campos_512_v4
+157/797081/campos_512_v4
+157/797123/campos_512_v4
+157/797167/campos_512_v4
+157/797224/campos_512_v4
+157/797407/campos_512_v4
+157/797498/campos_512_v4
+157/797534/campos_512_v4
+157/797536/campos_512_v4
+157/797545/campos_512_v4
+157/797603/campos_512_v4
+157/797680/campos_512_v4
+157/797729/campos_512_v4
+157/797813/campos_512_v4
+157/797884/campos_512_v4
+157/798087/campos_512_v4
+157/798118/campos_512_v4
+157/798177/campos_512_v4
+157/798225/campos_512_v4
+157/798267/campos_512_v4
+157/798304/campos_512_v4
+157/798334/campos_512_v4
+157/798335/campos_512_v4
+157/798450/campos_512_v4
+157/798606/campos_512_v4
+157/798726/campos_512_v4
+157/798754/campos_512_v4
+157/798821/campos_512_v4
+157/798870/campos_512_v4
+157/798915/campos_512_v4
+157/799021/campos_512_v4
+157/799081/campos_512_v4
+157/799095/campos_512_v4
+157/799541/campos_512_v4
+157/799681/campos_512_v4
+157/799701/campos_512_v4
+157/799765/campos_512_v4
+157/799803/campos_512_v4
+157/799830/campos_512_v4
+157/799924/campos_512_v4
+158/800062/campos_512_v4
+158/800133/campos_512_v4
+158/800169/campos_512_v4
+158/800338/campos_512_v4
+158/800451/campos_512_v4
+158/800511/campos_512_v4
+158/800584/campos_512_v4
+158/800654/campos_512_v4
+158/800902/campos_512_v4
+158/800905/campos_512_v4
+158/800917/campos_512_v4
+158/801054/campos_512_v4
+158/801155/campos_512_v4
+158/801331/campos_512_v4
+158/801349/campos_512_v4
+158/801370/campos_512_v4
+158/801447/campos_512_v4
+158/801591/campos_512_v4
+158/801613/campos_512_v4
+158/801700/campos_512_v4
+158/801773/campos_512_v4
+158/801783/campos_512_v4
+158/801834/campos_512_v4
+158/801903/campos_512_v4
+158/802010/campos_512_v4
+158/802032/campos_512_v4
+158/802155/campos_512_v4
+158/802200/campos_512_v4
+158/802390/campos_512_v4
+158/802550/campos_512_v4
+158/802586/campos_512_v4
+158/802591/campos_512_v4
+158/802592/campos_512_v4
+158/802645/campos_512_v4
+158/802739/campos_512_v4
+158/802841/campos_512_v4
+158/802866/campos_512_v4
+158/803136/campos_512_v4
+158/803180/campos_512_v4
+158/803221/campos_512_v4
+158/803233/campos_512_v4
+158/803273/campos_512_v4
+158/803434/campos_512_v4
+158/803535/campos_512_v4
+158/803648/campos_512_v4
+158/803721/campos_512_v4
+158/803766/campos_512_v4
+158/803795/campos_512_v4
+158/803826/campos_512_v4
+158/803952/campos_512_v4
+158/804017/campos_512_v4
+158/804018/campos_512_v4
+158/804150/campos_512_v4
+158/804443/campos_512_v4
+158/804648/campos_512_v4
+158/804682/campos_512_v4
+158/804856/campos_512_v4
+158/804975/campos_512_v4
+159/805120/campos_512_v4
+159/805137/campos_512_v4
+159/805157/campos_512_v4
+159/805303/campos_512_v4
+159/805305/campos_512_v4
+159/805340/campos_512_v4
+159/805488/campos_512_v4
+159/805569/campos_512_v4
+159/805570/campos_512_v4
+159/805605/campos_512_v4
+159/805787/campos_512_v4
+159/805857/campos_512_v4
+159/805887/campos_512_v4
+159/805899/campos_512_v4
+159/806011/campos_512_v4
+159/806079/campos_512_v4
+159/806280/campos_512_v4
+159/806358/campos_512_v4
+159/806416/campos_512_v4
+159/806593/campos_512_v4
+159/806677/campos_512_v4
+159/806686/campos_512_v4
+159/806707/campos_512_v4
+159/806753/campos_512_v4
+159/806846/campos_512_v4
+159/806981/campos_512_v4
+159/807026/campos_512_v4
+159/807109/campos_512_v4
+159/807171/campos_512_v4
+159/807195/campos_512_v4
+159/807273/campos_512_v4
+159/807348/campos_512_v4
+159/807362/campos_512_v4
+159/807492/campos_512_v4
+159/807636/campos_512_v4
+159/807692/campos_512_v4
+159/807809/campos_512_v4
+159/807901/campos_512_v4
+159/807964/campos_512_v4
+159/808149/campos_512_v4
+159/808251/campos_512_v4
+159/808289/campos_512_v4
+159/808394/campos_512_v4
+159/808415/campos_512_v4
+159/808431/campos_512_v4
+159/808503/campos_512_v4
+159/808651/campos_512_v4
+159/808668/campos_512_v4
+159/808718/campos_512_v4
+16/90099/campos_512_v4
+16/90321/campos_512_v4
+16/90508/campos_512_v4
+16/90659/campos_512_v4
+16/90690/campos_512_v4
+16/90719/campos_512_v4
+16/90735/campos_512_v4
+16/90916/campos_512_v4
+16/90938/campos_512_v4
+16/91275/campos_512_v4
+16/91330/campos_512_v4
+16/91421/campos_512_v4
+16/91429/campos_512_v4
+16/91450/campos_512_v4
+16/91543/campos_512_v4
+16/91627/campos_512_v4
+16/91985/campos_512_v4
+16/91995/campos_512_v4
+16/92064/campos_512_v4
+16/92103/campos_512_v4
+16/92164/campos_512_v4
+16/92298/campos_512_v4
+16/92306/campos_512_v4
+16/92345/campos_512_v4
+16/92373/campos_512_v4
+16/92457/campos_512_v4
+16/92492/campos_512_v4
+16/92761/campos_512_v4
+16/92872/campos_512_v4
+16/92995/campos_512_v4
+16/93055/campos_512_v4
+16/93189/campos_512_v4
+16/93669/campos_512_v4
+16/93671/campos_512_v4
+16/93687/campos_512_v4
+16/93709/campos_512_v4
+16/93718/campos_512_v4
+16/93747/campos_512_v4
+16/93781/campos_512_v4
+16/93939/campos_512_v4
+16/93980/campos_512_v4
+16/94088/campos_512_v4
+16/94119/campos_512_v4
+16/94691/campos_512_v4
+17/95095/campos_512_v4
+17/95508/campos_512_v4
+17/95520/campos_512_v4
+17/95595/campos_512_v4
+17/95766/campos_512_v4
+17/96011/campos_512_v4
+17/96026/campos_512_v4
+17/96149/campos_512_v4
+17/96159/campos_512_v4
+17/96182/campos_512_v4
+17/96217/campos_512_v4
+17/96280/campos_512_v4
+17/96456/campos_512_v4
+17/96593/campos_512_v4
+17/96603/campos_512_v4
+17/96765/campos_512_v4
+17/96873/campos_512_v4
+17/96960/campos_512_v4
+17/97040/campos_512_v4
+17/97125/campos_512_v4
+17/97182/campos_512_v4
+17/97263/campos_512_v4
+17/97283/campos_512_v4
+17/97300/campos_512_v4
+17/97311/campos_512_v4
+17/97494/campos_512_v4
+17/97633/campos_512_v4
+17/97657/campos_512_v4
+17/97684/campos_512_v4
+17/97705/campos_512_v4
+17/97730/campos_512_v4
+17/97735/campos_512_v4
+17/97986/campos_512_v4
+17/98376/campos_512_v4
+17/98448/campos_512_v4
+17/98478/campos_512_v4
+17/98874/campos_512_v4
+17/99108/campos_512_v4
+17/99231/campos_512_v4
+17/99395/campos_512_v4
+17/99569/campos_512_v4
+17/99624/campos_512_v4
+17/99734/campos_512_v4
+17/99744/campos_512_v4
+17/99808/campos_512_v4
+17/99984/campos_512_v4
+2/20242/campos_512_v4
+2/20439/campos_512_v4
+2/20657/campos_512_v4
+2/20797/campos_512_v4
+2/20903/campos_512_v4
+2/20958/campos_512_v4
+2/21032/campos_512_v4
+2/21221/campos_512_v4
+2/21484/campos_512_v4
+2/21574/campos_512_v4
+2/21645/campos_512_v4
+2/21691/campos_512_v4
+2/21802/campos_512_v4
+2/21945/campos_512_v4
+2/21971/campos_512_v4
+2/22023/campos_512_v4
+2/22262/campos_512_v4
+2/22443/campos_512_v4
+2/22522/campos_512_v4
+2/22617/campos_512_v4
+2/22728/campos_512_v4
+2/22734/campos_512_v4
+2/22831/campos_512_v4
+2/22870/campos_512_v4
+2/23372/campos_512_v4
+2/23559/campos_512_v4
+2/23563/campos_512_v4
+2/23833/campos_512_v4
+2/23835/campos_512_v4
+2/24159/campos_512_v4
+2/24237/campos_512_v4
+2/24307/campos_512_v4
+2/24310/campos_512_v4
+2/24367/campos_512_v4
+2/24512/campos_512_v4
+2/24904/campos_512_v4
+2/24955/campos_512_v4
+23/125046/campos_512_v4
+23/125188/campos_512_v4
+23/125193/campos_512_v4
+23/125364/campos_512_v4
+23/125380/campos_512_v4
+23/125654/campos_512_v4
+23/125957/campos_512_v4
+23/126157/campos_512_v4
+23/126196/campos_512_v4
+23/126203/campos_512_v4
+23/126400/campos_512_v4
+23/126729/campos_512_v4
+23/126852/campos_512_v4
+23/126875/campos_512_v4
+23/127040/campos_512_v4
+23/127293/campos_512_v4
+23/127677/campos_512_v4
+23/127911/campos_512_v4
+23/127922/campos_512_v4
+23/127983/campos_512_v4
+23/127985/campos_512_v4
+23/128341/campos_512_v4
+23/128440/campos_512_v4
+23/128624/campos_512_v4
+23/128694/campos_512_v4
+23/128770/campos_512_v4
+23/129171/campos_512_v4
+23/129255/campos_512_v4
+23/129335/campos_512_v4
+23/129419/campos_512_v4
+23/129426/campos_512_v4
+23/129458/campos_512_v4
+23/129529/campos_512_v4
+23/129698/campos_512_v4
+23/129776/campos_512_v4
+23/129782/campos_512_v4
+23/129923/campos_512_v4
+23/129951/campos_512_v4
+23/129963/campos_512_v4
+24/130081/campos_512_v4
+24/130167/campos_512_v4
+24/130181/campos_512_v4
+24/130226/campos_512_v4
+24/130319/campos_512_v4
+24/130477/campos_512_v4
+24/130791/campos_512_v4
+24/130964/campos_512_v4
+24/131058/campos_512_v4
+24/131112/campos_512_v4
+24/131120/campos_512_v4
+24/131424/campos_512_v4
+24/131440/campos_512_v4
+24/131584/campos_512_v4
+24/131674/campos_512_v4
+24/131934/campos_512_v4
+24/132037/campos_512_v4
+24/132087/campos_512_v4
+24/132123/campos_512_v4
+24/132235/campos_512_v4
+24/132598/campos_512_v4
+24/132728/campos_512_v4
+24/132878/campos_512_v4
+24/132881/campos_512_v4
+24/133066/campos_512_v4
+24/133190/campos_512_v4
+24/133221/campos_512_v4
+24/133242/campos_512_v4
+24/133262/campos_512_v4
+24/133311/campos_512_v4
+24/133356/campos_512_v4
+24/133363/campos_512_v4
+24/133485/campos_512_v4
+24/133736/campos_512_v4
+24/133775/campos_512_v4
+24/134016/campos_512_v4
+24/134402/campos_512_v4
+24/134453/campos_512_v4
+24/134517/campos_512_v4
+24/134755/campos_512_v4
+24/134869/campos_512_v4
+25/135026/campos_512_v4
+25/135312/campos_512_v4
+25/135322/campos_512_v4
+25/135407/campos_512_v4
+25/135508/campos_512_v4
+25/135510/campos_512_v4
+25/135722/campos_512_v4
+25/135747/campos_512_v4
+25/135805/campos_512_v4
+25/136123/campos_512_v4
+25/136266/campos_512_v4
+25/136445/campos_512_v4
+25/136515/campos_512_v4
+25/136660/campos_512_v4
+25/136703/campos_512_v4
+25/136756/campos_512_v4
+25/136943/campos_512_v4
+25/136944/campos_512_v4
+25/136962/campos_512_v4
+25/137064/campos_512_v4
+25/137117/campos_512_v4
+25/137175/campos_512_v4
+25/137234/campos_512_v4
+25/137523/campos_512_v4
+25/137757/campos_512_v4
+25/137868/campos_512_v4
+25/138063/campos_512_v4
+25/138093/campos_512_v4
+25/138220/campos_512_v4
+25/138228/campos_512_v4
+25/138350/campos_512_v4
+25/138406/campos_512_v4
+25/138491/campos_512_v4
+25/138544/campos_512_v4
+25/138732/campos_512_v4
+25/138831/campos_512_v4
+25/139187/campos_512_v4
+25/139270/campos_512_v4
+25/139276/campos_512_v4
+25/139299/campos_512_v4
+25/139316/campos_512_v4
+25/139438/campos_512_v4
+25/139725/campos_512_v4
+25/139748/campos_512_v4
+25/139833/campos_512_v4
+25/139924/campos_512_v4
+26/140128/campos_512_v4
+26/140261/campos_512_v4
+26/140390/campos_512_v4
+26/140405/campos_512_v4
+26/140553/campos_512_v4
+26/140564/campos_512_v4
+26/140597/campos_512_v4
+26/140705/campos_512_v4
+26/140771/campos_512_v4
+26/140901/campos_512_v4
+26/141006/campos_512_v4
+26/141164/campos_512_v4
+26/141338/campos_512_v4
+26/141446/campos_512_v4
+26/141493/campos_512_v4
+26/141629/campos_512_v4
+26/141637/campos_512_v4
+26/141675/campos_512_v4
+26/141715/campos_512_v4
+26/141717/campos_512_v4
+26/141800/campos_512_v4
+26/141810/campos_512_v4
+26/141926/campos_512_v4
+26/141981/campos_512_v4
+26/142052/campos_512_v4
+26/142074/campos_512_v4
+26/142440/campos_512_v4
+26/142494/campos_512_v4
+26/142529/campos_512_v4
+26/142585/campos_512_v4
+26/142642/campos_512_v4
+26/142664/campos_512_v4
+26/142769/campos_512_v4
+26/142791/campos_512_v4
+26/142828/campos_512_v4
+26/143021/campos_512_v4
+26/143079/campos_512_v4
+26/143080/campos_512_v4
+26/143148/campos_512_v4
+26/143411/campos_512_v4
+26/143511/campos_512_v4
+26/143518/campos_512_v4
+26/143545/campos_512_v4
+26/143625/campos_512_v4
+26/143697/campos_512_v4
+26/143787/campos_512_v4
+26/143799/campos_512_v4
+26/143871/campos_512_v4
+26/143909/campos_512_v4
+26/144138/campos_512_v4
+26/144152/campos_512_v4
+26/144594/campos_512_v4
+26/144672/campos_512_v4
+26/144704/campos_512_v4
+26/144893/campos_512_v4
+26/144969/campos_512_v4
+26/144992/campos_512_v4
+27/145105/campos_512_v4
+27/145163/campos_512_v4
+27/145229/campos_512_v4
+27/145327/campos_512_v4
+27/145377/campos_512_v4
+27/145510/campos_512_v4
+27/145679/campos_512_v4
+27/145921/campos_512_v4
+27/146137/campos_512_v4
+27/146179/campos_512_v4
+27/146533/campos_512_v4
+27/146542/campos_512_v4
+27/146621/campos_512_v4
+27/146655/campos_512_v4
+27/146851/campos_512_v4
+27/146875/campos_512_v4
+27/146918/campos_512_v4
+27/147139/campos_512_v4
+27/147196/campos_512_v4
+27/147330/campos_512_v4
+27/147390/campos_512_v4
+27/147776/campos_512_v4
+27/147818/campos_512_v4
+27/147819/campos_512_v4
+27/147824/campos_512_v4
+27/147881/campos_512_v4
+27/147906/campos_512_v4
+27/147926/campos_512_v4
+27/147928/campos_512_v4
+27/147990/campos_512_v4
+27/148044/campos_512_v4
+27/148181/campos_512_v4
+27/148247/campos_512_v4
+27/148319/campos_512_v4
+27/148321/campos_512_v4
+27/148437/campos_512_v4
+27/148538/campos_512_v4
+27/148631/campos_512_v4
+27/148668/campos_512_v4
+27/148775/campos_512_v4
+27/148895/campos_512_v4
+27/148915/campos_512_v4
+27/148929/campos_512_v4
+27/148947/campos_512_v4
+27/148969/campos_512_v4
+27/149056/campos_512_v4
+27/149066/campos_512_v4
+27/149086/campos_512_v4
+27/149175/campos_512_v4
+27/149327/campos_512_v4
+27/149421/campos_512_v4
+27/149441/campos_512_v4
+27/149490/campos_512_v4
+27/149542/campos_512_v4
+27/149583/campos_512_v4
+27/149601/campos_512_v4
+27/149647/campos_512_v4
+27/149687/campos_512_v4
+27/149748/campos_512_v4
+27/149963/campos_512_v4
+27/150001/campos_512_v4
+28/150016/campos_512_v4
+28/150051/campos_512_v4
+28/150263/campos_512_v4
+28/150296/campos_512_v4
+28/150391/campos_512_v4
+28/150433/campos_512_v4
+28/150446/campos_512_v4
+28/150486/campos_512_v4
+28/150578/campos_512_v4
+28/150640/campos_512_v4
+28/150791/campos_512_v4
+28/151009/campos_512_v4
+28/151283/campos_512_v4
+28/151350/campos_512_v4
+28/151421/campos_512_v4
+28/151501/campos_512_v4
+28/151542/campos_512_v4
+28/151602/campos_512_v4
+28/151851/campos_512_v4
+28/151854/campos_512_v4
+28/151887/campos_512_v4
+28/152000/campos_512_v4
+28/152228/campos_512_v4
+28/152232/campos_512_v4
+28/152265/campos_512_v4
+28/152533/campos_512_v4
+28/152556/campos_512_v4
+28/152579/campos_512_v4
+28/152768/campos_512_v4
+28/152844/campos_512_v4
+28/152885/campos_512_v4
+28/152901/campos_512_v4
+28/153035/campos_512_v4
+28/153125/campos_512_v4
+28/153194/campos_512_v4
+28/153211/campos_512_v4
+28/153253/campos_512_v4
+28/153447/campos_512_v4
+28/153628/campos_512_v4
+28/153755/campos_512_v4
+28/153868/campos_512_v4
+28/153890/campos_512_v4
+28/154025/campos_512_v4
+28/154161/campos_512_v4
+28/154282/campos_512_v4
+28/154459/campos_512_v4
+28/154625/campos_512_v4
+28/154628/campos_512_v4
+28/154663/campos_512_v4
+28/154927/campos_512_v4
+29/155051/campos_512_v4
+29/155491/campos_512_v4
+29/155586/campos_512_v4
+29/155669/campos_512_v4
+29/155762/campos_512_v4
+29/155765/campos_512_v4
+29/155802/campos_512_v4
+29/155852/campos_512_v4
+29/155904/campos_512_v4
+29/155910/campos_512_v4
+29/156122/campos_512_v4
+29/156131/campos_512_v4
+29/156203/campos_512_v4
+29/156228/campos_512_v4
+29/156286/campos_512_v4
+29/156320/campos_512_v4
+29/156560/campos_512_v4
+29/156592/campos_512_v4
+29/156618/campos_512_v4
+29/156734/campos_512_v4
+29/156781/campos_512_v4
+29/156993/campos_512_v4
+29/157011/campos_512_v4
+29/157036/campos_512_v4
+29/157145/campos_512_v4
+29/157209/campos_512_v4
+29/157301/campos_512_v4
+29/157346/campos_512_v4
+29/157568/campos_512_v4
+29/157586/campos_512_v4
+29/157778/campos_512_v4
+29/158005/campos_512_v4
+29/158050/campos_512_v4
+29/158310/campos_512_v4
+29/158345/campos_512_v4
+29/158549/campos_512_v4
+29/158565/campos_512_v4
+29/158566/campos_512_v4
+29/158693/campos_512_v4
+29/158762/campos_512_v4
+29/159208/campos_512_v4
+29/159250/campos_512_v4
+29/159376/campos_512_v4
+29/159392/campos_512_v4
+29/159418/campos_512_v4
+29/159445/campos_512_v4
+29/159534/campos_512_v4
+29/159557/campos_512_v4
+29/159585/campos_512_v4
+29/159650/campos_512_v4
+29/159819/campos_512_v4
+29/159866/campos_512_v4
+29/159880/campos_512_v4
+30/160059/campos_512_v4
+30/160076/campos_512_v4
+30/160152/campos_512_v4
+30/160195/campos_512_v4
+30/160367/campos_512_v4
+30/160460/campos_512_v4
+30/160469/campos_512_v4
+30/160473/campos_512_v4
+30/160507/campos_512_v4
+30/160578/campos_512_v4
+30/160594/campos_512_v4
+30/160679/campos_512_v4
+30/160711/campos_512_v4
+30/160716/campos_512_v4
+30/160844/campos_512_v4
+30/161024/campos_512_v4
+30/161089/campos_512_v4
+30/161256/campos_512_v4
+30/161327/campos_512_v4
+30/161672/campos_512_v4
+30/161737/campos_512_v4
+30/161773/campos_512_v4
+30/161932/campos_512_v4
+30/161955/campos_512_v4
+30/161975/campos_512_v4
+30/161989/campos_512_v4
+30/162008/campos_512_v4
+30/162371/campos_512_v4
+30/162485/campos_512_v4
+30/162568/campos_512_v4
+30/162710/campos_512_v4
+30/162750/campos_512_v4
+30/162931/campos_512_v4
+30/162977/campos_512_v4
+30/163057/campos_512_v4
+30/163076/campos_512_v4
+30/163139/campos_512_v4
+30/163310/campos_512_v4
+30/163348/campos_512_v4
+30/163561/campos_512_v4
+30/163696/campos_512_v4
+30/163898/campos_512_v4
+30/164059/campos_512_v4
+30/164189/campos_512_v4
+30/164201/campos_512_v4
+30/164308/campos_512_v4
+30/164614/campos_512_v4
+30/164672/campos_512_v4
+30/164682/campos_512_v4
+30/164831/campos_512_v4
+30/164891/campos_512_v4
+30/164934/campos_512_v4
+30/164940/campos_512_v4
+31/165005/campos_512_v4
+31/165125/campos_512_v4
+31/165296/campos_512_v4
+31/165425/campos_512_v4
+31/165559/campos_512_v4
+31/165579/campos_512_v4
+31/165743/campos_512_v4
+31/165782/campos_512_v4
+31/165857/campos_512_v4
+31/166045/campos_512_v4
+31/166120/campos_512_v4
+31/166161/campos_512_v4
+31/166329/campos_512_v4
+31/166440/campos_512_v4
+31/166777/campos_512_v4
+31/166785/campos_512_v4
+31/166891/campos_512_v4
+31/166927/campos_512_v4
+31/166938/campos_512_v4
+31/167004/campos_512_v4
+31/167071/campos_512_v4
+31/167178/campos_512_v4
+31/167187/campos_512_v4
+31/167636/campos_512_v4
+31/167943/campos_512_v4
+31/168012/campos_512_v4
+31/168140/campos_512_v4
+31/168229/campos_512_v4
+31/168345/campos_512_v4
+31/168520/campos_512_v4
+31/168565/campos_512_v4
+31/168696/campos_512_v4
+31/168935/campos_512_v4
+31/168970/campos_512_v4
+31/169208/campos_512_v4
+31/169246/campos_512_v4
+31/169254/campos_512_v4
+31/169461/campos_512_v4
+31/169496/campos_512_v4
+31/169812/campos_512_v4
+31/169959/campos_512_v4
+32/170040/campos_512_v4
+32/170102/campos_512_v4
+32/170120/campos_512_v4
+32/170157/campos_512_v4
+32/170176/campos_512_v4
+32/170226/campos_512_v4
+32/170471/campos_512_v4
+32/170482/campos_512_v4
+32/170568/campos_512_v4
+32/170591/campos_512_v4
+32/170694/campos_512_v4
+32/170791/campos_512_v4
+32/170926/campos_512_v4
+32/171004/campos_512_v4
+32/171165/campos_512_v4
+32/171315/campos_512_v4
+32/171338/campos_512_v4
+32/171340/campos_512_v4
+32/171361/campos_512_v4
+32/171541/campos_512_v4
+32/171669/campos_512_v4
+32/171831/campos_512_v4
+32/171875/campos_512_v4
+32/171952/campos_512_v4
+32/172024/campos_512_v4
+32/172076/campos_512_v4
+32/172084/campos_512_v4
+32/172144/campos_512_v4
+32/172216/campos_512_v4
+32/172451/campos_512_v4
+32/172575/campos_512_v4
+32/172592/campos_512_v4
+32/172603/campos_512_v4
+32/172763/campos_512_v4
+32/172846/campos_512_v4
+32/172892/campos_512_v4
+32/172925/campos_512_v4
+32/172930/campos_512_v4
+32/173140/campos_512_v4
+32/173333/campos_512_v4
+32/173399/campos_512_v4
+32/173456/campos_512_v4
+32/173606/campos_512_v4
+32/173658/campos_512_v4
+32/173711/campos_512_v4
+32/173713/campos_512_v4
+32/173928/campos_512_v4
+32/174019/campos_512_v4
+32/174054/campos_512_v4
+32/174055/campos_512_v4
+32/174075/campos_512_v4
+32/174215/campos_512_v4
+32/174572/campos_512_v4
+32/174797/campos_512_v4
+32/174919/campos_512_v4
+32/174948/campos_512_v4
+33/175195/campos_512_v4
+33/175334/campos_512_v4
+33/175551/campos_512_v4
+33/175570/campos_512_v4
+33/175590/campos_512_v4
+33/175656/campos_512_v4
+33/175715/campos_512_v4
+33/175744/campos_512_v4
+33/175890/campos_512_v4
+33/175971/campos_512_v4
+33/176092/campos_512_v4
+33/176113/campos_512_v4
+33/176116/campos_512_v4
+33/176150/campos_512_v4
+33/176304/campos_512_v4
+33/176791/campos_512_v4
+33/176881/campos_512_v4
+33/176944/campos_512_v4
+33/176961/campos_512_v4
+33/177257/campos_512_v4
+33/177447/campos_512_v4
+33/177628/campos_512_v4
+33/177647/campos_512_v4
+33/177702/campos_512_v4
+33/177753/campos_512_v4
+33/177763/campos_512_v4
+33/177856/campos_512_v4
+33/177949/campos_512_v4
+33/178002/campos_512_v4
+33/178104/campos_512_v4
+33/178271/campos_512_v4
+33/178607/campos_512_v4
+33/178619/campos_512_v4
+33/178749/campos_512_v4
+33/179041/campos_512_v4
+33/179143/campos_512_v4
+33/179152/campos_512_v4
+33/179273/campos_512_v4
+33/179358/campos_512_v4
+33/179571/campos_512_v4
+33/179778/campos_512_v4
+34/180405/campos_512_v4
+34/180552/campos_512_v4
+34/180555/campos_512_v4
+34/180570/campos_512_v4
+34/180709/campos_512_v4
+34/180729/campos_512_v4
+34/180794/campos_512_v4
+34/180932/campos_512_v4
+34/180960/campos_512_v4
+34/181077/campos_512_v4
+34/181235/campos_512_v4
+34/181275/campos_512_v4
+34/181349/campos_512_v4
+34/181478/campos_512_v4
+34/181550/campos_512_v4
+34/181600/campos_512_v4
+34/181682/campos_512_v4
+34/181805/campos_512_v4
+34/181871/campos_512_v4
+34/182024/campos_512_v4
+34/182641/campos_512_v4
+34/182672/campos_512_v4
+34/182721/campos_512_v4
+34/183014/campos_512_v4
+34/183039/campos_512_v4
+34/183090/campos_512_v4
+34/183553/campos_512_v4
+34/184157/campos_512_v4
+34/184241/campos_512_v4
+34/184252/campos_512_v4
+34/184297/campos_512_v4
+34/184347/campos_512_v4
+34/184686/campos_512_v4
+34/184815/campos_512_v4
+34/184862/campos_512_v4
+35/185109/campos_512_v4
+35/185128/campos_512_v4
+35/185171/campos_512_v4
+35/185310/campos_512_v4
+35/185388/campos_512_v4
+35/185410/campos_512_v4
+35/185477/campos_512_v4
+35/185717/campos_512_v4
+35/185844/campos_512_v4
+35/185959/campos_512_v4
+35/186008/campos_512_v4
+35/186015/campos_512_v4
+35/186113/campos_512_v4
+35/186119/campos_512_v4
+35/186269/campos_512_v4
+35/186314/campos_512_v4
+35/186709/campos_512_v4
+35/186761/campos_512_v4
+35/186858/campos_512_v4
+35/186870/campos_512_v4
+35/186992/campos_512_v4
+35/186997/campos_512_v4
+35/187088/campos_512_v4
+35/187124/campos_512_v4
+35/187137/campos_512_v4
+35/187179/campos_512_v4
+35/187180/campos_512_v4
+35/187462/campos_512_v4
+35/187470/campos_512_v4
+35/187573/campos_512_v4
+35/187688/campos_512_v4
+35/187749/campos_512_v4
+35/187861/campos_512_v4
+35/187900/campos_512_v4
+35/187988/campos_512_v4
+35/188012/campos_512_v4
+35/188058/campos_512_v4
+35/188121/campos_512_v4
+35/188198/campos_512_v4
+35/188285/campos_512_v4
+35/188338/campos_512_v4
+35/188348/campos_512_v4
+35/188649/campos_512_v4
+35/188702/campos_512_v4
+35/188797/campos_512_v4
+35/188862/campos_512_v4
+35/189004/campos_512_v4
+35/189042/campos_512_v4
+35/189058/campos_512_v4
+35/189145/campos_512_v4
+35/189533/campos_512_v4
+35/189682/campos_512_v4
+35/189721/campos_512_v4
+35/189771/campos_512_v4
+35/189997/campos_512_v4
+36/190039/campos_512_v4
+36/190170/campos_512_v4
+36/190292/campos_512_v4
+36/190451/campos_512_v4
+36/190461/campos_512_v4
+36/190525/campos_512_v4
+36/190541/campos_512_v4
+36/190554/campos_512_v4
+36/190648/campos_512_v4
+36/190666/campos_512_v4
+36/190772/campos_512_v4
+36/190790/campos_512_v4
+36/190815/campos_512_v4
+36/190987/campos_512_v4
+36/191036/campos_512_v4
+36/191227/campos_512_v4
+36/191234/campos_512_v4
+36/191292/campos_512_v4
+36/191346/campos_512_v4
+36/191642/campos_512_v4
+36/191930/campos_512_v4
+36/191976/campos_512_v4
+36/192072/campos_512_v4
+36/192114/campos_512_v4
+36/192168/campos_512_v4
+36/192248/campos_512_v4
+36/192365/campos_512_v4
+36/192400/campos_512_v4
+36/192432/campos_512_v4
+36/192468/campos_512_v4
+36/192486/campos_512_v4
+36/192489/campos_512_v4
+36/192584/campos_512_v4
+36/192663/campos_512_v4
+36/192730/campos_512_v4
+36/192807/campos_512_v4
+36/192928/campos_512_v4
+36/193004/campos_512_v4
+36/193017/campos_512_v4
+36/193075/campos_512_v4
+36/193133/campos_512_v4
+36/193178/campos_512_v4
+36/193250/campos_512_v4
+36/193443/campos_512_v4
+36/193480/campos_512_v4
+36/193708/campos_512_v4
+36/193738/campos_512_v4
+36/193742/campos_512_v4
+36/194145/campos_512_v4
+36/194186/campos_512_v4
+36/194247/campos_512_v4
+36/194589/campos_512_v4
+36/194648/campos_512_v4
+36/194683/campos_512_v4
+36/194756/campos_512_v4
+36/194800/campos_512_v4
+36/194830/campos_512_v4
+36/194867/campos_512_v4
+36/194980/campos_512_v4
+36/194988/campos_512_v4
+37/195272/campos_512_v4
+37/195279/campos_512_v4
+37/195298/campos_512_v4
+37/195315/campos_512_v4
+37/195328/campos_512_v4
+37/195399/campos_512_v4
+37/195525/campos_512_v4
+37/195533/campos_512_v4
+37/195698/campos_512_v4
+37/195797/campos_512_v4
+37/195835/campos_512_v4
+37/195901/campos_512_v4
+37/196034/campos_512_v4
+37/196057/campos_512_v4
+37/196233/campos_512_v4
+37/196293/campos_512_v4
+37/196331/campos_512_v4
+37/196336/campos_512_v4
+37/196951/campos_512_v4
+37/196952/campos_512_v4
+37/197012/campos_512_v4
+37/197132/campos_512_v4
+37/197252/campos_512_v4
+37/197361/campos_512_v4
+37/197548/campos_512_v4
+37/197571/campos_512_v4
+37/197594/campos_512_v4
+37/197717/campos_512_v4
+37/197899/campos_512_v4
+37/197945/campos_512_v4
+37/197948/campos_512_v4
+37/197982/campos_512_v4
+37/197992/campos_512_v4
+37/198490/campos_512_v4
+37/198539/campos_512_v4
+37/198704/campos_512_v4
+37/198800/campos_512_v4
+37/198896/campos_512_v4
+37/198907/campos_512_v4
+37/198960/campos_512_v4
+37/198966/campos_512_v4
+37/199246/campos_512_v4
+37/199276/campos_512_v4
+37/199462/campos_512_v4
+37/199465/campos_512_v4
+37/199477/campos_512_v4
+37/199564/campos_512_v4
+37/199591/campos_512_v4
+37/199623/campos_512_v4
+37/199625/campos_512_v4
+37/199821/campos_512_v4
+37/199945/campos_512_v4
+38/200078/campos_512_v4
+38/200086/campos_512_v4
+38/200410/campos_512_v4
+38/200560/campos_512_v4
+38/200768/campos_512_v4
+38/200769/campos_512_v4
+38/200898/campos_512_v4
+38/200905/campos_512_v4
+38/200937/campos_512_v4
+38/201036/campos_512_v4
+38/201091/campos_512_v4
+38/201137/campos_512_v4
+38/201312/campos_512_v4
+38/201339/campos_512_v4
+38/201357/campos_512_v4
+38/201413/campos_512_v4
+38/201442/campos_512_v4
+38/201475/campos_512_v4
+38/201531/campos_512_v4
+38/201719/campos_512_v4
+38/201732/campos_512_v4
+38/201855/campos_512_v4
+38/201911/campos_512_v4
+38/201983/campos_512_v4
+38/201992/campos_512_v4
+38/202203/campos_512_v4
+38/202206/campos_512_v4
+38/202291/campos_512_v4
+38/202341/campos_512_v4
+38/202368/campos_512_v4
+38/202475/campos_512_v4
+38/202500/campos_512_v4
+38/202538/campos_512_v4
+38/202607/campos_512_v4
+38/202623/campos_512_v4
+38/202694/campos_512_v4
+38/202709/campos_512_v4
+38/202882/campos_512_v4
+38/202883/campos_512_v4
+38/202891/campos_512_v4
+38/202958/campos_512_v4
+38/203045/campos_512_v4
+38/203047/campos_512_v4
+38/203198/campos_512_v4
+38/203243/campos_512_v4
+38/203547/campos_512_v4
+38/203714/campos_512_v4
+38/203818/campos_512_v4
+38/203850/campos_512_v4
+38/203858/campos_512_v4
+38/203936/campos_512_v4
+38/204015/campos_512_v4
+38/204201/campos_512_v4
+38/204472/campos_512_v4
+38/204629/campos_512_v4
+38/204655/campos_512_v4
+38/204766/campos_512_v4
+38/204809/campos_512_v4
+38/204834/campos_512_v4
+38/204969/campos_512_v4
+38/205000/campos_512_v4
+4/30002/campos_512_v4
+4/30016/campos_512_v4
+4/30050/campos_512_v4
+4/30154/campos_512_v4
+4/30167/campos_512_v4
+4/30207/campos_512_v4
+4/30382/campos_512_v4
+4/30445/campos_512_v4
+4/30611/campos_512_v4
+4/30641/campos_512_v4
+4/30730/campos_512_v4
+4/30806/campos_512_v4
+4/30876/campos_512_v4
+4/30906/campos_512_v4
+4/31160/campos_512_v4
+4/31244/campos_512_v4
+4/31246/campos_512_v4
+4/31329/campos_512_v4
+4/31416/campos_512_v4
+4/31932/campos_512_v4
+4/32013/campos_512_v4
+4/32056/campos_512_v4
+4/32134/campos_512_v4
+4/32138/campos_512_v4
+4/32311/campos_512_v4
+4/32353/campos_512_v4
+4/32369/campos_512_v4
+4/32617/campos_512_v4
+4/32619/campos_512_v4
+4/32808/campos_512_v4
+4/33010/campos_512_v4
+4/33168/campos_512_v4
+4/33336/campos_512_v4
+4/33349/campos_512_v4
+4/33357/campos_512_v4
+4/33386/campos_512_v4
+4/33545/campos_512_v4
+4/33606/campos_512_v4
+4/33639/campos_512_v4
+4/33849/campos_512_v4
+4/33927/campos_512_v4
+4/33964/campos_512_v4
+4/34018/campos_512_v4
+4/34347/campos_512_v4
+4/34450/campos_512_v4
+4/34495/campos_512_v4
+4/34798/campos_512_v4
+4/34802/campos_512_v4
+4/34827/campos_512_v4
+4/34888/campos_512_v4
+4/34999/campos_512_v4
+40/210089/campos_512_v4
+40/210129/campos_512_v4
+40/210268/campos_512_v4
+40/210286/campos_512_v4
+40/210293/campos_512_v4
+40/210329/campos_512_v4
+40/210540/campos_512_v4
+40/210572/campos_512_v4
+40/210650/campos_512_v4
+40/210756/campos_512_v4
+40/210851/campos_512_v4
+40/210872/campos_512_v4
+40/210911/campos_512_v4
+40/210971/campos_512_v4
+40/211209/campos_512_v4
+40/211287/campos_512_v4
+40/211395/campos_512_v4
+40/211407/campos_512_v4
+40/211488/campos_512_v4
+40/211574/campos_512_v4
+40/211578/campos_512_v4
+40/211606/campos_512_v4
+40/211625/campos_512_v4
+40/211691/campos_512_v4
+40/211700/campos_512_v4
+40/211703/campos_512_v4
+40/211705/campos_512_v4
+40/211753/campos_512_v4
+40/211781/campos_512_v4
+40/211812/campos_512_v4
+40/211855/campos_512_v4
+40/211912/campos_512_v4
+40/211957/campos_512_v4
+40/211971/campos_512_v4
+40/212025/campos_512_v4
+40/212108/campos_512_v4
+40/212116/campos_512_v4
+40/212134/campos_512_v4
+40/212162/campos_512_v4
+40/212267/campos_512_v4
+40/212367/campos_512_v4
+40/212417/campos_512_v4
+40/212421/campos_512_v4
+40/212615/campos_512_v4
+40/212633/campos_512_v4
+40/212824/campos_512_v4
+40/212828/campos_512_v4
+40/212893/campos_512_v4
+40/212930/campos_512_v4
+40/213072/campos_512_v4
+40/213102/campos_512_v4
+40/213206/campos_512_v4
+40/213214/campos_512_v4
+40/213487/campos_512_v4
+40/213569/campos_512_v4
+40/213584/campos_512_v4
+40/213628/campos_512_v4
+40/213808/campos_512_v4
+40/213879/campos_512_v4
+40/213906/campos_512_v4
+40/214042/campos_512_v4
+40/214343/campos_512_v4
+40/214439/campos_512_v4
+40/214481/campos_512_v4
+40/214487/campos_512_v4
+40/214489/campos_512_v4
+40/214521/campos_512_v4
+40/214796/campos_512_v4
+40/214878/campos_512_v4
+40/214901/campos_512_v4
+40/214985/campos_512_v4
+41/215168/campos_512_v4
+41/215195/campos_512_v4
+41/215218/campos_512_v4
+41/215422/campos_512_v4
+41/215563/campos_512_v4
+41/215575/campos_512_v4
+41/215774/campos_512_v4
+41/215826/campos_512_v4
+41/215983/campos_512_v4
+41/215990/campos_512_v4
+41/216034/campos_512_v4
+41/216056/campos_512_v4
+41/216178/campos_512_v4
+41/216199/campos_512_v4
+41/216266/campos_512_v4
+41/216290/campos_512_v4
+41/216328/campos_512_v4
+41/216336/campos_512_v4
+41/216398/campos_512_v4
+41/216600/campos_512_v4
+41/216733/campos_512_v4
+41/216760/campos_512_v4
+41/216814/campos_512_v4
+41/216836/campos_512_v4
+41/216927/campos_512_v4
+41/217141/campos_512_v4
+41/217144/campos_512_v4
+41/217164/campos_512_v4
+41/217236/campos_512_v4
+41/217238/campos_512_v4
+41/217253/campos_512_v4
+41/217270/campos_512_v4
+41/217280/campos_512_v4
+41/217357/campos_512_v4
+41/217452/campos_512_v4
+41/217457/campos_512_v4
+41/217528/campos_512_v4
+41/217535/campos_512_v4
+41/217562/campos_512_v4
+41/217681/campos_512_v4
+41/217956/campos_512_v4
+41/217998/campos_512_v4
+41/218000/campos_512_v4
+41/218018/campos_512_v4
+41/218059/campos_512_v4
+41/218170/campos_512_v4
+41/218238/campos_512_v4
+41/218254/campos_512_v4
+41/218410/campos_512_v4
+41/218493/campos_512_v4
+41/218508/campos_512_v4
+41/218653/campos_512_v4
+41/218662/campos_512_v4
+41/218696/campos_512_v4
+41/218817/campos_512_v4
+41/218988/campos_512_v4
+41/219020/campos_512_v4
+41/219075/campos_512_v4
+41/219400/campos_512_v4
+41/219497/campos_512_v4
+41/219522/campos_512_v4
+41/219529/campos_512_v4
+41/219534/campos_512_v4
+41/219549/campos_512_v4
+41/219606/campos_512_v4
+41/219777/campos_512_v4
+41/219855/campos_512_v4
+41/219988/campos_512_v4
+41/219991/campos_512_v4
+42/220025/campos_512_v4
+42/220049/campos_512_v4
+42/220138/campos_512_v4
+42/220167/campos_512_v4
+42/220295/campos_512_v4
+42/220356/campos_512_v4
+42/220372/campos_512_v4
+42/220449/campos_512_v4
+42/220609/campos_512_v4
+42/220651/campos_512_v4
+42/220672/campos_512_v4
+42/220715/campos_512_v4
+42/220857/campos_512_v4
+42/221029/campos_512_v4
+42/221256/campos_512_v4
+42/221610/campos_512_v4
+42/221812/campos_512_v4
+42/221892/campos_512_v4
+42/221938/campos_512_v4
+42/222093/campos_512_v4
+42/222154/campos_512_v4
+42/222315/campos_512_v4
+42/222390/campos_512_v4
+42/222414/campos_512_v4
+42/222438/campos_512_v4
+42/222494/campos_512_v4
+42/222557/campos_512_v4
+42/222737/campos_512_v4
+42/222825/campos_512_v4
+42/222857/campos_512_v4
+42/222916/campos_512_v4
+42/223055/campos_512_v4
+42/223192/campos_512_v4
+42/223395/campos_512_v4
+42/223630/campos_512_v4
+42/223641/campos_512_v4
+42/223655/campos_512_v4
+42/223682/campos_512_v4
+42/223701/campos_512_v4
+42/223824/campos_512_v4
+42/223826/campos_512_v4
+42/223869/campos_512_v4
+42/223876/campos_512_v4
+42/223971/campos_512_v4
+42/223972/campos_512_v4
+42/224159/campos_512_v4
+42/224177/campos_512_v4
+42/224253/campos_512_v4
+42/224387/campos_512_v4
+42/224425/campos_512_v4
+42/224482/campos_512_v4
+42/224504/campos_512_v4
+42/224508/campos_512_v4
+42/224513/campos_512_v4
+42/224675/campos_512_v4
+42/224696/campos_512_v4
+42/224710/campos_512_v4
+42/224716/campos_512_v4
+42/224784/campos_512_v4
+42/224850/campos_512_v4
+42/224896/campos_512_v4
+42/224901/campos_512_v4
+43/225085/campos_512_v4
+43/225093/campos_512_v4
+43/225099/campos_512_v4
+43/225266/campos_512_v4
+43/225271/campos_512_v4
+43/225456/campos_512_v4
+43/225537/campos_512_v4
+43/225547/campos_512_v4
+43/225553/campos_512_v4
+43/225929/campos_512_v4
+43/225992/campos_512_v4
+43/226192/campos_512_v4
+43/226272/campos_512_v4
+43/226608/campos_512_v4
+43/226641/campos_512_v4
+43/226695/campos_512_v4
+43/226736/campos_512_v4
+43/226760/campos_512_v4
+43/226840/campos_512_v4
+43/226842/campos_512_v4
+43/226882/campos_512_v4
+43/226887/campos_512_v4
+43/227025/campos_512_v4
+43/227029/campos_512_v4
+43/227094/campos_512_v4
+43/227391/campos_512_v4
+43/227413/campos_512_v4
+43/227499/campos_512_v4
+43/227500/campos_512_v4
+43/227549/campos_512_v4
+43/227569/campos_512_v4
+43/227586/campos_512_v4
+43/227695/campos_512_v4
+43/227697/campos_512_v4
+43/227822/campos_512_v4
+43/227863/campos_512_v4
+43/228005/campos_512_v4
+43/228157/campos_512_v4
+43/228650/campos_512_v4
+43/228653/campos_512_v4
+43/228791/campos_512_v4
+43/229098/campos_512_v4
+43/229113/campos_512_v4
+43/229214/campos_512_v4
+43/229400/campos_512_v4
+43/229752/campos_512_v4
+43/229807/campos_512_v4
+43/229837/campos_512_v4
+43/229901/campos_512_v4
+43/229940/campos_512_v4
+43/229954/campos_512_v4
+44/230007/campos_512_v4
+44/230032/campos_512_v4
+44/230136/campos_512_v4
+44/230164/campos_512_v4
+44/230259/campos_512_v4
+44/230404/campos_512_v4
+44/230476/campos_512_v4
+44/230791/campos_512_v4
+44/230978/campos_512_v4
+44/230990/campos_512_v4
+44/231133/campos_512_v4
+44/231378/campos_512_v4
+44/231449/campos_512_v4
+44/231655/campos_512_v4
+44/231674/campos_512_v4
+44/231681/campos_512_v4
+44/231923/campos_512_v4
+44/231936/campos_512_v4
+44/231984/campos_512_v4
+44/231995/campos_512_v4
+44/232077/campos_512_v4
+44/232136/campos_512_v4
+44/232155/campos_512_v4
+44/232299/campos_512_v4
+44/232348/campos_512_v4
+44/232502/campos_512_v4
+44/232546/campos_512_v4
+44/232548/campos_512_v4
+44/232840/campos_512_v4
+44/232955/campos_512_v4
+44/233020/campos_512_v4
+44/233089/campos_512_v4
+44/233235/campos_512_v4
+44/233375/campos_512_v4
+44/233391/campos_512_v4
+44/233395/campos_512_v4
+44/233457/campos_512_v4
+44/233611/campos_512_v4
+44/233660/campos_512_v4
+44/233708/campos_512_v4
+44/234062/campos_512_v4
+44/234066/campos_512_v4
+44/234205/campos_512_v4
+44/234211/campos_512_v4
+44/234253/campos_512_v4
+44/234293/campos_512_v4
+44/234311/campos_512_v4
+44/234490/campos_512_v4
+44/234739/campos_512_v4
+44/234755/campos_512_v4
+44/234808/campos_512_v4
+44/234862/campos_512_v4
+44/234966/campos_512_v4
+44/234968/campos_512_v4
+45/235053/campos_512_v4
+45/235104/campos_512_v4
+45/235160/campos_512_v4
+45/235189/campos_512_v4
+45/235231/campos_512_v4
+45/235247/campos_512_v4
+45/235259/campos_512_v4
+45/235266/campos_512_v4
+45/235416/campos_512_v4
+45/235468/campos_512_v4
+45/235503/campos_512_v4
+45/235613/campos_512_v4
+45/235790/campos_512_v4
+45/235807/campos_512_v4
+45/235872/campos_512_v4
+45/235893/campos_512_v4
+45/235961/campos_512_v4
+45/236022/campos_512_v4
+45/236076/campos_512_v4
+45/236083/campos_512_v4
+45/236192/campos_512_v4
+45/236237/campos_512_v4
+45/236276/campos_512_v4
+45/236335/campos_512_v4
+45/236420/campos_512_v4
+45/236455/campos_512_v4
+45/236547/campos_512_v4
+45/236612/campos_512_v4
+45/236675/campos_512_v4
+45/236748/campos_512_v4
+45/236910/campos_512_v4
+45/236989/campos_512_v4
+45/237040/campos_512_v4
+45/237052/campos_512_v4
+45/237069/campos_512_v4
+45/237077/campos_512_v4
+45/237098/campos_512_v4
+45/237146/campos_512_v4
+45/237160/campos_512_v4
+45/237204/campos_512_v4
+45/237340/campos_512_v4
+45/237375/campos_512_v4
+45/237386/campos_512_v4
+45/237462/campos_512_v4
+45/237474/campos_512_v4
+45/237476/campos_512_v4
+45/237486/campos_512_v4
+45/237576/campos_512_v4
+45/237752/campos_512_v4
+45/237793/campos_512_v4
+45/237794/campos_512_v4
+45/238036/campos_512_v4
+45/238186/campos_512_v4
+45/238229/campos_512_v4
+45/238232/campos_512_v4
+45/238296/campos_512_v4
+45/238515/campos_512_v4
+45/238636/campos_512_v4
+45/238655/campos_512_v4
+45/238866/campos_512_v4
+45/238923/campos_512_v4
+45/239057/campos_512_v4
+45/239103/campos_512_v4
+45/239139/campos_512_v4
+45/239194/campos_512_v4
+45/239263/campos_512_v4
+45/239361/campos_512_v4
+45/239495/campos_512_v4
+45/239623/campos_512_v4
+45/239649/campos_512_v4
+45/239655/campos_512_v4
+45/239739/campos_512_v4
+45/239740/campos_512_v4
+45/239824/campos_512_v4
+45/239875/campos_512_v4
+46/240021/campos_512_v4
+46/240254/campos_512_v4
+46/240304/campos_512_v4
+46/240378/campos_512_v4
+46/240395/campos_512_v4
+46/240411/campos_512_v4
+46/240480/campos_512_v4
+46/240506/campos_512_v4
+46/240572/campos_512_v4
+46/240642/campos_512_v4
+46/240710/campos_512_v4
+46/240713/campos_512_v4
+46/240759/campos_512_v4
+46/240814/campos_512_v4
+46/240825/campos_512_v4
+46/240829/campos_512_v4
+46/240881/campos_512_v4
+46/240887/campos_512_v4
+46/241037/campos_512_v4
+46/241041/campos_512_v4
+46/241323/campos_512_v4
+46/241346/campos_512_v4
+46/241372/campos_512_v4
+46/241393/campos_512_v4
+46/241424/campos_512_v4
+46/241546/campos_512_v4
+46/241593/campos_512_v4
+46/241672/campos_512_v4
+46/241814/campos_512_v4
+46/241816/campos_512_v4
+46/241821/campos_512_v4
+46/241848/campos_512_v4
+46/242074/campos_512_v4
+46/242183/campos_512_v4
+46/242285/campos_512_v4
+46/242402/campos_512_v4
+46/242609/campos_512_v4
+46/242730/campos_512_v4
+46/242801/campos_512_v4
+46/242936/campos_512_v4
+46/242978/campos_512_v4
+46/243008/campos_512_v4
+46/243128/campos_512_v4
+46/243196/campos_512_v4
+46/243335/campos_512_v4
+46/243371/campos_512_v4
+46/243444/campos_512_v4
+46/243738/campos_512_v4
+46/243771/campos_512_v4
+46/243857/campos_512_v4
+46/244133/campos_512_v4
+46/244178/campos_512_v4
+46/244218/campos_512_v4
+46/244237/campos_512_v4
+46/244277/campos_512_v4
+46/244278/campos_512_v4
+46/244287/campos_512_v4
+46/244330/campos_512_v4
+46/244388/campos_512_v4
+46/244396/campos_512_v4
+46/244520/campos_512_v4
+46/244767/campos_512_v4
+46/244853/campos_512_v4
+46/244934/campos_512_v4
+47/245047/campos_512_v4
+47/245107/campos_512_v4
+47/245113/campos_512_v4
+47/245222/campos_512_v4
+47/245302/campos_512_v4
+47/245510/campos_512_v4
+47/245544/campos_512_v4
+47/245545/campos_512_v4
+47/245743/campos_512_v4
+47/245914/campos_512_v4
+47/245990/campos_512_v4
+47/246058/campos_512_v4
+47/246069/campos_512_v4
+47/246127/campos_512_v4
+47/246162/campos_512_v4
+47/246230/campos_512_v4
+47/246314/campos_512_v4
+47/246347/campos_512_v4
+47/246446/campos_512_v4
+47/246453/campos_512_v4
+47/246472/campos_512_v4
+47/246519/campos_512_v4
+47/246524/campos_512_v4
+47/246539/campos_512_v4
+47/246662/campos_512_v4
+47/246761/campos_512_v4
+47/246810/campos_512_v4
+47/246812/campos_512_v4
+47/246825/campos_512_v4
+47/246865/campos_512_v4
+47/246909/campos_512_v4
+47/246926/campos_512_v4
+47/247012/campos_512_v4
+47/247072/campos_512_v4
+47/247164/campos_512_v4
+47/247326/campos_512_v4
+47/247332/campos_512_v4
+47/247627/campos_512_v4
+47/247638/campos_512_v4
+47/247660/campos_512_v4
+47/247764/campos_512_v4
+47/247860/campos_512_v4
+47/247878/campos_512_v4
+47/247922/campos_512_v4
+47/247953/campos_512_v4
+47/247956/campos_512_v4
+47/247975/campos_512_v4
+47/247980/campos_512_v4
+47/247989/campos_512_v4
+47/247994/campos_512_v4
+47/248041/campos_512_v4
+47/248073/campos_512_v4
+47/248095/campos_512_v4
+47/248115/campos_512_v4
+47/248157/campos_512_v4
+47/248380/campos_512_v4
+47/248402/campos_512_v4
+47/248487/campos_512_v4
+47/248637/campos_512_v4
+47/248639/campos_512_v4
+47/248853/campos_512_v4
+47/249014/campos_512_v4
+47/249041/campos_512_v4
+47/249191/campos_512_v4
+47/249281/campos_512_v4
+47/249286/campos_512_v4
+47/249349/campos_512_v4
+47/249422/campos_512_v4
+47/249473/campos_512_v4
+47/249509/campos_512_v4
+47/249558/campos_512_v4
+47/249591/campos_512_v4
+47/249734/campos_512_v4
+47/249792/campos_512_v4
+47/249839/campos_512_v4
+47/249949/campos_512_v4
+48/250008/campos_512_v4
+48/250128/campos_512_v4
+48/250164/campos_512_v4
+48/250215/campos_512_v4
+48/250271/campos_512_v4
+48/250448/campos_512_v4
+48/250546/campos_512_v4
+48/250571/campos_512_v4
+48/250616/campos_512_v4
+48/250796/campos_512_v4
+48/251120/campos_512_v4
+48/251161/campos_512_v4
+48/251163/campos_512_v4
+48/251247/campos_512_v4
+48/251251/campos_512_v4
+48/251277/campos_512_v4
+48/251430/campos_512_v4
+48/251759/campos_512_v4
+48/251908/campos_512_v4
+48/251979/campos_512_v4
+48/252116/campos_512_v4
+48/252151/campos_512_v4
+48/252275/campos_512_v4
+48/252281/campos_512_v4
+48/252315/campos_512_v4
+48/252345/campos_512_v4
+48/252354/campos_512_v4
+48/252373/campos_512_v4
+48/252387/campos_512_v4
+48/252400/campos_512_v4
+48/252443/campos_512_v4
+48/252468/campos_512_v4
+48/252533/campos_512_v4
+48/252581/campos_512_v4
+48/252632/campos_512_v4
+48/252707/campos_512_v4
+48/252795/campos_512_v4
+48/252891/campos_512_v4
+48/253007/campos_512_v4
+48/253072/campos_512_v4
+48/253274/campos_512_v4
+48/253275/campos_512_v4
+48/253341/campos_512_v4
+48/253432/campos_512_v4
+48/253476/campos_512_v4
+48/253500/campos_512_v4
+48/253536/campos_512_v4
+48/253586/campos_512_v4
+48/253682/campos_512_v4
+48/253712/campos_512_v4
+48/253717/campos_512_v4
+48/253737/campos_512_v4
+48/253757/campos_512_v4
+48/253808/campos_512_v4
+48/253853/campos_512_v4
+48/253877/campos_512_v4
+48/253898/campos_512_v4
+48/254038/campos_512_v4
+48/254070/campos_512_v4
+48/254284/campos_512_v4
+48/254309/campos_512_v4
+48/254400/campos_512_v4
+48/254424/campos_512_v4
+48/254555/campos_512_v4
+48/254801/campos_512_v4
+48/254879/campos_512_v4
+48/254976/campos_512_v4
+49/255079/campos_512_v4
+49/255468/campos_512_v4
+49/255545/campos_512_v4
+49/255633/campos_512_v4
+49/255676/campos_512_v4
+49/255678/campos_512_v4
+49/255838/campos_512_v4
+49/255866/campos_512_v4
+49/255891/campos_512_v4
+49/255927/campos_512_v4
+49/256157/campos_512_v4
+49/256193/campos_512_v4
+49/256366/campos_512_v4
+49/256518/campos_512_v4
+49/256537/campos_512_v4
+49/256547/campos_512_v4
+49/256667/campos_512_v4
+49/256709/campos_512_v4
+49/256923/campos_512_v4
+49/256983/campos_512_v4
+49/257077/campos_512_v4
+49/257155/campos_512_v4
+49/257254/campos_512_v4
+49/257287/campos_512_v4
+49/257546/campos_512_v4
+49/257548/campos_512_v4
+49/257643/campos_512_v4
+49/257695/campos_512_v4
+49/257708/campos_512_v4
+49/257730/campos_512_v4
+49/257736/campos_512_v4
+49/257759/campos_512_v4
+49/257787/campos_512_v4
+49/257801/campos_512_v4
+49/257874/campos_512_v4
+49/257910/campos_512_v4
+49/257916/campos_512_v4
+49/257965/campos_512_v4
+49/257982/campos_512_v4
+49/258077/campos_512_v4
+49/258112/campos_512_v4
+49/258257/campos_512_v4
+49/258374/campos_512_v4
+49/258454/campos_512_v4
+49/258500/campos_512_v4
+49/258551/campos_512_v4
+49/258656/campos_512_v4
+49/258797/campos_512_v4
+49/258852/campos_512_v4
+49/259025/campos_512_v4
+49/259070/campos_512_v4
+49/259071/campos_512_v4
+49/259123/campos_512_v4
+49/259171/campos_512_v4
+49/259273/campos_512_v4
+49/259431/campos_512_v4
+49/259596/campos_512_v4
+49/259600/campos_512_v4
+49/259647/campos_512_v4
+49/259908/campos_512_v4
+5/35158/campos_512_v4
+5/35209/campos_512_v4
+5/35255/campos_512_v4
+5/35362/campos_512_v4
+5/35405/campos_512_v4
+5/35567/campos_512_v4
+5/35617/campos_512_v4
+5/35772/campos_512_v4
+5/35806/campos_512_v4
+5/35815/campos_512_v4
+5/36011/campos_512_v4
+5/36066/campos_512_v4
+5/36133/campos_512_v4
+5/36175/campos_512_v4
+5/36205/campos_512_v4
+5/36210/campos_512_v4
+5/36233/campos_512_v4
+5/36833/campos_512_v4
+5/37041/campos_512_v4
+5/37071/campos_512_v4
+5/37186/campos_512_v4
+5/37307/campos_512_v4
+5/37327/campos_512_v4
+5/37385/campos_512_v4
+5/37912/campos_512_v4
+5/37976/campos_512_v4
+5/38099/campos_512_v4
+5/38245/campos_512_v4
+5/38275/campos_512_v4
+5/38378/campos_512_v4
+5/38435/campos_512_v4
+5/38456/campos_512_v4
+5/38542/campos_512_v4
+5/38755/campos_512_v4
+5/38832/campos_512_v4
+5/39007/campos_512_v4
+5/39445/campos_512_v4
+5/39450/campos_512_v4
+5/39498/campos_512_v4
+5/39901/campos_512_v4
+5/39913/campos_512_v4
+5/39927/campos_512_v4
+5/39930/campos_512_v4
+5/39966/campos_512_v4
+5/39984/campos_512_v4
+5/39990/campos_512_v4
+50/260022/campos_512_v4
+50/260231/campos_512_v4
+50/260302/campos_512_v4
+50/260370/campos_512_v4
+50/260437/campos_512_v4
+50/260743/campos_512_v4
+50/260927/campos_512_v4
+50/261093/campos_512_v4
+50/261136/campos_512_v4
+50/261140/campos_512_v4
+50/261192/campos_512_v4
+50/261285/campos_512_v4
+50/261310/campos_512_v4
+50/261405/campos_512_v4
+50/261714/campos_512_v4
+50/261758/campos_512_v4
+50/261825/campos_512_v4
+50/261886/campos_512_v4
+50/261939/campos_512_v4
+50/261995/campos_512_v4
+50/262059/campos_512_v4
+50/262086/campos_512_v4
+50/262105/campos_512_v4
+50/262235/campos_512_v4
+50/262256/campos_512_v4
+50/262381/campos_512_v4
+50/262406/campos_512_v4
+50/262474/campos_512_v4
+50/262514/campos_512_v4
+50/262547/campos_512_v4
+50/262548/campos_512_v4
+50/262716/campos_512_v4
+50/262740/campos_512_v4
+50/262849/campos_512_v4
+50/262857/campos_512_v4
+50/262860/campos_512_v4
+50/262897/campos_512_v4
+50/262924/campos_512_v4
+50/262962/campos_512_v4
+50/263028/campos_512_v4
+50/263163/campos_512_v4
+50/263175/campos_512_v4
+50/263191/campos_512_v4
+50/263252/campos_512_v4
+50/263411/campos_512_v4
+50/263466/campos_512_v4
+50/263503/campos_512_v4
+50/263520/campos_512_v4
+50/263582/campos_512_v4
+50/263626/campos_512_v4
+50/263635/campos_512_v4
+50/263844/campos_512_v4
+50/263874/campos_512_v4
+50/263899/campos_512_v4
+50/263905/campos_512_v4
+50/263931/campos_512_v4
+50/263938/campos_512_v4
+50/263941/campos_512_v4
+50/263959/campos_512_v4
+50/263997/campos_512_v4
+50/264028/campos_512_v4
+50/264035/campos_512_v4
+50/264123/campos_512_v4
+50/264143/campos_512_v4
+50/264197/campos_512_v4
+50/264378/campos_512_v4
+50/264423/campos_512_v4
+50/264483/campos_512_v4
+50/264628/campos_512_v4
+50/264725/campos_512_v4
+50/264810/campos_512_v4
+50/264971/campos_512_v4
+51/265079/campos_512_v4
+51/265109/campos_512_v4
+51/265149/campos_512_v4
+51/265159/campos_512_v4
+51/265276/campos_512_v4
+51/265529/campos_512_v4
+51/265622/campos_512_v4
+51/265706/campos_512_v4
+51/265771/campos_512_v4
+51/265810/campos_512_v4
+51/265834/campos_512_v4
+51/265881/campos_512_v4
+51/265991/campos_512_v4
+51/265998/campos_512_v4
+51/266050/campos_512_v4
+51/266126/campos_512_v4
+51/266169/campos_512_v4
+51/266174/campos_512_v4
+51/266213/campos_512_v4
+51/266291/campos_512_v4
+51/266292/campos_512_v4
+51/266377/campos_512_v4
+51/266476/campos_512_v4
+51/266659/campos_512_v4
+51/266772/campos_512_v4
+51/266773/campos_512_v4
+51/266936/campos_512_v4
+51/266979/campos_512_v4
+51/266996/campos_512_v4
+51/267105/campos_512_v4
+51/267239/campos_512_v4
+51/267280/campos_512_v4
+51/267546/campos_512_v4
+51/267592/campos_512_v4
+51/267659/campos_512_v4
+51/267722/campos_512_v4
+51/267913/campos_512_v4
+51/267949/campos_512_v4
+51/267970/campos_512_v4
+51/268217/campos_512_v4
+51/268229/campos_512_v4
+51/268265/campos_512_v4
+51/268284/campos_512_v4
+51/268339/campos_512_v4
+51/268597/campos_512_v4
+51/268625/campos_512_v4
+51/268649/campos_512_v4
+51/268820/campos_512_v4
+51/268860/campos_512_v4
+51/268872/campos_512_v4
+51/268973/campos_512_v4
+51/269060/campos_512_v4
+51/269192/campos_512_v4
+51/269206/campos_512_v4
+51/269403/campos_512_v4
+51/269572/campos_512_v4
+51/269738/campos_512_v4
+51/269897/campos_512_v4
+51/269909/campos_512_v4
+52/270118/campos_512_v4
+52/270160/campos_512_v4
+52/270206/campos_512_v4
+52/270288/campos_512_v4
+52/270390/campos_512_v4
+52/270395/campos_512_v4
+52/270554/campos_512_v4
+52/270562/campos_512_v4
+52/270578/campos_512_v4
+52/270726/campos_512_v4
+52/270890/campos_512_v4
+52/270915/campos_512_v4
+52/270955/campos_512_v4
+52/270981/campos_512_v4
+52/271023/campos_512_v4
+52/271183/campos_512_v4
+52/271219/campos_512_v4
+52/271327/campos_512_v4
+52/271438/campos_512_v4
+52/271480/campos_512_v4
+52/271550/campos_512_v4
+52/271581/campos_512_v4
+52/271585/campos_512_v4
+52/271707/campos_512_v4
+52/271729/campos_512_v4
+52/271794/campos_512_v4
+52/271907/campos_512_v4
+52/272090/campos_512_v4
+52/272113/campos_512_v4
+52/272243/campos_512_v4
+52/272315/campos_512_v4
+52/272528/campos_512_v4
+52/272560/campos_512_v4
+52/272630/campos_512_v4
+52/272716/campos_512_v4
+52/272772/campos_512_v4
+52/272819/campos_512_v4
+52/272857/campos_512_v4
+52/272912/campos_512_v4
+52/272998/campos_512_v4
+52/273085/campos_512_v4
+52/273163/campos_512_v4
+52/273247/campos_512_v4
+52/273268/campos_512_v4
+52/273307/campos_512_v4
+52/273335/campos_512_v4
+52/273359/campos_512_v4
+52/273452/campos_512_v4
+52/273472/campos_512_v4
+52/273567/campos_512_v4
+52/273606/campos_512_v4
+52/273645/campos_512_v4
+52/273750/campos_512_v4
+52/273769/campos_512_v4
+52/273813/campos_512_v4
+52/273931/campos_512_v4
+52/274436/campos_512_v4
+52/274488/campos_512_v4
+52/274758/campos_512_v4
+52/274931/campos_512_v4
+53/275041/campos_512_v4
+53/275127/campos_512_v4
+53/275174/campos_512_v4
+53/275210/campos_512_v4
+53/275245/campos_512_v4
+53/275333/campos_512_v4
+53/275425/campos_512_v4
+53/275470/campos_512_v4
+53/275525/campos_512_v4
+53/275533/campos_512_v4
+53/275618/campos_512_v4
+53/275665/campos_512_v4
+53/275696/campos_512_v4
+53/275735/campos_512_v4
+53/275838/campos_512_v4
+53/276003/campos_512_v4
+53/276022/campos_512_v4
+53/276068/campos_512_v4
+53/276094/campos_512_v4
+53/276125/campos_512_v4
+53/276153/campos_512_v4
+53/276182/campos_512_v4
+53/276268/campos_512_v4
+53/276287/campos_512_v4
+53/276333/campos_512_v4
+53/276359/campos_512_v4
+53/276363/campos_512_v4
+53/276537/campos_512_v4
+53/276656/campos_512_v4
+53/276660/campos_512_v4
+53/276685/campos_512_v4
+53/276745/campos_512_v4
+53/276828/campos_512_v4
+53/277022/campos_512_v4
+53/277127/campos_512_v4
+53/277174/campos_512_v4
+53/277235/campos_512_v4
+53/277251/campos_512_v4
+53/277465/campos_512_v4
+53/277624/campos_512_v4
+53/277823/campos_512_v4
+53/277890/campos_512_v4
+53/278041/campos_512_v4
+53/278198/campos_512_v4
+53/278249/campos_512_v4
+53/278252/campos_512_v4
+53/278258/campos_512_v4
+53/278282/campos_512_v4
+53/278286/campos_512_v4
+53/278296/campos_512_v4
+53/278333/campos_512_v4
+53/278348/campos_512_v4
+53/278475/campos_512_v4
+53/278557/campos_512_v4
+53/278574/campos_512_v4
+53/278620/campos_512_v4
+53/278665/campos_512_v4
+53/278752/campos_512_v4
+53/278754/campos_512_v4
+53/278762/campos_512_v4
+53/278773/campos_512_v4
+53/278832/campos_512_v4
+53/278868/campos_512_v4
+53/279110/campos_512_v4
+53/279146/campos_512_v4
+53/279148/campos_512_v4
+53/279179/campos_512_v4
+53/279229/campos_512_v4
+53/279635/campos_512_v4
+53/279856/campos_512_v4
+53/279912/campos_512_v4
+54/280050/campos_512_v4
+54/280138/campos_512_v4
+54/280161/campos_512_v4
+54/280182/campos_512_v4
+54/280272/campos_512_v4
+54/280337/campos_512_v4
+54/280390/campos_512_v4
+54/280397/campos_512_v4
+54/280584/campos_512_v4
+54/280729/campos_512_v4
+54/280763/campos_512_v4
+54/280827/campos_512_v4
+54/280883/campos_512_v4
+54/280904/campos_512_v4
+54/280927/campos_512_v4
+54/280946/campos_512_v4
+54/281051/campos_512_v4
+54/281089/campos_512_v4
+54/281099/campos_512_v4
+54/281384/campos_512_v4
+54/281474/campos_512_v4
+54/281622/campos_512_v4
+54/281678/campos_512_v4
+54/281736/campos_512_v4
+54/281792/campos_512_v4
+54/281883/campos_512_v4
+54/281911/campos_512_v4
+54/281931/campos_512_v4
+54/281936/campos_512_v4
+54/281991/campos_512_v4
+54/282005/campos_512_v4
+54/282067/campos_512_v4
+54/282106/campos_512_v4
+54/282178/campos_512_v4
+54/282188/campos_512_v4
+54/282208/campos_512_v4
+54/282279/campos_512_v4
+54/282354/campos_512_v4
+54/282432/campos_512_v4
+54/282561/campos_512_v4
+54/282624/campos_512_v4
+54/282734/campos_512_v4
+54/282787/campos_512_v4
+54/282890/campos_512_v4
+54/282922/campos_512_v4
+54/282930/campos_512_v4
+54/283024/campos_512_v4
+54/283066/campos_512_v4
+54/283191/campos_512_v4
+54/283315/campos_512_v4
+54/283385/campos_512_v4
+54/283523/campos_512_v4
+54/283552/campos_512_v4
+54/283669/campos_512_v4
+54/283693/campos_512_v4
+54/283751/campos_512_v4
+54/283837/campos_512_v4
+54/283876/campos_512_v4
+54/283892/campos_512_v4
+54/283958/campos_512_v4
+54/283961/campos_512_v4
+54/284020/campos_512_v4
+54/284042/campos_512_v4
+54/284060/campos_512_v4
+54/284114/campos_512_v4
+54/284234/campos_512_v4
+54/284386/campos_512_v4
+54/284431/campos_512_v4
+54/284445/campos_512_v4
+54/284498/campos_512_v4
+54/284515/campos_512_v4
+54/284538/campos_512_v4
+54/284612/campos_512_v4
+54/284695/campos_512_v4
+54/284749/campos_512_v4
+54/284779/campos_512_v4
+54/284901/campos_512_v4
+54/284926/campos_512_v4
+54/284981/campos_512_v4
+54/284990/campos_512_v4
+55/285240/campos_512_v4
+55/285450/campos_512_v4
+55/285706/campos_512_v4
+55/285860/campos_512_v4
+55/285861/campos_512_v4
+55/285918/campos_512_v4
+55/286101/campos_512_v4
+55/286121/campos_512_v4
+55/286189/campos_512_v4
+55/286233/campos_512_v4
+55/286320/campos_512_v4
+55/286323/campos_512_v4
+55/286534/campos_512_v4
+55/286544/campos_512_v4
+55/286557/campos_512_v4
+55/286581/campos_512_v4
+55/286611/campos_512_v4
+55/286684/campos_512_v4
+55/286791/campos_512_v4
+55/286871/campos_512_v4
+55/287110/campos_512_v4
+55/287119/campos_512_v4
+55/287199/campos_512_v4
+55/287438/campos_512_v4
+55/287493/campos_512_v4
+55/287564/campos_512_v4
+55/287639/campos_512_v4
+55/287718/campos_512_v4
+55/287719/campos_512_v4
+55/287730/campos_512_v4
+55/287782/campos_512_v4
+55/287832/campos_512_v4
+55/287882/campos_512_v4
+55/287906/campos_512_v4
+55/287943/campos_512_v4
+55/287976/campos_512_v4
+55/287982/campos_512_v4
+55/288022/campos_512_v4
+55/288138/campos_512_v4
+55/288240/campos_512_v4
+55/288248/campos_512_v4
+55/288315/campos_512_v4
+55/288460/campos_512_v4
+55/288591/campos_512_v4
+55/288625/campos_512_v4
+55/288790/campos_512_v4
+55/288883/campos_512_v4
+55/288957/campos_512_v4
+55/288967/campos_512_v4
+55/289037/campos_512_v4
+55/289122/campos_512_v4
+55/289132/campos_512_v4
+55/289194/campos_512_v4
+55/289370/campos_512_v4
+55/289431/campos_512_v4
+55/289704/campos_512_v4
+55/289795/campos_512_v4
+55/289897/campos_512_v4
+56/290123/campos_512_v4
+56/290223/campos_512_v4
+56/290396/campos_512_v4
+56/290457/campos_512_v4
+56/290498/campos_512_v4
+56/290533/campos_512_v4
+56/290624/campos_512_v4
+56/290783/campos_512_v4
+56/290846/campos_512_v4
+56/290891/campos_512_v4
+56/290945/campos_512_v4
+56/290956/campos_512_v4
+56/291081/campos_512_v4
+56/291143/campos_512_v4
+56/291178/campos_512_v4
+56/291307/campos_512_v4
+56/291426/campos_512_v4
+56/291430/campos_512_v4
+56/291448/campos_512_v4
+56/291502/campos_512_v4
+56/291599/campos_512_v4
+56/291616/campos_512_v4
+56/291618/campos_512_v4
+56/291716/campos_512_v4
+56/291850/campos_512_v4
+56/291859/campos_512_v4
+56/292045/campos_512_v4
+56/292171/campos_512_v4
+56/292228/campos_512_v4
+56/292250/campos_512_v4
+56/292319/campos_512_v4
+56/292358/campos_512_v4
+56/292399/campos_512_v4
+56/292405/campos_512_v4
+56/292477/campos_512_v4
+56/292534/campos_512_v4
+56/292618/campos_512_v4
+56/292628/campos_512_v4
+56/292654/campos_512_v4
+56/292722/campos_512_v4
+56/292866/campos_512_v4
+56/293055/campos_512_v4
+56/293128/campos_512_v4
+56/293178/campos_512_v4
+56/293231/campos_512_v4
+56/293241/campos_512_v4
+56/293302/campos_512_v4
+56/293327/campos_512_v4
+56/293356/campos_512_v4
+56/293417/campos_512_v4
+56/293432/campos_512_v4
+56/293438/campos_512_v4
+56/293572/campos_512_v4
+56/293626/campos_512_v4
+56/293659/campos_512_v4
+56/293688/campos_512_v4
+56/293751/campos_512_v4
+56/293923/campos_512_v4
+56/293942/campos_512_v4
+56/294012/campos_512_v4
+56/294029/campos_512_v4
+56/294083/campos_512_v4
+56/294272/campos_512_v4
+56/294293/campos_512_v4
+56/294341/campos_512_v4
+56/294383/campos_512_v4
+56/294556/campos_512_v4
+56/294594/campos_512_v4
+56/294648/campos_512_v4
+56/294662/campos_512_v4
+56/294678/campos_512_v4
+56/294704/campos_512_v4
+56/294779/campos_512_v4
+56/294880/campos_512_v4
+56/294926/campos_512_v4
+56/294981/campos_512_v4
+57/295087/campos_512_v4
+57/295281/campos_512_v4
+57/295290/campos_512_v4
+57/295371/campos_512_v4
+57/295595/campos_512_v4
+57/295664/campos_512_v4
+57/295711/campos_512_v4
+57/295757/campos_512_v4
+57/295851/campos_512_v4
+57/295892/campos_512_v4
+57/295905/campos_512_v4
+57/296075/campos_512_v4
+57/296166/campos_512_v4
+57/296232/campos_512_v4
+57/296269/campos_512_v4
+57/296396/campos_512_v4
+57/296486/campos_512_v4
+57/296592/campos_512_v4
+57/296674/campos_512_v4
+57/296695/campos_512_v4
+57/296827/campos_512_v4
+57/296995/campos_512_v4
+57/297001/campos_512_v4
+57/297136/campos_512_v4
+57/297194/campos_512_v4
+57/297230/campos_512_v4
+57/297244/campos_512_v4
+57/297315/campos_512_v4
+57/297396/campos_512_v4
+57/297400/campos_512_v4
+57/297410/campos_512_v4
+57/297613/campos_512_v4
+57/297779/campos_512_v4
+57/297786/campos_512_v4
+57/297913/campos_512_v4
+57/298058/campos_512_v4
+57/298062/campos_512_v4
+57/298244/campos_512_v4
+57/298414/campos_512_v4
+57/298561/campos_512_v4
+57/298683/campos_512_v4
+57/298686/campos_512_v4
+57/298749/campos_512_v4
+57/298767/campos_512_v4
+57/298837/campos_512_v4
+57/298888/campos_512_v4
+57/298929/campos_512_v4
+57/298939/campos_512_v4
+57/298964/campos_512_v4
+57/298965/campos_512_v4
+57/299062/campos_512_v4
+57/299273/campos_512_v4
+57/299307/campos_512_v4
+57/299337/campos_512_v4
+57/299383/campos_512_v4
+57/299632/campos_512_v4
+57/299665/campos_512_v4
+57/299706/campos_512_v4
+58/300019/campos_512_v4
+58/300086/campos_512_v4
+58/300298/campos_512_v4
+58/300402/campos_512_v4
+58/300438/campos_512_v4
+58/300630/campos_512_v4
+58/300635/campos_512_v4
+58/300636/campos_512_v4
+58/300663/campos_512_v4
+58/300778/campos_512_v4
+58/300826/campos_512_v4
+58/300863/campos_512_v4
+58/300943/campos_512_v4
+58/301040/campos_512_v4
+58/301098/campos_512_v4
+58/301258/campos_512_v4
+58/301312/campos_512_v4
+58/301362/campos_512_v4
+58/301457/campos_512_v4
+58/301602/campos_512_v4
+58/301705/campos_512_v4
+58/301874/campos_512_v4
+58/301952/campos_512_v4
+58/302083/campos_512_v4
+58/302121/campos_512_v4
+58/302163/campos_512_v4
+58/302267/campos_512_v4
+58/302295/campos_512_v4
+58/302389/campos_512_v4
+58/302510/campos_512_v4
+58/302558/campos_512_v4
+58/302576/campos_512_v4
+58/302601/campos_512_v4
+58/302639/campos_512_v4
+58/302757/campos_512_v4
+58/302854/campos_512_v4
+58/303064/campos_512_v4
+58/303106/campos_512_v4
+58/303306/campos_512_v4
+58/303312/campos_512_v4
+58/303436/campos_512_v4
+58/303545/campos_512_v4
+58/303656/campos_512_v4
+58/303707/campos_512_v4
+58/303763/campos_512_v4
+58/303830/campos_512_v4
+58/304039/campos_512_v4
+58/304055/campos_512_v4
+58/304152/campos_512_v4
+58/304155/campos_512_v4
+58/304159/campos_512_v4
+58/304234/campos_512_v4
+58/304253/campos_512_v4
+58/304468/campos_512_v4
+58/304583/campos_512_v4
+58/304745/campos_512_v4
+58/304794/campos_512_v4
+58/304878/campos_512_v4
+58/304891/campos_512_v4
+58/304937/campos_512_v4
+59/305170/campos_512_v4
+59/305202/campos_512_v4
+59/305215/campos_512_v4
+59/305295/campos_512_v4
+59/305334/campos_512_v4
+59/305341/campos_512_v4
+59/305435/campos_512_v4
+59/305468/campos_512_v4
+59/305677/campos_512_v4
+59/305743/campos_512_v4
+59/305814/campos_512_v4
+59/305834/campos_512_v4
+59/305839/campos_512_v4
+59/306024/campos_512_v4
+59/306033/campos_512_v4
+59/306084/campos_512_v4
+59/306252/campos_512_v4
+59/306271/campos_512_v4
+59/306619/campos_512_v4
+59/306639/campos_512_v4
+59/306683/campos_512_v4
+59/306811/campos_512_v4
+59/306846/campos_512_v4
+59/306860/campos_512_v4
+59/306869/campos_512_v4
+59/306893/campos_512_v4
+59/306990/campos_512_v4
+59/307028/campos_512_v4
+59/307116/campos_512_v4
+59/307369/campos_512_v4
+59/307415/campos_512_v4
+59/307546/campos_512_v4
+59/307696/campos_512_v4
+59/307729/campos_512_v4
+59/307834/campos_512_v4
+59/307867/campos_512_v4
+59/307893/campos_512_v4
+59/307933/campos_512_v4
+59/307945/campos_512_v4
+59/307997/campos_512_v4
+59/308040/campos_512_v4
+59/308138/campos_512_v4
+59/308320/campos_512_v4
+59/308702/campos_512_v4
+59/308722/campos_512_v4
+59/308773/campos_512_v4
+59/308788/campos_512_v4
+59/308823/campos_512_v4
+59/308984/campos_512_v4
+59/309026/campos_512_v4
+59/309032/campos_512_v4
+59/309074/campos_512_v4
+59/309085/campos_512_v4
+59/309118/campos_512_v4
+59/309240/campos_512_v4
+59/309262/campos_512_v4
+59/309266/campos_512_v4
+59/309294/campos_512_v4
+59/309315/campos_512_v4
+59/309450/campos_512_v4
+59/309558/campos_512_v4
+59/309629/campos_512_v4
+59/309710/campos_512_v4
+59/309743/campos_512_v4
+59/309764/campos_512_v4
+59/309829/campos_512_v4
+59/309906/campos_512_v4
+59/309975/campos_512_v4
+59/309981/campos_512_v4
+6/40032/campos_512_v4
+6/40273/campos_512_v4
+6/40469/campos_512_v4
+6/40619/campos_512_v4
+6/40714/campos_512_v4
+6/40756/campos_512_v4
+6/40849/campos_512_v4
+6/40881/campos_512_v4
+6/40915/campos_512_v4
+6/41153/campos_512_v4
+6/41323/campos_512_v4
+6/41613/campos_512_v4
+6/41768/campos_512_v4
+6/41843/campos_512_v4
+6/41847/campos_512_v4
+6/41886/campos_512_v4
+6/42155/campos_512_v4
+6/42246/campos_512_v4
+6/42533/campos_512_v4
+6/42565/campos_512_v4
+6/42586/campos_512_v4
+6/42640/campos_512_v4
+6/42689/campos_512_v4
+6/42932/campos_512_v4
+6/43215/campos_512_v4
+6/43267/campos_512_v4
+6/43375/campos_512_v4
+6/43423/campos_512_v4
+6/43444/campos_512_v4
+6/43491/campos_512_v4
+6/43685/campos_512_v4
+6/43740/campos_512_v4
+6/43883/campos_512_v4
+6/43885/campos_512_v4
+6/44053/campos_512_v4
+6/44169/campos_512_v4
+6/44340/campos_512_v4
+6/44425/campos_512_v4
+6/44480/campos_512_v4
+6/44571/campos_512_v4
+6/44656/campos_512_v4
+6/44728/campos_512_v4
+6/44965/campos_512_v4
+60/310020/campos_512_v4
+60/310134/campos_512_v4
+60/310200/campos_512_v4
+60/310298/campos_512_v4
+60/310324/campos_512_v4
+60/310455/campos_512_v4
+60/310494/campos_512_v4
+60/310566/campos_512_v4
+60/310586/campos_512_v4
+60/310608/campos_512_v4
+60/310666/campos_512_v4
+60/310737/campos_512_v4
+60/310826/campos_512_v4
+60/310868/campos_512_v4
+60/310919/campos_512_v4
+60/310958/campos_512_v4
+60/310993/campos_512_v4
+60/311179/campos_512_v4
+60/311230/campos_512_v4
+60/311303/campos_512_v4
+60/311307/campos_512_v4
+60/311335/campos_512_v4
+60/311339/campos_512_v4
+60/311441/campos_512_v4
+60/311513/campos_512_v4
+60/311557/campos_512_v4
+60/311672/campos_512_v4
+60/311691/campos_512_v4
+60/311752/campos_512_v4
+60/311816/campos_512_v4
+60/311991/campos_512_v4
+60/312111/campos_512_v4
+60/312162/campos_512_v4
+60/312223/campos_512_v4
+60/312287/campos_512_v4
+60/312481/campos_512_v4
+60/312499/campos_512_v4
+60/312535/campos_512_v4
+60/312589/campos_512_v4
+60/312615/campos_512_v4
+60/312685/campos_512_v4
+60/312689/campos_512_v4
+60/312978/campos_512_v4
+60/313027/campos_512_v4
+60/313041/campos_512_v4
+60/313120/campos_512_v4
+60/313304/campos_512_v4
+60/313422/campos_512_v4
+60/313810/campos_512_v4
+60/313948/campos_512_v4
+60/314055/campos_512_v4
+60/314126/campos_512_v4
+60/314175/campos_512_v4
+60/314254/campos_512_v4
+60/314306/campos_512_v4
+60/314319/campos_512_v4
+60/314371/campos_512_v4
+60/314431/campos_512_v4
+60/314437/campos_512_v4
+60/314536/campos_512_v4
+60/314589/campos_512_v4
+60/314800/campos_512_v4
+60/314824/campos_512_v4
+60/314831/campos_512_v4
+60/314881/campos_512_v4
+60/314918/campos_512_v4
+60/314921/campos_512_v4
+60/314998/campos_512_v4
+61/315086/campos_512_v4
+61/315117/campos_512_v4
+61/315319/campos_512_v4
+61/315332/campos_512_v4
+61/315530/campos_512_v4
+61/315532/campos_512_v4
+61/315578/campos_512_v4
+61/315670/campos_512_v4
+61/315719/campos_512_v4
+61/315790/campos_512_v4
+61/315796/campos_512_v4
+61/315895/campos_512_v4
+61/315957/campos_512_v4
+61/316008/campos_512_v4
+61/316028/campos_512_v4
+61/316053/campos_512_v4
+61/316265/campos_512_v4
+61/316510/campos_512_v4
+61/316632/campos_512_v4
+61/316663/campos_512_v4
+61/316691/campos_512_v4
+61/316728/campos_512_v4
+61/316742/campos_512_v4
+61/316802/campos_512_v4
+61/316829/campos_512_v4
+61/316847/campos_512_v4
+61/316893/campos_512_v4
+61/317003/campos_512_v4
+61/317064/campos_512_v4
+61/317202/campos_512_v4
+61/317320/campos_512_v4
+61/317505/campos_512_v4
+61/317570/campos_512_v4
+61/317591/campos_512_v4
+61/317727/campos_512_v4
+61/317966/campos_512_v4
+61/317967/campos_512_v4
+61/317975/campos_512_v4
+61/318028/campos_512_v4
+61/318156/campos_512_v4
+61/318181/campos_512_v4
+61/318267/campos_512_v4
+61/318433/campos_512_v4
+61/318461/campos_512_v4
+61/318496/campos_512_v4
+61/318774/campos_512_v4
+61/318798/campos_512_v4
+61/318804/campos_512_v4
+61/318872/campos_512_v4
+61/319008/campos_512_v4
+61/319012/campos_512_v4
+61/319066/campos_512_v4
+61/319162/campos_512_v4
+61/319220/campos_512_v4
+61/319244/campos_512_v4
+61/319273/campos_512_v4
+61/319434/campos_512_v4
+61/319610/campos_512_v4
+61/319616/campos_512_v4
+61/319644/campos_512_v4
+61/319668/campos_512_v4
+61/319777/campos_512_v4
+61/319811/campos_512_v4
+61/319845/campos_512_v4
+62/320063/campos_512_v4
+62/320187/campos_512_v4
+62/320240/campos_512_v4
+62/320524/campos_512_v4
+62/320532/campos_512_v4
+62/320683/campos_512_v4
+62/320732/campos_512_v4
+62/320830/campos_512_v4
+62/320890/campos_512_v4
+62/320986/campos_512_v4
+62/321048/campos_512_v4
+62/321052/campos_512_v4
+62/321063/campos_512_v4
+62/321088/campos_512_v4
+62/321226/campos_512_v4
+62/321288/campos_512_v4
+62/321367/campos_512_v4
+62/321380/campos_512_v4
+62/321420/campos_512_v4
+62/321451/campos_512_v4
+62/321519/campos_512_v4
+62/321700/campos_512_v4
+62/321706/campos_512_v4
+62/321801/campos_512_v4
+62/321937/campos_512_v4
+62/321940/campos_512_v4
+62/321948/campos_512_v4
+62/322116/campos_512_v4
+62/322142/campos_512_v4
+62/322204/campos_512_v4
+62/322242/campos_512_v4
+62/322252/campos_512_v4
+62/322560/campos_512_v4
+62/322609/campos_512_v4
+62/322778/campos_512_v4
+62/322826/campos_512_v4
+62/323119/campos_512_v4
+62/323165/campos_512_v4
+62/323214/campos_512_v4
+62/323630/campos_512_v4
+62/323650/campos_512_v4
+62/323764/campos_512_v4
+62/323770/campos_512_v4
+62/323832/campos_512_v4
+62/323853/campos_512_v4
+62/323858/campos_512_v4
+62/323873/campos_512_v4
+62/323959/campos_512_v4
+62/324048/campos_512_v4
+62/324123/campos_512_v4
+62/324148/campos_512_v4
+62/324154/campos_512_v4
+62/324208/campos_512_v4
+62/324476/campos_512_v4
+62/324506/campos_512_v4
+62/324766/campos_512_v4
+62/324771/campos_512_v4
+62/324993/campos_512_v4
+63/325006/campos_512_v4
+63/325011/campos_512_v4
+63/325143/campos_512_v4
+63/325174/campos_512_v4
+63/325318/campos_512_v4
+63/325375/campos_512_v4
+63/325414/campos_512_v4
+63/325669/campos_512_v4
+63/325821/campos_512_v4
+63/325863/campos_512_v4
+63/325881/campos_512_v4
+63/325915/campos_512_v4
+63/325975/campos_512_v4
+63/326099/campos_512_v4
+63/326134/campos_512_v4
+63/326208/campos_512_v4
+63/326262/campos_512_v4
+63/326331/campos_512_v4
+63/326403/campos_512_v4
+63/326422/campos_512_v4
+63/326425/campos_512_v4
+63/326529/campos_512_v4
+63/326566/campos_512_v4
+63/326679/campos_512_v4
+63/326717/campos_512_v4
+63/326757/campos_512_v4
+63/326925/campos_512_v4
+63/326951/campos_512_v4
+63/326968/campos_512_v4
+63/327006/campos_512_v4
+63/327157/campos_512_v4
+63/327200/campos_512_v4
+63/327243/campos_512_v4
+63/327280/campos_512_v4
+63/327322/campos_512_v4
+63/327358/campos_512_v4
+63/327380/campos_512_v4
+63/327439/campos_512_v4
+63/327557/campos_512_v4
+63/327669/campos_512_v4
+63/327732/campos_512_v4
+63/327768/campos_512_v4
+63/327813/campos_512_v4
+63/328064/campos_512_v4
+63/328093/campos_512_v4
+63/328119/campos_512_v4
+63/328369/campos_512_v4
+63/328469/campos_512_v4
+63/328539/campos_512_v4
+63/328543/campos_512_v4
+63/328573/campos_512_v4
+63/328640/campos_512_v4
+63/328849/campos_512_v4
+63/328881/campos_512_v4
+63/328919/campos_512_v4
+63/328939/campos_512_v4
+63/328965/campos_512_v4
+63/328966/campos_512_v4
+63/328978/campos_512_v4
+63/329123/campos_512_v4
+63/329145/campos_512_v4
+63/329405/campos_512_v4
+63/329456/campos_512_v4
+63/329710/campos_512_v4
+63/329716/campos_512_v4
+63/329776/campos_512_v4
+63/329844/campos_512_v4
+63/329864/campos_512_v4
+64/330014/campos_512_v4
+64/330034/campos_512_v4
+64/330250/campos_512_v4
+64/330267/campos_512_v4
+64/330294/campos_512_v4
+64/330396/campos_512_v4
+64/330450/campos_512_v4
+64/330471/campos_512_v4
+64/330577/campos_512_v4
+64/330609/campos_512_v4
+64/330626/campos_512_v4
+64/330673/campos_512_v4
+64/330879/campos_512_v4
+64/330973/campos_512_v4
+64/331192/campos_512_v4
+64/331200/campos_512_v4
+64/331259/campos_512_v4
+64/331279/campos_512_v4
+64/331293/campos_512_v4
+64/331328/campos_512_v4
+64/331440/campos_512_v4
+64/331850/campos_512_v4
+64/332041/campos_512_v4
+64/332043/campos_512_v4
+64/332072/campos_512_v4
+64/332153/campos_512_v4
+64/332245/campos_512_v4
+64/332341/campos_512_v4
+64/332351/campos_512_v4
+64/332352/campos_512_v4
+64/332418/campos_512_v4
+64/332514/campos_512_v4
+64/332541/campos_512_v4
+64/332622/campos_512_v4
+64/332652/campos_512_v4
+64/332782/campos_512_v4
+64/332863/campos_512_v4
+64/333061/campos_512_v4
+64/333070/campos_512_v4
+64/333112/campos_512_v4
+64/333148/campos_512_v4
+64/333200/campos_512_v4
+64/333213/campos_512_v4
+64/333317/campos_512_v4
+64/333339/campos_512_v4
+64/333514/campos_512_v4
+64/333755/campos_512_v4
+64/333757/campos_512_v4
+64/333791/campos_512_v4
+64/333793/campos_512_v4
+64/333895/campos_512_v4
+64/333900/campos_512_v4
+64/334036/campos_512_v4
+64/334181/campos_512_v4
+64/334189/campos_512_v4
+64/334230/campos_512_v4
+64/334270/campos_512_v4
+64/334389/campos_512_v4
+64/334402/campos_512_v4
+64/334447/campos_512_v4
+64/334510/campos_512_v4
+64/334514/campos_512_v4
+64/334548/campos_512_v4
+64/334554/campos_512_v4
+64/334692/campos_512_v4
+64/334735/campos_512_v4
+64/334780/campos_512_v4
+64/334869/campos_512_v4
+64/334937/campos_512_v4
+65/335123/campos_512_v4
+65/335238/campos_512_v4
+65/335289/campos_512_v4
+65/335331/campos_512_v4
+65/335447/campos_512_v4
+65/335506/campos_512_v4
+65/335546/campos_512_v4
+65/335619/campos_512_v4
+65/335621/campos_512_v4
+65/335821/campos_512_v4
+65/335861/campos_512_v4
+65/335930/campos_512_v4
+65/335986/campos_512_v4
+65/336043/campos_512_v4
+65/336096/campos_512_v4
+65/336101/campos_512_v4
+65/336111/campos_512_v4
+65/336187/campos_512_v4
+65/336208/campos_512_v4
+65/336293/campos_512_v4
+65/336427/campos_512_v4
+65/336509/campos_512_v4
+65/336516/campos_512_v4
+65/336674/campos_512_v4
+65/336720/campos_512_v4
+65/336773/campos_512_v4
+65/336833/campos_512_v4
+65/336915/campos_512_v4
+65/337133/campos_512_v4
+65/337167/campos_512_v4
+65/337230/campos_512_v4
+65/337298/campos_512_v4
+65/337328/campos_512_v4
+65/337455/campos_512_v4
+65/337483/campos_512_v4
+65/337587/campos_512_v4
+65/337617/campos_512_v4
+65/337632/campos_512_v4
+65/337668/campos_512_v4
+65/337840/campos_512_v4
+65/337877/campos_512_v4
+65/338046/campos_512_v4
+65/338066/campos_512_v4
+65/338337/campos_512_v4
+65/338422/campos_512_v4
+65/338509/campos_512_v4
+65/338585/campos_512_v4
+65/338608/campos_512_v4
+65/338959/campos_512_v4
+65/339034/campos_512_v4
+65/339047/campos_512_v4
+65/339109/campos_512_v4
+65/339128/campos_512_v4
+65/339136/campos_512_v4
+65/339177/campos_512_v4
+65/339252/campos_512_v4
+65/339275/campos_512_v4
+65/339518/campos_512_v4
+65/339602/campos_512_v4
+65/339633/campos_512_v4
+65/339676/campos_512_v4
+65/339715/campos_512_v4
+65/339830/campos_512_v4
+66/340091/campos_512_v4
+66/340121/campos_512_v4
+66/340310/campos_512_v4
+66/340311/campos_512_v4
+66/340331/campos_512_v4
+66/340586/campos_512_v4
+66/340603/campos_512_v4
+66/340619/campos_512_v4
+66/340641/campos_512_v4
+66/340712/campos_512_v4
+66/340762/campos_512_v4
+66/340778/campos_512_v4
+66/340873/campos_512_v4
+66/340910/campos_512_v4
+66/340968/campos_512_v4
+66/341057/campos_512_v4
+66/341113/campos_512_v4
+66/341238/campos_512_v4
+66/341385/campos_512_v4
+66/341386/campos_512_v4
+66/341567/campos_512_v4
+66/341720/campos_512_v4
+66/341779/campos_512_v4
+66/341820/campos_512_v4
+66/342099/campos_512_v4
+66/342134/campos_512_v4
+66/342255/campos_512_v4
+66/342360/campos_512_v4
+66/342441/campos_512_v4
+66/342575/campos_512_v4
+66/342603/campos_512_v4
+66/342795/campos_512_v4
+66/342935/campos_512_v4
+66/342983/campos_512_v4
+66/343181/campos_512_v4
+66/343223/campos_512_v4
+66/343359/campos_512_v4
+66/343410/campos_512_v4
+66/343553/campos_512_v4
+66/343610/campos_512_v4
+66/343717/campos_512_v4
+66/343806/campos_512_v4
+66/344043/campos_512_v4
+66/344060/campos_512_v4
+66/344084/campos_512_v4
+66/344157/campos_512_v4
+66/344204/campos_512_v4
+66/344322/campos_512_v4
+66/344424/campos_512_v4
+66/344431/campos_512_v4
+66/344554/campos_512_v4
+66/344557/campos_512_v4
+66/344638/campos_512_v4
+66/344650/campos_512_v4
+66/344786/campos_512_v4
+66/344878/campos_512_v4
+67/345037/campos_512_v4
+67/345115/campos_512_v4
+67/345127/campos_512_v4
+67/345227/campos_512_v4
+67/345293/campos_512_v4
+67/345329/campos_512_v4
+67/345434/campos_512_v4
+67/345447/campos_512_v4
+67/345505/campos_512_v4
+67/345516/campos_512_v4
+67/345519/campos_512_v4
+67/345532/campos_512_v4
+67/345558/campos_512_v4
+67/345643/campos_512_v4
+67/345656/campos_512_v4
+67/345688/campos_512_v4
+67/346003/campos_512_v4
+67/346326/campos_512_v4
+67/346388/campos_512_v4
+67/346678/campos_512_v4
+67/346735/campos_512_v4
+67/346787/campos_512_v4
+67/346869/campos_512_v4
+67/346871/campos_512_v4
+67/346876/campos_512_v4
+67/346897/campos_512_v4
+67/346993/campos_512_v4
+67/347011/campos_512_v4
+67/347127/campos_512_v4
+67/347318/campos_512_v4
+67/347467/campos_512_v4
+67/347543/campos_512_v4
+67/347611/campos_512_v4
+67/347684/campos_512_v4
+67/347697/campos_512_v4
+67/347720/campos_512_v4
+67/347889/campos_512_v4
+67/347959/campos_512_v4
+67/348127/campos_512_v4
+67/348301/campos_512_v4
+67/348333/campos_512_v4
+67/348559/campos_512_v4
+67/348570/campos_512_v4
+67/348714/campos_512_v4
+67/348777/campos_512_v4
+67/348781/campos_512_v4
+67/348904/campos_512_v4
+67/348920/campos_512_v4
+67/349017/campos_512_v4
+67/349394/campos_512_v4
+67/349406/campos_512_v4
+67/349475/campos_512_v4
+67/349537/campos_512_v4
+67/349673/campos_512_v4
+67/349723/campos_512_v4
+67/349797/campos_512_v4
+67/349872/campos_512_v4
+67/349899/campos_512_v4
+67/349981/campos_512_v4
+68/350061/campos_512_v4
+68/350111/campos_512_v4
+68/350157/campos_512_v4
+68/350209/campos_512_v4
+68/350215/campos_512_v4
+68/350244/campos_512_v4
+68/350416/campos_512_v4
+68/350424/campos_512_v4
+68/350740/campos_512_v4
+68/350753/campos_512_v4
+68/350811/campos_512_v4
+68/350862/campos_512_v4
+68/350898/campos_512_v4
+68/350899/campos_512_v4
+68/350907/campos_512_v4
+68/351016/campos_512_v4
+68/351173/campos_512_v4
+68/351178/campos_512_v4
+68/351236/campos_512_v4
+68/351327/campos_512_v4
+68/351356/campos_512_v4
+68/351562/campos_512_v4
+68/351576/campos_512_v4
+68/351585/campos_512_v4
+68/351630/campos_512_v4
+68/351720/campos_512_v4
+68/351912/campos_512_v4
+68/352012/campos_512_v4
+68/352026/campos_512_v4
+68/352122/campos_512_v4
+68/352262/campos_512_v4
+68/352280/campos_512_v4
+68/352397/campos_512_v4
+68/352428/campos_512_v4
+68/352516/campos_512_v4
+68/352548/campos_512_v4
+68/352575/campos_512_v4
+68/352631/campos_512_v4
+68/352637/campos_512_v4
+68/352730/campos_512_v4
+68/352775/campos_512_v4
+68/352869/campos_512_v4
+68/352939/campos_512_v4
+68/352959/campos_512_v4
+68/352993/campos_512_v4
+68/353026/campos_512_v4
+68/353042/campos_512_v4
+68/353068/campos_512_v4
+68/353147/campos_512_v4
+68/353167/campos_512_v4
+68/353253/campos_512_v4
+68/353318/campos_512_v4
+68/353588/campos_512_v4
+68/353589/campos_512_v4
+68/353604/campos_512_v4
+68/353653/campos_512_v4
+68/353904/campos_512_v4
+68/353959/campos_512_v4
+68/353966/campos_512_v4
+68/354030/campos_512_v4
+68/354049/campos_512_v4
+68/354203/campos_512_v4
+68/354318/campos_512_v4
+68/354495/campos_512_v4
+68/354696/campos_512_v4
+68/354698/campos_512_v4
+68/354767/campos_512_v4
+68/354803/campos_512_v4
+68/354859/campos_512_v4
+68/354884/campos_512_v4
+68/354922/campos_512_v4
+68/354952/campos_512_v4
+69/355143/campos_512_v4
+69/355243/campos_512_v4
+69/355371/campos_512_v4
+69/355424/campos_512_v4
+69/355461/campos_512_v4
+69/355651/campos_512_v4
+69/355744/campos_512_v4
+69/355794/campos_512_v4
+69/355865/campos_512_v4
+69/355871/campos_512_v4
+69/355882/campos_512_v4
+69/356004/campos_512_v4
+69/356084/campos_512_v4
+69/356254/campos_512_v4
+69/356426/campos_512_v4
+69/356635/campos_512_v4
+69/356637/campos_512_v4
+69/356654/campos_512_v4
+69/356816/campos_512_v4
+69/356843/campos_512_v4
+69/357011/campos_512_v4
+69/357084/campos_512_v4
+69/357088/campos_512_v4
+69/357114/campos_512_v4
+69/357115/campos_512_v4
+69/357134/campos_512_v4
+69/357141/campos_512_v4
+69/357219/campos_512_v4
+69/357300/campos_512_v4
+69/357314/campos_512_v4
+69/357323/campos_512_v4
+69/357547/campos_512_v4
+69/357552/campos_512_v4
+69/357752/campos_512_v4
+69/357776/campos_512_v4
+69/357853/campos_512_v4
+69/357867/campos_512_v4
+69/357949/campos_512_v4
+69/358230/campos_512_v4
+69/358286/campos_512_v4
+69/358356/campos_512_v4
+69/358375/campos_512_v4
+69/358400/campos_512_v4
+69/358681/campos_512_v4
+69/358684/campos_512_v4
+69/358705/campos_512_v4
+69/358725/campos_512_v4
+69/358740/campos_512_v4
+69/358829/campos_512_v4
+69/358904/campos_512_v4
+69/358973/campos_512_v4
+69/359095/campos_512_v4
+69/359221/campos_512_v4
+69/359246/campos_512_v4
+69/359301/campos_512_v4
+69/359332/campos_512_v4
+69/359534/campos_512_v4
+69/359551/campos_512_v4
+69/359577/campos_512_v4
+69/359578/campos_512_v4
+69/359631/campos_512_v4
+69/359642/campos_512_v4
+69/359761/campos_512_v4
+69/359866/campos_512_v4
+7/45033/campos_512_v4
+7/45540/campos_512_v4
+7/45564/campos_512_v4
+7/45639/campos_512_v4
+7/45759/campos_512_v4
+7/45845/campos_512_v4
+7/45849/campos_512_v4
+7/45951/campos_512_v4
+7/46022/campos_512_v4
+7/46130/campos_512_v4
+7/46313/campos_512_v4
+7/46318/campos_512_v4
+7/46349/campos_512_v4
+7/46478/campos_512_v4
+7/46492/campos_512_v4
+7/46602/campos_512_v4
+7/46620/campos_512_v4
+7/46798/campos_512_v4
+7/46932/campos_512_v4
+7/47123/campos_512_v4
+7/47220/campos_512_v4
+7/47277/campos_512_v4
+7/47426/campos_512_v4
+7/47582/campos_512_v4
+7/47601/campos_512_v4
+7/47703/campos_512_v4
+7/47794/campos_512_v4
+7/47970/campos_512_v4
+7/47971/campos_512_v4
+7/48022/campos_512_v4
+7/48061/campos_512_v4
+7/48189/campos_512_v4
+7/48323/campos_512_v4
+7/48350/campos_512_v4
+7/48356/campos_512_v4
+7/48398/campos_512_v4
+7/48469/campos_512_v4
+7/48754/campos_512_v4
+7/48762/campos_512_v4
+7/48816/campos_512_v4
+7/48822/campos_512_v4
+7/48906/campos_512_v4
+7/49133/campos_512_v4
+7/49181/campos_512_v4
+7/49198/campos_512_v4
+7/49245/campos_512_v4
+7/49347/campos_512_v4
+7/49362/campos_512_v4
+7/49522/campos_512_v4
+7/49647/campos_512_v4
+7/49669/campos_512_v4
+7/49903/campos_512_v4
+7/49942/campos_512_v4
+7/50000/campos_512_v4
+70/360018/campos_512_v4
+70/360078/campos_512_v4
+70/360369/campos_512_v4
+70/360497/campos_512_v4
+70/360520/campos_512_v4
+70/360634/campos_512_v4
+70/360683/campos_512_v4
+70/360882/campos_512_v4
+70/360980/campos_512_v4
+70/361014/campos_512_v4
+70/361083/campos_512_v4
+70/361157/campos_512_v4
+70/361220/campos_512_v4
+70/361224/campos_512_v4
+70/361273/campos_512_v4
+70/361397/campos_512_v4
+70/361426/campos_512_v4
+70/361469/campos_512_v4
+70/361476/campos_512_v4
+70/361526/campos_512_v4
+70/361557/campos_512_v4
+70/361566/campos_512_v4
+70/361608/campos_512_v4
+70/361692/campos_512_v4
+70/361773/campos_512_v4
+70/361880/campos_512_v4
+70/361898/campos_512_v4
+70/361907/campos_512_v4
+70/361975/campos_512_v4
+70/362180/campos_512_v4
+70/362200/campos_512_v4
+70/362276/campos_512_v4
+70/362322/campos_512_v4
+70/362392/campos_512_v4
+70/362469/campos_512_v4
+70/362515/campos_512_v4
+70/362529/campos_512_v4
+70/362632/campos_512_v4
+70/362792/campos_512_v4
+70/362903/campos_512_v4
+70/362919/campos_512_v4
+70/362924/campos_512_v4
+70/363016/campos_512_v4
+70/363188/campos_512_v4
+70/363209/campos_512_v4
+70/363269/campos_512_v4
+70/363272/campos_512_v4
+70/363309/campos_512_v4
+70/363372/campos_512_v4
+70/363472/campos_512_v4
+70/363625/campos_512_v4
+70/363627/campos_512_v4
+70/363672/campos_512_v4
+70/363699/campos_512_v4
+70/363827/campos_512_v4
+70/363882/campos_512_v4
+70/363970/campos_512_v4
+70/364283/campos_512_v4
+70/364328/campos_512_v4
+70/364471/campos_512_v4
+70/364481/campos_512_v4
+70/364758/campos_512_v4
+71/365047/campos_512_v4
+71/365089/campos_512_v4
+71/365185/campos_512_v4
+71/365190/campos_512_v4
+71/365349/campos_512_v4
+71/365472/campos_512_v4
+71/365477/campos_512_v4
+71/365494/campos_512_v4
+71/365640/campos_512_v4
+71/365707/campos_512_v4
+71/365717/campos_512_v4
+71/365743/campos_512_v4
+71/365851/campos_512_v4
+71/365880/campos_512_v4
+71/365899/campos_512_v4
+71/366004/campos_512_v4
+71/366024/campos_512_v4
+71/366216/campos_512_v4
+71/366303/campos_512_v4
+71/366327/campos_512_v4
+71/366348/campos_512_v4
+71/366352/campos_512_v4
+71/366419/campos_512_v4
+71/366514/campos_512_v4
+71/366597/campos_512_v4
+71/366660/campos_512_v4
+71/366677/campos_512_v4
+71/366721/campos_512_v4
+71/366772/campos_512_v4
+71/366799/campos_512_v4
+71/366810/campos_512_v4
+71/366884/campos_512_v4
+71/366959/campos_512_v4
+71/367073/campos_512_v4
+71/367147/campos_512_v4
+71/367186/campos_512_v4
+71/367196/campos_512_v4
+71/367214/campos_512_v4
+71/367237/campos_512_v4
+71/367291/campos_512_v4
+71/367313/campos_512_v4
+71/367346/campos_512_v4
+71/367483/campos_512_v4
+71/367552/campos_512_v4
+71/367697/campos_512_v4
+71/367712/campos_512_v4
+71/367720/campos_512_v4
+71/367738/campos_512_v4
+71/367809/campos_512_v4
+71/367900/campos_512_v4
+71/368084/campos_512_v4
+71/368145/campos_512_v4
+71/368164/campos_512_v4
+71/368206/campos_512_v4
+71/368319/campos_512_v4
+71/368407/campos_512_v4
+71/368488/campos_512_v4
+71/368496/campos_512_v4
+71/368817/campos_512_v4
+71/368838/campos_512_v4
+71/369068/campos_512_v4
+71/369166/campos_512_v4
+71/369213/campos_512_v4
+71/369304/campos_512_v4
+71/369504/campos_512_v4
+71/369527/campos_512_v4
+71/369557/campos_512_v4
+71/369733/campos_512_v4
+71/369853/campos_512_v4
+72/370329/campos_512_v4
+72/370334/campos_512_v4
+72/370336/campos_512_v4
+72/370344/campos_512_v4
+72/370407/campos_512_v4
+72/370468/campos_512_v4
+72/370488/campos_512_v4
+72/370495/campos_512_v4
+72/370736/campos_512_v4
+72/370862/campos_512_v4
+72/370922/campos_512_v4
+72/371089/campos_512_v4
+72/371106/campos_512_v4
+72/371166/campos_512_v4
+72/371202/campos_512_v4
+72/371246/campos_512_v4
+72/371481/campos_512_v4
+72/371596/campos_512_v4
+72/371601/campos_512_v4
+72/371722/campos_512_v4
+72/371725/campos_512_v4
+72/371775/campos_512_v4
+72/371854/campos_512_v4
+72/371884/campos_512_v4
+72/372147/campos_512_v4
+72/372230/campos_512_v4
+72/372244/campos_512_v4
+72/372256/campos_512_v4
+72/372403/campos_512_v4
+72/372470/campos_512_v4
+72/372507/campos_512_v4
+72/372524/campos_512_v4
+72/372646/campos_512_v4
+72/372697/campos_512_v4
+72/372796/campos_512_v4
+72/372846/campos_512_v4
+72/372966/campos_512_v4
+72/373066/campos_512_v4
+72/373193/campos_512_v4
+72/373241/campos_512_v4
+72/373250/campos_512_v4
+72/373281/campos_512_v4
+72/373287/campos_512_v4
+72/373421/campos_512_v4
+72/373583/campos_512_v4
+72/373960/campos_512_v4
+72/373974/campos_512_v4
+72/374008/campos_512_v4
+72/374054/campos_512_v4
+72/374165/campos_512_v4
+72/374194/campos_512_v4
+72/374244/campos_512_v4
+72/374276/campos_512_v4
+72/374338/campos_512_v4
+72/374437/campos_512_v4
+72/374665/campos_512_v4
+72/374784/campos_512_v4
+72/374871/campos_512_v4
+72/374960/campos_512_v4
+73/375062/campos_512_v4
+73/375064/campos_512_v4
+73/375297/campos_512_v4
+73/375545/campos_512_v4
+73/375601/campos_512_v4
+73/375617/campos_512_v4
+73/375620/campos_512_v4
+73/375869/campos_512_v4
+73/375920/campos_512_v4
+73/376090/campos_512_v4
+73/376270/campos_512_v4
+73/376293/campos_512_v4
+73/376384/campos_512_v4
+73/376502/campos_512_v4
+73/376541/campos_512_v4
+73/376562/campos_512_v4
+73/376672/campos_512_v4
+73/376782/campos_512_v4
+73/376824/campos_512_v4
+73/376835/campos_512_v4
+73/376875/campos_512_v4
+73/376920/campos_512_v4
+73/376922/campos_512_v4
+73/377036/campos_512_v4
+73/377064/campos_512_v4
+73/377131/campos_512_v4
+73/377148/campos_512_v4
+73/377223/campos_512_v4
+73/377302/campos_512_v4
+73/377420/campos_512_v4
+73/377451/campos_512_v4
+73/377536/campos_512_v4
+73/377570/campos_512_v4
+73/377615/campos_512_v4
+73/377651/campos_512_v4
+73/377690/campos_512_v4
+73/377707/campos_512_v4
+73/377797/campos_512_v4
+73/377862/campos_512_v4
+73/377922/campos_512_v4
+73/377993/campos_512_v4
+73/378175/campos_512_v4
+73/378237/campos_512_v4
+73/378412/campos_512_v4
+73/378481/campos_512_v4
+73/378624/campos_512_v4
+73/378675/campos_512_v4
+73/378859/campos_512_v4
+73/378880/campos_512_v4
+73/378892/campos_512_v4
+73/378902/campos_512_v4
+73/379012/campos_512_v4
+73/379502/campos_512_v4
+73/379595/campos_512_v4
+73/379635/campos_512_v4
+73/379702/campos_512_v4
+73/379830/campos_512_v4
+73/379838/campos_512_v4
+73/379954/campos_512_v4
+73/379998/campos_512_v4
+74/380342/campos_512_v4
+74/380439/campos_512_v4
+74/380458/campos_512_v4
+74/380467/campos_512_v4
+74/380770/campos_512_v4
+74/380856/campos_512_v4
+74/380880/campos_512_v4
+74/381096/campos_512_v4
+74/381098/campos_512_v4
+74/381122/campos_512_v4
+74/381138/campos_512_v4
+74/381189/campos_512_v4
+74/381201/campos_512_v4
+74/381255/campos_512_v4
+74/381608/campos_512_v4
+74/381713/campos_512_v4
+74/381786/campos_512_v4
+74/381816/campos_512_v4
+74/381876/campos_512_v4
+74/381929/campos_512_v4
+74/381977/campos_512_v4
+74/382007/campos_512_v4
+74/382020/campos_512_v4
+74/382108/campos_512_v4
+74/382190/campos_512_v4
+74/382228/campos_512_v4
+74/382290/campos_512_v4
+74/382291/campos_512_v4
+74/382350/campos_512_v4
+74/382380/campos_512_v4
+74/382532/campos_512_v4
+74/382535/campos_512_v4
+74/382537/campos_512_v4
+74/382585/campos_512_v4
+74/382695/campos_512_v4
+74/382898/campos_512_v4
+74/382941/campos_512_v4
+74/382952/campos_512_v4
+74/383078/campos_512_v4
+74/383090/campos_512_v4
+74/383273/campos_512_v4
+74/383391/campos_512_v4
+74/383403/campos_512_v4
+74/383661/campos_512_v4
+74/383689/campos_512_v4
+74/383725/campos_512_v4
+74/383733/campos_512_v4
+74/383768/campos_512_v4
+74/383892/campos_512_v4
+74/383914/campos_512_v4
+74/383970/campos_512_v4
+74/384029/campos_512_v4
+74/384261/campos_512_v4
+74/384270/campos_512_v4
+74/384330/campos_512_v4
+74/384389/campos_512_v4
+74/384418/campos_512_v4
+74/384463/campos_512_v4
+74/384484/campos_512_v4
+74/384605/campos_512_v4
+74/384837/campos_512_v4
+74/384976/campos_512_v4
+75/385080/campos_512_v4
+75/385118/campos_512_v4
+75/385183/campos_512_v4
+75/385270/campos_512_v4
+75/385372/campos_512_v4
+75/385449/campos_512_v4
+75/385451/campos_512_v4
+75/385510/campos_512_v4
+75/385545/campos_512_v4
+75/385553/campos_512_v4
+75/385567/campos_512_v4
+75/385628/campos_512_v4
+75/385638/campos_512_v4
+75/385675/campos_512_v4
+75/385704/campos_512_v4
+75/385738/campos_512_v4
+75/385744/campos_512_v4
+75/385856/campos_512_v4
+75/385891/campos_512_v4
+75/385905/campos_512_v4
+75/385941/campos_512_v4
+75/385993/campos_512_v4
+75/386141/campos_512_v4
+75/386208/campos_512_v4
+75/386255/campos_512_v4
+75/386415/campos_512_v4
+75/386523/campos_512_v4
+75/386525/campos_512_v4
+75/386528/campos_512_v4
+75/386571/campos_512_v4
+75/386737/campos_512_v4
+75/386797/campos_512_v4
+75/386825/campos_512_v4
+75/386882/campos_512_v4
+75/386962/campos_512_v4
+75/386973/campos_512_v4
+75/387013/campos_512_v4
+75/387143/campos_512_v4
+75/387166/campos_512_v4
+75/387195/campos_512_v4
+75/387222/campos_512_v4
+75/387245/campos_512_v4
+75/387282/campos_512_v4
+75/387296/campos_512_v4
+75/387352/campos_512_v4
+75/387408/campos_512_v4
+75/387499/campos_512_v4
+75/387518/campos_512_v4
+75/387548/campos_512_v4
+75/387653/campos_512_v4
+75/387687/campos_512_v4
+75/387688/campos_512_v4
+75/387770/campos_512_v4
+75/387799/campos_512_v4
+75/387864/campos_512_v4
+75/387896/campos_512_v4
+75/387914/campos_512_v4
+75/388072/campos_512_v4
+75/388107/campos_512_v4
+75/388370/campos_512_v4
+75/388389/campos_512_v4
+75/388409/campos_512_v4
+75/388522/campos_512_v4
+75/388811/campos_512_v4
+75/388953/campos_512_v4
+75/388986/campos_512_v4
+75/388988/campos_512_v4
+75/389104/campos_512_v4
+75/389121/campos_512_v4
+75/389161/campos_512_v4
+75/389349/campos_512_v4
+75/389376/campos_512_v4
+75/389538/campos_512_v4
+75/389602/campos_512_v4
+75/389690/campos_512_v4
+75/389763/campos_512_v4
+75/389909/campos_512_v4
+75/389948/campos_512_v4
+76/390045/campos_512_v4
+76/390331/campos_512_v4
+76/390427/campos_512_v4
+76/390605/campos_512_v4
+76/390649/campos_512_v4
+76/390679/campos_512_v4
+76/390940/campos_512_v4
+76/390972/campos_512_v4
+76/391010/campos_512_v4
+76/391030/campos_512_v4
+76/391042/campos_512_v4
+76/391308/campos_512_v4
+76/391405/campos_512_v4
+76/391698/campos_512_v4
+76/391883/campos_512_v4
+76/392033/campos_512_v4
+76/392073/campos_512_v4
+76/392171/campos_512_v4
+76/392210/campos_512_v4
+76/392249/campos_512_v4
+76/392264/campos_512_v4
+76/392277/campos_512_v4
+76/392533/campos_512_v4
+76/392534/campos_512_v4
+76/392547/campos_512_v4
+76/392595/campos_512_v4
+76/392603/campos_512_v4
+76/392788/campos_512_v4
+76/392789/campos_512_v4
+76/392813/campos_512_v4
+76/392947/campos_512_v4
+76/393025/campos_512_v4
+76/393220/campos_512_v4
+76/393279/campos_512_v4
+76/393298/campos_512_v4
+76/393344/campos_512_v4
+76/393588/campos_512_v4
+76/393685/campos_512_v4
+76/393794/campos_512_v4
+76/393826/campos_512_v4
+76/393891/campos_512_v4
+76/393957/campos_512_v4
+76/393980/campos_512_v4
+76/394058/campos_512_v4
+76/394273/campos_512_v4
+76/394377/campos_512_v4
+76/394393/campos_512_v4
+76/394597/campos_512_v4
+76/394631/campos_512_v4
+76/394661/campos_512_v4
+76/394689/campos_512_v4
+76/394796/campos_512_v4
+76/394882/campos_512_v4
+76/394912/campos_512_v4
+76/394994/campos_512_v4
+77/395002/campos_512_v4
+77/395059/campos_512_v4
+77/395203/campos_512_v4
+77/395357/campos_512_v4
+77/395386/campos_512_v4
+77/395636/campos_512_v4
+77/395637/campos_512_v4
+77/395644/campos_512_v4
+77/395681/campos_512_v4
+77/395765/campos_512_v4
+77/395852/campos_512_v4
+77/395857/campos_512_v4
+77/395862/campos_512_v4
+77/395899/campos_512_v4
+77/396140/campos_512_v4
+77/396290/campos_512_v4
+77/396301/campos_512_v4
+77/396409/campos_512_v4
+77/396413/campos_512_v4
+77/396649/campos_512_v4
+77/396730/campos_512_v4
+77/397000/campos_512_v4
+77/397122/campos_512_v4
+77/397191/campos_512_v4
+77/397282/campos_512_v4
+77/397312/campos_512_v4
+77/397631/campos_512_v4
+77/397658/campos_512_v4
+77/397746/campos_512_v4
+77/397821/campos_512_v4
+77/397978/campos_512_v4
+77/398083/campos_512_v4
+77/398181/campos_512_v4
+77/398503/campos_512_v4
+77/398533/campos_512_v4
+77/398555/campos_512_v4
+77/398566/campos_512_v4
+77/398803/campos_512_v4
+77/398861/campos_512_v4
+77/398872/campos_512_v4
+77/398881/campos_512_v4
+77/398907/campos_512_v4
+77/399012/campos_512_v4
+77/399167/campos_512_v4
+77/399171/campos_512_v4
+77/399385/campos_512_v4
+77/399451/campos_512_v4
+77/399459/campos_512_v4
+77/399733/campos_512_v4
+77/399785/campos_512_v4
+77/399815/campos_512_v4
+77/399949/campos_512_v4
+78/400016/campos_512_v4
+78/400048/campos_512_v4
+78/400101/campos_512_v4
+78/400367/campos_512_v4
+78/400482/campos_512_v4
+78/400600/campos_512_v4
+78/400639/campos_512_v4
+78/400713/campos_512_v4
+78/400779/campos_512_v4
+78/400785/campos_512_v4
+78/400852/campos_512_v4
+78/400932/campos_512_v4
+78/400976/campos_512_v4
+78/401022/campos_512_v4
+78/401090/campos_512_v4
+78/401162/campos_512_v4
+78/401242/campos_512_v4
+78/401248/campos_512_v4
+78/401267/campos_512_v4
+78/401294/campos_512_v4
+78/401322/campos_512_v4
+78/401509/campos_512_v4
+78/401593/campos_512_v4
+78/401603/campos_512_v4
+78/401692/campos_512_v4
+78/401856/campos_512_v4
+78/401906/campos_512_v4
+78/401927/campos_512_v4
+78/401940/campos_512_v4
+78/401972/campos_512_v4
+78/402160/campos_512_v4
+78/402293/campos_512_v4
+78/402324/campos_512_v4
+78/402342/campos_512_v4
+78/402474/campos_512_v4
+78/402505/campos_512_v4
+78/402695/campos_512_v4
+78/402783/campos_512_v4
+78/402785/campos_512_v4
+78/402914/campos_512_v4
+78/402961/campos_512_v4
+78/403048/campos_512_v4
+78/403059/campos_512_v4
+78/403113/campos_512_v4
+78/403156/campos_512_v4
+78/403200/campos_512_v4
+78/403317/campos_512_v4
+78/403337/campos_512_v4
+78/403366/campos_512_v4
+78/403378/campos_512_v4
+78/403397/campos_512_v4
+78/403432/campos_512_v4
+78/403448/campos_512_v4
+78/403667/campos_512_v4
+78/403678/campos_512_v4
+78/403856/campos_512_v4
+78/403874/campos_512_v4
+78/404017/campos_512_v4
+78/404093/campos_512_v4
+78/404094/campos_512_v4
+78/404104/campos_512_v4
+78/404240/campos_512_v4
+78/404423/campos_512_v4
+78/404453/campos_512_v4
+78/404479/campos_512_v4
+78/404587/campos_512_v4
+78/404627/campos_512_v4
+78/404632/campos_512_v4
+78/404649/campos_512_v4
+78/404691/campos_512_v4
+78/404775/campos_512_v4
+78/404990/campos_512_v4
+79/405217/campos_512_v4
+79/405316/campos_512_v4
+79/405331/campos_512_v4
+79/405336/campos_512_v4
+79/405540/campos_512_v4
+79/405665/campos_512_v4
+79/405672/campos_512_v4
+79/405827/campos_512_v4
+79/405829/campos_512_v4
+79/405872/campos_512_v4
+79/405951/campos_512_v4
+79/406134/campos_512_v4
+79/406168/campos_512_v4
+79/406208/campos_512_v4
+79/406487/campos_512_v4
+79/406594/campos_512_v4
+79/406596/campos_512_v4
+79/406628/campos_512_v4
+79/406657/campos_512_v4
+79/406741/campos_512_v4
+79/406763/campos_512_v4
+79/406835/campos_512_v4
+79/406889/campos_512_v4
+79/407032/campos_512_v4
+79/407215/campos_512_v4
+79/407239/campos_512_v4
+79/407298/campos_512_v4
+79/407588/campos_512_v4
+79/407647/campos_512_v4
+79/407678/campos_512_v4
+79/407776/campos_512_v4
+79/407822/campos_512_v4
+79/407854/campos_512_v4
+79/408000/campos_512_v4
+79/408057/campos_512_v4
+79/408077/campos_512_v4
+79/408129/campos_512_v4
+79/408195/campos_512_v4
+79/408264/campos_512_v4
+79/408374/campos_512_v4
+79/408409/campos_512_v4
+79/408450/campos_512_v4
+79/408453/campos_512_v4
+79/408510/campos_512_v4
+79/408702/campos_512_v4
+79/408738/campos_512_v4
+79/408983/campos_512_v4
+79/409002/campos_512_v4
+79/409067/campos_512_v4
+79/409137/campos_512_v4
+79/409188/campos_512_v4
+79/409280/campos_512_v4
+79/409312/campos_512_v4
+79/409332/campos_512_v4
+79/409418/campos_512_v4
+79/409488/campos_512_v4
+79/409610/campos_512_v4
+79/409702/campos_512_v4
+79/409703/campos_512_v4
+79/409713/campos_512_v4
+79/409729/campos_512_v4
+79/409845/campos_512_v4
+79/409867/campos_512_v4
+79/409903/campos_512_v4
+79/409939/campos_512_v4
+79/409993/campos_512_v4
+8/50226/campos_512_v4
+8/50236/campos_512_v4
+8/50286/campos_512_v4
+8/50322/campos_512_v4
+8/50738/campos_512_v4
+8/50863/campos_512_v4
+8/50909/campos_512_v4
+8/50972/campos_512_v4
+8/51114/campos_512_v4
+8/51122/campos_512_v4
+8/51304/campos_512_v4
+8/51310/campos_512_v4
+8/51433/campos_512_v4
+8/51503/campos_512_v4
+8/51641/campos_512_v4
+8/51682/campos_512_v4
+8/51743/campos_512_v4
+8/52017/campos_512_v4
+8/52245/campos_512_v4
+8/52327/campos_512_v4
+8/52539/campos_512_v4
+8/52607/campos_512_v4
+8/52672/campos_512_v4
+8/52708/campos_512_v4
+8/52910/campos_512_v4
+8/52914/campos_512_v4
+8/52993/campos_512_v4
+8/53000/campos_512_v4
+8/53281/campos_512_v4
+8/53323/campos_512_v4
+8/53361/campos_512_v4
+8/53394/campos_512_v4
+8/53475/campos_512_v4
+8/53504/campos_512_v4
+8/53834/campos_512_v4
+8/54015/campos_512_v4
+8/54076/campos_512_v4
+8/54128/campos_512_v4
+8/54201/campos_512_v4
+8/54203/campos_512_v4
+8/54337/campos_512_v4
+8/54378/campos_512_v4
+8/54422/campos_512_v4
+8/54463/campos_512_v4
+8/54471/campos_512_v4
+8/54516/campos_512_v4
+8/54640/campos_512_v4
+80/410047/campos_512_v4
+80/410118/campos_512_v4
+80/410289/campos_512_v4
+80/410378/campos_512_v4
+80/410629/campos_512_v4
+80/410634/campos_512_v4
+80/410849/campos_512_v4
+80/410915/campos_512_v4
+80/410934/campos_512_v4
+80/410954/campos_512_v4
+80/410976/campos_512_v4
+80/410981/campos_512_v4
+80/411068/campos_512_v4
+80/411132/campos_512_v4
+80/411222/campos_512_v4
+80/411349/campos_512_v4
+80/411526/campos_512_v4
+80/411595/campos_512_v4
+80/411630/campos_512_v4
+80/411685/campos_512_v4
+80/411729/campos_512_v4
+80/411909/campos_512_v4
+80/411998/campos_512_v4
+80/412046/campos_512_v4
+80/412096/campos_512_v4
+80/412141/campos_512_v4
+80/412180/campos_512_v4
+80/412214/campos_512_v4
+80/412236/campos_512_v4
+80/412285/campos_512_v4
+80/412312/campos_512_v4
+80/412528/campos_512_v4
+80/412590/campos_512_v4
+80/412735/campos_512_v4
+80/413202/campos_512_v4
+80/413218/campos_512_v4
+80/413241/campos_512_v4
+80/413342/campos_512_v4
+80/413558/campos_512_v4
+80/413615/campos_512_v4
+80/413662/campos_512_v4
+80/413731/campos_512_v4
+80/413733/campos_512_v4
+80/413821/campos_512_v4
+80/413839/campos_512_v4
+80/413870/campos_512_v4
+80/414013/campos_512_v4
+80/414221/campos_512_v4
+80/414280/campos_512_v4
+80/414282/campos_512_v4
+80/414328/campos_512_v4
+80/414384/campos_512_v4
+80/414385/campos_512_v4
+80/414404/campos_512_v4
+80/414432/campos_512_v4
+80/414475/campos_512_v4
+80/414600/campos_512_v4
+80/414635/campos_512_v4
+80/414670/campos_512_v4
+80/414737/campos_512_v4
+80/414806/campos_512_v4
+80/414873/campos_512_v4
+80/414898/campos_512_v4
+80/414977/campos_512_v4
+81/415164/campos_512_v4
+81/415248/campos_512_v4
+81/415420/campos_512_v4
+81/415424/campos_512_v4
+81/415475/campos_512_v4
+81/415481/campos_512_v4
+81/415510/campos_512_v4
+81/415751/campos_512_v4
+81/415789/campos_512_v4
+81/415846/campos_512_v4
+81/416115/campos_512_v4
+81/416220/campos_512_v4
+81/416286/campos_512_v4
+81/416357/campos_512_v4
+81/416595/campos_512_v4
+81/416682/campos_512_v4
+81/416851/campos_512_v4
+81/417054/campos_512_v4
+81/417227/campos_512_v4
+81/417247/campos_512_v4
+81/417281/campos_512_v4
+81/417444/campos_512_v4
+81/417461/campos_512_v4
+81/417628/campos_512_v4
+81/417798/campos_512_v4
+81/417887/campos_512_v4
+81/417903/campos_512_v4
+81/417950/campos_512_v4
+81/417977/campos_512_v4
+81/418094/campos_512_v4
+81/418213/campos_512_v4
+81/418259/campos_512_v4
+81/418334/campos_512_v4
+81/418615/campos_512_v4
+81/418682/campos_512_v4
+81/419077/campos_512_v4
+81/419315/campos_512_v4
+81/419404/campos_512_v4
+81/419618/campos_512_v4
+81/419711/campos_512_v4
+81/419740/campos_512_v4
+81/419744/campos_512_v4
+81/419999/campos_512_v4
+82/420127/campos_512_v4
+82/420205/campos_512_v4
+82/420249/campos_512_v4
+82/420279/campos_512_v4
+82/420500/campos_512_v4
+82/420707/campos_512_v4
+82/420823/campos_512_v4
+82/420994/campos_512_v4
+82/421013/campos_512_v4
+82/421076/campos_512_v4
+82/421131/campos_512_v4
+82/421207/campos_512_v4
+82/421219/campos_512_v4
+82/421306/campos_512_v4
+82/421334/campos_512_v4
+82/421343/campos_512_v4
+82/421367/campos_512_v4
+82/421393/campos_512_v4
+82/421460/campos_512_v4
+82/421477/campos_512_v4
+82/421609/campos_512_v4
+82/421712/campos_512_v4
+82/421762/campos_512_v4
+82/421844/campos_512_v4
+82/421868/campos_512_v4
+82/421941/campos_512_v4
+82/422131/campos_512_v4
+82/422423/campos_512_v4
+82/422542/campos_512_v4
+82/422697/campos_512_v4
+82/422793/campos_512_v4
+82/422920/campos_512_v4
+82/422999/campos_512_v4
+82/423057/campos_512_v4
+82/423060/campos_512_v4
+82/423064/campos_512_v4
+82/423272/campos_512_v4
+82/423295/campos_512_v4
+82/423713/campos_512_v4
+82/423750/campos_512_v4
+82/423930/campos_512_v4
+82/423957/campos_512_v4
+82/424215/campos_512_v4
+82/424280/campos_512_v4
+82/424608/campos_512_v4
+82/424631/campos_512_v4
+82/424661/campos_512_v4
+82/424662/campos_512_v4
+82/424746/campos_512_v4
+82/424897/campos_512_v4
+83/425259/campos_512_v4
+83/425281/campos_512_v4
+83/425364/campos_512_v4
+83/425644/campos_512_v4
+83/425690/campos_512_v4
+83/425793/campos_512_v4
+83/425798/campos_512_v4
+83/425829/campos_512_v4
+83/425852/campos_512_v4
+83/425994/campos_512_v4
+83/426154/campos_512_v4
+83/426190/campos_512_v4
+83/426216/campos_512_v4
+83/426240/campos_512_v4
+83/426354/campos_512_v4
+83/426438/campos_512_v4
+83/426691/campos_512_v4
+83/426729/campos_512_v4
+83/426740/campos_512_v4
+83/426758/campos_512_v4
+83/426783/campos_512_v4
+83/426831/campos_512_v4
+83/427000/campos_512_v4
+83/427042/campos_512_v4
+83/427092/campos_512_v4
+83/427201/campos_512_v4
+83/427288/campos_512_v4
+83/427392/campos_512_v4
+83/427465/campos_512_v4
+83/427466/campos_512_v4
+83/427501/campos_512_v4
+83/427745/campos_512_v4
+83/427774/campos_512_v4
+83/427892/campos_512_v4
+83/427952/campos_512_v4
+83/427982/campos_512_v4
+83/428065/campos_512_v4
+83/428382/campos_512_v4
+83/428538/campos_512_v4
+83/428581/campos_512_v4
+83/428582/campos_512_v4
+83/428627/campos_512_v4
+83/428773/campos_512_v4
+83/428787/campos_512_v4
+83/428838/campos_512_v4
+83/428865/campos_512_v4
+83/428958/campos_512_v4
+83/429000/campos_512_v4
+83/429263/campos_512_v4
+83/429301/campos_512_v4
+83/429697/campos_512_v4
+84/430010/campos_512_v4
+84/430043/campos_512_v4
+84/430093/campos_512_v4
+84/430143/campos_512_v4
+84/430247/campos_512_v4
+84/430249/campos_512_v4
+84/430288/campos_512_v4
+84/430369/campos_512_v4
+84/430393/campos_512_v4
+84/430504/campos_512_v4
+84/430538/campos_512_v4
+84/430573/campos_512_v4
+84/430617/campos_512_v4
+84/430656/campos_512_v4
+84/430658/campos_512_v4
+84/430780/campos_512_v4
+84/430796/campos_512_v4
+84/430871/campos_512_v4
+84/430965/campos_512_v4
+84/431140/campos_512_v4
+84/431160/campos_512_v4
+84/431188/campos_512_v4
+84/431218/campos_512_v4
+84/431309/campos_512_v4
+84/431366/campos_512_v4
+84/431387/campos_512_v4
+84/431519/campos_512_v4
+84/431544/campos_512_v4
+84/431609/campos_512_v4
+84/431694/campos_512_v4
+84/431706/campos_512_v4
+84/431714/campos_512_v4
+84/431773/campos_512_v4
+84/431828/campos_512_v4
+84/431936/campos_512_v4
+84/432020/campos_512_v4
+84/432131/campos_512_v4
+84/432180/campos_512_v4
+84/432225/campos_512_v4
+84/432320/campos_512_v4
+84/432324/campos_512_v4
+84/432335/campos_512_v4
+84/432347/campos_512_v4
+84/432367/campos_512_v4
+84/432450/campos_512_v4
+84/432500/campos_512_v4
+84/432582/campos_512_v4
+84/432712/campos_512_v4
+84/432737/campos_512_v4
+84/432769/campos_512_v4
+84/432826/campos_512_v4
+84/432866/campos_512_v4
+84/432974/campos_512_v4
+84/432994/campos_512_v4
+84/433301/campos_512_v4
+84/433353/campos_512_v4
+84/433550/campos_512_v4
+84/433580/campos_512_v4
+84/433583/campos_512_v4
+84/433715/campos_512_v4
+84/433803/campos_512_v4
+84/433805/campos_512_v4
+84/433844/campos_512_v4
+84/433964/campos_512_v4
+84/433996/campos_512_v4
+84/433999/campos_512_v4
+84/434079/campos_512_v4
+84/434096/campos_512_v4
+84/434177/campos_512_v4
+84/434324/campos_512_v4
+84/434408/campos_512_v4
+84/434475/campos_512_v4
+84/434560/campos_512_v4
+84/434588/campos_512_v4
+84/434594/campos_512_v4
+85/435060/campos_512_v4
+85/435132/campos_512_v4
+85/435207/campos_512_v4
+85/435266/campos_512_v4
+85/435311/campos_512_v4
+85/435605/campos_512_v4
+85/435736/campos_512_v4
+85/435763/campos_512_v4
+85/435789/campos_512_v4
+85/435819/campos_512_v4
+85/435832/campos_512_v4
+85/435849/campos_512_v4
+85/436141/campos_512_v4
+85/436172/campos_512_v4
+85/436251/campos_512_v4
+85/436280/campos_512_v4
+85/436413/campos_512_v4
+85/436509/campos_512_v4
+85/436539/campos_512_v4
+85/436548/campos_512_v4
+85/436618/campos_512_v4
+85/436668/campos_512_v4
+85/436690/campos_512_v4
+85/436757/campos_512_v4
+85/437064/campos_512_v4
+85/437130/campos_512_v4
+85/437146/campos_512_v4
+85/437266/campos_512_v4
+85/437394/campos_512_v4
+85/437403/campos_512_v4
+85/437425/campos_512_v4
+85/437431/campos_512_v4
+85/437467/campos_512_v4
+85/437583/campos_512_v4
+85/437601/campos_512_v4
+85/437733/campos_512_v4
+85/438121/campos_512_v4
+85/438134/campos_512_v4
+85/438342/campos_512_v4
+85/438390/campos_512_v4
+85/438498/campos_512_v4
+85/438520/campos_512_v4
+85/438739/campos_512_v4
+85/438760/campos_512_v4
+85/438801/campos_512_v4
+85/438881/campos_512_v4
+85/438917/campos_512_v4
+85/439199/campos_512_v4
+85/439219/campos_512_v4
+85/439411/campos_512_v4
+85/439467/campos_512_v4
+85/439472/campos_512_v4
+85/439603/campos_512_v4
+86/440079/campos_512_v4
+86/440212/campos_512_v4
+86/440226/campos_512_v4
+86/440318/campos_512_v4
+86/440366/campos_512_v4
+86/440522/campos_512_v4
+86/440665/campos_512_v4
+86/440693/campos_512_v4
+86/440830/campos_512_v4
+86/440926/campos_512_v4
+86/440989/campos_512_v4
+86/441055/campos_512_v4
+86/441067/campos_512_v4
+86/441164/campos_512_v4
+86/441649/campos_512_v4
+86/441777/campos_512_v4
+86/441866/campos_512_v4
+86/441909/campos_512_v4
+86/441949/campos_512_v4
+86/442020/campos_512_v4
+86/442178/campos_512_v4
+86/442532/campos_512_v4
+86/442645/campos_512_v4
+86/442830/campos_512_v4
+86/442835/campos_512_v4
+86/442845/campos_512_v4
+86/442894/campos_512_v4
+86/443048/campos_512_v4
+86/443062/campos_512_v4
+86/443074/campos_512_v4
+86/443110/campos_512_v4
+86/443326/campos_512_v4
+86/443338/campos_512_v4
+86/443657/campos_512_v4
+86/443861/campos_512_v4
+86/444026/campos_512_v4
+86/444036/campos_512_v4
+86/444088/campos_512_v4
+86/444099/campos_512_v4
+86/444381/campos_512_v4
+86/444390/campos_512_v4
+86/444539/campos_512_v4
+86/444582/campos_512_v4
+86/444891/campos_512_v4
+86/444904/campos_512_v4
+86/444938/campos_512_v4
+87/445154/campos_512_v4
+87/445207/campos_512_v4
+87/445330/campos_512_v4
+87/445375/campos_512_v4
+87/445416/campos_512_v4
+87/445482/campos_512_v4
+87/445711/campos_512_v4
+87/446095/campos_512_v4
+87/446222/campos_512_v4
+87/446282/campos_512_v4
+87/446373/campos_512_v4
+87/446406/campos_512_v4
+87/446564/campos_512_v4
+87/446670/campos_512_v4
+87/446739/campos_512_v4
+87/446853/campos_512_v4
+87/446854/campos_512_v4
+87/447113/campos_512_v4
+87/447190/campos_512_v4
+87/447227/campos_512_v4
+87/447272/campos_512_v4
+87/447360/campos_512_v4
+87/447381/campos_512_v4
+87/447532/campos_512_v4
+87/447583/campos_512_v4
+87/447842/campos_512_v4
+87/447849/campos_512_v4
+87/447940/campos_512_v4
+87/448017/campos_512_v4
+87/448084/campos_512_v4
+87/448259/campos_512_v4
+87/448391/campos_512_v4
+87/448433/campos_512_v4
+87/448514/campos_512_v4
+87/448644/campos_512_v4
+87/448991/campos_512_v4
+87/449033/campos_512_v4
+87/449187/campos_512_v4
+87/449208/campos_512_v4
+87/449215/campos_512_v4
+87/449375/campos_512_v4
+87/449417/campos_512_v4
+87/449439/campos_512_v4
+87/449454/campos_512_v4
+87/449471/campos_512_v4
+87/449756/campos_512_v4
+88/450040/campos_512_v4
+88/450066/campos_512_v4
+88/450145/campos_512_v4
+88/450146/campos_512_v4
+88/450189/campos_512_v4
+88/450325/campos_512_v4
+88/450641/campos_512_v4
+88/450787/campos_512_v4
+88/450841/campos_512_v4
+88/451050/campos_512_v4
+88/451103/campos_512_v4
+88/451117/campos_512_v4
+88/451224/campos_512_v4
+88/451426/campos_512_v4
+88/451538/campos_512_v4
+88/451641/campos_512_v4
+88/451770/campos_512_v4
+88/451776/campos_512_v4
+88/451796/campos_512_v4
+88/451819/campos_512_v4
+88/451865/campos_512_v4
+88/451928/campos_512_v4
+88/451969/campos_512_v4
+88/452028/campos_512_v4
+88/452033/campos_512_v4
+88/452077/campos_512_v4
+88/452081/campos_512_v4
+88/452221/campos_512_v4
+88/452579/campos_512_v4
+88/452904/campos_512_v4
+88/453158/campos_512_v4
+88/453188/campos_512_v4
+88/453200/campos_512_v4
+88/453243/campos_512_v4
+88/453295/campos_512_v4
+88/453356/campos_512_v4
+88/453536/campos_512_v4
+88/453587/campos_512_v4
+88/453588/campos_512_v4
+88/453616/campos_512_v4
+88/453635/campos_512_v4
+88/453722/campos_512_v4
+88/453790/campos_512_v4
+88/453840/campos_512_v4
+88/454062/campos_512_v4
+88/454342/campos_512_v4
+88/454653/campos_512_v4
+88/454702/campos_512_v4
+88/454734/campos_512_v4
+88/454738/campos_512_v4
+88/454758/campos_512_v4
+88/454770/campos_512_v4
+89/455094/campos_512_v4
+89/455105/campos_512_v4
+89/455254/campos_512_v4
+89/455386/campos_512_v4
+89/455455/campos_512_v4
+89/455531/campos_512_v4
+89/455547/campos_512_v4
+89/455560/campos_512_v4
+89/455672/campos_512_v4
+89/455844/campos_512_v4
+89/455955/campos_512_v4
+89/455977/campos_512_v4
+89/456483/campos_512_v4
+89/456489/campos_512_v4
+89/456574/campos_512_v4
+89/456579/campos_512_v4
+89/456605/campos_512_v4
+89/456749/campos_512_v4
+89/456916/campos_512_v4
+89/456986/campos_512_v4
+89/457093/campos_512_v4
+89/457131/campos_512_v4
+89/457187/campos_512_v4
+89/457251/campos_512_v4
+89/457361/campos_512_v4
+89/457447/campos_512_v4
+89/457520/campos_512_v4
+89/457558/campos_512_v4
+89/457651/campos_512_v4
+89/457762/campos_512_v4
+89/457766/campos_512_v4
+89/457817/campos_512_v4
+89/457871/campos_512_v4
+89/457905/campos_512_v4
+89/458104/campos_512_v4
+89/458117/campos_512_v4
+89/458161/campos_512_v4
+89/458252/campos_512_v4
+89/458363/campos_512_v4
+89/458714/campos_512_v4
+89/458796/campos_512_v4
+89/458832/campos_512_v4
+89/458935/campos_512_v4
+89/458956/campos_512_v4
+89/459084/campos_512_v4
+89/459188/campos_512_v4
+89/459380/campos_512_v4
+89/459934/campos_512_v4
+89/459949/campos_512_v4
+9/55032/campos_512_v4
+9/55055/campos_512_v4
+9/55185/campos_512_v4
+9/55326/campos_512_v4
+9/55361/campos_512_v4
+9/55559/campos_512_v4
+9/55590/campos_512_v4
+9/55599/campos_512_v4
+9/55712/campos_512_v4
+9/56158/campos_512_v4
+9/56172/campos_512_v4
+9/56201/campos_512_v4
+9/56207/campos_512_v4
+9/56713/campos_512_v4
+9/56844/campos_512_v4
+9/56849/campos_512_v4
+9/56927/campos_512_v4
+9/56937/campos_512_v4
+9/57277/campos_512_v4
+9/57291/campos_512_v4
+9/57318/campos_512_v4
+9/57387/campos_512_v4
+9/57598/campos_512_v4
+9/57622/campos_512_v4
+9/57791/campos_512_v4
+9/57842/campos_512_v4
+9/57934/campos_512_v4
+9/58024/campos_512_v4
+9/58089/campos_512_v4
+9/58091/campos_512_v4
+9/58377/campos_512_v4
+9/58476/campos_512_v4
+9/58614/campos_512_v4
+9/58726/campos_512_v4
+9/58776/campos_512_v4
+9/58866/campos_512_v4
+9/59049/campos_512_v4
+9/59093/campos_512_v4
+9/59227/campos_512_v4
+9/59460/campos_512_v4
+9/59524/campos_512_v4
+9/59544/campos_512_v4
+9/59591/campos_512_v4
+9/59761/campos_512_v4
+9/59841/campos_512_v4
+9/59972/campos_512_v4
+9/59977/campos_512_v4
+90/460024/campos_512_v4
+90/460224/campos_512_v4
+90/460361/campos_512_v4
+90/460480/campos_512_v4
+90/460511/campos_512_v4
+90/460553/campos_512_v4
+90/460556/campos_512_v4
+90/460726/campos_512_v4
+90/460747/campos_512_v4
+90/460830/campos_512_v4
+90/460846/campos_512_v4
+90/460879/campos_512_v4
+90/460921/campos_512_v4
+90/461027/campos_512_v4
+90/461137/campos_512_v4
+90/461149/campos_512_v4
+90/461278/campos_512_v4
+90/461506/campos_512_v4
+90/461620/campos_512_v4
+90/461782/campos_512_v4
+90/461918/campos_512_v4
+90/462160/campos_512_v4
+90/462213/campos_512_v4
+90/462336/campos_512_v4
+90/462458/campos_512_v4
+90/462489/campos_512_v4
+90/462516/campos_512_v4
+90/462524/campos_512_v4
+90/462594/campos_512_v4
+90/462614/campos_512_v4
+90/462672/campos_512_v4
+90/462731/campos_512_v4
+90/462884/campos_512_v4
+90/462945/campos_512_v4
+90/462995/campos_512_v4
+90/463087/campos_512_v4
+90/463196/campos_512_v4
+90/463232/campos_512_v4
+90/463245/campos_512_v4
+90/463248/campos_512_v4
+90/463340/campos_512_v4
+90/463507/campos_512_v4
+90/463688/campos_512_v4
+90/463701/campos_512_v4
+90/463703/campos_512_v4
+90/463896/campos_512_v4
+90/464003/campos_512_v4
+90/464089/campos_512_v4
+90/464100/campos_512_v4
+90/464277/campos_512_v4
+90/464281/campos_512_v4
+90/464656/campos_512_v4
+90/464680/campos_512_v4
+90/464746/campos_512_v4
+90/464775/campos_512_v4
+90/464818/campos_512_v4
+90/464894/campos_512_v4
+91/465101/campos_512_v4
+91/465175/campos_512_v4
+91/465226/campos_512_v4
+91/465235/campos_512_v4
+91/465588/campos_512_v4
+91/465602/campos_512_v4
+91/465694/campos_512_v4
+91/465804/campos_512_v4
+91/465894/campos_512_v4
+91/466065/campos_512_v4
+91/466171/campos_512_v4
+91/466191/campos_512_v4
+91/466318/campos_512_v4
+91/466375/campos_512_v4
+91/466478/campos_512_v4
+91/466543/campos_512_v4
+91/466691/campos_512_v4
+91/466755/campos_512_v4
+91/466832/campos_512_v4
+91/466857/campos_512_v4
+91/466960/campos_512_v4
+91/467052/campos_512_v4
+91/467072/campos_512_v4
+91/467091/campos_512_v4
+91/467206/campos_512_v4
+91/467259/campos_512_v4
+91/467416/campos_512_v4
+91/467540/campos_512_v4
+91/467565/campos_512_v4
+91/467699/campos_512_v4
+91/467741/campos_512_v4
+91/467789/campos_512_v4
+91/467801/campos_512_v4
+91/467840/campos_512_v4
+91/467844/campos_512_v4
+91/467927/campos_512_v4
+91/468019/campos_512_v4
+91/468036/campos_512_v4
+91/468163/campos_512_v4
+91/468165/campos_512_v4
+91/468169/campos_512_v4
+91/468200/campos_512_v4
+91/468230/campos_512_v4
+91/468309/campos_512_v4
+91/468575/campos_512_v4
+91/468637/campos_512_v4
+91/468744/campos_512_v4
+91/468758/campos_512_v4
+91/468763/campos_512_v4
+91/469105/campos_512_v4
+91/469179/campos_512_v4
+91/469211/campos_512_v4
+91/469241/campos_512_v4
+91/469535/campos_512_v4
+91/469624/campos_512_v4
+91/469735/campos_512_v4
+91/469903/campos_512_v4
+92/470105/campos_512_v4
+92/470169/campos_512_v4
+92/470191/campos_512_v4
+92/470279/campos_512_v4
+92/470298/campos_512_v4
+92/470646/campos_512_v4
+92/470762/campos_512_v4
+92/470938/campos_512_v4
+92/470980/campos_512_v4
+92/470981/campos_512_v4
+92/471081/campos_512_v4
+92/471098/campos_512_v4
+92/471126/campos_512_v4
+92/471173/campos_512_v4
+92/471175/campos_512_v4
+92/471231/campos_512_v4
+92/471303/campos_512_v4
+92/471310/campos_512_v4
+92/471342/campos_512_v4
+92/471362/campos_512_v4
+92/471363/campos_512_v4
+92/471403/campos_512_v4
+92/471521/campos_512_v4
+92/471537/campos_512_v4
+92/471568/campos_512_v4
+92/471570/campos_512_v4
+92/471623/campos_512_v4
+92/471675/campos_512_v4
+92/471753/campos_512_v4
+92/471800/campos_512_v4
+92/471827/campos_512_v4
+92/471862/campos_512_v4
+92/471914/campos_512_v4
+92/472115/campos_512_v4
+92/472167/campos_512_v4
+92/472238/campos_512_v4
+92/472441/campos_512_v4
+92/472502/campos_512_v4
+92/472573/campos_512_v4
+92/472574/campos_512_v4
+92/472612/campos_512_v4
+92/472760/campos_512_v4
+92/472809/campos_512_v4
+92/472921/campos_512_v4
+92/473058/campos_512_v4
+92/473136/campos_512_v4
+92/473158/campos_512_v4
+92/473198/campos_512_v4
+92/473269/campos_512_v4
+92/473295/campos_512_v4
+92/473388/campos_512_v4
+92/473497/campos_512_v4
+92/473501/campos_512_v4
+92/473686/campos_512_v4
+92/473719/campos_512_v4
+92/473834/campos_512_v4
+92/473917/campos_512_v4
+92/474026/campos_512_v4
+92/474150/campos_512_v4
+92/474178/campos_512_v4
+92/474271/campos_512_v4
+92/474405/campos_512_v4
+92/474481/campos_512_v4
+92/474493/campos_512_v4
+92/474506/campos_512_v4
+92/474511/campos_512_v4
+92/474551/campos_512_v4
+92/474588/campos_512_v4
+92/474589/campos_512_v4
+92/474696/campos_512_v4
+92/474699/campos_512_v4
+92/474754/campos_512_v4
+92/474829/campos_512_v4
+92/474844/campos_512_v4
+92/474953/campos_512_v4
+93/475002/campos_512_v4
+93/475084/campos_512_v4
+93/475164/campos_512_v4
+93/475339/campos_512_v4
+93/475431/campos_512_v4
+93/475504/campos_512_v4
+93/475603/campos_512_v4
+93/475629/campos_512_v4
+93/475922/campos_512_v4
+93/476050/campos_512_v4
+93/476237/campos_512_v4
+93/476243/campos_512_v4
+93/476254/campos_512_v4
+93/476432/campos_512_v4
+93/476794/campos_512_v4
+93/477249/campos_512_v4
+93/477299/campos_512_v4
+93/477524/campos_512_v4
+93/477644/campos_512_v4
+93/477745/campos_512_v4
+93/477829/campos_512_v4
+93/478090/campos_512_v4
+93/478099/campos_512_v4
+93/478122/campos_512_v4
+93/478167/campos_512_v4
+93/478192/campos_512_v4
+93/478463/campos_512_v4
+93/478468/campos_512_v4
+93/478527/campos_512_v4
+93/478638/campos_512_v4
+93/478678/campos_512_v4
+93/478781/campos_512_v4
+93/478897/campos_512_v4
+93/478898/campos_512_v4
+93/479020/campos_512_v4
+93/479021/campos_512_v4
+93/479084/campos_512_v4
+93/479235/campos_512_v4
+93/479258/campos_512_v4
+93/479279/campos_512_v4
+93/479422/campos_512_v4
+93/479424/campos_512_v4
+93/479426/campos_512_v4
+93/479536/campos_512_v4
+93/479660/campos_512_v4
+93/479786/campos_512_v4
+93/479839/campos_512_v4
+93/479918/campos_512_v4
+93/479966/campos_512_v4
+94/480192/campos_512_v4
+94/480207/campos_512_v4
+94/480275/campos_512_v4
+94/480276/campos_512_v4
+94/480299/campos_512_v4
+94/480460/campos_512_v4
+94/480593/campos_512_v4
+94/480618/campos_512_v4
+94/480676/campos_512_v4
+94/480752/campos_512_v4
+94/480775/campos_512_v4
+94/480813/campos_512_v4
+94/480835/campos_512_v4
+94/480883/campos_512_v4
+94/481047/campos_512_v4
+94/481075/campos_512_v4
+94/481099/campos_512_v4
+94/481292/campos_512_v4
+94/481373/campos_512_v4
+94/481431/campos_512_v4
+94/481574/campos_512_v4
+94/481760/campos_512_v4
+94/481761/campos_512_v4
+94/481835/campos_512_v4
+94/481927/campos_512_v4
+94/481930/campos_512_v4
+94/482187/campos_512_v4
+94/482249/campos_512_v4
+94/482265/campos_512_v4
+94/482313/campos_512_v4
+94/482347/campos_512_v4
+94/482352/campos_512_v4
+94/482475/campos_512_v4
+94/482510/campos_512_v4
+94/482638/campos_512_v4
+94/482646/campos_512_v4
+94/482734/campos_512_v4
+94/482740/campos_512_v4
+94/482772/campos_512_v4
+94/482787/campos_512_v4
+94/482809/campos_512_v4
+94/482882/campos_512_v4
+94/483027/campos_512_v4
+94/483085/campos_512_v4
+94/483200/campos_512_v4
+94/483427/campos_512_v4
+94/483471/campos_512_v4
+94/483546/campos_512_v4
+94/483564/campos_512_v4
+94/483611/campos_512_v4
+94/483615/campos_512_v4
+94/483672/campos_512_v4
+94/483818/campos_512_v4
+94/483869/campos_512_v4
+94/483892/campos_512_v4
+94/483910/campos_512_v4
+94/483931/campos_512_v4
+94/483957/campos_512_v4
+94/484133/campos_512_v4
+94/484135/campos_512_v4
+94/484167/campos_512_v4
+94/484659/campos_512_v4
+94/484811/campos_512_v4
+94/484882/campos_512_v4
+95/485022/campos_512_v4
+95/485239/campos_512_v4
+95/485480/campos_512_v4
+95/485496/campos_512_v4
+95/485504/campos_512_v4
+95/485698/campos_512_v4
+95/485700/campos_512_v4
+95/485754/campos_512_v4
+95/485841/campos_512_v4
+95/486060/campos_512_v4
+95/486066/campos_512_v4
+95/486124/campos_512_v4
+95/486368/campos_512_v4
+95/486443/campos_512_v4
+95/486581/campos_512_v4
+95/486630/campos_512_v4
+95/486664/campos_512_v4
+95/486775/campos_512_v4
+95/486866/campos_512_v4
+95/487000/campos_512_v4
+95/487028/campos_512_v4
+95/487039/campos_512_v4
+95/487072/campos_512_v4
+95/487102/campos_512_v4
+95/487222/campos_512_v4
+95/487227/campos_512_v4
+95/487282/campos_512_v4
+95/487304/campos_512_v4
+95/487490/campos_512_v4
+95/487638/campos_512_v4
+95/487648/campos_512_v4
+95/487985/campos_512_v4
+95/488108/campos_512_v4
+95/488276/campos_512_v4
+95/488383/campos_512_v4
+95/488494/campos_512_v4
+95/488497/campos_512_v4
+95/488505/campos_512_v4
+95/488544/campos_512_v4
+95/488618/campos_512_v4
+95/488623/campos_512_v4
+95/488649/campos_512_v4
+95/488893/campos_512_v4
+95/488927/campos_512_v4
+95/488928/campos_512_v4
+95/489118/campos_512_v4
+95/489191/campos_512_v4
+95/489200/campos_512_v4
+95/489352/campos_512_v4
+95/489439/campos_512_v4
+95/489506/campos_512_v4
+95/489538/campos_512_v4
+95/489589/campos_512_v4
+95/489602/campos_512_v4
+95/489728/campos_512_v4
+95/489850/campos_512_v4
+95/489860/campos_512_v4
+95/489866/campos_512_v4
+95/489871/campos_512_v4
+95/489895/campos_512_v4
+96/490015/campos_512_v4
+96/490100/campos_512_v4
+96/490105/campos_512_v4
+96/490129/campos_512_v4
+96/490155/campos_512_v4
+96/490202/campos_512_v4
+96/490206/campos_512_v4
+96/490324/campos_512_v4
+96/490328/campos_512_v4
+96/490359/campos_512_v4
+96/490409/campos_512_v4
+96/490439/campos_512_v4
+96/490468/campos_512_v4
+96/490495/campos_512_v4
+96/490550/campos_512_v4
+96/490602/campos_512_v4
+96/490665/campos_512_v4
+96/490676/campos_512_v4
+96/490723/campos_512_v4
+96/490805/campos_512_v4
+96/490818/campos_512_v4
+96/490839/campos_512_v4
+96/490848/campos_512_v4
+96/490862/campos_512_v4
+96/491024/campos_512_v4
+96/491060/campos_512_v4
+96/491065/campos_512_v4
+96/491132/campos_512_v4
+96/491237/campos_512_v4
+96/491252/campos_512_v4
+96/491337/campos_512_v4
+96/491371/campos_512_v4
+96/491503/campos_512_v4
+96/491732/campos_512_v4
+96/491775/campos_512_v4
+96/491841/campos_512_v4
+96/491946/campos_512_v4
+96/492095/campos_512_v4
+96/492112/campos_512_v4
+96/492232/campos_512_v4
+96/492530/campos_512_v4
+96/492570/campos_512_v4
+96/492601/campos_512_v4
+96/492605/campos_512_v4
+96/492607/campos_512_v4
+96/492632/campos_512_v4
+96/493160/campos_512_v4
+96/493287/campos_512_v4
+96/493407/campos_512_v4
+96/493429/campos_512_v4
+96/493491/campos_512_v4
+96/493498/campos_512_v4
+96/493565/campos_512_v4
+96/493583/campos_512_v4
+96/493843/campos_512_v4
+96/493981/campos_512_v4
+96/494016/campos_512_v4
+96/494024/campos_512_v4
+96/494123/campos_512_v4
+96/494154/campos_512_v4
+96/494262/campos_512_v4
+96/494295/campos_512_v4
+96/494496/campos_512_v4
+96/494742/campos_512_v4
+96/494817/campos_512_v4
+96/494838/campos_512_v4
+96/494859/campos_512_v4
+96/494950/campos_512_v4
+96/494974/campos_512_v4
+96/494995/campos_512_v4
+97/495398/campos_512_v4
+97/495475/campos_512_v4
+97/495617/campos_512_v4
+97/495806/campos_512_v4
+97/495831/campos_512_v4
+97/495841/campos_512_v4
+97/495888/campos_512_v4
+97/495922/campos_512_v4
+97/495963/campos_512_v4
+97/496256/campos_512_v4
+97/496285/campos_512_v4
+97/496317/campos_512_v4
+97/496361/campos_512_v4
+97/496431/campos_512_v4
+97/496530/campos_512_v4
+97/496625/campos_512_v4
+97/496696/campos_512_v4
+97/497000/campos_512_v4
+97/497383/campos_512_v4
+97/497384/campos_512_v4
+97/497392/campos_512_v4
+97/497394/campos_512_v4
+97/497398/campos_512_v4
+97/497493/campos_512_v4
+97/497566/campos_512_v4
+97/497680/campos_512_v4
+97/498112/campos_512_v4
+97/498124/campos_512_v4
+97/498268/campos_512_v4
+97/498282/campos_512_v4
+97/498511/campos_512_v4
+97/498722/campos_512_v4
+97/498736/campos_512_v4
+97/498771/campos_512_v4
+97/498781/campos_512_v4
+97/498949/campos_512_v4
+97/498979/campos_512_v4
+97/498987/campos_512_v4
+97/499065/campos_512_v4
+97/499263/campos_512_v4
+97/499296/campos_512_v4
+97/499413/campos_512_v4
+97/499726/campos_512_v4
+97/499930/campos_512_v4
+97/499969/campos_512_v4
+98/500003/campos_512_v4
+98/500005/campos_512_v4
+98/500289/campos_512_v4
+98/500850/campos_512_v4
+98/500903/campos_512_v4
+98/500916/campos_512_v4
+98/500982/campos_512_v4
+98/500995/campos_512_v4
+98/500999/campos_512_v4
+98/501022/campos_512_v4
+98/501086/campos_512_v4
+98/501129/campos_512_v4
+98/501170/campos_512_v4
+98/501180/campos_512_v4
+98/501648/campos_512_v4
+98/501817/campos_512_v4
+98/501845/campos_512_v4
+98/501973/campos_512_v4
+98/502008/campos_512_v4
+98/502038/campos_512_v4
+98/502043/campos_512_v4
+98/502404/campos_512_v4
+98/502818/campos_512_v4
+98/502926/campos_512_v4
+98/503014/campos_512_v4
+98/503078/campos_512_v4
+98/503094/campos_512_v4
+98/503151/campos_512_v4
+98/503436/campos_512_v4
+98/503634/campos_512_v4
+98/503640/campos_512_v4
+98/503659/campos_512_v4
+98/503830/campos_512_v4
+98/503902/campos_512_v4
+98/503933/campos_512_v4
+98/504049/campos_512_v4
+98/504214/campos_512_v4
+98/504372/campos_512_v4
+98/504539/campos_512_v4
+98/504618/campos_512_v4
+98/504668/campos_512_v4
+98/504720/campos_512_v4
+98/504740/campos_512_v4
+98/504794/campos_512_v4
+98/504835/campos_512_v4
+98/504950/campos_512_v4
+99/505067/campos_512_v4
+99/505144/campos_512_v4
+99/505204/campos_512_v4
+99/505470/campos_512_v4
+99/505497/campos_512_v4
+99/505614/campos_512_v4
+99/505650/campos_512_v4
+99/505796/campos_512_v4
+99/505801/campos_512_v4
+99/505805/campos_512_v4
+99/505887/campos_512_v4
+99/505988/campos_512_v4
+99/506009/campos_512_v4
+99/506079/campos_512_v4
+99/506176/campos_512_v4
+99/506467/campos_512_v4
+99/506505/campos_512_v4
+99/506611/campos_512_v4
+99/506737/campos_512_v4
+99/506767/campos_512_v4
+99/506874/campos_512_v4
+99/506947/campos_512_v4
+99/507176/campos_512_v4
+99/507211/campos_512_v4
+99/507225/campos_512_v4
+99/507437/campos_512_v4
+99/507577/campos_512_v4
+99/507624/campos_512_v4
+99/507832/campos_512_v4
+99/507873/campos_512_v4
+99/507876/campos_512_v4
+99/507893/campos_512_v4
+99/507894/campos_512_v4
+99/507906/campos_512_v4
+99/507995/campos_512_v4
+99/508231/campos_512_v4
+99/508287/campos_512_v4
+99/508315/campos_512_v4
+99/508333/campos_512_v4
+99/508366/campos_512_v4
+99/508430/campos_512_v4
+99/508496/campos_512_v4
+99/508542/campos_512_v4
+99/508615/campos_512_v4
+99/508632/campos_512_v4
+99/508724/campos_512_v4
+99/508743/campos_512_v4
+99/508794/campos_512_v4
+99/508899/campos_512_v4
+99/508962/campos_512_v4
+99/508998/campos_512_v4
+99/509067/campos_512_v4
+99/509135/campos_512_v4
+99/509306/campos_512_v4
+99/509409/campos_512_v4
+99/509437/campos_512_v4
+99/509508/campos_512_v4
+99/509591/campos_512_v4
+99/509689/campos_512_v4
+99/509906/campos_512_v4
+99/509951/campos_512_v4
diff --git a/shell_scripts/raw_img_list/daily-used.txt b/shell_scripts/raw_img_list/daily-used.txt
new file mode 100644
index 0000000000000000000000000000000000000000..cb7b6e551887c95f5dfd4bb094f0e4f84515d0d1
--- /dev/null
+++ b/shell_scripts/raw_img_list/daily-used.txt
@@ -0,0 +1,98382 @@
+0/10008/campos_512_v4
+0/10010/campos_512_v4
+0/10011/campos_512_v4
+0/10027/campos_512_v4
+0/10029/campos_512_v4
+0/10030/campos_512_v4
+0/10040/campos_512_v4
+0/10042/campos_512_v4
+0/10044/campos_512_v4
+0/10052/campos_512_v4
+0/10053/campos_512_v4
+0/10056/campos_512_v4
+0/10061/campos_512_v4
+0/10069/campos_512_v4
+0/10086/campos_512_v4
+0/10087/campos_512_v4
+0/10092/campos_512_v4
+0/10096/campos_512_v4
+0/10100/campos_512_v4
+0/10106/campos_512_v4
+0/10107/campos_512_v4
+0/10110/campos_512_v4
+0/10111/campos_512_v4
+0/10116/campos_512_v4
+0/10117/campos_512_v4
+0/10119/campos_512_v4
+0/10146/campos_512_v4
+0/10147/campos_512_v4
+0/10148/campos_512_v4
+0/10151/campos_512_v4
+0/10158/campos_512_v4
+0/10166/campos_512_v4
+0/10170/campos_512_v4
+0/10173/campos_512_v4
+0/10175/campos_512_v4
+0/10186/campos_512_v4
+0/10189/campos_512_v4
+0/10191/campos_512_v4
+0/10192/campos_512_v4
+0/10204/campos_512_v4
+0/10211/campos_512_v4
+0/10222/campos_512_v4
+0/10239/campos_512_v4
+0/10245/campos_512_v4
+0/10248/campos_512_v4
+0/10251/campos_512_v4
+0/10264/campos_512_v4
+0/10269/campos_512_v4
+0/10272/campos_512_v4
+0/10286/campos_512_v4
+0/10287/campos_512_v4
+0/10290/campos_512_v4
+0/10294/campos_512_v4
+0/10295/campos_512_v4
+0/10297/campos_512_v4
+0/10299/campos_512_v4
+0/10301/campos_512_v4
+0/10307/campos_512_v4
+0/10309/campos_512_v4
+0/10312/campos_512_v4
+0/10316/campos_512_v4
+0/10323/campos_512_v4
+0/10324/campos_512_v4
+0/10327/campos_512_v4
+0/10337/campos_512_v4
+0/10343/campos_512_v4
+0/10345/campos_512_v4
+0/10348/campos_512_v4
+0/10351/campos_512_v4
+0/10357/campos_512_v4
+0/10366/campos_512_v4
+0/10379/campos_512_v4
+0/10380/campos_512_v4
+0/10381/campos_512_v4
+0/10382/campos_512_v4
+0/10384/campos_512_v4
+0/10401/campos_512_v4
+0/10402/campos_512_v4
+0/10404/campos_512_v4
+0/10405/campos_512_v4
+0/10413/campos_512_v4
+0/10417/campos_512_v4
+0/10421/campos_512_v4
+0/10423/campos_512_v4
+0/10435/campos_512_v4
+0/10440/campos_512_v4
+0/10441/campos_512_v4
+0/10443/campos_512_v4
+0/10446/campos_512_v4
+0/10448/campos_512_v4
+0/10451/campos_512_v4
+0/10456/campos_512_v4
+0/10463/campos_512_v4
+0/10468/campos_512_v4
+0/10474/campos_512_v4
+0/10480/campos_512_v4
+0/10482/campos_512_v4
+0/10486/campos_512_v4
+0/10494/campos_512_v4
+0/10497/campos_512_v4
+0/10502/campos_512_v4
+0/10503/campos_512_v4
+0/10505/campos_512_v4
+0/10509/campos_512_v4
+0/10512/campos_512_v4
+0/10513/campos_512_v4
+0/10515/campos_512_v4
+0/10522/campos_512_v4
+0/10524/campos_512_v4
+0/10530/campos_512_v4
+0/10532/campos_512_v4
+0/10550/campos_512_v4
+0/10557/campos_512_v4
+0/10558/campos_512_v4
+0/10560/campos_512_v4
+0/10563/campos_512_v4
+0/10568/campos_512_v4
+0/10570/campos_512_v4
+0/10573/campos_512_v4
+0/10574/campos_512_v4
+0/10575/campos_512_v4
+0/10587/campos_512_v4
+0/10588/campos_512_v4
+0/10596/campos_512_v4
+0/10599/campos_512_v4
+0/10607/campos_512_v4
+0/10613/campos_512_v4
+0/10622/campos_512_v4
+0/10624/campos_512_v4
+0/10625/campos_512_v4
+0/10628/campos_512_v4
+0/10634/campos_512_v4
+0/10635/campos_512_v4
+0/10650/campos_512_v4
+0/10656/campos_512_v4
+0/10657/campos_512_v4
+0/10658/campos_512_v4
+0/10662/campos_512_v4
+0/10670/campos_512_v4
+0/10671/campos_512_v4
+0/10676/campos_512_v4
+0/10696/campos_512_v4
+0/10701/campos_512_v4
+0/10705/campos_512_v4
+0/10707/campos_512_v4
+0/10715/campos_512_v4
+0/10717/campos_512_v4
+0/10720/campos_512_v4
+0/10726/campos_512_v4
+0/10738/campos_512_v4
+0/10740/campos_512_v4
+0/10753/campos_512_v4
+0/10758/campos_512_v4
+0/10778/campos_512_v4
+0/10787/campos_512_v4
+0/10789/campos_512_v4
+0/10794/campos_512_v4
+0/10803/campos_512_v4
+0/10804/campos_512_v4
+0/10810/campos_512_v4
+0/10814/campos_512_v4
+0/10815/campos_512_v4
+0/10818/campos_512_v4
+0/10822/campos_512_v4
+0/10824/campos_512_v4
+0/10830/campos_512_v4
+0/10831/campos_512_v4
+0/10834/campos_512_v4
+0/10840/campos_512_v4
+0/10842/campos_512_v4
+0/10849/campos_512_v4
+0/10850/campos_512_v4
+0/10858/campos_512_v4
+0/10870/campos_512_v4
+0/10877/campos_512_v4
+0/10885/campos_512_v4
+0/10889/campos_512_v4
+0/10890/campos_512_v4
+0/10894/campos_512_v4
+0/10896/campos_512_v4
+0/10909/campos_512_v4
+0/10920/campos_512_v4
+0/10921/campos_512_v4
+0/10922/campos_512_v4
+0/10926/campos_512_v4
+0/10930/campos_512_v4
+0/10933/campos_512_v4
+0/10939/campos_512_v4
+0/10942/campos_512_v4
+0/10945/campos_512_v4
+0/10948/campos_512_v4
+0/10950/campos_512_v4
+0/10954/campos_512_v4
+0/10958/campos_512_v4
+0/10967/campos_512_v4
+0/10970/campos_512_v4
+0/10974/campos_512_v4
+0/10975/campos_512_v4
+0/10977/campos_512_v4
+0/10981/campos_512_v4
+0/10983/campos_512_v4
+0/10984/campos_512_v4
+0/10987/campos_512_v4
+0/10990/campos_512_v4
+0/10991/campos_512_v4
+0/10992/campos_512_v4
+0/10996/campos_512_v4
+0/11018/campos_512_v4
+0/11020/campos_512_v4
+0/11021/campos_512_v4
+0/11025/campos_512_v4
+0/11028/campos_512_v4
+0/11031/campos_512_v4
+0/11036/campos_512_v4
+0/11039/campos_512_v4
+0/11040/campos_512_v4
+0/11043/campos_512_v4
+0/11044/campos_512_v4
+0/11051/campos_512_v4
+0/11058/campos_512_v4
+0/11062/campos_512_v4
+0/11066/campos_512_v4
+0/11071/campos_512_v4
+0/11081/campos_512_v4
+0/11093/campos_512_v4
+0/11095/campos_512_v4
+0/11099/campos_512_v4
+0/11101/campos_512_v4
+0/11102/campos_512_v4
+0/11103/campos_512_v4
+0/11113/campos_512_v4
+0/11125/campos_512_v4
+0/11131/campos_512_v4
+0/11132/campos_512_v4
+0/11142/campos_512_v4
+0/11144/campos_512_v4
+0/11152/campos_512_v4
+0/11157/campos_512_v4
+0/11159/campos_512_v4
+0/11165/campos_512_v4
+0/11174/campos_512_v4
+0/11175/campos_512_v4
+0/11183/campos_512_v4
+0/11184/campos_512_v4
+0/11198/campos_512_v4
+0/11207/campos_512_v4
+0/11231/campos_512_v4
+0/11246/campos_512_v4
+0/11252/campos_512_v4
+0/11264/campos_512_v4
+0/11274/campos_512_v4
+0/11298/campos_512_v4
+0/11304/campos_512_v4
+0/11309/campos_512_v4
+0/11329/campos_512_v4
+0/11332/campos_512_v4
+0/11333/campos_512_v4
+0/11335/campos_512_v4
+0/11336/campos_512_v4
+0/11344/campos_512_v4
+0/11361/campos_512_v4
+0/11362/campos_512_v4
+0/11367/campos_512_v4
+0/11368/campos_512_v4
+0/11374/campos_512_v4
+0/11380/campos_512_v4
+0/11386/campos_512_v4
+0/11399/campos_512_v4
+0/11402/campos_512_v4
+0/11403/campos_512_v4
+0/11406/campos_512_v4
+0/11411/campos_512_v4
+0/11422/campos_512_v4
+0/11438/campos_512_v4
+0/11473/campos_512_v4
+0/11474/campos_512_v4
+0/11475/campos_512_v4
+0/11480/campos_512_v4
+0/11490/campos_512_v4
+0/11492/campos_512_v4
+0/11504/campos_512_v4
+0/11509/campos_512_v4
+0/11517/campos_512_v4
+0/11521/campos_512_v4
+0/11522/campos_512_v4
+0/11526/campos_512_v4
+0/11527/campos_512_v4
+0/11533/campos_512_v4
+0/11543/campos_512_v4
+0/11548/campos_512_v4
+0/11562/campos_512_v4
+0/11563/campos_512_v4
+0/11576/campos_512_v4
+0/11580/campos_512_v4
+0/11581/campos_512_v4
+0/11582/campos_512_v4
+0/11589/campos_512_v4
+0/11605/campos_512_v4
+0/11609/campos_512_v4
+0/11610/campos_512_v4
+0/11621/campos_512_v4
+0/11645/campos_512_v4
+0/11650/campos_512_v4
+0/11665/campos_512_v4
+0/11696/campos_512_v4
+0/11698/campos_512_v4
+0/11702/campos_512_v4
+0/11715/campos_512_v4
+0/11721/campos_512_v4
+0/11749/campos_512_v4
+0/11752/campos_512_v4
+0/11756/campos_512_v4
+0/11764/campos_512_v4
+0/11777/campos_512_v4
+0/11781/campos_512_v4
+0/11782/campos_512_v4
+0/11785/campos_512_v4
+0/11793/campos_512_v4
+0/11800/campos_512_v4
+0/11810/campos_512_v4
+0/11818/campos_512_v4
+0/11838/campos_512_v4
+0/11845/campos_512_v4
+0/11849/campos_512_v4
+0/11862/campos_512_v4
+0/11863/campos_512_v4
+0/11872/campos_512_v4
+0/11878/campos_512_v4
+0/11888/campos_512_v4
+0/11894/campos_512_v4
+0/11900/campos_512_v4
+0/11907/campos_512_v4
+0/11922/campos_512_v4
+0/11943/campos_512_v4
+0/11961/campos_512_v4
+0/11977/campos_512_v4
+0/11988/campos_512_v4
+0/11990/campos_512_v4
+0/11991/campos_512_v4
+0/11994/campos_512_v4
+0/11997/campos_512_v4
+0/12005/campos_512_v4
+0/12011/campos_512_v4
+0/12012/campos_512_v4
+0/12020/campos_512_v4
+0/12024/campos_512_v4
+0/12029/campos_512_v4
+0/12039/campos_512_v4
+0/12049/campos_512_v4
+0/12051/campos_512_v4
+0/12054/campos_512_v4
+0/12065/campos_512_v4
+0/12070/campos_512_v4
+0/12071/campos_512_v4
+0/12073/campos_512_v4
+0/12099/campos_512_v4
+0/12103/campos_512_v4
+0/12108/campos_512_v4
+0/12109/campos_512_v4
+0/12116/campos_512_v4
+0/12123/campos_512_v4
+0/12124/campos_512_v4
+0/12139/campos_512_v4
+0/12147/campos_512_v4
+0/12167/campos_512_v4
+0/12170/campos_512_v4
+0/12177/campos_512_v4
+0/12193/campos_512_v4
+0/12205/campos_512_v4
+0/12210/campos_512_v4
+0/12225/campos_512_v4
+0/12226/campos_512_v4
+0/12228/campos_512_v4
+0/12229/campos_512_v4
+0/12236/campos_512_v4
+0/12246/campos_512_v4
+0/12249/campos_512_v4
+0/12269/campos_512_v4
+0/12272/campos_512_v4
+0/12274/campos_512_v4
+0/12275/campos_512_v4
+0/12276/campos_512_v4
+0/12282/campos_512_v4
+0/12291/campos_512_v4
+0/12297/campos_512_v4
+0/12307/campos_512_v4
+0/12314/campos_512_v4
+0/12325/campos_512_v4
+0/12334/campos_512_v4
+0/12337/campos_512_v4
+0/12348/campos_512_v4
+0/12350/campos_512_v4
+0/12352/campos_512_v4
+0/12362/campos_512_v4
+0/12387/campos_512_v4
+0/12391/campos_512_v4
+0/12393/campos_512_v4
+0/12397/campos_512_v4
+0/12405/campos_512_v4
+0/12413/campos_512_v4
+0/12419/campos_512_v4
+0/12444/campos_512_v4
+0/12447/campos_512_v4
+0/12450/campos_512_v4
+0/12453/campos_512_v4
+0/12456/campos_512_v4
+0/12467/campos_512_v4
+0/12480/campos_512_v4
+0/12481/campos_512_v4
+0/12483/campos_512_v4
+0/12488/campos_512_v4
+0/12498/campos_512_v4
+0/12501/campos_512_v4
+0/12516/campos_512_v4
+0/12533/campos_512_v4
+0/12537/campos_512_v4
+0/12538/campos_512_v4
+0/12545/campos_512_v4
+0/12560/campos_512_v4
+0/12561/campos_512_v4
+0/12564/campos_512_v4
+0/12584/campos_512_v4
+0/12595/campos_512_v4
+0/12598/campos_512_v4
+0/12613/campos_512_v4
+0/12623/campos_512_v4
+0/12632/campos_512_v4
+0/12634/campos_512_v4
+0/12653/campos_512_v4
+0/12657/campos_512_v4
+0/12659/campos_512_v4
+0/12665/campos_512_v4
+0/12667/campos_512_v4
+0/12671/campos_512_v4
+0/12673/campos_512_v4
+0/12676/campos_512_v4
+0/12689/campos_512_v4
+0/12694/campos_512_v4
+0/12715/campos_512_v4
+0/12727/campos_512_v4
+0/12739/campos_512_v4
+0/12744/campos_512_v4
+0/12749/campos_512_v4
+0/12752/campos_512_v4
+0/12754/campos_512_v4
+0/12762/campos_512_v4
+0/12773/campos_512_v4
+0/12781/campos_512_v4
+0/12794/campos_512_v4
+0/12799/campos_512_v4
+0/12803/campos_512_v4
+0/12811/campos_512_v4
+0/12814/campos_512_v4
+0/12816/campos_512_v4
+0/12823/campos_512_v4
+0/12829/campos_512_v4
+0/12837/campos_512_v4
+0/12840/campos_512_v4
+0/12842/campos_512_v4
+0/12855/campos_512_v4
+0/12878/campos_512_v4
+0/12901/campos_512_v4
+0/12905/campos_512_v4
+0/12906/campos_512_v4
+0/12910/campos_512_v4
+0/12917/campos_512_v4
+0/12919/campos_512_v4
+0/12925/campos_512_v4
+0/12928/campos_512_v4
+0/12934/campos_512_v4
+0/12940/campos_512_v4
+0/12944/campos_512_v4
+0/12958/campos_512_v4
+0/12981/campos_512_v4
+0/12984/campos_512_v4
+0/12992/campos_512_v4
+0/12994/campos_512_v4
+0/12998/campos_512_v4
+0/13010/campos_512_v4
+0/13016/campos_512_v4
+0/13040/campos_512_v4
+0/13041/campos_512_v4
+0/13042/campos_512_v4
+0/13050/campos_512_v4
+0/13052/campos_512_v4
+0/13068/campos_512_v4
+0/13077/campos_512_v4
+0/13089/campos_512_v4
+0/13092/campos_512_v4
+0/13101/campos_512_v4
+0/13102/campos_512_v4
+0/13103/campos_512_v4
+0/13143/campos_512_v4
+0/13145/campos_512_v4
+0/13149/campos_512_v4
+0/13159/campos_512_v4
+0/13171/campos_512_v4
+0/13173/campos_512_v4
+0/13181/campos_512_v4
+0/13186/campos_512_v4
+0/13193/campos_512_v4
+0/13202/campos_512_v4
+0/13203/campos_512_v4
+0/13207/campos_512_v4
+0/13215/campos_512_v4
+0/13225/campos_512_v4
+0/13235/campos_512_v4
+0/13242/campos_512_v4
+0/13247/campos_512_v4
+0/13257/campos_512_v4
+0/13261/campos_512_v4
+0/13287/campos_512_v4
+0/13288/campos_512_v4
+0/13307/campos_512_v4
+0/13308/campos_512_v4
+0/13313/campos_512_v4
+0/13319/campos_512_v4
+0/13322/campos_512_v4
+0/13331/campos_512_v4
+0/13354/campos_512_v4
+0/13367/campos_512_v4
+0/13369/campos_512_v4
+0/13374/campos_512_v4
+0/13381/campos_512_v4
+0/13382/campos_512_v4
+0/13392/campos_512_v4
+0/13395/campos_512_v4
+0/13397/campos_512_v4
+0/13400/campos_512_v4
+0/13404/campos_512_v4
+0/13412/campos_512_v4
+0/13418/campos_512_v4
+0/13425/campos_512_v4
+0/13459/campos_512_v4
+0/13466/campos_512_v4
+0/13468/campos_512_v4
+0/13476/campos_512_v4
+0/13479/campos_512_v4
+0/13497/campos_512_v4
+0/13502/campos_512_v4
+0/13508/campos_512_v4
+0/13509/campos_512_v4
+0/13514/campos_512_v4
+0/13521/campos_512_v4
+0/13524/campos_512_v4
+0/13530/campos_512_v4
+0/13533/campos_512_v4
+0/13535/campos_512_v4
+0/13536/campos_512_v4
+0/13539/campos_512_v4
+0/13543/campos_512_v4
+0/13548/campos_512_v4
+0/13550/campos_512_v4
+0/13557/campos_512_v4
+0/13573/campos_512_v4
+0/13579/campos_512_v4
+0/13597/campos_512_v4
+0/13616/campos_512_v4
+0/13620/campos_512_v4
+0/13623/campos_512_v4
+0/13629/campos_512_v4
+0/13640/campos_512_v4
+0/13644/campos_512_v4
+0/13646/campos_512_v4
+0/13652/campos_512_v4
+0/13657/campos_512_v4
+0/13658/campos_512_v4
+0/13659/campos_512_v4
+0/13667/campos_512_v4
+0/13674/campos_512_v4
+0/13678/campos_512_v4
+0/13680/campos_512_v4
+0/13681/campos_512_v4
+0/13693/campos_512_v4
+0/13694/campos_512_v4
+0/13700/campos_512_v4
+0/13703/campos_512_v4
+0/13708/campos_512_v4
+0/13710/campos_512_v4
+0/13713/campos_512_v4
+0/13717/campos_512_v4
+0/13722/campos_512_v4
+0/13732/campos_512_v4
+0/13743/campos_512_v4
+0/13746/campos_512_v4
+0/13747/campos_512_v4
+0/13752/campos_512_v4
+0/13760/campos_512_v4
+0/13783/campos_512_v4
+0/13786/campos_512_v4
+0/13787/campos_512_v4
+0/13815/campos_512_v4
+0/13820/campos_512_v4
+0/13848/campos_512_v4
+0/13853/campos_512_v4
+0/13878/campos_512_v4
+0/13896/campos_512_v4
+0/13914/campos_512_v4
+0/13920/campos_512_v4
+0/13921/campos_512_v4
+0/13922/campos_512_v4
+0/13923/campos_512_v4
+0/13966/campos_512_v4
+0/13976/campos_512_v4
+0/13977/campos_512_v4
+0/13981/campos_512_v4
+0/13991/campos_512_v4
+0/13995/campos_512_v4
+0/14005/campos_512_v4
+0/14010/campos_512_v4
+0/14021/campos_512_v4
+0/14023/campos_512_v4
+0/14024/campos_512_v4
+0/14029/campos_512_v4
+0/14039/campos_512_v4
+0/14041/campos_512_v4
+0/14048/campos_512_v4
+0/14061/campos_512_v4
+0/14070/campos_512_v4
+0/14075/campos_512_v4
+0/14089/campos_512_v4
+0/14090/campos_512_v4
+0/14095/campos_512_v4
+0/14109/campos_512_v4
+0/14121/campos_512_v4
+0/14124/campos_512_v4
+0/14138/campos_512_v4
+0/14149/campos_512_v4
+0/14155/campos_512_v4
+0/14156/campos_512_v4
+0/14167/campos_512_v4
+0/14173/campos_512_v4
+0/14178/campos_512_v4
+0/14181/campos_512_v4
+0/14188/campos_512_v4
+0/14195/campos_512_v4
+0/14202/campos_512_v4
+0/14204/campos_512_v4
+0/14207/campos_512_v4
+0/14209/campos_512_v4
+0/14224/campos_512_v4
+0/14227/campos_512_v4
+0/14234/campos_512_v4
+0/14239/campos_512_v4
+0/14263/campos_512_v4
+0/14265/campos_512_v4
+0/14269/campos_512_v4
+0/14274/campos_512_v4
+0/14277/campos_512_v4
+0/14289/campos_512_v4
+0/14290/campos_512_v4
+0/14298/campos_512_v4
+0/14302/campos_512_v4
+0/14306/campos_512_v4
+0/14317/campos_512_v4
+0/14319/campos_512_v4
+0/14350/campos_512_v4
+0/14373/campos_512_v4
+0/14376/campos_512_v4
+0/14382/campos_512_v4
+0/14385/campos_512_v4
+0/14401/campos_512_v4
+0/14423/campos_512_v4
+0/14434/campos_512_v4
+0/14452/campos_512_v4
+0/14453/campos_512_v4
+0/14454/campos_512_v4
+0/14458/campos_512_v4
+0/14461/campos_512_v4
+0/14471/campos_512_v4
+0/14472/campos_512_v4
+0/14480/campos_512_v4
+0/14488/campos_512_v4
+0/14492/campos_512_v4
+0/14503/campos_512_v4
+0/14510/campos_512_v4
+0/14512/campos_512_v4
+0/14515/campos_512_v4
+0/14516/campos_512_v4
+0/14520/campos_512_v4
+0/14522/campos_512_v4
+0/14527/campos_512_v4
+0/14530/campos_512_v4
+0/14551/campos_512_v4
+0/14554/campos_512_v4
+0/14556/campos_512_v4
+0/14559/campos_512_v4
+0/14577/campos_512_v4
+0/14578/campos_512_v4
+0/14586/campos_512_v4
+0/14592/campos_512_v4
+0/14593/campos_512_v4
+0/14595/campos_512_v4
+0/14600/campos_512_v4
+0/14606/campos_512_v4
+0/14628/campos_512_v4
+0/14633/campos_512_v4
+0/14639/campos_512_v4
+0/14647/campos_512_v4
+0/14648/campos_512_v4
+0/14652/campos_512_v4
+0/14660/campos_512_v4
+0/14666/campos_512_v4
+0/14677/campos_512_v4
+0/14683/campos_512_v4
+0/14684/campos_512_v4
+0/14685/campos_512_v4
+0/14687/campos_512_v4
+0/14696/campos_512_v4
+0/14700/campos_512_v4
+0/14705/campos_512_v4
+0/14707/campos_512_v4
+0/14723/campos_512_v4
+0/14749/campos_512_v4
+0/14750/campos_512_v4
+0/14753/campos_512_v4
+0/14772/campos_512_v4
+0/14776/campos_512_v4
+0/14783/campos_512_v4
+0/14787/campos_512_v4
+0/14807/campos_512_v4
+0/14810/campos_512_v4
+0/14817/campos_512_v4
+0/14828/campos_512_v4
+0/14830/campos_512_v4
+0/14847/campos_512_v4
+0/14857/campos_512_v4
+0/14860/campos_512_v4
+0/14864/campos_512_v4
+0/14871/campos_512_v4
+0/14879/campos_512_v4
+0/14886/campos_512_v4
+0/14890/campos_512_v4
+0/14914/campos_512_v4
+0/14916/campos_512_v4
+0/14927/campos_512_v4
+0/14935/campos_512_v4
+0/14946/campos_512_v4
+0/14952/campos_512_v4
+0/14957/campos_512_v4
+0/14958/campos_512_v4
+0/14960/campos_512_v4
+0/14967/campos_512_v4
+0/14968/campos_512_v4
+0/14970/campos_512_v4
+0/14972/campos_512_v4
+0/14975/campos_512_v4
+0/14988/campos_512_v4
+0/14992/campos_512_v4
+0/14993/campos_512_v4
+0/14998/campos_512_v4
+0/15001/campos_512_v4
+1/15003/campos_512_v4
+1/15038/campos_512_v4
+1/15039/campos_512_v4
+1/15040/campos_512_v4
+1/15042/campos_512_v4
+1/15044/campos_512_v4
+1/15047/campos_512_v4
+1/15065/campos_512_v4
+1/15068/campos_512_v4
+1/15074/campos_512_v4
+1/15080/campos_512_v4
+1/15088/campos_512_v4
+1/15090/campos_512_v4
+1/15092/campos_512_v4
+1/15093/campos_512_v4
+1/15096/campos_512_v4
+1/15103/campos_512_v4
+1/15105/campos_512_v4
+1/15114/campos_512_v4
+1/15115/campos_512_v4
+1/15122/campos_512_v4
+1/15125/campos_512_v4
+1/15126/campos_512_v4
+1/15130/campos_512_v4
+1/15134/campos_512_v4
+1/15141/campos_512_v4
+1/15148/campos_512_v4
+1/15166/campos_512_v4
+1/15169/campos_512_v4
+1/15174/campos_512_v4
+1/15181/campos_512_v4
+1/15182/campos_512_v4
+1/15197/campos_512_v4
+1/15221/campos_512_v4
+1/15226/campos_512_v4
+1/15232/campos_512_v4
+1/15245/campos_512_v4
+1/15250/campos_512_v4
+1/15255/campos_512_v4
+1/15267/campos_512_v4
+1/15271/campos_512_v4
+1/15272/campos_512_v4
+1/15276/campos_512_v4
+1/15279/campos_512_v4
+1/15289/campos_512_v4
+1/15301/campos_512_v4
+1/15303/campos_512_v4
+1/15306/campos_512_v4
+1/15311/campos_512_v4
+1/15313/campos_512_v4
+1/15314/campos_512_v4
+1/15323/campos_512_v4
+1/15324/campos_512_v4
+1/15328/campos_512_v4
+1/15341/campos_512_v4
+1/15343/campos_512_v4
+1/15352/campos_512_v4
+1/15370/campos_512_v4
+1/15397/campos_512_v4
+1/15408/campos_512_v4
+1/15410/campos_512_v4
+1/15414/campos_512_v4
+1/15417/campos_512_v4
+1/15423/campos_512_v4
+1/15430/campos_512_v4
+1/15442/campos_512_v4
+1/15445/campos_512_v4
+1/15450/campos_512_v4
+1/15457/campos_512_v4
+1/15460/campos_512_v4
+1/15469/campos_512_v4
+1/15471/campos_512_v4
+1/15477/campos_512_v4
+1/15479/campos_512_v4
+1/15501/campos_512_v4
+1/15505/campos_512_v4
+1/15510/campos_512_v4
+1/15520/campos_512_v4
+1/15524/campos_512_v4
+1/15527/campos_512_v4
+1/15530/campos_512_v4
+1/15544/campos_512_v4
+1/15548/campos_512_v4
+1/15552/campos_512_v4
+1/15554/campos_512_v4
+1/15555/campos_512_v4
+1/15557/campos_512_v4
+1/15561/campos_512_v4
+1/15578/campos_512_v4
+1/15584/campos_512_v4
+1/15597/campos_512_v4
+1/15605/campos_512_v4
+1/15612/campos_512_v4
+1/15616/campos_512_v4
+1/15624/campos_512_v4
+1/15632/campos_512_v4
+1/15651/campos_512_v4
+1/15653/campos_512_v4
+1/15658/campos_512_v4
+1/15659/campos_512_v4
+1/15664/campos_512_v4
+1/15669/campos_512_v4
+1/15671/campos_512_v4
+1/15672/campos_512_v4
+1/15681/campos_512_v4
+1/15701/campos_512_v4
+1/15706/campos_512_v4
+1/15708/campos_512_v4
+1/15710/campos_512_v4
+1/15712/campos_512_v4
+1/15714/campos_512_v4
+1/15715/campos_512_v4
+1/15723/campos_512_v4
+1/15734/campos_512_v4
+1/15742/campos_512_v4
+1/15749/campos_512_v4
+1/15763/campos_512_v4
+1/15782/campos_512_v4
+1/15789/campos_512_v4
+1/15794/campos_512_v4
+1/15797/campos_512_v4
+1/15802/campos_512_v4
+1/15830/campos_512_v4
+1/15848/campos_512_v4
+1/15851/campos_512_v4
+1/15870/campos_512_v4
+1/15892/campos_512_v4
+1/15898/campos_512_v4
+1/15910/campos_512_v4
+1/15919/campos_512_v4
+1/15929/campos_512_v4
+1/15938/campos_512_v4
+1/15947/campos_512_v4
+1/15948/campos_512_v4
+1/15952/campos_512_v4
+1/15959/campos_512_v4
+1/15961/campos_512_v4
+1/15963/campos_512_v4
+1/15978/campos_512_v4
+1/15982/campos_512_v4
+1/15993/campos_512_v4
+1/16006/campos_512_v4
+1/16023/campos_512_v4
+1/16024/campos_512_v4
+1/16054/campos_512_v4
+1/16059/campos_512_v4
+1/16063/campos_512_v4
+1/16070/campos_512_v4
+1/16085/campos_512_v4
+1/16086/campos_512_v4
+1/16090/campos_512_v4
+1/16093/campos_512_v4
+1/16121/campos_512_v4
+1/16130/campos_512_v4
+1/16149/campos_512_v4
+1/16150/campos_512_v4
+1/16152/campos_512_v4
+1/16155/campos_512_v4
+1/16163/campos_512_v4
+1/16167/campos_512_v4
+1/16168/campos_512_v4
+1/16170/campos_512_v4
+1/16176/campos_512_v4
+1/16180/campos_512_v4
+1/16184/campos_512_v4
+1/16186/campos_512_v4
+1/16187/campos_512_v4
+1/16188/campos_512_v4
+1/16192/campos_512_v4
+1/16193/campos_512_v4
+1/16195/campos_512_v4
+1/16202/campos_512_v4
+1/16203/campos_512_v4
+1/16204/campos_512_v4
+1/16208/campos_512_v4
+1/16210/campos_512_v4
+1/16211/campos_512_v4
+1/16213/campos_512_v4
+1/16230/campos_512_v4
+1/16248/campos_512_v4
+1/16263/campos_512_v4
+1/16271/campos_512_v4
+1/16274/campos_512_v4
+1/16275/campos_512_v4
+1/16276/campos_512_v4
+1/16278/campos_512_v4
+1/16282/campos_512_v4
+1/16298/campos_512_v4
+1/16317/campos_512_v4
+1/16318/campos_512_v4
+1/16320/campos_512_v4
+1/16324/campos_512_v4
+1/16334/campos_512_v4
+1/16337/campos_512_v4
+1/16341/campos_512_v4
+1/16348/campos_512_v4
+1/16351/campos_512_v4
+1/16355/campos_512_v4
+1/16356/campos_512_v4
+1/16358/campos_512_v4
+1/16360/campos_512_v4
+1/16367/campos_512_v4
+1/16372/campos_512_v4
+1/16379/campos_512_v4
+1/16405/campos_512_v4
+1/16406/campos_512_v4
+1/16423/campos_512_v4
+1/16426/campos_512_v4
+1/16429/campos_512_v4
+1/16432/campos_512_v4
+1/16448/campos_512_v4
+1/16450/campos_512_v4
+1/16459/campos_512_v4
+1/16462/campos_512_v4
+1/16470/campos_512_v4
+1/16501/campos_512_v4
+1/16503/campos_512_v4
+1/16505/campos_512_v4
+1/16511/campos_512_v4
+1/16513/campos_512_v4
+1/16518/campos_512_v4
+1/16524/campos_512_v4
+1/16525/campos_512_v4
+1/16535/campos_512_v4
+1/16545/campos_512_v4
+1/16546/campos_512_v4
+1/16563/campos_512_v4
+1/16569/campos_512_v4
+1/16592/campos_512_v4
+1/16597/campos_512_v4
+1/16618/campos_512_v4
+1/16624/campos_512_v4
+1/16625/campos_512_v4
+1/16627/campos_512_v4
+1/16643/campos_512_v4
+1/16648/campos_512_v4
+1/16654/campos_512_v4
+1/16669/campos_512_v4
+1/16671/campos_512_v4
+1/16675/campos_512_v4
+1/16695/campos_512_v4
+1/16699/campos_512_v4
+1/16700/campos_512_v4
+1/16704/campos_512_v4
+1/16711/campos_512_v4
+1/16720/campos_512_v4
+1/16727/campos_512_v4
+1/16730/campos_512_v4
+1/16731/campos_512_v4
+1/16732/campos_512_v4
+1/16735/campos_512_v4
+1/16738/campos_512_v4
+1/16740/campos_512_v4
+1/16759/campos_512_v4
+1/16761/campos_512_v4
+1/16767/campos_512_v4
+1/16768/campos_512_v4
+1/16782/campos_512_v4
+1/16798/campos_512_v4
+1/16800/campos_512_v4
+1/16806/campos_512_v4
+1/16809/campos_512_v4
+1/16814/campos_512_v4
+1/16823/campos_512_v4
+1/16830/campos_512_v4
+1/16835/campos_512_v4
+1/16837/campos_512_v4
+1/16843/campos_512_v4
+1/16859/campos_512_v4
+1/16862/campos_512_v4
+1/16865/campos_512_v4
+1/16880/campos_512_v4
+1/16888/campos_512_v4
+1/16889/campos_512_v4
+1/16906/campos_512_v4
+1/16915/campos_512_v4
+1/16926/campos_512_v4
+1/16934/campos_512_v4
+1/16943/campos_512_v4
+1/16945/campos_512_v4
+1/16954/campos_512_v4
+1/16964/campos_512_v4
+1/16968/campos_512_v4
+1/16972/campos_512_v4
+1/17004/campos_512_v4
+1/17010/campos_512_v4
+1/17014/campos_512_v4
+1/17017/campos_512_v4
+1/17023/campos_512_v4
+1/17034/campos_512_v4
+1/17037/campos_512_v4
+1/17045/campos_512_v4
+1/17049/campos_512_v4
+1/17052/campos_512_v4
+1/17061/campos_512_v4
+1/17077/campos_512_v4
+1/17078/campos_512_v4
+1/17079/campos_512_v4
+1/17094/campos_512_v4
+1/17095/campos_512_v4
+1/17097/campos_512_v4
+1/17098/campos_512_v4
+1/17105/campos_512_v4
+1/17107/campos_512_v4
+1/17109/campos_512_v4
+1/17141/campos_512_v4
+1/17147/campos_512_v4
+1/17148/campos_512_v4
+1/17150/campos_512_v4
+1/17155/campos_512_v4
+1/17169/campos_512_v4
+1/17170/campos_512_v4
+1/17181/campos_512_v4
+1/17183/campos_512_v4
+1/17187/campos_512_v4
+1/17191/campos_512_v4
+1/17197/campos_512_v4
+1/17228/campos_512_v4
+1/17230/campos_512_v4
+1/17231/campos_512_v4
+1/17243/campos_512_v4
+1/17247/campos_512_v4
+1/17251/campos_512_v4
+1/17253/campos_512_v4
+1/17266/campos_512_v4
+1/17269/campos_512_v4
+1/17294/campos_512_v4
+1/17300/campos_512_v4
+1/17303/campos_512_v4
+1/17313/campos_512_v4
+1/17314/campos_512_v4
+1/17315/campos_512_v4
+1/17322/campos_512_v4
+1/17330/campos_512_v4
+1/17351/campos_512_v4
+1/17352/campos_512_v4
+1/17369/campos_512_v4
+1/17377/campos_512_v4
+1/17383/campos_512_v4
+1/17407/campos_512_v4
+1/17411/campos_512_v4
+1/17424/campos_512_v4
+1/17452/campos_512_v4
+1/17454/campos_512_v4
+1/17476/campos_512_v4
+1/17480/campos_512_v4
+1/17487/campos_512_v4
+1/17494/campos_512_v4
+1/17496/campos_512_v4
+1/17497/campos_512_v4
+1/17501/campos_512_v4
+1/17507/campos_512_v4
+1/17516/campos_512_v4
+1/17517/campos_512_v4
+1/17520/campos_512_v4
+1/17525/campos_512_v4
+1/17539/campos_512_v4
+1/17556/campos_512_v4
+1/17560/campos_512_v4
+1/17565/campos_512_v4
+1/17579/campos_512_v4
+1/17583/campos_512_v4
+1/17586/campos_512_v4
+1/17594/campos_512_v4
+1/17606/campos_512_v4
+1/17627/campos_512_v4
+1/17629/campos_512_v4
+1/17630/campos_512_v4
+1/17631/campos_512_v4
+1/17634/campos_512_v4
+1/17637/campos_512_v4
+1/17638/campos_512_v4
+1/17643/campos_512_v4
+1/17647/campos_512_v4
+1/17655/campos_512_v4
+1/17661/campos_512_v4
+1/17663/campos_512_v4
+1/17665/campos_512_v4
+1/17674/campos_512_v4
+1/17680/campos_512_v4
+1/17691/campos_512_v4
+1/17692/campos_512_v4
+1/17706/campos_512_v4
+1/17709/campos_512_v4
+1/17710/campos_512_v4
+1/17725/campos_512_v4
+1/17736/campos_512_v4
+1/17748/campos_512_v4
+1/17760/campos_512_v4
+1/17763/campos_512_v4
+1/17774/campos_512_v4
+1/17776/campos_512_v4
+1/17782/campos_512_v4
+1/17783/campos_512_v4
+1/17787/campos_512_v4
+1/17789/campos_512_v4
+1/17809/campos_512_v4
+1/17810/campos_512_v4
+1/17818/campos_512_v4
+1/17830/campos_512_v4
+1/17831/campos_512_v4
+1/17834/campos_512_v4
+1/17837/campos_512_v4
+1/17853/campos_512_v4
+1/17869/campos_512_v4
+1/17878/campos_512_v4
+1/17885/campos_512_v4
+1/17893/campos_512_v4
+1/17897/campos_512_v4
+1/17901/campos_512_v4
+1/17905/campos_512_v4
+1/17927/campos_512_v4
+1/17936/campos_512_v4
+1/17945/campos_512_v4
+1/17954/campos_512_v4
+1/17960/campos_512_v4
+1/17962/campos_512_v4
+1/17966/campos_512_v4
+1/17981/campos_512_v4
+1/17984/campos_512_v4
+1/17993/campos_512_v4
+1/18005/campos_512_v4
+1/18050/campos_512_v4
+1/18062/campos_512_v4
+1/18080/campos_512_v4
+1/18089/campos_512_v4
+1/18104/campos_512_v4
+1/18120/campos_512_v4
+1/18131/campos_512_v4
+1/18134/campos_512_v4
+1/18142/campos_512_v4
+1/18157/campos_512_v4
+1/18170/campos_512_v4
+1/18185/campos_512_v4
+1/18186/campos_512_v4
+1/18198/campos_512_v4
+1/18205/campos_512_v4
+1/18206/campos_512_v4
+1/18222/campos_512_v4
+1/18229/campos_512_v4
+1/18238/campos_512_v4
+1/18241/campos_512_v4
+1/18247/campos_512_v4
+1/18250/campos_512_v4
+1/18254/campos_512_v4
+1/18261/campos_512_v4
+1/18262/campos_512_v4
+1/18263/campos_512_v4
+1/18265/campos_512_v4
+1/18270/campos_512_v4
+1/18275/campos_512_v4
+1/18295/campos_512_v4
+1/18304/campos_512_v4
+1/18308/campos_512_v4
+1/18309/campos_512_v4
+1/18311/campos_512_v4
+1/18330/campos_512_v4
+1/18333/campos_512_v4
+1/18342/campos_512_v4
+1/18347/campos_512_v4
+1/18363/campos_512_v4
+1/18364/campos_512_v4
+1/18365/campos_512_v4
+1/18374/campos_512_v4
+1/18382/campos_512_v4
+1/18390/campos_512_v4
+1/18418/campos_512_v4
+1/18422/campos_512_v4
+1/18424/campos_512_v4
+1/18431/campos_512_v4
+1/18435/campos_512_v4
+1/18444/campos_512_v4
+1/18450/campos_512_v4
+1/18460/campos_512_v4
+1/18463/campos_512_v4
+1/18465/campos_512_v4
+1/18466/campos_512_v4
+1/18470/campos_512_v4
+1/18474/campos_512_v4
+1/18487/campos_512_v4
+1/18494/campos_512_v4
+1/18496/campos_512_v4
+1/18499/campos_512_v4
+1/18520/campos_512_v4
+1/18526/campos_512_v4
+1/18537/campos_512_v4
+1/18543/campos_512_v4
+1/18544/campos_512_v4
+1/18545/campos_512_v4
+1/18549/campos_512_v4
+1/18561/campos_512_v4
+1/18571/campos_512_v4
+1/18573/campos_512_v4
+1/18575/campos_512_v4
+1/18576/campos_512_v4
+1/18585/campos_512_v4
+1/18596/campos_512_v4
+1/18597/campos_512_v4
+1/18598/campos_512_v4
+1/18602/campos_512_v4
+1/18604/campos_512_v4
+1/18612/campos_512_v4
+1/18614/campos_512_v4
+1/18615/campos_512_v4
+1/18624/campos_512_v4
+1/18636/campos_512_v4
+1/18637/campos_512_v4
+1/18638/campos_512_v4
+1/18654/campos_512_v4
+1/18656/campos_512_v4
+1/18659/campos_512_v4
+1/18665/campos_512_v4
+1/18684/campos_512_v4
+1/18695/campos_512_v4
+1/18708/campos_512_v4
+1/18721/campos_512_v4
+1/18723/campos_512_v4
+1/18733/campos_512_v4
+1/18736/campos_512_v4
+1/18747/campos_512_v4
+1/18753/campos_512_v4
+1/18754/campos_512_v4
+1/18755/campos_512_v4
+1/18765/campos_512_v4
+1/18769/campos_512_v4
+1/18784/campos_512_v4
+1/18788/campos_512_v4
+1/18793/campos_512_v4
+1/18798/campos_512_v4
+1/18802/campos_512_v4
+1/18812/campos_512_v4
+1/18815/campos_512_v4
+1/18818/campos_512_v4
+1/18834/campos_512_v4
+1/18846/campos_512_v4
+1/18855/campos_512_v4
+1/18859/campos_512_v4
+1/18867/campos_512_v4
+1/18869/campos_512_v4
+1/18880/campos_512_v4
+1/18882/campos_512_v4
+1/18888/campos_512_v4
+1/18902/campos_512_v4
+1/18910/campos_512_v4
+1/18948/campos_512_v4
+1/18953/campos_512_v4
+1/18970/campos_512_v4
+1/18971/campos_512_v4
+1/18972/campos_512_v4
+1/18975/campos_512_v4
+1/18979/campos_512_v4
+1/18983/campos_512_v4
+1/18986/campos_512_v4
+1/18989/campos_512_v4
+1/19030/campos_512_v4
+1/19051/campos_512_v4
+1/19054/campos_512_v4
+1/19064/campos_512_v4
+1/19075/campos_512_v4
+1/19077/campos_512_v4
+1/19080/campos_512_v4
+1/19082/campos_512_v4
+1/19097/campos_512_v4
+1/19099/campos_512_v4
+1/19124/campos_512_v4
+1/19127/campos_512_v4
+1/19136/campos_512_v4
+1/19137/campos_512_v4
+1/19143/campos_512_v4
+1/19148/campos_512_v4
+1/19162/campos_512_v4
+1/19164/campos_512_v4
+1/19166/campos_512_v4
+1/19203/campos_512_v4
+1/19205/campos_512_v4
+1/19207/campos_512_v4
+1/19208/campos_512_v4
+1/19209/campos_512_v4
+1/19221/campos_512_v4
+1/19225/campos_512_v4
+1/19226/campos_512_v4
+1/19252/campos_512_v4
+1/19253/campos_512_v4
+1/19260/campos_512_v4
+1/19288/campos_512_v4
+1/19289/campos_512_v4
+1/19299/campos_512_v4
+1/19309/campos_512_v4
+1/19311/campos_512_v4
+1/19312/campos_512_v4
+1/19315/campos_512_v4
+1/19336/campos_512_v4
+1/19339/campos_512_v4
+1/19342/campos_512_v4
+1/19343/campos_512_v4
+1/19353/campos_512_v4
+1/19363/campos_512_v4
+1/19385/campos_512_v4
+1/19390/campos_512_v4
+1/19399/campos_512_v4
+1/19402/campos_512_v4
+1/19407/campos_512_v4
+1/19410/campos_512_v4
+1/19416/campos_512_v4
+1/19417/campos_512_v4
+1/19420/campos_512_v4
+1/19441/campos_512_v4
+1/19444/campos_512_v4
+1/19448/campos_512_v4
+1/19453/campos_512_v4
+1/19465/campos_512_v4
+1/19472/campos_512_v4
+1/19489/campos_512_v4
+1/19494/campos_512_v4
+1/19507/campos_512_v4
+1/19509/campos_512_v4
+1/19529/campos_512_v4
+1/19538/campos_512_v4
+1/19542/campos_512_v4
+1/19546/campos_512_v4
+1/19551/campos_512_v4
+1/19572/campos_512_v4
+1/19576/campos_512_v4
+1/19587/campos_512_v4
+1/19606/campos_512_v4
+1/19611/campos_512_v4
+1/19632/campos_512_v4
+1/19649/campos_512_v4
+1/19655/campos_512_v4
+1/19657/campos_512_v4
+1/19662/campos_512_v4
+1/19663/campos_512_v4
+1/19667/campos_512_v4
+1/19675/campos_512_v4
+1/19690/campos_512_v4
+1/19695/campos_512_v4
+1/19700/campos_512_v4
+1/19702/campos_512_v4
+1/19703/campos_512_v4
+1/19705/campos_512_v4
+1/19712/campos_512_v4
+1/19720/campos_512_v4
+1/19724/campos_512_v4
+1/19728/campos_512_v4
+1/19730/campos_512_v4
+1/19732/campos_512_v4
+1/19735/campos_512_v4
+1/19741/campos_512_v4
+1/19742/campos_512_v4
+1/19748/campos_512_v4
+1/19768/campos_512_v4
+1/19771/campos_512_v4
+1/19782/campos_512_v4
+1/19787/campos_512_v4
+1/19788/campos_512_v4
+1/19792/campos_512_v4
+1/19794/campos_512_v4
+1/19800/campos_512_v4
+1/19809/campos_512_v4
+1/19811/campos_512_v4
+1/19815/campos_512_v4
+1/19831/campos_512_v4
+1/19856/campos_512_v4
+1/19868/campos_512_v4
+1/19889/campos_512_v4
+1/19890/campos_512_v4
+1/19892/campos_512_v4
+1/19906/campos_512_v4
+1/19912/campos_512_v4
+1/19913/campos_512_v4
+1/19914/campos_512_v4
+1/19925/campos_512_v4
+1/19930/campos_512_v4
+1/19937/campos_512_v4
+1/19942/campos_512_v4
+1/19951/campos_512_v4
+1/19955/campos_512_v4
+1/19957/campos_512_v4
+1/19960/campos_512_v4
+1/19964/campos_512_v4
+1/19967/campos_512_v4
+1/19971/campos_512_v4
+1/19972/campos_512_v4
+1/19974/campos_512_v4
+1/19981/campos_512_v4
+1/19982/campos_512_v4
+1/19990/campos_512_v4
+10/60009/campos_512_v4
+10/60025/campos_512_v4
+10/60030/campos_512_v4
+10/60037/campos_512_v4
+10/60039/campos_512_v4
+10/60041/campos_512_v4
+10/60042/campos_512_v4
+10/60047/campos_512_v4
+10/60048/campos_512_v4
+10/60049/campos_512_v4
+10/60051/campos_512_v4
+10/60056/campos_512_v4
+10/60183/campos_512_v4
+10/60184/campos_512_v4
+10/60203/campos_512_v4
+10/60205/campos_512_v4
+10/60214/campos_512_v4
+10/60215/campos_512_v4
+10/60219/campos_512_v4
+10/60222/campos_512_v4
+10/60237/campos_512_v4
+10/60251/campos_512_v4
+10/60254/campos_512_v4
+10/60257/campos_512_v4
+10/60264/campos_512_v4
+10/60266/campos_512_v4
+10/60271/campos_512_v4
+10/60283/campos_512_v4
+10/60290/campos_512_v4
+10/60293/campos_512_v4
+10/60301/campos_512_v4
+10/60305/campos_512_v4
+10/60306/campos_512_v4
+10/60316/campos_512_v4
+10/60318/campos_512_v4
+10/60326/campos_512_v4
+10/60341/campos_512_v4
+10/60352/campos_512_v4
+10/60363/campos_512_v4
+10/60367/campos_512_v4
+10/60369/campos_512_v4
+10/60386/campos_512_v4
+10/60391/campos_512_v4
+10/60415/campos_512_v4
+10/60420/campos_512_v4
+10/60438/campos_512_v4
+10/60454/campos_512_v4
+10/60459/campos_512_v4
+10/60467/campos_512_v4
+10/60477/campos_512_v4
+10/60529/campos_512_v4
+10/60563/campos_512_v4
+10/60568/campos_512_v4
+10/60572/campos_512_v4
+10/60574/campos_512_v4
+10/60587/campos_512_v4
+10/60597/campos_512_v4
+10/60600/campos_512_v4
+10/60603/campos_512_v4
+10/60604/campos_512_v4
+10/60613/campos_512_v4
+10/60615/campos_512_v4
+10/60619/campos_512_v4
+10/60622/campos_512_v4
+10/60624/campos_512_v4
+10/60630/campos_512_v4
+10/60646/campos_512_v4
+10/60650/campos_512_v4
+10/60652/campos_512_v4
+10/60660/campos_512_v4
+10/60665/campos_512_v4
+10/60672/campos_512_v4
+10/60675/campos_512_v4
+10/60676/campos_512_v4
+10/60681/campos_512_v4
+10/60682/campos_512_v4
+10/60700/campos_512_v4
+10/60704/campos_512_v4
+10/60707/campos_512_v4
+10/60738/campos_512_v4
+10/60744/campos_512_v4
+10/60756/campos_512_v4
+10/60758/campos_512_v4
+10/60765/campos_512_v4
+10/60773/campos_512_v4
+10/60779/campos_512_v4
+10/60782/campos_512_v4
+10/60791/campos_512_v4
+10/60802/campos_512_v4
+10/60816/campos_512_v4
+10/60821/campos_512_v4
+10/60822/campos_512_v4
+10/60827/campos_512_v4
+10/60833/campos_512_v4
+10/60847/campos_512_v4
+10/60852/campos_512_v4
+10/60855/campos_512_v4
+10/60857/campos_512_v4
+10/60859/campos_512_v4
+10/60861/campos_512_v4
+10/60872/campos_512_v4
+10/60878/campos_512_v4
+10/60883/campos_512_v4
+10/60889/campos_512_v4
+10/60894/campos_512_v4
+10/60902/campos_512_v4
+10/60903/campos_512_v4
+10/60907/campos_512_v4
+10/60909/campos_512_v4
+10/60914/campos_512_v4
+10/60920/campos_512_v4
+10/60940/campos_512_v4
+10/60977/campos_512_v4
+10/60991/campos_512_v4
+10/60993/campos_512_v4
+10/61000/campos_512_v4
+10/61003/campos_512_v4
+10/61023/campos_512_v4
+10/61024/campos_512_v4
+10/61031/campos_512_v4
+10/61037/campos_512_v4
+10/61043/campos_512_v4
+10/61044/campos_512_v4
+10/61057/campos_512_v4
+10/61064/campos_512_v4
+10/61077/campos_512_v4
+10/61088/campos_512_v4
+10/61099/campos_512_v4
+10/61104/campos_512_v4
+10/61121/campos_512_v4
+10/61130/campos_512_v4
+10/61133/campos_512_v4
+10/61141/campos_512_v4
+10/61148/campos_512_v4
+10/61158/campos_512_v4
+10/61163/campos_512_v4
+10/61177/campos_512_v4
+10/61181/campos_512_v4
+10/61183/campos_512_v4
+10/61189/campos_512_v4
+10/61193/campos_512_v4
+10/61194/campos_512_v4
+10/61220/campos_512_v4
+10/61228/campos_512_v4
+10/61253/campos_512_v4
+10/61256/campos_512_v4
+10/61258/campos_512_v4
+10/61260/campos_512_v4
+10/61264/campos_512_v4
+10/61265/campos_512_v4
+10/61273/campos_512_v4
+10/61280/campos_512_v4
+10/61290/campos_512_v4
+10/61292/campos_512_v4
+10/61293/campos_512_v4
+10/61302/campos_512_v4
+10/61310/campos_512_v4
+10/61312/campos_512_v4
+10/61328/campos_512_v4
+10/61346/campos_512_v4
+10/61350/campos_512_v4
+10/61358/campos_512_v4
+10/61365/campos_512_v4
+10/61369/campos_512_v4
+10/61384/campos_512_v4
+10/61400/campos_512_v4
+10/61402/campos_512_v4
+10/61404/campos_512_v4
+10/61405/campos_512_v4
+10/61414/campos_512_v4
+10/61417/campos_512_v4
+10/61432/campos_512_v4
+10/61433/campos_512_v4
+10/61438/campos_512_v4
+10/61448/campos_512_v4
+10/61459/campos_512_v4
+10/61472/campos_512_v4
+10/61476/campos_512_v4
+10/61481/campos_512_v4
+10/61483/campos_512_v4
+10/61493/campos_512_v4
+10/61495/campos_512_v4
+10/61500/campos_512_v4
+10/61502/campos_512_v4
+10/61505/campos_512_v4
+10/61513/campos_512_v4
+10/61530/campos_512_v4
+10/61532/campos_512_v4
+10/61536/campos_512_v4
+10/61537/campos_512_v4
+10/61549/campos_512_v4
+10/61550/campos_512_v4
+10/61553/campos_512_v4
+10/61556/campos_512_v4
+10/61563/campos_512_v4
+10/61582/campos_512_v4
+10/61585/campos_512_v4
+10/61595/campos_512_v4
+10/61615/campos_512_v4
+10/61617/campos_512_v4
+10/61626/campos_512_v4
+10/61628/campos_512_v4
+10/61633/campos_512_v4
+10/61636/campos_512_v4
+10/61641/campos_512_v4
+10/61649/campos_512_v4
+10/61655/campos_512_v4
+10/61662/campos_512_v4
+10/61664/campos_512_v4
+10/61674/campos_512_v4
+10/61676/campos_512_v4
+10/61677/campos_512_v4
+10/61681/campos_512_v4
+10/61691/campos_512_v4
+10/61698/campos_512_v4
+10/61709/campos_512_v4
+10/61711/campos_512_v4
+10/61722/campos_512_v4
+10/61726/campos_512_v4
+10/61727/campos_512_v4
+10/61734/campos_512_v4
+10/61755/campos_512_v4
+10/61759/campos_512_v4
+10/61761/campos_512_v4
+10/61771/campos_512_v4
+10/61778/campos_512_v4
+10/61789/campos_512_v4
+10/61798/campos_512_v4
+10/61803/campos_512_v4
+10/61805/campos_512_v4
+10/61812/campos_512_v4
+10/61815/campos_512_v4
+10/61818/campos_512_v4
+10/61829/campos_512_v4
+10/61832/campos_512_v4
+10/61839/campos_512_v4
+10/61841/campos_512_v4
+10/61844/campos_512_v4
+10/61855/campos_512_v4
+10/61857/campos_512_v4
+10/61863/campos_512_v4
+10/61886/campos_512_v4
+10/61901/campos_512_v4
+10/61916/campos_512_v4
+10/61927/campos_512_v4
+10/61950/campos_512_v4
+10/61951/campos_512_v4
+10/61956/campos_512_v4
+10/61968/campos_512_v4
+10/61971/campos_512_v4
+10/61978/campos_512_v4
+10/61981/campos_512_v4
+10/62000/campos_512_v4
+10/62008/campos_512_v4
+10/62018/campos_512_v4
+10/62019/campos_512_v4
+10/62035/campos_512_v4
+10/62037/campos_512_v4
+10/62055/campos_512_v4
+10/62063/campos_512_v4
+10/62064/campos_512_v4
+10/62071/campos_512_v4
+10/62096/campos_512_v4
+10/62103/campos_512_v4
+10/62122/campos_512_v4
+10/62126/campos_512_v4
+10/62127/campos_512_v4
+10/62151/campos_512_v4
+10/62171/campos_512_v4
+10/62183/campos_512_v4
+10/62186/campos_512_v4
+10/62193/campos_512_v4
+10/62216/campos_512_v4
+10/62218/campos_512_v4
+10/62228/campos_512_v4
+10/62229/campos_512_v4
+10/62237/campos_512_v4
+10/62250/campos_512_v4
+10/62252/campos_512_v4
+10/62261/campos_512_v4
+10/62262/campos_512_v4
+10/62266/campos_512_v4
+10/62298/campos_512_v4
+10/62303/campos_512_v4
+10/62308/campos_512_v4
+10/62315/campos_512_v4
+10/62317/campos_512_v4
+10/62326/campos_512_v4
+10/62329/campos_512_v4
+10/62338/campos_512_v4
+10/62339/campos_512_v4
+10/62341/campos_512_v4
+10/62348/campos_512_v4
+10/62349/campos_512_v4
+10/62350/campos_512_v4
+10/62367/campos_512_v4
+10/62379/campos_512_v4
+10/62381/campos_512_v4
+10/62383/campos_512_v4
+10/62386/campos_512_v4
+10/62395/campos_512_v4
+10/62399/campos_512_v4
+10/62400/campos_512_v4
+10/62402/campos_512_v4
+10/62418/campos_512_v4
+10/62420/campos_512_v4
+10/62426/campos_512_v4
+10/62433/campos_512_v4
+10/62442/campos_512_v4
+10/62443/campos_512_v4
+10/62457/campos_512_v4
+10/62465/campos_512_v4
+10/62471/campos_512_v4
+10/62474/campos_512_v4
+10/62482/campos_512_v4
+10/62483/campos_512_v4
+10/62495/campos_512_v4
+10/62496/campos_512_v4
+10/62501/campos_512_v4
+10/62519/campos_512_v4
+10/62522/campos_512_v4
+10/62526/campos_512_v4
+10/62530/campos_512_v4
+10/62539/campos_512_v4
+10/62541/campos_512_v4
+10/62547/campos_512_v4
+10/62551/campos_512_v4
+10/62568/campos_512_v4
+10/62576/campos_512_v4
+10/62578/campos_512_v4
+10/62591/campos_512_v4
+10/62599/campos_512_v4
+10/62613/campos_512_v4
+10/62614/campos_512_v4
+10/62638/campos_512_v4
+10/62642/campos_512_v4
+10/62645/campos_512_v4
+10/62651/campos_512_v4
+10/62653/campos_512_v4
+10/62666/campos_512_v4
+10/62672/campos_512_v4
+10/62682/campos_512_v4
+10/62692/campos_512_v4
+10/62693/campos_512_v4
+10/62701/campos_512_v4
+10/62729/campos_512_v4
+10/62739/campos_512_v4
+10/62741/campos_512_v4
+10/62751/campos_512_v4
+10/62752/campos_512_v4
+10/62756/campos_512_v4
+10/62758/campos_512_v4
+10/62767/campos_512_v4
+10/62790/campos_512_v4
+10/62792/campos_512_v4
+10/62801/campos_512_v4
+10/62809/campos_512_v4
+10/62831/campos_512_v4
+10/62843/campos_512_v4
+10/62846/campos_512_v4
+10/62853/campos_512_v4
+10/62858/campos_512_v4
+10/62859/campos_512_v4
+10/62865/campos_512_v4
+10/62879/campos_512_v4
+10/62903/campos_512_v4
+10/62913/campos_512_v4
+10/62927/campos_512_v4
+10/62928/campos_512_v4
+10/62934/campos_512_v4
+10/62937/campos_512_v4
+10/62944/campos_512_v4
+10/62979/campos_512_v4
+10/62992/campos_512_v4
+10/62997/campos_512_v4
+10/62999/campos_512_v4
+10/63003/campos_512_v4
+10/63044/campos_512_v4
+10/63052/campos_512_v4
+10/63055/campos_512_v4
+10/63066/campos_512_v4
+10/63068/campos_512_v4
+10/63070/campos_512_v4
+10/63071/campos_512_v4
+10/63072/campos_512_v4
+10/63073/campos_512_v4
+10/63081/campos_512_v4
+10/63082/campos_512_v4
+10/63087/campos_512_v4
+10/63090/campos_512_v4
+10/63093/campos_512_v4
+10/63094/campos_512_v4
+10/63098/campos_512_v4
+10/63100/campos_512_v4
+10/63106/campos_512_v4
+10/63111/campos_512_v4
+10/63145/campos_512_v4
+10/63153/campos_512_v4
+10/63179/campos_512_v4
+10/63187/campos_512_v4
+10/63195/campos_512_v4
+10/63200/campos_512_v4
+10/63205/campos_512_v4
+10/63230/campos_512_v4
+10/63232/campos_512_v4
+10/63234/campos_512_v4
+10/63238/campos_512_v4
+10/63246/campos_512_v4
+10/63262/campos_512_v4
+10/63267/campos_512_v4
+10/63272/campos_512_v4
+10/63275/campos_512_v4
+10/63277/campos_512_v4
+10/63288/campos_512_v4
+10/63290/campos_512_v4
+10/63291/campos_512_v4
+10/63293/campos_512_v4
+10/63295/campos_512_v4
+10/63298/campos_512_v4
+10/63301/campos_512_v4
+10/63314/campos_512_v4
+10/63316/campos_512_v4
+10/63322/campos_512_v4
+10/63328/campos_512_v4
+10/63330/campos_512_v4
+10/63333/campos_512_v4
+10/63340/campos_512_v4
+10/63365/campos_512_v4
+10/63372/campos_512_v4
+10/63384/campos_512_v4
+10/63390/campos_512_v4
+10/63391/campos_512_v4
+10/63404/campos_512_v4
+10/63407/campos_512_v4
+10/63411/campos_512_v4
+10/63412/campos_512_v4
+10/63413/campos_512_v4
+10/63417/campos_512_v4
+10/63426/campos_512_v4
+10/63431/campos_512_v4
+10/63440/campos_512_v4
+10/63442/campos_512_v4
+10/63452/campos_512_v4
+10/63455/campos_512_v4
+10/63457/campos_512_v4
+10/63458/campos_512_v4
+10/63520/campos_512_v4
+10/63524/campos_512_v4
+10/63541/campos_512_v4
+10/63557/campos_512_v4
+10/63560/campos_512_v4
+10/63561/campos_512_v4
+10/63565/campos_512_v4
+10/63586/campos_512_v4
+10/63589/campos_512_v4
+10/63590/campos_512_v4
+10/63608/campos_512_v4
+10/63632/campos_512_v4
+10/63670/campos_512_v4
+10/63672/campos_512_v4
+10/63675/campos_512_v4
+10/63679/campos_512_v4
+10/63682/campos_512_v4
+10/63685/campos_512_v4
+10/63691/campos_512_v4
+10/63693/campos_512_v4
+10/63702/campos_512_v4
+10/63705/campos_512_v4
+10/63709/campos_512_v4
+10/63711/campos_512_v4
+10/63714/campos_512_v4
+10/63716/campos_512_v4
+10/63718/campos_512_v4
+10/63719/campos_512_v4
+10/63732/campos_512_v4
+10/63741/campos_512_v4
+10/63744/campos_512_v4
+10/63758/campos_512_v4
+10/63762/campos_512_v4
+10/63773/campos_512_v4
+10/63774/campos_512_v4
+10/63795/campos_512_v4
+10/63798/campos_512_v4
+10/63812/campos_512_v4
+10/63819/campos_512_v4
+10/63824/campos_512_v4
+10/63835/campos_512_v4
+10/63854/campos_512_v4
+10/63862/campos_512_v4
+10/63863/campos_512_v4
+10/63865/campos_512_v4
+10/63880/campos_512_v4
+10/63890/campos_512_v4
+10/63895/campos_512_v4
+10/63897/campos_512_v4
+10/63900/campos_512_v4
+10/63901/campos_512_v4
+10/63902/campos_512_v4
+10/63904/campos_512_v4
+10/63929/campos_512_v4
+10/63935/campos_512_v4
+10/63940/campos_512_v4
+10/63957/campos_512_v4
+10/63970/campos_512_v4
+10/63973/campos_512_v4
+10/63979/campos_512_v4
+10/63999/campos_512_v4
+10/64002/campos_512_v4
+10/64015/campos_512_v4
+10/64020/campos_512_v4
+10/64028/campos_512_v4
+10/64054/campos_512_v4
+10/64075/campos_512_v4
+10/64088/campos_512_v4
+10/64093/campos_512_v4
+10/64100/campos_512_v4
+10/64102/campos_512_v4
+10/64103/campos_512_v4
+10/64142/campos_512_v4
+10/64144/campos_512_v4
+10/64154/campos_512_v4
+10/64165/campos_512_v4
+10/64169/campos_512_v4
+10/64174/campos_512_v4
+10/64175/campos_512_v4
+10/64178/campos_512_v4
+10/64179/campos_512_v4
+10/64194/campos_512_v4
+10/64196/campos_512_v4
+10/64199/campos_512_v4
+10/64207/campos_512_v4
+10/64209/campos_512_v4
+10/64211/campos_512_v4
+10/64213/campos_512_v4
+10/64232/campos_512_v4
+10/64233/campos_512_v4
+10/64237/campos_512_v4
+10/64244/campos_512_v4
+10/64250/campos_512_v4
+10/64255/campos_512_v4
+10/64263/campos_512_v4
+10/64272/campos_512_v4
+10/64278/campos_512_v4
+10/64282/campos_512_v4
+10/64289/campos_512_v4
+10/64296/campos_512_v4
+10/64299/campos_512_v4
+10/64335/campos_512_v4
+10/64337/campos_512_v4
+10/64343/campos_512_v4
+10/64344/campos_512_v4
+10/64352/campos_512_v4
+10/64357/campos_512_v4
+10/64368/campos_512_v4
+10/64372/campos_512_v4
+10/64381/campos_512_v4
+10/64390/campos_512_v4
+10/64411/campos_512_v4
+10/64422/campos_512_v4
+10/64425/campos_512_v4
+10/64426/campos_512_v4
+10/64431/campos_512_v4
+10/64432/campos_512_v4
+10/64442/campos_512_v4
+10/64448/campos_512_v4
+10/64453/campos_512_v4
+10/64461/campos_512_v4
+10/64462/campos_512_v4
+10/64470/campos_512_v4
+10/64484/campos_512_v4
+10/64486/campos_512_v4
+10/64488/campos_512_v4
+10/64490/campos_512_v4
+10/64492/campos_512_v4
+10/64501/campos_512_v4
+10/64504/campos_512_v4
+10/64507/campos_512_v4
+10/64516/campos_512_v4
+10/64517/campos_512_v4
+10/64520/campos_512_v4
+10/64521/campos_512_v4
+10/64531/campos_512_v4
+10/64532/campos_512_v4
+10/64535/campos_512_v4
+10/64536/campos_512_v4
+10/64545/campos_512_v4
+10/64547/campos_512_v4
+10/64553/campos_512_v4
+10/64555/campos_512_v4
+10/64558/campos_512_v4
+10/64561/campos_512_v4
+10/64564/campos_512_v4
+10/64573/campos_512_v4
+10/64578/campos_512_v4
+10/64590/campos_512_v4
+10/64592/campos_512_v4
+10/64600/campos_512_v4
+10/64602/campos_512_v4
+10/64616/campos_512_v4
+10/64621/campos_512_v4
+10/64623/campos_512_v4
+10/64630/campos_512_v4
+10/64640/campos_512_v4
+10/64643/campos_512_v4
+10/64649/campos_512_v4
+10/64657/campos_512_v4
+10/64669/campos_512_v4
+10/64670/campos_512_v4
+10/64681/campos_512_v4
+10/64682/campos_512_v4
+10/64692/campos_512_v4
+10/64696/campos_512_v4
+10/64704/campos_512_v4
+10/64717/campos_512_v4
+10/64719/campos_512_v4
+10/64720/campos_512_v4
+10/64723/campos_512_v4
+10/64726/campos_512_v4
+10/64728/campos_512_v4
+10/64741/campos_512_v4
+10/64764/campos_512_v4
+10/64768/campos_512_v4
+10/64775/campos_512_v4
+10/64792/campos_512_v4
+10/64795/campos_512_v4
+10/64798/campos_512_v4
+10/64800/campos_512_v4
+10/64801/campos_512_v4
+10/64803/campos_512_v4
+10/64807/campos_512_v4
+10/64808/campos_512_v4
+10/64811/campos_512_v4
+10/64826/campos_512_v4
+10/64827/campos_512_v4
+10/64830/campos_512_v4
+10/64834/campos_512_v4
+10/64851/campos_512_v4
+10/64853/campos_512_v4
+10/64870/campos_512_v4
+10/64884/campos_512_v4
+10/64893/campos_512_v4
+10/64904/campos_512_v4
+10/64913/campos_512_v4
+10/64915/campos_512_v4
+10/64922/campos_512_v4
+10/64926/campos_512_v4
+10/64927/campos_512_v4
+10/64928/campos_512_v4
+10/64929/campos_512_v4
+10/64940/campos_512_v4
+10/64943/campos_512_v4
+10/64945/campos_512_v4
+10/64951/campos_512_v4
+10/64957/campos_512_v4
+10/64962/campos_512_v4
+10/64965/campos_512_v4
+10/64967/campos_512_v4
+10/64978/campos_512_v4
+100/510002/campos_512_v4
+100/510009/campos_512_v4
+100/510023/campos_512_v4
+100/510032/campos_512_v4
+100/510045/campos_512_v4
+100/510049/campos_512_v4
+100/510051/campos_512_v4
+100/510058/campos_512_v4
+100/510059/campos_512_v4
+100/510060/campos_512_v4
+100/510061/campos_512_v4
+100/510062/campos_512_v4
+100/510078/campos_512_v4
+100/510086/campos_512_v4
+100/510089/campos_512_v4
+100/510094/campos_512_v4
+100/510100/campos_512_v4
+100/510107/campos_512_v4
+100/510110/campos_512_v4
+100/510114/campos_512_v4
+100/510144/campos_512_v4
+100/510150/campos_512_v4
+100/510152/campos_512_v4
+100/510164/campos_512_v4
+100/510169/campos_512_v4
+100/510179/campos_512_v4
+100/510193/campos_512_v4
+100/510194/campos_512_v4
+100/510201/campos_512_v4
+100/510208/campos_512_v4
+100/510210/campos_512_v4
+100/510211/campos_512_v4
+100/510231/campos_512_v4
+100/510245/campos_512_v4
+100/510259/campos_512_v4
+100/510265/campos_512_v4
+100/510277/campos_512_v4
+100/510293/campos_512_v4
+100/510294/campos_512_v4
+100/510296/campos_512_v4
+100/510301/campos_512_v4
+100/510303/campos_512_v4
+100/510304/campos_512_v4
+100/510307/campos_512_v4
+100/510309/campos_512_v4
+100/510310/campos_512_v4
+100/510311/campos_512_v4
+100/510316/campos_512_v4
+100/510327/campos_512_v4
+100/510331/campos_512_v4
+100/510343/campos_512_v4
+100/510357/campos_512_v4
+100/510368/campos_512_v4
+100/510369/campos_512_v4
+100/510380/campos_512_v4
+100/510393/campos_512_v4
+100/510399/campos_512_v4
+100/510402/campos_512_v4
+100/510416/campos_512_v4
+100/510421/campos_512_v4
+100/510426/campos_512_v4
+100/510431/campos_512_v4
+100/510438/campos_512_v4
+100/510472/campos_512_v4
+100/510479/campos_512_v4
+100/510496/campos_512_v4
+100/510498/campos_512_v4
+100/510507/campos_512_v4
+100/510508/campos_512_v4
+100/510519/campos_512_v4
+100/510530/campos_512_v4
+100/510559/campos_512_v4
+100/510577/campos_512_v4
+100/510580/campos_512_v4
+100/510600/campos_512_v4
+100/510607/campos_512_v4
+100/510626/campos_512_v4
+100/510628/campos_512_v4
+100/510652/campos_512_v4
+100/510654/campos_512_v4
+100/510681/campos_512_v4
+100/510682/campos_512_v4
+100/510705/campos_512_v4
+100/510720/campos_512_v4
+100/510726/campos_512_v4
+100/510731/campos_512_v4
+100/510732/campos_512_v4
+100/510733/campos_512_v4
+100/510760/campos_512_v4
+100/510775/campos_512_v4
+100/510776/campos_512_v4
+100/510778/campos_512_v4
+100/510788/campos_512_v4
+100/510799/campos_512_v4
+100/510802/campos_512_v4
+100/510807/campos_512_v4
+100/510810/campos_512_v4
+100/510818/campos_512_v4
+100/510821/campos_512_v4
+100/510823/campos_512_v4
+100/510835/campos_512_v4
+100/510853/campos_512_v4
+100/510863/campos_512_v4
+100/510864/campos_512_v4
+100/510871/campos_512_v4
+100/510880/campos_512_v4
+100/510881/campos_512_v4
+100/510891/campos_512_v4
+100/510906/campos_512_v4
+100/510907/campos_512_v4
+100/510908/campos_512_v4
+100/510919/campos_512_v4
+100/510920/campos_512_v4
+100/510922/campos_512_v4
+100/510929/campos_512_v4
+100/510937/campos_512_v4
+100/510938/campos_512_v4
+100/510941/campos_512_v4
+100/510957/campos_512_v4
+100/510965/campos_512_v4
+100/510966/campos_512_v4
+100/510973/campos_512_v4
+100/510983/campos_512_v4
+100/510985/campos_512_v4
+100/511020/campos_512_v4
+100/511026/campos_512_v4
+100/511030/campos_512_v4
+100/511034/campos_512_v4
+100/511038/campos_512_v4
+100/511061/campos_512_v4
+100/511074/campos_512_v4
+100/511076/campos_512_v4
+100/511082/campos_512_v4
+100/511092/campos_512_v4
+100/511117/campos_512_v4
+100/511122/campos_512_v4
+100/511145/campos_512_v4
+100/511169/campos_512_v4
+100/511191/campos_512_v4
+100/511194/campos_512_v4
+100/511202/campos_512_v4
+100/511203/campos_512_v4
+100/511206/campos_512_v4
+100/511212/campos_512_v4
+100/511219/campos_512_v4
+100/511224/campos_512_v4
+100/511250/campos_512_v4
+100/511254/campos_512_v4
+100/511259/campos_512_v4
+100/511260/campos_512_v4
+100/511287/campos_512_v4
+100/511290/campos_512_v4
+100/511299/campos_512_v4
+100/511306/campos_512_v4
+100/511330/campos_512_v4
+100/511351/campos_512_v4
+100/511357/campos_512_v4
+100/511361/campos_512_v4
+100/511364/campos_512_v4
+100/511369/campos_512_v4
+100/511370/campos_512_v4
+100/511376/campos_512_v4
+100/511384/campos_512_v4
+100/511385/campos_512_v4
+100/511386/campos_512_v4
+100/511401/campos_512_v4
+100/511406/campos_512_v4
+100/511407/campos_512_v4
+100/511409/campos_512_v4
+100/511418/campos_512_v4
+100/511422/campos_512_v4
+100/511426/campos_512_v4
+100/511432/campos_512_v4
+100/511438/campos_512_v4
+100/511439/campos_512_v4
+100/511444/campos_512_v4
+100/511458/campos_512_v4
+100/511469/campos_512_v4
+100/511477/campos_512_v4
+100/511484/campos_512_v4
+100/511486/campos_512_v4
+100/511491/campos_512_v4
+100/511496/campos_512_v4
+100/511499/campos_512_v4
+100/511502/campos_512_v4
+100/511503/campos_512_v4
+100/511504/campos_512_v4
+100/511515/campos_512_v4
+100/511517/campos_512_v4
+100/511523/campos_512_v4
+100/511529/campos_512_v4
+100/511551/campos_512_v4
+100/511556/campos_512_v4
+100/511576/campos_512_v4
+100/511594/campos_512_v4
+100/511610/campos_512_v4
+100/511616/campos_512_v4
+100/511621/campos_512_v4
+100/511625/campos_512_v4
+100/511644/campos_512_v4
+100/511647/campos_512_v4
+100/511658/campos_512_v4
+100/511668/campos_512_v4
+100/511670/campos_512_v4
+100/511677/campos_512_v4
+100/511696/campos_512_v4
+100/511703/campos_512_v4
+100/511706/campos_512_v4
+100/511741/campos_512_v4
+100/511743/campos_512_v4
+100/511748/campos_512_v4
+100/511749/campos_512_v4
+100/511759/campos_512_v4
+100/511773/campos_512_v4
+100/511782/campos_512_v4
+100/511795/campos_512_v4
+100/511796/campos_512_v4
+100/511805/campos_512_v4
+100/511807/campos_512_v4
+100/511809/campos_512_v4
+100/511817/campos_512_v4
+100/511834/campos_512_v4
+100/511850/campos_512_v4
+100/511857/campos_512_v4
+100/511867/campos_512_v4
+100/511879/campos_512_v4
+100/511889/campos_512_v4
+100/511892/campos_512_v4
+100/511904/campos_512_v4
+100/511912/campos_512_v4
+100/511913/campos_512_v4
+100/511914/campos_512_v4
+100/511917/campos_512_v4
+100/511928/campos_512_v4
+100/511953/campos_512_v4
+100/511978/campos_512_v4
+100/511981/campos_512_v4
+100/511982/campos_512_v4
+100/512005/campos_512_v4
+100/512011/campos_512_v4
+100/512016/campos_512_v4
+100/512028/campos_512_v4
+100/512055/campos_512_v4
+100/512056/campos_512_v4
+100/512058/campos_512_v4
+100/512062/campos_512_v4
+100/512063/campos_512_v4
+100/512074/campos_512_v4
+100/512075/campos_512_v4
+100/512079/campos_512_v4
+100/512095/campos_512_v4
+100/512100/campos_512_v4
+100/512105/campos_512_v4
+100/512106/campos_512_v4
+100/512109/campos_512_v4
+100/512121/campos_512_v4
+100/512125/campos_512_v4
+100/512135/campos_512_v4
+100/512138/campos_512_v4
+100/512150/campos_512_v4
+100/512154/campos_512_v4
+100/512157/campos_512_v4
+100/512158/campos_512_v4
+100/512168/campos_512_v4
+100/512173/campos_512_v4
+100/512177/campos_512_v4
+100/512178/campos_512_v4
+100/512184/campos_512_v4
+100/512185/campos_512_v4
+100/512211/campos_512_v4
+100/512213/campos_512_v4
+100/512220/campos_512_v4
+100/512224/campos_512_v4
+100/512236/campos_512_v4
+100/512242/campos_512_v4
+100/512250/campos_512_v4
+100/512264/campos_512_v4
+100/512266/campos_512_v4
+100/512307/campos_512_v4
+100/512310/campos_512_v4
+100/512331/campos_512_v4
+100/512342/campos_512_v4
+100/512356/campos_512_v4
+100/512358/campos_512_v4
+100/512364/campos_512_v4
+100/512365/campos_512_v4
+100/512375/campos_512_v4
+100/512384/campos_512_v4
+100/512386/campos_512_v4
+100/512397/campos_512_v4
+100/512400/campos_512_v4
+100/512407/campos_512_v4
+100/512411/campos_512_v4
+100/512420/campos_512_v4
+100/512429/campos_512_v4
+100/512430/campos_512_v4
+100/512441/campos_512_v4
+100/512446/campos_512_v4
+100/512462/campos_512_v4
+100/512485/campos_512_v4
+100/512490/campos_512_v4
+100/512495/campos_512_v4
+100/512500/campos_512_v4
+100/512514/campos_512_v4
+100/512520/campos_512_v4
+100/512528/campos_512_v4
+100/512540/campos_512_v4
+100/512541/campos_512_v4
+100/512549/campos_512_v4
+100/512550/campos_512_v4
+100/512553/campos_512_v4
+100/512572/campos_512_v4
+100/512575/campos_512_v4
+100/512578/campos_512_v4
+100/512580/campos_512_v4
+100/512590/campos_512_v4
+100/512614/campos_512_v4
+100/512616/campos_512_v4
+100/512643/campos_512_v4
+100/512651/campos_512_v4
+100/512662/campos_512_v4
+100/512692/campos_512_v4
+100/512715/campos_512_v4
+100/512723/campos_512_v4
+100/512726/campos_512_v4
+100/512730/campos_512_v4
+100/512735/campos_512_v4
+100/512749/campos_512_v4
+100/512750/campos_512_v4
+100/512753/campos_512_v4
+100/512757/campos_512_v4
+100/512798/campos_512_v4
+100/512805/campos_512_v4
+100/512810/campos_512_v4
+100/512818/campos_512_v4
+100/512825/campos_512_v4
+100/512845/campos_512_v4
+100/512854/campos_512_v4
+100/512865/campos_512_v4
+100/512866/campos_512_v4
+100/512867/campos_512_v4
+100/512873/campos_512_v4
+100/512883/campos_512_v4
+100/512884/campos_512_v4
+100/512885/campos_512_v4
+100/512891/campos_512_v4
+100/512902/campos_512_v4
+100/512903/campos_512_v4
+100/512911/campos_512_v4
+100/512913/campos_512_v4
+100/512918/campos_512_v4
+100/512919/campos_512_v4
+100/512931/campos_512_v4
+100/512932/campos_512_v4
+100/512960/campos_512_v4
+100/512991/campos_512_v4
+100/512996/campos_512_v4
+100/513010/campos_512_v4
+100/513022/campos_512_v4
+100/513037/campos_512_v4
+100/513045/campos_512_v4
+100/513047/campos_512_v4
+100/513051/campos_512_v4
+100/513052/campos_512_v4
+100/513058/campos_512_v4
+100/513064/campos_512_v4
+100/513081/campos_512_v4
+100/513088/campos_512_v4
+100/513091/campos_512_v4
+100/513094/campos_512_v4
+100/513095/campos_512_v4
+100/513096/campos_512_v4
+100/513101/campos_512_v4
+100/513102/campos_512_v4
+100/513105/campos_512_v4
+100/513108/campos_512_v4
+100/513116/campos_512_v4
+100/513126/campos_512_v4
+100/513139/campos_512_v4
+100/513147/campos_512_v4
+100/513150/campos_512_v4
+100/513160/campos_512_v4
+100/513163/campos_512_v4
+100/513177/campos_512_v4
+100/513180/campos_512_v4
+100/513190/campos_512_v4
+100/513192/campos_512_v4
+100/513231/campos_512_v4
+100/513236/campos_512_v4
+100/513241/campos_512_v4
+100/513243/campos_512_v4
+100/513254/campos_512_v4
+100/513255/campos_512_v4
+100/513262/campos_512_v4
+100/513263/campos_512_v4
+100/513270/campos_512_v4
+100/513273/campos_512_v4
+100/513282/campos_512_v4
+100/513287/campos_512_v4
+100/513290/campos_512_v4
+100/513298/campos_512_v4
+100/513303/campos_512_v4
+100/513304/campos_512_v4
+100/513306/campos_512_v4
+100/513312/campos_512_v4
+100/513314/campos_512_v4
+100/513315/campos_512_v4
+100/513316/campos_512_v4
+100/513327/campos_512_v4
+100/513329/campos_512_v4
+100/513358/campos_512_v4
+100/513360/campos_512_v4
+100/513375/campos_512_v4
+100/513377/campos_512_v4
+100/513385/campos_512_v4
+100/513395/campos_512_v4
+100/513400/campos_512_v4
+100/513408/campos_512_v4
+100/513411/campos_512_v4
+100/513423/campos_512_v4
+100/513436/campos_512_v4
+100/513438/campos_512_v4
+100/513451/campos_512_v4
+100/513455/campos_512_v4
+100/513464/campos_512_v4
+100/513476/campos_512_v4
+100/513482/campos_512_v4
+100/513487/campos_512_v4
+100/513491/campos_512_v4
+100/513502/campos_512_v4
+100/513506/campos_512_v4
+100/513510/campos_512_v4
+100/513511/campos_512_v4
+100/513513/campos_512_v4
+100/513518/campos_512_v4
+100/513519/campos_512_v4
+100/513522/campos_512_v4
+100/513528/campos_512_v4
+100/513533/campos_512_v4
+100/513539/campos_512_v4
+100/513544/campos_512_v4
+100/513563/campos_512_v4
+100/513567/campos_512_v4
+100/513574/campos_512_v4
+100/513577/campos_512_v4
+100/513592/campos_512_v4
+100/513595/campos_512_v4
+100/513601/campos_512_v4
+100/513603/campos_512_v4
+100/513607/campos_512_v4
+100/513617/campos_512_v4
+100/513636/campos_512_v4
+100/513644/campos_512_v4
+100/513650/campos_512_v4
+100/513663/campos_512_v4
+100/513666/campos_512_v4
+100/513672/campos_512_v4
+100/513676/campos_512_v4
+100/513687/campos_512_v4
+100/513688/campos_512_v4
+100/513700/campos_512_v4
+100/513701/campos_512_v4
+100/513705/campos_512_v4
+100/513708/campos_512_v4
+100/513716/campos_512_v4
+100/513757/campos_512_v4
+100/513760/campos_512_v4
+100/513768/campos_512_v4
+100/513771/campos_512_v4
+100/513774/campos_512_v4
+100/513779/campos_512_v4
+100/513798/campos_512_v4
+100/513825/campos_512_v4
+100/513826/campos_512_v4
+100/513827/campos_512_v4
+100/513830/campos_512_v4
+100/513839/campos_512_v4
+100/513862/campos_512_v4
+100/513866/campos_512_v4
+100/513867/campos_512_v4
+100/513870/campos_512_v4
+100/513873/campos_512_v4
+100/513891/campos_512_v4
+100/513892/campos_512_v4
+100/513893/campos_512_v4
+100/513903/campos_512_v4
+100/513923/campos_512_v4
+100/513928/campos_512_v4
+100/513930/campos_512_v4
+100/513933/campos_512_v4
+100/513934/campos_512_v4
+100/513944/campos_512_v4
+100/513946/campos_512_v4
+100/513952/campos_512_v4
+100/513959/campos_512_v4
+100/513970/campos_512_v4
+100/513972/campos_512_v4
+100/513979/campos_512_v4
+100/513980/campos_512_v4
+100/513981/campos_512_v4
+100/513986/campos_512_v4
+100/513987/campos_512_v4
+100/513994/campos_512_v4
+100/514002/campos_512_v4
+100/514014/campos_512_v4
+100/514015/campos_512_v4
+100/514038/campos_512_v4
+100/514040/campos_512_v4
+100/514061/campos_512_v4
+100/514078/campos_512_v4
+100/514080/campos_512_v4
+100/514081/campos_512_v4
+100/514088/campos_512_v4
+100/514091/campos_512_v4
+100/514096/campos_512_v4
+100/514097/campos_512_v4
+100/514098/campos_512_v4
+100/514106/campos_512_v4
+100/514113/campos_512_v4
+100/514123/campos_512_v4
+100/514124/campos_512_v4
+100/514127/campos_512_v4
+100/514133/campos_512_v4
+100/514134/campos_512_v4
+100/514136/campos_512_v4
+100/514150/campos_512_v4
+100/514151/campos_512_v4
+100/514156/campos_512_v4
+100/514157/campos_512_v4
+100/514164/campos_512_v4
+100/514166/campos_512_v4
+100/514177/campos_512_v4
+100/514179/campos_512_v4
+100/514181/campos_512_v4
+100/514182/campos_512_v4
+100/514185/campos_512_v4
+100/514199/campos_512_v4
+100/514203/campos_512_v4
+100/514209/campos_512_v4
+100/514216/campos_512_v4
+100/514219/campos_512_v4
+100/514226/campos_512_v4
+100/514230/campos_512_v4
+100/514241/campos_512_v4
+100/514245/campos_512_v4
+100/514252/campos_512_v4
+100/514254/campos_512_v4
+100/514257/campos_512_v4
+100/514271/campos_512_v4
+100/514279/campos_512_v4
+100/514283/campos_512_v4
+100/514286/campos_512_v4
+100/514288/campos_512_v4
+100/514292/campos_512_v4
+100/514308/campos_512_v4
+100/514311/campos_512_v4
+100/514314/campos_512_v4
+100/514315/campos_512_v4
+100/514322/campos_512_v4
+100/514325/campos_512_v4
+100/514341/campos_512_v4
+100/514353/campos_512_v4
+100/514356/campos_512_v4
+100/514357/campos_512_v4
+100/514362/campos_512_v4
+100/514363/campos_512_v4
+100/514367/campos_512_v4
+100/514386/campos_512_v4
+100/514387/campos_512_v4
+100/514391/campos_512_v4
+100/514392/campos_512_v4
+100/514399/campos_512_v4
+100/514405/campos_512_v4
+100/514411/campos_512_v4
+100/514414/campos_512_v4
+100/514419/campos_512_v4
+100/514424/campos_512_v4
+100/514438/campos_512_v4
+100/514440/campos_512_v4
+100/514445/campos_512_v4
+100/514446/campos_512_v4
+100/514465/campos_512_v4
+100/514474/campos_512_v4
+100/514477/campos_512_v4
+100/514486/campos_512_v4
+100/514492/campos_512_v4
+100/514493/campos_512_v4
+100/514503/campos_512_v4
+100/514512/campos_512_v4
+100/514520/campos_512_v4
+100/514526/campos_512_v4
+100/514527/campos_512_v4
+100/514530/campos_512_v4
+100/514539/campos_512_v4
+100/514543/campos_512_v4
+100/514544/campos_512_v4
+100/514545/campos_512_v4
+100/514553/campos_512_v4
+100/514555/campos_512_v4
+100/514561/campos_512_v4
+100/514575/campos_512_v4
+100/514580/campos_512_v4
+100/514584/campos_512_v4
+100/514589/campos_512_v4
+100/514594/campos_512_v4
+100/514607/campos_512_v4
+100/514609/campos_512_v4
+100/514611/campos_512_v4
+100/514619/campos_512_v4
+100/514626/campos_512_v4
+100/514632/campos_512_v4
+100/514634/campos_512_v4
+100/514641/campos_512_v4
+100/514655/campos_512_v4
+100/514665/campos_512_v4
+100/514689/campos_512_v4
+100/514692/campos_512_v4
+100/514701/campos_512_v4
+100/514706/campos_512_v4
+100/514717/campos_512_v4
+100/514718/campos_512_v4
+100/514722/campos_512_v4
+100/514737/campos_512_v4
+100/514739/campos_512_v4
+100/514751/campos_512_v4
+100/514763/campos_512_v4
+100/514764/campos_512_v4
+100/514768/campos_512_v4
+100/514769/campos_512_v4
+100/514786/campos_512_v4
+100/514791/campos_512_v4
+100/514795/campos_512_v4
+100/514798/campos_512_v4
+100/514806/campos_512_v4
+100/514832/campos_512_v4
+100/514858/campos_512_v4
+100/514862/campos_512_v4
+100/514864/campos_512_v4
+100/514865/campos_512_v4
+100/514879/campos_512_v4
+100/514880/campos_512_v4
+100/514887/campos_512_v4
+100/514889/campos_512_v4
+100/514902/campos_512_v4
+100/514923/campos_512_v4
+100/514936/campos_512_v4
+100/514946/campos_512_v4
+100/514949/campos_512_v4
+100/514956/campos_512_v4
+100/514958/campos_512_v4
+100/514963/campos_512_v4
+100/514964/campos_512_v4
+100/514971/campos_512_v4
+100/514973/campos_512_v4
+100/514980/campos_512_v4
+100/514990/campos_512_v4
+100/514991/campos_512_v4
+100/514999/campos_512_v4
+101/515004/campos_512_v4
+101/515019/campos_512_v4
+101/515021/campos_512_v4
+101/515023/campos_512_v4
+101/515029/campos_512_v4
+101/515049/campos_512_v4
+101/515056/campos_512_v4
+101/515061/campos_512_v4
+101/515063/campos_512_v4
+101/515066/campos_512_v4
+101/515067/campos_512_v4
+101/515068/campos_512_v4
+101/515074/campos_512_v4
+101/515087/campos_512_v4
+101/515090/campos_512_v4
+101/515096/campos_512_v4
+101/515103/campos_512_v4
+101/515109/campos_512_v4
+101/515112/campos_512_v4
+101/515139/campos_512_v4
+101/515144/campos_512_v4
+101/515145/campos_512_v4
+101/515150/campos_512_v4
+101/515156/campos_512_v4
+101/515163/campos_512_v4
+101/515171/campos_512_v4
+101/515186/campos_512_v4
+101/515216/campos_512_v4
+101/515217/campos_512_v4
+101/515224/campos_512_v4
+101/515228/campos_512_v4
+101/515240/campos_512_v4
+101/515266/campos_512_v4
+101/515290/campos_512_v4
+101/515293/campos_512_v4
+101/515297/campos_512_v4
+101/515298/campos_512_v4
+101/515301/campos_512_v4
+101/515313/campos_512_v4
+101/515336/campos_512_v4
+101/515343/campos_512_v4
+101/515345/campos_512_v4
+101/515349/campos_512_v4
+101/515353/campos_512_v4
+101/515370/campos_512_v4
+101/515371/campos_512_v4
+101/515381/campos_512_v4
+101/515409/campos_512_v4
+101/515426/campos_512_v4
+101/515431/campos_512_v4
+101/515434/campos_512_v4
+101/515455/campos_512_v4
+101/515465/campos_512_v4
+101/515480/campos_512_v4
+101/515497/campos_512_v4
+101/515503/campos_512_v4
+101/515522/campos_512_v4
+101/515549/campos_512_v4
+101/515553/campos_512_v4
+101/515556/campos_512_v4
+101/515566/campos_512_v4
+101/515568/campos_512_v4
+101/515570/campos_512_v4
+101/515573/campos_512_v4
+101/515576/campos_512_v4
+101/515582/campos_512_v4
+101/515585/campos_512_v4
+101/515602/campos_512_v4
+101/515610/campos_512_v4
+101/515620/campos_512_v4
+101/515621/campos_512_v4
+101/515626/campos_512_v4
+101/515643/campos_512_v4
+101/515654/campos_512_v4
+101/515660/campos_512_v4
+101/515665/campos_512_v4
+101/515699/campos_512_v4
+101/515728/campos_512_v4
+101/515736/campos_512_v4
+101/515745/campos_512_v4
+101/515749/campos_512_v4
+101/515763/campos_512_v4
+101/515775/campos_512_v4
+101/515780/campos_512_v4
+101/515792/campos_512_v4
+101/515793/campos_512_v4
+101/515794/campos_512_v4
+101/515803/campos_512_v4
+101/515804/campos_512_v4
+101/515807/campos_512_v4
+101/515809/campos_512_v4
+101/515836/campos_512_v4
+101/515843/campos_512_v4
+101/515845/campos_512_v4
+101/515847/campos_512_v4
+101/515848/campos_512_v4
+101/515850/campos_512_v4
+101/515856/campos_512_v4
+101/515861/campos_512_v4
+101/515873/campos_512_v4
+101/515883/campos_512_v4
+101/515891/campos_512_v4
+101/515892/campos_512_v4
+101/515898/campos_512_v4
+101/515919/campos_512_v4
+101/515925/campos_512_v4
+101/515931/campos_512_v4
+101/515948/campos_512_v4
+101/515952/campos_512_v4
+101/515960/campos_512_v4
+101/515967/campos_512_v4
+101/515974/campos_512_v4
+101/515985/campos_512_v4
+101/515993/campos_512_v4
+101/515996/campos_512_v4
+101/516007/campos_512_v4
+101/516010/campos_512_v4
+101/516011/campos_512_v4
+101/516030/campos_512_v4
+101/516049/campos_512_v4
+101/516054/campos_512_v4
+101/516055/campos_512_v4
+101/516064/campos_512_v4
+101/516065/campos_512_v4
+101/516069/campos_512_v4
+101/516071/campos_512_v4
+101/516083/campos_512_v4
+101/516084/campos_512_v4
+101/516088/campos_512_v4
+101/516107/campos_512_v4
+101/516109/campos_512_v4
+101/516119/campos_512_v4
+101/516124/campos_512_v4
+101/516126/campos_512_v4
+101/516130/campos_512_v4
+101/516136/campos_512_v4
+101/516140/campos_512_v4
+101/516149/campos_512_v4
+101/516158/campos_512_v4
+101/516160/campos_512_v4
+101/516163/campos_512_v4
+101/516164/campos_512_v4
+101/516166/campos_512_v4
+101/516172/campos_512_v4
+101/516173/campos_512_v4
+101/516209/campos_512_v4
+101/516214/campos_512_v4
+101/516216/campos_512_v4
+101/516218/campos_512_v4
+101/516220/campos_512_v4
+101/516238/campos_512_v4
+101/516240/campos_512_v4
+101/516250/campos_512_v4
+101/516254/campos_512_v4
+101/516259/campos_512_v4
+101/516262/campos_512_v4
+101/516271/campos_512_v4
+101/516287/campos_512_v4
+101/516297/campos_512_v4
+101/516302/campos_512_v4
+101/516307/campos_512_v4
+101/516343/campos_512_v4
+101/516350/campos_512_v4
+101/516359/campos_512_v4
+101/516384/campos_512_v4
+101/516386/campos_512_v4
+101/516397/campos_512_v4
+101/516412/campos_512_v4
+101/516417/campos_512_v4
+101/516420/campos_512_v4
+101/516426/campos_512_v4
+101/516432/campos_512_v4
+101/516434/campos_512_v4
+101/516439/campos_512_v4
+101/516448/campos_512_v4
+101/516453/campos_512_v4
+101/516459/campos_512_v4
+101/516469/campos_512_v4
+101/516473/campos_512_v4
+101/516477/campos_512_v4
+101/516485/campos_512_v4
+101/516487/campos_512_v4
+101/516494/campos_512_v4
+101/516498/campos_512_v4
+101/516517/campos_512_v4
+101/516518/campos_512_v4
+101/516519/campos_512_v4
+101/516523/campos_512_v4
+101/516529/campos_512_v4
+101/516553/campos_512_v4
+101/516559/campos_512_v4
+101/516562/campos_512_v4
+101/516564/campos_512_v4
+101/516568/campos_512_v4
+101/516577/campos_512_v4
+101/516590/campos_512_v4
+101/516592/campos_512_v4
+101/516596/campos_512_v4
+101/516600/campos_512_v4
+101/516607/campos_512_v4
+101/516628/campos_512_v4
+101/516630/campos_512_v4
+101/516636/campos_512_v4
+101/516637/campos_512_v4
+101/516644/campos_512_v4
+101/516646/campos_512_v4
+101/516649/campos_512_v4
+101/516654/campos_512_v4
+101/516661/campos_512_v4
+101/516663/campos_512_v4
+101/516665/campos_512_v4
+101/516671/campos_512_v4
+101/516677/campos_512_v4
+101/516688/campos_512_v4
+101/516692/campos_512_v4
+101/516706/campos_512_v4
+101/516712/campos_512_v4
+101/516713/campos_512_v4
+101/516717/campos_512_v4
+101/516721/campos_512_v4
+101/516725/campos_512_v4
+101/516739/campos_512_v4
+101/516751/campos_512_v4
+101/516752/campos_512_v4
+101/516756/campos_512_v4
+101/516761/campos_512_v4
+101/516788/campos_512_v4
+101/516793/campos_512_v4
+101/516803/campos_512_v4
+101/516804/campos_512_v4
+101/516816/campos_512_v4
+101/516830/campos_512_v4
+101/516850/campos_512_v4
+101/516865/campos_512_v4
+101/516870/campos_512_v4
+101/516871/campos_512_v4
+101/516873/campos_512_v4
+101/516874/campos_512_v4
+101/516875/campos_512_v4
+101/516878/campos_512_v4
+101/516879/campos_512_v4
+101/516887/campos_512_v4
+101/516895/campos_512_v4
+101/516910/campos_512_v4
+101/516911/campos_512_v4
+101/516924/campos_512_v4
+101/516925/campos_512_v4
+101/516926/campos_512_v4
+101/516939/campos_512_v4
+101/516944/campos_512_v4
+101/516946/campos_512_v4
+101/516948/campos_512_v4
+101/516952/campos_512_v4
+101/516958/campos_512_v4
+101/516959/campos_512_v4
+101/516964/campos_512_v4
+101/516965/campos_512_v4
+101/516979/campos_512_v4
+101/516980/campos_512_v4
+101/517002/campos_512_v4
+101/517017/campos_512_v4
+101/517019/campos_512_v4
+101/517024/campos_512_v4
+101/517032/campos_512_v4
+101/517054/campos_512_v4
+101/517065/campos_512_v4
+101/517079/campos_512_v4
+101/517082/campos_512_v4
+101/517085/campos_512_v4
+101/517092/campos_512_v4
+101/517103/campos_512_v4
+101/517110/campos_512_v4
+101/517114/campos_512_v4
+101/517117/campos_512_v4
+101/517121/campos_512_v4
+101/517126/campos_512_v4
+101/517131/campos_512_v4
+101/517134/campos_512_v4
+101/517145/campos_512_v4
+101/517156/campos_512_v4
+101/517170/campos_512_v4
+101/517181/campos_512_v4
+101/517197/campos_512_v4
+101/517211/campos_512_v4
+101/517214/campos_512_v4
+101/517215/campos_512_v4
+101/517223/campos_512_v4
+101/517232/campos_512_v4
+101/517239/campos_512_v4
+101/517251/campos_512_v4
+101/517256/campos_512_v4
+101/517263/campos_512_v4
+101/517266/campos_512_v4
+101/517275/campos_512_v4
+101/517278/campos_512_v4
+101/517281/campos_512_v4
+101/517287/campos_512_v4
+101/517304/campos_512_v4
+101/517320/campos_512_v4
+101/517330/campos_512_v4
+101/517338/campos_512_v4
+101/517339/campos_512_v4
+101/517343/campos_512_v4
+101/517355/campos_512_v4
+101/517374/campos_512_v4
+101/517377/campos_512_v4
+101/517396/campos_512_v4
+101/517400/campos_512_v4
+101/517402/campos_512_v4
+101/517414/campos_512_v4
+101/517421/campos_512_v4
+101/517431/campos_512_v4
+101/517441/campos_512_v4
+101/517450/campos_512_v4
+101/517456/campos_512_v4
+101/517457/campos_512_v4
+101/517459/campos_512_v4
+101/517461/campos_512_v4
+101/517464/campos_512_v4
+101/517474/campos_512_v4
+101/517475/campos_512_v4
+101/517478/campos_512_v4
+101/517480/campos_512_v4
+101/517491/campos_512_v4
+101/517497/campos_512_v4
+101/517499/campos_512_v4
+101/517503/campos_512_v4
+101/517513/campos_512_v4
+101/517514/campos_512_v4
+101/517529/campos_512_v4
+101/517536/campos_512_v4
+101/517540/campos_512_v4
+101/517554/campos_512_v4
+101/517559/campos_512_v4
+101/517562/campos_512_v4
+101/517599/campos_512_v4
+101/517600/campos_512_v4
+101/517602/campos_512_v4
+101/517610/campos_512_v4
+101/517616/campos_512_v4
+101/517620/campos_512_v4
+101/517629/campos_512_v4
+101/517631/campos_512_v4
+101/517638/campos_512_v4
+101/517655/campos_512_v4
+101/517666/campos_512_v4
+101/517667/campos_512_v4
+101/517673/campos_512_v4
+101/517675/campos_512_v4
+101/517687/campos_512_v4
+101/517710/campos_512_v4
+101/517714/campos_512_v4
+101/517715/campos_512_v4
+101/517725/campos_512_v4
+101/517740/campos_512_v4
+101/517748/campos_512_v4
+101/517751/campos_512_v4
+101/517762/campos_512_v4
+101/517775/campos_512_v4
+101/517782/campos_512_v4
+101/517797/campos_512_v4
+101/517806/campos_512_v4
+101/517811/campos_512_v4
+101/517815/campos_512_v4
+101/517817/campos_512_v4
+101/517824/campos_512_v4
+101/517826/campos_512_v4
+101/517829/campos_512_v4
+101/517831/campos_512_v4
+101/517837/campos_512_v4
+101/517840/campos_512_v4
+101/517852/campos_512_v4
+101/517857/campos_512_v4
+101/517873/campos_512_v4
+101/517887/campos_512_v4
+101/517912/campos_512_v4
+101/517922/campos_512_v4
+101/517934/campos_512_v4
+101/517946/campos_512_v4
+101/517956/campos_512_v4
+101/517958/campos_512_v4
+101/517964/campos_512_v4
+101/517977/campos_512_v4
+101/517987/campos_512_v4
+101/517988/campos_512_v4
+101/517995/campos_512_v4
+101/518003/campos_512_v4
+101/518016/campos_512_v4
+101/518049/campos_512_v4
+101/518055/campos_512_v4
+101/518062/campos_512_v4
+101/518066/campos_512_v4
+101/518067/campos_512_v4
+101/518070/campos_512_v4
+101/518075/campos_512_v4
+101/518078/campos_512_v4
+101/518093/campos_512_v4
+101/518098/campos_512_v4
+101/518119/campos_512_v4
+101/518129/campos_512_v4
+101/518146/campos_512_v4
+101/518158/campos_512_v4
+101/518169/campos_512_v4
+101/518202/campos_512_v4
+101/518211/campos_512_v4
+101/518227/campos_512_v4
+101/518231/campos_512_v4
+101/518236/campos_512_v4
+101/518242/campos_512_v4
+101/518250/campos_512_v4
+101/518254/campos_512_v4
+101/518262/campos_512_v4
+101/518265/campos_512_v4
+101/518269/campos_512_v4
+101/518278/campos_512_v4
+101/518303/campos_512_v4
+101/518305/campos_512_v4
+101/518314/campos_512_v4
+101/518321/campos_512_v4
+101/518330/campos_512_v4
+101/518341/campos_512_v4
+101/518348/campos_512_v4
+101/518360/campos_512_v4
+101/518396/campos_512_v4
+101/518397/campos_512_v4
+101/518399/campos_512_v4
+101/518401/campos_512_v4
+101/518412/campos_512_v4
+101/518414/campos_512_v4
+101/518425/campos_512_v4
+101/518444/campos_512_v4
+101/518447/campos_512_v4
+101/518456/campos_512_v4
+101/518458/campos_512_v4
+101/518462/campos_512_v4
+101/518469/campos_512_v4
+101/518477/campos_512_v4
+101/518480/campos_512_v4
+101/518485/campos_512_v4
+101/518486/campos_512_v4
+101/518495/campos_512_v4
+101/518507/campos_512_v4
+101/518531/campos_512_v4
+101/518539/campos_512_v4
+101/518540/campos_512_v4
+101/518544/campos_512_v4
+101/518549/campos_512_v4
+101/518554/campos_512_v4
+101/518564/campos_512_v4
+101/518575/campos_512_v4
+101/518600/campos_512_v4
+101/518628/campos_512_v4
+101/518638/campos_512_v4
+101/518642/campos_512_v4
+101/518645/campos_512_v4
+101/518651/campos_512_v4
+101/518652/campos_512_v4
+101/518658/campos_512_v4
+101/518663/campos_512_v4
+101/518667/campos_512_v4
+101/518671/campos_512_v4
+101/518677/campos_512_v4
+101/518686/campos_512_v4
+101/518688/campos_512_v4
+101/518697/campos_512_v4
+101/518701/campos_512_v4
+101/518709/campos_512_v4
+101/518710/campos_512_v4
+101/518717/campos_512_v4
+101/518718/campos_512_v4
+101/518732/campos_512_v4
+101/518744/campos_512_v4
+101/518759/campos_512_v4
+101/518768/campos_512_v4
+101/518773/campos_512_v4
+101/518774/campos_512_v4
+101/518788/campos_512_v4
+101/518799/campos_512_v4
+101/518801/campos_512_v4
+101/518812/campos_512_v4
+101/518819/campos_512_v4
+101/518823/campos_512_v4
+101/518836/campos_512_v4
+101/518864/campos_512_v4
+101/518865/campos_512_v4
+101/518866/campos_512_v4
+101/518868/campos_512_v4
+101/518872/campos_512_v4
+101/518888/campos_512_v4
+101/518893/campos_512_v4
+101/518904/campos_512_v4
+101/518912/campos_512_v4
+101/518913/campos_512_v4
+101/518923/campos_512_v4
+101/518945/campos_512_v4
+101/518956/campos_512_v4
+101/518961/campos_512_v4
+101/518963/campos_512_v4
+101/518969/campos_512_v4
+101/518970/campos_512_v4
+101/518976/campos_512_v4
+101/518978/campos_512_v4
+101/518986/campos_512_v4
+101/519000/campos_512_v4
+101/519012/campos_512_v4
+101/519015/campos_512_v4
+101/519022/campos_512_v4
+101/519031/campos_512_v4
+101/519038/campos_512_v4
+101/519040/campos_512_v4
+101/519045/campos_512_v4
+101/519047/campos_512_v4
+101/519053/campos_512_v4
+101/519070/campos_512_v4
+101/519072/campos_512_v4
+101/519081/campos_512_v4
+101/519098/campos_512_v4
+101/519101/campos_512_v4
+101/519105/campos_512_v4
+101/519106/campos_512_v4
+101/519109/campos_512_v4
+101/519128/campos_512_v4
+101/519132/campos_512_v4
+101/519140/campos_512_v4
+101/519148/campos_512_v4
+101/519160/campos_512_v4
+101/519162/campos_512_v4
+101/519163/campos_512_v4
+101/519166/campos_512_v4
+101/519172/campos_512_v4
+101/519175/campos_512_v4
+101/519187/campos_512_v4
+101/519194/campos_512_v4
+101/519206/campos_512_v4
+101/519216/campos_512_v4
+101/519220/campos_512_v4
+101/519228/campos_512_v4
+101/519245/campos_512_v4
+101/519251/campos_512_v4
+101/519253/campos_512_v4
+101/519257/campos_512_v4
+101/519263/campos_512_v4
+101/519268/campos_512_v4
+101/519270/campos_512_v4
+101/519273/campos_512_v4
+101/519274/campos_512_v4
+101/519275/campos_512_v4
+101/519276/campos_512_v4
+101/519278/campos_512_v4
+101/519280/campos_512_v4
+101/519281/campos_512_v4
+101/519286/campos_512_v4
+101/519289/campos_512_v4
+101/519290/campos_512_v4
+101/519297/campos_512_v4
+101/519304/campos_512_v4
+101/519310/campos_512_v4
+101/519319/campos_512_v4
+101/519320/campos_512_v4
+101/519330/campos_512_v4
+101/519336/campos_512_v4
+101/519339/campos_512_v4
+101/519341/campos_512_v4
+101/519344/campos_512_v4
+101/519356/campos_512_v4
+101/519361/campos_512_v4
+101/519376/campos_512_v4
+101/519377/campos_512_v4
+101/519378/campos_512_v4
+101/519382/campos_512_v4
+101/519395/campos_512_v4
+101/519396/campos_512_v4
+101/519402/campos_512_v4
+101/519407/campos_512_v4
+101/519416/campos_512_v4
+101/519424/campos_512_v4
+101/519426/campos_512_v4
+101/519432/campos_512_v4
+101/519437/campos_512_v4
+101/519438/campos_512_v4
+101/519442/campos_512_v4
+101/519463/campos_512_v4
+101/519464/campos_512_v4
+101/519466/campos_512_v4
+101/519478/campos_512_v4
+101/519497/campos_512_v4
+101/519498/campos_512_v4
+101/519514/campos_512_v4
+101/519528/campos_512_v4
+101/519532/campos_512_v4
+101/519534/campos_512_v4
+101/519540/campos_512_v4
+101/519569/campos_512_v4
+101/519575/campos_512_v4
+101/519578/campos_512_v4
+101/519585/campos_512_v4
+101/519606/campos_512_v4
+101/519611/campos_512_v4
+101/519618/campos_512_v4
+101/519623/campos_512_v4
+101/519670/campos_512_v4
+101/519671/campos_512_v4
+101/519687/campos_512_v4
+101/519691/campos_512_v4
+101/519696/campos_512_v4
+101/519707/campos_512_v4
+101/519712/campos_512_v4
+101/519716/campos_512_v4
+101/519720/campos_512_v4
+101/519722/campos_512_v4
+101/519728/campos_512_v4
+101/519731/campos_512_v4
+101/519734/campos_512_v4
+101/519753/campos_512_v4
+101/519760/campos_512_v4
+101/519769/campos_512_v4
+101/519771/campos_512_v4
+101/519777/campos_512_v4
+101/519778/campos_512_v4
+101/519788/campos_512_v4
+101/519796/campos_512_v4
+101/519809/campos_512_v4
+101/519810/campos_512_v4
+101/519812/campos_512_v4
+101/519813/campos_512_v4
+101/519841/campos_512_v4
+101/519854/campos_512_v4
+101/519857/campos_512_v4
+101/519872/campos_512_v4
+101/519883/campos_512_v4
+101/519887/campos_512_v4
+101/519888/campos_512_v4
+101/519901/campos_512_v4
+101/519912/campos_512_v4
+101/519922/campos_512_v4
+101/519931/campos_512_v4
+101/519935/campos_512_v4
+101/519945/campos_512_v4
+101/519949/campos_512_v4
+101/519953/campos_512_v4
+101/519954/campos_512_v4
+101/519961/campos_512_v4
+101/519962/campos_512_v4
+101/519992/campos_512_v4
+102/520003/campos_512_v4
+102/520012/campos_512_v4
+102/520015/campos_512_v4
+102/520032/campos_512_v4
+102/520034/campos_512_v4
+102/520037/campos_512_v4
+102/520060/campos_512_v4
+102/520066/campos_512_v4
+102/520073/campos_512_v4
+102/520080/campos_512_v4
+102/520082/campos_512_v4
+102/520119/campos_512_v4
+102/520141/campos_512_v4
+102/520149/campos_512_v4
+102/520157/campos_512_v4
+102/520160/campos_512_v4
+102/520174/campos_512_v4
+102/520180/campos_512_v4
+102/520186/campos_512_v4
+102/520187/campos_512_v4
+102/520188/campos_512_v4
+102/520200/campos_512_v4
+102/520228/campos_512_v4
+102/520231/campos_512_v4
+102/520247/campos_512_v4
+102/520263/campos_512_v4
+102/520279/campos_512_v4
+102/520281/campos_512_v4
+102/520294/campos_512_v4
+102/520296/campos_512_v4
+102/520297/campos_512_v4
+102/520298/campos_512_v4
+102/520305/campos_512_v4
+102/520306/campos_512_v4
+102/520310/campos_512_v4
+102/520311/campos_512_v4
+102/520319/campos_512_v4
+102/520322/campos_512_v4
+102/520327/campos_512_v4
+102/520335/campos_512_v4
+102/520337/campos_512_v4
+102/520346/campos_512_v4
+102/520348/campos_512_v4
+102/520352/campos_512_v4
+102/520357/campos_512_v4
+102/520359/campos_512_v4
+102/520382/campos_512_v4
+102/520400/campos_512_v4
+102/520403/campos_512_v4
+102/520407/campos_512_v4
+102/520423/campos_512_v4
+102/520424/campos_512_v4
+102/520426/campos_512_v4
+102/520427/campos_512_v4
+102/520446/campos_512_v4
+102/520450/campos_512_v4
+102/520469/campos_512_v4
+102/520473/campos_512_v4
+102/520496/campos_512_v4
+102/520503/campos_512_v4
+102/520520/campos_512_v4
+102/520522/campos_512_v4
+102/520525/campos_512_v4
+102/520529/campos_512_v4
+102/520533/campos_512_v4
+102/520539/campos_512_v4
+102/520543/campos_512_v4
+102/520549/campos_512_v4
+102/520550/campos_512_v4
+102/520555/campos_512_v4
+102/520557/campos_512_v4
+102/520566/campos_512_v4
+102/520569/campos_512_v4
+102/520576/campos_512_v4
+102/520584/campos_512_v4
+102/520591/campos_512_v4
+102/520596/campos_512_v4
+102/520599/campos_512_v4
+102/520601/campos_512_v4
+102/520606/campos_512_v4
+102/520613/campos_512_v4
+102/520629/campos_512_v4
+102/520632/campos_512_v4
+102/520633/campos_512_v4
+102/520640/campos_512_v4
+102/520648/campos_512_v4
+102/520671/campos_512_v4
+102/520673/campos_512_v4
+102/520677/campos_512_v4
+102/520680/campos_512_v4
+102/520690/campos_512_v4
+102/520709/campos_512_v4
+102/520712/campos_512_v4
+102/520717/campos_512_v4
+102/520731/campos_512_v4
+102/520740/campos_512_v4
+102/520768/campos_512_v4
+102/520770/campos_512_v4
+102/520771/campos_512_v4
+102/520791/campos_512_v4
+102/520796/campos_512_v4
+102/520798/campos_512_v4
+102/520803/campos_512_v4
+102/520809/campos_512_v4
+102/520813/campos_512_v4
+102/520815/campos_512_v4
+102/520817/campos_512_v4
+102/520827/campos_512_v4
+102/520830/campos_512_v4
+102/520834/campos_512_v4
+102/520838/campos_512_v4
+102/520840/campos_512_v4
+102/520848/campos_512_v4
+102/520849/campos_512_v4
+102/520860/campos_512_v4
+102/520866/campos_512_v4
+102/520868/campos_512_v4
+102/520871/campos_512_v4
+102/520872/campos_512_v4
+102/520878/campos_512_v4
+102/520882/campos_512_v4
+102/520892/campos_512_v4
+102/520893/campos_512_v4
+102/520897/campos_512_v4
+102/520907/campos_512_v4
+102/520908/campos_512_v4
+102/520917/campos_512_v4
+102/520920/campos_512_v4
+102/520926/campos_512_v4
+102/520928/campos_512_v4
+102/520930/campos_512_v4
+102/520944/campos_512_v4
+102/520961/campos_512_v4
+102/520967/campos_512_v4
+102/520972/campos_512_v4
+102/520973/campos_512_v4
+102/520978/campos_512_v4
+102/520979/campos_512_v4
+102/520981/campos_512_v4
+102/520983/campos_512_v4
+102/520987/campos_512_v4
+102/520995/campos_512_v4
+102/521004/campos_512_v4
+102/521013/campos_512_v4
+102/521018/campos_512_v4
+102/521020/campos_512_v4
+102/521021/campos_512_v4
+102/521022/campos_512_v4
+102/521033/campos_512_v4
+102/521034/campos_512_v4
+102/521037/campos_512_v4
+102/521041/campos_512_v4
+102/521050/campos_512_v4
+102/521068/campos_512_v4
+102/521082/campos_512_v4
+102/521083/campos_512_v4
+102/521087/campos_512_v4
+102/521092/campos_512_v4
+102/521103/campos_512_v4
+102/521115/campos_512_v4
+102/521139/campos_512_v4
+102/521148/campos_512_v4
+102/521155/campos_512_v4
+102/521158/campos_512_v4
+102/521163/campos_512_v4
+102/521169/campos_512_v4
+102/521173/campos_512_v4
+102/521175/campos_512_v4
+102/521187/campos_512_v4
+102/521198/campos_512_v4
+102/521202/campos_512_v4
+102/521210/campos_512_v4
+102/521218/campos_512_v4
+102/521224/campos_512_v4
+102/521230/campos_512_v4
+102/521241/campos_512_v4
+102/521244/campos_512_v4
+102/521246/campos_512_v4
+102/521248/campos_512_v4
+102/521250/campos_512_v4
+102/521270/campos_512_v4
+102/521289/campos_512_v4
+102/521293/campos_512_v4
+102/521305/campos_512_v4
+102/521307/campos_512_v4
+102/521312/campos_512_v4
+102/521319/campos_512_v4
+102/521323/campos_512_v4
+102/521324/campos_512_v4
+102/521335/campos_512_v4
+102/521355/campos_512_v4
+102/521359/campos_512_v4
+102/521360/campos_512_v4
+102/521376/campos_512_v4
+102/521377/campos_512_v4
+102/521393/campos_512_v4
+102/521414/campos_512_v4
+102/521433/campos_512_v4
+102/521437/campos_512_v4
+102/521444/campos_512_v4
+102/521445/campos_512_v4
+102/521455/campos_512_v4
+102/521480/campos_512_v4
+102/521485/campos_512_v4
+102/521488/campos_512_v4
+102/521489/campos_512_v4
+102/521492/campos_512_v4
+102/521493/campos_512_v4
+102/521495/campos_512_v4
+102/521499/campos_512_v4
+102/521504/campos_512_v4
+102/521506/campos_512_v4
+102/521519/campos_512_v4
+102/521529/campos_512_v4
+102/521540/campos_512_v4
+102/521563/campos_512_v4
+102/521565/campos_512_v4
+102/521579/campos_512_v4
+102/521585/campos_512_v4
+102/521604/campos_512_v4
+102/521608/campos_512_v4
+102/521640/campos_512_v4
+102/521641/campos_512_v4
+102/521642/campos_512_v4
+102/521648/campos_512_v4
+102/521655/campos_512_v4
+102/521667/campos_512_v4
+102/521696/campos_512_v4
+102/521700/campos_512_v4
+102/521707/campos_512_v4
+102/521713/campos_512_v4
+102/521714/campos_512_v4
+102/521722/campos_512_v4
+102/521729/campos_512_v4
+102/521738/campos_512_v4
+102/521759/campos_512_v4
+102/521773/campos_512_v4
+102/521778/campos_512_v4
+102/521780/campos_512_v4
+102/521806/campos_512_v4
+102/521812/campos_512_v4
+102/521828/campos_512_v4
+102/521830/campos_512_v4
+102/521832/campos_512_v4
+102/521841/campos_512_v4
+102/521842/campos_512_v4
+102/521884/campos_512_v4
+102/521886/campos_512_v4
+102/521889/campos_512_v4
+102/521894/campos_512_v4
+102/521899/campos_512_v4
+102/521903/campos_512_v4
+102/521911/campos_512_v4
+102/521922/campos_512_v4
+102/521936/campos_512_v4
+102/521938/campos_512_v4
+102/521942/campos_512_v4
+102/521943/campos_512_v4
+102/521954/campos_512_v4
+102/521957/campos_512_v4
+102/521959/campos_512_v4
+102/521961/campos_512_v4
+102/521962/campos_512_v4
+102/521966/campos_512_v4
+102/521967/campos_512_v4
+102/521972/campos_512_v4
+102/521986/campos_512_v4
+102/521997/campos_512_v4
+102/522016/campos_512_v4
+102/522026/campos_512_v4
+102/522032/campos_512_v4
+102/522037/campos_512_v4
+102/522050/campos_512_v4
+102/522051/campos_512_v4
+102/522053/campos_512_v4
+102/522063/campos_512_v4
+102/522064/campos_512_v4
+102/522077/campos_512_v4
+102/522080/campos_512_v4
+102/522110/campos_512_v4
+102/522124/campos_512_v4
+102/522130/campos_512_v4
+102/522141/campos_512_v4
+102/522147/campos_512_v4
+102/522160/campos_512_v4
+102/522168/campos_512_v4
+102/522180/campos_512_v4
+102/522188/campos_512_v4
+102/522199/campos_512_v4
+102/522203/campos_512_v4
+102/522218/campos_512_v4
+102/522220/campos_512_v4
+102/522224/campos_512_v4
+102/522225/campos_512_v4
+102/522231/campos_512_v4
+102/522238/campos_512_v4
+102/522240/campos_512_v4
+102/522251/campos_512_v4
+102/522265/campos_512_v4
+102/522271/campos_512_v4
+102/522274/campos_512_v4
+102/522277/campos_512_v4
+102/522295/campos_512_v4
+102/522304/campos_512_v4
+102/522307/campos_512_v4
+102/522308/campos_512_v4
+102/522309/campos_512_v4
+102/522312/campos_512_v4
+102/522317/campos_512_v4
+102/522321/campos_512_v4
+102/522333/campos_512_v4
+102/522343/campos_512_v4
+102/522347/campos_512_v4
+102/522348/campos_512_v4
+102/522366/campos_512_v4
+102/522371/campos_512_v4
+102/522373/campos_512_v4
+102/522375/campos_512_v4
+102/522387/campos_512_v4
+102/522408/campos_512_v4
+102/522409/campos_512_v4
+102/522412/campos_512_v4
+102/522425/campos_512_v4
+102/522429/campos_512_v4
+102/522433/campos_512_v4
+102/522451/campos_512_v4
+102/522452/campos_512_v4
+102/522453/campos_512_v4
+102/522455/campos_512_v4
+102/522482/campos_512_v4
+102/522485/campos_512_v4
+102/522497/campos_512_v4
+102/522518/campos_512_v4
+102/522519/campos_512_v4
+102/522521/campos_512_v4
+102/522528/campos_512_v4
+102/522529/campos_512_v4
+102/522539/campos_512_v4
+102/522546/campos_512_v4
+102/522549/campos_512_v4
+102/522555/campos_512_v4
+102/522557/campos_512_v4
+102/522558/campos_512_v4
+102/522563/campos_512_v4
+102/522567/campos_512_v4
+102/522577/campos_512_v4
+102/522608/campos_512_v4
+102/522615/campos_512_v4
+102/522628/campos_512_v4
+102/522641/campos_512_v4
+102/522643/campos_512_v4
+102/522659/campos_512_v4
+102/522683/campos_512_v4
+102/522723/campos_512_v4
+102/522739/campos_512_v4
+102/522748/campos_512_v4
+102/522767/campos_512_v4
+102/522773/campos_512_v4
+102/522782/campos_512_v4
+102/522786/campos_512_v4
+102/522788/campos_512_v4
+102/522790/campos_512_v4
+102/522791/campos_512_v4
+102/522794/campos_512_v4
+102/522801/campos_512_v4
+102/522802/campos_512_v4
+102/522808/campos_512_v4
+102/522811/campos_512_v4
+102/522816/campos_512_v4
+102/522827/campos_512_v4
+102/522833/campos_512_v4
+102/522838/campos_512_v4
+102/522841/campos_512_v4
+102/522848/campos_512_v4
+102/522860/campos_512_v4
+102/522863/campos_512_v4
+102/522874/campos_512_v4
+102/522878/campos_512_v4
+102/522881/campos_512_v4
+102/522883/campos_512_v4
+102/522893/campos_512_v4
+102/522911/campos_512_v4
+102/522915/campos_512_v4
+102/522924/campos_512_v4
+102/522927/campos_512_v4
+102/522941/campos_512_v4
+102/522966/campos_512_v4
+102/522969/campos_512_v4
+102/523005/campos_512_v4
+102/523010/campos_512_v4
+102/523013/campos_512_v4
+102/523024/campos_512_v4
+102/523029/campos_512_v4
+102/523032/campos_512_v4
+102/523034/campos_512_v4
+102/523038/campos_512_v4
+102/523042/campos_512_v4
+102/523048/campos_512_v4
+102/523055/campos_512_v4
+102/523057/campos_512_v4
+102/523072/campos_512_v4
+102/523079/campos_512_v4
+102/523087/campos_512_v4
+102/523095/campos_512_v4
+102/523099/campos_512_v4
+102/523116/campos_512_v4
+102/523117/campos_512_v4
+102/523125/campos_512_v4
+102/523129/campos_512_v4
+102/523144/campos_512_v4
+102/523147/campos_512_v4
+102/523151/campos_512_v4
+102/523153/campos_512_v4
+102/523163/campos_512_v4
+102/523172/campos_512_v4
+102/523175/campos_512_v4
+102/523190/campos_512_v4
+102/523207/campos_512_v4
+102/523212/campos_512_v4
+102/523213/campos_512_v4
+102/523214/campos_512_v4
+102/523216/campos_512_v4
+102/523217/campos_512_v4
+102/523222/campos_512_v4
+102/523223/campos_512_v4
+102/523230/campos_512_v4
+102/523231/campos_512_v4
+102/523241/campos_512_v4
+102/523243/campos_512_v4
+102/523244/campos_512_v4
+102/523257/campos_512_v4
+102/523274/campos_512_v4
+102/523275/campos_512_v4
+102/523278/campos_512_v4
+102/523286/campos_512_v4
+102/523310/campos_512_v4
+102/523317/campos_512_v4
+102/523320/campos_512_v4
+102/523322/campos_512_v4
+102/523324/campos_512_v4
+102/523326/campos_512_v4
+102/523335/campos_512_v4
+102/523345/campos_512_v4
+102/523350/campos_512_v4
+102/523352/campos_512_v4
+102/523358/campos_512_v4
+102/523363/campos_512_v4
+102/523369/campos_512_v4
+102/523370/campos_512_v4
+102/523371/campos_512_v4
+102/523373/campos_512_v4
+102/523380/campos_512_v4
+102/523386/campos_512_v4
+102/523388/campos_512_v4
+102/523392/campos_512_v4
+102/523401/campos_512_v4
+102/523416/campos_512_v4
+102/523417/campos_512_v4
+102/523454/campos_512_v4
+102/523455/campos_512_v4
+102/523456/campos_512_v4
+102/523461/campos_512_v4
+102/523476/campos_512_v4
+102/523478/campos_512_v4
+102/523479/campos_512_v4
+102/523483/campos_512_v4
+102/523484/campos_512_v4
+102/523487/campos_512_v4
+102/523491/campos_512_v4
+102/523509/campos_512_v4
+102/523510/campos_512_v4
+102/523520/campos_512_v4
+102/523521/campos_512_v4
+102/523524/campos_512_v4
+102/523544/campos_512_v4
+102/523552/campos_512_v4
+102/523555/campos_512_v4
+102/523562/campos_512_v4
+102/523564/campos_512_v4
+102/523567/campos_512_v4
+102/523583/campos_512_v4
+102/523587/campos_512_v4
+102/523596/campos_512_v4
+102/523597/campos_512_v4
+102/523636/campos_512_v4
+102/523656/campos_512_v4
+102/523668/campos_512_v4
+102/523670/campos_512_v4
+102/523674/campos_512_v4
+102/523689/campos_512_v4
+102/523701/campos_512_v4
+102/523709/campos_512_v4
+102/523710/campos_512_v4
+102/523717/campos_512_v4
+102/523731/campos_512_v4
+102/523744/campos_512_v4
+102/523745/campos_512_v4
+102/523754/campos_512_v4
+102/523773/campos_512_v4
+102/523785/campos_512_v4
+102/523803/campos_512_v4
+102/523812/campos_512_v4
+102/523834/campos_512_v4
+102/523839/campos_512_v4
+102/523844/campos_512_v4
+102/523849/campos_512_v4
+102/523858/campos_512_v4
+102/523862/campos_512_v4
+102/523868/campos_512_v4
+102/523874/campos_512_v4
+102/523876/campos_512_v4
+102/523877/campos_512_v4
+102/523880/campos_512_v4
+102/523881/campos_512_v4
+102/523885/campos_512_v4
+102/523886/campos_512_v4
+102/523918/campos_512_v4
+102/523927/campos_512_v4
+102/523929/campos_512_v4
+102/523936/campos_512_v4
+102/523938/campos_512_v4
+102/523944/campos_512_v4
+102/523952/campos_512_v4
+102/523961/campos_512_v4
+102/523967/campos_512_v4
+102/523976/campos_512_v4
+102/523978/campos_512_v4
+102/523982/campos_512_v4
+102/523986/campos_512_v4
+102/523999/campos_512_v4
+102/524019/campos_512_v4
+102/524027/campos_512_v4
+102/524041/campos_512_v4
+102/524043/campos_512_v4
+102/524047/campos_512_v4
+102/524050/campos_512_v4
+102/524057/campos_512_v4
+102/524060/campos_512_v4
+102/524065/campos_512_v4
+102/524066/campos_512_v4
+102/524069/campos_512_v4
+102/524103/campos_512_v4
+102/524108/campos_512_v4
+102/524112/campos_512_v4
+102/524118/campos_512_v4
+102/524121/campos_512_v4
+102/524126/campos_512_v4
+102/524137/campos_512_v4
+102/524147/campos_512_v4
+102/524167/campos_512_v4
+102/524171/campos_512_v4
+102/524181/campos_512_v4
+102/524210/campos_512_v4
+102/524214/campos_512_v4
+102/524217/campos_512_v4
+102/524227/campos_512_v4
+102/524232/campos_512_v4
+102/524236/campos_512_v4
+102/524260/campos_512_v4
+102/524264/campos_512_v4
+102/524270/campos_512_v4
+102/524285/campos_512_v4
+102/524294/campos_512_v4
+102/524298/campos_512_v4
+102/524306/campos_512_v4
+102/524315/campos_512_v4
+102/524322/campos_512_v4
+102/524325/campos_512_v4
+102/524329/campos_512_v4
+102/524346/campos_512_v4
+102/524360/campos_512_v4
+102/524361/campos_512_v4
+102/524364/campos_512_v4
+102/524365/campos_512_v4
+102/524369/campos_512_v4
+102/524374/campos_512_v4
+102/524376/campos_512_v4
+102/524395/campos_512_v4
+102/524399/campos_512_v4
+102/524401/campos_512_v4
+102/524406/campos_512_v4
+102/524407/campos_512_v4
+102/524409/campos_512_v4
+102/524414/campos_512_v4
+102/524417/campos_512_v4
+102/524419/campos_512_v4
+102/524421/campos_512_v4
+102/524424/campos_512_v4
+102/524428/campos_512_v4
+102/524431/campos_512_v4
+102/524439/campos_512_v4
+102/524444/campos_512_v4
+102/524447/campos_512_v4
+102/524448/campos_512_v4
+102/524449/campos_512_v4
+102/524452/campos_512_v4
+102/524487/campos_512_v4
+102/524489/campos_512_v4
+102/524493/campos_512_v4
+102/524494/campos_512_v4
+102/524497/campos_512_v4
+102/524503/campos_512_v4
+102/524507/campos_512_v4
+102/524513/campos_512_v4
+102/524523/campos_512_v4
+102/524532/campos_512_v4
+102/524544/campos_512_v4
+102/524556/campos_512_v4
+102/524564/campos_512_v4
+102/524587/campos_512_v4
+102/524591/campos_512_v4
+102/524604/campos_512_v4
+102/524607/campos_512_v4
+102/524609/campos_512_v4
+102/524619/campos_512_v4
+102/524629/campos_512_v4
+102/524630/campos_512_v4
+102/524642/campos_512_v4
+102/524648/campos_512_v4
+102/524650/campos_512_v4
+102/524659/campos_512_v4
+102/524666/campos_512_v4
+102/524667/campos_512_v4
+102/524673/campos_512_v4
+102/524678/campos_512_v4
+102/524683/campos_512_v4
+102/524686/campos_512_v4
+102/524689/campos_512_v4
+102/524699/campos_512_v4
+102/524701/campos_512_v4
+102/524703/campos_512_v4
+102/524708/campos_512_v4
+102/524712/campos_512_v4
+102/524715/campos_512_v4
+102/524732/campos_512_v4
+102/524740/campos_512_v4
+102/524743/campos_512_v4
+102/524760/campos_512_v4
+102/524774/campos_512_v4
+102/524785/campos_512_v4
+102/524801/campos_512_v4
+102/524807/campos_512_v4
+102/524812/campos_512_v4
+102/524816/campos_512_v4
+102/524821/campos_512_v4
+102/524833/campos_512_v4
+102/524834/campos_512_v4
+102/524840/campos_512_v4
+102/524844/campos_512_v4
+102/524851/campos_512_v4
+102/524853/campos_512_v4
+102/524863/campos_512_v4
+102/524868/campos_512_v4
+102/524874/campos_512_v4
+102/524878/campos_512_v4
+102/524896/campos_512_v4
+102/524902/campos_512_v4
+102/524904/campos_512_v4
+102/524906/campos_512_v4
+102/524909/campos_512_v4
+102/524910/campos_512_v4
+102/524911/campos_512_v4
+102/524919/campos_512_v4
+102/524921/campos_512_v4
+102/524927/campos_512_v4
+102/524930/campos_512_v4
+102/524933/campos_512_v4
+102/524937/campos_512_v4
+102/524941/campos_512_v4
+102/524949/campos_512_v4
+102/524954/campos_512_v4
+102/524955/campos_512_v4
+102/524958/campos_512_v4
+102/524969/campos_512_v4
+102/524975/campos_512_v4
+102/524984/campos_512_v4
+102/524989/campos_512_v4
+103/525004/campos_512_v4
+103/525006/campos_512_v4
+103/525016/campos_512_v4
+103/525021/campos_512_v4
+103/525030/campos_512_v4
+103/525033/campos_512_v4
+103/525060/campos_512_v4
+103/525061/campos_512_v4
+103/525063/campos_512_v4
+103/525073/campos_512_v4
+103/525077/campos_512_v4
+103/525082/campos_512_v4
+103/525084/campos_512_v4
+103/525093/campos_512_v4
+103/525096/campos_512_v4
+103/525098/campos_512_v4
+103/525103/campos_512_v4
+103/525107/campos_512_v4
+103/525118/campos_512_v4
+103/525129/campos_512_v4
+103/525137/campos_512_v4
+103/525153/campos_512_v4
+103/525194/campos_512_v4
+103/525218/campos_512_v4
+103/525222/campos_512_v4
+103/525226/campos_512_v4
+103/525233/campos_512_v4
+103/525235/campos_512_v4
+103/525251/campos_512_v4
+103/525252/campos_512_v4
+103/525254/campos_512_v4
+103/525261/campos_512_v4
+103/525267/campos_512_v4
+103/525270/campos_512_v4
+103/525272/campos_512_v4
+103/525282/campos_512_v4
+103/525288/campos_512_v4
+103/525304/campos_512_v4
+103/525307/campos_512_v4
+103/525308/campos_512_v4
+103/525311/campos_512_v4
+103/525313/campos_512_v4
+103/525357/campos_512_v4
+103/525358/campos_512_v4
+103/525360/campos_512_v4
+103/525370/campos_512_v4
+103/525375/campos_512_v4
+103/525377/campos_512_v4
+103/525380/campos_512_v4
+103/525382/campos_512_v4
+103/525384/campos_512_v4
+103/525403/campos_512_v4
+103/525427/campos_512_v4
+103/525428/campos_512_v4
+103/525440/campos_512_v4
+103/525446/campos_512_v4
+103/525459/campos_512_v4
+103/525460/campos_512_v4
+103/525465/campos_512_v4
+103/525473/campos_512_v4
+103/525484/campos_512_v4
+103/525485/campos_512_v4
+103/525486/campos_512_v4
+103/525517/campos_512_v4
+103/525522/campos_512_v4
+103/525536/campos_512_v4
+103/525548/campos_512_v4
+103/525556/campos_512_v4
+103/525562/campos_512_v4
+103/525574/campos_512_v4
+103/525576/campos_512_v4
+103/525586/campos_512_v4
+103/525601/campos_512_v4
+103/525608/campos_512_v4
+103/525609/campos_512_v4
+103/525611/campos_512_v4
+103/525613/campos_512_v4
+103/525617/campos_512_v4
+103/525638/campos_512_v4
+103/525641/campos_512_v4
+103/525649/campos_512_v4
+103/525658/campos_512_v4
+103/525667/campos_512_v4
+103/525673/campos_512_v4
+103/525676/campos_512_v4
+103/525682/campos_512_v4
+103/525691/campos_512_v4
+103/525698/campos_512_v4
+103/525700/campos_512_v4
+103/525707/campos_512_v4
+103/525709/campos_512_v4
+103/525725/campos_512_v4
+103/525732/campos_512_v4
+103/525747/campos_512_v4
+103/525750/campos_512_v4
+103/525751/campos_512_v4
+103/525752/campos_512_v4
+103/525755/campos_512_v4
+103/525765/campos_512_v4
+103/525774/campos_512_v4
+103/525779/campos_512_v4
+103/525783/campos_512_v4
+103/525791/campos_512_v4
+103/525822/campos_512_v4
+103/525829/campos_512_v4
+103/525831/campos_512_v4
+103/525832/campos_512_v4
+103/525907/campos_512_v4
+103/525908/campos_512_v4
+103/525921/campos_512_v4
+103/525927/campos_512_v4
+103/525930/campos_512_v4
+103/525934/campos_512_v4
+103/525941/campos_512_v4
+103/525944/campos_512_v4
+103/525945/campos_512_v4
+103/525954/campos_512_v4
+103/525958/campos_512_v4
+103/525970/campos_512_v4
+103/525994/campos_512_v4
+103/525995/campos_512_v4
+103/525997/campos_512_v4
+103/526000/campos_512_v4
+103/526003/campos_512_v4
+103/526010/campos_512_v4
+103/526023/campos_512_v4
+103/526029/campos_512_v4
+103/526048/campos_512_v4
+103/526053/campos_512_v4
+103/526062/campos_512_v4
+103/526068/campos_512_v4
+103/526075/campos_512_v4
+103/526082/campos_512_v4
+103/526083/campos_512_v4
+103/526088/campos_512_v4
+103/526104/campos_512_v4
+103/526109/campos_512_v4
+103/526119/campos_512_v4
+103/526134/campos_512_v4
+103/526135/campos_512_v4
+103/526144/campos_512_v4
+103/526154/campos_512_v4
+103/526165/campos_512_v4
+103/526180/campos_512_v4
+103/526187/campos_512_v4
+103/526197/campos_512_v4
+103/526226/campos_512_v4
+103/526231/campos_512_v4
+103/526260/campos_512_v4
+103/526263/campos_512_v4
+103/526270/campos_512_v4
+103/526275/campos_512_v4
+103/526278/campos_512_v4
+103/526308/campos_512_v4
+103/526310/campos_512_v4
+103/526311/campos_512_v4
+103/526313/campos_512_v4
+103/526321/campos_512_v4
+103/526325/campos_512_v4
+103/526327/campos_512_v4
+103/526328/campos_512_v4
+103/526340/campos_512_v4
+103/526341/campos_512_v4
+103/526365/campos_512_v4
+103/526375/campos_512_v4
+103/526378/campos_512_v4
+103/526379/campos_512_v4
+103/526380/campos_512_v4
+103/526382/campos_512_v4
+103/526418/campos_512_v4
+103/526421/campos_512_v4
+103/526453/campos_512_v4
+103/526463/campos_512_v4
+103/526464/campos_512_v4
+103/526467/campos_512_v4
+103/526483/campos_512_v4
+103/526485/campos_512_v4
+103/526486/campos_512_v4
+103/526490/campos_512_v4
+103/526496/campos_512_v4
+103/526504/campos_512_v4
+103/526507/campos_512_v4
+103/526529/campos_512_v4
+103/526530/campos_512_v4
+103/526536/campos_512_v4
+103/526541/campos_512_v4
+103/526545/campos_512_v4
+103/526548/campos_512_v4
+103/526551/campos_512_v4
+103/526563/campos_512_v4
+103/526567/campos_512_v4
+103/526568/campos_512_v4
+103/526571/campos_512_v4
+103/526580/campos_512_v4
+103/526589/campos_512_v4
+103/526605/campos_512_v4
+103/526612/campos_512_v4
+103/526615/campos_512_v4
+103/526628/campos_512_v4
+103/526642/campos_512_v4
+103/526661/campos_512_v4
+103/526677/campos_512_v4
+103/526681/campos_512_v4
+103/526699/campos_512_v4
+103/526706/campos_512_v4
+103/526756/campos_512_v4
+103/526779/campos_512_v4
+103/526781/campos_512_v4
+103/526782/campos_512_v4
+103/526791/campos_512_v4
+103/526813/campos_512_v4
+103/526818/campos_512_v4
+103/526821/campos_512_v4
+103/526824/campos_512_v4
+103/526827/campos_512_v4
+103/526843/campos_512_v4
+103/526859/campos_512_v4
+103/526863/campos_512_v4
+103/526864/campos_512_v4
+103/526876/campos_512_v4
+103/526891/campos_512_v4
+103/526892/campos_512_v4
+103/526893/campos_512_v4
+103/526917/campos_512_v4
+103/526924/campos_512_v4
+103/526931/campos_512_v4
+103/526945/campos_512_v4
+103/526947/campos_512_v4
+103/526951/campos_512_v4
+103/526960/campos_512_v4
+103/526969/campos_512_v4
+103/526990/campos_512_v4
+103/526994/campos_512_v4
+103/527007/campos_512_v4
+103/527008/campos_512_v4
+103/527009/campos_512_v4
+103/527011/campos_512_v4
+103/527014/campos_512_v4
+103/527023/campos_512_v4
+103/527035/campos_512_v4
+103/527039/campos_512_v4
+103/527040/campos_512_v4
+103/527043/campos_512_v4
+103/527066/campos_512_v4
+103/527070/campos_512_v4
+103/527081/campos_512_v4
+103/527088/campos_512_v4
+103/527097/campos_512_v4
+103/527112/campos_512_v4
+103/527114/campos_512_v4
+103/527139/campos_512_v4
+103/527142/campos_512_v4
+103/527143/campos_512_v4
+103/527145/campos_512_v4
+103/527148/campos_512_v4
+103/527180/campos_512_v4
+103/527188/campos_512_v4
+103/527189/campos_512_v4
+103/527191/campos_512_v4
+103/527192/campos_512_v4
+103/527197/campos_512_v4
+103/527204/campos_512_v4
+103/527225/campos_512_v4
+103/527235/campos_512_v4
+103/527237/campos_512_v4
+103/527241/campos_512_v4
+103/527249/campos_512_v4
+103/527259/campos_512_v4
+103/527264/campos_512_v4
+103/527266/campos_512_v4
+103/527270/campos_512_v4
+103/527273/campos_512_v4
+103/527275/campos_512_v4
+103/527280/campos_512_v4
+103/527281/campos_512_v4
+103/527288/campos_512_v4
+103/527289/campos_512_v4
+103/527301/campos_512_v4
+103/527306/campos_512_v4
+103/527313/campos_512_v4
+103/527325/campos_512_v4
+103/527328/campos_512_v4
+103/527329/campos_512_v4
+103/527330/campos_512_v4
+103/527331/campos_512_v4
+103/527340/campos_512_v4
+103/527341/campos_512_v4
+103/527342/campos_512_v4
+103/527346/campos_512_v4
+103/527365/campos_512_v4
+103/527381/campos_512_v4
+103/527394/campos_512_v4
+103/527397/campos_512_v4
+103/527406/campos_512_v4
+103/527408/campos_512_v4
+103/527410/campos_512_v4
+103/527426/campos_512_v4
+103/527435/campos_512_v4
+103/527442/campos_512_v4
+103/527460/campos_512_v4
+103/527463/campos_512_v4
+103/527484/campos_512_v4
+103/527489/campos_512_v4
+103/527499/campos_512_v4
+103/527507/campos_512_v4
+103/527533/campos_512_v4
+103/527536/campos_512_v4
+103/527541/campos_512_v4
+103/527554/campos_512_v4
+103/527555/campos_512_v4
+103/527582/campos_512_v4
+103/527592/campos_512_v4
+103/527612/campos_512_v4
+103/527618/campos_512_v4
+103/527620/campos_512_v4
+103/527626/campos_512_v4
+103/527639/campos_512_v4
+103/527649/campos_512_v4
+103/527650/campos_512_v4
+103/527665/campos_512_v4
+103/527669/campos_512_v4
+103/527670/campos_512_v4
+103/527671/campos_512_v4
+103/527677/campos_512_v4
+103/527685/campos_512_v4
+103/527690/campos_512_v4
+103/527692/campos_512_v4
+103/527698/campos_512_v4
+103/527701/campos_512_v4
+103/527710/campos_512_v4
+103/527719/campos_512_v4
+103/527723/campos_512_v4
+103/527729/campos_512_v4
+103/527733/campos_512_v4
+103/527739/campos_512_v4
+103/527740/campos_512_v4
+103/527746/campos_512_v4
+103/527759/campos_512_v4
+103/527762/campos_512_v4
+103/527766/campos_512_v4
+103/527770/campos_512_v4
+103/527775/campos_512_v4
+103/527785/campos_512_v4
+103/527793/campos_512_v4
+103/527811/campos_512_v4
+103/527812/campos_512_v4
+103/527818/campos_512_v4
+103/527820/campos_512_v4
+103/527822/campos_512_v4
+103/527826/campos_512_v4
+103/527827/campos_512_v4
+103/527830/campos_512_v4
+103/527860/campos_512_v4
+103/527865/campos_512_v4
+103/527869/campos_512_v4
+103/527871/campos_512_v4
+103/527881/campos_512_v4
+103/527882/campos_512_v4
+103/527883/campos_512_v4
+103/527885/campos_512_v4
+103/527889/campos_512_v4
+103/527893/campos_512_v4
+103/527896/campos_512_v4
+103/527902/campos_512_v4
+103/527908/campos_512_v4
+103/527923/campos_512_v4
+103/527932/campos_512_v4
+103/527934/campos_512_v4
+103/527959/campos_512_v4
+103/527982/campos_512_v4
+103/527990/campos_512_v4
+103/528003/campos_512_v4
+103/528008/campos_512_v4
+103/528018/campos_512_v4
+103/528019/campos_512_v4
+103/528027/campos_512_v4
+103/528030/campos_512_v4
+103/528031/campos_512_v4
+103/528039/campos_512_v4
+103/528040/campos_512_v4
+103/528045/campos_512_v4
+103/528077/campos_512_v4
+103/528079/campos_512_v4
+103/528097/campos_512_v4
+103/528113/campos_512_v4
+103/528139/campos_512_v4
+103/528141/campos_512_v4
+103/528154/campos_512_v4
+103/528190/campos_512_v4
+103/528195/campos_512_v4
+103/528209/campos_512_v4
+103/528212/campos_512_v4
+103/528215/campos_512_v4
+103/528225/campos_512_v4
+103/528237/campos_512_v4
+103/528238/campos_512_v4
+103/528240/campos_512_v4
+103/528251/campos_512_v4
+103/528254/campos_512_v4
+103/528255/campos_512_v4
+103/528290/campos_512_v4
+103/528296/campos_512_v4
+103/528308/campos_512_v4
+103/528319/campos_512_v4
+103/528320/campos_512_v4
+103/528326/campos_512_v4
+103/528343/campos_512_v4
+103/528352/campos_512_v4
+103/528361/campos_512_v4
+103/528364/campos_512_v4
+103/528365/campos_512_v4
+103/528369/campos_512_v4
+103/528386/campos_512_v4
+103/528390/campos_512_v4
+103/528394/campos_512_v4
+103/528398/campos_512_v4
+103/528399/campos_512_v4
+103/528421/campos_512_v4
+103/528465/campos_512_v4
+103/528471/campos_512_v4
+103/528476/campos_512_v4
+103/528478/campos_512_v4
+103/528487/campos_512_v4
+103/528500/campos_512_v4
+103/528502/campos_512_v4
+103/528506/campos_512_v4
+103/528509/campos_512_v4
+103/528515/campos_512_v4
+103/528530/campos_512_v4
+103/528532/campos_512_v4
+103/528537/campos_512_v4
+103/528542/campos_512_v4
+103/528544/campos_512_v4
+103/528581/campos_512_v4
+103/528584/campos_512_v4
+103/528592/campos_512_v4
+103/528597/campos_512_v4
+103/528611/campos_512_v4
+103/528614/campos_512_v4
+103/528615/campos_512_v4
+103/528625/campos_512_v4
+103/528626/campos_512_v4
+103/528633/campos_512_v4
+103/528642/campos_512_v4
+103/528649/campos_512_v4
+103/528661/campos_512_v4
+103/528677/campos_512_v4
+103/528685/campos_512_v4
+103/528686/campos_512_v4
+103/528698/campos_512_v4
+103/528717/campos_512_v4
+103/528726/campos_512_v4
+103/528745/campos_512_v4
+103/528752/campos_512_v4
+103/528761/campos_512_v4
+103/528765/campos_512_v4
+103/528782/campos_512_v4
+103/528783/campos_512_v4
+103/528786/campos_512_v4
+103/528803/campos_512_v4
+103/528808/campos_512_v4
+103/528813/campos_512_v4
+103/528828/campos_512_v4
+103/528833/campos_512_v4
+103/528842/campos_512_v4
+103/528845/campos_512_v4
+103/528853/campos_512_v4
+103/528871/campos_512_v4
+103/528872/campos_512_v4
+103/528873/campos_512_v4
+103/528880/campos_512_v4
+103/528887/campos_512_v4
+103/528898/campos_512_v4
+103/528900/campos_512_v4
+103/528909/campos_512_v4
+103/528924/campos_512_v4
+103/528939/campos_512_v4
+103/528940/campos_512_v4
+103/528950/campos_512_v4
+103/528955/campos_512_v4
+103/528959/campos_512_v4
+103/528972/campos_512_v4
+103/528975/campos_512_v4
+103/528991/campos_512_v4
+103/529003/campos_512_v4
+103/529011/campos_512_v4
+103/529017/campos_512_v4
+103/529018/campos_512_v4
+103/529026/campos_512_v4
+103/529031/campos_512_v4
+103/529036/campos_512_v4
+103/529049/campos_512_v4
+103/529050/campos_512_v4
+103/529055/campos_512_v4
+103/529056/campos_512_v4
+103/529065/campos_512_v4
+103/529066/campos_512_v4
+103/529082/campos_512_v4
+103/529096/campos_512_v4
+103/529111/campos_512_v4
+103/529113/campos_512_v4
+103/529119/campos_512_v4
+103/529130/campos_512_v4
+103/529133/campos_512_v4
+103/529140/campos_512_v4
+103/529148/campos_512_v4
+103/529151/campos_512_v4
+103/529164/campos_512_v4
+103/529176/campos_512_v4
+103/529179/campos_512_v4
+103/529183/campos_512_v4
+103/529196/campos_512_v4
+103/529197/campos_512_v4
+103/529212/campos_512_v4
+103/529226/campos_512_v4
+103/529232/campos_512_v4
+103/529233/campos_512_v4
+103/529251/campos_512_v4
+103/529256/campos_512_v4
+103/529264/campos_512_v4
+103/529270/campos_512_v4
+103/529281/campos_512_v4
+103/529283/campos_512_v4
+103/529290/campos_512_v4
+103/529308/campos_512_v4
+103/529309/campos_512_v4
+103/529315/campos_512_v4
+103/529318/campos_512_v4
+103/529321/campos_512_v4
+103/529325/campos_512_v4
+103/529329/campos_512_v4
+103/529330/campos_512_v4
+103/529349/campos_512_v4
+103/529360/campos_512_v4
+103/529373/campos_512_v4
+103/529377/campos_512_v4
+103/529381/campos_512_v4
+103/529384/campos_512_v4
+103/529397/campos_512_v4
+103/529413/campos_512_v4
+103/529419/campos_512_v4
+103/529422/campos_512_v4
+103/529431/campos_512_v4
+103/529434/campos_512_v4
+103/529444/campos_512_v4
+103/529445/campos_512_v4
+103/529459/campos_512_v4
+103/529461/campos_512_v4
+103/529469/campos_512_v4
+103/529485/campos_512_v4
+103/529488/campos_512_v4
+103/529501/campos_512_v4
+103/529506/campos_512_v4
+103/529523/campos_512_v4
+103/529529/campos_512_v4
+103/529531/campos_512_v4
+103/529544/campos_512_v4
+103/529550/campos_512_v4
+103/529553/campos_512_v4
+103/529556/campos_512_v4
+103/529562/campos_512_v4
+103/529563/campos_512_v4
+103/529564/campos_512_v4
+103/529568/campos_512_v4
+103/529569/campos_512_v4
+103/529582/campos_512_v4
+103/529583/campos_512_v4
+103/529589/campos_512_v4
+103/529592/campos_512_v4
+103/529596/campos_512_v4
+103/529600/campos_512_v4
+103/529608/campos_512_v4
+103/529615/campos_512_v4
+103/529623/campos_512_v4
+103/529648/campos_512_v4
+103/529653/campos_512_v4
+103/529656/campos_512_v4
+103/529662/campos_512_v4
+103/529663/campos_512_v4
+103/529667/campos_512_v4
+103/529669/campos_512_v4
+103/529670/campos_512_v4
+103/529677/campos_512_v4
+103/529680/campos_512_v4
+103/529684/campos_512_v4
+103/529695/campos_512_v4
+103/529696/campos_512_v4
+103/529697/campos_512_v4
+103/529703/campos_512_v4
+103/529704/campos_512_v4
+103/529709/campos_512_v4
+103/529723/campos_512_v4
+103/529726/campos_512_v4
+103/529741/campos_512_v4
+103/529769/campos_512_v4
+103/529772/campos_512_v4
+103/529780/campos_512_v4
+103/529796/campos_512_v4
+103/529807/campos_512_v4
+103/529830/campos_512_v4
+103/529850/campos_512_v4
+103/529851/campos_512_v4
+103/529853/campos_512_v4
+103/529855/campos_512_v4
+103/529857/campos_512_v4
+103/529861/campos_512_v4
+103/529879/campos_512_v4
+103/529880/campos_512_v4
+103/529886/campos_512_v4
+103/529890/campos_512_v4
+103/529916/campos_512_v4
+103/529922/campos_512_v4
+103/529923/campos_512_v4
+103/529931/campos_512_v4
+103/529946/campos_512_v4
+103/529954/campos_512_v4
+103/529957/campos_512_v4
+103/529958/campos_512_v4
+103/529964/campos_512_v4
+103/529968/campos_512_v4
+103/529985/campos_512_v4
+103/529994/campos_512_v4
+103/529999/campos_512_v4
+103/530001/campos_512_v4
+104/530005/campos_512_v4
+104/530025/campos_512_v4
+104/530027/campos_512_v4
+104/530031/campos_512_v4
+104/530032/campos_512_v4
+104/530034/campos_512_v4
+104/530044/campos_512_v4
+104/530047/campos_512_v4
+104/530058/campos_512_v4
+104/530061/campos_512_v4
+104/530080/campos_512_v4
+104/530083/campos_512_v4
+104/530087/campos_512_v4
+104/530103/campos_512_v4
+104/530118/campos_512_v4
+104/530126/campos_512_v4
+104/530141/campos_512_v4
+104/530149/campos_512_v4
+104/530151/campos_512_v4
+104/530153/campos_512_v4
+104/530154/campos_512_v4
+104/530166/campos_512_v4
+104/530170/campos_512_v4
+104/530178/campos_512_v4
+104/530179/campos_512_v4
+104/530181/campos_512_v4
+104/530183/campos_512_v4
+104/530203/campos_512_v4
+104/530211/campos_512_v4
+104/530213/campos_512_v4
+104/530216/campos_512_v4
+104/530217/campos_512_v4
+104/530227/campos_512_v4
+104/530242/campos_512_v4
+104/530249/campos_512_v4
+104/530255/campos_512_v4
+104/530266/campos_512_v4
+104/530281/campos_512_v4
+104/530283/campos_512_v4
+104/530289/campos_512_v4
+104/530290/campos_512_v4
+104/530322/campos_512_v4
+104/530331/campos_512_v4
+104/530356/campos_512_v4
+104/530363/campos_512_v4
+104/530370/campos_512_v4
+104/530380/campos_512_v4
+104/530387/campos_512_v4
+104/530399/campos_512_v4
+104/530400/campos_512_v4
+104/530427/campos_512_v4
+104/530429/campos_512_v4
+104/530430/campos_512_v4
+104/530439/campos_512_v4
+104/530440/campos_512_v4
+104/530459/campos_512_v4
+104/530468/campos_512_v4
+104/530479/campos_512_v4
+104/530483/campos_512_v4
+104/530488/campos_512_v4
+104/530492/campos_512_v4
+104/530493/campos_512_v4
+104/530496/campos_512_v4
+104/530498/campos_512_v4
+104/530505/campos_512_v4
+104/530511/campos_512_v4
+104/530514/campos_512_v4
+104/530522/campos_512_v4
+104/530523/campos_512_v4
+104/530530/campos_512_v4
+104/530540/campos_512_v4
+104/530542/campos_512_v4
+104/530544/campos_512_v4
+104/530545/campos_512_v4
+104/530556/campos_512_v4
+104/530563/campos_512_v4
+104/530585/campos_512_v4
+104/530589/campos_512_v4
+104/530593/campos_512_v4
+104/530609/campos_512_v4
+104/530626/campos_512_v4
+104/530639/campos_512_v4
+104/530650/campos_512_v4
+104/530676/campos_512_v4
+104/530677/campos_512_v4
+104/530679/campos_512_v4
+104/530695/campos_512_v4
+104/530696/campos_512_v4
+104/530708/campos_512_v4
+104/530720/campos_512_v4
+104/530726/campos_512_v4
+104/530729/campos_512_v4
+104/530740/campos_512_v4
+104/530756/campos_512_v4
+104/530761/campos_512_v4
+104/530779/campos_512_v4
+104/530780/campos_512_v4
+104/530784/campos_512_v4
+104/530788/campos_512_v4
+104/530797/campos_512_v4
+104/530807/campos_512_v4
+104/530809/campos_512_v4
+104/530817/campos_512_v4
+104/530818/campos_512_v4
+104/530819/campos_512_v4
+104/530828/campos_512_v4
+104/530837/campos_512_v4
+104/530848/campos_512_v4
+104/530852/campos_512_v4
+104/530853/campos_512_v4
+104/530854/campos_512_v4
+104/530858/campos_512_v4
+104/530879/campos_512_v4
+104/530904/campos_512_v4
+104/530905/campos_512_v4
+104/530915/campos_512_v4
+104/530916/campos_512_v4
+104/530926/campos_512_v4
+104/530933/campos_512_v4
+104/530934/campos_512_v4
+104/530938/campos_512_v4
+104/530941/campos_512_v4
+104/530958/campos_512_v4
+104/530965/campos_512_v4
+104/530966/campos_512_v4
+104/530971/campos_512_v4
+104/530979/campos_512_v4
+104/530981/campos_512_v4
+104/530982/campos_512_v4
+104/530987/campos_512_v4
+104/530994/campos_512_v4
+104/531003/campos_512_v4
+104/531004/campos_512_v4
+104/531006/campos_512_v4
+104/531020/campos_512_v4
+104/531026/campos_512_v4
+104/531031/campos_512_v4
+104/531034/campos_512_v4
+104/531043/campos_512_v4
+104/531067/campos_512_v4
+104/531069/campos_512_v4
+104/531078/campos_512_v4
+104/531082/campos_512_v4
+104/531094/campos_512_v4
+104/531099/campos_512_v4
+104/531111/campos_512_v4
+104/531112/campos_512_v4
+104/531116/campos_512_v4
+104/531120/campos_512_v4
+104/531124/campos_512_v4
+104/531131/campos_512_v4
+104/531151/campos_512_v4
+104/531155/campos_512_v4
+104/531165/campos_512_v4
+104/531180/campos_512_v4
+104/531184/campos_512_v4
+104/531196/campos_512_v4
+104/531199/campos_512_v4
+104/531200/campos_512_v4
+104/531204/campos_512_v4
+104/531226/campos_512_v4
+104/531237/campos_512_v4
+104/531246/campos_512_v4
+104/531263/campos_512_v4
+104/531277/campos_512_v4
+104/531286/campos_512_v4
+104/531295/campos_512_v4
+104/531324/campos_512_v4
+104/531336/campos_512_v4
+104/531338/campos_512_v4
+104/531346/campos_512_v4
+104/531347/campos_512_v4
+104/531349/campos_512_v4
+104/531353/campos_512_v4
+104/531357/campos_512_v4
+104/531364/campos_512_v4
+104/531375/campos_512_v4
+104/531376/campos_512_v4
+104/531390/campos_512_v4
+104/531395/campos_512_v4
+104/531396/campos_512_v4
+104/531400/campos_512_v4
+104/531406/campos_512_v4
+104/531407/campos_512_v4
+104/531412/campos_512_v4
+104/531415/campos_512_v4
+104/531426/campos_512_v4
+104/531429/campos_512_v4
+104/531431/campos_512_v4
+104/531433/campos_512_v4
+104/531436/campos_512_v4
+104/531438/campos_512_v4
+104/531450/campos_512_v4
+104/531454/campos_512_v4
+104/531466/campos_512_v4
+104/531479/campos_512_v4
+104/531481/campos_512_v4
+104/531523/campos_512_v4
+104/531524/campos_512_v4
+104/531536/campos_512_v4
+104/531539/campos_512_v4
+104/531546/campos_512_v4
+104/531557/campos_512_v4
+104/531575/campos_512_v4
+104/531590/campos_512_v4
+104/531595/campos_512_v4
+104/531599/campos_512_v4
+104/531600/campos_512_v4
+104/531602/campos_512_v4
+104/531606/campos_512_v4
+104/531609/campos_512_v4
+104/531629/campos_512_v4
+104/531646/campos_512_v4
+104/531650/campos_512_v4
+104/531657/campos_512_v4
+104/531668/campos_512_v4
+104/531672/campos_512_v4
+104/531674/campos_512_v4
+104/531676/campos_512_v4
+104/531712/campos_512_v4
+104/531722/campos_512_v4
+104/531726/campos_512_v4
+104/531730/campos_512_v4
+104/531764/campos_512_v4
+104/531779/campos_512_v4
+104/531785/campos_512_v4
+104/531787/campos_512_v4
+104/531790/campos_512_v4
+104/531797/campos_512_v4
+104/531820/campos_512_v4
+104/531830/campos_512_v4
+104/531833/campos_512_v4
+104/531849/campos_512_v4
+104/531851/campos_512_v4
+104/531853/campos_512_v4
+104/531864/campos_512_v4
+104/531870/campos_512_v4
+104/531889/campos_512_v4
+104/531892/campos_512_v4
+104/531895/campos_512_v4
+104/531900/campos_512_v4
+104/531903/campos_512_v4
+104/531905/campos_512_v4
+104/531907/campos_512_v4
+104/531913/campos_512_v4
+104/531918/campos_512_v4
+104/531919/campos_512_v4
+104/531920/campos_512_v4
+104/531922/campos_512_v4
+104/531933/campos_512_v4
+104/531944/campos_512_v4
+104/531947/campos_512_v4
+104/531950/campos_512_v4
+104/531964/campos_512_v4
+104/531995/campos_512_v4
+104/531998/campos_512_v4
+104/532005/campos_512_v4
+104/532010/campos_512_v4
+104/532011/campos_512_v4
+104/532022/campos_512_v4
+104/532036/campos_512_v4
+104/532041/campos_512_v4
+104/532049/campos_512_v4
+104/532063/campos_512_v4
+104/532066/campos_512_v4
+104/532071/campos_512_v4
+104/532087/campos_512_v4
+104/532100/campos_512_v4
+104/532104/campos_512_v4
+104/532138/campos_512_v4
+104/532141/campos_512_v4
+104/532152/campos_512_v4
+104/532154/campos_512_v4
+104/532156/campos_512_v4
+104/532164/campos_512_v4
+104/532165/campos_512_v4
+104/532168/campos_512_v4
+104/532193/campos_512_v4
+104/532198/campos_512_v4
+104/532201/campos_512_v4
+104/532204/campos_512_v4
+104/532205/campos_512_v4
+104/532208/campos_512_v4
+104/532209/campos_512_v4
+104/532211/campos_512_v4
+104/532221/campos_512_v4
+104/532224/campos_512_v4
+104/532235/campos_512_v4
+104/532239/campos_512_v4
+104/532243/campos_512_v4
+104/532244/campos_512_v4
+104/532248/campos_512_v4
+104/532253/campos_512_v4
+104/532256/campos_512_v4
+104/532259/campos_512_v4
+104/532268/campos_512_v4
+104/532278/campos_512_v4
+104/532282/campos_512_v4
+104/532297/campos_512_v4
+104/532303/campos_512_v4
+104/532309/campos_512_v4
+104/532315/campos_512_v4
+104/532316/campos_512_v4
+104/532322/campos_512_v4
+104/532330/campos_512_v4
+104/532345/campos_512_v4
+104/532357/campos_512_v4
+104/532362/campos_512_v4
+104/532382/campos_512_v4
+104/532388/campos_512_v4
+104/532413/campos_512_v4
+104/532420/campos_512_v4
+104/532422/campos_512_v4
+104/532424/campos_512_v4
+104/532430/campos_512_v4
+104/532473/campos_512_v4
+104/532491/campos_512_v4
+104/532495/campos_512_v4
+104/532499/campos_512_v4
+104/532500/campos_512_v4
+104/532515/campos_512_v4
+104/532533/campos_512_v4
+104/532544/campos_512_v4
+104/532551/campos_512_v4
+104/532553/campos_512_v4
+104/532569/campos_512_v4
+104/532579/campos_512_v4
+104/532583/campos_512_v4
+104/532590/campos_512_v4
+104/532607/campos_512_v4
+104/532615/campos_512_v4
+104/532621/campos_512_v4
+104/532624/campos_512_v4
+104/532625/campos_512_v4
+104/532642/campos_512_v4
+104/532649/campos_512_v4
+104/532670/campos_512_v4
+104/532677/campos_512_v4
+104/532683/campos_512_v4
+104/532685/campos_512_v4
+104/532693/campos_512_v4
+104/532727/campos_512_v4
+104/532731/campos_512_v4
+104/532737/campos_512_v4
+104/532742/campos_512_v4
+104/532744/campos_512_v4
+104/532749/campos_512_v4
+104/532752/campos_512_v4
+104/532774/campos_512_v4
+104/532778/campos_512_v4
+104/532791/campos_512_v4
+104/532795/campos_512_v4
+104/532797/campos_512_v4
+104/532800/campos_512_v4
+104/532805/campos_512_v4
+104/532808/campos_512_v4
+104/532818/campos_512_v4
+104/532821/campos_512_v4
+104/532825/campos_512_v4
+104/532837/campos_512_v4
+104/532842/campos_512_v4
+104/532843/campos_512_v4
+104/532849/campos_512_v4
+104/532852/campos_512_v4
+104/532855/campos_512_v4
+104/532882/campos_512_v4
+104/532885/campos_512_v4
+104/532894/campos_512_v4
+104/532904/campos_512_v4
+104/532918/campos_512_v4
+104/532924/campos_512_v4
+104/532952/campos_512_v4
+104/532956/campos_512_v4
+104/532957/campos_512_v4
+104/532967/campos_512_v4
+104/532969/campos_512_v4
+104/532986/campos_512_v4
+104/533000/campos_512_v4
+104/533002/campos_512_v4
+104/533011/campos_512_v4
+104/533026/campos_512_v4
+104/533030/campos_512_v4
+104/533031/campos_512_v4
+104/533036/campos_512_v4
+104/533043/campos_512_v4
+104/533051/campos_512_v4
+104/533056/campos_512_v4
+104/533060/campos_512_v4
+104/533065/campos_512_v4
+104/533066/campos_512_v4
+104/533069/campos_512_v4
+104/533080/campos_512_v4
+104/533082/campos_512_v4
+104/533093/campos_512_v4
+104/533097/campos_512_v4
+104/533098/campos_512_v4
+104/533103/campos_512_v4
+104/533104/campos_512_v4
+104/533108/campos_512_v4
+104/533109/campos_512_v4
+104/533112/campos_512_v4
+104/533121/campos_512_v4
+104/533132/campos_512_v4
+104/533154/campos_512_v4
+104/533160/campos_512_v4
+104/533172/campos_512_v4
+104/533177/campos_512_v4
+104/533188/campos_512_v4
+104/533192/campos_512_v4
+104/533197/campos_512_v4
+104/533205/campos_512_v4
+104/533217/campos_512_v4
+104/533223/campos_512_v4
+104/533224/campos_512_v4
+104/533226/campos_512_v4
+104/533229/campos_512_v4
+104/533242/campos_512_v4
+104/533244/campos_512_v4
+104/533248/campos_512_v4
+104/533250/campos_512_v4
+104/533257/campos_512_v4
+104/533263/campos_512_v4
+104/533266/campos_512_v4
+104/533270/campos_512_v4
+104/533277/campos_512_v4
+104/533278/campos_512_v4
+104/533292/campos_512_v4
+104/533293/campos_512_v4
+104/533308/campos_512_v4
+104/533331/campos_512_v4
+104/533333/campos_512_v4
+104/533350/campos_512_v4
+104/533357/campos_512_v4
+104/533358/campos_512_v4
+104/533366/campos_512_v4
+104/533372/campos_512_v4
+104/533379/campos_512_v4
+104/533386/campos_512_v4
+104/533395/campos_512_v4
+104/533408/campos_512_v4
+104/533417/campos_512_v4
+104/533419/campos_512_v4
+104/533425/campos_512_v4
+104/533427/campos_512_v4
+104/533429/campos_512_v4
+104/533431/campos_512_v4
+104/533438/campos_512_v4
+104/533441/campos_512_v4
+104/533444/campos_512_v4
+104/533447/campos_512_v4
+104/533449/campos_512_v4
+104/533451/campos_512_v4
+104/533458/campos_512_v4
+104/533466/campos_512_v4
+104/533474/campos_512_v4
+104/533477/campos_512_v4
+104/533485/campos_512_v4
+104/533492/campos_512_v4
+104/533524/campos_512_v4
+104/533538/campos_512_v4
+104/533540/campos_512_v4
+104/533547/campos_512_v4
+104/533550/campos_512_v4
+104/533571/campos_512_v4
+104/533583/campos_512_v4
+104/533586/campos_512_v4
+104/533588/campos_512_v4
+104/533594/campos_512_v4
+104/533597/campos_512_v4
+104/533602/campos_512_v4
+104/533604/campos_512_v4
+104/533607/campos_512_v4
+104/533610/campos_512_v4
+104/533612/campos_512_v4
+104/533628/campos_512_v4
+104/533638/campos_512_v4
+104/533654/campos_512_v4
+104/533655/campos_512_v4
+104/533656/campos_512_v4
+104/533657/campos_512_v4
+104/533669/campos_512_v4
+104/533686/campos_512_v4
+104/533694/campos_512_v4
+104/533699/campos_512_v4
+104/533717/campos_512_v4
+104/533739/campos_512_v4
+104/533743/campos_512_v4
+104/533747/campos_512_v4
+104/533754/campos_512_v4
+104/533755/campos_512_v4
+104/533758/campos_512_v4
+104/533781/campos_512_v4
+104/533786/campos_512_v4
+104/533789/campos_512_v4
+104/533792/campos_512_v4
+104/533797/campos_512_v4
+104/533800/campos_512_v4
+104/533801/campos_512_v4
+104/533806/campos_512_v4
+104/533807/campos_512_v4
+104/533811/campos_512_v4
+104/533833/campos_512_v4
+104/533844/campos_512_v4
+104/533849/campos_512_v4
+104/533858/campos_512_v4
+104/533869/campos_512_v4
+104/533880/campos_512_v4
+104/533883/campos_512_v4
+104/533888/campos_512_v4
+104/533910/campos_512_v4
+104/533912/campos_512_v4
+104/533917/campos_512_v4
+104/533927/campos_512_v4
+104/533928/campos_512_v4
+104/533944/campos_512_v4
+104/533947/campos_512_v4
+104/533948/campos_512_v4
+104/533958/campos_512_v4
+104/533964/campos_512_v4
+104/533966/campos_512_v4
+104/533974/campos_512_v4
+104/533982/campos_512_v4
+104/533990/campos_512_v4
+104/533998/campos_512_v4
+104/534009/campos_512_v4
+104/534011/campos_512_v4
+104/534020/campos_512_v4
+104/534026/campos_512_v4
+104/534034/campos_512_v4
+104/534041/campos_512_v4
+104/534049/campos_512_v4
+104/534075/campos_512_v4
+104/534076/campos_512_v4
+104/534083/campos_512_v4
+104/534095/campos_512_v4
+104/534110/campos_512_v4
+104/534116/campos_512_v4
+104/534118/campos_512_v4
+104/534127/campos_512_v4
+104/534133/campos_512_v4
+104/534137/campos_512_v4
+104/534159/campos_512_v4
+104/534161/campos_512_v4
+104/534167/campos_512_v4
+104/534172/campos_512_v4
+104/534174/campos_512_v4
+104/534179/campos_512_v4
+104/534182/campos_512_v4
+104/534224/campos_512_v4
+104/534233/campos_512_v4
+104/534250/campos_512_v4
+104/534257/campos_512_v4
+104/534263/campos_512_v4
+104/534264/campos_512_v4
+104/534266/campos_512_v4
+104/534281/campos_512_v4
+104/534292/campos_512_v4
+104/534296/campos_512_v4
+104/534297/campos_512_v4
+104/534298/campos_512_v4
+104/534301/campos_512_v4
+104/534302/campos_512_v4
+104/534303/campos_512_v4
+104/534306/campos_512_v4
+104/534307/campos_512_v4
+104/534314/campos_512_v4
+104/534317/campos_512_v4
+104/534321/campos_512_v4
+104/534325/campos_512_v4
+104/534328/campos_512_v4
+104/534346/campos_512_v4
+104/534352/campos_512_v4
+104/534354/campos_512_v4
+104/534355/campos_512_v4
+104/534372/campos_512_v4
+104/534373/campos_512_v4
+104/534374/campos_512_v4
+104/534381/campos_512_v4
+104/534402/campos_512_v4
+104/534406/campos_512_v4
+104/534422/campos_512_v4
+104/534425/campos_512_v4
+104/534444/campos_512_v4
+104/534459/campos_512_v4
+104/534466/campos_512_v4
+104/534467/campos_512_v4
+104/534482/campos_512_v4
+104/534500/campos_512_v4
+104/534502/campos_512_v4
+104/534503/campos_512_v4
+104/534505/campos_512_v4
+104/534522/campos_512_v4
+104/534535/campos_512_v4
+104/534538/campos_512_v4
+104/534541/campos_512_v4
+104/534551/campos_512_v4
+104/534565/campos_512_v4
+104/534571/campos_512_v4
+104/534573/campos_512_v4
+104/534581/campos_512_v4
+104/534585/campos_512_v4
+104/534587/campos_512_v4
+104/534588/campos_512_v4
+104/534596/campos_512_v4
+104/534602/campos_512_v4
+104/534610/campos_512_v4
+104/534622/campos_512_v4
+104/534627/campos_512_v4
+104/534633/campos_512_v4
+104/534644/campos_512_v4
+104/534647/campos_512_v4
+104/534657/campos_512_v4
+104/534660/campos_512_v4
+104/534662/campos_512_v4
+104/534663/campos_512_v4
+104/534664/campos_512_v4
+104/534665/campos_512_v4
+104/534668/campos_512_v4
+104/534670/campos_512_v4
+104/534676/campos_512_v4
+104/534678/campos_512_v4
+104/534686/campos_512_v4
+104/534694/campos_512_v4
+104/534696/campos_512_v4
+104/534697/campos_512_v4
+104/534702/campos_512_v4
+104/534709/campos_512_v4
+104/534713/campos_512_v4
+104/534715/campos_512_v4
+104/534716/campos_512_v4
+104/534733/campos_512_v4
+104/534736/campos_512_v4
+104/534738/campos_512_v4
+104/534750/campos_512_v4
+104/534754/campos_512_v4
+104/534774/campos_512_v4
+104/534783/campos_512_v4
+104/534793/campos_512_v4
+104/534797/campos_512_v4
+104/534800/campos_512_v4
+104/534803/campos_512_v4
+104/534804/campos_512_v4
+104/534811/campos_512_v4
+104/534812/campos_512_v4
+104/534826/campos_512_v4
+104/534827/campos_512_v4
+104/534833/campos_512_v4
+104/534835/campos_512_v4
+104/534840/campos_512_v4
+104/534853/campos_512_v4
+104/534858/campos_512_v4
+104/534864/campos_512_v4
+104/534878/campos_512_v4
+104/534885/campos_512_v4
+104/534887/campos_512_v4
+104/534891/campos_512_v4
+104/534895/campos_512_v4
+104/534896/campos_512_v4
+104/534897/campos_512_v4
+104/534914/campos_512_v4
+104/534961/campos_512_v4
+104/534963/campos_512_v4
+104/534973/campos_512_v4
+104/534975/campos_512_v4
+104/534979/campos_512_v4
+104/534980/campos_512_v4
+104/534981/campos_512_v4
+104/534982/campos_512_v4
+104/534984/campos_512_v4
+104/534992/campos_512_v4
+104/534993/campos_512_v4
+105/535002/campos_512_v4
+105/535009/campos_512_v4
+105/535016/campos_512_v4
+105/535027/campos_512_v4
+105/535033/campos_512_v4
+105/535036/campos_512_v4
+105/535061/campos_512_v4
+105/535086/campos_512_v4
+105/535111/campos_512_v4
+105/535114/campos_512_v4
+105/535122/campos_512_v4
+105/535132/campos_512_v4
+105/535140/campos_512_v4
+105/535147/campos_512_v4
+105/535169/campos_512_v4
+105/535170/campos_512_v4
+105/535181/campos_512_v4
+105/535185/campos_512_v4
+105/535191/campos_512_v4
+105/535193/campos_512_v4
+105/535213/campos_512_v4
+105/535217/campos_512_v4
+105/535231/campos_512_v4
+105/535238/campos_512_v4
+105/535241/campos_512_v4
+105/535275/campos_512_v4
+105/535284/campos_512_v4
+105/535287/campos_512_v4
+105/535294/campos_512_v4
+105/535309/campos_512_v4
+105/535311/campos_512_v4
+105/535315/campos_512_v4
+105/535326/campos_512_v4
+105/535330/campos_512_v4
+105/535340/campos_512_v4
+105/535352/campos_512_v4
+105/535356/campos_512_v4
+105/535359/campos_512_v4
+105/535378/campos_512_v4
+105/535389/campos_512_v4
+105/535393/campos_512_v4
+105/535414/campos_512_v4
+105/535427/campos_512_v4
+105/535429/campos_512_v4
+105/535438/campos_512_v4
+105/535444/campos_512_v4
+105/535454/campos_512_v4
+105/535455/campos_512_v4
+105/535457/campos_512_v4
+105/535464/campos_512_v4
+105/535471/campos_512_v4
+105/535472/campos_512_v4
+105/535480/campos_512_v4
+105/535486/campos_512_v4
+105/535491/campos_512_v4
+105/535496/campos_512_v4
+105/535509/campos_512_v4
+105/535514/campos_512_v4
+105/535515/campos_512_v4
+105/535527/campos_512_v4
+105/535529/campos_512_v4
+105/535537/campos_512_v4
+105/535541/campos_512_v4
+105/535552/campos_512_v4
+105/535558/campos_512_v4
+105/535573/campos_512_v4
+105/535609/campos_512_v4
+105/535622/campos_512_v4
+105/535624/campos_512_v4
+105/535627/campos_512_v4
+105/535634/campos_512_v4
+105/535638/campos_512_v4
+105/535644/campos_512_v4
+105/535655/campos_512_v4
+105/535661/campos_512_v4
+105/535672/campos_512_v4
+105/535673/campos_512_v4
+105/535681/campos_512_v4
+105/535686/campos_512_v4
+105/535691/campos_512_v4
+105/535692/campos_512_v4
+105/535693/campos_512_v4
+105/535706/campos_512_v4
+105/535709/campos_512_v4
+105/535712/campos_512_v4
+105/535719/campos_512_v4
+105/535735/campos_512_v4
+105/535744/campos_512_v4
+105/535745/campos_512_v4
+105/535766/campos_512_v4
+105/535767/campos_512_v4
+105/535769/campos_512_v4
+105/535773/campos_512_v4
+105/535774/campos_512_v4
+105/535785/campos_512_v4
+105/535787/campos_512_v4
+105/535803/campos_512_v4
+105/535810/campos_512_v4
+105/535811/campos_512_v4
+105/535818/campos_512_v4
+105/535861/campos_512_v4
+105/535862/campos_512_v4
+105/535865/campos_512_v4
+105/535891/campos_512_v4
+105/535910/campos_512_v4
+105/535915/campos_512_v4
+105/535918/campos_512_v4
+105/535922/campos_512_v4
+105/535930/campos_512_v4
+105/535939/campos_512_v4
+105/535951/campos_512_v4
+105/535954/campos_512_v4
+105/535965/campos_512_v4
+105/535973/campos_512_v4
+105/535974/campos_512_v4
+105/535988/campos_512_v4
+105/535998/campos_512_v4
+105/536020/campos_512_v4
+105/536029/campos_512_v4
+105/536038/campos_512_v4
+105/536047/campos_512_v4
+105/536070/campos_512_v4
+105/536080/campos_512_v4
+105/536085/campos_512_v4
+105/536097/campos_512_v4
+105/536118/campos_512_v4
+105/536120/campos_512_v4
+105/536126/campos_512_v4
+105/536129/campos_512_v4
+105/536131/campos_512_v4
+105/536137/campos_512_v4
+105/536139/campos_512_v4
+105/536141/campos_512_v4
+105/536142/campos_512_v4
+105/536184/campos_512_v4
+105/536186/campos_512_v4
+105/536189/campos_512_v4
+105/536209/campos_512_v4
+105/536217/campos_512_v4
+105/536220/campos_512_v4
+105/536228/campos_512_v4
+105/536251/campos_512_v4
+105/536253/campos_512_v4
+105/536254/campos_512_v4
+105/536256/campos_512_v4
+105/536262/campos_512_v4
+105/536269/campos_512_v4
+105/536273/campos_512_v4
+105/536284/campos_512_v4
+105/536289/campos_512_v4
+105/536294/campos_512_v4
+105/536305/campos_512_v4
+105/536306/campos_512_v4
+105/536309/campos_512_v4
+105/536310/campos_512_v4
+105/536320/campos_512_v4
+105/536322/campos_512_v4
+105/536324/campos_512_v4
+105/536325/campos_512_v4
+105/536331/campos_512_v4
+105/536352/campos_512_v4
+105/536356/campos_512_v4
+105/536360/campos_512_v4
+105/536366/campos_512_v4
+105/536370/campos_512_v4
+105/536374/campos_512_v4
+105/536386/campos_512_v4
+105/536387/campos_512_v4
+105/536389/campos_512_v4
+105/536390/campos_512_v4
+105/536391/campos_512_v4
+105/536394/campos_512_v4
+105/536403/campos_512_v4
+105/536426/campos_512_v4
+105/536437/campos_512_v4
+105/536441/campos_512_v4
+105/536470/campos_512_v4
+105/536472/campos_512_v4
+105/536483/campos_512_v4
+105/536489/campos_512_v4
+105/536499/campos_512_v4
+105/536501/campos_512_v4
+105/536505/campos_512_v4
+105/536507/campos_512_v4
+105/536510/campos_512_v4
+105/536512/campos_512_v4
+105/536517/campos_512_v4
+105/536521/campos_512_v4
+105/536526/campos_512_v4
+105/536529/campos_512_v4
+105/536544/campos_512_v4
+105/536554/campos_512_v4
+105/536557/campos_512_v4
+105/536573/campos_512_v4
+105/536574/campos_512_v4
+105/536584/campos_512_v4
+105/536591/campos_512_v4
+105/536596/campos_512_v4
+105/536604/campos_512_v4
+105/536612/campos_512_v4
+105/536613/campos_512_v4
+105/536623/campos_512_v4
+105/536626/campos_512_v4
+105/536639/campos_512_v4
+105/536657/campos_512_v4
+105/536663/campos_512_v4
+105/536667/campos_512_v4
+105/536692/campos_512_v4
+105/536696/campos_512_v4
+105/536702/campos_512_v4
+105/536719/campos_512_v4
+105/536724/campos_512_v4
+105/536732/campos_512_v4
+105/536740/campos_512_v4
+105/536748/campos_512_v4
+105/536756/campos_512_v4
+105/536773/campos_512_v4
+105/536796/campos_512_v4
+105/536806/campos_512_v4
+105/536813/campos_512_v4
+105/536817/campos_512_v4
+105/536832/campos_512_v4
+105/536842/campos_512_v4
+105/536843/campos_512_v4
+105/536844/campos_512_v4
+105/536853/campos_512_v4
+105/536861/campos_512_v4
+105/536874/campos_512_v4
+105/536876/campos_512_v4
+105/536909/campos_512_v4
+105/536912/campos_512_v4
+105/536919/campos_512_v4
+105/536924/campos_512_v4
+105/536925/campos_512_v4
+105/536926/campos_512_v4
+105/536929/campos_512_v4
+105/536948/campos_512_v4
+105/536954/campos_512_v4
+105/536955/campos_512_v4
+105/536979/campos_512_v4
+105/537014/campos_512_v4
+105/537021/campos_512_v4
+105/537025/campos_512_v4
+105/537032/campos_512_v4
+105/537036/campos_512_v4
+105/537037/campos_512_v4
+105/537043/campos_512_v4
+105/537051/campos_512_v4
+105/537052/campos_512_v4
+105/537058/campos_512_v4
+105/537074/campos_512_v4
+105/537118/campos_512_v4
+105/537155/campos_512_v4
+105/537158/campos_512_v4
+105/537164/campos_512_v4
+105/537167/campos_512_v4
+105/537171/campos_512_v4
+105/537175/campos_512_v4
+105/537181/campos_512_v4
+105/537206/campos_512_v4
+105/537216/campos_512_v4
+105/537222/campos_512_v4
+105/537228/campos_512_v4
+105/537234/campos_512_v4
+105/537243/campos_512_v4
+105/537251/campos_512_v4
+105/537257/campos_512_v4
+105/537260/campos_512_v4
+105/537273/campos_512_v4
+105/537285/campos_512_v4
+105/537294/campos_512_v4
+105/537297/campos_512_v4
+105/537321/campos_512_v4
+105/537328/campos_512_v4
+105/537333/campos_512_v4
+105/537336/campos_512_v4
+105/537338/campos_512_v4
+105/537341/campos_512_v4
+105/537347/campos_512_v4
+105/537355/campos_512_v4
+105/537358/campos_512_v4
+105/537391/campos_512_v4
+105/537394/campos_512_v4
+105/537416/campos_512_v4
+105/537418/campos_512_v4
+105/537422/campos_512_v4
+105/537425/campos_512_v4
+105/537426/campos_512_v4
+105/537427/campos_512_v4
+105/537429/campos_512_v4
+105/537433/campos_512_v4
+105/537437/campos_512_v4
+105/537438/campos_512_v4
+105/537441/campos_512_v4
+105/537444/campos_512_v4
+105/537453/campos_512_v4
+105/537470/campos_512_v4
+105/537474/campos_512_v4
+105/537481/campos_512_v4
+105/537482/campos_512_v4
+105/537486/campos_512_v4
+105/537493/campos_512_v4
+105/537499/campos_512_v4
+105/537507/campos_512_v4
+105/537519/campos_512_v4
+105/537520/campos_512_v4
+105/537526/campos_512_v4
+105/537570/campos_512_v4
+105/537571/campos_512_v4
+105/537575/campos_512_v4
+105/537584/campos_512_v4
+105/537585/campos_512_v4
+105/537591/campos_512_v4
+105/537592/campos_512_v4
+105/537613/campos_512_v4
+105/537647/campos_512_v4
+105/537650/campos_512_v4
+105/537662/campos_512_v4
+105/537665/campos_512_v4
+105/537685/campos_512_v4
+105/537699/campos_512_v4
+105/537701/campos_512_v4
+105/537705/campos_512_v4
+105/537708/campos_512_v4
+105/537720/campos_512_v4
+105/537732/campos_512_v4
+105/537735/campos_512_v4
+105/537739/campos_512_v4
+105/537744/campos_512_v4
+105/537749/campos_512_v4
+105/537752/campos_512_v4
+105/537755/campos_512_v4
+105/537766/campos_512_v4
+105/537795/campos_512_v4
+105/537811/campos_512_v4
+105/537812/campos_512_v4
+105/537823/campos_512_v4
+105/537839/campos_512_v4
+105/537854/campos_512_v4
+105/537874/campos_512_v4
+105/537886/campos_512_v4
+105/537890/campos_512_v4
+105/537892/campos_512_v4
+105/537896/campos_512_v4
+105/537920/campos_512_v4
+105/537924/campos_512_v4
+105/537958/campos_512_v4
+105/537975/campos_512_v4
+105/537986/campos_512_v4
+105/537987/campos_512_v4
+105/537989/campos_512_v4
+105/537999/campos_512_v4
+105/538005/campos_512_v4
+105/538008/campos_512_v4
+105/538015/campos_512_v4
+105/538016/campos_512_v4
+105/538035/campos_512_v4
+105/538037/campos_512_v4
+105/538046/campos_512_v4
+105/538054/campos_512_v4
+105/538059/campos_512_v4
+105/538063/campos_512_v4
+105/538064/campos_512_v4
+105/538065/campos_512_v4
+105/538075/campos_512_v4
+105/538078/campos_512_v4
+105/538093/campos_512_v4
+105/538100/campos_512_v4
+105/538113/campos_512_v4
+105/538122/campos_512_v4
+105/538126/campos_512_v4
+105/538132/campos_512_v4
+105/538134/campos_512_v4
+105/538137/campos_512_v4
+105/538144/campos_512_v4
+105/538151/campos_512_v4
+105/538152/campos_512_v4
+105/538160/campos_512_v4
+105/538161/campos_512_v4
+105/538173/campos_512_v4
+105/538179/campos_512_v4
+105/538181/campos_512_v4
+105/538187/campos_512_v4
+105/538188/campos_512_v4
+105/538197/campos_512_v4
+105/538208/campos_512_v4
+105/538222/campos_512_v4
+105/538235/campos_512_v4
+105/538267/campos_512_v4
+105/538270/campos_512_v4
+105/538278/campos_512_v4
+105/538279/campos_512_v4
+105/538287/campos_512_v4
+105/538294/campos_512_v4
+105/538298/campos_512_v4
+105/538307/campos_512_v4
+105/538312/campos_512_v4
+105/538320/campos_512_v4
+105/538323/campos_512_v4
+105/538330/campos_512_v4
+105/538338/campos_512_v4
+105/538365/campos_512_v4
+105/538415/campos_512_v4
+105/538417/campos_512_v4
+105/538422/campos_512_v4
+105/538442/campos_512_v4
+105/538444/campos_512_v4
+105/538454/campos_512_v4
+105/538457/campos_512_v4
+105/538473/campos_512_v4
+105/538477/campos_512_v4
+105/538481/campos_512_v4
+105/538486/campos_512_v4
+105/538498/campos_512_v4
+105/538502/campos_512_v4
+105/538529/campos_512_v4
+105/538539/campos_512_v4
+105/538540/campos_512_v4
+105/538567/campos_512_v4
+105/538568/campos_512_v4
+105/538570/campos_512_v4
+105/538582/campos_512_v4
+105/538585/campos_512_v4
+105/538593/campos_512_v4
+105/538595/campos_512_v4
+105/538603/campos_512_v4
+105/538613/campos_512_v4
+105/538628/campos_512_v4
+105/538634/campos_512_v4
+105/538642/campos_512_v4
+105/538644/campos_512_v4
+105/538648/campos_512_v4
+105/538651/campos_512_v4
+105/538652/campos_512_v4
+105/538663/campos_512_v4
+105/538675/campos_512_v4
+105/538702/campos_512_v4
+105/538708/campos_512_v4
+105/538712/campos_512_v4
+105/538719/campos_512_v4
+105/538730/campos_512_v4
+105/538734/campos_512_v4
+105/538738/campos_512_v4
+105/538745/campos_512_v4
+105/538759/campos_512_v4
+105/538764/campos_512_v4
+105/538772/campos_512_v4
+105/538787/campos_512_v4
+105/538792/campos_512_v4
+105/538795/campos_512_v4
+105/538810/campos_512_v4
+105/538831/campos_512_v4
+105/538845/campos_512_v4
+105/538873/campos_512_v4
+105/538881/campos_512_v4
+105/538890/campos_512_v4
+105/538930/campos_512_v4
+105/538933/campos_512_v4
+105/538936/campos_512_v4
+105/538939/campos_512_v4
+105/538961/campos_512_v4
+105/538968/campos_512_v4
+105/538973/campos_512_v4
+105/538986/campos_512_v4
+105/538992/campos_512_v4
+105/538995/campos_512_v4
+105/539014/campos_512_v4
+105/539045/campos_512_v4
+105/539072/campos_512_v4
+105/539074/campos_512_v4
+105/539077/campos_512_v4
+105/539079/campos_512_v4
+105/539090/campos_512_v4
+105/539101/campos_512_v4
+105/539120/campos_512_v4
+105/539130/campos_512_v4
+105/539138/campos_512_v4
+105/539169/campos_512_v4
+105/539171/campos_512_v4
+105/539174/campos_512_v4
+105/539178/campos_512_v4
+105/539184/campos_512_v4
+105/539188/campos_512_v4
+105/539200/campos_512_v4
+105/539204/campos_512_v4
+105/539206/campos_512_v4
+105/539211/campos_512_v4
+105/539212/campos_512_v4
+105/539248/campos_512_v4
+105/539252/campos_512_v4
+105/539253/campos_512_v4
+105/539255/campos_512_v4
+105/539257/campos_512_v4
+105/539258/campos_512_v4
+105/539265/campos_512_v4
+105/539283/campos_512_v4
+105/539300/campos_512_v4
+105/539301/campos_512_v4
+105/539303/campos_512_v4
+105/539316/campos_512_v4
+105/539327/campos_512_v4
+105/539333/campos_512_v4
+105/539336/campos_512_v4
+105/539339/campos_512_v4
+105/539343/campos_512_v4
+105/539344/campos_512_v4
+105/539369/campos_512_v4
+105/539374/campos_512_v4
+105/539376/campos_512_v4
+105/539385/campos_512_v4
+105/539393/campos_512_v4
+105/539403/campos_512_v4
+105/539414/campos_512_v4
+105/539420/campos_512_v4
+105/539440/campos_512_v4
+105/539451/campos_512_v4
+105/539460/campos_512_v4
+105/539464/campos_512_v4
+105/539474/campos_512_v4
+105/539482/campos_512_v4
+105/539492/campos_512_v4
+105/539502/campos_512_v4
+105/539511/campos_512_v4
+105/539512/campos_512_v4
+105/539517/campos_512_v4
+105/539519/campos_512_v4
+105/539528/campos_512_v4
+105/539530/campos_512_v4
+105/539536/campos_512_v4
+105/539537/campos_512_v4
+105/539540/campos_512_v4
+105/539555/campos_512_v4
+105/539558/campos_512_v4
+105/539560/campos_512_v4
+105/539564/campos_512_v4
+105/539568/campos_512_v4
+105/539569/campos_512_v4
+105/539581/campos_512_v4
+105/539585/campos_512_v4
+105/539594/campos_512_v4
+105/539600/campos_512_v4
+105/539601/campos_512_v4
+105/539606/campos_512_v4
+105/539616/campos_512_v4
+105/539618/campos_512_v4
+105/539623/campos_512_v4
+105/539627/campos_512_v4
+105/539628/campos_512_v4
+105/539631/campos_512_v4
+105/539640/campos_512_v4
+105/539646/campos_512_v4
+105/539652/campos_512_v4
+105/539662/campos_512_v4
+105/539665/campos_512_v4
+105/539669/campos_512_v4
+105/539673/campos_512_v4
+105/539685/campos_512_v4
+105/539687/campos_512_v4
+105/539703/campos_512_v4
+105/539736/campos_512_v4
+105/539739/campos_512_v4
+105/539741/campos_512_v4
+105/539743/campos_512_v4
+105/539757/campos_512_v4
+105/539766/campos_512_v4
+105/539768/campos_512_v4
+105/539769/campos_512_v4
+105/539779/campos_512_v4
+105/539787/campos_512_v4
+105/539804/campos_512_v4
+105/539806/campos_512_v4
+105/539815/campos_512_v4
+105/539827/campos_512_v4
+105/539837/campos_512_v4
+105/539851/campos_512_v4
+105/539853/campos_512_v4
+105/539858/campos_512_v4
+105/539860/campos_512_v4
+105/539862/campos_512_v4
+105/539864/campos_512_v4
+105/539871/campos_512_v4
+105/539877/campos_512_v4
+105/539881/campos_512_v4
+105/539882/campos_512_v4
+105/539891/campos_512_v4
+105/539899/campos_512_v4
+105/539907/campos_512_v4
+105/539911/campos_512_v4
+105/539918/campos_512_v4
+105/539935/campos_512_v4
+105/539941/campos_512_v4
+105/539952/campos_512_v4
+105/539961/campos_512_v4
+105/539965/campos_512_v4
+105/539978/campos_512_v4
+105/539981/campos_512_v4
+105/539982/campos_512_v4
+105/539989/campos_512_v4
+105/539993/campos_512_v4
+105/539995/campos_512_v4
+105/539996/campos_512_v4
+105/539997/campos_512_v4
+106/540014/campos_512_v4
+106/540017/campos_512_v4
+106/540035/campos_512_v4
+106/540065/campos_512_v4
+106/540072/campos_512_v4
+106/540096/campos_512_v4
+106/540100/campos_512_v4
+106/540110/campos_512_v4
+106/540132/campos_512_v4
+106/540134/campos_512_v4
+106/540141/campos_512_v4
+106/540143/campos_512_v4
+106/540146/campos_512_v4
+106/540153/campos_512_v4
+106/540156/campos_512_v4
+106/540159/campos_512_v4
+106/540164/campos_512_v4
+106/540169/campos_512_v4
+106/540173/campos_512_v4
+106/540188/campos_512_v4
+106/540194/campos_512_v4
+106/540218/campos_512_v4
+106/540219/campos_512_v4
+106/540227/campos_512_v4
+106/540232/campos_512_v4
+106/540233/campos_512_v4
+106/540248/campos_512_v4
+106/540264/campos_512_v4
+106/540275/campos_512_v4
+106/540279/campos_512_v4
+106/540282/campos_512_v4
+106/540297/campos_512_v4
+106/540301/campos_512_v4
+106/540314/campos_512_v4
+106/540327/campos_512_v4
+106/540332/campos_512_v4
+106/540345/campos_512_v4
+106/540347/campos_512_v4
+106/540348/campos_512_v4
+106/540356/campos_512_v4
+106/540357/campos_512_v4
+106/540420/campos_512_v4
+106/540442/campos_512_v4
+106/540444/campos_512_v4
+106/540453/campos_512_v4
+106/540455/campos_512_v4
+106/540458/campos_512_v4
+106/540461/campos_512_v4
+106/540464/campos_512_v4
+106/540480/campos_512_v4
+106/540486/campos_512_v4
+106/540501/campos_512_v4
+106/540523/campos_512_v4
+106/540557/campos_512_v4
+106/540566/campos_512_v4
+106/540573/campos_512_v4
+106/540575/campos_512_v4
+106/540577/campos_512_v4
+106/540590/campos_512_v4
+106/540600/campos_512_v4
+106/540608/campos_512_v4
+106/540612/campos_512_v4
+106/540613/campos_512_v4
+106/540620/campos_512_v4
+106/540624/campos_512_v4
+106/540646/campos_512_v4
+106/540652/campos_512_v4
+106/540658/campos_512_v4
+106/540659/campos_512_v4
+106/540668/campos_512_v4
+106/540678/campos_512_v4
+106/540680/campos_512_v4
+106/540681/campos_512_v4
+106/540696/campos_512_v4
+106/540701/campos_512_v4
+106/540709/campos_512_v4
+106/540723/campos_512_v4
+106/540728/campos_512_v4
+106/540729/campos_512_v4
+106/540733/campos_512_v4
+106/540734/campos_512_v4
+106/540739/campos_512_v4
+106/540743/campos_512_v4
+106/540757/campos_512_v4
+106/540768/campos_512_v4
+106/540773/campos_512_v4
+106/540791/campos_512_v4
+106/540795/campos_512_v4
+106/540806/campos_512_v4
+106/540810/campos_512_v4
+106/540815/campos_512_v4
+106/540819/campos_512_v4
+106/540823/campos_512_v4
+106/540844/campos_512_v4
+106/540858/campos_512_v4
+106/540859/campos_512_v4
+106/540871/campos_512_v4
+106/540872/campos_512_v4
+106/540881/campos_512_v4
+106/540901/campos_512_v4
+106/540905/campos_512_v4
+106/540919/campos_512_v4
+106/540936/campos_512_v4
+106/540952/campos_512_v4
+106/540957/campos_512_v4
+106/540960/campos_512_v4
+106/540980/campos_512_v4
+106/540991/campos_512_v4
+106/540994/campos_512_v4
+106/540998/campos_512_v4
+106/541005/campos_512_v4
+106/541022/campos_512_v4
+106/541028/campos_512_v4
+106/541034/campos_512_v4
+106/541036/campos_512_v4
+106/541053/campos_512_v4
+106/541062/campos_512_v4
+106/541065/campos_512_v4
+106/541075/campos_512_v4
+106/541088/campos_512_v4
+106/541090/campos_512_v4
+106/541094/campos_512_v4
+106/541097/campos_512_v4
+106/541103/campos_512_v4
+106/541119/campos_512_v4
+106/541126/campos_512_v4
+106/541129/campos_512_v4
+106/541130/campos_512_v4
+106/541131/campos_512_v4
+106/541138/campos_512_v4
+106/541159/campos_512_v4
+106/541173/campos_512_v4
+106/541175/campos_512_v4
+106/541180/campos_512_v4
+106/541184/campos_512_v4
+106/541188/campos_512_v4
+106/541223/campos_512_v4
+106/541232/campos_512_v4
+106/541274/campos_512_v4
+106/541280/campos_512_v4
+106/541283/campos_512_v4
+106/541293/campos_512_v4
+106/541296/campos_512_v4
+106/541299/campos_512_v4
+106/541300/campos_512_v4
+106/541305/campos_512_v4
+106/541315/campos_512_v4
+106/541326/campos_512_v4
+106/541338/campos_512_v4
+106/541342/campos_512_v4
+106/541347/campos_512_v4
+106/541355/campos_512_v4
+106/541358/campos_512_v4
+106/541370/campos_512_v4
+106/541374/campos_512_v4
+106/541383/campos_512_v4
+106/541389/campos_512_v4
+106/541409/campos_512_v4
+106/541428/campos_512_v4
+106/541433/campos_512_v4
+106/541438/campos_512_v4
+106/541445/campos_512_v4
+106/541447/campos_512_v4
+106/541466/campos_512_v4
+106/541469/campos_512_v4
+106/541475/campos_512_v4
+106/541511/campos_512_v4
+106/541513/campos_512_v4
+106/541517/campos_512_v4
+106/541520/campos_512_v4
+106/541531/campos_512_v4
+106/541535/campos_512_v4
+106/541556/campos_512_v4
+106/541561/campos_512_v4
+106/541562/campos_512_v4
+106/541564/campos_512_v4
+106/541572/campos_512_v4
+106/541594/campos_512_v4
+106/541599/campos_512_v4
+106/541603/campos_512_v4
+106/541607/campos_512_v4
+106/541614/campos_512_v4
+106/541624/campos_512_v4
+106/541626/campos_512_v4
+106/541630/campos_512_v4
+106/541642/campos_512_v4
+106/541658/campos_512_v4
+106/541671/campos_512_v4
+106/541674/campos_512_v4
+106/541676/campos_512_v4
+106/541690/campos_512_v4
+106/541692/campos_512_v4
+106/541695/campos_512_v4
+106/541700/campos_512_v4
+106/541705/campos_512_v4
+106/541713/campos_512_v4
+106/541724/campos_512_v4
+106/541725/campos_512_v4
+106/541730/campos_512_v4
+106/541736/campos_512_v4
+106/541741/campos_512_v4
+106/541747/campos_512_v4
+106/541777/campos_512_v4
+106/541798/campos_512_v4
+106/541826/campos_512_v4
+106/541828/campos_512_v4
+106/541831/campos_512_v4
+106/541850/campos_512_v4
+106/541855/campos_512_v4
+106/541858/campos_512_v4
+106/541867/campos_512_v4
+106/541874/campos_512_v4
+106/541883/campos_512_v4
+106/541896/campos_512_v4
+106/541911/campos_512_v4
+106/541918/campos_512_v4
+106/541925/campos_512_v4
+106/541927/campos_512_v4
+106/541938/campos_512_v4
+106/541947/campos_512_v4
+106/541980/campos_512_v4
+106/541983/campos_512_v4
+106/541985/campos_512_v4
+106/541988/campos_512_v4
+106/541991/campos_512_v4
+106/542001/campos_512_v4
+106/542003/campos_512_v4
+106/542019/campos_512_v4
+106/542025/campos_512_v4
+106/542035/campos_512_v4
+106/542039/campos_512_v4
+106/542051/campos_512_v4
+106/542053/campos_512_v4
+106/542055/campos_512_v4
+106/542056/campos_512_v4
+106/542058/campos_512_v4
+106/542073/campos_512_v4
+106/542082/campos_512_v4
+106/542093/campos_512_v4
+106/542095/campos_512_v4
+106/542101/campos_512_v4
+106/542116/campos_512_v4
+106/542128/campos_512_v4
+106/542130/campos_512_v4
+106/542134/campos_512_v4
+106/542137/campos_512_v4
+106/542144/campos_512_v4
+106/542147/campos_512_v4
+106/542151/campos_512_v4
+106/542158/campos_512_v4
+106/542160/campos_512_v4
+106/542173/campos_512_v4
+106/542177/campos_512_v4
+106/542194/campos_512_v4
+106/542213/campos_512_v4
+106/542236/campos_512_v4
+106/542242/campos_512_v4
+106/542249/campos_512_v4
+106/542252/campos_512_v4
+106/542253/campos_512_v4
+106/542263/campos_512_v4
+106/542265/campos_512_v4
+106/542267/campos_512_v4
+106/542270/campos_512_v4
+106/542277/campos_512_v4
+106/542286/campos_512_v4
+106/542287/campos_512_v4
+106/542290/campos_512_v4
+106/542291/campos_512_v4
+106/542292/campos_512_v4
+106/542298/campos_512_v4
+106/542320/campos_512_v4
+106/542323/campos_512_v4
+106/542334/campos_512_v4
+106/542342/campos_512_v4
+106/542347/campos_512_v4
+106/542359/campos_512_v4
+106/542363/campos_512_v4
+106/542367/campos_512_v4
+106/542368/campos_512_v4
+106/542372/campos_512_v4
+106/542376/campos_512_v4
+106/542383/campos_512_v4
+106/542384/campos_512_v4
+106/542385/campos_512_v4
+106/542395/campos_512_v4
+106/542398/campos_512_v4
+106/542407/campos_512_v4
+106/542409/campos_512_v4
+106/542414/campos_512_v4
+106/542421/campos_512_v4
+106/542429/campos_512_v4
+106/542442/campos_512_v4
+106/542445/campos_512_v4
+106/542446/campos_512_v4
+106/542447/campos_512_v4
+106/542455/campos_512_v4
+106/542459/campos_512_v4
+106/542485/campos_512_v4
+106/542504/campos_512_v4
+106/542516/campos_512_v4
+106/542521/campos_512_v4
+106/542525/campos_512_v4
+106/542559/campos_512_v4
+106/542560/campos_512_v4
+106/542562/campos_512_v4
+106/542576/campos_512_v4
+106/542594/campos_512_v4
+106/542596/campos_512_v4
+106/542598/campos_512_v4
+106/542605/campos_512_v4
+106/542609/campos_512_v4
+106/542610/campos_512_v4
+106/542625/campos_512_v4
+106/542629/campos_512_v4
+106/542638/campos_512_v4
+106/542646/campos_512_v4
+106/542647/campos_512_v4
+106/542650/campos_512_v4
+106/542651/campos_512_v4
+106/542670/campos_512_v4
+106/542675/campos_512_v4
+106/542681/campos_512_v4
+106/542687/campos_512_v4
+106/542708/campos_512_v4
+106/542711/campos_512_v4
+106/542714/campos_512_v4
+106/542718/campos_512_v4
+106/542720/campos_512_v4
+106/542723/campos_512_v4
+106/542729/campos_512_v4
+106/542737/campos_512_v4
+106/542754/campos_512_v4
+106/542760/campos_512_v4
+106/542766/campos_512_v4
+106/542777/campos_512_v4
+106/542786/campos_512_v4
+106/542787/campos_512_v4
+106/542796/campos_512_v4
+106/542809/campos_512_v4
+106/542829/campos_512_v4
+106/542840/campos_512_v4
+106/542842/campos_512_v4
+106/542858/campos_512_v4
+106/542860/campos_512_v4
+106/542863/campos_512_v4
+106/542866/campos_512_v4
+106/542874/campos_512_v4
+106/542879/campos_512_v4
+106/542885/campos_512_v4
+106/542892/campos_512_v4
+106/542896/campos_512_v4
+106/542897/campos_512_v4
+106/542904/campos_512_v4
+106/542931/campos_512_v4
+106/542942/campos_512_v4
+106/542949/campos_512_v4
+106/542970/campos_512_v4
+106/542993/campos_512_v4
+106/542998/campos_512_v4
+106/542999/campos_512_v4
+106/543000/campos_512_v4
+106/543003/campos_512_v4
+106/543005/campos_512_v4
+106/543007/campos_512_v4
+106/543008/campos_512_v4
+106/543010/campos_512_v4
+106/543011/campos_512_v4
+106/543030/campos_512_v4
+106/543032/campos_512_v4
+106/543037/campos_512_v4
+106/543048/campos_512_v4
+106/543049/campos_512_v4
+106/543055/campos_512_v4
+106/543060/campos_512_v4
+106/543064/campos_512_v4
+106/543076/campos_512_v4
+106/543100/campos_512_v4
+106/543105/campos_512_v4
+106/543110/campos_512_v4
+106/543117/campos_512_v4
+106/543121/campos_512_v4
+106/543126/campos_512_v4
+106/543154/campos_512_v4
+106/543157/campos_512_v4
+106/543166/campos_512_v4
+106/543173/campos_512_v4
+106/543197/campos_512_v4
+106/543208/campos_512_v4
+106/543212/campos_512_v4
+106/543214/campos_512_v4
+106/543215/campos_512_v4
+106/543229/campos_512_v4
+106/543245/campos_512_v4
+106/543253/campos_512_v4
+106/543259/campos_512_v4
+106/543266/campos_512_v4
+106/543269/campos_512_v4
+106/543285/campos_512_v4
+106/543288/campos_512_v4
+106/543290/campos_512_v4
+106/543291/campos_512_v4
+106/543304/campos_512_v4
+106/543311/campos_512_v4
+106/543313/campos_512_v4
+106/543330/campos_512_v4
+106/543332/campos_512_v4
+106/543334/campos_512_v4
+106/543335/campos_512_v4
+106/543337/campos_512_v4
+106/543338/campos_512_v4
+106/543341/campos_512_v4
+106/543349/campos_512_v4
+106/543350/campos_512_v4
+106/543351/campos_512_v4
+106/543359/campos_512_v4
+106/543361/campos_512_v4
+106/543372/campos_512_v4
+106/543385/campos_512_v4
+106/543392/campos_512_v4
+106/543393/campos_512_v4
+106/543400/campos_512_v4
+106/543405/campos_512_v4
+106/543423/campos_512_v4
+106/543424/campos_512_v4
+106/543429/campos_512_v4
+106/543430/campos_512_v4
+106/543433/campos_512_v4
+106/543441/campos_512_v4
+106/543442/campos_512_v4
+106/543443/campos_512_v4
+106/543444/campos_512_v4
+106/543446/campos_512_v4
+106/543455/campos_512_v4
+106/543462/campos_512_v4
+106/543463/campos_512_v4
+106/543466/campos_512_v4
+106/543467/campos_512_v4
+106/543471/campos_512_v4
+106/543474/campos_512_v4
+106/543477/campos_512_v4
+106/543489/campos_512_v4
+106/543499/campos_512_v4
+106/543500/campos_512_v4
+106/543502/campos_512_v4
+106/543520/campos_512_v4
+106/543532/campos_512_v4
+106/543550/campos_512_v4
+106/543562/campos_512_v4
+106/543573/campos_512_v4
+106/543576/campos_512_v4
+106/543580/campos_512_v4
+106/543613/campos_512_v4
+106/543634/campos_512_v4
+106/543638/campos_512_v4
+106/543642/campos_512_v4
+106/543649/campos_512_v4
+106/543665/campos_512_v4
+106/543671/campos_512_v4
+106/543689/campos_512_v4
+106/543713/campos_512_v4
+106/543717/campos_512_v4
+106/543722/campos_512_v4
+106/543725/campos_512_v4
+106/543729/campos_512_v4
+106/543739/campos_512_v4
+106/543741/campos_512_v4
+106/543747/campos_512_v4
+106/543752/campos_512_v4
+106/543759/campos_512_v4
+106/543764/campos_512_v4
+106/543767/campos_512_v4
+106/543773/campos_512_v4
+106/543777/campos_512_v4
+106/543778/campos_512_v4
+106/543779/campos_512_v4
+106/543783/campos_512_v4
+106/543791/campos_512_v4
+106/543799/campos_512_v4
+106/543810/campos_512_v4
+106/543811/campos_512_v4
+106/543818/campos_512_v4
+106/543825/campos_512_v4
+106/543832/campos_512_v4
+106/543841/campos_512_v4
+106/543852/campos_512_v4
+106/543854/campos_512_v4
+106/543859/campos_512_v4
+106/543877/campos_512_v4
+106/543878/campos_512_v4
+106/543890/campos_512_v4
+106/543906/campos_512_v4
+106/543910/campos_512_v4
+106/543911/campos_512_v4
+106/543913/campos_512_v4
+106/543918/campos_512_v4
+106/543926/campos_512_v4
+106/543929/campos_512_v4
+106/543936/campos_512_v4
+106/543939/campos_512_v4
+106/543980/campos_512_v4
+106/543982/campos_512_v4
+106/543988/campos_512_v4
+106/543994/campos_512_v4
+106/544008/campos_512_v4
+106/544030/campos_512_v4
+106/544031/campos_512_v4
+106/544039/campos_512_v4
+106/544049/campos_512_v4
+106/544050/campos_512_v4
+106/544053/campos_512_v4
+106/544062/campos_512_v4
+106/544066/campos_512_v4
+106/544068/campos_512_v4
+106/544069/campos_512_v4
+106/544080/campos_512_v4
+106/544088/campos_512_v4
+106/544091/campos_512_v4
+106/544099/campos_512_v4
+106/544105/campos_512_v4
+106/544107/campos_512_v4
+106/544122/campos_512_v4
+106/544124/campos_512_v4
+106/544129/campos_512_v4
+106/544140/campos_512_v4
+106/544144/campos_512_v4
+106/544148/campos_512_v4
+106/544152/campos_512_v4
+106/544177/campos_512_v4
+106/544180/campos_512_v4
+106/544182/campos_512_v4
+106/544186/campos_512_v4
+106/544196/campos_512_v4
+106/544205/campos_512_v4
+106/544224/campos_512_v4
+106/544227/campos_512_v4
+106/544229/campos_512_v4
+106/544231/campos_512_v4
+106/544234/campos_512_v4
+106/544239/campos_512_v4
+106/544242/campos_512_v4
+106/544245/campos_512_v4
+106/544258/campos_512_v4
+106/544275/campos_512_v4
+106/544278/campos_512_v4
+106/544285/campos_512_v4
+106/544298/campos_512_v4
+106/544302/campos_512_v4
+106/544305/campos_512_v4
+106/544315/campos_512_v4
+106/544327/campos_512_v4
+106/544336/campos_512_v4
+106/544354/campos_512_v4
+106/544356/campos_512_v4
+106/544367/campos_512_v4
+106/544382/campos_512_v4
+106/544389/campos_512_v4
+106/544391/campos_512_v4
+106/544392/campos_512_v4
+106/544405/campos_512_v4
+106/544408/campos_512_v4
+106/544417/campos_512_v4
+106/544418/campos_512_v4
+106/544422/campos_512_v4
+106/544450/campos_512_v4
+106/544458/campos_512_v4
+106/544461/campos_512_v4
+106/544477/campos_512_v4
+106/544480/campos_512_v4
+106/544481/campos_512_v4
+106/544482/campos_512_v4
+106/544486/campos_512_v4
+106/544489/campos_512_v4
+106/544501/campos_512_v4
+106/544507/campos_512_v4
+106/544509/campos_512_v4
+106/544515/campos_512_v4
+106/544517/campos_512_v4
+106/544522/campos_512_v4
+106/544525/campos_512_v4
+106/544533/campos_512_v4
+106/544536/campos_512_v4
+106/544544/campos_512_v4
+106/544545/campos_512_v4
+106/544552/campos_512_v4
+106/544557/campos_512_v4
+106/544565/campos_512_v4
+106/544569/campos_512_v4
+106/544573/campos_512_v4
+106/544576/campos_512_v4
+106/544597/campos_512_v4
+106/544602/campos_512_v4
+106/544603/campos_512_v4
+106/544611/campos_512_v4
+106/544617/campos_512_v4
+106/544647/campos_512_v4
+106/544650/campos_512_v4
+106/544663/campos_512_v4
+106/544675/campos_512_v4
+106/544684/campos_512_v4
+106/544688/campos_512_v4
+106/544690/campos_512_v4
+106/544697/campos_512_v4
+106/544698/campos_512_v4
+106/544700/campos_512_v4
+106/544711/campos_512_v4
+106/544712/campos_512_v4
+106/544716/campos_512_v4
+106/544723/campos_512_v4
+106/544730/campos_512_v4
+106/544733/campos_512_v4
+106/544736/campos_512_v4
+106/544744/campos_512_v4
+106/544752/campos_512_v4
+106/544762/campos_512_v4
+106/544776/campos_512_v4
+106/544784/campos_512_v4
+106/544785/campos_512_v4
+106/544787/campos_512_v4
+106/544789/campos_512_v4
+106/544792/campos_512_v4
+106/544806/campos_512_v4
+106/544808/campos_512_v4
+106/544816/campos_512_v4
+106/544822/campos_512_v4
+106/544831/campos_512_v4
+106/544832/campos_512_v4
+106/544844/campos_512_v4
+106/544847/campos_512_v4
+106/544858/campos_512_v4
+106/544860/campos_512_v4
+106/544861/campos_512_v4
+106/544863/campos_512_v4
+106/544874/campos_512_v4
+106/544880/campos_512_v4
+106/544895/campos_512_v4
+106/544897/campos_512_v4
+106/544899/campos_512_v4
+106/544900/campos_512_v4
+106/544902/campos_512_v4
+106/544908/campos_512_v4
+106/544917/campos_512_v4
+106/544921/campos_512_v4
+106/544943/campos_512_v4
+106/544944/campos_512_v4
+106/544951/campos_512_v4
+106/544952/campos_512_v4
+106/544956/campos_512_v4
+106/544970/campos_512_v4
+106/544993/campos_512_v4
+106/544994/campos_512_v4
+106/544998/campos_512_v4
+106/545001/campos_512_v4
+107/545020/campos_512_v4
+107/545031/campos_512_v4
+107/545037/campos_512_v4
+107/545043/campos_512_v4
+107/545055/campos_512_v4
+107/545059/campos_512_v4
+107/545062/campos_512_v4
+107/545068/campos_512_v4
+107/545086/campos_512_v4
+107/545096/campos_512_v4
+107/545104/campos_512_v4
+107/545109/campos_512_v4
+107/545110/campos_512_v4
+107/545119/campos_512_v4
+107/545124/campos_512_v4
+107/545129/campos_512_v4
+107/545133/campos_512_v4
+107/545135/campos_512_v4
+107/545137/campos_512_v4
+107/545146/campos_512_v4
+107/545148/campos_512_v4
+107/545161/campos_512_v4
+107/545165/campos_512_v4
+107/545170/campos_512_v4
+107/545179/campos_512_v4
+107/545180/campos_512_v4
+107/545186/campos_512_v4
+107/545199/campos_512_v4
+107/545205/campos_512_v4
+107/545210/campos_512_v4
+107/545214/campos_512_v4
+107/545220/campos_512_v4
+107/545221/campos_512_v4
+107/545224/campos_512_v4
+107/545237/campos_512_v4
+107/545242/campos_512_v4
+107/545247/campos_512_v4
+107/545253/campos_512_v4
+107/545260/campos_512_v4
+107/545264/campos_512_v4
+107/545265/campos_512_v4
+107/545266/campos_512_v4
+107/545273/campos_512_v4
+107/545274/campos_512_v4
+107/545284/campos_512_v4
+107/545288/campos_512_v4
+107/545297/campos_512_v4
+107/545299/campos_512_v4
+107/545327/campos_512_v4
+107/545340/campos_512_v4
+107/545342/campos_512_v4
+107/545347/campos_512_v4
+107/545355/campos_512_v4
+107/545361/campos_512_v4
+107/545376/campos_512_v4
+107/545377/campos_512_v4
+107/545378/campos_512_v4
+107/545381/campos_512_v4
+107/545405/campos_512_v4
+107/545410/campos_512_v4
+107/545411/campos_512_v4
+107/545413/campos_512_v4
+107/545418/campos_512_v4
+107/545422/campos_512_v4
+107/545432/campos_512_v4
+107/545444/campos_512_v4
+107/545453/campos_512_v4
+107/545467/campos_512_v4
+107/545474/campos_512_v4
+107/545478/campos_512_v4
+107/545487/campos_512_v4
+107/545497/campos_512_v4
+107/545503/campos_512_v4
+107/545516/campos_512_v4
+107/545524/campos_512_v4
+107/545526/campos_512_v4
+107/545530/campos_512_v4
+107/545537/campos_512_v4
+107/545540/campos_512_v4
+107/545564/campos_512_v4
+107/545573/campos_512_v4
+107/545593/campos_512_v4
+107/545594/campos_512_v4
+107/545595/campos_512_v4
+107/545596/campos_512_v4
+107/545611/campos_512_v4
+107/545615/campos_512_v4
+107/545620/campos_512_v4
+107/545643/campos_512_v4
+107/545649/campos_512_v4
+107/545652/campos_512_v4
+107/545653/campos_512_v4
+107/545654/campos_512_v4
+107/545666/campos_512_v4
+107/545675/campos_512_v4
+107/545676/campos_512_v4
+107/545701/campos_512_v4
+107/545704/campos_512_v4
+107/545706/campos_512_v4
+107/545717/campos_512_v4
+107/545723/campos_512_v4
+107/545724/campos_512_v4
+107/545732/campos_512_v4
+107/545735/campos_512_v4
+107/545737/campos_512_v4
+107/545741/campos_512_v4
+107/545742/campos_512_v4
+107/545746/campos_512_v4
+107/545750/campos_512_v4
+107/545752/campos_512_v4
+107/545756/campos_512_v4
+107/545761/campos_512_v4
+107/545763/campos_512_v4
+107/545767/campos_512_v4
+107/545771/campos_512_v4
+107/545776/campos_512_v4
+107/545787/campos_512_v4
+107/545789/campos_512_v4
+107/545798/campos_512_v4
+107/545804/campos_512_v4
+107/545825/campos_512_v4
+107/545832/campos_512_v4
+107/545841/campos_512_v4
+107/545843/campos_512_v4
+107/545854/campos_512_v4
+107/545857/campos_512_v4
+107/545859/campos_512_v4
+107/545863/campos_512_v4
+107/545870/campos_512_v4
+107/545878/campos_512_v4
+107/545883/campos_512_v4
+107/545906/campos_512_v4
+107/545912/campos_512_v4
+107/545916/campos_512_v4
+107/545935/campos_512_v4
+107/545940/campos_512_v4
+107/545946/campos_512_v4
+107/545948/campos_512_v4
+107/545952/campos_512_v4
+107/545954/campos_512_v4
+107/545957/campos_512_v4
+107/545966/campos_512_v4
+107/545981/campos_512_v4
+107/545990/campos_512_v4
+107/546003/campos_512_v4
+107/546005/campos_512_v4
+107/546006/campos_512_v4
+107/546027/campos_512_v4
+107/546054/campos_512_v4
+107/546061/campos_512_v4
+107/546082/campos_512_v4
+107/546089/campos_512_v4
+107/546091/campos_512_v4
+107/546092/campos_512_v4
+107/546095/campos_512_v4
+107/546105/campos_512_v4
+107/546106/campos_512_v4
+107/546107/campos_512_v4
+107/546119/campos_512_v4
+107/546133/campos_512_v4
+107/546160/campos_512_v4
+107/546162/campos_512_v4
+107/546179/campos_512_v4
+107/546181/campos_512_v4
+107/546182/campos_512_v4
+107/546205/campos_512_v4
+107/546209/campos_512_v4
+107/546213/campos_512_v4
+107/546238/campos_512_v4
+107/546240/campos_512_v4
+107/546264/campos_512_v4
+107/546290/campos_512_v4
+107/546295/campos_512_v4
+107/546304/campos_512_v4
+107/546308/campos_512_v4
+107/546312/campos_512_v4
+107/546320/campos_512_v4
+107/546336/campos_512_v4
+107/546350/campos_512_v4
+107/546356/campos_512_v4
+107/546359/campos_512_v4
+107/546368/campos_512_v4
+107/546379/campos_512_v4
+107/546385/campos_512_v4
+107/546387/campos_512_v4
+107/546406/campos_512_v4
+107/546408/campos_512_v4
+107/546425/campos_512_v4
+107/546429/campos_512_v4
+107/546432/campos_512_v4
+107/546437/campos_512_v4
+107/546439/campos_512_v4
+107/546440/campos_512_v4
+107/546442/campos_512_v4
+107/546448/campos_512_v4
+107/546449/campos_512_v4
+107/546463/campos_512_v4
+107/546467/campos_512_v4
+107/546468/campos_512_v4
+107/546470/campos_512_v4
+107/546475/campos_512_v4
+107/546477/campos_512_v4
+107/546480/campos_512_v4
+107/546490/campos_512_v4
+107/546494/campos_512_v4
+107/546495/campos_512_v4
+107/546518/campos_512_v4
+107/546522/campos_512_v4
+107/546538/campos_512_v4
+107/546559/campos_512_v4
+107/546575/campos_512_v4
+107/546576/campos_512_v4
+107/546586/campos_512_v4
+107/546606/campos_512_v4
+107/546624/campos_512_v4
+107/546630/campos_512_v4
+107/546635/campos_512_v4
+107/546637/campos_512_v4
+107/546652/campos_512_v4
+107/546653/campos_512_v4
+107/546665/campos_512_v4
+107/546672/campos_512_v4
+107/546675/campos_512_v4
+107/546678/campos_512_v4
+107/546709/campos_512_v4
+107/546712/campos_512_v4
+107/546714/campos_512_v4
+107/546719/campos_512_v4
+107/546720/campos_512_v4
+107/546727/campos_512_v4
+107/546728/campos_512_v4
+107/546735/campos_512_v4
+107/546740/campos_512_v4
+107/546752/campos_512_v4
+107/546758/campos_512_v4
+107/546761/campos_512_v4
+107/546762/campos_512_v4
+107/546765/campos_512_v4
+107/546768/campos_512_v4
+107/546785/campos_512_v4
+107/546786/campos_512_v4
+107/546802/campos_512_v4
+107/546807/campos_512_v4
+107/546808/campos_512_v4
+107/546811/campos_512_v4
+107/546815/campos_512_v4
+107/546820/campos_512_v4
+107/546826/campos_512_v4
+107/546834/campos_512_v4
+107/546840/campos_512_v4
+107/546846/campos_512_v4
+107/546851/campos_512_v4
+107/546853/campos_512_v4
+107/546854/campos_512_v4
+107/546857/campos_512_v4
+107/546860/campos_512_v4
+107/546881/campos_512_v4
+107/546882/campos_512_v4
+107/546883/campos_512_v4
+107/546887/campos_512_v4
+107/546932/campos_512_v4
+107/546937/campos_512_v4
+107/546938/campos_512_v4
+107/546942/campos_512_v4
+107/546947/campos_512_v4
+107/546968/campos_512_v4
+107/546980/campos_512_v4
+107/546990/campos_512_v4
+107/547011/campos_512_v4
+107/547020/campos_512_v4
+107/547040/campos_512_v4
+107/547062/campos_512_v4
+107/547072/campos_512_v4
+107/547079/campos_512_v4
+107/547088/campos_512_v4
+107/547092/campos_512_v4
+107/547094/campos_512_v4
+107/547101/campos_512_v4
+107/547103/campos_512_v4
+107/547108/campos_512_v4
+107/547112/campos_512_v4
+107/547113/campos_512_v4
+107/547117/campos_512_v4
+107/547124/campos_512_v4
+107/547136/campos_512_v4
+107/547137/campos_512_v4
+107/547148/campos_512_v4
+107/547163/campos_512_v4
+107/547166/campos_512_v4
+107/547169/campos_512_v4
+107/547170/campos_512_v4
+107/547180/campos_512_v4
+107/547191/campos_512_v4
+107/547197/campos_512_v4
+107/547216/campos_512_v4
+107/547218/campos_512_v4
+107/547233/campos_512_v4
+107/547235/campos_512_v4
+107/547236/campos_512_v4
+107/547255/campos_512_v4
+107/547256/campos_512_v4
+107/547273/campos_512_v4
+107/547276/campos_512_v4
+107/547292/campos_512_v4
+107/547314/campos_512_v4
+107/547315/campos_512_v4
+107/547316/campos_512_v4
+107/547322/campos_512_v4
+107/547326/campos_512_v4
+107/547328/campos_512_v4
+107/547334/campos_512_v4
+107/547336/campos_512_v4
+107/547338/campos_512_v4
+107/547342/campos_512_v4
+107/547346/campos_512_v4
+107/547350/campos_512_v4
+107/547372/campos_512_v4
+107/547374/campos_512_v4
+107/547384/campos_512_v4
+107/547389/campos_512_v4
+107/547398/campos_512_v4
+107/547413/campos_512_v4
+107/547417/campos_512_v4
+107/547427/campos_512_v4
+107/547430/campos_512_v4
+107/547440/campos_512_v4
+107/547448/campos_512_v4
+107/547454/campos_512_v4
+107/547526/campos_512_v4
+107/547530/campos_512_v4
+107/547571/campos_512_v4
+107/547576/campos_512_v4
+107/547583/campos_512_v4
+107/547585/campos_512_v4
+107/547609/campos_512_v4
+107/547611/campos_512_v4
+107/547616/campos_512_v4
+107/547625/campos_512_v4
+107/547626/campos_512_v4
+107/547627/campos_512_v4
+107/547635/campos_512_v4
+107/547638/campos_512_v4
+107/547645/campos_512_v4
+107/547648/campos_512_v4
+107/547659/campos_512_v4
+107/547665/campos_512_v4
+107/547667/campos_512_v4
+107/547686/campos_512_v4
+107/547688/campos_512_v4
+107/547700/campos_512_v4
+107/547701/campos_512_v4
+107/547706/campos_512_v4
+107/547713/campos_512_v4
+107/547720/campos_512_v4
+107/547734/campos_512_v4
+107/547735/campos_512_v4
+107/547749/campos_512_v4
+107/547750/campos_512_v4
+107/547751/campos_512_v4
+107/547757/campos_512_v4
+107/547762/campos_512_v4
+107/547763/campos_512_v4
+107/547780/campos_512_v4
+107/547785/campos_512_v4
+107/547807/campos_512_v4
+107/547827/campos_512_v4
+107/547837/campos_512_v4
+107/547849/campos_512_v4
+107/547851/campos_512_v4
+107/547857/campos_512_v4
+107/547859/campos_512_v4
+107/547862/campos_512_v4
+107/547893/campos_512_v4
+107/547898/campos_512_v4
+107/547903/campos_512_v4
+107/547909/campos_512_v4
+107/547920/campos_512_v4
+107/547924/campos_512_v4
+107/547951/campos_512_v4
+107/547959/campos_512_v4
+107/547960/campos_512_v4
+107/547969/campos_512_v4
+107/547972/campos_512_v4
+107/547988/campos_512_v4
+107/547993/campos_512_v4
+107/548003/campos_512_v4
+107/548007/campos_512_v4
+107/548010/campos_512_v4
+107/548039/campos_512_v4
+107/548058/campos_512_v4
+107/548065/campos_512_v4
+107/548076/campos_512_v4
+107/548081/campos_512_v4
+107/548088/campos_512_v4
+107/548094/campos_512_v4
+107/548103/campos_512_v4
+107/548122/campos_512_v4
+107/548124/campos_512_v4
+107/548130/campos_512_v4
+107/548136/campos_512_v4
+107/548137/campos_512_v4
+107/548156/campos_512_v4
+107/548172/campos_512_v4
+107/548179/campos_512_v4
+107/548201/campos_512_v4
+107/548210/campos_512_v4
+107/548217/campos_512_v4
+107/548225/campos_512_v4
+107/548227/campos_512_v4
+107/548233/campos_512_v4
+107/548240/campos_512_v4
+107/548243/campos_512_v4
+107/548248/campos_512_v4
+107/548265/campos_512_v4
+107/548269/campos_512_v4
+107/548279/campos_512_v4
+107/548294/campos_512_v4
+107/548324/campos_512_v4
+107/548325/campos_512_v4
+107/548363/campos_512_v4
+107/548366/campos_512_v4
+107/548374/campos_512_v4
+107/548379/campos_512_v4
+107/548381/campos_512_v4
+107/548382/campos_512_v4
+107/548386/campos_512_v4
+107/548387/campos_512_v4
+107/548409/campos_512_v4
+107/548410/campos_512_v4
+107/548411/campos_512_v4
+107/548415/campos_512_v4
+107/548416/campos_512_v4
+107/548418/campos_512_v4
+107/548427/campos_512_v4
+107/548433/campos_512_v4
+107/548436/campos_512_v4
+107/548443/campos_512_v4
+107/548451/campos_512_v4
+107/548465/campos_512_v4
+107/548469/campos_512_v4
+107/548473/campos_512_v4
+107/548475/campos_512_v4
+107/548479/campos_512_v4
+107/548485/campos_512_v4
+107/548488/campos_512_v4
+107/548504/campos_512_v4
+107/548506/campos_512_v4
+107/548520/campos_512_v4
+107/548532/campos_512_v4
+107/548533/campos_512_v4
+107/548545/campos_512_v4
+107/548550/campos_512_v4
+107/548560/campos_512_v4
+107/548561/campos_512_v4
+107/548568/campos_512_v4
+107/548570/campos_512_v4
+107/548571/campos_512_v4
+107/548573/campos_512_v4
+107/548581/campos_512_v4
+107/548591/campos_512_v4
+107/548602/campos_512_v4
+107/548603/campos_512_v4
+107/548606/campos_512_v4
+107/548608/campos_512_v4
+107/548610/campos_512_v4
+107/548612/campos_512_v4
+107/548621/campos_512_v4
+107/548623/campos_512_v4
+107/548626/campos_512_v4
+107/548645/campos_512_v4
+107/548660/campos_512_v4
+107/548673/campos_512_v4
+107/548675/campos_512_v4
+107/548677/campos_512_v4
+107/548685/campos_512_v4
+107/548687/campos_512_v4
+107/548721/campos_512_v4
+107/548723/campos_512_v4
+107/548731/campos_512_v4
+107/548747/campos_512_v4
+107/548753/campos_512_v4
+107/548779/campos_512_v4
+107/548789/campos_512_v4
+107/548797/campos_512_v4
+107/548806/campos_512_v4
+107/548808/campos_512_v4
+107/548812/campos_512_v4
+107/548815/campos_512_v4
+107/548820/campos_512_v4
+107/548830/campos_512_v4
+107/548841/campos_512_v4
+107/548844/campos_512_v4
+107/548855/campos_512_v4
+107/548877/campos_512_v4
+107/548892/campos_512_v4
+107/548894/campos_512_v4
+107/548904/campos_512_v4
+107/548905/campos_512_v4
+107/548906/campos_512_v4
+107/548907/campos_512_v4
+107/548909/campos_512_v4
+107/548914/campos_512_v4
+107/548925/campos_512_v4
+107/548930/campos_512_v4
+107/548940/campos_512_v4
+107/548943/campos_512_v4
+107/548945/campos_512_v4
+107/548960/campos_512_v4
+107/548966/campos_512_v4
+107/548967/campos_512_v4
+107/549001/campos_512_v4
+107/549002/campos_512_v4
+107/549014/campos_512_v4
+107/549026/campos_512_v4
+107/549031/campos_512_v4
+107/549036/campos_512_v4
+107/549042/campos_512_v4
+107/549043/campos_512_v4
+107/549047/campos_512_v4
+107/549051/campos_512_v4
+107/549052/campos_512_v4
+107/549054/campos_512_v4
+107/549055/campos_512_v4
+107/549066/campos_512_v4
+107/549071/campos_512_v4
+107/549081/campos_512_v4
+107/549084/campos_512_v4
+107/549085/campos_512_v4
+107/549086/campos_512_v4
+107/549090/campos_512_v4
+107/549094/campos_512_v4
+107/549099/campos_512_v4
+107/549102/campos_512_v4
+107/549105/campos_512_v4
+107/549108/campos_512_v4
+107/549113/campos_512_v4
+107/549125/campos_512_v4
+107/549157/campos_512_v4
+107/549167/campos_512_v4
+107/549179/campos_512_v4
+107/549183/campos_512_v4
+107/549188/campos_512_v4
+107/549195/campos_512_v4
+107/549202/campos_512_v4
+107/549205/campos_512_v4
+107/549208/campos_512_v4
+107/549216/campos_512_v4
+107/549251/campos_512_v4
+107/549261/campos_512_v4
+107/549268/campos_512_v4
+107/549297/campos_512_v4
+107/549328/campos_512_v4
+107/549342/campos_512_v4
+107/549346/campos_512_v4
+107/549356/campos_512_v4
+107/549362/campos_512_v4
+107/549380/campos_512_v4
+107/549386/campos_512_v4
+107/549394/campos_512_v4
+107/549405/campos_512_v4
+107/549408/campos_512_v4
+107/549419/campos_512_v4
+107/549421/campos_512_v4
+107/549423/campos_512_v4
+107/549437/campos_512_v4
+107/549438/campos_512_v4
+107/549444/campos_512_v4
+107/549445/campos_512_v4
+107/549457/campos_512_v4
+107/549462/campos_512_v4
+107/549482/campos_512_v4
+107/549497/campos_512_v4
+107/549512/campos_512_v4
+107/549517/campos_512_v4
+107/549530/campos_512_v4
+107/549533/campos_512_v4
+107/549544/campos_512_v4
+107/549545/campos_512_v4
+107/549546/campos_512_v4
+107/549553/campos_512_v4
+107/549556/campos_512_v4
+107/549569/campos_512_v4
+107/549571/campos_512_v4
+107/549584/campos_512_v4
+107/549591/campos_512_v4
+107/549593/campos_512_v4
+107/549597/campos_512_v4
+107/549608/campos_512_v4
+107/549615/campos_512_v4
+107/549617/campos_512_v4
+107/549626/campos_512_v4
+107/549650/campos_512_v4
+107/549663/campos_512_v4
+107/549669/campos_512_v4
+107/549678/campos_512_v4
+107/549680/campos_512_v4
+107/549682/campos_512_v4
+107/549685/campos_512_v4
+107/549689/campos_512_v4
+107/549696/campos_512_v4
+107/549710/campos_512_v4
+107/549712/campos_512_v4
+107/549729/campos_512_v4
+107/549733/campos_512_v4
+107/549745/campos_512_v4
+107/549748/campos_512_v4
+107/549754/campos_512_v4
+107/549760/campos_512_v4
+107/549763/campos_512_v4
+107/549771/campos_512_v4
+107/549784/campos_512_v4
+107/549785/campos_512_v4
+107/549797/campos_512_v4
+107/549800/campos_512_v4
+107/549803/campos_512_v4
+107/549805/campos_512_v4
+107/549812/campos_512_v4
+107/549815/campos_512_v4
+107/549819/campos_512_v4
+107/549831/campos_512_v4
+107/549851/campos_512_v4
+107/549855/campos_512_v4
+107/549861/campos_512_v4
+107/549864/campos_512_v4
+107/549866/campos_512_v4
+107/549878/campos_512_v4
+107/549882/campos_512_v4
+107/549888/campos_512_v4
+107/549895/campos_512_v4
+107/549902/campos_512_v4
+107/549905/campos_512_v4
+107/549908/campos_512_v4
+107/549909/campos_512_v4
+107/549913/campos_512_v4
+107/549915/campos_512_v4
+107/549919/campos_512_v4
+107/549920/campos_512_v4
+107/549921/campos_512_v4
+107/549924/campos_512_v4
+107/549926/campos_512_v4
+107/549952/campos_512_v4
+107/549962/campos_512_v4
+107/549965/campos_512_v4
+107/549976/campos_512_v4
+107/549980/campos_512_v4
+107/549983/campos_512_v4
+107/549984/campos_512_v4
+107/549987/campos_512_v4
+107/549988/campos_512_v4
+107/549990/campos_512_v4
+107/549992/campos_512_v4
+107/549996/campos_512_v4
+108/550005/campos_512_v4
+108/550006/campos_512_v4
+108/550025/campos_512_v4
+108/550038/campos_512_v4
+108/550040/campos_512_v4
+108/550043/campos_512_v4
+108/550046/campos_512_v4
+108/550052/campos_512_v4
+108/550053/campos_512_v4
+108/550076/campos_512_v4
+108/550078/campos_512_v4
+108/550101/campos_512_v4
+108/550130/campos_512_v4
+108/550135/campos_512_v4
+108/550141/campos_512_v4
+108/550143/campos_512_v4
+108/550146/campos_512_v4
+108/550151/campos_512_v4
+108/550157/campos_512_v4
+108/550158/campos_512_v4
+108/550166/campos_512_v4
+108/550170/campos_512_v4
+108/550173/campos_512_v4
+108/550179/campos_512_v4
+108/550181/campos_512_v4
+108/550207/campos_512_v4
+108/550209/campos_512_v4
+108/550214/campos_512_v4
+108/550218/campos_512_v4
+108/550229/campos_512_v4
+108/550232/campos_512_v4
+108/550233/campos_512_v4
+108/550255/campos_512_v4
+108/550260/campos_512_v4
+108/550275/campos_512_v4
+108/550281/campos_512_v4
+108/550298/campos_512_v4
+108/550299/campos_512_v4
+108/550300/campos_512_v4
+108/550306/campos_512_v4
+108/550331/campos_512_v4
+108/550335/campos_512_v4
+108/550340/campos_512_v4
+108/550355/campos_512_v4
+108/550356/campos_512_v4
+108/550366/campos_512_v4
+108/550369/campos_512_v4
+108/550372/campos_512_v4
+108/550382/campos_512_v4
+108/550390/campos_512_v4
+108/550394/campos_512_v4
+108/550397/campos_512_v4
+108/550408/campos_512_v4
+108/550449/campos_512_v4
+108/550461/campos_512_v4
+108/550467/campos_512_v4
+108/550468/campos_512_v4
+108/550472/campos_512_v4
+108/550479/campos_512_v4
+108/550494/campos_512_v4
+108/550503/campos_512_v4
+108/550512/campos_512_v4
+108/550531/campos_512_v4
+108/550535/campos_512_v4
+108/550546/campos_512_v4
+108/550558/campos_512_v4
+108/550559/campos_512_v4
+108/550576/campos_512_v4
+108/550578/campos_512_v4
+108/550580/campos_512_v4
+108/550588/campos_512_v4
+108/550589/campos_512_v4
+108/550590/campos_512_v4
+108/550594/campos_512_v4
+108/550600/campos_512_v4
+108/550616/campos_512_v4
+108/550629/campos_512_v4
+108/550634/campos_512_v4
+108/550643/campos_512_v4
+108/550644/campos_512_v4
+108/550645/campos_512_v4
+108/550655/campos_512_v4
+108/550660/campos_512_v4
+108/550677/campos_512_v4
+108/550688/campos_512_v4
+108/550714/campos_512_v4
+108/550715/campos_512_v4
+108/550720/campos_512_v4
+108/550722/campos_512_v4
+108/550723/campos_512_v4
+108/550744/campos_512_v4
+108/550774/campos_512_v4
+108/550783/campos_512_v4
+108/550785/campos_512_v4
+108/550789/campos_512_v4
+108/550792/campos_512_v4
+108/550808/campos_512_v4
+108/550814/campos_512_v4
+108/550829/campos_512_v4
+108/550870/campos_512_v4
+108/550883/campos_512_v4
+108/550898/campos_512_v4
+108/550916/campos_512_v4
+108/550939/campos_512_v4
+108/550942/campos_512_v4
+108/550953/campos_512_v4
+108/551006/campos_512_v4
+108/551045/campos_512_v4
+108/551058/campos_512_v4
+108/551059/campos_512_v4
+108/551062/campos_512_v4
+108/551069/campos_512_v4
+108/551070/campos_512_v4
+108/551089/campos_512_v4
+108/551093/campos_512_v4
+108/551096/campos_512_v4
+108/551110/campos_512_v4
+108/551119/campos_512_v4
+108/551121/campos_512_v4
+108/551124/campos_512_v4
+108/551129/campos_512_v4
+108/551138/campos_512_v4
+108/551141/campos_512_v4
+108/551154/campos_512_v4
+108/551186/campos_512_v4
+108/551193/campos_512_v4
+108/551202/campos_512_v4
+108/551203/campos_512_v4
+108/551204/campos_512_v4
+108/551212/campos_512_v4
+108/551226/campos_512_v4
+108/551237/campos_512_v4
+108/551245/campos_512_v4
+108/551251/campos_512_v4
+108/551261/campos_512_v4
+108/551265/campos_512_v4
+108/551273/campos_512_v4
+108/551275/campos_512_v4
+108/551277/campos_512_v4
+108/551298/campos_512_v4
+108/551299/campos_512_v4
+108/551300/campos_512_v4
+108/551303/campos_512_v4
+108/551304/campos_512_v4
+108/551322/campos_512_v4
+108/551330/campos_512_v4
+108/551341/campos_512_v4
+108/551344/campos_512_v4
+108/551345/campos_512_v4
+108/551348/campos_512_v4
+108/551350/campos_512_v4
+108/551352/campos_512_v4
+108/551369/campos_512_v4
+108/551370/campos_512_v4
+108/551376/campos_512_v4
+108/551379/campos_512_v4
+108/551392/campos_512_v4
+108/551395/campos_512_v4
+108/551402/campos_512_v4
+108/551406/campos_512_v4
+108/551412/campos_512_v4
+108/551425/campos_512_v4
+108/551426/campos_512_v4
+108/551430/campos_512_v4
+108/551435/campos_512_v4
+108/551438/campos_512_v4
+108/551439/campos_512_v4
+108/551444/campos_512_v4
+108/551446/campos_512_v4
+108/551456/campos_512_v4
+108/551528/campos_512_v4
+108/551541/campos_512_v4
+108/551544/campos_512_v4
+108/551553/campos_512_v4
+108/551565/campos_512_v4
+108/551570/campos_512_v4
+108/551577/campos_512_v4
+108/551588/campos_512_v4
+108/551590/campos_512_v4
+108/551592/campos_512_v4
+108/551604/campos_512_v4
+108/551610/campos_512_v4
+108/551616/campos_512_v4
+108/551622/campos_512_v4
+108/551628/campos_512_v4
+108/551629/campos_512_v4
+108/551632/campos_512_v4
+108/551633/campos_512_v4
+108/551645/campos_512_v4
+108/551646/campos_512_v4
+108/551650/campos_512_v4
+108/551656/campos_512_v4
+108/551661/campos_512_v4
+108/551664/campos_512_v4
+108/551668/campos_512_v4
+108/551672/campos_512_v4
+108/551674/campos_512_v4
+108/551693/campos_512_v4
+108/551694/campos_512_v4
+108/551696/campos_512_v4
+108/551698/campos_512_v4
+108/551703/campos_512_v4
+108/551704/campos_512_v4
+108/551706/campos_512_v4
+108/551709/campos_512_v4
+108/551717/campos_512_v4
+108/551719/campos_512_v4
+108/551724/campos_512_v4
+108/551729/campos_512_v4
+108/551739/campos_512_v4
+108/551779/campos_512_v4
+108/551781/campos_512_v4
+108/551786/campos_512_v4
+108/551787/campos_512_v4
+108/551792/campos_512_v4
+108/551819/campos_512_v4
+108/551823/campos_512_v4
+108/551829/campos_512_v4
+108/551834/campos_512_v4
+108/551835/campos_512_v4
+108/551843/campos_512_v4
+108/551854/campos_512_v4
+108/551856/campos_512_v4
+108/551857/campos_512_v4
+108/551865/campos_512_v4
+108/551868/campos_512_v4
+108/551875/campos_512_v4
+108/551884/campos_512_v4
+108/551892/campos_512_v4
+108/551897/campos_512_v4
+108/551901/campos_512_v4
+108/551904/campos_512_v4
+108/551906/campos_512_v4
+108/551908/campos_512_v4
+108/551912/campos_512_v4
+108/551916/campos_512_v4
+108/551920/campos_512_v4
+108/551924/campos_512_v4
+108/551942/campos_512_v4
+108/551943/campos_512_v4
+108/551945/campos_512_v4
+108/551953/campos_512_v4
+108/551956/campos_512_v4
+108/551969/campos_512_v4
+108/551984/campos_512_v4
+108/551991/campos_512_v4
+108/551993/campos_512_v4
+108/552000/campos_512_v4
+108/552018/campos_512_v4
+108/552036/campos_512_v4
+108/552037/campos_512_v4
+108/552041/campos_512_v4
+108/552049/campos_512_v4
+108/552050/campos_512_v4
+108/552054/campos_512_v4
+108/552076/campos_512_v4
+108/552086/campos_512_v4
+108/552096/campos_512_v4
+108/552099/campos_512_v4
+108/552101/campos_512_v4
+108/552112/campos_512_v4
+108/552115/campos_512_v4
+108/552122/campos_512_v4
+108/552135/campos_512_v4
+108/552138/campos_512_v4
+108/552154/campos_512_v4
+108/552161/campos_512_v4
+108/552167/campos_512_v4
+108/552173/campos_512_v4
+108/552187/campos_512_v4
+108/552191/campos_512_v4
+108/552197/campos_512_v4
+108/552231/campos_512_v4
+108/552238/campos_512_v4
+108/552242/campos_512_v4
+108/552247/campos_512_v4
+108/552249/campos_512_v4
+108/552258/campos_512_v4
+108/552271/campos_512_v4
+108/552272/campos_512_v4
+108/552278/campos_512_v4
+108/552280/campos_512_v4
+108/552324/campos_512_v4
+108/552328/campos_512_v4
+108/552338/campos_512_v4
+108/552344/campos_512_v4
+108/552348/campos_512_v4
+108/552352/campos_512_v4
+108/552358/campos_512_v4
+108/552359/campos_512_v4
+108/552363/campos_512_v4
+108/552364/campos_512_v4
+108/552370/campos_512_v4
+108/552372/campos_512_v4
+108/552382/campos_512_v4
+108/552391/campos_512_v4
+108/552394/campos_512_v4
+108/552395/campos_512_v4
+108/552398/campos_512_v4
+108/552400/campos_512_v4
+108/552404/campos_512_v4
+108/552405/campos_512_v4
+108/552411/campos_512_v4
+108/552433/campos_512_v4
+108/552439/campos_512_v4
+108/552440/campos_512_v4
+108/552442/campos_512_v4
+108/552460/campos_512_v4
+108/552466/campos_512_v4
+108/552468/campos_512_v4
+108/552472/campos_512_v4
+108/552486/campos_512_v4
+108/552489/campos_512_v4
+108/552498/campos_512_v4
+108/552505/campos_512_v4
+108/552533/campos_512_v4
+108/552541/campos_512_v4
+108/552542/campos_512_v4
+108/552545/campos_512_v4
+108/552559/campos_512_v4
+108/552560/campos_512_v4
+108/552562/campos_512_v4
+108/552564/campos_512_v4
+108/552600/campos_512_v4
+108/552605/campos_512_v4
+108/552624/campos_512_v4
+108/552625/campos_512_v4
+108/552641/campos_512_v4
+108/552644/campos_512_v4
+108/552653/campos_512_v4
+108/552654/campos_512_v4
+108/552655/campos_512_v4
+108/552667/campos_512_v4
+108/552676/campos_512_v4
+108/552695/campos_512_v4
+108/552701/campos_512_v4
+108/552707/campos_512_v4
+108/552712/campos_512_v4
+108/552717/campos_512_v4
+108/552729/campos_512_v4
+108/552733/campos_512_v4
+108/552751/campos_512_v4
+108/552762/campos_512_v4
+108/552772/campos_512_v4
+108/552796/campos_512_v4
+108/552818/campos_512_v4
+108/552827/campos_512_v4
+108/552843/campos_512_v4
+108/552845/campos_512_v4
+108/552848/campos_512_v4
+108/552852/campos_512_v4
+108/552855/campos_512_v4
+108/552857/campos_512_v4
+108/552866/campos_512_v4
+108/552870/campos_512_v4
+108/552890/campos_512_v4
+108/552914/campos_512_v4
+108/552924/campos_512_v4
+108/552934/campos_512_v4
+108/552954/campos_512_v4
+108/552959/campos_512_v4
+108/552965/campos_512_v4
+108/552970/campos_512_v4
+108/552984/campos_512_v4
+108/552989/campos_512_v4
+108/552997/campos_512_v4
+108/553002/campos_512_v4
+108/553017/campos_512_v4
+108/553022/campos_512_v4
+108/553027/campos_512_v4
+108/553039/campos_512_v4
+108/553045/campos_512_v4
+108/553061/campos_512_v4
+108/553082/campos_512_v4
+108/553097/campos_512_v4
+108/553098/campos_512_v4
+108/553105/campos_512_v4
+108/553114/campos_512_v4
+108/553116/campos_512_v4
+108/553121/campos_512_v4
+108/553130/campos_512_v4
+108/553134/campos_512_v4
+108/553138/campos_512_v4
+108/553140/campos_512_v4
+108/553143/campos_512_v4
+108/553144/campos_512_v4
+108/553150/campos_512_v4
+108/553170/campos_512_v4
+108/553181/campos_512_v4
+108/553182/campos_512_v4
+108/553201/campos_512_v4
+108/553202/campos_512_v4
+108/553203/campos_512_v4
+108/553210/campos_512_v4
+108/553211/campos_512_v4
+108/553213/campos_512_v4
+108/553217/campos_512_v4
+108/553219/campos_512_v4
+108/553228/campos_512_v4
+108/553233/campos_512_v4
+108/553259/campos_512_v4
+108/553262/campos_512_v4
+108/553273/campos_512_v4
+108/553300/campos_512_v4
+108/553304/campos_512_v4
+108/553317/campos_512_v4
+108/553320/campos_512_v4
+108/553358/campos_512_v4
+108/553367/campos_512_v4
+108/553379/campos_512_v4
+108/553383/campos_512_v4
+108/553407/campos_512_v4
+108/553408/campos_512_v4
+108/553412/campos_512_v4
+108/553413/campos_512_v4
+108/553415/campos_512_v4
+108/553424/campos_512_v4
+108/553438/campos_512_v4
+108/553445/campos_512_v4
+108/553446/campos_512_v4
+108/553453/campos_512_v4
+108/553456/campos_512_v4
+108/553457/campos_512_v4
+108/553463/campos_512_v4
+108/553468/campos_512_v4
+108/553475/campos_512_v4
+108/553491/campos_512_v4
+108/553495/campos_512_v4
+108/553505/campos_512_v4
+108/553507/campos_512_v4
+108/553509/campos_512_v4
+108/553515/campos_512_v4
+108/553521/campos_512_v4
+108/553532/campos_512_v4
+108/553537/campos_512_v4
+108/553556/campos_512_v4
+108/553606/campos_512_v4
+108/553621/campos_512_v4
+108/553625/campos_512_v4
+108/553636/campos_512_v4
+108/553639/campos_512_v4
+108/553672/campos_512_v4
+108/553677/campos_512_v4
+108/553682/campos_512_v4
+108/553686/campos_512_v4
+108/553687/campos_512_v4
+108/553696/campos_512_v4
+108/553699/campos_512_v4
+108/553703/campos_512_v4
+108/553710/campos_512_v4
+108/553714/campos_512_v4
+108/553715/campos_512_v4
+108/553717/campos_512_v4
+108/553729/campos_512_v4
+108/553734/campos_512_v4
+108/553737/campos_512_v4
+108/553740/campos_512_v4
+108/553751/campos_512_v4
+108/553752/campos_512_v4
+108/553756/campos_512_v4
+108/553763/campos_512_v4
+108/553764/campos_512_v4
+108/553772/campos_512_v4
+108/553793/campos_512_v4
+108/553795/campos_512_v4
+108/553799/campos_512_v4
+108/553804/campos_512_v4
+108/553810/campos_512_v4
+108/553812/campos_512_v4
+108/553814/campos_512_v4
+108/553822/campos_512_v4
+108/553826/campos_512_v4
+108/553840/campos_512_v4
+108/553846/campos_512_v4
+108/553847/campos_512_v4
+108/553849/campos_512_v4
+108/553852/campos_512_v4
+108/553865/campos_512_v4
+108/553874/campos_512_v4
+108/553889/campos_512_v4
+108/553897/campos_512_v4
+108/553904/campos_512_v4
+108/553910/campos_512_v4
+108/553912/campos_512_v4
+108/553914/campos_512_v4
+108/553938/campos_512_v4
+108/553940/campos_512_v4
+108/553942/campos_512_v4
+108/553966/campos_512_v4
+108/553972/campos_512_v4
+108/553980/campos_512_v4
+108/553981/campos_512_v4
+108/553985/campos_512_v4
+108/553994/campos_512_v4
+108/553995/campos_512_v4
+108/553998/campos_512_v4
+108/554011/campos_512_v4
+108/554012/campos_512_v4
+108/554021/campos_512_v4
+108/554024/campos_512_v4
+108/554039/campos_512_v4
+108/554043/campos_512_v4
+108/554046/campos_512_v4
+108/554058/campos_512_v4
+108/554065/campos_512_v4
+108/554071/campos_512_v4
+108/554079/campos_512_v4
+108/554088/campos_512_v4
+108/554091/campos_512_v4
+108/554094/campos_512_v4
+108/554096/campos_512_v4
+108/554099/campos_512_v4
+108/554100/campos_512_v4
+108/554101/campos_512_v4
+108/554109/campos_512_v4
+108/554114/campos_512_v4
+108/554127/campos_512_v4
+108/554132/campos_512_v4
+108/554140/campos_512_v4
+108/554143/campos_512_v4
+108/554183/campos_512_v4
+108/554188/campos_512_v4
+108/554198/campos_512_v4
+108/554213/campos_512_v4
+108/554226/campos_512_v4
+108/554231/campos_512_v4
+108/554236/campos_512_v4
+108/554241/campos_512_v4
+108/554243/campos_512_v4
+108/554244/campos_512_v4
+108/554249/campos_512_v4
+108/554253/campos_512_v4
+108/554257/campos_512_v4
+108/554276/campos_512_v4
+108/554297/campos_512_v4
+108/554298/campos_512_v4
+108/554300/campos_512_v4
+108/554338/campos_512_v4
+108/554342/campos_512_v4
+108/554344/campos_512_v4
+108/554353/campos_512_v4
+108/554356/campos_512_v4
+108/554361/campos_512_v4
+108/554371/campos_512_v4
+108/554387/campos_512_v4
+108/554400/campos_512_v4
+108/554413/campos_512_v4
+108/554420/campos_512_v4
+108/554423/campos_512_v4
+108/554467/campos_512_v4
+108/554486/campos_512_v4
+108/554496/campos_512_v4
+108/554510/campos_512_v4
+108/554518/campos_512_v4
+108/554526/campos_512_v4
+108/554545/campos_512_v4
+108/554556/campos_512_v4
+108/554557/campos_512_v4
+108/554558/campos_512_v4
+108/554578/campos_512_v4
+108/554589/campos_512_v4
+108/554590/campos_512_v4
+108/554594/campos_512_v4
+108/554601/campos_512_v4
+108/554610/campos_512_v4
+108/554621/campos_512_v4
+108/554627/campos_512_v4
+108/554642/campos_512_v4
+108/554654/campos_512_v4
+108/554655/campos_512_v4
+108/554656/campos_512_v4
+108/554662/campos_512_v4
+108/554670/campos_512_v4
+108/554671/campos_512_v4
+108/554684/campos_512_v4
+108/554686/campos_512_v4
+108/554687/campos_512_v4
+108/554689/campos_512_v4
+108/554693/campos_512_v4
+108/554712/campos_512_v4
+108/554724/campos_512_v4
+108/554725/campos_512_v4
+108/554726/campos_512_v4
+108/554732/campos_512_v4
+108/554739/campos_512_v4
+108/554745/campos_512_v4
+108/554746/campos_512_v4
+108/554747/campos_512_v4
+108/554767/campos_512_v4
+108/554773/campos_512_v4
+108/554796/campos_512_v4
+108/554806/campos_512_v4
+108/554813/campos_512_v4
+108/554826/campos_512_v4
+108/554844/campos_512_v4
+108/554845/campos_512_v4
+108/554853/campos_512_v4
+108/554864/campos_512_v4
+108/554865/campos_512_v4
+108/554874/campos_512_v4
+108/554876/campos_512_v4
+108/554889/campos_512_v4
+108/554899/campos_512_v4
+108/554901/campos_512_v4
+108/554908/campos_512_v4
+108/554912/campos_512_v4
+108/554930/campos_512_v4
+108/554947/campos_512_v4
+108/554952/campos_512_v4
+108/554953/campos_512_v4
+108/554955/campos_512_v4
+108/554963/campos_512_v4
+108/554972/campos_512_v4
+108/554982/campos_512_v4
+108/554988/campos_512_v4
+108/554992/campos_512_v4
+108/554994/campos_512_v4
+109/555009/campos_512_v4
+109/555025/campos_512_v4
+109/555037/campos_512_v4
+109/555044/campos_512_v4
+109/555046/campos_512_v4
+109/555053/campos_512_v4
+109/555054/campos_512_v4
+109/555057/campos_512_v4
+109/555058/campos_512_v4
+109/555061/campos_512_v4
+109/555071/campos_512_v4
+109/555072/campos_512_v4
+109/555074/campos_512_v4
+109/555083/campos_512_v4
+109/555085/campos_512_v4
+109/555114/campos_512_v4
+109/555132/campos_512_v4
+109/555136/campos_512_v4
+109/555153/campos_512_v4
+109/555155/campos_512_v4
+109/555157/campos_512_v4
+109/555160/campos_512_v4
+109/555163/campos_512_v4
+109/555179/campos_512_v4
+109/555182/campos_512_v4
+109/555186/campos_512_v4
+109/555188/campos_512_v4
+109/555203/campos_512_v4
+109/555206/campos_512_v4
+109/555208/campos_512_v4
+109/555210/campos_512_v4
+109/555211/campos_512_v4
+109/555212/campos_512_v4
+109/555214/campos_512_v4
+109/555215/campos_512_v4
+109/555223/campos_512_v4
+109/555227/campos_512_v4
+109/555251/campos_512_v4
+109/555257/campos_512_v4
+109/555269/campos_512_v4
+109/555288/campos_512_v4
+109/555293/campos_512_v4
+109/555314/campos_512_v4
+109/555330/campos_512_v4
+109/555333/campos_512_v4
+109/555341/campos_512_v4
+109/555346/campos_512_v4
+109/555353/campos_512_v4
+109/555358/campos_512_v4
+109/555370/campos_512_v4
+109/555374/campos_512_v4
+109/555377/campos_512_v4
+109/555394/campos_512_v4
+109/555401/campos_512_v4
+109/555417/campos_512_v4
+109/555418/campos_512_v4
+109/555438/campos_512_v4
+109/555459/campos_512_v4
+109/555461/campos_512_v4
+109/555462/campos_512_v4
+109/555465/campos_512_v4
+109/555467/campos_512_v4
+109/555479/campos_512_v4
+109/555489/campos_512_v4
+109/555490/campos_512_v4
+109/555495/campos_512_v4
+109/555506/campos_512_v4
+109/555509/campos_512_v4
+109/555510/campos_512_v4
+109/555513/campos_512_v4
+109/555514/campos_512_v4
+109/555528/campos_512_v4
+109/555535/campos_512_v4
+109/555550/campos_512_v4
+109/555567/campos_512_v4
+109/555569/campos_512_v4
+109/555577/campos_512_v4
+109/555583/campos_512_v4
+109/555585/campos_512_v4
+109/555587/campos_512_v4
+109/555589/campos_512_v4
+109/555608/campos_512_v4
+109/555617/campos_512_v4
+109/555620/campos_512_v4
+109/555630/campos_512_v4
+109/555649/campos_512_v4
+109/555651/campos_512_v4
+109/555652/campos_512_v4
+109/555653/campos_512_v4
+109/555656/campos_512_v4
+109/555657/campos_512_v4
+109/555658/campos_512_v4
+109/555663/campos_512_v4
+109/555668/campos_512_v4
+109/555669/campos_512_v4
+109/555670/campos_512_v4
+109/555671/campos_512_v4
+109/555675/campos_512_v4
+109/555676/campos_512_v4
+109/555678/campos_512_v4
+109/555682/campos_512_v4
+109/555687/campos_512_v4
+109/555690/campos_512_v4
+109/555693/campos_512_v4
+109/555695/campos_512_v4
+109/555707/campos_512_v4
+109/555719/campos_512_v4
+109/555728/campos_512_v4
+109/555733/campos_512_v4
+109/555735/campos_512_v4
+109/555737/campos_512_v4
+109/555738/campos_512_v4
+109/555748/campos_512_v4
+109/555749/campos_512_v4
+109/555751/campos_512_v4
+109/555760/campos_512_v4
+109/555764/campos_512_v4
+109/555768/campos_512_v4
+109/555769/campos_512_v4
+109/555772/campos_512_v4
+109/555774/campos_512_v4
+109/555783/campos_512_v4
+109/555789/campos_512_v4
+109/555792/campos_512_v4
+109/555793/campos_512_v4
+109/555796/campos_512_v4
+109/555805/campos_512_v4
+109/555813/campos_512_v4
+109/555824/campos_512_v4
+109/555834/campos_512_v4
+109/555846/campos_512_v4
+109/555850/campos_512_v4
+109/555861/campos_512_v4
+109/555862/campos_512_v4
+109/555863/campos_512_v4
+109/555866/campos_512_v4
+109/555870/campos_512_v4
+109/555877/campos_512_v4
+109/555891/campos_512_v4
+109/555894/campos_512_v4
+109/555897/campos_512_v4
+109/555899/campos_512_v4
+109/555921/campos_512_v4
+109/555952/campos_512_v4
+109/555960/campos_512_v4
+109/555961/campos_512_v4
+109/555962/campos_512_v4
+109/555963/campos_512_v4
+109/555964/campos_512_v4
+109/555975/campos_512_v4
+109/555982/campos_512_v4
+109/556005/campos_512_v4
+109/556021/campos_512_v4
+109/556023/campos_512_v4
+109/556033/campos_512_v4
+109/556035/campos_512_v4
+109/556062/campos_512_v4
+109/556067/campos_512_v4
+109/556069/campos_512_v4
+109/556079/campos_512_v4
+109/556080/campos_512_v4
+109/556082/campos_512_v4
+109/556093/campos_512_v4
+109/556100/campos_512_v4
+109/556102/campos_512_v4
+109/556109/campos_512_v4
+109/556119/campos_512_v4
+109/556126/campos_512_v4
+109/556127/campos_512_v4
+109/556130/campos_512_v4
+109/556145/campos_512_v4
+109/556150/campos_512_v4
+109/556162/campos_512_v4
+109/556175/campos_512_v4
+109/556177/campos_512_v4
+109/556184/campos_512_v4
+109/556206/campos_512_v4
+109/556207/campos_512_v4
+109/556208/campos_512_v4
+109/556213/campos_512_v4
+109/556221/campos_512_v4
+109/556232/campos_512_v4
+109/556237/campos_512_v4
+109/556240/campos_512_v4
+109/556241/campos_512_v4
+109/556242/campos_512_v4
+109/556243/campos_512_v4
+109/556255/campos_512_v4
+109/556275/campos_512_v4
+109/556282/campos_512_v4
+109/556287/campos_512_v4
+109/556289/campos_512_v4
+109/556318/campos_512_v4
+109/556323/campos_512_v4
+109/556328/campos_512_v4
+109/556330/campos_512_v4
+109/556337/campos_512_v4
+109/556346/campos_512_v4
+109/556352/campos_512_v4
+109/556353/campos_512_v4
+109/556365/campos_512_v4
+109/556375/campos_512_v4
+109/556387/campos_512_v4
+109/556391/campos_512_v4
+109/556395/campos_512_v4
+109/556405/campos_512_v4
+109/556417/campos_512_v4
+109/556431/campos_512_v4
+109/556434/campos_512_v4
+109/556436/campos_512_v4
+109/556439/campos_512_v4
+109/556450/campos_512_v4
+109/556498/campos_512_v4
+109/556502/campos_512_v4
+109/556511/campos_512_v4
+109/556517/campos_512_v4
+109/556530/campos_512_v4
+109/556577/campos_512_v4
+109/556588/campos_512_v4
+109/556589/campos_512_v4
+109/556596/campos_512_v4
+109/556599/campos_512_v4
+109/556601/campos_512_v4
+109/556605/campos_512_v4
+109/556623/campos_512_v4
+109/556634/campos_512_v4
+109/556644/campos_512_v4
+109/556654/campos_512_v4
+109/556660/campos_512_v4
+109/556664/campos_512_v4
+109/556672/campos_512_v4
+109/556674/campos_512_v4
+109/556678/campos_512_v4
+109/556679/campos_512_v4
+109/556683/campos_512_v4
+109/556684/campos_512_v4
+109/556686/campos_512_v4
+109/556691/campos_512_v4
+109/556699/campos_512_v4
+109/556703/campos_512_v4
+109/556706/campos_512_v4
+109/556710/campos_512_v4
+109/556714/campos_512_v4
+109/556717/campos_512_v4
+109/556719/campos_512_v4
+109/556729/campos_512_v4
+109/556730/campos_512_v4
+109/556738/campos_512_v4
+109/556743/campos_512_v4
+109/556747/campos_512_v4
+109/556750/campos_512_v4
+109/556751/campos_512_v4
+109/556753/campos_512_v4
+109/556754/campos_512_v4
+109/556761/campos_512_v4
+109/556769/campos_512_v4
+109/556772/campos_512_v4
+109/556792/campos_512_v4
+109/556793/campos_512_v4
+109/556809/campos_512_v4
+109/556814/campos_512_v4
+109/556822/campos_512_v4
+109/556828/campos_512_v4
+109/556830/campos_512_v4
+109/556831/campos_512_v4
+109/556836/campos_512_v4
+109/556837/campos_512_v4
+109/556856/campos_512_v4
+109/556858/campos_512_v4
+109/556873/campos_512_v4
+109/556884/campos_512_v4
+109/556899/campos_512_v4
+109/556901/campos_512_v4
+109/556909/campos_512_v4
+109/556910/campos_512_v4
+109/556926/campos_512_v4
+109/556928/campos_512_v4
+109/556940/campos_512_v4
+109/556948/campos_512_v4
+109/556951/campos_512_v4
+109/556956/campos_512_v4
+109/556966/campos_512_v4
+109/556971/campos_512_v4
+109/556977/campos_512_v4
+109/556991/campos_512_v4
+109/557015/campos_512_v4
+109/557019/campos_512_v4
+109/557028/campos_512_v4
+109/557052/campos_512_v4
+109/557075/campos_512_v4
+109/557094/campos_512_v4
+109/557095/campos_512_v4
+109/557098/campos_512_v4
+109/557112/campos_512_v4
+109/557121/campos_512_v4
+109/557127/campos_512_v4
+109/557134/campos_512_v4
+109/557136/campos_512_v4
+109/557149/campos_512_v4
+109/557155/campos_512_v4
+109/557156/campos_512_v4
+109/557157/campos_512_v4
+109/557162/campos_512_v4
+109/557171/campos_512_v4
+109/557174/campos_512_v4
+109/557176/campos_512_v4
+109/557177/campos_512_v4
+109/557180/campos_512_v4
+109/557183/campos_512_v4
+109/557184/campos_512_v4
+109/557187/campos_512_v4
+109/557196/campos_512_v4
+109/557197/campos_512_v4
+109/557200/campos_512_v4
+109/557202/campos_512_v4
+109/557211/campos_512_v4
+109/557217/campos_512_v4
+109/557227/campos_512_v4
+109/557238/campos_512_v4
+109/557250/campos_512_v4
+109/557263/campos_512_v4
+109/557266/campos_512_v4
+109/557268/campos_512_v4
+109/557271/campos_512_v4
+109/557279/campos_512_v4
+109/557310/campos_512_v4
+109/557326/campos_512_v4
+109/557335/campos_512_v4
+109/557343/campos_512_v4
+109/557349/campos_512_v4
+109/557350/campos_512_v4
+109/557351/campos_512_v4
+109/557360/campos_512_v4
+109/557374/campos_512_v4
+109/557376/campos_512_v4
+109/557384/campos_512_v4
+109/557392/campos_512_v4
+109/557393/campos_512_v4
+109/557404/campos_512_v4
+109/557418/campos_512_v4
+109/557420/campos_512_v4
+109/557425/campos_512_v4
+109/557430/campos_512_v4
+109/557477/campos_512_v4
+109/557482/campos_512_v4
+109/557486/campos_512_v4
+109/557492/campos_512_v4
+109/557495/campos_512_v4
+109/557497/campos_512_v4
+109/557499/campos_512_v4
+109/557500/campos_512_v4
+109/557503/campos_512_v4
+109/557515/campos_512_v4
+109/557527/campos_512_v4
+109/557553/campos_512_v4
+109/557569/campos_512_v4
+109/557578/campos_512_v4
+109/557581/campos_512_v4
+109/557591/campos_512_v4
+109/557599/campos_512_v4
+109/557623/campos_512_v4
+109/557629/campos_512_v4
+109/557632/campos_512_v4
+109/557642/campos_512_v4
+109/557648/campos_512_v4
+109/557653/campos_512_v4
+109/557686/campos_512_v4
+109/557696/campos_512_v4
+109/557718/campos_512_v4
+109/557723/campos_512_v4
+109/557726/campos_512_v4
+109/557735/campos_512_v4
+109/557742/campos_512_v4
+109/557759/campos_512_v4
+109/557761/campos_512_v4
+109/557771/campos_512_v4
+109/557772/campos_512_v4
+109/557775/campos_512_v4
+109/557780/campos_512_v4
+109/557792/campos_512_v4
+109/557798/campos_512_v4
+109/557801/campos_512_v4
+109/557802/campos_512_v4
+109/557806/campos_512_v4
+109/557807/campos_512_v4
+109/557811/campos_512_v4
+109/557813/campos_512_v4
+109/557815/campos_512_v4
+109/557832/campos_512_v4
+109/557841/campos_512_v4
+109/557858/campos_512_v4
+109/557868/campos_512_v4
+109/557884/campos_512_v4
+109/557887/campos_512_v4
+109/557897/campos_512_v4
+109/557902/campos_512_v4
+109/557903/campos_512_v4
+109/557907/campos_512_v4
+109/557913/campos_512_v4
+109/557922/campos_512_v4
+109/557930/campos_512_v4
+109/557931/campos_512_v4
+109/557936/campos_512_v4
+109/557950/campos_512_v4
+109/557953/campos_512_v4
+109/557956/campos_512_v4
+109/557968/campos_512_v4
+109/557969/campos_512_v4
+109/557972/campos_512_v4
+109/557978/campos_512_v4
+109/557979/campos_512_v4
+109/558012/campos_512_v4
+109/558027/campos_512_v4
+109/558029/campos_512_v4
+109/558032/campos_512_v4
+109/558043/campos_512_v4
+109/558048/campos_512_v4
+109/558057/campos_512_v4
+109/558061/campos_512_v4
+109/558063/campos_512_v4
+109/558066/campos_512_v4
+109/558069/campos_512_v4
+109/558073/campos_512_v4
+109/558078/campos_512_v4
+109/558099/campos_512_v4
+109/558108/campos_512_v4
+109/558111/campos_512_v4
+109/558113/campos_512_v4
+109/558130/campos_512_v4
+109/558140/campos_512_v4
+109/558141/campos_512_v4
+109/558142/campos_512_v4
+109/558144/campos_512_v4
+109/558170/campos_512_v4
+109/558178/campos_512_v4
+109/558195/campos_512_v4
+109/558197/campos_512_v4
+109/558205/campos_512_v4
+109/558207/campos_512_v4
+109/558222/campos_512_v4
+109/558224/campos_512_v4
+109/558225/campos_512_v4
+109/558227/campos_512_v4
+109/558231/campos_512_v4
+109/558233/campos_512_v4
+109/558237/campos_512_v4
+109/558245/campos_512_v4
+109/558265/campos_512_v4
+109/558305/campos_512_v4
+109/558315/campos_512_v4
+109/558324/campos_512_v4
+109/558328/campos_512_v4
+109/558337/campos_512_v4
+109/558353/campos_512_v4
+109/558371/campos_512_v4
+109/558401/campos_512_v4
+109/558405/campos_512_v4
+109/558411/campos_512_v4
+109/558425/campos_512_v4
+109/558428/campos_512_v4
+109/558433/campos_512_v4
+109/558435/campos_512_v4
+109/558439/campos_512_v4
+109/558443/campos_512_v4
+109/558444/campos_512_v4
+109/558449/campos_512_v4
+109/558456/campos_512_v4
+109/558463/campos_512_v4
+109/558464/campos_512_v4
+109/558485/campos_512_v4
+109/558487/campos_512_v4
+109/558490/campos_512_v4
+109/558499/campos_512_v4
+109/558516/campos_512_v4
+109/558520/campos_512_v4
+109/558522/campos_512_v4
+109/558533/campos_512_v4
+109/558549/campos_512_v4
+109/558565/campos_512_v4
+109/558569/campos_512_v4
+109/558589/campos_512_v4
+109/558592/campos_512_v4
+109/558594/campos_512_v4
+109/558613/campos_512_v4
+109/558621/campos_512_v4
+109/558631/campos_512_v4
+109/558640/campos_512_v4
+109/558641/campos_512_v4
+109/558645/campos_512_v4
+109/558647/campos_512_v4
+109/558653/campos_512_v4
+109/558660/campos_512_v4
+109/558682/campos_512_v4
+109/558685/campos_512_v4
+109/558687/campos_512_v4
+109/558709/campos_512_v4
+109/558714/campos_512_v4
+109/558719/campos_512_v4
+109/558720/campos_512_v4
+109/558724/campos_512_v4
+109/558725/campos_512_v4
+109/558733/campos_512_v4
+109/558735/campos_512_v4
+109/558746/campos_512_v4
+109/558751/campos_512_v4
+109/558757/campos_512_v4
+109/558780/campos_512_v4
+109/558783/campos_512_v4
+109/558785/campos_512_v4
+109/558792/campos_512_v4
+109/558803/campos_512_v4
+109/558814/campos_512_v4
+109/558815/campos_512_v4
+109/558820/campos_512_v4
+109/558821/campos_512_v4
+109/558829/campos_512_v4
+109/558832/campos_512_v4
+109/558852/campos_512_v4
+109/558854/campos_512_v4
+109/558863/campos_512_v4
+109/558880/campos_512_v4
+109/558886/campos_512_v4
+109/558889/campos_512_v4
+109/558901/campos_512_v4
+109/558929/campos_512_v4
+109/558938/campos_512_v4
+109/558944/campos_512_v4
+109/558957/campos_512_v4
+109/558960/campos_512_v4
+109/558962/campos_512_v4
+109/558982/campos_512_v4
+109/558990/campos_512_v4
+109/559004/campos_512_v4
+109/559006/campos_512_v4
+109/559019/campos_512_v4
+109/559043/campos_512_v4
+109/559048/campos_512_v4
+109/559062/campos_512_v4
+109/559063/campos_512_v4
+109/559066/campos_512_v4
+109/559068/campos_512_v4
+109/559073/campos_512_v4
+109/559079/campos_512_v4
+109/559081/campos_512_v4
+109/559083/campos_512_v4
+109/559097/campos_512_v4
+109/559109/campos_512_v4
+109/559110/campos_512_v4
+109/559117/campos_512_v4
+109/559121/campos_512_v4
+109/559122/campos_512_v4
+109/559124/campos_512_v4
+109/559135/campos_512_v4
+109/559139/campos_512_v4
+109/559142/campos_512_v4
+109/559152/campos_512_v4
+109/559156/campos_512_v4
+109/559160/campos_512_v4
+109/559165/campos_512_v4
+109/559175/campos_512_v4
+109/559176/campos_512_v4
+109/559180/campos_512_v4
+109/559186/campos_512_v4
+109/559189/campos_512_v4
+109/559190/campos_512_v4
+109/559193/campos_512_v4
+109/559199/campos_512_v4
+109/559210/campos_512_v4
+109/559212/campos_512_v4
+109/559237/campos_512_v4
+109/559241/campos_512_v4
+109/559260/campos_512_v4
+109/559261/campos_512_v4
+109/559268/campos_512_v4
+109/559271/campos_512_v4
+109/559277/campos_512_v4
+109/559299/campos_512_v4
+109/559312/campos_512_v4
+109/559324/campos_512_v4
+109/559325/campos_512_v4
+109/559330/campos_512_v4
+109/559336/campos_512_v4
+109/559347/campos_512_v4
+109/559357/campos_512_v4
+109/559366/campos_512_v4
+109/559370/campos_512_v4
+109/559379/campos_512_v4
+109/559384/campos_512_v4
+109/559388/campos_512_v4
+109/559389/campos_512_v4
+109/559395/campos_512_v4
+109/559409/campos_512_v4
+109/559416/campos_512_v4
+109/559426/campos_512_v4
+109/559427/campos_512_v4
+109/559428/campos_512_v4
+109/559430/campos_512_v4
+109/559435/campos_512_v4
+109/559440/campos_512_v4
+109/559441/campos_512_v4
+109/559444/campos_512_v4
+109/559445/campos_512_v4
+109/559453/campos_512_v4
+109/559455/campos_512_v4
+109/559464/campos_512_v4
+109/559467/campos_512_v4
+109/559468/campos_512_v4
+109/559475/campos_512_v4
+109/559476/campos_512_v4
+109/559481/campos_512_v4
+109/559483/campos_512_v4
+109/559485/campos_512_v4
+109/559487/campos_512_v4
+109/559490/campos_512_v4
+109/559494/campos_512_v4
+109/559502/campos_512_v4
+109/559505/campos_512_v4
+109/559508/campos_512_v4
+109/559538/campos_512_v4
+109/559541/campos_512_v4
+109/559544/campos_512_v4
+109/559557/campos_512_v4
+109/559560/campos_512_v4
+109/559571/campos_512_v4
+109/559580/campos_512_v4
+109/559584/campos_512_v4
+109/559586/campos_512_v4
+109/559592/campos_512_v4
+109/559593/campos_512_v4
+109/559600/campos_512_v4
+109/559607/campos_512_v4
+109/559617/campos_512_v4
+109/559620/campos_512_v4
+109/559629/campos_512_v4
+109/559641/campos_512_v4
+109/559645/campos_512_v4
+109/559647/campos_512_v4
+109/559655/campos_512_v4
+109/559667/campos_512_v4
+109/559691/campos_512_v4
+109/559696/campos_512_v4
+109/559729/campos_512_v4
+109/559735/campos_512_v4
+109/559743/campos_512_v4
+109/559763/campos_512_v4
+109/559768/campos_512_v4
+109/559779/campos_512_v4
+109/559782/campos_512_v4
+109/559787/campos_512_v4
+109/559795/campos_512_v4
+109/559812/campos_512_v4
+109/559815/campos_512_v4
+109/559816/campos_512_v4
+109/559819/campos_512_v4
+109/559823/campos_512_v4
+109/559828/campos_512_v4
+109/559830/campos_512_v4
+109/559842/campos_512_v4
+109/559844/campos_512_v4
+109/559845/campos_512_v4
+109/559846/campos_512_v4
+109/559856/campos_512_v4
+109/559863/campos_512_v4
+109/559865/campos_512_v4
+109/559877/campos_512_v4
+109/559879/campos_512_v4
+109/559880/campos_512_v4
+109/559907/campos_512_v4
+109/559914/campos_512_v4
+109/559926/campos_512_v4
+109/559929/campos_512_v4
+109/559935/campos_512_v4
+109/559956/campos_512_v4
+109/559962/campos_512_v4
+109/559977/campos_512_v4
+109/559979/campos_512_v4
+109/559991/campos_512_v4
+109/560000/campos_512_v4
+11/65004/campos_512_v4
+11/65014/campos_512_v4
+11/65033/campos_512_v4
+11/65036/campos_512_v4
+11/65039/campos_512_v4
+11/65042/campos_512_v4
+11/65055/campos_512_v4
+11/65058/campos_512_v4
+11/65062/campos_512_v4
+11/65067/campos_512_v4
+11/65070/campos_512_v4
+11/65084/campos_512_v4
+11/65093/campos_512_v4
+11/65099/campos_512_v4
+11/65101/campos_512_v4
+11/65102/campos_512_v4
+11/65115/campos_512_v4
+11/65123/campos_512_v4
+11/65140/campos_512_v4
+11/65145/campos_512_v4
+11/65155/campos_512_v4
+11/65161/campos_512_v4
+11/65163/campos_512_v4
+11/65164/campos_512_v4
+11/65172/campos_512_v4
+11/65184/campos_512_v4
+11/65185/campos_512_v4
+11/65189/campos_512_v4
+11/65190/campos_512_v4
+11/65191/campos_512_v4
+11/65197/campos_512_v4
+11/65201/campos_512_v4
+11/65203/campos_512_v4
+11/65209/campos_512_v4
+11/65215/campos_512_v4
+11/65234/campos_512_v4
+11/65243/campos_512_v4
+11/65245/campos_512_v4
+11/65259/campos_512_v4
+11/65264/campos_512_v4
+11/65271/campos_512_v4
+11/65273/campos_512_v4
+11/65281/campos_512_v4
+11/65284/campos_512_v4
+11/65288/campos_512_v4
+11/65315/campos_512_v4
+11/65316/campos_512_v4
+11/65328/campos_512_v4
+11/65330/campos_512_v4
+11/65332/campos_512_v4
+11/65348/campos_512_v4
+11/65349/campos_512_v4
+11/65350/campos_512_v4
+11/65352/campos_512_v4
+11/65353/campos_512_v4
+11/65361/campos_512_v4
+11/65368/campos_512_v4
+11/65374/campos_512_v4
+11/65378/campos_512_v4
+11/65383/campos_512_v4
+11/65401/campos_512_v4
+11/65407/campos_512_v4
+11/65409/campos_512_v4
+11/65411/campos_512_v4
+11/65413/campos_512_v4
+11/65416/campos_512_v4
+11/65418/campos_512_v4
+11/65426/campos_512_v4
+11/65436/campos_512_v4
+11/65439/campos_512_v4
+11/65448/campos_512_v4
+11/65456/campos_512_v4
+11/65468/campos_512_v4
+11/65491/campos_512_v4
+11/65500/campos_512_v4
+11/65520/campos_512_v4
+11/65533/campos_512_v4
+11/65546/campos_512_v4
+11/65553/campos_512_v4
+11/65560/campos_512_v4
+11/65564/campos_512_v4
+11/65571/campos_512_v4
+11/65573/campos_512_v4
+11/65576/campos_512_v4
+11/65577/campos_512_v4
+11/65583/campos_512_v4
+11/65585/campos_512_v4
+11/65589/campos_512_v4
+11/65597/campos_512_v4
+11/65603/campos_512_v4
+11/65614/campos_512_v4
+11/65624/campos_512_v4
+11/65625/campos_512_v4
+11/65633/campos_512_v4
+11/65636/campos_512_v4
+11/65637/campos_512_v4
+11/65655/campos_512_v4
+11/65673/campos_512_v4
+11/65699/campos_512_v4
+11/65706/campos_512_v4
+11/65707/campos_512_v4
+11/65715/campos_512_v4
+11/65717/campos_512_v4
+11/65748/campos_512_v4
+11/65755/campos_512_v4
+11/65763/campos_512_v4
+11/65764/campos_512_v4
+11/65779/campos_512_v4
+11/65782/campos_512_v4
+11/65784/campos_512_v4
+11/65805/campos_512_v4
+11/65810/campos_512_v4
+11/65838/campos_512_v4
+11/65841/campos_512_v4
+11/65850/campos_512_v4
+11/65852/campos_512_v4
+11/65863/campos_512_v4
+11/65877/campos_512_v4
+11/65880/campos_512_v4
+11/65892/campos_512_v4
+11/65905/campos_512_v4
+11/65910/campos_512_v4
+11/65929/campos_512_v4
+11/65931/campos_512_v4
+11/65941/campos_512_v4
+11/65949/campos_512_v4
+11/65951/campos_512_v4
+11/65954/campos_512_v4
+11/65955/campos_512_v4
+11/65961/campos_512_v4
+11/65963/campos_512_v4
+11/65980/campos_512_v4
+11/65994/campos_512_v4
+11/65996/campos_512_v4
+11/66006/campos_512_v4
+11/66008/campos_512_v4
+11/66010/campos_512_v4
+11/66017/campos_512_v4
+11/66032/campos_512_v4
+11/66039/campos_512_v4
+11/66040/campos_512_v4
+11/66041/campos_512_v4
+11/66042/campos_512_v4
+11/66047/campos_512_v4
+11/66058/campos_512_v4
+11/66063/campos_512_v4
+11/66072/campos_512_v4
+11/66087/campos_512_v4
+11/66094/campos_512_v4
+11/66101/campos_512_v4
+11/66103/campos_512_v4
+11/66108/campos_512_v4
+11/66110/campos_512_v4
+11/66134/campos_512_v4
+11/66147/campos_512_v4
+11/66153/campos_512_v4
+11/66168/campos_512_v4
+11/66169/campos_512_v4
+11/66177/campos_512_v4
+11/66197/campos_512_v4
+11/66218/campos_512_v4
+11/66223/campos_512_v4
+11/66228/campos_512_v4
+11/66230/campos_512_v4
+11/66235/campos_512_v4
+11/66236/campos_512_v4
+11/66239/campos_512_v4
+11/66243/campos_512_v4
+11/66250/campos_512_v4
+11/66251/campos_512_v4
+11/66253/campos_512_v4
+11/66257/campos_512_v4
+11/66258/campos_512_v4
+11/66261/campos_512_v4
+11/66266/campos_512_v4
+11/66271/campos_512_v4
+11/66286/campos_512_v4
+11/66288/campos_512_v4
+11/66290/campos_512_v4
+11/66292/campos_512_v4
+11/66302/campos_512_v4
+11/66305/campos_512_v4
+11/66315/campos_512_v4
+11/66317/campos_512_v4
+11/66329/campos_512_v4
+11/66336/campos_512_v4
+11/66345/campos_512_v4
+11/66354/campos_512_v4
+11/66357/campos_512_v4
+11/66359/campos_512_v4
+11/66360/campos_512_v4
+11/66371/campos_512_v4
+11/66384/campos_512_v4
+11/66385/campos_512_v4
+11/66397/campos_512_v4
+11/66414/campos_512_v4
+11/66416/campos_512_v4
+11/66417/campos_512_v4
+11/66445/campos_512_v4
+11/66448/campos_512_v4
+11/66452/campos_512_v4
+11/66459/campos_512_v4
+11/66460/campos_512_v4
+11/66462/campos_512_v4
+11/66470/campos_512_v4
+11/66482/campos_512_v4
+11/66502/campos_512_v4
+11/66503/campos_512_v4
+11/66509/campos_512_v4
+11/66525/campos_512_v4
+11/66532/campos_512_v4
+11/66541/campos_512_v4
+11/66542/campos_512_v4
+11/66545/campos_512_v4
+11/66551/campos_512_v4
+11/66555/campos_512_v4
+11/66556/campos_512_v4
+11/66571/campos_512_v4
+11/66585/campos_512_v4
+11/66586/campos_512_v4
+11/66589/campos_512_v4
+11/66592/campos_512_v4
+11/66601/campos_512_v4
+11/66603/campos_512_v4
+11/66604/campos_512_v4
+11/66615/campos_512_v4
+11/66623/campos_512_v4
+11/66629/campos_512_v4
+11/66636/campos_512_v4
+11/66637/campos_512_v4
+11/66638/campos_512_v4
+11/66645/campos_512_v4
+11/66651/campos_512_v4
+11/66654/campos_512_v4
+11/66658/campos_512_v4
+11/66661/campos_512_v4
+11/66674/campos_512_v4
+11/66691/campos_512_v4
+11/66692/campos_512_v4
+11/66702/campos_512_v4
+11/66708/campos_512_v4
+11/66716/campos_512_v4
+11/66736/campos_512_v4
+11/66743/campos_512_v4
+11/66749/campos_512_v4
+11/66759/campos_512_v4
+11/66777/campos_512_v4
+11/66779/campos_512_v4
+11/66791/campos_512_v4
+11/66795/campos_512_v4
+11/66810/campos_512_v4
+11/66814/campos_512_v4
+11/66828/campos_512_v4
+11/66830/campos_512_v4
+11/66833/campos_512_v4
+11/66835/campos_512_v4
+11/66841/campos_512_v4
+11/66845/campos_512_v4
+11/66847/campos_512_v4
+11/66851/campos_512_v4
+11/66857/campos_512_v4
+11/66869/campos_512_v4
+11/66875/campos_512_v4
+11/66887/campos_512_v4
+11/66896/campos_512_v4
+11/66901/campos_512_v4
+11/66904/campos_512_v4
+11/66906/campos_512_v4
+11/66915/campos_512_v4
+11/66933/campos_512_v4
+11/66938/campos_512_v4
+11/66947/campos_512_v4
+11/66959/campos_512_v4
+11/66962/campos_512_v4
+11/66968/campos_512_v4
+11/66970/campos_512_v4
+11/66973/campos_512_v4
+11/66974/campos_512_v4
+11/67006/campos_512_v4
+11/67011/campos_512_v4
+11/67019/campos_512_v4
+11/67040/campos_512_v4
+11/67050/campos_512_v4
+11/67055/campos_512_v4
+11/67067/campos_512_v4
+11/67073/campos_512_v4
+11/67078/campos_512_v4
+11/67080/campos_512_v4
+11/67096/campos_512_v4
+11/67125/campos_512_v4
+11/67131/campos_512_v4
+11/67134/campos_512_v4
+11/67139/campos_512_v4
+11/67140/campos_512_v4
+11/67162/campos_512_v4
+11/67168/campos_512_v4
+11/67178/campos_512_v4
+11/67182/campos_512_v4
+11/67183/campos_512_v4
+11/67195/campos_512_v4
+11/67199/campos_512_v4
+11/67200/campos_512_v4
+11/67208/campos_512_v4
+11/67211/campos_512_v4
+11/67213/campos_512_v4
+11/67216/campos_512_v4
+11/67230/campos_512_v4
+11/67235/campos_512_v4
+11/67239/campos_512_v4
+11/67263/campos_512_v4
+11/67267/campos_512_v4
+11/67276/campos_512_v4
+11/67282/campos_512_v4
+11/67283/campos_512_v4
+11/67316/campos_512_v4
+11/67320/campos_512_v4
+11/67324/campos_512_v4
+11/67326/campos_512_v4
+11/67334/campos_512_v4
+11/67339/campos_512_v4
+11/67350/campos_512_v4
+11/67363/campos_512_v4
+11/67370/campos_512_v4
+11/67383/campos_512_v4
+11/67384/campos_512_v4
+11/67395/campos_512_v4
+11/67399/campos_512_v4
+11/67412/campos_512_v4
+11/67415/campos_512_v4
+11/67436/campos_512_v4
+11/67440/campos_512_v4
+11/67443/campos_512_v4
+11/67455/campos_512_v4
+11/67456/campos_512_v4
+11/67480/campos_512_v4
+11/67487/campos_512_v4
+11/67489/campos_512_v4
+11/67493/campos_512_v4
+11/67494/campos_512_v4
+11/67497/campos_512_v4
+11/67517/campos_512_v4
+11/67525/campos_512_v4
+11/67527/campos_512_v4
+11/67531/campos_512_v4
+11/67535/campos_512_v4
+11/67556/campos_512_v4
+11/67557/campos_512_v4
+11/67561/campos_512_v4
+11/67568/campos_512_v4
+11/67571/campos_512_v4
+11/67587/campos_512_v4
+11/67621/campos_512_v4
+11/67624/campos_512_v4
+11/67627/campos_512_v4
+11/67642/campos_512_v4
+11/67648/campos_512_v4
+11/67655/campos_512_v4
+11/67666/campos_512_v4
+11/67668/campos_512_v4
+11/67673/campos_512_v4
+11/67686/campos_512_v4
+11/67693/campos_512_v4
+11/67701/campos_512_v4
+11/67705/campos_512_v4
+11/67707/campos_512_v4
+11/67708/campos_512_v4
+11/67711/campos_512_v4
+11/67712/campos_512_v4
+11/67715/campos_512_v4
+11/67717/campos_512_v4
+11/67738/campos_512_v4
+11/67748/campos_512_v4
+11/67755/campos_512_v4
+11/67759/campos_512_v4
+11/67761/campos_512_v4
+11/67773/campos_512_v4
+11/67780/campos_512_v4
+11/67785/campos_512_v4
+11/67794/campos_512_v4
+11/67796/campos_512_v4
+11/67804/campos_512_v4
+11/67814/campos_512_v4
+11/67815/campos_512_v4
+11/67819/campos_512_v4
+11/67820/campos_512_v4
+11/67821/campos_512_v4
+11/67827/campos_512_v4
+11/67831/campos_512_v4
+11/67842/campos_512_v4
+11/67845/campos_512_v4
+11/67849/campos_512_v4
+11/67853/campos_512_v4
+11/67854/campos_512_v4
+11/67857/campos_512_v4
+11/67872/campos_512_v4
+11/67876/campos_512_v4
+11/67892/campos_512_v4
+11/67893/campos_512_v4
+11/67899/campos_512_v4
+11/67900/campos_512_v4
+11/67904/campos_512_v4
+11/67909/campos_512_v4
+11/67911/campos_512_v4
+11/67917/campos_512_v4
+11/67928/campos_512_v4
+11/67955/campos_512_v4
+11/67956/campos_512_v4
+11/67959/campos_512_v4
+11/67976/campos_512_v4
+11/67983/campos_512_v4
+11/67987/campos_512_v4
+11/67989/campos_512_v4
+11/67993/campos_512_v4
+11/68008/campos_512_v4
+11/68014/campos_512_v4
+11/68016/campos_512_v4
+11/68018/campos_512_v4
+11/68019/campos_512_v4
+11/68026/campos_512_v4
+11/68027/campos_512_v4
+11/68047/campos_512_v4
+11/68068/campos_512_v4
+11/68074/campos_512_v4
+11/68076/campos_512_v4
+11/68081/campos_512_v4
+11/68092/campos_512_v4
+11/68097/campos_512_v4
+11/68110/campos_512_v4
+11/68112/campos_512_v4
+11/68124/campos_512_v4
+11/68126/campos_512_v4
+11/68144/campos_512_v4
+11/68146/campos_512_v4
+11/68197/campos_512_v4
+11/68220/campos_512_v4
+11/68221/campos_512_v4
+11/68227/campos_512_v4
+11/68247/campos_512_v4
+11/68252/campos_512_v4
+11/68254/campos_512_v4
+11/68255/campos_512_v4
+11/68257/campos_512_v4
+11/68277/campos_512_v4
+11/68278/campos_512_v4
+11/68286/campos_512_v4
+11/68287/campos_512_v4
+11/68301/campos_512_v4
+11/68304/campos_512_v4
+11/68313/campos_512_v4
+11/68315/campos_512_v4
+11/68324/campos_512_v4
+11/68325/campos_512_v4
+11/68336/campos_512_v4
+11/68347/campos_512_v4
+11/68348/campos_512_v4
+11/68358/campos_512_v4
+11/68361/campos_512_v4
+11/68373/campos_512_v4
+11/68382/campos_512_v4
+11/68385/campos_512_v4
+11/68387/campos_512_v4
+11/68395/campos_512_v4
+11/68402/campos_512_v4
+11/68403/campos_512_v4
+11/68420/campos_512_v4
+11/68421/campos_512_v4
+11/68428/campos_512_v4
+11/68432/campos_512_v4
+11/68434/campos_512_v4
+11/68439/campos_512_v4
+11/68442/campos_512_v4
+11/68444/campos_512_v4
+11/68448/campos_512_v4
+11/68450/campos_512_v4
+11/68452/campos_512_v4
+11/68454/campos_512_v4
+11/68468/campos_512_v4
+11/68472/campos_512_v4
+11/68474/campos_512_v4
+11/68488/campos_512_v4
+11/68498/campos_512_v4
+11/68501/campos_512_v4
+11/68506/campos_512_v4
+11/68511/campos_512_v4
+11/68516/campos_512_v4
+11/68529/campos_512_v4
+11/68537/campos_512_v4
+11/68539/campos_512_v4
+11/68565/campos_512_v4
+11/68585/campos_512_v4
+11/68589/campos_512_v4
+11/68594/campos_512_v4
+11/68600/campos_512_v4
+11/68603/campos_512_v4
+11/68607/campos_512_v4
+11/68617/campos_512_v4
+11/68619/campos_512_v4
+11/68639/campos_512_v4
+11/68643/campos_512_v4
+11/68646/campos_512_v4
+11/68652/campos_512_v4
+11/68656/campos_512_v4
+11/68673/campos_512_v4
+11/68683/campos_512_v4
+11/68684/campos_512_v4
+11/68687/campos_512_v4
+11/68691/campos_512_v4
+11/68707/campos_512_v4
+11/68714/campos_512_v4
+11/68717/campos_512_v4
+11/68720/campos_512_v4
+11/68722/campos_512_v4
+11/68724/campos_512_v4
+11/68725/campos_512_v4
+11/68726/campos_512_v4
+11/68731/campos_512_v4
+11/68735/campos_512_v4
+11/68739/campos_512_v4
+11/68742/campos_512_v4
+11/68745/campos_512_v4
+11/68758/campos_512_v4
+11/68761/campos_512_v4
+11/68763/campos_512_v4
+11/68774/campos_512_v4
+11/68778/campos_512_v4
+11/68779/campos_512_v4
+11/68781/campos_512_v4
+11/68783/campos_512_v4
+11/68787/campos_512_v4
+11/68791/campos_512_v4
+11/68792/campos_512_v4
+11/68797/campos_512_v4
+11/68798/campos_512_v4
+11/68813/campos_512_v4
+11/68822/campos_512_v4
+11/68828/campos_512_v4
+11/68834/campos_512_v4
+11/68844/campos_512_v4
+11/68852/campos_512_v4
+11/68879/campos_512_v4
+11/68880/campos_512_v4
+11/68884/campos_512_v4
+11/68902/campos_512_v4
+11/68905/campos_512_v4
+11/68921/campos_512_v4
+11/68924/campos_512_v4
+11/68925/campos_512_v4
+11/68928/campos_512_v4
+11/68932/campos_512_v4
+11/68936/campos_512_v4
+11/68950/campos_512_v4
+11/68951/campos_512_v4
+11/68953/campos_512_v4
+11/68955/campos_512_v4
+11/68957/campos_512_v4
+11/68958/campos_512_v4
+11/68964/campos_512_v4
+11/68975/campos_512_v4
+11/68978/campos_512_v4
+11/68984/campos_512_v4
+11/68987/campos_512_v4
+11/69001/campos_512_v4
+11/69005/campos_512_v4
+11/69010/campos_512_v4
+11/69021/campos_512_v4
+11/69027/campos_512_v4
+11/69030/campos_512_v4
+11/69044/campos_512_v4
+11/69045/campos_512_v4
+11/69047/campos_512_v4
+11/69048/campos_512_v4
+11/69049/campos_512_v4
+11/69051/campos_512_v4
+11/69052/campos_512_v4
+11/69059/campos_512_v4
+11/69066/campos_512_v4
+11/69071/campos_512_v4
+11/69075/campos_512_v4
+11/69091/campos_512_v4
+11/69106/campos_512_v4
+11/69109/campos_512_v4
+11/69114/campos_512_v4
+11/69118/campos_512_v4
+11/69123/campos_512_v4
+11/69124/campos_512_v4
+11/69130/campos_512_v4
+11/69132/campos_512_v4
+11/69133/campos_512_v4
+11/69134/campos_512_v4
+11/69142/campos_512_v4
+11/69144/campos_512_v4
+11/69145/campos_512_v4
+11/69146/campos_512_v4
+11/69147/campos_512_v4
+11/69154/campos_512_v4
+11/69155/campos_512_v4
+11/69158/campos_512_v4
+11/69162/campos_512_v4
+11/69169/campos_512_v4
+11/69170/campos_512_v4
+11/69174/campos_512_v4
+11/69193/campos_512_v4
+11/69197/campos_512_v4
+11/69201/campos_512_v4
+11/69218/campos_512_v4
+11/69219/campos_512_v4
+11/69229/campos_512_v4
+11/69234/campos_512_v4
+11/69255/campos_512_v4
+11/69256/campos_512_v4
+11/69257/campos_512_v4
+11/69268/campos_512_v4
+11/69270/campos_512_v4
+11/69272/campos_512_v4
+11/69279/campos_512_v4
+11/69280/campos_512_v4
+11/69281/campos_512_v4
+11/69283/campos_512_v4
+11/69294/campos_512_v4
+11/69301/campos_512_v4
+11/69306/campos_512_v4
+11/69309/campos_512_v4
+11/69314/campos_512_v4
+11/69324/campos_512_v4
+11/69328/campos_512_v4
+11/69337/campos_512_v4
+11/69346/campos_512_v4
+11/69347/campos_512_v4
+11/69352/campos_512_v4
+11/69353/campos_512_v4
+11/69360/campos_512_v4
+11/69368/campos_512_v4
+11/69374/campos_512_v4
+11/69383/campos_512_v4
+11/69391/campos_512_v4
+11/69392/campos_512_v4
+11/69396/campos_512_v4
+11/69412/campos_512_v4
+11/69425/campos_512_v4
+11/69430/campos_512_v4
+11/69434/campos_512_v4
+11/69440/campos_512_v4
+11/69441/campos_512_v4
+11/69448/campos_512_v4
+11/69458/campos_512_v4
+11/69465/campos_512_v4
+11/69466/campos_512_v4
+11/69469/campos_512_v4
+11/69473/campos_512_v4
+11/69494/campos_512_v4
+11/69498/campos_512_v4
+11/69506/campos_512_v4
+11/69518/campos_512_v4
+11/69525/campos_512_v4
+11/69527/campos_512_v4
+11/69538/campos_512_v4
+11/69542/campos_512_v4
+11/69565/campos_512_v4
+11/69566/campos_512_v4
+11/69574/campos_512_v4
+11/69580/campos_512_v4
+11/69583/campos_512_v4
+11/69612/campos_512_v4
+11/69624/campos_512_v4
+11/69632/campos_512_v4
+11/69664/campos_512_v4
+11/69676/campos_512_v4
+11/69681/campos_512_v4
+11/69682/campos_512_v4
+11/69686/campos_512_v4
+11/69689/campos_512_v4
+11/69690/campos_512_v4
+11/69695/campos_512_v4
+11/69699/campos_512_v4
+11/69713/campos_512_v4
+11/69715/campos_512_v4
+11/69728/campos_512_v4
+11/69740/campos_512_v4
+11/69762/campos_512_v4
+11/69768/campos_512_v4
+11/69774/campos_512_v4
+11/69777/campos_512_v4
+11/69786/campos_512_v4
+11/69793/campos_512_v4
+11/69795/campos_512_v4
+11/69815/campos_512_v4
+11/69823/campos_512_v4
+11/69830/campos_512_v4
+11/69837/campos_512_v4
+11/69844/campos_512_v4
+11/69847/campos_512_v4
+11/69849/campos_512_v4
+11/69851/campos_512_v4
+11/69854/campos_512_v4
+11/69865/campos_512_v4
+11/69866/campos_512_v4
+11/69874/campos_512_v4
+11/69887/campos_512_v4
+11/69894/campos_512_v4
+11/69913/campos_512_v4
+11/69924/campos_512_v4
+11/69934/campos_512_v4
+11/69944/campos_512_v4
+11/69956/campos_512_v4
+11/69957/campos_512_v4
+11/69968/campos_512_v4
+11/69971/campos_512_v4
+11/69975/campos_512_v4
+11/69984/campos_512_v4
+11/69987/campos_512_v4
+11/69990/campos_512_v4
+11/69997/campos_512_v4
+11/69998/campos_512_v4
+110/560005/campos_512_v4
+110/560008/campos_512_v4
+110/560013/campos_512_v4
+110/560021/campos_512_v4
+110/560032/campos_512_v4
+110/560060/campos_512_v4
+110/560064/campos_512_v4
+110/560075/campos_512_v4
+110/560079/campos_512_v4
+110/560091/campos_512_v4
+110/560094/campos_512_v4
+110/560099/campos_512_v4
+110/560105/campos_512_v4
+110/560114/campos_512_v4
+110/560124/campos_512_v4
+110/560126/campos_512_v4
+110/560149/campos_512_v4
+110/560157/campos_512_v4
+110/560168/campos_512_v4
+110/560169/campos_512_v4
+110/560182/campos_512_v4
+110/560194/campos_512_v4
+110/560209/campos_512_v4
+110/560213/campos_512_v4
+110/560218/campos_512_v4
+110/560221/campos_512_v4
+110/560222/campos_512_v4
+110/560233/campos_512_v4
+110/560269/campos_512_v4
+110/560271/campos_512_v4
+110/560272/campos_512_v4
+110/560274/campos_512_v4
+110/560278/campos_512_v4
+110/560280/campos_512_v4
+110/560290/campos_512_v4
+110/560313/campos_512_v4
+110/560314/campos_512_v4
+110/560315/campos_512_v4
+110/560320/campos_512_v4
+110/560322/campos_512_v4
+110/560326/campos_512_v4
+110/560327/campos_512_v4
+110/560332/campos_512_v4
+110/560333/campos_512_v4
+110/560342/campos_512_v4
+110/560354/campos_512_v4
+110/560358/campos_512_v4
+110/560360/campos_512_v4
+110/560368/campos_512_v4
+110/560369/campos_512_v4
+110/560383/campos_512_v4
+110/560399/campos_512_v4
+110/560403/campos_512_v4
+110/560419/campos_512_v4
+110/560425/campos_512_v4
+110/560428/campos_512_v4
+110/560436/campos_512_v4
+110/560463/campos_512_v4
+110/560466/campos_512_v4
+110/560473/campos_512_v4
+110/560474/campos_512_v4
+110/560480/campos_512_v4
+110/560482/campos_512_v4
+110/560485/campos_512_v4
+110/560488/campos_512_v4
+110/560491/campos_512_v4
+110/560495/campos_512_v4
+110/560496/campos_512_v4
+110/560503/campos_512_v4
+110/560508/campos_512_v4
+110/560513/campos_512_v4
+110/560516/campos_512_v4
+110/560526/campos_512_v4
+110/560533/campos_512_v4
+110/560539/campos_512_v4
+110/560541/campos_512_v4
+110/560556/campos_512_v4
+110/560558/campos_512_v4
+110/560560/campos_512_v4
+110/560567/campos_512_v4
+110/560573/campos_512_v4
+110/560579/campos_512_v4
+110/560597/campos_512_v4
+110/560617/campos_512_v4
+110/560625/campos_512_v4
+110/560633/campos_512_v4
+110/560640/campos_512_v4
+110/560643/campos_512_v4
+110/560645/campos_512_v4
+110/560651/campos_512_v4
+110/560666/campos_512_v4
+110/560674/campos_512_v4
+110/560687/campos_512_v4
+110/560696/campos_512_v4
+110/560697/campos_512_v4
+110/560703/campos_512_v4
+110/560747/campos_512_v4
+110/560766/campos_512_v4
+110/560768/campos_512_v4
+110/560777/campos_512_v4
+110/560778/campos_512_v4
+110/560779/campos_512_v4
+110/560781/campos_512_v4
+110/560782/campos_512_v4
+110/560783/campos_512_v4
+110/560796/campos_512_v4
+110/560799/campos_512_v4
+110/560814/campos_512_v4
+110/560822/campos_512_v4
+110/560825/campos_512_v4
+110/560826/campos_512_v4
+110/560828/campos_512_v4
+110/560830/campos_512_v4
+110/560838/campos_512_v4
+110/560852/campos_512_v4
+110/560861/campos_512_v4
+110/560863/campos_512_v4
+110/560891/campos_512_v4
+110/560894/campos_512_v4
+110/560905/campos_512_v4
+110/560910/campos_512_v4
+110/560916/campos_512_v4
+110/560924/campos_512_v4
+110/560925/campos_512_v4
+110/560927/campos_512_v4
+110/560931/campos_512_v4
+110/560937/campos_512_v4
+110/560939/campos_512_v4
+110/560942/campos_512_v4
+110/560950/campos_512_v4
+110/560956/campos_512_v4
+110/560961/campos_512_v4
+110/560965/campos_512_v4
+110/560970/campos_512_v4
+110/560974/campos_512_v4
+110/560977/campos_512_v4
+110/560982/campos_512_v4
+110/560991/campos_512_v4
+110/560992/campos_512_v4
+110/560994/campos_512_v4
+110/560995/campos_512_v4
+110/560999/campos_512_v4
+110/561000/campos_512_v4
+110/561014/campos_512_v4
+110/561032/campos_512_v4
+110/561035/campos_512_v4
+110/561037/campos_512_v4
+110/561041/campos_512_v4
+110/561043/campos_512_v4
+110/561048/campos_512_v4
+110/561061/campos_512_v4
+110/561064/campos_512_v4
+110/561067/campos_512_v4
+110/561072/campos_512_v4
+110/561078/campos_512_v4
+110/561083/campos_512_v4
+110/561086/campos_512_v4
+110/561096/campos_512_v4
+110/561098/campos_512_v4
+110/561104/campos_512_v4
+110/561105/campos_512_v4
+110/561123/campos_512_v4
+110/561125/campos_512_v4
+110/561128/campos_512_v4
+110/561137/campos_512_v4
+110/561150/campos_512_v4
+110/561153/campos_512_v4
+110/561155/campos_512_v4
+110/561168/campos_512_v4
+110/561173/campos_512_v4
+110/561177/campos_512_v4
+110/561180/campos_512_v4
+110/561205/campos_512_v4
+110/561208/campos_512_v4
+110/561212/campos_512_v4
+110/561218/campos_512_v4
+110/561219/campos_512_v4
+110/561224/campos_512_v4
+110/561229/campos_512_v4
+110/561236/campos_512_v4
+110/561238/campos_512_v4
+110/561249/campos_512_v4
+110/561253/campos_512_v4
+110/561256/campos_512_v4
+110/561261/campos_512_v4
+110/561262/campos_512_v4
+110/561270/campos_512_v4
+110/561273/campos_512_v4
+110/561283/campos_512_v4
+110/561291/campos_512_v4
+110/561299/campos_512_v4
+110/561311/campos_512_v4
+110/561315/campos_512_v4
+110/561328/campos_512_v4
+110/561330/campos_512_v4
+110/561338/campos_512_v4
+110/561346/campos_512_v4
+110/561347/campos_512_v4
+110/561361/campos_512_v4
+110/561384/campos_512_v4
+110/561390/campos_512_v4
+110/561394/campos_512_v4
+110/561396/campos_512_v4
+110/561397/campos_512_v4
+110/561404/campos_512_v4
+110/561405/campos_512_v4
+110/561408/campos_512_v4
+110/561410/campos_512_v4
+110/561411/campos_512_v4
+110/561413/campos_512_v4
+110/561430/campos_512_v4
+110/561439/campos_512_v4
+110/561443/campos_512_v4
+110/561444/campos_512_v4
+110/561454/campos_512_v4
+110/561457/campos_512_v4
+110/561458/campos_512_v4
+110/561460/campos_512_v4
+110/561461/campos_512_v4
+110/561466/campos_512_v4
+110/561486/campos_512_v4
+110/561488/campos_512_v4
+110/561490/campos_512_v4
+110/561492/campos_512_v4
+110/561496/campos_512_v4
+110/561498/campos_512_v4
+110/561505/campos_512_v4
+110/561517/campos_512_v4
+110/561518/campos_512_v4
+110/561520/campos_512_v4
+110/561523/campos_512_v4
+110/561524/campos_512_v4
+110/561534/campos_512_v4
+110/561541/campos_512_v4
+110/561552/campos_512_v4
+110/561563/campos_512_v4
+110/561578/campos_512_v4
+110/561587/campos_512_v4
+110/561593/campos_512_v4
+110/561600/campos_512_v4
+110/561604/campos_512_v4
+110/561618/campos_512_v4
+110/561619/campos_512_v4
+110/561624/campos_512_v4
+110/561632/campos_512_v4
+110/561646/campos_512_v4
+110/561653/campos_512_v4
+110/561659/campos_512_v4
+110/561660/campos_512_v4
+110/561670/campos_512_v4
+110/561676/campos_512_v4
+110/561679/campos_512_v4
+110/561683/campos_512_v4
+110/561702/campos_512_v4
+110/561708/campos_512_v4
+110/561711/campos_512_v4
+110/561717/campos_512_v4
+110/561718/campos_512_v4
+110/561724/campos_512_v4
+110/561734/campos_512_v4
+110/561739/campos_512_v4
+110/561772/campos_512_v4
+110/561773/campos_512_v4
+110/561789/campos_512_v4
+110/561790/campos_512_v4
+110/561803/campos_512_v4
+110/561806/campos_512_v4
+110/561807/campos_512_v4
+110/561815/campos_512_v4
+110/561819/campos_512_v4
+110/561823/campos_512_v4
+110/561827/campos_512_v4
+110/561828/campos_512_v4
+110/561836/campos_512_v4
+110/561842/campos_512_v4
+110/561847/campos_512_v4
+110/561851/campos_512_v4
+110/561865/campos_512_v4
+110/561871/campos_512_v4
+110/561876/campos_512_v4
+110/561878/campos_512_v4
+110/561896/campos_512_v4
+110/561898/campos_512_v4
+110/561902/campos_512_v4
+110/561903/campos_512_v4
+110/561906/campos_512_v4
+110/561907/campos_512_v4
+110/561909/campos_512_v4
+110/561919/campos_512_v4
+110/561922/campos_512_v4
+110/561925/campos_512_v4
+110/561929/campos_512_v4
+110/561934/campos_512_v4
+110/561943/campos_512_v4
+110/561967/campos_512_v4
+110/562012/campos_512_v4
+110/562014/campos_512_v4
+110/562019/campos_512_v4
+110/562039/campos_512_v4
+110/562041/campos_512_v4
+110/562048/campos_512_v4
+110/562049/campos_512_v4
+110/562055/campos_512_v4
+110/562057/campos_512_v4
+110/562060/campos_512_v4
+110/562076/campos_512_v4
+110/562077/campos_512_v4
+110/562087/campos_512_v4
+110/562089/campos_512_v4
+110/562090/campos_512_v4
+110/562097/campos_512_v4
+110/562102/campos_512_v4
+110/562113/campos_512_v4
+110/562114/campos_512_v4
+110/562115/campos_512_v4
+110/562118/campos_512_v4
+110/562121/campos_512_v4
+110/562122/campos_512_v4
+110/562131/campos_512_v4
+110/562133/campos_512_v4
+110/562137/campos_512_v4
+110/562151/campos_512_v4
+110/562156/campos_512_v4
+110/562157/campos_512_v4
+110/562169/campos_512_v4
+110/562170/campos_512_v4
+110/562173/campos_512_v4
+110/562178/campos_512_v4
+110/562189/campos_512_v4
+110/562205/campos_512_v4
+110/562216/campos_512_v4
+110/562231/campos_512_v4
+110/562235/campos_512_v4
+110/562240/campos_512_v4
+110/562244/campos_512_v4
+110/562247/campos_512_v4
+110/562253/campos_512_v4
+110/562271/campos_512_v4
+110/562275/campos_512_v4
+110/562276/campos_512_v4
+110/562281/campos_512_v4
+110/562282/campos_512_v4
+110/562283/campos_512_v4
+110/562285/campos_512_v4
+110/562286/campos_512_v4
+110/562306/campos_512_v4
+110/562318/campos_512_v4
+110/562325/campos_512_v4
+110/562330/campos_512_v4
+110/562341/campos_512_v4
+110/562359/campos_512_v4
+110/562363/campos_512_v4
+110/562367/campos_512_v4
+110/562368/campos_512_v4
+110/562369/campos_512_v4
+110/562389/campos_512_v4
+110/562391/campos_512_v4
+110/562399/campos_512_v4
+110/562400/campos_512_v4
+110/562413/campos_512_v4
+110/562427/campos_512_v4
+110/562452/campos_512_v4
+110/562459/campos_512_v4
+110/562464/campos_512_v4
+110/562468/campos_512_v4
+110/562471/campos_512_v4
+110/562477/campos_512_v4
+110/562480/campos_512_v4
+110/562485/campos_512_v4
+110/562493/campos_512_v4
+110/562509/campos_512_v4
+110/562517/campos_512_v4
+110/562523/campos_512_v4
+110/562531/campos_512_v4
+110/562550/campos_512_v4
+110/562559/campos_512_v4
+110/562572/campos_512_v4
+110/562584/campos_512_v4
+110/562590/campos_512_v4
+110/562594/campos_512_v4
+110/562596/campos_512_v4
+110/562623/campos_512_v4
+110/562625/campos_512_v4
+110/562641/campos_512_v4
+110/562658/campos_512_v4
+110/562668/campos_512_v4
+110/562680/campos_512_v4
+110/562683/campos_512_v4
+110/562687/campos_512_v4
+110/562691/campos_512_v4
+110/562694/campos_512_v4
+110/562698/campos_512_v4
+110/562703/campos_512_v4
+110/562707/campos_512_v4
+110/562714/campos_512_v4
+110/562720/campos_512_v4
+110/562728/campos_512_v4
+110/562730/campos_512_v4
+110/562738/campos_512_v4
+110/562750/campos_512_v4
+110/562772/campos_512_v4
+110/562789/campos_512_v4
+110/562790/campos_512_v4
+110/562802/campos_512_v4
+110/562813/campos_512_v4
+110/562821/campos_512_v4
+110/562832/campos_512_v4
+110/562836/campos_512_v4
+110/562855/campos_512_v4
+110/562866/campos_512_v4
+110/562873/campos_512_v4
+110/562876/campos_512_v4
+110/562877/campos_512_v4
+110/562878/campos_512_v4
+110/562885/campos_512_v4
+110/562889/campos_512_v4
+110/562900/campos_512_v4
+110/562905/campos_512_v4
+110/562916/campos_512_v4
+110/562922/campos_512_v4
+110/562941/campos_512_v4
+110/562951/campos_512_v4
+110/562952/campos_512_v4
+110/562972/campos_512_v4
+110/562996/campos_512_v4
+110/563017/campos_512_v4
+110/563019/campos_512_v4
+110/563036/campos_512_v4
+110/563057/campos_512_v4
+110/563070/campos_512_v4
+110/563071/campos_512_v4
+110/563106/campos_512_v4
+110/563124/campos_512_v4
+110/563132/campos_512_v4
+110/563135/campos_512_v4
+110/563142/campos_512_v4
+110/563157/campos_512_v4
+110/563159/campos_512_v4
+110/563175/campos_512_v4
+110/563178/campos_512_v4
+110/563198/campos_512_v4
+110/563204/campos_512_v4
+110/563207/campos_512_v4
+110/563244/campos_512_v4
+110/563261/campos_512_v4
+110/563263/campos_512_v4
+110/563276/campos_512_v4
+110/563283/campos_512_v4
+110/563292/campos_512_v4
+110/563294/campos_512_v4
+110/563300/campos_512_v4
+110/563301/campos_512_v4
+110/563312/campos_512_v4
+110/563328/campos_512_v4
+110/563334/campos_512_v4
+110/563340/campos_512_v4
+110/563366/campos_512_v4
+110/563376/campos_512_v4
+110/563383/campos_512_v4
+110/563388/campos_512_v4
+110/563398/campos_512_v4
+110/563400/campos_512_v4
+110/563408/campos_512_v4
+110/563409/campos_512_v4
+110/563417/campos_512_v4
+110/563419/campos_512_v4
+110/563422/campos_512_v4
+110/563426/campos_512_v4
+110/563435/campos_512_v4
+110/563438/campos_512_v4
+110/563441/campos_512_v4
+110/563446/campos_512_v4
+110/563466/campos_512_v4
+110/563468/campos_512_v4
+110/563490/campos_512_v4
+110/563497/campos_512_v4
+110/563499/campos_512_v4
+110/563500/campos_512_v4
+110/563503/campos_512_v4
+110/563514/campos_512_v4
+110/563518/campos_512_v4
+110/563521/campos_512_v4
+110/563526/campos_512_v4
+110/563531/campos_512_v4
+110/563538/campos_512_v4
+110/563548/campos_512_v4
+110/563556/campos_512_v4
+110/563559/campos_512_v4
+110/563561/campos_512_v4
+110/563562/campos_512_v4
+110/563565/campos_512_v4
+110/563585/campos_512_v4
+110/563588/campos_512_v4
+110/563622/campos_512_v4
+110/563631/campos_512_v4
+110/563633/campos_512_v4
+110/563641/campos_512_v4
+110/563644/campos_512_v4
+110/563659/campos_512_v4
+110/563661/campos_512_v4
+110/563662/campos_512_v4
+110/563670/campos_512_v4
+110/563696/campos_512_v4
+110/563719/campos_512_v4
+110/563731/campos_512_v4
+110/563734/campos_512_v4
+110/563757/campos_512_v4
+110/563759/campos_512_v4
+110/563773/campos_512_v4
+110/563797/campos_512_v4
+110/563808/campos_512_v4
+110/563809/campos_512_v4
+110/563813/campos_512_v4
+110/563817/campos_512_v4
+110/563820/campos_512_v4
+110/563821/campos_512_v4
+110/563824/campos_512_v4
+110/563833/campos_512_v4
+110/563838/campos_512_v4
+110/563853/campos_512_v4
+110/563855/campos_512_v4
+110/563860/campos_512_v4
+110/563878/campos_512_v4
+110/563879/campos_512_v4
+110/563888/campos_512_v4
+110/563889/campos_512_v4
+110/563890/campos_512_v4
+110/563894/campos_512_v4
+110/563896/campos_512_v4
+110/563910/campos_512_v4
+110/563911/campos_512_v4
+110/563916/campos_512_v4
+110/563917/campos_512_v4
+110/563922/campos_512_v4
+110/563923/campos_512_v4
+110/563924/campos_512_v4
+110/563928/campos_512_v4
+110/563948/campos_512_v4
+110/563958/campos_512_v4
+110/563966/campos_512_v4
+110/563973/campos_512_v4
+110/563977/campos_512_v4
+110/563981/campos_512_v4
+110/564002/campos_512_v4
+110/564003/campos_512_v4
+110/564005/campos_512_v4
+110/564006/campos_512_v4
+110/564027/campos_512_v4
+110/564038/campos_512_v4
+110/564051/campos_512_v4
+110/564059/campos_512_v4
+110/564062/campos_512_v4
+110/564066/campos_512_v4
+110/564070/campos_512_v4
+110/564073/campos_512_v4
+110/564081/campos_512_v4
+110/564114/campos_512_v4
+110/564143/campos_512_v4
+110/564145/campos_512_v4
+110/564146/campos_512_v4
+110/564154/campos_512_v4
+110/564155/campos_512_v4
+110/564162/campos_512_v4
+110/564163/campos_512_v4
+110/564172/campos_512_v4
+110/564191/campos_512_v4
+110/564202/campos_512_v4
+110/564206/campos_512_v4
+110/564217/campos_512_v4
+110/564220/campos_512_v4
+110/564222/campos_512_v4
+110/564223/campos_512_v4
+110/564244/campos_512_v4
+110/564259/campos_512_v4
+110/564262/campos_512_v4
+110/564281/campos_512_v4
+110/564304/campos_512_v4
+110/564307/campos_512_v4
+110/564340/campos_512_v4
+110/564342/campos_512_v4
+110/564345/campos_512_v4
+110/564350/campos_512_v4
+110/564360/campos_512_v4
+110/564365/campos_512_v4
+110/564369/campos_512_v4
+110/564372/campos_512_v4
+110/564381/campos_512_v4
+110/564384/campos_512_v4
+110/564387/campos_512_v4
+110/564391/campos_512_v4
+110/564397/campos_512_v4
+110/564398/campos_512_v4
+110/564407/campos_512_v4
+110/564410/campos_512_v4
+110/564421/campos_512_v4
+110/564437/campos_512_v4
+110/564442/campos_512_v4
+110/564445/campos_512_v4
+110/564473/campos_512_v4
+110/564505/campos_512_v4
+110/564510/campos_512_v4
+110/564526/campos_512_v4
+110/564530/campos_512_v4
+110/564533/campos_512_v4
+110/564548/campos_512_v4
+110/564556/campos_512_v4
+110/564557/campos_512_v4
+110/564562/campos_512_v4
+110/564575/campos_512_v4
+110/564586/campos_512_v4
+110/564597/campos_512_v4
+110/564611/campos_512_v4
+110/564628/campos_512_v4
+110/564631/campos_512_v4
+110/564633/campos_512_v4
+110/564663/campos_512_v4
+110/564664/campos_512_v4
+110/564674/campos_512_v4
+110/564681/campos_512_v4
+110/564687/campos_512_v4
+110/564690/campos_512_v4
+110/564696/campos_512_v4
+110/564697/campos_512_v4
+110/564699/campos_512_v4
+110/564701/campos_512_v4
+110/564702/campos_512_v4
+110/564706/campos_512_v4
+110/564725/campos_512_v4
+110/564728/campos_512_v4
+110/564730/campos_512_v4
+110/564731/campos_512_v4
+110/564737/campos_512_v4
+110/564748/campos_512_v4
+110/564754/campos_512_v4
+110/564756/campos_512_v4
+110/564757/campos_512_v4
+110/564771/campos_512_v4
+110/564788/campos_512_v4
+110/564816/campos_512_v4
+110/564847/campos_512_v4
+110/564849/campos_512_v4
+110/564864/campos_512_v4
+110/564867/campos_512_v4
+110/564878/campos_512_v4
+110/564879/campos_512_v4
+110/564885/campos_512_v4
+110/564886/campos_512_v4
+110/564890/campos_512_v4
+110/564899/campos_512_v4
+110/564903/campos_512_v4
+110/564905/campos_512_v4
+110/564910/campos_512_v4
+110/564914/campos_512_v4
+110/564916/campos_512_v4
+110/564930/campos_512_v4
+110/564938/campos_512_v4
+110/564939/campos_512_v4
+110/564946/campos_512_v4
+110/564964/campos_512_v4
+110/564973/campos_512_v4
+110/564976/campos_512_v4
+110/564979/campos_512_v4
+110/564987/campos_512_v4
+110/564991/campos_512_v4
+110/564997/campos_512_v4
+111/565004/campos_512_v4
+111/565026/campos_512_v4
+111/565028/campos_512_v4
+111/565039/campos_512_v4
+111/565048/campos_512_v4
+111/565051/campos_512_v4
+111/565060/campos_512_v4
+111/565061/campos_512_v4
+111/565062/campos_512_v4
+111/565064/campos_512_v4
+111/565070/campos_512_v4
+111/565075/campos_512_v4
+111/565079/campos_512_v4
+111/565085/campos_512_v4
+111/565091/campos_512_v4
+111/565103/campos_512_v4
+111/565107/campos_512_v4
+111/565110/campos_512_v4
+111/565135/campos_512_v4
+111/565136/campos_512_v4
+111/565146/campos_512_v4
+111/565152/campos_512_v4
+111/565173/campos_512_v4
+111/565175/campos_512_v4
+111/565192/campos_512_v4
+111/565198/campos_512_v4
+111/565213/campos_512_v4
+111/565214/campos_512_v4
+111/565219/campos_512_v4
+111/565221/campos_512_v4
+111/565228/campos_512_v4
+111/565251/campos_512_v4
+111/565253/campos_512_v4
+111/565264/campos_512_v4
+111/565284/campos_512_v4
+111/565288/campos_512_v4
+111/565293/campos_512_v4
+111/565294/campos_512_v4
+111/565307/campos_512_v4
+111/565310/campos_512_v4
+111/565314/campos_512_v4
+111/565319/campos_512_v4
+111/565320/campos_512_v4
+111/565333/campos_512_v4
+111/565341/campos_512_v4
+111/565359/campos_512_v4
+111/565376/campos_512_v4
+111/565377/campos_512_v4
+111/565378/campos_512_v4
+111/565381/campos_512_v4
+111/565413/campos_512_v4
+111/565433/campos_512_v4
+111/565435/campos_512_v4
+111/565441/campos_512_v4
+111/565444/campos_512_v4
+111/565455/campos_512_v4
+111/565458/campos_512_v4
+111/565459/campos_512_v4
+111/565475/campos_512_v4
+111/565482/campos_512_v4
+111/565490/campos_512_v4
+111/565491/campos_512_v4
+111/565495/campos_512_v4
+111/565498/campos_512_v4
+111/565507/campos_512_v4
+111/565523/campos_512_v4
+111/565531/campos_512_v4
+111/565546/campos_512_v4
+111/565555/campos_512_v4
+111/565556/campos_512_v4
+111/565558/campos_512_v4
+111/565559/campos_512_v4
+111/565560/campos_512_v4
+111/565572/campos_512_v4
+111/565591/campos_512_v4
+111/565617/campos_512_v4
+111/565620/campos_512_v4
+111/565628/campos_512_v4
+111/565631/campos_512_v4
+111/565650/campos_512_v4
+111/565655/campos_512_v4
+111/565659/campos_512_v4
+111/565661/campos_512_v4
+111/565662/campos_512_v4
+111/565668/campos_512_v4
+111/565676/campos_512_v4
+111/565684/campos_512_v4
+111/565687/campos_512_v4
+111/565695/campos_512_v4
+111/565696/campos_512_v4
+111/565699/campos_512_v4
+111/565703/campos_512_v4
+111/565704/campos_512_v4
+111/565706/campos_512_v4
+111/565708/campos_512_v4
+111/565709/campos_512_v4
+111/565716/campos_512_v4
+111/565717/campos_512_v4
+111/565718/campos_512_v4
+111/565731/campos_512_v4
+111/565739/campos_512_v4
+111/565744/campos_512_v4
+111/565750/campos_512_v4
+111/565767/campos_512_v4
+111/565770/campos_512_v4
+111/565772/campos_512_v4
+111/565780/campos_512_v4
+111/565787/campos_512_v4
+111/565799/campos_512_v4
+111/565800/campos_512_v4
+111/565810/campos_512_v4
+111/565823/campos_512_v4
+111/565825/campos_512_v4
+111/565829/campos_512_v4
+111/565830/campos_512_v4
+111/565832/campos_512_v4
+111/565838/campos_512_v4
+111/565844/campos_512_v4
+111/565872/campos_512_v4
+111/565874/campos_512_v4
+111/565875/campos_512_v4
+111/565877/campos_512_v4
+111/565878/campos_512_v4
+111/565881/campos_512_v4
+111/565887/campos_512_v4
+111/565892/campos_512_v4
+111/565898/campos_512_v4
+111/565909/campos_512_v4
+111/565912/campos_512_v4
+111/565915/campos_512_v4
+111/565918/campos_512_v4
+111/565919/campos_512_v4
+111/565923/campos_512_v4
+111/565928/campos_512_v4
+111/565939/campos_512_v4
+111/565950/campos_512_v4
+111/565957/campos_512_v4
+111/565963/campos_512_v4
+111/565976/campos_512_v4
+111/565977/campos_512_v4
+111/565981/campos_512_v4
+111/565987/campos_512_v4
+111/565989/campos_512_v4
+111/566007/campos_512_v4
+111/566009/campos_512_v4
+111/566017/campos_512_v4
+111/566027/campos_512_v4
+111/566033/campos_512_v4
+111/566036/campos_512_v4
+111/566047/campos_512_v4
+111/566056/campos_512_v4
+111/566061/campos_512_v4
+111/566071/campos_512_v4
+111/566073/campos_512_v4
+111/566078/campos_512_v4
+111/566088/campos_512_v4
+111/566089/campos_512_v4
+111/566108/campos_512_v4
+111/566112/campos_512_v4
+111/566114/campos_512_v4
+111/566116/campos_512_v4
+111/566124/campos_512_v4
+111/566126/campos_512_v4
+111/566127/campos_512_v4
+111/566132/campos_512_v4
+111/566153/campos_512_v4
+111/566175/campos_512_v4
+111/566182/campos_512_v4
+111/566199/campos_512_v4
+111/566202/campos_512_v4
+111/566209/campos_512_v4
+111/566210/campos_512_v4
+111/566221/campos_512_v4
+111/566235/campos_512_v4
+111/566239/campos_512_v4
+111/566241/campos_512_v4
+111/566260/campos_512_v4
+111/566271/campos_512_v4
+111/566273/campos_512_v4
+111/566281/campos_512_v4
+111/566282/campos_512_v4
+111/566283/campos_512_v4
+111/566288/campos_512_v4
+111/566298/campos_512_v4
+111/566313/campos_512_v4
+111/566325/campos_512_v4
+111/566340/campos_512_v4
+111/566345/campos_512_v4
+111/566347/campos_512_v4
+111/566359/campos_512_v4
+111/566364/campos_512_v4
+111/566376/campos_512_v4
+111/566384/campos_512_v4
+111/566387/campos_512_v4
+111/566397/campos_512_v4
+111/566403/campos_512_v4
+111/566407/campos_512_v4
+111/566414/campos_512_v4
+111/566419/campos_512_v4
+111/566430/campos_512_v4
+111/566445/campos_512_v4
+111/566454/campos_512_v4
+111/566456/campos_512_v4
+111/566459/campos_512_v4
+111/566463/campos_512_v4
+111/566465/campos_512_v4
+111/566466/campos_512_v4
+111/566476/campos_512_v4
+111/566486/campos_512_v4
+111/566487/campos_512_v4
+111/566496/campos_512_v4
+111/566497/campos_512_v4
+111/566503/campos_512_v4
+111/566509/campos_512_v4
+111/566514/campos_512_v4
+111/566525/campos_512_v4
+111/566529/campos_512_v4
+111/566537/campos_512_v4
+111/566566/campos_512_v4
+111/566580/campos_512_v4
+111/566592/campos_512_v4
+111/566596/campos_512_v4
+111/566604/campos_512_v4
+111/566618/campos_512_v4
+111/566630/campos_512_v4
+111/566672/campos_512_v4
+111/566682/campos_512_v4
+111/566694/campos_512_v4
+111/566711/campos_512_v4
+111/566714/campos_512_v4
+111/566719/campos_512_v4
+111/566731/campos_512_v4
+111/566769/campos_512_v4
+111/566771/campos_512_v4
+111/566772/campos_512_v4
+111/566774/campos_512_v4
+111/566782/campos_512_v4
+111/566784/campos_512_v4
+111/566811/campos_512_v4
+111/566818/campos_512_v4
+111/566824/campos_512_v4
+111/566828/campos_512_v4
+111/566855/campos_512_v4
+111/566871/campos_512_v4
+111/566873/campos_512_v4
+111/566879/campos_512_v4
+111/566880/campos_512_v4
+111/566885/campos_512_v4
+111/566899/campos_512_v4
+111/566903/campos_512_v4
+111/566907/campos_512_v4
+111/566923/campos_512_v4
+111/566940/campos_512_v4
+111/566953/campos_512_v4
+111/566954/campos_512_v4
+111/566956/campos_512_v4
+111/566959/campos_512_v4
+111/566961/campos_512_v4
+111/566967/campos_512_v4
+111/566973/campos_512_v4
+111/566982/campos_512_v4
+111/566983/campos_512_v4
+111/566998/campos_512_v4
+111/567008/campos_512_v4
+111/567015/campos_512_v4
+111/567017/campos_512_v4
+111/567053/campos_512_v4
+111/567055/campos_512_v4
+111/567060/campos_512_v4
+111/567067/campos_512_v4
+111/567068/campos_512_v4
+111/567086/campos_512_v4
+111/567098/campos_512_v4
+111/567118/campos_512_v4
+111/567126/campos_512_v4
+111/567131/campos_512_v4
+111/567132/campos_512_v4
+111/567167/campos_512_v4
+111/567172/campos_512_v4
+111/567188/campos_512_v4
+111/567203/campos_512_v4
+111/567215/campos_512_v4
+111/567230/campos_512_v4
+111/567243/campos_512_v4
+111/567247/campos_512_v4
+111/567249/campos_512_v4
+111/567253/campos_512_v4
+111/567266/campos_512_v4
+111/567280/campos_512_v4
+111/567283/campos_512_v4
+111/567284/campos_512_v4
+111/567303/campos_512_v4
+111/567307/campos_512_v4
+111/567309/campos_512_v4
+111/567310/campos_512_v4
+111/567342/campos_512_v4
+111/567344/campos_512_v4
+111/567354/campos_512_v4
+111/567360/campos_512_v4
+111/567361/campos_512_v4
+111/567362/campos_512_v4
+111/567369/campos_512_v4
+111/567371/campos_512_v4
+111/567383/campos_512_v4
+111/567392/campos_512_v4
+111/567397/campos_512_v4
+111/567408/campos_512_v4
+111/567424/campos_512_v4
+111/567443/campos_512_v4
+111/567455/campos_512_v4
+111/567457/campos_512_v4
+111/567459/campos_512_v4
+111/567463/campos_512_v4
+111/567477/campos_512_v4
+111/567487/campos_512_v4
+111/567495/campos_512_v4
+111/567496/campos_512_v4
+111/567512/campos_512_v4
+111/567517/campos_512_v4
+111/567518/campos_512_v4
+111/567519/campos_512_v4
+111/567522/campos_512_v4
+111/567526/campos_512_v4
+111/567529/campos_512_v4
+111/567534/campos_512_v4
+111/567535/campos_512_v4
+111/567541/campos_512_v4
+111/567556/campos_512_v4
+111/567564/campos_512_v4
+111/567567/campos_512_v4
+111/567568/campos_512_v4
+111/567569/campos_512_v4
+111/567576/campos_512_v4
+111/567601/campos_512_v4
+111/567608/campos_512_v4
+111/567610/campos_512_v4
+111/567618/campos_512_v4
+111/567620/campos_512_v4
+111/567628/campos_512_v4
+111/567657/campos_512_v4
+111/567662/campos_512_v4
+111/567672/campos_512_v4
+111/567674/campos_512_v4
+111/567677/campos_512_v4
+111/567683/campos_512_v4
+111/567684/campos_512_v4
+111/567687/campos_512_v4
+111/567690/campos_512_v4
+111/567692/campos_512_v4
+111/567694/campos_512_v4
+111/567697/campos_512_v4
+111/567699/campos_512_v4
+111/567700/campos_512_v4
+111/567708/campos_512_v4
+111/567717/campos_512_v4
+111/567746/campos_512_v4
+111/567791/campos_512_v4
+111/567792/campos_512_v4
+111/567802/campos_512_v4
+111/567807/campos_512_v4
+111/567811/campos_512_v4
+111/567815/campos_512_v4
+111/567818/campos_512_v4
+111/567824/campos_512_v4
+111/567826/campos_512_v4
+111/567828/campos_512_v4
+111/567833/campos_512_v4
+111/567835/campos_512_v4
+111/567841/campos_512_v4
+111/567854/campos_512_v4
+111/567855/campos_512_v4
+111/567861/campos_512_v4
+111/567873/campos_512_v4
+111/567874/campos_512_v4
+111/567881/campos_512_v4
+111/567896/campos_512_v4
+111/567899/campos_512_v4
+111/567909/campos_512_v4
+111/567911/campos_512_v4
+111/567921/campos_512_v4
+111/567925/campos_512_v4
+111/567937/campos_512_v4
+111/567957/campos_512_v4
+111/567958/campos_512_v4
+111/567959/campos_512_v4
+111/567960/campos_512_v4
+111/567965/campos_512_v4
+111/567975/campos_512_v4
+111/567989/campos_512_v4
+111/567990/campos_512_v4
+111/567991/campos_512_v4
+111/567992/campos_512_v4
+111/567995/campos_512_v4
+111/568018/campos_512_v4
+111/568025/campos_512_v4
+111/568033/campos_512_v4
+111/568034/campos_512_v4
+111/568051/campos_512_v4
+111/568052/campos_512_v4
+111/568056/campos_512_v4
+111/568059/campos_512_v4
+111/568060/campos_512_v4
+111/568068/campos_512_v4
+111/568076/campos_512_v4
+111/568093/campos_512_v4
+111/568095/campos_512_v4
+111/568104/campos_512_v4
+111/568109/campos_512_v4
+111/568112/campos_512_v4
+111/568113/campos_512_v4
+111/568128/campos_512_v4
+111/568129/campos_512_v4
+111/568140/campos_512_v4
+111/568148/campos_512_v4
+111/568161/campos_512_v4
+111/568167/campos_512_v4
+111/568172/campos_512_v4
+111/568179/campos_512_v4
+111/568180/campos_512_v4
+111/568181/campos_512_v4
+111/568212/campos_512_v4
+111/568218/campos_512_v4
+111/568223/campos_512_v4
+111/568241/campos_512_v4
+111/568243/campos_512_v4
+111/568251/campos_512_v4
+111/568254/campos_512_v4
+111/568255/campos_512_v4
+111/568259/campos_512_v4
+111/568270/campos_512_v4
+111/568282/campos_512_v4
+111/568291/campos_512_v4
+111/568292/campos_512_v4
+111/568293/campos_512_v4
+111/568294/campos_512_v4
+111/568295/campos_512_v4
+111/568298/campos_512_v4
+111/568305/campos_512_v4
+111/568314/campos_512_v4
+111/568325/campos_512_v4
+111/568341/campos_512_v4
+111/568371/campos_512_v4
+111/568372/campos_512_v4
+111/568374/campos_512_v4
+111/568380/campos_512_v4
+111/568386/campos_512_v4
+111/568391/campos_512_v4
+111/568402/campos_512_v4
+111/568404/campos_512_v4
+111/568415/campos_512_v4
+111/568417/campos_512_v4
+111/568433/campos_512_v4
+111/568456/campos_512_v4
+111/568465/campos_512_v4
+111/568469/campos_512_v4
+111/568472/campos_512_v4
+111/568473/campos_512_v4
+111/568486/campos_512_v4
+111/568495/campos_512_v4
+111/568499/campos_512_v4
+111/568502/campos_512_v4
+111/568504/campos_512_v4
+111/568505/campos_512_v4
+111/568508/campos_512_v4
+111/568525/campos_512_v4
+111/568531/campos_512_v4
+111/568537/campos_512_v4
+111/568545/campos_512_v4
+111/568546/campos_512_v4
+111/568551/campos_512_v4
+111/568554/campos_512_v4
+111/568555/campos_512_v4
+111/568556/campos_512_v4
+111/568561/campos_512_v4
+111/568566/campos_512_v4
+111/568568/campos_512_v4
+111/568607/campos_512_v4
+111/568610/campos_512_v4
+111/568611/campos_512_v4
+111/568624/campos_512_v4
+111/568627/campos_512_v4
+111/568634/campos_512_v4
+111/568656/campos_512_v4
+111/568657/campos_512_v4
+111/568659/campos_512_v4
+111/568679/campos_512_v4
+111/568689/campos_512_v4
+111/568694/campos_512_v4
+111/568701/campos_512_v4
+111/568703/campos_512_v4
+111/568709/campos_512_v4
+111/568715/campos_512_v4
+111/568725/campos_512_v4
+111/568726/campos_512_v4
+111/568728/campos_512_v4
+111/568730/campos_512_v4
+111/568735/campos_512_v4
+111/568737/campos_512_v4
+111/568743/campos_512_v4
+111/568744/campos_512_v4
+111/568746/campos_512_v4
+111/568754/campos_512_v4
+111/568769/campos_512_v4
+111/568776/campos_512_v4
+111/568782/campos_512_v4
+111/568794/campos_512_v4
+111/568797/campos_512_v4
+111/568818/campos_512_v4
+111/568832/campos_512_v4
+111/568834/campos_512_v4
+111/568844/campos_512_v4
+111/568853/campos_512_v4
+111/568858/campos_512_v4
+111/568868/campos_512_v4
+111/568870/campos_512_v4
+111/568871/campos_512_v4
+111/568894/campos_512_v4
+111/568900/campos_512_v4
+111/568907/campos_512_v4
+111/568909/campos_512_v4
+111/568910/campos_512_v4
+111/568914/campos_512_v4
+111/568925/campos_512_v4
+111/568939/campos_512_v4
+111/568940/campos_512_v4
+111/568941/campos_512_v4
+111/568949/campos_512_v4
+111/568958/campos_512_v4
+111/568960/campos_512_v4
+111/568969/campos_512_v4
+111/568976/campos_512_v4
+111/568977/campos_512_v4
+111/568980/campos_512_v4
+111/569000/campos_512_v4
+111/569015/campos_512_v4
+111/569028/campos_512_v4
+111/569033/campos_512_v4
+111/569040/campos_512_v4
+111/569045/campos_512_v4
+111/569059/campos_512_v4
+111/569063/campos_512_v4
+111/569068/campos_512_v4
+111/569076/campos_512_v4
+111/569077/campos_512_v4
+111/569081/campos_512_v4
+111/569087/campos_512_v4
+111/569092/campos_512_v4
+111/569100/campos_512_v4
+111/569104/campos_512_v4
+111/569105/campos_512_v4
+111/569107/campos_512_v4
+111/569112/campos_512_v4
+111/569114/campos_512_v4
+111/569126/campos_512_v4
+111/569138/campos_512_v4
+111/569140/campos_512_v4
+111/569143/campos_512_v4
+111/569144/campos_512_v4
+111/569148/campos_512_v4
+111/569157/campos_512_v4
+111/569191/campos_512_v4
+111/569206/campos_512_v4
+111/569213/campos_512_v4
+111/569215/campos_512_v4
+111/569218/campos_512_v4
+111/569220/campos_512_v4
+111/569231/campos_512_v4
+111/569237/campos_512_v4
+111/569245/campos_512_v4
+111/569254/campos_512_v4
+111/569257/campos_512_v4
+111/569263/campos_512_v4
+111/569272/campos_512_v4
+111/569274/campos_512_v4
+111/569275/campos_512_v4
+111/569276/campos_512_v4
+111/569284/campos_512_v4
+111/569288/campos_512_v4
+111/569293/campos_512_v4
+111/569309/campos_512_v4
+111/569314/campos_512_v4
+111/569317/campos_512_v4
+111/569320/campos_512_v4
+111/569340/campos_512_v4
+111/569342/campos_512_v4
+111/569343/campos_512_v4
+111/569347/campos_512_v4
+111/569348/campos_512_v4
+111/569351/campos_512_v4
+111/569363/campos_512_v4
+111/569375/campos_512_v4
+111/569376/campos_512_v4
+111/569395/campos_512_v4
+111/569400/campos_512_v4
+111/569401/campos_512_v4
+111/569402/campos_512_v4
+111/569421/campos_512_v4
+111/569422/campos_512_v4
+111/569427/campos_512_v4
+111/569428/campos_512_v4
+111/569435/campos_512_v4
+111/569452/campos_512_v4
+111/569456/campos_512_v4
+111/569478/campos_512_v4
+111/569504/campos_512_v4
+111/569507/campos_512_v4
+111/569508/campos_512_v4
+111/569512/campos_512_v4
+111/569513/campos_512_v4
+111/569514/campos_512_v4
+111/569517/campos_512_v4
+111/569518/campos_512_v4
+111/569521/campos_512_v4
+111/569522/campos_512_v4
+111/569525/campos_512_v4
+111/569529/campos_512_v4
+111/569538/campos_512_v4
+111/569542/campos_512_v4
+111/569550/campos_512_v4
+111/569553/campos_512_v4
+111/569555/campos_512_v4
+111/569572/campos_512_v4
+111/569573/campos_512_v4
+111/569585/campos_512_v4
+111/569591/campos_512_v4
+111/569599/campos_512_v4
+111/569615/campos_512_v4
+111/569633/campos_512_v4
+111/569642/campos_512_v4
+111/569645/campos_512_v4
+111/569657/campos_512_v4
+111/569659/campos_512_v4
+111/569661/campos_512_v4
+111/569672/campos_512_v4
+111/569675/campos_512_v4
+111/569685/campos_512_v4
+111/569703/campos_512_v4
+111/569712/campos_512_v4
+111/569722/campos_512_v4
+111/569737/campos_512_v4
+111/569750/campos_512_v4
+111/569782/campos_512_v4
+111/569796/campos_512_v4
+111/569808/campos_512_v4
+111/569812/campos_512_v4
+111/569822/campos_512_v4
+111/569823/campos_512_v4
+111/569824/campos_512_v4
+111/569832/campos_512_v4
+111/569849/campos_512_v4
+111/569854/campos_512_v4
+111/569855/campos_512_v4
+111/569861/campos_512_v4
+111/569862/campos_512_v4
+111/569867/campos_512_v4
+111/569871/campos_512_v4
+111/569874/campos_512_v4
+111/569876/campos_512_v4
+111/569877/campos_512_v4
+111/569881/campos_512_v4
+111/569886/campos_512_v4
+111/569894/campos_512_v4
+111/569896/campos_512_v4
+111/569906/campos_512_v4
+111/569908/campos_512_v4
+111/569909/campos_512_v4
+111/569920/campos_512_v4
+111/569924/campos_512_v4
+111/569933/campos_512_v4
+111/569960/campos_512_v4
+111/569963/campos_512_v4
+111/569967/campos_512_v4
+111/569973/campos_512_v4
+111/569974/campos_512_v4
+111/569996/campos_512_v4
+111/569998/campos_512_v4
+112/570009/campos_512_v4
+112/570028/campos_512_v4
+112/570033/campos_512_v4
+112/570058/campos_512_v4
+112/570059/campos_512_v4
+112/570063/campos_512_v4
+112/570064/campos_512_v4
+112/570084/campos_512_v4
+112/570085/campos_512_v4
+112/570091/campos_512_v4
+112/570096/campos_512_v4
+112/570097/campos_512_v4
+112/570101/campos_512_v4
+112/570103/campos_512_v4
+112/570105/campos_512_v4
+112/570112/campos_512_v4
+112/570114/campos_512_v4
+112/570115/campos_512_v4
+112/570120/campos_512_v4
+112/570122/campos_512_v4
+112/570138/campos_512_v4
+112/570140/campos_512_v4
+112/570145/campos_512_v4
+112/570150/campos_512_v4
+112/570153/campos_512_v4
+112/570154/campos_512_v4
+112/570156/campos_512_v4
+112/570161/campos_512_v4
+112/570164/campos_512_v4
+112/570168/campos_512_v4
+112/570178/campos_512_v4
+112/570186/campos_512_v4
+112/570199/campos_512_v4
+112/570211/campos_512_v4
+112/570212/campos_512_v4
+112/570240/campos_512_v4
+112/570241/campos_512_v4
+112/570243/campos_512_v4
+112/570249/campos_512_v4
+112/570264/campos_512_v4
+112/570275/campos_512_v4
+112/570282/campos_512_v4
+112/570285/campos_512_v4
+112/570308/campos_512_v4
+112/570309/campos_512_v4
+112/570326/campos_512_v4
+112/570333/campos_512_v4
+112/570343/campos_512_v4
+112/570345/campos_512_v4
+112/570346/campos_512_v4
+112/570348/campos_512_v4
+112/570352/campos_512_v4
+112/570361/campos_512_v4
+112/570364/campos_512_v4
+112/570365/campos_512_v4
+112/570377/campos_512_v4
+112/570378/campos_512_v4
+112/570384/campos_512_v4
+112/570399/campos_512_v4
+112/570407/campos_512_v4
+112/570409/campos_512_v4
+112/570412/campos_512_v4
+112/570417/campos_512_v4
+112/570429/campos_512_v4
+112/570441/campos_512_v4
+112/570444/campos_512_v4
+112/570447/campos_512_v4
+112/570452/campos_512_v4
+112/570455/campos_512_v4
+112/570457/campos_512_v4
+112/570461/campos_512_v4
+112/570468/campos_512_v4
+112/570470/campos_512_v4
+112/570474/campos_512_v4
+112/570485/campos_512_v4
+112/570487/campos_512_v4
+112/570494/campos_512_v4
+112/570497/campos_512_v4
+112/570499/campos_512_v4
+112/570503/campos_512_v4
+112/570505/campos_512_v4
+112/570510/campos_512_v4
+112/570511/campos_512_v4
+112/570520/campos_512_v4
+112/570525/campos_512_v4
+112/570527/campos_512_v4
+112/570537/campos_512_v4
+112/570540/campos_512_v4
+112/570542/campos_512_v4
+112/570543/campos_512_v4
+112/570558/campos_512_v4
+112/570574/campos_512_v4
+112/570576/campos_512_v4
+112/570590/campos_512_v4
+112/570600/campos_512_v4
+112/570603/campos_512_v4
+112/570607/campos_512_v4
+112/570608/campos_512_v4
+112/570617/campos_512_v4
+112/570627/campos_512_v4
+112/570629/campos_512_v4
+112/570635/campos_512_v4
+112/570636/campos_512_v4
+112/570640/campos_512_v4
+112/570646/campos_512_v4
+112/570648/campos_512_v4
+112/570658/campos_512_v4
+112/570660/campos_512_v4
+112/570664/campos_512_v4
+112/570669/campos_512_v4
+112/570678/campos_512_v4
+112/570680/campos_512_v4
+112/570688/campos_512_v4
+112/570692/campos_512_v4
+112/570697/campos_512_v4
+112/570709/campos_512_v4
+112/570713/campos_512_v4
+112/570716/campos_512_v4
+112/570718/campos_512_v4
+112/570741/campos_512_v4
+112/570746/campos_512_v4
+112/570761/campos_512_v4
+112/570762/campos_512_v4
+112/570765/campos_512_v4
+112/570767/campos_512_v4
+112/570780/campos_512_v4
+112/570789/campos_512_v4
+112/570791/campos_512_v4
+112/570794/campos_512_v4
+112/570797/campos_512_v4
+112/570800/campos_512_v4
+112/570801/campos_512_v4
+112/570811/campos_512_v4
+112/570813/campos_512_v4
+112/570833/campos_512_v4
+112/570872/campos_512_v4
+112/570875/campos_512_v4
+112/570877/campos_512_v4
+112/570878/campos_512_v4
+112/570881/campos_512_v4
+112/570887/campos_512_v4
+112/570892/campos_512_v4
+112/570894/campos_512_v4
+112/570899/campos_512_v4
+112/570900/campos_512_v4
+112/570909/campos_512_v4
+112/570916/campos_512_v4
+112/570922/campos_512_v4
+112/570930/campos_512_v4
+112/570935/campos_512_v4
+112/570938/campos_512_v4
+112/570941/campos_512_v4
+112/570953/campos_512_v4
+112/570956/campos_512_v4
+112/570960/campos_512_v4
+112/570982/campos_512_v4
+112/570984/campos_512_v4
+112/570987/campos_512_v4
+112/570992/campos_512_v4
+112/570997/campos_512_v4
+112/570998/campos_512_v4
+112/571000/campos_512_v4
+112/571013/campos_512_v4
+112/571018/campos_512_v4
+112/571019/campos_512_v4
+112/571020/campos_512_v4
+112/571034/campos_512_v4
+112/571038/campos_512_v4
+112/571040/campos_512_v4
+112/571046/campos_512_v4
+112/571053/campos_512_v4
+112/571054/campos_512_v4
+112/571060/campos_512_v4
+112/571072/campos_512_v4
+112/571080/campos_512_v4
+112/571094/campos_512_v4
+112/571110/campos_512_v4
+112/571137/campos_512_v4
+112/571140/campos_512_v4
+112/571141/campos_512_v4
+112/571149/campos_512_v4
+112/571156/campos_512_v4
+112/571159/campos_512_v4
+112/571160/campos_512_v4
+112/571175/campos_512_v4
+112/571178/campos_512_v4
+112/571179/campos_512_v4
+112/571198/campos_512_v4
+112/571200/campos_512_v4
+112/571201/campos_512_v4
+112/571202/campos_512_v4
+112/571210/campos_512_v4
+112/571215/campos_512_v4
+112/571238/campos_512_v4
+112/571275/campos_512_v4
+112/571294/campos_512_v4
+112/571315/campos_512_v4
+112/571316/campos_512_v4
+112/571318/campos_512_v4
+112/571320/campos_512_v4
+112/571321/campos_512_v4
+112/571322/campos_512_v4
+112/571325/campos_512_v4
+112/571338/campos_512_v4
+112/571341/campos_512_v4
+112/571342/campos_512_v4
+112/571351/campos_512_v4
+112/571370/campos_512_v4
+112/571371/campos_512_v4
+112/571375/campos_512_v4
+112/571382/campos_512_v4
+112/571391/campos_512_v4
+112/571392/campos_512_v4
+112/571395/campos_512_v4
+112/571406/campos_512_v4
+112/571410/campos_512_v4
+112/571411/campos_512_v4
+112/571421/campos_512_v4
+112/571431/campos_512_v4
+112/571440/campos_512_v4
+112/571477/campos_512_v4
+112/571485/campos_512_v4
+112/571489/campos_512_v4
+112/571503/campos_512_v4
+112/571505/campos_512_v4
+112/571512/campos_512_v4
+112/571523/campos_512_v4
+112/571552/campos_512_v4
+112/571559/campos_512_v4
+112/571562/campos_512_v4
+112/571573/campos_512_v4
+112/571585/campos_512_v4
+112/571589/campos_512_v4
+112/571590/campos_512_v4
+112/571603/campos_512_v4
+112/571604/campos_512_v4
+112/571605/campos_512_v4
+112/571624/campos_512_v4
+112/571625/campos_512_v4
+112/571629/campos_512_v4
+112/571631/campos_512_v4
+112/571652/campos_512_v4
+112/571653/campos_512_v4
+112/571663/campos_512_v4
+112/571669/campos_512_v4
+112/571673/campos_512_v4
+112/571676/campos_512_v4
+112/571679/campos_512_v4
+112/571698/campos_512_v4
+112/571701/campos_512_v4
+112/571707/campos_512_v4
+112/571709/campos_512_v4
+112/571714/campos_512_v4
+112/571715/campos_512_v4
+112/571731/campos_512_v4
+112/571740/campos_512_v4
+112/571741/campos_512_v4
+112/571743/campos_512_v4
+112/571747/campos_512_v4
+112/571760/campos_512_v4
+112/571773/campos_512_v4
+112/571775/campos_512_v4
+112/571780/campos_512_v4
+112/571787/campos_512_v4
+112/571790/campos_512_v4
+112/571802/campos_512_v4
+112/571811/campos_512_v4
+112/571813/campos_512_v4
+112/571814/campos_512_v4
+112/571816/campos_512_v4
+112/571820/campos_512_v4
+112/571826/campos_512_v4
+112/571838/campos_512_v4
+112/571847/campos_512_v4
+112/571849/campos_512_v4
+112/571859/campos_512_v4
+112/571865/campos_512_v4
+112/571871/campos_512_v4
+112/571889/campos_512_v4
+112/571892/campos_512_v4
+112/571894/campos_512_v4
+112/571907/campos_512_v4
+112/571912/campos_512_v4
+112/571914/campos_512_v4
+112/571923/campos_512_v4
+112/571929/campos_512_v4
+112/571932/campos_512_v4
+112/571942/campos_512_v4
+112/571947/campos_512_v4
+112/571954/campos_512_v4
+112/571956/campos_512_v4
+112/571970/campos_512_v4
+112/571974/campos_512_v4
+112/571982/campos_512_v4
+112/571983/campos_512_v4
+112/572009/campos_512_v4
+112/572014/campos_512_v4
+112/572015/campos_512_v4
+112/572016/campos_512_v4
+112/572023/campos_512_v4
+112/572036/campos_512_v4
+112/572047/campos_512_v4
+112/572050/campos_512_v4
+112/572051/campos_512_v4
+112/572052/campos_512_v4
+112/572062/campos_512_v4
+112/572075/campos_512_v4
+112/572082/campos_512_v4
+112/572090/campos_512_v4
+112/572091/campos_512_v4
+112/572093/campos_512_v4
+112/572102/campos_512_v4
+112/572103/campos_512_v4
+112/572115/campos_512_v4
+112/572116/campos_512_v4
+112/572117/campos_512_v4
+112/572120/campos_512_v4
+112/572122/campos_512_v4
+112/572123/campos_512_v4
+112/572125/campos_512_v4
+112/572129/campos_512_v4
+112/572136/campos_512_v4
+112/572139/campos_512_v4
+112/572142/campos_512_v4
+112/572143/campos_512_v4
+112/572144/campos_512_v4
+112/572152/campos_512_v4
+112/572162/campos_512_v4
+112/572170/campos_512_v4
+112/572188/campos_512_v4
+112/572203/campos_512_v4
+112/572205/campos_512_v4
+112/572218/campos_512_v4
+112/572231/campos_512_v4
+112/572233/campos_512_v4
+112/572234/campos_512_v4
+112/572240/campos_512_v4
+112/572253/campos_512_v4
+112/572256/campos_512_v4
+112/572257/campos_512_v4
+112/572260/campos_512_v4
+112/572265/campos_512_v4
+112/572273/campos_512_v4
+112/572283/campos_512_v4
+112/572285/campos_512_v4
+112/572289/campos_512_v4
+112/572290/campos_512_v4
+112/572291/campos_512_v4
+112/572297/campos_512_v4
+112/572308/campos_512_v4
+112/572315/campos_512_v4
+112/572319/campos_512_v4
+112/572335/campos_512_v4
+112/572345/campos_512_v4
+112/572356/campos_512_v4
+112/572372/campos_512_v4
+112/572385/campos_512_v4
+112/572386/campos_512_v4
+112/572389/campos_512_v4
+112/572422/campos_512_v4
+112/572427/campos_512_v4
+112/572428/campos_512_v4
+112/572436/campos_512_v4
+112/572437/campos_512_v4
+112/572438/campos_512_v4
+112/572451/campos_512_v4
+112/572453/campos_512_v4
+112/572462/campos_512_v4
+112/572515/campos_512_v4
+112/572517/campos_512_v4
+112/572520/campos_512_v4
+112/572529/campos_512_v4
+112/572538/campos_512_v4
+112/572547/campos_512_v4
+112/572551/campos_512_v4
+112/572553/campos_512_v4
+112/572560/campos_512_v4
+112/572567/campos_512_v4
+112/572586/campos_512_v4
+112/572610/campos_512_v4
+112/572613/campos_512_v4
+112/572614/campos_512_v4
+112/572619/campos_512_v4
+112/572623/campos_512_v4
+112/572624/campos_512_v4
+112/572636/campos_512_v4
+112/572643/campos_512_v4
+112/572659/campos_512_v4
+112/572666/campos_512_v4
+112/572693/campos_512_v4
+112/572697/campos_512_v4
+112/572703/campos_512_v4
+112/572713/campos_512_v4
+112/572727/campos_512_v4
+112/572730/campos_512_v4
+112/572731/campos_512_v4
+112/572738/campos_512_v4
+112/572748/campos_512_v4
+112/572756/campos_512_v4
+112/572760/campos_512_v4
+112/572772/campos_512_v4
+112/572784/campos_512_v4
+112/572786/campos_512_v4
+112/572787/campos_512_v4
+112/572803/campos_512_v4
+112/572813/campos_512_v4
+112/572842/campos_512_v4
+112/572864/campos_512_v4
+112/572871/campos_512_v4
+112/572878/campos_512_v4
+112/572879/campos_512_v4
+112/572888/campos_512_v4
+112/572890/campos_512_v4
+112/572897/campos_512_v4
+112/572921/campos_512_v4
+112/572930/campos_512_v4
+112/572940/campos_512_v4
+112/572953/campos_512_v4
+112/572973/campos_512_v4
+112/572975/campos_512_v4
+112/572986/campos_512_v4
+112/572993/campos_512_v4
+112/572999/campos_512_v4
+112/573007/campos_512_v4
+112/573014/campos_512_v4
+112/573020/campos_512_v4
+112/573030/campos_512_v4
+112/573041/campos_512_v4
+112/573043/campos_512_v4
+112/573049/campos_512_v4
+112/573059/campos_512_v4
+112/573061/campos_512_v4
+112/573071/campos_512_v4
+112/573075/campos_512_v4
+112/573082/campos_512_v4
+112/573089/campos_512_v4
+112/573095/campos_512_v4
+112/573107/campos_512_v4
+112/573109/campos_512_v4
+112/573112/campos_512_v4
+112/573120/campos_512_v4
+112/573135/campos_512_v4
+112/573137/campos_512_v4
+112/573152/campos_512_v4
+112/573154/campos_512_v4
+112/573164/campos_512_v4
+112/573165/campos_512_v4
+112/573170/campos_512_v4
+112/573177/campos_512_v4
+112/573186/campos_512_v4
+112/573196/campos_512_v4
+112/573212/campos_512_v4
+112/573213/campos_512_v4
+112/573223/campos_512_v4
+112/573225/campos_512_v4
+112/573239/campos_512_v4
+112/573241/campos_512_v4
+112/573246/campos_512_v4
+112/573252/campos_512_v4
+112/573268/campos_512_v4
+112/573276/campos_512_v4
+112/573281/campos_512_v4
+112/573297/campos_512_v4
+112/573310/campos_512_v4
+112/573313/campos_512_v4
+112/573341/campos_512_v4
+112/573346/campos_512_v4
+112/573347/campos_512_v4
+112/573354/campos_512_v4
+112/573371/campos_512_v4
+112/573375/campos_512_v4
+112/573384/campos_512_v4
+112/573389/campos_512_v4
+112/573413/campos_512_v4
+112/573415/campos_512_v4
+112/573418/campos_512_v4
+112/573426/campos_512_v4
+112/573427/campos_512_v4
+112/573433/campos_512_v4
+112/573434/campos_512_v4
+112/573444/campos_512_v4
+112/573447/campos_512_v4
+112/573448/campos_512_v4
+112/573451/campos_512_v4
+112/573456/campos_512_v4
+112/573461/campos_512_v4
+112/573465/campos_512_v4
+112/573490/campos_512_v4
+112/573499/campos_512_v4
+112/573500/campos_512_v4
+112/573502/campos_512_v4
+112/573518/campos_512_v4
+112/573525/campos_512_v4
+112/573550/campos_512_v4
+112/573553/campos_512_v4
+112/573572/campos_512_v4
+112/573577/campos_512_v4
+112/573585/campos_512_v4
+112/573595/campos_512_v4
+112/573607/campos_512_v4
+112/573609/campos_512_v4
+112/573612/campos_512_v4
+112/573646/campos_512_v4
+112/573651/campos_512_v4
+112/573653/campos_512_v4
+112/573654/campos_512_v4
+112/573671/campos_512_v4
+112/573676/campos_512_v4
+112/573690/campos_512_v4
+112/573693/campos_512_v4
+112/573705/campos_512_v4
+112/573706/campos_512_v4
+112/573708/campos_512_v4
+112/573738/campos_512_v4
+112/573746/campos_512_v4
+112/573753/campos_512_v4
+112/573754/campos_512_v4
+112/573769/campos_512_v4
+112/573778/campos_512_v4
+112/573779/campos_512_v4
+112/573782/campos_512_v4
+112/573804/campos_512_v4
+112/573809/campos_512_v4
+112/573811/campos_512_v4
+112/573829/campos_512_v4
+112/573830/campos_512_v4
+112/573838/campos_512_v4
+112/573839/campos_512_v4
+112/573841/campos_512_v4
+112/573856/campos_512_v4
+112/573858/campos_512_v4
+112/573870/campos_512_v4
+112/573880/campos_512_v4
+112/573892/campos_512_v4
+112/573896/campos_512_v4
+112/573904/campos_512_v4
+112/573905/campos_512_v4
+112/573907/campos_512_v4
+112/573916/campos_512_v4
+112/573917/campos_512_v4
+112/573925/campos_512_v4
+112/573934/campos_512_v4
+112/573939/campos_512_v4
+112/573957/campos_512_v4
+112/573958/campos_512_v4
+112/573965/campos_512_v4
+112/573979/campos_512_v4
+112/573988/campos_512_v4
+112/573989/campos_512_v4
+112/574007/campos_512_v4
+112/574009/campos_512_v4
+112/574011/campos_512_v4
+112/574019/campos_512_v4
+112/574024/campos_512_v4
+112/574044/campos_512_v4
+112/574053/campos_512_v4
+112/574056/campos_512_v4
+112/574066/campos_512_v4
+112/574075/campos_512_v4
+112/574121/campos_512_v4
+112/574126/campos_512_v4
+112/574134/campos_512_v4
+112/574136/campos_512_v4
+112/574138/campos_512_v4
+112/574140/campos_512_v4
+112/574158/campos_512_v4
+112/574160/campos_512_v4
+112/574170/campos_512_v4
+112/574171/campos_512_v4
+112/574177/campos_512_v4
+112/574179/campos_512_v4
+112/574190/campos_512_v4
+112/574198/campos_512_v4
+112/574201/campos_512_v4
+112/574213/campos_512_v4
+112/574216/campos_512_v4
+112/574217/campos_512_v4
+112/574219/campos_512_v4
+112/574225/campos_512_v4
+112/574226/campos_512_v4
+112/574233/campos_512_v4
+112/574234/campos_512_v4
+112/574235/campos_512_v4
+112/574242/campos_512_v4
+112/574246/campos_512_v4
+112/574255/campos_512_v4
+112/574257/campos_512_v4
+112/574268/campos_512_v4
+112/574269/campos_512_v4
+112/574278/campos_512_v4
+112/574280/campos_512_v4
+112/574287/campos_512_v4
+112/574289/campos_512_v4
+112/574302/campos_512_v4
+112/574303/campos_512_v4
+112/574304/campos_512_v4
+112/574310/campos_512_v4
+112/574312/campos_512_v4
+112/574332/campos_512_v4
+112/574333/campos_512_v4
+112/574335/campos_512_v4
+112/574339/campos_512_v4
+112/574359/campos_512_v4
+112/574374/campos_512_v4
+112/574389/campos_512_v4
+112/574391/campos_512_v4
+112/574394/campos_512_v4
+112/574397/campos_512_v4
+112/574402/campos_512_v4
+112/574406/campos_512_v4
+112/574419/campos_512_v4
+112/574426/campos_512_v4
+112/574427/campos_512_v4
+112/574429/campos_512_v4
+112/574433/campos_512_v4
+112/574434/campos_512_v4
+112/574436/campos_512_v4
+112/574439/campos_512_v4
+112/574445/campos_512_v4
+112/574466/campos_512_v4
+112/574475/campos_512_v4
+112/574477/campos_512_v4
+112/574495/campos_512_v4
+112/574497/campos_512_v4
+112/574507/campos_512_v4
+112/574510/campos_512_v4
+112/574524/campos_512_v4
+112/574541/campos_512_v4
+112/574545/campos_512_v4
+112/574550/campos_512_v4
+112/574556/campos_512_v4
+112/574566/campos_512_v4
+112/574583/campos_512_v4
+112/574588/campos_512_v4
+112/574602/campos_512_v4
+112/574603/campos_512_v4
+112/574609/campos_512_v4
+112/574633/campos_512_v4
+112/574640/campos_512_v4
+112/574652/campos_512_v4
+112/574658/campos_512_v4
+112/574663/campos_512_v4
+112/574664/campos_512_v4
+112/574675/campos_512_v4
+112/574690/campos_512_v4
+112/574692/campos_512_v4
+112/574697/campos_512_v4
+112/574699/campos_512_v4
+112/574705/campos_512_v4
+112/574732/campos_512_v4
+112/574740/campos_512_v4
+112/574744/campos_512_v4
+112/574750/campos_512_v4
+112/574755/campos_512_v4
+112/574761/campos_512_v4
+112/574762/campos_512_v4
+112/574773/campos_512_v4
+112/574776/campos_512_v4
+112/574779/campos_512_v4
+112/574783/campos_512_v4
+112/574805/campos_512_v4
+112/574806/campos_512_v4
+112/574811/campos_512_v4
+112/574812/campos_512_v4
+112/574824/campos_512_v4
+112/574837/campos_512_v4
+112/574844/campos_512_v4
+112/574859/campos_512_v4
+112/574881/campos_512_v4
+112/574883/campos_512_v4
+112/574885/campos_512_v4
+112/574891/campos_512_v4
+112/574892/campos_512_v4
+112/574907/campos_512_v4
+112/574910/campos_512_v4
+112/574938/campos_512_v4
+112/574941/campos_512_v4
+112/574953/campos_512_v4
+112/574959/campos_512_v4
+112/574960/campos_512_v4
+112/574970/campos_512_v4
+112/574979/campos_512_v4
+112/574982/campos_512_v4
+112/574985/campos_512_v4
+113/575007/campos_512_v4
+113/575017/campos_512_v4
+113/575033/campos_512_v4
+113/575051/campos_512_v4
+113/575102/campos_512_v4
+113/575110/campos_512_v4
+113/575117/campos_512_v4
+113/575127/campos_512_v4
+113/575129/campos_512_v4
+113/575131/campos_512_v4
+113/575136/campos_512_v4
+113/575152/campos_512_v4
+113/575159/campos_512_v4
+113/575167/campos_512_v4
+113/575182/campos_512_v4
+113/575196/campos_512_v4
+113/575210/campos_512_v4
+113/575217/campos_512_v4
+113/575218/campos_512_v4
+113/575219/campos_512_v4
+113/575222/campos_512_v4
+113/575229/campos_512_v4
+113/575233/campos_512_v4
+113/575243/campos_512_v4
+113/575250/campos_512_v4
+113/575252/campos_512_v4
+113/575253/campos_512_v4
+113/575258/campos_512_v4
+113/575270/campos_512_v4
+113/575272/campos_512_v4
+113/575290/campos_512_v4
+113/575293/campos_512_v4
+113/575300/campos_512_v4
+113/575301/campos_512_v4
+113/575303/campos_512_v4
+113/575313/campos_512_v4
+113/575330/campos_512_v4
+113/575336/campos_512_v4
+113/575338/campos_512_v4
+113/575340/campos_512_v4
+113/575341/campos_512_v4
+113/575343/campos_512_v4
+113/575350/campos_512_v4
+113/575370/campos_512_v4
+113/575372/campos_512_v4
+113/575373/campos_512_v4
+113/575386/campos_512_v4
+113/575402/campos_512_v4
+113/575404/campos_512_v4
+113/575411/campos_512_v4
+113/575417/campos_512_v4
+113/575418/campos_512_v4
+113/575420/campos_512_v4
+113/575422/campos_512_v4
+113/575429/campos_512_v4
+113/575434/campos_512_v4
+113/575436/campos_512_v4
+113/575439/campos_512_v4
+113/575446/campos_512_v4
+113/575447/campos_512_v4
+113/575450/campos_512_v4
+113/575451/campos_512_v4
+113/575460/campos_512_v4
+113/575461/campos_512_v4
+113/575466/campos_512_v4
+113/575469/campos_512_v4
+113/575472/campos_512_v4
+113/575476/campos_512_v4
+113/575477/campos_512_v4
+113/575481/campos_512_v4
+113/575488/campos_512_v4
+113/575496/campos_512_v4
+113/575499/campos_512_v4
+113/575509/campos_512_v4
+113/575517/campos_512_v4
+113/575535/campos_512_v4
+113/575550/campos_512_v4
+113/575561/campos_512_v4
+113/575565/campos_512_v4
+113/575576/campos_512_v4
+113/575586/campos_512_v4
+113/575587/campos_512_v4
+113/575592/campos_512_v4
+113/575602/campos_512_v4
+113/575604/campos_512_v4
+113/575607/campos_512_v4
+113/575609/campos_512_v4
+113/575620/campos_512_v4
+113/575622/campos_512_v4
+113/575631/campos_512_v4
+113/575639/campos_512_v4
+113/575661/campos_512_v4
+113/575668/campos_512_v4
+113/575676/campos_512_v4
+113/575692/campos_512_v4
+113/575697/campos_512_v4
+113/575703/campos_512_v4
+113/575704/campos_512_v4
+113/575705/campos_512_v4
+113/575708/campos_512_v4
+113/575719/campos_512_v4
+113/575727/campos_512_v4
+113/575728/campos_512_v4
+113/575734/campos_512_v4
+113/575738/campos_512_v4
+113/575739/campos_512_v4
+113/575743/campos_512_v4
+113/575749/campos_512_v4
+113/575750/campos_512_v4
+113/575756/campos_512_v4
+113/575771/campos_512_v4
+113/575774/campos_512_v4
+113/575787/campos_512_v4
+113/575788/campos_512_v4
+113/575793/campos_512_v4
+113/575819/campos_512_v4
+113/575847/campos_512_v4
+113/575852/campos_512_v4
+113/575856/campos_512_v4
+113/575865/campos_512_v4
+113/575868/campos_512_v4
+113/575871/campos_512_v4
+113/575872/campos_512_v4
+113/575877/campos_512_v4
+113/575895/campos_512_v4
+113/575900/campos_512_v4
+113/575902/campos_512_v4
+113/575904/campos_512_v4
+113/575906/campos_512_v4
+113/575919/campos_512_v4
+113/575922/campos_512_v4
+113/575932/campos_512_v4
+113/575933/campos_512_v4
+113/575942/campos_512_v4
+113/575954/campos_512_v4
+113/575958/campos_512_v4
+113/575963/campos_512_v4
+113/575993/campos_512_v4
+113/575996/campos_512_v4
+113/576002/campos_512_v4
+113/576024/campos_512_v4
+113/576026/campos_512_v4
+113/576030/campos_512_v4
+113/576044/campos_512_v4
+113/576057/campos_512_v4
+113/576068/campos_512_v4
+113/576103/campos_512_v4
+113/576105/campos_512_v4
+113/576109/campos_512_v4
+113/576110/campos_512_v4
+113/576112/campos_512_v4
+113/576129/campos_512_v4
+113/576134/campos_512_v4
+113/576142/campos_512_v4
+113/576144/campos_512_v4
+113/576146/campos_512_v4
+113/576155/campos_512_v4
+113/576157/campos_512_v4
+113/576167/campos_512_v4
+113/576168/campos_512_v4
+113/576169/campos_512_v4
+113/576173/campos_512_v4
+113/576186/campos_512_v4
+113/576192/campos_512_v4
+113/576194/campos_512_v4
+113/576198/campos_512_v4
+113/576211/campos_512_v4
+113/576216/campos_512_v4
+113/576220/campos_512_v4
+113/576228/campos_512_v4
+113/576246/campos_512_v4
+113/576259/campos_512_v4
+113/576261/campos_512_v4
+113/576292/campos_512_v4
+113/576294/campos_512_v4
+113/576296/campos_512_v4
+113/576306/campos_512_v4
+113/576315/campos_512_v4
+113/576321/campos_512_v4
+113/576335/campos_512_v4
+113/576336/campos_512_v4
+113/576339/campos_512_v4
+113/576354/campos_512_v4
+113/576363/campos_512_v4
+113/576380/campos_512_v4
+113/576385/campos_512_v4
+113/576391/campos_512_v4
+113/576405/campos_512_v4
+113/576407/campos_512_v4
+113/576412/campos_512_v4
+113/576418/campos_512_v4
+113/576419/campos_512_v4
+113/576438/campos_512_v4
+113/576448/campos_512_v4
+113/576449/campos_512_v4
+113/576469/campos_512_v4
+113/576474/campos_512_v4
+113/576476/campos_512_v4
+113/576484/campos_512_v4
+113/576497/campos_512_v4
+113/576500/campos_512_v4
+113/576526/campos_512_v4
+113/576545/campos_512_v4
+113/576547/campos_512_v4
+113/576553/campos_512_v4
+113/576569/campos_512_v4
+113/576570/campos_512_v4
+113/576572/campos_512_v4
+113/576580/campos_512_v4
+113/576582/campos_512_v4
+113/576586/campos_512_v4
+113/576595/campos_512_v4
+113/576603/campos_512_v4
+113/576606/campos_512_v4
+113/576611/campos_512_v4
+113/576615/campos_512_v4
+113/576617/campos_512_v4
+113/576618/campos_512_v4
+113/576619/campos_512_v4
+113/576628/campos_512_v4
+113/576642/campos_512_v4
+113/576643/campos_512_v4
+113/576644/campos_512_v4
+113/576645/campos_512_v4
+113/576656/campos_512_v4
+113/576667/campos_512_v4
+113/576669/campos_512_v4
+113/576682/campos_512_v4
+113/576692/campos_512_v4
+113/576693/campos_512_v4
+113/576697/campos_512_v4
+113/576701/campos_512_v4
+113/576702/campos_512_v4
+113/576709/campos_512_v4
+113/576712/campos_512_v4
+113/576715/campos_512_v4
+113/576728/campos_512_v4
+113/576737/campos_512_v4
+113/576740/campos_512_v4
+113/576764/campos_512_v4
+113/576798/campos_512_v4
+113/576806/campos_512_v4
+113/576810/campos_512_v4
+113/576812/campos_512_v4
+113/576813/campos_512_v4
+113/576822/campos_512_v4
+113/576830/campos_512_v4
+113/576835/campos_512_v4
+113/576837/campos_512_v4
+113/576839/campos_512_v4
+113/576843/campos_512_v4
+113/576844/campos_512_v4
+113/576852/campos_512_v4
+113/576854/campos_512_v4
+113/576855/campos_512_v4
+113/576859/campos_512_v4
+113/576869/campos_512_v4
+113/576887/campos_512_v4
+113/576892/campos_512_v4
+113/576903/campos_512_v4
+113/576908/campos_512_v4
+113/576916/campos_512_v4
+113/576918/campos_512_v4
+113/576924/campos_512_v4
+113/576931/campos_512_v4
+113/576942/campos_512_v4
+113/576949/campos_512_v4
+113/576952/campos_512_v4
+113/576957/campos_512_v4
+113/576959/campos_512_v4
+113/576965/campos_512_v4
+113/576996/campos_512_v4
+113/577022/campos_512_v4
+113/577033/campos_512_v4
+113/577036/campos_512_v4
+113/577046/campos_512_v4
+113/577048/campos_512_v4
+113/577051/campos_512_v4
+113/577060/campos_512_v4
+113/577062/campos_512_v4
+113/577067/campos_512_v4
+113/577075/campos_512_v4
+113/577076/campos_512_v4
+113/577077/campos_512_v4
+113/577086/campos_512_v4
+113/577092/campos_512_v4
+113/577093/campos_512_v4
+113/577097/campos_512_v4
+113/577099/campos_512_v4
+113/577102/campos_512_v4
+113/577106/campos_512_v4
+113/577115/campos_512_v4
+113/577116/campos_512_v4
+113/577122/campos_512_v4
+113/577136/campos_512_v4
+113/577139/campos_512_v4
+113/577147/campos_512_v4
+113/577149/campos_512_v4
+113/577185/campos_512_v4
+113/577193/campos_512_v4
+113/577202/campos_512_v4
+113/577220/campos_512_v4
+113/577223/campos_512_v4
+113/577231/campos_512_v4
+113/577233/campos_512_v4
+113/577237/campos_512_v4
+113/577242/campos_512_v4
+113/577259/campos_512_v4
+113/577264/campos_512_v4
+113/577265/campos_512_v4
+113/577271/campos_512_v4
+113/577277/campos_512_v4
+113/577281/campos_512_v4
+113/577285/campos_512_v4
+113/577301/campos_512_v4
+113/577310/campos_512_v4
+113/577315/campos_512_v4
+113/577318/campos_512_v4
+113/577319/campos_512_v4
+113/577322/campos_512_v4
+113/577324/campos_512_v4
+113/577326/campos_512_v4
+113/577327/campos_512_v4
+113/577333/campos_512_v4
+113/577334/campos_512_v4
+113/577341/campos_512_v4
+113/577356/campos_512_v4
+113/577358/campos_512_v4
+113/577377/campos_512_v4
+113/577389/campos_512_v4
+113/577396/campos_512_v4
+113/577410/campos_512_v4
+113/577438/campos_512_v4
+113/577443/campos_512_v4
+113/577445/campos_512_v4
+113/577447/campos_512_v4
+113/577461/campos_512_v4
+113/577466/campos_512_v4
+113/577485/campos_512_v4
+113/577496/campos_512_v4
+113/577498/campos_512_v4
+113/577513/campos_512_v4
+113/577524/campos_512_v4
+113/577526/campos_512_v4
+113/577535/campos_512_v4
+113/577536/campos_512_v4
+113/577542/campos_512_v4
+113/577544/campos_512_v4
+113/577545/campos_512_v4
+113/577555/campos_512_v4
+113/577558/campos_512_v4
+113/577562/campos_512_v4
+113/577565/campos_512_v4
+113/577570/campos_512_v4
+113/577578/campos_512_v4
+113/577605/campos_512_v4
+113/577611/campos_512_v4
+113/577615/campos_512_v4
+113/577641/campos_512_v4
+113/577645/campos_512_v4
+113/577649/campos_512_v4
+113/577655/campos_512_v4
+113/577656/campos_512_v4
+113/577657/campos_512_v4
+113/577668/campos_512_v4
+113/577671/campos_512_v4
+113/577672/campos_512_v4
+113/577685/campos_512_v4
+113/577686/campos_512_v4
+113/577690/campos_512_v4
+113/577692/campos_512_v4
+113/577706/campos_512_v4
+113/577716/campos_512_v4
+113/577719/campos_512_v4
+113/577725/campos_512_v4
+113/577733/campos_512_v4
+113/577735/campos_512_v4
+113/577738/campos_512_v4
+113/577742/campos_512_v4
+113/577748/campos_512_v4
+113/577751/campos_512_v4
+113/577759/campos_512_v4
+113/577779/campos_512_v4
+113/577780/campos_512_v4
+113/577784/campos_512_v4
+113/577794/campos_512_v4
+113/577795/campos_512_v4
+113/577805/campos_512_v4
+113/577814/campos_512_v4
+113/577823/campos_512_v4
+113/577825/campos_512_v4
+113/577838/campos_512_v4
+113/577839/campos_512_v4
+113/577840/campos_512_v4
+113/577842/campos_512_v4
+113/577846/campos_512_v4
+113/577848/campos_512_v4
+113/577853/campos_512_v4
+113/577855/campos_512_v4
+113/577857/campos_512_v4
+113/577860/campos_512_v4
+113/577867/campos_512_v4
+113/577869/campos_512_v4
+113/577876/campos_512_v4
+113/577902/campos_512_v4
+113/577910/campos_512_v4
+113/577918/campos_512_v4
+113/577923/campos_512_v4
+113/577950/campos_512_v4
+113/577955/campos_512_v4
+113/577978/campos_512_v4
+113/577983/campos_512_v4
+113/577997/campos_512_v4
+113/578009/campos_512_v4
+113/578010/campos_512_v4
+113/578012/campos_512_v4
+113/578013/campos_512_v4
+113/578016/campos_512_v4
+113/578021/campos_512_v4
+113/578026/campos_512_v4
+113/578027/campos_512_v4
+113/578031/campos_512_v4
+113/578034/campos_512_v4
+113/578036/campos_512_v4
+113/578050/campos_512_v4
+113/578065/campos_512_v4
+113/578070/campos_512_v4
+113/578074/campos_512_v4
+113/578092/campos_512_v4
+113/578095/campos_512_v4
+113/578108/campos_512_v4
+113/578110/campos_512_v4
+113/578127/campos_512_v4
+113/578128/campos_512_v4
+113/578152/campos_512_v4
+113/578167/campos_512_v4
+113/578168/campos_512_v4
+113/578182/campos_512_v4
+113/578186/campos_512_v4
+113/578205/campos_512_v4
+113/578208/campos_512_v4
+113/578211/campos_512_v4
+113/578212/campos_512_v4
+113/578221/campos_512_v4
+113/578228/campos_512_v4
+113/578243/campos_512_v4
+113/578244/campos_512_v4
+113/578249/campos_512_v4
+113/578268/campos_512_v4
+113/578270/campos_512_v4
+113/578271/campos_512_v4
+113/578282/campos_512_v4
+113/578287/campos_512_v4
+113/578294/campos_512_v4
+113/578297/campos_512_v4
+113/578320/campos_512_v4
+113/578321/campos_512_v4
+113/578325/campos_512_v4
+113/578329/campos_512_v4
+113/578331/campos_512_v4
+113/578335/campos_512_v4
+113/578336/campos_512_v4
+113/578342/campos_512_v4
+113/578343/campos_512_v4
+113/578344/campos_512_v4
+113/578347/campos_512_v4
+113/578354/campos_512_v4
+113/578365/campos_512_v4
+113/578378/campos_512_v4
+113/578392/campos_512_v4
+113/578395/campos_512_v4
+113/578410/campos_512_v4
+113/578414/campos_512_v4
+113/578421/campos_512_v4
+113/578435/campos_512_v4
+113/578438/campos_512_v4
+113/578448/campos_512_v4
+113/578450/campos_512_v4
+113/578460/campos_512_v4
+113/578470/campos_512_v4
+113/578477/campos_512_v4
+113/578482/campos_512_v4
+113/578486/campos_512_v4
+113/578490/campos_512_v4
+113/578493/campos_512_v4
+113/578497/campos_512_v4
+113/578500/campos_512_v4
+113/578501/campos_512_v4
+113/578506/campos_512_v4
+113/578509/campos_512_v4
+113/578515/campos_512_v4
+113/578519/campos_512_v4
+113/578525/campos_512_v4
+113/578526/campos_512_v4
+113/578528/campos_512_v4
+113/578529/campos_512_v4
+113/578531/campos_512_v4
+113/578533/campos_512_v4
+113/578534/campos_512_v4
+113/578552/campos_512_v4
+113/578599/campos_512_v4
+113/578608/campos_512_v4
+113/578612/campos_512_v4
+113/578639/campos_512_v4
+113/578645/campos_512_v4
+113/578649/campos_512_v4
+113/578657/campos_512_v4
+113/578659/campos_512_v4
+113/578673/campos_512_v4
+113/578680/campos_512_v4
+113/578699/campos_512_v4
+113/578719/campos_512_v4
+113/578726/campos_512_v4
+113/578730/campos_512_v4
+113/578744/campos_512_v4
+113/578748/campos_512_v4
+113/578756/campos_512_v4
+113/578765/campos_512_v4
+113/578767/campos_512_v4
+113/578768/campos_512_v4
+113/578774/campos_512_v4
+113/578776/campos_512_v4
+113/578779/campos_512_v4
+113/578781/campos_512_v4
+113/578786/campos_512_v4
+113/578793/campos_512_v4
+113/578798/campos_512_v4
+113/578803/campos_512_v4
+113/578805/campos_512_v4
+113/578811/campos_512_v4
+113/578812/campos_512_v4
+113/578819/campos_512_v4
+113/578823/campos_512_v4
+113/578826/campos_512_v4
+113/578829/campos_512_v4
+113/578834/campos_512_v4
+113/578840/campos_512_v4
+113/578844/campos_512_v4
+113/578849/campos_512_v4
+113/578852/campos_512_v4
+113/578856/campos_512_v4
+113/578857/campos_512_v4
+113/578861/campos_512_v4
+113/578862/campos_512_v4
+113/578883/campos_512_v4
+113/578893/campos_512_v4
+113/578895/campos_512_v4
+113/578899/campos_512_v4
+113/578906/campos_512_v4
+113/578917/campos_512_v4
+113/578918/campos_512_v4
+113/578920/campos_512_v4
+113/578926/campos_512_v4
+113/578929/campos_512_v4
+113/578971/campos_512_v4
+113/578993/campos_512_v4
+113/578995/campos_512_v4
+113/579010/campos_512_v4
+113/579017/campos_512_v4
+113/579023/campos_512_v4
+113/579043/campos_512_v4
+113/579046/campos_512_v4
+113/579047/campos_512_v4
+113/579055/campos_512_v4
+113/579060/campos_512_v4
+113/579066/campos_512_v4
+113/579078/campos_512_v4
+113/579086/campos_512_v4
+113/579089/campos_512_v4
+113/579101/campos_512_v4
+113/579121/campos_512_v4
+113/579138/campos_512_v4
+113/579140/campos_512_v4
+113/579155/campos_512_v4
+113/579161/campos_512_v4
+113/579164/campos_512_v4
+113/579165/campos_512_v4
+113/579166/campos_512_v4
+113/579171/campos_512_v4
+113/579176/campos_512_v4
+113/579177/campos_512_v4
+113/579182/campos_512_v4
+113/579184/campos_512_v4
+113/579186/campos_512_v4
+113/579192/campos_512_v4
+113/579194/campos_512_v4
+113/579201/campos_512_v4
+113/579215/campos_512_v4
+113/579216/campos_512_v4
+113/579223/campos_512_v4
+113/579230/campos_512_v4
+113/579233/campos_512_v4
+113/579234/campos_512_v4
+113/579240/campos_512_v4
+113/579241/campos_512_v4
+113/579249/campos_512_v4
+113/579257/campos_512_v4
+113/579260/campos_512_v4
+113/579266/campos_512_v4
+113/579267/campos_512_v4
+113/579271/campos_512_v4
+113/579288/campos_512_v4
+113/579298/campos_512_v4
+113/579316/campos_512_v4
+113/579317/campos_512_v4
+113/579343/campos_512_v4
+113/579364/campos_512_v4
+113/579395/campos_512_v4
+113/579399/campos_512_v4
+113/579420/campos_512_v4
+113/579426/campos_512_v4
+113/579431/campos_512_v4
+113/579434/campos_512_v4
+113/579441/campos_512_v4
+113/579450/campos_512_v4
+113/579457/campos_512_v4
+113/579458/campos_512_v4
+113/579460/campos_512_v4
+113/579465/campos_512_v4
+113/579473/campos_512_v4
+113/579475/campos_512_v4
+113/579480/campos_512_v4
+113/579482/campos_512_v4
+113/579489/campos_512_v4
+113/579490/campos_512_v4
+113/579495/campos_512_v4
+113/579499/campos_512_v4
+113/579506/campos_512_v4
+113/579511/campos_512_v4
+113/579525/campos_512_v4
+113/579536/campos_512_v4
+113/579543/campos_512_v4
+113/579545/campos_512_v4
+113/579562/campos_512_v4
+113/579564/campos_512_v4
+113/579575/campos_512_v4
+113/579576/campos_512_v4
+113/579580/campos_512_v4
+113/579588/campos_512_v4
+113/579589/campos_512_v4
+113/579594/campos_512_v4
+113/579615/campos_512_v4
+113/579626/campos_512_v4
+113/579628/campos_512_v4
+113/579633/campos_512_v4
+113/579636/campos_512_v4
+113/579649/campos_512_v4
+113/579657/campos_512_v4
+113/579663/campos_512_v4
+113/579666/campos_512_v4
+113/579671/campos_512_v4
+113/579673/campos_512_v4
+113/579678/campos_512_v4
+113/579681/campos_512_v4
+113/579682/campos_512_v4
+113/579687/campos_512_v4
+113/579689/campos_512_v4
+113/579692/campos_512_v4
+113/579694/campos_512_v4
+113/579712/campos_512_v4
+113/579721/campos_512_v4
+113/579726/campos_512_v4
+113/579727/campos_512_v4
+113/579729/campos_512_v4
+113/579734/campos_512_v4
+113/579753/campos_512_v4
+113/579756/campos_512_v4
+113/579760/campos_512_v4
+113/579761/campos_512_v4
+113/579768/campos_512_v4
+113/579795/campos_512_v4
+113/579797/campos_512_v4
+113/579803/campos_512_v4
+113/579808/campos_512_v4
+113/579821/campos_512_v4
+113/579823/campos_512_v4
+113/579834/campos_512_v4
+113/579839/campos_512_v4
+113/579851/campos_512_v4
+113/579854/campos_512_v4
+113/579861/campos_512_v4
+113/579868/campos_512_v4
+113/579869/campos_512_v4
+113/579882/campos_512_v4
+113/579894/campos_512_v4
+113/579909/campos_512_v4
+113/579916/campos_512_v4
+113/579920/campos_512_v4
+113/579922/campos_512_v4
+113/579925/campos_512_v4
+113/579928/campos_512_v4
+113/579936/campos_512_v4
+113/579937/campos_512_v4
+113/579942/campos_512_v4
+113/579949/campos_512_v4
+113/579972/campos_512_v4
+113/579973/campos_512_v4
+113/579983/campos_512_v4
+113/579986/campos_512_v4
+114/580003/campos_512_v4
+114/580004/campos_512_v4
+114/580015/campos_512_v4
+114/580023/campos_512_v4
+114/580024/campos_512_v4
+114/580032/campos_512_v4
+114/580047/campos_512_v4
+114/580048/campos_512_v4
+114/580050/campos_512_v4
+114/580052/campos_512_v4
+114/580053/campos_512_v4
+114/580058/campos_512_v4
+114/580062/campos_512_v4
+114/580069/campos_512_v4
+114/580080/campos_512_v4
+114/580083/campos_512_v4
+114/580098/campos_512_v4
+114/580099/campos_512_v4
+114/580102/campos_512_v4
+114/580106/campos_512_v4
+114/580107/campos_512_v4
+114/580112/campos_512_v4
+114/580120/campos_512_v4
+114/580121/campos_512_v4
+114/580124/campos_512_v4
+114/580129/campos_512_v4
+114/580132/campos_512_v4
+114/580139/campos_512_v4
+114/580142/campos_512_v4
+114/580151/campos_512_v4
+114/580176/campos_512_v4
+114/580183/campos_512_v4
+114/580184/campos_512_v4
+114/580191/campos_512_v4
+114/580198/campos_512_v4
+114/580212/campos_512_v4
+114/580214/campos_512_v4
+114/580221/campos_512_v4
+114/580222/campos_512_v4
+114/580239/campos_512_v4
+114/580244/campos_512_v4
+114/580252/campos_512_v4
+114/580269/campos_512_v4
+114/580281/campos_512_v4
+114/580302/campos_512_v4
+114/580306/campos_512_v4
+114/580311/campos_512_v4
+114/580315/campos_512_v4
+114/580317/campos_512_v4
+114/580325/campos_512_v4
+114/580328/campos_512_v4
+114/580331/campos_512_v4
+114/580337/campos_512_v4
+114/580340/campos_512_v4
+114/580359/campos_512_v4
+114/580361/campos_512_v4
+114/580365/campos_512_v4
+114/580367/campos_512_v4
+114/580373/campos_512_v4
+114/580388/campos_512_v4
+114/580389/campos_512_v4
+114/580390/campos_512_v4
+114/580392/campos_512_v4
+114/580407/campos_512_v4
+114/580410/campos_512_v4
+114/580414/campos_512_v4
+114/580418/campos_512_v4
+114/580426/campos_512_v4
+114/580428/campos_512_v4
+114/580430/campos_512_v4
+114/580451/campos_512_v4
+114/580453/campos_512_v4
+114/580489/campos_512_v4
+114/580502/campos_512_v4
+114/580510/campos_512_v4
+114/580519/campos_512_v4
+114/580526/campos_512_v4
+114/580528/campos_512_v4
+114/580536/campos_512_v4
+114/580541/campos_512_v4
+114/580542/campos_512_v4
+114/580545/campos_512_v4
+114/580546/campos_512_v4
+114/580551/campos_512_v4
+114/580554/campos_512_v4
+114/580559/campos_512_v4
+114/580570/campos_512_v4
+114/580571/campos_512_v4
+114/580573/campos_512_v4
+114/580577/campos_512_v4
+114/580587/campos_512_v4
+114/580589/campos_512_v4
+114/580591/campos_512_v4
+114/580601/campos_512_v4
+114/580608/campos_512_v4
+114/580615/campos_512_v4
+114/580623/campos_512_v4
+114/580624/campos_512_v4
+114/580629/campos_512_v4
+114/580632/campos_512_v4
+114/580639/campos_512_v4
+114/580644/campos_512_v4
+114/580655/campos_512_v4
+114/580656/campos_512_v4
+114/580657/campos_512_v4
+114/580658/campos_512_v4
+114/580667/campos_512_v4
+114/580669/campos_512_v4
+114/580674/campos_512_v4
+114/580689/campos_512_v4
+114/580691/campos_512_v4
+114/580699/campos_512_v4
+114/580700/campos_512_v4
+114/580711/campos_512_v4
+114/580712/campos_512_v4
+114/580725/campos_512_v4
+114/580727/campos_512_v4
+114/580733/campos_512_v4
+114/580743/campos_512_v4
+114/580753/campos_512_v4
+114/580757/campos_512_v4
+114/580759/campos_512_v4
+114/580771/campos_512_v4
+114/580796/campos_512_v4
+114/580800/campos_512_v4
+114/580801/campos_512_v4
+114/580812/campos_512_v4
+114/580815/campos_512_v4
+114/580839/campos_512_v4
+114/580847/campos_512_v4
+114/580849/campos_512_v4
+114/580853/campos_512_v4
+114/580864/campos_512_v4
+114/580867/campos_512_v4
+114/580879/campos_512_v4
+114/580886/campos_512_v4
+114/580893/campos_512_v4
+114/580894/campos_512_v4
+114/580899/campos_512_v4
+114/580902/campos_512_v4
+114/580904/campos_512_v4
+114/580905/campos_512_v4
+114/580911/campos_512_v4
+114/580920/campos_512_v4
+114/580927/campos_512_v4
+114/580928/campos_512_v4
+114/580929/campos_512_v4
+114/580933/campos_512_v4
+114/580934/campos_512_v4
+114/580935/campos_512_v4
+114/580945/campos_512_v4
+114/580947/campos_512_v4
+114/580951/campos_512_v4
+114/580952/campos_512_v4
+114/580953/campos_512_v4
+114/580959/campos_512_v4
+114/580963/campos_512_v4
+114/580968/campos_512_v4
+114/580991/campos_512_v4
+114/580996/campos_512_v4
+114/581009/campos_512_v4
+114/581016/campos_512_v4
+114/581018/campos_512_v4
+114/581033/campos_512_v4
+114/581036/campos_512_v4
+114/581041/campos_512_v4
+114/581059/campos_512_v4
+114/581069/campos_512_v4
+114/581093/campos_512_v4
+114/581094/campos_512_v4
+114/581100/campos_512_v4
+114/581101/campos_512_v4
+114/581106/campos_512_v4
+114/581120/campos_512_v4
+114/581128/campos_512_v4
+114/581134/campos_512_v4
+114/581137/campos_512_v4
+114/581150/campos_512_v4
+114/581158/campos_512_v4
+114/581169/campos_512_v4
+114/581176/campos_512_v4
+114/581185/campos_512_v4
+114/581198/campos_512_v4
+114/581199/campos_512_v4
+114/581220/campos_512_v4
+114/581222/campos_512_v4
+114/581223/campos_512_v4
+114/581224/campos_512_v4
+114/581229/campos_512_v4
+114/581234/campos_512_v4
+114/581238/campos_512_v4
+114/581241/campos_512_v4
+114/581242/campos_512_v4
+114/581251/campos_512_v4
+114/581269/campos_512_v4
+114/581284/campos_512_v4
+114/581298/campos_512_v4
+114/581306/campos_512_v4
+114/581311/campos_512_v4
+114/581322/campos_512_v4
+114/581338/campos_512_v4
+114/581342/campos_512_v4
+114/581368/campos_512_v4
+114/581371/campos_512_v4
+114/581373/campos_512_v4
+114/581376/campos_512_v4
+114/581380/campos_512_v4
+114/581383/campos_512_v4
+114/581387/campos_512_v4
+114/581404/campos_512_v4
+114/581414/campos_512_v4
+114/581415/campos_512_v4
+114/581416/campos_512_v4
+114/581425/campos_512_v4
+114/581433/campos_512_v4
+114/581434/campos_512_v4
+114/581441/campos_512_v4
+114/581446/campos_512_v4
+114/581455/campos_512_v4
+114/581466/campos_512_v4
+114/581479/campos_512_v4
+114/581481/campos_512_v4
+114/581485/campos_512_v4
+114/581490/campos_512_v4
+114/581493/campos_512_v4
+114/581496/campos_512_v4
+114/581505/campos_512_v4
+114/581522/campos_512_v4
+114/581526/campos_512_v4
+114/581530/campos_512_v4
+114/581535/campos_512_v4
+114/581536/campos_512_v4
+114/581538/campos_512_v4
+114/581553/campos_512_v4
+114/581554/campos_512_v4
+114/581568/campos_512_v4
+114/581576/campos_512_v4
+114/581588/campos_512_v4
+114/581589/campos_512_v4
+114/581591/campos_512_v4
+114/581592/campos_512_v4
+114/581595/campos_512_v4
+114/581599/campos_512_v4
+114/581607/campos_512_v4
+114/581609/campos_512_v4
+114/581618/campos_512_v4
+114/581626/campos_512_v4
+114/581627/campos_512_v4
+114/581630/campos_512_v4
+114/581636/campos_512_v4
+114/581639/campos_512_v4
+114/581656/campos_512_v4
+114/581663/campos_512_v4
+114/581675/campos_512_v4
+114/581679/campos_512_v4
+114/581717/campos_512_v4
+114/581718/campos_512_v4
+114/581720/campos_512_v4
+114/581721/campos_512_v4
+114/581728/campos_512_v4
+114/581730/campos_512_v4
+114/581734/campos_512_v4
+114/581742/campos_512_v4
+114/581748/campos_512_v4
+114/581754/campos_512_v4
+114/581762/campos_512_v4
+114/581768/campos_512_v4
+114/581769/campos_512_v4
+114/581773/campos_512_v4
+114/581780/campos_512_v4
+114/581784/campos_512_v4
+114/581785/campos_512_v4
+114/581790/campos_512_v4
+114/581794/campos_512_v4
+114/581798/campos_512_v4
+114/581800/campos_512_v4
+114/581802/campos_512_v4
+114/581808/campos_512_v4
+114/581814/campos_512_v4
+114/581816/campos_512_v4
+114/581818/campos_512_v4
+114/581836/campos_512_v4
+114/581845/campos_512_v4
+114/581848/campos_512_v4
+114/581877/campos_512_v4
+114/581891/campos_512_v4
+114/581918/campos_512_v4
+114/581927/campos_512_v4
+114/581934/campos_512_v4
+114/581935/campos_512_v4
+114/581947/campos_512_v4
+114/581952/campos_512_v4
+114/581978/campos_512_v4
+114/581984/campos_512_v4
+114/581990/campos_512_v4
+114/582006/campos_512_v4
+114/582011/campos_512_v4
+114/582012/campos_512_v4
+114/582026/campos_512_v4
+114/582031/campos_512_v4
+114/582032/campos_512_v4
+114/582041/campos_512_v4
+114/582046/campos_512_v4
+114/582052/campos_512_v4
+114/582057/campos_512_v4
+114/582066/campos_512_v4
+114/582075/campos_512_v4
+114/582083/campos_512_v4
+114/582090/campos_512_v4
+114/582103/campos_512_v4
+114/582106/campos_512_v4
+114/582110/campos_512_v4
+114/582111/campos_512_v4
+114/582114/campos_512_v4
+114/582121/campos_512_v4
+114/582123/campos_512_v4
+114/582127/campos_512_v4
+114/582131/campos_512_v4
+114/582134/campos_512_v4
+114/582137/campos_512_v4
+114/582143/campos_512_v4
+114/582146/campos_512_v4
+114/582149/campos_512_v4
+114/582159/campos_512_v4
+114/582162/campos_512_v4
+114/582171/campos_512_v4
+114/582172/campos_512_v4
+114/582178/campos_512_v4
+114/582202/campos_512_v4
+114/582240/campos_512_v4
+114/582241/campos_512_v4
+114/582257/campos_512_v4
+114/582260/campos_512_v4
+114/582262/campos_512_v4
+114/582266/campos_512_v4
+114/582271/campos_512_v4
+114/582273/campos_512_v4
+114/582274/campos_512_v4
+114/582286/campos_512_v4
+114/582305/campos_512_v4
+114/582306/campos_512_v4
+114/582307/campos_512_v4
+114/582308/campos_512_v4
+114/582312/campos_512_v4
+114/582313/campos_512_v4
+114/582316/campos_512_v4
+114/582350/campos_512_v4
+114/582353/campos_512_v4
+114/582354/campos_512_v4
+114/582356/campos_512_v4
+114/582359/campos_512_v4
+114/582374/campos_512_v4
+114/582378/campos_512_v4
+114/582379/campos_512_v4
+114/582391/campos_512_v4
+114/582392/campos_512_v4
+114/582393/campos_512_v4
+114/582399/campos_512_v4
+114/582415/campos_512_v4
+114/582416/campos_512_v4
+114/582422/campos_512_v4
+114/582425/campos_512_v4
+114/582441/campos_512_v4
+114/582445/campos_512_v4
+114/582454/campos_512_v4
+114/582457/campos_512_v4
+114/582464/campos_512_v4
+114/582475/campos_512_v4
+114/582482/campos_512_v4
+114/582485/campos_512_v4
+114/582492/campos_512_v4
+114/582498/campos_512_v4
+114/582501/campos_512_v4
+114/582506/campos_512_v4
+114/582510/campos_512_v4
+114/582522/campos_512_v4
+114/582534/campos_512_v4
+114/582540/campos_512_v4
+114/582549/campos_512_v4
+114/582550/campos_512_v4
+114/582553/campos_512_v4
+114/582555/campos_512_v4
+114/582557/campos_512_v4
+114/582562/campos_512_v4
+114/582563/campos_512_v4
+114/582565/campos_512_v4
+114/582566/campos_512_v4
+114/582568/campos_512_v4
+114/582574/campos_512_v4
+114/582579/campos_512_v4
+114/582582/campos_512_v4
+114/582595/campos_512_v4
+114/582624/campos_512_v4
+114/582637/campos_512_v4
+114/582638/campos_512_v4
+114/582644/campos_512_v4
+114/582647/campos_512_v4
+114/582665/campos_512_v4
+114/582670/campos_512_v4
+114/582671/campos_512_v4
+114/582674/campos_512_v4
+114/582682/campos_512_v4
+114/582688/campos_512_v4
+114/582708/campos_512_v4
+114/582718/campos_512_v4
+114/582723/campos_512_v4
+114/582731/campos_512_v4
+114/582741/campos_512_v4
+114/582745/campos_512_v4
+114/582753/campos_512_v4
+114/582771/campos_512_v4
+114/582772/campos_512_v4
+114/582790/campos_512_v4
+114/582796/campos_512_v4
+114/582819/campos_512_v4
+114/582839/campos_512_v4
+114/582841/campos_512_v4
+114/582848/campos_512_v4
+114/582854/campos_512_v4
+114/582856/campos_512_v4
+114/582860/campos_512_v4
+114/582866/campos_512_v4
+114/582868/campos_512_v4
+114/582878/campos_512_v4
+114/582881/campos_512_v4
+114/582882/campos_512_v4
+114/582883/campos_512_v4
+114/582903/campos_512_v4
+114/582907/campos_512_v4
+114/582911/campos_512_v4
+114/582913/campos_512_v4
+114/582923/campos_512_v4
+114/582936/campos_512_v4
+114/582943/campos_512_v4
+114/582946/campos_512_v4
+114/582948/campos_512_v4
+114/582949/campos_512_v4
+114/582954/campos_512_v4
+114/582964/campos_512_v4
+114/582972/campos_512_v4
+114/582974/campos_512_v4
+114/582977/campos_512_v4
+114/582984/campos_512_v4
+114/582989/campos_512_v4
+114/582993/campos_512_v4
+114/582996/campos_512_v4
+114/583002/campos_512_v4
+114/583009/campos_512_v4
+114/583010/campos_512_v4
+114/583011/campos_512_v4
+114/583012/campos_512_v4
+114/583014/campos_512_v4
+114/583033/campos_512_v4
+114/583034/campos_512_v4
+114/583036/campos_512_v4
+114/583037/campos_512_v4
+114/583038/campos_512_v4
+114/583048/campos_512_v4
+114/583049/campos_512_v4
+114/583059/campos_512_v4
+114/583074/campos_512_v4
+114/583077/campos_512_v4
+114/583081/campos_512_v4
+114/583088/campos_512_v4
+114/583092/campos_512_v4
+114/583099/campos_512_v4
+114/583100/campos_512_v4
+114/583104/campos_512_v4
+114/583106/campos_512_v4
+114/583108/campos_512_v4
+114/583146/campos_512_v4
+114/583164/campos_512_v4
+114/583165/campos_512_v4
+114/583166/campos_512_v4
+114/583169/campos_512_v4
+114/583170/campos_512_v4
+114/583183/campos_512_v4
+114/583186/campos_512_v4
+114/583196/campos_512_v4
+114/583212/campos_512_v4
+114/583216/campos_512_v4
+114/583224/campos_512_v4
+114/583229/campos_512_v4
+114/583238/campos_512_v4
+114/583239/campos_512_v4
+114/583247/campos_512_v4
+114/583249/campos_512_v4
+114/583251/campos_512_v4
+114/583253/campos_512_v4
+114/583271/campos_512_v4
+114/583275/campos_512_v4
+114/583276/campos_512_v4
+114/583278/campos_512_v4
+114/583280/campos_512_v4
+114/583281/campos_512_v4
+114/583295/campos_512_v4
+114/583305/campos_512_v4
+114/583308/campos_512_v4
+114/583315/campos_512_v4
+114/583318/campos_512_v4
+114/583322/campos_512_v4
+114/583323/campos_512_v4
+114/583324/campos_512_v4
+114/583329/campos_512_v4
+114/583331/campos_512_v4
+114/583333/campos_512_v4
+114/583336/campos_512_v4
+114/583344/campos_512_v4
+114/583345/campos_512_v4
+114/583360/campos_512_v4
+114/583371/campos_512_v4
+114/583379/campos_512_v4
+114/583397/campos_512_v4
+114/583406/campos_512_v4
+114/583411/campos_512_v4
+114/583417/campos_512_v4
+114/583420/campos_512_v4
+114/583421/campos_512_v4
+114/583430/campos_512_v4
+114/583432/campos_512_v4
+114/583434/campos_512_v4
+114/583441/campos_512_v4
+114/583443/campos_512_v4
+114/583462/campos_512_v4
+114/583476/campos_512_v4
+114/583483/campos_512_v4
+114/583487/campos_512_v4
+114/583495/campos_512_v4
+114/583516/campos_512_v4
+114/583522/campos_512_v4
+114/583524/campos_512_v4
+114/583531/campos_512_v4
+114/583533/campos_512_v4
+114/583534/campos_512_v4
+114/583535/campos_512_v4
+114/583537/campos_512_v4
+114/583543/campos_512_v4
+114/583544/campos_512_v4
+114/583550/campos_512_v4
+114/583554/campos_512_v4
+114/583557/campos_512_v4
+114/583559/campos_512_v4
+114/583566/campos_512_v4
+114/583569/campos_512_v4
+114/583573/campos_512_v4
+114/583575/campos_512_v4
+114/583576/campos_512_v4
+114/583591/campos_512_v4
+114/583593/campos_512_v4
+114/583599/campos_512_v4
+114/583606/campos_512_v4
+114/583615/campos_512_v4
+114/583616/campos_512_v4
+114/583622/campos_512_v4
+114/583631/campos_512_v4
+114/583632/campos_512_v4
+114/583633/campos_512_v4
+114/583640/campos_512_v4
+114/583644/campos_512_v4
+114/583655/campos_512_v4
+114/583658/campos_512_v4
+114/583665/campos_512_v4
+114/583672/campos_512_v4
+114/583675/campos_512_v4
+114/583679/campos_512_v4
+114/583691/campos_512_v4
+114/583701/campos_512_v4
+114/583711/campos_512_v4
+114/583715/campos_512_v4
+114/583729/campos_512_v4
+114/583732/campos_512_v4
+114/583743/campos_512_v4
+114/583746/campos_512_v4
+114/583747/campos_512_v4
+114/583748/campos_512_v4
+114/583749/campos_512_v4
+114/583753/campos_512_v4
+114/583756/campos_512_v4
+114/583761/campos_512_v4
+114/583782/campos_512_v4
+114/583788/campos_512_v4
+114/583806/campos_512_v4
+114/583815/campos_512_v4
+114/583816/campos_512_v4
+114/583818/campos_512_v4
+114/583830/campos_512_v4
+114/583832/campos_512_v4
+114/583842/campos_512_v4
+114/583845/campos_512_v4
+114/583848/campos_512_v4
+114/583851/campos_512_v4
+114/583852/campos_512_v4
+114/583854/campos_512_v4
+114/583857/campos_512_v4
+114/583865/campos_512_v4
+114/583875/campos_512_v4
+114/583878/campos_512_v4
+114/583883/campos_512_v4
+114/583891/campos_512_v4
+114/583894/campos_512_v4
+114/583896/campos_512_v4
+114/583898/campos_512_v4
+114/583905/campos_512_v4
+114/583911/campos_512_v4
+114/583914/campos_512_v4
+114/583922/campos_512_v4
+114/583926/campos_512_v4
+114/583935/campos_512_v4
+114/583953/campos_512_v4
+114/583965/campos_512_v4
+114/583968/campos_512_v4
+114/583980/campos_512_v4
+114/583992/campos_512_v4
+114/584000/campos_512_v4
+114/584006/campos_512_v4
+114/584012/campos_512_v4
+114/584016/campos_512_v4
+114/584023/campos_512_v4
+114/584044/campos_512_v4
+114/584053/campos_512_v4
+114/584058/campos_512_v4
+114/584060/campos_512_v4
+114/584068/campos_512_v4
+114/584069/campos_512_v4
+114/584076/campos_512_v4
+114/584077/campos_512_v4
+114/584084/campos_512_v4
+114/584089/campos_512_v4
+114/584093/campos_512_v4
+114/584098/campos_512_v4
+114/584105/campos_512_v4
+114/584106/campos_512_v4
+114/584108/campos_512_v4
+114/584129/campos_512_v4
+114/584130/campos_512_v4
+114/584135/campos_512_v4
+114/584136/campos_512_v4
+114/584144/campos_512_v4
+114/584146/campos_512_v4
+114/584165/campos_512_v4
+114/584169/campos_512_v4
+114/584171/campos_512_v4
+114/584195/campos_512_v4
+114/584199/campos_512_v4
+114/584203/campos_512_v4
+114/584207/campos_512_v4
+114/584218/campos_512_v4
+114/584223/campos_512_v4
+114/584227/campos_512_v4
+114/584240/campos_512_v4
+114/584258/campos_512_v4
+114/584265/campos_512_v4
+114/584268/campos_512_v4
+114/584295/campos_512_v4
+114/584296/campos_512_v4
+114/584297/campos_512_v4
+114/584300/campos_512_v4
+114/584302/campos_512_v4
+114/584304/campos_512_v4
+114/584314/campos_512_v4
+114/584320/campos_512_v4
+114/584321/campos_512_v4
+114/584326/campos_512_v4
+114/584328/campos_512_v4
+114/584333/campos_512_v4
+114/584343/campos_512_v4
+114/584344/campos_512_v4
+114/584346/campos_512_v4
+114/584353/campos_512_v4
+114/584356/campos_512_v4
+114/584358/campos_512_v4
+114/584365/campos_512_v4
+114/584380/campos_512_v4
+114/584386/campos_512_v4
+114/584408/campos_512_v4
+114/584412/campos_512_v4
+114/584415/campos_512_v4
+114/584426/campos_512_v4
+114/584434/campos_512_v4
+114/584439/campos_512_v4
+114/584444/campos_512_v4
+114/584452/campos_512_v4
+114/584460/campos_512_v4
+114/584461/campos_512_v4
+114/584464/campos_512_v4
+114/584478/campos_512_v4
+114/584497/campos_512_v4
+114/584498/campos_512_v4
+114/584506/campos_512_v4
+114/584516/campos_512_v4
+114/584517/campos_512_v4
+114/584524/campos_512_v4
+114/584525/campos_512_v4
+114/584535/campos_512_v4
+114/584547/campos_512_v4
+114/584566/campos_512_v4
+114/584570/campos_512_v4
+114/584571/campos_512_v4
+114/584572/campos_512_v4
+114/584578/campos_512_v4
+114/584590/campos_512_v4
+114/584594/campos_512_v4
+114/584597/campos_512_v4
+114/584601/campos_512_v4
+114/584605/campos_512_v4
+114/584638/campos_512_v4
+114/584651/campos_512_v4
+114/584656/campos_512_v4
+114/584673/campos_512_v4
+114/584678/campos_512_v4
+114/584684/campos_512_v4
+114/584717/campos_512_v4
+114/584719/campos_512_v4
+114/584745/campos_512_v4
+114/584750/campos_512_v4
+114/584755/campos_512_v4
+114/584759/campos_512_v4
+114/584761/campos_512_v4
+114/584762/campos_512_v4
+114/584767/campos_512_v4
+114/584770/campos_512_v4
+114/584778/campos_512_v4
+114/584799/campos_512_v4
+114/584804/campos_512_v4
+114/584809/campos_512_v4
+114/584818/campos_512_v4
+114/584849/campos_512_v4
+114/584850/campos_512_v4
+114/584858/campos_512_v4
+114/584872/campos_512_v4
+114/584873/campos_512_v4
+114/584881/campos_512_v4
+114/584883/campos_512_v4
+114/584886/campos_512_v4
+114/584888/campos_512_v4
+114/584899/campos_512_v4
+114/584910/campos_512_v4
+114/584917/campos_512_v4
+114/584921/campos_512_v4
+114/584925/campos_512_v4
+114/584932/campos_512_v4
+114/584933/campos_512_v4
+114/584938/campos_512_v4
+114/584946/campos_512_v4
+114/584953/campos_512_v4
+114/584959/campos_512_v4
+114/584968/campos_512_v4
+114/584975/campos_512_v4
+114/584998/campos_512_v4
+115/585005/campos_512_v4
+115/585022/campos_512_v4
+115/585036/campos_512_v4
+115/585057/campos_512_v4
+115/585064/campos_512_v4
+115/585069/campos_512_v4
+115/585078/campos_512_v4
+115/585083/campos_512_v4
+115/585085/campos_512_v4
+115/585109/campos_512_v4
+115/585118/campos_512_v4
+115/585147/campos_512_v4
+115/585158/campos_512_v4
+115/585171/campos_512_v4
+115/585176/campos_512_v4
+115/585198/campos_512_v4
+115/585209/campos_512_v4
+115/585213/campos_512_v4
+115/585219/campos_512_v4
+115/585222/campos_512_v4
+115/585226/campos_512_v4
+115/585230/campos_512_v4
+115/585232/campos_512_v4
+115/585255/campos_512_v4
+115/585267/campos_512_v4
+115/585274/campos_512_v4
+115/585275/campos_512_v4
+115/585276/campos_512_v4
+115/585293/campos_512_v4
+115/585308/campos_512_v4
+115/585312/campos_512_v4
+115/585316/campos_512_v4
+115/585323/campos_512_v4
+115/585331/campos_512_v4
+115/585342/campos_512_v4
+115/585351/campos_512_v4
+115/585358/campos_512_v4
+115/585367/campos_512_v4
+115/585383/campos_512_v4
+115/585387/campos_512_v4
+115/585388/campos_512_v4
+115/585393/campos_512_v4
+115/585401/campos_512_v4
+115/585416/campos_512_v4
+115/585417/campos_512_v4
+115/585418/campos_512_v4
+115/585423/campos_512_v4
+115/585430/campos_512_v4
+115/585431/campos_512_v4
+115/585440/campos_512_v4
+115/585443/campos_512_v4
+115/585447/campos_512_v4
+115/585454/campos_512_v4
+115/585463/campos_512_v4
+115/585465/campos_512_v4
+115/585466/campos_512_v4
+115/585477/campos_512_v4
+115/585479/campos_512_v4
+115/585489/campos_512_v4
+115/585494/campos_512_v4
+115/585502/campos_512_v4
+115/585511/campos_512_v4
+115/585528/campos_512_v4
+115/585534/campos_512_v4
+115/585552/campos_512_v4
+115/585560/campos_512_v4
+115/585561/campos_512_v4
+115/585565/campos_512_v4
+115/585568/campos_512_v4
+115/585579/campos_512_v4
+115/585588/campos_512_v4
+115/585605/campos_512_v4
+115/585616/campos_512_v4
+115/585644/campos_512_v4
+115/585645/campos_512_v4
+115/585648/campos_512_v4
+115/585663/campos_512_v4
+115/585671/campos_512_v4
+115/585676/campos_512_v4
+115/585682/campos_512_v4
+115/585686/campos_512_v4
+115/585701/campos_512_v4
+115/585710/campos_512_v4
+115/585713/campos_512_v4
+115/585715/campos_512_v4
+115/585720/campos_512_v4
+115/585729/campos_512_v4
+115/585739/campos_512_v4
+115/585740/campos_512_v4
+115/585742/campos_512_v4
+115/585743/campos_512_v4
+115/585772/campos_512_v4
+115/585773/campos_512_v4
+115/585781/campos_512_v4
+115/585782/campos_512_v4
+115/585783/campos_512_v4
+115/585787/campos_512_v4
+115/585793/campos_512_v4
+115/585800/campos_512_v4
+115/585802/campos_512_v4
+115/585807/campos_512_v4
+115/585822/campos_512_v4
+115/585858/campos_512_v4
+115/585868/campos_512_v4
+115/585874/campos_512_v4
+115/585879/campos_512_v4
+115/585890/campos_512_v4
+115/585916/campos_512_v4
+115/585925/campos_512_v4
+115/585928/campos_512_v4
+115/585937/campos_512_v4
+115/585948/campos_512_v4
+115/585971/campos_512_v4
+115/585975/campos_512_v4
+115/585982/campos_512_v4
+115/585987/campos_512_v4
+115/585988/campos_512_v4
+115/585992/campos_512_v4
+115/585995/campos_512_v4
+115/585996/campos_512_v4
+115/585998/campos_512_v4
+115/585999/campos_512_v4
+115/586020/campos_512_v4
+115/586034/campos_512_v4
+115/586049/campos_512_v4
+115/586051/campos_512_v4
+115/586074/campos_512_v4
+115/586075/campos_512_v4
+115/586079/campos_512_v4
+115/586082/campos_512_v4
+115/586085/campos_512_v4
+115/586088/campos_512_v4
+115/586099/campos_512_v4
+115/586100/campos_512_v4
+115/586102/campos_512_v4
+115/586105/campos_512_v4
+115/586116/campos_512_v4
+115/586124/campos_512_v4
+115/586129/campos_512_v4
+115/586133/campos_512_v4
+115/586149/campos_512_v4
+115/586150/campos_512_v4
+115/586152/campos_512_v4
+115/586153/campos_512_v4
+115/586162/campos_512_v4
+115/586165/campos_512_v4
+115/586196/campos_512_v4
+115/586198/campos_512_v4
+115/586212/campos_512_v4
+115/586213/campos_512_v4
+115/586215/campos_512_v4
+115/586216/campos_512_v4
+115/586224/campos_512_v4
+115/586249/campos_512_v4
+115/586261/campos_512_v4
+115/586268/campos_512_v4
+115/586279/campos_512_v4
+115/586281/campos_512_v4
+115/586283/campos_512_v4
+115/586287/campos_512_v4
+115/586294/campos_512_v4
+115/586295/campos_512_v4
+115/586298/campos_512_v4
+115/586302/campos_512_v4
+115/586303/campos_512_v4
+115/586309/campos_512_v4
+115/586320/campos_512_v4
+115/586322/campos_512_v4
+115/586324/campos_512_v4
+115/586332/campos_512_v4
+115/586337/campos_512_v4
+115/586341/campos_512_v4
+115/586345/campos_512_v4
+115/586352/campos_512_v4
+115/586357/campos_512_v4
+115/586365/campos_512_v4
+115/586366/campos_512_v4
+115/586367/campos_512_v4
+115/586370/campos_512_v4
+115/586371/campos_512_v4
+115/586378/campos_512_v4
+115/586382/campos_512_v4
+115/586386/campos_512_v4
+115/586396/campos_512_v4
+115/586398/campos_512_v4
+115/586420/campos_512_v4
+115/586421/campos_512_v4
+115/586426/campos_512_v4
+115/586427/campos_512_v4
+115/586434/campos_512_v4
+115/586439/campos_512_v4
+115/586443/campos_512_v4
+115/586445/campos_512_v4
+115/586446/campos_512_v4
+115/586451/campos_512_v4
+115/586462/campos_512_v4
+115/586466/campos_512_v4
+115/586471/campos_512_v4
+115/586478/campos_512_v4
+115/586479/campos_512_v4
+115/586486/campos_512_v4
+115/586503/campos_512_v4
+115/586518/campos_512_v4
+115/586523/campos_512_v4
+115/586527/campos_512_v4
+115/586535/campos_512_v4
+115/586540/campos_512_v4
+115/586543/campos_512_v4
+115/586547/campos_512_v4
+115/586565/campos_512_v4
+115/586567/campos_512_v4
+115/586568/campos_512_v4
+115/586572/campos_512_v4
+115/586579/campos_512_v4
+115/586584/campos_512_v4
+115/586585/campos_512_v4
+115/586586/campos_512_v4
+115/586592/campos_512_v4
+115/586601/campos_512_v4
+115/586618/campos_512_v4
+115/586627/campos_512_v4
+115/586633/campos_512_v4
+115/586637/campos_512_v4
+115/586643/campos_512_v4
+115/586645/campos_512_v4
+115/586648/campos_512_v4
+115/586652/campos_512_v4
+115/586662/campos_512_v4
+115/586666/campos_512_v4
+115/586678/campos_512_v4
+115/586709/campos_512_v4
+115/586713/campos_512_v4
+115/586714/campos_512_v4
+115/586721/campos_512_v4
+115/586722/campos_512_v4
+115/586723/campos_512_v4
+115/586732/campos_512_v4
+115/586741/campos_512_v4
+115/586743/campos_512_v4
+115/586744/campos_512_v4
+115/586745/campos_512_v4
+115/586748/campos_512_v4
+115/586758/campos_512_v4
+115/586761/campos_512_v4
+115/586786/campos_512_v4
+115/586795/campos_512_v4
+115/586796/campos_512_v4
+115/586800/campos_512_v4
+115/586808/campos_512_v4
+115/586810/campos_512_v4
+115/586814/campos_512_v4
+115/586815/campos_512_v4
+115/586818/campos_512_v4
+115/586821/campos_512_v4
+115/586822/campos_512_v4
+115/586825/campos_512_v4
+115/586831/campos_512_v4
+115/586839/campos_512_v4
+115/586851/campos_512_v4
+115/586867/campos_512_v4
+115/586872/campos_512_v4
+115/586875/campos_512_v4
+115/586880/campos_512_v4
+115/586884/campos_512_v4
+115/586889/campos_512_v4
+115/586892/campos_512_v4
+115/586894/campos_512_v4
+115/586900/campos_512_v4
+115/586902/campos_512_v4
+115/586909/campos_512_v4
+115/586915/campos_512_v4
+115/586923/campos_512_v4
+115/586928/campos_512_v4
+115/586931/campos_512_v4
+115/586961/campos_512_v4
+115/586967/campos_512_v4
+115/586969/campos_512_v4
+115/586978/campos_512_v4
+115/586983/campos_512_v4
+115/586986/campos_512_v4
+115/586995/campos_512_v4
+115/587020/campos_512_v4
+115/587021/campos_512_v4
+115/587031/campos_512_v4
+115/587032/campos_512_v4
+115/587072/campos_512_v4
+115/587079/campos_512_v4
+115/587084/campos_512_v4
+115/587085/campos_512_v4
+115/587087/campos_512_v4
+115/587093/campos_512_v4
+115/587105/campos_512_v4
+115/587107/campos_512_v4
+115/587118/campos_512_v4
+115/587136/campos_512_v4
+115/587137/campos_512_v4
+115/587145/campos_512_v4
+115/587146/campos_512_v4
+115/587147/campos_512_v4
+115/587151/campos_512_v4
+115/587160/campos_512_v4
+115/587162/campos_512_v4
+115/587166/campos_512_v4
+115/587167/campos_512_v4
+115/587189/campos_512_v4
+115/587212/campos_512_v4
+115/587223/campos_512_v4
+115/587224/campos_512_v4
+115/587228/campos_512_v4
+115/587230/campos_512_v4
+115/587232/campos_512_v4
+115/587234/campos_512_v4
+115/587236/campos_512_v4
+115/587256/campos_512_v4
+115/587257/campos_512_v4
+115/587271/campos_512_v4
+115/587281/campos_512_v4
+115/587288/campos_512_v4
+115/587326/campos_512_v4
+115/587332/campos_512_v4
+115/587350/campos_512_v4
+115/587377/campos_512_v4
+115/587380/campos_512_v4
+115/587381/campos_512_v4
+115/587387/campos_512_v4
+115/587398/campos_512_v4
+115/587399/campos_512_v4
+115/587403/campos_512_v4
+115/587408/campos_512_v4
+115/587418/campos_512_v4
+115/587421/campos_512_v4
+115/587430/campos_512_v4
+115/587440/campos_512_v4
+115/587443/campos_512_v4
+115/587446/campos_512_v4
+115/587456/campos_512_v4
+115/587462/campos_512_v4
+115/587466/campos_512_v4
+115/587470/campos_512_v4
+115/587471/campos_512_v4
+115/587476/campos_512_v4
+115/587481/campos_512_v4
+115/587487/campos_512_v4
+115/587492/campos_512_v4
+115/587497/campos_512_v4
+115/587504/campos_512_v4
+115/587506/campos_512_v4
+115/587512/campos_512_v4
+115/587528/campos_512_v4
+115/587531/campos_512_v4
+115/587533/campos_512_v4
+115/587537/campos_512_v4
+115/587564/campos_512_v4
+115/587567/campos_512_v4
+115/587581/campos_512_v4
+115/587582/campos_512_v4
+115/587588/campos_512_v4
+115/587606/campos_512_v4
+115/587607/campos_512_v4
+115/587609/campos_512_v4
+115/587615/campos_512_v4
+115/587616/campos_512_v4
+115/587619/campos_512_v4
+115/587622/campos_512_v4
+115/587624/campos_512_v4
+115/587640/campos_512_v4
+115/587644/campos_512_v4
+115/587645/campos_512_v4
+115/587655/campos_512_v4
+115/587676/campos_512_v4
+115/587677/campos_512_v4
+115/587681/campos_512_v4
+115/587688/campos_512_v4
+115/587689/campos_512_v4
+115/587701/campos_512_v4
+115/587706/campos_512_v4
+115/587716/campos_512_v4
+115/587724/campos_512_v4
+115/587728/campos_512_v4
+115/587739/campos_512_v4
+115/587742/campos_512_v4
+115/587743/campos_512_v4
+115/587746/campos_512_v4
+115/587748/campos_512_v4
+115/587756/campos_512_v4
+115/587761/campos_512_v4
+115/587763/campos_512_v4
+115/587774/campos_512_v4
+115/587775/campos_512_v4
+115/587778/campos_512_v4
+115/587784/campos_512_v4
+115/587788/campos_512_v4
+115/587797/campos_512_v4
+115/587798/campos_512_v4
+115/587807/campos_512_v4
+115/587816/campos_512_v4
+115/587822/campos_512_v4
+115/587828/campos_512_v4
+115/587830/campos_512_v4
+115/587840/campos_512_v4
+115/587858/campos_512_v4
+115/587859/campos_512_v4
+115/587867/campos_512_v4
+115/587873/campos_512_v4
+115/587893/campos_512_v4
+115/587895/campos_512_v4
+115/587897/campos_512_v4
+115/587908/campos_512_v4
+115/587923/campos_512_v4
+115/587930/campos_512_v4
+115/587931/campos_512_v4
+115/587935/campos_512_v4
+115/587941/campos_512_v4
+115/587952/campos_512_v4
+115/587957/campos_512_v4
+115/587959/campos_512_v4
+115/587960/campos_512_v4
+115/587964/campos_512_v4
+115/587967/campos_512_v4
+115/587970/campos_512_v4
+115/587990/campos_512_v4
+115/587991/campos_512_v4
+115/587997/campos_512_v4
+115/588001/campos_512_v4
+115/588003/campos_512_v4
+115/588014/campos_512_v4
+115/588015/campos_512_v4
+115/588023/campos_512_v4
+115/588068/campos_512_v4
+115/588087/campos_512_v4
+115/588089/campos_512_v4
+115/588095/campos_512_v4
+115/588100/campos_512_v4
+115/588105/campos_512_v4
+115/588111/campos_512_v4
+115/588112/campos_512_v4
+115/588113/campos_512_v4
+115/588119/campos_512_v4
+115/588121/campos_512_v4
+115/588130/campos_512_v4
+115/588135/campos_512_v4
+115/588140/campos_512_v4
+115/588141/campos_512_v4
+115/588150/campos_512_v4
+115/588165/campos_512_v4
+115/588172/campos_512_v4
+115/588184/campos_512_v4
+115/588196/campos_512_v4
+115/588207/campos_512_v4
+115/588215/campos_512_v4
+115/588220/campos_512_v4
+115/588235/campos_512_v4
+115/588246/campos_512_v4
+115/588263/campos_512_v4
+115/588275/campos_512_v4
+115/588276/campos_512_v4
+115/588283/campos_512_v4
+115/588291/campos_512_v4
+115/588296/campos_512_v4
+115/588305/campos_512_v4
+115/588315/campos_512_v4
+115/588319/campos_512_v4
+115/588321/campos_512_v4
+115/588324/campos_512_v4
+115/588327/campos_512_v4
+115/588344/campos_512_v4
+115/588345/campos_512_v4
+115/588349/campos_512_v4
+115/588352/campos_512_v4
+115/588359/campos_512_v4
+115/588388/campos_512_v4
+115/588393/campos_512_v4
+115/588401/campos_512_v4
+115/588405/campos_512_v4
+115/588410/campos_512_v4
+115/588412/campos_512_v4
+115/588421/campos_512_v4
+115/588428/campos_512_v4
+115/588432/campos_512_v4
+115/588444/campos_512_v4
+115/588454/campos_512_v4
+115/588457/campos_512_v4
+115/588481/campos_512_v4
+115/588487/campos_512_v4
+115/588492/campos_512_v4
+115/588496/campos_512_v4
+115/588510/campos_512_v4
+115/588513/campos_512_v4
+115/588519/campos_512_v4
+115/588521/campos_512_v4
+115/588523/campos_512_v4
+115/588531/campos_512_v4
+115/588541/campos_512_v4
+115/588551/campos_512_v4
+115/588560/campos_512_v4
+115/588566/campos_512_v4
+115/588575/campos_512_v4
+115/588577/campos_512_v4
+115/588591/campos_512_v4
+115/588608/campos_512_v4
+115/588612/campos_512_v4
+115/588625/campos_512_v4
+115/588626/campos_512_v4
+115/588628/campos_512_v4
+115/588631/campos_512_v4
+115/588634/campos_512_v4
+115/588635/campos_512_v4
+115/588637/campos_512_v4
+115/588642/campos_512_v4
+115/588667/campos_512_v4
+115/588681/campos_512_v4
+115/588682/campos_512_v4
+115/588705/campos_512_v4
+115/588725/campos_512_v4
+115/588727/campos_512_v4
+115/588728/campos_512_v4
+115/588731/campos_512_v4
+115/588736/campos_512_v4
+115/588740/campos_512_v4
+115/588749/campos_512_v4
+115/588756/campos_512_v4
+115/588760/campos_512_v4
+115/588768/campos_512_v4
+115/588771/campos_512_v4
+115/588777/campos_512_v4
+115/588781/campos_512_v4
+115/588784/campos_512_v4
+115/588790/campos_512_v4
+115/588793/campos_512_v4
+115/588815/campos_512_v4
+115/588821/campos_512_v4
+115/588825/campos_512_v4
+115/588838/campos_512_v4
+115/588847/campos_512_v4
+115/588863/campos_512_v4
+115/588877/campos_512_v4
+115/588878/campos_512_v4
+115/588897/campos_512_v4
+115/588899/campos_512_v4
+115/588924/campos_512_v4
+115/588927/campos_512_v4
+115/588930/campos_512_v4
+115/588937/campos_512_v4
+115/588939/campos_512_v4
+115/588942/campos_512_v4
+115/588950/campos_512_v4
+115/588952/campos_512_v4
+115/588954/campos_512_v4
+115/588969/campos_512_v4
+115/588975/campos_512_v4
+115/588976/campos_512_v4
+115/588985/campos_512_v4
+115/589000/campos_512_v4
+115/589003/campos_512_v4
+115/589008/campos_512_v4
+115/589013/campos_512_v4
+115/589024/campos_512_v4
+115/589025/campos_512_v4
+115/589030/campos_512_v4
+115/589038/campos_512_v4
+115/589051/campos_512_v4
+115/589052/campos_512_v4
+115/589072/campos_512_v4
+115/589085/campos_512_v4
+115/589086/campos_512_v4
+115/589092/campos_512_v4
+115/589114/campos_512_v4
+115/589119/campos_512_v4
+115/589128/campos_512_v4
+115/589132/campos_512_v4
+115/589139/campos_512_v4
+115/589140/campos_512_v4
+115/589150/campos_512_v4
+115/589158/campos_512_v4
+115/589166/campos_512_v4
+115/589175/campos_512_v4
+115/589182/campos_512_v4
+115/589185/campos_512_v4
+115/589192/campos_512_v4
+115/589193/campos_512_v4
+115/589200/campos_512_v4
+115/589207/campos_512_v4
+115/589214/campos_512_v4
+115/589232/campos_512_v4
+115/589239/campos_512_v4
+115/589246/campos_512_v4
+115/589248/campos_512_v4
+115/589253/campos_512_v4
+115/589257/campos_512_v4
+115/589272/campos_512_v4
+115/589276/campos_512_v4
+115/589279/campos_512_v4
+115/589322/campos_512_v4
+115/589329/campos_512_v4
+115/589330/campos_512_v4
+115/589350/campos_512_v4
+115/589352/campos_512_v4
+115/589367/campos_512_v4
+115/589374/campos_512_v4
+115/589378/campos_512_v4
+115/589379/campos_512_v4
+115/589381/campos_512_v4
+115/589387/campos_512_v4
+115/589394/campos_512_v4
+115/589407/campos_512_v4
+115/589425/campos_512_v4
+115/589427/campos_512_v4
+115/589436/campos_512_v4
+115/589437/campos_512_v4
+115/589440/campos_512_v4
+115/589462/campos_512_v4
+115/589466/campos_512_v4
+115/589471/campos_512_v4
+115/589479/campos_512_v4
+115/589483/campos_512_v4
+115/589490/campos_512_v4
+115/589493/campos_512_v4
+115/589507/campos_512_v4
+115/589517/campos_512_v4
+115/589518/campos_512_v4
+115/589524/campos_512_v4
+115/589527/campos_512_v4
+115/589533/campos_512_v4
+115/589548/campos_512_v4
+115/589561/campos_512_v4
+115/589578/campos_512_v4
+115/589583/campos_512_v4
+115/589590/campos_512_v4
+115/589595/campos_512_v4
+115/589599/campos_512_v4
+115/589617/campos_512_v4
+115/589632/campos_512_v4
+115/589641/campos_512_v4
+115/589648/campos_512_v4
+115/589655/campos_512_v4
+115/589656/campos_512_v4
+115/589660/campos_512_v4
+115/589669/campos_512_v4
+115/589678/campos_512_v4
+115/589682/campos_512_v4
+115/589692/campos_512_v4
+115/589711/campos_512_v4
+115/589715/campos_512_v4
+115/589718/campos_512_v4
+115/589725/campos_512_v4
+115/589731/campos_512_v4
+115/589741/campos_512_v4
+115/589753/campos_512_v4
+115/589756/campos_512_v4
+115/589765/campos_512_v4
+115/589779/campos_512_v4
+115/589788/campos_512_v4
+115/589800/campos_512_v4
+115/589801/campos_512_v4
+115/589808/campos_512_v4
+115/589810/campos_512_v4
+115/589813/campos_512_v4
+115/589842/campos_512_v4
+115/589860/campos_512_v4
+115/589862/campos_512_v4
+115/589864/campos_512_v4
+115/589865/campos_512_v4
+115/589877/campos_512_v4
+115/589878/campos_512_v4
+115/589881/campos_512_v4
+115/589882/campos_512_v4
+115/589884/campos_512_v4
+115/589886/campos_512_v4
+115/589905/campos_512_v4
+115/589917/campos_512_v4
+115/589921/campos_512_v4
+115/589935/campos_512_v4
+115/589940/campos_512_v4
+115/589947/campos_512_v4
+115/589980/campos_512_v4
+115/589990/campos_512_v4
+116/590004/campos_512_v4
+116/590009/campos_512_v4
+116/590023/campos_512_v4
+116/590024/campos_512_v4
+116/590025/campos_512_v4
+116/590029/campos_512_v4
+116/590045/campos_512_v4
+116/590048/campos_512_v4
+116/590049/campos_512_v4
+116/590052/campos_512_v4
+116/590060/campos_512_v4
+116/590078/campos_512_v4
+116/590087/campos_512_v4
+116/590092/campos_512_v4
+116/590105/campos_512_v4
+116/590107/campos_512_v4
+116/590113/campos_512_v4
+116/590114/campos_512_v4
+116/590115/campos_512_v4
+116/590118/campos_512_v4
+116/590120/campos_512_v4
+116/590125/campos_512_v4
+116/590126/campos_512_v4
+116/590146/campos_512_v4
+116/590151/campos_512_v4
+116/590154/campos_512_v4
+116/590159/campos_512_v4
+116/590179/campos_512_v4
+116/590184/campos_512_v4
+116/590187/campos_512_v4
+116/590188/campos_512_v4
+116/590190/campos_512_v4
+116/590194/campos_512_v4
+116/590203/campos_512_v4
+116/590213/campos_512_v4
+116/590227/campos_512_v4
+116/590237/campos_512_v4
+116/590243/campos_512_v4
+116/590268/campos_512_v4
+116/590269/campos_512_v4
+116/590272/campos_512_v4
+116/590276/campos_512_v4
+116/590293/campos_512_v4
+116/590296/campos_512_v4
+116/590299/campos_512_v4
+116/590303/campos_512_v4
+116/590304/campos_512_v4
+116/590317/campos_512_v4
+116/590346/campos_512_v4
+116/590347/campos_512_v4
+116/590357/campos_512_v4
+116/590368/campos_512_v4
+116/590386/campos_512_v4
+116/590399/campos_512_v4
+116/590404/campos_512_v4
+116/590420/campos_512_v4
+116/590431/campos_512_v4
+116/590471/campos_512_v4
+116/590474/campos_512_v4
+116/590489/campos_512_v4
+116/590502/campos_512_v4
+116/590518/campos_512_v4
+116/590519/campos_512_v4
+116/590532/campos_512_v4
+116/590533/campos_512_v4
+116/590537/campos_512_v4
+116/590544/campos_512_v4
+116/590568/campos_512_v4
+116/590572/campos_512_v4
+116/590581/campos_512_v4
+116/590601/campos_512_v4
+116/590607/campos_512_v4
+116/590608/campos_512_v4
+116/590622/campos_512_v4
+116/590650/campos_512_v4
+116/590656/campos_512_v4
+116/590661/campos_512_v4
+116/590662/campos_512_v4
+116/590663/campos_512_v4
+116/590673/campos_512_v4
+116/590683/campos_512_v4
+116/590690/campos_512_v4
+116/590712/campos_512_v4
+116/590724/campos_512_v4
+116/590725/campos_512_v4
+116/590727/campos_512_v4
+116/590732/campos_512_v4
+116/590745/campos_512_v4
+116/590755/campos_512_v4
+116/590758/campos_512_v4
+116/590761/campos_512_v4
+116/590766/campos_512_v4
+116/590774/campos_512_v4
+116/590775/campos_512_v4
+116/590778/campos_512_v4
+116/590780/campos_512_v4
+116/590792/campos_512_v4
+116/590799/campos_512_v4
+116/590804/campos_512_v4
+116/590805/campos_512_v4
+116/590807/campos_512_v4
+116/590809/campos_512_v4
+116/590816/campos_512_v4
+116/590822/campos_512_v4
+116/590827/campos_512_v4
+116/590838/campos_512_v4
+116/590863/campos_512_v4
+116/590879/campos_512_v4
+116/590882/campos_512_v4
+116/590884/campos_512_v4
+116/590887/campos_512_v4
+116/590890/campos_512_v4
+116/590900/campos_512_v4
+116/590906/campos_512_v4
+116/590916/campos_512_v4
+116/590923/campos_512_v4
+116/590930/campos_512_v4
+116/590949/campos_512_v4
+116/590961/campos_512_v4
+116/590966/campos_512_v4
+116/590967/campos_512_v4
+116/590981/campos_512_v4
+116/590987/campos_512_v4
+116/590996/campos_512_v4
+116/591009/campos_512_v4
+116/591021/campos_512_v4
+116/591025/campos_512_v4
+116/591026/campos_512_v4
+116/591029/campos_512_v4
+116/591055/campos_512_v4
+116/591056/campos_512_v4
+116/591057/campos_512_v4
+116/591058/campos_512_v4
+116/591060/campos_512_v4
+116/591062/campos_512_v4
+116/591064/campos_512_v4
+116/591066/campos_512_v4
+116/591077/campos_512_v4
+116/591086/campos_512_v4
+116/591101/campos_512_v4
+116/591114/campos_512_v4
+116/591115/campos_512_v4
+116/591126/campos_512_v4
+116/591132/campos_512_v4
+116/591154/campos_512_v4
+116/591156/campos_512_v4
+116/591166/campos_512_v4
+116/591167/campos_512_v4
+116/591173/campos_512_v4
+116/591174/campos_512_v4
+116/591190/campos_512_v4
+116/591192/campos_512_v4
+116/591193/campos_512_v4
+116/591202/campos_512_v4
+116/591215/campos_512_v4
+116/591219/campos_512_v4
+116/591238/campos_512_v4
+116/591263/campos_512_v4
+116/591293/campos_512_v4
+116/591298/campos_512_v4
+116/591314/campos_512_v4
+116/591324/campos_512_v4
+116/591326/campos_512_v4
+116/591336/campos_512_v4
+116/591349/campos_512_v4
+116/591360/campos_512_v4
+116/591362/campos_512_v4
+116/591376/campos_512_v4
+116/591377/campos_512_v4
+116/591382/campos_512_v4
+116/591390/campos_512_v4
+116/591396/campos_512_v4
+116/591399/campos_512_v4
+116/591405/campos_512_v4
+116/591408/campos_512_v4
+116/591417/campos_512_v4
+116/591428/campos_512_v4
+116/591431/campos_512_v4
+116/591439/campos_512_v4
+116/591444/campos_512_v4
+116/591450/campos_512_v4
+116/591451/campos_512_v4
+116/591453/campos_512_v4
+116/591459/campos_512_v4
+116/591462/campos_512_v4
+116/591477/campos_512_v4
+116/591486/campos_512_v4
+116/591492/campos_512_v4
+116/591501/campos_512_v4
+116/591507/campos_512_v4
+116/591508/campos_512_v4
+116/591514/campos_512_v4
+116/591529/campos_512_v4
+116/591531/campos_512_v4
+116/591550/campos_512_v4
+116/591557/campos_512_v4
+116/591560/campos_512_v4
+116/591571/campos_512_v4
+116/591579/campos_512_v4
+116/591581/campos_512_v4
+116/591585/campos_512_v4
+116/591586/campos_512_v4
+116/591596/campos_512_v4
+116/591605/campos_512_v4
+116/591611/campos_512_v4
+116/591612/campos_512_v4
+116/591613/campos_512_v4
+116/591620/campos_512_v4
+116/591630/campos_512_v4
+116/591639/campos_512_v4
+116/591654/campos_512_v4
+116/591666/campos_512_v4
+116/591675/campos_512_v4
+116/591682/campos_512_v4
+116/591696/campos_512_v4
+116/591706/campos_512_v4
+116/591709/campos_512_v4
+116/591713/campos_512_v4
+116/591715/campos_512_v4
+116/591726/campos_512_v4
+116/591736/campos_512_v4
+116/591739/campos_512_v4
+116/591741/campos_512_v4
+116/591746/campos_512_v4
+116/591754/campos_512_v4
+116/591763/campos_512_v4
+116/591771/campos_512_v4
+116/591774/campos_512_v4
+116/591782/campos_512_v4
+116/591784/campos_512_v4
+116/591795/campos_512_v4
+116/591808/campos_512_v4
+116/591809/campos_512_v4
+116/591817/campos_512_v4
+116/591856/campos_512_v4
+116/591863/campos_512_v4
+116/591873/campos_512_v4
+116/591882/campos_512_v4
+116/591887/campos_512_v4
+116/591891/campos_512_v4
+116/591906/campos_512_v4
+116/591913/campos_512_v4
+116/591939/campos_512_v4
+116/591945/campos_512_v4
+116/591948/campos_512_v4
+116/591953/campos_512_v4
+116/591954/campos_512_v4
+116/591961/campos_512_v4
+116/591992/campos_512_v4
+116/592006/campos_512_v4
+116/592007/campos_512_v4
+116/592014/campos_512_v4
+116/592036/campos_512_v4
+116/592043/campos_512_v4
+116/592051/campos_512_v4
+116/592072/campos_512_v4
+116/592074/campos_512_v4
+116/592125/campos_512_v4
+116/592132/campos_512_v4
+116/592140/campos_512_v4
+116/592146/campos_512_v4
+116/592152/campos_512_v4
+116/592156/campos_512_v4
+116/592160/campos_512_v4
+116/592161/campos_512_v4
+116/592169/campos_512_v4
+116/592172/campos_512_v4
+116/592173/campos_512_v4
+116/592176/campos_512_v4
+116/592193/campos_512_v4
+116/592201/campos_512_v4
+116/592219/campos_512_v4
+116/592222/campos_512_v4
+116/592226/campos_512_v4
+116/592253/campos_512_v4
+116/592255/campos_512_v4
+116/592264/campos_512_v4
+116/592272/campos_512_v4
+116/592274/campos_512_v4
+116/592280/campos_512_v4
+116/592283/campos_512_v4
+116/592300/campos_512_v4
+116/592307/campos_512_v4
+116/592326/campos_512_v4
+116/592339/campos_512_v4
+116/592351/campos_512_v4
+116/592380/campos_512_v4
+116/592382/campos_512_v4
+116/592385/campos_512_v4
+116/592393/campos_512_v4
+116/592395/campos_512_v4
+116/592404/campos_512_v4
+116/592413/campos_512_v4
+116/592418/campos_512_v4
+116/592419/campos_512_v4
+116/592420/campos_512_v4
+116/592424/campos_512_v4
+116/592426/campos_512_v4
+116/592438/campos_512_v4
+116/592439/campos_512_v4
+116/592440/campos_512_v4
+116/592448/campos_512_v4
+116/592453/campos_512_v4
+116/592462/campos_512_v4
+116/592468/campos_512_v4
+116/592480/campos_512_v4
+116/592481/campos_512_v4
+116/592485/campos_512_v4
+116/592486/campos_512_v4
+116/592503/campos_512_v4
+116/592506/campos_512_v4
+116/592508/campos_512_v4
+116/592518/campos_512_v4
+116/592533/campos_512_v4
+116/592536/campos_512_v4
+116/592544/campos_512_v4
+116/592545/campos_512_v4
+116/592550/campos_512_v4
+116/592551/campos_512_v4
+116/592562/campos_512_v4
+116/592568/campos_512_v4
+116/592575/campos_512_v4
+116/592578/campos_512_v4
+116/592597/campos_512_v4
+116/592610/campos_512_v4
+116/592619/campos_512_v4
+116/592629/campos_512_v4
+116/592630/campos_512_v4
+116/592639/campos_512_v4
+116/592649/campos_512_v4
+116/592670/campos_512_v4
+116/592680/campos_512_v4
+116/592684/campos_512_v4
+116/592690/campos_512_v4
+116/592694/campos_512_v4
+116/592713/campos_512_v4
+116/592719/campos_512_v4
+116/592736/campos_512_v4
+116/592764/campos_512_v4
+116/592783/campos_512_v4
+116/592787/campos_512_v4
+116/592797/campos_512_v4
+116/592824/campos_512_v4
+116/592836/campos_512_v4
+116/592845/campos_512_v4
+116/592846/campos_512_v4
+116/592850/campos_512_v4
+116/592854/campos_512_v4
+116/592861/campos_512_v4
+116/592863/campos_512_v4
+116/592878/campos_512_v4
+116/592879/campos_512_v4
+116/592897/campos_512_v4
+116/592898/campos_512_v4
+116/592912/campos_512_v4
+116/592919/campos_512_v4
+116/592923/campos_512_v4
+116/592955/campos_512_v4
+116/592960/campos_512_v4
+116/592979/campos_512_v4
+116/592990/campos_512_v4
+116/593010/campos_512_v4
+116/593013/campos_512_v4
+116/593021/campos_512_v4
+116/593022/campos_512_v4
+116/593024/campos_512_v4
+116/593029/campos_512_v4
+116/593030/campos_512_v4
+116/593032/campos_512_v4
+116/593038/campos_512_v4
+116/593054/campos_512_v4
+116/593055/campos_512_v4
+116/593058/campos_512_v4
+116/593067/campos_512_v4
+116/593072/campos_512_v4
+116/593074/campos_512_v4
+116/593084/campos_512_v4
+116/593085/campos_512_v4
+116/593110/campos_512_v4
+116/593141/campos_512_v4
+116/593147/campos_512_v4
+116/593148/campos_512_v4
+116/593150/campos_512_v4
+116/593151/campos_512_v4
+116/593161/campos_512_v4
+116/593163/campos_512_v4
+116/593174/campos_512_v4
+116/593176/campos_512_v4
+116/593178/campos_512_v4
+116/593187/campos_512_v4
+116/593191/campos_512_v4
+116/593193/campos_512_v4
+116/593199/campos_512_v4
+116/593207/campos_512_v4
+116/593214/campos_512_v4
+116/593216/campos_512_v4
+116/593218/campos_512_v4
+116/593228/campos_512_v4
+116/593230/campos_512_v4
+116/593248/campos_512_v4
+116/593255/campos_512_v4
+116/593256/campos_512_v4
+116/593263/campos_512_v4
+116/593274/campos_512_v4
+116/593281/campos_512_v4
+116/593288/campos_512_v4
+116/593289/campos_512_v4
+116/593293/campos_512_v4
+116/593295/campos_512_v4
+116/593308/campos_512_v4
+116/593339/campos_512_v4
+116/593347/campos_512_v4
+116/593350/campos_512_v4
+116/593356/campos_512_v4
+116/593359/campos_512_v4
+116/593365/campos_512_v4
+116/593368/campos_512_v4
+116/593374/campos_512_v4
+116/593383/campos_512_v4
+116/593388/campos_512_v4
+116/593399/campos_512_v4
+116/593403/campos_512_v4
+116/593405/campos_512_v4
+116/593421/campos_512_v4
+116/593427/campos_512_v4
+116/593429/campos_512_v4
+116/593430/campos_512_v4
+116/593437/campos_512_v4
+116/593454/campos_512_v4
+116/593460/campos_512_v4
+116/593473/campos_512_v4
+116/593490/campos_512_v4
+116/593511/campos_512_v4
+116/593536/campos_512_v4
+116/593542/campos_512_v4
+116/593546/campos_512_v4
+116/593553/campos_512_v4
+116/593564/campos_512_v4
+116/593570/campos_512_v4
+116/593576/campos_512_v4
+116/593588/campos_512_v4
+116/593590/campos_512_v4
+116/593596/campos_512_v4
+116/593600/campos_512_v4
+116/593602/campos_512_v4
+116/593604/campos_512_v4
+116/593607/campos_512_v4
+116/593609/campos_512_v4
+116/593623/campos_512_v4
+116/593627/campos_512_v4
+116/593650/campos_512_v4
+116/593657/campos_512_v4
+116/593659/campos_512_v4
+116/593663/campos_512_v4
+116/593702/campos_512_v4
+116/593708/campos_512_v4
+116/593724/campos_512_v4
+116/593740/campos_512_v4
+116/593748/campos_512_v4
+116/593749/campos_512_v4
+116/593761/campos_512_v4
+116/593780/campos_512_v4
+116/593785/campos_512_v4
+116/593800/campos_512_v4
+116/593804/campos_512_v4
+116/593808/campos_512_v4
+116/593809/campos_512_v4
+116/593812/campos_512_v4
+116/593814/campos_512_v4
+116/593828/campos_512_v4
+116/593829/campos_512_v4
+116/593839/campos_512_v4
+116/593840/campos_512_v4
+116/593860/campos_512_v4
+116/593873/campos_512_v4
+116/593881/campos_512_v4
+116/593889/campos_512_v4
+116/593891/campos_512_v4
+116/593914/campos_512_v4
+116/593928/campos_512_v4
+116/593929/campos_512_v4
+116/593937/campos_512_v4
+116/593940/campos_512_v4
+116/593964/campos_512_v4
+116/593968/campos_512_v4
+116/593976/campos_512_v4
+116/593977/campos_512_v4
+116/593993/campos_512_v4
+116/593994/campos_512_v4
+116/593995/campos_512_v4
+116/593998/campos_512_v4
+116/594005/campos_512_v4
+116/594015/campos_512_v4
+116/594023/campos_512_v4
+116/594028/campos_512_v4
+116/594037/campos_512_v4
+116/594048/campos_512_v4
+116/594052/campos_512_v4
+116/594056/campos_512_v4
+116/594057/campos_512_v4
+116/594069/campos_512_v4
+116/594073/campos_512_v4
+116/594075/campos_512_v4
+116/594078/campos_512_v4
+116/594079/campos_512_v4
+116/594093/campos_512_v4
+116/594094/campos_512_v4
+116/594096/campos_512_v4
+116/594104/campos_512_v4
+116/594116/campos_512_v4
+116/594117/campos_512_v4
+116/594132/campos_512_v4
+116/594135/campos_512_v4
+116/594151/campos_512_v4
+116/594153/campos_512_v4
+116/594154/campos_512_v4
+116/594172/campos_512_v4
+116/594179/campos_512_v4
+116/594180/campos_512_v4
+116/594192/campos_512_v4
+116/594203/campos_512_v4
+116/594204/campos_512_v4
+116/594207/campos_512_v4
+116/594211/campos_512_v4
+116/594212/campos_512_v4
+116/594218/campos_512_v4
+116/594229/campos_512_v4
+116/594238/campos_512_v4
+116/594241/campos_512_v4
+116/594249/campos_512_v4
+116/594251/campos_512_v4
+116/594257/campos_512_v4
+116/594260/campos_512_v4
+116/594292/campos_512_v4
+116/594296/campos_512_v4
+116/594313/campos_512_v4
+116/594315/campos_512_v4
+116/594319/campos_512_v4
+116/594320/campos_512_v4
+116/594323/campos_512_v4
+116/594324/campos_512_v4
+116/594325/campos_512_v4
+116/594332/campos_512_v4
+116/594333/campos_512_v4
+116/594344/campos_512_v4
+116/594361/campos_512_v4
+116/594363/campos_512_v4
+116/594370/campos_512_v4
+116/594371/campos_512_v4
+116/594372/campos_512_v4
+116/594377/campos_512_v4
+116/594379/campos_512_v4
+116/594380/campos_512_v4
+116/594397/campos_512_v4
+116/594402/campos_512_v4
+116/594415/campos_512_v4
+116/594425/campos_512_v4
+116/594429/campos_512_v4
+116/594447/campos_512_v4
+116/594452/campos_512_v4
+116/594457/campos_512_v4
+116/594460/campos_512_v4
+116/594470/campos_512_v4
+116/594479/campos_512_v4
+116/594490/campos_512_v4
+116/594505/campos_512_v4
+116/594510/campos_512_v4
+116/594517/campos_512_v4
+116/594518/campos_512_v4
+116/594521/campos_512_v4
+116/594550/campos_512_v4
+116/594551/campos_512_v4
+116/594553/campos_512_v4
+116/594555/campos_512_v4
+116/594556/campos_512_v4
+116/594569/campos_512_v4
+116/594573/campos_512_v4
+116/594574/campos_512_v4
+116/594591/campos_512_v4
+116/594600/campos_512_v4
+116/594602/campos_512_v4
+116/594605/campos_512_v4
+116/594618/campos_512_v4
+116/594619/campos_512_v4
+116/594646/campos_512_v4
+116/594657/campos_512_v4
+116/594663/campos_512_v4
+116/594682/campos_512_v4
+116/594684/campos_512_v4
+116/594691/campos_512_v4
+116/594697/campos_512_v4
+116/594701/campos_512_v4
+116/594707/campos_512_v4
+116/594708/campos_512_v4
+116/594709/campos_512_v4
+116/594729/campos_512_v4
+116/594730/campos_512_v4
+116/594732/campos_512_v4
+116/594733/campos_512_v4
+116/594746/campos_512_v4
+116/594758/campos_512_v4
+116/594763/campos_512_v4
+116/594781/campos_512_v4
+116/594784/campos_512_v4
+116/594788/campos_512_v4
+116/594791/campos_512_v4
+116/594807/campos_512_v4
+116/594826/campos_512_v4
+116/594827/campos_512_v4
+116/594833/campos_512_v4
+116/594834/campos_512_v4
+116/594846/campos_512_v4
+116/594864/campos_512_v4
+116/594874/campos_512_v4
+116/594876/campos_512_v4
+116/594877/campos_512_v4
+116/594893/campos_512_v4
+116/594897/campos_512_v4
+116/594911/campos_512_v4
+116/594913/campos_512_v4
+116/594918/campos_512_v4
+116/594922/campos_512_v4
+116/594939/campos_512_v4
+116/594942/campos_512_v4
+116/594966/campos_512_v4
+116/594968/campos_512_v4
+116/594972/campos_512_v4
+116/594974/campos_512_v4
+116/594997/campos_512_v4
+117/595010/campos_512_v4
+117/595015/campos_512_v4
+117/595028/campos_512_v4
+117/595035/campos_512_v4
+117/595041/campos_512_v4
+117/595056/campos_512_v4
+117/595057/campos_512_v4
+117/595059/campos_512_v4
+117/595065/campos_512_v4
+117/595068/campos_512_v4
+117/595076/campos_512_v4
+117/595089/campos_512_v4
+117/595094/campos_512_v4
+117/595109/campos_512_v4
+117/595113/campos_512_v4
+117/595119/campos_512_v4
+117/595121/campos_512_v4
+117/595138/campos_512_v4
+117/595139/campos_512_v4
+117/595140/campos_512_v4
+117/595142/campos_512_v4
+117/595146/campos_512_v4
+117/595149/campos_512_v4
+117/595165/campos_512_v4
+117/595168/campos_512_v4
+117/595180/campos_512_v4
+117/595195/campos_512_v4
+117/595217/campos_512_v4
+117/595221/campos_512_v4
+117/595225/campos_512_v4
+117/595239/campos_512_v4
+117/595258/campos_512_v4
+117/595272/campos_512_v4
+117/595274/campos_512_v4
+117/595275/campos_512_v4
+117/595276/campos_512_v4
+117/595295/campos_512_v4
+117/595307/campos_512_v4
+117/595308/campos_512_v4
+117/595310/campos_512_v4
+117/595312/campos_512_v4
+117/595315/campos_512_v4
+117/595316/campos_512_v4
+117/595326/campos_512_v4
+117/595329/campos_512_v4
+117/595370/campos_512_v4
+117/595390/campos_512_v4
+117/595396/campos_512_v4
+117/595403/campos_512_v4
+117/595415/campos_512_v4
+117/595428/campos_512_v4
+117/595431/campos_512_v4
+117/595445/campos_512_v4
+117/595464/campos_512_v4
+117/595468/campos_512_v4
+117/595472/campos_512_v4
+117/595482/campos_512_v4
+117/595486/campos_512_v4
+117/595514/campos_512_v4
+117/595516/campos_512_v4
+117/595540/campos_512_v4
+117/595544/campos_512_v4
+117/595548/campos_512_v4
+117/595558/campos_512_v4
+117/595579/campos_512_v4
+117/595598/campos_512_v4
+117/595600/campos_512_v4
+117/595602/campos_512_v4
+117/595607/campos_512_v4
+117/595613/campos_512_v4
+117/595615/campos_512_v4
+117/595621/campos_512_v4
+117/595623/campos_512_v4
+117/595626/campos_512_v4
+117/595629/campos_512_v4
+117/595632/campos_512_v4
+117/595653/campos_512_v4
+117/595668/campos_512_v4
+117/595670/campos_512_v4
+117/595682/campos_512_v4
+117/595685/campos_512_v4
+117/595686/campos_512_v4
+117/595695/campos_512_v4
+117/595713/campos_512_v4
+117/595718/campos_512_v4
+117/595722/campos_512_v4
+117/595723/campos_512_v4
+117/595734/campos_512_v4
+117/595736/campos_512_v4
+117/595745/campos_512_v4
+117/595746/campos_512_v4
+117/595750/campos_512_v4
+117/595758/campos_512_v4
+117/595773/campos_512_v4
+117/595774/campos_512_v4
+117/595783/campos_512_v4
+117/595785/campos_512_v4
+117/595794/campos_512_v4
+117/595798/campos_512_v4
+117/595802/campos_512_v4
+117/595811/campos_512_v4
+117/595814/campos_512_v4
+117/595817/campos_512_v4
+117/595820/campos_512_v4
+117/595823/campos_512_v4
+117/595824/campos_512_v4
+117/595839/campos_512_v4
+117/595840/campos_512_v4
+117/595842/campos_512_v4
+117/595846/campos_512_v4
+117/595853/campos_512_v4
+117/595875/campos_512_v4
+117/595878/campos_512_v4
+117/595897/campos_512_v4
+117/595911/campos_512_v4
+117/595928/campos_512_v4
+117/595930/campos_512_v4
+117/595961/campos_512_v4
+117/595969/campos_512_v4
+117/595989/campos_512_v4
+117/596008/campos_512_v4
+117/596009/campos_512_v4
+117/596016/campos_512_v4
+117/596021/campos_512_v4
+117/596022/campos_512_v4
+117/596027/campos_512_v4
+117/596038/campos_512_v4
+117/596044/campos_512_v4
+117/596059/campos_512_v4
+117/596069/campos_512_v4
+117/596075/campos_512_v4
+117/596081/campos_512_v4
+117/596086/campos_512_v4
+117/596089/campos_512_v4
+117/596105/campos_512_v4
+117/596115/campos_512_v4
+117/596121/campos_512_v4
+117/596125/campos_512_v4
+117/596141/campos_512_v4
+117/596157/campos_512_v4
+117/596158/campos_512_v4
+117/596167/campos_512_v4
+117/596169/campos_512_v4
+117/596177/campos_512_v4
+117/596186/campos_512_v4
+117/596188/campos_512_v4
+117/596195/campos_512_v4
+117/596211/campos_512_v4
+117/596213/campos_512_v4
+117/596221/campos_512_v4
+117/596231/campos_512_v4
+117/596237/campos_512_v4
+117/596263/campos_512_v4
+117/596275/campos_512_v4
+117/596276/campos_512_v4
+117/596278/campos_512_v4
+117/596287/campos_512_v4
+117/596289/campos_512_v4
+117/596295/campos_512_v4
+117/596305/campos_512_v4
+117/596306/campos_512_v4
+117/596315/campos_512_v4
+117/596323/campos_512_v4
+117/596334/campos_512_v4
+117/596338/campos_512_v4
+117/596344/campos_512_v4
+117/596348/campos_512_v4
+117/596352/campos_512_v4
+117/596364/campos_512_v4
+117/596368/campos_512_v4
+117/596369/campos_512_v4
+117/596374/campos_512_v4
+117/596378/campos_512_v4
+117/596400/campos_512_v4
+117/596403/campos_512_v4
+117/596412/campos_512_v4
+117/596425/campos_512_v4
+117/596430/campos_512_v4
+117/596433/campos_512_v4
+117/596434/campos_512_v4
+117/596437/campos_512_v4
+117/596443/campos_512_v4
+117/596444/campos_512_v4
+117/596447/campos_512_v4
+117/596476/campos_512_v4
+117/596478/campos_512_v4
+117/596480/campos_512_v4
+117/596487/campos_512_v4
+117/596491/campos_512_v4
+117/596496/campos_512_v4
+117/596514/campos_512_v4
+117/596519/campos_512_v4
+117/596520/campos_512_v4
+117/596526/campos_512_v4
+117/596531/campos_512_v4
+117/596538/campos_512_v4
+117/596545/campos_512_v4
+117/596547/campos_512_v4
+117/596554/campos_512_v4
+117/596555/campos_512_v4
+117/596561/campos_512_v4
+117/596565/campos_512_v4
+117/596600/campos_512_v4
+117/596607/campos_512_v4
+117/596621/campos_512_v4
+117/596623/campos_512_v4
+117/596624/campos_512_v4
+117/596635/campos_512_v4
+117/596666/campos_512_v4
+117/596668/campos_512_v4
+117/596672/campos_512_v4
+117/596675/campos_512_v4
+117/596681/campos_512_v4
+117/596685/campos_512_v4
+117/596689/campos_512_v4
+117/596691/campos_512_v4
+117/596701/campos_512_v4
+117/596716/campos_512_v4
+117/596736/campos_512_v4
+117/596751/campos_512_v4
+117/596753/campos_512_v4
+117/596756/campos_512_v4
+117/596763/campos_512_v4
+117/596771/campos_512_v4
+117/596772/campos_512_v4
+117/596788/campos_512_v4
+117/596793/campos_512_v4
+117/596795/campos_512_v4
+117/596797/campos_512_v4
+117/596805/campos_512_v4
+117/596806/campos_512_v4
+117/596814/campos_512_v4
+117/596821/campos_512_v4
+117/596827/campos_512_v4
+117/596830/campos_512_v4
+117/596836/campos_512_v4
+117/596844/campos_512_v4
+117/596858/campos_512_v4
+117/596893/campos_512_v4
+117/596897/campos_512_v4
+117/596902/campos_512_v4
+117/596911/campos_512_v4
+117/596916/campos_512_v4
+117/596937/campos_512_v4
+117/596957/campos_512_v4
+117/596975/campos_512_v4
+117/596977/campos_512_v4
+117/596979/campos_512_v4
+117/596980/campos_512_v4
+117/596982/campos_512_v4
+117/596988/campos_512_v4
+117/596996/campos_512_v4
+117/597004/campos_512_v4
+117/597016/campos_512_v4
+117/597017/campos_512_v4
+117/597027/campos_512_v4
+117/597031/campos_512_v4
+117/597044/campos_512_v4
+117/597045/campos_512_v4
+117/597046/campos_512_v4
+117/597052/campos_512_v4
+117/597058/campos_512_v4
+117/597082/campos_512_v4
+117/597085/campos_512_v4
+117/597091/campos_512_v4
+117/597103/campos_512_v4
+117/597112/campos_512_v4
+117/597146/campos_512_v4
+117/597151/campos_512_v4
+117/597168/campos_512_v4
+117/597176/campos_512_v4
+117/597181/campos_512_v4
+117/597193/campos_512_v4
+117/597210/campos_512_v4
+117/597211/campos_512_v4
+117/597222/campos_512_v4
+117/597224/campos_512_v4
+117/597234/campos_512_v4
+117/597248/campos_512_v4
+117/597261/campos_512_v4
+117/597267/campos_512_v4
+117/597273/campos_512_v4
+117/597282/campos_512_v4
+117/597283/campos_512_v4
+117/597303/campos_512_v4
+117/597308/campos_512_v4
+117/597318/campos_512_v4
+117/597319/campos_512_v4
+117/597322/campos_512_v4
+117/597329/campos_512_v4
+117/597335/campos_512_v4
+117/597339/campos_512_v4
+117/597349/campos_512_v4
+117/597350/campos_512_v4
+117/597357/campos_512_v4
+117/597387/campos_512_v4
+117/597395/campos_512_v4
+117/597397/campos_512_v4
+117/597400/campos_512_v4
+117/597402/campos_512_v4
+117/597409/campos_512_v4
+117/597416/campos_512_v4
+117/597417/campos_512_v4
+117/597430/campos_512_v4
+117/597440/campos_512_v4
+117/597449/campos_512_v4
+117/597465/campos_512_v4
+117/597479/campos_512_v4
+117/597481/campos_512_v4
+117/597490/campos_512_v4
+117/597501/campos_512_v4
+117/597506/campos_512_v4
+117/597511/campos_512_v4
+117/597514/campos_512_v4
+117/597520/campos_512_v4
+117/597523/campos_512_v4
+117/597526/campos_512_v4
+117/597533/campos_512_v4
+117/597547/campos_512_v4
+117/597556/campos_512_v4
+117/597560/campos_512_v4
+117/597565/campos_512_v4
+117/597566/campos_512_v4
+117/597572/campos_512_v4
+117/597577/campos_512_v4
+117/597591/campos_512_v4
+117/597594/campos_512_v4
+117/597599/campos_512_v4
+117/597610/campos_512_v4
+117/597613/campos_512_v4
+117/597620/campos_512_v4
+117/597622/campos_512_v4
+117/597629/campos_512_v4
+117/597631/campos_512_v4
+117/597634/campos_512_v4
+117/597640/campos_512_v4
+117/597655/campos_512_v4
+117/597661/campos_512_v4
+117/597672/campos_512_v4
+117/597675/campos_512_v4
+117/597682/campos_512_v4
+117/597689/campos_512_v4
+117/597695/campos_512_v4
+117/597702/campos_512_v4
+117/597710/campos_512_v4
+117/597714/campos_512_v4
+117/597716/campos_512_v4
+117/597718/campos_512_v4
+117/597725/campos_512_v4
+117/597726/campos_512_v4
+117/597736/campos_512_v4
+117/597743/campos_512_v4
+117/597746/campos_512_v4
+117/597749/campos_512_v4
+117/597756/campos_512_v4
+117/597771/campos_512_v4
+117/597772/campos_512_v4
+117/597776/campos_512_v4
+117/597794/campos_512_v4
+117/597795/campos_512_v4
+117/597797/campos_512_v4
+117/597798/campos_512_v4
+117/597816/campos_512_v4
+117/597823/campos_512_v4
+117/597825/campos_512_v4
+117/597829/campos_512_v4
+117/597841/campos_512_v4
+117/597842/campos_512_v4
+117/597847/campos_512_v4
+117/597856/campos_512_v4
+117/597897/campos_512_v4
+117/597904/campos_512_v4
+117/597908/campos_512_v4
+117/597927/campos_512_v4
+117/597932/campos_512_v4
+117/597935/campos_512_v4
+117/597938/campos_512_v4
+117/597946/campos_512_v4
+117/597965/campos_512_v4
+117/597974/campos_512_v4
+117/597976/campos_512_v4
+117/597990/campos_512_v4
+117/598021/campos_512_v4
+117/598029/campos_512_v4
+117/598031/campos_512_v4
+117/598052/campos_512_v4
+117/598060/campos_512_v4
+117/598086/campos_512_v4
+117/598099/campos_512_v4
+117/598118/campos_512_v4
+117/598119/campos_512_v4
+117/598123/campos_512_v4
+117/598129/campos_512_v4
+117/598130/campos_512_v4
+117/598133/campos_512_v4
+117/598148/campos_512_v4
+117/598155/campos_512_v4
+117/598157/campos_512_v4
+117/598169/campos_512_v4
+117/598178/campos_512_v4
+117/598179/campos_512_v4
+117/598182/campos_512_v4
+117/598183/campos_512_v4
+117/598195/campos_512_v4
+117/598202/campos_512_v4
+117/598254/campos_512_v4
+117/598255/campos_512_v4
+117/598256/campos_512_v4
+117/598261/campos_512_v4
+117/598267/campos_512_v4
+117/598272/campos_512_v4
+117/598278/campos_512_v4
+117/598300/campos_512_v4
+117/598327/campos_512_v4
+117/598329/campos_512_v4
+117/598335/campos_512_v4
+117/598343/campos_512_v4
+117/598348/campos_512_v4
+117/598354/campos_512_v4
+117/598363/campos_512_v4
+117/598383/campos_512_v4
+117/598385/campos_512_v4
+117/598391/campos_512_v4
+117/598399/campos_512_v4
+117/598400/campos_512_v4
+117/598408/campos_512_v4
+117/598428/campos_512_v4
+117/598436/campos_512_v4
+117/598469/campos_512_v4
+117/598482/campos_512_v4
+117/598487/campos_512_v4
+117/598490/campos_512_v4
+117/598495/campos_512_v4
+117/598501/campos_512_v4
+117/598516/campos_512_v4
+117/598520/campos_512_v4
+117/598525/campos_512_v4
+117/598526/campos_512_v4
+117/598538/campos_512_v4
+117/598545/campos_512_v4
+117/598552/campos_512_v4
+117/598564/campos_512_v4
+117/598567/campos_512_v4
+117/598573/campos_512_v4
+117/598576/campos_512_v4
+117/598586/campos_512_v4
+117/598588/campos_512_v4
+117/598598/campos_512_v4
+117/598600/campos_512_v4
+117/598614/campos_512_v4
+117/598617/campos_512_v4
+117/598629/campos_512_v4
+117/598637/campos_512_v4
+117/598641/campos_512_v4
+117/598644/campos_512_v4
+117/598652/campos_512_v4
+117/598653/campos_512_v4
+117/598654/campos_512_v4
+117/598665/campos_512_v4
+117/598678/campos_512_v4
+117/598680/campos_512_v4
+117/598684/campos_512_v4
+117/598685/campos_512_v4
+117/598688/campos_512_v4
+117/598729/campos_512_v4
+117/598735/campos_512_v4
+117/598738/campos_512_v4
+117/598748/campos_512_v4
+117/598754/campos_512_v4
+117/598766/campos_512_v4
+117/598769/campos_512_v4
+117/598771/campos_512_v4
+117/598772/campos_512_v4
+117/598773/campos_512_v4
+117/598791/campos_512_v4
+117/598792/campos_512_v4
+117/598794/campos_512_v4
+117/598803/campos_512_v4
+117/598811/campos_512_v4
+117/598819/campos_512_v4
+117/598828/campos_512_v4
+117/598833/campos_512_v4
+117/598836/campos_512_v4
+117/598837/campos_512_v4
+117/598854/campos_512_v4
+117/598876/campos_512_v4
+117/598882/campos_512_v4
+117/598901/campos_512_v4
+117/598906/campos_512_v4
+117/598908/campos_512_v4
+117/598912/campos_512_v4
+117/598915/campos_512_v4
+117/598916/campos_512_v4
+117/598920/campos_512_v4
+117/598922/campos_512_v4
+117/598936/campos_512_v4
+117/598943/campos_512_v4
+117/598945/campos_512_v4
+117/598952/campos_512_v4
+117/598961/campos_512_v4
+117/598962/campos_512_v4
+117/598965/campos_512_v4
+117/598970/campos_512_v4
+117/598977/campos_512_v4
+117/598978/campos_512_v4
+117/598987/campos_512_v4
+117/598988/campos_512_v4
+117/598991/campos_512_v4
+117/598992/campos_512_v4
+117/598995/campos_512_v4
+117/598996/campos_512_v4
+117/599011/campos_512_v4
+117/599015/campos_512_v4
+117/599017/campos_512_v4
+117/599024/campos_512_v4
+117/599030/campos_512_v4
+117/599033/campos_512_v4
+117/599037/campos_512_v4
+117/599043/campos_512_v4
+117/599044/campos_512_v4
+117/599048/campos_512_v4
+117/599050/campos_512_v4
+117/599052/campos_512_v4
+117/599060/campos_512_v4
+117/599073/campos_512_v4
+117/599091/campos_512_v4
+117/599096/campos_512_v4
+117/599113/campos_512_v4
+117/599115/campos_512_v4
+117/599124/campos_512_v4
+117/599125/campos_512_v4
+117/599126/campos_512_v4
+117/599129/campos_512_v4
+117/599135/campos_512_v4
+117/599137/campos_512_v4
+117/599173/campos_512_v4
+117/599174/campos_512_v4
+117/599181/campos_512_v4
+117/599195/campos_512_v4
+117/599201/campos_512_v4
+117/599213/campos_512_v4
+117/599218/campos_512_v4
+117/599224/campos_512_v4
+117/599231/campos_512_v4
+117/599237/campos_512_v4
+117/599238/campos_512_v4
+117/599244/campos_512_v4
+117/599248/campos_512_v4
+117/599250/campos_512_v4
+117/599254/campos_512_v4
+117/599264/campos_512_v4
+117/599276/campos_512_v4
+117/599278/campos_512_v4
+117/599284/campos_512_v4
+117/599296/campos_512_v4
+117/599304/campos_512_v4
+117/599310/campos_512_v4
+117/599311/campos_512_v4
+117/599320/campos_512_v4
+117/599330/campos_512_v4
+117/599332/campos_512_v4
+117/599341/campos_512_v4
+117/599342/campos_512_v4
+117/599351/campos_512_v4
+117/599353/campos_512_v4
+117/599356/campos_512_v4
+117/599358/campos_512_v4
+117/599359/campos_512_v4
+117/599374/campos_512_v4
+117/599375/campos_512_v4
+117/599395/campos_512_v4
+117/599400/campos_512_v4
+117/599401/campos_512_v4
+117/599407/campos_512_v4
+117/599419/campos_512_v4
+117/599423/campos_512_v4
+117/599449/campos_512_v4
+117/599457/campos_512_v4
+117/599460/campos_512_v4
+117/599463/campos_512_v4
+117/599467/campos_512_v4
+117/599472/campos_512_v4
+117/599474/campos_512_v4
+117/599481/campos_512_v4
+117/599506/campos_512_v4
+117/599525/campos_512_v4
+117/599526/campos_512_v4
+117/599534/campos_512_v4
+117/599545/campos_512_v4
+117/599558/campos_512_v4
+117/599561/campos_512_v4
+117/599562/campos_512_v4
+117/599563/campos_512_v4
+117/599578/campos_512_v4
+117/599579/campos_512_v4
+117/599585/campos_512_v4
+117/599586/campos_512_v4
+117/599588/campos_512_v4
+117/599589/campos_512_v4
+117/599591/campos_512_v4
+117/599607/campos_512_v4
+117/599608/campos_512_v4
+117/599610/campos_512_v4
+117/599624/campos_512_v4
+117/599631/campos_512_v4
+117/599632/campos_512_v4
+117/599641/campos_512_v4
+117/599642/campos_512_v4
+117/599646/campos_512_v4
+117/599647/campos_512_v4
+117/599652/campos_512_v4
+117/599654/campos_512_v4
+117/599657/campos_512_v4
+117/599662/campos_512_v4
+117/599667/campos_512_v4
+117/599670/campos_512_v4
+117/599675/campos_512_v4
+117/599678/campos_512_v4
+117/599680/campos_512_v4
+117/599681/campos_512_v4
+117/599687/campos_512_v4
+117/599694/campos_512_v4
+117/599697/campos_512_v4
+117/599704/campos_512_v4
+117/599705/campos_512_v4
+117/599709/campos_512_v4
+117/599717/campos_512_v4
+117/599730/campos_512_v4
+117/599741/campos_512_v4
+117/599742/campos_512_v4
+117/599754/campos_512_v4
+117/599757/campos_512_v4
+117/599769/campos_512_v4
+117/599776/campos_512_v4
+117/599781/campos_512_v4
+117/599786/campos_512_v4
+117/599791/campos_512_v4
+117/599794/campos_512_v4
+117/599809/campos_512_v4
+117/599812/campos_512_v4
+117/599815/campos_512_v4
+117/599822/campos_512_v4
+117/599823/campos_512_v4
+117/599824/campos_512_v4
+117/599832/campos_512_v4
+117/599838/campos_512_v4
+117/599844/campos_512_v4
+117/599846/campos_512_v4
+117/599849/campos_512_v4
+117/599850/campos_512_v4
+117/599852/campos_512_v4
+117/599858/campos_512_v4
+117/599885/campos_512_v4
+117/599888/campos_512_v4
+117/599889/campos_512_v4
+117/599895/campos_512_v4
+117/599897/campos_512_v4
+117/599904/campos_512_v4
+117/599917/campos_512_v4
+117/599921/campos_512_v4
+117/599925/campos_512_v4
+117/599934/campos_512_v4
+117/599944/campos_512_v4
+117/599949/campos_512_v4
+117/599964/campos_512_v4
+117/599974/campos_512_v4
+117/600001/campos_512_v4
+118/600009/campos_512_v4
+118/600011/campos_512_v4
+118/600017/campos_512_v4
+118/600031/campos_512_v4
+118/600032/campos_512_v4
+118/600035/campos_512_v4
+118/600036/campos_512_v4
+118/600048/campos_512_v4
+118/600050/campos_512_v4
+118/600052/campos_512_v4
+118/600071/campos_512_v4
+118/600073/campos_512_v4
+118/600101/campos_512_v4
+118/600103/campos_512_v4
+118/600110/campos_512_v4
+118/600111/campos_512_v4
+118/600117/campos_512_v4
+118/600119/campos_512_v4
+118/600122/campos_512_v4
+118/600131/campos_512_v4
+118/600136/campos_512_v4
+118/600143/campos_512_v4
+118/600150/campos_512_v4
+118/600153/campos_512_v4
+118/600159/campos_512_v4
+118/600184/campos_512_v4
+118/600185/campos_512_v4
+118/600189/campos_512_v4
+118/600191/campos_512_v4
+118/600193/campos_512_v4
+118/600196/campos_512_v4
+118/600202/campos_512_v4
+118/600204/campos_512_v4
+118/600214/campos_512_v4
+118/600217/campos_512_v4
+118/600234/campos_512_v4
+118/600260/campos_512_v4
+118/600276/campos_512_v4
+118/600293/campos_512_v4
+118/600295/campos_512_v4
+118/600303/campos_512_v4
+118/600314/campos_512_v4
+118/600316/campos_512_v4
+118/600319/campos_512_v4
+118/600326/campos_512_v4
+118/600330/campos_512_v4
+118/600331/campos_512_v4
+118/600338/campos_512_v4
+118/600340/campos_512_v4
+118/600349/campos_512_v4
+118/600350/campos_512_v4
+118/600353/campos_512_v4
+118/600359/campos_512_v4
+118/600369/campos_512_v4
+118/600373/campos_512_v4
+118/600380/campos_512_v4
+118/600382/campos_512_v4
+118/600386/campos_512_v4
+118/600388/campos_512_v4
+118/600402/campos_512_v4
+118/600432/campos_512_v4
+118/600448/campos_512_v4
+118/600449/campos_512_v4
+118/600454/campos_512_v4
+118/600461/campos_512_v4
+118/600465/campos_512_v4
+118/600475/campos_512_v4
+118/600492/campos_512_v4
+118/600504/campos_512_v4
+118/600522/campos_512_v4
+118/600533/campos_512_v4
+118/600544/campos_512_v4
+118/600545/campos_512_v4
+118/600556/campos_512_v4
+118/600558/campos_512_v4
+118/600579/campos_512_v4
+118/600582/campos_512_v4
+118/600584/campos_512_v4
+118/600603/campos_512_v4
+118/600607/campos_512_v4
+118/600611/campos_512_v4
+118/600614/campos_512_v4
+118/600626/campos_512_v4
+118/600644/campos_512_v4
+118/600649/campos_512_v4
+118/600651/campos_512_v4
+118/600656/campos_512_v4
+118/600658/campos_512_v4
+118/600669/campos_512_v4
+118/600672/campos_512_v4
+118/600686/campos_512_v4
+118/600705/campos_512_v4
+118/600706/campos_512_v4
+118/600717/campos_512_v4
+118/600728/campos_512_v4
+118/600749/campos_512_v4
+118/600756/campos_512_v4
+118/600760/campos_512_v4
+118/600764/campos_512_v4
+118/600772/campos_512_v4
+118/600799/campos_512_v4
+118/600803/campos_512_v4
+118/600810/campos_512_v4
+118/600816/campos_512_v4
+118/600832/campos_512_v4
+118/600834/campos_512_v4
+118/600836/campos_512_v4
+118/600844/campos_512_v4
+118/600863/campos_512_v4
+118/600888/campos_512_v4
+118/600891/campos_512_v4
+118/600896/campos_512_v4
+118/600898/campos_512_v4
+118/600899/campos_512_v4
+118/600904/campos_512_v4
+118/600908/campos_512_v4
+118/600916/campos_512_v4
+118/600929/campos_512_v4
+118/600930/campos_512_v4
+118/600941/campos_512_v4
+118/600949/campos_512_v4
+118/600952/campos_512_v4
+118/600964/campos_512_v4
+118/600965/campos_512_v4
+118/600968/campos_512_v4
+118/600971/campos_512_v4
+118/600975/campos_512_v4
+118/600980/campos_512_v4
+118/600987/campos_512_v4
+118/600998/campos_512_v4
+118/601009/campos_512_v4
+118/601034/campos_512_v4
+118/601035/campos_512_v4
+118/601036/campos_512_v4
+118/601041/campos_512_v4
+118/601046/campos_512_v4
+118/601052/campos_512_v4
+118/601058/campos_512_v4
+118/601078/campos_512_v4
+118/601080/campos_512_v4
+118/601097/campos_512_v4
+118/601098/campos_512_v4
+118/601101/campos_512_v4
+118/601104/campos_512_v4
+118/601105/campos_512_v4
+118/601106/campos_512_v4
+118/601107/campos_512_v4
+118/601110/campos_512_v4
+118/601111/campos_512_v4
+118/601126/campos_512_v4
+118/601128/campos_512_v4
+118/601131/campos_512_v4
+118/601132/campos_512_v4
+118/601137/campos_512_v4
+118/601140/campos_512_v4
+118/601141/campos_512_v4
+118/601142/campos_512_v4
+118/601147/campos_512_v4
+118/601152/campos_512_v4
+118/601163/campos_512_v4
+118/601167/campos_512_v4
+118/601171/campos_512_v4
+118/601175/campos_512_v4
+118/601176/campos_512_v4
+118/601179/campos_512_v4
+118/601181/campos_512_v4
+118/601185/campos_512_v4
+118/601187/campos_512_v4
+118/601197/campos_512_v4
+118/601199/campos_512_v4
+118/601209/campos_512_v4
+118/601214/campos_512_v4
+118/601221/campos_512_v4
+118/601222/campos_512_v4
+118/601229/campos_512_v4
+118/601233/campos_512_v4
+118/601234/campos_512_v4
+118/601242/campos_512_v4
+118/601245/campos_512_v4
+118/601255/campos_512_v4
+118/601258/campos_512_v4
+118/601265/campos_512_v4
+118/601270/campos_512_v4
+118/601271/campos_512_v4
+118/601272/campos_512_v4
+118/601274/campos_512_v4
+118/601275/campos_512_v4
+118/601278/campos_512_v4
+118/601279/campos_512_v4
+118/601291/campos_512_v4
+118/601295/campos_512_v4
+118/601296/campos_512_v4
+118/601303/campos_512_v4
+118/601309/campos_512_v4
+118/601310/campos_512_v4
+118/601315/campos_512_v4
+118/601325/campos_512_v4
+118/601331/campos_512_v4
+118/601344/campos_512_v4
+118/601347/campos_512_v4
+118/601355/campos_512_v4
+118/601380/campos_512_v4
+118/601393/campos_512_v4
+118/601395/campos_512_v4
+118/601444/campos_512_v4
+118/601451/campos_512_v4
+118/601456/campos_512_v4
+118/601458/campos_512_v4
+118/601483/campos_512_v4
+118/601489/campos_512_v4
+118/601490/campos_512_v4
+118/601491/campos_512_v4
+118/601509/campos_512_v4
+118/601512/campos_512_v4
+118/601513/campos_512_v4
+118/601517/campos_512_v4
+118/601521/campos_512_v4
+118/601531/campos_512_v4
+118/601546/campos_512_v4
+118/601547/campos_512_v4
+118/601549/campos_512_v4
+118/601551/campos_512_v4
+118/601557/campos_512_v4
+118/601572/campos_512_v4
+118/601573/campos_512_v4
+118/601579/campos_512_v4
+118/601581/campos_512_v4
+118/601592/campos_512_v4
+118/601595/campos_512_v4
+118/601603/campos_512_v4
+118/601606/campos_512_v4
+118/601609/campos_512_v4
+118/601614/campos_512_v4
+118/601636/campos_512_v4
+118/601665/campos_512_v4
+118/601692/campos_512_v4
+118/601700/campos_512_v4
+118/601713/campos_512_v4
+118/601725/campos_512_v4
+118/601733/campos_512_v4
+118/601743/campos_512_v4
+118/601765/campos_512_v4
+118/601768/campos_512_v4
+118/601798/campos_512_v4
+118/601803/campos_512_v4
+118/601823/campos_512_v4
+118/601829/campos_512_v4
+118/601830/campos_512_v4
+118/601854/campos_512_v4
+118/601858/campos_512_v4
+118/601872/campos_512_v4
+118/601888/campos_512_v4
+118/601894/campos_512_v4
+118/601895/campos_512_v4
+118/601913/campos_512_v4
+118/601918/campos_512_v4
+118/601927/campos_512_v4
+118/601961/campos_512_v4
+118/601971/campos_512_v4
+118/601980/campos_512_v4
+118/601983/campos_512_v4
+118/602006/campos_512_v4
+118/602012/campos_512_v4
+118/602013/campos_512_v4
+118/602028/campos_512_v4
+118/602045/campos_512_v4
+118/602046/campos_512_v4
+118/602050/campos_512_v4
+118/602053/campos_512_v4
+118/602060/campos_512_v4
+118/602063/campos_512_v4
+118/602068/campos_512_v4
+118/602089/campos_512_v4
+118/602090/campos_512_v4
+118/602104/campos_512_v4
+118/602105/campos_512_v4
+118/602106/campos_512_v4
+118/602115/campos_512_v4
+118/602123/campos_512_v4
+118/602125/campos_512_v4
+118/602136/campos_512_v4
+118/602142/campos_512_v4
+118/602154/campos_512_v4
+118/602157/campos_512_v4
+118/602183/campos_512_v4
+118/602187/campos_512_v4
+118/602212/campos_512_v4
+118/602217/campos_512_v4
+118/602218/campos_512_v4
+118/602224/campos_512_v4
+118/602229/campos_512_v4
+118/602231/campos_512_v4
+118/602236/campos_512_v4
+118/602240/campos_512_v4
+118/602241/campos_512_v4
+118/602256/campos_512_v4
+118/602257/campos_512_v4
+118/602262/campos_512_v4
+118/602272/campos_512_v4
+118/602290/campos_512_v4
+118/602293/campos_512_v4
+118/602304/campos_512_v4
+118/602307/campos_512_v4
+118/602311/campos_512_v4
+118/602314/campos_512_v4
+118/602320/campos_512_v4
+118/602324/campos_512_v4
+118/602325/campos_512_v4
+118/602336/campos_512_v4
+118/602340/campos_512_v4
+118/602352/campos_512_v4
+118/602361/campos_512_v4
+118/602370/campos_512_v4
+118/602374/campos_512_v4
+118/602377/campos_512_v4
+118/602386/campos_512_v4
+118/602390/campos_512_v4
+118/602407/campos_512_v4
+118/602412/campos_512_v4
+118/602415/campos_512_v4
+118/602444/campos_512_v4
+118/602468/campos_512_v4
+118/602473/campos_512_v4
+118/602474/campos_512_v4
+118/602481/campos_512_v4
+118/602482/campos_512_v4
+118/602487/campos_512_v4
+118/602494/campos_512_v4
+118/602495/campos_512_v4
+118/602504/campos_512_v4
+118/602516/campos_512_v4
+118/602518/campos_512_v4
+118/602526/campos_512_v4
+118/602527/campos_512_v4
+118/602529/campos_512_v4
+118/602530/campos_512_v4
+118/602547/campos_512_v4
+118/602553/campos_512_v4
+118/602558/campos_512_v4
+118/602576/campos_512_v4
+118/602577/campos_512_v4
+118/602580/campos_512_v4
+118/602582/campos_512_v4
+118/602592/campos_512_v4
+118/602594/campos_512_v4
+118/602605/campos_512_v4
+118/602612/campos_512_v4
+118/602614/campos_512_v4
+118/602625/campos_512_v4
+118/602636/campos_512_v4
+118/602641/campos_512_v4
+118/602643/campos_512_v4
+118/602644/campos_512_v4
+118/602651/campos_512_v4
+118/602662/campos_512_v4
+118/602664/campos_512_v4
+118/602666/campos_512_v4
+118/602667/campos_512_v4
+118/602671/campos_512_v4
+118/602681/campos_512_v4
+118/602691/campos_512_v4
+118/602701/campos_512_v4
+118/602707/campos_512_v4
+118/602711/campos_512_v4
+118/602715/campos_512_v4
+118/602717/campos_512_v4
+118/602733/campos_512_v4
+118/602751/campos_512_v4
+118/602754/campos_512_v4
+118/602759/campos_512_v4
+118/602764/campos_512_v4
+118/602804/campos_512_v4
+118/602807/campos_512_v4
+118/602817/campos_512_v4
+118/602826/campos_512_v4
+118/602828/campos_512_v4
+118/602838/campos_512_v4
+118/602860/campos_512_v4
+118/602862/campos_512_v4
+118/602887/campos_512_v4
+118/602921/campos_512_v4
+118/602930/campos_512_v4
+118/602931/campos_512_v4
+118/602945/campos_512_v4
+118/602950/campos_512_v4
+118/602951/campos_512_v4
+118/602953/campos_512_v4
+118/602958/campos_512_v4
+118/602991/campos_512_v4
+118/603000/campos_512_v4
+118/603004/campos_512_v4
+118/603005/campos_512_v4
+118/603012/campos_512_v4
+118/603031/campos_512_v4
+118/603038/campos_512_v4
+118/603042/campos_512_v4
+118/603048/campos_512_v4
+118/603062/campos_512_v4
+118/603074/campos_512_v4
+118/603077/campos_512_v4
+118/603086/campos_512_v4
+118/603088/campos_512_v4
+118/603105/campos_512_v4
+118/603109/campos_512_v4
+118/603124/campos_512_v4
+118/603142/campos_512_v4
+118/603153/campos_512_v4
+118/603154/campos_512_v4
+118/603157/campos_512_v4
+118/603158/campos_512_v4
+118/603159/campos_512_v4
+118/603165/campos_512_v4
+118/603168/campos_512_v4
+118/603169/campos_512_v4
+118/603175/campos_512_v4
+118/603183/campos_512_v4
+118/603190/campos_512_v4
+118/603199/campos_512_v4
+118/603201/campos_512_v4
+118/603207/campos_512_v4
+118/603211/campos_512_v4
+118/603218/campos_512_v4
+118/603230/campos_512_v4
+118/603231/campos_512_v4
+118/603234/campos_512_v4
+118/603254/campos_512_v4
+118/603256/campos_512_v4
+118/603257/campos_512_v4
+118/603259/campos_512_v4
+118/603269/campos_512_v4
+118/603272/campos_512_v4
+118/603275/campos_512_v4
+118/603284/campos_512_v4
+118/603290/campos_512_v4
+118/603294/campos_512_v4
+118/603308/campos_512_v4
+118/603311/campos_512_v4
+118/603318/campos_512_v4
+118/603322/campos_512_v4
+118/603329/campos_512_v4
+118/603331/campos_512_v4
+118/603337/campos_512_v4
+118/603342/campos_512_v4
+118/603369/campos_512_v4
+118/603372/campos_512_v4
+118/603375/campos_512_v4
+118/603376/campos_512_v4
+118/603380/campos_512_v4
+118/603383/campos_512_v4
+118/603385/campos_512_v4
+118/603388/campos_512_v4
+118/603392/campos_512_v4
+118/603393/campos_512_v4
+118/603396/campos_512_v4
+118/603397/campos_512_v4
+118/603398/campos_512_v4
+118/603415/campos_512_v4
+118/603420/campos_512_v4
+118/603423/campos_512_v4
+118/603425/campos_512_v4
+118/603433/campos_512_v4
+118/603435/campos_512_v4
+118/603436/campos_512_v4
+118/603440/campos_512_v4
+118/603446/campos_512_v4
+118/603450/campos_512_v4
+118/603461/campos_512_v4
+118/603470/campos_512_v4
+118/603472/campos_512_v4
+118/603481/campos_512_v4
+118/603482/campos_512_v4
+118/603504/campos_512_v4
+118/603518/campos_512_v4
+118/603543/campos_512_v4
+118/603562/campos_512_v4
+118/603566/campos_512_v4
+118/603574/campos_512_v4
+118/603583/campos_512_v4
+118/603588/campos_512_v4
+118/603590/campos_512_v4
+118/603597/campos_512_v4
+118/603598/campos_512_v4
+118/603630/campos_512_v4
+118/603631/campos_512_v4
+118/603638/campos_512_v4
+118/603639/campos_512_v4
+118/603646/campos_512_v4
+118/603647/campos_512_v4
+118/603696/campos_512_v4
+118/603702/campos_512_v4
+118/603703/campos_512_v4
+118/603724/campos_512_v4
+118/603729/campos_512_v4
+118/603739/campos_512_v4
+118/603758/campos_512_v4
+118/603762/campos_512_v4
+118/603765/campos_512_v4
+118/603771/campos_512_v4
+118/603779/campos_512_v4
+118/603785/campos_512_v4
+118/603806/campos_512_v4
+118/603816/campos_512_v4
+118/603829/campos_512_v4
+118/603835/campos_512_v4
+118/603846/campos_512_v4
+118/603847/campos_512_v4
+118/603852/campos_512_v4
+118/603853/campos_512_v4
+118/603862/campos_512_v4
+118/603872/campos_512_v4
+118/603873/campos_512_v4
+118/603874/campos_512_v4
+118/603876/campos_512_v4
+118/603908/campos_512_v4
+118/603914/campos_512_v4
+118/603931/campos_512_v4
+118/603942/campos_512_v4
+118/603956/campos_512_v4
+118/603960/campos_512_v4
+118/603961/campos_512_v4
+118/603982/campos_512_v4
+118/604005/campos_512_v4
+118/604021/campos_512_v4
+118/604022/campos_512_v4
+118/604033/campos_512_v4
+118/604034/campos_512_v4
+118/604036/campos_512_v4
+118/604039/campos_512_v4
+118/604066/campos_512_v4
+118/604078/campos_512_v4
+118/604088/campos_512_v4
+118/604107/campos_512_v4
+118/604115/campos_512_v4
+118/604119/campos_512_v4
+118/604121/campos_512_v4
+118/604133/campos_512_v4
+118/604134/campos_512_v4
+118/604135/campos_512_v4
+118/604148/campos_512_v4
+118/604150/campos_512_v4
+118/604172/campos_512_v4
+118/604179/campos_512_v4
+118/604180/campos_512_v4
+118/604185/campos_512_v4
+118/604189/campos_512_v4
+118/604203/campos_512_v4
+118/604229/campos_512_v4
+118/604237/campos_512_v4
+118/604252/campos_512_v4
+118/604257/campos_512_v4
+118/604259/campos_512_v4
+118/604264/campos_512_v4
+118/604265/campos_512_v4
+118/604273/campos_512_v4
+118/604274/campos_512_v4
+118/604277/campos_512_v4
+118/604285/campos_512_v4
+118/604297/campos_512_v4
+118/604301/campos_512_v4
+118/604302/campos_512_v4
+118/604305/campos_512_v4
+118/604307/campos_512_v4
+118/604315/campos_512_v4
+118/604322/campos_512_v4
+118/604326/campos_512_v4
+118/604328/campos_512_v4
+118/604336/campos_512_v4
+118/604353/campos_512_v4
+118/604360/campos_512_v4
+118/604361/campos_512_v4
+118/604364/campos_512_v4
+118/604370/campos_512_v4
+118/604373/campos_512_v4
+118/604379/campos_512_v4
+118/604381/campos_512_v4
+118/604384/campos_512_v4
+118/604385/campos_512_v4
+118/604386/campos_512_v4
+118/604396/campos_512_v4
+118/604400/campos_512_v4
+118/604404/campos_512_v4
+118/604407/campos_512_v4
+118/604408/campos_512_v4
+118/604413/campos_512_v4
+118/604425/campos_512_v4
+118/604441/campos_512_v4
+118/604447/campos_512_v4
+118/604461/campos_512_v4
+118/604468/campos_512_v4
+118/604471/campos_512_v4
+118/604484/campos_512_v4
+118/604487/campos_512_v4
+118/604507/campos_512_v4
+118/604512/campos_512_v4
+118/604526/campos_512_v4
+118/604537/campos_512_v4
+118/604541/campos_512_v4
+118/604543/campos_512_v4
+118/604551/campos_512_v4
+118/604563/campos_512_v4
+118/604570/campos_512_v4
+118/604590/campos_512_v4
+118/604605/campos_512_v4
+118/604608/campos_512_v4
+118/604611/campos_512_v4
+118/604614/campos_512_v4
+118/604617/campos_512_v4
+118/604629/campos_512_v4
+118/604634/campos_512_v4
+118/604635/campos_512_v4
+118/604645/campos_512_v4
+118/604652/campos_512_v4
+118/604661/campos_512_v4
+118/604667/campos_512_v4
+118/604668/campos_512_v4
+118/604669/campos_512_v4
+118/604676/campos_512_v4
+118/604688/campos_512_v4
+118/604693/campos_512_v4
+118/604694/campos_512_v4
+118/604695/campos_512_v4
+118/604698/campos_512_v4
+118/604730/campos_512_v4
+118/604731/campos_512_v4
+118/604757/campos_512_v4
+118/604759/campos_512_v4
+118/604766/campos_512_v4
+118/604776/campos_512_v4
+118/604782/campos_512_v4
+118/604783/campos_512_v4
+118/604791/campos_512_v4
+118/604798/campos_512_v4
+118/604799/campos_512_v4
+118/604802/campos_512_v4
+118/604803/campos_512_v4
+118/604807/campos_512_v4
+118/604813/campos_512_v4
+118/604823/campos_512_v4
+118/604853/campos_512_v4
+118/604854/campos_512_v4
+118/604856/campos_512_v4
+118/604860/campos_512_v4
+118/604880/campos_512_v4
+118/604897/campos_512_v4
+118/604899/campos_512_v4
+118/604907/campos_512_v4
+118/604916/campos_512_v4
+118/604917/campos_512_v4
+118/604927/campos_512_v4
+118/604935/campos_512_v4
+118/604971/campos_512_v4
+118/604974/campos_512_v4
+118/604977/campos_512_v4
+118/604992/campos_512_v4
+118/604993/campos_512_v4
+119/605022/campos_512_v4
+119/605032/campos_512_v4
+119/605038/campos_512_v4
+119/605040/campos_512_v4
+119/605042/campos_512_v4
+119/605044/campos_512_v4
+119/605045/campos_512_v4
+119/605049/campos_512_v4
+119/605057/campos_512_v4
+119/605069/campos_512_v4
+119/605077/campos_512_v4
+119/605081/campos_512_v4
+119/605085/campos_512_v4
+119/605087/campos_512_v4
+119/605090/campos_512_v4
+119/605091/campos_512_v4
+119/605106/campos_512_v4
+119/605108/campos_512_v4
+119/605110/campos_512_v4
+119/605113/campos_512_v4
+119/605123/campos_512_v4
+119/605129/campos_512_v4
+119/605148/campos_512_v4
+119/605152/campos_512_v4
+119/605153/campos_512_v4
+119/605157/campos_512_v4
+119/605158/campos_512_v4
+119/605162/campos_512_v4
+119/605163/campos_512_v4
+119/605168/campos_512_v4
+119/605174/campos_512_v4
+119/605183/campos_512_v4
+119/605184/campos_512_v4
+119/605191/campos_512_v4
+119/605192/campos_512_v4
+119/605193/campos_512_v4
+119/605196/campos_512_v4
+119/605207/campos_512_v4
+119/605214/campos_512_v4
+119/605234/campos_512_v4
+119/605249/campos_512_v4
+119/605279/campos_512_v4
+119/605294/campos_512_v4
+119/605297/campos_512_v4
+119/605305/campos_512_v4
+119/605306/campos_512_v4
+119/605313/campos_512_v4
+119/605316/campos_512_v4
+119/605327/campos_512_v4
+119/605347/campos_512_v4
+119/605352/campos_512_v4
+119/605357/campos_512_v4
+119/605358/campos_512_v4
+119/605360/campos_512_v4
+119/605371/campos_512_v4
+119/605386/campos_512_v4
+119/605402/campos_512_v4
+119/605404/campos_512_v4
+119/605406/campos_512_v4
+119/605414/campos_512_v4
+119/605417/campos_512_v4
+119/605443/campos_512_v4
+119/605452/campos_512_v4
+119/605459/campos_512_v4
+119/605461/campos_512_v4
+119/605463/campos_512_v4
+119/605478/campos_512_v4
+119/605481/campos_512_v4
+119/605486/campos_512_v4
+119/605493/campos_512_v4
+119/605496/campos_512_v4
+119/605498/campos_512_v4
+119/605499/campos_512_v4
+119/605500/campos_512_v4
+119/605503/campos_512_v4
+119/605521/campos_512_v4
+119/605523/campos_512_v4
+119/605527/campos_512_v4
+119/605529/campos_512_v4
+119/605545/campos_512_v4
+119/605568/campos_512_v4
+119/605571/campos_512_v4
+119/605573/campos_512_v4
+119/605580/campos_512_v4
+119/605584/campos_512_v4
+119/605586/campos_512_v4
+119/605588/campos_512_v4
+119/605593/campos_512_v4
+119/605607/campos_512_v4
+119/605612/campos_512_v4
+119/605624/campos_512_v4
+119/605626/campos_512_v4
+119/605630/campos_512_v4
+119/605642/campos_512_v4
+119/605653/campos_512_v4
+119/605686/campos_512_v4
+119/605688/campos_512_v4
+119/605694/campos_512_v4
+119/605707/campos_512_v4
+119/605708/campos_512_v4
+119/605716/campos_512_v4
+119/605729/campos_512_v4
+119/605734/campos_512_v4
+119/605742/campos_512_v4
+119/605745/campos_512_v4
+119/605749/campos_512_v4
+119/605752/campos_512_v4
+119/605757/campos_512_v4
+119/605761/campos_512_v4
+119/605763/campos_512_v4
+119/605769/campos_512_v4
+119/605776/campos_512_v4
+119/605791/campos_512_v4
+119/605796/campos_512_v4
+119/605814/campos_512_v4
+119/605820/campos_512_v4
+119/605822/campos_512_v4
+119/605839/campos_512_v4
+119/605840/campos_512_v4
+119/605845/campos_512_v4
+119/605848/campos_512_v4
+119/605869/campos_512_v4
+119/605873/campos_512_v4
+119/605881/campos_512_v4
+119/605905/campos_512_v4
+119/605906/campos_512_v4
+119/605910/campos_512_v4
+119/605913/campos_512_v4
+119/605922/campos_512_v4
+119/605923/campos_512_v4
+119/605926/campos_512_v4
+119/605928/campos_512_v4
+119/605930/campos_512_v4
+119/605944/campos_512_v4
+119/605949/campos_512_v4
+119/605969/campos_512_v4
+119/605972/campos_512_v4
+119/605981/campos_512_v4
+119/605982/campos_512_v4
+119/605990/campos_512_v4
+119/605993/campos_512_v4
+119/605995/campos_512_v4
+119/605997/campos_512_v4
+119/605999/campos_512_v4
+119/606014/campos_512_v4
+119/606022/campos_512_v4
+119/606026/campos_512_v4
+119/606033/campos_512_v4
+119/606035/campos_512_v4
+119/606045/campos_512_v4
+119/606048/campos_512_v4
+119/606057/campos_512_v4
+119/606062/campos_512_v4
+119/606075/campos_512_v4
+119/606093/campos_512_v4
+119/606100/campos_512_v4
+119/606104/campos_512_v4
+119/606113/campos_512_v4
+119/606121/campos_512_v4
+119/606122/campos_512_v4
+119/606135/campos_512_v4
+119/606147/campos_512_v4
+119/606152/campos_512_v4
+119/606158/campos_512_v4
+119/606166/campos_512_v4
+119/606172/campos_512_v4
+119/606176/campos_512_v4
+119/606178/campos_512_v4
+119/606208/campos_512_v4
+119/606214/campos_512_v4
+119/606218/campos_512_v4
+119/606221/campos_512_v4
+119/606225/campos_512_v4
+119/606227/campos_512_v4
+119/606232/campos_512_v4
+119/606244/campos_512_v4
+119/606250/campos_512_v4
+119/606264/campos_512_v4
+119/606267/campos_512_v4
+119/606268/campos_512_v4
+119/606283/campos_512_v4
+119/606295/campos_512_v4
+119/606297/campos_512_v4
+119/606304/campos_512_v4
+119/606308/campos_512_v4
+119/606317/campos_512_v4
+119/606340/campos_512_v4
+119/606342/campos_512_v4
+119/606368/campos_512_v4
+119/606379/campos_512_v4
+119/606382/campos_512_v4
+119/606392/campos_512_v4
+119/606399/campos_512_v4
+119/606416/campos_512_v4
+119/606422/campos_512_v4
+119/606435/campos_512_v4
+119/606446/campos_512_v4
+119/606460/campos_512_v4
+119/606464/campos_512_v4
+119/606472/campos_512_v4
+119/606476/campos_512_v4
+119/606477/campos_512_v4
+119/606482/campos_512_v4
+119/606483/campos_512_v4
+119/606484/campos_512_v4
+119/606499/campos_512_v4
+119/606501/campos_512_v4
+119/606505/campos_512_v4
+119/606508/campos_512_v4
+119/606527/campos_512_v4
+119/606537/campos_512_v4
+119/606544/campos_512_v4
+119/606545/campos_512_v4
+119/606550/campos_512_v4
+119/606551/campos_512_v4
+119/606559/campos_512_v4
+119/606565/campos_512_v4
+119/606571/campos_512_v4
+119/606572/campos_512_v4
+119/606583/campos_512_v4
+119/606586/campos_512_v4
+119/606595/campos_512_v4
+119/606605/campos_512_v4
+119/606607/campos_512_v4
+119/606611/campos_512_v4
+119/606622/campos_512_v4
+119/606639/campos_512_v4
+119/606642/campos_512_v4
+119/606688/campos_512_v4
+119/606690/campos_512_v4
+119/606707/campos_512_v4
+119/606709/campos_512_v4
+119/606717/campos_512_v4
+119/606721/campos_512_v4
+119/606734/campos_512_v4
+119/606736/campos_512_v4
+119/606739/campos_512_v4
+119/606759/campos_512_v4
+119/606765/campos_512_v4
+119/606770/campos_512_v4
+119/606772/campos_512_v4
+119/606776/campos_512_v4
+119/606777/campos_512_v4
+119/606782/campos_512_v4
+119/606801/campos_512_v4
+119/606811/campos_512_v4
+119/606820/campos_512_v4
+119/606828/campos_512_v4
+119/606838/campos_512_v4
+119/606840/campos_512_v4
+119/606856/campos_512_v4
+119/606874/campos_512_v4
+119/606876/campos_512_v4
+119/606877/campos_512_v4
+119/606895/campos_512_v4
+119/606902/campos_512_v4
+119/606903/campos_512_v4
+119/606920/campos_512_v4
+119/606929/campos_512_v4
+119/606938/campos_512_v4
+119/606945/campos_512_v4
+119/606960/campos_512_v4
+119/606962/campos_512_v4
+119/606967/campos_512_v4
+119/606982/campos_512_v4
+119/606991/campos_512_v4
+119/606996/campos_512_v4
+119/607012/campos_512_v4
+119/607018/campos_512_v4
+119/607019/campos_512_v4
+119/607032/campos_512_v4
+119/607043/campos_512_v4
+119/607051/campos_512_v4
+119/607054/campos_512_v4
+119/607067/campos_512_v4
+119/607068/campos_512_v4
+119/607097/campos_512_v4
+119/607105/campos_512_v4
+119/607110/campos_512_v4
+119/607115/campos_512_v4
+119/607119/campos_512_v4
+119/607127/campos_512_v4
+119/607137/campos_512_v4
+119/607139/campos_512_v4
+119/607140/campos_512_v4
+119/607144/campos_512_v4
+119/607150/campos_512_v4
+119/607157/campos_512_v4
+119/607169/campos_512_v4
+119/607188/campos_512_v4
+119/607191/campos_512_v4
+119/607201/campos_512_v4
+119/607207/campos_512_v4
+119/607208/campos_512_v4
+119/607213/campos_512_v4
+119/607217/campos_512_v4
+119/607222/campos_512_v4
+119/607228/campos_512_v4
+119/607242/campos_512_v4
+119/607244/campos_512_v4
+119/607248/campos_512_v4
+119/607252/campos_512_v4
+119/607265/campos_512_v4
+119/607269/campos_512_v4
+119/607270/campos_512_v4
+119/607275/campos_512_v4
+119/607276/campos_512_v4
+119/607280/campos_512_v4
+119/607283/campos_512_v4
+119/607284/campos_512_v4
+119/607287/campos_512_v4
+119/607288/campos_512_v4
+119/607310/campos_512_v4
+119/607311/campos_512_v4
+119/607319/campos_512_v4
+119/607337/campos_512_v4
+119/607344/campos_512_v4
+119/607360/campos_512_v4
+119/607366/campos_512_v4
+119/607367/campos_512_v4
+119/607381/campos_512_v4
+119/607383/campos_512_v4
+119/607405/campos_512_v4
+119/607408/campos_512_v4
+119/607413/campos_512_v4
+119/607417/campos_512_v4
+119/607442/campos_512_v4
+119/607448/campos_512_v4
+119/607462/campos_512_v4
+119/607473/campos_512_v4
+119/607500/campos_512_v4
+119/607518/campos_512_v4
+119/607521/campos_512_v4
+119/607527/campos_512_v4
+119/607531/campos_512_v4
+119/607535/campos_512_v4
+119/607545/campos_512_v4
+119/607561/campos_512_v4
+119/607570/campos_512_v4
+119/607583/campos_512_v4
+119/607592/campos_512_v4
+119/607610/campos_512_v4
+119/607611/campos_512_v4
+119/607612/campos_512_v4
+119/607632/campos_512_v4
+119/607637/campos_512_v4
+119/607640/campos_512_v4
+119/607651/campos_512_v4
+119/607652/campos_512_v4
+119/607660/campos_512_v4
+119/607673/campos_512_v4
+119/607676/campos_512_v4
+119/607681/campos_512_v4
+119/607686/campos_512_v4
+119/607717/campos_512_v4
+119/607721/campos_512_v4
+119/607728/campos_512_v4
+119/607734/campos_512_v4
+119/607735/campos_512_v4
+119/607742/campos_512_v4
+119/607756/campos_512_v4
+119/607777/campos_512_v4
+119/607778/campos_512_v4
+119/607820/campos_512_v4
+119/607827/campos_512_v4
+119/607832/campos_512_v4
+119/607836/campos_512_v4
+119/607837/campos_512_v4
+119/607845/campos_512_v4
+119/607848/campos_512_v4
+119/607856/campos_512_v4
+119/607864/campos_512_v4
+119/607871/campos_512_v4
+119/607884/campos_512_v4
+119/607890/campos_512_v4
+119/607893/campos_512_v4
+119/607914/campos_512_v4
+119/607916/campos_512_v4
+119/607917/campos_512_v4
+119/607921/campos_512_v4
+119/607946/campos_512_v4
+119/607948/campos_512_v4
+119/607953/campos_512_v4
+119/607954/campos_512_v4
+119/607962/campos_512_v4
+119/607964/campos_512_v4
+119/607973/campos_512_v4
+119/607974/campos_512_v4
+119/607978/campos_512_v4
+119/607983/campos_512_v4
+119/607984/campos_512_v4
+119/608010/campos_512_v4
+119/608022/campos_512_v4
+119/608024/campos_512_v4
+119/608030/campos_512_v4
+119/608032/campos_512_v4
+119/608037/campos_512_v4
+119/608040/campos_512_v4
+119/608050/campos_512_v4
+119/608061/campos_512_v4
+119/608064/campos_512_v4
+119/608077/campos_512_v4
+119/608079/campos_512_v4
+119/608083/campos_512_v4
+119/608087/campos_512_v4
+119/608094/campos_512_v4
+119/608098/campos_512_v4
+119/608104/campos_512_v4
+119/608119/campos_512_v4
+119/608121/campos_512_v4
+119/608128/campos_512_v4
+119/608131/campos_512_v4
+119/608142/campos_512_v4
+119/608145/campos_512_v4
+119/608149/campos_512_v4
+119/608152/campos_512_v4
+119/608153/campos_512_v4
+119/608167/campos_512_v4
+119/608187/campos_512_v4
+119/608192/campos_512_v4
+119/608194/campos_512_v4
+119/608201/campos_512_v4
+119/608202/campos_512_v4
+119/608209/campos_512_v4
+119/608213/campos_512_v4
+119/608228/campos_512_v4
+119/608231/campos_512_v4
+119/608234/campos_512_v4
+119/608235/campos_512_v4
+119/608248/campos_512_v4
+119/608254/campos_512_v4
+119/608272/campos_512_v4
+119/608273/campos_512_v4
+119/608286/campos_512_v4
+119/608290/campos_512_v4
+119/608314/campos_512_v4
+119/608325/campos_512_v4
+119/608331/campos_512_v4
+119/608356/campos_512_v4
+119/608368/campos_512_v4
+119/608369/campos_512_v4
+119/608386/campos_512_v4
+119/608393/campos_512_v4
+119/608403/campos_512_v4
+119/608415/campos_512_v4
+119/608420/campos_512_v4
+119/608430/campos_512_v4
+119/608433/campos_512_v4
+119/608447/campos_512_v4
+119/608454/campos_512_v4
+119/608466/campos_512_v4
+119/608484/campos_512_v4
+119/608506/campos_512_v4
+119/608511/campos_512_v4
+119/608550/campos_512_v4
+119/608554/campos_512_v4
+119/608557/campos_512_v4
+119/608571/campos_512_v4
+119/608585/campos_512_v4
+119/608592/campos_512_v4
+119/608605/campos_512_v4
+119/608609/campos_512_v4
+119/608618/campos_512_v4
+119/608625/campos_512_v4
+119/608626/campos_512_v4
+119/608629/campos_512_v4
+119/608634/campos_512_v4
+119/608651/campos_512_v4
+119/608666/campos_512_v4
+119/608670/campos_512_v4
+119/608671/campos_512_v4
+119/608672/campos_512_v4
+119/608696/campos_512_v4
+119/608700/campos_512_v4
+119/608710/campos_512_v4
+119/608728/campos_512_v4
+119/608731/campos_512_v4
+119/608733/campos_512_v4
+119/608739/campos_512_v4
+119/608740/campos_512_v4
+119/608745/campos_512_v4
+119/608748/campos_512_v4
+119/608759/campos_512_v4
+119/608764/campos_512_v4
+119/608772/campos_512_v4
+119/608782/campos_512_v4
+119/608795/campos_512_v4
+119/608803/campos_512_v4
+119/608819/campos_512_v4
+119/608821/campos_512_v4
+119/608833/campos_512_v4
+119/608847/campos_512_v4
+119/608848/campos_512_v4
+119/608852/campos_512_v4
+119/608856/campos_512_v4
+119/608862/campos_512_v4
+119/608869/campos_512_v4
+119/608874/campos_512_v4
+119/608876/campos_512_v4
+119/608880/campos_512_v4
+119/608886/campos_512_v4
+119/608898/campos_512_v4
+119/608899/campos_512_v4
+119/608903/campos_512_v4
+119/608914/campos_512_v4
+119/608924/campos_512_v4
+119/608926/campos_512_v4
+119/608931/campos_512_v4
+119/608937/campos_512_v4
+119/608946/campos_512_v4
+119/608950/campos_512_v4
+119/608953/campos_512_v4
+119/608958/campos_512_v4
+119/608962/campos_512_v4
+119/608986/campos_512_v4
+119/608991/campos_512_v4
+119/608992/campos_512_v4
+119/608994/campos_512_v4
+119/609008/campos_512_v4
+119/609027/campos_512_v4
+119/609034/campos_512_v4
+119/609041/campos_512_v4
+119/609048/campos_512_v4
+119/609055/campos_512_v4
+119/609070/campos_512_v4
+119/609071/campos_512_v4
+119/609078/campos_512_v4
+119/609087/campos_512_v4
+119/609092/campos_512_v4
+119/609093/campos_512_v4
+119/609097/campos_512_v4
+119/609100/campos_512_v4
+119/609116/campos_512_v4
+119/609117/campos_512_v4
+119/609122/campos_512_v4
+119/609129/campos_512_v4
+119/609130/campos_512_v4
+119/609131/campos_512_v4
+119/609135/campos_512_v4
+119/609146/campos_512_v4
+119/609147/campos_512_v4
+119/609157/campos_512_v4
+119/609166/campos_512_v4
+119/609167/campos_512_v4
+119/609171/campos_512_v4
+119/609178/campos_512_v4
+119/609183/campos_512_v4
+119/609185/campos_512_v4
+119/609188/campos_512_v4
+119/609193/campos_512_v4
+119/609198/campos_512_v4
+119/609213/campos_512_v4
+119/609215/campos_512_v4
+119/609216/campos_512_v4
+119/609224/campos_512_v4
+119/609229/campos_512_v4
+119/609241/campos_512_v4
+119/609244/campos_512_v4
+119/609253/campos_512_v4
+119/609254/campos_512_v4
+119/609256/campos_512_v4
+119/609265/campos_512_v4
+119/609272/campos_512_v4
+119/609274/campos_512_v4
+119/609279/campos_512_v4
+119/609282/campos_512_v4
+119/609298/campos_512_v4
+119/609299/campos_512_v4
+119/609302/campos_512_v4
+119/609310/campos_512_v4
+119/609316/campos_512_v4
+119/609319/campos_512_v4
+119/609326/campos_512_v4
+119/609335/campos_512_v4
+119/609343/campos_512_v4
+119/609346/campos_512_v4
+119/609350/campos_512_v4
+119/609356/campos_512_v4
+119/609357/campos_512_v4
+119/609358/campos_512_v4
+119/609363/campos_512_v4
+119/609374/campos_512_v4
+119/609377/campos_512_v4
+119/609388/campos_512_v4
+119/609389/campos_512_v4
+119/609392/campos_512_v4
+119/609417/campos_512_v4
+119/609432/campos_512_v4
+119/609433/campos_512_v4
+119/609434/campos_512_v4
+119/609446/campos_512_v4
+119/609450/campos_512_v4
+119/609456/campos_512_v4
+119/609473/campos_512_v4
+119/609478/campos_512_v4
+119/609489/campos_512_v4
+119/609490/campos_512_v4
+119/609493/campos_512_v4
+119/609494/campos_512_v4
+119/609503/campos_512_v4
+119/609510/campos_512_v4
+119/609516/campos_512_v4
+119/609521/campos_512_v4
+119/609529/campos_512_v4
+119/609535/campos_512_v4
+119/609537/campos_512_v4
+119/609538/campos_512_v4
+119/609546/campos_512_v4
+119/609561/campos_512_v4
+119/609568/campos_512_v4
+119/609569/campos_512_v4
+119/609574/campos_512_v4
+119/609580/campos_512_v4
+119/609588/campos_512_v4
+119/609600/campos_512_v4
+119/609603/campos_512_v4
+119/609632/campos_512_v4
+119/609642/campos_512_v4
+119/609649/campos_512_v4
+119/609655/campos_512_v4
+119/609656/campos_512_v4
+119/609665/campos_512_v4
+119/609666/campos_512_v4
+119/609670/campos_512_v4
+119/609674/campos_512_v4
+119/609675/campos_512_v4
+119/609682/campos_512_v4
+119/609683/campos_512_v4
+119/609711/campos_512_v4
+119/609713/campos_512_v4
+119/609718/campos_512_v4
+119/609719/campos_512_v4
+119/609750/campos_512_v4
+119/609755/campos_512_v4
+119/609768/campos_512_v4
+119/609775/campos_512_v4
+119/609779/campos_512_v4
+119/609790/campos_512_v4
+119/609794/campos_512_v4
+119/609796/campos_512_v4
+119/609813/campos_512_v4
+119/609849/campos_512_v4
+119/609850/campos_512_v4
+119/609851/campos_512_v4
+119/609853/campos_512_v4
+119/609858/campos_512_v4
+119/609864/campos_512_v4
+119/609876/campos_512_v4
+119/609877/campos_512_v4
+119/609890/campos_512_v4
+119/609895/campos_512_v4
+119/609898/campos_512_v4
+119/609901/campos_512_v4
+119/609902/campos_512_v4
+119/609916/campos_512_v4
+119/609927/campos_512_v4
+119/609930/campos_512_v4
+119/609935/campos_512_v4
+119/609947/campos_512_v4
+119/609949/campos_512_v4
+119/609953/campos_512_v4
+119/609968/campos_512_v4
+119/609976/campos_512_v4
+119/609980/campos_512_v4
+119/609989/campos_512_v4
+119/609990/campos_512_v4
+119/609992/campos_512_v4
+119/609998/campos_512_v4
+119/609999/campos_512_v4
+119/610001/campos_512_v4
+12/70006/campos_512_v4
+12/70018/campos_512_v4
+12/70023/campos_512_v4
+12/70030/campos_512_v4
+12/70031/campos_512_v4
+12/70043/campos_512_v4
+12/70061/campos_512_v4
+12/70073/campos_512_v4
+12/70075/campos_512_v4
+12/70090/campos_512_v4
+12/70099/campos_512_v4
+12/70105/campos_512_v4
+12/70125/campos_512_v4
+12/70129/campos_512_v4
+12/70132/campos_512_v4
+12/70135/campos_512_v4
+12/70144/campos_512_v4
+12/70151/campos_512_v4
+12/70163/campos_512_v4
+12/70166/campos_512_v4
+12/70175/campos_512_v4
+12/70176/campos_512_v4
+12/70196/campos_512_v4
+12/70206/campos_512_v4
+12/70207/campos_512_v4
+12/70209/campos_512_v4
+12/70212/campos_512_v4
+12/70249/campos_512_v4
+12/70250/campos_512_v4
+12/70263/campos_512_v4
+12/70265/campos_512_v4
+12/70267/campos_512_v4
+12/70268/campos_512_v4
+12/70275/campos_512_v4
+12/70283/campos_512_v4
+12/70300/campos_512_v4
+12/70309/campos_512_v4
+12/70315/campos_512_v4
+12/70316/campos_512_v4
+12/70317/campos_512_v4
+12/70325/campos_512_v4
+12/70328/campos_512_v4
+12/70336/campos_512_v4
+12/70346/campos_512_v4
+12/70351/campos_512_v4
+12/70359/campos_512_v4
+12/70367/campos_512_v4
+12/70373/campos_512_v4
+12/70384/campos_512_v4
+12/70385/campos_512_v4
+12/70391/campos_512_v4
+12/70397/campos_512_v4
+12/70403/campos_512_v4
+12/70405/campos_512_v4
+12/70414/campos_512_v4
+12/70416/campos_512_v4
+12/70421/campos_512_v4
+12/70426/campos_512_v4
+12/70434/campos_512_v4
+12/70435/campos_512_v4
+12/70436/campos_512_v4
+12/70452/campos_512_v4
+12/70459/campos_512_v4
+12/70463/campos_512_v4
+12/70465/campos_512_v4
+12/70466/campos_512_v4
+12/70478/campos_512_v4
+12/70479/campos_512_v4
+12/70480/campos_512_v4
+12/70482/campos_512_v4
+12/70505/campos_512_v4
+12/70513/campos_512_v4
+12/70518/campos_512_v4
+12/70519/campos_512_v4
+12/70520/campos_512_v4
+12/70532/campos_512_v4
+12/70534/campos_512_v4
+12/70544/campos_512_v4
+12/70545/campos_512_v4
+12/70549/campos_512_v4
+12/70552/campos_512_v4
+12/70560/campos_512_v4
+12/70562/campos_512_v4
+12/70574/campos_512_v4
+12/70579/campos_512_v4
+12/70588/campos_512_v4
+12/70590/campos_512_v4
+12/70608/campos_512_v4
+12/70611/campos_512_v4
+12/70616/campos_512_v4
+12/70617/campos_512_v4
+12/70629/campos_512_v4
+12/70633/campos_512_v4
+12/70645/campos_512_v4
+12/70652/campos_512_v4
+12/70658/campos_512_v4
+12/70660/campos_512_v4
+12/70668/campos_512_v4
+12/70672/campos_512_v4
+12/70712/campos_512_v4
+12/70726/campos_512_v4
+12/70737/campos_512_v4
+12/70741/campos_512_v4
+12/70747/campos_512_v4
+12/70755/campos_512_v4
+12/70762/campos_512_v4
+12/70766/campos_512_v4
+12/70769/campos_512_v4
+12/70770/campos_512_v4
+12/70775/campos_512_v4
+12/70799/campos_512_v4
+12/70801/campos_512_v4
+12/70814/campos_512_v4
+12/70816/campos_512_v4
+12/70834/campos_512_v4
+12/70835/campos_512_v4
+12/70847/campos_512_v4
+12/70850/campos_512_v4
+12/70871/campos_512_v4
+12/70875/campos_512_v4
+12/70881/campos_512_v4
+12/70888/campos_512_v4
+12/70897/campos_512_v4
+12/70904/campos_512_v4
+12/70907/campos_512_v4
+12/70955/campos_512_v4
+12/70958/campos_512_v4
+12/70960/campos_512_v4
+12/70966/campos_512_v4
+12/70973/campos_512_v4
+12/70974/campos_512_v4
+12/70980/campos_512_v4
+12/70998/campos_512_v4
+12/71000/campos_512_v4
+12/71005/campos_512_v4
+12/71016/campos_512_v4
+12/71019/campos_512_v4
+12/71044/campos_512_v4
+12/71047/campos_512_v4
+12/71054/campos_512_v4
+12/71061/campos_512_v4
+12/71066/campos_512_v4
+12/71070/campos_512_v4
+12/71082/campos_512_v4
+12/71088/campos_512_v4
+12/71100/campos_512_v4
+12/71110/campos_512_v4
+12/71112/campos_512_v4
+12/71122/campos_512_v4
+12/71126/campos_512_v4
+12/71128/campos_512_v4
+12/71136/campos_512_v4
+12/71143/campos_512_v4
+12/71165/campos_512_v4
+12/71176/campos_512_v4
+12/71184/campos_512_v4
+12/71186/campos_512_v4
+12/71195/campos_512_v4
+12/71202/campos_512_v4
+12/71209/campos_512_v4
+12/71210/campos_512_v4
+12/71215/campos_512_v4
+12/71217/campos_512_v4
+12/71221/campos_512_v4
+12/71232/campos_512_v4
+12/71239/campos_512_v4
+12/71241/campos_512_v4
+12/71253/campos_512_v4
+12/71254/campos_512_v4
+12/71256/campos_512_v4
+12/71266/campos_512_v4
+12/71268/campos_512_v4
+12/71272/campos_512_v4
+12/71277/campos_512_v4
+12/71280/campos_512_v4
+12/71284/campos_512_v4
+12/71286/campos_512_v4
+12/71291/campos_512_v4
+12/71302/campos_512_v4
+12/71303/campos_512_v4
+12/71305/campos_512_v4
+12/71319/campos_512_v4
+12/71322/campos_512_v4
+12/71324/campos_512_v4
+12/71334/campos_512_v4
+12/71354/campos_512_v4
+12/71361/campos_512_v4
+12/71367/campos_512_v4
+12/71380/campos_512_v4
+12/71382/campos_512_v4
+12/71388/campos_512_v4
+12/71390/campos_512_v4
+12/71392/campos_512_v4
+12/71398/campos_512_v4
+12/71402/campos_512_v4
+12/71404/campos_512_v4
+12/71409/campos_512_v4
+12/71426/campos_512_v4
+12/71428/campos_512_v4
+12/71430/campos_512_v4
+12/71446/campos_512_v4
+12/71458/campos_512_v4
+12/71459/campos_512_v4
+12/71463/campos_512_v4
+12/71479/campos_512_v4
+12/71491/campos_512_v4
+12/71510/campos_512_v4
+12/71520/campos_512_v4
+12/71521/campos_512_v4
+12/71523/campos_512_v4
+12/71540/campos_512_v4
+12/71541/campos_512_v4
+12/71545/campos_512_v4
+12/71553/campos_512_v4
+12/71577/campos_512_v4
+12/71598/campos_512_v4
+12/71602/campos_512_v4
+12/71604/campos_512_v4
+12/71605/campos_512_v4
+12/71622/campos_512_v4
+12/71625/campos_512_v4
+12/71630/campos_512_v4
+12/71631/campos_512_v4
+12/71643/campos_512_v4
+12/71644/campos_512_v4
+12/71656/campos_512_v4
+12/71674/campos_512_v4
+12/71679/campos_512_v4
+12/71680/campos_512_v4
+12/71682/campos_512_v4
+12/71688/campos_512_v4
+12/71694/campos_512_v4
+12/71702/campos_512_v4
+12/71716/campos_512_v4
+12/71717/campos_512_v4
+12/71720/campos_512_v4
+12/71721/campos_512_v4
+12/71724/campos_512_v4
+12/71726/campos_512_v4
+12/71728/campos_512_v4
+12/71754/campos_512_v4
+12/71756/campos_512_v4
+12/71762/campos_512_v4
+12/71775/campos_512_v4
+12/71776/campos_512_v4
+12/71786/campos_512_v4
+12/71801/campos_512_v4
+12/71802/campos_512_v4
+12/71819/campos_512_v4
+12/71830/campos_512_v4
+12/71832/campos_512_v4
+12/71834/campos_512_v4
+12/71837/campos_512_v4
+12/71851/campos_512_v4
+12/71855/campos_512_v4
+12/71873/campos_512_v4
+12/71881/campos_512_v4
+12/71885/campos_512_v4
+12/71886/campos_512_v4
+12/71918/campos_512_v4
+12/71920/campos_512_v4
+12/71944/campos_512_v4
+12/71945/campos_512_v4
+12/71950/campos_512_v4
+12/71957/campos_512_v4
+12/71958/campos_512_v4
+12/71966/campos_512_v4
+12/71982/campos_512_v4
+12/71983/campos_512_v4
+12/71990/campos_512_v4
+12/72014/campos_512_v4
+12/72019/campos_512_v4
+12/72028/campos_512_v4
+12/72033/campos_512_v4
+12/72038/campos_512_v4
+12/72044/campos_512_v4
+12/72073/campos_512_v4
+12/72075/campos_512_v4
+12/72094/campos_512_v4
+12/72097/campos_512_v4
+12/72099/campos_512_v4
+12/72105/campos_512_v4
+12/72117/campos_512_v4
+12/72122/campos_512_v4
+12/72123/campos_512_v4
+12/72128/campos_512_v4
+12/72130/campos_512_v4
+12/72141/campos_512_v4
+12/72142/campos_512_v4
+12/72144/campos_512_v4
+12/72158/campos_512_v4
+12/72163/campos_512_v4
+12/72168/campos_512_v4
+12/72177/campos_512_v4
+12/72180/campos_512_v4
+12/72195/campos_512_v4
+12/72202/campos_512_v4
+12/72206/campos_512_v4
+12/72208/campos_512_v4
+12/72220/campos_512_v4
+12/72222/campos_512_v4
+12/72223/campos_512_v4
+12/72226/campos_512_v4
+12/72227/campos_512_v4
+12/72242/campos_512_v4
+12/72251/campos_512_v4
+12/72257/campos_512_v4
+12/72259/campos_512_v4
+12/72278/campos_512_v4
+12/72280/campos_512_v4
+12/72289/campos_512_v4
+12/72293/campos_512_v4
+12/72301/campos_512_v4
+12/72306/campos_512_v4
+12/72310/campos_512_v4
+12/72315/campos_512_v4
+12/72324/campos_512_v4
+12/72335/campos_512_v4
+12/72340/campos_512_v4
+12/72342/campos_512_v4
+12/72344/campos_512_v4
+12/72349/campos_512_v4
+12/72355/campos_512_v4
+12/72385/campos_512_v4
+12/72393/campos_512_v4
+12/72399/campos_512_v4
+12/72402/campos_512_v4
+12/72404/campos_512_v4
+12/72420/campos_512_v4
+12/72431/campos_512_v4
+12/72437/campos_512_v4
+12/72442/campos_512_v4
+12/72447/campos_512_v4
+12/72448/campos_512_v4
+12/72449/campos_512_v4
+12/72455/campos_512_v4
+12/72460/campos_512_v4
+12/72469/campos_512_v4
+12/72471/campos_512_v4
+12/72474/campos_512_v4
+12/72475/campos_512_v4
+12/72489/campos_512_v4
+12/72491/campos_512_v4
+12/72512/campos_512_v4
+12/72515/campos_512_v4
+12/72525/campos_512_v4
+12/72527/campos_512_v4
+12/72528/campos_512_v4
+12/72531/campos_512_v4
+12/72557/campos_512_v4
+12/72558/campos_512_v4
+12/72575/campos_512_v4
+12/72603/campos_512_v4
+12/72607/campos_512_v4
+12/72654/campos_512_v4
+12/72661/campos_512_v4
+12/72667/campos_512_v4
+12/72674/campos_512_v4
+12/72677/campos_512_v4
+12/72680/campos_512_v4
+12/72690/campos_512_v4
+12/72697/campos_512_v4
+12/72700/campos_512_v4
+12/72703/campos_512_v4
+12/72715/campos_512_v4
+12/72721/campos_512_v4
+12/72725/campos_512_v4
+12/72729/campos_512_v4
+12/72736/campos_512_v4
+12/72746/campos_512_v4
+12/72748/campos_512_v4
+12/72749/campos_512_v4
+12/72750/campos_512_v4
+12/72758/campos_512_v4
+12/72763/campos_512_v4
+12/72768/campos_512_v4
+12/72775/campos_512_v4
+12/72776/campos_512_v4
+12/72779/campos_512_v4
+12/72788/campos_512_v4
+12/72790/campos_512_v4
+12/72800/campos_512_v4
+12/72801/campos_512_v4
+12/72809/campos_512_v4
+12/72822/campos_512_v4
+12/72827/campos_512_v4
+12/72828/campos_512_v4
+12/72830/campos_512_v4
+12/72846/campos_512_v4
+12/72853/campos_512_v4
+12/72857/campos_512_v4
+12/72858/campos_512_v4
+12/72865/campos_512_v4
+12/72869/campos_512_v4
+12/72870/campos_512_v4
+12/72872/campos_512_v4
+12/72877/campos_512_v4
+12/72880/campos_512_v4
+12/72885/campos_512_v4
+12/72905/campos_512_v4
+12/72906/campos_512_v4
+12/72907/campos_512_v4
+12/72913/campos_512_v4
+12/72915/campos_512_v4
+12/72933/campos_512_v4
+12/72942/campos_512_v4
+12/72948/campos_512_v4
+12/72960/campos_512_v4
+12/72965/campos_512_v4
+12/72976/campos_512_v4
+12/72979/campos_512_v4
+12/72993/campos_512_v4
+12/73004/campos_512_v4
+12/73006/campos_512_v4
+12/73007/campos_512_v4
+12/73008/campos_512_v4
+12/73011/campos_512_v4
+12/73029/campos_512_v4
+12/73059/campos_512_v4
+12/73071/campos_512_v4
+12/73080/campos_512_v4
+12/73088/campos_512_v4
+12/73105/campos_512_v4
+12/73107/campos_512_v4
+12/73110/campos_512_v4
+12/73115/campos_512_v4
+12/73116/campos_512_v4
+12/73124/campos_512_v4
+12/73128/campos_512_v4
+12/73138/campos_512_v4
+12/73144/campos_512_v4
+12/73146/campos_512_v4
+12/73161/campos_512_v4
+12/73172/campos_512_v4
+12/73178/campos_512_v4
+12/73181/campos_512_v4
+12/73182/campos_512_v4
+12/73183/campos_512_v4
+12/73185/campos_512_v4
+12/73189/campos_512_v4
+12/73206/campos_512_v4
+12/73214/campos_512_v4
+12/73218/campos_512_v4
+12/73229/campos_512_v4
+12/73239/campos_512_v4
+12/73247/campos_512_v4
+12/73254/campos_512_v4
+12/73257/campos_512_v4
+12/73266/campos_512_v4
+12/73282/campos_512_v4
+12/73292/campos_512_v4
+12/73297/campos_512_v4
+12/73299/campos_512_v4
+12/73310/campos_512_v4
+12/73311/campos_512_v4
+12/73314/campos_512_v4
+12/73317/campos_512_v4
+12/73321/campos_512_v4
+12/73325/campos_512_v4
+12/73333/campos_512_v4
+12/73337/campos_512_v4
+12/73339/campos_512_v4
+12/73342/campos_512_v4
+12/73343/campos_512_v4
+12/73345/campos_512_v4
+12/73351/campos_512_v4
+12/73353/campos_512_v4
+12/73355/campos_512_v4
+12/73356/campos_512_v4
+12/73358/campos_512_v4
+12/73365/campos_512_v4
+12/73367/campos_512_v4
+12/73379/campos_512_v4
+12/73384/campos_512_v4
+12/73394/campos_512_v4
+12/73395/campos_512_v4
+12/73397/campos_512_v4
+12/73400/campos_512_v4
+12/73405/campos_512_v4
+12/73422/campos_512_v4
+12/73429/campos_512_v4
+12/73434/campos_512_v4
+12/73437/campos_512_v4
+12/73440/campos_512_v4
+12/73443/campos_512_v4
+12/73464/campos_512_v4
+12/73466/campos_512_v4
+12/73467/campos_512_v4
+12/73473/campos_512_v4
+12/73478/campos_512_v4
+12/73479/campos_512_v4
+12/73480/campos_512_v4
+12/73500/campos_512_v4
+12/73507/campos_512_v4
+12/73512/campos_512_v4
+12/73515/campos_512_v4
+12/73518/campos_512_v4
+12/73522/campos_512_v4
+12/73529/campos_512_v4
+12/73552/campos_512_v4
+12/73564/campos_512_v4
+12/73567/campos_512_v4
+12/73568/campos_512_v4
+12/73572/campos_512_v4
+12/73582/campos_512_v4
+12/73590/campos_512_v4
+12/73591/campos_512_v4
+12/73592/campos_512_v4
+12/73598/campos_512_v4
+12/73609/campos_512_v4
+12/73612/campos_512_v4
+12/73616/campos_512_v4
+12/73623/campos_512_v4
+12/73624/campos_512_v4
+12/73629/campos_512_v4
+12/73633/campos_512_v4
+12/73635/campos_512_v4
+12/73637/campos_512_v4
+12/73640/campos_512_v4
+12/73641/campos_512_v4
+12/73650/campos_512_v4
+12/73651/campos_512_v4
+12/73655/campos_512_v4
+12/73666/campos_512_v4
+12/73670/campos_512_v4
+12/73676/campos_512_v4
+12/73677/campos_512_v4
+12/73684/campos_512_v4
+12/73685/campos_512_v4
+12/73694/campos_512_v4
+12/73695/campos_512_v4
+12/73701/campos_512_v4
+12/73707/campos_512_v4
+12/73712/campos_512_v4
+12/73720/campos_512_v4
+12/73735/campos_512_v4
+12/73744/campos_512_v4
+12/73748/campos_512_v4
+12/73772/campos_512_v4
+12/73779/campos_512_v4
+12/73791/campos_512_v4
+12/73811/campos_512_v4
+12/73812/campos_512_v4
+12/73819/campos_512_v4
+12/73823/campos_512_v4
+12/73826/campos_512_v4
+12/73829/campos_512_v4
+12/73832/campos_512_v4
+12/73836/campos_512_v4
+12/73840/campos_512_v4
+12/73849/campos_512_v4
+12/73850/campos_512_v4
+12/73857/campos_512_v4
+12/73860/campos_512_v4
+12/73862/campos_512_v4
+12/73885/campos_512_v4
+12/73887/campos_512_v4
+12/73910/campos_512_v4
+12/73911/campos_512_v4
+12/73912/campos_512_v4
+12/73921/campos_512_v4
+12/73923/campos_512_v4
+12/73934/campos_512_v4
+12/73935/campos_512_v4
+12/73936/campos_512_v4
+12/73945/campos_512_v4
+12/73949/campos_512_v4
+12/73951/campos_512_v4
+12/73956/campos_512_v4
+12/73991/campos_512_v4
+12/73996/campos_512_v4
+12/74009/campos_512_v4
+12/74010/campos_512_v4
+12/74011/campos_512_v4
+12/74017/campos_512_v4
+12/74019/campos_512_v4
+12/74041/campos_512_v4
+12/74047/campos_512_v4
+12/74055/campos_512_v4
+12/74060/campos_512_v4
+12/74067/campos_512_v4
+12/74072/campos_512_v4
+12/74079/campos_512_v4
+12/74081/campos_512_v4
+12/74082/campos_512_v4
+12/74089/campos_512_v4
+12/74091/campos_512_v4
+12/74101/campos_512_v4
+12/74105/campos_512_v4
+12/74111/campos_512_v4
+12/74125/campos_512_v4
+12/74137/campos_512_v4
+12/74139/campos_512_v4
+12/74143/campos_512_v4
+12/74144/campos_512_v4
+12/74146/campos_512_v4
+12/74150/campos_512_v4
+12/74152/campos_512_v4
+12/74153/campos_512_v4
+12/74158/campos_512_v4
+12/74160/campos_512_v4
+12/74186/campos_512_v4
+12/74188/campos_512_v4
+12/74192/campos_512_v4
+12/74202/campos_512_v4
+12/74214/campos_512_v4
+12/74218/campos_512_v4
+12/74219/campos_512_v4
+12/74221/campos_512_v4
+12/74226/campos_512_v4
+12/74230/campos_512_v4
+12/74236/campos_512_v4
+12/74239/campos_512_v4
+12/74247/campos_512_v4
+12/74263/campos_512_v4
+12/74265/campos_512_v4
+12/74267/campos_512_v4
+12/74268/campos_512_v4
+12/74270/campos_512_v4
+12/74286/campos_512_v4
+12/74298/campos_512_v4
+12/74299/campos_512_v4
+12/74306/campos_512_v4
+12/74329/campos_512_v4
+12/74333/campos_512_v4
+12/74345/campos_512_v4
+12/74348/campos_512_v4
+12/74350/campos_512_v4
+12/74353/campos_512_v4
+12/74358/campos_512_v4
+12/74371/campos_512_v4
+12/74372/campos_512_v4
+12/74373/campos_512_v4
+12/74387/campos_512_v4
+12/74396/campos_512_v4
+12/74403/campos_512_v4
+12/74404/campos_512_v4
+12/74413/campos_512_v4
+12/74417/campos_512_v4
+12/74418/campos_512_v4
+12/74420/campos_512_v4
+12/74421/campos_512_v4
+12/74424/campos_512_v4
+12/74432/campos_512_v4
+12/74436/campos_512_v4
+12/74437/campos_512_v4
+12/74442/campos_512_v4
+12/74444/campos_512_v4
+12/74446/campos_512_v4
+12/74456/campos_512_v4
+12/74466/campos_512_v4
+12/74472/campos_512_v4
+12/74473/campos_512_v4
+12/74476/campos_512_v4
+12/74487/campos_512_v4
+12/74492/campos_512_v4
+12/74494/campos_512_v4
+12/74503/campos_512_v4
+12/74514/campos_512_v4
+12/74516/campos_512_v4
+12/74518/campos_512_v4
+12/74520/campos_512_v4
+12/74521/campos_512_v4
+12/74530/campos_512_v4
+12/74542/campos_512_v4
+12/74543/campos_512_v4
+12/74548/campos_512_v4
+12/74561/campos_512_v4
+12/74589/campos_512_v4
+12/74593/campos_512_v4
+12/74607/campos_512_v4
+12/74609/campos_512_v4
+12/74627/campos_512_v4
+12/74631/campos_512_v4
+12/74640/campos_512_v4
+12/74643/campos_512_v4
+12/74652/campos_512_v4
+12/74657/campos_512_v4
+12/74663/campos_512_v4
+12/74674/campos_512_v4
+12/74684/campos_512_v4
+12/74687/campos_512_v4
+12/74698/campos_512_v4
+12/74705/campos_512_v4
+12/74708/campos_512_v4
+12/74710/campos_512_v4
+12/74723/campos_512_v4
+12/74726/campos_512_v4
+12/74733/campos_512_v4
+12/74736/campos_512_v4
+12/74740/campos_512_v4
+12/74743/campos_512_v4
+12/74747/campos_512_v4
+12/74749/campos_512_v4
+12/74750/campos_512_v4
+12/74754/campos_512_v4
+12/74757/campos_512_v4
+12/74762/campos_512_v4
+12/74766/campos_512_v4
+12/74778/campos_512_v4
+12/74780/campos_512_v4
+12/74783/campos_512_v4
+12/74786/campos_512_v4
+12/74787/campos_512_v4
+12/74799/campos_512_v4
+12/74813/campos_512_v4
+12/74814/campos_512_v4
+12/74818/campos_512_v4
+12/74833/campos_512_v4
+12/74835/campos_512_v4
+12/74844/campos_512_v4
+12/74854/campos_512_v4
+12/74875/campos_512_v4
+12/74879/campos_512_v4
+12/74885/campos_512_v4
+12/74890/campos_512_v4
+12/74899/campos_512_v4
+12/74916/campos_512_v4
+12/74930/campos_512_v4
+12/74938/campos_512_v4
+12/74946/campos_512_v4
+12/74953/campos_512_v4
+12/74971/campos_512_v4
+12/74975/campos_512_v4
+12/74979/campos_512_v4
+12/74981/campos_512_v4
+12/74999/campos_512_v4
+120/610006/campos_512_v4
+120/610008/campos_512_v4
+120/610009/campos_512_v4
+120/610012/campos_512_v4
+120/610028/campos_512_v4
+120/610036/campos_512_v4
+120/610052/campos_512_v4
+120/610074/campos_512_v4
+120/610083/campos_512_v4
+120/610099/campos_512_v4
+120/610133/campos_512_v4
+120/610138/campos_512_v4
+120/610143/campos_512_v4
+120/610156/campos_512_v4
+120/610159/campos_512_v4
+120/610182/campos_512_v4
+120/610188/campos_512_v4
+120/610193/campos_512_v4
+120/610199/campos_512_v4
+120/610200/campos_512_v4
+120/610204/campos_512_v4
+120/610208/campos_512_v4
+120/610214/campos_512_v4
+120/610221/campos_512_v4
+120/610223/campos_512_v4
+120/610227/campos_512_v4
+120/610234/campos_512_v4
+120/610248/campos_512_v4
+120/610281/campos_512_v4
+120/610290/campos_512_v4
+120/610295/campos_512_v4
+120/610299/campos_512_v4
+120/610309/campos_512_v4
+120/610318/campos_512_v4
+120/610325/campos_512_v4
+120/610326/campos_512_v4
+120/610340/campos_512_v4
+120/610341/campos_512_v4
+120/610345/campos_512_v4
+120/610347/campos_512_v4
+120/610366/campos_512_v4
+120/610370/campos_512_v4
+120/610382/campos_512_v4
+120/610398/campos_512_v4
+120/610401/campos_512_v4
+120/610407/campos_512_v4
+120/610409/campos_512_v4
+120/610420/campos_512_v4
+120/610433/campos_512_v4
+120/610443/campos_512_v4
+120/610446/campos_512_v4
+120/610447/campos_512_v4
+120/610452/campos_512_v4
+120/610465/campos_512_v4
+120/610466/campos_512_v4
+120/610467/campos_512_v4
+120/610468/campos_512_v4
+120/610469/campos_512_v4
+120/610470/campos_512_v4
+120/610477/campos_512_v4
+120/610483/campos_512_v4
+120/610487/campos_512_v4
+120/610491/campos_512_v4
+120/610492/campos_512_v4
+120/610512/campos_512_v4
+120/610520/campos_512_v4
+120/610536/campos_512_v4
+120/610542/campos_512_v4
+120/610550/campos_512_v4
+120/610555/campos_512_v4
+120/610556/campos_512_v4
+120/610557/campos_512_v4
+120/610567/campos_512_v4
+120/610568/campos_512_v4
+120/610576/campos_512_v4
+120/610587/campos_512_v4
+120/610596/campos_512_v4
+120/610597/campos_512_v4
+120/610603/campos_512_v4
+120/610608/campos_512_v4
+120/610618/campos_512_v4
+120/610622/campos_512_v4
+120/610624/campos_512_v4
+120/610631/campos_512_v4
+120/610639/campos_512_v4
+120/610644/campos_512_v4
+120/610651/campos_512_v4
+120/610659/campos_512_v4
+120/610677/campos_512_v4
+120/610678/campos_512_v4
+120/610679/campos_512_v4
+120/610688/campos_512_v4
+120/610695/campos_512_v4
+120/610703/campos_512_v4
+120/610704/campos_512_v4
+120/610707/campos_512_v4
+120/610710/campos_512_v4
+120/610711/campos_512_v4
+120/610713/campos_512_v4
+120/610714/campos_512_v4
+120/610718/campos_512_v4
+120/610725/campos_512_v4
+120/610728/campos_512_v4
+120/610738/campos_512_v4
+120/610749/campos_512_v4
+120/610755/campos_512_v4
+120/610764/campos_512_v4
+120/610768/campos_512_v4
+120/610770/campos_512_v4
+120/610780/campos_512_v4
+120/610783/campos_512_v4
+120/610785/campos_512_v4
+120/610790/campos_512_v4
+120/610813/campos_512_v4
+120/610824/campos_512_v4
+120/610831/campos_512_v4
+120/610843/campos_512_v4
+120/610844/campos_512_v4
+120/610846/campos_512_v4
+120/610849/campos_512_v4
+120/610851/campos_512_v4
+120/610854/campos_512_v4
+120/610855/campos_512_v4
+120/610858/campos_512_v4
+120/610871/campos_512_v4
+120/610883/campos_512_v4
+120/610886/campos_512_v4
+120/610896/campos_512_v4
+120/610899/campos_512_v4
+120/610908/campos_512_v4
+120/610911/campos_512_v4
+120/610912/campos_512_v4
+120/610915/campos_512_v4
+120/610916/campos_512_v4
+120/610924/campos_512_v4
+120/610937/campos_512_v4
+120/610940/campos_512_v4
+120/610946/campos_512_v4
+120/610952/campos_512_v4
+120/610965/campos_512_v4
+120/610971/campos_512_v4
+120/610976/campos_512_v4
+120/610980/campos_512_v4
+120/610987/campos_512_v4
+120/610992/campos_512_v4
+120/610995/campos_512_v4
+120/611003/campos_512_v4
+120/611009/campos_512_v4
+120/611015/campos_512_v4
+120/611016/campos_512_v4
+120/611042/campos_512_v4
+120/611043/campos_512_v4
+120/611046/campos_512_v4
+120/611054/campos_512_v4
+120/611072/campos_512_v4
+120/611074/campos_512_v4
+120/611089/campos_512_v4
+120/611109/campos_512_v4
+120/611132/campos_512_v4
+120/611136/campos_512_v4
+120/611144/campos_512_v4
+120/611158/campos_512_v4
+120/611160/campos_512_v4
+120/611185/campos_512_v4
+120/611188/campos_512_v4
+120/611189/campos_512_v4
+120/611212/campos_512_v4
+120/611214/campos_512_v4
+120/611230/campos_512_v4
+120/611244/campos_512_v4
+120/611248/campos_512_v4
+120/611254/campos_512_v4
+120/611265/campos_512_v4
+120/611280/campos_512_v4
+120/611282/campos_512_v4
+120/611284/campos_512_v4
+120/611300/campos_512_v4
+120/611322/campos_512_v4
+120/611330/campos_512_v4
+120/611331/campos_512_v4
+120/611333/campos_512_v4
+120/611364/campos_512_v4
+120/611369/campos_512_v4
+120/611373/campos_512_v4
+120/611375/campos_512_v4
+120/611385/campos_512_v4
+120/611393/campos_512_v4
+120/611395/campos_512_v4
+120/611403/campos_512_v4
+120/611405/campos_512_v4
+120/611417/campos_512_v4
+120/611421/campos_512_v4
+120/611427/campos_512_v4
+120/611429/campos_512_v4
+120/611437/campos_512_v4
+120/611439/campos_512_v4
+120/611450/campos_512_v4
+120/611456/campos_512_v4
+120/611473/campos_512_v4
+120/611481/campos_512_v4
+120/611484/campos_512_v4
+120/611495/campos_512_v4
+120/611500/campos_512_v4
+120/611509/campos_512_v4
+120/611515/campos_512_v4
+120/611544/campos_512_v4
+120/611547/campos_512_v4
+120/611558/campos_512_v4
+120/611568/campos_512_v4
+120/611579/campos_512_v4
+120/611582/campos_512_v4
+120/611588/campos_512_v4
+120/611593/campos_512_v4
+120/611601/campos_512_v4
+120/611602/campos_512_v4
+120/611610/campos_512_v4
+120/611617/campos_512_v4
+120/611626/campos_512_v4
+120/611629/campos_512_v4
+120/611634/campos_512_v4
+120/611649/campos_512_v4
+120/611650/campos_512_v4
+120/611651/campos_512_v4
+120/611655/campos_512_v4
+120/611658/campos_512_v4
+120/611671/campos_512_v4
+120/611674/campos_512_v4
+120/611694/campos_512_v4
+120/611701/campos_512_v4
+120/611707/campos_512_v4
+120/611714/campos_512_v4
+120/611715/campos_512_v4
+120/611723/campos_512_v4
+120/611728/campos_512_v4
+120/611730/campos_512_v4
+120/611732/campos_512_v4
+120/611749/campos_512_v4
+120/611754/campos_512_v4
+120/611764/campos_512_v4
+120/611774/campos_512_v4
+120/611781/campos_512_v4
+120/611787/campos_512_v4
+120/611789/campos_512_v4
+120/611797/campos_512_v4
+120/611810/campos_512_v4
+120/611841/campos_512_v4
+120/611851/campos_512_v4
+120/611854/campos_512_v4
+120/611862/campos_512_v4
+120/611863/campos_512_v4
+120/611871/campos_512_v4
+120/611897/campos_512_v4
+120/611902/campos_512_v4
+120/611916/campos_512_v4
+120/611917/campos_512_v4
+120/611918/campos_512_v4
+120/611921/campos_512_v4
+120/611925/campos_512_v4
+120/611936/campos_512_v4
+120/611949/campos_512_v4
+120/611966/campos_512_v4
+120/611976/campos_512_v4
+120/611977/campos_512_v4
+120/611988/campos_512_v4
+120/611992/campos_512_v4
+120/611995/campos_512_v4
+120/612005/campos_512_v4
+120/612011/campos_512_v4
+120/612015/campos_512_v4
+120/612030/campos_512_v4
+120/612034/campos_512_v4
+120/612038/campos_512_v4
+120/612039/campos_512_v4
+120/612048/campos_512_v4
+120/612056/campos_512_v4
+120/612072/campos_512_v4
+120/612077/campos_512_v4
+120/612081/campos_512_v4
+120/612092/campos_512_v4
+120/612093/campos_512_v4
+120/612116/campos_512_v4
+120/612130/campos_512_v4
+120/612136/campos_512_v4
+120/612140/campos_512_v4
+120/612145/campos_512_v4
+120/612148/campos_512_v4
+120/612149/campos_512_v4
+120/612150/campos_512_v4
+120/612160/campos_512_v4
+120/612162/campos_512_v4
+120/612164/campos_512_v4
+120/612170/campos_512_v4
+120/612172/campos_512_v4
+120/612177/campos_512_v4
+120/612186/campos_512_v4
+120/612196/campos_512_v4
+120/612198/campos_512_v4
+120/612200/campos_512_v4
+120/612206/campos_512_v4
+120/612230/campos_512_v4
+120/612233/campos_512_v4
+120/612258/campos_512_v4
+120/612271/campos_512_v4
+120/612274/campos_512_v4
+120/612275/campos_512_v4
+120/612282/campos_512_v4
+120/612306/campos_512_v4
+120/612322/campos_512_v4
+120/612338/campos_512_v4
+120/612344/campos_512_v4
+120/612349/campos_512_v4
+120/612363/campos_512_v4
+120/612365/campos_512_v4
+120/612372/campos_512_v4
+120/612377/campos_512_v4
+120/612381/campos_512_v4
+120/612388/campos_512_v4
+120/612398/campos_512_v4
+120/612399/campos_512_v4
+120/612409/campos_512_v4
+120/612411/campos_512_v4
+120/612416/campos_512_v4
+120/612426/campos_512_v4
+120/612427/campos_512_v4
+120/612432/campos_512_v4
+120/612434/campos_512_v4
+120/612443/campos_512_v4
+120/612466/campos_512_v4
+120/612478/campos_512_v4
+120/612481/campos_512_v4
+120/612484/campos_512_v4
+120/612494/campos_512_v4
+120/612495/campos_512_v4
+120/612503/campos_512_v4
+120/612507/campos_512_v4
+120/612512/campos_512_v4
+120/612514/campos_512_v4
+120/612525/campos_512_v4
+120/612528/campos_512_v4
+120/612541/campos_512_v4
+120/612569/campos_512_v4
+120/612572/campos_512_v4
+120/612594/campos_512_v4
+120/612598/campos_512_v4
+120/612605/campos_512_v4
+120/612615/campos_512_v4
+120/612624/campos_512_v4
+120/612633/campos_512_v4
+120/612637/campos_512_v4
+120/612638/campos_512_v4
+120/612650/campos_512_v4
+120/612654/campos_512_v4
+120/612660/campos_512_v4
+120/612664/campos_512_v4
+120/612666/campos_512_v4
+120/612689/campos_512_v4
+120/612691/campos_512_v4
+120/612706/campos_512_v4
+120/612716/campos_512_v4
+120/612719/campos_512_v4
+120/612749/campos_512_v4
+120/612752/campos_512_v4
+120/612754/campos_512_v4
+120/612761/campos_512_v4
+120/612764/campos_512_v4
+120/612771/campos_512_v4
+120/612773/campos_512_v4
+120/612775/campos_512_v4
+120/612787/campos_512_v4
+120/612802/campos_512_v4
+120/612803/campos_512_v4
+120/612804/campos_512_v4
+120/612818/campos_512_v4
+120/612826/campos_512_v4
+120/612830/campos_512_v4
+120/612834/campos_512_v4
+120/612841/campos_512_v4
+120/612855/campos_512_v4
+120/612861/campos_512_v4
+120/612863/campos_512_v4
+120/612868/campos_512_v4
+120/612876/campos_512_v4
+120/612877/campos_512_v4
+120/612886/campos_512_v4
+120/612896/campos_512_v4
+120/612901/campos_512_v4
+120/612903/campos_512_v4
+120/612904/campos_512_v4
+120/612906/campos_512_v4
+120/612911/campos_512_v4
+120/612914/campos_512_v4
+120/612929/campos_512_v4
+120/612937/campos_512_v4
+120/612943/campos_512_v4
+120/612954/campos_512_v4
+120/612960/campos_512_v4
+120/612970/campos_512_v4
+120/612973/campos_512_v4
+120/612977/campos_512_v4
+120/612981/campos_512_v4
+120/612992/campos_512_v4
+120/612999/campos_512_v4
+120/613009/campos_512_v4
+120/613018/campos_512_v4
+120/613035/campos_512_v4
+120/613041/campos_512_v4
+120/613067/campos_512_v4
+120/613076/campos_512_v4
+120/613079/campos_512_v4
+120/613083/campos_512_v4
+120/613085/campos_512_v4
+120/613105/campos_512_v4
+120/613106/campos_512_v4
+120/613114/campos_512_v4
+120/613134/campos_512_v4
+120/613138/campos_512_v4
+120/613140/campos_512_v4
+120/613151/campos_512_v4
+120/613154/campos_512_v4
+120/613162/campos_512_v4
+120/613178/campos_512_v4
+120/613181/campos_512_v4
+120/613182/campos_512_v4
+120/613193/campos_512_v4
+120/613196/campos_512_v4
+120/613200/campos_512_v4
+120/613203/campos_512_v4
+120/613205/campos_512_v4
+120/613221/campos_512_v4
+120/613231/campos_512_v4
+120/613239/campos_512_v4
+120/613259/campos_512_v4
+120/613261/campos_512_v4
+120/613267/campos_512_v4
+120/613273/campos_512_v4
+120/613274/campos_512_v4
+120/613285/campos_512_v4
+120/613286/campos_512_v4
+120/613290/campos_512_v4
+120/613293/campos_512_v4
+120/613300/campos_512_v4
+120/613313/campos_512_v4
+120/613318/campos_512_v4
+120/613373/campos_512_v4
+120/613376/campos_512_v4
+120/613386/campos_512_v4
+120/613390/campos_512_v4
+120/613391/campos_512_v4
+120/613394/campos_512_v4
+120/613398/campos_512_v4
+120/613403/campos_512_v4
+120/613408/campos_512_v4
+120/613415/campos_512_v4
+120/613418/campos_512_v4
+120/613420/campos_512_v4
+120/613427/campos_512_v4
+120/613436/campos_512_v4
+120/613445/campos_512_v4
+120/613473/campos_512_v4
+120/613478/campos_512_v4
+120/613490/campos_512_v4
+120/613502/campos_512_v4
+120/613507/campos_512_v4
+120/613518/campos_512_v4
+120/613528/campos_512_v4
+120/613530/campos_512_v4
+120/613532/campos_512_v4
+120/613536/campos_512_v4
+120/613537/campos_512_v4
+120/613541/campos_512_v4
+120/613563/campos_512_v4
+120/613564/campos_512_v4
+120/613573/campos_512_v4
+120/613576/campos_512_v4
+120/613583/campos_512_v4
+120/613588/campos_512_v4
+120/613590/campos_512_v4
+120/613591/campos_512_v4
+120/613599/campos_512_v4
+120/613605/campos_512_v4
+120/613607/campos_512_v4
+120/613610/campos_512_v4
+120/613613/campos_512_v4
+120/613636/campos_512_v4
+120/613638/campos_512_v4
+120/613650/campos_512_v4
+120/613659/campos_512_v4
+120/613663/campos_512_v4
+120/613693/campos_512_v4
+120/613696/campos_512_v4
+120/613699/campos_512_v4
+120/613708/campos_512_v4
+120/613716/campos_512_v4
+120/613730/campos_512_v4
+120/613739/campos_512_v4
+120/613741/campos_512_v4
+120/613748/campos_512_v4
+120/613756/campos_512_v4
+120/613767/campos_512_v4
+120/613806/campos_512_v4
+120/613818/campos_512_v4
+120/613829/campos_512_v4
+120/613848/campos_512_v4
+120/613871/campos_512_v4
+120/613872/campos_512_v4
+120/613873/campos_512_v4
+120/613874/campos_512_v4
+120/613899/campos_512_v4
+120/613901/campos_512_v4
+120/613927/campos_512_v4
+120/613929/campos_512_v4
+120/613933/campos_512_v4
+120/613937/campos_512_v4
+120/613938/campos_512_v4
+120/613947/campos_512_v4
+120/613954/campos_512_v4
+120/613969/campos_512_v4
+120/613978/campos_512_v4
+120/613986/campos_512_v4
+120/613987/campos_512_v4
+120/614005/campos_512_v4
+120/614007/campos_512_v4
+120/614013/campos_512_v4
+120/614035/campos_512_v4
+120/614037/campos_512_v4
+120/614044/campos_512_v4
+120/614048/campos_512_v4
+120/614050/campos_512_v4
+120/614061/campos_512_v4
+120/614084/campos_512_v4
+120/614094/campos_512_v4
+120/614096/campos_512_v4
+120/614097/campos_512_v4
+120/614101/campos_512_v4
+120/614103/campos_512_v4
+120/614105/campos_512_v4
+120/614119/campos_512_v4
+120/614147/campos_512_v4
+120/614199/campos_512_v4
+120/614203/campos_512_v4
+120/614206/campos_512_v4
+120/614215/campos_512_v4
+120/614216/campos_512_v4
+120/614230/campos_512_v4
+120/614237/campos_512_v4
+120/614240/campos_512_v4
+120/614256/campos_512_v4
+120/614268/campos_512_v4
+120/614282/campos_512_v4
+120/614284/campos_512_v4
+120/614293/campos_512_v4
+120/614301/campos_512_v4
+120/614306/campos_512_v4
+120/614308/campos_512_v4
+120/614311/campos_512_v4
+120/614312/campos_512_v4
+120/614313/campos_512_v4
+120/614318/campos_512_v4
+120/614333/campos_512_v4
+120/614335/campos_512_v4
+120/614342/campos_512_v4
+120/614369/campos_512_v4
+120/614375/campos_512_v4
+120/614378/campos_512_v4
+120/614382/campos_512_v4
+120/614418/campos_512_v4
+120/614423/campos_512_v4
+120/614429/campos_512_v4
+120/614431/campos_512_v4
+120/614435/campos_512_v4
+120/614436/campos_512_v4
+120/614438/campos_512_v4
+120/614443/campos_512_v4
+120/614444/campos_512_v4
+120/614456/campos_512_v4
+120/614465/campos_512_v4
+120/614472/campos_512_v4
+120/614476/campos_512_v4
+120/614502/campos_512_v4
+120/614526/campos_512_v4
+120/614544/campos_512_v4
+120/614545/campos_512_v4
+120/614548/campos_512_v4
+120/614553/campos_512_v4
+120/614560/campos_512_v4
+120/614563/campos_512_v4
+120/614567/campos_512_v4
+120/614572/campos_512_v4
+120/614573/campos_512_v4
+120/614576/campos_512_v4
+120/614583/campos_512_v4
+120/614588/campos_512_v4
+120/614595/campos_512_v4
+120/614602/campos_512_v4
+120/614609/campos_512_v4
+120/614614/campos_512_v4
+120/614615/campos_512_v4
+120/614634/campos_512_v4
+120/614635/campos_512_v4
+120/614646/campos_512_v4
+120/614653/campos_512_v4
+120/614657/campos_512_v4
+120/614659/campos_512_v4
+120/614669/campos_512_v4
+120/614670/campos_512_v4
+120/614673/campos_512_v4
+120/614675/campos_512_v4
+120/614679/campos_512_v4
+120/614680/campos_512_v4
+120/614681/campos_512_v4
+120/614684/campos_512_v4
+120/614686/campos_512_v4
+120/614693/campos_512_v4
+120/614698/campos_512_v4
+120/614700/campos_512_v4
+120/614713/campos_512_v4
+120/614714/campos_512_v4
+120/614719/campos_512_v4
+120/614728/campos_512_v4
+120/614737/campos_512_v4
+120/614740/campos_512_v4
+120/614749/campos_512_v4
+120/614753/campos_512_v4
+120/614758/campos_512_v4
+120/614767/campos_512_v4
+120/614776/campos_512_v4
+120/614781/campos_512_v4
+120/614784/campos_512_v4
+120/614789/campos_512_v4
+120/614795/campos_512_v4
+120/614804/campos_512_v4
+120/614814/campos_512_v4
+120/614815/campos_512_v4
+120/614829/campos_512_v4
+120/614830/campos_512_v4
+120/614833/campos_512_v4
+120/614838/campos_512_v4
+120/614847/campos_512_v4
+120/614848/campos_512_v4
+120/614872/campos_512_v4
+120/614884/campos_512_v4
+120/614911/campos_512_v4
+120/614913/campos_512_v4
+120/614928/campos_512_v4
+120/614930/campos_512_v4
+120/614950/campos_512_v4
+120/614955/campos_512_v4
+120/614960/campos_512_v4
+120/614968/campos_512_v4
+120/614971/campos_512_v4
+120/614974/campos_512_v4
+120/614976/campos_512_v4
+120/614980/campos_512_v4
+120/614983/campos_512_v4
+120/614986/campos_512_v4
+121/615006/campos_512_v4
+121/615009/campos_512_v4
+121/615016/campos_512_v4
+121/615017/campos_512_v4
+121/615028/campos_512_v4
+121/615030/campos_512_v4
+121/615035/campos_512_v4
+121/615042/campos_512_v4
+121/615059/campos_512_v4
+121/615092/campos_512_v4
+121/615095/campos_512_v4
+121/615097/campos_512_v4
+121/615103/campos_512_v4
+121/615113/campos_512_v4
+121/615117/campos_512_v4
+121/615126/campos_512_v4
+121/615128/campos_512_v4
+121/615131/campos_512_v4
+121/615166/campos_512_v4
+121/615173/campos_512_v4
+121/615186/campos_512_v4
+121/615190/campos_512_v4
+121/615193/campos_512_v4
+121/615202/campos_512_v4
+121/615233/campos_512_v4
+121/615247/campos_512_v4
+121/615248/campos_512_v4
+121/615282/campos_512_v4
+121/615284/campos_512_v4
+121/615285/campos_512_v4
+121/615298/campos_512_v4
+121/615302/campos_512_v4
+121/615312/campos_512_v4
+121/615372/campos_512_v4
+121/615375/campos_512_v4
+121/615384/campos_512_v4
+121/615396/campos_512_v4
+121/615398/campos_512_v4
+121/615411/campos_512_v4
+121/615428/campos_512_v4
+121/615436/campos_512_v4
+121/615446/campos_512_v4
+121/615458/campos_512_v4
+121/615469/campos_512_v4
+121/615470/campos_512_v4
+121/615472/campos_512_v4
+121/615475/campos_512_v4
+121/615476/campos_512_v4
+121/615488/campos_512_v4
+121/615490/campos_512_v4
+121/615494/campos_512_v4
+121/615497/campos_512_v4
+121/615503/campos_512_v4
+121/615511/campos_512_v4
+121/615516/campos_512_v4
+121/615517/campos_512_v4
+121/615524/campos_512_v4
+121/615528/campos_512_v4
+121/615533/campos_512_v4
+121/615538/campos_512_v4
+121/615539/campos_512_v4
+121/615544/campos_512_v4
+121/615549/campos_512_v4
+121/615552/campos_512_v4
+121/615557/campos_512_v4
+121/615566/campos_512_v4
+121/615577/campos_512_v4
+121/615578/campos_512_v4
+121/615579/campos_512_v4
+121/615590/campos_512_v4
+121/615605/campos_512_v4
+121/615608/campos_512_v4
+121/615609/campos_512_v4
+121/615624/campos_512_v4
+121/615637/campos_512_v4
+121/615651/campos_512_v4
+121/615657/campos_512_v4
+121/615668/campos_512_v4
+121/615672/campos_512_v4
+121/615680/campos_512_v4
+121/615701/campos_512_v4
+121/615703/campos_512_v4
+121/615705/campos_512_v4
+121/615708/campos_512_v4
+121/615715/campos_512_v4
+121/615717/campos_512_v4
+121/615722/campos_512_v4
+121/615736/campos_512_v4
+121/615767/campos_512_v4
+121/615795/campos_512_v4
+121/615824/campos_512_v4
+121/615827/campos_512_v4
+121/615829/campos_512_v4
+121/615831/campos_512_v4
+121/615834/campos_512_v4
+121/615840/campos_512_v4
+121/615842/campos_512_v4
+121/615853/campos_512_v4
+121/615861/campos_512_v4
+121/615868/campos_512_v4
+121/615869/campos_512_v4
+121/615880/campos_512_v4
+121/615885/campos_512_v4
+121/615887/campos_512_v4
+121/615902/campos_512_v4
+121/615935/campos_512_v4
+121/615938/campos_512_v4
+121/615940/campos_512_v4
+121/615941/campos_512_v4
+121/615950/campos_512_v4
+121/615953/campos_512_v4
+121/615958/campos_512_v4
+121/615960/campos_512_v4
+121/615965/campos_512_v4
+121/615983/campos_512_v4
+121/615986/campos_512_v4
+121/615990/campos_512_v4
+121/615992/campos_512_v4
+121/615999/campos_512_v4
+121/616001/campos_512_v4
+121/616003/campos_512_v4
+121/616004/campos_512_v4
+121/616006/campos_512_v4
+121/616015/campos_512_v4
+121/616023/campos_512_v4
+121/616028/campos_512_v4
+121/616032/campos_512_v4
+121/616047/campos_512_v4
+121/616048/campos_512_v4
+121/616062/campos_512_v4
+121/616076/campos_512_v4
+121/616077/campos_512_v4
+121/616082/campos_512_v4
+121/616087/campos_512_v4
+121/616094/campos_512_v4
+121/616113/campos_512_v4
+121/616117/campos_512_v4
+121/616129/campos_512_v4
+121/616136/campos_512_v4
+121/616145/campos_512_v4
+121/616152/campos_512_v4
+121/616169/campos_512_v4
+121/616170/campos_512_v4
+121/616172/campos_512_v4
+121/616179/campos_512_v4
+121/616183/campos_512_v4
+121/616186/campos_512_v4
+121/616193/campos_512_v4
+121/616194/campos_512_v4
+121/616200/campos_512_v4
+121/616202/campos_512_v4
+121/616215/campos_512_v4
+121/616216/campos_512_v4
+121/616257/campos_512_v4
+121/616293/campos_512_v4
+121/616302/campos_512_v4
+121/616307/campos_512_v4
+121/616314/campos_512_v4
+121/616318/campos_512_v4
+121/616326/campos_512_v4
+121/616335/campos_512_v4
+121/616339/campos_512_v4
+121/616354/campos_512_v4
+121/616355/campos_512_v4
+121/616359/campos_512_v4
+121/616362/campos_512_v4
+121/616369/campos_512_v4
+121/616373/campos_512_v4
+121/616382/campos_512_v4
+121/616384/campos_512_v4
+121/616390/campos_512_v4
+121/616424/campos_512_v4
+121/616428/campos_512_v4
+121/616445/campos_512_v4
+121/616458/campos_512_v4
+121/616477/campos_512_v4
+121/616480/campos_512_v4
+121/616481/campos_512_v4
+121/616484/campos_512_v4
+121/616490/campos_512_v4
+121/616492/campos_512_v4
+121/616515/campos_512_v4
+121/616524/campos_512_v4
+121/616534/campos_512_v4
+121/616544/campos_512_v4
+121/616561/campos_512_v4
+121/616563/campos_512_v4
+121/616575/campos_512_v4
+121/616594/campos_512_v4
+121/616603/campos_512_v4
+121/616606/campos_512_v4
+121/616610/campos_512_v4
+121/616614/campos_512_v4
+121/616616/campos_512_v4
+121/616623/campos_512_v4
+121/616637/campos_512_v4
+121/616643/campos_512_v4
+121/616648/campos_512_v4
+121/616656/campos_512_v4
+121/616696/campos_512_v4
+121/616702/campos_512_v4
+121/616715/campos_512_v4
+121/616727/campos_512_v4
+121/616730/campos_512_v4
+121/616733/campos_512_v4
+121/616742/campos_512_v4
+121/616743/campos_512_v4
+121/616751/campos_512_v4
+121/616754/campos_512_v4
+121/616766/campos_512_v4
+121/616768/campos_512_v4
+121/616771/campos_512_v4
+121/616773/campos_512_v4
+121/616775/campos_512_v4
+121/616791/campos_512_v4
+121/616795/campos_512_v4
+121/616800/campos_512_v4
+121/616807/campos_512_v4
+121/616829/campos_512_v4
+121/616831/campos_512_v4
+121/616832/campos_512_v4
+121/616874/campos_512_v4
+121/616876/campos_512_v4
+121/616878/campos_512_v4
+121/616884/campos_512_v4
+121/616897/campos_512_v4
+121/616910/campos_512_v4
+121/616912/campos_512_v4
+121/616914/campos_512_v4
+121/616916/campos_512_v4
+121/616918/campos_512_v4
+121/616920/campos_512_v4
+121/616921/campos_512_v4
+121/616940/campos_512_v4
+121/616950/campos_512_v4
+121/616954/campos_512_v4
+121/616957/campos_512_v4
+121/616983/campos_512_v4
+121/616988/campos_512_v4
+121/616994/campos_512_v4
+121/616995/campos_512_v4
+121/616996/campos_512_v4
+121/617001/campos_512_v4
+121/617009/campos_512_v4
+121/617011/campos_512_v4
+121/617039/campos_512_v4
+121/617040/campos_512_v4
+121/617046/campos_512_v4
+121/617058/campos_512_v4
+121/617074/campos_512_v4
+121/617081/campos_512_v4
+121/617085/campos_512_v4
+121/617105/campos_512_v4
+121/617110/campos_512_v4
+121/617121/campos_512_v4
+121/617136/campos_512_v4
+121/617150/campos_512_v4
+121/617163/campos_512_v4
+121/617175/campos_512_v4
+121/617188/campos_512_v4
+121/617193/campos_512_v4
+121/617202/campos_512_v4
+121/617242/campos_512_v4
+121/617248/campos_512_v4
+121/617251/campos_512_v4
+121/617259/campos_512_v4
+121/617267/campos_512_v4
+121/617269/campos_512_v4
+121/617285/campos_512_v4
+121/617286/campos_512_v4
+121/617291/campos_512_v4
+121/617327/campos_512_v4
+121/617333/campos_512_v4
+121/617334/campos_512_v4
+121/617336/campos_512_v4
+121/617343/campos_512_v4
+121/617344/campos_512_v4
+121/617348/campos_512_v4
+121/617390/campos_512_v4
+121/617392/campos_512_v4
+121/617395/campos_512_v4
+121/617400/campos_512_v4
+121/617407/campos_512_v4
+121/617414/campos_512_v4
+121/617421/campos_512_v4
+121/617422/campos_512_v4
+121/617432/campos_512_v4
+121/617462/campos_512_v4
+121/617464/campos_512_v4
+121/617503/campos_512_v4
+121/617513/campos_512_v4
+121/617517/campos_512_v4
+121/617520/campos_512_v4
+121/617522/campos_512_v4
+121/617527/campos_512_v4
+121/617528/campos_512_v4
+121/617536/campos_512_v4
+121/617544/campos_512_v4
+121/617575/campos_512_v4
+121/617578/campos_512_v4
+121/617581/campos_512_v4
+121/617585/campos_512_v4
+121/617588/campos_512_v4
+121/617589/campos_512_v4
+121/617593/campos_512_v4
+121/617599/campos_512_v4
+121/617602/campos_512_v4
+121/617613/campos_512_v4
+121/617618/campos_512_v4
+121/617623/campos_512_v4
+121/617627/campos_512_v4
+121/617628/campos_512_v4
+121/617632/campos_512_v4
+121/617637/campos_512_v4
+121/617655/campos_512_v4
+121/617661/campos_512_v4
+121/617665/campos_512_v4
+121/617673/campos_512_v4
+121/617675/campos_512_v4
+121/617686/campos_512_v4
+121/617693/campos_512_v4
+121/617696/campos_512_v4
+121/617702/campos_512_v4
+121/617715/campos_512_v4
+121/617723/campos_512_v4
+121/617726/campos_512_v4
+121/617736/campos_512_v4
+121/617746/campos_512_v4
+121/617769/campos_512_v4
+121/617779/campos_512_v4
+121/617788/campos_512_v4
+121/617799/campos_512_v4
+121/617802/campos_512_v4
+121/617824/campos_512_v4
+121/617846/campos_512_v4
+121/617855/campos_512_v4
+121/617857/campos_512_v4
+121/617866/campos_512_v4
+121/617868/campos_512_v4
+121/617898/campos_512_v4
+121/617919/campos_512_v4
+121/617954/campos_512_v4
+121/617960/campos_512_v4
+121/617965/campos_512_v4
+121/617969/campos_512_v4
+121/617970/campos_512_v4
+121/617987/campos_512_v4
+121/617991/campos_512_v4
+121/618020/campos_512_v4
+121/618023/campos_512_v4
+121/618032/campos_512_v4
+121/618036/campos_512_v4
+121/618047/campos_512_v4
+121/618051/campos_512_v4
+121/618055/campos_512_v4
+121/618057/campos_512_v4
+121/618058/campos_512_v4
+121/618065/campos_512_v4
+121/618073/campos_512_v4
+121/618080/campos_512_v4
+121/618093/campos_512_v4
+121/618094/campos_512_v4
+121/618098/campos_512_v4
+121/618112/campos_512_v4
+121/618132/campos_512_v4
+121/618144/campos_512_v4
+121/618145/campos_512_v4
+121/618153/campos_512_v4
+121/618157/campos_512_v4
+121/618169/campos_512_v4
+121/618170/campos_512_v4
+121/618198/campos_512_v4
+121/618210/campos_512_v4
+121/618213/campos_512_v4
+121/618218/campos_512_v4
+121/618230/campos_512_v4
+121/618235/campos_512_v4
+121/618238/campos_512_v4
+121/618241/campos_512_v4
+121/618242/campos_512_v4
+121/618252/campos_512_v4
+121/618254/campos_512_v4
+121/618273/campos_512_v4
+121/618291/campos_512_v4
+121/618295/campos_512_v4
+121/618299/campos_512_v4
+121/618302/campos_512_v4
+121/618313/campos_512_v4
+121/618340/campos_512_v4
+121/618343/campos_512_v4
+121/618344/campos_512_v4
+121/618345/campos_512_v4
+121/618354/campos_512_v4
+121/618361/campos_512_v4
+121/618381/campos_512_v4
+121/618385/campos_512_v4
+121/618390/campos_512_v4
+121/618399/campos_512_v4
+121/618410/campos_512_v4
+121/618412/campos_512_v4
+121/618420/campos_512_v4
+121/618422/campos_512_v4
+121/618431/campos_512_v4
+121/618437/campos_512_v4
+121/618442/campos_512_v4
+121/618447/campos_512_v4
+121/618450/campos_512_v4
+121/618452/campos_512_v4
+121/618456/campos_512_v4
+121/618457/campos_512_v4
+121/618459/campos_512_v4
+121/618463/campos_512_v4
+121/618466/campos_512_v4
+121/618467/campos_512_v4
+121/618480/campos_512_v4
+121/618483/campos_512_v4
+121/618488/campos_512_v4
+121/618497/campos_512_v4
+121/618511/campos_512_v4
+121/618516/campos_512_v4
+121/618518/campos_512_v4
+121/618519/campos_512_v4
+121/618520/campos_512_v4
+121/618528/campos_512_v4
+121/618530/campos_512_v4
+121/618543/campos_512_v4
+121/618548/campos_512_v4
+121/618549/campos_512_v4
+121/618550/campos_512_v4
+121/618553/campos_512_v4
+121/618575/campos_512_v4
+121/618578/campos_512_v4
+121/618579/campos_512_v4
+121/618582/campos_512_v4
+121/618583/campos_512_v4
+121/618585/campos_512_v4
+121/618587/campos_512_v4
+121/618588/campos_512_v4
+121/618589/campos_512_v4
+121/618610/campos_512_v4
+121/618613/campos_512_v4
+121/618617/campos_512_v4
+121/618618/campos_512_v4
+121/618619/campos_512_v4
+121/618639/campos_512_v4
+121/618642/campos_512_v4
+121/618650/campos_512_v4
+121/618652/campos_512_v4
+121/618668/campos_512_v4
+121/618670/campos_512_v4
+121/618676/campos_512_v4
+121/618679/campos_512_v4
+121/618702/campos_512_v4
+121/618708/campos_512_v4
+121/618712/campos_512_v4
+121/618715/campos_512_v4
+121/618719/campos_512_v4
+121/618724/campos_512_v4
+121/618727/campos_512_v4
+121/618728/campos_512_v4
+121/618731/campos_512_v4
+121/618739/campos_512_v4
+121/618745/campos_512_v4
+121/618747/campos_512_v4
+121/618750/campos_512_v4
+121/618784/campos_512_v4
+121/618801/campos_512_v4
+121/618810/campos_512_v4
+121/618822/campos_512_v4
+121/618831/campos_512_v4
+121/618858/campos_512_v4
+121/618880/campos_512_v4
+121/618881/campos_512_v4
+121/618883/campos_512_v4
+121/618888/campos_512_v4
+121/618890/campos_512_v4
+121/618896/campos_512_v4
+121/618898/campos_512_v4
+121/618908/campos_512_v4
+121/618913/campos_512_v4
+121/618920/campos_512_v4
+121/618924/campos_512_v4
+121/618943/campos_512_v4
+121/618953/campos_512_v4
+121/618960/campos_512_v4
+121/618962/campos_512_v4
+121/618965/campos_512_v4
+121/618974/campos_512_v4
+121/618978/campos_512_v4
+121/618985/campos_512_v4
+121/619009/campos_512_v4
+121/619011/campos_512_v4
+121/619021/campos_512_v4
+121/619031/campos_512_v4
+121/619032/campos_512_v4
+121/619039/campos_512_v4
+121/619045/campos_512_v4
+121/619049/campos_512_v4
+121/619052/campos_512_v4
+121/619064/campos_512_v4
+121/619077/campos_512_v4
+121/619078/campos_512_v4
+121/619080/campos_512_v4
+121/619104/campos_512_v4
+121/619114/campos_512_v4
+121/619115/campos_512_v4
+121/619127/campos_512_v4
+121/619134/campos_512_v4
+121/619135/campos_512_v4
+121/619140/campos_512_v4
+121/619161/campos_512_v4
+121/619167/campos_512_v4
+121/619168/campos_512_v4
+121/619171/campos_512_v4
+121/619172/campos_512_v4
+121/619183/campos_512_v4
+121/619187/campos_512_v4
+121/619197/campos_512_v4
+121/619205/campos_512_v4
+121/619224/campos_512_v4
+121/619225/campos_512_v4
+121/619229/campos_512_v4
+121/619233/campos_512_v4
+121/619238/campos_512_v4
+121/619245/campos_512_v4
+121/619251/campos_512_v4
+121/619253/campos_512_v4
+121/619256/campos_512_v4
+121/619289/campos_512_v4
+121/619301/campos_512_v4
+121/619302/campos_512_v4
+121/619331/campos_512_v4
+121/619335/campos_512_v4
+121/619343/campos_512_v4
+121/619364/campos_512_v4
+121/619368/campos_512_v4
+121/619377/campos_512_v4
+121/619392/campos_512_v4
+121/619428/campos_512_v4
+121/619435/campos_512_v4
+121/619446/campos_512_v4
+121/619456/campos_512_v4
+121/619460/campos_512_v4
+121/619463/campos_512_v4
+121/619487/campos_512_v4
+121/619498/campos_512_v4
+121/619499/campos_512_v4
+121/619500/campos_512_v4
+121/619505/campos_512_v4
+121/619509/campos_512_v4
+121/619513/campos_512_v4
+121/619517/campos_512_v4
+121/619523/campos_512_v4
+121/619527/campos_512_v4
+121/619530/campos_512_v4
+121/619531/campos_512_v4
+121/619532/campos_512_v4
+121/619540/campos_512_v4
+121/619542/campos_512_v4
+121/619554/campos_512_v4
+121/619570/campos_512_v4
+121/619572/campos_512_v4
+121/619586/campos_512_v4
+121/619588/campos_512_v4
+121/619591/campos_512_v4
+121/619595/campos_512_v4
+121/619612/campos_512_v4
+121/619615/campos_512_v4
+121/619620/campos_512_v4
+121/619629/campos_512_v4
+121/619631/campos_512_v4
+121/619645/campos_512_v4
+121/619650/campos_512_v4
+121/619661/campos_512_v4
+121/619667/campos_512_v4
+121/619699/campos_512_v4
+121/619711/campos_512_v4
+121/619731/campos_512_v4
+121/619733/campos_512_v4
+121/619738/campos_512_v4
+121/619767/campos_512_v4
+121/619787/campos_512_v4
+121/619788/campos_512_v4
+121/619789/campos_512_v4
+121/619791/campos_512_v4
+121/619797/campos_512_v4
+121/619801/campos_512_v4
+121/619803/campos_512_v4
+121/619805/campos_512_v4
+121/619810/campos_512_v4
+121/619816/campos_512_v4
+121/619823/campos_512_v4
+121/619825/campos_512_v4
+121/619842/campos_512_v4
+121/619848/campos_512_v4
+121/619865/campos_512_v4
+121/619870/campos_512_v4
+121/619873/campos_512_v4
+121/619886/campos_512_v4
+121/619888/campos_512_v4
+121/619891/campos_512_v4
+121/619894/campos_512_v4
+121/619902/campos_512_v4
+121/619911/campos_512_v4
+121/619915/campos_512_v4
+121/619916/campos_512_v4
+121/619942/campos_512_v4
+121/619956/campos_512_v4
+121/619958/campos_512_v4
+121/619964/campos_512_v4
+121/619997/campos_512_v4
+122/620013/campos_512_v4
+122/620014/campos_512_v4
+122/620015/campos_512_v4
+122/620023/campos_512_v4
+122/620034/campos_512_v4
+122/620041/campos_512_v4
+122/620047/campos_512_v4
+122/620049/campos_512_v4
+122/620089/campos_512_v4
+122/620094/campos_512_v4
+122/620100/campos_512_v4
+122/620108/campos_512_v4
+122/620121/campos_512_v4
+122/620135/campos_512_v4
+122/620151/campos_512_v4
+122/620161/campos_512_v4
+122/620164/campos_512_v4
+122/620170/campos_512_v4
+122/620174/campos_512_v4
+122/620183/campos_512_v4
+122/620188/campos_512_v4
+122/620192/campos_512_v4
+122/620201/campos_512_v4
+122/620202/campos_512_v4
+122/620211/campos_512_v4
+122/620212/campos_512_v4
+122/620222/campos_512_v4
+122/620229/campos_512_v4
+122/620230/campos_512_v4
+122/620231/campos_512_v4
+122/620233/campos_512_v4
+122/620235/campos_512_v4
+122/620243/campos_512_v4
+122/620247/campos_512_v4
+122/620250/campos_512_v4
+122/620264/campos_512_v4
+122/620268/campos_512_v4
+122/620279/campos_512_v4
+122/620281/campos_512_v4
+122/620284/campos_512_v4
+122/620285/campos_512_v4
+122/620287/campos_512_v4
+122/620292/campos_512_v4
+122/620303/campos_512_v4
+122/620306/campos_512_v4
+122/620308/campos_512_v4
+122/620309/campos_512_v4
+122/620318/campos_512_v4
+122/620326/campos_512_v4
+122/620328/campos_512_v4
+122/620333/campos_512_v4
+122/620335/campos_512_v4
+122/620339/campos_512_v4
+122/620353/campos_512_v4
+122/620375/campos_512_v4
+122/620379/campos_512_v4
+122/620381/campos_512_v4
+122/620399/campos_512_v4
+122/620406/campos_512_v4
+122/620407/campos_512_v4
+122/620412/campos_512_v4
+122/620420/campos_512_v4
+122/620440/campos_512_v4
+122/620455/campos_512_v4
+122/620467/campos_512_v4
+122/620472/campos_512_v4
+122/620483/campos_512_v4
+122/620486/campos_512_v4
+122/620491/campos_512_v4
+122/620494/campos_512_v4
+122/620507/campos_512_v4
+122/620511/campos_512_v4
+122/620512/campos_512_v4
+122/620519/campos_512_v4
+122/620521/campos_512_v4
+122/620529/campos_512_v4
+122/620530/campos_512_v4
+122/620538/campos_512_v4
+122/620542/campos_512_v4
+122/620565/campos_512_v4
+122/620567/campos_512_v4
+122/620569/campos_512_v4
+122/620570/campos_512_v4
+122/620578/campos_512_v4
+122/620586/campos_512_v4
+122/620587/campos_512_v4
+122/620591/campos_512_v4
+122/620594/campos_512_v4
+122/620601/campos_512_v4
+122/620602/campos_512_v4
+122/620609/campos_512_v4
+122/620613/campos_512_v4
+122/620621/campos_512_v4
+122/620623/campos_512_v4
+122/620624/campos_512_v4
+122/620626/campos_512_v4
+122/620629/campos_512_v4
+122/620638/campos_512_v4
+122/620641/campos_512_v4
+122/620644/campos_512_v4
+122/620647/campos_512_v4
+122/620650/campos_512_v4
+122/620661/campos_512_v4
+122/620667/campos_512_v4
+122/620673/campos_512_v4
+122/620676/campos_512_v4
+122/620685/campos_512_v4
+122/620708/campos_512_v4
+122/620730/campos_512_v4
+122/620741/campos_512_v4
+122/620742/campos_512_v4
+122/620748/campos_512_v4
+122/620767/campos_512_v4
+122/620768/campos_512_v4
+122/620779/campos_512_v4
+122/620785/campos_512_v4
+122/620787/campos_512_v4
+122/620795/campos_512_v4
+122/620800/campos_512_v4
+122/620811/campos_512_v4
+122/620812/campos_512_v4
+122/620814/campos_512_v4
+122/620823/campos_512_v4
+122/620824/campos_512_v4
+122/620837/campos_512_v4
+122/620847/campos_512_v4
+122/620857/campos_512_v4
+122/620868/campos_512_v4
+122/620877/campos_512_v4
+122/620884/campos_512_v4
+122/620892/campos_512_v4
+122/620895/campos_512_v4
+122/620901/campos_512_v4
+122/620917/campos_512_v4
+122/620920/campos_512_v4
+122/620921/campos_512_v4
+122/620931/campos_512_v4
+122/620935/campos_512_v4
+122/620936/campos_512_v4
+122/620937/campos_512_v4
+122/620942/campos_512_v4
+122/620951/campos_512_v4
+122/620952/campos_512_v4
+122/620953/campos_512_v4
+122/620956/campos_512_v4
+122/620960/campos_512_v4
+122/620971/campos_512_v4
+122/620972/campos_512_v4
+122/620978/campos_512_v4
+122/620984/campos_512_v4
+122/620988/campos_512_v4
+122/620995/campos_512_v4
+122/621005/campos_512_v4
+122/621010/campos_512_v4
+122/621020/campos_512_v4
+122/621023/campos_512_v4
+122/621028/campos_512_v4
+122/621036/campos_512_v4
+122/621038/campos_512_v4
+122/621047/campos_512_v4
+122/621051/campos_512_v4
+122/621053/campos_512_v4
+122/621060/campos_512_v4
+122/621067/campos_512_v4
+122/621068/campos_512_v4
+122/621086/campos_512_v4
+122/621087/campos_512_v4
+122/621091/campos_512_v4
+122/621108/campos_512_v4
+122/621110/campos_512_v4
+122/621113/campos_512_v4
+122/621116/campos_512_v4
+122/621122/campos_512_v4
+122/621123/campos_512_v4
+122/621125/campos_512_v4
+122/621129/campos_512_v4
+122/621147/campos_512_v4
+122/621151/campos_512_v4
+122/621153/campos_512_v4
+122/621162/campos_512_v4
+122/621163/campos_512_v4
+122/621169/campos_512_v4
+122/621176/campos_512_v4
+122/621183/campos_512_v4
+122/621203/campos_512_v4
+122/621216/campos_512_v4
+122/621227/campos_512_v4
+122/621236/campos_512_v4
+122/621239/campos_512_v4
+122/621240/campos_512_v4
+122/621247/campos_512_v4
+122/621249/campos_512_v4
+122/621255/campos_512_v4
+122/621256/campos_512_v4
+122/621271/campos_512_v4
+122/621281/campos_512_v4
+122/621289/campos_512_v4
+122/621292/campos_512_v4
+122/621307/campos_512_v4
+122/621318/campos_512_v4
+122/621321/campos_512_v4
+122/621327/campos_512_v4
+122/621332/campos_512_v4
+122/621333/campos_512_v4
+122/621351/campos_512_v4
+122/621354/campos_512_v4
+122/621373/campos_512_v4
+122/621383/campos_512_v4
+122/621391/campos_512_v4
+122/621392/campos_512_v4
+122/621405/campos_512_v4
+122/621407/campos_512_v4
+122/621410/campos_512_v4
+122/621418/campos_512_v4
+122/621435/campos_512_v4
+122/621447/campos_512_v4
+122/621455/campos_512_v4
+122/621470/campos_512_v4
+122/621474/campos_512_v4
+122/621479/campos_512_v4
+122/621486/campos_512_v4
+122/621499/campos_512_v4
+122/621503/campos_512_v4
+122/621509/campos_512_v4
+122/621514/campos_512_v4
+122/621518/campos_512_v4
+122/621520/campos_512_v4
+122/621521/campos_512_v4
+122/621536/campos_512_v4
+122/621544/campos_512_v4
+122/621570/campos_512_v4
+122/621571/campos_512_v4
+122/621573/campos_512_v4
+122/621595/campos_512_v4
+122/621612/campos_512_v4
+122/621626/campos_512_v4
+122/621642/campos_512_v4
+122/621643/campos_512_v4
+122/621647/campos_512_v4
+122/621665/campos_512_v4
+122/621667/campos_512_v4
+122/621672/campos_512_v4
+122/621687/campos_512_v4
+122/621688/campos_512_v4
+122/621695/campos_512_v4
+122/621696/campos_512_v4
+122/621703/campos_512_v4
+122/621718/campos_512_v4
+122/621720/campos_512_v4
+122/621721/campos_512_v4
+122/621742/campos_512_v4
+122/621751/campos_512_v4
+122/621779/campos_512_v4
+122/621780/campos_512_v4
+122/621793/campos_512_v4
+122/621797/campos_512_v4
+122/621806/campos_512_v4
+122/621811/campos_512_v4
+122/621815/campos_512_v4
+122/621817/campos_512_v4
+122/621835/campos_512_v4
+122/621837/campos_512_v4
+122/621849/campos_512_v4
+122/621866/campos_512_v4
+122/621867/campos_512_v4
+122/621873/campos_512_v4
+122/621877/campos_512_v4
+122/621885/campos_512_v4
+122/621886/campos_512_v4
+122/621893/campos_512_v4
+122/621895/campos_512_v4
+122/621907/campos_512_v4
+122/621911/campos_512_v4
+122/621931/campos_512_v4
+122/621948/campos_512_v4
+122/621962/campos_512_v4
+122/621964/campos_512_v4
+122/621975/campos_512_v4
+122/621978/campos_512_v4
+122/621989/campos_512_v4
+122/622002/campos_512_v4
+122/622013/campos_512_v4
+122/622046/campos_512_v4
+122/622051/campos_512_v4
+122/622052/campos_512_v4
+122/622064/campos_512_v4
+122/622076/campos_512_v4
+122/622084/campos_512_v4
+122/622085/campos_512_v4
+122/622086/campos_512_v4
+122/622088/campos_512_v4
+122/622093/campos_512_v4
+122/622098/campos_512_v4
+122/622106/campos_512_v4
+122/622118/campos_512_v4
+122/622123/campos_512_v4
+122/622130/campos_512_v4
+122/622159/campos_512_v4
+122/622172/campos_512_v4
+122/622186/campos_512_v4
+122/622187/campos_512_v4
+122/622189/campos_512_v4
+122/622195/campos_512_v4
+122/622200/campos_512_v4
+122/622203/campos_512_v4
+122/622204/campos_512_v4
+122/622205/campos_512_v4
+122/622207/campos_512_v4
+122/622208/campos_512_v4
+122/622209/campos_512_v4
+122/622210/campos_512_v4
+122/622227/campos_512_v4
+122/622234/campos_512_v4
+122/622244/campos_512_v4
+122/622260/campos_512_v4
+122/622263/campos_512_v4
+122/622268/campos_512_v4
+122/622275/campos_512_v4
+122/622282/campos_512_v4
+122/622284/campos_512_v4
+122/622299/campos_512_v4
+122/622301/campos_512_v4
+122/622302/campos_512_v4
+122/622305/campos_512_v4
+122/622314/campos_512_v4
+122/622317/campos_512_v4
+122/622326/campos_512_v4
+122/622343/campos_512_v4
+122/622347/campos_512_v4
+122/622348/campos_512_v4
+122/622356/campos_512_v4
+122/622360/campos_512_v4
+122/622366/campos_512_v4
+122/622370/campos_512_v4
+122/622386/campos_512_v4
+122/622389/campos_512_v4
+122/622397/campos_512_v4
+122/622400/campos_512_v4
+122/622402/campos_512_v4
+122/622411/campos_512_v4
+122/622413/campos_512_v4
+122/622417/campos_512_v4
+122/622428/campos_512_v4
+122/622432/campos_512_v4
+122/622439/campos_512_v4
+122/622444/campos_512_v4
+122/622459/campos_512_v4
+122/622461/campos_512_v4
+122/622472/campos_512_v4
+122/622475/campos_512_v4
+122/622477/campos_512_v4
+122/622486/campos_512_v4
+122/622487/campos_512_v4
+122/622502/campos_512_v4
+122/622503/campos_512_v4
+122/622507/campos_512_v4
+122/622513/campos_512_v4
+122/622514/campos_512_v4
+122/622519/campos_512_v4
+122/622548/campos_512_v4
+122/622553/campos_512_v4
+122/622559/campos_512_v4
+122/622570/campos_512_v4
+122/622582/campos_512_v4
+122/622588/campos_512_v4
+122/622589/campos_512_v4
+122/622592/campos_512_v4
+122/622593/campos_512_v4
+122/622597/campos_512_v4
+122/622606/campos_512_v4
+122/622623/campos_512_v4
+122/622633/campos_512_v4
+122/622637/campos_512_v4
+122/622646/campos_512_v4
+122/622649/campos_512_v4
+122/622655/campos_512_v4
+122/622656/campos_512_v4
+122/622683/campos_512_v4
+122/622688/campos_512_v4
+122/622704/campos_512_v4
+122/622705/campos_512_v4
+122/622706/campos_512_v4
+122/622707/campos_512_v4
+122/622714/campos_512_v4
+122/622716/campos_512_v4
+122/622720/campos_512_v4
+122/622730/campos_512_v4
+122/622746/campos_512_v4
+122/622747/campos_512_v4
+122/622754/campos_512_v4
+122/622767/campos_512_v4
+122/622776/campos_512_v4
+122/622788/campos_512_v4
+122/622793/campos_512_v4
+122/622795/campos_512_v4
+122/622798/campos_512_v4
+122/622812/campos_512_v4
+122/622815/campos_512_v4
+122/622819/campos_512_v4
+122/622824/campos_512_v4
+122/622828/campos_512_v4
+122/622832/campos_512_v4
+122/622835/campos_512_v4
+122/622856/campos_512_v4
+122/622867/campos_512_v4
+122/622868/campos_512_v4
+122/622875/campos_512_v4
+122/622878/campos_512_v4
+122/622885/campos_512_v4
+122/622890/campos_512_v4
+122/622894/campos_512_v4
+122/622903/campos_512_v4
+122/622912/campos_512_v4
+122/622919/campos_512_v4
+122/622922/campos_512_v4
+122/622933/campos_512_v4
+122/622940/campos_512_v4
+122/622947/campos_512_v4
+122/622951/campos_512_v4
+122/622960/campos_512_v4
+122/622966/campos_512_v4
+122/622985/campos_512_v4
+122/623001/campos_512_v4
+122/623003/campos_512_v4
+122/623006/campos_512_v4
+122/623012/campos_512_v4
+122/623020/campos_512_v4
+122/623023/campos_512_v4
+122/623025/campos_512_v4
+122/623027/campos_512_v4
+122/623032/campos_512_v4
+122/623040/campos_512_v4
+122/623050/campos_512_v4
+122/623052/campos_512_v4
+122/623073/campos_512_v4
+122/623084/campos_512_v4
+122/623089/campos_512_v4
+122/623100/campos_512_v4
+122/623108/campos_512_v4
+122/623114/campos_512_v4
+122/623118/campos_512_v4
+122/623132/campos_512_v4
+122/623150/campos_512_v4
+122/623164/campos_512_v4
+122/623177/campos_512_v4
+122/623182/campos_512_v4
+122/623186/campos_512_v4
+122/623187/campos_512_v4
+122/623193/campos_512_v4
+122/623216/campos_512_v4
+122/623222/campos_512_v4
+122/623223/campos_512_v4
+122/623230/campos_512_v4
+122/623235/campos_512_v4
+122/623249/campos_512_v4
+122/623253/campos_512_v4
+122/623257/campos_512_v4
+122/623258/campos_512_v4
+122/623262/campos_512_v4
+122/623263/campos_512_v4
+122/623272/campos_512_v4
+122/623273/campos_512_v4
+122/623294/campos_512_v4
+122/623302/campos_512_v4
+122/623305/campos_512_v4
+122/623313/campos_512_v4
+122/623318/campos_512_v4
+122/623320/campos_512_v4
+122/623330/campos_512_v4
+122/623331/campos_512_v4
+122/623334/campos_512_v4
+122/623335/campos_512_v4
+122/623341/campos_512_v4
+122/623376/campos_512_v4
+122/623385/campos_512_v4
+122/623401/campos_512_v4
+122/623402/campos_512_v4
+122/623405/campos_512_v4
+122/623415/campos_512_v4
+122/623419/campos_512_v4
+122/623422/campos_512_v4
+122/623430/campos_512_v4
+122/623436/campos_512_v4
+122/623439/campos_512_v4
+122/623447/campos_512_v4
+122/623464/campos_512_v4
+122/623467/campos_512_v4
+122/623493/campos_512_v4
+122/623503/campos_512_v4
+122/623509/campos_512_v4
+122/623514/campos_512_v4
+122/623524/campos_512_v4
+122/623525/campos_512_v4
+122/623528/campos_512_v4
+122/623532/campos_512_v4
+122/623534/campos_512_v4
+122/623543/campos_512_v4
+122/623558/campos_512_v4
+122/623568/campos_512_v4
+122/623569/campos_512_v4
+122/623585/campos_512_v4
+122/623592/campos_512_v4
+122/623597/campos_512_v4
+122/623608/campos_512_v4
+122/623611/campos_512_v4
+122/623613/campos_512_v4
+122/623626/campos_512_v4
+122/623648/campos_512_v4
+122/623654/campos_512_v4
+122/623667/campos_512_v4
+122/623674/campos_512_v4
+122/623681/campos_512_v4
+122/623699/campos_512_v4
+122/623714/campos_512_v4
+122/623715/campos_512_v4
+122/623719/campos_512_v4
+122/623726/campos_512_v4
+122/623737/campos_512_v4
+122/623748/campos_512_v4
+122/623770/campos_512_v4
+122/623777/campos_512_v4
+122/623787/campos_512_v4
+122/623789/campos_512_v4
+122/623791/campos_512_v4
+122/623796/campos_512_v4
+122/623806/campos_512_v4
+122/623813/campos_512_v4
+122/623830/campos_512_v4
+122/623838/campos_512_v4
+122/623839/campos_512_v4
+122/623852/campos_512_v4
+122/623870/campos_512_v4
+122/623877/campos_512_v4
+122/623889/campos_512_v4
+122/623920/campos_512_v4
+122/623924/campos_512_v4
+122/623927/campos_512_v4
+122/623932/campos_512_v4
+122/623933/campos_512_v4
+122/623944/campos_512_v4
+122/623958/campos_512_v4
+122/623968/campos_512_v4
+122/624013/campos_512_v4
+122/624023/campos_512_v4
+122/624025/campos_512_v4
+122/624030/campos_512_v4
+122/624033/campos_512_v4
+122/624035/campos_512_v4
+122/624036/campos_512_v4
+122/624046/campos_512_v4
+122/624051/campos_512_v4
+122/624052/campos_512_v4
+122/624053/campos_512_v4
+122/624061/campos_512_v4
+122/624063/campos_512_v4
+122/624068/campos_512_v4
+122/624081/campos_512_v4
+122/624088/campos_512_v4
+122/624089/campos_512_v4
+122/624090/campos_512_v4
+122/624096/campos_512_v4
+122/624117/campos_512_v4
+122/624119/campos_512_v4
+122/624121/campos_512_v4
+122/624127/campos_512_v4
+122/624129/campos_512_v4
+122/624137/campos_512_v4
+122/624138/campos_512_v4
+122/624143/campos_512_v4
+122/624147/campos_512_v4
+122/624161/campos_512_v4
+122/624163/campos_512_v4
+122/624172/campos_512_v4
+122/624179/campos_512_v4
+122/624182/campos_512_v4
+122/624188/campos_512_v4
+122/624197/campos_512_v4
+122/624204/campos_512_v4
+122/624213/campos_512_v4
+122/624232/campos_512_v4
+122/624243/campos_512_v4
+122/624247/campos_512_v4
+122/624259/campos_512_v4
+122/624263/campos_512_v4
+122/624277/campos_512_v4
+122/624311/campos_512_v4
+122/624313/campos_512_v4
+122/624318/campos_512_v4
+122/624331/campos_512_v4
+122/624350/campos_512_v4
+122/624353/campos_512_v4
+122/624360/campos_512_v4
+122/624369/campos_512_v4
+122/624378/campos_512_v4
+122/624381/campos_512_v4
+122/624387/campos_512_v4
+122/624402/campos_512_v4
+122/624413/campos_512_v4
+122/624417/campos_512_v4
+122/624422/campos_512_v4
+122/624427/campos_512_v4
+122/624428/campos_512_v4
+122/624448/campos_512_v4
+122/624459/campos_512_v4
+122/624472/campos_512_v4
+122/624495/campos_512_v4
+122/624504/campos_512_v4
+122/624507/campos_512_v4
+122/624512/campos_512_v4
+122/624524/campos_512_v4
+122/624526/campos_512_v4
+122/624535/campos_512_v4
+122/624538/campos_512_v4
+122/624543/campos_512_v4
+122/624554/campos_512_v4
+122/624561/campos_512_v4
+122/624562/campos_512_v4
+122/624568/campos_512_v4
+122/624573/campos_512_v4
+122/624590/campos_512_v4
+122/624594/campos_512_v4
+122/624617/campos_512_v4
+122/624622/campos_512_v4
+122/624626/campos_512_v4
+122/624629/campos_512_v4
+122/624637/campos_512_v4
+122/624648/campos_512_v4
+122/624658/campos_512_v4
+122/624660/campos_512_v4
+122/624667/campos_512_v4
+122/624669/campos_512_v4
+122/624679/campos_512_v4
+122/624684/campos_512_v4
+122/624688/campos_512_v4
+122/624693/campos_512_v4
+122/624697/campos_512_v4
+122/624704/campos_512_v4
+122/624705/campos_512_v4
+122/624707/campos_512_v4
+122/624708/campos_512_v4
+122/624713/campos_512_v4
+122/624718/campos_512_v4
+122/624725/campos_512_v4
+122/624728/campos_512_v4
+122/624730/campos_512_v4
+122/624744/campos_512_v4
+122/624749/campos_512_v4
+122/624757/campos_512_v4
+122/624772/campos_512_v4
+122/624809/campos_512_v4
+122/624822/campos_512_v4
+122/624836/campos_512_v4
+122/624845/campos_512_v4
+122/624846/campos_512_v4
+122/624847/campos_512_v4
+122/624858/campos_512_v4
+122/624862/campos_512_v4
+122/624878/campos_512_v4
+122/624883/campos_512_v4
+122/624889/campos_512_v4
+122/624890/campos_512_v4
+122/624893/campos_512_v4
+122/624901/campos_512_v4
+122/624905/campos_512_v4
+122/624906/campos_512_v4
+122/624923/campos_512_v4
+122/624928/campos_512_v4
+122/624948/campos_512_v4
+122/624951/campos_512_v4
+122/624955/campos_512_v4
+122/624990/campos_512_v4
+122/624997/campos_512_v4
+123/625020/campos_512_v4
+123/625030/campos_512_v4
+123/625032/campos_512_v4
+123/625033/campos_512_v4
+123/625034/campos_512_v4
+123/625065/campos_512_v4
+123/625066/campos_512_v4
+123/625077/campos_512_v4
+123/625082/campos_512_v4
+123/625086/campos_512_v4
+123/625104/campos_512_v4
+123/625105/campos_512_v4
+123/625120/campos_512_v4
+123/625121/campos_512_v4
+123/625122/campos_512_v4
+123/625123/campos_512_v4
+123/625130/campos_512_v4
+123/625132/campos_512_v4
+123/625133/campos_512_v4
+123/625134/campos_512_v4
+123/625135/campos_512_v4
+123/625144/campos_512_v4
+123/625148/campos_512_v4
+123/625153/campos_512_v4
+123/625157/campos_512_v4
+123/625171/campos_512_v4
+123/625179/campos_512_v4
+123/625181/campos_512_v4
+123/625192/campos_512_v4
+123/625228/campos_512_v4
+123/625229/campos_512_v4
+123/625243/campos_512_v4
+123/625249/campos_512_v4
+123/625252/campos_512_v4
+123/625261/campos_512_v4
+123/625277/campos_512_v4
+123/625282/campos_512_v4
+123/625286/campos_512_v4
+123/625292/campos_512_v4
+123/625297/campos_512_v4
+123/625301/campos_512_v4
+123/625305/campos_512_v4
+123/625309/campos_512_v4
+123/625318/campos_512_v4
+123/625323/campos_512_v4
+123/625336/campos_512_v4
+123/625337/campos_512_v4
+123/625340/campos_512_v4
+123/625345/campos_512_v4
+123/625355/campos_512_v4
+123/625357/campos_512_v4
+123/625361/campos_512_v4
+123/625370/campos_512_v4
+123/625380/campos_512_v4
+123/625381/campos_512_v4
+123/625387/campos_512_v4
+123/625388/campos_512_v4
+123/625400/campos_512_v4
+123/625401/campos_512_v4
+123/625403/campos_512_v4
+123/625409/campos_512_v4
+123/625415/campos_512_v4
+123/625421/campos_512_v4
+123/625444/campos_512_v4
+123/625448/campos_512_v4
+123/625452/campos_512_v4
+123/625453/campos_512_v4
+123/625469/campos_512_v4
+123/625471/campos_512_v4
+123/625472/campos_512_v4
+123/625474/campos_512_v4
+123/625487/campos_512_v4
+123/625497/campos_512_v4
+123/625498/campos_512_v4
+123/625501/campos_512_v4
+123/625508/campos_512_v4
+123/625511/campos_512_v4
+123/625516/campos_512_v4
+123/625517/campos_512_v4
+123/625520/campos_512_v4
+123/625521/campos_512_v4
+123/625524/campos_512_v4
+123/625530/campos_512_v4
+123/625544/campos_512_v4
+123/625552/campos_512_v4
+123/625559/campos_512_v4
+123/625574/campos_512_v4
+123/625575/campos_512_v4
+123/625576/campos_512_v4
+123/625577/campos_512_v4
+123/625592/campos_512_v4
+123/625593/campos_512_v4
+123/625595/campos_512_v4
+123/625600/campos_512_v4
+123/625610/campos_512_v4
+123/625611/campos_512_v4
+123/625612/campos_512_v4
+123/625614/campos_512_v4
+123/625616/campos_512_v4
+123/625618/campos_512_v4
+123/625628/campos_512_v4
+123/625633/campos_512_v4
+123/625649/campos_512_v4
+123/625652/campos_512_v4
+123/625661/campos_512_v4
+123/625662/campos_512_v4
+123/625664/campos_512_v4
+123/625665/campos_512_v4
+123/625677/campos_512_v4
+123/625687/campos_512_v4
+123/625704/campos_512_v4
+123/625709/campos_512_v4
+123/625712/campos_512_v4
+123/625730/campos_512_v4
+123/625731/campos_512_v4
+123/625752/campos_512_v4
+123/625755/campos_512_v4
+123/625757/campos_512_v4
+123/625761/campos_512_v4
+123/625764/campos_512_v4
+123/625766/campos_512_v4
+123/625800/campos_512_v4
+123/625801/campos_512_v4
+123/625802/campos_512_v4
+123/625813/campos_512_v4
+123/625814/campos_512_v4
+123/625815/campos_512_v4
+123/625819/campos_512_v4
+123/625827/campos_512_v4
+123/625829/campos_512_v4
+123/625844/campos_512_v4
+123/625845/campos_512_v4
+123/625846/campos_512_v4
+123/625860/campos_512_v4
+123/625867/campos_512_v4
+123/625876/campos_512_v4
+123/625886/campos_512_v4
+123/625894/campos_512_v4
+123/625900/campos_512_v4
+123/625901/campos_512_v4
+123/625916/campos_512_v4
+123/625918/campos_512_v4
+123/625923/campos_512_v4
+123/625924/campos_512_v4
+123/625933/campos_512_v4
+123/625935/campos_512_v4
+123/625937/campos_512_v4
+123/625967/campos_512_v4
+123/625976/campos_512_v4
+123/625977/campos_512_v4
+123/625978/campos_512_v4
+123/625999/campos_512_v4
+123/626000/campos_512_v4
+123/626006/campos_512_v4
+123/626024/campos_512_v4
+123/626029/campos_512_v4
+123/626040/campos_512_v4
+123/626052/campos_512_v4
+123/626055/campos_512_v4
+123/626057/campos_512_v4
+123/626063/campos_512_v4
+123/626064/campos_512_v4
+123/626065/campos_512_v4
+123/626069/campos_512_v4
+123/626073/campos_512_v4
+123/626074/campos_512_v4
+123/626081/campos_512_v4
+123/626085/campos_512_v4
+123/626086/campos_512_v4
+123/626096/campos_512_v4
+123/626098/campos_512_v4
+123/626103/campos_512_v4
+123/626112/campos_512_v4
+123/626123/campos_512_v4
+123/626133/campos_512_v4
+123/626139/campos_512_v4
+123/626157/campos_512_v4
+123/626160/campos_512_v4
+123/626167/campos_512_v4
+123/626170/campos_512_v4
+123/626176/campos_512_v4
+123/626183/campos_512_v4
+123/626184/campos_512_v4
+123/626191/campos_512_v4
+123/626207/campos_512_v4
+123/626215/campos_512_v4
+123/626222/campos_512_v4
+123/626224/campos_512_v4
+123/626225/campos_512_v4
+123/626238/campos_512_v4
+123/626240/campos_512_v4
+123/626242/campos_512_v4
+123/626245/campos_512_v4
+123/626248/campos_512_v4
+123/626249/campos_512_v4
+123/626253/campos_512_v4
+123/626262/campos_512_v4
+123/626264/campos_512_v4
+123/626270/campos_512_v4
+123/626274/campos_512_v4
+123/626278/campos_512_v4
+123/626281/campos_512_v4
+123/626284/campos_512_v4
+123/626301/campos_512_v4
+123/626302/campos_512_v4
+123/626308/campos_512_v4
+123/626316/campos_512_v4
+123/626324/campos_512_v4
+123/626347/campos_512_v4
+123/626371/campos_512_v4
+123/626374/campos_512_v4
+123/626379/campos_512_v4
+123/626382/campos_512_v4
+123/626383/campos_512_v4
+123/626384/campos_512_v4
+123/626388/campos_512_v4
+123/626392/campos_512_v4
+123/626397/campos_512_v4
+123/626399/campos_512_v4
+123/626406/campos_512_v4
+123/626430/campos_512_v4
+123/626433/campos_512_v4
+123/626439/campos_512_v4
+123/626454/campos_512_v4
+123/626462/campos_512_v4
+123/626468/campos_512_v4
+123/626473/campos_512_v4
+123/626500/campos_512_v4
+123/626511/campos_512_v4
+123/626523/campos_512_v4
+123/626541/campos_512_v4
+123/626542/campos_512_v4
+123/626546/campos_512_v4
+123/626549/campos_512_v4
+123/626551/campos_512_v4
+123/626572/campos_512_v4
+123/626628/campos_512_v4
+123/626636/campos_512_v4
+123/626638/campos_512_v4
+123/626639/campos_512_v4
+123/626641/campos_512_v4
+123/626643/campos_512_v4
+123/626647/campos_512_v4
+123/626649/campos_512_v4
+123/626651/campos_512_v4
+123/626665/campos_512_v4
+123/626666/campos_512_v4
+123/626667/campos_512_v4
+123/626668/campos_512_v4
+123/626669/campos_512_v4
+123/626676/campos_512_v4
+123/626680/campos_512_v4
+123/626684/campos_512_v4
+123/626688/campos_512_v4
+123/626692/campos_512_v4
+123/626698/campos_512_v4
+123/626703/campos_512_v4
+123/626707/campos_512_v4
+123/626727/campos_512_v4
+123/626730/campos_512_v4
+123/626732/campos_512_v4
+123/626734/campos_512_v4
+123/626740/campos_512_v4
+123/626743/campos_512_v4
+123/626756/campos_512_v4
+123/626763/campos_512_v4
+123/626764/campos_512_v4
+123/626766/campos_512_v4
+123/626768/campos_512_v4
+123/626783/campos_512_v4
+123/626801/campos_512_v4
+123/626805/campos_512_v4
+123/626806/campos_512_v4
+123/626807/campos_512_v4
+123/626819/campos_512_v4
+123/626820/campos_512_v4
+123/626841/campos_512_v4
+123/626843/campos_512_v4
+123/626852/campos_512_v4
+123/626859/campos_512_v4
+123/626864/campos_512_v4
+123/626869/campos_512_v4
+123/626888/campos_512_v4
+123/626893/campos_512_v4
+123/626897/campos_512_v4
+123/626910/campos_512_v4
+123/626919/campos_512_v4
+123/626924/campos_512_v4
+123/626931/campos_512_v4
+123/626935/campos_512_v4
+123/626944/campos_512_v4
+123/626963/campos_512_v4
+123/626968/campos_512_v4
+123/626977/campos_512_v4
+123/626993/campos_512_v4
+123/627010/campos_512_v4
+123/627020/campos_512_v4
+123/627023/campos_512_v4
+123/627029/campos_512_v4
+123/627039/campos_512_v4
+123/627044/campos_512_v4
+123/627054/campos_512_v4
+123/627067/campos_512_v4
+123/627070/campos_512_v4
+123/627083/campos_512_v4
+123/627086/campos_512_v4
+123/627090/campos_512_v4
+123/627091/campos_512_v4
+123/627099/campos_512_v4
+123/627118/campos_512_v4
+123/627120/campos_512_v4
+123/627122/campos_512_v4
+123/627127/campos_512_v4
+123/627132/campos_512_v4
+123/627158/campos_512_v4
+123/627180/campos_512_v4
+123/627184/campos_512_v4
+123/627205/campos_512_v4
+123/627207/campos_512_v4
+123/627209/campos_512_v4
+123/627214/campos_512_v4
+123/627218/campos_512_v4
+123/627220/campos_512_v4
+123/627225/campos_512_v4
+123/627231/campos_512_v4
+123/627245/campos_512_v4
+123/627255/campos_512_v4
+123/627262/campos_512_v4
+123/627269/campos_512_v4
+123/627275/campos_512_v4
+123/627276/campos_512_v4
+123/627279/campos_512_v4
+123/627281/campos_512_v4
+123/627285/campos_512_v4
+123/627291/campos_512_v4
+123/627317/campos_512_v4
+123/627327/campos_512_v4
+123/627331/campos_512_v4
+123/627336/campos_512_v4
+123/627342/campos_512_v4
+123/627346/campos_512_v4
+123/627353/campos_512_v4
+123/627372/campos_512_v4
+123/627391/campos_512_v4
+123/627409/campos_512_v4
+123/627414/campos_512_v4
+123/627417/campos_512_v4
+123/627421/campos_512_v4
+123/627429/campos_512_v4
+123/627475/campos_512_v4
+123/627481/campos_512_v4
+123/627482/campos_512_v4
+123/627493/campos_512_v4
+123/627501/campos_512_v4
+123/627506/campos_512_v4
+123/627508/campos_512_v4
+123/627511/campos_512_v4
+123/627515/campos_512_v4
+123/627524/campos_512_v4
+123/627526/campos_512_v4
+123/627527/campos_512_v4
+123/627530/campos_512_v4
+123/627553/campos_512_v4
+123/627563/campos_512_v4
+123/627564/campos_512_v4
+123/627572/campos_512_v4
+123/627574/campos_512_v4
+123/627590/campos_512_v4
+123/627591/campos_512_v4
+123/627595/campos_512_v4
+123/627599/campos_512_v4
+123/627623/campos_512_v4
+123/627624/campos_512_v4
+123/627630/campos_512_v4
+123/627637/campos_512_v4
+123/627638/campos_512_v4
+123/627656/campos_512_v4
+123/627672/campos_512_v4
+123/627677/campos_512_v4
+123/627681/campos_512_v4
+123/627696/campos_512_v4
+123/627712/campos_512_v4
+123/627715/campos_512_v4
+123/627716/campos_512_v4
+123/627729/campos_512_v4
+123/627730/campos_512_v4
+123/627733/campos_512_v4
+123/627735/campos_512_v4
+123/627744/campos_512_v4
+123/627747/campos_512_v4
+123/627763/campos_512_v4
+123/627773/campos_512_v4
+123/627774/campos_512_v4
+123/627777/campos_512_v4
+123/627780/campos_512_v4
+123/627786/campos_512_v4
+123/627787/campos_512_v4
+123/627804/campos_512_v4
+123/627819/campos_512_v4
+123/627823/campos_512_v4
+123/627833/campos_512_v4
+123/627840/campos_512_v4
+123/627845/campos_512_v4
+123/627847/campos_512_v4
+123/627854/campos_512_v4
+123/627858/campos_512_v4
+123/627870/campos_512_v4
+123/627879/campos_512_v4
+123/627881/campos_512_v4
+123/627882/campos_512_v4
+123/627895/campos_512_v4
+123/627897/campos_512_v4
+123/627913/campos_512_v4
+123/627924/campos_512_v4
+123/627935/campos_512_v4
+123/627943/campos_512_v4
+123/627944/campos_512_v4
+123/627948/campos_512_v4
+123/627949/campos_512_v4
+123/627955/campos_512_v4
+123/627966/campos_512_v4
+123/627974/campos_512_v4
+123/627980/campos_512_v4
+123/628000/campos_512_v4
+123/628003/campos_512_v4
+123/628009/campos_512_v4
+123/628010/campos_512_v4
+123/628032/campos_512_v4
+123/628034/campos_512_v4
+123/628039/campos_512_v4
+123/628040/campos_512_v4
+123/628053/campos_512_v4
+123/628064/campos_512_v4
+123/628073/campos_512_v4
+123/628076/campos_512_v4
+123/628081/campos_512_v4
+123/628083/campos_512_v4
+123/628116/campos_512_v4
+123/628124/campos_512_v4
+123/628132/campos_512_v4
+123/628143/campos_512_v4
+123/628166/campos_512_v4
+123/628167/campos_512_v4
+123/628177/campos_512_v4
+123/628196/campos_512_v4
+123/628198/campos_512_v4
+123/628213/campos_512_v4
+123/628216/campos_512_v4
+123/628225/campos_512_v4
+123/628249/campos_512_v4
+123/628252/campos_512_v4
+123/628259/campos_512_v4
+123/628288/campos_512_v4
+123/628289/campos_512_v4
+123/628294/campos_512_v4
+123/628299/campos_512_v4
+123/628303/campos_512_v4
+123/628312/campos_512_v4
+123/628316/campos_512_v4
+123/628329/campos_512_v4
+123/628341/campos_512_v4
+123/628357/campos_512_v4
+123/628359/campos_512_v4
+123/628386/campos_512_v4
+123/628390/campos_512_v4
+123/628425/campos_512_v4
+123/628431/campos_512_v4
+123/628433/campos_512_v4
+123/628456/campos_512_v4
+123/628464/campos_512_v4
+123/628468/campos_512_v4
+123/628473/campos_512_v4
+123/628476/campos_512_v4
+123/628482/campos_512_v4
+123/628486/campos_512_v4
+123/628501/campos_512_v4
+123/628504/campos_512_v4
+123/628514/campos_512_v4
+123/628515/campos_512_v4
+123/628518/campos_512_v4
+123/628519/campos_512_v4
+123/628523/campos_512_v4
+123/628533/campos_512_v4
+123/628535/campos_512_v4
+123/628537/campos_512_v4
+123/628542/campos_512_v4
+123/628569/campos_512_v4
+123/628589/campos_512_v4
+123/628597/campos_512_v4
+123/628607/campos_512_v4
+123/628609/campos_512_v4
+123/628615/campos_512_v4
+123/628630/campos_512_v4
+123/628640/campos_512_v4
+123/628647/campos_512_v4
+123/628650/campos_512_v4
+123/628652/campos_512_v4
+123/628661/campos_512_v4
+123/628674/campos_512_v4
+123/628677/campos_512_v4
+123/628679/campos_512_v4
+123/628680/campos_512_v4
+123/628685/campos_512_v4
+123/628687/campos_512_v4
+123/628701/campos_512_v4
+123/628702/campos_512_v4
+123/628707/campos_512_v4
+123/628711/campos_512_v4
+123/628714/campos_512_v4
+123/628716/campos_512_v4
+123/628733/campos_512_v4
+123/628734/campos_512_v4
+123/628740/campos_512_v4
+123/628753/campos_512_v4
+123/628754/campos_512_v4
+123/628757/campos_512_v4
+123/628772/campos_512_v4
+123/628779/campos_512_v4
+123/628781/campos_512_v4
+123/628785/campos_512_v4
+123/628804/campos_512_v4
+123/628806/campos_512_v4
+123/628817/campos_512_v4
+123/628821/campos_512_v4
+123/628825/campos_512_v4
+123/628835/campos_512_v4
+123/628838/campos_512_v4
+123/628862/campos_512_v4
+123/628863/campos_512_v4
+123/628870/campos_512_v4
+123/628874/campos_512_v4
+123/628879/campos_512_v4
+123/628881/campos_512_v4
+123/628886/campos_512_v4
+123/628891/campos_512_v4
+123/628895/campos_512_v4
+123/628896/campos_512_v4
+123/628901/campos_512_v4
+123/628904/campos_512_v4
+123/628912/campos_512_v4
+123/628914/campos_512_v4
+123/628916/campos_512_v4
+123/628919/campos_512_v4
+123/628922/campos_512_v4
+123/628925/campos_512_v4
+123/628926/campos_512_v4
+123/628930/campos_512_v4
+123/628952/campos_512_v4
+123/628954/campos_512_v4
+123/628958/campos_512_v4
+123/628959/campos_512_v4
+123/628966/campos_512_v4
+123/628969/campos_512_v4
+123/628974/campos_512_v4
+123/628977/campos_512_v4
+123/628980/campos_512_v4
+123/628986/campos_512_v4
+123/628993/campos_512_v4
+123/628995/campos_512_v4
+123/629005/campos_512_v4
+123/629007/campos_512_v4
+123/629013/campos_512_v4
+123/629019/campos_512_v4
+123/629032/campos_512_v4
+123/629034/campos_512_v4
+123/629043/campos_512_v4
+123/629046/campos_512_v4
+123/629048/campos_512_v4
+123/629049/campos_512_v4
+123/629060/campos_512_v4
+123/629061/campos_512_v4
+123/629064/campos_512_v4
+123/629070/campos_512_v4
+123/629077/campos_512_v4
+123/629080/campos_512_v4
+123/629085/campos_512_v4
+123/629095/campos_512_v4
+123/629098/campos_512_v4
+123/629106/campos_512_v4
+123/629109/campos_512_v4
+123/629112/campos_512_v4
+123/629123/campos_512_v4
+123/629139/campos_512_v4
+123/629143/campos_512_v4
+123/629148/campos_512_v4
+123/629154/campos_512_v4
+123/629167/campos_512_v4
+123/629173/campos_512_v4
+123/629183/campos_512_v4
+123/629210/campos_512_v4
+123/629214/campos_512_v4
+123/629229/campos_512_v4
+123/629277/campos_512_v4
+123/629289/campos_512_v4
+123/629292/campos_512_v4
+123/629299/campos_512_v4
+123/629307/campos_512_v4
+123/629310/campos_512_v4
+123/629313/campos_512_v4
+123/629315/campos_512_v4
+123/629316/campos_512_v4
+123/629317/campos_512_v4
+123/629319/campos_512_v4
+123/629321/campos_512_v4
+123/629323/campos_512_v4
+123/629327/campos_512_v4
+123/629337/campos_512_v4
+123/629346/campos_512_v4
+123/629350/campos_512_v4
+123/629353/campos_512_v4
+123/629363/campos_512_v4
+123/629371/campos_512_v4
+123/629373/campos_512_v4
+123/629376/campos_512_v4
+123/629386/campos_512_v4
+123/629390/campos_512_v4
+123/629399/campos_512_v4
+123/629406/campos_512_v4
+123/629418/campos_512_v4
+123/629419/campos_512_v4
+123/629420/campos_512_v4
+123/629425/campos_512_v4
+123/629431/campos_512_v4
+123/629447/campos_512_v4
+123/629450/campos_512_v4
+123/629452/campos_512_v4
+123/629462/campos_512_v4
+123/629467/campos_512_v4
+123/629485/campos_512_v4
+123/629490/campos_512_v4
+123/629493/campos_512_v4
+123/629498/campos_512_v4
+123/629499/campos_512_v4
+123/629500/campos_512_v4
+123/629503/campos_512_v4
+123/629506/campos_512_v4
+123/629510/campos_512_v4
+123/629519/campos_512_v4
+123/629520/campos_512_v4
+123/629530/campos_512_v4
+123/629542/campos_512_v4
+123/629543/campos_512_v4
+123/629557/campos_512_v4
+123/629563/campos_512_v4
+123/629567/campos_512_v4
+123/629570/campos_512_v4
+123/629571/campos_512_v4
+123/629582/campos_512_v4
+123/629596/campos_512_v4
+123/629604/campos_512_v4
+123/629617/campos_512_v4
+123/629618/campos_512_v4
+123/629619/campos_512_v4
+123/629621/campos_512_v4
+123/629631/campos_512_v4
+123/629679/campos_512_v4
+123/629684/campos_512_v4
+123/629685/campos_512_v4
+123/629692/campos_512_v4
+123/629704/campos_512_v4
+123/629720/campos_512_v4
+123/629727/campos_512_v4
+123/629746/campos_512_v4
+123/629758/campos_512_v4
+123/629776/campos_512_v4
+123/629815/campos_512_v4
+123/629818/campos_512_v4
+123/629822/campos_512_v4
+123/629838/campos_512_v4
+123/629846/campos_512_v4
+123/629848/campos_512_v4
+123/629854/campos_512_v4
+123/629875/campos_512_v4
+123/629886/campos_512_v4
+123/629887/campos_512_v4
+123/629889/campos_512_v4
+123/629891/campos_512_v4
+123/629901/campos_512_v4
+123/629912/campos_512_v4
+123/629921/campos_512_v4
+123/629932/campos_512_v4
+123/629938/campos_512_v4
+123/629943/campos_512_v4
+123/629944/campos_512_v4
+123/629945/campos_512_v4
+123/629955/campos_512_v4
+123/629960/campos_512_v4
+123/629964/campos_512_v4
+123/629967/campos_512_v4
+123/629974/campos_512_v4
+123/629975/campos_512_v4
+123/629999/campos_512_v4
+124/630015/campos_512_v4
+124/630039/campos_512_v4
+124/630042/campos_512_v4
+124/630056/campos_512_v4
+124/630068/campos_512_v4
+124/630081/campos_512_v4
+124/630098/campos_512_v4
+124/630102/campos_512_v4
+124/630134/campos_512_v4
+124/630137/campos_512_v4
+124/630151/campos_512_v4
+124/630166/campos_512_v4
+124/630168/campos_512_v4
+124/630171/campos_512_v4
+124/630176/campos_512_v4
+124/630193/campos_512_v4
+124/630242/campos_512_v4
+124/630244/campos_512_v4
+124/630253/campos_512_v4
+124/630258/campos_512_v4
+124/630263/campos_512_v4
+124/630267/campos_512_v4
+124/630271/campos_512_v4
+124/630276/campos_512_v4
+124/630298/campos_512_v4
+124/630303/campos_512_v4
+124/630319/campos_512_v4
+124/630327/campos_512_v4
+124/630338/campos_512_v4
+124/630340/campos_512_v4
+124/630361/campos_512_v4
+124/630362/campos_512_v4
+124/630373/campos_512_v4
+124/630396/campos_512_v4
+124/630400/campos_512_v4
+124/630401/campos_512_v4
+124/630411/campos_512_v4
+124/630424/campos_512_v4
+124/630427/campos_512_v4
+124/630443/campos_512_v4
+124/630458/campos_512_v4
+124/630468/campos_512_v4
+124/630473/campos_512_v4
+124/630484/campos_512_v4
+124/630485/campos_512_v4
+124/630489/campos_512_v4
+124/630509/campos_512_v4
+124/630510/campos_512_v4
+124/630534/campos_512_v4
+124/630539/campos_512_v4
+124/630549/campos_512_v4
+124/630555/campos_512_v4
+124/630560/campos_512_v4
+124/630567/campos_512_v4
+124/630570/campos_512_v4
+124/630571/campos_512_v4
+124/630576/campos_512_v4
+124/630577/campos_512_v4
+124/630578/campos_512_v4
+124/630582/campos_512_v4
+124/630584/campos_512_v4
+124/630615/campos_512_v4
+124/630623/campos_512_v4
+124/630633/campos_512_v4
+124/630641/campos_512_v4
+124/630649/campos_512_v4
+124/630651/campos_512_v4
+124/630654/campos_512_v4
+124/630664/campos_512_v4
+124/630686/campos_512_v4
+124/630701/campos_512_v4
+124/630704/campos_512_v4
+124/630710/campos_512_v4
+124/630737/campos_512_v4
+124/630747/campos_512_v4
+124/630748/campos_512_v4
+124/630753/campos_512_v4
+124/630758/campos_512_v4
+124/630778/campos_512_v4
+124/630780/campos_512_v4
+124/630789/campos_512_v4
+124/630790/campos_512_v4
+124/630793/campos_512_v4
+124/630809/campos_512_v4
+124/630813/campos_512_v4
+124/630816/campos_512_v4
+124/630823/campos_512_v4
+124/630825/campos_512_v4
+124/630828/campos_512_v4
+124/630832/campos_512_v4
+124/630833/campos_512_v4
+124/630844/campos_512_v4
+124/630878/campos_512_v4
+124/630880/campos_512_v4
+124/630883/campos_512_v4
+124/630900/campos_512_v4
+124/630906/campos_512_v4
+124/630909/campos_512_v4
+124/630923/campos_512_v4
+124/630925/campos_512_v4
+124/630931/campos_512_v4
+124/630940/campos_512_v4
+124/630942/campos_512_v4
+124/630943/campos_512_v4
+124/630953/campos_512_v4
+124/630954/campos_512_v4
+124/630970/campos_512_v4
+124/630984/campos_512_v4
+124/630988/campos_512_v4
+124/630995/campos_512_v4
+124/630996/campos_512_v4
+124/631003/campos_512_v4
+124/631008/campos_512_v4
+124/631012/campos_512_v4
+124/631013/campos_512_v4
+124/631023/campos_512_v4
+124/631029/campos_512_v4
+124/631031/campos_512_v4
+124/631043/campos_512_v4
+124/631045/campos_512_v4
+124/631050/campos_512_v4
+124/631054/campos_512_v4
+124/631058/campos_512_v4
+124/631061/campos_512_v4
+124/631064/campos_512_v4
+124/631066/campos_512_v4
+124/631077/campos_512_v4
+124/631079/campos_512_v4
+124/631094/campos_512_v4
+124/631103/campos_512_v4
+124/631109/campos_512_v4
+124/631119/campos_512_v4
+124/631127/campos_512_v4
+124/631134/campos_512_v4
+124/631141/campos_512_v4
+124/631142/campos_512_v4
+124/631145/campos_512_v4
+124/631150/campos_512_v4
+124/631178/campos_512_v4
+124/631179/campos_512_v4
+124/631183/campos_512_v4
+124/631199/campos_512_v4
+124/631209/campos_512_v4
+124/631210/campos_512_v4
+124/631224/campos_512_v4
+124/631231/campos_512_v4
+124/631236/campos_512_v4
+124/631246/campos_512_v4
+124/631250/campos_512_v4
+124/631253/campos_512_v4
+124/631267/campos_512_v4
+124/631272/campos_512_v4
+124/631273/campos_512_v4
+124/631276/campos_512_v4
+124/631283/campos_512_v4
+124/631292/campos_512_v4
+124/631296/campos_512_v4
+124/631299/campos_512_v4
+124/631300/campos_512_v4
+124/631304/campos_512_v4
+124/631306/campos_512_v4
+124/631307/campos_512_v4
+124/631312/campos_512_v4
+124/631318/campos_512_v4
+124/631320/campos_512_v4
+124/631341/campos_512_v4
+124/631342/campos_512_v4
+124/631346/campos_512_v4
+124/631364/campos_512_v4
+124/631387/campos_512_v4
+124/631397/campos_512_v4
+124/631404/campos_512_v4
+124/631406/campos_512_v4
+124/631409/campos_512_v4
+124/631412/campos_512_v4
+124/631423/campos_512_v4
+124/631429/campos_512_v4
+124/631432/campos_512_v4
+124/631436/campos_512_v4
+124/631438/campos_512_v4
+124/631445/campos_512_v4
+124/631448/campos_512_v4
+124/631449/campos_512_v4
+124/631463/campos_512_v4
+124/631465/campos_512_v4
+124/631466/campos_512_v4
+124/631480/campos_512_v4
+124/631488/campos_512_v4
+124/631493/campos_512_v4
+124/631494/campos_512_v4
+124/631501/campos_512_v4
+124/631503/campos_512_v4
+124/631520/campos_512_v4
+124/631528/campos_512_v4
+124/631540/campos_512_v4
+124/631542/campos_512_v4
+124/631554/campos_512_v4
+124/631555/campos_512_v4
+124/631568/campos_512_v4
+124/631588/campos_512_v4
+124/631591/campos_512_v4
+124/631592/campos_512_v4
+124/631593/campos_512_v4
+124/631608/campos_512_v4
+124/631619/campos_512_v4
+124/631622/campos_512_v4
+124/631623/campos_512_v4
+124/631634/campos_512_v4
+124/631635/campos_512_v4
+124/631648/campos_512_v4
+124/631665/campos_512_v4
+124/631666/campos_512_v4
+124/631672/campos_512_v4
+124/631674/campos_512_v4
+124/631677/campos_512_v4
+124/631694/campos_512_v4
+124/631696/campos_512_v4
+124/631698/campos_512_v4
+124/631711/campos_512_v4
+124/631724/campos_512_v4
+124/631726/campos_512_v4
+124/631728/campos_512_v4
+124/631742/campos_512_v4
+124/631749/campos_512_v4
+124/631765/campos_512_v4
+124/631771/campos_512_v4
+124/631776/campos_512_v4
+124/631778/campos_512_v4
+124/631786/campos_512_v4
+124/631793/campos_512_v4
+124/631804/campos_512_v4
+124/631806/campos_512_v4
+124/631812/campos_512_v4
+124/631814/campos_512_v4
+124/631826/campos_512_v4
+124/631832/campos_512_v4
+124/631838/campos_512_v4
+124/631846/campos_512_v4
+124/631850/campos_512_v4
+124/631854/campos_512_v4
+124/631855/campos_512_v4
+124/631873/campos_512_v4
+124/631881/campos_512_v4
+124/631882/campos_512_v4
+124/631897/campos_512_v4
+124/631899/campos_512_v4
+124/631900/campos_512_v4
+124/631901/campos_512_v4
+124/631918/campos_512_v4
+124/631922/campos_512_v4
+124/631933/campos_512_v4
+124/631957/campos_512_v4
+124/631973/campos_512_v4
+124/631992/campos_512_v4
+124/632013/campos_512_v4
+124/632019/campos_512_v4
+124/632031/campos_512_v4
+124/632034/campos_512_v4
+124/632035/campos_512_v4
+124/632037/campos_512_v4
+124/632039/campos_512_v4
+124/632047/campos_512_v4
+124/632050/campos_512_v4
+124/632063/campos_512_v4
+124/632070/campos_512_v4
+124/632087/campos_512_v4
+124/632098/campos_512_v4
+124/632131/campos_512_v4
+124/632144/campos_512_v4
+124/632150/campos_512_v4
+124/632176/campos_512_v4
+124/632185/campos_512_v4
+124/632186/campos_512_v4
+124/632201/campos_512_v4
+124/632206/campos_512_v4
+124/632209/campos_512_v4
+124/632220/campos_512_v4
+124/632226/campos_512_v4
+124/632228/campos_512_v4
+124/632248/campos_512_v4
+124/632266/campos_512_v4
+124/632269/campos_512_v4
+124/632273/campos_512_v4
+124/632275/campos_512_v4
+124/632282/campos_512_v4
+124/632285/campos_512_v4
+124/632290/campos_512_v4
+124/632307/campos_512_v4
+124/632313/campos_512_v4
+124/632324/campos_512_v4
+124/632325/campos_512_v4
+124/632331/campos_512_v4
+124/632332/campos_512_v4
+124/632345/campos_512_v4
+124/632351/campos_512_v4
+124/632385/campos_512_v4
+124/632387/campos_512_v4
+124/632390/campos_512_v4
+124/632428/campos_512_v4
+124/632438/campos_512_v4
+124/632450/campos_512_v4
+124/632454/campos_512_v4
+124/632456/campos_512_v4
+124/632461/campos_512_v4
+124/632463/campos_512_v4
+124/632467/campos_512_v4
+124/632469/campos_512_v4
+124/632472/campos_512_v4
+124/632474/campos_512_v4
+124/632482/campos_512_v4
+124/632494/campos_512_v4
+124/632505/campos_512_v4
+124/632507/campos_512_v4
+124/632510/campos_512_v4
+124/632513/campos_512_v4
+124/632515/campos_512_v4
+124/632522/campos_512_v4
+124/632536/campos_512_v4
+124/632550/campos_512_v4
+124/632564/campos_512_v4
+124/632589/campos_512_v4
+124/632592/campos_512_v4
+124/632594/campos_512_v4
+124/632606/campos_512_v4
+124/632612/campos_512_v4
+124/632613/campos_512_v4
+124/632614/campos_512_v4
+124/632617/campos_512_v4
+124/632624/campos_512_v4
+124/632633/campos_512_v4
+124/632635/campos_512_v4
+124/632649/campos_512_v4
+124/632651/campos_512_v4
+124/632658/campos_512_v4
+124/632666/campos_512_v4
+124/632670/campos_512_v4
+124/632683/campos_512_v4
+124/632684/campos_512_v4
+124/632694/campos_512_v4
+124/632701/campos_512_v4
+124/632704/campos_512_v4
+124/632705/campos_512_v4
+124/632709/campos_512_v4
+124/632713/campos_512_v4
+124/632716/campos_512_v4
+124/632724/campos_512_v4
+124/632737/campos_512_v4
+124/632752/campos_512_v4
+124/632764/campos_512_v4
+124/632770/campos_512_v4
+124/632775/campos_512_v4
+124/632794/campos_512_v4
+124/632795/campos_512_v4
+124/632811/campos_512_v4
+124/632812/campos_512_v4
+124/632813/campos_512_v4
+124/632816/campos_512_v4
+124/632821/campos_512_v4
+124/632826/campos_512_v4
+124/632831/campos_512_v4
+124/632837/campos_512_v4
+124/632847/campos_512_v4
+124/632850/campos_512_v4
+124/632853/campos_512_v4
+124/632863/campos_512_v4
+124/632875/campos_512_v4
+124/632878/campos_512_v4
+124/632879/campos_512_v4
+124/632891/campos_512_v4
+124/632906/campos_512_v4
+124/632908/campos_512_v4
+124/632913/campos_512_v4
+124/632915/campos_512_v4
+124/632924/campos_512_v4
+124/632928/campos_512_v4
+124/632942/campos_512_v4
+124/632950/campos_512_v4
+124/632953/campos_512_v4
+124/632959/campos_512_v4
+124/632960/campos_512_v4
+124/632966/campos_512_v4
+124/633006/campos_512_v4
+124/633013/campos_512_v4
+124/633022/campos_512_v4
+124/633031/campos_512_v4
+124/633036/campos_512_v4
+124/633042/campos_512_v4
+124/633045/campos_512_v4
+124/633049/campos_512_v4
+124/633053/campos_512_v4
+124/633056/campos_512_v4
+124/633059/campos_512_v4
+124/633060/campos_512_v4
+124/633062/campos_512_v4
+124/633074/campos_512_v4
+124/633075/campos_512_v4
+124/633084/campos_512_v4
+124/633087/campos_512_v4
+124/633092/campos_512_v4
+124/633100/campos_512_v4
+124/633110/campos_512_v4
+124/633121/campos_512_v4
+124/633136/campos_512_v4
+124/633138/campos_512_v4
+124/633152/campos_512_v4
+124/633160/campos_512_v4
+124/633162/campos_512_v4
+124/633164/campos_512_v4
+124/633174/campos_512_v4
+124/633182/campos_512_v4
+124/633188/campos_512_v4
+124/633200/campos_512_v4
+124/633203/campos_512_v4
+124/633224/campos_512_v4
+124/633228/campos_512_v4
+124/633229/campos_512_v4
+124/633247/campos_512_v4
+124/633249/campos_512_v4
+124/633258/campos_512_v4
+124/633263/campos_512_v4
+124/633273/campos_512_v4
+124/633275/campos_512_v4
+124/633277/campos_512_v4
+124/633278/campos_512_v4
+124/633279/campos_512_v4
+124/633285/campos_512_v4
+124/633293/campos_512_v4
+124/633302/campos_512_v4
+124/633310/campos_512_v4
+124/633315/campos_512_v4
+124/633325/campos_512_v4
+124/633333/campos_512_v4
+124/633334/campos_512_v4
+124/633337/campos_512_v4
+124/633342/campos_512_v4
+124/633351/campos_512_v4
+124/633360/campos_512_v4
+124/633368/campos_512_v4
+124/633374/campos_512_v4
+124/633378/campos_512_v4
+124/633405/campos_512_v4
+124/633411/campos_512_v4
+124/633417/campos_512_v4
+124/633418/campos_512_v4
+124/633429/campos_512_v4
+124/633436/campos_512_v4
+124/633441/campos_512_v4
+124/633447/campos_512_v4
+124/633459/campos_512_v4
+124/633469/campos_512_v4
+124/633473/campos_512_v4
+124/633477/campos_512_v4
+124/633482/campos_512_v4
+124/633488/campos_512_v4
+124/633489/campos_512_v4
+124/633510/campos_512_v4
+124/633523/campos_512_v4
+124/633530/campos_512_v4
+124/633533/campos_512_v4
+124/633539/campos_512_v4
+124/633542/campos_512_v4
+124/633545/campos_512_v4
+124/633547/campos_512_v4
+124/633554/campos_512_v4
+124/633586/campos_512_v4
+124/633588/campos_512_v4
+124/633592/campos_512_v4
+124/633598/campos_512_v4
+124/633604/campos_512_v4
+124/633605/campos_512_v4
+124/633615/campos_512_v4
+124/633625/campos_512_v4
+124/633636/campos_512_v4
+124/633642/campos_512_v4
+124/633660/campos_512_v4
+124/633661/campos_512_v4
+124/633667/campos_512_v4
+124/633701/campos_512_v4
+124/633704/campos_512_v4
+124/633707/campos_512_v4
+124/633709/campos_512_v4
+124/633720/campos_512_v4
+124/633721/campos_512_v4
+124/633726/campos_512_v4
+124/633735/campos_512_v4
+124/633739/campos_512_v4
+124/633752/campos_512_v4
+124/633753/campos_512_v4
+124/633763/campos_512_v4
+124/633801/campos_512_v4
+124/633809/campos_512_v4
+124/633820/campos_512_v4
+124/633826/campos_512_v4
+124/633836/campos_512_v4
+124/633857/campos_512_v4
+124/633858/campos_512_v4
+124/633865/campos_512_v4
+124/633869/campos_512_v4
+124/633887/campos_512_v4
+124/633902/campos_512_v4
+124/633911/campos_512_v4
+124/633918/campos_512_v4
+124/633923/campos_512_v4
+124/633939/campos_512_v4
+124/633941/campos_512_v4
+124/633945/campos_512_v4
+124/633954/campos_512_v4
+124/633958/campos_512_v4
+124/633960/campos_512_v4
+124/633963/campos_512_v4
+124/633973/campos_512_v4
+124/633980/campos_512_v4
+124/633982/campos_512_v4
+124/633989/campos_512_v4
+124/634011/campos_512_v4
+124/634013/campos_512_v4
+124/634018/campos_512_v4
+124/634019/campos_512_v4
+124/634020/campos_512_v4
+124/634022/campos_512_v4
+124/634061/campos_512_v4
+124/634067/campos_512_v4
+124/634071/campos_512_v4
+124/634082/campos_512_v4
+124/634089/campos_512_v4
+124/634090/campos_512_v4
+124/634094/campos_512_v4
+124/634097/campos_512_v4
+124/634110/campos_512_v4
+124/634123/campos_512_v4
+124/634129/campos_512_v4
+124/634137/campos_512_v4
+124/634139/campos_512_v4
+124/634144/campos_512_v4
+124/634153/campos_512_v4
+124/634155/campos_512_v4
+124/634172/campos_512_v4
+124/634173/campos_512_v4
+124/634174/campos_512_v4
+124/634181/campos_512_v4
+124/634197/campos_512_v4
+124/634199/campos_512_v4
+124/634200/campos_512_v4
+124/634207/campos_512_v4
+124/634212/campos_512_v4
+124/634214/campos_512_v4
+124/634221/campos_512_v4
+124/634226/campos_512_v4
+124/634228/campos_512_v4
+124/634230/campos_512_v4
+124/634239/campos_512_v4
+124/634242/campos_512_v4
+124/634251/campos_512_v4
+124/634252/campos_512_v4
+124/634256/campos_512_v4
+124/634259/campos_512_v4
+124/634264/campos_512_v4
+124/634271/campos_512_v4
+124/634278/campos_512_v4
+124/634280/campos_512_v4
+124/634287/campos_512_v4
+124/634292/campos_512_v4
+124/634293/campos_512_v4
+124/634314/campos_512_v4
+124/634315/campos_512_v4
+124/634321/campos_512_v4
+124/634322/campos_512_v4
+124/634327/campos_512_v4
+124/634336/campos_512_v4
+124/634341/campos_512_v4
+124/634363/campos_512_v4
+124/634366/campos_512_v4
+124/634373/campos_512_v4
+124/634380/campos_512_v4
+124/634401/campos_512_v4
+124/634404/campos_512_v4
+124/634407/campos_512_v4
+124/634410/campos_512_v4
+124/634411/campos_512_v4
+124/634420/campos_512_v4
+124/634432/campos_512_v4
+124/634448/campos_512_v4
+124/634449/campos_512_v4
+124/634452/campos_512_v4
+124/634453/campos_512_v4
+124/634493/campos_512_v4
+124/634501/campos_512_v4
+124/634516/campos_512_v4
+124/634522/campos_512_v4
+124/634525/campos_512_v4
+124/634530/campos_512_v4
+124/634539/campos_512_v4
+124/634541/campos_512_v4
+124/634543/campos_512_v4
+124/634549/campos_512_v4
+124/634550/campos_512_v4
+124/634575/campos_512_v4
+124/634586/campos_512_v4
+124/634590/campos_512_v4
+124/634595/campos_512_v4
+124/634598/campos_512_v4
+124/634599/campos_512_v4
+124/634601/campos_512_v4
+124/634602/campos_512_v4
+124/634607/campos_512_v4
+124/634626/campos_512_v4
+124/634628/campos_512_v4
+124/634644/campos_512_v4
+124/634659/campos_512_v4
+124/634663/campos_512_v4
+124/634664/campos_512_v4
+124/634670/campos_512_v4
+124/634671/campos_512_v4
+124/634675/campos_512_v4
+124/634677/campos_512_v4
+124/634681/campos_512_v4
+124/634690/campos_512_v4
+124/634693/campos_512_v4
+124/634704/campos_512_v4
+124/634707/campos_512_v4
+124/634718/campos_512_v4
+124/634728/campos_512_v4
+124/634737/campos_512_v4
+124/634743/campos_512_v4
+124/634745/campos_512_v4
+124/634748/campos_512_v4
+124/634750/campos_512_v4
+124/634760/campos_512_v4
+124/634765/campos_512_v4
+124/634767/campos_512_v4
+124/634770/campos_512_v4
+124/634771/campos_512_v4
+124/634773/campos_512_v4
+124/634779/campos_512_v4
+124/634780/campos_512_v4
+124/634791/campos_512_v4
+124/634812/campos_512_v4
+124/634814/campos_512_v4
+124/634819/campos_512_v4
+124/634821/campos_512_v4
+124/634829/campos_512_v4
+124/634844/campos_512_v4
+124/634852/campos_512_v4
+124/634856/campos_512_v4
+124/634857/campos_512_v4
+124/634870/campos_512_v4
+124/634877/campos_512_v4
+124/634878/campos_512_v4
+124/634881/campos_512_v4
+124/634884/campos_512_v4
+124/634892/campos_512_v4
+124/634897/campos_512_v4
+124/634899/campos_512_v4
+124/634904/campos_512_v4
+124/634905/campos_512_v4
+124/634906/campos_512_v4
+124/634911/campos_512_v4
+124/634921/campos_512_v4
+124/634943/campos_512_v4
+124/634947/campos_512_v4
+124/634960/campos_512_v4
+124/634961/campos_512_v4
+124/634966/campos_512_v4
+124/634972/campos_512_v4
+124/634986/campos_512_v4
+124/634990/campos_512_v4
+125/635006/campos_512_v4
+125/635025/campos_512_v4
+125/635032/campos_512_v4
+125/635040/campos_512_v4
+125/635045/campos_512_v4
+125/635057/campos_512_v4
+125/635066/campos_512_v4
+125/635069/campos_512_v4
+125/635071/campos_512_v4
+125/635083/campos_512_v4
+125/635085/campos_512_v4
+125/635088/campos_512_v4
+125/635097/campos_512_v4
+125/635105/campos_512_v4
+125/635107/campos_512_v4
+125/635110/campos_512_v4
+125/635111/campos_512_v4
+125/635157/campos_512_v4
+125/635161/campos_512_v4
+125/635162/campos_512_v4
+125/635170/campos_512_v4
+125/635185/campos_512_v4
+125/635187/campos_512_v4
+125/635188/campos_512_v4
+125/635189/campos_512_v4
+125/635199/campos_512_v4
+125/635206/campos_512_v4
+125/635215/campos_512_v4
+125/635216/campos_512_v4
+125/635225/campos_512_v4
+125/635235/campos_512_v4
+125/635245/campos_512_v4
+125/635253/campos_512_v4
+125/635256/campos_512_v4
+125/635263/campos_512_v4
+125/635269/campos_512_v4
+125/635271/campos_512_v4
+125/635278/campos_512_v4
+125/635279/campos_512_v4
+125/635283/campos_512_v4
+125/635284/campos_512_v4
+125/635292/campos_512_v4
+125/635293/campos_512_v4
+125/635295/campos_512_v4
+125/635298/campos_512_v4
+125/635299/campos_512_v4
+125/635303/campos_512_v4
+125/635305/campos_512_v4
+125/635313/campos_512_v4
+125/635320/campos_512_v4
+125/635327/campos_512_v4
+125/635334/campos_512_v4
+125/635335/campos_512_v4
+125/635349/campos_512_v4
+125/635355/campos_512_v4
+125/635363/campos_512_v4
+125/635365/campos_512_v4
+125/635369/campos_512_v4
+125/635372/campos_512_v4
+125/635373/campos_512_v4
+125/635386/campos_512_v4
+125/635401/campos_512_v4
+125/635403/campos_512_v4
+125/635405/campos_512_v4
+125/635421/campos_512_v4
+125/635427/campos_512_v4
+125/635429/campos_512_v4
+125/635432/campos_512_v4
+125/635435/campos_512_v4
+125/635439/campos_512_v4
+125/635451/campos_512_v4
+125/635460/campos_512_v4
+125/635468/campos_512_v4
+125/635475/campos_512_v4
+125/635482/campos_512_v4
+125/635495/campos_512_v4
+125/635501/campos_512_v4
+125/635504/campos_512_v4
+125/635512/campos_512_v4
+125/635514/campos_512_v4
+125/635516/campos_512_v4
+125/635523/campos_512_v4
+125/635543/campos_512_v4
+125/635548/campos_512_v4
+125/635549/campos_512_v4
+125/635557/campos_512_v4
+125/635562/campos_512_v4
+125/635570/campos_512_v4
+125/635571/campos_512_v4
+125/635572/campos_512_v4
+125/635583/campos_512_v4
+125/635588/campos_512_v4
+125/635590/campos_512_v4
+125/635592/campos_512_v4
+125/635603/campos_512_v4
+125/635606/campos_512_v4
+125/635608/campos_512_v4
+125/635628/campos_512_v4
+125/635629/campos_512_v4
+125/635660/campos_512_v4
+125/635662/campos_512_v4
+125/635663/campos_512_v4
+125/635673/campos_512_v4
+125/635675/campos_512_v4
+125/635695/campos_512_v4
+125/635696/campos_512_v4
+125/635707/campos_512_v4
+125/635724/campos_512_v4
+125/635726/campos_512_v4
+125/635731/campos_512_v4
+125/635747/campos_512_v4
+125/635760/campos_512_v4
+125/635775/campos_512_v4
+125/635777/campos_512_v4
+125/635796/campos_512_v4
+125/635804/campos_512_v4
+125/635805/campos_512_v4
+125/635841/campos_512_v4
+125/635857/campos_512_v4
+125/635882/campos_512_v4
+125/635895/campos_512_v4
+125/635900/campos_512_v4
+125/635901/campos_512_v4
+125/635905/campos_512_v4
+125/635920/campos_512_v4
+125/635929/campos_512_v4
+125/635954/campos_512_v4
+125/635956/campos_512_v4
+125/635981/campos_512_v4
+125/635983/campos_512_v4
+125/635990/campos_512_v4
+125/635995/campos_512_v4
+125/635999/campos_512_v4
+125/636000/campos_512_v4
+125/636019/campos_512_v4
+125/636021/campos_512_v4
+125/636051/campos_512_v4
+125/636053/campos_512_v4
+125/636057/campos_512_v4
+125/636065/campos_512_v4
+125/636067/campos_512_v4
+125/636068/campos_512_v4
+125/636080/campos_512_v4
+125/636081/campos_512_v4
+125/636083/campos_512_v4
+125/636091/campos_512_v4
+125/636093/campos_512_v4
+125/636097/campos_512_v4
+125/636102/campos_512_v4
+125/636114/campos_512_v4
+125/636116/campos_512_v4
+125/636118/campos_512_v4
+125/636120/campos_512_v4
+125/636130/campos_512_v4
+125/636131/campos_512_v4
+125/636133/campos_512_v4
+125/636136/campos_512_v4
+125/636138/campos_512_v4
+125/636145/campos_512_v4
+125/636153/campos_512_v4
+125/636182/campos_512_v4
+125/636193/campos_512_v4
+125/636194/campos_512_v4
+125/636197/campos_512_v4
+125/636200/campos_512_v4
+125/636203/campos_512_v4
+125/636206/campos_512_v4
+125/636247/campos_512_v4
+125/636251/campos_512_v4
+125/636258/campos_512_v4
+125/636259/campos_512_v4
+125/636265/campos_512_v4
+125/636276/campos_512_v4
+125/636290/campos_512_v4
+125/636309/campos_512_v4
+125/636312/campos_512_v4
+125/636315/campos_512_v4
+125/636320/campos_512_v4
+125/636328/campos_512_v4
+125/636346/campos_512_v4
+125/636348/campos_512_v4
+125/636350/campos_512_v4
+125/636363/campos_512_v4
+125/636370/campos_512_v4
+125/636376/campos_512_v4
+125/636377/campos_512_v4
+125/636396/campos_512_v4
+125/636447/campos_512_v4
+125/636450/campos_512_v4
+125/636453/campos_512_v4
+125/636459/campos_512_v4
+125/636461/campos_512_v4
+125/636467/campos_512_v4
+125/636468/campos_512_v4
+125/636473/campos_512_v4
+125/636477/campos_512_v4
+125/636478/campos_512_v4
+125/636486/campos_512_v4
+125/636508/campos_512_v4
+125/636511/campos_512_v4
+125/636514/campos_512_v4
+125/636533/campos_512_v4
+125/636547/campos_512_v4
+125/636551/campos_512_v4
+125/636556/campos_512_v4
+125/636563/campos_512_v4
+125/636569/campos_512_v4
+125/636576/campos_512_v4
+125/636579/campos_512_v4
+125/636607/campos_512_v4
+125/636626/campos_512_v4
+125/636631/campos_512_v4
+125/636638/campos_512_v4
+125/636650/campos_512_v4
+125/636655/campos_512_v4
+125/636659/campos_512_v4
+125/636673/campos_512_v4
+125/636690/campos_512_v4
+125/636691/campos_512_v4
+125/636694/campos_512_v4
+125/636695/campos_512_v4
+125/636701/campos_512_v4
+125/636702/campos_512_v4
+125/636709/campos_512_v4
+125/636726/campos_512_v4
+125/636754/campos_512_v4
+125/636757/campos_512_v4
+125/636759/campos_512_v4
+125/636768/campos_512_v4
+125/636772/campos_512_v4
+125/636773/campos_512_v4
+125/636793/campos_512_v4
+125/636800/campos_512_v4
+125/636814/campos_512_v4
+125/636829/campos_512_v4
+125/636842/campos_512_v4
+125/636845/campos_512_v4
+125/636846/campos_512_v4
+125/636847/campos_512_v4
+125/636873/campos_512_v4
+125/636874/campos_512_v4
+125/636884/campos_512_v4
+125/636890/campos_512_v4
+125/636897/campos_512_v4
+125/636901/campos_512_v4
+125/636904/campos_512_v4
+125/636905/campos_512_v4
+125/636910/campos_512_v4
+125/636914/campos_512_v4
+125/636926/campos_512_v4
+125/636931/campos_512_v4
+125/636933/campos_512_v4
+125/636936/campos_512_v4
+125/636941/campos_512_v4
+125/636967/campos_512_v4
+125/636968/campos_512_v4
+125/636986/campos_512_v4
+125/636989/campos_512_v4
+125/636991/campos_512_v4
+125/636998/campos_512_v4
+125/637016/campos_512_v4
+125/637020/campos_512_v4
+125/637033/campos_512_v4
+125/637037/campos_512_v4
+125/637042/campos_512_v4
+125/637053/campos_512_v4
+125/637065/campos_512_v4
+125/637071/campos_512_v4
+125/637076/campos_512_v4
+125/637078/campos_512_v4
+125/637082/campos_512_v4
+125/637085/campos_512_v4
+125/637094/campos_512_v4
+125/637118/campos_512_v4
+125/637121/campos_512_v4
+125/637129/campos_512_v4
+125/637133/campos_512_v4
+125/637155/campos_512_v4
+125/637158/campos_512_v4
+125/637164/campos_512_v4
+125/637167/campos_512_v4
+125/637180/campos_512_v4
+125/637195/campos_512_v4
+125/637202/campos_512_v4
+125/637225/campos_512_v4
+125/637226/campos_512_v4
+125/637228/campos_512_v4
+125/637229/campos_512_v4
+125/637231/campos_512_v4
+125/637233/campos_512_v4
+125/637236/campos_512_v4
+125/637237/campos_512_v4
+125/637242/campos_512_v4
+125/637258/campos_512_v4
+125/637259/campos_512_v4
+125/637261/campos_512_v4
+125/637262/campos_512_v4
+125/637272/campos_512_v4
+125/637273/campos_512_v4
+125/637276/campos_512_v4
+125/637282/campos_512_v4
+125/637288/campos_512_v4
+125/637310/campos_512_v4
+125/637314/campos_512_v4
+125/637317/campos_512_v4
+125/637320/campos_512_v4
+125/637322/campos_512_v4
+125/637327/campos_512_v4
+125/637335/campos_512_v4
+125/637336/campos_512_v4
+125/637338/campos_512_v4
+125/637339/campos_512_v4
+125/637342/campos_512_v4
+125/637364/campos_512_v4
+125/637368/campos_512_v4
+125/637374/campos_512_v4
+125/637378/campos_512_v4
+125/637380/campos_512_v4
+125/637386/campos_512_v4
+125/637402/campos_512_v4
+125/637412/campos_512_v4
+125/637419/campos_512_v4
+125/637423/campos_512_v4
+125/637427/campos_512_v4
+125/637450/campos_512_v4
+125/637457/campos_512_v4
+125/637458/campos_512_v4
+125/637460/campos_512_v4
+125/637481/campos_512_v4
+125/637517/campos_512_v4
+125/637522/campos_512_v4
+125/637524/campos_512_v4
+125/637529/campos_512_v4
+125/637532/campos_512_v4
+125/637551/campos_512_v4
+125/637561/campos_512_v4
+125/637565/campos_512_v4
+125/637567/campos_512_v4
+125/637576/campos_512_v4
+125/637577/campos_512_v4
+125/637583/campos_512_v4
+125/637592/campos_512_v4
+125/637594/campos_512_v4
+125/637595/campos_512_v4
+125/637599/campos_512_v4
+125/637600/campos_512_v4
+125/637630/campos_512_v4
+125/637639/campos_512_v4
+125/637647/campos_512_v4
+125/637651/campos_512_v4
+125/637657/campos_512_v4
+125/637663/campos_512_v4
+125/637665/campos_512_v4
+125/637675/campos_512_v4
+125/637676/campos_512_v4
+125/637677/campos_512_v4
+125/637682/campos_512_v4
+125/637689/campos_512_v4
+125/637707/campos_512_v4
+125/637730/campos_512_v4
+125/637734/campos_512_v4
+125/637748/campos_512_v4
+125/637757/campos_512_v4
+125/637761/campos_512_v4
+125/637764/campos_512_v4
+125/637773/campos_512_v4
+125/637780/campos_512_v4
+125/637800/campos_512_v4
+125/637805/campos_512_v4
+125/637816/campos_512_v4
+125/637818/campos_512_v4
+125/637826/campos_512_v4
+125/637829/campos_512_v4
+125/637857/campos_512_v4
+125/637868/campos_512_v4
+125/637888/campos_512_v4
+125/637889/campos_512_v4
+125/637898/campos_512_v4
+125/637899/campos_512_v4
+125/637926/campos_512_v4
+125/637940/campos_512_v4
+125/637951/campos_512_v4
+125/637988/campos_512_v4
+125/637996/campos_512_v4
+125/637999/campos_512_v4
+125/638004/campos_512_v4
+125/638005/campos_512_v4
+125/638008/campos_512_v4
+125/638010/campos_512_v4
+125/638015/campos_512_v4
+125/638023/campos_512_v4
+125/638024/campos_512_v4
+125/638032/campos_512_v4
+125/638060/campos_512_v4
+125/638072/campos_512_v4
+125/638081/campos_512_v4
+125/638083/campos_512_v4
+125/638091/campos_512_v4
+125/638093/campos_512_v4
+125/638109/campos_512_v4
+125/638114/campos_512_v4
+125/638117/campos_512_v4
+125/638144/campos_512_v4
+125/638155/campos_512_v4
+125/638163/campos_512_v4
+125/638172/campos_512_v4
+125/638181/campos_512_v4
+125/638183/campos_512_v4
+125/638191/campos_512_v4
+125/638207/campos_512_v4
+125/638233/campos_512_v4
+125/638239/campos_512_v4
+125/638263/campos_512_v4
+125/638265/campos_512_v4
+125/638267/campos_512_v4
+125/638268/campos_512_v4
+125/638271/campos_512_v4
+125/638288/campos_512_v4
+125/638311/campos_512_v4
+125/638335/campos_512_v4
+125/638347/campos_512_v4
+125/638351/campos_512_v4
+125/638353/campos_512_v4
+125/638356/campos_512_v4
+125/638380/campos_512_v4
+125/638381/campos_512_v4
+125/638384/campos_512_v4
+125/638385/campos_512_v4
+125/638390/campos_512_v4
+125/638392/campos_512_v4
+125/638393/campos_512_v4
+125/638402/campos_512_v4
+125/638405/campos_512_v4
+125/638407/campos_512_v4
+125/638411/campos_512_v4
+125/638415/campos_512_v4
+125/638426/campos_512_v4
+125/638429/campos_512_v4
+125/638441/campos_512_v4
+125/638447/campos_512_v4
+125/638449/campos_512_v4
+125/638453/campos_512_v4
+125/638469/campos_512_v4
+125/638493/campos_512_v4
+125/638495/campos_512_v4
+125/638501/campos_512_v4
+125/638503/campos_512_v4
+125/638516/campos_512_v4
+125/638562/campos_512_v4
+125/638570/campos_512_v4
+125/638572/campos_512_v4
+125/638583/campos_512_v4
+125/638604/campos_512_v4
+125/638605/campos_512_v4
+125/638606/campos_512_v4
+125/638614/campos_512_v4
+125/638623/campos_512_v4
+125/638629/campos_512_v4
+125/638643/campos_512_v4
+125/638645/campos_512_v4
+125/638646/campos_512_v4
+125/638651/campos_512_v4
+125/638652/campos_512_v4
+125/638658/campos_512_v4
+125/638661/campos_512_v4
+125/638668/campos_512_v4
+125/638671/campos_512_v4
+125/638672/campos_512_v4
+125/638677/campos_512_v4
+125/638680/campos_512_v4
+125/638685/campos_512_v4
+125/638692/campos_512_v4
+125/638702/campos_512_v4
+125/638712/campos_512_v4
+125/638723/campos_512_v4
+125/638741/campos_512_v4
+125/638742/campos_512_v4
+125/638746/campos_512_v4
+125/638748/campos_512_v4
+125/638751/campos_512_v4
+125/638753/campos_512_v4
+125/638755/campos_512_v4
+125/638773/campos_512_v4
+125/638785/campos_512_v4
+125/638789/campos_512_v4
+125/638801/campos_512_v4
+125/638804/campos_512_v4
+125/638823/campos_512_v4
+125/638825/campos_512_v4
+125/638828/campos_512_v4
+125/638830/campos_512_v4
+125/638840/campos_512_v4
+125/638854/campos_512_v4
+125/638859/campos_512_v4
+125/638864/campos_512_v4
+125/638865/campos_512_v4
+125/638867/campos_512_v4
+125/638869/campos_512_v4
+125/638888/campos_512_v4
+125/638906/campos_512_v4
+125/638907/campos_512_v4
+125/638910/campos_512_v4
+125/638913/campos_512_v4
+125/638914/campos_512_v4
+125/638916/campos_512_v4
+125/638921/campos_512_v4
+125/638933/campos_512_v4
+125/638934/campos_512_v4
+125/638946/campos_512_v4
+125/638948/campos_512_v4
+125/638963/campos_512_v4
+125/638964/campos_512_v4
+125/638978/campos_512_v4
+125/638983/campos_512_v4
+125/638992/campos_512_v4
+125/638993/campos_512_v4
+125/638996/campos_512_v4
+125/639000/campos_512_v4
+125/639001/campos_512_v4
+125/639007/campos_512_v4
+125/639013/campos_512_v4
+125/639019/campos_512_v4
+125/639047/campos_512_v4
+125/639048/campos_512_v4
+125/639102/campos_512_v4
+125/639104/campos_512_v4
+125/639106/campos_512_v4
+125/639112/campos_512_v4
+125/639113/campos_512_v4
+125/639116/campos_512_v4
+125/639125/campos_512_v4
+125/639133/campos_512_v4
+125/639146/campos_512_v4
+125/639150/campos_512_v4
+125/639151/campos_512_v4
+125/639154/campos_512_v4
+125/639155/campos_512_v4
+125/639163/campos_512_v4
+125/639164/campos_512_v4
+125/639168/campos_512_v4
+125/639175/campos_512_v4
+125/639198/campos_512_v4
+125/639212/campos_512_v4
+125/639218/campos_512_v4
+125/639222/campos_512_v4
+125/639224/campos_512_v4
+125/639238/campos_512_v4
+125/639240/campos_512_v4
+125/639252/campos_512_v4
+125/639255/campos_512_v4
+125/639256/campos_512_v4
+125/639259/campos_512_v4
+125/639263/campos_512_v4
+125/639270/campos_512_v4
+125/639274/campos_512_v4
+125/639280/campos_512_v4
+125/639282/campos_512_v4
+125/639288/campos_512_v4
+125/639289/campos_512_v4
+125/639296/campos_512_v4
+125/639297/campos_512_v4
+125/639301/campos_512_v4
+125/639304/campos_512_v4
+125/639305/campos_512_v4
+125/639328/campos_512_v4
+125/639329/campos_512_v4
+125/639335/campos_512_v4
+125/639342/campos_512_v4
+125/639343/campos_512_v4
+125/639345/campos_512_v4
+125/639353/campos_512_v4
+125/639355/campos_512_v4
+125/639356/campos_512_v4
+125/639363/campos_512_v4
+125/639366/campos_512_v4
+125/639394/campos_512_v4
+125/639397/campos_512_v4
+125/639411/campos_512_v4
+125/639423/campos_512_v4
+125/639427/campos_512_v4
+125/639430/campos_512_v4
+125/639439/campos_512_v4
+125/639446/campos_512_v4
+125/639448/campos_512_v4
+125/639450/campos_512_v4
+125/639455/campos_512_v4
+125/639460/campos_512_v4
+125/639463/campos_512_v4
+125/639464/campos_512_v4
+125/639467/campos_512_v4
+125/639479/campos_512_v4
+125/639488/campos_512_v4
+125/639504/campos_512_v4
+125/639509/campos_512_v4
+125/639512/campos_512_v4
+125/639516/campos_512_v4
+125/639521/campos_512_v4
+125/639530/campos_512_v4
+125/639535/campos_512_v4
+125/639546/campos_512_v4
+125/639554/campos_512_v4
+125/639556/campos_512_v4
+125/639561/campos_512_v4
+125/639567/campos_512_v4
+125/639580/campos_512_v4
+125/639581/campos_512_v4
+125/639584/campos_512_v4
+125/639586/campos_512_v4
+125/639591/campos_512_v4
+125/639599/campos_512_v4
+125/639619/campos_512_v4
+125/639640/campos_512_v4
+125/639648/campos_512_v4
+125/639650/campos_512_v4
+125/639666/campos_512_v4
+125/639674/campos_512_v4
+125/639676/campos_512_v4
+125/639686/campos_512_v4
+125/639687/campos_512_v4
+125/639695/campos_512_v4
+125/639705/campos_512_v4
+125/639707/campos_512_v4
+125/639708/campos_512_v4
+125/639718/campos_512_v4
+125/639720/campos_512_v4
+125/639721/campos_512_v4
+125/639722/campos_512_v4
+125/639732/campos_512_v4
+125/639746/campos_512_v4
+125/639747/campos_512_v4
+125/639748/campos_512_v4
+125/639758/campos_512_v4
+125/639760/campos_512_v4
+125/639771/campos_512_v4
+125/639781/campos_512_v4
+125/639785/campos_512_v4
+125/639797/campos_512_v4
+125/639809/campos_512_v4
+125/639811/campos_512_v4
+125/639812/campos_512_v4
+125/639818/campos_512_v4
+125/639822/campos_512_v4
+125/639831/campos_512_v4
+125/639838/campos_512_v4
+125/639853/campos_512_v4
+125/639855/campos_512_v4
+125/639868/campos_512_v4
+125/639886/campos_512_v4
+125/639887/campos_512_v4
+125/639907/campos_512_v4
+125/639909/campos_512_v4
+125/639910/campos_512_v4
+125/639913/campos_512_v4
+125/639916/campos_512_v4
+125/639917/campos_512_v4
+125/639923/campos_512_v4
+125/639925/campos_512_v4
+125/639929/campos_512_v4
+125/639930/campos_512_v4
+125/639940/campos_512_v4
+125/639943/campos_512_v4
+125/639952/campos_512_v4
+125/639953/campos_512_v4
+125/639957/campos_512_v4
+125/639961/campos_512_v4
+125/639967/campos_512_v4
+125/639975/campos_512_v4
+125/639996/campos_512_v4
+127/645013/campos_512_v4
+127/645019/campos_512_v4
+127/645025/campos_512_v4
+127/645029/campos_512_v4
+127/645034/campos_512_v4
+127/645046/campos_512_v4
+127/645079/campos_512_v4
+127/645080/campos_512_v4
+127/645082/campos_512_v4
+127/645090/campos_512_v4
+127/645092/campos_512_v4
+127/645109/campos_512_v4
+127/645116/campos_512_v4
+127/645129/campos_512_v4
+127/645136/campos_512_v4
+127/645139/campos_512_v4
+127/645147/campos_512_v4
+127/645153/campos_512_v4
+127/645168/campos_512_v4
+127/645169/campos_512_v4
+127/645180/campos_512_v4
+127/645192/campos_512_v4
+127/645193/campos_512_v4
+127/645194/campos_512_v4
+127/645198/campos_512_v4
+127/645210/campos_512_v4
+127/645211/campos_512_v4
+127/645212/campos_512_v4
+127/645222/campos_512_v4
+127/645233/campos_512_v4
+127/645236/campos_512_v4
+127/645242/campos_512_v4
+127/645245/campos_512_v4
+127/645253/campos_512_v4
+127/645256/campos_512_v4
+127/645269/campos_512_v4
+127/645274/campos_512_v4
+127/645285/campos_512_v4
+127/645294/campos_512_v4
+127/645303/campos_512_v4
+127/645304/campos_512_v4
+127/645312/campos_512_v4
+127/645326/campos_512_v4
+127/645329/campos_512_v4
+127/645332/campos_512_v4
+127/645337/campos_512_v4
+127/645340/campos_512_v4
+127/645357/campos_512_v4
+127/645376/campos_512_v4
+127/645377/campos_512_v4
+127/645379/campos_512_v4
+127/645396/campos_512_v4
+127/645411/campos_512_v4
+127/645417/campos_512_v4
+127/645435/campos_512_v4
+127/645439/campos_512_v4
+127/645445/campos_512_v4
+127/645449/campos_512_v4
+127/645451/campos_512_v4
+127/645454/campos_512_v4
+127/645455/campos_512_v4
+127/645459/campos_512_v4
+127/645462/campos_512_v4
+127/645465/campos_512_v4
+127/645488/campos_512_v4
+127/645492/campos_512_v4
+127/645501/campos_512_v4
+127/645508/campos_512_v4
+127/645512/campos_512_v4
+127/645516/campos_512_v4
+127/645518/campos_512_v4
+127/645525/campos_512_v4
+127/645533/campos_512_v4
+127/645535/campos_512_v4
+127/645540/campos_512_v4
+127/645541/campos_512_v4
+127/645547/campos_512_v4
+127/645549/campos_512_v4
+127/645553/campos_512_v4
+127/645563/campos_512_v4
+127/645569/campos_512_v4
+127/645587/campos_512_v4
+127/645588/campos_512_v4
+127/645589/campos_512_v4
+127/645593/campos_512_v4
+127/645594/campos_512_v4
+127/645612/campos_512_v4
+127/645615/campos_512_v4
+127/645617/campos_512_v4
+127/645623/campos_512_v4
+127/645632/campos_512_v4
+127/645636/campos_512_v4
+127/645641/campos_512_v4
+127/645642/campos_512_v4
+127/645644/campos_512_v4
+127/645646/campos_512_v4
+127/645660/campos_512_v4
+127/645665/campos_512_v4
+127/645668/campos_512_v4
+127/645674/campos_512_v4
+127/645675/campos_512_v4
+127/645678/campos_512_v4
+127/645689/campos_512_v4
+127/645698/campos_512_v4
+127/645705/campos_512_v4
+127/645712/campos_512_v4
+127/645728/campos_512_v4
+127/645738/campos_512_v4
+127/645741/campos_512_v4
+127/645752/campos_512_v4
+127/645755/campos_512_v4
+127/645765/campos_512_v4
+127/645767/campos_512_v4
+127/645789/campos_512_v4
+127/645828/campos_512_v4
+127/645842/campos_512_v4
+127/645855/campos_512_v4
+127/645864/campos_512_v4
+127/645868/campos_512_v4
+127/645875/campos_512_v4
+127/645876/campos_512_v4
+127/645879/campos_512_v4
+127/645882/campos_512_v4
+127/645884/campos_512_v4
+127/645892/campos_512_v4
+127/645893/campos_512_v4
+127/645906/campos_512_v4
+127/645909/campos_512_v4
+127/645915/campos_512_v4
+127/645916/campos_512_v4
+127/645918/campos_512_v4
+127/645920/campos_512_v4
+127/645922/campos_512_v4
+127/645925/campos_512_v4
+127/645926/campos_512_v4
+127/645927/campos_512_v4
+127/645941/campos_512_v4
+127/645949/campos_512_v4
+127/645958/campos_512_v4
+127/645966/campos_512_v4
+127/645970/campos_512_v4
+127/645971/campos_512_v4
+127/645974/campos_512_v4
+127/645987/campos_512_v4
+127/645994/campos_512_v4
+127/646015/campos_512_v4
+127/646021/campos_512_v4
+127/646030/campos_512_v4
+127/646037/campos_512_v4
+127/646044/campos_512_v4
+127/646045/campos_512_v4
+127/646050/campos_512_v4
+127/646069/campos_512_v4
+127/646072/campos_512_v4
+127/646075/campos_512_v4
+127/646090/campos_512_v4
+127/646119/campos_512_v4
+127/646125/campos_512_v4
+127/646163/campos_512_v4
+127/646174/campos_512_v4
+127/646199/campos_512_v4
+127/646202/campos_512_v4
+127/646209/campos_512_v4
+127/646230/campos_512_v4
+127/646231/campos_512_v4
+127/646235/campos_512_v4
+127/646242/campos_512_v4
+127/646250/campos_512_v4
+127/646270/campos_512_v4
+127/646271/campos_512_v4
+127/646281/campos_512_v4
+127/646285/campos_512_v4
+127/646294/campos_512_v4
+127/646296/campos_512_v4
+127/646329/campos_512_v4
+127/646336/campos_512_v4
+127/646347/campos_512_v4
+127/646363/campos_512_v4
+127/646392/campos_512_v4
+127/646394/campos_512_v4
+127/646397/campos_512_v4
+127/646412/campos_512_v4
+127/646420/campos_512_v4
+127/646431/campos_512_v4
+127/646432/campos_512_v4
+127/646444/campos_512_v4
+127/646456/campos_512_v4
+127/646460/campos_512_v4
+127/646462/campos_512_v4
+127/646471/campos_512_v4
+127/646475/campos_512_v4
+127/646476/campos_512_v4
+127/646491/campos_512_v4
+127/646494/campos_512_v4
+127/646500/campos_512_v4
+127/646515/campos_512_v4
+127/646516/campos_512_v4
+127/646519/campos_512_v4
+127/646521/campos_512_v4
+127/646523/campos_512_v4
+127/646528/campos_512_v4
+127/646532/campos_512_v4
+127/646550/campos_512_v4
+127/646555/campos_512_v4
+127/646563/campos_512_v4
+127/646565/campos_512_v4
+127/646567/campos_512_v4
+127/646573/campos_512_v4
+127/646577/campos_512_v4
+127/646578/campos_512_v4
+127/646599/campos_512_v4
+127/646619/campos_512_v4
+127/646631/campos_512_v4
+127/646665/campos_512_v4
+127/646671/campos_512_v4
+127/646680/campos_512_v4
+127/646682/campos_512_v4
+127/646685/campos_512_v4
+127/646686/campos_512_v4
+127/646688/campos_512_v4
+127/646690/campos_512_v4
+127/646691/campos_512_v4
+127/646702/campos_512_v4
+127/646709/campos_512_v4
+127/646717/campos_512_v4
+127/646721/campos_512_v4
+127/646723/campos_512_v4
+127/646732/campos_512_v4
+127/646733/campos_512_v4
+127/646735/campos_512_v4
+127/646744/campos_512_v4
+127/646745/campos_512_v4
+127/646748/campos_512_v4
+127/646758/campos_512_v4
+127/646767/campos_512_v4
+127/646771/campos_512_v4
+127/646773/campos_512_v4
+127/646777/campos_512_v4
+127/646780/campos_512_v4
+127/646787/campos_512_v4
+127/646792/campos_512_v4
+127/646793/campos_512_v4
+127/646801/campos_512_v4
+127/646814/campos_512_v4
+127/646832/campos_512_v4
+127/646847/campos_512_v4
+127/646861/campos_512_v4
+127/646872/campos_512_v4
+127/646878/campos_512_v4
+127/646888/campos_512_v4
+127/646889/campos_512_v4
+127/646892/campos_512_v4
+127/646893/campos_512_v4
+127/646905/campos_512_v4
+127/646924/campos_512_v4
+127/646930/campos_512_v4
+127/646934/campos_512_v4
+127/646935/campos_512_v4
+127/646940/campos_512_v4
+127/646946/campos_512_v4
+127/646972/campos_512_v4
+127/646977/campos_512_v4
+127/646987/campos_512_v4
+127/646989/campos_512_v4
+127/647009/campos_512_v4
+127/647024/campos_512_v4
+127/647028/campos_512_v4
+127/647032/campos_512_v4
+127/647035/campos_512_v4
+127/647039/campos_512_v4
+127/647047/campos_512_v4
+127/647069/campos_512_v4
+127/647073/campos_512_v4
+127/647077/campos_512_v4
+127/647079/campos_512_v4
+127/647081/campos_512_v4
+127/647082/campos_512_v4
+127/647090/campos_512_v4
+127/647098/campos_512_v4
+127/647115/campos_512_v4
+127/647118/campos_512_v4
+127/647128/campos_512_v4
+127/647135/campos_512_v4
+127/647138/campos_512_v4
+127/647140/campos_512_v4
+127/647142/campos_512_v4
+127/647148/campos_512_v4
+127/647150/campos_512_v4
+127/647156/campos_512_v4
+127/647176/campos_512_v4
+127/647194/campos_512_v4
+127/647209/campos_512_v4
+127/647210/campos_512_v4
+127/647211/campos_512_v4
+127/647217/campos_512_v4
+127/647237/campos_512_v4
+127/647244/campos_512_v4
+127/647245/campos_512_v4
+127/647254/campos_512_v4
+127/647255/campos_512_v4
+127/647259/campos_512_v4
+127/647263/campos_512_v4
+127/647264/campos_512_v4
+127/647269/campos_512_v4
+127/647271/campos_512_v4
+127/647277/campos_512_v4
+127/647278/campos_512_v4
+127/647281/campos_512_v4
+127/647282/campos_512_v4
+127/647290/campos_512_v4
+127/647300/campos_512_v4
+127/647302/campos_512_v4
+127/647307/campos_512_v4
+127/647309/campos_512_v4
+127/647313/campos_512_v4
+127/647317/campos_512_v4
+127/647318/campos_512_v4
+127/647322/campos_512_v4
+127/647323/campos_512_v4
+127/647328/campos_512_v4
+127/647333/campos_512_v4
+127/647337/campos_512_v4
+127/647347/campos_512_v4
+127/647360/campos_512_v4
+127/647367/campos_512_v4
+127/647382/campos_512_v4
+127/647405/campos_512_v4
+127/647427/campos_512_v4
+127/647428/campos_512_v4
+127/647430/campos_512_v4
+127/647439/campos_512_v4
+127/647443/campos_512_v4
+127/647458/campos_512_v4
+127/647466/campos_512_v4
+127/647478/campos_512_v4
+127/647487/campos_512_v4
+127/647490/campos_512_v4
+127/647509/campos_512_v4
+127/647515/campos_512_v4
+127/647524/campos_512_v4
+127/647525/campos_512_v4
+127/647526/campos_512_v4
+127/647538/campos_512_v4
+127/647541/campos_512_v4
+127/647542/campos_512_v4
+127/647545/campos_512_v4
+127/647556/campos_512_v4
+127/647561/campos_512_v4
+127/647562/campos_512_v4
+127/647565/campos_512_v4
+127/647593/campos_512_v4
+127/647607/campos_512_v4
+127/647609/campos_512_v4
+127/647612/campos_512_v4
+127/647616/campos_512_v4
+127/647623/campos_512_v4
+127/647633/campos_512_v4
+127/647638/campos_512_v4
+127/647651/campos_512_v4
+127/647655/campos_512_v4
+127/647662/campos_512_v4
+127/647672/campos_512_v4
+127/647679/campos_512_v4
+127/647681/campos_512_v4
+127/647682/campos_512_v4
+127/647684/campos_512_v4
+127/647691/campos_512_v4
+127/647694/campos_512_v4
+127/647695/campos_512_v4
+127/647699/campos_512_v4
+127/647702/campos_512_v4
+127/647705/campos_512_v4
+127/647706/campos_512_v4
+127/647716/campos_512_v4
+127/647729/campos_512_v4
+127/647735/campos_512_v4
+127/647743/campos_512_v4
+127/647745/campos_512_v4
+127/647752/campos_512_v4
+127/647756/campos_512_v4
+127/647761/campos_512_v4
+127/647775/campos_512_v4
+127/647776/campos_512_v4
+127/647778/campos_512_v4
+127/647822/campos_512_v4
+127/647824/campos_512_v4
+127/647831/campos_512_v4
+127/647833/campos_512_v4
+127/647841/campos_512_v4
+127/647844/campos_512_v4
+127/647847/campos_512_v4
+127/647854/campos_512_v4
+127/647868/campos_512_v4
+127/647885/campos_512_v4
+127/647886/campos_512_v4
+127/647897/campos_512_v4
+127/647901/campos_512_v4
+127/647911/campos_512_v4
+127/647912/campos_512_v4
+127/647915/campos_512_v4
+127/647919/campos_512_v4
+127/647930/campos_512_v4
+127/647941/campos_512_v4
+127/647952/campos_512_v4
+127/647970/campos_512_v4
+127/647975/campos_512_v4
+127/647977/campos_512_v4
+127/647986/campos_512_v4
+127/647987/campos_512_v4
+127/648001/campos_512_v4
+127/648003/campos_512_v4
+127/648007/campos_512_v4
+127/648015/campos_512_v4
+127/648016/campos_512_v4
+127/648017/campos_512_v4
+127/648032/campos_512_v4
+127/648044/campos_512_v4
+127/648069/campos_512_v4
+127/648090/campos_512_v4
+127/648091/campos_512_v4
+127/648102/campos_512_v4
+127/648113/campos_512_v4
+127/648140/campos_512_v4
+127/648159/campos_512_v4
+127/648160/campos_512_v4
+127/648166/campos_512_v4
+127/648171/campos_512_v4
+127/648177/campos_512_v4
+127/648182/campos_512_v4
+127/648183/campos_512_v4
+127/648188/campos_512_v4
+127/648193/campos_512_v4
+127/648199/campos_512_v4
+127/648208/campos_512_v4
+127/648215/campos_512_v4
+127/648220/campos_512_v4
+127/648222/campos_512_v4
+127/648223/campos_512_v4
+127/648229/campos_512_v4
+127/648230/campos_512_v4
+127/648241/campos_512_v4
+127/648242/campos_512_v4
+127/648248/campos_512_v4
+127/648259/campos_512_v4
+127/648260/campos_512_v4
+127/648274/campos_512_v4
+127/648275/campos_512_v4
+127/648282/campos_512_v4
+127/648286/campos_512_v4
+127/648289/campos_512_v4
+127/648298/campos_512_v4
+127/648308/campos_512_v4
+127/648312/campos_512_v4
+127/648313/campos_512_v4
+127/648316/campos_512_v4
+127/648319/campos_512_v4
+127/648322/campos_512_v4
+127/648335/campos_512_v4
+127/648342/campos_512_v4
+127/648348/campos_512_v4
+127/648354/campos_512_v4
+127/648369/campos_512_v4
+127/648370/campos_512_v4
+127/648373/campos_512_v4
+127/648383/campos_512_v4
+127/648388/campos_512_v4
+127/648393/campos_512_v4
+127/648395/campos_512_v4
+127/648400/campos_512_v4
+127/648404/campos_512_v4
+127/648415/campos_512_v4
+127/648433/campos_512_v4
+127/648466/campos_512_v4
+127/648469/campos_512_v4
+127/648487/campos_512_v4
+127/648491/campos_512_v4
+127/648495/campos_512_v4
+127/648498/campos_512_v4
+127/648499/campos_512_v4
+127/648502/campos_512_v4
+127/648522/campos_512_v4
+127/648525/campos_512_v4
+127/648528/campos_512_v4
+127/648548/campos_512_v4
+127/648550/campos_512_v4
+127/648551/campos_512_v4
+127/648557/campos_512_v4
+127/648586/campos_512_v4
+127/648591/campos_512_v4
+127/648595/campos_512_v4
+127/648596/campos_512_v4
+127/648597/campos_512_v4
+127/648600/campos_512_v4
+127/648601/campos_512_v4
+127/648620/campos_512_v4
+127/648626/campos_512_v4
+127/648631/campos_512_v4
+127/648634/campos_512_v4
+127/648656/campos_512_v4
+127/648657/campos_512_v4
+127/648663/campos_512_v4
+127/648701/campos_512_v4
+127/648708/campos_512_v4
+127/648711/campos_512_v4
+127/648720/campos_512_v4
+127/648724/campos_512_v4
+127/648725/campos_512_v4
+127/648732/campos_512_v4
+127/648764/campos_512_v4
+127/648765/campos_512_v4
+127/648775/campos_512_v4
+127/648782/campos_512_v4
+127/648790/campos_512_v4
+127/648803/campos_512_v4
+127/648804/campos_512_v4
+127/648817/campos_512_v4
+127/648819/campos_512_v4
+127/648823/campos_512_v4
+127/648826/campos_512_v4
+127/648841/campos_512_v4
+127/648847/campos_512_v4
+127/648850/campos_512_v4
+127/648858/campos_512_v4
+127/648867/campos_512_v4
+127/648886/campos_512_v4
+127/648888/campos_512_v4
+127/648897/campos_512_v4
+127/648909/campos_512_v4
+127/648916/campos_512_v4
+127/648931/campos_512_v4
+127/648966/campos_512_v4
+127/648968/campos_512_v4
+127/649009/campos_512_v4
+127/649021/campos_512_v4
+127/649026/campos_512_v4
+127/649030/campos_512_v4
+127/649040/campos_512_v4
+127/649041/campos_512_v4
+127/649044/campos_512_v4
+127/649047/campos_512_v4
+127/649066/campos_512_v4
+127/649086/campos_512_v4
+127/649090/campos_512_v4
+127/649110/campos_512_v4
+127/649122/campos_512_v4
+127/649143/campos_512_v4
+127/649145/campos_512_v4
+127/649149/campos_512_v4
+127/649157/campos_512_v4
+127/649175/campos_512_v4
+127/649186/campos_512_v4
+127/649190/campos_512_v4
+127/649195/campos_512_v4
+127/649203/campos_512_v4
+127/649204/campos_512_v4
+127/649227/campos_512_v4
+127/649228/campos_512_v4
+127/649231/campos_512_v4
+127/649248/campos_512_v4
+127/649252/campos_512_v4
+127/649257/campos_512_v4
+127/649260/campos_512_v4
+127/649284/campos_512_v4
+127/649290/campos_512_v4
+127/649291/campos_512_v4
+127/649301/campos_512_v4
+127/649311/campos_512_v4
+127/649326/campos_512_v4
+127/649330/campos_512_v4
+127/649333/campos_512_v4
+127/649337/campos_512_v4
+127/649344/campos_512_v4
+127/649353/campos_512_v4
+127/649363/campos_512_v4
+127/649364/campos_512_v4
+127/649381/campos_512_v4
+127/649382/campos_512_v4
+127/649395/campos_512_v4
+127/649399/campos_512_v4
+127/649406/campos_512_v4
+127/649413/campos_512_v4
+127/649419/campos_512_v4
+127/649423/campos_512_v4
+127/649427/campos_512_v4
+127/649432/campos_512_v4
+127/649433/campos_512_v4
+127/649438/campos_512_v4
+127/649440/campos_512_v4
+127/649441/campos_512_v4
+127/649444/campos_512_v4
+127/649461/campos_512_v4
+127/649464/campos_512_v4
+127/649470/campos_512_v4
+127/649474/campos_512_v4
+127/649477/campos_512_v4
+127/649499/campos_512_v4
+127/649505/campos_512_v4
+127/649506/campos_512_v4
+127/649518/campos_512_v4
+127/649523/campos_512_v4
+127/649531/campos_512_v4
+127/649537/campos_512_v4
+127/649547/campos_512_v4
+127/649567/campos_512_v4
+127/649582/campos_512_v4
+127/649583/campos_512_v4
+127/649623/campos_512_v4
+127/649625/campos_512_v4
+127/649656/campos_512_v4
+127/649663/campos_512_v4
+127/649670/campos_512_v4
+127/649681/campos_512_v4
+127/649687/campos_512_v4
+127/649689/campos_512_v4
+127/649699/campos_512_v4
+127/649712/campos_512_v4
+127/649715/campos_512_v4
+127/649723/campos_512_v4
+127/649728/campos_512_v4
+127/649742/campos_512_v4
+127/649749/campos_512_v4
+127/649752/campos_512_v4
+127/649763/campos_512_v4
+127/649765/campos_512_v4
+127/649774/campos_512_v4
+127/649775/campos_512_v4
+127/649785/campos_512_v4
+127/649789/campos_512_v4
+127/649801/campos_512_v4
+127/649832/campos_512_v4
+127/649833/campos_512_v4
+127/649834/campos_512_v4
+127/649836/campos_512_v4
+127/649844/campos_512_v4
+127/649855/campos_512_v4
+127/649867/campos_512_v4
+127/649894/campos_512_v4
+127/649896/campos_512_v4
+127/649898/campos_512_v4
+127/649902/campos_512_v4
+127/649936/campos_512_v4
+127/649946/campos_512_v4
+127/649952/campos_512_v4
+127/649957/campos_512_v4
+127/649958/campos_512_v4
+127/649976/campos_512_v4
+127/649984/campos_512_v4
+128/650004/campos_512_v4
+128/650015/campos_512_v4
+128/650021/campos_512_v4
+128/650031/campos_512_v4
+128/650039/campos_512_v4
+128/650049/campos_512_v4
+128/650052/campos_512_v4
+128/650055/campos_512_v4
+128/650063/campos_512_v4
+128/650067/campos_512_v4
+128/650083/campos_512_v4
+128/650088/campos_512_v4
+128/650095/campos_512_v4
+128/650106/campos_512_v4
+128/650112/campos_512_v4
+128/650114/campos_512_v4
+128/650119/campos_512_v4
+128/650121/campos_512_v4
+128/650132/campos_512_v4
+128/650147/campos_512_v4
+128/650149/campos_512_v4
+128/650156/campos_512_v4
+128/650170/campos_512_v4
+128/650182/campos_512_v4
+128/650186/campos_512_v4
+128/650188/campos_512_v4
+128/650193/campos_512_v4
+128/650199/campos_512_v4
+128/650205/campos_512_v4
+128/650208/campos_512_v4
+128/650211/campos_512_v4
+128/650244/campos_512_v4
+128/650254/campos_512_v4
+128/650272/campos_512_v4
+128/650279/campos_512_v4
+128/650287/campos_512_v4
+128/650292/campos_512_v4
+128/650302/campos_512_v4
+128/650308/campos_512_v4
+128/650320/campos_512_v4
+128/650328/campos_512_v4
+128/650333/campos_512_v4
+128/650336/campos_512_v4
+128/650342/campos_512_v4
+128/650354/campos_512_v4
+128/650356/campos_512_v4
+128/650361/campos_512_v4
+128/650367/campos_512_v4
+128/650377/campos_512_v4
+128/650384/campos_512_v4
+128/650385/campos_512_v4
+128/650386/campos_512_v4
+128/650387/campos_512_v4
+128/650391/campos_512_v4
+128/650393/campos_512_v4
+128/650409/campos_512_v4
+128/650410/campos_512_v4
+128/650420/campos_512_v4
+128/650422/campos_512_v4
+128/650424/campos_512_v4
+128/650426/campos_512_v4
+128/650431/campos_512_v4
+128/650432/campos_512_v4
+128/650436/campos_512_v4
+128/650441/campos_512_v4
+128/650450/campos_512_v4
+128/650454/campos_512_v4
+128/650460/campos_512_v4
+128/650461/campos_512_v4
+128/650464/campos_512_v4
+128/650466/campos_512_v4
+128/650467/campos_512_v4
+128/650470/campos_512_v4
+128/650485/campos_512_v4
+128/650497/campos_512_v4
+128/650503/campos_512_v4
+128/650539/campos_512_v4
+128/650549/campos_512_v4
+128/650563/campos_512_v4
+128/650573/campos_512_v4
+128/650580/campos_512_v4
+128/650591/campos_512_v4
+128/650593/campos_512_v4
+128/650601/campos_512_v4
+128/650605/campos_512_v4
+128/650619/campos_512_v4
+128/650621/campos_512_v4
+128/650628/campos_512_v4
+128/650632/campos_512_v4
+128/650633/campos_512_v4
+128/650636/campos_512_v4
+128/650645/campos_512_v4
+128/650655/campos_512_v4
+128/650662/campos_512_v4
+128/650664/campos_512_v4
+128/650668/campos_512_v4
+128/650686/campos_512_v4
+128/650688/campos_512_v4
+128/650697/campos_512_v4
+128/650709/campos_512_v4
+128/650720/campos_512_v4
+128/650727/campos_512_v4
+128/650732/campos_512_v4
+128/650743/campos_512_v4
+128/650746/campos_512_v4
+128/650753/campos_512_v4
+128/650757/campos_512_v4
+128/650759/campos_512_v4
+128/650763/campos_512_v4
+128/650774/campos_512_v4
+128/650798/campos_512_v4
+128/650802/campos_512_v4
+128/650810/campos_512_v4
+128/650827/campos_512_v4
+128/650839/campos_512_v4
+128/650844/campos_512_v4
+128/650850/campos_512_v4
+128/650857/campos_512_v4
+128/650858/campos_512_v4
+128/650866/campos_512_v4
+128/650882/campos_512_v4
+128/650909/campos_512_v4
+128/650915/campos_512_v4
+128/650918/campos_512_v4
+128/650938/campos_512_v4
+128/650950/campos_512_v4
+128/650961/campos_512_v4
+128/650962/campos_512_v4
+128/650965/campos_512_v4
+128/650971/campos_512_v4
+128/650988/campos_512_v4
+128/651002/campos_512_v4
+128/651025/campos_512_v4
+128/651052/campos_512_v4
+128/651055/campos_512_v4
+128/651058/campos_512_v4
+128/651059/campos_512_v4
+128/651070/campos_512_v4
+128/651092/campos_512_v4
+128/651110/campos_512_v4
+128/651125/campos_512_v4
+128/651139/campos_512_v4
+128/651152/campos_512_v4
+128/651161/campos_512_v4
+128/651176/campos_512_v4
+128/651186/campos_512_v4
+128/651190/campos_512_v4
+128/651199/campos_512_v4
+128/651205/campos_512_v4
+128/651209/campos_512_v4
+128/651214/campos_512_v4
+128/651216/campos_512_v4
+128/651224/campos_512_v4
+128/651236/campos_512_v4
+128/651255/campos_512_v4
+128/651259/campos_512_v4
+128/651271/campos_512_v4
+128/651274/campos_512_v4
+128/651275/campos_512_v4
+128/651278/campos_512_v4
+128/651282/campos_512_v4
+128/651287/campos_512_v4
+128/651293/campos_512_v4
+128/651300/campos_512_v4
+128/651306/campos_512_v4
+128/651308/campos_512_v4
+128/651311/campos_512_v4
+128/651330/campos_512_v4
+128/651342/campos_512_v4
+128/651351/campos_512_v4
+128/651352/campos_512_v4
+128/651359/campos_512_v4
+128/651367/campos_512_v4
+128/651370/campos_512_v4
+128/651374/campos_512_v4
+128/651380/campos_512_v4
+128/651385/campos_512_v4
+128/651394/campos_512_v4
+128/651396/campos_512_v4
+128/651397/campos_512_v4
+128/651402/campos_512_v4
+128/651406/campos_512_v4
+128/651427/campos_512_v4
+128/651430/campos_512_v4
+128/651437/campos_512_v4
+128/651457/campos_512_v4
+128/651467/campos_512_v4
+128/651472/campos_512_v4
+128/651477/campos_512_v4
+128/651488/campos_512_v4
+128/651497/campos_512_v4
+128/651499/campos_512_v4
+128/651506/campos_512_v4
+128/651517/campos_512_v4
+128/651527/campos_512_v4
+128/651539/campos_512_v4
+128/651542/campos_512_v4
+128/651554/campos_512_v4
+128/651555/campos_512_v4
+128/651558/campos_512_v4
+128/651561/campos_512_v4
+128/651569/campos_512_v4
+128/651571/campos_512_v4
+128/651578/campos_512_v4
+128/651582/campos_512_v4
+128/651595/campos_512_v4
+128/651601/campos_512_v4
+128/651615/campos_512_v4
+128/651633/campos_512_v4
+128/651652/campos_512_v4
+128/651658/campos_512_v4
+128/651660/campos_512_v4
+128/651665/campos_512_v4
+128/651670/campos_512_v4
+128/651681/campos_512_v4
+128/651685/campos_512_v4
+128/651688/campos_512_v4
+128/651691/campos_512_v4
+128/651693/campos_512_v4
+128/651696/campos_512_v4
+128/651701/campos_512_v4
+128/651718/campos_512_v4
+128/651728/campos_512_v4
+128/651732/campos_512_v4
+128/651744/campos_512_v4
+128/651750/campos_512_v4
+128/651762/campos_512_v4
+128/651779/campos_512_v4
+128/651788/campos_512_v4
+128/651794/campos_512_v4
+128/651809/campos_512_v4
+128/651810/campos_512_v4
+128/651814/campos_512_v4
+128/651827/campos_512_v4
+128/651833/campos_512_v4
+128/651838/campos_512_v4
+128/651840/campos_512_v4
+128/651844/campos_512_v4
+128/651849/campos_512_v4
+128/651859/campos_512_v4
+128/651868/campos_512_v4
+128/651874/campos_512_v4
+128/651913/campos_512_v4
+128/651914/campos_512_v4
+128/651920/campos_512_v4
+128/651922/campos_512_v4
+128/651927/campos_512_v4
+128/651942/campos_512_v4
+128/651946/campos_512_v4
+128/651951/campos_512_v4
+128/651952/campos_512_v4
+128/651958/campos_512_v4
+128/651959/campos_512_v4
+128/651967/campos_512_v4
+128/651973/campos_512_v4
+128/651992/campos_512_v4
+128/651995/campos_512_v4
+128/651996/campos_512_v4
+128/652001/campos_512_v4
+128/652008/campos_512_v4
+128/652009/campos_512_v4
+128/652019/campos_512_v4
+128/652028/campos_512_v4
+128/652032/campos_512_v4
+128/652036/campos_512_v4
+128/652037/campos_512_v4
+128/652047/campos_512_v4
+128/652049/campos_512_v4
+128/652057/campos_512_v4
+128/652069/campos_512_v4
+128/652072/campos_512_v4
+128/652080/campos_512_v4
+128/652105/campos_512_v4
+128/652117/campos_512_v4
+128/652123/campos_512_v4
+128/652127/campos_512_v4
+128/652139/campos_512_v4
+128/652143/campos_512_v4
+128/652149/campos_512_v4
+128/652158/campos_512_v4
+128/652159/campos_512_v4
+128/652164/campos_512_v4
+128/652169/campos_512_v4
+128/652191/campos_512_v4
+128/652193/campos_512_v4
+128/652197/campos_512_v4
+128/652198/campos_512_v4
+128/652200/campos_512_v4
+128/652232/campos_512_v4
+128/652249/campos_512_v4
+128/652261/campos_512_v4
+128/652262/campos_512_v4
+128/652264/campos_512_v4
+128/652265/campos_512_v4
+128/652269/campos_512_v4
+128/652270/campos_512_v4
+128/652286/campos_512_v4
+128/652292/campos_512_v4
+128/652306/campos_512_v4
+128/652313/campos_512_v4
+128/652315/campos_512_v4
+128/652319/campos_512_v4
+128/652325/campos_512_v4
+128/652336/campos_512_v4
+128/652342/campos_512_v4
+128/652343/campos_512_v4
+128/652350/campos_512_v4
+128/652357/campos_512_v4
+128/652364/campos_512_v4
+128/652367/campos_512_v4
+128/652370/campos_512_v4
+128/652376/campos_512_v4
+128/652377/campos_512_v4
+128/652380/campos_512_v4
+128/652384/campos_512_v4
+128/652385/campos_512_v4
+128/652394/campos_512_v4
+128/652398/campos_512_v4
+128/652399/campos_512_v4
+128/652411/campos_512_v4
+128/652419/campos_512_v4
+128/652421/campos_512_v4
+128/652424/campos_512_v4
+128/652432/campos_512_v4
+128/652440/campos_512_v4
+128/652443/campos_512_v4
+128/652444/campos_512_v4
+128/652450/campos_512_v4
+128/652454/campos_512_v4
+128/652464/campos_512_v4
+128/652469/campos_512_v4
+128/652478/campos_512_v4
+128/652490/campos_512_v4
+128/652519/campos_512_v4
+128/652524/campos_512_v4
+128/652526/campos_512_v4
+128/652535/campos_512_v4
+128/652538/campos_512_v4
+128/652539/campos_512_v4
+128/652561/campos_512_v4
+128/652573/campos_512_v4
+128/652576/campos_512_v4
+128/652595/campos_512_v4
+128/652619/campos_512_v4
+128/652625/campos_512_v4
+128/652635/campos_512_v4
+128/652636/campos_512_v4
+128/652643/campos_512_v4
+128/652648/campos_512_v4
+128/652661/campos_512_v4
+128/652668/campos_512_v4
+128/652675/campos_512_v4
+128/652678/campos_512_v4
+128/652683/campos_512_v4
+128/652692/campos_512_v4
+128/652694/campos_512_v4
+128/652701/campos_512_v4
+128/652702/campos_512_v4
+128/652708/campos_512_v4
+128/652709/campos_512_v4
+128/652712/campos_512_v4
+128/652726/campos_512_v4
+128/652729/campos_512_v4
+128/652743/campos_512_v4
+128/652748/campos_512_v4
+128/652750/campos_512_v4
+128/652753/campos_512_v4
+128/652759/campos_512_v4
+128/652761/campos_512_v4
+128/652772/campos_512_v4
+128/652773/campos_512_v4
+128/652776/campos_512_v4
+128/652778/campos_512_v4
+128/652782/campos_512_v4
+128/652783/campos_512_v4
+128/652784/campos_512_v4
+128/652790/campos_512_v4
+128/652796/campos_512_v4
+128/652802/campos_512_v4
+128/652810/campos_512_v4
+128/652811/campos_512_v4
+128/652824/campos_512_v4
+128/652837/campos_512_v4
+128/652848/campos_512_v4
+128/652855/campos_512_v4
+128/652870/campos_512_v4
+128/652884/campos_512_v4
+128/652891/campos_512_v4
+128/652898/campos_512_v4
+128/652902/campos_512_v4
+128/652912/campos_512_v4
+128/652939/campos_512_v4
+128/652941/campos_512_v4
+128/652948/campos_512_v4
+128/652959/campos_512_v4
+128/652960/campos_512_v4
+128/652971/campos_512_v4
+128/652999/campos_512_v4
+128/653005/campos_512_v4
+128/653008/campos_512_v4
+128/653009/campos_512_v4
+128/653010/campos_512_v4
+128/653017/campos_512_v4
+128/653020/campos_512_v4
+128/653021/campos_512_v4
+128/653025/campos_512_v4
+128/653028/campos_512_v4
+128/653048/campos_512_v4
+128/653067/campos_512_v4
+128/653074/campos_512_v4
+128/653088/campos_512_v4
+128/653089/campos_512_v4
+128/653093/campos_512_v4
+128/653100/campos_512_v4
+128/653119/campos_512_v4
+128/653152/campos_512_v4
+128/653153/campos_512_v4
+128/653163/campos_512_v4
+128/653166/campos_512_v4
+128/653177/campos_512_v4
+128/653191/campos_512_v4
+128/653195/campos_512_v4
+128/653201/campos_512_v4
+128/653206/campos_512_v4
+128/653220/campos_512_v4
+128/653223/campos_512_v4
+128/653224/campos_512_v4
+128/653227/campos_512_v4
+128/653229/campos_512_v4
+128/653234/campos_512_v4
+128/653249/campos_512_v4
+128/653254/campos_512_v4
+128/653260/campos_512_v4
+128/653261/campos_512_v4
+128/653262/campos_512_v4
+128/653263/campos_512_v4
+128/653265/campos_512_v4
+128/653267/campos_512_v4
+128/653271/campos_512_v4
+128/653272/campos_512_v4
+128/653273/campos_512_v4
+128/653302/campos_512_v4
+128/653312/campos_512_v4
+128/653319/campos_512_v4
+128/653328/campos_512_v4
+128/653341/campos_512_v4
+128/653343/campos_512_v4
+128/653348/campos_512_v4
+128/653370/campos_512_v4
+128/653374/campos_512_v4
+128/653379/campos_512_v4
+128/653383/campos_512_v4
+128/653391/campos_512_v4
+128/653404/campos_512_v4
+128/653405/campos_512_v4
+128/653412/campos_512_v4
+128/653414/campos_512_v4
+128/653420/campos_512_v4
+128/653421/campos_512_v4
+128/653426/campos_512_v4
+128/653437/campos_512_v4
+128/653439/campos_512_v4
+128/653450/campos_512_v4
+128/653457/campos_512_v4
+128/653461/campos_512_v4
+128/653465/campos_512_v4
+128/653478/campos_512_v4
+128/653486/campos_512_v4
+128/653492/campos_512_v4
+128/653495/campos_512_v4
+128/653506/campos_512_v4
+128/653507/campos_512_v4
+128/653508/campos_512_v4
+128/653512/campos_512_v4
+128/653513/campos_512_v4
+128/653532/campos_512_v4
+128/653535/campos_512_v4
+128/653536/campos_512_v4
+128/653537/campos_512_v4
+128/653548/campos_512_v4
+128/653551/campos_512_v4
+128/653561/campos_512_v4
+128/653565/campos_512_v4
+128/653569/campos_512_v4
+128/653574/campos_512_v4
+128/653578/campos_512_v4
+128/653585/campos_512_v4
+128/653588/campos_512_v4
+128/653591/campos_512_v4
+128/653600/campos_512_v4
+128/653602/campos_512_v4
+128/653616/campos_512_v4
+128/653626/campos_512_v4
+128/653628/campos_512_v4
+128/653638/campos_512_v4
+128/653648/campos_512_v4
+128/653649/campos_512_v4
+128/653651/campos_512_v4
+128/653672/campos_512_v4
+128/653673/campos_512_v4
+128/653675/campos_512_v4
+128/653677/campos_512_v4
+128/653678/campos_512_v4
+128/653688/campos_512_v4
+128/653711/campos_512_v4
+128/653712/campos_512_v4
+128/653719/campos_512_v4
+128/653726/campos_512_v4
+128/653734/campos_512_v4
+128/653739/campos_512_v4
+128/653748/campos_512_v4
+128/653752/campos_512_v4
+128/653759/campos_512_v4
+128/653760/campos_512_v4
+128/653766/campos_512_v4
+128/653771/campos_512_v4
+128/653777/campos_512_v4
+128/653796/campos_512_v4
+128/653817/campos_512_v4
+128/653853/campos_512_v4
+128/653864/campos_512_v4
+128/653867/campos_512_v4
+128/653868/campos_512_v4
+128/653880/campos_512_v4
+128/653883/campos_512_v4
+128/653884/campos_512_v4
+128/653889/campos_512_v4
+128/653901/campos_512_v4
+128/653930/campos_512_v4
+128/653963/campos_512_v4
+128/653964/campos_512_v4
+128/653965/campos_512_v4
+128/653970/campos_512_v4
+128/653980/campos_512_v4
+128/653990/campos_512_v4
+128/653991/campos_512_v4
+128/654005/campos_512_v4
+128/654007/campos_512_v4
+128/654021/campos_512_v4
+128/654033/campos_512_v4
+128/654039/campos_512_v4
+128/654041/campos_512_v4
+128/654046/campos_512_v4
+128/654066/campos_512_v4
+128/654077/campos_512_v4
+128/654079/campos_512_v4
+128/654095/campos_512_v4
+128/654113/campos_512_v4
+128/654115/campos_512_v4
+128/654130/campos_512_v4
+128/654134/campos_512_v4
+128/654141/campos_512_v4
+128/654150/campos_512_v4
+128/654166/campos_512_v4
+128/654173/campos_512_v4
+128/654181/campos_512_v4
+128/654197/campos_512_v4
+128/654200/campos_512_v4
+128/654224/campos_512_v4
+128/654230/campos_512_v4
+128/654237/campos_512_v4
+128/654246/campos_512_v4
+128/654256/campos_512_v4
+128/654257/campos_512_v4
+128/654271/campos_512_v4
+128/654273/campos_512_v4
+128/654275/campos_512_v4
+128/654276/campos_512_v4
+128/654280/campos_512_v4
+128/654281/campos_512_v4
+128/654283/campos_512_v4
+128/654288/campos_512_v4
+128/654297/campos_512_v4
+128/654305/campos_512_v4
+128/654311/campos_512_v4
+128/654319/campos_512_v4
+128/654330/campos_512_v4
+128/654335/campos_512_v4
+128/654338/campos_512_v4
+128/654339/campos_512_v4
+128/654341/campos_512_v4
+128/654343/campos_512_v4
+128/654353/campos_512_v4
+128/654369/campos_512_v4
+128/654383/campos_512_v4
+128/654388/campos_512_v4
+128/654392/campos_512_v4
+128/654405/campos_512_v4
+128/654406/campos_512_v4
+128/654409/campos_512_v4
+128/654417/campos_512_v4
+128/654420/campos_512_v4
+128/654426/campos_512_v4
+128/654431/campos_512_v4
+128/654438/campos_512_v4
+128/654450/campos_512_v4
+128/654456/campos_512_v4
+128/654465/campos_512_v4
+128/654470/campos_512_v4
+128/654480/campos_512_v4
+128/654488/campos_512_v4
+128/654489/campos_512_v4
+128/654501/campos_512_v4
+128/654506/campos_512_v4
+128/654523/campos_512_v4
+128/654525/campos_512_v4
+128/654529/campos_512_v4
+128/654534/campos_512_v4
+128/654540/campos_512_v4
+128/654553/campos_512_v4
+128/654554/campos_512_v4
+128/654555/campos_512_v4
+128/654558/campos_512_v4
+128/654571/campos_512_v4
+128/654574/campos_512_v4
+128/654588/campos_512_v4
+128/654596/campos_512_v4
+128/654601/campos_512_v4
+128/654603/campos_512_v4
+128/654610/campos_512_v4
+128/654619/campos_512_v4
+128/654621/campos_512_v4
+128/654627/campos_512_v4
+128/654639/campos_512_v4
+128/654645/campos_512_v4
+128/654653/campos_512_v4
+128/654654/campos_512_v4
+128/654658/campos_512_v4
+128/654660/campos_512_v4
+128/654667/campos_512_v4
+128/654675/campos_512_v4
+128/654678/campos_512_v4
+128/654682/campos_512_v4
+128/654691/campos_512_v4
+128/654694/campos_512_v4
+128/654696/campos_512_v4
+128/654727/campos_512_v4
+128/654731/campos_512_v4
+128/654738/campos_512_v4
+128/654752/campos_512_v4
+128/654759/campos_512_v4
+128/654767/campos_512_v4
+128/654775/campos_512_v4
+128/654796/campos_512_v4
+128/654797/campos_512_v4
+128/654809/campos_512_v4
+128/654820/campos_512_v4
+128/654836/campos_512_v4
+128/654839/campos_512_v4
+128/654845/campos_512_v4
+128/654857/campos_512_v4
+128/654879/campos_512_v4
+128/654884/campos_512_v4
+128/654890/campos_512_v4
+128/654892/campos_512_v4
+128/654921/campos_512_v4
+128/654926/campos_512_v4
+128/654933/campos_512_v4
+128/654937/campos_512_v4
+128/654939/campos_512_v4
+128/654941/campos_512_v4
+128/654946/campos_512_v4
+128/654958/campos_512_v4
+128/654977/campos_512_v4
+128/654980/campos_512_v4
+128/654982/campos_512_v4
+128/654995/campos_512_v4
+128/654998/campos_512_v4
+129/655006/campos_512_v4
+129/655008/campos_512_v4
+129/655009/campos_512_v4
+129/655013/campos_512_v4
+129/655015/campos_512_v4
+129/655017/campos_512_v4
+129/655019/campos_512_v4
+129/655027/campos_512_v4
+129/655028/campos_512_v4
+129/655041/campos_512_v4
+129/655045/campos_512_v4
+129/655046/campos_512_v4
+129/655056/campos_512_v4
+129/655067/campos_512_v4
+129/655101/campos_512_v4
+129/655107/campos_512_v4
+129/655110/campos_512_v4
+129/655137/campos_512_v4
+129/655145/campos_512_v4
+129/655149/campos_512_v4
+129/655156/campos_512_v4
+129/655159/campos_512_v4
+129/655160/campos_512_v4
+129/655175/campos_512_v4
+129/655177/campos_512_v4
+129/655187/campos_512_v4
+129/655207/campos_512_v4
+129/655208/campos_512_v4
+129/655216/campos_512_v4
+129/655227/campos_512_v4
+129/655238/campos_512_v4
+129/655244/campos_512_v4
+129/655256/campos_512_v4
+129/655258/campos_512_v4
+129/655265/campos_512_v4
+129/655267/campos_512_v4
+129/655270/campos_512_v4
+129/655272/campos_512_v4
+129/655274/campos_512_v4
+129/655277/campos_512_v4
+129/655282/campos_512_v4
+129/655293/campos_512_v4
+129/655296/campos_512_v4
+129/655314/campos_512_v4
+129/655319/campos_512_v4
+129/655324/campos_512_v4
+129/655325/campos_512_v4
+129/655328/campos_512_v4
+129/655332/campos_512_v4
+129/655346/campos_512_v4
+129/655347/campos_512_v4
+129/655348/campos_512_v4
+129/655358/campos_512_v4
+129/655363/campos_512_v4
+129/655375/campos_512_v4
+129/655378/campos_512_v4
+129/655388/campos_512_v4
+129/655389/campos_512_v4
+129/655403/campos_512_v4
+129/655408/campos_512_v4
+129/655409/campos_512_v4
+129/655416/campos_512_v4
+129/655418/campos_512_v4
+129/655420/campos_512_v4
+129/655422/campos_512_v4
+129/655426/campos_512_v4
+129/655432/campos_512_v4
+129/655449/campos_512_v4
+129/655453/campos_512_v4
+129/655459/campos_512_v4
+129/655461/campos_512_v4
+129/655465/campos_512_v4
+129/655473/campos_512_v4
+129/655479/campos_512_v4
+129/655487/campos_512_v4
+129/655498/campos_512_v4
+129/655503/campos_512_v4
+129/655504/campos_512_v4
+129/655517/campos_512_v4
+129/655520/campos_512_v4
+129/655527/campos_512_v4
+129/655541/campos_512_v4
+129/655546/campos_512_v4
+129/655548/campos_512_v4
+129/655565/campos_512_v4
+129/655566/campos_512_v4
+129/655570/campos_512_v4
+129/655571/campos_512_v4
+129/655575/campos_512_v4
+129/655589/campos_512_v4
+129/655593/campos_512_v4
+129/655598/campos_512_v4
+129/655627/campos_512_v4
+129/655630/campos_512_v4
+129/655638/campos_512_v4
+129/655650/campos_512_v4
+129/655655/campos_512_v4
+129/655660/campos_512_v4
+129/655683/campos_512_v4
+129/655690/campos_512_v4
+129/655692/campos_512_v4
+129/655694/campos_512_v4
+129/655695/campos_512_v4
+129/655702/campos_512_v4
+129/655710/campos_512_v4
+129/655717/campos_512_v4
+129/655719/campos_512_v4
+129/655736/campos_512_v4
+129/655738/campos_512_v4
+129/655740/campos_512_v4
+129/655757/campos_512_v4
+129/655763/campos_512_v4
+129/655771/campos_512_v4
+129/655782/campos_512_v4
+129/655786/campos_512_v4
+129/655791/campos_512_v4
+129/655795/campos_512_v4
+129/655799/campos_512_v4
+129/655808/campos_512_v4
+129/655813/campos_512_v4
+129/655817/campos_512_v4
+129/655820/campos_512_v4
+129/655822/campos_512_v4
+129/655825/campos_512_v4
+129/655837/campos_512_v4
+129/655844/campos_512_v4
+129/655857/campos_512_v4
+129/655861/campos_512_v4
+129/655872/campos_512_v4
+129/655875/campos_512_v4
+129/655894/campos_512_v4
+129/655918/campos_512_v4
+129/655938/campos_512_v4
+129/655944/campos_512_v4
+129/655945/campos_512_v4
+129/655948/campos_512_v4
+129/655950/campos_512_v4
+129/655952/campos_512_v4
+129/655953/campos_512_v4
+129/655954/campos_512_v4
+129/655959/campos_512_v4
+129/655962/campos_512_v4
+129/655973/campos_512_v4
+129/655976/campos_512_v4
+129/655980/campos_512_v4
+129/655983/campos_512_v4
+129/655984/campos_512_v4
+129/655990/campos_512_v4
+129/656013/campos_512_v4
+129/656046/campos_512_v4
+129/656053/campos_512_v4
+129/656054/campos_512_v4
+129/656058/campos_512_v4
+129/656060/campos_512_v4
+129/656064/campos_512_v4
+129/656079/campos_512_v4
+129/656085/campos_512_v4
+129/656087/campos_512_v4
+129/656088/campos_512_v4
+129/656102/campos_512_v4
+129/656108/campos_512_v4
+129/656110/campos_512_v4
+129/656117/campos_512_v4
+129/656120/campos_512_v4
+129/656124/campos_512_v4
+129/656140/campos_512_v4
+129/656192/campos_512_v4
+129/656197/campos_512_v4
+129/656199/campos_512_v4
+129/656203/campos_512_v4
+129/656212/campos_512_v4
+129/656221/campos_512_v4
+129/656223/campos_512_v4
+129/656226/campos_512_v4
+129/656228/campos_512_v4
+129/656241/campos_512_v4
+129/656242/campos_512_v4
+129/656246/campos_512_v4
+129/656288/campos_512_v4
+129/656292/campos_512_v4
+129/656295/campos_512_v4
+129/656324/campos_512_v4
+129/656341/campos_512_v4
+129/656351/campos_512_v4
+129/656358/campos_512_v4
+129/656361/campos_512_v4
+129/656386/campos_512_v4
+129/656407/campos_512_v4
+129/656412/campos_512_v4
+129/656431/campos_512_v4
+129/656450/campos_512_v4
+129/656461/campos_512_v4
+129/656464/campos_512_v4
+129/656479/campos_512_v4
+129/656484/campos_512_v4
+129/656490/campos_512_v4
+129/656501/campos_512_v4
+129/656502/campos_512_v4
+129/656524/campos_512_v4
+129/656533/campos_512_v4
+129/656548/campos_512_v4
+129/656559/campos_512_v4
+129/656570/campos_512_v4
+129/656593/campos_512_v4
+129/656597/campos_512_v4
+129/656616/campos_512_v4
+129/656617/campos_512_v4
+129/656621/campos_512_v4
+129/656631/campos_512_v4
+129/656636/campos_512_v4
+129/656637/campos_512_v4
+129/656642/campos_512_v4
+129/656653/campos_512_v4
+129/656659/campos_512_v4
+129/656678/campos_512_v4
+129/656682/campos_512_v4
+129/656690/campos_512_v4
+129/656692/campos_512_v4
+129/656694/campos_512_v4
+129/656697/campos_512_v4
+129/656698/campos_512_v4
+129/656711/campos_512_v4
+129/656719/campos_512_v4
+129/656725/campos_512_v4
+129/656745/campos_512_v4
+129/656746/campos_512_v4
+129/656755/campos_512_v4
+129/656766/campos_512_v4
+129/656794/campos_512_v4
+129/656795/campos_512_v4
+129/656808/campos_512_v4
+129/656809/campos_512_v4
+129/656816/campos_512_v4
+129/656836/campos_512_v4
+129/656837/campos_512_v4
+129/656843/campos_512_v4
+129/656844/campos_512_v4
+129/656850/campos_512_v4
+129/656853/campos_512_v4
+129/656855/campos_512_v4
+129/656856/campos_512_v4
+129/656867/campos_512_v4
+129/656872/campos_512_v4
+129/656875/campos_512_v4
+129/656877/campos_512_v4
+129/656881/campos_512_v4
+129/656888/campos_512_v4
+129/656894/campos_512_v4
+129/656896/campos_512_v4
+129/656900/campos_512_v4
+129/656902/campos_512_v4
+129/656903/campos_512_v4
+129/656911/campos_512_v4
+129/656915/campos_512_v4
+129/656920/campos_512_v4
+129/656924/campos_512_v4
+129/656926/campos_512_v4
+129/656940/campos_512_v4
+129/656943/campos_512_v4
+129/656951/campos_512_v4
+129/656956/campos_512_v4
+129/656957/campos_512_v4
+129/656959/campos_512_v4
+129/656965/campos_512_v4
+129/656969/campos_512_v4
+129/656987/campos_512_v4
+129/656999/campos_512_v4
+129/657004/campos_512_v4
+129/657017/campos_512_v4
+129/657023/campos_512_v4
+129/657029/campos_512_v4
+129/657030/campos_512_v4
+129/657034/campos_512_v4
+129/657037/campos_512_v4
+129/657039/campos_512_v4
+129/657051/campos_512_v4
+129/657060/campos_512_v4
+129/657089/campos_512_v4
+129/657091/campos_512_v4
+129/657097/campos_512_v4
+129/657114/campos_512_v4
+129/657116/campos_512_v4
+129/657124/campos_512_v4
+129/657125/campos_512_v4
+129/657128/campos_512_v4
+129/657133/campos_512_v4
+129/657147/campos_512_v4
+129/657160/campos_512_v4
+129/657168/campos_512_v4
+129/657172/campos_512_v4
+129/657174/campos_512_v4
+129/657175/campos_512_v4
+129/657179/campos_512_v4
+129/657185/campos_512_v4
+129/657193/campos_512_v4
+129/657206/campos_512_v4
+129/657211/campos_512_v4
+129/657217/campos_512_v4
+129/657219/campos_512_v4
+129/657245/campos_512_v4
+129/657255/campos_512_v4
+129/657270/campos_512_v4
+129/657285/campos_512_v4
+129/657291/campos_512_v4
+129/657298/campos_512_v4
+129/657311/campos_512_v4
+129/657314/campos_512_v4
+129/657322/campos_512_v4
+129/657369/campos_512_v4
+129/657374/campos_512_v4
+129/657386/campos_512_v4
+129/657388/campos_512_v4
+129/657398/campos_512_v4
+129/657405/campos_512_v4
+129/657415/campos_512_v4
+129/657428/campos_512_v4
+129/657441/campos_512_v4
+129/657448/campos_512_v4
+129/657454/campos_512_v4
+129/657460/campos_512_v4
+129/657463/campos_512_v4
+129/657465/campos_512_v4
+129/657467/campos_512_v4
+129/657472/campos_512_v4
+129/657479/campos_512_v4
+129/657487/campos_512_v4
+129/657492/campos_512_v4
+129/657508/campos_512_v4
+129/657514/campos_512_v4
+129/657547/campos_512_v4
+129/657549/campos_512_v4
+129/657555/campos_512_v4
+129/657558/campos_512_v4
+129/657569/campos_512_v4
+129/657576/campos_512_v4
+129/657581/campos_512_v4
+129/657584/campos_512_v4
+129/657596/campos_512_v4
+129/657603/campos_512_v4
+129/657618/campos_512_v4
+129/657619/campos_512_v4
+129/657625/campos_512_v4
+129/657633/campos_512_v4
+129/657642/campos_512_v4
+129/657644/campos_512_v4
+129/657657/campos_512_v4
+129/657658/campos_512_v4
+129/657659/campos_512_v4
+129/657660/campos_512_v4
+129/657665/campos_512_v4
+129/657677/campos_512_v4
+129/657685/campos_512_v4
+129/657721/campos_512_v4
+129/657729/campos_512_v4
+129/657732/campos_512_v4
+129/657742/campos_512_v4
+129/657753/campos_512_v4
+129/657761/campos_512_v4
+129/657771/campos_512_v4
+129/657776/campos_512_v4
+129/657783/campos_512_v4
+129/657784/campos_512_v4
+129/657791/campos_512_v4
+129/657798/campos_512_v4
+129/657823/campos_512_v4
+129/657830/campos_512_v4
+129/657833/campos_512_v4
+129/657842/campos_512_v4
+129/657847/campos_512_v4
+129/657850/campos_512_v4
+129/657852/campos_512_v4
+129/657859/campos_512_v4
+129/657868/campos_512_v4
+129/657877/campos_512_v4
+129/657881/campos_512_v4
+129/657885/campos_512_v4
+129/657888/campos_512_v4
+129/657889/campos_512_v4
+129/657891/campos_512_v4
+129/657895/campos_512_v4
+129/657898/campos_512_v4
+129/657902/campos_512_v4
+129/657905/campos_512_v4
+129/657913/campos_512_v4
+129/657922/campos_512_v4
+129/657939/campos_512_v4
+129/657941/campos_512_v4
+129/657942/campos_512_v4
+129/657947/campos_512_v4
+129/657954/campos_512_v4
+129/657966/campos_512_v4
+129/657968/campos_512_v4
+129/657971/campos_512_v4
+129/657978/campos_512_v4
+129/657979/campos_512_v4
+129/657984/campos_512_v4
+129/657986/campos_512_v4
+129/657992/campos_512_v4
+129/657998/campos_512_v4
+129/658012/campos_512_v4
+129/658016/campos_512_v4
+129/658017/campos_512_v4
+129/658018/campos_512_v4
+129/658028/campos_512_v4
+129/658035/campos_512_v4
+129/658043/campos_512_v4
+129/658048/campos_512_v4
+129/658049/campos_512_v4
+129/658051/campos_512_v4
+129/658061/campos_512_v4
+129/658074/campos_512_v4
+129/658081/campos_512_v4
+129/658084/campos_512_v4
+129/658087/campos_512_v4
+129/658089/campos_512_v4
+129/658093/campos_512_v4
+129/658094/campos_512_v4
+129/658102/campos_512_v4
+129/658114/campos_512_v4
+129/658133/campos_512_v4
+129/658138/campos_512_v4
+129/658151/campos_512_v4
+129/658158/campos_512_v4
+129/658163/campos_512_v4
+129/658167/campos_512_v4
+129/658170/campos_512_v4
+129/658175/campos_512_v4
+129/658185/campos_512_v4
+129/658188/campos_512_v4
+129/658214/campos_512_v4
+129/658219/campos_512_v4
+129/658230/campos_512_v4
+129/658231/campos_512_v4
+129/658235/campos_512_v4
+129/658236/campos_512_v4
+129/658276/campos_512_v4
+129/658279/campos_512_v4
+129/658280/campos_512_v4
+129/658285/campos_512_v4
+129/658289/campos_512_v4
+129/658295/campos_512_v4
+129/658301/campos_512_v4
+129/658305/campos_512_v4
+129/658307/campos_512_v4
+129/658316/campos_512_v4
+129/658320/campos_512_v4
+129/658343/campos_512_v4
+129/658353/campos_512_v4
+129/658374/campos_512_v4
+129/658375/campos_512_v4
+129/658376/campos_512_v4
+129/658408/campos_512_v4
+129/658410/campos_512_v4
+129/658447/campos_512_v4
+129/658473/campos_512_v4
+129/658486/campos_512_v4
+129/658489/campos_512_v4
+129/658499/campos_512_v4
+129/658501/campos_512_v4
+129/658509/campos_512_v4
+129/658512/campos_512_v4
+129/658514/campos_512_v4
+129/658517/campos_512_v4
+129/658528/campos_512_v4
+129/658531/campos_512_v4
+129/658537/campos_512_v4
+129/658545/campos_512_v4
+129/658554/campos_512_v4
+129/658570/campos_512_v4
+129/658574/campos_512_v4
+129/658577/campos_512_v4
+129/658579/campos_512_v4
+129/658586/campos_512_v4
+129/658601/campos_512_v4
+129/658603/campos_512_v4
+129/658609/campos_512_v4
+129/658611/campos_512_v4
+129/658621/campos_512_v4
+129/658633/campos_512_v4
+129/658634/campos_512_v4
+129/658635/campos_512_v4
+129/658637/campos_512_v4
+129/658642/campos_512_v4
+129/658643/campos_512_v4
+129/658647/campos_512_v4
+129/658655/campos_512_v4
+129/658660/campos_512_v4
+129/658663/campos_512_v4
+129/658670/campos_512_v4
+129/658671/campos_512_v4
+129/658681/campos_512_v4
+129/658686/campos_512_v4
+129/658692/campos_512_v4
+129/658694/campos_512_v4
+129/658697/campos_512_v4
+129/658719/campos_512_v4
+129/658745/campos_512_v4
+129/658762/campos_512_v4
+129/658767/campos_512_v4
+129/658775/campos_512_v4
+129/658777/campos_512_v4
+129/658783/campos_512_v4
+129/658786/campos_512_v4
+129/658800/campos_512_v4
+129/658807/campos_512_v4
+129/658826/campos_512_v4
+129/658828/campos_512_v4
+129/658837/campos_512_v4
+129/658840/campos_512_v4
+129/658845/campos_512_v4
+129/658851/campos_512_v4
+129/658853/campos_512_v4
+129/658854/campos_512_v4
+129/658859/campos_512_v4
+129/658864/campos_512_v4
+129/658867/campos_512_v4
+129/658875/campos_512_v4
+129/658886/campos_512_v4
+129/658895/campos_512_v4
+129/658899/campos_512_v4
+129/658903/campos_512_v4
+129/658906/campos_512_v4
+129/658918/campos_512_v4
+129/658933/campos_512_v4
+129/658938/campos_512_v4
+129/658940/campos_512_v4
+129/658941/campos_512_v4
+129/658943/campos_512_v4
+129/658949/campos_512_v4
+129/658952/campos_512_v4
+129/658959/campos_512_v4
+129/658961/campos_512_v4
+129/658974/campos_512_v4
+129/658977/campos_512_v4
+129/658981/campos_512_v4
+129/659002/campos_512_v4
+129/659003/campos_512_v4
+129/659005/campos_512_v4
+129/659007/campos_512_v4
+129/659008/campos_512_v4
+129/659012/campos_512_v4
+129/659016/campos_512_v4
+129/659032/campos_512_v4
+129/659034/campos_512_v4
+129/659037/campos_512_v4
+129/659057/campos_512_v4
+129/659060/campos_512_v4
+129/659061/campos_512_v4
+129/659068/campos_512_v4
+129/659073/campos_512_v4
+129/659074/campos_512_v4
+129/659085/campos_512_v4
+129/659098/campos_512_v4
+129/659103/campos_512_v4
+129/659107/campos_512_v4
+129/659110/campos_512_v4
+129/659119/campos_512_v4
+129/659130/campos_512_v4
+129/659132/campos_512_v4
+129/659143/campos_512_v4
+129/659155/campos_512_v4
+129/659161/campos_512_v4
+129/659171/campos_512_v4
+129/659180/campos_512_v4
+129/659191/campos_512_v4
+129/659193/campos_512_v4
+129/659194/campos_512_v4
+129/659207/campos_512_v4
+129/659234/campos_512_v4
+129/659236/campos_512_v4
+129/659237/campos_512_v4
+129/659243/campos_512_v4
+129/659246/campos_512_v4
+129/659253/campos_512_v4
+129/659259/campos_512_v4
+129/659271/campos_512_v4
+129/659280/campos_512_v4
+129/659288/campos_512_v4
+129/659297/campos_512_v4
+129/659300/campos_512_v4
+129/659302/campos_512_v4
+129/659303/campos_512_v4
+129/659308/campos_512_v4
+129/659317/campos_512_v4
+129/659318/campos_512_v4
+129/659320/campos_512_v4
+129/659326/campos_512_v4
+129/659345/campos_512_v4
+129/659354/campos_512_v4
+129/659361/campos_512_v4
+129/659371/campos_512_v4
+129/659387/campos_512_v4
+129/659390/campos_512_v4
+129/659395/campos_512_v4
+129/659408/campos_512_v4
+129/659413/campos_512_v4
+129/659416/campos_512_v4
+129/659417/campos_512_v4
+129/659420/campos_512_v4
+129/659425/campos_512_v4
+129/659430/campos_512_v4
+129/659434/campos_512_v4
+129/659438/campos_512_v4
+129/659440/campos_512_v4
+129/659445/campos_512_v4
+129/659448/campos_512_v4
+129/659453/campos_512_v4
+129/659473/campos_512_v4
+129/659475/campos_512_v4
+129/659477/campos_512_v4
+129/659479/campos_512_v4
+129/659492/campos_512_v4
+129/659501/campos_512_v4
+129/659507/campos_512_v4
+129/659510/campos_512_v4
+129/659511/campos_512_v4
+129/659530/campos_512_v4
+129/659545/campos_512_v4
+129/659561/campos_512_v4
+129/659565/campos_512_v4
+129/659571/campos_512_v4
+129/659574/campos_512_v4
+129/659596/campos_512_v4
+129/659630/campos_512_v4
+129/659639/campos_512_v4
+129/659640/campos_512_v4
+129/659660/campos_512_v4
+129/659661/campos_512_v4
+129/659663/campos_512_v4
+129/659664/campos_512_v4
+129/659678/campos_512_v4
+129/659690/campos_512_v4
+129/659709/campos_512_v4
+129/659711/campos_512_v4
+129/659718/campos_512_v4
+129/659720/campos_512_v4
+129/659728/campos_512_v4
+129/659740/campos_512_v4
+129/659744/campos_512_v4
+129/659757/campos_512_v4
+129/659759/campos_512_v4
+129/659766/campos_512_v4
+129/659767/campos_512_v4
+129/659768/campos_512_v4
+129/659769/campos_512_v4
+129/659774/campos_512_v4
+129/659775/campos_512_v4
+129/659777/campos_512_v4
+129/659782/campos_512_v4
+129/659814/campos_512_v4
+129/659830/campos_512_v4
+129/659835/campos_512_v4
+129/659843/campos_512_v4
+129/659854/campos_512_v4
+129/659856/campos_512_v4
+129/659874/campos_512_v4
+129/659876/campos_512_v4
+129/659878/campos_512_v4
+129/659879/campos_512_v4
+129/659881/campos_512_v4
+129/659896/campos_512_v4
+129/659904/campos_512_v4
+129/659910/campos_512_v4
+129/659915/campos_512_v4
+129/659927/campos_512_v4
+129/659933/campos_512_v4
+129/659938/campos_512_v4
+129/659942/campos_512_v4
+129/659955/campos_512_v4
+129/659956/campos_512_v4
+129/659957/campos_512_v4
+129/659961/campos_512_v4
+129/659964/campos_512_v4
+129/659979/campos_512_v4
+129/659980/campos_512_v4
+13/75006/campos_512_v4
+13/75009/campos_512_v4
+13/75016/campos_512_v4
+13/75018/campos_512_v4
+13/75026/campos_512_v4
+13/75027/campos_512_v4
+13/75051/campos_512_v4
+13/75052/campos_512_v4
+13/75061/campos_512_v4
+13/75069/campos_512_v4
+13/75110/campos_512_v4
+13/75126/campos_512_v4
+13/75130/campos_512_v4
+13/75135/campos_512_v4
+13/75145/campos_512_v4
+13/75148/campos_512_v4
+13/75158/campos_512_v4
+13/75162/campos_512_v4
+13/75167/campos_512_v4
+13/75182/campos_512_v4
+13/75191/campos_512_v4
+13/75193/campos_512_v4
+13/75200/campos_512_v4
+13/75203/campos_512_v4
+13/75204/campos_512_v4
+13/75205/campos_512_v4
+13/75207/campos_512_v4
+13/75216/campos_512_v4
+13/75220/campos_512_v4
+13/75225/campos_512_v4
+13/75235/campos_512_v4
+13/75245/campos_512_v4
+13/75250/campos_512_v4
+13/75251/campos_512_v4
+13/75258/campos_512_v4
+13/75275/campos_512_v4
+13/75278/campos_512_v4
+13/75281/campos_512_v4
+13/75290/campos_512_v4
+13/75298/campos_512_v4
+13/75301/campos_512_v4
+13/75303/campos_512_v4
+13/75304/campos_512_v4
+13/75305/campos_512_v4
+13/75314/campos_512_v4
+13/75316/campos_512_v4
+13/75320/campos_512_v4
+13/75332/campos_512_v4
+13/75343/campos_512_v4
+13/75355/campos_512_v4
+13/75356/campos_512_v4
+13/75373/campos_512_v4
+13/75382/campos_512_v4
+13/75393/campos_512_v4
+13/75402/campos_512_v4
+13/75409/campos_512_v4
+13/75411/campos_512_v4
+13/75413/campos_512_v4
+13/75418/campos_512_v4
+13/75424/campos_512_v4
+13/75444/campos_512_v4
+13/75448/campos_512_v4
+13/75453/campos_512_v4
+13/75457/campos_512_v4
+13/75466/campos_512_v4
+13/75473/campos_512_v4
+13/75481/campos_512_v4
+13/75483/campos_512_v4
+13/75493/campos_512_v4
+13/75494/campos_512_v4
+13/75502/campos_512_v4
+13/75503/campos_512_v4
+13/75506/campos_512_v4
+13/75512/campos_512_v4
+13/75513/campos_512_v4
+13/75516/campos_512_v4
+13/75517/campos_512_v4
+13/75520/campos_512_v4
+13/75521/campos_512_v4
+13/75562/campos_512_v4
+13/75563/campos_512_v4
+13/75586/campos_512_v4
+13/75600/campos_512_v4
+13/75601/campos_512_v4
+13/75603/campos_512_v4
+13/75610/campos_512_v4
+13/75613/campos_512_v4
+13/75620/campos_512_v4
+13/75621/campos_512_v4
+13/75632/campos_512_v4
+13/75633/campos_512_v4
+13/75634/campos_512_v4
+13/75643/campos_512_v4
+13/75645/campos_512_v4
+13/75646/campos_512_v4
+13/75650/campos_512_v4
+13/75677/campos_512_v4
+13/75681/campos_512_v4
+13/75684/campos_512_v4
+13/75690/campos_512_v4
+13/75712/campos_512_v4
+13/75715/campos_512_v4
+13/75725/campos_512_v4
+13/75730/campos_512_v4
+13/75732/campos_512_v4
+13/75747/campos_512_v4
+13/75751/campos_512_v4
+13/75758/campos_512_v4
+13/75761/campos_512_v4
+13/75775/campos_512_v4
+13/75777/campos_512_v4
+13/75786/campos_512_v4
+13/75797/campos_512_v4
+13/75798/campos_512_v4
+13/75799/campos_512_v4
+13/75805/campos_512_v4
+13/75806/campos_512_v4
+13/75807/campos_512_v4
+13/75813/campos_512_v4
+13/75824/campos_512_v4
+13/75827/campos_512_v4
+13/75828/campos_512_v4
+13/75842/campos_512_v4
+13/75847/campos_512_v4
+13/75855/campos_512_v4
+13/75861/campos_512_v4
+13/75864/campos_512_v4
+13/75865/campos_512_v4
+13/75881/campos_512_v4
+13/75882/campos_512_v4
+13/75891/campos_512_v4
+13/75892/campos_512_v4
+13/75898/campos_512_v4
+13/75909/campos_512_v4
+13/75926/campos_512_v4
+13/75930/campos_512_v4
+13/75942/campos_512_v4
+13/75943/campos_512_v4
+13/75962/campos_512_v4
+13/75966/campos_512_v4
+13/75969/campos_512_v4
+13/75983/campos_512_v4
+13/75989/campos_512_v4
+13/75991/campos_512_v4
+13/75996/campos_512_v4
+13/76000/campos_512_v4
+13/76003/campos_512_v4
+13/76012/campos_512_v4
+13/76013/campos_512_v4
+13/76031/campos_512_v4
+13/76039/campos_512_v4
+13/76040/campos_512_v4
+13/76044/campos_512_v4
+13/76050/campos_512_v4
+13/76078/campos_512_v4
+13/76090/campos_512_v4
+13/76107/campos_512_v4
+13/76109/campos_512_v4
+13/76112/campos_512_v4
+13/76115/campos_512_v4
+13/76117/campos_512_v4
+13/76118/campos_512_v4
+13/76121/campos_512_v4
+13/76124/campos_512_v4
+13/76127/campos_512_v4
+13/76130/campos_512_v4
+13/76148/campos_512_v4
+13/76149/campos_512_v4
+13/76157/campos_512_v4
+13/76162/campos_512_v4
+13/76169/campos_512_v4
+13/76170/campos_512_v4
+13/76179/campos_512_v4
+13/76185/campos_512_v4
+13/76214/campos_512_v4
+13/76235/campos_512_v4
+13/76257/campos_512_v4
+13/76264/campos_512_v4
+13/76272/campos_512_v4
+13/76286/campos_512_v4
+13/76290/campos_512_v4
+13/76310/campos_512_v4
+13/76318/campos_512_v4
+13/76329/campos_512_v4
+13/76331/campos_512_v4
+13/76333/campos_512_v4
+13/76346/campos_512_v4
+13/76348/campos_512_v4
+13/76359/campos_512_v4
+13/76368/campos_512_v4
+13/76381/campos_512_v4
+13/76395/campos_512_v4
+13/76402/campos_512_v4
+13/76404/campos_512_v4
+13/76405/campos_512_v4
+13/76412/campos_512_v4
+13/76417/campos_512_v4
+13/76427/campos_512_v4
+13/76432/campos_512_v4
+13/76434/campos_512_v4
+13/76437/campos_512_v4
+13/76440/campos_512_v4
+13/76443/campos_512_v4
+13/76447/campos_512_v4
+13/76452/campos_512_v4
+13/76457/campos_512_v4
+13/76458/campos_512_v4
+13/76460/campos_512_v4
+13/76467/campos_512_v4
+13/76476/campos_512_v4
+13/76478/campos_512_v4
+13/76491/campos_512_v4
+13/76493/campos_512_v4
+13/76497/campos_512_v4
+13/76499/campos_512_v4
+13/76509/campos_512_v4
+13/76511/campos_512_v4
+13/76522/campos_512_v4
+13/76533/campos_512_v4
+13/76545/campos_512_v4
+13/76552/campos_512_v4
+13/76560/campos_512_v4
+13/76568/campos_512_v4
+13/76570/campos_512_v4
+13/76572/campos_512_v4
+13/76573/campos_512_v4
+13/76574/campos_512_v4
+13/76578/campos_512_v4
+13/76579/campos_512_v4
+13/76581/campos_512_v4
+13/76584/campos_512_v4
+13/76594/campos_512_v4
+13/76598/campos_512_v4
+13/76608/campos_512_v4
+13/76661/campos_512_v4
+13/76662/campos_512_v4
+13/76667/campos_512_v4
+13/76669/campos_512_v4
+13/76682/campos_512_v4
+13/76689/campos_512_v4
+13/76699/campos_512_v4
+13/76701/campos_512_v4
+13/76712/campos_512_v4
+13/76714/campos_512_v4
+13/76718/campos_512_v4
+13/76723/campos_512_v4
+13/76727/campos_512_v4
+13/76732/campos_512_v4
+13/76737/campos_512_v4
+13/76738/campos_512_v4
+13/76740/campos_512_v4
+13/76754/campos_512_v4
+13/76760/campos_512_v4
+13/76767/campos_512_v4
+13/76773/campos_512_v4
+13/76775/campos_512_v4
+13/76776/campos_512_v4
+13/76781/campos_512_v4
+13/76784/campos_512_v4
+13/76786/campos_512_v4
+13/76787/campos_512_v4
+13/76790/campos_512_v4
+13/76793/campos_512_v4
+13/76796/campos_512_v4
+13/76800/campos_512_v4
+13/76803/campos_512_v4
+13/76822/campos_512_v4
+13/76824/campos_512_v4
+13/76837/campos_512_v4
+13/76839/campos_512_v4
+13/76852/campos_512_v4
+13/76870/campos_512_v4
+13/76889/campos_512_v4
+13/76891/campos_512_v4
+13/76894/campos_512_v4
+13/76901/campos_512_v4
+13/76928/campos_512_v4
+13/76930/campos_512_v4
+13/76937/campos_512_v4
+13/76944/campos_512_v4
+13/76947/campos_512_v4
+13/76954/campos_512_v4
+13/76956/campos_512_v4
+13/76958/campos_512_v4
+13/76960/campos_512_v4
+13/76976/campos_512_v4
+13/76981/campos_512_v4
+13/76983/campos_512_v4
+13/76999/campos_512_v4
+13/77005/campos_512_v4
+13/77011/campos_512_v4
+13/77016/campos_512_v4
+13/77022/campos_512_v4
+13/77025/campos_512_v4
+13/77026/campos_512_v4
+13/77027/campos_512_v4
+13/77040/campos_512_v4
+13/77042/campos_512_v4
+13/77051/campos_512_v4
+13/77064/campos_512_v4
+13/77067/campos_512_v4
+13/77088/campos_512_v4
+13/77094/campos_512_v4
+13/77105/campos_512_v4
+13/77113/campos_512_v4
+13/77117/campos_512_v4
+13/77122/campos_512_v4
+13/77127/campos_512_v4
+13/77135/campos_512_v4
+13/77139/campos_512_v4
+13/77157/campos_512_v4
+13/77160/campos_512_v4
+13/77185/campos_512_v4
+13/77188/campos_512_v4
+13/77191/campos_512_v4
+13/77192/campos_512_v4
+13/77210/campos_512_v4
+13/77219/campos_512_v4
+13/77233/campos_512_v4
+13/77244/campos_512_v4
+13/77252/campos_512_v4
+13/77253/campos_512_v4
+13/77261/campos_512_v4
+13/77281/campos_512_v4
+13/77289/campos_512_v4
+13/77291/campos_512_v4
+13/77294/campos_512_v4
+13/77300/campos_512_v4
+13/77305/campos_512_v4
+13/77313/campos_512_v4
+13/77328/campos_512_v4
+13/77337/campos_512_v4
+13/77342/campos_512_v4
+13/77347/campos_512_v4
+13/77359/campos_512_v4
+13/77372/campos_512_v4
+13/77374/campos_512_v4
+13/77387/campos_512_v4
+13/77391/campos_512_v4
+13/77395/campos_512_v4
+13/77400/campos_512_v4
+13/77409/campos_512_v4
+13/77414/campos_512_v4
+13/77420/campos_512_v4
+13/77443/campos_512_v4
+13/77444/campos_512_v4
+13/77451/campos_512_v4
+13/77464/campos_512_v4
+13/77481/campos_512_v4
+13/77485/campos_512_v4
+13/77491/campos_512_v4
+13/77502/campos_512_v4
+13/77509/campos_512_v4
+13/77511/campos_512_v4
+13/77512/campos_512_v4
+13/77518/campos_512_v4
+13/77520/campos_512_v4
+13/77530/campos_512_v4
+13/77537/campos_512_v4
+13/77544/campos_512_v4
+13/77549/campos_512_v4
+13/77553/campos_512_v4
+13/77578/campos_512_v4
+13/77581/campos_512_v4
+13/77582/campos_512_v4
+13/77592/campos_512_v4
+13/77596/campos_512_v4
+13/77601/campos_512_v4
+13/77611/campos_512_v4
+13/77612/campos_512_v4
+13/77613/campos_512_v4
+13/77638/campos_512_v4
+13/77658/campos_512_v4
+13/77660/campos_512_v4
+13/77664/campos_512_v4
+13/77667/campos_512_v4
+13/77672/campos_512_v4
+13/77687/campos_512_v4
+13/77696/campos_512_v4
+13/77707/campos_512_v4
+13/77713/campos_512_v4
+13/77717/campos_512_v4
+13/77719/campos_512_v4
+13/77721/campos_512_v4
+13/77726/campos_512_v4
+13/77740/campos_512_v4
+13/77742/campos_512_v4
+13/77746/campos_512_v4
+13/77747/campos_512_v4
+13/77751/campos_512_v4
+13/77757/campos_512_v4
+13/77759/campos_512_v4
+13/77774/campos_512_v4
+13/77775/campos_512_v4
+13/77776/campos_512_v4
+13/77781/campos_512_v4
+13/77785/campos_512_v4
+13/77797/campos_512_v4
+13/77802/campos_512_v4
+13/77804/campos_512_v4
+13/77815/campos_512_v4
+13/77838/campos_512_v4
+13/77840/campos_512_v4
+13/77843/campos_512_v4
+13/77859/campos_512_v4
+13/77871/campos_512_v4
+13/77879/campos_512_v4
+13/77880/campos_512_v4
+13/77885/campos_512_v4
+13/77909/campos_512_v4
+13/77916/campos_512_v4
+13/77925/campos_512_v4
+13/77930/campos_512_v4
+13/77940/campos_512_v4
+13/77948/campos_512_v4
+13/77949/campos_512_v4
+13/77953/campos_512_v4
+13/77962/campos_512_v4
+13/77968/campos_512_v4
+13/77980/campos_512_v4
+13/77995/campos_512_v4
+13/78012/campos_512_v4
+13/78014/campos_512_v4
+13/78021/campos_512_v4
+13/78036/campos_512_v4
+13/78044/campos_512_v4
+13/78052/campos_512_v4
+13/78080/campos_512_v4
+13/78095/campos_512_v4
+13/78099/campos_512_v4
+13/78100/campos_512_v4
+13/78101/campos_512_v4
+13/78104/campos_512_v4
+13/78105/campos_512_v4
+13/78109/campos_512_v4
+13/78110/campos_512_v4
+13/78115/campos_512_v4
+13/78116/campos_512_v4
+13/78123/campos_512_v4
+13/78125/campos_512_v4
+13/78141/campos_512_v4
+13/78143/campos_512_v4
+13/78148/campos_512_v4
+13/78150/campos_512_v4
+13/78154/campos_512_v4
+13/78158/campos_512_v4
+13/78160/campos_512_v4
+13/78178/campos_512_v4
+13/78181/campos_512_v4
+13/78189/campos_512_v4
+13/78199/campos_512_v4
+13/78203/campos_512_v4
+13/78219/campos_512_v4
+13/78234/campos_512_v4
+13/78252/campos_512_v4
+13/78262/campos_512_v4
+13/78270/campos_512_v4
+13/78272/campos_512_v4
+13/78275/campos_512_v4
+13/78276/campos_512_v4
+13/78277/campos_512_v4
+13/78282/campos_512_v4
+13/78286/campos_512_v4
+13/78287/campos_512_v4
+13/78292/campos_512_v4
+13/78298/campos_512_v4
+13/78314/campos_512_v4
+13/78322/campos_512_v4
+13/78342/campos_512_v4
+13/78348/campos_512_v4
+13/78351/campos_512_v4
+13/78373/campos_512_v4
+13/78384/campos_512_v4
+13/78391/campos_512_v4
+13/78417/campos_512_v4
+13/78419/campos_512_v4
+13/78421/campos_512_v4
+13/78424/campos_512_v4
+13/78427/campos_512_v4
+13/78431/campos_512_v4
+13/78434/campos_512_v4
+13/78435/campos_512_v4
+13/78440/campos_512_v4
+13/78443/campos_512_v4
+13/78446/campos_512_v4
+13/78471/campos_512_v4
+13/78472/campos_512_v4
+13/78512/campos_512_v4
+13/78513/campos_512_v4
+13/78519/campos_512_v4
+13/78524/campos_512_v4
+13/78533/campos_512_v4
+13/78548/campos_512_v4
+13/78551/campos_512_v4
+13/78564/campos_512_v4
+13/78583/campos_512_v4
+13/78591/campos_512_v4
+13/78595/campos_512_v4
+13/78597/campos_512_v4
+13/78599/campos_512_v4
+13/78613/campos_512_v4
+13/78630/campos_512_v4
+13/78636/campos_512_v4
+13/78641/campos_512_v4
+13/78646/campos_512_v4
+13/78649/campos_512_v4
+13/78650/campos_512_v4
+13/78657/campos_512_v4
+13/78658/campos_512_v4
+13/78664/campos_512_v4
+13/78666/campos_512_v4
+13/78669/campos_512_v4
+13/78676/campos_512_v4
+13/78679/campos_512_v4
+13/78682/campos_512_v4
+13/78684/campos_512_v4
+13/78698/campos_512_v4
+13/78702/campos_512_v4
+13/78703/campos_512_v4
+13/78714/campos_512_v4
+13/78726/campos_512_v4
+13/78729/campos_512_v4
+13/78732/campos_512_v4
+13/78740/campos_512_v4
+13/78747/campos_512_v4
+13/78759/campos_512_v4
+13/78774/campos_512_v4
+13/78782/campos_512_v4
+13/78787/campos_512_v4
+13/78805/campos_512_v4
+13/78806/campos_512_v4
+13/78808/campos_512_v4
+13/78819/campos_512_v4
+13/78829/campos_512_v4
+13/78831/campos_512_v4
+13/78843/campos_512_v4
+13/78851/campos_512_v4
+13/78872/campos_512_v4
+13/78887/campos_512_v4
+13/78893/campos_512_v4
+13/78908/campos_512_v4
+13/78910/campos_512_v4
+13/78916/campos_512_v4
+13/78924/campos_512_v4
+13/78927/campos_512_v4
+13/78931/campos_512_v4
+13/78932/campos_512_v4
+13/78935/campos_512_v4
+13/78939/campos_512_v4
+13/78955/campos_512_v4
+13/78975/campos_512_v4
+13/78982/campos_512_v4
+13/78995/campos_512_v4
+13/78997/campos_512_v4
+13/79004/campos_512_v4
+13/79010/campos_512_v4
+13/79013/campos_512_v4
+13/79018/campos_512_v4
+13/79023/campos_512_v4
+13/79035/campos_512_v4
+13/79037/campos_512_v4
+13/79038/campos_512_v4
+13/79047/campos_512_v4
+13/79055/campos_512_v4
+13/79061/campos_512_v4
+13/79073/campos_512_v4
+13/79076/campos_512_v4
+13/79090/campos_512_v4
+13/79095/campos_512_v4
+13/79106/campos_512_v4
+13/79114/campos_512_v4
+13/79122/campos_512_v4
+13/79134/campos_512_v4
+13/79143/campos_512_v4
+13/79146/campos_512_v4
+13/79149/campos_512_v4
+13/79158/campos_512_v4
+13/79160/campos_512_v4
+13/79168/campos_512_v4
+13/79187/campos_512_v4
+13/79201/campos_512_v4
+13/79207/campos_512_v4
+13/79228/campos_512_v4
+13/79229/campos_512_v4
+13/79234/campos_512_v4
+13/79243/campos_512_v4
+13/79250/campos_512_v4
+13/79252/campos_512_v4
+13/79255/campos_512_v4
+13/79270/campos_512_v4
+13/79291/campos_512_v4
+13/79313/campos_512_v4
+13/79324/campos_512_v4
+13/79325/campos_512_v4
+13/79329/campos_512_v4
+13/79337/campos_512_v4
+13/79340/campos_512_v4
+13/79347/campos_512_v4
+13/79349/campos_512_v4
+13/79352/campos_512_v4
+13/79356/campos_512_v4
+13/79359/campos_512_v4
+13/79365/campos_512_v4
+13/79372/campos_512_v4
+13/79373/campos_512_v4
+13/79389/campos_512_v4
+13/79408/campos_512_v4
+13/79425/campos_512_v4
+13/79449/campos_512_v4
+13/79461/campos_512_v4
+13/79466/campos_512_v4
+13/79472/campos_512_v4
+13/79473/campos_512_v4
+13/79478/campos_512_v4
+13/79482/campos_512_v4
+13/79488/campos_512_v4
+13/79492/campos_512_v4
+13/79501/campos_512_v4
+13/79502/campos_512_v4
+13/79521/campos_512_v4
+13/79532/campos_512_v4
+13/79534/campos_512_v4
+13/79540/campos_512_v4
+13/79547/campos_512_v4
+13/79551/campos_512_v4
+13/79552/campos_512_v4
+13/79553/campos_512_v4
+13/79557/campos_512_v4
+13/79569/campos_512_v4
+13/79571/campos_512_v4
+13/79576/campos_512_v4
+13/79588/campos_512_v4
+13/79590/campos_512_v4
+13/79591/campos_512_v4
+13/79598/campos_512_v4
+13/79614/campos_512_v4
+13/79615/campos_512_v4
+13/79624/campos_512_v4
+13/79626/campos_512_v4
+13/79632/campos_512_v4
+13/79641/campos_512_v4
+13/79651/campos_512_v4
+13/79654/campos_512_v4
+13/79655/campos_512_v4
+13/79664/campos_512_v4
+13/79666/campos_512_v4
+13/79694/campos_512_v4
+13/79715/campos_512_v4
+13/79719/campos_512_v4
+13/79728/campos_512_v4
+13/79735/campos_512_v4
+13/79757/campos_512_v4
+13/79768/campos_512_v4
+13/79769/campos_512_v4
+13/79771/campos_512_v4
+13/79795/campos_512_v4
+13/79808/campos_512_v4
+13/79821/campos_512_v4
+13/79836/campos_512_v4
+13/79839/campos_512_v4
+13/79845/campos_512_v4
+13/79848/campos_512_v4
+13/79849/campos_512_v4
+13/79860/campos_512_v4
+13/79869/campos_512_v4
+13/79872/campos_512_v4
+13/79874/campos_512_v4
+13/79880/campos_512_v4
+13/79887/campos_512_v4
+13/79888/campos_512_v4
+13/79899/campos_512_v4
+13/79912/campos_512_v4
+13/79919/campos_512_v4
+13/79920/campos_512_v4
+13/79926/campos_512_v4
+13/79931/campos_512_v4
+13/79932/campos_512_v4
+13/79938/campos_512_v4
+13/79939/campos_512_v4
+13/79940/campos_512_v4
+13/79941/campos_512_v4
+13/79948/campos_512_v4
+13/79969/campos_512_v4
+13/79974/campos_512_v4
+13/79975/campos_512_v4
+13/79981/campos_512_v4
+13/79984/campos_512_v4
+13/79985/campos_512_v4
+13/79989/campos_512_v4
+13/79999/campos_512_v4
+13/80001/campos_512_v4
+130/660003/campos_512_v4
+130/660004/campos_512_v4
+130/660034/campos_512_v4
+130/660039/campos_512_v4
+130/660058/campos_512_v4
+130/660061/campos_512_v4
+130/660071/campos_512_v4
+130/660082/campos_512_v4
+130/660084/campos_512_v4
+130/660088/campos_512_v4
+130/660089/campos_512_v4
+130/660106/campos_512_v4
+130/660107/campos_512_v4
+130/660120/campos_512_v4
+130/660131/campos_512_v4
+130/660153/campos_512_v4
+130/660156/campos_512_v4
+130/660158/campos_512_v4
+130/660160/campos_512_v4
+130/660169/campos_512_v4
+130/660170/campos_512_v4
+130/660184/campos_512_v4
+130/660192/campos_512_v4
+130/660198/campos_512_v4
+130/660200/campos_512_v4
+130/660214/campos_512_v4
+130/660256/campos_512_v4
+130/660268/campos_512_v4
+130/660276/campos_512_v4
+130/660277/campos_512_v4
+130/660280/campos_512_v4
+130/660287/campos_512_v4
+130/660304/campos_512_v4
+130/660307/campos_512_v4
+130/660313/campos_512_v4
+130/660328/campos_512_v4
+130/660343/campos_512_v4
+130/660344/campos_512_v4
+130/660347/campos_512_v4
+130/660360/campos_512_v4
+130/660380/campos_512_v4
+130/660384/campos_512_v4
+130/660386/campos_512_v4
+130/660401/campos_512_v4
+130/660413/campos_512_v4
+130/660436/campos_512_v4
+130/660441/campos_512_v4
+130/660448/campos_512_v4
+130/660451/campos_512_v4
+130/660452/campos_512_v4
+130/660461/campos_512_v4
+130/660468/campos_512_v4
+130/660487/campos_512_v4
+130/660493/campos_512_v4
+130/660504/campos_512_v4
+130/660505/campos_512_v4
+130/660509/campos_512_v4
+130/660528/campos_512_v4
+130/660557/campos_512_v4
+130/660559/campos_512_v4
+130/660573/campos_512_v4
+130/660586/campos_512_v4
+130/660589/campos_512_v4
+130/660595/campos_512_v4
+130/660601/campos_512_v4
+130/660608/campos_512_v4
+130/660612/campos_512_v4
+130/660613/campos_512_v4
+130/660618/campos_512_v4
+130/660626/campos_512_v4
+130/660630/campos_512_v4
+130/660631/campos_512_v4
+130/660633/campos_512_v4
+130/660653/campos_512_v4
+130/660661/campos_512_v4
+130/660668/campos_512_v4
+130/660673/campos_512_v4
+130/660674/campos_512_v4
+130/660684/campos_512_v4
+130/660688/campos_512_v4
+130/660689/campos_512_v4
+130/660698/campos_512_v4
+130/660702/campos_512_v4
+130/660704/campos_512_v4
+130/660705/campos_512_v4
+130/660713/campos_512_v4
+130/660716/campos_512_v4
+130/660724/campos_512_v4
+130/660748/campos_512_v4
+130/660754/campos_512_v4
+130/660756/campos_512_v4
+130/660771/campos_512_v4
+130/660786/campos_512_v4
+130/660788/campos_512_v4
+130/660793/campos_512_v4
+130/660795/campos_512_v4
+130/660796/campos_512_v4
+130/660803/campos_512_v4
+130/660816/campos_512_v4
+130/660819/campos_512_v4
+130/660821/campos_512_v4
+130/660824/campos_512_v4
+130/660834/campos_512_v4
+130/660839/campos_512_v4
+130/660840/campos_512_v4
+130/660843/campos_512_v4
+130/660844/campos_512_v4
+130/660845/campos_512_v4
+130/660848/campos_512_v4
+130/660865/campos_512_v4
+130/660870/campos_512_v4
+130/660874/campos_512_v4
+130/660877/campos_512_v4
+130/660881/campos_512_v4
+130/660883/campos_512_v4
+130/660885/campos_512_v4
+130/660891/campos_512_v4
+130/660917/campos_512_v4
+130/660920/campos_512_v4
+130/660931/campos_512_v4
+130/660932/campos_512_v4
+130/660938/campos_512_v4
+130/660957/campos_512_v4
+130/660962/campos_512_v4
+130/660963/campos_512_v4
+130/660970/campos_512_v4
+130/660973/campos_512_v4
+130/660978/campos_512_v4
+130/661001/campos_512_v4
+130/661004/campos_512_v4
+130/661005/campos_512_v4
+130/661008/campos_512_v4
+130/661028/campos_512_v4
+130/661030/campos_512_v4
+130/661047/campos_512_v4
+130/661058/campos_512_v4
+130/661088/campos_512_v4
+130/661089/campos_512_v4
+130/661090/campos_512_v4
+130/661092/campos_512_v4
+130/661109/campos_512_v4
+130/661115/campos_512_v4
+130/661116/campos_512_v4
+130/661120/campos_512_v4
+130/661130/campos_512_v4
+130/661132/campos_512_v4
+130/661133/campos_512_v4
+130/661134/campos_512_v4
+130/661139/campos_512_v4
+130/661147/campos_512_v4
+130/661149/campos_512_v4
+130/661174/campos_512_v4
+130/661175/campos_512_v4
+130/661179/campos_512_v4
+130/661180/campos_512_v4
+130/661182/campos_512_v4
+130/661188/campos_512_v4
+130/661189/campos_512_v4
+130/661199/campos_512_v4
+130/661201/campos_512_v4
+130/661226/campos_512_v4
+130/661229/campos_512_v4
+130/661234/campos_512_v4
+130/661235/campos_512_v4
+130/661238/campos_512_v4
+130/661245/campos_512_v4
+130/661273/campos_512_v4
+130/661285/campos_512_v4
+130/661297/campos_512_v4
+130/661299/campos_512_v4
+130/661301/campos_512_v4
+130/661306/campos_512_v4
+130/661314/campos_512_v4
+130/661317/campos_512_v4
+130/661322/campos_512_v4
+130/661350/campos_512_v4
+130/661352/campos_512_v4
+130/661367/campos_512_v4
+130/661379/campos_512_v4
+130/661395/campos_512_v4
+130/661396/campos_512_v4
+130/661409/campos_512_v4
+130/661410/campos_512_v4
+130/661418/campos_512_v4
+130/661430/campos_512_v4
+130/661439/campos_512_v4
+130/661441/campos_512_v4
+130/661443/campos_512_v4
+130/661455/campos_512_v4
+130/661483/campos_512_v4
+130/661485/campos_512_v4
+130/661486/campos_512_v4
+130/661488/campos_512_v4
+130/661491/campos_512_v4
+130/661501/campos_512_v4
+130/661503/campos_512_v4
+130/661506/campos_512_v4
+130/661507/campos_512_v4
+130/661510/campos_512_v4
+130/661512/campos_512_v4
+130/661513/campos_512_v4
+130/661517/campos_512_v4
+130/661524/campos_512_v4
+130/661528/campos_512_v4
+130/661529/campos_512_v4
+130/661535/campos_512_v4
+130/661541/campos_512_v4
+130/661544/campos_512_v4
+130/661555/campos_512_v4
+130/661557/campos_512_v4
+130/661563/campos_512_v4
+130/661581/campos_512_v4
+130/661584/campos_512_v4
+130/661591/campos_512_v4
+130/661592/campos_512_v4
+130/661594/campos_512_v4
+130/661599/campos_512_v4
+130/661608/campos_512_v4
+130/661616/campos_512_v4
+130/661630/campos_512_v4
+130/661639/campos_512_v4
+130/661655/campos_512_v4
+130/661658/campos_512_v4
+130/661661/campos_512_v4
+130/661667/campos_512_v4
+130/661669/campos_512_v4
+130/661673/campos_512_v4
+130/661688/campos_512_v4
+130/661714/campos_512_v4
+130/661718/campos_512_v4
+130/661745/campos_512_v4
+130/661753/campos_512_v4
+130/661754/campos_512_v4
+130/661756/campos_512_v4
+130/661758/campos_512_v4
+130/661760/campos_512_v4
+130/661768/campos_512_v4
+130/661769/campos_512_v4
+130/661781/campos_512_v4
+130/661784/campos_512_v4
+130/661790/campos_512_v4
+130/661792/campos_512_v4
+130/661797/campos_512_v4
+130/661812/campos_512_v4
+130/661813/campos_512_v4
+130/661814/campos_512_v4
+130/661817/campos_512_v4
+130/661821/campos_512_v4
+130/661833/campos_512_v4
+130/661845/campos_512_v4
+130/661847/campos_512_v4
+130/661858/campos_512_v4
+130/661867/campos_512_v4
+130/661871/campos_512_v4
+130/661881/campos_512_v4
+130/661882/campos_512_v4
+130/661883/campos_512_v4
+130/661895/campos_512_v4
+130/661898/campos_512_v4
+130/661899/campos_512_v4
+130/661902/campos_512_v4
+130/661939/campos_512_v4
+130/661945/campos_512_v4
+130/661961/campos_512_v4
+130/661983/campos_512_v4
+130/661987/campos_512_v4
+130/661993/campos_512_v4
+130/661995/campos_512_v4
+130/662002/campos_512_v4
+130/662004/campos_512_v4
+130/662009/campos_512_v4
+130/662014/campos_512_v4
+130/662025/campos_512_v4
+130/662027/campos_512_v4
+130/662037/campos_512_v4
+130/662040/campos_512_v4
+130/662049/campos_512_v4
+130/662055/campos_512_v4
+130/662061/campos_512_v4
+130/662062/campos_512_v4
+130/662075/campos_512_v4
+130/662082/campos_512_v4
+130/662083/campos_512_v4
+130/662085/campos_512_v4
+130/662086/campos_512_v4
+130/662087/campos_512_v4
+130/662094/campos_512_v4
+130/662098/campos_512_v4
+130/662101/campos_512_v4
+130/662104/campos_512_v4
+130/662111/campos_512_v4
+130/662115/campos_512_v4
+130/662120/campos_512_v4
+130/662121/campos_512_v4
+130/662142/campos_512_v4
+130/662149/campos_512_v4
+130/662150/campos_512_v4
+130/662152/campos_512_v4
+130/662156/campos_512_v4
+130/662168/campos_512_v4
+130/662174/campos_512_v4
+130/662175/campos_512_v4
+130/662181/campos_512_v4
+130/662190/campos_512_v4
+130/662202/campos_512_v4
+130/662211/campos_512_v4
+130/662214/campos_512_v4
+130/662218/campos_512_v4
+130/662220/campos_512_v4
+130/662235/campos_512_v4
+130/662238/campos_512_v4
+130/662252/campos_512_v4
+130/662254/campos_512_v4
+130/662269/campos_512_v4
+130/662276/campos_512_v4
+130/662285/campos_512_v4
+130/662289/campos_512_v4
+130/662291/campos_512_v4
+130/662301/campos_512_v4
+130/662309/campos_512_v4
+130/662313/campos_512_v4
+130/662321/campos_512_v4
+130/662332/campos_512_v4
+130/662333/campos_512_v4
+130/662334/campos_512_v4
+130/662341/campos_512_v4
+130/662343/campos_512_v4
+130/662345/campos_512_v4
+130/662358/campos_512_v4
+130/662360/campos_512_v4
+130/662365/campos_512_v4
+130/662370/campos_512_v4
+130/662383/campos_512_v4
+130/662384/campos_512_v4
+130/662390/campos_512_v4
+130/662393/campos_512_v4
+130/662398/campos_512_v4
+130/662399/campos_512_v4
+130/662402/campos_512_v4
+130/662410/campos_512_v4
+130/662420/campos_512_v4
+130/662430/campos_512_v4
+130/662435/campos_512_v4
+130/662439/campos_512_v4
+130/662446/campos_512_v4
+130/662460/campos_512_v4
+130/662482/campos_512_v4
+130/662517/campos_512_v4
+130/662518/campos_512_v4
+130/662521/campos_512_v4
+130/662527/campos_512_v4
+130/662548/campos_512_v4
+130/662553/campos_512_v4
+130/662555/campos_512_v4
+130/662562/campos_512_v4
+130/662569/campos_512_v4
+130/662573/campos_512_v4
+130/662576/campos_512_v4
+130/662577/campos_512_v4
+130/662584/campos_512_v4
+130/662591/campos_512_v4
+130/662595/campos_512_v4
+130/662601/campos_512_v4
+130/662611/campos_512_v4
+130/662625/campos_512_v4
+130/662627/campos_512_v4
+130/662633/campos_512_v4
+130/662634/campos_512_v4
+130/662635/campos_512_v4
+130/662638/campos_512_v4
+130/662639/campos_512_v4
+130/662674/campos_512_v4
+130/662676/campos_512_v4
+130/662690/campos_512_v4
+130/662691/campos_512_v4
+130/662693/campos_512_v4
+130/662713/campos_512_v4
+130/662720/campos_512_v4
+130/662721/campos_512_v4
+130/662722/campos_512_v4
+130/662725/campos_512_v4
+130/662728/campos_512_v4
+130/662737/campos_512_v4
+130/662738/campos_512_v4
+130/662749/campos_512_v4
+130/662756/campos_512_v4
+130/662764/campos_512_v4
+130/662765/campos_512_v4
+130/662768/campos_512_v4
+130/662772/campos_512_v4
+130/662777/campos_512_v4
+130/662780/campos_512_v4
+130/662782/campos_512_v4
+130/662786/campos_512_v4
+130/662788/campos_512_v4
+130/662795/campos_512_v4
+130/662798/campos_512_v4
+130/662805/campos_512_v4
+130/662816/campos_512_v4
+130/662817/campos_512_v4
+130/662833/campos_512_v4
+130/662837/campos_512_v4
+130/662843/campos_512_v4
+130/662867/campos_512_v4
+130/662870/campos_512_v4
+130/662876/campos_512_v4
+130/662894/campos_512_v4
+130/662919/campos_512_v4
+130/662920/campos_512_v4
+130/662922/campos_512_v4
+130/662930/campos_512_v4
+130/662931/campos_512_v4
+130/662933/campos_512_v4
+130/662935/campos_512_v4
+130/662980/campos_512_v4
+130/662983/campos_512_v4
+130/662987/campos_512_v4
+130/662995/campos_512_v4
+130/662996/campos_512_v4
+130/662999/campos_512_v4
+130/663001/campos_512_v4
+130/663035/campos_512_v4
+130/663046/campos_512_v4
+130/663048/campos_512_v4
+130/663049/campos_512_v4
+130/663055/campos_512_v4
+130/663057/campos_512_v4
+130/663072/campos_512_v4
+130/663077/campos_512_v4
+130/663081/campos_512_v4
+130/663086/campos_512_v4
+130/663100/campos_512_v4
+130/663101/campos_512_v4
+130/663103/campos_512_v4
+130/663109/campos_512_v4
+130/663111/campos_512_v4
+130/663118/campos_512_v4
+130/663131/campos_512_v4
+130/663141/campos_512_v4
+130/663149/campos_512_v4
+130/663155/campos_512_v4
+130/663161/campos_512_v4
+130/663165/campos_512_v4
+130/663166/campos_512_v4
+130/663167/campos_512_v4
+130/663171/campos_512_v4
+130/663172/campos_512_v4
+130/663174/campos_512_v4
+130/663176/campos_512_v4
+130/663179/campos_512_v4
+130/663190/campos_512_v4
+130/663200/campos_512_v4
+130/663202/campos_512_v4
+130/663218/campos_512_v4
+130/663223/campos_512_v4
+130/663251/campos_512_v4
+130/663263/campos_512_v4
+130/663265/campos_512_v4
+130/663269/campos_512_v4
+130/663278/campos_512_v4
+130/663281/campos_512_v4
+130/663283/campos_512_v4
+130/663290/campos_512_v4
+130/663304/campos_512_v4
+130/663309/campos_512_v4
+130/663324/campos_512_v4
+130/663346/campos_512_v4
+130/663347/campos_512_v4
+130/663368/campos_512_v4
+130/663371/campos_512_v4
+130/663412/campos_512_v4
+130/663415/campos_512_v4
+130/663417/campos_512_v4
+130/663423/campos_512_v4
+130/663424/campos_512_v4
+130/663431/campos_512_v4
+130/663435/campos_512_v4
+130/663438/campos_512_v4
+130/663442/campos_512_v4
+130/663448/campos_512_v4
+130/663451/campos_512_v4
+130/663452/campos_512_v4
+130/663460/campos_512_v4
+130/663471/campos_512_v4
+130/663477/campos_512_v4
+130/663485/campos_512_v4
+130/663486/campos_512_v4
+130/663490/campos_512_v4
+130/663491/campos_512_v4
+130/663492/campos_512_v4
+130/663497/campos_512_v4
+130/663499/campos_512_v4
+130/663502/campos_512_v4
+130/663504/campos_512_v4
+130/663529/campos_512_v4
+130/663535/campos_512_v4
+130/663551/campos_512_v4
+130/663552/campos_512_v4
+130/663554/campos_512_v4
+130/663557/campos_512_v4
+130/663572/campos_512_v4
+130/663577/campos_512_v4
+130/663584/campos_512_v4
+130/663586/campos_512_v4
+130/663591/campos_512_v4
+130/663602/campos_512_v4
+130/663614/campos_512_v4
+130/663620/campos_512_v4
+130/663622/campos_512_v4
+130/663635/campos_512_v4
+130/663679/campos_512_v4
+130/663684/campos_512_v4
+130/663686/campos_512_v4
+130/663687/campos_512_v4
+130/663689/campos_512_v4
+130/663690/campos_512_v4
+130/663706/campos_512_v4
+130/663713/campos_512_v4
+130/663715/campos_512_v4
+130/663729/campos_512_v4
+130/663734/campos_512_v4
+130/663746/campos_512_v4
+130/663750/campos_512_v4
+130/663768/campos_512_v4
+130/663801/campos_512_v4
+130/663803/campos_512_v4
+130/663804/campos_512_v4
+130/663807/campos_512_v4
+130/663808/campos_512_v4
+130/663818/campos_512_v4
+130/663820/campos_512_v4
+130/663823/campos_512_v4
+130/663828/campos_512_v4
+130/663840/campos_512_v4
+130/663843/campos_512_v4
+130/663849/campos_512_v4
+130/663854/campos_512_v4
+130/663870/campos_512_v4
+130/663894/campos_512_v4
+130/663895/campos_512_v4
+130/663896/campos_512_v4
+130/663900/campos_512_v4
+130/663906/campos_512_v4
+130/663909/campos_512_v4
+130/663915/campos_512_v4
+130/663921/campos_512_v4
+130/663929/campos_512_v4
+130/663934/campos_512_v4
+130/663938/campos_512_v4
+130/663941/campos_512_v4
+130/663963/campos_512_v4
+130/663982/campos_512_v4
+130/663995/campos_512_v4
+130/664005/campos_512_v4
+130/664010/campos_512_v4
+130/664019/campos_512_v4
+130/664026/campos_512_v4
+130/664030/campos_512_v4
+130/664033/campos_512_v4
+130/664039/campos_512_v4
+130/664055/campos_512_v4
+130/664056/campos_512_v4
+130/664059/campos_512_v4
+130/664061/campos_512_v4
+130/664075/campos_512_v4
+130/664076/campos_512_v4
+130/664077/campos_512_v4
+130/664082/campos_512_v4
+130/664089/campos_512_v4
+130/664093/campos_512_v4
+130/664098/campos_512_v4
+130/664102/campos_512_v4
+130/664103/campos_512_v4
+130/664106/campos_512_v4
+130/664125/campos_512_v4
+130/664131/campos_512_v4
+130/664138/campos_512_v4
+130/664139/campos_512_v4
+130/664151/campos_512_v4
+130/664152/campos_512_v4
+130/664164/campos_512_v4
+130/664187/campos_512_v4
+130/664191/campos_512_v4
+130/664192/campos_512_v4
+130/664193/campos_512_v4
+130/664209/campos_512_v4
+130/664210/campos_512_v4
+130/664218/campos_512_v4
+130/664219/campos_512_v4
+130/664223/campos_512_v4
+130/664228/campos_512_v4
+130/664230/campos_512_v4
+130/664234/campos_512_v4
+130/664236/campos_512_v4
+130/664238/campos_512_v4
+130/664243/campos_512_v4
+130/664250/campos_512_v4
+130/664259/campos_512_v4
+130/664260/campos_512_v4
+130/664261/campos_512_v4
+130/664266/campos_512_v4
+130/664271/campos_512_v4
+130/664272/campos_512_v4
+130/664274/campos_512_v4
+130/664279/campos_512_v4
+130/664283/campos_512_v4
+130/664286/campos_512_v4
+130/664298/campos_512_v4
+130/664315/campos_512_v4
+130/664323/campos_512_v4
+130/664324/campos_512_v4
+130/664326/campos_512_v4
+130/664330/campos_512_v4
+130/664339/campos_512_v4
+130/664347/campos_512_v4
+130/664363/campos_512_v4
+130/664368/campos_512_v4
+130/664376/campos_512_v4
+130/664383/campos_512_v4
+130/664388/campos_512_v4
+130/664395/campos_512_v4
+130/664416/campos_512_v4
+130/664421/campos_512_v4
+130/664422/campos_512_v4
+130/664427/campos_512_v4
+130/664430/campos_512_v4
+130/664438/campos_512_v4
+130/664439/campos_512_v4
+130/664443/campos_512_v4
+130/664445/campos_512_v4
+130/664453/campos_512_v4
+130/664472/campos_512_v4
+130/664478/campos_512_v4
+130/664485/campos_512_v4
+130/664513/campos_512_v4
+130/664530/campos_512_v4
+130/664546/campos_512_v4
+130/664563/campos_512_v4
+130/664567/campos_512_v4
+130/664579/campos_512_v4
+130/664582/campos_512_v4
+130/664615/campos_512_v4
+130/664616/campos_512_v4
+130/664622/campos_512_v4
+130/664635/campos_512_v4
+130/664654/campos_512_v4
+130/664662/campos_512_v4
+130/664663/campos_512_v4
+130/664671/campos_512_v4
+130/664674/campos_512_v4
+130/664675/campos_512_v4
+130/664677/campos_512_v4
+130/664687/campos_512_v4
+130/664689/campos_512_v4
+130/664696/campos_512_v4
+130/664698/campos_512_v4
+130/664701/campos_512_v4
+130/664706/campos_512_v4
+130/664714/campos_512_v4
+130/664732/campos_512_v4
+130/664735/campos_512_v4
+130/664745/campos_512_v4
+130/664748/campos_512_v4
+130/664765/campos_512_v4
+130/664769/campos_512_v4
+130/664771/campos_512_v4
+130/664785/campos_512_v4
+130/664787/campos_512_v4
+130/664805/campos_512_v4
+130/664807/campos_512_v4
+130/664819/campos_512_v4
+130/664835/campos_512_v4
+130/664847/campos_512_v4
+130/664848/campos_512_v4
+130/664855/campos_512_v4
+130/664879/campos_512_v4
+130/664880/campos_512_v4
+130/664881/campos_512_v4
+130/664882/campos_512_v4
+130/664884/campos_512_v4
+130/664888/campos_512_v4
+130/664890/campos_512_v4
+130/664891/campos_512_v4
+130/664905/campos_512_v4
+130/664918/campos_512_v4
+130/664921/campos_512_v4
+130/664923/campos_512_v4
+130/664951/campos_512_v4
+130/664957/campos_512_v4
+130/664967/campos_512_v4
+130/664973/campos_512_v4
+130/664976/campos_512_v4
+131/665010/campos_512_v4
+131/665037/campos_512_v4
+131/665038/campos_512_v4
+131/665047/campos_512_v4
+131/665052/campos_512_v4
+131/665055/campos_512_v4
+131/665057/campos_512_v4
+131/665066/campos_512_v4
+131/665076/campos_512_v4
+131/665079/campos_512_v4
+131/665083/campos_512_v4
+131/665089/campos_512_v4
+131/665097/campos_512_v4
+131/665103/campos_512_v4
+131/665113/campos_512_v4
+131/665120/campos_512_v4
+131/665122/campos_512_v4
+131/665125/campos_512_v4
+131/665132/campos_512_v4
+131/665140/campos_512_v4
+131/665152/campos_512_v4
+131/665174/campos_512_v4
+131/665176/campos_512_v4
+131/665183/campos_512_v4
+131/665188/campos_512_v4
+131/665192/campos_512_v4
+131/665218/campos_512_v4
+131/665220/campos_512_v4
+131/665233/campos_512_v4
+131/665236/campos_512_v4
+131/665240/campos_512_v4
+131/665264/campos_512_v4
+131/665269/campos_512_v4
+131/665285/campos_512_v4
+131/665294/campos_512_v4
+131/665306/campos_512_v4
+131/665310/campos_512_v4
+131/665333/campos_512_v4
+131/665334/campos_512_v4
+131/665339/campos_512_v4
+131/665355/campos_512_v4
+131/665358/campos_512_v4
+131/665359/campos_512_v4
+131/665362/campos_512_v4
+131/665367/campos_512_v4
+131/665369/campos_512_v4
+131/665370/campos_512_v4
+131/665372/campos_512_v4
+131/665383/campos_512_v4
+131/665407/campos_512_v4
+131/665423/campos_512_v4
+131/665432/campos_512_v4
+131/665434/campos_512_v4
+131/665449/campos_512_v4
+131/665456/campos_512_v4
+131/665463/campos_512_v4
+131/665465/campos_512_v4
+131/665470/campos_512_v4
+131/665472/campos_512_v4
+131/665475/campos_512_v4
+131/665478/campos_512_v4
+131/665488/campos_512_v4
+131/665490/campos_512_v4
+131/665504/campos_512_v4
+131/665509/campos_512_v4
+131/665511/campos_512_v4
+131/665513/campos_512_v4
+131/665523/campos_512_v4
+131/665527/campos_512_v4
+131/665529/campos_512_v4
+131/665530/campos_512_v4
+131/665548/campos_512_v4
+131/665549/campos_512_v4
+131/665559/campos_512_v4
+131/665561/campos_512_v4
+131/665566/campos_512_v4
+131/665572/campos_512_v4
+131/665605/campos_512_v4
+131/665610/campos_512_v4
+131/665620/campos_512_v4
+131/665625/campos_512_v4
+131/665644/campos_512_v4
+131/665656/campos_512_v4
+131/665662/campos_512_v4
+131/665669/campos_512_v4
+131/665680/campos_512_v4
+131/665688/campos_512_v4
+131/665691/campos_512_v4
+131/665713/campos_512_v4
+131/665723/campos_512_v4
+131/665742/campos_512_v4
+131/665744/campos_512_v4
+131/665746/campos_512_v4
+131/665759/campos_512_v4
+131/665763/campos_512_v4
+131/665766/campos_512_v4
+131/665774/campos_512_v4
+131/665775/campos_512_v4
+131/665783/campos_512_v4
+131/665789/campos_512_v4
+131/665794/campos_512_v4
+131/665797/campos_512_v4
+131/665807/campos_512_v4
+131/665811/campos_512_v4
+131/665813/campos_512_v4
+131/665816/campos_512_v4
+131/665821/campos_512_v4
+131/665822/campos_512_v4
+131/665825/campos_512_v4
+131/665826/campos_512_v4
+131/665828/campos_512_v4
+131/665833/campos_512_v4
+131/665867/campos_512_v4
+131/665883/campos_512_v4
+131/665888/campos_512_v4
+131/665890/campos_512_v4
+131/665896/campos_512_v4
+131/665898/campos_512_v4
+131/665899/campos_512_v4
+131/665915/campos_512_v4
+131/665916/campos_512_v4
+131/665922/campos_512_v4
+131/665931/campos_512_v4
+131/665933/campos_512_v4
+131/665942/campos_512_v4
+131/665943/campos_512_v4
+131/665952/campos_512_v4
+131/665955/campos_512_v4
+131/665962/campos_512_v4
+131/665967/campos_512_v4
+131/665968/campos_512_v4
+131/665975/campos_512_v4
+131/665980/campos_512_v4
+131/665985/campos_512_v4
+131/665986/campos_512_v4
+131/665990/campos_512_v4
+131/666006/campos_512_v4
+131/666019/campos_512_v4
+131/666021/campos_512_v4
+131/666027/campos_512_v4
+131/666040/campos_512_v4
+131/666043/campos_512_v4
+131/666060/campos_512_v4
+131/666061/campos_512_v4
+131/666062/campos_512_v4
+131/666072/campos_512_v4
+131/666073/campos_512_v4
+131/666083/campos_512_v4
+131/666084/campos_512_v4
+131/666098/campos_512_v4
+131/666100/campos_512_v4
+131/666117/campos_512_v4
+131/666120/campos_512_v4
+131/666128/campos_512_v4
+131/666156/campos_512_v4
+131/666169/campos_512_v4
+131/666208/campos_512_v4
+131/666227/campos_512_v4
+131/666232/campos_512_v4
+131/666237/campos_512_v4
+131/666240/campos_512_v4
+131/666244/campos_512_v4
+131/666287/campos_512_v4
+131/666313/campos_512_v4
+131/666315/campos_512_v4
+131/666339/campos_512_v4
+131/666340/campos_512_v4
+131/666341/campos_512_v4
+131/666345/campos_512_v4
+131/666347/campos_512_v4
+131/666356/campos_512_v4
+131/666362/campos_512_v4
+131/666385/campos_512_v4
+131/666391/campos_512_v4
+131/666404/campos_512_v4
+131/666409/campos_512_v4
+131/666412/campos_512_v4
+131/666413/campos_512_v4
+131/666414/campos_512_v4
+131/666420/campos_512_v4
+131/666426/campos_512_v4
+131/666428/campos_512_v4
+131/666429/campos_512_v4
+131/666439/campos_512_v4
+131/666443/campos_512_v4
+131/666445/campos_512_v4
+131/666465/campos_512_v4
+131/666472/campos_512_v4
+131/666473/campos_512_v4
+131/666495/campos_512_v4
+131/666496/campos_512_v4
+131/666501/campos_512_v4
+131/666503/campos_512_v4
+131/666508/campos_512_v4
+131/666514/campos_512_v4
+131/666522/campos_512_v4
+131/666530/campos_512_v4
+131/666531/campos_512_v4
+131/666532/campos_512_v4
+131/666542/campos_512_v4
+131/666550/campos_512_v4
+131/666555/campos_512_v4
+131/666557/campos_512_v4
+131/666565/campos_512_v4
+131/666580/campos_512_v4
+131/666589/campos_512_v4
+131/666602/campos_512_v4
+131/666610/campos_512_v4
+131/666622/campos_512_v4
+131/666623/campos_512_v4
+131/666630/campos_512_v4
+131/666631/campos_512_v4
+131/666634/campos_512_v4
+131/666635/campos_512_v4
+131/666636/campos_512_v4
+131/666637/campos_512_v4
+131/666638/campos_512_v4
+131/666639/campos_512_v4
+131/666650/campos_512_v4
+131/666655/campos_512_v4
+131/666659/campos_512_v4
+131/666664/campos_512_v4
+131/666669/campos_512_v4
+131/666691/campos_512_v4
+131/666695/campos_512_v4
+131/666697/campos_512_v4
+131/666698/campos_512_v4
+131/666703/campos_512_v4
+131/666704/campos_512_v4
+131/666708/campos_512_v4
+131/666710/campos_512_v4
+131/666719/campos_512_v4
+131/666722/campos_512_v4
+131/666726/campos_512_v4
+131/666730/campos_512_v4
+131/666733/campos_512_v4
+131/666737/campos_512_v4
+131/666746/campos_512_v4
+131/666756/campos_512_v4
+131/666764/campos_512_v4
+131/666773/campos_512_v4
+131/666774/campos_512_v4
+131/666786/campos_512_v4
+131/666798/campos_512_v4
+131/666803/campos_512_v4
+131/666808/campos_512_v4
+131/666830/campos_512_v4
+131/666835/campos_512_v4
+131/666836/campos_512_v4
+131/666837/campos_512_v4
+131/666839/campos_512_v4
+131/666841/campos_512_v4
+131/666845/campos_512_v4
+131/666849/campos_512_v4
+131/666863/campos_512_v4
+131/666866/campos_512_v4
+131/666868/campos_512_v4
+131/666876/campos_512_v4
+131/666880/campos_512_v4
+131/666899/campos_512_v4
+131/666911/campos_512_v4
+131/666926/campos_512_v4
+131/666941/campos_512_v4
+131/666950/campos_512_v4
+131/666955/campos_512_v4
+131/666966/campos_512_v4
+131/666967/campos_512_v4
+131/666969/campos_512_v4
+131/666971/campos_512_v4
+131/666975/campos_512_v4
+131/666976/campos_512_v4
+131/666991/campos_512_v4
+131/666996/campos_512_v4
+131/667000/campos_512_v4
+131/667017/campos_512_v4
+131/667025/campos_512_v4
+131/667031/campos_512_v4
+131/667035/campos_512_v4
+131/667038/campos_512_v4
+131/667046/campos_512_v4
+131/667057/campos_512_v4
+131/667062/campos_512_v4
+131/667064/campos_512_v4
+131/667065/campos_512_v4
+131/667086/campos_512_v4
+131/667093/campos_512_v4
+131/667101/campos_512_v4
+131/667102/campos_512_v4
+131/667105/campos_512_v4
+131/667116/campos_512_v4
+131/667121/campos_512_v4
+131/667125/campos_512_v4
+131/667128/campos_512_v4
+131/667130/campos_512_v4
+131/667140/campos_512_v4
+131/667141/campos_512_v4
+131/667142/campos_512_v4
+131/667149/campos_512_v4
+131/667155/campos_512_v4
+131/667169/campos_512_v4
+131/667182/campos_512_v4
+131/667198/campos_512_v4
+131/667200/campos_512_v4
+131/667202/campos_512_v4
+131/667213/campos_512_v4
+131/667223/campos_512_v4
+131/667225/campos_512_v4
+131/667228/campos_512_v4
+131/667245/campos_512_v4
+131/667247/campos_512_v4
+131/667250/campos_512_v4
+131/667253/campos_512_v4
+131/667270/campos_512_v4
+131/667276/campos_512_v4
+131/667277/campos_512_v4
+131/667305/campos_512_v4
+131/667307/campos_512_v4
+131/667309/campos_512_v4
+131/667311/campos_512_v4
+131/667315/campos_512_v4
+131/667322/campos_512_v4
+131/667330/campos_512_v4
+131/667358/campos_512_v4
+131/667361/campos_512_v4
+131/667362/campos_512_v4
+131/667390/campos_512_v4
+131/667395/campos_512_v4
+131/667409/campos_512_v4
+131/667413/campos_512_v4
+131/667420/campos_512_v4
+131/667424/campos_512_v4
+131/667425/campos_512_v4
+131/667434/campos_512_v4
+131/667435/campos_512_v4
+131/667445/campos_512_v4
+131/667456/campos_512_v4
+131/667457/campos_512_v4
+131/667460/campos_512_v4
+131/667463/campos_512_v4
+131/667477/campos_512_v4
+131/667494/campos_512_v4
+131/667496/campos_512_v4
+131/667498/campos_512_v4
+131/667521/campos_512_v4
+131/667524/campos_512_v4
+131/667525/campos_512_v4
+131/667527/campos_512_v4
+131/667529/campos_512_v4
+131/667535/campos_512_v4
+131/667541/campos_512_v4
+131/667564/campos_512_v4
+131/667568/campos_512_v4
+131/667573/campos_512_v4
+131/667576/campos_512_v4
+131/667581/campos_512_v4
+131/667585/campos_512_v4
+131/667592/campos_512_v4
+131/667597/campos_512_v4
+131/667607/campos_512_v4
+131/667608/campos_512_v4
+131/667615/campos_512_v4
+131/667620/campos_512_v4
+131/667622/campos_512_v4
+131/667626/campos_512_v4
+131/667636/campos_512_v4
+131/667648/campos_512_v4
+131/667668/campos_512_v4
+131/667691/campos_512_v4
+131/667701/campos_512_v4
+131/667708/campos_512_v4
+131/667732/campos_512_v4
+131/667733/campos_512_v4
+131/667744/campos_512_v4
+131/667746/campos_512_v4
+131/667759/campos_512_v4
+131/667771/campos_512_v4
+131/667776/campos_512_v4
+131/667781/campos_512_v4
+131/667806/campos_512_v4
+131/667810/campos_512_v4
+131/667813/campos_512_v4
+131/667822/campos_512_v4
+131/667824/campos_512_v4
+131/667828/campos_512_v4
+131/667829/campos_512_v4
+131/667856/campos_512_v4
+131/667869/campos_512_v4
+131/667907/campos_512_v4
+131/667908/campos_512_v4
+131/667911/campos_512_v4
+131/667918/campos_512_v4
+131/667922/campos_512_v4
+131/667955/campos_512_v4
+131/667959/campos_512_v4
+131/667961/campos_512_v4
+131/667968/campos_512_v4
+131/667971/campos_512_v4
+131/667974/campos_512_v4
+131/667976/campos_512_v4
+131/668004/campos_512_v4
+131/668025/campos_512_v4
+131/668026/campos_512_v4
+131/668028/campos_512_v4
+131/668031/campos_512_v4
+131/668034/campos_512_v4
+131/668037/campos_512_v4
+131/668040/campos_512_v4
+131/668048/campos_512_v4
+131/668049/campos_512_v4
+131/668058/campos_512_v4
+131/668072/campos_512_v4
+131/668073/campos_512_v4
+131/668080/campos_512_v4
+131/668101/campos_512_v4
+131/668109/campos_512_v4
+131/668110/campos_512_v4
+131/668126/campos_512_v4
+131/668133/campos_512_v4
+131/668169/campos_512_v4
+131/668171/campos_512_v4
+131/668177/campos_512_v4
+131/668178/campos_512_v4
+131/668193/campos_512_v4
+131/668200/campos_512_v4
+131/668201/campos_512_v4
+131/668207/campos_512_v4
+131/668225/campos_512_v4
+131/668229/campos_512_v4
+131/668233/campos_512_v4
+131/668240/campos_512_v4
+131/668259/campos_512_v4
+131/668270/campos_512_v4
+131/668274/campos_512_v4
+131/668275/campos_512_v4
+131/668276/campos_512_v4
+131/668296/campos_512_v4
+131/668298/campos_512_v4
+131/668311/campos_512_v4
+131/668324/campos_512_v4
+131/668325/campos_512_v4
+131/668326/campos_512_v4
+131/668329/campos_512_v4
+131/668346/campos_512_v4
+131/668347/campos_512_v4
+131/668350/campos_512_v4
+131/668352/campos_512_v4
+131/668362/campos_512_v4
+131/668363/campos_512_v4
+131/668365/campos_512_v4
+131/668372/campos_512_v4
+131/668374/campos_512_v4
+131/668382/campos_512_v4
+131/668388/campos_512_v4
+131/668405/campos_512_v4
+131/668412/campos_512_v4
+131/668413/campos_512_v4
+131/668431/campos_512_v4
+131/668432/campos_512_v4
+131/668451/campos_512_v4
+131/668461/campos_512_v4
+131/668469/campos_512_v4
+131/668481/campos_512_v4
+131/668486/campos_512_v4
+131/668515/campos_512_v4
+131/668529/campos_512_v4
+131/668538/campos_512_v4
+131/668539/campos_512_v4
+131/668546/campos_512_v4
+131/668560/campos_512_v4
+131/668562/campos_512_v4
+131/668575/campos_512_v4
+131/668581/campos_512_v4
+131/668597/campos_512_v4
+131/668604/campos_512_v4
+131/668607/campos_512_v4
+131/668608/campos_512_v4
+131/668616/campos_512_v4
+131/668618/campos_512_v4
+131/668620/campos_512_v4
+131/668629/campos_512_v4
+131/668631/campos_512_v4
+131/668635/campos_512_v4
+131/668642/campos_512_v4
+131/668654/campos_512_v4
+131/668661/campos_512_v4
+131/668668/campos_512_v4
+131/668670/campos_512_v4
+131/668671/campos_512_v4
+131/668678/campos_512_v4
+131/668684/campos_512_v4
+131/668685/campos_512_v4
+131/668687/campos_512_v4
+131/668697/campos_512_v4
+131/668707/campos_512_v4
+131/668708/campos_512_v4
+131/668721/campos_512_v4
+131/668733/campos_512_v4
+131/668734/campos_512_v4
+131/668749/campos_512_v4
+131/668756/campos_512_v4
+131/668758/campos_512_v4
+131/668763/campos_512_v4
+131/668772/campos_512_v4
+131/668774/campos_512_v4
+131/668777/campos_512_v4
+131/668779/campos_512_v4
+131/668790/campos_512_v4
+131/668798/campos_512_v4
+131/668812/campos_512_v4
+131/668831/campos_512_v4
+131/668866/campos_512_v4
+131/668874/campos_512_v4
+131/668875/campos_512_v4
+131/668900/campos_512_v4
+131/668902/campos_512_v4
+131/668905/campos_512_v4
+131/668916/campos_512_v4
+131/668918/campos_512_v4
+131/668933/campos_512_v4
+131/668935/campos_512_v4
+131/668947/campos_512_v4
+131/668953/campos_512_v4
+131/668954/campos_512_v4
+131/668956/campos_512_v4
+131/668957/campos_512_v4
+131/668967/campos_512_v4
+131/668983/campos_512_v4
+131/668993/campos_512_v4
+131/669026/campos_512_v4
+131/669036/campos_512_v4
+131/669040/campos_512_v4
+131/669045/campos_512_v4
+131/669047/campos_512_v4
+131/669052/campos_512_v4
+131/669058/campos_512_v4
+131/669063/campos_512_v4
+131/669071/campos_512_v4
+131/669072/campos_512_v4
+131/669074/campos_512_v4
+131/669076/campos_512_v4
+131/669080/campos_512_v4
+131/669084/campos_512_v4
+131/669091/campos_512_v4
+131/669094/campos_512_v4
+131/669098/campos_512_v4
+131/669100/campos_512_v4
+131/669102/campos_512_v4
+131/669104/campos_512_v4
+131/669106/campos_512_v4
+131/669117/campos_512_v4
+131/669126/campos_512_v4
+131/669127/campos_512_v4
+131/669134/campos_512_v4
+131/669160/campos_512_v4
+131/669166/campos_512_v4
+131/669173/campos_512_v4
+131/669175/campos_512_v4
+131/669183/campos_512_v4
+131/669185/campos_512_v4
+131/669202/campos_512_v4
+131/669211/campos_512_v4
+131/669223/campos_512_v4
+131/669228/campos_512_v4
+131/669231/campos_512_v4
+131/669232/campos_512_v4
+131/669240/campos_512_v4
+131/669242/campos_512_v4
+131/669243/campos_512_v4
+131/669253/campos_512_v4
+131/669268/campos_512_v4
+131/669271/campos_512_v4
+131/669272/campos_512_v4
+131/669279/campos_512_v4
+131/669299/campos_512_v4
+131/669302/campos_512_v4
+131/669307/campos_512_v4
+131/669314/campos_512_v4
+131/669324/campos_512_v4
+131/669333/campos_512_v4
+131/669335/campos_512_v4
+131/669345/campos_512_v4
+131/669348/campos_512_v4
+131/669350/campos_512_v4
+131/669353/campos_512_v4
+131/669358/campos_512_v4
+131/669367/campos_512_v4
+131/669368/campos_512_v4
+131/669385/campos_512_v4
+131/669397/campos_512_v4
+131/669426/campos_512_v4
+131/669430/campos_512_v4
+131/669433/campos_512_v4
+131/669438/campos_512_v4
+131/669442/campos_512_v4
+131/669451/campos_512_v4
+131/669453/campos_512_v4
+131/669454/campos_512_v4
+131/669460/campos_512_v4
+131/669464/campos_512_v4
+131/669465/campos_512_v4
+131/669469/campos_512_v4
+131/669474/campos_512_v4
+131/669478/campos_512_v4
+131/669480/campos_512_v4
+131/669490/campos_512_v4
+131/669492/campos_512_v4
+131/669501/campos_512_v4
+131/669502/campos_512_v4
+131/669508/campos_512_v4
+131/669516/campos_512_v4
+131/669518/campos_512_v4
+131/669530/campos_512_v4
+131/669534/campos_512_v4
+131/669549/campos_512_v4
+131/669551/campos_512_v4
+131/669553/campos_512_v4
+131/669562/campos_512_v4
+131/669572/campos_512_v4
+131/669578/campos_512_v4
+131/669584/campos_512_v4
+131/669585/campos_512_v4
+131/669599/campos_512_v4
+131/669610/campos_512_v4
+131/669612/campos_512_v4
+131/669626/campos_512_v4
+131/669628/campos_512_v4
+131/669634/campos_512_v4
+131/669645/campos_512_v4
+131/669653/campos_512_v4
+131/669660/campos_512_v4
+131/669661/campos_512_v4
+131/669673/campos_512_v4
+131/669679/campos_512_v4
+131/669681/campos_512_v4
+131/669682/campos_512_v4
+131/669696/campos_512_v4
+131/669699/campos_512_v4
+131/669702/campos_512_v4
+131/669705/campos_512_v4
+131/669751/campos_512_v4
+131/669766/campos_512_v4
+131/669774/campos_512_v4
+131/669775/campos_512_v4
+131/669776/campos_512_v4
+131/669779/campos_512_v4
+131/669783/campos_512_v4
+131/669786/campos_512_v4
+131/669800/campos_512_v4
+131/669805/campos_512_v4
+131/669809/campos_512_v4
+131/669832/campos_512_v4
+131/669839/campos_512_v4
+131/669840/campos_512_v4
+131/669843/campos_512_v4
+131/669848/campos_512_v4
+131/669851/campos_512_v4
+131/669855/campos_512_v4
+131/669859/campos_512_v4
+131/669865/campos_512_v4
+131/669870/campos_512_v4
+131/669879/campos_512_v4
+131/669891/campos_512_v4
+131/669900/campos_512_v4
+131/669908/campos_512_v4
+131/669909/campos_512_v4
+131/669916/campos_512_v4
+131/669921/campos_512_v4
+131/669927/campos_512_v4
+131/669932/campos_512_v4
+131/669936/campos_512_v4
+131/669938/campos_512_v4
+131/669940/campos_512_v4
+131/669945/campos_512_v4
+131/669947/campos_512_v4
+131/669972/campos_512_v4
+131/669976/campos_512_v4
+132/670009/campos_512_v4
+132/670011/campos_512_v4
+132/670014/campos_512_v4
+132/670025/campos_512_v4
+132/670031/campos_512_v4
+132/670034/campos_512_v4
+132/670039/campos_512_v4
+132/670040/campos_512_v4
+132/670049/campos_512_v4
+132/670056/campos_512_v4
+132/670062/campos_512_v4
+132/670066/campos_512_v4
+132/670076/campos_512_v4
+132/670077/campos_512_v4
+132/670079/campos_512_v4
+132/670080/campos_512_v4
+132/670094/campos_512_v4
+132/670100/campos_512_v4
+132/670102/campos_512_v4
+132/670117/campos_512_v4
+132/670119/campos_512_v4
+132/670120/campos_512_v4
+132/670125/campos_512_v4
+132/670127/campos_512_v4
+132/670129/campos_512_v4
+132/670132/campos_512_v4
+132/670137/campos_512_v4
+132/670141/campos_512_v4
+132/670143/campos_512_v4
+132/670161/campos_512_v4
+132/670167/campos_512_v4
+132/670184/campos_512_v4
+132/670186/campos_512_v4
+132/670190/campos_512_v4
+132/670192/campos_512_v4
+132/670196/campos_512_v4
+132/670197/campos_512_v4
+132/670200/campos_512_v4
+132/670206/campos_512_v4
+132/670224/campos_512_v4
+132/670226/campos_512_v4
+132/670229/campos_512_v4
+132/670232/campos_512_v4
+132/670233/campos_512_v4
+132/670236/campos_512_v4
+132/670237/campos_512_v4
+132/670243/campos_512_v4
+132/670260/campos_512_v4
+132/670264/campos_512_v4
+132/670270/campos_512_v4
+132/670272/campos_512_v4
+132/670275/campos_512_v4
+132/670278/campos_512_v4
+132/670289/campos_512_v4
+132/670293/campos_512_v4
+132/670296/campos_512_v4
+132/670297/campos_512_v4
+132/670301/campos_512_v4
+132/670305/campos_512_v4
+132/670307/campos_512_v4
+132/670310/campos_512_v4
+132/670311/campos_512_v4
+132/670317/campos_512_v4
+132/670326/campos_512_v4
+132/670351/campos_512_v4
+132/670357/campos_512_v4
+132/670358/campos_512_v4
+132/670361/campos_512_v4
+132/670362/campos_512_v4
+132/670363/campos_512_v4
+132/670366/campos_512_v4
+132/670368/campos_512_v4
+132/670380/campos_512_v4
+132/670408/campos_512_v4
+132/670421/campos_512_v4
+132/670439/campos_512_v4
+132/670440/campos_512_v4
+132/670442/campos_512_v4
+132/670443/campos_512_v4
+132/670449/campos_512_v4
+132/670450/campos_512_v4
+132/670454/campos_512_v4
+132/670459/campos_512_v4
+132/670468/campos_512_v4
+132/670477/campos_512_v4
+132/670487/campos_512_v4
+132/670497/campos_512_v4
+132/670502/campos_512_v4
+132/670510/campos_512_v4
+132/670512/campos_512_v4
+132/670513/campos_512_v4
+132/670517/campos_512_v4
+132/670531/campos_512_v4
+132/670543/campos_512_v4
+132/670562/campos_512_v4
+132/670565/campos_512_v4
+132/670570/campos_512_v4
+132/670574/campos_512_v4
+132/670577/campos_512_v4
+132/670582/campos_512_v4
+132/670587/campos_512_v4
+132/670594/campos_512_v4
+132/670596/campos_512_v4
+132/670598/campos_512_v4
+132/670599/campos_512_v4
+132/670601/campos_512_v4
+132/670604/campos_512_v4
+132/670615/campos_512_v4
+132/670616/campos_512_v4
+132/670623/campos_512_v4
+132/670628/campos_512_v4
+132/670633/campos_512_v4
+132/670638/campos_512_v4
+132/670649/campos_512_v4
+132/670653/campos_512_v4
+132/670665/campos_512_v4
+132/670666/campos_512_v4
+132/670667/campos_512_v4
+132/670678/campos_512_v4
+132/670681/campos_512_v4
+132/670692/campos_512_v4
+132/670694/campos_512_v4
+132/670715/campos_512_v4
+132/670727/campos_512_v4
+132/670740/campos_512_v4
+132/670746/campos_512_v4
+132/670749/campos_512_v4
+132/670759/campos_512_v4
+132/670764/campos_512_v4
+132/670772/campos_512_v4
+132/670776/campos_512_v4
+132/670797/campos_512_v4
+132/670806/campos_512_v4
+132/670807/campos_512_v4
+132/670810/campos_512_v4
+132/670811/campos_512_v4
+132/670819/campos_512_v4
+132/670820/campos_512_v4
+132/670821/campos_512_v4
+132/670822/campos_512_v4
+132/670827/campos_512_v4
+132/670830/campos_512_v4
+132/670837/campos_512_v4
+132/670849/campos_512_v4
+132/670863/campos_512_v4
+132/670871/campos_512_v4
+132/670875/campos_512_v4
+132/670904/campos_512_v4
+132/670905/campos_512_v4
+132/670915/campos_512_v4
+132/670924/campos_512_v4
+132/670932/campos_512_v4
+132/670940/campos_512_v4
+132/670945/campos_512_v4
+132/670955/campos_512_v4
+132/670957/campos_512_v4
+132/670964/campos_512_v4
+132/670982/campos_512_v4
+132/670988/campos_512_v4
+132/671003/campos_512_v4
+132/671013/campos_512_v4
+132/671016/campos_512_v4
+132/671018/campos_512_v4
+132/671020/campos_512_v4
+132/671021/campos_512_v4
+132/671039/campos_512_v4
+132/671044/campos_512_v4
+132/671047/campos_512_v4
+132/671050/campos_512_v4
+132/671057/campos_512_v4
+132/671059/campos_512_v4
+132/671074/campos_512_v4
+132/671078/campos_512_v4
+132/671081/campos_512_v4
+132/671099/campos_512_v4
+132/671103/campos_512_v4
+132/671106/campos_512_v4
+132/671111/campos_512_v4
+132/671112/campos_512_v4
+132/671114/campos_512_v4
+132/671115/campos_512_v4
+132/671124/campos_512_v4
+132/671125/campos_512_v4
+132/671133/campos_512_v4
+132/671138/campos_512_v4
+132/671160/campos_512_v4
+132/671165/campos_512_v4
+132/671170/campos_512_v4
+132/671196/campos_512_v4
+132/671201/campos_512_v4
+132/671203/campos_512_v4
+132/671210/campos_512_v4
+132/671216/campos_512_v4
+132/671233/campos_512_v4
+132/671239/campos_512_v4
+132/671274/campos_512_v4
+132/671289/campos_512_v4
+132/671295/campos_512_v4
+132/671313/campos_512_v4
+132/671327/campos_512_v4
+132/671338/campos_512_v4
+132/671354/campos_512_v4
+132/671355/campos_512_v4
+132/671369/campos_512_v4
+132/671376/campos_512_v4
+132/671390/campos_512_v4
+132/671399/campos_512_v4
+132/671405/campos_512_v4
+132/671408/campos_512_v4
+132/671410/campos_512_v4
+132/671414/campos_512_v4
+132/671431/campos_512_v4
+132/671441/campos_512_v4
+132/671446/campos_512_v4
+132/671462/campos_512_v4
+132/671463/campos_512_v4
+132/671467/campos_512_v4
+132/671481/campos_512_v4
+132/671488/campos_512_v4
+132/671490/campos_512_v4
+132/671507/campos_512_v4
+132/671509/campos_512_v4
+132/671510/campos_512_v4
+132/671524/campos_512_v4
+132/671548/campos_512_v4
+132/671551/campos_512_v4
+132/671557/campos_512_v4
+132/671570/campos_512_v4
+132/671577/campos_512_v4
+132/671584/campos_512_v4
+132/671596/campos_512_v4
+132/671612/campos_512_v4
+132/671616/campos_512_v4
+132/671620/campos_512_v4
+132/671631/campos_512_v4
+132/671641/campos_512_v4
+132/671655/campos_512_v4
+132/671662/campos_512_v4
+132/671670/campos_512_v4
+132/671675/campos_512_v4
+132/671683/campos_512_v4
+132/671695/campos_512_v4
+132/671699/campos_512_v4
+132/671714/campos_512_v4
+132/671715/campos_512_v4
+132/671720/campos_512_v4
+132/671728/campos_512_v4
+132/671733/campos_512_v4
+132/671734/campos_512_v4
+132/671741/campos_512_v4
+132/671742/campos_512_v4
+132/671743/campos_512_v4
+132/671744/campos_512_v4
+132/671748/campos_512_v4
+132/671750/campos_512_v4
+132/671754/campos_512_v4
+132/671757/campos_512_v4
+132/671759/campos_512_v4
+132/671762/campos_512_v4
+132/671776/campos_512_v4
+132/671778/campos_512_v4
+132/671784/campos_512_v4
+132/671787/campos_512_v4
+132/671794/campos_512_v4
+132/671802/campos_512_v4
+132/671805/campos_512_v4
+132/671807/campos_512_v4
+132/671816/campos_512_v4
+132/671831/campos_512_v4
+132/671850/campos_512_v4
+132/671889/campos_512_v4
+132/671896/campos_512_v4
+132/671899/campos_512_v4
+132/671900/campos_512_v4
+132/671902/campos_512_v4
+132/671908/campos_512_v4
+132/671910/campos_512_v4
+132/671919/campos_512_v4
+132/671920/campos_512_v4
+132/671924/campos_512_v4
+132/671931/campos_512_v4
+132/671945/campos_512_v4
+132/671952/campos_512_v4
+132/671953/campos_512_v4
+132/671954/campos_512_v4
+132/671956/campos_512_v4
+132/671958/campos_512_v4
+132/671961/campos_512_v4
+132/671981/campos_512_v4
+132/671982/campos_512_v4
+132/671983/campos_512_v4
+132/671993/campos_512_v4
+132/671999/campos_512_v4
+132/672001/campos_512_v4
+132/672010/campos_512_v4
+132/672011/campos_512_v4
+132/672016/campos_512_v4
+132/672018/campos_512_v4
+132/672022/campos_512_v4
+132/672025/campos_512_v4
+132/672042/campos_512_v4
+132/672044/campos_512_v4
+132/672047/campos_512_v4
+132/672052/campos_512_v4
+132/672070/campos_512_v4
+132/672079/campos_512_v4
+132/672080/campos_512_v4
+132/672084/campos_512_v4
+132/672097/campos_512_v4
+132/672112/campos_512_v4
+132/672121/campos_512_v4
+132/672135/campos_512_v4
+132/672138/campos_512_v4
+132/672140/campos_512_v4
+132/672142/campos_512_v4
+132/672145/campos_512_v4
+132/672166/campos_512_v4
+132/672179/campos_512_v4
+132/672192/campos_512_v4
+132/672194/campos_512_v4
+132/672202/campos_512_v4
+132/672204/campos_512_v4
+132/672205/campos_512_v4
+132/672212/campos_512_v4
+132/672219/campos_512_v4
+132/672222/campos_512_v4
+132/672245/campos_512_v4
+132/672247/campos_512_v4
+132/672263/campos_512_v4
+132/672275/campos_512_v4
+132/672276/campos_512_v4
+132/672282/campos_512_v4
+132/672286/campos_512_v4
+132/672290/campos_512_v4
+132/672293/campos_512_v4
+132/672310/campos_512_v4
+132/672322/campos_512_v4
+132/672327/campos_512_v4
+132/672340/campos_512_v4
+132/672343/campos_512_v4
+132/672344/campos_512_v4
+132/672352/campos_512_v4
+132/672357/campos_512_v4
+132/672362/campos_512_v4
+132/672366/campos_512_v4
+132/672369/campos_512_v4
+132/672377/campos_512_v4
+132/672394/campos_512_v4
+132/672399/campos_512_v4
+132/672405/campos_512_v4
+132/672412/campos_512_v4
+132/672416/campos_512_v4
+132/672417/campos_512_v4
+132/672418/campos_512_v4
+132/672419/campos_512_v4
+132/672422/campos_512_v4
+132/672425/campos_512_v4
+132/672437/campos_512_v4
+132/672442/campos_512_v4
+132/672443/campos_512_v4
+132/672462/campos_512_v4
+132/672476/campos_512_v4
+132/672482/campos_512_v4
+132/672486/campos_512_v4
+132/672487/campos_512_v4
+132/672491/campos_512_v4
+132/672500/campos_512_v4
+132/672521/campos_512_v4
+132/672531/campos_512_v4
+132/672536/campos_512_v4
+132/672555/campos_512_v4
+132/672561/campos_512_v4
+132/672569/campos_512_v4
+132/672576/campos_512_v4
+132/672581/campos_512_v4
+132/672588/campos_512_v4
+132/672589/campos_512_v4
+132/672590/campos_512_v4
+132/672600/campos_512_v4
+132/672601/campos_512_v4
+132/672603/campos_512_v4
+132/672630/campos_512_v4
+132/672641/campos_512_v4
+132/672647/campos_512_v4
+132/672652/campos_512_v4
+132/672662/campos_512_v4
+132/672666/campos_512_v4
+132/672667/campos_512_v4
+132/672668/campos_512_v4
+132/672671/campos_512_v4
+132/672673/campos_512_v4
+132/672680/campos_512_v4
+132/672684/campos_512_v4
+132/672686/campos_512_v4
+132/672699/campos_512_v4
+132/672701/campos_512_v4
+132/672704/campos_512_v4
+132/672711/campos_512_v4
+132/672717/campos_512_v4
+132/672724/campos_512_v4
+132/672726/campos_512_v4
+132/672732/campos_512_v4
+132/672748/campos_512_v4
+132/672764/campos_512_v4
+132/672776/campos_512_v4
+132/672783/campos_512_v4
+132/672789/campos_512_v4
+132/672799/campos_512_v4
+132/672803/campos_512_v4
+132/672805/campos_512_v4
+132/672808/campos_512_v4
+132/672815/campos_512_v4
+132/672819/campos_512_v4
+132/672820/campos_512_v4
+132/672830/campos_512_v4
+132/672832/campos_512_v4
+132/672833/campos_512_v4
+132/672853/campos_512_v4
+132/672855/campos_512_v4
+132/672866/campos_512_v4
+132/672869/campos_512_v4
+132/672886/campos_512_v4
+132/672892/campos_512_v4
+132/672902/campos_512_v4
+132/672904/campos_512_v4
+132/672905/campos_512_v4
+132/672914/campos_512_v4
+132/672915/campos_512_v4
+132/672917/campos_512_v4
+132/672927/campos_512_v4
+132/672931/campos_512_v4
+132/672933/campos_512_v4
+132/672935/campos_512_v4
+132/672936/campos_512_v4
+132/672939/campos_512_v4
+132/672946/campos_512_v4
+132/672956/campos_512_v4
+132/672958/campos_512_v4
+132/672963/campos_512_v4
+132/672970/campos_512_v4
+132/672971/campos_512_v4
+132/672974/campos_512_v4
+132/672976/campos_512_v4
+132/672978/campos_512_v4
+132/673004/campos_512_v4
+132/673016/campos_512_v4
+132/673019/campos_512_v4
+132/673034/campos_512_v4
+132/673040/campos_512_v4
+132/673041/campos_512_v4
+132/673053/campos_512_v4
+132/673065/campos_512_v4
+132/673079/campos_512_v4
+132/673081/campos_512_v4
+132/673091/campos_512_v4
+132/673108/campos_512_v4
+132/673114/campos_512_v4
+132/673137/campos_512_v4
+132/673174/campos_512_v4
+132/673184/campos_512_v4
+132/673187/campos_512_v4
+132/673202/campos_512_v4
+132/673222/campos_512_v4
+132/673223/campos_512_v4
+132/673230/campos_512_v4
+132/673231/campos_512_v4
+132/673260/campos_512_v4
+132/673269/campos_512_v4
+132/673273/campos_512_v4
+132/673277/campos_512_v4
+132/673278/campos_512_v4
+132/673293/campos_512_v4
+132/673295/campos_512_v4
+132/673300/campos_512_v4
+132/673304/campos_512_v4
+132/673324/campos_512_v4
+132/673332/campos_512_v4
+132/673342/campos_512_v4
+132/673347/campos_512_v4
+132/673355/campos_512_v4
+132/673369/campos_512_v4
+132/673374/campos_512_v4
+132/673375/campos_512_v4
+132/673376/campos_512_v4
+132/673385/campos_512_v4
+132/673403/campos_512_v4
+132/673406/campos_512_v4
+132/673410/campos_512_v4
+132/673415/campos_512_v4
+132/673420/campos_512_v4
+132/673423/campos_512_v4
+132/673424/campos_512_v4
+132/673430/campos_512_v4
+132/673433/campos_512_v4
+132/673439/campos_512_v4
+132/673456/campos_512_v4
+132/673459/campos_512_v4
+132/673469/campos_512_v4
+132/673470/campos_512_v4
+132/673472/campos_512_v4
+132/673485/campos_512_v4
+132/673491/campos_512_v4
+132/673522/campos_512_v4
+132/673527/campos_512_v4
+132/673542/campos_512_v4
+132/673549/campos_512_v4
+132/673552/campos_512_v4
+132/673563/campos_512_v4
+132/673570/campos_512_v4
+132/673583/campos_512_v4
+132/673588/campos_512_v4
+132/673596/campos_512_v4
+132/673608/campos_512_v4
+132/673610/campos_512_v4
+132/673617/campos_512_v4
+132/673621/campos_512_v4
+132/673628/campos_512_v4
+132/673629/campos_512_v4
+132/673635/campos_512_v4
+132/673652/campos_512_v4
+132/673674/campos_512_v4
+132/673684/campos_512_v4
+132/673685/campos_512_v4
+132/673686/campos_512_v4
+132/673690/campos_512_v4
+132/673693/campos_512_v4
+132/673702/campos_512_v4
+132/673704/campos_512_v4
+132/673715/campos_512_v4
+132/673724/campos_512_v4
+132/673742/campos_512_v4
+132/673744/campos_512_v4
+132/673751/campos_512_v4
+132/673769/campos_512_v4
+132/673786/campos_512_v4
+132/673791/campos_512_v4
+132/673797/campos_512_v4
+132/673801/campos_512_v4
+132/673806/campos_512_v4
+132/673815/campos_512_v4
+132/673820/campos_512_v4
+132/673823/campos_512_v4
+132/673827/campos_512_v4
+132/673833/campos_512_v4
+132/673842/campos_512_v4
+132/673845/campos_512_v4
+132/673849/campos_512_v4
+132/673855/campos_512_v4
+132/673862/campos_512_v4
+132/673883/campos_512_v4
+132/673890/campos_512_v4
+132/673897/campos_512_v4
+132/673914/campos_512_v4
+132/673929/campos_512_v4
+132/673952/campos_512_v4
+132/673965/campos_512_v4
+132/673980/campos_512_v4
+132/673989/campos_512_v4
+132/673990/campos_512_v4
+132/673998/campos_512_v4
+132/674026/campos_512_v4
+132/674027/campos_512_v4
+132/674032/campos_512_v4
+132/674033/campos_512_v4
+132/674037/campos_512_v4
+132/674048/campos_512_v4
+132/674050/campos_512_v4
+132/674053/campos_512_v4
+132/674063/campos_512_v4
+132/674067/campos_512_v4
+132/674082/campos_512_v4
+132/674083/campos_512_v4
+132/674087/campos_512_v4
+132/674088/campos_512_v4
+132/674092/campos_512_v4
+132/674119/campos_512_v4
+132/674120/campos_512_v4
+132/674129/campos_512_v4
+132/674132/campos_512_v4
+132/674135/campos_512_v4
+132/674137/campos_512_v4
+132/674145/campos_512_v4
+132/674173/campos_512_v4
+132/674177/campos_512_v4
+132/674182/campos_512_v4
+132/674192/campos_512_v4
+132/674193/campos_512_v4
+132/674209/campos_512_v4
+132/674216/campos_512_v4
+132/674220/campos_512_v4
+132/674229/campos_512_v4
+132/674235/campos_512_v4
+132/674241/campos_512_v4
+132/674245/campos_512_v4
+132/674250/campos_512_v4
+132/674251/campos_512_v4
+132/674258/campos_512_v4
+132/674263/campos_512_v4
+132/674269/campos_512_v4
+132/674273/campos_512_v4
+132/674278/campos_512_v4
+132/674288/campos_512_v4
+132/674289/campos_512_v4
+132/674303/campos_512_v4
+132/674314/campos_512_v4
+132/674318/campos_512_v4
+132/674328/campos_512_v4
+132/674329/campos_512_v4
+132/674341/campos_512_v4
+132/674346/campos_512_v4
+132/674350/campos_512_v4
+132/674362/campos_512_v4
+132/674386/campos_512_v4
+132/674388/campos_512_v4
+132/674394/campos_512_v4
+132/674396/campos_512_v4
+132/674400/campos_512_v4
+132/674403/campos_512_v4
+132/674404/campos_512_v4
+132/674409/campos_512_v4
+132/674411/campos_512_v4
+132/674416/campos_512_v4
+132/674425/campos_512_v4
+132/674434/campos_512_v4
+132/674461/campos_512_v4
+132/674466/campos_512_v4
+132/674486/campos_512_v4
+132/674492/campos_512_v4
+132/674495/campos_512_v4
+132/674498/campos_512_v4
+132/674500/campos_512_v4
+132/674502/campos_512_v4
+132/674509/campos_512_v4
+132/674514/campos_512_v4
+132/674517/campos_512_v4
+132/674529/campos_512_v4
+132/674531/campos_512_v4
+132/674548/campos_512_v4
+132/674550/campos_512_v4
+132/674554/campos_512_v4
+132/674555/campos_512_v4
+132/674558/campos_512_v4
+132/674567/campos_512_v4
+132/674568/campos_512_v4
+132/674572/campos_512_v4
+132/674575/campos_512_v4
+132/674578/campos_512_v4
+132/674579/campos_512_v4
+132/674587/campos_512_v4
+132/674597/campos_512_v4
+132/674610/campos_512_v4
+132/674612/campos_512_v4
+132/674628/campos_512_v4
+132/674633/campos_512_v4
+132/674653/campos_512_v4
+132/674666/campos_512_v4
+132/674669/campos_512_v4
+132/674672/campos_512_v4
+132/674677/campos_512_v4
+132/674678/campos_512_v4
+132/674679/campos_512_v4
+132/674688/campos_512_v4
+132/674698/campos_512_v4
+132/674700/campos_512_v4
+132/674710/campos_512_v4
+132/674711/campos_512_v4
+132/674718/campos_512_v4
+132/674719/campos_512_v4
+132/674720/campos_512_v4
+132/674724/campos_512_v4
+132/674725/campos_512_v4
+132/674742/campos_512_v4
+132/674751/campos_512_v4
+132/674752/campos_512_v4
+132/674790/campos_512_v4
+132/674795/campos_512_v4
+132/674797/campos_512_v4
+132/674810/campos_512_v4
+132/674811/campos_512_v4
+132/674812/campos_512_v4
+132/674828/campos_512_v4
+132/674829/campos_512_v4
+132/674831/campos_512_v4
+132/674832/campos_512_v4
+132/674837/campos_512_v4
+132/674841/campos_512_v4
+132/674863/campos_512_v4
+132/674864/campos_512_v4
+132/674872/campos_512_v4
+132/674873/campos_512_v4
+132/674874/campos_512_v4
+132/674877/campos_512_v4
+132/674899/campos_512_v4
+132/674900/campos_512_v4
+132/674908/campos_512_v4
+132/674914/campos_512_v4
+132/674917/campos_512_v4
+132/674922/campos_512_v4
+132/674954/campos_512_v4
+132/674969/campos_512_v4
+132/674974/campos_512_v4
+132/674995/campos_512_v4
+133/675009/campos_512_v4
+133/675017/campos_512_v4
+133/675021/campos_512_v4
+133/675023/campos_512_v4
+133/675026/campos_512_v4
+133/675028/campos_512_v4
+133/675033/campos_512_v4
+133/675048/campos_512_v4
+133/675068/campos_512_v4
+133/675070/campos_512_v4
+133/675073/campos_512_v4
+133/675080/campos_512_v4
+133/675081/campos_512_v4
+133/675085/campos_512_v4
+133/675090/campos_512_v4
+133/675091/campos_512_v4
+133/675095/campos_512_v4
+133/675102/campos_512_v4
+133/675103/campos_512_v4
+133/675108/campos_512_v4
+133/675115/campos_512_v4
+133/675117/campos_512_v4
+133/675119/campos_512_v4
+133/675121/campos_512_v4
+133/675131/campos_512_v4
+133/675132/campos_512_v4
+133/675135/campos_512_v4
+133/675137/campos_512_v4
+133/675139/campos_512_v4
+133/675144/campos_512_v4
+133/675151/campos_512_v4
+133/675171/campos_512_v4
+133/675172/campos_512_v4
+133/675183/campos_512_v4
+133/675188/campos_512_v4
+133/675197/campos_512_v4
+133/675203/campos_512_v4
+133/675206/campos_512_v4
+133/675212/campos_512_v4
+133/675216/campos_512_v4
+133/675223/campos_512_v4
+133/675226/campos_512_v4
+133/675228/campos_512_v4
+133/675229/campos_512_v4
+133/675234/campos_512_v4
+133/675235/campos_512_v4
+133/675238/campos_512_v4
+133/675255/campos_512_v4
+133/675287/campos_512_v4
+133/675289/campos_512_v4
+133/675290/campos_512_v4
+133/675299/campos_512_v4
+133/675300/campos_512_v4
+133/675319/campos_512_v4
+133/675325/campos_512_v4
+133/675326/campos_512_v4
+133/675337/campos_512_v4
+133/675371/campos_512_v4
+133/675390/campos_512_v4
+133/675397/campos_512_v4
+133/675400/campos_512_v4
+133/675406/campos_512_v4
+133/675417/campos_512_v4
+133/675424/campos_512_v4
+133/675440/campos_512_v4
+133/675455/campos_512_v4
+133/675461/campos_512_v4
+133/675469/campos_512_v4
+133/675470/campos_512_v4
+133/675486/campos_512_v4
+133/675493/campos_512_v4
+133/675500/campos_512_v4
+133/675513/campos_512_v4
+133/675518/campos_512_v4
+133/675522/campos_512_v4
+133/675523/campos_512_v4
+133/675524/campos_512_v4
+133/675532/campos_512_v4
+133/675534/campos_512_v4
+133/675539/campos_512_v4
+133/675556/campos_512_v4
+133/675559/campos_512_v4
+133/675563/campos_512_v4
+133/675567/campos_512_v4
+133/675568/campos_512_v4
+133/675569/campos_512_v4
+133/675572/campos_512_v4
+133/675577/campos_512_v4
+133/675593/campos_512_v4
+133/675607/campos_512_v4
+133/675614/campos_512_v4
+133/675624/campos_512_v4
+133/675625/campos_512_v4
+133/675629/campos_512_v4
+133/675635/campos_512_v4
+133/675641/campos_512_v4
+133/675645/campos_512_v4
+133/675646/campos_512_v4
+133/675652/campos_512_v4
+133/675655/campos_512_v4
+133/675658/campos_512_v4
+133/675664/campos_512_v4
+133/675676/campos_512_v4
+133/675677/campos_512_v4
+133/675686/campos_512_v4
+133/675693/campos_512_v4
+133/675694/campos_512_v4
+133/675709/campos_512_v4
+133/675711/campos_512_v4
+133/675712/campos_512_v4
+133/675723/campos_512_v4
+133/675729/campos_512_v4
+133/675744/campos_512_v4
+133/675749/campos_512_v4
+133/675755/campos_512_v4
+133/675758/campos_512_v4
+133/675761/campos_512_v4
+133/675768/campos_512_v4
+133/675778/campos_512_v4
+133/675779/campos_512_v4
+133/675799/campos_512_v4
+133/675801/campos_512_v4
+133/675814/campos_512_v4
+133/675815/campos_512_v4
+133/675820/campos_512_v4
+133/675829/campos_512_v4
+133/675840/campos_512_v4
+133/675850/campos_512_v4
+133/675851/campos_512_v4
+133/675858/campos_512_v4
+133/675869/campos_512_v4
+133/675880/campos_512_v4
+133/675889/campos_512_v4
+133/675902/campos_512_v4
+133/675906/campos_512_v4
+133/675927/campos_512_v4
+133/675930/campos_512_v4
+133/675932/campos_512_v4
+133/675937/campos_512_v4
+133/675939/campos_512_v4
+133/675955/campos_512_v4
+133/675956/campos_512_v4
+133/675962/campos_512_v4
+133/675963/campos_512_v4
+133/675977/campos_512_v4
+133/675982/campos_512_v4
+133/675995/campos_512_v4
+133/676014/campos_512_v4
+133/676017/campos_512_v4
+133/676029/campos_512_v4
+133/676035/campos_512_v4
+133/676039/campos_512_v4
+133/676041/campos_512_v4
+133/676042/campos_512_v4
+133/676045/campos_512_v4
+133/676047/campos_512_v4
+133/676050/campos_512_v4
+133/676055/campos_512_v4
+133/676061/campos_512_v4
+133/676084/campos_512_v4
+133/676090/campos_512_v4
+133/676101/campos_512_v4
+133/676116/campos_512_v4
+133/676119/campos_512_v4
+133/676125/campos_512_v4
+133/676139/campos_512_v4
+133/676143/campos_512_v4
+133/676153/campos_512_v4
+133/676172/campos_512_v4
+133/676179/campos_512_v4
+133/676188/campos_512_v4
+133/676194/campos_512_v4
+133/676196/campos_512_v4
+133/676214/campos_512_v4
+133/676223/campos_512_v4
+133/676248/campos_512_v4
+133/676250/campos_512_v4
+133/676260/campos_512_v4
+133/676282/campos_512_v4
+133/676296/campos_512_v4
+133/676311/campos_512_v4
+133/676318/campos_512_v4
+133/676320/campos_512_v4
+133/676356/campos_512_v4
+133/676362/campos_512_v4
+133/676384/campos_512_v4
+133/676390/campos_512_v4
+133/676395/campos_512_v4
+133/676400/campos_512_v4
+133/676409/campos_512_v4
+133/676411/campos_512_v4
+133/676412/campos_512_v4
+133/676415/campos_512_v4
+133/676424/campos_512_v4
+133/676430/campos_512_v4
+133/676469/campos_512_v4
+133/676470/campos_512_v4
+133/676471/campos_512_v4
+133/676477/campos_512_v4
+133/676480/campos_512_v4
+133/676490/campos_512_v4
+133/676500/campos_512_v4
+133/676503/campos_512_v4
+133/676505/campos_512_v4
+133/676506/campos_512_v4
+133/676517/campos_512_v4
+133/676523/campos_512_v4
+133/676526/campos_512_v4
+133/676535/campos_512_v4
+133/676540/campos_512_v4
+133/676541/campos_512_v4
+133/676542/campos_512_v4
+133/676548/campos_512_v4
+133/676559/campos_512_v4
+133/676560/campos_512_v4
+133/676564/campos_512_v4
+133/676572/campos_512_v4
+133/676575/campos_512_v4
+133/676579/campos_512_v4
+133/676601/campos_512_v4
+133/676605/campos_512_v4
+133/676609/campos_512_v4
+133/676610/campos_512_v4
+133/676612/campos_512_v4
+133/676617/campos_512_v4
+133/676621/campos_512_v4
+133/676627/campos_512_v4
+133/676628/campos_512_v4
+133/676643/campos_512_v4
+133/676647/campos_512_v4
+133/676654/campos_512_v4
+133/676659/campos_512_v4
+133/676665/campos_512_v4
+133/676669/campos_512_v4
+133/676677/campos_512_v4
+133/676678/campos_512_v4
+133/676682/campos_512_v4
+133/676692/campos_512_v4
+133/676693/campos_512_v4
+133/676698/campos_512_v4
+133/676699/campos_512_v4
+133/676723/campos_512_v4
+133/676726/campos_512_v4
+133/676727/campos_512_v4
+133/676777/campos_512_v4
+133/676779/campos_512_v4
+133/676790/campos_512_v4
+133/676802/campos_512_v4
+133/676803/campos_512_v4
+133/676804/campos_512_v4
+133/676805/campos_512_v4
+133/676825/campos_512_v4
+133/676841/campos_512_v4
+133/676843/campos_512_v4
+133/676851/campos_512_v4
+133/676864/campos_512_v4
+133/676876/campos_512_v4
+133/676887/campos_512_v4
+133/676908/campos_512_v4
+133/676910/campos_512_v4
+133/676923/campos_512_v4
+133/676936/campos_512_v4
+133/676949/campos_512_v4
+133/676956/campos_512_v4
+133/676963/campos_512_v4
+133/676976/campos_512_v4
+133/676981/campos_512_v4
+133/676989/campos_512_v4
+133/676991/campos_512_v4
+133/676994/campos_512_v4
+133/677001/campos_512_v4
+133/677002/campos_512_v4
+133/677010/campos_512_v4
+133/677022/campos_512_v4
+133/677033/campos_512_v4
+133/677041/campos_512_v4
+133/677047/campos_512_v4
+133/677048/campos_512_v4
+133/677052/campos_512_v4
+133/677055/campos_512_v4
+133/677060/campos_512_v4
+133/677063/campos_512_v4
+133/677064/campos_512_v4
+133/677066/campos_512_v4
+133/677067/campos_512_v4
+133/677076/campos_512_v4
+133/677081/campos_512_v4
+133/677108/campos_512_v4
+133/677115/campos_512_v4
+133/677135/campos_512_v4
+133/677145/campos_512_v4
+133/677146/campos_512_v4
+133/677151/campos_512_v4
+133/677168/campos_512_v4
+133/677187/campos_512_v4
+133/677188/campos_512_v4
+133/677192/campos_512_v4
+133/677199/campos_512_v4
+133/677216/campos_512_v4
+133/677222/campos_512_v4
+133/677223/campos_512_v4
+133/677236/campos_512_v4
+133/677249/campos_512_v4
+133/677287/campos_512_v4
+133/677292/campos_512_v4
+133/677297/campos_512_v4
+133/677301/campos_512_v4
+133/677304/campos_512_v4
+133/677310/campos_512_v4
+133/677320/campos_512_v4
+133/677328/campos_512_v4
+133/677341/campos_512_v4
+133/677346/campos_512_v4
+133/677352/campos_512_v4
+133/677365/campos_512_v4
+133/677372/campos_512_v4
+133/677375/campos_512_v4
+133/677377/campos_512_v4
+133/677384/campos_512_v4
+133/677385/campos_512_v4
+133/677386/campos_512_v4
+133/677390/campos_512_v4
+133/677412/campos_512_v4
+133/677428/campos_512_v4
+133/677435/campos_512_v4
+133/677442/campos_512_v4
+133/677446/campos_512_v4
+133/677472/campos_512_v4
+133/677477/campos_512_v4
+133/677478/campos_512_v4
+133/677479/campos_512_v4
+133/677482/campos_512_v4
+133/677487/campos_512_v4
+133/677489/campos_512_v4
+133/677493/campos_512_v4
+133/677498/campos_512_v4
+133/677502/campos_512_v4
+133/677507/campos_512_v4
+133/677517/campos_512_v4
+133/677521/campos_512_v4
+133/677529/campos_512_v4
+133/677531/campos_512_v4
+133/677532/campos_512_v4
+133/677535/campos_512_v4
+133/677537/campos_512_v4
+133/677539/campos_512_v4
+133/677540/campos_512_v4
+133/677544/campos_512_v4
+133/677547/campos_512_v4
+133/677549/campos_512_v4
+133/677550/campos_512_v4
+133/677560/campos_512_v4
+133/677571/campos_512_v4
+133/677579/campos_512_v4
+133/677590/campos_512_v4
+133/677591/campos_512_v4
+133/677597/campos_512_v4
+133/677599/campos_512_v4
+133/677601/campos_512_v4
+133/677629/campos_512_v4
+133/677635/campos_512_v4
+133/677646/campos_512_v4
+133/677649/campos_512_v4
+133/677673/campos_512_v4
+133/677680/campos_512_v4
+133/677682/campos_512_v4
+133/677689/campos_512_v4
+133/677693/campos_512_v4
+133/677719/campos_512_v4
+133/677736/campos_512_v4
+133/677746/campos_512_v4
+133/677752/campos_512_v4
+133/677763/campos_512_v4
+133/677767/campos_512_v4
+133/677793/campos_512_v4
+133/677809/campos_512_v4
+133/677822/campos_512_v4
+133/677824/campos_512_v4
+133/677830/campos_512_v4
+133/677834/campos_512_v4
+133/677836/campos_512_v4
+133/677862/campos_512_v4
+133/677869/campos_512_v4
+133/677883/campos_512_v4
+133/677906/campos_512_v4
+133/677908/campos_512_v4
+133/677921/campos_512_v4
+133/677924/campos_512_v4
+133/677926/campos_512_v4
+133/677953/campos_512_v4
+133/677954/campos_512_v4
+133/677980/campos_512_v4
+133/677981/campos_512_v4
+133/677988/campos_512_v4
+133/678013/campos_512_v4
+133/678027/campos_512_v4
+133/678055/campos_512_v4
+133/678057/campos_512_v4
+133/678060/campos_512_v4
+133/678067/campos_512_v4
+133/678077/campos_512_v4
+133/678082/campos_512_v4
+133/678092/campos_512_v4
+133/678095/campos_512_v4
+133/678096/campos_512_v4
+133/678106/campos_512_v4
+133/678130/campos_512_v4
+133/678131/campos_512_v4
+133/678135/campos_512_v4
+133/678143/campos_512_v4
+133/678147/campos_512_v4
+133/678150/campos_512_v4
+133/678151/campos_512_v4
+133/678166/campos_512_v4
+133/678173/campos_512_v4
+133/678178/campos_512_v4
+133/678191/campos_512_v4
+133/678213/campos_512_v4
+133/678218/campos_512_v4
+133/678220/campos_512_v4
+133/678235/campos_512_v4
+133/678250/campos_512_v4
+133/678257/campos_512_v4
+133/678258/campos_512_v4
+133/678262/campos_512_v4
+133/678268/campos_512_v4
+133/678273/campos_512_v4
+133/678286/campos_512_v4
+133/678296/campos_512_v4
+133/678307/campos_512_v4
+133/678308/campos_512_v4
+133/678309/campos_512_v4
+133/678318/campos_512_v4
+133/678324/campos_512_v4
+133/678339/campos_512_v4
+133/678344/campos_512_v4
+133/678346/campos_512_v4
+133/678356/campos_512_v4
+133/678363/campos_512_v4
+133/678371/campos_512_v4
+133/678382/campos_512_v4
+133/678391/campos_512_v4
+133/678394/campos_512_v4
+133/678400/campos_512_v4
+133/678401/campos_512_v4
+133/678404/campos_512_v4
+133/678410/campos_512_v4
+133/678419/campos_512_v4
+133/678437/campos_512_v4
+133/678439/campos_512_v4
+133/678446/campos_512_v4
+133/678449/campos_512_v4
+133/678451/campos_512_v4
+133/678460/campos_512_v4
+133/678464/campos_512_v4
+133/678471/campos_512_v4
+133/678472/campos_512_v4
+133/678483/campos_512_v4
+133/678490/campos_512_v4
+133/678496/campos_512_v4
+133/678502/campos_512_v4
+133/678510/campos_512_v4
+133/678526/campos_512_v4
+133/678529/campos_512_v4
+133/678533/campos_512_v4
+133/678535/campos_512_v4
+133/678539/campos_512_v4
+133/678546/campos_512_v4
+133/678552/campos_512_v4
+133/678553/campos_512_v4
+133/678560/campos_512_v4
+133/678573/campos_512_v4
+133/678596/campos_512_v4
+133/678600/campos_512_v4
+133/678608/campos_512_v4
+133/678618/campos_512_v4
+133/678623/campos_512_v4
+133/678624/campos_512_v4
+133/678637/campos_512_v4
+133/678642/campos_512_v4
+133/678649/campos_512_v4
+133/678665/campos_512_v4
+133/678666/campos_512_v4
+133/678693/campos_512_v4
+133/678701/campos_512_v4
+133/678704/campos_512_v4
+133/678709/campos_512_v4
+133/678712/campos_512_v4
+133/678713/campos_512_v4
+133/678726/campos_512_v4
+133/678740/campos_512_v4
+133/678741/campos_512_v4
+133/678744/campos_512_v4
+133/678754/campos_512_v4
+133/678767/campos_512_v4
+133/678769/campos_512_v4
+133/678780/campos_512_v4
+133/678781/campos_512_v4
+133/678798/campos_512_v4
+133/678801/campos_512_v4
+133/678837/campos_512_v4
+133/678851/campos_512_v4
+133/678853/campos_512_v4
+133/678860/campos_512_v4
+133/678864/campos_512_v4
+133/678871/campos_512_v4
+133/678881/campos_512_v4
+133/678882/campos_512_v4
+133/678903/campos_512_v4
+133/678908/campos_512_v4
+133/678912/campos_512_v4
+133/678949/campos_512_v4
+133/678950/campos_512_v4
+133/678969/campos_512_v4
+133/678980/campos_512_v4
+133/678988/campos_512_v4
+133/678994/campos_512_v4
+133/678995/campos_512_v4
+133/679002/campos_512_v4
+133/679007/campos_512_v4
+133/679009/campos_512_v4
+133/679020/campos_512_v4
+133/679028/campos_512_v4
+133/679030/campos_512_v4
+133/679045/campos_512_v4
+133/679047/campos_512_v4
+133/679061/campos_512_v4
+133/679070/campos_512_v4
+133/679079/campos_512_v4
+133/679085/campos_512_v4
+133/679103/campos_512_v4
+133/679115/campos_512_v4
+133/679150/campos_512_v4
+133/679158/campos_512_v4
+133/679161/campos_512_v4
+133/679162/campos_512_v4
+133/679164/campos_512_v4
+133/679165/campos_512_v4
+133/679174/campos_512_v4
+133/679184/campos_512_v4
+133/679188/campos_512_v4
+133/679196/campos_512_v4
+133/679202/campos_512_v4
+133/679214/campos_512_v4
+133/679217/campos_512_v4
+133/679227/campos_512_v4
+133/679229/campos_512_v4
+133/679234/campos_512_v4
+133/679236/campos_512_v4
+133/679238/campos_512_v4
+133/679241/campos_512_v4
+133/679254/campos_512_v4
+133/679266/campos_512_v4
+133/679273/campos_512_v4
+133/679293/campos_512_v4
+133/679294/campos_512_v4
+133/679318/campos_512_v4
+133/679326/campos_512_v4
+133/679328/campos_512_v4
+133/679330/campos_512_v4
+133/679345/campos_512_v4
+133/679346/campos_512_v4
+133/679360/campos_512_v4
+133/679369/campos_512_v4
+133/679373/campos_512_v4
+133/679398/campos_512_v4
+133/679422/campos_512_v4
+133/679432/campos_512_v4
+133/679435/campos_512_v4
+133/679444/campos_512_v4
+133/679446/campos_512_v4
+133/679452/campos_512_v4
+133/679453/campos_512_v4
+133/679460/campos_512_v4
+133/679465/campos_512_v4
+133/679466/campos_512_v4
+133/679475/campos_512_v4
+133/679481/campos_512_v4
+133/679483/campos_512_v4
+133/679494/campos_512_v4
+133/679498/campos_512_v4
+133/679504/campos_512_v4
+133/679508/campos_512_v4
+133/679516/campos_512_v4
+133/679522/campos_512_v4
+133/679528/campos_512_v4
+133/679533/campos_512_v4
+133/679536/campos_512_v4
+133/679537/campos_512_v4
+133/679542/campos_512_v4
+133/679566/campos_512_v4
+133/679567/campos_512_v4
+133/679587/campos_512_v4
+133/679592/campos_512_v4
+133/679594/campos_512_v4
+133/679596/campos_512_v4
+133/679623/campos_512_v4
+133/679624/campos_512_v4
+133/679627/campos_512_v4
+133/679630/campos_512_v4
+133/679631/campos_512_v4
+133/679633/campos_512_v4
+133/679638/campos_512_v4
+133/679646/campos_512_v4
+133/679650/campos_512_v4
+133/679656/campos_512_v4
+133/679665/campos_512_v4
+133/679666/campos_512_v4
+133/679678/campos_512_v4
+133/679680/campos_512_v4
+133/679687/campos_512_v4
+133/679688/campos_512_v4
+133/679691/campos_512_v4
+133/679692/campos_512_v4
+133/679699/campos_512_v4
+133/679710/campos_512_v4
+133/679713/campos_512_v4
+133/679724/campos_512_v4
+133/679727/campos_512_v4
+133/679728/campos_512_v4
+133/679736/campos_512_v4
+133/679740/campos_512_v4
+133/679743/campos_512_v4
+133/679744/campos_512_v4
+133/679752/campos_512_v4
+133/679758/campos_512_v4
+133/679766/campos_512_v4
+133/679768/campos_512_v4
+133/679770/campos_512_v4
+133/679774/campos_512_v4
+133/679779/campos_512_v4
+133/679782/campos_512_v4
+133/679785/campos_512_v4
+133/679799/campos_512_v4
+133/679802/campos_512_v4
+133/679826/campos_512_v4
+133/679840/campos_512_v4
+133/679843/campos_512_v4
+133/679845/campos_512_v4
+133/679849/campos_512_v4
+133/679860/campos_512_v4
+133/679861/campos_512_v4
+133/679884/campos_512_v4
+133/679894/campos_512_v4
+133/679902/campos_512_v4
+133/679914/campos_512_v4
+133/679915/campos_512_v4
+133/679930/campos_512_v4
+133/679932/campos_512_v4
+133/679937/campos_512_v4
+133/679945/campos_512_v4
+133/679963/campos_512_v4
+133/679966/campos_512_v4
+133/679969/campos_512_v4
+133/679973/campos_512_v4
+133/679984/campos_512_v4
+133/679991/campos_512_v4
+133/679996/campos_512_v4
+134/680002/campos_512_v4
+134/680005/campos_512_v4
+134/680007/campos_512_v4
+134/680009/campos_512_v4
+134/680010/campos_512_v4
+134/680017/campos_512_v4
+134/680021/campos_512_v4
+134/680030/campos_512_v4
+134/680034/campos_512_v4
+134/680038/campos_512_v4
+134/680044/campos_512_v4
+134/680055/campos_512_v4
+134/680059/campos_512_v4
+134/680070/campos_512_v4
+134/680074/campos_512_v4
+134/680078/campos_512_v4
+134/680087/campos_512_v4
+134/680089/campos_512_v4
+134/680098/campos_512_v4
+134/680102/campos_512_v4
+134/680110/campos_512_v4
+134/680111/campos_512_v4
+134/680119/campos_512_v4
+134/680130/campos_512_v4
+134/680150/campos_512_v4
+134/680152/campos_512_v4
+134/680156/campos_512_v4
+134/680165/campos_512_v4
+134/680171/campos_512_v4
+134/680194/campos_512_v4
+134/680196/campos_512_v4
+134/680197/campos_512_v4
+134/680203/campos_512_v4
+134/680204/campos_512_v4
+134/680207/campos_512_v4
+134/680209/campos_512_v4
+134/680218/campos_512_v4
+134/680221/campos_512_v4
+134/680227/campos_512_v4
+134/680239/campos_512_v4
+134/680245/campos_512_v4
+134/680255/campos_512_v4
+134/680271/campos_512_v4
+134/680282/campos_512_v4
+134/680294/campos_512_v4
+134/680311/campos_512_v4
+134/680323/campos_512_v4
+134/680335/campos_512_v4
+134/680344/campos_512_v4
+134/680352/campos_512_v4
+134/680355/campos_512_v4
+134/680360/campos_512_v4
+134/680368/campos_512_v4
+134/680369/campos_512_v4
+134/680371/campos_512_v4
+134/680384/campos_512_v4
+134/680385/campos_512_v4
+134/680387/campos_512_v4
+134/680389/campos_512_v4
+134/680391/campos_512_v4
+134/680423/campos_512_v4
+134/680430/campos_512_v4
+134/680431/campos_512_v4
+134/680436/campos_512_v4
+134/680439/campos_512_v4
+134/680455/campos_512_v4
+134/680459/campos_512_v4
+134/680464/campos_512_v4
+134/680467/campos_512_v4
+134/680476/campos_512_v4
+134/680478/campos_512_v4
+134/680481/campos_512_v4
+134/680484/campos_512_v4
+134/680487/campos_512_v4
+134/680488/campos_512_v4
+134/680489/campos_512_v4
+134/680497/campos_512_v4
+134/680505/campos_512_v4
+134/680509/campos_512_v4
+134/680519/campos_512_v4
+134/680527/campos_512_v4
+134/680531/campos_512_v4
+134/680534/campos_512_v4
+134/680553/campos_512_v4
+134/680557/campos_512_v4
+134/680567/campos_512_v4
+134/680568/campos_512_v4
+134/680582/campos_512_v4
+134/680585/campos_512_v4
+134/680593/campos_512_v4
+134/680597/campos_512_v4
+134/680600/campos_512_v4
+134/680627/campos_512_v4
+134/680630/campos_512_v4
+134/680665/campos_512_v4
+134/680666/campos_512_v4
+134/680671/campos_512_v4
+134/680689/campos_512_v4
+134/680691/campos_512_v4
+134/680694/campos_512_v4
+134/680704/campos_512_v4
+134/680708/campos_512_v4
+134/680711/campos_512_v4
+134/680713/campos_512_v4
+134/680729/campos_512_v4
+134/680731/campos_512_v4
+134/680755/campos_512_v4
+134/680758/campos_512_v4
+134/680760/campos_512_v4
+134/680773/campos_512_v4
+134/680781/campos_512_v4
+134/680783/campos_512_v4
+134/680789/campos_512_v4
+134/680791/campos_512_v4
+134/680795/campos_512_v4
+134/680800/campos_512_v4
+134/680806/campos_512_v4
+134/680807/campos_512_v4
+134/680814/campos_512_v4
+134/680815/campos_512_v4
+134/680835/campos_512_v4
+134/680836/campos_512_v4
+134/680843/campos_512_v4
+134/680854/campos_512_v4
+134/680863/campos_512_v4
+134/680876/campos_512_v4
+134/680878/campos_512_v4
+134/680882/campos_512_v4
+134/680883/campos_512_v4
+134/680898/campos_512_v4
+134/680901/campos_512_v4
+134/680903/campos_512_v4
+134/680905/campos_512_v4
+134/680908/campos_512_v4
+134/680909/campos_512_v4
+134/680917/campos_512_v4
+134/680919/campos_512_v4
+134/680921/campos_512_v4
+134/680931/campos_512_v4
+134/680936/campos_512_v4
+134/680938/campos_512_v4
+134/680958/campos_512_v4
+134/680960/campos_512_v4
+134/680966/campos_512_v4
+134/680967/campos_512_v4
+134/680972/campos_512_v4
+134/680981/campos_512_v4
+134/681007/campos_512_v4
+134/681019/campos_512_v4
+134/681027/campos_512_v4
+134/681044/campos_512_v4
+134/681047/campos_512_v4
+134/681048/campos_512_v4
+134/681049/campos_512_v4
+134/681053/campos_512_v4
+134/681056/campos_512_v4
+134/681061/campos_512_v4
+134/681063/campos_512_v4
+134/681082/campos_512_v4
+134/681112/campos_512_v4
+134/681115/campos_512_v4
+134/681116/campos_512_v4
+134/681134/campos_512_v4
+134/681140/campos_512_v4
+134/681142/campos_512_v4
+134/681144/campos_512_v4
+134/681159/campos_512_v4
+134/681165/campos_512_v4
+134/681176/campos_512_v4
+134/681178/campos_512_v4
+134/681179/campos_512_v4
+134/681186/campos_512_v4
+134/681206/campos_512_v4
+134/681207/campos_512_v4
+134/681214/campos_512_v4
+134/681222/campos_512_v4
+134/681228/campos_512_v4
+134/681241/campos_512_v4
+134/681244/campos_512_v4
+134/681262/campos_512_v4
+134/681267/campos_512_v4
+134/681292/campos_512_v4
+134/681303/campos_512_v4
+134/681313/campos_512_v4
+134/681326/campos_512_v4
+134/681337/campos_512_v4
+134/681350/campos_512_v4
+134/681355/campos_512_v4
+134/681358/campos_512_v4
+134/681364/campos_512_v4
+134/681371/campos_512_v4
+134/681373/campos_512_v4
+134/681374/campos_512_v4
+134/681377/campos_512_v4
+134/681379/campos_512_v4
+134/681382/campos_512_v4
+134/681387/campos_512_v4
+134/681399/campos_512_v4
+134/681404/campos_512_v4
+134/681412/campos_512_v4
+134/681418/campos_512_v4
+134/681419/campos_512_v4
+134/681422/campos_512_v4
+134/681430/campos_512_v4
+134/681432/campos_512_v4
+134/681433/campos_512_v4
+134/681434/campos_512_v4
+134/681439/campos_512_v4
+134/681457/campos_512_v4
+134/681467/campos_512_v4
+134/681474/campos_512_v4
+134/681476/campos_512_v4
+134/681477/campos_512_v4
+134/681479/campos_512_v4
+134/681487/campos_512_v4
+134/681500/campos_512_v4
+134/681510/campos_512_v4
+134/681518/campos_512_v4
+134/681520/campos_512_v4
+134/681522/campos_512_v4
+134/681526/campos_512_v4
+134/681533/campos_512_v4
+134/681537/campos_512_v4
+134/681548/campos_512_v4
+134/681549/campos_512_v4
+134/681554/campos_512_v4
+134/681566/campos_512_v4
+134/681584/campos_512_v4
+134/681590/campos_512_v4
+134/681594/campos_512_v4
+134/681595/campos_512_v4
+134/681597/campos_512_v4
+134/681599/campos_512_v4
+134/681605/campos_512_v4
+134/681611/campos_512_v4
+134/681621/campos_512_v4
+134/681624/campos_512_v4
+134/681626/campos_512_v4
+134/681627/campos_512_v4
+134/681628/campos_512_v4
+134/681630/campos_512_v4
+134/681643/campos_512_v4
+134/681652/campos_512_v4
+134/681656/campos_512_v4
+134/681668/campos_512_v4
+134/681677/campos_512_v4
+134/681683/campos_512_v4
+134/681688/campos_512_v4
+134/681693/campos_512_v4
+134/681697/campos_512_v4
+134/681701/campos_512_v4
+134/681704/campos_512_v4
+134/681706/campos_512_v4
+134/681707/campos_512_v4
+134/681714/campos_512_v4
+134/681715/campos_512_v4
+134/681725/campos_512_v4
+134/681731/campos_512_v4
+134/681734/campos_512_v4
+134/681737/campos_512_v4
+134/681753/campos_512_v4
+134/681764/campos_512_v4
+134/681773/campos_512_v4
+134/681777/campos_512_v4
+134/681778/campos_512_v4
+134/681780/campos_512_v4
+134/681781/campos_512_v4
+134/681792/campos_512_v4
+134/681803/campos_512_v4
+134/681805/campos_512_v4
+134/681806/campos_512_v4
+134/681808/campos_512_v4
+134/681829/campos_512_v4
+134/681831/campos_512_v4
+134/681846/campos_512_v4
+134/681868/campos_512_v4
+134/681878/campos_512_v4
+134/681890/campos_512_v4
+134/681895/campos_512_v4
+134/681901/campos_512_v4
+134/681909/campos_512_v4
+134/681920/campos_512_v4
+134/681921/campos_512_v4
+134/681927/campos_512_v4
+134/681928/campos_512_v4
+134/681934/campos_512_v4
+134/681941/campos_512_v4
+134/681944/campos_512_v4
+134/681962/campos_512_v4
+134/681973/campos_512_v4
+134/681977/campos_512_v4
+134/681979/campos_512_v4
+134/681990/campos_512_v4
+134/682003/campos_512_v4
+134/682004/campos_512_v4
+134/682013/campos_512_v4
+134/682020/campos_512_v4
+134/682024/campos_512_v4
+134/682025/campos_512_v4
+134/682026/campos_512_v4
+134/682029/campos_512_v4
+134/682034/campos_512_v4
+134/682047/campos_512_v4
+134/682049/campos_512_v4
+134/682052/campos_512_v4
+134/682056/campos_512_v4
+134/682061/campos_512_v4
+134/682065/campos_512_v4
+134/682081/campos_512_v4
+134/682083/campos_512_v4
+134/682095/campos_512_v4
+134/682102/campos_512_v4
+134/682129/campos_512_v4
+134/682137/campos_512_v4
+134/682147/campos_512_v4
+134/682151/campos_512_v4
+134/682156/campos_512_v4
+134/682157/campos_512_v4
+134/682162/campos_512_v4
+134/682164/campos_512_v4
+134/682182/campos_512_v4
+134/682191/campos_512_v4
+134/682192/campos_512_v4
+134/682200/campos_512_v4
+134/682201/campos_512_v4
+134/682207/campos_512_v4
+134/682209/campos_512_v4
+134/682218/campos_512_v4
+134/682222/campos_512_v4
+134/682224/campos_512_v4
+134/682232/campos_512_v4
+134/682245/campos_512_v4
+134/682250/campos_512_v4
+134/682251/campos_512_v4
+134/682261/campos_512_v4
+134/682282/campos_512_v4
+134/682285/campos_512_v4
+134/682294/campos_512_v4
+134/682297/campos_512_v4
+134/682317/campos_512_v4
+134/682330/campos_512_v4
+134/682341/campos_512_v4
+134/682350/campos_512_v4
+134/682353/campos_512_v4
+134/682357/campos_512_v4
+134/682364/campos_512_v4
+134/682366/campos_512_v4
+134/682369/campos_512_v4
+134/682375/campos_512_v4
+134/682376/campos_512_v4
+134/682387/campos_512_v4
+134/682399/campos_512_v4
+134/682400/campos_512_v4
+134/682415/campos_512_v4
+134/682420/campos_512_v4
+134/682428/campos_512_v4
+134/682429/campos_512_v4
+134/682437/campos_512_v4
+134/682438/campos_512_v4
+134/682447/campos_512_v4
+134/682451/campos_512_v4
+134/682456/campos_512_v4
+134/682457/campos_512_v4
+134/682459/campos_512_v4
+134/682460/campos_512_v4
+134/682462/campos_512_v4
+134/682464/campos_512_v4
+134/682472/campos_512_v4
+134/682475/campos_512_v4
+134/682482/campos_512_v4
+134/682488/campos_512_v4
+134/682499/campos_512_v4
+134/682501/campos_512_v4
+134/682506/campos_512_v4
+134/682511/campos_512_v4
+134/682530/campos_512_v4
+134/682534/campos_512_v4
+134/682556/campos_512_v4
+134/682566/campos_512_v4
+134/682578/campos_512_v4
+134/682591/campos_512_v4
+134/682593/campos_512_v4
+134/682597/campos_512_v4
+134/682608/campos_512_v4
+134/682609/campos_512_v4
+134/682615/campos_512_v4
+134/682628/campos_512_v4
+134/682637/campos_512_v4
+134/682638/campos_512_v4
+134/682647/campos_512_v4
+134/682655/campos_512_v4
+134/682667/campos_512_v4
+134/682668/campos_512_v4
+134/682671/campos_512_v4
+134/682673/campos_512_v4
+134/682674/campos_512_v4
+134/682676/campos_512_v4
+134/682681/campos_512_v4
+134/682707/campos_512_v4
+134/682720/campos_512_v4
+134/682724/campos_512_v4
+134/682725/campos_512_v4
+134/682726/campos_512_v4
+134/682727/campos_512_v4
+134/682735/campos_512_v4
+134/682736/campos_512_v4
+134/682758/campos_512_v4
+134/682760/campos_512_v4
+134/682788/campos_512_v4
+134/682800/campos_512_v4
+134/682811/campos_512_v4
+134/682820/campos_512_v4
+134/682831/campos_512_v4
+134/682835/campos_512_v4
+134/682837/campos_512_v4
+134/682848/campos_512_v4
+134/682857/campos_512_v4
+134/682858/campos_512_v4
+134/682861/campos_512_v4
+134/682862/campos_512_v4
+134/682865/campos_512_v4
+134/682875/campos_512_v4
+134/682876/campos_512_v4
+134/682894/campos_512_v4
+134/682896/campos_512_v4
+134/682898/campos_512_v4
+134/682899/campos_512_v4
+134/682901/campos_512_v4
+134/682906/campos_512_v4
+134/682915/campos_512_v4
+134/682932/campos_512_v4
+134/682946/campos_512_v4
+134/682950/campos_512_v4
+134/682954/campos_512_v4
+134/682977/campos_512_v4
+134/682994/campos_512_v4
+134/683018/campos_512_v4
+134/683021/campos_512_v4
+134/683050/campos_512_v4
+134/683064/campos_512_v4
+134/683069/campos_512_v4
+134/683076/campos_512_v4
+134/683079/campos_512_v4
+134/683094/campos_512_v4
+134/683099/campos_512_v4
+134/683103/campos_512_v4
+134/683115/campos_512_v4
+134/683117/campos_512_v4
+134/683122/campos_512_v4
+134/683132/campos_512_v4
+134/683136/campos_512_v4
+134/683148/campos_512_v4
+134/683154/campos_512_v4
+134/683163/campos_512_v4
+134/683166/campos_512_v4
+134/683168/campos_512_v4
+134/683187/campos_512_v4
+134/683191/campos_512_v4
+134/683192/campos_512_v4
+134/683195/campos_512_v4
+134/683199/campos_512_v4
+134/683202/campos_512_v4
+134/683206/campos_512_v4
+134/683209/campos_512_v4
+134/683216/campos_512_v4
+134/683220/campos_512_v4
+134/683222/campos_512_v4
+134/683223/campos_512_v4
+134/683225/campos_512_v4
+134/683240/campos_512_v4
+134/683243/campos_512_v4
+134/683255/campos_512_v4
+134/683272/campos_512_v4
+134/683277/campos_512_v4
+134/683280/campos_512_v4
+134/683281/campos_512_v4
+134/683296/campos_512_v4
+134/683319/campos_512_v4
+134/683321/campos_512_v4
+134/683327/campos_512_v4
+134/683330/campos_512_v4
+134/683339/campos_512_v4
+134/683346/campos_512_v4
+134/683350/campos_512_v4
+134/683372/campos_512_v4
+134/683375/campos_512_v4
+134/683380/campos_512_v4
+134/683381/campos_512_v4
+134/683417/campos_512_v4
+134/683427/campos_512_v4
+134/683430/campos_512_v4
+134/683435/campos_512_v4
+134/683443/campos_512_v4
+134/683460/campos_512_v4
+134/683486/campos_512_v4
+134/683491/campos_512_v4
+134/683500/campos_512_v4
+134/683502/campos_512_v4
+134/683506/campos_512_v4
+134/683511/campos_512_v4
+134/683516/campos_512_v4
+134/683532/campos_512_v4
+134/683554/campos_512_v4
+134/683555/campos_512_v4
+134/683568/campos_512_v4
+134/683575/campos_512_v4
+134/683578/campos_512_v4
+134/683580/campos_512_v4
+134/683611/campos_512_v4
+134/683621/campos_512_v4
+134/683625/campos_512_v4
+134/683626/campos_512_v4
+134/683628/campos_512_v4
+134/683631/campos_512_v4
+134/683632/campos_512_v4
+134/683637/campos_512_v4
+134/683644/campos_512_v4
+134/683648/campos_512_v4
+134/683649/campos_512_v4
+134/683654/campos_512_v4
+134/683656/campos_512_v4
+134/683700/campos_512_v4
+134/683703/campos_512_v4
+134/683715/campos_512_v4
+134/683721/campos_512_v4
+134/683724/campos_512_v4
+134/683726/campos_512_v4
+134/683728/campos_512_v4
+134/683730/campos_512_v4
+134/683737/campos_512_v4
+134/683742/campos_512_v4
+134/683752/campos_512_v4
+134/683755/campos_512_v4
+134/683767/campos_512_v4
+134/683768/campos_512_v4
+134/683786/campos_512_v4
+134/683787/campos_512_v4
+134/683790/campos_512_v4
+134/683795/campos_512_v4
+134/683796/campos_512_v4
+134/683804/campos_512_v4
+134/683809/campos_512_v4
+134/683811/campos_512_v4
+134/683815/campos_512_v4
+134/683822/campos_512_v4
+134/683824/campos_512_v4
+134/683844/campos_512_v4
+134/683851/campos_512_v4
+134/683855/campos_512_v4
+134/683886/campos_512_v4
+134/683890/campos_512_v4
+134/683897/campos_512_v4
+134/683910/campos_512_v4
+134/683911/campos_512_v4
+134/683912/campos_512_v4
+134/683928/campos_512_v4
+134/683944/campos_512_v4
+134/683946/campos_512_v4
+134/683951/campos_512_v4
+134/683961/campos_512_v4
+134/683980/campos_512_v4
+134/683986/campos_512_v4
+134/683995/campos_512_v4
+134/683997/campos_512_v4
+134/684008/campos_512_v4
+134/684010/campos_512_v4
+134/684025/campos_512_v4
+134/684032/campos_512_v4
+134/684046/campos_512_v4
+134/684050/campos_512_v4
+134/684063/campos_512_v4
+134/684065/campos_512_v4
+134/684079/campos_512_v4
+134/684084/campos_512_v4
+134/684093/campos_512_v4
+134/684100/campos_512_v4
+134/684101/campos_512_v4
+134/684118/campos_512_v4
+134/684128/campos_512_v4
+134/684145/campos_512_v4
+134/684149/campos_512_v4
+134/684154/campos_512_v4
+134/684164/campos_512_v4
+134/684165/campos_512_v4
+134/684173/campos_512_v4
+134/684181/campos_512_v4
+134/684188/campos_512_v4
+134/684199/campos_512_v4
+134/684213/campos_512_v4
+134/684230/campos_512_v4
+134/684237/campos_512_v4
+134/684238/campos_512_v4
+134/684264/campos_512_v4
+134/684265/campos_512_v4
+134/684270/campos_512_v4
+134/684276/campos_512_v4
+134/684282/campos_512_v4
+134/684289/campos_512_v4
+134/684296/campos_512_v4
+134/684307/campos_512_v4
+134/684309/campos_512_v4
+134/684342/campos_512_v4
+134/684359/campos_512_v4
+134/684368/campos_512_v4
+134/684380/campos_512_v4
+134/684395/campos_512_v4
+134/684413/campos_512_v4
+134/684416/campos_512_v4
+134/684420/campos_512_v4
+134/684421/campos_512_v4
+134/684449/campos_512_v4
+134/684456/campos_512_v4
+134/684464/campos_512_v4
+134/684470/campos_512_v4
+134/684471/campos_512_v4
+134/684472/campos_512_v4
+134/684479/campos_512_v4
+134/684481/campos_512_v4
+134/684491/campos_512_v4
+134/684504/campos_512_v4
+134/684507/campos_512_v4
+134/684516/campos_512_v4
+134/684517/campos_512_v4
+134/684522/campos_512_v4
+134/684527/campos_512_v4
+134/684529/campos_512_v4
+134/684546/campos_512_v4
+134/684558/campos_512_v4
+134/684559/campos_512_v4
+134/684566/campos_512_v4
+134/684569/campos_512_v4
+134/684570/campos_512_v4
+134/684578/campos_512_v4
+134/684588/campos_512_v4
+134/684592/campos_512_v4
+134/684597/campos_512_v4
+134/684610/campos_512_v4
+134/684611/campos_512_v4
+134/684613/campos_512_v4
+134/684614/campos_512_v4
+134/684625/campos_512_v4
+134/684626/campos_512_v4
+134/684630/campos_512_v4
+134/684633/campos_512_v4
+134/684636/campos_512_v4
+134/684649/campos_512_v4
+134/684650/campos_512_v4
+134/684655/campos_512_v4
+134/684677/campos_512_v4
+134/684682/campos_512_v4
+134/684705/campos_512_v4
+134/684719/campos_512_v4
+134/684724/campos_512_v4
+134/684727/campos_512_v4
+134/684728/campos_512_v4
+134/684732/campos_512_v4
+134/684743/campos_512_v4
+134/684747/campos_512_v4
+134/684748/campos_512_v4
+134/684753/campos_512_v4
+134/684756/campos_512_v4
+134/684760/campos_512_v4
+134/684763/campos_512_v4
+134/684766/campos_512_v4
+134/684770/campos_512_v4
+134/684771/campos_512_v4
+134/684776/campos_512_v4
+134/684785/campos_512_v4
+134/684789/campos_512_v4
+134/684796/campos_512_v4
+134/684797/campos_512_v4
+134/684803/campos_512_v4
+134/684812/campos_512_v4
+134/684813/campos_512_v4
+134/684821/campos_512_v4
+134/684829/campos_512_v4
+134/684830/campos_512_v4
+134/684840/campos_512_v4
+134/684847/campos_512_v4
+134/684857/campos_512_v4
+134/684858/campos_512_v4
+134/684876/campos_512_v4
+134/684881/campos_512_v4
+134/684883/campos_512_v4
+134/684888/campos_512_v4
+134/684902/campos_512_v4
+134/684913/campos_512_v4
+134/684920/campos_512_v4
+134/684923/campos_512_v4
+134/684926/campos_512_v4
+134/684939/campos_512_v4
+134/684940/campos_512_v4
+134/684959/campos_512_v4
+134/684963/campos_512_v4
+134/684965/campos_512_v4
+134/684970/campos_512_v4
+134/684975/campos_512_v4
+134/684990/campos_512_v4
+135/685002/campos_512_v4
+135/685008/campos_512_v4
+135/685012/campos_512_v4
+135/685013/campos_512_v4
+135/685019/campos_512_v4
+135/685034/campos_512_v4
+135/685035/campos_512_v4
+135/685040/campos_512_v4
+135/685047/campos_512_v4
+135/685055/campos_512_v4
+135/685056/campos_512_v4
+135/685060/campos_512_v4
+135/685075/campos_512_v4
+135/685084/campos_512_v4
+135/685087/campos_512_v4
+135/685088/campos_512_v4
+135/685103/campos_512_v4
+135/685121/campos_512_v4
+135/685133/campos_512_v4
+135/685135/campos_512_v4
+135/685136/campos_512_v4
+135/685151/campos_512_v4
+135/685156/campos_512_v4
+135/685180/campos_512_v4
+135/685181/campos_512_v4
+135/685183/campos_512_v4
+135/685186/campos_512_v4
+135/685193/campos_512_v4
+135/685199/campos_512_v4
+135/685205/campos_512_v4
+135/685207/campos_512_v4
+135/685211/campos_512_v4
+135/685228/campos_512_v4
+135/685243/campos_512_v4
+135/685250/campos_512_v4
+135/685254/campos_512_v4
+135/685278/campos_512_v4
+135/685336/campos_512_v4
+135/685348/campos_512_v4
+135/685353/campos_512_v4
+135/685360/campos_512_v4
+135/685361/campos_512_v4
+135/685364/campos_512_v4
+135/685368/campos_512_v4
+135/685374/campos_512_v4
+135/685383/campos_512_v4
+135/685384/campos_512_v4
+135/685435/campos_512_v4
+135/685445/campos_512_v4
+135/685446/campos_512_v4
+135/685447/campos_512_v4
+135/685449/campos_512_v4
+135/685450/campos_512_v4
+135/685452/campos_512_v4
+135/685453/campos_512_v4
+135/685459/campos_512_v4
+135/685483/campos_512_v4
+135/685500/campos_512_v4
+135/685529/campos_512_v4
+135/685537/campos_512_v4
+135/685553/campos_512_v4
+135/685554/campos_512_v4
+135/685555/campos_512_v4
+135/685567/campos_512_v4
+135/685572/campos_512_v4
+135/685597/campos_512_v4
+135/685608/campos_512_v4
+135/685618/campos_512_v4
+135/685622/campos_512_v4
+135/685626/campos_512_v4
+135/685638/campos_512_v4
+135/685639/campos_512_v4
+135/685646/campos_512_v4
+135/685647/campos_512_v4
+135/685663/campos_512_v4
+135/685664/campos_512_v4
+135/685668/campos_512_v4
+135/685671/campos_512_v4
+135/685679/campos_512_v4
+135/685681/campos_512_v4
+135/685688/campos_512_v4
+135/685690/campos_512_v4
+135/685705/campos_512_v4
+135/685708/campos_512_v4
+135/685711/campos_512_v4
+135/685713/campos_512_v4
+135/685720/campos_512_v4
+135/685728/campos_512_v4
+135/685746/campos_512_v4
+135/685749/campos_512_v4
+135/685760/campos_512_v4
+135/685765/campos_512_v4
+135/685790/campos_512_v4
+135/685791/campos_512_v4
+135/685798/campos_512_v4
+135/685799/campos_512_v4
+135/685802/campos_512_v4
+135/685804/campos_512_v4
+135/685808/campos_512_v4
+135/685809/campos_512_v4
+135/685815/campos_512_v4
+135/685820/campos_512_v4
+135/685825/campos_512_v4
+135/685833/campos_512_v4
+135/685857/campos_512_v4
+135/685864/campos_512_v4
+135/685876/campos_512_v4
+135/685877/campos_512_v4
+135/685887/campos_512_v4
+135/685888/campos_512_v4
+135/685892/campos_512_v4
+135/685895/campos_512_v4
+135/685907/campos_512_v4
+135/685922/campos_512_v4
+135/685925/campos_512_v4
+135/685926/campos_512_v4
+135/685929/campos_512_v4
+135/685931/campos_512_v4
+135/685943/campos_512_v4
+135/685945/campos_512_v4
+135/685968/campos_512_v4
+135/685980/campos_512_v4
+135/685990/campos_512_v4
+135/685996/campos_512_v4
+135/685997/campos_512_v4
+135/686006/campos_512_v4
+135/686013/campos_512_v4
+135/686016/campos_512_v4
+135/686033/campos_512_v4
+135/686038/campos_512_v4
+135/686066/campos_512_v4
+135/686074/campos_512_v4
+135/686080/campos_512_v4
+135/686084/campos_512_v4
+135/686092/campos_512_v4
+135/686096/campos_512_v4
+135/686104/campos_512_v4
+135/686114/campos_512_v4
+135/686118/campos_512_v4
+135/686121/campos_512_v4
+135/686135/campos_512_v4
+135/686138/campos_512_v4
+135/686145/campos_512_v4
+135/686146/campos_512_v4
+135/686150/campos_512_v4
+135/686152/campos_512_v4
+135/686164/campos_512_v4
+135/686165/campos_512_v4
+135/686172/campos_512_v4
+135/686195/campos_512_v4
+135/686207/campos_512_v4
+135/686208/campos_512_v4
+135/686209/campos_512_v4
+135/686216/campos_512_v4
+135/686229/campos_512_v4
+135/686245/campos_512_v4
+135/686248/campos_512_v4
+135/686254/campos_512_v4
+135/686265/campos_512_v4
+135/686286/campos_512_v4
+135/686290/campos_512_v4
+135/686296/campos_512_v4
+135/686309/campos_512_v4
+135/686315/campos_512_v4
+135/686322/campos_512_v4
+135/686323/campos_512_v4
+135/686337/campos_512_v4
+135/686342/campos_512_v4
+135/686343/campos_512_v4
+135/686367/campos_512_v4
+135/686373/campos_512_v4
+135/686379/campos_512_v4
+135/686383/campos_512_v4
+135/686395/campos_512_v4
+135/686401/campos_512_v4
+135/686402/campos_512_v4
+135/686411/campos_512_v4
+135/686412/campos_512_v4
+135/686413/campos_512_v4
+135/686414/campos_512_v4
+135/686422/campos_512_v4
+135/686429/campos_512_v4
+135/686441/campos_512_v4
+135/686447/campos_512_v4
+135/686456/campos_512_v4
+135/686463/campos_512_v4
+135/686468/campos_512_v4
+135/686472/campos_512_v4
+135/686477/campos_512_v4
+135/686479/campos_512_v4
+135/686480/campos_512_v4
+135/686481/campos_512_v4
+135/686483/campos_512_v4
+135/686484/campos_512_v4
+135/686488/campos_512_v4
+135/686492/campos_512_v4
+135/686497/campos_512_v4
+135/686498/campos_512_v4
+135/686525/campos_512_v4
+135/686530/campos_512_v4
+135/686536/campos_512_v4
+135/686538/campos_512_v4
+135/686542/campos_512_v4
+135/686558/campos_512_v4
+135/686564/campos_512_v4
+135/686568/campos_512_v4
+135/686569/campos_512_v4
+135/686576/campos_512_v4
+135/686577/campos_512_v4
+135/686580/campos_512_v4
+135/686581/campos_512_v4
+135/686582/campos_512_v4
+135/686584/campos_512_v4
+135/686590/campos_512_v4
+135/686599/campos_512_v4
+135/686603/campos_512_v4
+135/686604/campos_512_v4
+135/686609/campos_512_v4
+135/686623/campos_512_v4
+135/686635/campos_512_v4
+135/686640/campos_512_v4
+135/686641/campos_512_v4
+135/686646/campos_512_v4
+135/686649/campos_512_v4
+135/686651/campos_512_v4
+135/686665/campos_512_v4
+135/686677/campos_512_v4
+135/686678/campos_512_v4
+135/686690/campos_512_v4
+135/686694/campos_512_v4
+135/686697/campos_512_v4
+135/686704/campos_512_v4
+135/686707/campos_512_v4
+135/686714/campos_512_v4
+135/686715/campos_512_v4
+135/686716/campos_512_v4
+135/686718/campos_512_v4
+135/686719/campos_512_v4
+135/686723/campos_512_v4
+135/686737/campos_512_v4
+135/686739/campos_512_v4
+135/686741/campos_512_v4
+135/686756/campos_512_v4
+135/686759/campos_512_v4
+135/686771/campos_512_v4
+135/686781/campos_512_v4
+135/686789/campos_512_v4
+135/686792/campos_512_v4
+135/686793/campos_512_v4
+135/686807/campos_512_v4
+135/686815/campos_512_v4
+135/686823/campos_512_v4
+135/686828/campos_512_v4
+135/686832/campos_512_v4
+135/686835/campos_512_v4
+135/686837/campos_512_v4
+135/686841/campos_512_v4
+135/686843/campos_512_v4
+135/686846/campos_512_v4
+135/686849/campos_512_v4
+135/686855/campos_512_v4
+135/686859/campos_512_v4
+135/686867/campos_512_v4
+135/686871/campos_512_v4
+135/686881/campos_512_v4
+135/686886/campos_512_v4
+135/686898/campos_512_v4
+135/686909/campos_512_v4
+135/686918/campos_512_v4
+135/686921/campos_512_v4
+135/686925/campos_512_v4
+135/686929/campos_512_v4
+135/686938/campos_512_v4
+135/686952/campos_512_v4
+135/686955/campos_512_v4
+135/686981/campos_512_v4
+135/686984/campos_512_v4
+135/686985/campos_512_v4
+135/686994/campos_512_v4
+135/687000/campos_512_v4
+135/687007/campos_512_v4
+135/687008/campos_512_v4
+135/687042/campos_512_v4
+135/687043/campos_512_v4
+135/687047/campos_512_v4
+135/687049/campos_512_v4
+135/687059/campos_512_v4
+135/687097/campos_512_v4
+135/687103/campos_512_v4
+135/687113/campos_512_v4
+135/687119/campos_512_v4
+135/687125/campos_512_v4
+135/687141/campos_512_v4
+135/687158/campos_512_v4
+135/687175/campos_512_v4
+135/687185/campos_512_v4
+135/687193/campos_512_v4
+135/687204/campos_512_v4
+135/687215/campos_512_v4
+135/687223/campos_512_v4
+135/687230/campos_512_v4
+135/687237/campos_512_v4
+135/687247/campos_512_v4
+135/687248/campos_512_v4
+135/687254/campos_512_v4
+135/687267/campos_512_v4
+135/687280/campos_512_v4
+135/687286/campos_512_v4
+135/687299/campos_512_v4
+135/687308/campos_512_v4
+135/687317/campos_512_v4
+135/687340/campos_512_v4
+135/687355/campos_512_v4
+135/687373/campos_512_v4
+135/687375/campos_512_v4
+135/687380/campos_512_v4
+135/687399/campos_512_v4
+135/687411/campos_512_v4
+135/687418/campos_512_v4
+135/687421/campos_512_v4
+135/687437/campos_512_v4
+135/687446/campos_512_v4
+135/687460/campos_512_v4
+135/687490/campos_512_v4
+135/687493/campos_512_v4
+135/687495/campos_512_v4
+135/687502/campos_512_v4
+135/687505/campos_512_v4
+135/687508/campos_512_v4
+135/687511/campos_512_v4
+135/687516/campos_512_v4
+135/687518/campos_512_v4
+135/687519/campos_512_v4
+135/687522/campos_512_v4
+135/687527/campos_512_v4
+135/687528/campos_512_v4
+135/687529/campos_512_v4
+135/687541/campos_512_v4
+135/687547/campos_512_v4
+135/687552/campos_512_v4
+135/687554/campos_512_v4
+135/687570/campos_512_v4
+135/687571/campos_512_v4
+135/687582/campos_512_v4
+135/687615/campos_512_v4
+135/687621/campos_512_v4
+135/687626/campos_512_v4
+135/687630/campos_512_v4
+135/687635/campos_512_v4
+135/687641/campos_512_v4
+135/687643/campos_512_v4
+135/687645/campos_512_v4
+135/687648/campos_512_v4
+135/687653/campos_512_v4
+135/687667/campos_512_v4
+135/687680/campos_512_v4
+135/687682/campos_512_v4
+135/687683/campos_512_v4
+135/687697/campos_512_v4
+135/687698/campos_512_v4
+135/687703/campos_512_v4
+135/687706/campos_512_v4
+135/687711/campos_512_v4
+135/687714/campos_512_v4
+135/687717/campos_512_v4
+135/687718/campos_512_v4
+135/687720/campos_512_v4
+135/687721/campos_512_v4
+135/687725/campos_512_v4
+135/687741/campos_512_v4
+135/687745/campos_512_v4
+135/687763/campos_512_v4
+135/687765/campos_512_v4
+135/687766/campos_512_v4
+135/687779/campos_512_v4
+135/687786/campos_512_v4
+135/687798/campos_512_v4
+135/687810/campos_512_v4
+135/687811/campos_512_v4
+135/687814/campos_512_v4
+135/687816/campos_512_v4
+135/687821/campos_512_v4
+135/687836/campos_512_v4
+135/687851/campos_512_v4
+135/687853/campos_512_v4
+135/687854/campos_512_v4
+135/687865/campos_512_v4
+135/687871/campos_512_v4
+135/687896/campos_512_v4
+135/687908/campos_512_v4
+135/687913/campos_512_v4
+135/687937/campos_512_v4
+135/687943/campos_512_v4
+135/687954/campos_512_v4
+135/687966/campos_512_v4
+135/687974/campos_512_v4
+135/687979/campos_512_v4
+135/687997/campos_512_v4
+135/688024/campos_512_v4
+135/688028/campos_512_v4
+135/688049/campos_512_v4
+135/688051/campos_512_v4
+135/688066/campos_512_v4
+135/688074/campos_512_v4
+135/688084/campos_512_v4
+135/688089/campos_512_v4
+135/688093/campos_512_v4
+135/688098/campos_512_v4
+135/688101/campos_512_v4
+135/688102/campos_512_v4
+135/688106/campos_512_v4
+135/688120/campos_512_v4
+135/688130/campos_512_v4
+135/688135/campos_512_v4
+135/688147/campos_512_v4
+135/688153/campos_512_v4
+135/688163/campos_512_v4
+135/688172/campos_512_v4
+135/688179/campos_512_v4
+135/688198/campos_512_v4
+135/688200/campos_512_v4
+135/688215/campos_512_v4
+135/688216/campos_512_v4
+135/688238/campos_512_v4
+135/688245/campos_512_v4
+135/688253/campos_512_v4
+135/688255/campos_512_v4
+135/688256/campos_512_v4
+135/688260/campos_512_v4
+135/688261/campos_512_v4
+135/688263/campos_512_v4
+135/688271/campos_512_v4
+135/688273/campos_512_v4
+135/688283/campos_512_v4
+135/688286/campos_512_v4
+135/688289/campos_512_v4
+135/688294/campos_512_v4
+135/688302/campos_512_v4
+135/688314/campos_512_v4
+135/688316/campos_512_v4
+135/688331/campos_512_v4
+135/688349/campos_512_v4
+135/688361/campos_512_v4
+135/688398/campos_512_v4
+135/688400/campos_512_v4
+135/688407/campos_512_v4
+135/688412/campos_512_v4
+135/688418/campos_512_v4
+135/688430/campos_512_v4
+135/688437/campos_512_v4
+135/688445/campos_512_v4
+135/688453/campos_512_v4
+135/688458/campos_512_v4
+135/688472/campos_512_v4
+135/688476/campos_512_v4
+135/688480/campos_512_v4
+135/688489/campos_512_v4
+135/688492/campos_512_v4
+135/688499/campos_512_v4
+135/688500/campos_512_v4
+135/688508/campos_512_v4
+135/688512/campos_512_v4
+135/688517/campos_512_v4
+135/688529/campos_512_v4
+135/688531/campos_512_v4
+135/688556/campos_512_v4
+135/688565/campos_512_v4
+135/688566/campos_512_v4
+135/688573/campos_512_v4
+135/688583/campos_512_v4
+135/688585/campos_512_v4
+135/688603/campos_512_v4
+135/688623/campos_512_v4
+135/688627/campos_512_v4
+135/688629/campos_512_v4
+135/688631/campos_512_v4
+135/688633/campos_512_v4
+135/688638/campos_512_v4
+135/688639/campos_512_v4
+135/688646/campos_512_v4
+135/688649/campos_512_v4
+135/688651/campos_512_v4
+135/688652/campos_512_v4
+135/688656/campos_512_v4
+135/688660/campos_512_v4
+135/688664/campos_512_v4
+135/688666/campos_512_v4
+135/688671/campos_512_v4
+135/688685/campos_512_v4
+135/688687/campos_512_v4
+135/688734/campos_512_v4
+135/688758/campos_512_v4
+135/688764/campos_512_v4
+135/688769/campos_512_v4
+135/688776/campos_512_v4
+135/688785/campos_512_v4
+135/688811/campos_512_v4
+135/688812/campos_512_v4
+135/688813/campos_512_v4
+135/688824/campos_512_v4
+135/688825/campos_512_v4
+135/688838/campos_512_v4
+135/688869/campos_512_v4
+135/688881/campos_512_v4
+135/688898/campos_512_v4
+135/688922/campos_512_v4
+135/688940/campos_512_v4
+135/688941/campos_512_v4
+135/688943/campos_512_v4
+135/688945/campos_512_v4
+135/688955/campos_512_v4
+135/688962/campos_512_v4
+135/688965/campos_512_v4
+135/688970/campos_512_v4
+135/688974/campos_512_v4
+135/688977/campos_512_v4
+135/688985/campos_512_v4
+135/688989/campos_512_v4
+135/688998/campos_512_v4
+135/689000/campos_512_v4
+135/689024/campos_512_v4
+135/689054/campos_512_v4
+135/689056/campos_512_v4
+135/689068/campos_512_v4
+135/689076/campos_512_v4
+135/689082/campos_512_v4
+135/689092/campos_512_v4
+135/689096/campos_512_v4
+135/689101/campos_512_v4
+135/689106/campos_512_v4
+135/689113/campos_512_v4
+135/689130/campos_512_v4
+135/689154/campos_512_v4
+135/689162/campos_512_v4
+135/689165/campos_512_v4
+135/689178/campos_512_v4
+135/689185/campos_512_v4
+135/689193/campos_512_v4
+135/689194/campos_512_v4
+135/689199/campos_512_v4
+135/689219/campos_512_v4
+135/689222/campos_512_v4
+135/689231/campos_512_v4
+135/689232/campos_512_v4
+135/689234/campos_512_v4
+135/689236/campos_512_v4
+135/689241/campos_512_v4
+135/689246/campos_512_v4
+135/689247/campos_512_v4
+135/689249/campos_512_v4
+135/689256/campos_512_v4
+135/689269/campos_512_v4
+135/689271/campos_512_v4
+135/689281/campos_512_v4
+135/689292/campos_512_v4
+135/689296/campos_512_v4
+135/689297/campos_512_v4
+135/689302/campos_512_v4
+135/689310/campos_512_v4
+135/689311/campos_512_v4
+135/689322/campos_512_v4
+135/689327/campos_512_v4
+135/689328/campos_512_v4
+135/689330/campos_512_v4
+135/689332/campos_512_v4
+135/689333/campos_512_v4
+135/689335/campos_512_v4
+135/689337/campos_512_v4
+135/689347/campos_512_v4
+135/689364/campos_512_v4
+135/689372/campos_512_v4
+135/689380/campos_512_v4
+135/689393/campos_512_v4
+135/689400/campos_512_v4
+135/689417/campos_512_v4
+135/689420/campos_512_v4
+135/689425/campos_512_v4
+135/689427/campos_512_v4
+135/689439/campos_512_v4
+135/689444/campos_512_v4
+135/689447/campos_512_v4
+135/689448/campos_512_v4
+135/689459/campos_512_v4
+135/689462/campos_512_v4
+135/689471/campos_512_v4
+135/689472/campos_512_v4
+135/689475/campos_512_v4
+135/689483/campos_512_v4
+135/689501/campos_512_v4
+135/689503/campos_512_v4
+135/689507/campos_512_v4
+135/689509/campos_512_v4
+135/689514/campos_512_v4
+135/689521/campos_512_v4
+135/689524/campos_512_v4
+135/689531/campos_512_v4
+135/689542/campos_512_v4
+135/689558/campos_512_v4
+135/689560/campos_512_v4
+135/689562/campos_512_v4
+135/689587/campos_512_v4
+135/689597/campos_512_v4
+135/689603/campos_512_v4
+135/689611/campos_512_v4
+135/689612/campos_512_v4
+135/689615/campos_512_v4
+135/689618/campos_512_v4
+135/689628/campos_512_v4
+135/689648/campos_512_v4
+135/689655/campos_512_v4
+135/689667/campos_512_v4
+135/689697/campos_512_v4
+135/689698/campos_512_v4
+135/689711/campos_512_v4
+135/689715/campos_512_v4
+135/689727/campos_512_v4
+135/689733/campos_512_v4
+135/689734/campos_512_v4
+135/689749/campos_512_v4
+135/689752/campos_512_v4
+135/689781/campos_512_v4
+135/689785/campos_512_v4
+135/689791/campos_512_v4
+135/689809/campos_512_v4
+135/689815/campos_512_v4
+135/689819/campos_512_v4
+135/689833/campos_512_v4
+135/689834/campos_512_v4
+135/689836/campos_512_v4
+135/689849/campos_512_v4
+135/689852/campos_512_v4
+135/689866/campos_512_v4
+135/689879/campos_512_v4
+135/689887/campos_512_v4
+135/689901/campos_512_v4
+135/689907/campos_512_v4
+135/689912/campos_512_v4
+135/689920/campos_512_v4
+135/689923/campos_512_v4
+135/689939/campos_512_v4
+135/689960/campos_512_v4
+135/689961/campos_512_v4
+135/689964/campos_512_v4
+135/689965/campos_512_v4
+135/689969/campos_512_v4
+135/689974/campos_512_v4
+135/689976/campos_512_v4
+135/689979/campos_512_v4
+135/689987/campos_512_v4
+135/689991/campos_512_v4
+135/690000/campos_512_v4
+136/690003/campos_512_v4
+136/690008/campos_512_v4
+136/690012/campos_512_v4
+136/690029/campos_512_v4
+136/690032/campos_512_v4
+136/690033/campos_512_v4
+136/690036/campos_512_v4
+136/690040/campos_512_v4
+136/690042/campos_512_v4
+136/690049/campos_512_v4
+136/690050/campos_512_v4
+136/690051/campos_512_v4
+136/690056/campos_512_v4
+136/690067/campos_512_v4
+136/690069/campos_512_v4
+136/690073/campos_512_v4
+136/690080/campos_512_v4
+136/690087/campos_512_v4
+136/690095/campos_512_v4
+136/690097/campos_512_v4
+136/690104/campos_512_v4
+136/690108/campos_512_v4
+136/690122/campos_512_v4
+136/690127/campos_512_v4
+136/690133/campos_512_v4
+136/690136/campos_512_v4
+136/690140/campos_512_v4
+136/690146/campos_512_v4
+136/690175/campos_512_v4
+136/690176/campos_512_v4
+136/690213/campos_512_v4
+136/690216/campos_512_v4
+136/690222/campos_512_v4
+136/690224/campos_512_v4
+136/690226/campos_512_v4
+136/690229/campos_512_v4
+136/690235/campos_512_v4
+136/690241/campos_512_v4
+136/690243/campos_512_v4
+136/690252/campos_512_v4
+136/690253/campos_512_v4
+136/690258/campos_512_v4
+136/690261/campos_512_v4
+136/690267/campos_512_v4
+136/690268/campos_512_v4
+136/690280/campos_512_v4
+136/690284/campos_512_v4
+136/690293/campos_512_v4
+136/690309/campos_512_v4
+136/690311/campos_512_v4
+136/690315/campos_512_v4
+136/690316/campos_512_v4
+136/690323/campos_512_v4
+136/690330/campos_512_v4
+136/690331/campos_512_v4
+136/690337/campos_512_v4
+136/690338/campos_512_v4
+136/690341/campos_512_v4
+136/690343/campos_512_v4
+136/690350/campos_512_v4
+136/690363/campos_512_v4
+136/690370/campos_512_v4
+136/690375/campos_512_v4
+136/690376/campos_512_v4
+136/690389/campos_512_v4
+136/690394/campos_512_v4
+136/690400/campos_512_v4
+136/690408/campos_512_v4
+136/690424/campos_512_v4
+136/690427/campos_512_v4
+136/690434/campos_512_v4
+136/690436/campos_512_v4
+136/690442/campos_512_v4
+136/690445/campos_512_v4
+136/690449/campos_512_v4
+136/690457/campos_512_v4
+136/690464/campos_512_v4
+136/690487/campos_512_v4
+136/690492/campos_512_v4
+136/690496/campos_512_v4
+136/690498/campos_512_v4
+136/690510/campos_512_v4
+136/690513/campos_512_v4
+136/690526/campos_512_v4
+136/690538/campos_512_v4
+136/690553/campos_512_v4
+136/690561/campos_512_v4
+136/690564/campos_512_v4
+136/690565/campos_512_v4
+136/690577/campos_512_v4
+136/690605/campos_512_v4
+136/690606/campos_512_v4
+136/690620/campos_512_v4
+136/690621/campos_512_v4
+136/690638/campos_512_v4
+136/690642/campos_512_v4
+136/690645/campos_512_v4
+136/690654/campos_512_v4
+136/690661/campos_512_v4
+136/690690/campos_512_v4
+136/690691/campos_512_v4
+136/690693/campos_512_v4
+136/690700/campos_512_v4
+136/690709/campos_512_v4
+136/690714/campos_512_v4
+136/690720/campos_512_v4
+136/690722/campos_512_v4
+136/690748/campos_512_v4
+136/690753/campos_512_v4
+136/690756/campos_512_v4
+136/690762/campos_512_v4
+136/690767/campos_512_v4
+136/690768/campos_512_v4
+136/690769/campos_512_v4
+136/690770/campos_512_v4
+136/690771/campos_512_v4
+136/690774/campos_512_v4
+136/690792/campos_512_v4
+136/690798/campos_512_v4
+136/690811/campos_512_v4
+136/690813/campos_512_v4
+136/690814/campos_512_v4
+136/690816/campos_512_v4
+136/690821/campos_512_v4
+136/690830/campos_512_v4
+136/690841/campos_512_v4
+136/690844/campos_512_v4
+136/690851/campos_512_v4
+136/690852/campos_512_v4
+136/690856/campos_512_v4
+136/690873/campos_512_v4
+136/690876/campos_512_v4
+136/690882/campos_512_v4
+136/690906/campos_512_v4
+136/690919/campos_512_v4
+136/690920/campos_512_v4
+136/690925/campos_512_v4
+136/690932/campos_512_v4
+136/690935/campos_512_v4
+136/690944/campos_512_v4
+136/690946/campos_512_v4
+136/690975/campos_512_v4
+136/690985/campos_512_v4
+136/690987/campos_512_v4
+136/690993/campos_512_v4
+136/690998/campos_512_v4
+136/691003/campos_512_v4
+136/691006/campos_512_v4
+136/691007/campos_512_v4
+136/691011/campos_512_v4
+136/691012/campos_512_v4
+136/691014/campos_512_v4
+136/691023/campos_512_v4
+136/691024/campos_512_v4
+136/691027/campos_512_v4
+136/691036/campos_512_v4
+136/691038/campos_512_v4
+136/691045/campos_512_v4
+136/691048/campos_512_v4
+136/691052/campos_512_v4
+136/691066/campos_512_v4
+136/691073/campos_512_v4
+136/691079/campos_512_v4
+136/691082/campos_512_v4
+136/691084/campos_512_v4
+136/691085/campos_512_v4
+136/691098/campos_512_v4
+136/691106/campos_512_v4
+136/691111/campos_512_v4
+136/691113/campos_512_v4
+136/691124/campos_512_v4
+136/691130/campos_512_v4
+136/691137/campos_512_v4
+136/691141/campos_512_v4
+136/691157/campos_512_v4
+136/691171/campos_512_v4
+136/691172/campos_512_v4
+136/691178/campos_512_v4
+136/691179/campos_512_v4
+136/691188/campos_512_v4
+136/691192/campos_512_v4
+136/691208/campos_512_v4
+136/691212/campos_512_v4
+136/691220/campos_512_v4
+136/691223/campos_512_v4
+136/691224/campos_512_v4
+136/691225/campos_512_v4
+136/691233/campos_512_v4
+136/691237/campos_512_v4
+136/691240/campos_512_v4
+136/691250/campos_512_v4
+136/691260/campos_512_v4
+136/691261/campos_512_v4
+136/691269/campos_512_v4
+136/691270/campos_512_v4
+136/691272/campos_512_v4
+136/691281/campos_512_v4
+136/691282/campos_512_v4
+136/691285/campos_512_v4
+136/691292/campos_512_v4
+136/691307/campos_512_v4
+136/691314/campos_512_v4
+136/691317/campos_512_v4
+136/691327/campos_512_v4
+136/691333/campos_512_v4
+136/691347/campos_512_v4
+136/691350/campos_512_v4
+136/691355/campos_512_v4
+136/691359/campos_512_v4
+136/691362/campos_512_v4
+136/691364/campos_512_v4
+136/691403/campos_512_v4
+136/691407/campos_512_v4
+136/691411/campos_512_v4
+136/691413/campos_512_v4
+136/691425/campos_512_v4
+136/691427/campos_512_v4
+136/691432/campos_512_v4
+136/691439/campos_512_v4
+136/691441/campos_512_v4
+136/691444/campos_512_v4
+136/691465/campos_512_v4
+136/691467/campos_512_v4
+136/691468/campos_512_v4
+136/691474/campos_512_v4
+136/691476/campos_512_v4
+136/691494/campos_512_v4
+136/691512/campos_512_v4
+136/691525/campos_512_v4
+136/691539/campos_512_v4
+136/691550/campos_512_v4
+136/691558/campos_512_v4
+136/691563/campos_512_v4
+136/691580/campos_512_v4
+136/691591/campos_512_v4
+136/691598/campos_512_v4
+136/691608/campos_512_v4
+136/691621/campos_512_v4
+136/691641/campos_512_v4
+136/691644/campos_512_v4
+136/691647/campos_512_v4
+136/691655/campos_512_v4
+136/691656/campos_512_v4
+136/691664/campos_512_v4
+136/691673/campos_512_v4
+136/691709/campos_512_v4
+136/691716/campos_512_v4
+136/691726/campos_512_v4
+136/691743/campos_512_v4
+136/691748/campos_512_v4
+136/691754/campos_512_v4
+136/691759/campos_512_v4
+136/691760/campos_512_v4
+136/691776/campos_512_v4
+136/691778/campos_512_v4
+136/691781/campos_512_v4
+136/691789/campos_512_v4
+136/691792/campos_512_v4
+136/691804/campos_512_v4
+136/691807/campos_512_v4
+136/691809/campos_512_v4
+136/691817/campos_512_v4
+136/691822/campos_512_v4
+136/691823/campos_512_v4
+136/691830/campos_512_v4
+136/691841/campos_512_v4
+136/691847/campos_512_v4
+136/691853/campos_512_v4
+136/691860/campos_512_v4
+136/691861/campos_512_v4
+136/691876/campos_512_v4
+136/691897/campos_512_v4
+136/691903/campos_512_v4
+136/691907/campos_512_v4
+136/691914/campos_512_v4
+136/691915/campos_512_v4
+136/691919/campos_512_v4
+136/691929/campos_512_v4
+136/691934/campos_512_v4
+136/691947/campos_512_v4
+136/691955/campos_512_v4
+136/691959/campos_512_v4
+136/691962/campos_512_v4
+136/691974/campos_512_v4
+136/691986/campos_512_v4
+136/691989/campos_512_v4
+136/691992/campos_512_v4
+136/691994/campos_512_v4
+136/692005/campos_512_v4
+136/692010/campos_512_v4
+136/692012/campos_512_v4
+136/692024/campos_512_v4
+136/692028/campos_512_v4
+136/692029/campos_512_v4
+136/692038/campos_512_v4
+136/692039/campos_512_v4
+136/692048/campos_512_v4
+136/692051/campos_512_v4
+136/692056/campos_512_v4
+136/692058/campos_512_v4
+136/692073/campos_512_v4
+136/692083/campos_512_v4
+136/692090/campos_512_v4
+136/692108/campos_512_v4
+136/692110/campos_512_v4
+136/692118/campos_512_v4
+136/692126/campos_512_v4
+136/692137/campos_512_v4
+136/692141/campos_512_v4
+136/692148/campos_512_v4
+136/692152/campos_512_v4
+136/692154/campos_512_v4
+136/692157/campos_512_v4
+136/692166/campos_512_v4
+136/692172/campos_512_v4
+136/692177/campos_512_v4
+136/692187/campos_512_v4
+136/692189/campos_512_v4
+136/692201/campos_512_v4
+136/692210/campos_512_v4
+136/692215/campos_512_v4
+136/692216/campos_512_v4
+136/692221/campos_512_v4
+136/692229/campos_512_v4
+136/692241/campos_512_v4
+136/692253/campos_512_v4
+136/692255/campos_512_v4
+136/692264/campos_512_v4
+136/692265/campos_512_v4
+136/692279/campos_512_v4
+136/692288/campos_512_v4
+136/692299/campos_512_v4
+136/692300/campos_512_v4
+136/692305/campos_512_v4
+136/692314/campos_512_v4
+136/692331/campos_512_v4
+136/692346/campos_512_v4
+136/692368/campos_512_v4
+136/692373/campos_512_v4
+136/692398/campos_512_v4
+136/692399/campos_512_v4
+136/692402/campos_512_v4
+136/692412/campos_512_v4
+136/692417/campos_512_v4
+136/692420/campos_512_v4
+136/692423/campos_512_v4
+136/692425/campos_512_v4
+136/692438/campos_512_v4
+136/692439/campos_512_v4
+136/692459/campos_512_v4
+136/692460/campos_512_v4
+136/692465/campos_512_v4
+136/692467/campos_512_v4
+136/692470/campos_512_v4
+136/692471/campos_512_v4
+136/692473/campos_512_v4
+136/692475/campos_512_v4
+136/692496/campos_512_v4
+136/692507/campos_512_v4
+136/692521/campos_512_v4
+136/692534/campos_512_v4
+136/692542/campos_512_v4
+136/692543/campos_512_v4
+136/692544/campos_512_v4
+136/692555/campos_512_v4
+136/692558/campos_512_v4
+136/692566/campos_512_v4
+136/692569/campos_512_v4
+136/692572/campos_512_v4
+136/692576/campos_512_v4
+136/692578/campos_512_v4
+136/692584/campos_512_v4
+136/692591/campos_512_v4
+136/692593/campos_512_v4
+136/692607/campos_512_v4
+136/692609/campos_512_v4
+136/692611/campos_512_v4
+136/692618/campos_512_v4
+136/692621/campos_512_v4
+136/692622/campos_512_v4
+136/692627/campos_512_v4
+136/692635/campos_512_v4
+136/692637/campos_512_v4
+136/692638/campos_512_v4
+136/692640/campos_512_v4
+136/692661/campos_512_v4
+136/692664/campos_512_v4
+136/692673/campos_512_v4
+136/692674/campos_512_v4
+136/692679/campos_512_v4
+136/692710/campos_512_v4
+136/692733/campos_512_v4
+136/692735/campos_512_v4
+136/692737/campos_512_v4
+136/692753/campos_512_v4
+136/692757/campos_512_v4
+136/692760/campos_512_v4
+136/692763/campos_512_v4
+136/692767/campos_512_v4
+136/692773/campos_512_v4
+136/692774/campos_512_v4
+136/692790/campos_512_v4
+136/692794/campos_512_v4
+136/692799/campos_512_v4
+136/692804/campos_512_v4
+136/692811/campos_512_v4
+136/692812/campos_512_v4
+136/692826/campos_512_v4
+136/692835/campos_512_v4
+136/692847/campos_512_v4
+136/692855/campos_512_v4
+136/692858/campos_512_v4
+136/692862/campos_512_v4
+136/692864/campos_512_v4
+136/692866/campos_512_v4
+136/692873/campos_512_v4
+136/692876/campos_512_v4
+136/692878/campos_512_v4
+136/692894/campos_512_v4
+136/692905/campos_512_v4
+136/692917/campos_512_v4
+136/692929/campos_512_v4
+136/692930/campos_512_v4
+136/692940/campos_512_v4
+136/692943/campos_512_v4
+136/692948/campos_512_v4
+136/692950/campos_512_v4
+136/692962/campos_512_v4
+136/692964/campos_512_v4
+136/692969/campos_512_v4
+136/692978/campos_512_v4
+136/692982/campos_512_v4
+136/693001/campos_512_v4
+136/693011/campos_512_v4
+136/693017/campos_512_v4
+136/693018/campos_512_v4
+136/693027/campos_512_v4
+136/693039/campos_512_v4
+136/693056/campos_512_v4
+136/693075/campos_512_v4
+136/693088/campos_512_v4
+136/693111/campos_512_v4
+136/693130/campos_512_v4
+136/693137/campos_512_v4
+136/693160/campos_512_v4
+136/693163/campos_512_v4
+136/693178/campos_512_v4
+136/693180/campos_512_v4
+136/693184/campos_512_v4
+136/693213/campos_512_v4
+136/693217/campos_512_v4
+136/693223/campos_512_v4
+136/693231/campos_512_v4
+136/693250/campos_512_v4
+136/693269/campos_512_v4
+136/693274/campos_512_v4
+136/693282/campos_512_v4
+136/693288/campos_512_v4
+136/693292/campos_512_v4
+136/693300/campos_512_v4
+136/693301/campos_512_v4
+136/693304/campos_512_v4
+136/693310/campos_512_v4
+136/693315/campos_512_v4
+136/693320/campos_512_v4
+136/693326/campos_512_v4
+136/693329/campos_512_v4
+136/693331/campos_512_v4
+136/693337/campos_512_v4
+136/693338/campos_512_v4
+136/693341/campos_512_v4
+136/693344/campos_512_v4
+136/693350/campos_512_v4
+136/693352/campos_512_v4
+136/693365/campos_512_v4
+136/693372/campos_512_v4
+136/693374/campos_512_v4
+136/693377/campos_512_v4
+136/693378/campos_512_v4
+136/693381/campos_512_v4
+136/693382/campos_512_v4
+136/693386/campos_512_v4
+136/693389/campos_512_v4
+136/693390/campos_512_v4
+136/693392/campos_512_v4
+136/693427/campos_512_v4
+136/693428/campos_512_v4
+136/693430/campos_512_v4
+136/693437/campos_512_v4
+136/693448/campos_512_v4
+136/693455/campos_512_v4
+136/693456/campos_512_v4
+136/693468/campos_512_v4
+136/693472/campos_512_v4
+136/693495/campos_512_v4
+136/693502/campos_512_v4
+136/693507/campos_512_v4
+136/693513/campos_512_v4
+136/693521/campos_512_v4
+136/693524/campos_512_v4
+136/693528/campos_512_v4
+136/693529/campos_512_v4
+136/693533/campos_512_v4
+136/693552/campos_512_v4
+136/693555/campos_512_v4
+136/693557/campos_512_v4
+136/693561/campos_512_v4
+136/693572/campos_512_v4
+136/693573/campos_512_v4
+136/693578/campos_512_v4
+136/693587/campos_512_v4
+136/693590/campos_512_v4
+136/693593/campos_512_v4
+136/693600/campos_512_v4
+136/693631/campos_512_v4
+136/693632/campos_512_v4
+136/693634/campos_512_v4
+136/693655/campos_512_v4
+136/693658/campos_512_v4
+136/693660/campos_512_v4
+136/693664/campos_512_v4
+136/693665/campos_512_v4
+136/693667/campos_512_v4
+136/693672/campos_512_v4
+136/693674/campos_512_v4
+136/693688/campos_512_v4
+136/693699/campos_512_v4
+136/693701/campos_512_v4
+136/693711/campos_512_v4
+136/693713/campos_512_v4
+136/693727/campos_512_v4
+136/693731/campos_512_v4
+136/693732/campos_512_v4
+136/693734/campos_512_v4
+136/693748/campos_512_v4
+136/693763/campos_512_v4
+136/693770/campos_512_v4
+136/693774/campos_512_v4
+136/693779/campos_512_v4
+136/693787/campos_512_v4
+136/693790/campos_512_v4
+136/693792/campos_512_v4
+136/693795/campos_512_v4
+136/693796/campos_512_v4
+136/693801/campos_512_v4
+136/693806/campos_512_v4
+136/693807/campos_512_v4
+136/693810/campos_512_v4
+136/693812/campos_512_v4
+136/693818/campos_512_v4
+136/693820/campos_512_v4
+136/693828/campos_512_v4
+136/693843/campos_512_v4
+136/693846/campos_512_v4
+136/693865/campos_512_v4
+136/693877/campos_512_v4
+136/693884/campos_512_v4
+136/693891/campos_512_v4
+136/693932/campos_512_v4
+136/693944/campos_512_v4
+136/693955/campos_512_v4
+136/693982/campos_512_v4
+136/693983/campos_512_v4
+136/693989/campos_512_v4
+136/693990/campos_512_v4
+136/693996/campos_512_v4
+136/694001/campos_512_v4
+136/694005/campos_512_v4
+136/694006/campos_512_v4
+136/694016/campos_512_v4
+136/694018/campos_512_v4
+136/694027/campos_512_v4
+136/694040/campos_512_v4
+136/694054/campos_512_v4
+136/694056/campos_512_v4
+136/694061/campos_512_v4
+136/694069/campos_512_v4
+136/694075/campos_512_v4
+136/694078/campos_512_v4
+136/694095/campos_512_v4
+136/694108/campos_512_v4
+136/694112/campos_512_v4
+136/694116/campos_512_v4
+136/694117/campos_512_v4
+136/694121/campos_512_v4
+136/694133/campos_512_v4
+136/694139/campos_512_v4
+136/694141/campos_512_v4
+136/694151/campos_512_v4
+136/694153/campos_512_v4
+136/694155/campos_512_v4
+136/694162/campos_512_v4
+136/694171/campos_512_v4
+136/694181/campos_512_v4
+136/694194/campos_512_v4
+136/694200/campos_512_v4
+136/694208/campos_512_v4
+136/694211/campos_512_v4
+136/694221/campos_512_v4
+136/694223/campos_512_v4
+136/694235/campos_512_v4
+136/694240/campos_512_v4
+136/694243/campos_512_v4
+136/694251/campos_512_v4
+136/694262/campos_512_v4
+136/694265/campos_512_v4
+136/694267/campos_512_v4
+136/694274/campos_512_v4
+136/694286/campos_512_v4
+136/694293/campos_512_v4
+136/694302/campos_512_v4
+136/694309/campos_512_v4
+136/694312/campos_512_v4
+136/694318/campos_512_v4
+136/694332/campos_512_v4
+136/694334/campos_512_v4
+136/694345/campos_512_v4
+136/694360/campos_512_v4
+136/694369/campos_512_v4
+136/694371/campos_512_v4
+136/694373/campos_512_v4
+136/694379/campos_512_v4
+136/694380/campos_512_v4
+136/694384/campos_512_v4
+136/694385/campos_512_v4
+136/694404/campos_512_v4
+136/694411/campos_512_v4
+136/694412/campos_512_v4
+136/694414/campos_512_v4
+136/694418/campos_512_v4
+136/694439/campos_512_v4
+136/694447/campos_512_v4
+136/694462/campos_512_v4
+136/694468/campos_512_v4
+136/694470/campos_512_v4
+136/694477/campos_512_v4
+136/694488/campos_512_v4
+136/694499/campos_512_v4
+136/694501/campos_512_v4
+136/694503/campos_512_v4
+136/694508/campos_512_v4
+136/694510/campos_512_v4
+136/694521/campos_512_v4
+136/694530/campos_512_v4
+136/694549/campos_512_v4
+136/694557/campos_512_v4
+136/694575/campos_512_v4
+136/694579/campos_512_v4
+136/694584/campos_512_v4
+136/694597/campos_512_v4
+136/694598/campos_512_v4
+136/694602/campos_512_v4
+136/694605/campos_512_v4
+136/694610/campos_512_v4
+136/694618/campos_512_v4
+136/694619/campos_512_v4
+136/694626/campos_512_v4
+136/694627/campos_512_v4
+136/694631/campos_512_v4
+136/694635/campos_512_v4
+136/694642/campos_512_v4
+136/694655/campos_512_v4
+136/694667/campos_512_v4
+136/694676/campos_512_v4
+136/694681/campos_512_v4
+136/694685/campos_512_v4
+136/694688/campos_512_v4
+136/694695/campos_512_v4
+136/694699/campos_512_v4
+136/694708/campos_512_v4
+136/694713/campos_512_v4
+136/694722/campos_512_v4
+136/694727/campos_512_v4
+136/694733/campos_512_v4
+136/694752/campos_512_v4
+136/694762/campos_512_v4
+136/694765/campos_512_v4
+136/694769/campos_512_v4
+136/694775/campos_512_v4
+136/694783/campos_512_v4
+136/694787/campos_512_v4
+136/694791/campos_512_v4
+136/694802/campos_512_v4
+136/694806/campos_512_v4
+136/694811/campos_512_v4
+136/694814/campos_512_v4
+136/694829/campos_512_v4
+136/694849/campos_512_v4
+136/694854/campos_512_v4
+136/694866/campos_512_v4
+136/694876/campos_512_v4
+136/694877/campos_512_v4
+136/694882/campos_512_v4
+136/694884/campos_512_v4
+136/694896/campos_512_v4
+136/694899/campos_512_v4
+136/694905/campos_512_v4
+136/694914/campos_512_v4
+136/694917/campos_512_v4
+136/694928/campos_512_v4
+136/694932/campos_512_v4
+136/694939/campos_512_v4
+136/694959/campos_512_v4
+136/694960/campos_512_v4
+136/694969/campos_512_v4
+136/694970/campos_512_v4
+136/694975/campos_512_v4
+136/694976/campos_512_v4
+136/694978/campos_512_v4
+136/694985/campos_512_v4
+137/695016/campos_512_v4
+137/695029/campos_512_v4
+137/695045/campos_512_v4
+137/695051/campos_512_v4
+137/695066/campos_512_v4
+137/695069/campos_512_v4
+137/695072/campos_512_v4
+137/695077/campos_512_v4
+137/695084/campos_512_v4
+137/695086/campos_512_v4
+137/695087/campos_512_v4
+137/695093/campos_512_v4
+137/695097/campos_512_v4
+137/695100/campos_512_v4
+137/695101/campos_512_v4
+137/695102/campos_512_v4
+137/695106/campos_512_v4
+137/695114/campos_512_v4
+137/695117/campos_512_v4
+137/695119/campos_512_v4
+137/695130/campos_512_v4
+137/695136/campos_512_v4
+137/695141/campos_512_v4
+137/695146/campos_512_v4
+137/695152/campos_512_v4
+137/695177/campos_512_v4
+137/695180/campos_512_v4
+137/695183/campos_512_v4
+137/695193/campos_512_v4
+137/695194/campos_512_v4
+137/695202/campos_512_v4
+137/695204/campos_512_v4
+137/695226/campos_512_v4
+137/695227/campos_512_v4
+137/695250/campos_512_v4
+137/695260/campos_512_v4
+137/695270/campos_512_v4
+137/695271/campos_512_v4
+137/695277/campos_512_v4
+137/695279/campos_512_v4
+137/695286/campos_512_v4
+137/695291/campos_512_v4
+137/695294/campos_512_v4
+137/695298/campos_512_v4
+137/695299/campos_512_v4
+137/695306/campos_512_v4
+137/695312/campos_512_v4
+137/695314/campos_512_v4
+137/695322/campos_512_v4
+137/695327/campos_512_v4
+137/695329/campos_512_v4
+137/695342/campos_512_v4
+137/695351/campos_512_v4
+137/695362/campos_512_v4
+137/695376/campos_512_v4
+137/695377/campos_512_v4
+137/695383/campos_512_v4
+137/695384/campos_512_v4
+137/695394/campos_512_v4
+137/695400/campos_512_v4
+137/695403/campos_512_v4
+137/695406/campos_512_v4
+137/695407/campos_512_v4
+137/695417/campos_512_v4
+137/695421/campos_512_v4
+137/695422/campos_512_v4
+137/695429/campos_512_v4
+137/695433/campos_512_v4
+137/695434/campos_512_v4
+137/695436/campos_512_v4
+137/695438/campos_512_v4
+137/695449/campos_512_v4
+137/695475/campos_512_v4
+137/695476/campos_512_v4
+137/695497/campos_512_v4
+137/695504/campos_512_v4
+137/695509/campos_512_v4
+137/695545/campos_512_v4
+137/695554/campos_512_v4
+137/695562/campos_512_v4
+137/695568/campos_512_v4
+137/695571/campos_512_v4
+137/695577/campos_512_v4
+137/695579/campos_512_v4
+137/695584/campos_512_v4
+137/695600/campos_512_v4
+137/695608/campos_512_v4
+137/695609/campos_512_v4
+137/695620/campos_512_v4
+137/695624/campos_512_v4
+137/695625/campos_512_v4
+137/695651/campos_512_v4
+137/695653/campos_512_v4
+137/695655/campos_512_v4
+137/695656/campos_512_v4
+137/695669/campos_512_v4
+137/695707/campos_512_v4
+137/695718/campos_512_v4
+137/695743/campos_512_v4
+137/695744/campos_512_v4
+137/695754/campos_512_v4
+137/695767/campos_512_v4
+137/695768/campos_512_v4
+137/695771/campos_512_v4
+137/695777/campos_512_v4
+137/695786/campos_512_v4
+137/695790/campos_512_v4
+137/695791/campos_512_v4
+137/695797/campos_512_v4
+137/695799/campos_512_v4
+137/695806/campos_512_v4
+137/695808/campos_512_v4
+137/695812/campos_512_v4
+137/695819/campos_512_v4
+137/695841/campos_512_v4
+137/695842/campos_512_v4
+137/695852/campos_512_v4
+137/695857/campos_512_v4
+137/695858/campos_512_v4
+137/695866/campos_512_v4
+137/695872/campos_512_v4
+137/695874/campos_512_v4
+137/695878/campos_512_v4
+137/695882/campos_512_v4
+137/695892/campos_512_v4
+137/695898/campos_512_v4
+137/695902/campos_512_v4
+137/695903/campos_512_v4
+137/695911/campos_512_v4
+137/695912/campos_512_v4
+137/695932/campos_512_v4
+137/695933/campos_512_v4
+137/695934/campos_512_v4
+137/695935/campos_512_v4
+137/695942/campos_512_v4
+137/695952/campos_512_v4
+137/695968/campos_512_v4
+137/695973/campos_512_v4
+137/695979/campos_512_v4
+137/695982/campos_512_v4
+137/695996/campos_512_v4
+137/696004/campos_512_v4
+137/696009/campos_512_v4
+137/696014/campos_512_v4
+137/696032/campos_512_v4
+137/696039/campos_512_v4
+137/696045/campos_512_v4
+137/696048/campos_512_v4
+137/696058/campos_512_v4
+137/696063/campos_512_v4
+137/696081/campos_512_v4
+137/696086/campos_512_v4
+137/696104/campos_512_v4
+137/696111/campos_512_v4
+137/696114/campos_512_v4
+137/696116/campos_512_v4
+137/696134/campos_512_v4
+137/696138/campos_512_v4
+137/696142/campos_512_v4
+137/696147/campos_512_v4
+137/696155/campos_512_v4
+137/696172/campos_512_v4
+137/696173/campos_512_v4
+137/696177/campos_512_v4
+137/696180/campos_512_v4
+137/696184/campos_512_v4
+137/696223/campos_512_v4
+137/696227/campos_512_v4
+137/696245/campos_512_v4
+137/696247/campos_512_v4
+137/696254/campos_512_v4
+137/696257/campos_512_v4
+137/696267/campos_512_v4
+137/696275/campos_512_v4
+137/696300/campos_512_v4
+137/696314/campos_512_v4
+137/696321/campos_512_v4
+137/696340/campos_512_v4
+137/696342/campos_512_v4
+137/696350/campos_512_v4
+137/696356/campos_512_v4
+137/696359/campos_512_v4
+137/696360/campos_512_v4
+137/696373/campos_512_v4
+137/696397/campos_512_v4
+137/696405/campos_512_v4
+137/696407/campos_512_v4
+137/696409/campos_512_v4
+137/696411/campos_512_v4
+137/696416/campos_512_v4
+137/696427/campos_512_v4
+137/696428/campos_512_v4
+137/696434/campos_512_v4
+137/696454/campos_512_v4
+137/696460/campos_512_v4
+137/696461/campos_512_v4
+137/696462/campos_512_v4
+137/696475/campos_512_v4
+137/696489/campos_512_v4
+137/696507/campos_512_v4
+137/696514/campos_512_v4
+137/696522/campos_512_v4
+137/696533/campos_512_v4
+137/696536/campos_512_v4
+137/696543/campos_512_v4
+137/696547/campos_512_v4
+137/696549/campos_512_v4
+137/696550/campos_512_v4
+137/696554/campos_512_v4
+137/696562/campos_512_v4
+137/696571/campos_512_v4
+137/696572/campos_512_v4
+137/696573/campos_512_v4
+137/696574/campos_512_v4
+137/696587/campos_512_v4
+137/696611/campos_512_v4
+137/696644/campos_512_v4
+137/696645/campos_512_v4
+137/696648/campos_512_v4
+137/696655/campos_512_v4
+137/696674/campos_512_v4
+137/696678/campos_512_v4
+137/696685/campos_512_v4
+137/696692/campos_512_v4
+137/696696/campos_512_v4
+137/696718/campos_512_v4
+137/696729/campos_512_v4
+137/696733/campos_512_v4
+137/696740/campos_512_v4
+137/696746/campos_512_v4
+137/696747/campos_512_v4
+137/696750/campos_512_v4
+137/696756/campos_512_v4
+137/696760/campos_512_v4
+137/696762/campos_512_v4
+137/696770/campos_512_v4
+137/696773/campos_512_v4
+137/696775/campos_512_v4
+137/696778/campos_512_v4
+137/696784/campos_512_v4
+137/696786/campos_512_v4
+137/696793/campos_512_v4
+137/696804/campos_512_v4
+137/696809/campos_512_v4
+137/696819/campos_512_v4
+137/696827/campos_512_v4
+137/696834/campos_512_v4
+137/696853/campos_512_v4
+137/696857/campos_512_v4
+137/696859/campos_512_v4
+137/696869/campos_512_v4
+137/696870/campos_512_v4
+137/696883/campos_512_v4
+137/696884/campos_512_v4
+137/696890/campos_512_v4
+137/696898/campos_512_v4
+137/696900/campos_512_v4
+137/696925/campos_512_v4
+137/696926/campos_512_v4
+137/696947/campos_512_v4
+137/696957/campos_512_v4
+137/696960/campos_512_v4
+137/696971/campos_512_v4
+137/696977/campos_512_v4
+137/696979/campos_512_v4
+137/696985/campos_512_v4
+137/696986/campos_512_v4
+137/696996/campos_512_v4
+137/697030/campos_512_v4
+137/697037/campos_512_v4
+137/697047/campos_512_v4
+137/697049/campos_512_v4
+137/697059/campos_512_v4
+137/697061/campos_512_v4
+137/697078/campos_512_v4
+137/697083/campos_512_v4
+137/697086/campos_512_v4
+137/697088/campos_512_v4
+137/697092/campos_512_v4
+137/697106/campos_512_v4
+137/697109/campos_512_v4
+137/697126/campos_512_v4
+137/697128/campos_512_v4
+137/697131/campos_512_v4
+137/697147/campos_512_v4
+137/697150/campos_512_v4
+137/697151/campos_512_v4
+137/697152/campos_512_v4
+137/697153/campos_512_v4
+137/697154/campos_512_v4
+137/697160/campos_512_v4
+137/697164/campos_512_v4
+137/697167/campos_512_v4
+137/697186/campos_512_v4
+137/697189/campos_512_v4
+137/697191/campos_512_v4
+137/697198/campos_512_v4
+137/697219/campos_512_v4
+137/697223/campos_512_v4
+137/697227/campos_512_v4
+137/697231/campos_512_v4
+137/697246/campos_512_v4
+137/697250/campos_512_v4
+137/697259/campos_512_v4
+137/697262/campos_512_v4
+137/697282/campos_512_v4
+137/697285/campos_512_v4
+137/697298/campos_512_v4
+137/697301/campos_512_v4
+137/697308/campos_512_v4
+137/697315/campos_512_v4
+137/697325/campos_512_v4
+137/697328/campos_512_v4
+137/697329/campos_512_v4
+137/697332/campos_512_v4
+137/697345/campos_512_v4
+137/697360/campos_512_v4
+137/697371/campos_512_v4
+137/697375/campos_512_v4
+137/697376/campos_512_v4
+137/697389/campos_512_v4
+137/697401/campos_512_v4
+137/697405/campos_512_v4
+137/697413/campos_512_v4
+137/697415/campos_512_v4
+137/697426/campos_512_v4
+137/697427/campos_512_v4
+137/697435/campos_512_v4
+137/697441/campos_512_v4
+137/697443/campos_512_v4
+137/697492/campos_512_v4
+137/697495/campos_512_v4
+137/697501/campos_512_v4
+137/697502/campos_512_v4
+137/697504/campos_512_v4
+137/697506/campos_512_v4
+137/697515/campos_512_v4
+137/697523/campos_512_v4
+137/697524/campos_512_v4
+137/697536/campos_512_v4
+137/697551/campos_512_v4
+137/697557/campos_512_v4
+137/697560/campos_512_v4
+137/697573/campos_512_v4
+137/697579/campos_512_v4
+137/697581/campos_512_v4
+137/697585/campos_512_v4
+137/697601/campos_512_v4
+137/697603/campos_512_v4
+137/697604/campos_512_v4
+137/697624/campos_512_v4
+137/697625/campos_512_v4
+137/697642/campos_512_v4
+137/697648/campos_512_v4
+137/697652/campos_512_v4
+137/697660/campos_512_v4
+137/697681/campos_512_v4
+137/697687/campos_512_v4
+137/697692/campos_512_v4
+137/697717/campos_512_v4
+137/697723/campos_512_v4
+137/697731/campos_512_v4
+137/697735/campos_512_v4
+137/697750/campos_512_v4
+137/697762/campos_512_v4
+137/697763/campos_512_v4
+137/697775/campos_512_v4
+137/697787/campos_512_v4
+137/697824/campos_512_v4
+137/697831/campos_512_v4
+137/697849/campos_512_v4
+137/697856/campos_512_v4
+137/697863/campos_512_v4
+137/697878/campos_512_v4
+137/697892/campos_512_v4
+137/697893/campos_512_v4
+137/697909/campos_512_v4
+137/697916/campos_512_v4
+137/697921/campos_512_v4
+137/697924/campos_512_v4
+137/697937/campos_512_v4
+137/697941/campos_512_v4
+137/697943/campos_512_v4
+137/697949/campos_512_v4
+137/697951/campos_512_v4
+137/697958/campos_512_v4
+137/697970/campos_512_v4
+137/697971/campos_512_v4
+137/697978/campos_512_v4
+137/697983/campos_512_v4
+137/697986/campos_512_v4
+137/697989/campos_512_v4
+137/697993/campos_512_v4
+137/698000/campos_512_v4
+137/698018/campos_512_v4
+137/698030/campos_512_v4
+137/698035/campos_512_v4
+137/698060/campos_512_v4
+137/698063/campos_512_v4
+137/698070/campos_512_v4
+137/698081/campos_512_v4
+137/698084/campos_512_v4
+137/698090/campos_512_v4
+137/698093/campos_512_v4
+137/698101/campos_512_v4
+137/698110/campos_512_v4
+137/698112/campos_512_v4
+137/698115/campos_512_v4
+137/698122/campos_512_v4
+137/698124/campos_512_v4
+137/698128/campos_512_v4
+137/698135/campos_512_v4
+137/698136/campos_512_v4
+137/698138/campos_512_v4
+137/698167/campos_512_v4
+137/698169/campos_512_v4
+137/698176/campos_512_v4
+137/698189/campos_512_v4
+137/698195/campos_512_v4
+137/698223/campos_512_v4
+137/698245/campos_512_v4
+137/698261/campos_512_v4
+137/698267/campos_512_v4
+137/698298/campos_512_v4
+137/698301/campos_512_v4
+137/698326/campos_512_v4
+137/698327/campos_512_v4
+137/698330/campos_512_v4
+137/698337/campos_512_v4
+137/698343/campos_512_v4
+137/698362/campos_512_v4
+137/698371/campos_512_v4
+137/698381/campos_512_v4
+137/698396/campos_512_v4
+137/698401/campos_512_v4
+137/698407/campos_512_v4
+137/698415/campos_512_v4
+137/698416/campos_512_v4
+137/698422/campos_512_v4
+137/698425/campos_512_v4
+137/698433/campos_512_v4
+137/698440/campos_512_v4
+137/698457/campos_512_v4
+137/698460/campos_512_v4
+137/698472/campos_512_v4
+137/698485/campos_512_v4
+137/698487/campos_512_v4
+137/698492/campos_512_v4
+137/698496/campos_512_v4
+137/698509/campos_512_v4
+137/698536/campos_512_v4
+137/698542/campos_512_v4
+137/698553/campos_512_v4
+137/698557/campos_512_v4
+137/698560/campos_512_v4
+137/698561/campos_512_v4
+137/698570/campos_512_v4
+137/698575/campos_512_v4
+137/698577/campos_512_v4
+137/698581/campos_512_v4
+137/698588/campos_512_v4
+137/698603/campos_512_v4
+137/698625/campos_512_v4
+137/698648/campos_512_v4
+137/698661/campos_512_v4
+137/698663/campos_512_v4
+137/698676/campos_512_v4
+137/698680/campos_512_v4
+137/698704/campos_512_v4
+137/698722/campos_512_v4
+137/698728/campos_512_v4
+137/698731/campos_512_v4
+137/698736/campos_512_v4
+137/698740/campos_512_v4
+137/698745/campos_512_v4
+137/698746/campos_512_v4
+137/698752/campos_512_v4
+137/698761/campos_512_v4
+137/698773/campos_512_v4
+137/698784/campos_512_v4
+137/698785/campos_512_v4
+137/698788/campos_512_v4
+137/698806/campos_512_v4
+137/698808/campos_512_v4
+137/698810/campos_512_v4
+137/698812/campos_512_v4
+137/698813/campos_512_v4
+137/698815/campos_512_v4
+137/698816/campos_512_v4
+137/698818/campos_512_v4
+137/698834/campos_512_v4
+137/698839/campos_512_v4
+137/698840/campos_512_v4
+137/698858/campos_512_v4
+137/698883/campos_512_v4
+137/698888/campos_512_v4
+137/698897/campos_512_v4
+137/698903/campos_512_v4
+137/698911/campos_512_v4
+137/698913/campos_512_v4
+137/698915/campos_512_v4
+137/698939/campos_512_v4
+137/698940/campos_512_v4
+137/698950/campos_512_v4
+137/698963/campos_512_v4
+137/698967/campos_512_v4
+137/699001/campos_512_v4
+137/699002/campos_512_v4
+137/699004/campos_512_v4
+137/699012/campos_512_v4
+137/699034/campos_512_v4
+137/699036/campos_512_v4
+137/699039/campos_512_v4
+137/699047/campos_512_v4
+137/699050/campos_512_v4
+137/699054/campos_512_v4
+137/699075/campos_512_v4
+137/699076/campos_512_v4
+137/699077/campos_512_v4
+137/699079/campos_512_v4
+137/699086/campos_512_v4
+137/699090/campos_512_v4
+137/699094/campos_512_v4
+137/699105/campos_512_v4
+137/699113/campos_512_v4
+137/699114/campos_512_v4
+137/699134/campos_512_v4
+137/699136/campos_512_v4
+137/699139/campos_512_v4
+137/699157/campos_512_v4
+137/699158/campos_512_v4
+137/699162/campos_512_v4
+137/699167/campos_512_v4
+137/699170/campos_512_v4
+137/699176/campos_512_v4
+137/699179/campos_512_v4
+137/699201/campos_512_v4
+137/699202/campos_512_v4
+137/699206/campos_512_v4
+137/699207/campos_512_v4
+137/699210/campos_512_v4
+137/699211/campos_512_v4
+137/699214/campos_512_v4
+137/699219/campos_512_v4
+137/699223/campos_512_v4
+137/699230/campos_512_v4
+137/699235/campos_512_v4
+137/699241/campos_512_v4
+137/699252/campos_512_v4
+137/699258/campos_512_v4
+137/699260/campos_512_v4
+137/699264/campos_512_v4
+137/699279/campos_512_v4
+137/699283/campos_512_v4
+137/699284/campos_512_v4
+137/699290/campos_512_v4
+137/699296/campos_512_v4
+137/699306/campos_512_v4
+137/699318/campos_512_v4
+137/699321/campos_512_v4
+137/699328/campos_512_v4
+137/699335/campos_512_v4
+137/699342/campos_512_v4
+137/699345/campos_512_v4
+137/699351/campos_512_v4
+137/699357/campos_512_v4
+137/699359/campos_512_v4
+137/699360/campos_512_v4
+137/699362/campos_512_v4
+137/699367/campos_512_v4
+137/699370/campos_512_v4
+137/699378/campos_512_v4
+137/699383/campos_512_v4
+137/699391/campos_512_v4
+137/699396/campos_512_v4
+137/699399/campos_512_v4
+137/699406/campos_512_v4
+137/699422/campos_512_v4
+137/699427/campos_512_v4
+137/699428/campos_512_v4
+137/699432/campos_512_v4
+137/699433/campos_512_v4
+137/699438/campos_512_v4
+137/699442/campos_512_v4
+137/699457/campos_512_v4
+137/699472/campos_512_v4
+137/699487/campos_512_v4
+137/699488/campos_512_v4
+137/699502/campos_512_v4
+137/699518/campos_512_v4
+137/699520/campos_512_v4
+137/699524/campos_512_v4
+137/699528/campos_512_v4
+137/699531/campos_512_v4
+137/699532/campos_512_v4
+137/699545/campos_512_v4
+137/699547/campos_512_v4
+137/699550/campos_512_v4
+137/699558/campos_512_v4
+137/699563/campos_512_v4
+137/699568/campos_512_v4
+137/699570/campos_512_v4
+137/699571/campos_512_v4
+137/699577/campos_512_v4
+137/699579/campos_512_v4
+137/699589/campos_512_v4
+137/699596/campos_512_v4
+137/699597/campos_512_v4
+137/699603/campos_512_v4
+137/699610/campos_512_v4
+137/699622/campos_512_v4
+137/699636/campos_512_v4
+137/699638/campos_512_v4
+137/699648/campos_512_v4
+137/699659/campos_512_v4
+137/699662/campos_512_v4
+137/699673/campos_512_v4
+137/699678/campos_512_v4
+137/699684/campos_512_v4
+137/699686/campos_512_v4
+137/699690/campos_512_v4
+137/699693/campos_512_v4
+137/699705/campos_512_v4
+137/699711/campos_512_v4
+137/699713/campos_512_v4
+137/699714/campos_512_v4
+137/699722/campos_512_v4
+137/699726/campos_512_v4
+137/699732/campos_512_v4
+137/699733/campos_512_v4
+137/699744/campos_512_v4
+137/699747/campos_512_v4
+137/699753/campos_512_v4
+137/699760/campos_512_v4
+137/699761/campos_512_v4
+137/699766/campos_512_v4
+137/699778/campos_512_v4
+137/699781/campos_512_v4
+137/699796/campos_512_v4
+137/699808/campos_512_v4
+137/699810/campos_512_v4
+137/699814/campos_512_v4
+137/699818/campos_512_v4
+137/699831/campos_512_v4
+137/699835/campos_512_v4
+137/699841/campos_512_v4
+137/699856/campos_512_v4
+137/699867/campos_512_v4
+137/699892/campos_512_v4
+137/699899/campos_512_v4
+137/699914/campos_512_v4
+137/699918/campos_512_v4
+137/699924/campos_512_v4
+137/699932/campos_512_v4
+137/699936/campos_512_v4
+137/699946/campos_512_v4
+137/699952/campos_512_v4
+137/699953/campos_512_v4
+137/699968/campos_512_v4
+137/699976/campos_512_v4
+137/699985/campos_512_v4
+137/699988/campos_512_v4
+137/699991/campos_512_v4
+137/699992/campos_512_v4
+137/699994/campos_512_v4
+138/700007/campos_512_v4
+138/700019/campos_512_v4
+138/700024/campos_512_v4
+138/700032/campos_512_v4
+138/700036/campos_512_v4
+138/700048/campos_512_v4
+138/700051/campos_512_v4
+138/700064/campos_512_v4
+138/700065/campos_512_v4
+138/700068/campos_512_v4
+138/700076/campos_512_v4
+138/700087/campos_512_v4
+138/700091/campos_512_v4
+138/700100/campos_512_v4
+138/700102/campos_512_v4
+138/700117/campos_512_v4
+138/700122/campos_512_v4
+138/700128/campos_512_v4
+138/700130/campos_512_v4
+138/700133/campos_512_v4
+138/700138/campos_512_v4
+138/700144/campos_512_v4
+138/700152/campos_512_v4
+138/700154/campos_512_v4
+138/700161/campos_512_v4
+138/700163/campos_512_v4
+138/700168/campos_512_v4
+138/700172/campos_512_v4
+138/700176/campos_512_v4
+138/700185/campos_512_v4
+138/700189/campos_512_v4
+138/700191/campos_512_v4
+138/700198/campos_512_v4
+138/700199/campos_512_v4
+138/700212/campos_512_v4
+138/700218/campos_512_v4
+138/700221/campos_512_v4
+138/700247/campos_512_v4
+138/700251/campos_512_v4
+138/700258/campos_512_v4
+138/700264/campos_512_v4
+138/700267/campos_512_v4
+138/700269/campos_512_v4
+138/700281/campos_512_v4
+138/700291/campos_512_v4
+138/700293/campos_512_v4
+138/700300/campos_512_v4
+138/700306/campos_512_v4
+138/700313/campos_512_v4
+138/700314/campos_512_v4
+138/700318/campos_512_v4
+138/700324/campos_512_v4
+138/700329/campos_512_v4
+138/700330/campos_512_v4
+138/700333/campos_512_v4
+138/700335/campos_512_v4
+138/700345/campos_512_v4
+138/700357/campos_512_v4
+138/700360/campos_512_v4
+138/700361/campos_512_v4
+138/700362/campos_512_v4
+138/700367/campos_512_v4
+138/700385/campos_512_v4
+138/700392/campos_512_v4
+138/700400/campos_512_v4
+138/700411/campos_512_v4
+138/700419/campos_512_v4
+138/700439/campos_512_v4
+138/700445/campos_512_v4
+138/700447/campos_512_v4
+138/700453/campos_512_v4
+138/700456/campos_512_v4
+138/700457/campos_512_v4
+138/700474/campos_512_v4
+138/700484/campos_512_v4
+138/700487/campos_512_v4
+138/700490/campos_512_v4
+138/700498/campos_512_v4
+138/700501/campos_512_v4
+138/700502/campos_512_v4
+138/700505/campos_512_v4
+138/700509/campos_512_v4
+138/700510/campos_512_v4
+138/700513/campos_512_v4
+138/700518/campos_512_v4
+138/700523/campos_512_v4
+138/700531/campos_512_v4
+138/700547/campos_512_v4
+138/700556/campos_512_v4
+138/700559/campos_512_v4
+138/700578/campos_512_v4
+138/700579/campos_512_v4
+138/700586/campos_512_v4
+138/700589/campos_512_v4
+138/700590/campos_512_v4
+138/700595/campos_512_v4
+138/700601/campos_512_v4
+138/700606/campos_512_v4
+138/700608/campos_512_v4
+138/700611/campos_512_v4
+138/700615/campos_512_v4
+138/700619/campos_512_v4
+138/700634/campos_512_v4
+138/700637/campos_512_v4
+138/700640/campos_512_v4
+138/700641/campos_512_v4
+138/700643/campos_512_v4
+138/700644/campos_512_v4
+138/700646/campos_512_v4
+138/700663/campos_512_v4
+138/700677/campos_512_v4
+138/700684/campos_512_v4
+138/700708/campos_512_v4
+138/700715/campos_512_v4
+138/700723/campos_512_v4
+138/700724/campos_512_v4
+138/700733/campos_512_v4
+138/700743/campos_512_v4
+138/700748/campos_512_v4
+138/700760/campos_512_v4
+138/700779/campos_512_v4
+138/700790/campos_512_v4
+138/700816/campos_512_v4
+138/700817/campos_512_v4
+138/700819/campos_512_v4
+138/700825/campos_512_v4
+138/700844/campos_512_v4
+138/700856/campos_512_v4
+138/700862/campos_512_v4
+138/700870/campos_512_v4
+138/700874/campos_512_v4
+138/700886/campos_512_v4
+138/700896/campos_512_v4
+138/700901/campos_512_v4
+138/700907/campos_512_v4
+138/700909/campos_512_v4
+138/700913/campos_512_v4
+138/700916/campos_512_v4
+138/700917/campos_512_v4
+138/700934/campos_512_v4
+138/700944/campos_512_v4
+138/700946/campos_512_v4
+138/700947/campos_512_v4
+138/700949/campos_512_v4
+138/700957/campos_512_v4
+138/700980/campos_512_v4
+138/700992/campos_512_v4
+138/701000/campos_512_v4
+138/701032/campos_512_v4
+138/701033/campos_512_v4
+138/701035/campos_512_v4
+138/701037/campos_512_v4
+138/701041/campos_512_v4
+138/701047/campos_512_v4
+138/701055/campos_512_v4
+138/701056/campos_512_v4
+138/701062/campos_512_v4
+138/701067/campos_512_v4
+138/701070/campos_512_v4
+138/701073/campos_512_v4
+138/701078/campos_512_v4
+138/701080/campos_512_v4
+138/701094/campos_512_v4
+138/701098/campos_512_v4
+138/701100/campos_512_v4
+138/701103/campos_512_v4
+138/701153/campos_512_v4
+138/701155/campos_512_v4
+138/701169/campos_512_v4
+138/701180/campos_512_v4
+138/701187/campos_512_v4
+138/701213/campos_512_v4
+138/701226/campos_512_v4
+138/701234/campos_512_v4
+138/701235/campos_512_v4
+138/701238/campos_512_v4
+138/701256/campos_512_v4
+138/701263/campos_512_v4
+138/701267/campos_512_v4
+138/701271/campos_512_v4
+138/701278/campos_512_v4
+138/701281/campos_512_v4
+138/701288/campos_512_v4
+138/701293/campos_512_v4
+138/701317/campos_512_v4
+138/701321/campos_512_v4
+138/701333/campos_512_v4
+138/701341/campos_512_v4
+138/701347/campos_512_v4
+138/701348/campos_512_v4
+138/701349/campos_512_v4
+138/701351/campos_512_v4
+138/701354/campos_512_v4
+138/701355/campos_512_v4
+138/701358/campos_512_v4
+138/701368/campos_512_v4
+138/701371/campos_512_v4
+138/701383/campos_512_v4
+138/701403/campos_512_v4
+138/701425/campos_512_v4
+138/701440/campos_512_v4
+138/701442/campos_512_v4
+138/701471/campos_512_v4
+138/701482/campos_512_v4
+138/701495/campos_512_v4
+138/701507/campos_512_v4
+138/701519/campos_512_v4
+138/701521/campos_512_v4
+138/701522/campos_512_v4
+138/701523/campos_512_v4
+138/701524/campos_512_v4
+138/701530/campos_512_v4
+138/701543/campos_512_v4
+138/701556/campos_512_v4
+138/701561/campos_512_v4
+138/701562/campos_512_v4
+138/701563/campos_512_v4
+138/701583/campos_512_v4
+138/701589/campos_512_v4
+138/701600/campos_512_v4
+138/701603/campos_512_v4
+138/701611/campos_512_v4
+138/701628/campos_512_v4
+138/701651/campos_512_v4
+138/701654/campos_512_v4
+138/701658/campos_512_v4
+138/701661/campos_512_v4
+138/701666/campos_512_v4
+138/701686/campos_512_v4
+138/701687/campos_512_v4
+138/701705/campos_512_v4
+138/701765/campos_512_v4
+138/701772/campos_512_v4
+138/701775/campos_512_v4
+138/701783/campos_512_v4
+138/701789/campos_512_v4
+138/701801/campos_512_v4
+138/701805/campos_512_v4
+138/701812/campos_512_v4
+138/701813/campos_512_v4
+138/701819/campos_512_v4
+138/701828/campos_512_v4
+138/701829/campos_512_v4
+138/701831/campos_512_v4
+138/701840/campos_512_v4
+138/701853/campos_512_v4
+138/701859/campos_512_v4
+138/701861/campos_512_v4
+138/701862/campos_512_v4
+138/701872/campos_512_v4
+138/701877/campos_512_v4
+138/701884/campos_512_v4
+138/701890/campos_512_v4
+138/701891/campos_512_v4
+138/701895/campos_512_v4
+138/701925/campos_512_v4
+138/701930/campos_512_v4
+138/701956/campos_512_v4
+138/701962/campos_512_v4
+138/701974/campos_512_v4
+138/701980/campos_512_v4
+138/701991/campos_512_v4
+138/702008/campos_512_v4
+138/702014/campos_512_v4
+138/702026/campos_512_v4
+138/702031/campos_512_v4
+138/702033/campos_512_v4
+138/702038/campos_512_v4
+138/702051/campos_512_v4
+138/702052/campos_512_v4
+138/702058/campos_512_v4
+138/702060/campos_512_v4
+138/702067/campos_512_v4
+138/702103/campos_512_v4
+138/702105/campos_512_v4
+138/702108/campos_512_v4
+138/702111/campos_512_v4
+138/702120/campos_512_v4
+138/702124/campos_512_v4
+138/702126/campos_512_v4
+138/702134/campos_512_v4
+138/702135/campos_512_v4
+138/702136/campos_512_v4
+138/702139/campos_512_v4
+138/702147/campos_512_v4
+138/702165/campos_512_v4
+138/702172/campos_512_v4
+138/702176/campos_512_v4
+138/702190/campos_512_v4
+138/702192/campos_512_v4
+138/702204/campos_512_v4
+138/702205/campos_512_v4
+138/702222/campos_512_v4
+138/702239/campos_512_v4
+138/702240/campos_512_v4
+138/702248/campos_512_v4
+138/702282/campos_512_v4
+138/702285/campos_512_v4
+138/702287/campos_512_v4
+138/702294/campos_512_v4
+138/702303/campos_512_v4
+138/702305/campos_512_v4
+138/702307/campos_512_v4
+138/702320/campos_512_v4
+138/702324/campos_512_v4
+138/702325/campos_512_v4
+138/702326/campos_512_v4
+138/702339/campos_512_v4
+138/702341/campos_512_v4
+138/702348/campos_512_v4
+138/702349/campos_512_v4
+138/702351/campos_512_v4
+138/702357/campos_512_v4
+138/702361/campos_512_v4
+138/702362/campos_512_v4
+138/702364/campos_512_v4
+138/702366/campos_512_v4
+138/702380/campos_512_v4
+138/702399/campos_512_v4
+138/702427/campos_512_v4
+138/702428/campos_512_v4
+138/702435/campos_512_v4
+138/702439/campos_512_v4
+138/702446/campos_512_v4
+138/702447/campos_512_v4
+138/702452/campos_512_v4
+138/702465/campos_512_v4
+138/702468/campos_512_v4
+138/702494/campos_512_v4
+138/702497/campos_512_v4
+138/702500/campos_512_v4
+138/702520/campos_512_v4
+138/702521/campos_512_v4
+138/702555/campos_512_v4
+138/702568/campos_512_v4
+138/702590/campos_512_v4
+138/702593/campos_512_v4
+138/702594/campos_512_v4
+138/702600/campos_512_v4
+138/702602/campos_512_v4
+138/702610/campos_512_v4
+138/702618/campos_512_v4
+138/702623/campos_512_v4
+138/702624/campos_512_v4
+138/702631/campos_512_v4
+138/702637/campos_512_v4
+138/702647/campos_512_v4
+138/702651/campos_512_v4
+138/702652/campos_512_v4
+138/702668/campos_512_v4
+138/702677/campos_512_v4
+138/702682/campos_512_v4
+138/702685/campos_512_v4
+138/702693/campos_512_v4
+138/702694/campos_512_v4
+138/702711/campos_512_v4
+138/702716/campos_512_v4
+138/702720/campos_512_v4
+138/702728/campos_512_v4
+138/702731/campos_512_v4
+138/702732/campos_512_v4
+138/702734/campos_512_v4
+138/702743/campos_512_v4
+138/702749/campos_512_v4
+138/702756/campos_512_v4
+138/702759/campos_512_v4
+138/702767/campos_512_v4
+138/702769/campos_512_v4
+138/702772/campos_512_v4
+138/702781/campos_512_v4
+138/702794/campos_512_v4
+138/702795/campos_512_v4
+138/702799/campos_512_v4
+138/702804/campos_512_v4
+138/702809/campos_512_v4
+138/702812/campos_512_v4
+138/702827/campos_512_v4
+138/702836/campos_512_v4
+138/702837/campos_512_v4
+138/702838/campos_512_v4
+138/702847/campos_512_v4
+138/702860/campos_512_v4
+138/702867/campos_512_v4
+138/702869/campos_512_v4
+138/702870/campos_512_v4
+138/702871/campos_512_v4
+138/702872/campos_512_v4
+138/702873/campos_512_v4
+138/702875/campos_512_v4
+138/702877/campos_512_v4
+138/702882/campos_512_v4
+138/702892/campos_512_v4
+138/702895/campos_512_v4
+138/702900/campos_512_v4
+138/702907/campos_512_v4
+138/702908/campos_512_v4
+138/702919/campos_512_v4
+138/702931/campos_512_v4
+138/702933/campos_512_v4
+138/702939/campos_512_v4
+138/702946/campos_512_v4
+138/702957/campos_512_v4
+138/702960/campos_512_v4
+138/702963/campos_512_v4
+138/702972/campos_512_v4
+138/702993/campos_512_v4
+138/702997/campos_512_v4
+138/702998/campos_512_v4
+138/703003/campos_512_v4
+138/703007/campos_512_v4
+138/703033/campos_512_v4
+138/703048/campos_512_v4
+138/703052/campos_512_v4
+138/703061/campos_512_v4
+138/703063/campos_512_v4
+138/703068/campos_512_v4
+138/703072/campos_512_v4
+138/703080/campos_512_v4
+138/703091/campos_512_v4
+138/703098/campos_512_v4
+138/703111/campos_512_v4
+138/703119/campos_512_v4
+138/703134/campos_512_v4
+138/703135/campos_512_v4
+138/703141/campos_512_v4
+138/703143/campos_512_v4
+138/703168/campos_512_v4
+138/703169/campos_512_v4
+138/703173/campos_512_v4
+138/703177/campos_512_v4
+138/703184/campos_512_v4
+138/703188/campos_512_v4
+138/703199/campos_512_v4
+138/703202/campos_512_v4
+138/703204/campos_512_v4
+138/703206/campos_512_v4
+138/703213/campos_512_v4
+138/703222/campos_512_v4
+138/703227/campos_512_v4
+138/703248/campos_512_v4
+138/703261/campos_512_v4
+138/703263/campos_512_v4
+138/703273/campos_512_v4
+138/703278/campos_512_v4
+138/703280/campos_512_v4
+138/703284/campos_512_v4
+138/703286/campos_512_v4
+138/703289/campos_512_v4
+138/703294/campos_512_v4
+138/703310/campos_512_v4
+138/703315/campos_512_v4
+138/703317/campos_512_v4
+138/703323/campos_512_v4
+138/703325/campos_512_v4
+138/703330/campos_512_v4
+138/703337/campos_512_v4
+138/703340/campos_512_v4
+138/703348/campos_512_v4
+138/703364/campos_512_v4
+138/703370/campos_512_v4
+138/703379/campos_512_v4
+138/703400/campos_512_v4
+138/703408/campos_512_v4
+138/703413/campos_512_v4
+138/703417/campos_512_v4
+138/703439/campos_512_v4
+138/703461/campos_512_v4
+138/703469/campos_512_v4
+138/703489/campos_512_v4
+138/703513/campos_512_v4
+138/703516/campos_512_v4
+138/703527/campos_512_v4
+138/703528/campos_512_v4
+138/703530/campos_512_v4
+138/703548/campos_512_v4
+138/703552/campos_512_v4
+138/703556/campos_512_v4
+138/703566/campos_512_v4
+138/703585/campos_512_v4
+138/703603/campos_512_v4
+138/703609/campos_512_v4
+138/703619/campos_512_v4
+138/703638/campos_512_v4
+138/703642/campos_512_v4
+138/703653/campos_512_v4
+138/703660/campos_512_v4
+138/703661/campos_512_v4
+138/703673/campos_512_v4
+138/703676/campos_512_v4
+138/703689/campos_512_v4
+138/703727/campos_512_v4
+138/703740/campos_512_v4
+138/703741/campos_512_v4
+138/703743/campos_512_v4
+138/703744/campos_512_v4
+138/703745/campos_512_v4
+138/703750/campos_512_v4
+138/703755/campos_512_v4
+138/703764/campos_512_v4
+138/703767/campos_512_v4
+138/703774/campos_512_v4
+138/703780/campos_512_v4
+138/703787/campos_512_v4
+138/703794/campos_512_v4
+138/703795/campos_512_v4
+138/703799/campos_512_v4
+138/703810/campos_512_v4
+138/703853/campos_512_v4
+138/703860/campos_512_v4
+138/703864/campos_512_v4
+138/703871/campos_512_v4
+138/703877/campos_512_v4
+138/703884/campos_512_v4
+138/703885/campos_512_v4
+138/703887/campos_512_v4
+138/703888/campos_512_v4
+138/703896/campos_512_v4
+138/703899/campos_512_v4
+138/703907/campos_512_v4
+138/703919/campos_512_v4
+138/703922/campos_512_v4
+138/703926/campos_512_v4
+138/703948/campos_512_v4
+138/703949/campos_512_v4
+138/703951/campos_512_v4
+138/703955/campos_512_v4
+138/703970/campos_512_v4
+138/703977/campos_512_v4
+138/703992/campos_512_v4
+138/704000/campos_512_v4
+138/704024/campos_512_v4
+138/704031/campos_512_v4
+138/704034/campos_512_v4
+138/704038/campos_512_v4
+138/704039/campos_512_v4
+138/704043/campos_512_v4
+138/704044/campos_512_v4
+138/704051/campos_512_v4
+138/704058/campos_512_v4
+138/704075/campos_512_v4
+138/704082/campos_512_v4
+138/704083/campos_512_v4
+138/704087/campos_512_v4
+138/704111/campos_512_v4
+138/704134/campos_512_v4
+138/704138/campos_512_v4
+138/704145/campos_512_v4
+138/704158/campos_512_v4
+138/704165/campos_512_v4
+138/704168/campos_512_v4
+138/704204/campos_512_v4
+138/704224/campos_512_v4
+138/704228/campos_512_v4
+138/704229/campos_512_v4
+138/704233/campos_512_v4
+138/704234/campos_512_v4
+138/704239/campos_512_v4
+138/704250/campos_512_v4
+138/704271/campos_512_v4
+138/704276/campos_512_v4
+138/704282/campos_512_v4
+138/704285/campos_512_v4
+138/704289/campos_512_v4
+138/704298/campos_512_v4
+138/704304/campos_512_v4
+138/704305/campos_512_v4
+138/704306/campos_512_v4
+138/704307/campos_512_v4
+138/704309/campos_512_v4
+138/704311/campos_512_v4
+138/704315/campos_512_v4
+138/704317/campos_512_v4
+138/704336/campos_512_v4
+138/704340/campos_512_v4
+138/704364/campos_512_v4
+138/704366/campos_512_v4
+138/704373/campos_512_v4
+138/704374/campos_512_v4
+138/704378/campos_512_v4
+138/704386/campos_512_v4
+138/704391/campos_512_v4
+138/704400/campos_512_v4
+138/704403/campos_512_v4
+138/704411/campos_512_v4
+138/704425/campos_512_v4
+138/704428/campos_512_v4
+138/704430/campos_512_v4
+138/704460/campos_512_v4
+138/704477/campos_512_v4
+138/704481/campos_512_v4
+138/704489/campos_512_v4
+138/704506/campos_512_v4
+138/704516/campos_512_v4
+138/704520/campos_512_v4
+138/704549/campos_512_v4
+138/704555/campos_512_v4
+138/704559/campos_512_v4
+138/704565/campos_512_v4
+138/704570/campos_512_v4
+138/704575/campos_512_v4
+138/704576/campos_512_v4
+138/704585/campos_512_v4
+138/704590/campos_512_v4
+138/704592/campos_512_v4
+138/704595/campos_512_v4
+138/704597/campos_512_v4
+138/704600/campos_512_v4
+138/704606/campos_512_v4
+138/704609/campos_512_v4
+138/704630/campos_512_v4
+138/704633/campos_512_v4
+138/704651/campos_512_v4
+138/704657/campos_512_v4
+138/704663/campos_512_v4
+138/704664/campos_512_v4
+138/704670/campos_512_v4
+138/704673/campos_512_v4
+138/704683/campos_512_v4
+138/704691/campos_512_v4
+138/704692/campos_512_v4
+138/704709/campos_512_v4
+138/704713/campos_512_v4
+138/704719/campos_512_v4
+138/704732/campos_512_v4
+138/704736/campos_512_v4
+138/704739/campos_512_v4
+138/704745/campos_512_v4
+138/704757/campos_512_v4
+138/704769/campos_512_v4
+138/704775/campos_512_v4
+138/704785/campos_512_v4
+138/704791/campos_512_v4
+138/704792/campos_512_v4
+138/704798/campos_512_v4
+138/704806/campos_512_v4
+138/704813/campos_512_v4
+138/704814/campos_512_v4
+138/704823/campos_512_v4
+138/704825/campos_512_v4
+138/704838/campos_512_v4
+138/704839/campos_512_v4
+138/704844/campos_512_v4
+138/704861/campos_512_v4
+138/704865/campos_512_v4
+138/704880/campos_512_v4
+138/704892/campos_512_v4
+138/704896/campos_512_v4
+138/704899/campos_512_v4
+138/704908/campos_512_v4
+138/704910/campos_512_v4
+138/704911/campos_512_v4
+138/704920/campos_512_v4
+138/704923/campos_512_v4
+138/704931/campos_512_v4
+138/704934/campos_512_v4
+138/704944/campos_512_v4
+138/704955/campos_512_v4
+138/704961/campos_512_v4
+138/704963/campos_512_v4
+138/704968/campos_512_v4
+138/704973/campos_512_v4
+138/704976/campos_512_v4
+138/704989/campos_512_v4
+138/704990/campos_512_v4
+139/705009/campos_512_v4
+139/705014/campos_512_v4
+139/705032/campos_512_v4
+139/705046/campos_512_v4
+139/705047/campos_512_v4
+139/705067/campos_512_v4
+139/705071/campos_512_v4
+139/705082/campos_512_v4
+139/705106/campos_512_v4
+139/705110/campos_512_v4
+139/705115/campos_512_v4
+139/705133/campos_512_v4
+139/705137/campos_512_v4
+139/705140/campos_512_v4
+139/705141/campos_512_v4
+139/705152/campos_512_v4
+139/705155/campos_512_v4
+139/705167/campos_512_v4
+139/705170/campos_512_v4
+139/705172/campos_512_v4
+139/705178/campos_512_v4
+139/705180/campos_512_v4
+139/705181/campos_512_v4
+139/705189/campos_512_v4
+139/705190/campos_512_v4
+139/705200/campos_512_v4
+139/705206/campos_512_v4
+139/705208/campos_512_v4
+139/705211/campos_512_v4
+139/705221/campos_512_v4
+139/705226/campos_512_v4
+139/705236/campos_512_v4
+139/705241/campos_512_v4
+139/705253/campos_512_v4
+139/705254/campos_512_v4
+139/705256/campos_512_v4
+139/705259/campos_512_v4
+139/705275/campos_512_v4
+139/705279/campos_512_v4
+139/705290/campos_512_v4
+139/705298/campos_512_v4
+139/705300/campos_512_v4
+139/705307/campos_512_v4
+139/705308/campos_512_v4
+139/705314/campos_512_v4
+139/705315/campos_512_v4
+139/705348/campos_512_v4
+139/705353/campos_512_v4
+139/705358/campos_512_v4
+139/705359/campos_512_v4
+139/705363/campos_512_v4
+139/705369/campos_512_v4
+139/705370/campos_512_v4
+139/705401/campos_512_v4
+139/705421/campos_512_v4
+139/705424/campos_512_v4
+139/705438/campos_512_v4
+139/705452/campos_512_v4
+139/705462/campos_512_v4
+139/705465/campos_512_v4
+139/705467/campos_512_v4
+139/705468/campos_512_v4
+139/705479/campos_512_v4
+139/705482/campos_512_v4
+139/705492/campos_512_v4
+139/705500/campos_512_v4
+139/705505/campos_512_v4
+139/705512/campos_512_v4
+139/705513/campos_512_v4
+139/705516/campos_512_v4
+139/705528/campos_512_v4
+139/705532/campos_512_v4
+139/705549/campos_512_v4
+139/705554/campos_512_v4
+139/705559/campos_512_v4
+139/705562/campos_512_v4
+139/705575/campos_512_v4
+139/705579/campos_512_v4
+139/705603/campos_512_v4
+139/705614/campos_512_v4
+139/705619/campos_512_v4
+139/705622/campos_512_v4
+139/705628/campos_512_v4
+139/705629/campos_512_v4
+139/705643/campos_512_v4
+139/705655/campos_512_v4
+139/705659/campos_512_v4
+139/705661/campos_512_v4
+139/705672/campos_512_v4
+139/705676/campos_512_v4
+139/705682/campos_512_v4
+139/705690/campos_512_v4
+139/705698/campos_512_v4
+139/705703/campos_512_v4
+139/705704/campos_512_v4
+139/705707/campos_512_v4
+139/705711/campos_512_v4
+139/705716/campos_512_v4
+139/705721/campos_512_v4
+139/705723/campos_512_v4
+139/705736/campos_512_v4
+139/705737/campos_512_v4
+139/705768/campos_512_v4
+139/705788/campos_512_v4
+139/705804/campos_512_v4
+139/705820/campos_512_v4
+139/705833/campos_512_v4
+139/705838/campos_512_v4
+139/705854/campos_512_v4
+139/705855/campos_512_v4
+139/705858/campos_512_v4
+139/705870/campos_512_v4
+139/705871/campos_512_v4
+139/705884/campos_512_v4
+139/705886/campos_512_v4
+139/705887/campos_512_v4
+139/705918/campos_512_v4
+139/705920/campos_512_v4
+139/705926/campos_512_v4
+139/705931/campos_512_v4
+139/705961/campos_512_v4
+139/705974/campos_512_v4
+139/705979/campos_512_v4
+139/705983/campos_512_v4
+139/705985/campos_512_v4
+139/705988/campos_512_v4
+139/705989/campos_512_v4
+139/705999/campos_512_v4
+139/706012/campos_512_v4
+139/706027/campos_512_v4
+139/706028/campos_512_v4
+139/706030/campos_512_v4
+139/706032/campos_512_v4
+139/706046/campos_512_v4
+139/706048/campos_512_v4
+139/706050/campos_512_v4
+139/706059/campos_512_v4
+139/706086/campos_512_v4
+139/706091/campos_512_v4
+139/706092/campos_512_v4
+139/706105/campos_512_v4
+139/706109/campos_512_v4
+139/706113/campos_512_v4
+139/706122/campos_512_v4
+139/706125/campos_512_v4
+139/706132/campos_512_v4
+139/706138/campos_512_v4
+139/706145/campos_512_v4
+139/706150/campos_512_v4
+139/706161/campos_512_v4
+139/706166/campos_512_v4
+139/706171/campos_512_v4
+139/706183/campos_512_v4
+139/706188/campos_512_v4
+139/706197/campos_512_v4
+139/706206/campos_512_v4
+139/706209/campos_512_v4
+139/706223/campos_512_v4
+139/706224/campos_512_v4
+139/706227/campos_512_v4
+139/706229/campos_512_v4
+139/706235/campos_512_v4
+139/706256/campos_512_v4
+139/706263/campos_512_v4
+139/706286/campos_512_v4
+139/706290/campos_512_v4
+139/706293/campos_512_v4
+139/706307/campos_512_v4
+139/706309/campos_512_v4
+139/706320/campos_512_v4
+139/706333/campos_512_v4
+139/706350/campos_512_v4
+139/706362/campos_512_v4
+139/706371/campos_512_v4
+139/706378/campos_512_v4
+139/706382/campos_512_v4
+139/706392/campos_512_v4
+139/706395/campos_512_v4
+139/706399/campos_512_v4
+139/706404/campos_512_v4
+139/706407/campos_512_v4
+139/706409/campos_512_v4
+139/706424/campos_512_v4
+139/706427/campos_512_v4
+139/706431/campos_512_v4
+139/706432/campos_512_v4
+139/706448/campos_512_v4
+139/706461/campos_512_v4
+139/706464/campos_512_v4
+139/706467/campos_512_v4
+139/706470/campos_512_v4
+139/706484/campos_512_v4
+139/706501/campos_512_v4
+139/706509/campos_512_v4
+139/706566/campos_512_v4
+139/706584/campos_512_v4
+139/706592/campos_512_v4
+139/706597/campos_512_v4
+139/706600/campos_512_v4
+139/706614/campos_512_v4
+139/706626/campos_512_v4
+139/706653/campos_512_v4
+139/706661/campos_512_v4
+139/706700/campos_512_v4
+139/706703/campos_512_v4
+139/706709/campos_512_v4
+139/706710/campos_512_v4
+139/706712/campos_512_v4
+139/706715/campos_512_v4
+139/706720/campos_512_v4
+139/706723/campos_512_v4
+139/706725/campos_512_v4
+139/706728/campos_512_v4
+139/706731/campos_512_v4
+139/706734/campos_512_v4
+139/706738/campos_512_v4
+139/706743/campos_512_v4
+139/706744/campos_512_v4
+139/706770/campos_512_v4
+139/706773/campos_512_v4
+139/706774/campos_512_v4
+139/706778/campos_512_v4
+139/706790/campos_512_v4
+139/706802/campos_512_v4
+139/706817/campos_512_v4
+139/706823/campos_512_v4
+139/706833/campos_512_v4
+139/706843/campos_512_v4
+139/706848/campos_512_v4
+139/706859/campos_512_v4
+139/706861/campos_512_v4
+139/706868/campos_512_v4
+139/706870/campos_512_v4
+139/706875/campos_512_v4
+139/706887/campos_512_v4
+139/706901/campos_512_v4
+139/706910/campos_512_v4
+139/706915/campos_512_v4
+139/706917/campos_512_v4
+139/706919/campos_512_v4
+139/706925/campos_512_v4
+139/706935/campos_512_v4
+139/706938/campos_512_v4
+139/706944/campos_512_v4
+139/706946/campos_512_v4
+139/706947/campos_512_v4
+139/706950/campos_512_v4
+139/706956/campos_512_v4
+139/706957/campos_512_v4
+139/706968/campos_512_v4
+139/706978/campos_512_v4
+139/706987/campos_512_v4
+139/706992/campos_512_v4
+139/706995/campos_512_v4
+139/707004/campos_512_v4
+139/707024/campos_512_v4
+139/707026/campos_512_v4
+139/707027/campos_512_v4
+139/707039/campos_512_v4
+139/707042/campos_512_v4
+139/707044/campos_512_v4
+139/707050/campos_512_v4
+139/707068/campos_512_v4
+139/707072/campos_512_v4
+139/707073/campos_512_v4
+139/707082/campos_512_v4
+139/707084/campos_512_v4
+139/707088/campos_512_v4
+139/707097/campos_512_v4
+139/707103/campos_512_v4
+139/707111/campos_512_v4
+139/707114/campos_512_v4
+139/707115/campos_512_v4
+139/707117/campos_512_v4
+139/707120/campos_512_v4
+139/707125/campos_512_v4
+139/707138/campos_512_v4
+139/707139/campos_512_v4
+139/707142/campos_512_v4
+139/707154/campos_512_v4
+139/707159/campos_512_v4
+139/707162/campos_512_v4
+139/707173/campos_512_v4
+139/707177/campos_512_v4
+139/707185/campos_512_v4
+139/707187/campos_512_v4
+139/707191/campos_512_v4
+139/707197/campos_512_v4
+139/707212/campos_512_v4
+139/707220/campos_512_v4
+139/707225/campos_512_v4
+139/707241/campos_512_v4
+139/707245/campos_512_v4
+139/707250/campos_512_v4
+139/707260/campos_512_v4
+139/707264/campos_512_v4
+139/707269/campos_512_v4
+139/707274/campos_512_v4
+139/707283/campos_512_v4
+139/707295/campos_512_v4
+139/707297/campos_512_v4
+139/707304/campos_512_v4
+139/707306/campos_512_v4
+139/707308/campos_512_v4
+139/707323/campos_512_v4
+139/707325/campos_512_v4
+139/707329/campos_512_v4
+139/707337/campos_512_v4
+139/707342/campos_512_v4
+139/707348/campos_512_v4
+139/707351/campos_512_v4
+139/707357/campos_512_v4
+139/707363/campos_512_v4
+139/707379/campos_512_v4
+139/707384/campos_512_v4
+139/707385/campos_512_v4
+139/707405/campos_512_v4
+139/707409/campos_512_v4
+139/707411/campos_512_v4
+139/707416/campos_512_v4
+139/707417/campos_512_v4
+139/707433/campos_512_v4
+139/707443/campos_512_v4
+139/707458/campos_512_v4
+139/707465/campos_512_v4
+139/707471/campos_512_v4
+139/707472/campos_512_v4
+139/707506/campos_512_v4
+139/707516/campos_512_v4
+139/707521/campos_512_v4
+139/707523/campos_512_v4
+139/707532/campos_512_v4
+139/707534/campos_512_v4
+139/707548/campos_512_v4
+139/707559/campos_512_v4
+139/707562/campos_512_v4
+139/707573/campos_512_v4
+139/707588/campos_512_v4
+139/707591/campos_512_v4
+139/707593/campos_512_v4
+139/707600/campos_512_v4
+139/707603/campos_512_v4
+139/707607/campos_512_v4
+139/707609/campos_512_v4
+139/707617/campos_512_v4
+139/707638/campos_512_v4
+139/707646/campos_512_v4
+139/707660/campos_512_v4
+139/707661/campos_512_v4
+139/707663/campos_512_v4
+139/707676/campos_512_v4
+139/707681/campos_512_v4
+139/707685/campos_512_v4
+139/707687/campos_512_v4
+139/707688/campos_512_v4
+139/707693/campos_512_v4
+139/707697/campos_512_v4
+139/707701/campos_512_v4
+139/707723/campos_512_v4
+139/707724/campos_512_v4
+139/707727/campos_512_v4
+139/707745/campos_512_v4
+139/707747/campos_512_v4
+139/707754/campos_512_v4
+139/707755/campos_512_v4
+139/707761/campos_512_v4
+139/707764/campos_512_v4
+139/707766/campos_512_v4
+139/707770/campos_512_v4
+139/707772/campos_512_v4
+139/707773/campos_512_v4
+139/707775/campos_512_v4
+139/707779/campos_512_v4
+139/707793/campos_512_v4
+139/707794/campos_512_v4
+139/707796/campos_512_v4
+139/707807/campos_512_v4
+139/707814/campos_512_v4
+139/707815/campos_512_v4
+139/707834/campos_512_v4
+139/707840/campos_512_v4
+139/707843/campos_512_v4
+139/707846/campos_512_v4
+139/707847/campos_512_v4
+139/707857/campos_512_v4
+139/707861/campos_512_v4
+139/707862/campos_512_v4
+139/707877/campos_512_v4
+139/707926/campos_512_v4
+139/707965/campos_512_v4
+139/707966/campos_512_v4
+139/707970/campos_512_v4
+139/707973/campos_512_v4
+139/707976/campos_512_v4
+139/707979/campos_512_v4
+139/707983/campos_512_v4
+139/707985/campos_512_v4
+139/707990/campos_512_v4
+139/707992/campos_512_v4
+139/708006/campos_512_v4
+139/708007/campos_512_v4
+139/708020/campos_512_v4
+139/708042/campos_512_v4
+139/708053/campos_512_v4
+139/708059/campos_512_v4
+139/708077/campos_512_v4
+139/708089/campos_512_v4
+139/708091/campos_512_v4
+139/708109/campos_512_v4
+139/708110/campos_512_v4
+139/708111/campos_512_v4
+139/708115/campos_512_v4
+139/708120/campos_512_v4
+139/708122/campos_512_v4
+139/708131/campos_512_v4
+139/708132/campos_512_v4
+139/708139/campos_512_v4
+139/708140/campos_512_v4
+139/708144/campos_512_v4
+139/708161/campos_512_v4
+139/708182/campos_512_v4
+139/708195/campos_512_v4
+139/708226/campos_512_v4
+139/708228/campos_512_v4
+139/708233/campos_512_v4
+139/708234/campos_512_v4
+139/708263/campos_512_v4
+139/708280/campos_512_v4
+139/708284/campos_512_v4
+139/708290/campos_512_v4
+139/708293/campos_512_v4
+139/708298/campos_512_v4
+139/708304/campos_512_v4
+139/708313/campos_512_v4
+139/708337/campos_512_v4
+139/708349/campos_512_v4
+139/708374/campos_512_v4
+139/708380/campos_512_v4
+139/708385/campos_512_v4
+139/708388/campos_512_v4
+139/708406/campos_512_v4
+139/708409/campos_512_v4
+139/708415/campos_512_v4
+139/708418/campos_512_v4
+139/708419/campos_512_v4
+139/708433/campos_512_v4
+139/708440/campos_512_v4
+139/708442/campos_512_v4
+139/708443/campos_512_v4
+139/708464/campos_512_v4
+139/708467/campos_512_v4
+139/708468/campos_512_v4
+139/708474/campos_512_v4
+139/708476/campos_512_v4
+139/708478/campos_512_v4
+139/708482/campos_512_v4
+139/708489/campos_512_v4
+139/708495/campos_512_v4
+139/708497/campos_512_v4
+139/708506/campos_512_v4
+139/708512/campos_512_v4
+139/708521/campos_512_v4
+139/708522/campos_512_v4
+139/708527/campos_512_v4
+139/708529/campos_512_v4
+139/708532/campos_512_v4
+139/708534/campos_512_v4
+139/708539/campos_512_v4
+139/708544/campos_512_v4
+139/708547/campos_512_v4
+139/708556/campos_512_v4
+139/708557/campos_512_v4
+139/708576/campos_512_v4
+139/708578/campos_512_v4
+139/708579/campos_512_v4
+139/708586/campos_512_v4
+139/708587/campos_512_v4
+139/708599/campos_512_v4
+139/708604/campos_512_v4
+139/708606/campos_512_v4
+139/708612/campos_512_v4
+139/708618/campos_512_v4
+139/708619/campos_512_v4
+139/708623/campos_512_v4
+139/708632/campos_512_v4
+139/708643/campos_512_v4
+139/708647/campos_512_v4
+139/708662/campos_512_v4
+139/708663/campos_512_v4
+139/708693/campos_512_v4
+139/708698/campos_512_v4
+139/708701/campos_512_v4
+139/708703/campos_512_v4
+139/708711/campos_512_v4
+139/708720/campos_512_v4
+139/708721/campos_512_v4
+139/708723/campos_512_v4
+139/708737/campos_512_v4
+139/708745/campos_512_v4
+139/708746/campos_512_v4
+139/708749/campos_512_v4
+139/708763/campos_512_v4
+139/708770/campos_512_v4
+139/708778/campos_512_v4
+139/708786/campos_512_v4
+139/708794/campos_512_v4
+139/708798/campos_512_v4
+139/708802/campos_512_v4
+139/708809/campos_512_v4
+139/708810/campos_512_v4
+139/708813/campos_512_v4
+139/708823/campos_512_v4
+139/708833/campos_512_v4
+139/708837/campos_512_v4
+139/708848/campos_512_v4
+139/708862/campos_512_v4
+139/708875/campos_512_v4
+139/708876/campos_512_v4
+139/708884/campos_512_v4
+139/708887/campos_512_v4
+139/708891/campos_512_v4
+139/708893/campos_512_v4
+139/708894/campos_512_v4
+139/708895/campos_512_v4
+139/708897/campos_512_v4
+139/708900/campos_512_v4
+139/708908/campos_512_v4
+139/708920/campos_512_v4
+139/708932/campos_512_v4
+139/708940/campos_512_v4
+139/708946/campos_512_v4
+139/708950/campos_512_v4
+139/708962/campos_512_v4
+139/708964/campos_512_v4
+139/708971/campos_512_v4
+139/708974/campos_512_v4
+139/708996/campos_512_v4
+139/708998/campos_512_v4
+139/709006/campos_512_v4
+139/709020/campos_512_v4
+139/709021/campos_512_v4
+139/709029/campos_512_v4
+139/709048/campos_512_v4
+139/709059/campos_512_v4
+139/709076/campos_512_v4
+139/709088/campos_512_v4
+139/709110/campos_512_v4
+139/709113/campos_512_v4
+139/709123/campos_512_v4
+139/709125/campos_512_v4
+139/709126/campos_512_v4
+139/709138/campos_512_v4
+139/709139/campos_512_v4
+139/709148/campos_512_v4
+139/709160/campos_512_v4
+139/709166/campos_512_v4
+139/709168/campos_512_v4
+139/709169/campos_512_v4
+139/709193/campos_512_v4
+139/709211/campos_512_v4
+139/709216/campos_512_v4
+139/709226/campos_512_v4
+139/709229/campos_512_v4
+139/709237/campos_512_v4
+139/709244/campos_512_v4
+139/709250/campos_512_v4
+139/709253/campos_512_v4
+139/709254/campos_512_v4
+139/709260/campos_512_v4
+139/709263/campos_512_v4
+139/709267/campos_512_v4
+139/709278/campos_512_v4
+139/709280/campos_512_v4
+139/709287/campos_512_v4
+139/709292/campos_512_v4
+139/709302/campos_512_v4
+139/709310/campos_512_v4
+139/709313/campos_512_v4
+139/709315/campos_512_v4
+139/709324/campos_512_v4
+139/709330/campos_512_v4
+139/709341/campos_512_v4
+139/709351/campos_512_v4
+139/709352/campos_512_v4
+139/709357/campos_512_v4
+139/709358/campos_512_v4
+139/709359/campos_512_v4
+139/709360/campos_512_v4
+139/709365/campos_512_v4
+139/709386/campos_512_v4
+139/709387/campos_512_v4
+139/709389/campos_512_v4
+139/709398/campos_512_v4
+139/709399/campos_512_v4
+139/709413/campos_512_v4
+139/709426/campos_512_v4
+139/709428/campos_512_v4
+139/709440/campos_512_v4
+139/709441/campos_512_v4
+139/709443/campos_512_v4
+139/709452/campos_512_v4
+139/709459/campos_512_v4
+139/709464/campos_512_v4
+139/709467/campos_512_v4
+139/709474/campos_512_v4
+139/709476/campos_512_v4
+139/709479/campos_512_v4
+139/709493/campos_512_v4
+139/709502/campos_512_v4
+139/709508/campos_512_v4
+139/709511/campos_512_v4
+139/709518/campos_512_v4
+139/709543/campos_512_v4
+139/709547/campos_512_v4
+139/709548/campos_512_v4
+139/709554/campos_512_v4
+139/709569/campos_512_v4
+139/709571/campos_512_v4
+139/709574/campos_512_v4
+139/709577/campos_512_v4
+139/709588/campos_512_v4
+139/709594/campos_512_v4
+139/709597/campos_512_v4
+139/709608/campos_512_v4
+139/709635/campos_512_v4
+139/709664/campos_512_v4
+139/709665/campos_512_v4
+139/709669/campos_512_v4
+139/709675/campos_512_v4
+139/709678/campos_512_v4
+139/709683/campos_512_v4
+139/709689/campos_512_v4
+139/709690/campos_512_v4
+139/709692/campos_512_v4
+139/709698/campos_512_v4
+139/709701/campos_512_v4
+139/709710/campos_512_v4
+139/709711/campos_512_v4
+139/709722/campos_512_v4
+139/709733/campos_512_v4
+139/709740/campos_512_v4
+139/709742/campos_512_v4
+139/709754/campos_512_v4
+139/709764/campos_512_v4
+139/709774/campos_512_v4
+139/709779/campos_512_v4
+139/709786/campos_512_v4
+139/709788/campos_512_v4
+139/709789/campos_512_v4
+139/709790/campos_512_v4
+139/709795/campos_512_v4
+139/709802/campos_512_v4
+139/709811/campos_512_v4
+139/709820/campos_512_v4
+139/709826/campos_512_v4
+139/709829/campos_512_v4
+139/709833/campos_512_v4
+139/709835/campos_512_v4
+139/709839/campos_512_v4
+139/709846/campos_512_v4
+139/709855/campos_512_v4
+139/709866/campos_512_v4
+139/709869/campos_512_v4
+139/709870/campos_512_v4
+139/709873/campos_512_v4
+139/709875/campos_512_v4
+139/709876/campos_512_v4
+139/709884/campos_512_v4
+139/709885/campos_512_v4
+139/709899/campos_512_v4
+139/709919/campos_512_v4
+139/709920/campos_512_v4
+139/709922/campos_512_v4
+139/709929/campos_512_v4
+139/709938/campos_512_v4
+139/709939/campos_512_v4
+139/709941/campos_512_v4
+139/709942/campos_512_v4
+139/709943/campos_512_v4
+139/709944/campos_512_v4
+139/709962/campos_512_v4
+139/709991/campos_512_v4
+139/709992/campos_512_v4
+139/709995/campos_512_v4
+14/80007/campos_512_v4
+14/80020/campos_512_v4
+14/80024/campos_512_v4
+14/80025/campos_512_v4
+14/80031/campos_512_v4
+14/80037/campos_512_v4
+14/80039/campos_512_v4
+14/80059/campos_512_v4
+14/80060/campos_512_v4
+14/80072/campos_512_v4
+14/80081/campos_512_v4
+14/80093/campos_512_v4
+14/80107/campos_512_v4
+14/80110/campos_512_v4
+14/80116/campos_512_v4
+14/80125/campos_512_v4
+14/80141/campos_512_v4
+14/80158/campos_512_v4
+14/80171/campos_512_v4
+14/80195/campos_512_v4
+14/80199/campos_512_v4
+14/80219/campos_512_v4
+14/80236/campos_512_v4
+14/80238/campos_512_v4
+14/80241/campos_512_v4
+14/80250/campos_512_v4
+14/80255/campos_512_v4
+14/80265/campos_512_v4
+14/80274/campos_512_v4
+14/80283/campos_512_v4
+14/80289/campos_512_v4
+14/80290/campos_512_v4
+14/80293/campos_512_v4
+14/80298/campos_512_v4
+14/80303/campos_512_v4
+14/80315/campos_512_v4
+14/80323/campos_512_v4
+14/80337/campos_512_v4
+14/80343/campos_512_v4
+14/80349/campos_512_v4
+14/80353/campos_512_v4
+14/80358/campos_512_v4
+14/80368/campos_512_v4
+14/80369/campos_512_v4
+14/80372/campos_512_v4
+14/80384/campos_512_v4
+14/80390/campos_512_v4
+14/80400/campos_512_v4
+14/80403/campos_512_v4
+14/80408/campos_512_v4
+14/80437/campos_512_v4
+14/80442/campos_512_v4
+14/80443/campos_512_v4
+14/80449/campos_512_v4
+14/80463/campos_512_v4
+14/80478/campos_512_v4
+14/80486/campos_512_v4
+14/80494/campos_512_v4
+14/80498/campos_512_v4
+14/80513/campos_512_v4
+14/80524/campos_512_v4
+14/80535/campos_512_v4
+14/80543/campos_512_v4
+14/80551/campos_512_v4
+14/80553/campos_512_v4
+14/80562/campos_512_v4
+14/80574/campos_512_v4
+14/80578/campos_512_v4
+14/80581/campos_512_v4
+14/80604/campos_512_v4
+14/80606/campos_512_v4
+14/80610/campos_512_v4
+14/80615/campos_512_v4
+14/80626/campos_512_v4
+14/80639/campos_512_v4
+14/80643/campos_512_v4
+14/80651/campos_512_v4
+14/80678/campos_512_v4
+14/80679/campos_512_v4
+14/80687/campos_512_v4
+14/80695/campos_512_v4
+14/80697/campos_512_v4
+14/80708/campos_512_v4
+14/80710/campos_512_v4
+14/80724/campos_512_v4
+14/80728/campos_512_v4
+14/80730/campos_512_v4
+14/80745/campos_512_v4
+14/80759/campos_512_v4
+14/80762/campos_512_v4
+14/80764/campos_512_v4
+14/80771/campos_512_v4
+14/80780/campos_512_v4
+14/80786/campos_512_v4
+14/80791/campos_512_v4
+14/80792/campos_512_v4
+14/80795/campos_512_v4
+14/80796/campos_512_v4
+14/80799/campos_512_v4
+14/80804/campos_512_v4
+14/80813/campos_512_v4
+14/80819/campos_512_v4
+14/80823/campos_512_v4
+14/80824/campos_512_v4
+14/80837/campos_512_v4
+14/80850/campos_512_v4
+14/80855/campos_512_v4
+14/80857/campos_512_v4
+14/80862/campos_512_v4
+14/80863/campos_512_v4
+14/80865/campos_512_v4
+14/80868/campos_512_v4
+14/80869/campos_512_v4
+14/80877/campos_512_v4
+14/80881/campos_512_v4
+14/80884/campos_512_v4
+14/80891/campos_512_v4
+14/80894/campos_512_v4
+14/80895/campos_512_v4
+14/80900/campos_512_v4
+14/80905/campos_512_v4
+14/80937/campos_512_v4
+14/80941/campos_512_v4
+14/80943/campos_512_v4
+14/80948/campos_512_v4
+14/80949/campos_512_v4
+14/80956/campos_512_v4
+14/80971/campos_512_v4
+14/80973/campos_512_v4
+14/80979/campos_512_v4
+14/80988/campos_512_v4
+14/81000/campos_512_v4
+14/81001/campos_512_v4
+14/81008/campos_512_v4
+14/81014/campos_512_v4
+14/81017/campos_512_v4
+14/81018/campos_512_v4
+14/81027/campos_512_v4
+14/81036/campos_512_v4
+14/81037/campos_512_v4
+14/81044/campos_512_v4
+14/81055/campos_512_v4
+14/81061/campos_512_v4
+14/81064/campos_512_v4
+14/81065/campos_512_v4
+14/81105/campos_512_v4
+14/81114/campos_512_v4
+14/81121/campos_512_v4
+14/81122/campos_512_v4
+14/81125/campos_512_v4
+14/81126/campos_512_v4
+14/81130/campos_512_v4
+14/81131/campos_512_v4
+14/81135/campos_512_v4
+14/81147/campos_512_v4
+14/81159/campos_512_v4
+14/81165/campos_512_v4
+14/81181/campos_512_v4
+14/81192/campos_512_v4
+14/81198/campos_512_v4
+14/81208/campos_512_v4
+14/81210/campos_512_v4
+14/81212/campos_512_v4
+14/81226/campos_512_v4
+14/81229/campos_512_v4
+14/81240/campos_512_v4
+14/81249/campos_512_v4
+14/81256/campos_512_v4
+14/81264/campos_512_v4
+14/81267/campos_512_v4
+14/81271/campos_512_v4
+14/81299/campos_512_v4
+14/81302/campos_512_v4
+14/81308/campos_512_v4
+14/81323/campos_512_v4
+14/81327/campos_512_v4
+14/81331/campos_512_v4
+14/81339/campos_512_v4
+14/81368/campos_512_v4
+14/81374/campos_512_v4
+14/81393/campos_512_v4
+14/81396/campos_512_v4
+14/81406/campos_512_v4
+14/81407/campos_512_v4
+14/81423/campos_512_v4
+14/81426/campos_512_v4
+14/81438/campos_512_v4
+14/81449/campos_512_v4
+14/81454/campos_512_v4
+14/81462/campos_512_v4
+14/81464/campos_512_v4
+14/81469/campos_512_v4
+14/81482/campos_512_v4
+14/81488/campos_512_v4
+14/81503/campos_512_v4
+14/81508/campos_512_v4
+14/81518/campos_512_v4
+14/81523/campos_512_v4
+14/81527/campos_512_v4
+14/81531/campos_512_v4
+14/81537/campos_512_v4
+14/81540/campos_512_v4
+14/81544/campos_512_v4
+14/81546/campos_512_v4
+14/81559/campos_512_v4
+14/81561/campos_512_v4
+14/81564/campos_512_v4
+14/81571/campos_512_v4
+14/81573/campos_512_v4
+14/81576/campos_512_v4
+14/81594/campos_512_v4
+14/81603/campos_512_v4
+14/81608/campos_512_v4
+14/81630/campos_512_v4
+14/81633/campos_512_v4
+14/81638/campos_512_v4
+14/81649/campos_512_v4
+14/81650/campos_512_v4
+14/81660/campos_512_v4
+14/81665/campos_512_v4
+14/81668/campos_512_v4
+14/81679/campos_512_v4
+14/81680/campos_512_v4
+14/81705/campos_512_v4
+14/81706/campos_512_v4
+14/81710/campos_512_v4
+14/81713/campos_512_v4
+14/81733/campos_512_v4
+14/81746/campos_512_v4
+14/81749/campos_512_v4
+14/81753/campos_512_v4
+14/81757/campos_512_v4
+14/81761/campos_512_v4
+14/81774/campos_512_v4
+14/81782/campos_512_v4
+14/81792/campos_512_v4
+14/81794/campos_512_v4
+14/81799/campos_512_v4
+14/81805/campos_512_v4
+14/81824/campos_512_v4
+14/81831/campos_512_v4
+14/81846/campos_512_v4
+14/81867/campos_512_v4
+14/81869/campos_512_v4
+14/81876/campos_512_v4
+14/81885/campos_512_v4
+14/81893/campos_512_v4
+14/81895/campos_512_v4
+14/81910/campos_512_v4
+14/81914/campos_512_v4
+14/81916/campos_512_v4
+14/81921/campos_512_v4
+14/81923/campos_512_v4
+14/81926/campos_512_v4
+14/81927/campos_512_v4
+14/81930/campos_512_v4
+14/81937/campos_512_v4
+14/81939/campos_512_v4
+14/81956/campos_512_v4
+14/81963/campos_512_v4
+14/81964/campos_512_v4
+14/81975/campos_512_v4
+14/81979/campos_512_v4
+14/81994/campos_512_v4
+14/81997/campos_512_v4
+14/81998/campos_512_v4
+14/82001/campos_512_v4
+14/82006/campos_512_v4
+14/82007/campos_512_v4
+14/82011/campos_512_v4
+14/82013/campos_512_v4
+14/82022/campos_512_v4
+14/82027/campos_512_v4
+14/82049/campos_512_v4
+14/82056/campos_512_v4
+14/82058/campos_512_v4
+14/82062/campos_512_v4
+14/82072/campos_512_v4
+14/82073/campos_512_v4
+14/82091/campos_512_v4
+14/82092/campos_512_v4
+14/82096/campos_512_v4
+14/82102/campos_512_v4
+14/82108/campos_512_v4
+14/82111/campos_512_v4
+14/82116/campos_512_v4
+14/82123/campos_512_v4
+14/82127/campos_512_v4
+14/82132/campos_512_v4
+14/82143/campos_512_v4
+14/82147/campos_512_v4
+14/82157/campos_512_v4
+14/82169/campos_512_v4
+14/82173/campos_512_v4
+14/82178/campos_512_v4
+14/82206/campos_512_v4
+14/82212/campos_512_v4
+14/82225/campos_512_v4
+14/82232/campos_512_v4
+14/82238/campos_512_v4
+14/82244/campos_512_v4
+14/82246/campos_512_v4
+14/82252/campos_512_v4
+14/82259/campos_512_v4
+14/82262/campos_512_v4
+14/82271/campos_512_v4
+14/82274/campos_512_v4
+14/82294/campos_512_v4
+14/82300/campos_512_v4
+14/82302/campos_512_v4
+14/82303/campos_512_v4
+14/82304/campos_512_v4
+14/82323/campos_512_v4
+14/82327/campos_512_v4
+14/82338/campos_512_v4
+14/82350/campos_512_v4
+14/82352/campos_512_v4
+14/82362/campos_512_v4
+14/82369/campos_512_v4
+14/82370/campos_512_v4
+14/82378/campos_512_v4
+14/82387/campos_512_v4
+14/82394/campos_512_v4
+14/82407/campos_512_v4
+14/82435/campos_512_v4
+14/82446/campos_512_v4
+14/82481/campos_512_v4
+14/82488/campos_512_v4
+14/82502/campos_512_v4
+14/82506/campos_512_v4
+14/82524/campos_512_v4
+14/82534/campos_512_v4
+14/82535/campos_512_v4
+14/82538/campos_512_v4
+14/82543/campos_512_v4
+14/82554/campos_512_v4
+14/82562/campos_512_v4
+14/82565/campos_512_v4
+14/82568/campos_512_v4
+14/82574/campos_512_v4
+14/82586/campos_512_v4
+14/82593/campos_512_v4
+14/82594/campos_512_v4
+14/82597/campos_512_v4
+14/82600/campos_512_v4
+14/82601/campos_512_v4
+14/82641/campos_512_v4
+14/82652/campos_512_v4
+14/82676/campos_512_v4
+14/82707/campos_512_v4
+14/82708/campos_512_v4
+14/82714/campos_512_v4
+14/82725/campos_512_v4
+14/82740/campos_512_v4
+14/82746/campos_512_v4
+14/82753/campos_512_v4
+14/82757/campos_512_v4
+14/82758/campos_512_v4
+14/82759/campos_512_v4
+14/82765/campos_512_v4
+14/82770/campos_512_v4
+14/82793/campos_512_v4
+14/82800/campos_512_v4
+14/82805/campos_512_v4
+14/82813/campos_512_v4
+14/82829/campos_512_v4
+14/82845/campos_512_v4
+14/82846/campos_512_v4
+14/82857/campos_512_v4
+14/82864/campos_512_v4
+14/82883/campos_512_v4
+14/82900/campos_512_v4
+14/82901/campos_512_v4
+14/82907/campos_512_v4
+14/82911/campos_512_v4
+14/82912/campos_512_v4
+14/82916/campos_512_v4
+14/82919/campos_512_v4
+14/82926/campos_512_v4
+14/82931/campos_512_v4
+14/82937/campos_512_v4
+14/82948/campos_512_v4
+14/82965/campos_512_v4
+14/82977/campos_512_v4
+14/82989/campos_512_v4
+14/82991/campos_512_v4
+14/82995/campos_512_v4
+14/82997/campos_512_v4
+14/83005/campos_512_v4
+14/83015/campos_512_v4
+14/83041/campos_512_v4
+14/83045/campos_512_v4
+14/83050/campos_512_v4
+14/83054/campos_512_v4
+14/83057/campos_512_v4
+14/83068/campos_512_v4
+14/83073/campos_512_v4
+14/83078/campos_512_v4
+14/83079/campos_512_v4
+14/83092/campos_512_v4
+14/83111/campos_512_v4
+14/83113/campos_512_v4
+14/83117/campos_512_v4
+14/83122/campos_512_v4
+14/83125/campos_512_v4
+14/83135/campos_512_v4
+14/83139/campos_512_v4
+14/83140/campos_512_v4
+14/83149/campos_512_v4
+14/83160/campos_512_v4
+14/83161/campos_512_v4
+14/83167/campos_512_v4
+14/83177/campos_512_v4
+14/83183/campos_512_v4
+14/83186/campos_512_v4
+14/83189/campos_512_v4
+14/83193/campos_512_v4
+14/83215/campos_512_v4
+14/83217/campos_512_v4
+14/83229/campos_512_v4
+14/83231/campos_512_v4
+14/83242/campos_512_v4
+14/83271/campos_512_v4
+14/83279/campos_512_v4
+14/83287/campos_512_v4
+14/83293/campos_512_v4
+14/83295/campos_512_v4
+14/83299/campos_512_v4
+14/83305/campos_512_v4
+14/83311/campos_512_v4
+14/83327/campos_512_v4
+14/83334/campos_512_v4
+14/83336/campos_512_v4
+14/83344/campos_512_v4
+14/83347/campos_512_v4
+14/83348/campos_512_v4
+14/83351/campos_512_v4
+14/83361/campos_512_v4
+14/83366/campos_512_v4
+14/83369/campos_512_v4
+14/83379/campos_512_v4
+14/83382/campos_512_v4
+14/83383/campos_512_v4
+14/83392/campos_512_v4
+14/83411/campos_512_v4
+14/83418/campos_512_v4
+14/83419/campos_512_v4
+14/83423/campos_512_v4
+14/83424/campos_512_v4
+14/83428/campos_512_v4
+14/83445/campos_512_v4
+14/83451/campos_512_v4
+14/83456/campos_512_v4
+14/83459/campos_512_v4
+14/83463/campos_512_v4
+14/83476/campos_512_v4
+14/83487/campos_512_v4
+14/83489/campos_512_v4
+14/83491/campos_512_v4
+14/83494/campos_512_v4
+14/83495/campos_512_v4
+14/83512/campos_512_v4
+14/83515/campos_512_v4
+14/83546/campos_512_v4
+14/83554/campos_512_v4
+14/83558/campos_512_v4
+14/83561/campos_512_v4
+14/83568/campos_512_v4
+14/83577/campos_512_v4
+14/83580/campos_512_v4
+14/83581/campos_512_v4
+14/83589/campos_512_v4
+14/83592/campos_512_v4
+14/83601/campos_512_v4
+14/83604/campos_512_v4
+14/83620/campos_512_v4
+14/83626/campos_512_v4
+14/83647/campos_512_v4
+14/83665/campos_512_v4
+14/83666/campos_512_v4
+14/83668/campos_512_v4
+14/83670/campos_512_v4
+14/83678/campos_512_v4
+14/83682/campos_512_v4
+14/83683/campos_512_v4
+14/83687/campos_512_v4
+14/83688/campos_512_v4
+14/83708/campos_512_v4
+14/83709/campos_512_v4
+14/83716/campos_512_v4
+14/83728/campos_512_v4
+14/83738/campos_512_v4
+14/83743/campos_512_v4
+14/83752/campos_512_v4
+14/83753/campos_512_v4
+14/83759/campos_512_v4
+14/83776/campos_512_v4
+14/83778/campos_512_v4
+14/83785/campos_512_v4
+14/83786/campos_512_v4
+14/83797/campos_512_v4
+14/83805/campos_512_v4
+14/83817/campos_512_v4
+14/83828/campos_512_v4
+14/83837/campos_512_v4
+14/83843/campos_512_v4
+14/83851/campos_512_v4
+14/83861/campos_512_v4
+14/83868/campos_512_v4
+14/83875/campos_512_v4
+14/83889/campos_512_v4
+14/83890/campos_512_v4
+14/83912/campos_512_v4
+14/83914/campos_512_v4
+14/83915/campos_512_v4
+14/83946/campos_512_v4
+14/83947/campos_512_v4
+14/83948/campos_512_v4
+14/83965/campos_512_v4
+14/83970/campos_512_v4
+14/83974/campos_512_v4
+14/83987/campos_512_v4
+14/83997/campos_512_v4
+14/84020/campos_512_v4
+14/84021/campos_512_v4
+14/84035/campos_512_v4
+14/84036/campos_512_v4
+14/84039/campos_512_v4
+14/84045/campos_512_v4
+14/84049/campos_512_v4
+14/84057/campos_512_v4
+14/84061/campos_512_v4
+14/84066/campos_512_v4
+14/84077/campos_512_v4
+14/84079/campos_512_v4
+14/84091/campos_512_v4
+14/84092/campos_512_v4
+14/84097/campos_512_v4
+14/84099/campos_512_v4
+14/84106/campos_512_v4
+14/84112/campos_512_v4
+14/84119/campos_512_v4
+14/84126/campos_512_v4
+14/84154/campos_512_v4
+14/84165/campos_512_v4
+14/84166/campos_512_v4
+14/84172/campos_512_v4
+14/84187/campos_512_v4
+14/84193/campos_512_v4
+14/84201/campos_512_v4
+14/84202/campos_512_v4
+14/84204/campos_512_v4
+14/84207/campos_512_v4
+14/84209/campos_512_v4
+14/84216/campos_512_v4
+14/84232/campos_512_v4
+14/84238/campos_512_v4
+14/84242/campos_512_v4
+14/84248/campos_512_v4
+14/84255/campos_512_v4
+14/84258/campos_512_v4
+14/84260/campos_512_v4
+14/84267/campos_512_v4
+14/84268/campos_512_v4
+14/84282/campos_512_v4
+14/84286/campos_512_v4
+14/84304/campos_512_v4
+14/84314/campos_512_v4
+14/84318/campos_512_v4
+14/84327/campos_512_v4
+14/84331/campos_512_v4
+14/84343/campos_512_v4
+14/84344/campos_512_v4
+14/84357/campos_512_v4
+14/84360/campos_512_v4
+14/84367/campos_512_v4
+14/84370/campos_512_v4
+14/84372/campos_512_v4
+14/84378/campos_512_v4
+14/84382/campos_512_v4
+14/84388/campos_512_v4
+14/84389/campos_512_v4
+14/84400/campos_512_v4
+14/84406/campos_512_v4
+14/84422/campos_512_v4
+14/84429/campos_512_v4
+14/84435/campos_512_v4
+14/84436/campos_512_v4
+14/84457/campos_512_v4
+14/84470/campos_512_v4
+14/84473/campos_512_v4
+14/84482/campos_512_v4
+14/84483/campos_512_v4
+14/84494/campos_512_v4
+14/84498/campos_512_v4
+14/84509/campos_512_v4
+14/84510/campos_512_v4
+14/84511/campos_512_v4
+14/84526/campos_512_v4
+14/84536/campos_512_v4
+14/84539/campos_512_v4
+14/84540/campos_512_v4
+14/84545/campos_512_v4
+14/84546/campos_512_v4
+14/84554/campos_512_v4
+14/84563/campos_512_v4
+14/84579/campos_512_v4
+14/84587/campos_512_v4
+14/84589/campos_512_v4
+14/84591/campos_512_v4
+14/84597/campos_512_v4
+14/84611/campos_512_v4
+14/84612/campos_512_v4
+14/84616/campos_512_v4
+14/84619/campos_512_v4
+14/84621/campos_512_v4
+14/84632/campos_512_v4
+14/84653/campos_512_v4
+14/84658/campos_512_v4
+14/84667/campos_512_v4
+14/84669/campos_512_v4
+14/84673/campos_512_v4
+14/84674/campos_512_v4
+14/84688/campos_512_v4
+14/84700/campos_512_v4
+14/84703/campos_512_v4
+14/84708/campos_512_v4
+14/84712/campos_512_v4
+14/84724/campos_512_v4
+14/84733/campos_512_v4
+14/84760/campos_512_v4
+14/84777/campos_512_v4
+14/84797/campos_512_v4
+14/84807/campos_512_v4
+14/84811/campos_512_v4
+14/84813/campos_512_v4
+14/84823/campos_512_v4
+14/84826/campos_512_v4
+14/84840/campos_512_v4
+14/84844/campos_512_v4
+14/84877/campos_512_v4
+14/84880/campos_512_v4
+14/84881/campos_512_v4
+14/84886/campos_512_v4
+14/84890/campos_512_v4
+14/84895/campos_512_v4
+14/84897/campos_512_v4
+14/84899/campos_512_v4
+14/84910/campos_512_v4
+14/84917/campos_512_v4
+14/84929/campos_512_v4
+14/84934/campos_512_v4
+14/84947/campos_512_v4
+14/84952/campos_512_v4
+14/84954/campos_512_v4
+14/84959/campos_512_v4
+14/84970/campos_512_v4
+14/84971/campos_512_v4
+14/84982/campos_512_v4
+14/84996/campos_512_v4
+14/85000/campos_512_v4
+140/710003/campos_512_v4
+140/710009/campos_512_v4
+140/710012/campos_512_v4
+140/710017/campos_512_v4
+140/710019/campos_512_v4
+140/710020/campos_512_v4
+140/710035/campos_512_v4
+140/710036/campos_512_v4
+140/710037/campos_512_v4
+140/710042/campos_512_v4
+140/710049/campos_512_v4
+140/710061/campos_512_v4
+140/710063/campos_512_v4
+140/710064/campos_512_v4
+140/710066/campos_512_v4
+140/710075/campos_512_v4
+140/710077/campos_512_v4
+140/710078/campos_512_v4
+140/710095/campos_512_v4
+140/710103/campos_512_v4
+140/710111/campos_512_v4
+140/710114/campos_512_v4
+140/710125/campos_512_v4
+140/710145/campos_512_v4
+140/710155/campos_512_v4
+140/710164/campos_512_v4
+140/710167/campos_512_v4
+140/710193/campos_512_v4
+140/710198/campos_512_v4
+140/710205/campos_512_v4
+140/710209/campos_512_v4
+140/710211/campos_512_v4
+140/710218/campos_512_v4
+140/710228/campos_512_v4
+140/710231/campos_512_v4
+140/710234/campos_512_v4
+140/710255/campos_512_v4
+140/710267/campos_512_v4
+140/710269/campos_512_v4
+140/710270/campos_512_v4
+140/710272/campos_512_v4
+140/710273/campos_512_v4
+140/710277/campos_512_v4
+140/710301/campos_512_v4
+140/710302/campos_512_v4
+140/710310/campos_512_v4
+140/710320/campos_512_v4
+140/710323/campos_512_v4
+140/710325/campos_512_v4
+140/710332/campos_512_v4
+140/710338/campos_512_v4
+140/710346/campos_512_v4
+140/710347/campos_512_v4
+140/710349/campos_512_v4
+140/710377/campos_512_v4
+140/710379/campos_512_v4
+140/710383/campos_512_v4
+140/710390/campos_512_v4
+140/710395/campos_512_v4
+140/710397/campos_512_v4
+140/710404/campos_512_v4
+140/710415/campos_512_v4
+140/710419/campos_512_v4
+140/710422/campos_512_v4
+140/710423/campos_512_v4
+140/710436/campos_512_v4
+140/710446/campos_512_v4
+140/710451/campos_512_v4
+140/710467/campos_512_v4
+140/710478/campos_512_v4
+140/710481/campos_512_v4
+140/710482/campos_512_v4
+140/710499/campos_512_v4
+140/710505/campos_512_v4
+140/710506/campos_512_v4
+140/710511/campos_512_v4
+140/710514/campos_512_v4
+140/710521/campos_512_v4
+140/710528/campos_512_v4
+140/710535/campos_512_v4
+140/710536/campos_512_v4
+140/710537/campos_512_v4
+140/710539/campos_512_v4
+140/710541/campos_512_v4
+140/710543/campos_512_v4
+140/710566/campos_512_v4
+140/710578/campos_512_v4
+140/710590/campos_512_v4
+140/710602/campos_512_v4
+140/710608/campos_512_v4
+140/710627/campos_512_v4
+140/710628/campos_512_v4
+140/710635/campos_512_v4
+140/710638/campos_512_v4
+140/710639/campos_512_v4
+140/710640/campos_512_v4
+140/710641/campos_512_v4
+140/710649/campos_512_v4
+140/710653/campos_512_v4
+140/710654/campos_512_v4
+140/710656/campos_512_v4
+140/710661/campos_512_v4
+140/710662/campos_512_v4
+140/710680/campos_512_v4
+140/710683/campos_512_v4
+140/710703/campos_512_v4
+140/710707/campos_512_v4
+140/710709/campos_512_v4
+140/710715/campos_512_v4
+140/710744/campos_512_v4
+140/710758/campos_512_v4
+140/710759/campos_512_v4
+140/710771/campos_512_v4
+140/710772/campos_512_v4
+140/710775/campos_512_v4
+140/710778/campos_512_v4
+140/710780/campos_512_v4
+140/710790/campos_512_v4
+140/710797/campos_512_v4
+140/710798/campos_512_v4
+140/710813/campos_512_v4
+140/710814/campos_512_v4
+140/710818/campos_512_v4
+140/710849/campos_512_v4
+140/710870/campos_512_v4
+140/710886/campos_512_v4
+140/710894/campos_512_v4
+140/710905/campos_512_v4
+140/710913/campos_512_v4
+140/710914/campos_512_v4
+140/710940/campos_512_v4
+140/710942/campos_512_v4
+140/710958/campos_512_v4
+140/710962/campos_512_v4
+140/710967/campos_512_v4
+140/710969/campos_512_v4
+140/710971/campos_512_v4
+140/710973/campos_512_v4
+140/710988/campos_512_v4
+140/710993/campos_512_v4
+140/711023/campos_512_v4
+140/711026/campos_512_v4
+140/711041/campos_512_v4
+140/711071/campos_512_v4
+140/711076/campos_512_v4
+140/711089/campos_512_v4
+140/711108/campos_512_v4
+140/711110/campos_512_v4
+140/711112/campos_512_v4
+140/711122/campos_512_v4
+140/711130/campos_512_v4
+140/711133/campos_512_v4
+140/711141/campos_512_v4
+140/711143/campos_512_v4
+140/711149/campos_512_v4
+140/711153/campos_512_v4
+140/711159/campos_512_v4
+140/711169/campos_512_v4
+140/711171/campos_512_v4
+140/711173/campos_512_v4
+140/711174/campos_512_v4
+140/711182/campos_512_v4
+140/711188/campos_512_v4
+140/711203/campos_512_v4
+140/711212/campos_512_v4
+140/711213/campos_512_v4
+140/711220/campos_512_v4
+140/711227/campos_512_v4
+140/711239/campos_512_v4
+140/711275/campos_512_v4
+140/711278/campos_512_v4
+140/711287/campos_512_v4
+140/711296/campos_512_v4
+140/711315/campos_512_v4
+140/711339/campos_512_v4
+140/711341/campos_512_v4
+140/711349/campos_512_v4
+140/711364/campos_512_v4
+140/711365/campos_512_v4
+140/711367/campos_512_v4
+140/711385/campos_512_v4
+140/711390/campos_512_v4
+140/711396/campos_512_v4
+140/711403/campos_512_v4
+140/711406/campos_512_v4
+140/711408/campos_512_v4
+140/711411/campos_512_v4
+140/711415/campos_512_v4
+140/711418/campos_512_v4
+140/711421/campos_512_v4
+140/711424/campos_512_v4
+140/711425/campos_512_v4
+140/711428/campos_512_v4
+140/711430/campos_512_v4
+140/711431/campos_512_v4
+140/711433/campos_512_v4
+140/711438/campos_512_v4
+140/711440/campos_512_v4
+140/711442/campos_512_v4
+140/711446/campos_512_v4
+140/711459/campos_512_v4
+140/711461/campos_512_v4
+140/711478/campos_512_v4
+140/711487/campos_512_v4
+140/711488/campos_512_v4
+140/711489/campos_512_v4
+140/711492/campos_512_v4
+140/711503/campos_512_v4
+140/711511/campos_512_v4
+140/711513/campos_512_v4
+140/711519/campos_512_v4
+140/711524/campos_512_v4
+140/711531/campos_512_v4
+140/711535/campos_512_v4
+140/711538/campos_512_v4
+140/711540/campos_512_v4
+140/711567/campos_512_v4
+140/711578/campos_512_v4
+140/711583/campos_512_v4
+140/711587/campos_512_v4
+140/711597/campos_512_v4
+140/711598/campos_512_v4
+140/711604/campos_512_v4
+140/711608/campos_512_v4
+140/711618/campos_512_v4
+140/711620/campos_512_v4
+140/711642/campos_512_v4
+140/711661/campos_512_v4
+140/711663/campos_512_v4
+140/711664/campos_512_v4
+140/711672/campos_512_v4
+140/711675/campos_512_v4
+140/711680/campos_512_v4
+140/711688/campos_512_v4
+140/711695/campos_512_v4
+140/711697/campos_512_v4
+140/711704/campos_512_v4
+140/711713/campos_512_v4
+140/711715/campos_512_v4
+140/711716/campos_512_v4
+140/711717/campos_512_v4
+140/711719/campos_512_v4
+140/711727/campos_512_v4
+140/711736/campos_512_v4
+140/711747/campos_512_v4
+140/711784/campos_512_v4
+140/711789/campos_512_v4
+140/711795/campos_512_v4
+140/711797/campos_512_v4
+140/711809/campos_512_v4
+140/711825/campos_512_v4
+140/711827/campos_512_v4
+140/711829/campos_512_v4
+140/711831/campos_512_v4
+140/711832/campos_512_v4
+140/711836/campos_512_v4
+140/711840/campos_512_v4
+140/711842/campos_512_v4
+140/711851/campos_512_v4
+140/711852/campos_512_v4
+140/711859/campos_512_v4
+140/711864/campos_512_v4
+140/711865/campos_512_v4
+140/711866/campos_512_v4
+140/711869/campos_512_v4
+140/711874/campos_512_v4
+140/711875/campos_512_v4
+140/711886/campos_512_v4
+140/711893/campos_512_v4
+140/711900/campos_512_v4
+140/711925/campos_512_v4
+140/711927/campos_512_v4
+140/711928/campos_512_v4
+140/711929/campos_512_v4
+140/711934/campos_512_v4
+140/711939/campos_512_v4
+140/711941/campos_512_v4
+140/711945/campos_512_v4
+140/711946/campos_512_v4
+140/711947/campos_512_v4
+140/711956/campos_512_v4
+140/711962/campos_512_v4
+140/711967/campos_512_v4
+140/711968/campos_512_v4
+140/711974/campos_512_v4
+140/711983/campos_512_v4
+140/711986/campos_512_v4
+140/711988/campos_512_v4
+140/711991/campos_512_v4
+140/711994/campos_512_v4
+140/712001/campos_512_v4
+140/712009/campos_512_v4
+140/712013/campos_512_v4
+140/712014/campos_512_v4
+140/712021/campos_512_v4
+140/712040/campos_512_v4
+140/712042/campos_512_v4
+140/712043/campos_512_v4
+140/712048/campos_512_v4
+140/712049/campos_512_v4
+140/712053/campos_512_v4
+140/712055/campos_512_v4
+140/712071/campos_512_v4
+140/712078/campos_512_v4
+140/712082/campos_512_v4
+140/712086/campos_512_v4
+140/712088/campos_512_v4
+140/712094/campos_512_v4
+140/712100/campos_512_v4
+140/712108/campos_512_v4
+140/712111/campos_512_v4
+140/712112/campos_512_v4
+140/712115/campos_512_v4
+140/712121/campos_512_v4
+140/712127/campos_512_v4
+140/712133/campos_512_v4
+140/712138/campos_512_v4
+140/712153/campos_512_v4
+140/712154/campos_512_v4
+140/712160/campos_512_v4
+140/712175/campos_512_v4
+140/712182/campos_512_v4
+140/712191/campos_512_v4
+140/712195/campos_512_v4
+140/712210/campos_512_v4
+140/712223/campos_512_v4
+140/712225/campos_512_v4
+140/712226/campos_512_v4
+140/712238/campos_512_v4
+140/712245/campos_512_v4
+140/712272/campos_512_v4
+140/712274/campos_512_v4
+140/712276/campos_512_v4
+140/712298/campos_512_v4
+140/712303/campos_512_v4
+140/712307/campos_512_v4
+140/712336/campos_512_v4
+140/712337/campos_512_v4
+140/712354/campos_512_v4
+140/712356/campos_512_v4
+140/712362/campos_512_v4
+140/712372/campos_512_v4
+140/712391/campos_512_v4
+140/712395/campos_512_v4
+140/712417/campos_512_v4
+140/712437/campos_512_v4
+140/712440/campos_512_v4
+140/712446/campos_512_v4
+140/712458/campos_512_v4
+140/712462/campos_512_v4
+140/712463/campos_512_v4
+140/712474/campos_512_v4
+140/712505/campos_512_v4
+140/712507/campos_512_v4
+140/712522/campos_512_v4
+140/712523/campos_512_v4
+140/712526/campos_512_v4
+140/712529/campos_512_v4
+140/712531/campos_512_v4
+140/712533/campos_512_v4
+140/712536/campos_512_v4
+140/712551/campos_512_v4
+140/712553/campos_512_v4
+140/712557/campos_512_v4
+140/712561/campos_512_v4
+140/712589/campos_512_v4
+140/712608/campos_512_v4
+140/712611/campos_512_v4
+140/712617/campos_512_v4
+140/712618/campos_512_v4
+140/712648/campos_512_v4
+140/712654/campos_512_v4
+140/712656/campos_512_v4
+140/712660/campos_512_v4
+140/712662/campos_512_v4
+140/712666/campos_512_v4
+140/712668/campos_512_v4
+140/712669/campos_512_v4
+140/712673/campos_512_v4
+140/712684/campos_512_v4
+140/712688/campos_512_v4
+140/712689/campos_512_v4
+140/712691/campos_512_v4
+140/712695/campos_512_v4
+140/712698/campos_512_v4
+140/712711/campos_512_v4
+140/712714/campos_512_v4
+140/712715/campos_512_v4
+140/712733/campos_512_v4
+140/712736/campos_512_v4
+140/712746/campos_512_v4
+140/712751/campos_512_v4
+140/712761/campos_512_v4
+140/712767/campos_512_v4
+140/712773/campos_512_v4
+140/712787/campos_512_v4
+140/712790/campos_512_v4
+140/712800/campos_512_v4
+140/712810/campos_512_v4
+140/712822/campos_512_v4
+140/712825/campos_512_v4
+140/712830/campos_512_v4
+140/712838/campos_512_v4
+140/712852/campos_512_v4
+140/712853/campos_512_v4
+140/712858/campos_512_v4
+140/712868/campos_512_v4
+140/712873/campos_512_v4
+140/712885/campos_512_v4
+140/712896/campos_512_v4
+140/712899/campos_512_v4
+140/712903/campos_512_v4
+140/712928/campos_512_v4
+140/712936/campos_512_v4
+140/712945/campos_512_v4
+140/712947/campos_512_v4
+140/712951/campos_512_v4
+140/712958/campos_512_v4
+140/712976/campos_512_v4
+140/712989/campos_512_v4
+140/712997/campos_512_v4
+140/712998/campos_512_v4
+140/713002/campos_512_v4
+140/713013/campos_512_v4
+140/713017/campos_512_v4
+140/713019/campos_512_v4
+140/713022/campos_512_v4
+140/713045/campos_512_v4
+140/713052/campos_512_v4
+140/713058/campos_512_v4
+140/713084/campos_512_v4
+140/713085/campos_512_v4
+140/713087/campos_512_v4
+140/713090/campos_512_v4
+140/713097/campos_512_v4
+140/713116/campos_512_v4
+140/713118/campos_512_v4
+140/713122/campos_512_v4
+140/713123/campos_512_v4
+140/713124/campos_512_v4
+140/713140/campos_512_v4
+140/713143/campos_512_v4
+140/713146/campos_512_v4
+140/713155/campos_512_v4
+140/713164/campos_512_v4
+140/713171/campos_512_v4
+140/713173/campos_512_v4
+140/713175/campos_512_v4
+140/713191/campos_512_v4
+140/713199/campos_512_v4
+140/713200/campos_512_v4
+140/713203/campos_512_v4
+140/713209/campos_512_v4
+140/713211/campos_512_v4
+140/713212/campos_512_v4
+140/713215/campos_512_v4
+140/713217/campos_512_v4
+140/713232/campos_512_v4
+140/713254/campos_512_v4
+140/713273/campos_512_v4
+140/713275/campos_512_v4
+140/713280/campos_512_v4
+140/713281/campos_512_v4
+140/713292/campos_512_v4
+140/713304/campos_512_v4
+140/713310/campos_512_v4
+140/713313/campos_512_v4
+140/713314/campos_512_v4
+140/713318/campos_512_v4
+140/713332/campos_512_v4
+140/713335/campos_512_v4
+140/713339/campos_512_v4
+140/713346/campos_512_v4
+140/713347/campos_512_v4
+140/713348/campos_512_v4
+140/713349/campos_512_v4
+140/713377/campos_512_v4
+140/713381/campos_512_v4
+140/713388/campos_512_v4
+140/713396/campos_512_v4
+140/713401/campos_512_v4
+140/713408/campos_512_v4
+140/713409/campos_512_v4
+140/713412/campos_512_v4
+140/713415/campos_512_v4
+140/713429/campos_512_v4
+140/713448/campos_512_v4
+140/713451/campos_512_v4
+140/713471/campos_512_v4
+140/713473/campos_512_v4
+140/713474/campos_512_v4
+140/713477/campos_512_v4
+140/713480/campos_512_v4
+140/713489/campos_512_v4
+140/713490/campos_512_v4
+140/713493/campos_512_v4
+140/713499/campos_512_v4
+140/713507/campos_512_v4
+140/713519/campos_512_v4
+140/713526/campos_512_v4
+140/713530/campos_512_v4
+140/713540/campos_512_v4
+140/713543/campos_512_v4
+140/713545/campos_512_v4
+140/713548/campos_512_v4
+140/713552/campos_512_v4
+140/713554/campos_512_v4
+140/713556/campos_512_v4
+140/713558/campos_512_v4
+140/713560/campos_512_v4
+140/713577/campos_512_v4
+140/713583/campos_512_v4
+140/713585/campos_512_v4
+140/713597/campos_512_v4
+140/713609/campos_512_v4
+140/713613/campos_512_v4
+140/713618/campos_512_v4
+140/713625/campos_512_v4
+140/713636/campos_512_v4
+140/713637/campos_512_v4
+140/713681/campos_512_v4
+140/713686/campos_512_v4
+140/713697/campos_512_v4
+140/713708/campos_512_v4
+140/713711/campos_512_v4
+140/713716/campos_512_v4
+140/713723/campos_512_v4
+140/713727/campos_512_v4
+140/713738/campos_512_v4
+140/713741/campos_512_v4
+140/713748/campos_512_v4
+140/713759/campos_512_v4
+140/713761/campos_512_v4
+140/713764/campos_512_v4
+140/713768/campos_512_v4
+140/713770/campos_512_v4
+140/713776/campos_512_v4
+140/713787/campos_512_v4
+140/713789/campos_512_v4
+140/713790/campos_512_v4
+140/713792/campos_512_v4
+140/713805/campos_512_v4
+140/713810/campos_512_v4
+140/713811/campos_512_v4
+140/713826/campos_512_v4
+140/713830/campos_512_v4
+140/713835/campos_512_v4
+140/713874/campos_512_v4
+140/713879/campos_512_v4
+140/713898/campos_512_v4
+140/713909/campos_512_v4
+140/713946/campos_512_v4
+140/713947/campos_512_v4
+140/713949/campos_512_v4
+140/713951/campos_512_v4
+140/713958/campos_512_v4
+140/713961/campos_512_v4
+140/713965/campos_512_v4
+140/713966/campos_512_v4
+140/713993/campos_512_v4
+140/713998/campos_512_v4
+140/714016/campos_512_v4
+140/714023/campos_512_v4
+140/714042/campos_512_v4
+140/714044/campos_512_v4
+140/714050/campos_512_v4
+140/714053/campos_512_v4
+140/714061/campos_512_v4
+140/714064/campos_512_v4
+140/714074/campos_512_v4
+140/714075/campos_512_v4
+140/714083/campos_512_v4
+140/714084/campos_512_v4
+140/714094/campos_512_v4
+140/714106/campos_512_v4
+140/714129/campos_512_v4
+140/714137/campos_512_v4
+140/714138/campos_512_v4
+140/714139/campos_512_v4
+140/714141/campos_512_v4
+140/714146/campos_512_v4
+140/714160/campos_512_v4
+140/714163/campos_512_v4
+140/714186/campos_512_v4
+140/714205/campos_512_v4
+140/714208/campos_512_v4
+140/714218/campos_512_v4
+140/714219/campos_512_v4
+140/714220/campos_512_v4
+140/714222/campos_512_v4
+140/714225/campos_512_v4
+140/714226/campos_512_v4
+140/714236/campos_512_v4
+140/714239/campos_512_v4
+140/714246/campos_512_v4
+140/714249/campos_512_v4
+140/714250/campos_512_v4
+140/714261/campos_512_v4
+140/714262/campos_512_v4
+140/714267/campos_512_v4
+140/714284/campos_512_v4
+140/714288/campos_512_v4
+140/714307/campos_512_v4
+140/714314/campos_512_v4
+140/714323/campos_512_v4
+140/714324/campos_512_v4
+140/714339/campos_512_v4
+140/714345/campos_512_v4
+140/714350/campos_512_v4
+140/714352/campos_512_v4
+140/714359/campos_512_v4
+140/714361/campos_512_v4
+140/714364/campos_512_v4
+140/714371/campos_512_v4
+140/714375/campos_512_v4
+140/714378/campos_512_v4
+140/714390/campos_512_v4
+140/714394/campos_512_v4
+140/714415/campos_512_v4
+140/714419/campos_512_v4
+140/714426/campos_512_v4
+140/714427/campos_512_v4
+140/714429/campos_512_v4
+140/714441/campos_512_v4
+140/714450/campos_512_v4
+140/714452/campos_512_v4
+140/714461/campos_512_v4
+140/714470/campos_512_v4
+140/714486/campos_512_v4
+140/714490/campos_512_v4
+140/714492/campos_512_v4
+140/714498/campos_512_v4
+140/714499/campos_512_v4
+140/714501/campos_512_v4
+140/714506/campos_512_v4
+140/714518/campos_512_v4
+140/714530/campos_512_v4
+140/714531/campos_512_v4
+140/714534/campos_512_v4
+140/714545/campos_512_v4
+140/714554/campos_512_v4
+140/714574/campos_512_v4
+140/714594/campos_512_v4
+140/714595/campos_512_v4
+140/714602/campos_512_v4
+140/714608/campos_512_v4
+140/714643/campos_512_v4
+140/714644/campos_512_v4
+140/714649/campos_512_v4
+140/714662/campos_512_v4
+140/714665/campos_512_v4
+140/714676/campos_512_v4
+140/714687/campos_512_v4
+140/714702/campos_512_v4
+140/714704/campos_512_v4
+140/714718/campos_512_v4
+140/714721/campos_512_v4
+140/714724/campos_512_v4
+140/714730/campos_512_v4
+140/714734/campos_512_v4
+140/714735/campos_512_v4
+140/714768/campos_512_v4
+140/714769/campos_512_v4
+140/714770/campos_512_v4
+140/714780/campos_512_v4
+140/714783/campos_512_v4
+140/714790/campos_512_v4
+140/714795/campos_512_v4
+140/714798/campos_512_v4
+140/714801/campos_512_v4
+140/714807/campos_512_v4
+140/714814/campos_512_v4
+140/714815/campos_512_v4
+140/714817/campos_512_v4
+140/714839/campos_512_v4
+140/714843/campos_512_v4
+140/714846/campos_512_v4
+140/714850/campos_512_v4
+140/714875/campos_512_v4
+140/714880/campos_512_v4
+140/714882/campos_512_v4
+140/714887/campos_512_v4
+140/714888/campos_512_v4
+140/714890/campos_512_v4
+140/714897/campos_512_v4
+140/714917/campos_512_v4
+140/714925/campos_512_v4
+140/714933/campos_512_v4
+140/714939/campos_512_v4
+140/714944/campos_512_v4
+140/714979/campos_512_v4
+140/714983/campos_512_v4
+140/714993/campos_512_v4
+140/714997/campos_512_v4
+141/715004/campos_512_v4
+141/715005/campos_512_v4
+141/715030/campos_512_v4
+141/715034/campos_512_v4
+141/715043/campos_512_v4
+141/715056/campos_512_v4
+141/715072/campos_512_v4
+141/715075/campos_512_v4
+141/715079/campos_512_v4
+141/715085/campos_512_v4
+141/715099/campos_512_v4
+141/715117/campos_512_v4
+141/715130/campos_512_v4
+141/715137/campos_512_v4
+141/715145/campos_512_v4
+141/715150/campos_512_v4
+141/715159/campos_512_v4
+141/715164/campos_512_v4
+141/715170/campos_512_v4
+141/715173/campos_512_v4
+141/715174/campos_512_v4
+141/715180/campos_512_v4
+141/715181/campos_512_v4
+141/715184/campos_512_v4
+141/715195/campos_512_v4
+141/715234/campos_512_v4
+141/715243/campos_512_v4
+141/715249/campos_512_v4
+141/715255/campos_512_v4
+141/715258/campos_512_v4
+141/715267/campos_512_v4
+141/715275/campos_512_v4
+141/715277/campos_512_v4
+141/715291/campos_512_v4
+141/715300/campos_512_v4
+141/715308/campos_512_v4
+141/715322/campos_512_v4
+141/715323/campos_512_v4
+141/715324/campos_512_v4
+141/715331/campos_512_v4
+141/715334/campos_512_v4
+141/715339/campos_512_v4
+141/715363/campos_512_v4
+141/715367/campos_512_v4
+141/715368/campos_512_v4
+141/715371/campos_512_v4
+141/715377/campos_512_v4
+141/715379/campos_512_v4
+141/715383/campos_512_v4
+141/715386/campos_512_v4
+141/715387/campos_512_v4
+141/715389/campos_512_v4
+141/715398/campos_512_v4
+141/715399/campos_512_v4
+141/715406/campos_512_v4
+141/715417/campos_512_v4
+141/715436/campos_512_v4
+141/715438/campos_512_v4
+141/715459/campos_512_v4
+141/715464/campos_512_v4
+141/715475/campos_512_v4
+141/715476/campos_512_v4
+141/715484/campos_512_v4
+141/715486/campos_512_v4
+141/715501/campos_512_v4
+141/715502/campos_512_v4
+141/715504/campos_512_v4
+141/715506/campos_512_v4
+141/715507/campos_512_v4
+141/715508/campos_512_v4
+141/715515/campos_512_v4
+141/715530/campos_512_v4
+141/715538/campos_512_v4
+141/715543/campos_512_v4
+141/715544/campos_512_v4
+141/715545/campos_512_v4
+141/715549/campos_512_v4
+141/715552/campos_512_v4
+141/715553/campos_512_v4
+141/715555/campos_512_v4
+141/715576/campos_512_v4
+141/715584/campos_512_v4
+141/715585/campos_512_v4
+141/715587/campos_512_v4
+141/715599/campos_512_v4
+141/715601/campos_512_v4
+141/715602/campos_512_v4
+141/715632/campos_512_v4
+141/715638/campos_512_v4
+141/715647/campos_512_v4
+141/715649/campos_512_v4
+141/715654/campos_512_v4
+141/715656/campos_512_v4
+141/715662/campos_512_v4
+141/715664/campos_512_v4
+141/715669/campos_512_v4
+141/715683/campos_512_v4
+141/715693/campos_512_v4
+141/715694/campos_512_v4
+141/715702/campos_512_v4
+141/715705/campos_512_v4
+141/715709/campos_512_v4
+141/715710/campos_512_v4
+141/715727/campos_512_v4
+141/715728/campos_512_v4
+141/715750/campos_512_v4
+141/715752/campos_512_v4
+141/715754/campos_512_v4
+141/715760/campos_512_v4
+141/715763/campos_512_v4
+141/715764/campos_512_v4
+141/715767/campos_512_v4
+141/715780/campos_512_v4
+141/715789/campos_512_v4
+141/715790/campos_512_v4
+141/715798/campos_512_v4
+141/715808/campos_512_v4
+141/715824/campos_512_v4
+141/715830/campos_512_v4
+141/715840/campos_512_v4
+141/715870/campos_512_v4
+141/715877/campos_512_v4
+141/715880/campos_512_v4
+141/715891/campos_512_v4
+141/715897/campos_512_v4
+141/715900/campos_512_v4
+141/715910/campos_512_v4
+141/715955/campos_512_v4
+141/715964/campos_512_v4
+141/715979/campos_512_v4
+141/716003/campos_512_v4
+141/716004/campos_512_v4
+141/716013/campos_512_v4
+141/716029/campos_512_v4
+141/716041/campos_512_v4
+141/716046/campos_512_v4
+141/716053/campos_512_v4
+141/716064/campos_512_v4
+141/716069/campos_512_v4
+141/716072/campos_512_v4
+141/716077/campos_512_v4
+141/716078/campos_512_v4
+141/716082/campos_512_v4
+141/716085/campos_512_v4
+141/716086/campos_512_v4
+141/716088/campos_512_v4
+141/716089/campos_512_v4
+141/716092/campos_512_v4
+141/716099/campos_512_v4
+141/716110/campos_512_v4
+141/716126/campos_512_v4
+141/716134/campos_512_v4
+141/716137/campos_512_v4
+141/716151/campos_512_v4
+141/716163/campos_512_v4
+141/716168/campos_512_v4
+141/716182/campos_512_v4
+141/716183/campos_512_v4
+141/716187/campos_512_v4
+141/716190/campos_512_v4
+141/716201/campos_512_v4
+141/716207/campos_512_v4
+141/716229/campos_512_v4
+141/716237/campos_512_v4
+141/716250/campos_512_v4
+141/716251/campos_512_v4
+141/716260/campos_512_v4
+141/716270/campos_512_v4
+141/716290/campos_512_v4
+141/716292/campos_512_v4
+141/716298/campos_512_v4
+141/716302/campos_512_v4
+141/716303/campos_512_v4
+141/716313/campos_512_v4
+141/716318/campos_512_v4
+141/716335/campos_512_v4
+141/716509/campos_512_v4
+141/716512/campos_512_v4
+141/716517/campos_512_v4
+141/716520/campos_512_v4
+141/716546/campos_512_v4
+141/716556/campos_512_v4
+141/716563/campos_512_v4
+141/716565/campos_512_v4
+141/716566/campos_512_v4
+141/716581/campos_512_v4
+141/716584/campos_512_v4
+141/716587/campos_512_v4
+141/716614/campos_512_v4
+141/716616/campos_512_v4
+141/716617/campos_512_v4
+141/716622/campos_512_v4
+141/716632/campos_512_v4
+141/716634/campos_512_v4
+141/716678/campos_512_v4
+141/716688/campos_512_v4
+141/716691/campos_512_v4
+141/716696/campos_512_v4
+141/716697/campos_512_v4
+141/716711/campos_512_v4
+141/716719/campos_512_v4
+141/716728/campos_512_v4
+141/716742/campos_512_v4
+141/716743/campos_512_v4
+141/716744/campos_512_v4
+141/716748/campos_512_v4
+141/716752/campos_512_v4
+141/716756/campos_512_v4
+141/716760/campos_512_v4
+141/716761/campos_512_v4
+141/716762/campos_512_v4
+141/716770/campos_512_v4
+141/716779/campos_512_v4
+141/716789/campos_512_v4
+141/716797/campos_512_v4
+141/716798/campos_512_v4
+141/716836/campos_512_v4
+141/716847/campos_512_v4
+141/716848/campos_512_v4
+141/716856/campos_512_v4
+141/716861/campos_512_v4
+141/716863/campos_512_v4
+141/716864/campos_512_v4
+141/716883/campos_512_v4
+141/716887/campos_512_v4
+141/716889/campos_512_v4
+141/716890/campos_512_v4
+141/716891/campos_512_v4
+141/716896/campos_512_v4
+141/716899/campos_512_v4
+141/716901/campos_512_v4
+141/716909/campos_512_v4
+141/716912/campos_512_v4
+141/716913/campos_512_v4
+141/716914/campos_512_v4
+141/716917/campos_512_v4
+141/716921/campos_512_v4
+141/716924/campos_512_v4
+141/716930/campos_512_v4
+141/716931/campos_512_v4
+141/716940/campos_512_v4
+141/716948/campos_512_v4
+141/716952/campos_512_v4
+141/716957/campos_512_v4
+141/716967/campos_512_v4
+141/716974/campos_512_v4
+141/716981/campos_512_v4
+141/716982/campos_512_v4
+141/716983/campos_512_v4
+141/716988/campos_512_v4
+141/717011/campos_512_v4
+141/717015/campos_512_v4
+141/717016/campos_512_v4
+141/717019/campos_512_v4
+141/717021/campos_512_v4
+141/717022/campos_512_v4
+141/717040/campos_512_v4
+141/717048/campos_512_v4
+141/717061/campos_512_v4
+141/717068/campos_512_v4
+141/717076/campos_512_v4
+141/717078/campos_512_v4
+141/717090/campos_512_v4
+141/717091/campos_512_v4
+141/717092/campos_512_v4
+141/717096/campos_512_v4
+141/717120/campos_512_v4
+141/717127/campos_512_v4
+141/717132/campos_512_v4
+141/717139/campos_512_v4
+141/717144/campos_512_v4
+141/717150/campos_512_v4
+141/717158/campos_512_v4
+141/717162/campos_512_v4
+141/717172/campos_512_v4
+141/717179/campos_512_v4
+141/717181/campos_512_v4
+141/717182/campos_512_v4
+141/717192/campos_512_v4
+141/717203/campos_512_v4
+141/717209/campos_512_v4
+141/717217/campos_512_v4
+141/717231/campos_512_v4
+141/717245/campos_512_v4
+141/717247/campos_512_v4
+141/717249/campos_512_v4
+141/717254/campos_512_v4
+141/717256/campos_512_v4
+141/717259/campos_512_v4
+141/717272/campos_512_v4
+141/717287/campos_512_v4
+141/717294/campos_512_v4
+141/717295/campos_512_v4
+141/717296/campos_512_v4
+141/717299/campos_512_v4
+141/717318/campos_512_v4
+141/717324/campos_512_v4
+141/717326/campos_512_v4
+141/717333/campos_512_v4
+141/717335/campos_512_v4
+141/717339/campos_512_v4
+141/717342/campos_512_v4
+141/717362/campos_512_v4
+141/717363/campos_512_v4
+141/717366/campos_512_v4
+141/717367/campos_512_v4
+141/717378/campos_512_v4
+141/717382/campos_512_v4
+141/717395/campos_512_v4
+141/717396/campos_512_v4
+141/717398/campos_512_v4
+141/717403/campos_512_v4
+141/717405/campos_512_v4
+141/717415/campos_512_v4
+141/717428/campos_512_v4
+141/717436/campos_512_v4
+141/717449/campos_512_v4
+141/717450/campos_512_v4
+141/717453/campos_512_v4
+141/717472/campos_512_v4
+141/717480/campos_512_v4
+141/717496/campos_512_v4
+141/717499/campos_512_v4
+141/717516/campos_512_v4
+141/717519/campos_512_v4
+141/717520/campos_512_v4
+141/717528/campos_512_v4
+141/717534/campos_512_v4
+141/717543/campos_512_v4
+141/717553/campos_512_v4
+141/717556/campos_512_v4
+141/717557/campos_512_v4
+141/717567/campos_512_v4
+141/717568/campos_512_v4
+141/717571/campos_512_v4
+141/717573/campos_512_v4
+141/717576/campos_512_v4
+141/717579/campos_512_v4
+141/717595/campos_512_v4
+141/717601/campos_512_v4
+141/717606/campos_512_v4
+141/717615/campos_512_v4
+141/717621/campos_512_v4
+141/717623/campos_512_v4
+141/717638/campos_512_v4
+141/717639/campos_512_v4
+141/717647/campos_512_v4
+141/717648/campos_512_v4
+141/717663/campos_512_v4
+141/717664/campos_512_v4
+141/717669/campos_512_v4
+141/717674/campos_512_v4
+141/717680/campos_512_v4
+141/717712/campos_512_v4
+141/717718/campos_512_v4
+141/717731/campos_512_v4
+141/717732/campos_512_v4
+141/717745/campos_512_v4
+141/717746/campos_512_v4
+141/717747/campos_512_v4
+141/717763/campos_512_v4
+141/717789/campos_512_v4
+141/717811/campos_512_v4
+141/717821/campos_512_v4
+141/717827/campos_512_v4
+141/717834/campos_512_v4
+141/717852/campos_512_v4
+141/717858/campos_512_v4
+141/717861/campos_512_v4
+141/717888/campos_512_v4
+141/717893/campos_512_v4
+141/717912/campos_512_v4
+141/717916/campos_512_v4
+141/717924/campos_512_v4
+141/717926/campos_512_v4
+141/717942/campos_512_v4
+141/717949/campos_512_v4
+141/717953/campos_512_v4
+141/717959/campos_512_v4
+141/717960/campos_512_v4
+141/717966/campos_512_v4
+141/717968/campos_512_v4
+141/717977/campos_512_v4
+141/717993/campos_512_v4
+141/717994/campos_512_v4
+141/717995/campos_512_v4
+141/718003/campos_512_v4
+141/718006/campos_512_v4
+141/718009/campos_512_v4
+141/718011/campos_512_v4
+141/718016/campos_512_v4
+141/718035/campos_512_v4
+141/718038/campos_512_v4
+141/718050/campos_512_v4
+141/718063/campos_512_v4
+141/718065/campos_512_v4
+141/718070/campos_512_v4
+141/718073/campos_512_v4
+141/718087/campos_512_v4
+141/718092/campos_512_v4
+141/718093/campos_512_v4
+141/718101/campos_512_v4
+141/718128/campos_512_v4
+141/718131/campos_512_v4
+141/718135/campos_512_v4
+141/718136/campos_512_v4
+141/718151/campos_512_v4
+141/718155/campos_512_v4
+141/718168/campos_512_v4
+141/718176/campos_512_v4
+141/718177/campos_512_v4
+141/718184/campos_512_v4
+141/718187/campos_512_v4
+141/718216/campos_512_v4
+141/718222/campos_512_v4
+141/718230/campos_512_v4
+141/718238/campos_512_v4
+141/718245/campos_512_v4
+141/718250/campos_512_v4
+141/718259/campos_512_v4
+141/718264/campos_512_v4
+141/718265/campos_512_v4
+141/718279/campos_512_v4
+141/718280/campos_512_v4
+141/718291/campos_512_v4
+141/718297/campos_512_v4
+141/718299/campos_512_v4
+141/718309/campos_512_v4
+141/718318/campos_512_v4
+141/718322/campos_512_v4
+141/718323/campos_512_v4
+141/718333/campos_512_v4
+141/718338/campos_512_v4
+141/718339/campos_512_v4
+141/718342/campos_512_v4
+141/718353/campos_512_v4
+141/718357/campos_512_v4
+141/718360/campos_512_v4
+141/718368/campos_512_v4
+141/718372/campos_512_v4
+141/718375/campos_512_v4
+141/718378/campos_512_v4
+141/718379/campos_512_v4
+141/718382/campos_512_v4
+141/718390/campos_512_v4
+141/718391/campos_512_v4
+141/718392/campos_512_v4
+141/718393/campos_512_v4
+141/718397/campos_512_v4
+141/718400/campos_512_v4
+141/718404/campos_512_v4
+141/718429/campos_512_v4
+141/718430/campos_512_v4
+141/718431/campos_512_v4
+141/718438/campos_512_v4
+141/718455/campos_512_v4
+141/718457/campos_512_v4
+141/718473/campos_512_v4
+141/718481/campos_512_v4
+141/718499/campos_512_v4
+141/718508/campos_512_v4
+141/718518/campos_512_v4
+141/718522/campos_512_v4
+141/718525/campos_512_v4
+141/718527/campos_512_v4
+141/718544/campos_512_v4
+141/718545/campos_512_v4
+141/718548/campos_512_v4
+141/718550/campos_512_v4
+141/718552/campos_512_v4
+141/718579/campos_512_v4
+141/718580/campos_512_v4
+141/718583/campos_512_v4
+141/718586/campos_512_v4
+141/718593/campos_512_v4
+141/718596/campos_512_v4
+141/718616/campos_512_v4
+141/718619/campos_512_v4
+141/718626/campos_512_v4
+141/718634/campos_512_v4
+141/718635/campos_512_v4
+141/718639/campos_512_v4
+141/718647/campos_512_v4
+141/718648/campos_512_v4
+141/718653/campos_512_v4
+141/718654/campos_512_v4
+141/718659/campos_512_v4
+141/718671/campos_512_v4
+141/718675/campos_512_v4
+141/718693/campos_512_v4
+141/718698/campos_512_v4
+141/718699/campos_512_v4
+141/718704/campos_512_v4
+141/718705/campos_512_v4
+141/718706/campos_512_v4
+141/718707/campos_512_v4
+141/718714/campos_512_v4
+141/718724/campos_512_v4
+141/718728/campos_512_v4
+141/718764/campos_512_v4
+141/718768/campos_512_v4
+141/718772/campos_512_v4
+141/718779/campos_512_v4
+141/718780/campos_512_v4
+141/718784/campos_512_v4
+141/718792/campos_512_v4
+141/718800/campos_512_v4
+141/718802/campos_512_v4
+141/718803/campos_512_v4
+141/718806/campos_512_v4
+141/718809/campos_512_v4
+141/718815/campos_512_v4
+141/718821/campos_512_v4
+141/718822/campos_512_v4
+141/718838/campos_512_v4
+141/718839/campos_512_v4
+141/718852/campos_512_v4
+141/718853/campos_512_v4
+141/718863/campos_512_v4
+141/718872/campos_512_v4
+141/718898/campos_512_v4
+141/718899/campos_512_v4
+141/718923/campos_512_v4
+141/718927/campos_512_v4
+141/718929/campos_512_v4
+141/718933/campos_512_v4
+141/718976/campos_512_v4
+141/718978/campos_512_v4
+141/719005/campos_512_v4
+141/719006/campos_512_v4
+141/719007/campos_512_v4
+141/719017/campos_512_v4
+141/719031/campos_512_v4
+141/719033/campos_512_v4
+141/719040/campos_512_v4
+141/719043/campos_512_v4
+141/719047/campos_512_v4
+141/719049/campos_512_v4
+141/719057/campos_512_v4
+141/719066/campos_512_v4
+141/719067/campos_512_v4
+141/719071/campos_512_v4
+141/719084/campos_512_v4
+141/719085/campos_512_v4
+141/719091/campos_512_v4
+141/719101/campos_512_v4
+141/719105/campos_512_v4
+141/719106/campos_512_v4
+141/719108/campos_512_v4
+141/719114/campos_512_v4
+141/719117/campos_512_v4
+141/719120/campos_512_v4
+141/719121/campos_512_v4
+141/719128/campos_512_v4
+141/719130/campos_512_v4
+141/719135/campos_512_v4
+141/719139/campos_512_v4
+141/719160/campos_512_v4
+141/719169/campos_512_v4
+141/719172/campos_512_v4
+141/719175/campos_512_v4
+141/719180/campos_512_v4
+141/719206/campos_512_v4
+141/719214/campos_512_v4
+141/719221/campos_512_v4
+141/719226/campos_512_v4
+141/719229/campos_512_v4
+141/719232/campos_512_v4
+141/719237/campos_512_v4
+141/719239/campos_512_v4
+141/719241/campos_512_v4
+141/719242/campos_512_v4
+141/719247/campos_512_v4
+141/719252/campos_512_v4
+141/719253/campos_512_v4
+141/719257/campos_512_v4
+141/719263/campos_512_v4
+141/719264/campos_512_v4
+141/719268/campos_512_v4
+141/719270/campos_512_v4
+141/719274/campos_512_v4
+141/719275/campos_512_v4
+141/719276/campos_512_v4
+141/719284/campos_512_v4
+141/719292/campos_512_v4
+141/719296/campos_512_v4
+141/719313/campos_512_v4
+141/719317/campos_512_v4
+141/719319/campos_512_v4
+141/719320/campos_512_v4
+141/719327/campos_512_v4
+141/719334/campos_512_v4
+141/719337/campos_512_v4
+141/719341/campos_512_v4
+141/719345/campos_512_v4
+141/719346/campos_512_v4
+141/719349/campos_512_v4
+141/719352/campos_512_v4
+141/719390/campos_512_v4
+141/719394/campos_512_v4
+141/719397/campos_512_v4
+141/719398/campos_512_v4
+141/719411/campos_512_v4
+141/719420/campos_512_v4
+141/719433/campos_512_v4
+141/719442/campos_512_v4
+141/719445/campos_512_v4
+141/719488/campos_512_v4
+141/719490/campos_512_v4
+141/719508/campos_512_v4
+141/719514/campos_512_v4
+141/719521/campos_512_v4
+141/719529/campos_512_v4
+141/719530/campos_512_v4
+141/719534/campos_512_v4
+141/719536/campos_512_v4
+141/719545/campos_512_v4
+141/719554/campos_512_v4
+141/719555/campos_512_v4
+141/719570/campos_512_v4
+141/719571/campos_512_v4
+141/719573/campos_512_v4
+141/719583/campos_512_v4
+141/719585/campos_512_v4
+141/719588/campos_512_v4
+141/719592/campos_512_v4
+141/719594/campos_512_v4
+141/719612/campos_512_v4
+141/719626/campos_512_v4
+141/719630/campos_512_v4
+141/719635/campos_512_v4
+141/719640/campos_512_v4
+141/719644/campos_512_v4
+141/719657/campos_512_v4
+141/719662/campos_512_v4
+141/719667/campos_512_v4
+141/719669/campos_512_v4
+141/719677/campos_512_v4
+141/719679/campos_512_v4
+141/719682/campos_512_v4
+141/719713/campos_512_v4
+141/719728/campos_512_v4
+141/719744/campos_512_v4
+141/719747/campos_512_v4
+141/719748/campos_512_v4
+141/719762/campos_512_v4
+141/719769/campos_512_v4
+141/719771/campos_512_v4
+141/719775/campos_512_v4
+141/719798/campos_512_v4
+141/719803/campos_512_v4
+141/719809/campos_512_v4
+141/719813/campos_512_v4
+141/719814/campos_512_v4
+141/719827/campos_512_v4
+141/719832/campos_512_v4
+141/719839/campos_512_v4
+141/719844/campos_512_v4
+141/719849/campos_512_v4
+141/719853/campos_512_v4
+141/719871/campos_512_v4
+141/719872/campos_512_v4
+141/719884/campos_512_v4
+141/719887/campos_512_v4
+141/719897/campos_512_v4
+141/719914/campos_512_v4
+141/719915/campos_512_v4
+141/719921/campos_512_v4
+141/719930/campos_512_v4
+141/719933/campos_512_v4
+141/719944/campos_512_v4
+141/719946/campos_512_v4
+141/719970/campos_512_v4
+141/719972/campos_512_v4
+141/719975/campos_512_v4
+141/719980/campos_512_v4
+141/719995/campos_512_v4
+142/720005/campos_512_v4
+142/720009/campos_512_v4
+142/720012/campos_512_v4
+142/720016/campos_512_v4
+142/720032/campos_512_v4
+142/720043/campos_512_v4
+142/720048/campos_512_v4
+142/720078/campos_512_v4
+142/720091/campos_512_v4
+142/720094/campos_512_v4
+142/720097/campos_512_v4
+142/720102/campos_512_v4
+142/720107/campos_512_v4
+142/720110/campos_512_v4
+142/720119/campos_512_v4
+142/720120/campos_512_v4
+142/720138/campos_512_v4
+142/720143/campos_512_v4
+142/720146/campos_512_v4
+142/720149/campos_512_v4
+142/720155/campos_512_v4
+142/720174/campos_512_v4
+142/720187/campos_512_v4
+142/720188/campos_512_v4
+142/720212/campos_512_v4
+142/720213/campos_512_v4
+142/720224/campos_512_v4
+142/720231/campos_512_v4
+142/720241/campos_512_v4
+142/720242/campos_512_v4
+142/720252/campos_512_v4
+142/720256/campos_512_v4
+142/720259/campos_512_v4
+142/720265/campos_512_v4
+142/720268/campos_512_v4
+142/720287/campos_512_v4
+142/720292/campos_512_v4
+142/720295/campos_512_v4
+142/720303/campos_512_v4
+142/720313/campos_512_v4
+142/720319/campos_512_v4
+142/720332/campos_512_v4
+142/720362/campos_512_v4
+142/720373/campos_512_v4
+142/720377/campos_512_v4
+142/720378/campos_512_v4
+142/720382/campos_512_v4
+142/720385/campos_512_v4
+142/720393/campos_512_v4
+142/720397/campos_512_v4
+142/720405/campos_512_v4
+142/720427/campos_512_v4
+142/720436/campos_512_v4
+142/720440/campos_512_v4
+142/720441/campos_512_v4
+142/720450/campos_512_v4
+142/720459/campos_512_v4
+142/720465/campos_512_v4
+142/720471/campos_512_v4
+142/720472/campos_512_v4
+142/720481/campos_512_v4
+142/720496/campos_512_v4
+142/720501/campos_512_v4
+142/720506/campos_512_v4
+142/720508/campos_512_v4
+142/720528/campos_512_v4
+142/720533/campos_512_v4
+142/720534/campos_512_v4
+142/720536/campos_512_v4
+142/720543/campos_512_v4
+142/720552/campos_512_v4
+142/720568/campos_512_v4
+142/720569/campos_512_v4
+142/720571/campos_512_v4
+142/720582/campos_512_v4
+142/720584/campos_512_v4
+142/720587/campos_512_v4
+142/720620/campos_512_v4
+142/720625/campos_512_v4
+142/720632/campos_512_v4
+142/720633/campos_512_v4
+142/720640/campos_512_v4
+142/720643/campos_512_v4
+142/720647/campos_512_v4
+142/720650/campos_512_v4
+142/720654/campos_512_v4
+142/720656/campos_512_v4
+142/720665/campos_512_v4
+142/720666/campos_512_v4
+142/720679/campos_512_v4
+142/720691/campos_512_v4
+142/720710/campos_512_v4
+142/720731/campos_512_v4
+142/720742/campos_512_v4
+142/720761/campos_512_v4
+142/720763/campos_512_v4
+142/720773/campos_512_v4
+142/720775/campos_512_v4
+142/720778/campos_512_v4
+142/720780/campos_512_v4
+142/720787/campos_512_v4
+142/720797/campos_512_v4
+142/720803/campos_512_v4
+142/720805/campos_512_v4
+142/720806/campos_512_v4
+142/720811/campos_512_v4
+142/720824/campos_512_v4
+142/720831/campos_512_v4
+142/720851/campos_512_v4
+142/720852/campos_512_v4
+142/720862/campos_512_v4
+142/720869/campos_512_v4
+142/720885/campos_512_v4
+142/720894/campos_512_v4
+142/720898/campos_512_v4
+142/720903/campos_512_v4
+142/720907/campos_512_v4
+142/720913/campos_512_v4
+142/720928/campos_512_v4
+142/720935/campos_512_v4
+142/720937/campos_512_v4
+142/720946/campos_512_v4
+142/720948/campos_512_v4
+142/720949/campos_512_v4
+142/720966/campos_512_v4
+142/720968/campos_512_v4
+142/720975/campos_512_v4
+142/720979/campos_512_v4
+142/720984/campos_512_v4
+142/720992/campos_512_v4
+142/720999/campos_512_v4
+142/721001/campos_512_v4
+142/721006/campos_512_v4
+142/721008/campos_512_v4
+142/721014/campos_512_v4
+142/721021/campos_512_v4
+142/721025/campos_512_v4
+142/721035/campos_512_v4
+142/721037/campos_512_v4
+142/721039/campos_512_v4
+142/721049/campos_512_v4
+142/721056/campos_512_v4
+142/721060/campos_512_v4
+142/721061/campos_512_v4
+142/721066/campos_512_v4
+142/721079/campos_512_v4
+142/721090/campos_512_v4
+142/721091/campos_512_v4
+142/721103/campos_512_v4
+142/721106/campos_512_v4
+142/721109/campos_512_v4
+142/721112/campos_512_v4
+142/721114/campos_512_v4
+142/721120/campos_512_v4
+142/721121/campos_512_v4
+142/721125/campos_512_v4
+142/721130/campos_512_v4
+142/721133/campos_512_v4
+142/721137/campos_512_v4
+142/721140/campos_512_v4
+142/721146/campos_512_v4
+142/721156/campos_512_v4
+142/721162/campos_512_v4
+142/721165/campos_512_v4
+142/721174/campos_512_v4
+142/721175/campos_512_v4
+142/721189/campos_512_v4
+142/721194/campos_512_v4
+142/721204/campos_512_v4
+142/721215/campos_512_v4
+142/721219/campos_512_v4
+142/721222/campos_512_v4
+142/721234/campos_512_v4
+142/721251/campos_512_v4
+142/721261/campos_512_v4
+142/721270/campos_512_v4
+142/721274/campos_512_v4
+142/721277/campos_512_v4
+142/721282/campos_512_v4
+142/721292/campos_512_v4
+142/721295/campos_512_v4
+142/721312/campos_512_v4
+142/721317/campos_512_v4
+142/721319/campos_512_v4
+142/721320/campos_512_v4
+142/721334/campos_512_v4
+142/721339/campos_512_v4
+142/721340/campos_512_v4
+142/721342/campos_512_v4
+142/721357/campos_512_v4
+142/721382/campos_512_v4
+142/721387/campos_512_v4
+142/721391/campos_512_v4
+142/721397/campos_512_v4
+142/721398/campos_512_v4
+142/721409/campos_512_v4
+142/721411/campos_512_v4
+142/721413/campos_512_v4
+142/721445/campos_512_v4
+142/721452/campos_512_v4
+142/721457/campos_512_v4
+142/721467/campos_512_v4
+142/721472/campos_512_v4
+142/721480/campos_512_v4
+142/721500/campos_512_v4
+142/721515/campos_512_v4
+142/721521/campos_512_v4
+142/721525/campos_512_v4
+142/721527/campos_512_v4
+142/721529/campos_512_v4
+142/721536/campos_512_v4
+142/721545/campos_512_v4
+142/721556/campos_512_v4
+142/721571/campos_512_v4
+142/721574/campos_512_v4
+142/721577/campos_512_v4
+142/721581/campos_512_v4
+142/721588/campos_512_v4
+142/721592/campos_512_v4
+142/721594/campos_512_v4
+142/721601/campos_512_v4
+142/721602/campos_512_v4
+142/721604/campos_512_v4
+142/721613/campos_512_v4
+142/721623/campos_512_v4
+142/721624/campos_512_v4
+142/721630/campos_512_v4
+142/721632/campos_512_v4
+142/721647/campos_512_v4
+142/721648/campos_512_v4
+142/721651/campos_512_v4
+142/721662/campos_512_v4
+142/721681/campos_512_v4
+142/721688/campos_512_v4
+142/721691/campos_512_v4
+142/721700/campos_512_v4
+142/721703/campos_512_v4
+142/721706/campos_512_v4
+142/721714/campos_512_v4
+142/721721/campos_512_v4
+142/721752/campos_512_v4
+142/721766/campos_512_v4
+142/721774/campos_512_v4
+142/721777/campos_512_v4
+142/721782/campos_512_v4
+142/721792/campos_512_v4
+142/721798/campos_512_v4
+142/721805/campos_512_v4
+142/721813/campos_512_v4
+142/721819/campos_512_v4
+142/721823/campos_512_v4
+142/721840/campos_512_v4
+142/721844/campos_512_v4
+142/721858/campos_512_v4
+142/721868/campos_512_v4
+142/721870/campos_512_v4
+142/721872/campos_512_v4
+142/721875/campos_512_v4
+142/721895/campos_512_v4
+142/721903/campos_512_v4
+142/721948/campos_512_v4
+142/721952/campos_512_v4
+142/721958/campos_512_v4
+142/721965/campos_512_v4
+142/721997/campos_512_v4
+142/722001/campos_512_v4
+142/722002/campos_512_v4
+142/722008/campos_512_v4
+142/722016/campos_512_v4
+142/722017/campos_512_v4
+142/722027/campos_512_v4
+142/722033/campos_512_v4
+142/722038/campos_512_v4
+142/722040/campos_512_v4
+142/722041/campos_512_v4
+142/722048/campos_512_v4
+142/722054/campos_512_v4
+142/722059/campos_512_v4
+142/722075/campos_512_v4
+142/722077/campos_512_v4
+142/722080/campos_512_v4
+142/722087/campos_512_v4
+142/722089/campos_512_v4
+142/722090/campos_512_v4
+142/722095/campos_512_v4
+142/722098/campos_512_v4
+142/722100/campos_512_v4
+142/722105/campos_512_v4
+142/722117/campos_512_v4
+142/722119/campos_512_v4
+142/722123/campos_512_v4
+142/722125/campos_512_v4
+142/722131/campos_512_v4
+142/722142/campos_512_v4
+142/722144/campos_512_v4
+142/722145/campos_512_v4
+142/722151/campos_512_v4
+142/722159/campos_512_v4
+142/722167/campos_512_v4
+142/722169/campos_512_v4
+142/722180/campos_512_v4
+142/722183/campos_512_v4
+142/722189/campos_512_v4
+142/722200/campos_512_v4
+142/722215/campos_512_v4
+142/722217/campos_512_v4
+142/722218/campos_512_v4
+142/722220/campos_512_v4
+142/722221/campos_512_v4
+142/722229/campos_512_v4
+142/722239/campos_512_v4
+142/722243/campos_512_v4
+142/722246/campos_512_v4
+142/722265/campos_512_v4
+142/722285/campos_512_v4
+142/722289/campos_512_v4
+142/722316/campos_512_v4
+142/722325/campos_512_v4
+142/722329/campos_512_v4
+142/722338/campos_512_v4
+142/722342/campos_512_v4
+142/722352/campos_512_v4
+142/722353/campos_512_v4
+142/722361/campos_512_v4
+142/722363/campos_512_v4
+142/722369/campos_512_v4
+142/722385/campos_512_v4
+142/722388/campos_512_v4
+142/722416/campos_512_v4
+142/722420/campos_512_v4
+142/722436/campos_512_v4
+142/722437/campos_512_v4
+142/722438/campos_512_v4
+142/722449/campos_512_v4
+142/722452/campos_512_v4
+142/722469/campos_512_v4
+142/722488/campos_512_v4
+142/722490/campos_512_v4
+142/722496/campos_512_v4
+142/722506/campos_512_v4
+142/722520/campos_512_v4
+142/722528/campos_512_v4
+142/722539/campos_512_v4
+142/722553/campos_512_v4
+142/722575/campos_512_v4
+142/722576/campos_512_v4
+142/722581/campos_512_v4
+142/722605/campos_512_v4
+142/722606/campos_512_v4
+142/722607/campos_512_v4
+142/722611/campos_512_v4
+142/722654/campos_512_v4
+142/722655/campos_512_v4
+142/722657/campos_512_v4
+142/722659/campos_512_v4
+142/722691/campos_512_v4
+142/722696/campos_512_v4
+142/722700/campos_512_v4
+142/722734/campos_512_v4
+142/722736/campos_512_v4
+142/722739/campos_512_v4
+142/722740/campos_512_v4
+142/722742/campos_512_v4
+142/722743/campos_512_v4
+142/722750/campos_512_v4
+142/722755/campos_512_v4
+142/722767/campos_512_v4
+142/722778/campos_512_v4
+142/722784/campos_512_v4
+142/722795/campos_512_v4
+142/722797/campos_512_v4
+142/722806/campos_512_v4
+142/722817/campos_512_v4
+142/722820/campos_512_v4
+142/722829/campos_512_v4
+142/722835/campos_512_v4
+142/722838/campos_512_v4
+142/722866/campos_512_v4
+142/722868/campos_512_v4
+142/722879/campos_512_v4
+142/722882/campos_512_v4
+142/722884/campos_512_v4
+142/722905/campos_512_v4
+142/722910/campos_512_v4
+142/722915/campos_512_v4
+142/722916/campos_512_v4
+142/722930/campos_512_v4
+142/722932/campos_512_v4
+142/722936/campos_512_v4
+142/722939/campos_512_v4
+142/722950/campos_512_v4
+142/722960/campos_512_v4
+142/722964/campos_512_v4
+142/722969/campos_512_v4
+142/722976/campos_512_v4
+142/722987/campos_512_v4
+142/722989/campos_512_v4
+142/722990/campos_512_v4
+142/722992/campos_512_v4
+142/723005/campos_512_v4
+142/723006/campos_512_v4
+142/723016/campos_512_v4
+142/723029/campos_512_v4
+142/723044/campos_512_v4
+142/723080/campos_512_v4
+142/723088/campos_512_v4
+142/723089/campos_512_v4
+142/723093/campos_512_v4
+142/723100/campos_512_v4
+142/723111/campos_512_v4
+142/723113/campos_512_v4
+142/723114/campos_512_v4
+142/723120/campos_512_v4
+142/723129/campos_512_v4
+142/723130/campos_512_v4
+142/723141/campos_512_v4
+142/723142/campos_512_v4
+142/723169/campos_512_v4
+142/723185/campos_512_v4
+142/723187/campos_512_v4
+142/723192/campos_512_v4
+142/723198/campos_512_v4
+142/723208/campos_512_v4
+142/723214/campos_512_v4
+142/723216/campos_512_v4
+142/723219/campos_512_v4
+142/723222/campos_512_v4
+142/723230/campos_512_v4
+142/723231/campos_512_v4
+142/723260/campos_512_v4
+142/723269/campos_512_v4
+142/723281/campos_512_v4
+142/723305/campos_512_v4
+142/723319/campos_512_v4
+142/723323/campos_512_v4
+142/723332/campos_512_v4
+142/723340/campos_512_v4
+142/723360/campos_512_v4
+142/723361/campos_512_v4
+142/723365/campos_512_v4
+142/723371/campos_512_v4
+142/723384/campos_512_v4
+142/723397/campos_512_v4
+142/723399/campos_512_v4
+142/723405/campos_512_v4
+142/723409/campos_512_v4
+142/723414/campos_512_v4
+142/723439/campos_512_v4
+142/723445/campos_512_v4
+142/723452/campos_512_v4
+142/723479/campos_512_v4
+142/723484/campos_512_v4
+142/723512/campos_512_v4
+142/723519/campos_512_v4
+142/723520/campos_512_v4
+142/723530/campos_512_v4
+142/723532/campos_512_v4
+142/723533/campos_512_v4
+142/723543/campos_512_v4
+142/723546/campos_512_v4
+142/723552/campos_512_v4
+142/723560/campos_512_v4
+142/723572/campos_512_v4
+142/723573/campos_512_v4
+142/723579/campos_512_v4
+142/723592/campos_512_v4
+142/723602/campos_512_v4
+142/723611/campos_512_v4
+142/723634/campos_512_v4
+142/723638/campos_512_v4
+142/723639/campos_512_v4
+142/723642/campos_512_v4
+142/723652/campos_512_v4
+142/723656/campos_512_v4
+142/723666/campos_512_v4
+142/723676/campos_512_v4
+142/723680/campos_512_v4
+142/723687/campos_512_v4
+142/723696/campos_512_v4
+142/723698/campos_512_v4
+142/723702/campos_512_v4
+142/723715/campos_512_v4
+142/723717/campos_512_v4
+142/723726/campos_512_v4
+142/723728/campos_512_v4
+142/723771/campos_512_v4
+142/723774/campos_512_v4
+142/723790/campos_512_v4
+142/723808/campos_512_v4
+142/723810/campos_512_v4
+142/723830/campos_512_v4
+142/723859/campos_512_v4
+142/723860/campos_512_v4
+142/723864/campos_512_v4
+142/723872/campos_512_v4
+142/723877/campos_512_v4
+142/723881/campos_512_v4
+142/723908/campos_512_v4
+142/723913/campos_512_v4
+142/723916/campos_512_v4
+142/723924/campos_512_v4
+142/723927/campos_512_v4
+142/723929/campos_512_v4
+142/723933/campos_512_v4
+142/723936/campos_512_v4
+142/723951/campos_512_v4
+142/723953/campos_512_v4
+142/723955/campos_512_v4
+142/723957/campos_512_v4
+142/723958/campos_512_v4
+142/723994/campos_512_v4
+142/724003/campos_512_v4
+142/724014/campos_512_v4
+142/724023/campos_512_v4
+142/724031/campos_512_v4
+142/724033/campos_512_v4
+142/724042/campos_512_v4
+142/724049/campos_512_v4
+142/724066/campos_512_v4
+142/724070/campos_512_v4
+142/724084/campos_512_v4
+142/724098/campos_512_v4
+142/724102/campos_512_v4
+142/724105/campos_512_v4
+142/724121/campos_512_v4
+142/724124/campos_512_v4
+142/724130/campos_512_v4
+142/724134/campos_512_v4
+142/724150/campos_512_v4
+142/724151/campos_512_v4
+142/724152/campos_512_v4
+142/724164/campos_512_v4
+142/724165/campos_512_v4
+142/724178/campos_512_v4
+142/724191/campos_512_v4
+142/724208/campos_512_v4
+142/724225/campos_512_v4
+142/724255/campos_512_v4
+142/724285/campos_512_v4
+142/724290/campos_512_v4
+142/724294/campos_512_v4
+142/724297/campos_512_v4
+142/724309/campos_512_v4
+142/724319/campos_512_v4
+142/724329/campos_512_v4
+142/724333/campos_512_v4
+142/724339/campos_512_v4
+142/724342/campos_512_v4
+142/724351/campos_512_v4
+142/724376/campos_512_v4
+142/724391/campos_512_v4
+142/724403/campos_512_v4
+142/724427/campos_512_v4
+142/724430/campos_512_v4
+142/724433/campos_512_v4
+142/724437/campos_512_v4
+142/724459/campos_512_v4
+142/724465/campos_512_v4
+142/724468/campos_512_v4
+142/724470/campos_512_v4
+142/724482/campos_512_v4
+142/724489/campos_512_v4
+142/724493/campos_512_v4
+142/724503/campos_512_v4
+142/724511/campos_512_v4
+142/724514/campos_512_v4
+142/724525/campos_512_v4
+142/724527/campos_512_v4
+142/724530/campos_512_v4
+142/724534/campos_512_v4
+142/724537/campos_512_v4
+142/724544/campos_512_v4
+142/724547/campos_512_v4
+142/724554/campos_512_v4
+142/724563/campos_512_v4
+142/724564/campos_512_v4
+142/724584/campos_512_v4
+142/724587/campos_512_v4
+142/724596/campos_512_v4
+142/724598/campos_512_v4
+142/724624/campos_512_v4
+142/724626/campos_512_v4
+142/724662/campos_512_v4
+142/724673/campos_512_v4
+142/724676/campos_512_v4
+142/724678/campos_512_v4
+142/724679/campos_512_v4
+142/724689/campos_512_v4
+142/724690/campos_512_v4
+142/724691/campos_512_v4
+142/724694/campos_512_v4
+142/724704/campos_512_v4
+142/724715/campos_512_v4
+142/724716/campos_512_v4
+142/724731/campos_512_v4
+142/724751/campos_512_v4
+142/724753/campos_512_v4
+142/724766/campos_512_v4
+142/724773/campos_512_v4
+142/724774/campos_512_v4
+142/724776/campos_512_v4
+142/724780/campos_512_v4
+142/724794/campos_512_v4
+142/724799/campos_512_v4
+142/724816/campos_512_v4
+142/724819/campos_512_v4
+142/724821/campos_512_v4
+142/724840/campos_512_v4
+142/724843/campos_512_v4
+142/724845/campos_512_v4
+142/724847/campos_512_v4
+142/724854/campos_512_v4
+142/724865/campos_512_v4
+142/724876/campos_512_v4
+142/724883/campos_512_v4
+142/724889/campos_512_v4
+142/724890/campos_512_v4
+142/724898/campos_512_v4
+142/724901/campos_512_v4
+142/724908/campos_512_v4
+142/724912/campos_512_v4
+142/724918/campos_512_v4
+142/724925/campos_512_v4
+142/724927/campos_512_v4
+142/724930/campos_512_v4
+142/724934/campos_512_v4
+142/724954/campos_512_v4
+142/724955/campos_512_v4
+142/724965/campos_512_v4
+142/724968/campos_512_v4
+142/724990/campos_512_v4
+142/724995/campos_512_v4
+142/724998/campos_512_v4
+143/725011/campos_512_v4
+143/725022/campos_512_v4
+143/725038/campos_512_v4
+143/725056/campos_512_v4
+143/725058/campos_512_v4
+143/725061/campos_512_v4
+143/725062/campos_512_v4
+143/725063/campos_512_v4
+143/725072/campos_512_v4
+143/725076/campos_512_v4
+143/725088/campos_512_v4
+143/725090/campos_512_v4
+143/725094/campos_512_v4
+143/725111/campos_512_v4
+143/725121/campos_512_v4
+143/725134/campos_512_v4
+143/725145/campos_512_v4
+143/725147/campos_512_v4
+143/725161/campos_512_v4
+143/725164/campos_512_v4
+143/725166/campos_512_v4
+143/725172/campos_512_v4
+143/725182/campos_512_v4
+143/725187/campos_512_v4
+143/725191/campos_512_v4
+143/725195/campos_512_v4
+143/725207/campos_512_v4
+143/725215/campos_512_v4
+143/725216/campos_512_v4
+143/725218/campos_512_v4
+143/725223/campos_512_v4
+143/725225/campos_512_v4
+143/725244/campos_512_v4
+143/725249/campos_512_v4
+143/725267/campos_512_v4
+143/725273/campos_512_v4
+143/725276/campos_512_v4
+143/725281/campos_512_v4
+143/725287/campos_512_v4
+143/725289/campos_512_v4
+143/725294/campos_512_v4
+143/725315/campos_512_v4
+143/725319/campos_512_v4
+143/725320/campos_512_v4
+143/725326/campos_512_v4
+143/725328/campos_512_v4
+143/725333/campos_512_v4
+143/725334/campos_512_v4
+143/725337/campos_512_v4
+143/725343/campos_512_v4
+143/725353/campos_512_v4
+143/725359/campos_512_v4
+143/725369/campos_512_v4
+143/725374/campos_512_v4
+143/725375/campos_512_v4
+143/725382/campos_512_v4
+143/725385/campos_512_v4
+143/725391/campos_512_v4
+143/725402/campos_512_v4
+143/725412/campos_512_v4
+143/725417/campos_512_v4
+143/725421/campos_512_v4
+143/725422/campos_512_v4
+143/725450/campos_512_v4
+143/725484/campos_512_v4
+143/725516/campos_512_v4
+143/725523/campos_512_v4
+143/725528/campos_512_v4
+143/725547/campos_512_v4
+143/725555/campos_512_v4
+143/725559/campos_512_v4
+143/725568/campos_512_v4
+143/725585/campos_512_v4
+143/725587/campos_512_v4
+143/725593/campos_512_v4
+143/725604/campos_512_v4
+143/725607/campos_512_v4
+143/725608/campos_512_v4
+143/725638/campos_512_v4
+143/725645/campos_512_v4
+143/725646/campos_512_v4
+143/725647/campos_512_v4
+143/725649/campos_512_v4
+143/725656/campos_512_v4
+143/725669/campos_512_v4
+143/725674/campos_512_v4
+143/725696/campos_512_v4
+143/725704/campos_512_v4
+143/725705/campos_512_v4
+143/725706/campos_512_v4
+143/725710/campos_512_v4
+143/725714/campos_512_v4
+143/725720/campos_512_v4
+143/725725/campos_512_v4
+143/725726/campos_512_v4
+143/725728/campos_512_v4
+143/725734/campos_512_v4
+143/725765/campos_512_v4
+143/725766/campos_512_v4
+143/725777/campos_512_v4
+143/725780/campos_512_v4
+143/725783/campos_512_v4
+143/725797/campos_512_v4
+143/725803/campos_512_v4
+143/725829/campos_512_v4
+143/725847/campos_512_v4
+143/725857/campos_512_v4
+143/725859/campos_512_v4
+143/725873/campos_512_v4
+143/725874/campos_512_v4
+143/725885/campos_512_v4
+143/725887/campos_512_v4
+143/725889/campos_512_v4
+143/725903/campos_512_v4
+143/725911/campos_512_v4
+143/725912/campos_512_v4
+143/725916/campos_512_v4
+143/725933/campos_512_v4
+143/725954/campos_512_v4
+143/725956/campos_512_v4
+143/725965/campos_512_v4
+143/725969/campos_512_v4
+143/725970/campos_512_v4
+143/725977/campos_512_v4
+143/725984/campos_512_v4
+143/725990/campos_512_v4
+143/726020/campos_512_v4
+143/726022/campos_512_v4
+143/726026/campos_512_v4
+143/726034/campos_512_v4
+143/726037/campos_512_v4
+143/726042/campos_512_v4
+143/726049/campos_512_v4
+143/726051/campos_512_v4
+143/726059/campos_512_v4
+143/726062/campos_512_v4
+143/726068/campos_512_v4
+143/726074/campos_512_v4
+143/726075/campos_512_v4
+143/726076/campos_512_v4
+143/726081/campos_512_v4
+143/726082/campos_512_v4
+143/726084/campos_512_v4
+143/726105/campos_512_v4
+143/726106/campos_512_v4
+143/726108/campos_512_v4
+143/726136/campos_512_v4
+143/726138/campos_512_v4
+143/726145/campos_512_v4
+143/726170/campos_512_v4
+143/726182/campos_512_v4
+143/726186/campos_512_v4
+143/726192/campos_512_v4
+143/726200/campos_512_v4
+143/726204/campos_512_v4
+143/726211/campos_512_v4
+143/726220/campos_512_v4
+143/726221/campos_512_v4
+143/726228/campos_512_v4
+143/726237/campos_512_v4
+143/726240/campos_512_v4
+143/726241/campos_512_v4
+143/726247/campos_512_v4
+143/726255/campos_512_v4
+143/726257/campos_512_v4
+143/726279/campos_512_v4
+143/726288/campos_512_v4
+143/726294/campos_512_v4
+143/726298/campos_512_v4
+143/726305/campos_512_v4
+143/726318/campos_512_v4
+143/726323/campos_512_v4
+143/726324/campos_512_v4
+143/726336/campos_512_v4
+143/726338/campos_512_v4
+143/726343/campos_512_v4
+143/726368/campos_512_v4
+143/726373/campos_512_v4
+143/726374/campos_512_v4
+143/726381/campos_512_v4
+143/726386/campos_512_v4
+143/726388/campos_512_v4
+143/726398/campos_512_v4
+143/726425/campos_512_v4
+143/726428/campos_512_v4
+143/726432/campos_512_v4
+143/726444/campos_512_v4
+143/726447/campos_512_v4
+143/726454/campos_512_v4
+143/726463/campos_512_v4
+143/726464/campos_512_v4
+143/726469/campos_512_v4
+143/726473/campos_512_v4
+143/726478/campos_512_v4
+143/726486/campos_512_v4
+143/726490/campos_512_v4
+143/726492/campos_512_v4
+143/726505/campos_512_v4
+143/726518/campos_512_v4
+143/726526/campos_512_v4
+143/726530/campos_512_v4
+143/726534/campos_512_v4
+143/726539/campos_512_v4
+143/726540/campos_512_v4
+143/726551/campos_512_v4
+143/726558/campos_512_v4
+143/726569/campos_512_v4
+143/726576/campos_512_v4
+143/726610/campos_512_v4
+143/726611/campos_512_v4
+143/726614/campos_512_v4
+143/726621/campos_512_v4
+143/726625/campos_512_v4
+143/726632/campos_512_v4
+143/726646/campos_512_v4
+143/726662/campos_512_v4
+143/726674/campos_512_v4
+143/726680/campos_512_v4
+143/726682/campos_512_v4
+143/726691/campos_512_v4
+143/726694/campos_512_v4
+143/726695/campos_512_v4
+143/726698/campos_512_v4
+143/726701/campos_512_v4
+143/726708/campos_512_v4
+143/726711/campos_512_v4
+143/726736/campos_512_v4
+143/726739/campos_512_v4
+143/726747/campos_512_v4
+143/726749/campos_512_v4
+143/726768/campos_512_v4
+143/726791/campos_512_v4
+143/726818/campos_512_v4
+143/726827/campos_512_v4
+143/726831/campos_512_v4
+143/726838/campos_512_v4
+143/726846/campos_512_v4
+143/726847/campos_512_v4
+143/726851/campos_512_v4
+143/726862/campos_512_v4
+143/726866/campos_512_v4
+143/726883/campos_512_v4
+143/726892/campos_512_v4
+143/726903/campos_512_v4
+143/726914/campos_512_v4
+143/726924/campos_512_v4
+143/726932/campos_512_v4
+143/726954/campos_512_v4
+143/726960/campos_512_v4
+143/726976/campos_512_v4
+143/726979/campos_512_v4
+143/726981/campos_512_v4
+143/726992/campos_512_v4
+143/726993/campos_512_v4
+143/727000/campos_512_v4
+143/727007/campos_512_v4
+143/727018/campos_512_v4
+143/727027/campos_512_v4
+143/727030/campos_512_v4
+143/727065/campos_512_v4
+143/727070/campos_512_v4
+143/727071/campos_512_v4
+143/727076/campos_512_v4
+143/727080/campos_512_v4
+143/727084/campos_512_v4
+143/727092/campos_512_v4
+143/727096/campos_512_v4
+143/727101/campos_512_v4
+143/727111/campos_512_v4
+143/727116/campos_512_v4
+143/727126/campos_512_v4
+143/727144/campos_512_v4
+143/727154/campos_512_v4
+143/727162/campos_512_v4
+143/727163/campos_512_v4
+143/727171/campos_512_v4
+143/727173/campos_512_v4
+143/727184/campos_512_v4
+143/727187/campos_512_v4
+143/727214/campos_512_v4
+143/727222/campos_512_v4
+143/727230/campos_512_v4
+143/727234/campos_512_v4
+143/727238/campos_512_v4
+143/727251/campos_512_v4
+143/727256/campos_512_v4
+143/727257/campos_512_v4
+143/727262/campos_512_v4
+143/727276/campos_512_v4
+143/727280/campos_512_v4
+143/727283/campos_512_v4
+143/727286/campos_512_v4
+143/727296/campos_512_v4
+143/727298/campos_512_v4
+143/727304/campos_512_v4
+143/727308/campos_512_v4
+143/727313/campos_512_v4
+143/727318/campos_512_v4
+143/727319/campos_512_v4
+143/727328/campos_512_v4
+143/727344/campos_512_v4
+143/727368/campos_512_v4
+143/727382/campos_512_v4
+143/727388/campos_512_v4
+143/727390/campos_512_v4
+143/727395/campos_512_v4
+143/727398/campos_512_v4
+143/727403/campos_512_v4
+143/727409/campos_512_v4
+143/727413/campos_512_v4
+143/727433/campos_512_v4
+143/727435/campos_512_v4
+143/727447/campos_512_v4
+143/727459/campos_512_v4
+143/727462/campos_512_v4
+143/727463/campos_512_v4
+143/727467/campos_512_v4
+143/727470/campos_512_v4
+143/727481/campos_512_v4
+143/727486/campos_512_v4
+143/727504/campos_512_v4
+143/727510/campos_512_v4
+143/727513/campos_512_v4
+143/727517/campos_512_v4
+143/727536/campos_512_v4
+143/727538/campos_512_v4
+143/727559/campos_512_v4
+143/727560/campos_512_v4
+143/727575/campos_512_v4
+143/727580/campos_512_v4
+143/727593/campos_512_v4
+143/727595/campos_512_v4
+143/727612/campos_512_v4
+143/727613/campos_512_v4
+143/727618/campos_512_v4
+143/727622/campos_512_v4
+143/727626/campos_512_v4
+143/727630/campos_512_v4
+143/727635/campos_512_v4
+143/727637/campos_512_v4
+143/727638/campos_512_v4
+143/727642/campos_512_v4
+143/727644/campos_512_v4
+143/727649/campos_512_v4
+143/727654/campos_512_v4
+143/727678/campos_512_v4
+143/727685/campos_512_v4
+143/727688/campos_512_v4
+143/727693/campos_512_v4
+143/727697/campos_512_v4
+143/727698/campos_512_v4
+143/727699/campos_512_v4
+143/727709/campos_512_v4
+143/727714/campos_512_v4
+143/727722/campos_512_v4
+143/727731/campos_512_v4
+143/727739/campos_512_v4
+143/727746/campos_512_v4
+143/727747/campos_512_v4
+143/727753/campos_512_v4
+143/727757/campos_512_v4
+143/727759/campos_512_v4
+143/727767/campos_512_v4
+143/727769/campos_512_v4
+143/727771/campos_512_v4
+143/727782/campos_512_v4
+143/727784/campos_512_v4
+143/727788/campos_512_v4
+143/727790/campos_512_v4
+143/727797/campos_512_v4
+143/727802/campos_512_v4
+143/727807/campos_512_v4
+143/727808/campos_512_v4
+143/727810/campos_512_v4
+143/727815/campos_512_v4
+143/727822/campos_512_v4
+143/727829/campos_512_v4
+143/727839/campos_512_v4
+143/727849/campos_512_v4
+143/727850/campos_512_v4
+143/727866/campos_512_v4
+143/727874/campos_512_v4
+143/727881/campos_512_v4
+143/727883/campos_512_v4
+143/727903/campos_512_v4
+143/727911/campos_512_v4
+143/727920/campos_512_v4
+143/727945/campos_512_v4
+143/727951/campos_512_v4
+143/727967/campos_512_v4
+143/727984/campos_512_v4
+143/727986/campos_512_v4
+143/728014/campos_512_v4
+143/728026/campos_512_v4
+143/728050/campos_512_v4
+143/728051/campos_512_v4
+143/728074/campos_512_v4
+143/728076/campos_512_v4
+143/728087/campos_512_v4
+143/728094/campos_512_v4
+143/728110/campos_512_v4
+143/728117/campos_512_v4
+143/728118/campos_512_v4
+143/728135/campos_512_v4
+143/728137/campos_512_v4
+143/728145/campos_512_v4
+143/728164/campos_512_v4
+143/728165/campos_512_v4
+143/728178/campos_512_v4
+143/728179/campos_512_v4
+143/728184/campos_512_v4
+143/728188/campos_512_v4
+143/728190/campos_512_v4
+143/728193/campos_512_v4
+143/728195/campos_512_v4
+143/728198/campos_512_v4
+143/728210/campos_512_v4
+143/728219/campos_512_v4
+143/728237/campos_512_v4
+143/728240/campos_512_v4
+143/728245/campos_512_v4
+143/728248/campos_512_v4
+143/728250/campos_512_v4
+143/728253/campos_512_v4
+143/728254/campos_512_v4
+143/728275/campos_512_v4
+143/728284/campos_512_v4
+143/728298/campos_512_v4
+143/728306/campos_512_v4
+143/728308/campos_512_v4
+143/728319/campos_512_v4
+143/728339/campos_512_v4
+143/728341/campos_512_v4
+143/728342/campos_512_v4
+143/728349/campos_512_v4
+143/728351/campos_512_v4
+143/728363/campos_512_v4
+143/728380/campos_512_v4
+143/728386/campos_512_v4
+143/728394/campos_512_v4
+143/728396/campos_512_v4
+143/728409/campos_512_v4
+143/728411/campos_512_v4
+143/728413/campos_512_v4
+143/728420/campos_512_v4
+143/728423/campos_512_v4
+143/728425/campos_512_v4
+143/728433/campos_512_v4
+143/728437/campos_512_v4
+143/728449/campos_512_v4
+143/728452/campos_512_v4
+143/728455/campos_512_v4
+143/728478/campos_512_v4
+143/728515/campos_512_v4
+143/728521/campos_512_v4
+143/728528/campos_512_v4
+143/728531/campos_512_v4
+143/728533/campos_512_v4
+143/728537/campos_512_v4
+143/728563/campos_512_v4
+143/728566/campos_512_v4
+143/728582/campos_512_v4
+143/728586/campos_512_v4
+143/728594/campos_512_v4
+143/728598/campos_512_v4
+143/728599/campos_512_v4
+143/728604/campos_512_v4
+143/728616/campos_512_v4
+143/728619/campos_512_v4
+143/728621/campos_512_v4
+143/728622/campos_512_v4
+143/728628/campos_512_v4
+143/728629/campos_512_v4
+143/728633/campos_512_v4
+143/728642/campos_512_v4
+143/728659/campos_512_v4
+143/728668/campos_512_v4
+143/728673/campos_512_v4
+143/728678/campos_512_v4
+143/728699/campos_512_v4
+143/728701/campos_512_v4
+143/728708/campos_512_v4
+143/728723/campos_512_v4
+143/728725/campos_512_v4
+143/728745/campos_512_v4
+143/728756/campos_512_v4
+143/728763/campos_512_v4
+143/728770/campos_512_v4
+143/728774/campos_512_v4
+143/728776/campos_512_v4
+143/728778/campos_512_v4
+143/728784/campos_512_v4
+143/728802/campos_512_v4
+143/728816/campos_512_v4
+143/728835/campos_512_v4
+143/728840/campos_512_v4
+143/728843/campos_512_v4
+143/728847/campos_512_v4
+143/728858/campos_512_v4
+143/728864/campos_512_v4
+143/728866/campos_512_v4
+143/728872/campos_512_v4
+143/728881/campos_512_v4
+143/728884/campos_512_v4
+143/728889/campos_512_v4
+143/728893/campos_512_v4
+143/728898/campos_512_v4
+143/728901/campos_512_v4
+143/728919/campos_512_v4
+143/728926/campos_512_v4
+143/728935/campos_512_v4
+143/728942/campos_512_v4
+143/728962/campos_512_v4
+143/728963/campos_512_v4
+143/728964/campos_512_v4
+143/728965/campos_512_v4
+143/728966/campos_512_v4
+143/728981/campos_512_v4
+143/728994/campos_512_v4
+143/729000/campos_512_v4
+143/729022/campos_512_v4
+143/729025/campos_512_v4
+143/729026/campos_512_v4
+143/729035/campos_512_v4
+143/729047/campos_512_v4
+143/729054/campos_512_v4
+143/729062/campos_512_v4
+143/729063/campos_512_v4
+143/729067/campos_512_v4
+143/729069/campos_512_v4
+143/729072/campos_512_v4
+143/729081/campos_512_v4
+143/729085/campos_512_v4
+143/729096/campos_512_v4
+143/729104/campos_512_v4
+143/729105/campos_512_v4
+143/729109/campos_512_v4
+143/729111/campos_512_v4
+143/729116/campos_512_v4
+143/729123/campos_512_v4
+143/729126/campos_512_v4
+143/729129/campos_512_v4
+143/729141/campos_512_v4
+143/729150/campos_512_v4
+143/729155/campos_512_v4
+143/729163/campos_512_v4
+143/729165/campos_512_v4
+143/729167/campos_512_v4
+143/729173/campos_512_v4
+143/729186/campos_512_v4
+143/729199/campos_512_v4
+143/729205/campos_512_v4
+143/729208/campos_512_v4
+143/729223/campos_512_v4
+143/729226/campos_512_v4
+143/729232/campos_512_v4
+143/729253/campos_512_v4
+143/729264/campos_512_v4
+143/729267/campos_512_v4
+143/729268/campos_512_v4
+143/729275/campos_512_v4
+143/729309/campos_512_v4
+143/729312/campos_512_v4
+143/729318/campos_512_v4
+143/729340/campos_512_v4
+143/729343/campos_512_v4
+143/729365/campos_512_v4
+143/729377/campos_512_v4
+143/729378/campos_512_v4
+143/729391/campos_512_v4
+143/729392/campos_512_v4
+143/729416/campos_512_v4
+143/729417/campos_512_v4
+143/729425/campos_512_v4
+143/729432/campos_512_v4
+143/729436/campos_512_v4
+143/729441/campos_512_v4
+143/729473/campos_512_v4
+143/729487/campos_512_v4
+143/729503/campos_512_v4
+143/729521/campos_512_v4
+143/729555/campos_512_v4
+143/729556/campos_512_v4
+143/729567/campos_512_v4
+143/729572/campos_512_v4
+143/729574/campos_512_v4
+143/729576/campos_512_v4
+143/729577/campos_512_v4
+143/729582/campos_512_v4
+143/729586/campos_512_v4
+143/729589/campos_512_v4
+143/729590/campos_512_v4
+143/729599/campos_512_v4
+143/729612/campos_512_v4
+143/729617/campos_512_v4
+143/729619/campos_512_v4
+143/729638/campos_512_v4
+143/729644/campos_512_v4
+143/729652/campos_512_v4
+143/729653/campos_512_v4
+143/729654/campos_512_v4
+143/729668/campos_512_v4
+143/729673/campos_512_v4
+143/729703/campos_512_v4
+143/729709/campos_512_v4
+143/729715/campos_512_v4
+143/729727/campos_512_v4
+143/729745/campos_512_v4
+143/729749/campos_512_v4
+143/729752/campos_512_v4
+143/729753/campos_512_v4
+143/729755/campos_512_v4
+143/729758/campos_512_v4
+143/729762/campos_512_v4
+143/729770/campos_512_v4
+143/729776/campos_512_v4
+143/729777/campos_512_v4
+143/729779/campos_512_v4
+143/729780/campos_512_v4
+143/729782/campos_512_v4
+143/729788/campos_512_v4
+143/729799/campos_512_v4
+143/729801/campos_512_v4
+143/729809/campos_512_v4
+143/729816/campos_512_v4
+143/729825/campos_512_v4
+143/729839/campos_512_v4
+143/729845/campos_512_v4
+143/729855/campos_512_v4
+143/729889/campos_512_v4
+143/729894/campos_512_v4
+143/729900/campos_512_v4
+143/729905/campos_512_v4
+143/729906/campos_512_v4
+143/729914/campos_512_v4
+143/729921/campos_512_v4
+143/729934/campos_512_v4
+143/729937/campos_512_v4
+143/729940/campos_512_v4
+143/729943/campos_512_v4
+143/729944/campos_512_v4
+143/729945/campos_512_v4
+143/729948/campos_512_v4
+143/729961/campos_512_v4
+143/729989/campos_512_v4
+143/729993/campos_512_v4
+143/729994/campos_512_v4
+143/729998/campos_512_v4
+144/730003/campos_512_v4
+144/730008/campos_512_v4
+144/730009/campos_512_v4
+144/730020/campos_512_v4
+144/730028/campos_512_v4
+144/730034/campos_512_v4
+144/730058/campos_512_v4
+144/730073/campos_512_v4
+144/730078/campos_512_v4
+144/730092/campos_512_v4
+144/730101/campos_512_v4
+144/730107/campos_512_v4
+144/730115/campos_512_v4
+144/730116/campos_512_v4
+144/730119/campos_512_v4
+144/730131/campos_512_v4
+144/730133/campos_512_v4
+144/730136/campos_512_v4
+144/730138/campos_512_v4
+144/730141/campos_512_v4
+144/730146/campos_512_v4
+144/730160/campos_512_v4
+144/730172/campos_512_v4
+144/730189/campos_512_v4
+144/730193/campos_512_v4
+144/730196/campos_512_v4
+144/730215/campos_512_v4
+144/730221/campos_512_v4
+144/730224/campos_512_v4
+144/730225/campos_512_v4
+144/730253/campos_512_v4
+144/730266/campos_512_v4
+144/730269/campos_512_v4
+144/730277/campos_512_v4
+144/730280/campos_512_v4
+144/730285/campos_512_v4
+144/730289/campos_512_v4
+144/730302/campos_512_v4
+144/730313/campos_512_v4
+144/730315/campos_512_v4
+144/730326/campos_512_v4
+144/730330/campos_512_v4
+144/730340/campos_512_v4
+144/730341/campos_512_v4
+144/730342/campos_512_v4
+144/730365/campos_512_v4
+144/730368/campos_512_v4
+144/730377/campos_512_v4
+144/730434/campos_512_v4
+144/730440/campos_512_v4
+144/730454/campos_512_v4
+144/730456/campos_512_v4
+144/730469/campos_512_v4
+144/730493/campos_512_v4
+144/730494/campos_512_v4
+144/730515/campos_512_v4
+144/730530/campos_512_v4
+144/730552/campos_512_v4
+144/730570/campos_512_v4
+144/730599/campos_512_v4
+144/730604/campos_512_v4
+144/730606/campos_512_v4
+144/730607/campos_512_v4
+144/730630/campos_512_v4
+144/730632/campos_512_v4
+144/730643/campos_512_v4
+144/730652/campos_512_v4
+144/730655/campos_512_v4
+144/730658/campos_512_v4
+144/730659/campos_512_v4
+144/730666/campos_512_v4
+144/730683/campos_512_v4
+144/730686/campos_512_v4
+144/730699/campos_512_v4
+144/730703/campos_512_v4
+144/730722/campos_512_v4
+144/730725/campos_512_v4
+144/730727/campos_512_v4
+144/730736/campos_512_v4
+144/730747/campos_512_v4
+144/730756/campos_512_v4
+144/730758/campos_512_v4
+144/730762/campos_512_v4
+144/730783/campos_512_v4
+144/730785/campos_512_v4
+144/730807/campos_512_v4
+144/730809/campos_512_v4
+144/730814/campos_512_v4
+144/730816/campos_512_v4
+144/730843/campos_512_v4
+144/730844/campos_512_v4
+144/730845/campos_512_v4
+144/730846/campos_512_v4
+144/730857/campos_512_v4
+144/730860/campos_512_v4
+144/730861/campos_512_v4
+144/730884/campos_512_v4
+144/730889/campos_512_v4
+144/730891/campos_512_v4
+144/730894/campos_512_v4
+144/730903/campos_512_v4
+144/730916/campos_512_v4
+144/730918/campos_512_v4
+144/730923/campos_512_v4
+144/730928/campos_512_v4
+144/730943/campos_512_v4
+144/730945/campos_512_v4
+144/730955/campos_512_v4
+144/730957/campos_512_v4
+144/730962/campos_512_v4
+144/730963/campos_512_v4
+144/730983/campos_512_v4
+144/730993/campos_512_v4
+144/730998/campos_512_v4
+144/731000/campos_512_v4
+144/731005/campos_512_v4
+144/731023/campos_512_v4
+144/731032/campos_512_v4
+144/731041/campos_512_v4
+144/731073/campos_512_v4
+144/731079/campos_512_v4
+144/731086/campos_512_v4
+144/731088/campos_512_v4
+144/731090/campos_512_v4
+144/731092/campos_512_v4
+144/731097/campos_512_v4
+144/731099/campos_512_v4
+144/731102/campos_512_v4
+144/731119/campos_512_v4
+144/731141/campos_512_v4
+144/731150/campos_512_v4
+144/731157/campos_512_v4
+144/731160/campos_512_v4
+144/731165/campos_512_v4
+144/731167/campos_512_v4
+144/731172/campos_512_v4
+144/731177/campos_512_v4
+144/731184/campos_512_v4
+144/731187/campos_512_v4
+144/731201/campos_512_v4
+144/731206/campos_512_v4
+144/731211/campos_512_v4
+144/731215/campos_512_v4
+144/731216/campos_512_v4
+144/731225/campos_512_v4
+144/731231/campos_512_v4
+144/731235/campos_512_v4
+144/731241/campos_512_v4
+144/731252/campos_512_v4
+144/731280/campos_512_v4
+144/731302/campos_512_v4
+144/731311/campos_512_v4
+144/731315/campos_512_v4
+144/731316/campos_512_v4
+144/731337/campos_512_v4
+144/731340/campos_512_v4
+144/731347/campos_512_v4
+144/731354/campos_512_v4
+144/731355/campos_512_v4
+144/731358/campos_512_v4
+144/731361/campos_512_v4
+144/731366/campos_512_v4
+144/731368/campos_512_v4
+144/731372/campos_512_v4
+144/731399/campos_512_v4
+144/731425/campos_512_v4
+144/731427/campos_512_v4
+144/731430/campos_512_v4
+144/731431/campos_512_v4
+144/731433/campos_512_v4
+144/731434/campos_512_v4
+144/731449/campos_512_v4
+144/731458/campos_512_v4
+144/731460/campos_512_v4
+144/731472/campos_512_v4
+144/731480/campos_512_v4
+144/731490/campos_512_v4
+144/731511/campos_512_v4
+144/731512/campos_512_v4
+144/731515/campos_512_v4
+144/731525/campos_512_v4
+144/731534/campos_512_v4
+144/731537/campos_512_v4
+144/731543/campos_512_v4
+144/731545/campos_512_v4
+144/731549/campos_512_v4
+144/731559/campos_512_v4
+144/731568/campos_512_v4
+144/731572/campos_512_v4
+144/731579/campos_512_v4
+144/731583/campos_512_v4
+144/731620/campos_512_v4
+144/731621/campos_512_v4
+144/731626/campos_512_v4
+144/731629/campos_512_v4
+144/731632/campos_512_v4
+144/731635/campos_512_v4
+144/731636/campos_512_v4
+144/731639/campos_512_v4
+144/731657/campos_512_v4
+144/731663/campos_512_v4
+144/731664/campos_512_v4
+144/731669/campos_512_v4
+144/731676/campos_512_v4
+144/731682/campos_512_v4
+144/731684/campos_512_v4
+144/731690/campos_512_v4
+144/731691/campos_512_v4
+144/731692/campos_512_v4
+144/731700/campos_512_v4
+144/731701/campos_512_v4
+144/731705/campos_512_v4
+144/731716/campos_512_v4
+144/731718/campos_512_v4
+144/731724/campos_512_v4
+144/731725/campos_512_v4
+144/731727/campos_512_v4
+144/731735/campos_512_v4
+144/731736/campos_512_v4
+144/731749/campos_512_v4
+144/731752/campos_512_v4
+144/731768/campos_512_v4
+144/731780/campos_512_v4
+144/731791/campos_512_v4
+144/731801/campos_512_v4
+144/731802/campos_512_v4
+144/731803/campos_512_v4
+144/731807/campos_512_v4
+144/731809/campos_512_v4
+144/731824/campos_512_v4
+144/731828/campos_512_v4
+144/731829/campos_512_v4
+144/731834/campos_512_v4
+144/731839/campos_512_v4
+144/731840/campos_512_v4
+144/731849/campos_512_v4
+144/731859/campos_512_v4
+144/731860/campos_512_v4
+144/731876/campos_512_v4
+144/731878/campos_512_v4
+144/731880/campos_512_v4
+144/731882/campos_512_v4
+144/731884/campos_512_v4
+144/731893/campos_512_v4
+144/731894/campos_512_v4
+144/731903/campos_512_v4
+144/731914/campos_512_v4
+144/731930/campos_512_v4
+144/731931/campos_512_v4
+144/731950/campos_512_v4
+144/731956/campos_512_v4
+144/731964/campos_512_v4
+144/731972/campos_512_v4
+144/731982/campos_512_v4
+144/731983/campos_512_v4
+144/731984/campos_512_v4
+144/731989/campos_512_v4
+144/732011/campos_512_v4
+144/732013/campos_512_v4
+144/732018/campos_512_v4
+144/732038/campos_512_v4
+144/732040/campos_512_v4
+144/732042/campos_512_v4
+144/732045/campos_512_v4
+144/732050/campos_512_v4
+144/732054/campos_512_v4
+144/732055/campos_512_v4
+144/732061/campos_512_v4
+144/732063/campos_512_v4
+144/732075/campos_512_v4
+144/732083/campos_512_v4
+144/732085/campos_512_v4
+144/732108/campos_512_v4
+144/732109/campos_512_v4
+144/732110/campos_512_v4
+144/732138/campos_512_v4
+144/732152/campos_512_v4
+144/732156/campos_512_v4
+144/732158/campos_512_v4
+144/732171/campos_512_v4
+144/732177/campos_512_v4
+144/732186/campos_512_v4
+144/732191/campos_512_v4
+144/732196/campos_512_v4
+144/732198/campos_512_v4
+144/732206/campos_512_v4
+144/732211/campos_512_v4
+144/732214/campos_512_v4
+144/732217/campos_512_v4
+144/732223/campos_512_v4
+144/732236/campos_512_v4
+144/732239/campos_512_v4
+144/732267/campos_512_v4
+144/732269/campos_512_v4
+144/732276/campos_512_v4
+144/732292/campos_512_v4
+144/732302/campos_512_v4
+144/732305/campos_512_v4
+144/732309/campos_512_v4
+144/732312/campos_512_v4
+144/732335/campos_512_v4
+144/732345/campos_512_v4
+144/732358/campos_512_v4
+144/732363/campos_512_v4
+144/732365/campos_512_v4
+144/732371/campos_512_v4
+144/732373/campos_512_v4
+144/732380/campos_512_v4
+144/732395/campos_512_v4
+144/732398/campos_512_v4
+144/732400/campos_512_v4
+144/732401/campos_512_v4
+144/732447/campos_512_v4
+144/732449/campos_512_v4
+144/732452/campos_512_v4
+144/732456/campos_512_v4
+144/732457/campos_512_v4
+144/732474/campos_512_v4
+144/732476/campos_512_v4
+144/732486/campos_512_v4
+144/732493/campos_512_v4
+144/732494/campos_512_v4
+144/732496/campos_512_v4
+144/732497/campos_512_v4
+144/732501/campos_512_v4
+144/732513/campos_512_v4
+144/732522/campos_512_v4
+144/732525/campos_512_v4
+144/732536/campos_512_v4
+144/732537/campos_512_v4
+144/732542/campos_512_v4
+144/732543/campos_512_v4
+144/732550/campos_512_v4
+144/732553/campos_512_v4
+144/732562/campos_512_v4
+144/732567/campos_512_v4
+144/732573/campos_512_v4
+144/732583/campos_512_v4
+144/732587/campos_512_v4
+144/732588/campos_512_v4
+144/732596/campos_512_v4
+144/732597/campos_512_v4
+144/732612/campos_512_v4
+144/732613/campos_512_v4
+144/732626/campos_512_v4
+144/732643/campos_512_v4
+144/732647/campos_512_v4
+144/732650/campos_512_v4
+144/732651/campos_512_v4
+144/732667/campos_512_v4
+144/732673/campos_512_v4
+144/732675/campos_512_v4
+144/732676/campos_512_v4
+144/732678/campos_512_v4
+144/732692/campos_512_v4
+144/732698/campos_512_v4
+144/732701/campos_512_v4
+144/732703/campos_512_v4
+144/732721/campos_512_v4
+144/732722/campos_512_v4
+144/732725/campos_512_v4
+144/732727/campos_512_v4
+144/732740/campos_512_v4
+144/732746/campos_512_v4
+144/732748/campos_512_v4
+144/732755/campos_512_v4
+144/732757/campos_512_v4
+144/732759/campos_512_v4
+144/732760/campos_512_v4
+144/732781/campos_512_v4
+144/732798/campos_512_v4
+144/732805/campos_512_v4
+144/732808/campos_512_v4
+144/732822/campos_512_v4
+144/732827/campos_512_v4
+144/732836/campos_512_v4
+144/732841/campos_512_v4
+144/732851/campos_512_v4
+144/732865/campos_512_v4
+144/732866/campos_512_v4
+144/732869/campos_512_v4
+144/732870/campos_512_v4
+144/732871/campos_512_v4
+144/732874/campos_512_v4
+144/732879/campos_512_v4
+144/732885/campos_512_v4
+144/732893/campos_512_v4
+144/732896/campos_512_v4
+144/732897/campos_512_v4
+144/732902/campos_512_v4
+144/732909/campos_512_v4
+144/732911/campos_512_v4
+144/732918/campos_512_v4
+144/732930/campos_512_v4
+144/732947/campos_512_v4
+144/732951/campos_512_v4
+144/732963/campos_512_v4
+144/732976/campos_512_v4
+144/732980/campos_512_v4
+144/732992/campos_512_v4
+144/732998/campos_512_v4
+144/733003/campos_512_v4
+144/733008/campos_512_v4
+144/733009/campos_512_v4
+144/733010/campos_512_v4
+144/733020/campos_512_v4
+144/733032/campos_512_v4
+144/733036/campos_512_v4
+144/733051/campos_512_v4
+144/733054/campos_512_v4
+144/733056/campos_512_v4
+144/733059/campos_512_v4
+144/733060/campos_512_v4
+144/733061/campos_512_v4
+144/733080/campos_512_v4
+144/733090/campos_512_v4
+144/733093/campos_512_v4
+144/733095/campos_512_v4
+144/733096/campos_512_v4
+144/733102/campos_512_v4
+144/733106/campos_512_v4
+144/733112/campos_512_v4
+144/733116/campos_512_v4
+144/733120/campos_512_v4
+144/733121/campos_512_v4
+144/733134/campos_512_v4
+144/733149/campos_512_v4
+144/733153/campos_512_v4
+144/733155/campos_512_v4
+144/733156/campos_512_v4
+144/733159/campos_512_v4
+144/733179/campos_512_v4
+144/733183/campos_512_v4
+144/733184/campos_512_v4
+144/733185/campos_512_v4
+144/733192/campos_512_v4
+144/733197/campos_512_v4
+144/733202/campos_512_v4
+144/733209/campos_512_v4
+144/733215/campos_512_v4
+144/733224/campos_512_v4
+144/733249/campos_512_v4
+144/733253/campos_512_v4
+144/733254/campos_512_v4
+144/733281/campos_512_v4
+144/733285/campos_512_v4
+144/733290/campos_512_v4
+144/733298/campos_512_v4
+144/733306/campos_512_v4
+144/733307/campos_512_v4
+144/733310/campos_512_v4
+144/733314/campos_512_v4
+144/733315/campos_512_v4
+144/733319/campos_512_v4
+144/733343/campos_512_v4
+144/733345/campos_512_v4
+144/733347/campos_512_v4
+144/733354/campos_512_v4
+144/733368/campos_512_v4
+144/733375/campos_512_v4
+144/733376/campos_512_v4
+144/733378/campos_512_v4
+144/733380/campos_512_v4
+144/733381/campos_512_v4
+144/733387/campos_512_v4
+144/733399/campos_512_v4
+144/733403/campos_512_v4
+144/733421/campos_512_v4
+144/733434/campos_512_v4
+144/733439/campos_512_v4
+144/733461/campos_512_v4
+144/733468/campos_512_v4
+144/733471/campos_512_v4
+144/733486/campos_512_v4
+144/733492/campos_512_v4
+144/733517/campos_512_v4
+144/733526/campos_512_v4
+144/733538/campos_512_v4
+144/733543/campos_512_v4
+144/733546/campos_512_v4
+144/733556/campos_512_v4
+144/733575/campos_512_v4
+144/733586/campos_512_v4
+144/733587/campos_512_v4
+144/733588/campos_512_v4
+144/733599/campos_512_v4
+144/733605/campos_512_v4
+144/733636/campos_512_v4
+144/733640/campos_512_v4
+144/733642/campos_512_v4
+144/733643/campos_512_v4
+144/733655/campos_512_v4
+144/733656/campos_512_v4
+144/733660/campos_512_v4
+144/733670/campos_512_v4
+144/733672/campos_512_v4
+144/733677/campos_512_v4
+144/733678/campos_512_v4
+144/733691/campos_512_v4
+144/733697/campos_512_v4
+144/733700/campos_512_v4
+144/733707/campos_512_v4
+144/733712/campos_512_v4
+144/733713/campos_512_v4
+144/733721/campos_512_v4
+144/733722/campos_512_v4
+144/733743/campos_512_v4
+144/733747/campos_512_v4
+144/733751/campos_512_v4
+144/733756/campos_512_v4
+144/733767/campos_512_v4
+144/733779/campos_512_v4
+144/733783/campos_512_v4
+144/733784/campos_512_v4
+144/733785/campos_512_v4
+144/733788/campos_512_v4
+144/733798/campos_512_v4
+144/733805/campos_512_v4
+144/733817/campos_512_v4
+144/733818/campos_512_v4
+144/733823/campos_512_v4
+144/733844/campos_512_v4
+144/733846/campos_512_v4
+144/733848/campos_512_v4
+144/733863/campos_512_v4
+144/733865/campos_512_v4
+144/733870/campos_512_v4
+144/733875/campos_512_v4
+144/733878/campos_512_v4
+144/733891/campos_512_v4
+144/733897/campos_512_v4
+144/733899/campos_512_v4
+144/733908/campos_512_v4
+144/733912/campos_512_v4
+144/733915/campos_512_v4
+144/733931/campos_512_v4
+144/733932/campos_512_v4
+144/733937/campos_512_v4
+144/733938/campos_512_v4
+144/733942/campos_512_v4
+144/733943/campos_512_v4
+144/733948/campos_512_v4
+144/733962/campos_512_v4
+144/733979/campos_512_v4
+144/734008/campos_512_v4
+144/734010/campos_512_v4
+144/734016/campos_512_v4
+144/734018/campos_512_v4
+144/734034/campos_512_v4
+144/734046/campos_512_v4
+144/734054/campos_512_v4
+144/734071/campos_512_v4
+144/734087/campos_512_v4
+144/734109/campos_512_v4
+144/734111/campos_512_v4
+144/734113/campos_512_v4
+144/734136/campos_512_v4
+144/734139/campos_512_v4
+144/734150/campos_512_v4
+144/734162/campos_512_v4
+144/734191/campos_512_v4
+144/734205/campos_512_v4
+144/734207/campos_512_v4
+144/734208/campos_512_v4
+144/734211/campos_512_v4
+144/734215/campos_512_v4
+144/734226/campos_512_v4
+144/734240/campos_512_v4
+144/734252/campos_512_v4
+144/734257/campos_512_v4
+144/734263/campos_512_v4
+144/734266/campos_512_v4
+144/734269/campos_512_v4
+144/734280/campos_512_v4
+144/734286/campos_512_v4
+144/734287/campos_512_v4
+144/734293/campos_512_v4
+144/734297/campos_512_v4
+144/734304/campos_512_v4
+144/734306/campos_512_v4
+144/734309/campos_512_v4
+144/734313/campos_512_v4
+144/734315/campos_512_v4
+144/734318/campos_512_v4
+144/734334/campos_512_v4
+144/734336/campos_512_v4
+144/734339/campos_512_v4
+144/734340/campos_512_v4
+144/734350/campos_512_v4
+144/734358/campos_512_v4
+144/734364/campos_512_v4
+144/734368/campos_512_v4
+144/734373/campos_512_v4
+144/734377/campos_512_v4
+144/734380/campos_512_v4
+144/734390/campos_512_v4
+144/734392/campos_512_v4
+144/734394/campos_512_v4
+144/734421/campos_512_v4
+144/734432/campos_512_v4
+144/734455/campos_512_v4
+144/734481/campos_512_v4
+144/734486/campos_512_v4
+144/734503/campos_512_v4
+144/734517/campos_512_v4
+144/734519/campos_512_v4
+144/734526/campos_512_v4
+144/734551/campos_512_v4
+144/734556/campos_512_v4
+144/734563/campos_512_v4
+144/734575/campos_512_v4
+144/734587/campos_512_v4
+144/734590/campos_512_v4
+144/734592/campos_512_v4
+144/734596/campos_512_v4
+144/734610/campos_512_v4
+144/734630/campos_512_v4
+144/734641/campos_512_v4
+144/734647/campos_512_v4
+144/734650/campos_512_v4
+144/734653/campos_512_v4
+144/734657/campos_512_v4
+144/734661/campos_512_v4
+144/734671/campos_512_v4
+144/734681/campos_512_v4
+144/734686/campos_512_v4
+144/734720/campos_512_v4
+144/734733/campos_512_v4
+144/734740/campos_512_v4
+144/734752/campos_512_v4
+144/734753/campos_512_v4
+144/734758/campos_512_v4
+144/734761/campos_512_v4
+144/734762/campos_512_v4
+144/734778/campos_512_v4
+144/734779/campos_512_v4
+144/734789/campos_512_v4
+144/734795/campos_512_v4
+144/734800/campos_512_v4
+144/734803/campos_512_v4
+144/734808/campos_512_v4
+144/734816/campos_512_v4
+144/734819/campos_512_v4
+144/734830/campos_512_v4
+144/734837/campos_512_v4
+144/734841/campos_512_v4
+144/734848/campos_512_v4
+144/734865/campos_512_v4
+144/734867/campos_512_v4
+144/734871/campos_512_v4
+144/734884/campos_512_v4
+144/734889/campos_512_v4
+144/734897/campos_512_v4
+144/734901/campos_512_v4
+144/734906/campos_512_v4
+144/734914/campos_512_v4
+144/734938/campos_512_v4
+144/734948/campos_512_v4
+144/734955/campos_512_v4
+144/734970/campos_512_v4
+144/734973/campos_512_v4
+144/735000/campos_512_v4
+145/735005/campos_512_v4
+145/735007/campos_512_v4
+145/735008/campos_512_v4
+145/735010/campos_512_v4
+145/735011/campos_512_v4
+145/735025/campos_512_v4
+145/735039/campos_512_v4
+145/735041/campos_512_v4
+145/735045/campos_512_v4
+145/735046/campos_512_v4
+145/735058/campos_512_v4
+145/735059/campos_512_v4
+145/735062/campos_512_v4
+145/735066/campos_512_v4
+145/735070/campos_512_v4
+145/735084/campos_512_v4
+145/735091/campos_512_v4
+145/735095/campos_512_v4
+145/735096/campos_512_v4
+145/735098/campos_512_v4
+145/735099/campos_512_v4
+145/735102/campos_512_v4
+145/735105/campos_512_v4
+145/735108/campos_512_v4
+145/735109/campos_512_v4
+145/735111/campos_512_v4
+145/735119/campos_512_v4
+145/735120/campos_512_v4
+145/735123/campos_512_v4
+145/735132/campos_512_v4
+145/735140/campos_512_v4
+145/735154/campos_512_v4
+145/735158/campos_512_v4
+145/735186/campos_512_v4
+145/735195/campos_512_v4
+145/735196/campos_512_v4
+145/735200/campos_512_v4
+145/735214/campos_512_v4
+145/735222/campos_512_v4
+145/735225/campos_512_v4
+145/735231/campos_512_v4
+145/735236/campos_512_v4
+145/735237/campos_512_v4
+145/735238/campos_512_v4
+145/735241/campos_512_v4
+145/735258/campos_512_v4
+145/735263/campos_512_v4
+145/735277/campos_512_v4
+145/735300/campos_512_v4
+145/735304/campos_512_v4
+145/735310/campos_512_v4
+145/735314/campos_512_v4
+145/735333/campos_512_v4
+145/735336/campos_512_v4
+145/735341/campos_512_v4
+145/735344/campos_512_v4
+145/735361/campos_512_v4
+145/735369/campos_512_v4
+145/735373/campos_512_v4
+145/735376/campos_512_v4
+145/735377/campos_512_v4
+145/735379/campos_512_v4
+145/735385/campos_512_v4
+145/735389/campos_512_v4
+145/735394/campos_512_v4
+145/735409/campos_512_v4
+145/735410/campos_512_v4
+145/735411/campos_512_v4
+145/735416/campos_512_v4
+145/735417/campos_512_v4
+145/735419/campos_512_v4
+145/735434/campos_512_v4
+145/735446/campos_512_v4
+145/735448/campos_512_v4
+145/735456/campos_512_v4
+145/735462/campos_512_v4
+145/735463/campos_512_v4
+145/735479/campos_512_v4
+145/735483/campos_512_v4
+145/735492/campos_512_v4
+145/735494/campos_512_v4
+145/735503/campos_512_v4
+145/735505/campos_512_v4
+145/735506/campos_512_v4
+145/735518/campos_512_v4
+145/735528/campos_512_v4
+145/735537/campos_512_v4
+145/735543/campos_512_v4
+145/735546/campos_512_v4
+145/735547/campos_512_v4
+145/735550/campos_512_v4
+145/735553/campos_512_v4
+145/735560/campos_512_v4
+145/735570/campos_512_v4
+145/735572/campos_512_v4
+145/735575/campos_512_v4
+145/735576/campos_512_v4
+145/735578/campos_512_v4
+145/735590/campos_512_v4
+145/735591/campos_512_v4
+145/735600/campos_512_v4
+145/735604/campos_512_v4
+145/735612/campos_512_v4
+145/735621/campos_512_v4
+145/735627/campos_512_v4
+145/735631/campos_512_v4
+145/735635/campos_512_v4
+145/735649/campos_512_v4
+145/735662/campos_512_v4
+145/735686/campos_512_v4
+145/735690/campos_512_v4
+145/735694/campos_512_v4
+145/735705/campos_512_v4
+145/735709/campos_512_v4
+145/735713/campos_512_v4
+145/735716/campos_512_v4
+145/735722/campos_512_v4
+145/735724/campos_512_v4
+145/735738/campos_512_v4
+145/735752/campos_512_v4
+145/735753/campos_512_v4
+145/735755/campos_512_v4
+145/735758/campos_512_v4
+145/735762/campos_512_v4
+145/735772/campos_512_v4
+145/735786/campos_512_v4
+145/735798/campos_512_v4
+145/735803/campos_512_v4
+145/735814/campos_512_v4
+145/735826/campos_512_v4
+145/735830/campos_512_v4
+145/735831/campos_512_v4
+145/735840/campos_512_v4
+145/735853/campos_512_v4
+145/735856/campos_512_v4
+145/735858/campos_512_v4
+145/735860/campos_512_v4
+145/735863/campos_512_v4
+145/735870/campos_512_v4
+145/735871/campos_512_v4
+145/735877/campos_512_v4
+145/735882/campos_512_v4
+145/735886/campos_512_v4
+145/735887/campos_512_v4
+145/735899/campos_512_v4
+145/735907/campos_512_v4
+145/735913/campos_512_v4
+145/735914/campos_512_v4
+145/735916/campos_512_v4
+145/735930/campos_512_v4
+145/735932/campos_512_v4
+145/735933/campos_512_v4
+145/735942/campos_512_v4
+145/735943/campos_512_v4
+145/735953/campos_512_v4
+145/735973/campos_512_v4
+145/735986/campos_512_v4
+145/735989/campos_512_v4
+145/736004/campos_512_v4
+145/736006/campos_512_v4
+145/736012/campos_512_v4
+145/736013/campos_512_v4
+145/736023/campos_512_v4
+145/736036/campos_512_v4
+145/736047/campos_512_v4
+145/736060/campos_512_v4
+145/736076/campos_512_v4
+145/736078/campos_512_v4
+145/736085/campos_512_v4
+145/736096/campos_512_v4
+145/736100/campos_512_v4
+145/736102/campos_512_v4
+145/736111/campos_512_v4
+145/736127/campos_512_v4
+145/736142/campos_512_v4
+145/736161/campos_512_v4
+145/736163/campos_512_v4
+145/736165/campos_512_v4
+145/736172/campos_512_v4
+145/736179/campos_512_v4
+145/736182/campos_512_v4
+145/736194/campos_512_v4
+145/736196/campos_512_v4
+145/736208/campos_512_v4
+145/736230/campos_512_v4
+145/736256/campos_512_v4
+145/736265/campos_512_v4
+145/736269/campos_512_v4
+145/736283/campos_512_v4
+145/736299/campos_512_v4
+145/736304/campos_512_v4
+145/736307/campos_512_v4
+145/736315/campos_512_v4
+145/736317/campos_512_v4
+145/736320/campos_512_v4
+145/736323/campos_512_v4
+145/736326/campos_512_v4
+145/736328/campos_512_v4
+145/736329/campos_512_v4
+145/736330/campos_512_v4
+145/736331/campos_512_v4
+145/736348/campos_512_v4
+145/736352/campos_512_v4
+145/736353/campos_512_v4
+145/736356/campos_512_v4
+145/736357/campos_512_v4
+145/736358/campos_512_v4
+145/736374/campos_512_v4
+145/736381/campos_512_v4
+145/736384/campos_512_v4
+145/736388/campos_512_v4
+145/736393/campos_512_v4
+145/736400/campos_512_v4
+145/736409/campos_512_v4
+145/736418/campos_512_v4
+145/736420/campos_512_v4
+145/736432/campos_512_v4
+145/736437/campos_512_v4
+145/736439/campos_512_v4
+145/736446/campos_512_v4
+145/736450/campos_512_v4
+145/736458/campos_512_v4
+145/736468/campos_512_v4
+145/736472/campos_512_v4
+145/736487/campos_512_v4
+145/736494/campos_512_v4
+145/736497/campos_512_v4
+145/736498/campos_512_v4
+145/736524/campos_512_v4
+145/736536/campos_512_v4
+145/736548/campos_512_v4
+145/736566/campos_512_v4
+145/736570/campos_512_v4
+145/736577/campos_512_v4
+145/736580/campos_512_v4
+145/736582/campos_512_v4
+145/736611/campos_512_v4
+145/736624/campos_512_v4
+145/736644/campos_512_v4
+145/736660/campos_512_v4
+145/736665/campos_512_v4
+145/736677/campos_512_v4
+145/736683/campos_512_v4
+145/736685/campos_512_v4
+145/736686/campos_512_v4
+145/736692/campos_512_v4
+145/736694/campos_512_v4
+145/736713/campos_512_v4
+145/736718/campos_512_v4
+145/736742/campos_512_v4
+145/736749/campos_512_v4
+145/736752/campos_512_v4
+145/736760/campos_512_v4
+145/736770/campos_512_v4
+145/736771/campos_512_v4
+145/736777/campos_512_v4
+145/736794/campos_512_v4
+145/736818/campos_512_v4
+145/736821/campos_512_v4
+145/736826/campos_512_v4
+145/736836/campos_512_v4
+145/736839/campos_512_v4
+145/736843/campos_512_v4
+145/736844/campos_512_v4
+145/736848/campos_512_v4
+145/736856/campos_512_v4
+145/736882/campos_512_v4
+145/736888/campos_512_v4
+145/736898/campos_512_v4
+145/736900/campos_512_v4
+145/736901/campos_512_v4
+145/736905/campos_512_v4
+145/736907/campos_512_v4
+145/736910/campos_512_v4
+145/736911/campos_512_v4
+145/736913/campos_512_v4
+145/736926/campos_512_v4
+145/736950/campos_512_v4
+145/736953/campos_512_v4
+145/736956/campos_512_v4
+145/736966/campos_512_v4
+145/736968/campos_512_v4
+145/736969/campos_512_v4
+145/736973/campos_512_v4
+145/736978/campos_512_v4
+145/736983/campos_512_v4
+145/737009/campos_512_v4
+145/737016/campos_512_v4
+145/737041/campos_512_v4
+145/737045/campos_512_v4
+145/737049/campos_512_v4
+145/737052/campos_512_v4
+145/737057/campos_512_v4
+145/737060/campos_512_v4
+145/737073/campos_512_v4
+145/737088/campos_512_v4
+145/737096/campos_512_v4
+145/737099/campos_512_v4
+145/737100/campos_512_v4
+145/737103/campos_512_v4
+145/737106/campos_512_v4
+145/737115/campos_512_v4
+145/737118/campos_512_v4
+145/737120/campos_512_v4
+145/737131/campos_512_v4
+145/737135/campos_512_v4
+145/737136/campos_512_v4
+145/737138/campos_512_v4
+145/737139/campos_512_v4
+145/737142/campos_512_v4
+145/737147/campos_512_v4
+145/737149/campos_512_v4
+145/737169/campos_512_v4
+145/737176/campos_512_v4
+145/737180/campos_512_v4
+145/737181/campos_512_v4
+145/737197/campos_512_v4
+145/737204/campos_512_v4
+145/737211/campos_512_v4
+145/737226/campos_512_v4
+145/737230/campos_512_v4
+145/737233/campos_512_v4
+145/737239/campos_512_v4
+145/737241/campos_512_v4
+145/737246/campos_512_v4
+145/737269/campos_512_v4
+145/737276/campos_512_v4
+145/737279/campos_512_v4
+145/737284/campos_512_v4
+145/737293/campos_512_v4
+145/737297/campos_512_v4
+145/737307/campos_512_v4
+145/737313/campos_512_v4
+145/737316/campos_512_v4
+145/737321/campos_512_v4
+145/737329/campos_512_v4
+145/737341/campos_512_v4
+145/737343/campos_512_v4
+145/737349/campos_512_v4
+145/737352/campos_512_v4
+145/737369/campos_512_v4
+145/737371/campos_512_v4
+145/737373/campos_512_v4
+145/737379/campos_512_v4
+145/737385/campos_512_v4
+145/737390/campos_512_v4
+145/737392/campos_512_v4
+145/737400/campos_512_v4
+145/737408/campos_512_v4
+145/737410/campos_512_v4
+145/737430/campos_512_v4
+145/737433/campos_512_v4
+145/737435/campos_512_v4
+145/737437/campos_512_v4
+145/737441/campos_512_v4
+145/737444/campos_512_v4
+145/737451/campos_512_v4
+145/737452/campos_512_v4
+145/737461/campos_512_v4
+145/737477/campos_512_v4
+145/737478/campos_512_v4
+145/737485/campos_512_v4
+145/737486/campos_512_v4
+145/737490/campos_512_v4
+145/737501/campos_512_v4
+145/737515/campos_512_v4
+145/737520/campos_512_v4
+145/737523/campos_512_v4
+145/737551/campos_512_v4
+145/737563/campos_512_v4
+145/737583/campos_512_v4
+145/737594/campos_512_v4
+145/737599/campos_512_v4
+145/737614/campos_512_v4
+145/737627/campos_512_v4
+145/737633/campos_512_v4
+145/737639/campos_512_v4
+145/737658/campos_512_v4
+145/737664/campos_512_v4
+145/737665/campos_512_v4
+145/737680/campos_512_v4
+145/737682/campos_512_v4
+145/737684/campos_512_v4
+145/737685/campos_512_v4
+145/737691/campos_512_v4
+145/737712/campos_512_v4
+145/737714/campos_512_v4
+145/737717/campos_512_v4
+145/737730/campos_512_v4
+145/737738/campos_512_v4
+145/737741/campos_512_v4
+145/737753/campos_512_v4
+145/737754/campos_512_v4
+145/737756/campos_512_v4
+145/737773/campos_512_v4
+145/737778/campos_512_v4
+145/737781/campos_512_v4
+145/737782/campos_512_v4
+145/737783/campos_512_v4
+145/737795/campos_512_v4
+145/737806/campos_512_v4
+145/737814/campos_512_v4
+145/737827/campos_512_v4
+145/737848/campos_512_v4
+145/737852/campos_512_v4
+145/737869/campos_512_v4
+145/737878/campos_512_v4
+145/737886/campos_512_v4
+145/737887/campos_512_v4
+145/737891/campos_512_v4
+145/737897/campos_512_v4
+145/737924/campos_512_v4
+145/737925/campos_512_v4
+145/737947/campos_512_v4
+145/737952/campos_512_v4
+145/737964/campos_512_v4
+145/737972/campos_512_v4
+145/737990/campos_512_v4
+145/737992/campos_512_v4
+145/737999/campos_512_v4
+145/738004/campos_512_v4
+145/738011/campos_512_v4
+145/738025/campos_512_v4
+145/738033/campos_512_v4
+145/738034/campos_512_v4
+145/738055/campos_512_v4
+145/738064/campos_512_v4
+145/738076/campos_512_v4
+145/738080/campos_512_v4
+145/738126/campos_512_v4
+145/738138/campos_512_v4
+145/738139/campos_512_v4
+145/738141/campos_512_v4
+145/738153/campos_512_v4
+145/738156/campos_512_v4
+145/738160/campos_512_v4
+145/738170/campos_512_v4
+145/738177/campos_512_v4
+145/738181/campos_512_v4
+145/738184/campos_512_v4
+145/738193/campos_512_v4
+145/738198/campos_512_v4
+145/738199/campos_512_v4
+145/738205/campos_512_v4
+145/738210/campos_512_v4
+145/738211/campos_512_v4
+145/738217/campos_512_v4
+145/738248/campos_512_v4
+145/738251/campos_512_v4
+145/738255/campos_512_v4
+145/738258/campos_512_v4
+145/738262/campos_512_v4
+145/738286/campos_512_v4
+145/738287/campos_512_v4
+145/738307/campos_512_v4
+145/738330/campos_512_v4
+145/738351/campos_512_v4
+145/738352/campos_512_v4
+145/738361/campos_512_v4
+145/738363/campos_512_v4
+145/738365/campos_512_v4
+145/738376/campos_512_v4
+145/738383/campos_512_v4
+145/738407/campos_512_v4
+145/738430/campos_512_v4
+145/738437/campos_512_v4
+145/738442/campos_512_v4
+145/738458/campos_512_v4
+145/738463/campos_512_v4
+145/738467/campos_512_v4
+145/738471/campos_512_v4
+145/738472/campos_512_v4
+145/738479/campos_512_v4
+145/738502/campos_512_v4
+145/738507/campos_512_v4
+145/738509/campos_512_v4
+145/738515/campos_512_v4
+145/738520/campos_512_v4
+145/738521/campos_512_v4
+145/738525/campos_512_v4
+145/738537/campos_512_v4
+145/738545/campos_512_v4
+145/738567/campos_512_v4
+145/738571/campos_512_v4
+145/738587/campos_512_v4
+145/738591/campos_512_v4
+145/738592/campos_512_v4
+145/738597/campos_512_v4
+145/738598/campos_512_v4
+145/738599/campos_512_v4
+145/738611/campos_512_v4
+145/738614/campos_512_v4
+145/738618/campos_512_v4
+145/738620/campos_512_v4
+145/738647/campos_512_v4
+145/738649/campos_512_v4
+145/738650/campos_512_v4
+145/738652/campos_512_v4
+145/738656/campos_512_v4
+145/738672/campos_512_v4
+145/738676/campos_512_v4
+145/738677/campos_512_v4
+145/738693/campos_512_v4
+145/738699/campos_512_v4
+145/738705/campos_512_v4
+145/738708/campos_512_v4
+145/738714/campos_512_v4
+145/738717/campos_512_v4
+145/738724/campos_512_v4
+145/738728/campos_512_v4
+145/738748/campos_512_v4
+145/738752/campos_512_v4
+145/738761/campos_512_v4
+145/738762/campos_512_v4
+145/738764/campos_512_v4
+145/738765/campos_512_v4
+145/738767/campos_512_v4
+145/738789/campos_512_v4
+145/738790/campos_512_v4
+145/738791/campos_512_v4
+145/738795/campos_512_v4
+145/738800/campos_512_v4
+145/738801/campos_512_v4
+145/738803/campos_512_v4
+145/738804/campos_512_v4
+145/738812/campos_512_v4
+145/738826/campos_512_v4
+145/738830/campos_512_v4
+145/738836/campos_512_v4
+145/738839/campos_512_v4
+145/738841/campos_512_v4
+145/738844/campos_512_v4
+145/738849/campos_512_v4
+145/738850/campos_512_v4
+145/738856/campos_512_v4
+145/738866/campos_512_v4
+145/738872/campos_512_v4
+145/738879/campos_512_v4
+145/738886/campos_512_v4
+145/738897/campos_512_v4
+145/738907/campos_512_v4
+145/738913/campos_512_v4
+145/738916/campos_512_v4
+145/738929/campos_512_v4
+145/738954/campos_512_v4
+145/738956/campos_512_v4
+145/738968/campos_512_v4
+145/738969/campos_512_v4
+145/738983/campos_512_v4
+145/738991/campos_512_v4
+145/738992/campos_512_v4
+145/739000/campos_512_v4
+145/739006/campos_512_v4
+145/739025/campos_512_v4
+145/739026/campos_512_v4
+145/739027/campos_512_v4
+145/739038/campos_512_v4
+145/739043/campos_512_v4
+145/739046/campos_512_v4
+145/739047/campos_512_v4
+145/739050/campos_512_v4
+145/739075/campos_512_v4
+145/739082/campos_512_v4
+145/739086/campos_512_v4
+145/739107/campos_512_v4
+145/739108/campos_512_v4
+145/739109/campos_512_v4
+145/739111/campos_512_v4
+145/739114/campos_512_v4
+145/739120/campos_512_v4
+145/739125/campos_512_v4
+145/739131/campos_512_v4
+145/739132/campos_512_v4
+145/739147/campos_512_v4
+145/739148/campos_512_v4
+145/739155/campos_512_v4
+145/739173/campos_512_v4
+145/739174/campos_512_v4
+145/739176/campos_512_v4
+145/739179/campos_512_v4
+145/739183/campos_512_v4
+145/739188/campos_512_v4
+145/739189/campos_512_v4
+145/739190/campos_512_v4
+145/739204/campos_512_v4
+145/739205/campos_512_v4
+145/739209/campos_512_v4
+145/739210/campos_512_v4
+145/739215/campos_512_v4
+145/739218/campos_512_v4
+145/739221/campos_512_v4
+145/739235/campos_512_v4
+145/739240/campos_512_v4
+145/739254/campos_512_v4
+145/739280/campos_512_v4
+145/739286/campos_512_v4
+145/739287/campos_512_v4
+145/739304/campos_512_v4
+145/739311/campos_512_v4
+145/739313/campos_512_v4
+145/739318/campos_512_v4
+145/739320/campos_512_v4
+145/739321/campos_512_v4
+145/739328/campos_512_v4
+145/739334/campos_512_v4
+145/739341/campos_512_v4
+145/739344/campos_512_v4
+145/739346/campos_512_v4
+145/739353/campos_512_v4
+145/739358/campos_512_v4
+145/739362/campos_512_v4
+145/739369/campos_512_v4
+145/739371/campos_512_v4
+145/739372/campos_512_v4
+145/739376/campos_512_v4
+145/739390/campos_512_v4
+145/739400/campos_512_v4
+145/739403/campos_512_v4
+145/739407/campos_512_v4
+145/739415/campos_512_v4
+145/739416/campos_512_v4
+145/739417/campos_512_v4
+145/739423/campos_512_v4
+145/739426/campos_512_v4
+145/739427/campos_512_v4
+145/739428/campos_512_v4
+145/739452/campos_512_v4
+145/739474/campos_512_v4
+145/739477/campos_512_v4
+145/739501/campos_512_v4
+145/739505/campos_512_v4
+145/739529/campos_512_v4
+145/739534/campos_512_v4
+145/739538/campos_512_v4
+145/739545/campos_512_v4
+145/739560/campos_512_v4
+145/739581/campos_512_v4
+145/739598/campos_512_v4
+145/739613/campos_512_v4
+145/739618/campos_512_v4
+145/739619/campos_512_v4
+145/739626/campos_512_v4
+145/739643/campos_512_v4
+145/739644/campos_512_v4
+145/739657/campos_512_v4
+145/739659/campos_512_v4
+145/739665/campos_512_v4
+145/739671/campos_512_v4
+145/739674/campos_512_v4
+145/739679/campos_512_v4
+145/739686/campos_512_v4
+145/739688/campos_512_v4
+145/739695/campos_512_v4
+145/739698/campos_512_v4
+145/739703/campos_512_v4
+145/739706/campos_512_v4
+145/739711/campos_512_v4
+145/739720/campos_512_v4
+145/739733/campos_512_v4
+145/739735/campos_512_v4
+145/739762/campos_512_v4
+145/739765/campos_512_v4
+145/739772/campos_512_v4
+145/739779/campos_512_v4
+145/739780/campos_512_v4
+145/739798/campos_512_v4
+145/739803/campos_512_v4
+145/739808/campos_512_v4
+145/739810/campos_512_v4
+145/739831/campos_512_v4
+145/739832/campos_512_v4
+145/739837/campos_512_v4
+145/739847/campos_512_v4
+145/739853/campos_512_v4
+145/739859/campos_512_v4
+145/739862/campos_512_v4
+145/739866/campos_512_v4
+145/739868/campos_512_v4
+145/739886/campos_512_v4
+145/739890/campos_512_v4
+145/739901/campos_512_v4
+145/739911/campos_512_v4
+145/739917/campos_512_v4
+145/739918/campos_512_v4
+145/739921/campos_512_v4
+145/739927/campos_512_v4
+145/739944/campos_512_v4
+145/739950/campos_512_v4
+145/739951/campos_512_v4
+145/739956/campos_512_v4
+145/739969/campos_512_v4
+145/739985/campos_512_v4
+145/739990/campos_512_v4
+145/739992/campos_512_v4
+145/739994/campos_512_v4
+146/740003/campos_512_v4
+146/740007/campos_512_v4
+146/740026/campos_512_v4
+146/740036/campos_512_v4
+146/740043/campos_512_v4
+146/740045/campos_512_v4
+146/740046/campos_512_v4
+146/740058/campos_512_v4
+146/740093/campos_512_v4
+146/740104/campos_512_v4
+146/740114/campos_512_v4
+146/740123/campos_512_v4
+146/740138/campos_512_v4
+146/740153/campos_512_v4
+146/740156/campos_512_v4
+146/740158/campos_512_v4
+146/740159/campos_512_v4
+146/740164/campos_512_v4
+146/740175/campos_512_v4
+146/740180/campos_512_v4
+146/740181/campos_512_v4
+146/740184/campos_512_v4
+146/740186/campos_512_v4
+146/740190/campos_512_v4
+146/740194/campos_512_v4
+146/740195/campos_512_v4
+146/740197/campos_512_v4
+146/740198/campos_512_v4
+146/740205/campos_512_v4
+146/740213/campos_512_v4
+146/740216/campos_512_v4
+146/740221/campos_512_v4
+146/740238/campos_512_v4
+146/740254/campos_512_v4
+146/740256/campos_512_v4
+146/740260/campos_512_v4
+146/740264/campos_512_v4
+146/740274/campos_512_v4
+146/740279/campos_512_v4
+146/740295/campos_512_v4
+146/740303/campos_512_v4
+146/740310/campos_512_v4
+146/740311/campos_512_v4
+146/740314/campos_512_v4
+146/740317/campos_512_v4
+146/740322/campos_512_v4
+146/740333/campos_512_v4
+146/740339/campos_512_v4
+146/740341/campos_512_v4
+146/740348/campos_512_v4
+146/740349/campos_512_v4
+146/740351/campos_512_v4
+146/740353/campos_512_v4
+146/740358/campos_512_v4
+146/740362/campos_512_v4
+146/740366/campos_512_v4
+146/740389/campos_512_v4
+146/740404/campos_512_v4
+146/740407/campos_512_v4
+146/740419/campos_512_v4
+146/740428/campos_512_v4
+146/740451/campos_512_v4
+146/740454/campos_512_v4
+146/740461/campos_512_v4
+146/740464/campos_512_v4
+146/740468/campos_512_v4
+146/740478/campos_512_v4
+146/740487/campos_512_v4
+146/740493/campos_512_v4
+146/740502/campos_512_v4
+146/740504/campos_512_v4
+146/740527/campos_512_v4
+146/740539/campos_512_v4
+146/740543/campos_512_v4
+146/740546/campos_512_v4
+146/740550/campos_512_v4
+146/740556/campos_512_v4
+146/740565/campos_512_v4
+146/740571/campos_512_v4
+146/740572/campos_512_v4
+146/740579/campos_512_v4
+146/740580/campos_512_v4
+146/740587/campos_512_v4
+146/740592/campos_512_v4
+146/740593/campos_512_v4
+146/740594/campos_512_v4
+146/740598/campos_512_v4
+146/740601/campos_512_v4
+146/740606/campos_512_v4
+146/740618/campos_512_v4
+146/740630/campos_512_v4
+146/740631/campos_512_v4
+146/740636/campos_512_v4
+146/740645/campos_512_v4
+146/740651/campos_512_v4
+146/740669/campos_512_v4
+146/740670/campos_512_v4
+146/740679/campos_512_v4
+146/740686/campos_512_v4
+146/740695/campos_512_v4
+146/740700/campos_512_v4
+146/740702/campos_512_v4
+146/740712/campos_512_v4
+146/740717/campos_512_v4
+146/740726/campos_512_v4
+146/740735/campos_512_v4
+146/740736/campos_512_v4
+146/740763/campos_512_v4
+146/740772/campos_512_v4
+146/740783/campos_512_v4
+146/740797/campos_512_v4
+146/740807/campos_512_v4
+146/740813/campos_512_v4
+146/740820/campos_512_v4
+146/740822/campos_512_v4
+146/740840/campos_512_v4
+146/740845/campos_512_v4
+146/740855/campos_512_v4
+146/740856/campos_512_v4
+146/740869/campos_512_v4
+146/740884/campos_512_v4
+146/740890/campos_512_v4
+146/740899/campos_512_v4
+146/740900/campos_512_v4
+146/740910/campos_512_v4
+146/740912/campos_512_v4
+146/740914/campos_512_v4
+146/740920/campos_512_v4
+146/740921/campos_512_v4
+146/740926/campos_512_v4
+146/740941/campos_512_v4
+146/740948/campos_512_v4
+146/740949/campos_512_v4
+146/740955/campos_512_v4
+146/740957/campos_512_v4
+146/740960/campos_512_v4
+146/740961/campos_512_v4
+146/740964/campos_512_v4
+146/740968/campos_512_v4
+146/740979/campos_512_v4
+146/740982/campos_512_v4
+146/740984/campos_512_v4
+146/740985/campos_512_v4
+146/740987/campos_512_v4
+146/740988/campos_512_v4
+146/741028/campos_512_v4
+146/741030/campos_512_v4
+146/741040/campos_512_v4
+146/741043/campos_512_v4
+146/741051/campos_512_v4
+146/741057/campos_512_v4
+146/741068/campos_512_v4
+146/741086/campos_512_v4
+146/741093/campos_512_v4
+146/741096/campos_512_v4
+146/741114/campos_512_v4
+146/741139/campos_512_v4
+146/741158/campos_512_v4
+146/741160/campos_512_v4
+146/741165/campos_512_v4
+146/741173/campos_512_v4
+146/741177/campos_512_v4
+146/741181/campos_512_v4
+146/741183/campos_512_v4
+146/741188/campos_512_v4
+146/741193/campos_512_v4
+146/741209/campos_512_v4
+146/741214/campos_512_v4
+146/741217/campos_512_v4
+146/741232/campos_512_v4
+146/741248/campos_512_v4
+146/741251/campos_512_v4
+146/741273/campos_512_v4
+146/741276/campos_512_v4
+146/741289/campos_512_v4
+146/741290/campos_512_v4
+146/741299/campos_512_v4
+146/741310/campos_512_v4
+146/741324/campos_512_v4
+146/741333/campos_512_v4
+146/741340/campos_512_v4
+146/741341/campos_512_v4
+146/741358/campos_512_v4
+146/741359/campos_512_v4
+146/741364/campos_512_v4
+146/741367/campos_512_v4
+146/741369/campos_512_v4
+146/741380/campos_512_v4
+146/741381/campos_512_v4
+146/741385/campos_512_v4
+146/741398/campos_512_v4
+146/741401/campos_512_v4
+146/741416/campos_512_v4
+146/741420/campos_512_v4
+146/741423/campos_512_v4
+146/741424/campos_512_v4
+146/741427/campos_512_v4
+146/741429/campos_512_v4
+146/741439/campos_512_v4
+146/741441/campos_512_v4
+146/741442/campos_512_v4
+146/741459/campos_512_v4
+146/741472/campos_512_v4
+146/741483/campos_512_v4
+146/741488/campos_512_v4
+146/741492/campos_512_v4
+146/741499/campos_512_v4
+146/741528/campos_512_v4
+146/741540/campos_512_v4
+146/741558/campos_512_v4
+146/741559/campos_512_v4
+146/741562/campos_512_v4
+146/741578/campos_512_v4
+146/741581/campos_512_v4
+146/741585/campos_512_v4
+146/741602/campos_512_v4
+146/741606/campos_512_v4
+146/741620/campos_512_v4
+146/741631/campos_512_v4
+146/741638/campos_512_v4
+146/741639/campos_512_v4
+146/741647/campos_512_v4
+146/741657/campos_512_v4
+146/741675/campos_512_v4
+146/741676/campos_512_v4
+146/741694/campos_512_v4
+146/741695/campos_512_v4
+146/741699/campos_512_v4
+146/741700/campos_512_v4
+146/741706/campos_512_v4
+146/741710/campos_512_v4
+146/741711/campos_512_v4
+146/741737/campos_512_v4
+146/741741/campos_512_v4
+146/741746/campos_512_v4
+146/741753/campos_512_v4
+146/741759/campos_512_v4
+146/741761/campos_512_v4
+146/741762/campos_512_v4
+146/741769/campos_512_v4
+146/741771/campos_512_v4
+146/741775/campos_512_v4
+146/741789/campos_512_v4
+146/741792/campos_512_v4
+146/741798/campos_512_v4
+146/741799/campos_512_v4
+146/741802/campos_512_v4
+146/741829/campos_512_v4
+146/741836/campos_512_v4
+146/741852/campos_512_v4
+146/741865/campos_512_v4
+146/741880/campos_512_v4
+146/741886/campos_512_v4
+146/741892/campos_512_v4
+146/741909/campos_512_v4
+146/741919/campos_512_v4
+146/741928/campos_512_v4
+146/741944/campos_512_v4
+146/741953/campos_512_v4
+146/741958/campos_512_v4
+146/741964/campos_512_v4
+146/741967/campos_512_v4
+146/741968/campos_512_v4
+146/741979/campos_512_v4
+146/741981/campos_512_v4
+146/741990/campos_512_v4
+146/741991/campos_512_v4
+146/741992/campos_512_v4
+146/742004/campos_512_v4
+146/742010/campos_512_v4
+146/742014/campos_512_v4
+146/742018/campos_512_v4
+146/742026/campos_512_v4
+146/742031/campos_512_v4
+146/742041/campos_512_v4
+146/742052/campos_512_v4
+146/742053/campos_512_v4
+146/742056/campos_512_v4
+146/742060/campos_512_v4
+146/742068/campos_512_v4
+146/742074/campos_512_v4
+146/742075/campos_512_v4
+146/742080/campos_512_v4
+146/742091/campos_512_v4
+146/742103/campos_512_v4
+146/742108/campos_512_v4
+146/742117/campos_512_v4
+146/742121/campos_512_v4
+146/742133/campos_512_v4
+146/742136/campos_512_v4
+146/742138/campos_512_v4
+146/742142/campos_512_v4
+146/742144/campos_512_v4
+146/742172/campos_512_v4
+146/742179/campos_512_v4
+146/742183/campos_512_v4
+146/742189/campos_512_v4
+146/742203/campos_512_v4
+146/742211/campos_512_v4
+146/742216/campos_512_v4
+146/742222/campos_512_v4
+146/742226/campos_512_v4
+146/742233/campos_512_v4
+146/742244/campos_512_v4
+146/742249/campos_512_v4
+146/742251/campos_512_v4
+146/742262/campos_512_v4
+146/742265/campos_512_v4
+146/742279/campos_512_v4
+146/742281/campos_512_v4
+146/742284/campos_512_v4
+146/742298/campos_512_v4
+146/742305/campos_512_v4
+146/742308/campos_512_v4
+146/742310/campos_512_v4
+146/742314/campos_512_v4
+146/742329/campos_512_v4
+146/742342/campos_512_v4
+146/742357/campos_512_v4
+146/742362/campos_512_v4
+146/742372/campos_512_v4
+146/742378/campos_512_v4
+146/742386/campos_512_v4
+146/742422/campos_512_v4
+146/742425/campos_512_v4
+146/742432/campos_512_v4
+146/742437/campos_512_v4
+146/742440/campos_512_v4
+146/742443/campos_512_v4
+146/742446/campos_512_v4
+146/742449/campos_512_v4
+146/742454/campos_512_v4
+146/742468/campos_512_v4
+146/742479/campos_512_v4
+146/742482/campos_512_v4
+146/742485/campos_512_v4
+146/742495/campos_512_v4
+146/742496/campos_512_v4
+146/742504/campos_512_v4
+146/742526/campos_512_v4
+146/742536/campos_512_v4
+146/742549/campos_512_v4
+146/742550/campos_512_v4
+146/742570/campos_512_v4
+146/742580/campos_512_v4
+146/742584/campos_512_v4
+146/742586/campos_512_v4
+146/742596/campos_512_v4
+146/742599/campos_512_v4
+146/742612/campos_512_v4
+146/742617/campos_512_v4
+146/742638/campos_512_v4
+146/742648/campos_512_v4
+146/742654/campos_512_v4
+146/742662/campos_512_v4
+146/742672/campos_512_v4
+146/742677/campos_512_v4
+146/742680/campos_512_v4
+146/742681/campos_512_v4
+146/742686/campos_512_v4
+146/742698/campos_512_v4
+146/742709/campos_512_v4
+146/742721/campos_512_v4
+146/742726/campos_512_v4
+146/742727/campos_512_v4
+146/742734/campos_512_v4
+146/742752/campos_512_v4
+146/742754/campos_512_v4
+146/742770/campos_512_v4
+146/742784/campos_512_v4
+146/742788/campos_512_v4
+146/742801/campos_512_v4
+146/742802/campos_512_v4
+146/742806/campos_512_v4
+146/742807/campos_512_v4
+146/742812/campos_512_v4
+146/742820/campos_512_v4
+146/742823/campos_512_v4
+146/742828/campos_512_v4
+146/742829/campos_512_v4
+146/742831/campos_512_v4
+146/742850/campos_512_v4
+146/742856/campos_512_v4
+146/742871/campos_512_v4
+146/742877/campos_512_v4
+146/742887/campos_512_v4
+146/742888/campos_512_v4
+146/742897/campos_512_v4
+146/742899/campos_512_v4
+146/742900/campos_512_v4
+146/742909/campos_512_v4
+146/742911/campos_512_v4
+146/742916/campos_512_v4
+146/742921/campos_512_v4
+146/742941/campos_512_v4
+146/742946/campos_512_v4
+146/742949/campos_512_v4
+146/742951/campos_512_v4
+146/742956/campos_512_v4
+146/742964/campos_512_v4
+146/742968/campos_512_v4
+146/742972/campos_512_v4
+146/742985/campos_512_v4
+146/742996/campos_512_v4
+146/742997/campos_512_v4
+146/742998/campos_512_v4
+146/743001/campos_512_v4
+146/743008/campos_512_v4
+146/743009/campos_512_v4
+146/743019/campos_512_v4
+146/743023/campos_512_v4
+146/743038/campos_512_v4
+146/743039/campos_512_v4
+146/743045/campos_512_v4
+146/743046/campos_512_v4
+146/743054/campos_512_v4
+146/743065/campos_512_v4
+146/743075/campos_512_v4
+146/743088/campos_512_v4
+146/743090/campos_512_v4
+146/743110/campos_512_v4
+146/743113/campos_512_v4
+146/743132/campos_512_v4
+146/743144/campos_512_v4
+146/743158/campos_512_v4
+146/743165/campos_512_v4
+146/743174/campos_512_v4
+146/743176/campos_512_v4
+146/743183/campos_512_v4
+146/743201/campos_512_v4
+146/743202/campos_512_v4
+146/743219/campos_512_v4
+146/743221/campos_512_v4
+146/743226/campos_512_v4
+146/743262/campos_512_v4
+146/743270/campos_512_v4
+146/743272/campos_512_v4
+146/743278/campos_512_v4
+146/743283/campos_512_v4
+146/743284/campos_512_v4
+146/743291/campos_512_v4
+146/743299/campos_512_v4
+146/743300/campos_512_v4
+146/743302/campos_512_v4
+146/743310/campos_512_v4
+146/743311/campos_512_v4
+146/743316/campos_512_v4
+146/743335/campos_512_v4
+146/743340/campos_512_v4
+146/743342/campos_512_v4
+146/743346/campos_512_v4
+146/743363/campos_512_v4
+146/743373/campos_512_v4
+146/743377/campos_512_v4
+146/743381/campos_512_v4
+146/743400/campos_512_v4
+146/743419/campos_512_v4
+146/743432/campos_512_v4
+146/743434/campos_512_v4
+146/743455/campos_512_v4
+146/743458/campos_512_v4
+146/743473/campos_512_v4
+146/743480/campos_512_v4
+146/743482/campos_512_v4
+146/743486/campos_512_v4
+146/743490/campos_512_v4
+146/743504/campos_512_v4
+146/743510/campos_512_v4
+146/743526/campos_512_v4
+146/743541/campos_512_v4
+146/743545/campos_512_v4
+146/743564/campos_512_v4
+146/743566/campos_512_v4
+146/743571/campos_512_v4
+146/743585/campos_512_v4
+146/743606/campos_512_v4
+146/743607/campos_512_v4
+146/743628/campos_512_v4
+146/743629/campos_512_v4
+146/743638/campos_512_v4
+146/743649/campos_512_v4
+146/743654/campos_512_v4
+146/743655/campos_512_v4
+146/743677/campos_512_v4
+146/743684/campos_512_v4
+146/743692/campos_512_v4
+146/743703/campos_512_v4
+146/743721/campos_512_v4
+146/743737/campos_512_v4
+146/743747/campos_512_v4
+146/743766/campos_512_v4
+146/743772/campos_512_v4
+146/743774/campos_512_v4
+146/743784/campos_512_v4
+146/743817/campos_512_v4
+146/743829/campos_512_v4
+146/743850/campos_512_v4
+146/743856/campos_512_v4
+146/743863/campos_512_v4
+146/743872/campos_512_v4
+146/743886/campos_512_v4
+146/743891/campos_512_v4
+146/743907/campos_512_v4
+146/743911/campos_512_v4
+146/743912/campos_512_v4
+146/743914/campos_512_v4
+146/743930/campos_512_v4
+146/743936/campos_512_v4
+146/743945/campos_512_v4
+146/743958/campos_512_v4
+146/743961/campos_512_v4
+146/743968/campos_512_v4
+146/743983/campos_512_v4
+146/744004/campos_512_v4
+146/744032/campos_512_v4
+146/744034/campos_512_v4
+146/744038/campos_512_v4
+146/744054/campos_512_v4
+146/744055/campos_512_v4
+146/744058/campos_512_v4
+146/744060/campos_512_v4
+146/744062/campos_512_v4
+146/744064/campos_512_v4
+146/744067/campos_512_v4
+146/744072/campos_512_v4
+146/744075/campos_512_v4
+146/744078/campos_512_v4
+146/744081/campos_512_v4
+146/744083/campos_512_v4
+146/744097/campos_512_v4
+146/744099/campos_512_v4
+146/744104/campos_512_v4
+146/744110/campos_512_v4
+146/744122/campos_512_v4
+146/744129/campos_512_v4
+146/744132/campos_512_v4
+146/744134/campos_512_v4
+146/744150/campos_512_v4
+146/744165/campos_512_v4
+146/744166/campos_512_v4
+146/744168/campos_512_v4
+146/744177/campos_512_v4
+146/744183/campos_512_v4
+146/744199/campos_512_v4
+146/744204/campos_512_v4
+146/744208/campos_512_v4
+146/744212/campos_512_v4
+146/744213/campos_512_v4
+146/744219/campos_512_v4
+146/744221/campos_512_v4
+146/744229/campos_512_v4
+146/744231/campos_512_v4
+146/744237/campos_512_v4
+146/744238/campos_512_v4
+146/744252/campos_512_v4
+146/744272/campos_512_v4
+146/744299/campos_512_v4
+146/744332/campos_512_v4
+146/744344/campos_512_v4
+146/744351/campos_512_v4
+146/744358/campos_512_v4
+146/744365/campos_512_v4
+146/744370/campos_512_v4
+146/744373/campos_512_v4
+146/744374/campos_512_v4
+146/744378/campos_512_v4
+146/744392/campos_512_v4
+146/744399/campos_512_v4
+146/744401/campos_512_v4
+146/744406/campos_512_v4
+146/744408/campos_512_v4
+146/744410/campos_512_v4
+146/744412/campos_512_v4
+146/744415/campos_512_v4
+146/744423/campos_512_v4
+146/744432/campos_512_v4
+146/744436/campos_512_v4
+146/744437/campos_512_v4
+146/744438/campos_512_v4
+146/744439/campos_512_v4
+146/744442/campos_512_v4
+146/744452/campos_512_v4
+146/744454/campos_512_v4
+146/744471/campos_512_v4
+146/744481/campos_512_v4
+146/744490/campos_512_v4
+146/744503/campos_512_v4
+146/744506/campos_512_v4
+146/744517/campos_512_v4
+146/744528/campos_512_v4
+146/744535/campos_512_v4
+146/744543/campos_512_v4
+146/744544/campos_512_v4
+146/744545/campos_512_v4
+146/744548/campos_512_v4
+146/744553/campos_512_v4
+146/744582/campos_512_v4
+146/744584/campos_512_v4
+146/744597/campos_512_v4
+146/744618/campos_512_v4
+146/744630/campos_512_v4
+146/744639/campos_512_v4
+146/744645/campos_512_v4
+146/744661/campos_512_v4
+146/744675/campos_512_v4
+146/744676/campos_512_v4
+146/744679/campos_512_v4
+146/744685/campos_512_v4
+146/744686/campos_512_v4
+146/744729/campos_512_v4
+146/744742/campos_512_v4
+146/744757/campos_512_v4
+146/744761/campos_512_v4
+146/744775/campos_512_v4
+146/744782/campos_512_v4
+146/744789/campos_512_v4
+146/744798/campos_512_v4
+146/744805/campos_512_v4
+146/744813/campos_512_v4
+146/744821/campos_512_v4
+146/744823/campos_512_v4
+146/744824/campos_512_v4
+146/744835/campos_512_v4
+146/744844/campos_512_v4
+146/744846/campos_512_v4
+146/744859/campos_512_v4
+146/744864/campos_512_v4
+146/744865/campos_512_v4
+146/744874/campos_512_v4
+146/744884/campos_512_v4
+146/744892/campos_512_v4
+146/744893/campos_512_v4
+146/744908/campos_512_v4
+146/744917/campos_512_v4
+146/744929/campos_512_v4
+146/744943/campos_512_v4
+146/744971/campos_512_v4
+146/744972/campos_512_v4
+146/744982/campos_512_v4
+146/744988/campos_512_v4
+146/744993/campos_512_v4
+146/744994/campos_512_v4
+146/744999/campos_512_v4
+146/745000/campos_512_v4
+147/745009/campos_512_v4
+147/745014/campos_512_v4
+147/745016/campos_512_v4
+147/745022/campos_512_v4
+147/745027/campos_512_v4
+147/745028/campos_512_v4
+147/745039/campos_512_v4
+147/745049/campos_512_v4
+147/745063/campos_512_v4
+147/745066/campos_512_v4
+147/745075/campos_512_v4
+147/745103/campos_512_v4
+147/745104/campos_512_v4
+147/745109/campos_512_v4
+147/745121/campos_512_v4
+147/745136/campos_512_v4
+147/745139/campos_512_v4
+147/745140/campos_512_v4
+147/745142/campos_512_v4
+147/745144/campos_512_v4
+147/745151/campos_512_v4
+147/745156/campos_512_v4
+147/745163/campos_512_v4
+147/745168/campos_512_v4
+147/745179/campos_512_v4
+147/745190/campos_512_v4
+147/745192/campos_512_v4
+147/745195/campos_512_v4
+147/745196/campos_512_v4
+147/745204/campos_512_v4
+147/745216/campos_512_v4
+147/745222/campos_512_v4
+147/745234/campos_512_v4
+147/745235/campos_512_v4
+147/745244/campos_512_v4
+147/745245/campos_512_v4
+147/745252/campos_512_v4
+147/745257/campos_512_v4
+147/745259/campos_512_v4
+147/745280/campos_512_v4
+147/745285/campos_512_v4
+147/745310/campos_512_v4
+147/745317/campos_512_v4
+147/745325/campos_512_v4
+147/745341/campos_512_v4
+147/745347/campos_512_v4
+147/745352/campos_512_v4
+147/745360/campos_512_v4
+147/745370/campos_512_v4
+147/745376/campos_512_v4
+147/745381/campos_512_v4
+147/745383/campos_512_v4
+147/745387/campos_512_v4
+147/745400/campos_512_v4
+147/745406/campos_512_v4
+147/745411/campos_512_v4
+147/745413/campos_512_v4
+147/745420/campos_512_v4
+147/745426/campos_512_v4
+147/745428/campos_512_v4
+147/745429/campos_512_v4
+147/745434/campos_512_v4
+147/745466/campos_512_v4
+147/745477/campos_512_v4
+147/745489/campos_512_v4
+147/745490/campos_512_v4
+147/745493/campos_512_v4
+147/745505/campos_512_v4
+147/745519/campos_512_v4
+147/745522/campos_512_v4
+147/745541/campos_512_v4
+147/745550/campos_512_v4
+147/745557/campos_512_v4
+147/745568/campos_512_v4
+147/745575/campos_512_v4
+147/745579/campos_512_v4
+147/745580/campos_512_v4
+147/745583/campos_512_v4
+147/745615/campos_512_v4
+147/745618/campos_512_v4
+147/745620/campos_512_v4
+147/745622/campos_512_v4
+147/745623/campos_512_v4
+147/745625/campos_512_v4
+147/745630/campos_512_v4
+147/745633/campos_512_v4
+147/745641/campos_512_v4
+147/745642/campos_512_v4
+147/745646/campos_512_v4
+147/745648/campos_512_v4
+147/745651/campos_512_v4
+147/745667/campos_512_v4
+147/745671/campos_512_v4
+147/745692/campos_512_v4
+147/745695/campos_512_v4
+147/745697/campos_512_v4
+147/745700/campos_512_v4
+147/745701/campos_512_v4
+147/745707/campos_512_v4
+147/745712/campos_512_v4
+147/745716/campos_512_v4
+147/745721/campos_512_v4
+147/745730/campos_512_v4
+147/745739/campos_512_v4
+147/745744/campos_512_v4
+147/745750/campos_512_v4
+147/745759/campos_512_v4
+147/745762/campos_512_v4
+147/745767/campos_512_v4
+147/745769/campos_512_v4
+147/745784/campos_512_v4
+147/745786/campos_512_v4
+147/745795/campos_512_v4
+147/745815/campos_512_v4
+147/745818/campos_512_v4
+147/745829/campos_512_v4
+147/745832/campos_512_v4
+147/745836/campos_512_v4
+147/745839/campos_512_v4
+147/745847/campos_512_v4
+147/745848/campos_512_v4
+147/745861/campos_512_v4
+147/745862/campos_512_v4
+147/745873/campos_512_v4
+147/745879/campos_512_v4
+147/745886/campos_512_v4
+147/745896/campos_512_v4
+147/745897/campos_512_v4
+147/745906/campos_512_v4
+147/745907/campos_512_v4
+147/745917/campos_512_v4
+147/745930/campos_512_v4
+147/745938/campos_512_v4
+147/745940/campos_512_v4
+147/745941/campos_512_v4
+147/745955/campos_512_v4
+147/745958/campos_512_v4
+147/745964/campos_512_v4
+147/745971/campos_512_v4
+147/745993/campos_512_v4
+147/745995/campos_512_v4
+147/745997/campos_512_v4
+147/746002/campos_512_v4
+147/746004/campos_512_v4
+147/746014/campos_512_v4
+147/746024/campos_512_v4
+147/746025/campos_512_v4
+147/746031/campos_512_v4
+147/746056/campos_512_v4
+147/746065/campos_512_v4
+147/746073/campos_512_v4
+147/746092/campos_512_v4
+147/746099/campos_512_v4
+147/746101/campos_512_v4
+147/746104/campos_512_v4
+147/746105/campos_512_v4
+147/746110/campos_512_v4
+147/746111/campos_512_v4
+147/746120/campos_512_v4
+147/746128/campos_512_v4
+147/746135/campos_512_v4
+147/746145/campos_512_v4
+147/746153/campos_512_v4
+147/746166/campos_512_v4
+147/746182/campos_512_v4
+147/746211/campos_512_v4
+147/746219/campos_512_v4
+147/746231/campos_512_v4
+147/746232/campos_512_v4
+147/746237/campos_512_v4
+147/746254/campos_512_v4
+147/746280/campos_512_v4
+147/746297/campos_512_v4
+147/746303/campos_512_v4
+147/746310/campos_512_v4
+147/746312/campos_512_v4
+147/746315/campos_512_v4
+147/746325/campos_512_v4
+147/746332/campos_512_v4
+147/746351/campos_512_v4
+147/746366/campos_512_v4
+147/746375/campos_512_v4
+147/746385/campos_512_v4
+147/746391/campos_512_v4
+147/746394/campos_512_v4
+147/746406/campos_512_v4
+147/746407/campos_512_v4
+147/746409/campos_512_v4
+147/746424/campos_512_v4
+147/746429/campos_512_v4
+147/746430/campos_512_v4
+147/746435/campos_512_v4
+147/746446/campos_512_v4
+147/746447/campos_512_v4
+147/746452/campos_512_v4
+147/746457/campos_512_v4
+147/746462/campos_512_v4
+147/746468/campos_512_v4
+147/746470/campos_512_v4
+147/746473/campos_512_v4
+147/746481/campos_512_v4
+147/746499/campos_512_v4
+147/746502/campos_512_v4
+147/746506/campos_512_v4
+147/746514/campos_512_v4
+147/746532/campos_512_v4
+147/746535/campos_512_v4
+147/746539/campos_512_v4
+147/746553/campos_512_v4
+147/746557/campos_512_v4
+147/746558/campos_512_v4
+147/746565/campos_512_v4
+147/746566/campos_512_v4
+147/746567/campos_512_v4
+147/746579/campos_512_v4
+147/746586/campos_512_v4
+147/746595/campos_512_v4
+147/746607/campos_512_v4
+147/746608/campos_512_v4
+147/746617/campos_512_v4
+147/746622/campos_512_v4
+147/746625/campos_512_v4
+147/746632/campos_512_v4
+147/746633/campos_512_v4
+147/746638/campos_512_v4
+147/746641/campos_512_v4
+147/746645/campos_512_v4
+147/746646/campos_512_v4
+147/746647/campos_512_v4
+147/746649/campos_512_v4
+147/746652/campos_512_v4
+147/746656/campos_512_v4
+147/746661/campos_512_v4
+147/746668/campos_512_v4
+147/746671/campos_512_v4
+147/746678/campos_512_v4
+147/746679/campos_512_v4
+147/746680/campos_512_v4
+147/746684/campos_512_v4
+147/746708/campos_512_v4
+147/746715/campos_512_v4
+147/746720/campos_512_v4
+147/746729/campos_512_v4
+147/746739/campos_512_v4
+147/746768/campos_512_v4
+147/746771/campos_512_v4
+147/746780/campos_512_v4
+147/746784/campos_512_v4
+147/746791/campos_512_v4
+147/746830/campos_512_v4
+147/746831/campos_512_v4
+147/746835/campos_512_v4
+147/746871/campos_512_v4
+147/746893/campos_512_v4
+147/746906/campos_512_v4
+147/746927/campos_512_v4
+147/746928/campos_512_v4
+147/746932/campos_512_v4
+147/746933/campos_512_v4
+147/746935/campos_512_v4
+147/746942/campos_512_v4
+147/746953/campos_512_v4
+147/746954/campos_512_v4
+147/746999/campos_512_v4
+147/747002/campos_512_v4
+147/747006/campos_512_v4
+147/747027/campos_512_v4
+147/747036/campos_512_v4
+147/747046/campos_512_v4
+147/747050/campos_512_v4
+147/747066/campos_512_v4
+147/747071/campos_512_v4
+147/747088/campos_512_v4
+147/747089/campos_512_v4
+147/747116/campos_512_v4
+147/747120/campos_512_v4
+147/747122/campos_512_v4
+147/747123/campos_512_v4
+147/747126/campos_512_v4
+147/747136/campos_512_v4
+147/747146/campos_512_v4
+147/747148/campos_512_v4
+147/747163/campos_512_v4
+147/747177/campos_512_v4
+147/747180/campos_512_v4
+147/747188/campos_512_v4
+147/747206/campos_512_v4
+147/747209/campos_512_v4
+147/747223/campos_512_v4
+147/747234/campos_512_v4
+147/747248/campos_512_v4
+147/747259/campos_512_v4
+147/747262/campos_512_v4
+147/747267/campos_512_v4
+147/747277/campos_512_v4
+147/747283/campos_512_v4
+147/747289/campos_512_v4
+147/747290/campos_512_v4
+147/747293/campos_512_v4
+147/747295/campos_512_v4
+147/747296/campos_512_v4
+147/747305/campos_512_v4
+147/747311/campos_512_v4
+147/747330/campos_512_v4
+147/747331/campos_512_v4
+147/747335/campos_512_v4
+147/747337/campos_512_v4
+147/747343/campos_512_v4
+147/747357/campos_512_v4
+147/747370/campos_512_v4
+147/747371/campos_512_v4
+147/747377/campos_512_v4
+147/747408/campos_512_v4
+147/747410/campos_512_v4
+147/747412/campos_512_v4
+147/747414/campos_512_v4
+147/747418/campos_512_v4
+147/747426/campos_512_v4
+147/747443/campos_512_v4
+147/747445/campos_512_v4
+147/747456/campos_512_v4
+147/747465/campos_512_v4
+147/747466/campos_512_v4
+147/747472/campos_512_v4
+147/747500/campos_512_v4
+147/747501/campos_512_v4
+147/747527/campos_512_v4
+147/747534/campos_512_v4
+147/747567/campos_512_v4
+147/747585/campos_512_v4
+147/747592/campos_512_v4
+147/747605/campos_512_v4
+147/747616/campos_512_v4
+147/747617/campos_512_v4
+147/747640/campos_512_v4
+147/747655/campos_512_v4
+147/747656/campos_512_v4
+147/747664/campos_512_v4
+147/747688/campos_512_v4
+147/747699/campos_512_v4
+147/747703/campos_512_v4
+147/747710/campos_512_v4
+147/747715/campos_512_v4
+147/747717/campos_512_v4
+147/747726/campos_512_v4
+147/747748/campos_512_v4
+147/747763/campos_512_v4
+147/747774/campos_512_v4
+147/747779/campos_512_v4
+147/747787/campos_512_v4
+147/747791/campos_512_v4
+147/747794/campos_512_v4
+147/747797/campos_512_v4
+147/747803/campos_512_v4
+147/747805/campos_512_v4
+147/747806/campos_512_v4
+147/747830/campos_512_v4
+147/747842/campos_512_v4
+147/747870/campos_512_v4
+147/747885/campos_512_v4
+147/747890/campos_512_v4
+147/747892/campos_512_v4
+147/747893/campos_512_v4
+147/747896/campos_512_v4
+147/747903/campos_512_v4
+147/747923/campos_512_v4
+147/747927/campos_512_v4
+147/747943/campos_512_v4
+147/747947/campos_512_v4
+147/747948/campos_512_v4
+147/747955/campos_512_v4
+147/747957/campos_512_v4
+147/747960/campos_512_v4
+147/747961/campos_512_v4
+147/747967/campos_512_v4
+147/747984/campos_512_v4
+147/748004/campos_512_v4
+147/748006/campos_512_v4
+147/748007/campos_512_v4
+147/748014/campos_512_v4
+147/748022/campos_512_v4
+147/748037/campos_512_v4
+147/748042/campos_512_v4
+147/748057/campos_512_v4
+147/748060/campos_512_v4
+147/748061/campos_512_v4
+147/748064/campos_512_v4
+147/748073/campos_512_v4
+147/748078/campos_512_v4
+147/748079/campos_512_v4
+147/748087/campos_512_v4
+147/748099/campos_512_v4
+147/748103/campos_512_v4
+147/748116/campos_512_v4
+147/748119/campos_512_v4
+147/748121/campos_512_v4
+147/748125/campos_512_v4
+147/748142/campos_512_v4
+147/748146/campos_512_v4
+147/748151/campos_512_v4
+147/748157/campos_512_v4
+147/748158/campos_512_v4
+147/748166/campos_512_v4
+147/748169/campos_512_v4
+147/748183/campos_512_v4
+147/748188/campos_512_v4
+147/748189/campos_512_v4
+147/748190/campos_512_v4
+147/748196/campos_512_v4
+147/748204/campos_512_v4
+147/748205/campos_512_v4
+147/748206/campos_512_v4
+147/748207/campos_512_v4
+147/748212/campos_512_v4
+147/748216/campos_512_v4
+147/748218/campos_512_v4
+147/748225/campos_512_v4
+147/748228/campos_512_v4
+147/748237/campos_512_v4
+147/748244/campos_512_v4
+147/748248/campos_512_v4
+147/748249/campos_512_v4
+147/748253/campos_512_v4
+147/748254/campos_512_v4
+147/748257/campos_512_v4
+147/748280/campos_512_v4
+147/748284/campos_512_v4
+147/748298/campos_512_v4
+147/748300/campos_512_v4
+147/748312/campos_512_v4
+147/748321/campos_512_v4
+147/748328/campos_512_v4
+147/748332/campos_512_v4
+147/748336/campos_512_v4
+147/748337/campos_512_v4
+147/748349/campos_512_v4
+147/748359/campos_512_v4
+147/748375/campos_512_v4
+147/748379/campos_512_v4
+147/748380/campos_512_v4
+147/748384/campos_512_v4
+147/748389/campos_512_v4
+147/748391/campos_512_v4
+147/748395/campos_512_v4
+147/748397/campos_512_v4
+147/748404/campos_512_v4
+147/748408/campos_512_v4
+147/748410/campos_512_v4
+147/748418/campos_512_v4
+147/748422/campos_512_v4
+147/748428/campos_512_v4
+147/748429/campos_512_v4
+147/748433/campos_512_v4
+147/748434/campos_512_v4
+147/748446/campos_512_v4
+147/748456/campos_512_v4
+147/748464/campos_512_v4
+147/748471/campos_512_v4
+147/748481/campos_512_v4
+147/748491/campos_512_v4
+147/748493/campos_512_v4
+147/748497/campos_512_v4
+147/748502/campos_512_v4
+147/748516/campos_512_v4
+147/748517/campos_512_v4
+147/748521/campos_512_v4
+147/748523/campos_512_v4
+147/748530/campos_512_v4
+147/748533/campos_512_v4
+147/748535/campos_512_v4
+147/748536/campos_512_v4
+147/748537/campos_512_v4
+147/748539/campos_512_v4
+147/748558/campos_512_v4
+147/748567/campos_512_v4
+147/748569/campos_512_v4
+147/748573/campos_512_v4
+147/748579/campos_512_v4
+147/748580/campos_512_v4
+147/748606/campos_512_v4
+147/748607/campos_512_v4
+147/748608/campos_512_v4
+147/748611/campos_512_v4
+147/748626/campos_512_v4
+147/748638/campos_512_v4
+147/748644/campos_512_v4
+147/748647/campos_512_v4
+147/748660/campos_512_v4
+147/748662/campos_512_v4
+147/748664/campos_512_v4
+147/748684/campos_512_v4
+147/748687/campos_512_v4
+147/748702/campos_512_v4
+147/748708/campos_512_v4
+147/748718/campos_512_v4
+147/748720/campos_512_v4
+147/748735/campos_512_v4
+147/748738/campos_512_v4
+147/748743/campos_512_v4
+147/748752/campos_512_v4
+147/748753/campos_512_v4
+147/748755/campos_512_v4
+147/748757/campos_512_v4
+147/748760/campos_512_v4
+147/748761/campos_512_v4
+147/748775/campos_512_v4
+147/748781/campos_512_v4
+147/748794/campos_512_v4
+147/748796/campos_512_v4
+147/748818/campos_512_v4
+147/748826/campos_512_v4
+147/748833/campos_512_v4
+147/748843/campos_512_v4
+147/748845/campos_512_v4
+147/748848/campos_512_v4
+147/748853/campos_512_v4
+147/748861/campos_512_v4
+147/748864/campos_512_v4
+147/748868/campos_512_v4
+147/748873/campos_512_v4
+147/748874/campos_512_v4
+147/748892/campos_512_v4
+147/748894/campos_512_v4
+147/748900/campos_512_v4
+147/748909/campos_512_v4
+147/748913/campos_512_v4
+147/748918/campos_512_v4
+147/748928/campos_512_v4
+147/748940/campos_512_v4
+147/748945/campos_512_v4
+147/748961/campos_512_v4
+147/748985/campos_512_v4
+147/748986/campos_512_v4
+147/748996/campos_512_v4
+147/748997/campos_512_v4
+147/749005/campos_512_v4
+147/749006/campos_512_v4
+147/749008/campos_512_v4
+147/749014/campos_512_v4
+147/749018/campos_512_v4
+147/749024/campos_512_v4
+147/749045/campos_512_v4
+147/749060/campos_512_v4
+147/749062/campos_512_v4
+147/749070/campos_512_v4
+147/749080/campos_512_v4
+147/749081/campos_512_v4
+147/749084/campos_512_v4
+147/749089/campos_512_v4
+147/749104/campos_512_v4
+147/749109/campos_512_v4
+147/749111/campos_512_v4
+147/749122/campos_512_v4
+147/749125/campos_512_v4
+147/749126/campos_512_v4
+147/749129/campos_512_v4
+147/749133/campos_512_v4
+147/749140/campos_512_v4
+147/749141/campos_512_v4
+147/749153/campos_512_v4
+147/749154/campos_512_v4
+147/749156/campos_512_v4
+147/749160/campos_512_v4
+147/749168/campos_512_v4
+147/749172/campos_512_v4
+147/749174/campos_512_v4
+147/749180/campos_512_v4
+147/749194/campos_512_v4
+147/749198/campos_512_v4
+147/749202/campos_512_v4
+147/749238/campos_512_v4
+147/749243/campos_512_v4
+147/749246/campos_512_v4
+147/749254/campos_512_v4
+147/749256/campos_512_v4
+147/749262/campos_512_v4
+147/749269/campos_512_v4
+147/749292/campos_512_v4
+147/749297/campos_512_v4
+147/749305/campos_512_v4
+147/749308/campos_512_v4
+147/749311/campos_512_v4
+147/749320/campos_512_v4
+147/749325/campos_512_v4
+147/749332/campos_512_v4
+147/749335/campos_512_v4
+147/749341/campos_512_v4
+147/749354/campos_512_v4
+147/749355/campos_512_v4
+147/749358/campos_512_v4
+147/749361/campos_512_v4
+147/749362/campos_512_v4
+147/749368/campos_512_v4
+147/749386/campos_512_v4
+147/749390/campos_512_v4
+147/749400/campos_512_v4
+147/749417/campos_512_v4
+147/749441/campos_512_v4
+147/749443/campos_512_v4
+147/749444/campos_512_v4
+147/749447/campos_512_v4
+147/749454/campos_512_v4
+147/749464/campos_512_v4
+147/749482/campos_512_v4
+147/749489/campos_512_v4
+147/749492/campos_512_v4
+147/749502/campos_512_v4
+147/749510/campos_512_v4
+147/749520/campos_512_v4
+147/749523/campos_512_v4
+147/749528/campos_512_v4
+147/749534/campos_512_v4
+147/749542/campos_512_v4
+147/749565/campos_512_v4
+147/749574/campos_512_v4
+147/749576/campos_512_v4
+147/749606/campos_512_v4
+147/749623/campos_512_v4
+147/749641/campos_512_v4
+147/749644/campos_512_v4
+147/749658/campos_512_v4
+147/749660/campos_512_v4
+147/749663/campos_512_v4
+147/749683/campos_512_v4
+147/749688/campos_512_v4
+147/749733/campos_512_v4
+147/749747/campos_512_v4
+147/749750/campos_512_v4
+147/749751/campos_512_v4
+147/749758/campos_512_v4
+147/749764/campos_512_v4
+147/749766/campos_512_v4
+147/749768/campos_512_v4
+147/749786/campos_512_v4
+147/749802/campos_512_v4
+147/749804/campos_512_v4
+147/749805/campos_512_v4
+147/749819/campos_512_v4
+147/749824/campos_512_v4
+147/749831/campos_512_v4
+147/749836/campos_512_v4
+147/749843/campos_512_v4
+147/749860/campos_512_v4
+147/749863/campos_512_v4
+147/749876/campos_512_v4
+147/749885/campos_512_v4
+147/749890/campos_512_v4
+147/749893/campos_512_v4
+147/749906/campos_512_v4
+147/749913/campos_512_v4
+147/749920/campos_512_v4
+147/749925/campos_512_v4
+147/749930/campos_512_v4
+147/749948/campos_512_v4
+147/749958/campos_512_v4
+147/749964/campos_512_v4
+147/749965/campos_512_v4
+147/749967/campos_512_v4
+147/749970/campos_512_v4
+147/749971/campos_512_v4
+147/749974/campos_512_v4
+147/749975/campos_512_v4
+147/749980/campos_512_v4
+147/749989/campos_512_v4
+148/750002/campos_512_v4
+148/750032/campos_512_v4
+148/750044/campos_512_v4
+148/750049/campos_512_v4
+148/750058/campos_512_v4
+148/750065/campos_512_v4
+148/750086/campos_512_v4
+148/750087/campos_512_v4
+148/750090/campos_512_v4
+148/750110/campos_512_v4
+148/750113/campos_512_v4
+148/750124/campos_512_v4
+148/750131/campos_512_v4
+148/750133/campos_512_v4
+148/750134/campos_512_v4
+148/750157/campos_512_v4
+148/750185/campos_512_v4
+148/750186/campos_512_v4
+148/750188/campos_512_v4
+148/750191/campos_512_v4
+148/750193/campos_512_v4
+148/750199/campos_512_v4
+148/750208/campos_512_v4
+148/750213/campos_512_v4
+148/750227/campos_512_v4
+148/750244/campos_512_v4
+148/750245/campos_512_v4
+148/750252/campos_512_v4
+148/750253/campos_512_v4
+148/750268/campos_512_v4
+148/750274/campos_512_v4
+148/750276/campos_512_v4
+148/750277/campos_512_v4
+148/750282/campos_512_v4
+148/750286/campos_512_v4
+148/750289/campos_512_v4
+148/750316/campos_512_v4
+148/750325/campos_512_v4
+148/750327/campos_512_v4
+148/750341/campos_512_v4
+148/750351/campos_512_v4
+148/750357/campos_512_v4
+148/750358/campos_512_v4
+148/750360/campos_512_v4
+148/750374/campos_512_v4
+148/750383/campos_512_v4
+148/750417/campos_512_v4
+148/750420/campos_512_v4
+148/750423/campos_512_v4
+148/750434/campos_512_v4
+148/750442/campos_512_v4
+148/750455/campos_512_v4
+148/750457/campos_512_v4
+148/750466/campos_512_v4
+148/750469/campos_512_v4
+148/750517/campos_512_v4
+148/750519/campos_512_v4
+148/750525/campos_512_v4
+148/750528/campos_512_v4
+148/750544/campos_512_v4
+148/750548/campos_512_v4
+148/750550/campos_512_v4
+148/750570/campos_512_v4
+148/750575/campos_512_v4
+148/750578/campos_512_v4
+148/750582/campos_512_v4
+148/750583/campos_512_v4
+148/750585/campos_512_v4
+148/750588/campos_512_v4
+148/750593/campos_512_v4
+148/750598/campos_512_v4
+148/750600/campos_512_v4
+148/750607/campos_512_v4
+148/750612/campos_512_v4
+148/750626/campos_512_v4
+148/750631/campos_512_v4
+148/750646/campos_512_v4
+148/750648/campos_512_v4
+148/750649/campos_512_v4
+148/750650/campos_512_v4
+148/750655/campos_512_v4
+148/750660/campos_512_v4
+148/750663/campos_512_v4
+148/750669/campos_512_v4
+148/750673/campos_512_v4
+148/750674/campos_512_v4
+148/750687/campos_512_v4
+148/750688/campos_512_v4
+148/750691/campos_512_v4
+148/750693/campos_512_v4
+148/750712/campos_512_v4
+148/750716/campos_512_v4
+148/750723/campos_512_v4
+148/750726/campos_512_v4
+148/750729/campos_512_v4
+148/750734/campos_512_v4
+148/750739/campos_512_v4
+148/750746/campos_512_v4
+148/750754/campos_512_v4
+148/750759/campos_512_v4
+148/750769/campos_512_v4
+148/750775/campos_512_v4
+148/750776/campos_512_v4
+148/750788/campos_512_v4
+148/750798/campos_512_v4
+148/750800/campos_512_v4
+148/750803/campos_512_v4
+148/750806/campos_512_v4
+148/750807/campos_512_v4
+148/750817/campos_512_v4
+148/750821/campos_512_v4
+148/750832/campos_512_v4
+148/750837/campos_512_v4
+148/750841/campos_512_v4
+148/750853/campos_512_v4
+148/750892/campos_512_v4
+148/750915/campos_512_v4
+148/750931/campos_512_v4
+148/750941/campos_512_v4
+148/750943/campos_512_v4
+148/751009/campos_512_v4
+148/751010/campos_512_v4
+148/751023/campos_512_v4
+148/751026/campos_512_v4
+148/751027/campos_512_v4
+148/751039/campos_512_v4
+148/751041/campos_512_v4
+148/751055/campos_512_v4
+148/751059/campos_512_v4
+148/751066/campos_512_v4
+148/751086/campos_512_v4
+148/751088/campos_512_v4
+148/751096/campos_512_v4
+148/751104/campos_512_v4
+148/751105/campos_512_v4
+148/751107/campos_512_v4
+148/751109/campos_512_v4
+148/751120/campos_512_v4
+148/751126/campos_512_v4
+148/751129/campos_512_v4
+148/751137/campos_512_v4
+148/751143/campos_512_v4
+148/751147/campos_512_v4
+148/751152/campos_512_v4
+148/751155/campos_512_v4
+148/751166/campos_512_v4
+148/751178/campos_512_v4
+148/751183/campos_512_v4
+148/751191/campos_512_v4
+148/751195/campos_512_v4
+148/751197/campos_512_v4
+148/751198/campos_512_v4
+148/751199/campos_512_v4
+148/751200/campos_512_v4
+148/751201/campos_512_v4
+148/751203/campos_512_v4
+148/751207/campos_512_v4
+148/751209/campos_512_v4
+148/751218/campos_512_v4
+148/751229/campos_512_v4
+148/751230/campos_512_v4
+148/751232/campos_512_v4
+148/751242/campos_512_v4
+148/751243/campos_512_v4
+148/751246/campos_512_v4
+148/751251/campos_512_v4
+148/751255/campos_512_v4
+148/751277/campos_512_v4
+148/751289/campos_512_v4
+148/751291/campos_512_v4
+148/751299/campos_512_v4
+148/751301/campos_512_v4
+148/751304/campos_512_v4
+148/751307/campos_512_v4
+148/751311/campos_512_v4
+148/751312/campos_512_v4
+148/751321/campos_512_v4
+148/751322/campos_512_v4
+148/751328/campos_512_v4
+148/751335/campos_512_v4
+148/751343/campos_512_v4
+148/751346/campos_512_v4
+148/751348/campos_512_v4
+148/751357/campos_512_v4
+148/751375/campos_512_v4
+148/751377/campos_512_v4
+148/751396/campos_512_v4
+148/751405/campos_512_v4
+148/751415/campos_512_v4
+148/751418/campos_512_v4
+148/751454/campos_512_v4
+148/751459/campos_512_v4
+148/751461/campos_512_v4
+148/751503/campos_512_v4
+148/751507/campos_512_v4
+148/751529/campos_512_v4
+148/751559/campos_512_v4
+148/751563/campos_512_v4
+148/751564/campos_512_v4
+148/751573/campos_512_v4
+148/751575/campos_512_v4
+148/751586/campos_512_v4
+148/751590/campos_512_v4
+148/751597/campos_512_v4
+148/751599/campos_512_v4
+148/751619/campos_512_v4
+148/751620/campos_512_v4
+148/751626/campos_512_v4
+148/751634/campos_512_v4
+148/751635/campos_512_v4
+148/751668/campos_512_v4
+148/751675/campos_512_v4
+148/751683/campos_512_v4
+148/751693/campos_512_v4
+148/751702/campos_512_v4
+148/751710/campos_512_v4
+148/751713/campos_512_v4
+148/751716/campos_512_v4
+148/751720/campos_512_v4
+148/751721/campos_512_v4
+148/751727/campos_512_v4
+148/751731/campos_512_v4
+148/751738/campos_512_v4
+148/751739/campos_512_v4
+148/751752/campos_512_v4
+148/751762/campos_512_v4
+148/751770/campos_512_v4
+148/751775/campos_512_v4
+148/751782/campos_512_v4
+148/751784/campos_512_v4
+148/751785/campos_512_v4
+148/751789/campos_512_v4
+148/751791/campos_512_v4
+148/751792/campos_512_v4
+148/751801/campos_512_v4
+148/751804/campos_512_v4
+148/751807/campos_512_v4
+148/751814/campos_512_v4
+148/751831/campos_512_v4
+148/751854/campos_512_v4
+148/751856/campos_512_v4
+148/751870/campos_512_v4
+148/751875/campos_512_v4
+148/751892/campos_512_v4
+148/751895/campos_512_v4
+148/751896/campos_512_v4
+148/751903/campos_512_v4
+148/751904/campos_512_v4
+148/751907/campos_512_v4
+148/751910/campos_512_v4
+148/751912/campos_512_v4
+148/751918/campos_512_v4
+148/751924/campos_512_v4
+148/751931/campos_512_v4
+148/751955/campos_512_v4
+148/751958/campos_512_v4
+148/751959/campos_512_v4
+148/751961/campos_512_v4
+148/751983/campos_512_v4
+148/751989/campos_512_v4
+148/751995/campos_512_v4
+148/752007/campos_512_v4
+148/752016/campos_512_v4
+148/752017/campos_512_v4
+148/752022/campos_512_v4
+148/752027/campos_512_v4
+148/752032/campos_512_v4
+148/752041/campos_512_v4
+148/752053/campos_512_v4
+148/752055/campos_512_v4
+148/752062/campos_512_v4
+148/752065/campos_512_v4
+148/752075/campos_512_v4
+148/752082/campos_512_v4
+148/752090/campos_512_v4
+148/752094/campos_512_v4
+148/752099/campos_512_v4
+148/752106/campos_512_v4
+148/752116/campos_512_v4
+148/752118/campos_512_v4
+148/752130/campos_512_v4
+148/752135/campos_512_v4
+148/752144/campos_512_v4
+148/752146/campos_512_v4
+148/752161/campos_512_v4
+148/752165/campos_512_v4
+148/752168/campos_512_v4
+148/752171/campos_512_v4
+148/752186/campos_512_v4
+148/752189/campos_512_v4
+148/752196/campos_512_v4
+148/752204/campos_512_v4
+148/752205/campos_512_v4
+148/752206/campos_512_v4
+148/752218/campos_512_v4
+148/752222/campos_512_v4
+148/752249/campos_512_v4
+148/752253/campos_512_v4
+148/752261/campos_512_v4
+148/752271/campos_512_v4
+148/752272/campos_512_v4
+148/752279/campos_512_v4
+148/752296/campos_512_v4
+148/752305/campos_512_v4
+148/752309/campos_512_v4
+148/752321/campos_512_v4
+148/752349/campos_512_v4
+148/752355/campos_512_v4
+148/752372/campos_512_v4
+148/752373/campos_512_v4
+148/752395/campos_512_v4
+148/752396/campos_512_v4
+148/752407/campos_512_v4
+148/752412/campos_512_v4
+148/752415/campos_512_v4
+148/752435/campos_512_v4
+148/752447/campos_512_v4
+148/752454/campos_512_v4
+148/752456/campos_512_v4
+148/752472/campos_512_v4
+148/752522/campos_512_v4
+148/752539/campos_512_v4
+148/752580/campos_512_v4
+148/752586/campos_512_v4
+148/752587/campos_512_v4
+148/752596/campos_512_v4
+148/752600/campos_512_v4
+148/752610/campos_512_v4
+148/752631/campos_512_v4
+148/752641/campos_512_v4
+148/752648/campos_512_v4
+148/752656/campos_512_v4
+148/752671/campos_512_v4
+148/752687/campos_512_v4
+148/752695/campos_512_v4
+148/752734/campos_512_v4
+148/752753/campos_512_v4
+148/752775/campos_512_v4
+148/752776/campos_512_v4
+148/752778/campos_512_v4
+148/752785/campos_512_v4
+148/752793/campos_512_v4
+148/752794/campos_512_v4
+148/752795/campos_512_v4
+148/752796/campos_512_v4
+148/752797/campos_512_v4
+148/752802/campos_512_v4
+148/752803/campos_512_v4
+148/752822/campos_512_v4
+148/752832/campos_512_v4
+148/752838/campos_512_v4
+148/752839/campos_512_v4
+148/752846/campos_512_v4
+148/752848/campos_512_v4
+148/752860/campos_512_v4
+148/752862/campos_512_v4
+148/752864/campos_512_v4
+148/752870/campos_512_v4
+148/752874/campos_512_v4
+148/752876/campos_512_v4
+148/752889/campos_512_v4
+148/752899/campos_512_v4
+148/752935/campos_512_v4
+148/752941/campos_512_v4
+148/752955/campos_512_v4
+148/752956/campos_512_v4
+148/752961/campos_512_v4
+148/752962/campos_512_v4
+148/752965/campos_512_v4
+148/752971/campos_512_v4
+148/752981/campos_512_v4
+148/753005/campos_512_v4
+148/753006/campos_512_v4
+148/753011/campos_512_v4
+148/753014/campos_512_v4
+148/753021/campos_512_v4
+148/753030/campos_512_v4
+148/753034/campos_512_v4
+148/753046/campos_512_v4
+148/753055/campos_512_v4
+148/753069/campos_512_v4
+148/753077/campos_512_v4
+148/753079/campos_512_v4
+148/753084/campos_512_v4
+148/753086/campos_512_v4
+148/753087/campos_512_v4
+148/753095/campos_512_v4
+148/753101/campos_512_v4
+148/753108/campos_512_v4
+148/753121/campos_512_v4
+148/753128/campos_512_v4
+148/753130/campos_512_v4
+148/753134/campos_512_v4
+148/753135/campos_512_v4
+148/753138/campos_512_v4
+148/753160/campos_512_v4
+148/753169/campos_512_v4
+148/753184/campos_512_v4
+148/753191/campos_512_v4
+148/753192/campos_512_v4
+148/753208/campos_512_v4
+148/753217/campos_512_v4
+148/753223/campos_512_v4
+148/753226/campos_512_v4
+148/753238/campos_512_v4
+148/753243/campos_512_v4
+148/753246/campos_512_v4
+148/753255/campos_512_v4
+148/753256/campos_512_v4
+148/753267/campos_512_v4
+148/753269/campos_512_v4
+148/753275/campos_512_v4
+148/753282/campos_512_v4
+148/753285/campos_512_v4
+148/753295/campos_512_v4
+148/753299/campos_512_v4
+148/753301/campos_512_v4
+148/753302/campos_512_v4
+148/753305/campos_512_v4
+148/753319/campos_512_v4
+148/753335/campos_512_v4
+148/753347/campos_512_v4
+148/753360/campos_512_v4
+148/753364/campos_512_v4
+148/753376/campos_512_v4
+148/753380/campos_512_v4
+148/753396/campos_512_v4
+148/753404/campos_512_v4
+148/753405/campos_512_v4
+148/753410/campos_512_v4
+148/753411/campos_512_v4
+148/753417/campos_512_v4
+148/753418/campos_512_v4
+148/753430/campos_512_v4
+148/753441/campos_512_v4
+148/753447/campos_512_v4
+148/753455/campos_512_v4
+148/753473/campos_512_v4
+148/753478/campos_512_v4
+148/753483/campos_512_v4
+148/753487/campos_512_v4
+148/753500/campos_512_v4
+148/753504/campos_512_v4
+148/753505/campos_512_v4
+148/753506/campos_512_v4
+148/753521/campos_512_v4
+148/753531/campos_512_v4
+148/753534/campos_512_v4
+148/753535/campos_512_v4
+148/753541/campos_512_v4
+148/753548/campos_512_v4
+148/753549/campos_512_v4
+148/753550/campos_512_v4
+148/753551/campos_512_v4
+148/753552/campos_512_v4
+148/753555/campos_512_v4
+148/753563/campos_512_v4
+148/753566/campos_512_v4
+148/753570/campos_512_v4
+148/753573/campos_512_v4
+148/753584/campos_512_v4
+148/753587/campos_512_v4
+148/753593/campos_512_v4
+148/753594/campos_512_v4
+148/753597/campos_512_v4
+148/753612/campos_512_v4
+148/753616/campos_512_v4
+148/753622/campos_512_v4
+148/753623/campos_512_v4
+148/753635/campos_512_v4
+148/753638/campos_512_v4
+148/753646/campos_512_v4
+148/753652/campos_512_v4
+148/753653/campos_512_v4
+148/753669/campos_512_v4
+148/753675/campos_512_v4
+148/753682/campos_512_v4
+148/753691/campos_512_v4
+148/753692/campos_512_v4
+148/753701/campos_512_v4
+148/753708/campos_512_v4
+148/753712/campos_512_v4
+148/753718/campos_512_v4
+148/753719/campos_512_v4
+148/753722/campos_512_v4
+148/753726/campos_512_v4
+148/753731/campos_512_v4
+148/753736/campos_512_v4
+148/753737/campos_512_v4
+148/753738/campos_512_v4
+148/753739/campos_512_v4
+148/753759/campos_512_v4
+148/753765/campos_512_v4
+148/753769/campos_512_v4
+148/753774/campos_512_v4
+148/753777/campos_512_v4
+148/753789/campos_512_v4
+148/753796/campos_512_v4
+148/753804/campos_512_v4
+148/753811/campos_512_v4
+148/753815/campos_512_v4
+148/753818/campos_512_v4
+148/753822/campos_512_v4
+148/753824/campos_512_v4
+148/753825/campos_512_v4
+148/753829/campos_512_v4
+148/753833/campos_512_v4
+148/753835/campos_512_v4
+148/753841/campos_512_v4
+148/753842/campos_512_v4
+148/753843/campos_512_v4
+148/753858/campos_512_v4
+148/753865/campos_512_v4
+148/753874/campos_512_v4
+148/753880/campos_512_v4
+148/753884/campos_512_v4
+148/753887/campos_512_v4
+148/753888/campos_512_v4
+148/753890/campos_512_v4
+148/753892/campos_512_v4
+148/753894/campos_512_v4
+148/753904/campos_512_v4
+148/753912/campos_512_v4
+148/753927/campos_512_v4
+148/753948/campos_512_v4
+148/753955/campos_512_v4
+148/753957/campos_512_v4
+148/753966/campos_512_v4
+148/753969/campos_512_v4
+148/753972/campos_512_v4
+148/753974/campos_512_v4
+148/753977/campos_512_v4
+148/753998/campos_512_v4
+148/754005/campos_512_v4
+148/754017/campos_512_v4
+148/754019/campos_512_v4
+148/754023/campos_512_v4
+148/754025/campos_512_v4
+148/754054/campos_512_v4
+148/754066/campos_512_v4
+148/754077/campos_512_v4
+148/754081/campos_512_v4
+148/754082/campos_512_v4
+148/754084/campos_512_v4
+148/754097/campos_512_v4
+148/754099/campos_512_v4
+148/754108/campos_512_v4
+148/754121/campos_512_v4
+148/754122/campos_512_v4
+148/754133/campos_512_v4
+148/754140/campos_512_v4
+148/754145/campos_512_v4
+148/754149/campos_512_v4
+148/754168/campos_512_v4
+148/754170/campos_512_v4
+148/754181/campos_512_v4
+148/754185/campos_512_v4
+148/754194/campos_512_v4
+148/754198/campos_512_v4
+148/754199/campos_512_v4
+148/754219/campos_512_v4
+148/754224/campos_512_v4
+148/754230/campos_512_v4
+148/754234/campos_512_v4
+148/754236/campos_512_v4
+148/754240/campos_512_v4
+148/754243/campos_512_v4
+148/754258/campos_512_v4
+148/754267/campos_512_v4
+148/754272/campos_512_v4
+148/754274/campos_512_v4
+148/754282/campos_512_v4
+148/754286/campos_512_v4
+148/754287/campos_512_v4
+148/754291/campos_512_v4
+148/754302/campos_512_v4
+148/754303/campos_512_v4
+148/754307/campos_512_v4
+148/754320/campos_512_v4
+148/754334/campos_512_v4
+148/754336/campos_512_v4
+148/754362/campos_512_v4
+148/754363/campos_512_v4
+148/754368/campos_512_v4
+148/754376/campos_512_v4
+148/754379/campos_512_v4
+148/754382/campos_512_v4
+148/754386/campos_512_v4
+148/754396/campos_512_v4
+148/754399/campos_512_v4
+148/754430/campos_512_v4
+148/754434/campos_512_v4
+148/754442/campos_512_v4
+148/754445/campos_512_v4
+148/754474/campos_512_v4
+148/754482/campos_512_v4
+148/754489/campos_512_v4
+148/754490/campos_512_v4
+148/754498/campos_512_v4
+148/754513/campos_512_v4
+148/754539/campos_512_v4
+148/754568/campos_512_v4
+148/754577/campos_512_v4
+148/754579/campos_512_v4
+148/754588/campos_512_v4
+148/754590/campos_512_v4
+148/754591/campos_512_v4
+148/754602/campos_512_v4
+148/754613/campos_512_v4
+148/754637/campos_512_v4
+148/754647/campos_512_v4
+148/754658/campos_512_v4
+148/754661/campos_512_v4
+148/754679/campos_512_v4
+148/754681/campos_512_v4
+148/754688/campos_512_v4
+148/754700/campos_512_v4
+148/754703/campos_512_v4
+148/754705/campos_512_v4
+148/754710/campos_512_v4
+148/754716/campos_512_v4
+148/754718/campos_512_v4
+148/754721/campos_512_v4
+148/754729/campos_512_v4
+148/754730/campos_512_v4
+148/754737/campos_512_v4
+148/754741/campos_512_v4
+148/754749/campos_512_v4
+148/754766/campos_512_v4
+148/754768/campos_512_v4
+148/754771/campos_512_v4
+148/754772/campos_512_v4
+148/754774/campos_512_v4
+148/754796/campos_512_v4
+148/754804/campos_512_v4
+148/754809/campos_512_v4
+148/754813/campos_512_v4
+148/754819/campos_512_v4
+148/754827/campos_512_v4
+148/754840/campos_512_v4
+148/754845/campos_512_v4
+148/754851/campos_512_v4
+148/754865/campos_512_v4
+148/754870/campos_512_v4
+148/754885/campos_512_v4
+148/754889/campos_512_v4
+148/754891/campos_512_v4
+148/754894/campos_512_v4
+148/754895/campos_512_v4
+148/754917/campos_512_v4
+148/754926/campos_512_v4
+148/754933/campos_512_v4
+148/754935/campos_512_v4
+148/754937/campos_512_v4
+148/754946/campos_512_v4
+148/754951/campos_512_v4
+148/754953/campos_512_v4
+148/754961/campos_512_v4
+148/754974/campos_512_v4
+148/754978/campos_512_v4
+149/755004/campos_512_v4
+149/755005/campos_512_v4
+149/755008/campos_512_v4
+149/755016/campos_512_v4
+149/755017/campos_512_v4
+149/755021/campos_512_v4
+149/755033/campos_512_v4
+149/755040/campos_512_v4
+149/755049/campos_512_v4
+149/755069/campos_512_v4
+149/755074/campos_512_v4
+149/755078/campos_512_v4
+149/755112/campos_512_v4
+149/755118/campos_512_v4
+149/755120/campos_512_v4
+149/755122/campos_512_v4
+149/755127/campos_512_v4
+149/755145/campos_512_v4
+149/755148/campos_512_v4
+149/755153/campos_512_v4
+149/755187/campos_512_v4
+149/755207/campos_512_v4
+149/755222/campos_512_v4
+149/755228/campos_512_v4
+149/755231/campos_512_v4
+149/755241/campos_512_v4
+149/755243/campos_512_v4
+149/755246/campos_512_v4
+149/755263/campos_512_v4
+149/755271/campos_512_v4
+149/755280/campos_512_v4
+149/755304/campos_512_v4
+149/755306/campos_512_v4
+149/755308/campos_512_v4
+149/755310/campos_512_v4
+149/755312/campos_512_v4
+149/755315/campos_512_v4
+149/755318/campos_512_v4
+149/755339/campos_512_v4
+149/755341/campos_512_v4
+149/755359/campos_512_v4
+149/755370/campos_512_v4
+149/755393/campos_512_v4
+149/755403/campos_512_v4
+149/755405/campos_512_v4
+149/755418/campos_512_v4
+149/755424/campos_512_v4
+149/755435/campos_512_v4
+149/755452/campos_512_v4
+149/755459/campos_512_v4
+149/755462/campos_512_v4
+149/755463/campos_512_v4
+149/755470/campos_512_v4
+149/755472/campos_512_v4
+149/755475/campos_512_v4
+149/755476/campos_512_v4
+149/755478/campos_512_v4
+149/755489/campos_512_v4
+149/755493/campos_512_v4
+149/755498/campos_512_v4
+149/755500/campos_512_v4
+149/755501/campos_512_v4
+149/755504/campos_512_v4
+149/755505/campos_512_v4
+149/755509/campos_512_v4
+149/755510/campos_512_v4
+149/755523/campos_512_v4
+149/755524/campos_512_v4
+149/755528/campos_512_v4
+149/755532/campos_512_v4
+149/755537/campos_512_v4
+149/755548/campos_512_v4
+149/755551/campos_512_v4
+149/755567/campos_512_v4
+149/755568/campos_512_v4
+149/755572/campos_512_v4
+149/755575/campos_512_v4
+149/755581/campos_512_v4
+149/755588/campos_512_v4
+149/755598/campos_512_v4
+149/755617/campos_512_v4
+149/755618/campos_512_v4
+149/755646/campos_512_v4
+149/755654/campos_512_v4
+149/755673/campos_512_v4
+149/755677/campos_512_v4
+149/755709/campos_512_v4
+149/755738/campos_512_v4
+149/755746/campos_512_v4
+149/755747/campos_512_v4
+149/755750/campos_512_v4
+149/755751/campos_512_v4
+149/755754/campos_512_v4
+149/755759/campos_512_v4
+149/755762/campos_512_v4
+149/755771/campos_512_v4
+149/755773/campos_512_v4
+149/755780/campos_512_v4
+149/755783/campos_512_v4
+149/755787/campos_512_v4
+149/755794/campos_512_v4
+149/755802/campos_512_v4
+149/755804/campos_512_v4
+149/755806/campos_512_v4
+149/755818/campos_512_v4
+149/755819/campos_512_v4
+149/755825/campos_512_v4
+149/755832/campos_512_v4
+149/755838/campos_512_v4
+149/755845/campos_512_v4
+149/755864/campos_512_v4
+149/755874/campos_512_v4
+149/755877/campos_512_v4
+149/755879/campos_512_v4
+149/755886/campos_512_v4
+149/755888/campos_512_v4
+149/755891/campos_512_v4
+149/755908/campos_512_v4
+149/755929/campos_512_v4
+149/755931/campos_512_v4
+149/755945/campos_512_v4
+149/755967/campos_512_v4
+149/755979/campos_512_v4
+149/755983/campos_512_v4
+149/755993/campos_512_v4
+149/755999/campos_512_v4
+149/756017/campos_512_v4
+149/756023/campos_512_v4
+149/756027/campos_512_v4
+149/756031/campos_512_v4
+149/756032/campos_512_v4
+149/756034/campos_512_v4
+149/756039/campos_512_v4
+149/756055/campos_512_v4
+149/756092/campos_512_v4
+149/756102/campos_512_v4
+149/756110/campos_512_v4
+149/756111/campos_512_v4
+149/756115/campos_512_v4
+149/756143/campos_512_v4
+149/756162/campos_512_v4
+149/756165/campos_512_v4
+149/756169/campos_512_v4
+149/756171/campos_512_v4
+149/756182/campos_512_v4
+149/756187/campos_512_v4
+149/756215/campos_512_v4
+149/756218/campos_512_v4
+149/756219/campos_512_v4
+149/756230/campos_512_v4
+149/756235/campos_512_v4
+149/756241/campos_512_v4
+149/756243/campos_512_v4
+149/756244/campos_512_v4
+149/756245/campos_512_v4
+149/756248/campos_512_v4
+149/756250/campos_512_v4
+149/756264/campos_512_v4
+149/756274/campos_512_v4
+149/756276/campos_512_v4
+149/756280/campos_512_v4
+149/756283/campos_512_v4
+149/756287/campos_512_v4
+149/756288/campos_512_v4
+149/756289/campos_512_v4
+149/756298/campos_512_v4
+149/756323/campos_512_v4
+149/756341/campos_512_v4
+149/756342/campos_512_v4
+149/756353/campos_512_v4
+149/756356/campos_512_v4
+149/756404/campos_512_v4
+149/756408/campos_512_v4
+149/756413/campos_512_v4
+149/756417/campos_512_v4
+149/756418/campos_512_v4
+149/756423/campos_512_v4
+149/756429/campos_512_v4
+149/756435/campos_512_v4
+149/756439/campos_512_v4
+149/756442/campos_512_v4
+149/756443/campos_512_v4
+149/756479/campos_512_v4
+149/756482/campos_512_v4
+149/756491/campos_512_v4
+149/756497/campos_512_v4
+149/756525/campos_512_v4
+149/756536/campos_512_v4
+149/756539/campos_512_v4
+149/756542/campos_512_v4
+149/756546/campos_512_v4
+149/756547/campos_512_v4
+149/756560/campos_512_v4
+149/756563/campos_512_v4
+149/756566/campos_512_v4
+149/756574/campos_512_v4
+149/756591/campos_512_v4
+149/756598/campos_512_v4
+149/756601/campos_512_v4
+149/756610/campos_512_v4
+149/756618/campos_512_v4
+149/756623/campos_512_v4
+149/756628/campos_512_v4
+149/756632/campos_512_v4
+149/756638/campos_512_v4
+149/756639/campos_512_v4
+149/756667/campos_512_v4
+149/756685/campos_512_v4
+149/756706/campos_512_v4
+149/756718/campos_512_v4
+149/756747/campos_512_v4
+149/756750/campos_512_v4
+149/756755/campos_512_v4
+149/756772/campos_512_v4
+149/756777/campos_512_v4
+149/756792/campos_512_v4
+149/756805/campos_512_v4
+149/756808/campos_512_v4
+149/756812/campos_512_v4
+149/756821/campos_512_v4
+149/756826/campos_512_v4
+149/756848/campos_512_v4
+149/756851/campos_512_v4
+149/756863/campos_512_v4
+149/756892/campos_512_v4
+149/756894/campos_512_v4
+149/756895/campos_512_v4
+149/756898/campos_512_v4
+149/756906/campos_512_v4
+149/756914/campos_512_v4
+149/756924/campos_512_v4
+149/756932/campos_512_v4
+149/756933/campos_512_v4
+149/756948/campos_512_v4
+149/756949/campos_512_v4
+149/756958/campos_512_v4
+149/756964/campos_512_v4
+149/756976/campos_512_v4
+149/756992/campos_512_v4
+149/757004/campos_512_v4
+149/757012/campos_512_v4
+149/757023/campos_512_v4
+149/757032/campos_512_v4
+149/757046/campos_512_v4
+149/757052/campos_512_v4
+149/757092/campos_512_v4
+149/757099/campos_512_v4
+149/757105/campos_512_v4
+149/757113/campos_512_v4
+149/757119/campos_512_v4
+149/757120/campos_512_v4
+149/757125/campos_512_v4
+149/757138/campos_512_v4
+149/757140/campos_512_v4
+149/757153/campos_512_v4
+149/757154/campos_512_v4
+149/757159/campos_512_v4
+149/757174/campos_512_v4
+149/757179/campos_512_v4
+149/757184/campos_512_v4
+149/757186/campos_512_v4
+149/757187/campos_512_v4
+149/757200/campos_512_v4
+149/757209/campos_512_v4
+149/757211/campos_512_v4
+149/757226/campos_512_v4
+149/757227/campos_512_v4
+149/757229/campos_512_v4
+149/757230/campos_512_v4
+149/757239/campos_512_v4
+149/757253/campos_512_v4
+149/757278/campos_512_v4
+149/757287/campos_512_v4
+149/757290/campos_512_v4
+149/757294/campos_512_v4
+149/757304/campos_512_v4
+149/757308/campos_512_v4
+149/757314/campos_512_v4
+149/757316/campos_512_v4
+149/757321/campos_512_v4
+149/757326/campos_512_v4
+149/757364/campos_512_v4
+149/757375/campos_512_v4
+149/757385/campos_512_v4
+149/757396/campos_512_v4
+149/757397/campos_512_v4
+149/757423/campos_512_v4
+149/757427/campos_512_v4
+149/757440/campos_512_v4
+149/757454/campos_512_v4
+149/757459/campos_512_v4
+149/757466/campos_512_v4
+149/757467/campos_512_v4
+149/757472/campos_512_v4
+149/757476/campos_512_v4
+149/757483/campos_512_v4
+149/757493/campos_512_v4
+149/757515/campos_512_v4
+149/757516/campos_512_v4
+149/757541/campos_512_v4
+149/757552/campos_512_v4
+149/757572/campos_512_v4
+149/757579/campos_512_v4
+149/757581/campos_512_v4
+149/757593/campos_512_v4
+149/757598/campos_512_v4
+149/757611/campos_512_v4
+149/757619/campos_512_v4
+149/757620/campos_512_v4
+149/757621/campos_512_v4
+149/757622/campos_512_v4
+149/757623/campos_512_v4
+149/757624/campos_512_v4
+149/757639/campos_512_v4
+149/757653/campos_512_v4
+149/757661/campos_512_v4
+149/757675/campos_512_v4
+149/757681/campos_512_v4
+149/757690/campos_512_v4
+149/757696/campos_512_v4
+149/757697/campos_512_v4
+149/757718/campos_512_v4
+149/757722/campos_512_v4
+149/757730/campos_512_v4
+149/757736/campos_512_v4
+149/757746/campos_512_v4
+149/757753/campos_512_v4
+149/757760/campos_512_v4
+149/757764/campos_512_v4
+149/757766/campos_512_v4
+149/757783/campos_512_v4
+149/757789/campos_512_v4
+149/757795/campos_512_v4
+149/757800/campos_512_v4
+149/757808/campos_512_v4
+149/757810/campos_512_v4
+149/757811/campos_512_v4
+149/757824/campos_512_v4
+149/757825/campos_512_v4
+149/757842/campos_512_v4
+149/757852/campos_512_v4
+149/757863/campos_512_v4
+149/757871/campos_512_v4
+149/757879/campos_512_v4
+149/757889/campos_512_v4
+149/757891/campos_512_v4
+149/757893/campos_512_v4
+149/757898/campos_512_v4
+149/757912/campos_512_v4
+149/757931/campos_512_v4
+149/757935/campos_512_v4
+149/757948/campos_512_v4
+149/757951/campos_512_v4
+149/757958/campos_512_v4
+149/757959/campos_512_v4
+149/757962/campos_512_v4
+149/757982/campos_512_v4
+149/757983/campos_512_v4
+149/757984/campos_512_v4
+149/757997/campos_512_v4
+149/758002/campos_512_v4
+149/758003/campos_512_v4
+149/758004/campos_512_v4
+149/758008/campos_512_v4
+149/758016/campos_512_v4
+149/758024/campos_512_v4
+149/758027/campos_512_v4
+149/758037/campos_512_v4
+149/758061/campos_512_v4
+149/758068/campos_512_v4
+149/758069/campos_512_v4
+149/758071/campos_512_v4
+149/758075/campos_512_v4
+149/758081/campos_512_v4
+149/758088/campos_512_v4
+149/758113/campos_512_v4
+149/758119/campos_512_v4
+149/758124/campos_512_v4
+149/758128/campos_512_v4
+149/758131/campos_512_v4
+149/758132/campos_512_v4
+149/758138/campos_512_v4
+149/758153/campos_512_v4
+149/758154/campos_512_v4
+149/758158/campos_512_v4
+149/758161/campos_512_v4
+149/758170/campos_512_v4
+149/758174/campos_512_v4
+149/758198/campos_512_v4
+149/758217/campos_512_v4
+149/758219/campos_512_v4
+149/758223/campos_512_v4
+149/758238/campos_512_v4
+149/758248/campos_512_v4
+149/758249/campos_512_v4
+149/758252/campos_512_v4
+149/758270/campos_512_v4
+149/758275/campos_512_v4
+149/758284/campos_512_v4
+149/758287/campos_512_v4
+149/758293/campos_512_v4
+149/758294/campos_512_v4
+149/758303/campos_512_v4
+149/758313/campos_512_v4
+149/758318/campos_512_v4
+149/758322/campos_512_v4
+149/758348/campos_512_v4
+149/758355/campos_512_v4
+149/758357/campos_512_v4
+149/758365/campos_512_v4
+149/758370/campos_512_v4
+149/758371/campos_512_v4
+149/758375/campos_512_v4
+149/758377/campos_512_v4
+149/758385/campos_512_v4
+149/758391/campos_512_v4
+149/758397/campos_512_v4
+149/758411/campos_512_v4
+149/758421/campos_512_v4
+149/758424/campos_512_v4
+149/758432/campos_512_v4
+149/758451/campos_512_v4
+149/758453/campos_512_v4
+149/758454/campos_512_v4
+149/758465/campos_512_v4
+149/758473/campos_512_v4
+149/758478/campos_512_v4
+149/758492/campos_512_v4
+149/758493/campos_512_v4
+149/758494/campos_512_v4
+149/758499/campos_512_v4
+149/758514/campos_512_v4
+149/758526/campos_512_v4
+149/758528/campos_512_v4
+149/758529/campos_512_v4
+149/758533/campos_512_v4
+149/758546/campos_512_v4
+149/758551/campos_512_v4
+149/758552/campos_512_v4
+149/758554/campos_512_v4
+149/758569/campos_512_v4
+149/758576/campos_512_v4
+149/758577/campos_512_v4
+149/758580/campos_512_v4
+149/758587/campos_512_v4
+149/758590/campos_512_v4
+149/758612/campos_512_v4
+149/758623/campos_512_v4
+149/758631/campos_512_v4
+149/758639/campos_512_v4
+149/758642/campos_512_v4
+149/758657/campos_512_v4
+149/758663/campos_512_v4
+149/758669/campos_512_v4
+149/758685/campos_512_v4
+149/758699/campos_512_v4
+149/758712/campos_512_v4
+149/758716/campos_512_v4
+149/758729/campos_512_v4
+149/758736/campos_512_v4
+149/758737/campos_512_v4
+149/758738/campos_512_v4
+149/758739/campos_512_v4
+149/758741/campos_512_v4
+149/758751/campos_512_v4
+149/758757/campos_512_v4
+149/758762/campos_512_v4
+149/758772/campos_512_v4
+149/758775/campos_512_v4
+149/758787/campos_512_v4
+149/758800/campos_512_v4
+149/758818/campos_512_v4
+149/758819/campos_512_v4
+149/758833/campos_512_v4
+149/758835/campos_512_v4
+149/758843/campos_512_v4
+149/758847/campos_512_v4
+149/758849/campos_512_v4
+149/758862/campos_512_v4
+149/758863/campos_512_v4
+149/758868/campos_512_v4
+149/758872/campos_512_v4
+149/758882/campos_512_v4
+149/758883/campos_512_v4
+149/758886/campos_512_v4
+149/758908/campos_512_v4
+149/758909/campos_512_v4
+149/758912/campos_512_v4
+149/758917/campos_512_v4
+149/758920/campos_512_v4
+149/758922/campos_512_v4
+149/758929/campos_512_v4
+149/758953/campos_512_v4
+149/758964/campos_512_v4
+149/758974/campos_512_v4
+149/759002/campos_512_v4
+149/759004/campos_512_v4
+149/759017/campos_512_v4
+149/759020/campos_512_v4
+149/759021/campos_512_v4
+149/759023/campos_512_v4
+149/759026/campos_512_v4
+149/759027/campos_512_v4
+149/759044/campos_512_v4
+149/759047/campos_512_v4
+149/759057/campos_512_v4
+149/759061/campos_512_v4
+149/759067/campos_512_v4
+149/759068/campos_512_v4
+149/759073/campos_512_v4
+149/759091/campos_512_v4
+149/759097/campos_512_v4
+149/759103/campos_512_v4
+149/759105/campos_512_v4
+149/759111/campos_512_v4
+149/759113/campos_512_v4
+149/759123/campos_512_v4
+149/759128/campos_512_v4
+149/759141/campos_512_v4
+149/759163/campos_512_v4
+149/759168/campos_512_v4
+149/759173/campos_512_v4
+149/759174/campos_512_v4
+149/759175/campos_512_v4
+149/759178/campos_512_v4
+149/759181/campos_512_v4
+149/759186/campos_512_v4
+149/759190/campos_512_v4
+149/759231/campos_512_v4
+149/759232/campos_512_v4
+149/759239/campos_512_v4
+149/759262/campos_512_v4
+149/759284/campos_512_v4
+149/759285/campos_512_v4
+149/759294/campos_512_v4
+149/759298/campos_512_v4
+149/759301/campos_512_v4
+149/759309/campos_512_v4
+149/759313/campos_512_v4
+149/759324/campos_512_v4
+149/759336/campos_512_v4
+149/759363/campos_512_v4
+149/759368/campos_512_v4
+149/759376/campos_512_v4
+149/759378/campos_512_v4
+149/759380/campos_512_v4
+149/759382/campos_512_v4
+149/759396/campos_512_v4
+149/759419/campos_512_v4
+149/759424/campos_512_v4
+149/759431/campos_512_v4
+149/759435/campos_512_v4
+149/759436/campos_512_v4
+149/759437/campos_512_v4
+149/759458/campos_512_v4
+149/759465/campos_512_v4
+149/759472/campos_512_v4
+149/759478/campos_512_v4
+149/759480/campos_512_v4
+149/759482/campos_512_v4
+149/759494/campos_512_v4
+149/759506/campos_512_v4
+149/759512/campos_512_v4
+149/759524/campos_512_v4
+149/759534/campos_512_v4
+149/759541/campos_512_v4
+149/759543/campos_512_v4
+149/759546/campos_512_v4
+149/759550/campos_512_v4
+149/759551/campos_512_v4
+149/759553/campos_512_v4
+149/759556/campos_512_v4
+149/759560/campos_512_v4
+149/759574/campos_512_v4
+149/759578/campos_512_v4
+149/759586/campos_512_v4
+149/759590/campos_512_v4
+149/759599/campos_512_v4
+149/759612/campos_512_v4
+149/759613/campos_512_v4
+149/759626/campos_512_v4
+149/759638/campos_512_v4
+149/759640/campos_512_v4
+149/759643/campos_512_v4
+149/759651/campos_512_v4
+149/759653/campos_512_v4
+149/759654/campos_512_v4
+149/759658/campos_512_v4
+149/759661/campos_512_v4
+149/759667/campos_512_v4
+149/759681/campos_512_v4
+149/759695/campos_512_v4
+149/759697/campos_512_v4
+149/759701/campos_512_v4
+149/759702/campos_512_v4
+149/759710/campos_512_v4
+149/759711/campos_512_v4
+149/759730/campos_512_v4
+149/759733/campos_512_v4
+149/759734/campos_512_v4
+149/759740/campos_512_v4
+149/759745/campos_512_v4
+149/759757/campos_512_v4
+149/759758/campos_512_v4
+149/759762/campos_512_v4
+149/759765/campos_512_v4
+149/759776/campos_512_v4
+149/759787/campos_512_v4
+149/759791/campos_512_v4
+149/759797/campos_512_v4
+149/759800/campos_512_v4
+149/759801/campos_512_v4
+149/759806/campos_512_v4
+149/759811/campos_512_v4
+149/759812/campos_512_v4
+149/759817/campos_512_v4
+149/759825/campos_512_v4
+149/759837/campos_512_v4
+149/759855/campos_512_v4
+149/759856/campos_512_v4
+149/759860/campos_512_v4
+149/759878/campos_512_v4
+149/759900/campos_512_v4
+149/759908/campos_512_v4
+149/759936/campos_512_v4
+149/759939/campos_512_v4
+149/759941/campos_512_v4
+149/759947/campos_512_v4
+149/759955/campos_512_v4
+149/759958/campos_512_v4
+149/759992/campos_512_v4
+149/759993/campos_512_v4
+15/85013/campos_512_v4
+15/85019/campos_512_v4
+15/85023/campos_512_v4
+15/85031/campos_512_v4
+15/85038/campos_512_v4
+15/85045/campos_512_v4
+15/85051/campos_512_v4
+15/85052/campos_512_v4
+15/85064/campos_512_v4
+15/85069/campos_512_v4
+15/85089/campos_512_v4
+15/85091/campos_512_v4
+15/85096/campos_512_v4
+15/85110/campos_512_v4
+15/85121/campos_512_v4
+15/85128/campos_512_v4
+15/85132/campos_512_v4
+15/85141/campos_512_v4
+15/85165/campos_512_v4
+15/85172/campos_512_v4
+15/85176/campos_512_v4
+15/85178/campos_512_v4
+15/85183/campos_512_v4
+15/85187/campos_512_v4
+15/85191/campos_512_v4
+15/85197/campos_512_v4
+15/85220/campos_512_v4
+15/85230/campos_512_v4
+15/85238/campos_512_v4
+15/85247/campos_512_v4
+15/85251/campos_512_v4
+15/85253/campos_512_v4
+15/85254/campos_512_v4
+15/85263/campos_512_v4
+15/85265/campos_512_v4
+15/85272/campos_512_v4
+15/85293/campos_512_v4
+15/85294/campos_512_v4
+15/85306/campos_512_v4
+15/85307/campos_512_v4
+15/85312/campos_512_v4
+15/85320/campos_512_v4
+15/85323/campos_512_v4
+15/85326/campos_512_v4
+15/85330/campos_512_v4
+15/85332/campos_512_v4
+15/85336/campos_512_v4
+15/85339/campos_512_v4
+15/85349/campos_512_v4
+15/85372/campos_512_v4
+15/85375/campos_512_v4
+15/85377/campos_512_v4
+15/85380/campos_512_v4
+15/85382/campos_512_v4
+15/85388/campos_512_v4
+15/85390/campos_512_v4
+15/85400/campos_512_v4
+15/85404/campos_512_v4
+15/85407/campos_512_v4
+15/85417/campos_512_v4
+15/85433/campos_512_v4
+15/85444/campos_512_v4
+15/85452/campos_512_v4
+15/85453/campos_512_v4
+15/85456/campos_512_v4
+15/85460/campos_512_v4
+15/85462/campos_512_v4
+15/85463/campos_512_v4
+15/85468/campos_512_v4
+15/85474/campos_512_v4
+15/85475/campos_512_v4
+15/85487/campos_512_v4
+15/85488/campos_512_v4
+15/85494/campos_512_v4
+15/85496/campos_512_v4
+15/85501/campos_512_v4
+15/85529/campos_512_v4
+15/85534/campos_512_v4
+15/85535/campos_512_v4
+15/85537/campos_512_v4
+15/85538/campos_512_v4
+15/85541/campos_512_v4
+15/85544/campos_512_v4
+15/85547/campos_512_v4
+15/85549/campos_512_v4
+15/85553/campos_512_v4
+15/85560/campos_512_v4
+15/85573/campos_512_v4
+15/85575/campos_512_v4
+15/85580/campos_512_v4
+15/85591/campos_512_v4
+15/85593/campos_512_v4
+15/85605/campos_512_v4
+15/85631/campos_512_v4
+15/85637/campos_512_v4
+15/85638/campos_512_v4
+15/85644/campos_512_v4
+15/85648/campos_512_v4
+15/85651/campos_512_v4
+15/85658/campos_512_v4
+15/85667/campos_512_v4
+15/85668/campos_512_v4
+15/85692/campos_512_v4
+15/85701/campos_512_v4
+15/85706/campos_512_v4
+15/85707/campos_512_v4
+15/85713/campos_512_v4
+15/85728/campos_512_v4
+15/85732/campos_512_v4
+15/85736/campos_512_v4
+15/85744/campos_512_v4
+15/85763/campos_512_v4
+15/85765/campos_512_v4
+15/85777/campos_512_v4
+15/85781/campos_512_v4
+15/85790/campos_512_v4
+15/85796/campos_512_v4
+15/85797/campos_512_v4
+15/85804/campos_512_v4
+15/85809/campos_512_v4
+15/85814/campos_512_v4
+15/85815/campos_512_v4
+15/85820/campos_512_v4
+15/85828/campos_512_v4
+15/85829/campos_512_v4
+15/85844/campos_512_v4
+15/85860/campos_512_v4
+15/85868/campos_512_v4
+15/85875/campos_512_v4
+15/85887/campos_512_v4
+15/85891/campos_512_v4
+15/85892/campos_512_v4
+15/85900/campos_512_v4
+15/85907/campos_512_v4
+15/85911/campos_512_v4
+15/85929/campos_512_v4
+15/85933/campos_512_v4
+15/85947/campos_512_v4
+15/85974/campos_512_v4
+15/85975/campos_512_v4
+15/85977/campos_512_v4
+15/85980/campos_512_v4
+15/85981/campos_512_v4
+15/85983/campos_512_v4
+15/85994/campos_512_v4
+15/85998/campos_512_v4
+15/86000/campos_512_v4
+15/86018/campos_512_v4
+15/86038/campos_512_v4
+15/86048/campos_512_v4
+15/86057/campos_512_v4
+15/86082/campos_512_v4
+15/86083/campos_512_v4
+15/86092/campos_512_v4
+15/86095/campos_512_v4
+15/86096/campos_512_v4
+15/86101/campos_512_v4
+15/86103/campos_512_v4
+15/86106/campos_512_v4
+15/86134/campos_512_v4
+15/86135/campos_512_v4
+15/86140/campos_512_v4
+15/86144/campos_512_v4
+15/86155/campos_512_v4
+15/86161/campos_512_v4
+15/86168/campos_512_v4
+15/86172/campos_512_v4
+15/86173/campos_512_v4
+15/86179/campos_512_v4
+15/86186/campos_512_v4
+15/86198/campos_512_v4
+15/86211/campos_512_v4
+15/86216/campos_512_v4
+15/86220/campos_512_v4
+15/86228/campos_512_v4
+15/86229/campos_512_v4
+15/86241/campos_512_v4
+15/86247/campos_512_v4
+15/86260/campos_512_v4
+15/86264/campos_512_v4
+15/86265/campos_512_v4
+15/86266/campos_512_v4
+15/86272/campos_512_v4
+15/86287/campos_512_v4
+15/86297/campos_512_v4
+15/86304/campos_512_v4
+15/86314/campos_512_v4
+15/86349/campos_512_v4
+15/86351/campos_512_v4
+15/86356/campos_512_v4
+15/86363/campos_512_v4
+15/86367/campos_512_v4
+15/86373/campos_512_v4
+15/86376/campos_512_v4
+15/86378/campos_512_v4
+15/86395/campos_512_v4
+15/86417/campos_512_v4
+15/86419/campos_512_v4
+15/86420/campos_512_v4
+15/86432/campos_512_v4
+15/86440/campos_512_v4
+15/86445/campos_512_v4
+15/86446/campos_512_v4
+15/86463/campos_512_v4
+15/86476/campos_512_v4
+15/86486/campos_512_v4
+15/86505/campos_512_v4
+15/86513/campos_512_v4
+15/86516/campos_512_v4
+15/86518/campos_512_v4
+15/86525/campos_512_v4
+15/86530/campos_512_v4
+15/86533/campos_512_v4
+15/86542/campos_512_v4
+15/86551/campos_512_v4
+15/86558/campos_512_v4
+15/86570/campos_512_v4
+15/86580/campos_512_v4
+15/86589/campos_512_v4
+15/86591/campos_512_v4
+15/86604/campos_512_v4
+15/86610/campos_512_v4
+15/86622/campos_512_v4
+15/86623/campos_512_v4
+15/86627/campos_512_v4
+15/86652/campos_512_v4
+15/86659/campos_512_v4
+15/86679/campos_512_v4
+15/86680/campos_512_v4
+15/86728/campos_512_v4
+15/86730/campos_512_v4
+15/86742/campos_512_v4
+15/86745/campos_512_v4
+15/86751/campos_512_v4
+15/86757/campos_512_v4
+15/86768/campos_512_v4
+15/86770/campos_512_v4
+15/86783/campos_512_v4
+15/86791/campos_512_v4
+15/86806/campos_512_v4
+15/86811/campos_512_v4
+15/86829/campos_512_v4
+15/86842/campos_512_v4
+15/86845/campos_512_v4
+15/86849/campos_512_v4
+15/86853/campos_512_v4
+15/86861/campos_512_v4
+15/86870/campos_512_v4
+15/86881/campos_512_v4
+15/86899/campos_512_v4
+15/86909/campos_512_v4
+15/86911/campos_512_v4
+15/86912/campos_512_v4
+15/86914/campos_512_v4
+15/86923/campos_512_v4
+15/86940/campos_512_v4
+15/86946/campos_512_v4
+15/86948/campos_512_v4
+15/86950/campos_512_v4
+15/86962/campos_512_v4
+15/86964/campos_512_v4
+15/86968/campos_512_v4
+15/86973/campos_512_v4
+15/86976/campos_512_v4
+15/86979/campos_512_v4
+15/86988/campos_512_v4
+15/86989/campos_512_v4
+15/86992/campos_512_v4
+15/86994/campos_512_v4
+15/86999/campos_512_v4
+15/87001/campos_512_v4
+15/87025/campos_512_v4
+15/87026/campos_512_v4
+15/87027/campos_512_v4
+15/87033/campos_512_v4
+15/87042/campos_512_v4
+15/87045/campos_512_v4
+15/87055/campos_512_v4
+15/87065/campos_512_v4
+15/87072/campos_512_v4
+15/87120/campos_512_v4
+15/87125/campos_512_v4
+15/87136/campos_512_v4
+15/87138/campos_512_v4
+15/87147/campos_512_v4
+15/87157/campos_512_v4
+15/87184/campos_512_v4
+15/87200/campos_512_v4
+15/87205/campos_512_v4
+15/87210/campos_512_v4
+15/87214/campos_512_v4
+15/87215/campos_512_v4
+15/87219/campos_512_v4
+15/87225/campos_512_v4
+15/87228/campos_512_v4
+15/87234/campos_512_v4
+15/87238/campos_512_v4
+15/87251/campos_512_v4
+15/87260/campos_512_v4
+15/87299/campos_512_v4
+15/87304/campos_512_v4
+15/87307/campos_512_v4
+15/87309/campos_512_v4
+15/87311/campos_512_v4
+15/87315/campos_512_v4
+15/87330/campos_512_v4
+15/87335/campos_512_v4
+15/87343/campos_512_v4
+15/87347/campos_512_v4
+15/87355/campos_512_v4
+15/87367/campos_512_v4
+15/87386/campos_512_v4
+15/87392/campos_512_v4
+15/87393/campos_512_v4
+15/87395/campos_512_v4
+15/87426/campos_512_v4
+15/87434/campos_512_v4
+15/87435/campos_512_v4
+15/87460/campos_512_v4
+15/87471/campos_512_v4
+15/87474/campos_512_v4
+15/87480/campos_512_v4
+15/87497/campos_512_v4
+15/87500/campos_512_v4
+15/87510/campos_512_v4
+15/87532/campos_512_v4
+15/87534/campos_512_v4
+15/87558/campos_512_v4
+15/87559/campos_512_v4
+15/87572/campos_512_v4
+15/87573/campos_512_v4
+15/87592/campos_512_v4
+15/87602/campos_512_v4
+15/87605/campos_512_v4
+15/87606/campos_512_v4
+15/87608/campos_512_v4
+15/87615/campos_512_v4
+15/87631/campos_512_v4
+15/87632/campos_512_v4
+15/87639/campos_512_v4
+15/87640/campos_512_v4
+15/87649/campos_512_v4
+15/87653/campos_512_v4
+15/87658/campos_512_v4
+15/87669/campos_512_v4
+15/87686/campos_512_v4
+15/87687/campos_512_v4
+15/87696/campos_512_v4
+15/87697/campos_512_v4
+15/87699/campos_512_v4
+15/87705/campos_512_v4
+15/87706/campos_512_v4
+15/87713/campos_512_v4
+15/87719/campos_512_v4
+15/87729/campos_512_v4
+15/87731/campos_512_v4
+15/87732/campos_512_v4
+15/87733/campos_512_v4
+15/87751/campos_512_v4
+15/87758/campos_512_v4
+15/87763/campos_512_v4
+15/87767/campos_512_v4
+15/87777/campos_512_v4
+15/87783/campos_512_v4
+15/87789/campos_512_v4
+15/87790/campos_512_v4
+15/87820/campos_512_v4
+15/87836/campos_512_v4
+15/87838/campos_512_v4
+15/87841/campos_512_v4
+15/87843/campos_512_v4
+15/87844/campos_512_v4
+15/87852/campos_512_v4
+15/87859/campos_512_v4
+15/87866/campos_512_v4
+15/87870/campos_512_v4
+15/87878/campos_512_v4
+15/87884/campos_512_v4
+15/87885/campos_512_v4
+15/87886/campos_512_v4
+15/87891/campos_512_v4
+15/87893/campos_512_v4
+15/87915/campos_512_v4
+15/87919/campos_512_v4
+15/87927/campos_512_v4
+15/87928/campos_512_v4
+15/87934/campos_512_v4
+15/87941/campos_512_v4
+15/87951/campos_512_v4
+15/87958/campos_512_v4
+15/87969/campos_512_v4
+15/87976/campos_512_v4
+15/87985/campos_512_v4
+15/88000/campos_512_v4
+15/88009/campos_512_v4
+15/88010/campos_512_v4
+15/88019/campos_512_v4
+15/88023/campos_512_v4
+15/88037/campos_512_v4
+15/88071/campos_512_v4
+15/88073/campos_512_v4
+15/88090/campos_512_v4
+15/88105/campos_512_v4
+15/88107/campos_512_v4
+15/88108/campos_512_v4
+15/88109/campos_512_v4
+15/88110/campos_512_v4
+15/88116/campos_512_v4
+15/88120/campos_512_v4
+15/88130/campos_512_v4
+15/88149/campos_512_v4
+15/88155/campos_512_v4
+15/88160/campos_512_v4
+15/88164/campos_512_v4
+15/88167/campos_512_v4
+15/88180/campos_512_v4
+15/88189/campos_512_v4
+15/88191/campos_512_v4
+15/88192/campos_512_v4
+15/88198/campos_512_v4
+15/88199/campos_512_v4
+15/88210/campos_512_v4
+15/88211/campos_512_v4
+15/88215/campos_512_v4
+15/88217/campos_512_v4
+15/88219/campos_512_v4
+15/88223/campos_512_v4
+15/88230/campos_512_v4
+15/88233/campos_512_v4
+15/88235/campos_512_v4
+15/88250/campos_512_v4
+15/88261/campos_512_v4
+15/88275/campos_512_v4
+15/88277/campos_512_v4
+15/88285/campos_512_v4
+15/88292/campos_512_v4
+15/88294/campos_512_v4
+15/88302/campos_512_v4
+15/88308/campos_512_v4
+15/88312/campos_512_v4
+15/88334/campos_512_v4
+15/88341/campos_512_v4
+15/88367/campos_512_v4
+15/88372/campos_512_v4
+15/88375/campos_512_v4
+15/88391/campos_512_v4
+15/88394/campos_512_v4
+15/88401/campos_512_v4
+15/88406/campos_512_v4
+15/88417/campos_512_v4
+15/88420/campos_512_v4
+15/88428/campos_512_v4
+15/88435/campos_512_v4
+15/88445/campos_512_v4
+15/88462/campos_512_v4
+15/88467/campos_512_v4
+15/88470/campos_512_v4
+15/88480/campos_512_v4
+15/88486/campos_512_v4
+15/88490/campos_512_v4
+15/88497/campos_512_v4
+15/88501/campos_512_v4
+15/88510/campos_512_v4
+15/88513/campos_512_v4
+15/88515/campos_512_v4
+15/88516/campos_512_v4
+15/88517/campos_512_v4
+15/88534/campos_512_v4
+15/88547/campos_512_v4
+15/88555/campos_512_v4
+15/88565/campos_512_v4
+15/88571/campos_512_v4
+15/88574/campos_512_v4
+15/88577/campos_512_v4
+15/88581/campos_512_v4
+15/88588/campos_512_v4
+15/88596/campos_512_v4
+15/88597/campos_512_v4
+15/88611/campos_512_v4
+15/88619/campos_512_v4
+15/88624/campos_512_v4
+15/88629/campos_512_v4
+15/88631/campos_512_v4
+15/88638/campos_512_v4
+15/88643/campos_512_v4
+15/88650/campos_512_v4
+15/88651/campos_512_v4
+15/88655/campos_512_v4
+15/88659/campos_512_v4
+15/88661/campos_512_v4
+15/88687/campos_512_v4
+15/88690/campos_512_v4
+15/88693/campos_512_v4
+15/88707/campos_512_v4
+15/88708/campos_512_v4
+15/88722/campos_512_v4
+15/88732/campos_512_v4
+15/88735/campos_512_v4
+15/88739/campos_512_v4
+15/88758/campos_512_v4
+15/88764/campos_512_v4
+15/88774/campos_512_v4
+15/88776/campos_512_v4
+15/88778/campos_512_v4
+15/88787/campos_512_v4
+15/88795/campos_512_v4
+15/88806/campos_512_v4
+15/88812/campos_512_v4
+15/88822/campos_512_v4
+15/88834/campos_512_v4
+15/88840/campos_512_v4
+15/88851/campos_512_v4
+15/88856/campos_512_v4
+15/88886/campos_512_v4
+15/88897/campos_512_v4
+15/88904/campos_512_v4
+15/88909/campos_512_v4
+15/88910/campos_512_v4
+15/88922/campos_512_v4
+15/88930/campos_512_v4
+15/88946/campos_512_v4
+15/88953/campos_512_v4
+15/88956/campos_512_v4
+15/88973/campos_512_v4
+15/88985/campos_512_v4
+15/88998/campos_512_v4
+15/89000/campos_512_v4
+15/89003/campos_512_v4
+15/89010/campos_512_v4
+15/89026/campos_512_v4
+15/89055/campos_512_v4
+15/89059/campos_512_v4
+15/89093/campos_512_v4
+15/89097/campos_512_v4
+15/89118/campos_512_v4
+15/89137/campos_512_v4
+15/89150/campos_512_v4
+15/89155/campos_512_v4
+15/89186/campos_512_v4
+15/89195/campos_512_v4
+15/89212/campos_512_v4
+15/89229/campos_512_v4
+15/89246/campos_512_v4
+15/89270/campos_512_v4
+15/89283/campos_512_v4
+15/89285/campos_512_v4
+15/89298/campos_512_v4
+15/89318/campos_512_v4
+15/89326/campos_512_v4
+15/89331/campos_512_v4
+15/89336/campos_512_v4
+15/89359/campos_512_v4
+15/89366/campos_512_v4
+15/89369/campos_512_v4
+15/89383/campos_512_v4
+15/89422/campos_512_v4
+15/89449/campos_512_v4
+15/89456/campos_512_v4
+15/89459/campos_512_v4
+15/89464/campos_512_v4
+15/89477/campos_512_v4
+15/89495/campos_512_v4
+15/89499/campos_512_v4
+15/89508/campos_512_v4
+15/89517/campos_512_v4
+15/89522/campos_512_v4
+15/89524/campos_512_v4
+15/89531/campos_512_v4
+15/89533/campos_512_v4
+15/89543/campos_512_v4
+15/89547/campos_512_v4
+15/89552/campos_512_v4
+15/89562/campos_512_v4
+15/89571/campos_512_v4
+15/89577/campos_512_v4
+15/89584/campos_512_v4
+15/89607/campos_512_v4
+15/89608/campos_512_v4
+15/89620/campos_512_v4
+15/89624/campos_512_v4
+15/89632/campos_512_v4
+15/89639/campos_512_v4
+15/89641/campos_512_v4
+15/89643/campos_512_v4
+15/89647/campos_512_v4
+15/89661/campos_512_v4
+15/89664/campos_512_v4
+15/89668/campos_512_v4
+15/89671/campos_512_v4
+15/89674/campos_512_v4
+15/89677/campos_512_v4
+15/89686/campos_512_v4
+15/89701/campos_512_v4
+15/89702/campos_512_v4
+15/89716/campos_512_v4
+15/89719/campos_512_v4
+15/89729/campos_512_v4
+15/89746/campos_512_v4
+15/89751/campos_512_v4
+15/89760/campos_512_v4
+15/89761/campos_512_v4
+15/89770/campos_512_v4
+15/89774/campos_512_v4
+15/89783/campos_512_v4
+15/89805/campos_512_v4
+15/89807/campos_512_v4
+15/89809/campos_512_v4
+15/89817/campos_512_v4
+15/89830/campos_512_v4
+15/89847/campos_512_v4
+15/89851/campos_512_v4
+15/89861/campos_512_v4
+15/89864/campos_512_v4
+15/89874/campos_512_v4
+15/89880/campos_512_v4
+15/89881/campos_512_v4
+15/89887/campos_512_v4
+15/89890/campos_512_v4
+15/89898/campos_512_v4
+15/89905/campos_512_v4
+15/89911/campos_512_v4
+15/89915/campos_512_v4
+15/89938/campos_512_v4
+15/89939/campos_512_v4
+15/89941/campos_512_v4
+15/89942/campos_512_v4
+15/89946/campos_512_v4
+15/89950/campos_512_v4
+15/89962/campos_512_v4
+15/89969/campos_512_v4
+15/89970/campos_512_v4
+15/89975/campos_512_v4
+15/89980/campos_512_v4
+15/89981/campos_512_v4
+15/89993/campos_512_v4
+150/760014/campos_512_v4
+150/760015/campos_512_v4
+150/760021/campos_512_v4
+150/760031/campos_512_v4
+150/760044/campos_512_v4
+150/760059/campos_512_v4
+150/760077/campos_512_v4
+150/760101/campos_512_v4
+150/760120/campos_512_v4
+150/760121/campos_512_v4
+150/760123/campos_512_v4
+150/760127/campos_512_v4
+150/760128/campos_512_v4
+150/760134/campos_512_v4
+150/760137/campos_512_v4
+150/760142/campos_512_v4
+150/760143/campos_512_v4
+150/760152/campos_512_v4
+150/760155/campos_512_v4
+150/760171/campos_512_v4
+150/760210/campos_512_v4
+150/760212/campos_512_v4
+150/760213/campos_512_v4
+150/760223/campos_512_v4
+150/760233/campos_512_v4
+150/760238/campos_512_v4
+150/760239/campos_512_v4
+150/760249/campos_512_v4
+150/760251/campos_512_v4
+150/760264/campos_512_v4
+150/760273/campos_512_v4
+150/760279/campos_512_v4
+150/760289/campos_512_v4
+150/760290/campos_512_v4
+150/760296/campos_512_v4
+150/760305/campos_512_v4
+150/760313/campos_512_v4
+150/760322/campos_512_v4
+150/760334/campos_512_v4
+150/760342/campos_512_v4
+150/760349/campos_512_v4
+150/760352/campos_512_v4
+150/760353/campos_512_v4
+150/760354/campos_512_v4
+150/760357/campos_512_v4
+150/760361/campos_512_v4
+150/760369/campos_512_v4
+150/760377/campos_512_v4
+150/760383/campos_512_v4
+150/760386/campos_512_v4
+150/760402/campos_512_v4
+150/760433/campos_512_v4
+150/760442/campos_512_v4
+150/760446/campos_512_v4
+150/760449/campos_512_v4
+150/760455/campos_512_v4
+150/760457/campos_512_v4
+150/760463/campos_512_v4
+150/760469/campos_512_v4
+150/760472/campos_512_v4
+150/760480/campos_512_v4
+150/760495/campos_512_v4
+150/760522/campos_512_v4
+150/760525/campos_512_v4
+150/760529/campos_512_v4
+150/760534/campos_512_v4
+150/760542/campos_512_v4
+150/760575/campos_512_v4
+150/760585/campos_512_v4
+150/760595/campos_512_v4
+150/760599/campos_512_v4
+150/760601/campos_512_v4
+150/760605/campos_512_v4
+150/760616/campos_512_v4
+150/760628/campos_512_v4
+150/760666/campos_512_v4
+150/760675/campos_512_v4
+150/760682/campos_512_v4
+150/760702/campos_512_v4
+150/760722/campos_512_v4
+150/760727/campos_512_v4
+150/760730/campos_512_v4
+150/760738/campos_512_v4
+150/760744/campos_512_v4
+150/760758/campos_512_v4
+150/760762/campos_512_v4
+150/760768/campos_512_v4
+150/760776/campos_512_v4
+150/760779/campos_512_v4
+150/760802/campos_512_v4
+150/760805/campos_512_v4
+150/760811/campos_512_v4
+150/760815/campos_512_v4
+150/760816/campos_512_v4
+150/760825/campos_512_v4
+150/760826/campos_512_v4
+150/760836/campos_512_v4
+150/760838/campos_512_v4
+150/760844/campos_512_v4
+150/760845/campos_512_v4
+150/760848/campos_512_v4
+150/760860/campos_512_v4
+150/760872/campos_512_v4
+150/760876/campos_512_v4
+150/760879/campos_512_v4
+150/760897/campos_512_v4
+150/760899/campos_512_v4
+150/760903/campos_512_v4
+150/760915/campos_512_v4
+150/760920/campos_512_v4
+150/760923/campos_512_v4
+150/760928/campos_512_v4
+150/760930/campos_512_v4
+150/760948/campos_512_v4
+150/760952/campos_512_v4
+150/760957/campos_512_v4
+150/760966/campos_512_v4
+150/760969/campos_512_v4
+150/760970/campos_512_v4
+150/760976/campos_512_v4
+150/760995/campos_512_v4
+150/760998/campos_512_v4
+150/761004/campos_512_v4
+150/761006/campos_512_v4
+150/761007/campos_512_v4
+150/761019/campos_512_v4
+150/761023/campos_512_v4
+150/761028/campos_512_v4
+150/761033/campos_512_v4
+150/761039/campos_512_v4
+150/761068/campos_512_v4
+150/761069/campos_512_v4
+150/761072/campos_512_v4
+150/761079/campos_512_v4
+150/761086/campos_512_v4
+150/761095/campos_512_v4
+150/761102/campos_512_v4
+150/761108/campos_512_v4
+150/761117/campos_512_v4
+150/761122/campos_512_v4
+150/761124/campos_512_v4
+150/761125/campos_512_v4
+150/761129/campos_512_v4
+150/761139/campos_512_v4
+150/761140/campos_512_v4
+150/761155/campos_512_v4
+150/761160/campos_512_v4
+150/761164/campos_512_v4
+150/761167/campos_512_v4
+150/761168/campos_512_v4
+150/761187/campos_512_v4
+150/761190/campos_512_v4
+150/761199/campos_512_v4
+150/761203/campos_512_v4
+150/761207/campos_512_v4
+150/761212/campos_512_v4
+150/761213/campos_512_v4
+150/761214/campos_512_v4
+150/761225/campos_512_v4
+150/761232/campos_512_v4
+150/761238/campos_512_v4
+150/761239/campos_512_v4
+150/761241/campos_512_v4
+150/761244/campos_512_v4
+150/761250/campos_512_v4
+150/761254/campos_512_v4
+150/761263/campos_512_v4
+150/761273/campos_512_v4
+150/761282/campos_512_v4
+150/761299/campos_512_v4
+150/761302/campos_512_v4
+150/761314/campos_512_v4
+150/761316/campos_512_v4
+150/761320/campos_512_v4
+150/761325/campos_512_v4
+150/761328/campos_512_v4
+150/761342/campos_512_v4
+150/761344/campos_512_v4
+150/761345/campos_512_v4
+150/761365/campos_512_v4
+150/761373/campos_512_v4
+150/761382/campos_512_v4
+150/761386/campos_512_v4
+150/761387/campos_512_v4
+150/761389/campos_512_v4
+150/761397/campos_512_v4
+150/761400/campos_512_v4
+150/761402/campos_512_v4
+150/761404/campos_512_v4
+150/761407/campos_512_v4
+150/761417/campos_512_v4
+150/761426/campos_512_v4
+150/761431/campos_512_v4
+150/761440/campos_512_v4
+150/761451/campos_512_v4
+150/761456/campos_512_v4
+150/761462/campos_512_v4
+150/761468/campos_512_v4
+150/761470/campos_512_v4
+150/761473/campos_512_v4
+150/761476/campos_512_v4
+150/761488/campos_512_v4
+150/761514/campos_512_v4
+150/761516/campos_512_v4
+150/761524/campos_512_v4
+150/761529/campos_512_v4
+150/761537/campos_512_v4
+150/761543/campos_512_v4
+150/761548/campos_512_v4
+150/761550/campos_512_v4
+150/761555/campos_512_v4
+150/761576/campos_512_v4
+150/761580/campos_512_v4
+150/761583/campos_512_v4
+150/761588/campos_512_v4
+150/761595/campos_512_v4
+150/761598/campos_512_v4
+150/761612/campos_512_v4
+150/761629/campos_512_v4
+150/761635/campos_512_v4
+150/761645/campos_512_v4
+150/761654/campos_512_v4
+150/761660/campos_512_v4
+150/761662/campos_512_v4
+150/761665/campos_512_v4
+150/761682/campos_512_v4
+150/761695/campos_512_v4
+150/761699/campos_512_v4
+150/761706/campos_512_v4
+150/761708/campos_512_v4
+150/761725/campos_512_v4
+150/761726/campos_512_v4
+150/761733/campos_512_v4
+150/761738/campos_512_v4
+150/761750/campos_512_v4
+150/761767/campos_512_v4
+150/761775/campos_512_v4
+150/761792/campos_512_v4
+150/761799/campos_512_v4
+150/761804/campos_512_v4
+150/761805/campos_512_v4
+150/761813/campos_512_v4
+150/761814/campos_512_v4
+150/761816/campos_512_v4
+150/761827/campos_512_v4
+150/761831/campos_512_v4
+150/761836/campos_512_v4
+150/761844/campos_512_v4
+150/761849/campos_512_v4
+150/761861/campos_512_v4
+150/761866/campos_512_v4
+150/761871/campos_512_v4
+150/761873/campos_512_v4
+150/761879/campos_512_v4
+150/761880/campos_512_v4
+150/761888/campos_512_v4
+150/761901/campos_512_v4
+150/761910/campos_512_v4
+150/761923/campos_512_v4
+150/761924/campos_512_v4
+150/761937/campos_512_v4
+150/761939/campos_512_v4
+150/761948/campos_512_v4
+150/761949/campos_512_v4
+150/761954/campos_512_v4
+150/761956/campos_512_v4
+150/761972/campos_512_v4
+150/761979/campos_512_v4
+150/761983/campos_512_v4
+150/761984/campos_512_v4
+150/761990/campos_512_v4
+150/761998/campos_512_v4
+150/762008/campos_512_v4
+150/762033/campos_512_v4
+150/762041/campos_512_v4
+150/762046/campos_512_v4
+150/762053/campos_512_v4
+150/762057/campos_512_v4
+150/762065/campos_512_v4
+150/762073/campos_512_v4
+150/762074/campos_512_v4
+150/762092/campos_512_v4
+150/762101/campos_512_v4
+150/762107/campos_512_v4
+150/762109/campos_512_v4
+150/762124/campos_512_v4
+150/762134/campos_512_v4
+150/762153/campos_512_v4
+150/762157/campos_512_v4
+150/762160/campos_512_v4
+150/762163/campos_512_v4
+150/762168/campos_512_v4
+150/762170/campos_512_v4
+150/762178/campos_512_v4
+150/762187/campos_512_v4
+150/762206/campos_512_v4
+150/762210/campos_512_v4
+150/762220/campos_512_v4
+150/762222/campos_512_v4
+150/762228/campos_512_v4
+150/762231/campos_512_v4
+150/762242/campos_512_v4
+150/762249/campos_512_v4
+150/762265/campos_512_v4
+150/762266/campos_512_v4
+150/762271/campos_512_v4
+150/762282/campos_512_v4
+150/762284/campos_512_v4
+150/762288/campos_512_v4
+150/762290/campos_512_v4
+150/762294/campos_512_v4
+150/762301/campos_512_v4
+150/762303/campos_512_v4
+150/762306/campos_512_v4
+150/762309/campos_512_v4
+150/762313/campos_512_v4
+150/762314/campos_512_v4
+150/762319/campos_512_v4
+150/762320/campos_512_v4
+150/762326/campos_512_v4
+150/762332/campos_512_v4
+150/762333/campos_512_v4
+150/762336/campos_512_v4
+150/762342/campos_512_v4
+150/762344/campos_512_v4
+150/762355/campos_512_v4
+150/762364/campos_512_v4
+150/762368/campos_512_v4
+150/762378/campos_512_v4
+150/762383/campos_512_v4
+150/762412/campos_512_v4
+150/762416/campos_512_v4
+150/762417/campos_512_v4
+150/762421/campos_512_v4
+150/762444/campos_512_v4
+150/762452/campos_512_v4
+150/762462/campos_512_v4
+150/762466/campos_512_v4
+150/762496/campos_512_v4
+150/762497/campos_512_v4
+150/762513/campos_512_v4
+150/762538/campos_512_v4
+150/762542/campos_512_v4
+150/762554/campos_512_v4
+150/762557/campos_512_v4
+150/762577/campos_512_v4
+150/762585/campos_512_v4
+150/762593/campos_512_v4
+150/762595/campos_512_v4
+150/762606/campos_512_v4
+150/762608/campos_512_v4
+150/762611/campos_512_v4
+150/762620/campos_512_v4
+150/762636/campos_512_v4
+150/762640/campos_512_v4
+150/762642/campos_512_v4
+150/762659/campos_512_v4
+150/762667/campos_512_v4
+150/762672/campos_512_v4
+150/762678/campos_512_v4
+150/762690/campos_512_v4
+150/762693/campos_512_v4
+150/762700/campos_512_v4
+150/762704/campos_512_v4
+150/762709/campos_512_v4
+150/762712/campos_512_v4
+150/762717/campos_512_v4
+150/762718/campos_512_v4
+150/762725/campos_512_v4
+150/762750/campos_512_v4
+150/762760/campos_512_v4
+150/762768/campos_512_v4
+150/762781/campos_512_v4
+150/762783/campos_512_v4
+150/762786/campos_512_v4
+150/762792/campos_512_v4
+150/762794/campos_512_v4
+150/762797/campos_512_v4
+150/762806/campos_512_v4
+150/762813/campos_512_v4
+150/762818/campos_512_v4
+150/762843/campos_512_v4
+150/762844/campos_512_v4
+150/762849/campos_512_v4
+150/762853/campos_512_v4
+150/762854/campos_512_v4
+150/762855/campos_512_v4
+150/762859/campos_512_v4
+150/762861/campos_512_v4
+150/762862/campos_512_v4
+150/762865/campos_512_v4
+150/762878/campos_512_v4
+150/762885/campos_512_v4
+150/762895/campos_512_v4
+150/762896/campos_512_v4
+150/762908/campos_512_v4
+150/762929/campos_512_v4
+150/762957/campos_512_v4
+150/762962/campos_512_v4
+150/762965/campos_512_v4
+150/762967/campos_512_v4
+150/762969/campos_512_v4
+150/762981/campos_512_v4
+150/762991/campos_512_v4
+150/762998/campos_512_v4
+150/763022/campos_512_v4
+150/763035/campos_512_v4
+150/763047/campos_512_v4
+150/763067/campos_512_v4
+150/763074/campos_512_v4
+150/763076/campos_512_v4
+150/763082/campos_512_v4
+150/763085/campos_512_v4
+150/763087/campos_512_v4
+150/763094/campos_512_v4
+150/763098/campos_512_v4
+150/763110/campos_512_v4
+150/763115/campos_512_v4
+150/763125/campos_512_v4
+150/763148/campos_512_v4
+150/763153/campos_512_v4
+150/763156/campos_512_v4
+150/763157/campos_512_v4
+150/763158/campos_512_v4
+150/763174/campos_512_v4
+150/763197/campos_512_v4
+150/763209/campos_512_v4
+150/763210/campos_512_v4
+150/763217/campos_512_v4
+150/763231/campos_512_v4
+150/763241/campos_512_v4
+150/763244/campos_512_v4
+150/763265/campos_512_v4
+150/763266/campos_512_v4
+150/763287/campos_512_v4
+150/763288/campos_512_v4
+150/763296/campos_512_v4
+150/763298/campos_512_v4
+150/763300/campos_512_v4
+150/763315/campos_512_v4
+150/763328/campos_512_v4
+150/763331/campos_512_v4
+150/763336/campos_512_v4
+150/763341/campos_512_v4
+150/763343/campos_512_v4
+150/763349/campos_512_v4
+150/763368/campos_512_v4
+150/763395/campos_512_v4
+150/763402/campos_512_v4
+150/763405/campos_512_v4
+150/763408/campos_512_v4
+150/763418/campos_512_v4
+150/763424/campos_512_v4
+150/763435/campos_512_v4
+150/763442/campos_512_v4
+150/763451/campos_512_v4
+150/763456/campos_512_v4
+150/763457/campos_512_v4
+150/763468/campos_512_v4
+150/763469/campos_512_v4
+150/763471/campos_512_v4
+150/763472/campos_512_v4
+150/763477/campos_512_v4
+150/763483/campos_512_v4
+150/763490/campos_512_v4
+150/763492/campos_512_v4
+150/763498/campos_512_v4
+150/763499/campos_512_v4
+150/763522/campos_512_v4
+150/763523/campos_512_v4
+150/763529/campos_512_v4
+150/763535/campos_512_v4
+150/763537/campos_512_v4
+150/763538/campos_512_v4
+150/763543/campos_512_v4
+150/763544/campos_512_v4
+150/763547/campos_512_v4
+150/763559/campos_512_v4
+150/763573/campos_512_v4
+150/763583/campos_512_v4
+150/763584/campos_512_v4
+150/763609/campos_512_v4
+150/763613/campos_512_v4
+150/763614/campos_512_v4
+150/763622/campos_512_v4
+150/763635/campos_512_v4
+150/763636/campos_512_v4
+150/763637/campos_512_v4
+150/763647/campos_512_v4
+150/763653/campos_512_v4
+150/763671/campos_512_v4
+150/763673/campos_512_v4
+150/763676/campos_512_v4
+150/763681/campos_512_v4
+150/763682/campos_512_v4
+150/763684/campos_512_v4
+150/763689/campos_512_v4
+150/763693/campos_512_v4
+150/763699/campos_512_v4
+150/763707/campos_512_v4
+150/763717/campos_512_v4
+150/763722/campos_512_v4
+150/763735/campos_512_v4
+150/763737/campos_512_v4
+150/763746/campos_512_v4
+150/763759/campos_512_v4
+150/763779/campos_512_v4
+150/763789/campos_512_v4
+150/763800/campos_512_v4
+150/763803/campos_512_v4
+150/763811/campos_512_v4
+150/763843/campos_512_v4
+150/763844/campos_512_v4
+150/763849/campos_512_v4
+150/763850/campos_512_v4
+150/763852/campos_512_v4
+150/763857/campos_512_v4
+150/763861/campos_512_v4
+150/763862/campos_512_v4
+150/763864/campos_512_v4
+150/763868/campos_512_v4
+150/763878/campos_512_v4
+150/763885/campos_512_v4
+150/763889/campos_512_v4
+150/763913/campos_512_v4
+150/763919/campos_512_v4
+150/763937/campos_512_v4
+150/763941/campos_512_v4
+150/763953/campos_512_v4
+150/763959/campos_512_v4
+150/763981/campos_512_v4
+150/763986/campos_512_v4
+150/763989/campos_512_v4
+150/763991/campos_512_v4
+150/763992/campos_512_v4
+150/763999/campos_512_v4
+150/764007/campos_512_v4
+150/764023/campos_512_v4
+150/764024/campos_512_v4
+150/764031/campos_512_v4
+150/764034/campos_512_v4
+150/764035/campos_512_v4
+150/764036/campos_512_v4
+150/764047/campos_512_v4
+150/764050/campos_512_v4
+150/764059/campos_512_v4
+150/764061/campos_512_v4
+150/764065/campos_512_v4
+150/764066/campos_512_v4
+150/764077/campos_512_v4
+150/764083/campos_512_v4
+150/764088/campos_512_v4
+150/764089/campos_512_v4
+150/764097/campos_512_v4
+150/764103/campos_512_v4
+150/764105/campos_512_v4
+150/764110/campos_512_v4
+150/764115/campos_512_v4
+150/764131/campos_512_v4
+150/764132/campos_512_v4
+150/764133/campos_512_v4
+150/764134/campos_512_v4
+150/764147/campos_512_v4
+150/764148/campos_512_v4
+150/764156/campos_512_v4
+150/764165/campos_512_v4
+150/764170/campos_512_v4
+150/764185/campos_512_v4
+150/764190/campos_512_v4
+150/764191/campos_512_v4
+150/764194/campos_512_v4
+150/764195/campos_512_v4
+150/764201/campos_512_v4
+150/764213/campos_512_v4
+150/764216/campos_512_v4
+150/764240/campos_512_v4
+150/764254/campos_512_v4
+150/764258/campos_512_v4
+150/764260/campos_512_v4
+150/764266/campos_512_v4
+150/764268/campos_512_v4
+150/764280/campos_512_v4
+150/764294/campos_512_v4
+150/764296/campos_512_v4
+150/764306/campos_512_v4
+150/764307/campos_512_v4
+150/764310/campos_512_v4
+150/764319/campos_512_v4
+150/764328/campos_512_v4
+150/764331/campos_512_v4
+150/764339/campos_512_v4
+150/764349/campos_512_v4
+150/764352/campos_512_v4
+150/764357/campos_512_v4
+150/764371/campos_512_v4
+150/764376/campos_512_v4
+150/764392/campos_512_v4
+150/764405/campos_512_v4
+150/764418/campos_512_v4
+150/764424/campos_512_v4
+150/764428/campos_512_v4
+150/764431/campos_512_v4
+150/764432/campos_512_v4
+150/764441/campos_512_v4
+150/764442/campos_512_v4
+150/764443/campos_512_v4
+150/764444/campos_512_v4
+150/764445/campos_512_v4
+150/764458/campos_512_v4
+150/764459/campos_512_v4
+150/764466/campos_512_v4
+150/764489/campos_512_v4
+150/764502/campos_512_v4
+150/764503/campos_512_v4
+150/764509/campos_512_v4
+150/764511/campos_512_v4
+150/764533/campos_512_v4
+150/764538/campos_512_v4
+150/764545/campos_512_v4
+150/764549/campos_512_v4
+150/764552/campos_512_v4
+150/764568/campos_512_v4
+150/764569/campos_512_v4
+150/764582/campos_512_v4
+150/764587/campos_512_v4
+150/764589/campos_512_v4
+150/764602/campos_512_v4
+150/764605/campos_512_v4
+150/764607/campos_512_v4
+150/764609/campos_512_v4
+150/764611/campos_512_v4
+150/764614/campos_512_v4
+150/764628/campos_512_v4
+150/764631/campos_512_v4
+150/764632/campos_512_v4
+150/764637/campos_512_v4
+150/764642/campos_512_v4
+150/764644/campos_512_v4
+150/764649/campos_512_v4
+150/764658/campos_512_v4
+150/764669/campos_512_v4
+150/764670/campos_512_v4
+150/764676/campos_512_v4
+150/764682/campos_512_v4
+150/764702/campos_512_v4
+150/764707/campos_512_v4
+150/764712/campos_512_v4
+150/764722/campos_512_v4
+150/764723/campos_512_v4
+150/764731/campos_512_v4
+150/764737/campos_512_v4
+150/764747/campos_512_v4
+150/764758/campos_512_v4
+150/764763/campos_512_v4
+150/764785/campos_512_v4
+150/764786/campos_512_v4
+150/764788/campos_512_v4
+150/764791/campos_512_v4
+150/764806/campos_512_v4
+150/764819/campos_512_v4
+150/764825/campos_512_v4
+150/764826/campos_512_v4
+150/764833/campos_512_v4
+150/764840/campos_512_v4
+150/764852/campos_512_v4
+150/764853/campos_512_v4
+150/764856/campos_512_v4
+150/764867/campos_512_v4
+150/764874/campos_512_v4
+150/764876/campos_512_v4
+150/764879/campos_512_v4
+150/764897/campos_512_v4
+150/764912/campos_512_v4
+150/764913/campos_512_v4
+150/764917/campos_512_v4
+150/764921/campos_512_v4
+150/764924/campos_512_v4
+150/764925/campos_512_v4
+150/764932/campos_512_v4
+150/764936/campos_512_v4
+150/764942/campos_512_v4
+150/764955/campos_512_v4
+150/764958/campos_512_v4
+150/764972/campos_512_v4
+150/764973/campos_512_v4
+150/764976/campos_512_v4
+150/764984/campos_512_v4
+150/764987/campos_512_v4
+150/764989/campos_512_v4
+150/764992/campos_512_v4
+150/764994/campos_512_v4
+150/764998/campos_512_v4
+150/765000/campos_512_v4
+151/765015/campos_512_v4
+151/765030/campos_512_v4
+151/765036/campos_512_v4
+151/765037/campos_512_v4
+151/765045/campos_512_v4
+151/765051/campos_512_v4
+151/765062/campos_512_v4
+151/765069/campos_512_v4
+151/765070/campos_512_v4
+151/765081/campos_512_v4
+151/765087/campos_512_v4
+151/765090/campos_512_v4
+151/765093/campos_512_v4
+151/765094/campos_512_v4
+151/765097/campos_512_v4
+151/765113/campos_512_v4
+151/765120/campos_512_v4
+151/765129/campos_512_v4
+151/765161/campos_512_v4
+151/765165/campos_512_v4
+151/765176/campos_512_v4
+151/765207/campos_512_v4
+151/765208/campos_512_v4
+151/765213/campos_512_v4
+151/765224/campos_512_v4
+151/765225/campos_512_v4
+151/765229/campos_512_v4
+151/765238/campos_512_v4
+151/765242/campos_512_v4
+151/765249/campos_512_v4
+151/765252/campos_512_v4
+151/765255/campos_512_v4
+151/765260/campos_512_v4
+151/765261/campos_512_v4
+151/765281/campos_512_v4
+151/765285/campos_512_v4
+151/765297/campos_512_v4
+151/765304/campos_512_v4
+151/765308/campos_512_v4
+151/765311/campos_512_v4
+151/765327/campos_512_v4
+151/765328/campos_512_v4
+151/765333/campos_512_v4
+151/765339/campos_512_v4
+151/765346/campos_512_v4
+151/765352/campos_512_v4
+151/765354/campos_512_v4
+151/765365/campos_512_v4
+151/765367/campos_512_v4
+151/765369/campos_512_v4
+151/765380/campos_512_v4
+151/765382/campos_512_v4
+151/765386/campos_512_v4
+151/765387/campos_512_v4
+151/765389/campos_512_v4
+151/765392/campos_512_v4
+151/765406/campos_512_v4
+151/765407/campos_512_v4
+151/765418/campos_512_v4
+151/765422/campos_512_v4
+151/765429/campos_512_v4
+151/765430/campos_512_v4
+151/765438/campos_512_v4
+151/765442/campos_512_v4
+151/765445/campos_512_v4
+151/765453/campos_512_v4
+151/765465/campos_512_v4
+151/765483/campos_512_v4
+151/765484/campos_512_v4
+151/765488/campos_512_v4
+151/765503/campos_512_v4
+151/765507/campos_512_v4
+151/765508/campos_512_v4
+151/765513/campos_512_v4
+151/765515/campos_512_v4
+151/765516/campos_512_v4
+151/765521/campos_512_v4
+151/765528/campos_512_v4
+151/765530/campos_512_v4
+151/765539/campos_512_v4
+151/765550/campos_512_v4
+151/765551/campos_512_v4
+151/765552/campos_512_v4
+151/765554/campos_512_v4
+151/765555/campos_512_v4
+151/765570/campos_512_v4
+151/765579/campos_512_v4
+151/765598/campos_512_v4
+151/765599/campos_512_v4
+151/765608/campos_512_v4
+151/765639/campos_512_v4
+151/765643/campos_512_v4
+151/765662/campos_512_v4
+151/765677/campos_512_v4
+151/765682/campos_512_v4
+151/765700/campos_512_v4
+151/765725/campos_512_v4
+151/765726/campos_512_v4
+151/765727/campos_512_v4
+151/765728/campos_512_v4
+151/765729/campos_512_v4
+151/765730/campos_512_v4
+151/765732/campos_512_v4
+151/765738/campos_512_v4
+151/765741/campos_512_v4
+151/765743/campos_512_v4
+151/765744/campos_512_v4
+151/765745/campos_512_v4
+151/765748/campos_512_v4
+151/765749/campos_512_v4
+151/765752/campos_512_v4
+151/765766/campos_512_v4
+151/765767/campos_512_v4
+151/765769/campos_512_v4
+151/765771/campos_512_v4
+151/765773/campos_512_v4
+151/765778/campos_512_v4
+151/765783/campos_512_v4
+151/765794/campos_512_v4
+151/765797/campos_512_v4
+151/765803/campos_512_v4
+151/765806/campos_512_v4
+151/765815/campos_512_v4
+151/765820/campos_512_v4
+151/765831/campos_512_v4
+151/765840/campos_512_v4
+151/765858/campos_512_v4
+151/765862/campos_512_v4
+151/765867/campos_512_v4
+151/765874/campos_512_v4
+151/765880/campos_512_v4
+151/765889/campos_512_v4
+151/765893/campos_512_v4
+151/765909/campos_512_v4
+151/765911/campos_512_v4
+151/765912/campos_512_v4
+151/765913/campos_512_v4
+151/765918/campos_512_v4
+151/765920/campos_512_v4
+151/765923/campos_512_v4
+151/765930/campos_512_v4
+151/765933/campos_512_v4
+151/765936/campos_512_v4
+151/765944/campos_512_v4
+151/765952/campos_512_v4
+151/765955/campos_512_v4
+151/765963/campos_512_v4
+151/765973/campos_512_v4
+151/765974/campos_512_v4
+151/765983/campos_512_v4
+151/765986/campos_512_v4
+151/765988/campos_512_v4
+151/765997/campos_512_v4
+151/765998/campos_512_v4
+151/766004/campos_512_v4
+151/766019/campos_512_v4
+151/766023/campos_512_v4
+151/766031/campos_512_v4
+151/766035/campos_512_v4
+151/766046/campos_512_v4
+151/766048/campos_512_v4
+151/766050/campos_512_v4
+151/766053/campos_512_v4
+151/766054/campos_512_v4
+151/766077/campos_512_v4
+151/766087/campos_512_v4
+151/766093/campos_512_v4
+151/766099/campos_512_v4
+151/766103/campos_512_v4
+151/766121/campos_512_v4
+151/766144/campos_512_v4
+151/766155/campos_512_v4
+151/766157/campos_512_v4
+151/766173/campos_512_v4
+151/766180/campos_512_v4
+151/766192/campos_512_v4
+151/766197/campos_512_v4
+151/766198/campos_512_v4
+151/766199/campos_512_v4
+151/766206/campos_512_v4
+151/766219/campos_512_v4
+151/766224/campos_512_v4
+151/766239/campos_512_v4
+151/766245/campos_512_v4
+151/766246/campos_512_v4
+151/766248/campos_512_v4
+151/766253/campos_512_v4
+151/766256/campos_512_v4
+151/766261/campos_512_v4
+151/766264/campos_512_v4
+151/766271/campos_512_v4
+151/766276/campos_512_v4
+151/766287/campos_512_v4
+151/766292/campos_512_v4
+151/766295/campos_512_v4
+151/766309/campos_512_v4
+151/766310/campos_512_v4
+151/766313/campos_512_v4
+151/766314/campos_512_v4
+151/766316/campos_512_v4
+151/766319/campos_512_v4
+151/766320/campos_512_v4
+151/766329/campos_512_v4
+151/766370/campos_512_v4
+151/766382/campos_512_v4
+151/766387/campos_512_v4
+151/766389/campos_512_v4
+151/766423/campos_512_v4
+151/766442/campos_512_v4
+151/766443/campos_512_v4
+151/766448/campos_512_v4
+151/766456/campos_512_v4
+151/766478/campos_512_v4
+151/766481/campos_512_v4
+151/766487/campos_512_v4
+151/766499/campos_512_v4
+151/766501/campos_512_v4
+151/766503/campos_512_v4
+151/766506/campos_512_v4
+151/766517/campos_512_v4
+151/766518/campos_512_v4
+151/766522/campos_512_v4
+151/766544/campos_512_v4
+151/766555/campos_512_v4
+151/766560/campos_512_v4
+151/766564/campos_512_v4
+151/766577/campos_512_v4
+151/766595/campos_512_v4
+151/766604/campos_512_v4
+151/766609/campos_512_v4
+151/766628/campos_512_v4
+151/766631/campos_512_v4
+151/766647/campos_512_v4
+151/766650/campos_512_v4
+151/766653/campos_512_v4
+151/766663/campos_512_v4
+151/766665/campos_512_v4
+151/766670/campos_512_v4
+151/766676/campos_512_v4
+151/766688/campos_512_v4
+151/766697/campos_512_v4
+151/766706/campos_512_v4
+151/766714/campos_512_v4
+151/766718/campos_512_v4
+151/766734/campos_512_v4
+151/766738/campos_512_v4
+151/766741/campos_512_v4
+151/766748/campos_512_v4
+151/766751/campos_512_v4
+151/766753/campos_512_v4
+151/766756/campos_512_v4
+151/766757/campos_512_v4
+151/766762/campos_512_v4
+151/766763/campos_512_v4
+151/766765/campos_512_v4
+151/766771/campos_512_v4
+151/766778/campos_512_v4
+151/766802/campos_512_v4
+151/766807/campos_512_v4
+151/766814/campos_512_v4
+151/766815/campos_512_v4
+151/766816/campos_512_v4
+151/766819/campos_512_v4
+151/766820/campos_512_v4
+151/766823/campos_512_v4
+151/766841/campos_512_v4
+151/766852/campos_512_v4
+151/766857/campos_512_v4
+151/766869/campos_512_v4
+151/766878/campos_512_v4
+151/766879/campos_512_v4
+151/766880/campos_512_v4
+151/766884/campos_512_v4
+151/766889/campos_512_v4
+151/766891/campos_512_v4
+151/766908/campos_512_v4
+151/766909/campos_512_v4
+151/766910/campos_512_v4
+151/766916/campos_512_v4
+151/766919/campos_512_v4
+151/766922/campos_512_v4
+151/766947/campos_512_v4
+151/766956/campos_512_v4
+151/766957/campos_512_v4
+151/766969/campos_512_v4
+151/766970/campos_512_v4
+151/766977/campos_512_v4
+151/766978/campos_512_v4
+151/766982/campos_512_v4
+151/766983/campos_512_v4
+151/766988/campos_512_v4
+151/767001/campos_512_v4
+151/767003/campos_512_v4
+151/767005/campos_512_v4
+151/767009/campos_512_v4
+151/767018/campos_512_v4
+151/767031/campos_512_v4
+151/767033/campos_512_v4
+151/767037/campos_512_v4
+151/767043/campos_512_v4
+151/767044/campos_512_v4
+151/767051/campos_512_v4
+151/767058/campos_512_v4
+151/767059/campos_512_v4
+151/767085/campos_512_v4
+151/767087/campos_512_v4
+151/767090/campos_512_v4
+151/767092/campos_512_v4
+151/767111/campos_512_v4
+151/767117/campos_512_v4
+151/767120/campos_512_v4
+151/767131/campos_512_v4
+151/767136/campos_512_v4
+151/767150/campos_512_v4
+151/767151/campos_512_v4
+151/767158/campos_512_v4
+151/767162/campos_512_v4
+151/767164/campos_512_v4
+151/767170/campos_512_v4
+151/767174/campos_512_v4
+151/767180/campos_512_v4
+151/767192/campos_512_v4
+151/767193/campos_512_v4
+151/767197/campos_512_v4
+151/767199/campos_512_v4
+151/767202/campos_512_v4
+151/767207/campos_512_v4
+151/767210/campos_512_v4
+151/767231/campos_512_v4
+151/767238/campos_512_v4
+151/767242/campos_512_v4
+151/767248/campos_512_v4
+151/767257/campos_512_v4
+151/767265/campos_512_v4
+151/767269/campos_512_v4
+151/767271/campos_512_v4
+151/767275/campos_512_v4
+151/767280/campos_512_v4
+151/767283/campos_512_v4
+151/767294/campos_512_v4
+151/767299/campos_512_v4
+151/767334/campos_512_v4
+151/767340/campos_512_v4
+151/767345/campos_512_v4
+151/767349/campos_512_v4
+151/767357/campos_512_v4
+151/767363/campos_512_v4
+151/767367/campos_512_v4
+151/767368/campos_512_v4
+151/767380/campos_512_v4
+151/767399/campos_512_v4
+151/767417/campos_512_v4
+151/767428/campos_512_v4
+151/767449/campos_512_v4
+151/767451/campos_512_v4
+151/767453/campos_512_v4
+151/767460/campos_512_v4
+151/767461/campos_512_v4
+151/767464/campos_512_v4
+151/767468/campos_512_v4
+151/767469/campos_512_v4
+151/767480/campos_512_v4
+151/767481/campos_512_v4
+151/767482/campos_512_v4
+151/767486/campos_512_v4
+151/767491/campos_512_v4
+151/767498/campos_512_v4
+151/767500/campos_512_v4
+151/767517/campos_512_v4
+151/767530/campos_512_v4
+151/767531/campos_512_v4
+151/767535/campos_512_v4
+151/767542/campos_512_v4
+151/767544/campos_512_v4
+151/767555/campos_512_v4
+151/767563/campos_512_v4
+151/767565/campos_512_v4
+151/767586/campos_512_v4
+151/767587/campos_512_v4
+151/767592/campos_512_v4
+151/767596/campos_512_v4
+151/767598/campos_512_v4
+151/767599/campos_512_v4
+151/767600/campos_512_v4
+151/767611/campos_512_v4
+151/767615/campos_512_v4
+151/767616/campos_512_v4
+151/767620/campos_512_v4
+151/767621/campos_512_v4
+151/767627/campos_512_v4
+151/767640/campos_512_v4
+151/767645/campos_512_v4
+151/767648/campos_512_v4
+151/767654/campos_512_v4
+151/767660/campos_512_v4
+151/767670/campos_512_v4
+151/767671/campos_512_v4
+151/767672/campos_512_v4
+151/767680/campos_512_v4
+151/767686/campos_512_v4
+151/767693/campos_512_v4
+151/767696/campos_512_v4
+151/767706/campos_512_v4
+151/767722/campos_512_v4
+151/767732/campos_512_v4
+151/767735/campos_512_v4
+151/767738/campos_512_v4
+151/767742/campos_512_v4
+151/767751/campos_512_v4
+151/767752/campos_512_v4
+151/767758/campos_512_v4
+151/767760/campos_512_v4
+151/767778/campos_512_v4
+151/767781/campos_512_v4
+151/767784/campos_512_v4
+151/767790/campos_512_v4
+151/767791/campos_512_v4
+151/767803/campos_512_v4
+151/767804/campos_512_v4
+151/767812/campos_512_v4
+151/767816/campos_512_v4
+151/767826/campos_512_v4
+151/767827/campos_512_v4
+151/767829/campos_512_v4
+151/767836/campos_512_v4
+151/767842/campos_512_v4
+151/767849/campos_512_v4
+151/767871/campos_512_v4
+151/767878/campos_512_v4
+151/767890/campos_512_v4
+151/767891/campos_512_v4
+151/767910/campos_512_v4
+151/767912/campos_512_v4
+151/767917/campos_512_v4
+151/767930/campos_512_v4
+151/767936/campos_512_v4
+151/767937/campos_512_v4
+151/767939/campos_512_v4
+151/767944/campos_512_v4
+151/767952/campos_512_v4
+151/767955/campos_512_v4
+151/767965/campos_512_v4
+151/767967/campos_512_v4
+151/767970/campos_512_v4
+151/767974/campos_512_v4
+151/767977/campos_512_v4
+151/767985/campos_512_v4
+151/767995/campos_512_v4
+151/767998/campos_512_v4
+151/768002/campos_512_v4
+151/768026/campos_512_v4
+151/768029/campos_512_v4
+151/768033/campos_512_v4
+151/768045/campos_512_v4
+151/768050/campos_512_v4
+151/768060/campos_512_v4
+151/768066/campos_512_v4
+151/768079/campos_512_v4
+151/768089/campos_512_v4
+151/768094/campos_512_v4
+151/768095/campos_512_v4
+151/768107/campos_512_v4
+151/768108/campos_512_v4
+151/768114/campos_512_v4
+151/768118/campos_512_v4
+151/768123/campos_512_v4
+151/768131/campos_512_v4
+151/768132/campos_512_v4
+151/768135/campos_512_v4
+151/768141/campos_512_v4
+151/768145/campos_512_v4
+151/768146/campos_512_v4
+151/768158/campos_512_v4
+151/768165/campos_512_v4
+151/768169/campos_512_v4
+151/768172/campos_512_v4
+151/768175/campos_512_v4
+151/768183/campos_512_v4
+151/768195/campos_512_v4
+151/768205/campos_512_v4
+151/768227/campos_512_v4
+151/768235/campos_512_v4
+151/768242/campos_512_v4
+151/768255/campos_512_v4
+151/768260/campos_512_v4
+151/768272/campos_512_v4
+151/768273/campos_512_v4
+151/768276/campos_512_v4
+151/768297/campos_512_v4
+151/768305/campos_512_v4
+151/768312/campos_512_v4
+151/768332/campos_512_v4
+151/768337/campos_512_v4
+151/768338/campos_512_v4
+151/768341/campos_512_v4
+151/768343/campos_512_v4
+151/768368/campos_512_v4
+151/768372/campos_512_v4
+151/768375/campos_512_v4
+151/768381/campos_512_v4
+151/768391/campos_512_v4
+151/768393/campos_512_v4
+151/768400/campos_512_v4
+151/768406/campos_512_v4
+151/768410/campos_512_v4
+151/768428/campos_512_v4
+151/768429/campos_512_v4
+151/768465/campos_512_v4
+151/768473/campos_512_v4
+151/768479/campos_512_v4
+151/768483/campos_512_v4
+151/768492/campos_512_v4
+151/768495/campos_512_v4
+151/768502/campos_512_v4
+151/768520/campos_512_v4
+151/768524/campos_512_v4
+151/768535/campos_512_v4
+151/768536/campos_512_v4
+151/768538/campos_512_v4
+151/768554/campos_512_v4
+151/768556/campos_512_v4
+151/768562/campos_512_v4
+151/768572/campos_512_v4
+151/768574/campos_512_v4
+151/768575/campos_512_v4
+151/768587/campos_512_v4
+151/768606/campos_512_v4
+151/768608/campos_512_v4
+151/768615/campos_512_v4
+151/768632/campos_512_v4
+151/768663/campos_512_v4
+151/768675/campos_512_v4
+151/768678/campos_512_v4
+151/768681/campos_512_v4
+151/768693/campos_512_v4
+151/768694/campos_512_v4
+151/768703/campos_512_v4
+151/768704/campos_512_v4
+151/768706/campos_512_v4
+151/768708/campos_512_v4
+151/768712/campos_512_v4
+151/768724/campos_512_v4
+151/768727/campos_512_v4
+151/768729/campos_512_v4
+151/768730/campos_512_v4
+151/768741/campos_512_v4
+151/768742/campos_512_v4
+151/768746/campos_512_v4
+151/768758/campos_512_v4
+151/768765/campos_512_v4
+151/768766/campos_512_v4
+151/768772/campos_512_v4
+151/768779/campos_512_v4
+151/768795/campos_512_v4
+151/768814/campos_512_v4
+151/768820/campos_512_v4
+151/768832/campos_512_v4
+151/768837/campos_512_v4
+151/768839/campos_512_v4
+151/768840/campos_512_v4
+151/768843/campos_512_v4
+151/768845/campos_512_v4
+151/768853/campos_512_v4
+151/768860/campos_512_v4
+151/768861/campos_512_v4
+151/768876/campos_512_v4
+151/768882/campos_512_v4
+151/768886/campos_512_v4
+151/768889/campos_512_v4
+151/768896/campos_512_v4
+151/768902/campos_512_v4
+151/768903/campos_512_v4
+151/768904/campos_512_v4
+151/768905/campos_512_v4
+151/768913/campos_512_v4
+151/768941/campos_512_v4
+151/768952/campos_512_v4
+151/768958/campos_512_v4
+151/768963/campos_512_v4
+151/768967/campos_512_v4
+151/768973/campos_512_v4
+151/768975/campos_512_v4
+151/768976/campos_512_v4
+151/768990/campos_512_v4
+151/768992/campos_512_v4
+151/769000/campos_512_v4
+151/769006/campos_512_v4
+151/769019/campos_512_v4
+151/769023/campos_512_v4
+151/769027/campos_512_v4
+151/769030/campos_512_v4
+151/769042/campos_512_v4
+151/769048/campos_512_v4
+151/769049/campos_512_v4
+151/769054/campos_512_v4
+151/769058/campos_512_v4
+151/769070/campos_512_v4
+151/769079/campos_512_v4
+151/769084/campos_512_v4
+151/769096/campos_512_v4
+151/769099/campos_512_v4
+151/769113/campos_512_v4
+151/769125/campos_512_v4
+151/769131/campos_512_v4
+151/769151/campos_512_v4
+151/769153/campos_512_v4
+151/769157/campos_512_v4
+151/769159/campos_512_v4
+151/769160/campos_512_v4
+151/769164/campos_512_v4
+151/769186/campos_512_v4
+151/769193/campos_512_v4
+151/769199/campos_512_v4
+151/769217/campos_512_v4
+151/769221/campos_512_v4
+151/769222/campos_512_v4
+151/769225/campos_512_v4
+151/769227/campos_512_v4
+151/769254/campos_512_v4
+151/769273/campos_512_v4
+151/769281/campos_512_v4
+151/769282/campos_512_v4
+151/769305/campos_512_v4
+151/769308/campos_512_v4
+151/769318/campos_512_v4
+151/769319/campos_512_v4
+151/769321/campos_512_v4
+151/769328/campos_512_v4
+151/769329/campos_512_v4
+151/769344/campos_512_v4
+151/769354/campos_512_v4
+151/769364/campos_512_v4
+151/769367/campos_512_v4
+151/769369/campos_512_v4
+151/769387/campos_512_v4
+151/769389/campos_512_v4
+151/769400/campos_512_v4
+151/769404/campos_512_v4
+151/769416/campos_512_v4
+151/769418/campos_512_v4
+151/769422/campos_512_v4
+151/769425/campos_512_v4
+151/769426/campos_512_v4
+151/769429/campos_512_v4
+151/769441/campos_512_v4
+151/769446/campos_512_v4
+151/769448/campos_512_v4
+151/769453/campos_512_v4
+151/769466/campos_512_v4
+151/769468/campos_512_v4
+151/769472/campos_512_v4
+151/769497/campos_512_v4
+151/769501/campos_512_v4
+151/769502/campos_512_v4
+151/769510/campos_512_v4
+151/769511/campos_512_v4
+151/769514/campos_512_v4
+151/769521/campos_512_v4
+151/769522/campos_512_v4
+151/769524/campos_512_v4
+151/769528/campos_512_v4
+151/769533/campos_512_v4
+151/769549/campos_512_v4
+151/769554/campos_512_v4
+151/769567/campos_512_v4
+151/769570/campos_512_v4
+151/769576/campos_512_v4
+151/769592/campos_512_v4
+151/769603/campos_512_v4
+151/769637/campos_512_v4
+151/769638/campos_512_v4
+151/769641/campos_512_v4
+151/769648/campos_512_v4
+151/769656/campos_512_v4
+151/769659/campos_512_v4
+151/769679/campos_512_v4
+151/769681/campos_512_v4
+151/769685/campos_512_v4
+151/769693/campos_512_v4
+151/769698/campos_512_v4
+151/769713/campos_512_v4
+151/769722/campos_512_v4
+151/769723/campos_512_v4
+151/769761/campos_512_v4
+151/769766/campos_512_v4
+151/769774/campos_512_v4
+151/769777/campos_512_v4
+151/769781/campos_512_v4
+151/769791/campos_512_v4
+151/769798/campos_512_v4
+151/769805/campos_512_v4
+151/769811/campos_512_v4
+151/769812/campos_512_v4
+151/769816/campos_512_v4
+151/769836/campos_512_v4
+151/769846/campos_512_v4
+151/769850/campos_512_v4
+151/769851/campos_512_v4
+151/769859/campos_512_v4
+151/769862/campos_512_v4
+151/769866/campos_512_v4
+151/769867/campos_512_v4
+151/769875/campos_512_v4
+151/769877/campos_512_v4
+151/769880/campos_512_v4
+151/769881/campos_512_v4
+151/769882/campos_512_v4
+151/769895/campos_512_v4
+151/769899/campos_512_v4
+151/769900/campos_512_v4
+151/769905/campos_512_v4
+151/769910/campos_512_v4
+151/769911/campos_512_v4
+151/769915/campos_512_v4
+151/769917/campos_512_v4
+151/769927/campos_512_v4
+151/769933/campos_512_v4
+151/769935/campos_512_v4
+151/769938/campos_512_v4
+151/769963/campos_512_v4
+151/769966/campos_512_v4
+151/769971/campos_512_v4
+151/769976/campos_512_v4
+151/769981/campos_512_v4
+151/769988/campos_512_v4
+152/770004/campos_512_v4
+152/770007/campos_512_v4
+152/770036/campos_512_v4
+152/770043/campos_512_v4
+152/770045/campos_512_v4
+152/770047/campos_512_v4
+152/770054/campos_512_v4
+152/770064/campos_512_v4
+152/770075/campos_512_v4
+152/770079/campos_512_v4
+152/770083/campos_512_v4
+152/770093/campos_512_v4
+152/770112/campos_512_v4
+152/770117/campos_512_v4
+152/770119/campos_512_v4
+152/770127/campos_512_v4
+152/770139/campos_512_v4
+152/770141/campos_512_v4
+152/770144/campos_512_v4
+152/770152/campos_512_v4
+152/770168/campos_512_v4
+152/770174/campos_512_v4
+152/770183/campos_512_v4
+152/770198/campos_512_v4
+152/770199/campos_512_v4
+152/770204/campos_512_v4
+152/770210/campos_512_v4
+152/770240/campos_512_v4
+152/770243/campos_512_v4
+152/770248/campos_512_v4
+152/770257/campos_512_v4
+152/770265/campos_512_v4
+152/770273/campos_512_v4
+152/770291/campos_512_v4
+152/770295/campos_512_v4
+152/770296/campos_512_v4
+152/770332/campos_512_v4
+152/770347/campos_512_v4
+152/770348/campos_512_v4
+152/770350/campos_512_v4
+152/770356/campos_512_v4
+152/770371/campos_512_v4
+152/770381/campos_512_v4
+152/770387/campos_512_v4
+152/770393/campos_512_v4
+152/770407/campos_512_v4
+152/770416/campos_512_v4
+152/770419/campos_512_v4
+152/770434/campos_512_v4
+152/770441/campos_512_v4
+152/770443/campos_512_v4
+152/770448/campos_512_v4
+152/770465/campos_512_v4
+152/770468/campos_512_v4
+152/770471/campos_512_v4
+152/770475/campos_512_v4
+152/770482/campos_512_v4
+152/770493/campos_512_v4
+152/770504/campos_512_v4
+152/770507/campos_512_v4
+152/770526/campos_512_v4
+152/770527/campos_512_v4
+152/770529/campos_512_v4
+152/770532/campos_512_v4
+152/770546/campos_512_v4
+152/770547/campos_512_v4
+152/770564/campos_512_v4
+152/770570/campos_512_v4
+152/770590/campos_512_v4
+152/770591/campos_512_v4
+152/770601/campos_512_v4
+152/770616/campos_512_v4
+152/770619/campos_512_v4
+152/770622/campos_512_v4
+152/770623/campos_512_v4
+152/770635/campos_512_v4
+152/770637/campos_512_v4
+152/770639/campos_512_v4
+152/770641/campos_512_v4
+152/770680/campos_512_v4
+152/770690/campos_512_v4
+152/770693/campos_512_v4
+152/770697/campos_512_v4
+152/770713/campos_512_v4
+152/770716/campos_512_v4
+152/770717/campos_512_v4
+152/770724/campos_512_v4
+152/770735/campos_512_v4
+152/770740/campos_512_v4
+152/770779/campos_512_v4
+152/770785/campos_512_v4
+152/770792/campos_512_v4
+152/770799/campos_512_v4
+152/770812/campos_512_v4
+152/770826/campos_512_v4
+152/770829/campos_512_v4
+152/770834/campos_512_v4
+152/770841/campos_512_v4
+152/770846/campos_512_v4
+152/770847/campos_512_v4
+152/770848/campos_512_v4
+152/770850/campos_512_v4
+152/770855/campos_512_v4
+152/770870/campos_512_v4
+152/770871/campos_512_v4
+152/770872/campos_512_v4
+152/770874/campos_512_v4
+152/770883/campos_512_v4
+152/770886/campos_512_v4
+152/770888/campos_512_v4
+152/770919/campos_512_v4
+152/770920/campos_512_v4
+152/770930/campos_512_v4
+152/770940/campos_512_v4
+152/770970/campos_512_v4
+152/770985/campos_512_v4
+152/770988/campos_512_v4
+152/770994/campos_512_v4
+152/770995/campos_512_v4
+152/771012/campos_512_v4
+152/771022/campos_512_v4
+152/771026/campos_512_v4
+152/771029/campos_512_v4
+152/771030/campos_512_v4
+152/771040/campos_512_v4
+152/771042/campos_512_v4
+152/771059/campos_512_v4
+152/771061/campos_512_v4
+152/771062/campos_512_v4
+152/771077/campos_512_v4
+152/771081/campos_512_v4
+152/771112/campos_512_v4
+152/771113/campos_512_v4
+152/771130/campos_512_v4
+152/771139/campos_512_v4
+152/771145/campos_512_v4
+152/771152/campos_512_v4
+152/771160/campos_512_v4
+152/771161/campos_512_v4
+152/771178/campos_512_v4
+152/771193/campos_512_v4
+152/771201/campos_512_v4
+152/771203/campos_512_v4
+152/771206/campos_512_v4
+152/771223/campos_512_v4
+152/771225/campos_512_v4
+152/771236/campos_512_v4
+152/771243/campos_512_v4
+152/771260/campos_512_v4
+152/771271/campos_512_v4
+152/771284/campos_512_v4
+152/771292/campos_512_v4
+152/771301/campos_512_v4
+152/771302/campos_512_v4
+152/771309/campos_512_v4
+152/771312/campos_512_v4
+152/771315/campos_512_v4
+152/771319/campos_512_v4
+152/771321/campos_512_v4
+152/771337/campos_512_v4
+152/771342/campos_512_v4
+152/771346/campos_512_v4
+152/771356/campos_512_v4
+152/771374/campos_512_v4
+152/771383/campos_512_v4
+152/771384/campos_512_v4
+152/771387/campos_512_v4
+152/771405/campos_512_v4
+152/771424/campos_512_v4
+152/771435/campos_512_v4
+152/771445/campos_512_v4
+152/771452/campos_512_v4
+152/771463/campos_512_v4
+152/771467/campos_512_v4
+152/771482/campos_512_v4
+152/771483/campos_512_v4
+152/771495/campos_512_v4
+152/771511/campos_512_v4
+152/771526/campos_512_v4
+152/771527/campos_512_v4
+152/771538/campos_512_v4
+152/771545/campos_512_v4
+152/771552/campos_512_v4
+152/771557/campos_512_v4
+152/771578/campos_512_v4
+152/771579/campos_512_v4
+152/771585/campos_512_v4
+152/771586/campos_512_v4
+152/771595/campos_512_v4
+152/771614/campos_512_v4
+152/771620/campos_512_v4
+152/771627/campos_512_v4
+152/771641/campos_512_v4
+152/771644/campos_512_v4
+152/771654/campos_512_v4
+152/771661/campos_512_v4
+152/771666/campos_512_v4
+152/771685/campos_512_v4
+152/771686/campos_512_v4
+152/771697/campos_512_v4
+152/771712/campos_512_v4
+152/771718/campos_512_v4
+152/771727/campos_512_v4
+152/771728/campos_512_v4
+152/771745/campos_512_v4
+152/771746/campos_512_v4
+152/771752/campos_512_v4
+152/771765/campos_512_v4
+152/771787/campos_512_v4
+152/771788/campos_512_v4
+152/771790/campos_512_v4
+152/771804/campos_512_v4
+152/771806/campos_512_v4
+152/771831/campos_512_v4
+152/771834/campos_512_v4
+152/771836/campos_512_v4
+152/771837/campos_512_v4
+152/771846/campos_512_v4
+152/771858/campos_512_v4
+152/771873/campos_512_v4
+152/771880/campos_512_v4
+152/771895/campos_512_v4
+152/771897/campos_512_v4
+152/771906/campos_512_v4
+152/771925/campos_512_v4
+152/771929/campos_512_v4
+152/771933/campos_512_v4
+152/771950/campos_512_v4
+152/771951/campos_512_v4
+152/771954/campos_512_v4
+152/771956/campos_512_v4
+152/771960/campos_512_v4
+152/771979/campos_512_v4
+152/771983/campos_512_v4
+152/771988/campos_512_v4
+152/772011/campos_512_v4
+152/772028/campos_512_v4
+152/772049/campos_512_v4
+152/772054/campos_512_v4
+152/772056/campos_512_v4
+152/772057/campos_512_v4
+152/772060/campos_512_v4
+152/772063/campos_512_v4
+152/772067/campos_512_v4
+152/772083/campos_512_v4
+152/772093/campos_512_v4
+152/772107/campos_512_v4
+152/772112/campos_512_v4
+152/772120/campos_512_v4
+152/772123/campos_512_v4
+152/772124/campos_512_v4
+152/772126/campos_512_v4
+152/772132/campos_512_v4
+152/772137/campos_512_v4
+152/772154/campos_512_v4
+152/772159/campos_512_v4
+152/772175/campos_512_v4
+152/772191/campos_512_v4
+152/772197/campos_512_v4
+152/772209/campos_512_v4
+152/772241/campos_512_v4
+152/772244/campos_512_v4
+152/772249/campos_512_v4
+152/772252/campos_512_v4
+152/772255/campos_512_v4
+152/772260/campos_512_v4
+152/772268/campos_512_v4
+152/772270/campos_512_v4
+152/772279/campos_512_v4
+152/772283/campos_512_v4
+152/772288/campos_512_v4
+152/772295/campos_512_v4
+152/772301/campos_512_v4
+152/772324/campos_512_v4
+152/772325/campos_512_v4
+152/772351/campos_512_v4
+152/772375/campos_512_v4
+152/772388/campos_512_v4
+152/772389/campos_512_v4
+152/772391/campos_512_v4
+152/772402/campos_512_v4
+152/772405/campos_512_v4
+152/772406/campos_512_v4
+152/772414/campos_512_v4
+152/772415/campos_512_v4
+152/772421/campos_512_v4
+152/772422/campos_512_v4
+152/772423/campos_512_v4
+152/772430/campos_512_v4
+152/772432/campos_512_v4
+152/772433/campos_512_v4
+152/772435/campos_512_v4
+152/772442/campos_512_v4
+152/772444/campos_512_v4
+152/772459/campos_512_v4
+152/772462/campos_512_v4
+152/772463/campos_512_v4
+152/772468/campos_512_v4
+152/772481/campos_512_v4
+152/772485/campos_512_v4
+152/772495/campos_512_v4
+152/772508/campos_512_v4
+152/772522/campos_512_v4
+152/772529/campos_512_v4
+152/772530/campos_512_v4
+152/772533/campos_512_v4
+152/772550/campos_512_v4
+152/772553/campos_512_v4
+152/772585/campos_512_v4
+152/772596/campos_512_v4
+152/772599/campos_512_v4
+152/772624/campos_512_v4
+152/772627/campos_512_v4
+152/772629/campos_512_v4
+152/772641/campos_512_v4
+152/772642/campos_512_v4
+152/772649/campos_512_v4
+152/772673/campos_512_v4
+152/772680/campos_512_v4
+152/772682/campos_512_v4
+152/772705/campos_512_v4
+152/772707/campos_512_v4
+152/772712/campos_512_v4
+152/772718/campos_512_v4
+152/772728/campos_512_v4
+152/772730/campos_512_v4
+152/772741/campos_512_v4
+152/772745/campos_512_v4
+152/772748/campos_512_v4
+152/772751/campos_512_v4
+152/772759/campos_512_v4
+152/772761/campos_512_v4
+152/772764/campos_512_v4
+152/772765/campos_512_v4
+152/772767/campos_512_v4
+152/772772/campos_512_v4
+152/772778/campos_512_v4
+152/772781/campos_512_v4
+152/772782/campos_512_v4
+152/772796/campos_512_v4
+152/772811/campos_512_v4
+152/772820/campos_512_v4
+152/772821/campos_512_v4
+152/772822/campos_512_v4
+152/772830/campos_512_v4
+152/772837/campos_512_v4
+152/772839/campos_512_v4
+152/772862/campos_512_v4
+152/772868/campos_512_v4
+152/772885/campos_512_v4
+152/772888/campos_512_v4
+152/772896/campos_512_v4
+152/772924/campos_512_v4
+152/772927/campos_512_v4
+152/772930/campos_512_v4
+152/772933/campos_512_v4
+152/772934/campos_512_v4
+152/772939/campos_512_v4
+152/772944/campos_512_v4
+152/772952/campos_512_v4
+152/772967/campos_512_v4
+152/772972/campos_512_v4
+152/772973/campos_512_v4
+152/772977/campos_512_v4
+152/772978/campos_512_v4
+152/772983/campos_512_v4
+152/772988/campos_512_v4
+152/772997/campos_512_v4
+152/773000/campos_512_v4
+152/773004/campos_512_v4
+152/773008/campos_512_v4
+152/773011/campos_512_v4
+152/773020/campos_512_v4
+152/773028/campos_512_v4
+152/773029/campos_512_v4
+152/773030/campos_512_v4
+152/773050/campos_512_v4
+152/773052/campos_512_v4
+152/773061/campos_512_v4
+152/773066/campos_512_v4
+152/773095/campos_512_v4
+152/773097/campos_512_v4
+152/773122/campos_512_v4
+152/773133/campos_512_v4
+152/773134/campos_512_v4
+152/773146/campos_512_v4
+152/773168/campos_512_v4
+152/773176/campos_512_v4
+152/773191/campos_512_v4
+152/773196/campos_512_v4
+152/773214/campos_512_v4
+152/773231/campos_512_v4
+152/773232/campos_512_v4
+152/773247/campos_512_v4
+152/773248/campos_512_v4
+152/773254/campos_512_v4
+152/773259/campos_512_v4
+152/773261/campos_512_v4
+152/773266/campos_512_v4
+152/773272/campos_512_v4
+152/773274/campos_512_v4
+152/773279/campos_512_v4
+152/773292/campos_512_v4
+152/773300/campos_512_v4
+152/773317/campos_512_v4
+152/773318/campos_512_v4
+152/773326/campos_512_v4
+152/773333/campos_512_v4
+152/773344/campos_512_v4
+152/773358/campos_512_v4
+152/773359/campos_512_v4
+152/773363/campos_512_v4
+152/773364/campos_512_v4
+152/773388/campos_512_v4
+152/773423/campos_512_v4
+152/773424/campos_512_v4
+152/773435/campos_512_v4
+152/773438/campos_512_v4
+152/773441/campos_512_v4
+152/773442/campos_512_v4
+152/773443/campos_512_v4
+152/773453/campos_512_v4
+152/773456/campos_512_v4
+152/773474/campos_512_v4
+152/773477/campos_512_v4
+152/773480/campos_512_v4
+152/773482/campos_512_v4
+152/773491/campos_512_v4
+152/773492/campos_512_v4
+152/773495/campos_512_v4
+152/773502/campos_512_v4
+152/773507/campos_512_v4
+152/773531/campos_512_v4
+152/773539/campos_512_v4
+152/773541/campos_512_v4
+152/773543/campos_512_v4
+152/773546/campos_512_v4
+152/773547/campos_512_v4
+152/773562/campos_512_v4
+152/773569/campos_512_v4
+152/773574/campos_512_v4
+152/773576/campos_512_v4
+152/773583/campos_512_v4
+152/773586/campos_512_v4
+152/773598/campos_512_v4
+152/773605/campos_512_v4
+152/773610/campos_512_v4
+152/773614/campos_512_v4
+152/773622/campos_512_v4
+152/773639/campos_512_v4
+152/773695/campos_512_v4
+152/773696/campos_512_v4
+152/773699/campos_512_v4
+152/773708/campos_512_v4
+152/773725/campos_512_v4
+152/773726/campos_512_v4
+152/773728/campos_512_v4
+152/773752/campos_512_v4
+152/773754/campos_512_v4
+152/773757/campos_512_v4
+152/773761/campos_512_v4
+152/773762/campos_512_v4
+152/773785/campos_512_v4
+152/773808/campos_512_v4
+152/773811/campos_512_v4
+152/773827/campos_512_v4
+152/773831/campos_512_v4
+152/773837/campos_512_v4
+152/773841/campos_512_v4
+152/773845/campos_512_v4
+152/773848/campos_512_v4
+152/773864/campos_512_v4
+152/773872/campos_512_v4
+152/773884/campos_512_v4
+152/773885/campos_512_v4
+152/773889/campos_512_v4
+152/773891/campos_512_v4
+152/773892/campos_512_v4
+152/773895/campos_512_v4
+152/773897/campos_512_v4
+152/773902/campos_512_v4
+152/773908/campos_512_v4
+152/773918/campos_512_v4
+152/773922/campos_512_v4
+152/773923/campos_512_v4
+152/773925/campos_512_v4
+152/773941/campos_512_v4
+152/773942/campos_512_v4
+152/773948/campos_512_v4
+152/773954/campos_512_v4
+152/773955/campos_512_v4
+152/773958/campos_512_v4
+152/773959/campos_512_v4
+152/773964/campos_512_v4
+152/773970/campos_512_v4
+152/773976/campos_512_v4
+152/773991/campos_512_v4
+152/773995/campos_512_v4
+152/774010/campos_512_v4
+152/774023/campos_512_v4
+152/774024/campos_512_v4
+152/774025/campos_512_v4
+152/774026/campos_512_v4
+152/774027/campos_512_v4
+152/774032/campos_512_v4
+152/774042/campos_512_v4
+152/774043/campos_512_v4
+152/774044/campos_512_v4
+152/774047/campos_512_v4
+152/774061/campos_512_v4
+152/774062/campos_512_v4
+152/774068/campos_512_v4
+152/774070/campos_512_v4
+152/774075/campos_512_v4
+152/774088/campos_512_v4
+152/774093/campos_512_v4
+152/774094/campos_512_v4
+152/774101/campos_512_v4
+152/774106/campos_512_v4
+152/774117/campos_512_v4
+152/774120/campos_512_v4
+152/774122/campos_512_v4
+152/774127/campos_512_v4
+152/774133/campos_512_v4
+152/774135/campos_512_v4
+152/774138/campos_512_v4
+152/774139/campos_512_v4
+152/774146/campos_512_v4
+152/774151/campos_512_v4
+152/774167/campos_512_v4
+152/774194/campos_512_v4
+152/774205/campos_512_v4
+152/774210/campos_512_v4
+152/774213/campos_512_v4
+152/774241/campos_512_v4
+152/774269/campos_512_v4
+152/774282/campos_512_v4
+152/774305/campos_512_v4
+152/774307/campos_512_v4
+152/774314/campos_512_v4
+152/774331/campos_512_v4
+152/774339/campos_512_v4
+152/774343/campos_512_v4
+152/774348/campos_512_v4
+152/774356/campos_512_v4
+152/774362/campos_512_v4
+152/774364/campos_512_v4
+152/774381/campos_512_v4
+152/774389/campos_512_v4
+152/774391/campos_512_v4
+152/774394/campos_512_v4
+152/774395/campos_512_v4
+152/774401/campos_512_v4
+152/774409/campos_512_v4
+152/774412/campos_512_v4
+152/774417/campos_512_v4
+152/774418/campos_512_v4
+152/774429/campos_512_v4
+152/774430/campos_512_v4
+152/774439/campos_512_v4
+152/774441/campos_512_v4
+152/774442/campos_512_v4
+152/774443/campos_512_v4
+152/774454/campos_512_v4
+152/774468/campos_512_v4
+152/774475/campos_512_v4
+152/774481/campos_512_v4
+152/774482/campos_512_v4
+152/774484/campos_512_v4
+152/774490/campos_512_v4
+152/774497/campos_512_v4
+152/774510/campos_512_v4
+152/774516/campos_512_v4
+152/774521/campos_512_v4
+152/774533/campos_512_v4
+152/774539/campos_512_v4
+152/774540/campos_512_v4
+152/774553/campos_512_v4
+152/774554/campos_512_v4
+152/774560/campos_512_v4
+152/774565/campos_512_v4
+152/774568/campos_512_v4
+152/774569/campos_512_v4
+152/774582/campos_512_v4
+152/774584/campos_512_v4
+152/774589/campos_512_v4
+152/774598/campos_512_v4
+152/774606/campos_512_v4
+152/774608/campos_512_v4
+152/774612/campos_512_v4
+152/774615/campos_512_v4
+152/774641/campos_512_v4
+152/774648/campos_512_v4
+152/774650/campos_512_v4
+152/774653/campos_512_v4
+152/774658/campos_512_v4
+152/774660/campos_512_v4
+152/774672/campos_512_v4
+152/774676/campos_512_v4
+152/774677/campos_512_v4
+152/774684/campos_512_v4
+152/774692/campos_512_v4
+152/774694/campos_512_v4
+152/774698/campos_512_v4
+152/774700/campos_512_v4
+152/774708/campos_512_v4
+152/774714/campos_512_v4
+152/774715/campos_512_v4
+152/774732/campos_512_v4
+152/774734/campos_512_v4
+152/774740/campos_512_v4
+152/774745/campos_512_v4
+152/774775/campos_512_v4
+152/774786/campos_512_v4
+152/774791/campos_512_v4
+152/774793/campos_512_v4
+152/774801/campos_512_v4
+152/774806/campos_512_v4
+152/774812/campos_512_v4
+152/774813/campos_512_v4
+152/774817/campos_512_v4
+152/774822/campos_512_v4
+152/774828/campos_512_v4
+152/774831/campos_512_v4
+152/774838/campos_512_v4
+152/774843/campos_512_v4
+152/774861/campos_512_v4
+152/774867/campos_512_v4
+152/774886/campos_512_v4
+152/774887/campos_512_v4
+152/774896/campos_512_v4
+152/774909/campos_512_v4
+152/774913/campos_512_v4
+152/774914/campos_512_v4
+152/774915/campos_512_v4
+152/774927/campos_512_v4
+152/774946/campos_512_v4
+152/774948/campos_512_v4
+152/774955/campos_512_v4
+152/774971/campos_512_v4
+152/774973/campos_512_v4
+152/774978/campos_512_v4
+152/775001/campos_512_v4
+153/775035/campos_512_v4
+153/775051/campos_512_v4
+153/775057/campos_512_v4
+153/775063/campos_512_v4
+153/775067/campos_512_v4
+153/775068/campos_512_v4
+153/775127/campos_512_v4
+153/775132/campos_512_v4
+153/775137/campos_512_v4
+153/775144/campos_512_v4
+153/775169/campos_512_v4
+153/775174/campos_512_v4
+153/775187/campos_512_v4
+153/775195/campos_512_v4
+153/775198/campos_512_v4
+153/775205/campos_512_v4
+153/775220/campos_512_v4
+153/775223/campos_512_v4
+153/775231/campos_512_v4
+153/775232/campos_512_v4
+153/775233/campos_512_v4
+153/775248/campos_512_v4
+153/775260/campos_512_v4
+153/775270/campos_512_v4
+153/775280/campos_512_v4
+153/775290/campos_512_v4
+153/775292/campos_512_v4
+153/775304/campos_512_v4
+153/775317/campos_512_v4
+153/775323/campos_512_v4
+153/775325/campos_512_v4
+153/775348/campos_512_v4
+153/775349/campos_512_v4
+153/775350/campos_512_v4
+153/775357/campos_512_v4
+153/775365/campos_512_v4
+153/775374/campos_512_v4
+153/775375/campos_512_v4
+153/775386/campos_512_v4
+153/775389/campos_512_v4
+153/775394/campos_512_v4
+153/775417/campos_512_v4
+153/775421/campos_512_v4
+153/775439/campos_512_v4
+153/775440/campos_512_v4
+153/775443/campos_512_v4
+153/775450/campos_512_v4
+153/775464/campos_512_v4
+153/775487/campos_512_v4
+153/775494/campos_512_v4
+153/775509/campos_512_v4
+153/775521/campos_512_v4
+153/775551/campos_512_v4
+153/775562/campos_512_v4
+153/775563/campos_512_v4
+153/775572/campos_512_v4
+153/775593/campos_512_v4
+153/775598/campos_512_v4
+153/775603/campos_512_v4
+153/775609/campos_512_v4
+153/775613/campos_512_v4
+153/775622/campos_512_v4
+153/775634/campos_512_v4
+153/775640/campos_512_v4
+153/775642/campos_512_v4
+153/775645/campos_512_v4
+153/775649/campos_512_v4
+153/775653/campos_512_v4
+153/775658/campos_512_v4
+153/775662/campos_512_v4
+153/775684/campos_512_v4
+153/775688/campos_512_v4
+153/775727/campos_512_v4
+153/775731/campos_512_v4
+153/775748/campos_512_v4
+153/775749/campos_512_v4
+153/775753/campos_512_v4
+153/775758/campos_512_v4
+153/775762/campos_512_v4
+153/775788/campos_512_v4
+153/775789/campos_512_v4
+153/775791/campos_512_v4
+153/775794/campos_512_v4
+153/775796/campos_512_v4
+153/775806/campos_512_v4
+153/775810/campos_512_v4
+153/775814/campos_512_v4
+153/775824/campos_512_v4
+153/775844/campos_512_v4
+153/775859/campos_512_v4
+153/775873/campos_512_v4
+153/775875/campos_512_v4
+153/775878/campos_512_v4
+153/775881/campos_512_v4
+153/775885/campos_512_v4
+153/775886/campos_512_v4
+153/775894/campos_512_v4
+153/775903/campos_512_v4
+153/775906/campos_512_v4
+153/775917/campos_512_v4
+153/775923/campos_512_v4
+153/775924/campos_512_v4
+153/775932/campos_512_v4
+153/775951/campos_512_v4
+153/775957/campos_512_v4
+153/775958/campos_512_v4
+153/775966/campos_512_v4
+153/775972/campos_512_v4
+153/775973/campos_512_v4
+153/775976/campos_512_v4
+153/775979/campos_512_v4
+153/775987/campos_512_v4
+153/776005/campos_512_v4
+153/776016/campos_512_v4
+153/776017/campos_512_v4
+153/776018/campos_512_v4
+153/776019/campos_512_v4
+153/776029/campos_512_v4
+153/776030/campos_512_v4
+153/776056/campos_512_v4
+153/776060/campos_512_v4
+153/776061/campos_512_v4
+153/776064/campos_512_v4
+153/776068/campos_512_v4
+153/776074/campos_512_v4
+153/776088/campos_512_v4
+153/776090/campos_512_v4
+153/776104/campos_512_v4
+153/776114/campos_512_v4
+153/776142/campos_512_v4
+153/776149/campos_512_v4
+153/776151/campos_512_v4
+153/776163/campos_512_v4
+153/776166/campos_512_v4
+153/776176/campos_512_v4
+153/776181/campos_512_v4
+153/776191/campos_512_v4
+153/776193/campos_512_v4
+153/776199/campos_512_v4
+153/776203/campos_512_v4
+153/776222/campos_512_v4
+153/776225/campos_512_v4
+153/776226/campos_512_v4
+153/776233/campos_512_v4
+153/776257/campos_512_v4
+153/776263/campos_512_v4
+153/776264/campos_512_v4
+153/776266/campos_512_v4
+153/776269/campos_512_v4
+153/776281/campos_512_v4
+153/776307/campos_512_v4
+153/776311/campos_512_v4
+153/776349/campos_512_v4
+153/776353/campos_512_v4
+153/776356/campos_512_v4
+153/776359/campos_512_v4
+153/776373/campos_512_v4
+153/776385/campos_512_v4
+153/776389/campos_512_v4
+153/776401/campos_512_v4
+153/776404/campos_512_v4
+153/776406/campos_512_v4
+153/776415/campos_512_v4
+153/776434/campos_512_v4
+153/776435/campos_512_v4
+153/776447/campos_512_v4
+153/776485/campos_512_v4
+153/776486/campos_512_v4
+153/776488/campos_512_v4
+153/776490/campos_512_v4
+153/776492/campos_512_v4
+153/776494/campos_512_v4
+153/776512/campos_512_v4
+153/776513/campos_512_v4
+153/776526/campos_512_v4
+153/776545/campos_512_v4
+153/776546/campos_512_v4
+153/776559/campos_512_v4
+153/776562/campos_512_v4
+153/776563/campos_512_v4
+153/776567/campos_512_v4
+153/776573/campos_512_v4
+153/776574/campos_512_v4
+153/776588/campos_512_v4
+153/776597/campos_512_v4
+153/776607/campos_512_v4
+153/776609/campos_512_v4
+153/776612/campos_512_v4
+153/776615/campos_512_v4
+153/776621/campos_512_v4
+153/776638/campos_512_v4
+153/776639/campos_512_v4
+153/776643/campos_512_v4
+153/776645/campos_512_v4
+153/776648/campos_512_v4
+153/776655/campos_512_v4
+153/776662/campos_512_v4
+153/776663/campos_512_v4
+153/776673/campos_512_v4
+153/776683/campos_512_v4
+153/776686/campos_512_v4
+153/776690/campos_512_v4
+153/776707/campos_512_v4
+153/776717/campos_512_v4
+153/776731/campos_512_v4
+153/776736/campos_512_v4
+153/776739/campos_512_v4
+153/776749/campos_512_v4
+153/776754/campos_512_v4
+153/776757/campos_512_v4
+153/776762/campos_512_v4
+153/776767/campos_512_v4
+153/776791/campos_512_v4
+153/776804/campos_512_v4
+153/776827/campos_512_v4
+153/776831/campos_512_v4
+153/776832/campos_512_v4
+153/776877/campos_512_v4
+153/776883/campos_512_v4
+153/776888/campos_512_v4
+153/776906/campos_512_v4
+153/776913/campos_512_v4
+153/776922/campos_512_v4
+153/776928/campos_512_v4
+153/776935/campos_512_v4
+153/776942/campos_512_v4
+153/776943/campos_512_v4
+153/776957/campos_512_v4
+153/776965/campos_512_v4
+153/776983/campos_512_v4
+153/776987/campos_512_v4
+153/776997/campos_512_v4
+153/777015/campos_512_v4
+153/777035/campos_512_v4
+153/777036/campos_512_v4
+153/777043/campos_512_v4
+153/777055/campos_512_v4
+153/777062/campos_512_v4
+153/777079/campos_512_v4
+153/777090/campos_512_v4
+153/777102/campos_512_v4
+153/777104/campos_512_v4
+153/777108/campos_512_v4
+153/777114/campos_512_v4
+153/777134/campos_512_v4
+153/777142/campos_512_v4
+153/777145/campos_512_v4
+153/777164/campos_512_v4
+153/777166/campos_512_v4
+153/777176/campos_512_v4
+153/777180/campos_512_v4
+153/777187/campos_512_v4
+153/777191/campos_512_v4
+153/777193/campos_512_v4
+153/777201/campos_512_v4
+153/777205/campos_512_v4
+153/777216/campos_512_v4
+153/777249/campos_512_v4
+153/777253/campos_512_v4
+153/777261/campos_512_v4
+153/777269/campos_512_v4
+153/777271/campos_512_v4
+153/777284/campos_512_v4
+153/777292/campos_512_v4
+153/777293/campos_512_v4
+153/777299/campos_512_v4
+153/777300/campos_512_v4
+153/777309/campos_512_v4
+153/777313/campos_512_v4
+153/777314/campos_512_v4
+153/777320/campos_512_v4
+153/777323/campos_512_v4
+153/777331/campos_512_v4
+153/777342/campos_512_v4
+153/777375/campos_512_v4
+153/777379/campos_512_v4
+153/777383/campos_512_v4
+153/777393/campos_512_v4
+153/777399/campos_512_v4
+153/777403/campos_512_v4
+153/777406/campos_512_v4
+153/777413/campos_512_v4
+153/777434/campos_512_v4
+153/777437/campos_512_v4
+153/777439/campos_512_v4
+153/777468/campos_512_v4
+153/777492/campos_512_v4
+153/777496/campos_512_v4
+153/777517/campos_512_v4
+153/777524/campos_512_v4
+153/777527/campos_512_v4
+153/777532/campos_512_v4
+153/777536/campos_512_v4
+153/777545/campos_512_v4
+153/777548/campos_512_v4
+153/777554/campos_512_v4
+153/777569/campos_512_v4
+153/777572/campos_512_v4
+153/777574/campos_512_v4
+153/777583/campos_512_v4
+153/777590/campos_512_v4
+153/777593/campos_512_v4
+153/777604/campos_512_v4
+153/777615/campos_512_v4
+153/777627/campos_512_v4
+153/777634/campos_512_v4
+153/777638/campos_512_v4
+153/777647/campos_512_v4
+153/777660/campos_512_v4
+153/777663/campos_512_v4
+153/777666/campos_512_v4
+153/777672/campos_512_v4
+153/777682/campos_512_v4
+153/777687/campos_512_v4
+153/777704/campos_512_v4
+153/777712/campos_512_v4
+153/777716/campos_512_v4
+153/777721/campos_512_v4
+153/777732/campos_512_v4
+153/777733/campos_512_v4
+153/777744/campos_512_v4
+153/777749/campos_512_v4
+153/777757/campos_512_v4
+153/777776/campos_512_v4
+153/777782/campos_512_v4
+153/777800/campos_512_v4
+153/777807/campos_512_v4
+153/777819/campos_512_v4
+153/777822/campos_512_v4
+153/777825/campos_512_v4
+153/777832/campos_512_v4
+153/777860/campos_512_v4
+153/777878/campos_512_v4
+153/777897/campos_512_v4
+153/777904/campos_512_v4
+153/777908/campos_512_v4
+153/777911/campos_512_v4
+153/777915/campos_512_v4
+153/777916/campos_512_v4
+153/777922/campos_512_v4
+153/777929/campos_512_v4
+153/777933/campos_512_v4
+153/777935/campos_512_v4
+153/777962/campos_512_v4
+153/777963/campos_512_v4
+153/777969/campos_512_v4
+153/777973/campos_512_v4
+153/777982/campos_512_v4
+153/777984/campos_512_v4
+153/777985/campos_512_v4
+153/777988/campos_512_v4
+153/777998/campos_512_v4
+153/778023/campos_512_v4
+153/778026/campos_512_v4
+153/778027/campos_512_v4
+153/778030/campos_512_v4
+153/778047/campos_512_v4
+153/778061/campos_512_v4
+153/778063/campos_512_v4
+153/778064/campos_512_v4
+153/778066/campos_512_v4
+153/778070/campos_512_v4
+153/778076/campos_512_v4
+153/778081/campos_512_v4
+153/778084/campos_512_v4
+153/778088/campos_512_v4
+153/778091/campos_512_v4
+153/778093/campos_512_v4
+153/778094/campos_512_v4
+153/778097/campos_512_v4
+153/778109/campos_512_v4
+153/778115/campos_512_v4
+153/778116/campos_512_v4
+153/778130/campos_512_v4
+153/778135/campos_512_v4
+153/778138/campos_512_v4
+153/778162/campos_512_v4
+153/778163/campos_512_v4
+153/778168/campos_512_v4
+153/778175/campos_512_v4
+153/778177/campos_512_v4
+153/778185/campos_512_v4
+153/778200/campos_512_v4
+153/778201/campos_512_v4
+153/778214/campos_512_v4
+153/778220/campos_512_v4
+153/778231/campos_512_v4
+153/778252/campos_512_v4
+153/778253/campos_512_v4
+153/778260/campos_512_v4
+153/778274/campos_512_v4
+153/778275/campos_512_v4
+153/778279/campos_512_v4
+153/778305/campos_512_v4
+153/778313/campos_512_v4
+153/778315/campos_512_v4
+153/778316/campos_512_v4
+153/778319/campos_512_v4
+153/778335/campos_512_v4
+153/778337/campos_512_v4
+153/778338/campos_512_v4
+153/778346/campos_512_v4
+153/778349/campos_512_v4
+153/778356/campos_512_v4
+153/778360/campos_512_v4
+153/778369/campos_512_v4
+153/778396/campos_512_v4
+153/778397/campos_512_v4
+153/778418/campos_512_v4
+153/778426/campos_512_v4
+153/778430/campos_512_v4
+153/778432/campos_512_v4
+153/778443/campos_512_v4
+153/778446/campos_512_v4
+153/778458/campos_512_v4
+153/778472/campos_512_v4
+153/778473/campos_512_v4
+153/778477/campos_512_v4
+153/778487/campos_512_v4
+153/778513/campos_512_v4
+153/778518/campos_512_v4
+153/778520/campos_512_v4
+153/778521/campos_512_v4
+153/778540/campos_512_v4
+153/778550/campos_512_v4
+153/778560/campos_512_v4
+153/778572/campos_512_v4
+153/778578/campos_512_v4
+153/778583/campos_512_v4
+153/778585/campos_512_v4
+153/778593/campos_512_v4
+153/778603/campos_512_v4
+153/778616/campos_512_v4
+153/778622/campos_512_v4
+153/778631/campos_512_v4
+153/778635/campos_512_v4
+153/778638/campos_512_v4
+153/778651/campos_512_v4
+153/778674/campos_512_v4
+153/778683/campos_512_v4
+153/778688/campos_512_v4
+153/778693/campos_512_v4
+153/778705/campos_512_v4
+153/778706/campos_512_v4
+153/778720/campos_512_v4
+153/778737/campos_512_v4
+153/778753/campos_512_v4
+153/778760/campos_512_v4
+153/778775/campos_512_v4
+153/778787/campos_512_v4
+153/778790/campos_512_v4
+153/778801/campos_512_v4
+153/778810/campos_512_v4
+153/778815/campos_512_v4
+153/778821/campos_512_v4
+153/778831/campos_512_v4
+153/778846/campos_512_v4
+153/778850/campos_512_v4
+153/778852/campos_512_v4
+153/778868/campos_512_v4
+153/778904/campos_512_v4
+153/778917/campos_512_v4
+153/778937/campos_512_v4
+153/778940/campos_512_v4
+153/778942/campos_512_v4
+153/778945/campos_512_v4
+153/778954/campos_512_v4
+153/778962/campos_512_v4
+153/778964/campos_512_v4
+153/778972/campos_512_v4
+153/778981/campos_512_v4
+153/778989/campos_512_v4
+153/778993/campos_512_v4
+153/778994/campos_512_v4
+153/779001/campos_512_v4
+153/779005/campos_512_v4
+153/779009/campos_512_v4
+153/779013/campos_512_v4
+153/779015/campos_512_v4
+153/779028/campos_512_v4
+153/779039/campos_512_v4
+153/779040/campos_512_v4
+153/779041/campos_512_v4
+153/779045/campos_512_v4
+153/779046/campos_512_v4
+153/779048/campos_512_v4
+153/779051/campos_512_v4
+153/779053/campos_512_v4
+153/779054/campos_512_v4
+153/779059/campos_512_v4
+153/779060/campos_512_v4
+153/779069/campos_512_v4
+153/779070/campos_512_v4
+153/779074/campos_512_v4
+153/779083/campos_512_v4
+153/779089/campos_512_v4
+153/779090/campos_512_v4
+153/779096/campos_512_v4
+153/779104/campos_512_v4
+153/779106/campos_512_v4
+153/779113/campos_512_v4
+153/779123/campos_512_v4
+153/779131/campos_512_v4
+153/779143/campos_512_v4
+153/779144/campos_512_v4
+153/779146/campos_512_v4
+153/779168/campos_512_v4
+153/779176/campos_512_v4
+153/779184/campos_512_v4
+153/779188/campos_512_v4
+153/779215/campos_512_v4
+153/779220/campos_512_v4
+153/779222/campos_512_v4
+153/779224/campos_512_v4
+153/779237/campos_512_v4
+153/779239/campos_512_v4
+153/779243/campos_512_v4
+153/779253/campos_512_v4
+153/779259/campos_512_v4
+153/779270/campos_512_v4
+153/779271/campos_512_v4
+153/779294/campos_512_v4
+153/779299/campos_512_v4
+153/779305/campos_512_v4
+153/779307/campos_512_v4
+153/779312/campos_512_v4
+153/779313/campos_512_v4
+153/779323/campos_512_v4
+153/779339/campos_512_v4
+153/779341/campos_512_v4
+153/779344/campos_512_v4
+153/779364/campos_512_v4
+153/779367/campos_512_v4
+153/779376/campos_512_v4
+153/779381/campos_512_v4
+153/779394/campos_512_v4
+153/779401/campos_512_v4
+153/779407/campos_512_v4
+153/779414/campos_512_v4
+153/779425/campos_512_v4
+153/779436/campos_512_v4
+153/779440/campos_512_v4
+153/779443/campos_512_v4
+153/779452/campos_512_v4
+153/779457/campos_512_v4
+153/779470/campos_512_v4
+153/779473/campos_512_v4
+153/779480/campos_512_v4
+153/779481/campos_512_v4
+153/779485/campos_512_v4
+153/779499/campos_512_v4
+153/779504/campos_512_v4
+153/779515/campos_512_v4
+153/779519/campos_512_v4
+153/779533/campos_512_v4
+153/779540/campos_512_v4
+153/779545/campos_512_v4
+153/779554/campos_512_v4
+153/779559/campos_512_v4
+153/779574/campos_512_v4
+153/779577/campos_512_v4
+153/779582/campos_512_v4
+153/779592/campos_512_v4
+153/779593/campos_512_v4
+153/779597/campos_512_v4
+153/779605/campos_512_v4
+153/779616/campos_512_v4
+153/779618/campos_512_v4
+153/779626/campos_512_v4
+153/779627/campos_512_v4
+153/779633/campos_512_v4
+153/779642/campos_512_v4
+153/779650/campos_512_v4
+153/779662/campos_512_v4
+153/779664/campos_512_v4
+153/779669/campos_512_v4
+153/779672/campos_512_v4
+153/779678/campos_512_v4
+153/779683/campos_512_v4
+153/779691/campos_512_v4
+153/779697/campos_512_v4
+153/779699/campos_512_v4
+153/779703/campos_512_v4
+153/779711/campos_512_v4
+153/779733/campos_512_v4
+153/779745/campos_512_v4
+153/779758/campos_512_v4
+153/779763/campos_512_v4
+153/779776/campos_512_v4
+153/779782/campos_512_v4
+153/779783/campos_512_v4
+153/779791/campos_512_v4
+153/779795/campos_512_v4
+153/779796/campos_512_v4
+153/779805/campos_512_v4
+153/779815/campos_512_v4
+153/779818/campos_512_v4
+153/779826/campos_512_v4
+153/779832/campos_512_v4
+153/779835/campos_512_v4
+153/779836/campos_512_v4
+153/779839/campos_512_v4
+153/779847/campos_512_v4
+153/779882/campos_512_v4
+153/779886/campos_512_v4
+153/779902/campos_512_v4
+153/779905/campos_512_v4
+153/779908/campos_512_v4
+153/779913/campos_512_v4
+153/779916/campos_512_v4
+153/779921/campos_512_v4
+153/779929/campos_512_v4
+153/779943/campos_512_v4
+153/779955/campos_512_v4
+153/779965/campos_512_v4
+153/779980/campos_512_v4
+153/779984/campos_512_v4
+153/779986/campos_512_v4
+153/779991/campos_512_v4
+153/779995/campos_512_v4
+153/779998/campos_512_v4
+153/780000/campos_512_v4
+154/780013/campos_512_v4
+154/780024/campos_512_v4
+154/780028/campos_512_v4
+154/780038/campos_512_v4
+154/780064/campos_512_v4
+154/780065/campos_512_v4
+154/780084/campos_512_v4
+154/780096/campos_512_v4
+154/780101/campos_512_v4
+154/780106/campos_512_v4
+154/780122/campos_512_v4
+154/780142/campos_512_v4
+154/780144/campos_512_v4
+154/780157/campos_512_v4
+154/780167/campos_512_v4
+154/780171/campos_512_v4
+154/780176/campos_512_v4
+154/780190/campos_512_v4
+154/780197/campos_512_v4
+154/780202/campos_512_v4
+154/780208/campos_512_v4
+154/780219/campos_512_v4
+154/780220/campos_512_v4
+154/780232/campos_512_v4
+154/780241/campos_512_v4
+154/780245/campos_512_v4
+154/780265/campos_512_v4
+154/780269/campos_512_v4
+154/780272/campos_512_v4
+154/780274/campos_512_v4
+154/780279/campos_512_v4
+154/780292/campos_512_v4
+154/780306/campos_512_v4
+154/780308/campos_512_v4
+154/780328/campos_512_v4
+154/780330/campos_512_v4
+154/780331/campos_512_v4
+154/780337/campos_512_v4
+154/780342/campos_512_v4
+154/780343/campos_512_v4
+154/780344/campos_512_v4
+154/780351/campos_512_v4
+154/780355/campos_512_v4
+154/780358/campos_512_v4
+154/780366/campos_512_v4
+154/780370/campos_512_v4
+154/780372/campos_512_v4
+154/780385/campos_512_v4
+154/780394/campos_512_v4
+154/780395/campos_512_v4
+154/780402/campos_512_v4
+154/780409/campos_512_v4
+154/780413/campos_512_v4
+154/780421/campos_512_v4
+154/780430/campos_512_v4
+154/780445/campos_512_v4
+154/780450/campos_512_v4
+154/780451/campos_512_v4
+154/780458/campos_512_v4
+154/780462/campos_512_v4
+154/780466/campos_512_v4
+154/780469/campos_512_v4
+154/780483/campos_512_v4
+154/780484/campos_512_v4
+154/780503/campos_512_v4
+154/780504/campos_512_v4
+154/780509/campos_512_v4
+154/780511/campos_512_v4
+154/780525/campos_512_v4
+154/780531/campos_512_v4
+154/780533/campos_512_v4
+154/780534/campos_512_v4
+154/780536/campos_512_v4
+154/780537/campos_512_v4
+154/780540/campos_512_v4
+154/780551/campos_512_v4
+154/780555/campos_512_v4
+154/780566/campos_512_v4
+154/780574/campos_512_v4
+154/780577/campos_512_v4
+154/780585/campos_512_v4
+154/780602/campos_512_v4
+154/780610/campos_512_v4
+154/780611/campos_512_v4
+154/780616/campos_512_v4
+154/780625/campos_512_v4
+154/780632/campos_512_v4
+154/780642/campos_512_v4
+154/780644/campos_512_v4
+154/780652/campos_512_v4
+154/780660/campos_512_v4
+154/780663/campos_512_v4
+154/780668/campos_512_v4
+154/780675/campos_512_v4
+154/780683/campos_512_v4
+154/780686/campos_512_v4
+154/780697/campos_512_v4
+154/780704/campos_512_v4
+154/780729/campos_512_v4
+154/780731/campos_512_v4
+154/780737/campos_512_v4
+154/780743/campos_512_v4
+154/780746/campos_512_v4
+154/780748/campos_512_v4
+154/780757/campos_512_v4
+154/780762/campos_512_v4
+154/780763/campos_512_v4
+154/780768/campos_512_v4
+154/780774/campos_512_v4
+154/780778/campos_512_v4
+154/780790/campos_512_v4
+154/780799/campos_512_v4
+154/780806/campos_512_v4
+154/780819/campos_512_v4
+154/780825/campos_512_v4
+154/780831/campos_512_v4
+154/780832/campos_512_v4
+154/780833/campos_512_v4
+154/780834/campos_512_v4
+154/780836/campos_512_v4
+154/780838/campos_512_v4
+154/780847/campos_512_v4
+154/780855/campos_512_v4
+154/780868/campos_512_v4
+154/780878/campos_512_v4
+154/780895/campos_512_v4
+154/780916/campos_512_v4
+154/780918/campos_512_v4
+154/780920/campos_512_v4
+154/780931/campos_512_v4
+154/780937/campos_512_v4
+154/780938/campos_512_v4
+154/780945/campos_512_v4
+154/780949/campos_512_v4
+154/780957/campos_512_v4
+154/780966/campos_512_v4
+154/780975/campos_512_v4
+154/780986/campos_512_v4
+154/780991/campos_512_v4
+154/781024/campos_512_v4
+154/781026/campos_512_v4
+154/781044/campos_512_v4
+154/781047/campos_512_v4
+154/781066/campos_512_v4
+154/781070/campos_512_v4
+154/781071/campos_512_v4
+154/781072/campos_512_v4
+154/781074/campos_512_v4
+154/781077/campos_512_v4
+154/781086/campos_512_v4
+154/781090/campos_512_v4
+154/781099/campos_512_v4
+154/781100/campos_512_v4
+154/781101/campos_512_v4
+154/781105/campos_512_v4
+154/781111/campos_512_v4
+154/781118/campos_512_v4
+154/781132/campos_512_v4
+154/781138/campos_512_v4
+154/781139/campos_512_v4
+154/781144/campos_512_v4
+154/781145/campos_512_v4
+154/781150/campos_512_v4
+154/781152/campos_512_v4
+154/781154/campos_512_v4
+154/781160/campos_512_v4
+154/781168/campos_512_v4
+154/781176/campos_512_v4
+154/781201/campos_512_v4
+154/781207/campos_512_v4
+154/781214/campos_512_v4
+154/781219/campos_512_v4
+154/781227/campos_512_v4
+154/781234/campos_512_v4
+154/781238/campos_512_v4
+154/781245/campos_512_v4
+154/781250/campos_512_v4
+154/781264/campos_512_v4
+154/781268/campos_512_v4
+154/781303/campos_512_v4
+154/781313/campos_512_v4
+154/781314/campos_512_v4
+154/781340/campos_512_v4
+154/781345/campos_512_v4
+154/781347/campos_512_v4
+154/781348/campos_512_v4
+154/781351/campos_512_v4
+154/781353/campos_512_v4
+154/781367/campos_512_v4
+154/781378/campos_512_v4
+154/781399/campos_512_v4
+154/781402/campos_512_v4
+154/781406/campos_512_v4
+154/781409/campos_512_v4
+154/781417/campos_512_v4
+154/781418/campos_512_v4
+154/781427/campos_512_v4
+154/781440/campos_512_v4
+154/781446/campos_512_v4
+154/781453/campos_512_v4
+154/781455/campos_512_v4
+154/781463/campos_512_v4
+154/781466/campos_512_v4
+154/781497/campos_512_v4
+154/781502/campos_512_v4
+154/781509/campos_512_v4
+154/781519/campos_512_v4
+154/781532/campos_512_v4
+154/781554/campos_512_v4
+154/781555/campos_512_v4
+154/781571/campos_512_v4
+154/781572/campos_512_v4
+154/781573/campos_512_v4
+154/781575/campos_512_v4
+154/781584/campos_512_v4
+154/781588/campos_512_v4
+154/781596/campos_512_v4
+154/781604/campos_512_v4
+154/781610/campos_512_v4
+154/781615/campos_512_v4
+154/781616/campos_512_v4
+154/781631/campos_512_v4
+154/781647/campos_512_v4
+154/781650/campos_512_v4
+154/781666/campos_512_v4
+154/781667/campos_512_v4
+154/781673/campos_512_v4
+154/781685/campos_512_v4
+154/781704/campos_512_v4
+154/781707/campos_512_v4
+154/781727/campos_512_v4
+154/781733/campos_512_v4
+154/781736/campos_512_v4
+154/781746/campos_512_v4
+154/781755/campos_512_v4
+154/781765/campos_512_v4
+154/781775/campos_512_v4
+154/781780/campos_512_v4
+154/781783/campos_512_v4
+154/781792/campos_512_v4
+154/781804/campos_512_v4
+154/781810/campos_512_v4
+154/781821/campos_512_v4
+154/781832/campos_512_v4
+154/781838/campos_512_v4
+154/781858/campos_512_v4
+154/781862/campos_512_v4
+154/781869/campos_512_v4
+154/781871/campos_512_v4
+154/781875/campos_512_v4
+154/781878/campos_512_v4
+154/781881/campos_512_v4
+154/781883/campos_512_v4
+154/781886/campos_512_v4
+154/781888/campos_512_v4
+154/781892/campos_512_v4
+154/781893/campos_512_v4
+154/781911/campos_512_v4
+154/781914/campos_512_v4
+154/781920/campos_512_v4
+154/781929/campos_512_v4
+154/781934/campos_512_v4
+154/781949/campos_512_v4
+154/781959/campos_512_v4
+154/781972/campos_512_v4
+154/781975/campos_512_v4
+154/781991/campos_512_v4
+154/781992/campos_512_v4
+154/781999/campos_512_v4
+154/782002/campos_512_v4
+154/782007/campos_512_v4
+154/782042/campos_512_v4
+154/782073/campos_512_v4
+154/782103/campos_512_v4
+154/782116/campos_512_v4
+154/782132/campos_512_v4
+154/782136/campos_512_v4
+154/782141/campos_512_v4
+154/782147/campos_512_v4
+154/782153/campos_512_v4
+154/782168/campos_512_v4
+154/782169/campos_512_v4
+154/782176/campos_512_v4
+154/782187/campos_512_v4
+154/782194/campos_512_v4
+154/782209/campos_512_v4
+154/782214/campos_512_v4
+154/782233/campos_512_v4
+154/782234/campos_512_v4
+154/782242/campos_512_v4
+154/782250/campos_512_v4
+154/782256/campos_512_v4
+154/782266/campos_512_v4
+154/782269/campos_512_v4
+154/782296/campos_512_v4
+154/782300/campos_512_v4
+154/782305/campos_512_v4
+154/782311/campos_512_v4
+154/782327/campos_512_v4
+154/782353/campos_512_v4
+154/782354/campos_512_v4
+154/782356/campos_512_v4
+154/782367/campos_512_v4
+154/782369/campos_512_v4
+154/782380/campos_512_v4
+154/782384/campos_512_v4
+154/782404/campos_512_v4
+154/782428/campos_512_v4
+154/782436/campos_512_v4
+154/782448/campos_512_v4
+154/782449/campos_512_v4
+154/782453/campos_512_v4
+154/782457/campos_512_v4
+154/782475/campos_512_v4
+154/782477/campos_512_v4
+154/782486/campos_512_v4
+154/782489/campos_512_v4
+154/782490/campos_512_v4
+154/782502/campos_512_v4
+154/782519/campos_512_v4
+154/782522/campos_512_v4
+154/782537/campos_512_v4
+154/782544/campos_512_v4
+154/782551/campos_512_v4
+154/782557/campos_512_v4
+154/782563/campos_512_v4
+154/782572/campos_512_v4
+154/782574/campos_512_v4
+154/782578/campos_512_v4
+154/782580/campos_512_v4
+154/782592/campos_512_v4
+154/782594/campos_512_v4
+154/782600/campos_512_v4
+154/782603/campos_512_v4
+154/782609/campos_512_v4
+154/782613/campos_512_v4
+154/782614/campos_512_v4
+154/782622/campos_512_v4
+154/782623/campos_512_v4
+154/782626/campos_512_v4
+154/782627/campos_512_v4
+154/782634/campos_512_v4
+154/782637/campos_512_v4
+154/782638/campos_512_v4
+154/782643/campos_512_v4
+154/782649/campos_512_v4
+154/782652/campos_512_v4
+154/782655/campos_512_v4
+154/782662/campos_512_v4
+154/782663/campos_512_v4
+154/782665/campos_512_v4
+154/782674/campos_512_v4
+154/782686/campos_512_v4
+154/782695/campos_512_v4
+154/782696/campos_512_v4
+154/782716/campos_512_v4
+154/782728/campos_512_v4
+154/782740/campos_512_v4
+154/782746/campos_512_v4
+154/782748/campos_512_v4
+154/782754/campos_512_v4
+154/782762/campos_512_v4
+154/782771/campos_512_v4
+154/782773/campos_512_v4
+154/782792/campos_512_v4
+154/782793/campos_512_v4
+154/782797/campos_512_v4
+154/782802/campos_512_v4
+154/782804/campos_512_v4
+154/782806/campos_512_v4
+154/782812/campos_512_v4
+154/782816/campos_512_v4
+154/782822/campos_512_v4
+154/782830/campos_512_v4
+154/782839/campos_512_v4
+154/782842/campos_512_v4
+154/782846/campos_512_v4
+154/782847/campos_512_v4
+154/782856/campos_512_v4
+154/782864/campos_512_v4
+154/782883/campos_512_v4
+154/782897/campos_512_v4
+154/782908/campos_512_v4
+154/782915/campos_512_v4
+154/782929/campos_512_v4
+154/782964/campos_512_v4
+154/782969/campos_512_v4
+154/782972/campos_512_v4
+154/782982/campos_512_v4
+154/782989/campos_512_v4
+154/783006/campos_512_v4
+154/783013/campos_512_v4
+154/783015/campos_512_v4
+154/783023/campos_512_v4
+154/783047/campos_512_v4
+154/783050/campos_512_v4
+154/783055/campos_512_v4
+154/783059/campos_512_v4
+154/783066/campos_512_v4
+154/783068/campos_512_v4
+154/783073/campos_512_v4
+154/783074/campos_512_v4
+154/783081/campos_512_v4
+154/783101/campos_512_v4
+154/783107/campos_512_v4
+154/783115/campos_512_v4
+154/783120/campos_512_v4
+154/783125/campos_512_v4
+154/783127/campos_512_v4
+154/783128/campos_512_v4
+154/783141/campos_512_v4
+154/783142/campos_512_v4
+154/783148/campos_512_v4
+154/783149/campos_512_v4
+154/783156/campos_512_v4
+154/783157/campos_512_v4
+154/783162/campos_512_v4
+154/783169/campos_512_v4
+154/783177/campos_512_v4
+154/783179/campos_512_v4
+154/783219/campos_512_v4
+154/783222/campos_512_v4
+154/783228/campos_512_v4
+154/783229/campos_512_v4
+154/783236/campos_512_v4
+154/783245/campos_512_v4
+154/783248/campos_512_v4
+154/783254/campos_512_v4
+154/783255/campos_512_v4
+154/783265/campos_512_v4
+154/783267/campos_512_v4
+154/783286/campos_512_v4
+154/783296/campos_512_v4
+154/783304/campos_512_v4
+154/783314/campos_512_v4
+154/783317/campos_512_v4
+154/783335/campos_512_v4
+154/783345/campos_512_v4
+154/783354/campos_512_v4
+154/783357/campos_512_v4
+154/783366/campos_512_v4
+154/783369/campos_512_v4
+154/783375/campos_512_v4
+154/783376/campos_512_v4
+154/783377/campos_512_v4
+154/783383/campos_512_v4
+154/783385/campos_512_v4
+154/783395/campos_512_v4
+154/783409/campos_512_v4
+154/783415/campos_512_v4
+154/783417/campos_512_v4
+154/783435/campos_512_v4
+154/783439/campos_512_v4
+154/783446/campos_512_v4
+154/783449/campos_512_v4
+154/783452/campos_512_v4
+154/783463/campos_512_v4
+154/783472/campos_512_v4
+154/783474/campos_512_v4
+154/783484/campos_512_v4
+154/783487/campos_512_v4
+154/783491/campos_512_v4
+154/783495/campos_512_v4
+154/783497/campos_512_v4
+154/783499/campos_512_v4
+154/783506/campos_512_v4
+154/783507/campos_512_v4
+154/783516/campos_512_v4
+154/783524/campos_512_v4
+154/783535/campos_512_v4
+154/783538/campos_512_v4
+154/783547/campos_512_v4
+154/783567/campos_512_v4
+154/783589/campos_512_v4
+154/783610/campos_512_v4
+154/783642/campos_512_v4
+154/783663/campos_512_v4
+154/783670/campos_512_v4
+154/783693/campos_512_v4
+154/783700/campos_512_v4
+154/783714/campos_512_v4
+154/783716/campos_512_v4
+154/783718/campos_512_v4
+154/783735/campos_512_v4
+154/783743/campos_512_v4
+154/783765/campos_512_v4
+154/783770/campos_512_v4
+154/783772/campos_512_v4
+154/783789/campos_512_v4
+154/783791/campos_512_v4
+154/783793/campos_512_v4
+154/783810/campos_512_v4
+154/783818/campos_512_v4
+154/783842/campos_512_v4
+154/783846/campos_512_v4
+154/783855/campos_512_v4
+154/783914/campos_512_v4
+154/783915/campos_512_v4
+154/783916/campos_512_v4
+154/783929/campos_512_v4
+154/783939/campos_512_v4
+154/783946/campos_512_v4
+154/783948/campos_512_v4
+154/783951/campos_512_v4
+154/783952/campos_512_v4
+154/783954/campos_512_v4
+154/783988/campos_512_v4
+154/783998/campos_512_v4
+154/784002/campos_512_v4
+154/784013/campos_512_v4
+154/784016/campos_512_v4
+154/784022/campos_512_v4
+154/784025/campos_512_v4
+154/784029/campos_512_v4
+154/784038/campos_512_v4
+154/784043/campos_512_v4
+154/784050/campos_512_v4
+154/784056/campos_512_v4
+154/784075/campos_512_v4
+154/784081/campos_512_v4
+154/784088/campos_512_v4
+154/784100/campos_512_v4
+154/784106/campos_512_v4
+154/784108/campos_512_v4
+154/784113/campos_512_v4
+154/784130/campos_512_v4
+154/784135/campos_512_v4
+154/784146/campos_512_v4
+154/784166/campos_512_v4
+154/784175/campos_512_v4
+154/784185/campos_512_v4
+154/784187/campos_512_v4
+154/784189/campos_512_v4
+154/784190/campos_512_v4
+154/784200/campos_512_v4
+154/784209/campos_512_v4
+154/784214/campos_512_v4
+154/784227/campos_512_v4
+154/784231/campos_512_v4
+154/784233/campos_512_v4
+154/784238/campos_512_v4
+154/784246/campos_512_v4
+154/784255/campos_512_v4
+154/784257/campos_512_v4
+154/784267/campos_512_v4
+154/784283/campos_512_v4
+154/784291/campos_512_v4
+154/784293/campos_512_v4
+154/784339/campos_512_v4
+154/784343/campos_512_v4
+154/784347/campos_512_v4
+154/784352/campos_512_v4
+154/784363/campos_512_v4
+154/784373/campos_512_v4
+154/784377/campos_512_v4
+154/784381/campos_512_v4
+154/784392/campos_512_v4
+154/784416/campos_512_v4
+154/784424/campos_512_v4
+154/784427/campos_512_v4
+154/784440/campos_512_v4
+154/784447/campos_512_v4
+154/784456/campos_512_v4
+154/784460/campos_512_v4
+154/784463/campos_512_v4
+154/784473/campos_512_v4
+154/784482/campos_512_v4
+154/784495/campos_512_v4
+154/784496/campos_512_v4
+154/784500/campos_512_v4
+154/784501/campos_512_v4
+154/784509/campos_512_v4
+154/784510/campos_512_v4
+154/784516/campos_512_v4
+154/784521/campos_512_v4
+154/784523/campos_512_v4
+154/784524/campos_512_v4
+154/784529/campos_512_v4
+154/784533/campos_512_v4
+154/784544/campos_512_v4
+154/784552/campos_512_v4
+154/784556/campos_512_v4
+154/784558/campos_512_v4
+154/784581/campos_512_v4
+154/784584/campos_512_v4
+154/784592/campos_512_v4
+154/784598/campos_512_v4
+154/784607/campos_512_v4
+154/784610/campos_512_v4
+154/784612/campos_512_v4
+154/784614/campos_512_v4
+154/784619/campos_512_v4
+154/784621/campos_512_v4
+154/784624/campos_512_v4
+154/784657/campos_512_v4
+154/784659/campos_512_v4
+154/784662/campos_512_v4
+154/784668/campos_512_v4
+154/784685/campos_512_v4
+154/784687/campos_512_v4
+154/784700/campos_512_v4
+154/784726/campos_512_v4
+154/784738/campos_512_v4
+154/784743/campos_512_v4
+154/784747/campos_512_v4
+154/784765/campos_512_v4
+154/784792/campos_512_v4
+154/784798/campos_512_v4
+154/784801/campos_512_v4
+154/784804/campos_512_v4
+154/784809/campos_512_v4
+154/784821/campos_512_v4
+154/784826/campos_512_v4
+154/784852/campos_512_v4
+154/784863/campos_512_v4
+154/784864/campos_512_v4
+154/784891/campos_512_v4
+154/784893/campos_512_v4
+154/784900/campos_512_v4
+154/784904/campos_512_v4
+154/784907/campos_512_v4
+154/784908/campos_512_v4
+154/784909/campos_512_v4
+154/784912/campos_512_v4
+154/784941/campos_512_v4
+154/784951/campos_512_v4
+154/784963/campos_512_v4
+154/784968/campos_512_v4
+154/784969/campos_512_v4
+154/784971/campos_512_v4
+154/784990/campos_512_v4
+154/784997/campos_512_v4
+155/785003/campos_512_v4
+155/785015/campos_512_v4
+155/785018/campos_512_v4
+155/785025/campos_512_v4
+155/785038/campos_512_v4
+155/785041/campos_512_v4
+155/785048/campos_512_v4
+155/785053/campos_512_v4
+155/785059/campos_512_v4
+155/785062/campos_512_v4
+155/785069/campos_512_v4
+155/785074/campos_512_v4
+155/785076/campos_512_v4
+155/785086/campos_512_v4
+155/785090/campos_512_v4
+155/785100/campos_512_v4
+155/785101/campos_512_v4
+155/785111/campos_512_v4
+155/785115/campos_512_v4
+155/785119/campos_512_v4
+155/785122/campos_512_v4
+155/785140/campos_512_v4
+155/785146/campos_512_v4
+155/785152/campos_512_v4
+155/785153/campos_512_v4
+155/785154/campos_512_v4
+155/785155/campos_512_v4
+155/785159/campos_512_v4
+155/785162/campos_512_v4
+155/785177/campos_512_v4
+155/785181/campos_512_v4
+155/785209/campos_512_v4
+155/785236/campos_512_v4
+155/785239/campos_512_v4
+155/785247/campos_512_v4
+155/785251/campos_512_v4
+155/785252/campos_512_v4
+155/785256/campos_512_v4
+155/785257/campos_512_v4
+155/785303/campos_512_v4
+155/785311/campos_512_v4
+155/785319/campos_512_v4
+155/785321/campos_512_v4
+155/785348/campos_512_v4
+155/785361/campos_512_v4
+155/785372/campos_512_v4
+155/785378/campos_512_v4
+155/785395/campos_512_v4
+155/785412/campos_512_v4
+155/785417/campos_512_v4
+155/785443/campos_512_v4
+155/785466/campos_512_v4
+155/785468/campos_512_v4
+155/785480/campos_512_v4
+155/785498/campos_512_v4
+155/785500/campos_512_v4
+155/785509/campos_512_v4
+155/785510/campos_512_v4
+155/785535/campos_512_v4
+155/785537/campos_512_v4
+155/785541/campos_512_v4
+155/785545/campos_512_v4
+155/785564/campos_512_v4
+155/785604/campos_512_v4
+155/785613/campos_512_v4
+155/785617/campos_512_v4
+155/785621/campos_512_v4
+155/785623/campos_512_v4
+155/785631/campos_512_v4
+155/785636/campos_512_v4
+155/785638/campos_512_v4
+155/785641/campos_512_v4
+155/785646/campos_512_v4
+155/785652/campos_512_v4
+155/785668/campos_512_v4
+155/785683/campos_512_v4
+155/785688/campos_512_v4
+155/785691/campos_512_v4
+155/785693/campos_512_v4
+155/785696/campos_512_v4
+155/785707/campos_512_v4
+155/785717/campos_512_v4
+155/785719/campos_512_v4
+155/785720/campos_512_v4
+155/785734/campos_512_v4
+155/785735/campos_512_v4
+155/785740/campos_512_v4
+155/785753/campos_512_v4
+155/785759/campos_512_v4
+155/785762/campos_512_v4
+155/785774/campos_512_v4
+155/785781/campos_512_v4
+155/785784/campos_512_v4
+155/785786/campos_512_v4
+155/785795/campos_512_v4
+155/785797/campos_512_v4
+155/785799/campos_512_v4
+155/785804/campos_512_v4
+155/785811/campos_512_v4
+155/785822/campos_512_v4
+155/785823/campos_512_v4
+155/785838/campos_512_v4
+155/785851/campos_512_v4
+155/785855/campos_512_v4
+155/785856/campos_512_v4
+155/785863/campos_512_v4
+155/785866/campos_512_v4
+155/785868/campos_512_v4
+155/785881/campos_512_v4
+155/785895/campos_512_v4
+155/785904/campos_512_v4
+155/785907/campos_512_v4
+155/785908/campos_512_v4
+155/785911/campos_512_v4
+155/785913/campos_512_v4
+155/785920/campos_512_v4
+155/785923/campos_512_v4
+155/785957/campos_512_v4
+155/785965/campos_512_v4
+155/785977/campos_512_v4
+155/785981/campos_512_v4
+155/785986/campos_512_v4
+155/785990/campos_512_v4
+155/785992/campos_512_v4
+155/785998/campos_512_v4
+155/786001/campos_512_v4
+155/786002/campos_512_v4
+155/786008/campos_512_v4
+155/786017/campos_512_v4
+155/786019/campos_512_v4
+155/786030/campos_512_v4
+155/786033/campos_512_v4
+155/786041/campos_512_v4
+155/786060/campos_512_v4
+155/786097/campos_512_v4
+155/786117/campos_512_v4
+155/786118/campos_512_v4
+155/786122/campos_512_v4
+155/786123/campos_512_v4
+155/786142/campos_512_v4
+155/786150/campos_512_v4
+155/786155/campos_512_v4
+155/786157/campos_512_v4
+155/786173/campos_512_v4
+155/786175/campos_512_v4
+155/786184/campos_512_v4
+155/786185/campos_512_v4
+155/786196/campos_512_v4
+155/786200/campos_512_v4
+155/786210/campos_512_v4
+155/786215/campos_512_v4
+155/786217/campos_512_v4
+155/786220/campos_512_v4
+155/786225/campos_512_v4
+155/786227/campos_512_v4
+155/786235/campos_512_v4
+155/786244/campos_512_v4
+155/786250/campos_512_v4
+155/786251/campos_512_v4
+155/786252/campos_512_v4
+155/786255/campos_512_v4
+155/786269/campos_512_v4
+155/786276/campos_512_v4
+155/786298/campos_512_v4
+155/786303/campos_512_v4
+155/786304/campos_512_v4
+155/786315/campos_512_v4
+155/786331/campos_512_v4
+155/786355/campos_512_v4
+155/786356/campos_512_v4
+155/786382/campos_512_v4
+155/786390/campos_512_v4
+155/786393/campos_512_v4
+155/786397/campos_512_v4
+155/786400/campos_512_v4
+155/786412/campos_512_v4
+155/786414/campos_512_v4
+155/786419/campos_512_v4
+155/786440/campos_512_v4
+155/786445/campos_512_v4
+155/786456/campos_512_v4
+155/786462/campos_512_v4
+155/786467/campos_512_v4
+155/786472/campos_512_v4
+155/786475/campos_512_v4
+155/786479/campos_512_v4
+155/786484/campos_512_v4
+155/786486/campos_512_v4
+155/786491/campos_512_v4
+155/786499/campos_512_v4
+155/786526/campos_512_v4
+155/786527/campos_512_v4
+155/786533/campos_512_v4
+155/786535/campos_512_v4
+155/786540/campos_512_v4
+155/786560/campos_512_v4
+155/786564/campos_512_v4
+155/786565/campos_512_v4
+155/786566/campos_512_v4
+155/786575/campos_512_v4
+155/786589/campos_512_v4
+155/786594/campos_512_v4
+155/786608/campos_512_v4
+155/786630/campos_512_v4
+155/786632/campos_512_v4
+155/786638/campos_512_v4
+155/786644/campos_512_v4
+155/786645/campos_512_v4
+155/786650/campos_512_v4
+155/786655/campos_512_v4
+155/786670/campos_512_v4
+155/786682/campos_512_v4
+155/786684/campos_512_v4
+155/786693/campos_512_v4
+155/786703/campos_512_v4
+155/786716/campos_512_v4
+155/786723/campos_512_v4
+155/786730/campos_512_v4
+155/786741/campos_512_v4
+155/786747/campos_512_v4
+155/786766/campos_512_v4
+155/786767/campos_512_v4
+155/786768/campos_512_v4
+155/786771/campos_512_v4
+155/786776/campos_512_v4
+155/786781/campos_512_v4
+155/786783/campos_512_v4
+155/786806/campos_512_v4
+155/786814/campos_512_v4
+155/786816/campos_512_v4
+155/786821/campos_512_v4
+155/786824/campos_512_v4
+155/786827/campos_512_v4
+155/786829/campos_512_v4
+155/786836/campos_512_v4
+155/786855/campos_512_v4
+155/786856/campos_512_v4
+155/786866/campos_512_v4
+155/786875/campos_512_v4
+155/786892/campos_512_v4
+155/786893/campos_512_v4
+155/786900/campos_512_v4
+155/786903/campos_512_v4
+155/786916/campos_512_v4
+155/786931/campos_512_v4
+155/786937/campos_512_v4
+155/786960/campos_512_v4
+155/786961/campos_512_v4
+155/786988/campos_512_v4
+155/786989/campos_512_v4
+155/787012/campos_512_v4
+155/787020/campos_512_v4
+155/787021/campos_512_v4
+155/787023/campos_512_v4
+155/787024/campos_512_v4
+155/787027/campos_512_v4
+155/787043/campos_512_v4
+155/787069/campos_512_v4
+155/787076/campos_512_v4
+155/787077/campos_512_v4
+155/787083/campos_512_v4
+155/787089/campos_512_v4
+155/787091/campos_512_v4
+155/787094/campos_512_v4
+155/787109/campos_512_v4
+155/787113/campos_512_v4
+155/787120/campos_512_v4
+155/787126/campos_512_v4
+155/787131/campos_512_v4
+155/787139/campos_512_v4
+155/787140/campos_512_v4
+155/787161/campos_512_v4
+155/787163/campos_512_v4
+155/787165/campos_512_v4
+155/787169/campos_512_v4
+155/787177/campos_512_v4
+155/787180/campos_512_v4
+155/787182/campos_512_v4
+155/787188/campos_512_v4
+155/787200/campos_512_v4
+155/787208/campos_512_v4
+155/787210/campos_512_v4
+155/787219/campos_512_v4
+155/787236/campos_512_v4
+155/787245/campos_512_v4
+155/787247/campos_512_v4
+155/787258/campos_512_v4
+155/787269/campos_512_v4
+155/787276/campos_512_v4
+155/787278/campos_512_v4
+155/787282/campos_512_v4
+155/787284/campos_512_v4
+155/787297/campos_512_v4
+155/787298/campos_512_v4
+155/787310/campos_512_v4
+155/787315/campos_512_v4
+155/787319/campos_512_v4
+155/787321/campos_512_v4
+155/787325/campos_512_v4
+155/787330/campos_512_v4
+155/787348/campos_512_v4
+155/787351/campos_512_v4
+155/787352/campos_512_v4
+155/787357/campos_512_v4
+155/787358/campos_512_v4
+155/787366/campos_512_v4
+155/787371/campos_512_v4
+155/787382/campos_512_v4
+155/787384/campos_512_v4
+155/787393/campos_512_v4
+155/787409/campos_512_v4
+155/787414/campos_512_v4
+155/787426/campos_512_v4
+155/787431/campos_512_v4
+155/787441/campos_512_v4
+155/787452/campos_512_v4
+155/787453/campos_512_v4
+155/787460/campos_512_v4
+155/787462/campos_512_v4
+155/787469/campos_512_v4
+155/787473/campos_512_v4
+155/787494/campos_512_v4
+155/787495/campos_512_v4
+155/787506/campos_512_v4
+155/787527/campos_512_v4
+155/787533/campos_512_v4
+155/787535/campos_512_v4
+155/787541/campos_512_v4
+155/787542/campos_512_v4
+155/787544/campos_512_v4
+155/787557/campos_512_v4
+155/787558/campos_512_v4
+155/787560/campos_512_v4
+155/787567/campos_512_v4
+155/787571/campos_512_v4
+155/787577/campos_512_v4
+155/787580/campos_512_v4
+155/787585/campos_512_v4
+155/787602/campos_512_v4
+155/787605/campos_512_v4
+155/787615/campos_512_v4
+155/787618/campos_512_v4
+155/787626/campos_512_v4
+155/787630/campos_512_v4
+155/787631/campos_512_v4
+155/787657/campos_512_v4
+155/787659/campos_512_v4
+155/787676/campos_512_v4
+155/787677/campos_512_v4
+155/787685/campos_512_v4
+155/787687/campos_512_v4
+155/787688/campos_512_v4
+155/787700/campos_512_v4
+155/787717/campos_512_v4
+155/787725/campos_512_v4
+155/787726/campos_512_v4
+155/787734/campos_512_v4
+155/787737/campos_512_v4
+155/787740/campos_512_v4
+155/787743/campos_512_v4
+155/787755/campos_512_v4
+155/787771/campos_512_v4
+155/787776/campos_512_v4
+155/787783/campos_512_v4
+155/787787/campos_512_v4
+155/787789/campos_512_v4
+155/787796/campos_512_v4
+155/787800/campos_512_v4
+155/787820/campos_512_v4
+155/787825/campos_512_v4
+155/787836/campos_512_v4
+155/787842/campos_512_v4
+155/787874/campos_512_v4
+155/787876/campos_512_v4
+155/787879/campos_512_v4
+155/787886/campos_512_v4
+155/787895/campos_512_v4
+155/787900/campos_512_v4
+155/787931/campos_512_v4
+155/787933/campos_512_v4
+155/787938/campos_512_v4
+155/787948/campos_512_v4
+155/788008/campos_512_v4
+155/788022/campos_512_v4
+155/788023/campos_512_v4
+155/788029/campos_512_v4
+155/788034/campos_512_v4
+155/788065/campos_512_v4
+155/788084/campos_512_v4
+155/788089/campos_512_v4
+155/788096/campos_512_v4
+155/788102/campos_512_v4
+155/788110/campos_512_v4
+155/788114/campos_512_v4
+155/788121/campos_512_v4
+155/788128/campos_512_v4
+155/788133/campos_512_v4
+155/788135/campos_512_v4
+155/788148/campos_512_v4
+155/788149/campos_512_v4
+155/788162/campos_512_v4
+155/788166/campos_512_v4
+155/788169/campos_512_v4
+155/788171/campos_512_v4
+155/788186/campos_512_v4
+155/788194/campos_512_v4
+155/788199/campos_512_v4
+155/788200/campos_512_v4
+155/788203/campos_512_v4
+155/788207/campos_512_v4
+155/788223/campos_512_v4
+155/788247/campos_512_v4
+155/788251/campos_512_v4
+155/788258/campos_512_v4
+155/788276/campos_512_v4
+155/788291/campos_512_v4
+155/788295/campos_512_v4
+155/788308/campos_512_v4
+155/788311/campos_512_v4
+155/788312/campos_512_v4
+155/788346/campos_512_v4
+155/788348/campos_512_v4
+155/788354/campos_512_v4
+155/788359/campos_512_v4
+155/788365/campos_512_v4
+155/788372/campos_512_v4
+155/788373/campos_512_v4
+155/788381/campos_512_v4
+155/788392/campos_512_v4
+155/788399/campos_512_v4
+155/788408/campos_512_v4
+155/788416/campos_512_v4
+155/788419/campos_512_v4
+155/788420/campos_512_v4
+155/788424/campos_512_v4
+155/788438/campos_512_v4
+155/788444/campos_512_v4
+155/788449/campos_512_v4
+155/788453/campos_512_v4
+155/788470/campos_512_v4
+155/788473/campos_512_v4
+155/788475/campos_512_v4
+155/788477/campos_512_v4
+155/788482/campos_512_v4
+155/788489/campos_512_v4
+155/788494/campos_512_v4
+155/788521/campos_512_v4
+155/788525/campos_512_v4
+155/788530/campos_512_v4
+155/788531/campos_512_v4
+155/788540/campos_512_v4
+155/788558/campos_512_v4
+155/788571/campos_512_v4
+155/788572/campos_512_v4
+155/788579/campos_512_v4
+155/788581/campos_512_v4
+155/788582/campos_512_v4
+155/788583/campos_512_v4
+155/788596/campos_512_v4
+155/788597/campos_512_v4
+155/788615/campos_512_v4
+155/788622/campos_512_v4
+155/788623/campos_512_v4
+155/788624/campos_512_v4
+155/788625/campos_512_v4
+155/788629/campos_512_v4
+155/788633/campos_512_v4
+155/788639/campos_512_v4
+155/788648/campos_512_v4
+155/788649/campos_512_v4
+155/788655/campos_512_v4
+155/788656/campos_512_v4
+155/788657/campos_512_v4
+155/788658/campos_512_v4
+155/788662/campos_512_v4
+155/788667/campos_512_v4
+155/788673/campos_512_v4
+155/788690/campos_512_v4
+155/788701/campos_512_v4
+155/788707/campos_512_v4
+155/788710/campos_512_v4
+155/788711/campos_512_v4
+155/788720/campos_512_v4
+155/788752/campos_512_v4
+155/788756/campos_512_v4
+155/788783/campos_512_v4
+155/788792/campos_512_v4
+155/788797/campos_512_v4
+155/788809/campos_512_v4
+155/788817/campos_512_v4
+155/788819/campos_512_v4
+155/788820/campos_512_v4
+155/788827/campos_512_v4
+155/788833/campos_512_v4
+155/788839/campos_512_v4
+155/788854/campos_512_v4
+155/788865/campos_512_v4
+155/788866/campos_512_v4
+155/788869/campos_512_v4
+155/788884/campos_512_v4
+155/788886/campos_512_v4
+155/788904/campos_512_v4
+155/788912/campos_512_v4
+155/788938/campos_512_v4
+155/788942/campos_512_v4
+155/788949/campos_512_v4
+155/788950/campos_512_v4
+155/788952/campos_512_v4
+155/788954/campos_512_v4
+155/788965/campos_512_v4
+155/788969/campos_512_v4
+155/788977/campos_512_v4
+155/788991/campos_512_v4
+155/788999/campos_512_v4
+155/789010/campos_512_v4
+155/789017/campos_512_v4
+155/789022/campos_512_v4
+155/789025/campos_512_v4
+155/789026/campos_512_v4
+155/789027/campos_512_v4
+155/789045/campos_512_v4
+155/789055/campos_512_v4
+155/789056/campos_512_v4
+155/789061/campos_512_v4
+155/789064/campos_512_v4
+155/789075/campos_512_v4
+155/789079/campos_512_v4
+155/789090/campos_512_v4
+155/789096/campos_512_v4
+155/789097/campos_512_v4
+155/789105/campos_512_v4
+155/789123/campos_512_v4
+155/789125/campos_512_v4
+155/789133/campos_512_v4
+155/789137/campos_512_v4
+155/789140/campos_512_v4
+155/789142/campos_512_v4
+155/789147/campos_512_v4
+155/789148/campos_512_v4
+155/789173/campos_512_v4
+155/789176/campos_512_v4
+155/789178/campos_512_v4
+155/789190/campos_512_v4
+155/789213/campos_512_v4
+155/789224/campos_512_v4
+155/789235/campos_512_v4
+155/789245/campos_512_v4
+155/789249/campos_512_v4
+155/789251/campos_512_v4
+155/789262/campos_512_v4
+155/789263/campos_512_v4
+155/789264/campos_512_v4
+155/789265/campos_512_v4
+155/789269/campos_512_v4
+155/789280/campos_512_v4
+155/789283/campos_512_v4
+155/789285/campos_512_v4
+155/789304/campos_512_v4
+155/789309/campos_512_v4
+155/789315/campos_512_v4
+155/789318/campos_512_v4
+155/789376/campos_512_v4
+155/789379/campos_512_v4
+155/789382/campos_512_v4
+155/789385/campos_512_v4
+155/789391/campos_512_v4
+155/789397/campos_512_v4
+155/789402/campos_512_v4
+155/789404/campos_512_v4
+155/789410/campos_512_v4
+155/789414/campos_512_v4
+155/789417/campos_512_v4
+155/789418/campos_512_v4
+155/789435/campos_512_v4
+155/789440/campos_512_v4
+155/789448/campos_512_v4
+155/789454/campos_512_v4
+155/789457/campos_512_v4
+155/789465/campos_512_v4
+155/789469/campos_512_v4
+155/789473/campos_512_v4
+155/789497/campos_512_v4
+155/789502/campos_512_v4
+155/789517/campos_512_v4
+155/789521/campos_512_v4
+155/789536/campos_512_v4
+155/789542/campos_512_v4
+155/789571/campos_512_v4
+155/789572/campos_512_v4
+155/789592/campos_512_v4
+155/789597/campos_512_v4
+155/789599/campos_512_v4
+155/789602/campos_512_v4
+155/789608/campos_512_v4
+155/789619/campos_512_v4
+155/789623/campos_512_v4
+155/789626/campos_512_v4
+155/789638/campos_512_v4
+155/789651/campos_512_v4
+155/789661/campos_512_v4
+155/789671/campos_512_v4
+155/789675/campos_512_v4
+155/789679/campos_512_v4
+155/789680/campos_512_v4
+155/789685/campos_512_v4
+155/789703/campos_512_v4
+155/789704/campos_512_v4
+155/789713/campos_512_v4
+155/789717/campos_512_v4
+155/789718/campos_512_v4
+155/789731/campos_512_v4
+155/789738/campos_512_v4
+155/789740/campos_512_v4
+155/789741/campos_512_v4
+155/789743/campos_512_v4
+155/789763/campos_512_v4
+155/789765/campos_512_v4
+155/789794/campos_512_v4
+155/789802/campos_512_v4
+155/789804/campos_512_v4
+155/789809/campos_512_v4
+155/789810/campos_512_v4
+155/789818/campos_512_v4
+155/789819/campos_512_v4
+155/789820/campos_512_v4
+155/789827/campos_512_v4
+155/789834/campos_512_v4
+155/789838/campos_512_v4
+155/789840/campos_512_v4
+155/789842/campos_512_v4
+155/789845/campos_512_v4
+155/789849/campos_512_v4
+155/789850/campos_512_v4
+155/789852/campos_512_v4
+155/789854/campos_512_v4
+155/789863/campos_512_v4
+155/789871/campos_512_v4
+155/789882/campos_512_v4
+155/789885/campos_512_v4
+155/789888/campos_512_v4
+155/789889/campos_512_v4
+155/789897/campos_512_v4
+155/789898/campos_512_v4
+155/789899/campos_512_v4
+155/789900/campos_512_v4
+155/789911/campos_512_v4
+155/789917/campos_512_v4
+155/789930/campos_512_v4
+155/789939/campos_512_v4
+155/789950/campos_512_v4
+155/789951/campos_512_v4
+155/789953/campos_512_v4
+155/789961/campos_512_v4
+155/789978/campos_512_v4
+156/790008/campos_512_v4
+156/790025/campos_512_v4
+156/790032/campos_512_v4
+156/790041/campos_512_v4
+156/790056/campos_512_v4
+156/790065/campos_512_v4
+156/790080/campos_512_v4
+156/790096/campos_512_v4
+156/790109/campos_512_v4
+156/790110/campos_512_v4
+156/790127/campos_512_v4
+156/790135/campos_512_v4
+156/790136/campos_512_v4
+156/790148/campos_512_v4
+156/790158/campos_512_v4
+156/790164/campos_512_v4
+156/790171/campos_512_v4
+156/790183/campos_512_v4
+156/790190/campos_512_v4
+156/790197/campos_512_v4
+156/790201/campos_512_v4
+156/790206/campos_512_v4
+156/790210/campos_512_v4
+156/790214/campos_512_v4
+156/790219/campos_512_v4
+156/790231/campos_512_v4
+156/790234/campos_512_v4
+156/790237/campos_512_v4
+156/790241/campos_512_v4
+156/790252/campos_512_v4
+156/790258/campos_512_v4
+156/790266/campos_512_v4
+156/790268/campos_512_v4
+156/790278/campos_512_v4
+156/790285/campos_512_v4
+156/790306/campos_512_v4
+156/790311/campos_512_v4
+156/790318/campos_512_v4
+156/790321/campos_512_v4
+156/790325/campos_512_v4
+156/790326/campos_512_v4
+156/790327/campos_512_v4
+156/790337/campos_512_v4
+156/790340/campos_512_v4
+156/790341/campos_512_v4
+156/790344/campos_512_v4
+156/790346/campos_512_v4
+156/790349/campos_512_v4
+156/790354/campos_512_v4
+156/790359/campos_512_v4
+156/790377/campos_512_v4
+156/790378/campos_512_v4
+156/790381/campos_512_v4
+156/790382/campos_512_v4
+156/790394/campos_512_v4
+156/790400/campos_512_v4
+156/790431/campos_512_v4
+156/790434/campos_512_v4
+156/790435/campos_512_v4
+156/790438/campos_512_v4
+156/790442/campos_512_v4
+156/790447/campos_512_v4
+156/790449/campos_512_v4
+156/790453/campos_512_v4
+156/790467/campos_512_v4
+156/790470/campos_512_v4
+156/790493/campos_512_v4
+156/790495/campos_512_v4
+156/790496/campos_512_v4
+156/790515/campos_512_v4
+156/790519/campos_512_v4
+156/790524/campos_512_v4
+156/790531/campos_512_v4
+156/790540/campos_512_v4
+156/790553/campos_512_v4
+156/790555/campos_512_v4
+156/790579/campos_512_v4
+156/790606/campos_512_v4
+156/790609/campos_512_v4
+156/790611/campos_512_v4
+156/790613/campos_512_v4
+156/790614/campos_512_v4
+156/790616/campos_512_v4
+156/790626/campos_512_v4
+156/790627/campos_512_v4
+156/790636/campos_512_v4
+156/790643/campos_512_v4
+156/790654/campos_512_v4
+156/790655/campos_512_v4
+156/790656/campos_512_v4
+156/790711/campos_512_v4
+156/790727/campos_512_v4
+156/790728/campos_512_v4
+156/790751/campos_512_v4
+156/790768/campos_512_v4
+156/790777/campos_512_v4
+156/790791/campos_512_v4
+156/790810/campos_512_v4
+156/790812/campos_512_v4
+156/790818/campos_512_v4
+156/790819/campos_512_v4
+156/790829/campos_512_v4
+156/790833/campos_512_v4
+156/790853/campos_512_v4
+156/790855/campos_512_v4
+156/790859/campos_512_v4
+156/790866/campos_512_v4
+156/790867/campos_512_v4
+156/790874/campos_512_v4
+156/790877/campos_512_v4
+156/790888/campos_512_v4
+156/790892/campos_512_v4
+156/790894/campos_512_v4
+156/790901/campos_512_v4
+156/790904/campos_512_v4
+156/790916/campos_512_v4
+156/790917/campos_512_v4
+156/790937/campos_512_v4
+156/790938/campos_512_v4
+156/790941/campos_512_v4
+156/790952/campos_512_v4
+156/790968/campos_512_v4
+156/790987/campos_512_v4
+156/790992/campos_512_v4
+156/790993/campos_512_v4
+156/791002/campos_512_v4
+156/791010/campos_512_v4
+156/791024/campos_512_v4
+156/791029/campos_512_v4
+156/791046/campos_512_v4
+156/791050/campos_512_v4
+156/791091/campos_512_v4
+156/791092/campos_512_v4
+156/791118/campos_512_v4
+156/791124/campos_512_v4
+156/791132/campos_512_v4
+156/791138/campos_512_v4
+156/791152/campos_512_v4
+156/791153/campos_512_v4
+156/791156/campos_512_v4
+156/791164/campos_512_v4
+156/791165/campos_512_v4
+156/791166/campos_512_v4
+156/791170/campos_512_v4
+156/791185/campos_512_v4
+156/791187/campos_512_v4
+156/791205/campos_512_v4
+156/791207/campos_512_v4
+156/791209/campos_512_v4
+156/791214/campos_512_v4
+156/791217/campos_512_v4
+156/791223/campos_512_v4
+156/791232/campos_512_v4
+156/791263/campos_512_v4
+156/791270/campos_512_v4
+156/791280/campos_512_v4
+156/791285/campos_512_v4
+156/791290/campos_512_v4
+156/791298/campos_512_v4
+156/791301/campos_512_v4
+156/791306/campos_512_v4
+156/791307/campos_512_v4
+156/791316/campos_512_v4
+156/791332/campos_512_v4
+156/791337/campos_512_v4
+156/791341/campos_512_v4
+156/791352/campos_512_v4
+156/791354/campos_512_v4
+156/791356/campos_512_v4
+156/791362/campos_512_v4
+156/791364/campos_512_v4
+156/791365/campos_512_v4
+156/791378/campos_512_v4
+156/791399/campos_512_v4
+156/791407/campos_512_v4
+156/791419/campos_512_v4
+156/791425/campos_512_v4
+156/791433/campos_512_v4
+156/791440/campos_512_v4
+156/791443/campos_512_v4
+156/791450/campos_512_v4
+156/791457/campos_512_v4
+156/791466/campos_512_v4
+156/791488/campos_512_v4
+156/791493/campos_512_v4
+156/791498/campos_512_v4
+156/791502/campos_512_v4
+156/791510/campos_512_v4
+156/791523/campos_512_v4
+156/791525/campos_512_v4
+156/791529/campos_512_v4
+156/791533/campos_512_v4
+156/791535/campos_512_v4
+156/791540/campos_512_v4
+156/791547/campos_512_v4
+156/791556/campos_512_v4
+156/791564/campos_512_v4
+156/791565/campos_512_v4
+156/791570/campos_512_v4
+156/791575/campos_512_v4
+156/791590/campos_512_v4
+156/791591/campos_512_v4
+156/791597/campos_512_v4
+156/791615/campos_512_v4
+156/791629/campos_512_v4
+156/791630/campos_512_v4
+156/791634/campos_512_v4
+156/791651/campos_512_v4
+156/791659/campos_512_v4
+156/791671/campos_512_v4
+156/791690/campos_512_v4
+156/791698/campos_512_v4
+156/791700/campos_512_v4
+156/791720/campos_512_v4
+156/791725/campos_512_v4
+156/791728/campos_512_v4
+156/791735/campos_512_v4
+156/791738/campos_512_v4
+156/791748/campos_512_v4
+156/791752/campos_512_v4
+156/791756/campos_512_v4
+156/791766/campos_512_v4
+156/791769/campos_512_v4
+156/791787/campos_512_v4
+156/791794/campos_512_v4
+156/791800/campos_512_v4
+156/791804/campos_512_v4
+156/791822/campos_512_v4
+156/791826/campos_512_v4
+156/791828/campos_512_v4
+156/791867/campos_512_v4
+156/791868/campos_512_v4
+156/791869/campos_512_v4
+156/791891/campos_512_v4
+156/791896/campos_512_v4
+156/791904/campos_512_v4
+156/791918/campos_512_v4
+156/791919/campos_512_v4
+156/791920/campos_512_v4
+156/791924/campos_512_v4
+156/791925/campos_512_v4
+156/791929/campos_512_v4
+156/791930/campos_512_v4
+156/791942/campos_512_v4
+156/791944/campos_512_v4
+156/791946/campos_512_v4
+156/791947/campos_512_v4
+156/791951/campos_512_v4
+156/791958/campos_512_v4
+156/791967/campos_512_v4
+156/791980/campos_512_v4
+156/792014/campos_512_v4
+156/792033/campos_512_v4
+156/792046/campos_512_v4
+156/792057/campos_512_v4
+156/792058/campos_512_v4
+156/792061/campos_512_v4
+156/792068/campos_512_v4
+156/792110/campos_512_v4
+156/792118/campos_512_v4
+156/792130/campos_512_v4
+156/792131/campos_512_v4
+156/792137/campos_512_v4
+156/792145/campos_512_v4
+156/792148/campos_512_v4
+156/792151/campos_512_v4
+156/792163/campos_512_v4
+156/792177/campos_512_v4
+156/792184/campos_512_v4
+156/792197/campos_512_v4
+156/792199/campos_512_v4
+156/792219/campos_512_v4
+156/792233/campos_512_v4
+156/792235/campos_512_v4
+156/792243/campos_512_v4
+156/792261/campos_512_v4
+156/792277/campos_512_v4
+156/792286/campos_512_v4
+156/792287/campos_512_v4
+156/792294/campos_512_v4
+156/792299/campos_512_v4
+156/792300/campos_512_v4
+156/792316/campos_512_v4
+156/792322/campos_512_v4
+156/792333/campos_512_v4
+156/792335/campos_512_v4
+156/792337/campos_512_v4
+156/792339/campos_512_v4
+156/792348/campos_512_v4
+156/792358/campos_512_v4
+156/792359/campos_512_v4
+156/792360/campos_512_v4
+156/792362/campos_512_v4
+156/792363/campos_512_v4
+156/792378/campos_512_v4
+156/792384/campos_512_v4
+156/792386/campos_512_v4
+156/792388/campos_512_v4
+156/792389/campos_512_v4
+156/792414/campos_512_v4
+156/792417/campos_512_v4
+156/792428/campos_512_v4
+156/792439/campos_512_v4
+156/792443/campos_512_v4
+156/792446/campos_512_v4
+156/792450/campos_512_v4
+156/792452/campos_512_v4
+156/792463/campos_512_v4
+156/792468/campos_512_v4
+156/792471/campos_512_v4
+156/792475/campos_512_v4
+156/792485/campos_512_v4
+156/792486/campos_512_v4
+156/792500/campos_512_v4
+156/792501/campos_512_v4
+156/792514/campos_512_v4
+156/792521/campos_512_v4
+156/792522/campos_512_v4
+156/792527/campos_512_v4
+156/792552/campos_512_v4
+156/792557/campos_512_v4
+156/792568/campos_512_v4
+156/792575/campos_512_v4
+156/792576/campos_512_v4
+156/792598/campos_512_v4
+156/792599/campos_512_v4
+156/792611/campos_512_v4
+156/792612/campos_512_v4
+156/792629/campos_512_v4
+156/792649/campos_512_v4
+156/792662/campos_512_v4
+156/792664/campos_512_v4
+156/792667/campos_512_v4
+156/792668/campos_512_v4
+156/792671/campos_512_v4
+156/792672/campos_512_v4
+156/792674/campos_512_v4
+156/792681/campos_512_v4
+156/792686/campos_512_v4
+156/792697/campos_512_v4
+156/792698/campos_512_v4
+156/792719/campos_512_v4
+156/792725/campos_512_v4
+156/792732/campos_512_v4
+156/792733/campos_512_v4
+156/792740/campos_512_v4
+156/792742/campos_512_v4
+156/792751/campos_512_v4
+156/792757/campos_512_v4
+156/792765/campos_512_v4
+156/792771/campos_512_v4
+156/792774/campos_512_v4
+156/792794/campos_512_v4
+156/792798/campos_512_v4
+156/792799/campos_512_v4
+156/792810/campos_512_v4
+156/792813/campos_512_v4
+156/792819/campos_512_v4
+156/792822/campos_512_v4
+156/792834/campos_512_v4
+156/792835/campos_512_v4
+156/792842/campos_512_v4
+156/792845/campos_512_v4
+156/792846/campos_512_v4
+156/792861/campos_512_v4
+156/792877/campos_512_v4
+156/792878/campos_512_v4
+156/792880/campos_512_v4
+156/792884/campos_512_v4
+156/792885/campos_512_v4
+156/792887/campos_512_v4
+156/792904/campos_512_v4
+156/792907/campos_512_v4
+156/792910/campos_512_v4
+156/792921/campos_512_v4
+156/792923/campos_512_v4
+156/792927/campos_512_v4
+156/792928/campos_512_v4
+156/792940/campos_512_v4
+156/792958/campos_512_v4
+156/792967/campos_512_v4
+156/792977/campos_512_v4
+156/792978/campos_512_v4
+156/792989/campos_512_v4
+156/793005/campos_512_v4
+156/793007/campos_512_v4
+156/793008/campos_512_v4
+156/793014/campos_512_v4
+156/793053/campos_512_v4
+156/793056/campos_512_v4
+156/793058/campos_512_v4
+156/793061/campos_512_v4
+156/793064/campos_512_v4
+156/793090/campos_512_v4
+156/793093/campos_512_v4
+156/793103/campos_512_v4
+156/793107/campos_512_v4
+156/793109/campos_512_v4
+156/793114/campos_512_v4
+156/793124/campos_512_v4
+156/793138/campos_512_v4
+156/793145/campos_512_v4
+156/793149/campos_512_v4
+156/793151/campos_512_v4
+156/793171/campos_512_v4
+156/793172/campos_512_v4
+156/793173/campos_512_v4
+156/793178/campos_512_v4
+156/793190/campos_512_v4
+156/793203/campos_512_v4
+156/793217/campos_512_v4
+156/793223/campos_512_v4
+156/793230/campos_512_v4
+156/793241/campos_512_v4
+156/793262/campos_512_v4
+156/793264/campos_512_v4
+156/793274/campos_512_v4
+156/793277/campos_512_v4
+156/793282/campos_512_v4
+156/793303/campos_512_v4
+156/793322/campos_512_v4
+156/793329/campos_512_v4
+156/793342/campos_512_v4
+156/793350/campos_512_v4
+156/793352/campos_512_v4
+156/793355/campos_512_v4
+156/793358/campos_512_v4
+156/793367/campos_512_v4
+156/793371/campos_512_v4
+156/793374/campos_512_v4
+156/793398/campos_512_v4
+156/793402/campos_512_v4
+156/793423/campos_512_v4
+156/793424/campos_512_v4
+156/793459/campos_512_v4
+156/793462/campos_512_v4
+156/793465/campos_512_v4
+156/793478/campos_512_v4
+156/793479/campos_512_v4
+156/793489/campos_512_v4
+156/793491/campos_512_v4
+156/793500/campos_512_v4
+156/793511/campos_512_v4
+156/793515/campos_512_v4
+156/793517/campos_512_v4
+156/793521/campos_512_v4
+156/793525/campos_512_v4
+156/793526/campos_512_v4
+156/793542/campos_512_v4
+156/793544/campos_512_v4
+156/793547/campos_512_v4
+156/793550/campos_512_v4
+156/793551/campos_512_v4
+156/793562/campos_512_v4
+156/793567/campos_512_v4
+156/793584/campos_512_v4
+156/793586/campos_512_v4
+156/793600/campos_512_v4
+156/793629/campos_512_v4
+156/793633/campos_512_v4
+156/793642/campos_512_v4
+156/793657/campos_512_v4
+156/793658/campos_512_v4
+156/793664/campos_512_v4
+156/793666/campos_512_v4
+156/793674/campos_512_v4
+156/793677/campos_512_v4
+156/793681/campos_512_v4
+156/793694/campos_512_v4
+156/793696/campos_512_v4
+156/793709/campos_512_v4
+156/793710/campos_512_v4
+156/793711/campos_512_v4
+156/793719/campos_512_v4
+156/793732/campos_512_v4
+156/793733/campos_512_v4
+156/793737/campos_512_v4
+156/793755/campos_512_v4
+156/793756/campos_512_v4
+156/793758/campos_512_v4
+156/793761/campos_512_v4
+156/793784/campos_512_v4
+156/793785/campos_512_v4
+156/793793/campos_512_v4
+156/793803/campos_512_v4
+156/793805/campos_512_v4
+156/793810/campos_512_v4
+156/793814/campos_512_v4
+156/793820/campos_512_v4
+156/793821/campos_512_v4
+156/793832/campos_512_v4
+156/793839/campos_512_v4
+156/793840/campos_512_v4
+156/793860/campos_512_v4
+156/793867/campos_512_v4
+156/793880/campos_512_v4
+156/793883/campos_512_v4
+156/793886/campos_512_v4
+156/793891/campos_512_v4
+156/793893/campos_512_v4
+156/793897/campos_512_v4
+156/793898/campos_512_v4
+156/793899/campos_512_v4
+156/793930/campos_512_v4
+156/793935/campos_512_v4
+156/793937/campos_512_v4
+156/793947/campos_512_v4
+156/793961/campos_512_v4
+156/793963/campos_512_v4
+156/793965/campos_512_v4
+156/793967/campos_512_v4
+156/793968/campos_512_v4
+156/793985/campos_512_v4
+156/793990/campos_512_v4
+156/793991/campos_512_v4
+156/793993/campos_512_v4
+156/793995/campos_512_v4
+156/793999/campos_512_v4
+156/794002/campos_512_v4
+156/794008/campos_512_v4
+156/794015/campos_512_v4
+156/794030/campos_512_v4
+156/794048/campos_512_v4
+156/794070/campos_512_v4
+156/794073/campos_512_v4
+156/794075/campos_512_v4
+156/794088/campos_512_v4
+156/794095/campos_512_v4
+156/794100/campos_512_v4
+156/794105/campos_512_v4
+156/794141/campos_512_v4
+156/794142/campos_512_v4
+156/794144/campos_512_v4
+156/794146/campos_512_v4
+156/794153/campos_512_v4
+156/794158/campos_512_v4
+156/794167/campos_512_v4
+156/794173/campos_512_v4
+156/794185/campos_512_v4
+156/794187/campos_512_v4
+156/794190/campos_512_v4
+156/794196/campos_512_v4
+156/794204/campos_512_v4
+156/794209/campos_512_v4
+156/794214/campos_512_v4
+156/794217/campos_512_v4
+156/794218/campos_512_v4
+156/794220/campos_512_v4
+156/794222/campos_512_v4
+156/794235/campos_512_v4
+156/794239/campos_512_v4
+156/794259/campos_512_v4
+156/794286/campos_512_v4
+156/794294/campos_512_v4
+156/794299/campos_512_v4
+156/794300/campos_512_v4
+156/794307/campos_512_v4
+156/794324/campos_512_v4
+156/794329/campos_512_v4
+156/794337/campos_512_v4
+156/794345/campos_512_v4
+156/794368/campos_512_v4
+156/794370/campos_512_v4
+156/794375/campos_512_v4
+156/794381/campos_512_v4
+156/794399/campos_512_v4
+156/794403/campos_512_v4
+156/794410/campos_512_v4
+156/794412/campos_512_v4
+156/794418/campos_512_v4
+156/794424/campos_512_v4
+156/794427/campos_512_v4
+156/794435/campos_512_v4
+156/794447/campos_512_v4
+156/794451/campos_512_v4
+156/794458/campos_512_v4
+156/794463/campos_512_v4
+156/794471/campos_512_v4
+156/794475/campos_512_v4
+156/794479/campos_512_v4
+156/794483/campos_512_v4
+156/794486/campos_512_v4
+156/794488/campos_512_v4
+156/794498/campos_512_v4
+156/794507/campos_512_v4
+156/794515/campos_512_v4
+156/794533/campos_512_v4
+156/794553/campos_512_v4
+156/794555/campos_512_v4
+156/794563/campos_512_v4
+156/794576/campos_512_v4
+156/794578/campos_512_v4
+156/794582/campos_512_v4
+156/794584/campos_512_v4
+156/794589/campos_512_v4
+156/794593/campos_512_v4
+156/794602/campos_512_v4
+156/794610/campos_512_v4
+156/794613/campos_512_v4
+156/794617/campos_512_v4
+156/794618/campos_512_v4
+156/794620/campos_512_v4
+156/794632/campos_512_v4
+156/794643/campos_512_v4
+156/794644/campos_512_v4
+156/794648/campos_512_v4
+156/794670/campos_512_v4
+156/794689/campos_512_v4
+156/794710/campos_512_v4
+156/794711/campos_512_v4
+156/794712/campos_512_v4
+156/794722/campos_512_v4
+156/794726/campos_512_v4
+156/794730/campos_512_v4
+156/794734/campos_512_v4
+156/794737/campos_512_v4
+156/794742/campos_512_v4
+156/794743/campos_512_v4
+156/794760/campos_512_v4
+156/794774/campos_512_v4
+156/794780/campos_512_v4
+156/794783/campos_512_v4
+156/794786/campos_512_v4
+156/794797/campos_512_v4
+156/794801/campos_512_v4
+156/794814/campos_512_v4
+156/794816/campos_512_v4
+156/794822/campos_512_v4
+156/794823/campos_512_v4
+156/794829/campos_512_v4
+156/794835/campos_512_v4
+156/794841/campos_512_v4
+156/794846/campos_512_v4
+156/794847/campos_512_v4
+156/794866/campos_512_v4
+156/794873/campos_512_v4
+156/794879/campos_512_v4
+156/794883/campos_512_v4
+156/794885/campos_512_v4
+156/794888/campos_512_v4
+156/794890/campos_512_v4
+156/794899/campos_512_v4
+156/794900/campos_512_v4
+156/794906/campos_512_v4
+156/794918/campos_512_v4
+156/794920/campos_512_v4
+156/794925/campos_512_v4
+156/794927/campos_512_v4
+156/794935/campos_512_v4
+156/794936/campos_512_v4
+156/794944/campos_512_v4
+156/794966/campos_512_v4
+156/794978/campos_512_v4
+156/794979/campos_512_v4
+156/794984/campos_512_v4
+156/794990/campos_512_v4
+156/794992/campos_512_v4
+157/795004/campos_512_v4
+157/795005/campos_512_v4
+157/795013/campos_512_v4
+157/795024/campos_512_v4
+157/795026/campos_512_v4
+157/795027/campos_512_v4
+157/795028/campos_512_v4
+157/795031/campos_512_v4
+157/795036/campos_512_v4
+157/795046/campos_512_v4
+157/795057/campos_512_v4
+157/795067/campos_512_v4
+157/795075/campos_512_v4
+157/795078/campos_512_v4
+157/795101/campos_512_v4
+157/795104/campos_512_v4
+157/795109/campos_512_v4
+157/795110/campos_512_v4
+157/795118/campos_512_v4
+157/795121/campos_512_v4
+157/795133/campos_512_v4
+157/795138/campos_512_v4
+157/795144/campos_512_v4
+157/795145/campos_512_v4
+157/795152/campos_512_v4
+157/795179/campos_512_v4
+157/795186/campos_512_v4
+157/795188/campos_512_v4
+157/795189/campos_512_v4
+157/795196/campos_512_v4
+157/795200/campos_512_v4
+157/795202/campos_512_v4
+157/795204/campos_512_v4
+157/795213/campos_512_v4
+157/795215/campos_512_v4
+157/795220/campos_512_v4
+157/795222/campos_512_v4
+157/795227/campos_512_v4
+157/795228/campos_512_v4
+157/795236/campos_512_v4
+157/795237/campos_512_v4
+157/795240/campos_512_v4
+157/795245/campos_512_v4
+157/795248/campos_512_v4
+157/795255/campos_512_v4
+157/795268/campos_512_v4
+157/795269/campos_512_v4
+157/795271/campos_512_v4
+157/795279/campos_512_v4
+157/795281/campos_512_v4
+157/795287/campos_512_v4
+157/795295/campos_512_v4
+157/795309/campos_512_v4
+157/795314/campos_512_v4
+157/795315/campos_512_v4
+157/795319/campos_512_v4
+157/795324/campos_512_v4
+157/795330/campos_512_v4
+157/795334/campos_512_v4
+157/795339/campos_512_v4
+157/795347/campos_512_v4
+157/795354/campos_512_v4
+157/795369/campos_512_v4
+157/795373/campos_512_v4
+157/795381/campos_512_v4
+157/795383/campos_512_v4
+157/795386/campos_512_v4
+157/795390/campos_512_v4
+157/795400/campos_512_v4
+157/795412/campos_512_v4
+157/795415/campos_512_v4
+157/795416/campos_512_v4
+157/795422/campos_512_v4
+157/795434/campos_512_v4
+157/795446/campos_512_v4
+157/795450/campos_512_v4
+157/795453/campos_512_v4
+157/795468/campos_512_v4
+157/795491/campos_512_v4
+157/795502/campos_512_v4
+157/795506/campos_512_v4
+157/795513/campos_512_v4
+157/795515/campos_512_v4
+157/795518/campos_512_v4
+157/795533/campos_512_v4
+157/795539/campos_512_v4
+157/795546/campos_512_v4
+157/795557/campos_512_v4
+157/795563/campos_512_v4
+157/795565/campos_512_v4
+157/795570/campos_512_v4
+157/795571/campos_512_v4
+157/795573/campos_512_v4
+157/795575/campos_512_v4
+157/795580/campos_512_v4
+157/795592/campos_512_v4
+157/795598/campos_512_v4
+157/795602/campos_512_v4
+157/795604/campos_512_v4
+157/795606/campos_512_v4
+157/795614/campos_512_v4
+157/795638/campos_512_v4
+157/795639/campos_512_v4
+157/795652/campos_512_v4
+157/795655/campos_512_v4
+157/795659/campos_512_v4
+157/795666/campos_512_v4
+157/795667/campos_512_v4
+157/795679/campos_512_v4
+157/795685/campos_512_v4
+157/795687/campos_512_v4
+157/795688/campos_512_v4
+157/795691/campos_512_v4
+157/795692/campos_512_v4
+157/795700/campos_512_v4
+157/795703/campos_512_v4
+157/795714/campos_512_v4
+157/795718/campos_512_v4
+157/795727/campos_512_v4
+157/795731/campos_512_v4
+157/795764/campos_512_v4
+157/795771/campos_512_v4
+157/795789/campos_512_v4
+157/795791/campos_512_v4
+157/795792/campos_512_v4
+157/795799/campos_512_v4
+157/795802/campos_512_v4
+157/795810/campos_512_v4
+157/795829/campos_512_v4
+157/795832/campos_512_v4
+157/795843/campos_512_v4
+157/795853/campos_512_v4
+157/795855/campos_512_v4
+157/795859/campos_512_v4
+157/795861/campos_512_v4
+157/795871/campos_512_v4
+157/795877/campos_512_v4
+157/795882/campos_512_v4
+157/795896/campos_512_v4
+157/795901/campos_512_v4
+157/795908/campos_512_v4
+157/795916/campos_512_v4
+157/795929/campos_512_v4
+157/795932/campos_512_v4
+157/795936/campos_512_v4
+157/795940/campos_512_v4
+157/795942/campos_512_v4
+157/795944/campos_512_v4
+157/795951/campos_512_v4
+157/795957/campos_512_v4
+157/795961/campos_512_v4
+157/795963/campos_512_v4
+157/795967/campos_512_v4
+157/795972/campos_512_v4
+157/795973/campos_512_v4
+157/795979/campos_512_v4
+157/795982/campos_512_v4
+157/795986/campos_512_v4
+157/795995/campos_512_v4
+157/796003/campos_512_v4
+157/796004/campos_512_v4
+157/796007/campos_512_v4
+157/796019/campos_512_v4
+157/796021/campos_512_v4
+157/796025/campos_512_v4
+157/796031/campos_512_v4
+157/796037/campos_512_v4
+157/796038/campos_512_v4
+157/796054/campos_512_v4
+157/796057/campos_512_v4
+157/796058/campos_512_v4
+157/796062/campos_512_v4
+157/796068/campos_512_v4
+157/796070/campos_512_v4
+157/796075/campos_512_v4
+157/796076/campos_512_v4
+157/796091/campos_512_v4
+157/796094/campos_512_v4
+157/796096/campos_512_v4
+157/796111/campos_512_v4
+157/796113/campos_512_v4
+157/796117/campos_512_v4
+157/796120/campos_512_v4
+157/796125/campos_512_v4
+157/796130/campos_512_v4
+157/796137/campos_512_v4
+157/796147/campos_512_v4
+157/796160/campos_512_v4
+157/796179/campos_512_v4
+157/796180/campos_512_v4
+157/796184/campos_512_v4
+157/796185/campos_512_v4
+157/796206/campos_512_v4
+157/796210/campos_512_v4
+157/796220/campos_512_v4
+157/796221/campos_512_v4
+157/796233/campos_512_v4
+157/796241/campos_512_v4
+157/796260/campos_512_v4
+157/796266/campos_512_v4
+157/796278/campos_512_v4
+157/796280/campos_512_v4
+157/796282/campos_512_v4
+157/796294/campos_512_v4
+157/796301/campos_512_v4
+157/796309/campos_512_v4
+157/796310/campos_512_v4
+157/796312/campos_512_v4
+157/796313/campos_512_v4
+157/796320/campos_512_v4
+157/796329/campos_512_v4
+157/796330/campos_512_v4
+157/796335/campos_512_v4
+157/796343/campos_512_v4
+157/796355/campos_512_v4
+157/796356/campos_512_v4
+157/796362/campos_512_v4
+157/796364/campos_512_v4
+157/796367/campos_512_v4
+157/796368/campos_512_v4
+157/796374/campos_512_v4
+157/796376/campos_512_v4
+157/796377/campos_512_v4
+157/796378/campos_512_v4
+157/796379/campos_512_v4
+157/796381/campos_512_v4
+157/796387/campos_512_v4
+157/796405/campos_512_v4
+157/796414/campos_512_v4
+157/796422/campos_512_v4
+157/796425/campos_512_v4
+157/796433/campos_512_v4
+157/796437/campos_512_v4
+157/796463/campos_512_v4
+157/796471/campos_512_v4
+157/796489/campos_512_v4
+157/796504/campos_512_v4
+157/796515/campos_512_v4
+157/796518/campos_512_v4
+157/796521/campos_512_v4
+157/796543/campos_512_v4
+157/796551/campos_512_v4
+157/796563/campos_512_v4
+157/796564/campos_512_v4
+157/796565/campos_512_v4
+157/796571/campos_512_v4
+157/796574/campos_512_v4
+157/796583/campos_512_v4
+157/796586/campos_512_v4
+157/796598/campos_512_v4
+157/796600/campos_512_v4
+157/796603/campos_512_v4
+157/796604/campos_512_v4
+157/796614/campos_512_v4
+157/796615/campos_512_v4
+157/796636/campos_512_v4
+157/796637/campos_512_v4
+157/796646/campos_512_v4
+157/796658/campos_512_v4
+157/796666/campos_512_v4
+157/796667/campos_512_v4
+157/796669/campos_512_v4
+157/796685/campos_512_v4
+157/796708/campos_512_v4
+157/796733/campos_512_v4
+157/796739/campos_512_v4
+157/796762/campos_512_v4
+157/796790/campos_512_v4
+157/796791/campos_512_v4
+157/796798/campos_512_v4
+157/796804/campos_512_v4
+157/796810/campos_512_v4
+157/796818/campos_512_v4
+157/796829/campos_512_v4
+157/796839/campos_512_v4
+157/796844/campos_512_v4
+157/796849/campos_512_v4
+157/796863/campos_512_v4
+157/796872/campos_512_v4
+157/796881/campos_512_v4
+157/796889/campos_512_v4
+157/796891/campos_512_v4
+157/796897/campos_512_v4
+157/796913/campos_512_v4
+157/796916/campos_512_v4
+157/796927/campos_512_v4
+157/796931/campos_512_v4
+157/796947/campos_512_v4
+157/796994/campos_512_v4
+157/796997/campos_512_v4
+157/796998/campos_512_v4
+157/797008/campos_512_v4
+157/797012/campos_512_v4
+157/797025/campos_512_v4
+157/797027/campos_512_v4
+157/797055/campos_512_v4
+157/797066/campos_512_v4
+157/797074/campos_512_v4
+157/797083/campos_512_v4
+157/797098/campos_512_v4
+157/797109/campos_512_v4
+157/797120/campos_512_v4
+157/797127/campos_512_v4
+157/797135/campos_512_v4
+157/797136/campos_512_v4
+157/797142/campos_512_v4
+157/797158/campos_512_v4
+157/797160/campos_512_v4
+157/797165/campos_512_v4
+157/797170/campos_512_v4
+157/797176/campos_512_v4
+157/797183/campos_512_v4
+157/797187/campos_512_v4
+157/797197/campos_512_v4
+157/797223/campos_512_v4
+157/797231/campos_512_v4
+157/797242/campos_512_v4
+157/797247/campos_512_v4
+157/797262/campos_512_v4
+157/797270/campos_512_v4
+157/797279/campos_512_v4
+157/797286/campos_512_v4
+157/797290/campos_512_v4
+157/797291/campos_512_v4
+157/797297/campos_512_v4
+157/797302/campos_512_v4
+157/797312/campos_512_v4
+157/797324/campos_512_v4
+157/797332/campos_512_v4
+157/797333/campos_512_v4
+157/797342/campos_512_v4
+157/797346/campos_512_v4
+157/797361/campos_512_v4
+157/797367/campos_512_v4
+157/797373/campos_512_v4
+157/797381/campos_512_v4
+157/797394/campos_512_v4
+157/797396/campos_512_v4
+157/797400/campos_512_v4
+157/797406/campos_512_v4
+157/797416/campos_512_v4
+157/797429/campos_512_v4
+157/797430/campos_512_v4
+157/797433/campos_512_v4
+157/797446/campos_512_v4
+157/797467/campos_512_v4
+157/797469/campos_512_v4
+157/797473/campos_512_v4
+157/797496/campos_512_v4
+157/797505/campos_512_v4
+157/797513/campos_512_v4
+157/797515/campos_512_v4
+157/797522/campos_512_v4
+157/797532/campos_512_v4
+157/797535/campos_512_v4
+157/797537/campos_512_v4
+157/797542/campos_512_v4
+157/797547/campos_512_v4
+157/797550/campos_512_v4
+157/797553/campos_512_v4
+157/797569/campos_512_v4
+157/797581/campos_512_v4
+157/797582/campos_512_v4
+157/797590/campos_512_v4
+157/797598/campos_512_v4
+157/797600/campos_512_v4
+157/797604/campos_512_v4
+157/797606/campos_512_v4
+157/797612/campos_512_v4
+157/797617/campos_512_v4
+157/797621/campos_512_v4
+157/797624/campos_512_v4
+157/797632/campos_512_v4
+157/797648/campos_512_v4
+157/797655/campos_512_v4
+157/797658/campos_512_v4
+157/797684/campos_512_v4
+157/797686/campos_512_v4
+157/797691/campos_512_v4
+157/797692/campos_512_v4
+157/797695/campos_512_v4
+157/797702/campos_512_v4
+157/797704/campos_512_v4
+157/797707/campos_512_v4
+157/797709/campos_512_v4
+157/797712/campos_512_v4
+157/797716/campos_512_v4
+157/797721/campos_512_v4
+157/797722/campos_512_v4
+157/797734/campos_512_v4
+157/797740/campos_512_v4
+157/797741/campos_512_v4
+157/797743/campos_512_v4
+157/797751/campos_512_v4
+157/797754/campos_512_v4
+157/797766/campos_512_v4
+157/797789/campos_512_v4
+157/797817/campos_512_v4
+157/797821/campos_512_v4
+157/797834/campos_512_v4
+157/797847/campos_512_v4
+157/797849/campos_512_v4
+157/797861/campos_512_v4
+157/797864/campos_512_v4
+157/797868/campos_512_v4
+157/797880/campos_512_v4
+157/797888/campos_512_v4
+157/797913/campos_512_v4
+157/797917/campos_512_v4
+157/797929/campos_512_v4
+157/797934/campos_512_v4
+157/797935/campos_512_v4
+157/797946/campos_512_v4
+157/797950/campos_512_v4
+157/797963/campos_512_v4
+157/797968/campos_512_v4
+157/797976/campos_512_v4
+157/797986/campos_512_v4
+157/797994/campos_512_v4
+157/797995/campos_512_v4
+157/797998/campos_512_v4
+157/798005/campos_512_v4
+157/798009/campos_512_v4
+157/798012/campos_512_v4
+157/798031/campos_512_v4
+157/798032/campos_512_v4
+157/798053/campos_512_v4
+157/798058/campos_512_v4
+157/798060/campos_512_v4
+157/798064/campos_512_v4
+157/798067/campos_512_v4
+157/798082/campos_512_v4
+157/798094/campos_512_v4
+157/798112/campos_512_v4
+157/798115/campos_512_v4
+157/798120/campos_512_v4
+157/798125/campos_512_v4
+157/798137/campos_512_v4
+157/798143/campos_512_v4
+157/798152/campos_512_v4
+157/798159/campos_512_v4
+157/798183/campos_512_v4
+157/798189/campos_512_v4
+157/798190/campos_512_v4
+157/798230/campos_512_v4
+157/798236/campos_512_v4
+157/798244/campos_512_v4
+157/798245/campos_512_v4
+157/798248/campos_512_v4
+157/798259/campos_512_v4
+157/798262/campos_512_v4
+157/798269/campos_512_v4
+157/798273/campos_512_v4
+157/798280/campos_512_v4
+157/798285/campos_512_v4
+157/798300/campos_512_v4
+157/798307/campos_512_v4
+157/798311/campos_512_v4
+157/798315/campos_512_v4
+157/798319/campos_512_v4
+157/798320/campos_512_v4
+157/798327/campos_512_v4
+157/798331/campos_512_v4
+157/798332/campos_512_v4
+157/798337/campos_512_v4
+157/798340/campos_512_v4
+157/798354/campos_512_v4
+157/798356/campos_512_v4
+157/798375/campos_512_v4
+157/798379/campos_512_v4
+157/798380/campos_512_v4
+157/798385/campos_512_v4
+157/798397/campos_512_v4
+157/798398/campos_512_v4
+157/798400/campos_512_v4
+157/798406/campos_512_v4
+157/798414/campos_512_v4
+157/798422/campos_512_v4
+157/798424/campos_512_v4
+157/798426/campos_512_v4
+157/798427/campos_512_v4
+157/798433/campos_512_v4
+157/798441/campos_512_v4
+157/798447/campos_512_v4
+157/798449/campos_512_v4
+157/798456/campos_512_v4
+157/798471/campos_512_v4
+157/798475/campos_512_v4
+157/798481/campos_512_v4
+157/798489/campos_512_v4
+157/798496/campos_512_v4
+157/798499/campos_512_v4
+157/798504/campos_512_v4
+157/798510/campos_512_v4
+157/798530/campos_512_v4
+157/798531/campos_512_v4
+157/798538/campos_512_v4
+157/798546/campos_512_v4
+157/798549/campos_512_v4
+157/798550/campos_512_v4
+157/798558/campos_512_v4
+157/798561/campos_512_v4
+157/798569/campos_512_v4
+157/798583/campos_512_v4
+157/798588/campos_512_v4
+157/798589/campos_512_v4
+157/798613/campos_512_v4
+157/798619/campos_512_v4
+157/798632/campos_512_v4
+157/798640/campos_512_v4
+157/798643/campos_512_v4
+157/798651/campos_512_v4
+157/798652/campos_512_v4
+157/798656/campos_512_v4
+157/798665/campos_512_v4
+157/798670/campos_512_v4
+157/798676/campos_512_v4
+157/798684/campos_512_v4
+157/798693/campos_512_v4
+157/798697/campos_512_v4
+157/798709/campos_512_v4
+157/798714/campos_512_v4
+157/798725/campos_512_v4
+157/798728/campos_512_v4
+157/798729/campos_512_v4
+157/798733/campos_512_v4
+157/798743/campos_512_v4
+157/798748/campos_512_v4
+157/798769/campos_512_v4
+157/798770/campos_512_v4
+157/798777/campos_512_v4
+157/798788/campos_512_v4
+157/798792/campos_512_v4
+157/798796/campos_512_v4
+157/798807/campos_512_v4
+157/798809/campos_512_v4
+157/798814/campos_512_v4
+157/798816/campos_512_v4
+157/798823/campos_512_v4
+157/798824/campos_512_v4
+157/798843/campos_512_v4
+157/798852/campos_512_v4
+157/798858/campos_512_v4
+157/798860/campos_512_v4
+157/798866/campos_512_v4
+157/798869/campos_512_v4
+157/798877/campos_512_v4
+157/798887/campos_512_v4
+157/798888/campos_512_v4
+157/798889/campos_512_v4
+157/798890/campos_512_v4
+157/798892/campos_512_v4
+157/798902/campos_512_v4
+157/798916/campos_512_v4
+157/798917/campos_512_v4
+157/798927/campos_512_v4
+157/798939/campos_512_v4
+157/798941/campos_512_v4
+157/798947/campos_512_v4
+157/798954/campos_512_v4
+157/798956/campos_512_v4
+157/798968/campos_512_v4
+157/798969/campos_512_v4
+157/798977/campos_512_v4
+157/798982/campos_512_v4
+157/798993/campos_512_v4
+157/798994/campos_512_v4
+157/799005/campos_512_v4
+157/799007/campos_512_v4
+157/799018/campos_512_v4
+157/799025/campos_512_v4
+157/799027/campos_512_v4
+157/799033/campos_512_v4
+157/799036/campos_512_v4
+157/799045/campos_512_v4
+157/799046/campos_512_v4
+157/799047/campos_512_v4
+157/799048/campos_512_v4
+157/799054/campos_512_v4
+157/799058/campos_512_v4
+157/799066/campos_512_v4
+157/799069/campos_512_v4
+157/799079/campos_512_v4
+157/799089/campos_512_v4
+157/799097/campos_512_v4
+157/799110/campos_512_v4
+157/799129/campos_512_v4
+157/799131/campos_512_v4
+157/799134/campos_512_v4
+157/799138/campos_512_v4
+157/799144/campos_512_v4
+157/799145/campos_512_v4
+157/799156/campos_512_v4
+157/799157/campos_512_v4
+157/799163/campos_512_v4
+157/799167/campos_512_v4
+157/799169/campos_512_v4
+157/799197/campos_512_v4
+157/799204/campos_512_v4
+157/799209/campos_512_v4
+157/799213/campos_512_v4
+157/799215/campos_512_v4
+157/799218/campos_512_v4
+157/799227/campos_512_v4
+157/799234/campos_512_v4
+157/799239/campos_512_v4
+157/799243/campos_512_v4
+157/799261/campos_512_v4
+157/799267/campos_512_v4
+157/799275/campos_512_v4
+157/799276/campos_512_v4
+157/799285/campos_512_v4
+157/799302/campos_512_v4
+157/799305/campos_512_v4
+157/799316/campos_512_v4
+157/799503/campos_512_v4
+157/799517/campos_512_v4
+157/799531/campos_512_v4
+157/799544/campos_512_v4
+157/799545/campos_512_v4
+157/799552/campos_512_v4
+157/799560/campos_512_v4
+157/799566/campos_512_v4
+157/799567/campos_512_v4
+157/799574/campos_512_v4
+157/799580/campos_512_v4
+157/799585/campos_512_v4
+157/799586/campos_512_v4
+157/799611/campos_512_v4
+157/799617/campos_512_v4
+157/799619/campos_512_v4
+157/799630/campos_512_v4
+157/799636/campos_512_v4
+157/799656/campos_512_v4
+157/799662/campos_512_v4
+157/799679/campos_512_v4
+157/799699/campos_512_v4
+157/799702/campos_512_v4
+157/799704/campos_512_v4
+157/799714/campos_512_v4
+157/799720/campos_512_v4
+157/799749/campos_512_v4
+157/799751/campos_512_v4
+157/799756/campos_512_v4
+157/799757/campos_512_v4
+157/799758/campos_512_v4
+157/799769/campos_512_v4
+157/799804/campos_512_v4
+157/799811/campos_512_v4
+157/799815/campos_512_v4
+157/799820/campos_512_v4
+157/799839/campos_512_v4
+157/799844/campos_512_v4
+157/799852/campos_512_v4
+157/799860/campos_512_v4
+157/799877/campos_512_v4
+157/799880/campos_512_v4
+157/799881/campos_512_v4
+157/799887/campos_512_v4
+157/799892/campos_512_v4
+157/799905/campos_512_v4
+157/799909/campos_512_v4
+157/799935/campos_512_v4
+157/799936/campos_512_v4
+157/799937/campos_512_v4
+157/799946/campos_512_v4
+157/799961/campos_512_v4
+157/799977/campos_512_v4
+157/799987/campos_512_v4
+157/799988/campos_512_v4
+157/799998/campos_512_v4
+158/800036/campos_512_v4
+158/800038/campos_512_v4
+158/800039/campos_512_v4
+158/800040/campos_512_v4
+158/800048/campos_512_v4
+158/800073/campos_512_v4
+158/800076/campos_512_v4
+158/800086/campos_512_v4
+158/800099/campos_512_v4
+158/800112/campos_512_v4
+158/800113/campos_512_v4
+158/800122/campos_512_v4
+158/800135/campos_512_v4
+158/800165/campos_512_v4
+158/800173/campos_512_v4
+158/800180/campos_512_v4
+158/800184/campos_512_v4
+158/800214/campos_512_v4
+158/800234/campos_512_v4
+158/800239/campos_512_v4
+158/800240/campos_512_v4
+158/800250/campos_512_v4
+158/800253/campos_512_v4
+158/800276/campos_512_v4
+158/800280/campos_512_v4
+158/800288/campos_512_v4
+158/800290/campos_512_v4
+158/800295/campos_512_v4
+158/800302/campos_512_v4
+158/800313/campos_512_v4
+158/800314/campos_512_v4
+158/800326/campos_512_v4
+158/800339/campos_512_v4
+158/800350/campos_512_v4
+158/800356/campos_512_v4
+158/800359/campos_512_v4
+158/800364/campos_512_v4
+158/800372/campos_512_v4
+158/800381/campos_512_v4
+158/800382/campos_512_v4
+158/800386/campos_512_v4
+158/800389/campos_512_v4
+158/800397/campos_512_v4
+158/800399/campos_512_v4
+158/800401/campos_512_v4
+158/800421/campos_512_v4
+158/800435/campos_512_v4
+158/800442/campos_512_v4
+158/800444/campos_512_v4
+158/800445/campos_512_v4
+158/800462/campos_512_v4
+158/800481/campos_512_v4
+158/800486/campos_512_v4
+158/800489/campos_512_v4
+158/800492/campos_512_v4
+158/800498/campos_512_v4
+158/800502/campos_512_v4
+158/800505/campos_512_v4
+158/800522/campos_512_v4
+158/800530/campos_512_v4
+158/800539/campos_512_v4
+158/800551/campos_512_v4
+158/800552/campos_512_v4
+158/800557/campos_512_v4
+158/800558/campos_512_v4
+158/800562/campos_512_v4
+158/800569/campos_512_v4
+158/800571/campos_512_v4
+158/800576/campos_512_v4
+158/800582/campos_512_v4
+158/800587/campos_512_v4
+158/800593/campos_512_v4
+158/800620/campos_512_v4
+158/800631/campos_512_v4
+158/800632/campos_512_v4
+158/800642/campos_512_v4
+158/800646/campos_512_v4
+158/800651/campos_512_v4
+158/800668/campos_512_v4
+158/800673/campos_512_v4
+158/800691/campos_512_v4
+158/800692/campos_512_v4
+158/800697/campos_512_v4
+158/800700/campos_512_v4
+158/800708/campos_512_v4
+158/800709/campos_512_v4
+158/800715/campos_512_v4
+158/800735/campos_512_v4
+158/800737/campos_512_v4
+158/800751/campos_512_v4
+158/800756/campos_512_v4
+158/800771/campos_512_v4
+158/800781/campos_512_v4
+158/800784/campos_512_v4
+158/800785/campos_512_v4
+158/800790/campos_512_v4
+158/800799/campos_512_v4
+158/800804/campos_512_v4
+158/800815/campos_512_v4
+158/800817/campos_512_v4
+158/800829/campos_512_v4
+158/800844/campos_512_v4
+158/800855/campos_512_v4
+158/800858/campos_512_v4
+158/800859/campos_512_v4
+158/800872/campos_512_v4
+158/800877/campos_512_v4
+158/800881/campos_512_v4
+158/800897/campos_512_v4
+158/800899/campos_512_v4
+158/800914/campos_512_v4
+158/800920/campos_512_v4
+158/800922/campos_512_v4
+158/800923/campos_512_v4
+158/800926/campos_512_v4
+158/800927/campos_512_v4
+158/800932/campos_512_v4
+158/800938/campos_512_v4
+158/800942/campos_512_v4
+158/800945/campos_512_v4
+158/800948/campos_512_v4
+158/800954/campos_512_v4
+158/800959/campos_512_v4
+158/800962/campos_512_v4
+158/800977/campos_512_v4
+158/800981/campos_512_v4
+158/800982/campos_512_v4
+158/801004/campos_512_v4
+158/801028/campos_512_v4
+158/801031/campos_512_v4
+158/801032/campos_512_v4
+158/801057/campos_512_v4
+158/801059/campos_512_v4
+158/801078/campos_512_v4
+158/801079/campos_512_v4
+158/801092/campos_512_v4
+158/801098/campos_512_v4
+158/801106/campos_512_v4
+158/801133/campos_512_v4
+158/801137/campos_512_v4
+158/801144/campos_512_v4
+158/801153/campos_512_v4
+158/801158/campos_512_v4
+158/801161/campos_512_v4
+158/801165/campos_512_v4
+158/801168/campos_512_v4
+158/801170/campos_512_v4
+158/801177/campos_512_v4
+158/801179/campos_512_v4
+158/801200/campos_512_v4
+158/801202/campos_512_v4
+158/801210/campos_512_v4
+158/801211/campos_512_v4
+158/801216/campos_512_v4
+158/801223/campos_512_v4
+158/801229/campos_512_v4
+158/801231/campos_512_v4
+158/801233/campos_512_v4
+158/801239/campos_512_v4
+158/801246/campos_512_v4
+158/801250/campos_512_v4
+158/801260/campos_512_v4
+158/801266/campos_512_v4
+158/801267/campos_512_v4
+158/801278/campos_512_v4
+158/801282/campos_512_v4
+158/801292/campos_512_v4
+158/801299/campos_512_v4
+158/801306/campos_512_v4
+158/801315/campos_512_v4
+158/801316/campos_512_v4
+158/801344/campos_512_v4
+158/801346/campos_512_v4
+158/801373/campos_512_v4
+158/801375/campos_512_v4
+158/801395/campos_512_v4
+158/801397/campos_512_v4
+158/801406/campos_512_v4
+158/801408/campos_512_v4
+158/801416/campos_512_v4
+158/801417/campos_512_v4
+158/801422/campos_512_v4
+158/801427/campos_512_v4
+158/801431/campos_512_v4
+158/801436/campos_512_v4
+158/801446/campos_512_v4
+158/801470/campos_512_v4
+158/801474/campos_512_v4
+158/801479/campos_512_v4
+158/801490/campos_512_v4
+158/801494/campos_512_v4
+158/801508/campos_512_v4
+158/801522/campos_512_v4
+158/801526/campos_512_v4
+158/801536/campos_512_v4
+158/801539/campos_512_v4
+158/801545/campos_512_v4
+158/801552/campos_512_v4
+158/801556/campos_512_v4
+158/801579/campos_512_v4
+158/801603/campos_512_v4
+158/801615/campos_512_v4
+158/801652/campos_512_v4
+158/801665/campos_512_v4
+158/801667/campos_512_v4
+158/801674/campos_512_v4
+158/801679/campos_512_v4
+158/801681/campos_512_v4
+158/801690/campos_512_v4
+158/801696/campos_512_v4
+158/801715/campos_512_v4
+158/801716/campos_512_v4
+158/801721/campos_512_v4
+158/801733/campos_512_v4
+158/801734/campos_512_v4
+158/801741/campos_512_v4
+158/801743/campos_512_v4
+158/801746/campos_512_v4
+158/801762/campos_512_v4
+158/801768/campos_512_v4
+158/801769/campos_512_v4
+158/801781/campos_512_v4
+158/801787/campos_512_v4
+158/801796/campos_512_v4
+158/801797/campos_512_v4
+158/801798/campos_512_v4
+158/801801/campos_512_v4
+158/801807/campos_512_v4
+158/801809/campos_512_v4
+158/801820/campos_512_v4
+158/801856/campos_512_v4
+158/801877/campos_512_v4
+158/801878/campos_512_v4
+158/801879/campos_512_v4
+158/801882/campos_512_v4
+158/801889/campos_512_v4
+158/801915/campos_512_v4
+158/801916/campos_512_v4
+158/801929/campos_512_v4
+158/801944/campos_512_v4
+158/801950/campos_512_v4
+158/801953/campos_512_v4
+158/801955/campos_512_v4
+158/801958/campos_512_v4
+158/801962/campos_512_v4
+158/801973/campos_512_v4
+158/801978/campos_512_v4
+158/801986/campos_512_v4
+158/801987/campos_512_v4
+158/801998/campos_512_v4
+158/801999/campos_512_v4
+158/802008/campos_512_v4
+158/802021/campos_512_v4
+158/802035/campos_512_v4
+158/802047/campos_512_v4
+158/802050/campos_512_v4
+158/802055/campos_512_v4
+158/802062/campos_512_v4
+158/802069/campos_512_v4
+158/802083/campos_512_v4
+158/802086/campos_512_v4
+158/802111/campos_512_v4
+158/802117/campos_512_v4
+158/802124/campos_512_v4
+158/802130/campos_512_v4
+158/802141/campos_512_v4
+158/802156/campos_512_v4
+158/802157/campos_512_v4
+158/802192/campos_512_v4
+158/802217/campos_512_v4
+158/802219/campos_512_v4
+158/802224/campos_512_v4
+158/802231/campos_512_v4
+158/802240/campos_512_v4
+158/802241/campos_512_v4
+158/802243/campos_512_v4
+158/802246/campos_512_v4
+158/802250/campos_512_v4
+158/802252/campos_512_v4
+158/802253/campos_512_v4
+158/802255/campos_512_v4
+158/802273/campos_512_v4
+158/802281/campos_512_v4
+158/802296/campos_512_v4
+158/802307/campos_512_v4
+158/802310/campos_512_v4
+158/802317/campos_512_v4
+158/802320/campos_512_v4
+158/802323/campos_512_v4
+158/802332/campos_512_v4
+158/802335/campos_512_v4
+158/802339/campos_512_v4
+158/802344/campos_512_v4
+158/802353/campos_512_v4
+158/802354/campos_512_v4
+158/802359/campos_512_v4
+158/802364/campos_512_v4
+158/802367/campos_512_v4
+158/802369/campos_512_v4
+158/802371/campos_512_v4
+158/802372/campos_512_v4
+158/802377/campos_512_v4
+158/802379/campos_512_v4
+158/802384/campos_512_v4
+158/802410/campos_512_v4
+158/802416/campos_512_v4
+158/802421/campos_512_v4
+158/802434/campos_512_v4
+158/802442/campos_512_v4
+158/802451/campos_512_v4
+158/802454/campos_512_v4
+158/802460/campos_512_v4
+158/802465/campos_512_v4
+158/802471/campos_512_v4
+158/802480/campos_512_v4
+158/802481/campos_512_v4
+158/802483/campos_512_v4
+158/802487/campos_512_v4
+158/802490/campos_512_v4
+158/802516/campos_512_v4
+158/802517/campos_512_v4
+158/802519/campos_512_v4
+158/802523/campos_512_v4
+158/802535/campos_512_v4
+158/802544/campos_512_v4
+158/802555/campos_512_v4
+158/802556/campos_512_v4
+158/802560/campos_512_v4
+158/802561/campos_512_v4
+158/802563/campos_512_v4
+158/802580/campos_512_v4
+158/802584/campos_512_v4
+158/802610/campos_512_v4
+158/802616/campos_512_v4
+158/802619/campos_512_v4
+158/802628/campos_512_v4
+158/802629/campos_512_v4
+158/802631/campos_512_v4
+158/802641/campos_512_v4
+158/802644/campos_512_v4
+158/802652/campos_512_v4
+158/802661/campos_512_v4
+158/802683/campos_512_v4
+158/802689/campos_512_v4
+158/802696/campos_512_v4
+158/802703/campos_512_v4
+158/802704/campos_512_v4
+158/802705/campos_512_v4
+158/802706/campos_512_v4
+158/802711/campos_512_v4
+158/802714/campos_512_v4
+158/802716/campos_512_v4
+158/802729/campos_512_v4
+158/802744/campos_512_v4
+158/802750/campos_512_v4
+158/802774/campos_512_v4
+158/802780/campos_512_v4
+158/802789/campos_512_v4
+158/802790/campos_512_v4
+158/802798/campos_512_v4
+158/802799/campos_512_v4
+158/802804/campos_512_v4
+158/802806/campos_512_v4
+158/802810/campos_512_v4
+158/802813/campos_512_v4
+158/802819/campos_512_v4
+158/802829/campos_512_v4
+158/802849/campos_512_v4
+158/802875/campos_512_v4
+158/802876/campos_512_v4
+158/802881/campos_512_v4
+158/802893/campos_512_v4
+158/802899/campos_512_v4
+158/802902/campos_512_v4
+158/802906/campos_512_v4
+158/802913/campos_512_v4
+158/802939/campos_512_v4
+158/802940/campos_512_v4
+158/802943/campos_512_v4
+158/802948/campos_512_v4
+158/802951/campos_512_v4
+158/802966/campos_512_v4
+158/802967/campos_512_v4
+158/802969/campos_512_v4
+158/802976/campos_512_v4
+158/802979/campos_512_v4
+158/802980/campos_512_v4
+158/802983/campos_512_v4
+158/802985/campos_512_v4
+158/803003/campos_512_v4
+158/803005/campos_512_v4
+158/803007/campos_512_v4
+158/803015/campos_512_v4
+158/803019/campos_512_v4
+158/803051/campos_512_v4
+158/803061/campos_512_v4
+158/803065/campos_512_v4
+158/803067/campos_512_v4
+158/803080/campos_512_v4
+158/803081/campos_512_v4
+158/803088/campos_512_v4
+158/803095/campos_512_v4
+158/803096/campos_512_v4
+158/803103/campos_512_v4
+158/803105/campos_512_v4
+158/803114/campos_512_v4
+158/803119/campos_512_v4
+158/803120/campos_512_v4
+158/803123/campos_512_v4
+158/803126/campos_512_v4
+158/803141/campos_512_v4
+158/803143/campos_512_v4
+158/803150/campos_512_v4
+158/803153/campos_512_v4
+158/803162/campos_512_v4
+158/803164/campos_512_v4
+158/803168/campos_512_v4
+158/803175/campos_512_v4
+158/803184/campos_512_v4
+158/803194/campos_512_v4
+158/803196/campos_512_v4
+158/803214/campos_512_v4
+158/803215/campos_512_v4
+158/803224/campos_512_v4
+158/803227/campos_512_v4
+158/803234/campos_512_v4
+158/803244/campos_512_v4
+158/803246/campos_512_v4
+158/803252/campos_512_v4
+158/803262/campos_512_v4
+158/803276/campos_512_v4
+158/803277/campos_512_v4
+158/803279/campos_512_v4
+158/803281/campos_512_v4
+158/803286/campos_512_v4
+158/803288/campos_512_v4
+158/803292/campos_512_v4
+158/803294/campos_512_v4
+158/803304/campos_512_v4
+158/803306/campos_512_v4
+158/803313/campos_512_v4
+158/803327/campos_512_v4
+158/803332/campos_512_v4
+158/803336/campos_512_v4
+158/803363/campos_512_v4
+158/803382/campos_512_v4
+158/803385/campos_512_v4
+158/803388/campos_512_v4
+158/803391/campos_512_v4
+158/803401/campos_512_v4
+158/803406/campos_512_v4
+158/803419/campos_512_v4
+158/803421/campos_512_v4
+158/803425/campos_512_v4
+158/803440/campos_512_v4
+158/803442/campos_512_v4
+158/803449/campos_512_v4
+158/803453/campos_512_v4
+158/803466/campos_512_v4
+158/803477/campos_512_v4
+158/803484/campos_512_v4
+158/803497/campos_512_v4
+158/803498/campos_512_v4
+158/803503/campos_512_v4
+158/803504/campos_512_v4
+158/803508/campos_512_v4
+158/803527/campos_512_v4
+158/803528/campos_512_v4
+158/803531/campos_512_v4
+158/803556/campos_512_v4
+158/803557/campos_512_v4
+158/803563/campos_512_v4
+158/803569/campos_512_v4
+158/803579/campos_512_v4
+158/803592/campos_512_v4
+158/803596/campos_512_v4
+158/803613/campos_512_v4
+158/803618/campos_512_v4
+158/803624/campos_512_v4
+158/803631/campos_512_v4
+158/803634/campos_512_v4
+158/803643/campos_512_v4
+158/803655/campos_512_v4
+158/803657/campos_512_v4
+158/803673/campos_512_v4
+158/803677/campos_512_v4
+158/803681/campos_512_v4
+158/803698/campos_512_v4
+158/803710/campos_512_v4
+158/803717/campos_512_v4
+158/803727/campos_512_v4
+158/803732/campos_512_v4
+158/803743/campos_512_v4
+158/803746/campos_512_v4
+158/803752/campos_512_v4
+158/803756/campos_512_v4
+158/803776/campos_512_v4
+158/803778/campos_512_v4
+158/803780/campos_512_v4
+158/803782/campos_512_v4
+158/803802/campos_512_v4
+158/803804/campos_512_v4
+158/803810/campos_512_v4
+158/803822/campos_512_v4
+158/803840/campos_512_v4
+158/803846/campos_512_v4
+158/803854/campos_512_v4
+158/803865/campos_512_v4
+158/803874/campos_512_v4
+158/803876/campos_512_v4
+158/803883/campos_512_v4
+158/803911/campos_512_v4
+158/803916/campos_512_v4
+158/803917/campos_512_v4
+158/803935/campos_512_v4
+158/803940/campos_512_v4
+158/803942/campos_512_v4
+158/803945/campos_512_v4
+158/803953/campos_512_v4
+158/803974/campos_512_v4
+158/803980/campos_512_v4
+158/803989/campos_512_v4
+158/804004/campos_512_v4
+158/804008/campos_512_v4
+158/804037/campos_512_v4
+158/804038/campos_512_v4
+158/804046/campos_512_v4
+158/804059/campos_512_v4
+158/804066/campos_512_v4
+158/804073/campos_512_v4
+158/804090/campos_512_v4
+158/804094/campos_512_v4
+158/804096/campos_512_v4
+158/804111/campos_512_v4
+158/804114/campos_512_v4
+158/804129/campos_512_v4
+158/804132/campos_512_v4
+158/804135/campos_512_v4
+158/804144/campos_512_v4
+158/804146/campos_512_v4
+158/804157/campos_512_v4
+158/804165/campos_512_v4
+158/804170/campos_512_v4
+158/804172/campos_512_v4
+158/804178/campos_512_v4
+158/804188/campos_512_v4
+158/804190/campos_512_v4
+158/804195/campos_512_v4
+158/804208/campos_512_v4
+158/804217/campos_512_v4
+158/804231/campos_512_v4
+158/804234/campos_512_v4
+158/804238/campos_512_v4
+158/804241/campos_512_v4
+158/804252/campos_512_v4
+158/804253/campos_512_v4
+158/804254/campos_512_v4
+158/804268/campos_512_v4
+158/804273/campos_512_v4
+158/804287/campos_512_v4
+158/804301/campos_512_v4
+158/804323/campos_512_v4
+158/804324/campos_512_v4
+158/804326/campos_512_v4
+158/804329/campos_512_v4
+158/804331/campos_512_v4
+158/804340/campos_512_v4
+158/804342/campos_512_v4
+158/804347/campos_512_v4
+158/804351/campos_512_v4
+158/804356/campos_512_v4
+158/804359/campos_512_v4
+158/804370/campos_512_v4
+158/804379/campos_512_v4
+158/804381/campos_512_v4
+158/804397/campos_512_v4
+158/804399/campos_512_v4
+158/804401/campos_512_v4
+158/804410/campos_512_v4
+158/804418/campos_512_v4
+158/804419/campos_512_v4
+158/804420/campos_512_v4
+158/804421/campos_512_v4
+158/804425/campos_512_v4
+158/804426/campos_512_v4
+158/804427/campos_512_v4
+158/804446/campos_512_v4
+158/804455/campos_512_v4
+158/804459/campos_512_v4
+158/804488/campos_512_v4
+158/804490/campos_512_v4
+158/804495/campos_512_v4
+158/804507/campos_512_v4
+158/804516/campos_512_v4
+158/804523/campos_512_v4
+158/804531/campos_512_v4
+158/804551/campos_512_v4
+158/804554/campos_512_v4
+158/804556/campos_512_v4
+158/804565/campos_512_v4
+158/804575/campos_512_v4
+158/804596/campos_512_v4
+158/804598/campos_512_v4
+158/804602/campos_512_v4
+158/804603/campos_512_v4
+158/804613/campos_512_v4
+158/804621/campos_512_v4
+158/804623/campos_512_v4
+158/804626/campos_512_v4
+158/804631/campos_512_v4
+158/804638/campos_512_v4
+158/804642/campos_512_v4
+158/804658/campos_512_v4
+158/804674/campos_512_v4
+158/804680/campos_512_v4
+158/804681/campos_512_v4
+158/804689/campos_512_v4
+158/804702/campos_512_v4
+158/804708/campos_512_v4
+158/804709/campos_512_v4
+158/804710/campos_512_v4
+158/804718/campos_512_v4
+158/804721/campos_512_v4
+158/804740/campos_512_v4
+158/804748/campos_512_v4
+158/804759/campos_512_v4
+158/804774/campos_512_v4
+158/804775/campos_512_v4
+158/804784/campos_512_v4
+158/804785/campos_512_v4
+158/804793/campos_512_v4
+158/804798/campos_512_v4
+158/804800/campos_512_v4
+158/804801/campos_512_v4
+158/804810/campos_512_v4
+158/804813/campos_512_v4
+158/804814/campos_512_v4
+158/804818/campos_512_v4
+158/804819/campos_512_v4
+158/804824/campos_512_v4
+158/804826/campos_512_v4
+158/804828/campos_512_v4
+158/804836/campos_512_v4
+158/804848/campos_512_v4
+158/804851/campos_512_v4
+158/804863/campos_512_v4
+158/804880/campos_512_v4
+158/804891/campos_512_v4
+158/804892/campos_512_v4
+158/804905/campos_512_v4
+158/804914/campos_512_v4
+158/804919/campos_512_v4
+158/804927/campos_512_v4
+158/804930/campos_512_v4
+158/804933/campos_512_v4
+158/804952/campos_512_v4
+158/804954/campos_512_v4
+158/804957/campos_512_v4
+158/804962/campos_512_v4
+158/804963/campos_512_v4
+158/804967/campos_512_v4
+158/804977/campos_512_v4
+158/804978/campos_512_v4
+158/804981/campos_512_v4
+158/804982/campos_512_v4
+158/804986/campos_512_v4
+158/804995/campos_512_v4
+158/804999/campos_512_v4
+159/805004/campos_512_v4
+159/805018/campos_512_v4
+159/805020/campos_512_v4
+159/805025/campos_512_v4
+159/805057/campos_512_v4
+159/805083/campos_512_v4
+159/805091/campos_512_v4
+159/805092/campos_512_v4
+159/805093/campos_512_v4
+159/805100/campos_512_v4
+159/805121/campos_512_v4
+159/805124/campos_512_v4
+159/805128/campos_512_v4
+159/805167/campos_512_v4
+159/805181/campos_512_v4
+159/805184/campos_512_v4
+159/805189/campos_512_v4
+159/805192/campos_512_v4
+159/805197/campos_512_v4
+159/805201/campos_512_v4
+159/805207/campos_512_v4
+159/805213/campos_512_v4
+159/805238/campos_512_v4
+159/805240/campos_512_v4
+159/805252/campos_512_v4
+159/805255/campos_512_v4
+159/805256/campos_512_v4
+159/805266/campos_512_v4
+159/805273/campos_512_v4
+159/805287/campos_512_v4
+159/805288/campos_512_v4
+159/805298/campos_512_v4
+159/805306/campos_512_v4
+159/805308/campos_512_v4
+159/805341/campos_512_v4
+159/805343/campos_512_v4
+159/805356/campos_512_v4
+159/805359/campos_512_v4
+159/805360/campos_512_v4
+159/805365/campos_512_v4
+159/805394/campos_512_v4
+159/805402/campos_512_v4
+159/805420/campos_512_v4
+159/805423/campos_512_v4
+159/805431/campos_512_v4
+159/805438/campos_512_v4
+159/805442/campos_512_v4
+159/805447/campos_512_v4
+159/805449/campos_512_v4
+159/805451/campos_512_v4
+159/805453/campos_512_v4
+159/805463/campos_512_v4
+159/805469/campos_512_v4
+159/805474/campos_512_v4
+159/805476/campos_512_v4
+159/805492/campos_512_v4
+159/805498/campos_512_v4
+159/805499/campos_512_v4
+159/805503/campos_512_v4
+159/805519/campos_512_v4
+159/805532/campos_512_v4
+159/805540/campos_512_v4
+159/805542/campos_512_v4
+159/805545/campos_512_v4
+159/805550/campos_512_v4
+159/805556/campos_512_v4
+159/805565/campos_512_v4
+159/805567/campos_512_v4
+159/805573/campos_512_v4
+159/805578/campos_512_v4
+159/805584/campos_512_v4
+159/805603/campos_512_v4
+159/805622/campos_512_v4
+159/805627/campos_512_v4
+159/805637/campos_512_v4
+159/805639/campos_512_v4
+159/805649/campos_512_v4
+159/805657/campos_512_v4
+159/805668/campos_512_v4
+159/805676/campos_512_v4
+159/805683/campos_512_v4
+159/805687/campos_512_v4
+159/805691/campos_512_v4
+159/805715/campos_512_v4
+159/805729/campos_512_v4
+159/805730/campos_512_v4
+159/805732/campos_512_v4
+159/805738/campos_512_v4
+159/805747/campos_512_v4
+159/805769/campos_512_v4
+159/805770/campos_512_v4
+159/805774/campos_512_v4
+159/805796/campos_512_v4
+159/805801/campos_512_v4
+159/805804/campos_512_v4
+159/805808/campos_512_v4
+159/805818/campos_512_v4
+159/805821/campos_512_v4
+159/805861/campos_512_v4
+159/805863/campos_512_v4
+159/805871/campos_512_v4
+159/805880/campos_512_v4
+159/805905/campos_512_v4
+159/805911/campos_512_v4
+159/805912/campos_512_v4
+159/805914/campos_512_v4
+159/805924/campos_512_v4
+159/805929/campos_512_v4
+159/805943/campos_512_v4
+159/805945/campos_512_v4
+159/805955/campos_512_v4
+159/805973/campos_512_v4
+159/805979/campos_512_v4
+159/805994/campos_512_v4
+159/805998/campos_512_v4
+159/806010/campos_512_v4
+159/806023/campos_512_v4
+159/806026/campos_512_v4
+159/806036/campos_512_v4
+159/806049/campos_512_v4
+159/806055/campos_512_v4
+159/806062/campos_512_v4
+159/806066/campos_512_v4
+159/806067/campos_512_v4
+159/806071/campos_512_v4
+159/806072/campos_512_v4
+159/806083/campos_512_v4
+159/806093/campos_512_v4
+159/806095/campos_512_v4
+159/806096/campos_512_v4
+159/806104/campos_512_v4
+159/806121/campos_512_v4
+159/806136/campos_512_v4
+159/806148/campos_512_v4
+159/806153/campos_512_v4
+159/806156/campos_512_v4
+159/806160/campos_512_v4
+159/806162/campos_512_v4
+159/806167/campos_512_v4
+159/806168/campos_512_v4
+159/806173/campos_512_v4
+159/806176/campos_512_v4
+159/806182/campos_512_v4
+159/806198/campos_512_v4
+159/806223/campos_512_v4
+159/806270/campos_512_v4
+159/806295/campos_512_v4
+159/806297/campos_512_v4
+159/806327/campos_512_v4
+159/806340/campos_512_v4
+159/806375/campos_512_v4
+159/806388/campos_512_v4
+159/806414/campos_512_v4
+159/806415/campos_512_v4
+159/806417/campos_512_v4
+159/806421/campos_512_v4
+159/806434/campos_512_v4
+159/806447/campos_512_v4
+159/806451/campos_512_v4
+159/806453/campos_512_v4
+159/806457/campos_512_v4
+159/806471/campos_512_v4
+159/806479/campos_512_v4
+159/806481/campos_512_v4
+159/806490/campos_512_v4
+159/806493/campos_512_v4
+159/806506/campos_512_v4
+159/806511/campos_512_v4
+159/806513/campos_512_v4
+159/806531/campos_512_v4
+159/806540/campos_512_v4
+159/806541/campos_512_v4
+159/806543/campos_512_v4
+159/806554/campos_512_v4
+159/806555/campos_512_v4
+159/806556/campos_512_v4
+159/806561/campos_512_v4
+159/806571/campos_512_v4
+159/806572/campos_512_v4
+159/806575/campos_512_v4
+159/806592/campos_512_v4
+159/806598/campos_512_v4
+159/806602/campos_512_v4
+159/806604/campos_512_v4
+159/806610/campos_512_v4
+159/806613/campos_512_v4
+159/806616/campos_512_v4
+159/806621/campos_512_v4
+159/806636/campos_512_v4
+159/806637/campos_512_v4
+159/806642/campos_512_v4
+159/806643/campos_512_v4
+159/806649/campos_512_v4
+159/806669/campos_512_v4
+159/806670/campos_512_v4
+159/806674/campos_512_v4
+159/806678/campos_512_v4
+159/806680/campos_512_v4
+159/806683/campos_512_v4
+159/806700/campos_512_v4
+159/806706/campos_512_v4
+159/806718/campos_512_v4
+159/806727/campos_512_v4
+159/806731/campos_512_v4
+159/806741/campos_512_v4
+159/806747/campos_512_v4
+159/806751/campos_512_v4
+159/806752/campos_512_v4
+159/806756/campos_512_v4
+159/806759/campos_512_v4
+159/806763/campos_512_v4
+159/806777/campos_512_v4
+159/806783/campos_512_v4
+159/806784/campos_512_v4
+159/806789/campos_512_v4
+159/806804/campos_512_v4
+159/806816/campos_512_v4
+159/806828/campos_512_v4
+159/806829/campos_512_v4
+159/806830/campos_512_v4
+159/806832/campos_512_v4
+159/806842/campos_512_v4
+159/806844/campos_512_v4
+159/806854/campos_512_v4
+159/806871/campos_512_v4
+159/806891/campos_512_v4
+159/806892/campos_512_v4
+159/806899/campos_512_v4
+159/806902/campos_512_v4
+159/806906/campos_512_v4
+159/806912/campos_512_v4
+159/806920/campos_512_v4
+159/806926/campos_512_v4
+159/806927/campos_512_v4
+159/806929/campos_512_v4
+159/806930/campos_512_v4
+159/806940/campos_512_v4
+159/806944/campos_512_v4
+159/806950/campos_512_v4
+159/806958/campos_512_v4
+159/806961/campos_512_v4
+159/806966/campos_512_v4
+159/806969/campos_512_v4
+159/806972/campos_512_v4
+159/806973/campos_512_v4
+159/806979/campos_512_v4
+159/806987/campos_512_v4
+159/807005/campos_512_v4
+159/807009/campos_512_v4
+159/807011/campos_512_v4
+159/807016/campos_512_v4
+159/807018/campos_512_v4
+159/807030/campos_512_v4
+159/807031/campos_512_v4
+159/807032/campos_512_v4
+159/807038/campos_512_v4
+159/807041/campos_512_v4
+159/807049/campos_512_v4
+159/807052/campos_512_v4
+159/807079/campos_512_v4
+159/807082/campos_512_v4
+159/807092/campos_512_v4
+159/807095/campos_512_v4
+159/807108/campos_512_v4
+159/807122/campos_512_v4
+159/807125/campos_512_v4
+159/807127/campos_512_v4
+159/807138/campos_512_v4
+159/807148/campos_512_v4
+159/807160/campos_512_v4
+159/807182/campos_512_v4
+159/807185/campos_512_v4
+159/807189/campos_512_v4
+159/807200/campos_512_v4
+159/807203/campos_512_v4
+159/807214/campos_512_v4
+159/807215/campos_512_v4
+159/807221/campos_512_v4
+159/807224/campos_512_v4
+159/807225/campos_512_v4
+159/807230/campos_512_v4
+159/807231/campos_512_v4
+159/807238/campos_512_v4
+159/807240/campos_512_v4
+159/807252/campos_512_v4
+159/807255/campos_512_v4
+159/807278/campos_512_v4
+159/807285/campos_512_v4
+159/807297/campos_512_v4
+159/807301/campos_512_v4
+159/807318/campos_512_v4
+159/807324/campos_512_v4
+159/807325/campos_512_v4
+159/807328/campos_512_v4
+159/807332/campos_512_v4
+159/807333/campos_512_v4
+159/807345/campos_512_v4
+159/807354/campos_512_v4
+159/807359/campos_512_v4
+159/807360/campos_512_v4
+159/807370/campos_512_v4
+159/807371/campos_512_v4
+159/807375/campos_512_v4
+159/807382/campos_512_v4
+159/807385/campos_512_v4
+159/807397/campos_512_v4
+159/807405/campos_512_v4
+159/807407/campos_512_v4
+159/807415/campos_512_v4
+159/807420/campos_512_v4
+159/807423/campos_512_v4
+159/807435/campos_512_v4
+159/807441/campos_512_v4
+159/807447/campos_512_v4
+159/807449/campos_512_v4
+159/807455/campos_512_v4
+159/807458/campos_512_v4
+159/807459/campos_512_v4
+159/807472/campos_512_v4
+159/807505/campos_512_v4
+159/807515/campos_512_v4
+159/807519/campos_512_v4
+159/807521/campos_512_v4
+159/807530/campos_512_v4
+159/807541/campos_512_v4
+159/807542/campos_512_v4
+159/807550/campos_512_v4
+159/807558/campos_512_v4
+159/807572/campos_512_v4
+159/807575/campos_512_v4
+159/807577/campos_512_v4
+159/807582/campos_512_v4
+159/807595/campos_512_v4
+159/807597/campos_512_v4
+159/807605/campos_512_v4
+159/807612/campos_512_v4
+159/807617/campos_512_v4
+159/807618/campos_512_v4
+159/807626/campos_512_v4
+159/807652/campos_512_v4
+159/807654/campos_512_v4
+159/807663/campos_512_v4
+159/807664/campos_512_v4
+159/807670/campos_512_v4
+159/807680/campos_512_v4
+159/807683/campos_512_v4
+159/807695/campos_512_v4
+159/807705/campos_512_v4
+159/807716/campos_512_v4
+159/807724/campos_512_v4
+159/807733/campos_512_v4
+159/807734/campos_512_v4
+159/807737/campos_512_v4
+159/807739/campos_512_v4
+159/807744/campos_512_v4
+159/807752/campos_512_v4
+159/807757/campos_512_v4
+159/807763/campos_512_v4
+159/807766/campos_512_v4
+159/807774/campos_512_v4
+159/807775/campos_512_v4
+159/807780/campos_512_v4
+159/807781/campos_512_v4
+159/807783/campos_512_v4
+159/807800/campos_512_v4
+159/807805/campos_512_v4
+159/807806/campos_512_v4
+159/807819/campos_512_v4
+159/807820/campos_512_v4
+159/807823/campos_512_v4
+159/807841/campos_512_v4
+159/807842/campos_512_v4
+159/807845/campos_512_v4
+159/807848/campos_512_v4
+159/807849/campos_512_v4
+159/807856/campos_512_v4
+159/807859/campos_512_v4
+159/807864/campos_512_v4
+159/807865/campos_512_v4
+159/807868/campos_512_v4
+159/807891/campos_512_v4
+159/807906/campos_512_v4
+159/807910/campos_512_v4
+159/807918/campos_512_v4
+159/807924/campos_512_v4
+159/807937/campos_512_v4
+159/807940/campos_512_v4
+159/807944/campos_512_v4
+159/807950/campos_512_v4
+159/807953/campos_512_v4
+159/807960/campos_512_v4
+159/807970/campos_512_v4
+159/807976/campos_512_v4
+159/807977/campos_512_v4
+159/807987/campos_512_v4
+159/807992/campos_512_v4
+159/807995/campos_512_v4
+159/808001/campos_512_v4
+159/808010/campos_512_v4
+159/808020/campos_512_v4
+159/808028/campos_512_v4
+159/808033/campos_512_v4
+159/808039/campos_512_v4
+159/808047/campos_512_v4
+159/808050/campos_512_v4
+159/808068/campos_512_v4
+159/808070/campos_512_v4
+159/808072/campos_512_v4
+159/808112/campos_512_v4
+159/808121/campos_512_v4
+159/808127/campos_512_v4
+159/808148/campos_512_v4
+159/808152/campos_512_v4
+159/808163/campos_512_v4
+159/808169/campos_512_v4
+159/808172/campos_512_v4
+159/808178/campos_512_v4
+159/808208/campos_512_v4
+159/808213/campos_512_v4
+159/808216/campos_512_v4
+159/808226/campos_512_v4
+159/808227/campos_512_v4
+159/808231/campos_512_v4
+159/808234/campos_512_v4
+159/808235/campos_512_v4
+159/808240/campos_512_v4
+159/808242/campos_512_v4
+159/808248/campos_512_v4
+159/808260/campos_512_v4
+159/808261/campos_512_v4
+159/808264/campos_512_v4
+159/808296/campos_512_v4
+159/808297/campos_512_v4
+159/808303/campos_512_v4
+159/808305/campos_512_v4
+159/808306/campos_512_v4
+159/808307/campos_512_v4
+159/808323/campos_512_v4
+159/808325/campos_512_v4
+159/808346/campos_512_v4
+159/808349/campos_512_v4
+159/808357/campos_512_v4
+159/808360/campos_512_v4
+159/808364/campos_512_v4
+159/808379/campos_512_v4
+159/808387/campos_512_v4
+159/808393/campos_512_v4
+159/808402/campos_512_v4
+159/808403/campos_512_v4
+159/808413/campos_512_v4
+159/808417/campos_512_v4
+159/808420/campos_512_v4
+159/808428/campos_512_v4
+159/808429/campos_512_v4
+159/808447/campos_512_v4
+159/808465/campos_512_v4
+159/808469/campos_512_v4
+159/808474/campos_512_v4
+159/808477/campos_512_v4
+159/808478/campos_512_v4
+159/808484/campos_512_v4
+159/808492/campos_512_v4
+159/808510/campos_512_v4
+159/808514/campos_512_v4
+159/808525/campos_512_v4
+159/808527/campos_512_v4
+159/808529/campos_512_v4
+159/808532/campos_512_v4
+159/808543/campos_512_v4
+159/808544/campos_512_v4
+159/808545/campos_512_v4
+159/808553/campos_512_v4
+159/808569/campos_512_v4
+159/808570/campos_512_v4
+159/808572/campos_512_v4
+159/808576/campos_512_v4
+159/808577/campos_512_v4
+159/808583/campos_512_v4
+159/808587/campos_512_v4
+159/808589/campos_512_v4
+159/808595/campos_512_v4
+159/808599/campos_512_v4
+159/808610/campos_512_v4
+159/808611/campos_512_v4
+159/808614/campos_512_v4
+159/808615/campos_512_v4
+159/808626/campos_512_v4
+159/808659/campos_512_v4
+159/808669/campos_512_v4
+159/808704/campos_512_v4
+159/808709/campos_512_v4
+159/808721/campos_512_v4
+159/808723/campos_512_v4
+159/808725/campos_512_v4
+159/808727/campos_512_v4
+159/808728/campos_512_v4
+159/808735/campos_512_v4
+159/808736/campos_512_v4
+159/808740/campos_512_v4
+159/808752/campos_512_v4
+159/808753/campos_512_v4
+16/90006/campos_512_v4
+16/90015/campos_512_v4
+16/90018/campos_512_v4
+16/90027/campos_512_v4
+16/90038/campos_512_v4
+16/90042/campos_512_v4
+16/90050/campos_512_v4
+16/90068/campos_512_v4
+16/90076/campos_512_v4
+16/90081/campos_512_v4
+16/90088/campos_512_v4
+16/90100/campos_512_v4
+16/90104/campos_512_v4
+16/90122/campos_512_v4
+16/90127/campos_512_v4
+16/90133/campos_512_v4
+16/90134/campos_512_v4
+16/90139/campos_512_v4
+16/90140/campos_512_v4
+16/90145/campos_512_v4
+16/90167/campos_512_v4
+16/90173/campos_512_v4
+16/90185/campos_512_v4
+16/90199/campos_512_v4
+16/90216/campos_512_v4
+16/90217/campos_512_v4
+16/90238/campos_512_v4
+16/90240/campos_512_v4
+16/90245/campos_512_v4
+16/90247/campos_512_v4
+16/90248/campos_512_v4
+16/90254/campos_512_v4
+16/90265/campos_512_v4
+16/90287/campos_512_v4
+16/90288/campos_512_v4
+16/90289/campos_512_v4
+16/90290/campos_512_v4
+16/90292/campos_512_v4
+16/90294/campos_512_v4
+16/90296/campos_512_v4
+16/90297/campos_512_v4
+16/90308/campos_512_v4
+16/90319/campos_512_v4
+16/90325/campos_512_v4
+16/90328/campos_512_v4
+16/90333/campos_512_v4
+16/90335/campos_512_v4
+16/90338/campos_512_v4
+16/90351/campos_512_v4
+16/90358/campos_512_v4
+16/90375/campos_512_v4
+16/90389/campos_512_v4
+16/90396/campos_512_v4
+16/90398/campos_512_v4
+16/90405/campos_512_v4
+16/90406/campos_512_v4
+16/90412/campos_512_v4
+16/90418/campos_512_v4
+16/90419/campos_512_v4
+16/90427/campos_512_v4
+16/90429/campos_512_v4
+16/90439/campos_512_v4
+16/90441/campos_512_v4
+16/90461/campos_512_v4
+16/90467/campos_512_v4
+16/90485/campos_512_v4
+16/90490/campos_512_v4
+16/90501/campos_512_v4
+16/90505/campos_512_v4
+16/90515/campos_512_v4
+16/90516/campos_512_v4
+16/90524/campos_512_v4
+16/90535/campos_512_v4
+16/90550/campos_512_v4
+16/90568/campos_512_v4
+16/90584/campos_512_v4
+16/90595/campos_512_v4
+16/90611/campos_512_v4
+16/90616/campos_512_v4
+16/90618/campos_512_v4
+16/90621/campos_512_v4
+16/90625/campos_512_v4
+16/90626/campos_512_v4
+16/90630/campos_512_v4
+16/90631/campos_512_v4
+16/90638/campos_512_v4
+16/90643/campos_512_v4
+16/90644/campos_512_v4
+16/90650/campos_512_v4
+16/90653/campos_512_v4
+16/90676/campos_512_v4
+16/90682/campos_512_v4
+16/90685/campos_512_v4
+16/90693/campos_512_v4
+16/90706/campos_512_v4
+16/90754/campos_512_v4
+16/90759/campos_512_v4
+16/90771/campos_512_v4
+16/90773/campos_512_v4
+16/90777/campos_512_v4
+16/90786/campos_512_v4
+16/90797/campos_512_v4
+16/90798/campos_512_v4
+16/90800/campos_512_v4
+16/90801/campos_512_v4
+16/90804/campos_512_v4
+16/90805/campos_512_v4
+16/90812/campos_512_v4
+16/90833/campos_512_v4
+16/90841/campos_512_v4
+16/90844/campos_512_v4
+16/90864/campos_512_v4
+16/90869/campos_512_v4
+16/90878/campos_512_v4
+16/90884/campos_512_v4
+16/90891/campos_512_v4
+16/90895/campos_512_v4
+16/90903/campos_512_v4
+16/90904/campos_512_v4
+16/90910/campos_512_v4
+16/90913/campos_512_v4
+16/90915/campos_512_v4
+16/90919/campos_512_v4
+16/90920/campos_512_v4
+16/90922/campos_512_v4
+16/90935/campos_512_v4
+16/90941/campos_512_v4
+16/90948/campos_512_v4
+16/90976/campos_512_v4
+16/90979/campos_512_v4
+16/91000/campos_512_v4
+16/91006/campos_512_v4
+16/91007/campos_512_v4
+16/91010/campos_512_v4
+16/91016/campos_512_v4
+16/91018/campos_512_v4
+16/91023/campos_512_v4
+16/91029/campos_512_v4
+16/91031/campos_512_v4
+16/91034/campos_512_v4
+16/91036/campos_512_v4
+16/91044/campos_512_v4
+16/91050/campos_512_v4
+16/91057/campos_512_v4
+16/91075/campos_512_v4
+16/91085/campos_512_v4
+16/91088/campos_512_v4
+16/91089/campos_512_v4
+16/91094/campos_512_v4
+16/91095/campos_512_v4
+16/91101/campos_512_v4
+16/91103/campos_512_v4
+16/91105/campos_512_v4
+16/91109/campos_512_v4
+16/91125/campos_512_v4
+16/91135/campos_512_v4
+16/91150/campos_512_v4
+16/91151/campos_512_v4
+16/91153/campos_512_v4
+16/91156/campos_512_v4
+16/91159/campos_512_v4
+16/91162/campos_512_v4
+16/91174/campos_512_v4
+16/91179/campos_512_v4
+16/91187/campos_512_v4
+16/91196/campos_512_v4
+16/91204/campos_512_v4
+16/91209/campos_512_v4
+16/91213/campos_512_v4
+16/91215/campos_512_v4
+16/91216/campos_512_v4
+16/91222/campos_512_v4
+16/91223/campos_512_v4
+16/91231/campos_512_v4
+16/91232/campos_512_v4
+16/91258/campos_512_v4
+16/91276/campos_512_v4
+16/91292/campos_512_v4
+16/91294/campos_512_v4
+16/91318/campos_512_v4
+16/91326/campos_512_v4
+16/91333/campos_512_v4
+16/91345/campos_512_v4
+16/91352/campos_512_v4
+16/91353/campos_512_v4
+16/91369/campos_512_v4
+16/91375/campos_512_v4
+16/91389/campos_512_v4
+16/91395/campos_512_v4
+16/91396/campos_512_v4
+16/91406/campos_512_v4
+16/91408/campos_512_v4
+16/91410/campos_512_v4
+16/91412/campos_512_v4
+16/91460/campos_512_v4
+16/91465/campos_512_v4
+16/91467/campos_512_v4
+16/91473/campos_512_v4
+16/91495/campos_512_v4
+16/91497/campos_512_v4
+16/91505/campos_512_v4
+16/91507/campos_512_v4
+16/91508/campos_512_v4
+16/91511/campos_512_v4
+16/91519/campos_512_v4
+16/91555/campos_512_v4
+16/91556/campos_512_v4
+16/91557/campos_512_v4
+16/91569/campos_512_v4
+16/91572/campos_512_v4
+16/91594/campos_512_v4
+16/91598/campos_512_v4
+16/91602/campos_512_v4
+16/91609/campos_512_v4
+16/91617/campos_512_v4
+16/91631/campos_512_v4
+16/91636/campos_512_v4
+16/91646/campos_512_v4
+16/91654/campos_512_v4
+16/91657/campos_512_v4
+16/91658/campos_512_v4
+16/91664/campos_512_v4
+16/91669/campos_512_v4
+16/91677/campos_512_v4
+16/91679/campos_512_v4
+16/91689/campos_512_v4
+16/91691/campos_512_v4
+16/91695/campos_512_v4
+16/91703/campos_512_v4
+16/91705/campos_512_v4
+16/91724/campos_512_v4
+16/91729/campos_512_v4
+16/91730/campos_512_v4
+16/91746/campos_512_v4
+16/91762/campos_512_v4
+16/91763/campos_512_v4
+16/91768/campos_512_v4
+16/91774/campos_512_v4
+16/91798/campos_512_v4
+16/91800/campos_512_v4
+16/91801/campos_512_v4
+16/91804/campos_512_v4
+16/91807/campos_512_v4
+16/91826/campos_512_v4
+16/91832/campos_512_v4
+16/91837/campos_512_v4
+16/91846/campos_512_v4
+16/91855/campos_512_v4
+16/91864/campos_512_v4
+16/91884/campos_512_v4
+16/91889/campos_512_v4
+16/91913/campos_512_v4
+16/91914/campos_512_v4
+16/91915/campos_512_v4
+16/91925/campos_512_v4
+16/91931/campos_512_v4
+16/91932/campos_512_v4
+16/91933/campos_512_v4
+16/91935/campos_512_v4
+16/91938/campos_512_v4
+16/91942/campos_512_v4
+16/91944/campos_512_v4
+16/91949/campos_512_v4
+16/91954/campos_512_v4
+16/91976/campos_512_v4
+16/91991/campos_512_v4
+16/91993/campos_512_v4
+16/92034/campos_512_v4
+16/92047/campos_512_v4
+16/92052/campos_512_v4
+16/92056/campos_512_v4
+16/92059/campos_512_v4
+16/92065/campos_512_v4
+16/92069/campos_512_v4
+16/92081/campos_512_v4
+16/92090/campos_512_v4
+16/92099/campos_512_v4
+16/92105/campos_512_v4
+16/92117/campos_512_v4
+16/92118/campos_512_v4
+16/92130/campos_512_v4
+16/92131/campos_512_v4
+16/92132/campos_512_v4
+16/92154/campos_512_v4
+16/92172/campos_512_v4
+16/92175/campos_512_v4
+16/92179/campos_512_v4
+16/92180/campos_512_v4
+16/92185/campos_512_v4
+16/92186/campos_512_v4
+16/92202/campos_512_v4
+16/92215/campos_512_v4
+16/92220/campos_512_v4
+16/92227/campos_512_v4
+16/92237/campos_512_v4
+16/92240/campos_512_v4
+16/92246/campos_512_v4
+16/92247/campos_512_v4
+16/92261/campos_512_v4
+16/92263/campos_512_v4
+16/92265/campos_512_v4
+16/92277/campos_512_v4
+16/92302/campos_512_v4
+16/92310/campos_512_v4
+16/92311/campos_512_v4
+16/92312/campos_512_v4
+16/92322/campos_512_v4
+16/92339/campos_512_v4
+16/92340/campos_512_v4
+16/92341/campos_512_v4
+16/92344/campos_512_v4
+16/92352/campos_512_v4
+16/92379/campos_512_v4
+16/92380/campos_512_v4
+16/92388/campos_512_v4
+16/92418/campos_512_v4
+16/92419/campos_512_v4
+16/92428/campos_512_v4
+16/92447/campos_512_v4
+16/92448/campos_512_v4
+16/92456/campos_512_v4
+16/92460/campos_512_v4
+16/92468/campos_512_v4
+16/92474/campos_512_v4
+16/92478/campos_512_v4
+16/92482/campos_512_v4
+16/92483/campos_512_v4
+16/92485/campos_512_v4
+16/92493/campos_512_v4
+16/92500/campos_512_v4
+16/92506/campos_512_v4
+16/92507/campos_512_v4
+16/92513/campos_512_v4
+16/92525/campos_512_v4
+16/92526/campos_512_v4
+16/92527/campos_512_v4
+16/92532/campos_512_v4
+16/92533/campos_512_v4
+16/92546/campos_512_v4
+16/92551/campos_512_v4
+16/92558/campos_512_v4
+16/92560/campos_512_v4
+16/92567/campos_512_v4
+16/92572/campos_512_v4
+16/92579/campos_512_v4
+16/92588/campos_512_v4
+16/92601/campos_512_v4
+16/92610/campos_512_v4
+16/92622/campos_512_v4
+16/92630/campos_512_v4
+16/92637/campos_512_v4
+16/92642/campos_512_v4
+16/92650/campos_512_v4
+16/92653/campos_512_v4
+16/92654/campos_512_v4
+16/92671/campos_512_v4
+16/92676/campos_512_v4
+16/92709/campos_512_v4
+16/92711/campos_512_v4
+16/92719/campos_512_v4
+16/92720/campos_512_v4
+16/92725/campos_512_v4
+16/92728/campos_512_v4
+16/92742/campos_512_v4
+16/92748/campos_512_v4
+16/92749/campos_512_v4
+16/92751/campos_512_v4
+16/92765/campos_512_v4
+16/92767/campos_512_v4
+16/92769/campos_512_v4
+16/92778/campos_512_v4
+16/92781/campos_512_v4
+16/92796/campos_512_v4
+16/92798/campos_512_v4
+16/92809/campos_512_v4
+16/92810/campos_512_v4
+16/92822/campos_512_v4
+16/92838/campos_512_v4
+16/92843/campos_512_v4
+16/92858/campos_512_v4
+16/92860/campos_512_v4
+16/92861/campos_512_v4
+16/92864/campos_512_v4
+16/92878/campos_512_v4
+16/92880/campos_512_v4
+16/92893/campos_512_v4
+16/92896/campos_512_v4
+16/92907/campos_512_v4
+16/92909/campos_512_v4
+16/92912/campos_512_v4
+16/92923/campos_512_v4
+16/92937/campos_512_v4
+16/92938/campos_512_v4
+16/92943/campos_512_v4
+16/92944/campos_512_v4
+16/92945/campos_512_v4
+16/92947/campos_512_v4
+16/92949/campos_512_v4
+16/92963/campos_512_v4
+16/92970/campos_512_v4
+16/92974/campos_512_v4
+16/92989/campos_512_v4
+16/92990/campos_512_v4
+16/93007/campos_512_v4
+16/93015/campos_512_v4
+16/93020/campos_512_v4
+16/93030/campos_512_v4
+16/93048/campos_512_v4
+16/93061/campos_512_v4
+16/93085/campos_512_v4
+16/93100/campos_512_v4
+16/93102/campos_512_v4
+16/93114/campos_512_v4
+16/93124/campos_512_v4
+16/93128/campos_512_v4
+16/93132/campos_512_v4
+16/93146/campos_512_v4
+16/93151/campos_512_v4
+16/93153/campos_512_v4
+16/93156/campos_512_v4
+16/93172/campos_512_v4
+16/93184/campos_512_v4
+16/93185/campos_512_v4
+16/93199/campos_512_v4
+16/93212/campos_512_v4
+16/93217/campos_512_v4
+16/93225/campos_512_v4
+16/93232/campos_512_v4
+16/93233/campos_512_v4
+16/93250/campos_512_v4
+16/93253/campos_512_v4
+16/93282/campos_512_v4
+16/93293/campos_512_v4
+16/93294/campos_512_v4
+16/93298/campos_512_v4
+16/93305/campos_512_v4
+16/93318/campos_512_v4
+16/93331/campos_512_v4
+16/93338/campos_512_v4
+16/93341/campos_512_v4
+16/93369/campos_512_v4
+16/93373/campos_512_v4
+16/93383/campos_512_v4
+16/93389/campos_512_v4
+16/93393/campos_512_v4
+16/93400/campos_512_v4
+16/93402/campos_512_v4
+16/93403/campos_512_v4
+16/93418/campos_512_v4
+16/93426/campos_512_v4
+16/93438/campos_512_v4
+16/93439/campos_512_v4
+16/93442/campos_512_v4
+16/93444/campos_512_v4
+16/93453/campos_512_v4
+16/93457/campos_512_v4
+16/93462/campos_512_v4
+16/93463/campos_512_v4
+16/93467/campos_512_v4
+16/93471/campos_512_v4
+16/93476/campos_512_v4
+16/93482/campos_512_v4
+16/93483/campos_512_v4
+16/93488/campos_512_v4
+16/93489/campos_512_v4
+16/93494/campos_512_v4
+16/93502/campos_512_v4
+16/93528/campos_512_v4
+16/93530/campos_512_v4
+16/93543/campos_512_v4
+16/93551/campos_512_v4
+16/93558/campos_512_v4
+16/93566/campos_512_v4
+16/93571/campos_512_v4
+16/93574/campos_512_v4
+16/93582/campos_512_v4
+16/93607/campos_512_v4
+16/93620/campos_512_v4
+16/93631/campos_512_v4
+16/93675/campos_512_v4
+16/93676/campos_512_v4
+16/93686/campos_512_v4
+16/93693/campos_512_v4
+16/93701/campos_512_v4
+16/93704/campos_512_v4
+16/93705/campos_512_v4
+16/93713/campos_512_v4
+16/93720/campos_512_v4
+16/93734/campos_512_v4
+16/93735/campos_512_v4
+16/93741/campos_512_v4
+16/93750/campos_512_v4
+16/93754/campos_512_v4
+16/93760/campos_512_v4
+16/93764/campos_512_v4
+16/93777/campos_512_v4
+16/93788/campos_512_v4
+16/93792/campos_512_v4
+16/93793/campos_512_v4
+16/93794/campos_512_v4
+16/93807/campos_512_v4
+16/93808/campos_512_v4
+16/93812/campos_512_v4
+16/93822/campos_512_v4
+16/93827/campos_512_v4
+16/93848/campos_512_v4
+16/93851/campos_512_v4
+16/93854/campos_512_v4
+16/93857/campos_512_v4
+16/93864/campos_512_v4
+16/93866/campos_512_v4
+16/93867/campos_512_v4
+16/93871/campos_512_v4
+16/93876/campos_512_v4
+16/93879/campos_512_v4
+16/93889/campos_512_v4
+16/93899/campos_512_v4
+16/93917/campos_512_v4
+16/93918/campos_512_v4
+16/93922/campos_512_v4
+16/93924/campos_512_v4
+16/93938/campos_512_v4
+16/93948/campos_512_v4
+16/93953/campos_512_v4
+16/93954/campos_512_v4
+16/93960/campos_512_v4
+16/93968/campos_512_v4
+16/93972/campos_512_v4
+16/93985/campos_512_v4
+16/93991/campos_512_v4
+16/93996/campos_512_v4
+16/94003/campos_512_v4
+16/94013/campos_512_v4
+16/94015/campos_512_v4
+16/94018/campos_512_v4
+16/94019/campos_512_v4
+16/94029/campos_512_v4
+16/94069/campos_512_v4
+16/94070/campos_512_v4
+16/94083/campos_512_v4
+16/94086/campos_512_v4
+16/94099/campos_512_v4
+16/94108/campos_512_v4
+16/94109/campos_512_v4
+16/94110/campos_512_v4
+16/94115/campos_512_v4
+16/94126/campos_512_v4
+16/94136/campos_512_v4
+16/94137/campos_512_v4
+16/94139/campos_512_v4
+16/94142/campos_512_v4
+16/94152/campos_512_v4
+16/94158/campos_512_v4
+16/94171/campos_512_v4
+16/94176/campos_512_v4
+16/94179/campos_512_v4
+16/94183/campos_512_v4
+16/94187/campos_512_v4
+16/94198/campos_512_v4
+16/94201/campos_512_v4
+16/94204/campos_512_v4
+16/94209/campos_512_v4
+16/94218/campos_512_v4
+16/94220/campos_512_v4
+16/94224/campos_512_v4
+16/94225/campos_512_v4
+16/94226/campos_512_v4
+16/94232/campos_512_v4
+16/94238/campos_512_v4
+16/94240/campos_512_v4
+16/94266/campos_512_v4
+16/94272/campos_512_v4
+16/94289/campos_512_v4
+16/94311/campos_512_v4
+16/94315/campos_512_v4
+16/94316/campos_512_v4
+16/94320/campos_512_v4
+16/94327/campos_512_v4
+16/94332/campos_512_v4
+16/94334/campos_512_v4
+16/94345/campos_512_v4
+16/94359/campos_512_v4
+16/94377/campos_512_v4
+16/94379/campos_512_v4
+16/94384/campos_512_v4
+16/94392/campos_512_v4
+16/94393/campos_512_v4
+16/94402/campos_512_v4
+16/94403/campos_512_v4
+16/94422/campos_512_v4
+16/94432/campos_512_v4
+16/94455/campos_512_v4
+16/94474/campos_512_v4
+16/94479/campos_512_v4
+16/94491/campos_512_v4
+16/94500/campos_512_v4
+16/94516/campos_512_v4
+16/94525/campos_512_v4
+16/94529/campos_512_v4
+16/94542/campos_512_v4
+16/94545/campos_512_v4
+16/94554/campos_512_v4
+16/94557/campos_512_v4
+16/94558/campos_512_v4
+16/94559/campos_512_v4
+16/94560/campos_512_v4
+16/94585/campos_512_v4
+16/94590/campos_512_v4
+16/94592/campos_512_v4
+16/94599/campos_512_v4
+16/94608/campos_512_v4
+16/94633/campos_512_v4
+16/94642/campos_512_v4
+16/94646/campos_512_v4
+16/94647/campos_512_v4
+16/94656/campos_512_v4
+16/94699/campos_512_v4
+16/94719/campos_512_v4
+16/94726/campos_512_v4
+16/94737/campos_512_v4
+16/94739/campos_512_v4
+16/94746/campos_512_v4
+16/94748/campos_512_v4
+16/94749/campos_512_v4
+16/94759/campos_512_v4
+16/94771/campos_512_v4
+16/94778/campos_512_v4
+16/94783/campos_512_v4
+16/94792/campos_512_v4
+16/94798/campos_512_v4
+16/94812/campos_512_v4
+16/94814/campos_512_v4
+16/94821/campos_512_v4
+16/94829/campos_512_v4
+16/94854/campos_512_v4
+16/94858/campos_512_v4
+16/94869/campos_512_v4
+16/94880/campos_512_v4
+16/94901/campos_512_v4
+16/94914/campos_512_v4
+16/94921/campos_512_v4
+16/94935/campos_512_v4
+16/94949/campos_512_v4
+17/95002/campos_512_v4
+17/95011/campos_512_v4
+17/95022/campos_512_v4
+17/95028/campos_512_v4
+17/95035/campos_512_v4
+17/95041/campos_512_v4
+17/95048/campos_512_v4
+17/95062/campos_512_v4
+17/95066/campos_512_v4
+17/95069/campos_512_v4
+17/95072/campos_512_v4
+17/95082/campos_512_v4
+17/95088/campos_512_v4
+17/95093/campos_512_v4
+17/95100/campos_512_v4
+17/95110/campos_512_v4
+17/95114/campos_512_v4
+17/95115/campos_512_v4
+17/95126/campos_512_v4
+17/95130/campos_512_v4
+17/95132/campos_512_v4
+17/95137/campos_512_v4
+17/95147/campos_512_v4
+17/95153/campos_512_v4
+17/95165/campos_512_v4
+17/95182/campos_512_v4
+17/95216/campos_512_v4
+17/95230/campos_512_v4
+17/95234/campos_512_v4
+17/95241/campos_512_v4
+17/95242/campos_512_v4
+17/95248/campos_512_v4
+17/95249/campos_512_v4
+17/95264/campos_512_v4
+17/95298/campos_512_v4
+17/95317/campos_512_v4
+17/95320/campos_512_v4
+17/95325/campos_512_v4
+17/95335/campos_512_v4
+17/95353/campos_512_v4
+17/95356/campos_512_v4
+17/95360/campos_512_v4
+17/95362/campos_512_v4
+17/95372/campos_512_v4
+17/95373/campos_512_v4
+17/95378/campos_512_v4
+17/95388/campos_512_v4
+17/95392/campos_512_v4
+17/95401/campos_512_v4
+17/95414/campos_512_v4
+17/95420/campos_512_v4
+17/95425/campos_512_v4
+17/95431/campos_512_v4
+17/95438/campos_512_v4
+17/95442/campos_512_v4
+17/95444/campos_512_v4
+17/95457/campos_512_v4
+17/95460/campos_512_v4
+17/95461/campos_512_v4
+17/95464/campos_512_v4
+17/95468/campos_512_v4
+17/95470/campos_512_v4
+17/95472/campos_512_v4
+17/95477/campos_512_v4
+17/95478/campos_512_v4
+17/95481/campos_512_v4
+17/95487/campos_512_v4
+17/95495/campos_512_v4
+17/95506/campos_512_v4
+17/95511/campos_512_v4
+17/95514/campos_512_v4
+17/95518/campos_512_v4
+17/95519/campos_512_v4
+17/95522/campos_512_v4
+17/95532/campos_512_v4
+17/95535/campos_512_v4
+17/95544/campos_512_v4
+17/95549/campos_512_v4
+17/95588/campos_512_v4
+17/95592/campos_512_v4
+17/95593/campos_512_v4
+17/95594/campos_512_v4
+17/95597/campos_512_v4
+17/95601/campos_512_v4
+17/95609/campos_512_v4
+17/95615/campos_512_v4
+17/95618/campos_512_v4
+17/95625/campos_512_v4
+17/95626/campos_512_v4
+17/95636/campos_512_v4
+17/95645/campos_512_v4
+17/95675/campos_512_v4
+17/95681/campos_512_v4
+17/95684/campos_512_v4
+17/95687/campos_512_v4
+17/95695/campos_512_v4
+17/95703/campos_512_v4
+17/95704/campos_512_v4
+17/95712/campos_512_v4
+17/95713/campos_512_v4
+17/95715/campos_512_v4
+17/95724/campos_512_v4
+17/95755/campos_512_v4
+17/95782/campos_512_v4
+17/95787/campos_512_v4
+17/95810/campos_512_v4
+17/95821/campos_512_v4
+17/95830/campos_512_v4
+17/95831/campos_512_v4
+17/95836/campos_512_v4
+17/95837/campos_512_v4
+17/95854/campos_512_v4
+17/95855/campos_512_v4
+17/95858/campos_512_v4
+17/95860/campos_512_v4
+17/95868/campos_512_v4
+17/95874/campos_512_v4
+17/95894/campos_512_v4
+17/95897/campos_512_v4
+17/95906/campos_512_v4
+17/95913/campos_512_v4
+17/95932/campos_512_v4
+17/95935/campos_512_v4
+17/95936/campos_512_v4
+17/95960/campos_512_v4
+17/95965/campos_512_v4
+17/95973/campos_512_v4
+17/95980/campos_512_v4
+17/95981/campos_512_v4
+17/95986/campos_512_v4
+17/96003/campos_512_v4
+17/96004/campos_512_v4
+17/96005/campos_512_v4
+17/96028/campos_512_v4
+17/96040/campos_512_v4
+17/96047/campos_512_v4
+17/96056/campos_512_v4
+17/96057/campos_512_v4
+17/96058/campos_512_v4
+17/96064/campos_512_v4
+17/96071/campos_512_v4
+17/96075/campos_512_v4
+17/96095/campos_512_v4
+17/96096/campos_512_v4
+17/96098/campos_512_v4
+17/96104/campos_512_v4
+17/96113/campos_512_v4
+17/96115/campos_512_v4
+17/96122/campos_512_v4
+17/96129/campos_512_v4
+17/96140/campos_512_v4
+17/96142/campos_512_v4
+17/96143/campos_512_v4
+17/96144/campos_512_v4
+17/96153/campos_512_v4
+17/96156/campos_512_v4
+17/96164/campos_512_v4
+17/96171/campos_512_v4
+17/96183/campos_512_v4
+17/96204/campos_512_v4
+17/96206/campos_512_v4
+17/96222/campos_512_v4
+17/96227/campos_512_v4
+17/96229/campos_512_v4
+17/96247/campos_512_v4
+17/96257/campos_512_v4
+17/96265/campos_512_v4
+17/96277/campos_512_v4
+17/96278/campos_512_v4
+17/96283/campos_512_v4
+17/96300/campos_512_v4
+17/96301/campos_512_v4
+17/96303/campos_512_v4
+17/96311/campos_512_v4
+17/96314/campos_512_v4
+17/96336/campos_512_v4
+17/96343/campos_512_v4
+17/96351/campos_512_v4
+17/96356/campos_512_v4
+17/96374/campos_512_v4
+17/96377/campos_512_v4
+17/96378/campos_512_v4
+17/96390/campos_512_v4
+17/96401/campos_512_v4
+17/96410/campos_512_v4
+17/96413/campos_512_v4
+17/96420/campos_512_v4
+17/96435/campos_512_v4
+17/96439/campos_512_v4
+17/96441/campos_512_v4
+17/96447/campos_512_v4
+17/96453/campos_512_v4
+17/96454/campos_512_v4
+17/96458/campos_512_v4
+17/96464/campos_512_v4
+17/96470/campos_512_v4
+17/96500/campos_512_v4
+17/96520/campos_512_v4
+17/96524/campos_512_v4
+17/96525/campos_512_v4
+17/96526/campos_512_v4
+17/96546/campos_512_v4
+17/96548/campos_512_v4
+17/96554/campos_512_v4
+17/96558/campos_512_v4
+17/96561/campos_512_v4
+17/96574/campos_512_v4
+17/96575/campos_512_v4
+17/96579/campos_512_v4
+17/96583/campos_512_v4
+17/96592/campos_512_v4
+17/96604/campos_512_v4
+17/96620/campos_512_v4
+17/96625/campos_512_v4
+17/96635/campos_512_v4
+17/96643/campos_512_v4
+17/96651/campos_512_v4
+17/96660/campos_512_v4
+17/96671/campos_512_v4
+17/96687/campos_512_v4
+17/96689/campos_512_v4
+17/96722/campos_512_v4
+17/96731/campos_512_v4
+17/96739/campos_512_v4
+17/96742/campos_512_v4
+17/96751/campos_512_v4
+17/96757/campos_512_v4
+17/96761/campos_512_v4
+17/96764/campos_512_v4
+17/96766/campos_512_v4
+17/96777/campos_512_v4
+17/96780/campos_512_v4
+17/96781/campos_512_v4
+17/96783/campos_512_v4
+17/96793/campos_512_v4
+17/96794/campos_512_v4
+17/96802/campos_512_v4
+17/96806/campos_512_v4
+17/96812/campos_512_v4
+17/96829/campos_512_v4
+17/96830/campos_512_v4
+17/96834/campos_512_v4
+17/96836/campos_512_v4
+17/96843/campos_512_v4
+17/96847/campos_512_v4
+17/96854/campos_512_v4
+17/96858/campos_512_v4
+17/96860/campos_512_v4
+17/96880/campos_512_v4
+17/96884/campos_512_v4
+17/96885/campos_512_v4
+17/96886/campos_512_v4
+17/96894/campos_512_v4
+17/96907/campos_512_v4
+17/96928/campos_512_v4
+17/96933/campos_512_v4
+17/96961/campos_512_v4
+17/96962/campos_512_v4
+17/96989/campos_512_v4
+17/96990/campos_512_v4
+17/96991/campos_512_v4
+17/97007/campos_512_v4
+17/97009/campos_512_v4
+17/97011/campos_512_v4
+17/97014/campos_512_v4
+17/97026/campos_512_v4
+17/97033/campos_512_v4
+17/97035/campos_512_v4
+17/97041/campos_512_v4
+17/97046/campos_512_v4
+17/97051/campos_512_v4
+17/97057/campos_512_v4
+17/97062/campos_512_v4
+17/97074/campos_512_v4
+17/97078/campos_512_v4
+17/97088/campos_512_v4
+17/97099/campos_512_v4
+17/97105/campos_512_v4
+17/97110/campos_512_v4
+17/97113/campos_512_v4
+17/97126/campos_512_v4
+17/97130/campos_512_v4
+17/97147/campos_512_v4
+17/97149/campos_512_v4
+17/97153/campos_512_v4
+17/97154/campos_512_v4
+17/97172/campos_512_v4
+17/97173/campos_512_v4
+17/97178/campos_512_v4
+17/97192/campos_512_v4
+17/97215/campos_512_v4
+17/97218/campos_512_v4
+17/97233/campos_512_v4
+17/97238/campos_512_v4
+17/97239/campos_512_v4
+17/97240/campos_512_v4
+17/97245/campos_512_v4
+17/97255/campos_512_v4
+17/97279/campos_512_v4
+17/97285/campos_512_v4
+17/97296/campos_512_v4
+17/97303/campos_512_v4
+17/97324/campos_512_v4
+17/97328/campos_512_v4
+17/97339/campos_512_v4
+17/97345/campos_512_v4
+17/97353/campos_512_v4
+17/97360/campos_512_v4
+17/97361/campos_512_v4
+17/97366/campos_512_v4
+17/97378/campos_512_v4
+17/97381/campos_512_v4
+17/97384/campos_512_v4
+17/97386/campos_512_v4
+17/97387/campos_512_v4
+17/97389/campos_512_v4
+17/97403/campos_512_v4
+17/97413/campos_512_v4
+17/97424/campos_512_v4
+17/97427/campos_512_v4
+17/97435/campos_512_v4
+17/97450/campos_512_v4
+17/97451/campos_512_v4
+17/97458/campos_512_v4
+17/97464/campos_512_v4
+17/97482/campos_512_v4
+17/97497/campos_512_v4
+17/97498/campos_512_v4
+17/97501/campos_512_v4
+17/97506/campos_512_v4
+17/97507/campos_512_v4
+17/97511/campos_512_v4
+17/97512/campos_512_v4
+17/97514/campos_512_v4
+17/97518/campos_512_v4
+17/97540/campos_512_v4
+17/97541/campos_512_v4
+17/97555/campos_512_v4
+17/97563/campos_512_v4
+17/97577/campos_512_v4
+17/97578/campos_512_v4
+17/97588/campos_512_v4
+17/97592/campos_512_v4
+17/97601/campos_512_v4
+17/97617/campos_512_v4
+17/97621/campos_512_v4
+17/97625/campos_512_v4
+17/97627/campos_512_v4
+17/97628/campos_512_v4
+17/97636/campos_512_v4
+17/97640/campos_512_v4
+17/97642/campos_512_v4
+17/97649/campos_512_v4
+17/97688/campos_512_v4
+17/97703/campos_512_v4
+17/97718/campos_512_v4
+17/97754/campos_512_v4
+17/97757/campos_512_v4
+17/97758/campos_512_v4
+17/97760/campos_512_v4
+17/97771/campos_512_v4
+17/97777/campos_512_v4
+17/97788/campos_512_v4
+17/97799/campos_512_v4
+17/97806/campos_512_v4
+17/97812/campos_512_v4
+17/97813/campos_512_v4
+17/97814/campos_512_v4
+17/97815/campos_512_v4
+17/97840/campos_512_v4
+17/97872/campos_512_v4
+17/97881/campos_512_v4
+17/97883/campos_512_v4
+17/97933/campos_512_v4
+17/97940/campos_512_v4
+17/97945/campos_512_v4
+17/97948/campos_512_v4
+17/97954/campos_512_v4
+17/97958/campos_512_v4
+17/97960/campos_512_v4
+17/97963/campos_512_v4
+17/97970/campos_512_v4
+17/97971/campos_512_v4
+17/97990/campos_512_v4
+17/97992/campos_512_v4
+17/97993/campos_512_v4
+17/97998/campos_512_v4
+17/98005/campos_512_v4
+17/98012/campos_512_v4
+17/98021/campos_512_v4
+17/98029/campos_512_v4
+17/98048/campos_512_v4
+17/98050/campos_512_v4
+17/98052/campos_512_v4
+17/98071/campos_512_v4
+17/98073/campos_512_v4
+17/98090/campos_512_v4
+17/98092/campos_512_v4
+17/98096/campos_512_v4
+17/98100/campos_512_v4
+17/98106/campos_512_v4
+17/98108/campos_512_v4
+17/98111/campos_512_v4
+17/98116/campos_512_v4
+17/98123/campos_512_v4
+17/98133/campos_512_v4
+17/98137/campos_512_v4
+17/98138/campos_512_v4
+17/98139/campos_512_v4
+17/98144/campos_512_v4
+17/98145/campos_512_v4
+17/98149/campos_512_v4
+17/98151/campos_512_v4
+17/98154/campos_512_v4
+17/98163/campos_512_v4
+17/98164/campos_512_v4
+17/98166/campos_512_v4
+17/98174/campos_512_v4
+17/98178/campos_512_v4
+17/98181/campos_512_v4
+17/98196/campos_512_v4
+17/98200/campos_512_v4
+17/98212/campos_512_v4
+17/98214/campos_512_v4
+17/98223/campos_512_v4
+17/98226/campos_512_v4
+17/98239/campos_512_v4
+17/98243/campos_512_v4
+17/98249/campos_512_v4
+17/98282/campos_512_v4
+17/98302/campos_512_v4
+17/98303/campos_512_v4
+17/98306/campos_512_v4
+17/98307/campos_512_v4
+17/98312/campos_512_v4
+17/98315/campos_512_v4
+17/98329/campos_512_v4
+17/98331/campos_512_v4
+17/98334/campos_512_v4
+17/98337/campos_512_v4
+17/98343/campos_512_v4
+17/98355/campos_512_v4
+17/98362/campos_512_v4
+17/98368/campos_512_v4
+17/98380/campos_512_v4
+17/98403/campos_512_v4
+17/98408/campos_512_v4
+17/98416/campos_512_v4
+17/98423/campos_512_v4
+17/98424/campos_512_v4
+17/98430/campos_512_v4
+17/98434/campos_512_v4
+17/98460/campos_512_v4
+17/98463/campos_512_v4
+17/98466/campos_512_v4
+17/98469/campos_512_v4
+17/98486/campos_512_v4
+17/98488/campos_512_v4
+17/98490/campos_512_v4
+17/98507/campos_512_v4
+17/98509/campos_512_v4
+17/98510/campos_512_v4
+17/98514/campos_512_v4
+17/98518/campos_512_v4
+17/98529/campos_512_v4
+17/98534/campos_512_v4
+17/98560/campos_512_v4
+17/98562/campos_512_v4
+17/98569/campos_512_v4
+17/98572/campos_512_v4
+17/98576/campos_512_v4
+17/98587/campos_512_v4
+17/98588/campos_512_v4
+17/98596/campos_512_v4
+17/98603/campos_512_v4
+17/98604/campos_512_v4
+17/98605/campos_512_v4
+17/98608/campos_512_v4
+17/98623/campos_512_v4
+17/98630/campos_512_v4
+17/98645/campos_512_v4
+17/98646/campos_512_v4
+17/98652/campos_512_v4
+17/98655/campos_512_v4
+17/98659/campos_512_v4
+17/98662/campos_512_v4
+17/98666/campos_512_v4
+17/98695/campos_512_v4
+17/98696/campos_512_v4
+17/98702/campos_512_v4
+17/98703/campos_512_v4
+17/98705/campos_512_v4
+17/98714/campos_512_v4
+17/98716/campos_512_v4
+17/98723/campos_512_v4
+17/98759/campos_512_v4
+17/98772/campos_512_v4
+17/98777/campos_512_v4
+17/98789/campos_512_v4
+17/98800/campos_512_v4
+17/98804/campos_512_v4
+17/98836/campos_512_v4
+17/98837/campos_512_v4
+17/98856/campos_512_v4
+17/98859/campos_512_v4
+17/98897/campos_512_v4
+17/98900/campos_512_v4
+17/98912/campos_512_v4
+17/98913/campos_512_v4
+17/98914/campos_512_v4
+17/98916/campos_512_v4
+17/98938/campos_512_v4
+17/98949/campos_512_v4
+17/98952/campos_512_v4
+17/98953/campos_512_v4
+17/98958/campos_512_v4
+17/98963/campos_512_v4
+17/98979/campos_512_v4
+17/98991/campos_512_v4
+17/99001/campos_512_v4
+17/99002/campos_512_v4
+17/99004/campos_512_v4
+17/99008/campos_512_v4
+17/99020/campos_512_v4
+17/99021/campos_512_v4
+17/99034/campos_512_v4
+17/99056/campos_512_v4
+17/99078/campos_512_v4
+17/99083/campos_512_v4
+17/99085/campos_512_v4
+17/99089/campos_512_v4
+17/99093/campos_512_v4
+17/99094/campos_512_v4
+17/99098/campos_512_v4
+17/99106/campos_512_v4
+17/99110/campos_512_v4
+17/99136/campos_512_v4
+17/99140/campos_512_v4
+17/99150/campos_512_v4
+17/99160/campos_512_v4
+17/99167/campos_512_v4
+17/99170/campos_512_v4
+17/99172/campos_512_v4
+17/99187/campos_512_v4
+17/99188/campos_512_v4
+17/99192/campos_512_v4
+17/99193/campos_512_v4
+17/99195/campos_512_v4
+17/99198/campos_512_v4
+17/99212/campos_512_v4
+17/99226/campos_512_v4
+17/99229/campos_512_v4
+17/99233/campos_512_v4
+17/99241/campos_512_v4
+17/99252/campos_512_v4
+17/99258/campos_512_v4
+17/99270/campos_512_v4
+17/99302/campos_512_v4
+17/99305/campos_512_v4
+17/99316/campos_512_v4
+17/99318/campos_512_v4
+17/99319/campos_512_v4
+17/99326/campos_512_v4
+17/99332/campos_512_v4
+17/99345/campos_512_v4
+17/99352/campos_512_v4
+17/99355/campos_512_v4
+17/99365/campos_512_v4
+17/99369/campos_512_v4
+17/99370/campos_512_v4
+17/99372/campos_512_v4
+17/99379/campos_512_v4
+17/99386/campos_512_v4
+17/99398/campos_512_v4
+17/99400/campos_512_v4
+17/99404/campos_512_v4
+17/99405/campos_512_v4
+17/99416/campos_512_v4
+17/99423/campos_512_v4
+17/99424/campos_512_v4
+17/99425/campos_512_v4
+17/99429/campos_512_v4
+17/99434/campos_512_v4
+17/99445/campos_512_v4
+17/99447/campos_512_v4
+17/99455/campos_512_v4
+17/99463/campos_512_v4
+17/99473/campos_512_v4
+17/99479/campos_512_v4
+17/99481/campos_512_v4
+17/99486/campos_512_v4
+17/99490/campos_512_v4
+17/99506/campos_512_v4
+17/99517/campos_512_v4
+17/99520/campos_512_v4
+17/99530/campos_512_v4
+17/99531/campos_512_v4
+17/99532/campos_512_v4
+17/99539/campos_512_v4
+17/99546/campos_512_v4
+17/99556/campos_512_v4
+17/99565/campos_512_v4
+17/99574/campos_512_v4
+17/99586/campos_512_v4
+17/99595/campos_512_v4
+17/99597/campos_512_v4
+17/99602/campos_512_v4
+17/99603/campos_512_v4
+17/99605/campos_512_v4
+17/99606/campos_512_v4
+17/99611/campos_512_v4
+17/99614/campos_512_v4
+17/99626/campos_512_v4
+17/99643/campos_512_v4
+17/99648/campos_512_v4
+17/99658/campos_512_v4
+17/99663/campos_512_v4
+17/99667/campos_512_v4
+17/99668/campos_512_v4
+17/99673/campos_512_v4
+17/99674/campos_512_v4
+17/99686/campos_512_v4
+17/99702/campos_512_v4
+17/99716/campos_512_v4
+17/99724/campos_512_v4
+17/99727/campos_512_v4
+17/99730/campos_512_v4
+17/99732/campos_512_v4
+17/99733/campos_512_v4
+17/99736/campos_512_v4
+17/99747/campos_512_v4
+17/99753/campos_512_v4
+17/99763/campos_512_v4
+17/99778/campos_512_v4
+17/99779/campos_512_v4
+17/99783/campos_512_v4
+17/99791/campos_512_v4
+17/99796/campos_512_v4
+17/99797/campos_512_v4
+17/99817/campos_512_v4
+17/99832/campos_512_v4
+17/99833/campos_512_v4
+17/99840/campos_512_v4
+17/99844/campos_512_v4
+17/99859/campos_512_v4
+17/99862/campos_512_v4
+17/99870/campos_512_v4
+17/99883/campos_512_v4
+17/99885/campos_512_v4
+17/99907/campos_512_v4
+17/99921/campos_512_v4
+17/99922/campos_512_v4
+17/99924/campos_512_v4
+17/99925/campos_512_v4
+17/99939/campos_512_v4
+17/99948/campos_512_v4
+17/99959/campos_512_v4
+17/99960/campos_512_v4
+17/99973/campos_512_v4
+17/99977/campos_512_v4
+2/20004/campos_512_v4
+2/20010/campos_512_v4
+2/20012/campos_512_v4
+2/20028/campos_512_v4
+2/20029/campos_512_v4
+2/20031/campos_512_v4
+2/20044/campos_512_v4
+2/20055/campos_512_v4
+2/20065/campos_512_v4
+2/20067/campos_512_v4
+2/20081/campos_512_v4
+2/20087/campos_512_v4
+2/20098/campos_512_v4
+2/20110/campos_512_v4
+2/20115/campos_512_v4
+2/20125/campos_512_v4
+2/20128/campos_512_v4
+2/20129/campos_512_v4
+2/20131/campos_512_v4
+2/20135/campos_512_v4
+2/20137/campos_512_v4
+2/20143/campos_512_v4
+2/20145/campos_512_v4
+2/20186/campos_512_v4
+2/20187/campos_512_v4
+2/20192/campos_512_v4
+2/20239/campos_512_v4
+2/20243/campos_512_v4
+2/20270/campos_512_v4
+2/20271/campos_512_v4
+2/20273/campos_512_v4
+2/20275/campos_512_v4
+2/20281/campos_512_v4
+2/20302/campos_512_v4
+2/20314/campos_512_v4
+2/20337/campos_512_v4
+2/20343/campos_512_v4
+2/20344/campos_512_v4
+2/20361/campos_512_v4
+2/20369/campos_512_v4
+2/20374/campos_512_v4
+2/20377/campos_512_v4
+2/20409/campos_512_v4
+2/20411/campos_512_v4
+2/20424/campos_512_v4
+2/20437/campos_512_v4
+2/20448/campos_512_v4
+2/20451/campos_512_v4
+2/20454/campos_512_v4
+2/20472/campos_512_v4
+2/20473/campos_512_v4
+2/20487/campos_512_v4
+2/20495/campos_512_v4
+2/20517/campos_512_v4
+2/20519/campos_512_v4
+2/20522/campos_512_v4
+2/20541/campos_512_v4
+2/20545/campos_512_v4
+2/20548/campos_512_v4
+2/20552/campos_512_v4
+2/20560/campos_512_v4
+2/20583/campos_512_v4
+2/20588/campos_512_v4
+2/20593/campos_512_v4
+2/20598/campos_512_v4
+2/20603/campos_512_v4
+2/20606/campos_512_v4
+2/20625/campos_512_v4
+2/20647/campos_512_v4
+2/20652/campos_512_v4
+2/20656/campos_512_v4
+2/20670/campos_512_v4
+2/20685/campos_512_v4
+2/20698/campos_512_v4
+2/20709/campos_512_v4
+2/20729/campos_512_v4
+2/20734/campos_512_v4
+2/20739/campos_512_v4
+2/20743/campos_512_v4
+2/20749/campos_512_v4
+2/20754/campos_512_v4
+2/20761/campos_512_v4
+2/20768/campos_512_v4
+2/20772/campos_512_v4
+2/20775/campos_512_v4
+2/20780/campos_512_v4
+2/20783/campos_512_v4
+2/20793/campos_512_v4
+2/20795/campos_512_v4
+2/20801/campos_512_v4
+2/20804/campos_512_v4
+2/20828/campos_512_v4
+2/20831/campos_512_v4
+2/20840/campos_512_v4
+2/20841/campos_512_v4
+2/20846/campos_512_v4
+2/20851/campos_512_v4
+2/20858/campos_512_v4
+2/20862/campos_512_v4
+2/20863/campos_512_v4
+2/20890/campos_512_v4
+2/20896/campos_512_v4
+2/20902/campos_512_v4
+2/20914/campos_512_v4
+2/20915/campos_512_v4
+2/20920/campos_512_v4
+2/20921/campos_512_v4
+2/20929/campos_512_v4
+2/20944/campos_512_v4
+2/20946/campos_512_v4
+2/20947/campos_512_v4
+2/20952/campos_512_v4
+2/20954/campos_512_v4
+2/20957/campos_512_v4
+2/20959/campos_512_v4
+2/20964/campos_512_v4
+2/20972/campos_512_v4
+2/20987/campos_512_v4
+2/20995/campos_512_v4
+2/21003/campos_512_v4
+2/21004/campos_512_v4
+2/21006/campos_512_v4
+2/21021/campos_512_v4
+2/21031/campos_512_v4
+2/21064/campos_512_v4
+2/21073/campos_512_v4
+2/21074/campos_512_v4
+2/21076/campos_512_v4
+2/21077/campos_512_v4
+2/21082/campos_512_v4
+2/21090/campos_512_v4
+2/21091/campos_512_v4
+2/21110/campos_512_v4
+2/21114/campos_512_v4
+2/21116/campos_512_v4
+2/21121/campos_512_v4
+2/21133/campos_512_v4
+2/21140/campos_512_v4
+2/21150/campos_512_v4
+2/21155/campos_512_v4
+2/21156/campos_512_v4
+2/21163/campos_512_v4
+2/21171/campos_512_v4
+2/21176/campos_512_v4
+2/21180/campos_512_v4
+2/21200/campos_512_v4
+2/21211/campos_512_v4
+2/21215/campos_512_v4
+2/21218/campos_512_v4
+2/21224/campos_512_v4
+2/21226/campos_512_v4
+2/21228/campos_512_v4
+2/21233/campos_512_v4
+2/21239/campos_512_v4
+2/21248/campos_512_v4
+2/21255/campos_512_v4
+2/21276/campos_512_v4
+2/21278/campos_512_v4
+2/21281/campos_512_v4
+2/21286/campos_512_v4
+2/21293/campos_512_v4
+2/21302/campos_512_v4
+2/21320/campos_512_v4
+2/21324/campos_512_v4
+2/21326/campos_512_v4
+2/21334/campos_512_v4
+2/21342/campos_512_v4
+2/21349/campos_512_v4
+2/21358/campos_512_v4
+2/21369/campos_512_v4
+2/21374/campos_512_v4
+2/21375/campos_512_v4
+2/21382/campos_512_v4
+2/21386/campos_512_v4
+2/21399/campos_512_v4
+2/21417/campos_512_v4
+2/21427/campos_512_v4
+2/21430/campos_512_v4
+2/21433/campos_512_v4
+2/21434/campos_512_v4
+2/21447/campos_512_v4
+2/21462/campos_512_v4
+2/21475/campos_512_v4
+2/21486/campos_512_v4
+2/21494/campos_512_v4
+2/21500/campos_512_v4
+2/21508/campos_512_v4
+2/21509/campos_512_v4
+2/21513/campos_512_v4
+2/21519/campos_512_v4
+2/21522/campos_512_v4
+2/21525/campos_512_v4
+2/21528/campos_512_v4
+2/21530/campos_512_v4
+2/21541/campos_512_v4
+2/21554/campos_512_v4
+2/21556/campos_512_v4
+2/21562/campos_512_v4
+2/21576/campos_512_v4
+2/21577/campos_512_v4
+2/21598/campos_512_v4
+2/21608/campos_512_v4
+2/21611/campos_512_v4
+2/21616/campos_512_v4
+2/21621/campos_512_v4
+2/21634/campos_512_v4
+2/21635/campos_512_v4
+2/21637/campos_512_v4
+2/21670/campos_512_v4
+2/21675/campos_512_v4
+2/21681/campos_512_v4
+2/21696/campos_512_v4
+2/21712/campos_512_v4
+2/21728/campos_512_v4
+2/21729/campos_512_v4
+2/21735/campos_512_v4
+2/21738/campos_512_v4
+2/21740/campos_512_v4
+2/21746/campos_512_v4
+2/21747/campos_512_v4
+2/21757/campos_512_v4
+2/21765/campos_512_v4
+2/21771/campos_512_v4
+2/21783/campos_512_v4
+2/21789/campos_512_v4
+2/21793/campos_512_v4
+2/21799/campos_512_v4
+2/21803/campos_512_v4
+2/21810/campos_512_v4
+2/21813/campos_512_v4
+2/21821/campos_512_v4
+2/21825/campos_512_v4
+2/21839/campos_512_v4
+2/21852/campos_512_v4
+2/21859/campos_512_v4
+2/21866/campos_512_v4
+2/21870/campos_512_v4
+2/21877/campos_512_v4
+2/21880/campos_512_v4
+2/21884/campos_512_v4
+2/21891/campos_512_v4
+2/21894/campos_512_v4
+2/21900/campos_512_v4
+2/21903/campos_512_v4
+2/21904/campos_512_v4
+2/21908/campos_512_v4
+2/21909/campos_512_v4
+2/21910/campos_512_v4
+2/21911/campos_512_v4
+2/21914/campos_512_v4
+2/21919/campos_512_v4
+2/21920/campos_512_v4
+2/21924/campos_512_v4
+2/21925/campos_512_v4
+2/21931/campos_512_v4
+2/21933/campos_512_v4
+2/21944/campos_512_v4
+2/21950/campos_512_v4
+2/21953/campos_512_v4
+2/21960/campos_512_v4
+2/21963/campos_512_v4
+2/21968/campos_512_v4
+2/21969/campos_512_v4
+2/21991/campos_512_v4
+2/21998/campos_512_v4
+2/22008/campos_512_v4
+2/22031/campos_512_v4
+2/22034/campos_512_v4
+2/22035/campos_512_v4
+2/22045/campos_512_v4
+2/22048/campos_512_v4
+2/22049/campos_512_v4
+2/22051/campos_512_v4
+2/22066/campos_512_v4
+2/22082/campos_512_v4
+2/22087/campos_512_v4
+2/22092/campos_512_v4
+2/22102/campos_512_v4
+2/22105/campos_512_v4
+2/22114/campos_512_v4
+2/22120/campos_512_v4
+2/22121/campos_512_v4
+2/22124/campos_512_v4
+2/22135/campos_512_v4
+2/22140/campos_512_v4
+2/22162/campos_512_v4
+2/22170/campos_512_v4
+2/22174/campos_512_v4
+2/22192/campos_512_v4
+2/22198/campos_512_v4
+2/22200/campos_512_v4
+2/22201/campos_512_v4
+2/22208/campos_512_v4
+2/22212/campos_512_v4
+2/22217/campos_512_v4
+2/22226/campos_512_v4
+2/22231/campos_512_v4
+2/22232/campos_512_v4
+2/22241/campos_512_v4
+2/22243/campos_512_v4
+2/22247/campos_512_v4
+2/22249/campos_512_v4
+2/22257/campos_512_v4
+2/22271/campos_512_v4
+2/22272/campos_512_v4
+2/22305/campos_512_v4
+2/22319/campos_512_v4
+2/22322/campos_512_v4
+2/22336/campos_512_v4
+2/22341/campos_512_v4
+2/22353/campos_512_v4
+2/22360/campos_512_v4
+2/22363/campos_512_v4
+2/22380/campos_512_v4
+2/22393/campos_512_v4
+2/22398/campos_512_v4
+2/22411/campos_512_v4
+2/22417/campos_512_v4
+2/22426/campos_512_v4
+2/22435/campos_512_v4
+2/22437/campos_512_v4
+2/22445/campos_512_v4
+2/22459/campos_512_v4
+2/22465/campos_512_v4
+2/22469/campos_512_v4
+2/22477/campos_512_v4
+2/22489/campos_512_v4
+2/22493/campos_512_v4
+2/22495/campos_512_v4
+2/22498/campos_512_v4
+2/22499/campos_512_v4
+2/22509/campos_512_v4
+2/22528/campos_512_v4
+2/22532/campos_512_v4
+2/22534/campos_512_v4
+2/22536/campos_512_v4
+2/22538/campos_512_v4
+2/22551/campos_512_v4
+2/22554/campos_512_v4
+2/22556/campos_512_v4
+2/22571/campos_512_v4
+2/22575/campos_512_v4
+2/22581/campos_512_v4
+2/22590/campos_512_v4
+2/22603/campos_512_v4
+2/22620/campos_512_v4
+2/22636/campos_512_v4
+2/22659/campos_512_v4
+2/22661/campos_512_v4
+2/22662/campos_512_v4
+2/22664/campos_512_v4
+2/22665/campos_512_v4
+2/22677/campos_512_v4
+2/22678/campos_512_v4
+2/22681/campos_512_v4
+2/22684/campos_512_v4
+2/22690/campos_512_v4
+2/22692/campos_512_v4
+2/22695/campos_512_v4
+2/22710/campos_512_v4
+2/22714/campos_512_v4
+2/22748/campos_512_v4
+2/22759/campos_512_v4
+2/22763/campos_512_v4
+2/22770/campos_512_v4
+2/22771/campos_512_v4
+2/22773/campos_512_v4
+2/22789/campos_512_v4
+2/22790/campos_512_v4
+2/22792/campos_512_v4
+2/22800/campos_512_v4
+2/22801/campos_512_v4
+2/22802/campos_512_v4
+2/22806/campos_512_v4
+2/22809/campos_512_v4
+2/22814/campos_512_v4
+2/22815/campos_512_v4
+2/22827/campos_512_v4
+2/22838/campos_512_v4
+2/22842/campos_512_v4
+2/22850/campos_512_v4
+2/22869/campos_512_v4
+2/22874/campos_512_v4
+2/22876/campos_512_v4
+2/22885/campos_512_v4
+2/22891/campos_512_v4
+2/22907/campos_512_v4
+2/22908/campos_512_v4
+2/22923/campos_512_v4
+2/22925/campos_512_v4
+2/22928/campos_512_v4
+2/22931/campos_512_v4
+2/22960/campos_512_v4
+2/22966/campos_512_v4
+2/22969/campos_512_v4
+2/22972/campos_512_v4
+2/22980/campos_512_v4
+2/22981/campos_512_v4
+2/22982/campos_512_v4
+2/22984/campos_512_v4
+2/22990/campos_512_v4
+2/22992/campos_512_v4
+2/22993/campos_512_v4
+2/23005/campos_512_v4
+2/23013/campos_512_v4
+2/23017/campos_512_v4
+2/23027/campos_512_v4
+2/23030/campos_512_v4
+2/23037/campos_512_v4
+2/23042/campos_512_v4
+2/23049/campos_512_v4
+2/23051/campos_512_v4
+2/23059/campos_512_v4
+2/23066/campos_512_v4
+2/23068/campos_512_v4
+2/23070/campos_512_v4
+2/23093/campos_512_v4
+2/23102/campos_512_v4
+2/23106/campos_512_v4
+2/23131/campos_512_v4
+2/23142/campos_512_v4
+2/23146/campos_512_v4
+2/23147/campos_512_v4
+2/23166/campos_512_v4
+2/23168/campos_512_v4
+2/23177/campos_512_v4
+2/23178/campos_512_v4
+2/23179/campos_512_v4
+2/23181/campos_512_v4
+2/23195/campos_512_v4
+2/23208/campos_512_v4
+2/23209/campos_512_v4
+2/23211/campos_512_v4
+2/23213/campos_512_v4
+2/23217/campos_512_v4
+2/23218/campos_512_v4
+2/23223/campos_512_v4
+2/23226/campos_512_v4
+2/23235/campos_512_v4
+2/23236/campos_512_v4
+2/23241/campos_512_v4
+2/23247/campos_512_v4
+2/23259/campos_512_v4
+2/23271/campos_512_v4
+2/23273/campos_512_v4
+2/23286/campos_512_v4
+2/23291/campos_512_v4
+2/23296/campos_512_v4
+2/23313/campos_512_v4
+2/23319/campos_512_v4
+2/23328/campos_512_v4
+2/23334/campos_512_v4
+2/23348/campos_512_v4
+2/23351/campos_512_v4
+2/23364/campos_512_v4
+2/23385/campos_512_v4
+2/23390/campos_512_v4
+2/23391/campos_512_v4
+2/23394/campos_512_v4
+2/23395/campos_512_v4
+2/23399/campos_512_v4
+2/23400/campos_512_v4
+2/23405/campos_512_v4
+2/23406/campos_512_v4
+2/23410/campos_512_v4
+2/23418/campos_512_v4
+2/23430/campos_512_v4
+2/23431/campos_512_v4
+2/23434/campos_512_v4
+2/23445/campos_512_v4
+2/23461/campos_512_v4
+2/23475/campos_512_v4
+2/23478/campos_512_v4
+2/23483/campos_512_v4
+2/23484/campos_512_v4
+2/23496/campos_512_v4
+2/23505/campos_512_v4
+2/23506/campos_512_v4
+2/23510/campos_512_v4
+2/23514/campos_512_v4
+2/23524/campos_512_v4
+2/23528/campos_512_v4
+2/23534/campos_512_v4
+2/23538/campos_512_v4
+2/23542/campos_512_v4
+2/23543/campos_512_v4
+2/23545/campos_512_v4
+2/23549/campos_512_v4
+2/23553/campos_512_v4
+2/23565/campos_512_v4
+2/23571/campos_512_v4
+2/23572/campos_512_v4
+2/23578/campos_512_v4
+2/23585/campos_512_v4
+2/23591/campos_512_v4
+2/23597/campos_512_v4
+2/23599/campos_512_v4
+2/23603/campos_512_v4
+2/23608/campos_512_v4
+2/23613/campos_512_v4
+2/23618/campos_512_v4
+2/23622/campos_512_v4
+2/23623/campos_512_v4
+2/23632/campos_512_v4
+2/23633/campos_512_v4
+2/23651/campos_512_v4
+2/23655/campos_512_v4
+2/23656/campos_512_v4
+2/23663/campos_512_v4
+2/23668/campos_512_v4
+2/23695/campos_512_v4
+2/23699/campos_512_v4
+2/23700/campos_512_v4
+2/23706/campos_512_v4
+2/23707/campos_512_v4
+2/23708/campos_512_v4
+2/23711/campos_512_v4
+2/23722/campos_512_v4
+2/23725/campos_512_v4
+2/23727/campos_512_v4
+2/23735/campos_512_v4
+2/23739/campos_512_v4
+2/23744/campos_512_v4
+2/23749/campos_512_v4
+2/23758/campos_512_v4
+2/23759/campos_512_v4
+2/23779/campos_512_v4
+2/23780/campos_512_v4
+2/23786/campos_512_v4
+2/23790/campos_512_v4
+2/23804/campos_512_v4
+2/23813/campos_512_v4
+2/23821/campos_512_v4
+2/23844/campos_512_v4
+2/23848/campos_512_v4
+2/23850/campos_512_v4
+2/23851/campos_512_v4
+2/23853/campos_512_v4
+2/23855/campos_512_v4
+2/23865/campos_512_v4
+2/23869/campos_512_v4
+2/23873/campos_512_v4
+2/23876/campos_512_v4
+2/23886/campos_512_v4
+2/23890/campos_512_v4
+2/23900/campos_512_v4
+2/23904/campos_512_v4
+2/23909/campos_512_v4
+2/23910/campos_512_v4
+2/23950/campos_512_v4
+2/23963/campos_512_v4
+2/23964/campos_512_v4
+2/23976/campos_512_v4
+2/23979/campos_512_v4
+2/23980/campos_512_v4
+2/23982/campos_512_v4
+2/23985/campos_512_v4
+2/23989/campos_512_v4
+2/23994/campos_512_v4
+2/24008/campos_512_v4
+2/24009/campos_512_v4
+2/24010/campos_512_v4
+2/24012/campos_512_v4
+2/24020/campos_512_v4
+2/24028/campos_512_v4
+2/24036/campos_512_v4
+2/24052/campos_512_v4
+2/24053/campos_512_v4
+2/24058/campos_512_v4
+2/24059/campos_512_v4
+2/24073/campos_512_v4
+2/24076/campos_512_v4
+2/24077/campos_512_v4
+2/24085/campos_512_v4
+2/24087/campos_512_v4
+2/24101/campos_512_v4
+2/24104/campos_512_v4
+2/24107/campos_512_v4
+2/24110/campos_512_v4
+2/24111/campos_512_v4
+2/24113/campos_512_v4
+2/24117/campos_512_v4
+2/24127/campos_512_v4
+2/24139/campos_512_v4
+2/24147/campos_512_v4
+2/24157/campos_512_v4
+2/24166/campos_512_v4
+2/24172/campos_512_v4
+2/24173/campos_512_v4
+2/24178/campos_512_v4
+2/24179/campos_512_v4
+2/24187/campos_512_v4
+2/24191/campos_512_v4
+2/24202/campos_512_v4
+2/24214/campos_512_v4
+2/24215/campos_512_v4
+2/24216/campos_512_v4
+2/24219/campos_512_v4
+2/24220/campos_512_v4
+2/24260/campos_512_v4
+2/24262/campos_512_v4
+2/24263/campos_512_v4
+2/24268/campos_512_v4
+2/24270/campos_512_v4
+2/24278/campos_512_v4
+2/24280/campos_512_v4
+2/24282/campos_512_v4
+2/24284/campos_512_v4
+2/24288/campos_512_v4
+2/24289/campos_512_v4
+2/24291/campos_512_v4
+2/24292/campos_512_v4
+2/24295/campos_512_v4
+2/24313/campos_512_v4
+2/24331/campos_512_v4
+2/24338/campos_512_v4
+2/24343/campos_512_v4
+2/24346/campos_512_v4
+2/24357/campos_512_v4
+2/24378/campos_512_v4
+2/24381/campos_512_v4
+2/24387/campos_512_v4
+2/24390/campos_512_v4
+2/24407/campos_512_v4
+2/24414/campos_512_v4
+2/24431/campos_512_v4
+2/24433/campos_512_v4
+2/24441/campos_512_v4
+2/24442/campos_512_v4
+2/24475/campos_512_v4
+2/24487/campos_512_v4
+2/24498/campos_512_v4
+2/24516/campos_512_v4
+2/24528/campos_512_v4
+2/24532/campos_512_v4
+2/24537/campos_512_v4
+2/24538/campos_512_v4
+2/24543/campos_512_v4
+2/24549/campos_512_v4
+2/24557/campos_512_v4
+2/24574/campos_512_v4
+2/24577/campos_512_v4
+2/24581/campos_512_v4
+2/24588/campos_512_v4
+2/24599/campos_512_v4
+2/24615/campos_512_v4
+2/24650/campos_512_v4
+2/24658/campos_512_v4
+2/24682/campos_512_v4
+2/24692/campos_512_v4
+2/24713/campos_512_v4
+2/24732/campos_512_v4
+2/24734/campos_512_v4
+2/24749/campos_512_v4
+2/24754/campos_512_v4
+2/24756/campos_512_v4
+2/24762/campos_512_v4
+2/24766/campos_512_v4
+2/24772/campos_512_v4
+2/24776/campos_512_v4
+2/24778/campos_512_v4
+2/24785/campos_512_v4
+2/24792/campos_512_v4
+2/24812/campos_512_v4
+2/24814/campos_512_v4
+2/24817/campos_512_v4
+2/24825/campos_512_v4
+2/24831/campos_512_v4
+2/24832/campos_512_v4
+2/24847/campos_512_v4
+2/24856/campos_512_v4
+2/24860/campos_512_v4
+2/24862/campos_512_v4
+2/24865/campos_512_v4
+2/24892/campos_512_v4
+2/24901/campos_512_v4
+2/24922/campos_512_v4
+2/24925/campos_512_v4
+2/24927/campos_512_v4
+2/24953/campos_512_v4
+2/24963/campos_512_v4
+2/24974/campos_512_v4
+2/24980/campos_512_v4
+2/24982/campos_512_v4
+2/24987/campos_512_v4
+2/24994/campos_512_v4
+2/24997/campos_512_v4
+2/24999/campos_512_v4
+23/125003/campos_512_v4
+23/125004/campos_512_v4
+23/125005/campos_512_v4
+23/125012/campos_512_v4
+23/125017/campos_512_v4
+23/125027/campos_512_v4
+23/125035/campos_512_v4
+23/125052/campos_512_v4
+23/125053/campos_512_v4
+23/125064/campos_512_v4
+23/125072/campos_512_v4
+23/125099/campos_512_v4
+23/125106/campos_512_v4
+23/125130/campos_512_v4
+23/125137/campos_512_v4
+23/125140/campos_512_v4
+23/125163/campos_512_v4
+23/125169/campos_512_v4
+23/125187/campos_512_v4
+23/125194/campos_512_v4
+23/125206/campos_512_v4
+23/125207/campos_512_v4
+23/125217/campos_512_v4
+23/125240/campos_512_v4
+23/125242/campos_512_v4
+23/125248/campos_512_v4
+23/125249/campos_512_v4
+23/125258/campos_512_v4
+23/125262/campos_512_v4
+23/125266/campos_512_v4
+23/125273/campos_512_v4
+23/125282/campos_512_v4
+23/125286/campos_512_v4
+23/125287/campos_512_v4
+23/125298/campos_512_v4
+23/125303/campos_512_v4
+23/125314/campos_512_v4
+23/125323/campos_512_v4
+23/125326/campos_512_v4
+23/125329/campos_512_v4
+23/125335/campos_512_v4
+23/125338/campos_512_v4
+23/125346/campos_512_v4
+23/125350/campos_512_v4
+23/125358/campos_512_v4
+23/125375/campos_512_v4
+23/125399/campos_512_v4
+23/125405/campos_512_v4
+23/125411/campos_512_v4
+23/125415/campos_512_v4
+23/125420/campos_512_v4
+23/125425/campos_512_v4
+23/125427/campos_512_v4
+23/125428/campos_512_v4
+23/125429/campos_512_v4
+23/125436/campos_512_v4
+23/125437/campos_512_v4
+23/125442/campos_512_v4
+23/125445/campos_512_v4
+23/125450/campos_512_v4
+23/125458/campos_512_v4
+23/125462/campos_512_v4
+23/125467/campos_512_v4
+23/125481/campos_512_v4
+23/125488/campos_512_v4
+23/125504/campos_512_v4
+23/125529/campos_512_v4
+23/125533/campos_512_v4
+23/125536/campos_512_v4
+23/125542/campos_512_v4
+23/125543/campos_512_v4
+23/125544/campos_512_v4
+23/125550/campos_512_v4
+23/125552/campos_512_v4
+23/125555/campos_512_v4
+23/125558/campos_512_v4
+23/125560/campos_512_v4
+23/125577/campos_512_v4
+23/125585/campos_512_v4
+23/125587/campos_512_v4
+23/125599/campos_512_v4
+23/125616/campos_512_v4
+23/125617/campos_512_v4
+23/125628/campos_512_v4
+23/125631/campos_512_v4
+23/125634/campos_512_v4
+23/125638/campos_512_v4
+23/125643/campos_512_v4
+23/125651/campos_512_v4
+23/125656/campos_512_v4
+23/125657/campos_512_v4
+23/125659/campos_512_v4
+23/125668/campos_512_v4
+23/125681/campos_512_v4
+23/125685/campos_512_v4
+23/125689/campos_512_v4
+23/125697/campos_512_v4
+23/125706/campos_512_v4
+23/125707/campos_512_v4
+23/125717/campos_512_v4
+23/125723/campos_512_v4
+23/125729/campos_512_v4
+23/125732/campos_512_v4
+23/125755/campos_512_v4
+23/125762/campos_512_v4
+23/125765/campos_512_v4
+23/125774/campos_512_v4
+23/125777/campos_512_v4
+23/125779/campos_512_v4
+23/125782/campos_512_v4
+23/125786/campos_512_v4
+23/125790/campos_512_v4
+23/125794/campos_512_v4
+23/125809/campos_512_v4
+23/125818/campos_512_v4
+23/125824/campos_512_v4
+23/125826/campos_512_v4
+23/125830/campos_512_v4
+23/125836/campos_512_v4
+23/125837/campos_512_v4
+23/125841/campos_512_v4
+23/125847/campos_512_v4
+23/125855/campos_512_v4
+23/125871/campos_512_v4
+23/125890/campos_512_v4
+23/125891/campos_512_v4
+23/125895/campos_512_v4
+23/125902/campos_512_v4
+23/125928/campos_512_v4
+23/125933/campos_512_v4
+23/125939/campos_512_v4
+23/125941/campos_512_v4
+23/125946/campos_512_v4
+23/125954/campos_512_v4
+23/125963/campos_512_v4
+23/125966/campos_512_v4
+23/125970/campos_512_v4
+23/125972/campos_512_v4
+23/125979/campos_512_v4
+23/125989/campos_512_v4
+23/125993/campos_512_v4
+23/126004/campos_512_v4
+23/126009/campos_512_v4
+23/126018/campos_512_v4
+23/126024/campos_512_v4
+23/126025/campos_512_v4
+23/126027/campos_512_v4
+23/126031/campos_512_v4
+23/126032/campos_512_v4
+23/126033/campos_512_v4
+23/126049/campos_512_v4
+23/126052/campos_512_v4
+23/126053/campos_512_v4
+23/126056/campos_512_v4
+23/126062/campos_512_v4
+23/126063/campos_512_v4
+23/126064/campos_512_v4
+23/126073/campos_512_v4
+23/126077/campos_512_v4
+23/126082/campos_512_v4
+23/126091/campos_512_v4
+23/126103/campos_512_v4
+23/126126/campos_512_v4
+23/126128/campos_512_v4
+23/126140/campos_512_v4
+23/126142/campos_512_v4
+23/126145/campos_512_v4
+23/126146/campos_512_v4
+23/126169/campos_512_v4
+23/126170/campos_512_v4
+23/126171/campos_512_v4
+23/126175/campos_512_v4
+23/126188/campos_512_v4
+23/126194/campos_512_v4
+23/126200/campos_512_v4
+23/126202/campos_512_v4
+23/126207/campos_512_v4
+23/126209/campos_512_v4
+23/126214/campos_512_v4
+23/126225/campos_512_v4
+23/126237/campos_512_v4
+23/126245/campos_512_v4
+23/126275/campos_512_v4
+23/126277/campos_512_v4
+23/126281/campos_512_v4
+23/126301/campos_512_v4
+23/126305/campos_512_v4
+23/126306/campos_512_v4
+23/126333/campos_512_v4
+23/126334/campos_512_v4
+23/126340/campos_512_v4
+23/126350/campos_512_v4
+23/126351/campos_512_v4
+23/126353/campos_512_v4
+23/126358/campos_512_v4
+23/126359/campos_512_v4
+23/126365/campos_512_v4
+23/126373/campos_512_v4
+23/126374/campos_512_v4
+23/126375/campos_512_v4
+23/126380/campos_512_v4
+23/126383/campos_512_v4
+23/126388/campos_512_v4
+23/126389/campos_512_v4
+23/126390/campos_512_v4
+23/126392/campos_512_v4
+23/126427/campos_512_v4
+23/126429/campos_512_v4
+23/126433/campos_512_v4
+23/126445/campos_512_v4
+23/126463/campos_512_v4
+23/126476/campos_512_v4
+23/126483/campos_512_v4
+23/126486/campos_512_v4
+23/126492/campos_512_v4
+23/126499/campos_512_v4
+23/126501/campos_512_v4
+23/126510/campos_512_v4
+23/126515/campos_512_v4
+23/126520/campos_512_v4
+23/126528/campos_512_v4
+23/126529/campos_512_v4
+23/126531/campos_512_v4
+23/126536/campos_512_v4
+23/126556/campos_512_v4
+23/126558/campos_512_v4
+23/126567/campos_512_v4
+23/126570/campos_512_v4
+23/126581/campos_512_v4
+23/126582/campos_512_v4
+23/126586/campos_512_v4
+23/126596/campos_512_v4
+23/126609/campos_512_v4
+23/126613/campos_512_v4
+23/126616/campos_512_v4
+23/126620/campos_512_v4
+23/126626/campos_512_v4
+23/126628/campos_512_v4
+23/126649/campos_512_v4
+23/126652/campos_512_v4
+23/126662/campos_512_v4
+23/126676/campos_512_v4
+23/126683/campos_512_v4
+23/126684/campos_512_v4
+23/126688/campos_512_v4
+23/126690/campos_512_v4
+23/126694/campos_512_v4
+23/126705/campos_512_v4
+23/126707/campos_512_v4
+23/126710/campos_512_v4
+23/126711/campos_512_v4
+23/126712/campos_512_v4
+23/126716/campos_512_v4
+23/126717/campos_512_v4
+23/126721/campos_512_v4
+23/126722/campos_512_v4
+23/126726/campos_512_v4
+23/126732/campos_512_v4
+23/126746/campos_512_v4
+23/126747/campos_512_v4
+23/126771/campos_512_v4
+23/126772/campos_512_v4
+23/126778/campos_512_v4
+23/126779/campos_512_v4
+23/126780/campos_512_v4
+23/126782/campos_512_v4
+23/126786/campos_512_v4
+23/126801/campos_512_v4
+23/126804/campos_512_v4
+23/126810/campos_512_v4
+23/126811/campos_512_v4
+23/126814/campos_512_v4
+23/126828/campos_512_v4
+23/126829/campos_512_v4
+23/126843/campos_512_v4
+23/126849/campos_512_v4
+23/126857/campos_512_v4
+23/126859/campos_512_v4
+23/126871/campos_512_v4
+23/126876/campos_512_v4
+23/126906/campos_512_v4
+23/126923/campos_512_v4
+23/126932/campos_512_v4
+23/126951/campos_512_v4
+23/126952/campos_512_v4
+23/126954/campos_512_v4
+23/126957/campos_512_v4
+23/126960/campos_512_v4
+23/126962/campos_512_v4
+23/126964/campos_512_v4
+23/126976/campos_512_v4
+23/126980/campos_512_v4
+23/126983/campos_512_v4
+23/126987/campos_512_v4
+23/126993/campos_512_v4
+23/126994/campos_512_v4
+23/127012/campos_512_v4
+23/127016/campos_512_v4
+23/127036/campos_512_v4
+23/127052/campos_512_v4
+23/127055/campos_512_v4
+23/127056/campos_512_v4
+23/127069/campos_512_v4
+23/127087/campos_512_v4
+23/127092/campos_512_v4
+23/127099/campos_512_v4
+23/127116/campos_512_v4
+23/127119/campos_512_v4
+23/127122/campos_512_v4
+23/127129/campos_512_v4
+23/127144/campos_512_v4
+23/127161/campos_512_v4
+23/127167/campos_512_v4
+23/127173/campos_512_v4
+23/127174/campos_512_v4
+23/127178/campos_512_v4
+23/127181/campos_512_v4
+23/127184/campos_512_v4
+23/127185/campos_512_v4
+23/127200/campos_512_v4
+23/127202/campos_512_v4
+23/127229/campos_512_v4
+23/127233/campos_512_v4
+23/127235/campos_512_v4
+23/127247/campos_512_v4
+23/127258/campos_512_v4
+23/127264/campos_512_v4
+23/127269/campos_512_v4
+23/127281/campos_512_v4
+23/127287/campos_512_v4
+23/127290/campos_512_v4
+23/127292/campos_512_v4
+23/127295/campos_512_v4
+23/127297/campos_512_v4
+23/127302/campos_512_v4
+23/127304/campos_512_v4
+23/127319/campos_512_v4
+23/127344/campos_512_v4
+23/127345/campos_512_v4
+23/127346/campos_512_v4
+23/127350/campos_512_v4
+23/127362/campos_512_v4
+23/127367/campos_512_v4
+23/127384/campos_512_v4
+23/127386/campos_512_v4
+23/127391/campos_512_v4
+23/127406/campos_512_v4
+23/127407/campos_512_v4
+23/127410/campos_512_v4
+23/127411/campos_512_v4
+23/127422/campos_512_v4
+23/127423/campos_512_v4
+23/127424/campos_512_v4
+23/127425/campos_512_v4
+23/127431/campos_512_v4
+23/127449/campos_512_v4
+23/127456/campos_512_v4
+23/127457/campos_512_v4
+23/127468/campos_512_v4
+23/127469/campos_512_v4
+23/127471/campos_512_v4
+23/127472/campos_512_v4
+23/127494/campos_512_v4
+23/127518/campos_512_v4
+23/127522/campos_512_v4
+23/127523/campos_512_v4
+23/127530/campos_512_v4
+23/127537/campos_512_v4
+23/127557/campos_512_v4
+23/127569/campos_512_v4
+23/127576/campos_512_v4
+23/127583/campos_512_v4
+23/127589/campos_512_v4
+23/127606/campos_512_v4
+23/127628/campos_512_v4
+23/127630/campos_512_v4
+23/127631/campos_512_v4
+23/127643/campos_512_v4
+23/127645/campos_512_v4
+23/127650/campos_512_v4
+23/127653/campos_512_v4
+23/127656/campos_512_v4
+23/127660/campos_512_v4
+23/127661/campos_512_v4
+23/127679/campos_512_v4
+23/127709/campos_512_v4
+23/127717/campos_512_v4
+23/127718/campos_512_v4
+23/127736/campos_512_v4
+23/127738/campos_512_v4
+23/127746/campos_512_v4
+23/127749/campos_512_v4
+23/127765/campos_512_v4
+23/127766/campos_512_v4
+23/127768/campos_512_v4
+23/127769/campos_512_v4
+23/127770/campos_512_v4
+23/127771/campos_512_v4
+23/127784/campos_512_v4
+23/127791/campos_512_v4
+23/127792/campos_512_v4
+23/127797/campos_512_v4
+23/127801/campos_512_v4
+23/127820/campos_512_v4
+23/127832/campos_512_v4
+23/127836/campos_512_v4
+23/127839/campos_512_v4
+23/127840/campos_512_v4
+23/127845/campos_512_v4
+23/127856/campos_512_v4
+23/127862/campos_512_v4
+23/127869/campos_512_v4
+23/127875/campos_512_v4
+23/127886/campos_512_v4
+23/127887/campos_512_v4
+23/127891/campos_512_v4
+23/127894/campos_512_v4
+23/127896/campos_512_v4
+23/127902/campos_512_v4
+23/127915/campos_512_v4
+23/127921/campos_512_v4
+23/127927/campos_512_v4
+23/127931/campos_512_v4
+23/127936/campos_512_v4
+23/127944/campos_512_v4
+23/127953/campos_512_v4
+23/127961/campos_512_v4
+23/127969/campos_512_v4
+23/127975/campos_512_v4
+23/127986/campos_512_v4
+23/127987/campos_512_v4
+23/127993/campos_512_v4
+23/128003/campos_512_v4
+23/128009/campos_512_v4
+23/128011/campos_512_v4
+23/128024/campos_512_v4
+23/128025/campos_512_v4
+23/128026/campos_512_v4
+23/128027/campos_512_v4
+23/128031/campos_512_v4
+23/128044/campos_512_v4
+23/128061/campos_512_v4
+23/128065/campos_512_v4
+23/128073/campos_512_v4
+23/128092/campos_512_v4
+23/128093/campos_512_v4
+23/128104/campos_512_v4
+23/128110/campos_512_v4
+23/128115/campos_512_v4
+23/128129/campos_512_v4
+23/128156/campos_512_v4
+23/128159/campos_512_v4
+23/128173/campos_512_v4
+23/128174/campos_512_v4
+23/128176/campos_512_v4
+23/128180/campos_512_v4
+23/128181/campos_512_v4
+23/128191/campos_512_v4
+23/128198/campos_512_v4
+23/128202/campos_512_v4
+23/128207/campos_512_v4
+23/128208/campos_512_v4
+23/128209/campos_512_v4
+23/128216/campos_512_v4
+23/128217/campos_512_v4
+23/128219/campos_512_v4
+23/128221/campos_512_v4
+23/128226/campos_512_v4
+23/128250/campos_512_v4
+23/128252/campos_512_v4
+23/128260/campos_512_v4
+23/128263/campos_512_v4
+23/128265/campos_512_v4
+23/128273/campos_512_v4
+23/128282/campos_512_v4
+23/128291/campos_512_v4
+23/128292/campos_512_v4
+23/128296/campos_512_v4
+23/128300/campos_512_v4
+23/128309/campos_512_v4
+23/128319/campos_512_v4
+23/128320/campos_512_v4
+23/128332/campos_512_v4
+23/128342/campos_512_v4
+23/128344/campos_512_v4
+23/128353/campos_512_v4
+23/128356/campos_512_v4
+23/128360/campos_512_v4
+23/128380/campos_512_v4
+23/128384/campos_512_v4
+23/128386/campos_512_v4
+23/128395/campos_512_v4
+23/128397/campos_512_v4
+23/128404/campos_512_v4
+23/128408/campos_512_v4
+23/128419/campos_512_v4
+23/128425/campos_512_v4
+23/128427/campos_512_v4
+23/128433/campos_512_v4
+23/128434/campos_512_v4
+23/128439/campos_512_v4
+23/128444/campos_512_v4
+23/128448/campos_512_v4
+23/128449/campos_512_v4
+23/128472/campos_512_v4
+23/128497/campos_512_v4
+23/128501/campos_512_v4
+23/128504/campos_512_v4
+23/128507/campos_512_v4
+23/128528/campos_512_v4
+23/128532/campos_512_v4
+23/128548/campos_512_v4
+23/128569/campos_512_v4
+23/128571/campos_512_v4
+23/128572/campos_512_v4
+23/128575/campos_512_v4
+23/128583/campos_512_v4
+23/128586/campos_512_v4
+23/128587/campos_512_v4
+23/128603/campos_512_v4
+23/128606/campos_512_v4
+23/128607/campos_512_v4
+23/128616/campos_512_v4
+23/128627/campos_512_v4
+23/128631/campos_512_v4
+23/128640/campos_512_v4
+23/128653/campos_512_v4
+23/128654/campos_512_v4
+23/128658/campos_512_v4
+23/128662/campos_512_v4
+23/128667/campos_512_v4
+23/128673/campos_512_v4
+23/128675/campos_512_v4
+23/128676/campos_512_v4
+23/128690/campos_512_v4
+23/128696/campos_512_v4
+23/128699/campos_512_v4
+23/128710/campos_512_v4
+23/128713/campos_512_v4
+23/128714/campos_512_v4
+23/128721/campos_512_v4
+23/128732/campos_512_v4
+23/128748/campos_512_v4
+23/128764/campos_512_v4
+23/128779/campos_512_v4
+23/128785/campos_512_v4
+23/128788/campos_512_v4
+23/128803/campos_512_v4
+23/128827/campos_512_v4
+23/128833/campos_512_v4
+23/128834/campos_512_v4
+23/128889/campos_512_v4
+23/128891/campos_512_v4
+23/128897/campos_512_v4
+23/128903/campos_512_v4
+23/128904/campos_512_v4
+23/128925/campos_512_v4
+23/128926/campos_512_v4
+23/128927/campos_512_v4
+23/128930/campos_512_v4
+23/128931/campos_512_v4
+23/128943/campos_512_v4
+23/128944/campos_512_v4
+23/128948/campos_512_v4
+23/128949/campos_512_v4
+23/128950/campos_512_v4
+23/128979/campos_512_v4
+23/129008/campos_512_v4
+23/129011/campos_512_v4
+23/129018/campos_512_v4
+23/129029/campos_512_v4
+23/129031/campos_512_v4
+23/129042/campos_512_v4
+23/129043/campos_512_v4
+23/129046/campos_512_v4
+23/129051/campos_512_v4
+23/129058/campos_512_v4
+23/129059/campos_512_v4
+23/129061/campos_512_v4
+23/129067/campos_512_v4
+23/129068/campos_512_v4
+23/129072/campos_512_v4
+23/129073/campos_512_v4
+23/129075/campos_512_v4
+23/129079/campos_512_v4
+23/129080/campos_512_v4
+23/129081/campos_512_v4
+23/129095/campos_512_v4
+23/129099/campos_512_v4
+23/129103/campos_512_v4
+23/129127/campos_512_v4
+23/129128/campos_512_v4
+23/129129/campos_512_v4
+23/129142/campos_512_v4
+23/129143/campos_512_v4
+23/129148/campos_512_v4
+23/129156/campos_512_v4
+23/129157/campos_512_v4
+23/129166/campos_512_v4
+23/129170/campos_512_v4
+23/129180/campos_512_v4
+23/129183/campos_512_v4
+23/129191/campos_512_v4
+23/129197/campos_512_v4
+23/129215/campos_512_v4
+23/129221/campos_512_v4
+23/129224/campos_512_v4
+23/129226/campos_512_v4
+23/129234/campos_512_v4
+23/129235/campos_512_v4
+23/129241/campos_512_v4
+23/129242/campos_512_v4
+23/129245/campos_512_v4
+23/129248/campos_512_v4
+23/129249/campos_512_v4
+23/129252/campos_512_v4
+23/129253/campos_512_v4
+23/129256/campos_512_v4
+23/129267/campos_512_v4
+23/129268/campos_512_v4
+23/129271/campos_512_v4
+23/129276/campos_512_v4
+23/129283/campos_512_v4
+23/129284/campos_512_v4
+23/129286/campos_512_v4
+23/129331/campos_512_v4
+23/129346/campos_512_v4
+23/129353/campos_512_v4
+23/129382/campos_512_v4
+23/129387/campos_512_v4
+23/129394/campos_512_v4
+23/129400/campos_512_v4
+23/129412/campos_512_v4
+23/129425/campos_512_v4
+23/129427/campos_512_v4
+23/129430/campos_512_v4
+23/129435/campos_512_v4
+23/129436/campos_512_v4
+23/129443/campos_512_v4
+23/129452/campos_512_v4
+23/129457/campos_512_v4
+23/129462/campos_512_v4
+23/129471/campos_512_v4
+23/129475/campos_512_v4
+23/129490/campos_512_v4
+23/129493/campos_512_v4
+23/129515/campos_512_v4
+23/129517/campos_512_v4
+23/129538/campos_512_v4
+23/129539/campos_512_v4
+23/129541/campos_512_v4
+23/129576/campos_512_v4
+23/129580/campos_512_v4
+23/129583/campos_512_v4
+23/129589/campos_512_v4
+23/129592/campos_512_v4
+23/129634/campos_512_v4
+23/129646/campos_512_v4
+23/129648/campos_512_v4
+23/129651/campos_512_v4
+23/129653/campos_512_v4
+23/129655/campos_512_v4
+23/129657/campos_512_v4
+23/129660/campos_512_v4
+23/129705/campos_512_v4
+23/129706/campos_512_v4
+23/129715/campos_512_v4
+23/129727/campos_512_v4
+23/129739/campos_512_v4
+23/129752/campos_512_v4
+23/129753/campos_512_v4
+23/129764/campos_512_v4
+23/129786/campos_512_v4
+23/129787/campos_512_v4
+23/129795/campos_512_v4
+23/129798/campos_512_v4
+23/129803/campos_512_v4
+23/129823/campos_512_v4
+23/129831/campos_512_v4
+23/129844/campos_512_v4
+23/129852/campos_512_v4
+23/129857/campos_512_v4
+23/129858/campos_512_v4
+23/129862/campos_512_v4
+23/129865/campos_512_v4
+23/129868/campos_512_v4
+23/129870/campos_512_v4
+23/129876/campos_512_v4
+23/129877/campos_512_v4
+23/129881/campos_512_v4
+23/129888/campos_512_v4
+23/129890/campos_512_v4
+23/129911/campos_512_v4
+23/129918/campos_512_v4
+23/129922/campos_512_v4
+23/129938/campos_512_v4
+23/129973/campos_512_v4
+23/129983/campos_512_v4
+23/129996/campos_512_v4
+24/130008/campos_512_v4
+24/130009/campos_512_v4
+24/130017/campos_512_v4
+24/130020/campos_512_v4
+24/130033/campos_512_v4
+24/130045/campos_512_v4
+24/130046/campos_512_v4
+24/130054/campos_512_v4
+24/130059/campos_512_v4
+24/130078/campos_512_v4
+24/130107/campos_512_v4
+24/130108/campos_512_v4
+24/130115/campos_512_v4
+24/130120/campos_512_v4
+24/130121/campos_512_v4
+24/130130/campos_512_v4
+24/130133/campos_512_v4
+24/130137/campos_512_v4
+24/130143/campos_512_v4
+24/130145/campos_512_v4
+24/130149/campos_512_v4
+24/130150/campos_512_v4
+24/130152/campos_512_v4
+24/130158/campos_512_v4
+24/130175/campos_512_v4
+24/130179/campos_512_v4
+24/130192/campos_512_v4
+24/130198/campos_512_v4
+24/130205/campos_512_v4
+24/130209/campos_512_v4
+24/130210/campos_512_v4
+24/130214/campos_512_v4
+24/130215/campos_512_v4
+24/130218/campos_512_v4
+24/130230/campos_512_v4
+24/130232/campos_512_v4
+24/130234/campos_512_v4
+24/130240/campos_512_v4
+24/130241/campos_512_v4
+24/130244/campos_512_v4
+24/130260/campos_512_v4
+24/130263/campos_512_v4
+24/130266/campos_512_v4
+24/130272/campos_512_v4
+24/130295/campos_512_v4
+24/130308/campos_512_v4
+24/130325/campos_512_v4
+24/130336/campos_512_v4
+24/130340/campos_512_v4
+24/130345/campos_512_v4
+24/130364/campos_512_v4
+24/130373/campos_512_v4
+24/130380/campos_512_v4
+24/130408/campos_512_v4
+24/130410/campos_512_v4
+24/130427/campos_512_v4
+24/130445/campos_512_v4
+24/130458/campos_512_v4
+24/130461/campos_512_v4
+24/130469/campos_512_v4
+24/130473/campos_512_v4
+24/130483/campos_512_v4
+24/130498/campos_512_v4
+24/130499/campos_512_v4
+24/130500/campos_512_v4
+24/130502/campos_512_v4
+24/130512/campos_512_v4
+24/130515/campos_512_v4
+24/130516/campos_512_v4
+24/130522/campos_512_v4
+24/130540/campos_512_v4
+24/130544/campos_512_v4
+24/130553/campos_512_v4
+24/130563/campos_512_v4
+24/130570/campos_512_v4
+24/130575/campos_512_v4
+24/130586/campos_512_v4
+24/130593/campos_512_v4
+24/130597/campos_512_v4
+24/130603/campos_512_v4
+24/130604/campos_512_v4
+24/130610/campos_512_v4
+24/130616/campos_512_v4
+24/130618/campos_512_v4
+24/130624/campos_512_v4
+24/130634/campos_512_v4
+24/130637/campos_512_v4
+24/130650/campos_512_v4
+24/130659/campos_512_v4
+24/130668/campos_512_v4
+24/130670/campos_512_v4
+24/130674/campos_512_v4
+24/130676/campos_512_v4
+24/130681/campos_512_v4
+24/130682/campos_512_v4
+24/130694/campos_512_v4
+24/130711/campos_512_v4
+24/130718/campos_512_v4
+24/130722/campos_512_v4
+24/130732/campos_512_v4
+24/130745/campos_512_v4
+24/130746/campos_512_v4
+24/130748/campos_512_v4
+24/130755/campos_512_v4
+24/130757/campos_512_v4
+24/130758/campos_512_v4
+24/130760/campos_512_v4
+24/130764/campos_512_v4
+24/130777/campos_512_v4
+24/130780/campos_512_v4
+24/130803/campos_512_v4
+24/130834/campos_512_v4
+24/130871/campos_512_v4
+24/130878/campos_512_v4
+24/130880/campos_512_v4
+24/130882/campos_512_v4
+24/130884/campos_512_v4
+24/130885/campos_512_v4
+24/130892/campos_512_v4
+24/130896/campos_512_v4
+24/130897/campos_512_v4
+24/130908/campos_512_v4
+24/130909/campos_512_v4
+24/130910/campos_512_v4
+24/130917/campos_512_v4
+24/130922/campos_512_v4
+24/130923/campos_512_v4
+24/130924/campos_512_v4
+24/130944/campos_512_v4
+24/130946/campos_512_v4
+24/130950/campos_512_v4
+24/130965/campos_512_v4
+24/130973/campos_512_v4
+24/130974/campos_512_v4
+24/130976/campos_512_v4
+24/130978/campos_512_v4
+24/130991/campos_512_v4
+24/130996/campos_512_v4
+24/131004/campos_512_v4
+24/131015/campos_512_v4
+24/131019/campos_512_v4
+24/131021/campos_512_v4
+24/131043/campos_512_v4
+24/131044/campos_512_v4
+24/131055/campos_512_v4
+24/131065/campos_512_v4
+24/131067/campos_512_v4
+24/131082/campos_512_v4
+24/131102/campos_512_v4
+24/131105/campos_512_v4
+24/131113/campos_512_v4
+24/131114/campos_512_v4
+24/131130/campos_512_v4
+24/131133/campos_512_v4
+24/131136/campos_512_v4
+24/131137/campos_512_v4
+24/131138/campos_512_v4
+24/131139/campos_512_v4
+24/131164/campos_512_v4
+24/131173/campos_512_v4
+24/131177/campos_512_v4
+24/131195/campos_512_v4
+24/131204/campos_512_v4
+24/131205/campos_512_v4
+24/131216/campos_512_v4
+24/131240/campos_512_v4
+24/131243/campos_512_v4
+24/131248/campos_512_v4
+24/131256/campos_512_v4
+24/131258/campos_512_v4
+24/131274/campos_512_v4
+24/131281/campos_512_v4
+24/131289/campos_512_v4
+24/131307/campos_512_v4
+24/131308/campos_512_v4
+24/131309/campos_512_v4
+24/131319/campos_512_v4
+24/131320/campos_512_v4
+24/131328/campos_512_v4
+24/131366/campos_512_v4
+24/131389/campos_512_v4
+24/131393/campos_512_v4
+24/131402/campos_512_v4
+24/131409/campos_512_v4
+24/131414/campos_512_v4
+24/131418/campos_512_v4
+24/131420/campos_512_v4
+24/131429/campos_512_v4
+24/131438/campos_512_v4
+24/131441/campos_512_v4
+24/131446/campos_512_v4
+24/131449/campos_512_v4
+24/131456/campos_512_v4
+24/131458/campos_512_v4
+24/131462/campos_512_v4
+24/131464/campos_512_v4
+24/131468/campos_512_v4
+24/131470/campos_512_v4
+24/131474/campos_512_v4
+24/131476/campos_512_v4
+24/131481/campos_512_v4
+24/131483/campos_512_v4
+24/131487/campos_512_v4
+24/131511/campos_512_v4
+24/131520/campos_512_v4
+24/131550/campos_512_v4
+24/131551/campos_512_v4
+24/131554/campos_512_v4
+24/131556/campos_512_v4
+24/131561/campos_512_v4
+24/131576/campos_512_v4
+24/131583/campos_512_v4
+24/131587/campos_512_v4
+24/131588/campos_512_v4
+24/131596/campos_512_v4
+24/131598/campos_512_v4
+24/131601/campos_512_v4
+24/131602/campos_512_v4
+24/131619/campos_512_v4
+24/131628/campos_512_v4
+24/131654/campos_512_v4
+24/131678/campos_512_v4
+24/131684/campos_512_v4
+24/131693/campos_512_v4
+24/131694/campos_512_v4
+24/131715/campos_512_v4
+24/131720/campos_512_v4
+24/131722/campos_512_v4
+24/131726/campos_512_v4
+24/131762/campos_512_v4
+24/131794/campos_512_v4
+24/131804/campos_512_v4
+24/131805/campos_512_v4
+24/131813/campos_512_v4
+24/131820/campos_512_v4
+24/131831/campos_512_v4
+24/131842/campos_512_v4
+24/131843/campos_512_v4
+24/131845/campos_512_v4
+24/131868/campos_512_v4
+24/131869/campos_512_v4
+24/131881/campos_512_v4
+24/131900/campos_512_v4
+24/131906/campos_512_v4
+24/131909/campos_512_v4
+24/131918/campos_512_v4
+24/131924/campos_512_v4
+24/131925/campos_512_v4
+24/131941/campos_512_v4
+24/131945/campos_512_v4
+24/131953/campos_512_v4
+24/131956/campos_512_v4
+24/131961/campos_512_v4
+24/131962/campos_512_v4
+24/131965/campos_512_v4
+24/131992/campos_512_v4
+24/131995/campos_512_v4
+24/131997/campos_512_v4
+24/132008/campos_512_v4
+24/132035/campos_512_v4
+24/132052/campos_512_v4
+24/132057/campos_512_v4
+24/132059/campos_512_v4
+24/132062/campos_512_v4
+24/132066/campos_512_v4
+24/132073/campos_512_v4
+24/132081/campos_512_v4
+24/132088/campos_512_v4
+24/132090/campos_512_v4
+24/132092/campos_512_v4
+24/132093/campos_512_v4
+24/132097/campos_512_v4
+24/132100/campos_512_v4
+24/132107/campos_512_v4
+24/132114/campos_512_v4
+24/132129/campos_512_v4
+24/132145/campos_512_v4
+24/132147/campos_512_v4
+24/132152/campos_512_v4
+24/132163/campos_512_v4
+24/132167/campos_512_v4
+24/132179/campos_512_v4
+24/132183/campos_512_v4
+24/132205/campos_512_v4
+24/132209/campos_512_v4
+24/132233/campos_512_v4
+24/132255/campos_512_v4
+24/132257/campos_512_v4
+24/132261/campos_512_v4
+24/132268/campos_512_v4
+24/132269/campos_512_v4
+24/132276/campos_512_v4
+24/132282/campos_512_v4
+24/132300/campos_512_v4
+24/132326/campos_512_v4
+24/132327/campos_512_v4
+24/132329/campos_512_v4
+24/132332/campos_512_v4
+24/132338/campos_512_v4
+24/132346/campos_512_v4
+24/132350/campos_512_v4
+24/132352/campos_512_v4
+24/132355/campos_512_v4
+24/132365/campos_512_v4
+24/132367/campos_512_v4
+24/132368/campos_512_v4
+24/132373/campos_512_v4
+24/132374/campos_512_v4
+24/132375/campos_512_v4
+24/132377/campos_512_v4
+24/132384/campos_512_v4
+24/132416/campos_512_v4
+24/132441/campos_512_v4
+24/132454/campos_512_v4
+24/132455/campos_512_v4
+24/132467/campos_512_v4
+24/132476/campos_512_v4
+24/132479/campos_512_v4
+24/132486/campos_512_v4
+24/132490/campos_512_v4
+24/132494/campos_512_v4
+24/132500/campos_512_v4
+24/132501/campos_512_v4
+24/132526/campos_512_v4
+24/132527/campos_512_v4
+24/132554/campos_512_v4
+24/132556/campos_512_v4
+24/132561/campos_512_v4
+24/132580/campos_512_v4
+24/132581/campos_512_v4
+24/132585/campos_512_v4
+24/132586/campos_512_v4
+24/132588/campos_512_v4
+24/132595/campos_512_v4
+24/132596/campos_512_v4
+24/132610/campos_512_v4
+24/132611/campos_512_v4
+24/132620/campos_512_v4
+24/132624/campos_512_v4
+24/132635/campos_512_v4
+24/132642/campos_512_v4
+24/132652/campos_512_v4
+24/132658/campos_512_v4
+24/132660/campos_512_v4
+24/132681/campos_512_v4
+24/132685/campos_512_v4
+24/132687/campos_512_v4
+24/132705/campos_512_v4
+24/132706/campos_512_v4
+24/132721/campos_512_v4
+24/132734/campos_512_v4
+24/132745/campos_512_v4
+24/132753/campos_512_v4
+24/132755/campos_512_v4
+24/132760/campos_512_v4
+24/132793/campos_512_v4
+24/132803/campos_512_v4
+24/132821/campos_512_v4
+24/132831/campos_512_v4
+24/132846/campos_512_v4
+24/132864/campos_512_v4
+24/132874/campos_512_v4
+24/132891/campos_512_v4
+24/132895/campos_512_v4
+24/132897/campos_512_v4
+24/132900/campos_512_v4
+24/132928/campos_512_v4
+24/132930/campos_512_v4
+24/132939/campos_512_v4
+24/132947/campos_512_v4
+24/132954/campos_512_v4
+24/132960/campos_512_v4
+24/132970/campos_512_v4
+24/132978/campos_512_v4
+24/132981/campos_512_v4
+24/133020/campos_512_v4
+24/133031/campos_512_v4
+24/133034/campos_512_v4
+24/133039/campos_512_v4
+24/133040/campos_512_v4
+24/133046/campos_512_v4
+24/133054/campos_512_v4
+24/133076/campos_512_v4
+24/133078/campos_512_v4
+24/133088/campos_512_v4
+24/133105/campos_512_v4
+24/133110/campos_512_v4
+24/133119/campos_512_v4
+24/133126/campos_512_v4
+24/133137/campos_512_v4
+24/133143/campos_512_v4
+24/133148/campos_512_v4
+24/133185/campos_512_v4
+24/133186/campos_512_v4
+24/133194/campos_512_v4
+24/133200/campos_512_v4
+24/133209/campos_512_v4
+24/133224/campos_512_v4
+24/133232/campos_512_v4
+24/133234/campos_512_v4
+24/133249/campos_512_v4
+24/133250/campos_512_v4
+24/133255/campos_512_v4
+24/133264/campos_512_v4
+24/133265/campos_512_v4
+24/133267/campos_512_v4
+24/133278/campos_512_v4
+24/133288/campos_512_v4
+24/133292/campos_512_v4
+24/133293/campos_512_v4
+24/133305/campos_512_v4
+24/133322/campos_512_v4
+24/133337/campos_512_v4
+24/133341/campos_512_v4
+24/133344/campos_512_v4
+24/133352/campos_512_v4
+24/133353/campos_512_v4
+24/133359/campos_512_v4
+24/133368/campos_512_v4
+24/133369/campos_512_v4
+24/133371/campos_512_v4
+24/133383/campos_512_v4
+24/133384/campos_512_v4
+24/133387/campos_512_v4
+24/133404/campos_512_v4
+24/133407/campos_512_v4
+24/133412/campos_512_v4
+24/133415/campos_512_v4
+24/133421/campos_512_v4
+24/133422/campos_512_v4
+24/133423/campos_512_v4
+24/133431/campos_512_v4
+24/133436/campos_512_v4
+24/133460/campos_512_v4
+24/133469/campos_512_v4
+24/133495/campos_512_v4
+24/133505/campos_512_v4
+24/133510/campos_512_v4
+24/133511/campos_512_v4
+24/133521/campos_512_v4
+24/133525/campos_512_v4
+24/133528/campos_512_v4
+24/133532/campos_512_v4
+24/133560/campos_512_v4
+24/133572/campos_512_v4
+24/133573/campos_512_v4
+24/133574/campos_512_v4
+24/133579/campos_512_v4
+24/133588/campos_512_v4
+24/133593/campos_512_v4
+24/133598/campos_512_v4
+24/133599/campos_512_v4
+24/133600/campos_512_v4
+24/133617/campos_512_v4
+24/133620/campos_512_v4
+24/133621/campos_512_v4
+24/133628/campos_512_v4
+24/133634/campos_512_v4
+24/133636/campos_512_v4
+24/133641/campos_512_v4
+24/133644/campos_512_v4
+24/133645/campos_512_v4
+24/133646/campos_512_v4
+24/133647/campos_512_v4
+24/133650/campos_512_v4
+24/133657/campos_512_v4
+24/133668/campos_512_v4
+24/133680/campos_512_v4
+24/133681/campos_512_v4
+24/133682/campos_512_v4
+24/133697/campos_512_v4
+24/133711/campos_512_v4
+24/133712/campos_512_v4
+24/133714/campos_512_v4
+24/133717/campos_512_v4
+24/133718/campos_512_v4
+24/133726/campos_512_v4
+24/133730/campos_512_v4
+24/133733/campos_512_v4
+24/133739/campos_512_v4
+24/133748/campos_512_v4
+24/133764/campos_512_v4
+24/133776/campos_512_v4
+24/133780/campos_512_v4
+24/133785/campos_512_v4
+24/133795/campos_512_v4
+24/133805/campos_512_v4
+24/133817/campos_512_v4
+24/133820/campos_512_v4
+24/133823/campos_512_v4
+24/133847/campos_512_v4
+24/133853/campos_512_v4
+24/133860/campos_512_v4
+24/133879/campos_512_v4
+24/133881/campos_512_v4
+24/133901/campos_512_v4
+24/133917/campos_512_v4
+24/133919/campos_512_v4
+24/133925/campos_512_v4
+24/133929/campos_512_v4
+24/133931/campos_512_v4
+24/133932/campos_512_v4
+24/133939/campos_512_v4
+24/133940/campos_512_v4
+24/133951/campos_512_v4
+24/133954/campos_512_v4
+24/133958/campos_512_v4
+24/133975/campos_512_v4
+24/133978/campos_512_v4
+24/133979/campos_512_v4
+24/134006/campos_512_v4
+24/134007/campos_512_v4
+24/134012/campos_512_v4
+24/134018/campos_512_v4
+24/134019/campos_512_v4
+24/134038/campos_512_v4
+24/134042/campos_512_v4
+24/134049/campos_512_v4
+24/134077/campos_512_v4
+24/134086/campos_512_v4
+24/134095/campos_512_v4
+24/134100/campos_512_v4
+24/134105/campos_512_v4
+24/134108/campos_512_v4
+24/134111/campos_512_v4
+24/134135/campos_512_v4
+24/134136/campos_512_v4
+24/134137/campos_512_v4
+24/134142/campos_512_v4
+24/134153/campos_512_v4
+24/134162/campos_512_v4
+24/134169/campos_512_v4
+24/134175/campos_512_v4
+24/134180/campos_512_v4
+24/134182/campos_512_v4
+24/134189/campos_512_v4
+24/134190/campos_512_v4
+24/134205/campos_512_v4
+24/134218/campos_512_v4
+24/134221/campos_512_v4
+24/134233/campos_512_v4
+24/134238/campos_512_v4
+24/134244/campos_512_v4
+24/134253/campos_512_v4
+24/134256/campos_512_v4
+24/134262/campos_512_v4
+24/134278/campos_512_v4
+24/134282/campos_512_v4
+24/134284/campos_512_v4
+24/134288/campos_512_v4
+24/134298/campos_512_v4
+24/134306/campos_512_v4
+24/134327/campos_512_v4
+24/134332/campos_512_v4
+24/134336/campos_512_v4
+24/134344/campos_512_v4
+24/134350/campos_512_v4
+24/134351/campos_512_v4
+24/134360/campos_512_v4
+24/134386/campos_512_v4
+24/134401/campos_512_v4
+24/134410/campos_512_v4
+24/134411/campos_512_v4
+24/134418/campos_512_v4
+24/134422/campos_512_v4
+24/134424/campos_512_v4
+24/134434/campos_512_v4
+24/134444/campos_512_v4
+24/134456/campos_512_v4
+24/134460/campos_512_v4
+24/134466/campos_512_v4
+24/134469/campos_512_v4
+24/134471/campos_512_v4
+24/134480/campos_512_v4
+24/134481/campos_512_v4
+24/134498/campos_512_v4
+24/134499/campos_512_v4
+24/134508/campos_512_v4
+24/134509/campos_512_v4
+24/134531/campos_512_v4
+24/134545/campos_512_v4
+24/134554/campos_512_v4
+24/134561/campos_512_v4
+24/134564/campos_512_v4
+24/134592/campos_512_v4
+24/134600/campos_512_v4
+24/134602/campos_512_v4
+24/134604/campos_512_v4
+24/134612/campos_512_v4
+24/134617/campos_512_v4
+24/134619/campos_512_v4
+24/134625/campos_512_v4
+24/134637/campos_512_v4
+24/134646/campos_512_v4
+24/134659/campos_512_v4
+24/134664/campos_512_v4
+24/134665/campos_512_v4
+24/134669/campos_512_v4
+24/134701/campos_512_v4
+24/134710/campos_512_v4
+24/134712/campos_512_v4
+24/134735/campos_512_v4
+24/134743/campos_512_v4
+24/134763/campos_512_v4
+24/134766/campos_512_v4
+24/134771/campos_512_v4
+24/134784/campos_512_v4
+24/134789/campos_512_v4
+24/134796/campos_512_v4
+24/134815/campos_512_v4
+24/134818/campos_512_v4
+24/134824/campos_512_v4
+24/134825/campos_512_v4
+24/134827/campos_512_v4
+24/134828/campos_512_v4
+24/134833/campos_512_v4
+24/134840/campos_512_v4
+24/134842/campos_512_v4
+24/134856/campos_512_v4
+24/134860/campos_512_v4
+24/134861/campos_512_v4
+24/134863/campos_512_v4
+24/134874/campos_512_v4
+24/134891/campos_512_v4
+24/134909/campos_512_v4
+24/134912/campos_512_v4
+24/134916/campos_512_v4
+24/134917/campos_512_v4
+24/134923/campos_512_v4
+24/134933/campos_512_v4
+24/134936/campos_512_v4
+24/134961/campos_512_v4
+24/134970/campos_512_v4
+24/134992/campos_512_v4
+24/134995/campos_512_v4
+24/134996/campos_512_v4
+25/135004/campos_512_v4
+25/135009/campos_512_v4
+25/135011/campos_512_v4
+25/135029/campos_512_v4
+25/135065/campos_512_v4
+25/135067/campos_512_v4
+25/135075/campos_512_v4
+25/135082/campos_512_v4
+25/135087/campos_512_v4
+25/135103/campos_512_v4
+25/135109/campos_512_v4
+25/135111/campos_512_v4
+25/135119/campos_512_v4
+25/135128/campos_512_v4
+25/135130/campos_512_v4
+25/135131/campos_512_v4
+25/135143/campos_512_v4
+25/135149/campos_512_v4
+25/135156/campos_512_v4
+25/135157/campos_512_v4
+25/135162/campos_512_v4
+25/135163/campos_512_v4
+25/135167/campos_512_v4
+25/135179/campos_512_v4
+25/135182/campos_512_v4
+25/135189/campos_512_v4
+25/135196/campos_512_v4
+25/135197/campos_512_v4
+25/135199/campos_512_v4
+25/135212/campos_512_v4
+25/135223/campos_512_v4
+25/135227/campos_512_v4
+25/135234/campos_512_v4
+25/135259/campos_512_v4
+25/135270/campos_512_v4
+25/135275/campos_512_v4
+25/135280/campos_512_v4
+25/135283/campos_512_v4
+25/135297/campos_512_v4
+25/135299/campos_512_v4
+25/135301/campos_512_v4
+25/135308/campos_512_v4
+25/135317/campos_512_v4
+25/135319/campos_512_v4
+25/135325/campos_512_v4
+25/135330/campos_512_v4
+25/135334/campos_512_v4
+25/135349/campos_512_v4
+25/135366/campos_512_v4
+25/135367/campos_512_v4
+25/135381/campos_512_v4
+25/135388/campos_512_v4
+25/135392/campos_512_v4
+25/135415/campos_512_v4
+25/135420/campos_512_v4
+25/135426/campos_512_v4
+25/135458/campos_512_v4
+25/135467/campos_512_v4
+25/135470/campos_512_v4
+25/135478/campos_512_v4
+25/135481/campos_512_v4
+25/135484/campos_512_v4
+25/135490/campos_512_v4
+25/135492/campos_512_v4
+25/135493/campos_512_v4
+25/135505/campos_512_v4
+25/135512/campos_512_v4
+25/135516/campos_512_v4
+25/135534/campos_512_v4
+25/135538/campos_512_v4
+25/135547/campos_512_v4
+25/135552/campos_512_v4
+25/135556/campos_512_v4
+25/135557/campos_512_v4
+25/135561/campos_512_v4
+25/135565/campos_512_v4
+25/135569/campos_512_v4
+25/135578/campos_512_v4
+25/135581/campos_512_v4
+25/135595/campos_512_v4
+25/135596/campos_512_v4
+25/135609/campos_512_v4
+25/135615/campos_512_v4
+25/135617/campos_512_v4
+25/135621/campos_512_v4
+25/135628/campos_512_v4
+25/135631/campos_512_v4
+25/135632/campos_512_v4
+25/135633/campos_512_v4
+25/135639/campos_512_v4
+25/135645/campos_512_v4
+25/135670/campos_512_v4
+25/135671/campos_512_v4
+25/135672/campos_512_v4
+25/135684/campos_512_v4
+25/135689/campos_512_v4
+25/135702/campos_512_v4
+25/135709/campos_512_v4
+25/135723/campos_512_v4
+25/135760/campos_512_v4
+25/135773/campos_512_v4
+25/135777/campos_512_v4
+25/135785/campos_512_v4
+25/135789/campos_512_v4
+25/135794/campos_512_v4
+25/135795/campos_512_v4
+25/135816/campos_512_v4
+25/135818/campos_512_v4
+25/135823/campos_512_v4
+25/135835/campos_512_v4
+25/135839/campos_512_v4
+25/135854/campos_512_v4
+25/135860/campos_512_v4
+25/135861/campos_512_v4
+25/135875/campos_512_v4
+25/135885/campos_512_v4
+25/135891/campos_512_v4
+25/135913/campos_512_v4
+25/135920/campos_512_v4
+25/135921/campos_512_v4
+25/135923/campos_512_v4
+25/135926/campos_512_v4
+25/135928/campos_512_v4
+25/135979/campos_512_v4
+25/135988/campos_512_v4
+25/135990/campos_512_v4
+25/136000/campos_512_v4
+25/136001/campos_512_v4
+25/136013/campos_512_v4
+25/136022/campos_512_v4
+25/136024/campos_512_v4
+25/136034/campos_512_v4
+25/136037/campos_512_v4
+25/136048/campos_512_v4
+25/136052/campos_512_v4
+25/136067/campos_512_v4
+25/136070/campos_512_v4
+25/136071/campos_512_v4
+25/136073/campos_512_v4
+25/136075/campos_512_v4
+25/136084/campos_512_v4
+25/136087/campos_512_v4
+25/136100/campos_512_v4
+25/136102/campos_512_v4
+25/136135/campos_512_v4
+25/136139/campos_512_v4
+25/136144/campos_512_v4
+25/136160/campos_512_v4
+25/136161/campos_512_v4
+25/136162/campos_512_v4
+25/136163/campos_512_v4
+25/136167/campos_512_v4
+25/136178/campos_512_v4
+25/136187/campos_512_v4
+25/136192/campos_512_v4
+25/136199/campos_512_v4
+25/136200/campos_512_v4
+25/136213/campos_512_v4
+25/136216/campos_512_v4
+25/136230/campos_512_v4
+25/136236/campos_512_v4
+25/136243/campos_512_v4
+25/136252/campos_512_v4
+25/136256/campos_512_v4
+25/136257/campos_512_v4
+25/136259/campos_512_v4
+25/136272/campos_512_v4
+25/136283/campos_512_v4
+25/136289/campos_512_v4
+25/136295/campos_512_v4
+25/136323/campos_512_v4
+25/136334/campos_512_v4
+25/136337/campos_512_v4
+25/136368/campos_512_v4
+25/136373/campos_512_v4
+25/136398/campos_512_v4
+25/136405/campos_512_v4
+25/136415/campos_512_v4
+25/136416/campos_512_v4
+25/136428/campos_512_v4
+25/136430/campos_512_v4
+25/136438/campos_512_v4
+25/136441/campos_512_v4
+25/136446/campos_512_v4
+25/136447/campos_512_v4
+25/136452/campos_512_v4
+25/136456/campos_512_v4
+25/136457/campos_512_v4
+25/136462/campos_512_v4
+25/136465/campos_512_v4
+25/136467/campos_512_v4
+25/136477/campos_512_v4
+25/136486/campos_512_v4
+25/136493/campos_512_v4
+25/136499/campos_512_v4
+25/136508/campos_512_v4
+25/136540/campos_512_v4
+25/136557/campos_512_v4
+25/136563/campos_512_v4
+25/136574/campos_512_v4
+25/136578/campos_512_v4
+25/136580/campos_512_v4
+25/136585/campos_512_v4
+25/136598/campos_512_v4
+25/136599/campos_512_v4
+25/136611/campos_512_v4
+25/136630/campos_512_v4
+25/136641/campos_512_v4
+25/136652/campos_512_v4
+25/136653/campos_512_v4
+25/136654/campos_512_v4
+25/136659/campos_512_v4
+25/136669/campos_512_v4
+25/136675/campos_512_v4
+25/136691/campos_512_v4
+25/136692/campos_512_v4
+25/136695/campos_512_v4
+25/136707/campos_512_v4
+25/136709/campos_512_v4
+25/136710/campos_512_v4
+25/136719/campos_512_v4
+25/136726/campos_512_v4
+25/136731/campos_512_v4
+25/136733/campos_512_v4
+25/136738/campos_512_v4
+25/136744/campos_512_v4
+25/136746/campos_512_v4
+25/136748/campos_512_v4
+25/136752/campos_512_v4
+25/136762/campos_512_v4
+25/136770/campos_512_v4
+25/136772/campos_512_v4
+25/136776/campos_512_v4
+25/136779/campos_512_v4
+25/136784/campos_512_v4
+25/136790/campos_512_v4
+25/136796/campos_512_v4
+25/136799/campos_512_v4
+25/136802/campos_512_v4
+25/136804/campos_512_v4
+25/136811/campos_512_v4
+25/136814/campos_512_v4
+25/136817/campos_512_v4
+25/136824/campos_512_v4
+25/136825/campos_512_v4
+25/136835/campos_512_v4
+25/136838/campos_512_v4
+25/136848/campos_512_v4
+25/136849/campos_512_v4
+25/136851/campos_512_v4
+25/136856/campos_512_v4
+25/136858/campos_512_v4
+25/136860/campos_512_v4
+25/136861/campos_512_v4
+25/136876/campos_512_v4
+25/136891/campos_512_v4
+25/136901/campos_512_v4
+25/136903/campos_512_v4
+25/136907/campos_512_v4
+25/136912/campos_512_v4
+25/136914/campos_512_v4
+25/136916/campos_512_v4
+25/136918/campos_512_v4
+25/136926/campos_512_v4
+25/136936/campos_512_v4
+25/136938/campos_512_v4
+25/136945/campos_512_v4
+25/136946/campos_512_v4
+25/136951/campos_512_v4
+25/136956/campos_512_v4
+25/136959/campos_512_v4
+25/136963/campos_512_v4
+25/136964/campos_512_v4
+25/136972/campos_512_v4
+25/136981/campos_512_v4
+25/136982/campos_512_v4
+25/136988/campos_512_v4
+25/136998/campos_512_v4
+25/136999/campos_512_v4
+25/137002/campos_512_v4
+25/137006/campos_512_v4
+25/137015/campos_512_v4
+25/137016/campos_512_v4
+25/137019/campos_512_v4
+25/137030/campos_512_v4
+25/137031/campos_512_v4
+25/137055/campos_512_v4
+25/137056/campos_512_v4
+25/137059/campos_512_v4
+25/137077/campos_512_v4
+25/137084/campos_512_v4
+25/137088/campos_512_v4
+25/137098/campos_512_v4
+25/137104/campos_512_v4
+25/137108/campos_512_v4
+25/137116/campos_512_v4
+25/137120/campos_512_v4
+25/137122/campos_512_v4
+25/137144/campos_512_v4
+25/137169/campos_512_v4
+25/137177/campos_512_v4
+25/137198/campos_512_v4
+25/137201/campos_512_v4
+25/137214/campos_512_v4
+25/137216/campos_512_v4
+25/137219/campos_512_v4
+25/137224/campos_512_v4
+25/137233/campos_512_v4
+25/137257/campos_512_v4
+25/137269/campos_512_v4
+25/137276/campos_512_v4
+25/137278/campos_512_v4
+25/137284/campos_512_v4
+25/137291/campos_512_v4
+25/137293/campos_512_v4
+25/137300/campos_512_v4
+25/137316/campos_512_v4
+25/137323/campos_512_v4
+25/137337/campos_512_v4
+25/137339/campos_512_v4
+25/137347/campos_512_v4
+25/137350/campos_512_v4
+25/137358/campos_512_v4
+25/137363/campos_512_v4
+25/137364/campos_512_v4
+25/137368/campos_512_v4
+25/137371/campos_512_v4
+25/137377/campos_512_v4
+25/137386/campos_512_v4
+25/137421/campos_512_v4
+25/137422/campos_512_v4
+25/137435/campos_512_v4
+25/137441/campos_512_v4
+25/137443/campos_512_v4
+25/137448/campos_512_v4
+25/137455/campos_512_v4
+25/137458/campos_512_v4
+25/137461/campos_512_v4
+25/137467/campos_512_v4
+25/137469/campos_512_v4
+25/137472/campos_512_v4
+25/137482/campos_512_v4
+25/137492/campos_512_v4
+25/137522/campos_512_v4
+25/137533/campos_512_v4
+25/137538/campos_512_v4
+25/137539/campos_512_v4
+25/137542/campos_512_v4
+25/137545/campos_512_v4
+25/137560/campos_512_v4
+25/137561/campos_512_v4
+25/137563/campos_512_v4
+25/137567/campos_512_v4
+25/137578/campos_512_v4
+25/137587/campos_512_v4
+25/137605/campos_512_v4
+25/137616/campos_512_v4
+25/137625/campos_512_v4
+25/137633/campos_512_v4
+25/137641/campos_512_v4
+25/137646/campos_512_v4
+25/137655/campos_512_v4
+25/137656/campos_512_v4
+25/137658/campos_512_v4
+25/137662/campos_512_v4
+25/137663/campos_512_v4
+25/137678/campos_512_v4
+25/137684/campos_512_v4
+25/137689/campos_512_v4
+25/137694/campos_512_v4
+25/137706/campos_512_v4
+25/137709/campos_512_v4
+25/137710/campos_512_v4
+25/137721/campos_512_v4
+25/137729/campos_512_v4
+25/137734/campos_512_v4
+25/137751/campos_512_v4
+25/137761/campos_512_v4
+25/137765/campos_512_v4
+25/137766/campos_512_v4
+25/137779/campos_512_v4
+25/137783/campos_512_v4
+25/137796/campos_512_v4
+25/137798/campos_512_v4
+25/137801/campos_512_v4
+25/137806/campos_512_v4
+25/137813/campos_512_v4
+25/137829/campos_512_v4
+25/137832/campos_512_v4
+25/137841/campos_512_v4
+25/137851/campos_512_v4
+25/137873/campos_512_v4
+25/137891/campos_512_v4
+25/137903/campos_512_v4
+25/137908/campos_512_v4
+25/137913/campos_512_v4
+25/137917/campos_512_v4
+25/137918/campos_512_v4
+25/137922/campos_512_v4
+25/137930/campos_512_v4
+25/137940/campos_512_v4
+25/137947/campos_512_v4
+25/137950/campos_512_v4
+25/137954/campos_512_v4
+25/137956/campos_512_v4
+25/137962/campos_512_v4
+25/137963/campos_512_v4
+25/137971/campos_512_v4
+25/137973/campos_512_v4
+25/137978/campos_512_v4
+25/137987/campos_512_v4
+25/137991/campos_512_v4
+25/137997/campos_512_v4
+25/138003/campos_512_v4
+25/138004/campos_512_v4
+25/138011/campos_512_v4
+25/138022/campos_512_v4
+25/138028/campos_512_v4
+25/138029/campos_512_v4
+25/138048/campos_512_v4
+25/138055/campos_512_v4
+25/138059/campos_512_v4
+25/138064/campos_512_v4
+25/138065/campos_512_v4
+25/138066/campos_512_v4
+25/138068/campos_512_v4
+25/138074/campos_512_v4
+25/138098/campos_512_v4
+25/138106/campos_512_v4
+25/138118/campos_512_v4
+25/138169/campos_512_v4
+25/138184/campos_512_v4
+25/138185/campos_512_v4
+25/138189/campos_512_v4
+25/138197/campos_512_v4
+25/138199/campos_512_v4
+25/138207/campos_512_v4
+25/138210/campos_512_v4
+25/138212/campos_512_v4
+25/138215/campos_512_v4
+25/138216/campos_512_v4
+25/138221/campos_512_v4
+25/138227/campos_512_v4
+25/138244/campos_512_v4
+25/138257/campos_512_v4
+25/138265/campos_512_v4
+25/138294/campos_512_v4
+25/138306/campos_512_v4
+25/138309/campos_512_v4
+25/138310/campos_512_v4
+25/138312/campos_512_v4
+25/138316/campos_512_v4
+25/138323/campos_512_v4
+25/138331/campos_512_v4
+25/138362/campos_512_v4
+25/138378/campos_512_v4
+25/138392/campos_512_v4
+25/138394/campos_512_v4
+25/138399/campos_512_v4
+25/138400/campos_512_v4
+25/138414/campos_512_v4
+25/138416/campos_512_v4
+25/138425/campos_512_v4
+25/138428/campos_512_v4
+25/138430/campos_512_v4
+25/138443/campos_512_v4
+25/138454/campos_512_v4
+25/138456/campos_512_v4
+25/138459/campos_512_v4
+25/138463/campos_512_v4
+25/138464/campos_512_v4
+25/138469/campos_512_v4
+25/138478/campos_512_v4
+25/138493/campos_512_v4
+25/138497/campos_512_v4
+25/138502/campos_512_v4
+25/138503/campos_512_v4
+25/138514/campos_512_v4
+25/138537/campos_512_v4
+25/138547/campos_512_v4
+25/138549/campos_512_v4
+25/138555/campos_512_v4
+25/138556/campos_512_v4
+25/138560/campos_512_v4
+25/138562/campos_512_v4
+25/138565/campos_512_v4
+25/138567/campos_512_v4
+25/138570/campos_512_v4
+25/138583/campos_512_v4
+25/138585/campos_512_v4
+25/138590/campos_512_v4
+25/138591/campos_512_v4
+25/138592/campos_512_v4
+25/138610/campos_512_v4
+25/138628/campos_512_v4
+25/138637/campos_512_v4
+25/138653/campos_512_v4
+25/138672/campos_512_v4
+25/138674/campos_512_v4
+25/138678/campos_512_v4
+25/138683/campos_512_v4
+25/138697/campos_512_v4
+25/138702/campos_512_v4
+25/138708/campos_512_v4
+25/138711/campos_512_v4
+25/138722/campos_512_v4
+25/138723/campos_512_v4
+25/138729/campos_512_v4
+25/138730/campos_512_v4
+25/138744/campos_512_v4
+25/138747/campos_512_v4
+25/138748/campos_512_v4
+25/138749/campos_512_v4
+25/138755/campos_512_v4
+25/138756/campos_512_v4
+25/138761/campos_512_v4
+25/138769/campos_512_v4
+25/138773/campos_512_v4
+25/138779/campos_512_v4
+25/138784/campos_512_v4
+25/138789/campos_512_v4
+25/138807/campos_512_v4
+25/138813/campos_512_v4
+25/138820/campos_512_v4
+25/138827/campos_512_v4
+25/138839/campos_512_v4
+25/138840/campos_512_v4
+25/138853/campos_512_v4
+25/138855/campos_512_v4
+25/138856/campos_512_v4
+25/138858/campos_512_v4
+25/138860/campos_512_v4
+25/138868/campos_512_v4
+25/138869/campos_512_v4
+25/138872/campos_512_v4
+25/138873/campos_512_v4
+25/138874/campos_512_v4
+25/138886/campos_512_v4
+25/138890/campos_512_v4
+25/138893/campos_512_v4
+25/138894/campos_512_v4
+25/138903/campos_512_v4
+25/138927/campos_512_v4
+25/138940/campos_512_v4
+25/138951/campos_512_v4
+25/138954/campos_512_v4
+25/138961/campos_512_v4
+25/138969/campos_512_v4
+25/138973/campos_512_v4
+25/138983/campos_512_v4
+25/138985/campos_512_v4
+25/139004/campos_512_v4
+25/139014/campos_512_v4
+25/139018/campos_512_v4
+25/139027/campos_512_v4
+25/139028/campos_512_v4
+25/139031/campos_512_v4
+25/139036/campos_512_v4
+25/139037/campos_512_v4
+25/139046/campos_512_v4
+25/139048/campos_512_v4
+25/139051/campos_512_v4
+25/139055/campos_512_v4
+25/139060/campos_512_v4
+25/139061/campos_512_v4
+25/139063/campos_512_v4
+25/139070/campos_512_v4
+25/139082/campos_512_v4
+25/139114/campos_512_v4
+25/139129/campos_512_v4
+25/139131/campos_512_v4
+25/139138/campos_512_v4
+25/139145/campos_512_v4
+25/139159/campos_512_v4
+25/139161/campos_512_v4
+25/139162/campos_512_v4
+25/139164/campos_512_v4
+25/139166/campos_512_v4
+25/139168/campos_512_v4
+25/139169/campos_512_v4
+25/139170/campos_512_v4
+25/139173/campos_512_v4
+25/139198/campos_512_v4
+25/139220/campos_512_v4
+25/139221/campos_512_v4
+25/139223/campos_512_v4
+25/139225/campos_512_v4
+25/139235/campos_512_v4
+25/139246/campos_512_v4
+25/139258/campos_512_v4
+25/139264/campos_512_v4
+25/139266/campos_512_v4
+25/139271/campos_512_v4
+25/139273/campos_512_v4
+25/139278/campos_512_v4
+25/139283/campos_512_v4
+25/139303/campos_512_v4
+25/139307/campos_512_v4
+25/139313/campos_512_v4
+25/139318/campos_512_v4
+25/139319/campos_512_v4
+25/139322/campos_512_v4
+25/139336/campos_512_v4
+25/139342/campos_512_v4
+25/139344/campos_512_v4
+25/139351/campos_512_v4
+25/139362/campos_512_v4
+25/139372/campos_512_v4
+25/139375/campos_512_v4
+25/139377/campos_512_v4
+25/139384/campos_512_v4
+25/139387/campos_512_v4
+25/139407/campos_512_v4
+25/139408/campos_512_v4
+25/139410/campos_512_v4
+25/139416/campos_512_v4
+25/139426/campos_512_v4
+25/139429/campos_512_v4
+25/139446/campos_512_v4
+25/139450/campos_512_v4
+25/139464/campos_512_v4
+25/139473/campos_512_v4
+25/139476/campos_512_v4
+25/139484/campos_512_v4
+25/139486/campos_512_v4
+25/139489/campos_512_v4
+25/139493/campos_512_v4
+25/139499/campos_512_v4
+25/139503/campos_512_v4
+25/139505/campos_512_v4
+25/139506/campos_512_v4
+25/139519/campos_512_v4
+25/139521/campos_512_v4
+25/139529/campos_512_v4
+25/139540/campos_512_v4
+25/139544/campos_512_v4
+25/139546/campos_512_v4
+25/139548/campos_512_v4
+25/139550/campos_512_v4
+25/139562/campos_512_v4
+25/139572/campos_512_v4
+25/139574/campos_512_v4
+25/139577/campos_512_v4
+25/139588/campos_512_v4
+25/139591/campos_512_v4
+25/139606/campos_512_v4
+25/139608/campos_512_v4
+25/139635/campos_512_v4
+25/139643/campos_512_v4
+25/139659/campos_512_v4
+25/139660/campos_512_v4
+25/139661/campos_512_v4
+25/139688/campos_512_v4
+25/139691/campos_512_v4
+25/139714/campos_512_v4
+25/139723/campos_512_v4
+25/139728/campos_512_v4
+25/139734/campos_512_v4
+25/139751/campos_512_v4
+25/139772/campos_512_v4
+25/139775/campos_512_v4
+25/139783/campos_512_v4
+25/139792/campos_512_v4
+25/139798/campos_512_v4
+25/139827/campos_512_v4
+25/139845/campos_512_v4
+25/139849/campos_512_v4
+25/139852/campos_512_v4
+25/139855/campos_512_v4
+25/139862/campos_512_v4
+25/139870/campos_512_v4
+25/139873/campos_512_v4
+25/139874/campos_512_v4
+25/139880/campos_512_v4
+25/139893/campos_512_v4
+25/139912/campos_512_v4
+25/139915/campos_512_v4
+25/139918/campos_512_v4
+25/139951/campos_512_v4
+25/139953/campos_512_v4
+25/139961/campos_512_v4
+25/139965/campos_512_v4
+25/139971/campos_512_v4
+25/139997/campos_512_v4
+26/140013/campos_512_v4
+26/140023/campos_512_v4
+26/140028/campos_512_v4
+26/140035/campos_512_v4
+26/140045/campos_512_v4
+26/140057/campos_512_v4
+26/140072/campos_512_v4
+26/140078/campos_512_v4
+26/140084/campos_512_v4
+26/140094/campos_512_v4
+26/140102/campos_512_v4
+26/140104/campos_512_v4
+26/140111/campos_512_v4
+26/140114/campos_512_v4
+26/140119/campos_512_v4
+26/140132/campos_512_v4
+26/140163/campos_512_v4
+26/140169/campos_512_v4
+26/140174/campos_512_v4
+26/140186/campos_512_v4
+26/140194/campos_512_v4
+26/140200/campos_512_v4
+26/140240/campos_512_v4
+26/140241/campos_512_v4
+26/140273/campos_512_v4
+26/140277/campos_512_v4
+26/140284/campos_512_v4
+26/140303/campos_512_v4
+26/140310/campos_512_v4
+26/140315/campos_512_v4
+26/140319/campos_512_v4
+26/140322/campos_512_v4
+26/140332/campos_512_v4
+26/140333/campos_512_v4
+26/140334/campos_512_v4
+26/140339/campos_512_v4
+26/140362/campos_512_v4
+26/140371/campos_512_v4
+26/140373/campos_512_v4
+26/140380/campos_512_v4
+26/140392/campos_512_v4
+26/140394/campos_512_v4
+26/140399/campos_512_v4
+26/140400/campos_512_v4
+26/140407/campos_512_v4
+26/140408/campos_512_v4
+26/140415/campos_512_v4
+26/140418/campos_512_v4
+26/140419/campos_512_v4
+26/140431/campos_512_v4
+26/140438/campos_512_v4
+26/140439/campos_512_v4
+26/140442/campos_512_v4
+26/140446/campos_512_v4
+26/140452/campos_512_v4
+26/140454/campos_512_v4
+26/140455/campos_512_v4
+26/140462/campos_512_v4
+26/140515/campos_512_v4
+26/140525/campos_512_v4
+26/140527/campos_512_v4
+26/140546/campos_512_v4
+26/140552/campos_512_v4
+26/140558/campos_512_v4
+26/140559/campos_512_v4
+26/140572/campos_512_v4
+26/140574/campos_512_v4
+26/140592/campos_512_v4
+26/140594/campos_512_v4
+26/140603/campos_512_v4
+26/140610/campos_512_v4
+26/140614/campos_512_v4
+26/140649/campos_512_v4
+26/140658/campos_512_v4
+26/140659/campos_512_v4
+26/140670/campos_512_v4
+26/140671/campos_512_v4
+26/140684/campos_512_v4
+26/140686/campos_512_v4
+26/140690/campos_512_v4
+26/140698/campos_512_v4
+26/140700/campos_512_v4
+26/140706/campos_512_v4
+26/140716/campos_512_v4
+26/140723/campos_512_v4
+26/140736/campos_512_v4
+26/140748/campos_512_v4
+26/140756/campos_512_v4
+26/140759/campos_512_v4
+26/140801/campos_512_v4
+26/140811/campos_512_v4
+26/140814/campos_512_v4
+26/140819/campos_512_v4
+26/140830/campos_512_v4
+26/140835/campos_512_v4
+26/140877/campos_512_v4
+26/140878/campos_512_v4
+26/140881/campos_512_v4
+26/140883/campos_512_v4
+26/140886/campos_512_v4
+26/140888/campos_512_v4
+26/140908/campos_512_v4
+26/140914/campos_512_v4
+26/140917/campos_512_v4
+26/140934/campos_512_v4
+26/140936/campos_512_v4
+26/140943/campos_512_v4
+26/140945/campos_512_v4
+26/140959/campos_512_v4
+26/140967/campos_512_v4
+26/140969/campos_512_v4
+26/140970/campos_512_v4
+26/140974/campos_512_v4
+26/140985/campos_512_v4
+26/140998/campos_512_v4
+26/141000/campos_512_v4
+26/141017/campos_512_v4
+26/141024/campos_512_v4
+26/141048/campos_512_v4
+26/141051/campos_512_v4
+26/141052/campos_512_v4
+26/141056/campos_512_v4
+26/141063/campos_512_v4
+26/141073/campos_512_v4
+26/141075/campos_512_v4
+26/141086/campos_512_v4
+26/141100/campos_512_v4
+26/141101/campos_512_v4
+26/141109/campos_512_v4
+26/141111/campos_512_v4
+26/141113/campos_512_v4
+26/141114/campos_512_v4
+26/141132/campos_512_v4
+26/141139/campos_512_v4
+26/141141/campos_512_v4
+26/141152/campos_512_v4
+26/141179/campos_512_v4
+26/141184/campos_512_v4
+26/141206/campos_512_v4
+26/141209/campos_512_v4
+26/141211/campos_512_v4
+26/141214/campos_512_v4
+26/141228/campos_512_v4
+26/141242/campos_512_v4
+26/141245/campos_512_v4
+26/141246/campos_512_v4
+26/141272/campos_512_v4
+26/141291/campos_512_v4
+26/141305/campos_512_v4
+26/141313/campos_512_v4
+26/141332/campos_512_v4
+26/141334/campos_512_v4
+26/141346/campos_512_v4
+26/141351/campos_512_v4
+26/141352/campos_512_v4
+26/141353/campos_512_v4
+26/141358/campos_512_v4
+26/141361/campos_512_v4
+26/141365/campos_512_v4
+26/141370/campos_512_v4
+26/141373/campos_512_v4
+26/141376/campos_512_v4
+26/141377/campos_512_v4
+26/141400/campos_512_v4
+26/141405/campos_512_v4
+26/141407/campos_512_v4
+26/141408/campos_512_v4
+26/141417/campos_512_v4
+26/141419/campos_512_v4
+26/141425/campos_512_v4
+26/141429/campos_512_v4
+26/141430/campos_512_v4
+26/141435/campos_512_v4
+26/141438/campos_512_v4
+26/141456/campos_512_v4
+26/141462/campos_512_v4
+26/141475/campos_512_v4
+26/141484/campos_512_v4
+26/141485/campos_512_v4
+26/141487/campos_512_v4
+26/141505/campos_512_v4
+26/141512/campos_512_v4
+26/141514/campos_512_v4
+26/141515/campos_512_v4
+26/141517/campos_512_v4
+26/141528/campos_512_v4
+26/141529/campos_512_v4
+26/141536/campos_512_v4
+26/141538/campos_512_v4
+26/141540/campos_512_v4
+26/141551/campos_512_v4
+26/141554/campos_512_v4
+26/141568/campos_512_v4
+26/141583/campos_512_v4
+26/141593/campos_512_v4
+26/141595/campos_512_v4
+26/141606/campos_512_v4
+26/141608/campos_512_v4
+26/141615/campos_512_v4
+26/141631/campos_512_v4
+26/141635/campos_512_v4
+26/141642/campos_512_v4
+26/141646/campos_512_v4
+26/141649/campos_512_v4
+26/141651/campos_512_v4
+26/141661/campos_512_v4
+26/141667/campos_512_v4
+26/141670/campos_512_v4
+26/141673/campos_512_v4
+26/141676/campos_512_v4
+26/141689/campos_512_v4
+26/141690/campos_512_v4
+26/141698/campos_512_v4
+26/141700/campos_512_v4
+26/141702/campos_512_v4
+26/141714/campos_512_v4
+26/141726/campos_512_v4
+26/141729/campos_512_v4
+26/141747/campos_512_v4
+26/141758/campos_512_v4
+26/141759/campos_512_v4
+26/141762/campos_512_v4
+26/141764/campos_512_v4
+26/141771/campos_512_v4
+26/141777/campos_512_v4
+26/141778/campos_512_v4
+26/141798/campos_512_v4
+26/141801/campos_512_v4
+26/141806/campos_512_v4
+26/141823/campos_512_v4
+26/141828/campos_512_v4
+26/141839/campos_512_v4
+26/141841/campos_512_v4
+26/141856/campos_512_v4
+26/141857/campos_512_v4
+26/141869/campos_512_v4
+26/141877/campos_512_v4
+26/141880/campos_512_v4
+26/141906/campos_512_v4
+26/141911/campos_512_v4
+26/141915/campos_512_v4
+26/141920/campos_512_v4
+26/141923/campos_512_v4
+26/141930/campos_512_v4
+26/141936/campos_512_v4
+26/141941/campos_512_v4
+26/141948/campos_512_v4
+26/141962/campos_512_v4
+26/141964/campos_512_v4
+26/141973/campos_512_v4
+26/141980/campos_512_v4
+26/141983/campos_512_v4
+26/141986/campos_512_v4
+26/141999/campos_512_v4
+26/142004/campos_512_v4
+26/142015/campos_512_v4
+26/142020/campos_512_v4
+26/142021/campos_512_v4
+26/142039/campos_512_v4
+26/142070/campos_512_v4
+26/142077/campos_512_v4
+26/142082/campos_512_v4
+26/142084/campos_512_v4
+26/142085/campos_512_v4
+26/142118/campos_512_v4
+26/142139/campos_512_v4
+26/142161/campos_512_v4
+26/142169/campos_512_v4
+26/142175/campos_512_v4
+26/142179/campos_512_v4
+26/142188/campos_512_v4
+26/142211/campos_512_v4
+26/142224/campos_512_v4
+26/142226/campos_512_v4
+26/142228/campos_512_v4
+26/142229/campos_512_v4
+26/142230/campos_512_v4
+26/142233/campos_512_v4
+26/142234/campos_512_v4
+26/142235/campos_512_v4
+26/142246/campos_512_v4
+26/142248/campos_512_v4
+26/142256/campos_512_v4
+26/142273/campos_512_v4
+26/142282/campos_512_v4
+26/142288/campos_512_v4
+26/142295/campos_512_v4
+26/142311/campos_512_v4
+26/142325/campos_512_v4
+26/142335/campos_512_v4
+26/142337/campos_512_v4
+26/142344/campos_512_v4
+26/142345/campos_512_v4
+26/142350/campos_512_v4
+26/142352/campos_512_v4
+26/142353/campos_512_v4
+26/142362/campos_512_v4
+26/142369/campos_512_v4
+26/142371/campos_512_v4
+26/142380/campos_512_v4
+26/142387/campos_512_v4
+26/142393/campos_512_v4
+26/142413/campos_512_v4
+26/142416/campos_512_v4
+26/142422/campos_512_v4
+26/142436/campos_512_v4
+26/142441/campos_512_v4
+26/142452/campos_512_v4
+26/142457/campos_512_v4
+26/142462/campos_512_v4
+26/142475/campos_512_v4
+26/142500/campos_512_v4
+26/142504/campos_512_v4
+26/142506/campos_512_v4
+26/142509/campos_512_v4
+26/142522/campos_512_v4
+26/142526/campos_512_v4
+26/142536/campos_512_v4
+26/142547/campos_512_v4
+26/142555/campos_512_v4
+26/142564/campos_512_v4
+26/142575/campos_512_v4
+26/142576/campos_512_v4
+26/142578/campos_512_v4
+26/142580/campos_512_v4
+26/142594/campos_512_v4
+26/142603/campos_512_v4
+26/142606/campos_512_v4
+26/142607/campos_512_v4
+26/142611/campos_512_v4
+26/142620/campos_512_v4
+26/142621/campos_512_v4
+26/142626/campos_512_v4
+26/142629/campos_512_v4
+26/142630/campos_512_v4
+26/142631/campos_512_v4
+26/142641/campos_512_v4
+26/142645/campos_512_v4
+26/142653/campos_512_v4
+26/142654/campos_512_v4
+26/142671/campos_512_v4
+26/142678/campos_512_v4
+26/142685/campos_512_v4
+26/142711/campos_512_v4
+26/142713/campos_512_v4
+26/142717/campos_512_v4
+26/142722/campos_512_v4
+26/142728/campos_512_v4
+26/142730/campos_512_v4
+26/142738/campos_512_v4
+26/142748/campos_512_v4
+26/142749/campos_512_v4
+26/142751/campos_512_v4
+26/142757/campos_512_v4
+26/142775/campos_512_v4
+26/142781/campos_512_v4
+26/142787/campos_512_v4
+26/142794/campos_512_v4
+26/142805/campos_512_v4
+26/142818/campos_512_v4
+26/142824/campos_512_v4
+26/142834/campos_512_v4
+26/142837/campos_512_v4
+26/142851/campos_512_v4
+26/142855/campos_512_v4
+26/142864/campos_512_v4
+26/142867/campos_512_v4
+26/142869/campos_512_v4
+26/142871/campos_512_v4
+26/142875/campos_512_v4
+26/142877/campos_512_v4
+26/142891/campos_512_v4
+26/142897/campos_512_v4
+26/142919/campos_512_v4
+26/142923/campos_512_v4
+26/142926/campos_512_v4
+26/142931/campos_512_v4
+26/142955/campos_512_v4
+26/142965/campos_512_v4
+26/142970/campos_512_v4
+26/142994/campos_512_v4
+26/142998/campos_512_v4
+26/143028/campos_512_v4
+26/143041/campos_512_v4
+26/143047/campos_512_v4
+26/143054/campos_512_v4
+26/143056/campos_512_v4
+26/143063/campos_512_v4
+26/143071/campos_512_v4
+26/143072/campos_512_v4
+26/143075/campos_512_v4
+26/143083/campos_512_v4
+26/143090/campos_512_v4
+26/143095/campos_512_v4
+26/143123/campos_512_v4
+26/143129/campos_512_v4
+26/143133/campos_512_v4
+26/143140/campos_512_v4
+26/143162/campos_512_v4
+26/143168/campos_512_v4
+26/143178/campos_512_v4
+26/143185/campos_512_v4
+26/143207/campos_512_v4
+26/143222/campos_512_v4
+26/143227/campos_512_v4
+26/143241/campos_512_v4
+26/143247/campos_512_v4
+26/143250/campos_512_v4
+26/143277/campos_512_v4
+26/143285/campos_512_v4
+26/143296/campos_512_v4
+26/143303/campos_512_v4
+26/143314/campos_512_v4
+26/143318/campos_512_v4
+26/143322/campos_512_v4
+26/143345/campos_512_v4
+26/143361/campos_512_v4
+26/143365/campos_512_v4
+26/143393/campos_512_v4
+26/143399/campos_512_v4
+26/143400/campos_512_v4
+26/143402/campos_512_v4
+26/143406/campos_512_v4
+26/143426/campos_512_v4
+26/143444/campos_512_v4
+26/143446/campos_512_v4
+26/143448/campos_512_v4
+26/143460/campos_512_v4
+26/143462/campos_512_v4
+26/143465/campos_512_v4
+26/143490/campos_512_v4
+26/143493/campos_512_v4
+26/143495/campos_512_v4
+26/143505/campos_512_v4
+26/143507/campos_512_v4
+26/143542/campos_512_v4
+26/143563/campos_512_v4
+26/143571/campos_512_v4
+26/143585/campos_512_v4
+26/143591/campos_512_v4
+26/143592/campos_512_v4
+26/143603/campos_512_v4
+26/143619/campos_512_v4
+26/143627/campos_512_v4
+26/143632/campos_512_v4
+26/143635/campos_512_v4
+26/143639/campos_512_v4
+26/143643/campos_512_v4
+26/143653/campos_512_v4
+26/143659/campos_512_v4
+26/143665/campos_512_v4
+26/143666/campos_512_v4
+26/143674/campos_512_v4
+26/143695/campos_512_v4
+26/143706/campos_512_v4
+26/143709/campos_512_v4
+26/143713/campos_512_v4
+26/143731/campos_512_v4
+26/143742/campos_512_v4
+26/143745/campos_512_v4
+26/143749/campos_512_v4
+26/143755/campos_512_v4
+26/143756/campos_512_v4
+26/143773/campos_512_v4
+26/143792/campos_512_v4
+26/143796/campos_512_v4
+26/143819/campos_512_v4
+26/143820/campos_512_v4
+26/143822/campos_512_v4
+26/143835/campos_512_v4
+26/143837/campos_512_v4
+26/143848/campos_512_v4
+26/143876/campos_512_v4
+26/143878/campos_512_v4
+26/143879/campos_512_v4
+26/143886/campos_512_v4
+26/143901/campos_512_v4
+26/143907/campos_512_v4
+26/143913/campos_512_v4
+26/143916/campos_512_v4
+26/143919/campos_512_v4
+26/143937/campos_512_v4
+26/143966/campos_512_v4
+26/143970/campos_512_v4
+26/143978/campos_512_v4
+26/143984/campos_512_v4
+26/144000/campos_512_v4
+26/144014/campos_512_v4
+26/144015/campos_512_v4
+26/144020/campos_512_v4
+26/144024/campos_512_v4
+26/144030/campos_512_v4
+26/144034/campos_512_v4
+26/144044/campos_512_v4
+26/144046/campos_512_v4
+26/144054/campos_512_v4
+26/144086/campos_512_v4
+26/144095/campos_512_v4
+26/144097/campos_512_v4
+26/144108/campos_512_v4
+26/144118/campos_512_v4
+26/144124/campos_512_v4
+26/144125/campos_512_v4
+26/144147/campos_512_v4
+26/144169/campos_512_v4
+26/144179/campos_512_v4
+26/144184/campos_512_v4
+26/144187/campos_512_v4
+26/144193/campos_512_v4
+26/144220/campos_512_v4
+26/144223/campos_512_v4
+26/144238/campos_512_v4
+26/144241/campos_512_v4
+26/144250/campos_512_v4
+26/144254/campos_512_v4
+26/144274/campos_512_v4
+26/144278/campos_512_v4
+26/144284/campos_512_v4
+26/144286/campos_512_v4
+26/144287/campos_512_v4
+26/144314/campos_512_v4
+26/144318/campos_512_v4
+26/144335/campos_512_v4
+26/144356/campos_512_v4
+26/144366/campos_512_v4
+26/144395/campos_512_v4
+26/144397/campos_512_v4
+26/144400/campos_512_v4
+26/144405/campos_512_v4
+26/144418/campos_512_v4
+26/144424/campos_512_v4
+26/144440/campos_512_v4
+26/144446/campos_512_v4
+26/144448/campos_512_v4
+26/144460/campos_512_v4
+26/144466/campos_512_v4
+26/144468/campos_512_v4
+26/144479/campos_512_v4
+26/144483/campos_512_v4
+26/144488/campos_512_v4
+26/144489/campos_512_v4
+26/144497/campos_512_v4
+26/144505/campos_512_v4
+26/144506/campos_512_v4
+26/144507/campos_512_v4
+26/144511/campos_512_v4
+26/144515/campos_512_v4
+26/144522/campos_512_v4
+26/144525/campos_512_v4
+26/144534/campos_512_v4
+26/144537/campos_512_v4
+26/144543/campos_512_v4
+26/144544/campos_512_v4
+26/144551/campos_512_v4
+26/144557/campos_512_v4
+26/144576/campos_512_v4
+26/144577/campos_512_v4
+26/144580/campos_512_v4
+26/144587/campos_512_v4
+26/144590/campos_512_v4
+26/144598/campos_512_v4
+26/144602/campos_512_v4
+26/144612/campos_512_v4
+26/144616/campos_512_v4
+26/144619/campos_512_v4
+26/144625/campos_512_v4
+26/144627/campos_512_v4
+26/144631/campos_512_v4
+26/144638/campos_512_v4
+26/144639/campos_512_v4
+26/144646/campos_512_v4
+26/144657/campos_512_v4
+26/144662/campos_512_v4
+26/144668/campos_512_v4
+26/144696/campos_512_v4
+26/144698/campos_512_v4
+26/144706/campos_512_v4
+26/144707/campos_512_v4
+26/144712/campos_512_v4
+26/144713/campos_512_v4
+26/144724/campos_512_v4
+26/144728/campos_512_v4
+26/144748/campos_512_v4
+26/144750/campos_512_v4
+26/144767/campos_512_v4
+26/144771/campos_512_v4
+26/144777/campos_512_v4
+26/144824/campos_512_v4
+26/144828/campos_512_v4
+26/144837/campos_512_v4
+26/144843/campos_512_v4
+26/144875/campos_512_v4
+26/144878/campos_512_v4
+26/144888/campos_512_v4
+26/144890/campos_512_v4
+26/144895/campos_512_v4
+26/144911/campos_512_v4
+26/144921/campos_512_v4
+26/144930/campos_512_v4
+26/144932/campos_512_v4
+26/144936/campos_512_v4
+26/144942/campos_512_v4
+26/144951/campos_512_v4
+26/144956/campos_512_v4
+26/144965/campos_512_v4
+26/144979/campos_512_v4
+26/144986/campos_512_v4
+26/144993/campos_512_v4
+27/145004/campos_512_v4
+27/145022/campos_512_v4
+27/145052/campos_512_v4
+27/145063/campos_512_v4
+27/145084/campos_512_v4
+27/145089/campos_512_v4
+27/145095/campos_512_v4
+27/145096/campos_512_v4
+27/145121/campos_512_v4
+27/145136/campos_512_v4
+27/145140/campos_512_v4
+27/145141/campos_512_v4
+27/145145/campos_512_v4
+27/145150/campos_512_v4
+27/145157/campos_512_v4
+27/145164/campos_512_v4
+27/145167/campos_512_v4
+27/145168/campos_512_v4
+27/145181/campos_512_v4
+27/145182/campos_512_v4
+27/145191/campos_512_v4
+27/145198/campos_512_v4
+27/145199/campos_512_v4
+27/145201/campos_512_v4
+27/145203/campos_512_v4
+27/145211/campos_512_v4
+27/145220/campos_512_v4
+27/145226/campos_512_v4
+27/145228/campos_512_v4
+27/145230/campos_512_v4
+27/145232/campos_512_v4
+27/145235/campos_512_v4
+27/145241/campos_512_v4
+27/145245/campos_512_v4
+27/145253/campos_512_v4
+27/145265/campos_512_v4
+27/145268/campos_512_v4
+27/145280/campos_512_v4
+27/145308/campos_512_v4
+27/145317/campos_512_v4
+27/145328/campos_512_v4
+27/145332/campos_512_v4
+27/145334/campos_512_v4
+27/145363/campos_512_v4
+27/145392/campos_512_v4
+27/145397/campos_512_v4
+27/145398/campos_512_v4
+27/145403/campos_512_v4
+27/145411/campos_512_v4
+27/145416/campos_512_v4
+27/145418/campos_512_v4
+27/145422/campos_512_v4
+27/145429/campos_512_v4
+27/145432/campos_512_v4
+27/145438/campos_512_v4
+27/145449/campos_512_v4
+27/145453/campos_512_v4
+27/145455/campos_512_v4
+27/145461/campos_512_v4
+27/145464/campos_512_v4
+27/145466/campos_512_v4
+27/145481/campos_512_v4
+27/145488/campos_512_v4
+27/145490/campos_512_v4
+27/145493/campos_512_v4
+27/145497/campos_512_v4
+27/145500/campos_512_v4
+27/145507/campos_512_v4
+27/145519/campos_512_v4
+27/145532/campos_512_v4
+27/145533/campos_512_v4
+27/145540/campos_512_v4
+27/145562/campos_512_v4
+27/145575/campos_512_v4
+27/145581/campos_512_v4
+27/145583/campos_512_v4
+27/145584/campos_512_v4
+27/145596/campos_512_v4
+27/145599/campos_512_v4
+27/145606/campos_512_v4
+27/145616/campos_512_v4
+27/145617/campos_512_v4
+27/145632/campos_512_v4
+27/145646/campos_512_v4
+27/145653/campos_512_v4
+27/145658/campos_512_v4
+27/145671/campos_512_v4
+27/145682/campos_512_v4
+27/145685/campos_512_v4
+27/145690/campos_512_v4
+27/145691/campos_512_v4
+27/145694/campos_512_v4
+27/145699/campos_512_v4
+27/145701/campos_512_v4
+27/145703/campos_512_v4
+27/145714/campos_512_v4
+27/145718/campos_512_v4
+27/145735/campos_512_v4
+27/145737/campos_512_v4
+27/145738/campos_512_v4
+27/145754/campos_512_v4
+27/145756/campos_512_v4
+27/145761/campos_512_v4
+27/145767/campos_512_v4
+27/145772/campos_512_v4
+27/145774/campos_512_v4
+27/145777/campos_512_v4
+27/145784/campos_512_v4
+27/145789/campos_512_v4
+27/145790/campos_512_v4
+27/145809/campos_512_v4
+27/145813/campos_512_v4
+27/145820/campos_512_v4
+27/145823/campos_512_v4
+27/145824/campos_512_v4
+27/145828/campos_512_v4
+27/145837/campos_512_v4
+27/145839/campos_512_v4
+27/145844/campos_512_v4
+27/145854/campos_512_v4
+27/145859/campos_512_v4
+27/145876/campos_512_v4
+27/145877/campos_512_v4
+27/145889/campos_512_v4
+27/145891/campos_512_v4
+27/145898/campos_512_v4
+27/145901/campos_512_v4
+27/145922/campos_512_v4
+27/145923/campos_512_v4
+27/145926/campos_512_v4
+27/145932/campos_512_v4
+27/145935/campos_512_v4
+27/145946/campos_512_v4
+27/145958/campos_512_v4
+27/145962/campos_512_v4
+27/145966/campos_512_v4
+27/145967/campos_512_v4
+27/145986/campos_512_v4
+27/146002/campos_512_v4
+27/146019/campos_512_v4
+27/146020/campos_512_v4
+27/146046/campos_512_v4
+27/146055/campos_512_v4
+27/146060/campos_512_v4
+27/146065/campos_512_v4
+27/146068/campos_512_v4
+27/146081/campos_512_v4
+27/146087/campos_512_v4
+27/146089/campos_512_v4
+27/146091/campos_512_v4
+27/146094/campos_512_v4
+27/146100/campos_512_v4
+27/146105/campos_512_v4
+27/146111/campos_512_v4
+27/146142/campos_512_v4
+27/146148/campos_512_v4
+27/146149/campos_512_v4
+27/146166/campos_512_v4
+27/146168/campos_512_v4
+27/146177/campos_512_v4
+27/146184/campos_512_v4
+27/146191/campos_512_v4
+27/146192/campos_512_v4
+27/146204/campos_512_v4
+27/146208/campos_512_v4
+27/146214/campos_512_v4
+27/146216/campos_512_v4
+27/146226/campos_512_v4
+27/146227/campos_512_v4
+27/146238/campos_512_v4
+27/146241/campos_512_v4
+27/146242/campos_512_v4
+27/146243/campos_512_v4
+27/146244/campos_512_v4
+27/146247/campos_512_v4
+27/146250/campos_512_v4
+27/146255/campos_512_v4
+27/146261/campos_512_v4
+27/146281/campos_512_v4
+27/146285/campos_512_v4
+27/146286/campos_512_v4
+27/146299/campos_512_v4
+27/146313/campos_512_v4
+27/146332/campos_512_v4
+27/146352/campos_512_v4
+27/146354/campos_512_v4
+27/146356/campos_512_v4
+27/146364/campos_512_v4
+27/146365/campos_512_v4
+27/146371/campos_512_v4
+27/146385/campos_512_v4
+27/146393/campos_512_v4
+27/146398/campos_512_v4
+27/146417/campos_512_v4
+27/146441/campos_512_v4
+27/146454/campos_512_v4
+27/146478/campos_512_v4
+27/146479/campos_512_v4
+27/146488/campos_512_v4
+27/146491/campos_512_v4
+27/146506/campos_512_v4
+27/146510/campos_512_v4
+27/146520/campos_512_v4
+27/146540/campos_512_v4
+27/146545/campos_512_v4
+27/146557/campos_512_v4
+27/146573/campos_512_v4
+27/146581/campos_512_v4
+27/146585/campos_512_v4
+27/146588/campos_512_v4
+27/146592/campos_512_v4
+27/146605/campos_512_v4
+27/146609/campos_512_v4
+27/146616/campos_512_v4
+27/146619/campos_512_v4
+27/146630/campos_512_v4
+27/146636/campos_512_v4
+27/146638/campos_512_v4
+27/146643/campos_512_v4
+27/146659/campos_512_v4
+27/146662/campos_512_v4
+27/146671/campos_512_v4
+27/146681/campos_512_v4
+27/146690/campos_512_v4
+27/146709/campos_512_v4
+27/146713/campos_512_v4
+27/146716/campos_512_v4
+27/146717/campos_512_v4
+27/146731/campos_512_v4
+27/146735/campos_512_v4
+27/146736/campos_512_v4
+27/146741/campos_512_v4
+27/146757/campos_512_v4
+27/146758/campos_512_v4
+27/146765/campos_512_v4
+27/146768/campos_512_v4
+27/146769/campos_512_v4
+27/146778/campos_512_v4
+27/146784/campos_512_v4
+27/146786/campos_512_v4
+27/146788/campos_512_v4
+27/146802/campos_512_v4
+27/146817/campos_512_v4
+27/146823/campos_512_v4
+27/146826/campos_512_v4
+27/146830/campos_512_v4
+27/146831/campos_512_v4
+27/146832/campos_512_v4
+27/146833/campos_512_v4
+27/146837/campos_512_v4
+27/146841/campos_512_v4
+27/146858/campos_512_v4
+27/146870/campos_512_v4
+27/146904/campos_512_v4
+27/146912/campos_512_v4
+27/146925/campos_512_v4
+27/146940/campos_512_v4
+27/146943/campos_512_v4
+27/146946/campos_512_v4
+27/146948/campos_512_v4
+27/146953/campos_512_v4
+27/146954/campos_512_v4
+27/146979/campos_512_v4
+27/146997/campos_512_v4
+27/147004/campos_512_v4
+27/147007/campos_512_v4
+27/147021/campos_512_v4
+27/147032/campos_512_v4
+27/147035/campos_512_v4
+27/147057/campos_512_v4
+27/147059/campos_512_v4
+27/147072/campos_512_v4
+27/147085/campos_512_v4
+27/147088/campos_512_v4
+27/147096/campos_512_v4
+27/147101/campos_512_v4
+27/147126/campos_512_v4
+27/147128/campos_512_v4
+27/147132/campos_512_v4
+27/147154/campos_512_v4
+27/147155/campos_512_v4
+27/147156/campos_512_v4
+27/147157/campos_512_v4
+27/147178/campos_512_v4
+27/147183/campos_512_v4
+27/147186/campos_512_v4
+27/147192/campos_512_v4
+27/147225/campos_512_v4
+27/147228/campos_512_v4
+27/147229/campos_512_v4
+27/147235/campos_512_v4
+27/147245/campos_512_v4
+27/147249/campos_512_v4
+27/147263/campos_512_v4
+27/147272/campos_512_v4
+27/147276/campos_512_v4
+27/147291/campos_512_v4
+27/147292/campos_512_v4
+27/147295/campos_512_v4
+27/147302/campos_512_v4
+27/147306/campos_512_v4
+27/147319/campos_512_v4
+27/147324/campos_512_v4
+27/147335/campos_512_v4
+27/147351/campos_512_v4
+27/147354/campos_512_v4
+27/147357/campos_512_v4
+27/147360/campos_512_v4
+27/147365/campos_512_v4
+27/147375/campos_512_v4
+27/147381/campos_512_v4
+27/147403/campos_512_v4
+27/147412/campos_512_v4
+27/147413/campos_512_v4
+27/147416/campos_512_v4
+27/147421/campos_512_v4
+27/147434/campos_512_v4
+27/147435/campos_512_v4
+27/147440/campos_512_v4
+27/147443/campos_512_v4
+27/147445/campos_512_v4
+27/147449/campos_512_v4
+27/147459/campos_512_v4
+27/147465/campos_512_v4
+27/147474/campos_512_v4
+27/147475/campos_512_v4
+27/147479/campos_512_v4
+27/147482/campos_512_v4
+27/147485/campos_512_v4
+27/147493/campos_512_v4
+27/147503/campos_512_v4
+27/147506/campos_512_v4
+27/147512/campos_512_v4
+27/147523/campos_512_v4
+27/147529/campos_512_v4
+27/147531/campos_512_v4
+27/147541/campos_512_v4
+27/147544/campos_512_v4
+27/147558/campos_512_v4
+27/147565/campos_512_v4
+27/147568/campos_512_v4
+27/147577/campos_512_v4
+27/147586/campos_512_v4
+27/147591/campos_512_v4
+27/147592/campos_512_v4
+27/147594/campos_512_v4
+27/147595/campos_512_v4
+27/147602/campos_512_v4
+27/147606/campos_512_v4
+27/147615/campos_512_v4
+27/147628/campos_512_v4
+27/147636/campos_512_v4
+27/147642/campos_512_v4
+27/147643/campos_512_v4
+27/147649/campos_512_v4
+27/147660/campos_512_v4
+27/147661/campos_512_v4
+27/147670/campos_512_v4
+27/147682/campos_512_v4
+27/147688/campos_512_v4
+27/147704/campos_512_v4
+27/147705/campos_512_v4
+27/147710/campos_512_v4
+27/147722/campos_512_v4
+27/147726/campos_512_v4
+27/147729/campos_512_v4
+27/147732/campos_512_v4
+27/147743/campos_512_v4
+27/147745/campos_512_v4
+27/147746/campos_512_v4
+27/147750/campos_512_v4
+27/147760/campos_512_v4
+27/147767/campos_512_v4
+27/147769/campos_512_v4
+27/147772/campos_512_v4
+27/147774/campos_512_v4
+27/147778/campos_512_v4
+27/147790/campos_512_v4
+27/147791/campos_512_v4
+27/147799/campos_512_v4
+27/147800/campos_512_v4
+27/147816/campos_512_v4
+27/147823/campos_512_v4
+27/147825/campos_512_v4
+27/147834/campos_512_v4
+27/147842/campos_512_v4
+27/147847/campos_512_v4
+27/147848/campos_512_v4
+27/147850/campos_512_v4
+27/147856/campos_512_v4
+27/147866/campos_512_v4
+27/147888/campos_512_v4
+27/147892/campos_512_v4
+27/147899/campos_512_v4
+27/147901/campos_512_v4
+27/147907/campos_512_v4
+27/147908/campos_512_v4
+27/147911/campos_512_v4
+27/147924/campos_512_v4
+27/147932/campos_512_v4
+27/147948/campos_512_v4
+27/147960/campos_512_v4
+27/147965/campos_512_v4
+27/147967/campos_512_v4
+27/147970/campos_512_v4
+27/147972/campos_512_v4
+27/147998/campos_512_v4
+27/147999/campos_512_v4
+27/148003/campos_512_v4
+27/148009/campos_512_v4
+27/148021/campos_512_v4
+27/148024/campos_512_v4
+27/148037/campos_512_v4
+27/148050/campos_512_v4
+27/148051/campos_512_v4
+27/148064/campos_512_v4
+27/148070/campos_512_v4
+27/148087/campos_512_v4
+27/148092/campos_512_v4
+27/148101/campos_512_v4
+27/148102/campos_512_v4
+27/148107/campos_512_v4
+27/148111/campos_512_v4
+27/148112/campos_512_v4
+27/148116/campos_512_v4
+27/148119/campos_512_v4
+27/148143/campos_512_v4
+27/148148/campos_512_v4
+27/148149/campos_512_v4
+27/148158/campos_512_v4
+27/148166/campos_512_v4
+27/148170/campos_512_v4
+27/148188/campos_512_v4
+27/148195/campos_512_v4
+27/148208/campos_512_v4
+27/148211/campos_512_v4
+27/148226/campos_512_v4
+27/148233/campos_512_v4
+27/148236/campos_512_v4
+27/148240/campos_512_v4
+27/148268/campos_512_v4
+27/148272/campos_512_v4
+27/148273/campos_512_v4
+27/148282/campos_512_v4
+27/148286/campos_512_v4
+27/148288/campos_512_v4
+27/148296/campos_512_v4
+27/148307/campos_512_v4
+27/148316/campos_512_v4
+27/148317/campos_512_v4
+27/148318/campos_512_v4
+27/148329/campos_512_v4
+27/148330/campos_512_v4
+27/148339/campos_512_v4
+27/148349/campos_512_v4
+27/148352/campos_512_v4
+27/148355/campos_512_v4
+27/148365/campos_512_v4
+27/148371/campos_512_v4
+27/148372/campos_512_v4
+27/148380/campos_512_v4
+27/148393/campos_512_v4
+27/148396/campos_512_v4
+27/148399/campos_512_v4
+27/148411/campos_512_v4
+27/148412/campos_512_v4
+27/148413/campos_512_v4
+27/148433/campos_512_v4
+27/148449/campos_512_v4
+27/148458/campos_512_v4
+27/148460/campos_512_v4
+27/148462/campos_512_v4
+27/148503/campos_512_v4
+27/148507/campos_512_v4
+27/148512/campos_512_v4
+27/148522/campos_512_v4
+27/148527/campos_512_v4
+27/148535/campos_512_v4
+27/148543/campos_512_v4
+27/148546/campos_512_v4
+27/148552/campos_512_v4
+27/148553/campos_512_v4
+27/148556/campos_512_v4
+27/148566/campos_512_v4
+27/148575/campos_512_v4
+27/148577/campos_512_v4
+27/148581/campos_512_v4
+27/148584/campos_512_v4
+27/148590/campos_512_v4
+27/148592/campos_512_v4
+27/148599/campos_512_v4
+27/148602/campos_512_v4
+27/148606/campos_512_v4
+27/148609/campos_512_v4
+27/148619/campos_512_v4
+27/148621/campos_512_v4
+27/148626/campos_512_v4
+27/148635/campos_512_v4
+27/148640/campos_512_v4
+27/148648/campos_512_v4
+27/148649/campos_512_v4
+27/148652/campos_512_v4
+27/148656/campos_512_v4
+27/148671/campos_512_v4
+27/148677/campos_512_v4
+27/148679/campos_512_v4
+27/148724/campos_512_v4
+27/148731/campos_512_v4
+27/148734/campos_512_v4
+27/148742/campos_512_v4
+27/148756/campos_512_v4
+27/148766/campos_512_v4
+27/148767/campos_512_v4
+27/148783/campos_512_v4
+27/148797/campos_512_v4
+27/148800/campos_512_v4
+27/148810/campos_512_v4
+27/148813/campos_512_v4
+27/148817/campos_512_v4
+27/148825/campos_512_v4
+27/148832/campos_512_v4
+27/148834/campos_512_v4
+27/148856/campos_512_v4
+27/148861/campos_512_v4
+27/148864/campos_512_v4
+27/148866/campos_512_v4
+27/148885/campos_512_v4
+27/148900/campos_512_v4
+27/148902/campos_512_v4
+27/148918/campos_512_v4
+27/148920/campos_512_v4
+27/148931/campos_512_v4
+27/148932/campos_512_v4
+27/148935/campos_512_v4
+27/148950/campos_512_v4
+27/148955/campos_512_v4
+27/148979/campos_512_v4
+27/148980/campos_512_v4
+27/148983/campos_512_v4
+27/148984/campos_512_v4
+27/148995/campos_512_v4
+27/148996/campos_512_v4
+27/148999/campos_512_v4
+27/149000/campos_512_v4
+27/149003/campos_512_v4
+27/149004/campos_512_v4
+27/149007/campos_512_v4
+27/149010/campos_512_v4
+27/149014/campos_512_v4
+27/149026/campos_512_v4
+27/149037/campos_512_v4
+27/149051/campos_512_v4
+27/149063/campos_512_v4
+27/149064/campos_512_v4
+27/149078/campos_512_v4
+27/149081/campos_512_v4
+27/149082/campos_512_v4
+27/149100/campos_512_v4
+27/149119/campos_512_v4
+27/149131/campos_512_v4
+27/149136/campos_512_v4
+27/149138/campos_512_v4
+27/149139/campos_512_v4
+27/149140/campos_512_v4
+27/149141/campos_512_v4
+27/149158/campos_512_v4
+27/149171/campos_512_v4
+27/149176/campos_512_v4
+27/149177/campos_512_v4
+27/149181/campos_512_v4
+27/149185/campos_512_v4
+27/149197/campos_512_v4
+27/149204/campos_512_v4
+27/149219/campos_512_v4
+27/149220/campos_512_v4
+27/149226/campos_512_v4
+27/149236/campos_512_v4
+27/149237/campos_512_v4
+27/149238/campos_512_v4
+27/149241/campos_512_v4
+27/149244/campos_512_v4
+27/149249/campos_512_v4
+27/149251/campos_512_v4
+27/149266/campos_512_v4
+27/149271/campos_512_v4
+27/149274/campos_512_v4
+27/149279/campos_512_v4
+27/149282/campos_512_v4
+27/149284/campos_512_v4
+27/149287/campos_512_v4
+27/149312/campos_512_v4
+27/149313/campos_512_v4
+27/149322/campos_512_v4
+27/149328/campos_512_v4
+27/149337/campos_512_v4
+27/149344/campos_512_v4
+27/149346/campos_512_v4
+27/149353/campos_512_v4
+27/149357/campos_512_v4
+27/149374/campos_512_v4
+27/149388/campos_512_v4
+27/149394/campos_512_v4
+27/149399/campos_512_v4
+27/149410/campos_512_v4
+27/149415/campos_512_v4
+27/149434/campos_512_v4
+27/149444/campos_512_v4
+27/149447/campos_512_v4
+27/149448/campos_512_v4
+27/149458/campos_512_v4
+27/149463/campos_512_v4
+27/149464/campos_512_v4
+27/149470/campos_512_v4
+27/149472/campos_512_v4
+27/149475/campos_512_v4
+27/149483/campos_512_v4
+27/149492/campos_512_v4
+27/149501/campos_512_v4
+27/149511/campos_512_v4
+27/149520/campos_512_v4
+27/149530/campos_512_v4
+27/149548/campos_512_v4
+27/149556/campos_512_v4
+27/149558/campos_512_v4
+27/149559/campos_512_v4
+27/149561/campos_512_v4
+27/149569/campos_512_v4
+27/149575/campos_512_v4
+27/149587/campos_512_v4
+27/149589/campos_512_v4
+27/149593/campos_512_v4
+27/149595/campos_512_v4
+27/149602/campos_512_v4
+27/149604/campos_512_v4
+27/149621/campos_512_v4
+27/149623/campos_512_v4
+27/149629/campos_512_v4
+27/149636/campos_512_v4
+27/149637/campos_512_v4
+27/149638/campos_512_v4
+27/149652/campos_512_v4
+27/149653/campos_512_v4
+27/149656/campos_512_v4
+27/149659/campos_512_v4
+27/149668/campos_512_v4
+27/149678/campos_512_v4
+27/149681/campos_512_v4
+27/149694/campos_512_v4
+27/149713/campos_512_v4
+27/149717/campos_512_v4
+27/149719/campos_512_v4
+27/149730/campos_512_v4
+27/149731/campos_512_v4
+27/149746/campos_512_v4
+27/149754/campos_512_v4
+27/149764/campos_512_v4
+27/149770/campos_512_v4
+27/149774/campos_512_v4
+27/149791/campos_512_v4
+27/149802/campos_512_v4
+27/149821/campos_512_v4
+27/149847/campos_512_v4
+27/149861/campos_512_v4
+27/149900/campos_512_v4
+27/149902/campos_512_v4
+27/149905/campos_512_v4
+27/149910/campos_512_v4
+27/149916/campos_512_v4
+27/149917/campos_512_v4
+27/149921/campos_512_v4
+27/149938/campos_512_v4
+27/149941/campos_512_v4
+27/149945/campos_512_v4
+27/149947/campos_512_v4
+27/149953/campos_512_v4
+27/149954/campos_512_v4
+27/149955/campos_512_v4
+27/149962/campos_512_v4
+27/149967/campos_512_v4
+27/149973/campos_512_v4
+27/149974/campos_512_v4
+27/149979/campos_512_v4
+27/149983/campos_512_v4
+27/149988/campos_512_v4
+27/149991/campos_512_v4
+28/150025/campos_512_v4
+28/150034/campos_512_v4
+28/150041/campos_512_v4
+28/150061/campos_512_v4
+28/150077/campos_512_v4
+28/150078/campos_512_v4
+28/150099/campos_512_v4
+28/150103/campos_512_v4
+28/150104/campos_512_v4
+28/150114/campos_512_v4
+28/150143/campos_512_v4
+28/150164/campos_512_v4
+28/150166/campos_512_v4
+28/150169/campos_512_v4
+28/150173/campos_512_v4
+28/150176/campos_512_v4
+28/150194/campos_512_v4
+28/150234/campos_512_v4
+28/150239/campos_512_v4
+28/150241/campos_512_v4
+28/150252/campos_512_v4
+28/150258/campos_512_v4
+28/150260/campos_512_v4
+28/150273/campos_512_v4
+28/150280/campos_512_v4
+28/150295/campos_512_v4
+28/150297/campos_512_v4
+28/150299/campos_512_v4
+28/150303/campos_512_v4
+28/150305/campos_512_v4
+28/150311/campos_512_v4
+28/150313/campos_512_v4
+28/150315/campos_512_v4
+28/150317/campos_512_v4
+28/150322/campos_512_v4
+28/150333/campos_512_v4
+28/150353/campos_512_v4
+28/150354/campos_512_v4
+28/150376/campos_512_v4
+28/150377/campos_512_v4
+28/150400/campos_512_v4
+28/150424/campos_512_v4
+28/150435/campos_512_v4
+28/150439/campos_512_v4
+28/150440/campos_512_v4
+28/150441/campos_512_v4
+28/150442/campos_512_v4
+28/150443/campos_512_v4
+28/150444/campos_512_v4
+28/150454/campos_512_v4
+28/150459/campos_512_v4
+28/150462/campos_512_v4
+28/150474/campos_512_v4
+28/150476/campos_512_v4
+28/150496/campos_512_v4
+28/150516/campos_512_v4
+28/150548/campos_512_v4
+28/150559/campos_512_v4
+28/150569/campos_512_v4
+28/150571/campos_512_v4
+28/150598/campos_512_v4
+28/150599/campos_512_v4
+28/150613/campos_512_v4
+28/150614/campos_512_v4
+28/150615/campos_512_v4
+28/150625/campos_512_v4
+28/150627/campos_512_v4
+28/150631/campos_512_v4
+28/150635/campos_512_v4
+28/150637/campos_512_v4
+28/150644/campos_512_v4
+28/150646/campos_512_v4
+28/150652/campos_512_v4
+28/150653/campos_512_v4
+28/150655/campos_512_v4
+28/150657/campos_512_v4
+28/150658/campos_512_v4
+28/150659/campos_512_v4
+28/150660/campos_512_v4
+28/150664/campos_512_v4
+28/150678/campos_512_v4
+28/150693/campos_512_v4
+28/150699/campos_512_v4
+28/150733/campos_512_v4
+28/150734/campos_512_v4
+28/150737/campos_512_v4
+28/150742/campos_512_v4
+28/150746/campos_512_v4
+28/150749/campos_512_v4
+28/150750/campos_512_v4
+28/150753/campos_512_v4
+28/150768/campos_512_v4
+28/150809/campos_512_v4
+28/150824/campos_512_v4
+28/150848/campos_512_v4
+28/150868/campos_512_v4
+28/150883/campos_512_v4
+28/150890/campos_512_v4
+28/150894/campos_512_v4
+28/150906/campos_512_v4
+28/150908/campos_512_v4
+28/150910/campos_512_v4
+28/150921/campos_512_v4
+28/150923/campos_512_v4
+28/150929/campos_512_v4
+28/150944/campos_512_v4
+28/150950/campos_512_v4
+28/150953/campos_512_v4
+28/150956/campos_512_v4
+28/150981/campos_512_v4
+28/150993/campos_512_v4
+28/151021/campos_512_v4
+28/151042/campos_512_v4
+28/151043/campos_512_v4
+28/151053/campos_512_v4
+28/151064/campos_512_v4
+28/151065/campos_512_v4
+28/151100/campos_512_v4
+28/151120/campos_512_v4
+28/151121/campos_512_v4
+28/151143/campos_512_v4
+28/151147/campos_512_v4
+28/151151/campos_512_v4
+28/151152/campos_512_v4
+28/151173/campos_512_v4
+28/151184/campos_512_v4
+28/151190/campos_512_v4
+28/151191/campos_512_v4
+28/151195/campos_512_v4
+28/151216/campos_512_v4
+28/151237/campos_512_v4
+28/151239/campos_512_v4
+28/151242/campos_512_v4
+28/151255/campos_512_v4
+28/151280/campos_512_v4
+28/151284/campos_512_v4
+28/151285/campos_512_v4
+28/151293/campos_512_v4
+28/151306/campos_512_v4
+28/151321/campos_512_v4
+28/151323/campos_512_v4
+28/151327/campos_512_v4
+28/151333/campos_512_v4
+28/151354/campos_512_v4
+28/151355/campos_512_v4
+28/151358/campos_512_v4
+28/151366/campos_512_v4
+28/151368/campos_512_v4
+28/151385/campos_512_v4
+28/151392/campos_512_v4
+28/151394/campos_512_v4
+28/151395/campos_512_v4
+28/151408/campos_512_v4
+28/151411/campos_512_v4
+28/151413/campos_512_v4
+28/151418/campos_512_v4
+28/151420/campos_512_v4
+28/151432/campos_512_v4
+28/151437/campos_512_v4
+28/151446/campos_512_v4
+28/151448/campos_512_v4
+28/151451/campos_512_v4
+28/151454/campos_512_v4
+28/151455/campos_512_v4
+28/151458/campos_512_v4
+28/151465/campos_512_v4
+28/151476/campos_512_v4
+28/151488/campos_512_v4
+28/151494/campos_512_v4
+28/151502/campos_512_v4
+28/151511/campos_512_v4
+28/151526/campos_512_v4
+28/151528/campos_512_v4
+28/151534/campos_512_v4
+28/151544/campos_512_v4
+28/151569/campos_512_v4
+28/151576/campos_512_v4
+28/151582/campos_512_v4
+28/151585/campos_512_v4
+28/151594/campos_512_v4
+28/151615/campos_512_v4
+28/151616/campos_512_v4
+28/151618/campos_512_v4
+28/151619/campos_512_v4
+28/151623/campos_512_v4
+28/151624/campos_512_v4
+28/151633/campos_512_v4
+28/151639/campos_512_v4
+28/151641/campos_512_v4
+28/151657/campos_512_v4
+28/151658/campos_512_v4
+28/151659/campos_512_v4
+28/151672/campos_512_v4
+28/151689/campos_512_v4
+28/151701/campos_512_v4
+28/151709/campos_512_v4
+28/151733/campos_512_v4
+28/151736/campos_512_v4
+28/151738/campos_512_v4
+28/151740/campos_512_v4
+28/151741/campos_512_v4
+28/151749/campos_512_v4
+28/151754/campos_512_v4
+28/151755/campos_512_v4
+28/151757/campos_512_v4
+28/151769/campos_512_v4
+28/151774/campos_512_v4
+28/151776/campos_512_v4
+28/151798/campos_512_v4
+28/151807/campos_512_v4
+28/151819/campos_512_v4
+28/151830/campos_512_v4
+28/151833/campos_512_v4
+28/151850/campos_512_v4
+28/151865/campos_512_v4
+28/151872/campos_512_v4
+28/151876/campos_512_v4
+28/151890/campos_512_v4
+28/151894/campos_512_v4
+28/151897/campos_512_v4
+28/151899/campos_512_v4
+28/151904/campos_512_v4
+28/151923/campos_512_v4
+28/151938/campos_512_v4
+28/151941/campos_512_v4
+28/151943/campos_512_v4
+28/151967/campos_512_v4
+28/151977/campos_512_v4
+28/151987/campos_512_v4
+28/151998/campos_512_v4
+28/152044/campos_512_v4
+28/152046/campos_512_v4
+28/152047/campos_512_v4
+28/152050/campos_512_v4
+28/152053/campos_512_v4
+28/152071/campos_512_v4
+28/152073/campos_512_v4
+28/152087/campos_512_v4
+28/152090/campos_512_v4
+28/152093/campos_512_v4
+28/152094/campos_512_v4
+28/152095/campos_512_v4
+28/152108/campos_512_v4
+28/152125/campos_512_v4
+28/152129/campos_512_v4
+28/152131/campos_512_v4
+28/152140/campos_512_v4
+28/152147/campos_512_v4
+28/152148/campos_512_v4
+28/152156/campos_512_v4
+28/152160/campos_512_v4
+28/152162/campos_512_v4
+28/152164/campos_512_v4
+28/152171/campos_512_v4
+28/152197/campos_512_v4
+28/152200/campos_512_v4
+28/152203/campos_512_v4
+28/152208/campos_512_v4
+28/152216/campos_512_v4
+28/152217/campos_512_v4
+28/152218/campos_512_v4
+28/152239/campos_512_v4
+28/152254/campos_512_v4
+28/152278/campos_512_v4
+28/152286/campos_512_v4
+28/152309/campos_512_v4
+28/152310/campos_512_v4
+28/152316/campos_512_v4
+28/152320/campos_512_v4
+28/152329/campos_512_v4
+28/152337/campos_512_v4
+28/152342/campos_512_v4
+28/152344/campos_512_v4
+28/152346/campos_512_v4
+28/152368/campos_512_v4
+28/152373/campos_512_v4
+28/152376/campos_512_v4
+28/152382/campos_512_v4
+28/152385/campos_512_v4
+28/152390/campos_512_v4
+28/152396/campos_512_v4
+28/152405/campos_512_v4
+28/152409/campos_512_v4
+28/152411/campos_512_v4
+28/152435/campos_512_v4
+28/152436/campos_512_v4
+28/152437/campos_512_v4
+28/152456/campos_512_v4
+28/152472/campos_512_v4
+28/152486/campos_512_v4
+28/152489/campos_512_v4
+28/152512/campos_512_v4
+28/152521/campos_512_v4
+28/152545/campos_512_v4
+28/152567/campos_512_v4
+28/152569/campos_512_v4
+28/152572/campos_512_v4
+28/152573/campos_512_v4
+28/152576/campos_512_v4
+28/152577/campos_512_v4
+28/152583/campos_512_v4
+28/152617/campos_512_v4
+28/152629/campos_512_v4
+28/152638/campos_512_v4
+28/152639/campos_512_v4
+28/152647/campos_512_v4
+28/152651/campos_512_v4
+28/152653/campos_512_v4
+28/152672/campos_512_v4
+28/152687/campos_512_v4
+28/152694/campos_512_v4
+28/152696/campos_512_v4
+28/152706/campos_512_v4
+28/152710/campos_512_v4
+28/152716/campos_512_v4
+28/152718/campos_512_v4
+28/152733/campos_512_v4
+28/152745/campos_512_v4
+28/152748/campos_512_v4
+28/152758/campos_512_v4
+28/152765/campos_512_v4
+28/152771/campos_512_v4
+28/152781/campos_512_v4
+28/152785/campos_512_v4
+28/152793/campos_512_v4
+28/152796/campos_512_v4
+28/152802/campos_512_v4
+28/152812/campos_512_v4
+28/152814/campos_512_v4
+28/152819/campos_512_v4
+28/152822/campos_512_v4
+28/152828/campos_512_v4
+28/152835/campos_512_v4
+28/152839/campos_512_v4
+28/152840/campos_512_v4
+28/152842/campos_512_v4
+28/152851/campos_512_v4
+28/152860/campos_512_v4
+28/152864/campos_512_v4
+28/152869/campos_512_v4
+28/152894/campos_512_v4
+28/152909/campos_512_v4
+28/152911/campos_512_v4
+28/152920/campos_512_v4
+28/152938/campos_512_v4
+28/152945/campos_512_v4
+28/152959/campos_512_v4
+28/152962/campos_512_v4
+28/152964/campos_512_v4
+28/152965/campos_512_v4
+28/152973/campos_512_v4
+28/152982/campos_512_v4
+28/152990/campos_512_v4
+28/152991/campos_512_v4
+28/152992/campos_512_v4
+28/153006/campos_512_v4
+28/153012/campos_512_v4
+28/153017/campos_512_v4
+28/153020/campos_512_v4
+28/153022/campos_512_v4
+28/153029/campos_512_v4
+28/153031/campos_512_v4
+28/153043/campos_512_v4
+28/153053/campos_512_v4
+28/153059/campos_512_v4
+28/153071/campos_512_v4
+28/153074/campos_512_v4
+28/153077/campos_512_v4
+28/153080/campos_512_v4
+28/153089/campos_512_v4
+28/153098/campos_512_v4
+28/153099/campos_512_v4
+28/153116/campos_512_v4
+28/153128/campos_512_v4
+28/153130/campos_512_v4
+28/153136/campos_512_v4
+28/153138/campos_512_v4
+28/153145/campos_512_v4
+28/153147/campos_512_v4
+28/153149/campos_512_v4
+28/153151/campos_512_v4
+28/153153/campos_512_v4
+28/153157/campos_512_v4
+28/153159/campos_512_v4
+28/153162/campos_512_v4
+28/153163/campos_512_v4
+28/153164/campos_512_v4
+28/153174/campos_512_v4
+28/153178/campos_512_v4
+28/153182/campos_512_v4
+28/153185/campos_512_v4
+28/153191/campos_512_v4
+28/153202/campos_512_v4
+28/153204/campos_512_v4
+28/153207/campos_512_v4
+28/153213/campos_512_v4
+28/153222/campos_512_v4
+28/153224/campos_512_v4
+28/153237/campos_512_v4
+28/153238/campos_512_v4
+28/153260/campos_512_v4
+28/153275/campos_512_v4
+28/153279/campos_512_v4
+28/153283/campos_512_v4
+28/153287/campos_512_v4
+28/153288/campos_512_v4
+28/153289/campos_512_v4
+28/153290/campos_512_v4
+28/153296/campos_512_v4
+28/153300/campos_512_v4
+28/153302/campos_512_v4
+28/153313/campos_512_v4
+28/153323/campos_512_v4
+28/153324/campos_512_v4
+28/153339/campos_512_v4
+28/153352/campos_512_v4
+28/153365/campos_512_v4
+28/153366/campos_512_v4
+28/153379/campos_512_v4
+28/153400/campos_512_v4
+28/153412/campos_512_v4
+28/153414/campos_512_v4
+28/153452/campos_512_v4
+28/153453/campos_512_v4
+28/153455/campos_512_v4
+28/153468/campos_512_v4
+28/153472/campos_512_v4
+28/153474/campos_512_v4
+28/153480/campos_512_v4
+28/153483/campos_512_v4
+28/153499/campos_512_v4
+28/153508/campos_512_v4
+28/153516/campos_512_v4
+28/153519/campos_512_v4
+28/153520/campos_512_v4
+28/153527/campos_512_v4
+28/153540/campos_512_v4
+28/153549/campos_512_v4
+28/153550/campos_512_v4
+28/153552/campos_512_v4
+28/153558/campos_512_v4
+28/153565/campos_512_v4
+28/153583/campos_512_v4
+28/153584/campos_512_v4
+28/153589/campos_512_v4
+28/153607/campos_512_v4
+28/153613/campos_512_v4
+28/153620/campos_512_v4
+28/153624/campos_512_v4
+28/153640/campos_512_v4
+28/153646/campos_512_v4
+28/153657/campos_512_v4
+28/153659/campos_512_v4
+28/153669/campos_512_v4
+28/153684/campos_512_v4
+28/153700/campos_512_v4
+28/153716/campos_512_v4
+28/153736/campos_512_v4
+28/153737/campos_512_v4
+28/153745/campos_512_v4
+28/153748/campos_512_v4
+28/153757/campos_512_v4
+28/153769/campos_512_v4
+28/153771/campos_512_v4
+28/153776/campos_512_v4
+28/153784/campos_512_v4
+28/153785/campos_512_v4
+28/153789/campos_512_v4
+28/153794/campos_512_v4
+28/153804/campos_512_v4
+28/153807/campos_512_v4
+28/153831/campos_512_v4
+28/153836/campos_512_v4
+28/153860/campos_512_v4
+28/153876/campos_512_v4
+28/153879/campos_512_v4
+28/153884/campos_512_v4
+28/153888/campos_512_v4
+28/153893/campos_512_v4
+28/153914/campos_512_v4
+28/153917/campos_512_v4
+28/153919/campos_512_v4
+28/153927/campos_512_v4
+28/153942/campos_512_v4
+28/153958/campos_512_v4
+28/153968/campos_512_v4
+28/153975/campos_512_v4
+28/153978/campos_512_v4
+28/153983/campos_512_v4
+28/153993/campos_512_v4
+28/154007/campos_512_v4
+28/154013/campos_512_v4
+28/154014/campos_512_v4
+28/154022/campos_512_v4
+28/154039/campos_512_v4
+28/154074/campos_512_v4
+28/154084/campos_512_v4
+28/154096/campos_512_v4
+28/154098/campos_512_v4
+28/154106/campos_512_v4
+28/154135/campos_512_v4
+28/154156/campos_512_v4
+28/154172/campos_512_v4
+28/154175/campos_512_v4
+28/154179/campos_512_v4
+28/154196/campos_512_v4
+28/154242/campos_512_v4
+28/154251/campos_512_v4
+28/154252/campos_512_v4
+28/154270/campos_512_v4
+28/154283/campos_512_v4
+28/154288/campos_512_v4
+28/154299/campos_512_v4
+28/154307/campos_512_v4
+28/154309/campos_512_v4
+28/154316/campos_512_v4
+28/154319/campos_512_v4
+28/154323/campos_512_v4
+28/154331/campos_512_v4
+28/154336/campos_512_v4
+28/154337/campos_512_v4
+28/154339/campos_512_v4
+28/154357/campos_512_v4
+28/154367/campos_512_v4
+28/154376/campos_512_v4
+28/154378/campos_512_v4
+28/154380/campos_512_v4
+28/154397/campos_512_v4
+28/154404/campos_512_v4
+28/154410/campos_512_v4
+28/154428/campos_512_v4
+28/154429/campos_512_v4
+28/154461/campos_512_v4
+28/154465/campos_512_v4
+28/154466/campos_512_v4
+28/154502/campos_512_v4
+28/154517/campos_512_v4
+28/154521/campos_512_v4
+28/154542/campos_512_v4
+28/154546/campos_512_v4
+28/154555/campos_512_v4
+28/154558/campos_512_v4
+28/154572/campos_512_v4
+28/154587/campos_512_v4
+28/154597/campos_512_v4
+28/154598/campos_512_v4
+28/154604/campos_512_v4
+28/154617/campos_512_v4
+28/154633/campos_512_v4
+28/154636/campos_512_v4
+28/154637/campos_512_v4
+28/154639/campos_512_v4
+28/154661/campos_512_v4
+28/154671/campos_512_v4
+28/154680/campos_512_v4
+28/154683/campos_512_v4
+28/154689/campos_512_v4
+28/154701/campos_512_v4
+28/154705/campos_512_v4
+28/154716/campos_512_v4
+28/154719/campos_512_v4
+28/154720/campos_512_v4
+28/154722/campos_512_v4
+28/154723/campos_512_v4
+28/154730/campos_512_v4
+28/154731/campos_512_v4
+28/154747/campos_512_v4
+28/154749/campos_512_v4
+28/154753/campos_512_v4
+28/154759/campos_512_v4
+28/154776/campos_512_v4
+28/154780/campos_512_v4
+28/154782/campos_512_v4
+28/154787/campos_512_v4
+28/154792/campos_512_v4
+28/154813/campos_512_v4
+28/154818/campos_512_v4
+28/154820/campos_512_v4
+28/154821/campos_512_v4
+28/154839/campos_512_v4
+28/154840/campos_512_v4
+28/154845/campos_512_v4
+28/154850/campos_512_v4
+28/154857/campos_512_v4
+28/154858/campos_512_v4
+28/154863/campos_512_v4
+28/154866/campos_512_v4
+28/154869/campos_512_v4
+28/154884/campos_512_v4
+28/154891/campos_512_v4
+28/154918/campos_512_v4
+28/154922/campos_512_v4
+28/154930/campos_512_v4
+28/154937/campos_512_v4
+28/154942/campos_512_v4
+28/154946/campos_512_v4
+28/154950/campos_512_v4
+28/154954/campos_512_v4
+28/154956/campos_512_v4
+28/154959/campos_512_v4
+28/154964/campos_512_v4
+28/154974/campos_512_v4
+28/154989/campos_512_v4
+29/155006/campos_512_v4
+29/155014/campos_512_v4
+29/155026/campos_512_v4
+29/155031/campos_512_v4
+29/155035/campos_512_v4
+29/155036/campos_512_v4
+29/155039/campos_512_v4
+29/155042/campos_512_v4
+29/155059/campos_512_v4
+29/155064/campos_512_v4
+29/155067/campos_512_v4
+29/155068/campos_512_v4
+29/155072/campos_512_v4
+29/155074/campos_512_v4
+29/155083/campos_512_v4
+29/155085/campos_512_v4
+29/155087/campos_512_v4
+29/155093/campos_512_v4
+29/155098/campos_512_v4
+29/155104/campos_512_v4
+29/155110/campos_512_v4
+29/155121/campos_512_v4
+29/155158/campos_512_v4
+29/155163/campos_512_v4
+29/155167/campos_512_v4
+29/155176/campos_512_v4
+29/155177/campos_512_v4
+29/155184/campos_512_v4
+29/155191/campos_512_v4
+29/155218/campos_512_v4
+29/155225/campos_512_v4
+29/155229/campos_512_v4
+29/155242/campos_512_v4
+29/155245/campos_512_v4
+29/155253/campos_512_v4
+29/155290/campos_512_v4
+29/155293/campos_512_v4
+29/155294/campos_512_v4
+29/155296/campos_512_v4
+29/155297/campos_512_v4
+29/155315/campos_512_v4
+29/155329/campos_512_v4
+29/155334/campos_512_v4
+29/155343/campos_512_v4
+29/155370/campos_512_v4
+29/155375/campos_512_v4
+29/155377/campos_512_v4
+29/155379/campos_512_v4
+29/155380/campos_512_v4
+29/155385/campos_512_v4
+29/155404/campos_512_v4
+29/155406/campos_512_v4
+29/155416/campos_512_v4
+29/155419/campos_512_v4
+29/155425/campos_512_v4
+29/155447/campos_512_v4
+29/155449/campos_512_v4
+29/155452/campos_512_v4
+29/155454/campos_512_v4
+29/155460/campos_512_v4
+29/155466/campos_512_v4
+29/155472/campos_512_v4
+29/155481/campos_512_v4
+29/155483/campos_512_v4
+29/155490/campos_512_v4
+29/155495/campos_512_v4
+29/155503/campos_512_v4
+29/155512/campos_512_v4
+29/155515/campos_512_v4
+29/155516/campos_512_v4
+29/155545/campos_512_v4
+29/155551/campos_512_v4
+29/155553/campos_512_v4
+29/155554/campos_512_v4
+29/155559/campos_512_v4
+29/155561/campos_512_v4
+29/155564/campos_512_v4
+29/155567/campos_512_v4
+29/155576/campos_512_v4
+29/155593/campos_512_v4
+29/155598/campos_512_v4
+29/155599/campos_512_v4
+29/155603/campos_512_v4
+29/155605/campos_512_v4
+29/155631/campos_512_v4
+29/155636/campos_512_v4
+29/155652/campos_512_v4
+29/155659/campos_512_v4
+29/155665/campos_512_v4
+29/155672/campos_512_v4
+29/155681/campos_512_v4
+29/155686/campos_512_v4
+29/155687/campos_512_v4
+29/155702/campos_512_v4
+29/155705/campos_512_v4
+29/155712/campos_512_v4
+29/155720/campos_512_v4
+29/155721/campos_512_v4
+29/155731/campos_512_v4
+29/155738/campos_512_v4
+29/155739/campos_512_v4
+29/155742/campos_512_v4
+29/155743/campos_512_v4
+29/155745/campos_512_v4
+29/155746/campos_512_v4
+29/155748/campos_512_v4
+29/155757/campos_512_v4
+29/155768/campos_512_v4
+29/155770/campos_512_v4
+29/155771/campos_512_v4
+29/155772/campos_512_v4
+29/155786/campos_512_v4
+29/155796/campos_512_v4
+29/155804/campos_512_v4
+29/155820/campos_512_v4
+29/155822/campos_512_v4
+29/155828/campos_512_v4
+29/155848/campos_512_v4
+29/155851/campos_512_v4
+29/155882/campos_512_v4
+29/155886/campos_512_v4
+29/155887/campos_512_v4
+29/155889/campos_512_v4
+29/155890/campos_512_v4
+29/155893/campos_512_v4
+29/155894/campos_512_v4
+29/155905/campos_512_v4
+29/155914/campos_512_v4
+29/155915/campos_512_v4
+29/155916/campos_512_v4
+29/155931/campos_512_v4
+29/155935/campos_512_v4
+29/155939/campos_512_v4
+29/155948/campos_512_v4
+29/155957/campos_512_v4
+29/155959/campos_512_v4
+29/155966/campos_512_v4
+29/155967/campos_512_v4
+29/155979/campos_512_v4
+29/155990/campos_512_v4
+29/156010/campos_512_v4
+29/156026/campos_512_v4
+29/156028/campos_512_v4
+29/156037/campos_512_v4
+29/156046/campos_512_v4
+29/156056/campos_512_v4
+29/156076/campos_512_v4
+29/156077/campos_512_v4
+29/156098/campos_512_v4
+29/156099/campos_512_v4
+29/156125/campos_512_v4
+29/156127/campos_512_v4
+29/156136/campos_512_v4
+29/156138/campos_512_v4
+29/156149/campos_512_v4
+29/156156/campos_512_v4
+29/156172/campos_512_v4
+29/156179/campos_512_v4
+29/156206/campos_512_v4
+29/156226/campos_512_v4
+29/156263/campos_512_v4
+29/156268/campos_512_v4
+29/156272/campos_512_v4
+29/156278/campos_512_v4
+29/156279/campos_512_v4
+29/156291/campos_512_v4
+29/156292/campos_512_v4
+29/156303/campos_512_v4
+29/156312/campos_512_v4
+29/156330/campos_512_v4
+29/156340/campos_512_v4
+29/156342/campos_512_v4
+29/156350/campos_512_v4
+29/156351/campos_512_v4
+29/156353/campos_512_v4
+29/156354/campos_512_v4
+29/156359/campos_512_v4
+29/156365/campos_512_v4
+29/156368/campos_512_v4
+29/156369/campos_512_v4
+29/156370/campos_512_v4
+29/156376/campos_512_v4
+29/156381/campos_512_v4
+29/156385/campos_512_v4
+29/156397/campos_512_v4
+29/156404/campos_512_v4
+29/156419/campos_512_v4
+29/156445/campos_512_v4
+29/156446/campos_512_v4
+29/156450/campos_512_v4
+29/156452/campos_512_v4
+29/156455/campos_512_v4
+29/156477/campos_512_v4
+29/156489/campos_512_v4
+29/156500/campos_512_v4
+29/156501/campos_512_v4
+29/156504/campos_512_v4
+29/156505/campos_512_v4
+29/156507/campos_512_v4
+29/156513/campos_512_v4
+29/156517/campos_512_v4
+29/156536/campos_512_v4
+29/156544/campos_512_v4
+29/156553/campos_512_v4
+29/156559/campos_512_v4
+29/156564/campos_512_v4
+29/156576/campos_512_v4
+29/156583/campos_512_v4
+29/156586/campos_512_v4
+29/156591/campos_512_v4
+29/156626/campos_512_v4
+29/156641/campos_512_v4
+29/156650/campos_512_v4
+29/156652/campos_512_v4
+29/156662/campos_512_v4
+29/156670/campos_512_v4
+29/156676/campos_512_v4
+29/156700/campos_512_v4
+29/156711/campos_512_v4
+29/156714/campos_512_v4
+29/156726/campos_512_v4
+29/156736/campos_512_v4
+29/156739/campos_512_v4
+29/156754/campos_512_v4
+29/156758/campos_512_v4
+29/156772/campos_512_v4
+29/156775/campos_512_v4
+29/156777/campos_512_v4
+29/156778/campos_512_v4
+29/156779/campos_512_v4
+29/156785/campos_512_v4
+29/156795/campos_512_v4
+29/156809/campos_512_v4
+29/156810/campos_512_v4
+29/156820/campos_512_v4
+29/156842/campos_512_v4
+29/156844/campos_512_v4
+29/156847/campos_512_v4
+29/156851/campos_512_v4
+29/156864/campos_512_v4
+29/156874/campos_512_v4
+29/156876/campos_512_v4
+29/156894/campos_512_v4
+29/156897/campos_512_v4
+29/156910/campos_512_v4
+29/156915/campos_512_v4
+29/156921/campos_512_v4
+29/156927/campos_512_v4
+29/156943/campos_512_v4
+29/156954/campos_512_v4
+29/156966/campos_512_v4
+29/156967/campos_512_v4
+29/156976/campos_512_v4
+29/156977/campos_512_v4
+29/156978/campos_512_v4
+29/156981/campos_512_v4
+29/156983/campos_512_v4
+29/157003/campos_512_v4
+29/157004/campos_512_v4
+29/157013/campos_512_v4
+29/157014/campos_512_v4
+29/157016/campos_512_v4
+29/157046/campos_512_v4
+29/157062/campos_512_v4
+29/157064/campos_512_v4
+29/157073/campos_512_v4
+29/157076/campos_512_v4
+29/157078/campos_512_v4
+29/157079/campos_512_v4
+29/157103/campos_512_v4
+29/157119/campos_512_v4
+29/157147/campos_512_v4
+29/157162/campos_512_v4
+29/157170/campos_512_v4
+29/157187/campos_512_v4
+29/157196/campos_512_v4
+29/157199/campos_512_v4
+29/157207/campos_512_v4
+29/157216/campos_512_v4
+29/157225/campos_512_v4
+29/157226/campos_512_v4
+29/157238/campos_512_v4
+29/157245/campos_512_v4
+29/157278/campos_512_v4
+29/157280/campos_512_v4
+29/157305/campos_512_v4
+29/157316/campos_512_v4
+29/157318/campos_512_v4
+29/157332/campos_512_v4
+29/157334/campos_512_v4
+29/157345/campos_512_v4
+29/157358/campos_512_v4
+29/157387/campos_512_v4
+29/157389/campos_512_v4
+29/157403/campos_512_v4
+29/157404/campos_512_v4
+29/157405/campos_512_v4
+29/157425/campos_512_v4
+29/157432/campos_512_v4
+29/157433/campos_512_v4
+29/157440/campos_512_v4
+29/157442/campos_512_v4
+29/157449/campos_512_v4
+29/157451/campos_512_v4
+29/157462/campos_512_v4
+29/157465/campos_512_v4
+29/157466/campos_512_v4
+29/157468/campos_512_v4
+29/157472/campos_512_v4
+29/157474/campos_512_v4
+29/157483/campos_512_v4
+29/157490/campos_512_v4
+29/157497/campos_512_v4
+29/157508/campos_512_v4
+29/157510/campos_512_v4
+29/157519/campos_512_v4
+29/157525/campos_512_v4
+29/157531/campos_512_v4
+29/157554/campos_512_v4
+29/157566/campos_512_v4
+29/157574/campos_512_v4
+29/157588/campos_512_v4
+29/157594/campos_512_v4
+29/157598/campos_512_v4
+29/157600/campos_512_v4
+29/157619/campos_512_v4
+29/157627/campos_512_v4
+29/157631/campos_512_v4
+29/157636/campos_512_v4
+29/157640/campos_512_v4
+29/157645/campos_512_v4
+29/157647/campos_512_v4
+29/157648/campos_512_v4
+29/157653/campos_512_v4
+29/157660/campos_512_v4
+29/157664/campos_512_v4
+29/157671/campos_512_v4
+29/157672/campos_512_v4
+29/157680/campos_512_v4
+29/157686/campos_512_v4
+29/157691/campos_512_v4
+29/157714/campos_512_v4
+29/157716/campos_512_v4
+29/157742/campos_512_v4
+29/157751/campos_512_v4
+29/157755/campos_512_v4
+29/157759/campos_512_v4
+29/157761/campos_512_v4
+29/157769/campos_512_v4
+29/157777/campos_512_v4
+29/157782/campos_512_v4
+29/157783/campos_512_v4
+29/157789/campos_512_v4
+29/157792/campos_512_v4
+29/157796/campos_512_v4
+29/157799/campos_512_v4
+29/157822/campos_512_v4
+29/157832/campos_512_v4
+29/157867/campos_512_v4
+29/157870/campos_512_v4
+29/157871/campos_512_v4
+29/157872/campos_512_v4
+29/157882/campos_512_v4
+29/157897/campos_512_v4
+29/157900/campos_512_v4
+29/157907/campos_512_v4
+29/157912/campos_512_v4
+29/157914/campos_512_v4
+29/157927/campos_512_v4
+29/157935/campos_512_v4
+29/157960/campos_512_v4
+29/157975/campos_512_v4
+29/157977/campos_512_v4
+29/157979/campos_512_v4
+29/157988/campos_512_v4
+29/157992/campos_512_v4
+29/157997/campos_512_v4
+29/158023/campos_512_v4
+29/158030/campos_512_v4
+29/158048/campos_512_v4
+29/158063/campos_512_v4
+29/158072/campos_512_v4
+29/158073/campos_512_v4
+29/158087/campos_512_v4
+29/158099/campos_512_v4
+29/158107/campos_512_v4
+29/158116/campos_512_v4
+29/158125/campos_512_v4
+29/158136/campos_512_v4
+29/158138/campos_512_v4
+29/158148/campos_512_v4
+29/158162/campos_512_v4
+29/158167/campos_512_v4
+29/158168/campos_512_v4
+29/158171/campos_512_v4
+29/158173/campos_512_v4
+29/158176/campos_512_v4
+29/158180/campos_512_v4
+29/158188/campos_512_v4
+29/158189/campos_512_v4
+29/158208/campos_512_v4
+29/158211/campos_512_v4
+29/158213/campos_512_v4
+29/158227/campos_512_v4
+29/158231/campos_512_v4
+29/158246/campos_512_v4
+29/158255/campos_512_v4
+29/158267/campos_512_v4
+29/158270/campos_512_v4
+29/158273/campos_512_v4
+29/158275/campos_512_v4
+29/158278/campos_512_v4
+29/158283/campos_512_v4
+29/158288/campos_512_v4
+29/158294/campos_512_v4
+29/158304/campos_512_v4
+29/158306/campos_512_v4
+29/158307/campos_512_v4
+29/158316/campos_512_v4
+29/158325/campos_512_v4
+29/158336/campos_512_v4
+29/158340/campos_512_v4
+29/158347/campos_512_v4
+29/158350/campos_512_v4
+29/158357/campos_512_v4
+29/158359/campos_512_v4
+29/158362/campos_512_v4
+29/158366/campos_512_v4
+29/158367/campos_512_v4
+29/158370/campos_512_v4
+29/158378/campos_512_v4
+29/158381/campos_512_v4
+29/158384/campos_512_v4
+29/158402/campos_512_v4
+29/158407/campos_512_v4
+29/158412/campos_512_v4
+29/158413/campos_512_v4
+29/158424/campos_512_v4
+29/158428/campos_512_v4
+29/158431/campos_512_v4
+29/158432/campos_512_v4
+29/158433/campos_512_v4
+29/158461/campos_512_v4
+29/158476/campos_512_v4
+29/158480/campos_512_v4
+29/158484/campos_512_v4
+29/158486/campos_512_v4
+29/158525/campos_512_v4
+29/158538/campos_512_v4
+29/158539/campos_512_v4
+29/158556/campos_512_v4
+29/158563/campos_512_v4
+29/158564/campos_512_v4
+29/158573/campos_512_v4
+29/158577/campos_512_v4
+29/158580/campos_512_v4
+29/158581/campos_512_v4
+29/158582/campos_512_v4
+29/158589/campos_512_v4
+29/158611/campos_512_v4
+29/158613/campos_512_v4
+29/158619/campos_512_v4
+29/158627/campos_512_v4
+29/158628/campos_512_v4
+29/158632/campos_512_v4
+29/158653/campos_512_v4
+29/158676/campos_512_v4
+29/158682/campos_512_v4
+29/158691/campos_512_v4
+29/158698/campos_512_v4
+29/158714/campos_512_v4
+29/158741/campos_512_v4
+29/158756/campos_512_v4
+29/158779/campos_512_v4
+29/158780/campos_512_v4
+29/158781/campos_512_v4
+29/158784/campos_512_v4
+29/158789/campos_512_v4
+29/158790/campos_512_v4
+29/158803/campos_512_v4
+29/158805/campos_512_v4
+29/158806/campos_512_v4
+29/158808/campos_512_v4
+29/158825/campos_512_v4
+29/158830/campos_512_v4
+29/158850/campos_512_v4
+29/158864/campos_512_v4
+29/158870/campos_512_v4
+29/158880/campos_512_v4
+29/158884/campos_512_v4
+29/158892/campos_512_v4
+29/158894/campos_512_v4
+29/158899/campos_512_v4
+29/158900/campos_512_v4
+29/158901/campos_512_v4
+29/158907/campos_512_v4
+29/158913/campos_512_v4
+29/158915/campos_512_v4
+29/158917/campos_512_v4
+29/158919/campos_512_v4
+29/158921/campos_512_v4
+29/158924/campos_512_v4
+29/158927/campos_512_v4
+29/158942/campos_512_v4
+29/158948/campos_512_v4
+29/158957/campos_512_v4
+29/158970/campos_512_v4
+29/158980/campos_512_v4
+29/158987/campos_512_v4
+29/158999/campos_512_v4
+29/159008/campos_512_v4
+29/159012/campos_512_v4
+29/159022/campos_512_v4
+29/159023/campos_512_v4
+29/159029/campos_512_v4
+29/159041/campos_512_v4
+29/159044/campos_512_v4
+29/159068/campos_512_v4
+29/159082/campos_512_v4
+29/159086/campos_512_v4
+29/159091/campos_512_v4
+29/159094/campos_512_v4
+29/159103/campos_512_v4
+29/159108/campos_512_v4
+29/159109/campos_512_v4
+29/159125/campos_512_v4
+29/159130/campos_512_v4
+29/159143/campos_512_v4
+29/159156/campos_512_v4
+29/159194/campos_512_v4
+29/159205/campos_512_v4
+29/159215/campos_512_v4
+29/159220/campos_512_v4
+29/159239/campos_512_v4
+29/159245/campos_512_v4
+29/159269/campos_512_v4
+29/159272/campos_512_v4
+29/159288/campos_512_v4
+29/159302/campos_512_v4
+29/159305/campos_512_v4
+29/159310/campos_512_v4
+29/159321/campos_512_v4
+29/159329/campos_512_v4
+29/159343/campos_512_v4
+29/159362/campos_512_v4
+29/159365/campos_512_v4
+29/159379/campos_512_v4
+29/159395/campos_512_v4
+29/159396/campos_512_v4
+29/159402/campos_512_v4
+29/159417/campos_512_v4
+29/159440/campos_512_v4
+29/159451/campos_512_v4
+29/159458/campos_512_v4
+29/159463/campos_512_v4
+29/159474/campos_512_v4
+29/159485/campos_512_v4
+29/159486/campos_512_v4
+29/159489/campos_512_v4
+29/159490/campos_512_v4
+29/159491/campos_512_v4
+29/159495/campos_512_v4
+29/159496/campos_512_v4
+29/159504/campos_512_v4
+29/159508/campos_512_v4
+29/159510/campos_512_v4
+29/159514/campos_512_v4
+29/159518/campos_512_v4
+29/159524/campos_512_v4
+29/159535/campos_512_v4
+29/159567/campos_512_v4
+29/159570/campos_512_v4
+29/159572/campos_512_v4
+29/159573/campos_512_v4
+29/159576/campos_512_v4
+29/159605/campos_512_v4
+29/159621/campos_512_v4
+29/159623/campos_512_v4
+29/159641/campos_512_v4
+29/159644/campos_512_v4
+29/159665/campos_512_v4
+29/159673/campos_512_v4
+29/159675/campos_512_v4
+29/159677/campos_512_v4
+29/159705/campos_512_v4
+29/159711/campos_512_v4
+29/159716/campos_512_v4
+29/159724/campos_512_v4
+29/159738/campos_512_v4
+29/159739/campos_512_v4
+29/159744/campos_512_v4
+29/159750/campos_512_v4
+29/159755/campos_512_v4
+29/159759/campos_512_v4
+29/159777/campos_512_v4
+29/159778/campos_512_v4
+29/159780/campos_512_v4
+29/159787/campos_512_v4
+29/159797/campos_512_v4
+29/159804/campos_512_v4
+29/159809/campos_512_v4
+29/159810/campos_512_v4
+29/159817/campos_512_v4
+29/159818/campos_512_v4
+29/159842/campos_512_v4
+29/159848/campos_512_v4
+29/159850/campos_512_v4
+29/159881/campos_512_v4
+29/159886/campos_512_v4
+29/159903/campos_512_v4
+29/159936/campos_512_v4
+29/159937/campos_512_v4
+29/159938/campos_512_v4
+29/159945/campos_512_v4
+29/159973/campos_512_v4
+29/159976/campos_512_v4
+29/159985/campos_512_v4
+29/159986/campos_512_v4
+29/159990/campos_512_v4
+29/159995/campos_512_v4
+29/160001/campos_512_v4
+30/160006/campos_512_v4
+30/160016/campos_512_v4
+30/160019/campos_512_v4
+30/160027/campos_512_v4
+30/160036/campos_512_v4
+30/160040/campos_512_v4
+30/160050/campos_512_v4
+30/160051/campos_512_v4
+30/160054/campos_512_v4
+30/160068/campos_512_v4
+30/160082/campos_512_v4
+30/160111/campos_512_v4
+30/160115/campos_512_v4
+30/160127/campos_512_v4
+30/160143/campos_512_v4
+30/160155/campos_512_v4
+30/160157/campos_512_v4
+30/160176/campos_512_v4
+30/160180/campos_512_v4
+30/160181/campos_512_v4
+30/160198/campos_512_v4
+30/160224/campos_512_v4
+30/160225/campos_512_v4
+30/160227/campos_512_v4
+30/160231/campos_512_v4
+30/160233/campos_512_v4
+30/160238/campos_512_v4
+30/160262/campos_512_v4
+30/160263/campos_512_v4
+30/160269/campos_512_v4
+30/160271/campos_512_v4
+30/160272/campos_512_v4
+30/160279/campos_512_v4
+30/160285/campos_512_v4
+30/160288/campos_512_v4
+30/160298/campos_512_v4
+30/160299/campos_512_v4
+30/160308/campos_512_v4
+30/160311/campos_512_v4
+30/160319/campos_512_v4
+30/160336/campos_512_v4
+30/160338/campos_512_v4
+30/160344/campos_512_v4
+30/160349/campos_512_v4
+30/160372/campos_512_v4
+30/160391/campos_512_v4
+30/160399/campos_512_v4
+30/160401/campos_512_v4
+30/160409/campos_512_v4
+30/160415/campos_512_v4
+30/160430/campos_512_v4
+30/160454/campos_512_v4
+30/160455/campos_512_v4
+30/160464/campos_512_v4
+30/160466/campos_512_v4
+30/160468/campos_512_v4
+30/160478/campos_512_v4
+30/160489/campos_512_v4
+30/160490/campos_512_v4
+30/160509/campos_512_v4
+30/160518/campos_512_v4
+30/160526/campos_512_v4
+30/160538/campos_512_v4
+30/160542/campos_512_v4
+30/160544/campos_512_v4
+30/160550/campos_512_v4
+30/160556/campos_512_v4
+30/160557/campos_512_v4
+30/160561/campos_512_v4
+30/160573/campos_512_v4
+30/160580/campos_512_v4
+30/160581/campos_512_v4
+30/160584/campos_512_v4
+30/160595/campos_512_v4
+30/160597/campos_512_v4
+30/160604/campos_512_v4
+30/160615/campos_512_v4
+30/160619/campos_512_v4
+30/160622/campos_512_v4
+30/160623/campos_512_v4
+30/160630/campos_512_v4
+30/160640/campos_512_v4
+30/160644/campos_512_v4
+30/160650/campos_512_v4
+30/160655/campos_512_v4
+30/160666/campos_512_v4
+30/160670/campos_512_v4
+30/160680/campos_512_v4
+30/160681/campos_512_v4
+30/160684/campos_512_v4
+30/160690/campos_512_v4
+30/160701/campos_512_v4
+30/160704/campos_512_v4
+30/160709/campos_512_v4
+30/160712/campos_512_v4
+30/160713/campos_512_v4
+30/160717/campos_512_v4
+30/160721/campos_512_v4
+30/160722/campos_512_v4
+30/160735/campos_512_v4
+30/160746/campos_512_v4
+30/160747/campos_512_v4
+30/160789/campos_512_v4
+30/160794/campos_512_v4
+30/160816/campos_512_v4
+30/160828/campos_512_v4
+30/160863/campos_512_v4
+30/160864/campos_512_v4
+30/160867/campos_512_v4
+30/160871/campos_512_v4
+30/160872/campos_512_v4
+30/160874/campos_512_v4
+30/160877/campos_512_v4
+30/160906/campos_512_v4
+30/160908/campos_512_v4
+30/160909/campos_512_v4
+30/160917/campos_512_v4
+30/160922/campos_512_v4
+30/160926/campos_512_v4
+30/160934/campos_512_v4
+30/160941/campos_512_v4
+30/160949/campos_512_v4
+30/160950/campos_512_v4
+30/160951/campos_512_v4
+30/160975/campos_512_v4
+30/160983/campos_512_v4
+30/160998/campos_512_v4
+30/161011/campos_512_v4
+30/161015/campos_512_v4
+30/161030/campos_512_v4
+30/161031/campos_512_v4
+30/161038/campos_512_v4
+30/161039/campos_512_v4
+30/161040/campos_512_v4
+30/161047/campos_512_v4
+30/161060/campos_512_v4
+30/161066/campos_512_v4
+30/161067/campos_512_v4
+30/161071/campos_512_v4
+30/161078/campos_512_v4
+30/161105/campos_512_v4
+30/161120/campos_512_v4
+30/161126/campos_512_v4
+30/161131/campos_512_v4
+30/161135/campos_512_v4
+30/161144/campos_512_v4
+30/161148/campos_512_v4
+30/161151/campos_512_v4
+30/161156/campos_512_v4
+30/161159/campos_512_v4
+30/161165/campos_512_v4
+30/161167/campos_512_v4
+30/161168/campos_512_v4
+30/161173/campos_512_v4
+30/161175/campos_512_v4
+30/161197/campos_512_v4
+30/161201/campos_512_v4
+30/161204/campos_512_v4
+30/161216/campos_512_v4
+30/161224/campos_512_v4
+30/161232/campos_512_v4
+30/161242/campos_512_v4
+30/161267/campos_512_v4
+30/161268/campos_512_v4
+30/161277/campos_512_v4
+30/161298/campos_512_v4
+30/161308/campos_512_v4
+30/161313/campos_512_v4
+30/161332/campos_512_v4
+30/161333/campos_512_v4
+30/161359/campos_512_v4
+30/161366/campos_512_v4
+30/161372/campos_512_v4
+30/161374/campos_512_v4
+30/161376/campos_512_v4
+30/161381/campos_512_v4
+30/161385/campos_512_v4
+30/161388/campos_512_v4
+30/161391/campos_512_v4
+30/161399/campos_512_v4
+30/161401/campos_512_v4
+30/161404/campos_512_v4
+30/161407/campos_512_v4
+30/161411/campos_512_v4
+30/161419/campos_512_v4
+30/161420/campos_512_v4
+30/161433/campos_512_v4
+30/161439/campos_512_v4
+30/161441/campos_512_v4
+30/161446/campos_512_v4
+30/161455/campos_512_v4
+30/161458/campos_512_v4
+30/161459/campos_512_v4
+30/161460/campos_512_v4
+30/161462/campos_512_v4
+30/161478/campos_512_v4
+30/161479/campos_512_v4
+30/161491/campos_512_v4
+30/161497/campos_512_v4
+30/161502/campos_512_v4
+30/161517/campos_512_v4
+30/161540/campos_512_v4
+30/161542/campos_512_v4
+30/161544/campos_512_v4
+30/161548/campos_512_v4
+30/161553/campos_512_v4
+30/161559/campos_512_v4
+30/161579/campos_512_v4
+30/161587/campos_512_v4
+30/161593/campos_512_v4
+30/161596/campos_512_v4
+30/161599/campos_512_v4
+30/161604/campos_512_v4
+30/161618/campos_512_v4
+30/161619/campos_512_v4
+30/161621/campos_512_v4
+30/161626/campos_512_v4
+30/161627/campos_512_v4
+30/161634/campos_512_v4
+30/161648/campos_512_v4
+30/161650/campos_512_v4
+30/161651/campos_512_v4
+30/161671/campos_512_v4
+30/161694/campos_512_v4
+30/161696/campos_512_v4
+30/161702/campos_512_v4
+30/161714/campos_512_v4
+30/161718/campos_512_v4
+30/161721/campos_512_v4
+30/161728/campos_512_v4
+30/161731/campos_512_v4
+30/161744/campos_512_v4
+30/161745/campos_512_v4
+30/161746/campos_512_v4
+30/161761/campos_512_v4
+30/161764/campos_512_v4
+30/161765/campos_512_v4
+30/161767/campos_512_v4
+30/161774/campos_512_v4
+30/161776/campos_512_v4
+30/161790/campos_512_v4
+30/161815/campos_512_v4
+30/161833/campos_512_v4
+30/161834/campos_512_v4
+30/161846/campos_512_v4
+30/161850/campos_512_v4
+30/161854/campos_512_v4
+30/161864/campos_512_v4
+30/161879/campos_512_v4
+30/161901/campos_512_v4
+30/161908/campos_512_v4
+30/161911/campos_512_v4
+30/161920/campos_512_v4
+30/161928/campos_512_v4
+30/161936/campos_512_v4
+30/161960/campos_512_v4
+30/161965/campos_512_v4
+30/161986/campos_512_v4
+30/161990/campos_512_v4
+30/161995/campos_512_v4
+30/162002/campos_512_v4
+30/162012/campos_512_v4
+30/162016/campos_512_v4
+30/162019/campos_512_v4
+30/162032/campos_512_v4
+30/162040/campos_512_v4
+30/162041/campos_512_v4
+30/162043/campos_512_v4
+30/162048/campos_512_v4
+30/162050/campos_512_v4
+30/162054/campos_512_v4
+30/162055/campos_512_v4
+30/162061/campos_512_v4
+30/162063/campos_512_v4
+30/162077/campos_512_v4
+30/162078/campos_512_v4
+30/162079/campos_512_v4
+30/162090/campos_512_v4
+30/162092/campos_512_v4
+30/162098/campos_512_v4
+30/162100/campos_512_v4
+30/162107/campos_512_v4
+30/162110/campos_512_v4
+30/162115/campos_512_v4
+30/162119/campos_512_v4
+30/162125/campos_512_v4
+30/162127/campos_512_v4
+30/162129/campos_512_v4
+30/162140/campos_512_v4
+30/162143/campos_512_v4
+30/162144/campos_512_v4
+30/162146/campos_512_v4
+30/162151/campos_512_v4
+30/162164/campos_512_v4
+30/162172/campos_512_v4
+30/162180/campos_512_v4
+30/162188/campos_512_v4
+30/162189/campos_512_v4
+30/162191/campos_512_v4
+30/162196/campos_512_v4
+30/162197/campos_512_v4
+30/162198/campos_512_v4
+30/162215/campos_512_v4
+30/162216/campos_512_v4
+30/162223/campos_512_v4
+30/162235/campos_512_v4
+30/162265/campos_512_v4
+30/162272/campos_512_v4
+30/162279/campos_512_v4
+30/162283/campos_512_v4
+30/162284/campos_512_v4
+30/162293/campos_512_v4
+30/162297/campos_512_v4
+30/162304/campos_512_v4
+30/162306/campos_512_v4
+30/162315/campos_512_v4
+30/162318/campos_512_v4
+30/162320/campos_512_v4
+30/162322/campos_512_v4
+30/162327/campos_512_v4
+30/162346/campos_512_v4
+30/162353/campos_512_v4
+30/162354/campos_512_v4
+30/162361/campos_512_v4
+30/162386/campos_512_v4
+30/162387/campos_512_v4
+30/162419/campos_512_v4
+30/162422/campos_512_v4
+30/162424/campos_512_v4
+30/162425/campos_512_v4
+30/162430/campos_512_v4
+30/162434/campos_512_v4
+30/162436/campos_512_v4
+30/162439/campos_512_v4
+30/162441/campos_512_v4
+30/162445/campos_512_v4
+30/162449/campos_512_v4
+30/162450/campos_512_v4
+30/162454/campos_512_v4
+30/162457/campos_512_v4
+30/162467/campos_512_v4
+30/162469/campos_512_v4
+30/162477/campos_512_v4
+30/162480/campos_512_v4
+30/162506/campos_512_v4
+30/162514/campos_512_v4
+30/162529/campos_512_v4
+30/162533/campos_512_v4
+30/162534/campos_512_v4
+30/162545/campos_512_v4
+30/162554/campos_512_v4
+30/162560/campos_512_v4
+30/162564/campos_512_v4
+30/162569/campos_512_v4
+30/162571/campos_512_v4
+30/162603/campos_512_v4
+30/162606/campos_512_v4
+30/162618/campos_512_v4
+30/162619/campos_512_v4
+30/162628/campos_512_v4
+30/162636/campos_512_v4
+30/162642/campos_512_v4
+30/162652/campos_512_v4
+30/162653/campos_512_v4
+30/162656/campos_512_v4
+30/162671/campos_512_v4
+30/162673/campos_512_v4
+30/162676/campos_512_v4
+30/162678/campos_512_v4
+30/162686/campos_512_v4
+30/162696/campos_512_v4
+30/162713/campos_512_v4
+30/162728/campos_512_v4
+30/162732/campos_512_v4
+30/162756/campos_512_v4
+30/162760/campos_512_v4
+30/162767/campos_512_v4
+30/162775/campos_512_v4
+30/162776/campos_512_v4
+30/162785/campos_512_v4
+30/162796/campos_512_v4
+30/162805/campos_512_v4
+30/162809/campos_512_v4
+30/162813/campos_512_v4
+30/162823/campos_512_v4
+30/162835/campos_512_v4
+30/162838/campos_512_v4
+30/162841/campos_512_v4
+30/162845/campos_512_v4
+30/162853/campos_512_v4
+30/162855/campos_512_v4
+30/162862/campos_512_v4
+30/162871/campos_512_v4
+30/162884/campos_512_v4
+30/162907/campos_512_v4
+30/162911/campos_512_v4
+30/162922/campos_512_v4
+30/162923/campos_512_v4
+30/162924/campos_512_v4
+30/162926/campos_512_v4
+30/162929/campos_512_v4
+30/162936/campos_512_v4
+30/162944/campos_512_v4
+30/162954/campos_512_v4
+30/162956/campos_512_v4
+30/162959/campos_512_v4
+30/162974/campos_512_v4
+30/162980/campos_512_v4
+30/162991/campos_512_v4
+30/162999/campos_512_v4
+30/163003/campos_512_v4
+30/163009/campos_512_v4
+30/163015/campos_512_v4
+30/163033/campos_512_v4
+30/163034/campos_512_v4
+30/163044/campos_512_v4
+30/163054/campos_512_v4
+30/163067/campos_512_v4
+30/163068/campos_512_v4
+30/163069/campos_512_v4
+30/163083/campos_512_v4
+30/163086/campos_512_v4
+30/163088/campos_512_v4
+30/163096/campos_512_v4
+30/163099/campos_512_v4
+30/163103/campos_512_v4
+30/163105/campos_512_v4
+30/163112/campos_512_v4
+30/163121/campos_512_v4
+30/163130/campos_512_v4
+30/163133/campos_512_v4
+30/163134/campos_512_v4
+30/163156/campos_512_v4
+30/163160/campos_512_v4
+30/163167/campos_512_v4
+30/163168/campos_512_v4
+30/163178/campos_512_v4
+30/163180/campos_512_v4
+30/163184/campos_512_v4
+30/163194/campos_512_v4
+30/163195/campos_512_v4
+30/163202/campos_512_v4
+30/163205/campos_512_v4
+30/163221/campos_512_v4
+30/163226/campos_512_v4
+30/163232/campos_512_v4
+30/163234/campos_512_v4
+30/163243/campos_512_v4
+30/163259/campos_512_v4
+30/163267/campos_512_v4
+30/163269/campos_512_v4
+30/163272/campos_512_v4
+30/163289/campos_512_v4
+30/163295/campos_512_v4
+30/163297/campos_512_v4
+30/163326/campos_512_v4
+30/163368/campos_512_v4
+30/163372/campos_512_v4
+30/163375/campos_512_v4
+30/163383/campos_512_v4
+30/163384/campos_512_v4
+30/163385/campos_512_v4
+30/163387/campos_512_v4
+30/163393/campos_512_v4
+30/163394/campos_512_v4
+30/163398/campos_512_v4
+30/163403/campos_512_v4
+30/163406/campos_512_v4
+30/163408/campos_512_v4
+30/163409/campos_512_v4
+30/163411/campos_512_v4
+30/163423/campos_512_v4
+30/163438/campos_512_v4
+30/163468/campos_512_v4
+30/163471/campos_512_v4
+30/163501/campos_512_v4
+30/163511/campos_512_v4
+30/163518/campos_512_v4
+30/163521/campos_512_v4
+30/163525/campos_512_v4
+30/163534/campos_512_v4
+30/163547/campos_512_v4
+30/163548/campos_512_v4
+30/163576/campos_512_v4
+30/163591/campos_512_v4
+30/163602/campos_512_v4
+30/163603/campos_512_v4
+30/163604/campos_512_v4
+30/163609/campos_512_v4
+30/163625/campos_512_v4
+30/163640/campos_512_v4
+30/163646/campos_512_v4
+30/163658/campos_512_v4
+30/163662/campos_512_v4
+30/163665/campos_512_v4
+30/163666/campos_512_v4
+30/163671/campos_512_v4
+30/163675/campos_512_v4
+30/163690/campos_512_v4
+30/163691/campos_512_v4
+30/163692/campos_512_v4
+30/163697/campos_512_v4
+30/163701/campos_512_v4
+30/163718/campos_512_v4
+30/163721/campos_512_v4
+30/163728/campos_512_v4
+30/163732/campos_512_v4
+30/163744/campos_512_v4
+30/163746/campos_512_v4
+30/163749/campos_512_v4
+30/163753/campos_512_v4
+30/163760/campos_512_v4
+30/163773/campos_512_v4
+30/163775/campos_512_v4
+30/163778/campos_512_v4
+30/163781/campos_512_v4
+30/163799/campos_512_v4
+30/163806/campos_512_v4
+30/163808/campos_512_v4
+30/163809/campos_512_v4
+30/163826/campos_512_v4
+30/163841/campos_512_v4
+30/163850/campos_512_v4
+30/163868/campos_512_v4
+30/163874/campos_512_v4
+30/163875/campos_512_v4
+30/163899/campos_512_v4
+30/163917/campos_512_v4
+30/163921/campos_512_v4
+30/163924/campos_512_v4
+30/163935/campos_512_v4
+30/163942/campos_512_v4
+30/163945/campos_512_v4
+30/163948/campos_512_v4
+30/163949/campos_512_v4
+30/163950/campos_512_v4
+30/163954/campos_512_v4
+30/163972/campos_512_v4
+30/163973/campos_512_v4
+30/163975/campos_512_v4
+30/163976/campos_512_v4
+30/163978/campos_512_v4
+30/163985/campos_512_v4
+30/163990/campos_512_v4
+30/163992/campos_512_v4
+30/163998/campos_512_v4
+30/164008/campos_512_v4
+30/164020/campos_512_v4
+30/164025/campos_512_v4
+30/164030/campos_512_v4
+30/164041/campos_512_v4
+30/164043/campos_512_v4
+30/164049/campos_512_v4
+30/164050/campos_512_v4
+30/164054/campos_512_v4
+30/164055/campos_512_v4
+30/164057/campos_512_v4
+30/164067/campos_512_v4
+30/164071/campos_512_v4
+30/164089/campos_512_v4
+30/164093/campos_512_v4
+30/164109/campos_512_v4
+30/164110/campos_512_v4
+30/164114/campos_512_v4
+30/164125/campos_512_v4
+30/164149/campos_512_v4
+30/164154/campos_512_v4
+30/164157/campos_512_v4
+30/164163/campos_512_v4
+30/164169/campos_512_v4
+30/164174/campos_512_v4
+30/164178/campos_512_v4
+30/164182/campos_512_v4
+30/164188/campos_512_v4
+30/164190/campos_512_v4
+30/164197/campos_512_v4
+30/164207/campos_512_v4
+30/164209/campos_512_v4
+30/164211/campos_512_v4
+30/164219/campos_512_v4
+30/164221/campos_512_v4
+30/164235/campos_512_v4
+30/164238/campos_512_v4
+30/164239/campos_512_v4
+30/164241/campos_512_v4
+30/164242/campos_512_v4
+30/164245/campos_512_v4
+30/164259/campos_512_v4
+30/164270/campos_512_v4
+30/164291/campos_512_v4
+30/164307/campos_512_v4
+30/164309/campos_512_v4
+30/164312/campos_512_v4
+30/164313/campos_512_v4
+30/164317/campos_512_v4
+30/164319/campos_512_v4
+30/164342/campos_512_v4
+30/164349/campos_512_v4
+30/164352/campos_512_v4
+30/164372/campos_512_v4
+30/164373/campos_512_v4
+30/164379/campos_512_v4
+30/164383/campos_512_v4
+30/164385/campos_512_v4
+30/164386/campos_512_v4
+30/164392/campos_512_v4
+30/164398/campos_512_v4
+30/164402/campos_512_v4
+30/164408/campos_512_v4
+30/164412/campos_512_v4
+30/164414/campos_512_v4
+30/164418/campos_512_v4
+30/164423/campos_512_v4
+30/164432/campos_512_v4
+30/164437/campos_512_v4
+30/164439/campos_512_v4
+30/164449/campos_512_v4
+30/164450/campos_512_v4
+30/164462/campos_512_v4
+30/164465/campos_512_v4
+30/164478/campos_512_v4
+30/164480/campos_512_v4
+30/164488/campos_512_v4
+30/164494/campos_512_v4
+30/164504/campos_512_v4
+30/164510/campos_512_v4
+30/164514/campos_512_v4
+30/164526/campos_512_v4
+30/164539/campos_512_v4
+30/164543/campos_512_v4
+30/164548/campos_512_v4
+30/164566/campos_512_v4
+30/164581/campos_512_v4
+30/164584/campos_512_v4
+30/164588/campos_512_v4
+30/164592/campos_512_v4
+30/164602/campos_512_v4
+30/164612/campos_512_v4
+30/164620/campos_512_v4
+30/164621/campos_512_v4
+30/164645/campos_512_v4
+30/164650/campos_512_v4
+30/164651/campos_512_v4
+30/164673/campos_512_v4
+30/164681/campos_512_v4
+30/164699/campos_512_v4
+30/164710/campos_512_v4
+30/164711/campos_512_v4
+30/164719/campos_512_v4
+30/164723/campos_512_v4
+30/164743/campos_512_v4
+30/164748/campos_512_v4
+30/164760/campos_512_v4
+30/164765/campos_512_v4
+30/164780/campos_512_v4
+30/164791/campos_512_v4
+30/164793/campos_512_v4
+30/164807/campos_512_v4
+30/164808/campos_512_v4
+30/164814/campos_512_v4
+30/164818/campos_512_v4
+30/164820/campos_512_v4
+30/164826/campos_512_v4
+30/164849/campos_512_v4
+30/164850/campos_512_v4
+30/164855/campos_512_v4
+30/164858/campos_512_v4
+30/164866/campos_512_v4
+30/164873/campos_512_v4
+30/164884/campos_512_v4
+30/164887/campos_512_v4
+30/164895/campos_512_v4
+30/164899/campos_512_v4
+30/164917/campos_512_v4
+30/164918/campos_512_v4
+30/164927/campos_512_v4
+30/164931/campos_512_v4
+30/164935/campos_512_v4
+30/164939/campos_512_v4
+30/164942/campos_512_v4
+30/164949/campos_512_v4
+30/164959/campos_512_v4
+30/164966/campos_512_v4
+30/164975/campos_512_v4
+30/164976/campos_512_v4
+30/164985/campos_512_v4
+30/164994/campos_512_v4
+30/164995/campos_512_v4
+30/164998/campos_512_v4
+31/165004/campos_512_v4
+31/165018/campos_512_v4
+31/165044/campos_512_v4
+31/165055/campos_512_v4
+31/165058/campos_512_v4
+31/165066/campos_512_v4
+31/165082/campos_512_v4
+31/165085/campos_512_v4
+31/165095/campos_512_v4
+31/165113/campos_512_v4
+31/165128/campos_512_v4
+31/165130/campos_512_v4
+31/165132/campos_512_v4
+31/165147/campos_512_v4
+31/165152/campos_512_v4
+31/165160/campos_512_v4
+31/165185/campos_512_v4
+31/165202/campos_512_v4
+31/165211/campos_512_v4
+31/165237/campos_512_v4
+31/165239/campos_512_v4
+31/165243/campos_512_v4
+31/165249/campos_512_v4
+31/165250/campos_512_v4
+31/165253/campos_512_v4
+31/165255/campos_512_v4
+31/165261/campos_512_v4
+31/165269/campos_512_v4
+31/165271/campos_512_v4
+31/165278/campos_512_v4
+31/165290/campos_512_v4
+31/165297/campos_512_v4
+31/165302/campos_512_v4
+31/165306/campos_512_v4
+31/165311/campos_512_v4
+31/165314/campos_512_v4
+31/165315/campos_512_v4
+31/165317/campos_512_v4
+31/165318/campos_512_v4
+31/165320/campos_512_v4
+31/165325/campos_512_v4
+31/165377/campos_512_v4
+31/165378/campos_512_v4
+31/165382/campos_512_v4
+31/165409/campos_512_v4
+31/165416/campos_512_v4
+31/165417/campos_512_v4
+31/165422/campos_512_v4
+31/165423/campos_512_v4
+31/165430/campos_512_v4
+31/165447/campos_512_v4
+31/165452/campos_512_v4
+31/165473/campos_512_v4
+31/165477/campos_512_v4
+31/165486/campos_512_v4
+31/165490/campos_512_v4
+31/165495/campos_512_v4
+31/165504/campos_512_v4
+31/165507/campos_512_v4
+31/165518/campos_512_v4
+31/165540/campos_512_v4
+31/165551/campos_512_v4
+31/165568/campos_512_v4
+31/165570/campos_512_v4
+31/165572/campos_512_v4
+31/165576/campos_512_v4
+31/165585/campos_512_v4
+31/165588/campos_512_v4
+31/165601/campos_512_v4
+31/165608/campos_512_v4
+31/165620/campos_512_v4
+31/165627/campos_512_v4
+31/165633/campos_512_v4
+31/165636/campos_512_v4
+31/165642/campos_512_v4
+31/165645/campos_512_v4
+31/165646/campos_512_v4
+31/165648/campos_512_v4
+31/165650/campos_512_v4
+31/165662/campos_512_v4
+31/165672/campos_512_v4
+31/165691/campos_512_v4
+31/165692/campos_512_v4
+31/165693/campos_512_v4
+31/165697/campos_512_v4
+31/165701/campos_512_v4
+31/165703/campos_512_v4
+31/165718/campos_512_v4
+31/165723/campos_512_v4
+31/165732/campos_512_v4
+31/165735/campos_512_v4
+31/165737/campos_512_v4
+31/165756/campos_512_v4
+31/165765/campos_512_v4
+31/165767/campos_512_v4
+31/165768/campos_512_v4
+31/165773/campos_512_v4
+31/165806/campos_512_v4
+31/165812/campos_512_v4
+31/165814/campos_512_v4
+31/165818/campos_512_v4
+31/165831/campos_512_v4
+31/165834/campos_512_v4
+31/165836/campos_512_v4
+31/165837/campos_512_v4
+31/165845/campos_512_v4
+31/165848/campos_512_v4
+31/165858/campos_512_v4
+31/165867/campos_512_v4
+31/165873/campos_512_v4
+31/165907/campos_512_v4
+31/165919/campos_512_v4
+31/165926/campos_512_v4
+31/165932/campos_512_v4
+31/165933/campos_512_v4
+31/165934/campos_512_v4
+31/165955/campos_512_v4
+31/165958/campos_512_v4
+31/165960/campos_512_v4
+31/165963/campos_512_v4
+31/165973/campos_512_v4
+31/165978/campos_512_v4
+31/165989/campos_512_v4
+31/165992/campos_512_v4
+31/165993/campos_512_v4
+31/165994/campos_512_v4
+31/165998/campos_512_v4
+31/166000/campos_512_v4
+31/166004/campos_512_v4
+31/166009/campos_512_v4
+31/166010/campos_512_v4
+31/166016/campos_512_v4
+31/166019/campos_512_v4
+31/166020/campos_512_v4
+31/166025/campos_512_v4
+31/166026/campos_512_v4
+31/166028/campos_512_v4
+31/166034/campos_512_v4
+31/166050/campos_512_v4
+31/166075/campos_512_v4
+31/166077/campos_512_v4
+31/166084/campos_512_v4
+31/166088/campos_512_v4
+31/166091/campos_512_v4
+31/166093/campos_512_v4
+31/166094/campos_512_v4
+31/166098/campos_512_v4
+31/166102/campos_512_v4
+31/166104/campos_512_v4
+31/166105/campos_512_v4
+31/166115/campos_512_v4
+31/166116/campos_512_v4
+31/166129/campos_512_v4
+31/166136/campos_512_v4
+31/166138/campos_512_v4
+31/166164/campos_512_v4
+31/166167/campos_512_v4
+31/166204/campos_512_v4
+31/166212/campos_512_v4
+31/166215/campos_512_v4
+31/166216/campos_512_v4
+31/166226/campos_512_v4
+31/166236/campos_512_v4
+31/166242/campos_512_v4
+31/166248/campos_512_v4
+31/166256/campos_512_v4
+31/166268/campos_512_v4
+31/166276/campos_512_v4
+31/166283/campos_512_v4
+31/166284/campos_512_v4
+31/166286/campos_512_v4
+31/166288/campos_512_v4
+31/166310/campos_512_v4
+31/166327/campos_512_v4
+31/166336/campos_512_v4
+31/166341/campos_512_v4
+31/166345/campos_512_v4
+31/166350/campos_512_v4
+31/166354/campos_512_v4
+31/166356/campos_512_v4
+31/166357/campos_512_v4
+31/166366/campos_512_v4
+31/166370/campos_512_v4
+31/166371/campos_512_v4
+31/166395/campos_512_v4
+31/166400/campos_512_v4
+31/166401/campos_512_v4
+31/166408/campos_512_v4
+31/166410/campos_512_v4
+31/166424/campos_512_v4
+31/166426/campos_512_v4
+31/166436/campos_512_v4
+31/166443/campos_512_v4
+31/166456/campos_512_v4
+31/166459/campos_512_v4
+31/166462/campos_512_v4
+31/166465/campos_512_v4
+31/166482/campos_512_v4
+31/166485/campos_512_v4
+31/166486/campos_512_v4
+31/166492/campos_512_v4
+31/166493/campos_512_v4
+31/166494/campos_512_v4
+31/166495/campos_512_v4
+31/166509/campos_512_v4
+31/166515/campos_512_v4
+31/166522/campos_512_v4
+31/166526/campos_512_v4
+31/166528/campos_512_v4
+31/166531/campos_512_v4
+31/166538/campos_512_v4
+31/166551/campos_512_v4
+31/166556/campos_512_v4
+31/166596/campos_512_v4
+31/166612/campos_512_v4
+31/166649/campos_512_v4
+31/166655/campos_512_v4
+31/166656/campos_512_v4
+31/166674/campos_512_v4
+31/166691/campos_512_v4
+31/166700/campos_512_v4
+31/166701/campos_512_v4
+31/166704/campos_512_v4
+31/166716/campos_512_v4
+31/166746/campos_512_v4
+31/166776/campos_512_v4
+31/166778/campos_512_v4
+31/166794/campos_512_v4
+31/166807/campos_512_v4
+31/166810/campos_512_v4
+31/166811/campos_512_v4
+31/166814/campos_512_v4
+31/166828/campos_512_v4
+31/166830/campos_512_v4
+31/166835/campos_512_v4
+31/166839/campos_512_v4
+31/166851/campos_512_v4
+31/166862/campos_512_v4
+31/166865/campos_512_v4
+31/166871/campos_512_v4
+31/166872/campos_512_v4
+31/166873/campos_512_v4
+31/166874/campos_512_v4
+31/166876/campos_512_v4
+31/166878/campos_512_v4
+31/166879/campos_512_v4
+31/166886/campos_512_v4
+31/166897/campos_512_v4
+31/166902/campos_512_v4
+31/166903/campos_512_v4
+31/166910/campos_512_v4
+31/166914/campos_512_v4
+31/166918/campos_512_v4
+31/166922/campos_512_v4
+31/166928/campos_512_v4
+31/166933/campos_512_v4
+31/166955/campos_512_v4
+31/166963/campos_512_v4
+31/166966/campos_512_v4
+31/166972/campos_512_v4
+31/166976/campos_512_v4
+31/166984/campos_512_v4
+31/166990/campos_512_v4
+31/166992/campos_512_v4
+31/166998/campos_512_v4
+31/166999/campos_512_v4
+31/167006/campos_512_v4
+31/167007/campos_512_v4
+31/167009/campos_512_v4
+31/167024/campos_512_v4
+31/167025/campos_512_v4
+31/167029/campos_512_v4
+31/167037/campos_512_v4
+31/167040/campos_512_v4
+31/167042/campos_512_v4
+31/167052/campos_512_v4
+31/167055/campos_512_v4
+31/167064/campos_512_v4
+31/167072/campos_512_v4
+31/167074/campos_512_v4
+31/167076/campos_512_v4
+31/167098/campos_512_v4
+31/167108/campos_512_v4
+31/167119/campos_512_v4
+31/167132/campos_512_v4
+31/167145/campos_512_v4
+31/167158/campos_512_v4
+31/167168/campos_512_v4
+31/167176/campos_512_v4
+31/167181/campos_512_v4
+31/167192/campos_512_v4
+31/167199/campos_512_v4
+31/167206/campos_512_v4
+31/167214/campos_512_v4
+31/167220/campos_512_v4
+31/167224/campos_512_v4
+31/167241/campos_512_v4
+31/167244/campos_512_v4
+31/167250/campos_512_v4
+31/167251/campos_512_v4
+31/167257/campos_512_v4
+31/167262/campos_512_v4
+31/167264/campos_512_v4
+31/167265/campos_512_v4
+31/167266/campos_512_v4
+31/167267/campos_512_v4
+31/167310/campos_512_v4
+31/167318/campos_512_v4
+31/167320/campos_512_v4
+31/167322/campos_512_v4
+31/167325/campos_512_v4
+31/167329/campos_512_v4
+31/167340/campos_512_v4
+31/167351/campos_512_v4
+31/167352/campos_512_v4
+31/167366/campos_512_v4
+31/167370/campos_512_v4
+31/167376/campos_512_v4
+31/167388/campos_512_v4
+31/167391/campos_512_v4
+31/167406/campos_512_v4
+31/167408/campos_512_v4
+31/167412/campos_512_v4
+31/167414/campos_512_v4
+31/167427/campos_512_v4
+31/167428/campos_512_v4
+31/167434/campos_512_v4
+31/167435/campos_512_v4
+31/167441/campos_512_v4
+31/167443/campos_512_v4
+31/167444/campos_512_v4
+31/167449/campos_512_v4
+31/167452/campos_512_v4
+31/167456/campos_512_v4
+31/167462/campos_512_v4
+31/167466/campos_512_v4
+31/167468/campos_512_v4
+31/167474/campos_512_v4
+31/167481/campos_512_v4
+31/167488/campos_512_v4
+31/167496/campos_512_v4
+31/167506/campos_512_v4
+31/167510/campos_512_v4
+31/167519/campos_512_v4
+31/167521/campos_512_v4
+31/167531/campos_512_v4
+31/167546/campos_512_v4
+31/167552/campos_512_v4
+31/167568/campos_512_v4
+31/167572/campos_512_v4
+31/167581/campos_512_v4
+31/167582/campos_512_v4
+31/167583/campos_512_v4
+31/167592/campos_512_v4
+31/167593/campos_512_v4
+31/167608/campos_512_v4
+31/167611/campos_512_v4
+31/167613/campos_512_v4
+31/167617/campos_512_v4
+31/167626/campos_512_v4
+31/167627/campos_512_v4
+31/167631/campos_512_v4
+31/167634/campos_512_v4
+31/167644/campos_512_v4
+31/167666/campos_512_v4
+31/167687/campos_512_v4
+31/167690/campos_512_v4
+31/167695/campos_512_v4
+31/167702/campos_512_v4
+31/167709/campos_512_v4
+31/167721/campos_512_v4
+31/167729/campos_512_v4
+31/167733/campos_512_v4
+31/167735/campos_512_v4
+31/167749/campos_512_v4
+31/167750/campos_512_v4
+31/167771/campos_512_v4
+31/167775/campos_512_v4
+31/167776/campos_512_v4
+31/167793/campos_512_v4
+31/167796/campos_512_v4
+31/167806/campos_512_v4
+31/167810/campos_512_v4
+31/167816/campos_512_v4
+31/167819/campos_512_v4
+31/167829/campos_512_v4
+31/167844/campos_512_v4
+31/167849/campos_512_v4
+31/167854/campos_512_v4
+31/167855/campos_512_v4
+31/167857/campos_512_v4
+31/167858/campos_512_v4
+31/167906/campos_512_v4
+31/167914/campos_512_v4
+31/167918/campos_512_v4
+31/167924/campos_512_v4
+31/167925/campos_512_v4
+31/167938/campos_512_v4
+31/167942/campos_512_v4
+31/167951/campos_512_v4
+31/167952/campos_512_v4
+31/167966/campos_512_v4
+31/167969/campos_512_v4
+31/167972/campos_512_v4
+31/167981/campos_512_v4
+31/167986/campos_512_v4
+31/167987/campos_512_v4
+31/167988/campos_512_v4
+31/167994/campos_512_v4
+31/168006/campos_512_v4
+31/168009/campos_512_v4
+31/168026/campos_512_v4
+31/168028/campos_512_v4
+31/168029/campos_512_v4
+31/168034/campos_512_v4
+31/168065/campos_512_v4
+31/168071/campos_512_v4
+31/168074/campos_512_v4
+31/168086/campos_512_v4
+31/168094/campos_512_v4
+31/168099/campos_512_v4
+31/168112/campos_512_v4
+31/168114/campos_512_v4
+31/168118/campos_512_v4
+31/168123/campos_512_v4
+31/168124/campos_512_v4
+31/168135/campos_512_v4
+31/168143/campos_512_v4
+31/168144/campos_512_v4
+31/168153/campos_512_v4
+31/168162/campos_512_v4
+31/168172/campos_512_v4
+31/168185/campos_512_v4
+31/168190/campos_512_v4
+31/168196/campos_512_v4
+31/168197/campos_512_v4
+31/168208/campos_512_v4
+31/168209/campos_512_v4
+31/168210/campos_512_v4
+31/168216/campos_512_v4
+31/168220/campos_512_v4
+31/168227/campos_512_v4
+31/168235/campos_512_v4
+31/168238/campos_512_v4
+31/168242/campos_512_v4
+31/168245/campos_512_v4
+31/168246/campos_512_v4
+31/168252/campos_512_v4
+31/168263/campos_512_v4
+31/168280/campos_512_v4
+31/168282/campos_512_v4
+31/168292/campos_512_v4
+31/168294/campos_512_v4
+31/168295/campos_512_v4
+31/168310/campos_512_v4
+31/168320/campos_512_v4
+31/168329/campos_512_v4
+31/168334/campos_512_v4
+31/168336/campos_512_v4
+31/168340/campos_512_v4
+31/168360/campos_512_v4
+31/168376/campos_512_v4
+31/168391/campos_512_v4
+31/168403/campos_512_v4
+31/168405/campos_512_v4
+31/168412/campos_512_v4
+31/168413/campos_512_v4
+31/168423/campos_512_v4
+31/168426/campos_512_v4
+31/168432/campos_512_v4
+31/168436/campos_512_v4
+31/168439/campos_512_v4
+31/168440/campos_512_v4
+31/168455/campos_512_v4
+31/168480/campos_512_v4
+31/168481/campos_512_v4
+31/168504/campos_512_v4
+31/168510/campos_512_v4
+31/168511/campos_512_v4
+31/168519/campos_512_v4
+31/168525/campos_512_v4
+31/168527/campos_512_v4
+31/168544/campos_512_v4
+31/168545/campos_512_v4
+31/168546/campos_512_v4
+31/168555/campos_512_v4
+31/168556/campos_512_v4
+31/168560/campos_512_v4
+31/168561/campos_512_v4
+31/168574/campos_512_v4
+31/168579/campos_512_v4
+31/168587/campos_512_v4
+31/168593/campos_512_v4
+31/168604/campos_512_v4
+31/168614/campos_512_v4
+31/168631/campos_512_v4
+31/168645/campos_512_v4
+31/168672/campos_512_v4
+31/168675/campos_512_v4
+31/168676/campos_512_v4
+31/168679/campos_512_v4
+31/168698/campos_512_v4
+31/168703/campos_512_v4
+31/168709/campos_512_v4
+31/168710/campos_512_v4
+31/168715/campos_512_v4
+31/168717/campos_512_v4
+31/168721/campos_512_v4
+31/168736/campos_512_v4
+31/168738/campos_512_v4
+31/168739/campos_512_v4
+31/168741/campos_512_v4
+31/168742/campos_512_v4
+31/168756/campos_512_v4
+31/168757/campos_512_v4
+31/168758/campos_512_v4
+31/168760/campos_512_v4
+31/168763/campos_512_v4
+31/168782/campos_512_v4
+31/168783/campos_512_v4
+31/168804/campos_512_v4
+31/168805/campos_512_v4
+31/168809/campos_512_v4
+31/168813/campos_512_v4
+31/168820/campos_512_v4
+31/168821/campos_512_v4
+31/168831/campos_512_v4
+31/168832/campos_512_v4
+31/168857/campos_512_v4
+31/168859/campos_512_v4
+31/168866/campos_512_v4
+31/168869/campos_512_v4
+31/168881/campos_512_v4
+31/168899/campos_512_v4
+31/168922/campos_512_v4
+31/168936/campos_512_v4
+31/168939/campos_512_v4
+31/168954/campos_512_v4
+31/168955/campos_512_v4
+31/168956/campos_512_v4
+31/168971/campos_512_v4
+31/168973/campos_512_v4
+31/168976/campos_512_v4
+31/168983/campos_512_v4
+31/168984/campos_512_v4
+31/168985/campos_512_v4
+31/168987/campos_512_v4
+31/168993/campos_512_v4
+31/168995/campos_512_v4
+31/169006/campos_512_v4
+31/169009/campos_512_v4
+31/169010/campos_512_v4
+31/169025/campos_512_v4
+31/169031/campos_512_v4
+31/169046/campos_512_v4
+31/169050/campos_512_v4
+31/169051/campos_512_v4
+31/169075/campos_512_v4
+31/169076/campos_512_v4
+31/169086/campos_512_v4
+31/169110/campos_512_v4
+31/169115/campos_512_v4
+31/169116/campos_512_v4
+31/169125/campos_512_v4
+31/169127/campos_512_v4
+31/169132/campos_512_v4
+31/169141/campos_512_v4
+31/169150/campos_512_v4
+31/169180/campos_512_v4
+31/169197/campos_512_v4
+31/169215/campos_512_v4
+31/169225/campos_512_v4
+31/169255/campos_512_v4
+31/169280/campos_512_v4
+31/169288/campos_512_v4
+31/169302/campos_512_v4
+31/169303/campos_512_v4
+31/169315/campos_512_v4
+31/169319/campos_512_v4
+31/169321/campos_512_v4
+31/169327/campos_512_v4
+31/169335/campos_512_v4
+31/169340/campos_512_v4
+31/169349/campos_512_v4
+31/169356/campos_512_v4
+31/169374/campos_512_v4
+31/169386/campos_512_v4
+31/169392/campos_512_v4
+31/169402/campos_512_v4
+31/169403/campos_512_v4
+31/169419/campos_512_v4
+31/169420/campos_512_v4
+31/169429/campos_512_v4
+31/169433/campos_512_v4
+31/169442/campos_512_v4
+31/169443/campos_512_v4
+31/169444/campos_512_v4
+31/169453/campos_512_v4
+31/169455/campos_512_v4
+31/169458/campos_512_v4
+31/169459/campos_512_v4
+31/169470/campos_512_v4
+31/169471/campos_512_v4
+31/169474/campos_512_v4
+31/169477/campos_512_v4
+31/169478/campos_512_v4
+31/169480/campos_512_v4
+31/169489/campos_512_v4
+31/169492/campos_512_v4
+31/169493/campos_512_v4
+31/169502/campos_512_v4
+31/169507/campos_512_v4
+31/169519/campos_512_v4
+31/169536/campos_512_v4
+31/169546/campos_512_v4
+31/169553/campos_512_v4
+31/169557/campos_512_v4
+31/169563/campos_512_v4
+31/169565/campos_512_v4
+31/169571/campos_512_v4
+31/169572/campos_512_v4
+31/169573/campos_512_v4
+31/169588/campos_512_v4
+31/169590/campos_512_v4
+31/169592/campos_512_v4
+31/169594/campos_512_v4
+31/169606/campos_512_v4
+31/169614/campos_512_v4
+31/169617/campos_512_v4
+31/169626/campos_512_v4
+31/169630/campos_512_v4
+31/169638/campos_512_v4
+31/169640/campos_512_v4
+31/169652/campos_512_v4
+31/169655/campos_512_v4
+31/169657/campos_512_v4
+31/169670/campos_512_v4
+31/169683/campos_512_v4
+31/169695/campos_512_v4
+31/169699/campos_512_v4
+31/169720/campos_512_v4
+31/169732/campos_512_v4
+31/169735/campos_512_v4
+31/169741/campos_512_v4
+31/169746/campos_512_v4
+31/169749/campos_512_v4
+31/169750/campos_512_v4
+31/169752/campos_512_v4
+31/169757/campos_512_v4
+31/169800/campos_512_v4
+31/169813/campos_512_v4
+31/169815/campos_512_v4
+31/169820/campos_512_v4
+31/169824/campos_512_v4
+31/169834/campos_512_v4
+31/169840/campos_512_v4
+31/169846/campos_512_v4
+31/169847/campos_512_v4
+31/169876/campos_512_v4
+31/169879/campos_512_v4
+31/169887/campos_512_v4
+31/169904/campos_512_v4
+31/169913/campos_512_v4
+31/169922/campos_512_v4
+31/169935/campos_512_v4
+31/169947/campos_512_v4
+31/169982/campos_512_v4
+31/169998/campos_512_v4
+31/169999/campos_512_v4
+32/170009/campos_512_v4
+32/170024/campos_512_v4
+32/170039/campos_512_v4
+32/170042/campos_512_v4
+32/170044/campos_512_v4
+32/170050/campos_512_v4
+32/170052/campos_512_v4
+32/170068/campos_512_v4
+32/170072/campos_512_v4
+32/170083/campos_512_v4
+32/170087/campos_512_v4
+32/170111/campos_512_v4
+32/170112/campos_512_v4
+32/170114/campos_512_v4
+32/170115/campos_512_v4
+32/170117/campos_512_v4
+32/170130/campos_512_v4
+32/170133/campos_512_v4
+32/170141/campos_512_v4
+32/170149/campos_512_v4
+32/170155/campos_512_v4
+32/170164/campos_512_v4
+32/170165/campos_512_v4
+32/170179/campos_512_v4
+32/170190/campos_512_v4
+32/170191/campos_512_v4
+32/170197/campos_512_v4
+32/170214/campos_512_v4
+32/170218/campos_512_v4
+32/170222/campos_512_v4
+32/170236/campos_512_v4
+32/170254/campos_512_v4
+32/170257/campos_512_v4
+32/170272/campos_512_v4
+32/170297/campos_512_v4
+32/170300/campos_512_v4
+32/170310/campos_512_v4
+32/170317/campos_512_v4
+32/170329/campos_512_v4
+32/170330/campos_512_v4
+32/170336/campos_512_v4
+32/170340/campos_512_v4
+32/170341/campos_512_v4
+32/170344/campos_512_v4
+32/170346/campos_512_v4
+32/170352/campos_512_v4
+32/170359/campos_512_v4
+32/170362/campos_512_v4
+32/170365/campos_512_v4
+32/170369/campos_512_v4
+32/170379/campos_512_v4
+32/170392/campos_512_v4
+32/170395/campos_512_v4
+32/170396/campos_512_v4
+32/170406/campos_512_v4
+32/170418/campos_512_v4
+32/170424/campos_512_v4
+32/170438/campos_512_v4
+32/170442/campos_512_v4
+32/170443/campos_512_v4
+32/170460/campos_512_v4
+32/170477/campos_512_v4
+32/170478/campos_512_v4
+32/170483/campos_512_v4
+32/170494/campos_512_v4
+32/170497/campos_512_v4
+32/170509/campos_512_v4
+32/170511/campos_512_v4
+32/170517/campos_512_v4
+32/170521/campos_512_v4
+32/170529/campos_512_v4
+32/170530/campos_512_v4
+32/170556/campos_512_v4
+32/170562/campos_512_v4
+32/170563/campos_512_v4
+32/170566/campos_512_v4
+32/170571/campos_512_v4
+32/170578/campos_512_v4
+32/170588/campos_512_v4
+32/170598/campos_512_v4
+32/170605/campos_512_v4
+32/170626/campos_512_v4
+32/170627/campos_512_v4
+32/170635/campos_512_v4
+32/170672/campos_512_v4
+32/170705/campos_512_v4
+32/170711/campos_512_v4
+32/170714/campos_512_v4
+32/170739/campos_512_v4
+32/170759/campos_512_v4
+32/170764/campos_512_v4
+32/170781/campos_512_v4
+32/170786/campos_512_v4
+32/170803/campos_512_v4
+32/170809/campos_512_v4
+32/170816/campos_512_v4
+32/170821/campos_512_v4
+32/170825/campos_512_v4
+32/170828/campos_512_v4
+32/170832/campos_512_v4
+32/170834/campos_512_v4
+32/170835/campos_512_v4
+32/170849/campos_512_v4
+32/170862/campos_512_v4
+32/170868/campos_512_v4
+32/170872/campos_512_v4
+32/170874/campos_512_v4
+32/170886/campos_512_v4
+32/170889/campos_512_v4
+32/170894/campos_512_v4
+32/170895/campos_512_v4
+32/170901/campos_512_v4
+32/170907/campos_512_v4
+32/170918/campos_512_v4
+32/170923/campos_512_v4
+32/170938/campos_512_v4
+32/170939/campos_512_v4
+32/170943/campos_512_v4
+32/170945/campos_512_v4
+32/170950/campos_512_v4
+32/170952/campos_512_v4
+32/170953/campos_512_v4
+32/170955/campos_512_v4
+32/170960/campos_512_v4
+32/170963/campos_512_v4
+32/170966/campos_512_v4
+32/170969/campos_512_v4
+32/171000/campos_512_v4
+32/171006/campos_512_v4
+32/171008/campos_512_v4
+32/171024/campos_512_v4
+32/171028/campos_512_v4
+32/171036/campos_512_v4
+32/171040/campos_512_v4
+32/171041/campos_512_v4
+32/171051/campos_512_v4
+32/171052/campos_512_v4
+32/171054/campos_512_v4
+32/171055/campos_512_v4
+32/171062/campos_512_v4
+32/171075/campos_512_v4
+32/171079/campos_512_v4
+32/171086/campos_512_v4
+32/171092/campos_512_v4
+32/171111/campos_512_v4
+32/171114/campos_512_v4
+32/171142/campos_512_v4
+32/171168/campos_512_v4
+32/171198/campos_512_v4
+32/171217/campos_512_v4
+32/171223/campos_512_v4
+32/171244/campos_512_v4
+32/171249/campos_512_v4
+32/171256/campos_512_v4
+32/171257/campos_512_v4
+32/171275/campos_512_v4
+32/171276/campos_512_v4
+32/171280/campos_512_v4
+32/171286/campos_512_v4
+32/171295/campos_512_v4
+32/171320/campos_512_v4
+32/171330/campos_512_v4
+32/171331/campos_512_v4
+32/171332/campos_512_v4
+32/171334/campos_512_v4
+32/171335/campos_512_v4
+32/171345/campos_512_v4
+32/171352/campos_512_v4
+32/171354/campos_512_v4
+32/171355/campos_512_v4
+32/171362/campos_512_v4
+32/171372/campos_512_v4
+32/171376/campos_512_v4
+32/171379/campos_512_v4
+32/171383/campos_512_v4
+32/171390/campos_512_v4
+32/171393/campos_512_v4
+32/171394/campos_512_v4
+32/171420/campos_512_v4
+32/171424/campos_512_v4
+32/171434/campos_512_v4
+32/171458/campos_512_v4
+32/171476/campos_512_v4
+32/171479/campos_512_v4
+32/171489/campos_512_v4
+32/171496/campos_512_v4
+32/171504/campos_512_v4
+32/171512/campos_512_v4
+32/171515/campos_512_v4
+32/171519/campos_512_v4
+32/171531/campos_512_v4
+32/171543/campos_512_v4
+32/171546/campos_512_v4
+32/171548/campos_512_v4
+32/171549/campos_512_v4
+32/171560/campos_512_v4
+32/171577/campos_512_v4
+32/171589/campos_512_v4
+32/171603/campos_512_v4
+32/171624/campos_512_v4
+32/171642/campos_512_v4
+32/171644/campos_512_v4
+32/171649/campos_512_v4
+32/171658/campos_512_v4
+32/171659/campos_512_v4
+32/171667/campos_512_v4
+32/171668/campos_512_v4
+32/171681/campos_512_v4
+32/171708/campos_512_v4
+32/171709/campos_512_v4
+32/171714/campos_512_v4
+32/171719/campos_512_v4
+32/171755/campos_512_v4
+32/171764/campos_512_v4
+32/171766/campos_512_v4
+32/171770/campos_512_v4
+32/171777/campos_512_v4
+32/171781/campos_512_v4
+32/171783/campos_512_v4
+32/171790/campos_512_v4
+32/171809/campos_512_v4
+32/171812/campos_512_v4
+32/171824/campos_512_v4
+32/171827/campos_512_v4
+32/171838/campos_512_v4
+32/171839/campos_512_v4
+32/171844/campos_512_v4
+32/171855/campos_512_v4
+32/171862/campos_512_v4
+32/171867/campos_512_v4
+32/171868/campos_512_v4
+32/171897/campos_512_v4
+32/171912/campos_512_v4
+32/171933/campos_512_v4
+32/171936/campos_512_v4
+32/171942/campos_512_v4
+32/171943/campos_512_v4
+32/171963/campos_512_v4
+32/171964/campos_512_v4
+32/171970/campos_512_v4
+32/171973/campos_512_v4
+32/171976/campos_512_v4
+32/171980/campos_512_v4
+32/171988/campos_512_v4
+32/171989/campos_512_v4
+32/172003/campos_512_v4
+32/172007/campos_512_v4
+32/172018/campos_512_v4
+32/172030/campos_512_v4
+32/172032/campos_512_v4
+32/172033/campos_512_v4
+32/172043/campos_512_v4
+32/172051/campos_512_v4
+32/172053/campos_512_v4
+32/172064/campos_512_v4
+32/172066/campos_512_v4
+32/172068/campos_512_v4
+32/172070/campos_512_v4
+32/172071/campos_512_v4
+32/172097/campos_512_v4
+32/172098/campos_512_v4
+32/172103/campos_512_v4
+32/172111/campos_512_v4
+32/172116/campos_512_v4
+32/172119/campos_512_v4
+32/172120/campos_512_v4
+32/172167/campos_512_v4
+32/172168/campos_512_v4
+32/172171/campos_512_v4
+32/172177/campos_512_v4
+32/172182/campos_512_v4
+32/172193/campos_512_v4
+32/172202/campos_512_v4
+32/172206/campos_512_v4
+32/172210/campos_512_v4
+32/172218/campos_512_v4
+32/172219/campos_512_v4
+32/172225/campos_512_v4
+32/172227/campos_512_v4
+32/172229/campos_512_v4
+32/172240/campos_512_v4
+32/172243/campos_512_v4
+32/172254/campos_512_v4
+32/172273/campos_512_v4
+32/172279/campos_512_v4
+32/172293/campos_512_v4
+32/172300/campos_512_v4
+32/172308/campos_512_v4
+32/172319/campos_512_v4
+32/172329/campos_512_v4
+32/172340/campos_512_v4
+32/172341/campos_512_v4
+32/172343/campos_512_v4
+32/172359/campos_512_v4
+32/172361/campos_512_v4
+32/172373/campos_512_v4
+32/172390/campos_512_v4
+32/172399/campos_512_v4
+32/172407/campos_512_v4
+32/172421/campos_512_v4
+32/172423/campos_512_v4
+32/172427/campos_512_v4
+32/172428/campos_512_v4
+32/172438/campos_512_v4
+32/172449/campos_512_v4
+32/172450/campos_512_v4
+32/172452/campos_512_v4
+32/172454/campos_512_v4
+32/172457/campos_512_v4
+32/172459/campos_512_v4
+32/172466/campos_512_v4
+32/172467/campos_512_v4
+32/172468/campos_512_v4
+32/172505/campos_512_v4
+32/172512/campos_512_v4
+32/172516/campos_512_v4
+32/172538/campos_512_v4
+32/172546/campos_512_v4
+32/172548/campos_512_v4
+32/172577/campos_512_v4
+32/172582/campos_512_v4
+32/172597/campos_512_v4
+32/172602/campos_512_v4
+32/172609/campos_512_v4
+32/172611/campos_512_v4
+32/172617/campos_512_v4
+32/172640/campos_512_v4
+32/172648/campos_512_v4
+32/172650/campos_512_v4
+32/172652/campos_512_v4
+32/172655/campos_512_v4
+32/172673/campos_512_v4
+32/172679/campos_512_v4
+32/172686/campos_512_v4
+32/172688/campos_512_v4
+32/172699/campos_512_v4
+32/172707/campos_512_v4
+32/172709/campos_512_v4
+32/172714/campos_512_v4
+32/172715/campos_512_v4
+32/172719/campos_512_v4
+32/172722/campos_512_v4
+32/172724/campos_512_v4
+32/172732/campos_512_v4
+32/172733/campos_512_v4
+32/172734/campos_512_v4
+32/172737/campos_512_v4
+32/172743/campos_512_v4
+32/172750/campos_512_v4
+32/172752/campos_512_v4
+32/172753/campos_512_v4
+32/172772/campos_512_v4
+32/172793/campos_512_v4
+32/172802/campos_512_v4
+32/172809/campos_512_v4
+32/172814/campos_512_v4
+32/172815/campos_512_v4
+32/172833/campos_512_v4
+32/172836/campos_512_v4
+32/172838/campos_512_v4
+32/172840/campos_512_v4
+32/172843/campos_512_v4
+32/172847/campos_512_v4
+32/172855/campos_512_v4
+32/172859/campos_512_v4
+32/172861/campos_512_v4
+32/172862/campos_512_v4
+32/172869/campos_512_v4
+32/172879/campos_512_v4
+32/172885/campos_512_v4
+32/172907/campos_512_v4
+32/172916/campos_512_v4
+32/172926/campos_512_v4
+32/172933/campos_512_v4
+32/172950/campos_512_v4
+32/172952/campos_512_v4
+32/172955/campos_512_v4
+32/172960/campos_512_v4
+32/172963/campos_512_v4
+32/172968/campos_512_v4
+32/172969/campos_512_v4
+32/172973/campos_512_v4
+32/172987/campos_512_v4
+32/172995/campos_512_v4
+32/172996/campos_512_v4
+32/173000/campos_512_v4
+32/173004/campos_512_v4
+32/173005/campos_512_v4
+32/173016/campos_512_v4
+32/173051/campos_512_v4
+32/173066/campos_512_v4
+32/173072/campos_512_v4
+32/173074/campos_512_v4
+32/173077/campos_512_v4
+32/173087/campos_512_v4
+32/173098/campos_512_v4
+32/173111/campos_512_v4
+32/173118/campos_512_v4
+32/173130/campos_512_v4
+32/173142/campos_512_v4
+32/173159/campos_512_v4
+32/173165/campos_512_v4
+32/173169/campos_512_v4
+32/173176/campos_512_v4
+32/173185/campos_512_v4
+32/173187/campos_512_v4
+32/173188/campos_512_v4
+32/173197/campos_512_v4
+32/173199/campos_512_v4
+32/173215/campos_512_v4
+32/173221/campos_512_v4
+32/173224/campos_512_v4
+32/173245/campos_512_v4
+32/173268/campos_512_v4
+32/173270/campos_512_v4
+32/173280/campos_512_v4
+32/173283/campos_512_v4
+32/173290/campos_512_v4
+32/173294/campos_512_v4
+32/173296/campos_512_v4
+32/173312/campos_512_v4
+32/173315/campos_512_v4
+32/173320/campos_512_v4
+32/173330/campos_512_v4
+32/173331/campos_512_v4
+32/173337/campos_512_v4
+32/173351/campos_512_v4
+32/173388/campos_512_v4
+32/173396/campos_512_v4
+32/173397/campos_512_v4
+32/173418/campos_512_v4
+32/173421/campos_512_v4
+32/173431/campos_512_v4
+32/173435/campos_512_v4
+32/173440/campos_512_v4
+32/173458/campos_512_v4
+32/173465/campos_512_v4
+32/173479/campos_512_v4
+32/173499/campos_512_v4
+32/173506/campos_512_v4
+32/173519/campos_512_v4
+32/173523/campos_512_v4
+32/173529/campos_512_v4
+32/173542/campos_512_v4
+32/173561/campos_512_v4
+32/173565/campos_512_v4
+32/173571/campos_512_v4
+32/173572/campos_512_v4
+32/173591/campos_512_v4
+32/173603/campos_512_v4
+32/173607/campos_512_v4
+32/173611/campos_512_v4
+32/173622/campos_512_v4
+32/173624/campos_512_v4
+32/173628/campos_512_v4
+32/173634/campos_512_v4
+32/173635/campos_512_v4
+32/173643/campos_512_v4
+32/173656/campos_512_v4
+32/173664/campos_512_v4
+32/173682/campos_512_v4
+32/173684/campos_512_v4
+32/173697/campos_512_v4
+32/173716/campos_512_v4
+32/173729/campos_512_v4
+32/173742/campos_512_v4
+32/173743/campos_512_v4
+32/173746/campos_512_v4
+32/173747/campos_512_v4
+32/173754/campos_512_v4
+32/173758/campos_512_v4
+32/173759/campos_512_v4
+32/173767/campos_512_v4
+32/173771/campos_512_v4
+32/173786/campos_512_v4
+32/173813/campos_512_v4
+32/173816/campos_512_v4
+32/173833/campos_512_v4
+32/173837/campos_512_v4
+32/173850/campos_512_v4
+32/173867/campos_512_v4
+32/173868/campos_512_v4
+32/173874/campos_512_v4
+32/173877/campos_512_v4
+32/173881/campos_512_v4
+32/173884/campos_512_v4
+32/173902/campos_512_v4
+32/173908/campos_512_v4
+32/173922/campos_512_v4
+32/173959/campos_512_v4
+32/173960/campos_512_v4
+32/173972/campos_512_v4
+32/173977/campos_512_v4
+32/173979/campos_512_v4
+32/174001/campos_512_v4
+32/174002/campos_512_v4
+32/174005/campos_512_v4
+32/174006/campos_512_v4
+32/174021/campos_512_v4
+32/174027/campos_512_v4
+32/174032/campos_512_v4
+32/174045/campos_512_v4
+32/174047/campos_512_v4
+32/174062/campos_512_v4
+32/174066/campos_512_v4
+32/174068/campos_512_v4
+32/174072/campos_512_v4
+32/174081/campos_512_v4
+32/174085/campos_512_v4
+32/174090/campos_512_v4
+32/174093/campos_512_v4
+32/174095/campos_512_v4
+32/174101/campos_512_v4
+32/174102/campos_512_v4
+32/174110/campos_512_v4
+32/174112/campos_512_v4
+32/174119/campos_512_v4
+32/174126/campos_512_v4
+32/174133/campos_512_v4
+32/174136/campos_512_v4
+32/174149/campos_512_v4
+32/174155/campos_512_v4
+32/174156/campos_512_v4
+32/174157/campos_512_v4
+32/174158/campos_512_v4
+32/174160/campos_512_v4
+32/174164/campos_512_v4
+32/174172/campos_512_v4
+32/174189/campos_512_v4
+32/174201/campos_512_v4
+32/174209/campos_512_v4
+32/174220/campos_512_v4
+32/174223/campos_512_v4
+32/174241/campos_512_v4
+32/174242/campos_512_v4
+32/174254/campos_512_v4
+32/174256/campos_512_v4
+32/174272/campos_512_v4
+32/174287/campos_512_v4
+32/174290/campos_512_v4
+32/174298/campos_512_v4
+32/174301/campos_512_v4
+32/174306/campos_512_v4
+32/174312/campos_512_v4
+32/174315/campos_512_v4
+32/174322/campos_512_v4
+32/174326/campos_512_v4
+32/174328/campos_512_v4
+32/174335/campos_512_v4
+32/174352/campos_512_v4
+32/174369/campos_512_v4
+32/174370/campos_512_v4
+32/174378/campos_512_v4
+32/174386/campos_512_v4
+32/174398/campos_512_v4
+32/174411/campos_512_v4
+32/174414/campos_512_v4
+32/174453/campos_512_v4
+32/174455/campos_512_v4
+32/174475/campos_512_v4
+32/174480/campos_512_v4
+32/174494/campos_512_v4
+32/174542/campos_512_v4
+32/174560/campos_512_v4
+32/174568/campos_512_v4
+32/174581/campos_512_v4
+32/174595/campos_512_v4
+32/174602/campos_512_v4
+32/174615/campos_512_v4
+32/174619/campos_512_v4
+32/174640/campos_512_v4
+32/174641/campos_512_v4
+32/174644/campos_512_v4
+32/174658/campos_512_v4
+32/174659/campos_512_v4
+32/174660/campos_512_v4
+32/174663/campos_512_v4
+32/174668/campos_512_v4
+32/174673/campos_512_v4
+32/174676/campos_512_v4
+32/174682/campos_512_v4
+32/174691/campos_512_v4
+32/174696/campos_512_v4
+32/174699/campos_512_v4
+32/174702/campos_512_v4
+32/174707/campos_512_v4
+32/174724/campos_512_v4
+32/174730/campos_512_v4
+32/174732/campos_512_v4
+32/174737/campos_512_v4
+32/174738/campos_512_v4
+32/174739/campos_512_v4
+32/174746/campos_512_v4
+32/174747/campos_512_v4
+32/174766/campos_512_v4
+32/174776/campos_512_v4
+32/174789/campos_512_v4
+32/174807/campos_512_v4
+32/174830/campos_512_v4
+32/174833/campos_512_v4
+32/174841/campos_512_v4
+32/174860/campos_512_v4
+32/174877/campos_512_v4
+32/174883/campos_512_v4
+32/174903/campos_512_v4
+32/174906/campos_512_v4
+32/174909/campos_512_v4
+32/174913/campos_512_v4
+32/174915/campos_512_v4
+32/174924/campos_512_v4
+32/174928/campos_512_v4
+32/174942/campos_512_v4
+32/174965/campos_512_v4
+32/174970/campos_512_v4
+32/174971/campos_512_v4
+32/174984/campos_512_v4
+32/174987/campos_512_v4
+32/174990/campos_512_v4
+32/174993/campos_512_v4
+32/175001/campos_512_v4
+33/175004/campos_512_v4
+33/175008/campos_512_v4
+33/175026/campos_512_v4
+33/175042/campos_512_v4
+33/175058/campos_512_v4
+33/175060/campos_512_v4
+33/175065/campos_512_v4
+33/175079/campos_512_v4
+33/175087/campos_512_v4
+33/175090/campos_512_v4
+33/175091/campos_512_v4
+33/175092/campos_512_v4
+33/175093/campos_512_v4
+33/175103/campos_512_v4
+33/175111/campos_512_v4
+33/175120/campos_512_v4
+33/175123/campos_512_v4
+33/175131/campos_512_v4
+33/175137/campos_512_v4
+33/175140/campos_512_v4
+33/175163/campos_512_v4
+33/175165/campos_512_v4
+33/175172/campos_512_v4
+33/175177/campos_512_v4
+33/175185/campos_512_v4
+33/175201/campos_512_v4
+33/175213/campos_512_v4
+33/175218/campos_512_v4
+33/175219/campos_512_v4
+33/175224/campos_512_v4
+33/175225/campos_512_v4
+33/175230/campos_512_v4
+33/175233/campos_512_v4
+33/175238/campos_512_v4
+33/175243/campos_512_v4
+33/175267/campos_512_v4
+33/175273/campos_512_v4
+33/175284/campos_512_v4
+33/175286/campos_512_v4
+33/175300/campos_512_v4
+33/175304/campos_512_v4
+33/175313/campos_512_v4
+33/175319/campos_512_v4
+33/175320/campos_512_v4
+33/175326/campos_512_v4
+33/175327/campos_512_v4
+33/175335/campos_512_v4
+33/175337/campos_512_v4
+33/175348/campos_512_v4
+33/175351/campos_512_v4
+33/175354/campos_512_v4
+33/175358/campos_512_v4
+33/175382/campos_512_v4
+33/175383/campos_512_v4
+33/175385/campos_512_v4
+33/175392/campos_512_v4
+33/175404/campos_512_v4
+33/175416/campos_512_v4
+33/175417/campos_512_v4
+33/175419/campos_512_v4
+33/175429/campos_512_v4
+33/175442/campos_512_v4
+33/175456/campos_512_v4
+33/175470/campos_512_v4
+33/175493/campos_512_v4
+33/175495/campos_512_v4
+33/175498/campos_512_v4
+33/175511/campos_512_v4
+33/175515/campos_512_v4
+33/175536/campos_512_v4
+33/175543/campos_512_v4
+33/175546/campos_512_v4
+33/175572/campos_512_v4
+33/175587/campos_512_v4
+33/175601/campos_512_v4
+33/175605/campos_512_v4
+33/175610/campos_512_v4
+33/175619/campos_512_v4
+33/175621/campos_512_v4
+33/175628/campos_512_v4
+33/175631/campos_512_v4
+33/175640/campos_512_v4
+33/175648/campos_512_v4
+33/175651/campos_512_v4
+33/175658/campos_512_v4
+33/175659/campos_512_v4
+33/175668/campos_512_v4
+33/175686/campos_512_v4
+33/175691/campos_512_v4
+33/175695/campos_512_v4
+33/175700/campos_512_v4
+33/175722/campos_512_v4
+33/175755/campos_512_v4
+33/175774/campos_512_v4
+33/175776/campos_512_v4
+33/175777/campos_512_v4
+33/175802/campos_512_v4
+33/175806/campos_512_v4
+33/175818/campos_512_v4
+33/175822/campos_512_v4
+33/175844/campos_512_v4
+33/175850/campos_512_v4
+33/175872/campos_512_v4
+33/175885/campos_512_v4
+33/175887/campos_512_v4
+33/175893/campos_512_v4
+33/175898/campos_512_v4
+33/175899/campos_512_v4
+33/175900/campos_512_v4
+33/175906/campos_512_v4
+33/175912/campos_512_v4
+33/175914/campos_512_v4
+33/175915/campos_512_v4
+33/175917/campos_512_v4
+33/175925/campos_512_v4
+33/175926/campos_512_v4
+33/175931/campos_512_v4
+33/175934/campos_512_v4
+33/175940/campos_512_v4
+33/175947/campos_512_v4
+33/175949/campos_512_v4
+33/175954/campos_512_v4
+33/175956/campos_512_v4
+33/175979/campos_512_v4
+33/175987/campos_512_v4
+33/176000/campos_512_v4
+33/176009/campos_512_v4
+33/176010/campos_512_v4
+33/176034/campos_512_v4
+33/176045/campos_512_v4
+33/176048/campos_512_v4
+33/176051/campos_512_v4
+33/176060/campos_512_v4
+33/176064/campos_512_v4
+33/176066/campos_512_v4
+33/176070/campos_512_v4
+33/176074/campos_512_v4
+33/176075/campos_512_v4
+33/176077/campos_512_v4
+33/176088/campos_512_v4
+33/176094/campos_512_v4
+33/176121/campos_512_v4
+33/176128/campos_512_v4
+33/176174/campos_512_v4
+33/176176/campos_512_v4
+33/176178/campos_512_v4
+33/176181/campos_512_v4
+33/176187/campos_512_v4
+33/176189/campos_512_v4
+33/176200/campos_512_v4
+33/176207/campos_512_v4
+33/176219/campos_512_v4
+33/176231/campos_512_v4
+33/176237/campos_512_v4
+33/176247/campos_512_v4
+33/176251/campos_512_v4
+33/176258/campos_512_v4
+33/176282/campos_512_v4
+33/176286/campos_512_v4
+33/176296/campos_512_v4
+33/176310/campos_512_v4
+33/176314/campos_512_v4
+33/176330/campos_512_v4
+33/176337/campos_512_v4
+33/176339/campos_512_v4
+33/176340/campos_512_v4
+33/176350/campos_512_v4
+33/176357/campos_512_v4
+33/176360/campos_512_v4
+33/176362/campos_512_v4
+33/176368/campos_512_v4
+33/176379/campos_512_v4
+33/176387/campos_512_v4
+33/176390/campos_512_v4
+33/176400/campos_512_v4
+33/176403/campos_512_v4
+33/176404/campos_512_v4
+33/176409/campos_512_v4
+33/176411/campos_512_v4
+33/176442/campos_512_v4
+33/176445/campos_512_v4
+33/176451/campos_512_v4
+33/176453/campos_512_v4
+33/176459/campos_512_v4
+33/176461/campos_512_v4
+33/176463/campos_512_v4
+33/176464/campos_512_v4
+33/176481/campos_512_v4
+33/176482/campos_512_v4
+33/176484/campos_512_v4
+33/176490/campos_512_v4
+33/176496/campos_512_v4
+33/176500/campos_512_v4
+33/176515/campos_512_v4
+33/176522/campos_512_v4
+33/176526/campos_512_v4
+33/176533/campos_512_v4
+33/176536/campos_512_v4
+33/176541/campos_512_v4
+33/176549/campos_512_v4
+33/176553/campos_512_v4
+33/176567/campos_512_v4
+33/176570/campos_512_v4
+33/176586/campos_512_v4
+33/176600/campos_512_v4
+33/176617/campos_512_v4
+33/176620/campos_512_v4
+33/176622/campos_512_v4
+33/176639/campos_512_v4
+33/176640/campos_512_v4
+33/176644/campos_512_v4
+33/176648/campos_512_v4
+33/176653/campos_512_v4
+33/176654/campos_512_v4
+33/176660/campos_512_v4
+33/176663/campos_512_v4
+33/176664/campos_512_v4
+33/176671/campos_512_v4
+33/176674/campos_512_v4
+33/176677/campos_512_v4
+33/176682/campos_512_v4
+33/176683/campos_512_v4
+33/176684/campos_512_v4
+33/176687/campos_512_v4
+33/176714/campos_512_v4
+33/176726/campos_512_v4
+33/176732/campos_512_v4
+33/176737/campos_512_v4
+33/176744/campos_512_v4
+33/176754/campos_512_v4
+33/176756/campos_512_v4
+33/176760/campos_512_v4
+33/176761/campos_512_v4
+33/176802/campos_512_v4
+33/176806/campos_512_v4
+33/176807/campos_512_v4
+33/176810/campos_512_v4
+33/176851/campos_512_v4
+33/176871/campos_512_v4
+33/176877/campos_512_v4
+33/176879/campos_512_v4
+33/176907/campos_512_v4
+33/176912/campos_512_v4
+33/176920/campos_512_v4
+33/176933/campos_512_v4
+33/176945/campos_512_v4
+33/176960/campos_512_v4
+33/176978/campos_512_v4
+33/176980/campos_512_v4
+33/176981/campos_512_v4
+33/176984/campos_512_v4
+33/176991/campos_512_v4
+33/177022/campos_512_v4
+33/177037/campos_512_v4
+33/177038/campos_512_v4
+33/177043/campos_512_v4
+33/177049/campos_512_v4
+33/177051/campos_512_v4
+33/177059/campos_512_v4
+33/177060/campos_512_v4
+33/177076/campos_512_v4
+33/177077/campos_512_v4
+33/177079/campos_512_v4
+33/177098/campos_512_v4
+33/177100/campos_512_v4
+33/177102/campos_512_v4
+33/177104/campos_512_v4
+33/177122/campos_512_v4
+33/177132/campos_512_v4
+33/177159/campos_512_v4
+33/177160/campos_512_v4
+33/177162/campos_512_v4
+33/177165/campos_512_v4
+33/177167/campos_512_v4
+33/177181/campos_512_v4
+33/177183/campos_512_v4
+33/177200/campos_512_v4
+33/177214/campos_512_v4
+33/177222/campos_512_v4
+33/177227/campos_512_v4
+33/177240/campos_512_v4
+33/177241/campos_512_v4
+33/177243/campos_512_v4
+33/177255/campos_512_v4
+33/177273/campos_512_v4
+33/177283/campos_512_v4
+33/177294/campos_512_v4
+33/177300/campos_512_v4
+33/177307/campos_512_v4
+33/177310/campos_512_v4
+33/177318/campos_512_v4
+33/177328/campos_512_v4
+33/177329/campos_512_v4
+33/177338/campos_512_v4
+33/177345/campos_512_v4
+33/177349/campos_512_v4
+33/177359/campos_512_v4
+33/177368/campos_512_v4
+33/177378/campos_512_v4
+33/177391/campos_512_v4
+33/177393/campos_512_v4
+33/177406/campos_512_v4
+33/177414/campos_512_v4
+33/177424/campos_512_v4
+33/177450/campos_512_v4
+33/177452/campos_512_v4
+33/177460/campos_512_v4
+33/177461/campos_512_v4
+33/177465/campos_512_v4
+33/177475/campos_512_v4
+33/177480/campos_512_v4
+33/177487/campos_512_v4
+33/177501/campos_512_v4
+33/177506/campos_512_v4
+33/177511/campos_512_v4
+33/177517/campos_512_v4
+33/177531/campos_512_v4
+33/177541/campos_512_v4
+33/177552/campos_512_v4
+33/177553/campos_512_v4
+33/177554/campos_512_v4
+33/177559/campos_512_v4
+33/177562/campos_512_v4
+33/177653/campos_512_v4
+33/177655/campos_512_v4
+33/177659/campos_512_v4
+33/177660/campos_512_v4
+33/177677/campos_512_v4
+33/177682/campos_512_v4
+33/177704/campos_512_v4
+33/177719/campos_512_v4
+33/177728/campos_512_v4
+33/177767/campos_512_v4
+33/177786/campos_512_v4
+33/177797/campos_512_v4
+33/177799/campos_512_v4
+33/177803/campos_512_v4
+33/177820/campos_512_v4
+33/177823/campos_512_v4
+33/177847/campos_512_v4
+33/177854/campos_512_v4
+33/177857/campos_512_v4
+33/177864/campos_512_v4
+33/177868/campos_512_v4
+33/177885/campos_512_v4
+33/177910/campos_512_v4
+33/177926/campos_512_v4
+33/177937/campos_512_v4
+33/177944/campos_512_v4
+33/177945/campos_512_v4
+33/177946/campos_512_v4
+33/177947/campos_512_v4
+33/177952/campos_512_v4
+33/177957/campos_512_v4
+33/177978/campos_512_v4
+33/177982/campos_512_v4
+33/177987/campos_512_v4
+33/177998/campos_512_v4
+33/178001/campos_512_v4
+33/178007/campos_512_v4
+33/178008/campos_512_v4
+33/178011/campos_512_v4
+33/178024/campos_512_v4
+33/178028/campos_512_v4
+33/178030/campos_512_v4
+33/178035/campos_512_v4
+33/178036/campos_512_v4
+33/178043/campos_512_v4
+33/178046/campos_512_v4
+33/178055/campos_512_v4
+33/178060/campos_512_v4
+33/178077/campos_512_v4
+33/178081/campos_512_v4
+33/178086/campos_512_v4
+33/178091/campos_512_v4
+33/178101/campos_512_v4
+33/178106/campos_512_v4
+33/178112/campos_512_v4
+33/178121/campos_512_v4
+33/178126/campos_512_v4
+33/178128/campos_512_v4
+33/178132/campos_512_v4
+33/178136/campos_512_v4
+33/178146/campos_512_v4
+33/178148/campos_512_v4
+33/178152/campos_512_v4
+33/178172/campos_512_v4
+33/178176/campos_512_v4
+33/178187/campos_512_v4
+33/178196/campos_512_v4
+33/178197/campos_512_v4
+33/178207/campos_512_v4
+33/178217/campos_512_v4
+33/178221/campos_512_v4
+33/178227/campos_512_v4
+33/178233/campos_512_v4
+33/178238/campos_512_v4
+33/178244/campos_512_v4
+33/178257/campos_512_v4
+33/178259/campos_512_v4
+33/178262/campos_512_v4
+33/178265/campos_512_v4
+33/178273/campos_512_v4
+33/178275/campos_512_v4
+33/178283/campos_512_v4
+33/178284/campos_512_v4
+33/178286/campos_512_v4
+33/178297/campos_512_v4
+33/178300/campos_512_v4
+33/178305/campos_512_v4
+33/178306/campos_512_v4
+33/178312/campos_512_v4
+33/178326/campos_512_v4
+33/178328/campos_512_v4
+33/178337/campos_512_v4
+33/178342/campos_512_v4
+33/178356/campos_512_v4
+33/178364/campos_512_v4
+33/178384/campos_512_v4
+33/178400/campos_512_v4
+33/178401/campos_512_v4
+33/178411/campos_512_v4
+33/178417/campos_512_v4
+33/178419/campos_512_v4
+33/178424/campos_512_v4
+33/178436/campos_512_v4
+33/178445/campos_512_v4
+33/178451/campos_512_v4
+33/178455/campos_512_v4
+33/178466/campos_512_v4
+33/178467/campos_512_v4
+33/178472/campos_512_v4
+33/178481/campos_512_v4
+33/178510/campos_512_v4
+33/178513/campos_512_v4
+33/178516/campos_512_v4
+33/178521/campos_512_v4
+33/178530/campos_512_v4
+33/178535/campos_512_v4
+33/178558/campos_512_v4
+33/178561/campos_512_v4
+33/178563/campos_512_v4
+33/178571/campos_512_v4
+33/178576/campos_512_v4
+33/178600/campos_512_v4
+33/178606/campos_512_v4
+33/178631/campos_512_v4
+33/178635/campos_512_v4
+33/178646/campos_512_v4
+33/178651/campos_512_v4
+33/178654/campos_512_v4
+33/178668/campos_512_v4
+33/178678/campos_512_v4
+33/178687/campos_512_v4
+33/178688/campos_512_v4
+33/178702/campos_512_v4
+33/178713/campos_512_v4
+33/178715/campos_512_v4
+33/178716/campos_512_v4
+33/178721/campos_512_v4
+33/178727/campos_512_v4
+33/178730/campos_512_v4
+33/178732/campos_512_v4
+33/178741/campos_512_v4
+33/178752/campos_512_v4
+33/178764/campos_512_v4
+33/178769/campos_512_v4
+33/178770/campos_512_v4
+33/178781/campos_512_v4
+33/178787/campos_512_v4
+33/178797/campos_512_v4
+33/178798/campos_512_v4
+33/178810/campos_512_v4
+33/178834/campos_512_v4
+33/178838/campos_512_v4
+33/178839/campos_512_v4
+33/178841/campos_512_v4
+33/178845/campos_512_v4
+33/178854/campos_512_v4
+33/178869/campos_512_v4
+33/178872/campos_512_v4
+33/178876/campos_512_v4
+33/178888/campos_512_v4
+33/178889/campos_512_v4
+33/178897/campos_512_v4
+33/178899/campos_512_v4
+33/178902/campos_512_v4
+33/178905/campos_512_v4
+33/178907/campos_512_v4
+33/178912/campos_512_v4
+33/178932/campos_512_v4
+33/178947/campos_512_v4
+33/178949/campos_512_v4
+33/178951/campos_512_v4
+33/178954/campos_512_v4
+33/178958/campos_512_v4
+33/178963/campos_512_v4
+33/178967/campos_512_v4
+33/178974/campos_512_v4
+33/178975/campos_512_v4
+33/178976/campos_512_v4
+33/178985/campos_512_v4
+33/178988/campos_512_v4
+33/178994/campos_512_v4
+33/179006/campos_512_v4
+33/179010/campos_512_v4
+33/179029/campos_512_v4
+33/179034/campos_512_v4
+33/179064/campos_512_v4
+33/179076/campos_512_v4
+33/179080/campos_512_v4
+33/179088/campos_512_v4
+33/179093/campos_512_v4
+33/179096/campos_512_v4
+33/179098/campos_512_v4
+33/179102/campos_512_v4
+33/179108/campos_512_v4
+33/179110/campos_512_v4
+33/179119/campos_512_v4
+33/179135/campos_512_v4
+33/179144/campos_512_v4
+33/179147/campos_512_v4
+33/179151/campos_512_v4
+33/179163/campos_512_v4
+33/179165/campos_512_v4
+33/179167/campos_512_v4
+33/179176/campos_512_v4
+33/179177/campos_512_v4
+33/179179/campos_512_v4
+33/179188/campos_512_v4
+33/179193/campos_512_v4
+33/179200/campos_512_v4
+33/179207/campos_512_v4
+33/179209/campos_512_v4
+33/179225/campos_512_v4
+33/179238/campos_512_v4
+33/179255/campos_512_v4
+33/179265/campos_512_v4
+33/179271/campos_512_v4
+33/179274/campos_512_v4
+33/179277/campos_512_v4
+33/179283/campos_512_v4
+33/179286/campos_512_v4
+33/179297/campos_512_v4
+33/179317/campos_512_v4
+33/179320/campos_512_v4
+33/179330/campos_512_v4
+33/179331/campos_512_v4
+33/179335/campos_512_v4
+33/179346/campos_512_v4
+33/179347/campos_512_v4
+33/179357/campos_512_v4
+33/179375/campos_512_v4
+33/179387/campos_512_v4
+33/179397/campos_512_v4
+33/179409/campos_512_v4
+33/179414/campos_512_v4
+33/179422/campos_512_v4
+33/179446/campos_512_v4
+33/179449/campos_512_v4
+33/179451/campos_512_v4
+33/179452/campos_512_v4
+33/179455/campos_512_v4
+33/179463/campos_512_v4
+33/179473/campos_512_v4
+33/179487/campos_512_v4
+33/179496/campos_512_v4
+33/179501/campos_512_v4
+33/179511/campos_512_v4
+33/179512/campos_512_v4
+33/179514/campos_512_v4
+33/179517/campos_512_v4
+33/179518/campos_512_v4
+33/179522/campos_512_v4
+33/179523/campos_512_v4
+33/179530/campos_512_v4
+33/179534/campos_512_v4
+33/179539/campos_512_v4
+33/179548/campos_512_v4
+33/179551/campos_512_v4
+33/179579/campos_512_v4
+33/179580/campos_512_v4
+33/179592/campos_512_v4
+33/179595/campos_512_v4
+33/179601/campos_512_v4
+33/179619/campos_512_v4
+33/179639/campos_512_v4
+33/179654/campos_512_v4
+33/179655/campos_512_v4
+33/179657/campos_512_v4
+33/179663/campos_512_v4
+33/179679/campos_512_v4
+33/179695/campos_512_v4
+33/179696/campos_512_v4
+33/179706/campos_512_v4
+33/179712/campos_512_v4
+33/179716/campos_512_v4
+33/179717/campos_512_v4
+33/179720/campos_512_v4
+33/179733/campos_512_v4
+33/179734/campos_512_v4
+33/179763/campos_512_v4
+33/179764/campos_512_v4
+33/179771/campos_512_v4
+33/179776/campos_512_v4
+33/179779/campos_512_v4
+33/179795/campos_512_v4
+33/179796/campos_512_v4
+33/179798/campos_512_v4
+33/179802/campos_512_v4
+33/179804/campos_512_v4
+33/179813/campos_512_v4
+33/179816/campos_512_v4
+33/179836/campos_512_v4
+33/179847/campos_512_v4
+33/179856/campos_512_v4
+33/179861/campos_512_v4
+33/179866/campos_512_v4
+33/179867/campos_512_v4
+33/179871/campos_512_v4
+33/179883/campos_512_v4
+33/179896/campos_512_v4
+33/179897/campos_512_v4
+33/179899/campos_512_v4
+33/179907/campos_512_v4
+33/179912/campos_512_v4
+33/179924/campos_512_v4
+33/179932/campos_512_v4
+33/179938/campos_512_v4
+33/179939/campos_512_v4
+33/179951/campos_512_v4
+33/179974/campos_512_v4
+33/179977/campos_512_v4
+33/179979/campos_512_v4
+33/180000/campos_512_v4
+33/180001/campos_512_v4
+34/180003/campos_512_v4
+34/180009/campos_512_v4
+34/180017/campos_512_v4
+34/180027/campos_512_v4
+34/180030/campos_512_v4
+34/180043/campos_512_v4
+34/180050/campos_512_v4
+34/180081/campos_512_v4
+34/180090/campos_512_v4
+34/180096/campos_512_v4
+34/180104/campos_512_v4
+34/180107/campos_512_v4
+34/180114/campos_512_v4
+34/180122/campos_512_v4
+34/180123/campos_512_v4
+34/180130/campos_512_v4
+34/180132/campos_512_v4
+34/180135/campos_512_v4
+34/180160/campos_512_v4
+34/180167/campos_512_v4
+34/180191/campos_512_v4
+34/180193/campos_512_v4
+34/180202/campos_512_v4
+34/180208/campos_512_v4
+34/180235/campos_512_v4
+34/180236/campos_512_v4
+34/180239/campos_512_v4
+34/180241/campos_512_v4
+34/180247/campos_512_v4
+34/180268/campos_512_v4
+34/180269/campos_512_v4
+34/180291/campos_512_v4
+34/180299/campos_512_v4
+34/180303/campos_512_v4
+34/180310/campos_512_v4
+34/180327/campos_512_v4
+34/180331/campos_512_v4
+34/180336/campos_512_v4
+34/180340/campos_512_v4
+34/180343/campos_512_v4
+34/180355/campos_512_v4
+34/180356/campos_512_v4
+34/180357/campos_512_v4
+34/180358/campos_512_v4
+34/180363/campos_512_v4
+34/180364/campos_512_v4
+34/180369/campos_512_v4
+34/180376/campos_512_v4
+34/180386/campos_512_v4
+34/180397/campos_512_v4
+34/180421/campos_512_v4
+34/180422/campos_512_v4
+34/180431/campos_512_v4
+34/180446/campos_512_v4
+34/180447/campos_512_v4
+34/180448/campos_512_v4
+34/180456/campos_512_v4
+34/180465/campos_512_v4
+34/180489/campos_512_v4
+34/180492/campos_512_v4
+34/180497/campos_512_v4
+34/180503/campos_512_v4
+34/180504/campos_512_v4
+34/180506/campos_512_v4
+34/180511/campos_512_v4
+34/180519/campos_512_v4
+34/180523/campos_512_v4
+34/180553/campos_512_v4
+34/180580/campos_512_v4
+34/180589/campos_512_v4
+34/180597/campos_512_v4
+34/180607/campos_512_v4
+34/180627/campos_512_v4
+34/180628/campos_512_v4
+34/180632/campos_512_v4
+34/180648/campos_512_v4
+34/180668/campos_512_v4
+34/180681/campos_512_v4
+34/180685/campos_512_v4
+34/180686/campos_512_v4
+34/180693/campos_512_v4
+34/180694/campos_512_v4
+34/180725/campos_512_v4
+34/180726/campos_512_v4
+34/180735/campos_512_v4
+34/180738/campos_512_v4
+34/180742/campos_512_v4
+34/180744/campos_512_v4
+34/180765/campos_512_v4
+34/180770/campos_512_v4
+34/180793/campos_512_v4
+34/180795/campos_512_v4
+34/180798/campos_512_v4
+34/180816/campos_512_v4
+34/180820/campos_512_v4
+34/180847/campos_512_v4
+34/180863/campos_512_v4
+34/180872/campos_512_v4
+34/180890/campos_512_v4
+34/180906/campos_512_v4
+34/180907/campos_512_v4
+34/180912/campos_512_v4
+34/180929/campos_512_v4
+34/180931/campos_512_v4
+34/180945/campos_512_v4
+34/180952/campos_512_v4
+34/180956/campos_512_v4
+34/180976/campos_512_v4
+34/180981/campos_512_v4
+34/180990/campos_512_v4
+34/180993/campos_512_v4
+34/180995/campos_512_v4
+34/181006/campos_512_v4
+34/181007/campos_512_v4
+34/181019/campos_512_v4
+34/181023/campos_512_v4
+34/181034/campos_512_v4
+34/181036/campos_512_v4
+34/181039/campos_512_v4
+34/181042/campos_512_v4
+34/181044/campos_512_v4
+34/181062/campos_512_v4
+34/181072/campos_512_v4
+34/181082/campos_512_v4
+34/181095/campos_512_v4
+34/181100/campos_512_v4
+34/181104/campos_512_v4
+34/181106/campos_512_v4
+34/181125/campos_512_v4
+34/181133/campos_512_v4
+34/181140/campos_512_v4
+34/181146/campos_512_v4
+34/181151/campos_512_v4
+34/181152/campos_512_v4
+34/181154/campos_512_v4
+34/181162/campos_512_v4
+34/181168/campos_512_v4
+34/181180/campos_512_v4
+34/181183/campos_512_v4
+34/181190/campos_512_v4
+34/181213/campos_512_v4
+34/181219/campos_512_v4
+34/181229/campos_512_v4
+34/181239/campos_512_v4
+34/181246/campos_512_v4
+34/181250/campos_512_v4
+34/181259/campos_512_v4
+34/181262/campos_512_v4
+34/181269/campos_512_v4
+34/181270/campos_512_v4
+34/181283/campos_512_v4
+34/181303/campos_512_v4
+34/181318/campos_512_v4
+34/181319/campos_512_v4
+34/181324/campos_512_v4
+34/181333/campos_512_v4
+34/181341/campos_512_v4
+34/181348/campos_512_v4
+34/181359/campos_512_v4
+34/181382/campos_512_v4
+34/181391/campos_512_v4
+34/181409/campos_512_v4
+34/181410/campos_512_v4
+34/181413/campos_512_v4
+34/181416/campos_512_v4
+34/181421/campos_512_v4
+34/181436/campos_512_v4
+34/181444/campos_512_v4
+34/181455/campos_512_v4
+34/181465/campos_512_v4
+34/181471/campos_512_v4
+34/181477/campos_512_v4
+34/181482/campos_512_v4
+34/181483/campos_512_v4
+34/181486/campos_512_v4
+34/181499/campos_512_v4
+34/181503/campos_512_v4
+34/181511/campos_512_v4
+34/181512/campos_512_v4
+34/181515/campos_512_v4
+34/181521/campos_512_v4
+34/181530/campos_512_v4
+34/181534/campos_512_v4
+34/181542/campos_512_v4
+34/181558/campos_512_v4
+34/181562/campos_512_v4
+34/181564/campos_512_v4
+34/181565/campos_512_v4
+34/181569/campos_512_v4
+34/181570/campos_512_v4
+34/181576/campos_512_v4
+34/181595/campos_512_v4
+34/181607/campos_512_v4
+34/181612/campos_512_v4
+34/181636/campos_512_v4
+34/181647/campos_512_v4
+34/181650/campos_512_v4
+34/181654/campos_512_v4
+34/181667/campos_512_v4
+34/181669/campos_512_v4
+34/181678/campos_512_v4
+34/181687/campos_512_v4
+34/181692/campos_512_v4
+34/181693/campos_512_v4
+34/181698/campos_512_v4
+34/181701/campos_512_v4
+34/181704/campos_512_v4
+34/181715/campos_512_v4
+34/181723/campos_512_v4
+34/181725/campos_512_v4
+34/181726/campos_512_v4
+34/181753/campos_512_v4
+34/181756/campos_512_v4
+34/181766/campos_512_v4
+34/181778/campos_512_v4
+34/181781/campos_512_v4
+34/181820/campos_512_v4
+34/181821/campos_512_v4
+34/181839/campos_512_v4
+34/181843/campos_512_v4
+34/181864/campos_512_v4
+34/181876/campos_512_v4
+34/181882/campos_512_v4
+34/181888/campos_512_v4
+34/181891/campos_512_v4
+34/181892/campos_512_v4
+34/181900/campos_512_v4
+34/181910/campos_512_v4
+34/181916/campos_512_v4
+34/181921/campos_512_v4
+34/181922/campos_512_v4
+34/181924/campos_512_v4
+34/181940/campos_512_v4
+34/181941/campos_512_v4
+34/181953/campos_512_v4
+34/181964/campos_512_v4
+34/181970/campos_512_v4
+34/181992/campos_512_v4
+34/181995/campos_512_v4
+34/182007/campos_512_v4
+34/182036/campos_512_v4
+34/182046/campos_512_v4
+34/182055/campos_512_v4
+34/182062/campos_512_v4
+34/182068/campos_512_v4
+34/182070/campos_512_v4
+34/182073/campos_512_v4
+34/182083/campos_512_v4
+34/182086/campos_512_v4
+34/182087/campos_512_v4
+34/182094/campos_512_v4
+34/182110/campos_512_v4
+34/182506/campos_512_v4
+34/182512/campos_512_v4
+34/182524/campos_512_v4
+34/182533/campos_512_v4
+34/182540/campos_512_v4
+34/182550/campos_512_v4
+34/182554/campos_512_v4
+34/182555/campos_512_v4
+34/182576/campos_512_v4
+34/182578/campos_512_v4
+34/182581/campos_512_v4
+34/182584/campos_512_v4
+34/182590/campos_512_v4
+34/182604/campos_512_v4
+34/182609/campos_512_v4
+34/182613/campos_512_v4
+34/182627/campos_512_v4
+34/182643/campos_512_v4
+34/182685/campos_512_v4
+34/182687/campos_512_v4
+34/182691/campos_512_v4
+34/182692/campos_512_v4
+34/182701/campos_512_v4
+34/182708/campos_512_v4
+34/182723/campos_512_v4
+34/182729/campos_512_v4
+34/182733/campos_512_v4
+34/182737/campos_512_v4
+34/182738/campos_512_v4
+34/182753/campos_512_v4
+34/182773/campos_512_v4
+34/182780/campos_512_v4
+34/182788/campos_512_v4
+34/182792/campos_512_v4
+34/182809/campos_512_v4
+34/182813/campos_512_v4
+34/182818/campos_512_v4
+34/182824/campos_512_v4
+34/182854/campos_512_v4
+34/182855/campos_512_v4
+34/182864/campos_512_v4
+34/182880/campos_512_v4
+34/182886/campos_512_v4
+34/182904/campos_512_v4
+34/182907/campos_512_v4
+34/182915/campos_512_v4
+34/182923/campos_512_v4
+34/182938/campos_512_v4
+34/182942/campos_512_v4
+34/182956/campos_512_v4
+34/182964/campos_512_v4
+34/182969/campos_512_v4
+34/182980/campos_512_v4
+34/182981/campos_512_v4
+34/182984/campos_512_v4
+34/182989/campos_512_v4
+34/182992/campos_512_v4
+34/182998/campos_512_v4
+34/183003/campos_512_v4
+34/183005/campos_512_v4
+34/183011/campos_512_v4
+34/183019/campos_512_v4
+34/183022/campos_512_v4
+34/183034/campos_512_v4
+34/183042/campos_512_v4
+34/183049/campos_512_v4
+34/183055/campos_512_v4
+34/183059/campos_512_v4
+34/183060/campos_512_v4
+34/183063/campos_512_v4
+34/183068/campos_512_v4
+34/183087/campos_512_v4
+34/183088/campos_512_v4
+34/183093/campos_512_v4
+34/183108/campos_512_v4
+34/183110/campos_512_v4
+34/183121/campos_512_v4
+34/183506/campos_512_v4
+34/183527/campos_512_v4
+34/183547/campos_512_v4
+34/183555/campos_512_v4
+34/183558/campos_512_v4
+34/183561/campos_512_v4
+34/183564/campos_512_v4
+34/183589/campos_512_v4
+34/183605/campos_512_v4
+34/183607/campos_512_v4
+34/183622/campos_512_v4
+34/184018/campos_512_v4
+34/184025/campos_512_v4
+34/184032/campos_512_v4
+34/184035/campos_512_v4
+34/184037/campos_512_v4
+34/184046/campos_512_v4
+34/184051/campos_512_v4
+34/184058/campos_512_v4
+34/184060/campos_512_v4
+34/184064/campos_512_v4
+34/184068/campos_512_v4
+34/184086/campos_512_v4
+34/184099/campos_512_v4
+34/184116/campos_512_v4
+34/184119/campos_512_v4
+34/184122/campos_512_v4
+34/184130/campos_512_v4
+34/184136/campos_512_v4
+34/184138/campos_512_v4
+34/184165/campos_512_v4
+34/184171/campos_512_v4
+34/184178/campos_512_v4
+34/184182/campos_512_v4
+34/184198/campos_512_v4
+34/184228/campos_512_v4
+34/184229/campos_512_v4
+34/184246/campos_512_v4
+34/184276/campos_512_v4
+34/184281/campos_512_v4
+34/184292/campos_512_v4
+34/184300/campos_512_v4
+34/184304/campos_512_v4
+34/184306/campos_512_v4
+34/184309/campos_512_v4
+34/184311/campos_512_v4
+34/184330/campos_512_v4
+34/184332/campos_512_v4
+34/184334/campos_512_v4
+34/184339/campos_512_v4
+34/184342/campos_512_v4
+34/184364/campos_512_v4
+34/184367/campos_512_v4
+34/184369/campos_512_v4
+34/184374/campos_512_v4
+34/184377/campos_512_v4
+34/184387/campos_512_v4
+34/184398/campos_512_v4
+34/184408/campos_512_v4
+34/184422/campos_512_v4
+34/184447/campos_512_v4
+34/184449/campos_512_v4
+34/184461/campos_512_v4
+34/184464/campos_512_v4
+34/184467/campos_512_v4
+34/184468/campos_512_v4
+34/184469/campos_512_v4
+34/184483/campos_512_v4
+34/184488/campos_512_v4
+34/184492/campos_512_v4
+34/184508/campos_512_v4
+34/184518/campos_512_v4
+34/184524/campos_512_v4
+34/184534/campos_512_v4
+34/184548/campos_512_v4
+34/184550/campos_512_v4
+34/184568/campos_512_v4
+34/184573/campos_512_v4
+34/184574/campos_512_v4
+34/184577/campos_512_v4
+34/184591/campos_512_v4
+34/184595/campos_512_v4
+34/184601/campos_512_v4
+34/184602/campos_512_v4
+34/184611/campos_512_v4
+34/184615/campos_512_v4
+34/184627/campos_512_v4
+34/184638/campos_512_v4
+34/184648/campos_512_v4
+34/184658/campos_512_v4
+34/184674/campos_512_v4
+34/184687/campos_512_v4
+34/184691/campos_512_v4
+34/184703/campos_512_v4
+34/184729/campos_512_v4
+34/184740/campos_512_v4
+34/184747/campos_512_v4
+34/184772/campos_512_v4
+34/184773/campos_512_v4
+34/184776/campos_512_v4
+34/184777/campos_512_v4
+34/184783/campos_512_v4
+34/184793/campos_512_v4
+34/184794/campos_512_v4
+34/184797/campos_512_v4
+34/184808/campos_512_v4
+34/184814/campos_512_v4
+34/184820/campos_512_v4
+34/184823/campos_512_v4
+34/184825/campos_512_v4
+34/184829/campos_512_v4
+34/184830/campos_512_v4
+34/184832/campos_512_v4
+34/184842/campos_512_v4
+34/184854/campos_512_v4
+34/184860/campos_512_v4
+34/184864/campos_512_v4
+34/184872/campos_512_v4
+34/184878/campos_512_v4
+34/184902/campos_512_v4
+34/184911/campos_512_v4
+34/184916/campos_512_v4
+34/184940/campos_512_v4
+34/184948/campos_512_v4
+34/184949/campos_512_v4
+34/184979/campos_512_v4
+34/184988/campos_512_v4
+34/184993/campos_512_v4
+34/184994/campos_512_v4
+34/185001/campos_512_v4
+35/185003/campos_512_v4
+35/185005/campos_512_v4
+35/185024/campos_512_v4
+35/185031/campos_512_v4
+35/185052/campos_512_v4
+35/185057/campos_512_v4
+35/185059/campos_512_v4
+35/185062/campos_512_v4
+35/185072/campos_512_v4
+35/185079/campos_512_v4
+35/185086/campos_512_v4
+35/185092/campos_512_v4
+35/185098/campos_512_v4
+35/185104/campos_512_v4
+35/185108/campos_512_v4
+35/185119/campos_512_v4
+35/185148/campos_512_v4
+35/185149/campos_512_v4
+35/185155/campos_512_v4
+35/185156/campos_512_v4
+35/185157/campos_512_v4
+35/185158/campos_512_v4
+35/185162/campos_512_v4
+35/185163/campos_512_v4
+35/185174/campos_512_v4
+35/185178/campos_512_v4
+35/185188/campos_512_v4
+35/185189/campos_512_v4
+35/185191/campos_512_v4
+35/185192/campos_512_v4
+35/185199/campos_512_v4
+35/185204/campos_512_v4
+35/185210/campos_512_v4
+35/185214/campos_512_v4
+35/185227/campos_512_v4
+35/185228/campos_512_v4
+35/185235/campos_512_v4
+35/185262/campos_512_v4
+35/185267/campos_512_v4
+35/185270/campos_512_v4
+35/185280/campos_512_v4
+35/185282/campos_512_v4
+35/185286/campos_512_v4
+35/185287/campos_512_v4
+35/185307/campos_512_v4
+35/185319/campos_512_v4
+35/185324/campos_512_v4
+35/185328/campos_512_v4
+35/185336/campos_512_v4
+35/185343/campos_512_v4
+35/185359/campos_512_v4
+35/185361/campos_512_v4
+35/185365/campos_512_v4
+35/185372/campos_512_v4
+35/185387/campos_512_v4
+35/185398/campos_512_v4
+35/185400/campos_512_v4
+35/185401/campos_512_v4
+35/185408/campos_512_v4
+35/185411/campos_512_v4
+35/185413/campos_512_v4
+35/185419/campos_512_v4
+35/185427/campos_512_v4
+35/185428/campos_512_v4
+35/185436/campos_512_v4
+35/185441/campos_512_v4
+35/185446/campos_512_v4
+35/185453/campos_512_v4
+35/185455/campos_512_v4
+35/185464/campos_512_v4
+35/185467/campos_512_v4
+35/185483/campos_512_v4
+35/185488/campos_512_v4
+35/185489/campos_512_v4
+35/185493/campos_512_v4
+35/185495/campos_512_v4
+35/185498/campos_512_v4
+35/185501/campos_512_v4
+35/185506/campos_512_v4
+35/185515/campos_512_v4
+35/185518/campos_512_v4
+35/185519/campos_512_v4
+35/185533/campos_512_v4
+35/185542/campos_512_v4
+35/185543/campos_512_v4
+35/185547/campos_512_v4
+35/185555/campos_512_v4
+35/185556/campos_512_v4
+35/185563/campos_512_v4
+35/185565/campos_512_v4
+35/185570/campos_512_v4
+35/185571/campos_512_v4
+35/185577/campos_512_v4
+35/185580/campos_512_v4
+35/185586/campos_512_v4
+35/185616/campos_512_v4
+35/185623/campos_512_v4
+35/185627/campos_512_v4
+35/185632/campos_512_v4
+35/185638/campos_512_v4
+35/185640/campos_512_v4
+35/185642/campos_512_v4
+35/185643/campos_512_v4
+35/185656/campos_512_v4
+35/185672/campos_512_v4
+35/185676/campos_512_v4
+35/185677/campos_512_v4
+35/185686/campos_512_v4
+35/185687/campos_512_v4
+35/185691/campos_512_v4
+35/185696/campos_512_v4
+35/185697/campos_512_v4
+35/185700/campos_512_v4
+35/185706/campos_512_v4
+35/185710/campos_512_v4
+35/185711/campos_512_v4
+35/185712/campos_512_v4
+35/185719/campos_512_v4
+35/185735/campos_512_v4
+35/185757/campos_512_v4
+35/185759/campos_512_v4
+35/185764/campos_512_v4
+35/185784/campos_512_v4
+35/185786/campos_512_v4
+35/185814/campos_512_v4
+35/185828/campos_512_v4
+35/185833/campos_512_v4
+35/185836/campos_512_v4
+35/185851/campos_512_v4
+35/185854/campos_512_v4
+35/185858/campos_512_v4
+35/185862/campos_512_v4
+35/185863/campos_512_v4
+35/185874/campos_512_v4
+35/185890/campos_512_v4
+35/185909/campos_512_v4
+35/185917/campos_512_v4
+35/185932/campos_512_v4
+35/185950/campos_512_v4
+35/185964/campos_512_v4
+35/185965/campos_512_v4
+35/185980/campos_512_v4
+35/185991/campos_512_v4
+35/185992/campos_512_v4
+35/185994/campos_512_v4
+35/185995/campos_512_v4
+35/186002/campos_512_v4
+35/186005/campos_512_v4
+35/186014/campos_512_v4
+35/186032/campos_512_v4
+35/186034/campos_512_v4
+35/186038/campos_512_v4
+35/186047/campos_512_v4
+35/186048/campos_512_v4
+35/186053/campos_512_v4
+35/186057/campos_512_v4
+35/186060/campos_512_v4
+35/186067/campos_512_v4
+35/186077/campos_512_v4
+35/186089/campos_512_v4
+35/186111/campos_512_v4
+35/186112/campos_512_v4
+35/186114/campos_512_v4
+35/186115/campos_512_v4
+35/186127/campos_512_v4
+35/186129/campos_512_v4
+35/186145/campos_512_v4
+35/186173/campos_512_v4
+35/186174/campos_512_v4
+35/186190/campos_512_v4
+35/186193/campos_512_v4
+35/186199/campos_512_v4
+35/186206/campos_512_v4
+35/186212/campos_512_v4
+35/186214/campos_512_v4
+35/186215/campos_512_v4
+35/186216/campos_512_v4
+35/186217/campos_512_v4
+35/186219/campos_512_v4
+35/186221/campos_512_v4
+35/186226/campos_512_v4
+35/186227/campos_512_v4
+35/186232/campos_512_v4
+35/186234/campos_512_v4
+35/186242/campos_512_v4
+35/186253/campos_512_v4
+35/186257/campos_512_v4
+35/186259/campos_512_v4
+35/186287/campos_512_v4
+35/186299/campos_512_v4
+35/186313/campos_512_v4
+35/186325/campos_512_v4
+35/186328/campos_512_v4
+35/186350/campos_512_v4
+35/186352/campos_512_v4
+35/186361/campos_512_v4
+35/186371/campos_512_v4
+35/186378/campos_512_v4
+35/186382/campos_512_v4
+35/186385/campos_512_v4
+35/186389/campos_512_v4
+35/186397/campos_512_v4
+35/186401/campos_512_v4
+35/186402/campos_512_v4
+35/186403/campos_512_v4
+35/186408/campos_512_v4
+35/186414/campos_512_v4
+35/186424/campos_512_v4
+35/186425/campos_512_v4
+35/186434/campos_512_v4
+35/186435/campos_512_v4
+35/186454/campos_512_v4
+35/186474/campos_512_v4
+35/186480/campos_512_v4
+35/186485/campos_512_v4
+35/186489/campos_512_v4
+35/186498/campos_512_v4
+35/186499/campos_512_v4
+35/186501/campos_512_v4
+35/186506/campos_512_v4
+35/186508/campos_512_v4
+35/186512/campos_512_v4
+35/186518/campos_512_v4
+35/186530/campos_512_v4
+35/186540/campos_512_v4
+35/186543/campos_512_v4
+35/186554/campos_512_v4
+35/186556/campos_512_v4
+35/186557/campos_512_v4
+35/186560/campos_512_v4
+35/186563/campos_512_v4
+35/186567/campos_512_v4
+35/186575/campos_512_v4
+35/186577/campos_512_v4
+35/186582/campos_512_v4
+35/186596/campos_512_v4
+35/186609/campos_512_v4
+35/186610/campos_512_v4
+35/186640/campos_512_v4
+35/186646/campos_512_v4
+35/186650/campos_512_v4
+35/186655/campos_512_v4
+35/186659/campos_512_v4
+35/186682/campos_512_v4
+35/186693/campos_512_v4
+35/186697/campos_512_v4
+35/186708/campos_512_v4
+35/186715/campos_512_v4
+35/186725/campos_512_v4
+35/186738/campos_512_v4
+35/186744/campos_512_v4
+35/186746/campos_512_v4
+35/186748/campos_512_v4
+35/186755/campos_512_v4
+35/186757/campos_512_v4
+35/186763/campos_512_v4
+35/186768/campos_512_v4
+35/186769/campos_512_v4
+35/186796/campos_512_v4
+35/186800/campos_512_v4
+35/186812/campos_512_v4
+35/186813/campos_512_v4
+35/186818/campos_512_v4
+35/186819/campos_512_v4
+35/186829/campos_512_v4
+35/186836/campos_512_v4
+35/186840/campos_512_v4
+35/186857/campos_512_v4
+35/186864/campos_512_v4
+35/186871/campos_512_v4
+35/186874/campos_512_v4
+35/186881/campos_512_v4
+35/186899/campos_512_v4
+35/186900/campos_512_v4
+35/186907/campos_512_v4
+35/186913/campos_512_v4
+35/186916/campos_512_v4
+35/186917/campos_512_v4
+35/186929/campos_512_v4
+35/186931/campos_512_v4
+35/186939/campos_512_v4
+35/186957/campos_512_v4
+35/186965/campos_512_v4
+35/186976/campos_512_v4
+35/186977/campos_512_v4
+35/186995/campos_512_v4
+35/187006/campos_512_v4
+35/187013/campos_512_v4
+35/187018/campos_512_v4
+35/187019/campos_512_v4
+35/187056/campos_512_v4
+35/187058/campos_512_v4
+35/187105/campos_512_v4
+35/187106/campos_512_v4
+35/187114/campos_512_v4
+35/187121/campos_512_v4
+35/187132/campos_512_v4
+35/187136/campos_512_v4
+35/187150/campos_512_v4
+35/187154/campos_512_v4
+35/187157/campos_512_v4
+35/187162/campos_512_v4
+35/187165/campos_512_v4
+35/187167/campos_512_v4
+35/187173/campos_512_v4
+35/187201/campos_512_v4
+35/187204/campos_512_v4
+35/187224/campos_512_v4
+35/187229/campos_512_v4
+35/187234/campos_512_v4
+35/187235/campos_512_v4
+35/187246/campos_512_v4
+35/187251/campos_512_v4
+35/187252/campos_512_v4
+35/187267/campos_512_v4
+35/187271/campos_512_v4
+35/187272/campos_512_v4
+35/187278/campos_512_v4
+35/187283/campos_512_v4
+35/187288/campos_512_v4
+35/187302/campos_512_v4
+35/187332/campos_512_v4
+35/187358/campos_512_v4
+35/187370/campos_512_v4
+35/187385/campos_512_v4
+35/187388/campos_512_v4
+35/187389/campos_512_v4
+35/187390/campos_512_v4
+35/187395/campos_512_v4
+35/187402/campos_512_v4
+35/187411/campos_512_v4
+35/187413/campos_512_v4
+35/187416/campos_512_v4
+35/187417/campos_512_v4
+35/187418/campos_512_v4
+35/187424/campos_512_v4
+35/187441/campos_512_v4
+35/187442/campos_512_v4
+35/187446/campos_512_v4
+35/187457/campos_512_v4
+35/187487/campos_512_v4
+35/187499/campos_512_v4
+35/187502/campos_512_v4
+35/187503/campos_512_v4
+35/187514/campos_512_v4
+35/187522/campos_512_v4
+35/187523/campos_512_v4
+35/187529/campos_512_v4
+35/187531/campos_512_v4
+35/187534/campos_512_v4
+35/187536/campos_512_v4
+35/187546/campos_512_v4
+35/187549/campos_512_v4
+35/187550/campos_512_v4
+35/187551/campos_512_v4
+35/187557/campos_512_v4
+35/187561/campos_512_v4
+35/187563/campos_512_v4
+35/187567/campos_512_v4
+35/187568/campos_512_v4
+35/187570/campos_512_v4
+35/187572/campos_512_v4
+35/187576/campos_512_v4
+35/187583/campos_512_v4
+35/187586/campos_512_v4
+35/187592/campos_512_v4
+35/187600/campos_512_v4
+35/187601/campos_512_v4
+35/187609/campos_512_v4
+35/187613/campos_512_v4
+35/187616/campos_512_v4
+35/187617/campos_512_v4
+35/187619/campos_512_v4
+35/187622/campos_512_v4
+35/187637/campos_512_v4
+35/187638/campos_512_v4
+35/187641/campos_512_v4
+35/187648/campos_512_v4
+35/187653/campos_512_v4
+35/187655/campos_512_v4
+35/187662/campos_512_v4
+35/187663/campos_512_v4
+35/187670/campos_512_v4
+35/187674/campos_512_v4
+35/187685/campos_512_v4
+35/187687/campos_512_v4
+35/187694/campos_512_v4
+35/187703/campos_512_v4
+35/187732/campos_512_v4
+35/187744/campos_512_v4
+35/187750/campos_512_v4
+35/187752/campos_512_v4
+35/187754/campos_512_v4
+35/187755/campos_512_v4
+35/187771/campos_512_v4
+35/187780/campos_512_v4
+35/187790/campos_512_v4
+35/187796/campos_512_v4
+35/187801/campos_512_v4
+35/187806/campos_512_v4
+35/187812/campos_512_v4
+35/187813/campos_512_v4
+35/187826/campos_512_v4
+35/187833/campos_512_v4
+35/187835/campos_512_v4
+35/187844/campos_512_v4
+35/187857/campos_512_v4
+35/187858/campos_512_v4
+35/187860/campos_512_v4
+35/187865/campos_512_v4
+35/187878/campos_512_v4
+35/187884/campos_512_v4
+35/187885/campos_512_v4
+35/187887/campos_512_v4
+35/187889/campos_512_v4
+35/187908/campos_512_v4
+35/187910/campos_512_v4
+35/187912/campos_512_v4
+35/187922/campos_512_v4
+35/187928/campos_512_v4
+35/187939/campos_512_v4
+35/187943/campos_512_v4
+35/187945/campos_512_v4
+35/187953/campos_512_v4
+35/187971/campos_512_v4
+35/187978/campos_512_v4
+35/187982/campos_512_v4
+35/188007/campos_512_v4
+35/188010/campos_512_v4
+35/188011/campos_512_v4
+35/188035/campos_512_v4
+35/188038/campos_512_v4
+35/188041/campos_512_v4
+35/188049/campos_512_v4
+35/188052/campos_512_v4
+35/188053/campos_512_v4
+35/188054/campos_512_v4
+35/188060/campos_512_v4
+35/188068/campos_512_v4
+35/188075/campos_512_v4
+35/188087/campos_512_v4
+35/188099/campos_512_v4
+35/188106/campos_512_v4
+35/188115/campos_512_v4
+35/188118/campos_512_v4
+35/188132/campos_512_v4
+35/188150/campos_512_v4
+35/188164/campos_512_v4
+35/188175/campos_512_v4
+35/188196/campos_512_v4
+35/188197/campos_512_v4
+35/188203/campos_512_v4
+35/188207/campos_512_v4
+35/188212/campos_512_v4
+35/188219/campos_512_v4
+35/188228/campos_512_v4
+35/188230/campos_512_v4
+35/188234/campos_512_v4
+35/188238/campos_512_v4
+35/188248/campos_512_v4
+35/188261/campos_512_v4
+35/188274/campos_512_v4
+35/188275/campos_512_v4
+35/188277/campos_512_v4
+35/188287/campos_512_v4
+35/188290/campos_512_v4
+35/188307/campos_512_v4
+35/188310/campos_512_v4
+35/188324/campos_512_v4
+35/188325/campos_512_v4
+35/188327/campos_512_v4
+35/188332/campos_512_v4
+35/188333/campos_512_v4
+35/188364/campos_512_v4
+35/188367/campos_512_v4
+35/188374/campos_512_v4
+35/188406/campos_512_v4
+35/188411/campos_512_v4
+35/188415/campos_512_v4
+35/188425/campos_512_v4
+35/188427/campos_512_v4
+35/188428/campos_512_v4
+35/188429/campos_512_v4
+35/188440/campos_512_v4
+35/188443/campos_512_v4
+35/188453/campos_512_v4
+35/188473/campos_512_v4
+35/188474/campos_512_v4
+35/188477/campos_512_v4
+35/188482/campos_512_v4
+35/188488/campos_512_v4
+35/188495/campos_512_v4
+35/188496/campos_512_v4
+35/188501/campos_512_v4
+35/188509/campos_512_v4
+35/188521/campos_512_v4
+35/188527/campos_512_v4
+35/188530/campos_512_v4
+35/188536/campos_512_v4
+35/188539/campos_512_v4
+35/188555/campos_512_v4
+35/188558/campos_512_v4
+35/188568/campos_512_v4
+35/188574/campos_512_v4
+35/188587/campos_512_v4
+35/188593/campos_512_v4
+35/188601/campos_512_v4
+35/188606/campos_512_v4
+35/188612/campos_512_v4
+35/188621/campos_512_v4
+35/188622/campos_512_v4
+35/188624/campos_512_v4
+35/188633/campos_512_v4
+35/188635/campos_512_v4
+35/188639/campos_512_v4
+35/188659/campos_512_v4
+35/188665/campos_512_v4
+35/188666/campos_512_v4
+35/188668/campos_512_v4
+35/188671/campos_512_v4
+35/188673/campos_512_v4
+35/188690/campos_512_v4
+35/188705/campos_512_v4
+35/188717/campos_512_v4
+35/188720/campos_512_v4
+35/188721/campos_512_v4
+35/188740/campos_512_v4
+35/188744/campos_512_v4
+35/188747/campos_512_v4
+35/188750/campos_512_v4
+35/188755/campos_512_v4
+35/188760/campos_512_v4
+35/188767/campos_512_v4
+35/188769/campos_512_v4
+35/188771/campos_512_v4
+35/188784/campos_512_v4
+35/188786/campos_512_v4
+35/188792/campos_512_v4
+35/188795/campos_512_v4
+35/188803/campos_512_v4
+35/188804/campos_512_v4
+35/188807/campos_512_v4
+35/188819/campos_512_v4
+35/188821/campos_512_v4
+35/188826/campos_512_v4
+35/188836/campos_512_v4
+35/188839/campos_512_v4
+35/188842/campos_512_v4
+35/188849/campos_512_v4
+35/188853/campos_512_v4
+35/188857/campos_512_v4
+35/188865/campos_512_v4
+35/188874/campos_512_v4
+35/188876/campos_512_v4
+35/188887/campos_512_v4
+35/188906/campos_512_v4
+35/188913/campos_512_v4
+35/188914/campos_512_v4
+35/188919/campos_512_v4
+35/188921/campos_512_v4
+35/188925/campos_512_v4
+35/188929/campos_512_v4
+35/188942/campos_512_v4
+35/188947/campos_512_v4
+35/188956/campos_512_v4
+35/188958/campos_512_v4
+35/188977/campos_512_v4
+35/188990/campos_512_v4
+35/188996/campos_512_v4
+35/189005/campos_512_v4
+35/189010/campos_512_v4
+35/189015/campos_512_v4
+35/189022/campos_512_v4
+35/189026/campos_512_v4
+35/189038/campos_512_v4
+35/189053/campos_512_v4
+35/189056/campos_512_v4
+35/189067/campos_512_v4
+35/189079/campos_512_v4
+35/189088/campos_512_v4
+35/189091/campos_512_v4
+35/189104/campos_512_v4
+35/189131/campos_512_v4
+35/189137/campos_512_v4
+35/189140/campos_512_v4
+35/189143/campos_512_v4
+35/189148/campos_512_v4
+35/189157/campos_512_v4
+35/189168/campos_512_v4
+35/189184/campos_512_v4
+35/189187/campos_512_v4
+35/189188/campos_512_v4
+35/189197/campos_512_v4
+35/189202/campos_512_v4
+35/189211/campos_512_v4
+35/189224/campos_512_v4
+35/189232/campos_512_v4
+35/189234/campos_512_v4
+35/189250/campos_512_v4
+35/189258/campos_512_v4
+35/189300/campos_512_v4
+35/189302/campos_512_v4
+35/189311/campos_512_v4
+35/189313/campos_512_v4
+35/189323/campos_512_v4
+35/189331/campos_512_v4
+35/189344/campos_512_v4
+35/189350/campos_512_v4
+35/189351/campos_512_v4
+35/189370/campos_512_v4
+35/189371/campos_512_v4
+35/189373/campos_512_v4
+35/189374/campos_512_v4
+35/189380/campos_512_v4
+35/189395/campos_512_v4
+35/189396/campos_512_v4
+35/189408/campos_512_v4
+35/189416/campos_512_v4
+35/189429/campos_512_v4
+35/189446/campos_512_v4
+35/189459/campos_512_v4
+35/189497/campos_512_v4
+35/189504/campos_512_v4
+35/189506/campos_512_v4
+35/189529/campos_512_v4
+35/189539/campos_512_v4
+35/189543/campos_512_v4
+35/189545/campos_512_v4
+35/189547/campos_512_v4
+35/189555/campos_512_v4
+35/189556/campos_512_v4
+35/189563/campos_512_v4
+35/189565/campos_512_v4
+35/189572/campos_512_v4
+35/189573/campos_512_v4
+35/189575/campos_512_v4
+35/189576/campos_512_v4
+35/189583/campos_512_v4
+35/189587/campos_512_v4
+35/189616/campos_512_v4
+35/189620/campos_512_v4
+35/189623/campos_512_v4
+35/189628/campos_512_v4
+35/189630/campos_512_v4
+35/189632/campos_512_v4
+35/189633/campos_512_v4
+35/189647/campos_512_v4
+35/189648/campos_512_v4
+35/189653/campos_512_v4
+35/189688/campos_512_v4
+35/189694/campos_512_v4
+35/189695/campos_512_v4
+35/189696/campos_512_v4
+35/189705/campos_512_v4
+35/189709/campos_512_v4
+35/189714/campos_512_v4
+35/189731/campos_512_v4
+35/189736/campos_512_v4
+35/189751/campos_512_v4
+35/189758/campos_512_v4
+35/189767/campos_512_v4
+35/189773/campos_512_v4
+35/189782/campos_512_v4
+35/189783/campos_512_v4
+35/189804/campos_512_v4
+35/189805/campos_512_v4
+35/189806/campos_512_v4
+35/189824/campos_512_v4
+35/189826/campos_512_v4
+35/189836/campos_512_v4
+35/189847/campos_512_v4
+35/189855/campos_512_v4
+35/189864/campos_512_v4
+35/189873/campos_512_v4
+35/189875/campos_512_v4
+35/189888/campos_512_v4
+35/189905/campos_512_v4
+35/189909/campos_512_v4
+35/189913/campos_512_v4
+35/189914/campos_512_v4
+35/189920/campos_512_v4
+35/189927/campos_512_v4
+35/189946/campos_512_v4
+35/189951/campos_512_v4
+35/189953/campos_512_v4
+35/189958/campos_512_v4
+35/189973/campos_512_v4
+35/189979/campos_512_v4
+35/189980/campos_512_v4
+35/189984/campos_512_v4
+35/189994/campos_512_v4
+36/190011/campos_512_v4
+36/190017/campos_512_v4
+36/190018/campos_512_v4
+36/190026/campos_512_v4
+36/190034/campos_512_v4
+36/190037/campos_512_v4
+36/190038/campos_512_v4
+36/190053/campos_512_v4
+36/190065/campos_512_v4
+36/190071/campos_512_v4
+36/190077/campos_512_v4
+36/190083/campos_512_v4
+36/190088/campos_512_v4
+36/190112/campos_512_v4
+36/190113/campos_512_v4
+36/190115/campos_512_v4
+36/190120/campos_512_v4
+36/190130/campos_512_v4
+36/190137/campos_512_v4
+36/190144/campos_512_v4
+36/190146/campos_512_v4
+36/190148/campos_512_v4
+36/190152/campos_512_v4
+36/190157/campos_512_v4
+36/190160/campos_512_v4
+36/190162/campos_512_v4
+36/190179/campos_512_v4
+36/190180/campos_512_v4
+36/190187/campos_512_v4
+36/190190/campos_512_v4
+36/190200/campos_512_v4
+36/190211/campos_512_v4
+36/190213/campos_512_v4
+36/190225/campos_512_v4
+36/190232/campos_512_v4
+36/190233/campos_512_v4
+36/190243/campos_512_v4
+36/190245/campos_512_v4
+36/190254/campos_512_v4
+36/190255/campos_512_v4
+36/190266/campos_512_v4
+36/190281/campos_512_v4
+36/190296/campos_512_v4
+36/190305/campos_512_v4
+36/190309/campos_512_v4
+36/190316/campos_512_v4
+36/190333/campos_512_v4
+36/190345/campos_512_v4
+36/190351/campos_512_v4
+36/190354/campos_512_v4
+36/190366/campos_512_v4
+36/190376/campos_512_v4
+36/190393/campos_512_v4
+36/190401/campos_512_v4
+36/190407/campos_512_v4
+36/190423/campos_512_v4
+36/190424/campos_512_v4
+36/190440/campos_512_v4
+36/190446/campos_512_v4
+36/190449/campos_512_v4
+36/190453/campos_512_v4
+36/190456/campos_512_v4
+36/190459/campos_512_v4
+36/190476/campos_512_v4
+36/190478/campos_512_v4
+36/190485/campos_512_v4
+36/190486/campos_512_v4
+36/190502/campos_512_v4
+36/190504/campos_512_v4
+36/190508/campos_512_v4
+36/190529/campos_512_v4
+36/190532/campos_512_v4
+36/190535/campos_512_v4
+36/190536/campos_512_v4
+36/190543/campos_512_v4
+36/190553/campos_512_v4
+36/190562/campos_512_v4
+36/190565/campos_512_v4
+36/190575/campos_512_v4
+36/190576/campos_512_v4
+36/190579/campos_512_v4
+36/190586/campos_512_v4
+36/190587/campos_512_v4
+36/190590/campos_512_v4
+36/190595/campos_512_v4
+36/190606/campos_512_v4
+36/190614/campos_512_v4
+36/190616/campos_512_v4
+36/190618/campos_512_v4
+36/190619/campos_512_v4
+36/190626/campos_512_v4
+36/190629/campos_512_v4
+36/190630/campos_512_v4
+36/190640/campos_512_v4
+36/190643/campos_512_v4
+36/190662/campos_512_v4
+36/190670/campos_512_v4
+36/190681/campos_512_v4
+36/190695/campos_512_v4
+36/190696/campos_512_v4
+36/190714/campos_512_v4
+36/190724/campos_512_v4
+36/190726/campos_512_v4
+36/190737/campos_512_v4
+36/190748/campos_512_v4
+36/190767/campos_512_v4
+36/190775/campos_512_v4
+36/190781/campos_512_v4
+36/190782/campos_512_v4
+36/190786/campos_512_v4
+36/190793/campos_512_v4
+36/190797/campos_512_v4
+36/190799/campos_512_v4
+36/190806/campos_512_v4
+36/190819/campos_512_v4
+36/190824/campos_512_v4
+36/190825/campos_512_v4
+36/190844/campos_512_v4
+36/190847/campos_512_v4
+36/190857/campos_512_v4
+36/190858/campos_512_v4
+36/190863/campos_512_v4
+36/190867/campos_512_v4
+36/190876/campos_512_v4
+36/190877/campos_512_v4
+36/190882/campos_512_v4
+36/190888/campos_512_v4
+36/190905/campos_512_v4
+36/190908/campos_512_v4
+36/190914/campos_512_v4
+36/190917/campos_512_v4
+36/190930/campos_512_v4
+36/190939/campos_512_v4
+36/190945/campos_512_v4
+36/190957/campos_512_v4
+36/190960/campos_512_v4
+36/190961/campos_512_v4
+36/190969/campos_512_v4
+36/190977/campos_512_v4
+36/190984/campos_512_v4
+36/190992/campos_512_v4
+36/190995/campos_512_v4
+36/190997/campos_512_v4
+36/191002/campos_512_v4
+36/191007/campos_512_v4
+36/191008/campos_512_v4
+36/191021/campos_512_v4
+36/191023/campos_512_v4
+36/191027/campos_512_v4
+36/191062/campos_512_v4
+36/191070/campos_512_v4
+36/191073/campos_512_v4
+36/191097/campos_512_v4
+36/191102/campos_512_v4
+36/191105/campos_512_v4
+36/191107/campos_512_v4
+36/191114/campos_512_v4
+36/191120/campos_512_v4
+36/191122/campos_512_v4
+36/191130/campos_512_v4
+36/191134/campos_512_v4
+36/191163/campos_512_v4
+36/191166/campos_512_v4
+36/191170/campos_512_v4
+36/191173/campos_512_v4
+36/191187/campos_512_v4
+36/191188/campos_512_v4
+36/191190/campos_512_v4
+36/191191/campos_512_v4
+36/191201/campos_512_v4
+36/191203/campos_512_v4
+36/191215/campos_512_v4
+36/191220/campos_512_v4
+36/191225/campos_512_v4
+36/191226/campos_512_v4
+36/191232/campos_512_v4
+36/191237/campos_512_v4
+36/191238/campos_512_v4
+36/191240/campos_512_v4
+36/191243/campos_512_v4
+36/191246/campos_512_v4
+36/191257/campos_512_v4
+36/191260/campos_512_v4
+36/191265/campos_512_v4
+36/191270/campos_512_v4
+36/191275/campos_512_v4
+36/191300/campos_512_v4
+36/191306/campos_512_v4
+36/191320/campos_512_v4
+36/191329/campos_512_v4
+36/191331/campos_512_v4
+36/191338/campos_512_v4
+36/191350/campos_512_v4
+36/191360/campos_512_v4
+36/191362/campos_512_v4
+36/191363/campos_512_v4
+36/191369/campos_512_v4
+36/191386/campos_512_v4
+36/191389/campos_512_v4
+36/191392/campos_512_v4
+36/191399/campos_512_v4
+36/191409/campos_512_v4
+36/191412/campos_512_v4
+36/191414/campos_512_v4
+36/191415/campos_512_v4
+36/191421/campos_512_v4
+36/191423/campos_512_v4
+36/191433/campos_512_v4
+36/191435/campos_512_v4
+36/191436/campos_512_v4
+36/191438/campos_512_v4
+36/191443/campos_512_v4
+36/191445/campos_512_v4
+36/191446/campos_512_v4
+36/191450/campos_512_v4
+36/191486/campos_512_v4
+36/191498/campos_512_v4
+36/191500/campos_512_v4
+36/191501/campos_512_v4
+36/191506/campos_512_v4
+36/191514/campos_512_v4
+36/191521/campos_512_v4
+36/191527/campos_512_v4
+36/191535/campos_512_v4
+36/191541/campos_512_v4
+36/191559/campos_512_v4
+36/191560/campos_512_v4
+36/191564/campos_512_v4
+36/191574/campos_512_v4
+36/191586/campos_512_v4
+36/191594/campos_512_v4
+36/191597/campos_512_v4
+36/191609/campos_512_v4
+36/191622/campos_512_v4
+36/191627/campos_512_v4
+36/191633/campos_512_v4
+36/191638/campos_512_v4
+36/191641/campos_512_v4
+36/191650/campos_512_v4
+36/191668/campos_512_v4
+36/191669/campos_512_v4
+36/191670/campos_512_v4
+36/191675/campos_512_v4
+36/191681/campos_512_v4
+36/191685/campos_512_v4
+36/191691/campos_512_v4
+36/191696/campos_512_v4
+36/191701/campos_512_v4
+36/191711/campos_512_v4
+36/191730/campos_512_v4
+36/191734/campos_512_v4
+36/191738/campos_512_v4
+36/191747/campos_512_v4
+36/191755/campos_512_v4
+36/191761/campos_512_v4
+36/191769/campos_512_v4
+36/191776/campos_512_v4
+36/191777/campos_512_v4
+36/191784/campos_512_v4
+36/191795/campos_512_v4
+36/191799/campos_512_v4
+36/191803/campos_512_v4
+36/191809/campos_512_v4
+36/191810/campos_512_v4
+36/191824/campos_512_v4
+36/191857/campos_512_v4
+36/191858/campos_512_v4
+36/191861/campos_512_v4
+36/191869/campos_512_v4
+36/191895/campos_512_v4
+36/191909/campos_512_v4
+36/191915/campos_512_v4
+36/191924/campos_512_v4
+36/191946/campos_512_v4
+36/191947/campos_512_v4
+36/191948/campos_512_v4
+36/191950/campos_512_v4
+36/191963/campos_512_v4
+36/191967/campos_512_v4
+36/191978/campos_512_v4
+36/191987/campos_512_v4
+36/191992/campos_512_v4
+36/191998/campos_512_v4
+36/192006/campos_512_v4
+36/192011/campos_512_v4
+36/192021/campos_512_v4
+36/192022/campos_512_v4
+36/192038/campos_512_v4
+36/192048/campos_512_v4
+36/192049/campos_512_v4
+36/192051/campos_512_v4
+36/192062/campos_512_v4
+36/192067/campos_512_v4
+36/192073/campos_512_v4
+36/192083/campos_512_v4
+36/192084/campos_512_v4
+36/192112/campos_512_v4
+36/192121/campos_512_v4
+36/192138/campos_512_v4
+36/192140/campos_512_v4
+36/192143/campos_512_v4
+36/192158/campos_512_v4
+36/192161/campos_512_v4
+36/192166/campos_512_v4
+36/192177/campos_512_v4
+36/192186/campos_512_v4
+36/192188/campos_512_v4
+36/192244/campos_512_v4
+36/192247/campos_512_v4
+36/192275/campos_512_v4
+36/192278/campos_512_v4
+36/192282/campos_512_v4
+36/192286/campos_512_v4
+36/192288/campos_512_v4
+36/192289/campos_512_v4
+36/192298/campos_512_v4
+36/192300/campos_512_v4
+36/192303/campos_512_v4
+36/192310/campos_512_v4
+36/192323/campos_512_v4
+36/192326/campos_512_v4
+36/192327/campos_512_v4
+36/192340/campos_512_v4
+36/192345/campos_512_v4
+36/192347/campos_512_v4
+36/192371/campos_512_v4
+36/192375/campos_512_v4
+36/192386/campos_512_v4
+36/192387/campos_512_v4
+36/192397/campos_512_v4
+36/192399/campos_512_v4
+36/192401/campos_512_v4
+36/192402/campos_512_v4
+36/192429/campos_512_v4
+36/192440/campos_512_v4
+36/192457/campos_512_v4
+36/192460/campos_512_v4
+36/192462/campos_512_v4
+36/192467/campos_512_v4
+36/192482/campos_512_v4
+36/192504/campos_512_v4
+36/192508/campos_512_v4
+36/192518/campos_512_v4
+36/192520/campos_512_v4
+36/192526/campos_512_v4
+36/192529/campos_512_v4
+36/192530/campos_512_v4
+36/192537/campos_512_v4
+36/192547/campos_512_v4
+36/192559/campos_512_v4
+36/192571/campos_512_v4
+36/192577/campos_512_v4
+36/192581/campos_512_v4
+36/192585/campos_512_v4
+36/192594/campos_512_v4
+36/192597/campos_512_v4
+36/192609/campos_512_v4
+36/192615/campos_512_v4
+36/192634/campos_512_v4
+36/192647/campos_512_v4
+36/192662/campos_512_v4
+36/192674/campos_512_v4
+36/192690/campos_512_v4
+36/192694/campos_512_v4
+36/192703/campos_512_v4
+36/192708/campos_512_v4
+36/192717/campos_512_v4
+36/192724/campos_512_v4
+36/192739/campos_512_v4
+36/192741/campos_512_v4
+36/192746/campos_512_v4
+36/192747/campos_512_v4
+36/192757/campos_512_v4
+36/192759/campos_512_v4
+36/192761/campos_512_v4
+36/192764/campos_512_v4
+36/192768/campos_512_v4
+36/192781/campos_512_v4
+36/192782/campos_512_v4
+36/192784/campos_512_v4
+36/192787/campos_512_v4
+36/192792/campos_512_v4
+36/192797/campos_512_v4
+36/192799/campos_512_v4
+36/192800/campos_512_v4
+36/192819/campos_512_v4
+36/192821/campos_512_v4
+36/192822/campos_512_v4
+36/192829/campos_512_v4
+36/192839/campos_512_v4
+36/192864/campos_512_v4
+36/192866/campos_512_v4
+36/192874/campos_512_v4
+36/192876/campos_512_v4
+36/192884/campos_512_v4
+36/192889/campos_512_v4
+36/192903/campos_512_v4
+36/192910/campos_512_v4
+36/192912/campos_512_v4
+36/192931/campos_512_v4
+36/192935/campos_512_v4
+36/192938/campos_512_v4
+36/192942/campos_512_v4
+36/192952/campos_512_v4
+36/192955/campos_512_v4
+36/192961/campos_512_v4
+36/192964/campos_512_v4
+36/192969/campos_512_v4
+36/192971/campos_512_v4
+36/192974/campos_512_v4
+36/192982/campos_512_v4
+36/192985/campos_512_v4
+36/192994/campos_512_v4
+36/192996/campos_512_v4
+36/192998/campos_512_v4
+36/193007/campos_512_v4
+36/193013/campos_512_v4
+36/193030/campos_512_v4
+36/193035/campos_512_v4
+36/193039/campos_512_v4
+36/193041/campos_512_v4
+36/193042/campos_512_v4
+36/193049/campos_512_v4
+36/193058/campos_512_v4
+36/193066/campos_512_v4
+36/193069/campos_512_v4
+36/193071/campos_512_v4
+36/193082/campos_512_v4
+36/193086/campos_512_v4
+36/193093/campos_512_v4
+36/193100/campos_512_v4
+36/193113/campos_512_v4
+36/193114/campos_512_v4
+36/193124/campos_512_v4
+36/193125/campos_512_v4
+36/193132/campos_512_v4
+36/193138/campos_512_v4
+36/193141/campos_512_v4
+36/193153/campos_512_v4
+36/193155/campos_512_v4
+36/193157/campos_512_v4
+36/193160/campos_512_v4
+36/193166/campos_512_v4
+36/193169/campos_512_v4
+36/193200/campos_512_v4
+36/193222/campos_512_v4
+36/193224/campos_512_v4
+36/193249/campos_512_v4
+36/193251/campos_512_v4
+36/193276/campos_512_v4
+36/193282/campos_512_v4
+36/193290/campos_512_v4
+36/193291/campos_512_v4
+36/193299/campos_512_v4
+36/193322/campos_512_v4
+36/193329/campos_512_v4
+36/193333/campos_512_v4
+36/193347/campos_512_v4
+36/193350/campos_512_v4
+36/193355/campos_512_v4
+36/193358/campos_512_v4
+36/193365/campos_512_v4
+36/193376/campos_512_v4
+36/193378/campos_512_v4
+36/193389/campos_512_v4
+36/193411/campos_512_v4
+36/193421/campos_512_v4
+36/193423/campos_512_v4
+36/193431/campos_512_v4
+36/193440/campos_512_v4
+36/193448/campos_512_v4
+36/193452/campos_512_v4
+36/193485/campos_512_v4
+36/193486/campos_512_v4
+36/193488/campos_512_v4
+36/193490/campos_512_v4
+36/193496/campos_512_v4
+36/193504/campos_512_v4
+36/193507/campos_512_v4
+36/193510/campos_512_v4
+36/193524/campos_512_v4
+36/193533/campos_512_v4
+36/193534/campos_512_v4
+36/193542/campos_512_v4
+36/193545/campos_512_v4
+36/193569/campos_512_v4
+36/193584/campos_512_v4
+36/193599/campos_512_v4
+36/193603/campos_512_v4
+36/193606/campos_512_v4
+36/193615/campos_512_v4
+36/193647/campos_512_v4
+36/193655/campos_512_v4
+36/193661/campos_512_v4
+36/193678/campos_512_v4
+36/193682/campos_512_v4
+36/193689/campos_512_v4
+36/193692/campos_512_v4
+36/193705/campos_512_v4
+36/193707/campos_512_v4
+36/193709/campos_512_v4
+36/193717/campos_512_v4
+36/193720/campos_512_v4
+36/193723/campos_512_v4
+36/193729/campos_512_v4
+36/193756/campos_512_v4
+36/193764/campos_512_v4
+36/193773/campos_512_v4
+36/193775/campos_512_v4
+36/193776/campos_512_v4
+36/193779/campos_512_v4
+36/193780/campos_512_v4
+36/193787/campos_512_v4
+36/193794/campos_512_v4
+36/193795/campos_512_v4
+36/193799/campos_512_v4
+36/193851/campos_512_v4
+36/193853/campos_512_v4
+36/193885/campos_512_v4
+36/193889/campos_512_v4
+36/193895/campos_512_v4
+36/193896/campos_512_v4
+36/193899/campos_512_v4
+36/193903/campos_512_v4
+36/193907/campos_512_v4
+36/193910/campos_512_v4
+36/193933/campos_512_v4
+36/193945/campos_512_v4
+36/193951/campos_512_v4
+36/193979/campos_512_v4
+36/193985/campos_512_v4
+36/193995/campos_512_v4
+36/193998/campos_512_v4
+36/194001/campos_512_v4
+36/194002/campos_512_v4
+36/194009/campos_512_v4
+36/194011/campos_512_v4
+36/194020/campos_512_v4
+36/194023/campos_512_v4
+36/194037/campos_512_v4
+36/194045/campos_512_v4
+36/194054/campos_512_v4
+36/194060/campos_512_v4
+36/194062/campos_512_v4
+36/194066/campos_512_v4
+36/194079/campos_512_v4
+36/194080/campos_512_v4
+36/194086/campos_512_v4
+36/194089/campos_512_v4
+36/194091/campos_512_v4
+36/194093/campos_512_v4
+36/194097/campos_512_v4
+36/194104/campos_512_v4
+36/194113/campos_512_v4
+36/194122/campos_512_v4
+36/194123/campos_512_v4
+36/194128/campos_512_v4
+36/194130/campos_512_v4
+36/194154/campos_512_v4
+36/194173/campos_512_v4
+36/194177/campos_512_v4
+36/194188/campos_512_v4
+36/194190/campos_512_v4
+36/194194/campos_512_v4
+36/194220/campos_512_v4
+36/194226/campos_512_v4
+36/194228/campos_512_v4
+36/194233/campos_512_v4
+36/194235/campos_512_v4
+36/194240/campos_512_v4
+36/194241/campos_512_v4
+36/194256/campos_512_v4
+36/194258/campos_512_v4
+36/194276/campos_512_v4
+36/194279/campos_512_v4
+36/194290/campos_512_v4
+36/194291/campos_512_v4
+36/194309/campos_512_v4
+36/194311/campos_512_v4
+36/194320/campos_512_v4
+36/194324/campos_512_v4
+36/194330/campos_512_v4
+36/194338/campos_512_v4
+36/194343/campos_512_v4
+36/194347/campos_512_v4
+36/194358/campos_512_v4
+36/194364/campos_512_v4
+36/194371/campos_512_v4
+36/194373/campos_512_v4
+36/194376/campos_512_v4
+36/194381/campos_512_v4
+36/194387/campos_512_v4
+36/194398/campos_512_v4
+36/194406/campos_512_v4
+36/194411/campos_512_v4
+36/194416/campos_512_v4
+36/194419/campos_512_v4
+36/194422/campos_512_v4
+36/194423/campos_512_v4
+36/194427/campos_512_v4
+36/194434/campos_512_v4
+36/194444/campos_512_v4
+36/194448/campos_512_v4
+36/194458/campos_512_v4
+36/194470/campos_512_v4
+36/194471/campos_512_v4
+36/194472/campos_512_v4
+36/194482/campos_512_v4
+36/194485/campos_512_v4
+36/194487/campos_512_v4
+36/194488/campos_512_v4
+36/194489/campos_512_v4
+36/194517/campos_512_v4
+36/194520/campos_512_v4
+36/194526/campos_512_v4
+36/194527/campos_512_v4
+36/194528/campos_512_v4
+36/194532/campos_512_v4
+36/194539/campos_512_v4
+36/194542/campos_512_v4
+36/194550/campos_512_v4
+36/194555/campos_512_v4
+36/194567/campos_512_v4
+36/194572/campos_512_v4
+36/194601/campos_512_v4
+36/194605/campos_512_v4
+36/194618/campos_512_v4
+36/194626/campos_512_v4
+36/194632/campos_512_v4
+36/194633/campos_512_v4
+36/194650/campos_512_v4
+36/194673/campos_512_v4
+36/194694/campos_512_v4
+36/194695/campos_512_v4
+36/194719/campos_512_v4
+36/194722/campos_512_v4
+36/194725/campos_512_v4
+36/194738/campos_512_v4
+36/194742/campos_512_v4
+36/194762/campos_512_v4
+36/194768/campos_512_v4
+36/194770/campos_512_v4
+36/194779/campos_512_v4
+36/194788/campos_512_v4
+36/194792/campos_512_v4
+36/194799/campos_512_v4
+36/194804/campos_512_v4
+36/194805/campos_512_v4
+36/194824/campos_512_v4
+36/194847/campos_512_v4
+36/194874/campos_512_v4
+36/194878/campos_512_v4
+36/194893/campos_512_v4
+36/194896/campos_512_v4
+36/194897/campos_512_v4
+36/194899/campos_512_v4
+36/194908/campos_512_v4
+36/194915/campos_512_v4
+36/194920/campos_512_v4
+36/194922/campos_512_v4
+36/194936/campos_512_v4
+36/194942/campos_512_v4
+36/194945/campos_512_v4
+36/194949/campos_512_v4
+36/194953/campos_512_v4
+36/194955/campos_512_v4
+36/194957/campos_512_v4
+36/194962/campos_512_v4
+36/194967/campos_512_v4
+36/194971/campos_512_v4
+36/194985/campos_512_v4
+36/194991/campos_512_v4
+36/194997/campos_512_v4
+36/194999/campos_512_v4
+37/195006/campos_512_v4
+37/195007/campos_512_v4
+37/195011/campos_512_v4
+37/195014/campos_512_v4
+37/195016/campos_512_v4
+37/195028/campos_512_v4
+37/195051/campos_512_v4
+37/195054/campos_512_v4
+37/195055/campos_512_v4
+37/195061/campos_512_v4
+37/195063/campos_512_v4
+37/195072/campos_512_v4
+37/195074/campos_512_v4
+37/195078/campos_512_v4
+37/195090/campos_512_v4
+37/195098/campos_512_v4
+37/195100/campos_512_v4
+37/195103/campos_512_v4
+37/195112/campos_512_v4
+37/195120/campos_512_v4
+37/195122/campos_512_v4
+37/195124/campos_512_v4
+37/195149/campos_512_v4
+37/195157/campos_512_v4
+37/195169/campos_512_v4
+37/195170/campos_512_v4
+37/195184/campos_512_v4
+37/195195/campos_512_v4
+37/195215/campos_512_v4
+37/195218/campos_512_v4
+37/195224/campos_512_v4
+37/195237/campos_512_v4
+37/195239/campos_512_v4
+37/195258/campos_512_v4
+37/195260/campos_512_v4
+37/195267/campos_512_v4
+37/195268/campos_512_v4
+37/195274/campos_512_v4
+37/195278/campos_512_v4
+37/195281/campos_512_v4
+37/195303/campos_512_v4
+37/195316/campos_512_v4
+37/195320/campos_512_v4
+37/195322/campos_512_v4
+37/195324/campos_512_v4
+37/195339/campos_512_v4
+37/195340/campos_512_v4
+37/195342/campos_512_v4
+37/195345/campos_512_v4
+37/195352/campos_512_v4
+37/195377/campos_512_v4
+37/195386/campos_512_v4
+37/195401/campos_512_v4
+37/195424/campos_512_v4
+37/195444/campos_512_v4
+37/195446/campos_512_v4
+37/195448/campos_512_v4
+37/195466/campos_512_v4
+37/195488/campos_512_v4
+37/195501/campos_512_v4
+37/195502/campos_512_v4
+37/195512/campos_512_v4
+37/195544/campos_512_v4
+37/195554/campos_512_v4
+37/195558/campos_512_v4
+37/195560/campos_512_v4
+37/195573/campos_512_v4
+37/195579/campos_512_v4
+37/195583/campos_512_v4
+37/195586/campos_512_v4
+37/195592/campos_512_v4
+37/195593/campos_512_v4
+37/195595/campos_512_v4
+37/195600/campos_512_v4
+37/195603/campos_512_v4
+37/195605/campos_512_v4
+37/195625/campos_512_v4
+37/195626/campos_512_v4
+37/195635/campos_512_v4
+37/195636/campos_512_v4
+37/195645/campos_512_v4
+37/195656/campos_512_v4
+37/195657/campos_512_v4
+37/195683/campos_512_v4
+37/195687/campos_512_v4
+37/195690/campos_512_v4
+37/195695/campos_512_v4
+37/195703/campos_512_v4
+37/195727/campos_512_v4
+37/195729/campos_512_v4
+37/195732/campos_512_v4
+37/195734/campos_512_v4
+37/195748/campos_512_v4
+37/195750/campos_512_v4
+37/195752/campos_512_v4
+37/195760/campos_512_v4
+37/195767/campos_512_v4
+37/195788/campos_512_v4
+37/195796/campos_512_v4
+37/195803/campos_512_v4
+37/195816/campos_512_v4
+37/195818/campos_512_v4
+37/195823/campos_512_v4
+37/195832/campos_512_v4
+37/195836/campos_512_v4
+37/195846/campos_512_v4
+37/195849/campos_512_v4
+37/195858/campos_512_v4
+37/195861/campos_512_v4
+37/195864/campos_512_v4
+37/195865/campos_512_v4
+37/195868/campos_512_v4
+37/195876/campos_512_v4
+37/195879/campos_512_v4
+37/195886/campos_512_v4
+37/195887/campos_512_v4
+37/195903/campos_512_v4
+37/195907/campos_512_v4
+37/195918/campos_512_v4
+37/195919/campos_512_v4
+37/195922/campos_512_v4
+37/195937/campos_512_v4
+37/195946/campos_512_v4
+37/195953/campos_512_v4
+37/195971/campos_512_v4
+37/195978/campos_512_v4
+37/195980/campos_512_v4
+37/195990/campos_512_v4
+37/195991/campos_512_v4
+37/195992/campos_512_v4
+37/195994/campos_512_v4
+37/195996/campos_512_v4
+37/196010/campos_512_v4
+37/196018/campos_512_v4
+37/196024/campos_512_v4
+37/196035/campos_512_v4
+37/196038/campos_512_v4
+37/196044/campos_512_v4
+37/196062/campos_512_v4
+37/196065/campos_512_v4
+37/196071/campos_512_v4
+37/196090/campos_512_v4
+37/196094/campos_512_v4
+37/196097/campos_512_v4
+37/196101/campos_512_v4
+37/196107/campos_512_v4
+37/196111/campos_512_v4
+37/196130/campos_512_v4
+37/196132/campos_512_v4
+37/196136/campos_512_v4
+37/196137/campos_512_v4
+37/196158/campos_512_v4
+37/196160/campos_512_v4
+37/196189/campos_512_v4
+37/196192/campos_512_v4
+37/196207/campos_512_v4
+37/196208/campos_512_v4
+37/196213/campos_512_v4
+37/196217/campos_512_v4
+37/196221/campos_512_v4
+37/196229/campos_512_v4
+37/196238/campos_512_v4
+37/196271/campos_512_v4
+37/196272/campos_512_v4
+37/196273/campos_512_v4
+37/196277/campos_512_v4
+37/196282/campos_512_v4
+37/196284/campos_512_v4
+37/196286/campos_512_v4
+37/196289/campos_512_v4
+37/196292/campos_512_v4
+37/196295/campos_512_v4
+37/196320/campos_512_v4
+37/196339/campos_512_v4
+37/196341/campos_512_v4
+37/196362/campos_512_v4
+37/196380/campos_512_v4
+37/196385/campos_512_v4
+37/196413/campos_512_v4
+37/196418/campos_512_v4
+37/196422/campos_512_v4
+37/196423/campos_512_v4
+37/196432/campos_512_v4
+37/196434/campos_512_v4
+37/196444/campos_512_v4
+37/196456/campos_512_v4
+37/196464/campos_512_v4
+37/196465/campos_512_v4
+37/196469/campos_512_v4
+37/196471/campos_512_v4
+37/196478/campos_512_v4
+37/196479/campos_512_v4
+37/196484/campos_512_v4
+37/196492/campos_512_v4
+37/196494/campos_512_v4
+37/196513/campos_512_v4
+37/196514/campos_512_v4
+37/196528/campos_512_v4
+37/196539/campos_512_v4
+37/196545/campos_512_v4
+37/196560/campos_512_v4
+37/196567/campos_512_v4
+37/196578/campos_512_v4
+37/196585/campos_512_v4
+37/196587/campos_512_v4
+37/196591/campos_512_v4
+37/196595/campos_512_v4
+37/196606/campos_512_v4
+37/196611/campos_512_v4
+37/196616/campos_512_v4
+37/196621/campos_512_v4
+37/196627/campos_512_v4
+37/196637/campos_512_v4
+37/196651/campos_512_v4
+37/196654/campos_512_v4
+37/196658/campos_512_v4
+37/196665/campos_512_v4
+37/196673/campos_512_v4
+37/196678/campos_512_v4
+37/196679/campos_512_v4
+37/196690/campos_512_v4
+37/196721/campos_512_v4
+37/196738/campos_512_v4
+37/196744/campos_512_v4
+37/196755/campos_512_v4
+37/196757/campos_512_v4
+37/196763/campos_512_v4
+37/196767/campos_512_v4
+37/196770/campos_512_v4
+37/196778/campos_512_v4
+37/196785/campos_512_v4
+37/196790/campos_512_v4
+37/196801/campos_512_v4
+37/196802/campos_512_v4
+37/196806/campos_512_v4
+37/196808/campos_512_v4
+37/196820/campos_512_v4
+37/196824/campos_512_v4
+37/196825/campos_512_v4
+37/196828/campos_512_v4
+37/196832/campos_512_v4
+37/196842/campos_512_v4
+37/196843/campos_512_v4
+37/196845/campos_512_v4
+37/196849/campos_512_v4
+37/196857/campos_512_v4
+37/196858/campos_512_v4
+37/196878/campos_512_v4
+37/196882/campos_512_v4
+37/196889/campos_512_v4
+37/196913/campos_512_v4
+37/196919/campos_512_v4
+37/196922/campos_512_v4
+37/196923/campos_512_v4
+37/196935/campos_512_v4
+37/196938/campos_512_v4
+37/196941/campos_512_v4
+37/196944/campos_512_v4
+37/196945/campos_512_v4
+37/196947/campos_512_v4
+37/196949/campos_512_v4
+37/196969/campos_512_v4
+37/196977/campos_512_v4
+37/196981/campos_512_v4
+37/196986/campos_512_v4
+37/196987/campos_512_v4
+37/197004/campos_512_v4
+37/197013/campos_512_v4
+37/197023/campos_512_v4
+37/197050/campos_512_v4
+37/197055/campos_512_v4
+37/197057/campos_512_v4
+37/197070/campos_512_v4
+37/197073/campos_512_v4
+37/197081/campos_512_v4
+37/197085/campos_512_v4
+37/197101/campos_512_v4
+37/197104/campos_512_v4
+37/197108/campos_512_v4
+37/197117/campos_512_v4
+37/197126/campos_512_v4
+37/197131/campos_512_v4
+37/197136/campos_512_v4
+37/197137/campos_512_v4
+37/197140/campos_512_v4
+37/197150/campos_512_v4
+37/197155/campos_512_v4
+37/197157/campos_512_v4
+37/197158/campos_512_v4
+37/197159/campos_512_v4
+37/197167/campos_512_v4
+37/197171/campos_512_v4
+37/197175/campos_512_v4
+37/197176/campos_512_v4
+37/197195/campos_512_v4
+37/197216/campos_512_v4
+37/197217/campos_512_v4
+37/197228/campos_512_v4
+37/197233/campos_512_v4
+37/197241/campos_512_v4
+37/197243/campos_512_v4
+37/197253/campos_512_v4
+37/197272/campos_512_v4
+37/197281/campos_512_v4
+37/197299/campos_512_v4
+37/197300/campos_512_v4
+37/197317/campos_512_v4
+37/197323/campos_512_v4
+37/197328/campos_512_v4
+37/197338/campos_512_v4
+37/197341/campos_512_v4
+37/197345/campos_512_v4
+37/197348/campos_512_v4
+37/197362/campos_512_v4
+37/197377/campos_512_v4
+37/197383/campos_512_v4
+37/197391/campos_512_v4
+37/197393/campos_512_v4
+37/197407/campos_512_v4
+37/197442/campos_512_v4
+37/197446/campos_512_v4
+37/197447/campos_512_v4
+37/197449/campos_512_v4
+37/197451/campos_512_v4
+37/197456/campos_512_v4
+37/197465/campos_512_v4
+37/197466/campos_512_v4
+37/197475/campos_512_v4
+37/197476/campos_512_v4
+37/197478/campos_512_v4
+37/197483/campos_512_v4
+37/197484/campos_512_v4
+37/197491/campos_512_v4
+37/197500/campos_512_v4
+37/197508/campos_512_v4
+37/197509/campos_512_v4
+37/197525/campos_512_v4
+37/197533/campos_512_v4
+37/197536/campos_512_v4
+37/197553/campos_512_v4
+37/197576/campos_512_v4
+37/197592/campos_512_v4
+37/197599/campos_512_v4
+37/197609/campos_512_v4
+37/197624/campos_512_v4
+37/197625/campos_512_v4
+37/197629/campos_512_v4
+37/197650/campos_512_v4
+37/197654/campos_512_v4
+37/197677/campos_512_v4
+37/197711/campos_512_v4
+37/197715/campos_512_v4
+37/197723/campos_512_v4
+37/197729/campos_512_v4
+37/197736/campos_512_v4
+37/197738/campos_512_v4
+37/197751/campos_512_v4
+37/197758/campos_512_v4
+37/197764/campos_512_v4
+37/197774/campos_512_v4
+37/197798/campos_512_v4
+37/197800/campos_512_v4
+37/197822/campos_512_v4
+37/197825/campos_512_v4
+37/197827/campos_512_v4
+37/197855/campos_512_v4
+37/197856/campos_512_v4
+37/197868/campos_512_v4
+37/197870/campos_512_v4
+37/197879/campos_512_v4
+37/197881/campos_512_v4
+37/197885/campos_512_v4
+37/197888/campos_512_v4
+37/197894/campos_512_v4
+37/197912/campos_512_v4
+37/197915/campos_512_v4
+37/197917/campos_512_v4
+37/197935/campos_512_v4
+37/197956/campos_512_v4
+37/197962/campos_512_v4
+37/197970/campos_512_v4
+37/197988/campos_512_v4
+37/198003/campos_512_v4
+37/198005/campos_512_v4
+37/198007/campos_512_v4
+37/198010/campos_512_v4
+37/198033/campos_512_v4
+37/198042/campos_512_v4
+37/198044/campos_512_v4
+37/198049/campos_512_v4
+37/198056/campos_512_v4
+37/198058/campos_512_v4
+37/198059/campos_512_v4
+37/198067/campos_512_v4
+37/198080/campos_512_v4
+37/198082/campos_512_v4
+37/198086/campos_512_v4
+37/198100/campos_512_v4
+37/198107/campos_512_v4
+37/198110/campos_512_v4
+37/198118/campos_512_v4
+37/198122/campos_512_v4
+37/198124/campos_512_v4
+37/198138/campos_512_v4
+37/198152/campos_512_v4
+37/198155/campos_512_v4
+37/198162/campos_512_v4
+37/198178/campos_512_v4
+37/198179/campos_512_v4
+37/198180/campos_512_v4
+37/198184/campos_512_v4
+37/198189/campos_512_v4
+37/198196/campos_512_v4
+37/198205/campos_512_v4
+37/198217/campos_512_v4
+37/198225/campos_512_v4
+37/198230/campos_512_v4
+37/198232/campos_512_v4
+37/198240/campos_512_v4
+37/198241/campos_512_v4
+37/198244/campos_512_v4
+37/198248/campos_512_v4
+37/198254/campos_512_v4
+37/198258/campos_512_v4
+37/198260/campos_512_v4
+37/198277/campos_512_v4
+37/198280/campos_512_v4
+37/198288/campos_512_v4
+37/198291/campos_512_v4
+37/198294/campos_512_v4
+37/198297/campos_512_v4
+37/198298/campos_512_v4
+37/198311/campos_512_v4
+37/198314/campos_512_v4
+37/198319/campos_512_v4
+37/198323/campos_512_v4
+37/198324/campos_512_v4
+37/198329/campos_512_v4
+37/198331/campos_512_v4
+37/198336/campos_512_v4
+37/198337/campos_512_v4
+37/198346/campos_512_v4
+37/198348/campos_512_v4
+37/198349/campos_512_v4
+37/198352/campos_512_v4
+37/198356/campos_512_v4
+37/198360/campos_512_v4
+37/198370/campos_512_v4
+37/198375/campos_512_v4
+37/198376/campos_512_v4
+37/198377/campos_512_v4
+37/198382/campos_512_v4
+37/198383/campos_512_v4
+37/198394/campos_512_v4
+37/198404/campos_512_v4
+37/198405/campos_512_v4
+37/198406/campos_512_v4
+37/198408/campos_512_v4
+37/198410/campos_512_v4
+37/198421/campos_512_v4
+37/198423/campos_512_v4
+37/198424/campos_512_v4
+37/198449/campos_512_v4
+37/198450/campos_512_v4
+37/198455/campos_512_v4
+37/198471/campos_512_v4
+37/198487/campos_512_v4
+37/198488/campos_512_v4
+37/198492/campos_512_v4
+37/198503/campos_512_v4
+37/198506/campos_512_v4
+37/198520/campos_512_v4
+37/198525/campos_512_v4
+37/198527/campos_512_v4
+37/198541/campos_512_v4
+37/198542/campos_512_v4
+37/198552/campos_512_v4
+37/198556/campos_512_v4
+37/198558/campos_512_v4
+37/198560/campos_512_v4
+37/198566/campos_512_v4
+37/198579/campos_512_v4
+37/198580/campos_512_v4
+37/198583/campos_512_v4
+37/198588/campos_512_v4
+37/198589/campos_512_v4
+37/198593/campos_512_v4
+37/198620/campos_512_v4
+37/198622/campos_512_v4
+37/198652/campos_512_v4
+37/198668/campos_512_v4
+37/198683/campos_512_v4
+37/198684/campos_512_v4
+37/198690/campos_512_v4
+37/198696/campos_512_v4
+37/198705/campos_512_v4
+37/198706/campos_512_v4
+37/198709/campos_512_v4
+37/198715/campos_512_v4
+37/198716/campos_512_v4
+37/198720/campos_512_v4
+37/198728/campos_512_v4
+37/198738/campos_512_v4
+37/198739/campos_512_v4
+37/198740/campos_512_v4
+37/198751/campos_512_v4
+37/198761/campos_512_v4
+37/198763/campos_512_v4
+37/198764/campos_512_v4
+37/198767/campos_512_v4
+37/198769/campos_512_v4
+37/198773/campos_512_v4
+37/198776/campos_512_v4
+37/198780/campos_512_v4
+37/198793/campos_512_v4
+37/198805/campos_512_v4
+37/198808/campos_512_v4
+37/198838/campos_512_v4
+37/198847/campos_512_v4
+37/198852/campos_512_v4
+37/198856/campos_512_v4
+37/198864/campos_512_v4
+37/198876/campos_512_v4
+37/198880/campos_512_v4
+37/198899/campos_512_v4
+37/198900/campos_512_v4
+37/198902/campos_512_v4
+37/198903/campos_512_v4
+37/198910/campos_512_v4
+37/198917/campos_512_v4
+37/198920/campos_512_v4
+37/198928/campos_512_v4
+37/198958/campos_512_v4
+37/198974/campos_512_v4
+37/198986/campos_512_v4
+37/198998/campos_512_v4
+37/199016/campos_512_v4
+37/199020/campos_512_v4
+37/199021/campos_512_v4
+37/199029/campos_512_v4
+37/199034/campos_512_v4
+37/199037/campos_512_v4
+37/199040/campos_512_v4
+37/199050/campos_512_v4
+37/199053/campos_512_v4
+37/199076/campos_512_v4
+37/199092/campos_512_v4
+37/199096/campos_512_v4
+37/199101/campos_512_v4
+37/199113/campos_512_v4
+37/199115/campos_512_v4
+37/199120/campos_512_v4
+37/199135/campos_512_v4
+37/199144/campos_512_v4
+37/199153/campos_512_v4
+37/199161/campos_512_v4
+37/199172/campos_512_v4
+37/199187/campos_512_v4
+37/199191/campos_512_v4
+37/199197/campos_512_v4
+37/199209/campos_512_v4
+37/199217/campos_512_v4
+37/199232/campos_512_v4
+37/199241/campos_512_v4
+37/199243/campos_512_v4
+37/199245/campos_512_v4
+37/199251/campos_512_v4
+37/199258/campos_512_v4
+37/199264/campos_512_v4
+37/199266/campos_512_v4
+37/199277/campos_512_v4
+37/199299/campos_512_v4
+37/199308/campos_512_v4
+37/199311/campos_512_v4
+37/199320/campos_512_v4
+37/199328/campos_512_v4
+37/199335/campos_512_v4
+37/199363/campos_512_v4
+37/199371/campos_512_v4
+37/199373/campos_512_v4
+37/199387/campos_512_v4
+37/199391/campos_512_v4
+37/199400/campos_512_v4
+37/199433/campos_512_v4
+37/199442/campos_512_v4
+37/199448/campos_512_v4
+37/199451/campos_512_v4
+37/199455/campos_512_v4
+37/199471/campos_512_v4
+37/199473/campos_512_v4
+37/199484/campos_512_v4
+37/199485/campos_512_v4
+37/199486/campos_512_v4
+37/199491/campos_512_v4
+37/199496/campos_512_v4
+37/199499/campos_512_v4
+37/199513/campos_512_v4
+37/199520/campos_512_v4
+37/199533/campos_512_v4
+37/199556/campos_512_v4
+37/199583/campos_512_v4
+37/199610/campos_512_v4
+37/199616/campos_512_v4
+37/199621/campos_512_v4
+37/199631/campos_512_v4
+37/199633/campos_512_v4
+37/199656/campos_512_v4
+37/199686/campos_512_v4
+37/199687/campos_512_v4
+37/199688/campos_512_v4
+37/199701/campos_512_v4
+37/199702/campos_512_v4
+37/199719/campos_512_v4
+37/199722/campos_512_v4
+37/199729/campos_512_v4
+37/199745/campos_512_v4
+37/199746/campos_512_v4
+37/199748/campos_512_v4
+37/199750/campos_512_v4
+37/199756/campos_512_v4
+37/199758/campos_512_v4
+37/199765/campos_512_v4
+37/199771/campos_512_v4
+37/199779/campos_512_v4
+37/199783/campos_512_v4
+37/199790/campos_512_v4
+37/199800/campos_512_v4
+37/199804/campos_512_v4
+37/199809/campos_512_v4
+37/199813/campos_512_v4
+37/199823/campos_512_v4
+37/199880/campos_512_v4
+37/199890/campos_512_v4
+37/199905/campos_512_v4
+37/199916/campos_512_v4
+37/199918/campos_512_v4
+37/199920/campos_512_v4
+37/199922/campos_512_v4
+37/199924/campos_512_v4
+37/199936/campos_512_v4
+37/199948/campos_512_v4
+37/199956/campos_512_v4
+37/199957/campos_512_v4
+37/199964/campos_512_v4
+37/199969/campos_512_v4
+37/199970/campos_512_v4
+37/199994/campos_512_v4
+37/199997/campos_512_v4
+38/200007/campos_512_v4
+38/200013/campos_512_v4
+38/200015/campos_512_v4
+38/200017/campos_512_v4
+38/200020/campos_512_v4
+38/200021/campos_512_v4
+38/200023/campos_512_v4
+38/200030/campos_512_v4
+38/200037/campos_512_v4
+38/200041/campos_512_v4
+38/200053/campos_512_v4
+38/200054/campos_512_v4
+38/200055/campos_512_v4
+38/200067/campos_512_v4
+38/200071/campos_512_v4
+38/200082/campos_512_v4
+38/200091/campos_512_v4
+38/200096/campos_512_v4
+38/200098/campos_512_v4
+38/200109/campos_512_v4
+38/200122/campos_512_v4
+38/200123/campos_512_v4
+38/200136/campos_512_v4
+38/200141/campos_512_v4
+38/200145/campos_512_v4
+38/200162/campos_512_v4
+38/200168/campos_512_v4
+38/200169/campos_512_v4
+38/200170/campos_512_v4
+38/200171/campos_512_v4
+38/200175/campos_512_v4
+38/200177/campos_512_v4
+38/200179/campos_512_v4
+38/200185/campos_512_v4
+38/200195/campos_512_v4
+38/200196/campos_512_v4
+38/200200/campos_512_v4
+38/200216/campos_512_v4
+38/200225/campos_512_v4
+38/200232/campos_512_v4
+38/200243/campos_512_v4
+38/200247/campos_512_v4
+38/200253/campos_512_v4
+38/200255/campos_512_v4
+38/200271/campos_512_v4
+38/200275/campos_512_v4
+38/200293/campos_512_v4
+38/200299/campos_512_v4
+38/200310/campos_512_v4
+38/200318/campos_512_v4
+38/200323/campos_512_v4
+38/200333/campos_512_v4
+38/200346/campos_512_v4
+38/200348/campos_512_v4
+38/200350/campos_512_v4
+38/200365/campos_512_v4
+38/200369/campos_512_v4
+38/200373/campos_512_v4
+38/200404/campos_512_v4
+38/200409/campos_512_v4
+38/200420/campos_512_v4
+38/200422/campos_512_v4
+38/200434/campos_512_v4
+38/200435/campos_512_v4
+38/200437/campos_512_v4
+38/200443/campos_512_v4
+38/200446/campos_512_v4
+38/200450/campos_512_v4
+38/200458/campos_512_v4
+38/200502/campos_512_v4
+38/200504/campos_512_v4
+38/200508/campos_512_v4
+38/200522/campos_512_v4
+38/200524/campos_512_v4
+38/200544/campos_512_v4
+38/200549/campos_512_v4
+38/200551/campos_512_v4
+38/200553/campos_512_v4
+38/200573/campos_512_v4
+38/200577/campos_512_v4
+38/200585/campos_512_v4
+38/200592/campos_512_v4
+38/200593/campos_512_v4
+38/200598/campos_512_v4
+38/200604/campos_512_v4
+38/200612/campos_512_v4
+38/200632/campos_512_v4
+38/200639/campos_512_v4
+38/200640/campos_512_v4
+38/200643/campos_512_v4
+38/200658/campos_512_v4
+38/200661/campos_512_v4
+38/200666/campos_512_v4
+38/200669/campos_512_v4
+38/200690/campos_512_v4
+38/200693/campos_512_v4
+38/200697/campos_512_v4
+38/200702/campos_512_v4
+38/200719/campos_512_v4
+38/200726/campos_512_v4
+38/200727/campos_512_v4
+38/200738/campos_512_v4
+38/200742/campos_512_v4
+38/200748/campos_512_v4
+38/200749/campos_512_v4
+38/200764/campos_512_v4
+38/200775/campos_512_v4
+38/200788/campos_512_v4
+38/200797/campos_512_v4
+38/200799/campos_512_v4
+38/200801/campos_512_v4
+38/200802/campos_512_v4
+38/200806/campos_512_v4
+38/200807/campos_512_v4
+38/200808/campos_512_v4
+38/200811/campos_512_v4
+38/200815/campos_512_v4
+38/200826/campos_512_v4
+38/200829/campos_512_v4
+38/200835/campos_512_v4
+38/200843/campos_512_v4
+38/200844/campos_512_v4
+38/200849/campos_512_v4
+38/200857/campos_512_v4
+38/200859/campos_512_v4
+38/200865/campos_512_v4
+38/200910/campos_512_v4
+38/200919/campos_512_v4
+38/200934/campos_512_v4
+38/200942/campos_512_v4
+38/200960/campos_512_v4
+38/200962/campos_512_v4
+38/200975/campos_512_v4
+38/200976/campos_512_v4
+38/200981/campos_512_v4
+38/200985/campos_512_v4
+38/201003/campos_512_v4
+38/201019/campos_512_v4
+38/201024/campos_512_v4
+38/201029/campos_512_v4
+38/201039/campos_512_v4
+38/201043/campos_512_v4
+38/201053/campos_512_v4
+38/201054/campos_512_v4
+38/201055/campos_512_v4
+38/201069/campos_512_v4
+38/201086/campos_512_v4
+38/201090/campos_512_v4
+38/201092/campos_512_v4
+38/201096/campos_512_v4
+38/201098/campos_512_v4
+38/201099/campos_512_v4
+38/201105/campos_512_v4
+38/201106/campos_512_v4
+38/201107/campos_512_v4
+38/201122/campos_512_v4
+38/201123/campos_512_v4
+38/201125/campos_512_v4
+38/201149/campos_512_v4
+38/201152/campos_512_v4
+38/201159/campos_512_v4
+38/201169/campos_512_v4
+38/201170/campos_512_v4
+38/201181/campos_512_v4
+38/201182/campos_512_v4
+38/201185/campos_512_v4
+38/201187/campos_512_v4
+38/201195/campos_512_v4
+38/201196/campos_512_v4
+38/201213/campos_512_v4
+38/201218/campos_512_v4
+38/201220/campos_512_v4
+38/201230/campos_512_v4
+38/201231/campos_512_v4
+38/201249/campos_512_v4
+38/201252/campos_512_v4
+38/201275/campos_512_v4
+38/201284/campos_512_v4
+38/201289/campos_512_v4
+38/201290/campos_512_v4
+38/201294/campos_512_v4
+38/201296/campos_512_v4
+38/201311/campos_512_v4
+38/201318/campos_512_v4
+38/201348/campos_512_v4
+38/201350/campos_512_v4
+38/201358/campos_512_v4
+38/201361/campos_512_v4
+38/201373/campos_512_v4
+38/201375/campos_512_v4
+38/201389/campos_512_v4
+38/201402/campos_512_v4
+38/201404/campos_512_v4
+38/201406/campos_512_v4
+38/201455/campos_512_v4
+38/201465/campos_512_v4
+38/201472/campos_512_v4
+38/201473/campos_512_v4
+38/201474/campos_512_v4
+38/201478/campos_512_v4
+38/201482/campos_512_v4
+38/201495/campos_512_v4
+38/201507/campos_512_v4
+38/201510/campos_512_v4
+38/201516/campos_512_v4
+38/201517/campos_512_v4
+38/201519/campos_512_v4
+38/201520/campos_512_v4
+38/201521/campos_512_v4
+38/201544/campos_512_v4
+38/201564/campos_512_v4
+38/201575/campos_512_v4
+38/201584/campos_512_v4
+38/201603/campos_512_v4
+38/201616/campos_512_v4
+38/201619/campos_512_v4
+38/201622/campos_512_v4
+38/201626/campos_512_v4
+38/201628/campos_512_v4
+38/201639/campos_512_v4
+38/201653/campos_512_v4
+38/201657/campos_512_v4
+38/201660/campos_512_v4
+38/201680/campos_512_v4
+38/201687/campos_512_v4
+38/201701/campos_512_v4
+38/201704/campos_512_v4
+38/201706/campos_512_v4
+38/201722/campos_512_v4
+38/201724/campos_512_v4
+38/201725/campos_512_v4
+38/201727/campos_512_v4
+38/201728/campos_512_v4
+38/201729/campos_512_v4
+38/201747/campos_512_v4
+38/201748/campos_512_v4
+38/201752/campos_512_v4
+38/201754/campos_512_v4
+38/201769/campos_512_v4
+38/201770/campos_512_v4
+38/201802/campos_512_v4
+38/201808/campos_512_v4
+38/201820/campos_512_v4
+38/201834/campos_512_v4
+38/201839/campos_512_v4
+38/201847/campos_512_v4
+38/201859/campos_512_v4
+38/201868/campos_512_v4
+38/201870/campos_512_v4
+38/201872/campos_512_v4
+38/201878/campos_512_v4
+38/201879/campos_512_v4
+38/201881/campos_512_v4
+38/201883/campos_512_v4
+38/201897/campos_512_v4
+38/201898/campos_512_v4
+38/201906/campos_512_v4
+38/201917/campos_512_v4
+38/201923/campos_512_v4
+38/201930/campos_512_v4
+38/201946/campos_512_v4
+38/201949/campos_512_v4
+38/201954/campos_512_v4
+38/201956/campos_512_v4
+38/201970/campos_512_v4
+38/201986/campos_512_v4
+38/202001/campos_512_v4
+38/202015/campos_512_v4
+38/202019/campos_512_v4
+38/202025/campos_512_v4
+38/202027/campos_512_v4
+38/202031/campos_512_v4
+38/202038/campos_512_v4
+38/202052/campos_512_v4
+38/202061/campos_512_v4
+38/202072/campos_512_v4
+38/202082/campos_512_v4
+38/202091/campos_512_v4
+38/202124/campos_512_v4
+38/202131/campos_512_v4
+38/202136/campos_512_v4
+38/202138/campos_512_v4
+38/202142/campos_512_v4
+38/202161/campos_512_v4
+38/202171/campos_512_v4
+38/202178/campos_512_v4
+38/202186/campos_512_v4
+38/202187/campos_512_v4
+38/202209/campos_512_v4
+38/202212/campos_512_v4
+38/202215/campos_512_v4
+38/202218/campos_512_v4
+38/202222/campos_512_v4
+38/202223/campos_512_v4
+38/202247/campos_512_v4
+38/202266/campos_512_v4
+38/202281/campos_512_v4
+38/202283/campos_512_v4
+38/202284/campos_512_v4
+38/202289/campos_512_v4
+38/202305/campos_512_v4
+38/202308/campos_512_v4
+38/202318/campos_512_v4
+38/202323/campos_512_v4
+38/202325/campos_512_v4
+38/202328/campos_512_v4
+38/202337/campos_512_v4
+38/202342/campos_512_v4
+38/202345/campos_512_v4
+38/202363/campos_512_v4
+38/202366/campos_512_v4
+38/202367/campos_512_v4
+38/202372/campos_512_v4
+38/202379/campos_512_v4
+38/202382/campos_512_v4
+38/202408/campos_512_v4
+38/202416/campos_512_v4
+38/202424/campos_512_v4
+38/202435/campos_512_v4
+38/202452/campos_512_v4
+38/202467/campos_512_v4
+38/202470/campos_512_v4
+38/202472/campos_512_v4
+38/202474/campos_512_v4
+38/202504/campos_512_v4
+38/202505/campos_512_v4
+38/202508/campos_512_v4
+38/202516/campos_512_v4
+38/202545/campos_512_v4
+38/202549/campos_512_v4
+38/202554/campos_512_v4
+38/202557/campos_512_v4
+38/202561/campos_512_v4
+38/202564/campos_512_v4
+38/202587/campos_512_v4
+38/202592/campos_512_v4
+38/202602/campos_512_v4
+38/202603/campos_512_v4
+38/202605/campos_512_v4
+38/202606/campos_512_v4
+38/202608/campos_512_v4
+38/202618/campos_512_v4
+38/202619/campos_512_v4
+38/202627/campos_512_v4
+38/202631/campos_512_v4
+38/202644/campos_512_v4
+38/202651/campos_512_v4
+38/202665/campos_512_v4
+38/202684/campos_512_v4
+38/202686/campos_512_v4
+38/202688/campos_512_v4
+38/202689/campos_512_v4
+38/202691/campos_512_v4
+38/202699/campos_512_v4
+38/202710/campos_512_v4
+38/202724/campos_512_v4
+38/202731/campos_512_v4
+38/202736/campos_512_v4
+38/202757/campos_512_v4
+38/202764/campos_512_v4
+38/202765/campos_512_v4
+38/202772/campos_512_v4
+38/202780/campos_512_v4
+38/202785/campos_512_v4
+38/202786/campos_512_v4
+38/202792/campos_512_v4
+38/202799/campos_512_v4
+38/202800/campos_512_v4
+38/202814/campos_512_v4
+38/202816/campos_512_v4
+38/202838/campos_512_v4
+38/202839/campos_512_v4
+38/202856/campos_512_v4
+38/202859/campos_512_v4
+38/202865/campos_512_v4
+38/202877/campos_512_v4
+38/202879/campos_512_v4
+38/202889/campos_512_v4
+38/202894/campos_512_v4
+38/202895/campos_512_v4
+38/202899/campos_512_v4
+38/202908/campos_512_v4
+38/202913/campos_512_v4
+38/202915/campos_512_v4
+38/202935/campos_512_v4
+38/202943/campos_512_v4
+38/202962/campos_512_v4
+38/202971/campos_512_v4
+38/202981/campos_512_v4
+38/202992/campos_512_v4
+38/202997/campos_512_v4
+38/203005/campos_512_v4
+38/203008/campos_512_v4
+38/203018/campos_512_v4
+38/203022/campos_512_v4
+38/203024/campos_512_v4
+38/203037/campos_512_v4
+38/203042/campos_512_v4
+38/203052/campos_512_v4
+38/203057/campos_512_v4
+38/203063/campos_512_v4
+38/203085/campos_512_v4
+38/203093/campos_512_v4
+38/203110/campos_512_v4
+38/203113/campos_512_v4
+38/203129/campos_512_v4
+38/203130/campos_512_v4
+38/203137/campos_512_v4
+38/203155/campos_512_v4
+38/203158/campos_512_v4
+38/203170/campos_512_v4
+38/203178/campos_512_v4
+38/203200/campos_512_v4
+38/203207/campos_512_v4
+38/203212/campos_512_v4
+38/203229/campos_512_v4
+38/203241/campos_512_v4
+38/203244/campos_512_v4
+38/203252/campos_512_v4
+38/203254/campos_512_v4
+38/203260/campos_512_v4
+38/203263/campos_512_v4
+38/203281/campos_512_v4
+38/203285/campos_512_v4
+38/203286/campos_512_v4
+38/203295/campos_512_v4
+38/203297/campos_512_v4
+38/203298/campos_512_v4
+38/203319/campos_512_v4
+38/203329/campos_512_v4
+38/203342/campos_512_v4
+38/203351/campos_512_v4
+38/203360/campos_512_v4
+38/203363/campos_512_v4
+38/203379/campos_512_v4
+38/203383/campos_512_v4
+38/203393/campos_512_v4
+38/203396/campos_512_v4
+38/203415/campos_512_v4
+38/203416/campos_512_v4
+38/203418/campos_512_v4
+38/203429/campos_512_v4
+38/203439/campos_512_v4
+38/203451/campos_512_v4
+38/203453/campos_512_v4
+38/203454/campos_512_v4
+38/203464/campos_512_v4
+38/203474/campos_512_v4
+38/203495/campos_512_v4
+38/203502/campos_512_v4
+38/203504/campos_512_v4
+38/203506/campos_512_v4
+38/203513/campos_512_v4
+38/203516/campos_512_v4
+38/203533/campos_512_v4
+38/203534/campos_512_v4
+38/203540/campos_512_v4
+38/203542/campos_512_v4
+38/203557/campos_512_v4
+38/203559/campos_512_v4
+38/203568/campos_512_v4
+38/203576/campos_512_v4
+38/203578/campos_512_v4
+38/203598/campos_512_v4
+38/203600/campos_512_v4
+38/203601/campos_512_v4
+38/203605/campos_512_v4
+38/203607/campos_512_v4
+38/203613/campos_512_v4
+38/203615/campos_512_v4
+38/203619/campos_512_v4
+38/203626/campos_512_v4
+38/203630/campos_512_v4
+38/203633/campos_512_v4
+38/203636/campos_512_v4
+38/203648/campos_512_v4
+38/203657/campos_512_v4
+38/203660/campos_512_v4
+38/203689/campos_512_v4
+38/203692/campos_512_v4
+38/203734/campos_512_v4
+38/203746/campos_512_v4
+38/203754/campos_512_v4
+38/203776/campos_512_v4
+38/203791/campos_512_v4
+38/203794/campos_512_v4
+38/203795/campos_512_v4
+38/203803/campos_512_v4
+38/203811/campos_512_v4
+38/203827/campos_512_v4
+38/203828/campos_512_v4
+38/203831/campos_512_v4
+38/203834/campos_512_v4
+38/203835/campos_512_v4
+38/203837/campos_512_v4
+38/203843/campos_512_v4
+38/203846/campos_512_v4
+38/203848/campos_512_v4
+38/203852/campos_512_v4
+38/203867/campos_512_v4
+38/203868/campos_512_v4
+38/203874/campos_512_v4
+38/203884/campos_512_v4
+38/203888/campos_512_v4
+38/203890/campos_512_v4
+38/203893/campos_512_v4
+38/203904/campos_512_v4
+38/203911/campos_512_v4
+38/203912/campos_512_v4
+38/203918/campos_512_v4
+38/203965/campos_512_v4
+38/203967/campos_512_v4
+38/203974/campos_512_v4
+38/203978/campos_512_v4
+38/204001/campos_512_v4
+38/204004/campos_512_v4
+38/204020/campos_512_v4
+38/204022/campos_512_v4
+38/204023/campos_512_v4
+38/204032/campos_512_v4
+38/204035/campos_512_v4
+38/204039/campos_512_v4
+38/204044/campos_512_v4
+38/204054/campos_512_v4
+38/204060/campos_512_v4
+38/204095/campos_512_v4
+38/204103/campos_512_v4
+38/204111/campos_512_v4
+38/204113/campos_512_v4
+38/204114/campos_512_v4
+38/204117/campos_512_v4
+38/204128/campos_512_v4
+38/204129/campos_512_v4
+38/204130/campos_512_v4
+38/204143/campos_512_v4
+38/204162/campos_512_v4
+38/204163/campos_512_v4
+38/204189/campos_512_v4
+38/204197/campos_512_v4
+38/204211/campos_512_v4
+38/204222/campos_512_v4
+38/204233/campos_512_v4
+38/204234/campos_512_v4
+38/204248/campos_512_v4
+38/204252/campos_512_v4
+38/204253/campos_512_v4
+38/204258/campos_512_v4
+38/204260/campos_512_v4
+38/204262/campos_512_v4
+38/204276/campos_512_v4
+38/204282/campos_512_v4
+38/204283/campos_512_v4
+38/204287/campos_512_v4
+38/204311/campos_512_v4
+38/204326/campos_512_v4
+38/204332/campos_512_v4
+38/204336/campos_512_v4
+38/204337/campos_512_v4
+38/204359/campos_512_v4
+38/204364/campos_512_v4
+38/204376/campos_512_v4
+38/204378/campos_512_v4
+38/204379/campos_512_v4
+38/204391/campos_512_v4
+38/204392/campos_512_v4
+38/204398/campos_512_v4
+38/204404/campos_512_v4
+38/204414/campos_512_v4
+38/204415/campos_512_v4
+38/204433/campos_512_v4
+38/204445/campos_512_v4
+38/204456/campos_512_v4
+38/204471/campos_512_v4
+38/204475/campos_512_v4
+38/204490/campos_512_v4
+38/204502/campos_512_v4
+38/204508/campos_512_v4
+38/204519/campos_512_v4
+38/204521/campos_512_v4
+38/204534/campos_512_v4
+38/204536/campos_512_v4
+38/204567/campos_512_v4
+38/204576/campos_512_v4
+38/204618/campos_512_v4
+38/204631/campos_512_v4
+38/204636/campos_512_v4
+38/204646/campos_512_v4
+38/204651/campos_512_v4
+38/204656/campos_512_v4
+38/204658/campos_512_v4
+38/204659/campos_512_v4
+38/204660/campos_512_v4
+38/204661/campos_512_v4
+38/204678/campos_512_v4
+38/204695/campos_512_v4
+38/204696/campos_512_v4
+38/204704/campos_512_v4
+38/204710/campos_512_v4
+38/204714/campos_512_v4
+38/204715/campos_512_v4
+38/204721/campos_512_v4
+38/204726/campos_512_v4
+38/204751/campos_512_v4
+38/204760/campos_512_v4
+38/204761/campos_512_v4
+38/204763/campos_512_v4
+38/204765/campos_512_v4
+38/204767/campos_512_v4
+38/204805/campos_512_v4
+38/204812/campos_512_v4
+38/204824/campos_512_v4
+38/204836/campos_512_v4
+38/204840/campos_512_v4
+38/204843/campos_512_v4
+38/204850/campos_512_v4
+38/204851/campos_512_v4
+38/204856/campos_512_v4
+38/204865/campos_512_v4
+38/204877/campos_512_v4
+38/204884/campos_512_v4
+38/204885/campos_512_v4
+38/204891/campos_512_v4
+38/204904/campos_512_v4
+38/204912/campos_512_v4
+38/204913/campos_512_v4
+38/204925/campos_512_v4
+38/204926/campos_512_v4
+38/204929/campos_512_v4
+38/204947/campos_512_v4
+38/204952/campos_512_v4
+38/204958/campos_512_v4
+38/204960/campos_512_v4
+38/204961/campos_512_v4
+38/204965/campos_512_v4
+38/204967/campos_512_v4
+38/204970/campos_512_v4
+38/204971/campos_512_v4
+38/204973/campos_512_v4
+38/204977/campos_512_v4
+38/204987/campos_512_v4
+38/204991/campos_512_v4
+38/204992/campos_512_v4
+38/204993/campos_512_v4
+38/204995/campos_512_v4
+4/30029/campos_512_v4
+4/30041/campos_512_v4
+4/30056/campos_512_v4
+4/30062/campos_512_v4
+4/30063/campos_512_v4
+4/30098/campos_512_v4
+4/30103/campos_512_v4
+4/30109/campos_512_v4
+4/30120/campos_512_v4
+4/30141/campos_512_v4
+4/30145/campos_512_v4
+4/30165/campos_512_v4
+4/30179/campos_512_v4
+4/30192/campos_512_v4
+4/30193/campos_512_v4
+4/30196/campos_512_v4
+4/30203/campos_512_v4
+4/30205/campos_512_v4
+4/30213/campos_512_v4
+4/30218/campos_512_v4
+4/30219/campos_512_v4
+4/30221/campos_512_v4
+4/30231/campos_512_v4
+4/30235/campos_512_v4
+4/30238/campos_512_v4
+4/30239/campos_512_v4
+4/30241/campos_512_v4
+4/30261/campos_512_v4
+4/30269/campos_512_v4
+4/30270/campos_512_v4
+4/30279/campos_512_v4
+4/30299/campos_512_v4
+4/30320/campos_512_v4
+4/30337/campos_512_v4
+4/30351/campos_512_v4
+4/30360/campos_512_v4
+4/30374/campos_512_v4
+4/30377/campos_512_v4
+4/30397/campos_512_v4
+4/30400/campos_512_v4
+4/30409/campos_512_v4
+4/30414/campos_512_v4
+4/30416/campos_512_v4
+4/30425/campos_512_v4
+4/30428/campos_512_v4
+4/30434/campos_512_v4
+4/30437/campos_512_v4
+4/30452/campos_512_v4
+4/30458/campos_512_v4
+4/30463/campos_512_v4
+4/30468/campos_512_v4
+4/30474/campos_512_v4
+4/30475/campos_512_v4
+4/30509/campos_512_v4
+4/30512/campos_512_v4
+4/30515/campos_512_v4
+4/30522/campos_512_v4
+4/30553/campos_512_v4
+4/30562/campos_512_v4
+4/30569/campos_512_v4
+4/30571/campos_512_v4
+4/30572/campos_512_v4
+4/30582/campos_512_v4
+4/30587/campos_512_v4
+4/30593/campos_512_v4
+4/30595/campos_512_v4
+4/30613/campos_512_v4
+4/30621/campos_512_v4
+4/30627/campos_512_v4
+4/30634/campos_512_v4
+4/30643/campos_512_v4
+4/30661/campos_512_v4
+4/30667/campos_512_v4
+4/30670/campos_512_v4
+4/30671/campos_512_v4
+4/30683/campos_512_v4
+4/30699/campos_512_v4
+4/30701/campos_512_v4
+4/30704/campos_512_v4
+4/30713/campos_512_v4
+4/30732/campos_512_v4
+4/30736/campos_512_v4
+4/30743/campos_512_v4
+4/30744/campos_512_v4
+4/30753/campos_512_v4
+4/30755/campos_512_v4
+4/30768/campos_512_v4
+4/30779/campos_512_v4
+4/30783/campos_512_v4
+4/30808/campos_512_v4
+4/30809/campos_512_v4
+4/30810/campos_512_v4
+4/30827/campos_512_v4
+4/30828/campos_512_v4
+4/30830/campos_512_v4
+4/30838/campos_512_v4
+4/30853/campos_512_v4
+4/30857/campos_512_v4
+4/30884/campos_512_v4
+4/30912/campos_512_v4
+4/30916/campos_512_v4
+4/30932/campos_512_v4
+4/30944/campos_512_v4
+4/30953/campos_512_v4
+4/30960/campos_512_v4
+4/30962/campos_512_v4
+4/30968/campos_512_v4
+4/30970/campos_512_v4
+4/30976/campos_512_v4
+4/31017/campos_512_v4
+4/31034/campos_512_v4
+4/31046/campos_512_v4
+4/31047/campos_512_v4
+4/31048/campos_512_v4
+4/31059/campos_512_v4
+4/31061/campos_512_v4
+4/31069/campos_512_v4
+4/31079/campos_512_v4
+4/31096/campos_512_v4
+4/31103/campos_512_v4
+4/31113/campos_512_v4
+4/31121/campos_512_v4
+4/31124/campos_512_v4
+4/31131/campos_512_v4
+4/31144/campos_512_v4
+4/31150/campos_512_v4
+4/31164/campos_512_v4
+4/31189/campos_512_v4
+4/31195/campos_512_v4
+4/31203/campos_512_v4
+4/31210/campos_512_v4
+4/31221/campos_512_v4
+4/31222/campos_512_v4
+4/31228/campos_512_v4
+4/31236/campos_512_v4
+4/31268/campos_512_v4
+4/31278/campos_512_v4
+4/31279/campos_512_v4
+4/31289/campos_512_v4
+4/31308/campos_512_v4
+4/31314/campos_512_v4
+4/31319/campos_512_v4
+4/31323/campos_512_v4
+4/31331/campos_512_v4
+4/31332/campos_512_v4
+4/31339/campos_512_v4
+4/31346/campos_512_v4
+4/31349/campos_512_v4
+4/31353/campos_512_v4
+4/31380/campos_512_v4
+4/31392/campos_512_v4
+4/31397/campos_512_v4
+4/31399/campos_512_v4
+4/31414/campos_512_v4
+4/31422/campos_512_v4
+4/31423/campos_512_v4
+4/31437/campos_512_v4
+4/31442/campos_512_v4
+4/31449/campos_512_v4
+4/31455/campos_512_v4
+4/31475/campos_512_v4
+4/31487/campos_512_v4
+4/31505/campos_512_v4
+4/31507/campos_512_v4
+4/31509/campos_512_v4
+4/31522/campos_512_v4
+4/31533/campos_512_v4
+4/31538/campos_512_v4
+4/31546/campos_512_v4
+4/31548/campos_512_v4
+4/31550/campos_512_v4
+4/31555/campos_512_v4
+4/31570/campos_512_v4
+4/31572/campos_512_v4
+4/31577/campos_512_v4
+4/31586/campos_512_v4
+4/31603/campos_512_v4
+4/31606/campos_512_v4
+4/31610/campos_512_v4
+4/31611/campos_512_v4
+4/31616/campos_512_v4
+4/31617/campos_512_v4
+4/31621/campos_512_v4
+4/31622/campos_512_v4
+4/31623/campos_512_v4
+4/31633/campos_512_v4
+4/31634/campos_512_v4
+4/31638/campos_512_v4
+4/31650/campos_512_v4
+4/31654/campos_512_v4
+4/31664/campos_512_v4
+4/31672/campos_512_v4
+4/31680/campos_512_v4
+4/31681/campos_512_v4
+4/31698/campos_512_v4
+4/31713/campos_512_v4
+4/31721/campos_512_v4
+4/31725/campos_512_v4
+4/31755/campos_512_v4
+4/31778/campos_512_v4
+4/31784/campos_512_v4
+4/31795/campos_512_v4
+4/31798/campos_512_v4
+4/31799/campos_512_v4
+4/31806/campos_512_v4
+4/31809/campos_512_v4
+4/31813/campos_512_v4
+4/31822/campos_512_v4
+4/31825/campos_512_v4
+4/31847/campos_512_v4
+4/31852/campos_512_v4
+4/31875/campos_512_v4
+4/31880/campos_512_v4
+4/31889/campos_512_v4
+4/31895/campos_512_v4
+4/31897/campos_512_v4
+4/31904/campos_512_v4
+4/31910/campos_512_v4
+4/31912/campos_512_v4
+4/31913/campos_512_v4
+4/31929/campos_512_v4
+4/31939/campos_512_v4
+4/31956/campos_512_v4
+4/31965/campos_512_v4
+4/31983/campos_512_v4
+4/31984/campos_512_v4
+4/31988/campos_512_v4
+4/31990/campos_512_v4
+4/31992/campos_512_v4
+4/31993/campos_512_v4
+4/32029/campos_512_v4
+4/32043/campos_512_v4
+4/32049/campos_512_v4
+4/32051/campos_512_v4
+4/32062/campos_512_v4
+4/32067/campos_512_v4
+4/32072/campos_512_v4
+4/32075/campos_512_v4
+4/32099/campos_512_v4
+4/32101/campos_512_v4
+4/32104/campos_512_v4
+4/32106/campos_512_v4
+4/32110/campos_512_v4
+4/32126/campos_512_v4
+4/32131/campos_512_v4
+4/32152/campos_512_v4
+4/32170/campos_512_v4
+4/32172/campos_512_v4
+4/32173/campos_512_v4
+4/32174/campos_512_v4
+4/32176/campos_512_v4
+4/32180/campos_512_v4
+4/32185/campos_512_v4
+4/32186/campos_512_v4
+4/32187/campos_512_v4
+4/32195/campos_512_v4
+4/32206/campos_512_v4
+4/32207/campos_512_v4
+4/32219/campos_512_v4
+4/32223/campos_512_v4
+4/32232/campos_512_v4
+4/32235/campos_512_v4
+4/32237/campos_512_v4
+4/32241/campos_512_v4
+4/32248/campos_512_v4
+4/32251/campos_512_v4
+4/32257/campos_512_v4
+4/32273/campos_512_v4
+4/32289/campos_512_v4
+4/32292/campos_512_v4
+4/32298/campos_512_v4
+4/32306/campos_512_v4
+4/32314/campos_512_v4
+4/32320/campos_512_v4
+4/32336/campos_512_v4
+4/32338/campos_512_v4
+4/32339/campos_512_v4
+4/32346/campos_512_v4
+4/32362/campos_512_v4
+4/32377/campos_512_v4
+4/32390/campos_512_v4
+4/32408/campos_512_v4
+4/32418/campos_512_v4
+4/32420/campos_512_v4
+4/32429/campos_512_v4
+4/32433/campos_512_v4
+4/32443/campos_512_v4
+4/32465/campos_512_v4
+4/32466/campos_512_v4
+4/32478/campos_512_v4
+4/32490/campos_512_v4
+4/32492/campos_512_v4
+4/32498/campos_512_v4
+4/32501/campos_512_v4
+4/32503/campos_512_v4
+4/32519/campos_512_v4
+4/32521/campos_512_v4
+4/32531/campos_512_v4
+4/32533/campos_512_v4
+4/32535/campos_512_v4
+4/32544/campos_512_v4
+4/32547/campos_512_v4
+4/32552/campos_512_v4
+4/32559/campos_512_v4
+4/32560/campos_512_v4
+4/32561/campos_512_v4
+4/32569/campos_512_v4
+4/32571/campos_512_v4
+4/32583/campos_512_v4
+4/32585/campos_512_v4
+4/32614/campos_512_v4
+4/32620/campos_512_v4
+4/32625/campos_512_v4
+4/32631/campos_512_v4
+4/32638/campos_512_v4
+4/32658/campos_512_v4
+4/32662/campos_512_v4
+4/32664/campos_512_v4
+4/32668/campos_512_v4
+4/32682/campos_512_v4
+4/32685/campos_512_v4
+4/32706/campos_512_v4
+4/32724/campos_512_v4
+4/32745/campos_512_v4
+4/32758/campos_512_v4
+4/32762/campos_512_v4
+4/32765/campos_512_v4
+4/32774/campos_512_v4
+4/32780/campos_512_v4
+4/32782/campos_512_v4
+4/32786/campos_512_v4
+4/32790/campos_512_v4
+4/32819/campos_512_v4
+4/32822/campos_512_v4
+4/32826/campos_512_v4
+4/32854/campos_512_v4
+4/32858/campos_512_v4
+4/32863/campos_512_v4
+4/32867/campos_512_v4
+4/32873/campos_512_v4
+4/32879/campos_512_v4
+4/32883/campos_512_v4
+4/32884/campos_512_v4
+4/32889/campos_512_v4
+4/32897/campos_512_v4
+4/32904/campos_512_v4
+4/32927/campos_512_v4
+4/32940/campos_512_v4
+4/32956/campos_512_v4
+4/32961/campos_512_v4
+4/32982/campos_512_v4
+4/32984/campos_512_v4
+4/33005/campos_512_v4
+4/33038/campos_512_v4
+4/33043/campos_512_v4
+4/33059/campos_512_v4
+4/33069/campos_512_v4
+4/33104/campos_512_v4
+4/33110/campos_512_v4
+4/33111/campos_512_v4
+4/33117/campos_512_v4
+4/33118/campos_512_v4
+4/33121/campos_512_v4
+4/33138/campos_512_v4
+4/33145/campos_512_v4
+4/33152/campos_512_v4
+4/33155/campos_512_v4
+4/33163/campos_512_v4
+4/33170/campos_512_v4
+4/33186/campos_512_v4
+4/33188/campos_512_v4
+4/33194/campos_512_v4
+4/33224/campos_512_v4
+4/33236/campos_512_v4
+4/33241/campos_512_v4
+4/33250/campos_512_v4
+4/33256/campos_512_v4
+4/33307/campos_512_v4
+4/33320/campos_512_v4
+4/33346/campos_512_v4
+4/33363/campos_512_v4
+4/33368/campos_512_v4
+4/33376/campos_512_v4
+4/33379/campos_512_v4
+4/33380/campos_512_v4
+4/33402/campos_512_v4
+4/33413/campos_512_v4
+4/33417/campos_512_v4
+4/33422/campos_512_v4
+4/33427/campos_512_v4
+4/33428/campos_512_v4
+4/33432/campos_512_v4
+4/33442/campos_512_v4
+4/33444/campos_512_v4
+4/33447/campos_512_v4
+4/33450/campos_512_v4
+4/33451/campos_512_v4
+4/33458/campos_512_v4
+4/33461/campos_512_v4
+4/33476/campos_512_v4
+4/33481/campos_512_v4
+4/33483/campos_512_v4
+4/33486/campos_512_v4
+4/33491/campos_512_v4
+4/33498/campos_512_v4
+4/33528/campos_512_v4
+4/33529/campos_512_v4
+4/33530/campos_512_v4
+4/33536/campos_512_v4
+4/33537/campos_512_v4
+4/33540/campos_512_v4
+4/33557/campos_512_v4
+4/33558/campos_512_v4
+4/33559/campos_512_v4
+4/33560/campos_512_v4
+4/33575/campos_512_v4
+4/33590/campos_512_v4
+4/33591/campos_512_v4
+4/33600/campos_512_v4
+4/33603/campos_512_v4
+4/33612/campos_512_v4
+4/33632/campos_512_v4
+4/33643/campos_512_v4
+4/33646/campos_512_v4
+4/33657/campos_512_v4
+4/33658/campos_512_v4
+4/33673/campos_512_v4
+4/33688/campos_512_v4
+4/33706/campos_512_v4
+4/33717/campos_512_v4
+4/33721/campos_512_v4
+4/33725/campos_512_v4
+4/33736/campos_512_v4
+4/33746/campos_512_v4
+4/33753/campos_512_v4
+4/33755/campos_512_v4
+4/33765/campos_512_v4
+4/33767/campos_512_v4
+4/33774/campos_512_v4
+4/33779/campos_512_v4
+4/33808/campos_512_v4
+4/33812/campos_512_v4
+4/33813/campos_512_v4
+4/33827/campos_512_v4
+4/33838/campos_512_v4
+4/33853/campos_512_v4
+4/33856/campos_512_v4
+4/33859/campos_512_v4
+4/33865/campos_512_v4
+4/33873/campos_512_v4
+4/33892/campos_512_v4
+4/33899/campos_512_v4
+4/33902/campos_512_v4
+4/33912/campos_512_v4
+4/33939/campos_512_v4
+4/33941/campos_512_v4
+4/33955/campos_512_v4
+4/33956/campos_512_v4
+4/33957/campos_512_v4
+4/33976/campos_512_v4
+4/33980/campos_512_v4
+4/33983/campos_512_v4
+4/33987/campos_512_v4
+4/33989/campos_512_v4
+4/33991/campos_512_v4
+4/34011/campos_512_v4
+4/34028/campos_512_v4
+4/34035/campos_512_v4
+4/34045/campos_512_v4
+4/34057/campos_512_v4
+4/34067/campos_512_v4
+4/34070/campos_512_v4
+4/34071/campos_512_v4
+4/34072/campos_512_v4
+4/34077/campos_512_v4
+4/34092/campos_512_v4
+4/34107/campos_512_v4
+4/34117/campos_512_v4
+4/34129/campos_512_v4
+4/34166/campos_512_v4
+4/34190/campos_512_v4
+4/34208/campos_512_v4
+4/34218/campos_512_v4
+4/34220/campos_512_v4
+4/34225/campos_512_v4
+4/34232/campos_512_v4
+4/34255/campos_512_v4
+4/34257/campos_512_v4
+4/34259/campos_512_v4
+4/34260/campos_512_v4
+4/34270/campos_512_v4
+4/34280/campos_512_v4
+4/34285/campos_512_v4
+4/34287/campos_512_v4
+4/34295/campos_512_v4
+4/34304/campos_512_v4
+4/34317/campos_512_v4
+4/34319/campos_512_v4
+4/34323/campos_512_v4
+4/34336/campos_512_v4
+4/34345/campos_512_v4
+4/34349/campos_512_v4
+4/34354/campos_512_v4
+4/34363/campos_512_v4
+4/34373/campos_512_v4
+4/34376/campos_512_v4
+4/34377/campos_512_v4
+4/34389/campos_512_v4
+4/34417/campos_512_v4
+4/34420/campos_512_v4
+4/34422/campos_512_v4
+4/34425/campos_512_v4
+4/34429/campos_512_v4
+4/34431/campos_512_v4
+4/34449/campos_512_v4
+4/34452/campos_512_v4
+4/34459/campos_512_v4
+4/34462/campos_512_v4
+4/34476/campos_512_v4
+4/34481/campos_512_v4
+4/34484/campos_512_v4
+4/34513/campos_512_v4
+4/34517/campos_512_v4
+4/34518/campos_512_v4
+4/34531/campos_512_v4
+4/34532/campos_512_v4
+4/34536/campos_512_v4
+4/34542/campos_512_v4
+4/34555/campos_512_v4
+4/34556/campos_512_v4
+4/34558/campos_512_v4
+4/34582/campos_512_v4
+4/34584/campos_512_v4
+4/34596/campos_512_v4
+4/34608/campos_512_v4
+4/34641/campos_512_v4
+4/34652/campos_512_v4
+4/34685/campos_512_v4
+4/34703/campos_512_v4
+4/34722/campos_512_v4
+4/34723/campos_512_v4
+4/34729/campos_512_v4
+4/34759/campos_512_v4
+4/34765/campos_512_v4
+4/34786/campos_512_v4
+4/34826/campos_512_v4
+4/34828/campos_512_v4
+4/34831/campos_512_v4
+4/34842/campos_512_v4
+4/34858/campos_512_v4
+4/34861/campos_512_v4
+4/34862/campos_512_v4
+4/34874/campos_512_v4
+4/34876/campos_512_v4
+4/34879/campos_512_v4
+4/34882/campos_512_v4
+4/34886/campos_512_v4
+4/34897/campos_512_v4
+4/34898/campos_512_v4
+4/34911/campos_512_v4
+4/34913/campos_512_v4
+4/34916/campos_512_v4
+4/34934/campos_512_v4
+4/34947/campos_512_v4
+4/34952/campos_512_v4
+4/34958/campos_512_v4
+4/34973/campos_512_v4
+4/34979/campos_512_v4
+4/34989/campos_512_v4
+4/34996/campos_512_v4
+4/35000/campos_512_v4
+40/210020/campos_512_v4
+40/210021/campos_512_v4
+40/210035/campos_512_v4
+40/210045/campos_512_v4
+40/210053/campos_512_v4
+40/210088/campos_512_v4
+40/210096/campos_512_v4
+40/210099/campos_512_v4
+40/210102/campos_512_v4
+40/210111/campos_512_v4
+40/210114/campos_512_v4
+40/210119/campos_512_v4
+40/210124/campos_512_v4
+40/210127/campos_512_v4
+40/210144/campos_512_v4
+40/210152/campos_512_v4
+40/210164/campos_512_v4
+40/210169/campos_512_v4
+40/210183/campos_512_v4
+40/210194/campos_512_v4
+40/210196/campos_512_v4
+40/210198/campos_512_v4
+40/210201/campos_512_v4
+40/210208/campos_512_v4
+40/210209/campos_512_v4
+40/210210/campos_512_v4
+40/210226/campos_512_v4
+40/210230/campos_512_v4
+40/210235/campos_512_v4
+40/210241/campos_512_v4
+40/210245/campos_512_v4
+40/210252/campos_512_v4
+40/210258/campos_512_v4
+40/210269/campos_512_v4
+40/210270/campos_512_v4
+40/210272/campos_512_v4
+40/210274/campos_512_v4
+40/210279/campos_512_v4
+40/210299/campos_512_v4
+40/210314/campos_512_v4
+40/210316/campos_512_v4
+40/210333/campos_512_v4
+40/210339/campos_512_v4
+40/210341/campos_512_v4
+40/210347/campos_512_v4
+40/210349/campos_512_v4
+40/210350/campos_512_v4
+40/210360/campos_512_v4
+40/210361/campos_512_v4
+40/210376/campos_512_v4
+40/210378/campos_512_v4
+40/210379/campos_512_v4
+40/210393/campos_512_v4
+40/210399/campos_512_v4
+40/210428/campos_512_v4
+40/210436/campos_512_v4
+40/210439/campos_512_v4
+40/210442/campos_512_v4
+40/210461/campos_512_v4
+40/210463/campos_512_v4
+40/210477/campos_512_v4
+40/210489/campos_512_v4
+40/210492/campos_512_v4
+40/210495/campos_512_v4
+40/210497/campos_512_v4
+40/210512/campos_512_v4
+40/210513/campos_512_v4
+40/210519/campos_512_v4
+40/210527/campos_512_v4
+40/210529/campos_512_v4
+40/210555/campos_512_v4
+40/210567/campos_512_v4
+40/210578/campos_512_v4
+40/210579/campos_512_v4
+40/210592/campos_512_v4
+40/210593/campos_512_v4
+40/210607/campos_512_v4
+40/210608/campos_512_v4
+40/210609/campos_512_v4
+40/210618/campos_512_v4
+40/210623/campos_512_v4
+40/210628/campos_512_v4
+40/210630/campos_512_v4
+40/210646/campos_512_v4
+40/210668/campos_512_v4
+40/210670/campos_512_v4
+40/210671/campos_512_v4
+40/210675/campos_512_v4
+40/210701/campos_512_v4
+40/210731/campos_512_v4
+40/210736/campos_512_v4
+40/210747/campos_512_v4
+40/210749/campos_512_v4
+40/210758/campos_512_v4
+40/210781/campos_512_v4
+40/210789/campos_512_v4
+40/210795/campos_512_v4
+40/210807/campos_512_v4
+40/210810/campos_512_v4
+40/210815/campos_512_v4
+40/210832/campos_512_v4
+40/210861/campos_512_v4
+40/210867/campos_512_v4
+40/210878/campos_512_v4
+40/210886/campos_512_v4
+40/210898/campos_512_v4
+40/210899/campos_512_v4
+40/210905/campos_512_v4
+40/210908/campos_512_v4
+40/210915/campos_512_v4
+40/210920/campos_512_v4
+40/210923/campos_512_v4
+40/210925/campos_512_v4
+40/210935/campos_512_v4
+40/210940/campos_512_v4
+40/210960/campos_512_v4
+40/210975/campos_512_v4
+40/210984/campos_512_v4
+40/210987/campos_512_v4
+40/210988/campos_512_v4
+40/210997/campos_512_v4
+40/210998/campos_512_v4
+40/211006/campos_512_v4
+40/211014/campos_512_v4
+40/211031/campos_512_v4
+40/211037/campos_512_v4
+40/211047/campos_512_v4
+40/211050/campos_512_v4
+40/211052/campos_512_v4
+40/211060/campos_512_v4
+40/211069/campos_512_v4
+40/211072/campos_512_v4
+40/211077/campos_512_v4
+40/211102/campos_512_v4
+40/211119/campos_512_v4
+40/211126/campos_512_v4
+40/211131/campos_512_v4
+40/211135/campos_512_v4
+40/211145/campos_512_v4
+40/211146/campos_512_v4
+40/211161/campos_512_v4
+40/211187/campos_512_v4
+40/211195/campos_512_v4
+40/211199/campos_512_v4
+40/211218/campos_512_v4
+40/211233/campos_512_v4
+40/211236/campos_512_v4
+40/211254/campos_512_v4
+40/211263/campos_512_v4
+40/211273/campos_512_v4
+40/211274/campos_512_v4
+40/211280/campos_512_v4
+40/211285/campos_512_v4
+40/211291/campos_512_v4
+40/211296/campos_512_v4
+40/211297/campos_512_v4
+40/211302/campos_512_v4
+40/211311/campos_512_v4
+40/211315/campos_512_v4
+40/211316/campos_512_v4
+40/211317/campos_512_v4
+40/211322/campos_512_v4
+40/211326/campos_512_v4
+40/211329/campos_512_v4
+40/211331/campos_512_v4
+40/211346/campos_512_v4
+40/211351/campos_512_v4
+40/211353/campos_512_v4
+40/211358/campos_512_v4
+40/211369/campos_512_v4
+40/211394/campos_512_v4
+40/211396/campos_512_v4
+40/211400/campos_512_v4
+40/211412/campos_512_v4
+40/211413/campos_512_v4
+40/211416/campos_512_v4
+40/211438/campos_512_v4
+40/211458/campos_512_v4
+40/211459/campos_512_v4
+40/211460/campos_512_v4
+40/211467/campos_512_v4
+40/211471/campos_512_v4
+40/211479/campos_512_v4
+40/211483/campos_512_v4
+40/211489/campos_512_v4
+40/211493/campos_512_v4
+40/211494/campos_512_v4
+40/211498/campos_512_v4
+40/211503/campos_512_v4
+40/211519/campos_512_v4
+40/211576/campos_512_v4
+40/211588/campos_512_v4
+40/211601/campos_512_v4
+40/211627/campos_512_v4
+40/211637/campos_512_v4
+40/211644/campos_512_v4
+40/211658/campos_512_v4
+40/211662/campos_512_v4
+40/211698/campos_512_v4
+40/211735/campos_512_v4
+40/211739/campos_512_v4
+40/211740/campos_512_v4
+40/211745/campos_512_v4
+40/211750/campos_512_v4
+40/211754/campos_512_v4
+40/211768/campos_512_v4
+40/211777/campos_512_v4
+40/211780/campos_512_v4
+40/211782/campos_512_v4
+40/211789/campos_512_v4
+40/211802/campos_512_v4
+40/211839/campos_512_v4
+40/211844/campos_512_v4
+40/211876/campos_512_v4
+40/211878/campos_512_v4
+40/211882/campos_512_v4
+40/211892/campos_512_v4
+40/211895/campos_512_v4
+40/211899/campos_512_v4
+40/211901/campos_512_v4
+40/211904/campos_512_v4
+40/211915/campos_512_v4
+40/211927/campos_512_v4
+40/211932/campos_512_v4
+40/211941/campos_512_v4
+40/211942/campos_512_v4
+40/211959/campos_512_v4
+40/211961/campos_512_v4
+40/211962/campos_512_v4
+40/211970/campos_512_v4
+40/211977/campos_512_v4
+40/211979/campos_512_v4
+40/211991/campos_512_v4
+40/211997/campos_512_v4
+40/212009/campos_512_v4
+40/212014/campos_512_v4
+40/212019/campos_512_v4
+40/212023/campos_512_v4
+40/212027/campos_512_v4
+40/212029/campos_512_v4
+40/212036/campos_512_v4
+40/212038/campos_512_v4
+40/212059/campos_512_v4
+40/212060/campos_512_v4
+40/212065/campos_512_v4
+40/212067/campos_512_v4
+40/212076/campos_512_v4
+40/212081/campos_512_v4
+40/212098/campos_512_v4
+40/212099/campos_512_v4
+40/212102/campos_512_v4
+40/212105/campos_512_v4
+40/212115/campos_512_v4
+40/212123/campos_512_v4
+40/212126/campos_512_v4
+40/212136/campos_512_v4
+40/212137/campos_512_v4
+40/212150/campos_512_v4
+40/212165/campos_512_v4
+40/212181/campos_512_v4
+40/212184/campos_512_v4
+40/212200/campos_512_v4
+40/212225/campos_512_v4
+40/212227/campos_512_v4
+40/212233/campos_512_v4
+40/212245/campos_512_v4
+40/212258/campos_512_v4
+40/212270/campos_512_v4
+40/212290/campos_512_v4
+40/212291/campos_512_v4
+40/212293/campos_512_v4
+40/212311/campos_512_v4
+40/212328/campos_512_v4
+40/212347/campos_512_v4
+40/212351/campos_512_v4
+40/212352/campos_512_v4
+40/212375/campos_512_v4
+40/212378/campos_512_v4
+40/212382/campos_512_v4
+40/212395/campos_512_v4
+40/212397/campos_512_v4
+40/212400/campos_512_v4
+40/212403/campos_512_v4
+40/212413/campos_512_v4
+40/212428/campos_512_v4
+40/212438/campos_512_v4
+40/212440/campos_512_v4
+40/212448/campos_512_v4
+40/212450/campos_512_v4
+40/212454/campos_512_v4
+40/212457/campos_512_v4
+40/212463/campos_512_v4
+40/212473/campos_512_v4
+40/212479/campos_512_v4
+40/212484/campos_512_v4
+40/212486/campos_512_v4
+40/212495/campos_512_v4
+40/212507/campos_512_v4
+40/212532/campos_512_v4
+40/212548/campos_512_v4
+40/212549/campos_512_v4
+40/212556/campos_512_v4
+40/212561/campos_512_v4
+40/212564/campos_512_v4
+40/212569/campos_512_v4
+40/212572/campos_512_v4
+40/212577/campos_512_v4
+40/212578/campos_512_v4
+40/212611/campos_512_v4
+40/212618/campos_512_v4
+40/212622/campos_512_v4
+40/212652/campos_512_v4
+40/212656/campos_512_v4
+40/212679/campos_512_v4
+40/212682/campos_512_v4
+40/212701/campos_512_v4
+40/212704/campos_512_v4
+40/212708/campos_512_v4
+40/212710/campos_512_v4
+40/212712/campos_512_v4
+40/212715/campos_512_v4
+40/212717/campos_512_v4
+40/212746/campos_512_v4
+40/212747/campos_512_v4
+40/212755/campos_512_v4
+40/212759/campos_512_v4
+40/212764/campos_512_v4
+40/212770/campos_512_v4
+40/212773/campos_512_v4
+40/212774/campos_512_v4
+40/212776/campos_512_v4
+40/212778/campos_512_v4
+40/212780/campos_512_v4
+40/212788/campos_512_v4
+40/212791/campos_512_v4
+40/212792/campos_512_v4
+40/212801/campos_512_v4
+40/212810/campos_512_v4
+40/212818/campos_512_v4
+40/212831/campos_512_v4
+40/212843/campos_512_v4
+40/212844/campos_512_v4
+40/212855/campos_512_v4
+40/212856/campos_512_v4
+40/212864/campos_512_v4
+40/212867/campos_512_v4
+40/212868/campos_512_v4
+40/212879/campos_512_v4
+40/212885/campos_512_v4
+40/212888/campos_512_v4
+40/212895/campos_512_v4
+40/212897/campos_512_v4
+40/212910/campos_512_v4
+40/212914/campos_512_v4
+40/212915/campos_512_v4
+40/212917/campos_512_v4
+40/212938/campos_512_v4
+40/212948/campos_512_v4
+40/212956/campos_512_v4
+40/213010/campos_512_v4
+40/213026/campos_512_v4
+40/213031/campos_512_v4
+40/213040/campos_512_v4
+40/213043/campos_512_v4
+40/213052/campos_512_v4
+40/213059/campos_512_v4
+40/213062/campos_512_v4
+40/213063/campos_512_v4
+40/213076/campos_512_v4
+40/213078/campos_512_v4
+40/213097/campos_512_v4
+40/213103/campos_512_v4
+40/213106/campos_512_v4
+40/213116/campos_512_v4
+40/213127/campos_512_v4
+40/213132/campos_512_v4
+40/213141/campos_512_v4
+40/213150/campos_512_v4
+40/213152/campos_512_v4
+40/213174/campos_512_v4
+40/213178/campos_512_v4
+40/213190/campos_512_v4
+40/213196/campos_512_v4
+40/213201/campos_512_v4
+40/213231/campos_512_v4
+40/213241/campos_512_v4
+40/213251/campos_512_v4
+40/213260/campos_512_v4
+40/213262/campos_512_v4
+40/213288/campos_512_v4
+40/213290/campos_512_v4
+40/213295/campos_512_v4
+40/213313/campos_512_v4
+40/213322/campos_512_v4
+40/213325/campos_512_v4
+40/213326/campos_512_v4
+40/213335/campos_512_v4
+40/213344/campos_512_v4
+40/213346/campos_512_v4
+40/213347/campos_512_v4
+40/213376/campos_512_v4
+40/213391/campos_512_v4
+40/213396/campos_512_v4
+40/213418/campos_512_v4
+40/213423/campos_512_v4
+40/213447/campos_512_v4
+40/213462/campos_512_v4
+40/213463/campos_512_v4
+40/213468/campos_512_v4
+40/213473/campos_512_v4
+40/213485/campos_512_v4
+40/213494/campos_512_v4
+40/213517/campos_512_v4
+40/213519/campos_512_v4
+40/213526/campos_512_v4
+40/213527/campos_512_v4
+40/213541/campos_512_v4
+40/213562/campos_512_v4
+40/213566/campos_512_v4
+40/213573/campos_512_v4
+40/213577/campos_512_v4
+40/213579/campos_512_v4
+40/213586/campos_512_v4
+40/213587/campos_512_v4
+40/213590/campos_512_v4
+40/213594/campos_512_v4
+40/213599/campos_512_v4
+40/213624/campos_512_v4
+40/213625/campos_512_v4
+40/213631/campos_512_v4
+40/213642/campos_512_v4
+40/213650/campos_512_v4
+40/213668/campos_512_v4
+40/213671/campos_512_v4
+40/213674/campos_512_v4
+40/213678/campos_512_v4
+40/213681/campos_512_v4
+40/213696/campos_512_v4
+40/213712/campos_512_v4
+40/213718/campos_512_v4
+40/213728/campos_512_v4
+40/213730/campos_512_v4
+40/213732/campos_512_v4
+40/213741/campos_512_v4
+40/213764/campos_512_v4
+40/213774/campos_512_v4
+40/213779/campos_512_v4
+40/213782/campos_512_v4
+40/213793/campos_512_v4
+40/213796/campos_512_v4
+40/213804/campos_512_v4
+40/213828/campos_512_v4
+40/213847/campos_512_v4
+40/213861/campos_512_v4
+40/213865/campos_512_v4
+40/213870/campos_512_v4
+40/213873/campos_512_v4
+40/213889/campos_512_v4
+40/213894/campos_512_v4
+40/213912/campos_512_v4
+40/213917/campos_512_v4
+40/213926/campos_512_v4
+40/213929/campos_512_v4
+40/213933/campos_512_v4
+40/213940/campos_512_v4
+40/213947/campos_512_v4
+40/213949/campos_512_v4
+40/213958/campos_512_v4
+40/213965/campos_512_v4
+40/213979/campos_512_v4
+40/213981/campos_512_v4
+40/213986/campos_512_v4
+40/214006/campos_512_v4
+40/214012/campos_512_v4
+40/214013/campos_512_v4
+40/214022/campos_512_v4
+40/214023/campos_512_v4
+40/214025/campos_512_v4
+40/214026/campos_512_v4
+40/214039/campos_512_v4
+40/214048/campos_512_v4
+40/214055/campos_512_v4
+40/214084/campos_512_v4
+40/214099/campos_512_v4
+40/214108/campos_512_v4
+40/214110/campos_512_v4
+40/214146/campos_512_v4
+40/214162/campos_512_v4
+40/214164/campos_512_v4
+40/214166/campos_512_v4
+40/214171/campos_512_v4
+40/214173/campos_512_v4
+40/214175/campos_512_v4
+40/214183/campos_512_v4
+40/214186/campos_512_v4
+40/214191/campos_512_v4
+40/214194/campos_512_v4
+40/214213/campos_512_v4
+40/214231/campos_512_v4
+40/214238/campos_512_v4
+40/214244/campos_512_v4
+40/214258/campos_512_v4
+40/214262/campos_512_v4
+40/214264/campos_512_v4
+40/214268/campos_512_v4
+40/214277/campos_512_v4
+40/214284/campos_512_v4
+40/214288/campos_512_v4
+40/214306/campos_512_v4
+40/214308/campos_512_v4
+40/214311/campos_512_v4
+40/214318/campos_512_v4
+40/214334/campos_512_v4
+40/214336/campos_512_v4
+40/214341/campos_512_v4
+40/214347/campos_512_v4
+40/214354/campos_512_v4
+40/214368/campos_512_v4
+40/214374/campos_512_v4
+40/214376/campos_512_v4
+40/214384/campos_512_v4
+40/214385/campos_512_v4
+40/214386/campos_512_v4
+40/214419/campos_512_v4
+40/214425/campos_512_v4
+40/214435/campos_512_v4
+40/214438/campos_512_v4
+40/214441/campos_512_v4
+40/214448/campos_512_v4
+40/214450/campos_512_v4
+40/214479/campos_512_v4
+40/214495/campos_512_v4
+40/214500/campos_512_v4
+40/214505/campos_512_v4
+40/214509/campos_512_v4
+40/214515/campos_512_v4
+40/214518/campos_512_v4
+40/214519/campos_512_v4
+40/214525/campos_512_v4
+40/214528/campos_512_v4
+40/214552/campos_512_v4
+40/214554/campos_512_v4
+40/214556/campos_512_v4
+40/214557/campos_512_v4
+40/214559/campos_512_v4
+40/214573/campos_512_v4
+40/214577/campos_512_v4
+40/214591/campos_512_v4
+40/214606/campos_512_v4
+40/214628/campos_512_v4
+40/214629/campos_512_v4
+40/214630/campos_512_v4
+40/214669/campos_512_v4
+40/214672/campos_512_v4
+40/214673/campos_512_v4
+40/214674/campos_512_v4
+40/214684/campos_512_v4
+40/214686/campos_512_v4
+40/214699/campos_512_v4
+40/214714/campos_512_v4
+40/214715/campos_512_v4
+40/214736/campos_512_v4
+40/214739/campos_512_v4
+40/214743/campos_512_v4
+40/214745/campos_512_v4
+40/214748/campos_512_v4
+40/214777/campos_512_v4
+40/214784/campos_512_v4
+40/214797/campos_512_v4
+40/214800/campos_512_v4
+40/214819/campos_512_v4
+40/214827/campos_512_v4
+40/214828/campos_512_v4
+40/214839/campos_512_v4
+40/214840/campos_512_v4
+40/214866/campos_512_v4
+40/214884/campos_512_v4
+40/214903/campos_512_v4
+40/214908/campos_512_v4
+40/214911/campos_512_v4
+40/214941/campos_512_v4
+40/214943/campos_512_v4
+40/214958/campos_512_v4
+40/214968/campos_512_v4
+40/214972/campos_512_v4
+40/214977/campos_512_v4
+40/214981/campos_512_v4
+40/214989/campos_512_v4
+40/214990/campos_512_v4
+40/214998/campos_512_v4
+41/215005/campos_512_v4
+41/215007/campos_512_v4
+41/215012/campos_512_v4
+41/215013/campos_512_v4
+41/215016/campos_512_v4
+41/215020/campos_512_v4
+41/215035/campos_512_v4
+41/215039/campos_512_v4
+41/215044/campos_512_v4
+41/215057/campos_512_v4
+41/215066/campos_512_v4
+41/215071/campos_512_v4
+41/215072/campos_512_v4
+41/215093/campos_512_v4
+41/215100/campos_512_v4
+41/215107/campos_512_v4
+41/215113/campos_512_v4
+41/215116/campos_512_v4
+41/215119/campos_512_v4
+41/215136/campos_512_v4
+41/215144/campos_512_v4
+41/215146/campos_512_v4
+41/215148/campos_512_v4
+41/215159/campos_512_v4
+41/215161/campos_512_v4
+41/215164/campos_512_v4
+41/215173/campos_512_v4
+41/215193/campos_512_v4
+41/215196/campos_512_v4
+41/215199/campos_512_v4
+41/215201/campos_512_v4
+41/215207/campos_512_v4
+41/215209/campos_512_v4
+41/215211/campos_512_v4
+41/215217/campos_512_v4
+41/215230/campos_512_v4
+41/215246/campos_512_v4
+41/215253/campos_512_v4
+41/215254/campos_512_v4
+41/215262/campos_512_v4
+41/215267/campos_512_v4
+41/215270/campos_512_v4
+41/215280/campos_512_v4
+41/215284/campos_512_v4
+41/215286/campos_512_v4
+41/215288/campos_512_v4
+41/215298/campos_512_v4
+41/215320/campos_512_v4
+41/215334/campos_512_v4
+41/215337/campos_512_v4
+41/215342/campos_512_v4
+41/215356/campos_512_v4
+41/215361/campos_512_v4
+41/215369/campos_512_v4
+41/215374/campos_512_v4
+41/215378/campos_512_v4
+41/215400/campos_512_v4
+41/215402/campos_512_v4
+41/215403/campos_512_v4
+41/215431/campos_512_v4
+41/215434/campos_512_v4
+41/215438/campos_512_v4
+41/215442/campos_512_v4
+41/215446/campos_512_v4
+41/215455/campos_512_v4
+41/215461/campos_512_v4
+41/215483/campos_512_v4
+41/215502/campos_512_v4
+41/215505/campos_512_v4
+41/215508/campos_512_v4
+41/215527/campos_512_v4
+41/215528/campos_512_v4
+41/215536/campos_512_v4
+41/215537/campos_512_v4
+41/215549/campos_512_v4
+41/215555/campos_512_v4
+41/215564/campos_512_v4
+41/215576/campos_512_v4
+41/215577/campos_512_v4
+41/215586/campos_512_v4
+41/215607/campos_512_v4
+41/215608/campos_512_v4
+41/215625/campos_512_v4
+41/215634/campos_512_v4
+41/215642/campos_512_v4
+41/215659/campos_512_v4
+41/215661/campos_512_v4
+41/215694/campos_512_v4
+41/215714/campos_512_v4
+41/215728/campos_512_v4
+41/215736/campos_512_v4
+41/215762/campos_512_v4
+41/215772/campos_512_v4
+41/215775/campos_512_v4
+41/215776/campos_512_v4
+41/215779/campos_512_v4
+41/215793/campos_512_v4
+41/215794/campos_512_v4
+41/215797/campos_512_v4
+41/215805/campos_512_v4
+41/215827/campos_512_v4
+41/215832/campos_512_v4
+41/215839/campos_512_v4
+41/215850/campos_512_v4
+41/215872/campos_512_v4
+41/215877/campos_512_v4
+41/215890/campos_512_v4
+41/215905/campos_512_v4
+41/215912/campos_512_v4
+41/215915/campos_512_v4
+41/215916/campos_512_v4
+41/215928/campos_512_v4
+41/215934/campos_512_v4
+41/215940/campos_512_v4
+41/215953/campos_512_v4
+41/215963/campos_512_v4
+41/215982/campos_512_v4
+41/215985/campos_512_v4
+41/215999/campos_512_v4
+41/216002/campos_512_v4
+41/216006/campos_512_v4
+41/216043/campos_512_v4
+41/216068/campos_512_v4
+41/216077/campos_512_v4
+41/216085/campos_512_v4
+41/216096/campos_512_v4
+41/216117/campos_512_v4
+41/216132/campos_512_v4
+41/216136/campos_512_v4
+41/216142/campos_512_v4
+41/216143/campos_512_v4
+41/216144/campos_512_v4
+41/216146/campos_512_v4
+41/216147/campos_512_v4
+41/216160/campos_512_v4
+41/216166/campos_512_v4
+41/216170/campos_512_v4
+41/216171/campos_512_v4
+41/216180/campos_512_v4
+41/216185/campos_512_v4
+41/216188/campos_512_v4
+41/216196/campos_512_v4
+41/216201/campos_512_v4
+41/216207/campos_512_v4
+41/216211/campos_512_v4
+41/216222/campos_512_v4
+41/216237/campos_512_v4
+41/216243/campos_512_v4
+41/216244/campos_512_v4
+41/216249/campos_512_v4
+41/216254/campos_512_v4
+41/216256/campos_512_v4
+41/216265/campos_512_v4
+41/216277/campos_512_v4
+41/216279/campos_512_v4
+41/216295/campos_512_v4
+41/216315/campos_512_v4
+41/216321/campos_512_v4
+41/216322/campos_512_v4
+41/216329/campos_512_v4
+41/216332/campos_512_v4
+41/216334/campos_512_v4
+41/216354/campos_512_v4
+41/216359/campos_512_v4
+41/216366/campos_512_v4
+41/216371/campos_512_v4
+41/216390/campos_512_v4
+41/216391/campos_512_v4
+41/216393/campos_512_v4
+41/216394/campos_512_v4
+41/216399/campos_512_v4
+41/216407/campos_512_v4
+41/216417/campos_512_v4
+41/216431/campos_512_v4
+41/216444/campos_512_v4
+41/216457/campos_512_v4
+41/216459/campos_512_v4
+41/216464/campos_512_v4
+41/216468/campos_512_v4
+41/216469/campos_512_v4
+41/216473/campos_512_v4
+41/216478/campos_512_v4
+41/216509/campos_512_v4
+41/216514/campos_512_v4
+41/216520/campos_512_v4
+41/216522/campos_512_v4
+41/216530/campos_512_v4
+41/216532/campos_512_v4
+41/216543/campos_512_v4
+41/216545/campos_512_v4
+41/216550/campos_512_v4
+41/216558/campos_512_v4
+41/216564/campos_512_v4
+41/216567/campos_512_v4
+41/216585/campos_512_v4
+41/216587/campos_512_v4
+41/216594/campos_512_v4
+41/216596/campos_512_v4
+41/216597/campos_512_v4
+41/216606/campos_512_v4
+41/216607/campos_512_v4
+41/216632/campos_512_v4
+41/216633/campos_512_v4
+41/216637/campos_512_v4
+41/216660/campos_512_v4
+41/216682/campos_512_v4
+41/216712/campos_512_v4
+41/216713/campos_512_v4
+41/216719/campos_512_v4
+41/216727/campos_512_v4
+41/216743/campos_512_v4
+41/216755/campos_512_v4
+41/216757/campos_512_v4
+41/216758/campos_512_v4
+41/216765/campos_512_v4
+41/216779/campos_512_v4
+41/216780/campos_512_v4
+41/216782/campos_512_v4
+41/216784/campos_512_v4
+41/216788/campos_512_v4
+41/216791/campos_512_v4
+41/216818/campos_512_v4
+41/216822/campos_512_v4
+41/216843/campos_512_v4
+41/216855/campos_512_v4
+41/216857/campos_512_v4
+41/216870/campos_512_v4
+41/216881/campos_512_v4
+41/216888/campos_512_v4
+41/216896/campos_512_v4
+41/216902/campos_512_v4
+41/216915/campos_512_v4
+41/216931/campos_512_v4
+41/216940/campos_512_v4
+41/216953/campos_512_v4
+41/216961/campos_512_v4
+41/216964/campos_512_v4
+41/216966/campos_512_v4
+41/216969/campos_512_v4
+41/216974/campos_512_v4
+41/216982/campos_512_v4
+41/216985/campos_512_v4
+41/216992/campos_512_v4
+41/216994/campos_512_v4
+41/216996/campos_512_v4
+41/217016/campos_512_v4
+41/217017/campos_512_v4
+41/217028/campos_512_v4
+41/217031/campos_512_v4
+41/217041/campos_512_v4
+41/217047/campos_512_v4
+41/217054/campos_512_v4
+41/217060/campos_512_v4
+41/217063/campos_512_v4
+41/217070/campos_512_v4
+41/217077/campos_512_v4
+41/217094/campos_512_v4
+41/217095/campos_512_v4
+41/217101/campos_512_v4
+41/217106/campos_512_v4
+41/217114/campos_512_v4
+41/217115/campos_512_v4
+41/217117/campos_512_v4
+41/217123/campos_512_v4
+41/217131/campos_512_v4
+41/217138/campos_512_v4
+41/217139/campos_512_v4
+41/217152/campos_512_v4
+41/217158/campos_512_v4
+41/217163/campos_512_v4
+41/217167/campos_512_v4
+41/217170/campos_512_v4
+41/217174/campos_512_v4
+41/217176/campos_512_v4
+41/217178/campos_512_v4
+41/217187/campos_512_v4
+41/217191/campos_512_v4
+41/217194/campos_512_v4
+41/217195/campos_512_v4
+41/217197/campos_512_v4
+41/217262/campos_512_v4
+41/217269/campos_512_v4
+41/217273/campos_512_v4
+41/217276/campos_512_v4
+41/217279/campos_512_v4
+41/217314/campos_512_v4
+41/217316/campos_512_v4
+41/217321/campos_512_v4
+41/217327/campos_512_v4
+41/217336/campos_512_v4
+41/217340/campos_512_v4
+41/217343/campos_512_v4
+41/217346/campos_512_v4
+41/217369/campos_512_v4
+41/217375/campos_512_v4
+41/217378/campos_512_v4
+41/217380/campos_512_v4
+41/217386/campos_512_v4
+41/217399/campos_512_v4
+41/217402/campos_512_v4
+41/217407/campos_512_v4
+41/217417/campos_512_v4
+41/217418/campos_512_v4
+41/217427/campos_512_v4
+41/217432/campos_512_v4
+41/217437/campos_512_v4
+41/217439/campos_512_v4
+41/217441/campos_512_v4
+41/217462/campos_512_v4
+41/217463/campos_512_v4
+41/217478/campos_512_v4
+41/217479/campos_512_v4
+41/217492/campos_512_v4
+41/217493/campos_512_v4
+41/217495/campos_512_v4
+41/217497/campos_512_v4
+41/217516/campos_512_v4
+41/217526/campos_512_v4
+41/217530/campos_512_v4
+41/217533/campos_512_v4
+41/217550/campos_512_v4
+41/217552/campos_512_v4
+41/217566/campos_512_v4
+41/217568/campos_512_v4
+41/217575/campos_512_v4
+41/217582/campos_512_v4
+41/217595/campos_512_v4
+41/217598/campos_512_v4
+41/217613/campos_512_v4
+41/217620/campos_512_v4
+41/217626/campos_512_v4
+41/217638/campos_512_v4
+41/217675/campos_512_v4
+41/217692/campos_512_v4
+41/217693/campos_512_v4
+41/217696/campos_512_v4
+41/217699/campos_512_v4
+41/217713/campos_512_v4
+41/217714/campos_512_v4
+41/217716/campos_512_v4
+41/217725/campos_512_v4
+41/217743/campos_512_v4
+41/217746/campos_512_v4
+41/217748/campos_512_v4
+41/217750/campos_512_v4
+41/217752/campos_512_v4
+41/217758/campos_512_v4
+41/217760/campos_512_v4
+41/217763/campos_512_v4
+41/217764/campos_512_v4
+41/217774/campos_512_v4
+41/217776/campos_512_v4
+41/217791/campos_512_v4
+41/217798/campos_512_v4
+41/217800/campos_512_v4
+41/217803/campos_512_v4
+41/217806/campos_512_v4
+41/217807/campos_512_v4
+41/217837/campos_512_v4
+41/217842/campos_512_v4
+41/217843/campos_512_v4
+41/217859/campos_512_v4
+41/217861/campos_512_v4
+41/217869/campos_512_v4
+41/217878/campos_512_v4
+41/217879/campos_512_v4
+41/217886/campos_512_v4
+41/217892/campos_512_v4
+41/217898/campos_512_v4
+41/217899/campos_512_v4
+41/217911/campos_512_v4
+41/217913/campos_512_v4
+41/217914/campos_512_v4
+41/217918/campos_512_v4
+41/217926/campos_512_v4
+41/217938/campos_512_v4
+41/217940/campos_512_v4
+41/217954/campos_512_v4
+41/217957/campos_512_v4
+41/217967/campos_512_v4
+41/217968/campos_512_v4
+41/217981/campos_512_v4
+41/218017/campos_512_v4
+41/218020/campos_512_v4
+41/218022/campos_512_v4
+41/218033/campos_512_v4
+41/218034/campos_512_v4
+41/218039/campos_512_v4
+41/218040/campos_512_v4
+41/218046/campos_512_v4
+41/218054/campos_512_v4
+41/218060/campos_512_v4
+41/218064/campos_512_v4
+41/218066/campos_512_v4
+41/218083/campos_512_v4
+41/218086/campos_512_v4
+41/218089/campos_512_v4
+41/218090/campos_512_v4
+41/218101/campos_512_v4
+41/218105/campos_512_v4
+41/218106/campos_512_v4
+41/218116/campos_512_v4
+41/218121/campos_512_v4
+41/218122/campos_512_v4
+41/218128/campos_512_v4
+41/218161/campos_512_v4
+41/218174/campos_512_v4
+41/218176/campos_512_v4
+41/218177/campos_512_v4
+41/218181/campos_512_v4
+41/218185/campos_512_v4
+41/218189/campos_512_v4
+41/218206/campos_512_v4
+41/218208/campos_512_v4
+41/218209/campos_512_v4
+41/218214/campos_512_v4
+41/218217/campos_512_v4
+41/218227/campos_512_v4
+41/218228/campos_512_v4
+41/218232/campos_512_v4
+41/218248/campos_512_v4
+41/218251/campos_512_v4
+41/218252/campos_512_v4
+41/218258/campos_512_v4
+41/218260/campos_512_v4
+41/218282/campos_512_v4
+41/218283/campos_512_v4
+41/218289/campos_512_v4
+41/218299/campos_512_v4
+41/218307/campos_512_v4
+41/218308/campos_512_v4
+41/218312/campos_512_v4
+41/218313/campos_512_v4
+41/218325/campos_512_v4
+41/218347/campos_512_v4
+41/218348/campos_512_v4
+41/218358/campos_512_v4
+41/218360/campos_512_v4
+41/218363/campos_512_v4
+41/218377/campos_512_v4
+41/218414/campos_512_v4
+41/218424/campos_512_v4
+41/218433/campos_512_v4
+41/218446/campos_512_v4
+41/218447/campos_512_v4
+41/218448/campos_512_v4
+41/218460/campos_512_v4
+41/218477/campos_512_v4
+41/218480/campos_512_v4
+41/218486/campos_512_v4
+41/218500/campos_512_v4
+41/218501/campos_512_v4
+41/218502/campos_512_v4
+41/218506/campos_512_v4
+41/218513/campos_512_v4
+41/218516/campos_512_v4
+41/218531/campos_512_v4
+41/218534/campos_512_v4
+41/218539/campos_512_v4
+41/218549/campos_512_v4
+41/218552/campos_512_v4
+41/218554/campos_512_v4
+41/218560/campos_512_v4
+41/218562/campos_512_v4
+41/218578/campos_512_v4
+41/218579/campos_512_v4
+41/218583/campos_512_v4
+41/218604/campos_512_v4
+41/218612/campos_512_v4
+41/218615/campos_512_v4
+41/218622/campos_512_v4
+41/218637/campos_512_v4
+41/218639/campos_512_v4
+41/218648/campos_512_v4
+41/218664/campos_512_v4
+41/218680/campos_512_v4
+41/218686/campos_512_v4
+41/218688/campos_512_v4
+41/218692/campos_512_v4
+41/218700/campos_512_v4
+41/218707/campos_512_v4
+41/218710/campos_512_v4
+41/218711/campos_512_v4
+41/218716/campos_512_v4
+41/218720/campos_512_v4
+41/218741/campos_512_v4
+41/218744/campos_512_v4
+41/218766/campos_512_v4
+41/218773/campos_512_v4
+41/218784/campos_512_v4
+41/218792/campos_512_v4
+41/218796/campos_512_v4
+41/218803/campos_512_v4
+41/218811/campos_512_v4
+41/218843/campos_512_v4
+41/218852/campos_512_v4
+41/218861/campos_512_v4
+41/218884/campos_512_v4
+41/218885/campos_512_v4
+41/218888/campos_512_v4
+41/218892/campos_512_v4
+41/218909/campos_512_v4
+41/218916/campos_512_v4
+41/218927/campos_512_v4
+41/218928/campos_512_v4
+41/218929/campos_512_v4
+41/218931/campos_512_v4
+41/218942/campos_512_v4
+41/218951/campos_512_v4
+41/218964/campos_512_v4
+41/218975/campos_512_v4
+41/218985/campos_512_v4
+41/218994/campos_512_v4
+41/219016/campos_512_v4
+41/219031/campos_512_v4
+41/219048/campos_512_v4
+41/219060/campos_512_v4
+41/219061/campos_512_v4
+41/219063/campos_512_v4
+41/219073/campos_512_v4
+41/219079/campos_512_v4
+41/219080/campos_512_v4
+41/219084/campos_512_v4
+41/219094/campos_512_v4
+41/219103/campos_512_v4
+41/219106/campos_512_v4
+41/219107/campos_512_v4
+41/219110/campos_512_v4
+41/219117/campos_512_v4
+41/219119/campos_512_v4
+41/219130/campos_512_v4
+41/219141/campos_512_v4
+41/219146/campos_512_v4
+41/219148/campos_512_v4
+41/219149/campos_512_v4
+41/219150/campos_512_v4
+41/219159/campos_512_v4
+41/219160/campos_512_v4
+41/219164/campos_512_v4
+41/219165/campos_512_v4
+41/219170/campos_512_v4
+41/219174/campos_512_v4
+41/219180/campos_512_v4
+41/219182/campos_512_v4
+41/219190/campos_512_v4
+41/219191/campos_512_v4
+41/219205/campos_512_v4
+41/219221/campos_512_v4
+41/219229/campos_512_v4
+41/219237/campos_512_v4
+41/219247/campos_512_v4
+41/219249/campos_512_v4
+41/219251/campos_512_v4
+41/219263/campos_512_v4
+41/219268/campos_512_v4
+41/219282/campos_512_v4
+41/219294/campos_512_v4
+41/219303/campos_512_v4
+41/219307/campos_512_v4
+41/219311/campos_512_v4
+41/219321/campos_512_v4
+41/219341/campos_512_v4
+41/219344/campos_512_v4
+41/219349/campos_512_v4
+41/219350/campos_512_v4
+41/219352/campos_512_v4
+41/219384/campos_512_v4
+41/219398/campos_512_v4
+41/219399/campos_512_v4
+41/219417/campos_512_v4
+41/219421/campos_512_v4
+41/219432/campos_512_v4
+41/219443/campos_512_v4
+41/219450/campos_512_v4
+41/219469/campos_512_v4
+41/219473/campos_512_v4
+41/219483/campos_512_v4
+41/219485/campos_512_v4
+41/219492/campos_512_v4
+41/219494/campos_512_v4
+41/219496/campos_512_v4
+41/219501/campos_512_v4
+41/219505/campos_512_v4
+41/219508/campos_512_v4
+41/219509/campos_512_v4
+41/219511/campos_512_v4
+41/219514/campos_512_v4
+41/219515/campos_512_v4
+41/219530/campos_512_v4
+41/219572/campos_512_v4
+41/219577/campos_512_v4
+41/219610/campos_512_v4
+41/219611/campos_512_v4
+41/219616/campos_512_v4
+41/219621/campos_512_v4
+41/219630/campos_512_v4
+41/219661/campos_512_v4
+41/219671/campos_512_v4
+41/219679/campos_512_v4
+41/219702/campos_512_v4
+41/219710/campos_512_v4
+41/219721/campos_512_v4
+41/219724/campos_512_v4
+41/219726/campos_512_v4
+41/219727/campos_512_v4
+41/219729/campos_512_v4
+41/219732/campos_512_v4
+41/219741/campos_512_v4
+41/219742/campos_512_v4
+41/219743/campos_512_v4
+41/219757/campos_512_v4
+41/219761/campos_512_v4
+41/219768/campos_512_v4
+41/219780/campos_512_v4
+41/219792/campos_512_v4
+41/219799/campos_512_v4
+41/219810/campos_512_v4
+41/219813/campos_512_v4
+41/219815/campos_512_v4
+41/219816/campos_512_v4
+41/219817/campos_512_v4
+41/219822/campos_512_v4
+41/219827/campos_512_v4
+41/219829/campos_512_v4
+41/219830/campos_512_v4
+41/219856/campos_512_v4
+41/219861/campos_512_v4
+41/219862/campos_512_v4
+41/219940/campos_512_v4
+41/219952/campos_512_v4
+41/219973/campos_512_v4
+41/219982/campos_512_v4
+41/219990/campos_512_v4
+41/219992/campos_512_v4
+41/220001/campos_512_v4
+42/220003/campos_512_v4
+42/220010/campos_512_v4
+42/220013/campos_512_v4
+42/220024/campos_512_v4
+42/220043/campos_512_v4
+42/220047/campos_512_v4
+42/220079/campos_512_v4
+42/220080/campos_512_v4
+42/220084/campos_512_v4
+42/220102/campos_512_v4
+42/220109/campos_512_v4
+42/220111/campos_512_v4
+42/220114/campos_512_v4
+42/220124/campos_512_v4
+42/220137/campos_512_v4
+42/220145/campos_512_v4
+42/220155/campos_512_v4
+42/220165/campos_512_v4
+42/220166/campos_512_v4
+42/220169/campos_512_v4
+42/220174/campos_512_v4
+42/220182/campos_512_v4
+42/220190/campos_512_v4
+42/220192/campos_512_v4
+42/220193/campos_512_v4
+42/220198/campos_512_v4
+42/220205/campos_512_v4
+42/220222/campos_512_v4
+42/220223/campos_512_v4
+42/220225/campos_512_v4
+42/220226/campos_512_v4
+42/220227/campos_512_v4
+42/220228/campos_512_v4
+42/220231/campos_512_v4
+42/220233/campos_512_v4
+42/220243/campos_512_v4
+42/220246/campos_512_v4
+42/220253/campos_512_v4
+42/220264/campos_512_v4
+42/220277/campos_512_v4
+42/220289/campos_512_v4
+42/220293/campos_512_v4
+42/220299/campos_512_v4
+42/220309/campos_512_v4
+42/220313/campos_512_v4
+42/220320/campos_512_v4
+42/220322/campos_512_v4
+42/220347/campos_512_v4
+42/220348/campos_512_v4
+42/220354/campos_512_v4
+42/220373/campos_512_v4
+42/220380/campos_512_v4
+42/220397/campos_512_v4
+42/220405/campos_512_v4
+42/220408/campos_512_v4
+42/220417/campos_512_v4
+42/220419/campos_512_v4
+42/220436/campos_512_v4
+42/220438/campos_512_v4
+42/220442/campos_512_v4
+42/220446/campos_512_v4
+42/220451/campos_512_v4
+42/220453/campos_512_v4
+42/220464/campos_512_v4
+42/220467/campos_512_v4
+42/220481/campos_512_v4
+42/220488/campos_512_v4
+42/220491/campos_512_v4
+42/220502/campos_512_v4
+42/220503/campos_512_v4
+42/220527/campos_512_v4
+42/220538/campos_512_v4
+42/220542/campos_512_v4
+42/220546/campos_512_v4
+42/220554/campos_512_v4
+42/220563/campos_512_v4
+42/220565/campos_512_v4
+42/220581/campos_512_v4
+42/220592/campos_512_v4
+42/220603/campos_512_v4
+42/220608/campos_512_v4
+42/220613/campos_512_v4
+42/220619/campos_512_v4
+42/220621/campos_512_v4
+42/220626/campos_512_v4
+42/220637/campos_512_v4
+42/220639/campos_512_v4
+42/220658/campos_512_v4
+42/220661/campos_512_v4
+42/220664/campos_512_v4
+42/220674/campos_512_v4
+42/220685/campos_512_v4
+42/220696/campos_512_v4
+42/220705/campos_512_v4
+42/220708/campos_512_v4
+42/220713/campos_512_v4
+42/220728/campos_512_v4
+42/220733/campos_512_v4
+42/220751/campos_512_v4
+42/220757/campos_512_v4
+42/220759/campos_512_v4
+42/220781/campos_512_v4
+42/220795/campos_512_v4
+42/220800/campos_512_v4
+42/220806/campos_512_v4
+42/220807/campos_512_v4
+42/220809/campos_512_v4
+42/220811/campos_512_v4
+42/220826/campos_512_v4
+42/220836/campos_512_v4
+42/220848/campos_512_v4
+42/220858/campos_512_v4
+42/220859/campos_512_v4
+42/220861/campos_512_v4
+42/220869/campos_512_v4
+42/220874/campos_512_v4
+42/220883/campos_512_v4
+42/220884/campos_512_v4
+42/220886/campos_512_v4
+42/220887/campos_512_v4
+42/220890/campos_512_v4
+42/220902/campos_512_v4
+42/220907/campos_512_v4
+42/220919/campos_512_v4
+42/220921/campos_512_v4
+42/220928/campos_512_v4
+42/220931/campos_512_v4
+42/220940/campos_512_v4
+42/220942/campos_512_v4
+42/220946/campos_512_v4
+42/220948/campos_512_v4
+42/220951/campos_512_v4
+42/220973/campos_512_v4
+42/220985/campos_512_v4
+42/220987/campos_512_v4
+42/220988/campos_512_v4
+42/220990/campos_512_v4
+42/220994/campos_512_v4
+42/221015/campos_512_v4
+42/221017/campos_512_v4
+42/221027/campos_512_v4
+42/221030/campos_512_v4
+42/221032/campos_512_v4
+42/221041/campos_512_v4
+42/221045/campos_512_v4
+42/221049/campos_512_v4
+42/221054/campos_512_v4
+42/221066/campos_512_v4
+42/221079/campos_512_v4
+42/221083/campos_512_v4
+42/221086/campos_512_v4
+42/221093/campos_512_v4
+42/221108/campos_512_v4
+42/221109/campos_512_v4
+42/221117/campos_512_v4
+42/221122/campos_512_v4
+42/221125/campos_512_v4
+42/221127/campos_512_v4
+42/221132/campos_512_v4
+42/221146/campos_512_v4
+42/221156/campos_512_v4
+42/221157/campos_512_v4
+42/221174/campos_512_v4
+42/221183/campos_512_v4
+42/221185/campos_512_v4
+42/221190/campos_512_v4
+42/221192/campos_512_v4
+42/221196/campos_512_v4
+42/221207/campos_512_v4
+42/221209/campos_512_v4
+42/221223/campos_512_v4
+42/221227/campos_512_v4
+42/221236/campos_512_v4
+42/221259/campos_512_v4
+42/221260/campos_512_v4
+42/221263/campos_512_v4
+42/221275/campos_512_v4
+42/221281/campos_512_v4
+42/221287/campos_512_v4
+42/221305/campos_512_v4
+42/221313/campos_512_v4
+42/221317/campos_512_v4
+42/221326/campos_512_v4
+42/221334/campos_512_v4
+42/221343/campos_512_v4
+42/221353/campos_512_v4
+42/221358/campos_512_v4
+42/221371/campos_512_v4
+42/221391/campos_512_v4
+42/221394/campos_512_v4
+42/221403/campos_512_v4
+42/221425/campos_512_v4
+42/221429/campos_512_v4
+42/221433/campos_512_v4
+42/221438/campos_512_v4
+42/221439/campos_512_v4
+42/221442/campos_512_v4
+42/221446/campos_512_v4
+42/221450/campos_512_v4
+42/221460/campos_512_v4
+42/221463/campos_512_v4
+42/221468/campos_512_v4
+42/221485/campos_512_v4
+42/221514/campos_512_v4
+42/221518/campos_512_v4
+42/221520/campos_512_v4
+42/221541/campos_512_v4
+42/221542/campos_512_v4
+42/221567/campos_512_v4
+42/221590/campos_512_v4
+42/221591/campos_512_v4
+42/221597/campos_512_v4
+42/221624/campos_512_v4
+42/221635/campos_512_v4
+42/221639/campos_512_v4
+42/221644/campos_512_v4
+42/221648/campos_512_v4
+42/221653/campos_512_v4
+42/221657/campos_512_v4
+42/221662/campos_512_v4
+42/221666/campos_512_v4
+42/221671/campos_512_v4
+42/221672/campos_512_v4
+42/221679/campos_512_v4
+42/221684/campos_512_v4
+42/221687/campos_512_v4
+42/221692/campos_512_v4
+42/221699/campos_512_v4
+42/221703/campos_512_v4
+42/221710/campos_512_v4
+42/221718/campos_512_v4
+42/221722/campos_512_v4
+42/221728/campos_512_v4
+42/221754/campos_512_v4
+42/221756/campos_512_v4
+42/221777/campos_512_v4
+42/221780/campos_512_v4
+42/221783/campos_512_v4
+42/221802/campos_512_v4
+42/221805/campos_512_v4
+42/221848/campos_512_v4
+42/221863/campos_512_v4
+42/221870/campos_512_v4
+42/221890/campos_512_v4
+42/221905/campos_512_v4
+42/221906/campos_512_v4
+42/221918/campos_512_v4
+42/221927/campos_512_v4
+42/221949/campos_512_v4
+42/221951/campos_512_v4
+42/221956/campos_512_v4
+42/221963/campos_512_v4
+42/221967/campos_512_v4
+42/221984/campos_512_v4
+42/221985/campos_512_v4
+42/221987/campos_512_v4
+42/221994/campos_512_v4
+42/222014/campos_512_v4
+42/222022/campos_512_v4
+42/222027/campos_512_v4
+42/222028/campos_512_v4
+42/222050/campos_512_v4
+42/222055/campos_512_v4
+42/222074/campos_512_v4
+42/222075/campos_512_v4
+42/222103/campos_512_v4
+42/222107/campos_512_v4
+42/222119/campos_512_v4
+42/222125/campos_512_v4
+42/222140/campos_512_v4
+42/222145/campos_512_v4
+42/222147/campos_512_v4
+42/222179/campos_512_v4
+42/222182/campos_512_v4
+42/222187/campos_512_v4
+42/222193/campos_512_v4
+42/222206/campos_512_v4
+42/222211/campos_512_v4
+42/222218/campos_512_v4
+42/222223/campos_512_v4
+42/222237/campos_512_v4
+42/222238/campos_512_v4
+42/222244/campos_512_v4
+42/222247/campos_512_v4
+42/222252/campos_512_v4
+42/222256/campos_512_v4
+42/222257/campos_512_v4
+42/222276/campos_512_v4
+42/222279/campos_512_v4
+42/222294/campos_512_v4
+42/222317/campos_512_v4
+42/222330/campos_512_v4
+42/222331/campos_512_v4
+42/222341/campos_512_v4
+42/222342/campos_512_v4
+42/222350/campos_512_v4
+42/222351/campos_512_v4
+42/222352/campos_512_v4
+42/222354/campos_512_v4
+42/222361/campos_512_v4
+42/222362/campos_512_v4
+42/222366/campos_512_v4
+42/222368/campos_512_v4
+42/222377/campos_512_v4
+42/222382/campos_512_v4
+42/222384/campos_512_v4
+42/222387/campos_512_v4
+42/222388/campos_512_v4
+42/222389/campos_512_v4
+42/222398/campos_512_v4
+42/222401/campos_512_v4
+42/222403/campos_512_v4
+42/222412/campos_512_v4
+42/222423/campos_512_v4
+42/222425/campos_512_v4
+42/222426/campos_512_v4
+42/222444/campos_512_v4
+42/222454/campos_512_v4
+42/222459/campos_512_v4
+42/222467/campos_512_v4
+42/222469/campos_512_v4
+42/222477/campos_512_v4
+42/222480/campos_512_v4
+42/222493/campos_512_v4
+42/222504/campos_512_v4
+42/222520/campos_512_v4
+42/222525/campos_512_v4
+42/222527/campos_512_v4
+42/222528/campos_512_v4
+42/222531/campos_512_v4
+42/222579/campos_512_v4
+42/222589/campos_512_v4
+42/222592/campos_512_v4
+42/222593/campos_512_v4
+42/222595/campos_512_v4
+42/222606/campos_512_v4
+42/222608/campos_512_v4
+42/222624/campos_512_v4
+42/222627/campos_512_v4
+42/222640/campos_512_v4
+42/222653/campos_512_v4
+42/222663/campos_512_v4
+42/222672/campos_512_v4
+42/222673/campos_512_v4
+42/222675/campos_512_v4
+42/222677/campos_512_v4
+42/222686/campos_512_v4
+42/222691/campos_512_v4
+42/222692/campos_512_v4
+42/222699/campos_512_v4
+42/222706/campos_512_v4
+42/222711/campos_512_v4
+42/222713/campos_512_v4
+42/222715/campos_512_v4
+42/222717/campos_512_v4
+42/222752/campos_512_v4
+42/222757/campos_512_v4
+42/222768/campos_512_v4
+42/222774/campos_512_v4
+42/222787/campos_512_v4
+42/222790/campos_512_v4
+42/222792/campos_512_v4
+42/222798/campos_512_v4
+42/222799/campos_512_v4
+42/222814/campos_512_v4
+42/222837/campos_512_v4
+42/222853/campos_512_v4
+42/222854/campos_512_v4
+42/222855/campos_512_v4
+42/222869/campos_512_v4
+42/222878/campos_512_v4
+42/222910/campos_512_v4
+42/222933/campos_512_v4
+42/222935/campos_512_v4
+42/222940/campos_512_v4
+42/222949/campos_512_v4
+42/222962/campos_512_v4
+42/222963/campos_512_v4
+42/222967/campos_512_v4
+42/222968/campos_512_v4
+42/222980/campos_512_v4
+42/222983/campos_512_v4
+42/222992/campos_512_v4
+42/223003/campos_512_v4
+42/223020/campos_512_v4
+42/223039/campos_512_v4
+42/223041/campos_512_v4
+42/223050/campos_512_v4
+42/223052/campos_512_v4
+42/223094/campos_512_v4
+42/223097/campos_512_v4
+42/223099/campos_512_v4
+42/223103/campos_512_v4
+42/223116/campos_512_v4
+42/223126/campos_512_v4
+42/223127/campos_512_v4
+42/223157/campos_512_v4
+42/223169/campos_512_v4
+42/223172/campos_512_v4
+42/223180/campos_512_v4
+42/223181/campos_512_v4
+42/223184/campos_512_v4
+42/223187/campos_512_v4
+42/223189/campos_512_v4
+42/223195/campos_512_v4
+42/223201/campos_512_v4
+42/223204/campos_512_v4
+42/223208/campos_512_v4
+42/223212/campos_512_v4
+42/223214/campos_512_v4
+42/223226/campos_512_v4
+42/223228/campos_512_v4
+42/223230/campos_512_v4
+42/223246/campos_512_v4
+42/223264/campos_512_v4
+42/223271/campos_512_v4
+42/223277/campos_512_v4
+42/223289/campos_512_v4
+42/223290/campos_512_v4
+42/223294/campos_512_v4
+42/223298/campos_512_v4
+42/223304/campos_512_v4
+42/223306/campos_512_v4
+42/223320/campos_512_v4
+42/223353/campos_512_v4
+42/223358/campos_512_v4
+42/223377/campos_512_v4
+42/223394/campos_512_v4
+42/223402/campos_512_v4
+42/223405/campos_512_v4
+42/223411/campos_512_v4
+42/223422/campos_512_v4
+42/223423/campos_512_v4
+42/223463/campos_512_v4
+42/223471/campos_512_v4
+42/223490/campos_512_v4
+42/223491/campos_512_v4
+42/223503/campos_512_v4
+42/223504/campos_512_v4
+42/223505/campos_512_v4
+42/223517/campos_512_v4
+42/223535/campos_512_v4
+42/223543/campos_512_v4
+42/223574/campos_512_v4
+42/223578/campos_512_v4
+42/223591/campos_512_v4
+42/223597/campos_512_v4
+42/223605/campos_512_v4
+42/223607/campos_512_v4
+42/223616/campos_512_v4
+42/223625/campos_512_v4
+42/223633/campos_512_v4
+42/223634/campos_512_v4
+42/223636/campos_512_v4
+42/223639/campos_512_v4
+42/223652/campos_512_v4
+42/223660/campos_512_v4
+42/223662/campos_512_v4
+42/223665/campos_512_v4
+42/223668/campos_512_v4
+42/223674/campos_512_v4
+42/223679/campos_512_v4
+42/223683/campos_512_v4
+42/223690/campos_512_v4
+42/223697/campos_512_v4
+42/223704/campos_512_v4
+42/223718/campos_512_v4
+42/223722/campos_512_v4
+42/223725/campos_512_v4
+42/223728/campos_512_v4
+42/223737/campos_512_v4
+42/223748/campos_512_v4
+42/223759/campos_512_v4
+42/223765/campos_512_v4
+42/223767/campos_512_v4
+42/223769/campos_512_v4
+42/223781/campos_512_v4
+42/223804/campos_512_v4
+42/223808/campos_512_v4
+42/223816/campos_512_v4
+42/223827/campos_512_v4
+42/223834/campos_512_v4
+42/223841/campos_512_v4
+42/223844/campos_512_v4
+42/223852/campos_512_v4
+42/223855/campos_512_v4
+42/223879/campos_512_v4
+42/223887/campos_512_v4
+42/223904/campos_512_v4
+42/223915/campos_512_v4
+42/223917/campos_512_v4
+42/223934/campos_512_v4
+42/223944/campos_512_v4
+42/223945/campos_512_v4
+42/223952/campos_512_v4
+42/223967/campos_512_v4
+42/223983/campos_512_v4
+42/223986/campos_512_v4
+42/223998/campos_512_v4
+42/224004/campos_512_v4
+42/224008/campos_512_v4
+42/224042/campos_512_v4
+42/224044/campos_512_v4
+42/224049/campos_512_v4
+42/224058/campos_512_v4
+42/224071/campos_512_v4
+42/224080/campos_512_v4
+42/224083/campos_512_v4
+42/224086/campos_512_v4
+42/224108/campos_512_v4
+42/224117/campos_512_v4
+42/224125/campos_512_v4
+42/224126/campos_512_v4
+42/224130/campos_512_v4
+42/224150/campos_512_v4
+42/224155/campos_512_v4
+42/224183/campos_512_v4
+42/224187/campos_512_v4
+42/224198/campos_512_v4
+42/224219/campos_512_v4
+42/224234/campos_512_v4
+42/224239/campos_512_v4
+42/224248/campos_512_v4
+42/224259/campos_512_v4
+42/224264/campos_512_v4
+42/224266/campos_512_v4
+42/224274/campos_512_v4
+42/224277/campos_512_v4
+42/224305/campos_512_v4
+42/224306/campos_512_v4
+42/224320/campos_512_v4
+42/224321/campos_512_v4
+42/224322/campos_512_v4
+42/224330/campos_512_v4
+42/224338/campos_512_v4
+42/224351/campos_512_v4
+42/224353/campos_512_v4
+42/224363/campos_512_v4
+42/224391/campos_512_v4
+42/224394/campos_512_v4
+42/224397/campos_512_v4
+42/224414/campos_512_v4
+42/224435/campos_512_v4
+42/224437/campos_512_v4
+42/224447/campos_512_v4
+42/224454/campos_512_v4
+42/224460/campos_512_v4
+42/224477/campos_512_v4
+42/224478/campos_512_v4
+42/224486/campos_512_v4
+42/224490/campos_512_v4
+42/224495/campos_512_v4
+42/224509/campos_512_v4
+42/224510/campos_512_v4
+42/224511/campos_512_v4
+42/224514/campos_512_v4
+42/224522/campos_512_v4
+42/224523/campos_512_v4
+42/224531/campos_512_v4
+42/224537/campos_512_v4
+42/224544/campos_512_v4
+42/224546/campos_512_v4
+42/224556/campos_512_v4
+42/224566/campos_512_v4
+42/224568/campos_512_v4
+42/224572/campos_512_v4
+42/224587/campos_512_v4
+42/224596/campos_512_v4
+42/224600/campos_512_v4
+42/224601/campos_512_v4
+42/224602/campos_512_v4
+42/224611/campos_512_v4
+42/224614/campos_512_v4
+42/224622/campos_512_v4
+42/224624/campos_512_v4
+42/224628/campos_512_v4
+42/224638/campos_512_v4
+42/224646/campos_512_v4
+42/224647/campos_512_v4
+42/224666/campos_512_v4
+42/224669/campos_512_v4
+42/224670/campos_512_v4
+42/224678/campos_512_v4
+42/224688/campos_512_v4
+42/224711/campos_512_v4
+42/224720/campos_512_v4
+42/224733/campos_512_v4
+42/224737/campos_512_v4
+42/224743/campos_512_v4
+42/224749/campos_512_v4
+42/224752/campos_512_v4
+42/224763/campos_512_v4
+42/224764/campos_512_v4
+42/224774/campos_512_v4
+42/224785/campos_512_v4
+42/224792/campos_512_v4
+42/224802/campos_512_v4
+42/224832/campos_512_v4
+42/224836/campos_512_v4
+42/224848/campos_512_v4
+42/224853/campos_512_v4
+42/224863/campos_512_v4
+42/224885/campos_512_v4
+42/224905/campos_512_v4
+42/224916/campos_512_v4
+42/224932/campos_512_v4
+42/224945/campos_512_v4
+42/224968/campos_512_v4
+42/224986/campos_512_v4
+42/224995/campos_512_v4
+42/224996/campos_512_v4
+42/224997/campos_512_v4
+43/225004/campos_512_v4
+43/225007/campos_512_v4
+43/225010/campos_512_v4
+43/225025/campos_512_v4
+43/225028/campos_512_v4
+43/225033/campos_512_v4
+43/225038/campos_512_v4
+43/225042/campos_512_v4
+43/225048/campos_512_v4
+43/225052/campos_512_v4
+43/225056/campos_512_v4
+43/225058/campos_512_v4
+43/225082/campos_512_v4
+43/225094/campos_512_v4
+43/225109/campos_512_v4
+43/225110/campos_512_v4
+43/225122/campos_512_v4
+43/225126/campos_512_v4
+43/225131/campos_512_v4
+43/225136/campos_512_v4
+43/225157/campos_512_v4
+43/225168/campos_512_v4
+43/225180/campos_512_v4
+43/225185/campos_512_v4
+43/225191/campos_512_v4
+43/225220/campos_512_v4
+43/225226/campos_512_v4
+43/225228/campos_512_v4
+43/225241/campos_512_v4
+43/225248/campos_512_v4
+43/225253/campos_512_v4
+43/225254/campos_512_v4
+43/225263/campos_512_v4
+43/225265/campos_512_v4
+43/225268/campos_512_v4
+43/225277/campos_512_v4
+43/225282/campos_512_v4
+43/225283/campos_512_v4
+43/225284/campos_512_v4
+43/225286/campos_512_v4
+43/225291/campos_512_v4
+43/225302/campos_512_v4
+43/225320/campos_512_v4
+43/225339/campos_512_v4
+43/225372/campos_512_v4
+43/225376/campos_512_v4
+43/225379/campos_512_v4
+43/225390/campos_512_v4
+43/225428/campos_512_v4
+43/225431/campos_512_v4
+43/225440/campos_512_v4
+43/225445/campos_512_v4
+43/225448/campos_512_v4
+43/225449/campos_512_v4
+43/225458/campos_512_v4
+43/225465/campos_512_v4
+43/225466/campos_512_v4
+43/225475/campos_512_v4
+43/225476/campos_512_v4
+43/225492/campos_512_v4
+43/225496/campos_512_v4
+43/225503/campos_512_v4
+43/225533/campos_512_v4
+43/225535/campos_512_v4
+43/225539/campos_512_v4
+43/225544/campos_512_v4
+43/225555/campos_512_v4
+43/225559/campos_512_v4
+43/225562/campos_512_v4
+43/225563/campos_512_v4
+43/225564/campos_512_v4
+43/225570/campos_512_v4
+43/225578/campos_512_v4
+43/225584/campos_512_v4
+43/225589/campos_512_v4
+43/225595/campos_512_v4
+43/225603/campos_512_v4
+43/225610/campos_512_v4
+43/225622/campos_512_v4
+43/225644/campos_512_v4
+43/225645/campos_512_v4
+43/225647/campos_512_v4
+43/225649/campos_512_v4
+43/225664/campos_512_v4
+43/225672/campos_512_v4
+43/225677/campos_512_v4
+43/225715/campos_512_v4
+43/225719/campos_512_v4
+43/225722/campos_512_v4
+43/225724/campos_512_v4
+43/225752/campos_512_v4
+43/225763/campos_512_v4
+43/225775/campos_512_v4
+43/225788/campos_512_v4
+43/225789/campos_512_v4
+43/225799/campos_512_v4
+43/225800/campos_512_v4
+43/225801/campos_512_v4
+43/225805/campos_512_v4
+43/225811/campos_512_v4
+43/225814/campos_512_v4
+43/225816/campos_512_v4
+43/225817/campos_512_v4
+43/225830/campos_512_v4
+43/225845/campos_512_v4
+43/225851/campos_512_v4
+43/225856/campos_512_v4
+43/225857/campos_512_v4
+43/225860/campos_512_v4
+43/225871/campos_512_v4
+43/225876/campos_512_v4
+43/225889/campos_512_v4
+43/225902/campos_512_v4
+43/225912/campos_512_v4
+43/225916/campos_512_v4
+43/225917/campos_512_v4
+43/225922/campos_512_v4
+43/225931/campos_512_v4
+43/225934/campos_512_v4
+43/225935/campos_512_v4
+43/225937/campos_512_v4
+43/225948/campos_512_v4
+43/225974/campos_512_v4
+43/225975/campos_512_v4
+43/225980/campos_512_v4
+43/225981/campos_512_v4
+43/225986/campos_512_v4
+43/225988/campos_512_v4
+43/225989/campos_512_v4
+43/225996/campos_512_v4
+43/225999/campos_512_v4
+43/226004/campos_512_v4
+43/226012/campos_512_v4
+43/226032/campos_512_v4
+43/226052/campos_512_v4
+43/226082/campos_512_v4
+43/226083/campos_512_v4
+43/226096/campos_512_v4
+43/226102/campos_512_v4
+43/226109/campos_512_v4
+43/226110/campos_512_v4
+43/226112/campos_512_v4
+43/226115/campos_512_v4
+43/226128/campos_512_v4
+43/226133/campos_512_v4
+43/226139/campos_512_v4
+43/226140/campos_512_v4
+43/226175/campos_512_v4
+43/226181/campos_512_v4
+43/226182/campos_512_v4
+43/226184/campos_512_v4
+43/226194/campos_512_v4
+43/226198/campos_512_v4
+43/226202/campos_512_v4
+43/226206/campos_512_v4
+43/226223/campos_512_v4
+43/226228/campos_512_v4
+43/226230/campos_512_v4
+43/226244/campos_512_v4
+43/226247/campos_512_v4
+43/226252/campos_512_v4
+43/226255/campos_512_v4
+43/226257/campos_512_v4
+43/226261/campos_512_v4
+43/226264/campos_512_v4
+43/226278/campos_512_v4
+43/226283/campos_512_v4
+43/226284/campos_512_v4
+43/226290/campos_512_v4
+43/226303/campos_512_v4
+43/226304/campos_512_v4
+43/226320/campos_512_v4
+43/226326/campos_512_v4
+43/226331/campos_512_v4
+43/226333/campos_512_v4
+43/226335/campos_512_v4
+43/226340/campos_512_v4
+43/226341/campos_512_v4
+43/226349/campos_512_v4
+43/226350/campos_512_v4
+43/226354/campos_512_v4
+43/226385/campos_512_v4
+43/226406/campos_512_v4
+43/226407/campos_512_v4
+43/226411/campos_512_v4
+43/226413/campos_512_v4
+43/226425/campos_512_v4
+43/226444/campos_512_v4
+43/226448/campos_512_v4
+43/226450/campos_512_v4
+43/226453/campos_512_v4
+43/226456/campos_512_v4
+43/226458/campos_512_v4
+43/226459/campos_512_v4
+43/226460/campos_512_v4
+43/226473/campos_512_v4
+43/226482/campos_512_v4
+43/226499/campos_512_v4
+43/226501/campos_512_v4
+43/226509/campos_512_v4
+43/226523/campos_512_v4
+43/226532/campos_512_v4
+43/226569/campos_512_v4
+43/226583/campos_512_v4
+43/226610/campos_512_v4
+43/226613/campos_512_v4
+43/226615/campos_512_v4
+43/226618/campos_512_v4
+43/226623/campos_512_v4
+43/226629/campos_512_v4
+43/226634/campos_512_v4
+43/226639/campos_512_v4
+43/226662/campos_512_v4
+43/226664/campos_512_v4
+43/226682/campos_512_v4
+43/226686/campos_512_v4
+43/226689/campos_512_v4
+43/226698/campos_512_v4
+43/226707/campos_512_v4
+43/226735/campos_512_v4
+43/226738/campos_512_v4
+43/226742/campos_512_v4
+43/226743/campos_512_v4
+43/226746/campos_512_v4
+43/226749/campos_512_v4
+43/226751/campos_512_v4
+43/226754/campos_512_v4
+43/226755/campos_512_v4
+43/226774/campos_512_v4
+43/226775/campos_512_v4
+43/226776/campos_512_v4
+43/226788/campos_512_v4
+43/226794/campos_512_v4
+43/226817/campos_512_v4
+43/226820/campos_512_v4
+43/226826/campos_512_v4
+43/226834/campos_512_v4
+43/226852/campos_512_v4
+43/226876/campos_512_v4
+43/226878/campos_512_v4
+43/226884/campos_512_v4
+43/226886/campos_512_v4
+43/226892/campos_512_v4
+43/226898/campos_512_v4
+43/226906/campos_512_v4
+43/226915/campos_512_v4
+43/226920/campos_512_v4
+43/226925/campos_512_v4
+43/226926/campos_512_v4
+43/226928/campos_512_v4
+43/226934/campos_512_v4
+43/226943/campos_512_v4
+43/226953/campos_512_v4
+43/226956/campos_512_v4
+43/226960/campos_512_v4
+43/226966/campos_512_v4
+43/226972/campos_512_v4
+43/226980/campos_512_v4
+43/226986/campos_512_v4
+43/227007/campos_512_v4
+43/227010/campos_512_v4
+43/227020/campos_512_v4
+43/227024/campos_512_v4
+43/227033/campos_512_v4
+43/227044/campos_512_v4
+43/227046/campos_512_v4
+43/227067/campos_512_v4
+43/227085/campos_512_v4
+43/227086/campos_512_v4
+43/227118/campos_512_v4
+43/227130/campos_512_v4
+43/227141/campos_512_v4
+43/227142/campos_512_v4
+43/227160/campos_512_v4
+43/227163/campos_512_v4
+43/227169/campos_512_v4
+43/227179/campos_512_v4
+43/227183/campos_512_v4
+43/227194/campos_512_v4
+43/227199/campos_512_v4
+43/227224/campos_512_v4
+43/227228/campos_512_v4
+43/227233/campos_512_v4
+43/227246/campos_512_v4
+43/227250/campos_512_v4
+43/227254/campos_512_v4
+43/227257/campos_512_v4
+43/227266/campos_512_v4
+43/227280/campos_512_v4
+43/227308/campos_512_v4
+43/227309/campos_512_v4
+43/227311/campos_512_v4
+43/227317/campos_512_v4
+43/227326/campos_512_v4
+43/227328/campos_512_v4
+43/227339/campos_512_v4
+43/227342/campos_512_v4
+43/227343/campos_512_v4
+43/227358/campos_512_v4
+43/227365/campos_512_v4
+43/227372/campos_512_v4
+43/227376/campos_512_v4
+43/227377/campos_512_v4
+43/227380/campos_512_v4
+43/227383/campos_512_v4
+43/227389/campos_512_v4
+43/227398/campos_512_v4
+43/227436/campos_512_v4
+43/227442/campos_512_v4
+43/227447/campos_512_v4
+43/227453/campos_512_v4
+43/227459/campos_512_v4
+43/227460/campos_512_v4
+43/227464/campos_512_v4
+43/227487/campos_512_v4
+43/227496/campos_512_v4
+43/227506/campos_512_v4
+43/227509/campos_512_v4
+43/227524/campos_512_v4
+43/227525/campos_512_v4
+43/227528/campos_512_v4
+43/227536/campos_512_v4
+43/227540/campos_512_v4
+43/227551/campos_512_v4
+43/227570/campos_512_v4
+43/227574/campos_512_v4
+43/227587/campos_512_v4
+43/227601/campos_512_v4
+43/227614/campos_512_v4
+43/227618/campos_512_v4
+43/227621/campos_512_v4
+43/227631/campos_512_v4
+43/227636/campos_512_v4
+43/227640/campos_512_v4
+43/227650/campos_512_v4
+43/227653/campos_512_v4
+43/227659/campos_512_v4
+43/227662/campos_512_v4
+43/227665/campos_512_v4
+43/227675/campos_512_v4
+43/227684/campos_512_v4
+43/227692/campos_512_v4
+43/227702/campos_512_v4
+43/227708/campos_512_v4
+43/227711/campos_512_v4
+43/227712/campos_512_v4
+43/227713/campos_512_v4
+43/227720/campos_512_v4
+43/227729/campos_512_v4
+43/227731/campos_512_v4
+43/227736/campos_512_v4
+43/227742/campos_512_v4
+43/227779/campos_512_v4
+43/227782/campos_512_v4
+43/227789/campos_512_v4
+43/227790/campos_512_v4
+43/227793/campos_512_v4
+43/227803/campos_512_v4
+43/227816/campos_512_v4
+43/227834/campos_512_v4
+43/227835/campos_512_v4
+43/227837/campos_512_v4
+43/227850/campos_512_v4
+43/227858/campos_512_v4
+43/227876/campos_512_v4
+43/227892/campos_512_v4
+43/227918/campos_512_v4
+43/227929/campos_512_v4
+43/227931/campos_512_v4
+43/227937/campos_512_v4
+43/227938/campos_512_v4
+43/227944/campos_512_v4
+43/227965/campos_512_v4
+43/227968/campos_512_v4
+43/227971/campos_512_v4
+43/227973/campos_512_v4
+43/227976/campos_512_v4
+43/227979/campos_512_v4
+43/227980/campos_512_v4
+43/227981/campos_512_v4
+43/227989/campos_512_v4
+43/227995/campos_512_v4
+43/228012/campos_512_v4
+43/228013/campos_512_v4
+43/228015/campos_512_v4
+43/228019/campos_512_v4
+43/228028/campos_512_v4
+43/228035/campos_512_v4
+43/228039/campos_512_v4
+43/228041/campos_512_v4
+43/228043/campos_512_v4
+43/228052/campos_512_v4
+43/228055/campos_512_v4
+43/228062/campos_512_v4
+43/228068/campos_512_v4
+43/228069/campos_512_v4
+43/228071/campos_512_v4
+43/228083/campos_512_v4
+43/228084/campos_512_v4
+43/228089/campos_512_v4
+43/228101/campos_512_v4
+43/228104/campos_512_v4
+43/228106/campos_512_v4
+43/228117/campos_512_v4
+43/228129/campos_512_v4
+43/228139/campos_512_v4
+43/228143/campos_512_v4
+43/228147/campos_512_v4
+43/228189/campos_512_v4
+43/228191/campos_512_v4
+43/228209/campos_512_v4
+43/228217/campos_512_v4
+43/228223/campos_512_v4
+43/228228/campos_512_v4
+43/228229/campos_512_v4
+43/228230/campos_512_v4
+43/228240/campos_512_v4
+43/228243/campos_512_v4
+43/228253/campos_512_v4
+43/228291/campos_512_v4
+43/228296/campos_512_v4
+43/228304/campos_512_v4
+43/228309/campos_512_v4
+43/228310/campos_512_v4
+43/228311/campos_512_v4
+43/228317/campos_512_v4
+43/228328/campos_512_v4
+43/228331/campos_512_v4
+43/228356/campos_512_v4
+43/228359/campos_512_v4
+43/228394/campos_512_v4
+43/228399/campos_512_v4
+43/228407/campos_512_v4
+43/228420/campos_512_v4
+43/228421/campos_512_v4
+43/228428/campos_512_v4
+43/228431/campos_512_v4
+43/228435/campos_512_v4
+43/228441/campos_512_v4
+43/228448/campos_512_v4
+43/228458/campos_512_v4
+43/228464/campos_512_v4
+43/228466/campos_512_v4
+43/228472/campos_512_v4
+43/228474/campos_512_v4
+43/228492/campos_512_v4
+43/228493/campos_512_v4
+43/228496/campos_512_v4
+43/228505/campos_512_v4
+43/228513/campos_512_v4
+43/228536/campos_512_v4
+43/228544/campos_512_v4
+43/228555/campos_512_v4
+43/228566/campos_512_v4
+43/228567/campos_512_v4
+43/228572/campos_512_v4
+43/228573/campos_512_v4
+43/228575/campos_512_v4
+43/228578/campos_512_v4
+43/228579/campos_512_v4
+43/228586/campos_512_v4
+43/228609/campos_512_v4
+43/228610/campos_512_v4
+43/228611/campos_512_v4
+43/228629/campos_512_v4
+43/228630/campos_512_v4
+43/228638/campos_512_v4
+43/228640/campos_512_v4
+43/228645/campos_512_v4
+43/228658/campos_512_v4
+43/228660/campos_512_v4
+43/228667/campos_512_v4
+43/228679/campos_512_v4
+43/228698/campos_512_v4
+43/228705/campos_512_v4
+43/228711/campos_512_v4
+43/228712/campos_512_v4
+43/228719/campos_512_v4
+43/228720/campos_512_v4
+43/228728/campos_512_v4
+43/228729/campos_512_v4
+43/228730/campos_512_v4
+43/228752/campos_512_v4
+43/228762/campos_512_v4
+43/228765/campos_512_v4
+43/228766/campos_512_v4
+43/228775/campos_512_v4
+43/228780/campos_512_v4
+43/228781/campos_512_v4
+43/228786/campos_512_v4
+43/228793/campos_512_v4
+43/228794/campos_512_v4
+43/228802/campos_512_v4
+43/228806/campos_512_v4
+43/228815/campos_512_v4
+43/228816/campos_512_v4
+43/228818/campos_512_v4
+43/228821/campos_512_v4
+43/228822/campos_512_v4
+43/228825/campos_512_v4
+43/228838/campos_512_v4
+43/228840/campos_512_v4
+43/228841/campos_512_v4
+43/228844/campos_512_v4
+43/228850/campos_512_v4
+43/228855/campos_512_v4
+43/228857/campos_512_v4
+43/228867/campos_512_v4
+43/228871/campos_512_v4
+43/228875/campos_512_v4
+43/228878/campos_512_v4
+43/228883/campos_512_v4
+43/228899/campos_512_v4
+43/228917/campos_512_v4
+43/228919/campos_512_v4
+43/228924/campos_512_v4
+43/228934/campos_512_v4
+43/228941/campos_512_v4
+43/228952/campos_512_v4
+43/228967/campos_512_v4
+43/228972/campos_512_v4
+43/228988/campos_512_v4
+43/228990/campos_512_v4
+43/228999/campos_512_v4
+43/229006/campos_512_v4
+43/229043/campos_512_v4
+43/229047/campos_512_v4
+43/229056/campos_512_v4
+43/229058/campos_512_v4
+43/229062/campos_512_v4
+43/229065/campos_512_v4
+43/229066/campos_512_v4
+43/229081/campos_512_v4
+43/229082/campos_512_v4
+43/229117/campos_512_v4
+43/229129/campos_512_v4
+43/229140/campos_512_v4
+43/229144/campos_512_v4
+43/229155/campos_512_v4
+43/229156/campos_512_v4
+43/229165/campos_512_v4
+43/229176/campos_512_v4
+43/229182/campos_512_v4
+43/229209/campos_512_v4
+43/229210/campos_512_v4
+43/229211/campos_512_v4
+43/229228/campos_512_v4
+43/229229/campos_512_v4
+43/229234/campos_512_v4
+43/229238/campos_512_v4
+43/229253/campos_512_v4
+43/229263/campos_512_v4
+43/229282/campos_512_v4
+43/229285/campos_512_v4
+43/229302/campos_512_v4
+43/229310/campos_512_v4
+43/229316/campos_512_v4
+43/229329/campos_512_v4
+43/229331/campos_512_v4
+43/229342/campos_512_v4
+43/229343/campos_512_v4
+43/229344/campos_512_v4
+43/229345/campos_512_v4
+43/229347/campos_512_v4
+43/229360/campos_512_v4
+43/229361/campos_512_v4
+43/229371/campos_512_v4
+43/229376/campos_512_v4
+43/229377/campos_512_v4
+43/229380/campos_512_v4
+43/229381/campos_512_v4
+43/229398/campos_512_v4
+43/229405/campos_512_v4
+43/229429/campos_512_v4
+43/229430/campos_512_v4
+43/229434/campos_512_v4
+43/229447/campos_512_v4
+43/229452/campos_512_v4
+43/229455/campos_512_v4
+43/229458/campos_512_v4
+43/229465/campos_512_v4
+43/229472/campos_512_v4
+43/229477/campos_512_v4
+43/229478/campos_512_v4
+43/229481/campos_512_v4
+43/229512/campos_512_v4
+43/229517/campos_512_v4
+43/229522/campos_512_v4
+43/229535/campos_512_v4
+43/229554/campos_512_v4
+43/229555/campos_512_v4
+43/229563/campos_512_v4
+43/229570/campos_512_v4
+43/229575/campos_512_v4
+43/229584/campos_512_v4
+43/229585/campos_512_v4
+43/229586/campos_512_v4
+43/229587/campos_512_v4
+43/229606/campos_512_v4
+43/229633/campos_512_v4
+43/229635/campos_512_v4
+43/229640/campos_512_v4
+43/229645/campos_512_v4
+43/229651/campos_512_v4
+43/229685/campos_512_v4
+43/229688/campos_512_v4
+43/229699/campos_512_v4
+43/229701/campos_512_v4
+43/229740/campos_512_v4
+43/229749/campos_512_v4
+43/229761/campos_512_v4
+43/229763/campos_512_v4
+43/229764/campos_512_v4
+43/229775/campos_512_v4
+43/229780/campos_512_v4
+43/229791/campos_512_v4
+43/229828/campos_512_v4
+43/229834/campos_512_v4
+43/229835/campos_512_v4
+43/229838/campos_512_v4
+43/229842/campos_512_v4
+43/229855/campos_512_v4
+43/229859/campos_512_v4
+43/229866/campos_512_v4
+43/229893/campos_512_v4
+43/229913/campos_512_v4
+43/229936/campos_512_v4
+43/229948/campos_512_v4
+43/229964/campos_512_v4
+43/229965/campos_512_v4
+43/229966/campos_512_v4
+43/229984/campos_512_v4
+43/230001/campos_512_v4
+44/230003/campos_512_v4
+44/230029/campos_512_v4
+44/230037/campos_512_v4
+44/230043/campos_512_v4
+44/230050/campos_512_v4
+44/230053/campos_512_v4
+44/230058/campos_512_v4
+44/230060/campos_512_v4
+44/230063/campos_512_v4
+44/230068/campos_512_v4
+44/230087/campos_512_v4
+44/230089/campos_512_v4
+44/230094/campos_512_v4
+44/230098/campos_512_v4
+44/230099/campos_512_v4
+44/230103/campos_512_v4
+44/230106/campos_512_v4
+44/230108/campos_512_v4
+44/230109/campos_512_v4
+44/230113/campos_512_v4
+44/230122/campos_512_v4
+44/230125/campos_512_v4
+44/230143/campos_512_v4
+44/230158/campos_512_v4
+44/230163/campos_512_v4
+44/230167/campos_512_v4
+44/230171/campos_512_v4
+44/230172/campos_512_v4
+44/230183/campos_512_v4
+44/230190/campos_512_v4
+44/230199/campos_512_v4
+44/230201/campos_512_v4
+44/230204/campos_512_v4
+44/230205/campos_512_v4
+44/230209/campos_512_v4
+44/230211/campos_512_v4
+44/230226/campos_512_v4
+44/230227/campos_512_v4
+44/230244/campos_512_v4
+44/230245/campos_512_v4
+44/230246/campos_512_v4
+44/230295/campos_512_v4
+44/230301/campos_512_v4
+44/230341/campos_512_v4
+44/230349/campos_512_v4
+44/230365/campos_512_v4
+44/230370/campos_512_v4
+44/230374/campos_512_v4
+44/230381/campos_512_v4
+44/230411/campos_512_v4
+44/230412/campos_512_v4
+44/230418/campos_512_v4
+44/230421/campos_512_v4
+44/230437/campos_512_v4
+44/230465/campos_512_v4
+44/230466/campos_512_v4
+44/230477/campos_512_v4
+44/230484/campos_512_v4
+44/230493/campos_512_v4
+44/230518/campos_512_v4
+44/230521/campos_512_v4
+44/230523/campos_512_v4
+44/230535/campos_512_v4
+44/230536/campos_512_v4
+44/230537/campos_512_v4
+44/230551/campos_512_v4
+44/230560/campos_512_v4
+44/230564/campos_512_v4
+44/230570/campos_512_v4
+44/230586/campos_512_v4
+44/230606/campos_512_v4
+44/230612/campos_512_v4
+44/230616/campos_512_v4
+44/230630/campos_512_v4
+44/230634/campos_512_v4
+44/230650/campos_512_v4
+44/230655/campos_512_v4
+44/230667/campos_512_v4
+44/230690/campos_512_v4
+44/230704/campos_512_v4
+44/230706/campos_512_v4
+44/230708/campos_512_v4
+44/230717/campos_512_v4
+44/230719/campos_512_v4
+44/230720/campos_512_v4
+44/230733/campos_512_v4
+44/230739/campos_512_v4
+44/230740/campos_512_v4
+44/230742/campos_512_v4
+44/230753/campos_512_v4
+44/230757/campos_512_v4
+44/230758/campos_512_v4
+44/230759/campos_512_v4
+44/230762/campos_512_v4
+44/230778/campos_512_v4
+44/230788/campos_512_v4
+44/230800/campos_512_v4
+44/230807/campos_512_v4
+44/230817/campos_512_v4
+44/230820/campos_512_v4
+44/230823/campos_512_v4
+44/230825/campos_512_v4
+44/230831/campos_512_v4
+44/230834/campos_512_v4
+44/230836/campos_512_v4
+44/230838/campos_512_v4
+44/230842/campos_512_v4
+44/230850/campos_512_v4
+44/230853/campos_512_v4
+44/230859/campos_512_v4
+44/230872/campos_512_v4
+44/230884/campos_512_v4
+44/230885/campos_512_v4
+44/230888/campos_512_v4
+44/230889/campos_512_v4
+44/230897/campos_512_v4
+44/230902/campos_512_v4
+44/230905/campos_512_v4
+44/230907/campos_512_v4
+44/230908/campos_512_v4
+44/230911/campos_512_v4
+44/230912/campos_512_v4
+44/230913/campos_512_v4
+44/230921/campos_512_v4
+44/230924/campos_512_v4
+44/230928/campos_512_v4
+44/230953/campos_512_v4
+44/230983/campos_512_v4
+44/230988/campos_512_v4
+44/230992/campos_512_v4
+44/230996/campos_512_v4
+44/230999/campos_512_v4
+44/231000/campos_512_v4
+44/231008/campos_512_v4
+44/231014/campos_512_v4
+44/231017/campos_512_v4
+44/231018/campos_512_v4
+44/231034/campos_512_v4
+44/231051/campos_512_v4
+44/231077/campos_512_v4
+44/231091/campos_512_v4
+44/231097/campos_512_v4
+44/231101/campos_512_v4
+44/231107/campos_512_v4
+44/231113/campos_512_v4
+44/231140/campos_512_v4
+44/231146/campos_512_v4
+44/231149/campos_512_v4
+44/231153/campos_512_v4
+44/231155/campos_512_v4
+44/231164/campos_512_v4
+44/231168/campos_512_v4
+44/231186/campos_512_v4
+44/231190/campos_512_v4
+44/231191/campos_512_v4
+44/231201/campos_512_v4
+44/231204/campos_512_v4
+44/231225/campos_512_v4
+44/231230/campos_512_v4
+44/231235/campos_512_v4
+44/231238/campos_512_v4
+44/231242/campos_512_v4
+44/231243/campos_512_v4
+44/231253/campos_512_v4
+44/231261/campos_512_v4
+44/231262/campos_512_v4
+44/231276/campos_512_v4
+44/231292/campos_512_v4
+44/231299/campos_512_v4
+44/231332/campos_512_v4
+44/231336/campos_512_v4
+44/231338/campos_512_v4
+44/231346/campos_512_v4
+44/231364/campos_512_v4
+44/231386/campos_512_v4
+44/231397/campos_512_v4
+44/231399/campos_512_v4
+44/231401/campos_512_v4
+44/231413/campos_512_v4
+44/231419/campos_512_v4
+44/231424/campos_512_v4
+44/231426/campos_512_v4
+44/231430/campos_512_v4
+44/231438/campos_512_v4
+44/231443/campos_512_v4
+44/231446/campos_512_v4
+44/231453/campos_512_v4
+44/231456/campos_512_v4
+44/231473/campos_512_v4
+44/231494/campos_512_v4
+44/231498/campos_512_v4
+44/231504/campos_512_v4
+44/231505/campos_512_v4
+44/231514/campos_512_v4
+44/231539/campos_512_v4
+44/231541/campos_512_v4
+44/231542/campos_512_v4
+44/231543/campos_512_v4
+44/231549/campos_512_v4
+44/231550/campos_512_v4
+44/231553/campos_512_v4
+44/231554/campos_512_v4
+44/231558/campos_512_v4
+44/231561/campos_512_v4
+44/231579/campos_512_v4
+44/231586/campos_512_v4
+44/231595/campos_512_v4
+44/231597/campos_512_v4
+44/231620/campos_512_v4
+44/231635/campos_512_v4
+44/231636/campos_512_v4
+44/231639/campos_512_v4
+44/231650/campos_512_v4
+44/231657/campos_512_v4
+44/231661/campos_512_v4
+44/231662/campos_512_v4
+44/231678/campos_512_v4
+44/231680/campos_512_v4
+44/231701/campos_512_v4
+44/231719/campos_512_v4
+44/231722/campos_512_v4
+44/231727/campos_512_v4
+44/231728/campos_512_v4
+44/231736/campos_512_v4
+44/231766/campos_512_v4
+44/231770/campos_512_v4
+44/231777/campos_512_v4
+44/231784/campos_512_v4
+44/231801/campos_512_v4
+44/231802/campos_512_v4
+44/231803/campos_512_v4
+44/231812/campos_512_v4
+44/231816/campos_512_v4
+44/231819/campos_512_v4
+44/231821/campos_512_v4
+44/231833/campos_512_v4
+44/231845/campos_512_v4
+44/231852/campos_512_v4
+44/231860/campos_512_v4
+44/231864/campos_512_v4
+44/231873/campos_512_v4
+44/231876/campos_512_v4
+44/231879/campos_512_v4
+44/231880/campos_512_v4
+44/231892/campos_512_v4
+44/231896/campos_512_v4
+44/231901/campos_512_v4
+44/231913/campos_512_v4
+44/231917/campos_512_v4
+44/231925/campos_512_v4
+44/231940/campos_512_v4
+44/231942/campos_512_v4
+44/231943/campos_512_v4
+44/231965/campos_512_v4
+44/232006/campos_512_v4
+44/232012/campos_512_v4
+44/232016/campos_512_v4
+44/232023/campos_512_v4
+44/232029/campos_512_v4
+44/232037/campos_512_v4
+44/232043/campos_512_v4
+44/232050/campos_512_v4
+44/232051/campos_512_v4
+44/232052/campos_512_v4
+44/232057/campos_512_v4
+44/232088/campos_512_v4
+44/232093/campos_512_v4
+44/232108/campos_512_v4
+44/232127/campos_512_v4
+44/232137/campos_512_v4
+44/232139/campos_512_v4
+44/232140/campos_512_v4
+44/232141/campos_512_v4
+44/232143/campos_512_v4
+44/232144/campos_512_v4
+44/232158/campos_512_v4
+44/232159/campos_512_v4
+44/232160/campos_512_v4
+44/232181/campos_512_v4
+44/232183/campos_512_v4
+44/232188/campos_512_v4
+44/232194/campos_512_v4
+44/232212/campos_512_v4
+44/232215/campos_512_v4
+44/232218/campos_512_v4
+44/232225/campos_512_v4
+44/232237/campos_512_v4
+44/232245/campos_512_v4
+44/232247/campos_512_v4
+44/232250/campos_512_v4
+44/232269/campos_512_v4
+44/232280/campos_512_v4
+44/232284/campos_512_v4
+44/232287/campos_512_v4
+44/232288/campos_512_v4
+44/232296/campos_512_v4
+44/232321/campos_512_v4
+44/232333/campos_512_v4
+44/232335/campos_512_v4
+44/232336/campos_512_v4
+44/232347/campos_512_v4
+44/232358/campos_512_v4
+44/232360/campos_512_v4
+44/232361/campos_512_v4
+44/232367/campos_512_v4
+44/232374/campos_512_v4
+44/232376/campos_512_v4
+44/232380/campos_512_v4
+44/232387/campos_512_v4
+44/232390/campos_512_v4
+44/232394/campos_512_v4
+44/232402/campos_512_v4
+44/232406/campos_512_v4
+44/232407/campos_512_v4
+44/232410/campos_512_v4
+44/232425/campos_512_v4
+44/232433/campos_512_v4
+44/232439/campos_512_v4
+44/232444/campos_512_v4
+44/232447/campos_512_v4
+44/232450/campos_512_v4
+44/232451/campos_512_v4
+44/232452/campos_512_v4
+44/232460/campos_512_v4
+44/232465/campos_512_v4
+44/232466/campos_512_v4
+44/232472/campos_512_v4
+44/232479/campos_512_v4
+44/232498/campos_512_v4
+44/232500/campos_512_v4
+44/232513/campos_512_v4
+44/232525/campos_512_v4
+44/232527/campos_512_v4
+44/232534/campos_512_v4
+44/232535/campos_512_v4
+44/232556/campos_512_v4
+44/232563/campos_512_v4
+44/232564/campos_512_v4
+44/232570/campos_512_v4
+44/232573/campos_512_v4
+44/232579/campos_512_v4
+44/232582/campos_512_v4
+44/232602/campos_512_v4
+44/232613/campos_512_v4
+44/232622/campos_512_v4
+44/232627/campos_512_v4
+44/232634/campos_512_v4
+44/232643/campos_512_v4
+44/232647/campos_512_v4
+44/232650/campos_512_v4
+44/232653/campos_512_v4
+44/232674/campos_512_v4
+44/232683/campos_512_v4
+44/232692/campos_512_v4
+44/232713/campos_512_v4
+44/232719/campos_512_v4
+44/232732/campos_512_v4
+44/232734/campos_512_v4
+44/232737/campos_512_v4
+44/232747/campos_512_v4
+44/232751/campos_512_v4
+44/232756/campos_512_v4
+44/232757/campos_512_v4
+44/232774/campos_512_v4
+44/232777/campos_512_v4
+44/232788/campos_512_v4
+44/232791/campos_512_v4
+44/232794/campos_512_v4
+44/232797/campos_512_v4
+44/232805/campos_512_v4
+44/232809/campos_512_v4
+44/232812/campos_512_v4
+44/232817/campos_512_v4
+44/232822/campos_512_v4
+44/232825/campos_512_v4
+44/232835/campos_512_v4
+44/232839/campos_512_v4
+44/232844/campos_512_v4
+44/232845/campos_512_v4
+44/232848/campos_512_v4
+44/232865/campos_512_v4
+44/232869/campos_512_v4
+44/232871/campos_512_v4
+44/232872/campos_512_v4
+44/232878/campos_512_v4
+44/232882/campos_512_v4
+44/232885/campos_512_v4
+44/232911/campos_512_v4
+44/232916/campos_512_v4
+44/232931/campos_512_v4
+44/232943/campos_512_v4
+44/232945/campos_512_v4
+44/232949/campos_512_v4
+44/232959/campos_512_v4
+44/232962/campos_512_v4
+44/232968/campos_512_v4
+44/232978/campos_512_v4
+44/232982/campos_512_v4
+44/232983/campos_512_v4
+44/232987/campos_512_v4
+44/232988/campos_512_v4
+44/232990/campos_512_v4
+44/232991/campos_512_v4
+44/233006/campos_512_v4
+44/233007/campos_512_v4
+44/233010/campos_512_v4
+44/233019/campos_512_v4
+44/233033/campos_512_v4
+44/233038/campos_512_v4
+44/233041/campos_512_v4
+44/233047/campos_512_v4
+44/233069/campos_512_v4
+44/233087/campos_512_v4
+44/233093/campos_512_v4
+44/233099/campos_512_v4
+44/233102/campos_512_v4
+44/233106/campos_512_v4
+44/233112/campos_512_v4
+44/233118/campos_512_v4
+44/233129/campos_512_v4
+44/233130/campos_512_v4
+44/233132/campos_512_v4
+44/233139/campos_512_v4
+44/233142/campos_512_v4
+44/233144/campos_512_v4
+44/233152/campos_512_v4
+44/233189/campos_512_v4
+44/233190/campos_512_v4
+44/233195/campos_512_v4
+44/233203/campos_512_v4
+44/233209/campos_512_v4
+44/233219/campos_512_v4
+44/233224/campos_512_v4
+44/233230/campos_512_v4
+44/233231/campos_512_v4
+44/233242/campos_512_v4
+44/233243/campos_512_v4
+44/233248/campos_512_v4
+44/233250/campos_512_v4
+44/233264/campos_512_v4
+44/233282/campos_512_v4
+44/233289/campos_512_v4
+44/233300/campos_512_v4
+44/233337/campos_512_v4
+44/233356/campos_512_v4
+44/233371/campos_512_v4
+44/233386/campos_512_v4
+44/233421/campos_512_v4
+44/233422/campos_512_v4
+44/233441/campos_512_v4
+44/233444/campos_512_v4
+44/233452/campos_512_v4
+44/233455/campos_512_v4
+44/233456/campos_512_v4
+44/233458/campos_512_v4
+44/233461/campos_512_v4
+44/233462/campos_512_v4
+44/233464/campos_512_v4
+44/233481/campos_512_v4
+44/233490/campos_512_v4
+44/233492/campos_512_v4
+44/233507/campos_512_v4
+44/233508/campos_512_v4
+44/233510/campos_512_v4
+44/233512/campos_512_v4
+44/233521/campos_512_v4
+44/233539/campos_512_v4
+44/233542/campos_512_v4
+44/233549/campos_512_v4
+44/233557/campos_512_v4
+44/233558/campos_512_v4
+44/233571/campos_512_v4
+44/233572/campos_512_v4
+44/233577/campos_512_v4
+44/233579/campos_512_v4
+44/233587/campos_512_v4
+44/233596/campos_512_v4
+44/233608/campos_512_v4
+44/233618/campos_512_v4
+44/233631/campos_512_v4
+44/233641/campos_512_v4
+44/233665/campos_512_v4
+44/233681/campos_512_v4
+44/233683/campos_512_v4
+44/233695/campos_512_v4
+44/233701/campos_512_v4
+44/233706/campos_512_v4
+44/233721/campos_512_v4
+44/233737/campos_512_v4
+44/233738/campos_512_v4
+44/233743/campos_512_v4
+44/233750/campos_512_v4
+44/233754/campos_512_v4
+44/233760/campos_512_v4
+44/233765/campos_512_v4
+44/233789/campos_512_v4
+44/233807/campos_512_v4
+44/233828/campos_512_v4
+44/233840/campos_512_v4
+44/233845/campos_512_v4
+44/233855/campos_512_v4
+44/233869/campos_512_v4
+44/233872/campos_512_v4
+44/233880/campos_512_v4
+44/233902/campos_512_v4
+44/233904/campos_512_v4
+44/233906/campos_512_v4
+44/233927/campos_512_v4
+44/233931/campos_512_v4
+44/233932/campos_512_v4
+44/233934/campos_512_v4
+44/233937/campos_512_v4
+44/233944/campos_512_v4
+44/233960/campos_512_v4
+44/233971/campos_512_v4
+44/233977/campos_512_v4
+44/233982/campos_512_v4
+44/234000/campos_512_v4
+44/234011/campos_512_v4
+44/234012/campos_512_v4
+44/234022/campos_512_v4
+44/234024/campos_512_v4
+44/234025/campos_512_v4
+44/234026/campos_512_v4
+44/234029/campos_512_v4
+44/234070/campos_512_v4
+44/234084/campos_512_v4
+44/234090/campos_512_v4
+44/234095/campos_512_v4
+44/234113/campos_512_v4
+44/234119/campos_512_v4
+44/234128/campos_512_v4
+44/234131/campos_512_v4
+44/234142/campos_512_v4
+44/234143/campos_512_v4
+44/234144/campos_512_v4
+44/234154/campos_512_v4
+44/234175/campos_512_v4
+44/234181/campos_512_v4
+44/234183/campos_512_v4
+44/234189/campos_512_v4
+44/234201/campos_512_v4
+44/234209/campos_512_v4
+44/234225/campos_512_v4
+44/234227/campos_512_v4
+44/234230/campos_512_v4
+44/234238/campos_512_v4
+44/234241/campos_512_v4
+44/234244/campos_512_v4
+44/234248/campos_512_v4
+44/234251/campos_512_v4
+44/234254/campos_512_v4
+44/234259/campos_512_v4
+44/234263/campos_512_v4
+44/234278/campos_512_v4
+44/234280/campos_512_v4
+44/234286/campos_512_v4
+44/234287/campos_512_v4
+44/234288/campos_512_v4
+44/234294/campos_512_v4
+44/234301/campos_512_v4
+44/234302/campos_512_v4
+44/234305/campos_512_v4
+44/234307/campos_512_v4
+44/234317/campos_512_v4
+44/234326/campos_512_v4
+44/234328/campos_512_v4
+44/234336/campos_512_v4
+44/234339/campos_512_v4
+44/234343/campos_512_v4
+44/234347/campos_512_v4
+44/234352/campos_512_v4
+44/234359/campos_512_v4
+44/234364/campos_512_v4
+44/234381/campos_512_v4
+44/234383/campos_512_v4
+44/234389/campos_512_v4
+44/234393/campos_512_v4
+44/234396/campos_512_v4
+44/234399/campos_512_v4
+44/234401/campos_512_v4
+44/234404/campos_512_v4
+44/234428/campos_512_v4
+44/234434/campos_512_v4
+44/234441/campos_512_v4
+44/234442/campos_512_v4
+44/234457/campos_512_v4
+44/234461/campos_512_v4
+44/234467/campos_512_v4
+44/234470/campos_512_v4
+44/234488/campos_512_v4
+44/234489/campos_512_v4
+44/234499/campos_512_v4
+44/234501/campos_512_v4
+44/234507/campos_512_v4
+44/234509/campos_512_v4
+44/234513/campos_512_v4
+44/234514/campos_512_v4
+44/234517/campos_512_v4
+44/234523/campos_512_v4
+44/234532/campos_512_v4
+44/234534/campos_512_v4
+44/234542/campos_512_v4
+44/234543/campos_512_v4
+44/234555/campos_512_v4
+44/234579/campos_512_v4
+44/234583/campos_512_v4
+44/234598/campos_512_v4
+44/234603/campos_512_v4
+44/234607/campos_512_v4
+44/234610/campos_512_v4
+44/234613/campos_512_v4
+44/234615/campos_512_v4
+44/234621/campos_512_v4
+44/234623/campos_512_v4
+44/234629/campos_512_v4
+44/234637/campos_512_v4
+44/234641/campos_512_v4
+44/234645/campos_512_v4
+44/234650/campos_512_v4
+44/234657/campos_512_v4
+44/234676/campos_512_v4
+44/234679/campos_512_v4
+44/234682/campos_512_v4
+44/234685/campos_512_v4
+44/234692/campos_512_v4
+44/234695/campos_512_v4
+44/234701/campos_512_v4
+44/234737/campos_512_v4
+44/234748/campos_512_v4
+44/234757/campos_512_v4
+44/234768/campos_512_v4
+44/234772/campos_512_v4
+44/234776/campos_512_v4
+44/234777/campos_512_v4
+44/234783/campos_512_v4
+44/234790/campos_512_v4
+44/234792/campos_512_v4
+44/234793/campos_512_v4
+44/234794/campos_512_v4
+44/234840/campos_512_v4
+44/234843/campos_512_v4
+44/234845/campos_512_v4
+44/234847/campos_512_v4
+44/234848/campos_512_v4
+44/234857/campos_512_v4
+44/234864/campos_512_v4
+44/234867/campos_512_v4
+44/234871/campos_512_v4
+44/234877/campos_512_v4
+44/234882/campos_512_v4
+44/234884/campos_512_v4
+44/234891/campos_512_v4
+44/234894/campos_512_v4
+44/234923/campos_512_v4
+44/234928/campos_512_v4
+44/234933/campos_512_v4
+44/234950/campos_512_v4
+44/234951/campos_512_v4
+44/234977/campos_512_v4
+44/234978/campos_512_v4
+44/234986/campos_512_v4
+45/235002/campos_512_v4
+45/235005/campos_512_v4
+45/235008/campos_512_v4
+45/235037/campos_512_v4
+45/235043/campos_512_v4
+45/235048/campos_512_v4
+45/235051/campos_512_v4
+45/235057/campos_512_v4
+45/235067/campos_512_v4
+45/235081/campos_512_v4
+45/235097/campos_512_v4
+45/235101/campos_512_v4
+45/235103/campos_512_v4
+45/235116/campos_512_v4
+45/235122/campos_512_v4
+45/235132/campos_512_v4
+45/235137/campos_512_v4
+45/235138/campos_512_v4
+45/235156/campos_512_v4
+45/235161/campos_512_v4
+45/235163/campos_512_v4
+45/235171/campos_512_v4
+45/235187/campos_512_v4
+45/235188/campos_512_v4
+45/235195/campos_512_v4
+45/235199/campos_512_v4
+45/235204/campos_512_v4
+45/235225/campos_512_v4
+45/235246/campos_512_v4
+45/235269/campos_512_v4
+45/235307/campos_512_v4
+45/235309/campos_512_v4
+45/235318/campos_512_v4
+45/235320/campos_512_v4
+45/235335/campos_512_v4
+45/235338/campos_512_v4
+45/235343/campos_512_v4
+45/235358/campos_512_v4
+45/235362/campos_512_v4
+45/235367/campos_512_v4
+45/235377/campos_512_v4
+45/235378/campos_512_v4
+45/235380/campos_512_v4
+45/235386/campos_512_v4
+45/235425/campos_512_v4
+45/235433/campos_512_v4
+45/235436/campos_512_v4
+45/235454/campos_512_v4
+45/235455/campos_512_v4
+45/235459/campos_512_v4
+45/235470/campos_512_v4
+45/235473/campos_512_v4
+45/235486/campos_512_v4
+45/235489/campos_512_v4
+45/235492/campos_512_v4
+45/235497/campos_512_v4
+45/235507/campos_512_v4
+45/235511/campos_512_v4
+45/235515/campos_512_v4
+45/235521/campos_512_v4
+45/235525/campos_512_v4
+45/235526/campos_512_v4
+45/235535/campos_512_v4
+45/235545/campos_512_v4
+45/235585/campos_512_v4
+45/235596/campos_512_v4
+45/235599/campos_512_v4
+45/235615/campos_512_v4
+45/235642/campos_512_v4
+45/235645/campos_512_v4
+45/235647/campos_512_v4
+45/235648/campos_512_v4
+45/235652/campos_512_v4
+45/235653/campos_512_v4
+45/235659/campos_512_v4
+45/235660/campos_512_v4
+45/235663/campos_512_v4
+45/235671/campos_512_v4
+45/235678/campos_512_v4
+45/235685/campos_512_v4
+45/235686/campos_512_v4
+45/235699/campos_512_v4
+45/235702/campos_512_v4
+45/235703/campos_512_v4
+45/235714/campos_512_v4
+45/235717/campos_512_v4
+45/235721/campos_512_v4
+45/235727/campos_512_v4
+45/235730/campos_512_v4
+45/235736/campos_512_v4
+45/235751/campos_512_v4
+45/235756/campos_512_v4
+45/235760/campos_512_v4
+45/235763/campos_512_v4
+45/235780/campos_512_v4
+45/235784/campos_512_v4
+45/235804/campos_512_v4
+45/235808/campos_512_v4
+45/235811/campos_512_v4
+45/235813/campos_512_v4
+45/235815/campos_512_v4
+45/235816/campos_512_v4
+45/235834/campos_512_v4
+45/235853/campos_512_v4
+45/235859/campos_512_v4
+45/235861/campos_512_v4
+45/235899/campos_512_v4
+45/235901/campos_512_v4
+45/235904/campos_512_v4
+45/235919/campos_512_v4
+45/235920/campos_512_v4
+45/235922/campos_512_v4
+45/235927/campos_512_v4
+45/235937/campos_512_v4
+45/235941/campos_512_v4
+45/235945/campos_512_v4
+45/235952/campos_512_v4
+45/235969/campos_512_v4
+45/235973/campos_512_v4
+45/235991/campos_512_v4
+45/235993/campos_512_v4
+45/235995/campos_512_v4
+45/235996/campos_512_v4
+45/236004/campos_512_v4
+45/236032/campos_512_v4
+45/236050/campos_512_v4
+45/236054/campos_512_v4
+45/236066/campos_512_v4
+45/236069/campos_512_v4
+45/236077/campos_512_v4
+45/236089/campos_512_v4
+45/236092/campos_512_v4
+45/236098/campos_512_v4
+45/236104/campos_512_v4
+45/236123/campos_512_v4
+45/236148/campos_512_v4
+45/236150/campos_512_v4
+45/236153/campos_512_v4
+45/236157/campos_512_v4
+45/236173/campos_512_v4
+45/236174/campos_512_v4
+45/236176/campos_512_v4
+45/236178/campos_512_v4
+45/236183/campos_512_v4
+45/236187/campos_512_v4
+45/236198/campos_512_v4
+45/236201/campos_512_v4
+45/236217/campos_512_v4
+45/236225/campos_512_v4
+45/236226/campos_512_v4
+45/236228/campos_512_v4
+45/236231/campos_512_v4
+45/236235/campos_512_v4
+45/236239/campos_512_v4
+45/236243/campos_512_v4
+45/236249/campos_512_v4
+45/236256/campos_512_v4
+45/236263/campos_512_v4
+45/236265/campos_512_v4
+45/236267/campos_512_v4
+45/236286/campos_512_v4
+45/236293/campos_512_v4
+45/236300/campos_512_v4
+45/236305/campos_512_v4
+45/236326/campos_512_v4
+45/236327/campos_512_v4
+45/236334/campos_512_v4
+45/236338/campos_512_v4
+45/236339/campos_512_v4
+45/236344/campos_512_v4
+45/236346/campos_512_v4
+45/236355/campos_512_v4
+45/236356/campos_512_v4
+45/236368/campos_512_v4
+45/236369/campos_512_v4
+45/236379/campos_512_v4
+45/236387/campos_512_v4
+45/236400/campos_512_v4
+45/236405/campos_512_v4
+45/236414/campos_512_v4
+45/236422/campos_512_v4
+45/236424/campos_512_v4
+45/236428/campos_512_v4
+45/236434/campos_512_v4
+45/236465/campos_512_v4
+45/236468/campos_512_v4
+45/236475/campos_512_v4
+45/236487/campos_512_v4
+45/236496/campos_512_v4
+45/236501/campos_512_v4
+45/236509/campos_512_v4
+45/236510/campos_512_v4
+45/236517/campos_512_v4
+45/236518/campos_512_v4
+45/236537/campos_512_v4
+45/236549/campos_512_v4
+45/236551/campos_512_v4
+45/236555/campos_512_v4
+45/236557/campos_512_v4
+45/236569/campos_512_v4
+45/236580/campos_512_v4
+45/236600/campos_512_v4
+45/236614/campos_512_v4
+45/236653/campos_512_v4
+45/236654/campos_512_v4
+45/236671/campos_512_v4
+45/236689/campos_512_v4
+45/236705/campos_512_v4
+45/236726/campos_512_v4
+45/236732/campos_512_v4
+45/236733/campos_512_v4
+45/236746/campos_512_v4
+45/236752/campos_512_v4
+45/236759/campos_512_v4
+45/236760/campos_512_v4
+45/236764/campos_512_v4
+45/236773/campos_512_v4
+45/236775/campos_512_v4
+45/236780/campos_512_v4
+45/236786/campos_512_v4
+45/236798/campos_512_v4
+45/236799/campos_512_v4
+45/236803/campos_512_v4
+45/236811/campos_512_v4
+45/236821/campos_512_v4
+45/236827/campos_512_v4
+45/236829/campos_512_v4
+45/236830/campos_512_v4
+45/236833/campos_512_v4
+45/236848/campos_512_v4
+45/236856/campos_512_v4
+45/236875/campos_512_v4
+45/236877/campos_512_v4
+45/236906/campos_512_v4
+45/236917/campos_512_v4
+45/236918/campos_512_v4
+45/236938/campos_512_v4
+45/236939/campos_512_v4
+45/236948/campos_512_v4
+45/236964/campos_512_v4
+45/236970/campos_512_v4
+45/236986/campos_512_v4
+45/236992/campos_512_v4
+45/236996/campos_512_v4
+45/237001/campos_512_v4
+45/237004/campos_512_v4
+45/237011/campos_512_v4
+45/237019/campos_512_v4
+45/237023/campos_512_v4
+45/237036/campos_512_v4
+45/237043/campos_512_v4
+45/237046/campos_512_v4
+45/237061/campos_512_v4
+45/237065/campos_512_v4
+45/237066/campos_512_v4
+45/237068/campos_512_v4
+45/237084/campos_512_v4
+45/237092/campos_512_v4
+45/237103/campos_512_v4
+45/237108/campos_512_v4
+45/237117/campos_512_v4
+45/237139/campos_512_v4
+45/237140/campos_512_v4
+45/237148/campos_512_v4
+45/237161/campos_512_v4
+45/237177/campos_512_v4
+45/237181/campos_512_v4
+45/237186/campos_512_v4
+45/237187/campos_512_v4
+45/237191/campos_512_v4
+45/237208/campos_512_v4
+45/237214/campos_512_v4
+45/237216/campos_512_v4
+45/237226/campos_512_v4
+45/237245/campos_512_v4
+45/237263/campos_512_v4
+45/237267/campos_512_v4
+45/237270/campos_512_v4
+45/237273/campos_512_v4
+45/237275/campos_512_v4
+45/237278/campos_512_v4
+45/237283/campos_512_v4
+45/237286/campos_512_v4
+45/237309/campos_512_v4
+45/237312/campos_512_v4
+45/237314/campos_512_v4
+45/237319/campos_512_v4
+45/237328/campos_512_v4
+45/237333/campos_512_v4
+45/237335/campos_512_v4
+45/237342/campos_512_v4
+45/237364/campos_512_v4
+45/237368/campos_512_v4
+45/237380/campos_512_v4
+45/237382/campos_512_v4
+45/237400/campos_512_v4
+45/237410/campos_512_v4
+45/237413/campos_512_v4
+45/237424/campos_512_v4
+45/237448/campos_512_v4
+45/237456/campos_512_v4
+45/237469/campos_512_v4
+45/237485/campos_512_v4
+45/237494/campos_512_v4
+45/237499/campos_512_v4
+45/237502/campos_512_v4
+45/237519/campos_512_v4
+45/237523/campos_512_v4
+45/237528/campos_512_v4
+45/237531/campos_512_v4
+45/237534/campos_512_v4
+45/237535/campos_512_v4
+45/237537/campos_512_v4
+45/237539/campos_512_v4
+45/237545/campos_512_v4
+45/237556/campos_512_v4
+45/237558/campos_512_v4
+45/237581/campos_512_v4
+45/237593/campos_512_v4
+45/237612/campos_512_v4
+45/237621/campos_512_v4
+45/237627/campos_512_v4
+45/237642/campos_512_v4
+45/237650/campos_512_v4
+45/237651/campos_512_v4
+45/237662/campos_512_v4
+45/237664/campos_512_v4
+45/237667/campos_512_v4
+45/237679/campos_512_v4
+45/237699/campos_512_v4
+45/237711/campos_512_v4
+45/237728/campos_512_v4
+45/237733/campos_512_v4
+45/237735/campos_512_v4
+45/237740/campos_512_v4
+45/237749/campos_512_v4
+45/237750/campos_512_v4
+45/237751/campos_512_v4
+45/237765/campos_512_v4
+45/237768/campos_512_v4
+45/237774/campos_512_v4
+45/237778/campos_512_v4
+45/237780/campos_512_v4
+45/237785/campos_512_v4
+45/237801/campos_512_v4
+45/237815/campos_512_v4
+45/237821/campos_512_v4
+45/237829/campos_512_v4
+45/237830/campos_512_v4
+45/237831/campos_512_v4
+45/237832/campos_512_v4
+45/237834/campos_512_v4
+45/237846/campos_512_v4
+45/237852/campos_512_v4
+45/237854/campos_512_v4
+45/237865/campos_512_v4
+45/237869/campos_512_v4
+45/237873/campos_512_v4
+45/237878/campos_512_v4
+45/237879/campos_512_v4
+45/237899/campos_512_v4
+45/237903/campos_512_v4
+45/237911/campos_512_v4
+45/237912/campos_512_v4
+45/237922/campos_512_v4
+45/237947/campos_512_v4
+45/237962/campos_512_v4
+45/237964/campos_512_v4
+45/237975/campos_512_v4
+45/237985/campos_512_v4
+45/237986/campos_512_v4
+45/238002/campos_512_v4
+45/238007/campos_512_v4
+45/238025/campos_512_v4
+45/238028/campos_512_v4
+45/238037/campos_512_v4
+45/238053/campos_512_v4
+45/238055/campos_512_v4
+45/238061/campos_512_v4
+45/238063/campos_512_v4
+45/238069/campos_512_v4
+45/238070/campos_512_v4
+45/238072/campos_512_v4
+45/238090/campos_512_v4
+45/238091/campos_512_v4
+45/238095/campos_512_v4
+45/238104/campos_512_v4
+45/238115/campos_512_v4
+45/238132/campos_512_v4
+45/238137/campos_512_v4
+45/238144/campos_512_v4
+45/238150/campos_512_v4
+45/238160/campos_512_v4
+45/238161/campos_512_v4
+45/238185/campos_512_v4
+45/238188/campos_512_v4
+45/238194/campos_512_v4
+45/238210/campos_512_v4
+45/238220/campos_512_v4
+45/238222/campos_512_v4
+45/238251/campos_512_v4
+45/238253/campos_512_v4
+45/238254/campos_512_v4
+45/238259/campos_512_v4
+45/238282/campos_512_v4
+45/238298/campos_512_v4
+45/238309/campos_512_v4
+45/238315/campos_512_v4
+45/238319/campos_512_v4
+45/238323/campos_512_v4
+45/238329/campos_512_v4
+45/238335/campos_512_v4
+45/238336/campos_512_v4
+45/238372/campos_512_v4
+45/238377/campos_512_v4
+45/238380/campos_512_v4
+45/238391/campos_512_v4
+45/238398/campos_512_v4
+45/238400/campos_512_v4
+45/238418/campos_512_v4
+45/238420/campos_512_v4
+45/238426/campos_512_v4
+45/238433/campos_512_v4
+45/238442/campos_512_v4
+45/238454/campos_512_v4
+45/238500/campos_512_v4
+45/238503/campos_512_v4
+45/238532/campos_512_v4
+45/238533/campos_512_v4
+45/238535/campos_512_v4
+45/238541/campos_512_v4
+45/238544/campos_512_v4
+45/238555/campos_512_v4
+45/238557/campos_512_v4
+45/238573/campos_512_v4
+45/238577/campos_512_v4
+45/238579/campos_512_v4
+45/238582/campos_512_v4
+45/238585/campos_512_v4
+45/238588/campos_512_v4
+45/238592/campos_512_v4
+45/238594/campos_512_v4
+45/238598/campos_512_v4
+45/238603/campos_512_v4
+45/238633/campos_512_v4
+45/238643/campos_512_v4
+45/238644/campos_512_v4
+45/238647/campos_512_v4
+45/238670/campos_512_v4
+45/238671/campos_512_v4
+45/238697/campos_512_v4
+45/238699/campos_512_v4
+45/238703/campos_512_v4
+45/238704/campos_512_v4
+45/238712/campos_512_v4
+45/238713/campos_512_v4
+45/238719/campos_512_v4
+45/238727/campos_512_v4
+45/238750/campos_512_v4
+45/238755/campos_512_v4
+45/238766/campos_512_v4
+45/238768/campos_512_v4
+45/238769/campos_512_v4
+45/238771/campos_512_v4
+45/238775/campos_512_v4
+45/238782/campos_512_v4
+45/238789/campos_512_v4
+45/238790/campos_512_v4
+45/238792/campos_512_v4
+45/238801/campos_512_v4
+45/238805/campos_512_v4
+45/238809/campos_512_v4
+45/238813/campos_512_v4
+45/238814/campos_512_v4
+45/238826/campos_512_v4
+45/238833/campos_512_v4
+45/238840/campos_512_v4
+45/238847/campos_512_v4
+45/238852/campos_512_v4
+45/238868/campos_512_v4
+45/238874/campos_512_v4
+45/238878/campos_512_v4
+45/238880/campos_512_v4
+45/238884/campos_512_v4
+45/238890/campos_512_v4
+45/238892/campos_512_v4
+45/238902/campos_512_v4
+45/238912/campos_512_v4
+45/238913/campos_512_v4
+45/238922/campos_512_v4
+45/238930/campos_512_v4
+45/238945/campos_512_v4
+45/238961/campos_512_v4
+45/238978/campos_512_v4
+45/238980/campos_512_v4
+45/238984/campos_512_v4
+45/238985/campos_512_v4
+45/238991/campos_512_v4
+45/239001/campos_512_v4
+45/239008/campos_512_v4
+45/239011/campos_512_v4
+45/239013/campos_512_v4
+45/239023/campos_512_v4
+45/239029/campos_512_v4
+45/239030/campos_512_v4
+45/239043/campos_512_v4
+45/239053/campos_512_v4
+45/239056/campos_512_v4
+45/239071/campos_512_v4
+45/239096/campos_512_v4
+45/239099/campos_512_v4
+45/239115/campos_512_v4
+45/239116/campos_512_v4
+45/239127/campos_512_v4
+45/239137/campos_512_v4
+45/239140/campos_512_v4
+45/239143/campos_512_v4
+45/239152/campos_512_v4
+45/239163/campos_512_v4
+45/239164/campos_512_v4
+45/239170/campos_512_v4
+45/239179/campos_512_v4
+45/239192/campos_512_v4
+45/239215/campos_512_v4
+45/239217/campos_512_v4
+45/239218/campos_512_v4
+45/239222/campos_512_v4
+45/239226/campos_512_v4
+45/239248/campos_512_v4
+45/239256/campos_512_v4
+45/239288/campos_512_v4
+45/239292/campos_512_v4
+45/239300/campos_512_v4
+45/239311/campos_512_v4
+45/239314/campos_512_v4
+45/239344/campos_512_v4
+45/239370/campos_512_v4
+45/239376/campos_512_v4
+45/239388/campos_512_v4
+45/239392/campos_512_v4
+45/239399/campos_512_v4
+45/239403/campos_512_v4
+45/239406/campos_512_v4
+45/239421/campos_512_v4
+45/239436/campos_512_v4
+45/239439/campos_512_v4
+45/239443/campos_512_v4
+45/239449/campos_512_v4
+45/239453/campos_512_v4
+45/239462/campos_512_v4
+45/239467/campos_512_v4
+45/239468/campos_512_v4
+45/239471/campos_512_v4
+45/239475/campos_512_v4
+45/239476/campos_512_v4
+45/239489/campos_512_v4
+45/239490/campos_512_v4
+45/239505/campos_512_v4
+45/239525/campos_512_v4
+45/239528/campos_512_v4
+45/239542/campos_512_v4
+45/239548/campos_512_v4
+45/239553/campos_512_v4
+45/239556/campos_512_v4
+45/239559/campos_512_v4
+45/239567/campos_512_v4
+45/239574/campos_512_v4
+45/239578/campos_512_v4
+45/239582/campos_512_v4
+45/239588/campos_512_v4
+45/239602/campos_512_v4
+45/239603/campos_512_v4
+45/239606/campos_512_v4
+45/239615/campos_512_v4
+45/239620/campos_512_v4
+45/239629/campos_512_v4
+45/239635/campos_512_v4
+45/239636/campos_512_v4
+45/239643/campos_512_v4
+45/239644/campos_512_v4
+45/239650/campos_512_v4
+45/239661/campos_512_v4
+45/239664/campos_512_v4
+45/239682/campos_512_v4
+45/239686/campos_512_v4
+45/239703/campos_512_v4
+45/239713/campos_512_v4
+45/239716/campos_512_v4
+45/239729/campos_512_v4
+45/239735/campos_512_v4
+45/239743/campos_512_v4
+45/239758/campos_512_v4
+45/239760/campos_512_v4
+45/239766/campos_512_v4
+45/239793/campos_512_v4
+45/239807/campos_512_v4
+45/239809/campos_512_v4
+45/239818/campos_512_v4
+45/239822/campos_512_v4
+45/239826/campos_512_v4
+45/239829/campos_512_v4
+45/239836/campos_512_v4
+45/239843/campos_512_v4
+45/239857/campos_512_v4
+45/239862/campos_512_v4
+45/239872/campos_512_v4
+45/239883/campos_512_v4
+45/239889/campos_512_v4
+45/239892/campos_512_v4
+45/239894/campos_512_v4
+45/239915/campos_512_v4
+45/239919/campos_512_v4
+45/239928/campos_512_v4
+45/239929/campos_512_v4
+45/239935/campos_512_v4
+45/239940/campos_512_v4
+45/239955/campos_512_v4
+45/239966/campos_512_v4
+45/239985/campos_512_v4
+45/239991/campos_512_v4
+45/239992/campos_512_v4
+45/239999/campos_512_v4
+46/240011/campos_512_v4
+46/240022/campos_512_v4
+46/240039/campos_512_v4
+46/240041/campos_512_v4
+46/240042/campos_512_v4
+46/240049/campos_512_v4
+46/240067/campos_512_v4
+46/240072/campos_512_v4
+46/240077/campos_512_v4
+46/240086/campos_512_v4
+46/240091/campos_512_v4
+46/240104/campos_512_v4
+46/240106/campos_512_v4
+46/240107/campos_512_v4
+46/240109/campos_512_v4
+46/240115/campos_512_v4
+46/240118/campos_512_v4
+46/240131/campos_512_v4
+46/240134/campos_512_v4
+46/240139/campos_512_v4
+46/240151/campos_512_v4
+46/240153/campos_512_v4
+46/240154/campos_512_v4
+46/240169/campos_512_v4
+46/240170/campos_512_v4
+46/240174/campos_512_v4
+46/240184/campos_512_v4
+46/240186/campos_512_v4
+46/240188/campos_512_v4
+46/240202/campos_512_v4
+46/240206/campos_512_v4
+46/240215/campos_512_v4
+46/240218/campos_512_v4
+46/240219/campos_512_v4
+46/240220/campos_512_v4
+46/240224/campos_512_v4
+46/240234/campos_512_v4
+46/240245/campos_512_v4
+46/240264/campos_512_v4
+46/240276/campos_512_v4
+46/240280/campos_512_v4
+46/240287/campos_512_v4
+46/240297/campos_512_v4
+46/240311/campos_512_v4
+46/240330/campos_512_v4
+46/240337/campos_512_v4
+46/240342/campos_512_v4
+46/240343/campos_512_v4
+46/240350/campos_512_v4
+46/240360/campos_512_v4
+46/240362/campos_512_v4
+46/240364/campos_512_v4
+46/240366/campos_512_v4
+46/240368/campos_512_v4
+46/240372/campos_512_v4
+46/240394/campos_512_v4
+46/240398/campos_512_v4
+46/240402/campos_512_v4
+46/240403/campos_512_v4
+46/240416/campos_512_v4
+46/240418/campos_512_v4
+46/240429/campos_512_v4
+46/240436/campos_512_v4
+46/240437/campos_512_v4
+46/240444/campos_512_v4
+46/240452/campos_512_v4
+46/240462/campos_512_v4
+46/240469/campos_512_v4
+46/240472/campos_512_v4
+46/240476/campos_512_v4
+46/240481/campos_512_v4
+46/240484/campos_512_v4
+46/240498/campos_512_v4
+46/240501/campos_512_v4
+46/240510/campos_512_v4
+46/240511/campos_512_v4
+46/240534/campos_512_v4
+46/240546/campos_512_v4
+46/240553/campos_512_v4
+46/240556/campos_512_v4
+46/240562/campos_512_v4
+46/240567/campos_512_v4
+46/240570/campos_512_v4
+46/240583/campos_512_v4
+46/240585/campos_512_v4
+46/240590/campos_512_v4
+46/240591/campos_512_v4
+46/240614/campos_512_v4
+46/240623/campos_512_v4
+46/240624/campos_512_v4
+46/240650/campos_512_v4
+46/240654/campos_512_v4
+46/240659/campos_512_v4
+46/240660/campos_512_v4
+46/240662/campos_512_v4
+46/240665/campos_512_v4
+46/240674/campos_512_v4
+46/240680/campos_512_v4
+46/240702/campos_512_v4
+46/240705/campos_512_v4
+46/240729/campos_512_v4
+46/240730/campos_512_v4
+46/240733/campos_512_v4
+46/240736/campos_512_v4
+46/240742/campos_512_v4
+46/240745/campos_512_v4
+46/240746/campos_512_v4
+46/240747/campos_512_v4
+46/240748/campos_512_v4
+46/240752/campos_512_v4
+46/240754/campos_512_v4
+46/240758/campos_512_v4
+46/240764/campos_512_v4
+46/240769/campos_512_v4
+46/240782/campos_512_v4
+46/240803/campos_512_v4
+46/240804/campos_512_v4
+46/240810/campos_512_v4
+46/240812/campos_512_v4
+46/240818/campos_512_v4
+46/240845/campos_512_v4
+46/240849/campos_512_v4
+46/240860/campos_512_v4
+46/240867/campos_512_v4
+46/240873/campos_512_v4
+46/240886/campos_512_v4
+46/240896/campos_512_v4
+46/240911/campos_512_v4
+46/240917/campos_512_v4
+46/240922/campos_512_v4
+46/240929/campos_512_v4
+46/240936/campos_512_v4
+46/240940/campos_512_v4
+46/240949/campos_512_v4
+46/240955/campos_512_v4
+46/240961/campos_512_v4
+46/240971/campos_512_v4
+46/240974/campos_512_v4
+46/240983/campos_512_v4
+46/240993/campos_512_v4
+46/241002/campos_512_v4
+46/241009/campos_512_v4
+46/241011/campos_512_v4
+46/241017/campos_512_v4
+46/241029/campos_512_v4
+46/241042/campos_512_v4
+46/241048/campos_512_v4
+46/241052/campos_512_v4
+46/241057/campos_512_v4
+46/241071/campos_512_v4
+46/241077/campos_512_v4
+46/241081/campos_512_v4
+46/241086/campos_512_v4
+46/241091/campos_512_v4
+46/241095/campos_512_v4
+46/241099/campos_512_v4
+46/241107/campos_512_v4
+46/241108/campos_512_v4
+46/241110/campos_512_v4
+46/241112/campos_512_v4
+46/241124/campos_512_v4
+46/241133/campos_512_v4
+46/241154/campos_512_v4
+46/241159/campos_512_v4
+46/241171/campos_512_v4
+46/241173/campos_512_v4
+46/241196/campos_512_v4
+46/241210/campos_512_v4
+46/241212/campos_512_v4
+46/241218/campos_512_v4
+46/241221/campos_512_v4
+46/241238/campos_512_v4
+46/241239/campos_512_v4
+46/241254/campos_512_v4
+46/241258/campos_512_v4
+46/241260/campos_512_v4
+46/241265/campos_512_v4
+46/241268/campos_512_v4
+46/241283/campos_512_v4
+46/241284/campos_512_v4
+46/241293/campos_512_v4
+46/241301/campos_512_v4
+46/241307/campos_512_v4
+46/241310/campos_512_v4
+46/241316/campos_512_v4
+46/241322/campos_512_v4
+46/241324/campos_512_v4
+46/241326/campos_512_v4
+46/241328/campos_512_v4
+46/241329/campos_512_v4
+46/241339/campos_512_v4
+46/241340/campos_512_v4
+46/241356/campos_512_v4
+46/241366/campos_512_v4
+46/241368/campos_512_v4
+46/241369/campos_512_v4
+46/241371/campos_512_v4
+46/241377/campos_512_v4
+46/241381/campos_512_v4
+46/241382/campos_512_v4
+46/241401/campos_512_v4
+46/241418/campos_512_v4
+46/241428/campos_512_v4
+46/241433/campos_512_v4
+46/241437/campos_512_v4
+46/241439/campos_512_v4
+46/241442/campos_512_v4
+46/241444/campos_512_v4
+46/241453/campos_512_v4
+46/241459/campos_512_v4
+46/241461/campos_512_v4
+46/241484/campos_512_v4
+46/241488/campos_512_v4
+46/241490/campos_512_v4
+46/241496/campos_512_v4
+46/241501/campos_512_v4
+46/241535/campos_512_v4
+46/241536/campos_512_v4
+46/241539/campos_512_v4
+46/241540/campos_512_v4
+46/241541/campos_512_v4
+46/241544/campos_512_v4
+46/241555/campos_512_v4
+46/241556/campos_512_v4
+46/241572/campos_512_v4
+46/241573/campos_512_v4
+46/241608/campos_512_v4
+46/241619/campos_512_v4
+46/241625/campos_512_v4
+46/241631/campos_512_v4
+46/241636/campos_512_v4
+46/241657/campos_512_v4
+46/241665/campos_512_v4
+46/241675/campos_512_v4
+46/241681/campos_512_v4
+46/241703/campos_512_v4
+46/241712/campos_512_v4
+46/241721/campos_512_v4
+46/241731/campos_512_v4
+46/241743/campos_512_v4
+46/241744/campos_512_v4
+46/241751/campos_512_v4
+46/241768/campos_512_v4
+46/241776/campos_512_v4
+46/241790/campos_512_v4
+46/241794/campos_512_v4
+46/241796/campos_512_v4
+46/241799/campos_512_v4
+46/241807/campos_512_v4
+46/241827/campos_512_v4
+46/241828/campos_512_v4
+46/241846/campos_512_v4
+46/241852/campos_512_v4
+46/241863/campos_512_v4
+46/241875/campos_512_v4
+46/241888/campos_512_v4
+46/241891/campos_512_v4
+46/241899/campos_512_v4
+46/241900/campos_512_v4
+46/241903/campos_512_v4
+46/241906/campos_512_v4
+46/241932/campos_512_v4
+46/241942/campos_512_v4
+46/241947/campos_512_v4
+46/241976/campos_512_v4
+46/241978/campos_512_v4
+46/241980/campos_512_v4
+46/241989/campos_512_v4
+46/241990/campos_512_v4
+46/242001/campos_512_v4
+46/242009/campos_512_v4
+46/242016/campos_512_v4
+46/242028/campos_512_v4
+46/242040/campos_512_v4
+46/242044/campos_512_v4
+46/242048/campos_512_v4
+46/242055/campos_512_v4
+46/242064/campos_512_v4
+46/242066/campos_512_v4
+46/242071/campos_512_v4
+46/242080/campos_512_v4
+46/242081/campos_512_v4
+46/242083/campos_512_v4
+46/242084/campos_512_v4
+46/242086/campos_512_v4
+46/242092/campos_512_v4
+46/242097/campos_512_v4
+46/242132/campos_512_v4
+46/242141/campos_512_v4
+46/242143/campos_512_v4
+46/242150/campos_512_v4
+46/242151/campos_512_v4
+46/242157/campos_512_v4
+46/242161/campos_512_v4
+46/242167/campos_512_v4
+46/242184/campos_512_v4
+46/242190/campos_512_v4
+46/242198/campos_512_v4
+46/242204/campos_512_v4
+46/242224/campos_512_v4
+46/242229/campos_512_v4
+46/242234/campos_512_v4
+46/242243/campos_512_v4
+46/242245/campos_512_v4
+46/242248/campos_512_v4
+46/242264/campos_512_v4
+46/242281/campos_512_v4
+46/242283/campos_512_v4
+46/242288/campos_512_v4
+46/242298/campos_512_v4
+46/242299/campos_512_v4
+46/242313/campos_512_v4
+46/242322/campos_512_v4
+46/242324/campos_512_v4
+46/242338/campos_512_v4
+46/242350/campos_512_v4
+46/242355/campos_512_v4
+46/242385/campos_512_v4
+46/242387/campos_512_v4
+46/242388/campos_512_v4
+46/242391/campos_512_v4
+46/242408/campos_512_v4
+46/242421/campos_512_v4
+46/242424/campos_512_v4
+46/242435/campos_512_v4
+46/242452/campos_512_v4
+46/242454/campos_512_v4
+46/242472/campos_512_v4
+46/242476/campos_512_v4
+46/242485/campos_512_v4
+46/242499/campos_512_v4
+46/242503/campos_512_v4
+46/242504/campos_512_v4
+46/242521/campos_512_v4
+46/242547/campos_512_v4
+46/242554/campos_512_v4
+46/242556/campos_512_v4
+46/242576/campos_512_v4
+46/242588/campos_512_v4
+46/242589/campos_512_v4
+46/242592/campos_512_v4
+46/242620/campos_512_v4
+46/242635/campos_512_v4
+46/242636/campos_512_v4
+46/242638/campos_512_v4
+46/242655/campos_512_v4
+46/242658/campos_512_v4
+46/242660/campos_512_v4
+46/242661/campos_512_v4
+46/242666/campos_512_v4
+46/242681/campos_512_v4
+46/242691/campos_512_v4
+46/242699/campos_512_v4
+46/242712/campos_512_v4
+46/242714/campos_512_v4
+46/242729/campos_512_v4
+46/242764/campos_512_v4
+46/242768/campos_512_v4
+46/242780/campos_512_v4
+46/242793/campos_512_v4
+46/242796/campos_512_v4
+46/242806/campos_512_v4
+46/242821/campos_512_v4
+46/242832/campos_512_v4
+46/242838/campos_512_v4
+46/242861/campos_512_v4
+46/242875/campos_512_v4
+46/242881/campos_512_v4
+46/242892/campos_512_v4
+46/242900/campos_512_v4
+46/242906/campos_512_v4
+46/242923/campos_512_v4
+46/242969/campos_512_v4
+46/242971/campos_512_v4
+46/242976/campos_512_v4
+46/242987/campos_512_v4
+46/242995/campos_512_v4
+46/243005/campos_512_v4
+46/243007/campos_512_v4
+46/243024/campos_512_v4
+46/243027/campos_512_v4
+46/243028/campos_512_v4
+46/243040/campos_512_v4
+46/243054/campos_512_v4
+46/243066/campos_512_v4
+46/243075/campos_512_v4
+46/243080/campos_512_v4
+46/243103/campos_512_v4
+46/243105/campos_512_v4
+46/243119/campos_512_v4
+46/243127/campos_512_v4
+46/243137/campos_512_v4
+46/243142/campos_512_v4
+46/243156/campos_512_v4
+46/243178/campos_512_v4
+46/243180/campos_512_v4
+46/243184/campos_512_v4
+46/243197/campos_512_v4
+46/243204/campos_512_v4
+46/243220/campos_512_v4
+46/243227/campos_512_v4
+46/243232/campos_512_v4
+46/243239/campos_512_v4
+46/243241/campos_512_v4
+46/243255/campos_512_v4
+46/243258/campos_512_v4
+46/243261/campos_512_v4
+46/243281/campos_512_v4
+46/243282/campos_512_v4
+46/243285/campos_512_v4
+46/243333/campos_512_v4
+46/243345/campos_512_v4
+46/243352/campos_512_v4
+46/243357/campos_512_v4
+46/243360/campos_512_v4
+46/243364/campos_512_v4
+46/243366/campos_512_v4
+46/243373/campos_512_v4
+46/243375/campos_512_v4
+46/243385/campos_512_v4
+46/243392/campos_512_v4
+46/243407/campos_512_v4
+46/243412/campos_512_v4
+46/243422/campos_512_v4
+46/243423/campos_512_v4
+46/243427/campos_512_v4
+46/243430/campos_512_v4
+46/243440/campos_512_v4
+46/243453/campos_512_v4
+46/243454/campos_512_v4
+46/243459/campos_512_v4
+46/243469/campos_512_v4
+46/243473/campos_512_v4
+46/243483/campos_512_v4
+46/243490/campos_512_v4
+46/243497/campos_512_v4
+46/243508/campos_512_v4
+46/243510/campos_512_v4
+46/243522/campos_512_v4
+46/243528/campos_512_v4
+46/243529/campos_512_v4
+46/243530/campos_512_v4
+46/243534/campos_512_v4
+46/243543/campos_512_v4
+46/243551/campos_512_v4
+46/243557/campos_512_v4
+46/243568/campos_512_v4
+46/243578/campos_512_v4
+46/243613/campos_512_v4
+46/243618/campos_512_v4
+46/243620/campos_512_v4
+46/243631/campos_512_v4
+46/243641/campos_512_v4
+46/243648/campos_512_v4
+46/243649/campos_512_v4
+46/243652/campos_512_v4
+46/243653/campos_512_v4
+46/243654/campos_512_v4
+46/243659/campos_512_v4
+46/243663/campos_512_v4
+46/243673/campos_512_v4
+46/243685/campos_512_v4
+46/243693/campos_512_v4
+46/243699/campos_512_v4
+46/243704/campos_512_v4
+46/243710/campos_512_v4
+46/243721/campos_512_v4
+46/243727/campos_512_v4
+46/243732/campos_512_v4
+46/243735/campos_512_v4
+46/243748/campos_512_v4
+46/243750/campos_512_v4
+46/243755/campos_512_v4
+46/243757/campos_512_v4
+46/243773/campos_512_v4
+46/243779/campos_512_v4
+46/243782/campos_512_v4
+46/243792/campos_512_v4
+46/243839/campos_512_v4
+46/244028/campos_512_v4
+46/244037/campos_512_v4
+46/244052/campos_512_v4
+46/244062/campos_512_v4
+46/244078/campos_512_v4
+46/244080/campos_512_v4
+46/244100/campos_512_v4
+46/244108/campos_512_v4
+46/244124/campos_512_v4
+46/244125/campos_512_v4
+46/244129/campos_512_v4
+46/244138/campos_512_v4
+46/244141/campos_512_v4
+46/244170/campos_512_v4
+46/244172/campos_512_v4
+46/244180/campos_512_v4
+46/244194/campos_512_v4
+46/244209/campos_512_v4
+46/244212/campos_512_v4
+46/244222/campos_512_v4
+46/244225/campos_512_v4
+46/244233/campos_512_v4
+46/244238/campos_512_v4
+46/244244/campos_512_v4
+46/244250/campos_512_v4
+46/244265/campos_512_v4
+46/244267/campos_512_v4
+46/244270/campos_512_v4
+46/244271/campos_512_v4
+46/244283/campos_512_v4
+46/244296/campos_512_v4
+46/244303/campos_512_v4
+46/244308/campos_512_v4
+46/244324/campos_512_v4
+46/244335/campos_512_v4
+46/244353/campos_512_v4
+46/244369/campos_512_v4
+46/244397/campos_512_v4
+46/244408/campos_512_v4
+46/244410/campos_512_v4
+46/244411/campos_512_v4
+46/244441/campos_512_v4
+46/244444/campos_512_v4
+46/244450/campos_512_v4
+46/244451/campos_512_v4
+46/244467/campos_512_v4
+46/244483/campos_512_v4
+46/244504/campos_512_v4
+46/244511/campos_512_v4
+46/244517/campos_512_v4
+46/244530/campos_512_v4
+46/244546/campos_512_v4
+46/244554/campos_512_v4
+46/244556/campos_512_v4
+46/244557/campos_512_v4
+46/244559/campos_512_v4
+46/244562/campos_512_v4
+46/244585/campos_512_v4
+46/244594/campos_512_v4
+46/244642/campos_512_v4
+46/244653/campos_512_v4
+46/244665/campos_512_v4
+46/244680/campos_512_v4
+46/244698/campos_512_v4
+46/244702/campos_512_v4
+46/244713/campos_512_v4
+46/244715/campos_512_v4
+46/244741/campos_512_v4
+46/244752/campos_512_v4
+46/244757/campos_512_v4
+46/244772/campos_512_v4
+46/244775/campos_512_v4
+46/244777/campos_512_v4
+46/244782/campos_512_v4
+46/244784/campos_512_v4
+46/244795/campos_512_v4
+46/244798/campos_512_v4
+46/244811/campos_512_v4
+46/244838/campos_512_v4
+46/244839/campos_512_v4
+46/244848/campos_512_v4
+46/244851/campos_512_v4
+46/244875/campos_512_v4
+46/244892/campos_512_v4
+46/244898/campos_512_v4
+46/244900/campos_512_v4
+46/244906/campos_512_v4
+46/244913/campos_512_v4
+46/244916/campos_512_v4
+46/244929/campos_512_v4
+46/244942/campos_512_v4
+46/244943/campos_512_v4
+46/244945/campos_512_v4
+46/244947/campos_512_v4
+46/244948/campos_512_v4
+46/244971/campos_512_v4
+46/244973/campos_512_v4
+46/244976/campos_512_v4
+46/244979/campos_512_v4
+46/244987/campos_512_v4
+46/244992/campos_512_v4
+47/245009/campos_512_v4
+47/245023/campos_512_v4
+47/245030/campos_512_v4
+47/245038/campos_512_v4
+47/245041/campos_512_v4
+47/245046/campos_512_v4
+47/245050/campos_512_v4
+47/245054/campos_512_v4
+47/245073/campos_512_v4
+47/245086/campos_512_v4
+47/245090/campos_512_v4
+47/245095/campos_512_v4
+47/245100/campos_512_v4
+47/245108/campos_512_v4
+47/245110/campos_512_v4
+47/245111/campos_512_v4
+47/245127/campos_512_v4
+47/245132/campos_512_v4
+47/245163/campos_512_v4
+47/245171/campos_512_v4
+47/245173/campos_512_v4
+47/245176/campos_512_v4
+47/245184/campos_512_v4
+47/245185/campos_512_v4
+47/245197/campos_512_v4
+47/245204/campos_512_v4
+47/245213/campos_512_v4
+47/245214/campos_512_v4
+47/245218/campos_512_v4
+47/245229/campos_512_v4
+47/245230/campos_512_v4
+47/245235/campos_512_v4
+47/245239/campos_512_v4
+47/245265/campos_512_v4
+47/245271/campos_512_v4
+47/245273/campos_512_v4
+47/245274/campos_512_v4
+47/245281/campos_512_v4
+47/245283/campos_512_v4
+47/245321/campos_512_v4
+47/245324/campos_512_v4
+47/245332/campos_512_v4
+47/245333/campos_512_v4
+47/245338/campos_512_v4
+47/245346/campos_512_v4
+47/245352/campos_512_v4
+47/245357/campos_512_v4
+47/245362/campos_512_v4
+47/245367/campos_512_v4
+47/245376/campos_512_v4
+47/245377/campos_512_v4
+47/245382/campos_512_v4
+47/245396/campos_512_v4
+47/245398/campos_512_v4
+47/245402/campos_512_v4
+47/245411/campos_512_v4
+47/245435/campos_512_v4
+47/245449/campos_512_v4
+47/245451/campos_512_v4
+47/245466/campos_512_v4
+47/245473/campos_512_v4
+47/245479/campos_512_v4
+47/245483/campos_512_v4
+47/245524/campos_512_v4
+47/245529/campos_512_v4
+47/245530/campos_512_v4
+47/245538/campos_512_v4
+47/245543/campos_512_v4
+47/245547/campos_512_v4
+47/245549/campos_512_v4
+47/245554/campos_512_v4
+47/245559/campos_512_v4
+47/245560/campos_512_v4
+47/245575/campos_512_v4
+47/245584/campos_512_v4
+47/245593/campos_512_v4
+47/245595/campos_512_v4
+47/245605/campos_512_v4
+47/245608/campos_512_v4
+47/245610/campos_512_v4
+47/245612/campos_512_v4
+47/245635/campos_512_v4
+47/245655/campos_512_v4
+47/245661/campos_512_v4
+47/245674/campos_512_v4
+47/245688/campos_512_v4
+47/245689/campos_512_v4
+47/245691/campos_512_v4
+47/245692/campos_512_v4
+47/245694/campos_512_v4
+47/245695/campos_512_v4
+47/245706/campos_512_v4
+47/245707/campos_512_v4
+47/245738/campos_512_v4
+47/245742/campos_512_v4
+47/245749/campos_512_v4
+47/245757/campos_512_v4
+47/245759/campos_512_v4
+47/245772/campos_512_v4
+47/245774/campos_512_v4
+47/245780/campos_512_v4
+47/245791/campos_512_v4
+47/245800/campos_512_v4
+47/245801/campos_512_v4
+47/245807/campos_512_v4
+47/245816/campos_512_v4
+47/245828/campos_512_v4
+47/245830/campos_512_v4
+47/245839/campos_512_v4
+47/245849/campos_512_v4
+47/245850/campos_512_v4
+47/245851/campos_512_v4
+47/245888/campos_512_v4
+47/245892/campos_512_v4
+47/245895/campos_512_v4
+47/245896/campos_512_v4
+47/245908/campos_512_v4
+47/245928/campos_512_v4
+47/245929/campos_512_v4
+47/245933/campos_512_v4
+47/245937/campos_512_v4
+47/245957/campos_512_v4
+47/245971/campos_512_v4
+47/245974/campos_512_v4
+47/245984/campos_512_v4
+47/245987/campos_512_v4
+47/245988/campos_512_v4
+47/245993/campos_512_v4
+47/246005/campos_512_v4
+47/246008/campos_512_v4
+47/246010/campos_512_v4
+47/246013/campos_512_v4
+47/246014/campos_512_v4
+47/246015/campos_512_v4
+47/246021/campos_512_v4
+47/246034/campos_512_v4
+47/246035/campos_512_v4
+47/246036/campos_512_v4
+47/246042/campos_512_v4
+47/246049/campos_512_v4
+47/246064/campos_512_v4
+47/246070/campos_512_v4
+47/246074/campos_512_v4
+47/246075/campos_512_v4
+47/246077/campos_512_v4
+47/246078/campos_512_v4
+47/246079/campos_512_v4
+47/246081/campos_512_v4
+47/246091/campos_512_v4
+47/246099/campos_512_v4
+47/246108/campos_512_v4
+47/246112/campos_512_v4
+47/246118/campos_512_v4
+47/246128/campos_512_v4
+47/246140/campos_512_v4
+47/246144/campos_512_v4
+47/246147/campos_512_v4
+47/246158/campos_512_v4
+47/246170/campos_512_v4
+47/246182/campos_512_v4
+47/246183/campos_512_v4
+47/246192/campos_512_v4
+47/246197/campos_512_v4
+47/246208/campos_512_v4
+47/246213/campos_512_v4
+47/246217/campos_512_v4
+47/246240/campos_512_v4
+47/246247/campos_512_v4
+47/246252/campos_512_v4
+47/246258/campos_512_v4
+47/246266/campos_512_v4
+47/246268/campos_512_v4
+47/246276/campos_512_v4
+47/246278/campos_512_v4
+47/246286/campos_512_v4
+47/246297/campos_512_v4
+47/246313/campos_512_v4
+47/246318/campos_512_v4
+47/246322/campos_512_v4
+47/246333/campos_512_v4
+47/246338/campos_512_v4
+47/246339/campos_512_v4
+47/246340/campos_512_v4
+47/246345/campos_512_v4
+47/246363/campos_512_v4
+47/246378/campos_512_v4
+47/246387/campos_512_v4
+47/246396/campos_512_v4
+47/246404/campos_512_v4
+47/246408/campos_512_v4
+47/246421/campos_512_v4
+47/246423/campos_512_v4
+47/246427/campos_512_v4
+47/246437/campos_512_v4
+47/246451/campos_512_v4
+47/246452/campos_512_v4
+47/246457/campos_512_v4
+47/246458/campos_512_v4
+47/246463/campos_512_v4
+47/246474/campos_512_v4
+47/246485/campos_512_v4
+47/246489/campos_512_v4
+47/246493/campos_512_v4
+47/246496/campos_512_v4
+47/246500/campos_512_v4
+47/246502/campos_512_v4
+47/246505/campos_512_v4
+47/246509/campos_512_v4
+47/246515/campos_512_v4
+47/246536/campos_512_v4
+47/246557/campos_512_v4
+47/246560/campos_512_v4
+47/246564/campos_512_v4
+47/246592/campos_512_v4
+47/246600/campos_512_v4
+47/246617/campos_512_v4
+47/246618/campos_512_v4
+47/246643/campos_512_v4
+47/246647/campos_512_v4
+47/246652/campos_512_v4
+47/246655/campos_512_v4
+47/246660/campos_512_v4
+47/246663/campos_512_v4
+47/246665/campos_512_v4
+47/246673/campos_512_v4
+47/246682/campos_512_v4
+47/246689/campos_512_v4
+47/246693/campos_512_v4
+47/246716/campos_512_v4
+47/246734/campos_512_v4
+47/246743/campos_512_v4
+47/246758/campos_512_v4
+47/246773/campos_512_v4
+47/246781/campos_512_v4
+47/246801/campos_512_v4
+47/246807/campos_512_v4
+47/246809/campos_512_v4
+47/246814/campos_512_v4
+47/246815/campos_512_v4
+47/246817/campos_512_v4
+47/246826/campos_512_v4
+47/246830/campos_512_v4
+47/246843/campos_512_v4
+47/246853/campos_512_v4
+47/246857/campos_512_v4
+47/246858/campos_512_v4
+47/246866/campos_512_v4
+47/246873/campos_512_v4
+47/246890/campos_512_v4
+47/246908/campos_512_v4
+47/246918/campos_512_v4
+47/246934/campos_512_v4
+47/246935/campos_512_v4
+47/246952/campos_512_v4
+47/246953/campos_512_v4
+47/246957/campos_512_v4
+47/246974/campos_512_v4
+47/246979/campos_512_v4
+47/246988/campos_512_v4
+47/246989/campos_512_v4
+47/246993/campos_512_v4
+47/247010/campos_512_v4
+47/247016/campos_512_v4
+47/247029/campos_512_v4
+47/247033/campos_512_v4
+47/247050/campos_512_v4
+47/247051/campos_512_v4
+47/247052/campos_512_v4
+47/247054/campos_512_v4
+47/247077/campos_512_v4
+47/247083/campos_512_v4
+47/247095/campos_512_v4
+47/247097/campos_512_v4
+47/247105/campos_512_v4
+47/247111/campos_512_v4
+47/247116/campos_512_v4
+47/247145/campos_512_v4
+47/247150/campos_512_v4
+47/247155/campos_512_v4
+47/247166/campos_512_v4
+47/247173/campos_512_v4
+47/247180/campos_512_v4
+47/247189/campos_512_v4
+47/247195/campos_512_v4
+47/247199/campos_512_v4
+47/247201/campos_512_v4
+47/247207/campos_512_v4
+47/247208/campos_512_v4
+47/247209/campos_512_v4
+47/247218/campos_512_v4
+47/247234/campos_512_v4
+47/247247/campos_512_v4
+47/247251/campos_512_v4
+47/247260/campos_512_v4
+47/247279/campos_512_v4
+47/247280/campos_512_v4
+47/247288/campos_512_v4
+47/247297/campos_512_v4
+47/247303/campos_512_v4
+47/247304/campos_512_v4
+47/247313/campos_512_v4
+47/247318/campos_512_v4
+47/247336/campos_512_v4
+47/247342/campos_512_v4
+47/247343/campos_512_v4
+47/247344/campos_512_v4
+47/247349/campos_512_v4
+47/247354/campos_512_v4
+47/247360/campos_512_v4
+47/247398/campos_512_v4
+47/247408/campos_512_v4
+47/247409/campos_512_v4
+47/247410/campos_512_v4
+47/247416/campos_512_v4
+47/247422/campos_512_v4
+47/247425/campos_512_v4
+47/247426/campos_512_v4
+47/247430/campos_512_v4
+47/247431/campos_512_v4
+47/247433/campos_512_v4
+47/247449/campos_512_v4
+47/247457/campos_512_v4
+47/247460/campos_512_v4
+47/247467/campos_512_v4
+47/247473/campos_512_v4
+47/247475/campos_512_v4
+47/247478/campos_512_v4
+47/247520/campos_512_v4
+47/247523/campos_512_v4
+47/247532/campos_512_v4
+47/247541/campos_512_v4
+47/247551/campos_512_v4
+47/247561/campos_512_v4
+47/247562/campos_512_v4
+47/247580/campos_512_v4
+47/247581/campos_512_v4
+47/247605/campos_512_v4
+47/247614/campos_512_v4
+47/247643/campos_512_v4
+47/247645/campos_512_v4
+47/247651/campos_512_v4
+47/247661/campos_512_v4
+47/247676/campos_512_v4
+47/247679/campos_512_v4
+47/247685/campos_512_v4
+47/247688/campos_512_v4
+47/247689/campos_512_v4
+47/247690/campos_512_v4
+47/247694/campos_512_v4
+47/247701/campos_512_v4
+47/247702/campos_512_v4
+47/247706/campos_512_v4
+47/247709/campos_512_v4
+47/247714/campos_512_v4
+47/247721/campos_512_v4
+47/247728/campos_512_v4
+47/247748/campos_512_v4
+47/247752/campos_512_v4
+47/247754/campos_512_v4
+47/247757/campos_512_v4
+47/247762/campos_512_v4
+47/247769/campos_512_v4
+47/247781/campos_512_v4
+47/247787/campos_512_v4
+47/247789/campos_512_v4
+47/247791/campos_512_v4
+47/247794/campos_512_v4
+47/247807/campos_512_v4
+47/247810/campos_512_v4
+47/247812/campos_512_v4
+47/247819/campos_512_v4
+47/247832/campos_512_v4
+47/247837/campos_512_v4
+47/247852/campos_512_v4
+47/247853/campos_512_v4
+47/247855/campos_512_v4
+47/247857/campos_512_v4
+47/247861/campos_512_v4
+47/247863/campos_512_v4
+47/247866/campos_512_v4
+47/247868/campos_512_v4
+47/247870/campos_512_v4
+47/247873/campos_512_v4
+47/247877/campos_512_v4
+47/247881/campos_512_v4
+47/247886/campos_512_v4
+47/247902/campos_512_v4
+47/247903/campos_512_v4
+47/247917/campos_512_v4
+47/247921/campos_512_v4
+47/247931/campos_512_v4
+47/247938/campos_512_v4
+47/247945/campos_512_v4
+47/247954/campos_512_v4
+47/247965/campos_512_v4
+47/247967/campos_512_v4
+47/247978/campos_512_v4
+47/247984/campos_512_v4
+47/247988/campos_512_v4
+47/248004/campos_512_v4
+47/248006/campos_512_v4
+47/248022/campos_512_v4
+47/248031/campos_512_v4
+47/248035/campos_512_v4
+47/248036/campos_512_v4
+47/248043/campos_512_v4
+47/248046/campos_512_v4
+47/248060/campos_512_v4
+47/248065/campos_512_v4
+47/248066/campos_512_v4
+47/248071/campos_512_v4
+47/248079/campos_512_v4
+47/248097/campos_512_v4
+47/248119/campos_512_v4
+47/248127/campos_512_v4
+47/248134/campos_512_v4
+47/248141/campos_512_v4
+47/248152/campos_512_v4
+47/248159/campos_512_v4
+47/248161/campos_512_v4
+47/248170/campos_512_v4
+47/248173/campos_512_v4
+47/248197/campos_512_v4
+47/248217/campos_512_v4
+47/248227/campos_512_v4
+47/248239/campos_512_v4
+47/248261/campos_512_v4
+47/248267/campos_512_v4
+47/248283/campos_512_v4
+47/248285/campos_512_v4
+47/248288/campos_512_v4
+47/248297/campos_512_v4
+47/248314/campos_512_v4
+47/248320/campos_512_v4
+47/248326/campos_512_v4
+47/248330/campos_512_v4
+47/248331/campos_512_v4
+47/248332/campos_512_v4
+47/248338/campos_512_v4
+47/248341/campos_512_v4
+47/248369/campos_512_v4
+47/248371/campos_512_v4
+47/248372/campos_512_v4
+47/248419/campos_512_v4
+47/248426/campos_512_v4
+47/248429/campos_512_v4
+47/248431/campos_512_v4
+47/248434/campos_512_v4
+47/248458/campos_512_v4
+47/248476/campos_512_v4
+47/248479/campos_512_v4
+47/248498/campos_512_v4
+47/248504/campos_512_v4
+47/248505/campos_512_v4
+47/248507/campos_512_v4
+47/248515/campos_512_v4
+47/248526/campos_512_v4
+47/248533/campos_512_v4
+47/248538/campos_512_v4
+47/248541/campos_512_v4
+47/248543/campos_512_v4
+47/248546/campos_512_v4
+47/248548/campos_512_v4
+47/248549/campos_512_v4
+47/248555/campos_512_v4
+47/248574/campos_512_v4
+47/248576/campos_512_v4
+47/248585/campos_512_v4
+47/248590/campos_512_v4
+47/248621/campos_512_v4
+47/248624/campos_512_v4
+47/248630/campos_512_v4
+47/248662/campos_512_v4
+47/248698/campos_512_v4
+47/248699/campos_512_v4
+47/248713/campos_512_v4
+47/248722/campos_512_v4
+47/248750/campos_512_v4
+47/248769/campos_512_v4
+47/248799/campos_512_v4
+47/248803/campos_512_v4
+47/248812/campos_512_v4
+47/248829/campos_512_v4
+47/248831/campos_512_v4
+47/248833/campos_512_v4
+47/248838/campos_512_v4
+47/248842/campos_512_v4
+47/248844/campos_512_v4
+47/248849/campos_512_v4
+47/248851/campos_512_v4
+47/248854/campos_512_v4
+47/248857/campos_512_v4
+47/248861/campos_512_v4
+47/248898/campos_512_v4
+47/248921/campos_512_v4
+47/248932/campos_512_v4
+47/248934/campos_512_v4
+47/248939/campos_512_v4
+47/248955/campos_512_v4
+47/248960/campos_512_v4
+47/248962/campos_512_v4
+47/248971/campos_512_v4
+47/248994/campos_512_v4
+47/249002/campos_512_v4
+47/249004/campos_512_v4
+47/249016/campos_512_v4
+47/249024/campos_512_v4
+47/249027/campos_512_v4
+47/249037/campos_512_v4
+47/249040/campos_512_v4
+47/249058/campos_512_v4
+47/249066/campos_512_v4
+47/249080/campos_512_v4
+47/249087/campos_512_v4
+47/249089/campos_512_v4
+47/249103/campos_512_v4
+47/249122/campos_512_v4
+47/249123/campos_512_v4
+47/249135/campos_512_v4
+47/249143/campos_512_v4
+47/249145/campos_512_v4
+47/249154/campos_512_v4
+47/249157/campos_512_v4
+47/249159/campos_512_v4
+47/249163/campos_512_v4
+47/249176/campos_512_v4
+47/249179/campos_512_v4
+47/249197/campos_512_v4
+47/249208/campos_512_v4
+47/249210/campos_512_v4
+47/249217/campos_512_v4
+47/249229/campos_512_v4
+47/249232/campos_512_v4
+47/249250/campos_512_v4
+47/249253/campos_512_v4
+47/249256/campos_512_v4
+47/249258/campos_512_v4
+47/249268/campos_512_v4
+47/249270/campos_512_v4
+47/249273/campos_512_v4
+47/249291/campos_512_v4
+47/249298/campos_512_v4
+47/249305/campos_512_v4
+47/249312/campos_512_v4
+47/249324/campos_512_v4
+47/249328/campos_512_v4
+47/249353/campos_512_v4
+47/249356/campos_512_v4
+47/249363/campos_512_v4
+47/249368/campos_512_v4
+47/249371/campos_512_v4
+47/249389/campos_512_v4
+47/249401/campos_512_v4
+47/249411/campos_512_v4
+47/249414/campos_512_v4
+47/249416/campos_512_v4
+47/249418/campos_512_v4
+47/249424/campos_512_v4
+47/249450/campos_512_v4
+47/249451/campos_512_v4
+47/249462/campos_512_v4
+47/249483/campos_512_v4
+47/249486/campos_512_v4
+47/249489/campos_512_v4
+47/249490/campos_512_v4
+47/249494/campos_512_v4
+47/249517/campos_512_v4
+47/249530/campos_512_v4
+47/249531/campos_512_v4
+47/249534/campos_512_v4
+47/249548/campos_512_v4
+47/249549/campos_512_v4
+47/249554/campos_512_v4
+47/249561/campos_512_v4
+47/249565/campos_512_v4
+47/249567/campos_512_v4
+47/249568/campos_512_v4
+47/249583/campos_512_v4
+47/249584/campos_512_v4
+47/249592/campos_512_v4
+47/249600/campos_512_v4
+47/249606/campos_512_v4
+47/249614/campos_512_v4
+47/249621/campos_512_v4
+47/249623/campos_512_v4
+47/249626/campos_512_v4
+47/249642/campos_512_v4
+47/249643/campos_512_v4
+47/249649/campos_512_v4
+47/249666/campos_512_v4
+47/249671/campos_512_v4
+47/249676/campos_512_v4
+47/249693/campos_512_v4
+47/249695/campos_512_v4
+47/249696/campos_512_v4
+47/249699/campos_512_v4
+47/249709/campos_512_v4
+47/249713/campos_512_v4
+47/249726/campos_512_v4
+47/249731/campos_512_v4
+47/249732/campos_512_v4
+47/249755/campos_512_v4
+47/249770/campos_512_v4
+47/249779/campos_512_v4
+47/249784/campos_512_v4
+47/249785/campos_512_v4
+47/249825/campos_512_v4
+47/249826/campos_512_v4
+47/249841/campos_512_v4
+47/249847/campos_512_v4
+47/249848/campos_512_v4
+47/249858/campos_512_v4
+47/249871/campos_512_v4
+47/249912/campos_512_v4
+47/249913/campos_512_v4
+47/249931/campos_512_v4
+47/249945/campos_512_v4
+47/249947/campos_512_v4
+47/249957/campos_512_v4
+47/249961/campos_512_v4
+47/249981/campos_512_v4
+47/249982/campos_512_v4
+47/249997/campos_512_v4
+47/249999/campos_512_v4
+48/250005/campos_512_v4
+48/250011/campos_512_v4
+48/250022/campos_512_v4
+48/250024/campos_512_v4
+48/250025/campos_512_v4
+48/250055/campos_512_v4
+48/250058/campos_512_v4
+48/250066/campos_512_v4
+48/250069/campos_512_v4
+48/250084/campos_512_v4
+48/250086/campos_512_v4
+48/250088/campos_512_v4
+48/250089/campos_512_v4
+48/250090/campos_512_v4
+48/250101/campos_512_v4
+48/250119/campos_512_v4
+48/250134/campos_512_v4
+48/250142/campos_512_v4
+48/250153/campos_512_v4
+48/250159/campos_512_v4
+48/250160/campos_512_v4
+48/250170/campos_512_v4
+48/250172/campos_512_v4
+48/250200/campos_512_v4
+48/250202/campos_512_v4
+48/250220/campos_512_v4
+48/250225/campos_512_v4
+48/250233/campos_512_v4
+48/250234/campos_512_v4
+48/250247/campos_512_v4
+48/250252/campos_512_v4
+48/250254/campos_512_v4
+48/250260/campos_512_v4
+48/250276/campos_512_v4
+48/250279/campos_512_v4
+48/250292/campos_512_v4
+48/250294/campos_512_v4
+48/250296/campos_512_v4
+48/250303/campos_512_v4
+48/250305/campos_512_v4
+48/250307/campos_512_v4
+48/250326/campos_512_v4
+48/250339/campos_512_v4
+48/250340/campos_512_v4
+48/250351/campos_512_v4
+48/250354/campos_512_v4
+48/250357/campos_512_v4
+48/250359/campos_512_v4
+48/250369/campos_512_v4
+48/250383/campos_512_v4
+48/250385/campos_512_v4
+48/250392/campos_512_v4
+48/250395/campos_512_v4
+48/250398/campos_512_v4
+48/250406/campos_512_v4
+48/250413/campos_512_v4
+48/250416/campos_512_v4
+48/250427/campos_512_v4
+48/250447/campos_512_v4
+48/250458/campos_512_v4
+48/250479/campos_512_v4
+48/250484/campos_512_v4
+48/250485/campos_512_v4
+48/250488/campos_512_v4
+48/250502/campos_512_v4
+48/250506/campos_512_v4
+48/250512/campos_512_v4
+48/250515/campos_512_v4
+48/250520/campos_512_v4
+48/250521/campos_512_v4
+48/250530/campos_512_v4
+48/250531/campos_512_v4
+48/250537/campos_512_v4
+48/250540/campos_512_v4
+48/250542/campos_512_v4
+48/250562/campos_512_v4
+48/250573/campos_512_v4
+48/250579/campos_512_v4
+48/250593/campos_512_v4
+48/250600/campos_512_v4
+48/250614/campos_512_v4
+48/250626/campos_512_v4
+48/250658/campos_512_v4
+48/250671/campos_512_v4
+48/250678/campos_512_v4
+48/250684/campos_512_v4
+48/250700/campos_512_v4
+48/250718/campos_512_v4
+48/250770/campos_512_v4
+48/250777/campos_512_v4
+48/250798/campos_512_v4
+48/250814/campos_512_v4
+48/250822/campos_512_v4
+48/250825/campos_512_v4
+48/250830/campos_512_v4
+48/250850/campos_512_v4
+48/250852/campos_512_v4
+48/250865/campos_512_v4
+48/250871/campos_512_v4
+48/250886/campos_512_v4
+48/250888/campos_512_v4
+48/250896/campos_512_v4
+48/250897/campos_512_v4
+48/250902/campos_512_v4
+48/250914/campos_512_v4
+48/250929/campos_512_v4
+48/250932/campos_512_v4
+48/250944/campos_512_v4
+48/250948/campos_512_v4
+48/250952/campos_512_v4
+48/250954/campos_512_v4
+48/250956/campos_512_v4
+48/250965/campos_512_v4
+48/250968/campos_512_v4
+48/250976/campos_512_v4
+48/250977/campos_512_v4
+48/250979/campos_512_v4
+48/250994/campos_512_v4
+48/251001/campos_512_v4
+48/251004/campos_512_v4
+48/251010/campos_512_v4
+48/251021/campos_512_v4
+48/251038/campos_512_v4
+48/251060/campos_512_v4
+48/251072/campos_512_v4
+48/251080/campos_512_v4
+48/251082/campos_512_v4
+48/251095/campos_512_v4
+48/251102/campos_512_v4
+48/251103/campos_512_v4
+48/251118/campos_512_v4
+48/251158/campos_512_v4
+48/251160/campos_512_v4
+48/251164/campos_512_v4
+48/251165/campos_512_v4
+48/251168/campos_512_v4
+48/251169/campos_512_v4
+48/251175/campos_512_v4
+48/251219/campos_512_v4
+48/251232/campos_512_v4
+48/251240/campos_512_v4
+48/251241/campos_512_v4
+48/251248/campos_512_v4
+48/251278/campos_512_v4
+48/251284/campos_512_v4
+48/251299/campos_512_v4
+48/251303/campos_512_v4
+48/251312/campos_512_v4
+48/251315/campos_512_v4
+48/251341/campos_512_v4
+48/251345/campos_512_v4
+48/251349/campos_512_v4
+48/251353/campos_512_v4
+48/251354/campos_512_v4
+48/251366/campos_512_v4
+48/251375/campos_512_v4
+48/251394/campos_512_v4
+48/251410/campos_512_v4
+48/251424/campos_512_v4
+48/251427/campos_512_v4
+48/251428/campos_512_v4
+48/251433/campos_512_v4
+48/251439/campos_512_v4
+48/251449/campos_512_v4
+48/251493/campos_512_v4
+48/251501/campos_512_v4
+48/251504/campos_512_v4
+48/251505/campos_512_v4
+48/251511/campos_512_v4
+48/251518/campos_512_v4
+48/251527/campos_512_v4
+48/251529/campos_512_v4
+48/251544/campos_512_v4
+48/251559/campos_512_v4
+48/251562/campos_512_v4
+48/251570/campos_512_v4
+48/251574/campos_512_v4
+48/251576/campos_512_v4
+48/251587/campos_512_v4
+48/251593/campos_512_v4
+48/251595/campos_512_v4
+48/251615/campos_512_v4
+48/251619/campos_512_v4
+48/251622/campos_512_v4
+48/251626/campos_512_v4
+48/251628/campos_512_v4
+48/251639/campos_512_v4
+48/251641/campos_512_v4
+48/251667/campos_512_v4
+48/251670/campos_512_v4
+48/251683/campos_512_v4
+48/251686/campos_512_v4
+48/251690/campos_512_v4
+48/251691/campos_512_v4
+48/251701/campos_512_v4
+48/251722/campos_512_v4
+48/251734/campos_512_v4
+48/251736/campos_512_v4
+48/251739/campos_512_v4
+48/251750/campos_512_v4
+48/251752/campos_512_v4
+48/251774/campos_512_v4
+48/251784/campos_512_v4
+48/251794/campos_512_v4
+48/251796/campos_512_v4
+48/251797/campos_512_v4
+48/251799/campos_512_v4
+48/251851/campos_512_v4
+48/251852/campos_512_v4
+48/251855/campos_512_v4
+48/251867/campos_512_v4
+48/251870/campos_512_v4
+48/251876/campos_512_v4
+48/251886/campos_512_v4
+48/251889/campos_512_v4
+48/251890/campos_512_v4
+48/251892/campos_512_v4
+48/251901/campos_512_v4
+48/251902/campos_512_v4
+48/251913/campos_512_v4
+48/251914/campos_512_v4
+48/251931/campos_512_v4
+48/251935/campos_512_v4
+48/251942/campos_512_v4
+48/251943/campos_512_v4
+48/251949/campos_512_v4
+48/251967/campos_512_v4
+48/251981/campos_512_v4
+48/251985/campos_512_v4
+48/251991/campos_512_v4
+48/252002/campos_512_v4
+48/252006/campos_512_v4
+48/252007/campos_512_v4
+48/252009/campos_512_v4
+48/252010/campos_512_v4
+48/252014/campos_512_v4
+48/252022/campos_512_v4
+48/252024/campos_512_v4
+48/252034/campos_512_v4
+48/252036/campos_512_v4
+48/252039/campos_512_v4
+48/252048/campos_512_v4
+48/252059/campos_512_v4
+48/252089/campos_512_v4
+48/252090/campos_512_v4
+48/252093/campos_512_v4
+48/252095/campos_512_v4
+48/252099/campos_512_v4
+48/252104/campos_512_v4
+48/252119/campos_512_v4
+48/252134/campos_512_v4
+48/252135/campos_512_v4
+48/252142/campos_512_v4
+48/252149/campos_512_v4
+48/252162/campos_512_v4
+48/252179/campos_512_v4
+48/252184/campos_512_v4
+48/252190/campos_512_v4
+48/252205/campos_512_v4
+48/252215/campos_512_v4
+48/252219/campos_512_v4
+48/252228/campos_512_v4
+48/252229/campos_512_v4
+48/252237/campos_512_v4
+48/252239/campos_512_v4
+48/252249/campos_512_v4
+48/252256/campos_512_v4
+48/252262/campos_512_v4
+48/252286/campos_512_v4
+48/252292/campos_512_v4
+48/252303/campos_512_v4
+48/252312/campos_512_v4
+48/252313/campos_512_v4
+48/252319/campos_512_v4
+48/252322/campos_512_v4
+48/252331/campos_512_v4
+48/252337/campos_512_v4
+48/252365/campos_512_v4
+48/252372/campos_512_v4
+48/252377/campos_512_v4
+48/252378/campos_512_v4
+48/252380/campos_512_v4
+48/252382/campos_512_v4
+48/252397/campos_512_v4
+48/252398/campos_512_v4
+48/252401/campos_512_v4
+48/252412/campos_512_v4
+48/252417/campos_512_v4
+48/252419/campos_512_v4
+48/252462/campos_512_v4
+48/252464/campos_512_v4
+48/252479/campos_512_v4
+48/252480/campos_512_v4
+48/252485/campos_512_v4
+48/252489/campos_512_v4
+48/252491/campos_512_v4
+48/252496/campos_512_v4
+48/252503/campos_512_v4
+48/252504/campos_512_v4
+48/252508/campos_512_v4
+48/252515/campos_512_v4
+48/252525/campos_512_v4
+48/252526/campos_512_v4
+48/252528/campos_512_v4
+48/252532/campos_512_v4
+48/252535/campos_512_v4
+48/252536/campos_512_v4
+48/252537/campos_512_v4
+48/252538/campos_512_v4
+48/252546/campos_512_v4
+48/252549/campos_512_v4
+48/252550/campos_512_v4
+48/252556/campos_512_v4
+48/252564/campos_512_v4
+48/252573/campos_512_v4
+48/252592/campos_512_v4
+48/252596/campos_512_v4
+48/252601/campos_512_v4
+48/252606/campos_512_v4
+48/252627/campos_512_v4
+48/252628/campos_512_v4
+48/252630/campos_512_v4
+48/252638/campos_512_v4
+48/252644/campos_512_v4
+48/252645/campos_512_v4
+48/252657/campos_512_v4
+48/252663/campos_512_v4
+48/252665/campos_512_v4
+48/252686/campos_512_v4
+48/252705/campos_512_v4
+48/252712/campos_512_v4
+48/252713/campos_512_v4
+48/252715/campos_512_v4
+48/252719/campos_512_v4
+48/252720/campos_512_v4
+48/252723/campos_512_v4
+48/252733/campos_512_v4
+48/252740/campos_512_v4
+48/252749/campos_512_v4
+48/252751/campos_512_v4
+48/252753/campos_512_v4
+48/252773/campos_512_v4
+48/252776/campos_512_v4
+48/252779/campos_512_v4
+48/252811/campos_512_v4
+48/252823/campos_512_v4
+48/252825/campos_512_v4
+48/252832/campos_512_v4
+48/252838/campos_512_v4
+48/252861/campos_512_v4
+48/252862/campos_512_v4
+48/252881/campos_512_v4
+48/252892/campos_512_v4
+48/252894/campos_512_v4
+48/252907/campos_512_v4
+48/252910/campos_512_v4
+48/252912/campos_512_v4
+48/252920/campos_512_v4
+48/252927/campos_512_v4
+48/252929/campos_512_v4
+48/252936/campos_512_v4
+48/252946/campos_512_v4
+48/252950/campos_512_v4
+48/252959/campos_512_v4
+48/252965/campos_512_v4
+48/252975/campos_512_v4
+48/252979/campos_512_v4
+48/252980/campos_512_v4
+48/252989/campos_512_v4
+48/253003/campos_512_v4
+48/253006/campos_512_v4
+48/253011/campos_512_v4
+48/253014/campos_512_v4
+48/253022/campos_512_v4
+48/253028/campos_512_v4
+48/253036/campos_512_v4
+48/253051/campos_512_v4
+48/253054/campos_512_v4
+48/253064/campos_512_v4
+48/253066/campos_512_v4
+48/253069/campos_512_v4
+48/253070/campos_512_v4
+48/253073/campos_512_v4
+48/253082/campos_512_v4
+48/253092/campos_512_v4
+48/253103/campos_512_v4
+48/253104/campos_512_v4
+48/253105/campos_512_v4
+48/253121/campos_512_v4
+48/253126/campos_512_v4
+48/253129/campos_512_v4
+48/253148/campos_512_v4
+48/253156/campos_512_v4
+48/253157/campos_512_v4
+48/253163/campos_512_v4
+48/253177/campos_512_v4
+48/253204/campos_512_v4
+48/253209/campos_512_v4
+48/253214/campos_512_v4
+48/253227/campos_512_v4
+48/253228/campos_512_v4
+48/253232/campos_512_v4
+48/253236/campos_512_v4
+48/253247/campos_512_v4
+48/253264/campos_512_v4
+48/253268/campos_512_v4
+48/253289/campos_512_v4
+48/253293/campos_512_v4
+48/253304/campos_512_v4
+48/253308/campos_512_v4
+48/253322/campos_512_v4
+48/253328/campos_512_v4
+48/253350/campos_512_v4
+48/253353/campos_512_v4
+48/253354/campos_512_v4
+48/253358/campos_512_v4
+48/253364/campos_512_v4
+48/253372/campos_512_v4
+48/253376/campos_512_v4
+48/253381/campos_512_v4
+48/253390/campos_512_v4
+48/253398/campos_512_v4
+48/253405/campos_512_v4
+48/253416/campos_512_v4
+48/253435/campos_512_v4
+48/253437/campos_512_v4
+48/253438/campos_512_v4
+48/253443/campos_512_v4
+48/253446/campos_512_v4
+48/253447/campos_512_v4
+48/253452/campos_512_v4
+48/253458/campos_512_v4
+48/253467/campos_512_v4
+48/253484/campos_512_v4
+48/253488/campos_512_v4
+48/253504/campos_512_v4
+48/253506/campos_512_v4
+48/253508/campos_512_v4
+48/253509/campos_512_v4
+48/253514/campos_512_v4
+48/253542/campos_512_v4
+48/253547/campos_512_v4
+48/253551/campos_512_v4
+48/253553/campos_512_v4
+48/253576/campos_512_v4
+48/253579/campos_512_v4
+48/253588/campos_512_v4
+48/253593/campos_512_v4
+48/253594/campos_512_v4
+48/253600/campos_512_v4
+48/253627/campos_512_v4
+48/253634/campos_512_v4
+48/253638/campos_512_v4
+48/253652/campos_512_v4
+48/253663/campos_512_v4
+48/253676/campos_512_v4
+48/253694/campos_512_v4
+48/253707/campos_512_v4
+48/253708/campos_512_v4
+48/253720/campos_512_v4
+48/253740/campos_512_v4
+48/253751/campos_512_v4
+48/253771/campos_512_v4
+48/253773/campos_512_v4
+48/253777/campos_512_v4
+48/253782/campos_512_v4
+48/253785/campos_512_v4
+48/253798/campos_512_v4
+48/253810/campos_512_v4
+48/253820/campos_512_v4
+48/253827/campos_512_v4
+48/253836/campos_512_v4
+48/253837/campos_512_v4
+48/253838/campos_512_v4
+48/253841/campos_512_v4
+48/253848/campos_512_v4
+48/253851/campos_512_v4
+48/253854/campos_512_v4
+48/253885/campos_512_v4
+48/253892/campos_512_v4
+48/253895/campos_512_v4
+48/253906/campos_512_v4
+48/253907/campos_512_v4
+48/253908/campos_512_v4
+48/253915/campos_512_v4
+48/253919/campos_512_v4
+48/253924/campos_512_v4
+48/253934/campos_512_v4
+48/253963/campos_512_v4
+48/253982/campos_512_v4
+48/254008/campos_512_v4
+48/254015/campos_512_v4
+48/254021/campos_512_v4
+48/254030/campos_512_v4
+48/254032/campos_512_v4
+48/254039/campos_512_v4
+48/254048/campos_512_v4
+48/254055/campos_512_v4
+48/254056/campos_512_v4
+48/254079/campos_512_v4
+48/254086/campos_512_v4
+48/254094/campos_512_v4
+48/254095/campos_512_v4
+48/254104/campos_512_v4
+48/254105/campos_512_v4
+48/254107/campos_512_v4
+48/254118/campos_512_v4
+48/254131/campos_512_v4
+48/254134/campos_512_v4
+48/254135/campos_512_v4
+48/254141/campos_512_v4
+48/254148/campos_512_v4
+48/254163/campos_512_v4
+48/254165/campos_512_v4
+48/254170/campos_512_v4
+48/254197/campos_512_v4
+48/254205/campos_512_v4
+48/254208/campos_512_v4
+48/254210/campos_512_v4
+48/254213/campos_512_v4
+48/254230/campos_512_v4
+48/254248/campos_512_v4
+48/254257/campos_512_v4
+48/254269/campos_512_v4
+48/254271/campos_512_v4
+48/254276/campos_512_v4
+48/254279/campos_512_v4
+48/254280/campos_512_v4
+48/254295/campos_512_v4
+48/254305/campos_512_v4
+48/254310/campos_512_v4
+48/254316/campos_512_v4
+48/254320/campos_512_v4
+48/254347/campos_512_v4
+48/254348/campos_512_v4
+48/254375/campos_512_v4
+48/254376/campos_512_v4
+48/254378/campos_512_v4
+48/254380/campos_512_v4
+48/254383/campos_512_v4
+48/254386/campos_512_v4
+48/254406/campos_512_v4
+48/254407/campos_512_v4
+48/254409/campos_512_v4
+48/254410/campos_512_v4
+48/254434/campos_512_v4
+48/254444/campos_512_v4
+48/254452/campos_512_v4
+48/254456/campos_512_v4
+48/254461/campos_512_v4
+48/254465/campos_512_v4
+48/254472/campos_512_v4
+48/254494/campos_512_v4
+48/254510/campos_512_v4
+48/254515/campos_512_v4
+48/254524/campos_512_v4
+48/254530/campos_512_v4
+48/254534/campos_512_v4
+48/254547/campos_512_v4
+48/254550/campos_512_v4
+48/254551/campos_512_v4
+48/254561/campos_512_v4
+48/254565/campos_512_v4
+48/254566/campos_512_v4
+48/254570/campos_512_v4
+48/254588/campos_512_v4
+48/254589/campos_512_v4
+48/254592/campos_512_v4
+48/254601/campos_512_v4
+48/254604/campos_512_v4
+48/254625/campos_512_v4
+48/254636/campos_512_v4
+48/254645/campos_512_v4
+48/254649/campos_512_v4
+48/254653/campos_512_v4
+48/254668/campos_512_v4
+48/254669/campos_512_v4
+48/254671/campos_512_v4
+48/254693/campos_512_v4
+48/254699/campos_512_v4
+48/254700/campos_512_v4
+48/254702/campos_512_v4
+48/254706/campos_512_v4
+48/254710/campos_512_v4
+48/254716/campos_512_v4
+48/254761/campos_512_v4
+48/254767/campos_512_v4
+48/254768/campos_512_v4
+48/254789/campos_512_v4
+48/254790/campos_512_v4
+48/254804/campos_512_v4
+48/254820/campos_512_v4
+48/254833/campos_512_v4
+48/254845/campos_512_v4
+48/254853/campos_512_v4
+48/254856/campos_512_v4
+48/254867/campos_512_v4
+48/254873/campos_512_v4
+48/254897/campos_512_v4
+48/254906/campos_512_v4
+48/254913/campos_512_v4
+48/254924/campos_512_v4
+48/254930/campos_512_v4
+48/254968/campos_512_v4
+48/254974/campos_512_v4
+48/254986/campos_512_v4
+48/254992/campos_512_v4
+48/254993/campos_512_v4
+48/254997/campos_512_v4
+49/255006/campos_512_v4
+49/255013/campos_512_v4
+49/255017/campos_512_v4
+49/255019/campos_512_v4
+49/255055/campos_512_v4
+49/255056/campos_512_v4
+49/255058/campos_512_v4
+49/255062/campos_512_v4
+49/255067/campos_512_v4
+49/255069/campos_512_v4
+49/255072/campos_512_v4
+49/255084/campos_512_v4
+49/255088/campos_512_v4
+49/255090/campos_512_v4
+49/255113/campos_512_v4
+49/255117/campos_512_v4
+49/255120/campos_512_v4
+49/255141/campos_512_v4
+49/255156/campos_512_v4
+49/255159/campos_512_v4
+49/255160/campos_512_v4
+49/255164/campos_512_v4
+49/255174/campos_512_v4
+49/255181/campos_512_v4
+49/255183/campos_512_v4
+49/255208/campos_512_v4
+49/255233/campos_512_v4
+49/255252/campos_512_v4
+49/255257/campos_512_v4
+49/255258/campos_512_v4
+49/255281/campos_512_v4
+49/255286/campos_512_v4
+49/255288/campos_512_v4
+49/255290/campos_512_v4
+49/255298/campos_512_v4
+49/255315/campos_512_v4
+49/255317/campos_512_v4
+49/255324/campos_512_v4
+49/255325/campos_512_v4
+49/255333/campos_512_v4
+49/255340/campos_512_v4
+49/255341/campos_512_v4
+49/255344/campos_512_v4
+49/255349/campos_512_v4
+49/255357/campos_512_v4
+49/255366/campos_512_v4
+49/255374/campos_512_v4
+49/255377/campos_512_v4
+49/255382/campos_512_v4
+49/255392/campos_512_v4
+49/255393/campos_512_v4
+49/255400/campos_512_v4
+49/255415/campos_512_v4
+49/255437/campos_512_v4
+49/255447/campos_512_v4
+49/255477/campos_512_v4
+49/255486/campos_512_v4
+49/255510/campos_512_v4
+49/255514/campos_512_v4
+49/255518/campos_512_v4
+49/255519/campos_512_v4
+49/255527/campos_512_v4
+49/255529/campos_512_v4
+49/255532/campos_512_v4
+49/255535/campos_512_v4
+49/255537/campos_512_v4
+49/255540/campos_512_v4
+49/255543/campos_512_v4
+49/255546/campos_512_v4
+49/255552/campos_512_v4
+49/255558/campos_512_v4
+49/255568/campos_512_v4
+49/255573/campos_512_v4
+49/255574/campos_512_v4
+49/255578/campos_512_v4
+49/255595/campos_512_v4
+49/255602/campos_512_v4
+49/255604/campos_512_v4
+49/255612/campos_512_v4
+49/255616/campos_512_v4
+49/255626/campos_512_v4
+49/255629/campos_512_v4
+49/255630/campos_512_v4
+49/255639/campos_512_v4
+49/255656/campos_512_v4
+49/255663/campos_512_v4
+49/255670/campos_512_v4
+49/255684/campos_512_v4
+49/255689/campos_512_v4
+49/255694/campos_512_v4
+49/255697/campos_512_v4
+49/255703/campos_512_v4
+49/255705/campos_512_v4
+49/255706/campos_512_v4
+49/255709/campos_512_v4
+49/255718/campos_512_v4
+49/255722/campos_512_v4
+49/255741/campos_512_v4
+49/255747/campos_512_v4
+49/255762/campos_512_v4
+49/255768/campos_512_v4
+49/255769/campos_512_v4
+49/255795/campos_512_v4
+49/255801/campos_512_v4
+49/255808/campos_512_v4
+49/255818/campos_512_v4
+49/255830/campos_512_v4
+49/255848/campos_512_v4
+49/255857/campos_512_v4
+49/255879/campos_512_v4
+49/255880/campos_512_v4
+49/255887/campos_512_v4
+49/255898/campos_512_v4
+49/255902/campos_512_v4
+49/255910/campos_512_v4
+49/255911/campos_512_v4
+49/255922/campos_512_v4
+49/255930/campos_512_v4
+49/255931/campos_512_v4
+49/255943/campos_512_v4
+49/255944/campos_512_v4
+49/255945/campos_512_v4
+49/255948/campos_512_v4
+49/255962/campos_512_v4
+49/255963/campos_512_v4
+49/255973/campos_512_v4
+49/255975/campos_512_v4
+49/255980/campos_512_v4
+49/255998/campos_512_v4
+49/256003/campos_512_v4
+49/256012/campos_512_v4
+49/256013/campos_512_v4
+49/256020/campos_512_v4
+49/256023/campos_512_v4
+49/256026/campos_512_v4
+49/256028/campos_512_v4
+49/256029/campos_512_v4
+49/256031/campos_512_v4
+49/256036/campos_512_v4
+49/256042/campos_512_v4
+49/256055/campos_512_v4
+49/256064/campos_512_v4
+49/256065/campos_512_v4
+49/256081/campos_512_v4
+49/256094/campos_512_v4
+49/256095/campos_512_v4
+49/256097/campos_512_v4
+49/256098/campos_512_v4
+49/256108/campos_512_v4
+49/256124/campos_512_v4
+49/256125/campos_512_v4
+49/256135/campos_512_v4
+49/256142/campos_512_v4
+49/256155/campos_512_v4
+49/256166/campos_512_v4
+49/256185/campos_512_v4
+49/256191/campos_512_v4
+49/256192/campos_512_v4
+49/256203/campos_512_v4
+49/256208/campos_512_v4
+49/256211/campos_512_v4
+49/256224/campos_512_v4
+49/256226/campos_512_v4
+49/256234/campos_512_v4
+49/256235/campos_512_v4
+49/256238/campos_512_v4
+49/256247/campos_512_v4
+49/256249/campos_512_v4
+49/256252/campos_512_v4
+49/256257/campos_512_v4
+49/256259/campos_512_v4
+49/256262/campos_512_v4
+49/256263/campos_512_v4
+49/256272/campos_512_v4
+49/256273/campos_512_v4
+49/256276/campos_512_v4
+49/256281/campos_512_v4
+49/256286/campos_512_v4
+49/256296/campos_512_v4
+49/256306/campos_512_v4
+49/256309/campos_512_v4
+49/256318/campos_512_v4
+49/256319/campos_512_v4
+49/256320/campos_512_v4
+49/256323/campos_512_v4
+49/256325/campos_512_v4
+49/256328/campos_512_v4
+49/256329/campos_512_v4
+49/256333/campos_512_v4
+49/256336/campos_512_v4
+49/256344/campos_512_v4
+49/256351/campos_512_v4
+49/256356/campos_512_v4
+49/256370/campos_512_v4
+49/256380/campos_512_v4
+49/256385/campos_512_v4
+49/256394/campos_512_v4
+49/256405/campos_512_v4
+49/256406/campos_512_v4
+49/256409/campos_512_v4
+49/256419/campos_512_v4
+49/256428/campos_512_v4
+49/256429/campos_512_v4
+49/256435/campos_512_v4
+49/256446/campos_512_v4
+49/256452/campos_512_v4
+49/256455/campos_512_v4
+49/256456/campos_512_v4
+49/256471/campos_512_v4
+49/256473/campos_512_v4
+49/256476/campos_512_v4
+49/256481/campos_512_v4
+49/256484/campos_512_v4
+49/256515/campos_512_v4
+49/256523/campos_512_v4
+49/256525/campos_512_v4
+49/256534/campos_512_v4
+49/256536/campos_512_v4
+49/256539/campos_512_v4
+49/256548/campos_512_v4
+49/256549/campos_512_v4
+49/256564/campos_512_v4
+49/256575/campos_512_v4
+49/256591/campos_512_v4
+49/256596/campos_512_v4
+49/256611/campos_512_v4
+49/256620/campos_512_v4
+49/256624/campos_512_v4
+49/256632/campos_512_v4
+49/256637/campos_512_v4
+49/256646/campos_512_v4
+49/256652/campos_512_v4
+49/256653/campos_512_v4
+49/256658/campos_512_v4
+49/256663/campos_512_v4
+49/256669/campos_512_v4
+49/256689/campos_512_v4
+49/256705/campos_512_v4
+49/256710/campos_512_v4
+49/256718/campos_512_v4
+49/256723/campos_512_v4
+49/256732/campos_512_v4
+49/256742/campos_512_v4
+49/256744/campos_512_v4
+49/256747/campos_512_v4
+49/256749/campos_512_v4
+49/256756/campos_512_v4
+49/256767/campos_512_v4
+49/256807/campos_512_v4
+49/256811/campos_512_v4
+49/256814/campos_512_v4
+49/256816/campos_512_v4
+49/256845/campos_512_v4
+49/256848/campos_512_v4
+49/256854/campos_512_v4
+49/256858/campos_512_v4
+49/256872/campos_512_v4
+49/256877/campos_512_v4
+49/256878/campos_512_v4
+49/256880/campos_512_v4
+49/256881/campos_512_v4
+49/256905/campos_512_v4
+49/256914/campos_512_v4
+49/256922/campos_512_v4
+49/256926/campos_512_v4
+49/256938/campos_512_v4
+49/256945/campos_512_v4
+49/256946/campos_512_v4
+49/256952/campos_512_v4
+49/256979/campos_512_v4
+49/256984/campos_512_v4
+49/256986/campos_512_v4
+49/256990/campos_512_v4
+49/256994/campos_512_v4
+49/257004/campos_512_v4
+49/257008/campos_512_v4
+49/257010/campos_512_v4
+49/257014/campos_512_v4
+49/257034/campos_512_v4
+49/257035/campos_512_v4
+49/257043/campos_512_v4
+49/257045/campos_512_v4
+49/257052/campos_512_v4
+49/257060/campos_512_v4
+49/257068/campos_512_v4
+49/257080/campos_512_v4
+49/257085/campos_512_v4
+49/257090/campos_512_v4
+49/257121/campos_512_v4
+49/257133/campos_512_v4
+49/257153/campos_512_v4
+49/257168/campos_512_v4
+49/257175/campos_512_v4
+49/257177/campos_512_v4
+49/257178/campos_512_v4
+49/257191/campos_512_v4
+49/257197/campos_512_v4
+49/257206/campos_512_v4
+49/257216/campos_512_v4
+49/257238/campos_512_v4
+49/257242/campos_512_v4
+49/257248/campos_512_v4
+49/257251/campos_512_v4
+49/257255/campos_512_v4
+49/257262/campos_512_v4
+49/257278/campos_512_v4
+49/257283/campos_512_v4
+49/257304/campos_512_v4
+49/257309/campos_512_v4
+49/257311/campos_512_v4
+49/257314/campos_512_v4
+49/257334/campos_512_v4
+49/257344/campos_512_v4
+49/257375/campos_512_v4
+49/257377/campos_512_v4
+49/257383/campos_512_v4
+49/257392/campos_512_v4
+49/257401/campos_512_v4
+49/257405/campos_512_v4
+49/257408/campos_512_v4
+49/257411/campos_512_v4
+49/257419/campos_512_v4
+49/257457/campos_512_v4
+49/257463/campos_512_v4
+49/257465/campos_512_v4
+49/257470/campos_512_v4
+49/257489/campos_512_v4
+49/257491/campos_512_v4
+49/257497/campos_512_v4
+49/257508/campos_512_v4
+49/257525/campos_512_v4
+49/257534/campos_512_v4
+49/257544/campos_512_v4
+49/257564/campos_512_v4
+49/257566/campos_512_v4
+49/257567/campos_512_v4
+49/257574/campos_512_v4
+49/257579/campos_512_v4
+49/257583/campos_512_v4
+49/257587/campos_512_v4
+49/257591/campos_512_v4
+49/257599/campos_512_v4
+49/257604/campos_512_v4
+49/257609/campos_512_v4
+49/257611/campos_512_v4
+49/257612/campos_512_v4
+49/257619/campos_512_v4
+49/257627/campos_512_v4
+49/257631/campos_512_v4
+49/257636/campos_512_v4
+49/257638/campos_512_v4
+49/257652/campos_512_v4
+49/257653/campos_512_v4
+49/257654/campos_512_v4
+49/257657/campos_512_v4
+49/257659/campos_512_v4
+49/257668/campos_512_v4
+49/257672/campos_512_v4
+49/257673/campos_512_v4
+49/257677/campos_512_v4
+49/257682/campos_512_v4
+49/257711/campos_512_v4
+49/257713/campos_512_v4
+49/257720/campos_512_v4
+49/257725/campos_512_v4
+49/257729/campos_512_v4
+49/257731/campos_512_v4
+49/257739/campos_512_v4
+49/257740/campos_512_v4
+49/257741/campos_512_v4
+49/257746/campos_512_v4
+49/257762/campos_512_v4
+49/257767/campos_512_v4
+49/257770/campos_512_v4
+49/257775/campos_512_v4
+49/257778/campos_512_v4
+49/257784/campos_512_v4
+49/257790/campos_512_v4
+49/257798/campos_512_v4
+49/257800/campos_512_v4
+49/257804/campos_512_v4
+49/257805/campos_512_v4
+49/257830/campos_512_v4
+49/257839/campos_512_v4
+49/257840/campos_512_v4
+49/257842/campos_512_v4
+49/257845/campos_512_v4
+49/257848/campos_512_v4
+49/257850/campos_512_v4
+49/257852/campos_512_v4
+49/257858/campos_512_v4
+49/257862/campos_512_v4
+49/257864/campos_512_v4
+49/257893/campos_512_v4
+49/257911/campos_512_v4
+49/257924/campos_512_v4
+49/257932/campos_512_v4
+49/257937/campos_512_v4
+49/257943/campos_512_v4
+49/257956/campos_512_v4
+49/257983/campos_512_v4
+49/257997/campos_512_v4
+49/258001/campos_512_v4
+49/258004/campos_512_v4
+49/258008/campos_512_v4
+49/258009/campos_512_v4
+49/258043/campos_512_v4
+49/258050/campos_512_v4
+49/258056/campos_512_v4
+49/258057/campos_512_v4
+49/258063/campos_512_v4
+49/258073/campos_512_v4
+49/258075/campos_512_v4
+49/258079/campos_512_v4
+49/258081/campos_512_v4
+49/258088/campos_512_v4
+49/258089/campos_512_v4
+49/258090/campos_512_v4
+49/258091/campos_512_v4
+49/258101/campos_512_v4
+49/258102/campos_512_v4
+49/258115/campos_512_v4
+49/258143/campos_512_v4
+49/258156/campos_512_v4
+49/258157/campos_512_v4
+49/258158/campos_512_v4
+49/258173/campos_512_v4
+49/258182/campos_512_v4
+49/258188/campos_512_v4
+49/258195/campos_512_v4
+49/258200/campos_512_v4
+49/258210/campos_512_v4
+49/258246/campos_512_v4
+49/258253/campos_512_v4
+49/258270/campos_512_v4
+49/258276/campos_512_v4
+49/258281/campos_512_v4
+49/258282/campos_512_v4
+49/258295/campos_512_v4
+49/258302/campos_512_v4
+49/258304/campos_512_v4
+49/258307/campos_512_v4
+49/258314/campos_512_v4
+49/258332/campos_512_v4
+49/258333/campos_512_v4
+49/258339/campos_512_v4
+49/258343/campos_512_v4
+49/258357/campos_512_v4
+49/258366/campos_512_v4
+49/258370/campos_512_v4
+49/258380/campos_512_v4
+49/258387/campos_512_v4
+49/258388/campos_512_v4
+49/258411/campos_512_v4
+49/258429/campos_512_v4
+49/258434/campos_512_v4
+49/258438/campos_512_v4
+49/258440/campos_512_v4
+49/258445/campos_512_v4
+49/258447/campos_512_v4
+49/258455/campos_512_v4
+49/258458/campos_512_v4
+49/258463/campos_512_v4
+49/258466/campos_512_v4
+49/258483/campos_512_v4
+49/258498/campos_512_v4
+49/258509/campos_512_v4
+49/258514/campos_512_v4
+49/258518/campos_512_v4
+49/258520/campos_512_v4
+49/258521/campos_512_v4
+49/258524/campos_512_v4
+49/258542/campos_512_v4
+49/258554/campos_512_v4
+49/258557/campos_512_v4
+49/258566/campos_512_v4
+49/258580/campos_512_v4
+49/258588/campos_512_v4
+49/258593/campos_512_v4
+49/258595/campos_512_v4
+49/258606/campos_512_v4
+49/258622/campos_512_v4
+49/258646/campos_512_v4
+49/258651/campos_512_v4
+49/258652/campos_512_v4
+49/258654/campos_512_v4
+49/258655/campos_512_v4
+49/258668/campos_512_v4
+49/258669/campos_512_v4
+49/258670/campos_512_v4
+49/258679/campos_512_v4
+49/258682/campos_512_v4
+49/258683/campos_512_v4
+49/258691/campos_512_v4
+49/258699/campos_512_v4
+49/258714/campos_512_v4
+49/258717/campos_512_v4
+49/258720/campos_512_v4
+49/258721/campos_512_v4
+49/258725/campos_512_v4
+49/258741/campos_512_v4
+49/258742/campos_512_v4
+49/258743/campos_512_v4
+49/258754/campos_512_v4
+49/258776/campos_512_v4
+49/258796/campos_512_v4
+49/258803/campos_512_v4
+49/258806/campos_512_v4
+49/258807/campos_512_v4
+49/258815/campos_512_v4
+49/258823/campos_512_v4
+49/258826/campos_512_v4
+49/258836/campos_512_v4
+49/258837/campos_512_v4
+49/258851/campos_512_v4
+49/258854/campos_512_v4
+49/258860/campos_512_v4
+49/258863/campos_512_v4
+49/258864/campos_512_v4
+49/258869/campos_512_v4
+49/258875/campos_512_v4
+49/258891/campos_512_v4
+49/258908/campos_512_v4
+49/258916/campos_512_v4
+49/258917/campos_512_v4
+49/258947/campos_512_v4
+49/258948/campos_512_v4
+49/258954/campos_512_v4
+49/258977/campos_512_v4
+49/258989/campos_512_v4
+49/258992/campos_512_v4
+49/258995/campos_512_v4
+49/259014/campos_512_v4
+49/259024/campos_512_v4
+49/259033/campos_512_v4
+49/259039/campos_512_v4
+49/259050/campos_512_v4
+49/259082/campos_512_v4
+49/259091/campos_512_v4
+49/259109/campos_512_v4
+49/259113/campos_512_v4
+49/259132/campos_512_v4
+49/259140/campos_512_v4
+49/259170/campos_512_v4
+49/259184/campos_512_v4
+49/259187/campos_512_v4
+49/259188/campos_512_v4
+49/259194/campos_512_v4
+49/259195/campos_512_v4
+49/259198/campos_512_v4
+49/259203/campos_512_v4
+49/259209/campos_512_v4
+49/259220/campos_512_v4
+49/259221/campos_512_v4
+49/259227/campos_512_v4
+49/259234/campos_512_v4
+49/259281/campos_512_v4
+49/259287/campos_512_v4
+49/259289/campos_512_v4
+49/259296/campos_512_v4
+49/259322/campos_512_v4
+49/259330/campos_512_v4
+49/259334/campos_512_v4
+49/259335/campos_512_v4
+49/259355/campos_512_v4
+49/259367/campos_512_v4
+49/259368/campos_512_v4
+49/259385/campos_512_v4
+49/259396/campos_512_v4
+49/259421/campos_512_v4
+49/259429/campos_512_v4
+49/259436/campos_512_v4
+49/259437/campos_512_v4
+49/259452/campos_512_v4
+49/259454/campos_512_v4
+49/259465/campos_512_v4
+49/259476/campos_512_v4
+49/259481/campos_512_v4
+49/259490/campos_512_v4
+49/259492/campos_512_v4
+49/259517/campos_512_v4
+49/259523/campos_512_v4
+49/259531/campos_512_v4
+49/259537/campos_512_v4
+49/259539/campos_512_v4
+49/259545/campos_512_v4
+49/259550/campos_512_v4
+49/259565/campos_512_v4
+49/259574/campos_512_v4
+49/259575/campos_512_v4
+49/259589/campos_512_v4
+49/259593/campos_512_v4
+49/259594/campos_512_v4
+49/259607/campos_512_v4
+49/259612/campos_512_v4
+49/259621/campos_512_v4
+49/259622/campos_512_v4
+49/259625/campos_512_v4
+49/259626/campos_512_v4
+49/259627/campos_512_v4
+49/259632/campos_512_v4
+49/259635/campos_512_v4
+49/259636/campos_512_v4
+49/259644/campos_512_v4
+49/259645/campos_512_v4
+49/259689/campos_512_v4
+49/259711/campos_512_v4
+49/259712/campos_512_v4
+49/259716/campos_512_v4
+49/259731/campos_512_v4
+49/259739/campos_512_v4
+49/259741/campos_512_v4
+49/259743/campos_512_v4
+49/259751/campos_512_v4
+49/259752/campos_512_v4
+49/259764/campos_512_v4
+49/259765/campos_512_v4
+49/259770/campos_512_v4
+49/259788/campos_512_v4
+49/259790/campos_512_v4
+49/259809/campos_512_v4
+49/259810/campos_512_v4
+49/259831/campos_512_v4
+49/259847/campos_512_v4
+49/259854/campos_512_v4
+49/259868/campos_512_v4
+49/259880/campos_512_v4
+49/259887/campos_512_v4
+49/259894/campos_512_v4
+49/259923/campos_512_v4
+49/259927/campos_512_v4
+49/259931/campos_512_v4
+49/259935/campos_512_v4
+49/259941/campos_512_v4
+49/259948/campos_512_v4
+49/259950/campos_512_v4
+49/259966/campos_512_v4
+49/259969/campos_512_v4
+49/259970/campos_512_v4
+49/259974/campos_512_v4
+49/259977/campos_512_v4
+49/259980/campos_512_v4
+49/260001/campos_512_v4
+5/35019/campos_512_v4
+5/35026/campos_512_v4
+5/35033/campos_512_v4
+5/35043/campos_512_v4
+5/35046/campos_512_v4
+5/35048/campos_512_v4
+5/35054/campos_512_v4
+5/35060/campos_512_v4
+5/35061/campos_512_v4
+5/35073/campos_512_v4
+5/35077/campos_512_v4
+5/35085/campos_512_v4
+5/35103/campos_512_v4
+5/35104/campos_512_v4
+5/35108/campos_512_v4
+5/35120/campos_512_v4
+5/35135/campos_512_v4
+5/35146/campos_512_v4
+5/35164/campos_512_v4
+5/35169/campos_512_v4
+5/35174/campos_512_v4
+5/35177/campos_512_v4
+5/35185/campos_512_v4
+5/35200/campos_512_v4
+5/35204/campos_512_v4
+5/35208/campos_512_v4
+5/35213/campos_512_v4
+5/35215/campos_512_v4
+5/35218/campos_512_v4
+5/35222/campos_512_v4
+5/35242/campos_512_v4
+5/35245/campos_512_v4
+5/35261/campos_512_v4
+5/35264/campos_512_v4
+5/35265/campos_512_v4
+5/35268/campos_512_v4
+5/35277/campos_512_v4
+5/35281/campos_512_v4
+5/35288/campos_512_v4
+5/35302/campos_512_v4
+5/35316/campos_512_v4
+5/35340/campos_512_v4
+5/35341/campos_512_v4
+5/35360/campos_512_v4
+5/35369/campos_512_v4
+5/35376/campos_512_v4
+5/35380/campos_512_v4
+5/35383/campos_512_v4
+5/35390/campos_512_v4
+5/35399/campos_512_v4
+5/35400/campos_512_v4
+5/35436/campos_512_v4
+5/35439/campos_512_v4
+5/35449/campos_512_v4
+5/35454/campos_512_v4
+5/35457/campos_512_v4
+5/35458/campos_512_v4
+5/35460/campos_512_v4
+5/35461/campos_512_v4
+5/35472/campos_512_v4
+5/35491/campos_512_v4
+5/35496/campos_512_v4
+5/35498/campos_512_v4
+5/35508/campos_512_v4
+5/35514/campos_512_v4
+5/35520/campos_512_v4
+5/35530/campos_512_v4
+5/35537/campos_512_v4
+5/35558/campos_512_v4
+5/35571/campos_512_v4
+5/35581/campos_512_v4
+5/35589/campos_512_v4
+5/35595/campos_512_v4
+5/35604/campos_512_v4
+5/35607/campos_512_v4
+5/35609/campos_512_v4
+5/35624/campos_512_v4
+5/35633/campos_512_v4
+5/35640/campos_512_v4
+5/35664/campos_512_v4
+5/35667/campos_512_v4
+5/35673/campos_512_v4
+5/35684/campos_512_v4
+5/35690/campos_512_v4
+5/35694/campos_512_v4
+5/35699/campos_512_v4
+5/35705/campos_512_v4
+5/35708/campos_512_v4
+5/35712/campos_512_v4
+5/35736/campos_512_v4
+5/35739/campos_512_v4
+5/35743/campos_512_v4
+5/35745/campos_512_v4
+5/35747/campos_512_v4
+5/35750/campos_512_v4
+5/35756/campos_512_v4
+5/35757/campos_512_v4
+5/35767/campos_512_v4
+5/35776/campos_512_v4
+5/35784/campos_512_v4
+5/35795/campos_512_v4
+5/35797/campos_512_v4
+5/35807/campos_512_v4
+5/35811/campos_512_v4
+5/35817/campos_512_v4
+5/35822/campos_512_v4
+5/35851/campos_512_v4
+5/35868/campos_512_v4
+5/35872/campos_512_v4
+5/35873/campos_512_v4
+5/35874/campos_512_v4
+5/35879/campos_512_v4
+5/35884/campos_512_v4
+5/35896/campos_512_v4
+5/35898/campos_512_v4
+5/35902/campos_512_v4
+5/35919/campos_512_v4
+5/35922/campos_512_v4
+5/35930/campos_512_v4
+5/35931/campos_512_v4
+5/35933/campos_512_v4
+5/35944/campos_512_v4
+5/35946/campos_512_v4
+5/35947/campos_512_v4
+5/35948/campos_512_v4
+5/35963/campos_512_v4
+5/35964/campos_512_v4
+5/35966/campos_512_v4
+5/35969/campos_512_v4
+5/35977/campos_512_v4
+5/35994/campos_512_v4
+5/35997/campos_512_v4
+5/36003/campos_512_v4
+5/36005/campos_512_v4
+5/36008/campos_512_v4
+5/36032/campos_512_v4
+5/36039/campos_512_v4
+5/36056/campos_512_v4
+5/36067/campos_512_v4
+5/36085/campos_512_v4
+5/36087/campos_512_v4
+5/36091/campos_512_v4
+5/36098/campos_512_v4
+5/36111/campos_512_v4
+5/36127/campos_512_v4
+5/36128/campos_512_v4
+5/36134/campos_512_v4
+5/36144/campos_512_v4
+5/36155/campos_512_v4
+5/36160/campos_512_v4
+5/36161/campos_512_v4
+5/36177/campos_512_v4
+5/36183/campos_512_v4
+5/36196/campos_512_v4
+5/36203/campos_512_v4
+5/36231/campos_512_v4
+5/36236/campos_512_v4
+5/36256/campos_512_v4
+5/36258/campos_512_v4
+5/36259/campos_512_v4
+5/36276/campos_512_v4
+5/36301/campos_512_v4
+5/36304/campos_512_v4
+5/36311/campos_512_v4
+5/36320/campos_512_v4
+5/36337/campos_512_v4
+5/36357/campos_512_v4
+5/36368/campos_512_v4
+5/36407/campos_512_v4
+5/36433/campos_512_v4
+5/36458/campos_512_v4
+5/36468/campos_512_v4
+5/36469/campos_512_v4
+5/36472/campos_512_v4
+5/36485/campos_512_v4
+5/36488/campos_512_v4
+5/36498/campos_512_v4
+5/36502/campos_512_v4
+5/36503/campos_512_v4
+5/36504/campos_512_v4
+5/36507/campos_512_v4
+5/36509/campos_512_v4
+5/36510/campos_512_v4
+5/36512/campos_512_v4
+5/36520/campos_512_v4
+5/36533/campos_512_v4
+5/36548/campos_512_v4
+5/36558/campos_512_v4
+5/36561/campos_512_v4
+5/36565/campos_512_v4
+5/36569/campos_512_v4
+5/36570/campos_512_v4
+5/36573/campos_512_v4
+5/36574/campos_512_v4
+5/36594/campos_512_v4
+5/36611/campos_512_v4
+5/36616/campos_512_v4
+5/36621/campos_512_v4
+5/36639/campos_512_v4
+5/36649/campos_512_v4
+5/36653/campos_512_v4
+5/36655/campos_512_v4
+5/36661/campos_512_v4
+5/36674/campos_512_v4
+5/36691/campos_512_v4
+5/36692/campos_512_v4
+5/36693/campos_512_v4
+5/36702/campos_512_v4
+5/36712/campos_512_v4
+5/36713/campos_512_v4
+5/36714/campos_512_v4
+5/36715/campos_512_v4
+5/36718/campos_512_v4
+5/36720/campos_512_v4
+5/36721/campos_512_v4
+5/36732/campos_512_v4
+5/36734/campos_512_v4
+5/36739/campos_512_v4
+5/36740/campos_512_v4
+5/36760/campos_512_v4
+5/36765/campos_512_v4
+5/36772/campos_512_v4
+5/36782/campos_512_v4
+5/36784/campos_512_v4
+5/36791/campos_512_v4
+5/36792/campos_512_v4
+5/36806/campos_512_v4
+5/36811/campos_512_v4
+5/36815/campos_512_v4
+5/36828/campos_512_v4
+5/36831/campos_512_v4
+5/36834/campos_512_v4
+5/36858/campos_512_v4
+5/36859/campos_512_v4
+5/36870/campos_512_v4
+5/36871/campos_512_v4
+5/36874/campos_512_v4
+5/36917/campos_512_v4
+5/36941/campos_512_v4
+5/36942/campos_512_v4
+5/36946/campos_512_v4
+5/36952/campos_512_v4
+5/36954/campos_512_v4
+5/36959/campos_512_v4
+5/36960/campos_512_v4
+5/36996/campos_512_v4
+5/36997/campos_512_v4
+5/37007/campos_512_v4
+5/37016/campos_512_v4
+5/37017/campos_512_v4
+5/37034/campos_512_v4
+5/37043/campos_512_v4
+5/37049/campos_512_v4
+5/37066/campos_512_v4
+5/37078/campos_512_v4
+5/37081/campos_512_v4
+5/37082/campos_512_v4
+5/37087/campos_512_v4
+5/37096/campos_512_v4
+5/37106/campos_512_v4
+5/37123/campos_512_v4
+5/37138/campos_512_v4
+5/37142/campos_512_v4
+5/37144/campos_512_v4
+5/37147/campos_512_v4
+5/37150/campos_512_v4
+5/37151/campos_512_v4
+5/37155/campos_512_v4
+5/37159/campos_512_v4
+5/37184/campos_512_v4
+5/37198/campos_512_v4
+5/37211/campos_512_v4
+5/37220/campos_512_v4
+5/37228/campos_512_v4
+5/37236/campos_512_v4
+5/37237/campos_512_v4
+5/37249/campos_512_v4
+5/37259/campos_512_v4
+5/37262/campos_512_v4
+5/37268/campos_512_v4
+5/37273/campos_512_v4
+5/37281/campos_512_v4
+5/37289/campos_512_v4
+5/37308/campos_512_v4
+5/37309/campos_512_v4
+5/37323/campos_512_v4
+5/37326/campos_512_v4
+5/37336/campos_512_v4
+5/37352/campos_512_v4
+5/37359/campos_512_v4
+5/37384/campos_512_v4
+5/37386/campos_512_v4
+5/37396/campos_512_v4
+5/37399/campos_512_v4
+5/37402/campos_512_v4
+5/37404/campos_512_v4
+5/37410/campos_512_v4
+5/37412/campos_512_v4
+5/37417/campos_512_v4
+5/37429/campos_512_v4
+5/37439/campos_512_v4
+5/37453/campos_512_v4
+5/37457/campos_512_v4
+5/37465/campos_512_v4
+5/37466/campos_512_v4
+5/37473/campos_512_v4
+5/37480/campos_512_v4
+5/37481/campos_512_v4
+5/37492/campos_512_v4
+5/37494/campos_512_v4
+5/37501/campos_512_v4
+5/37521/campos_512_v4
+5/37537/campos_512_v4
+5/37539/campos_512_v4
+5/37540/campos_512_v4
+5/37542/campos_512_v4
+5/37545/campos_512_v4
+5/37564/campos_512_v4
+5/37565/campos_512_v4
+5/37569/campos_512_v4
+5/37571/campos_512_v4
+5/37572/campos_512_v4
+5/37579/campos_512_v4
+5/37590/campos_512_v4
+5/37602/campos_512_v4
+5/37612/campos_512_v4
+5/37624/campos_512_v4
+5/37625/campos_512_v4
+5/37635/campos_512_v4
+5/37660/campos_512_v4
+5/37673/campos_512_v4
+5/37700/campos_512_v4
+5/37712/campos_512_v4
+5/37714/campos_512_v4
+5/37719/campos_512_v4
+5/37728/campos_512_v4
+5/37731/campos_512_v4
+5/37759/campos_512_v4
+5/37762/campos_512_v4
+5/37775/campos_512_v4
+5/37779/campos_512_v4
+5/37799/campos_512_v4
+5/37800/campos_512_v4
+5/37806/campos_512_v4
+5/37807/campos_512_v4
+5/37813/campos_512_v4
+5/37816/campos_512_v4
+5/37825/campos_512_v4
+5/37826/campos_512_v4
+5/37841/campos_512_v4
+5/37846/campos_512_v4
+5/37852/campos_512_v4
+5/37859/campos_512_v4
+5/37865/campos_512_v4
+5/37874/campos_512_v4
+5/37876/campos_512_v4
+5/37887/campos_512_v4
+5/37890/campos_512_v4
+5/37902/campos_512_v4
+5/37903/campos_512_v4
+5/37915/campos_512_v4
+5/37927/campos_512_v4
+5/37941/campos_512_v4
+5/37942/campos_512_v4
+5/37963/campos_512_v4
+5/37964/campos_512_v4
+5/37972/campos_512_v4
+5/37981/campos_512_v4
+5/37987/campos_512_v4
+5/37989/campos_512_v4
+5/37990/campos_512_v4
+5/37997/campos_512_v4
+5/37998/campos_512_v4
+5/38000/campos_512_v4
+5/38008/campos_512_v4
+5/38032/campos_512_v4
+5/38047/campos_512_v4
+5/38050/campos_512_v4
+5/38052/campos_512_v4
+5/38064/campos_512_v4
+5/38068/campos_512_v4
+5/38079/campos_512_v4
+5/38104/campos_512_v4
+5/38118/campos_512_v4
+5/38123/campos_512_v4
+5/38124/campos_512_v4
+5/38135/campos_512_v4
+5/38138/campos_512_v4
+5/38144/campos_512_v4
+5/38152/campos_512_v4
+5/38158/campos_512_v4
+5/38173/campos_512_v4
+5/38179/campos_512_v4
+5/38200/campos_512_v4
+5/38211/campos_512_v4
+5/38223/campos_512_v4
+5/38230/campos_512_v4
+5/38238/campos_512_v4
+5/38248/campos_512_v4
+5/38250/campos_512_v4
+5/38251/campos_512_v4
+5/38260/campos_512_v4
+5/38267/campos_512_v4
+5/38285/campos_512_v4
+5/38287/campos_512_v4
+5/38299/campos_512_v4
+5/38308/campos_512_v4
+5/38312/campos_512_v4
+5/38320/campos_512_v4
+5/38324/campos_512_v4
+5/38340/campos_512_v4
+5/38342/campos_512_v4
+5/38353/campos_512_v4
+5/38365/campos_512_v4
+5/38382/campos_512_v4
+5/38385/campos_512_v4
+5/38395/campos_512_v4
+5/38398/campos_512_v4
+5/38406/campos_512_v4
+5/38411/campos_512_v4
+5/38415/campos_512_v4
+5/38417/campos_512_v4
+5/38421/campos_512_v4
+5/38428/campos_512_v4
+5/38438/campos_512_v4
+5/38443/campos_512_v4
+5/38482/campos_512_v4
+5/38486/campos_512_v4
+5/38532/campos_512_v4
+5/38548/campos_512_v4
+5/38556/campos_512_v4
+5/38568/campos_512_v4
+5/38572/campos_512_v4
+5/38573/campos_512_v4
+5/38592/campos_512_v4
+5/38596/campos_512_v4
+5/38612/campos_512_v4
+5/38617/campos_512_v4
+5/38618/campos_512_v4
+5/38622/campos_512_v4
+5/38634/campos_512_v4
+5/38661/campos_512_v4
+5/38672/campos_512_v4
+5/38675/campos_512_v4
+5/38680/campos_512_v4
+5/38683/campos_512_v4
+5/38714/campos_512_v4
+5/38724/campos_512_v4
+5/38725/campos_512_v4
+5/38729/campos_512_v4
+5/38744/campos_512_v4
+5/38749/campos_512_v4
+5/38750/campos_512_v4
+5/38756/campos_512_v4
+5/38758/campos_512_v4
+5/38763/campos_512_v4
+5/38776/campos_512_v4
+5/38786/campos_512_v4
+5/38788/campos_512_v4
+5/38798/campos_512_v4
+5/38799/campos_512_v4
+5/38820/campos_512_v4
+5/38823/campos_512_v4
+5/38827/campos_512_v4
+5/38833/campos_512_v4
+5/38840/campos_512_v4
+5/38843/campos_512_v4
+5/38853/campos_512_v4
+5/38856/campos_512_v4
+5/38859/campos_512_v4
+5/38869/campos_512_v4
+5/38876/campos_512_v4
+5/38877/campos_512_v4
+5/38904/campos_512_v4
+5/38916/campos_512_v4
+5/38924/campos_512_v4
+5/38936/campos_512_v4
+5/38944/campos_512_v4
+5/38947/campos_512_v4
+5/38949/campos_512_v4
+5/38957/campos_512_v4
+5/38960/campos_512_v4
+5/38965/campos_512_v4
+5/38966/campos_512_v4
+5/38987/campos_512_v4
+5/38997/campos_512_v4
+5/39002/campos_512_v4
+5/39010/campos_512_v4
+5/39012/campos_512_v4
+5/39017/campos_512_v4
+5/39019/campos_512_v4
+5/39025/campos_512_v4
+5/39029/campos_512_v4
+5/39073/campos_512_v4
+5/39075/campos_512_v4
+5/39077/campos_512_v4
+5/39082/campos_512_v4
+5/39084/campos_512_v4
+5/39085/campos_512_v4
+5/39090/campos_512_v4
+5/39093/campos_512_v4
+5/39102/campos_512_v4
+5/39104/campos_512_v4
+5/39124/campos_512_v4
+5/39127/campos_512_v4
+5/39136/campos_512_v4
+5/39160/campos_512_v4
+5/39175/campos_512_v4
+5/39188/campos_512_v4
+5/39191/campos_512_v4
+5/39206/campos_512_v4
+5/39212/campos_512_v4
+5/39214/campos_512_v4
+5/39216/campos_512_v4
+5/39218/campos_512_v4
+5/39226/campos_512_v4
+5/39235/campos_512_v4
+5/39241/campos_512_v4
+5/39253/campos_512_v4
+5/39259/campos_512_v4
+5/39264/campos_512_v4
+5/39271/campos_512_v4
+5/39292/campos_512_v4
+5/39295/campos_512_v4
+5/39303/campos_512_v4
+5/39307/campos_512_v4
+5/39329/campos_512_v4
+5/39332/campos_512_v4
+5/39348/campos_512_v4
+5/39354/campos_512_v4
+5/39364/campos_512_v4
+5/39368/campos_512_v4
+5/39378/campos_512_v4
+5/39382/campos_512_v4
+5/39384/campos_512_v4
+5/39385/campos_512_v4
+5/39387/campos_512_v4
+5/39392/campos_512_v4
+5/39397/campos_512_v4
+5/39405/campos_512_v4
+5/39409/campos_512_v4
+5/39415/campos_512_v4
+5/39418/campos_512_v4
+5/39419/campos_512_v4
+5/39427/campos_512_v4
+5/39429/campos_512_v4
+5/39432/campos_512_v4
+5/39447/campos_512_v4
+5/39460/campos_512_v4
+5/39461/campos_512_v4
+5/39464/campos_512_v4
+5/39471/campos_512_v4
+5/39475/campos_512_v4
+5/39500/campos_512_v4
+5/39507/campos_512_v4
+5/39508/campos_512_v4
+5/39523/campos_512_v4
+5/39530/campos_512_v4
+5/39548/campos_512_v4
+5/39563/campos_512_v4
+5/39572/campos_512_v4
+5/39575/campos_512_v4
+5/39579/campos_512_v4
+5/39584/campos_512_v4
+5/39588/campos_512_v4
+5/39590/campos_512_v4
+5/39595/campos_512_v4
+5/39649/campos_512_v4
+5/39676/campos_512_v4
+5/39680/campos_512_v4
+5/39687/campos_512_v4
+5/39690/campos_512_v4
+5/39694/campos_512_v4
+5/39697/campos_512_v4
+5/39700/campos_512_v4
+5/39706/campos_512_v4
+5/39710/campos_512_v4
+5/39718/campos_512_v4
+5/39719/campos_512_v4
+5/39733/campos_512_v4
+5/39738/campos_512_v4
+5/39741/campos_512_v4
+5/39744/campos_512_v4
+5/39774/campos_512_v4
+5/39784/campos_512_v4
+5/39796/campos_512_v4
+5/39811/campos_512_v4
+5/39812/campos_512_v4
+5/39826/campos_512_v4
+5/39827/campos_512_v4
+5/39832/campos_512_v4
+5/39836/campos_512_v4
+5/39837/campos_512_v4
+5/39852/campos_512_v4
+5/39855/campos_512_v4
+5/39885/campos_512_v4
+5/39919/campos_512_v4
+5/39926/campos_512_v4
+5/39931/campos_512_v4
+5/39947/campos_512_v4
+5/39949/campos_512_v4
+5/39950/campos_512_v4
+5/39952/campos_512_v4
+5/39957/campos_512_v4
+5/39958/campos_512_v4
+5/39974/campos_512_v4
+5/39976/campos_512_v4
+5/39985/campos_512_v4
+5/39992/campos_512_v4
+50/260009/campos_512_v4
+50/260015/campos_512_v4
+50/260016/campos_512_v4
+50/260023/campos_512_v4
+50/260031/campos_512_v4
+50/260034/campos_512_v4
+50/260037/campos_512_v4
+50/260050/campos_512_v4
+50/260054/campos_512_v4
+50/260060/campos_512_v4
+50/260070/campos_512_v4
+50/260076/campos_512_v4
+50/260080/campos_512_v4
+50/260095/campos_512_v4
+50/260096/campos_512_v4
+50/260104/campos_512_v4
+50/260113/campos_512_v4
+50/260116/campos_512_v4
+50/260133/campos_512_v4
+50/260140/campos_512_v4
+50/260141/campos_512_v4
+50/260151/campos_512_v4
+50/260158/campos_512_v4
+50/260159/campos_512_v4
+50/260168/campos_512_v4
+50/260169/campos_512_v4
+50/260175/campos_512_v4
+50/260177/campos_512_v4
+50/260178/campos_512_v4
+50/260193/campos_512_v4
+50/260214/campos_512_v4
+50/260217/campos_512_v4
+50/260242/campos_512_v4
+50/260248/campos_512_v4
+50/260249/campos_512_v4
+50/260250/campos_512_v4
+50/260257/campos_512_v4
+50/260261/campos_512_v4
+50/260264/campos_512_v4
+50/260277/campos_512_v4
+50/260279/campos_512_v4
+50/260282/campos_512_v4
+50/260284/campos_512_v4
+50/260304/campos_512_v4
+50/260309/campos_512_v4
+50/260311/campos_512_v4
+50/260316/campos_512_v4
+50/260322/campos_512_v4
+50/260330/campos_512_v4
+50/260332/campos_512_v4
+50/260333/campos_512_v4
+50/260368/campos_512_v4
+50/260402/campos_512_v4
+50/260412/campos_512_v4
+50/260415/campos_512_v4
+50/260420/campos_512_v4
+50/260433/campos_512_v4
+50/260444/campos_512_v4
+50/260445/campos_512_v4
+50/260452/campos_512_v4
+50/260457/campos_512_v4
+50/260468/campos_512_v4
+50/260470/campos_512_v4
+50/260472/campos_512_v4
+50/260474/campos_512_v4
+50/260479/campos_512_v4
+50/260481/campos_512_v4
+50/260484/campos_512_v4
+50/260485/campos_512_v4
+50/260487/campos_512_v4
+50/260488/campos_512_v4
+50/260491/campos_512_v4
+50/260499/campos_512_v4
+50/260501/campos_512_v4
+50/260502/campos_512_v4
+50/260504/campos_512_v4
+50/260505/campos_512_v4
+50/260507/campos_512_v4
+50/260515/campos_512_v4
+50/260533/campos_512_v4
+50/260547/campos_512_v4
+50/260556/campos_512_v4
+50/260562/campos_512_v4
+50/260567/campos_512_v4
+50/260568/campos_512_v4
+50/260569/campos_512_v4
+50/260572/campos_512_v4
+50/260583/campos_512_v4
+50/260585/campos_512_v4
+50/260586/campos_512_v4
+50/260593/campos_512_v4
+50/260596/campos_512_v4
+50/260603/campos_512_v4
+50/260606/campos_512_v4
+50/260617/campos_512_v4
+50/260628/campos_512_v4
+50/260644/campos_512_v4
+50/260660/campos_512_v4
+50/260662/campos_512_v4
+50/260663/campos_512_v4
+50/260668/campos_512_v4
+50/260698/campos_512_v4
+50/260699/campos_512_v4
+50/260715/campos_512_v4
+50/260719/campos_512_v4
+50/260720/campos_512_v4
+50/260740/campos_512_v4
+50/260752/campos_512_v4
+50/260765/campos_512_v4
+50/260766/campos_512_v4
+50/260775/campos_512_v4
+50/260776/campos_512_v4
+50/260783/campos_512_v4
+50/260809/campos_512_v4
+50/260814/campos_512_v4
+50/260818/campos_512_v4
+50/260827/campos_512_v4
+50/260841/campos_512_v4
+50/260855/campos_512_v4
+50/260870/campos_512_v4
+50/260872/campos_512_v4
+50/260887/campos_512_v4
+50/260895/campos_512_v4
+50/260896/campos_512_v4
+50/260898/campos_512_v4
+50/260900/campos_512_v4
+50/260910/campos_512_v4
+50/260911/campos_512_v4
+50/260933/campos_512_v4
+50/260937/campos_512_v4
+50/260944/campos_512_v4
+50/260947/campos_512_v4
+50/260956/campos_512_v4
+50/260962/campos_512_v4
+50/260983/campos_512_v4
+50/260987/campos_512_v4
+50/260993/campos_512_v4
+50/261000/campos_512_v4
+50/261008/campos_512_v4
+50/261011/campos_512_v4
+50/261018/campos_512_v4
+50/261020/campos_512_v4
+50/261024/campos_512_v4
+50/261050/campos_512_v4
+50/261058/campos_512_v4
+50/261061/campos_512_v4
+50/261067/campos_512_v4
+50/261075/campos_512_v4
+50/261077/campos_512_v4
+50/261080/campos_512_v4
+50/261081/campos_512_v4
+50/261088/campos_512_v4
+50/261089/campos_512_v4
+50/261120/campos_512_v4
+50/261146/campos_512_v4
+50/261167/campos_512_v4
+50/261171/campos_512_v4
+50/261173/campos_512_v4
+50/261178/campos_512_v4
+50/261189/campos_512_v4
+50/261207/campos_512_v4
+50/261208/campos_512_v4
+50/261235/campos_512_v4
+50/261249/campos_512_v4
+50/261253/campos_512_v4
+50/261264/campos_512_v4
+50/261265/campos_512_v4
+50/261273/campos_512_v4
+50/261281/campos_512_v4
+50/261291/campos_512_v4
+50/261303/campos_512_v4
+50/261306/campos_512_v4
+50/261311/campos_512_v4
+50/261317/campos_512_v4
+50/261318/campos_512_v4
+50/261319/campos_512_v4
+50/261329/campos_512_v4
+50/261344/campos_512_v4
+50/261354/campos_512_v4
+50/261364/campos_512_v4
+50/261365/campos_512_v4
+50/261372/campos_512_v4
+50/261375/campos_512_v4
+50/261378/campos_512_v4
+50/261380/campos_512_v4
+50/261382/campos_512_v4
+50/261383/campos_512_v4
+50/261385/campos_512_v4
+50/261386/campos_512_v4
+50/261390/campos_512_v4
+50/261395/campos_512_v4
+50/261397/campos_512_v4
+50/261419/campos_512_v4
+50/261426/campos_512_v4
+50/261432/campos_512_v4
+50/261435/campos_512_v4
+50/261458/campos_512_v4
+50/261462/campos_512_v4
+50/261464/campos_512_v4
+50/261488/campos_512_v4
+50/261502/campos_512_v4
+50/261503/campos_512_v4
+50/261512/campos_512_v4
+50/261513/campos_512_v4
+50/261526/campos_512_v4
+50/261532/campos_512_v4
+50/261549/campos_512_v4
+50/261550/campos_512_v4
+50/261572/campos_512_v4
+50/261574/campos_512_v4
+50/261584/campos_512_v4
+50/261588/campos_512_v4
+50/261591/campos_512_v4
+50/261592/campos_512_v4
+50/261601/campos_512_v4
+50/261604/campos_512_v4
+50/261613/campos_512_v4
+50/261614/campos_512_v4
+50/261617/campos_512_v4
+50/261625/campos_512_v4
+50/261627/campos_512_v4
+50/261639/campos_512_v4
+50/261651/campos_512_v4
+50/261652/campos_512_v4
+50/261666/campos_512_v4
+50/261677/campos_512_v4
+50/261684/campos_512_v4
+50/261685/campos_512_v4
+50/261688/campos_512_v4
+50/261702/campos_512_v4
+50/261703/campos_512_v4
+50/261740/campos_512_v4
+50/261742/campos_512_v4
+50/261755/campos_512_v4
+50/261756/campos_512_v4
+50/261762/campos_512_v4
+50/261764/campos_512_v4
+50/261791/campos_512_v4
+50/261800/campos_512_v4
+50/261807/campos_512_v4
+50/261812/campos_512_v4
+50/261815/campos_512_v4
+50/261817/campos_512_v4
+50/261833/campos_512_v4
+50/261872/campos_512_v4
+50/261887/campos_512_v4
+50/261893/campos_512_v4
+50/261907/campos_512_v4
+50/261909/campos_512_v4
+50/261919/campos_512_v4
+50/261921/campos_512_v4
+50/261927/campos_512_v4
+50/261929/campos_512_v4
+50/261930/campos_512_v4
+50/261934/campos_512_v4
+50/261944/campos_512_v4
+50/261945/campos_512_v4
+50/261960/campos_512_v4
+50/261994/campos_512_v4
+50/262000/campos_512_v4
+50/262009/campos_512_v4
+50/262012/campos_512_v4
+50/262020/campos_512_v4
+50/262025/campos_512_v4
+50/262032/campos_512_v4
+50/262057/campos_512_v4
+50/262065/campos_512_v4
+50/262078/campos_512_v4
+50/262080/campos_512_v4
+50/262092/campos_512_v4
+50/262109/campos_512_v4
+50/262113/campos_512_v4
+50/262115/campos_512_v4
+50/262128/campos_512_v4
+50/262135/campos_512_v4
+50/262137/campos_512_v4
+50/262156/campos_512_v4
+50/262165/campos_512_v4
+50/262172/campos_512_v4
+50/262181/campos_512_v4
+50/262197/campos_512_v4
+50/262199/campos_512_v4
+50/262204/campos_512_v4
+50/262210/campos_512_v4
+50/262216/campos_512_v4
+50/262237/campos_512_v4
+50/262242/campos_512_v4
+50/262246/campos_512_v4
+50/262249/campos_512_v4
+50/262251/campos_512_v4
+50/262260/campos_512_v4
+50/262264/campos_512_v4
+50/262313/campos_512_v4
+50/262324/campos_512_v4
+50/262329/campos_512_v4
+50/262331/campos_512_v4
+50/262333/campos_512_v4
+50/262334/campos_512_v4
+50/262335/campos_512_v4
+50/262349/campos_512_v4
+50/262367/campos_512_v4
+50/262370/campos_512_v4
+50/262376/campos_512_v4
+50/262377/campos_512_v4
+50/262380/campos_512_v4
+50/262384/campos_512_v4
+50/262397/campos_512_v4
+50/262401/campos_512_v4
+50/262404/campos_512_v4
+50/262411/campos_512_v4
+50/262415/campos_512_v4
+50/262420/campos_512_v4
+50/262426/campos_512_v4
+50/262429/campos_512_v4
+50/262442/campos_512_v4
+50/262448/campos_512_v4
+50/262452/campos_512_v4
+50/262490/campos_512_v4
+50/262499/campos_512_v4
+50/262509/campos_512_v4
+50/262523/campos_512_v4
+50/262528/campos_512_v4
+50/262535/campos_512_v4
+50/262541/campos_512_v4
+50/262542/campos_512_v4
+50/262556/campos_512_v4
+50/262565/campos_512_v4
+50/262572/campos_512_v4
+50/262574/campos_512_v4
+50/262579/campos_512_v4
+50/262587/campos_512_v4
+50/262594/campos_512_v4
+50/262600/campos_512_v4
+50/262605/campos_512_v4
+50/262620/campos_512_v4
+50/262621/campos_512_v4
+50/262631/campos_512_v4
+50/262638/campos_512_v4
+50/262646/campos_512_v4
+50/262648/campos_512_v4
+50/262650/campos_512_v4
+50/262655/campos_512_v4
+50/262661/campos_512_v4
+50/262669/campos_512_v4
+50/262674/campos_512_v4
+50/262675/campos_512_v4
+50/262676/campos_512_v4
+50/262677/campos_512_v4
+50/262682/campos_512_v4
+50/262687/campos_512_v4
+50/262693/campos_512_v4
+50/262696/campos_512_v4
+50/262697/campos_512_v4
+50/262698/campos_512_v4
+50/262705/campos_512_v4
+50/262711/campos_512_v4
+50/262714/campos_512_v4
+50/262722/campos_512_v4
+50/262723/campos_512_v4
+50/262725/campos_512_v4
+50/262726/campos_512_v4
+50/262731/campos_512_v4
+50/262737/campos_512_v4
+50/262744/campos_512_v4
+50/262755/campos_512_v4
+50/262785/campos_512_v4
+50/262786/campos_512_v4
+50/262791/campos_512_v4
+50/262800/campos_512_v4
+50/262808/campos_512_v4
+50/262842/campos_512_v4
+50/262843/campos_512_v4
+50/262851/campos_512_v4
+50/262854/campos_512_v4
+50/262856/campos_512_v4
+50/262861/campos_512_v4
+50/262881/campos_512_v4
+50/262884/campos_512_v4
+50/262888/campos_512_v4
+50/262890/campos_512_v4
+50/262895/campos_512_v4
+50/262896/campos_512_v4
+50/262919/campos_512_v4
+50/262925/campos_512_v4
+50/262926/campos_512_v4
+50/262927/campos_512_v4
+50/262938/campos_512_v4
+50/262942/campos_512_v4
+50/262951/campos_512_v4
+50/262958/campos_512_v4
+50/262965/campos_512_v4
+50/262968/campos_512_v4
+50/262976/campos_512_v4
+50/262980/campos_512_v4
+50/262981/campos_512_v4
+50/262984/campos_512_v4
+50/262993/campos_512_v4
+50/263004/campos_512_v4
+50/263007/campos_512_v4
+50/263018/campos_512_v4
+50/263019/campos_512_v4
+50/263027/campos_512_v4
+50/263037/campos_512_v4
+50/263046/campos_512_v4
+50/263054/campos_512_v4
+50/263055/campos_512_v4
+50/263059/campos_512_v4
+50/263060/campos_512_v4
+50/263063/campos_512_v4
+50/263087/campos_512_v4
+50/263099/campos_512_v4
+50/263102/campos_512_v4
+50/263110/campos_512_v4
+50/263167/campos_512_v4
+50/263186/campos_512_v4
+50/263201/campos_512_v4
+50/263212/campos_512_v4
+50/263213/campos_512_v4
+50/263214/campos_512_v4
+50/263215/campos_512_v4
+50/263221/campos_512_v4
+50/263223/campos_512_v4
+50/263233/campos_512_v4
+50/263240/campos_512_v4
+50/263243/campos_512_v4
+50/263245/campos_512_v4
+50/263250/campos_512_v4
+50/263257/campos_512_v4
+50/263269/campos_512_v4
+50/263276/campos_512_v4
+50/263294/campos_512_v4
+50/263303/campos_512_v4
+50/263307/campos_512_v4
+50/263334/campos_512_v4
+50/263353/campos_512_v4
+50/263355/campos_512_v4
+50/263365/campos_512_v4
+50/263367/campos_512_v4
+50/263375/campos_512_v4
+50/263376/campos_512_v4
+50/263379/campos_512_v4
+50/263386/campos_512_v4
+50/263393/campos_512_v4
+50/263398/campos_512_v4
+50/263401/campos_512_v4
+50/263408/campos_512_v4
+50/263425/campos_512_v4
+50/263426/campos_512_v4
+50/263447/campos_512_v4
+50/263448/campos_512_v4
+50/263451/campos_512_v4
+50/263462/campos_512_v4
+50/263464/campos_512_v4
+50/263473/campos_512_v4
+50/263477/campos_512_v4
+50/263478/campos_512_v4
+50/263480/campos_512_v4
+50/263486/campos_512_v4
+50/263487/campos_512_v4
+50/263498/campos_512_v4
+50/263501/campos_512_v4
+50/263504/campos_512_v4
+50/263512/campos_512_v4
+50/263517/campos_512_v4
+50/263530/campos_512_v4
+50/263536/campos_512_v4
+50/263546/campos_512_v4
+50/263569/campos_512_v4
+50/263570/campos_512_v4
+50/263573/campos_512_v4
+50/263575/campos_512_v4
+50/263576/campos_512_v4
+50/263581/campos_512_v4
+50/263589/campos_512_v4
+50/263595/campos_512_v4
+50/263597/campos_512_v4
+50/263631/campos_512_v4
+50/263634/campos_512_v4
+50/263640/campos_512_v4
+50/263642/campos_512_v4
+50/263650/campos_512_v4
+50/263652/campos_512_v4
+50/263664/campos_512_v4
+50/263665/campos_512_v4
+50/263667/campos_512_v4
+50/263671/campos_512_v4
+50/263675/campos_512_v4
+50/263688/campos_512_v4
+50/263690/campos_512_v4
+50/263700/campos_512_v4
+50/263701/campos_512_v4
+50/263703/campos_512_v4
+50/263707/campos_512_v4
+50/263721/campos_512_v4
+50/263737/campos_512_v4
+50/263749/campos_512_v4
+50/263753/campos_512_v4
+50/263785/campos_512_v4
+50/263800/campos_512_v4
+50/263806/campos_512_v4
+50/263815/campos_512_v4
+50/263824/campos_512_v4
+50/263827/campos_512_v4
+50/263831/campos_512_v4
+50/263832/campos_512_v4
+50/263838/campos_512_v4
+50/263872/campos_512_v4
+50/263875/campos_512_v4
+50/263893/campos_512_v4
+50/263897/campos_512_v4
+50/263902/campos_512_v4
+50/263914/campos_512_v4
+50/263916/campos_512_v4
+50/263919/campos_512_v4
+50/263925/campos_512_v4
+50/263930/campos_512_v4
+50/263937/campos_512_v4
+50/263940/campos_512_v4
+50/263945/campos_512_v4
+50/263966/campos_512_v4
+50/263970/campos_512_v4
+50/263975/campos_512_v4
+50/263977/campos_512_v4
+50/263978/campos_512_v4
+50/263985/campos_512_v4
+50/263986/campos_512_v4
+50/263989/campos_512_v4
+50/263993/campos_512_v4
+50/264009/campos_512_v4
+50/264015/campos_512_v4
+50/264018/campos_512_v4
+50/264026/campos_512_v4
+50/264036/campos_512_v4
+50/264037/campos_512_v4
+50/264055/campos_512_v4
+50/264060/campos_512_v4
+50/264062/campos_512_v4
+50/264074/campos_512_v4
+50/264076/campos_512_v4
+50/264077/campos_512_v4
+50/264088/campos_512_v4
+50/264091/campos_512_v4
+50/264095/campos_512_v4
+50/264103/campos_512_v4
+50/264111/campos_512_v4
+50/264117/campos_512_v4
+50/264138/campos_512_v4
+50/264145/campos_512_v4
+50/264152/campos_512_v4
+50/264156/campos_512_v4
+50/264168/campos_512_v4
+50/264172/campos_512_v4
+50/264181/campos_512_v4
+50/264184/campos_512_v4
+50/264185/campos_512_v4
+50/264187/campos_512_v4
+50/264202/campos_512_v4
+50/264206/campos_512_v4
+50/264209/campos_512_v4
+50/264211/campos_512_v4
+50/264216/campos_512_v4
+50/264226/campos_512_v4
+50/264231/campos_512_v4
+50/264260/campos_512_v4
+50/264266/campos_512_v4
+50/264274/campos_512_v4
+50/264275/campos_512_v4
+50/264276/campos_512_v4
+50/264285/campos_512_v4
+50/264287/campos_512_v4
+50/264292/campos_512_v4
+50/264302/campos_512_v4
+50/264320/campos_512_v4
+50/264325/campos_512_v4
+50/264328/campos_512_v4
+50/264342/campos_512_v4
+50/264344/campos_512_v4
+50/264364/campos_512_v4
+50/264372/campos_512_v4
+50/264380/campos_512_v4
+50/264381/campos_512_v4
+50/264382/campos_512_v4
+50/264389/campos_512_v4
+50/264394/campos_512_v4
+50/264398/campos_512_v4
+50/264404/campos_512_v4
+50/264418/campos_512_v4
+50/264442/campos_512_v4
+50/264450/campos_512_v4
+50/264454/campos_512_v4
+50/264456/campos_512_v4
+50/264468/campos_512_v4
+50/264470/campos_512_v4
+50/264471/campos_512_v4
+50/264473/campos_512_v4
+50/264480/campos_512_v4
+50/264496/campos_512_v4
+50/264499/campos_512_v4
+50/264505/campos_512_v4
+50/264506/campos_512_v4
+50/264516/campos_512_v4
+50/264546/campos_512_v4
+50/264568/campos_512_v4
+50/264572/campos_512_v4
+50/264577/campos_512_v4
+50/264582/campos_512_v4
+50/264589/campos_512_v4
+50/264593/campos_512_v4
+50/264597/campos_512_v4
+50/264619/campos_512_v4
+50/264629/campos_512_v4
+50/264653/campos_512_v4
+50/264662/campos_512_v4
+50/264676/campos_512_v4
+50/264681/campos_512_v4
+50/264682/campos_512_v4
+50/264684/campos_512_v4
+50/264689/campos_512_v4
+50/264690/campos_512_v4
+50/264697/campos_512_v4
+50/264700/campos_512_v4
+50/264704/campos_512_v4
+50/264708/campos_512_v4
+50/264711/campos_512_v4
+50/264735/campos_512_v4
+50/264744/campos_512_v4
+50/264750/campos_512_v4
+50/264771/campos_512_v4
+50/264773/campos_512_v4
+50/264792/campos_512_v4
+50/264812/campos_512_v4
+50/264814/campos_512_v4
+50/264826/campos_512_v4
+50/264835/campos_512_v4
+50/264874/campos_512_v4
+50/264880/campos_512_v4
+50/264889/campos_512_v4
+50/264911/campos_512_v4
+50/264920/campos_512_v4
+50/264927/campos_512_v4
+50/264960/campos_512_v4
+50/264968/campos_512_v4
+50/264969/campos_512_v4
+50/264973/campos_512_v4
+50/264985/campos_512_v4
+51/265006/campos_512_v4
+51/265009/campos_512_v4
+51/265020/campos_512_v4
+51/265022/campos_512_v4
+51/265025/campos_512_v4
+51/265035/campos_512_v4
+51/265041/campos_512_v4
+51/265052/campos_512_v4
+51/265059/campos_512_v4
+51/265064/campos_512_v4
+51/265072/campos_512_v4
+51/265073/campos_512_v4
+51/265075/campos_512_v4
+51/265076/campos_512_v4
+51/265080/campos_512_v4
+51/265091/campos_512_v4
+51/265100/campos_512_v4
+51/265111/campos_512_v4
+51/265122/campos_512_v4
+51/265125/campos_512_v4
+51/265134/campos_512_v4
+51/265138/campos_512_v4
+51/265141/campos_512_v4
+51/265151/campos_512_v4
+51/265175/campos_512_v4
+51/265179/campos_512_v4
+51/265187/campos_512_v4
+51/265197/campos_512_v4
+51/265204/campos_512_v4
+51/265224/campos_512_v4
+51/265229/campos_512_v4
+51/265241/campos_512_v4
+51/265258/campos_512_v4
+51/265259/campos_512_v4
+51/265263/campos_512_v4
+51/265269/campos_512_v4
+51/265274/campos_512_v4
+51/265281/campos_512_v4
+51/265291/campos_512_v4
+51/265294/campos_512_v4
+51/265297/campos_512_v4
+51/265305/campos_512_v4
+51/265311/campos_512_v4
+51/265320/campos_512_v4
+51/265324/campos_512_v4
+51/265354/campos_512_v4
+51/265356/campos_512_v4
+51/265360/campos_512_v4
+51/265362/campos_512_v4
+51/265366/campos_512_v4
+51/265367/campos_512_v4
+51/265375/campos_512_v4
+51/265376/campos_512_v4
+51/265383/campos_512_v4
+51/265384/campos_512_v4
+51/265390/campos_512_v4
+51/265392/campos_512_v4
+51/265394/campos_512_v4
+51/265395/campos_512_v4
+51/265407/campos_512_v4
+51/265421/campos_512_v4
+51/265435/campos_512_v4
+51/265438/campos_512_v4
+51/265467/campos_512_v4
+51/265483/campos_512_v4
+51/265492/campos_512_v4
+51/265505/campos_512_v4
+51/265510/campos_512_v4
+51/265513/campos_512_v4
+51/265521/campos_512_v4
+51/265549/campos_512_v4
+51/265551/campos_512_v4
+51/265553/campos_512_v4
+51/265555/campos_512_v4
+51/265565/campos_512_v4
+51/265575/campos_512_v4
+51/265581/campos_512_v4
+51/265602/campos_512_v4
+51/265607/campos_512_v4
+51/265610/campos_512_v4
+51/265619/campos_512_v4
+51/265620/campos_512_v4
+51/265640/campos_512_v4
+51/265651/campos_512_v4
+51/265723/campos_512_v4
+51/265732/campos_512_v4
+51/265737/campos_512_v4
+51/265743/campos_512_v4
+51/265746/campos_512_v4
+51/265750/campos_512_v4
+51/265766/campos_512_v4
+51/265776/campos_512_v4
+51/265801/campos_512_v4
+51/265833/campos_512_v4
+51/265836/campos_512_v4
+51/265849/campos_512_v4
+51/265864/campos_512_v4
+51/265876/campos_512_v4
+51/265880/campos_512_v4
+51/265907/campos_512_v4
+51/265926/campos_512_v4
+51/265957/campos_512_v4
+51/265958/campos_512_v4
+51/265962/campos_512_v4
+51/265969/campos_512_v4
+51/265979/campos_512_v4
+51/265987/campos_512_v4
+51/265999/campos_512_v4
+51/266013/campos_512_v4
+51/266023/campos_512_v4
+51/266024/campos_512_v4
+51/266025/campos_512_v4
+51/266041/campos_512_v4
+51/266042/campos_512_v4
+51/266054/campos_512_v4
+51/266059/campos_512_v4
+51/266066/campos_512_v4
+51/266081/campos_512_v4
+51/266096/campos_512_v4
+51/266099/campos_512_v4
+51/266101/campos_512_v4
+51/266108/campos_512_v4
+51/266115/campos_512_v4
+51/266145/campos_512_v4
+51/266154/campos_512_v4
+51/266160/campos_512_v4
+51/266163/campos_512_v4
+51/266165/campos_512_v4
+51/266167/campos_512_v4
+51/266171/campos_512_v4
+51/266188/campos_512_v4
+51/266193/campos_512_v4
+51/266195/campos_512_v4
+51/266199/campos_512_v4
+51/266209/campos_512_v4
+51/266217/campos_512_v4
+51/266221/campos_512_v4
+51/266222/campos_512_v4
+51/266224/campos_512_v4
+51/266225/campos_512_v4
+51/266236/campos_512_v4
+51/266244/campos_512_v4
+51/266251/campos_512_v4
+51/266260/campos_512_v4
+51/266268/campos_512_v4
+51/266280/campos_512_v4
+51/266295/campos_512_v4
+51/266307/campos_512_v4
+51/266311/campos_512_v4
+51/266312/campos_512_v4
+51/266314/campos_512_v4
+51/266323/campos_512_v4
+51/266332/campos_512_v4
+51/266345/campos_512_v4
+51/266361/campos_512_v4
+51/266370/campos_512_v4
+51/266389/campos_512_v4
+51/266409/campos_512_v4
+51/266414/campos_512_v4
+51/266422/campos_512_v4
+51/266431/campos_512_v4
+51/266435/campos_512_v4
+51/266437/campos_512_v4
+51/266438/campos_512_v4
+51/266440/campos_512_v4
+51/266446/campos_512_v4
+51/266449/campos_512_v4
+51/266466/campos_512_v4
+51/266468/campos_512_v4
+51/266470/campos_512_v4
+51/266473/campos_512_v4
+51/266480/campos_512_v4
+51/266485/campos_512_v4
+51/266488/campos_512_v4
+51/266494/campos_512_v4
+51/266497/campos_512_v4
+51/266499/campos_512_v4
+51/266503/campos_512_v4
+51/266504/campos_512_v4
+51/266505/campos_512_v4
+51/266512/campos_512_v4
+51/266525/campos_512_v4
+51/266527/campos_512_v4
+51/266530/campos_512_v4
+51/266532/campos_512_v4
+51/266536/campos_512_v4
+51/266545/campos_512_v4
+51/266556/campos_512_v4
+51/266564/campos_512_v4
+51/266568/campos_512_v4
+51/266572/campos_512_v4
+51/266575/campos_512_v4
+51/266584/campos_512_v4
+51/266606/campos_512_v4
+51/266607/campos_512_v4
+51/266616/campos_512_v4
+51/266624/campos_512_v4
+51/266627/campos_512_v4
+51/266629/campos_512_v4
+51/266631/campos_512_v4
+51/266634/campos_512_v4
+51/266635/campos_512_v4
+51/266642/campos_512_v4
+51/266645/campos_512_v4
+51/266648/campos_512_v4
+51/266650/campos_512_v4
+51/266658/campos_512_v4
+51/266661/campos_512_v4
+51/266664/campos_512_v4
+51/266674/campos_512_v4
+51/266676/campos_512_v4
+51/266691/campos_512_v4
+51/266692/campos_512_v4
+51/266708/campos_512_v4
+51/266716/campos_512_v4
+51/266725/campos_512_v4
+51/266735/campos_512_v4
+51/266737/campos_512_v4
+51/266740/campos_512_v4
+51/266747/campos_512_v4
+51/266779/campos_512_v4
+51/266782/campos_512_v4
+51/266793/campos_512_v4
+51/266798/campos_512_v4
+51/266805/campos_512_v4
+51/266807/campos_512_v4
+51/266814/campos_512_v4
+51/266824/campos_512_v4
+51/266827/campos_512_v4
+51/266829/campos_512_v4
+51/266831/campos_512_v4
+51/266842/campos_512_v4
+51/266863/campos_512_v4
+51/266887/campos_512_v4
+51/266889/campos_512_v4
+51/266903/campos_512_v4
+51/266912/campos_512_v4
+51/266918/campos_512_v4
+51/266928/campos_512_v4
+51/266937/campos_512_v4
+51/266946/campos_512_v4
+51/266951/campos_512_v4
+51/266955/campos_512_v4
+51/266964/campos_512_v4
+51/266989/campos_512_v4
+51/266998/campos_512_v4
+51/267005/campos_512_v4
+51/267008/campos_512_v4
+51/267012/campos_512_v4
+51/267029/campos_512_v4
+51/267039/campos_512_v4
+51/267040/campos_512_v4
+51/267042/campos_512_v4
+51/267053/campos_512_v4
+51/267067/campos_512_v4
+51/267068/campos_512_v4
+51/267072/campos_512_v4
+51/267073/campos_512_v4
+51/267076/campos_512_v4
+51/267083/campos_512_v4
+51/267086/campos_512_v4
+51/267103/campos_512_v4
+51/267107/campos_512_v4
+51/267108/campos_512_v4
+51/267113/campos_512_v4
+51/267137/campos_512_v4
+51/267154/campos_512_v4
+51/267178/campos_512_v4
+51/267180/campos_512_v4
+51/267189/campos_512_v4
+51/267197/campos_512_v4
+51/267198/campos_512_v4
+51/267200/campos_512_v4
+51/267219/campos_512_v4
+51/267228/campos_512_v4
+51/267230/campos_512_v4
+51/267250/campos_512_v4
+51/267261/campos_512_v4
+51/267262/campos_512_v4
+51/267267/campos_512_v4
+51/267281/campos_512_v4
+51/267295/campos_512_v4
+51/267297/campos_512_v4
+51/267315/campos_512_v4
+51/267316/campos_512_v4
+51/267319/campos_512_v4
+51/267321/campos_512_v4
+51/267341/campos_512_v4
+51/267367/campos_512_v4
+51/267371/campos_512_v4
+51/267374/campos_512_v4
+51/267375/campos_512_v4
+51/267395/campos_512_v4
+51/267402/campos_512_v4
+51/267423/campos_512_v4
+51/267430/campos_512_v4
+51/267436/campos_512_v4
+51/267446/campos_512_v4
+51/267485/campos_512_v4
+51/267507/campos_512_v4
+51/267509/campos_512_v4
+51/267518/campos_512_v4
+51/267525/campos_512_v4
+51/267529/campos_512_v4
+51/267533/campos_512_v4
+51/267544/campos_512_v4
+51/267563/campos_512_v4
+51/267568/campos_512_v4
+51/267576/campos_512_v4
+51/267584/campos_512_v4
+51/267586/campos_512_v4
+51/267588/campos_512_v4
+51/267589/campos_512_v4
+51/267591/campos_512_v4
+51/267624/campos_512_v4
+51/267632/campos_512_v4
+51/267646/campos_512_v4
+51/267649/campos_512_v4
+51/267656/campos_512_v4
+51/267657/campos_512_v4
+51/267668/campos_512_v4
+51/267677/campos_512_v4
+51/267679/campos_512_v4
+51/267684/campos_512_v4
+51/267715/campos_512_v4
+51/267724/campos_512_v4
+51/267778/campos_512_v4
+51/267784/campos_512_v4
+51/267787/campos_512_v4
+51/267792/campos_512_v4
+51/267797/campos_512_v4
+51/267798/campos_512_v4
+51/267800/campos_512_v4
+51/267804/campos_512_v4
+51/267807/campos_512_v4
+51/267813/campos_512_v4
+51/267814/campos_512_v4
+51/267826/campos_512_v4
+51/267827/campos_512_v4
+51/267835/campos_512_v4
+51/267883/campos_512_v4
+51/267884/campos_512_v4
+51/267893/campos_512_v4
+51/267895/campos_512_v4
+51/267909/campos_512_v4
+51/267911/campos_512_v4
+51/267914/campos_512_v4
+51/267916/campos_512_v4
+51/267931/campos_512_v4
+51/267942/campos_512_v4
+51/267952/campos_512_v4
+51/267956/campos_512_v4
+51/267967/campos_512_v4
+51/267973/campos_512_v4
+51/267982/campos_512_v4
+51/267992/campos_512_v4
+51/267995/campos_512_v4
+51/268003/campos_512_v4
+51/268013/campos_512_v4
+51/268024/campos_512_v4
+51/268038/campos_512_v4
+51/268057/campos_512_v4
+51/268065/campos_512_v4
+51/268068/campos_512_v4
+51/268073/campos_512_v4
+51/268085/campos_512_v4
+51/268095/campos_512_v4
+51/268113/campos_512_v4
+51/268119/campos_512_v4
+51/268124/campos_512_v4
+51/268133/campos_512_v4
+51/268146/campos_512_v4
+51/268150/campos_512_v4
+51/268160/campos_512_v4
+51/268163/campos_512_v4
+51/268169/campos_512_v4
+51/268201/campos_512_v4
+51/268209/campos_512_v4
+51/268218/campos_512_v4
+51/268226/campos_512_v4
+51/268233/campos_512_v4
+51/268238/campos_512_v4
+51/268240/campos_512_v4
+51/268242/campos_512_v4
+51/268262/campos_512_v4
+51/268288/campos_512_v4
+51/268289/campos_512_v4
+51/268290/campos_512_v4
+51/268313/campos_512_v4
+51/268325/campos_512_v4
+51/268345/campos_512_v4
+51/268366/campos_512_v4
+51/268367/campos_512_v4
+51/268374/campos_512_v4
+51/268387/campos_512_v4
+51/268397/campos_512_v4
+51/268421/campos_512_v4
+51/268426/campos_512_v4
+51/268448/campos_512_v4
+51/268454/campos_512_v4
+51/268457/campos_512_v4
+51/268458/campos_512_v4
+51/268466/campos_512_v4
+51/268468/campos_512_v4
+51/268469/campos_512_v4
+51/268474/campos_512_v4
+51/268493/campos_512_v4
+51/268496/campos_512_v4
+51/268499/campos_512_v4
+51/268524/campos_512_v4
+51/268526/campos_512_v4
+51/268539/campos_512_v4
+51/268545/campos_512_v4
+51/268547/campos_512_v4
+51/268549/campos_512_v4
+51/268556/campos_512_v4
+51/268557/campos_512_v4
+51/268596/campos_512_v4
+51/268600/campos_512_v4
+51/268611/campos_512_v4
+51/268629/campos_512_v4
+51/268635/campos_512_v4
+51/268644/campos_512_v4
+51/268652/campos_512_v4
+51/268653/campos_512_v4
+51/268659/campos_512_v4
+51/268691/campos_512_v4
+51/268698/campos_512_v4
+51/268704/campos_512_v4
+51/268712/campos_512_v4
+51/268725/campos_512_v4
+51/268737/campos_512_v4
+51/268746/campos_512_v4
+51/268752/campos_512_v4
+51/268763/campos_512_v4
+51/268767/campos_512_v4
+51/268768/campos_512_v4
+51/268791/campos_512_v4
+51/268797/campos_512_v4
+51/268811/campos_512_v4
+51/268834/campos_512_v4
+51/268837/campos_512_v4
+51/268852/campos_512_v4
+51/268856/campos_512_v4
+51/268879/campos_512_v4
+51/268880/campos_512_v4
+51/268884/campos_512_v4
+51/268888/campos_512_v4
+51/268894/campos_512_v4
+51/268909/campos_512_v4
+51/268935/campos_512_v4
+51/268952/campos_512_v4
+51/268960/campos_512_v4
+51/268968/campos_512_v4
+51/268980/campos_512_v4
+51/268983/campos_512_v4
+51/268988/campos_512_v4
+51/268994/campos_512_v4
+51/268996/campos_512_v4
+51/268999/campos_512_v4
+51/269002/campos_512_v4
+51/269009/campos_512_v4
+51/269026/campos_512_v4
+51/269033/campos_512_v4
+51/269036/campos_512_v4
+51/269040/campos_512_v4
+51/269063/campos_512_v4
+51/269070/campos_512_v4
+51/269099/campos_512_v4
+51/269103/campos_512_v4
+51/269105/campos_512_v4
+51/269106/campos_512_v4
+51/269114/campos_512_v4
+51/269128/campos_512_v4
+51/269147/campos_512_v4
+51/269150/campos_512_v4
+51/269152/campos_512_v4
+51/269161/campos_512_v4
+51/269168/campos_512_v4
+51/269176/campos_512_v4
+51/269177/campos_512_v4
+51/269210/campos_512_v4
+51/269216/campos_512_v4
+51/269222/campos_512_v4
+51/269234/campos_512_v4
+51/269247/campos_512_v4
+51/269248/campos_512_v4
+51/269255/campos_512_v4
+51/269256/campos_512_v4
+51/269265/campos_512_v4
+51/269269/campos_512_v4
+51/269279/campos_512_v4
+51/269284/campos_512_v4
+51/269286/campos_512_v4
+51/269288/campos_512_v4
+51/269301/campos_512_v4
+51/269310/campos_512_v4
+51/269315/campos_512_v4
+51/269334/campos_512_v4
+51/269337/campos_512_v4
+51/269340/campos_512_v4
+51/269341/campos_512_v4
+51/269344/campos_512_v4
+51/269353/campos_512_v4
+51/269355/campos_512_v4
+51/269357/campos_512_v4
+51/269369/campos_512_v4
+51/269375/campos_512_v4
+51/269377/campos_512_v4
+51/269379/campos_512_v4
+51/269386/campos_512_v4
+51/269408/campos_512_v4
+51/269431/campos_512_v4
+51/269441/campos_512_v4
+51/269442/campos_512_v4
+51/269446/campos_512_v4
+51/269447/campos_512_v4
+51/269458/campos_512_v4
+51/269490/campos_512_v4
+51/269492/campos_512_v4
+51/269498/campos_512_v4
+51/269525/campos_512_v4
+51/269527/campos_512_v4
+51/269540/campos_512_v4
+51/269570/campos_512_v4
+51/269573/campos_512_v4
+51/269574/campos_512_v4
+51/269578/campos_512_v4
+51/269580/campos_512_v4
+51/269593/campos_512_v4
+51/269605/campos_512_v4
+51/269609/campos_512_v4
+51/269611/campos_512_v4
+51/269615/campos_512_v4
+51/269626/campos_512_v4
+51/269627/campos_512_v4
+51/269639/campos_512_v4
+51/269669/campos_512_v4
+51/269670/campos_512_v4
+51/269671/campos_512_v4
+51/269677/campos_512_v4
+51/269681/campos_512_v4
+51/269682/campos_512_v4
+51/269693/campos_512_v4
+51/269695/campos_512_v4
+51/269711/campos_512_v4
+51/269716/campos_512_v4
+51/269719/campos_512_v4
+51/269724/campos_512_v4
+51/269728/campos_512_v4
+51/269731/campos_512_v4
+51/269733/campos_512_v4
+51/269736/campos_512_v4
+51/269737/campos_512_v4
+51/269745/campos_512_v4
+51/269753/campos_512_v4
+51/269758/campos_512_v4
+51/269761/campos_512_v4
+51/269765/campos_512_v4
+51/269773/campos_512_v4
+51/269781/campos_512_v4
+51/269783/campos_512_v4
+51/269788/campos_512_v4
+51/269789/campos_512_v4
+51/269792/campos_512_v4
+51/269798/campos_512_v4
+51/269807/campos_512_v4
+51/269818/campos_512_v4
+51/269820/campos_512_v4
+51/269824/campos_512_v4
+51/269827/campos_512_v4
+51/269841/campos_512_v4
+51/269854/campos_512_v4
+51/269874/campos_512_v4
+51/269878/campos_512_v4
+51/269881/campos_512_v4
+51/269892/campos_512_v4
+51/269912/campos_512_v4
+51/269913/campos_512_v4
+51/269915/campos_512_v4
+51/269916/campos_512_v4
+51/269940/campos_512_v4
+51/269942/campos_512_v4
+51/269944/campos_512_v4
+51/269953/campos_512_v4
+51/269958/campos_512_v4
+51/269966/campos_512_v4
+51/269967/campos_512_v4
+51/269975/campos_512_v4
+51/269979/campos_512_v4
+51/269987/campos_512_v4
+51/269994/campos_512_v4
+52/270003/campos_512_v4
+52/270011/campos_512_v4
+52/270013/campos_512_v4
+52/270024/campos_512_v4
+52/270032/campos_512_v4
+52/270052/campos_512_v4
+52/270081/campos_512_v4
+52/270086/campos_512_v4
+52/270092/campos_512_v4
+52/270096/campos_512_v4
+52/270113/campos_512_v4
+52/270125/campos_512_v4
+52/270131/campos_512_v4
+52/270149/campos_512_v4
+52/270150/campos_512_v4
+52/270163/campos_512_v4
+52/270165/campos_512_v4
+52/270172/campos_512_v4
+52/270175/campos_512_v4
+52/270178/campos_512_v4
+52/270182/campos_512_v4
+52/270185/campos_512_v4
+52/270189/campos_512_v4
+52/270202/campos_512_v4
+52/270209/campos_512_v4
+52/270216/campos_512_v4
+52/270217/campos_512_v4
+52/270221/campos_512_v4
+52/270234/campos_512_v4
+52/270248/campos_512_v4
+52/270258/campos_512_v4
+52/270259/campos_512_v4
+52/270267/campos_512_v4
+52/270276/campos_512_v4
+52/270278/campos_512_v4
+52/270279/campos_512_v4
+52/270282/campos_512_v4
+52/270296/campos_512_v4
+52/270297/campos_512_v4
+52/270312/campos_512_v4
+52/270313/campos_512_v4
+52/270314/campos_512_v4
+52/270329/campos_512_v4
+52/270354/campos_512_v4
+52/270363/campos_512_v4
+52/270372/campos_512_v4
+52/270373/campos_512_v4
+52/270380/campos_512_v4
+52/270406/campos_512_v4
+52/270408/campos_512_v4
+52/270416/campos_512_v4
+52/270418/campos_512_v4
+52/270420/campos_512_v4
+52/270426/campos_512_v4
+52/270431/campos_512_v4
+52/270442/campos_512_v4
+52/270446/campos_512_v4
+52/270462/campos_512_v4
+52/270465/campos_512_v4
+52/270479/campos_512_v4
+52/270481/campos_512_v4
+52/270483/campos_512_v4
+52/270485/campos_512_v4
+52/270490/campos_512_v4
+52/270491/campos_512_v4
+52/270500/campos_512_v4
+52/270505/campos_512_v4
+52/270509/campos_512_v4
+52/270518/campos_512_v4
+52/270523/campos_512_v4
+52/270524/campos_512_v4
+52/270526/campos_512_v4
+52/270528/campos_512_v4
+52/270532/campos_512_v4
+52/270541/campos_512_v4
+52/270556/campos_512_v4
+52/270564/campos_512_v4
+52/270569/campos_512_v4
+52/270572/campos_512_v4
+52/270574/campos_512_v4
+52/270582/campos_512_v4
+52/270590/campos_512_v4
+52/270602/campos_512_v4
+52/270611/campos_512_v4
+52/270619/campos_512_v4
+52/270636/campos_512_v4
+52/270637/campos_512_v4
+52/270639/campos_512_v4
+52/270644/campos_512_v4
+52/270664/campos_512_v4
+52/270667/campos_512_v4
+52/270673/campos_512_v4
+52/270676/campos_512_v4
+52/270680/campos_512_v4
+52/270682/campos_512_v4
+52/270686/campos_512_v4
+52/270691/campos_512_v4
+52/270694/campos_512_v4
+52/270698/campos_512_v4
+52/270699/campos_512_v4
+52/270711/campos_512_v4
+52/270717/campos_512_v4
+52/270723/campos_512_v4
+52/270742/campos_512_v4
+52/270749/campos_512_v4
+52/270752/campos_512_v4
+52/270753/campos_512_v4
+52/270758/campos_512_v4
+52/270768/campos_512_v4
+52/270770/campos_512_v4
+52/270773/campos_512_v4
+52/270777/campos_512_v4
+52/270783/campos_512_v4
+52/270792/campos_512_v4
+52/270794/campos_512_v4
+52/270801/campos_512_v4
+52/270804/campos_512_v4
+52/270805/campos_512_v4
+52/270809/campos_512_v4
+52/270813/campos_512_v4
+52/270814/campos_512_v4
+52/270815/campos_512_v4
+52/270834/campos_512_v4
+52/270844/campos_512_v4
+52/270856/campos_512_v4
+52/270858/campos_512_v4
+52/270861/campos_512_v4
+52/270869/campos_512_v4
+52/270875/campos_512_v4
+52/270876/campos_512_v4
+52/270880/campos_512_v4
+52/270889/campos_512_v4
+52/270894/campos_512_v4
+52/270898/campos_512_v4
+52/270900/campos_512_v4
+52/270916/campos_512_v4
+52/270919/campos_512_v4
+52/270920/campos_512_v4
+52/270929/campos_512_v4
+52/270934/campos_512_v4
+52/270941/campos_512_v4
+52/270942/campos_512_v4
+52/270964/campos_512_v4
+52/270985/campos_512_v4
+52/271005/campos_512_v4
+52/271036/campos_512_v4
+52/271041/campos_512_v4
+52/271042/campos_512_v4
+52/271045/campos_512_v4
+52/271051/campos_512_v4
+52/271055/campos_512_v4
+52/271058/campos_512_v4
+52/271059/campos_512_v4
+52/271062/campos_512_v4
+52/271069/campos_512_v4
+52/271079/campos_512_v4
+52/271095/campos_512_v4
+52/271104/campos_512_v4
+52/271116/campos_512_v4
+52/271142/campos_512_v4
+52/271143/campos_512_v4
+52/271146/campos_512_v4
+52/271148/campos_512_v4
+52/271151/campos_512_v4
+52/271173/campos_512_v4
+52/271177/campos_512_v4
+52/271193/campos_512_v4
+52/271200/campos_512_v4
+52/271211/campos_512_v4
+52/271215/campos_512_v4
+52/271240/campos_512_v4
+52/271246/campos_512_v4
+52/271249/campos_512_v4
+52/271250/campos_512_v4
+52/271257/campos_512_v4
+52/271266/campos_512_v4
+52/271277/campos_512_v4
+52/271281/campos_512_v4
+52/271286/campos_512_v4
+52/271306/campos_512_v4
+52/271314/campos_512_v4
+52/271316/campos_512_v4
+52/271330/campos_512_v4
+52/271331/campos_512_v4
+52/271340/campos_512_v4
+52/271343/campos_512_v4
+52/271351/campos_512_v4
+52/271355/campos_512_v4
+52/271370/campos_512_v4
+52/271372/campos_512_v4
+52/271405/campos_512_v4
+52/271407/campos_512_v4
+52/271410/campos_512_v4
+52/271431/campos_512_v4
+52/271444/campos_512_v4
+52/271452/campos_512_v4
+52/271470/campos_512_v4
+52/271478/campos_512_v4
+52/271483/campos_512_v4
+52/271489/campos_512_v4
+52/271500/campos_512_v4
+52/271502/campos_512_v4
+52/271515/campos_512_v4
+52/271521/campos_512_v4
+52/271525/campos_512_v4
+52/271527/campos_512_v4
+52/271530/campos_512_v4
+52/271531/campos_512_v4
+52/271540/campos_512_v4
+52/271548/campos_512_v4
+52/271551/campos_512_v4
+52/271559/campos_512_v4
+52/271564/campos_512_v4
+52/271580/campos_512_v4
+52/271583/campos_512_v4
+52/271587/campos_512_v4
+52/271595/campos_512_v4
+52/271596/campos_512_v4
+52/271598/campos_512_v4
+52/271600/campos_512_v4
+52/271601/campos_512_v4
+52/271603/campos_512_v4
+52/271606/campos_512_v4
+52/271611/campos_512_v4
+52/271617/campos_512_v4
+52/271619/campos_512_v4
+52/271631/campos_512_v4
+52/271638/campos_512_v4
+52/271639/campos_512_v4
+52/271658/campos_512_v4
+52/271676/campos_512_v4
+52/271681/campos_512_v4
+52/271694/campos_512_v4
+52/271701/campos_512_v4
+52/271720/campos_512_v4
+52/271725/campos_512_v4
+52/271737/campos_512_v4
+52/271742/campos_512_v4
+52/271757/campos_512_v4
+52/271758/campos_512_v4
+52/271760/campos_512_v4
+52/271767/campos_512_v4
+52/271775/campos_512_v4
+52/271786/campos_512_v4
+52/271790/campos_512_v4
+52/271808/campos_512_v4
+52/271813/campos_512_v4
+52/271816/campos_512_v4
+52/271819/campos_512_v4
+52/271838/campos_512_v4
+52/271839/campos_512_v4
+52/271847/campos_512_v4
+52/271849/campos_512_v4
+52/271856/campos_512_v4
+52/271867/campos_512_v4
+52/271875/campos_512_v4
+52/271880/campos_512_v4
+52/271886/campos_512_v4
+52/271889/campos_512_v4
+52/271905/campos_512_v4
+52/271908/campos_512_v4
+52/271915/campos_512_v4
+52/271941/campos_512_v4
+52/271948/campos_512_v4
+52/271952/campos_512_v4
+52/271958/campos_512_v4
+52/271972/campos_512_v4
+52/271976/campos_512_v4
+52/271978/campos_512_v4
+52/271981/campos_512_v4
+52/271989/campos_512_v4
+52/272000/campos_512_v4
+52/272006/campos_512_v4
+52/272012/campos_512_v4
+52/272023/campos_512_v4
+52/272031/campos_512_v4
+52/272042/campos_512_v4
+52/272047/campos_512_v4
+52/272055/campos_512_v4
+52/272059/campos_512_v4
+52/272065/campos_512_v4
+52/272085/campos_512_v4
+52/272089/campos_512_v4
+52/272092/campos_512_v4
+52/272132/campos_512_v4
+52/272134/campos_512_v4
+52/272137/campos_512_v4
+52/272153/campos_512_v4
+52/272170/campos_512_v4
+52/272174/campos_512_v4
+52/272175/campos_512_v4
+52/272190/campos_512_v4
+52/272200/campos_512_v4
+52/272201/campos_512_v4
+52/272202/campos_512_v4
+52/272203/campos_512_v4
+52/272206/campos_512_v4
+52/272210/campos_512_v4
+52/272225/campos_512_v4
+52/272235/campos_512_v4
+52/272241/campos_512_v4
+52/272246/campos_512_v4
+52/272247/campos_512_v4
+52/272251/campos_512_v4
+52/272260/campos_512_v4
+52/272268/campos_512_v4
+52/272282/campos_512_v4
+52/272294/campos_512_v4
+52/272295/campos_512_v4
+52/272298/campos_512_v4
+52/272320/campos_512_v4
+52/272324/campos_512_v4
+52/272326/campos_512_v4
+52/272332/campos_512_v4
+52/272333/campos_512_v4
+52/272335/campos_512_v4
+52/272338/campos_512_v4
+52/272353/campos_512_v4
+52/272364/campos_512_v4
+52/272377/campos_512_v4
+52/272380/campos_512_v4
+52/272423/campos_512_v4
+52/272436/campos_512_v4
+52/272438/campos_512_v4
+52/272441/campos_512_v4
+52/272455/campos_512_v4
+52/272456/campos_512_v4
+52/272468/campos_512_v4
+52/272470/campos_512_v4
+52/272472/campos_512_v4
+52/272476/campos_512_v4
+52/272477/campos_512_v4
+52/272500/campos_512_v4
+52/272514/campos_512_v4
+52/272524/campos_512_v4
+52/272525/campos_512_v4
+52/272526/campos_512_v4
+52/272537/campos_512_v4
+52/272566/campos_512_v4
+52/272569/campos_512_v4
+52/272588/campos_512_v4
+52/272590/campos_512_v4
+52/272591/campos_512_v4
+52/272594/campos_512_v4
+52/272607/campos_512_v4
+52/272618/campos_512_v4
+52/272619/campos_512_v4
+52/272623/campos_512_v4
+52/272634/campos_512_v4
+52/272635/campos_512_v4
+52/272678/campos_512_v4
+52/272679/campos_512_v4
+52/272694/campos_512_v4
+52/272695/campos_512_v4
+52/272703/campos_512_v4
+52/272707/campos_512_v4
+52/272717/campos_512_v4
+52/272723/campos_512_v4
+52/272727/campos_512_v4
+52/272728/campos_512_v4
+52/272733/campos_512_v4
+52/272738/campos_512_v4
+52/272744/campos_512_v4
+52/272749/campos_512_v4
+52/272750/campos_512_v4
+52/272751/campos_512_v4
+52/272752/campos_512_v4
+52/272768/campos_512_v4
+52/272781/campos_512_v4
+52/272786/campos_512_v4
+52/272794/campos_512_v4
+52/272800/campos_512_v4
+52/272807/campos_512_v4
+52/272813/campos_512_v4
+52/272814/campos_512_v4
+52/272817/campos_512_v4
+52/272822/campos_512_v4
+52/272824/campos_512_v4
+52/272838/campos_512_v4
+52/272864/campos_512_v4
+52/272872/campos_512_v4
+52/272882/campos_512_v4
+52/272884/campos_512_v4
+52/272888/campos_512_v4
+52/272899/campos_512_v4
+52/272907/campos_512_v4
+52/272915/campos_512_v4
+52/272916/campos_512_v4
+52/272920/campos_512_v4
+52/272926/campos_512_v4
+52/272932/campos_512_v4
+52/272937/campos_512_v4
+52/272938/campos_512_v4
+52/272958/campos_512_v4
+52/272968/campos_512_v4
+52/272982/campos_512_v4
+52/272984/campos_512_v4
+52/272986/campos_512_v4
+52/272990/campos_512_v4
+52/273002/campos_512_v4
+52/273003/campos_512_v4
+52/273004/campos_512_v4
+52/273006/campos_512_v4
+52/273008/campos_512_v4
+52/273018/campos_512_v4
+52/273038/campos_512_v4
+52/273041/campos_512_v4
+52/273046/campos_512_v4
+52/273047/campos_512_v4
+52/273057/campos_512_v4
+52/273067/campos_512_v4
+52/273073/campos_512_v4
+52/273105/campos_512_v4
+52/273117/campos_512_v4
+52/273129/campos_512_v4
+52/273131/campos_512_v4
+52/273137/campos_512_v4
+52/273140/campos_512_v4
+52/273142/campos_512_v4
+52/273151/campos_512_v4
+52/273154/campos_512_v4
+52/273166/campos_512_v4
+52/273175/campos_512_v4
+52/273183/campos_512_v4
+52/273193/campos_512_v4
+52/273198/campos_512_v4
+52/273226/campos_512_v4
+52/273231/campos_512_v4
+52/273234/campos_512_v4
+52/273238/campos_512_v4
+52/273239/campos_512_v4
+52/273248/campos_512_v4
+52/273256/campos_512_v4
+52/273273/campos_512_v4
+52/273290/campos_512_v4
+52/273291/campos_512_v4
+52/273294/campos_512_v4
+52/273296/campos_512_v4
+52/273303/campos_512_v4
+52/273305/campos_512_v4
+52/273309/campos_512_v4
+52/273313/campos_512_v4
+52/273316/campos_512_v4
+52/273333/campos_512_v4
+52/273337/campos_512_v4
+52/273349/campos_512_v4
+52/273353/campos_512_v4
+52/273361/campos_512_v4
+52/273383/campos_512_v4
+52/273384/campos_512_v4
+52/273387/campos_512_v4
+52/273400/campos_512_v4
+52/273404/campos_512_v4
+52/273410/campos_512_v4
+52/273421/campos_512_v4
+52/273431/campos_512_v4
+52/273432/campos_512_v4
+52/273435/campos_512_v4
+52/273450/campos_512_v4
+52/273456/campos_512_v4
+52/273474/campos_512_v4
+52/273486/campos_512_v4
+52/273492/campos_512_v4
+52/273496/campos_512_v4
+52/273501/campos_512_v4
+52/273505/campos_512_v4
+52/273525/campos_512_v4
+52/273526/campos_512_v4
+52/273534/campos_512_v4
+52/273536/campos_512_v4
+52/273551/campos_512_v4
+52/273558/campos_512_v4
+52/273560/campos_512_v4
+52/273579/campos_512_v4
+52/273581/campos_512_v4
+52/273596/campos_512_v4
+52/273601/campos_512_v4
+52/273608/campos_512_v4
+52/273611/campos_512_v4
+52/273615/campos_512_v4
+52/273622/campos_512_v4
+52/273625/campos_512_v4
+52/273633/campos_512_v4
+52/273640/campos_512_v4
+52/273642/campos_512_v4
+52/273651/campos_512_v4
+52/273654/campos_512_v4
+52/273658/campos_512_v4
+52/273659/campos_512_v4
+52/273662/campos_512_v4
+52/273665/campos_512_v4
+52/273672/campos_512_v4
+52/273673/campos_512_v4
+52/273680/campos_512_v4
+52/273703/campos_512_v4
+52/273714/campos_512_v4
+52/273715/campos_512_v4
+52/273720/campos_512_v4
+52/273724/campos_512_v4
+52/273729/campos_512_v4
+52/273748/campos_512_v4
+52/273764/campos_512_v4
+52/273784/campos_512_v4
+52/273791/campos_512_v4
+52/273797/campos_512_v4
+52/273803/campos_512_v4
+52/273826/campos_512_v4
+52/273830/campos_512_v4
+52/273832/campos_512_v4
+52/273833/campos_512_v4
+52/273844/campos_512_v4
+52/273847/campos_512_v4
+52/273854/campos_512_v4
+52/273866/campos_512_v4
+52/273876/campos_512_v4
+52/273885/campos_512_v4
+52/273899/campos_512_v4
+52/273901/campos_512_v4
+52/273909/campos_512_v4
+52/273912/campos_512_v4
+52/273922/campos_512_v4
+52/273937/campos_512_v4
+52/273940/campos_512_v4
+52/273955/campos_512_v4
+52/273957/campos_512_v4
+52/273966/campos_512_v4
+52/273967/campos_512_v4
+52/273982/campos_512_v4
+52/273985/campos_512_v4
+52/273989/campos_512_v4
+52/274000/campos_512_v4
+52/274007/campos_512_v4
+52/274009/campos_512_v4
+52/274025/campos_512_v4
+52/274027/campos_512_v4
+52/274033/campos_512_v4
+52/274034/campos_512_v4
+52/274040/campos_512_v4
+52/274044/campos_512_v4
+52/274057/campos_512_v4
+52/274058/campos_512_v4
+52/274069/campos_512_v4
+52/274071/campos_512_v4
+52/274076/campos_512_v4
+52/274077/campos_512_v4
+52/274080/campos_512_v4
+52/274085/campos_512_v4
+52/274090/campos_512_v4
+52/274104/campos_512_v4
+52/274113/campos_512_v4
+52/274116/campos_512_v4
+52/274131/campos_512_v4
+52/274137/campos_512_v4
+52/274160/campos_512_v4
+52/274166/campos_512_v4
+52/274171/campos_512_v4
+52/274179/campos_512_v4
+52/274183/campos_512_v4
+52/274198/campos_512_v4
+52/274202/campos_512_v4
+52/274213/campos_512_v4
+52/274215/campos_512_v4
+52/274217/campos_512_v4
+52/274221/campos_512_v4
+52/274224/campos_512_v4
+52/274226/campos_512_v4
+52/274229/campos_512_v4
+52/274245/campos_512_v4
+52/274246/campos_512_v4
+52/274249/campos_512_v4
+52/274255/campos_512_v4
+52/274256/campos_512_v4
+52/274269/campos_512_v4
+52/274273/campos_512_v4
+52/274287/campos_512_v4
+52/274292/campos_512_v4
+52/274298/campos_512_v4
+52/274325/campos_512_v4
+52/274332/campos_512_v4
+52/274370/campos_512_v4
+52/274408/campos_512_v4
+52/274417/campos_512_v4
+52/274425/campos_512_v4
+52/274426/campos_512_v4
+52/274427/campos_512_v4
+52/274432/campos_512_v4
+52/274439/campos_512_v4
+52/274442/campos_512_v4
+52/274448/campos_512_v4
+52/274451/campos_512_v4
+52/274453/campos_512_v4
+52/274454/campos_512_v4
+52/274457/campos_512_v4
+52/274458/campos_512_v4
+52/274466/campos_512_v4
+52/274476/campos_512_v4
+52/274479/campos_512_v4
+52/274486/campos_512_v4
+52/274491/campos_512_v4
+52/274494/campos_512_v4
+52/274499/campos_512_v4
+52/274501/campos_512_v4
+52/274502/campos_512_v4
+52/274506/campos_512_v4
+52/274544/campos_512_v4
+52/274547/campos_512_v4
+52/274556/campos_512_v4
+52/274561/campos_512_v4
+52/274585/campos_512_v4
+52/274593/campos_512_v4
+52/274603/campos_512_v4
+52/274605/campos_512_v4
+52/274609/campos_512_v4
+52/274610/campos_512_v4
+52/274613/campos_512_v4
+52/274618/campos_512_v4
+52/274622/campos_512_v4
+52/274624/campos_512_v4
+52/274644/campos_512_v4
+52/274645/campos_512_v4
+52/274648/campos_512_v4
+52/274652/campos_512_v4
+52/274658/campos_512_v4
+52/274667/campos_512_v4
+52/274676/campos_512_v4
+52/274680/campos_512_v4
+52/274686/campos_512_v4
+52/274698/campos_512_v4
+52/274715/campos_512_v4
+52/274716/campos_512_v4
+52/274719/campos_512_v4
+52/274721/campos_512_v4
+52/274743/campos_512_v4
+52/274750/campos_512_v4
+52/274754/campos_512_v4
+52/274762/campos_512_v4
+52/274792/campos_512_v4
+52/274793/campos_512_v4
+52/274797/campos_512_v4
+52/274814/campos_512_v4
+52/274818/campos_512_v4
+52/274828/campos_512_v4
+52/274837/campos_512_v4
+52/274839/campos_512_v4
+52/274840/campos_512_v4
+52/274866/campos_512_v4
+52/274869/campos_512_v4
+52/274879/campos_512_v4
+52/274882/campos_512_v4
+52/274893/campos_512_v4
+52/274895/campos_512_v4
+52/274904/campos_512_v4
+52/274909/campos_512_v4
+52/274913/campos_512_v4
+52/274917/campos_512_v4
+52/274921/campos_512_v4
+52/274922/campos_512_v4
+52/274933/campos_512_v4
+52/274936/campos_512_v4
+52/274937/campos_512_v4
+52/274946/campos_512_v4
+52/274947/campos_512_v4
+52/274974/campos_512_v4
+52/274981/campos_512_v4
+52/274992/campos_512_v4
+52/274994/campos_512_v4
+53/275006/campos_512_v4
+53/275011/campos_512_v4
+53/275012/campos_512_v4
+53/275030/campos_512_v4
+53/275034/campos_512_v4
+53/275044/campos_512_v4
+53/275065/campos_512_v4
+53/275083/campos_512_v4
+53/275088/campos_512_v4
+53/275089/campos_512_v4
+53/275109/campos_512_v4
+53/275113/campos_512_v4
+53/275118/campos_512_v4
+53/275125/campos_512_v4
+53/275140/campos_512_v4
+53/275148/campos_512_v4
+53/275156/campos_512_v4
+53/275158/campos_512_v4
+53/275161/campos_512_v4
+53/275171/campos_512_v4
+53/275173/campos_512_v4
+53/275178/campos_512_v4
+53/275181/campos_512_v4
+53/275182/campos_512_v4
+53/275187/campos_512_v4
+53/275215/campos_512_v4
+53/275261/campos_512_v4
+53/275270/campos_512_v4
+53/275290/campos_512_v4
+53/275291/campos_512_v4
+53/275298/campos_512_v4
+53/275305/campos_512_v4
+53/275321/campos_512_v4
+53/275330/campos_512_v4
+53/275337/campos_512_v4
+53/275355/campos_512_v4
+53/275362/campos_512_v4
+53/275368/campos_512_v4
+53/275370/campos_512_v4
+53/275376/campos_512_v4
+53/275388/campos_512_v4
+53/275393/campos_512_v4
+53/275414/campos_512_v4
+53/275429/campos_512_v4
+53/275435/campos_512_v4
+53/275437/campos_512_v4
+53/275458/campos_512_v4
+53/275462/campos_512_v4
+53/275469/campos_512_v4
+53/275474/campos_512_v4
+53/275476/campos_512_v4
+53/275491/campos_512_v4
+53/275492/campos_512_v4
+53/275496/campos_512_v4
+53/275511/campos_512_v4
+53/275514/campos_512_v4
+53/275516/campos_512_v4
+53/275517/campos_512_v4
+53/275538/campos_512_v4
+53/275546/campos_512_v4
+53/275553/campos_512_v4
+53/275563/campos_512_v4
+53/275564/campos_512_v4
+53/275567/campos_512_v4
+53/275571/campos_512_v4
+53/275573/campos_512_v4
+53/275582/campos_512_v4
+53/275599/campos_512_v4
+53/275610/campos_512_v4
+53/275614/campos_512_v4
+53/275617/campos_512_v4
+53/275636/campos_512_v4
+53/275637/campos_512_v4
+53/275642/campos_512_v4
+53/275648/campos_512_v4
+53/275649/campos_512_v4
+53/275652/campos_512_v4
+53/275657/campos_512_v4
+53/275660/campos_512_v4
+53/275683/campos_512_v4
+53/275687/campos_512_v4
+53/275695/campos_512_v4
+53/275702/campos_512_v4
+53/275704/campos_512_v4
+53/275709/campos_512_v4
+53/275716/campos_512_v4
+53/275718/campos_512_v4
+53/275722/campos_512_v4
+53/275731/campos_512_v4
+53/275732/campos_512_v4
+53/275738/campos_512_v4
+53/275739/campos_512_v4
+53/275741/campos_512_v4
+53/275742/campos_512_v4
+53/275753/campos_512_v4
+53/275755/campos_512_v4
+53/275756/campos_512_v4
+53/275808/campos_512_v4
+53/275811/campos_512_v4
+53/275813/campos_512_v4
+53/275815/campos_512_v4
+53/275819/campos_512_v4
+53/275833/campos_512_v4
+53/275836/campos_512_v4
+53/275839/campos_512_v4
+53/275849/campos_512_v4
+53/275858/campos_512_v4
+53/275860/campos_512_v4
+53/275861/campos_512_v4
+53/275881/campos_512_v4
+53/275896/campos_512_v4
+53/275899/campos_512_v4
+53/275902/campos_512_v4
+53/275905/campos_512_v4
+53/275915/campos_512_v4
+53/275921/campos_512_v4
+53/275939/campos_512_v4
+53/275945/campos_512_v4
+53/275952/campos_512_v4
+53/275957/campos_512_v4
+53/275963/campos_512_v4
+53/275987/campos_512_v4
+53/275996/campos_512_v4
+53/276007/campos_512_v4
+53/276013/campos_512_v4
+53/276031/campos_512_v4
+53/276055/campos_512_v4
+53/276079/campos_512_v4
+53/276089/campos_512_v4
+53/276092/campos_512_v4
+53/276099/campos_512_v4
+53/276104/campos_512_v4
+53/276105/campos_512_v4
+53/276106/campos_512_v4
+53/276123/campos_512_v4
+53/276147/campos_512_v4
+53/276161/campos_512_v4
+53/276167/campos_512_v4
+53/276173/campos_512_v4
+53/276195/campos_512_v4
+53/276197/campos_512_v4
+53/276214/campos_512_v4
+53/276219/campos_512_v4
+53/276223/campos_512_v4
+53/276225/campos_512_v4
+53/276242/campos_512_v4
+53/276247/campos_512_v4
+53/276257/campos_512_v4
+53/276264/campos_512_v4
+53/276267/campos_512_v4
+53/276298/campos_512_v4
+53/276300/campos_512_v4
+53/276312/campos_512_v4
+53/276315/campos_512_v4
+53/276345/campos_512_v4
+53/276351/campos_512_v4
+53/276354/campos_512_v4
+53/276356/campos_512_v4
+53/276361/campos_512_v4
+53/276365/campos_512_v4
+53/276379/campos_512_v4
+53/276381/campos_512_v4
+53/276398/campos_512_v4
+53/276413/campos_512_v4
+53/276424/campos_512_v4
+53/276448/campos_512_v4
+53/276456/campos_512_v4
+53/276475/campos_512_v4
+53/276484/campos_512_v4
+53/276490/campos_512_v4
+53/276499/campos_512_v4
+53/276524/campos_512_v4
+53/276531/campos_512_v4
+53/276535/campos_512_v4
+53/276555/campos_512_v4
+53/276559/campos_512_v4
+53/276562/campos_512_v4
+53/276573/campos_512_v4
+53/276586/campos_512_v4
+53/276587/campos_512_v4
+53/276605/campos_512_v4
+53/276607/campos_512_v4
+53/276611/campos_512_v4
+53/276615/campos_512_v4
+53/276630/campos_512_v4
+53/276632/campos_512_v4
+53/276639/campos_512_v4
+53/276647/campos_512_v4
+53/276648/campos_512_v4
+53/276680/campos_512_v4
+53/276681/campos_512_v4
+53/276691/campos_512_v4
+53/276700/campos_512_v4
+53/276704/campos_512_v4
+53/276717/campos_512_v4
+53/276723/campos_512_v4
+53/276734/campos_512_v4
+53/276737/campos_512_v4
+53/276746/campos_512_v4
+53/276750/campos_512_v4
+53/276751/campos_512_v4
+53/276759/campos_512_v4
+53/276763/campos_512_v4
+53/276786/campos_512_v4
+53/276796/campos_512_v4
+53/276803/campos_512_v4
+53/276814/campos_512_v4
+53/276815/campos_512_v4
+53/276821/campos_512_v4
+53/276832/campos_512_v4
+53/276841/campos_512_v4
+53/276845/campos_512_v4
+53/276851/campos_512_v4
+53/276862/campos_512_v4
+53/276886/campos_512_v4
+53/276890/campos_512_v4
+53/276901/campos_512_v4
+53/276907/campos_512_v4
+53/276915/campos_512_v4
+53/276917/campos_512_v4
+53/276919/campos_512_v4
+53/276934/campos_512_v4
+53/276943/campos_512_v4
+53/276951/campos_512_v4
+53/276955/campos_512_v4
+53/276962/campos_512_v4
+53/276979/campos_512_v4
+53/276987/campos_512_v4
+53/276989/campos_512_v4
+53/276997/campos_512_v4
+53/277003/campos_512_v4
+53/277048/campos_512_v4
+53/277052/campos_512_v4
+53/277063/campos_512_v4
+53/277066/campos_512_v4
+53/277067/campos_512_v4
+53/277071/campos_512_v4
+53/277075/campos_512_v4
+53/277081/campos_512_v4
+53/277087/campos_512_v4
+53/277088/campos_512_v4
+53/277089/campos_512_v4
+53/277090/campos_512_v4
+53/277091/campos_512_v4
+53/277096/campos_512_v4
+53/277120/campos_512_v4
+53/277166/campos_512_v4
+53/277167/campos_512_v4
+53/277179/campos_512_v4
+53/277180/campos_512_v4
+53/277195/campos_512_v4
+53/277197/campos_512_v4
+53/277199/campos_512_v4
+53/277201/campos_512_v4
+53/277204/campos_512_v4
+53/277209/campos_512_v4
+53/277214/campos_512_v4
+53/277218/campos_512_v4
+53/277223/campos_512_v4
+53/277229/campos_512_v4
+53/277230/campos_512_v4
+53/277234/campos_512_v4
+53/277242/campos_512_v4
+53/277259/campos_512_v4
+53/277281/campos_512_v4
+53/277296/campos_512_v4
+53/277300/campos_512_v4
+53/277330/campos_512_v4
+53/277333/campos_512_v4
+53/277334/campos_512_v4
+53/277366/campos_512_v4
+53/277370/campos_512_v4
+53/277376/campos_512_v4
+53/277380/campos_512_v4
+53/277383/campos_512_v4
+53/277401/campos_512_v4
+53/277402/campos_512_v4
+53/277406/campos_512_v4
+53/277407/campos_512_v4
+53/277416/campos_512_v4
+53/277417/campos_512_v4
+53/277431/campos_512_v4
+53/277443/campos_512_v4
+53/277450/campos_512_v4
+53/277451/campos_512_v4
+53/277468/campos_512_v4
+53/277475/campos_512_v4
+53/277490/campos_512_v4
+53/277492/campos_512_v4
+53/277494/campos_512_v4
+53/277495/campos_512_v4
+53/277513/campos_512_v4
+53/277514/campos_512_v4
+53/277520/campos_512_v4
+53/277522/campos_512_v4
+53/277534/campos_512_v4
+53/277535/campos_512_v4
+53/277542/campos_512_v4
+53/277552/campos_512_v4
+53/277553/campos_512_v4
+53/277564/campos_512_v4
+53/277568/campos_512_v4
+53/277594/campos_512_v4
+53/277596/campos_512_v4
+53/277606/campos_512_v4
+53/277617/campos_512_v4
+53/277626/campos_512_v4
+53/277627/campos_512_v4
+53/277632/campos_512_v4
+53/277637/campos_512_v4
+53/277650/campos_512_v4
+53/277674/campos_512_v4
+53/277675/campos_512_v4
+53/277680/campos_512_v4
+53/277682/campos_512_v4
+53/277684/campos_512_v4
+53/277707/campos_512_v4
+53/277713/campos_512_v4
+53/277714/campos_512_v4
+53/277720/campos_512_v4
+53/277726/campos_512_v4
+53/277729/campos_512_v4
+53/277741/campos_512_v4
+53/277742/campos_512_v4
+53/277756/campos_512_v4
+53/277773/campos_512_v4
+53/277775/campos_512_v4
+53/277791/campos_512_v4
+53/277802/campos_512_v4
+53/277805/campos_512_v4
+53/277806/campos_512_v4
+53/277815/campos_512_v4
+53/277839/campos_512_v4
+53/277845/campos_512_v4
+53/277868/campos_512_v4
+53/277887/campos_512_v4
+53/277888/campos_512_v4
+53/277889/campos_512_v4
+53/277892/campos_512_v4
+53/277899/campos_512_v4
+53/277902/campos_512_v4
+53/277912/campos_512_v4
+53/277914/campos_512_v4
+53/277918/campos_512_v4
+53/277927/campos_512_v4
+53/277929/campos_512_v4
+53/277938/campos_512_v4
+53/277952/campos_512_v4
+53/277964/campos_512_v4
+53/277965/campos_512_v4
+53/277970/campos_512_v4
+53/277971/campos_512_v4
+53/277973/campos_512_v4
+53/277976/campos_512_v4
+53/277984/campos_512_v4
+53/277992/campos_512_v4
+53/278012/campos_512_v4
+53/278013/campos_512_v4
+53/278054/campos_512_v4
+53/278060/campos_512_v4
+53/278069/campos_512_v4
+53/278082/campos_512_v4
+53/278092/campos_512_v4
+53/278093/campos_512_v4
+53/278097/campos_512_v4
+53/278102/campos_512_v4
+53/278118/campos_512_v4
+53/278121/campos_512_v4
+53/278134/campos_512_v4
+53/278141/campos_512_v4
+53/278144/campos_512_v4
+53/278150/campos_512_v4
+53/278161/campos_512_v4
+53/278171/campos_512_v4
+53/278194/campos_512_v4
+53/278203/campos_512_v4
+53/278216/campos_512_v4
+53/278236/campos_512_v4
+53/278241/campos_512_v4
+53/278253/campos_512_v4
+53/278266/campos_512_v4
+53/278297/campos_512_v4
+53/278303/campos_512_v4
+53/278313/campos_512_v4
+53/278330/campos_512_v4
+53/278334/campos_512_v4
+53/278338/campos_512_v4
+53/278345/campos_512_v4
+53/278364/campos_512_v4
+53/278376/campos_512_v4
+53/278377/campos_512_v4
+53/278379/campos_512_v4
+53/278384/campos_512_v4
+53/278387/campos_512_v4
+53/278396/campos_512_v4
+53/278407/campos_512_v4
+53/278416/campos_512_v4
+53/278420/campos_512_v4
+53/278422/campos_512_v4
+53/278423/campos_512_v4
+53/278426/campos_512_v4
+53/278429/campos_512_v4
+53/278430/campos_512_v4
+53/278448/campos_512_v4
+53/278458/campos_512_v4
+53/278463/campos_512_v4
+53/278467/campos_512_v4
+53/278477/campos_512_v4
+53/278479/campos_512_v4
+53/278481/campos_512_v4
+53/278487/campos_512_v4
+53/278491/campos_512_v4
+53/278502/campos_512_v4
+53/278512/campos_512_v4
+53/278528/campos_512_v4
+53/278531/campos_512_v4
+53/278541/campos_512_v4
+53/278552/campos_512_v4
+53/278560/campos_512_v4
+53/278563/campos_512_v4
+53/278566/campos_512_v4
+53/278568/campos_512_v4
+53/278576/campos_512_v4
+53/278577/campos_512_v4
+53/278597/campos_512_v4
+53/278600/campos_512_v4
+53/278608/campos_512_v4
+53/278609/campos_512_v4
+53/278618/campos_512_v4
+53/278626/campos_512_v4
+53/278633/campos_512_v4
+53/278646/campos_512_v4
+53/278656/campos_512_v4
+53/278669/campos_512_v4
+53/278670/campos_512_v4
+53/278673/campos_512_v4
+53/278674/campos_512_v4
+53/278680/campos_512_v4
+53/278684/campos_512_v4
+53/278686/campos_512_v4
+53/278690/campos_512_v4
+53/278720/campos_512_v4
+53/278721/campos_512_v4
+53/278731/campos_512_v4
+53/278749/campos_512_v4
+53/278753/campos_512_v4
+53/278761/campos_512_v4
+53/278766/campos_512_v4
+53/278768/campos_512_v4
+53/278778/campos_512_v4
+53/278839/campos_512_v4
+53/278850/campos_512_v4
+53/278858/campos_512_v4
+53/278864/campos_512_v4
+53/278865/campos_512_v4
+53/278871/campos_512_v4
+53/278872/campos_512_v4
+53/278884/campos_512_v4
+53/278891/campos_512_v4
+53/278894/campos_512_v4
+53/278896/campos_512_v4
+53/278913/campos_512_v4
+53/278919/campos_512_v4
+53/278921/campos_512_v4
+53/278932/campos_512_v4
+53/278950/campos_512_v4
+53/278951/campos_512_v4
+53/278975/campos_512_v4
+53/278976/campos_512_v4
+53/278988/campos_512_v4
+53/278995/campos_512_v4
+53/278996/campos_512_v4
+53/279006/campos_512_v4
+53/279011/campos_512_v4
+53/279015/campos_512_v4
+53/279018/campos_512_v4
+53/279024/campos_512_v4
+53/279029/campos_512_v4
+53/279032/campos_512_v4
+53/279041/campos_512_v4
+53/279047/campos_512_v4
+53/279051/campos_512_v4
+53/279062/campos_512_v4
+53/279064/campos_512_v4
+53/279075/campos_512_v4
+53/279081/campos_512_v4
+53/279088/campos_512_v4
+53/279095/campos_512_v4
+53/279100/campos_512_v4
+53/279101/campos_512_v4
+53/279114/campos_512_v4
+53/279119/campos_512_v4
+53/279126/campos_512_v4
+53/279130/campos_512_v4
+53/279144/campos_512_v4
+53/279145/campos_512_v4
+53/279156/campos_512_v4
+53/279162/campos_512_v4
+53/279164/campos_512_v4
+53/279171/campos_512_v4
+53/279184/campos_512_v4
+53/279185/campos_512_v4
+53/279189/campos_512_v4
+53/279190/campos_512_v4
+53/279194/campos_512_v4
+53/279207/campos_512_v4
+53/279213/campos_512_v4
+53/279235/campos_512_v4
+53/279238/campos_512_v4
+53/279254/campos_512_v4
+53/279258/campos_512_v4
+53/279260/campos_512_v4
+53/279264/campos_512_v4
+53/279288/campos_512_v4
+53/279308/campos_512_v4
+53/279312/campos_512_v4
+53/279330/campos_512_v4
+53/279338/campos_512_v4
+53/279340/campos_512_v4
+53/279351/campos_512_v4
+53/279380/campos_512_v4
+53/279384/campos_512_v4
+53/279413/campos_512_v4
+53/279417/campos_512_v4
+53/279419/campos_512_v4
+53/279434/campos_512_v4
+53/279436/campos_512_v4
+53/279441/campos_512_v4
+53/279446/campos_512_v4
+53/279448/campos_512_v4
+53/279451/campos_512_v4
+53/279458/campos_512_v4
+53/279465/campos_512_v4
+53/279468/campos_512_v4
+53/279469/campos_512_v4
+53/279479/campos_512_v4
+53/279483/campos_512_v4
+53/279498/campos_512_v4
+53/279499/campos_512_v4
+53/279500/campos_512_v4
+53/279503/campos_512_v4
+53/279539/campos_512_v4
+53/279544/campos_512_v4
+53/279560/campos_512_v4
+53/279563/campos_512_v4
+53/279582/campos_512_v4
+53/279594/campos_512_v4
+53/279599/campos_512_v4
+53/279602/campos_512_v4
+53/279610/campos_512_v4
+53/279627/campos_512_v4
+53/279632/campos_512_v4
+53/279636/campos_512_v4
+53/279640/campos_512_v4
+53/279641/campos_512_v4
+53/279645/campos_512_v4
+53/279654/campos_512_v4
+53/279657/campos_512_v4
+53/279669/campos_512_v4
+53/279671/campos_512_v4
+53/279724/campos_512_v4
+53/279726/campos_512_v4
+53/279732/campos_512_v4
+53/279733/campos_512_v4
+53/279747/campos_512_v4
+53/279767/campos_512_v4
+53/279768/campos_512_v4
+53/279770/campos_512_v4
+53/279772/campos_512_v4
+53/279773/campos_512_v4
+53/279787/campos_512_v4
+53/279810/campos_512_v4
+53/279820/campos_512_v4
+53/279824/campos_512_v4
+53/279827/campos_512_v4
+53/279832/campos_512_v4
+53/279835/campos_512_v4
+53/279837/campos_512_v4
+53/279838/campos_512_v4
+53/279841/campos_512_v4
+53/279846/campos_512_v4
+53/279854/campos_512_v4
+53/279859/campos_512_v4
+53/279860/campos_512_v4
+53/279862/campos_512_v4
+53/279864/campos_512_v4
+53/279869/campos_512_v4
+53/279870/campos_512_v4
+53/279871/campos_512_v4
+53/279883/campos_512_v4
+53/279892/campos_512_v4
+53/279901/campos_512_v4
+53/279902/campos_512_v4
+53/279903/campos_512_v4
+53/279905/campos_512_v4
+53/279916/campos_512_v4
+53/279917/campos_512_v4
+53/279922/campos_512_v4
+53/279926/campos_512_v4
+53/279930/campos_512_v4
+53/279949/campos_512_v4
+53/279954/campos_512_v4
+53/279962/campos_512_v4
+53/279969/campos_512_v4
+53/279982/campos_512_v4
+53/279984/campos_512_v4
+54/280008/campos_512_v4
+54/280009/campos_512_v4
+54/280010/campos_512_v4
+54/280021/campos_512_v4
+54/280027/campos_512_v4
+54/280029/campos_512_v4
+54/280035/campos_512_v4
+54/280061/campos_512_v4
+54/280064/campos_512_v4
+54/280066/campos_512_v4
+54/280076/campos_512_v4
+54/280080/campos_512_v4
+54/280085/campos_512_v4
+54/280090/campos_512_v4
+54/280094/campos_512_v4
+54/280098/campos_512_v4
+54/280102/campos_512_v4
+54/280103/campos_512_v4
+54/280106/campos_512_v4
+54/280110/campos_512_v4
+54/280112/campos_512_v4
+54/280121/campos_512_v4
+54/280128/campos_512_v4
+54/280150/campos_512_v4
+54/280152/campos_512_v4
+54/280177/campos_512_v4
+54/280179/campos_512_v4
+54/280203/campos_512_v4
+54/280209/campos_512_v4
+54/280214/campos_512_v4
+54/280223/campos_512_v4
+54/280233/campos_512_v4
+54/280241/campos_512_v4
+54/280253/campos_512_v4
+54/280259/campos_512_v4
+54/280266/campos_512_v4
+54/280277/campos_512_v4
+54/280282/campos_512_v4
+54/280302/campos_512_v4
+54/280305/campos_512_v4
+54/280325/campos_512_v4
+54/280332/campos_512_v4
+54/280361/campos_512_v4
+54/280370/campos_512_v4
+54/280377/campos_512_v4
+54/280386/campos_512_v4
+54/280438/campos_512_v4
+54/280439/campos_512_v4
+54/280460/campos_512_v4
+54/280469/campos_512_v4
+54/280479/campos_512_v4
+54/280486/campos_512_v4
+54/280495/campos_512_v4
+54/280505/campos_512_v4
+54/280525/campos_512_v4
+54/280532/campos_512_v4
+54/280540/campos_512_v4
+54/280547/campos_512_v4
+54/280550/campos_512_v4
+54/280560/campos_512_v4
+54/280571/campos_512_v4
+54/280573/campos_512_v4
+54/280578/campos_512_v4
+54/280588/campos_512_v4
+54/280597/campos_512_v4
+54/280598/campos_512_v4
+54/280599/campos_512_v4
+54/280606/campos_512_v4
+54/280607/campos_512_v4
+54/280611/campos_512_v4
+54/280618/campos_512_v4
+54/280641/campos_512_v4
+54/280647/campos_512_v4
+54/280655/campos_512_v4
+54/280659/campos_512_v4
+54/280674/campos_512_v4
+54/280687/campos_512_v4
+54/280689/campos_512_v4
+54/280691/campos_512_v4
+54/280696/campos_512_v4
+54/280708/campos_512_v4
+54/280710/campos_512_v4
+54/280716/campos_512_v4
+54/280719/campos_512_v4
+54/280731/campos_512_v4
+54/280742/campos_512_v4
+54/280754/campos_512_v4
+54/280755/campos_512_v4
+54/280764/campos_512_v4
+54/280770/campos_512_v4
+54/280784/campos_512_v4
+54/280792/campos_512_v4
+54/280800/campos_512_v4
+54/280825/campos_512_v4
+54/280833/campos_512_v4
+54/280836/campos_512_v4
+54/280838/campos_512_v4
+54/280846/campos_512_v4
+54/280849/campos_512_v4
+54/280858/campos_512_v4
+54/280862/campos_512_v4
+54/280873/campos_512_v4
+54/280897/campos_512_v4
+54/280901/campos_512_v4
+54/280906/campos_512_v4
+54/280922/campos_512_v4
+54/280941/campos_512_v4
+54/280944/campos_512_v4
+54/280948/campos_512_v4
+54/280965/campos_512_v4
+54/280967/campos_512_v4
+54/280974/campos_512_v4
+54/280975/campos_512_v4
+54/280988/campos_512_v4
+54/280993/campos_512_v4
+54/281003/campos_512_v4
+54/281007/campos_512_v4
+54/281021/campos_512_v4
+54/281036/campos_512_v4
+54/281041/campos_512_v4
+54/281048/campos_512_v4
+54/281050/campos_512_v4
+54/281052/campos_512_v4
+54/281057/campos_512_v4
+54/281079/campos_512_v4
+54/281098/campos_512_v4
+54/281100/campos_512_v4
+54/281112/campos_512_v4
+54/281117/campos_512_v4
+54/281135/campos_512_v4
+54/281136/campos_512_v4
+54/281143/campos_512_v4
+54/281164/campos_512_v4
+54/281171/campos_512_v4
+54/281177/campos_512_v4
+54/281184/campos_512_v4
+54/281185/campos_512_v4
+54/281206/campos_512_v4
+54/281213/campos_512_v4
+54/281223/campos_512_v4
+54/281245/campos_512_v4
+54/281269/campos_512_v4
+54/281286/campos_512_v4
+54/281294/campos_512_v4
+54/281313/campos_512_v4
+54/281328/campos_512_v4
+54/281340/campos_512_v4
+54/281341/campos_512_v4
+54/281342/campos_512_v4
+54/281353/campos_512_v4
+54/281361/campos_512_v4
+54/281363/campos_512_v4
+54/281380/campos_512_v4
+54/281383/campos_512_v4
+54/281386/campos_512_v4
+54/281392/campos_512_v4
+54/281396/campos_512_v4
+54/281407/campos_512_v4
+54/281412/campos_512_v4
+54/281418/campos_512_v4
+54/281431/campos_512_v4
+54/281444/campos_512_v4
+54/281472/campos_512_v4
+54/281475/campos_512_v4
+54/281489/campos_512_v4
+54/281497/campos_512_v4
+54/281512/campos_512_v4
+54/281533/campos_512_v4
+54/281552/campos_512_v4
+54/281565/campos_512_v4
+54/281575/campos_512_v4
+54/281597/campos_512_v4
+54/281615/campos_512_v4
+54/281618/campos_512_v4
+54/281654/campos_512_v4
+54/281660/campos_512_v4
+54/281681/campos_512_v4
+54/281700/campos_512_v4
+54/281702/campos_512_v4
+54/281712/campos_512_v4
+54/281714/campos_512_v4
+54/281737/campos_512_v4
+54/281745/campos_512_v4
+54/281753/campos_512_v4
+54/281759/campos_512_v4
+54/281760/campos_512_v4
+54/281787/campos_512_v4
+54/281800/campos_512_v4
+54/281812/campos_512_v4
+54/281816/campos_512_v4
+54/281820/campos_512_v4
+54/281825/campos_512_v4
+54/281837/campos_512_v4
+54/281840/campos_512_v4
+54/281845/campos_512_v4
+54/281846/campos_512_v4
+54/281847/campos_512_v4
+54/281861/campos_512_v4
+54/281865/campos_512_v4
+54/281866/campos_512_v4
+54/281897/campos_512_v4
+54/281899/campos_512_v4
+54/281910/campos_512_v4
+54/281917/campos_512_v4
+54/281918/campos_512_v4
+54/281926/campos_512_v4
+54/281927/campos_512_v4
+54/281937/campos_512_v4
+54/281939/campos_512_v4
+54/281946/campos_512_v4
+54/281954/campos_512_v4
+54/281959/campos_512_v4
+54/281960/campos_512_v4
+54/281961/campos_512_v4
+54/281976/campos_512_v4
+54/281982/campos_512_v4
+54/281994/campos_512_v4
+54/281997/campos_512_v4
+54/282000/campos_512_v4
+54/282007/campos_512_v4
+54/282008/campos_512_v4
+54/282020/campos_512_v4
+54/282030/campos_512_v4
+54/282039/campos_512_v4
+54/282041/campos_512_v4
+54/282047/campos_512_v4
+54/282068/campos_512_v4
+54/282071/campos_512_v4
+54/282074/campos_512_v4
+54/282101/campos_512_v4
+54/282103/campos_512_v4
+54/282112/campos_512_v4
+54/282115/campos_512_v4
+54/282140/campos_512_v4
+54/282148/campos_512_v4
+54/282154/campos_512_v4
+54/282173/campos_512_v4
+54/282174/campos_512_v4
+54/282175/campos_512_v4
+54/282177/campos_512_v4
+54/282190/campos_512_v4
+54/282198/campos_512_v4
+54/282200/campos_512_v4
+54/282204/campos_512_v4
+54/282205/campos_512_v4
+54/282218/campos_512_v4
+54/282219/campos_512_v4
+54/282235/campos_512_v4
+54/282244/campos_512_v4
+54/282249/campos_512_v4
+54/282254/campos_512_v4
+54/282280/campos_512_v4
+54/282281/campos_512_v4
+54/282294/campos_512_v4
+54/282305/campos_512_v4
+54/282308/campos_512_v4
+54/282311/campos_512_v4
+54/282319/campos_512_v4
+54/282320/campos_512_v4
+54/282330/campos_512_v4
+54/282336/campos_512_v4
+54/282339/campos_512_v4
+54/282348/campos_512_v4
+54/282373/campos_512_v4
+54/282375/campos_512_v4
+54/282376/campos_512_v4
+54/282384/campos_512_v4
+54/282385/campos_512_v4
+54/282390/campos_512_v4
+54/282396/campos_512_v4
+54/282402/campos_512_v4
+54/282406/campos_512_v4
+54/282408/campos_512_v4
+54/282421/campos_512_v4
+54/282434/campos_512_v4
+54/282437/campos_512_v4
+54/282442/campos_512_v4
+54/282462/campos_512_v4
+54/282471/campos_512_v4
+54/282475/campos_512_v4
+54/282480/campos_512_v4
+54/282484/campos_512_v4
+54/282491/campos_512_v4
+54/282495/campos_512_v4
+54/282525/campos_512_v4
+54/282544/campos_512_v4
+54/282560/campos_512_v4
+54/282563/campos_512_v4
+54/282582/campos_512_v4
+54/282586/campos_512_v4
+54/282594/campos_512_v4
+54/282597/campos_512_v4
+54/282601/campos_512_v4
+54/282605/campos_512_v4
+54/282613/campos_512_v4
+54/282618/campos_512_v4
+54/282620/campos_512_v4
+54/282647/campos_512_v4
+54/282656/campos_512_v4
+54/282675/campos_512_v4
+54/282678/campos_512_v4
+54/282685/campos_512_v4
+54/282686/campos_512_v4
+54/282697/campos_512_v4
+54/282700/campos_512_v4
+54/282708/campos_512_v4
+54/282749/campos_512_v4
+54/282756/campos_512_v4
+54/282782/campos_512_v4
+54/282802/campos_512_v4
+54/282814/campos_512_v4
+54/282816/campos_512_v4
+54/282820/campos_512_v4
+54/282821/campos_512_v4
+54/282828/campos_512_v4
+54/282833/campos_512_v4
+54/282841/campos_512_v4
+54/282851/campos_512_v4
+54/282856/campos_512_v4
+54/282864/campos_512_v4
+54/282878/campos_512_v4
+54/282891/campos_512_v4
+54/282897/campos_512_v4
+54/282900/campos_512_v4
+54/282909/campos_512_v4
+54/282925/campos_512_v4
+54/282933/campos_512_v4
+54/282952/campos_512_v4
+54/282956/campos_512_v4
+54/282970/campos_512_v4
+54/282977/campos_512_v4
+54/283000/campos_512_v4
+54/283002/campos_512_v4
+54/283004/campos_512_v4
+54/283011/campos_512_v4
+54/283014/campos_512_v4
+54/283017/campos_512_v4
+54/283019/campos_512_v4
+54/283025/campos_512_v4
+54/283033/campos_512_v4
+54/283063/campos_512_v4
+54/283072/campos_512_v4
+54/283073/campos_512_v4
+54/283081/campos_512_v4
+54/283082/campos_512_v4
+54/283098/campos_512_v4
+54/283104/campos_512_v4
+54/283105/campos_512_v4
+54/283106/campos_512_v4
+54/283108/campos_512_v4
+54/283110/campos_512_v4
+54/283116/campos_512_v4
+54/283122/campos_512_v4
+54/283131/campos_512_v4
+54/283137/campos_512_v4
+54/283156/campos_512_v4
+54/283163/campos_512_v4
+54/283168/campos_512_v4
+54/283175/campos_512_v4
+54/283176/campos_512_v4
+54/283183/campos_512_v4
+54/283190/campos_512_v4
+54/283192/campos_512_v4
+54/283197/campos_512_v4
+54/283198/campos_512_v4
+54/283202/campos_512_v4
+54/283206/campos_512_v4
+54/283210/campos_512_v4
+54/283223/campos_512_v4
+54/283227/campos_512_v4
+54/283234/campos_512_v4
+54/283239/campos_512_v4
+54/283240/campos_512_v4
+54/283247/campos_512_v4
+54/283254/campos_512_v4
+54/283271/campos_512_v4
+54/283274/campos_512_v4
+54/283286/campos_512_v4
+54/283298/campos_512_v4
+54/283304/campos_512_v4
+54/283320/campos_512_v4
+54/283331/campos_512_v4
+54/283343/campos_512_v4
+54/283353/campos_512_v4
+54/283356/campos_512_v4
+54/283366/campos_512_v4
+54/283368/campos_512_v4
+54/283371/campos_512_v4
+54/283372/campos_512_v4
+54/283373/campos_512_v4
+54/283381/campos_512_v4
+54/283387/campos_512_v4
+54/283391/campos_512_v4
+54/283406/campos_512_v4
+54/283409/campos_512_v4
+54/283416/campos_512_v4
+54/283420/campos_512_v4
+54/283429/campos_512_v4
+54/283432/campos_512_v4
+54/283449/campos_512_v4
+54/283457/campos_512_v4
+54/283463/campos_512_v4
+54/283477/campos_512_v4
+54/283478/campos_512_v4
+54/283510/campos_512_v4
+54/283513/campos_512_v4
+54/283527/campos_512_v4
+54/283529/campos_512_v4
+54/283532/campos_512_v4
+54/283533/campos_512_v4
+54/283541/campos_512_v4
+54/283547/campos_512_v4
+54/283572/campos_512_v4
+54/283574/campos_512_v4
+54/283580/campos_512_v4
+54/283583/campos_512_v4
+54/283597/campos_512_v4
+54/283600/campos_512_v4
+54/283602/campos_512_v4
+54/283609/campos_512_v4
+54/283611/campos_512_v4
+54/283624/campos_512_v4
+54/283634/campos_512_v4
+54/283642/campos_512_v4
+54/283643/campos_512_v4
+54/283644/campos_512_v4
+54/283648/campos_512_v4
+54/283663/campos_512_v4
+54/283673/campos_512_v4
+54/283675/campos_512_v4
+54/283690/campos_512_v4
+54/283696/campos_512_v4
+54/283712/campos_512_v4
+54/283713/campos_512_v4
+54/283716/campos_512_v4
+54/283723/campos_512_v4
+54/283727/campos_512_v4
+54/283732/campos_512_v4
+54/283757/campos_512_v4
+54/283760/campos_512_v4
+54/283778/campos_512_v4
+54/283780/campos_512_v4
+54/283787/campos_512_v4
+54/283799/campos_512_v4
+54/283809/campos_512_v4
+54/283812/campos_512_v4
+54/283817/campos_512_v4
+54/283821/campos_512_v4
+54/283829/campos_512_v4
+54/283830/campos_512_v4
+54/283841/campos_512_v4
+54/283848/campos_512_v4
+54/283858/campos_512_v4
+54/283861/campos_512_v4
+54/283863/campos_512_v4
+54/283877/campos_512_v4
+54/283891/campos_512_v4
+54/283907/campos_512_v4
+54/283912/campos_512_v4
+54/283913/campos_512_v4
+54/283918/campos_512_v4
+54/283919/campos_512_v4
+54/283925/campos_512_v4
+54/283926/campos_512_v4
+54/283935/campos_512_v4
+54/283942/campos_512_v4
+54/283948/campos_512_v4
+54/283969/campos_512_v4
+54/283983/campos_512_v4
+54/283989/campos_512_v4
+54/284015/campos_512_v4
+54/284019/campos_512_v4
+54/284024/campos_512_v4
+54/284029/campos_512_v4
+54/284055/campos_512_v4
+54/284058/campos_512_v4
+54/284082/campos_512_v4
+54/284097/campos_512_v4
+54/284098/campos_512_v4
+54/284099/campos_512_v4
+54/284103/campos_512_v4
+54/284126/campos_512_v4
+54/284131/campos_512_v4
+54/284142/campos_512_v4
+54/284144/campos_512_v4
+54/284167/campos_512_v4
+54/284173/campos_512_v4
+54/284174/campos_512_v4
+54/284177/campos_512_v4
+54/284189/campos_512_v4
+54/284190/campos_512_v4
+54/284198/campos_512_v4
+54/284202/campos_512_v4
+54/284213/campos_512_v4
+54/284221/campos_512_v4
+54/284225/campos_512_v4
+54/284237/campos_512_v4
+54/284241/campos_512_v4
+54/284255/campos_512_v4
+54/284257/campos_512_v4
+54/284266/campos_512_v4
+54/284285/campos_512_v4
+54/284300/campos_512_v4
+54/284310/campos_512_v4
+54/284311/campos_512_v4
+54/284312/campos_512_v4
+54/284320/campos_512_v4
+54/284327/campos_512_v4
+54/284336/campos_512_v4
+54/284346/campos_512_v4
+54/284354/campos_512_v4
+54/284369/campos_512_v4
+54/284374/campos_512_v4
+54/284384/campos_512_v4
+54/284391/campos_512_v4
+54/284398/campos_512_v4
+54/284400/campos_512_v4
+54/284407/campos_512_v4
+54/284409/campos_512_v4
+54/284414/campos_512_v4
+54/284422/campos_512_v4
+54/284436/campos_512_v4
+54/284449/campos_512_v4
+54/284453/campos_512_v4
+54/284454/campos_512_v4
+54/284457/campos_512_v4
+54/284463/campos_512_v4
+54/284466/campos_512_v4
+54/284472/campos_512_v4
+54/284473/campos_512_v4
+54/284490/campos_512_v4
+54/284491/campos_512_v4
+54/284499/campos_512_v4
+54/284524/campos_512_v4
+54/284536/campos_512_v4
+54/284540/campos_512_v4
+54/284546/campos_512_v4
+54/284550/campos_512_v4
+54/284560/campos_512_v4
+54/284579/campos_512_v4
+54/284599/campos_512_v4
+54/284610/campos_512_v4
+54/284613/campos_512_v4
+54/284627/campos_512_v4
+54/284631/campos_512_v4
+54/284638/campos_512_v4
+54/284671/campos_512_v4
+54/284688/campos_512_v4
+54/284690/campos_512_v4
+54/284691/campos_512_v4
+54/284692/campos_512_v4
+54/284699/campos_512_v4
+54/284708/campos_512_v4
+54/284729/campos_512_v4
+54/284731/campos_512_v4
+54/284750/campos_512_v4
+54/284759/campos_512_v4
+54/284766/campos_512_v4
+54/284767/campos_512_v4
+54/284783/campos_512_v4
+54/284797/campos_512_v4
+54/284816/campos_512_v4
+54/284817/campos_512_v4
+54/284825/campos_512_v4
+54/284828/campos_512_v4
+54/284837/campos_512_v4
+54/284842/campos_512_v4
+54/284844/campos_512_v4
+54/284847/campos_512_v4
+54/284850/campos_512_v4
+54/284864/campos_512_v4
+54/284868/campos_512_v4
+54/284873/campos_512_v4
+54/284881/campos_512_v4
+54/284885/campos_512_v4
+54/284902/campos_512_v4
+54/284908/campos_512_v4
+54/284918/campos_512_v4
+54/284920/campos_512_v4
+54/284932/campos_512_v4
+54/284937/campos_512_v4
+54/284949/campos_512_v4
+54/284950/campos_512_v4
+54/284967/campos_512_v4
+54/284985/campos_512_v4
+55/285003/campos_512_v4
+55/285005/campos_512_v4
+55/285034/campos_512_v4
+55/285040/campos_512_v4
+55/285044/campos_512_v4
+55/285053/campos_512_v4
+55/285093/campos_512_v4
+55/285100/campos_512_v4
+55/285103/campos_512_v4
+55/285104/campos_512_v4
+55/285111/campos_512_v4
+55/285112/campos_512_v4
+55/285122/campos_512_v4
+55/285125/campos_512_v4
+55/285139/campos_512_v4
+55/285161/campos_512_v4
+55/285172/campos_512_v4
+55/285174/campos_512_v4
+55/285175/campos_512_v4
+55/285177/campos_512_v4
+55/285184/campos_512_v4
+55/285191/campos_512_v4
+55/285199/campos_512_v4
+55/285210/campos_512_v4
+55/285225/campos_512_v4
+55/285233/campos_512_v4
+55/285241/campos_512_v4
+55/285249/campos_512_v4
+55/285260/campos_512_v4
+55/285269/campos_512_v4
+55/285271/campos_512_v4
+55/285273/campos_512_v4
+55/285274/campos_512_v4
+55/285275/campos_512_v4
+55/285281/campos_512_v4
+55/285292/campos_512_v4
+55/285296/campos_512_v4
+55/285307/campos_512_v4
+55/285311/campos_512_v4
+55/285312/campos_512_v4
+55/285313/campos_512_v4
+55/285314/campos_512_v4
+55/285326/campos_512_v4
+55/285330/campos_512_v4
+55/285331/campos_512_v4
+55/285342/campos_512_v4
+55/285347/campos_512_v4
+55/285348/campos_512_v4
+55/285352/campos_512_v4
+55/285355/campos_512_v4
+55/285356/campos_512_v4
+55/285359/campos_512_v4
+55/285365/campos_512_v4
+55/285373/campos_512_v4
+55/285382/campos_512_v4
+55/285391/campos_512_v4
+55/285395/campos_512_v4
+55/285397/campos_512_v4
+55/285403/campos_512_v4
+55/285415/campos_512_v4
+55/285423/campos_512_v4
+55/285444/campos_512_v4
+55/285448/campos_512_v4
+55/285454/campos_512_v4
+55/285456/campos_512_v4
+55/285457/campos_512_v4
+55/285460/campos_512_v4
+55/285463/campos_512_v4
+55/285482/campos_512_v4
+55/285502/campos_512_v4
+55/285506/campos_512_v4
+55/285507/campos_512_v4
+55/285513/campos_512_v4
+55/285519/campos_512_v4
+55/285532/campos_512_v4
+55/285539/campos_512_v4
+55/285566/campos_512_v4
+55/285575/campos_512_v4
+55/285576/campos_512_v4
+55/285580/campos_512_v4
+55/285581/campos_512_v4
+55/285591/campos_512_v4
+55/285593/campos_512_v4
+55/285603/campos_512_v4
+55/285633/campos_512_v4
+55/285639/campos_512_v4
+55/285644/campos_512_v4
+55/285662/campos_512_v4
+55/285663/campos_512_v4
+55/285670/campos_512_v4
+55/285682/campos_512_v4
+55/285694/campos_512_v4
+55/285698/campos_512_v4
+55/285715/campos_512_v4
+55/285728/campos_512_v4
+55/285738/campos_512_v4
+55/285747/campos_512_v4
+55/285755/campos_512_v4
+55/285781/campos_512_v4
+55/285809/campos_512_v4
+55/285811/campos_512_v4
+55/285823/campos_512_v4
+55/285826/campos_512_v4
+55/285845/campos_512_v4
+55/285846/campos_512_v4
+55/285850/campos_512_v4
+55/285855/campos_512_v4
+55/285868/campos_512_v4
+55/285870/campos_512_v4
+55/285875/campos_512_v4
+55/285877/campos_512_v4
+55/285893/campos_512_v4
+55/285920/campos_512_v4
+55/285933/campos_512_v4
+55/285942/campos_512_v4
+55/285948/campos_512_v4
+55/285962/campos_512_v4
+55/285965/campos_512_v4
+55/285968/campos_512_v4
+55/285986/campos_512_v4
+55/286004/campos_512_v4
+55/286008/campos_512_v4
+55/286010/campos_512_v4
+55/286011/campos_512_v4
+55/286017/campos_512_v4
+55/286020/campos_512_v4
+55/286035/campos_512_v4
+55/286041/campos_512_v4
+55/286046/campos_512_v4
+55/286071/campos_512_v4
+55/286081/campos_512_v4
+55/286082/campos_512_v4
+55/286093/campos_512_v4
+55/286096/campos_512_v4
+55/286099/campos_512_v4
+55/286103/campos_512_v4
+55/286104/campos_512_v4
+55/286114/campos_512_v4
+55/286115/campos_512_v4
+55/286120/campos_512_v4
+55/286122/campos_512_v4
+55/286129/campos_512_v4
+55/286139/campos_512_v4
+55/286149/campos_512_v4
+55/286170/campos_512_v4
+55/286171/campos_512_v4
+55/286176/campos_512_v4
+55/286182/campos_512_v4
+55/286200/campos_512_v4
+55/286224/campos_512_v4
+55/286245/campos_512_v4
+55/286250/campos_512_v4
+55/286255/campos_512_v4
+55/286256/campos_512_v4
+55/286257/campos_512_v4
+55/286259/campos_512_v4
+55/286262/campos_512_v4
+55/286265/campos_512_v4
+55/286270/campos_512_v4
+55/286285/campos_512_v4
+55/286289/campos_512_v4
+55/286300/campos_512_v4
+55/286308/campos_512_v4
+55/286310/campos_512_v4
+55/286314/campos_512_v4
+55/286316/campos_512_v4
+55/286329/campos_512_v4
+55/286341/campos_512_v4
+55/286343/campos_512_v4
+55/286349/campos_512_v4
+55/286350/campos_512_v4
+55/286358/campos_512_v4
+55/286359/campos_512_v4
+55/286362/campos_512_v4
+55/286394/campos_512_v4
+55/286397/campos_512_v4
+55/286405/campos_512_v4
+55/286407/campos_512_v4
+55/286415/campos_512_v4
+55/286419/campos_512_v4
+55/286424/campos_512_v4
+55/286449/campos_512_v4
+55/286451/campos_512_v4
+55/286453/campos_512_v4
+55/286455/campos_512_v4
+55/286457/campos_512_v4
+55/286465/campos_512_v4
+55/286467/campos_512_v4
+55/286468/campos_512_v4
+55/286493/campos_512_v4
+55/286499/campos_512_v4
+55/286509/campos_512_v4
+55/286521/campos_512_v4
+55/286526/campos_512_v4
+55/286549/campos_512_v4
+55/286561/campos_512_v4
+55/286568/campos_512_v4
+55/286572/campos_512_v4
+55/286576/campos_512_v4
+55/286580/campos_512_v4
+55/286590/campos_512_v4
+55/286600/campos_512_v4
+55/286622/campos_512_v4
+55/286632/campos_512_v4
+55/286646/campos_512_v4
+55/286648/campos_512_v4
+55/286663/campos_512_v4
+55/286671/campos_512_v4
+55/286680/campos_512_v4
+55/286681/campos_512_v4
+55/286703/campos_512_v4
+55/286716/campos_512_v4
+55/286723/campos_512_v4
+55/286725/campos_512_v4
+55/286729/campos_512_v4
+55/286743/campos_512_v4
+55/286746/campos_512_v4
+55/286749/campos_512_v4
+55/286771/campos_512_v4
+55/286779/campos_512_v4
+55/286781/campos_512_v4
+55/286788/campos_512_v4
+55/286806/campos_512_v4
+55/286809/campos_512_v4
+55/286819/campos_512_v4
+55/286822/campos_512_v4
+55/286829/campos_512_v4
+55/286838/campos_512_v4
+55/286856/campos_512_v4
+55/286864/campos_512_v4
+55/286869/campos_512_v4
+55/286884/campos_512_v4
+55/286887/campos_512_v4
+55/286893/campos_512_v4
+55/286894/campos_512_v4
+55/286896/campos_512_v4
+55/286899/campos_512_v4
+55/286913/campos_512_v4
+55/286920/campos_512_v4
+55/286938/campos_512_v4
+55/286942/campos_512_v4
+55/286943/campos_512_v4
+55/286979/campos_512_v4
+55/286984/campos_512_v4
+55/286991/campos_512_v4
+55/286992/campos_512_v4
+55/286994/campos_512_v4
+55/286997/campos_512_v4
+55/287000/campos_512_v4
+55/287002/campos_512_v4
+55/287005/campos_512_v4
+55/287007/campos_512_v4
+55/287021/campos_512_v4
+55/287023/campos_512_v4
+55/287027/campos_512_v4
+55/287028/campos_512_v4
+55/287033/campos_512_v4
+55/287060/campos_512_v4
+55/287065/campos_512_v4
+55/287068/campos_512_v4
+55/287082/campos_512_v4
+55/287097/campos_512_v4
+55/287098/campos_512_v4
+55/287109/campos_512_v4
+55/287112/campos_512_v4
+55/287123/campos_512_v4
+55/287126/campos_512_v4
+55/287131/campos_512_v4
+55/287146/campos_512_v4
+55/287152/campos_512_v4
+55/287157/campos_512_v4
+55/287176/campos_512_v4
+55/287183/campos_512_v4
+55/287187/campos_512_v4
+55/287189/campos_512_v4
+55/287191/campos_512_v4
+55/287192/campos_512_v4
+55/287208/campos_512_v4
+55/287218/campos_512_v4
+55/287227/campos_512_v4
+55/287232/campos_512_v4
+55/287239/campos_512_v4
+55/287240/campos_512_v4
+55/287244/campos_512_v4
+55/287249/campos_512_v4
+55/287256/campos_512_v4
+55/287265/campos_512_v4
+55/287272/campos_512_v4
+55/287275/campos_512_v4
+55/287281/campos_512_v4
+55/287283/campos_512_v4
+55/287285/campos_512_v4
+55/287293/campos_512_v4
+55/287296/campos_512_v4
+55/287302/campos_512_v4
+55/287319/campos_512_v4
+55/287329/campos_512_v4
+55/287333/campos_512_v4
+55/287339/campos_512_v4
+55/287343/campos_512_v4
+55/287346/campos_512_v4
+55/287351/campos_512_v4
+55/287357/campos_512_v4
+55/287367/campos_512_v4
+55/287384/campos_512_v4
+55/287391/campos_512_v4
+55/287410/campos_512_v4
+55/287411/campos_512_v4
+55/287415/campos_512_v4
+55/287420/campos_512_v4
+55/287430/campos_512_v4
+55/287432/campos_512_v4
+55/287448/campos_512_v4
+55/287449/campos_512_v4
+55/287460/campos_512_v4
+55/287464/campos_512_v4
+55/287477/campos_512_v4
+55/287479/campos_512_v4
+55/287487/campos_512_v4
+55/287499/campos_512_v4
+55/287516/campos_512_v4
+55/287522/campos_512_v4
+55/287525/campos_512_v4
+55/287531/campos_512_v4
+55/287534/campos_512_v4
+55/287536/campos_512_v4
+55/287539/campos_512_v4
+55/287545/campos_512_v4
+55/287568/campos_512_v4
+55/287569/campos_512_v4
+55/287597/campos_512_v4
+55/287599/campos_512_v4
+55/287614/campos_512_v4
+55/287630/campos_512_v4
+55/287637/campos_512_v4
+55/287638/campos_512_v4
+55/287643/campos_512_v4
+55/287651/campos_512_v4
+55/287654/campos_512_v4
+55/287655/campos_512_v4
+55/287657/campos_512_v4
+55/287658/campos_512_v4
+55/287665/campos_512_v4
+55/287673/campos_512_v4
+55/287675/campos_512_v4
+55/287678/campos_512_v4
+55/287688/campos_512_v4
+55/287696/campos_512_v4
+55/287722/campos_512_v4
+55/287741/campos_512_v4
+55/287746/campos_512_v4
+55/287754/campos_512_v4
+55/287762/campos_512_v4
+55/287763/campos_512_v4
+55/287768/campos_512_v4
+55/287769/campos_512_v4
+55/287780/campos_512_v4
+55/287784/campos_512_v4
+55/287790/campos_512_v4
+55/287792/campos_512_v4
+55/287802/campos_512_v4
+55/287804/campos_512_v4
+55/287816/campos_512_v4
+55/287818/campos_512_v4
+55/287831/campos_512_v4
+55/287842/campos_512_v4
+55/287843/campos_512_v4
+55/287851/campos_512_v4
+55/287856/campos_512_v4
+55/287860/campos_512_v4
+55/287863/campos_512_v4
+55/287868/campos_512_v4
+55/287870/campos_512_v4
+55/287877/campos_512_v4
+55/287889/campos_512_v4
+55/287897/campos_512_v4
+55/287917/campos_512_v4
+55/287923/campos_512_v4
+55/287947/campos_512_v4
+55/287956/campos_512_v4
+55/287963/campos_512_v4
+55/287986/campos_512_v4
+55/287997/campos_512_v4
+55/288005/campos_512_v4
+55/288019/campos_512_v4
+55/288021/campos_512_v4
+55/288025/campos_512_v4
+55/288029/campos_512_v4
+55/288045/campos_512_v4
+55/288055/campos_512_v4
+55/288056/campos_512_v4
+55/288065/campos_512_v4
+55/288068/campos_512_v4
+55/288070/campos_512_v4
+55/288083/campos_512_v4
+55/288101/campos_512_v4
+55/288102/campos_512_v4
+55/288127/campos_512_v4
+55/288135/campos_512_v4
+55/288143/campos_512_v4
+55/288173/campos_512_v4
+55/288174/campos_512_v4
+55/288175/campos_512_v4
+55/288180/campos_512_v4
+55/288181/campos_512_v4
+55/288185/campos_512_v4
+55/288189/campos_512_v4
+55/288193/campos_512_v4
+55/288210/campos_512_v4
+55/288220/campos_512_v4
+55/288226/campos_512_v4
+55/288236/campos_512_v4
+55/288238/campos_512_v4
+55/288242/campos_512_v4
+55/288258/campos_512_v4
+55/288272/campos_512_v4
+55/288273/campos_512_v4
+55/288274/campos_512_v4
+55/288287/campos_512_v4
+55/288288/campos_512_v4
+55/288294/campos_512_v4
+55/288299/campos_512_v4
+55/288308/campos_512_v4
+55/288310/campos_512_v4
+55/288314/campos_512_v4
+55/288324/campos_512_v4
+55/288328/campos_512_v4
+55/288329/campos_512_v4
+55/288336/campos_512_v4
+55/288337/campos_512_v4
+55/288338/campos_512_v4
+55/288340/campos_512_v4
+55/288342/campos_512_v4
+55/288343/campos_512_v4
+55/288346/campos_512_v4
+55/288350/campos_512_v4
+55/288354/campos_512_v4
+55/288363/campos_512_v4
+55/288378/campos_512_v4
+55/288381/campos_512_v4
+55/288387/campos_512_v4
+55/288390/campos_512_v4
+55/288393/campos_512_v4
+55/288396/campos_512_v4
+55/288398/campos_512_v4
+55/288400/campos_512_v4
+55/288424/campos_512_v4
+55/288425/campos_512_v4
+55/288431/campos_512_v4
+55/288435/campos_512_v4
+55/288440/campos_512_v4
+55/288444/campos_512_v4
+55/288447/campos_512_v4
+55/288458/campos_512_v4
+55/288474/campos_512_v4
+55/288480/campos_512_v4
+55/288492/campos_512_v4
+55/288494/campos_512_v4
+55/288496/campos_512_v4
+55/288504/campos_512_v4
+55/288510/campos_512_v4
+55/288526/campos_512_v4
+55/288528/campos_512_v4
+55/288529/campos_512_v4
+55/288533/campos_512_v4
+55/288534/campos_512_v4
+55/288544/campos_512_v4
+55/288546/campos_512_v4
+55/288548/campos_512_v4
+55/288557/campos_512_v4
+55/288568/campos_512_v4
+55/288575/campos_512_v4
+55/288589/campos_512_v4
+55/288595/campos_512_v4
+55/288621/campos_512_v4
+55/288641/campos_512_v4
+55/288651/campos_512_v4
+55/288656/campos_512_v4
+55/288663/campos_512_v4
+55/288664/campos_512_v4
+55/288665/campos_512_v4
+55/288674/campos_512_v4
+55/288677/campos_512_v4
+55/288679/campos_512_v4
+55/288680/campos_512_v4
+55/288700/campos_512_v4
+55/288715/campos_512_v4
+55/288717/campos_512_v4
+55/288722/campos_512_v4
+55/288723/campos_512_v4
+55/288736/campos_512_v4
+55/288737/campos_512_v4
+55/288740/campos_512_v4
+55/288742/campos_512_v4
+55/288744/campos_512_v4
+55/288749/campos_512_v4
+55/288761/campos_512_v4
+55/288765/campos_512_v4
+55/288775/campos_512_v4
+55/288789/campos_512_v4
+55/288798/campos_512_v4
+55/288806/campos_512_v4
+55/288812/campos_512_v4
+55/288821/campos_512_v4
+55/288825/campos_512_v4
+55/288842/campos_512_v4
+55/288844/campos_512_v4
+55/288866/campos_512_v4
+55/288875/campos_512_v4
+55/288885/campos_512_v4
+55/288895/campos_512_v4
+55/288897/campos_512_v4
+55/288901/campos_512_v4
+55/288904/campos_512_v4
+55/288936/campos_512_v4
+55/288945/campos_512_v4
+55/288955/campos_512_v4
+55/288958/campos_512_v4
+55/288969/campos_512_v4
+55/288973/campos_512_v4
+55/288993/campos_512_v4
+55/289001/campos_512_v4
+55/289005/campos_512_v4
+55/289020/campos_512_v4
+55/289031/campos_512_v4
+55/289047/campos_512_v4
+55/289049/campos_512_v4
+55/289055/campos_512_v4
+55/289063/campos_512_v4
+55/289064/campos_512_v4
+55/289076/campos_512_v4
+55/289080/campos_512_v4
+55/289086/campos_512_v4
+55/289100/campos_512_v4
+55/289106/campos_512_v4
+55/289115/campos_512_v4
+55/289117/campos_512_v4
+55/289120/campos_512_v4
+55/289125/campos_512_v4
+55/289160/campos_512_v4
+55/289177/campos_512_v4
+55/289178/campos_512_v4
+55/289182/campos_512_v4
+55/289187/campos_512_v4
+55/289189/campos_512_v4
+55/289204/campos_512_v4
+55/289208/campos_512_v4
+55/289209/campos_512_v4
+55/289210/campos_512_v4
+55/289215/campos_512_v4
+55/289221/campos_512_v4
+55/289237/campos_512_v4
+55/289249/campos_512_v4
+55/289264/campos_512_v4
+55/289272/campos_512_v4
+55/289276/campos_512_v4
+55/289279/campos_512_v4
+55/289284/campos_512_v4
+55/289289/campos_512_v4
+55/289290/campos_512_v4
+55/289295/campos_512_v4
+55/289296/campos_512_v4
+55/289299/campos_512_v4
+55/289304/campos_512_v4
+55/289310/campos_512_v4
+55/289325/campos_512_v4
+55/289342/campos_512_v4
+55/289363/campos_512_v4
+55/289368/campos_512_v4
+55/289369/campos_512_v4
+55/289403/campos_512_v4
+55/289409/campos_512_v4
+55/289418/campos_512_v4
+55/289425/campos_512_v4
+55/289436/campos_512_v4
+55/289447/campos_512_v4
+55/289468/campos_512_v4
+55/289471/campos_512_v4
+55/289477/campos_512_v4
+55/289488/campos_512_v4
+55/289496/campos_512_v4
+55/289498/campos_512_v4
+55/289499/campos_512_v4
+55/289511/campos_512_v4
+55/289513/campos_512_v4
+55/289522/campos_512_v4
+55/289523/campos_512_v4
+55/289530/campos_512_v4
+55/289544/campos_512_v4
+55/289550/campos_512_v4
+55/289551/campos_512_v4
+55/289560/campos_512_v4
+55/289561/campos_512_v4
+55/289585/campos_512_v4
+55/289588/campos_512_v4
+55/289603/campos_512_v4
+55/289607/campos_512_v4
+55/289611/campos_512_v4
+55/289615/campos_512_v4
+55/289640/campos_512_v4
+55/289644/campos_512_v4
+55/289647/campos_512_v4
+55/289648/campos_512_v4
+55/289651/campos_512_v4
+55/289666/campos_512_v4
+55/289682/campos_512_v4
+55/289690/campos_512_v4
+55/289691/campos_512_v4
+55/289694/campos_512_v4
+55/289695/campos_512_v4
+55/289705/campos_512_v4
+55/289713/campos_512_v4
+55/289719/campos_512_v4
+55/289730/campos_512_v4
+55/289754/campos_512_v4
+55/289772/campos_512_v4
+55/289784/campos_512_v4
+55/289788/campos_512_v4
+55/289808/campos_512_v4
+55/289817/campos_512_v4
+55/289819/campos_512_v4
+55/289823/campos_512_v4
+55/289825/campos_512_v4
+55/289841/campos_512_v4
+55/289842/campos_512_v4
+55/289851/campos_512_v4
+55/289855/campos_512_v4
+55/289863/campos_512_v4
+55/289871/campos_512_v4
+55/289877/campos_512_v4
+55/289880/campos_512_v4
+55/289891/campos_512_v4
+55/289895/campos_512_v4
+55/289898/campos_512_v4
+55/289910/campos_512_v4
+55/289914/campos_512_v4
+55/289915/campos_512_v4
+55/289916/campos_512_v4
+55/289918/campos_512_v4
+55/289931/campos_512_v4
+55/289943/campos_512_v4
+55/289961/campos_512_v4
+55/289972/campos_512_v4
+55/289974/campos_512_v4
+56/290002/campos_512_v4
+56/290009/campos_512_v4
+56/290012/campos_512_v4
+56/290022/campos_512_v4
+56/290024/campos_512_v4
+56/290028/campos_512_v4
+56/290039/campos_512_v4
+56/290044/campos_512_v4
+56/290057/campos_512_v4
+56/290059/campos_512_v4
+56/290086/campos_512_v4
+56/290121/campos_512_v4
+56/290173/campos_512_v4
+56/290184/campos_512_v4
+56/290185/campos_512_v4
+56/290197/campos_512_v4
+56/290207/campos_512_v4
+56/290209/campos_512_v4
+56/290239/campos_512_v4
+56/290254/campos_512_v4
+56/290255/campos_512_v4
+56/290259/campos_512_v4
+56/290260/campos_512_v4
+56/290263/campos_512_v4
+56/290281/campos_512_v4
+56/290288/campos_512_v4
+56/290297/campos_512_v4
+56/290337/campos_512_v4
+56/290338/campos_512_v4
+56/290339/campos_512_v4
+56/290393/campos_512_v4
+56/290410/campos_512_v4
+56/290417/campos_512_v4
+56/290424/campos_512_v4
+56/290432/campos_512_v4
+56/290443/campos_512_v4
+56/290444/campos_512_v4
+56/290446/campos_512_v4
+56/290450/campos_512_v4
+56/290451/campos_512_v4
+56/290460/campos_512_v4
+56/290461/campos_512_v4
+56/290466/campos_512_v4
+56/290477/campos_512_v4
+56/290485/campos_512_v4
+56/290488/campos_512_v4
+56/290493/campos_512_v4
+56/290512/campos_512_v4
+56/290544/campos_512_v4
+56/290552/campos_512_v4
+56/290558/campos_512_v4
+56/290560/campos_512_v4
+56/290568/campos_512_v4
+56/290584/campos_512_v4
+56/290625/campos_512_v4
+56/290634/campos_512_v4
+56/290636/campos_512_v4
+56/290640/campos_512_v4
+56/290664/campos_512_v4
+56/290667/campos_512_v4
+56/290674/campos_512_v4
+56/290677/campos_512_v4
+56/290692/campos_512_v4
+56/290713/campos_512_v4
+56/290725/campos_512_v4
+56/290751/campos_512_v4
+56/290754/campos_512_v4
+56/290769/campos_512_v4
+56/290771/campos_512_v4
+56/290775/campos_512_v4
+56/290777/campos_512_v4
+56/290786/campos_512_v4
+56/290789/campos_512_v4
+56/290804/campos_512_v4
+56/290807/campos_512_v4
+56/290811/campos_512_v4
+56/290817/campos_512_v4
+56/290826/campos_512_v4
+56/290832/campos_512_v4
+56/290858/campos_512_v4
+56/290876/campos_512_v4
+56/290907/campos_512_v4
+56/290909/campos_512_v4
+56/290942/campos_512_v4
+56/290950/campos_512_v4
+56/290952/campos_512_v4
+56/290984/campos_512_v4
+56/290985/campos_512_v4
+56/290993/campos_512_v4
+56/290994/campos_512_v4
+56/290997/campos_512_v4
+56/291004/campos_512_v4
+56/291018/campos_512_v4
+56/291022/campos_512_v4
+56/291093/campos_512_v4
+56/291104/campos_512_v4
+56/291106/campos_512_v4
+56/291109/campos_512_v4
+56/291115/campos_512_v4
+56/291141/campos_512_v4
+56/291186/campos_512_v4
+56/291192/campos_512_v4
+56/291201/campos_512_v4
+56/291209/campos_512_v4
+56/291215/campos_512_v4
+56/291219/campos_512_v4
+56/291223/campos_512_v4
+56/291251/campos_512_v4
+56/291257/campos_512_v4
+56/291263/campos_512_v4
+56/291277/campos_512_v4
+56/291279/campos_512_v4
+56/291290/campos_512_v4
+56/291296/campos_512_v4
+56/291310/campos_512_v4
+56/291339/campos_512_v4
+56/291340/campos_512_v4
+56/291351/campos_512_v4
+56/291353/campos_512_v4
+56/291360/campos_512_v4
+56/291366/campos_512_v4
+56/291371/campos_512_v4
+56/291373/campos_512_v4
+56/291389/campos_512_v4
+56/291412/campos_512_v4
+56/291420/campos_512_v4
+56/291437/campos_512_v4
+56/291446/campos_512_v4
+56/291456/campos_512_v4
+56/291457/campos_512_v4
+56/291458/campos_512_v4
+56/291465/campos_512_v4
+56/291472/campos_512_v4
+56/291476/campos_512_v4
+56/291478/campos_512_v4
+56/291487/campos_512_v4
+56/291490/campos_512_v4
+56/291495/campos_512_v4
+56/291498/campos_512_v4
+56/291506/campos_512_v4
+56/291512/campos_512_v4
+56/291522/campos_512_v4
+56/291532/campos_512_v4
+56/291533/campos_512_v4
+56/291544/campos_512_v4
+56/291554/campos_512_v4
+56/291574/campos_512_v4
+56/291588/campos_512_v4
+56/291637/campos_512_v4
+56/291639/campos_512_v4
+56/291640/campos_512_v4
+56/291652/campos_512_v4
+56/291656/campos_512_v4
+56/291670/campos_512_v4
+56/291671/campos_512_v4
+56/291679/campos_512_v4
+56/291713/campos_512_v4
+56/291733/campos_512_v4
+56/291737/campos_512_v4
+56/291741/campos_512_v4
+56/291820/campos_512_v4
+56/291828/campos_512_v4
+56/291835/campos_512_v4
+56/291856/campos_512_v4
+56/291862/campos_512_v4
+56/291871/campos_512_v4
+56/291885/campos_512_v4
+56/291904/campos_512_v4
+56/291916/campos_512_v4
+56/291917/campos_512_v4
+56/291920/campos_512_v4
+56/291940/campos_512_v4
+56/291948/campos_512_v4
+56/291968/campos_512_v4
+56/292000/campos_512_v4
+56/292021/campos_512_v4
+56/292030/campos_512_v4
+56/292046/campos_512_v4
+56/292050/campos_512_v4
+56/292088/campos_512_v4
+56/292089/campos_512_v4
+56/292093/campos_512_v4
+56/292104/campos_512_v4
+56/292107/campos_512_v4
+56/292118/campos_512_v4
+56/292124/campos_512_v4
+56/292140/campos_512_v4
+56/292145/campos_512_v4
+56/292147/campos_512_v4
+56/292151/campos_512_v4
+56/292160/campos_512_v4
+56/292162/campos_512_v4
+56/292165/campos_512_v4
+56/292175/campos_512_v4
+56/292229/campos_512_v4
+56/292230/campos_512_v4
+56/292232/campos_512_v4
+56/292236/campos_512_v4
+56/292263/campos_512_v4
+56/292269/campos_512_v4
+56/292274/campos_512_v4
+56/292275/campos_512_v4
+56/292278/campos_512_v4
+56/292300/campos_512_v4
+56/292310/campos_512_v4
+56/292311/campos_512_v4
+56/292315/campos_512_v4
+56/292320/campos_512_v4
+56/292342/campos_512_v4
+56/292364/campos_512_v4
+56/292371/campos_512_v4
+56/292374/campos_512_v4
+56/292383/campos_512_v4
+56/292389/campos_512_v4
+56/292398/campos_512_v4
+56/292402/campos_512_v4
+56/292433/campos_512_v4
+56/292437/campos_512_v4
+56/292438/campos_512_v4
+56/292439/campos_512_v4
+56/292454/campos_512_v4
+56/292469/campos_512_v4
+56/292474/campos_512_v4
+56/292488/campos_512_v4
+56/292489/campos_512_v4
+56/292495/campos_512_v4
+56/292507/campos_512_v4
+56/292508/campos_512_v4
+56/292523/campos_512_v4
+56/292530/campos_512_v4
+56/292540/campos_512_v4
+56/292547/campos_512_v4
+56/292564/campos_512_v4
+56/292572/campos_512_v4
+56/292579/campos_512_v4
+56/292580/campos_512_v4
+56/292585/campos_512_v4
+56/292587/campos_512_v4
+56/292602/campos_512_v4
+56/292610/campos_512_v4
+56/292627/campos_512_v4
+56/292629/campos_512_v4
+56/292661/campos_512_v4
+56/292667/campos_512_v4
+56/292669/campos_512_v4
+56/292682/campos_512_v4
+56/292684/campos_512_v4
+56/292693/campos_512_v4
+56/292694/campos_512_v4
+56/292695/campos_512_v4
+56/292699/campos_512_v4
+56/292705/campos_512_v4
+56/292708/campos_512_v4
+56/292716/campos_512_v4
+56/292725/campos_512_v4
+56/292750/campos_512_v4
+56/292771/campos_512_v4
+56/292778/campos_512_v4
+56/292782/campos_512_v4
+56/292788/campos_512_v4
+56/292798/campos_512_v4
+56/292801/campos_512_v4
+56/292803/campos_512_v4
+56/292805/campos_512_v4
+56/292807/campos_512_v4
+56/292808/campos_512_v4
+56/292818/campos_512_v4
+56/292841/campos_512_v4
+56/292857/campos_512_v4
+56/292859/campos_512_v4
+56/292865/campos_512_v4
+56/292877/campos_512_v4
+56/292900/campos_512_v4
+56/292913/campos_512_v4
+56/292917/campos_512_v4
+56/292918/campos_512_v4
+56/292922/campos_512_v4
+56/292928/campos_512_v4
+56/292940/campos_512_v4
+56/292968/campos_512_v4
+56/292973/campos_512_v4
+56/292980/campos_512_v4
+56/292985/campos_512_v4
+56/293001/campos_512_v4
+56/293006/campos_512_v4
+56/293017/campos_512_v4
+56/293021/campos_512_v4
+56/293022/campos_512_v4
+56/293028/campos_512_v4
+56/293030/campos_512_v4
+56/293032/campos_512_v4
+56/293041/campos_512_v4
+56/293045/campos_512_v4
+56/293051/campos_512_v4
+56/293053/campos_512_v4
+56/293060/campos_512_v4
+56/293076/campos_512_v4
+56/293090/campos_512_v4
+56/293106/campos_512_v4
+56/293123/campos_512_v4
+56/293126/campos_512_v4
+56/293145/campos_512_v4
+56/293146/campos_512_v4
+56/293155/campos_512_v4
+56/293171/campos_512_v4
+56/293174/campos_512_v4
+56/293190/campos_512_v4
+56/293198/campos_512_v4
+56/293203/campos_512_v4
+56/293211/campos_512_v4
+56/293222/campos_512_v4
+56/293224/campos_512_v4
+56/293240/campos_512_v4
+56/293249/campos_512_v4
+56/293255/campos_512_v4
+56/293262/campos_512_v4
+56/293266/campos_512_v4
+56/293280/campos_512_v4
+56/293297/campos_512_v4
+56/293301/campos_512_v4
+56/293321/campos_512_v4
+56/293332/campos_512_v4
+56/293343/campos_512_v4
+56/293345/campos_512_v4
+56/293376/campos_512_v4
+56/293390/campos_512_v4
+56/293398/campos_512_v4
+56/293426/campos_512_v4
+56/293439/campos_512_v4
+56/293453/campos_512_v4
+56/293455/campos_512_v4
+56/293461/campos_512_v4
+56/293462/campos_512_v4
+56/293480/campos_512_v4
+56/293483/campos_512_v4
+56/293490/campos_512_v4
+56/293496/campos_512_v4
+56/293500/campos_512_v4
+56/293501/campos_512_v4
+56/293502/campos_512_v4
+56/293534/campos_512_v4
+56/293540/campos_512_v4
+56/293549/campos_512_v4
+56/293550/campos_512_v4
+56/293557/campos_512_v4
+56/293566/campos_512_v4
+56/293578/campos_512_v4
+56/293593/campos_512_v4
+56/293594/campos_512_v4
+56/293604/campos_512_v4
+56/293605/campos_512_v4
+56/293615/campos_512_v4
+56/293617/campos_512_v4
+56/293633/campos_512_v4
+56/293640/campos_512_v4
+56/293641/campos_512_v4
+56/293644/campos_512_v4
+56/293652/campos_512_v4
+56/293654/campos_512_v4
+56/293665/campos_512_v4
+56/293669/campos_512_v4
+56/293674/campos_512_v4
+56/293692/campos_512_v4
+56/293706/campos_512_v4
+56/293710/campos_512_v4
+56/293712/campos_512_v4
+56/293716/campos_512_v4
+56/293718/campos_512_v4
+56/293721/campos_512_v4
+56/293722/campos_512_v4
+56/293723/campos_512_v4
+56/293733/campos_512_v4
+56/293739/campos_512_v4
+56/293742/campos_512_v4
+56/293754/campos_512_v4
+56/293809/campos_512_v4
+56/293811/campos_512_v4
+56/293828/campos_512_v4
+56/293829/campos_512_v4
+56/293848/campos_512_v4
+56/293854/campos_512_v4
+56/293868/campos_512_v4
+56/293871/campos_512_v4
+56/293883/campos_512_v4
+56/293889/campos_512_v4
+56/293896/campos_512_v4
+56/293909/campos_512_v4
+56/293915/campos_512_v4
+56/293918/campos_512_v4
+56/293936/campos_512_v4
+56/293956/campos_512_v4
+56/293966/campos_512_v4
+56/293967/campos_512_v4
+56/293969/campos_512_v4
+56/293972/campos_512_v4
+56/293975/campos_512_v4
+56/293985/campos_512_v4
+56/293996/campos_512_v4
+56/294000/campos_512_v4
+56/294002/campos_512_v4
+56/294011/campos_512_v4
+56/294033/campos_512_v4
+56/294037/campos_512_v4
+56/294045/campos_512_v4
+56/294048/campos_512_v4
+56/294066/campos_512_v4
+56/294084/campos_512_v4
+56/294086/campos_512_v4
+56/294098/campos_512_v4
+56/294099/campos_512_v4
+56/294100/campos_512_v4
+56/294103/campos_512_v4
+56/294104/campos_512_v4
+56/294111/campos_512_v4
+56/294118/campos_512_v4
+56/294130/campos_512_v4
+56/294146/campos_512_v4
+56/294149/campos_512_v4
+56/294156/campos_512_v4
+56/294158/campos_512_v4
+56/294159/campos_512_v4
+56/294162/campos_512_v4
+56/294167/campos_512_v4
+56/294186/campos_512_v4
+56/294188/campos_512_v4
+56/294215/campos_512_v4
+56/294219/campos_512_v4
+56/294222/campos_512_v4
+56/294227/campos_512_v4
+56/294243/campos_512_v4
+56/294251/campos_512_v4
+56/294256/campos_512_v4
+56/294257/campos_512_v4
+56/294271/campos_512_v4
+56/294276/campos_512_v4
+56/294280/campos_512_v4
+56/294283/campos_512_v4
+56/294284/campos_512_v4
+56/294290/campos_512_v4
+56/294294/campos_512_v4
+56/294295/campos_512_v4
+56/294301/campos_512_v4
+56/294305/campos_512_v4
+56/294310/campos_512_v4
+56/294314/campos_512_v4
+56/294315/campos_512_v4
+56/294322/campos_512_v4
+56/294345/campos_512_v4
+56/294350/campos_512_v4
+56/294361/campos_512_v4
+56/294380/campos_512_v4
+56/294382/campos_512_v4
+56/294403/campos_512_v4
+56/294404/campos_512_v4
+56/294415/campos_512_v4
+56/294428/campos_512_v4
+56/294429/campos_512_v4
+56/294436/campos_512_v4
+56/294444/campos_512_v4
+56/294457/campos_512_v4
+56/294467/campos_512_v4
+56/294474/campos_512_v4
+56/294476/campos_512_v4
+56/294478/campos_512_v4
+56/294480/campos_512_v4
+56/294483/campos_512_v4
+56/294508/campos_512_v4
+56/294519/campos_512_v4
+56/294522/campos_512_v4
+56/294527/campos_512_v4
+56/294528/campos_512_v4
+56/294540/campos_512_v4
+56/294543/campos_512_v4
+56/294544/campos_512_v4
+56/294563/campos_512_v4
+56/294565/campos_512_v4
+56/294575/campos_512_v4
+56/294602/campos_512_v4
+56/294612/campos_512_v4
+56/294613/campos_512_v4
+56/294614/campos_512_v4
+56/294622/campos_512_v4
+56/294625/campos_512_v4
+56/294627/campos_512_v4
+56/294635/campos_512_v4
+56/294643/campos_512_v4
+56/294676/campos_512_v4
+56/294683/campos_512_v4
+56/294700/campos_512_v4
+56/294703/campos_512_v4
+56/294706/campos_512_v4
+56/294724/campos_512_v4
+56/294726/campos_512_v4
+56/294735/campos_512_v4
+56/294740/campos_512_v4
+56/294761/campos_512_v4
+56/294770/campos_512_v4
+56/294788/campos_512_v4
+56/294797/campos_512_v4
+56/294798/campos_512_v4
+56/294799/campos_512_v4
+56/294803/campos_512_v4
+56/294805/campos_512_v4
+56/294810/campos_512_v4
+56/294813/campos_512_v4
+56/294824/campos_512_v4
+56/294828/campos_512_v4
+56/294833/campos_512_v4
+56/294845/campos_512_v4
+56/294848/campos_512_v4
+56/294858/campos_512_v4
+56/294862/campos_512_v4
+56/294867/campos_512_v4
+56/294872/campos_512_v4
+56/294876/campos_512_v4
+56/294878/campos_512_v4
+56/294889/campos_512_v4
+56/294894/campos_512_v4
+56/294914/campos_512_v4
+56/294927/campos_512_v4
+56/294932/campos_512_v4
+56/294942/campos_512_v4
+56/294944/campos_512_v4
+56/294977/campos_512_v4
+57/295011/campos_512_v4
+57/295019/campos_512_v4
+57/295021/campos_512_v4
+57/295027/campos_512_v4
+57/295031/campos_512_v4
+57/295032/campos_512_v4
+57/295036/campos_512_v4
+57/295039/campos_512_v4
+57/295040/campos_512_v4
+57/295044/campos_512_v4
+57/295048/campos_512_v4
+57/295061/campos_512_v4
+57/295069/campos_512_v4
+57/295074/campos_512_v4
+57/295078/campos_512_v4
+57/295079/campos_512_v4
+57/295085/campos_512_v4
+57/295118/campos_512_v4
+57/295119/campos_512_v4
+57/295130/campos_512_v4
+57/295133/campos_512_v4
+57/295141/campos_512_v4
+57/295146/campos_512_v4
+57/295151/campos_512_v4
+57/295162/campos_512_v4
+57/295173/campos_512_v4
+57/295176/campos_512_v4
+57/295183/campos_512_v4
+57/295192/campos_512_v4
+57/295195/campos_512_v4
+57/295200/campos_512_v4
+57/295203/campos_512_v4
+57/295211/campos_512_v4
+57/295223/campos_512_v4
+57/295229/campos_512_v4
+57/295230/campos_512_v4
+57/295231/campos_512_v4
+57/295233/campos_512_v4
+57/295242/campos_512_v4
+57/295260/campos_512_v4
+57/295263/campos_512_v4
+57/295273/campos_512_v4
+57/295317/campos_512_v4
+57/295333/campos_512_v4
+57/295339/campos_512_v4
+57/295356/campos_512_v4
+57/295388/campos_512_v4
+57/295391/campos_512_v4
+57/295396/campos_512_v4
+57/295401/campos_512_v4
+57/295413/campos_512_v4
+57/295416/campos_512_v4
+57/295417/campos_512_v4
+57/295429/campos_512_v4
+57/295434/campos_512_v4
+57/295440/campos_512_v4
+57/295441/campos_512_v4
+57/295449/campos_512_v4
+57/295451/campos_512_v4
+57/295455/campos_512_v4
+57/295458/campos_512_v4
+57/295467/campos_512_v4
+57/295471/campos_512_v4
+57/295480/campos_512_v4
+57/295485/campos_512_v4
+57/295491/campos_512_v4
+57/295494/campos_512_v4
+57/295499/campos_512_v4
+57/295505/campos_512_v4
+57/295509/campos_512_v4
+57/295513/campos_512_v4
+57/295524/campos_512_v4
+57/295525/campos_512_v4
+57/295528/campos_512_v4
+57/295535/campos_512_v4
+57/295536/campos_512_v4
+57/295539/campos_512_v4
+57/295541/campos_512_v4
+57/295542/campos_512_v4
+57/295543/campos_512_v4
+57/295545/campos_512_v4
+57/295548/campos_512_v4
+57/295552/campos_512_v4
+57/295553/campos_512_v4
+57/295554/campos_512_v4
+57/295561/campos_512_v4
+57/295563/campos_512_v4
+57/295570/campos_512_v4
+57/295575/campos_512_v4
+57/295579/campos_512_v4
+57/295581/campos_512_v4
+57/295582/campos_512_v4
+57/295606/campos_512_v4
+57/295612/campos_512_v4
+57/295614/campos_512_v4
+57/295625/campos_512_v4
+57/295630/campos_512_v4
+57/295635/campos_512_v4
+57/295638/campos_512_v4
+57/295653/campos_512_v4
+57/295657/campos_512_v4
+57/295663/campos_512_v4
+57/295665/campos_512_v4
+57/295666/campos_512_v4
+57/295676/campos_512_v4
+57/295681/campos_512_v4
+57/295683/campos_512_v4
+57/295700/campos_512_v4
+57/295703/campos_512_v4
+57/295705/campos_512_v4
+57/295709/campos_512_v4
+57/295712/campos_512_v4
+57/295713/campos_512_v4
+57/295723/campos_512_v4
+57/295725/campos_512_v4
+57/295732/campos_512_v4
+57/295735/campos_512_v4
+57/295739/campos_512_v4
+57/295743/campos_512_v4
+57/295745/campos_512_v4
+57/295750/campos_512_v4
+57/295751/campos_512_v4
+57/295752/campos_512_v4
+57/295770/campos_512_v4
+57/295779/campos_512_v4
+57/295790/campos_512_v4
+57/295797/campos_512_v4
+57/295802/campos_512_v4
+57/295803/campos_512_v4
+57/295808/campos_512_v4
+57/295810/campos_512_v4
+57/295817/campos_512_v4
+57/295827/campos_512_v4
+57/295831/campos_512_v4
+57/295834/campos_512_v4
+57/295839/campos_512_v4
+57/295865/campos_512_v4
+57/295874/campos_512_v4
+57/295877/campos_512_v4
+57/295906/campos_512_v4
+57/295919/campos_512_v4
+57/295924/campos_512_v4
+57/295926/campos_512_v4
+57/295927/campos_512_v4
+57/295930/campos_512_v4
+57/295953/campos_512_v4
+57/295962/campos_512_v4
+57/295970/campos_512_v4
+57/296008/campos_512_v4
+57/296027/campos_512_v4
+57/296030/campos_512_v4
+57/296031/campos_512_v4
+57/296054/campos_512_v4
+57/296060/campos_512_v4
+57/296063/campos_512_v4
+57/296065/campos_512_v4
+57/296066/campos_512_v4
+57/296069/campos_512_v4
+57/296085/campos_512_v4
+57/296088/campos_512_v4
+57/296093/campos_512_v4
+57/296097/campos_512_v4
+57/296106/campos_512_v4
+57/296107/campos_512_v4
+57/296116/campos_512_v4
+57/296117/campos_512_v4
+57/296132/campos_512_v4
+57/296135/campos_512_v4
+57/296142/campos_512_v4
+57/296170/campos_512_v4
+57/296181/campos_512_v4
+57/296186/campos_512_v4
+57/296200/campos_512_v4
+57/296214/campos_512_v4
+57/296217/campos_512_v4
+57/296224/campos_512_v4
+57/296229/campos_512_v4
+57/296243/campos_512_v4
+57/296257/campos_512_v4
+57/296272/campos_512_v4
+57/296289/campos_512_v4
+57/296301/campos_512_v4
+57/296303/campos_512_v4
+57/296308/campos_512_v4
+57/296320/campos_512_v4
+57/296325/campos_512_v4
+57/296335/campos_512_v4
+57/296394/campos_512_v4
+57/296401/campos_512_v4
+57/296402/campos_512_v4
+57/296404/campos_512_v4
+57/296415/campos_512_v4
+57/296418/campos_512_v4
+57/296434/campos_512_v4
+57/296437/campos_512_v4
+57/296440/campos_512_v4
+57/296451/campos_512_v4
+57/296457/campos_512_v4
+57/296468/campos_512_v4
+57/296469/campos_512_v4
+57/296479/campos_512_v4
+57/296482/campos_512_v4
+57/296484/campos_512_v4
+57/296485/campos_512_v4
+57/296499/campos_512_v4
+57/296508/campos_512_v4
+57/296510/campos_512_v4
+57/296512/campos_512_v4
+57/296522/campos_512_v4
+57/296533/campos_512_v4
+57/296535/campos_512_v4
+57/296539/campos_512_v4
+57/296542/campos_512_v4
+57/296547/campos_512_v4
+57/296566/campos_512_v4
+57/296582/campos_512_v4
+57/296591/campos_512_v4
+57/296595/campos_512_v4
+57/296603/campos_512_v4
+57/296605/campos_512_v4
+57/296607/campos_512_v4
+57/296621/campos_512_v4
+57/296624/campos_512_v4
+57/296639/campos_512_v4
+57/296643/campos_512_v4
+57/296648/campos_512_v4
+57/296656/campos_512_v4
+57/296658/campos_512_v4
+57/296666/campos_512_v4
+57/296675/campos_512_v4
+57/296694/campos_512_v4
+57/296696/campos_512_v4
+57/296705/campos_512_v4
+57/296707/campos_512_v4
+57/296712/campos_512_v4
+57/296722/campos_512_v4
+57/296724/campos_512_v4
+57/296725/campos_512_v4
+57/296731/campos_512_v4
+57/296732/campos_512_v4
+57/296740/campos_512_v4
+57/296756/campos_512_v4
+57/296762/campos_512_v4
+57/296779/campos_512_v4
+57/296797/campos_512_v4
+57/296805/campos_512_v4
+57/296819/campos_512_v4
+57/296828/campos_512_v4
+57/296831/campos_512_v4
+57/296849/campos_512_v4
+57/296858/campos_512_v4
+57/296861/campos_512_v4
+57/296862/campos_512_v4
+57/296873/campos_512_v4
+57/296896/campos_512_v4
+57/296900/campos_512_v4
+57/296902/campos_512_v4
+57/296915/campos_512_v4
+57/296916/campos_512_v4
+57/296920/campos_512_v4
+57/296926/campos_512_v4
+57/296931/campos_512_v4
+57/296941/campos_512_v4
+57/296960/campos_512_v4
+57/296970/campos_512_v4
+57/296981/campos_512_v4
+57/296990/campos_512_v4
+57/296992/campos_512_v4
+57/297005/campos_512_v4
+57/297006/campos_512_v4
+57/297016/campos_512_v4
+57/297017/campos_512_v4
+57/297020/campos_512_v4
+57/297023/campos_512_v4
+57/297024/campos_512_v4
+57/297029/campos_512_v4
+57/297038/campos_512_v4
+57/297039/campos_512_v4
+57/297041/campos_512_v4
+57/297046/campos_512_v4
+57/297060/campos_512_v4
+57/297101/campos_512_v4
+57/297102/campos_512_v4
+57/297108/campos_512_v4
+57/297120/campos_512_v4
+57/297161/campos_512_v4
+57/297176/campos_512_v4
+57/297183/campos_512_v4
+57/297187/campos_512_v4
+57/297188/campos_512_v4
+57/297198/campos_512_v4
+57/297203/campos_512_v4
+57/297218/campos_512_v4
+57/297225/campos_512_v4
+57/297231/campos_512_v4
+57/297241/campos_512_v4
+57/297252/campos_512_v4
+57/297254/campos_512_v4
+57/297270/campos_512_v4
+57/297275/campos_512_v4
+57/297278/campos_512_v4
+57/297281/campos_512_v4
+57/297284/campos_512_v4
+57/297286/campos_512_v4
+57/297306/campos_512_v4
+57/297327/campos_512_v4
+57/297339/campos_512_v4
+57/297343/campos_512_v4
+57/297351/campos_512_v4
+57/297360/campos_512_v4
+57/297362/campos_512_v4
+57/297374/campos_512_v4
+57/297376/campos_512_v4
+57/297388/campos_512_v4
+57/297403/campos_512_v4
+57/297404/campos_512_v4
+57/297425/campos_512_v4
+57/297440/campos_512_v4
+57/297445/campos_512_v4
+57/297461/campos_512_v4
+57/297462/campos_512_v4
+57/297472/campos_512_v4
+57/297481/campos_512_v4
+57/297483/campos_512_v4
+57/297489/campos_512_v4
+57/297509/campos_512_v4
+57/297515/campos_512_v4
+57/297526/campos_512_v4
+57/297552/campos_512_v4
+57/297554/campos_512_v4
+57/297556/campos_512_v4
+57/297562/campos_512_v4
+57/297575/campos_512_v4
+57/297585/campos_512_v4
+57/297602/campos_512_v4
+57/297604/campos_512_v4
+57/297605/campos_512_v4
+57/297615/campos_512_v4
+57/297643/campos_512_v4
+57/297647/campos_512_v4
+57/297657/campos_512_v4
+57/297663/campos_512_v4
+57/297683/campos_512_v4
+57/297705/campos_512_v4
+57/297710/campos_512_v4
+57/297713/campos_512_v4
+57/297714/campos_512_v4
+57/297731/campos_512_v4
+57/297737/campos_512_v4
+57/297742/campos_512_v4
+57/297754/campos_512_v4
+57/297755/campos_512_v4
+57/297764/campos_512_v4
+57/297765/campos_512_v4
+57/297766/campos_512_v4
+57/297768/campos_512_v4
+57/297775/campos_512_v4
+57/297788/campos_512_v4
+57/297792/campos_512_v4
+57/297796/campos_512_v4
+57/297802/campos_512_v4
+57/297814/campos_512_v4
+57/297820/campos_512_v4
+57/297829/campos_512_v4
+57/297831/campos_512_v4
+57/297833/campos_512_v4
+57/297839/campos_512_v4
+57/297841/campos_512_v4
+57/297846/campos_512_v4
+57/297850/campos_512_v4
+57/297862/campos_512_v4
+57/297865/campos_512_v4
+57/297871/campos_512_v4
+57/297881/campos_512_v4
+57/297882/campos_512_v4
+57/297919/campos_512_v4
+57/297929/campos_512_v4
+57/297930/campos_512_v4
+57/297939/campos_512_v4
+57/297940/campos_512_v4
+57/297946/campos_512_v4
+57/297964/campos_512_v4
+57/297971/campos_512_v4
+57/297979/campos_512_v4
+57/298002/campos_512_v4
+57/298003/campos_512_v4
+57/298006/campos_512_v4
+57/298007/campos_512_v4
+57/298023/campos_512_v4
+57/298030/campos_512_v4
+57/298032/campos_512_v4
+57/298035/campos_512_v4
+57/298042/campos_512_v4
+57/298049/campos_512_v4
+57/298054/campos_512_v4
+57/298057/campos_512_v4
+57/298064/campos_512_v4
+57/298066/campos_512_v4
+57/298096/campos_512_v4
+57/298099/campos_512_v4
+57/298102/campos_512_v4
+57/298134/campos_512_v4
+57/298141/campos_512_v4
+57/298148/campos_512_v4
+57/298167/campos_512_v4
+57/298169/campos_512_v4
+57/298176/campos_512_v4
+57/298188/campos_512_v4
+57/298196/campos_512_v4
+57/298197/campos_512_v4
+57/298198/campos_512_v4
+57/298199/campos_512_v4
+57/298201/campos_512_v4
+57/298204/campos_512_v4
+57/298214/campos_512_v4
+57/298216/campos_512_v4
+57/298220/campos_512_v4
+57/298223/campos_512_v4
+57/298230/campos_512_v4
+57/298232/campos_512_v4
+57/298233/campos_512_v4
+57/298238/campos_512_v4
+57/298239/campos_512_v4
+57/298241/campos_512_v4
+57/298252/campos_512_v4
+57/298255/campos_512_v4
+57/298260/campos_512_v4
+57/298278/campos_512_v4
+57/298279/campos_512_v4
+57/298284/campos_512_v4
+57/298286/campos_512_v4
+57/298291/campos_512_v4
+57/298301/campos_512_v4
+57/298306/campos_512_v4
+57/298314/campos_512_v4
+57/298315/campos_512_v4
+57/298316/campos_512_v4
+57/298329/campos_512_v4
+57/298330/campos_512_v4
+57/298339/campos_512_v4
+57/298342/campos_512_v4
+57/298344/campos_512_v4
+57/298347/campos_512_v4
+57/298353/campos_512_v4
+57/298358/campos_512_v4
+57/298362/campos_512_v4
+57/298366/campos_512_v4
+57/298368/campos_512_v4
+57/298369/campos_512_v4
+57/298375/campos_512_v4
+57/298387/campos_512_v4
+57/298393/campos_512_v4
+57/298396/campos_512_v4
+57/298401/campos_512_v4
+57/298405/campos_512_v4
+57/298411/campos_512_v4
+57/298415/campos_512_v4
+57/298422/campos_512_v4
+57/298423/campos_512_v4
+57/298424/campos_512_v4
+57/298429/campos_512_v4
+57/298440/campos_512_v4
+57/298441/campos_512_v4
+57/298442/campos_512_v4
+57/298446/campos_512_v4
+57/298454/campos_512_v4
+57/298468/campos_512_v4
+57/298493/campos_512_v4
+57/298501/campos_512_v4
+57/298506/campos_512_v4
+57/298531/campos_512_v4
+57/298532/campos_512_v4
+57/298534/campos_512_v4
+57/298535/campos_512_v4
+57/298536/campos_512_v4
+57/298547/campos_512_v4
+57/298549/campos_512_v4
+57/298572/campos_512_v4
+57/298580/campos_512_v4
+57/298594/campos_512_v4
+57/298598/campos_512_v4
+57/298607/campos_512_v4
+57/298617/campos_512_v4
+57/298624/campos_512_v4
+57/298631/campos_512_v4
+57/298632/campos_512_v4
+57/298643/campos_512_v4
+57/298651/campos_512_v4
+57/298653/campos_512_v4
+57/298659/campos_512_v4
+57/298660/campos_512_v4
+57/298661/campos_512_v4
+57/298666/campos_512_v4
+57/298669/campos_512_v4
+57/298673/campos_512_v4
+57/298674/campos_512_v4
+57/298679/campos_512_v4
+57/298684/campos_512_v4
+57/298698/campos_512_v4
+57/298703/campos_512_v4
+57/298706/campos_512_v4
+57/298707/campos_512_v4
+57/298715/campos_512_v4
+57/298723/campos_512_v4
+57/298742/campos_512_v4
+57/298756/campos_512_v4
+57/298761/campos_512_v4
+57/298778/campos_512_v4
+57/298779/campos_512_v4
+57/298788/campos_512_v4
+57/298799/campos_512_v4
+57/298816/campos_512_v4
+57/298826/campos_512_v4
+57/298828/campos_512_v4
+57/298853/campos_512_v4
+57/298876/campos_512_v4
+57/298880/campos_512_v4
+57/298905/campos_512_v4
+57/298920/campos_512_v4
+57/298925/campos_512_v4
+57/298930/campos_512_v4
+57/298936/campos_512_v4
+57/298942/campos_512_v4
+57/298943/campos_512_v4
+57/298958/campos_512_v4
+57/298959/campos_512_v4
+57/298961/campos_512_v4
+57/298963/campos_512_v4
+57/298968/campos_512_v4
+57/298981/campos_512_v4
+57/298983/campos_512_v4
+57/298997/campos_512_v4
+57/298998/campos_512_v4
+57/298999/campos_512_v4
+57/299007/campos_512_v4
+57/299017/campos_512_v4
+57/299025/campos_512_v4
+57/299027/campos_512_v4
+57/299049/campos_512_v4
+57/299052/campos_512_v4
+57/299068/campos_512_v4
+57/299071/campos_512_v4
+57/299077/campos_512_v4
+57/299084/campos_512_v4
+57/299100/campos_512_v4
+57/299107/campos_512_v4
+57/299124/campos_512_v4
+57/299133/campos_512_v4
+57/299151/campos_512_v4
+57/299155/campos_512_v4
+57/299159/campos_512_v4
+57/299181/campos_512_v4
+57/299185/campos_512_v4
+57/299238/campos_512_v4
+57/299245/campos_512_v4
+57/299248/campos_512_v4
+57/299249/campos_512_v4
+57/299254/campos_512_v4
+57/299257/campos_512_v4
+57/299259/campos_512_v4
+57/299264/campos_512_v4
+57/299265/campos_512_v4
+57/299271/campos_512_v4
+57/299282/campos_512_v4
+57/299293/campos_512_v4
+57/299298/campos_512_v4
+57/299310/campos_512_v4
+57/299312/campos_512_v4
+57/299321/campos_512_v4
+57/299324/campos_512_v4
+57/299325/campos_512_v4
+57/299336/campos_512_v4
+57/299339/campos_512_v4
+57/299378/campos_512_v4
+57/299382/campos_512_v4
+57/299384/campos_512_v4
+57/299388/campos_512_v4
+57/299391/campos_512_v4
+57/299410/campos_512_v4
+57/299416/campos_512_v4
+57/299447/campos_512_v4
+57/299454/campos_512_v4
+57/299458/campos_512_v4
+57/299462/campos_512_v4
+57/299464/campos_512_v4
+57/299502/campos_512_v4
+57/299508/campos_512_v4
+57/299510/campos_512_v4
+57/299517/campos_512_v4
+57/299524/campos_512_v4
+57/299526/campos_512_v4
+57/299536/campos_512_v4
+57/299539/campos_512_v4
+57/299543/campos_512_v4
+57/299547/campos_512_v4
+57/299557/campos_512_v4
+57/299579/campos_512_v4
+57/299580/campos_512_v4
+57/299581/campos_512_v4
+57/299582/campos_512_v4
+57/299601/campos_512_v4
+57/299608/campos_512_v4
+57/299609/campos_512_v4
+57/299614/campos_512_v4
+57/299624/campos_512_v4
+57/299643/campos_512_v4
+57/299657/campos_512_v4
+57/299681/campos_512_v4
+57/299682/campos_512_v4
+57/299683/campos_512_v4
+57/299690/campos_512_v4
+57/299695/campos_512_v4
+57/299696/campos_512_v4
+57/299704/campos_512_v4
+57/299713/campos_512_v4
+57/299719/campos_512_v4
+57/299721/campos_512_v4
+57/299737/campos_512_v4
+57/299739/campos_512_v4
+57/299749/campos_512_v4
+57/299767/campos_512_v4
+57/299788/campos_512_v4
+57/299791/campos_512_v4
+57/299832/campos_512_v4
+57/299834/campos_512_v4
+57/299849/campos_512_v4
+57/299862/campos_512_v4
+57/299863/campos_512_v4
+57/299866/campos_512_v4
+57/299867/campos_512_v4
+57/299877/campos_512_v4
+57/299889/campos_512_v4
+57/299900/campos_512_v4
+57/299903/campos_512_v4
+57/299907/campos_512_v4
+57/299911/campos_512_v4
+57/299921/campos_512_v4
+57/299927/campos_512_v4
+57/299942/campos_512_v4
+57/299943/campos_512_v4
+57/299953/campos_512_v4
+57/299960/campos_512_v4
+57/299975/campos_512_v4
+57/299980/campos_512_v4
+57/299984/campos_512_v4
+57/299988/campos_512_v4
+58/300009/campos_512_v4
+58/300014/campos_512_v4
+58/300016/campos_512_v4
+58/300024/campos_512_v4
+58/300026/campos_512_v4
+58/300037/campos_512_v4
+58/300040/campos_512_v4
+58/300064/campos_512_v4
+58/300068/campos_512_v4
+58/300075/campos_512_v4
+58/300087/campos_512_v4
+58/300096/campos_512_v4
+58/300097/campos_512_v4
+58/300136/campos_512_v4
+58/300137/campos_512_v4
+58/300176/campos_512_v4
+58/300178/campos_512_v4
+58/300180/campos_512_v4
+58/300186/campos_512_v4
+58/300190/campos_512_v4
+58/300196/campos_512_v4
+58/300202/campos_512_v4
+58/300204/campos_512_v4
+58/300225/campos_512_v4
+58/300241/campos_512_v4
+58/300249/campos_512_v4
+58/300275/campos_512_v4
+58/300281/campos_512_v4
+58/300284/campos_512_v4
+58/300285/campos_512_v4
+58/300302/campos_512_v4
+58/300305/campos_512_v4
+58/300308/campos_512_v4
+58/300314/campos_512_v4
+58/300320/campos_512_v4
+58/300324/campos_512_v4
+58/300333/campos_512_v4
+58/300335/campos_512_v4
+58/300339/campos_512_v4
+58/300344/campos_512_v4
+58/300345/campos_512_v4
+58/300346/campos_512_v4
+58/300354/campos_512_v4
+58/300367/campos_512_v4
+58/300387/campos_512_v4
+58/300393/campos_512_v4
+58/300394/campos_512_v4
+58/300403/campos_512_v4
+58/300405/campos_512_v4
+58/300409/campos_512_v4
+58/300435/campos_512_v4
+58/300447/campos_512_v4
+58/300451/campos_512_v4
+58/300458/campos_512_v4
+58/300459/campos_512_v4
+58/300470/campos_512_v4
+58/300475/campos_512_v4
+58/300497/campos_512_v4
+58/300499/campos_512_v4
+58/300510/campos_512_v4
+58/300531/campos_512_v4
+58/300543/campos_512_v4
+58/300562/campos_512_v4
+58/300571/campos_512_v4
+58/300608/campos_512_v4
+58/300611/campos_512_v4
+58/300628/campos_512_v4
+58/300642/campos_512_v4
+58/300647/campos_512_v4
+58/300650/campos_512_v4
+58/300670/campos_512_v4
+58/300682/campos_512_v4
+58/300686/campos_512_v4
+58/300688/campos_512_v4
+58/300694/campos_512_v4
+58/300704/campos_512_v4
+58/300707/campos_512_v4
+58/300713/campos_512_v4
+58/300714/campos_512_v4
+58/300718/campos_512_v4
+58/300723/campos_512_v4
+58/300730/campos_512_v4
+58/300731/campos_512_v4
+58/300732/campos_512_v4
+58/300740/campos_512_v4
+58/300744/campos_512_v4
+58/300747/campos_512_v4
+58/300749/campos_512_v4
+58/300762/campos_512_v4
+58/300768/campos_512_v4
+58/300774/campos_512_v4
+58/300800/campos_512_v4
+58/300810/campos_512_v4
+58/300817/campos_512_v4
+58/300823/campos_512_v4
+58/300837/campos_512_v4
+58/300843/campos_512_v4
+58/300852/campos_512_v4
+58/300862/campos_512_v4
+58/300870/campos_512_v4
+58/300881/campos_512_v4
+58/300882/campos_512_v4
+58/300886/campos_512_v4
+58/300887/campos_512_v4
+58/300891/campos_512_v4
+58/300895/campos_512_v4
+58/300898/campos_512_v4
+58/300903/campos_512_v4
+58/300923/campos_512_v4
+58/300926/campos_512_v4
+58/300935/campos_512_v4
+58/300938/campos_512_v4
+58/300946/campos_512_v4
+58/300947/campos_512_v4
+58/300949/campos_512_v4
+58/300950/campos_512_v4
+58/300951/campos_512_v4
+58/300956/campos_512_v4
+58/300976/campos_512_v4
+58/300991/campos_512_v4
+58/300993/campos_512_v4
+58/300995/campos_512_v4
+58/301024/campos_512_v4
+58/301027/campos_512_v4
+58/301042/campos_512_v4
+58/301050/campos_512_v4
+58/301052/campos_512_v4
+58/301065/campos_512_v4
+58/301066/campos_512_v4
+58/301087/campos_512_v4
+58/301095/campos_512_v4
+58/301119/campos_512_v4
+58/301132/campos_512_v4
+58/301161/campos_512_v4
+58/301164/campos_512_v4
+58/301176/campos_512_v4
+58/301180/campos_512_v4
+58/301182/campos_512_v4
+58/301184/campos_512_v4
+58/301198/campos_512_v4
+58/301206/campos_512_v4
+58/301207/campos_512_v4
+58/301212/campos_512_v4
+58/301216/campos_512_v4
+58/301225/campos_512_v4
+58/301226/campos_512_v4
+58/301228/campos_512_v4
+58/301239/campos_512_v4
+58/301241/campos_512_v4
+58/301252/campos_512_v4
+58/301282/campos_512_v4
+58/301299/campos_512_v4
+58/301310/campos_512_v4
+58/301314/campos_512_v4
+58/301318/campos_512_v4
+58/301320/campos_512_v4
+58/301344/campos_512_v4
+58/301363/campos_512_v4
+58/301376/campos_512_v4
+58/301397/campos_512_v4
+58/301413/campos_512_v4
+58/301414/campos_512_v4
+58/301436/campos_512_v4
+58/301445/campos_512_v4
+58/301454/campos_512_v4
+58/301461/campos_512_v4
+58/301463/campos_512_v4
+58/301473/campos_512_v4
+58/301478/campos_512_v4
+58/301485/campos_512_v4
+58/301489/campos_512_v4
+58/301490/campos_512_v4
+58/301493/campos_512_v4
+58/301504/campos_512_v4
+58/301506/campos_512_v4
+58/301510/campos_512_v4
+58/301521/campos_512_v4
+58/301536/campos_512_v4
+58/301547/campos_512_v4
+58/301551/campos_512_v4
+58/301553/campos_512_v4
+58/301557/campos_512_v4
+58/301563/campos_512_v4
+58/301566/campos_512_v4
+58/301567/campos_512_v4
+58/301595/campos_512_v4
+58/301596/campos_512_v4
+58/301615/campos_512_v4
+58/301631/campos_512_v4
+58/301634/campos_512_v4
+58/301644/campos_512_v4
+58/301649/campos_512_v4
+58/301661/campos_512_v4
+58/301663/campos_512_v4
+58/301665/campos_512_v4
+58/301673/campos_512_v4
+58/301674/campos_512_v4
+58/301692/campos_512_v4
+58/301698/campos_512_v4
+58/301701/campos_512_v4
+58/301711/campos_512_v4
+58/301717/campos_512_v4
+58/301737/campos_512_v4
+58/301740/campos_512_v4
+58/301744/campos_512_v4
+58/301752/campos_512_v4
+58/301753/campos_512_v4
+58/301754/campos_512_v4
+58/301768/campos_512_v4
+58/301771/campos_512_v4
+58/301774/campos_512_v4
+58/301778/campos_512_v4
+58/301791/campos_512_v4
+58/301792/campos_512_v4
+58/301798/campos_512_v4
+58/301810/campos_512_v4
+58/301819/campos_512_v4
+58/301823/campos_512_v4
+58/301829/campos_512_v4
+58/301849/campos_512_v4
+58/301850/campos_512_v4
+58/301861/campos_512_v4
+58/301868/campos_512_v4
+58/301870/campos_512_v4
+58/301886/campos_512_v4
+58/301897/campos_512_v4
+58/301908/campos_512_v4
+58/301918/campos_512_v4
+58/301921/campos_512_v4
+58/301940/campos_512_v4
+58/301960/campos_512_v4
+58/301973/campos_512_v4
+58/301995/campos_512_v4
+58/302002/campos_512_v4
+58/302005/campos_512_v4
+58/302009/campos_512_v4
+58/302010/campos_512_v4
+58/302015/campos_512_v4
+58/302017/campos_512_v4
+58/302018/campos_512_v4
+58/302020/campos_512_v4
+58/302022/campos_512_v4
+58/302031/campos_512_v4
+58/302036/campos_512_v4
+58/302048/campos_512_v4
+58/302054/campos_512_v4
+58/302058/campos_512_v4
+58/302063/campos_512_v4
+58/302065/campos_512_v4
+58/302068/campos_512_v4
+58/302071/campos_512_v4
+58/302074/campos_512_v4
+58/302086/campos_512_v4
+58/302093/campos_512_v4
+58/302096/campos_512_v4
+58/302114/campos_512_v4
+58/302120/campos_512_v4
+58/302140/campos_512_v4
+58/302142/campos_512_v4
+58/302148/campos_512_v4
+58/302152/campos_512_v4
+58/302162/campos_512_v4
+58/302166/campos_512_v4
+58/302171/campos_512_v4
+58/302178/campos_512_v4
+58/302179/campos_512_v4
+58/302183/campos_512_v4
+58/302186/campos_512_v4
+58/302189/campos_512_v4
+58/302196/campos_512_v4
+58/302197/campos_512_v4
+58/302205/campos_512_v4
+58/302226/campos_512_v4
+58/302238/campos_512_v4
+58/302244/campos_512_v4
+58/302245/campos_512_v4
+58/302249/campos_512_v4
+58/302262/campos_512_v4
+58/302266/campos_512_v4
+58/302269/campos_512_v4
+58/302272/campos_512_v4
+58/302276/campos_512_v4
+58/302286/campos_512_v4
+58/302290/campos_512_v4
+58/302292/campos_512_v4
+58/302323/campos_512_v4
+58/302326/campos_512_v4
+58/302338/campos_512_v4
+58/302345/campos_512_v4
+58/302346/campos_512_v4
+58/302355/campos_512_v4
+58/302374/campos_512_v4
+58/302375/campos_512_v4
+58/302385/campos_512_v4
+58/302391/campos_512_v4
+58/302402/campos_512_v4
+58/302407/campos_512_v4
+58/302413/campos_512_v4
+58/302422/campos_512_v4
+58/302424/campos_512_v4
+58/302447/campos_512_v4
+58/302459/campos_512_v4
+58/302470/campos_512_v4
+58/302471/campos_512_v4
+58/302495/campos_512_v4
+58/302496/campos_512_v4
+58/302498/campos_512_v4
+58/302500/campos_512_v4
+58/302506/campos_512_v4
+58/302513/campos_512_v4
+58/302517/campos_512_v4
+58/302519/campos_512_v4
+58/302528/campos_512_v4
+58/302531/campos_512_v4
+58/302533/campos_512_v4
+58/302534/campos_512_v4
+58/302536/campos_512_v4
+58/302541/campos_512_v4
+58/302551/campos_512_v4
+58/302557/campos_512_v4
+58/302560/campos_512_v4
+58/302566/campos_512_v4
+58/302581/campos_512_v4
+58/302584/campos_512_v4
+58/302591/campos_512_v4
+58/302595/campos_512_v4
+58/302596/campos_512_v4
+58/302603/campos_512_v4
+58/302606/campos_512_v4
+58/302619/campos_512_v4
+58/302626/campos_512_v4
+58/302627/campos_512_v4
+58/302628/campos_512_v4
+58/302636/campos_512_v4
+58/302649/campos_512_v4
+58/302652/campos_512_v4
+58/302662/campos_512_v4
+58/302668/campos_512_v4
+58/302677/campos_512_v4
+58/302678/campos_512_v4
+58/302686/campos_512_v4
+58/302697/campos_512_v4
+58/302699/campos_512_v4
+58/302703/campos_512_v4
+58/302730/campos_512_v4
+58/302738/campos_512_v4
+58/302739/campos_512_v4
+58/302740/campos_512_v4
+58/302742/campos_512_v4
+58/302763/campos_512_v4
+58/302765/campos_512_v4
+58/302777/campos_512_v4
+58/302779/campos_512_v4
+58/302782/campos_512_v4
+58/302790/campos_512_v4
+58/302793/campos_512_v4
+58/302797/campos_512_v4
+58/302800/campos_512_v4
+58/302801/campos_512_v4
+58/302818/campos_512_v4
+58/302819/campos_512_v4
+58/302825/campos_512_v4
+58/302864/campos_512_v4
+58/302866/campos_512_v4
+58/302892/campos_512_v4
+58/302898/campos_512_v4
+58/302899/campos_512_v4
+58/302902/campos_512_v4
+58/302904/campos_512_v4
+58/302918/campos_512_v4
+58/302923/campos_512_v4
+58/302925/campos_512_v4
+58/302926/campos_512_v4
+58/302927/campos_512_v4
+58/302930/campos_512_v4
+58/302931/campos_512_v4
+58/302945/campos_512_v4
+58/302957/campos_512_v4
+58/302970/campos_512_v4
+58/302973/campos_512_v4
+58/302983/campos_512_v4
+58/303005/campos_512_v4
+58/303007/campos_512_v4
+58/303015/campos_512_v4
+58/303023/campos_512_v4
+58/303028/campos_512_v4
+58/303031/campos_512_v4
+58/303041/campos_512_v4
+58/303042/campos_512_v4
+58/303048/campos_512_v4
+58/303054/campos_512_v4
+58/303059/campos_512_v4
+58/303065/campos_512_v4
+58/303108/campos_512_v4
+58/303114/campos_512_v4
+58/303116/campos_512_v4
+58/303118/campos_512_v4
+58/303131/campos_512_v4
+58/303132/campos_512_v4
+58/303139/campos_512_v4
+58/303144/campos_512_v4
+58/303148/campos_512_v4
+58/303154/campos_512_v4
+58/303161/campos_512_v4
+58/303163/campos_512_v4
+58/303166/campos_512_v4
+58/303169/campos_512_v4
+58/303180/campos_512_v4
+58/303200/campos_512_v4
+58/303220/campos_512_v4
+58/303239/campos_512_v4
+58/303245/campos_512_v4
+58/303247/campos_512_v4
+58/303254/campos_512_v4
+58/303261/campos_512_v4
+58/303271/campos_512_v4
+58/303275/campos_512_v4
+58/303300/campos_512_v4
+58/303307/campos_512_v4
+58/303314/campos_512_v4
+58/303318/campos_512_v4
+58/303331/campos_512_v4
+58/303335/campos_512_v4
+58/303350/campos_512_v4
+58/303353/campos_512_v4
+58/303355/campos_512_v4
+58/303362/campos_512_v4
+58/303364/campos_512_v4
+58/303371/campos_512_v4
+58/303375/campos_512_v4
+58/303377/campos_512_v4
+58/303379/campos_512_v4
+58/303384/campos_512_v4
+58/303387/campos_512_v4
+58/303409/campos_512_v4
+58/303432/campos_512_v4
+58/303435/campos_512_v4
+58/303442/campos_512_v4
+58/303446/campos_512_v4
+58/303456/campos_512_v4
+58/303459/campos_512_v4
+58/303463/campos_512_v4
+58/303477/campos_512_v4
+58/303479/campos_512_v4
+58/303498/campos_512_v4
+58/303500/campos_512_v4
+58/303530/campos_512_v4
+58/303547/campos_512_v4
+58/303557/campos_512_v4
+58/303581/campos_512_v4
+58/303589/campos_512_v4
+58/303596/campos_512_v4
+58/303609/campos_512_v4
+58/303616/campos_512_v4
+58/303632/campos_512_v4
+58/303641/campos_512_v4
+58/303651/campos_512_v4
+58/303653/campos_512_v4
+58/303663/campos_512_v4
+58/303670/campos_512_v4
+58/303672/campos_512_v4
+58/303684/campos_512_v4
+58/303691/campos_512_v4
+58/303712/campos_512_v4
+58/303714/campos_512_v4
+58/303718/campos_512_v4
+58/303720/campos_512_v4
+58/303722/campos_512_v4
+58/303733/campos_512_v4
+58/303738/campos_512_v4
+58/303745/campos_512_v4
+58/303757/campos_512_v4
+58/303759/campos_512_v4
+58/303761/campos_512_v4
+58/303762/campos_512_v4
+58/303768/campos_512_v4
+58/303772/campos_512_v4
+58/303774/campos_512_v4
+58/303779/campos_512_v4
+58/303795/campos_512_v4
+58/303798/campos_512_v4
+58/303802/campos_512_v4
+58/303803/campos_512_v4
+58/303813/campos_512_v4
+58/303816/campos_512_v4
+58/303838/campos_512_v4
+58/303839/campos_512_v4
+58/303846/campos_512_v4
+58/303854/campos_512_v4
+58/303857/campos_512_v4
+58/303882/campos_512_v4
+58/303888/campos_512_v4
+58/303891/campos_512_v4
+58/303896/campos_512_v4
+58/303898/campos_512_v4
+58/303907/campos_512_v4
+58/303917/campos_512_v4
+58/303940/campos_512_v4
+58/303941/campos_512_v4
+58/303951/campos_512_v4
+58/303967/campos_512_v4
+58/303969/campos_512_v4
+58/303979/campos_512_v4
+58/303987/campos_512_v4
+58/304017/campos_512_v4
+58/304035/campos_512_v4
+58/304046/campos_512_v4
+58/304051/campos_512_v4
+58/304060/campos_512_v4
+58/304076/campos_512_v4
+58/304077/campos_512_v4
+58/304083/campos_512_v4
+58/304099/campos_512_v4
+58/304130/campos_512_v4
+58/304139/campos_512_v4
+58/304158/campos_512_v4
+58/304164/campos_512_v4
+58/304170/campos_512_v4
+58/304175/campos_512_v4
+58/304191/campos_512_v4
+58/304195/campos_512_v4
+58/304212/campos_512_v4
+58/304215/campos_512_v4
+58/304218/campos_512_v4
+58/304225/campos_512_v4
+58/304229/campos_512_v4
+58/304235/campos_512_v4
+58/304236/campos_512_v4
+58/304245/campos_512_v4
+58/304270/campos_512_v4
+58/304281/campos_512_v4
+58/304290/campos_512_v4
+58/304296/campos_512_v4
+58/304302/campos_512_v4
+58/304312/campos_512_v4
+58/304317/campos_512_v4
+58/304320/campos_512_v4
+58/304321/campos_512_v4
+58/304327/campos_512_v4
+58/304330/campos_512_v4
+58/304331/campos_512_v4
+58/304342/campos_512_v4
+58/304358/campos_512_v4
+58/304368/campos_512_v4
+58/304374/campos_512_v4
+58/304375/campos_512_v4
+58/304388/campos_512_v4
+58/304403/campos_512_v4
+58/304410/campos_512_v4
+58/304421/campos_512_v4
+58/304431/campos_512_v4
+58/304435/campos_512_v4
+58/304440/campos_512_v4
+58/304443/campos_512_v4
+58/304457/campos_512_v4
+58/304465/campos_512_v4
+58/304487/campos_512_v4
+58/304501/campos_512_v4
+58/304503/campos_512_v4
+58/304505/campos_512_v4
+58/304539/campos_512_v4
+58/304547/campos_512_v4
+58/304562/campos_512_v4
+58/304571/campos_512_v4
+58/304588/campos_512_v4
+58/304604/campos_512_v4
+58/304610/campos_512_v4
+58/304612/campos_512_v4
+58/304616/campos_512_v4
+58/304619/campos_512_v4
+58/304640/campos_512_v4
+58/304644/campos_512_v4
+58/304679/campos_512_v4
+58/304680/campos_512_v4
+58/304686/campos_512_v4
+58/304687/campos_512_v4
+58/304688/campos_512_v4
+58/304691/campos_512_v4
+58/304698/campos_512_v4
+58/304710/campos_512_v4
+58/304715/campos_512_v4
+58/304718/campos_512_v4
+58/304723/campos_512_v4
+58/304727/campos_512_v4
+58/304728/campos_512_v4
+58/304734/campos_512_v4
+58/304737/campos_512_v4
+58/304748/campos_512_v4
+58/304749/campos_512_v4
+58/304763/campos_512_v4
+58/304799/campos_512_v4
+58/304809/campos_512_v4
+58/304813/campos_512_v4
+58/304817/campos_512_v4
+58/304821/campos_512_v4
+58/304825/campos_512_v4
+58/304828/campos_512_v4
+58/304836/campos_512_v4
+58/304837/campos_512_v4
+58/304851/campos_512_v4
+58/304883/campos_512_v4
+58/304886/campos_512_v4
+58/304898/campos_512_v4
+58/304901/campos_512_v4
+58/304904/campos_512_v4
+58/304917/campos_512_v4
+58/304918/campos_512_v4
+58/304938/campos_512_v4
+58/304943/campos_512_v4
+58/304944/campos_512_v4
+58/304947/campos_512_v4
+59/305007/campos_512_v4
+59/305029/campos_512_v4
+59/305031/campos_512_v4
+59/305032/campos_512_v4
+59/305047/campos_512_v4
+59/305051/campos_512_v4
+59/305059/campos_512_v4
+59/305066/campos_512_v4
+59/305070/campos_512_v4
+59/305073/campos_512_v4
+59/305076/campos_512_v4
+59/305080/campos_512_v4
+59/305084/campos_512_v4
+59/305098/campos_512_v4
+59/305114/campos_512_v4
+59/305118/campos_512_v4
+59/305130/campos_512_v4
+59/305131/campos_512_v4
+59/305138/campos_512_v4
+59/305148/campos_512_v4
+59/305156/campos_512_v4
+59/305160/campos_512_v4
+59/305162/campos_512_v4
+59/305178/campos_512_v4
+59/305189/campos_512_v4
+59/305190/campos_512_v4
+59/305192/campos_512_v4
+59/305213/campos_512_v4
+59/305216/campos_512_v4
+59/305229/campos_512_v4
+59/305230/campos_512_v4
+59/305236/campos_512_v4
+59/305241/campos_512_v4
+59/305246/campos_512_v4
+59/305268/campos_512_v4
+59/305277/campos_512_v4
+59/305288/campos_512_v4
+59/305290/campos_512_v4
+59/305299/campos_512_v4
+59/305318/campos_512_v4
+59/305326/campos_512_v4
+59/305328/campos_512_v4
+59/305352/campos_512_v4
+59/305359/campos_512_v4
+59/305360/campos_512_v4
+59/305366/campos_512_v4
+59/305369/campos_512_v4
+59/305371/campos_512_v4
+59/305379/campos_512_v4
+59/305390/campos_512_v4
+59/305407/campos_512_v4
+59/305426/campos_512_v4
+59/305433/campos_512_v4
+59/305445/campos_512_v4
+59/305448/campos_512_v4
+59/305459/campos_512_v4
+59/305482/campos_512_v4
+59/305496/campos_512_v4
+59/305498/campos_512_v4
+59/305499/campos_512_v4
+59/305509/campos_512_v4
+59/305521/campos_512_v4
+59/305555/campos_512_v4
+59/305559/campos_512_v4
+59/305566/campos_512_v4
+59/305574/campos_512_v4
+59/305588/campos_512_v4
+59/305607/campos_512_v4
+59/305613/campos_512_v4
+59/305629/campos_512_v4
+59/305635/campos_512_v4
+59/305645/campos_512_v4
+59/305666/campos_512_v4
+59/305678/campos_512_v4
+59/305687/campos_512_v4
+59/305689/campos_512_v4
+59/305710/campos_512_v4
+59/305719/campos_512_v4
+59/305722/campos_512_v4
+59/305737/campos_512_v4
+59/305739/campos_512_v4
+59/305750/campos_512_v4
+59/305757/campos_512_v4
+59/305758/campos_512_v4
+59/305790/campos_512_v4
+59/305818/campos_512_v4
+59/305837/campos_512_v4
+59/305843/campos_512_v4
+59/305847/campos_512_v4
+59/305863/campos_512_v4
+59/305870/campos_512_v4
+59/305873/campos_512_v4
+59/305875/campos_512_v4
+59/305886/campos_512_v4
+59/305895/campos_512_v4
+59/305899/campos_512_v4
+59/305912/campos_512_v4
+59/305928/campos_512_v4
+59/305950/campos_512_v4
+59/305953/campos_512_v4
+59/305971/campos_512_v4
+59/305973/campos_512_v4
+59/305975/campos_512_v4
+59/305984/campos_512_v4
+59/305985/campos_512_v4
+59/306011/campos_512_v4
+59/306013/campos_512_v4
+59/306028/campos_512_v4
+59/306030/campos_512_v4
+59/306032/campos_512_v4
+59/306036/campos_512_v4
+59/306038/campos_512_v4
+59/306046/campos_512_v4
+59/306047/campos_512_v4
+59/306073/campos_512_v4
+59/306082/campos_512_v4
+59/306135/campos_512_v4
+59/306151/campos_512_v4
+59/306157/campos_512_v4
+59/306160/campos_512_v4
+59/306173/campos_512_v4
+59/306185/campos_512_v4
+59/306190/campos_512_v4
+59/306192/campos_512_v4
+59/306195/campos_512_v4
+59/306203/campos_512_v4
+59/306204/campos_512_v4
+59/306205/campos_512_v4
+59/306220/campos_512_v4
+59/306222/campos_512_v4
+59/306228/campos_512_v4
+59/306229/campos_512_v4
+59/306235/campos_512_v4
+59/306241/campos_512_v4
+59/306245/campos_512_v4
+59/306246/campos_512_v4
+59/306251/campos_512_v4
+59/306253/campos_512_v4
+59/306259/campos_512_v4
+59/306275/campos_512_v4
+59/306276/campos_512_v4
+59/306286/campos_512_v4
+59/306291/campos_512_v4
+59/306296/campos_512_v4
+59/306317/campos_512_v4
+59/306358/campos_512_v4
+59/306359/campos_512_v4
+59/306360/campos_512_v4
+59/306364/campos_512_v4
+59/306369/campos_512_v4
+59/306379/campos_512_v4
+59/306387/campos_512_v4
+59/306392/campos_512_v4
+59/306405/campos_512_v4
+59/306424/campos_512_v4
+59/306437/campos_512_v4
+59/306438/campos_512_v4
+59/306443/campos_512_v4
+59/306445/campos_512_v4
+59/306452/campos_512_v4
+59/306479/campos_512_v4
+59/306501/campos_512_v4
+59/306504/campos_512_v4
+59/306515/campos_512_v4
+59/306527/campos_512_v4
+59/306535/campos_512_v4
+59/306536/campos_512_v4
+59/306541/campos_512_v4
+59/306554/campos_512_v4
+59/306562/campos_512_v4
+59/306564/campos_512_v4
+59/306570/campos_512_v4
+59/306575/campos_512_v4
+59/306598/campos_512_v4
+59/306609/campos_512_v4
+59/306612/campos_512_v4
+59/306613/campos_512_v4
+59/306624/campos_512_v4
+59/306626/campos_512_v4
+59/306627/campos_512_v4
+59/306634/campos_512_v4
+59/306636/campos_512_v4
+59/306642/campos_512_v4
+59/306656/campos_512_v4
+59/306663/campos_512_v4
+59/306667/campos_512_v4
+59/306669/campos_512_v4
+59/306684/campos_512_v4
+59/306686/campos_512_v4
+59/306693/campos_512_v4
+59/306694/campos_512_v4
+59/306707/campos_512_v4
+59/306723/campos_512_v4
+59/306736/campos_512_v4
+59/306747/campos_512_v4
+59/306762/campos_512_v4
+59/306775/campos_512_v4
+59/306777/campos_512_v4
+59/306786/campos_512_v4
+59/306803/campos_512_v4
+59/306806/campos_512_v4
+59/306812/campos_512_v4
+59/306818/campos_512_v4
+59/306819/campos_512_v4
+59/306827/campos_512_v4
+59/306840/campos_512_v4
+59/306857/campos_512_v4
+59/306875/campos_512_v4
+59/306877/campos_512_v4
+59/306900/campos_512_v4
+59/306943/campos_512_v4
+59/306961/campos_512_v4
+59/306968/campos_512_v4
+59/306969/campos_512_v4
+59/306976/campos_512_v4
+59/306983/campos_512_v4
+59/306984/campos_512_v4
+59/306988/campos_512_v4
+59/306991/campos_512_v4
+59/306998/campos_512_v4
+59/307005/campos_512_v4
+59/307011/campos_512_v4
+59/307037/campos_512_v4
+59/307042/campos_512_v4
+59/307052/campos_512_v4
+59/307053/campos_512_v4
+59/307059/campos_512_v4
+59/307061/campos_512_v4
+59/307062/campos_512_v4
+59/307067/campos_512_v4
+59/307071/campos_512_v4
+59/307076/campos_512_v4
+59/307078/campos_512_v4
+59/307107/campos_512_v4
+59/307128/campos_512_v4
+59/307133/campos_512_v4
+59/307144/campos_512_v4
+59/307147/campos_512_v4
+59/307152/campos_512_v4
+59/307187/campos_512_v4
+59/307189/campos_512_v4
+59/307208/campos_512_v4
+59/307216/campos_512_v4
+59/307218/campos_512_v4
+59/307232/campos_512_v4
+59/307235/campos_512_v4
+59/307236/campos_512_v4
+59/307238/campos_512_v4
+59/307247/campos_512_v4
+59/307261/campos_512_v4
+59/307267/campos_512_v4
+59/307268/campos_512_v4
+59/307270/campos_512_v4
+59/307272/campos_512_v4
+59/307276/campos_512_v4
+59/307283/campos_512_v4
+59/307287/campos_512_v4
+59/307293/campos_512_v4
+59/307295/campos_512_v4
+59/307306/campos_512_v4
+59/307325/campos_512_v4
+59/307344/campos_512_v4
+59/307359/campos_512_v4
+59/307374/campos_512_v4
+59/307381/campos_512_v4
+59/307384/campos_512_v4
+59/307394/campos_512_v4
+59/307401/campos_512_v4
+59/307418/campos_512_v4
+59/307445/campos_512_v4
+59/307457/campos_512_v4
+59/307459/campos_512_v4
+59/307472/campos_512_v4
+59/307476/campos_512_v4
+59/307483/campos_512_v4
+59/307485/campos_512_v4
+59/307500/campos_512_v4
+59/307503/campos_512_v4
+59/307531/campos_512_v4
+59/307538/campos_512_v4
+59/307541/campos_512_v4
+59/307547/campos_512_v4
+59/307549/campos_512_v4
+59/307552/campos_512_v4
+59/307560/campos_512_v4
+59/307568/campos_512_v4
+59/307570/campos_512_v4
+59/307582/campos_512_v4
+59/307584/campos_512_v4
+59/307587/campos_512_v4
+59/307596/campos_512_v4
+59/307597/campos_512_v4
+59/307615/campos_512_v4
+59/307617/campos_512_v4
+59/307630/campos_512_v4
+59/307646/campos_512_v4
+59/307654/campos_512_v4
+59/307664/campos_512_v4
+59/307666/campos_512_v4
+59/307672/campos_512_v4
+59/307676/campos_512_v4
+59/307727/campos_512_v4
+59/307735/campos_512_v4
+59/307740/campos_512_v4
+59/307741/campos_512_v4
+59/307744/campos_512_v4
+59/307748/campos_512_v4
+59/307764/campos_512_v4
+59/307767/campos_512_v4
+59/307768/campos_512_v4
+59/307769/campos_512_v4
+59/307781/campos_512_v4
+59/307787/campos_512_v4
+59/307789/campos_512_v4
+59/307793/campos_512_v4
+59/307796/campos_512_v4
+59/307814/campos_512_v4
+59/307816/campos_512_v4
+59/307817/campos_512_v4
+59/307842/campos_512_v4
+59/307844/campos_512_v4
+59/307854/campos_512_v4
+59/307859/campos_512_v4
+59/307863/campos_512_v4
+59/307883/campos_512_v4
+59/307889/campos_512_v4
+59/307906/campos_512_v4
+59/307915/campos_512_v4
+59/307917/campos_512_v4
+59/307918/campos_512_v4
+59/307926/campos_512_v4
+59/307940/campos_512_v4
+59/307943/campos_512_v4
+59/307947/campos_512_v4
+59/307950/campos_512_v4
+59/307953/campos_512_v4
+59/307957/campos_512_v4
+59/307976/campos_512_v4
+59/307980/campos_512_v4
+59/307981/campos_512_v4
+59/307994/campos_512_v4
+59/308003/campos_512_v4
+59/308006/campos_512_v4
+59/308023/campos_512_v4
+59/308026/campos_512_v4
+59/308036/campos_512_v4
+59/308045/campos_512_v4
+59/308049/campos_512_v4
+59/308054/campos_512_v4
+59/308057/campos_512_v4
+59/308058/campos_512_v4
+59/308063/campos_512_v4
+59/308071/campos_512_v4
+59/308101/campos_512_v4
+59/308106/campos_512_v4
+59/308109/campos_512_v4
+59/308114/campos_512_v4
+59/308117/campos_512_v4
+59/308118/campos_512_v4
+59/308123/campos_512_v4
+59/308130/campos_512_v4
+59/308146/campos_512_v4
+59/308148/campos_512_v4
+59/308151/campos_512_v4
+59/308158/campos_512_v4
+59/308172/campos_512_v4
+59/308173/campos_512_v4
+59/308180/campos_512_v4
+59/308203/campos_512_v4
+59/308213/campos_512_v4
+59/308224/campos_512_v4
+59/308245/campos_512_v4
+59/308246/campos_512_v4
+59/308254/campos_512_v4
+59/308257/campos_512_v4
+59/308264/campos_512_v4
+59/308276/campos_512_v4
+59/308279/campos_512_v4
+59/308294/campos_512_v4
+59/308299/campos_512_v4
+59/308300/campos_512_v4
+59/308303/campos_512_v4
+59/308308/campos_512_v4
+59/308312/campos_512_v4
+59/308313/campos_512_v4
+59/308317/campos_512_v4
+59/308322/campos_512_v4
+59/308342/campos_512_v4
+59/308358/campos_512_v4
+59/308362/campos_512_v4
+59/308363/campos_512_v4
+59/308365/campos_512_v4
+59/308369/campos_512_v4
+59/308373/campos_512_v4
+59/308377/campos_512_v4
+59/308403/campos_512_v4
+59/308413/campos_512_v4
+59/308422/campos_512_v4
+59/308432/campos_512_v4
+59/308436/campos_512_v4
+59/308439/campos_512_v4
+59/308446/campos_512_v4
+59/308462/campos_512_v4
+59/308466/campos_512_v4
+59/308474/campos_512_v4
+59/308486/campos_512_v4
+59/308492/campos_512_v4
+59/308501/campos_512_v4
+59/308511/campos_512_v4
+59/308529/campos_512_v4
+59/308546/campos_512_v4
+59/308549/campos_512_v4
+59/308552/campos_512_v4
+59/308553/campos_512_v4
+59/308557/campos_512_v4
+59/308567/campos_512_v4
+59/308575/campos_512_v4
+59/308579/campos_512_v4
+59/308583/campos_512_v4
+59/308598/campos_512_v4
+59/308607/campos_512_v4
+59/308611/campos_512_v4
+59/308627/campos_512_v4
+59/308634/campos_512_v4
+59/308640/campos_512_v4
+59/308641/campos_512_v4
+59/308652/campos_512_v4
+59/308675/campos_512_v4
+59/308683/campos_512_v4
+59/308686/campos_512_v4
+59/308700/campos_512_v4
+59/308701/campos_512_v4
+59/308706/campos_512_v4
+59/308717/campos_512_v4
+59/308720/campos_512_v4
+59/308729/campos_512_v4
+59/308742/campos_512_v4
+59/308743/campos_512_v4
+59/308748/campos_512_v4
+59/308759/campos_512_v4
+59/308763/campos_512_v4
+59/308764/campos_512_v4
+59/308786/campos_512_v4
+59/308794/campos_512_v4
+59/308800/campos_512_v4
+59/308829/campos_512_v4
+59/308831/campos_512_v4
+59/308833/campos_512_v4
+59/308842/campos_512_v4
+59/308843/campos_512_v4
+59/308860/campos_512_v4
+59/308861/campos_512_v4
+59/308876/campos_512_v4
+59/308913/campos_512_v4
+59/308927/campos_512_v4
+59/308944/campos_512_v4
+59/308945/campos_512_v4
+59/308947/campos_512_v4
+59/308949/campos_512_v4
+59/308953/campos_512_v4
+59/308959/campos_512_v4
+59/308963/campos_512_v4
+59/308969/campos_512_v4
+59/308975/campos_512_v4
+59/308988/campos_512_v4
+59/308990/campos_512_v4
+59/308992/campos_512_v4
+59/308997/campos_512_v4
+59/309010/campos_512_v4
+59/309011/campos_512_v4
+59/309036/campos_512_v4
+59/309037/campos_512_v4
+59/309038/campos_512_v4
+59/309042/campos_512_v4
+59/309049/campos_512_v4
+59/309059/campos_512_v4
+59/309088/campos_512_v4
+59/309102/campos_512_v4
+59/309109/campos_512_v4
+59/309111/campos_512_v4
+59/309115/campos_512_v4
+59/309123/campos_512_v4
+59/309146/campos_512_v4
+59/309155/campos_512_v4
+59/309156/campos_512_v4
+59/309166/campos_512_v4
+59/309183/campos_512_v4
+59/309191/campos_512_v4
+59/309214/campos_512_v4
+59/309216/campos_512_v4
+59/309220/campos_512_v4
+59/309226/campos_512_v4
+59/309227/campos_512_v4
+59/309228/campos_512_v4
+59/309231/campos_512_v4
+59/309249/campos_512_v4
+59/309252/campos_512_v4
+59/309255/campos_512_v4
+59/309265/campos_512_v4
+59/309281/campos_512_v4
+59/309285/campos_512_v4
+59/309288/campos_512_v4
+59/309292/campos_512_v4
+59/309296/campos_512_v4
+59/309298/campos_512_v4
+59/309299/campos_512_v4
+59/309300/campos_512_v4
+59/309301/campos_512_v4
+59/309304/campos_512_v4
+59/309325/campos_512_v4
+59/309327/campos_512_v4
+59/309343/campos_512_v4
+59/309374/campos_512_v4
+59/309381/campos_512_v4
+59/309394/campos_512_v4
+59/309411/campos_512_v4
+59/309415/campos_512_v4
+59/309418/campos_512_v4
+59/309428/campos_512_v4
+59/309436/campos_512_v4
+59/309448/campos_512_v4
+59/309451/campos_512_v4
+59/309471/campos_512_v4
+59/309474/campos_512_v4
+59/309485/campos_512_v4
+59/309507/campos_512_v4
+59/309515/campos_512_v4
+59/309523/campos_512_v4
+59/309555/campos_512_v4
+59/309568/campos_512_v4
+59/309569/campos_512_v4
+59/309571/campos_512_v4
+59/309578/campos_512_v4
+59/309587/campos_512_v4
+59/309593/campos_512_v4
+59/309619/campos_512_v4
+59/309620/campos_512_v4
+59/309622/campos_512_v4
+59/309624/campos_512_v4
+59/309627/campos_512_v4
+59/309631/campos_512_v4
+59/309632/campos_512_v4
+59/309637/campos_512_v4
+59/309653/campos_512_v4
+59/309662/campos_512_v4
+59/309667/campos_512_v4
+59/309676/campos_512_v4
+59/309679/campos_512_v4
+59/309692/campos_512_v4
+59/309702/campos_512_v4
+59/309705/campos_512_v4
+59/309709/campos_512_v4
+59/309712/campos_512_v4
+59/309737/campos_512_v4
+59/309744/campos_512_v4
+59/309755/campos_512_v4
+59/309760/campos_512_v4
+59/309772/campos_512_v4
+59/309775/campos_512_v4
+59/309783/campos_512_v4
+59/309788/campos_512_v4
+59/309802/campos_512_v4
+59/309807/campos_512_v4
+59/309843/campos_512_v4
+59/309848/campos_512_v4
+59/309859/campos_512_v4
+59/309873/campos_512_v4
+59/309881/campos_512_v4
+59/309883/campos_512_v4
+59/309884/campos_512_v4
+59/309902/campos_512_v4
+59/309910/campos_512_v4
+59/309915/campos_512_v4
+59/309916/campos_512_v4
+59/309921/campos_512_v4
+59/309945/campos_512_v4
+59/309947/campos_512_v4
+59/309955/campos_512_v4
+59/309958/campos_512_v4
+59/309961/campos_512_v4
+59/309971/campos_512_v4
+59/309973/campos_512_v4
+59/309980/campos_512_v4
+59/309984/campos_512_v4
+59/309997/campos_512_v4
+6/40004/campos_512_v4
+6/40030/campos_512_v4
+6/40031/campos_512_v4
+6/40046/campos_512_v4
+6/40064/campos_512_v4
+6/40071/campos_512_v4
+6/40073/campos_512_v4
+6/40074/campos_512_v4
+6/40082/campos_512_v4
+6/40087/campos_512_v4
+6/40096/campos_512_v4
+6/40104/campos_512_v4
+6/40124/campos_512_v4
+6/40126/campos_512_v4
+6/40128/campos_512_v4
+6/40135/campos_512_v4
+6/40138/campos_512_v4
+6/40141/campos_512_v4
+6/40149/campos_512_v4
+6/40161/campos_512_v4
+6/40167/campos_512_v4
+6/40182/campos_512_v4
+6/40183/campos_512_v4
+6/40193/campos_512_v4
+6/40195/campos_512_v4
+6/40201/campos_512_v4
+6/40206/campos_512_v4
+6/40214/campos_512_v4
+6/40217/campos_512_v4
+6/40226/campos_512_v4
+6/40233/campos_512_v4
+6/40237/campos_512_v4
+6/40252/campos_512_v4
+6/40270/campos_512_v4
+6/40279/campos_512_v4
+6/40299/campos_512_v4
+6/40324/campos_512_v4
+6/40333/campos_512_v4
+6/40345/campos_512_v4
+6/40351/campos_512_v4
+6/40352/campos_512_v4
+6/40353/campos_512_v4
+6/40358/campos_512_v4
+6/40365/campos_512_v4
+6/40374/campos_512_v4
+6/40402/campos_512_v4
+6/40404/campos_512_v4
+6/40419/campos_512_v4
+6/40421/campos_512_v4
+6/40422/campos_512_v4
+6/40433/campos_512_v4
+6/40439/campos_512_v4
+6/40450/campos_512_v4
+6/40455/campos_512_v4
+6/40464/campos_512_v4
+6/40480/campos_512_v4
+6/40485/campos_512_v4
+6/40491/campos_512_v4
+6/40494/campos_512_v4
+6/40495/campos_512_v4
+6/40496/campos_512_v4
+6/40501/campos_512_v4
+6/40504/campos_512_v4
+6/40513/campos_512_v4
+6/40534/campos_512_v4
+6/40543/campos_512_v4
+6/40544/campos_512_v4
+6/40549/campos_512_v4
+6/40552/campos_512_v4
+6/40558/campos_512_v4
+6/40562/campos_512_v4
+6/40568/campos_512_v4
+6/40580/campos_512_v4
+6/40581/campos_512_v4
+6/40596/campos_512_v4
+6/40609/campos_512_v4
+6/40611/campos_512_v4
+6/40612/campos_512_v4
+6/40613/campos_512_v4
+6/40626/campos_512_v4
+6/40627/campos_512_v4
+6/40629/campos_512_v4
+6/40634/campos_512_v4
+6/40651/campos_512_v4
+6/40653/campos_512_v4
+6/40661/campos_512_v4
+6/40668/campos_512_v4
+6/40672/campos_512_v4
+6/40684/campos_512_v4
+6/40685/campos_512_v4
+6/40701/campos_512_v4
+6/40703/campos_512_v4
+6/40710/campos_512_v4
+6/40720/campos_512_v4
+6/40733/campos_512_v4
+6/40734/campos_512_v4
+6/40736/campos_512_v4
+6/40744/campos_512_v4
+6/40746/campos_512_v4
+6/40764/campos_512_v4
+6/40768/campos_512_v4
+6/40775/campos_512_v4
+6/40787/campos_512_v4
+6/40799/campos_512_v4
+6/40812/campos_512_v4
+6/40817/campos_512_v4
+6/40842/campos_512_v4
+6/40854/campos_512_v4
+6/40862/campos_512_v4
+6/40870/campos_512_v4
+6/40873/campos_512_v4
+6/40885/campos_512_v4
+6/40899/campos_512_v4
+6/40906/campos_512_v4
+6/40908/campos_512_v4
+6/40911/campos_512_v4
+6/40924/campos_512_v4
+6/40931/campos_512_v4
+6/40933/campos_512_v4
+6/40950/campos_512_v4
+6/40952/campos_512_v4
+6/40959/campos_512_v4
+6/40969/campos_512_v4
+6/40993/campos_512_v4
+6/41014/campos_512_v4
+6/41029/campos_512_v4
+6/41033/campos_512_v4
+6/41035/campos_512_v4
+6/41042/campos_512_v4
+6/41049/campos_512_v4
+6/41067/campos_512_v4
+6/41072/campos_512_v4
+6/41075/campos_512_v4
+6/41080/campos_512_v4
+6/41100/campos_512_v4
+6/41105/campos_512_v4
+6/41109/campos_512_v4
+6/41115/campos_512_v4
+6/41124/campos_512_v4
+6/41126/campos_512_v4
+6/41152/campos_512_v4
+6/41177/campos_512_v4
+6/41180/campos_512_v4
+6/41184/campos_512_v4
+6/41195/campos_512_v4
+6/41207/campos_512_v4
+6/41212/campos_512_v4
+6/41215/campos_512_v4
+6/41217/campos_512_v4
+6/41226/campos_512_v4
+6/41247/campos_512_v4
+6/41249/campos_512_v4
+6/41257/campos_512_v4
+6/41264/campos_512_v4
+6/41267/campos_512_v4
+6/41269/campos_512_v4
+6/41287/campos_512_v4
+6/41288/campos_512_v4
+6/41298/campos_512_v4
+6/41306/campos_512_v4
+6/41318/campos_512_v4
+6/41322/campos_512_v4
+6/41330/campos_512_v4
+6/41332/campos_512_v4
+6/41341/campos_512_v4
+6/41349/campos_512_v4
+6/41352/campos_512_v4
+6/41355/campos_512_v4
+6/41366/campos_512_v4
+6/41369/campos_512_v4
+6/41393/campos_512_v4
+6/41403/campos_512_v4
+6/41419/campos_512_v4
+6/41427/campos_512_v4
+6/41439/campos_512_v4
+6/41454/campos_512_v4
+6/41461/campos_512_v4
+6/41462/campos_512_v4
+6/41463/campos_512_v4
+6/41470/campos_512_v4
+6/41478/campos_512_v4
+6/41487/campos_512_v4
+6/41491/campos_512_v4
+6/41492/campos_512_v4
+6/41505/campos_512_v4
+6/41529/campos_512_v4
+6/41531/campos_512_v4
+6/41537/campos_512_v4
+6/41544/campos_512_v4
+6/41552/campos_512_v4
+6/41568/campos_512_v4
+6/41572/campos_512_v4
+6/41600/campos_512_v4
+6/41603/campos_512_v4
+6/41605/campos_512_v4
+6/41606/campos_512_v4
+6/41614/campos_512_v4
+6/41624/campos_512_v4
+6/41629/campos_512_v4
+6/41630/campos_512_v4
+6/41633/campos_512_v4
+6/41636/campos_512_v4
+6/41644/campos_512_v4
+6/41650/campos_512_v4
+6/41683/campos_512_v4
+6/41692/campos_512_v4
+6/41706/campos_512_v4
+6/41709/campos_512_v4
+6/41719/campos_512_v4
+6/41722/campos_512_v4
+6/41728/campos_512_v4
+6/41732/campos_512_v4
+6/41742/campos_512_v4
+6/41765/campos_512_v4
+6/41778/campos_512_v4
+6/41788/campos_512_v4
+6/41789/campos_512_v4
+6/41808/campos_512_v4
+6/41821/campos_512_v4
+6/41822/campos_512_v4
+6/41846/campos_512_v4
+6/41848/campos_512_v4
+6/41851/campos_512_v4
+6/41855/campos_512_v4
+6/41865/campos_512_v4
+6/41869/campos_512_v4
+6/41874/campos_512_v4
+6/41887/campos_512_v4
+6/41888/campos_512_v4
+6/41890/campos_512_v4
+6/41891/campos_512_v4
+6/41896/campos_512_v4
+6/41902/campos_512_v4
+6/41912/campos_512_v4
+6/41913/campos_512_v4
+6/41924/campos_512_v4
+6/41925/campos_512_v4
+6/41928/campos_512_v4
+6/41951/campos_512_v4
+6/41954/campos_512_v4
+6/41961/campos_512_v4
+6/41962/campos_512_v4
+6/41981/campos_512_v4
+6/41983/campos_512_v4
+6/41989/campos_512_v4
+6/41999/campos_512_v4
+6/42007/campos_512_v4
+6/42014/campos_512_v4
+6/42043/campos_512_v4
+6/42050/campos_512_v4
+6/42069/campos_512_v4
+6/42081/campos_512_v4
+6/42090/campos_512_v4
+6/42093/campos_512_v4
+6/42097/campos_512_v4
+6/42106/campos_512_v4
+6/42112/campos_512_v4
+6/42137/campos_512_v4
+6/42152/campos_512_v4
+6/42163/campos_512_v4
+6/42177/campos_512_v4
+6/42186/campos_512_v4
+6/42213/campos_512_v4
+6/42221/campos_512_v4
+6/42232/campos_512_v4
+6/42242/campos_512_v4
+6/42252/campos_512_v4
+6/42278/campos_512_v4
+6/42279/campos_512_v4
+6/42285/campos_512_v4
+6/42291/campos_512_v4
+6/42295/campos_512_v4
+6/42312/campos_512_v4
+6/42320/campos_512_v4
+6/42321/campos_512_v4
+6/42328/campos_512_v4
+6/42332/campos_512_v4
+6/42338/campos_512_v4
+6/42344/campos_512_v4
+6/42363/campos_512_v4
+6/42371/campos_512_v4
+6/42375/campos_512_v4
+6/42396/campos_512_v4
+6/42405/campos_512_v4
+6/42408/campos_512_v4
+6/42410/campos_512_v4
+6/42413/campos_512_v4
+6/42428/campos_512_v4
+6/42446/campos_512_v4
+6/42453/campos_512_v4
+6/42454/campos_512_v4
+6/42455/campos_512_v4
+6/42470/campos_512_v4
+6/42480/campos_512_v4
+6/42481/campos_512_v4
+6/42482/campos_512_v4
+6/42505/campos_512_v4
+6/42506/campos_512_v4
+6/42507/campos_512_v4
+6/42514/campos_512_v4
+6/42517/campos_512_v4
+6/42519/campos_512_v4
+6/42543/campos_512_v4
+6/42552/campos_512_v4
+6/42559/campos_512_v4
+6/42601/campos_512_v4
+6/42610/campos_512_v4
+6/42635/campos_512_v4
+6/42645/campos_512_v4
+6/42646/campos_512_v4
+6/42648/campos_512_v4
+6/42656/campos_512_v4
+6/42663/campos_512_v4
+6/42667/campos_512_v4
+6/42687/campos_512_v4
+6/42690/campos_512_v4
+6/42704/campos_512_v4
+6/42720/campos_512_v4
+6/42739/campos_512_v4
+6/42745/campos_512_v4
+6/42758/campos_512_v4
+6/42765/campos_512_v4
+6/42773/campos_512_v4
+6/42779/campos_512_v4
+6/42810/campos_512_v4
+6/42814/campos_512_v4
+6/42819/campos_512_v4
+6/42820/campos_512_v4
+6/42829/campos_512_v4
+6/42855/campos_512_v4
+6/42868/campos_512_v4
+6/42869/campos_512_v4
+6/42887/campos_512_v4
+6/42936/campos_512_v4
+6/42940/campos_512_v4
+6/42945/campos_512_v4
+6/42946/campos_512_v4
+6/42952/campos_512_v4
+6/42955/campos_512_v4
+6/42959/campos_512_v4
+6/42961/campos_512_v4
+6/42968/campos_512_v4
+6/42978/campos_512_v4
+6/42983/campos_512_v4
+6/42987/campos_512_v4
+6/42999/campos_512_v4
+6/43001/campos_512_v4
+6/43009/campos_512_v4
+6/43022/campos_512_v4
+6/43044/campos_512_v4
+6/43050/campos_512_v4
+6/43056/campos_512_v4
+6/43077/campos_512_v4
+6/43095/campos_512_v4
+6/43103/campos_512_v4
+6/43106/campos_512_v4
+6/43127/campos_512_v4
+6/43133/campos_512_v4
+6/43136/campos_512_v4
+6/43141/campos_512_v4
+6/43148/campos_512_v4
+6/43152/campos_512_v4
+6/43153/campos_512_v4
+6/43162/campos_512_v4
+6/43166/campos_512_v4
+6/43173/campos_512_v4
+6/43187/campos_512_v4
+6/43202/campos_512_v4
+6/43222/campos_512_v4
+6/43229/campos_512_v4
+6/43235/campos_512_v4
+6/43247/campos_512_v4
+6/43255/campos_512_v4
+6/43256/campos_512_v4
+6/43258/campos_512_v4
+6/43262/campos_512_v4
+6/43266/campos_512_v4
+6/43271/campos_512_v4
+6/43289/campos_512_v4
+6/43295/campos_512_v4
+6/43308/campos_512_v4
+6/43313/campos_512_v4
+6/43322/campos_512_v4
+6/43323/campos_512_v4
+6/43327/campos_512_v4
+6/43330/campos_512_v4
+6/43334/campos_512_v4
+6/43339/campos_512_v4
+6/43353/campos_512_v4
+6/43364/campos_512_v4
+6/43373/campos_512_v4
+6/43376/campos_512_v4
+6/43417/campos_512_v4
+6/43421/campos_512_v4
+6/43424/campos_512_v4
+6/43463/campos_512_v4
+6/43478/campos_512_v4
+6/43504/campos_512_v4
+6/43505/campos_512_v4
+6/43510/campos_512_v4
+6/43537/campos_512_v4
+6/43543/campos_512_v4
+6/43548/campos_512_v4
+6/43583/campos_512_v4
+6/43591/campos_512_v4
+6/43598/campos_512_v4
+6/43604/campos_512_v4
+6/43618/campos_512_v4
+6/43629/campos_512_v4
+6/43636/campos_512_v4
+6/43639/campos_512_v4
+6/43652/campos_512_v4
+6/43657/campos_512_v4
+6/43658/campos_512_v4
+6/43662/campos_512_v4
+6/43663/campos_512_v4
+6/43668/campos_512_v4
+6/43671/campos_512_v4
+6/43693/campos_512_v4
+6/43728/campos_512_v4
+6/43732/campos_512_v4
+6/43748/campos_512_v4
+6/43763/campos_512_v4
+6/43764/campos_512_v4
+6/43768/campos_512_v4
+6/43773/campos_512_v4
+6/43774/campos_512_v4
+6/43783/campos_512_v4
+6/43784/campos_512_v4
+6/43804/campos_512_v4
+6/43807/campos_512_v4
+6/43810/campos_512_v4
+6/43817/campos_512_v4
+6/43827/campos_512_v4
+6/43853/campos_512_v4
+6/43855/campos_512_v4
+6/43866/campos_512_v4
+6/43870/campos_512_v4
+6/43887/campos_512_v4
+6/43925/campos_512_v4
+6/43932/campos_512_v4
+6/43954/campos_512_v4
+6/43961/campos_512_v4
+6/43962/campos_512_v4
+6/43979/campos_512_v4
+6/43985/campos_512_v4
+6/43999/campos_512_v4
+6/44004/campos_512_v4
+6/44020/campos_512_v4
+6/44025/campos_512_v4
+6/44043/campos_512_v4
+6/44044/campos_512_v4
+6/44049/campos_512_v4
+6/44050/campos_512_v4
+6/44055/campos_512_v4
+6/44063/campos_512_v4
+6/44064/campos_512_v4
+6/44066/campos_512_v4
+6/44103/campos_512_v4
+6/44112/campos_512_v4
+6/44124/campos_512_v4
+6/44126/campos_512_v4
+6/44141/campos_512_v4
+6/44172/campos_512_v4
+6/44181/campos_512_v4
+6/44201/campos_512_v4
+6/44211/campos_512_v4
+6/44232/campos_512_v4
+6/44239/campos_512_v4
+6/44249/campos_512_v4
+6/44253/campos_512_v4
+6/44254/campos_512_v4
+6/44291/campos_512_v4
+6/44306/campos_512_v4
+6/44321/campos_512_v4
+6/44323/campos_512_v4
+6/44333/campos_512_v4
+6/44341/campos_512_v4
+6/44343/campos_512_v4
+6/44357/campos_512_v4
+6/44380/campos_512_v4
+6/44381/campos_512_v4
+6/44420/campos_512_v4
+6/44432/campos_512_v4
+6/44444/campos_512_v4
+6/44500/campos_512_v4
+6/44502/campos_512_v4
+6/44527/campos_512_v4
+6/44536/campos_512_v4
+6/44539/campos_512_v4
+6/44542/campos_512_v4
+6/44543/campos_512_v4
+6/44562/campos_512_v4
+6/44579/campos_512_v4
+6/44596/campos_512_v4
+6/44610/campos_512_v4
+6/44618/campos_512_v4
+6/44620/campos_512_v4
+6/44627/campos_512_v4
+6/44649/campos_512_v4
+6/44657/campos_512_v4
+6/44662/campos_512_v4
+6/44683/campos_512_v4
+6/44711/campos_512_v4
+6/44713/campos_512_v4
+6/44735/campos_512_v4
+6/44740/campos_512_v4
+6/44744/campos_512_v4
+6/44746/campos_512_v4
+6/44756/campos_512_v4
+6/44775/campos_512_v4
+6/44780/campos_512_v4
+6/44784/campos_512_v4
+6/44800/campos_512_v4
+6/44802/campos_512_v4
+6/44819/campos_512_v4
+6/44825/campos_512_v4
+6/44850/campos_512_v4
+6/44858/campos_512_v4
+6/44860/campos_512_v4
+6/44864/campos_512_v4
+6/44895/campos_512_v4
+6/44907/campos_512_v4
+6/44918/campos_512_v4
+6/44919/campos_512_v4
+6/44932/campos_512_v4
+6/44941/campos_512_v4
+6/44943/campos_512_v4
+6/44966/campos_512_v4
+6/44976/campos_512_v4
+6/44977/campos_512_v4
+6/45001/campos_512_v4
+60/310002/campos_512_v4
+60/310007/campos_512_v4
+60/310012/campos_512_v4
+60/310032/campos_512_v4
+60/310051/campos_512_v4
+60/310062/campos_512_v4
+60/310063/campos_512_v4
+60/310081/campos_512_v4
+60/310087/campos_512_v4
+60/310093/campos_512_v4
+60/310108/campos_512_v4
+60/310111/campos_512_v4
+60/310123/campos_512_v4
+60/310137/campos_512_v4
+60/310147/campos_512_v4
+60/310156/campos_512_v4
+60/310160/campos_512_v4
+60/310163/campos_512_v4
+60/310171/campos_512_v4
+60/310179/campos_512_v4
+60/310181/campos_512_v4
+60/310182/campos_512_v4
+60/310185/campos_512_v4
+60/310188/campos_512_v4
+60/310195/campos_512_v4
+60/310204/campos_512_v4
+60/310211/campos_512_v4
+60/310212/campos_512_v4
+60/310213/campos_512_v4
+60/310225/campos_512_v4
+60/310229/campos_512_v4
+60/310230/campos_512_v4
+60/310232/campos_512_v4
+60/310234/campos_512_v4
+60/310239/campos_512_v4
+60/310241/campos_512_v4
+60/310259/campos_512_v4
+60/310263/campos_512_v4
+60/310307/campos_512_v4
+60/310309/campos_512_v4
+60/310311/campos_512_v4
+60/310325/campos_512_v4
+60/310331/campos_512_v4
+60/310377/campos_512_v4
+60/310380/campos_512_v4
+60/310383/campos_512_v4
+60/310394/campos_512_v4
+60/310397/campos_512_v4
+60/310406/campos_512_v4
+60/310411/campos_512_v4
+60/310430/campos_512_v4
+60/310439/campos_512_v4
+60/310454/campos_512_v4
+60/310459/campos_512_v4
+60/310468/campos_512_v4
+60/310477/campos_512_v4
+60/310490/campos_512_v4
+60/310498/campos_512_v4
+60/310523/campos_512_v4
+60/310526/campos_512_v4
+60/310536/campos_512_v4
+60/310543/campos_512_v4
+60/310551/campos_512_v4
+60/310557/campos_512_v4
+60/310572/campos_512_v4
+60/310584/campos_512_v4
+60/310592/campos_512_v4
+60/310599/campos_512_v4
+60/310612/campos_512_v4
+60/310619/campos_512_v4
+60/310621/campos_512_v4
+60/310643/campos_512_v4
+60/310646/campos_512_v4
+60/310652/campos_512_v4
+60/310654/campos_512_v4
+60/310667/campos_512_v4
+60/310672/campos_512_v4
+60/310685/campos_512_v4
+60/310689/campos_512_v4
+60/310695/campos_512_v4
+60/310707/campos_512_v4
+60/310733/campos_512_v4
+60/310765/campos_512_v4
+60/310780/campos_512_v4
+60/310784/campos_512_v4
+60/310785/campos_512_v4
+60/310796/campos_512_v4
+60/310799/campos_512_v4
+60/310805/campos_512_v4
+60/310832/campos_512_v4
+60/310835/campos_512_v4
+60/310845/campos_512_v4
+60/310849/campos_512_v4
+60/310851/campos_512_v4
+60/310855/campos_512_v4
+60/310858/campos_512_v4
+60/310859/campos_512_v4
+60/310860/campos_512_v4
+60/310866/campos_512_v4
+60/310874/campos_512_v4
+60/310886/campos_512_v4
+60/310893/campos_512_v4
+60/310898/campos_512_v4
+60/310911/campos_512_v4
+60/310917/campos_512_v4
+60/310937/campos_512_v4
+60/310946/campos_512_v4
+60/310957/campos_512_v4
+60/310966/campos_512_v4
+60/310969/campos_512_v4
+60/310974/campos_512_v4
+60/310981/campos_512_v4
+60/310982/campos_512_v4
+60/310987/campos_512_v4
+60/311000/campos_512_v4
+60/311004/campos_512_v4
+60/311008/campos_512_v4
+60/311016/campos_512_v4
+60/311017/campos_512_v4
+60/311018/campos_512_v4
+60/311022/campos_512_v4
+60/311023/campos_512_v4
+60/311040/campos_512_v4
+60/311054/campos_512_v4
+60/311075/campos_512_v4
+60/311076/campos_512_v4
+60/311082/campos_512_v4
+60/311096/campos_512_v4
+60/311100/campos_512_v4
+60/311110/campos_512_v4
+60/311115/campos_512_v4
+60/311127/campos_512_v4
+60/311134/campos_512_v4
+60/311146/campos_512_v4
+60/311151/campos_512_v4
+60/311153/campos_512_v4
+60/311181/campos_512_v4
+60/311182/campos_512_v4
+60/311185/campos_512_v4
+60/311190/campos_512_v4
+60/311191/campos_512_v4
+60/311195/campos_512_v4
+60/311202/campos_512_v4
+60/311204/campos_512_v4
+60/311208/campos_512_v4
+60/311217/campos_512_v4
+60/311237/campos_512_v4
+60/311246/campos_512_v4
+60/311248/campos_512_v4
+60/311250/campos_512_v4
+60/311277/campos_512_v4
+60/311282/campos_512_v4
+60/311283/campos_512_v4
+60/311286/campos_512_v4
+60/311290/campos_512_v4
+60/311291/campos_512_v4
+60/311294/campos_512_v4
+60/311304/campos_512_v4
+60/311323/campos_512_v4
+60/311325/campos_512_v4
+60/311345/campos_512_v4
+60/311347/campos_512_v4
+60/311350/campos_512_v4
+60/311357/campos_512_v4
+60/311358/campos_512_v4
+60/311360/campos_512_v4
+60/311368/campos_512_v4
+60/311369/campos_512_v4
+60/311370/campos_512_v4
+60/311382/campos_512_v4
+60/311394/campos_512_v4
+60/311396/campos_512_v4
+60/311409/campos_512_v4
+60/311413/campos_512_v4
+60/311415/campos_512_v4
+60/311419/campos_512_v4
+60/311425/campos_512_v4
+60/311437/campos_512_v4
+60/311443/campos_512_v4
+60/311444/campos_512_v4
+60/311448/campos_512_v4
+60/311451/campos_512_v4
+60/311515/campos_512_v4
+60/311522/campos_512_v4
+60/311539/campos_512_v4
+60/311547/campos_512_v4
+60/311562/campos_512_v4
+60/311563/campos_512_v4
+60/311566/campos_512_v4
+60/311571/campos_512_v4
+60/311579/campos_512_v4
+60/311592/campos_512_v4
+60/311601/campos_512_v4
+60/311608/campos_512_v4
+60/311615/campos_512_v4
+60/311618/campos_512_v4
+60/311625/campos_512_v4
+60/311627/campos_512_v4
+60/311630/campos_512_v4
+60/311631/campos_512_v4
+60/311638/campos_512_v4
+60/311645/campos_512_v4
+60/311653/campos_512_v4
+60/311655/campos_512_v4
+60/311656/campos_512_v4
+60/311661/campos_512_v4
+60/311663/campos_512_v4
+60/311665/campos_512_v4
+60/311675/campos_512_v4
+60/311678/campos_512_v4
+60/311682/campos_512_v4
+60/311698/campos_512_v4
+60/311706/campos_512_v4
+60/311709/campos_512_v4
+60/311723/campos_512_v4
+60/311730/campos_512_v4
+60/311748/campos_512_v4
+60/311757/campos_512_v4
+60/311759/campos_512_v4
+60/311760/campos_512_v4
+60/311764/campos_512_v4
+60/311775/campos_512_v4
+60/311786/campos_512_v4
+60/311802/campos_512_v4
+60/311817/campos_512_v4
+60/311842/campos_512_v4
+60/311847/campos_512_v4
+60/311857/campos_512_v4
+60/311861/campos_512_v4
+60/311878/campos_512_v4
+60/311897/campos_512_v4
+60/311931/campos_512_v4
+60/311939/campos_512_v4
+60/311959/campos_512_v4
+60/311962/campos_512_v4
+60/311977/campos_512_v4
+60/311981/campos_512_v4
+60/312010/campos_512_v4
+60/312012/campos_512_v4
+60/312014/campos_512_v4
+60/312024/campos_512_v4
+60/312025/campos_512_v4
+60/312031/campos_512_v4
+60/312034/campos_512_v4
+60/312041/campos_512_v4
+60/312042/campos_512_v4
+60/312062/campos_512_v4
+60/312063/campos_512_v4
+60/312071/campos_512_v4
+60/312094/campos_512_v4
+60/312101/campos_512_v4
+60/312105/campos_512_v4
+60/312114/campos_512_v4
+60/312160/campos_512_v4
+60/312174/campos_512_v4
+60/312176/campos_512_v4
+60/312189/campos_512_v4
+60/312190/campos_512_v4
+60/312191/campos_512_v4
+60/312202/campos_512_v4
+60/312208/campos_512_v4
+60/312212/campos_512_v4
+60/312254/campos_512_v4
+60/312269/campos_512_v4
+60/312271/campos_512_v4
+60/312286/campos_512_v4
+60/312295/campos_512_v4
+60/312297/campos_512_v4
+60/312299/campos_512_v4
+60/312317/campos_512_v4
+60/312321/campos_512_v4
+60/312335/campos_512_v4
+60/312340/campos_512_v4
+60/312343/campos_512_v4
+60/312344/campos_512_v4
+60/312356/campos_512_v4
+60/312373/campos_512_v4
+60/312374/campos_512_v4
+60/312398/campos_512_v4
+60/312414/campos_512_v4
+60/312422/campos_512_v4
+60/312433/campos_512_v4
+60/312436/campos_512_v4
+60/312442/campos_512_v4
+60/312447/campos_512_v4
+60/312456/campos_512_v4
+60/312465/campos_512_v4
+60/312471/campos_512_v4
+60/312476/campos_512_v4
+60/312488/campos_512_v4
+60/312493/campos_512_v4
+60/312498/campos_512_v4
+60/312504/campos_512_v4
+60/312506/campos_512_v4
+60/312511/campos_512_v4
+60/312518/campos_512_v4
+60/312519/campos_512_v4
+60/312520/campos_512_v4
+60/312528/campos_512_v4
+60/312531/campos_512_v4
+60/312534/campos_512_v4
+60/312536/campos_512_v4
+60/312540/campos_512_v4
+60/312542/campos_512_v4
+60/312545/campos_512_v4
+60/312554/campos_512_v4
+60/312574/campos_512_v4
+60/312585/campos_512_v4
+60/312606/campos_512_v4
+60/312611/campos_512_v4
+60/312632/campos_512_v4
+60/312637/campos_512_v4
+60/312638/campos_512_v4
+60/312640/campos_512_v4
+60/312657/campos_512_v4
+60/312664/campos_512_v4
+60/312666/campos_512_v4
+60/312668/campos_512_v4
+60/312680/campos_512_v4
+60/312688/campos_512_v4
+60/312712/campos_512_v4
+60/312718/campos_512_v4
+60/312719/campos_512_v4
+60/312720/campos_512_v4
+60/312737/campos_512_v4
+60/312740/campos_512_v4
+60/312743/campos_512_v4
+60/312762/campos_512_v4
+60/312766/campos_512_v4
+60/312768/campos_512_v4
+60/312769/campos_512_v4
+60/312778/campos_512_v4
+60/312780/campos_512_v4
+60/312793/campos_512_v4
+60/312803/campos_512_v4
+60/312816/campos_512_v4
+60/312822/campos_512_v4
+60/312826/campos_512_v4
+60/312829/campos_512_v4
+60/312832/campos_512_v4
+60/312838/campos_512_v4
+60/312839/campos_512_v4
+60/312841/campos_512_v4
+60/312842/campos_512_v4
+60/312845/campos_512_v4
+60/312851/campos_512_v4
+60/312853/campos_512_v4
+60/312868/campos_512_v4
+60/312895/campos_512_v4
+60/312896/campos_512_v4
+60/312908/campos_512_v4
+60/312909/campos_512_v4
+60/312915/campos_512_v4
+60/312921/campos_512_v4
+60/312923/campos_512_v4
+60/312926/campos_512_v4
+60/312929/campos_512_v4
+60/312933/campos_512_v4
+60/312956/campos_512_v4
+60/312957/campos_512_v4
+60/312958/campos_512_v4
+60/312965/campos_512_v4
+60/312971/campos_512_v4
+60/312976/campos_512_v4
+60/312980/campos_512_v4
+60/312985/campos_512_v4
+60/313010/campos_512_v4
+60/313012/campos_512_v4
+60/313019/campos_512_v4
+60/313050/campos_512_v4
+60/313054/campos_512_v4
+60/313058/campos_512_v4
+60/313059/campos_512_v4
+60/313078/campos_512_v4
+60/313079/campos_512_v4
+60/313105/campos_512_v4
+60/313109/campos_512_v4
+60/313110/campos_512_v4
+60/313117/campos_512_v4
+60/313124/campos_512_v4
+60/313133/campos_512_v4
+60/313149/campos_512_v4
+60/313150/campos_512_v4
+60/313154/campos_512_v4
+60/313165/campos_512_v4
+60/313170/campos_512_v4
+60/313176/campos_512_v4
+60/313180/campos_512_v4
+60/313181/campos_512_v4
+60/313183/campos_512_v4
+60/313191/campos_512_v4
+60/313200/campos_512_v4
+60/313201/campos_512_v4
+60/313209/campos_512_v4
+60/313214/campos_512_v4
+60/313225/campos_512_v4
+60/313226/campos_512_v4
+60/313229/campos_512_v4
+60/313257/campos_512_v4
+60/313270/campos_512_v4
+60/313273/campos_512_v4
+60/313275/campos_512_v4
+60/313276/campos_512_v4
+60/313298/campos_512_v4
+60/313305/campos_512_v4
+60/313322/campos_512_v4
+60/313327/campos_512_v4
+60/313333/campos_512_v4
+60/313334/campos_512_v4
+60/313354/campos_512_v4
+60/313362/campos_512_v4
+60/313380/campos_512_v4
+60/313416/campos_512_v4
+60/313424/campos_512_v4
+60/313426/campos_512_v4
+60/313429/campos_512_v4
+60/313430/campos_512_v4
+60/313434/campos_512_v4
+60/313437/campos_512_v4
+60/313445/campos_512_v4
+60/313448/campos_512_v4
+60/313459/campos_512_v4
+60/313460/campos_512_v4
+60/313463/campos_512_v4
+60/313473/campos_512_v4
+60/313478/campos_512_v4
+60/313482/campos_512_v4
+60/313484/campos_512_v4
+60/313485/campos_512_v4
+60/313488/campos_512_v4
+60/313496/campos_512_v4
+60/313504/campos_512_v4
+60/313507/campos_512_v4
+60/313509/campos_512_v4
+60/313512/campos_512_v4
+60/313522/campos_512_v4
+60/313525/campos_512_v4
+60/313531/campos_512_v4
+60/313537/campos_512_v4
+60/313541/campos_512_v4
+60/313548/campos_512_v4
+60/313549/campos_512_v4
+60/313554/campos_512_v4
+60/313564/campos_512_v4
+60/313583/campos_512_v4
+60/313586/campos_512_v4
+60/313592/campos_512_v4
+60/313605/campos_512_v4
+60/313626/campos_512_v4
+60/313628/campos_512_v4
+60/313643/campos_512_v4
+60/313652/campos_512_v4
+60/313658/campos_512_v4
+60/313659/campos_512_v4
+60/313688/campos_512_v4
+60/313701/campos_512_v4
+60/313707/campos_512_v4
+60/313712/campos_512_v4
+60/313721/campos_512_v4
+60/313727/campos_512_v4
+60/313745/campos_512_v4
+60/313748/campos_512_v4
+60/313750/campos_512_v4
+60/313751/campos_512_v4
+60/313782/campos_512_v4
+60/313809/campos_512_v4
+60/313817/campos_512_v4
+60/313833/campos_512_v4
+60/313838/campos_512_v4
+60/313842/campos_512_v4
+60/313855/campos_512_v4
+60/313871/campos_512_v4
+60/313882/campos_512_v4
+60/313888/campos_512_v4
+60/313893/campos_512_v4
+60/313902/campos_512_v4
+60/313909/campos_512_v4
+60/313914/campos_512_v4
+60/313921/campos_512_v4
+60/313938/campos_512_v4
+60/313950/campos_512_v4
+60/313953/campos_512_v4
+60/313959/campos_512_v4
+60/313972/campos_512_v4
+60/313980/campos_512_v4
+60/313981/campos_512_v4
+60/313989/campos_512_v4
+60/313992/campos_512_v4
+60/313993/campos_512_v4
+60/314009/campos_512_v4
+60/314014/campos_512_v4
+60/314026/campos_512_v4
+60/314027/campos_512_v4
+60/314032/campos_512_v4
+60/314048/campos_512_v4
+60/314054/campos_512_v4
+60/314058/campos_512_v4
+60/314062/campos_512_v4
+60/314070/campos_512_v4
+60/314077/campos_512_v4
+60/314092/campos_512_v4
+60/314094/campos_512_v4
+60/314133/campos_512_v4
+60/314137/campos_512_v4
+60/314147/campos_512_v4
+60/314158/campos_512_v4
+60/314171/campos_512_v4
+60/314179/campos_512_v4
+60/314220/campos_512_v4
+60/314222/campos_512_v4
+60/314224/campos_512_v4
+60/314233/campos_512_v4
+60/314240/campos_512_v4
+60/314243/campos_512_v4
+60/314244/campos_512_v4
+60/314246/campos_512_v4
+60/314249/campos_512_v4
+60/314260/campos_512_v4
+60/314263/campos_512_v4
+60/314267/campos_512_v4
+60/314316/campos_512_v4
+60/314321/campos_512_v4
+60/314332/campos_512_v4
+60/314338/campos_512_v4
+60/314368/campos_512_v4
+60/314375/campos_512_v4
+60/314387/campos_512_v4
+60/314390/campos_512_v4
+60/314392/campos_512_v4
+60/314393/campos_512_v4
+60/314395/campos_512_v4
+60/314399/campos_512_v4
+60/314400/campos_512_v4
+60/314405/campos_512_v4
+60/314416/campos_512_v4
+60/314429/campos_512_v4
+60/314442/campos_512_v4
+60/314459/campos_512_v4
+60/314460/campos_512_v4
+60/314462/campos_512_v4
+60/314472/campos_512_v4
+60/314474/campos_512_v4
+60/314490/campos_512_v4
+60/314493/campos_512_v4
+60/314494/campos_512_v4
+60/314497/campos_512_v4
+60/314512/campos_512_v4
+60/314523/campos_512_v4
+60/314535/campos_512_v4
+60/314543/campos_512_v4
+60/314559/campos_512_v4
+60/314574/campos_512_v4
+60/314576/campos_512_v4
+60/314591/campos_512_v4
+60/314600/campos_512_v4
+60/314612/campos_512_v4
+60/314633/campos_512_v4
+60/314634/campos_512_v4
+60/314639/campos_512_v4
+60/314641/campos_512_v4
+60/314656/campos_512_v4
+60/314682/campos_512_v4
+60/314689/campos_512_v4
+60/314694/campos_512_v4
+60/314696/campos_512_v4
+60/314705/campos_512_v4
+60/314706/campos_512_v4
+60/314710/campos_512_v4
+60/314721/campos_512_v4
+60/314728/campos_512_v4
+60/314741/campos_512_v4
+60/314742/campos_512_v4
+60/314744/campos_512_v4
+60/314767/campos_512_v4
+60/314769/campos_512_v4
+60/314770/campos_512_v4
+60/314791/campos_512_v4
+60/314799/campos_512_v4
+60/314809/campos_512_v4
+60/314814/campos_512_v4
+60/314817/campos_512_v4
+60/314829/campos_512_v4
+60/314838/campos_512_v4
+60/314857/campos_512_v4
+60/314859/campos_512_v4
+60/314860/campos_512_v4
+60/314864/campos_512_v4
+60/314886/campos_512_v4
+60/314896/campos_512_v4
+60/314911/campos_512_v4
+60/314923/campos_512_v4
+60/314929/campos_512_v4
+60/314934/campos_512_v4
+60/314938/campos_512_v4
+60/314945/campos_512_v4
+60/314946/campos_512_v4
+60/314969/campos_512_v4
+60/314984/campos_512_v4
+60/314988/campos_512_v4
+61/315014/campos_512_v4
+61/315020/campos_512_v4
+61/315028/campos_512_v4
+61/315029/campos_512_v4
+61/315033/campos_512_v4
+61/315044/campos_512_v4
+61/315045/campos_512_v4
+61/315048/campos_512_v4
+61/315051/campos_512_v4
+61/315064/campos_512_v4
+61/315067/campos_512_v4
+61/315075/campos_512_v4
+61/315078/campos_512_v4
+61/315093/campos_512_v4
+61/315096/campos_512_v4
+61/315097/campos_512_v4
+61/315130/campos_512_v4
+61/315138/campos_512_v4
+61/315142/campos_512_v4
+61/315153/campos_512_v4
+61/315154/campos_512_v4
+61/315157/campos_512_v4
+61/315166/campos_512_v4
+61/315188/campos_512_v4
+61/315192/campos_512_v4
+61/315200/campos_512_v4
+61/315202/campos_512_v4
+61/315205/campos_512_v4
+61/315233/campos_512_v4
+61/315234/campos_512_v4
+61/315237/campos_512_v4
+61/315261/campos_512_v4
+61/315274/campos_512_v4
+61/315281/campos_512_v4
+61/315282/campos_512_v4
+61/315285/campos_512_v4
+61/315294/campos_512_v4
+61/315300/campos_512_v4
+61/315311/campos_512_v4
+61/315314/campos_512_v4
+61/315322/campos_512_v4
+61/315325/campos_512_v4
+61/315329/campos_512_v4
+61/315339/campos_512_v4
+61/315341/campos_512_v4
+61/315345/campos_512_v4
+61/315349/campos_512_v4
+61/315350/campos_512_v4
+61/315352/campos_512_v4
+61/315358/campos_512_v4
+61/315362/campos_512_v4
+61/315366/campos_512_v4
+61/315367/campos_512_v4
+61/315376/campos_512_v4
+61/315378/campos_512_v4
+61/315385/campos_512_v4
+61/315393/campos_512_v4
+61/315400/campos_512_v4
+61/315402/campos_512_v4
+61/315444/campos_512_v4
+61/315449/campos_512_v4
+61/315452/campos_512_v4
+61/315479/campos_512_v4
+61/315483/campos_512_v4
+61/315516/campos_512_v4
+61/315521/campos_512_v4
+61/315547/campos_512_v4
+61/315557/campos_512_v4
+61/315558/campos_512_v4
+61/315570/campos_512_v4
+61/315576/campos_512_v4
+61/315579/campos_512_v4
+61/315607/campos_512_v4
+61/315610/campos_512_v4
+61/315622/campos_512_v4
+61/315626/campos_512_v4
+61/315628/campos_512_v4
+61/315635/campos_512_v4
+61/315643/campos_512_v4
+61/315650/campos_512_v4
+61/315653/campos_512_v4
+61/315658/campos_512_v4
+61/315672/campos_512_v4
+61/315674/campos_512_v4
+61/315693/campos_512_v4
+61/315698/campos_512_v4
+61/315706/campos_512_v4
+61/315721/campos_512_v4
+61/315746/campos_512_v4
+61/315751/campos_512_v4
+61/315753/campos_512_v4
+61/315756/campos_512_v4
+61/315757/campos_512_v4
+61/315758/campos_512_v4
+61/315771/campos_512_v4
+61/315772/campos_512_v4
+61/315782/campos_512_v4
+61/315787/campos_512_v4
+61/315797/campos_512_v4
+61/315798/campos_512_v4
+61/315802/campos_512_v4
+61/315827/campos_512_v4
+61/315845/campos_512_v4
+61/315849/campos_512_v4
+61/315877/campos_512_v4
+61/315878/campos_512_v4
+61/315890/campos_512_v4
+61/315894/campos_512_v4
+61/315904/campos_512_v4
+61/315907/campos_512_v4
+61/315915/campos_512_v4
+61/315928/campos_512_v4
+61/315935/campos_512_v4
+61/315949/campos_512_v4
+61/315965/campos_512_v4
+61/315980/campos_512_v4
+61/315986/campos_512_v4
+61/316029/campos_512_v4
+61/316030/campos_512_v4
+61/316033/campos_512_v4
+61/316059/campos_512_v4
+61/316080/campos_512_v4
+61/316086/campos_512_v4
+61/316088/campos_512_v4
+61/316095/campos_512_v4
+61/316097/campos_512_v4
+61/316117/campos_512_v4
+61/316130/campos_512_v4
+61/316134/campos_512_v4
+61/316165/campos_512_v4
+61/316166/campos_512_v4
+61/316177/campos_512_v4
+61/316181/campos_512_v4
+61/316193/campos_512_v4
+61/316202/campos_512_v4
+61/316208/campos_512_v4
+61/316215/campos_512_v4
+61/316224/campos_512_v4
+61/316226/campos_512_v4
+61/316236/campos_512_v4
+61/316240/campos_512_v4
+61/316245/campos_512_v4
+61/316247/campos_512_v4
+61/316258/campos_512_v4
+61/316260/campos_512_v4
+61/316273/campos_512_v4
+61/316276/campos_512_v4
+61/316303/campos_512_v4
+61/316305/campos_512_v4
+61/316306/campos_512_v4
+61/316319/campos_512_v4
+61/316328/campos_512_v4
+61/316340/campos_512_v4
+61/316345/campos_512_v4
+61/316349/campos_512_v4
+61/316363/campos_512_v4
+61/316368/campos_512_v4
+61/316372/campos_512_v4
+61/316378/campos_512_v4
+61/316391/campos_512_v4
+61/316395/campos_512_v4
+61/316399/campos_512_v4
+61/316404/campos_512_v4
+61/316412/campos_512_v4
+61/316416/campos_512_v4
+61/316427/campos_512_v4
+61/316492/campos_512_v4
+61/316494/campos_512_v4
+61/316501/campos_512_v4
+61/316513/campos_512_v4
+61/316520/campos_512_v4
+61/316530/campos_512_v4
+61/316533/campos_512_v4
+61/316538/campos_512_v4
+61/316552/campos_512_v4
+61/316580/campos_512_v4
+61/316585/campos_512_v4
+61/316590/campos_512_v4
+61/316597/campos_512_v4
+61/316619/campos_512_v4
+61/316628/campos_512_v4
+61/316629/campos_512_v4
+61/316631/campos_512_v4
+61/316639/campos_512_v4
+61/316645/campos_512_v4
+61/316646/campos_512_v4
+61/316648/campos_512_v4
+61/316649/campos_512_v4
+61/316654/campos_512_v4
+61/316655/campos_512_v4
+61/316669/campos_512_v4
+61/316686/campos_512_v4
+61/316689/campos_512_v4
+61/316705/campos_512_v4
+61/316708/campos_512_v4
+61/316719/campos_512_v4
+61/316723/campos_512_v4
+61/316726/campos_512_v4
+61/316730/campos_512_v4
+61/316737/campos_512_v4
+61/316741/campos_512_v4
+61/316746/campos_512_v4
+61/316754/campos_512_v4
+61/316755/campos_512_v4
+61/316764/campos_512_v4
+61/316793/campos_512_v4
+61/316817/campos_512_v4
+61/316822/campos_512_v4
+61/316841/campos_512_v4
+61/316857/campos_512_v4
+61/316860/campos_512_v4
+61/316875/campos_512_v4
+61/316876/campos_512_v4
+61/316886/campos_512_v4
+61/316892/campos_512_v4
+61/316902/campos_512_v4
+61/316906/campos_512_v4
+61/316907/campos_512_v4
+61/316909/campos_512_v4
+61/316910/campos_512_v4
+61/316914/campos_512_v4
+61/316915/campos_512_v4
+61/316920/campos_512_v4
+61/316934/campos_512_v4
+61/316938/campos_512_v4
+61/316940/campos_512_v4
+61/316943/campos_512_v4
+61/316946/campos_512_v4
+61/316950/campos_512_v4
+61/316953/campos_512_v4
+61/316976/campos_512_v4
+61/316977/campos_512_v4
+61/316988/campos_512_v4
+61/316990/campos_512_v4
+61/317013/campos_512_v4
+61/317017/campos_512_v4
+61/317018/campos_512_v4
+61/317041/campos_512_v4
+61/317043/campos_512_v4
+61/317047/campos_512_v4
+61/317058/campos_512_v4
+61/317089/campos_512_v4
+61/317094/campos_512_v4
+61/317117/campos_512_v4
+61/317122/campos_512_v4
+61/317128/campos_512_v4
+61/317137/campos_512_v4
+61/317139/campos_512_v4
+61/317140/campos_512_v4
+61/317143/campos_512_v4
+61/317161/campos_512_v4
+61/317163/campos_512_v4
+61/317178/campos_512_v4
+61/317191/campos_512_v4
+61/317198/campos_512_v4
+61/317199/campos_512_v4
+61/317204/campos_512_v4
+61/317207/campos_512_v4
+61/317215/campos_512_v4
+61/317216/campos_512_v4
+61/317219/campos_512_v4
+61/317230/campos_512_v4
+61/317244/campos_512_v4
+61/317266/campos_512_v4
+61/317276/campos_512_v4
+61/317278/campos_512_v4
+61/317314/campos_512_v4
+61/317347/campos_512_v4
+61/317365/campos_512_v4
+61/317380/campos_512_v4
+61/317384/campos_512_v4
+61/317387/campos_512_v4
+61/317388/campos_512_v4
+61/317391/campos_512_v4
+61/317394/campos_512_v4
+61/317396/campos_512_v4
+61/317409/campos_512_v4
+61/317423/campos_512_v4
+61/317436/campos_512_v4
+61/317460/campos_512_v4
+61/317464/campos_512_v4
+61/317470/campos_512_v4
+61/317480/campos_512_v4
+61/317490/campos_512_v4
+61/317493/campos_512_v4
+61/317499/campos_512_v4
+61/317503/campos_512_v4
+61/317517/campos_512_v4
+61/317531/campos_512_v4
+61/317555/campos_512_v4
+61/317560/campos_512_v4
+61/317563/campos_512_v4
+61/317590/campos_512_v4
+61/317594/campos_512_v4
+61/317606/campos_512_v4
+61/317607/campos_512_v4
+61/317611/campos_512_v4
+61/317615/campos_512_v4
+61/317625/campos_512_v4
+61/317628/campos_512_v4
+61/317637/campos_512_v4
+61/317650/campos_512_v4
+61/317664/campos_512_v4
+61/317688/campos_512_v4
+61/317713/campos_512_v4
+61/317724/campos_512_v4
+61/317737/campos_512_v4
+61/317746/campos_512_v4
+61/317770/campos_512_v4
+61/317775/campos_512_v4
+61/317780/campos_512_v4
+61/317783/campos_512_v4
+61/317791/campos_512_v4
+61/317795/campos_512_v4
+61/317815/campos_512_v4
+61/317829/campos_512_v4
+61/317835/campos_512_v4
+61/317843/campos_512_v4
+61/317844/campos_512_v4
+61/317846/campos_512_v4
+61/317851/campos_512_v4
+61/317856/campos_512_v4
+61/317869/campos_512_v4
+61/317884/campos_512_v4
+61/317886/campos_512_v4
+61/317890/campos_512_v4
+61/317894/campos_512_v4
+61/317895/campos_512_v4
+61/317896/campos_512_v4
+61/317900/campos_512_v4
+61/317909/campos_512_v4
+61/317914/campos_512_v4
+61/317917/campos_512_v4
+61/317923/campos_512_v4
+61/317931/campos_512_v4
+61/317939/campos_512_v4
+61/317945/campos_512_v4
+61/317950/campos_512_v4
+61/317952/campos_512_v4
+61/317961/campos_512_v4
+61/317980/campos_512_v4
+61/317984/campos_512_v4
+61/317985/campos_512_v4
+61/317990/campos_512_v4
+61/317999/campos_512_v4
+61/318004/campos_512_v4
+61/318005/campos_512_v4
+61/318006/campos_512_v4
+61/318035/campos_512_v4
+61/318056/campos_512_v4
+61/318057/campos_512_v4
+61/318059/campos_512_v4
+61/318068/campos_512_v4
+61/318076/campos_512_v4
+61/318087/campos_512_v4
+61/318109/campos_512_v4
+61/318111/campos_512_v4
+61/318141/campos_512_v4
+61/318147/campos_512_v4
+61/318148/campos_512_v4
+61/318155/campos_512_v4
+61/318159/campos_512_v4
+61/318178/campos_512_v4
+61/318186/campos_512_v4
+61/318190/campos_512_v4
+61/318195/campos_512_v4
+61/318212/campos_512_v4
+61/318214/campos_512_v4
+61/318233/campos_512_v4
+61/318237/campos_512_v4
+61/318239/campos_512_v4
+61/318245/campos_512_v4
+61/318249/campos_512_v4
+61/318258/campos_512_v4
+61/318270/campos_512_v4
+61/318282/campos_512_v4
+61/318286/campos_512_v4
+61/318297/campos_512_v4
+61/318299/campos_512_v4
+61/318300/campos_512_v4
+61/318301/campos_512_v4
+61/318306/campos_512_v4
+61/318319/campos_512_v4
+61/318348/campos_512_v4
+61/318366/campos_512_v4
+61/318376/campos_512_v4
+61/318396/campos_512_v4
+61/318403/campos_512_v4
+61/318416/campos_512_v4
+61/318417/campos_512_v4
+61/318431/campos_512_v4
+61/318449/campos_512_v4
+61/318454/campos_512_v4
+61/318478/campos_512_v4
+61/318482/campos_512_v4
+61/318483/campos_512_v4
+61/318488/campos_512_v4
+61/318492/campos_512_v4
+61/318515/campos_512_v4
+61/318522/campos_512_v4
+61/318525/campos_512_v4
+61/318548/campos_512_v4
+61/318557/campos_512_v4
+61/318583/campos_512_v4
+61/318590/campos_512_v4
+61/318593/campos_512_v4
+61/318594/campos_512_v4
+61/318602/campos_512_v4
+61/318606/campos_512_v4
+61/318609/campos_512_v4
+61/318616/campos_512_v4
+61/318629/campos_512_v4
+61/318631/campos_512_v4
+61/318635/campos_512_v4
+61/318639/campos_512_v4
+61/318655/campos_512_v4
+61/318669/campos_512_v4
+61/318670/campos_512_v4
+61/318682/campos_512_v4
+61/318686/campos_512_v4
+61/318689/campos_512_v4
+61/318697/campos_512_v4
+61/318699/campos_512_v4
+61/318705/campos_512_v4
+61/318710/campos_512_v4
+61/318716/campos_512_v4
+61/318720/campos_512_v4
+61/318739/campos_512_v4
+61/318753/campos_512_v4
+61/318765/campos_512_v4
+61/318769/campos_512_v4
+61/318770/campos_512_v4
+61/318782/campos_512_v4
+61/318786/campos_512_v4
+61/318787/campos_512_v4
+61/318789/campos_512_v4
+61/318817/campos_512_v4
+61/318838/campos_512_v4
+61/318840/campos_512_v4
+61/318854/campos_512_v4
+61/318880/campos_512_v4
+61/318883/campos_512_v4
+61/318884/campos_512_v4
+61/318892/campos_512_v4
+61/318900/campos_512_v4
+61/318905/campos_512_v4
+61/318908/campos_512_v4
+61/318917/campos_512_v4
+61/318925/campos_512_v4
+61/318926/campos_512_v4
+61/318936/campos_512_v4
+61/318938/campos_512_v4
+61/318941/campos_512_v4
+61/318945/campos_512_v4
+61/318953/campos_512_v4
+61/318954/campos_512_v4
+61/318976/campos_512_v4
+61/318983/campos_512_v4
+61/318984/campos_512_v4
+61/318988/campos_512_v4
+61/318989/campos_512_v4
+61/318997/campos_512_v4
+61/319006/campos_512_v4
+61/319015/campos_512_v4
+61/319018/campos_512_v4
+61/319023/campos_512_v4
+61/319024/campos_512_v4
+61/319032/campos_512_v4
+61/319035/campos_512_v4
+61/319044/campos_512_v4
+61/319050/campos_512_v4
+61/319053/campos_512_v4
+61/319061/campos_512_v4
+61/319078/campos_512_v4
+61/319079/campos_512_v4
+61/319084/campos_512_v4
+61/319091/campos_512_v4
+61/319096/campos_512_v4
+61/319098/campos_512_v4
+61/319106/campos_512_v4
+61/319112/campos_512_v4
+61/319113/campos_512_v4
+61/319115/campos_512_v4
+61/319120/campos_512_v4
+61/319123/campos_512_v4
+61/319127/campos_512_v4
+61/319131/campos_512_v4
+61/319133/campos_512_v4
+61/319138/campos_512_v4
+61/319149/campos_512_v4
+61/319151/campos_512_v4
+61/319158/campos_512_v4
+61/319160/campos_512_v4
+61/319163/campos_512_v4
+61/319166/campos_512_v4
+61/319173/campos_512_v4
+61/319177/campos_512_v4
+61/319179/campos_512_v4
+61/319209/campos_512_v4
+61/319210/campos_512_v4
+61/319216/campos_512_v4
+61/319221/campos_512_v4
+61/319226/campos_512_v4
+61/319240/campos_512_v4
+61/319258/campos_512_v4
+61/319268/campos_512_v4
+61/319269/campos_512_v4
+61/319277/campos_512_v4
+61/319291/campos_512_v4
+61/319303/campos_512_v4
+61/319304/campos_512_v4
+61/319317/campos_512_v4
+61/319318/campos_512_v4
+61/319326/campos_512_v4
+61/319331/campos_512_v4
+61/319333/campos_512_v4
+61/319334/campos_512_v4
+61/319336/campos_512_v4
+61/319339/campos_512_v4
+61/319341/campos_512_v4
+61/319345/campos_512_v4
+61/319346/campos_512_v4
+61/319361/campos_512_v4
+61/319376/campos_512_v4
+61/319392/campos_512_v4
+61/319393/campos_512_v4
+61/319398/campos_512_v4
+61/319412/campos_512_v4
+61/319414/campos_512_v4
+61/319418/campos_512_v4
+61/319431/campos_512_v4
+61/319449/campos_512_v4
+61/319451/campos_512_v4
+61/319453/campos_512_v4
+61/319461/campos_512_v4
+61/319494/campos_512_v4
+61/319495/campos_512_v4
+61/319496/campos_512_v4
+61/319500/campos_512_v4
+61/319520/campos_512_v4
+61/319524/campos_512_v4
+61/319531/campos_512_v4
+61/319533/campos_512_v4
+61/319538/campos_512_v4
+61/319539/campos_512_v4
+61/319554/campos_512_v4
+61/319556/campos_512_v4
+61/319571/campos_512_v4
+61/319572/campos_512_v4
+61/319575/campos_512_v4
+61/319576/campos_512_v4
+61/319593/campos_512_v4
+61/319600/campos_512_v4
+61/319602/campos_512_v4
+61/319620/campos_512_v4
+61/319625/campos_512_v4
+61/319627/campos_512_v4
+61/319642/campos_512_v4
+61/319662/campos_512_v4
+61/319681/campos_512_v4
+61/319697/campos_512_v4
+61/319698/campos_512_v4
+61/319709/campos_512_v4
+61/319715/campos_512_v4
+61/319726/campos_512_v4
+61/319739/campos_512_v4
+61/319746/campos_512_v4
+61/319753/campos_512_v4
+61/319762/campos_512_v4
+61/319769/campos_512_v4
+61/319772/campos_512_v4
+61/319780/campos_512_v4
+61/319782/campos_512_v4
+61/319800/campos_512_v4
+61/319824/campos_512_v4
+61/319827/campos_512_v4
+61/319829/campos_512_v4
+61/319832/campos_512_v4
+61/319834/campos_512_v4
+61/319835/campos_512_v4
+61/319844/campos_512_v4
+61/319854/campos_512_v4
+61/319855/campos_512_v4
+61/319879/campos_512_v4
+61/319880/campos_512_v4
+61/319882/campos_512_v4
+61/319883/campos_512_v4
+61/319894/campos_512_v4
+61/319910/campos_512_v4
+61/319912/campos_512_v4
+61/319923/campos_512_v4
+61/319940/campos_512_v4
+61/319946/campos_512_v4
+61/319948/campos_512_v4
+61/319951/campos_512_v4
+61/319954/campos_512_v4
+61/319961/campos_512_v4
+61/319963/campos_512_v4
+61/319965/campos_512_v4
+61/319968/campos_512_v4
+61/319974/campos_512_v4
+61/319976/campos_512_v4
+61/319981/campos_512_v4
+61/319988/campos_512_v4
+61/319990/campos_512_v4
+62/320003/campos_512_v4
+62/320026/campos_512_v4
+62/320054/campos_512_v4
+62/320057/campos_512_v4
+62/320078/campos_512_v4
+62/320082/campos_512_v4
+62/320083/campos_512_v4
+62/320087/campos_512_v4
+62/320089/campos_512_v4
+62/320094/campos_512_v4
+62/320102/campos_512_v4
+62/320104/campos_512_v4
+62/320117/campos_512_v4
+62/320118/campos_512_v4
+62/320131/campos_512_v4
+62/320147/campos_512_v4
+62/320149/campos_512_v4
+62/320150/campos_512_v4
+62/320153/campos_512_v4
+62/320154/campos_512_v4
+62/320173/campos_512_v4
+62/320174/campos_512_v4
+62/320177/campos_512_v4
+62/320181/campos_512_v4
+62/320188/campos_512_v4
+62/320190/campos_512_v4
+62/320195/campos_512_v4
+62/320197/campos_512_v4
+62/320200/campos_512_v4
+62/320201/campos_512_v4
+62/320203/campos_512_v4
+62/320221/campos_512_v4
+62/320224/campos_512_v4
+62/320233/campos_512_v4
+62/320236/campos_512_v4
+62/320243/campos_512_v4
+62/320247/campos_512_v4
+62/320253/campos_512_v4
+62/320256/campos_512_v4
+62/320274/campos_512_v4
+62/320275/campos_512_v4
+62/320280/campos_512_v4
+62/320291/campos_512_v4
+62/320303/campos_512_v4
+62/320321/campos_512_v4
+62/320349/campos_512_v4
+62/320351/campos_512_v4
+62/320355/campos_512_v4
+62/320365/campos_512_v4
+62/320368/campos_512_v4
+62/320381/campos_512_v4
+62/320384/campos_512_v4
+62/320385/campos_512_v4
+62/320396/campos_512_v4
+62/320409/campos_512_v4
+62/320420/campos_512_v4
+62/320425/campos_512_v4
+62/320433/campos_512_v4
+62/320435/campos_512_v4
+62/320438/campos_512_v4
+62/320442/campos_512_v4
+62/320443/campos_512_v4
+62/320445/campos_512_v4
+62/320451/campos_512_v4
+62/320452/campos_512_v4
+62/320453/campos_512_v4
+62/320455/campos_512_v4
+62/320464/campos_512_v4
+62/320467/campos_512_v4
+62/320472/campos_512_v4
+62/320509/campos_512_v4
+62/320513/campos_512_v4
+62/320520/campos_512_v4
+62/320528/campos_512_v4
+62/320533/campos_512_v4
+62/320543/campos_512_v4
+62/320545/campos_512_v4
+62/320549/campos_512_v4
+62/320558/campos_512_v4
+62/320569/campos_512_v4
+62/320571/campos_512_v4
+62/320572/campos_512_v4
+62/320577/campos_512_v4
+62/320591/campos_512_v4
+62/320596/campos_512_v4
+62/320598/campos_512_v4
+62/320603/campos_512_v4
+62/320609/campos_512_v4
+62/320619/campos_512_v4
+62/320635/campos_512_v4
+62/320636/campos_512_v4
+62/320642/campos_512_v4
+62/320650/campos_512_v4
+62/320655/campos_512_v4
+62/320658/campos_512_v4
+62/320661/campos_512_v4
+62/320662/campos_512_v4
+62/320664/campos_512_v4
+62/320684/campos_512_v4
+62/320689/campos_512_v4
+62/320713/campos_512_v4
+62/320734/campos_512_v4
+62/320754/campos_512_v4
+62/320768/campos_512_v4
+62/320776/campos_512_v4
+62/320778/campos_512_v4
+62/320784/campos_512_v4
+62/320787/campos_512_v4
+62/320789/campos_512_v4
+62/320797/campos_512_v4
+62/320806/campos_512_v4
+62/320815/campos_512_v4
+62/320837/campos_512_v4
+62/320849/campos_512_v4
+62/320858/campos_512_v4
+62/320865/campos_512_v4
+62/320870/campos_512_v4
+62/320874/campos_512_v4
+62/320875/campos_512_v4
+62/320896/campos_512_v4
+62/320898/campos_512_v4
+62/320901/campos_512_v4
+62/320906/campos_512_v4
+62/320907/campos_512_v4
+62/320908/campos_512_v4
+62/320919/campos_512_v4
+62/320922/campos_512_v4
+62/320945/campos_512_v4
+62/320948/campos_512_v4
+62/320949/campos_512_v4
+62/320950/campos_512_v4
+62/320951/campos_512_v4
+62/320959/campos_512_v4
+62/320961/campos_512_v4
+62/320987/campos_512_v4
+62/320995/campos_512_v4
+62/321001/campos_512_v4
+62/321006/campos_512_v4
+62/321008/campos_512_v4
+62/321011/campos_512_v4
+62/321022/campos_512_v4
+62/321036/campos_512_v4
+62/321041/campos_512_v4
+62/321054/campos_512_v4
+62/321080/campos_512_v4
+62/321081/campos_512_v4
+62/321098/campos_512_v4
+62/321105/campos_512_v4
+62/321108/campos_512_v4
+62/321122/campos_512_v4
+62/321124/campos_512_v4
+62/321128/campos_512_v4
+62/321156/campos_512_v4
+62/321165/campos_512_v4
+62/321167/campos_512_v4
+62/321176/campos_512_v4
+62/321178/campos_512_v4
+62/321192/campos_512_v4
+62/321223/campos_512_v4
+62/321231/campos_512_v4
+62/321234/campos_512_v4
+62/321235/campos_512_v4
+62/321284/campos_512_v4
+62/321307/campos_512_v4
+62/321317/campos_512_v4
+62/321329/campos_512_v4
+62/321330/campos_512_v4
+62/321344/campos_512_v4
+62/321348/campos_512_v4
+62/321363/campos_512_v4
+62/321365/campos_512_v4
+62/321366/campos_512_v4
+62/321370/campos_512_v4
+62/321379/campos_512_v4
+62/321383/campos_512_v4
+62/321384/campos_512_v4
+62/321397/campos_512_v4
+62/321414/campos_512_v4
+62/321415/campos_512_v4
+62/321422/campos_512_v4
+62/321454/campos_512_v4
+62/321455/campos_512_v4
+62/321458/campos_512_v4
+62/321466/campos_512_v4
+62/321467/campos_512_v4
+62/321491/campos_512_v4
+62/321503/campos_512_v4
+62/321506/campos_512_v4
+62/321509/campos_512_v4
+62/321512/campos_512_v4
+62/321513/campos_512_v4
+62/321529/campos_512_v4
+62/321534/campos_512_v4
+62/321540/campos_512_v4
+62/321545/campos_512_v4
+62/321551/campos_512_v4
+62/321575/campos_512_v4
+62/321581/campos_512_v4
+62/321598/campos_512_v4
+62/321608/campos_512_v4
+62/321617/campos_512_v4
+62/321623/campos_512_v4
+62/321634/campos_512_v4
+62/321644/campos_512_v4
+62/321647/campos_512_v4
+62/321650/campos_512_v4
+62/321655/campos_512_v4
+62/321659/campos_512_v4
+62/321670/campos_512_v4
+62/321684/campos_512_v4
+62/321696/campos_512_v4
+62/321724/campos_512_v4
+62/321731/campos_512_v4
+62/321732/campos_512_v4
+62/321735/campos_512_v4
+62/321752/campos_512_v4
+62/321761/campos_512_v4
+62/321776/campos_512_v4
+62/321777/campos_512_v4
+62/321779/campos_512_v4
+62/321786/campos_512_v4
+62/321787/campos_512_v4
+62/321797/campos_512_v4
+62/321808/campos_512_v4
+62/321810/campos_512_v4
+62/321811/campos_512_v4
+62/321815/campos_512_v4
+62/321818/campos_512_v4
+62/321826/campos_512_v4
+62/321828/campos_512_v4
+62/321840/campos_512_v4
+62/321841/campos_512_v4
+62/321846/campos_512_v4
+62/321848/campos_512_v4
+62/321864/campos_512_v4
+62/321868/campos_512_v4
+62/321873/campos_512_v4
+62/321877/campos_512_v4
+62/321881/campos_512_v4
+62/321882/campos_512_v4
+62/321892/campos_512_v4
+62/321894/campos_512_v4
+62/321903/campos_512_v4
+62/321924/campos_512_v4
+62/321943/campos_512_v4
+62/321956/campos_512_v4
+62/321963/campos_512_v4
+62/321969/campos_512_v4
+62/321980/campos_512_v4
+62/321993/campos_512_v4
+62/322002/campos_512_v4
+62/322020/campos_512_v4
+62/322021/campos_512_v4
+62/322022/campos_512_v4
+62/322025/campos_512_v4
+62/322028/campos_512_v4
+62/322046/campos_512_v4
+62/322047/campos_512_v4
+62/322052/campos_512_v4
+62/322057/campos_512_v4
+62/322059/campos_512_v4
+62/322065/campos_512_v4
+62/322073/campos_512_v4
+62/322083/campos_512_v4
+62/322084/campos_512_v4
+62/322086/campos_512_v4
+62/322097/campos_512_v4
+62/322099/campos_512_v4
+62/322101/campos_512_v4
+62/322103/campos_512_v4
+62/322105/campos_512_v4
+62/322108/campos_512_v4
+62/322126/campos_512_v4
+62/322129/campos_512_v4
+62/322131/campos_512_v4
+62/322148/campos_512_v4
+62/322159/campos_512_v4
+62/322160/campos_512_v4
+62/322161/campos_512_v4
+62/322172/campos_512_v4
+62/322178/campos_512_v4
+62/322183/campos_512_v4
+62/322184/campos_512_v4
+62/322186/campos_512_v4
+62/322226/campos_512_v4
+62/322229/campos_512_v4
+62/322237/campos_512_v4
+62/322245/campos_512_v4
+62/322262/campos_512_v4
+62/322286/campos_512_v4
+62/322287/campos_512_v4
+62/322300/campos_512_v4
+62/322305/campos_512_v4
+62/322313/campos_512_v4
+62/322319/campos_512_v4
+62/322337/campos_512_v4
+62/322338/campos_512_v4
+62/322340/campos_512_v4
+62/322342/campos_512_v4
+62/322343/campos_512_v4
+62/322345/campos_512_v4
+62/322348/campos_512_v4
+62/322352/campos_512_v4
+62/322362/campos_512_v4
+62/322364/campos_512_v4
+62/322365/campos_512_v4
+62/322373/campos_512_v4
+62/322380/campos_512_v4
+62/322391/campos_512_v4
+62/322400/campos_512_v4
+62/322407/campos_512_v4
+62/322414/campos_512_v4
+62/322418/campos_512_v4
+62/322442/campos_512_v4
+62/322450/campos_512_v4
+62/322461/campos_512_v4
+62/322463/campos_512_v4
+62/322469/campos_512_v4
+62/322483/campos_512_v4
+62/322489/campos_512_v4
+62/322492/campos_512_v4
+62/322493/campos_512_v4
+62/322502/campos_512_v4
+62/322503/campos_512_v4
+62/322504/campos_512_v4
+62/322516/campos_512_v4
+62/322518/campos_512_v4
+62/322520/campos_512_v4
+62/322521/campos_512_v4
+62/322526/campos_512_v4
+62/322541/campos_512_v4
+62/322542/campos_512_v4
+62/322545/campos_512_v4
+62/322548/campos_512_v4
+62/322556/campos_512_v4
+62/322558/campos_512_v4
+62/322561/campos_512_v4
+62/322568/campos_512_v4
+62/322574/campos_512_v4
+62/322580/campos_512_v4
+62/322584/campos_512_v4
+62/322593/campos_512_v4
+62/322594/campos_512_v4
+62/322602/campos_512_v4
+62/322614/campos_512_v4
+62/322632/campos_512_v4
+62/322666/campos_512_v4
+62/322673/campos_512_v4
+62/322678/campos_512_v4
+62/322694/campos_512_v4
+62/322705/campos_512_v4
+62/322718/campos_512_v4
+62/322728/campos_512_v4
+62/322741/campos_512_v4
+62/322743/campos_512_v4
+62/322751/campos_512_v4
+62/322752/campos_512_v4
+62/322763/campos_512_v4
+62/322768/campos_512_v4
+62/322769/campos_512_v4
+62/322770/campos_512_v4
+62/322771/campos_512_v4
+62/322775/campos_512_v4
+62/322792/campos_512_v4
+62/322838/campos_512_v4
+62/322847/campos_512_v4
+62/322894/campos_512_v4
+62/322896/campos_512_v4
+62/322901/campos_512_v4
+62/322914/campos_512_v4
+62/322935/campos_512_v4
+62/322965/campos_512_v4
+62/322969/campos_512_v4
+62/322976/campos_512_v4
+62/322977/campos_512_v4
+62/322981/campos_512_v4
+62/322989/campos_512_v4
+62/322992/campos_512_v4
+62/323002/campos_512_v4
+62/323008/campos_512_v4
+62/323015/campos_512_v4
+62/323023/campos_512_v4
+62/323033/campos_512_v4
+62/323040/campos_512_v4
+62/323043/campos_512_v4
+62/323050/campos_512_v4
+62/323052/campos_512_v4
+62/323084/campos_512_v4
+62/323116/campos_512_v4
+62/323121/campos_512_v4
+62/323135/campos_512_v4
+62/323146/campos_512_v4
+62/323160/campos_512_v4
+62/323164/campos_512_v4
+62/323167/campos_512_v4
+62/323168/campos_512_v4
+62/323170/campos_512_v4
+62/323172/campos_512_v4
+62/323178/campos_512_v4
+62/323195/campos_512_v4
+62/323203/campos_512_v4
+62/323207/campos_512_v4
+62/323226/campos_512_v4
+62/323228/campos_512_v4
+62/323230/campos_512_v4
+62/323231/campos_512_v4
+62/323234/campos_512_v4
+62/323236/campos_512_v4
+62/323254/campos_512_v4
+62/323260/campos_512_v4
+62/323262/campos_512_v4
+62/323300/campos_512_v4
+62/323315/campos_512_v4
+62/323320/campos_512_v4
+62/323330/campos_512_v4
+62/323335/campos_512_v4
+62/323336/campos_512_v4
+62/323377/campos_512_v4
+62/323388/campos_512_v4
+62/323393/campos_512_v4
+62/323397/campos_512_v4
+62/323404/campos_512_v4
+62/323409/campos_512_v4
+62/323415/campos_512_v4
+62/323417/campos_512_v4
+62/323429/campos_512_v4
+62/323440/campos_512_v4
+62/323448/campos_512_v4
+62/323455/campos_512_v4
+62/323460/campos_512_v4
+62/323470/campos_512_v4
+62/323474/campos_512_v4
+62/323484/campos_512_v4
+62/323489/campos_512_v4
+62/323502/campos_512_v4
+62/323509/campos_512_v4
+62/323519/campos_512_v4
+62/323525/campos_512_v4
+62/323534/campos_512_v4
+62/323551/campos_512_v4
+62/323556/campos_512_v4
+62/323561/campos_512_v4
+62/323567/campos_512_v4
+62/323571/campos_512_v4
+62/323572/campos_512_v4
+62/323573/campos_512_v4
+62/323582/campos_512_v4
+62/323605/campos_512_v4
+62/323618/campos_512_v4
+62/323623/campos_512_v4
+62/323625/campos_512_v4
+62/323639/campos_512_v4
+62/323640/campos_512_v4
+62/323644/campos_512_v4
+62/323646/campos_512_v4
+62/323672/campos_512_v4
+62/323673/campos_512_v4
+62/323674/campos_512_v4
+62/323691/campos_512_v4
+62/323694/campos_512_v4
+62/323695/campos_512_v4
+62/323701/campos_512_v4
+62/323707/campos_512_v4
+62/323716/campos_512_v4
+62/323720/campos_512_v4
+62/323729/campos_512_v4
+62/323733/campos_512_v4
+62/323752/campos_512_v4
+62/323761/campos_512_v4
+62/323765/campos_512_v4
+62/323773/campos_512_v4
+62/323794/campos_512_v4
+62/323802/campos_512_v4
+62/323811/campos_512_v4
+62/323820/campos_512_v4
+62/323823/campos_512_v4
+62/323842/campos_512_v4
+62/323852/campos_512_v4
+62/323854/campos_512_v4
+62/323866/campos_512_v4
+62/323878/campos_512_v4
+62/323880/campos_512_v4
+62/323886/campos_512_v4
+62/323888/campos_512_v4
+62/323892/campos_512_v4
+62/323898/campos_512_v4
+62/323904/campos_512_v4
+62/323913/campos_512_v4
+62/323931/campos_512_v4
+62/323937/campos_512_v4
+62/323940/campos_512_v4
+62/323945/campos_512_v4
+62/323962/campos_512_v4
+62/323990/campos_512_v4
+62/323991/campos_512_v4
+62/323994/campos_512_v4
+62/324002/campos_512_v4
+62/324017/campos_512_v4
+62/324052/campos_512_v4
+62/324071/campos_512_v4
+62/324082/campos_512_v4
+62/324099/campos_512_v4
+62/324106/campos_512_v4
+62/324115/campos_512_v4
+62/324127/campos_512_v4
+62/324128/campos_512_v4
+62/324133/campos_512_v4
+62/324146/campos_512_v4
+62/324149/campos_512_v4
+62/324158/campos_512_v4
+62/324163/campos_512_v4
+62/324164/campos_512_v4
+62/324169/campos_512_v4
+62/324209/campos_512_v4
+62/324222/campos_512_v4
+62/324224/campos_512_v4
+62/324228/campos_512_v4
+62/324246/campos_512_v4
+62/324247/campos_512_v4
+62/324269/campos_512_v4
+62/324270/campos_512_v4
+62/324271/campos_512_v4
+62/324274/campos_512_v4
+62/324278/campos_512_v4
+62/324282/campos_512_v4
+62/324291/campos_512_v4
+62/324292/campos_512_v4
+62/324295/campos_512_v4
+62/324302/campos_512_v4
+62/324305/campos_512_v4
+62/324306/campos_512_v4
+62/324310/campos_512_v4
+62/324322/campos_512_v4
+62/324324/campos_512_v4
+62/324329/campos_512_v4
+62/324334/campos_512_v4
+62/324345/campos_512_v4
+62/324346/campos_512_v4
+62/324360/campos_512_v4
+62/324362/campos_512_v4
+62/324369/campos_512_v4
+62/324384/campos_512_v4
+62/324391/campos_512_v4
+62/324398/campos_512_v4
+62/324399/campos_512_v4
+62/324402/campos_512_v4
+62/324406/campos_512_v4
+62/324410/campos_512_v4
+62/324416/campos_512_v4
+62/324433/campos_512_v4
+62/324435/campos_512_v4
+62/324446/campos_512_v4
+62/324450/campos_512_v4
+62/324456/campos_512_v4
+62/324473/campos_512_v4
+62/324484/campos_512_v4
+62/324494/campos_512_v4
+62/324501/campos_512_v4
+62/324510/campos_512_v4
+62/324518/campos_512_v4
+62/324535/campos_512_v4
+62/324543/campos_512_v4
+62/324546/campos_512_v4
+62/324557/campos_512_v4
+62/324561/campos_512_v4
+62/324569/campos_512_v4
+62/324575/campos_512_v4
+62/324576/campos_512_v4
+62/324602/campos_512_v4
+62/324606/campos_512_v4
+62/324609/campos_512_v4
+62/324621/campos_512_v4
+62/324622/campos_512_v4
+62/324623/campos_512_v4
+62/324625/campos_512_v4
+62/324635/campos_512_v4
+62/324636/campos_512_v4
+62/324638/campos_512_v4
+62/324646/campos_512_v4
+62/324650/campos_512_v4
+62/324657/campos_512_v4
+62/324663/campos_512_v4
+62/324673/campos_512_v4
+62/324675/campos_512_v4
+62/324697/campos_512_v4
+62/324702/campos_512_v4
+62/324703/campos_512_v4
+62/324715/campos_512_v4
+62/324720/campos_512_v4
+62/324721/campos_512_v4
+62/324732/campos_512_v4
+62/324734/campos_512_v4
+62/324738/campos_512_v4
+62/324752/campos_512_v4
+62/324753/campos_512_v4
+62/324756/campos_512_v4
+62/324761/campos_512_v4
+62/324770/campos_512_v4
+62/324793/campos_512_v4
+62/324800/campos_512_v4
+62/324803/campos_512_v4
+62/324810/campos_512_v4
+62/324812/campos_512_v4
+62/324814/campos_512_v4
+62/324824/campos_512_v4
+62/324852/campos_512_v4
+62/324873/campos_512_v4
+62/324879/campos_512_v4
+62/324886/campos_512_v4
+62/324897/campos_512_v4
+62/324907/campos_512_v4
+62/324909/campos_512_v4
+62/324915/campos_512_v4
+62/324921/campos_512_v4
+62/324956/campos_512_v4
+62/324968/campos_512_v4
+62/324971/campos_512_v4
+62/324979/campos_512_v4
+62/324985/campos_512_v4
+62/324999/campos_512_v4
+62/325000/campos_512_v4
+63/325007/campos_512_v4
+63/325012/campos_512_v4
+63/325022/campos_512_v4
+63/325023/campos_512_v4
+63/325027/campos_512_v4
+63/325029/campos_512_v4
+63/325031/campos_512_v4
+63/325051/campos_512_v4
+63/325060/campos_512_v4
+63/325067/campos_512_v4
+63/325070/campos_512_v4
+63/325071/campos_512_v4
+63/325080/campos_512_v4
+63/325084/campos_512_v4
+63/325085/campos_512_v4
+63/325086/campos_512_v4
+63/325097/campos_512_v4
+63/325099/campos_512_v4
+63/325105/campos_512_v4
+63/325109/campos_512_v4
+63/325131/campos_512_v4
+63/325154/campos_512_v4
+63/325185/campos_512_v4
+63/325197/campos_512_v4
+63/325199/campos_512_v4
+63/325201/campos_512_v4
+63/325209/campos_512_v4
+63/325210/campos_512_v4
+63/325214/campos_512_v4
+63/325236/campos_512_v4
+63/325246/campos_512_v4
+63/325250/campos_512_v4
+63/325251/campos_512_v4
+63/325256/campos_512_v4
+63/325262/campos_512_v4
+63/325265/campos_512_v4
+63/325271/campos_512_v4
+63/325274/campos_512_v4
+63/325276/campos_512_v4
+63/325293/campos_512_v4
+63/325295/campos_512_v4
+63/325304/campos_512_v4
+63/325306/campos_512_v4
+63/325309/campos_512_v4
+63/325312/campos_512_v4
+63/325331/campos_512_v4
+63/325352/campos_512_v4
+63/325381/campos_512_v4
+63/325391/campos_512_v4
+63/325394/campos_512_v4
+63/325411/campos_512_v4
+63/325430/campos_512_v4
+63/325440/campos_512_v4
+63/325470/campos_512_v4
+63/325471/campos_512_v4
+63/325475/campos_512_v4
+63/325476/campos_512_v4
+63/325480/campos_512_v4
+63/325487/campos_512_v4
+63/325489/campos_512_v4
+63/325500/campos_512_v4
+63/325505/campos_512_v4
+63/325509/campos_512_v4
+63/325518/campos_512_v4
+63/325530/campos_512_v4
+63/325536/campos_512_v4
+63/325556/campos_512_v4
+63/325581/campos_512_v4
+63/325584/campos_512_v4
+63/325586/campos_512_v4
+63/325606/campos_512_v4
+63/325608/campos_512_v4
+63/325610/campos_512_v4
+63/325625/campos_512_v4
+63/325630/campos_512_v4
+63/325632/campos_512_v4
+63/325637/campos_512_v4
+63/325641/campos_512_v4
+63/325654/campos_512_v4
+63/325659/campos_512_v4
+63/325682/campos_512_v4
+63/325686/campos_512_v4
+63/325687/campos_512_v4
+63/325694/campos_512_v4
+63/325695/campos_512_v4
+63/325701/campos_512_v4
+63/325702/campos_512_v4
+63/325706/campos_512_v4
+63/325708/campos_512_v4
+63/325709/campos_512_v4
+63/325721/campos_512_v4
+63/325724/campos_512_v4
+63/325730/campos_512_v4
+63/325731/campos_512_v4
+63/325735/campos_512_v4
+63/325740/campos_512_v4
+63/325746/campos_512_v4
+63/325747/campos_512_v4
+63/325787/campos_512_v4
+63/325790/campos_512_v4
+63/325815/campos_512_v4
+63/325816/campos_512_v4
+63/325819/campos_512_v4
+63/325823/campos_512_v4
+63/325824/campos_512_v4
+63/325826/campos_512_v4
+63/325830/campos_512_v4
+63/325849/campos_512_v4
+63/325864/campos_512_v4
+63/325878/campos_512_v4
+63/325899/campos_512_v4
+63/325902/campos_512_v4
+63/325904/campos_512_v4
+63/325909/campos_512_v4
+63/325913/campos_512_v4
+63/325926/campos_512_v4
+63/325928/campos_512_v4
+63/325929/campos_512_v4
+63/325943/campos_512_v4
+63/325945/campos_512_v4
+63/325954/campos_512_v4
+63/325961/campos_512_v4
+63/325964/campos_512_v4
+63/325972/campos_512_v4
+63/325973/campos_512_v4
+63/325981/campos_512_v4
+63/325983/campos_512_v4
+63/325988/campos_512_v4
+63/325990/campos_512_v4
+63/325992/campos_512_v4
+63/325994/campos_512_v4
+63/325995/campos_512_v4
+63/326004/campos_512_v4
+63/326008/campos_512_v4
+63/326021/campos_512_v4
+63/326022/campos_512_v4
+63/326039/campos_512_v4
+63/326048/campos_512_v4
+63/326056/campos_512_v4
+63/326058/campos_512_v4
+63/326072/campos_512_v4
+63/326076/campos_512_v4
+63/326077/campos_512_v4
+63/326078/campos_512_v4
+63/326085/campos_512_v4
+63/326098/campos_512_v4
+63/326101/campos_512_v4
+63/326114/campos_512_v4
+63/326125/campos_512_v4
+63/326128/campos_512_v4
+63/326135/campos_512_v4
+63/326139/campos_512_v4
+63/326141/campos_512_v4
+63/326142/campos_512_v4
+63/326149/campos_512_v4
+63/326152/campos_512_v4
+63/326157/campos_512_v4
+63/326181/campos_512_v4
+63/326183/campos_512_v4
+63/326184/campos_512_v4
+63/326186/campos_512_v4
+63/326193/campos_512_v4
+63/326197/campos_512_v4
+63/326206/campos_512_v4
+63/326217/campos_512_v4
+63/326218/campos_512_v4
+63/326234/campos_512_v4
+63/326241/campos_512_v4
+63/326254/campos_512_v4
+63/326256/campos_512_v4
+63/326273/campos_512_v4
+63/326284/campos_512_v4
+63/326300/campos_512_v4
+63/326304/campos_512_v4
+63/326338/campos_512_v4
+63/326340/campos_512_v4
+63/326356/campos_512_v4
+63/326361/campos_512_v4
+63/326370/campos_512_v4
+63/326389/campos_512_v4
+63/326390/campos_512_v4
+63/326406/campos_512_v4
+63/326413/campos_512_v4
+63/326417/campos_512_v4
+63/326427/campos_512_v4
+63/326436/campos_512_v4
+63/326444/campos_512_v4
+63/326446/campos_512_v4
+63/326471/campos_512_v4
+63/326475/campos_512_v4
+63/326490/campos_512_v4
+63/326492/campos_512_v4
+63/326493/campos_512_v4
+63/326496/campos_512_v4
+63/326506/campos_512_v4
+63/326518/campos_512_v4
+63/326519/campos_512_v4
+63/326520/campos_512_v4
+63/326526/campos_512_v4
+63/326536/campos_512_v4
+63/326537/campos_512_v4
+63/326541/campos_512_v4
+63/326545/campos_512_v4
+63/326546/campos_512_v4
+63/326549/campos_512_v4
+63/326550/campos_512_v4
+63/326556/campos_512_v4
+63/326568/campos_512_v4
+63/326583/campos_512_v4
+63/326585/campos_512_v4
+63/326598/campos_512_v4
+63/326602/campos_512_v4
+63/326603/campos_512_v4
+63/326604/campos_512_v4
+63/326607/campos_512_v4
+63/326613/campos_512_v4
+63/326619/campos_512_v4
+63/326624/campos_512_v4
+63/326626/campos_512_v4
+63/326628/campos_512_v4
+63/326678/campos_512_v4
+63/326690/campos_512_v4
+63/326692/campos_512_v4
+63/326694/campos_512_v4
+63/326695/campos_512_v4
+63/326696/campos_512_v4
+63/326698/campos_512_v4
+63/326710/campos_512_v4
+63/326714/campos_512_v4
+63/326723/campos_512_v4
+63/326737/campos_512_v4
+63/326742/campos_512_v4
+63/326745/campos_512_v4
+63/326749/campos_512_v4
+63/326751/campos_512_v4
+63/326756/campos_512_v4
+63/326763/campos_512_v4
+63/326765/campos_512_v4
+63/326783/campos_512_v4
+63/326791/campos_512_v4
+63/326800/campos_512_v4
+63/326803/campos_512_v4
+63/326808/campos_512_v4
+63/326818/campos_512_v4
+63/326827/campos_512_v4
+63/326837/campos_512_v4
+63/326842/campos_512_v4
+63/326865/campos_512_v4
+63/326872/campos_512_v4
+63/326874/campos_512_v4
+63/326882/campos_512_v4
+63/326883/campos_512_v4
+63/326910/campos_512_v4
+63/326915/campos_512_v4
+63/326923/campos_512_v4
+63/326929/campos_512_v4
+63/326949/campos_512_v4
+63/326950/campos_512_v4
+63/326958/campos_512_v4
+63/326966/campos_512_v4
+63/326969/campos_512_v4
+63/326970/campos_512_v4
+63/326974/campos_512_v4
+63/326981/campos_512_v4
+63/326987/campos_512_v4
+63/326993/campos_512_v4
+63/326998/campos_512_v4
+63/327010/campos_512_v4
+63/327011/campos_512_v4
+63/327027/campos_512_v4
+63/327030/campos_512_v4
+63/327035/campos_512_v4
+63/327041/campos_512_v4
+63/327054/campos_512_v4
+63/327056/campos_512_v4
+63/327061/campos_512_v4
+63/327062/campos_512_v4
+63/327063/campos_512_v4
+63/327070/campos_512_v4
+63/327073/campos_512_v4
+63/327087/campos_512_v4
+63/327090/campos_512_v4
+63/327095/campos_512_v4
+63/327099/campos_512_v4
+63/327115/campos_512_v4
+63/327121/campos_512_v4
+63/327142/campos_512_v4
+63/327145/campos_512_v4
+63/327156/campos_512_v4
+63/327170/campos_512_v4
+63/327171/campos_512_v4
+63/327176/campos_512_v4
+63/327186/campos_512_v4
+63/327188/campos_512_v4
+63/327208/campos_512_v4
+63/327218/campos_512_v4
+63/327221/campos_512_v4
+63/327230/campos_512_v4
+63/327236/campos_512_v4
+63/327237/campos_512_v4
+63/327252/campos_512_v4
+63/327258/campos_512_v4
+63/327281/campos_512_v4
+63/327285/campos_512_v4
+63/327288/campos_512_v4
+63/327290/campos_512_v4
+63/327291/campos_512_v4
+63/327295/campos_512_v4
+63/327307/campos_512_v4
+63/327315/campos_512_v4
+63/327316/campos_512_v4
+63/327320/campos_512_v4
+63/327333/campos_512_v4
+63/327334/campos_512_v4
+63/327336/campos_512_v4
+63/327341/campos_512_v4
+63/327342/campos_512_v4
+63/327355/campos_512_v4
+63/327361/campos_512_v4
+63/327363/campos_512_v4
+63/327367/campos_512_v4
+63/327371/campos_512_v4
+63/327381/campos_512_v4
+63/327383/campos_512_v4
+63/327385/campos_512_v4
+63/327396/campos_512_v4
+63/327398/campos_512_v4
+63/327402/campos_512_v4
+63/327413/campos_512_v4
+63/327417/campos_512_v4
+63/327423/campos_512_v4
+63/327433/campos_512_v4
+63/327434/campos_512_v4
+63/327438/campos_512_v4
+63/327476/campos_512_v4
+63/327477/campos_512_v4
+63/327492/campos_512_v4
+63/327494/campos_512_v4
+63/327499/campos_512_v4
+63/327502/campos_512_v4
+63/327504/campos_512_v4
+63/327515/campos_512_v4
+63/327517/campos_512_v4
+63/327523/campos_512_v4
+63/327531/campos_512_v4
+63/327536/campos_512_v4
+63/327540/campos_512_v4
+63/327550/campos_512_v4
+63/327561/campos_512_v4
+63/327575/campos_512_v4
+63/327592/campos_512_v4
+63/327626/campos_512_v4
+63/327634/campos_512_v4
+63/327649/campos_512_v4
+63/327657/campos_512_v4
+63/327659/campos_512_v4
+63/327664/campos_512_v4
+63/327667/campos_512_v4
+63/327674/campos_512_v4
+63/327679/campos_512_v4
+63/327694/campos_512_v4
+63/327717/campos_512_v4
+63/327734/campos_512_v4
+63/327735/campos_512_v4
+63/327736/campos_512_v4
+63/327738/campos_512_v4
+63/327746/campos_512_v4
+63/327752/campos_512_v4
+63/327758/campos_512_v4
+63/327761/campos_512_v4
+63/327778/campos_512_v4
+63/327790/campos_512_v4
+63/327795/campos_512_v4
+63/327816/campos_512_v4
+63/327823/campos_512_v4
+63/327828/campos_512_v4
+63/327834/campos_512_v4
+63/327835/campos_512_v4
+63/327837/campos_512_v4
+63/327840/campos_512_v4
+63/327846/campos_512_v4
+63/327853/campos_512_v4
+63/327876/campos_512_v4
+63/327890/campos_512_v4
+63/327892/campos_512_v4
+63/327907/campos_512_v4
+63/327936/campos_512_v4
+63/327966/campos_512_v4
+63/327985/campos_512_v4
+63/327987/campos_512_v4
+63/327991/campos_512_v4
+63/327992/campos_512_v4
+63/328000/campos_512_v4
+63/328012/campos_512_v4
+63/328022/campos_512_v4
+63/328034/campos_512_v4
+63/328051/campos_512_v4
+63/328065/campos_512_v4
+63/328066/campos_512_v4
+63/328069/campos_512_v4
+63/328076/campos_512_v4
+63/328084/campos_512_v4
+63/328088/campos_512_v4
+63/328098/campos_512_v4
+63/328101/campos_512_v4
+63/328121/campos_512_v4
+63/328131/campos_512_v4
+63/328135/campos_512_v4
+63/328144/campos_512_v4
+63/328147/campos_512_v4
+63/328149/campos_512_v4
+63/328157/campos_512_v4
+63/328164/campos_512_v4
+63/328168/campos_512_v4
+63/328170/campos_512_v4
+63/328181/campos_512_v4
+63/328198/campos_512_v4
+63/328200/campos_512_v4
+63/328211/campos_512_v4
+63/328234/campos_512_v4
+63/328238/campos_512_v4
+63/328245/campos_512_v4
+63/328261/campos_512_v4
+63/328265/campos_512_v4
+63/328266/campos_512_v4
+63/328267/campos_512_v4
+63/328268/campos_512_v4
+63/328270/campos_512_v4
+63/328278/campos_512_v4
+63/328281/campos_512_v4
+63/328296/campos_512_v4
+63/328297/campos_512_v4
+63/328312/campos_512_v4
+63/328316/campos_512_v4
+63/328324/campos_512_v4
+63/328334/campos_512_v4
+63/328338/campos_512_v4
+63/328343/campos_512_v4
+63/328345/campos_512_v4
+63/328347/campos_512_v4
+63/328348/campos_512_v4
+63/328349/campos_512_v4
+63/328376/campos_512_v4
+63/328377/campos_512_v4
+63/328394/campos_512_v4
+63/328399/campos_512_v4
+63/328403/campos_512_v4
+63/328409/campos_512_v4
+63/328410/campos_512_v4
+63/328416/campos_512_v4
+63/328423/campos_512_v4
+63/328441/campos_512_v4
+63/328442/campos_512_v4
+63/328456/campos_512_v4
+63/328467/campos_512_v4
+63/328499/campos_512_v4
+63/328524/campos_512_v4
+63/328527/campos_512_v4
+63/328542/campos_512_v4
+63/328545/campos_512_v4
+63/328558/campos_512_v4
+63/328559/campos_512_v4
+63/328560/campos_512_v4
+63/328563/campos_512_v4
+63/328566/campos_512_v4
+63/328577/campos_512_v4
+63/328580/campos_512_v4
+63/328584/campos_512_v4
+63/328587/campos_512_v4
+63/328590/campos_512_v4
+63/328594/campos_512_v4
+63/328598/campos_512_v4
+63/328608/campos_512_v4
+63/328622/campos_512_v4
+63/328625/campos_512_v4
+63/328631/campos_512_v4
+63/328634/campos_512_v4
+63/328651/campos_512_v4
+63/328658/campos_512_v4
+63/328670/campos_512_v4
+63/328696/campos_512_v4
+63/328698/campos_512_v4
+63/328699/campos_512_v4
+63/328701/campos_512_v4
+63/328702/campos_512_v4
+63/328707/campos_512_v4
+63/328717/campos_512_v4
+63/328723/campos_512_v4
+63/328734/campos_512_v4
+63/328761/campos_512_v4
+63/328770/campos_512_v4
+63/328773/campos_512_v4
+63/328778/campos_512_v4
+63/328792/campos_512_v4
+63/328796/campos_512_v4
+63/328831/campos_512_v4
+63/328837/campos_512_v4
+63/328838/campos_512_v4
+63/328839/campos_512_v4
+63/328879/campos_512_v4
+63/328887/campos_512_v4
+63/328896/campos_512_v4
+63/328898/campos_512_v4
+63/328901/campos_512_v4
+63/328904/campos_512_v4
+63/328915/campos_512_v4
+63/328921/campos_512_v4
+63/328934/campos_512_v4
+63/328955/campos_512_v4
+63/328963/campos_512_v4
+63/328968/campos_512_v4
+63/328992/campos_512_v4
+63/328994/campos_512_v4
+63/329004/campos_512_v4
+63/329008/campos_512_v4
+63/329011/campos_512_v4
+63/329017/campos_512_v4
+63/329021/campos_512_v4
+63/329023/campos_512_v4
+63/329032/campos_512_v4
+63/329043/campos_512_v4
+63/329050/campos_512_v4
+63/329057/campos_512_v4
+63/329058/campos_512_v4
+63/329070/campos_512_v4
+63/329076/campos_512_v4
+63/329082/campos_512_v4
+63/329084/campos_512_v4
+63/329086/campos_512_v4
+63/329122/campos_512_v4
+63/329128/campos_512_v4
+63/329129/campos_512_v4
+63/329146/campos_512_v4
+63/329150/campos_512_v4
+63/329156/campos_512_v4
+63/329166/campos_512_v4
+63/329169/campos_512_v4
+63/329177/campos_512_v4
+63/329182/campos_512_v4
+63/329188/campos_512_v4
+63/329196/campos_512_v4
+63/329208/campos_512_v4
+63/329222/campos_512_v4
+63/329225/campos_512_v4
+63/329227/campos_512_v4
+63/329232/campos_512_v4
+63/329239/campos_512_v4
+63/329244/campos_512_v4
+63/329248/campos_512_v4
+63/329251/campos_512_v4
+63/329259/campos_512_v4
+63/329263/campos_512_v4
+63/329272/campos_512_v4
+63/329277/campos_512_v4
+63/329280/campos_512_v4
+63/329282/campos_512_v4
+63/329300/campos_512_v4
+63/329301/campos_512_v4
+63/329304/campos_512_v4
+63/329307/campos_512_v4
+63/329323/campos_512_v4
+63/329345/campos_512_v4
+63/329353/campos_512_v4
+63/329369/campos_512_v4
+63/329377/campos_512_v4
+63/329397/campos_512_v4
+63/329413/campos_512_v4
+63/329431/campos_512_v4
+63/329437/campos_512_v4
+63/329438/campos_512_v4
+63/329462/campos_512_v4
+63/329464/campos_512_v4
+63/329471/campos_512_v4
+63/329506/campos_512_v4
+63/329535/campos_512_v4
+63/329538/campos_512_v4
+63/329545/campos_512_v4
+63/329577/campos_512_v4
+63/329598/campos_512_v4
+63/329600/campos_512_v4
+63/329603/campos_512_v4
+63/329604/campos_512_v4
+63/329612/campos_512_v4
+63/329617/campos_512_v4
+63/329618/campos_512_v4
+63/329619/campos_512_v4
+63/329639/campos_512_v4
+63/329642/campos_512_v4
+63/329646/campos_512_v4
+63/329653/campos_512_v4
+63/329674/campos_512_v4
+63/329682/campos_512_v4
+63/329683/campos_512_v4
+63/329693/campos_512_v4
+63/329709/campos_512_v4
+63/329711/campos_512_v4
+63/329718/campos_512_v4
+63/329739/campos_512_v4
+63/329742/campos_512_v4
+63/329743/campos_512_v4
+63/329753/campos_512_v4
+63/329796/campos_512_v4
+63/329798/campos_512_v4
+63/329808/campos_512_v4
+63/329824/campos_512_v4
+63/329838/campos_512_v4
+63/329849/campos_512_v4
+63/329850/campos_512_v4
+63/329852/campos_512_v4
+63/329855/campos_512_v4
+63/329861/campos_512_v4
+63/329863/campos_512_v4
+63/329890/campos_512_v4
+63/329895/campos_512_v4
+63/329901/campos_512_v4
+63/329907/campos_512_v4
+63/329915/campos_512_v4
+63/329929/campos_512_v4
+63/329935/campos_512_v4
+63/329950/campos_512_v4
+63/329954/campos_512_v4
+63/329956/campos_512_v4
+63/329960/campos_512_v4
+63/329963/campos_512_v4
+63/329973/campos_512_v4
+63/329974/campos_512_v4
+63/330001/campos_512_v4
+64/330009/campos_512_v4
+64/330027/campos_512_v4
+64/330028/campos_512_v4
+64/330055/campos_512_v4
+64/330057/campos_512_v4
+64/330072/campos_512_v4
+64/330078/campos_512_v4
+64/330100/campos_512_v4
+64/330104/campos_512_v4
+64/330109/campos_512_v4
+64/330110/campos_512_v4
+64/330115/campos_512_v4
+64/330131/campos_512_v4
+64/330139/campos_512_v4
+64/330150/campos_512_v4
+64/330159/campos_512_v4
+64/330171/campos_512_v4
+64/330173/campos_512_v4
+64/330174/campos_512_v4
+64/330178/campos_512_v4
+64/330186/campos_512_v4
+64/330191/campos_512_v4
+64/330194/campos_512_v4
+64/330201/campos_512_v4
+64/330208/campos_512_v4
+64/330213/campos_512_v4
+64/330214/campos_512_v4
+64/330216/campos_512_v4
+64/330240/campos_512_v4
+64/330243/campos_512_v4
+64/330245/campos_512_v4
+64/330246/campos_512_v4
+64/330253/campos_512_v4
+64/330259/campos_512_v4
+64/330272/campos_512_v4
+64/330273/campos_512_v4
+64/330275/campos_512_v4
+64/330280/campos_512_v4
+64/330284/campos_512_v4
+64/330288/campos_512_v4
+64/330302/campos_512_v4
+64/330315/campos_512_v4
+64/330330/campos_512_v4
+64/330346/campos_512_v4
+64/330349/campos_512_v4
+64/330365/campos_512_v4
+64/330367/campos_512_v4
+64/330370/campos_512_v4
+64/330377/campos_512_v4
+64/330395/campos_512_v4
+64/330408/campos_512_v4
+64/330409/campos_512_v4
+64/330410/campos_512_v4
+64/330412/campos_512_v4
+64/330413/campos_512_v4
+64/330419/campos_512_v4
+64/330427/campos_512_v4
+64/330433/campos_512_v4
+64/330436/campos_512_v4
+64/330438/campos_512_v4
+64/330440/campos_512_v4
+64/330443/campos_512_v4
+64/330461/campos_512_v4
+64/330466/campos_512_v4
+64/330473/campos_512_v4
+64/330479/campos_512_v4
+64/330497/campos_512_v4
+64/330502/campos_512_v4
+64/330506/campos_512_v4
+64/330512/campos_512_v4
+64/330514/campos_512_v4
+64/330522/campos_512_v4
+64/330524/campos_512_v4
+64/330526/campos_512_v4
+64/330528/campos_512_v4
+64/330544/campos_512_v4
+64/330546/campos_512_v4
+64/330553/campos_512_v4
+64/330559/campos_512_v4
+64/330569/campos_512_v4
+64/330572/campos_512_v4
+64/330574/campos_512_v4
+64/330580/campos_512_v4
+64/330588/campos_512_v4
+64/330590/campos_512_v4
+64/330593/campos_512_v4
+64/330623/campos_512_v4
+64/330635/campos_512_v4
+64/330640/campos_512_v4
+64/330658/campos_512_v4
+64/330667/campos_512_v4
+64/330670/campos_512_v4
+64/330692/campos_512_v4
+64/330701/campos_512_v4
+64/330705/campos_512_v4
+64/330722/campos_512_v4
+64/330735/campos_512_v4
+64/330736/campos_512_v4
+64/330741/campos_512_v4
+64/330754/campos_512_v4
+64/330755/campos_512_v4
+64/330757/campos_512_v4
+64/330765/campos_512_v4
+64/330776/campos_512_v4
+64/330792/campos_512_v4
+64/330793/campos_512_v4
+64/330807/campos_512_v4
+64/330810/campos_512_v4
+64/330811/campos_512_v4
+64/330821/campos_512_v4
+64/330842/campos_512_v4
+64/330843/campos_512_v4
+64/330853/campos_512_v4
+64/330859/campos_512_v4
+64/330867/campos_512_v4
+64/330869/campos_512_v4
+64/330878/campos_512_v4
+64/330883/campos_512_v4
+64/330885/campos_512_v4
+64/330890/campos_512_v4
+64/330895/campos_512_v4
+64/330904/campos_512_v4
+64/330919/campos_512_v4
+64/330926/campos_512_v4
+64/330929/campos_512_v4
+64/330933/campos_512_v4
+64/330942/campos_512_v4
+64/330943/campos_512_v4
+64/330944/campos_512_v4
+64/330946/campos_512_v4
+64/330947/campos_512_v4
+64/330948/campos_512_v4
+64/330989/campos_512_v4
+64/330991/campos_512_v4
+64/331006/campos_512_v4
+64/331008/campos_512_v4
+64/331011/campos_512_v4
+64/331016/campos_512_v4
+64/331021/campos_512_v4
+64/331022/campos_512_v4
+64/331023/campos_512_v4
+64/331040/campos_512_v4
+64/331044/campos_512_v4
+64/331049/campos_512_v4
+64/331054/campos_512_v4
+64/331059/campos_512_v4
+64/331061/campos_512_v4
+64/331065/campos_512_v4
+64/331073/campos_512_v4
+64/331074/campos_512_v4
+64/331099/campos_512_v4
+64/331113/campos_512_v4
+64/331114/campos_512_v4
+64/331121/campos_512_v4
+64/331129/campos_512_v4
+64/331134/campos_512_v4
+64/331142/campos_512_v4
+64/331144/campos_512_v4
+64/331147/campos_512_v4
+64/331148/campos_512_v4
+64/331149/campos_512_v4
+64/331160/campos_512_v4
+64/331177/campos_512_v4
+64/331186/campos_512_v4
+64/331187/campos_512_v4
+64/331189/campos_512_v4
+64/331190/campos_512_v4
+64/331195/campos_512_v4
+64/331219/campos_512_v4
+64/331234/campos_512_v4
+64/331237/campos_512_v4
+64/331251/campos_512_v4
+64/331252/campos_512_v4
+64/331253/campos_512_v4
+64/331272/campos_512_v4
+64/331303/campos_512_v4
+64/331310/campos_512_v4
+64/331321/campos_512_v4
+64/331322/campos_512_v4
+64/331330/campos_512_v4
+64/331346/campos_512_v4
+64/331352/campos_512_v4
+64/331370/campos_512_v4
+64/331379/campos_512_v4
+64/331380/campos_512_v4
+64/331382/campos_512_v4
+64/331393/campos_512_v4
+64/331399/campos_512_v4
+64/331400/campos_512_v4
+64/331407/campos_512_v4
+64/331415/campos_512_v4
+64/331419/campos_512_v4
+64/331422/campos_512_v4
+64/331429/campos_512_v4
+64/331432/campos_512_v4
+64/331442/campos_512_v4
+64/331443/campos_512_v4
+64/331447/campos_512_v4
+64/331455/campos_512_v4
+64/331468/campos_512_v4
+64/331471/campos_512_v4
+64/331475/campos_512_v4
+64/331483/campos_512_v4
+64/331493/campos_512_v4
+64/331494/campos_512_v4
+64/331507/campos_512_v4
+64/331544/campos_512_v4
+64/331547/campos_512_v4
+64/331556/campos_512_v4
+64/331565/campos_512_v4
+64/331572/campos_512_v4
+64/331575/campos_512_v4
+64/331586/campos_512_v4
+64/331592/campos_512_v4
+64/331594/campos_512_v4
+64/331596/campos_512_v4
+64/331603/campos_512_v4
+64/331611/campos_512_v4
+64/331636/campos_512_v4
+64/331642/campos_512_v4
+64/331670/campos_512_v4
+64/331680/campos_512_v4
+64/331694/campos_512_v4
+64/331702/campos_512_v4
+64/331732/campos_512_v4
+64/331735/campos_512_v4
+64/331736/campos_512_v4
+64/331742/campos_512_v4
+64/331744/campos_512_v4
+64/331746/campos_512_v4
+64/331751/campos_512_v4
+64/331754/campos_512_v4
+64/331768/campos_512_v4
+64/331795/campos_512_v4
+64/331798/campos_512_v4
+64/331808/campos_512_v4
+64/331812/campos_512_v4
+64/331819/campos_512_v4
+64/331824/campos_512_v4
+64/331832/campos_512_v4
+64/331874/campos_512_v4
+64/331878/campos_512_v4
+64/331905/campos_512_v4
+64/331924/campos_512_v4
+64/331928/campos_512_v4
+64/331935/campos_512_v4
+64/331941/campos_512_v4
+64/331980/campos_512_v4
+64/331988/campos_512_v4
+64/331990/campos_512_v4
+64/332000/campos_512_v4
+64/332005/campos_512_v4
+64/332011/campos_512_v4
+64/332019/campos_512_v4
+64/332025/campos_512_v4
+64/332050/campos_512_v4
+64/332051/campos_512_v4
+64/332083/campos_512_v4
+64/332084/campos_512_v4
+64/332085/campos_512_v4
+64/332086/campos_512_v4
+64/332102/campos_512_v4
+64/332111/campos_512_v4
+64/332112/campos_512_v4
+64/332115/campos_512_v4
+64/332127/campos_512_v4
+64/332130/campos_512_v4
+64/332131/campos_512_v4
+64/332133/campos_512_v4
+64/332159/campos_512_v4
+64/332164/campos_512_v4
+64/332165/campos_512_v4
+64/332173/campos_512_v4
+64/332180/campos_512_v4
+64/332183/campos_512_v4
+64/332189/campos_512_v4
+64/332197/campos_512_v4
+64/332203/campos_512_v4
+64/332204/campos_512_v4
+64/332205/campos_512_v4
+64/332231/campos_512_v4
+64/332240/campos_512_v4
+64/332241/campos_512_v4
+64/332244/campos_512_v4
+64/332257/campos_512_v4
+64/332261/campos_512_v4
+64/332267/campos_512_v4
+64/332276/campos_512_v4
+64/332277/campos_512_v4
+64/332278/campos_512_v4
+64/332285/campos_512_v4
+64/332286/campos_512_v4
+64/332296/campos_512_v4
+64/332297/campos_512_v4
+64/332316/campos_512_v4
+64/332325/campos_512_v4
+64/332331/campos_512_v4
+64/332337/campos_512_v4
+64/332348/campos_512_v4
+64/332363/campos_512_v4
+64/332373/campos_512_v4
+64/332389/campos_512_v4
+64/332414/campos_512_v4
+64/332423/campos_512_v4
+64/332424/campos_512_v4
+64/332428/campos_512_v4
+64/332429/campos_512_v4
+64/332430/campos_512_v4
+64/332439/campos_512_v4
+64/332440/campos_512_v4
+64/332451/campos_512_v4
+64/332454/campos_512_v4
+64/332461/campos_512_v4
+64/332462/campos_512_v4
+64/332493/campos_512_v4
+64/332494/campos_512_v4
+64/332499/campos_512_v4
+64/332501/campos_512_v4
+64/332516/campos_512_v4
+64/332525/campos_512_v4
+64/332530/campos_512_v4
+64/332531/campos_512_v4
+64/332560/campos_512_v4
+64/332566/campos_512_v4
+64/332581/campos_512_v4
+64/332586/campos_512_v4
+64/332608/campos_512_v4
+64/332631/campos_512_v4
+64/332635/campos_512_v4
+64/332646/campos_512_v4
+64/332647/campos_512_v4
+64/332656/campos_512_v4
+64/332664/campos_512_v4
+64/332668/campos_512_v4
+64/332669/campos_512_v4
+64/332674/campos_512_v4
+64/332686/campos_512_v4
+64/332705/campos_512_v4
+64/332709/campos_512_v4
+64/332733/campos_512_v4
+64/332735/campos_512_v4
+64/332737/campos_512_v4
+64/332745/campos_512_v4
+64/332747/campos_512_v4
+64/332762/campos_512_v4
+64/332764/campos_512_v4
+64/332770/campos_512_v4
+64/332777/campos_512_v4
+64/332793/campos_512_v4
+64/332800/campos_512_v4
+64/332809/campos_512_v4
+64/332817/campos_512_v4
+64/332822/campos_512_v4
+64/332832/campos_512_v4
+64/332841/campos_512_v4
+64/332865/campos_512_v4
+64/332875/campos_512_v4
+64/332895/campos_512_v4
+64/332915/campos_512_v4
+64/332917/campos_512_v4
+64/332930/campos_512_v4
+64/332938/campos_512_v4
+64/332940/campos_512_v4
+64/332941/campos_512_v4
+64/332946/campos_512_v4
+64/332949/campos_512_v4
+64/332951/campos_512_v4
+64/332952/campos_512_v4
+64/332958/campos_512_v4
+64/332971/campos_512_v4
+64/332972/campos_512_v4
+64/332980/campos_512_v4
+64/332991/campos_512_v4
+64/332997/campos_512_v4
+64/332999/campos_512_v4
+64/333001/campos_512_v4
+64/333040/campos_512_v4
+64/333043/campos_512_v4
+64/333048/campos_512_v4
+64/333052/campos_512_v4
+64/333053/campos_512_v4
+64/333059/campos_512_v4
+64/333068/campos_512_v4
+64/333069/campos_512_v4
+64/333071/campos_512_v4
+64/333098/campos_512_v4
+64/333099/campos_512_v4
+64/333117/campos_512_v4
+64/333122/campos_512_v4
+64/333125/campos_512_v4
+64/333154/campos_512_v4
+64/333155/campos_512_v4
+64/333163/campos_512_v4
+64/333188/campos_512_v4
+64/333190/campos_512_v4
+64/333198/campos_512_v4
+64/333204/campos_512_v4
+64/333222/campos_512_v4
+64/333224/campos_512_v4
+64/333231/campos_512_v4
+64/333234/campos_512_v4
+64/333240/campos_512_v4
+64/333242/campos_512_v4
+64/333246/campos_512_v4
+64/333255/campos_512_v4
+64/333268/campos_512_v4
+64/333270/campos_512_v4
+64/333272/campos_512_v4
+64/333273/campos_512_v4
+64/333275/campos_512_v4
+64/333276/campos_512_v4
+64/333284/campos_512_v4
+64/333298/campos_512_v4
+64/333303/campos_512_v4
+64/333307/campos_512_v4
+64/333319/campos_512_v4
+64/333325/campos_512_v4
+64/333333/campos_512_v4
+64/333338/campos_512_v4
+64/333348/campos_512_v4
+64/333361/campos_512_v4
+64/333382/campos_512_v4
+64/333385/campos_512_v4
+64/333387/campos_512_v4
+64/333388/campos_512_v4
+64/333390/campos_512_v4
+64/333394/campos_512_v4
+64/333396/campos_512_v4
+64/333397/campos_512_v4
+64/333402/campos_512_v4
+64/333404/campos_512_v4
+64/333406/campos_512_v4
+64/333409/campos_512_v4
+64/333416/campos_512_v4
+64/333424/campos_512_v4
+64/333430/campos_512_v4
+64/333431/campos_512_v4
+64/333437/campos_512_v4
+64/333439/campos_512_v4
+64/333443/campos_512_v4
+64/333445/campos_512_v4
+64/333466/campos_512_v4
+64/333470/campos_512_v4
+64/333479/campos_512_v4
+64/333510/campos_512_v4
+64/333517/campos_512_v4
+64/333524/campos_512_v4
+64/333541/campos_512_v4
+64/333560/campos_512_v4
+64/333561/campos_512_v4
+64/333583/campos_512_v4
+64/333584/campos_512_v4
+64/333592/campos_512_v4
+64/333596/campos_512_v4
+64/333605/campos_512_v4
+64/333621/campos_512_v4
+64/333622/campos_512_v4
+64/333626/campos_512_v4
+64/333630/campos_512_v4
+64/333636/campos_512_v4
+64/333644/campos_512_v4
+64/333652/campos_512_v4
+64/333670/campos_512_v4
+64/333675/campos_512_v4
+64/333677/campos_512_v4
+64/333681/campos_512_v4
+64/333684/campos_512_v4
+64/333694/campos_512_v4
+64/333699/campos_512_v4
+64/333709/campos_512_v4
+64/333721/campos_512_v4
+64/333722/campos_512_v4
+64/333728/campos_512_v4
+64/333731/campos_512_v4
+64/333777/campos_512_v4
+64/333778/campos_512_v4
+64/333806/campos_512_v4
+64/333812/campos_512_v4
+64/333821/campos_512_v4
+64/333831/campos_512_v4
+64/333850/campos_512_v4
+64/333852/campos_512_v4
+64/333864/campos_512_v4
+64/333868/campos_512_v4
+64/333885/campos_512_v4
+64/333888/campos_512_v4
+64/333893/campos_512_v4
+64/333903/campos_512_v4
+64/333915/campos_512_v4
+64/333934/campos_512_v4
+64/333962/campos_512_v4
+64/333968/campos_512_v4
+64/333970/campos_512_v4
+64/333973/campos_512_v4
+64/333985/campos_512_v4
+64/333992/campos_512_v4
+64/334003/campos_512_v4
+64/334004/campos_512_v4
+64/334009/campos_512_v4
+64/334022/campos_512_v4
+64/334029/campos_512_v4
+64/334033/campos_512_v4
+64/334034/campos_512_v4
+64/334044/campos_512_v4
+64/334051/campos_512_v4
+64/334062/campos_512_v4
+64/334063/campos_512_v4
+64/334069/campos_512_v4
+64/334075/campos_512_v4
+64/334077/campos_512_v4
+64/334079/campos_512_v4
+64/334081/campos_512_v4
+64/334083/campos_512_v4
+64/334086/campos_512_v4
+64/334092/campos_512_v4
+64/334099/campos_512_v4
+64/334101/campos_512_v4
+64/334104/campos_512_v4
+64/334105/campos_512_v4
+64/334109/campos_512_v4
+64/334116/campos_512_v4
+64/334122/campos_512_v4
+64/334125/campos_512_v4
+64/334145/campos_512_v4
+64/334146/campos_512_v4
+64/334148/campos_512_v4
+64/334149/campos_512_v4
+64/334153/campos_512_v4
+64/334155/campos_512_v4
+64/334171/campos_512_v4
+64/334188/campos_512_v4
+64/334195/campos_512_v4
+64/334210/campos_512_v4
+64/334219/campos_512_v4
+64/334227/campos_512_v4
+64/334238/campos_512_v4
+64/334260/campos_512_v4
+64/334276/campos_512_v4
+64/334291/campos_512_v4
+64/334293/campos_512_v4
+64/334315/campos_512_v4
+64/334318/campos_512_v4
+64/334320/campos_512_v4
+64/334322/campos_512_v4
+64/334325/campos_512_v4
+64/334331/campos_512_v4
+64/334336/campos_512_v4
+64/334343/campos_512_v4
+64/334348/campos_512_v4
+64/334352/campos_512_v4
+64/334358/campos_512_v4
+64/334367/campos_512_v4
+64/334370/campos_512_v4
+64/334373/campos_512_v4
+64/334376/campos_512_v4
+64/334378/campos_512_v4
+64/334379/campos_512_v4
+64/334382/campos_512_v4
+64/334405/campos_512_v4
+64/334413/campos_512_v4
+64/334422/campos_512_v4
+64/334430/campos_512_v4
+64/334434/campos_512_v4
+64/334444/campos_512_v4
+64/334446/campos_512_v4
+64/334451/campos_512_v4
+64/334452/campos_512_v4
+64/334453/campos_512_v4
+64/334463/campos_512_v4
+64/334472/campos_512_v4
+64/334481/campos_512_v4
+64/334484/campos_512_v4
+64/334511/campos_512_v4
+64/334513/campos_512_v4
+64/334524/campos_512_v4
+64/334529/campos_512_v4
+64/334530/campos_512_v4
+64/334531/campos_512_v4
+64/334536/campos_512_v4
+64/334549/campos_512_v4
+64/334557/campos_512_v4
+64/334563/campos_512_v4
+64/334571/campos_512_v4
+64/334585/campos_512_v4
+64/334586/campos_512_v4
+64/334593/campos_512_v4
+64/334603/campos_512_v4
+64/334606/campos_512_v4
+64/334609/campos_512_v4
+64/334611/campos_512_v4
+64/334614/campos_512_v4
+64/334616/campos_512_v4
+64/334619/campos_512_v4
+64/334622/campos_512_v4
+64/334639/campos_512_v4
+64/334658/campos_512_v4
+64/334669/campos_512_v4
+64/334673/campos_512_v4
+64/334675/campos_512_v4
+64/334691/campos_512_v4
+64/334704/campos_512_v4
+64/334717/campos_512_v4
+64/334718/campos_512_v4
+64/334725/campos_512_v4
+64/334731/campos_512_v4
+64/334736/campos_512_v4
+64/334737/campos_512_v4
+64/334766/campos_512_v4
+64/334771/campos_512_v4
+64/334772/campos_512_v4
+64/334794/campos_512_v4
+64/334797/campos_512_v4
+64/334806/campos_512_v4
+64/334809/campos_512_v4
+64/334811/campos_512_v4
+64/334824/campos_512_v4
+64/334846/campos_512_v4
+64/334862/campos_512_v4
+64/334870/campos_512_v4
+64/334871/campos_512_v4
+64/334872/campos_512_v4
+64/334895/campos_512_v4
+64/334911/campos_512_v4
+64/334922/campos_512_v4
+64/334926/campos_512_v4
+64/334928/campos_512_v4
+64/334933/campos_512_v4
+64/334941/campos_512_v4
+64/334943/campos_512_v4
+64/334945/campos_512_v4
+64/334953/campos_512_v4
+64/334954/campos_512_v4
+64/334955/campos_512_v4
+64/334963/campos_512_v4
+64/334976/campos_512_v4
+64/334984/campos_512_v4
+64/334990/campos_512_v4
+64/334997/campos_512_v4
+64/334998/campos_512_v4
+65/335006/campos_512_v4
+65/335016/campos_512_v4
+65/335019/campos_512_v4
+65/335024/campos_512_v4
+65/335025/campos_512_v4
+65/335037/campos_512_v4
+65/335051/campos_512_v4
+65/335055/campos_512_v4
+65/335057/campos_512_v4
+65/335097/campos_512_v4
+65/335100/campos_512_v4
+65/335133/campos_512_v4
+65/335139/campos_512_v4
+65/335154/campos_512_v4
+65/335176/campos_512_v4
+65/335179/campos_512_v4
+65/335187/campos_512_v4
+65/335191/campos_512_v4
+65/335199/campos_512_v4
+65/335202/campos_512_v4
+65/335204/campos_512_v4
+65/335222/campos_512_v4
+65/335233/campos_512_v4
+65/335241/campos_512_v4
+65/335243/campos_512_v4
+65/335257/campos_512_v4
+65/335261/campos_512_v4
+65/335265/campos_512_v4
+65/335279/campos_512_v4
+65/335284/campos_512_v4
+65/335293/campos_512_v4
+65/335297/campos_512_v4
+65/335300/campos_512_v4
+65/335304/campos_512_v4
+65/335311/campos_512_v4
+65/335316/campos_512_v4
+65/335321/campos_512_v4
+65/335322/campos_512_v4
+65/335324/campos_512_v4
+65/335325/campos_512_v4
+65/335355/campos_512_v4
+65/335359/campos_512_v4
+65/335361/campos_512_v4
+65/335373/campos_512_v4
+65/335377/campos_512_v4
+65/335397/campos_512_v4
+65/335399/campos_512_v4
+65/335410/campos_512_v4
+65/335412/campos_512_v4
+65/335417/campos_512_v4
+65/335419/campos_512_v4
+65/335426/campos_512_v4
+65/335430/campos_512_v4
+65/335433/campos_512_v4
+65/335450/campos_512_v4
+65/335452/campos_512_v4
+65/335459/campos_512_v4
+65/335464/campos_512_v4
+65/335469/campos_512_v4
+65/335485/campos_512_v4
+65/335501/campos_512_v4
+65/335504/campos_512_v4
+65/335514/campos_512_v4
+65/335515/campos_512_v4
+65/335518/campos_512_v4
+65/335519/campos_512_v4
+65/335530/campos_512_v4
+65/335544/campos_512_v4
+65/335554/campos_512_v4
+65/335556/campos_512_v4
+65/335562/campos_512_v4
+65/335577/campos_512_v4
+65/335580/campos_512_v4
+65/335594/campos_512_v4
+65/335617/campos_512_v4
+65/335623/campos_512_v4
+65/335626/campos_512_v4
+65/335628/campos_512_v4
+65/335629/campos_512_v4
+65/335630/campos_512_v4
+65/335636/campos_512_v4
+65/335664/campos_512_v4
+65/335668/campos_512_v4
+65/335677/campos_512_v4
+65/335679/campos_512_v4
+65/335680/campos_512_v4
+65/335690/campos_512_v4
+65/335691/campos_512_v4
+65/335712/campos_512_v4
+65/335716/campos_512_v4
+65/335717/campos_512_v4
+65/335720/campos_512_v4
+65/335721/campos_512_v4
+65/335722/campos_512_v4
+65/335728/campos_512_v4
+65/335732/campos_512_v4
+65/335734/campos_512_v4
+65/335757/campos_512_v4
+65/335775/campos_512_v4
+65/335799/campos_512_v4
+65/335808/campos_512_v4
+65/335809/campos_512_v4
+65/335812/campos_512_v4
+65/335815/campos_512_v4
+65/335817/campos_512_v4
+65/335826/campos_512_v4
+65/335839/campos_512_v4
+65/335851/campos_512_v4
+65/335854/campos_512_v4
+65/335862/campos_512_v4
+65/335867/campos_512_v4
+65/335870/campos_512_v4
+65/335874/campos_512_v4
+65/335879/campos_512_v4
+65/335891/campos_512_v4
+65/335894/campos_512_v4
+65/335897/campos_512_v4
+65/335905/campos_512_v4
+65/335910/campos_512_v4
+65/335932/campos_512_v4
+65/335934/campos_512_v4
+65/335947/campos_512_v4
+65/335956/campos_512_v4
+65/335958/campos_512_v4
+65/335960/campos_512_v4
+65/335990/campos_512_v4
+65/335992/campos_512_v4
+65/335993/campos_512_v4
+65/335999/campos_512_v4
+65/336000/campos_512_v4
+65/336004/campos_512_v4
+65/336005/campos_512_v4
+65/336010/campos_512_v4
+65/336021/campos_512_v4
+65/336022/campos_512_v4
+65/336030/campos_512_v4
+65/336042/campos_512_v4
+65/336048/campos_512_v4
+65/336050/campos_512_v4
+65/336054/campos_512_v4
+65/336055/campos_512_v4
+65/336065/campos_512_v4
+65/336071/campos_512_v4
+65/336076/campos_512_v4
+65/336097/campos_512_v4
+65/336098/campos_512_v4
+65/336114/campos_512_v4
+65/336126/campos_512_v4
+65/336144/campos_512_v4
+65/336146/campos_512_v4
+65/336150/campos_512_v4
+65/336156/campos_512_v4
+65/336157/campos_512_v4
+65/336161/campos_512_v4
+65/336164/campos_512_v4
+65/336177/campos_512_v4
+65/336182/campos_512_v4
+65/336204/campos_512_v4
+65/336205/campos_512_v4
+65/336218/campos_512_v4
+65/336226/campos_512_v4
+65/336230/campos_512_v4
+65/336245/campos_512_v4
+65/336257/campos_512_v4
+65/336266/campos_512_v4
+65/336274/campos_512_v4
+65/336285/campos_512_v4
+65/336298/campos_512_v4
+65/336306/campos_512_v4
+65/336309/campos_512_v4
+65/336314/campos_512_v4
+65/336328/campos_512_v4
+65/336332/campos_512_v4
+65/336333/campos_512_v4
+65/336335/campos_512_v4
+65/336338/campos_512_v4
+65/336352/campos_512_v4
+65/336369/campos_512_v4
+65/336372/campos_512_v4
+65/336390/campos_512_v4
+65/336400/campos_512_v4
+65/336425/campos_512_v4
+65/336429/campos_512_v4
+65/336443/campos_512_v4
+65/336454/campos_512_v4
+65/336468/campos_512_v4
+65/336474/campos_512_v4
+65/336489/campos_512_v4
+65/336500/campos_512_v4
+65/336505/campos_512_v4
+65/336524/campos_512_v4
+65/336538/campos_512_v4
+65/336551/campos_512_v4
+65/336556/campos_512_v4
+65/336559/campos_512_v4
+65/336564/campos_512_v4
+65/336569/campos_512_v4
+65/336582/campos_512_v4
+65/336584/campos_512_v4
+65/336598/campos_512_v4
+65/336601/campos_512_v4
+65/336622/campos_512_v4
+65/336626/campos_512_v4
+65/336633/campos_512_v4
+65/336635/campos_512_v4
+65/336641/campos_512_v4
+65/336651/campos_512_v4
+65/336665/campos_512_v4
+65/336694/campos_512_v4
+65/336697/campos_512_v4
+65/336699/campos_512_v4
+65/336702/campos_512_v4
+65/336710/campos_512_v4
+65/336711/campos_512_v4
+65/336717/campos_512_v4
+65/336728/campos_512_v4
+65/336738/campos_512_v4
+65/336743/campos_512_v4
+65/336751/campos_512_v4
+65/336763/campos_512_v4
+65/336778/campos_512_v4
+65/336786/campos_512_v4
+65/336792/campos_512_v4
+65/336800/campos_512_v4
+65/336804/campos_512_v4
+65/336813/campos_512_v4
+65/336832/campos_512_v4
+65/336835/campos_512_v4
+65/336856/campos_512_v4
+65/336857/campos_512_v4
+65/336864/campos_512_v4
+65/336872/campos_512_v4
+65/336879/campos_512_v4
+65/336903/campos_512_v4
+65/336923/campos_512_v4
+65/336930/campos_512_v4
+65/336936/campos_512_v4
+65/336937/campos_512_v4
+65/336942/campos_512_v4
+65/336955/campos_512_v4
+65/336975/campos_512_v4
+65/336983/campos_512_v4
+65/336988/campos_512_v4
+65/337025/campos_512_v4
+65/337043/campos_512_v4
+65/337050/campos_512_v4
+65/337055/campos_512_v4
+65/337060/campos_512_v4
+65/337067/campos_512_v4
+65/337078/campos_512_v4
+65/337095/campos_512_v4
+65/337097/campos_512_v4
+65/337100/campos_512_v4
+65/337134/campos_512_v4
+65/337139/campos_512_v4
+65/337149/campos_512_v4
+65/337152/campos_512_v4
+65/337163/campos_512_v4
+65/337175/campos_512_v4
+65/337185/campos_512_v4
+65/337204/campos_512_v4
+65/337208/campos_512_v4
+65/337219/campos_512_v4
+65/337225/campos_512_v4
+65/337233/campos_512_v4
+65/337237/campos_512_v4
+65/337243/campos_512_v4
+65/337245/campos_512_v4
+65/337250/campos_512_v4
+65/337255/campos_512_v4
+65/337260/campos_512_v4
+65/337281/campos_512_v4
+65/337283/campos_512_v4
+65/337287/campos_512_v4
+65/337290/campos_512_v4
+65/337308/campos_512_v4
+65/337312/campos_512_v4
+65/337313/campos_512_v4
+65/337318/campos_512_v4
+65/337331/campos_512_v4
+65/337334/campos_512_v4
+65/337336/campos_512_v4
+65/337340/campos_512_v4
+65/337341/campos_512_v4
+65/337345/campos_512_v4
+65/337347/campos_512_v4
+65/337349/campos_512_v4
+65/337359/campos_512_v4
+65/337360/campos_512_v4
+65/337363/campos_512_v4
+65/337365/campos_512_v4
+65/337378/campos_512_v4
+65/337385/campos_512_v4
+65/337386/campos_512_v4
+65/337400/campos_512_v4
+65/337411/campos_512_v4
+65/337420/campos_512_v4
+65/337427/campos_512_v4
+65/337439/campos_512_v4
+65/337444/campos_512_v4
+65/337451/campos_512_v4
+65/337461/campos_512_v4
+65/337478/campos_512_v4
+65/337481/campos_512_v4
+65/337490/campos_512_v4
+65/337491/campos_512_v4
+65/337514/campos_512_v4
+65/337517/campos_512_v4
+65/337520/campos_512_v4
+65/337541/campos_512_v4
+65/337550/campos_512_v4
+65/337561/campos_512_v4
+65/337563/campos_512_v4
+65/337571/campos_512_v4
+65/337588/campos_512_v4
+65/337600/campos_512_v4
+65/337613/campos_512_v4
+65/337616/campos_512_v4
+65/337620/campos_512_v4
+65/337625/campos_512_v4
+65/337628/campos_512_v4
+65/337630/campos_512_v4
+65/337635/campos_512_v4
+65/337636/campos_512_v4
+65/337637/campos_512_v4
+65/337644/campos_512_v4
+65/337648/campos_512_v4
+65/337650/campos_512_v4
+65/337658/campos_512_v4
+65/337666/campos_512_v4
+65/337670/campos_512_v4
+65/337677/campos_512_v4
+65/337681/campos_512_v4
+65/337698/campos_512_v4
+65/337701/campos_512_v4
+65/337703/campos_512_v4
+65/337717/campos_512_v4
+65/337734/campos_512_v4
+65/337743/campos_512_v4
+65/337745/campos_512_v4
+65/337758/campos_512_v4
+65/337784/campos_512_v4
+65/337787/campos_512_v4
+65/337792/campos_512_v4
+65/337799/campos_512_v4
+65/337800/campos_512_v4
+65/337810/campos_512_v4
+65/337811/campos_512_v4
+65/337813/campos_512_v4
+65/337817/campos_512_v4
+65/337823/campos_512_v4
+65/337827/campos_512_v4
+65/337829/campos_512_v4
+65/337850/campos_512_v4
+65/337853/campos_512_v4
+65/337889/campos_512_v4
+65/337895/campos_512_v4
+65/337898/campos_512_v4
+65/337904/campos_512_v4
+65/337910/campos_512_v4
+65/337914/campos_512_v4
+65/337920/campos_512_v4
+65/337930/campos_512_v4
+65/337933/campos_512_v4
+65/337950/campos_512_v4
+65/337952/campos_512_v4
+65/337956/campos_512_v4
+65/337957/campos_512_v4
+65/337963/campos_512_v4
+65/337969/campos_512_v4
+65/337979/campos_512_v4
+65/337994/campos_512_v4
+65/338000/campos_512_v4
+65/338015/campos_512_v4
+65/338034/campos_512_v4
+65/338035/campos_512_v4
+65/338042/campos_512_v4
+65/338059/campos_512_v4
+65/338065/campos_512_v4
+65/338077/campos_512_v4
+65/338098/campos_512_v4
+65/338102/campos_512_v4
+65/338112/campos_512_v4
+65/338121/campos_512_v4
+65/338131/campos_512_v4
+65/338134/campos_512_v4
+65/338145/campos_512_v4
+65/338161/campos_512_v4
+65/338162/campos_512_v4
+65/338168/campos_512_v4
+65/338169/campos_512_v4
+65/338170/campos_512_v4
+65/338173/campos_512_v4
+65/338181/campos_512_v4
+65/338192/campos_512_v4
+65/338195/campos_512_v4
+65/338213/campos_512_v4
+65/338215/campos_512_v4
+65/338217/campos_512_v4
+65/338218/campos_512_v4
+65/338219/campos_512_v4
+65/338228/campos_512_v4
+65/338230/campos_512_v4
+65/338233/campos_512_v4
+65/338235/campos_512_v4
+65/338242/campos_512_v4
+65/338246/campos_512_v4
+65/338261/campos_512_v4
+65/338285/campos_512_v4
+65/338286/campos_512_v4
+65/338306/campos_512_v4
+65/338313/campos_512_v4
+65/338322/campos_512_v4
+65/338323/campos_512_v4
+65/338346/campos_512_v4
+65/338351/campos_512_v4
+65/338352/campos_512_v4
+65/338355/campos_512_v4
+65/338359/campos_512_v4
+65/338382/campos_512_v4
+65/338387/campos_512_v4
+65/338388/campos_512_v4
+65/338404/campos_512_v4
+65/338416/campos_512_v4
+65/338418/campos_512_v4
+65/338419/campos_512_v4
+65/338421/campos_512_v4
+65/338443/campos_512_v4
+65/338448/campos_512_v4
+65/338456/campos_512_v4
+65/338457/campos_512_v4
+65/338477/campos_512_v4
+65/338489/campos_512_v4
+65/338494/campos_512_v4
+65/338498/campos_512_v4
+65/338499/campos_512_v4
+65/338508/campos_512_v4
+65/338519/campos_512_v4
+65/338521/campos_512_v4
+65/338522/campos_512_v4
+65/338526/campos_512_v4
+65/338529/campos_512_v4
+65/338539/campos_512_v4
+65/338541/campos_512_v4
+65/338546/campos_512_v4
+65/338570/campos_512_v4
+65/338572/campos_512_v4
+65/338574/campos_512_v4
+65/338588/campos_512_v4
+65/338597/campos_512_v4
+65/338601/campos_512_v4
+65/338603/campos_512_v4
+65/338617/campos_512_v4
+65/338634/campos_512_v4
+65/338644/campos_512_v4
+65/338649/campos_512_v4
+65/338651/campos_512_v4
+65/338654/campos_512_v4
+65/338656/campos_512_v4
+65/338663/campos_512_v4
+65/338669/campos_512_v4
+65/338672/campos_512_v4
+65/338684/campos_512_v4
+65/338691/campos_512_v4
+65/338699/campos_512_v4
+65/338701/campos_512_v4
+65/338704/campos_512_v4
+65/338718/campos_512_v4
+65/338724/campos_512_v4
+65/338736/campos_512_v4
+65/338740/campos_512_v4
+65/338758/campos_512_v4
+65/338774/campos_512_v4
+65/338783/campos_512_v4
+65/338785/campos_512_v4
+65/338790/campos_512_v4
+65/338792/campos_512_v4
+65/338796/campos_512_v4
+65/338805/campos_512_v4
+65/338808/campos_512_v4
+65/338812/campos_512_v4
+65/338834/campos_512_v4
+65/338837/campos_512_v4
+65/338843/campos_512_v4
+65/338848/campos_512_v4
+65/338849/campos_512_v4
+65/338854/campos_512_v4
+65/338867/campos_512_v4
+65/338874/campos_512_v4
+65/338908/campos_512_v4
+65/338914/campos_512_v4
+65/338919/campos_512_v4
+65/338935/campos_512_v4
+65/338944/campos_512_v4
+65/338949/campos_512_v4
+65/338951/campos_512_v4
+65/338952/campos_512_v4
+65/338955/campos_512_v4
+65/338963/campos_512_v4
+65/338967/campos_512_v4
+65/338972/campos_512_v4
+65/338973/campos_512_v4
+65/338984/campos_512_v4
+65/338985/campos_512_v4
+65/338986/campos_512_v4
+65/339005/campos_512_v4
+65/339011/campos_512_v4
+65/339012/campos_512_v4
+65/339014/campos_512_v4
+65/339040/campos_512_v4
+65/339045/campos_512_v4
+65/339046/campos_512_v4
+65/339052/campos_512_v4
+65/339053/campos_512_v4
+65/339069/campos_512_v4
+65/339091/campos_512_v4
+65/339107/campos_512_v4
+65/339111/campos_512_v4
+65/339112/campos_512_v4
+65/339113/campos_512_v4
+65/339130/campos_512_v4
+65/339135/campos_512_v4
+65/339139/campos_512_v4
+65/339145/campos_512_v4
+65/339154/campos_512_v4
+65/339161/campos_512_v4
+65/339166/campos_512_v4
+65/339169/campos_512_v4
+65/339170/campos_512_v4
+65/339175/campos_512_v4
+65/339179/campos_512_v4
+65/339186/campos_512_v4
+65/339188/campos_512_v4
+65/339195/campos_512_v4
+65/339208/campos_512_v4
+65/339215/campos_512_v4
+65/339217/campos_512_v4
+65/339225/campos_512_v4
+65/339228/campos_512_v4
+65/339236/campos_512_v4
+65/339237/campos_512_v4
+65/339254/campos_512_v4
+65/339265/campos_512_v4
+65/339270/campos_512_v4
+65/339276/campos_512_v4
+65/339295/campos_512_v4
+65/339305/campos_512_v4
+65/339335/campos_512_v4
+65/339347/campos_512_v4
+65/339348/campos_512_v4
+65/339349/campos_512_v4
+65/339357/campos_512_v4
+65/339360/campos_512_v4
+65/339361/campos_512_v4
+65/339377/campos_512_v4
+65/339381/campos_512_v4
+65/339386/campos_512_v4
+65/339397/campos_512_v4
+65/339409/campos_512_v4
+65/339419/campos_512_v4
+65/339424/campos_512_v4
+65/339427/campos_512_v4
+65/339430/campos_512_v4
+65/339434/campos_512_v4
+65/339441/campos_512_v4
+65/339443/campos_512_v4
+65/339447/campos_512_v4
+65/339470/campos_512_v4
+65/339479/campos_512_v4
+65/339480/campos_512_v4
+65/339484/campos_512_v4
+65/339487/campos_512_v4
+65/339492/campos_512_v4
+65/339497/campos_512_v4
+65/339498/campos_512_v4
+65/339503/campos_512_v4
+65/339504/campos_512_v4
+65/339512/campos_512_v4
+65/339515/campos_512_v4
+65/339522/campos_512_v4
+65/339525/campos_512_v4
+65/339531/campos_512_v4
+65/339532/campos_512_v4
+65/339543/campos_512_v4
+65/339562/campos_512_v4
+65/339571/campos_512_v4
+65/339619/campos_512_v4
+65/339626/campos_512_v4
+65/339631/campos_512_v4
+65/339642/campos_512_v4
+65/339644/campos_512_v4
+65/339651/campos_512_v4
+65/339652/campos_512_v4
+65/339654/campos_512_v4
+65/339658/campos_512_v4
+65/339670/campos_512_v4
+65/339690/campos_512_v4
+65/339694/campos_512_v4
+65/339697/campos_512_v4
+65/339707/campos_512_v4
+65/339712/campos_512_v4
+65/339720/campos_512_v4
+65/339724/campos_512_v4
+65/339733/campos_512_v4
+65/339735/campos_512_v4
+65/339736/campos_512_v4
+65/339742/campos_512_v4
+65/339744/campos_512_v4
+65/339759/campos_512_v4
+65/339760/campos_512_v4
+65/339770/campos_512_v4
+65/339774/campos_512_v4
+65/339775/campos_512_v4
+65/339783/campos_512_v4
+65/339788/campos_512_v4
+65/339792/campos_512_v4
+65/339800/campos_512_v4
+65/339840/campos_512_v4
+65/339841/campos_512_v4
+65/339852/campos_512_v4
+65/339853/campos_512_v4
+65/339866/campos_512_v4
+65/339867/campos_512_v4
+65/339891/campos_512_v4
+65/339896/campos_512_v4
+65/339904/campos_512_v4
+65/339936/campos_512_v4
+65/339949/campos_512_v4
+65/339952/campos_512_v4
+65/339953/campos_512_v4
+65/339958/campos_512_v4
+65/339963/campos_512_v4
+65/339967/campos_512_v4
+65/339970/campos_512_v4
+65/339991/campos_512_v4
+66/340009/campos_512_v4
+66/340013/campos_512_v4
+66/340019/campos_512_v4
+66/340024/campos_512_v4
+66/340026/campos_512_v4
+66/340039/campos_512_v4
+66/340040/campos_512_v4
+66/340046/campos_512_v4
+66/340048/campos_512_v4
+66/340051/campos_512_v4
+66/340057/campos_512_v4
+66/340077/campos_512_v4
+66/340084/campos_512_v4
+66/340085/campos_512_v4
+66/340086/campos_512_v4
+66/340087/campos_512_v4
+66/340093/campos_512_v4
+66/340100/campos_512_v4
+66/340101/campos_512_v4
+66/340116/campos_512_v4
+66/340130/campos_512_v4
+66/340133/campos_512_v4
+66/340153/campos_512_v4
+66/340162/campos_512_v4
+66/340180/campos_512_v4
+66/340207/campos_512_v4
+66/340217/campos_512_v4
+66/340224/campos_512_v4
+66/340228/campos_512_v4
+66/340252/campos_512_v4
+66/340273/campos_512_v4
+66/340277/campos_512_v4
+66/340284/campos_512_v4
+66/340288/campos_512_v4
+66/340292/campos_512_v4
+66/340297/campos_512_v4
+66/340303/campos_512_v4
+66/340305/campos_512_v4
+66/340317/campos_512_v4
+66/340325/campos_512_v4
+66/340327/campos_512_v4
+66/340340/campos_512_v4
+66/340342/campos_512_v4
+66/340344/campos_512_v4
+66/340350/campos_512_v4
+66/340353/campos_512_v4
+66/340382/campos_512_v4
+66/340398/campos_512_v4
+66/340402/campos_512_v4
+66/340404/campos_512_v4
+66/340412/campos_512_v4
+66/340416/campos_512_v4
+66/340451/campos_512_v4
+66/340456/campos_512_v4
+66/340472/campos_512_v4
+66/340489/campos_512_v4
+66/340493/campos_512_v4
+66/340510/campos_512_v4
+66/340525/campos_512_v4
+66/340541/campos_512_v4
+66/340553/campos_512_v4
+66/340563/campos_512_v4
+66/340577/campos_512_v4
+66/340578/campos_512_v4
+66/340600/campos_512_v4
+66/340605/campos_512_v4
+66/340613/campos_512_v4
+66/340628/campos_512_v4
+66/340635/campos_512_v4
+66/340638/campos_512_v4
+66/340642/campos_512_v4
+66/340655/campos_512_v4
+66/340680/campos_512_v4
+66/340682/campos_512_v4
+66/340694/campos_512_v4
+66/340697/campos_512_v4
+66/340701/campos_512_v4
+66/340715/campos_512_v4
+66/340728/campos_512_v4
+66/340731/campos_512_v4
+66/340737/campos_512_v4
+66/340746/campos_512_v4
+66/340754/campos_512_v4
+66/340758/campos_512_v4
+66/340799/campos_512_v4
+66/340805/campos_512_v4
+66/340806/campos_512_v4
+66/340810/campos_512_v4
+66/340811/campos_512_v4
+66/340823/campos_512_v4
+66/340835/campos_512_v4
+66/340846/campos_512_v4
+66/340847/campos_512_v4
+66/340850/campos_512_v4
+66/340867/campos_512_v4
+66/340892/campos_512_v4
+66/340907/campos_512_v4
+66/340909/campos_512_v4
+66/340911/campos_512_v4
+66/340912/campos_512_v4
+66/340923/campos_512_v4
+66/340927/campos_512_v4
+66/340929/campos_512_v4
+66/340931/campos_512_v4
+66/340963/campos_512_v4
+66/340986/campos_512_v4
+66/340993/campos_512_v4
+66/340999/campos_512_v4
+66/341001/campos_512_v4
+66/341011/campos_512_v4
+66/341018/campos_512_v4
+66/341032/campos_512_v4
+66/341038/campos_512_v4
+66/341044/campos_512_v4
+66/341049/campos_512_v4
+66/341061/campos_512_v4
+66/341062/campos_512_v4
+66/341066/campos_512_v4
+66/341070/campos_512_v4
+66/341071/campos_512_v4
+66/341072/campos_512_v4
+66/341076/campos_512_v4
+66/341080/campos_512_v4
+66/341102/campos_512_v4
+66/341103/campos_512_v4
+66/341117/campos_512_v4
+66/341121/campos_512_v4
+66/341123/campos_512_v4
+66/341125/campos_512_v4
+66/341129/campos_512_v4
+66/341130/campos_512_v4
+66/341137/campos_512_v4
+66/341141/campos_512_v4
+66/341152/campos_512_v4
+66/341170/campos_512_v4
+66/341179/campos_512_v4
+66/341180/campos_512_v4
+66/341184/campos_512_v4
+66/341188/campos_512_v4
+66/341212/campos_512_v4
+66/341217/campos_512_v4
+66/341220/campos_512_v4
+66/341241/campos_512_v4
+66/341242/campos_512_v4
+66/341245/campos_512_v4
+66/341253/campos_512_v4
+66/341264/campos_512_v4
+66/341266/campos_512_v4
+66/341270/campos_512_v4
+66/341276/campos_512_v4
+66/341278/campos_512_v4
+66/341307/campos_512_v4
+66/341320/campos_512_v4
+66/341325/campos_512_v4
+66/341329/campos_512_v4
+66/341331/campos_512_v4
+66/341348/campos_512_v4
+66/341350/campos_512_v4
+66/341359/campos_512_v4
+66/341362/campos_512_v4
+66/341366/campos_512_v4
+66/341370/campos_512_v4
+66/341371/campos_512_v4
+66/341377/campos_512_v4
+66/341378/campos_512_v4
+66/341381/campos_512_v4
+66/341393/campos_512_v4
+66/341394/campos_512_v4
+66/341404/campos_512_v4
+66/341407/campos_512_v4
+66/341412/campos_512_v4
+66/341414/campos_512_v4
+66/341426/campos_512_v4
+66/341431/campos_512_v4
+66/341432/campos_512_v4
+66/341438/campos_512_v4
+66/341446/campos_512_v4
+66/341460/campos_512_v4
+66/341464/campos_512_v4
+66/341469/campos_512_v4
+66/341473/campos_512_v4
+66/341486/campos_512_v4
+66/341497/campos_512_v4
+66/341507/campos_512_v4
+66/341532/campos_512_v4
+66/341536/campos_512_v4
+66/341540/campos_512_v4
+66/341542/campos_512_v4
+66/341545/campos_512_v4
+66/341553/campos_512_v4
+66/341554/campos_512_v4
+66/341560/campos_512_v4
+66/341563/campos_512_v4
+66/341574/campos_512_v4
+66/341591/campos_512_v4
+66/341600/campos_512_v4
+66/341604/campos_512_v4
+66/341607/campos_512_v4
+66/341608/campos_512_v4
+66/341625/campos_512_v4
+66/341636/campos_512_v4
+66/341645/campos_512_v4
+66/341669/campos_512_v4
+66/341679/campos_512_v4
+66/341680/campos_512_v4
+66/341682/campos_512_v4
+66/341688/campos_512_v4
+66/341694/campos_512_v4
+66/341701/campos_512_v4
+66/341708/campos_512_v4
+66/341719/campos_512_v4
+66/341733/campos_512_v4
+66/341736/campos_512_v4
+66/341747/campos_512_v4
+66/341754/campos_512_v4
+66/341763/campos_512_v4
+66/341775/campos_512_v4
+66/341785/campos_512_v4
+66/341806/campos_512_v4
+66/341811/campos_512_v4
+66/341815/campos_512_v4
+66/341824/campos_512_v4
+66/341825/campos_512_v4
+66/341829/campos_512_v4
+66/341833/campos_512_v4
+66/341834/campos_512_v4
+66/341837/campos_512_v4
+66/341846/campos_512_v4
+66/341851/campos_512_v4
+66/341889/campos_512_v4
+66/341896/campos_512_v4
+66/341897/campos_512_v4
+66/341902/campos_512_v4
+66/341904/campos_512_v4
+66/341910/campos_512_v4
+66/341915/campos_512_v4
+66/341916/campos_512_v4
+66/341932/campos_512_v4
+66/341941/campos_512_v4
+66/341943/campos_512_v4
+66/341944/campos_512_v4
+66/341953/campos_512_v4
+66/341956/campos_512_v4
+66/341958/campos_512_v4
+66/341969/campos_512_v4
+66/341976/campos_512_v4
+66/341991/campos_512_v4
+66/342010/campos_512_v4
+66/342011/campos_512_v4
+66/342017/campos_512_v4
+66/342019/campos_512_v4
+66/342031/campos_512_v4
+66/342036/campos_512_v4
+66/342039/campos_512_v4
+66/342040/campos_512_v4
+66/342046/campos_512_v4
+66/342053/campos_512_v4
+66/342060/campos_512_v4
+66/342085/campos_512_v4
+66/342096/campos_512_v4
+66/342104/campos_512_v4
+66/342107/campos_512_v4
+66/342114/campos_512_v4
+66/342117/campos_512_v4
+66/342119/campos_512_v4
+66/342127/campos_512_v4
+66/342132/campos_512_v4
+66/342142/campos_512_v4
+66/342143/campos_512_v4
+66/342156/campos_512_v4
+66/342163/campos_512_v4
+66/342167/campos_512_v4
+66/342180/campos_512_v4
+66/342186/campos_512_v4
+66/342197/campos_512_v4
+66/342200/campos_512_v4
+66/342202/campos_512_v4
+66/342217/campos_512_v4
+66/342228/campos_512_v4
+66/342234/campos_512_v4
+66/342239/campos_512_v4
+66/342240/campos_512_v4
+66/342273/campos_512_v4
+66/342285/campos_512_v4
+66/342294/campos_512_v4
+66/342300/campos_512_v4
+66/342301/campos_512_v4
+66/342309/campos_512_v4
+66/342310/campos_512_v4
+66/342315/campos_512_v4
+66/342321/campos_512_v4
+66/342327/campos_512_v4
+66/342333/campos_512_v4
+66/342335/campos_512_v4
+66/342351/campos_512_v4
+66/342357/campos_512_v4
+66/342367/campos_512_v4
+66/342390/campos_512_v4
+66/342412/campos_512_v4
+66/342425/campos_512_v4
+66/342427/campos_512_v4
+66/342429/campos_512_v4
+66/342431/campos_512_v4
+66/342433/campos_512_v4
+66/342440/campos_512_v4
+66/342445/campos_512_v4
+66/342453/campos_512_v4
+66/342463/campos_512_v4
+66/342469/campos_512_v4
+66/342474/campos_512_v4
+66/342490/campos_512_v4
+66/342492/campos_512_v4
+66/342494/campos_512_v4
+66/342498/campos_512_v4
+66/342504/campos_512_v4
+66/342509/campos_512_v4
+66/342525/campos_512_v4
+66/342527/campos_512_v4
+66/342529/campos_512_v4
+66/342532/campos_512_v4
+66/342539/campos_512_v4
+66/342541/campos_512_v4
+66/342551/campos_512_v4
+66/342554/campos_512_v4
+66/342567/campos_512_v4
+66/342570/campos_512_v4
+66/342572/campos_512_v4
+66/342576/campos_512_v4
+66/342581/campos_512_v4
+66/342587/campos_512_v4
+66/342597/campos_512_v4
+66/342600/campos_512_v4
+66/342602/campos_512_v4
+66/342612/campos_512_v4
+66/342619/campos_512_v4
+66/342646/campos_512_v4
+66/342665/campos_512_v4
+66/342673/campos_512_v4
+66/342674/campos_512_v4
+66/342686/campos_512_v4
+66/342687/campos_512_v4
+66/342690/campos_512_v4
+66/342699/campos_512_v4
+66/342708/campos_512_v4
+66/342721/campos_512_v4
+66/342729/campos_512_v4
+66/342741/campos_512_v4
+66/342747/campos_512_v4
+66/342767/campos_512_v4
+66/342785/campos_512_v4
+66/342792/campos_512_v4
+66/342812/campos_512_v4
+66/342816/campos_512_v4
+66/342818/campos_512_v4
+66/342825/campos_512_v4
+66/342832/campos_512_v4
+66/342843/campos_512_v4
+66/342847/campos_512_v4
+66/342855/campos_512_v4
+66/342856/campos_512_v4
+66/342864/campos_512_v4
+66/342868/campos_512_v4
+66/342871/campos_512_v4
+66/342873/campos_512_v4
+66/342888/campos_512_v4
+66/342892/campos_512_v4
+66/342896/campos_512_v4
+66/342905/campos_512_v4
+66/342909/campos_512_v4
+66/342926/campos_512_v4
+66/342928/campos_512_v4
+66/342929/campos_512_v4
+66/342932/campos_512_v4
+66/342937/campos_512_v4
+66/342938/campos_512_v4
+66/342941/campos_512_v4
+66/342945/campos_512_v4
+66/342953/campos_512_v4
+66/342959/campos_512_v4
+66/342967/campos_512_v4
+66/342975/campos_512_v4
+66/342982/campos_512_v4
+66/342984/campos_512_v4
+66/342987/campos_512_v4
+66/342991/campos_512_v4
+66/342994/campos_512_v4
+66/343005/campos_512_v4
+66/343008/campos_512_v4
+66/343016/campos_512_v4
+66/343021/campos_512_v4
+66/343022/campos_512_v4
+66/343025/campos_512_v4
+66/343027/campos_512_v4
+66/343030/campos_512_v4
+66/343032/campos_512_v4
+66/343034/campos_512_v4
+66/343041/campos_512_v4
+66/343044/campos_512_v4
+66/343045/campos_512_v4
+66/343048/campos_512_v4
+66/343059/campos_512_v4
+66/343064/campos_512_v4
+66/343068/campos_512_v4
+66/343085/campos_512_v4
+66/343091/campos_512_v4
+66/343099/campos_512_v4
+66/343102/campos_512_v4
+66/343104/campos_512_v4
+66/343105/campos_512_v4
+66/343112/campos_512_v4
+66/343113/campos_512_v4
+66/343133/campos_512_v4
+66/343134/campos_512_v4
+66/343135/campos_512_v4
+66/343145/campos_512_v4
+66/343157/campos_512_v4
+66/343164/campos_512_v4
+66/343166/campos_512_v4
+66/343168/campos_512_v4
+66/343169/campos_512_v4
+66/343171/campos_512_v4
+66/343179/campos_512_v4
+66/343197/campos_512_v4
+66/343198/campos_512_v4
+66/343202/campos_512_v4
+66/343205/campos_512_v4
+66/343208/campos_512_v4
+66/343215/campos_512_v4
+66/343220/campos_512_v4
+66/343240/campos_512_v4
+66/343241/campos_512_v4
+66/343244/campos_512_v4
+66/343251/campos_512_v4
+66/343253/campos_512_v4
+66/343261/campos_512_v4
+66/343268/campos_512_v4
+66/343280/campos_512_v4
+66/343282/campos_512_v4
+66/343311/campos_512_v4
+66/343315/campos_512_v4
+66/343320/campos_512_v4
+66/343321/campos_512_v4
+66/343331/campos_512_v4
+66/343341/campos_512_v4
+66/343346/campos_512_v4
+66/343353/campos_512_v4
+66/343382/campos_512_v4
+66/343408/campos_512_v4
+66/343418/campos_512_v4
+66/343419/campos_512_v4
+66/343420/campos_512_v4
+66/343428/campos_512_v4
+66/343431/campos_512_v4
+66/343449/campos_512_v4
+66/343467/campos_512_v4
+66/343468/campos_512_v4
+66/343484/campos_512_v4
+66/343497/campos_512_v4
+66/343510/campos_512_v4
+66/343522/campos_512_v4
+66/343524/campos_512_v4
+66/343525/campos_512_v4
+66/343528/campos_512_v4
+66/343532/campos_512_v4
+66/343537/campos_512_v4
+66/343547/campos_512_v4
+66/343582/campos_512_v4
+66/343587/campos_512_v4
+66/343590/campos_512_v4
+66/343599/campos_512_v4
+66/343600/campos_512_v4
+66/343609/campos_512_v4
+66/343611/campos_512_v4
+66/343614/campos_512_v4
+66/343616/campos_512_v4
+66/343619/campos_512_v4
+66/343628/campos_512_v4
+66/343640/campos_512_v4
+66/343643/campos_512_v4
+66/343644/campos_512_v4
+66/343645/campos_512_v4
+66/343647/campos_512_v4
+66/343649/campos_512_v4
+66/343661/campos_512_v4
+66/343665/campos_512_v4
+66/343670/campos_512_v4
+66/343672/campos_512_v4
+66/343685/campos_512_v4
+66/343687/campos_512_v4
+66/343688/campos_512_v4
+66/343690/campos_512_v4
+66/343724/campos_512_v4
+66/343733/campos_512_v4
+66/343738/campos_512_v4
+66/343767/campos_512_v4
+66/343768/campos_512_v4
+66/343772/campos_512_v4
+66/343781/campos_512_v4
+66/343787/campos_512_v4
+66/343788/campos_512_v4
+66/343790/campos_512_v4
+66/343800/campos_512_v4
+66/343825/campos_512_v4
+66/343837/campos_512_v4
+66/343844/campos_512_v4
+66/343849/campos_512_v4
+66/343857/campos_512_v4
+66/343861/campos_512_v4
+66/343892/campos_512_v4
+66/343913/campos_512_v4
+66/343916/campos_512_v4
+66/343930/campos_512_v4
+66/343941/campos_512_v4
+66/343943/campos_512_v4
+66/343944/campos_512_v4
+66/343952/campos_512_v4
+66/343958/campos_512_v4
+66/344006/campos_512_v4
+66/344045/campos_512_v4
+66/344057/campos_512_v4
+66/344069/campos_512_v4
+66/344072/campos_512_v4
+66/344088/campos_512_v4
+66/344089/campos_512_v4
+66/344094/campos_512_v4
+66/344115/campos_512_v4
+66/344121/campos_512_v4
+66/344133/campos_512_v4
+66/344135/campos_512_v4
+66/344138/campos_512_v4
+66/344144/campos_512_v4
+66/344154/campos_512_v4
+66/344158/campos_512_v4
+66/344159/campos_512_v4
+66/344168/campos_512_v4
+66/344177/campos_512_v4
+66/344185/campos_512_v4
+66/344191/campos_512_v4
+66/344196/campos_512_v4
+66/344202/campos_512_v4
+66/344208/campos_512_v4
+66/344212/campos_512_v4
+66/344222/campos_512_v4
+66/344230/campos_512_v4
+66/344232/campos_512_v4
+66/344240/campos_512_v4
+66/344246/campos_512_v4
+66/344250/campos_512_v4
+66/344273/campos_512_v4
+66/344278/campos_512_v4
+66/344281/campos_512_v4
+66/344285/campos_512_v4
+66/344307/campos_512_v4
+66/344308/campos_512_v4
+66/344310/campos_512_v4
+66/344327/campos_512_v4
+66/344333/campos_512_v4
+66/344339/campos_512_v4
+66/344351/campos_512_v4
+66/344361/campos_512_v4
+66/344366/campos_512_v4
+66/344378/campos_512_v4
+66/344389/campos_512_v4
+66/344416/campos_512_v4
+66/344420/campos_512_v4
+66/344433/campos_512_v4
+66/344444/campos_512_v4
+66/344458/campos_512_v4
+66/344461/campos_512_v4
+66/344487/campos_512_v4
+66/344493/campos_512_v4
+66/344510/campos_512_v4
+66/344513/campos_512_v4
+66/344514/campos_512_v4
+66/344515/campos_512_v4
+66/344521/campos_512_v4
+66/344534/campos_512_v4
+66/344537/campos_512_v4
+66/344544/campos_512_v4
+66/344546/campos_512_v4
+66/344566/campos_512_v4
+66/344569/campos_512_v4
+66/344593/campos_512_v4
+66/344609/campos_512_v4
+66/344631/campos_512_v4
+66/344632/campos_512_v4
+66/344636/campos_512_v4
+66/344648/campos_512_v4
+66/344651/campos_512_v4
+66/344660/campos_512_v4
+66/344661/campos_512_v4
+66/344679/campos_512_v4
+66/344682/campos_512_v4
+66/344711/campos_512_v4
+66/344713/campos_512_v4
+66/344714/campos_512_v4
+66/344719/campos_512_v4
+66/344720/campos_512_v4
+66/344737/campos_512_v4
+66/344766/campos_512_v4
+66/344767/campos_512_v4
+66/344775/campos_512_v4
+66/344798/campos_512_v4
+66/344801/campos_512_v4
+66/344808/campos_512_v4
+66/344809/campos_512_v4
+66/344818/campos_512_v4
+66/344833/campos_512_v4
+66/344841/campos_512_v4
+66/344843/campos_512_v4
+66/344845/campos_512_v4
+66/344859/campos_512_v4
+66/344879/campos_512_v4
+66/344880/campos_512_v4
+66/344881/campos_512_v4
+66/344890/campos_512_v4
+66/344895/campos_512_v4
+66/344908/campos_512_v4
+66/344910/campos_512_v4
+66/344923/campos_512_v4
+66/344936/campos_512_v4
+66/344948/campos_512_v4
+66/344949/campos_512_v4
+66/344972/campos_512_v4
+66/344986/campos_512_v4
+66/344988/campos_512_v4
+66/344991/campos_512_v4
+66/344995/campos_512_v4
+67/345008/campos_512_v4
+67/345010/campos_512_v4
+67/345018/campos_512_v4
+67/345023/campos_512_v4
+67/345036/campos_512_v4
+67/345038/campos_512_v4
+67/345051/campos_512_v4
+67/345053/campos_512_v4
+67/345059/campos_512_v4
+67/345062/campos_512_v4
+67/345071/campos_512_v4
+67/345092/campos_512_v4
+67/345094/campos_512_v4
+67/345097/campos_512_v4
+67/345107/campos_512_v4
+67/345111/campos_512_v4
+67/345119/campos_512_v4
+67/345138/campos_512_v4
+67/345139/campos_512_v4
+67/345142/campos_512_v4
+67/345143/campos_512_v4
+67/345197/campos_512_v4
+67/345199/campos_512_v4
+67/345210/campos_512_v4
+67/345214/campos_512_v4
+67/345233/campos_512_v4
+67/345238/campos_512_v4
+67/345242/campos_512_v4
+67/345248/campos_512_v4
+67/345254/campos_512_v4
+67/345262/campos_512_v4
+67/345265/campos_512_v4
+67/345277/campos_512_v4
+67/345288/campos_512_v4
+67/345291/campos_512_v4
+67/345300/campos_512_v4
+67/345336/campos_512_v4
+67/345337/campos_512_v4
+67/345351/campos_512_v4
+67/345355/campos_512_v4
+67/345361/campos_512_v4
+67/345364/campos_512_v4
+67/345370/campos_512_v4
+67/345371/campos_512_v4
+67/345381/campos_512_v4
+67/345386/campos_512_v4
+67/345396/campos_512_v4
+67/345406/campos_512_v4
+67/345411/campos_512_v4
+67/345420/campos_512_v4
+67/345427/campos_512_v4
+67/345431/campos_512_v4
+67/345446/campos_512_v4
+67/345455/campos_512_v4
+67/345463/campos_512_v4
+67/345464/campos_512_v4
+67/345469/campos_512_v4
+67/345479/campos_512_v4
+67/345482/campos_512_v4
+67/345484/campos_512_v4
+67/345497/campos_512_v4
+67/345499/campos_512_v4
+67/345501/campos_512_v4
+67/345507/campos_512_v4
+67/345518/campos_512_v4
+67/345523/campos_512_v4
+67/345534/campos_512_v4
+67/345535/campos_512_v4
+67/345538/campos_512_v4
+67/345542/campos_512_v4
+67/345555/campos_512_v4
+67/345574/campos_512_v4
+67/345578/campos_512_v4
+67/345587/campos_512_v4
+67/345597/campos_512_v4
+67/345598/campos_512_v4
+67/345627/campos_512_v4
+67/345629/campos_512_v4
+67/345632/campos_512_v4
+67/345651/campos_512_v4
+67/345652/campos_512_v4
+67/345666/campos_512_v4
+67/345668/campos_512_v4
+67/345672/campos_512_v4
+67/345674/campos_512_v4
+67/345678/campos_512_v4
+67/345683/campos_512_v4
+67/345685/campos_512_v4
+67/345691/campos_512_v4
+67/345708/campos_512_v4
+67/345712/campos_512_v4
+67/345716/campos_512_v4
+67/345717/campos_512_v4
+67/345728/campos_512_v4
+67/345736/campos_512_v4
+67/345754/campos_512_v4
+67/345767/campos_512_v4
+67/345772/campos_512_v4
+67/345773/campos_512_v4
+67/345774/campos_512_v4
+67/345776/campos_512_v4
+67/345791/campos_512_v4
+67/345797/campos_512_v4
+67/345806/campos_512_v4
+67/345819/campos_512_v4
+67/345833/campos_512_v4
+67/345842/campos_512_v4
+67/345850/campos_512_v4
+67/345851/campos_512_v4
+67/345852/campos_512_v4
+67/345854/campos_512_v4
+67/345858/campos_512_v4
+67/345862/campos_512_v4
+67/345868/campos_512_v4
+67/345874/campos_512_v4
+67/345880/campos_512_v4
+67/345895/campos_512_v4
+67/345908/campos_512_v4
+67/345909/campos_512_v4
+67/345932/campos_512_v4
+67/345933/campos_512_v4
+67/345943/campos_512_v4
+67/345945/campos_512_v4
+67/345957/campos_512_v4
+67/345958/campos_512_v4
+67/345964/campos_512_v4
+67/345997/campos_512_v4
+67/346024/campos_512_v4
+67/346025/campos_512_v4
+67/346065/campos_512_v4
+67/346066/campos_512_v4
+67/346077/campos_512_v4
+67/346089/campos_512_v4
+67/346092/campos_512_v4
+67/346112/campos_512_v4
+67/346120/campos_512_v4
+67/346122/campos_512_v4
+67/346132/campos_512_v4
+67/346134/campos_512_v4
+67/346137/campos_512_v4
+67/346138/campos_512_v4
+67/346146/campos_512_v4
+67/346158/campos_512_v4
+67/346163/campos_512_v4
+67/346166/campos_512_v4
+67/346168/campos_512_v4
+67/346174/campos_512_v4
+67/346180/campos_512_v4
+67/346181/campos_512_v4
+67/346188/campos_512_v4
+67/346196/campos_512_v4
+67/346198/campos_512_v4
+67/346209/campos_512_v4
+67/346214/campos_512_v4
+67/346215/campos_512_v4
+67/346222/campos_512_v4
+67/346225/campos_512_v4
+67/346226/campos_512_v4
+67/346275/campos_512_v4
+67/346289/campos_512_v4
+67/346301/campos_512_v4
+67/346314/campos_512_v4
+67/346329/campos_512_v4
+67/346343/campos_512_v4
+67/346349/campos_512_v4
+67/346371/campos_512_v4
+67/346373/campos_512_v4
+67/346374/campos_512_v4
+67/346410/campos_512_v4
+67/346411/campos_512_v4
+67/346422/campos_512_v4
+67/346432/campos_512_v4
+67/346435/campos_512_v4
+67/346436/campos_512_v4
+67/346459/campos_512_v4
+67/346471/campos_512_v4
+67/346473/campos_512_v4
+67/346476/campos_512_v4
+67/346484/campos_512_v4
+67/346489/campos_512_v4
+67/346493/campos_512_v4
+67/346506/campos_512_v4
+67/346510/campos_512_v4
+67/346517/campos_512_v4
+67/346533/campos_512_v4
+67/346538/campos_512_v4
+67/346542/campos_512_v4
+67/346543/campos_512_v4
+67/346556/campos_512_v4
+67/346558/campos_512_v4
+67/346559/campos_512_v4
+67/346569/campos_512_v4
+67/346585/campos_512_v4
+67/346587/campos_512_v4
+67/346590/campos_512_v4
+67/346602/campos_512_v4
+67/346604/campos_512_v4
+67/346613/campos_512_v4
+67/346615/campos_512_v4
+67/346631/campos_512_v4
+67/346642/campos_512_v4
+67/346651/campos_512_v4
+67/346655/campos_512_v4
+67/346691/campos_512_v4
+67/346701/campos_512_v4
+67/346703/campos_512_v4
+67/346708/campos_512_v4
+67/346711/campos_512_v4
+67/346714/campos_512_v4
+67/346739/campos_512_v4
+67/346752/campos_512_v4
+67/346754/campos_512_v4
+67/346758/campos_512_v4
+67/346770/campos_512_v4
+67/346771/campos_512_v4
+67/346783/campos_512_v4
+67/346791/campos_512_v4
+67/346796/campos_512_v4
+67/346801/campos_512_v4
+67/346831/campos_512_v4
+67/346833/campos_512_v4
+67/346834/campos_512_v4
+67/346841/campos_512_v4
+67/346846/campos_512_v4
+67/346849/campos_512_v4
+67/346850/campos_512_v4
+67/346863/campos_512_v4
+67/346879/campos_512_v4
+67/346886/campos_512_v4
+67/346899/campos_512_v4
+67/346900/campos_512_v4
+67/346905/campos_512_v4
+67/346910/campos_512_v4
+67/346911/campos_512_v4
+67/346918/campos_512_v4
+67/346927/campos_512_v4
+67/346935/campos_512_v4
+67/346939/campos_512_v4
+67/346950/campos_512_v4
+67/346966/campos_512_v4
+67/346985/campos_512_v4
+67/346995/campos_512_v4
+67/346997/campos_512_v4
+67/347010/campos_512_v4
+67/347019/campos_512_v4
+67/347021/campos_512_v4
+67/347059/campos_512_v4
+67/347064/campos_512_v4
+67/347073/campos_512_v4
+67/347077/campos_512_v4
+67/347091/campos_512_v4
+67/347095/campos_512_v4
+67/347105/campos_512_v4
+67/347106/campos_512_v4
+67/347112/campos_512_v4
+67/347117/campos_512_v4
+67/347121/campos_512_v4
+67/347133/campos_512_v4
+67/347145/campos_512_v4
+67/347151/campos_512_v4
+67/347152/campos_512_v4
+67/347158/campos_512_v4
+67/347165/campos_512_v4
+67/347174/campos_512_v4
+67/347193/campos_512_v4
+67/347199/campos_512_v4
+67/347217/campos_512_v4
+67/347236/campos_512_v4
+67/347238/campos_512_v4
+67/347241/campos_512_v4
+67/347250/campos_512_v4
+67/347254/campos_512_v4
+67/347266/campos_512_v4
+67/347282/campos_512_v4
+67/347289/campos_512_v4
+67/347294/campos_512_v4
+67/347303/campos_512_v4
+67/347307/campos_512_v4
+67/347308/campos_512_v4
+67/347316/campos_512_v4
+67/347319/campos_512_v4
+67/347321/campos_512_v4
+67/347326/campos_512_v4
+67/347327/campos_512_v4
+67/347336/campos_512_v4
+67/347338/campos_512_v4
+67/347342/campos_512_v4
+67/347345/campos_512_v4
+67/347362/campos_512_v4
+67/347364/campos_512_v4
+67/347367/campos_512_v4
+67/347370/campos_512_v4
+67/347395/campos_512_v4
+67/347403/campos_512_v4
+67/347413/campos_512_v4
+67/347414/campos_512_v4
+67/347430/campos_512_v4
+67/347439/campos_512_v4
+67/347457/campos_512_v4
+67/347461/campos_512_v4
+67/347463/campos_512_v4
+67/347464/campos_512_v4
+67/347496/campos_512_v4
+67/347508/campos_512_v4
+67/347526/campos_512_v4
+67/347527/campos_512_v4
+67/347528/campos_512_v4
+67/347538/campos_512_v4
+67/347541/campos_512_v4
+67/347544/campos_512_v4
+67/347552/campos_512_v4
+67/347559/campos_512_v4
+67/347567/campos_512_v4
+67/347578/campos_512_v4
+67/347581/campos_512_v4
+67/347591/campos_512_v4
+67/347593/campos_512_v4
+67/347598/campos_512_v4
+67/347613/campos_512_v4
+67/347616/campos_512_v4
+67/347630/campos_512_v4
+67/347631/campos_512_v4
+67/347633/campos_512_v4
+67/347634/campos_512_v4
+67/347645/campos_512_v4
+67/347649/campos_512_v4
+67/347663/campos_512_v4
+67/347672/campos_512_v4
+67/347675/campos_512_v4
+67/347698/campos_512_v4
+67/347725/campos_512_v4
+67/347738/campos_512_v4
+67/347739/campos_512_v4
+67/347745/campos_512_v4
+67/347750/campos_512_v4
+67/347753/campos_512_v4
+67/347755/campos_512_v4
+67/347759/campos_512_v4
+67/347767/campos_512_v4
+67/347776/campos_512_v4
+67/347779/campos_512_v4
+67/347793/campos_512_v4
+67/347805/campos_512_v4
+67/347811/campos_512_v4
+67/347813/campos_512_v4
+67/347817/campos_512_v4
+67/347832/campos_512_v4
+67/347835/campos_512_v4
+67/347839/campos_512_v4
+67/347842/campos_512_v4
+67/347847/campos_512_v4
+67/347849/campos_512_v4
+67/347858/campos_512_v4
+67/347887/campos_512_v4
+67/347895/campos_512_v4
+67/347898/campos_512_v4
+67/347911/campos_512_v4
+67/347921/campos_512_v4
+67/347923/campos_512_v4
+67/347932/campos_512_v4
+67/347938/campos_512_v4
+67/347953/campos_512_v4
+67/347964/campos_512_v4
+67/347966/campos_512_v4
+67/347974/campos_512_v4
+67/347978/campos_512_v4
+67/347989/campos_512_v4
+67/348009/campos_512_v4
+67/348020/campos_512_v4
+67/348021/campos_512_v4
+67/348022/campos_512_v4
+67/348034/campos_512_v4
+67/348041/campos_512_v4
+67/348053/campos_512_v4
+67/348078/campos_512_v4
+67/348083/campos_512_v4
+67/348085/campos_512_v4
+67/348089/campos_512_v4
+67/348096/campos_512_v4
+67/348098/campos_512_v4
+67/348101/campos_512_v4
+67/348109/campos_512_v4
+67/348110/campos_512_v4
+67/348122/campos_512_v4
+67/348150/campos_512_v4
+67/348172/campos_512_v4
+67/348174/campos_512_v4
+67/348176/campos_512_v4
+67/348212/campos_512_v4
+67/348216/campos_512_v4
+67/348219/campos_512_v4
+67/348220/campos_512_v4
+67/348221/campos_512_v4
+67/348229/campos_512_v4
+67/348241/campos_512_v4
+67/348248/campos_512_v4
+67/348251/campos_512_v4
+67/348276/campos_512_v4
+67/348277/campos_512_v4
+67/348279/campos_512_v4
+67/348281/campos_512_v4
+67/348290/campos_512_v4
+67/348291/campos_512_v4
+67/348300/campos_512_v4
+67/348337/campos_512_v4
+67/348341/campos_512_v4
+67/348348/campos_512_v4
+67/348354/campos_512_v4
+67/348375/campos_512_v4
+67/348376/campos_512_v4
+67/348381/campos_512_v4
+67/348385/campos_512_v4
+67/348392/campos_512_v4
+67/348395/campos_512_v4
+67/348403/campos_512_v4
+67/348405/campos_512_v4
+67/348411/campos_512_v4
+67/348429/campos_512_v4
+67/348438/campos_512_v4
+67/348448/campos_512_v4
+67/348450/campos_512_v4
+67/348460/campos_512_v4
+67/348461/campos_512_v4
+67/348462/campos_512_v4
+67/348464/campos_512_v4
+67/348466/campos_512_v4
+67/348472/campos_512_v4
+67/348474/campos_512_v4
+67/348478/campos_512_v4
+67/348485/campos_512_v4
+67/348488/campos_512_v4
+67/348491/campos_512_v4
+67/348495/campos_512_v4
+67/348497/campos_512_v4
+67/348500/campos_512_v4
+67/348503/campos_512_v4
+67/348508/campos_512_v4
+67/348518/campos_512_v4
+67/348545/campos_512_v4
+67/348552/campos_512_v4
+67/348565/campos_512_v4
+67/348572/campos_512_v4
+67/348575/campos_512_v4
+67/348595/campos_512_v4
+67/348608/campos_512_v4
+67/348620/campos_512_v4
+67/348624/campos_512_v4
+67/348628/campos_512_v4
+67/348644/campos_512_v4
+67/348646/campos_512_v4
+67/348653/campos_512_v4
+67/348667/campos_512_v4
+67/348668/campos_512_v4
+67/348669/campos_512_v4
+67/348676/campos_512_v4
+67/348683/campos_512_v4
+67/348684/campos_512_v4
+67/348687/campos_512_v4
+67/348700/campos_512_v4
+67/348707/campos_512_v4
+67/348708/campos_512_v4
+67/348721/campos_512_v4
+67/348741/campos_512_v4
+67/348742/campos_512_v4
+67/348744/campos_512_v4
+67/348757/campos_512_v4
+67/348758/campos_512_v4
+67/348771/campos_512_v4
+67/348780/campos_512_v4
+67/348786/campos_512_v4
+67/348793/campos_512_v4
+67/348800/campos_512_v4
+67/348802/campos_512_v4
+67/348803/campos_512_v4
+67/348805/campos_512_v4
+67/348825/campos_512_v4
+67/348831/campos_512_v4
+67/348851/campos_512_v4
+67/348854/campos_512_v4
+67/348863/campos_512_v4
+67/348867/campos_512_v4
+67/348877/campos_512_v4
+67/348898/campos_512_v4
+67/348900/campos_512_v4
+67/348903/campos_512_v4
+67/348911/campos_512_v4
+67/348913/campos_512_v4
+67/348921/campos_512_v4
+67/348925/campos_512_v4
+67/348936/campos_512_v4
+67/348967/campos_512_v4
+67/348974/campos_512_v4
+67/348989/campos_512_v4
+67/348991/campos_512_v4
+67/348995/campos_512_v4
+67/348996/campos_512_v4
+67/349001/campos_512_v4
+67/349008/campos_512_v4
+67/349009/campos_512_v4
+67/349013/campos_512_v4
+67/349029/campos_512_v4
+67/349034/campos_512_v4
+67/349046/campos_512_v4
+67/349047/campos_512_v4
+67/349058/campos_512_v4
+67/349062/campos_512_v4
+67/349069/campos_512_v4
+67/349078/campos_512_v4
+67/349087/campos_512_v4
+67/349089/campos_512_v4
+67/349094/campos_512_v4
+67/349097/campos_512_v4
+67/349108/campos_512_v4
+67/349109/campos_512_v4
+67/349116/campos_512_v4
+67/349137/campos_512_v4
+67/349152/campos_512_v4
+67/349159/campos_512_v4
+67/349160/campos_512_v4
+67/349165/campos_512_v4
+67/349179/campos_512_v4
+67/349187/campos_512_v4
+67/349197/campos_512_v4
+67/349217/campos_512_v4
+67/349223/campos_512_v4
+67/349235/campos_512_v4
+67/349238/campos_512_v4
+67/349243/campos_512_v4
+67/349249/campos_512_v4
+67/349250/campos_512_v4
+67/349252/campos_512_v4
+67/349258/campos_512_v4
+67/349268/campos_512_v4
+67/349275/campos_512_v4
+67/349281/campos_512_v4
+67/349290/campos_512_v4
+67/349299/campos_512_v4
+67/349316/campos_512_v4
+67/349328/campos_512_v4
+67/349338/campos_512_v4
+67/349359/campos_512_v4
+67/349371/campos_512_v4
+67/349372/campos_512_v4
+67/349374/campos_512_v4
+67/349386/campos_512_v4
+67/349403/campos_512_v4
+67/349404/campos_512_v4
+67/349407/campos_512_v4
+67/349416/campos_512_v4
+67/349424/campos_512_v4
+67/349425/campos_512_v4
+67/349435/campos_512_v4
+67/349437/campos_512_v4
+67/349444/campos_512_v4
+67/349481/campos_512_v4
+67/349482/campos_512_v4
+67/349489/campos_512_v4
+67/349490/campos_512_v4
+67/349494/campos_512_v4
+67/349500/campos_512_v4
+67/349501/campos_512_v4
+67/349505/campos_512_v4
+67/349511/campos_512_v4
+67/349513/campos_512_v4
+67/349519/campos_512_v4
+67/349520/campos_512_v4
+67/349524/campos_512_v4
+67/349525/campos_512_v4
+67/349533/campos_512_v4
+67/349534/campos_512_v4
+67/349536/campos_512_v4
+67/349548/campos_512_v4
+67/349584/campos_512_v4
+67/349585/campos_512_v4
+67/349590/campos_512_v4
+67/349598/campos_512_v4
+67/349602/campos_512_v4
+67/349621/campos_512_v4
+67/349629/campos_512_v4
+67/349646/campos_512_v4
+67/349652/campos_512_v4
+67/349659/campos_512_v4
+67/349662/campos_512_v4
+67/349668/campos_512_v4
+67/349678/campos_512_v4
+67/349695/campos_512_v4
+67/349729/campos_512_v4
+67/349741/campos_512_v4
+67/349750/campos_512_v4
+67/349766/campos_512_v4
+67/349774/campos_512_v4
+67/349775/campos_512_v4
+67/349780/campos_512_v4
+67/349808/campos_512_v4
+67/349837/campos_512_v4
+67/349865/campos_512_v4
+67/349875/campos_512_v4
+67/349880/campos_512_v4
+67/349891/campos_512_v4
+67/349892/campos_512_v4
+67/349911/campos_512_v4
+67/349917/campos_512_v4
+67/349918/campos_512_v4
+67/349932/campos_512_v4
+67/349933/campos_512_v4
+67/349937/campos_512_v4
+67/349950/campos_512_v4
+67/349965/campos_512_v4
+67/349970/campos_512_v4
+67/349972/campos_512_v4
+67/349983/campos_512_v4
+67/349992/campos_512_v4
+68/350017/campos_512_v4
+68/350021/campos_512_v4
+68/350043/campos_512_v4
+68/350044/campos_512_v4
+68/350049/campos_512_v4
+68/350053/campos_512_v4
+68/350056/campos_512_v4
+68/350067/campos_512_v4
+68/350096/campos_512_v4
+68/350109/campos_512_v4
+68/350127/campos_512_v4
+68/350128/campos_512_v4
+68/350136/campos_512_v4
+68/350143/campos_512_v4
+68/350146/campos_512_v4
+68/350151/campos_512_v4
+68/350183/campos_512_v4
+68/350185/campos_512_v4
+68/350187/campos_512_v4
+68/350198/campos_512_v4
+68/350203/campos_512_v4
+68/350212/campos_512_v4
+68/350213/campos_512_v4
+68/350228/campos_512_v4
+68/350233/campos_512_v4
+68/350234/campos_512_v4
+68/350258/campos_512_v4
+68/350265/campos_512_v4
+68/350267/campos_512_v4
+68/350270/campos_512_v4
+68/350278/campos_512_v4
+68/350286/campos_512_v4
+68/350293/campos_512_v4
+68/350307/campos_512_v4
+68/350316/campos_512_v4
+68/350322/campos_512_v4
+68/350337/campos_512_v4
+68/350349/campos_512_v4
+68/350356/campos_512_v4
+68/350377/campos_512_v4
+68/350388/campos_512_v4
+68/350389/campos_512_v4
+68/350400/campos_512_v4
+68/350413/campos_512_v4
+68/350417/campos_512_v4
+68/350421/campos_512_v4
+68/350423/campos_512_v4
+68/350425/campos_512_v4
+68/350434/campos_512_v4
+68/350435/campos_512_v4
+68/350436/campos_512_v4
+68/350449/campos_512_v4
+68/350469/campos_512_v4
+68/350480/campos_512_v4
+68/350481/campos_512_v4
+68/350520/campos_512_v4
+68/350535/campos_512_v4
+68/350537/campos_512_v4
+68/350541/campos_512_v4
+68/350543/campos_512_v4
+68/350568/campos_512_v4
+68/350572/campos_512_v4
+68/350573/campos_512_v4
+68/350583/campos_512_v4
+68/350600/campos_512_v4
+68/350619/campos_512_v4
+68/350631/campos_512_v4
+68/350639/campos_512_v4
+68/350646/campos_512_v4
+68/350660/campos_512_v4
+68/350663/campos_512_v4
+68/350667/campos_512_v4
+68/350675/campos_512_v4
+68/350684/campos_512_v4
+68/350699/campos_512_v4
+68/350702/campos_512_v4
+68/350717/campos_512_v4
+68/350718/campos_512_v4
+68/350719/campos_512_v4
+68/350724/campos_512_v4
+68/350725/campos_512_v4
+68/350734/campos_512_v4
+68/350738/campos_512_v4
+68/350754/campos_512_v4
+68/350769/campos_512_v4
+68/350790/campos_512_v4
+68/350795/campos_512_v4
+68/350797/campos_512_v4
+68/350802/campos_512_v4
+68/350806/campos_512_v4
+68/350813/campos_512_v4
+68/350817/campos_512_v4
+68/350839/campos_512_v4
+68/350848/campos_512_v4
+68/350849/campos_512_v4
+68/350850/campos_512_v4
+68/350872/campos_512_v4
+68/350893/campos_512_v4
+68/350895/campos_512_v4
+68/350906/campos_512_v4
+68/350911/campos_512_v4
+68/350912/campos_512_v4
+68/350935/campos_512_v4
+68/350960/campos_512_v4
+68/350969/campos_512_v4
+68/350970/campos_512_v4
+68/350974/campos_512_v4
+68/350992/campos_512_v4
+68/350999/campos_512_v4
+68/351028/campos_512_v4
+68/351032/campos_512_v4
+68/351034/campos_512_v4
+68/351055/campos_512_v4
+68/351071/campos_512_v4
+68/351084/campos_512_v4
+68/351091/campos_512_v4
+68/351101/campos_512_v4
+68/351102/campos_512_v4
+68/351107/campos_512_v4
+68/351127/campos_512_v4
+68/351128/campos_512_v4
+68/351130/campos_512_v4
+68/351158/campos_512_v4
+68/351164/campos_512_v4
+68/351171/campos_512_v4
+68/351190/campos_512_v4
+68/351199/campos_512_v4
+68/351201/campos_512_v4
+68/351209/campos_512_v4
+68/351218/campos_512_v4
+68/351220/campos_512_v4
+68/351227/campos_512_v4
+68/351230/campos_512_v4
+68/351232/campos_512_v4
+68/351239/campos_512_v4
+68/351245/campos_512_v4
+68/351246/campos_512_v4
+68/351247/campos_512_v4
+68/351259/campos_512_v4
+68/351265/campos_512_v4
+68/351270/campos_512_v4
+68/351298/campos_512_v4
+68/351299/campos_512_v4
+68/351315/campos_512_v4
+68/351336/campos_512_v4
+68/351343/campos_512_v4
+68/351353/campos_512_v4
+68/351368/campos_512_v4
+68/351374/campos_512_v4
+68/351375/campos_512_v4
+68/351384/campos_512_v4
+68/351393/campos_512_v4
+68/351408/campos_512_v4
+68/351419/campos_512_v4
+68/351424/campos_512_v4
+68/351429/campos_512_v4
+68/351437/campos_512_v4
+68/351444/campos_512_v4
+68/351454/campos_512_v4
+68/351460/campos_512_v4
+68/351462/campos_512_v4
+68/351463/campos_512_v4
+68/351479/campos_512_v4
+68/351499/campos_512_v4
+68/351510/campos_512_v4
+68/351513/campos_512_v4
+68/351514/campos_512_v4
+68/351519/campos_512_v4
+68/351521/campos_512_v4
+68/351534/campos_512_v4
+68/351542/campos_512_v4
+68/351548/campos_512_v4
+68/351555/campos_512_v4
+68/351570/campos_512_v4
+68/351571/campos_512_v4
+68/351594/campos_512_v4
+68/351595/campos_512_v4
+68/351614/campos_512_v4
+68/351626/campos_512_v4
+68/351644/campos_512_v4
+68/351653/campos_512_v4
+68/351665/campos_512_v4
+68/351671/campos_512_v4
+68/351682/campos_512_v4
+68/351688/campos_512_v4
+68/351705/campos_512_v4
+68/351721/campos_512_v4
+68/351722/campos_512_v4
+68/351728/campos_512_v4
+68/351740/campos_512_v4
+68/351741/campos_512_v4
+68/351750/campos_512_v4
+68/351764/campos_512_v4
+68/351769/campos_512_v4
+68/351775/campos_512_v4
+68/351784/campos_512_v4
+68/351792/campos_512_v4
+68/351810/campos_512_v4
+68/351815/campos_512_v4
+68/351816/campos_512_v4
+68/351823/campos_512_v4
+68/351856/campos_512_v4
+68/351872/campos_512_v4
+68/351890/campos_512_v4
+68/351913/campos_512_v4
+68/351927/campos_512_v4
+68/351939/campos_512_v4
+68/351941/campos_512_v4
+68/351945/campos_512_v4
+68/351949/campos_512_v4
+68/351954/campos_512_v4
+68/351960/campos_512_v4
+68/351961/campos_512_v4
+68/351972/campos_512_v4
+68/351976/campos_512_v4
+68/351980/campos_512_v4
+68/351983/campos_512_v4
+68/351993/campos_512_v4
+68/352002/campos_512_v4
+68/352015/campos_512_v4
+68/352017/campos_512_v4
+68/352021/campos_512_v4
+68/352039/campos_512_v4
+68/352045/campos_512_v4
+68/352049/campos_512_v4
+68/352053/campos_512_v4
+68/352059/campos_512_v4
+68/352063/campos_512_v4
+68/352064/campos_512_v4
+68/352075/campos_512_v4
+68/352080/campos_512_v4
+68/352081/campos_512_v4
+68/352085/campos_512_v4
+68/352086/campos_512_v4
+68/352095/campos_512_v4
+68/352107/campos_512_v4
+68/352109/campos_512_v4
+68/352111/campos_512_v4
+68/352113/campos_512_v4
+68/352149/campos_512_v4
+68/352169/campos_512_v4
+68/352171/campos_512_v4
+68/352178/campos_512_v4
+68/352190/campos_512_v4
+68/352208/campos_512_v4
+68/352212/campos_512_v4
+68/352219/campos_512_v4
+68/352224/campos_512_v4
+68/352231/campos_512_v4
+68/352239/campos_512_v4
+68/352252/campos_512_v4
+68/352255/campos_512_v4
+68/352264/campos_512_v4
+68/352269/campos_512_v4
+68/352271/campos_512_v4
+68/352282/campos_512_v4
+68/352289/campos_512_v4
+68/352296/campos_512_v4
+68/352298/campos_512_v4
+68/352309/campos_512_v4
+68/352315/campos_512_v4
+68/352316/campos_512_v4
+68/352317/campos_512_v4
+68/352333/campos_512_v4
+68/352335/campos_512_v4
+68/352336/campos_512_v4
+68/352337/campos_512_v4
+68/352339/campos_512_v4
+68/352353/campos_512_v4
+68/352367/campos_512_v4
+68/352370/campos_512_v4
+68/352381/campos_512_v4
+68/352387/campos_512_v4
+68/352391/campos_512_v4
+68/352396/campos_512_v4
+68/352401/campos_512_v4
+68/352402/campos_512_v4
+68/352416/campos_512_v4
+68/352432/campos_512_v4
+68/352434/campos_512_v4
+68/352445/campos_512_v4
+68/352477/campos_512_v4
+68/352479/campos_512_v4
+68/352487/campos_512_v4
+68/352490/campos_512_v4
+68/352498/campos_512_v4
+68/352503/campos_512_v4
+68/352520/campos_512_v4
+68/352522/campos_512_v4
+68/352533/campos_512_v4
+68/352535/campos_512_v4
+68/352566/campos_512_v4
+68/352572/campos_512_v4
+68/352576/campos_512_v4
+68/352587/campos_512_v4
+68/352590/campos_512_v4
+68/352605/campos_512_v4
+68/352606/campos_512_v4
+68/352616/campos_512_v4
+68/352629/campos_512_v4
+68/352634/campos_512_v4
+68/352639/campos_512_v4
+68/352655/campos_512_v4
+68/352657/campos_512_v4
+68/352678/campos_512_v4
+68/352691/campos_512_v4
+68/352698/campos_512_v4
+68/352704/campos_512_v4
+68/352715/campos_512_v4
+68/352721/campos_512_v4
+68/352725/campos_512_v4
+68/352748/campos_512_v4
+68/352759/campos_512_v4
+68/352774/campos_512_v4
+68/352778/campos_512_v4
+68/352782/campos_512_v4
+68/352813/campos_512_v4
+68/352824/campos_512_v4
+68/352834/campos_512_v4
+68/352835/campos_512_v4
+68/352839/campos_512_v4
+68/352850/campos_512_v4
+68/352854/campos_512_v4
+68/352856/campos_512_v4
+68/352867/campos_512_v4
+68/352868/campos_512_v4
+68/352872/campos_512_v4
+68/352878/campos_512_v4
+68/352879/campos_512_v4
+68/352893/campos_512_v4
+68/352900/campos_512_v4
+68/352914/campos_512_v4
+68/352915/campos_512_v4
+68/352921/campos_512_v4
+68/352926/campos_512_v4
+68/352931/campos_512_v4
+68/352932/campos_512_v4
+68/352935/campos_512_v4
+68/352947/campos_512_v4
+68/352953/campos_512_v4
+68/352956/campos_512_v4
+68/352958/campos_512_v4
+68/352960/campos_512_v4
+68/352966/campos_512_v4
+68/352974/campos_512_v4
+68/352978/campos_512_v4
+68/352991/campos_512_v4
+68/352996/campos_512_v4
+68/352997/campos_512_v4
+68/353012/campos_512_v4
+68/353023/campos_512_v4
+68/353031/campos_512_v4
+68/353038/campos_512_v4
+68/353039/campos_512_v4
+68/353047/campos_512_v4
+68/353049/campos_512_v4
+68/353058/campos_512_v4
+68/353061/campos_512_v4
+68/353070/campos_512_v4
+68/353073/campos_512_v4
+68/353074/campos_512_v4
+68/353078/campos_512_v4
+68/353104/campos_512_v4
+68/353111/campos_512_v4
+68/353112/campos_512_v4
+68/353116/campos_512_v4
+68/353128/campos_512_v4
+68/353143/campos_512_v4
+68/353145/campos_512_v4
+68/353153/campos_512_v4
+68/353180/campos_512_v4
+68/353181/campos_512_v4
+68/353218/campos_512_v4
+68/353224/campos_512_v4
+68/353230/campos_512_v4
+68/353243/campos_512_v4
+68/353255/campos_512_v4
+68/353257/campos_512_v4
+68/353288/campos_512_v4
+68/353290/campos_512_v4
+68/353293/campos_512_v4
+68/353300/campos_512_v4
+68/353302/campos_512_v4
+68/353304/campos_512_v4
+68/353311/campos_512_v4
+68/353313/campos_512_v4
+68/353314/campos_512_v4
+68/353322/campos_512_v4
+68/353351/campos_512_v4
+68/353353/campos_512_v4
+68/353365/campos_512_v4
+68/353366/campos_512_v4
+68/353373/campos_512_v4
+68/353374/campos_512_v4
+68/353380/campos_512_v4
+68/353384/campos_512_v4
+68/353390/campos_512_v4
+68/353398/campos_512_v4
+68/353399/campos_512_v4
+68/353402/campos_512_v4
+68/353403/campos_512_v4
+68/353410/campos_512_v4
+68/353413/campos_512_v4
+68/353414/campos_512_v4
+68/353428/campos_512_v4
+68/353432/campos_512_v4
+68/353446/campos_512_v4
+68/353451/campos_512_v4
+68/353461/campos_512_v4
+68/353469/campos_512_v4
+68/353475/campos_512_v4
+68/353493/campos_512_v4
+68/353494/campos_512_v4
+68/353499/campos_512_v4
+68/353517/campos_512_v4
+68/353518/campos_512_v4
+68/353524/campos_512_v4
+68/353537/campos_512_v4
+68/353542/campos_512_v4
+68/353550/campos_512_v4
+68/353552/campos_512_v4
+68/353557/campos_512_v4
+68/353558/campos_512_v4
+68/353561/campos_512_v4
+68/353566/campos_512_v4
+68/353580/campos_512_v4
+68/353583/campos_512_v4
+68/353600/campos_512_v4
+68/353609/campos_512_v4
+68/353617/campos_512_v4
+68/353620/campos_512_v4
+68/353629/campos_512_v4
+68/353631/campos_512_v4
+68/353646/campos_512_v4
+68/353658/campos_512_v4
+68/353671/campos_512_v4
+68/353676/campos_512_v4
+68/353677/campos_512_v4
+68/353692/campos_512_v4
+68/353704/campos_512_v4
+68/353757/campos_512_v4
+68/353771/campos_512_v4
+68/353772/campos_512_v4
+68/353777/campos_512_v4
+68/353778/campos_512_v4
+68/353798/campos_512_v4
+68/353801/campos_512_v4
+68/353805/campos_512_v4
+68/353810/campos_512_v4
+68/353811/campos_512_v4
+68/353816/campos_512_v4
+68/353823/campos_512_v4
+68/353837/campos_512_v4
+68/353856/campos_512_v4
+68/353858/campos_512_v4
+68/353869/campos_512_v4
+68/353874/campos_512_v4
+68/353909/campos_512_v4
+68/353914/campos_512_v4
+68/353925/campos_512_v4
+68/353945/campos_512_v4
+68/353948/campos_512_v4
+68/353961/campos_512_v4
+68/353967/campos_512_v4
+68/353970/campos_512_v4
+68/353971/campos_512_v4
+68/353972/campos_512_v4
+68/353984/campos_512_v4
+68/354004/campos_512_v4
+68/354007/campos_512_v4
+68/354019/campos_512_v4
+68/354036/campos_512_v4
+68/354055/campos_512_v4
+68/354068/campos_512_v4
+68/354093/campos_512_v4
+68/354100/campos_512_v4
+68/354120/campos_512_v4
+68/354124/campos_512_v4
+68/354127/campos_512_v4
+68/354137/campos_512_v4
+68/354155/campos_512_v4
+68/354178/campos_512_v4
+68/354179/campos_512_v4
+68/354196/campos_512_v4
+68/354201/campos_512_v4
+68/354205/campos_512_v4
+68/354220/campos_512_v4
+68/354221/campos_512_v4
+68/354224/campos_512_v4
+68/354230/campos_512_v4
+68/354236/campos_512_v4
+68/354239/campos_512_v4
+68/354255/campos_512_v4
+68/354258/campos_512_v4
+68/354267/campos_512_v4
+68/354270/campos_512_v4
+68/354278/campos_512_v4
+68/354286/campos_512_v4
+68/354290/campos_512_v4
+68/354300/campos_512_v4
+68/354305/campos_512_v4
+68/354306/campos_512_v4
+68/354311/campos_512_v4
+68/354315/campos_512_v4
+68/354320/campos_512_v4
+68/354327/campos_512_v4
+68/354328/campos_512_v4
+68/354344/campos_512_v4
+68/354346/campos_512_v4
+68/354353/campos_512_v4
+68/354365/campos_512_v4
+68/354366/campos_512_v4
+68/354371/campos_512_v4
+68/354374/campos_512_v4
+68/354383/campos_512_v4
+68/354395/campos_512_v4
+68/354420/campos_512_v4
+68/354422/campos_512_v4
+68/354425/campos_512_v4
+68/354429/campos_512_v4
+68/354437/campos_512_v4
+68/354444/campos_512_v4
+68/354447/campos_512_v4
+68/354448/campos_512_v4
+68/354453/campos_512_v4
+68/354458/campos_512_v4
+68/354460/campos_512_v4
+68/354469/campos_512_v4
+68/354470/campos_512_v4
+68/354477/campos_512_v4
+68/354479/campos_512_v4
+68/354481/campos_512_v4
+68/354484/campos_512_v4
+68/354485/campos_512_v4
+68/354486/campos_512_v4
+68/354489/campos_512_v4
+68/354504/campos_512_v4
+68/354507/campos_512_v4
+68/354512/campos_512_v4
+68/354514/campos_512_v4
+68/354535/campos_512_v4
+68/354546/campos_512_v4
+68/354551/campos_512_v4
+68/354558/campos_512_v4
+68/354562/campos_512_v4
+68/354567/campos_512_v4
+68/354568/campos_512_v4
+68/354586/campos_512_v4
+68/354597/campos_512_v4
+68/354600/campos_512_v4
+68/354603/campos_512_v4
+68/354609/campos_512_v4
+68/354631/campos_512_v4
+68/354634/campos_512_v4
+68/354646/campos_512_v4
+68/354650/campos_512_v4
+68/354654/campos_512_v4
+68/354672/campos_512_v4
+68/354681/campos_512_v4
+68/354683/campos_512_v4
+68/354684/campos_512_v4
+68/354687/campos_512_v4
+68/354709/campos_512_v4
+68/354715/campos_512_v4
+68/354731/campos_512_v4
+68/354733/campos_512_v4
+68/354740/campos_512_v4
+68/354748/campos_512_v4
+68/354766/campos_512_v4
+68/354769/campos_512_v4
+68/354770/campos_512_v4
+68/354781/campos_512_v4
+68/354794/campos_512_v4
+68/354832/campos_512_v4
+68/354861/campos_512_v4
+68/354864/campos_512_v4
+68/354880/campos_512_v4
+68/354883/campos_512_v4
+68/354888/campos_512_v4
+68/354889/campos_512_v4
+68/354902/campos_512_v4
+68/354908/campos_512_v4
+68/354910/campos_512_v4
+68/354927/campos_512_v4
+68/354933/campos_512_v4
+68/354955/campos_512_v4
+68/354964/campos_512_v4
+68/354969/campos_512_v4
+68/354979/campos_512_v4
+68/354988/campos_512_v4
+68/355000/campos_512_v4
+69/355002/campos_512_v4
+69/355007/campos_512_v4
+69/355019/campos_512_v4
+69/355024/campos_512_v4
+69/355029/campos_512_v4
+69/355034/campos_512_v4
+69/355035/campos_512_v4
+69/355037/campos_512_v4
+69/355046/campos_512_v4
+69/355047/campos_512_v4
+69/355070/campos_512_v4
+69/355073/campos_512_v4
+69/355075/campos_512_v4
+69/355099/campos_512_v4
+69/355104/campos_512_v4
+69/355117/campos_512_v4
+69/355118/campos_512_v4
+69/355121/campos_512_v4
+69/355123/campos_512_v4
+69/355145/campos_512_v4
+69/355150/campos_512_v4
+69/355162/campos_512_v4
+69/355177/campos_512_v4
+69/355191/campos_512_v4
+69/355207/campos_512_v4
+69/355209/campos_512_v4
+69/355211/campos_512_v4
+69/355216/campos_512_v4
+69/355227/campos_512_v4
+69/355230/campos_512_v4
+69/355245/campos_512_v4
+69/355265/campos_512_v4
+69/355273/campos_512_v4
+69/355279/campos_512_v4
+69/355300/campos_512_v4
+69/355311/campos_512_v4
+69/355313/campos_512_v4
+69/355337/campos_512_v4
+69/355355/campos_512_v4
+69/355360/campos_512_v4
+69/355363/campos_512_v4
+69/355365/campos_512_v4
+69/355375/campos_512_v4
+69/355379/campos_512_v4
+69/355385/campos_512_v4
+69/355388/campos_512_v4
+69/355392/campos_512_v4
+69/355393/campos_512_v4
+69/355405/campos_512_v4
+69/355407/campos_512_v4
+69/355410/campos_512_v4
+69/355411/campos_512_v4
+69/355412/campos_512_v4
+69/355427/campos_512_v4
+69/355428/campos_512_v4
+69/355430/campos_512_v4
+69/355435/campos_512_v4
+69/355449/campos_512_v4
+69/355459/campos_512_v4
+69/355460/campos_512_v4
+69/355485/campos_512_v4
+69/355487/campos_512_v4
+69/355488/campos_512_v4
+69/355489/campos_512_v4
+69/355492/campos_512_v4
+69/355500/campos_512_v4
+69/355501/campos_512_v4
+69/355508/campos_512_v4
+69/355514/campos_512_v4
+69/355520/campos_512_v4
+69/355522/campos_512_v4
+69/355530/campos_512_v4
+69/355535/campos_512_v4
+69/355555/campos_512_v4
+69/355583/campos_512_v4
+69/355602/campos_512_v4
+69/355607/campos_512_v4
+69/355615/campos_512_v4
+69/355618/campos_512_v4
+69/355629/campos_512_v4
+69/355633/campos_512_v4
+69/355643/campos_512_v4
+69/355647/campos_512_v4
+69/355655/campos_512_v4
+69/355674/campos_512_v4
+69/355678/campos_512_v4
+69/355681/campos_512_v4
+69/355686/campos_512_v4
+69/355695/campos_512_v4
+69/355706/campos_512_v4
+69/355712/campos_512_v4
+69/355713/campos_512_v4
+69/355720/campos_512_v4
+69/355724/campos_512_v4
+69/355734/campos_512_v4
+69/355736/campos_512_v4
+69/355747/campos_512_v4
+69/355750/campos_512_v4
+69/355758/campos_512_v4
+69/355761/campos_512_v4
+69/355764/campos_512_v4
+69/355769/campos_512_v4
+69/355770/campos_512_v4
+69/355776/campos_512_v4
+69/355806/campos_512_v4
+69/355811/campos_512_v4
+69/355812/campos_512_v4
+69/355814/campos_512_v4
+69/355819/campos_512_v4
+69/355825/campos_512_v4
+69/355826/campos_512_v4
+69/355836/campos_512_v4
+69/355853/campos_512_v4
+69/355855/campos_512_v4
+69/355884/campos_512_v4
+69/355886/campos_512_v4
+69/355891/campos_512_v4
+69/355909/campos_512_v4
+69/355922/campos_512_v4
+69/355940/campos_512_v4
+69/355941/campos_512_v4
+69/355949/campos_512_v4
+69/355954/campos_512_v4
+69/355955/campos_512_v4
+69/355962/campos_512_v4
+69/355967/campos_512_v4
+69/355983/campos_512_v4
+69/355986/campos_512_v4
+69/355991/campos_512_v4
+69/355994/campos_512_v4
+69/355996/campos_512_v4
+69/356007/campos_512_v4
+69/356030/campos_512_v4
+69/356031/campos_512_v4
+69/356078/campos_512_v4
+69/356090/campos_512_v4
+69/356092/campos_512_v4
+69/356107/campos_512_v4
+69/356114/campos_512_v4
+69/356131/campos_512_v4
+69/356147/campos_512_v4
+69/356148/campos_512_v4
+69/356160/campos_512_v4
+69/356163/campos_512_v4
+69/356168/campos_512_v4
+69/356172/campos_512_v4
+69/356173/campos_512_v4
+69/356175/campos_512_v4
+69/356179/campos_512_v4
+69/356225/campos_512_v4
+69/356226/campos_512_v4
+69/356229/campos_512_v4
+69/356248/campos_512_v4
+69/356249/campos_512_v4
+69/356256/campos_512_v4
+69/356266/campos_512_v4
+69/356270/campos_512_v4
+69/356289/campos_512_v4
+69/356304/campos_512_v4
+69/356314/campos_512_v4
+69/356320/campos_512_v4
+69/356322/campos_512_v4
+69/356326/campos_512_v4
+69/356327/campos_512_v4
+69/356333/campos_512_v4
+69/356345/campos_512_v4
+69/356351/campos_512_v4
+69/356356/campos_512_v4
+69/356370/campos_512_v4
+69/356388/campos_512_v4
+69/356395/campos_512_v4
+69/356403/campos_512_v4
+69/356425/campos_512_v4
+69/356427/campos_512_v4
+69/356428/campos_512_v4
+69/356438/campos_512_v4
+69/356442/campos_512_v4
+69/356444/campos_512_v4
+69/356445/campos_512_v4
+69/356447/campos_512_v4
+69/356470/campos_512_v4
+69/356476/campos_512_v4
+69/356521/campos_512_v4
+69/356523/campos_512_v4
+69/356530/campos_512_v4
+69/356535/campos_512_v4
+69/356557/campos_512_v4
+69/356561/campos_512_v4
+69/356563/campos_512_v4
+69/356566/campos_512_v4
+69/356577/campos_512_v4
+69/356587/campos_512_v4
+69/356603/campos_512_v4
+69/356605/campos_512_v4
+69/356616/campos_512_v4
+69/356618/campos_512_v4
+69/356621/campos_512_v4
+69/356622/campos_512_v4
+69/356627/campos_512_v4
+69/356642/campos_512_v4
+69/356651/campos_512_v4
+69/356655/campos_512_v4
+69/356662/campos_512_v4
+69/356663/campos_512_v4
+69/356664/campos_512_v4
+69/356666/campos_512_v4
+69/356682/campos_512_v4
+69/356686/campos_512_v4
+69/356705/campos_512_v4
+69/356708/campos_512_v4
+69/356718/campos_512_v4
+69/356722/campos_512_v4
+69/356745/campos_512_v4
+69/356754/campos_512_v4
+69/356769/campos_512_v4
+69/356775/campos_512_v4
+69/356777/campos_512_v4
+69/356780/campos_512_v4
+69/356796/campos_512_v4
+69/356811/campos_512_v4
+69/356818/campos_512_v4
+69/356825/campos_512_v4
+69/356830/campos_512_v4
+69/356831/campos_512_v4
+69/356842/campos_512_v4
+69/356847/campos_512_v4
+69/356851/campos_512_v4
+69/356863/campos_512_v4
+69/356869/campos_512_v4
+69/356873/campos_512_v4
+69/356876/campos_512_v4
+69/356879/campos_512_v4
+69/356886/campos_512_v4
+69/356887/campos_512_v4
+69/356888/campos_512_v4
+69/356891/campos_512_v4
+69/356896/campos_512_v4
+69/356907/campos_512_v4
+69/356908/campos_512_v4
+69/356921/campos_512_v4
+69/356932/campos_512_v4
+69/356936/campos_512_v4
+69/356937/campos_512_v4
+69/356954/campos_512_v4
+69/356962/campos_512_v4
+69/356972/campos_512_v4
+69/356977/campos_512_v4
+69/356988/campos_512_v4
+69/356991/campos_512_v4
+69/356993/campos_512_v4
+69/357002/campos_512_v4
+69/357009/campos_512_v4
+69/357012/campos_512_v4
+69/357023/campos_512_v4
+69/357032/campos_512_v4
+69/357034/campos_512_v4
+69/357037/campos_512_v4
+69/357038/campos_512_v4
+69/357041/campos_512_v4
+69/357042/campos_512_v4
+69/357044/campos_512_v4
+69/357048/campos_512_v4
+69/357049/campos_512_v4
+69/357052/campos_512_v4
+69/357055/campos_512_v4
+69/357062/campos_512_v4
+69/357073/campos_512_v4
+69/357077/campos_512_v4
+69/357085/campos_512_v4
+69/357087/campos_512_v4
+69/357105/campos_512_v4
+69/357110/campos_512_v4
+69/357128/campos_512_v4
+69/357130/campos_512_v4
+69/357135/campos_512_v4
+69/357147/campos_512_v4
+69/357149/campos_512_v4
+69/357158/campos_512_v4
+69/357161/campos_512_v4
+69/357164/campos_512_v4
+69/357171/campos_512_v4
+69/357177/campos_512_v4
+69/357178/campos_512_v4
+69/357183/campos_512_v4
+69/357189/campos_512_v4
+69/357200/campos_512_v4
+69/357221/campos_512_v4
+69/357222/campos_512_v4
+69/357223/campos_512_v4
+69/357231/campos_512_v4
+69/357236/campos_512_v4
+69/357238/campos_512_v4
+69/357244/campos_512_v4
+69/357246/campos_512_v4
+69/357247/campos_512_v4
+69/357259/campos_512_v4
+69/357266/campos_512_v4
+69/357267/campos_512_v4
+69/357279/campos_512_v4
+69/357280/campos_512_v4
+69/357292/campos_512_v4
+69/357293/campos_512_v4
+69/357301/campos_512_v4
+69/357304/campos_512_v4
+69/357321/campos_512_v4
+69/357507/campos_512_v4
+69/357514/campos_512_v4
+69/357519/campos_512_v4
+69/357522/campos_512_v4
+69/357523/campos_512_v4
+69/357538/campos_512_v4
+69/357542/campos_512_v4
+69/357562/campos_512_v4
+69/357589/campos_512_v4
+69/357591/campos_512_v4
+69/357601/campos_512_v4
+69/357606/campos_512_v4
+69/357615/campos_512_v4
+69/357618/campos_512_v4
+69/357620/campos_512_v4
+69/357624/campos_512_v4
+69/357625/campos_512_v4
+69/357630/campos_512_v4
+69/357637/campos_512_v4
+69/357641/campos_512_v4
+69/357647/campos_512_v4
+69/357668/campos_512_v4
+69/357675/campos_512_v4
+69/357678/campos_512_v4
+69/357679/campos_512_v4
+69/357695/campos_512_v4
+69/357704/campos_512_v4
+69/357716/campos_512_v4
+69/357717/campos_512_v4
+69/357722/campos_512_v4
+69/357728/campos_512_v4
+69/357733/campos_512_v4
+69/357737/campos_512_v4
+69/357741/campos_512_v4
+69/357762/campos_512_v4
+69/357768/campos_512_v4
+69/357770/campos_512_v4
+69/357795/campos_512_v4
+69/357802/campos_512_v4
+69/357807/campos_512_v4
+69/357811/campos_512_v4
+69/357819/campos_512_v4
+69/357827/campos_512_v4
+69/357841/campos_512_v4
+69/357849/campos_512_v4
+69/357850/campos_512_v4
+69/357877/campos_512_v4
+69/357879/campos_512_v4
+69/357883/campos_512_v4
+69/357888/campos_512_v4
+69/357889/campos_512_v4
+69/357890/campos_512_v4
+69/357911/campos_512_v4
+69/357915/campos_512_v4
+69/357916/campos_512_v4
+69/357950/campos_512_v4
+69/357954/campos_512_v4
+69/357960/campos_512_v4
+69/357963/campos_512_v4
+69/357966/campos_512_v4
+69/357982/campos_512_v4
+69/357992/campos_512_v4
+69/357995/campos_512_v4
+69/357997/campos_512_v4
+69/358024/campos_512_v4
+69/358057/campos_512_v4
+69/358058/campos_512_v4
+69/358067/campos_512_v4
+69/358072/campos_512_v4
+69/358074/campos_512_v4
+69/358083/campos_512_v4
+69/358085/campos_512_v4
+69/358086/campos_512_v4
+69/358097/campos_512_v4
+69/358109/campos_512_v4
+69/358115/campos_512_v4
+69/358116/campos_512_v4
+69/358118/campos_512_v4
+69/358124/campos_512_v4
+69/358133/campos_512_v4
+69/358134/campos_512_v4
+69/358136/campos_512_v4
+69/358142/campos_512_v4
+69/358164/campos_512_v4
+69/358171/campos_512_v4
+69/358173/campos_512_v4
+69/358183/campos_512_v4
+69/358206/campos_512_v4
+69/358211/campos_512_v4
+69/358220/campos_512_v4
+69/358226/campos_512_v4
+69/358231/campos_512_v4
+69/358232/campos_512_v4
+69/358237/campos_512_v4
+69/358246/campos_512_v4
+69/358248/campos_512_v4
+69/358255/campos_512_v4
+69/358264/campos_512_v4
+69/358283/campos_512_v4
+69/358284/campos_512_v4
+69/358285/campos_512_v4
+69/358290/campos_512_v4
+69/358303/campos_512_v4
+69/358308/campos_512_v4
+69/358310/campos_512_v4
+69/358315/campos_512_v4
+69/358320/campos_512_v4
+69/358325/campos_512_v4
+69/358327/campos_512_v4
+69/358335/campos_512_v4
+69/358340/campos_512_v4
+69/358344/campos_512_v4
+69/358351/campos_512_v4
+69/358358/campos_512_v4
+69/358368/campos_512_v4
+69/358376/campos_512_v4
+69/358378/campos_512_v4
+69/358379/campos_512_v4
+69/358383/campos_512_v4
+69/358385/campos_512_v4
+69/358392/campos_512_v4
+69/358393/campos_512_v4
+69/358398/campos_512_v4
+69/358407/campos_512_v4
+69/358412/campos_512_v4
+69/358414/campos_512_v4
+69/358421/campos_512_v4
+69/358425/campos_512_v4
+69/358426/campos_512_v4
+69/358431/campos_512_v4
+69/358432/campos_512_v4
+69/358444/campos_512_v4
+69/358455/campos_512_v4
+69/358462/campos_512_v4
+69/358470/campos_512_v4
+69/358474/campos_512_v4
+69/358478/campos_512_v4
+69/358488/campos_512_v4
+69/358495/campos_512_v4
+69/358505/campos_512_v4
+69/358540/campos_512_v4
+69/358552/campos_512_v4
+69/358558/campos_512_v4
+69/358562/campos_512_v4
+69/358563/campos_512_v4
+69/358564/campos_512_v4
+69/358577/campos_512_v4
+69/358583/campos_512_v4
+69/358587/campos_512_v4
+69/358588/campos_512_v4
+69/358617/campos_512_v4
+69/358625/campos_512_v4
+69/358630/campos_512_v4
+69/358632/campos_512_v4
+69/358635/campos_512_v4
+69/358643/campos_512_v4
+69/358664/campos_512_v4
+69/358666/campos_512_v4
+69/358694/campos_512_v4
+69/358711/campos_512_v4
+69/358713/campos_512_v4
+69/358714/campos_512_v4
+69/358741/campos_512_v4
+69/358745/campos_512_v4
+69/358752/campos_512_v4
+69/358755/campos_512_v4
+69/358756/campos_512_v4
+69/358765/campos_512_v4
+69/358766/campos_512_v4
+69/358771/campos_512_v4
+69/358798/campos_512_v4
+69/358800/campos_512_v4
+69/358813/campos_512_v4
+69/358819/campos_512_v4
+69/358820/campos_512_v4
+69/358839/campos_512_v4
+69/358840/campos_512_v4
+69/358842/campos_512_v4
+69/358853/campos_512_v4
+69/358854/campos_512_v4
+69/358858/campos_512_v4
+69/358859/campos_512_v4
+69/358868/campos_512_v4
+69/358877/campos_512_v4
+69/358881/campos_512_v4
+69/358889/campos_512_v4
+69/358902/campos_512_v4
+69/358909/campos_512_v4
+69/358926/campos_512_v4
+69/358934/campos_512_v4
+69/358939/campos_512_v4
+69/358960/campos_512_v4
+69/358964/campos_512_v4
+69/358966/campos_512_v4
+69/358970/campos_512_v4
+69/358988/campos_512_v4
+69/358991/campos_512_v4
+69/359025/campos_512_v4
+69/359027/campos_512_v4
+69/359031/campos_512_v4
+69/359037/campos_512_v4
+69/359057/campos_512_v4
+69/359078/campos_512_v4
+69/359083/campos_512_v4
+69/359085/campos_512_v4
+69/359092/campos_512_v4
+69/359107/campos_512_v4
+69/359126/campos_512_v4
+69/359128/campos_512_v4
+69/359132/campos_512_v4
+69/359139/campos_512_v4
+69/359140/campos_512_v4
+69/359142/campos_512_v4
+69/359153/campos_512_v4
+69/359170/campos_512_v4
+69/359174/campos_512_v4
+69/359175/campos_512_v4
+69/359194/campos_512_v4
+69/359197/campos_512_v4
+69/359198/campos_512_v4
+69/359204/campos_512_v4
+69/359211/campos_512_v4
+69/359214/campos_512_v4
+69/359219/campos_512_v4
+69/359222/campos_512_v4
+69/359226/campos_512_v4
+69/359242/campos_512_v4
+69/359258/campos_512_v4
+69/359270/campos_512_v4
+69/359271/campos_512_v4
+69/359277/campos_512_v4
+69/359284/campos_512_v4
+69/359306/campos_512_v4
+69/359310/campos_512_v4
+69/359319/campos_512_v4
+69/359321/campos_512_v4
+69/359340/campos_512_v4
+69/359346/campos_512_v4
+69/359350/campos_512_v4
+69/359361/campos_512_v4
+69/359368/campos_512_v4
+69/359379/campos_512_v4
+69/359383/campos_512_v4
+69/359387/campos_512_v4
+69/359393/campos_512_v4
+69/359394/campos_512_v4
+69/359396/campos_512_v4
+69/359410/campos_512_v4
+69/359411/campos_512_v4
+69/359412/campos_512_v4
+69/359417/campos_512_v4
+69/359432/campos_512_v4
+69/359439/campos_512_v4
+69/359447/campos_512_v4
+69/359453/campos_512_v4
+69/359469/campos_512_v4
+69/359506/campos_512_v4
+69/359507/campos_512_v4
+69/359526/campos_512_v4
+69/359528/campos_512_v4
+69/359533/campos_512_v4
+69/359544/campos_512_v4
+69/359545/campos_512_v4
+69/359554/campos_512_v4
+69/359564/campos_512_v4
+69/359570/campos_512_v4
+69/359571/campos_512_v4
+69/359576/campos_512_v4
+69/359582/campos_512_v4
+69/359585/campos_512_v4
+69/359588/campos_512_v4
+69/359613/campos_512_v4
+69/359625/campos_512_v4
+69/359630/campos_512_v4
+69/359639/campos_512_v4
+69/359651/campos_512_v4
+69/359658/campos_512_v4
+69/359663/campos_512_v4
+69/359666/campos_512_v4
+69/359667/campos_512_v4
+69/359673/campos_512_v4
+69/359674/campos_512_v4
+69/359681/campos_512_v4
+69/359687/campos_512_v4
+69/359694/campos_512_v4
+69/359736/campos_512_v4
+69/359739/campos_512_v4
+69/359742/campos_512_v4
+69/359746/campos_512_v4
+69/359747/campos_512_v4
+69/359762/campos_512_v4
+69/359776/campos_512_v4
+69/359780/campos_512_v4
+69/359788/campos_512_v4
+69/359815/campos_512_v4
+69/359818/campos_512_v4
+69/359821/campos_512_v4
+69/359831/campos_512_v4
+69/359832/campos_512_v4
+69/359833/campos_512_v4
+69/359838/campos_512_v4
+69/359840/campos_512_v4
+69/359844/campos_512_v4
+69/359845/campos_512_v4
+69/359846/campos_512_v4
+69/359881/campos_512_v4
+69/359883/campos_512_v4
+69/359918/campos_512_v4
+69/359930/campos_512_v4
+69/359937/campos_512_v4
+69/359940/campos_512_v4
+69/359945/campos_512_v4
+69/359950/campos_512_v4
+69/359951/campos_512_v4
+69/359968/campos_512_v4
+69/359972/campos_512_v4
+69/359987/campos_512_v4
+69/359989/campos_512_v4
+7/45007/campos_512_v4
+7/45028/campos_512_v4
+7/45031/campos_512_v4
+7/45049/campos_512_v4
+7/45052/campos_512_v4
+7/45061/campos_512_v4
+7/45078/campos_512_v4
+7/45087/campos_512_v4
+7/45088/campos_512_v4
+7/45104/campos_512_v4
+7/45120/campos_512_v4
+7/45127/campos_512_v4
+7/45132/campos_512_v4
+7/45185/campos_512_v4
+7/45211/campos_512_v4
+7/45213/campos_512_v4
+7/45246/campos_512_v4
+7/45250/campos_512_v4
+7/45251/campos_512_v4
+7/45277/campos_512_v4
+7/45298/campos_512_v4
+7/45308/campos_512_v4
+7/45309/campos_512_v4
+7/45312/campos_512_v4
+7/45340/campos_512_v4
+7/45341/campos_512_v4
+7/45345/campos_512_v4
+7/45359/campos_512_v4
+7/45364/campos_512_v4
+7/45373/campos_512_v4
+7/45401/campos_512_v4
+7/45412/campos_512_v4
+7/45437/campos_512_v4
+7/45468/campos_512_v4
+7/45476/campos_512_v4
+7/45478/campos_512_v4
+7/45494/campos_512_v4
+7/45500/campos_512_v4
+7/45515/campos_512_v4
+7/45552/campos_512_v4
+7/45557/campos_512_v4
+7/45583/campos_512_v4
+7/45591/campos_512_v4
+7/45629/campos_512_v4
+7/45630/campos_512_v4
+7/45652/campos_512_v4
+7/45684/campos_512_v4
+7/45712/campos_512_v4
+7/45718/campos_512_v4
+7/45724/campos_512_v4
+7/45734/campos_512_v4
+7/45744/campos_512_v4
+7/45756/campos_512_v4
+7/45779/campos_512_v4
+7/45784/campos_512_v4
+7/45790/campos_512_v4
+7/45802/campos_512_v4
+7/45803/campos_512_v4
+7/45819/campos_512_v4
+7/45850/campos_512_v4
+7/45854/campos_512_v4
+7/45878/campos_512_v4
+7/45879/campos_512_v4
+7/45885/campos_512_v4
+7/45887/campos_512_v4
+7/45890/campos_512_v4
+7/45910/campos_512_v4
+7/45913/campos_512_v4
+7/45914/campos_512_v4
+7/45917/campos_512_v4
+7/45942/campos_512_v4
+7/45947/campos_512_v4
+7/45957/campos_512_v4
+7/45960/campos_512_v4
+7/45965/campos_512_v4
+7/45974/campos_512_v4
+7/45978/campos_512_v4
+7/45986/campos_512_v4
+7/45990/campos_512_v4
+7/46010/campos_512_v4
+7/46020/campos_512_v4
+7/46025/campos_512_v4
+7/46038/campos_512_v4
+7/46046/campos_512_v4
+7/46049/campos_512_v4
+7/46053/campos_512_v4
+7/46061/campos_512_v4
+7/46063/campos_512_v4
+7/46086/campos_512_v4
+7/46098/campos_512_v4
+7/46121/campos_512_v4
+7/46133/campos_512_v4
+7/46143/campos_512_v4
+7/46151/campos_512_v4
+7/46153/campos_512_v4
+7/46181/campos_512_v4
+7/46184/campos_512_v4
+7/46193/campos_512_v4
+7/46213/campos_512_v4
+7/46224/campos_512_v4
+7/46225/campos_512_v4
+7/46226/campos_512_v4
+7/46237/campos_512_v4
+7/46258/campos_512_v4
+7/46262/campos_512_v4
+7/46266/campos_512_v4
+7/46267/campos_512_v4
+7/46271/campos_512_v4
+7/46275/campos_512_v4
+7/46280/campos_512_v4
+7/46316/campos_512_v4
+7/46340/campos_512_v4
+7/46371/campos_512_v4
+7/46382/campos_512_v4
+7/46386/campos_512_v4
+7/46390/campos_512_v4
+7/46391/campos_512_v4
+7/46411/campos_512_v4
+7/46412/campos_512_v4
+7/46421/campos_512_v4
+7/46437/campos_512_v4
+7/46441/campos_512_v4
+7/46455/campos_512_v4
+7/46470/campos_512_v4
+7/46491/campos_512_v4
+7/46502/campos_512_v4
+7/46510/campos_512_v4
+7/46515/campos_512_v4
+7/46547/campos_512_v4
+7/46550/campos_512_v4
+7/46553/campos_512_v4
+7/46554/campos_512_v4
+7/46571/campos_512_v4
+7/46572/campos_512_v4
+7/46575/campos_512_v4
+7/46578/campos_512_v4
+7/46583/campos_512_v4
+7/46589/campos_512_v4
+7/46615/campos_512_v4
+7/46617/campos_512_v4
+7/46639/campos_512_v4
+7/46644/campos_512_v4
+7/46652/campos_512_v4
+7/46692/campos_512_v4
+7/46696/campos_512_v4
+7/46700/campos_512_v4
+7/46703/campos_512_v4
+7/46722/campos_512_v4
+7/46731/campos_512_v4
+7/46736/campos_512_v4
+7/46738/campos_512_v4
+7/46739/campos_512_v4
+7/46743/campos_512_v4
+7/46748/campos_512_v4
+7/46755/campos_512_v4
+7/46768/campos_512_v4
+7/46776/campos_512_v4
+7/46796/campos_512_v4
+7/46805/campos_512_v4
+7/46808/campos_512_v4
+7/46820/campos_512_v4
+7/46827/campos_512_v4
+7/46834/campos_512_v4
+7/46861/campos_512_v4
+7/46876/campos_512_v4
+7/46881/campos_512_v4
+7/46884/campos_512_v4
+7/46886/campos_512_v4
+7/46912/campos_512_v4
+7/46925/campos_512_v4
+7/46968/campos_512_v4
+7/46998/campos_512_v4
+7/46999/campos_512_v4
+7/47045/campos_512_v4
+7/47047/campos_512_v4
+7/47067/campos_512_v4
+7/47070/campos_512_v4
+7/47076/campos_512_v4
+7/47077/campos_512_v4
+7/47092/campos_512_v4
+7/47110/campos_512_v4
+7/47125/campos_512_v4
+7/47132/campos_512_v4
+7/47137/campos_512_v4
+7/47149/campos_512_v4
+7/47163/campos_512_v4
+7/47164/campos_512_v4
+7/47174/campos_512_v4
+7/47187/campos_512_v4
+7/47189/campos_512_v4
+7/47195/campos_512_v4
+7/47201/campos_512_v4
+7/47214/campos_512_v4
+7/47230/campos_512_v4
+7/47239/campos_512_v4
+7/47245/campos_512_v4
+7/47282/campos_512_v4
+7/47288/campos_512_v4
+7/47294/campos_512_v4
+7/47303/campos_512_v4
+7/47314/campos_512_v4
+7/47315/campos_512_v4
+7/47347/campos_512_v4
+7/47356/campos_512_v4
+7/47357/campos_512_v4
+7/47361/campos_512_v4
+7/47366/campos_512_v4
+7/47381/campos_512_v4
+7/47401/campos_512_v4
+7/47403/campos_512_v4
+7/47404/campos_512_v4
+7/47408/campos_512_v4
+7/47416/campos_512_v4
+7/47435/campos_512_v4
+7/47441/campos_512_v4
+7/47444/campos_512_v4
+7/47446/campos_512_v4
+7/47447/campos_512_v4
+7/47461/campos_512_v4
+7/47480/campos_512_v4
+7/47481/campos_512_v4
+7/47488/campos_512_v4
+7/47491/campos_512_v4
+7/47493/campos_512_v4
+7/47498/campos_512_v4
+7/47511/campos_512_v4
+7/47561/campos_512_v4
+7/47584/campos_512_v4
+7/47589/campos_512_v4
+7/47594/campos_512_v4
+7/47606/campos_512_v4
+7/47608/campos_512_v4
+7/47630/campos_512_v4
+7/47635/campos_512_v4
+7/47641/campos_512_v4
+7/47650/campos_512_v4
+7/47653/campos_512_v4
+7/47659/campos_512_v4
+7/47673/campos_512_v4
+7/47680/campos_512_v4
+7/47682/campos_512_v4
+7/47683/campos_512_v4
+7/47686/campos_512_v4
+7/47711/campos_512_v4
+7/47742/campos_512_v4
+7/47749/campos_512_v4
+7/47777/campos_512_v4
+7/47785/campos_512_v4
+7/47808/campos_512_v4
+7/47814/campos_512_v4
+7/47851/campos_512_v4
+7/47853/campos_512_v4
+7/47857/campos_512_v4
+7/47859/campos_512_v4
+7/47861/campos_512_v4
+7/47876/campos_512_v4
+7/47883/campos_512_v4
+7/47885/campos_512_v4
+7/47886/campos_512_v4
+7/47910/campos_512_v4
+7/47911/campos_512_v4
+7/47912/campos_512_v4
+7/47913/campos_512_v4
+7/47916/campos_512_v4
+7/47961/campos_512_v4
+7/47969/campos_512_v4
+7/47977/campos_512_v4
+7/47990/campos_512_v4
+7/48000/campos_512_v4
+7/48007/campos_512_v4
+7/48008/campos_512_v4
+7/48034/campos_512_v4
+7/48038/campos_512_v4
+7/48052/campos_512_v4
+7/48056/campos_512_v4
+7/48059/campos_512_v4
+7/48068/campos_512_v4
+7/48070/campos_512_v4
+7/48094/campos_512_v4
+7/48113/campos_512_v4
+7/48116/campos_512_v4
+7/48120/campos_512_v4
+7/48121/campos_512_v4
+7/48125/campos_512_v4
+7/48132/campos_512_v4
+7/48134/campos_512_v4
+7/48152/campos_512_v4
+7/48171/campos_512_v4
+7/48179/campos_512_v4
+7/48181/campos_512_v4
+7/48190/campos_512_v4
+7/48202/campos_512_v4
+7/48217/campos_512_v4
+7/48228/campos_512_v4
+7/48257/campos_512_v4
+7/48278/campos_512_v4
+7/48300/campos_512_v4
+7/48301/campos_512_v4
+7/48311/campos_512_v4
+7/48320/campos_512_v4
+7/48343/campos_512_v4
+7/48346/campos_512_v4
+7/48357/campos_512_v4
+7/48358/campos_512_v4
+7/48371/campos_512_v4
+7/48372/campos_512_v4
+7/48380/campos_512_v4
+7/48392/campos_512_v4
+7/48409/campos_512_v4
+7/48418/campos_512_v4
+7/48439/campos_512_v4
+7/48446/campos_512_v4
+7/48447/campos_512_v4
+7/48461/campos_512_v4
+7/48463/campos_512_v4
+7/48468/campos_512_v4
+7/48474/campos_512_v4
+7/48478/campos_512_v4
+7/48508/campos_512_v4
+7/48520/campos_512_v4
+7/48537/campos_512_v4
+7/48538/campos_512_v4
+7/48554/campos_512_v4
+7/48560/campos_512_v4
+7/48567/campos_512_v4
+7/48578/campos_512_v4
+7/48579/campos_512_v4
+7/48581/campos_512_v4
+7/48595/campos_512_v4
+7/48597/campos_512_v4
+7/48602/campos_512_v4
+7/48620/campos_512_v4
+7/48623/campos_512_v4
+7/48633/campos_512_v4
+7/48667/campos_512_v4
+7/48669/campos_512_v4
+7/48677/campos_512_v4
+7/48682/campos_512_v4
+7/48683/campos_512_v4
+7/48686/campos_512_v4
+7/48706/campos_512_v4
+7/48707/campos_512_v4
+7/48710/campos_512_v4
+7/48717/campos_512_v4
+7/48725/campos_512_v4
+7/48730/campos_512_v4
+7/48736/campos_512_v4
+7/48738/campos_512_v4
+7/48740/campos_512_v4
+7/48748/campos_512_v4
+7/48759/campos_512_v4
+7/48763/campos_512_v4
+7/48777/campos_512_v4
+7/48780/campos_512_v4
+7/48781/campos_512_v4
+7/48784/campos_512_v4
+7/48811/campos_512_v4
+7/48827/campos_512_v4
+7/48844/campos_512_v4
+7/48849/campos_512_v4
+7/48853/campos_512_v4
+7/48864/campos_512_v4
+7/48865/campos_512_v4
+7/48871/campos_512_v4
+7/48883/campos_512_v4
+7/48885/campos_512_v4
+7/48891/campos_512_v4
+7/48901/campos_512_v4
+7/48914/campos_512_v4
+7/48928/campos_512_v4
+7/48948/campos_512_v4
+7/48953/campos_512_v4
+7/48964/campos_512_v4
+7/48966/campos_512_v4
+7/48990/campos_512_v4
+7/49005/campos_512_v4
+7/49028/campos_512_v4
+7/49048/campos_512_v4
+7/49088/campos_512_v4
+7/49092/campos_512_v4
+7/49095/campos_512_v4
+7/49112/campos_512_v4
+7/49119/campos_512_v4
+7/49122/campos_512_v4
+7/49138/campos_512_v4
+7/49152/campos_512_v4
+7/49155/campos_512_v4
+7/49160/campos_512_v4
+7/49166/campos_512_v4
+7/49167/campos_512_v4
+7/49178/campos_512_v4
+7/49186/campos_512_v4
+7/49211/campos_512_v4
+7/49233/campos_512_v4
+7/49238/campos_512_v4
+7/49255/campos_512_v4
+7/49268/campos_512_v4
+7/49292/campos_512_v4
+7/49293/campos_512_v4
+7/49303/campos_512_v4
+7/49313/campos_512_v4
+7/49317/campos_512_v4
+7/49327/campos_512_v4
+7/49342/campos_512_v4
+7/49350/campos_512_v4
+7/49351/campos_512_v4
+7/49353/campos_512_v4
+7/49358/campos_512_v4
+7/49368/campos_512_v4
+7/49369/campos_512_v4
+7/49393/campos_512_v4
+7/49398/campos_512_v4
+7/49400/campos_512_v4
+7/49429/campos_512_v4
+7/49434/campos_512_v4
+7/49444/campos_512_v4
+7/49449/campos_512_v4
+7/49459/campos_512_v4
+7/49471/campos_512_v4
+7/49472/campos_512_v4
+7/49476/campos_512_v4
+7/49478/campos_512_v4
+7/49481/campos_512_v4
+7/49484/campos_512_v4
+7/49489/campos_512_v4
+7/49545/campos_512_v4
+7/49562/campos_512_v4
+7/49563/campos_512_v4
+7/49583/campos_512_v4
+7/49586/campos_512_v4
+7/49598/campos_512_v4
+7/49600/campos_512_v4
+7/49604/campos_512_v4
+7/49608/campos_512_v4
+7/49610/campos_512_v4
+7/49619/campos_512_v4
+7/49642/campos_512_v4
+7/49653/campos_512_v4
+7/49658/campos_512_v4
+7/49678/campos_512_v4
+7/49679/campos_512_v4
+7/49682/campos_512_v4
+7/49701/campos_512_v4
+7/49716/campos_512_v4
+7/49719/campos_512_v4
+7/49771/campos_512_v4
+7/49772/campos_512_v4
+7/49802/campos_512_v4
+7/49809/campos_512_v4
+7/49817/campos_512_v4
+7/49831/campos_512_v4
+7/49832/campos_512_v4
+7/49834/campos_512_v4
+7/49842/campos_512_v4
+7/49854/campos_512_v4
+7/49876/campos_512_v4
+7/49908/campos_512_v4
+7/49923/campos_512_v4
+7/49935/campos_512_v4
+7/49937/campos_512_v4
+7/49950/campos_512_v4
+7/49965/campos_512_v4
+7/49991/campos_512_v4
+70/360004/campos_512_v4
+70/360011/campos_512_v4
+70/360034/campos_512_v4
+70/360037/campos_512_v4
+70/360040/campos_512_v4
+70/360049/campos_512_v4
+70/360050/campos_512_v4
+70/360052/campos_512_v4
+70/360060/campos_512_v4
+70/360076/campos_512_v4
+70/360107/campos_512_v4
+70/360115/campos_512_v4
+70/360131/campos_512_v4
+70/360139/campos_512_v4
+70/360168/campos_512_v4
+70/360179/campos_512_v4
+70/360182/campos_512_v4
+70/360188/campos_512_v4
+70/360202/campos_512_v4
+70/360220/campos_512_v4
+70/360227/campos_512_v4
+70/360228/campos_512_v4
+70/360230/campos_512_v4
+70/360237/campos_512_v4
+70/360238/campos_512_v4
+70/360240/campos_512_v4
+70/360271/campos_512_v4
+70/360274/campos_512_v4
+70/360278/campos_512_v4
+70/360281/campos_512_v4
+70/360290/campos_512_v4
+70/360292/campos_512_v4
+70/360307/campos_512_v4
+70/360314/campos_512_v4
+70/360365/campos_512_v4
+70/360371/campos_512_v4
+70/360378/campos_512_v4
+70/360408/campos_512_v4
+70/360410/campos_512_v4
+70/360419/campos_512_v4
+70/360425/campos_512_v4
+70/360426/campos_512_v4
+70/360438/campos_512_v4
+70/360449/campos_512_v4
+70/360481/campos_512_v4
+70/360504/campos_512_v4
+70/360516/campos_512_v4
+70/360528/campos_512_v4
+70/360533/campos_512_v4
+70/360535/campos_512_v4
+70/360552/campos_512_v4
+70/360555/campos_512_v4
+70/360559/campos_512_v4
+70/360561/campos_512_v4
+70/360582/campos_512_v4
+70/360583/campos_512_v4
+70/360586/campos_512_v4
+70/360596/campos_512_v4
+70/360597/campos_512_v4
+70/360603/campos_512_v4
+70/360609/campos_512_v4
+70/360642/campos_512_v4
+70/360643/campos_512_v4
+70/360657/campos_512_v4
+70/360665/campos_512_v4
+70/360668/campos_512_v4
+70/360675/campos_512_v4
+70/360677/campos_512_v4
+70/360688/campos_512_v4
+70/360698/campos_512_v4
+70/360699/campos_512_v4
+70/360703/campos_512_v4
+70/360708/campos_512_v4
+70/360710/campos_512_v4
+70/360711/campos_512_v4
+70/360712/campos_512_v4
+70/360734/campos_512_v4
+70/360738/campos_512_v4
+70/360739/campos_512_v4
+70/360742/campos_512_v4
+70/360751/campos_512_v4
+70/360754/campos_512_v4
+70/360761/campos_512_v4
+70/360762/campos_512_v4
+70/360772/campos_512_v4
+70/360786/campos_512_v4
+70/360789/campos_512_v4
+70/360790/campos_512_v4
+70/360793/campos_512_v4
+70/360797/campos_512_v4
+70/360803/campos_512_v4
+70/360810/campos_512_v4
+70/360811/campos_512_v4
+70/360814/campos_512_v4
+70/360828/campos_512_v4
+70/360830/campos_512_v4
+70/360836/campos_512_v4
+70/360838/campos_512_v4
+70/360841/campos_512_v4
+70/360847/campos_512_v4
+70/360853/campos_512_v4
+70/360866/campos_512_v4
+70/360867/campos_512_v4
+70/360883/campos_512_v4
+70/360886/campos_512_v4
+70/360889/campos_512_v4
+70/360891/campos_512_v4
+70/360905/campos_512_v4
+70/360923/campos_512_v4
+70/360932/campos_512_v4
+70/360933/campos_512_v4
+70/360936/campos_512_v4
+70/360950/campos_512_v4
+70/360956/campos_512_v4
+70/360957/campos_512_v4
+70/360959/campos_512_v4
+70/360960/campos_512_v4
+70/360978/campos_512_v4
+70/360988/campos_512_v4
+70/360992/campos_512_v4
+70/361009/campos_512_v4
+70/361023/campos_512_v4
+70/361028/campos_512_v4
+70/361031/campos_512_v4
+70/361046/campos_512_v4
+70/361052/campos_512_v4
+70/361054/campos_512_v4
+70/361059/campos_512_v4
+70/361065/campos_512_v4
+70/361109/campos_512_v4
+70/361113/campos_512_v4
+70/361133/campos_512_v4
+70/361134/campos_512_v4
+70/361138/campos_512_v4
+70/361140/campos_512_v4
+70/361158/campos_512_v4
+70/361159/campos_512_v4
+70/361183/campos_512_v4
+70/361185/campos_512_v4
+70/361205/campos_512_v4
+70/361217/campos_512_v4
+70/361241/campos_512_v4
+70/361249/campos_512_v4
+70/361251/campos_512_v4
+70/361256/campos_512_v4
+70/361260/campos_512_v4
+70/361276/campos_512_v4
+70/361281/campos_512_v4
+70/361286/campos_512_v4
+70/361308/campos_512_v4
+70/361311/campos_512_v4
+70/361315/campos_512_v4
+70/361318/campos_512_v4
+70/361327/campos_512_v4
+70/361332/campos_512_v4
+70/361333/campos_512_v4
+70/361342/campos_512_v4
+70/361343/campos_512_v4
+70/361349/campos_512_v4
+70/361358/campos_512_v4
+70/361377/campos_512_v4
+70/361378/campos_512_v4
+70/361387/campos_512_v4
+70/361389/campos_512_v4
+70/361391/campos_512_v4
+70/361402/campos_512_v4
+70/361405/campos_512_v4
+70/361422/campos_512_v4
+70/361425/campos_512_v4
+70/361431/campos_512_v4
+70/361436/campos_512_v4
+70/361451/campos_512_v4
+70/361477/campos_512_v4
+70/361479/campos_512_v4
+70/361491/campos_512_v4
+70/361495/campos_512_v4
+70/361501/campos_512_v4
+70/361502/campos_512_v4
+70/361509/campos_512_v4
+70/361515/campos_512_v4
+70/361522/campos_512_v4
+70/361523/campos_512_v4
+70/361542/campos_512_v4
+70/361582/campos_512_v4
+70/361587/campos_512_v4
+70/361589/campos_512_v4
+70/361590/campos_512_v4
+70/361591/campos_512_v4
+70/361596/campos_512_v4
+70/361599/campos_512_v4
+70/361600/campos_512_v4
+70/361605/campos_512_v4
+70/361612/campos_512_v4
+70/361615/campos_512_v4
+70/361619/campos_512_v4
+70/361623/campos_512_v4
+70/361625/campos_512_v4
+70/361636/campos_512_v4
+70/361639/campos_512_v4
+70/361656/campos_512_v4
+70/361664/campos_512_v4
+70/361674/campos_512_v4
+70/361681/campos_512_v4
+70/361716/campos_512_v4
+70/361730/campos_512_v4
+70/361752/campos_512_v4
+70/361765/campos_512_v4
+70/361767/campos_512_v4
+70/361780/campos_512_v4
+70/361784/campos_512_v4
+70/361798/campos_512_v4
+70/361801/campos_512_v4
+70/361808/campos_512_v4
+70/361812/campos_512_v4
+70/361814/campos_512_v4
+70/361821/campos_512_v4
+70/361825/campos_512_v4
+70/361829/campos_512_v4
+70/361831/campos_512_v4
+70/361838/campos_512_v4
+70/361840/campos_512_v4
+70/361841/campos_512_v4
+70/361844/campos_512_v4
+70/361846/campos_512_v4
+70/361849/campos_512_v4
+70/361855/campos_512_v4
+70/361859/campos_512_v4
+70/361872/campos_512_v4
+70/361878/campos_512_v4
+70/361887/campos_512_v4
+70/361911/campos_512_v4
+70/361925/campos_512_v4
+70/361946/campos_512_v4
+70/361948/campos_512_v4
+70/361952/campos_512_v4
+70/361959/campos_512_v4
+70/361960/campos_512_v4
+70/361963/campos_512_v4
+70/361966/campos_512_v4
+70/361972/campos_512_v4
+70/361973/campos_512_v4
+70/361977/campos_512_v4
+70/361997/campos_512_v4
+70/362000/campos_512_v4
+70/362022/campos_512_v4
+70/362025/campos_512_v4
+70/362034/campos_512_v4
+70/362036/campos_512_v4
+70/362039/campos_512_v4
+70/362045/campos_512_v4
+70/362055/campos_512_v4
+70/362061/campos_512_v4
+70/362075/campos_512_v4
+70/362087/campos_512_v4
+70/362089/campos_512_v4
+70/362090/campos_512_v4
+70/362098/campos_512_v4
+70/362101/campos_512_v4
+70/362106/campos_512_v4
+70/362110/campos_512_v4
+70/362121/campos_512_v4
+70/362126/campos_512_v4
+70/362131/campos_512_v4
+70/362137/campos_512_v4
+70/362149/campos_512_v4
+70/362150/campos_512_v4
+70/362176/campos_512_v4
+70/362181/campos_512_v4
+70/362190/campos_512_v4
+70/362197/campos_512_v4
+70/362220/campos_512_v4
+70/362239/campos_512_v4
+70/362241/campos_512_v4
+70/362265/campos_512_v4
+70/362286/campos_512_v4
+70/362289/campos_512_v4
+70/362304/campos_512_v4
+70/362306/campos_512_v4
+70/362319/campos_512_v4
+70/362338/campos_512_v4
+70/362339/campos_512_v4
+70/362342/campos_512_v4
+70/362359/campos_512_v4
+70/362373/campos_512_v4
+70/362388/campos_512_v4
+70/362410/campos_512_v4
+70/362416/campos_512_v4
+70/362422/campos_512_v4
+70/362453/campos_512_v4
+70/362483/campos_512_v4
+70/362484/campos_512_v4
+70/362494/campos_512_v4
+70/362499/campos_512_v4
+70/362503/campos_512_v4
+70/362512/campos_512_v4
+70/362575/campos_512_v4
+70/362576/campos_512_v4
+70/362581/campos_512_v4
+70/362584/campos_512_v4
+70/362592/campos_512_v4
+70/362594/campos_512_v4
+70/362595/campos_512_v4
+70/362603/campos_512_v4
+70/362606/campos_512_v4
+70/362615/campos_512_v4
+70/362620/campos_512_v4
+70/362623/campos_512_v4
+70/362631/campos_512_v4
+70/362633/campos_512_v4
+70/362634/campos_512_v4
+70/362637/campos_512_v4
+70/362648/campos_512_v4
+70/362650/campos_512_v4
+70/362682/campos_512_v4
+70/362683/campos_512_v4
+70/362691/campos_512_v4
+70/362694/campos_512_v4
+70/362703/campos_512_v4
+70/362711/campos_512_v4
+70/362717/campos_512_v4
+70/362720/campos_512_v4
+70/362722/campos_512_v4
+70/362730/campos_512_v4
+70/362734/campos_512_v4
+70/362739/campos_512_v4
+70/362744/campos_512_v4
+70/362745/campos_512_v4
+70/362750/campos_512_v4
+70/362751/campos_512_v4
+70/362758/campos_512_v4
+70/362772/campos_512_v4
+70/362780/campos_512_v4
+70/362782/campos_512_v4
+70/362784/campos_512_v4
+70/362800/campos_512_v4
+70/362809/campos_512_v4
+70/362816/campos_512_v4
+70/362822/campos_512_v4
+70/362838/campos_512_v4
+70/362847/campos_512_v4
+70/362852/campos_512_v4
+70/362858/campos_512_v4
+70/362865/campos_512_v4
+70/362875/campos_512_v4
+70/362889/campos_512_v4
+70/362926/campos_512_v4
+70/362927/campos_512_v4
+70/362928/campos_512_v4
+70/362938/campos_512_v4
+70/362943/campos_512_v4
+70/362957/campos_512_v4
+70/362959/campos_512_v4
+70/362965/campos_512_v4
+70/362974/campos_512_v4
+70/362976/campos_512_v4
+70/362977/campos_512_v4
+70/362991/campos_512_v4
+70/363001/campos_512_v4
+70/363006/campos_512_v4
+70/363010/campos_512_v4
+70/363013/campos_512_v4
+70/363017/campos_512_v4
+70/363022/campos_512_v4
+70/363040/campos_512_v4
+70/363052/campos_512_v4
+70/363055/campos_512_v4
+70/363075/campos_512_v4
+70/363088/campos_512_v4
+70/363091/campos_512_v4
+70/363093/campos_512_v4
+70/363098/campos_512_v4
+70/363146/campos_512_v4
+70/363147/campos_512_v4
+70/363156/campos_512_v4
+70/363163/campos_512_v4
+70/363170/campos_512_v4
+70/363173/campos_512_v4
+70/363194/campos_512_v4
+70/363201/campos_512_v4
+70/363211/campos_512_v4
+70/363214/campos_512_v4
+70/363217/campos_512_v4
+70/363219/campos_512_v4
+70/363234/campos_512_v4
+70/363245/campos_512_v4
+70/363260/campos_512_v4
+70/363268/campos_512_v4
+70/363277/campos_512_v4
+70/363279/campos_512_v4
+70/363286/campos_512_v4
+70/363300/campos_512_v4
+70/363313/campos_512_v4
+70/363320/campos_512_v4
+70/363365/campos_512_v4
+70/363377/campos_512_v4
+70/363379/campos_512_v4
+70/363400/campos_512_v4
+70/363401/campos_512_v4
+70/363403/campos_512_v4
+70/363413/campos_512_v4
+70/363417/campos_512_v4
+70/363425/campos_512_v4
+70/363429/campos_512_v4
+70/363430/campos_512_v4
+70/363431/campos_512_v4
+70/363432/campos_512_v4
+70/363433/campos_512_v4
+70/363436/campos_512_v4
+70/363439/campos_512_v4
+70/363444/campos_512_v4
+70/363454/campos_512_v4
+70/363455/campos_512_v4
+70/363459/campos_512_v4
+70/363469/campos_512_v4
+70/363473/campos_512_v4
+70/363486/campos_512_v4
+70/363492/campos_512_v4
+70/363495/campos_512_v4
+70/363496/campos_512_v4
+70/363501/campos_512_v4
+70/363506/campos_512_v4
+70/363510/campos_512_v4
+70/363513/campos_512_v4
+70/363515/campos_512_v4
+70/363539/campos_512_v4
+70/363541/campos_512_v4
+70/363544/campos_512_v4
+70/363547/campos_512_v4
+70/363550/campos_512_v4
+70/363570/campos_512_v4
+70/363582/campos_512_v4
+70/363591/campos_512_v4
+70/363600/campos_512_v4
+70/363615/campos_512_v4
+70/363618/campos_512_v4
+70/363620/campos_512_v4
+70/363623/campos_512_v4
+70/363628/campos_512_v4
+70/363630/campos_512_v4
+70/363636/campos_512_v4
+70/363642/campos_512_v4
+70/363643/campos_512_v4
+70/363650/campos_512_v4
+70/363651/campos_512_v4
+70/363659/campos_512_v4
+70/363660/campos_512_v4
+70/363664/campos_512_v4
+70/363688/campos_512_v4
+70/363691/campos_512_v4
+70/363698/campos_512_v4
+70/363706/campos_512_v4
+70/363711/campos_512_v4
+70/363712/campos_512_v4
+70/363714/campos_512_v4
+70/363720/campos_512_v4
+70/363727/campos_512_v4
+70/363731/campos_512_v4
+70/363734/campos_512_v4
+70/363741/campos_512_v4
+70/363742/campos_512_v4
+70/363755/campos_512_v4
+70/363760/campos_512_v4
+70/363763/campos_512_v4
+70/363766/campos_512_v4
+70/363778/campos_512_v4
+70/363779/campos_512_v4
+70/363781/campos_512_v4
+70/363785/campos_512_v4
+70/363795/campos_512_v4
+70/363796/campos_512_v4
+70/363797/campos_512_v4
+70/363802/campos_512_v4
+70/363812/campos_512_v4
+70/363828/campos_512_v4
+70/363829/campos_512_v4
+70/363844/campos_512_v4
+70/363846/campos_512_v4
+70/363853/campos_512_v4
+70/363871/campos_512_v4
+70/363877/campos_512_v4
+70/363880/campos_512_v4
+70/363883/campos_512_v4
+70/363886/campos_512_v4
+70/363889/campos_512_v4
+70/363908/campos_512_v4
+70/363910/campos_512_v4
+70/363911/campos_512_v4
+70/363932/campos_512_v4
+70/363938/campos_512_v4
+70/363940/campos_512_v4
+70/363955/campos_512_v4
+70/363977/campos_512_v4
+70/363979/campos_512_v4
+70/363994/campos_512_v4
+70/363997/campos_512_v4
+70/363999/campos_512_v4
+70/364030/campos_512_v4
+70/364037/campos_512_v4
+70/364055/campos_512_v4
+70/364057/campos_512_v4
+70/364065/campos_512_v4
+70/364088/campos_512_v4
+70/364112/campos_512_v4
+70/364118/campos_512_v4
+70/364130/campos_512_v4
+70/364133/campos_512_v4
+70/364143/campos_512_v4
+70/364146/campos_512_v4
+70/364147/campos_512_v4
+70/364148/campos_512_v4
+70/364176/campos_512_v4
+70/364191/campos_512_v4
+70/364215/campos_512_v4
+70/364225/campos_512_v4
+70/364234/campos_512_v4
+70/364236/campos_512_v4
+70/364270/campos_512_v4
+70/364273/campos_512_v4
+70/364280/campos_512_v4
+70/364289/campos_512_v4
+70/364290/campos_512_v4
+70/364291/campos_512_v4
+70/364293/campos_512_v4
+70/364300/campos_512_v4
+70/364304/campos_512_v4
+70/364313/campos_512_v4
+70/364324/campos_512_v4
+70/364327/campos_512_v4
+70/364354/campos_512_v4
+70/364356/campos_512_v4
+70/364358/campos_512_v4
+70/364360/campos_512_v4
+70/364387/campos_512_v4
+70/364390/campos_512_v4
+70/364394/campos_512_v4
+70/364395/campos_512_v4
+70/364397/campos_512_v4
+70/364399/campos_512_v4
+70/364401/campos_512_v4
+70/364413/campos_512_v4
+70/364424/campos_512_v4
+70/364440/campos_512_v4
+70/364451/campos_512_v4
+70/364456/campos_512_v4
+70/364457/campos_512_v4
+70/364470/campos_512_v4
+70/364480/campos_512_v4
+70/364486/campos_512_v4
+70/364491/campos_512_v4
+70/364492/campos_512_v4
+70/364496/campos_512_v4
+70/364498/campos_512_v4
+70/364499/campos_512_v4
+70/364511/campos_512_v4
+70/364514/campos_512_v4
+70/364527/campos_512_v4
+70/364533/campos_512_v4
+70/364534/campos_512_v4
+70/364554/campos_512_v4
+70/364559/campos_512_v4
+70/364565/campos_512_v4
+70/364569/campos_512_v4
+70/364571/campos_512_v4
+70/364580/campos_512_v4
+70/364586/campos_512_v4
+70/364598/campos_512_v4
+70/364605/campos_512_v4
+70/364613/campos_512_v4
+70/364621/campos_512_v4
+70/364625/campos_512_v4
+70/364630/campos_512_v4
+70/364655/campos_512_v4
+70/364663/campos_512_v4
+70/364672/campos_512_v4
+70/364689/campos_512_v4
+70/364692/campos_512_v4
+70/364693/campos_512_v4
+70/364700/campos_512_v4
+70/364724/campos_512_v4
+70/364752/campos_512_v4
+70/364775/campos_512_v4
+70/364788/campos_512_v4
+70/364797/campos_512_v4
+70/364811/campos_512_v4
+70/364813/campos_512_v4
+70/364814/campos_512_v4
+70/364823/campos_512_v4
+70/364844/campos_512_v4
+70/364856/campos_512_v4
+70/364866/campos_512_v4
+70/364871/campos_512_v4
+70/364872/campos_512_v4
+70/364873/campos_512_v4
+70/364893/campos_512_v4
+70/364898/campos_512_v4
+70/364902/campos_512_v4
+70/364905/campos_512_v4
+70/364931/campos_512_v4
+70/364934/campos_512_v4
+70/364938/campos_512_v4
+70/364964/campos_512_v4
+70/364973/campos_512_v4
+70/364975/campos_512_v4
+70/364982/campos_512_v4
+70/364994/campos_512_v4
+70/364996/campos_512_v4
+71/365010/campos_512_v4
+71/365011/campos_512_v4
+71/365015/campos_512_v4
+71/365016/campos_512_v4
+71/365018/campos_512_v4
+71/365036/campos_512_v4
+71/365038/campos_512_v4
+71/365040/campos_512_v4
+71/365048/campos_512_v4
+71/365080/campos_512_v4
+71/365109/campos_512_v4
+71/365113/campos_512_v4
+71/365131/campos_512_v4
+71/365142/campos_512_v4
+71/365143/campos_512_v4
+71/365152/campos_512_v4
+71/365156/campos_512_v4
+71/365159/campos_512_v4
+71/365162/campos_512_v4
+71/365175/campos_512_v4
+71/365184/campos_512_v4
+71/365200/campos_512_v4
+71/365201/campos_512_v4
+71/365213/campos_512_v4
+71/365214/campos_512_v4
+71/365234/campos_512_v4
+71/365235/campos_512_v4
+71/365240/campos_512_v4
+71/365261/campos_512_v4
+71/365263/campos_512_v4
+71/365265/campos_512_v4
+71/365271/campos_512_v4
+71/365283/campos_512_v4
+71/365285/campos_512_v4
+71/365286/campos_512_v4
+71/365296/campos_512_v4
+71/365301/campos_512_v4
+71/365303/campos_512_v4
+71/365305/campos_512_v4
+71/365308/campos_512_v4
+71/365324/campos_512_v4
+71/365329/campos_512_v4
+71/365332/campos_512_v4
+71/365337/campos_512_v4
+71/365339/campos_512_v4
+71/365351/campos_512_v4
+71/365360/campos_512_v4
+71/365362/campos_512_v4
+71/365372/campos_512_v4
+71/365383/campos_512_v4
+71/365384/campos_512_v4
+71/365390/campos_512_v4
+71/365400/campos_512_v4
+71/365401/campos_512_v4
+71/365407/campos_512_v4
+71/365410/campos_512_v4
+71/365416/campos_512_v4
+71/365418/campos_512_v4
+71/365435/campos_512_v4
+71/365444/campos_512_v4
+71/365449/campos_512_v4
+71/365460/campos_512_v4
+71/365461/campos_512_v4
+71/365467/campos_512_v4
+71/365473/campos_512_v4
+71/365476/campos_512_v4
+71/365487/campos_512_v4
+71/365488/campos_512_v4
+71/365491/campos_512_v4
+71/365503/campos_512_v4
+71/365514/campos_512_v4
+71/365526/campos_512_v4
+71/365528/campos_512_v4
+71/365538/campos_512_v4
+71/365554/campos_512_v4
+71/365558/campos_512_v4
+71/365569/campos_512_v4
+71/365570/campos_512_v4
+71/365572/campos_512_v4
+71/365573/campos_512_v4
+71/365574/campos_512_v4
+71/365582/campos_512_v4
+71/365589/campos_512_v4
+71/365596/campos_512_v4
+71/365600/campos_512_v4
+71/365605/campos_512_v4
+71/365607/campos_512_v4
+71/365616/campos_512_v4
+71/365621/campos_512_v4
+71/365642/campos_512_v4
+71/365681/campos_512_v4
+71/365692/campos_512_v4
+71/365706/campos_512_v4
+71/365714/campos_512_v4
+71/365732/campos_512_v4
+71/365740/campos_512_v4
+71/365742/campos_512_v4
+71/365744/campos_512_v4
+71/365748/campos_512_v4
+71/365754/campos_512_v4
+71/365766/campos_512_v4
+71/365767/campos_512_v4
+71/365772/campos_512_v4
+71/365790/campos_512_v4
+71/365812/campos_512_v4
+71/365827/campos_512_v4
+71/365836/campos_512_v4
+71/365838/campos_512_v4
+71/365850/campos_512_v4
+71/365852/campos_512_v4
+71/365877/campos_512_v4
+71/365878/campos_512_v4
+71/365888/campos_512_v4
+71/365931/campos_512_v4
+71/365939/campos_512_v4
+71/365955/campos_512_v4
+71/365956/campos_512_v4
+71/365963/campos_512_v4
+71/365964/campos_512_v4
+71/365982/campos_512_v4
+71/366008/campos_512_v4
+71/366022/campos_512_v4
+71/366035/campos_512_v4
+71/366042/campos_512_v4
+71/366043/campos_512_v4
+71/366077/campos_512_v4
+71/366081/campos_512_v4
+71/366085/campos_512_v4
+71/366089/campos_512_v4
+71/366096/campos_512_v4
+71/366102/campos_512_v4
+71/366105/campos_512_v4
+71/366117/campos_512_v4
+71/366118/campos_512_v4
+71/366136/campos_512_v4
+71/366138/campos_512_v4
+71/366142/campos_512_v4
+71/366147/campos_512_v4
+71/366158/campos_512_v4
+71/366168/campos_512_v4
+71/366171/campos_512_v4
+71/366172/campos_512_v4
+71/366184/campos_512_v4
+71/366205/campos_512_v4
+71/366213/campos_512_v4
+71/366256/campos_512_v4
+71/366260/campos_512_v4
+71/366263/campos_512_v4
+71/366273/campos_512_v4
+71/366282/campos_512_v4
+71/366283/campos_512_v4
+71/366287/campos_512_v4
+71/366295/campos_512_v4
+71/366310/campos_512_v4
+71/366314/campos_512_v4
+71/366323/campos_512_v4
+71/366332/campos_512_v4
+71/366338/campos_512_v4
+71/366346/campos_512_v4
+71/366353/campos_512_v4
+71/366356/campos_512_v4
+71/366360/campos_512_v4
+71/366366/campos_512_v4
+71/366367/campos_512_v4
+71/366370/campos_512_v4
+71/366376/campos_512_v4
+71/366385/campos_512_v4
+71/366388/campos_512_v4
+71/366393/campos_512_v4
+71/366398/campos_512_v4
+71/366400/campos_512_v4
+71/366416/campos_512_v4
+71/366420/campos_512_v4
+71/366427/campos_512_v4
+71/366433/campos_512_v4
+71/366445/campos_512_v4
+71/366455/campos_512_v4
+71/366456/campos_512_v4
+71/366473/campos_512_v4
+71/366478/campos_512_v4
+71/366505/campos_512_v4
+71/366506/campos_512_v4
+71/366527/campos_512_v4
+71/366596/campos_512_v4
+71/366600/campos_512_v4
+71/366601/campos_512_v4
+71/366614/campos_512_v4
+71/366619/campos_512_v4
+71/366623/campos_512_v4
+71/366627/campos_512_v4
+71/366628/campos_512_v4
+71/366629/campos_512_v4
+71/366631/campos_512_v4
+71/366633/campos_512_v4
+71/366645/campos_512_v4
+71/366648/campos_512_v4
+71/366676/campos_512_v4
+71/366685/campos_512_v4
+71/366688/campos_512_v4
+71/366690/campos_512_v4
+71/366693/campos_512_v4
+71/366696/campos_512_v4
+71/366711/campos_512_v4
+71/366727/campos_512_v4
+71/366737/campos_512_v4
+71/366746/campos_512_v4
+71/366749/campos_512_v4
+71/366757/campos_512_v4
+71/366758/campos_512_v4
+71/366765/campos_512_v4
+71/366769/campos_512_v4
+71/366779/campos_512_v4
+71/366791/campos_512_v4
+71/366794/campos_512_v4
+71/366811/campos_512_v4
+71/366834/campos_512_v4
+71/366835/campos_512_v4
+71/366842/campos_512_v4
+71/366849/campos_512_v4
+71/366855/campos_512_v4
+71/366858/campos_512_v4
+71/366878/campos_512_v4
+71/366879/campos_512_v4
+71/366880/campos_512_v4
+71/366897/campos_512_v4
+71/366912/campos_512_v4
+71/366919/campos_512_v4
+71/366941/campos_512_v4
+71/366945/campos_512_v4
+71/366948/campos_512_v4
+71/366973/campos_512_v4
+71/366974/campos_512_v4
+71/366987/campos_512_v4
+71/366998/campos_512_v4
+71/366999/campos_512_v4
+71/367005/campos_512_v4
+71/367008/campos_512_v4
+71/367043/campos_512_v4
+71/367044/campos_512_v4
+71/367045/campos_512_v4
+71/367081/campos_512_v4
+71/367084/campos_512_v4
+71/367092/campos_512_v4
+71/367095/campos_512_v4
+71/367100/campos_512_v4
+71/367105/campos_512_v4
+71/367107/campos_512_v4
+71/367109/campos_512_v4
+71/367110/campos_512_v4
+71/367113/campos_512_v4
+71/367118/campos_512_v4
+71/367133/campos_512_v4
+71/367138/campos_512_v4
+71/367157/campos_512_v4
+71/367163/campos_512_v4
+71/367164/campos_512_v4
+71/367171/campos_512_v4
+71/367187/campos_512_v4
+71/367191/campos_512_v4
+71/367203/campos_512_v4
+71/367204/campos_512_v4
+71/367213/campos_512_v4
+71/367223/campos_512_v4
+71/367224/campos_512_v4
+71/367234/campos_512_v4
+71/367236/campos_512_v4
+71/367254/campos_512_v4
+71/367265/campos_512_v4
+71/367267/campos_512_v4
+71/367271/campos_512_v4
+71/367272/campos_512_v4
+71/367274/campos_512_v4
+71/367285/campos_512_v4
+71/367286/campos_512_v4
+71/367296/campos_512_v4
+71/367299/campos_512_v4
+71/367301/campos_512_v4
+71/367303/campos_512_v4
+71/367304/campos_512_v4
+71/367317/campos_512_v4
+71/367326/campos_512_v4
+71/367327/campos_512_v4
+71/367333/campos_512_v4
+71/367342/campos_512_v4
+71/367369/campos_512_v4
+71/367379/campos_512_v4
+71/367380/campos_512_v4
+71/367394/campos_512_v4
+71/367401/campos_512_v4
+71/367417/campos_512_v4
+71/367419/campos_512_v4
+71/367426/campos_512_v4
+71/367432/campos_512_v4
+71/367443/campos_512_v4
+71/367465/campos_512_v4
+71/367497/campos_512_v4
+71/367501/campos_512_v4
+71/367503/campos_512_v4
+71/367509/campos_512_v4
+71/367517/campos_512_v4
+71/367526/campos_512_v4
+71/367528/campos_512_v4
+71/367530/campos_512_v4
+71/367556/campos_512_v4
+71/367564/campos_512_v4
+71/367565/campos_512_v4
+71/367566/campos_512_v4
+71/367568/campos_512_v4
+71/367580/campos_512_v4
+71/367587/campos_512_v4
+71/367615/campos_512_v4
+71/367618/campos_512_v4
+71/367619/campos_512_v4
+71/367620/campos_512_v4
+71/367641/campos_512_v4
+71/367643/campos_512_v4
+71/367649/campos_512_v4
+71/367652/campos_512_v4
+71/367680/campos_512_v4
+71/367688/campos_512_v4
+71/367691/campos_512_v4
+71/367706/campos_512_v4
+71/367726/campos_512_v4
+71/367729/campos_512_v4
+71/367737/campos_512_v4
+71/367740/campos_512_v4
+71/367749/campos_512_v4
+71/367750/campos_512_v4
+71/367754/campos_512_v4
+71/367770/campos_512_v4
+71/367776/campos_512_v4
+71/367795/campos_512_v4
+71/367797/campos_512_v4
+71/367799/campos_512_v4
+71/367817/campos_512_v4
+71/367822/campos_512_v4
+71/367842/campos_512_v4
+71/367849/campos_512_v4
+71/367860/campos_512_v4
+71/367862/campos_512_v4
+71/367863/campos_512_v4
+71/367885/campos_512_v4
+71/367898/campos_512_v4
+71/367902/campos_512_v4
+71/367908/campos_512_v4
+71/367913/campos_512_v4
+71/367918/campos_512_v4
+71/367943/campos_512_v4
+71/367953/campos_512_v4
+71/367978/campos_512_v4
+71/367987/campos_512_v4
+71/367993/campos_512_v4
+71/367995/campos_512_v4
+71/368002/campos_512_v4
+71/368004/campos_512_v4
+71/368006/campos_512_v4
+71/368013/campos_512_v4
+71/368019/campos_512_v4
+71/368026/campos_512_v4
+71/368027/campos_512_v4
+71/368029/campos_512_v4
+71/368032/campos_512_v4
+71/368038/campos_512_v4
+71/368055/campos_512_v4
+71/368058/campos_512_v4
+71/368075/campos_512_v4
+71/368087/campos_512_v4
+71/368106/campos_512_v4
+71/368112/campos_512_v4
+71/368129/campos_512_v4
+71/368130/campos_512_v4
+71/368146/campos_512_v4
+71/368149/campos_512_v4
+71/368169/campos_512_v4
+71/368175/campos_512_v4
+71/368182/campos_512_v4
+71/368228/campos_512_v4
+71/368229/campos_512_v4
+71/368241/campos_512_v4
+71/368247/campos_512_v4
+71/368264/campos_512_v4
+71/368270/campos_512_v4
+71/368272/campos_512_v4
+71/368285/campos_512_v4
+71/368299/campos_512_v4
+71/368301/campos_512_v4
+71/368311/campos_512_v4
+71/368338/campos_512_v4
+71/368339/campos_512_v4
+71/368341/campos_512_v4
+71/368354/campos_512_v4
+71/368356/campos_512_v4
+71/368361/campos_512_v4
+71/368375/campos_512_v4
+71/368377/campos_512_v4
+71/368390/campos_512_v4
+71/368395/campos_512_v4
+71/368398/campos_512_v4
+71/368400/campos_512_v4
+71/368416/campos_512_v4
+71/368434/campos_512_v4
+71/368435/campos_512_v4
+71/368443/campos_512_v4
+71/368460/campos_512_v4
+71/368471/campos_512_v4
+71/368474/campos_512_v4
+71/368476/campos_512_v4
+71/368489/campos_512_v4
+71/368500/campos_512_v4
+71/368504/campos_512_v4
+71/368507/campos_512_v4
+71/368513/campos_512_v4
+71/368520/campos_512_v4
+71/368521/campos_512_v4
+71/368530/campos_512_v4
+71/368532/campos_512_v4
+71/368536/campos_512_v4
+71/368538/campos_512_v4
+71/368539/campos_512_v4
+71/368549/campos_512_v4
+71/368562/campos_512_v4
+71/368586/campos_512_v4
+71/368595/campos_512_v4
+71/368614/campos_512_v4
+71/368625/campos_512_v4
+71/368659/campos_512_v4
+71/368667/campos_512_v4
+71/368677/campos_512_v4
+71/368692/campos_512_v4
+71/368704/campos_512_v4
+71/368712/campos_512_v4
+71/368716/campos_512_v4
+71/368718/campos_512_v4
+71/368719/campos_512_v4
+71/368727/campos_512_v4
+71/368744/campos_512_v4
+71/368754/campos_512_v4
+71/368757/campos_512_v4
+71/368760/campos_512_v4
+71/368764/campos_512_v4
+71/368766/campos_512_v4
+71/368775/campos_512_v4
+71/368781/campos_512_v4
+71/368786/campos_512_v4
+71/368791/campos_512_v4
+71/368792/campos_512_v4
+71/368802/campos_512_v4
+71/368810/campos_512_v4
+71/368818/campos_512_v4
+71/368821/campos_512_v4
+71/368824/campos_512_v4
+71/368825/campos_512_v4
+71/368828/campos_512_v4
+71/368837/campos_512_v4
+71/368842/campos_512_v4
+71/368847/campos_512_v4
+71/368852/campos_512_v4
+71/368858/campos_512_v4
+71/368868/campos_512_v4
+71/368871/campos_512_v4
+71/368873/campos_512_v4
+71/368877/campos_512_v4
+71/368878/campos_512_v4
+71/368885/campos_512_v4
+71/368892/campos_512_v4
+71/368896/campos_512_v4
+71/368905/campos_512_v4
+71/368910/campos_512_v4
+71/368915/campos_512_v4
+71/368920/campos_512_v4
+71/368935/campos_512_v4
+71/368938/campos_512_v4
+71/368940/campos_512_v4
+71/368951/campos_512_v4
+71/368965/campos_512_v4
+71/368979/campos_512_v4
+71/368987/campos_512_v4
+71/368991/campos_512_v4
+71/368993/campos_512_v4
+71/369000/campos_512_v4
+71/369005/campos_512_v4
+71/369006/campos_512_v4
+71/369010/campos_512_v4
+71/369031/campos_512_v4
+71/369045/campos_512_v4
+71/369054/campos_512_v4
+71/369075/campos_512_v4
+71/369076/campos_512_v4
+71/369091/campos_512_v4
+71/369101/campos_512_v4
+71/369106/campos_512_v4
+71/369116/campos_512_v4
+71/369127/campos_512_v4
+71/369136/campos_512_v4
+71/369141/campos_512_v4
+71/369149/campos_512_v4
+71/369151/campos_512_v4
+71/369155/campos_512_v4
+71/369157/campos_512_v4
+71/369159/campos_512_v4
+71/369165/campos_512_v4
+71/369172/campos_512_v4
+71/369177/campos_512_v4
+71/369192/campos_512_v4
+71/369202/campos_512_v4
+71/369204/campos_512_v4
+71/369207/campos_512_v4
+71/369215/campos_512_v4
+71/369224/campos_512_v4
+71/369235/campos_512_v4
+71/369240/campos_512_v4
+71/369241/campos_512_v4
+71/369261/campos_512_v4
+71/369262/campos_512_v4
+71/369276/campos_512_v4
+71/369285/campos_512_v4
+71/369300/campos_512_v4
+71/369307/campos_512_v4
+71/369308/campos_512_v4
+71/369309/campos_512_v4
+71/369316/campos_512_v4
+71/369339/campos_512_v4
+71/369344/campos_512_v4
+71/369354/campos_512_v4
+71/369359/campos_512_v4
+71/369368/campos_512_v4
+71/369377/campos_512_v4
+71/369382/campos_512_v4
+71/369385/campos_512_v4
+71/369406/campos_512_v4
+71/369414/campos_512_v4
+71/369418/campos_512_v4
+71/369433/campos_512_v4
+71/369434/campos_512_v4
+71/369456/campos_512_v4
+71/369460/campos_512_v4
+71/369461/campos_512_v4
+71/369464/campos_512_v4
+71/369480/campos_512_v4
+71/369486/campos_512_v4
+71/369501/campos_512_v4
+71/369516/campos_512_v4
+71/369528/campos_512_v4
+71/369542/campos_512_v4
+71/369544/campos_512_v4
+71/369549/campos_512_v4
+71/369550/campos_512_v4
+71/369555/campos_512_v4
+71/369561/campos_512_v4
+71/369567/campos_512_v4
+71/369573/campos_512_v4
+71/369582/campos_512_v4
+71/369583/campos_512_v4
+71/369590/campos_512_v4
+71/369601/campos_512_v4
+71/369616/campos_512_v4
+71/369617/campos_512_v4
+71/369619/campos_512_v4
+71/369622/campos_512_v4
+71/369631/campos_512_v4
+71/369653/campos_512_v4
+71/369672/campos_512_v4
+71/369674/campos_512_v4
+71/369685/campos_512_v4
+71/369690/campos_512_v4
+71/369696/campos_512_v4
+71/369703/campos_512_v4
+71/369704/campos_512_v4
+71/369707/campos_512_v4
+71/369709/campos_512_v4
+71/369727/campos_512_v4
+71/369728/campos_512_v4
+71/369730/campos_512_v4
+71/369734/campos_512_v4
+71/369735/campos_512_v4
+71/369741/campos_512_v4
+71/369744/campos_512_v4
+71/369773/campos_512_v4
+71/369797/campos_512_v4
+71/369801/campos_512_v4
+71/369802/campos_512_v4
+71/369808/campos_512_v4
+71/369813/campos_512_v4
+71/369821/campos_512_v4
+71/369822/campos_512_v4
+71/369824/campos_512_v4
+71/369826/campos_512_v4
+71/369846/campos_512_v4
+71/369856/campos_512_v4
+71/369872/campos_512_v4
+71/369873/campos_512_v4
+71/369883/campos_512_v4
+71/369885/campos_512_v4
+71/369886/campos_512_v4
+72/370227/campos_512_v4
+72/370228/campos_512_v4
+72/370239/campos_512_v4
+72/370242/campos_512_v4
+72/370253/campos_512_v4
+72/370254/campos_512_v4
+72/370256/campos_512_v4
+72/370270/campos_512_v4
+72/370271/campos_512_v4
+72/370275/campos_512_v4
+72/370279/campos_512_v4
+72/370290/campos_512_v4
+72/370299/campos_512_v4
+72/370302/campos_512_v4
+72/370322/campos_512_v4
+72/370327/campos_512_v4
+72/370335/campos_512_v4
+72/370338/campos_512_v4
+72/370346/campos_512_v4
+72/370349/campos_512_v4
+72/370350/campos_512_v4
+72/370360/campos_512_v4
+72/370363/campos_512_v4
+72/370382/campos_512_v4
+72/370403/campos_512_v4
+72/370408/campos_512_v4
+72/370410/campos_512_v4
+72/370418/campos_512_v4
+72/370420/campos_512_v4
+72/370423/campos_512_v4
+72/370424/campos_512_v4
+72/370431/campos_512_v4
+72/370446/campos_512_v4
+72/370448/campos_512_v4
+72/370466/campos_512_v4
+72/370482/campos_512_v4
+72/370491/campos_512_v4
+72/370497/campos_512_v4
+72/370509/campos_512_v4
+72/370515/campos_512_v4
+72/370526/campos_512_v4
+72/370548/campos_512_v4
+72/370551/campos_512_v4
+72/370561/campos_512_v4
+72/370566/campos_512_v4
+72/370568/campos_512_v4
+72/370570/campos_512_v4
+72/370591/campos_512_v4
+72/370613/campos_512_v4
+72/370621/campos_512_v4
+72/370644/campos_512_v4
+72/370650/campos_512_v4
+72/370654/campos_512_v4
+72/370663/campos_512_v4
+72/370678/campos_512_v4
+72/370696/campos_512_v4
+72/370706/campos_512_v4
+72/370711/campos_512_v4
+72/370716/campos_512_v4
+72/370724/campos_512_v4
+72/370725/campos_512_v4
+72/370730/campos_512_v4
+72/370761/campos_512_v4
+72/370765/campos_512_v4
+72/370766/campos_512_v4
+72/370771/campos_512_v4
+72/370774/campos_512_v4
+72/370790/campos_512_v4
+72/370791/campos_512_v4
+72/370817/campos_512_v4
+72/370824/campos_512_v4
+72/370827/campos_512_v4
+72/370828/campos_512_v4
+72/370829/campos_512_v4
+72/370839/campos_512_v4
+72/370843/campos_512_v4
+72/370845/campos_512_v4
+72/370866/campos_512_v4
+72/370868/campos_512_v4
+72/370898/campos_512_v4
+72/370900/campos_512_v4
+72/370916/campos_512_v4
+72/370929/campos_512_v4
+72/370945/campos_512_v4
+72/370953/campos_512_v4
+72/370957/campos_512_v4
+72/370964/campos_512_v4
+72/370973/campos_512_v4
+72/370974/campos_512_v4
+72/370979/campos_512_v4
+72/370983/campos_512_v4
+72/370991/campos_512_v4
+72/370992/campos_512_v4
+72/370998/campos_512_v4
+72/370999/campos_512_v4
+72/371003/campos_512_v4
+72/371006/campos_512_v4
+72/371014/campos_512_v4
+72/371015/campos_512_v4
+72/371046/campos_512_v4
+72/371049/campos_512_v4
+72/371055/campos_512_v4
+72/371076/campos_512_v4
+72/371080/campos_512_v4
+72/371088/campos_512_v4
+72/371111/campos_512_v4
+72/371116/campos_512_v4
+72/371124/campos_512_v4
+72/371127/campos_512_v4
+72/371143/campos_512_v4
+72/371148/campos_512_v4
+72/371152/campos_512_v4
+72/371163/campos_512_v4
+72/371177/campos_512_v4
+72/371185/campos_512_v4
+72/371193/campos_512_v4
+72/371204/campos_512_v4
+72/371218/campos_512_v4
+72/371227/campos_512_v4
+72/371236/campos_512_v4
+72/371262/campos_512_v4
+72/371288/campos_512_v4
+72/371295/campos_512_v4
+72/371297/campos_512_v4
+72/371304/campos_512_v4
+72/371307/campos_512_v4
+72/371316/campos_512_v4
+72/371318/campos_512_v4
+72/371325/campos_512_v4
+72/371340/campos_512_v4
+72/371341/campos_512_v4
+72/371347/campos_512_v4
+72/371356/campos_512_v4
+72/371382/campos_512_v4
+72/371389/campos_512_v4
+72/371391/campos_512_v4
+72/371400/campos_512_v4
+72/371406/campos_512_v4
+72/371414/campos_512_v4
+72/371418/campos_512_v4
+72/371419/campos_512_v4
+72/371442/campos_512_v4
+72/371446/campos_512_v4
+72/371455/campos_512_v4
+72/371462/campos_512_v4
+72/371465/campos_512_v4
+72/371468/campos_512_v4
+72/371484/campos_512_v4
+72/371501/campos_512_v4
+72/371504/campos_512_v4
+72/371518/campos_512_v4
+72/371524/campos_512_v4
+72/371527/campos_512_v4
+72/371530/campos_512_v4
+72/371533/campos_512_v4
+72/371535/campos_512_v4
+72/371537/campos_512_v4
+72/371543/campos_512_v4
+72/371549/campos_512_v4
+72/371554/campos_512_v4
+72/371565/campos_512_v4
+72/371566/campos_512_v4
+72/371574/campos_512_v4
+72/371575/campos_512_v4
+72/371577/campos_512_v4
+72/371588/campos_512_v4
+72/371603/campos_512_v4
+72/371609/campos_512_v4
+72/371612/campos_512_v4
+72/371617/campos_512_v4
+72/371624/campos_512_v4
+72/371636/campos_512_v4
+72/371664/campos_512_v4
+72/371676/campos_512_v4
+72/371681/campos_512_v4
+72/371683/campos_512_v4
+72/371684/campos_512_v4
+72/371687/campos_512_v4
+72/371703/campos_512_v4
+72/371715/campos_512_v4
+72/371739/campos_512_v4
+72/371754/campos_512_v4
+72/371755/campos_512_v4
+72/371772/campos_512_v4
+72/371788/campos_512_v4
+72/371789/campos_512_v4
+72/371799/campos_512_v4
+72/371807/campos_512_v4
+72/371826/campos_512_v4
+72/371829/campos_512_v4
+72/371839/campos_512_v4
+72/371845/campos_512_v4
+72/371848/campos_512_v4
+72/371850/campos_512_v4
+72/371862/campos_512_v4
+72/371895/campos_512_v4
+72/371898/campos_512_v4
+72/371900/campos_512_v4
+72/371926/campos_512_v4
+72/371930/campos_512_v4
+72/371942/campos_512_v4
+72/371953/campos_512_v4
+72/371954/campos_512_v4
+72/371963/campos_512_v4
+72/371967/campos_512_v4
+72/371968/campos_512_v4
+72/371969/campos_512_v4
+72/371970/campos_512_v4
+72/371971/campos_512_v4
+72/371985/campos_512_v4
+72/371999/campos_512_v4
+72/372009/campos_512_v4
+72/372017/campos_512_v4
+72/372023/campos_512_v4
+72/372028/campos_512_v4
+72/372036/campos_512_v4
+72/372044/campos_512_v4
+72/372050/campos_512_v4
+72/372054/campos_512_v4
+72/372065/campos_512_v4
+72/372073/campos_512_v4
+72/372077/campos_512_v4
+72/372078/campos_512_v4
+72/372083/campos_512_v4
+72/372092/campos_512_v4
+72/372096/campos_512_v4
+72/372126/campos_512_v4
+72/372129/campos_512_v4
+72/372165/campos_512_v4
+72/372170/campos_512_v4
+72/372191/campos_512_v4
+72/372208/campos_512_v4
+72/372212/campos_512_v4
+72/372213/campos_512_v4
+72/372218/campos_512_v4
+72/372224/campos_512_v4
+72/372233/campos_512_v4
+72/372236/campos_512_v4
+72/372264/campos_512_v4
+72/372269/campos_512_v4
+72/372277/campos_512_v4
+72/372279/campos_512_v4
+72/372281/campos_512_v4
+72/372290/campos_512_v4
+72/372294/campos_512_v4
+72/372300/campos_512_v4
+72/372303/campos_512_v4
+72/372306/campos_512_v4
+72/372309/campos_512_v4
+72/372330/campos_512_v4
+72/372341/campos_512_v4
+72/372352/campos_512_v4
+72/372360/campos_512_v4
+72/372371/campos_512_v4
+72/372374/campos_512_v4
+72/372402/campos_512_v4
+72/372411/campos_512_v4
+72/372413/campos_512_v4
+72/372414/campos_512_v4
+72/372421/campos_512_v4
+72/372428/campos_512_v4
+72/372433/campos_512_v4
+72/372434/campos_512_v4
+72/372435/campos_512_v4
+72/372448/campos_512_v4
+72/372461/campos_512_v4
+72/372466/campos_512_v4
+72/372475/campos_512_v4
+72/372479/campos_512_v4
+72/372488/campos_512_v4
+72/372493/campos_512_v4
+72/372499/campos_512_v4
+72/372502/campos_512_v4
+72/372506/campos_512_v4
+72/372516/campos_512_v4
+72/372521/campos_512_v4
+72/372522/campos_512_v4
+72/372532/campos_512_v4
+72/372541/campos_512_v4
+72/372543/campos_512_v4
+72/372546/campos_512_v4
+72/372552/campos_512_v4
+72/372572/campos_512_v4
+72/372573/campos_512_v4
+72/372586/campos_512_v4
+72/372587/campos_512_v4
+72/372592/campos_512_v4
+72/372621/campos_512_v4
+72/372626/campos_512_v4
+72/372638/campos_512_v4
+72/372647/campos_512_v4
+72/372692/campos_512_v4
+72/372696/campos_512_v4
+72/372706/campos_512_v4
+72/372707/campos_512_v4
+72/372715/campos_512_v4
+72/372721/campos_512_v4
+72/372725/campos_512_v4
+72/372738/campos_512_v4
+72/372746/campos_512_v4
+72/372751/campos_512_v4
+72/372764/campos_512_v4
+72/372771/campos_512_v4
+72/372778/campos_512_v4
+72/372779/campos_512_v4
+72/372782/campos_512_v4
+72/372799/campos_512_v4
+72/372802/campos_512_v4
+72/372815/campos_512_v4
+72/372828/campos_512_v4
+72/372849/campos_512_v4
+72/372851/campos_512_v4
+72/372863/campos_512_v4
+72/372864/campos_512_v4
+72/372871/campos_512_v4
+72/372872/campos_512_v4
+72/372873/campos_512_v4
+72/372874/campos_512_v4
+72/372876/campos_512_v4
+72/372877/campos_512_v4
+72/372879/campos_512_v4
+72/372895/campos_512_v4
+72/372905/campos_512_v4
+72/372907/campos_512_v4
+72/372918/campos_512_v4
+72/372920/campos_512_v4
+72/372931/campos_512_v4
+72/372936/campos_512_v4
+72/372940/campos_512_v4
+72/372943/campos_512_v4
+72/372952/campos_512_v4
+72/372960/campos_512_v4
+72/372984/campos_512_v4
+72/372987/campos_512_v4
+72/373000/campos_512_v4
+72/373008/campos_512_v4
+72/373025/campos_512_v4
+72/373033/campos_512_v4
+72/373038/campos_512_v4
+72/373068/campos_512_v4
+72/373080/campos_512_v4
+72/373081/campos_512_v4
+72/373091/campos_512_v4
+72/373094/campos_512_v4
+72/373096/campos_512_v4
+72/373099/campos_512_v4
+72/373113/campos_512_v4
+72/373121/campos_512_v4
+72/373127/campos_512_v4
+72/373130/campos_512_v4
+72/373132/campos_512_v4
+72/373135/campos_512_v4
+72/373147/campos_512_v4
+72/373150/campos_512_v4
+72/373167/campos_512_v4
+72/373181/campos_512_v4
+72/373182/campos_512_v4
+72/373202/campos_512_v4
+72/373209/campos_512_v4
+72/373220/campos_512_v4
+72/373259/campos_512_v4
+72/373266/campos_512_v4
+72/373272/campos_512_v4
+72/373274/campos_512_v4
+72/373295/campos_512_v4
+72/373301/campos_512_v4
+72/373314/campos_512_v4
+72/373322/campos_512_v4
+72/373332/campos_512_v4
+72/373367/campos_512_v4
+72/373368/campos_512_v4
+72/373375/campos_512_v4
+72/373378/campos_512_v4
+72/373382/campos_512_v4
+72/373385/campos_512_v4
+72/373389/campos_512_v4
+72/373396/campos_512_v4
+72/373399/campos_512_v4
+72/373401/campos_512_v4
+72/373429/campos_512_v4
+72/373449/campos_512_v4
+72/373461/campos_512_v4
+72/373470/campos_512_v4
+72/373486/campos_512_v4
+72/373504/campos_512_v4
+72/373507/campos_512_v4
+72/373518/campos_512_v4
+72/373521/campos_512_v4
+72/373556/campos_512_v4
+72/373569/campos_512_v4
+72/373577/campos_512_v4
+72/373585/campos_512_v4
+72/373588/campos_512_v4
+72/373596/campos_512_v4
+72/373609/campos_512_v4
+72/373623/campos_512_v4
+72/373625/campos_512_v4
+72/373638/campos_512_v4
+72/373660/campos_512_v4
+72/373664/campos_512_v4
+72/373666/campos_512_v4
+72/373668/campos_512_v4
+72/373676/campos_512_v4
+72/373684/campos_512_v4
+72/373687/campos_512_v4
+72/373690/campos_512_v4
+72/373694/campos_512_v4
+72/373696/campos_512_v4
+72/373698/campos_512_v4
+72/373699/campos_512_v4
+72/373718/campos_512_v4
+72/373728/campos_512_v4
+72/373749/campos_512_v4
+72/373753/campos_512_v4
+72/373757/campos_512_v4
+72/373759/campos_512_v4
+72/373774/campos_512_v4
+72/373782/campos_512_v4
+72/373797/campos_512_v4
+72/373798/campos_512_v4
+72/373815/campos_512_v4
+72/373822/campos_512_v4
+72/373827/campos_512_v4
+72/373830/campos_512_v4
+72/373840/campos_512_v4
+72/373845/campos_512_v4
+72/373851/campos_512_v4
+72/373852/campos_512_v4
+72/373862/campos_512_v4
+72/373865/campos_512_v4
+72/373875/campos_512_v4
+72/373879/campos_512_v4
+72/373885/campos_512_v4
+72/373889/campos_512_v4
+72/373903/campos_512_v4
+72/373905/campos_512_v4
+72/373907/campos_512_v4
+72/373910/campos_512_v4
+72/373916/campos_512_v4
+72/373917/campos_512_v4
+72/373919/campos_512_v4
+72/373928/campos_512_v4
+72/373931/campos_512_v4
+72/373933/campos_512_v4
+72/373952/campos_512_v4
+72/373956/campos_512_v4
+72/373963/campos_512_v4
+72/373975/campos_512_v4
+72/373999/campos_512_v4
+72/374001/campos_512_v4
+72/374005/campos_512_v4
+72/374017/campos_512_v4
+72/374019/campos_512_v4
+72/374023/campos_512_v4
+72/374030/campos_512_v4
+72/374031/campos_512_v4
+72/374032/campos_512_v4
+72/374036/campos_512_v4
+72/374037/campos_512_v4
+72/374041/campos_512_v4
+72/374047/campos_512_v4
+72/374052/campos_512_v4
+72/374057/campos_512_v4
+72/374081/campos_512_v4
+72/374085/campos_512_v4
+72/374094/campos_512_v4
+72/374118/campos_512_v4
+72/374125/campos_512_v4
+72/374142/campos_512_v4
+72/374164/campos_512_v4
+72/374168/campos_512_v4
+72/374171/campos_512_v4
+72/374173/campos_512_v4
+72/374175/campos_512_v4
+72/374178/campos_512_v4
+72/374183/campos_512_v4
+72/374184/campos_512_v4
+72/374187/campos_512_v4
+72/374199/campos_512_v4
+72/374207/campos_512_v4
+72/374224/campos_512_v4
+72/374225/campos_512_v4
+72/374236/campos_512_v4
+72/374252/campos_512_v4
+72/374258/campos_512_v4
+72/374277/campos_512_v4
+72/374283/campos_512_v4
+72/374284/campos_512_v4
+72/374288/campos_512_v4
+72/374292/campos_512_v4
+72/374295/campos_512_v4
+72/374305/campos_512_v4
+72/374310/campos_512_v4
+72/374325/campos_512_v4
+72/374335/campos_512_v4
+72/374336/campos_512_v4
+72/374343/campos_512_v4
+72/374350/campos_512_v4
+72/374356/campos_512_v4
+72/374365/campos_512_v4
+72/374369/campos_512_v4
+72/374372/campos_512_v4
+72/374376/campos_512_v4
+72/374379/campos_512_v4
+72/374381/campos_512_v4
+72/374391/campos_512_v4
+72/374393/campos_512_v4
+72/374394/campos_512_v4
+72/374399/campos_512_v4
+72/374401/campos_512_v4
+72/374414/campos_512_v4
+72/374417/campos_512_v4
+72/374424/campos_512_v4
+72/374428/campos_512_v4
+72/374431/campos_512_v4
+72/374434/campos_512_v4
+72/374439/campos_512_v4
+72/374444/campos_512_v4
+72/374457/campos_512_v4
+72/374458/campos_512_v4
+72/374468/campos_512_v4
+72/374471/campos_512_v4
+72/374486/campos_512_v4
+72/374491/campos_512_v4
+72/374502/campos_512_v4
+72/374503/campos_512_v4
+72/374511/campos_512_v4
+72/374524/campos_512_v4
+72/374529/campos_512_v4
+72/374536/campos_512_v4
+72/374546/campos_512_v4
+72/374548/campos_512_v4
+72/374556/campos_512_v4
+72/374562/campos_512_v4
+72/374573/campos_512_v4
+72/374590/campos_512_v4
+72/374592/campos_512_v4
+72/374599/campos_512_v4
+72/374605/campos_512_v4
+72/374619/campos_512_v4
+72/374622/campos_512_v4
+72/374628/campos_512_v4
+72/374640/campos_512_v4
+72/374642/campos_512_v4
+72/374645/campos_512_v4
+72/374655/campos_512_v4
+72/374663/campos_512_v4
+72/374669/campos_512_v4
+72/374672/campos_512_v4
+72/374677/campos_512_v4
+72/374698/campos_512_v4
+72/374705/campos_512_v4
+72/374709/campos_512_v4
+72/374711/campos_512_v4
+72/374714/campos_512_v4
+72/374729/campos_512_v4
+72/374734/campos_512_v4
+72/374738/campos_512_v4
+72/374753/campos_512_v4
+72/374767/campos_512_v4
+72/374781/campos_512_v4
+72/374786/campos_512_v4
+72/374787/campos_512_v4
+72/374806/campos_512_v4
+72/374813/campos_512_v4
+72/374818/campos_512_v4
+72/374819/campos_512_v4
+72/374839/campos_512_v4
+72/374845/campos_512_v4
+72/374849/campos_512_v4
+72/374869/campos_512_v4
+72/374873/campos_512_v4
+72/374876/campos_512_v4
+72/374891/campos_512_v4
+72/374897/campos_512_v4
+72/374906/campos_512_v4
+72/374912/campos_512_v4
+72/374920/campos_512_v4
+72/374921/campos_512_v4
+72/374936/campos_512_v4
+72/374938/campos_512_v4
+72/374943/campos_512_v4
+72/374949/campos_512_v4
+72/374957/campos_512_v4
+72/374961/campos_512_v4
+72/374963/campos_512_v4
+72/374974/campos_512_v4
+72/374979/campos_512_v4
+72/374987/campos_512_v4
+72/374992/campos_512_v4
+73/375006/campos_512_v4
+73/375017/campos_512_v4
+73/375026/campos_512_v4
+73/375029/campos_512_v4
+73/375054/campos_512_v4
+73/375060/campos_512_v4
+73/375080/campos_512_v4
+73/375091/campos_512_v4
+73/375126/campos_512_v4
+73/375131/campos_512_v4
+73/375144/campos_512_v4
+73/375161/campos_512_v4
+73/375166/campos_512_v4
+73/375171/campos_512_v4
+73/375178/campos_512_v4
+73/375182/campos_512_v4
+73/375188/campos_512_v4
+73/375190/campos_512_v4
+73/375191/campos_512_v4
+73/375213/campos_512_v4
+73/375229/campos_512_v4
+73/375232/campos_512_v4
+73/375235/campos_512_v4
+73/375236/campos_512_v4
+73/375237/campos_512_v4
+73/375244/campos_512_v4
+73/375247/campos_512_v4
+73/375249/campos_512_v4
+73/375263/campos_512_v4
+73/375272/campos_512_v4
+73/375273/campos_512_v4
+73/375274/campos_512_v4
+73/375276/campos_512_v4
+73/375294/campos_512_v4
+73/375301/campos_512_v4
+73/375309/campos_512_v4
+73/375320/campos_512_v4
+73/375324/campos_512_v4
+73/375325/campos_512_v4
+73/375329/campos_512_v4
+73/375332/campos_512_v4
+73/375333/campos_512_v4
+73/375361/campos_512_v4
+73/375366/campos_512_v4
+73/375367/campos_512_v4
+73/375374/campos_512_v4
+73/375383/campos_512_v4
+73/375388/campos_512_v4
+73/375389/campos_512_v4
+73/375399/campos_512_v4
+73/375405/campos_512_v4
+73/375407/campos_512_v4
+73/375420/campos_512_v4
+73/375426/campos_512_v4
+73/375428/campos_512_v4
+73/375438/campos_512_v4
+73/375467/campos_512_v4
+73/375480/campos_512_v4
+73/375485/campos_512_v4
+73/375494/campos_512_v4
+73/375499/campos_512_v4
+73/375505/campos_512_v4
+73/375506/campos_512_v4
+73/375514/campos_512_v4
+73/375530/campos_512_v4
+73/375536/campos_512_v4
+73/375549/campos_512_v4
+73/375578/campos_512_v4
+73/375580/campos_512_v4
+73/375581/campos_512_v4
+73/375582/campos_512_v4
+73/375584/campos_512_v4
+73/375585/campos_512_v4
+73/375591/campos_512_v4
+73/375604/campos_512_v4
+73/375608/campos_512_v4
+73/375614/campos_512_v4
+73/375628/campos_512_v4
+73/375633/campos_512_v4
+73/375635/campos_512_v4
+73/375663/campos_512_v4
+73/375666/campos_512_v4
+73/375671/campos_512_v4
+73/375683/campos_512_v4
+73/375686/campos_512_v4
+73/375689/campos_512_v4
+73/375703/campos_512_v4
+73/375712/campos_512_v4
+73/375720/campos_512_v4
+73/375722/campos_512_v4
+73/375736/campos_512_v4
+73/375774/campos_512_v4
+73/375776/campos_512_v4
+73/375782/campos_512_v4
+73/375788/campos_512_v4
+73/375794/campos_512_v4
+73/375796/campos_512_v4
+73/375797/campos_512_v4
+73/375801/campos_512_v4
+73/375816/campos_512_v4
+73/375837/campos_512_v4
+73/375843/campos_512_v4
+73/375873/campos_512_v4
+73/375874/campos_512_v4
+73/375876/campos_512_v4
+73/375879/campos_512_v4
+73/375881/campos_512_v4
+73/375908/campos_512_v4
+73/375909/campos_512_v4
+73/375923/campos_512_v4
+73/375933/campos_512_v4
+73/375945/campos_512_v4
+73/375951/campos_512_v4
+73/375959/campos_512_v4
+73/375964/campos_512_v4
+73/375966/campos_512_v4
+73/375969/campos_512_v4
+73/375970/campos_512_v4
+73/375973/campos_512_v4
+73/375977/campos_512_v4
+73/375991/campos_512_v4
+73/375993/campos_512_v4
+73/376015/campos_512_v4
+73/376035/campos_512_v4
+73/376042/campos_512_v4
+73/376043/campos_512_v4
+73/376049/campos_512_v4
+73/376052/campos_512_v4
+73/376058/campos_512_v4
+73/376060/campos_512_v4
+73/376061/campos_512_v4
+73/376080/campos_512_v4
+73/376082/campos_512_v4
+73/376094/campos_512_v4
+73/376098/campos_512_v4
+73/376102/campos_512_v4
+73/376106/campos_512_v4
+73/376109/campos_512_v4
+73/376119/campos_512_v4
+73/376121/campos_512_v4
+73/376131/campos_512_v4
+73/376132/campos_512_v4
+73/376135/campos_512_v4
+73/376144/campos_512_v4
+73/376157/campos_512_v4
+73/376161/campos_512_v4
+73/376163/campos_512_v4
+73/376164/campos_512_v4
+73/376168/campos_512_v4
+73/376173/campos_512_v4
+73/376175/campos_512_v4
+73/376184/campos_512_v4
+73/376193/campos_512_v4
+73/376206/campos_512_v4
+73/376208/campos_512_v4
+73/376228/campos_512_v4
+73/376229/campos_512_v4
+73/376234/campos_512_v4
+73/376248/campos_512_v4
+73/376250/campos_512_v4
+73/376255/campos_512_v4
+73/376257/campos_512_v4
+73/376280/campos_512_v4
+73/376281/campos_512_v4
+73/376288/campos_512_v4
+73/376298/campos_512_v4
+73/376301/campos_512_v4
+73/376317/campos_512_v4
+73/376323/campos_512_v4
+73/376335/campos_512_v4
+73/376340/campos_512_v4
+73/376343/campos_512_v4
+73/376347/campos_512_v4
+73/376361/campos_512_v4
+73/376378/campos_512_v4
+73/376393/campos_512_v4
+73/376401/campos_512_v4
+73/376405/campos_512_v4
+73/376426/campos_512_v4
+73/376434/campos_512_v4
+73/376440/campos_512_v4
+73/376442/campos_512_v4
+73/376454/campos_512_v4
+73/376462/campos_512_v4
+73/376466/campos_512_v4
+73/376469/campos_512_v4
+73/376475/campos_512_v4
+73/376479/campos_512_v4
+73/376494/campos_512_v4
+73/376501/campos_512_v4
+73/376506/campos_512_v4
+73/376509/campos_512_v4
+73/376512/campos_512_v4
+73/376522/campos_512_v4
+73/376523/campos_512_v4
+73/376525/campos_512_v4
+73/376528/campos_512_v4
+73/376535/campos_512_v4
+73/376537/campos_512_v4
+73/376540/campos_512_v4
+73/376553/campos_512_v4
+73/376565/campos_512_v4
+73/376573/campos_512_v4
+73/376575/campos_512_v4
+73/376578/campos_512_v4
+73/376581/campos_512_v4
+73/376586/campos_512_v4
+73/376587/campos_512_v4
+73/376591/campos_512_v4
+73/376592/campos_512_v4
+73/376595/campos_512_v4
+73/376597/campos_512_v4
+73/376602/campos_512_v4
+73/376638/campos_512_v4
+73/376639/campos_512_v4
+73/376641/campos_512_v4
+73/376668/campos_512_v4
+73/376682/campos_512_v4
+73/376692/campos_512_v4
+73/376693/campos_512_v4
+73/376694/campos_512_v4
+73/376710/campos_512_v4
+73/376711/campos_512_v4
+73/376724/campos_512_v4
+73/376725/campos_512_v4
+73/376726/campos_512_v4
+73/376727/campos_512_v4
+73/376731/campos_512_v4
+73/376738/campos_512_v4
+73/376753/campos_512_v4
+73/376756/campos_512_v4
+73/376775/campos_512_v4
+73/376791/campos_512_v4
+73/376792/campos_512_v4
+73/376793/campos_512_v4
+73/376796/campos_512_v4
+73/376799/campos_512_v4
+73/376802/campos_512_v4
+73/376813/campos_512_v4
+73/376821/campos_512_v4
+73/376825/campos_512_v4
+73/376829/campos_512_v4
+73/376832/campos_512_v4
+73/376837/campos_512_v4
+73/376843/campos_512_v4
+73/376855/campos_512_v4
+73/376877/campos_512_v4
+73/376884/campos_512_v4
+73/376886/campos_512_v4
+73/376887/campos_512_v4
+73/376892/campos_512_v4
+73/376902/campos_512_v4
+73/376910/campos_512_v4
+73/376913/campos_512_v4
+73/376915/campos_512_v4
+73/376961/campos_512_v4
+73/376963/campos_512_v4
+73/376993/campos_512_v4
+73/377001/campos_512_v4
+73/377014/campos_512_v4
+73/377019/campos_512_v4
+73/377022/campos_512_v4
+73/377031/campos_512_v4
+73/377038/campos_512_v4
+73/377071/campos_512_v4
+73/377090/campos_512_v4
+73/377104/campos_512_v4
+73/377105/campos_512_v4
+73/377112/campos_512_v4
+73/377113/campos_512_v4
+73/377121/campos_512_v4
+73/377123/campos_512_v4
+73/377125/campos_512_v4
+73/377135/campos_512_v4
+73/377141/campos_512_v4
+73/377160/campos_512_v4
+73/377166/campos_512_v4
+73/377171/campos_512_v4
+73/377172/campos_512_v4
+73/377175/campos_512_v4
+73/377176/campos_512_v4
+73/377177/campos_512_v4
+73/377180/campos_512_v4
+73/377183/campos_512_v4
+73/377206/campos_512_v4
+73/377214/campos_512_v4
+73/377216/campos_512_v4
+73/377218/campos_512_v4
+73/377228/campos_512_v4
+73/377233/campos_512_v4
+73/377238/campos_512_v4
+73/377245/campos_512_v4
+73/377247/campos_512_v4
+73/377278/campos_512_v4
+73/377280/campos_512_v4
+73/377287/campos_512_v4
+73/377309/campos_512_v4
+73/377311/campos_512_v4
+73/377313/campos_512_v4
+73/377322/campos_512_v4
+73/377325/campos_512_v4
+73/377336/campos_512_v4
+73/377340/campos_512_v4
+73/377353/campos_512_v4
+73/377358/campos_512_v4
+73/377359/campos_512_v4
+73/377362/campos_512_v4
+73/377371/campos_512_v4
+73/377382/campos_512_v4
+73/377384/campos_512_v4
+73/377386/campos_512_v4
+73/377395/campos_512_v4
+73/377402/campos_512_v4
+73/377404/campos_512_v4
+73/377409/campos_512_v4
+73/377415/campos_512_v4
+73/377416/campos_512_v4
+73/377429/campos_512_v4
+73/377431/campos_512_v4
+73/377433/campos_512_v4
+73/377446/campos_512_v4
+73/377465/campos_512_v4
+73/377482/campos_512_v4
+73/377515/campos_512_v4
+73/377528/campos_512_v4
+73/377532/campos_512_v4
+73/377534/campos_512_v4
+73/377550/campos_512_v4
+73/377551/campos_512_v4
+73/377563/campos_512_v4
+73/377575/campos_512_v4
+73/377604/campos_512_v4
+73/377605/campos_512_v4
+73/377617/campos_512_v4
+73/377619/campos_512_v4
+73/377630/campos_512_v4
+73/377635/campos_512_v4
+73/377640/campos_512_v4
+73/377648/campos_512_v4
+73/377649/campos_512_v4
+73/377656/campos_512_v4
+73/377666/campos_512_v4
+73/377669/campos_512_v4
+73/377679/campos_512_v4
+73/377683/campos_512_v4
+73/377688/campos_512_v4
+73/377694/campos_512_v4
+73/377701/campos_512_v4
+73/377709/campos_512_v4
+73/377720/campos_512_v4
+73/377733/campos_512_v4
+73/377739/campos_512_v4
+73/377754/campos_512_v4
+73/377767/campos_512_v4
+73/377792/campos_512_v4
+73/377801/campos_512_v4
+73/377802/campos_512_v4
+73/377810/campos_512_v4
+73/377812/campos_512_v4
+73/377818/campos_512_v4
+73/377827/campos_512_v4
+73/377861/campos_512_v4
+73/377864/campos_512_v4
+73/377872/campos_512_v4
+73/377881/campos_512_v4
+73/377893/campos_512_v4
+73/377918/campos_512_v4
+73/377919/campos_512_v4
+73/377948/campos_512_v4
+73/377958/campos_512_v4
+73/377959/campos_512_v4
+73/377967/campos_512_v4
+73/377981/campos_512_v4
+73/377995/campos_512_v4
+73/378005/campos_512_v4
+73/378013/campos_512_v4
+73/378015/campos_512_v4
+73/378027/campos_512_v4
+73/378037/campos_512_v4
+73/378043/campos_512_v4
+73/378048/campos_512_v4
+73/378050/campos_512_v4
+73/378067/campos_512_v4
+73/378069/campos_512_v4
+73/378073/campos_512_v4
+73/378076/campos_512_v4
+73/378082/campos_512_v4
+73/378090/campos_512_v4
+73/378093/campos_512_v4
+73/378100/campos_512_v4
+73/378101/campos_512_v4
+73/378104/campos_512_v4
+73/378105/campos_512_v4
+73/378106/campos_512_v4
+73/378109/campos_512_v4
+73/378110/campos_512_v4
+73/378111/campos_512_v4
+73/378112/campos_512_v4
+73/378127/campos_512_v4
+73/378143/campos_512_v4
+73/378145/campos_512_v4
+73/378147/campos_512_v4
+73/378148/campos_512_v4
+73/378158/campos_512_v4
+73/378162/campos_512_v4
+73/378174/campos_512_v4
+73/378179/campos_512_v4
+73/378182/campos_512_v4
+73/378184/campos_512_v4
+73/378189/campos_512_v4
+73/378219/campos_512_v4
+73/378221/campos_512_v4
+73/378225/campos_512_v4
+73/378226/campos_512_v4
+73/378234/campos_512_v4
+73/378241/campos_512_v4
+73/378248/campos_512_v4
+73/378264/campos_512_v4
+73/378269/campos_512_v4
+73/378278/campos_512_v4
+73/378280/campos_512_v4
+73/378289/campos_512_v4
+73/378294/campos_512_v4
+73/378296/campos_512_v4
+73/378311/campos_512_v4
+73/378319/campos_512_v4
+73/378329/campos_512_v4
+73/378333/campos_512_v4
+73/378339/campos_512_v4
+73/378347/campos_512_v4
+73/378351/campos_512_v4
+73/378354/campos_512_v4
+73/378358/campos_512_v4
+73/378359/campos_512_v4
+73/378363/campos_512_v4
+73/378364/campos_512_v4
+73/378371/campos_512_v4
+73/378374/campos_512_v4
+73/378377/campos_512_v4
+73/378379/campos_512_v4
+73/378385/campos_512_v4
+73/378391/campos_512_v4
+73/378393/campos_512_v4
+73/378395/campos_512_v4
+73/378406/campos_512_v4
+73/378424/campos_512_v4
+73/378440/campos_512_v4
+73/378445/campos_512_v4
+73/378448/campos_512_v4
+73/378466/campos_512_v4
+73/378477/campos_512_v4
+73/378480/campos_512_v4
+73/378486/campos_512_v4
+73/378490/campos_512_v4
+73/378498/campos_512_v4
+73/378514/campos_512_v4
+73/378519/campos_512_v4
+73/378520/campos_512_v4
+73/378528/campos_512_v4
+73/378530/campos_512_v4
+73/378536/campos_512_v4
+73/378545/campos_512_v4
+73/378574/campos_512_v4
+73/378583/campos_512_v4
+73/378591/campos_512_v4
+73/378594/campos_512_v4
+73/378598/campos_512_v4
+73/378599/campos_512_v4
+73/378603/campos_512_v4
+73/378625/campos_512_v4
+73/378627/campos_512_v4
+73/378641/campos_512_v4
+73/378643/campos_512_v4
+73/378648/campos_512_v4
+73/378650/campos_512_v4
+73/378655/campos_512_v4
+73/378689/campos_512_v4
+73/378690/campos_512_v4
+73/378702/campos_512_v4
+73/378706/campos_512_v4
+73/378710/campos_512_v4
+73/378711/campos_512_v4
+73/378714/campos_512_v4
+73/378733/campos_512_v4
+73/378745/campos_512_v4
+73/378750/campos_512_v4
+73/378765/campos_512_v4
+73/378772/campos_512_v4
+73/378777/campos_512_v4
+73/378792/campos_512_v4
+73/378793/campos_512_v4
+73/378794/campos_512_v4
+73/378798/campos_512_v4
+73/378826/campos_512_v4
+73/378835/campos_512_v4
+73/378860/campos_512_v4
+73/378867/campos_512_v4
+73/378878/campos_512_v4
+73/378884/campos_512_v4
+73/378890/campos_512_v4
+73/378898/campos_512_v4
+73/378910/campos_512_v4
+73/378915/campos_512_v4
+73/378919/campos_512_v4
+73/378922/campos_512_v4
+73/378925/campos_512_v4
+73/378939/campos_512_v4
+73/378941/campos_512_v4
+73/378943/campos_512_v4
+73/378966/campos_512_v4
+73/378977/campos_512_v4
+73/378981/campos_512_v4
+73/379000/campos_512_v4
+73/379002/campos_512_v4
+73/379006/campos_512_v4
+73/379007/campos_512_v4
+73/379008/campos_512_v4
+73/379015/campos_512_v4
+73/379026/campos_512_v4
+73/379032/campos_512_v4
+73/379038/campos_512_v4
+73/379040/campos_512_v4
+73/379058/campos_512_v4
+73/379062/campos_512_v4
+73/379077/campos_512_v4
+73/379079/campos_512_v4
+73/379084/campos_512_v4
+73/379088/campos_512_v4
+73/379096/campos_512_v4
+73/379109/campos_512_v4
+73/379130/campos_512_v4
+73/379137/campos_512_v4
+73/379145/campos_512_v4
+73/379150/campos_512_v4
+73/379160/campos_512_v4
+73/379168/campos_512_v4
+73/379171/campos_512_v4
+73/379183/campos_512_v4
+73/379192/campos_512_v4
+73/379199/campos_512_v4
+73/379200/campos_512_v4
+73/379209/campos_512_v4
+73/379218/campos_512_v4
+73/379223/campos_512_v4
+73/379249/campos_512_v4
+73/379251/campos_512_v4
+73/379503/campos_512_v4
+73/379521/campos_512_v4
+73/379523/campos_512_v4
+73/379525/campos_512_v4
+73/379527/campos_512_v4
+73/379541/campos_512_v4
+73/379550/campos_512_v4
+73/379570/campos_512_v4
+73/379571/campos_512_v4
+73/379573/campos_512_v4
+73/379575/campos_512_v4
+73/379586/campos_512_v4
+73/379593/campos_512_v4
+73/379597/campos_512_v4
+73/379600/campos_512_v4
+73/379603/campos_512_v4
+73/379622/campos_512_v4
+73/379624/campos_512_v4
+73/379625/campos_512_v4
+73/379629/campos_512_v4
+73/379648/campos_512_v4
+73/379653/campos_512_v4
+73/379666/campos_512_v4
+73/379671/campos_512_v4
+73/379681/campos_512_v4
+73/379694/campos_512_v4
+73/379709/campos_512_v4
+73/379713/campos_512_v4
+73/379738/campos_512_v4
+73/379739/campos_512_v4
+73/379741/campos_512_v4
+73/379744/campos_512_v4
+73/379752/campos_512_v4
+73/379758/campos_512_v4
+73/379781/campos_512_v4
+73/379813/campos_512_v4
+73/379817/campos_512_v4
+73/379818/campos_512_v4
+73/379834/campos_512_v4
+73/379867/campos_512_v4
+73/379870/campos_512_v4
+73/379872/campos_512_v4
+73/379874/campos_512_v4
+73/379883/campos_512_v4
+73/379897/campos_512_v4
+73/379900/campos_512_v4
+73/379933/campos_512_v4
+73/379934/campos_512_v4
+73/379940/campos_512_v4
+73/379946/campos_512_v4
+73/379947/campos_512_v4
+73/379957/campos_512_v4
+73/379960/campos_512_v4
+73/379987/campos_512_v4
+73/379995/campos_512_v4
+74/380002/campos_512_v4
+74/380013/campos_512_v4
+74/380022/campos_512_v4
+74/380029/campos_512_v4
+74/380035/campos_512_v4
+74/380042/campos_512_v4
+74/380045/campos_512_v4
+74/380060/campos_512_v4
+74/380074/campos_512_v4
+74/380075/campos_512_v4
+74/380076/campos_512_v4
+74/380099/campos_512_v4
+74/380113/campos_512_v4
+74/380124/campos_512_v4
+74/380134/campos_512_v4
+74/380143/campos_512_v4
+74/380154/campos_512_v4
+74/380176/campos_512_v4
+74/380182/campos_512_v4
+74/380184/campos_512_v4
+74/380197/campos_512_v4
+74/380200/campos_512_v4
+74/380216/campos_512_v4
+74/380217/campos_512_v4
+74/380219/campos_512_v4
+74/380231/campos_512_v4
+74/380243/campos_512_v4
+74/380253/campos_512_v4
+74/380265/campos_512_v4
+74/380284/campos_512_v4
+74/380298/campos_512_v4
+74/380304/campos_512_v4
+74/380307/campos_512_v4
+74/380308/campos_512_v4
+74/380313/campos_512_v4
+74/380335/campos_512_v4
+74/380336/campos_512_v4
+74/380341/campos_512_v4
+74/380349/campos_512_v4
+74/380357/campos_512_v4
+74/380377/campos_512_v4
+74/380382/campos_512_v4
+74/380383/campos_512_v4
+74/380393/campos_512_v4
+74/380407/campos_512_v4
+74/380414/campos_512_v4
+74/380416/campos_512_v4
+74/380417/campos_512_v4
+74/380424/campos_512_v4
+74/380426/campos_512_v4
+74/380450/campos_512_v4
+74/380462/campos_512_v4
+74/380468/campos_512_v4
+74/380470/campos_512_v4
+74/380476/campos_512_v4
+74/380478/campos_512_v4
+74/380487/campos_512_v4
+74/380493/campos_512_v4
+74/380506/campos_512_v4
+74/380509/campos_512_v4
+74/380515/campos_512_v4
+74/380517/campos_512_v4
+74/380534/campos_512_v4
+74/380541/campos_512_v4
+74/380547/campos_512_v4
+74/380551/campos_512_v4
+74/380552/campos_512_v4
+74/380553/campos_512_v4
+74/380555/campos_512_v4
+74/380566/campos_512_v4
+74/380576/campos_512_v4
+74/380580/campos_512_v4
+74/380581/campos_512_v4
+74/380589/campos_512_v4
+74/380594/campos_512_v4
+74/380595/campos_512_v4
+74/380605/campos_512_v4
+74/380607/campos_512_v4
+74/380611/campos_512_v4
+74/380615/campos_512_v4
+74/380616/campos_512_v4
+74/380619/campos_512_v4
+74/380621/campos_512_v4
+74/380636/campos_512_v4
+74/380654/campos_512_v4
+74/380657/campos_512_v4
+74/380665/campos_512_v4
+74/380670/campos_512_v4
+74/380677/campos_512_v4
+74/380678/campos_512_v4
+74/380685/campos_512_v4
+74/380698/campos_512_v4
+74/380702/campos_512_v4
+74/380703/campos_512_v4
+74/380719/campos_512_v4
+74/380724/campos_512_v4
+74/380735/campos_512_v4
+74/380746/campos_512_v4
+74/380764/campos_512_v4
+74/380771/campos_512_v4
+74/380776/campos_512_v4
+74/380778/campos_512_v4
+74/380781/campos_512_v4
+74/380788/campos_512_v4
+74/380793/campos_512_v4
+74/380808/campos_512_v4
+74/380812/campos_512_v4
+74/380813/campos_512_v4
+74/380826/campos_512_v4
+74/380827/campos_512_v4
+74/380829/campos_512_v4
+74/380841/campos_512_v4
+74/380862/campos_512_v4
+74/380864/campos_512_v4
+74/380869/campos_512_v4
+74/380877/campos_512_v4
+74/380889/campos_512_v4
+74/380902/campos_512_v4
+74/380903/campos_512_v4
+74/380910/campos_512_v4
+74/380922/campos_512_v4
+74/380928/campos_512_v4
+74/380929/campos_512_v4
+74/380930/campos_512_v4
+74/380933/campos_512_v4
+74/380952/campos_512_v4
+74/380970/campos_512_v4
+74/380987/campos_512_v4
+74/380995/campos_512_v4
+74/381011/campos_512_v4
+74/381014/campos_512_v4
+74/381016/campos_512_v4
+74/381019/campos_512_v4
+74/381046/campos_512_v4
+74/381048/campos_512_v4
+74/381061/campos_512_v4
+74/381064/campos_512_v4
+74/381067/campos_512_v4
+74/381075/campos_512_v4
+74/381081/campos_512_v4
+74/381087/campos_512_v4
+74/381099/campos_512_v4
+74/381107/campos_512_v4
+74/381110/campos_512_v4
+74/381113/campos_512_v4
+74/381114/campos_512_v4
+74/381121/campos_512_v4
+74/381126/campos_512_v4
+74/381135/campos_512_v4
+74/381144/campos_512_v4
+74/381148/campos_512_v4
+74/381158/campos_512_v4
+74/381159/campos_512_v4
+74/381176/campos_512_v4
+74/381178/campos_512_v4
+74/381182/campos_512_v4
+74/381187/campos_512_v4
+74/381188/campos_512_v4
+74/381192/campos_512_v4
+74/381196/campos_512_v4
+74/381198/campos_512_v4
+74/381221/campos_512_v4
+74/381224/campos_512_v4
+74/381228/campos_512_v4
+74/381238/campos_512_v4
+74/381240/campos_512_v4
+74/381241/campos_512_v4
+74/381249/campos_512_v4
+74/381274/campos_512_v4
+74/381279/campos_512_v4
+74/381286/campos_512_v4
+74/381290/campos_512_v4
+74/381301/campos_512_v4
+74/381312/campos_512_v4
+74/381316/campos_512_v4
+74/381325/campos_512_v4
+74/381328/campos_512_v4
+74/381330/campos_512_v4
+74/381332/campos_512_v4
+74/381334/campos_512_v4
+74/381336/campos_512_v4
+74/381337/campos_512_v4
+74/381338/campos_512_v4
+74/381339/campos_512_v4
+74/381342/campos_512_v4
+74/381348/campos_512_v4
+74/381358/campos_512_v4
+74/381367/campos_512_v4
+74/381375/campos_512_v4
+74/381382/campos_512_v4
+74/381385/campos_512_v4
+74/381392/campos_512_v4
+74/381394/campos_512_v4
+74/381401/campos_512_v4
+74/381402/campos_512_v4
+74/381403/campos_512_v4
+74/381417/campos_512_v4
+74/381420/campos_512_v4
+74/381424/campos_512_v4
+74/381432/campos_512_v4
+74/381442/campos_512_v4
+74/381447/campos_512_v4
+74/381452/campos_512_v4
+74/381480/campos_512_v4
+74/381484/campos_512_v4
+74/381494/campos_512_v4
+74/381502/campos_512_v4
+74/381504/campos_512_v4
+74/381514/campos_512_v4
+74/381518/campos_512_v4
+74/381519/campos_512_v4
+74/381541/campos_512_v4
+74/381547/campos_512_v4
+74/381558/campos_512_v4
+74/381562/campos_512_v4
+74/381573/campos_512_v4
+74/381574/campos_512_v4
+74/381575/campos_512_v4
+74/381576/campos_512_v4
+74/381583/campos_512_v4
+74/381586/campos_512_v4
+74/381588/campos_512_v4
+74/381590/campos_512_v4
+74/381593/campos_512_v4
+74/381605/campos_512_v4
+74/381607/campos_512_v4
+74/381619/campos_512_v4
+74/381629/campos_512_v4
+74/381630/campos_512_v4
+74/381637/campos_512_v4
+74/381641/campos_512_v4
+74/381652/campos_512_v4
+74/381659/campos_512_v4
+74/381662/campos_512_v4
+74/381665/campos_512_v4
+74/381667/campos_512_v4
+74/381677/campos_512_v4
+74/381689/campos_512_v4
+74/381692/campos_512_v4
+74/381695/campos_512_v4
+74/381712/campos_512_v4
+74/381716/campos_512_v4
+74/381718/campos_512_v4
+74/381721/campos_512_v4
+74/381725/campos_512_v4
+74/381735/campos_512_v4
+74/381738/campos_512_v4
+74/381742/campos_512_v4
+74/381745/campos_512_v4
+74/381749/campos_512_v4
+74/381752/campos_512_v4
+74/381757/campos_512_v4
+74/381758/campos_512_v4
+74/381777/campos_512_v4
+74/381779/campos_512_v4
+74/381783/campos_512_v4
+74/381785/campos_512_v4
+74/381787/campos_512_v4
+74/381799/campos_512_v4
+74/381820/campos_512_v4
+74/381825/campos_512_v4
+74/381827/campos_512_v4
+74/381832/campos_512_v4
+74/381834/campos_512_v4
+74/381845/campos_512_v4
+74/381858/campos_512_v4
+74/381859/campos_512_v4
+74/381867/campos_512_v4
+74/381870/campos_512_v4
+74/381881/campos_512_v4
+74/381885/campos_512_v4
+74/381887/campos_512_v4
+74/381888/campos_512_v4
+74/381902/campos_512_v4
+74/381903/campos_512_v4
+74/381907/campos_512_v4
+74/381917/campos_512_v4
+74/381923/campos_512_v4
+74/381939/campos_512_v4
+74/381940/campos_512_v4
+74/381944/campos_512_v4
+74/381953/campos_512_v4
+74/381956/campos_512_v4
+74/381965/campos_512_v4
+74/381981/campos_512_v4
+74/381983/campos_512_v4
+74/381985/campos_512_v4
+74/381988/campos_512_v4
+74/381991/campos_512_v4
+74/382042/campos_512_v4
+74/382046/campos_512_v4
+74/382051/campos_512_v4
+74/382052/campos_512_v4
+74/382053/campos_512_v4
+74/382063/campos_512_v4
+74/382073/campos_512_v4
+74/382080/campos_512_v4
+74/382082/campos_512_v4
+74/382089/campos_512_v4
+74/382090/campos_512_v4
+74/382099/campos_512_v4
+74/382105/campos_512_v4
+74/382112/campos_512_v4
+74/382116/campos_512_v4
+74/382119/campos_512_v4
+74/382123/campos_512_v4
+74/382132/campos_512_v4
+74/382145/campos_512_v4
+74/382153/campos_512_v4
+74/382175/campos_512_v4
+74/382176/campos_512_v4
+74/382179/campos_512_v4
+74/382187/campos_512_v4
+74/382189/campos_512_v4
+74/382199/campos_512_v4
+74/382201/campos_512_v4
+74/382204/campos_512_v4
+74/382209/campos_512_v4
+74/382216/campos_512_v4
+74/382225/campos_512_v4
+74/382233/campos_512_v4
+74/382247/campos_512_v4
+74/382257/campos_512_v4
+74/382265/campos_512_v4
+74/382266/campos_512_v4
+74/382277/campos_512_v4
+74/382278/campos_512_v4
+74/382283/campos_512_v4
+74/382296/campos_512_v4
+74/382314/campos_512_v4
+74/382315/campos_512_v4
+74/382316/campos_512_v4
+74/382326/campos_512_v4
+74/382334/campos_512_v4
+74/382335/campos_512_v4
+74/382339/campos_512_v4
+74/382340/campos_512_v4
+74/382349/campos_512_v4
+74/382351/campos_512_v4
+74/382354/campos_512_v4
+74/382356/campos_512_v4
+74/382367/campos_512_v4
+74/382368/campos_512_v4
+74/382369/campos_512_v4
+74/382373/campos_512_v4
+74/382381/campos_512_v4
+74/382392/campos_512_v4
+74/382410/campos_512_v4
+74/382413/campos_512_v4
+74/382415/campos_512_v4
+74/382418/campos_512_v4
+74/382419/campos_512_v4
+74/382426/campos_512_v4
+74/382431/campos_512_v4
+74/382440/campos_512_v4
+74/382448/campos_512_v4
+74/382450/campos_512_v4
+74/382452/campos_512_v4
+74/382453/campos_512_v4
+74/382457/campos_512_v4
+74/382461/campos_512_v4
+74/382470/campos_512_v4
+74/382473/campos_512_v4
+74/382482/campos_512_v4
+74/382483/campos_512_v4
+74/382490/campos_512_v4
+74/382500/campos_512_v4
+74/382501/campos_512_v4
+74/382506/campos_512_v4
+74/382510/campos_512_v4
+74/382513/campos_512_v4
+74/382515/campos_512_v4
+74/382529/campos_512_v4
+74/382539/campos_512_v4
+74/382542/campos_512_v4
+74/382544/campos_512_v4
+74/382556/campos_512_v4
+74/382562/campos_512_v4
+74/382565/campos_512_v4
+74/382567/campos_512_v4
+74/382576/campos_512_v4
+74/382581/campos_512_v4
+74/382596/campos_512_v4
+74/382597/campos_512_v4
+74/382602/campos_512_v4
+74/382604/campos_512_v4
+74/382607/campos_512_v4
+74/382615/campos_512_v4
+74/382617/campos_512_v4
+74/382629/campos_512_v4
+74/382642/campos_512_v4
+74/382653/campos_512_v4
+74/382659/campos_512_v4
+74/382660/campos_512_v4
+74/382662/campos_512_v4
+74/382669/campos_512_v4
+74/382674/campos_512_v4
+74/382683/campos_512_v4
+74/382689/campos_512_v4
+74/382691/campos_512_v4
+74/382707/campos_512_v4
+74/382719/campos_512_v4
+74/382721/campos_512_v4
+74/382723/campos_512_v4
+74/382726/campos_512_v4
+74/382731/campos_512_v4
+74/382734/campos_512_v4
+74/382738/campos_512_v4
+74/382751/campos_512_v4
+74/382753/campos_512_v4
+74/382756/campos_512_v4
+74/382758/campos_512_v4
+74/382761/campos_512_v4
+74/382762/campos_512_v4
+74/382776/campos_512_v4
+74/382779/campos_512_v4
+74/382781/campos_512_v4
+74/382782/campos_512_v4
+74/382797/campos_512_v4
+74/382804/campos_512_v4
+74/382809/campos_512_v4
+74/382810/campos_512_v4
+74/382819/campos_512_v4
+74/382821/campos_512_v4
+74/382824/campos_512_v4
+74/382828/campos_512_v4
+74/382841/campos_512_v4
+74/382842/campos_512_v4
+74/382846/campos_512_v4
+74/382852/campos_512_v4
+74/382858/campos_512_v4
+74/382860/campos_512_v4
+74/382868/campos_512_v4
+74/382872/campos_512_v4
+74/382875/campos_512_v4
+74/382878/campos_512_v4
+74/382880/campos_512_v4
+74/382901/campos_512_v4
+74/382903/campos_512_v4
+74/382909/campos_512_v4
+74/382914/campos_512_v4
+74/382931/campos_512_v4
+74/382935/campos_512_v4
+74/382937/campos_512_v4
+74/382942/campos_512_v4
+74/382948/campos_512_v4
+74/382951/campos_512_v4
+74/382956/campos_512_v4
+74/382969/campos_512_v4
+74/382979/campos_512_v4
+74/382980/campos_512_v4
+74/382982/campos_512_v4
+74/382986/campos_512_v4
+74/382992/campos_512_v4
+74/383002/campos_512_v4
+74/383010/campos_512_v4
+74/383014/campos_512_v4
+74/383019/campos_512_v4
+74/383022/campos_512_v4
+74/383030/campos_512_v4
+74/383041/campos_512_v4
+74/383046/campos_512_v4
+74/383053/campos_512_v4
+74/383056/campos_512_v4
+74/383059/campos_512_v4
+74/383088/campos_512_v4
+74/383089/campos_512_v4
+74/383091/campos_512_v4
+74/383101/campos_512_v4
+74/383119/campos_512_v4
+74/383138/campos_512_v4
+74/383140/campos_512_v4
+74/383151/campos_512_v4
+74/383194/campos_512_v4
+74/383200/campos_512_v4
+74/383217/campos_512_v4
+74/383218/campos_512_v4
+74/383223/campos_512_v4
+74/383228/campos_512_v4
+74/383231/campos_512_v4
+74/383232/campos_512_v4
+74/383237/campos_512_v4
+74/383245/campos_512_v4
+74/383249/campos_512_v4
+74/383258/campos_512_v4
+74/383261/campos_512_v4
+74/383265/campos_512_v4
+74/383266/campos_512_v4
+74/383267/campos_512_v4
+74/383278/campos_512_v4
+74/383282/campos_512_v4
+74/383284/campos_512_v4
+74/383297/campos_512_v4
+74/383302/campos_512_v4
+74/383325/campos_512_v4
+74/383326/campos_512_v4
+74/383332/campos_512_v4
+74/383338/campos_512_v4
+74/383347/campos_512_v4
+74/383349/campos_512_v4
+74/383372/campos_512_v4
+74/383374/campos_512_v4
+74/383387/campos_512_v4
+74/383389/campos_512_v4
+74/383390/campos_512_v4
+74/383418/campos_512_v4
+74/383439/campos_512_v4
+74/383443/campos_512_v4
+74/383444/campos_512_v4
+74/383449/campos_512_v4
+74/383471/campos_512_v4
+74/383496/campos_512_v4
+74/383506/campos_512_v4
+74/383521/campos_512_v4
+74/383523/campos_512_v4
+74/383524/campos_512_v4
+74/383540/campos_512_v4
+74/383547/campos_512_v4
+74/383559/campos_512_v4
+74/383563/campos_512_v4
+74/383573/campos_512_v4
+74/383574/campos_512_v4
+74/383576/campos_512_v4
+74/383585/campos_512_v4
+74/383592/campos_512_v4
+74/383604/campos_512_v4
+74/383610/campos_512_v4
+74/383611/campos_512_v4
+74/383621/campos_512_v4
+74/383630/campos_512_v4
+74/383639/campos_512_v4
+74/383646/campos_512_v4
+74/383650/campos_512_v4
+74/383660/campos_512_v4
+74/383665/campos_512_v4
+74/383667/campos_512_v4
+74/383669/campos_512_v4
+74/383675/campos_512_v4
+74/383676/campos_512_v4
+74/383694/campos_512_v4
+74/383711/campos_512_v4
+74/383727/campos_512_v4
+74/383728/campos_512_v4
+74/383731/campos_512_v4
+74/383737/campos_512_v4
+74/383739/campos_512_v4
+74/383756/campos_512_v4
+74/383760/campos_512_v4
+74/383764/campos_512_v4
+74/383766/campos_512_v4
+74/383779/campos_512_v4
+74/383797/campos_512_v4
+74/383806/campos_512_v4
+74/383811/campos_512_v4
+74/383823/campos_512_v4
+74/383836/campos_512_v4
+74/383842/campos_512_v4
+74/383850/campos_512_v4
+74/383855/campos_512_v4
+74/383863/campos_512_v4
+74/383878/campos_512_v4
+74/383882/campos_512_v4
+74/383900/campos_512_v4
+74/383901/campos_512_v4
+74/383909/campos_512_v4
+74/383921/campos_512_v4
+74/383927/campos_512_v4
+74/383928/campos_512_v4
+74/383939/campos_512_v4
+74/383946/campos_512_v4
+74/383949/campos_512_v4
+74/383963/campos_512_v4
+74/383975/campos_512_v4
+74/383983/campos_512_v4
+74/383985/campos_512_v4
+74/384015/campos_512_v4
+74/384022/campos_512_v4
+74/384031/campos_512_v4
+74/384034/campos_512_v4
+74/384036/campos_512_v4
+74/384037/campos_512_v4
+74/384044/campos_512_v4
+74/384047/campos_512_v4
+74/384056/campos_512_v4
+74/384062/campos_512_v4
+74/384066/campos_512_v4
+74/384080/campos_512_v4
+74/384096/campos_512_v4
+74/384097/campos_512_v4
+74/384099/campos_512_v4
+74/384104/campos_512_v4
+74/384106/campos_512_v4
+74/384111/campos_512_v4
+74/384112/campos_512_v4
+74/384117/campos_512_v4
+74/384120/campos_512_v4
+74/384129/campos_512_v4
+74/384131/campos_512_v4
+74/384136/campos_512_v4
+74/384140/campos_512_v4
+74/384142/campos_512_v4
+74/384145/campos_512_v4
+74/384152/campos_512_v4
+74/384184/campos_512_v4
+74/384189/campos_512_v4
+74/384214/campos_512_v4
+74/384215/campos_512_v4
+74/384225/campos_512_v4
+74/384228/campos_512_v4
+74/384230/campos_512_v4
+74/384235/campos_512_v4
+74/384241/campos_512_v4
+74/384243/campos_512_v4
+74/384245/campos_512_v4
+74/384257/campos_512_v4
+74/384263/campos_512_v4
+74/384267/campos_512_v4
+74/384268/campos_512_v4
+74/384285/campos_512_v4
+74/384290/campos_512_v4
+74/384301/campos_512_v4
+74/384308/campos_512_v4
+74/384312/campos_512_v4
+74/384321/campos_512_v4
+74/384324/campos_512_v4
+74/384331/campos_512_v4
+74/384335/campos_512_v4
+74/384336/campos_512_v4
+74/384337/campos_512_v4
+74/384344/campos_512_v4
+74/384353/campos_512_v4
+74/384381/campos_512_v4
+74/384386/campos_512_v4
+74/384388/campos_512_v4
+74/384392/campos_512_v4
+74/384422/campos_512_v4
+74/384425/campos_512_v4
+74/384428/campos_512_v4
+74/384444/campos_512_v4
+74/384448/campos_512_v4
+74/384454/campos_512_v4
+74/384456/campos_512_v4
+74/384459/campos_512_v4
+74/384493/campos_512_v4
+74/384497/campos_512_v4
+74/384499/campos_512_v4
+74/384507/campos_512_v4
+74/384510/campos_512_v4
+74/384515/campos_512_v4
+74/384518/campos_512_v4
+74/384524/campos_512_v4
+74/384539/campos_512_v4
+74/384549/campos_512_v4
+74/384552/campos_512_v4
+74/384575/campos_512_v4
+74/384577/campos_512_v4
+74/384579/campos_512_v4
+74/384599/campos_512_v4
+74/384609/campos_512_v4
+74/384629/campos_512_v4
+74/384634/campos_512_v4
+74/384640/campos_512_v4
+74/384655/campos_512_v4
+74/384660/campos_512_v4
+74/384672/campos_512_v4
+74/384674/campos_512_v4
+74/384688/campos_512_v4
+74/384690/campos_512_v4
+74/384697/campos_512_v4
+74/384699/campos_512_v4
+74/384711/campos_512_v4
+74/384712/campos_512_v4
+74/384714/campos_512_v4
+74/384735/campos_512_v4
+74/384756/campos_512_v4
+74/384758/campos_512_v4
+74/384766/campos_512_v4
+74/384778/campos_512_v4
+74/384788/campos_512_v4
+74/384793/campos_512_v4
+74/384805/campos_512_v4
+74/384807/campos_512_v4
+74/384819/campos_512_v4
+74/384835/campos_512_v4
+74/384845/campos_512_v4
+74/384851/campos_512_v4
+74/384855/campos_512_v4
+74/384860/campos_512_v4
+74/384864/campos_512_v4
+74/384869/campos_512_v4
+74/384876/campos_512_v4
+74/384881/campos_512_v4
+74/384895/campos_512_v4
+74/384899/campos_512_v4
+74/384901/campos_512_v4
+74/384904/campos_512_v4
+74/384919/campos_512_v4
+74/384938/campos_512_v4
+74/384941/campos_512_v4
+74/384949/campos_512_v4
+74/384957/campos_512_v4
+74/384961/campos_512_v4
+74/384977/campos_512_v4
+74/384987/campos_512_v4
+74/384989/campos_512_v4
+75/385016/campos_512_v4
+75/385031/campos_512_v4
+75/385034/campos_512_v4
+75/385046/campos_512_v4
+75/385055/campos_512_v4
+75/385057/campos_512_v4
+75/385088/campos_512_v4
+75/385097/campos_512_v4
+75/385123/campos_512_v4
+75/385124/campos_512_v4
+75/385130/campos_512_v4
+75/385165/campos_512_v4
+75/385172/campos_512_v4
+75/385188/campos_512_v4
+75/385199/campos_512_v4
+75/385205/campos_512_v4
+75/385207/campos_512_v4
+75/385216/campos_512_v4
+75/385238/campos_512_v4
+75/385239/campos_512_v4
+75/385251/campos_512_v4
+75/385254/campos_512_v4
+75/385310/campos_512_v4
+75/385313/campos_512_v4
+75/385331/campos_512_v4
+75/385340/campos_512_v4
+75/385350/campos_512_v4
+75/385376/campos_512_v4
+75/385387/campos_512_v4
+75/385391/campos_512_v4
+75/385396/campos_512_v4
+75/385403/campos_512_v4
+75/385424/campos_512_v4
+75/385433/campos_512_v4
+75/385444/campos_512_v4
+75/385445/campos_512_v4
+75/385448/campos_512_v4
+75/385461/campos_512_v4
+75/385465/campos_512_v4
+75/385474/campos_512_v4
+75/385477/campos_512_v4
+75/385480/campos_512_v4
+75/385502/campos_512_v4
+75/385509/campos_512_v4
+75/385515/campos_512_v4
+75/385536/campos_512_v4
+75/385556/campos_512_v4
+75/385559/campos_512_v4
+75/385565/campos_512_v4
+75/385569/campos_512_v4
+75/385578/campos_512_v4
+75/385586/campos_512_v4
+75/385588/campos_512_v4
+75/385592/campos_512_v4
+75/385605/campos_512_v4
+75/385606/campos_512_v4
+75/385607/campos_512_v4
+75/385616/campos_512_v4
+75/385623/campos_512_v4
+75/385625/campos_512_v4
+75/385630/campos_512_v4
+75/385634/campos_512_v4
+75/385642/campos_512_v4
+75/385652/campos_512_v4
+75/385655/campos_512_v4
+75/385684/campos_512_v4
+75/385687/campos_512_v4
+75/385709/campos_512_v4
+75/385713/campos_512_v4
+75/385724/campos_512_v4
+75/385728/campos_512_v4
+75/385756/campos_512_v4
+75/385758/campos_512_v4
+75/385766/campos_512_v4
+75/385780/campos_512_v4
+75/385781/campos_512_v4
+75/385783/campos_512_v4
+75/385791/campos_512_v4
+75/385797/campos_512_v4
+75/385804/campos_512_v4
+75/385810/campos_512_v4
+75/385811/campos_512_v4
+75/385818/campos_512_v4
+75/385829/campos_512_v4
+75/385832/campos_512_v4
+75/385851/campos_512_v4
+75/385863/campos_512_v4
+75/385871/campos_512_v4
+75/385876/campos_512_v4
+75/385944/campos_512_v4
+75/385957/campos_512_v4
+75/385961/campos_512_v4
+75/385962/campos_512_v4
+75/385968/campos_512_v4
+75/385986/campos_512_v4
+75/386016/campos_512_v4
+75/386026/campos_512_v4
+75/386030/campos_512_v4
+75/386052/campos_512_v4
+75/386066/campos_512_v4
+75/386070/campos_512_v4
+75/386071/campos_512_v4
+75/386096/campos_512_v4
+75/386115/campos_512_v4
+75/386120/campos_512_v4
+75/386123/campos_512_v4
+75/386133/campos_512_v4
+75/386143/campos_512_v4
+75/386149/campos_512_v4
+75/386165/campos_512_v4
+75/386166/campos_512_v4
+75/386170/campos_512_v4
+75/386171/campos_512_v4
+75/386172/campos_512_v4
+75/386175/campos_512_v4
+75/386186/campos_512_v4
+75/386189/campos_512_v4
+75/386206/campos_512_v4
+75/386216/campos_512_v4
+75/386223/campos_512_v4
+75/386226/campos_512_v4
+75/386234/campos_512_v4
+75/386238/campos_512_v4
+75/386246/campos_512_v4
+75/386253/campos_512_v4
+75/386269/campos_512_v4
+75/386277/campos_512_v4
+75/386292/campos_512_v4
+75/386300/campos_512_v4
+75/386309/campos_512_v4
+75/386313/campos_512_v4
+75/386319/campos_512_v4
+75/386324/campos_512_v4
+75/386325/campos_512_v4
+75/386328/campos_512_v4
+75/386330/campos_512_v4
+75/386343/campos_512_v4
+75/386347/campos_512_v4
+75/386353/campos_512_v4
+75/386362/campos_512_v4
+75/386390/campos_512_v4
+75/386391/campos_512_v4
+75/386397/campos_512_v4
+75/386403/campos_512_v4
+75/386411/campos_512_v4
+75/386414/campos_512_v4
+75/386421/campos_512_v4
+75/386426/campos_512_v4
+75/386433/campos_512_v4
+75/386443/campos_512_v4
+75/386462/campos_512_v4
+75/386473/campos_512_v4
+75/386479/campos_512_v4
+75/386480/campos_512_v4
+75/386489/campos_512_v4
+75/386493/campos_512_v4
+75/386503/campos_512_v4
+75/386504/campos_512_v4
+75/386519/campos_512_v4
+75/386539/campos_512_v4
+75/386553/campos_512_v4
+75/386555/campos_512_v4
+75/386559/campos_512_v4
+75/386567/campos_512_v4
+75/386577/campos_512_v4
+75/386589/campos_512_v4
+75/386603/campos_512_v4
+75/386606/campos_512_v4
+75/386630/campos_512_v4
+75/386631/campos_512_v4
+75/386642/campos_512_v4
+75/386648/campos_512_v4
+75/386649/campos_512_v4
+75/386653/campos_512_v4
+75/386654/campos_512_v4
+75/386660/campos_512_v4
+75/386672/campos_512_v4
+75/386686/campos_512_v4
+75/386701/campos_512_v4
+75/386704/campos_512_v4
+75/386722/campos_512_v4
+75/386725/campos_512_v4
+75/386727/campos_512_v4
+75/386728/campos_512_v4
+75/386731/campos_512_v4
+75/386734/campos_512_v4
+75/386751/campos_512_v4
+75/386757/campos_512_v4
+75/386760/campos_512_v4
+75/386769/campos_512_v4
+75/386781/campos_512_v4
+75/386785/campos_512_v4
+75/386790/campos_512_v4
+75/386795/campos_512_v4
+75/386799/campos_512_v4
+75/386806/campos_512_v4
+75/386811/campos_512_v4
+75/386817/campos_512_v4
+75/386824/campos_512_v4
+75/386843/campos_512_v4
+75/386850/campos_512_v4
+75/386855/campos_512_v4
+75/386874/campos_512_v4
+75/386892/campos_512_v4
+75/386894/campos_512_v4
+75/386897/campos_512_v4
+75/386903/campos_512_v4
+75/386905/campos_512_v4
+75/386913/campos_512_v4
+75/386918/campos_512_v4
+75/386928/campos_512_v4
+75/386939/campos_512_v4
+75/386981/campos_512_v4
+75/386986/campos_512_v4
+75/386996/campos_512_v4
+75/387003/campos_512_v4
+75/387004/campos_512_v4
+75/387010/campos_512_v4
+75/387027/campos_512_v4
+75/387030/campos_512_v4
+75/387035/campos_512_v4
+75/387045/campos_512_v4
+75/387046/campos_512_v4
+75/387049/campos_512_v4
+75/387051/campos_512_v4
+75/387065/campos_512_v4
+75/387078/campos_512_v4
+75/387089/campos_512_v4
+75/387092/campos_512_v4
+75/387099/campos_512_v4
+75/387140/campos_512_v4
+75/387145/campos_512_v4
+75/387155/campos_512_v4
+75/387156/campos_512_v4
+75/387159/campos_512_v4
+75/387160/campos_512_v4
+75/387171/campos_512_v4
+75/387176/campos_512_v4
+75/387179/campos_512_v4
+75/387182/campos_512_v4
+75/387184/campos_512_v4
+75/387188/campos_512_v4
+75/387194/campos_512_v4
+75/387241/campos_512_v4
+75/387244/campos_512_v4
+75/387246/campos_512_v4
+75/387251/campos_512_v4
+75/387254/campos_512_v4
+75/387258/campos_512_v4
+75/387265/campos_512_v4
+75/387284/campos_512_v4
+75/387285/campos_512_v4
+75/387289/campos_512_v4
+75/387300/campos_512_v4
+75/387301/campos_512_v4
+75/387304/campos_512_v4
+75/387320/campos_512_v4
+75/387326/campos_512_v4
+75/387341/campos_512_v4
+75/387380/campos_512_v4
+75/387381/campos_512_v4
+75/387387/campos_512_v4
+75/387395/campos_512_v4
+75/387396/campos_512_v4
+75/387407/campos_512_v4
+75/387418/campos_512_v4
+75/387420/campos_512_v4
+75/387426/campos_512_v4
+75/387432/campos_512_v4
+75/387440/campos_512_v4
+75/387468/campos_512_v4
+75/387476/campos_512_v4
+75/387505/campos_512_v4
+75/387538/campos_512_v4
+75/387539/campos_512_v4
+75/387551/campos_512_v4
+75/387554/campos_512_v4
+75/387563/campos_512_v4
+75/387568/campos_512_v4
+75/387571/campos_512_v4
+75/387578/campos_512_v4
+75/387582/campos_512_v4
+75/387584/campos_512_v4
+75/387589/campos_512_v4
+75/387590/campos_512_v4
+75/387599/campos_512_v4
+75/387608/campos_512_v4
+75/387620/campos_512_v4
+75/387621/campos_512_v4
+75/387632/campos_512_v4
+75/387635/campos_512_v4
+75/387638/campos_512_v4
+75/387639/campos_512_v4
+75/387649/campos_512_v4
+75/387659/campos_512_v4
+75/387663/campos_512_v4
+75/387673/campos_512_v4
+75/387676/campos_512_v4
+75/387681/campos_512_v4
+75/387686/campos_512_v4
+75/387706/campos_512_v4
+75/387711/campos_512_v4
+75/387717/campos_512_v4
+75/387726/campos_512_v4
+75/387754/campos_512_v4
+75/387757/campos_512_v4
+75/387761/campos_512_v4
+75/387762/campos_512_v4
+75/387763/campos_512_v4
+75/387768/campos_512_v4
+75/387793/campos_512_v4
+75/387800/campos_512_v4
+75/387806/campos_512_v4
+75/387815/campos_512_v4
+75/387820/campos_512_v4
+75/387821/campos_512_v4
+75/387822/campos_512_v4
+75/387833/campos_512_v4
+75/387843/campos_512_v4
+75/387861/campos_512_v4
+75/387869/campos_512_v4
+75/387871/campos_512_v4
+75/387874/campos_512_v4
+75/387881/campos_512_v4
+75/387891/campos_512_v4
+75/387920/campos_512_v4
+75/387933/campos_512_v4
+75/387934/campos_512_v4
+75/387941/campos_512_v4
+75/387950/campos_512_v4
+75/387965/campos_512_v4
+75/387979/campos_512_v4
+75/387980/campos_512_v4
+75/387981/campos_512_v4
+75/387989/campos_512_v4
+75/387993/campos_512_v4
+75/387997/campos_512_v4
+75/388002/campos_512_v4
+75/388006/campos_512_v4
+75/388014/campos_512_v4
+75/388022/campos_512_v4
+75/388042/campos_512_v4
+75/388054/campos_512_v4
+75/388080/campos_512_v4
+75/388083/campos_512_v4
+75/388091/campos_512_v4
+75/388093/campos_512_v4
+75/388098/campos_512_v4
+75/388099/campos_512_v4
+75/388121/campos_512_v4
+75/388124/campos_512_v4
+75/388131/campos_512_v4
+75/388135/campos_512_v4
+75/388144/campos_512_v4
+75/388147/campos_512_v4
+75/388152/campos_512_v4
+75/388156/campos_512_v4
+75/388163/campos_512_v4
+75/388168/campos_512_v4
+75/388173/campos_512_v4
+75/388189/campos_512_v4
+75/388190/campos_512_v4
+75/388200/campos_512_v4
+75/388204/campos_512_v4
+75/388209/campos_512_v4
+75/388211/campos_512_v4
+75/388217/campos_512_v4
+75/388221/campos_512_v4
+75/388226/campos_512_v4
+75/388228/campos_512_v4
+75/388249/campos_512_v4
+75/388260/campos_512_v4
+75/388266/campos_512_v4
+75/388268/campos_512_v4
+75/388270/campos_512_v4
+75/388274/campos_512_v4
+75/388279/campos_512_v4
+75/388284/campos_512_v4
+75/388294/campos_512_v4
+75/388306/campos_512_v4
+75/388310/campos_512_v4
+75/388311/campos_512_v4
+75/388317/campos_512_v4
+75/388323/campos_512_v4
+75/388340/campos_512_v4
+75/388351/campos_512_v4
+75/388352/campos_512_v4
+75/388366/campos_512_v4
+75/388371/campos_512_v4
+75/388374/campos_512_v4
+75/388382/campos_512_v4
+75/388388/campos_512_v4
+75/388391/campos_512_v4
+75/388425/campos_512_v4
+75/388426/campos_512_v4
+75/388437/campos_512_v4
+75/388441/campos_512_v4
+75/388453/campos_512_v4
+75/388460/campos_512_v4
+75/388463/campos_512_v4
+75/388465/campos_512_v4
+75/388468/campos_512_v4
+75/388474/campos_512_v4
+75/388476/campos_512_v4
+75/388478/campos_512_v4
+75/388481/campos_512_v4
+75/388493/campos_512_v4
+75/388519/campos_512_v4
+75/388521/campos_512_v4
+75/388538/campos_512_v4
+75/388550/campos_512_v4
+75/388552/campos_512_v4
+75/388556/campos_512_v4
+75/388567/campos_512_v4
+75/388570/campos_512_v4
+75/388578/campos_512_v4
+75/388585/campos_512_v4
+75/388589/campos_512_v4
+75/388604/campos_512_v4
+75/388615/campos_512_v4
+75/388671/campos_512_v4
+75/388676/campos_512_v4
+75/388694/campos_512_v4
+75/388706/campos_512_v4
+75/388713/campos_512_v4
+75/388715/campos_512_v4
+75/388720/campos_512_v4
+75/388723/campos_512_v4
+75/388724/campos_512_v4
+75/388734/campos_512_v4
+75/388741/campos_512_v4
+75/388760/campos_512_v4
+75/388762/campos_512_v4
+75/388764/campos_512_v4
+75/388768/campos_512_v4
+75/388773/campos_512_v4
+75/388795/campos_512_v4
+75/388810/campos_512_v4
+75/388818/campos_512_v4
+75/388826/campos_512_v4
+75/388835/campos_512_v4
+75/388836/campos_512_v4
+75/388840/campos_512_v4
+75/388860/campos_512_v4
+75/388861/campos_512_v4
+75/388864/campos_512_v4
+75/388865/campos_512_v4
+75/388868/campos_512_v4
+75/388877/campos_512_v4
+75/388888/campos_512_v4
+75/388894/campos_512_v4
+75/388902/campos_512_v4
+75/388917/campos_512_v4
+75/388923/campos_512_v4
+75/388930/campos_512_v4
+75/388935/campos_512_v4
+75/388950/campos_512_v4
+75/388954/campos_512_v4
+75/388961/campos_512_v4
+75/388972/campos_512_v4
+75/388980/campos_512_v4
+75/388990/campos_512_v4
+75/389022/campos_512_v4
+75/389023/campos_512_v4
+75/389031/campos_512_v4
+75/389064/campos_512_v4
+75/389079/campos_512_v4
+75/389082/campos_512_v4
+75/389083/campos_512_v4
+75/389092/campos_512_v4
+75/389095/campos_512_v4
+75/389098/campos_512_v4
+75/389100/campos_512_v4
+75/389103/campos_512_v4
+75/389105/campos_512_v4
+75/389107/campos_512_v4
+75/389108/campos_512_v4
+75/389117/campos_512_v4
+75/389122/campos_512_v4
+75/389124/campos_512_v4
+75/389135/campos_512_v4
+75/389169/campos_512_v4
+75/389172/campos_512_v4
+75/389174/campos_512_v4
+75/389180/campos_512_v4
+75/389182/campos_512_v4
+75/389188/campos_512_v4
+75/389189/campos_512_v4
+75/389193/campos_512_v4
+75/389202/campos_512_v4
+75/389204/campos_512_v4
+75/389208/campos_512_v4
+75/389235/campos_512_v4
+75/389238/campos_512_v4
+75/389259/campos_512_v4
+75/389267/campos_512_v4
+75/389275/campos_512_v4
+75/389276/campos_512_v4
+75/389283/campos_512_v4
+75/389285/campos_512_v4
+75/389291/campos_512_v4
+75/389292/campos_512_v4
+75/389302/campos_512_v4
+75/389304/campos_512_v4
+75/389310/campos_512_v4
+75/389323/campos_512_v4
+75/389324/campos_512_v4
+75/389325/campos_512_v4
+75/389327/campos_512_v4
+75/389346/campos_512_v4
+75/389354/campos_512_v4
+75/389361/campos_512_v4
+75/389367/campos_512_v4
+75/389373/campos_512_v4
+75/389381/campos_512_v4
+75/389390/campos_512_v4
+75/389393/campos_512_v4
+75/389402/campos_512_v4
+75/389408/campos_512_v4
+75/389414/campos_512_v4
+75/389418/campos_512_v4
+75/389424/campos_512_v4
+75/389432/campos_512_v4
+75/389436/campos_512_v4
+75/389441/campos_512_v4
+75/389442/campos_512_v4
+75/389444/campos_512_v4
+75/389447/campos_512_v4
+75/389473/campos_512_v4
+75/389474/campos_512_v4
+75/389489/campos_512_v4
+75/389493/campos_512_v4
+75/389509/campos_512_v4
+75/389516/campos_512_v4
+75/389520/campos_512_v4
+75/389531/campos_512_v4
+75/389545/campos_512_v4
+75/389549/campos_512_v4
+75/389556/campos_512_v4
+75/389562/campos_512_v4
+75/389567/campos_512_v4
+75/389569/campos_512_v4
+75/389571/campos_512_v4
+75/389573/campos_512_v4
+75/389580/campos_512_v4
+75/389588/campos_512_v4
+75/389604/campos_512_v4
+75/389614/campos_512_v4
+75/389618/campos_512_v4
+75/389624/campos_512_v4
+75/389625/campos_512_v4
+75/389636/campos_512_v4
+75/389639/campos_512_v4
+75/389651/campos_512_v4
+75/389664/campos_512_v4
+75/389667/campos_512_v4
+75/389675/campos_512_v4
+75/389688/campos_512_v4
+75/389691/campos_512_v4
+75/389697/campos_512_v4
+75/389704/campos_512_v4
+75/389708/campos_512_v4
+75/389710/campos_512_v4
+75/389717/campos_512_v4
+75/389728/campos_512_v4
+75/389733/campos_512_v4
+75/389738/campos_512_v4
+75/389750/campos_512_v4
+75/389753/campos_512_v4
+75/389761/campos_512_v4
+75/389786/campos_512_v4
+75/389798/campos_512_v4
+75/389805/campos_512_v4
+75/389808/campos_512_v4
+75/389818/campos_512_v4
+75/389831/campos_512_v4
+75/389832/campos_512_v4
+75/389836/campos_512_v4
+75/389837/campos_512_v4
+75/389843/campos_512_v4
+75/389854/campos_512_v4
+75/389856/campos_512_v4
+75/389863/campos_512_v4
+75/389874/campos_512_v4
+75/389878/campos_512_v4
+75/389880/campos_512_v4
+75/389883/campos_512_v4
+75/389889/campos_512_v4
+75/389893/campos_512_v4
+75/389950/campos_512_v4
+75/389970/campos_512_v4
+75/389980/campos_512_v4
+75/389988/campos_512_v4
+75/389989/campos_512_v4
+76/390023/campos_512_v4
+76/390027/campos_512_v4
+76/390028/campos_512_v4
+76/390039/campos_512_v4
+76/390044/campos_512_v4
+76/390053/campos_512_v4
+76/390066/campos_512_v4
+76/390072/campos_512_v4
+76/390073/campos_512_v4
+76/390085/campos_512_v4
+76/390095/campos_512_v4
+76/390096/campos_512_v4
+76/390099/campos_512_v4
+76/390100/campos_512_v4
+76/390105/campos_512_v4
+76/390111/campos_512_v4
+76/390124/campos_512_v4
+76/390139/campos_512_v4
+76/390143/campos_512_v4
+76/390145/campos_512_v4
+76/390146/campos_512_v4
+76/390155/campos_512_v4
+76/390157/campos_512_v4
+76/390160/campos_512_v4
+76/390165/campos_512_v4
+76/390181/campos_512_v4
+76/390191/campos_512_v4
+76/390206/campos_512_v4
+76/390219/campos_512_v4
+76/390225/campos_512_v4
+76/390237/campos_512_v4
+76/390239/campos_512_v4
+76/390265/campos_512_v4
+76/390267/campos_512_v4
+76/390274/campos_512_v4
+76/390276/campos_512_v4
+76/390282/campos_512_v4
+76/390285/campos_512_v4
+76/390292/campos_512_v4
+76/390294/campos_512_v4
+76/390308/campos_512_v4
+76/390320/campos_512_v4
+76/390328/campos_512_v4
+76/390351/campos_512_v4
+76/390352/campos_512_v4
+76/390364/campos_512_v4
+76/390375/campos_512_v4
+76/390381/campos_512_v4
+76/390384/campos_512_v4
+76/390386/campos_512_v4
+76/390400/campos_512_v4
+76/390403/campos_512_v4
+76/390408/campos_512_v4
+76/390409/campos_512_v4
+76/390420/campos_512_v4
+76/390434/campos_512_v4
+76/390442/campos_512_v4
+76/390460/campos_512_v4
+76/390466/campos_512_v4
+76/390467/campos_512_v4
+76/390500/campos_512_v4
+76/390509/campos_512_v4
+76/390529/campos_512_v4
+76/390533/campos_512_v4
+76/390534/campos_512_v4
+76/390538/campos_512_v4
+76/390547/campos_512_v4
+76/390550/campos_512_v4
+76/390556/campos_512_v4
+76/390557/campos_512_v4
+76/390561/campos_512_v4
+76/390564/campos_512_v4
+76/390567/campos_512_v4
+76/390568/campos_512_v4
+76/390572/campos_512_v4
+76/390574/campos_512_v4
+76/390581/campos_512_v4
+76/390593/campos_512_v4
+76/390601/campos_512_v4
+76/390603/campos_512_v4
+76/390609/campos_512_v4
+76/390612/campos_512_v4
+76/390615/campos_512_v4
+76/390628/campos_512_v4
+76/390670/campos_512_v4
+76/390692/campos_512_v4
+76/390699/campos_512_v4
+76/390702/campos_512_v4
+76/390704/campos_512_v4
+76/390709/campos_512_v4
+76/390728/campos_512_v4
+76/390729/campos_512_v4
+76/390731/campos_512_v4
+76/390732/campos_512_v4
+76/390741/campos_512_v4
+76/390744/campos_512_v4
+76/390750/campos_512_v4
+76/390751/campos_512_v4
+76/390753/campos_512_v4
+76/390757/campos_512_v4
+76/390760/campos_512_v4
+76/390764/campos_512_v4
+76/390776/campos_512_v4
+76/390783/campos_512_v4
+76/390785/campos_512_v4
+76/390809/campos_512_v4
+76/390810/campos_512_v4
+76/390816/campos_512_v4
+76/390829/campos_512_v4
+76/390830/campos_512_v4
+76/390834/campos_512_v4
+76/390836/campos_512_v4
+76/390839/campos_512_v4
+76/390845/campos_512_v4
+76/390851/campos_512_v4
+76/390870/campos_512_v4
+76/390872/campos_512_v4
+76/390879/campos_512_v4
+76/390896/campos_512_v4
+76/390934/campos_512_v4
+76/390941/campos_512_v4
+76/390944/campos_512_v4
+76/390964/campos_512_v4
+76/390977/campos_512_v4
+76/390981/campos_512_v4
+76/390996/campos_512_v4
+76/391011/campos_512_v4
+76/391012/campos_512_v4
+76/391014/campos_512_v4
+76/391019/campos_512_v4
+76/391027/campos_512_v4
+76/391036/campos_512_v4
+76/391038/campos_512_v4
+76/391046/campos_512_v4
+76/391054/campos_512_v4
+76/391058/campos_512_v4
+76/391060/campos_512_v4
+76/391077/campos_512_v4
+76/391082/campos_512_v4
+76/391090/campos_512_v4
+76/391096/campos_512_v4
+76/391100/campos_512_v4
+76/391107/campos_512_v4
+76/391108/campos_512_v4
+76/391109/campos_512_v4
+76/391117/campos_512_v4
+76/391119/campos_512_v4
+76/391131/campos_512_v4
+76/391135/campos_512_v4
+76/391136/campos_512_v4
+76/391153/campos_512_v4
+76/391165/campos_512_v4
+76/391176/campos_512_v4
+76/391177/campos_512_v4
+76/391178/campos_512_v4
+76/391183/campos_512_v4
+76/391230/campos_512_v4
+76/391239/campos_512_v4
+76/391242/campos_512_v4
+76/391263/campos_512_v4
+76/391286/campos_512_v4
+76/391298/campos_512_v4
+76/391299/campos_512_v4
+76/391302/campos_512_v4
+76/391304/campos_512_v4
+76/391311/campos_512_v4
+76/391317/campos_512_v4
+76/391324/campos_512_v4
+76/391328/campos_512_v4
+76/391349/campos_512_v4
+76/391350/campos_512_v4
+76/391353/campos_512_v4
+76/391354/campos_512_v4
+76/391360/campos_512_v4
+76/391364/campos_512_v4
+76/391377/campos_512_v4
+76/391381/campos_512_v4
+76/391382/campos_512_v4
+76/391385/campos_512_v4
+76/391393/campos_512_v4
+76/391397/campos_512_v4
+76/391404/campos_512_v4
+76/391410/campos_512_v4
+76/391413/campos_512_v4
+76/391422/campos_512_v4
+76/391425/campos_512_v4
+76/391427/campos_512_v4
+76/391432/campos_512_v4
+76/391434/campos_512_v4
+76/391437/campos_512_v4
+76/391451/campos_512_v4
+76/391456/campos_512_v4
+76/391468/campos_512_v4
+76/391470/campos_512_v4
+76/391486/campos_512_v4
+76/391496/campos_512_v4
+76/391512/campos_512_v4
+76/391525/campos_512_v4
+76/391547/campos_512_v4
+76/391550/campos_512_v4
+76/391552/campos_512_v4
+76/391556/campos_512_v4
+76/391571/campos_512_v4
+76/391576/campos_512_v4
+76/391583/campos_512_v4
+76/391584/campos_512_v4
+76/391591/campos_512_v4
+76/391602/campos_512_v4
+76/391621/campos_512_v4
+76/391627/campos_512_v4
+76/391629/campos_512_v4
+76/391635/campos_512_v4
+76/391643/campos_512_v4
+76/391644/campos_512_v4
+76/391657/campos_512_v4
+76/391661/campos_512_v4
+76/391690/campos_512_v4
+76/391697/campos_512_v4
+76/391703/campos_512_v4
+76/391712/campos_512_v4
+76/391716/campos_512_v4
+76/391718/campos_512_v4
+76/391723/campos_512_v4
+76/391726/campos_512_v4
+76/391733/campos_512_v4
+76/391739/campos_512_v4
+76/391741/campos_512_v4
+76/391749/campos_512_v4
+76/391764/campos_512_v4
+76/391765/campos_512_v4
+76/391768/campos_512_v4
+76/391774/campos_512_v4
+76/391789/campos_512_v4
+76/391796/campos_512_v4
+76/391808/campos_512_v4
+76/391814/campos_512_v4
+76/391818/campos_512_v4
+76/391823/campos_512_v4
+76/391827/campos_512_v4
+76/391833/campos_512_v4
+76/391852/campos_512_v4
+76/391853/campos_512_v4
+76/391854/campos_512_v4
+76/391860/campos_512_v4
+76/391862/campos_512_v4
+76/391868/campos_512_v4
+76/391869/campos_512_v4
+76/391872/campos_512_v4
+76/391892/campos_512_v4
+76/391897/campos_512_v4
+76/391904/campos_512_v4
+76/391922/campos_512_v4
+76/391924/campos_512_v4
+76/391952/campos_512_v4
+76/391959/campos_512_v4
+76/391989/campos_512_v4
+76/392008/campos_512_v4
+76/392015/campos_512_v4
+76/392016/campos_512_v4
+76/392023/campos_512_v4
+76/392024/campos_512_v4
+76/392029/campos_512_v4
+76/392030/campos_512_v4
+76/392039/campos_512_v4
+76/392045/campos_512_v4
+76/392051/campos_512_v4
+76/392054/campos_512_v4
+76/392071/campos_512_v4
+76/392079/campos_512_v4
+76/392081/campos_512_v4
+76/392093/campos_512_v4
+76/392098/campos_512_v4
+76/392102/campos_512_v4
+76/392103/campos_512_v4
+76/392104/campos_512_v4
+76/392112/campos_512_v4
+76/392131/campos_512_v4
+76/392135/campos_512_v4
+76/392140/campos_512_v4
+76/392141/campos_512_v4
+76/392151/campos_512_v4
+76/392164/campos_512_v4
+76/392167/campos_512_v4
+76/392180/campos_512_v4
+76/392181/campos_512_v4
+76/392184/campos_512_v4
+76/392189/campos_512_v4
+76/392213/campos_512_v4
+76/392214/campos_512_v4
+76/392222/campos_512_v4
+76/392229/campos_512_v4
+76/392230/campos_512_v4
+76/392234/campos_512_v4
+76/392244/campos_512_v4
+76/392285/campos_512_v4
+76/392299/campos_512_v4
+76/392305/campos_512_v4
+76/392307/campos_512_v4
+76/392310/campos_512_v4
+76/392319/campos_512_v4
+76/392321/campos_512_v4
+76/392327/campos_512_v4
+76/392328/campos_512_v4
+76/392340/campos_512_v4
+76/392360/campos_512_v4
+76/392374/campos_512_v4
+76/392377/campos_512_v4
+76/392383/campos_512_v4
+76/392387/campos_512_v4
+76/392400/campos_512_v4
+76/392415/campos_512_v4
+76/392430/campos_512_v4
+76/392443/campos_512_v4
+76/392453/campos_512_v4
+76/392467/campos_512_v4
+76/392469/campos_512_v4
+76/392473/campos_512_v4
+76/392474/campos_512_v4
+76/392476/campos_512_v4
+76/392488/campos_512_v4
+76/392490/campos_512_v4
+76/392493/campos_512_v4
+76/392502/campos_512_v4
+76/392504/campos_512_v4
+76/392519/campos_512_v4
+76/392532/campos_512_v4
+76/392554/campos_512_v4
+76/392571/campos_512_v4
+76/392587/campos_512_v4
+76/392591/campos_512_v4
+76/392622/campos_512_v4
+76/392625/campos_512_v4
+76/392627/campos_512_v4
+76/392641/campos_512_v4
+76/392663/campos_512_v4
+76/392665/campos_512_v4
+76/392669/campos_512_v4
+76/392672/campos_512_v4
+76/392682/campos_512_v4
+76/392685/campos_512_v4
+76/392686/campos_512_v4
+76/392700/campos_512_v4
+76/392704/campos_512_v4
+76/392709/campos_512_v4
+76/392710/campos_512_v4
+76/392714/campos_512_v4
+76/392739/campos_512_v4
+76/392746/campos_512_v4
+76/392772/campos_512_v4
+76/392773/campos_512_v4
+76/392775/campos_512_v4
+76/392779/campos_512_v4
+76/392787/campos_512_v4
+76/392792/campos_512_v4
+76/392800/campos_512_v4
+76/392805/campos_512_v4
+76/392812/campos_512_v4
+76/392816/campos_512_v4
+76/392845/campos_512_v4
+76/392854/campos_512_v4
+76/392877/campos_512_v4
+76/392880/campos_512_v4
+76/392896/campos_512_v4
+76/392906/campos_512_v4
+76/392929/campos_512_v4
+76/392937/campos_512_v4
+76/392938/campos_512_v4
+76/392951/campos_512_v4
+76/392973/campos_512_v4
+76/392974/campos_512_v4
+76/392976/campos_512_v4
+76/392984/campos_512_v4
+76/393001/campos_512_v4
+76/393014/campos_512_v4
+76/393022/campos_512_v4
+76/393023/campos_512_v4
+76/393031/campos_512_v4
+76/393037/campos_512_v4
+76/393041/campos_512_v4
+76/393043/campos_512_v4
+76/393044/campos_512_v4
+76/393048/campos_512_v4
+76/393049/campos_512_v4
+76/393056/campos_512_v4
+76/393072/campos_512_v4
+76/393099/campos_512_v4
+76/393112/campos_512_v4
+76/393125/campos_512_v4
+76/393135/campos_512_v4
+76/393137/campos_512_v4
+76/393149/campos_512_v4
+76/393154/campos_512_v4
+76/393155/campos_512_v4
+76/393162/campos_512_v4
+76/393165/campos_512_v4
+76/393169/campos_512_v4
+76/393173/campos_512_v4
+76/393201/campos_512_v4
+76/393202/campos_512_v4
+76/393214/campos_512_v4
+76/393218/campos_512_v4
+76/393223/campos_512_v4
+76/393232/campos_512_v4
+76/393236/campos_512_v4
+76/393247/campos_512_v4
+76/393248/campos_512_v4
+76/393258/campos_512_v4
+76/393274/campos_512_v4
+76/393285/campos_512_v4
+76/393287/campos_512_v4
+76/393305/campos_512_v4
+76/393308/campos_512_v4
+76/393313/campos_512_v4
+76/393315/campos_512_v4
+76/393318/campos_512_v4
+76/393321/campos_512_v4
+76/393323/campos_512_v4
+76/393326/campos_512_v4
+76/393327/campos_512_v4
+76/393333/campos_512_v4
+76/393337/campos_512_v4
+76/393341/campos_512_v4
+76/393351/campos_512_v4
+76/393353/campos_512_v4
+76/393356/campos_512_v4
+76/393360/campos_512_v4
+76/393364/campos_512_v4
+76/393366/campos_512_v4
+76/393369/campos_512_v4
+76/393394/campos_512_v4
+76/393409/campos_512_v4
+76/393414/campos_512_v4
+76/393415/campos_512_v4
+76/393423/campos_512_v4
+76/393428/campos_512_v4
+76/393433/campos_512_v4
+76/393435/campos_512_v4
+76/393437/campos_512_v4
+76/393438/campos_512_v4
+76/393441/campos_512_v4
+76/393442/campos_512_v4
+76/393454/campos_512_v4
+76/393460/campos_512_v4
+76/393464/campos_512_v4
+76/393467/campos_512_v4
+76/393484/campos_512_v4
+76/393499/campos_512_v4
+76/393500/campos_512_v4
+76/393503/campos_512_v4
+76/393505/campos_512_v4
+76/393507/campos_512_v4
+76/393533/campos_512_v4
+76/393534/campos_512_v4
+76/393546/campos_512_v4
+76/393564/campos_512_v4
+76/393565/campos_512_v4
+76/393567/campos_512_v4
+76/393583/campos_512_v4
+76/393586/campos_512_v4
+76/393592/campos_512_v4
+76/393615/campos_512_v4
+76/393616/campos_512_v4
+76/393619/campos_512_v4
+76/393622/campos_512_v4
+76/393632/campos_512_v4
+76/393636/campos_512_v4
+76/393651/campos_512_v4
+76/393662/campos_512_v4
+76/393669/campos_512_v4
+76/393689/campos_512_v4
+76/393691/campos_512_v4
+76/393698/campos_512_v4
+76/393703/campos_512_v4
+76/393717/campos_512_v4
+76/393723/campos_512_v4
+76/393725/campos_512_v4
+76/393730/campos_512_v4
+76/393742/campos_512_v4
+76/393743/campos_512_v4
+76/393744/campos_512_v4
+76/393745/campos_512_v4
+76/393762/campos_512_v4
+76/393787/campos_512_v4
+76/393796/campos_512_v4
+76/393812/campos_512_v4
+76/393814/campos_512_v4
+76/393815/campos_512_v4
+76/393828/campos_512_v4
+76/393830/campos_512_v4
+76/393832/campos_512_v4
+76/393836/campos_512_v4
+76/393848/campos_512_v4
+76/393863/campos_512_v4
+76/393903/campos_512_v4
+76/393918/campos_512_v4
+76/393924/campos_512_v4
+76/393936/campos_512_v4
+76/393959/campos_512_v4
+76/393961/campos_512_v4
+76/393966/campos_512_v4
+76/393973/campos_512_v4
+76/393983/campos_512_v4
+76/393984/campos_512_v4
+76/393985/campos_512_v4
+76/393994/campos_512_v4
+76/394012/campos_512_v4
+76/394024/campos_512_v4
+76/394037/campos_512_v4
+76/394038/campos_512_v4
+76/394040/campos_512_v4
+76/394044/campos_512_v4
+76/394049/campos_512_v4
+76/394067/campos_512_v4
+76/394098/campos_512_v4
+76/394113/campos_512_v4
+76/394150/campos_512_v4
+76/394154/campos_512_v4
+76/394157/campos_512_v4
+76/394166/campos_512_v4
+76/394169/campos_512_v4
+76/394171/campos_512_v4
+76/394180/campos_512_v4
+76/394184/campos_512_v4
+76/394209/campos_512_v4
+76/394216/campos_512_v4
+76/394220/campos_512_v4
+76/394224/campos_512_v4
+76/394225/campos_512_v4
+76/394244/campos_512_v4
+76/394253/campos_512_v4
+76/394290/campos_512_v4
+76/394291/campos_512_v4
+76/394292/campos_512_v4
+76/394337/campos_512_v4
+76/394343/campos_512_v4
+76/394352/campos_512_v4
+76/394357/campos_512_v4
+76/394365/campos_512_v4
+76/394367/campos_512_v4
+76/394371/campos_512_v4
+76/394374/campos_512_v4
+76/394379/campos_512_v4
+76/394381/campos_512_v4
+76/394385/campos_512_v4
+76/394388/campos_512_v4
+76/394391/campos_512_v4
+76/394401/campos_512_v4
+76/394406/campos_512_v4
+76/394424/campos_512_v4
+76/394425/campos_512_v4
+76/394432/campos_512_v4
+76/394434/campos_512_v4
+76/394439/campos_512_v4
+76/394455/campos_512_v4
+76/394458/campos_512_v4
+76/394479/campos_512_v4
+76/394480/campos_512_v4
+76/394495/campos_512_v4
+76/394505/campos_512_v4
+76/394510/campos_512_v4
+76/394544/campos_512_v4
+76/394546/campos_512_v4
+76/394558/campos_512_v4
+76/394559/campos_512_v4
+76/394561/campos_512_v4
+76/394567/campos_512_v4
+76/394602/campos_512_v4
+76/394603/campos_512_v4
+76/394618/campos_512_v4
+76/394639/campos_512_v4
+76/394660/campos_512_v4
+76/394665/campos_512_v4
+76/394666/campos_512_v4
+76/394670/campos_512_v4
+76/394680/campos_512_v4
+76/394684/campos_512_v4
+76/394686/campos_512_v4
+76/394695/campos_512_v4
+76/394699/campos_512_v4
+76/394703/campos_512_v4
+76/394704/campos_512_v4
+76/394705/campos_512_v4
+76/394717/campos_512_v4
+76/394719/campos_512_v4
+76/394743/campos_512_v4
+76/394753/campos_512_v4
+76/394756/campos_512_v4
+76/394757/campos_512_v4
+76/394762/campos_512_v4
+76/394794/campos_512_v4
+76/394804/campos_512_v4
+76/394806/campos_512_v4
+76/394813/campos_512_v4
+76/394817/campos_512_v4
+76/394825/campos_512_v4
+76/394828/campos_512_v4
+76/394830/campos_512_v4
+76/394833/campos_512_v4
+76/394836/campos_512_v4
+76/394847/campos_512_v4
+76/394850/campos_512_v4
+76/394851/campos_512_v4
+76/394855/campos_512_v4
+76/394856/campos_512_v4
+76/394860/campos_512_v4
+76/394861/campos_512_v4
+76/394864/campos_512_v4
+76/394870/campos_512_v4
+76/394886/campos_512_v4
+76/394887/campos_512_v4
+76/394897/campos_512_v4
+76/394899/campos_512_v4
+76/394932/campos_512_v4
+76/394935/campos_512_v4
+76/394940/campos_512_v4
+76/394941/campos_512_v4
+76/394955/campos_512_v4
+76/394962/campos_512_v4
+76/394981/campos_512_v4
+76/394983/campos_512_v4
+76/394985/campos_512_v4
+76/394987/campos_512_v4
+76/394997/campos_512_v4
+77/395006/campos_512_v4
+77/395012/campos_512_v4
+77/395017/campos_512_v4
+77/395018/campos_512_v4
+77/395021/campos_512_v4
+77/395031/campos_512_v4
+77/395052/campos_512_v4
+77/395057/campos_512_v4
+77/395073/campos_512_v4
+77/395078/campos_512_v4
+77/395084/campos_512_v4
+77/395090/campos_512_v4
+77/395097/campos_512_v4
+77/395100/campos_512_v4
+77/395102/campos_512_v4
+77/395114/campos_512_v4
+77/395119/campos_512_v4
+77/395121/campos_512_v4
+77/395126/campos_512_v4
+77/395136/campos_512_v4
+77/395154/campos_512_v4
+77/395174/campos_512_v4
+77/395182/campos_512_v4
+77/395210/campos_512_v4
+77/395227/campos_512_v4
+77/395234/campos_512_v4
+77/395248/campos_512_v4
+77/395259/campos_512_v4
+77/395265/campos_512_v4
+77/395266/campos_512_v4
+77/395269/campos_512_v4
+77/395274/campos_512_v4
+77/395277/campos_512_v4
+77/395280/campos_512_v4
+77/395290/campos_512_v4
+77/395295/campos_512_v4
+77/395316/campos_512_v4
+77/395349/campos_512_v4
+77/395351/campos_512_v4
+77/395356/campos_512_v4
+77/395367/campos_512_v4
+77/395376/campos_512_v4
+77/395380/campos_512_v4
+77/395393/campos_512_v4
+77/395396/campos_512_v4
+77/395403/campos_512_v4
+77/395413/campos_512_v4
+77/395419/campos_512_v4
+77/395423/campos_512_v4
+77/395431/campos_512_v4
+77/395442/campos_512_v4
+77/395445/campos_512_v4
+77/395455/campos_512_v4
+77/395468/campos_512_v4
+77/395478/campos_512_v4
+77/395483/campos_512_v4
+77/395488/campos_512_v4
+77/395496/campos_512_v4
+77/395506/campos_512_v4
+77/395508/campos_512_v4
+77/395509/campos_512_v4
+77/395514/campos_512_v4
+77/395515/campos_512_v4
+77/395518/campos_512_v4
+77/395527/campos_512_v4
+77/395534/campos_512_v4
+77/395544/campos_512_v4
+77/395551/campos_512_v4
+77/395553/campos_512_v4
+77/395556/campos_512_v4
+77/395566/campos_512_v4
+77/395571/campos_512_v4
+77/395576/campos_512_v4
+77/395581/campos_512_v4
+77/395585/campos_512_v4
+77/395593/campos_512_v4
+77/395604/campos_512_v4
+77/395606/campos_512_v4
+77/395607/campos_512_v4
+77/395610/campos_512_v4
+77/395619/campos_512_v4
+77/395620/campos_512_v4
+77/395625/campos_512_v4
+77/395628/campos_512_v4
+77/395638/campos_512_v4
+77/395640/campos_512_v4
+77/395643/campos_512_v4
+77/395667/campos_512_v4
+77/395670/campos_512_v4
+77/395673/campos_512_v4
+77/395675/campos_512_v4
+77/395677/campos_512_v4
+77/395694/campos_512_v4
+77/395696/campos_512_v4
+77/395707/campos_512_v4
+77/395720/campos_512_v4
+77/395757/campos_512_v4
+77/395775/campos_512_v4
+77/395828/campos_512_v4
+77/395832/campos_512_v4
+77/395833/campos_512_v4
+77/395836/campos_512_v4
+77/395868/campos_512_v4
+77/395871/campos_512_v4
+77/395900/campos_512_v4
+77/395907/campos_512_v4
+77/395927/campos_512_v4
+77/395929/campos_512_v4
+77/395939/campos_512_v4
+77/395952/campos_512_v4
+77/395959/campos_512_v4
+77/395963/campos_512_v4
+77/395966/campos_512_v4
+77/395967/campos_512_v4
+77/395972/campos_512_v4
+77/395977/campos_512_v4
+77/395987/campos_512_v4
+77/396000/campos_512_v4
+77/396003/campos_512_v4
+77/396006/campos_512_v4
+77/396011/campos_512_v4
+77/396017/campos_512_v4
+77/396018/campos_512_v4
+77/396022/campos_512_v4
+77/396027/campos_512_v4
+77/396030/campos_512_v4
+77/396031/campos_512_v4
+77/396032/campos_512_v4
+77/396039/campos_512_v4
+77/396056/campos_512_v4
+77/396057/campos_512_v4
+77/396067/campos_512_v4
+77/396070/campos_512_v4
+77/396071/campos_512_v4
+77/396087/campos_512_v4
+77/396096/campos_512_v4
+77/396106/campos_512_v4
+77/396107/campos_512_v4
+77/396109/campos_512_v4
+77/396120/campos_512_v4
+77/396142/campos_512_v4
+77/396143/campos_512_v4
+77/396146/campos_512_v4
+77/396149/campos_512_v4
+77/396150/campos_512_v4
+77/396152/campos_512_v4
+77/396156/campos_512_v4
+77/396160/campos_512_v4
+77/396164/campos_512_v4
+77/396175/campos_512_v4
+77/396185/campos_512_v4
+77/396187/campos_512_v4
+77/396220/campos_512_v4
+77/396224/campos_512_v4
+77/396232/campos_512_v4
+77/396246/campos_512_v4
+77/396263/campos_512_v4
+77/396271/campos_512_v4
+77/396281/campos_512_v4
+77/396287/campos_512_v4
+77/396293/campos_512_v4
+77/396294/campos_512_v4
+77/396323/campos_512_v4
+77/396326/campos_512_v4
+77/396336/campos_512_v4
+77/396340/campos_512_v4
+77/396342/campos_512_v4
+77/396348/campos_512_v4
+77/396362/campos_512_v4
+77/396385/campos_512_v4
+77/396403/campos_512_v4
+77/396414/campos_512_v4
+77/396437/campos_512_v4
+77/396438/campos_512_v4
+77/396442/campos_512_v4
+77/396447/campos_512_v4
+77/396448/campos_512_v4
+77/396450/campos_512_v4
+77/396462/campos_512_v4
+77/396465/campos_512_v4
+77/396466/campos_512_v4
+77/396470/campos_512_v4
+77/396485/campos_512_v4
+77/396490/campos_512_v4
+77/396506/campos_512_v4
+77/396514/campos_512_v4
+77/396527/campos_512_v4
+77/396539/campos_512_v4
+77/396560/campos_512_v4
+77/396564/campos_512_v4
+77/396568/campos_512_v4
+77/396576/campos_512_v4
+77/396578/campos_512_v4
+77/396579/campos_512_v4
+77/396591/campos_512_v4
+77/396592/campos_512_v4
+77/396593/campos_512_v4
+77/396601/campos_512_v4
+77/396605/campos_512_v4
+77/396608/campos_512_v4
+77/396619/campos_512_v4
+77/396625/campos_512_v4
+77/396628/campos_512_v4
+77/396634/campos_512_v4
+77/396659/campos_512_v4
+77/396690/campos_512_v4
+77/396702/campos_512_v4
+77/396711/campos_512_v4
+77/396736/campos_512_v4
+77/396745/campos_512_v4
+77/396746/campos_512_v4
+77/396748/campos_512_v4
+77/396751/campos_512_v4
+77/396752/campos_512_v4
+77/396762/campos_512_v4
+77/396764/campos_512_v4
+77/396768/campos_512_v4
+77/396770/campos_512_v4
+77/396774/campos_512_v4
+77/396779/campos_512_v4
+77/396784/campos_512_v4
+77/396788/campos_512_v4
+77/396789/campos_512_v4
+77/396796/campos_512_v4
+77/396816/campos_512_v4
+77/396833/campos_512_v4
+77/396848/campos_512_v4
+77/396860/campos_512_v4
+77/396861/campos_512_v4
+77/396866/campos_512_v4
+77/396869/campos_512_v4
+77/396873/campos_512_v4
+77/396903/campos_512_v4
+77/396908/campos_512_v4
+77/396916/campos_512_v4
+77/396928/campos_512_v4
+77/396938/campos_512_v4
+77/396943/campos_512_v4
+77/396947/campos_512_v4
+77/396949/campos_512_v4
+77/396956/campos_512_v4
+77/396964/campos_512_v4
+77/396969/campos_512_v4
+77/396974/campos_512_v4
+77/396976/campos_512_v4
+77/396980/campos_512_v4
+77/397006/campos_512_v4
+77/397010/campos_512_v4
+77/397012/campos_512_v4
+77/397020/campos_512_v4
+77/397026/campos_512_v4
+77/397042/campos_512_v4
+77/397060/campos_512_v4
+77/397080/campos_512_v4
+77/397101/campos_512_v4
+77/397113/campos_512_v4
+77/397118/campos_512_v4
+77/397123/campos_512_v4
+77/397128/campos_512_v4
+77/397155/campos_512_v4
+77/397160/campos_512_v4
+77/397166/campos_512_v4
+77/397189/campos_512_v4
+77/397192/campos_512_v4
+77/397196/campos_512_v4
+77/397207/campos_512_v4
+77/397209/campos_512_v4
+77/397212/campos_512_v4
+77/397219/campos_512_v4
+77/397222/campos_512_v4
+77/397226/campos_512_v4
+77/397237/campos_512_v4
+77/397256/campos_512_v4
+77/397261/campos_512_v4
+77/397278/campos_512_v4
+77/397283/campos_512_v4
+77/397292/campos_512_v4
+77/397294/campos_512_v4
+77/397296/campos_512_v4
+77/397304/campos_512_v4
+77/397317/campos_512_v4
+77/397332/campos_512_v4
+77/397333/campos_512_v4
+77/397335/campos_512_v4
+77/397345/campos_512_v4
+77/397361/campos_512_v4
+77/397369/campos_512_v4
+77/397373/campos_512_v4
+77/397377/campos_512_v4
+77/397383/campos_512_v4
+77/397394/campos_512_v4
+77/397399/campos_512_v4
+77/397401/campos_512_v4
+77/397402/campos_512_v4
+77/397405/campos_512_v4
+77/397411/campos_512_v4
+77/397418/campos_512_v4
+77/397420/campos_512_v4
+77/397432/campos_512_v4
+77/397433/campos_512_v4
+77/397435/campos_512_v4
+77/397455/campos_512_v4
+77/397464/campos_512_v4
+77/397473/campos_512_v4
+77/397486/campos_512_v4
+77/397488/campos_512_v4
+77/397492/campos_512_v4
+77/397493/campos_512_v4
+77/397499/campos_512_v4
+77/397505/campos_512_v4
+77/397510/campos_512_v4
+77/397522/campos_512_v4
+77/397527/campos_512_v4
+77/397530/campos_512_v4
+77/397531/campos_512_v4
+77/397538/campos_512_v4
+77/397543/campos_512_v4
+77/397549/campos_512_v4
+77/397556/campos_512_v4
+77/397559/campos_512_v4
+77/397574/campos_512_v4
+77/397575/campos_512_v4
+77/397605/campos_512_v4
+77/397615/campos_512_v4
+77/397641/campos_512_v4
+77/397642/campos_512_v4
+77/397643/campos_512_v4
+77/397644/campos_512_v4
+77/397656/campos_512_v4
+77/397662/campos_512_v4
+77/397663/campos_512_v4
+77/397668/campos_512_v4
+77/397678/campos_512_v4
+77/397683/campos_512_v4
+77/397707/campos_512_v4
+77/397710/campos_512_v4
+77/397715/campos_512_v4
+77/397720/campos_512_v4
+77/397722/campos_512_v4
+77/397730/campos_512_v4
+77/397743/campos_512_v4
+77/397774/campos_512_v4
+77/397775/campos_512_v4
+77/397794/campos_512_v4
+77/397800/campos_512_v4
+77/397801/campos_512_v4
+77/397811/campos_512_v4
+77/397822/campos_512_v4
+77/397840/campos_512_v4
+77/397870/campos_512_v4
+77/397881/campos_512_v4
+77/397889/campos_512_v4
+77/397894/campos_512_v4
+77/397904/campos_512_v4
+77/397909/campos_512_v4
+77/397920/campos_512_v4
+77/397926/campos_512_v4
+77/397929/campos_512_v4
+77/397965/campos_512_v4
+77/397967/campos_512_v4
+77/397983/campos_512_v4
+77/397995/campos_512_v4
+77/397999/campos_512_v4
+77/398013/campos_512_v4
+77/398024/campos_512_v4
+77/398031/campos_512_v4
+77/398041/campos_512_v4
+77/398059/campos_512_v4
+77/398069/campos_512_v4
+77/398080/campos_512_v4
+77/398084/campos_512_v4
+77/398093/campos_512_v4
+77/398099/campos_512_v4
+77/398102/campos_512_v4
+77/398107/campos_512_v4
+77/398110/campos_512_v4
+77/398112/campos_512_v4
+77/398113/campos_512_v4
+77/398117/campos_512_v4
+77/398124/campos_512_v4
+77/398125/campos_512_v4
+77/398131/campos_512_v4
+77/398133/campos_512_v4
+77/398143/campos_512_v4
+77/398154/campos_512_v4
+77/398157/campos_512_v4
+77/398158/campos_512_v4
+77/398163/campos_512_v4
+77/398169/campos_512_v4
+77/398178/campos_512_v4
+77/398185/campos_512_v4
+77/398187/campos_512_v4
+77/398191/campos_512_v4
+77/398198/campos_512_v4
+77/398202/campos_512_v4
+77/398217/campos_512_v4
+77/398221/campos_512_v4
+77/398224/campos_512_v4
+77/398227/campos_512_v4
+77/398257/campos_512_v4
+77/398268/campos_512_v4
+77/398274/campos_512_v4
+77/398275/campos_512_v4
+77/398279/campos_512_v4
+77/398280/campos_512_v4
+77/398285/campos_512_v4
+77/398294/campos_512_v4
+77/398303/campos_512_v4
+77/398305/campos_512_v4
+77/398315/campos_512_v4
+77/398316/campos_512_v4
+77/398336/campos_512_v4
+77/398338/campos_512_v4
+77/398353/campos_512_v4
+77/398376/campos_512_v4
+77/398381/campos_512_v4
+77/398383/campos_512_v4
+77/398401/campos_512_v4
+77/398402/campos_512_v4
+77/398406/campos_512_v4
+77/398425/campos_512_v4
+77/398435/campos_512_v4
+77/398449/campos_512_v4
+77/398457/campos_512_v4
+77/398461/campos_512_v4
+77/398465/campos_512_v4
+77/398468/campos_512_v4
+77/398485/campos_512_v4
+77/398488/campos_512_v4
+77/398494/campos_512_v4
+77/398516/campos_512_v4
+77/398532/campos_512_v4
+77/398541/campos_512_v4
+77/398547/campos_512_v4
+77/398550/campos_512_v4
+77/398554/campos_512_v4
+77/398557/campos_512_v4
+77/398562/campos_512_v4
+77/398578/campos_512_v4
+77/398583/campos_512_v4
+77/398600/campos_512_v4
+77/398601/campos_512_v4
+77/398603/campos_512_v4
+77/398612/campos_512_v4
+77/398614/campos_512_v4
+77/398622/campos_512_v4
+77/398625/campos_512_v4
+77/398633/campos_512_v4
+77/398642/campos_512_v4
+77/398650/campos_512_v4
+77/398655/campos_512_v4
+77/398663/campos_512_v4
+77/398671/campos_512_v4
+77/398691/campos_512_v4
+77/398706/campos_512_v4
+77/398719/campos_512_v4
+77/398743/campos_512_v4
+77/398750/campos_512_v4
+77/398753/campos_512_v4
+77/398758/campos_512_v4
+77/398761/campos_512_v4
+77/398768/campos_512_v4
+77/398777/campos_512_v4
+77/398782/campos_512_v4
+77/398785/campos_512_v4
+77/398801/campos_512_v4
+77/398812/campos_512_v4
+77/398815/campos_512_v4
+77/398817/campos_512_v4
+77/398818/campos_512_v4
+77/398828/campos_512_v4
+77/398832/campos_512_v4
+77/398843/campos_512_v4
+77/398849/campos_512_v4
+77/398867/campos_512_v4
+77/398870/campos_512_v4
+77/398871/campos_512_v4
+77/398885/campos_512_v4
+77/398893/campos_512_v4
+77/398895/campos_512_v4
+77/398897/campos_512_v4
+77/398900/campos_512_v4
+77/398910/campos_512_v4
+77/398911/campos_512_v4
+77/398913/campos_512_v4
+77/398922/campos_512_v4
+77/398928/campos_512_v4
+77/398934/campos_512_v4
+77/398940/campos_512_v4
+77/398950/campos_512_v4
+77/398956/campos_512_v4
+77/398961/campos_512_v4
+77/398969/campos_512_v4
+77/398985/campos_512_v4
+77/398988/campos_512_v4
+77/399006/campos_512_v4
+77/399008/campos_512_v4
+77/399009/campos_512_v4
+77/399010/campos_512_v4
+77/399013/campos_512_v4
+77/399014/campos_512_v4
+77/399022/campos_512_v4
+77/399028/campos_512_v4
+77/399034/campos_512_v4
+77/399035/campos_512_v4
+77/399040/campos_512_v4
+77/399047/campos_512_v4
+77/399050/campos_512_v4
+77/399052/campos_512_v4
+77/399055/campos_512_v4
+77/399062/campos_512_v4
+77/399067/campos_512_v4
+77/399088/campos_512_v4
+77/399091/campos_512_v4
+77/399097/campos_512_v4
+77/399109/campos_512_v4
+77/399118/campos_512_v4
+77/399132/campos_512_v4
+77/399147/campos_512_v4
+77/399152/campos_512_v4
+77/399177/campos_512_v4
+77/399206/campos_512_v4
+77/399219/campos_512_v4
+77/399224/campos_512_v4
+77/399248/campos_512_v4
+77/399251/campos_512_v4
+77/399258/campos_512_v4
+77/399259/campos_512_v4
+77/399261/campos_512_v4
+77/399263/campos_512_v4
+77/399274/campos_512_v4
+77/399278/campos_512_v4
+77/399281/campos_512_v4
+77/399289/campos_512_v4
+77/399292/campos_512_v4
+77/399298/campos_512_v4
+77/399304/campos_512_v4
+77/399307/campos_512_v4
+77/399316/campos_512_v4
+77/399319/campos_512_v4
+77/399324/campos_512_v4
+77/399327/campos_512_v4
+77/399328/campos_512_v4
+77/399334/campos_512_v4
+77/399350/campos_512_v4
+77/399378/campos_512_v4
+77/399388/campos_512_v4
+77/399390/campos_512_v4
+77/399391/campos_512_v4
+77/399395/campos_512_v4
+77/399412/campos_512_v4
+77/399415/campos_512_v4
+77/399430/campos_512_v4
+77/399431/campos_512_v4
+77/399437/campos_512_v4
+77/399442/campos_512_v4
+77/399450/campos_512_v4
+77/399460/campos_512_v4
+77/399476/campos_512_v4
+77/399491/campos_512_v4
+77/399504/campos_512_v4
+77/399507/campos_512_v4
+77/399515/campos_512_v4
+77/399520/campos_512_v4
+77/399549/campos_512_v4
+77/399554/campos_512_v4
+77/399564/campos_512_v4
+77/399568/campos_512_v4
+77/399581/campos_512_v4
+77/399584/campos_512_v4
+77/399592/campos_512_v4
+77/399596/campos_512_v4
+77/399607/campos_512_v4
+77/399620/campos_512_v4
+77/399622/campos_512_v4
+77/399659/campos_512_v4
+77/399671/campos_512_v4
+77/399681/campos_512_v4
+77/399695/campos_512_v4
+77/399709/campos_512_v4
+77/399710/campos_512_v4
+77/399735/campos_512_v4
+77/399744/campos_512_v4
+77/399749/campos_512_v4
+77/399777/campos_512_v4
+77/399783/campos_512_v4
+77/399795/campos_512_v4
+77/399802/campos_512_v4
+77/399811/campos_512_v4
+77/399827/campos_512_v4
+77/399834/campos_512_v4
+77/399836/campos_512_v4
+77/399849/campos_512_v4
+77/399881/campos_512_v4
+77/399884/campos_512_v4
+77/399921/campos_512_v4
+77/399924/campos_512_v4
+77/399932/campos_512_v4
+77/399942/campos_512_v4
+77/399961/campos_512_v4
+77/399982/campos_512_v4
+77/399987/campos_512_v4
+77/399988/campos_512_v4
+77/399992/campos_512_v4
+78/400007/campos_512_v4
+78/400019/campos_512_v4
+78/400026/campos_512_v4
+78/400030/campos_512_v4
+78/400039/campos_512_v4
+78/400051/campos_512_v4
+78/400061/campos_512_v4
+78/400068/campos_512_v4
+78/400073/campos_512_v4
+78/400076/campos_512_v4
+78/400082/campos_512_v4
+78/400085/campos_512_v4
+78/400090/campos_512_v4
+78/400100/campos_512_v4
+78/400110/campos_512_v4
+78/400114/campos_512_v4
+78/400130/campos_512_v4
+78/400135/campos_512_v4
+78/400137/campos_512_v4
+78/400142/campos_512_v4
+78/400144/campos_512_v4
+78/400145/campos_512_v4
+78/400156/campos_512_v4
+78/400168/campos_512_v4
+78/400172/campos_512_v4
+78/400173/campos_512_v4
+78/400178/campos_512_v4
+78/400179/campos_512_v4
+78/400180/campos_512_v4
+78/400181/campos_512_v4
+78/400191/campos_512_v4
+78/400245/campos_512_v4
+78/400246/campos_512_v4
+78/400249/campos_512_v4
+78/400259/campos_512_v4
+78/400260/campos_512_v4
+78/400265/campos_512_v4
+78/400285/campos_512_v4
+78/400297/campos_512_v4
+78/400302/campos_512_v4
+78/400321/campos_512_v4
+78/400322/campos_512_v4
+78/400324/campos_512_v4
+78/400329/campos_512_v4
+78/400330/campos_512_v4
+78/400336/campos_512_v4
+78/400344/campos_512_v4
+78/400348/campos_512_v4
+78/400382/campos_512_v4
+78/400383/campos_512_v4
+78/400392/campos_512_v4
+78/400397/campos_512_v4
+78/400399/campos_512_v4
+78/400404/campos_512_v4
+78/400409/campos_512_v4
+78/400413/campos_512_v4
+78/400415/campos_512_v4
+78/400417/campos_512_v4
+78/400420/campos_512_v4
+78/400422/campos_512_v4
+78/400434/campos_512_v4
+78/400440/campos_512_v4
+78/400441/campos_512_v4
+78/400451/campos_512_v4
+78/400455/campos_512_v4
+78/400460/campos_512_v4
+78/400461/campos_512_v4
+78/400471/campos_512_v4
+78/400485/campos_512_v4
+78/400489/campos_512_v4
+78/400492/campos_512_v4
+78/400497/campos_512_v4
+78/400504/campos_512_v4
+78/400517/campos_512_v4
+78/400521/campos_512_v4
+78/400538/campos_512_v4
+78/400545/campos_512_v4
+78/400558/campos_512_v4
+78/400563/campos_512_v4
+78/400566/campos_512_v4
+78/400569/campos_512_v4
+78/400573/campos_512_v4
+78/400598/campos_512_v4
+78/400608/campos_512_v4
+78/400612/campos_512_v4
+78/400621/campos_512_v4
+78/400630/campos_512_v4
+78/400633/campos_512_v4
+78/400655/campos_512_v4
+78/400662/campos_512_v4
+78/400668/campos_512_v4
+78/400677/campos_512_v4
+78/400684/campos_512_v4
+78/400685/campos_512_v4
+78/400699/campos_512_v4
+78/400703/campos_512_v4
+78/400709/campos_512_v4
+78/400723/campos_512_v4
+78/400735/campos_512_v4
+78/400742/campos_512_v4
+78/400746/campos_512_v4
+78/400750/campos_512_v4
+78/400751/campos_512_v4
+78/400757/campos_512_v4
+78/400789/campos_512_v4
+78/400794/campos_512_v4
+78/400813/campos_512_v4
+78/400821/campos_512_v4
+78/400822/campos_512_v4
+78/400823/campos_512_v4
+78/400825/campos_512_v4
+78/400827/campos_512_v4
+78/400828/campos_512_v4
+78/400842/campos_512_v4
+78/400856/campos_512_v4
+78/400859/campos_512_v4
+78/400861/campos_512_v4
+78/400872/campos_512_v4
+78/400893/campos_512_v4
+78/400894/campos_512_v4
+78/400900/campos_512_v4
+78/400912/campos_512_v4
+78/400913/campos_512_v4
+78/400914/campos_512_v4
+78/400933/campos_512_v4
+78/400951/campos_512_v4
+78/400955/campos_512_v4
+78/400960/campos_512_v4
+78/400965/campos_512_v4
+78/400972/campos_512_v4
+78/400974/campos_512_v4
+78/400975/campos_512_v4
+78/400982/campos_512_v4
+78/400993/campos_512_v4
+78/401005/campos_512_v4
+78/401006/campos_512_v4
+78/401033/campos_512_v4
+78/401042/campos_512_v4
+78/401045/campos_512_v4
+78/401048/campos_512_v4
+78/401059/campos_512_v4
+78/401061/campos_512_v4
+78/401072/campos_512_v4
+78/401073/campos_512_v4
+78/401077/campos_512_v4
+78/401093/campos_512_v4
+78/401094/campos_512_v4
+78/401119/campos_512_v4
+78/401120/campos_512_v4
+78/401122/campos_512_v4
+78/401134/campos_512_v4
+78/401139/campos_512_v4
+78/401164/campos_512_v4
+78/401165/campos_512_v4
+78/401168/campos_512_v4
+78/401175/campos_512_v4
+78/401184/campos_512_v4
+78/401189/campos_512_v4
+78/401197/campos_512_v4
+78/401198/campos_512_v4
+78/401200/campos_512_v4
+78/401206/campos_512_v4
+78/401207/campos_512_v4
+78/401209/campos_512_v4
+78/401212/campos_512_v4
+78/401214/campos_512_v4
+78/401222/campos_512_v4
+78/401231/campos_512_v4
+78/401233/campos_512_v4
+78/401236/campos_512_v4
+78/401249/campos_512_v4
+78/401252/campos_512_v4
+78/401260/campos_512_v4
+78/401264/campos_512_v4
+78/401282/campos_512_v4
+78/401301/campos_512_v4
+78/401303/campos_512_v4
+78/401308/campos_512_v4
+78/401315/campos_512_v4
+78/401320/campos_512_v4
+78/401326/campos_512_v4
+78/401331/campos_512_v4
+78/401332/campos_512_v4
+78/401333/campos_512_v4
+78/401339/campos_512_v4
+78/401343/campos_512_v4
+78/401347/campos_512_v4
+78/401354/campos_512_v4
+78/401372/campos_512_v4
+78/401381/campos_512_v4
+78/401384/campos_512_v4
+78/401385/campos_512_v4
+78/401389/campos_512_v4
+78/401418/campos_512_v4
+78/401422/campos_512_v4
+78/401424/campos_512_v4
+78/401434/campos_512_v4
+78/401438/campos_512_v4
+78/401439/campos_512_v4
+78/401441/campos_512_v4
+78/401446/campos_512_v4
+78/401447/campos_512_v4
+78/401453/campos_512_v4
+78/401459/campos_512_v4
+78/401463/campos_512_v4
+78/401493/campos_512_v4
+78/401495/campos_512_v4
+78/401501/campos_512_v4
+78/401506/campos_512_v4
+78/401512/campos_512_v4
+78/401522/campos_512_v4
+78/401523/campos_512_v4
+78/401534/campos_512_v4
+78/401539/campos_512_v4
+78/401542/campos_512_v4
+78/401559/campos_512_v4
+78/401560/campos_512_v4
+78/401566/campos_512_v4
+78/401577/campos_512_v4
+78/401581/campos_512_v4
+78/401595/campos_512_v4
+78/401596/campos_512_v4
+78/401616/campos_512_v4
+78/401618/campos_512_v4
+78/401621/campos_512_v4
+78/401631/campos_512_v4
+78/401634/campos_512_v4
+78/401650/campos_512_v4
+78/401674/campos_512_v4
+78/401695/campos_512_v4
+78/401699/campos_512_v4
+78/401706/campos_512_v4
+78/401709/campos_512_v4
+78/401723/campos_512_v4
+78/401725/campos_512_v4
+78/401733/campos_512_v4
+78/401737/campos_512_v4
+78/401740/campos_512_v4
+78/401747/campos_512_v4
+78/401765/campos_512_v4
+78/401782/campos_512_v4
+78/401788/campos_512_v4
+78/401789/campos_512_v4
+78/401808/campos_512_v4
+78/401809/campos_512_v4
+78/401810/campos_512_v4
+78/401813/campos_512_v4
+78/401815/campos_512_v4
+78/401827/campos_512_v4
+78/401834/campos_512_v4
+78/401848/campos_512_v4
+78/401850/campos_512_v4
+78/401857/campos_512_v4
+78/401859/campos_512_v4
+78/401864/campos_512_v4
+78/401882/campos_512_v4
+78/401913/campos_512_v4
+78/401916/campos_512_v4
+78/401929/campos_512_v4
+78/401934/campos_512_v4
+78/401935/campos_512_v4
+78/401938/campos_512_v4
+78/401945/campos_512_v4
+78/401949/campos_512_v4
+78/401950/campos_512_v4
+78/401953/campos_512_v4
+78/401959/campos_512_v4
+78/401977/campos_512_v4
+78/401986/campos_512_v4
+78/401992/campos_512_v4
+78/401993/campos_512_v4
+78/401998/campos_512_v4
+78/402011/campos_512_v4
+78/402014/campos_512_v4
+78/402020/campos_512_v4
+78/402040/campos_512_v4
+78/402045/campos_512_v4
+78/402046/campos_512_v4
+78/402050/campos_512_v4
+78/402055/campos_512_v4
+78/402057/campos_512_v4
+78/402064/campos_512_v4
+78/402067/campos_512_v4
+78/402092/campos_512_v4
+78/402099/campos_512_v4
+78/402100/campos_512_v4
+78/402101/campos_512_v4
+78/402109/campos_512_v4
+78/402111/campos_512_v4
+78/402122/campos_512_v4
+78/402131/campos_512_v4
+78/402134/campos_512_v4
+78/402144/campos_512_v4
+78/402149/campos_512_v4
+78/402152/campos_512_v4
+78/402162/campos_512_v4
+78/402172/campos_512_v4
+78/402188/campos_512_v4
+78/402197/campos_512_v4
+78/402202/campos_512_v4
+78/402207/campos_512_v4
+78/402209/campos_512_v4
+78/402211/campos_512_v4
+78/402273/campos_512_v4
+78/402274/campos_512_v4
+78/402307/campos_512_v4
+78/402310/campos_512_v4
+78/402311/campos_512_v4
+78/402312/campos_512_v4
+78/402327/campos_512_v4
+78/402334/campos_512_v4
+78/402336/campos_512_v4
+78/402344/campos_512_v4
+78/402350/campos_512_v4
+78/402352/campos_512_v4
+78/402365/campos_512_v4
+78/402372/campos_512_v4
+78/402373/campos_512_v4
+78/402376/campos_512_v4
+78/402384/campos_512_v4
+78/402394/campos_512_v4
+78/402402/campos_512_v4
+78/402410/campos_512_v4
+78/402423/campos_512_v4
+78/402427/campos_512_v4
+78/402432/campos_512_v4
+78/402435/campos_512_v4
+78/402437/campos_512_v4
+78/402444/campos_512_v4
+78/402459/campos_512_v4
+78/402478/campos_512_v4
+78/402490/campos_512_v4
+78/402494/campos_512_v4
+78/402499/campos_512_v4
+78/402500/campos_512_v4
+78/402515/campos_512_v4
+78/402524/campos_512_v4
+78/402527/campos_512_v4
+78/402539/campos_512_v4
+78/402555/campos_512_v4
+78/402556/campos_512_v4
+78/402557/campos_512_v4
+78/402581/campos_512_v4
+78/402582/campos_512_v4
+78/402585/campos_512_v4
+78/402595/campos_512_v4
+78/402601/campos_512_v4
+78/402612/campos_512_v4
+78/402615/campos_512_v4
+78/402641/campos_512_v4
+78/402650/campos_512_v4
+78/402662/campos_512_v4
+78/402678/campos_512_v4
+78/402681/campos_512_v4
+78/402684/campos_512_v4
+78/402687/campos_512_v4
+78/402689/campos_512_v4
+78/402703/campos_512_v4
+78/402711/campos_512_v4
+78/402715/campos_512_v4
+78/402736/campos_512_v4
+78/402740/campos_512_v4
+78/402746/campos_512_v4
+78/402761/campos_512_v4
+78/402762/campos_512_v4
+78/402777/campos_512_v4
+78/402796/campos_512_v4
+78/402802/campos_512_v4
+78/402807/campos_512_v4
+78/402813/campos_512_v4
+78/402816/campos_512_v4
+78/402817/campos_512_v4
+78/402820/campos_512_v4
+78/402827/campos_512_v4
+78/402856/campos_512_v4
+78/402880/campos_512_v4
+78/402884/campos_512_v4
+78/402887/campos_512_v4
+78/402898/campos_512_v4
+78/402905/campos_512_v4
+78/402912/campos_512_v4
+78/402916/campos_512_v4
+78/402919/campos_512_v4
+78/402921/campos_512_v4
+78/402927/campos_512_v4
+78/402931/campos_512_v4
+78/402947/campos_512_v4
+78/402948/campos_512_v4
+78/402951/campos_512_v4
+78/402952/campos_512_v4
+78/402958/campos_512_v4
+78/402967/campos_512_v4
+78/402973/campos_512_v4
+78/402980/campos_512_v4
+78/402984/campos_512_v4
+78/402987/campos_512_v4
+78/402991/campos_512_v4
+78/402993/campos_512_v4
+78/403022/campos_512_v4
+78/403025/campos_512_v4
+78/403037/campos_512_v4
+78/403038/campos_512_v4
+78/403044/campos_512_v4
+78/403050/campos_512_v4
+78/403079/campos_512_v4
+78/403087/campos_512_v4
+78/403100/campos_512_v4
+78/403105/campos_512_v4
+78/403125/campos_512_v4
+78/403137/campos_512_v4
+78/403147/campos_512_v4
+78/403151/campos_512_v4
+78/403152/campos_512_v4
+78/403154/campos_512_v4
+78/403155/campos_512_v4
+78/403161/campos_512_v4
+78/403177/campos_512_v4
+78/403186/campos_512_v4
+78/403199/campos_512_v4
+78/403205/campos_512_v4
+78/403221/campos_512_v4
+78/403235/campos_512_v4
+78/403240/campos_512_v4
+78/403250/campos_512_v4
+78/403254/campos_512_v4
+78/403255/campos_512_v4
+78/403273/campos_512_v4
+78/403278/campos_512_v4
+78/403285/campos_512_v4
+78/403289/campos_512_v4
+78/403298/campos_512_v4
+78/403301/campos_512_v4
+78/403307/campos_512_v4
+78/403311/campos_512_v4
+78/403323/campos_512_v4
+78/403324/campos_512_v4
+78/403350/campos_512_v4
+78/403353/campos_512_v4
+78/403369/campos_512_v4
+78/403372/campos_512_v4
+78/403381/campos_512_v4
+78/403398/campos_512_v4
+78/403402/campos_512_v4
+78/403405/campos_512_v4
+78/403409/campos_512_v4
+78/403419/campos_512_v4
+78/403438/campos_512_v4
+78/403451/campos_512_v4
+78/403466/campos_512_v4
+78/403500/campos_512_v4
+78/403506/campos_512_v4
+78/403544/campos_512_v4
+78/403545/campos_512_v4
+78/403554/campos_512_v4
+78/403566/campos_512_v4
+78/403569/campos_512_v4
+78/403570/campos_512_v4
+78/403577/campos_512_v4
+78/403589/campos_512_v4
+78/403595/campos_512_v4
+78/403597/campos_512_v4
+78/403605/campos_512_v4
+78/403607/campos_512_v4
+78/403612/campos_512_v4
+78/403615/campos_512_v4
+78/403625/campos_512_v4
+78/403628/campos_512_v4
+78/403633/campos_512_v4
+78/403640/campos_512_v4
+78/403661/campos_512_v4
+78/403662/campos_512_v4
+78/403663/campos_512_v4
+78/403664/campos_512_v4
+78/403666/campos_512_v4
+78/403672/campos_512_v4
+78/403676/campos_512_v4
+78/403688/campos_512_v4
+78/403689/campos_512_v4
+78/403692/campos_512_v4
+78/403701/campos_512_v4
+78/403707/campos_512_v4
+78/403710/campos_512_v4
+78/403729/campos_512_v4
+78/403733/campos_512_v4
+78/403756/campos_512_v4
+78/403764/campos_512_v4
+78/403770/campos_512_v4
+78/403774/campos_512_v4
+78/403780/campos_512_v4
+78/403794/campos_512_v4
+78/403807/campos_512_v4
+78/403809/campos_512_v4
+78/403810/campos_512_v4
+78/403813/campos_512_v4
+78/403818/campos_512_v4
+78/403819/campos_512_v4
+78/403823/campos_512_v4
+78/403825/campos_512_v4
+78/403828/campos_512_v4
+78/403832/campos_512_v4
+78/403836/campos_512_v4
+78/403837/campos_512_v4
+78/403847/campos_512_v4
+78/403848/campos_512_v4
+78/403853/campos_512_v4
+78/403854/campos_512_v4
+78/403860/campos_512_v4
+78/403863/campos_512_v4
+78/403865/campos_512_v4
+78/403869/campos_512_v4
+78/403870/campos_512_v4
+78/403879/campos_512_v4
+78/403883/campos_512_v4
+78/403888/campos_512_v4
+78/403901/campos_512_v4
+78/403902/campos_512_v4
+78/403904/campos_512_v4
+78/403914/campos_512_v4
+78/403924/campos_512_v4
+78/403929/campos_512_v4
+78/403940/campos_512_v4
+78/403946/campos_512_v4
+78/403953/campos_512_v4
+78/403955/campos_512_v4
+78/403956/campos_512_v4
+78/403980/campos_512_v4
+78/403983/campos_512_v4
+78/403994/campos_512_v4
+78/404008/campos_512_v4
+78/404030/campos_512_v4
+78/404033/campos_512_v4
+78/404039/campos_512_v4
+78/404050/campos_512_v4
+78/404059/campos_512_v4
+78/404062/campos_512_v4
+78/404066/campos_512_v4
+78/404075/campos_512_v4
+78/404082/campos_512_v4
+78/404090/campos_512_v4
+78/404092/campos_512_v4
+78/404100/campos_512_v4
+78/404101/campos_512_v4
+78/404108/campos_512_v4
+78/404111/campos_512_v4
+78/404117/campos_512_v4
+78/404119/campos_512_v4
+78/404120/campos_512_v4
+78/404126/campos_512_v4
+78/404134/campos_512_v4
+78/404138/campos_512_v4
+78/404153/campos_512_v4
+78/404158/campos_512_v4
+78/404159/campos_512_v4
+78/404176/campos_512_v4
+78/404192/campos_512_v4
+78/404205/campos_512_v4
+78/404220/campos_512_v4
+78/404228/campos_512_v4
+78/404239/campos_512_v4
+78/404241/campos_512_v4
+78/404252/campos_512_v4
+78/404256/campos_512_v4
+78/404263/campos_512_v4
+78/404270/campos_512_v4
+78/404283/campos_512_v4
+78/404285/campos_512_v4
+78/404298/campos_512_v4
+78/404319/campos_512_v4
+78/404332/campos_512_v4
+78/404335/campos_512_v4
+78/404339/campos_512_v4
+78/404344/campos_512_v4
+78/404349/campos_512_v4
+78/404351/campos_512_v4
+78/404353/campos_512_v4
+78/404356/campos_512_v4
+78/404360/campos_512_v4
+78/404371/campos_512_v4
+78/404379/campos_512_v4
+78/404396/campos_512_v4
+78/404408/campos_512_v4
+78/404420/campos_512_v4
+78/404475/campos_512_v4
+78/404482/campos_512_v4
+78/404494/campos_512_v4
+78/404499/campos_512_v4
+78/404504/campos_512_v4
+78/404506/campos_512_v4
+78/404507/campos_512_v4
+78/404517/campos_512_v4
+78/404532/campos_512_v4
+78/404533/campos_512_v4
+78/404540/campos_512_v4
+78/404543/campos_512_v4
+78/404545/campos_512_v4
+78/404549/campos_512_v4
+78/404553/campos_512_v4
+78/404554/campos_512_v4
+78/404561/campos_512_v4
+78/404565/campos_512_v4
+78/404566/campos_512_v4
+78/404577/campos_512_v4
+78/404580/campos_512_v4
+78/404614/campos_512_v4
+78/404628/campos_512_v4
+78/404637/campos_512_v4
+78/404640/campos_512_v4
+78/404645/campos_512_v4
+78/404648/campos_512_v4
+78/404652/campos_512_v4
+78/404653/campos_512_v4
+78/404674/campos_512_v4
+78/404688/campos_512_v4
+78/404693/campos_512_v4
+78/404698/campos_512_v4
+78/404712/campos_512_v4
+78/404722/campos_512_v4
+78/404723/campos_512_v4
+78/404726/campos_512_v4
+78/404732/campos_512_v4
+78/404745/campos_512_v4
+78/404749/campos_512_v4
+78/404759/campos_512_v4
+78/404766/campos_512_v4
+78/404773/campos_512_v4
+78/404783/campos_512_v4
+78/404790/campos_512_v4
+78/404800/campos_512_v4
+78/404814/campos_512_v4
+78/404817/campos_512_v4
+78/404822/campos_512_v4
+78/404836/campos_512_v4
+78/404850/campos_512_v4
+78/404852/campos_512_v4
+78/404854/campos_512_v4
+78/404856/campos_512_v4
+78/404873/campos_512_v4
+78/404877/campos_512_v4
+78/404878/campos_512_v4
+78/404880/campos_512_v4
+78/404885/campos_512_v4
+78/404904/campos_512_v4
+78/404917/campos_512_v4
+78/404922/campos_512_v4
+78/404926/campos_512_v4
+78/404928/campos_512_v4
+78/404929/campos_512_v4
+78/404939/campos_512_v4
+78/404944/campos_512_v4
+78/404960/campos_512_v4
+78/404962/campos_512_v4
+78/404975/campos_512_v4
+78/404985/campos_512_v4
+78/404998/campos_512_v4
+79/405022/campos_512_v4
+79/405026/campos_512_v4
+79/405027/campos_512_v4
+79/405036/campos_512_v4
+79/405043/campos_512_v4
+79/405045/campos_512_v4
+79/405058/campos_512_v4
+79/405066/campos_512_v4
+79/405093/campos_512_v4
+79/405099/campos_512_v4
+79/405107/campos_512_v4
+79/405108/campos_512_v4
+79/405112/campos_512_v4
+79/405141/campos_512_v4
+79/405152/campos_512_v4
+79/405190/campos_512_v4
+79/405191/campos_512_v4
+79/405193/campos_512_v4
+79/405202/campos_512_v4
+79/405208/campos_512_v4
+79/405224/campos_512_v4
+79/405231/campos_512_v4
+79/405246/campos_512_v4
+79/405247/campos_512_v4
+79/405250/campos_512_v4
+79/405252/campos_512_v4
+79/405278/campos_512_v4
+79/405280/campos_512_v4
+79/405290/campos_512_v4
+79/405297/campos_512_v4
+79/405318/campos_512_v4
+79/405321/campos_512_v4
+79/405323/campos_512_v4
+79/405350/campos_512_v4
+79/405354/campos_512_v4
+79/405356/campos_512_v4
+79/405370/campos_512_v4
+79/405378/campos_512_v4
+79/405380/campos_512_v4
+79/405382/campos_512_v4
+79/405392/campos_512_v4
+79/405393/campos_512_v4
+79/405409/campos_512_v4
+79/405410/campos_512_v4
+79/405414/campos_512_v4
+79/405418/campos_512_v4
+79/405439/campos_512_v4
+79/405442/campos_512_v4
+79/405456/campos_512_v4
+79/405458/campos_512_v4
+79/405467/campos_512_v4
+79/405469/campos_512_v4
+79/405472/campos_512_v4
+79/405483/campos_512_v4
+79/405484/campos_512_v4
+79/405485/campos_512_v4
+79/405486/campos_512_v4
+79/405489/campos_512_v4
+79/405510/campos_512_v4
+79/405519/campos_512_v4
+79/405532/campos_512_v4
+79/405533/campos_512_v4
+79/405542/campos_512_v4
+79/405557/campos_512_v4
+79/405563/campos_512_v4
+79/405568/campos_512_v4
+79/405570/campos_512_v4
+79/405575/campos_512_v4
+79/405602/campos_512_v4
+79/405604/campos_512_v4
+79/405605/campos_512_v4
+79/405608/campos_512_v4
+79/405617/campos_512_v4
+79/405618/campos_512_v4
+79/405619/campos_512_v4
+79/405629/campos_512_v4
+79/405633/campos_512_v4
+79/405645/campos_512_v4
+79/405646/campos_512_v4
+79/405676/campos_512_v4
+79/405678/campos_512_v4
+79/405681/campos_512_v4
+79/405690/campos_512_v4
+79/405698/campos_512_v4
+79/405703/campos_512_v4
+79/405713/campos_512_v4
+79/405717/campos_512_v4
+79/405718/campos_512_v4
+79/405741/campos_512_v4
+79/405742/campos_512_v4
+79/405754/campos_512_v4
+79/405782/campos_512_v4
+79/405798/campos_512_v4
+79/405814/campos_512_v4
+79/405819/campos_512_v4
+79/405834/campos_512_v4
+79/405835/campos_512_v4
+79/405839/campos_512_v4
+79/405840/campos_512_v4
+79/405841/campos_512_v4
+79/405851/campos_512_v4
+79/405852/campos_512_v4
+79/405853/campos_512_v4
+79/405873/campos_512_v4
+79/405875/campos_512_v4
+79/405888/campos_512_v4
+79/405898/campos_512_v4
+79/405908/campos_512_v4
+79/405915/campos_512_v4
+79/405919/campos_512_v4
+79/405926/campos_512_v4
+79/405929/campos_512_v4
+79/405930/campos_512_v4
+79/405933/campos_512_v4
+79/405987/campos_512_v4
+79/405989/campos_512_v4
+79/405994/campos_512_v4
+79/406003/campos_512_v4
+79/406018/campos_512_v4
+79/406022/campos_512_v4
+79/406025/campos_512_v4
+79/406027/campos_512_v4
+79/406029/campos_512_v4
+79/406035/campos_512_v4
+79/406056/campos_512_v4
+79/406082/campos_512_v4
+79/406090/campos_512_v4
+79/406095/campos_512_v4
+79/406099/campos_512_v4
+79/406107/campos_512_v4
+79/406111/campos_512_v4
+79/406121/campos_512_v4
+79/406155/campos_512_v4
+79/406159/campos_512_v4
+79/406170/campos_512_v4
+79/406173/campos_512_v4
+79/406174/campos_512_v4
+79/406177/campos_512_v4
+79/406183/campos_512_v4
+79/406191/campos_512_v4
+79/406193/campos_512_v4
+79/406200/campos_512_v4
+79/406211/campos_512_v4
+79/406212/campos_512_v4
+79/406223/campos_512_v4
+79/406226/campos_512_v4
+79/406232/campos_512_v4
+79/406243/campos_512_v4
+79/406246/campos_512_v4
+79/406250/campos_512_v4
+79/406252/campos_512_v4
+79/406256/campos_512_v4
+79/406260/campos_512_v4
+79/406262/campos_512_v4
+79/406280/campos_512_v4
+79/406290/campos_512_v4
+79/406291/campos_512_v4
+79/406300/campos_512_v4
+79/406305/campos_512_v4
+79/406321/campos_512_v4
+79/406324/campos_512_v4
+79/406331/campos_512_v4
+79/406333/campos_512_v4
+79/406336/campos_512_v4
+79/406347/campos_512_v4
+79/406363/campos_512_v4
+79/406366/campos_512_v4
+79/406370/campos_512_v4
+79/406379/campos_512_v4
+79/406385/campos_512_v4
+79/406387/campos_512_v4
+79/406388/campos_512_v4
+79/406393/campos_512_v4
+79/406402/campos_512_v4
+79/406403/campos_512_v4
+79/406404/campos_512_v4
+79/406406/campos_512_v4
+79/406422/campos_512_v4
+79/406426/campos_512_v4
+79/406429/campos_512_v4
+79/406440/campos_512_v4
+79/406449/campos_512_v4
+79/406463/campos_512_v4
+79/406474/campos_512_v4
+79/406475/campos_512_v4
+79/406477/campos_512_v4
+79/406480/campos_512_v4
+79/406495/campos_512_v4
+79/406501/campos_512_v4
+79/406505/campos_512_v4
+79/406514/campos_512_v4
+79/406522/campos_512_v4
+79/406526/campos_512_v4
+79/406528/campos_512_v4
+79/406533/campos_512_v4
+79/406541/campos_512_v4
+79/406550/campos_512_v4
+79/406563/campos_512_v4
+79/406565/campos_512_v4
+79/406568/campos_512_v4
+79/406573/campos_512_v4
+79/406590/campos_512_v4
+79/406591/campos_512_v4
+79/406609/campos_512_v4
+79/406610/campos_512_v4
+79/406619/campos_512_v4
+79/406624/campos_512_v4
+79/406625/campos_512_v4
+79/406629/campos_512_v4
+79/406634/campos_512_v4
+79/406640/campos_512_v4
+79/406650/campos_512_v4
+79/406661/campos_512_v4
+79/406670/campos_512_v4
+79/406682/campos_512_v4
+79/406686/campos_512_v4
+79/406697/campos_512_v4
+79/406698/campos_512_v4
+79/406699/campos_512_v4
+79/406714/campos_512_v4
+79/406722/campos_512_v4
+79/406730/campos_512_v4
+79/406736/campos_512_v4
+79/406740/campos_512_v4
+79/406743/campos_512_v4
+79/406765/campos_512_v4
+79/406785/campos_512_v4
+79/406786/campos_512_v4
+79/406793/campos_512_v4
+79/406796/campos_512_v4
+79/406802/campos_512_v4
+79/406808/campos_512_v4
+79/406810/campos_512_v4
+79/406816/campos_512_v4
+79/406828/campos_512_v4
+79/406836/campos_512_v4
+79/406839/campos_512_v4
+79/406842/campos_512_v4
+79/406843/campos_512_v4
+79/406844/campos_512_v4
+79/406858/campos_512_v4
+79/406859/campos_512_v4
+79/406882/campos_512_v4
+79/406890/campos_512_v4
+79/406892/campos_512_v4
+79/406893/campos_512_v4
+79/406903/campos_512_v4
+79/406904/campos_512_v4
+79/406905/campos_512_v4
+79/407009/campos_512_v4
+79/407012/campos_512_v4
+79/407021/campos_512_v4
+79/407024/campos_512_v4
+79/407060/campos_512_v4
+79/407073/campos_512_v4
+79/407095/campos_512_v4
+79/407113/campos_512_v4
+79/407125/campos_512_v4
+79/407129/campos_512_v4
+79/407140/campos_512_v4
+79/407142/campos_512_v4
+79/407161/campos_512_v4
+79/407163/campos_512_v4
+79/407164/campos_512_v4
+79/407165/campos_512_v4
+79/407169/campos_512_v4
+79/407183/campos_512_v4
+79/407184/campos_512_v4
+79/407188/campos_512_v4
+79/407193/campos_512_v4
+79/407209/campos_512_v4
+79/407216/campos_512_v4
+79/407219/campos_512_v4
+79/407221/campos_512_v4
+79/407224/campos_512_v4
+79/407232/campos_512_v4
+79/407236/campos_512_v4
+79/407240/campos_512_v4
+79/407242/campos_512_v4
+79/407252/campos_512_v4
+79/407255/campos_512_v4
+79/407258/campos_512_v4
+79/407259/campos_512_v4
+79/407271/campos_512_v4
+79/407272/campos_512_v4
+79/407291/campos_512_v4
+79/407294/campos_512_v4
+79/407299/campos_512_v4
+79/407304/campos_512_v4
+79/407305/campos_512_v4
+79/407314/campos_512_v4
+79/407333/campos_512_v4
+79/407345/campos_512_v4
+79/407349/campos_512_v4
+79/407353/campos_512_v4
+79/407362/campos_512_v4
+79/407373/campos_512_v4
+79/407386/campos_512_v4
+79/407396/campos_512_v4
+79/407404/campos_512_v4
+79/407411/campos_512_v4
+79/407443/campos_512_v4
+79/407446/campos_512_v4
+79/407469/campos_512_v4
+79/407474/campos_512_v4
+79/407476/campos_512_v4
+79/407478/campos_512_v4
+79/407487/campos_512_v4
+79/407494/campos_512_v4
+79/407502/campos_512_v4
+79/407503/campos_512_v4
+79/407507/campos_512_v4
+79/407508/campos_512_v4
+79/407511/campos_512_v4
+79/407523/campos_512_v4
+79/407527/campos_512_v4
+79/407530/campos_512_v4
+79/407535/campos_512_v4
+79/407540/campos_512_v4
+79/407543/campos_512_v4
+79/407548/campos_512_v4
+79/407555/campos_512_v4
+79/407562/campos_512_v4
+79/407568/campos_512_v4
+79/407570/campos_512_v4
+79/407575/campos_512_v4
+79/407600/campos_512_v4
+79/407604/campos_512_v4
+79/407616/campos_512_v4
+79/407622/campos_512_v4
+79/407643/campos_512_v4
+79/407644/campos_512_v4
+79/407658/campos_512_v4
+79/407660/campos_512_v4
+79/407671/campos_512_v4
+79/407676/campos_512_v4
+79/407680/campos_512_v4
+79/407682/campos_512_v4
+79/407697/campos_512_v4
+79/407712/campos_512_v4
+79/407717/campos_512_v4
+79/407740/campos_512_v4
+79/407747/campos_512_v4
+79/407762/campos_512_v4
+79/407777/campos_512_v4
+79/407815/campos_512_v4
+79/407817/campos_512_v4
+79/407818/campos_512_v4
+79/407831/campos_512_v4
+79/407833/campos_512_v4
+79/407834/campos_512_v4
+79/407839/campos_512_v4
+79/407856/campos_512_v4
+79/407859/campos_512_v4
+79/407861/campos_512_v4
+79/407879/campos_512_v4
+79/407880/campos_512_v4
+79/407889/campos_512_v4
+79/407896/campos_512_v4
+79/407907/campos_512_v4
+79/407918/campos_512_v4
+79/407919/campos_512_v4
+79/407921/campos_512_v4
+79/407925/campos_512_v4
+79/407932/campos_512_v4
+79/407939/campos_512_v4
+79/407941/campos_512_v4
+79/407948/campos_512_v4
+79/407951/campos_512_v4
+79/407952/campos_512_v4
+79/407956/campos_512_v4
+79/407959/campos_512_v4
+79/407972/campos_512_v4
+79/407984/campos_512_v4
+79/407988/campos_512_v4
+79/407994/campos_512_v4
+79/408014/campos_512_v4
+79/408021/campos_512_v4
+79/408025/campos_512_v4
+79/408036/campos_512_v4
+79/408040/campos_512_v4
+79/408045/campos_512_v4
+79/408046/campos_512_v4
+79/408065/campos_512_v4
+79/408066/campos_512_v4
+79/408074/campos_512_v4
+79/408082/campos_512_v4
+79/408085/campos_512_v4
+79/408093/campos_512_v4
+79/408101/campos_512_v4
+79/408112/campos_512_v4
+79/408114/campos_512_v4
+79/408120/campos_512_v4
+79/408121/campos_512_v4
+79/408124/campos_512_v4
+79/408135/campos_512_v4
+79/408140/campos_512_v4
+79/408152/campos_512_v4
+79/408155/campos_512_v4
+79/408156/campos_512_v4
+79/408162/campos_512_v4
+79/408163/campos_512_v4
+79/408177/campos_512_v4
+79/408185/campos_512_v4
+79/408194/campos_512_v4
+79/408196/campos_512_v4
+79/408197/campos_512_v4
+79/408201/campos_512_v4
+79/408211/campos_512_v4
+79/408228/campos_512_v4
+79/408232/campos_512_v4
+79/408237/campos_512_v4
+79/408241/campos_512_v4
+79/408242/campos_512_v4
+79/408249/campos_512_v4
+79/408253/campos_512_v4
+79/408254/campos_512_v4
+79/408260/campos_512_v4
+79/408265/campos_512_v4
+79/408268/campos_512_v4
+79/408283/campos_512_v4
+79/408289/campos_512_v4
+79/408291/campos_512_v4
+79/408292/campos_512_v4
+79/408293/campos_512_v4
+79/408295/campos_512_v4
+79/408297/campos_512_v4
+79/408310/campos_512_v4
+79/408313/campos_512_v4
+79/408334/campos_512_v4
+79/408349/campos_512_v4
+79/408352/campos_512_v4
+79/408355/campos_512_v4
+79/408371/campos_512_v4
+79/408380/campos_512_v4
+79/408386/campos_512_v4
+79/408388/campos_512_v4
+79/408389/campos_512_v4
+79/408395/campos_512_v4
+79/408397/campos_512_v4
+79/408400/campos_512_v4
+79/408419/campos_512_v4
+79/408439/campos_512_v4
+79/408445/campos_512_v4
+79/408471/campos_512_v4
+79/408546/campos_512_v4
+79/408586/campos_512_v4
+79/408594/campos_512_v4
+79/408598/campos_512_v4
+79/408601/campos_512_v4
+79/408605/campos_512_v4
+79/408608/campos_512_v4
+79/408612/campos_512_v4
+79/408629/campos_512_v4
+79/408632/campos_512_v4
+79/408635/campos_512_v4
+79/408652/campos_512_v4
+79/408657/campos_512_v4
+79/408688/campos_512_v4
+79/408699/campos_512_v4
+79/408712/campos_512_v4
+79/408718/campos_512_v4
+79/408732/campos_512_v4
+79/408734/campos_512_v4
+79/408748/campos_512_v4
+79/408754/campos_512_v4
+79/408755/campos_512_v4
+79/408762/campos_512_v4
+79/408770/campos_512_v4
+79/408787/campos_512_v4
+79/408794/campos_512_v4
+79/408795/campos_512_v4
+79/408797/campos_512_v4
+79/408799/campos_512_v4
+79/408801/campos_512_v4
+79/408803/campos_512_v4
+79/408807/campos_512_v4
+79/408809/campos_512_v4
+79/408810/campos_512_v4
+79/408818/campos_512_v4
+79/408819/campos_512_v4
+79/408823/campos_512_v4
+79/408839/campos_512_v4
+79/408840/campos_512_v4
+79/408848/campos_512_v4
+79/408849/campos_512_v4
+79/408850/campos_512_v4
+79/408855/campos_512_v4
+79/408862/campos_512_v4
+79/408871/campos_512_v4
+79/408873/campos_512_v4
+79/408877/campos_512_v4
+79/408881/campos_512_v4
+79/408884/campos_512_v4
+79/408903/campos_512_v4
+79/408904/campos_512_v4
+79/408906/campos_512_v4
+79/408907/campos_512_v4
+79/408910/campos_512_v4
+79/408916/campos_512_v4
+79/408928/campos_512_v4
+79/408943/campos_512_v4
+79/408975/campos_512_v4
+79/408989/campos_512_v4
+79/408994/campos_512_v4
+79/409001/campos_512_v4
+79/409007/campos_512_v4
+79/409020/campos_512_v4
+79/409035/campos_512_v4
+79/409036/campos_512_v4
+79/409045/campos_512_v4
+79/409049/campos_512_v4
+79/409053/campos_512_v4
+79/409060/campos_512_v4
+79/409068/campos_512_v4
+79/409090/campos_512_v4
+79/409097/campos_512_v4
+79/409132/campos_512_v4
+79/409136/campos_512_v4
+79/409143/campos_512_v4
+79/409147/campos_512_v4
+79/409162/campos_512_v4
+79/409172/campos_512_v4
+79/409183/campos_512_v4
+79/409199/campos_512_v4
+79/409209/campos_512_v4
+79/409219/campos_512_v4
+79/409220/campos_512_v4
+79/409240/campos_512_v4
+79/409247/campos_512_v4
+79/409248/campos_512_v4
+79/409249/campos_512_v4
+79/409254/campos_512_v4
+79/409260/campos_512_v4
+79/409273/campos_512_v4
+79/409274/campos_512_v4
+79/409278/campos_512_v4
+79/409284/campos_512_v4
+79/409289/campos_512_v4
+79/409304/campos_512_v4
+79/409323/campos_512_v4
+79/409356/campos_512_v4
+79/409361/campos_512_v4
+79/409365/campos_512_v4
+79/409368/campos_512_v4
+79/409374/campos_512_v4
+79/409379/campos_512_v4
+79/409380/campos_512_v4
+79/409390/campos_512_v4
+79/409393/campos_512_v4
+79/409398/campos_512_v4
+79/409407/campos_512_v4
+79/409411/campos_512_v4
+79/409416/campos_512_v4
+79/409434/campos_512_v4
+79/409439/campos_512_v4
+79/409443/campos_512_v4
+79/409450/campos_512_v4
+79/409457/campos_512_v4
+79/409464/campos_512_v4
+79/409467/campos_512_v4
+79/409507/campos_512_v4
+79/409514/campos_512_v4
+79/409521/campos_512_v4
+79/409528/campos_512_v4
+79/409533/campos_512_v4
+79/409535/campos_512_v4
+79/409545/campos_512_v4
+79/409577/campos_512_v4
+79/409593/campos_512_v4
+79/409598/campos_512_v4
+79/409615/campos_512_v4
+79/409647/campos_512_v4
+79/409659/campos_512_v4
+79/409661/campos_512_v4
+79/409662/campos_512_v4
+79/409676/campos_512_v4
+79/409720/campos_512_v4
+79/409731/campos_512_v4
+79/409745/campos_512_v4
+79/409746/campos_512_v4
+79/409757/campos_512_v4
+79/409759/campos_512_v4
+79/409785/campos_512_v4
+79/409790/campos_512_v4
+79/409806/campos_512_v4
+79/409807/campos_512_v4
+79/409810/campos_512_v4
+79/409827/campos_512_v4
+79/409835/campos_512_v4
+79/409850/campos_512_v4
+79/409851/campos_512_v4
+79/409868/campos_512_v4
+79/409890/campos_512_v4
+79/409893/campos_512_v4
+79/409906/campos_512_v4
+79/409916/campos_512_v4
+79/409917/campos_512_v4
+79/409918/campos_512_v4
+79/409920/campos_512_v4
+79/409921/campos_512_v4
+79/409927/campos_512_v4
+79/409930/campos_512_v4
+79/409936/campos_512_v4
+79/409943/campos_512_v4
+79/409946/campos_512_v4
+79/409947/campos_512_v4
+79/409958/campos_512_v4
+79/409960/campos_512_v4
+79/409978/campos_512_v4
+79/409979/campos_512_v4
+79/409982/campos_512_v4
+79/409983/campos_512_v4
+8/50003/campos_512_v4
+8/50023/campos_512_v4
+8/50046/campos_512_v4
+8/50047/campos_512_v4
+8/50050/campos_512_v4
+8/50072/campos_512_v4
+8/50076/campos_512_v4
+8/50100/campos_512_v4
+8/50114/campos_512_v4
+8/50120/campos_512_v4
+8/50125/campos_512_v4
+8/50126/campos_512_v4
+8/50128/campos_512_v4
+8/50155/campos_512_v4
+8/50160/campos_512_v4
+8/50163/campos_512_v4
+8/50171/campos_512_v4
+8/50172/campos_512_v4
+8/50173/campos_512_v4
+8/50180/campos_512_v4
+8/50187/campos_512_v4
+8/50194/campos_512_v4
+8/50210/campos_512_v4
+8/50213/campos_512_v4
+8/50232/campos_512_v4
+8/50267/campos_512_v4
+8/50269/campos_512_v4
+8/50273/campos_512_v4
+8/50278/campos_512_v4
+8/50280/campos_512_v4
+8/50285/campos_512_v4
+8/50293/campos_512_v4
+8/50296/campos_512_v4
+8/50300/campos_512_v4
+8/50314/campos_512_v4
+8/50315/campos_512_v4
+8/50323/campos_512_v4
+8/50330/campos_512_v4
+8/50348/campos_512_v4
+8/50356/campos_512_v4
+8/50364/campos_512_v4
+8/50366/campos_512_v4
+8/50369/campos_512_v4
+8/50381/campos_512_v4
+8/50386/campos_512_v4
+8/50390/campos_512_v4
+8/50391/campos_512_v4
+8/50396/campos_512_v4
+8/50398/campos_512_v4
+8/50400/campos_512_v4
+8/50411/campos_512_v4
+8/50413/campos_512_v4
+8/50432/campos_512_v4
+8/50448/campos_512_v4
+8/50449/campos_512_v4
+8/50461/campos_512_v4
+8/50491/campos_512_v4
+8/50499/campos_512_v4
+8/50541/campos_512_v4
+8/50552/campos_512_v4
+8/50567/campos_512_v4
+8/50590/campos_512_v4
+8/50613/campos_512_v4
+8/50616/campos_512_v4
+8/50634/campos_512_v4
+8/50639/campos_512_v4
+8/50647/campos_512_v4
+8/50663/campos_512_v4
+8/50666/campos_512_v4
+8/50669/campos_512_v4
+8/50675/campos_512_v4
+8/50677/campos_512_v4
+8/50682/campos_512_v4
+8/50687/campos_512_v4
+8/50695/campos_512_v4
+8/50698/campos_512_v4
+8/50711/campos_512_v4
+8/50722/campos_512_v4
+8/50745/campos_512_v4
+8/50749/campos_512_v4
+8/50758/campos_512_v4
+8/50779/campos_512_v4
+8/50781/campos_512_v4
+8/50785/campos_512_v4
+8/50796/campos_512_v4
+8/50797/campos_512_v4
+8/50801/campos_512_v4
+8/50803/campos_512_v4
+8/50820/campos_512_v4
+8/50826/campos_512_v4
+8/50833/campos_512_v4
+8/50845/campos_512_v4
+8/50848/campos_512_v4
+8/50853/campos_512_v4
+8/50879/campos_512_v4
+8/50891/campos_512_v4
+8/50892/campos_512_v4
+8/50902/campos_512_v4
+8/50914/campos_512_v4
+8/50918/campos_512_v4
+8/50934/campos_512_v4
+8/50945/campos_512_v4
+8/50948/campos_512_v4
+8/50950/campos_512_v4
+8/50952/campos_512_v4
+8/50981/campos_512_v4
+8/50985/campos_512_v4
+8/50998/campos_512_v4
+8/51000/campos_512_v4
+8/51002/campos_512_v4
+8/51004/campos_512_v4
+8/51007/campos_512_v4
+8/51011/campos_512_v4
+8/51027/campos_512_v4
+8/51030/campos_512_v4
+8/51040/campos_512_v4
+8/51048/campos_512_v4
+8/51049/campos_512_v4
+8/51052/campos_512_v4
+8/51056/campos_512_v4
+8/51068/campos_512_v4
+8/51071/campos_512_v4
+8/51081/campos_512_v4
+8/51144/campos_512_v4
+8/51156/campos_512_v4
+8/51159/campos_512_v4
+8/51161/campos_512_v4
+8/51164/campos_512_v4
+8/51176/campos_512_v4
+8/51185/campos_512_v4
+8/51189/campos_512_v4
+8/51191/campos_512_v4
+8/51213/campos_512_v4
+8/51219/campos_512_v4
+8/51225/campos_512_v4
+8/51227/campos_512_v4
+8/51234/campos_512_v4
+8/51250/campos_512_v4
+8/51272/campos_512_v4
+8/51314/campos_512_v4
+8/51336/campos_512_v4
+8/51343/campos_512_v4
+8/51345/campos_512_v4
+8/51392/campos_512_v4
+8/51394/campos_512_v4
+8/51396/campos_512_v4
+8/51401/campos_512_v4
+8/51411/campos_512_v4
+8/51412/campos_512_v4
+8/51423/campos_512_v4
+8/51502/campos_512_v4
+8/51517/campos_512_v4
+8/51522/campos_512_v4
+8/51541/campos_512_v4
+8/51550/campos_512_v4
+8/51559/campos_512_v4
+8/51563/campos_512_v4
+8/51570/campos_512_v4
+8/51574/campos_512_v4
+8/51580/campos_512_v4
+8/51582/campos_512_v4
+8/51593/campos_512_v4
+8/51595/campos_512_v4
+8/51611/campos_512_v4
+8/51627/campos_512_v4
+8/51638/campos_512_v4
+8/51651/campos_512_v4
+8/51673/campos_512_v4
+8/51675/campos_512_v4
+8/51694/campos_512_v4
+8/51698/campos_512_v4
+8/51724/campos_512_v4
+8/51726/campos_512_v4
+8/51733/campos_512_v4
+8/51750/campos_512_v4
+8/51752/campos_512_v4
+8/51756/campos_512_v4
+8/51759/campos_512_v4
+8/51771/campos_512_v4
+8/51790/campos_512_v4
+8/51814/campos_512_v4
+8/51820/campos_512_v4
+8/51835/campos_512_v4
+8/51839/campos_512_v4
+8/51845/campos_512_v4
+8/51859/campos_512_v4
+8/51861/campos_512_v4
+8/51869/campos_512_v4
+8/51876/campos_512_v4
+8/51877/campos_512_v4
+8/51879/campos_512_v4
+8/51884/campos_512_v4
+8/51903/campos_512_v4
+8/51904/campos_512_v4
+8/51909/campos_512_v4
+8/51913/campos_512_v4
+8/51960/campos_512_v4
+8/51971/campos_512_v4
+8/51978/campos_512_v4
+8/51991/campos_512_v4
+8/52008/campos_512_v4
+8/52028/campos_512_v4
+8/52035/campos_512_v4
+8/52064/campos_512_v4
+8/52075/campos_512_v4
+8/52087/campos_512_v4
+8/52094/campos_512_v4
+8/52110/campos_512_v4
+8/52112/campos_512_v4
+8/52143/campos_512_v4
+8/52146/campos_512_v4
+8/52160/campos_512_v4
+8/52166/campos_512_v4
+8/52169/campos_512_v4
+8/52177/campos_512_v4
+8/52184/campos_512_v4
+8/52185/campos_512_v4
+8/52186/campos_512_v4
+8/52197/campos_512_v4
+8/52198/campos_512_v4
+8/52209/campos_512_v4
+8/52220/campos_512_v4
+8/52236/campos_512_v4
+8/52239/campos_512_v4
+8/52243/campos_512_v4
+8/52247/campos_512_v4
+8/52254/campos_512_v4
+8/52257/campos_512_v4
+8/52258/campos_512_v4
+8/52260/campos_512_v4
+8/52262/campos_512_v4
+8/52266/campos_512_v4
+8/52267/campos_512_v4
+8/52272/campos_512_v4
+8/52300/campos_512_v4
+8/52304/campos_512_v4
+8/52324/campos_512_v4
+8/52328/campos_512_v4
+8/52331/campos_512_v4
+8/52338/campos_512_v4
+8/52355/campos_512_v4
+8/52379/campos_512_v4
+8/52381/campos_512_v4
+8/52410/campos_512_v4
+8/52416/campos_512_v4
+8/52424/campos_512_v4
+8/52428/campos_512_v4
+8/52441/campos_512_v4
+8/52446/campos_512_v4
+8/52454/campos_512_v4
+8/52457/campos_512_v4
+8/52488/campos_512_v4
+8/52490/campos_512_v4
+8/52512/campos_512_v4
+8/52529/campos_512_v4
+8/52530/campos_512_v4
+8/52537/campos_512_v4
+8/52540/campos_512_v4
+8/52552/campos_512_v4
+8/52561/campos_512_v4
+8/52606/campos_512_v4
+8/52612/campos_512_v4
+8/52621/campos_512_v4
+8/52625/campos_512_v4
+8/52640/campos_512_v4
+8/52641/campos_512_v4
+8/52646/campos_512_v4
+8/52658/campos_512_v4
+8/52675/campos_512_v4
+8/52698/campos_512_v4
+8/52704/campos_512_v4
+8/52713/campos_512_v4
+8/52714/campos_512_v4
+8/52724/campos_512_v4
+8/52739/campos_512_v4
+8/52741/campos_512_v4
+8/52770/campos_512_v4
+8/52779/campos_512_v4
+8/52785/campos_512_v4
+8/52805/campos_512_v4
+8/52811/campos_512_v4
+8/52831/campos_512_v4
+8/52840/campos_512_v4
+8/52849/campos_512_v4
+8/52858/campos_512_v4
+8/52866/campos_512_v4
+8/52889/campos_512_v4
+8/52904/campos_512_v4
+8/52913/campos_512_v4
+8/52940/campos_512_v4
+8/52950/campos_512_v4
+8/52959/campos_512_v4
+8/52972/campos_512_v4
+8/52985/campos_512_v4
+8/53003/campos_512_v4
+8/53008/campos_512_v4
+8/53022/campos_512_v4
+8/53028/campos_512_v4
+8/53029/campos_512_v4
+8/53031/campos_512_v4
+8/53032/campos_512_v4
+8/53046/campos_512_v4
+8/53052/campos_512_v4
+8/53055/campos_512_v4
+8/53079/campos_512_v4
+8/53082/campos_512_v4
+8/53084/campos_512_v4
+8/53088/campos_512_v4
+8/53091/campos_512_v4
+8/53108/campos_512_v4
+8/53114/campos_512_v4
+8/53135/campos_512_v4
+8/53167/campos_512_v4
+8/53203/campos_512_v4
+8/53221/campos_512_v4
+8/53222/campos_512_v4
+8/53230/campos_512_v4
+8/53234/campos_512_v4
+8/53244/campos_512_v4
+8/53252/campos_512_v4
+8/53267/campos_512_v4
+8/53273/campos_512_v4
+8/53291/campos_512_v4
+8/53293/campos_512_v4
+8/53298/campos_512_v4
+8/53299/campos_512_v4
+8/53306/campos_512_v4
+8/53313/campos_512_v4
+8/53318/campos_512_v4
+8/53330/campos_512_v4
+8/53337/campos_512_v4
+8/53364/campos_512_v4
+8/53368/campos_512_v4
+8/53390/campos_512_v4
+8/53392/campos_512_v4
+8/53397/campos_512_v4
+8/53404/campos_512_v4
+8/53410/campos_512_v4
+8/53429/campos_512_v4
+8/53442/campos_512_v4
+8/53455/campos_512_v4
+8/53466/campos_512_v4
+8/53480/campos_512_v4
+8/53491/campos_512_v4
+8/53497/campos_512_v4
+8/53509/campos_512_v4
+8/53511/campos_512_v4
+8/53517/campos_512_v4
+8/53519/campos_512_v4
+8/53547/campos_512_v4
+8/53561/campos_512_v4
+8/53565/campos_512_v4
+8/53584/campos_512_v4
+8/53589/campos_512_v4
+8/53592/campos_512_v4
+8/53611/campos_512_v4
+8/53616/campos_512_v4
+8/53633/campos_512_v4
+8/53649/campos_512_v4
+8/53657/campos_512_v4
+8/53659/campos_512_v4
+8/53669/campos_512_v4
+8/53682/campos_512_v4
+8/53696/campos_512_v4
+8/53702/campos_512_v4
+8/53717/campos_512_v4
+8/53718/campos_512_v4
+8/53750/campos_512_v4
+8/53751/campos_512_v4
+8/53756/campos_512_v4
+8/53763/campos_512_v4
+8/53781/campos_512_v4
+8/53794/campos_512_v4
+8/53814/campos_512_v4
+8/53825/campos_512_v4
+8/53829/campos_512_v4
+8/53832/campos_512_v4
+8/53836/campos_512_v4
+8/53839/campos_512_v4
+8/53868/campos_512_v4
+8/53870/campos_512_v4
+8/53876/campos_512_v4
+8/53879/campos_512_v4
+8/53888/campos_512_v4
+8/53910/campos_512_v4
+8/53935/campos_512_v4
+8/53942/campos_512_v4
+8/53956/campos_512_v4
+8/53957/campos_512_v4
+8/53977/campos_512_v4
+8/53988/campos_512_v4
+8/54033/campos_512_v4
+8/54057/campos_512_v4
+8/54095/campos_512_v4
+8/54097/campos_512_v4
+8/54109/campos_512_v4
+8/54117/campos_512_v4
+8/54124/campos_512_v4
+8/54138/campos_512_v4
+8/54154/campos_512_v4
+8/54163/campos_512_v4
+8/54164/campos_512_v4
+8/54167/campos_512_v4
+8/54173/campos_512_v4
+8/54181/campos_512_v4
+8/54182/campos_512_v4
+8/54188/campos_512_v4
+8/54190/campos_512_v4
+8/54192/campos_512_v4
+8/54196/campos_512_v4
+8/54202/campos_512_v4
+8/54220/campos_512_v4
+8/54221/campos_512_v4
+8/54229/campos_512_v4
+8/54232/campos_512_v4
+8/54235/campos_512_v4
+8/54243/campos_512_v4
+8/54250/campos_512_v4
+8/54253/campos_512_v4
+8/54261/campos_512_v4
+8/54279/campos_512_v4
+8/54293/campos_512_v4
+8/54308/campos_512_v4
+8/54328/campos_512_v4
+8/54341/campos_512_v4
+8/54363/campos_512_v4
+8/54369/campos_512_v4
+8/54371/campos_512_v4
+8/54386/campos_512_v4
+8/54409/campos_512_v4
+8/54436/campos_512_v4
+8/54441/campos_512_v4
+8/54444/campos_512_v4
+8/54448/campos_512_v4
+8/54449/campos_512_v4
+8/54453/campos_512_v4
+8/54458/campos_512_v4
+8/54470/campos_512_v4
+8/54481/campos_512_v4
+8/54484/campos_512_v4
+8/54487/campos_512_v4
+8/54495/campos_512_v4
+8/54498/campos_512_v4
+8/54508/campos_512_v4
+8/54520/campos_512_v4
+8/54538/campos_512_v4
+8/54542/campos_512_v4
+8/54565/campos_512_v4
+8/54582/campos_512_v4
+8/54583/campos_512_v4
+8/54586/campos_512_v4
+8/54616/campos_512_v4
+8/54626/campos_512_v4
+8/54656/campos_512_v4
+8/54668/campos_512_v4
+8/54669/campos_512_v4
+8/54676/campos_512_v4
+8/54689/campos_512_v4
+8/54703/campos_512_v4
+8/54712/campos_512_v4
+8/54718/campos_512_v4
+8/54739/campos_512_v4
+8/54761/campos_512_v4
+8/54785/campos_512_v4
+8/54803/campos_512_v4
+8/54815/campos_512_v4
+8/54819/campos_512_v4
+8/54832/campos_512_v4
+8/54836/campos_512_v4
+8/54875/campos_512_v4
+8/54880/campos_512_v4
+8/54900/campos_512_v4
+8/54910/campos_512_v4
+8/54913/campos_512_v4
+8/54923/campos_512_v4
+8/54942/campos_512_v4
+8/54951/campos_512_v4
+8/54952/campos_512_v4
+8/54954/campos_512_v4
+8/54956/campos_512_v4
+8/54957/campos_512_v4
+8/54975/campos_512_v4
+8/54982/campos_512_v4
+8/54986/campos_512_v4
+8/55000/campos_512_v4
+80/410021/campos_512_v4
+80/410053/campos_512_v4
+80/410062/campos_512_v4
+80/410072/campos_512_v4
+80/410074/campos_512_v4
+80/410088/campos_512_v4
+80/410097/campos_512_v4
+80/410101/campos_512_v4
+80/410109/campos_512_v4
+80/410125/campos_512_v4
+80/410127/campos_512_v4
+80/410130/campos_512_v4
+80/410132/campos_512_v4
+80/410141/campos_512_v4
+80/410150/campos_512_v4
+80/410172/campos_512_v4
+80/410173/campos_512_v4
+80/410180/campos_512_v4
+80/410181/campos_512_v4
+80/410188/campos_512_v4
+80/410189/campos_512_v4
+80/410193/campos_512_v4
+80/410197/campos_512_v4
+80/410213/campos_512_v4
+80/410218/campos_512_v4
+80/410220/campos_512_v4
+80/410222/campos_512_v4
+80/410223/campos_512_v4
+80/410232/campos_512_v4
+80/410240/campos_512_v4
+80/410245/campos_512_v4
+80/410259/campos_512_v4
+80/410269/campos_512_v4
+80/410301/campos_512_v4
+80/410310/campos_512_v4
+80/410313/campos_512_v4
+80/410314/campos_512_v4
+80/410317/campos_512_v4
+80/410330/campos_512_v4
+80/410331/campos_512_v4
+80/410332/campos_512_v4
+80/410333/campos_512_v4
+80/410334/campos_512_v4
+80/410338/campos_512_v4
+80/410377/campos_512_v4
+80/410391/campos_512_v4
+80/410394/campos_512_v4
+80/410411/campos_512_v4
+80/410416/campos_512_v4
+80/410435/campos_512_v4
+80/410449/campos_512_v4
+80/410453/campos_512_v4
+80/410454/campos_512_v4
+80/410459/campos_512_v4
+80/410464/campos_512_v4
+80/410470/campos_512_v4
+80/410471/campos_512_v4
+80/410472/campos_512_v4
+80/410489/campos_512_v4
+80/410495/campos_512_v4
+80/410512/campos_512_v4
+80/410517/campos_512_v4
+80/410540/campos_512_v4
+80/410549/campos_512_v4
+80/410554/campos_512_v4
+80/410572/campos_512_v4
+80/410584/campos_512_v4
+80/410594/campos_512_v4
+80/410602/campos_512_v4
+80/410618/campos_512_v4
+80/410622/campos_512_v4
+80/410623/campos_512_v4
+80/410624/campos_512_v4
+80/410627/campos_512_v4
+80/410640/campos_512_v4
+80/410646/campos_512_v4
+80/410655/campos_512_v4
+80/410668/campos_512_v4
+80/410671/campos_512_v4
+80/410676/campos_512_v4
+80/410679/campos_512_v4
+80/410703/campos_512_v4
+80/410719/campos_512_v4
+80/410725/campos_512_v4
+80/410734/campos_512_v4
+80/410735/campos_512_v4
+80/410738/campos_512_v4
+80/410746/campos_512_v4
+80/410764/campos_512_v4
+80/410771/campos_512_v4
+80/410776/campos_512_v4
+80/410783/campos_512_v4
+80/410786/campos_512_v4
+80/410790/campos_512_v4
+80/410799/campos_512_v4
+80/410801/campos_512_v4
+80/410826/campos_512_v4
+80/410834/campos_512_v4
+80/410843/campos_512_v4
+80/410867/campos_512_v4
+80/410891/campos_512_v4
+80/410895/campos_512_v4
+80/410921/campos_512_v4
+80/410924/campos_512_v4
+80/410926/campos_512_v4
+80/410943/campos_512_v4
+80/410948/campos_512_v4
+80/410950/campos_512_v4
+80/410951/campos_512_v4
+80/410952/campos_512_v4
+80/410957/campos_512_v4
+80/410963/campos_512_v4
+80/410972/campos_512_v4
+80/410982/campos_512_v4
+80/410985/campos_512_v4
+80/410986/campos_512_v4
+80/410993/campos_512_v4
+80/411037/campos_512_v4
+80/411040/campos_512_v4
+80/411053/campos_512_v4
+80/411056/campos_512_v4
+80/411063/campos_512_v4
+80/411077/campos_512_v4
+80/411079/campos_512_v4
+80/411084/campos_512_v4
+80/411086/campos_512_v4
+80/411088/campos_512_v4
+80/411098/campos_512_v4
+80/411100/campos_512_v4
+80/411101/campos_512_v4
+80/411113/campos_512_v4
+80/411120/campos_512_v4
+80/411122/campos_512_v4
+80/411138/campos_512_v4
+80/411184/campos_512_v4
+80/411188/campos_512_v4
+80/411194/campos_512_v4
+80/411195/campos_512_v4
+80/411210/campos_512_v4
+80/411211/campos_512_v4
+80/411226/campos_512_v4
+80/411231/campos_512_v4
+80/411241/campos_512_v4
+80/411248/campos_512_v4
+80/411258/campos_512_v4
+80/411266/campos_512_v4
+80/411269/campos_512_v4
+80/411270/campos_512_v4
+80/411273/campos_512_v4
+80/411281/campos_512_v4
+80/411284/campos_512_v4
+80/411291/campos_512_v4
+80/411299/campos_512_v4
+80/411307/campos_512_v4
+80/411309/campos_512_v4
+80/411320/campos_512_v4
+80/411326/campos_512_v4
+80/411336/campos_512_v4
+80/411340/campos_512_v4
+80/411341/campos_512_v4
+80/411348/campos_512_v4
+80/411360/campos_512_v4
+80/411373/campos_512_v4
+80/411380/campos_512_v4
+80/411387/campos_512_v4
+80/411424/campos_512_v4
+80/411445/campos_512_v4
+80/411473/campos_512_v4
+80/411499/campos_512_v4
+80/411500/campos_512_v4
+80/411505/campos_512_v4
+80/411511/campos_512_v4
+80/411521/campos_512_v4
+80/411528/campos_512_v4
+80/411532/campos_512_v4
+80/411539/campos_512_v4
+80/411549/campos_512_v4
+80/411558/campos_512_v4
+80/411566/campos_512_v4
+80/411571/campos_512_v4
+80/411575/campos_512_v4
+80/411576/campos_512_v4
+80/411588/campos_512_v4
+80/411607/campos_512_v4
+80/411619/campos_512_v4
+80/411636/campos_512_v4
+80/411647/campos_512_v4
+80/411649/campos_512_v4
+80/411654/campos_512_v4
+80/411656/campos_512_v4
+80/411657/campos_512_v4
+80/411661/campos_512_v4
+80/411663/campos_512_v4
+80/411681/campos_512_v4
+80/411682/campos_512_v4
+80/411687/campos_512_v4
+80/411692/campos_512_v4
+80/411699/campos_512_v4
+80/411702/campos_512_v4
+80/411707/campos_512_v4
+80/411731/campos_512_v4
+80/411745/campos_512_v4
+80/411747/campos_512_v4
+80/411748/campos_512_v4
+80/411750/campos_512_v4
+80/411751/campos_512_v4
+80/411754/campos_512_v4
+80/411761/campos_512_v4
+80/411778/campos_512_v4
+80/411787/campos_512_v4
+80/411788/campos_512_v4
+80/411795/campos_512_v4
+80/411796/campos_512_v4
+80/411800/campos_512_v4
+80/411801/campos_512_v4
+80/411810/campos_512_v4
+80/411813/campos_512_v4
+80/411821/campos_512_v4
+80/411823/campos_512_v4
+80/411824/campos_512_v4
+80/411835/campos_512_v4
+80/411841/campos_512_v4
+80/411842/campos_512_v4
+80/411853/campos_512_v4
+80/411867/campos_512_v4
+80/411878/campos_512_v4
+80/411879/campos_512_v4
+80/411883/campos_512_v4
+80/411890/campos_512_v4
+80/411901/campos_512_v4
+80/411903/campos_512_v4
+80/411907/campos_512_v4
+80/411908/campos_512_v4
+80/411911/campos_512_v4
+80/411913/campos_512_v4
+80/411919/campos_512_v4
+80/411921/campos_512_v4
+80/411938/campos_512_v4
+80/411951/campos_512_v4
+80/411960/campos_512_v4
+80/411964/campos_512_v4
+80/411966/campos_512_v4
+80/411971/campos_512_v4
+80/411974/campos_512_v4
+80/411985/campos_512_v4
+80/412006/campos_512_v4
+80/412038/campos_512_v4
+80/412039/campos_512_v4
+80/412047/campos_512_v4
+80/412055/campos_512_v4
+80/412057/campos_512_v4
+80/412064/campos_512_v4
+80/412071/campos_512_v4
+80/412075/campos_512_v4
+80/412076/campos_512_v4
+80/412081/campos_512_v4
+80/412097/campos_512_v4
+80/412110/campos_512_v4
+80/412116/campos_512_v4
+80/412122/campos_512_v4
+80/412123/campos_512_v4
+80/412126/campos_512_v4
+80/412140/campos_512_v4
+80/412143/campos_512_v4
+80/412144/campos_512_v4
+80/412155/campos_512_v4
+80/412162/campos_512_v4
+80/412169/campos_512_v4
+80/412190/campos_512_v4
+80/412192/campos_512_v4
+80/412197/campos_512_v4
+80/412204/campos_512_v4
+80/412209/campos_512_v4
+80/412217/campos_512_v4
+80/412241/campos_512_v4
+80/412242/campos_512_v4
+80/412246/campos_512_v4
+80/412254/campos_512_v4
+80/412261/campos_512_v4
+80/412267/campos_512_v4
+80/412284/campos_512_v4
+80/412289/campos_512_v4
+80/412293/campos_512_v4
+80/412296/campos_512_v4
+80/412301/campos_512_v4
+80/412304/campos_512_v4
+80/412308/campos_512_v4
+80/412329/campos_512_v4
+80/412344/campos_512_v4
+80/412345/campos_512_v4
+80/412348/campos_512_v4
+80/412354/campos_512_v4
+80/412359/campos_512_v4
+80/412385/campos_512_v4
+80/412390/campos_512_v4
+80/412400/campos_512_v4
+80/412408/campos_512_v4
+80/412413/campos_512_v4
+80/412418/campos_512_v4
+80/412422/campos_512_v4
+80/412435/campos_512_v4
+80/412436/campos_512_v4
+80/412439/campos_512_v4
+80/412480/campos_512_v4
+80/412488/campos_512_v4
+80/412498/campos_512_v4
+80/412502/campos_512_v4
+80/412519/campos_512_v4
+80/412523/campos_512_v4
+80/412530/campos_512_v4
+80/412531/campos_512_v4
+80/412541/campos_512_v4
+80/412543/campos_512_v4
+80/412544/campos_512_v4
+80/412581/campos_512_v4
+80/412585/campos_512_v4
+80/412592/campos_512_v4
+80/412602/campos_512_v4
+80/412617/campos_512_v4
+80/412625/campos_512_v4
+80/412627/campos_512_v4
+80/412632/campos_512_v4
+80/412648/campos_512_v4
+80/412653/campos_512_v4
+80/412658/campos_512_v4
+80/412660/campos_512_v4
+80/412669/campos_512_v4
+80/412671/campos_512_v4
+80/412683/campos_512_v4
+80/412685/campos_512_v4
+80/412691/campos_512_v4
+80/412693/campos_512_v4
+80/412696/campos_512_v4
+80/412703/campos_512_v4
+80/412706/campos_512_v4
+80/412708/campos_512_v4
+80/412711/campos_512_v4
+80/412712/campos_512_v4
+80/412718/campos_512_v4
+80/412719/campos_512_v4
+80/412740/campos_512_v4
+80/412742/campos_512_v4
+80/412745/campos_512_v4
+80/412760/campos_512_v4
+80/412767/campos_512_v4
+80/412769/campos_512_v4
+80/412770/campos_512_v4
+80/412779/campos_512_v4
+80/412787/campos_512_v4
+80/412788/campos_512_v4
+80/412790/campos_512_v4
+80/412801/campos_512_v4
+80/412802/campos_512_v4
+80/412811/campos_512_v4
+80/412812/campos_512_v4
+80/412813/campos_512_v4
+80/412814/campos_512_v4
+80/412825/campos_512_v4
+80/412826/campos_512_v4
+80/412827/campos_512_v4
+80/412838/campos_512_v4
+80/412849/campos_512_v4
+80/412860/campos_512_v4
+80/412865/campos_512_v4
+80/412870/campos_512_v4
+80/412874/campos_512_v4
+80/412883/campos_512_v4
+80/412890/campos_512_v4
+80/412897/campos_512_v4
+80/412914/campos_512_v4
+80/412932/campos_512_v4
+80/412939/campos_512_v4
+80/412964/campos_512_v4
+80/412991/campos_512_v4
+80/413017/campos_512_v4
+80/413023/campos_512_v4
+80/413038/campos_512_v4
+80/413044/campos_512_v4
+80/413055/campos_512_v4
+80/413059/campos_512_v4
+80/413063/campos_512_v4
+80/413077/campos_512_v4
+80/413080/campos_512_v4
+80/413090/campos_512_v4
+80/413091/campos_512_v4
+80/413098/campos_512_v4
+80/413113/campos_512_v4
+80/413124/campos_512_v4
+80/413137/campos_512_v4
+80/413149/campos_512_v4
+80/413152/campos_512_v4
+80/413153/campos_512_v4
+80/413156/campos_512_v4
+80/413163/campos_512_v4
+80/413187/campos_512_v4
+80/413195/campos_512_v4
+80/413207/campos_512_v4
+80/413211/campos_512_v4
+80/413215/campos_512_v4
+80/413224/campos_512_v4
+80/413244/campos_512_v4
+80/413259/campos_512_v4
+80/413265/campos_512_v4
+80/413273/campos_512_v4
+80/413281/campos_512_v4
+80/413282/campos_512_v4
+80/413286/campos_512_v4
+80/413292/campos_512_v4
+80/413320/campos_512_v4
+80/413328/campos_512_v4
+80/413345/campos_512_v4
+80/413353/campos_512_v4
+80/413371/campos_512_v4
+80/413383/campos_512_v4
+80/413394/campos_512_v4
+80/413403/campos_512_v4
+80/413413/campos_512_v4
+80/413415/campos_512_v4
+80/413416/campos_512_v4
+80/413417/campos_512_v4
+80/413436/campos_512_v4
+80/413449/campos_512_v4
+80/413459/campos_512_v4
+80/413461/campos_512_v4
+80/413463/campos_512_v4
+80/413479/campos_512_v4
+80/413485/campos_512_v4
+80/413500/campos_512_v4
+80/413511/campos_512_v4
+80/413517/campos_512_v4
+80/413521/campos_512_v4
+80/413523/campos_512_v4
+80/413525/campos_512_v4
+80/413532/campos_512_v4
+80/413544/campos_512_v4
+80/413546/campos_512_v4
+80/413547/campos_512_v4
+80/413559/campos_512_v4
+80/413564/campos_512_v4
+80/413569/campos_512_v4
+80/413570/campos_512_v4
+80/413571/campos_512_v4
+80/413582/campos_512_v4
+80/413585/campos_512_v4
+80/413587/campos_512_v4
+80/413617/campos_512_v4
+80/413621/campos_512_v4
+80/413623/campos_512_v4
+80/413630/campos_512_v4
+80/413631/campos_512_v4
+80/413659/campos_512_v4
+80/413673/campos_512_v4
+80/413681/campos_512_v4
+80/413690/campos_512_v4
+80/413709/campos_512_v4
+80/413711/campos_512_v4
+80/413717/campos_512_v4
+80/413720/campos_512_v4
+80/413723/campos_512_v4
+80/413737/campos_512_v4
+80/413739/campos_512_v4
+80/413753/campos_512_v4
+80/413761/campos_512_v4
+80/413764/campos_512_v4
+80/413771/campos_512_v4
+80/413777/campos_512_v4
+80/413784/campos_512_v4
+80/413791/campos_512_v4
+80/413793/campos_512_v4
+80/413804/campos_512_v4
+80/413807/campos_512_v4
+80/413811/campos_512_v4
+80/413828/campos_512_v4
+80/413843/campos_512_v4
+80/413852/campos_512_v4
+80/413853/campos_512_v4
+80/413864/campos_512_v4
+80/413868/campos_512_v4
+80/413872/campos_512_v4
+80/413883/campos_512_v4
+80/413896/campos_512_v4
+80/413900/campos_512_v4
+80/413903/campos_512_v4
+80/413904/campos_512_v4
+80/413924/campos_512_v4
+80/413928/campos_512_v4
+80/413938/campos_512_v4
+80/413942/campos_512_v4
+80/413943/campos_512_v4
+80/413967/campos_512_v4
+80/413973/campos_512_v4
+80/413975/campos_512_v4
+80/413980/campos_512_v4
+80/413998/campos_512_v4
+80/414018/campos_512_v4
+80/414024/campos_512_v4
+80/414028/campos_512_v4
+80/414038/campos_512_v4
+80/414041/campos_512_v4
+80/414042/campos_512_v4
+80/414043/campos_512_v4
+80/414044/campos_512_v4
+80/414048/campos_512_v4
+80/414059/campos_512_v4
+80/414061/campos_512_v4
+80/414067/campos_512_v4
+80/414070/campos_512_v4
+80/414085/campos_512_v4
+80/414089/campos_512_v4
+80/414111/campos_512_v4
+80/414112/campos_512_v4
+80/414126/campos_512_v4
+80/414137/campos_512_v4
+80/414142/campos_512_v4
+80/414147/campos_512_v4
+80/414159/campos_512_v4
+80/414164/campos_512_v4
+80/414180/campos_512_v4
+80/414184/campos_512_v4
+80/414190/campos_512_v4
+80/414192/campos_512_v4
+80/414200/campos_512_v4
+80/414202/campos_512_v4
+80/414209/campos_512_v4
+80/414218/campos_512_v4
+80/414230/campos_512_v4
+80/414249/campos_512_v4
+80/414252/campos_512_v4
+80/414260/campos_512_v4
+80/414261/campos_512_v4
+80/414279/campos_512_v4
+80/414288/campos_512_v4
+80/414297/campos_512_v4
+80/414300/campos_512_v4
+80/414302/campos_512_v4
+80/414330/campos_512_v4
+80/414345/campos_512_v4
+80/414351/campos_512_v4
+80/414356/campos_512_v4
+80/414365/campos_512_v4
+80/414374/campos_512_v4
+80/414375/campos_512_v4
+80/414376/campos_512_v4
+80/414378/campos_512_v4
+80/414379/campos_512_v4
+80/414389/campos_512_v4
+80/414394/campos_512_v4
+80/414395/campos_512_v4
+80/414402/campos_512_v4
+80/414415/campos_512_v4
+80/414422/campos_512_v4
+80/414426/campos_512_v4
+80/414431/campos_512_v4
+80/414433/campos_512_v4
+80/414447/campos_512_v4
+80/414458/campos_512_v4
+80/414468/campos_512_v4
+80/414476/campos_512_v4
+80/414481/campos_512_v4
+80/414505/campos_512_v4
+80/414511/campos_512_v4
+80/414513/campos_512_v4
+80/414515/campos_512_v4
+80/414516/campos_512_v4
+80/414540/campos_512_v4
+80/414541/campos_512_v4
+80/414542/campos_512_v4
+80/414544/campos_512_v4
+80/414548/campos_512_v4
+80/414549/campos_512_v4
+80/414578/campos_512_v4
+80/414586/campos_512_v4
+80/414589/campos_512_v4
+80/414591/campos_512_v4
+80/414601/campos_512_v4
+80/414603/campos_512_v4
+80/414605/campos_512_v4
+80/414618/campos_512_v4
+80/414633/campos_512_v4
+80/414648/campos_512_v4
+80/414651/campos_512_v4
+80/414656/campos_512_v4
+80/414665/campos_512_v4
+80/414680/campos_512_v4
+80/414681/campos_512_v4
+80/414683/campos_512_v4
+80/414686/campos_512_v4
+80/414714/campos_512_v4
+80/414717/campos_512_v4
+80/414727/campos_512_v4
+80/414749/campos_512_v4
+80/414752/campos_512_v4
+80/414760/campos_512_v4
+80/414767/campos_512_v4
+80/414769/campos_512_v4
+80/414770/campos_512_v4
+80/414772/campos_512_v4
+80/414776/campos_512_v4
+80/414777/campos_512_v4
+80/414792/campos_512_v4
+80/414796/campos_512_v4
+80/414800/campos_512_v4
+80/414814/campos_512_v4
+80/414838/campos_512_v4
+80/414840/campos_512_v4
+80/414855/campos_512_v4
+80/414860/campos_512_v4
+80/414868/campos_512_v4
+80/414869/campos_512_v4
+80/414878/campos_512_v4
+80/414879/campos_512_v4
+80/414885/campos_512_v4
+80/414890/campos_512_v4
+80/414891/campos_512_v4
+80/414894/campos_512_v4
+80/414902/campos_512_v4
+80/414918/campos_512_v4
+80/414921/campos_512_v4
+80/414933/campos_512_v4
+80/414938/campos_512_v4
+80/414940/campos_512_v4
+80/414945/campos_512_v4
+80/414946/campos_512_v4
+80/414950/campos_512_v4
+80/414980/campos_512_v4
+80/414983/campos_512_v4
+80/414989/campos_512_v4
+80/414990/campos_512_v4
+80/414993/campos_512_v4
+80/414996/campos_512_v4
+81/415002/campos_512_v4
+81/415007/campos_512_v4
+81/415028/campos_512_v4
+81/415030/campos_512_v4
+81/415038/campos_512_v4
+81/415054/campos_512_v4
+81/415056/campos_512_v4
+81/415068/campos_512_v4
+81/415082/campos_512_v4
+81/415096/campos_512_v4
+81/415115/campos_512_v4
+81/415116/campos_512_v4
+81/415118/campos_512_v4
+81/415129/campos_512_v4
+81/415130/campos_512_v4
+81/415141/campos_512_v4
+81/415150/campos_512_v4
+81/415159/campos_512_v4
+81/415165/campos_512_v4
+81/415170/campos_512_v4
+81/415181/campos_512_v4
+81/415189/campos_512_v4
+81/415191/campos_512_v4
+81/415203/campos_512_v4
+81/415209/campos_512_v4
+81/415222/campos_512_v4
+81/415224/campos_512_v4
+81/415229/campos_512_v4
+81/415232/campos_512_v4
+81/415243/campos_512_v4
+81/415246/campos_512_v4
+81/415261/campos_512_v4
+81/415263/campos_512_v4
+81/415270/campos_512_v4
+81/415278/campos_512_v4
+81/415287/campos_512_v4
+81/415298/campos_512_v4
+81/415301/campos_512_v4
+81/415304/campos_512_v4
+81/415309/campos_512_v4
+81/415313/campos_512_v4
+81/415314/campos_512_v4
+81/415316/campos_512_v4
+81/415319/campos_512_v4
+81/415334/campos_512_v4
+81/415338/campos_512_v4
+81/415349/campos_512_v4
+81/415374/campos_512_v4
+81/415379/campos_512_v4
+81/415396/campos_512_v4
+81/415398/campos_512_v4
+81/415409/campos_512_v4
+81/415415/campos_512_v4
+81/415425/campos_512_v4
+81/415430/campos_512_v4
+81/415434/campos_512_v4
+81/415449/campos_512_v4
+81/415463/campos_512_v4
+81/415474/campos_512_v4
+81/415493/campos_512_v4
+81/415496/campos_512_v4
+81/415497/campos_512_v4
+81/415499/campos_512_v4
+81/415508/campos_512_v4
+81/415517/campos_512_v4
+81/415518/campos_512_v4
+81/415525/campos_512_v4
+81/415526/campos_512_v4
+81/415529/campos_512_v4
+81/415534/campos_512_v4
+81/415550/campos_512_v4
+81/415551/campos_512_v4
+81/415555/campos_512_v4
+81/415558/campos_512_v4
+81/415562/campos_512_v4
+81/415566/campos_512_v4
+81/415600/campos_512_v4
+81/415602/campos_512_v4
+81/415603/campos_512_v4
+81/415604/campos_512_v4
+81/415605/campos_512_v4
+81/415607/campos_512_v4
+81/415620/campos_512_v4
+81/415628/campos_512_v4
+81/415634/campos_512_v4
+81/415657/campos_512_v4
+81/415661/campos_512_v4
+81/415663/campos_512_v4
+81/415669/campos_512_v4
+81/415672/campos_512_v4
+81/415679/campos_512_v4
+81/415684/campos_512_v4
+81/415692/campos_512_v4
+81/415729/campos_512_v4
+81/415740/campos_512_v4
+81/415741/campos_512_v4
+81/415765/campos_512_v4
+81/415772/campos_512_v4
+81/415773/campos_512_v4
+81/415776/campos_512_v4
+81/415783/campos_512_v4
+81/415811/campos_512_v4
+81/415822/campos_512_v4
+81/415831/campos_512_v4
+81/415837/campos_512_v4
+81/415838/campos_512_v4
+81/415849/campos_512_v4
+81/415863/campos_512_v4
+81/415865/campos_512_v4
+81/415876/campos_512_v4
+81/415882/campos_512_v4
+81/415890/campos_512_v4
+81/415908/campos_512_v4
+81/415915/campos_512_v4
+81/415939/campos_512_v4
+81/415954/campos_512_v4
+81/415955/campos_512_v4
+81/415967/campos_512_v4
+81/415973/campos_512_v4
+81/415978/campos_512_v4
+81/415981/campos_512_v4
+81/415983/campos_512_v4
+81/415984/campos_512_v4
+81/415985/campos_512_v4
+81/415988/campos_512_v4
+81/415989/campos_512_v4
+81/415998/campos_512_v4
+81/416011/campos_512_v4
+81/416012/campos_512_v4
+81/416017/campos_512_v4
+81/416020/campos_512_v4
+81/416024/campos_512_v4
+81/416067/campos_512_v4
+81/416069/campos_512_v4
+81/416073/campos_512_v4
+81/416074/campos_512_v4
+81/416081/campos_512_v4
+81/416087/campos_512_v4
+81/416088/campos_512_v4
+81/416097/campos_512_v4
+81/416099/campos_512_v4
+81/416101/campos_512_v4
+81/416107/campos_512_v4
+81/416109/campos_512_v4
+81/416110/campos_512_v4
+81/416116/campos_512_v4
+81/416125/campos_512_v4
+81/416137/campos_512_v4
+81/416141/campos_512_v4
+81/416151/campos_512_v4
+81/416152/campos_512_v4
+81/416155/campos_512_v4
+81/416156/campos_512_v4
+81/416157/campos_512_v4
+81/416167/campos_512_v4
+81/416168/campos_512_v4
+81/416180/campos_512_v4
+81/416181/campos_512_v4
+81/416184/campos_512_v4
+81/416190/campos_512_v4
+81/416197/campos_512_v4
+81/416209/campos_512_v4
+81/416212/campos_512_v4
+81/416223/campos_512_v4
+81/416224/campos_512_v4
+81/416239/campos_512_v4
+81/416250/campos_512_v4
+81/416260/campos_512_v4
+81/416262/campos_512_v4
+81/416263/campos_512_v4
+81/416267/campos_512_v4
+81/416268/campos_512_v4
+81/416273/campos_512_v4
+81/416275/campos_512_v4
+81/416278/campos_512_v4
+81/416289/campos_512_v4
+81/416303/campos_512_v4
+81/416307/campos_512_v4
+81/416317/campos_512_v4
+81/416318/campos_512_v4
+81/416326/campos_512_v4
+81/416328/campos_512_v4
+81/416331/campos_512_v4
+81/416339/campos_512_v4
+81/416345/campos_512_v4
+81/416363/campos_512_v4
+81/416365/campos_512_v4
+81/416369/campos_512_v4
+81/416375/campos_512_v4
+81/416396/campos_512_v4
+81/416402/campos_512_v4
+81/416425/campos_512_v4
+81/416426/campos_512_v4
+81/416442/campos_512_v4
+81/416449/campos_512_v4
+81/416473/campos_512_v4
+81/416478/campos_512_v4
+81/416479/campos_512_v4
+81/416482/campos_512_v4
+81/416483/campos_512_v4
+81/416489/campos_512_v4
+81/416490/campos_512_v4
+81/416500/campos_512_v4
+81/416505/campos_512_v4
+81/416535/campos_512_v4
+81/416547/campos_512_v4
+81/416556/campos_512_v4
+81/416558/campos_512_v4
+81/416560/campos_512_v4
+81/416561/campos_512_v4
+81/416564/campos_512_v4
+81/416578/campos_512_v4
+81/416580/campos_512_v4
+81/416582/campos_512_v4
+81/416598/campos_512_v4
+81/416608/campos_512_v4
+81/416611/campos_512_v4
+81/416612/campos_512_v4
+81/416614/campos_512_v4
+81/416615/campos_512_v4
+81/416617/campos_512_v4
+81/416621/campos_512_v4
+81/416632/campos_512_v4
+81/416638/campos_512_v4
+81/416660/campos_512_v4
+81/416666/campos_512_v4
+81/416668/campos_512_v4
+81/416676/campos_512_v4
+81/416678/campos_512_v4
+81/416679/campos_512_v4
+81/416681/campos_512_v4
+81/416683/campos_512_v4
+81/416688/campos_512_v4
+81/416689/campos_512_v4
+81/416690/campos_512_v4
+81/416696/campos_512_v4
+81/416703/campos_512_v4
+81/416706/campos_512_v4
+81/416713/campos_512_v4
+81/416721/campos_512_v4
+81/416723/campos_512_v4
+81/416729/campos_512_v4
+81/416740/campos_512_v4
+81/416757/campos_512_v4
+81/416762/campos_512_v4
+81/416774/campos_512_v4
+81/416780/campos_512_v4
+81/416784/campos_512_v4
+81/416792/campos_512_v4
+81/416799/campos_512_v4
+81/416803/campos_512_v4
+81/416815/campos_512_v4
+81/416821/campos_512_v4
+81/416831/campos_512_v4
+81/416833/campos_512_v4
+81/416842/campos_512_v4
+81/416847/campos_512_v4
+81/416855/campos_512_v4
+81/416856/campos_512_v4
+81/416857/campos_512_v4
+81/416859/campos_512_v4
+81/416862/campos_512_v4
+81/416865/campos_512_v4
+81/416868/campos_512_v4
+81/416870/campos_512_v4
+81/416871/campos_512_v4
+81/416879/campos_512_v4
+81/416880/campos_512_v4
+81/416882/campos_512_v4
+81/416898/campos_512_v4
+81/416899/campos_512_v4
+81/416911/campos_512_v4
+81/416913/campos_512_v4
+81/416921/campos_512_v4
+81/416927/campos_512_v4
+81/416928/campos_512_v4
+81/416938/campos_512_v4
+81/416946/campos_512_v4
+81/416954/campos_512_v4
+81/416965/campos_512_v4
+81/416966/campos_512_v4
+81/416969/campos_512_v4
+81/416978/campos_512_v4
+81/416979/campos_512_v4
+81/416983/campos_512_v4
+81/416989/campos_512_v4
+81/416999/campos_512_v4
+81/417000/campos_512_v4
+81/417004/campos_512_v4
+81/417018/campos_512_v4
+81/417029/campos_512_v4
+81/417031/campos_512_v4
+81/417033/campos_512_v4
+81/417037/campos_512_v4
+81/417048/campos_512_v4
+81/417061/campos_512_v4
+81/417062/campos_512_v4
+81/417066/campos_512_v4
+81/417075/campos_512_v4
+81/417079/campos_512_v4
+81/417082/campos_512_v4
+81/417086/campos_512_v4
+81/417101/campos_512_v4
+81/417108/campos_512_v4
+81/417113/campos_512_v4
+81/417114/campos_512_v4
+81/417147/campos_512_v4
+81/417159/campos_512_v4
+81/417161/campos_512_v4
+81/417166/campos_512_v4
+81/417168/campos_512_v4
+81/417173/campos_512_v4
+81/417174/campos_512_v4
+81/417183/campos_512_v4
+81/417184/campos_512_v4
+81/417186/campos_512_v4
+81/417193/campos_512_v4
+81/417200/campos_512_v4
+81/417216/campos_512_v4
+81/417220/campos_512_v4
+81/417231/campos_512_v4
+81/417232/campos_512_v4
+81/417239/campos_512_v4
+81/417243/campos_512_v4
+81/417263/campos_512_v4
+81/417272/campos_512_v4
+81/417276/campos_512_v4
+81/417290/campos_512_v4
+81/417293/campos_512_v4
+81/417302/campos_512_v4
+81/417321/campos_512_v4
+81/417339/campos_512_v4
+81/417347/campos_512_v4
+81/417349/campos_512_v4
+81/417362/campos_512_v4
+81/417372/campos_512_v4
+81/417374/campos_512_v4
+81/417376/campos_512_v4
+81/417399/campos_512_v4
+81/417400/campos_512_v4
+81/417409/campos_512_v4
+81/417410/campos_512_v4
+81/417411/campos_512_v4
+81/417420/campos_512_v4
+81/417428/campos_512_v4
+81/417431/campos_512_v4
+81/417432/campos_512_v4
+81/417438/campos_512_v4
+81/417446/campos_512_v4
+81/417448/campos_512_v4
+81/417450/campos_512_v4
+81/417452/campos_512_v4
+81/417475/campos_512_v4
+81/417496/campos_512_v4
+81/417505/campos_512_v4
+81/417517/campos_512_v4
+81/417519/campos_512_v4
+81/417520/campos_512_v4
+81/417521/campos_512_v4
+81/417525/campos_512_v4
+81/417529/campos_512_v4
+81/417530/campos_512_v4
+81/417534/campos_512_v4
+81/417536/campos_512_v4
+81/417538/campos_512_v4
+81/417557/campos_512_v4
+81/417559/campos_512_v4
+81/417560/campos_512_v4
+81/417564/campos_512_v4
+81/417570/campos_512_v4
+81/417580/campos_512_v4
+81/417583/campos_512_v4
+81/417585/campos_512_v4
+81/417594/campos_512_v4
+81/417599/campos_512_v4
+81/417606/campos_512_v4
+81/417623/campos_512_v4
+81/417629/campos_512_v4
+81/417630/campos_512_v4
+81/417634/campos_512_v4
+81/417644/campos_512_v4
+81/417661/campos_512_v4
+81/417663/campos_512_v4
+81/417666/campos_512_v4
+81/417667/campos_512_v4
+81/417669/campos_512_v4
+81/417675/campos_512_v4
+81/417690/campos_512_v4
+81/417693/campos_512_v4
+81/417695/campos_512_v4
+81/417723/campos_512_v4
+81/417724/campos_512_v4
+81/417726/campos_512_v4
+81/417735/campos_512_v4
+81/417743/campos_512_v4
+81/417746/campos_512_v4
+81/417759/campos_512_v4
+81/417781/campos_512_v4
+81/417782/campos_512_v4
+81/417784/campos_512_v4
+81/417789/campos_512_v4
+81/417813/campos_512_v4
+81/417817/campos_512_v4
+81/417837/campos_512_v4
+81/417845/campos_512_v4
+81/417851/campos_512_v4
+81/417868/campos_512_v4
+81/417875/campos_512_v4
+81/417886/campos_512_v4
+81/417892/campos_512_v4
+81/417907/campos_512_v4
+81/417917/campos_512_v4
+81/417925/campos_512_v4
+81/417931/campos_512_v4
+81/417953/campos_512_v4
+81/417956/campos_512_v4
+81/417962/campos_512_v4
+81/417966/campos_512_v4
+81/417973/campos_512_v4
+81/417987/campos_512_v4
+81/417988/campos_512_v4
+81/417992/campos_512_v4
+81/417995/campos_512_v4
+81/418005/campos_512_v4
+81/418016/campos_512_v4
+81/418029/campos_512_v4
+81/418038/campos_512_v4
+81/418060/campos_512_v4
+81/418061/campos_512_v4
+81/418064/campos_512_v4
+81/418075/campos_512_v4
+81/418078/campos_512_v4
+81/418079/campos_512_v4
+81/418081/campos_512_v4
+81/418098/campos_512_v4
+81/418099/campos_512_v4
+81/418102/campos_512_v4
+81/418105/campos_512_v4
+81/418107/campos_512_v4
+81/418112/campos_512_v4
+81/418113/campos_512_v4
+81/418115/campos_512_v4
+81/418116/campos_512_v4
+81/418118/campos_512_v4
+81/418128/campos_512_v4
+81/418137/campos_512_v4
+81/418150/campos_512_v4
+81/418153/campos_512_v4
+81/418156/campos_512_v4
+81/418161/campos_512_v4
+81/418167/campos_512_v4
+81/418170/campos_512_v4
+81/418180/campos_512_v4
+81/418199/campos_512_v4
+81/418203/campos_512_v4
+81/418207/campos_512_v4
+81/418228/campos_512_v4
+81/418229/campos_512_v4
+81/418237/campos_512_v4
+81/418243/campos_512_v4
+81/418246/campos_512_v4
+81/418255/campos_512_v4
+81/418267/campos_512_v4
+81/418272/campos_512_v4
+81/418273/campos_512_v4
+81/418276/campos_512_v4
+81/418298/campos_512_v4
+81/418299/campos_512_v4
+81/418302/campos_512_v4
+81/418317/campos_512_v4
+81/418321/campos_512_v4
+81/418336/campos_512_v4
+81/418338/campos_512_v4
+81/418342/campos_512_v4
+81/418343/campos_512_v4
+81/418351/campos_512_v4
+81/418362/campos_512_v4
+81/418377/campos_512_v4
+81/418395/campos_512_v4
+81/418422/campos_512_v4
+81/418423/campos_512_v4
+81/418426/campos_512_v4
+81/418445/campos_512_v4
+81/418450/campos_512_v4
+81/418453/campos_512_v4
+81/418454/campos_512_v4
+81/418457/campos_512_v4
+81/418464/campos_512_v4
+81/418467/campos_512_v4
+81/418484/campos_512_v4
+81/418489/campos_512_v4
+81/418497/campos_512_v4
+81/418504/campos_512_v4
+81/418511/campos_512_v4
+81/418531/campos_512_v4
+81/418536/campos_512_v4
+81/418537/campos_512_v4
+81/418552/campos_512_v4
+81/418556/campos_512_v4
+81/418562/campos_512_v4
+81/418567/campos_512_v4
+81/418571/campos_512_v4
+81/418603/campos_512_v4
+81/418604/campos_512_v4
+81/418619/campos_512_v4
+81/418620/campos_512_v4
+81/418631/campos_512_v4
+81/418633/campos_512_v4
+81/418641/campos_512_v4
+81/418647/campos_512_v4
+81/418650/campos_512_v4
+81/418661/campos_512_v4
+81/418665/campos_512_v4
+81/418677/campos_512_v4
+81/418686/campos_512_v4
+81/418687/campos_512_v4
+81/418691/campos_512_v4
+81/418700/campos_512_v4
+81/418703/campos_512_v4
+81/418705/campos_512_v4
+81/418708/campos_512_v4
+81/418712/campos_512_v4
+81/418715/campos_512_v4
+81/418718/campos_512_v4
+81/418727/campos_512_v4
+81/418733/campos_512_v4
+81/419007/campos_512_v4
+81/419010/campos_512_v4
+81/419013/campos_512_v4
+81/419023/campos_512_v4
+81/419028/campos_512_v4
+81/419032/campos_512_v4
+81/419042/campos_512_v4
+81/419048/campos_512_v4
+81/419050/campos_512_v4
+81/419066/campos_512_v4
+81/419079/campos_512_v4
+81/419103/campos_512_v4
+81/419104/campos_512_v4
+81/419110/campos_512_v4
+81/419112/campos_512_v4
+81/419127/campos_512_v4
+81/419131/campos_512_v4
+81/419136/campos_512_v4
+81/419145/campos_512_v4
+81/419152/campos_512_v4
+81/419156/campos_512_v4
+81/419161/campos_512_v4
+81/419164/campos_512_v4
+81/419168/campos_512_v4
+81/419177/campos_512_v4
+81/419181/campos_512_v4
+81/419195/campos_512_v4
+81/419201/campos_512_v4
+81/419211/campos_512_v4
+81/419213/campos_512_v4
+81/419216/campos_512_v4
+81/419219/campos_512_v4
+81/419231/campos_512_v4
+81/419242/campos_512_v4
+81/419252/campos_512_v4
+81/419259/campos_512_v4
+81/419260/campos_512_v4
+81/419268/campos_512_v4
+81/419284/campos_512_v4
+81/419295/campos_512_v4
+81/419302/campos_512_v4
+81/419312/campos_512_v4
+81/419322/campos_512_v4
+81/419328/campos_512_v4
+81/419329/campos_512_v4
+81/419345/campos_512_v4
+81/419346/campos_512_v4
+81/419352/campos_512_v4
+81/419373/campos_512_v4
+81/419408/campos_512_v4
+81/419416/campos_512_v4
+81/419421/campos_512_v4
+81/419438/campos_512_v4
+81/419456/campos_512_v4
+81/419458/campos_512_v4
+81/419459/campos_512_v4
+81/419462/campos_512_v4
+81/419465/campos_512_v4
+81/419476/campos_512_v4
+81/419480/campos_512_v4
+81/419492/campos_512_v4
+81/419528/campos_512_v4
+81/419540/campos_512_v4
+81/419549/campos_512_v4
+81/419553/campos_512_v4
+81/419568/campos_512_v4
+81/419583/campos_512_v4
+81/419584/campos_512_v4
+81/419586/campos_512_v4
+81/419589/campos_512_v4
+81/419596/campos_512_v4
+81/419600/campos_512_v4
+81/419616/campos_512_v4
+81/419621/campos_512_v4
+81/419622/campos_512_v4
+81/419656/campos_512_v4
+81/419667/campos_512_v4
+81/419674/campos_512_v4
+81/419683/campos_512_v4
+81/419691/campos_512_v4
+81/419697/campos_512_v4
+81/419699/campos_512_v4
+81/419706/campos_512_v4
+81/419714/campos_512_v4
+81/419723/campos_512_v4
+81/419730/campos_512_v4
+81/419738/campos_512_v4
+81/419770/campos_512_v4
+81/419771/campos_512_v4
+81/419776/campos_512_v4
+81/419790/campos_512_v4
+81/419793/campos_512_v4
+81/419795/campos_512_v4
+81/419801/campos_512_v4
+81/419810/campos_512_v4
+81/419814/campos_512_v4
+81/419816/campos_512_v4
+81/419818/campos_512_v4
+81/419828/campos_512_v4
+81/419834/campos_512_v4
+81/419836/campos_512_v4
+81/419837/campos_512_v4
+81/419840/campos_512_v4
+81/419844/campos_512_v4
+81/419847/campos_512_v4
+81/419848/campos_512_v4
+81/419850/campos_512_v4
+81/419851/campos_512_v4
+81/419863/campos_512_v4
+81/419867/campos_512_v4
+81/419873/campos_512_v4
+81/419881/campos_512_v4
+81/419920/campos_512_v4
+81/419923/campos_512_v4
+81/419934/campos_512_v4
+81/419953/campos_512_v4
+81/419954/campos_512_v4
+81/419967/campos_512_v4
+81/419972/campos_512_v4
+81/419987/campos_512_v4
+81/419988/campos_512_v4
+81/420000/campos_512_v4
+82/420006/campos_512_v4
+82/420009/campos_512_v4
+82/420013/campos_512_v4
+82/420020/campos_512_v4
+82/420059/campos_512_v4
+82/420061/campos_512_v4
+82/420065/campos_512_v4
+82/420068/campos_512_v4
+82/420071/campos_512_v4
+82/420074/campos_512_v4
+82/420075/campos_512_v4
+82/420087/campos_512_v4
+82/420088/campos_512_v4
+82/420096/campos_512_v4
+82/420103/campos_512_v4
+82/420125/campos_512_v4
+82/420135/campos_512_v4
+82/420138/campos_512_v4
+82/420149/campos_512_v4
+82/420160/campos_512_v4
+82/420165/campos_512_v4
+82/420166/campos_512_v4
+82/420169/campos_512_v4
+82/420176/campos_512_v4
+82/420184/campos_512_v4
+82/420193/campos_512_v4
+82/420195/campos_512_v4
+82/420206/campos_512_v4
+82/420231/campos_512_v4
+82/420244/campos_512_v4
+82/420246/campos_512_v4
+82/420255/campos_512_v4
+82/420264/campos_512_v4
+82/420272/campos_512_v4
+82/420280/campos_512_v4
+82/420281/campos_512_v4
+82/420284/campos_512_v4
+82/420290/campos_512_v4
+82/420291/campos_512_v4
+82/420292/campos_512_v4
+82/420293/campos_512_v4
+82/420295/campos_512_v4
+82/420298/campos_512_v4
+82/420310/campos_512_v4
+82/420312/campos_512_v4
+82/420315/campos_512_v4
+82/420317/campos_512_v4
+82/420318/campos_512_v4
+82/420327/campos_512_v4
+82/420333/campos_512_v4
+82/420338/campos_512_v4
+82/420340/campos_512_v4
+82/420350/campos_512_v4
+82/420360/campos_512_v4
+82/420370/campos_512_v4
+82/420376/campos_512_v4
+82/420377/campos_512_v4
+82/420382/campos_512_v4
+82/420388/campos_512_v4
+82/420393/campos_512_v4
+82/420399/campos_512_v4
+82/420410/campos_512_v4
+82/420423/campos_512_v4
+82/420434/campos_512_v4
+82/420441/campos_512_v4
+82/420445/campos_512_v4
+82/420449/campos_512_v4
+82/420450/campos_512_v4
+82/420456/campos_512_v4
+82/420464/campos_512_v4
+82/420470/campos_512_v4
+82/420477/campos_512_v4
+82/420484/campos_512_v4
+82/420503/campos_512_v4
+82/420505/campos_512_v4
+82/420508/campos_512_v4
+82/420512/campos_512_v4
+82/420520/campos_512_v4
+82/420524/campos_512_v4
+82/420529/campos_512_v4
+82/420545/campos_512_v4
+82/420553/campos_512_v4
+82/420561/campos_512_v4
+82/420562/campos_512_v4
+82/420566/campos_512_v4
+82/420567/campos_512_v4
+82/420571/campos_512_v4
+82/420573/campos_512_v4
+82/420600/campos_512_v4
+82/420604/campos_512_v4
+82/420608/campos_512_v4
+82/420616/campos_512_v4
+82/420619/campos_512_v4
+82/420626/campos_512_v4
+82/420628/campos_512_v4
+82/420630/campos_512_v4
+82/420635/campos_512_v4
+82/420641/campos_512_v4
+82/420652/campos_512_v4
+82/420656/campos_512_v4
+82/420658/campos_512_v4
+82/420664/campos_512_v4
+82/420668/campos_512_v4
+82/420670/campos_512_v4
+82/420673/campos_512_v4
+82/420685/campos_512_v4
+82/420692/campos_512_v4
+82/420693/campos_512_v4
+82/420698/campos_512_v4
+82/420703/campos_512_v4
+82/420704/campos_512_v4
+82/420705/campos_512_v4
+82/420711/campos_512_v4
+82/420715/campos_512_v4
+82/420745/campos_512_v4
+82/420752/campos_512_v4
+82/420760/campos_512_v4
+82/420764/campos_512_v4
+82/420766/campos_512_v4
+82/420768/campos_512_v4
+82/420769/campos_512_v4
+82/420771/campos_512_v4
+82/420785/campos_512_v4
+82/420788/campos_512_v4
+82/420790/campos_512_v4
+82/420792/campos_512_v4
+82/420796/campos_512_v4
+82/420800/campos_512_v4
+82/420807/campos_512_v4
+82/420808/campos_512_v4
+82/420820/campos_512_v4
+82/420831/campos_512_v4
+82/420844/campos_512_v4
+82/420846/campos_512_v4
+82/420856/campos_512_v4
+82/420857/campos_512_v4
+82/420867/campos_512_v4
+82/420883/campos_512_v4
+82/420886/campos_512_v4
+82/420899/campos_512_v4
+82/420908/campos_512_v4
+82/420921/campos_512_v4
+82/420923/campos_512_v4
+82/420926/campos_512_v4
+82/420940/campos_512_v4
+82/420941/campos_512_v4
+82/420952/campos_512_v4
+82/420956/campos_512_v4
+82/420958/campos_512_v4
+82/420961/campos_512_v4
+82/420969/campos_512_v4
+82/420972/campos_512_v4
+82/420973/campos_512_v4
+82/420974/campos_512_v4
+82/420978/campos_512_v4
+82/420984/campos_512_v4
+82/420991/campos_512_v4
+82/420997/campos_512_v4
+82/421005/campos_512_v4
+82/421010/campos_512_v4
+82/421020/campos_512_v4
+82/421029/campos_512_v4
+82/421031/campos_512_v4
+82/421033/campos_512_v4
+82/421041/campos_512_v4
+82/421054/campos_512_v4
+82/421059/campos_512_v4
+82/421061/campos_512_v4
+82/421071/campos_512_v4
+82/421094/campos_512_v4
+82/421096/campos_512_v4
+82/421102/campos_512_v4
+82/421116/campos_512_v4
+82/421123/campos_512_v4
+82/421127/campos_512_v4
+82/421138/campos_512_v4
+82/421141/campos_512_v4
+82/421145/campos_512_v4
+82/421151/campos_512_v4
+82/421155/campos_512_v4
+82/421172/campos_512_v4
+82/421179/campos_512_v4
+82/421205/campos_512_v4
+82/421211/campos_512_v4
+82/421214/campos_512_v4
+82/421232/campos_512_v4
+82/421249/campos_512_v4
+82/421260/campos_512_v4
+82/421272/campos_512_v4
+82/421279/campos_512_v4
+82/421283/campos_512_v4
+82/421293/campos_512_v4
+82/421303/campos_512_v4
+82/421311/campos_512_v4
+82/421314/campos_512_v4
+82/421330/campos_512_v4
+82/421345/campos_512_v4
+82/421350/campos_512_v4
+82/421360/campos_512_v4
+82/421372/campos_512_v4
+82/421378/campos_512_v4
+82/421383/campos_512_v4
+82/421387/campos_512_v4
+82/421392/campos_512_v4
+82/421399/campos_512_v4
+82/421403/campos_512_v4
+82/421417/campos_512_v4
+82/421424/campos_512_v4
+82/421428/campos_512_v4
+82/421448/campos_512_v4
+82/421478/campos_512_v4
+82/421497/campos_512_v4
+82/421498/campos_512_v4
+82/421510/campos_512_v4
+82/421528/campos_512_v4
+82/421533/campos_512_v4
+82/421539/campos_512_v4
+82/421543/campos_512_v4
+82/421544/campos_512_v4
+82/421555/campos_512_v4
+82/421560/campos_512_v4
+82/421561/campos_512_v4
+82/421562/campos_512_v4
+82/421563/campos_512_v4
+82/421575/campos_512_v4
+82/421584/campos_512_v4
+82/421586/campos_512_v4
+82/421596/campos_512_v4
+82/421602/campos_512_v4
+82/421605/campos_512_v4
+82/421619/campos_512_v4
+82/421620/campos_512_v4
+82/421630/campos_512_v4
+82/421640/campos_512_v4
+82/421647/campos_512_v4
+82/421649/campos_512_v4
+82/421650/campos_512_v4
+82/421651/campos_512_v4
+82/421654/campos_512_v4
+82/421656/campos_512_v4
+82/421659/campos_512_v4
+82/421678/campos_512_v4
+82/421690/campos_512_v4
+82/421693/campos_512_v4
+82/421699/campos_512_v4
+82/421700/campos_512_v4
+82/421711/campos_512_v4
+82/421715/campos_512_v4
+82/421722/campos_512_v4
+82/421723/campos_512_v4
+82/421727/campos_512_v4
+82/421734/campos_512_v4
+82/421744/campos_512_v4
+82/421746/campos_512_v4
+82/421750/campos_512_v4
+82/421753/campos_512_v4
+82/421760/campos_512_v4
+82/421763/campos_512_v4
+82/421766/campos_512_v4
+82/421769/campos_512_v4
+82/421774/campos_512_v4
+82/421780/campos_512_v4
+82/421782/campos_512_v4
+82/421805/campos_512_v4
+82/421806/campos_512_v4
+82/421808/campos_512_v4
+82/421812/campos_512_v4
+82/421814/campos_512_v4
+82/421816/campos_512_v4
+82/421822/campos_512_v4
+82/421827/campos_512_v4
+82/421859/campos_512_v4
+82/421863/campos_512_v4
+82/421869/campos_512_v4
+82/421874/campos_512_v4
+82/421876/campos_512_v4
+82/421879/campos_512_v4
+82/421885/campos_512_v4
+82/421898/campos_512_v4
+82/421909/campos_512_v4
+82/421916/campos_512_v4
+82/421922/campos_512_v4
+82/421930/campos_512_v4
+82/421931/campos_512_v4
+82/421932/campos_512_v4
+82/421947/campos_512_v4
+82/421952/campos_512_v4
+82/421963/campos_512_v4
+82/421968/campos_512_v4
+82/421971/campos_512_v4
+82/421978/campos_512_v4
+82/421986/campos_512_v4
+82/422002/campos_512_v4
+82/422005/campos_512_v4
+82/422032/campos_512_v4
+82/422043/campos_512_v4
+82/422044/campos_512_v4
+82/422051/campos_512_v4
+82/422057/campos_512_v4
+82/422068/campos_512_v4
+82/422092/campos_512_v4
+82/422094/campos_512_v4
+82/422112/campos_512_v4
+82/422114/campos_512_v4
+82/422115/campos_512_v4
+82/422124/campos_512_v4
+82/422129/campos_512_v4
+82/422132/campos_512_v4
+82/422133/campos_512_v4
+82/422135/campos_512_v4
+82/422171/campos_512_v4
+82/422177/campos_512_v4
+82/422178/campos_512_v4
+82/422220/campos_512_v4
+82/422228/campos_512_v4
+82/422230/campos_512_v4
+82/422243/campos_512_v4
+82/422250/campos_512_v4
+82/422261/campos_512_v4
+82/422262/campos_512_v4
+82/422274/campos_512_v4
+82/422277/campos_512_v4
+82/422278/campos_512_v4
+82/422281/campos_512_v4
+82/422286/campos_512_v4
+82/422290/campos_512_v4
+82/422294/campos_512_v4
+82/422301/campos_512_v4
+82/422303/campos_512_v4
+82/422315/campos_512_v4
+82/422318/campos_512_v4
+82/422319/campos_512_v4
+82/422320/campos_512_v4
+82/422324/campos_512_v4
+82/422337/campos_512_v4
+82/422340/campos_512_v4
+82/422350/campos_512_v4
+82/422351/campos_512_v4
+82/422355/campos_512_v4
+82/422360/campos_512_v4
+82/422367/campos_512_v4
+82/422376/campos_512_v4
+82/422382/campos_512_v4
+82/422384/campos_512_v4
+82/422398/campos_512_v4
+82/422402/campos_512_v4
+82/422404/campos_512_v4
+82/422408/campos_512_v4
+82/422410/campos_512_v4
+82/422427/campos_512_v4
+82/422450/campos_512_v4
+82/422453/campos_512_v4
+82/422456/campos_512_v4
+82/422459/campos_512_v4
+82/422460/campos_512_v4
+82/422473/campos_512_v4
+82/422492/campos_512_v4
+82/422496/campos_512_v4
+82/422503/campos_512_v4
+82/422529/campos_512_v4
+82/422532/campos_512_v4
+82/422535/campos_512_v4
+82/422550/campos_512_v4
+82/422557/campos_512_v4
+82/422561/campos_512_v4
+82/422566/campos_512_v4
+82/422578/campos_512_v4
+82/422583/campos_512_v4
+82/422605/campos_512_v4
+82/422618/campos_512_v4
+82/422631/campos_512_v4
+82/422636/campos_512_v4
+82/422658/campos_512_v4
+82/422677/campos_512_v4
+82/422683/campos_512_v4
+82/422691/campos_512_v4
+82/422702/campos_512_v4
+82/422703/campos_512_v4
+82/422704/campos_512_v4
+82/422705/campos_512_v4
+82/422715/campos_512_v4
+82/422722/campos_512_v4
+82/422730/campos_512_v4
+82/422737/campos_512_v4
+82/422740/campos_512_v4
+82/422749/campos_512_v4
+82/422773/campos_512_v4
+82/422794/campos_512_v4
+82/422795/campos_512_v4
+82/422807/campos_512_v4
+82/422810/campos_512_v4
+82/422811/campos_512_v4
+82/422813/campos_512_v4
+82/422830/campos_512_v4
+82/422836/campos_512_v4
+82/422839/campos_512_v4
+82/422841/campos_512_v4
+82/422868/campos_512_v4
+82/422869/campos_512_v4
+82/422880/campos_512_v4
+82/422883/campos_512_v4
+82/422885/campos_512_v4
+82/422894/campos_512_v4
+82/422900/campos_512_v4
+82/422902/campos_512_v4
+82/422923/campos_512_v4
+82/422927/campos_512_v4
+82/422928/campos_512_v4
+82/422930/campos_512_v4
+82/422944/campos_512_v4
+82/422951/campos_512_v4
+82/422954/campos_512_v4
+82/422955/campos_512_v4
+82/422956/campos_512_v4
+82/422959/campos_512_v4
+82/422964/campos_512_v4
+82/422975/campos_512_v4
+82/422978/campos_512_v4
+82/422991/campos_512_v4
+82/422995/campos_512_v4
+82/423003/campos_512_v4
+82/423007/campos_512_v4
+82/423014/campos_512_v4
+82/423018/campos_512_v4
+82/423025/campos_512_v4
+82/423032/campos_512_v4
+82/423037/campos_512_v4
+82/423039/campos_512_v4
+82/423042/campos_512_v4
+82/423063/campos_512_v4
+82/423080/campos_512_v4
+82/423094/campos_512_v4
+82/423101/campos_512_v4
+82/423108/campos_512_v4
+82/423116/campos_512_v4
+82/423117/campos_512_v4
+82/423123/campos_512_v4
+82/423136/campos_512_v4
+82/423142/campos_512_v4
+82/423150/campos_512_v4
+82/423152/campos_512_v4
+82/423163/campos_512_v4
+82/423172/campos_512_v4
+82/423182/campos_512_v4
+82/423184/campos_512_v4
+82/423192/campos_512_v4
+82/423193/campos_512_v4
+82/423196/campos_512_v4
+82/423204/campos_512_v4
+82/423206/campos_512_v4
+82/423210/campos_512_v4
+82/423223/campos_512_v4
+82/423231/campos_512_v4
+82/423233/campos_512_v4
+82/423234/campos_512_v4
+82/423248/campos_512_v4
+82/423249/campos_512_v4
+82/423252/campos_512_v4
+82/423259/campos_512_v4
+82/423264/campos_512_v4
+82/423270/campos_512_v4
+82/423275/campos_512_v4
+82/423289/campos_512_v4
+82/423301/campos_512_v4
+82/423314/campos_512_v4
+82/423317/campos_512_v4
+82/423318/campos_512_v4
+82/423321/campos_512_v4
+82/423330/campos_512_v4
+82/423333/campos_512_v4
+82/423334/campos_512_v4
+82/423336/campos_512_v4
+82/423338/campos_512_v4
+82/423350/campos_512_v4
+82/423352/campos_512_v4
+82/423355/campos_512_v4
+82/423360/campos_512_v4
+82/423365/campos_512_v4
+82/423369/campos_512_v4
+82/423371/campos_512_v4
+82/423378/campos_512_v4
+82/423389/campos_512_v4
+82/423392/campos_512_v4
+82/423393/campos_512_v4
+82/423397/campos_512_v4
+82/423413/campos_512_v4
+82/423423/campos_512_v4
+82/423427/campos_512_v4
+82/423430/campos_512_v4
+82/423436/campos_512_v4
+82/423449/campos_512_v4
+82/423466/campos_512_v4
+82/423474/campos_512_v4
+82/423481/campos_512_v4
+82/423485/campos_512_v4
+82/423487/campos_512_v4
+82/423499/campos_512_v4
+82/423502/campos_512_v4
+82/423514/campos_512_v4
+82/423519/campos_512_v4
+82/423542/campos_512_v4
+82/423543/campos_512_v4
+82/423558/campos_512_v4
+82/423585/campos_512_v4
+82/423589/campos_512_v4
+82/423595/campos_512_v4
+82/423601/campos_512_v4
+82/423603/campos_512_v4
+82/423612/campos_512_v4
+82/423626/campos_512_v4
+82/423629/campos_512_v4
+82/423633/campos_512_v4
+82/423647/campos_512_v4
+82/423650/campos_512_v4
+82/423654/campos_512_v4
+82/423657/campos_512_v4
+82/423662/campos_512_v4
+82/423667/campos_512_v4
+82/423668/campos_512_v4
+82/423684/campos_512_v4
+82/423688/campos_512_v4
+82/423696/campos_512_v4
+82/423726/campos_512_v4
+82/423733/campos_512_v4
+82/423738/campos_512_v4
+82/423740/campos_512_v4
+82/423760/campos_512_v4
+82/423761/campos_512_v4
+82/423764/campos_512_v4
+82/423774/campos_512_v4
+82/423778/campos_512_v4
+82/423779/campos_512_v4
+82/423783/campos_512_v4
+82/423790/campos_512_v4
+82/423791/campos_512_v4
+82/423792/campos_512_v4
+82/423793/campos_512_v4
+82/423806/campos_512_v4
+82/423808/campos_512_v4
+82/423820/campos_512_v4
+82/423838/campos_512_v4
+82/423842/campos_512_v4
+82/423853/campos_512_v4
+82/423859/campos_512_v4
+82/423860/campos_512_v4
+82/423863/campos_512_v4
+82/423865/campos_512_v4
+82/423868/campos_512_v4
+82/423869/campos_512_v4
+82/423871/campos_512_v4
+82/423878/campos_512_v4
+82/423889/campos_512_v4
+82/423890/campos_512_v4
+82/423899/campos_512_v4
+82/423902/campos_512_v4
+82/423907/campos_512_v4
+82/423916/campos_512_v4
+82/423919/campos_512_v4
+82/423925/campos_512_v4
+82/423931/campos_512_v4
+82/423935/campos_512_v4
+82/423974/campos_512_v4
+82/423980/campos_512_v4
+82/423985/campos_512_v4
+82/423986/campos_512_v4
+82/423988/campos_512_v4
+82/423991/campos_512_v4
+82/423995/campos_512_v4
+82/424004/campos_512_v4
+82/424012/campos_512_v4
+82/424028/campos_512_v4
+82/424038/campos_512_v4
+82/424048/campos_512_v4
+82/424062/campos_512_v4
+82/424071/campos_512_v4
+82/424073/campos_512_v4
+82/424077/campos_512_v4
+82/424078/campos_512_v4
+82/424089/campos_512_v4
+82/424095/campos_512_v4
+82/424096/campos_512_v4
+82/424100/campos_512_v4
+82/424113/campos_512_v4
+82/424124/campos_512_v4
+82/424129/campos_512_v4
+82/424147/campos_512_v4
+82/424158/campos_512_v4
+82/424175/campos_512_v4
+82/424177/campos_512_v4
+82/424197/campos_512_v4
+82/424201/campos_512_v4
+82/424207/campos_512_v4
+82/424218/campos_512_v4
+82/424220/campos_512_v4
+82/424227/campos_512_v4
+82/424229/campos_512_v4
+82/424231/campos_512_v4
+82/424238/campos_512_v4
+82/424270/campos_512_v4
+82/424274/campos_512_v4
+82/424275/campos_512_v4
+82/424277/campos_512_v4
+82/424282/campos_512_v4
+82/424286/campos_512_v4
+82/424293/campos_512_v4
+82/424310/campos_512_v4
+82/424315/campos_512_v4
+82/424317/campos_512_v4
+82/424320/campos_512_v4
+82/424322/campos_512_v4
+82/424327/campos_512_v4
+82/424332/campos_512_v4
+82/424336/campos_512_v4
+82/424358/campos_512_v4
+82/424368/campos_512_v4
+82/424370/campos_512_v4
+82/424382/campos_512_v4
+82/424407/campos_512_v4
+82/424419/campos_512_v4
+82/424440/campos_512_v4
+82/424452/campos_512_v4
+82/424454/campos_512_v4
+82/424459/campos_512_v4
+82/424468/campos_512_v4
+82/424469/campos_512_v4
+82/424472/campos_512_v4
+82/424474/campos_512_v4
+82/424486/campos_512_v4
+82/424490/campos_512_v4
+82/424492/campos_512_v4
+82/424499/campos_512_v4
+82/424502/campos_512_v4
+82/424508/campos_512_v4
+82/424526/campos_512_v4
+82/424548/campos_512_v4
+82/424567/campos_512_v4
+82/424571/campos_512_v4
+82/424579/campos_512_v4
+82/424589/campos_512_v4
+82/424607/campos_512_v4
+82/424609/campos_512_v4
+82/424613/campos_512_v4
+82/424626/campos_512_v4
+82/424638/campos_512_v4
+82/424640/campos_512_v4
+82/424642/campos_512_v4
+82/424664/campos_512_v4
+82/424669/campos_512_v4
+82/424674/campos_512_v4
+82/424675/campos_512_v4
+82/424679/campos_512_v4
+82/424712/campos_512_v4
+82/424719/campos_512_v4
+82/424726/campos_512_v4
+82/424739/campos_512_v4
+82/424743/campos_512_v4
+82/424752/campos_512_v4
+82/424753/campos_512_v4
+82/424760/campos_512_v4
+82/424770/campos_512_v4
+82/424790/campos_512_v4
+82/424795/campos_512_v4
+82/424800/campos_512_v4
+82/424803/campos_512_v4
+82/424810/campos_512_v4
+82/424814/campos_512_v4
+82/424829/campos_512_v4
+82/424834/campos_512_v4
+82/424842/campos_512_v4
+82/424845/campos_512_v4
+82/424848/campos_512_v4
+82/424849/campos_512_v4
+82/424852/campos_512_v4
+82/424861/campos_512_v4
+82/424864/campos_512_v4
+82/424869/campos_512_v4
+82/424872/campos_512_v4
+82/424880/campos_512_v4
+82/424882/campos_512_v4
+82/424884/campos_512_v4
+82/424886/campos_512_v4
+82/424888/campos_512_v4
+82/424890/campos_512_v4
+82/424901/campos_512_v4
+82/424910/campos_512_v4
+82/424911/campos_512_v4
+82/424913/campos_512_v4
+82/424915/campos_512_v4
+82/424916/campos_512_v4
+82/424920/campos_512_v4
+82/424931/campos_512_v4
+82/424933/campos_512_v4
+82/424934/campos_512_v4
+82/424938/campos_512_v4
+82/424946/campos_512_v4
+82/424949/campos_512_v4
+82/424954/campos_512_v4
+82/424960/campos_512_v4
+82/424965/campos_512_v4
+82/424966/campos_512_v4
+82/424969/campos_512_v4
+82/424970/campos_512_v4
+82/424972/campos_512_v4
+82/424973/campos_512_v4
+82/424974/campos_512_v4
+82/424977/campos_512_v4
+82/424978/campos_512_v4
+82/424995/campos_512_v4
+82/424999/campos_512_v4
+82/425001/campos_512_v4
+83/425002/campos_512_v4
+83/425003/campos_512_v4
+83/425004/campos_512_v4
+83/425021/campos_512_v4
+83/425025/campos_512_v4
+83/425033/campos_512_v4
+83/425041/campos_512_v4
+83/425043/campos_512_v4
+83/425046/campos_512_v4
+83/425049/campos_512_v4
+83/425053/campos_512_v4
+83/425055/campos_512_v4
+83/425057/campos_512_v4
+83/425067/campos_512_v4
+83/425068/campos_512_v4
+83/425081/campos_512_v4
+83/425088/campos_512_v4
+83/425091/campos_512_v4
+83/425094/campos_512_v4
+83/425099/campos_512_v4
+83/425103/campos_512_v4
+83/425104/campos_512_v4
+83/425112/campos_512_v4
+83/425115/campos_512_v4
+83/425117/campos_512_v4
+83/425123/campos_512_v4
+83/425126/campos_512_v4
+83/425129/campos_512_v4
+83/425144/campos_512_v4
+83/425162/campos_512_v4
+83/425174/campos_512_v4
+83/425187/campos_512_v4
+83/425188/campos_512_v4
+83/425190/campos_512_v4
+83/425199/campos_512_v4
+83/425214/campos_512_v4
+83/425221/campos_512_v4
+83/425223/campos_512_v4
+83/425224/campos_512_v4
+83/425231/campos_512_v4
+83/425239/campos_512_v4
+83/425242/campos_512_v4
+83/425244/campos_512_v4
+83/425246/campos_512_v4
+83/425247/campos_512_v4
+83/425256/campos_512_v4
+83/425266/campos_512_v4
+83/425267/campos_512_v4
+83/425270/campos_512_v4
+83/425272/campos_512_v4
+83/425289/campos_512_v4
+83/425301/campos_512_v4
+83/425306/campos_512_v4
+83/425313/campos_512_v4
+83/425347/campos_512_v4
+83/425352/campos_512_v4
+83/425360/campos_512_v4
+83/425370/campos_512_v4
+83/425372/campos_512_v4
+83/425381/campos_512_v4
+83/425384/campos_512_v4
+83/425390/campos_512_v4
+83/425393/campos_512_v4
+83/425396/campos_512_v4
+83/425401/campos_512_v4
+83/425403/campos_512_v4
+83/425406/campos_512_v4
+83/425411/campos_512_v4
+83/425415/campos_512_v4
+83/425427/campos_512_v4
+83/425432/campos_512_v4
+83/425437/campos_512_v4
+83/425439/campos_512_v4
+83/425442/campos_512_v4
+83/425452/campos_512_v4
+83/425453/campos_512_v4
+83/425468/campos_512_v4
+83/425470/campos_512_v4
+83/425473/campos_512_v4
+83/425481/campos_512_v4
+83/425483/campos_512_v4
+83/425491/campos_512_v4
+83/425499/campos_512_v4
+83/425533/campos_512_v4
+83/425534/campos_512_v4
+83/425538/campos_512_v4
+83/425549/campos_512_v4
+83/425558/campos_512_v4
+83/425564/campos_512_v4
+83/425582/campos_512_v4
+83/425585/campos_512_v4
+83/425588/campos_512_v4
+83/425592/campos_512_v4
+83/425601/campos_512_v4
+83/425616/campos_512_v4
+83/425619/campos_512_v4
+83/425621/campos_512_v4
+83/425630/campos_512_v4
+83/425631/campos_512_v4
+83/425640/campos_512_v4
+83/425642/campos_512_v4
+83/425643/campos_512_v4
+83/425649/campos_512_v4
+83/425652/campos_512_v4
+83/425657/campos_512_v4
+83/425659/campos_512_v4
+83/425665/campos_512_v4
+83/425670/campos_512_v4
+83/425672/campos_512_v4
+83/425681/campos_512_v4
+83/425683/campos_512_v4
+83/425685/campos_512_v4
+83/425687/campos_512_v4
+83/425693/campos_512_v4
+83/425712/campos_512_v4
+83/425718/campos_512_v4
+83/425750/campos_512_v4
+83/425763/campos_512_v4
+83/425777/campos_512_v4
+83/425802/campos_512_v4
+83/425816/campos_512_v4
+83/425834/campos_512_v4
+83/425861/campos_512_v4
+83/425868/campos_512_v4
+83/425877/campos_512_v4
+83/425883/campos_512_v4
+83/425885/campos_512_v4
+83/425905/campos_512_v4
+83/425908/campos_512_v4
+83/425910/campos_512_v4
+83/425912/campos_512_v4
+83/425914/campos_512_v4
+83/425918/campos_512_v4
+83/425926/campos_512_v4
+83/425928/campos_512_v4
+83/425936/campos_512_v4
+83/425944/campos_512_v4
+83/425965/campos_512_v4
+83/425970/campos_512_v4
+83/425972/campos_512_v4
+83/425973/campos_512_v4
+83/425981/campos_512_v4
+83/425983/campos_512_v4
+83/425995/campos_512_v4
+83/426002/campos_512_v4
+83/426003/campos_512_v4
+83/426005/campos_512_v4
+83/426009/campos_512_v4
+83/426019/campos_512_v4
+83/426021/campos_512_v4
+83/426028/campos_512_v4
+83/426036/campos_512_v4
+83/426040/campos_512_v4
+83/426044/campos_512_v4
+83/426049/campos_512_v4
+83/426053/campos_512_v4
+83/426060/campos_512_v4
+83/426078/campos_512_v4
+83/426087/campos_512_v4
+83/426102/campos_512_v4
+83/426130/campos_512_v4
+83/426131/campos_512_v4
+83/426135/campos_512_v4
+83/426136/campos_512_v4
+83/426150/campos_512_v4
+83/426151/campos_512_v4
+83/426155/campos_512_v4
+83/426168/campos_512_v4
+83/426177/campos_512_v4
+83/426179/campos_512_v4
+83/426180/campos_512_v4
+83/426182/campos_512_v4
+83/426188/campos_512_v4
+83/426196/campos_512_v4
+83/426200/campos_512_v4
+83/426201/campos_512_v4
+83/426208/campos_512_v4
+83/426213/campos_512_v4
+83/426225/campos_512_v4
+83/426231/campos_512_v4
+83/426232/campos_512_v4
+83/426233/campos_512_v4
+83/426250/campos_512_v4
+83/426260/campos_512_v4
+83/426267/campos_512_v4
+83/426269/campos_512_v4
+83/426285/campos_512_v4
+83/426293/campos_512_v4
+83/426297/campos_512_v4
+83/426299/campos_512_v4
+83/426302/campos_512_v4
+83/426319/campos_512_v4
+83/426326/campos_512_v4
+83/426327/campos_512_v4
+83/426343/campos_512_v4
+83/426349/campos_512_v4
+83/426362/campos_512_v4
+83/426364/campos_512_v4
+83/426371/campos_512_v4
+83/426379/campos_512_v4
+83/426381/campos_512_v4
+83/426392/campos_512_v4
+83/426404/campos_512_v4
+83/426405/campos_512_v4
+83/426406/campos_512_v4
+83/426409/campos_512_v4
+83/426411/campos_512_v4
+83/426439/campos_512_v4
+83/426445/campos_512_v4
+83/426456/campos_512_v4
+83/426459/campos_512_v4
+83/426461/campos_512_v4
+83/426469/campos_512_v4
+83/426478/campos_512_v4
+83/426488/campos_512_v4
+83/426501/campos_512_v4
+83/426502/campos_512_v4
+83/426515/campos_512_v4
+83/426518/campos_512_v4
+83/426536/campos_512_v4
+83/426556/campos_512_v4
+83/426564/campos_512_v4
+83/426571/campos_512_v4
+83/426579/campos_512_v4
+83/426611/campos_512_v4
+83/426629/campos_512_v4
+83/426630/campos_512_v4
+83/426634/campos_512_v4
+83/426635/campos_512_v4
+83/426649/campos_512_v4
+83/426659/campos_512_v4
+83/426665/campos_512_v4
+83/426687/campos_512_v4
+83/426695/campos_512_v4
+83/426696/campos_512_v4
+83/426698/campos_512_v4
+83/426704/campos_512_v4
+83/426721/campos_512_v4
+83/426738/campos_512_v4
+83/426766/campos_512_v4
+83/426777/campos_512_v4
+83/426788/campos_512_v4
+83/426792/campos_512_v4
+83/426800/campos_512_v4
+83/426807/campos_512_v4
+83/426809/campos_512_v4
+83/426813/campos_512_v4
+83/426824/campos_512_v4
+83/426825/campos_512_v4
+83/426833/campos_512_v4
+83/426835/campos_512_v4
+83/426837/campos_512_v4
+83/426848/campos_512_v4
+83/426857/campos_512_v4
+83/426867/campos_512_v4
+83/426889/campos_512_v4
+83/426890/campos_512_v4
+83/426891/campos_512_v4
+83/426896/campos_512_v4
+83/426897/campos_512_v4
+83/426901/campos_512_v4
+83/426913/campos_512_v4
+83/426916/campos_512_v4
+83/426919/campos_512_v4
+83/426927/campos_512_v4
+83/426933/campos_512_v4
+83/426939/campos_512_v4
+83/426940/campos_512_v4
+83/426941/campos_512_v4
+83/426945/campos_512_v4
+83/426948/campos_512_v4
+83/426951/campos_512_v4
+83/426956/campos_512_v4
+83/426961/campos_512_v4
+83/426965/campos_512_v4
+83/426974/campos_512_v4
+83/426976/campos_512_v4
+83/426981/campos_512_v4
+83/426986/campos_512_v4
+83/426992/campos_512_v4
+83/426993/campos_512_v4
+83/426996/campos_512_v4
+83/427022/campos_512_v4
+83/427029/campos_512_v4
+83/427030/campos_512_v4
+83/427065/campos_512_v4
+83/427083/campos_512_v4
+83/427084/campos_512_v4
+83/427089/campos_512_v4
+83/427095/campos_512_v4
+83/427102/campos_512_v4
+83/427110/campos_512_v4
+83/427111/campos_512_v4
+83/427113/campos_512_v4
+83/427125/campos_512_v4
+83/427131/campos_512_v4
+83/427142/campos_512_v4
+83/427153/campos_512_v4
+83/427166/campos_512_v4
+83/427177/campos_512_v4
+83/427178/campos_512_v4
+83/427179/campos_512_v4
+83/427180/campos_512_v4
+83/427182/campos_512_v4
+83/427185/campos_512_v4
+83/427187/campos_512_v4
+83/427188/campos_512_v4
+83/427195/campos_512_v4
+83/427202/campos_512_v4
+83/427213/campos_512_v4
+83/427215/campos_512_v4
+83/427225/campos_512_v4
+83/427232/campos_512_v4
+83/427239/campos_512_v4
+83/427256/campos_512_v4
+83/427284/campos_512_v4
+83/427294/campos_512_v4
+83/427300/campos_512_v4
+83/427307/campos_512_v4
+83/427308/campos_512_v4
+83/427314/campos_512_v4
+83/427319/campos_512_v4
+83/427323/campos_512_v4
+83/427342/campos_512_v4
+83/427346/campos_512_v4
+83/427357/campos_512_v4
+83/427358/campos_512_v4
+83/427359/campos_512_v4
+83/427361/campos_512_v4
+83/427363/campos_512_v4
+83/427370/campos_512_v4
+83/427381/campos_512_v4
+83/427388/campos_512_v4
+83/427399/campos_512_v4
+83/427411/campos_512_v4
+83/427416/campos_512_v4
+83/427424/campos_512_v4
+83/427428/campos_512_v4
+83/427440/campos_512_v4
+83/427448/campos_512_v4
+83/427456/campos_512_v4
+83/427476/campos_512_v4
+83/427480/campos_512_v4
+83/427483/campos_512_v4
+83/427489/campos_512_v4
+83/427508/campos_512_v4
+83/427541/campos_512_v4
+83/427548/campos_512_v4
+83/427550/campos_512_v4
+83/427552/campos_512_v4
+83/427560/campos_512_v4
+83/427562/campos_512_v4
+83/427564/campos_512_v4
+83/427565/campos_512_v4
+83/427566/campos_512_v4
+83/427572/campos_512_v4
+83/427582/campos_512_v4
+83/427583/campos_512_v4
+83/427587/campos_512_v4
+83/427589/campos_512_v4
+83/427604/campos_512_v4
+83/427612/campos_512_v4
+83/427620/campos_512_v4
+83/427630/campos_512_v4
+83/427636/campos_512_v4
+83/427640/campos_512_v4
+83/427652/campos_512_v4
+83/427655/campos_512_v4
+83/427658/campos_512_v4
+83/427662/campos_512_v4
+83/427663/campos_512_v4
+83/427666/campos_512_v4
+83/427668/campos_512_v4
+83/427678/campos_512_v4
+83/427684/campos_512_v4
+83/427698/campos_512_v4
+83/427701/campos_512_v4
+83/427709/campos_512_v4
+83/427731/campos_512_v4
+83/427733/campos_512_v4
+83/427737/campos_512_v4
+83/427739/campos_512_v4
+83/427743/campos_512_v4
+83/427748/campos_512_v4
+83/427754/campos_512_v4
+83/427758/campos_512_v4
+83/427764/campos_512_v4
+83/427766/campos_512_v4
+83/427772/campos_512_v4
+83/427773/campos_512_v4
+83/427781/campos_512_v4
+83/427786/campos_512_v4
+83/427791/campos_512_v4
+83/427797/campos_512_v4
+83/427806/campos_512_v4
+83/427808/campos_512_v4
+83/427816/campos_512_v4
+83/427817/campos_512_v4
+83/427819/campos_512_v4
+83/427820/campos_512_v4
+83/427828/campos_512_v4
+83/427840/campos_512_v4
+83/427842/campos_512_v4
+83/427860/campos_512_v4
+83/427867/campos_512_v4
+83/427879/campos_512_v4
+83/427883/campos_512_v4
+83/427927/campos_512_v4
+83/427946/campos_512_v4
+83/427953/campos_512_v4
+83/427974/campos_512_v4
+83/427976/campos_512_v4
+83/427977/campos_512_v4
+83/427981/campos_512_v4
+83/427989/campos_512_v4
+83/427992/campos_512_v4
+83/428000/campos_512_v4
+83/428005/campos_512_v4
+83/428008/campos_512_v4
+83/428009/campos_512_v4
+83/428021/campos_512_v4
+83/428037/campos_512_v4
+83/428041/campos_512_v4
+83/428047/campos_512_v4
+83/428051/campos_512_v4
+83/428054/campos_512_v4
+83/428068/campos_512_v4
+83/428070/campos_512_v4
+83/428093/campos_512_v4
+83/428097/campos_512_v4
+83/428099/campos_512_v4
+83/428110/campos_512_v4
+83/428119/campos_512_v4
+83/428121/campos_512_v4
+83/428127/campos_512_v4
+83/428134/campos_512_v4
+83/428141/campos_512_v4
+83/428145/campos_512_v4
+83/428162/campos_512_v4
+83/428165/campos_512_v4
+83/428166/campos_512_v4
+83/428171/campos_512_v4
+83/428179/campos_512_v4
+83/428185/campos_512_v4
+83/428213/campos_512_v4
+83/428234/campos_512_v4
+83/428244/campos_512_v4
+83/428245/campos_512_v4
+83/428251/campos_512_v4
+83/428254/campos_512_v4
+83/428257/campos_512_v4
+83/428268/campos_512_v4
+83/428270/campos_512_v4
+83/428286/campos_512_v4
+83/428294/campos_512_v4
+83/428300/campos_512_v4
+83/428306/campos_512_v4
+83/428310/campos_512_v4
+83/428313/campos_512_v4
+83/428315/campos_512_v4
+83/428316/campos_512_v4
+83/428320/campos_512_v4
+83/428337/campos_512_v4
+83/428340/campos_512_v4
+83/428341/campos_512_v4
+83/428342/campos_512_v4
+83/428343/campos_512_v4
+83/428348/campos_512_v4
+83/428361/campos_512_v4
+83/428362/campos_512_v4
+83/428365/campos_512_v4
+83/428366/campos_512_v4
+83/428369/campos_512_v4
+83/428370/campos_512_v4
+83/428378/campos_512_v4
+83/428379/campos_512_v4
+83/428384/campos_512_v4
+83/428389/campos_512_v4
+83/428397/campos_512_v4
+83/428405/campos_512_v4
+83/428409/campos_512_v4
+83/428410/campos_512_v4
+83/428425/campos_512_v4
+83/428426/campos_512_v4
+83/428431/campos_512_v4
+83/428434/campos_512_v4
+83/428436/campos_512_v4
+83/428437/campos_512_v4
+83/428447/campos_512_v4
+83/428456/campos_512_v4
+83/428460/campos_512_v4
+83/428470/campos_512_v4
+83/428473/campos_512_v4
+83/428474/campos_512_v4
+83/428477/campos_512_v4
+83/428478/campos_512_v4
+83/428490/campos_512_v4
+83/428492/campos_512_v4
+83/428496/campos_512_v4
+83/428508/campos_512_v4
+83/428514/campos_512_v4
+83/428515/campos_512_v4
+83/428531/campos_512_v4
+83/428532/campos_512_v4
+83/428540/campos_512_v4
+83/428542/campos_512_v4
+83/428545/campos_512_v4
+83/428548/campos_512_v4
+83/428551/campos_512_v4
+83/428564/campos_512_v4
+83/428574/campos_512_v4
+83/428584/campos_512_v4
+83/428589/campos_512_v4
+83/428592/campos_512_v4
+83/428601/campos_512_v4
+83/428631/campos_512_v4
+83/428637/campos_512_v4
+83/428642/campos_512_v4
+83/428646/campos_512_v4
+83/428647/campos_512_v4
+83/428650/campos_512_v4
+83/428659/campos_512_v4
+83/428660/campos_512_v4
+83/428668/campos_512_v4
+83/428670/campos_512_v4
+83/428671/campos_512_v4
+83/428677/campos_512_v4
+83/428691/campos_512_v4
+83/428699/campos_512_v4
+83/428701/campos_512_v4
+83/428708/campos_512_v4
+83/428722/campos_512_v4
+83/428751/campos_512_v4
+83/428753/campos_512_v4
+83/428757/campos_512_v4
+83/428762/campos_512_v4
+83/428763/campos_512_v4
+83/428768/campos_512_v4
+83/428769/campos_512_v4
+83/428790/campos_512_v4
+83/428793/campos_512_v4
+83/428806/campos_512_v4
+83/428812/campos_512_v4
+83/428826/campos_512_v4
+83/428827/campos_512_v4
+83/428835/campos_512_v4
+83/428837/campos_512_v4
+83/428842/campos_512_v4
+83/428845/campos_512_v4
+83/428852/campos_512_v4
+83/428859/campos_512_v4
+83/428866/campos_512_v4
+83/428872/campos_512_v4
+83/428876/campos_512_v4
+83/428877/campos_512_v4
+83/428885/campos_512_v4
+83/428899/campos_512_v4
+83/428907/campos_512_v4
+83/428915/campos_512_v4
+83/428925/campos_512_v4
+83/428946/campos_512_v4
+83/428948/campos_512_v4
+83/428949/campos_512_v4
+83/428956/campos_512_v4
+83/428966/campos_512_v4
+83/428986/campos_512_v4
+83/428988/campos_512_v4
+83/429005/campos_512_v4
+83/429013/campos_512_v4
+83/429018/campos_512_v4
+83/429020/campos_512_v4
+83/429026/campos_512_v4
+83/429031/campos_512_v4
+83/429041/campos_512_v4
+83/429074/campos_512_v4
+83/429087/campos_512_v4
+83/429093/campos_512_v4
+83/429098/campos_512_v4
+83/429102/campos_512_v4
+83/429109/campos_512_v4
+83/429141/campos_512_v4
+83/429181/campos_512_v4
+83/429187/campos_512_v4
+83/429194/campos_512_v4
+83/429196/campos_512_v4
+83/429197/campos_512_v4
+83/429217/campos_512_v4
+83/429223/campos_512_v4
+83/429224/campos_512_v4
+83/429240/campos_512_v4
+83/429243/campos_512_v4
+83/429254/campos_512_v4
+83/429268/campos_512_v4
+83/429302/campos_512_v4
+83/429305/campos_512_v4
+83/429309/campos_512_v4
+83/429317/campos_512_v4
+83/429321/campos_512_v4
+83/429333/campos_512_v4
+83/429338/campos_512_v4
+83/429341/campos_512_v4
+83/429342/campos_512_v4
+83/429353/campos_512_v4
+83/429366/campos_512_v4
+83/429375/campos_512_v4
+83/429380/campos_512_v4
+83/429391/campos_512_v4
+83/429392/campos_512_v4
+83/429399/campos_512_v4
+83/429400/campos_512_v4
+83/429407/campos_512_v4
+83/429420/campos_512_v4
+83/429424/campos_512_v4
+83/429427/campos_512_v4
+83/429430/campos_512_v4
+83/429439/campos_512_v4
+83/429447/campos_512_v4
+83/429453/campos_512_v4
+83/429460/campos_512_v4
+83/429463/campos_512_v4
+83/429464/campos_512_v4
+83/429468/campos_512_v4
+83/429469/campos_512_v4
+83/429471/campos_512_v4
+83/429475/campos_512_v4
+83/429476/campos_512_v4
+83/429478/campos_512_v4
+83/429482/campos_512_v4
+83/429483/campos_512_v4
+83/429488/campos_512_v4
+83/429503/campos_512_v4
+83/429511/campos_512_v4
+83/429523/campos_512_v4
+83/429529/campos_512_v4
+83/429531/campos_512_v4
+83/429532/campos_512_v4
+83/429533/campos_512_v4
+83/429536/campos_512_v4
+83/429542/campos_512_v4
+83/429545/campos_512_v4
+83/429548/campos_512_v4
+83/429559/campos_512_v4
+83/429560/campos_512_v4
+83/429591/campos_512_v4
+83/429595/campos_512_v4
+83/429626/campos_512_v4
+83/429643/campos_512_v4
+83/429644/campos_512_v4
+83/429646/campos_512_v4
+83/429649/campos_512_v4
+83/429651/campos_512_v4
+83/429668/campos_512_v4
+83/429673/campos_512_v4
+83/429678/campos_512_v4
+83/429680/campos_512_v4
+83/429682/campos_512_v4
+83/429692/campos_512_v4
+83/429710/campos_512_v4
+83/429712/campos_512_v4
+83/429714/campos_512_v4
+83/429715/campos_512_v4
+83/429720/campos_512_v4
+83/429725/campos_512_v4
+83/429726/campos_512_v4
+84/430004/campos_512_v4
+84/430014/campos_512_v4
+84/430034/campos_512_v4
+84/430042/campos_512_v4
+84/430077/campos_512_v4
+84/430079/campos_512_v4
+84/430091/campos_512_v4
+84/430099/campos_512_v4
+84/430102/campos_512_v4
+84/430110/campos_512_v4
+84/430127/campos_512_v4
+84/430129/campos_512_v4
+84/430132/campos_512_v4
+84/430144/campos_512_v4
+84/430165/campos_512_v4
+84/430183/campos_512_v4
+84/430185/campos_512_v4
+84/430186/campos_512_v4
+84/430189/campos_512_v4
+84/430192/campos_512_v4
+84/430193/campos_512_v4
+84/430194/campos_512_v4
+84/430200/campos_512_v4
+84/430206/campos_512_v4
+84/430207/campos_512_v4
+84/430208/campos_512_v4
+84/430210/campos_512_v4
+84/430220/campos_512_v4
+84/430245/campos_512_v4
+84/430253/campos_512_v4
+84/430257/campos_512_v4
+84/430275/campos_512_v4
+84/430277/campos_512_v4
+84/430290/campos_512_v4
+84/430295/campos_512_v4
+84/430300/campos_512_v4
+84/430302/campos_512_v4
+84/430304/campos_512_v4
+84/430311/campos_512_v4
+84/430315/campos_512_v4
+84/430321/campos_512_v4
+84/430334/campos_512_v4
+84/430348/campos_512_v4
+84/430356/campos_512_v4
+84/430359/campos_512_v4
+84/430361/campos_512_v4
+84/430362/campos_512_v4
+84/430365/campos_512_v4
+84/430368/campos_512_v4
+84/430371/campos_512_v4
+84/430373/campos_512_v4
+84/430389/campos_512_v4
+84/430412/campos_512_v4
+84/430421/campos_512_v4
+84/430423/campos_512_v4
+84/430445/campos_512_v4
+84/430449/campos_512_v4
+84/430457/campos_512_v4
+84/430477/campos_512_v4
+84/430487/campos_512_v4
+84/430501/campos_512_v4
+84/430518/campos_512_v4
+84/430547/campos_512_v4
+84/430553/campos_512_v4
+84/430555/campos_512_v4
+84/430556/campos_512_v4
+84/430557/campos_512_v4
+84/430561/campos_512_v4
+84/430576/campos_512_v4
+84/430577/campos_512_v4
+84/430586/campos_512_v4
+84/430598/campos_512_v4
+84/430608/campos_512_v4
+84/430610/campos_512_v4
+84/430613/campos_512_v4
+84/430614/campos_512_v4
+84/430618/campos_512_v4
+84/430622/campos_512_v4
+84/430625/campos_512_v4
+84/430627/campos_512_v4
+84/430629/campos_512_v4
+84/430633/campos_512_v4
+84/430651/campos_512_v4
+84/430680/campos_512_v4
+84/430687/campos_512_v4
+84/430694/campos_512_v4
+84/430701/campos_512_v4
+84/430704/campos_512_v4
+84/430713/campos_512_v4
+84/430718/campos_512_v4
+84/430723/campos_512_v4
+84/430724/campos_512_v4
+84/430729/campos_512_v4
+84/430736/campos_512_v4
+84/430745/campos_512_v4
+84/430763/campos_512_v4
+84/430771/campos_512_v4
+84/430785/campos_512_v4
+84/430814/campos_512_v4
+84/430815/campos_512_v4
+84/430829/campos_512_v4
+84/430840/campos_512_v4
+84/430842/campos_512_v4
+84/430845/campos_512_v4
+84/430846/campos_512_v4
+84/430847/campos_512_v4
+84/430848/campos_512_v4
+84/430849/campos_512_v4
+84/430851/campos_512_v4
+84/430862/campos_512_v4
+84/430865/campos_512_v4
+84/430869/campos_512_v4
+84/430901/campos_512_v4
+84/430902/campos_512_v4
+84/430913/campos_512_v4
+84/430915/campos_512_v4
+84/430942/campos_512_v4
+84/430952/campos_512_v4
+84/430995/campos_512_v4
+84/431008/campos_512_v4
+84/431039/campos_512_v4
+84/431046/campos_512_v4
+84/431073/campos_512_v4
+84/431079/campos_512_v4
+84/431080/campos_512_v4
+84/431084/campos_512_v4
+84/431090/campos_512_v4
+84/431102/campos_512_v4
+84/431103/campos_512_v4
+84/431104/campos_512_v4
+84/431105/campos_512_v4
+84/431115/campos_512_v4
+84/431118/campos_512_v4
+84/431131/campos_512_v4
+84/431132/campos_512_v4
+84/431159/campos_512_v4
+84/431165/campos_512_v4
+84/431166/campos_512_v4
+84/431168/campos_512_v4
+84/431171/campos_512_v4
+84/431181/campos_512_v4
+84/431185/campos_512_v4
+84/431194/campos_512_v4
+84/431195/campos_512_v4
+84/431205/campos_512_v4
+84/431210/campos_512_v4
+84/431215/campos_512_v4
+84/431219/campos_512_v4
+84/431224/campos_512_v4
+84/431229/campos_512_v4
+84/431239/campos_512_v4
+84/431242/campos_512_v4
+84/431243/campos_512_v4
+84/431270/campos_512_v4
+84/431276/campos_512_v4
+84/431278/campos_512_v4
+84/431289/campos_512_v4
+84/431293/campos_512_v4
+84/431299/campos_512_v4
+84/431305/campos_512_v4
+84/431311/campos_512_v4
+84/431316/campos_512_v4
+84/431317/campos_512_v4
+84/431322/campos_512_v4
+84/431325/campos_512_v4
+84/431328/campos_512_v4
+84/431333/campos_512_v4
+84/431339/campos_512_v4
+84/431340/campos_512_v4
+84/431347/campos_512_v4
+84/431351/campos_512_v4
+84/431354/campos_512_v4
+84/431360/campos_512_v4
+84/431371/campos_512_v4
+84/431398/campos_512_v4
+84/431400/campos_512_v4
+84/431401/campos_512_v4
+84/431406/campos_512_v4
+84/431409/campos_512_v4
+84/431422/campos_512_v4
+84/431430/campos_512_v4
+84/431434/campos_512_v4
+84/431439/campos_512_v4
+84/431446/campos_512_v4
+84/431447/campos_512_v4
+84/431456/campos_512_v4
+84/431464/campos_512_v4
+84/431467/campos_512_v4
+84/431472/campos_512_v4
+84/431476/campos_512_v4
+84/431481/campos_512_v4
+84/431482/campos_512_v4
+84/431509/campos_512_v4
+84/431522/campos_512_v4
+84/431527/campos_512_v4
+84/431545/campos_512_v4
+84/431549/campos_512_v4
+84/431555/campos_512_v4
+84/431557/campos_512_v4
+84/431560/campos_512_v4
+84/431572/campos_512_v4
+84/431574/campos_512_v4
+84/431587/campos_512_v4
+84/431597/campos_512_v4
+84/431603/campos_512_v4
+84/431610/campos_512_v4
+84/431611/campos_512_v4
+84/431621/campos_512_v4
+84/431623/campos_512_v4
+84/431649/campos_512_v4
+84/431653/campos_512_v4
+84/431670/campos_512_v4
+84/431680/campos_512_v4
+84/431683/campos_512_v4
+84/431689/campos_512_v4
+84/431693/campos_512_v4
+84/431697/campos_512_v4
+84/431703/campos_512_v4
+84/431705/campos_512_v4
+84/431716/campos_512_v4
+84/431723/campos_512_v4
+84/431743/campos_512_v4
+84/431755/campos_512_v4
+84/431756/campos_512_v4
+84/431765/campos_512_v4
+84/431768/campos_512_v4
+84/431809/campos_512_v4
+84/431812/campos_512_v4
+84/431814/campos_512_v4
+84/431830/campos_512_v4
+84/431834/campos_512_v4
+84/431850/campos_512_v4
+84/431856/campos_512_v4
+84/431862/campos_512_v4
+84/431864/campos_512_v4
+84/431868/campos_512_v4
+84/431880/campos_512_v4
+84/431889/campos_512_v4
+84/431890/campos_512_v4
+84/431913/campos_512_v4
+84/431927/campos_512_v4
+84/431928/campos_512_v4
+84/431955/campos_512_v4
+84/431959/campos_512_v4
+84/431961/campos_512_v4
+84/431969/campos_512_v4
+84/431973/campos_512_v4
+84/431974/campos_512_v4
+84/431975/campos_512_v4
+84/431977/campos_512_v4
+84/431980/campos_512_v4
+84/431985/campos_512_v4
+84/431992/campos_512_v4
+84/432005/campos_512_v4
+84/432012/campos_512_v4
+84/432019/campos_512_v4
+84/432029/campos_512_v4
+84/432041/campos_512_v4
+84/432054/campos_512_v4
+84/432062/campos_512_v4
+84/432067/campos_512_v4
+84/432070/campos_512_v4
+84/432079/campos_512_v4
+84/432084/campos_512_v4
+84/432086/campos_512_v4
+84/432105/campos_512_v4
+84/432106/campos_512_v4
+84/432110/campos_512_v4
+84/432111/campos_512_v4
+84/432112/campos_512_v4
+84/432113/campos_512_v4
+84/432122/campos_512_v4
+84/432124/campos_512_v4
+84/432138/campos_512_v4
+84/432149/campos_512_v4
+84/432155/campos_512_v4
+84/432157/campos_512_v4
+84/432174/campos_512_v4
+84/432178/campos_512_v4
+84/432184/campos_512_v4
+84/432194/campos_512_v4
+84/432198/campos_512_v4
+84/432209/campos_512_v4
+84/432217/campos_512_v4
+84/432231/campos_512_v4
+84/432243/campos_512_v4
+84/432245/campos_512_v4
+84/432248/campos_512_v4
+84/432254/campos_512_v4
+84/432264/campos_512_v4
+84/432270/campos_512_v4
+84/432278/campos_512_v4
+84/432280/campos_512_v4
+84/432283/campos_512_v4
+84/432289/campos_512_v4
+84/432290/campos_512_v4
+84/432325/campos_512_v4
+84/432330/campos_512_v4
+84/432331/campos_512_v4
+84/432342/campos_512_v4
+84/432348/campos_512_v4
+84/432358/campos_512_v4
+84/432359/campos_512_v4
+84/432383/campos_512_v4
+84/432389/campos_512_v4
+84/432394/campos_512_v4
+84/432408/campos_512_v4
+84/432415/campos_512_v4
+84/432423/campos_512_v4
+84/432438/campos_512_v4
+84/432453/campos_512_v4
+84/432460/campos_512_v4
+84/432474/campos_512_v4
+84/432482/campos_512_v4
+84/432485/campos_512_v4
+84/432493/campos_512_v4
+84/432508/campos_512_v4
+84/432513/campos_512_v4
+84/432520/campos_512_v4
+84/432525/campos_512_v4
+84/432527/campos_512_v4
+84/432542/campos_512_v4
+84/432546/campos_512_v4
+84/432556/campos_512_v4
+84/432568/campos_512_v4
+84/432571/campos_512_v4
+84/432574/campos_512_v4
+84/432580/campos_512_v4
+84/432584/campos_512_v4
+84/432607/campos_512_v4
+84/432619/campos_512_v4
+84/432632/campos_512_v4
+84/432640/campos_512_v4
+84/432652/campos_512_v4
+84/432662/campos_512_v4
+84/432672/campos_512_v4
+84/432683/campos_512_v4
+84/432700/campos_512_v4
+84/432716/campos_512_v4
+84/432717/campos_512_v4
+84/432719/campos_512_v4
+84/432726/campos_512_v4
+84/432732/campos_512_v4
+84/432734/campos_512_v4
+84/432742/campos_512_v4
+84/432761/campos_512_v4
+84/432766/campos_512_v4
+84/432772/campos_512_v4
+84/432773/campos_512_v4
+84/432789/campos_512_v4
+84/432799/campos_512_v4
+84/432800/campos_512_v4
+84/432805/campos_512_v4
+84/432806/campos_512_v4
+84/432808/campos_512_v4
+84/432811/campos_512_v4
+84/432824/campos_512_v4
+84/432825/campos_512_v4
+84/432830/campos_512_v4
+84/432835/campos_512_v4
+84/432837/campos_512_v4
+84/432838/campos_512_v4
+84/432847/campos_512_v4
+84/432848/campos_512_v4
+84/432854/campos_512_v4
+84/432857/campos_512_v4
+84/432868/campos_512_v4
+84/432871/campos_512_v4
+84/432877/campos_512_v4
+84/432878/campos_512_v4
+84/432884/campos_512_v4
+84/432885/campos_512_v4
+84/432891/campos_512_v4
+84/432919/campos_512_v4
+84/432922/campos_512_v4
+84/432924/campos_512_v4
+84/432935/campos_512_v4
+84/432952/campos_512_v4
+84/432955/campos_512_v4
+84/432956/campos_512_v4
+84/432959/campos_512_v4
+84/432975/campos_512_v4
+84/432995/campos_512_v4
+84/432996/campos_512_v4
+84/432999/campos_512_v4
+84/433000/campos_512_v4
+84/433014/campos_512_v4
+84/433024/campos_512_v4
+84/433028/campos_512_v4
+84/433029/campos_512_v4
+84/433033/campos_512_v4
+84/433037/campos_512_v4
+84/433039/campos_512_v4
+84/433040/campos_512_v4
+84/433044/campos_512_v4
+84/433059/campos_512_v4
+84/433071/campos_512_v4
+84/433072/campos_512_v4
+84/433081/campos_512_v4
+84/433083/campos_512_v4
+84/433135/campos_512_v4
+84/433148/campos_512_v4
+84/433153/campos_512_v4
+84/433154/campos_512_v4
+84/433163/campos_512_v4
+84/433164/campos_512_v4
+84/433169/campos_512_v4
+84/433170/campos_512_v4
+84/433182/campos_512_v4
+84/433189/campos_512_v4
+84/433198/campos_512_v4
+84/433203/campos_512_v4
+84/433204/campos_512_v4
+84/433222/campos_512_v4
+84/433224/campos_512_v4
+84/433227/campos_512_v4
+84/433229/campos_512_v4
+84/433235/campos_512_v4
+84/433236/campos_512_v4
+84/433238/campos_512_v4
+84/433241/campos_512_v4
+84/433243/campos_512_v4
+84/433245/campos_512_v4
+84/433249/campos_512_v4
+84/433251/campos_512_v4
+84/433258/campos_512_v4
+84/433263/campos_512_v4
+84/433268/campos_512_v4
+84/433275/campos_512_v4
+84/433276/campos_512_v4
+84/433282/campos_512_v4
+84/433306/campos_512_v4
+84/433312/campos_512_v4
+84/433316/campos_512_v4
+84/433319/campos_512_v4
+84/433325/campos_512_v4
+84/433330/campos_512_v4
+84/433332/campos_512_v4
+84/433338/campos_512_v4
+84/433349/campos_512_v4
+84/433362/campos_512_v4
+84/433377/campos_512_v4
+84/433378/campos_512_v4
+84/433379/campos_512_v4
+84/433383/campos_512_v4
+84/433384/campos_512_v4
+84/433393/campos_512_v4
+84/433396/campos_512_v4
+84/433399/campos_512_v4
+84/433400/campos_512_v4
+84/433403/campos_512_v4
+84/433413/campos_512_v4
+84/433429/campos_512_v4
+84/433438/campos_512_v4
+84/433457/campos_512_v4
+84/433470/campos_512_v4
+84/433475/campos_512_v4
+84/433480/campos_512_v4
+84/433499/campos_512_v4
+84/433503/campos_512_v4
+84/433513/campos_512_v4
+84/433523/campos_512_v4
+84/433525/campos_512_v4
+84/433527/campos_512_v4
+84/433532/campos_512_v4
+84/433534/campos_512_v4
+84/433537/campos_512_v4
+84/433540/campos_512_v4
+84/433552/campos_512_v4
+84/433560/campos_512_v4
+84/433569/campos_512_v4
+84/433578/campos_512_v4
+84/433585/campos_512_v4
+84/433596/campos_512_v4
+84/433620/campos_512_v4
+84/433621/campos_512_v4
+84/433640/campos_512_v4
+84/433642/campos_512_v4
+84/433648/campos_512_v4
+84/433650/campos_512_v4
+84/433655/campos_512_v4
+84/433671/campos_512_v4
+84/433672/campos_512_v4
+84/433680/campos_512_v4
+84/433688/campos_512_v4
+84/433692/campos_512_v4
+84/433695/campos_512_v4
+84/433699/campos_512_v4
+84/433703/campos_512_v4
+84/433704/campos_512_v4
+84/433706/campos_512_v4
+84/433711/campos_512_v4
+84/433718/campos_512_v4
+84/433725/campos_512_v4
+84/433732/campos_512_v4
+84/433736/campos_512_v4
+84/433740/campos_512_v4
+84/433758/campos_512_v4
+84/433770/campos_512_v4
+84/433778/campos_512_v4
+84/433780/campos_512_v4
+84/433789/campos_512_v4
+84/433801/campos_512_v4
+84/433802/campos_512_v4
+84/433807/campos_512_v4
+84/433811/campos_512_v4
+84/433816/campos_512_v4
+84/433818/campos_512_v4
+84/433826/campos_512_v4
+84/433847/campos_512_v4
+84/433852/campos_512_v4
+84/433853/campos_512_v4
+84/433856/campos_512_v4
+84/433859/campos_512_v4
+84/433860/campos_512_v4
+84/433866/campos_512_v4
+84/433885/campos_512_v4
+84/433887/campos_512_v4
+84/433896/campos_512_v4
+84/433898/campos_512_v4
+84/433899/campos_512_v4
+84/433919/campos_512_v4
+84/433929/campos_512_v4
+84/433934/campos_512_v4
+84/433939/campos_512_v4
+84/433945/campos_512_v4
+84/433954/campos_512_v4
+84/433967/campos_512_v4
+84/433978/campos_512_v4
+84/433979/campos_512_v4
+84/433995/campos_512_v4
+84/434001/campos_512_v4
+84/434015/campos_512_v4
+84/434017/campos_512_v4
+84/434018/campos_512_v4
+84/434040/campos_512_v4
+84/434043/campos_512_v4
+84/434046/campos_512_v4
+84/434053/campos_512_v4
+84/434065/campos_512_v4
+84/434072/campos_512_v4
+84/434075/campos_512_v4
+84/434091/campos_512_v4
+84/434095/campos_512_v4
+84/434099/campos_512_v4
+84/434101/campos_512_v4
+84/434103/campos_512_v4
+84/434106/campos_512_v4
+84/434113/campos_512_v4
+84/434115/campos_512_v4
+84/434153/campos_512_v4
+84/434154/campos_512_v4
+84/434164/campos_512_v4
+84/434170/campos_512_v4
+84/434189/campos_512_v4
+84/434190/campos_512_v4
+84/434191/campos_512_v4
+84/434196/campos_512_v4
+84/434198/campos_512_v4
+84/434207/campos_512_v4
+84/434219/campos_512_v4
+84/434234/campos_512_v4
+84/434235/campos_512_v4
+84/434239/campos_512_v4
+84/434241/campos_512_v4
+84/434254/campos_512_v4
+84/434257/campos_512_v4
+84/434260/campos_512_v4
+84/434267/campos_512_v4
+84/434270/campos_512_v4
+84/434277/campos_512_v4
+84/434299/campos_512_v4
+84/434300/campos_512_v4
+84/434312/campos_512_v4
+84/434331/campos_512_v4
+84/434334/campos_512_v4
+84/434347/campos_512_v4
+84/434375/campos_512_v4
+84/434386/campos_512_v4
+84/434392/campos_512_v4
+84/434395/campos_512_v4
+84/434397/campos_512_v4
+84/434410/campos_512_v4
+84/434418/campos_512_v4
+84/434429/campos_512_v4
+84/434439/campos_512_v4
+84/434444/campos_512_v4
+84/434450/campos_512_v4
+84/434457/campos_512_v4
+84/434484/campos_512_v4
+84/434487/campos_512_v4
+84/434493/campos_512_v4
+84/434495/campos_512_v4
+84/434506/campos_512_v4
+84/434515/campos_512_v4
+84/434522/campos_512_v4
+84/434529/campos_512_v4
+84/434534/campos_512_v4
+84/434549/campos_512_v4
+84/434563/campos_512_v4
+84/434578/campos_512_v4
+84/434595/campos_512_v4
+84/434599/campos_512_v4
+84/434605/campos_512_v4
+84/434607/campos_512_v4
+84/434615/campos_512_v4
+84/434619/campos_512_v4
+84/434628/campos_512_v4
+84/434634/campos_512_v4
+84/434647/campos_512_v4
+84/434648/campos_512_v4
+84/434653/campos_512_v4
+84/434657/campos_512_v4
+84/434683/campos_512_v4
+84/434684/campos_512_v4
+84/434687/campos_512_v4
+84/434689/campos_512_v4
+84/434709/campos_512_v4
+84/434713/campos_512_v4
+84/434715/campos_512_v4
+84/434718/campos_512_v4
+84/434732/campos_512_v4
+84/434738/campos_512_v4
+84/434741/campos_512_v4
+84/434743/campos_512_v4
+84/434747/campos_512_v4
+84/434752/campos_512_v4
+84/434756/campos_512_v4
+84/434757/campos_512_v4
+84/434759/campos_512_v4
+84/434761/campos_512_v4
+84/434767/campos_512_v4
+84/434773/campos_512_v4
+84/434783/campos_512_v4
+84/434794/campos_512_v4
+84/434815/campos_512_v4
+84/434818/campos_512_v4
+84/434826/campos_512_v4
+84/434836/campos_512_v4
+84/434838/campos_512_v4
+84/434843/campos_512_v4
+84/434856/campos_512_v4
+84/434863/campos_512_v4
+84/434864/campos_512_v4
+84/434871/campos_512_v4
+84/434874/campos_512_v4
+84/434885/campos_512_v4
+84/434898/campos_512_v4
+84/434909/campos_512_v4
+84/434911/campos_512_v4
+84/434917/campos_512_v4
+84/434918/campos_512_v4
+84/434920/campos_512_v4
+84/434921/campos_512_v4
+84/434925/campos_512_v4
+84/434930/campos_512_v4
+84/434933/campos_512_v4
+84/434938/campos_512_v4
+84/434941/campos_512_v4
+84/434946/campos_512_v4
+84/434947/campos_512_v4
+84/434953/campos_512_v4
+84/434956/campos_512_v4
+84/434958/campos_512_v4
+84/434963/campos_512_v4
+84/434967/campos_512_v4
+84/434979/campos_512_v4
+84/434986/campos_512_v4
+84/434994/campos_512_v4
+84/434996/campos_512_v4
+85/435004/campos_512_v4
+85/435011/campos_512_v4
+85/435028/campos_512_v4
+85/435030/campos_512_v4
+85/435034/campos_512_v4
+85/435039/campos_512_v4
+85/435051/campos_512_v4
+85/435056/campos_512_v4
+85/435058/campos_512_v4
+85/435078/campos_512_v4
+85/435080/campos_512_v4
+85/435083/campos_512_v4
+85/435090/campos_512_v4
+85/435093/campos_512_v4
+85/435099/campos_512_v4
+85/435101/campos_512_v4
+85/435111/campos_512_v4
+85/435124/campos_512_v4
+85/435128/campos_512_v4
+85/435156/campos_512_v4
+85/435181/campos_512_v4
+85/435182/campos_512_v4
+85/435201/campos_512_v4
+85/435211/campos_512_v4
+85/435212/campos_512_v4
+85/435226/campos_512_v4
+85/435231/campos_512_v4
+85/435243/campos_512_v4
+85/435254/campos_512_v4
+85/435262/campos_512_v4
+85/435274/campos_512_v4
+85/435277/campos_512_v4
+85/435278/campos_512_v4
+85/435279/campos_512_v4
+85/435281/campos_512_v4
+85/435285/campos_512_v4
+85/435288/campos_512_v4
+85/435290/campos_512_v4
+85/435309/campos_512_v4
+85/435318/campos_512_v4
+85/435321/campos_512_v4
+85/435325/campos_512_v4
+85/435343/campos_512_v4
+85/435349/campos_512_v4
+85/435350/campos_512_v4
+85/435352/campos_512_v4
+85/435357/campos_512_v4
+85/435358/campos_512_v4
+85/435365/campos_512_v4
+85/435371/campos_512_v4
+85/435373/campos_512_v4
+85/435376/campos_512_v4
+85/435379/campos_512_v4
+85/435394/campos_512_v4
+85/435398/campos_512_v4
+85/435402/campos_512_v4
+85/435403/campos_512_v4
+85/435404/campos_512_v4
+85/435405/campos_512_v4
+85/435412/campos_512_v4
+85/435413/campos_512_v4
+85/435421/campos_512_v4
+85/435432/campos_512_v4
+85/435438/campos_512_v4
+85/435443/campos_512_v4
+85/435450/campos_512_v4
+85/435457/campos_512_v4
+85/435458/campos_512_v4
+85/435461/campos_512_v4
+85/435481/campos_512_v4
+85/435487/campos_512_v4
+85/435490/campos_512_v4
+85/435492/campos_512_v4
+85/435501/campos_512_v4
+85/435502/campos_512_v4
+85/435506/campos_512_v4
+85/435524/campos_512_v4
+85/435530/campos_512_v4
+85/435538/campos_512_v4
+85/435542/campos_512_v4
+85/435548/campos_512_v4
+85/435553/campos_512_v4
+85/435557/campos_512_v4
+85/435562/campos_512_v4
+85/435567/campos_512_v4
+85/435568/campos_512_v4
+85/435569/campos_512_v4
+85/435571/campos_512_v4
+85/435573/campos_512_v4
+85/435604/campos_512_v4
+85/435607/campos_512_v4
+85/435618/campos_512_v4
+85/435619/campos_512_v4
+85/435628/campos_512_v4
+85/435646/campos_512_v4
+85/435649/campos_512_v4
+85/435655/campos_512_v4
+85/435672/campos_512_v4
+85/435681/campos_512_v4
+85/435689/campos_512_v4
+85/435692/campos_512_v4
+85/435693/campos_512_v4
+85/435695/campos_512_v4
+85/435700/campos_512_v4
+85/435706/campos_512_v4
+85/435712/campos_512_v4
+85/435726/campos_512_v4
+85/435728/campos_512_v4
+85/435731/campos_512_v4
+85/435741/campos_512_v4
+85/435751/campos_512_v4
+85/435770/campos_512_v4
+85/435772/campos_512_v4
+85/435773/campos_512_v4
+85/435776/campos_512_v4
+85/435783/campos_512_v4
+85/435790/campos_512_v4
+85/435794/campos_512_v4
+85/435795/campos_512_v4
+85/435798/campos_512_v4
+85/435809/campos_512_v4
+85/435828/campos_512_v4
+85/435830/campos_512_v4
+85/435833/campos_512_v4
+85/435843/campos_512_v4
+85/435853/campos_512_v4
+85/435868/campos_512_v4
+85/435873/campos_512_v4
+85/435879/campos_512_v4
+85/435888/campos_512_v4
+85/435893/campos_512_v4
+85/435896/campos_512_v4
+85/435902/campos_512_v4
+85/435908/campos_512_v4
+85/435910/campos_512_v4
+85/435916/campos_512_v4
+85/435931/campos_512_v4
+85/435933/campos_512_v4
+85/435934/campos_512_v4
+85/435936/campos_512_v4
+85/435938/campos_512_v4
+85/435943/campos_512_v4
+85/435956/campos_512_v4
+85/435966/campos_512_v4
+85/435973/campos_512_v4
+85/435975/campos_512_v4
+85/435978/campos_512_v4
+85/435989/campos_512_v4
+85/435993/campos_512_v4
+85/435998/campos_512_v4
+85/436012/campos_512_v4
+85/436014/campos_512_v4
+85/436015/campos_512_v4
+85/436018/campos_512_v4
+85/436019/campos_512_v4
+85/436026/campos_512_v4
+85/436036/campos_512_v4
+85/436037/campos_512_v4
+85/436041/campos_512_v4
+85/436045/campos_512_v4
+85/436050/campos_512_v4
+85/436054/campos_512_v4
+85/436057/campos_512_v4
+85/436058/campos_512_v4
+85/436059/campos_512_v4
+85/436061/campos_512_v4
+85/436062/campos_512_v4
+85/436069/campos_512_v4
+85/436077/campos_512_v4
+85/436106/campos_512_v4
+85/436121/campos_512_v4
+85/436123/campos_512_v4
+85/436127/campos_512_v4
+85/436139/campos_512_v4
+85/436148/campos_512_v4
+85/436151/campos_512_v4
+85/436159/campos_512_v4
+85/436174/campos_512_v4
+85/436178/campos_512_v4
+85/436198/campos_512_v4
+85/436206/campos_512_v4
+85/436213/campos_512_v4
+85/436216/campos_512_v4
+85/436222/campos_512_v4
+85/436230/campos_512_v4
+85/436236/campos_512_v4
+85/436241/campos_512_v4
+85/436248/campos_512_v4
+85/436252/campos_512_v4
+85/436257/campos_512_v4
+85/436265/campos_512_v4
+85/436268/campos_512_v4
+85/436269/campos_512_v4
+85/436271/campos_512_v4
+85/436276/campos_512_v4
+85/436285/campos_512_v4
+85/436292/campos_512_v4
+85/436315/campos_512_v4
+85/436316/campos_512_v4
+85/436324/campos_512_v4
+85/436325/campos_512_v4
+85/436330/campos_512_v4
+85/436360/campos_512_v4
+85/436369/campos_512_v4
+85/436372/campos_512_v4
+85/436389/campos_512_v4
+85/436394/campos_512_v4
+85/436401/campos_512_v4
+85/436402/campos_512_v4
+85/436404/campos_512_v4
+85/436405/campos_512_v4
+85/436409/campos_512_v4
+85/436431/campos_512_v4
+85/436442/campos_512_v4
+85/436445/campos_512_v4
+85/436456/campos_512_v4
+85/436461/campos_512_v4
+85/436463/campos_512_v4
+85/436477/campos_512_v4
+85/436481/campos_512_v4
+85/436483/campos_512_v4
+85/436487/campos_512_v4
+85/436497/campos_512_v4
+85/436500/campos_512_v4
+85/436502/campos_512_v4
+85/436508/campos_512_v4
+85/436510/campos_512_v4
+85/436512/campos_512_v4
+85/436513/campos_512_v4
+85/436514/campos_512_v4
+85/436525/campos_512_v4
+85/436531/campos_512_v4
+85/436534/campos_512_v4
+85/436545/campos_512_v4
+85/436552/campos_512_v4
+85/436556/campos_512_v4
+85/436575/campos_512_v4
+85/436581/campos_512_v4
+85/436588/campos_512_v4
+85/436601/campos_512_v4
+85/436606/campos_512_v4
+85/436609/campos_512_v4
+85/436610/campos_512_v4
+85/436611/campos_512_v4
+85/436631/campos_512_v4
+85/436640/campos_512_v4
+85/436648/campos_512_v4
+85/436649/campos_512_v4
+85/436651/campos_512_v4
+85/436655/campos_512_v4
+85/436679/campos_512_v4
+85/436685/campos_512_v4
+85/436692/campos_512_v4
+85/436699/campos_512_v4
+85/436703/campos_512_v4
+85/436704/campos_512_v4
+85/436715/campos_512_v4
+85/436729/campos_512_v4
+85/436730/campos_512_v4
+85/436732/campos_512_v4
+85/436733/campos_512_v4
+85/436743/campos_512_v4
+85/436756/campos_512_v4
+85/436770/campos_512_v4
+85/436774/campos_512_v4
+85/436775/campos_512_v4
+85/436777/campos_512_v4
+85/436781/campos_512_v4
+85/436796/campos_512_v4
+85/436800/campos_512_v4
+85/436807/campos_512_v4
+85/436816/campos_512_v4
+85/436822/campos_512_v4
+85/436828/campos_512_v4
+85/436830/campos_512_v4
+85/436835/campos_512_v4
+85/436842/campos_512_v4
+85/436843/campos_512_v4
+85/436845/campos_512_v4
+85/436852/campos_512_v4
+85/436863/campos_512_v4
+85/436875/campos_512_v4
+85/436881/campos_512_v4
+85/436882/campos_512_v4
+85/436904/campos_512_v4
+85/436905/campos_512_v4
+85/436908/campos_512_v4
+85/436910/campos_512_v4
+85/436918/campos_512_v4
+85/436920/campos_512_v4
+85/436929/campos_512_v4
+85/436930/campos_512_v4
+85/436937/campos_512_v4
+85/436949/campos_512_v4
+85/436957/campos_512_v4
+85/436971/campos_512_v4
+85/436980/campos_512_v4
+85/436982/campos_512_v4
+85/436983/campos_512_v4
+85/436985/campos_512_v4
+85/436992/campos_512_v4
+85/437007/campos_512_v4
+85/437012/campos_512_v4
+85/437015/campos_512_v4
+85/437021/campos_512_v4
+85/437024/campos_512_v4
+85/437030/campos_512_v4
+85/437031/campos_512_v4
+85/437034/campos_512_v4
+85/437048/campos_512_v4
+85/437056/campos_512_v4
+85/437061/campos_512_v4
+85/437068/campos_512_v4
+85/437074/campos_512_v4
+85/437078/campos_512_v4
+85/437079/campos_512_v4
+85/437085/campos_512_v4
+85/437095/campos_512_v4
+85/437097/campos_512_v4
+85/437102/campos_512_v4
+85/437104/campos_512_v4
+85/437117/campos_512_v4
+85/437118/campos_512_v4
+85/437123/campos_512_v4
+85/437128/campos_512_v4
+85/437129/campos_512_v4
+85/437145/campos_512_v4
+85/437150/campos_512_v4
+85/437155/campos_512_v4
+85/437162/campos_512_v4
+85/437164/campos_512_v4
+85/437173/campos_512_v4
+85/437175/campos_512_v4
+85/437183/campos_512_v4
+85/437202/campos_512_v4
+85/437227/campos_512_v4
+85/437229/campos_512_v4
+85/437234/campos_512_v4
+85/437240/campos_512_v4
+85/437248/campos_512_v4
+85/437257/campos_512_v4
+85/437275/campos_512_v4
+85/437293/campos_512_v4
+85/437318/campos_512_v4
+85/437320/campos_512_v4
+85/437326/campos_512_v4
+85/437335/campos_512_v4
+85/437339/campos_512_v4
+85/437342/campos_512_v4
+85/437346/campos_512_v4
+85/437351/campos_512_v4
+85/437358/campos_512_v4
+85/437377/campos_512_v4
+85/437378/campos_512_v4
+85/437382/campos_512_v4
+85/437385/campos_512_v4
+85/437387/campos_512_v4
+85/437390/campos_512_v4
+85/437392/campos_512_v4
+85/437397/campos_512_v4
+85/437426/campos_512_v4
+85/437428/campos_512_v4
+85/437440/campos_512_v4
+85/437450/campos_512_v4
+85/437454/campos_512_v4
+85/437458/campos_512_v4
+85/437489/campos_512_v4
+85/437491/campos_512_v4
+85/437500/campos_512_v4
+85/437509/campos_512_v4
+85/437512/campos_512_v4
+85/437515/campos_512_v4
+85/437521/campos_512_v4
+85/437527/campos_512_v4
+85/437529/campos_512_v4
+85/437534/campos_512_v4
+85/437538/campos_512_v4
+85/437545/campos_512_v4
+85/437556/campos_512_v4
+85/437571/campos_512_v4
+85/437579/campos_512_v4
+85/437595/campos_512_v4
+85/437598/campos_512_v4
+85/437608/campos_512_v4
+85/437619/campos_512_v4
+85/437621/campos_512_v4
+85/437625/campos_512_v4
+85/437629/campos_512_v4
+85/437642/campos_512_v4
+85/437643/campos_512_v4
+85/437650/campos_512_v4
+85/437669/campos_512_v4
+85/437671/campos_512_v4
+85/437672/campos_512_v4
+85/437682/campos_512_v4
+85/437683/campos_512_v4
+85/437696/campos_512_v4
+85/437699/campos_512_v4
+85/437701/campos_512_v4
+85/437702/campos_512_v4
+85/437713/campos_512_v4
+85/437715/campos_512_v4
+85/437719/campos_512_v4
+85/437732/campos_512_v4
+85/437736/campos_512_v4
+85/437761/campos_512_v4
+85/437770/campos_512_v4
+85/437776/campos_512_v4
+85/437784/campos_512_v4
+85/437786/campos_512_v4
+85/437790/campos_512_v4
+85/437808/campos_512_v4
+85/437809/campos_512_v4
+85/437817/campos_512_v4
+85/437820/campos_512_v4
+85/437835/campos_512_v4
+85/437844/campos_512_v4
+85/437860/campos_512_v4
+85/437863/campos_512_v4
+85/437879/campos_512_v4
+85/437883/campos_512_v4
+85/437898/campos_512_v4
+85/437916/campos_512_v4
+85/437917/campos_512_v4
+85/437924/campos_512_v4
+85/437926/campos_512_v4
+85/437939/campos_512_v4
+85/437958/campos_512_v4
+85/437972/campos_512_v4
+85/437981/campos_512_v4
+85/437982/campos_512_v4
+85/437993/campos_512_v4
+85/437998/campos_512_v4
+85/438000/campos_512_v4
+85/438005/campos_512_v4
+85/438007/campos_512_v4
+85/438018/campos_512_v4
+85/438026/campos_512_v4
+85/438031/campos_512_v4
+85/438056/campos_512_v4
+85/438059/campos_512_v4
+85/438064/campos_512_v4
+85/438065/campos_512_v4
+85/438071/campos_512_v4
+85/438073/campos_512_v4
+85/438089/campos_512_v4
+85/438129/campos_512_v4
+85/438137/campos_512_v4
+85/438146/campos_512_v4
+85/438148/campos_512_v4
+85/438164/campos_512_v4
+85/438168/campos_512_v4
+85/438169/campos_512_v4
+85/438170/campos_512_v4
+85/438197/campos_512_v4
+85/438198/campos_512_v4
+85/438232/campos_512_v4
+85/438252/campos_512_v4
+85/438261/campos_512_v4
+85/438269/campos_512_v4
+85/438278/campos_512_v4
+85/438294/campos_512_v4
+85/438299/campos_512_v4
+85/438309/campos_512_v4
+85/438316/campos_512_v4
+85/438324/campos_512_v4
+85/438335/campos_512_v4
+85/438338/campos_512_v4
+85/438348/campos_512_v4
+85/438349/campos_512_v4
+85/438369/campos_512_v4
+85/438388/campos_512_v4
+85/438389/campos_512_v4
+85/438394/campos_512_v4
+85/438396/campos_512_v4
+85/438416/campos_512_v4
+85/438419/campos_512_v4
+85/438423/campos_512_v4
+85/438435/campos_512_v4
+85/438438/campos_512_v4
+85/438458/campos_512_v4
+85/438468/campos_512_v4
+85/438473/campos_512_v4
+85/438483/campos_512_v4
+85/438487/campos_512_v4
+85/438491/campos_512_v4
+85/438493/campos_512_v4
+85/438500/campos_512_v4
+85/438502/campos_512_v4
+85/438503/campos_512_v4
+85/438510/campos_512_v4
+85/438523/campos_512_v4
+85/438527/campos_512_v4
+85/438535/campos_512_v4
+85/438551/campos_512_v4
+85/438560/campos_512_v4
+85/438565/campos_512_v4
+85/438578/campos_512_v4
+85/438586/campos_512_v4
+85/438593/campos_512_v4
+85/438622/campos_512_v4
+85/438630/campos_512_v4
+85/438635/campos_512_v4
+85/438637/campos_512_v4
+85/438642/campos_512_v4
+85/438645/campos_512_v4
+85/438650/campos_512_v4
+85/438659/campos_512_v4
+85/438669/campos_512_v4
+85/438676/campos_512_v4
+85/438684/campos_512_v4
+85/438715/campos_512_v4
+85/438717/campos_512_v4
+85/438727/campos_512_v4
+85/438728/campos_512_v4
+85/438742/campos_512_v4
+85/438756/campos_512_v4
+85/438787/campos_512_v4
+85/438792/campos_512_v4
+85/438795/campos_512_v4
+85/438797/campos_512_v4
+85/438800/campos_512_v4
+85/438803/campos_512_v4
+85/438810/campos_512_v4
+85/438812/campos_512_v4
+85/438823/campos_512_v4
+85/438824/campos_512_v4
+85/438827/campos_512_v4
+85/438828/campos_512_v4
+85/438829/campos_512_v4
+85/438844/campos_512_v4
+85/438850/campos_512_v4
+85/438873/campos_512_v4
+85/438889/campos_512_v4
+85/438893/campos_512_v4
+85/438895/campos_512_v4
+85/438898/campos_512_v4
+85/438902/campos_512_v4
+85/438904/campos_512_v4
+85/438905/campos_512_v4
+85/438906/campos_512_v4
+85/438907/campos_512_v4
+85/438911/campos_512_v4
+85/438921/campos_512_v4
+85/438930/campos_512_v4
+85/438933/campos_512_v4
+85/438936/campos_512_v4
+85/438939/campos_512_v4
+85/438953/campos_512_v4
+85/438964/campos_512_v4
+85/438966/campos_512_v4
+85/438970/campos_512_v4
+85/438978/campos_512_v4
+85/438979/campos_512_v4
+85/438982/campos_512_v4
+85/438999/campos_512_v4
+85/439001/campos_512_v4
+85/439007/campos_512_v4
+85/439008/campos_512_v4
+85/439013/campos_512_v4
+85/439014/campos_512_v4
+85/439020/campos_512_v4
+85/439024/campos_512_v4
+85/439031/campos_512_v4
+85/439045/campos_512_v4
+85/439046/campos_512_v4
+85/439051/campos_512_v4
+85/439052/campos_512_v4
+85/439063/campos_512_v4
+85/439067/campos_512_v4
+85/439076/campos_512_v4
+85/439083/campos_512_v4
+85/439098/campos_512_v4
+85/439108/campos_512_v4
+85/439114/campos_512_v4
+85/439125/campos_512_v4
+85/439128/campos_512_v4
+85/439143/campos_512_v4
+85/439155/campos_512_v4
+85/439159/campos_512_v4
+85/439171/campos_512_v4
+85/439183/campos_512_v4
+85/439187/campos_512_v4
+85/439204/campos_512_v4
+85/439220/campos_512_v4
+85/439222/campos_512_v4
+85/439228/campos_512_v4
+85/439230/campos_512_v4
+85/439231/campos_512_v4
+85/439236/campos_512_v4
+85/439240/campos_512_v4
+85/439249/campos_512_v4
+85/439251/campos_512_v4
+85/439257/campos_512_v4
+85/439276/campos_512_v4
+85/439279/campos_512_v4
+85/439290/campos_512_v4
+85/439299/campos_512_v4
+85/439308/campos_512_v4
+85/439309/campos_512_v4
+85/439321/campos_512_v4
+85/439323/campos_512_v4
+85/439332/campos_512_v4
+85/439336/campos_512_v4
+85/439348/campos_512_v4
+85/439349/campos_512_v4
+85/439362/campos_512_v4
+85/439364/campos_512_v4
+85/439375/campos_512_v4
+85/439379/campos_512_v4
+85/439380/campos_512_v4
+85/439381/campos_512_v4
+85/439383/campos_512_v4
+85/439389/campos_512_v4
+85/439394/campos_512_v4
+85/439455/campos_512_v4
+85/439460/campos_512_v4
+85/439462/campos_512_v4
+85/439468/campos_512_v4
+85/439473/campos_512_v4
+85/439474/campos_512_v4
+85/439479/campos_512_v4
+85/439483/campos_512_v4
+85/439500/campos_512_v4
+85/439503/campos_512_v4
+85/439533/campos_512_v4
+85/439549/campos_512_v4
+85/439551/campos_512_v4
+85/439566/campos_512_v4
+85/439568/campos_512_v4
+85/439573/campos_512_v4
+85/439593/campos_512_v4
+85/439596/campos_512_v4
+85/439625/campos_512_v4
+85/439635/campos_512_v4
+85/439636/campos_512_v4
+85/439638/campos_512_v4
+85/439646/campos_512_v4
+85/439654/campos_512_v4
+85/439660/campos_512_v4
+85/439661/campos_512_v4
+85/439687/campos_512_v4
+85/439689/campos_512_v4
+85/439698/campos_512_v4
+85/439699/campos_512_v4
+85/439710/campos_512_v4
+85/439717/campos_512_v4
+85/439718/campos_512_v4
+85/439721/campos_512_v4
+85/439724/campos_512_v4
+85/439727/campos_512_v4
+85/439730/campos_512_v4
+85/439733/campos_512_v4
+85/439743/campos_512_v4
+85/439747/campos_512_v4
+85/439759/campos_512_v4
+85/439760/campos_512_v4
+85/439769/campos_512_v4
+85/439771/campos_512_v4
+85/439806/campos_512_v4
+85/439816/campos_512_v4
+85/439817/campos_512_v4
+85/439825/campos_512_v4
+85/439828/campos_512_v4
+85/439829/campos_512_v4
+85/439837/campos_512_v4
+85/439840/campos_512_v4
+85/439850/campos_512_v4
+85/439852/campos_512_v4
+85/439861/campos_512_v4
+85/439870/campos_512_v4
+85/439873/campos_512_v4
+85/439881/campos_512_v4
+85/439882/campos_512_v4
+85/439887/campos_512_v4
+85/439904/campos_512_v4
+85/439906/campos_512_v4
+85/439908/campos_512_v4
+85/439909/campos_512_v4
+85/439918/campos_512_v4
+85/439934/campos_512_v4
+85/439947/campos_512_v4
+85/439950/campos_512_v4
+85/439953/campos_512_v4
+85/439956/campos_512_v4
+85/439957/campos_512_v4
+85/439960/campos_512_v4
+85/439981/campos_512_v4
+85/439984/campos_512_v4
+85/439999/campos_512_v4
+86/440007/campos_512_v4
+86/440016/campos_512_v4
+86/440024/campos_512_v4
+86/440037/campos_512_v4
+86/440044/campos_512_v4
+86/440046/campos_512_v4
+86/440057/campos_512_v4
+86/440061/campos_512_v4
+86/440064/campos_512_v4
+86/440065/campos_512_v4
+86/440066/campos_512_v4
+86/440069/campos_512_v4
+86/440070/campos_512_v4
+86/440082/campos_512_v4
+86/440095/campos_512_v4
+86/440106/campos_512_v4
+86/440113/campos_512_v4
+86/440122/campos_512_v4
+86/440125/campos_512_v4
+86/440131/campos_512_v4
+86/440139/campos_512_v4
+86/440167/campos_512_v4
+86/440170/campos_512_v4
+86/440180/campos_512_v4
+86/440181/campos_512_v4
+86/440183/campos_512_v4
+86/440184/campos_512_v4
+86/440186/campos_512_v4
+86/440187/campos_512_v4
+86/440191/campos_512_v4
+86/440206/campos_512_v4
+86/440214/campos_512_v4
+86/440215/campos_512_v4
+86/440217/campos_512_v4
+86/440223/campos_512_v4
+86/440224/campos_512_v4
+86/440227/campos_512_v4
+86/440233/campos_512_v4
+86/440234/campos_512_v4
+86/440240/campos_512_v4
+86/440249/campos_512_v4
+86/440251/campos_512_v4
+86/440253/campos_512_v4
+86/440254/campos_512_v4
+86/440258/campos_512_v4
+86/440271/campos_512_v4
+86/440272/campos_512_v4
+86/440282/campos_512_v4
+86/440286/campos_512_v4
+86/440298/campos_512_v4
+86/440301/campos_512_v4
+86/440305/campos_512_v4
+86/440307/campos_512_v4
+86/440314/campos_512_v4
+86/440325/campos_512_v4
+86/440326/campos_512_v4
+86/440334/campos_512_v4
+86/440353/campos_512_v4
+86/440365/campos_512_v4
+86/440379/campos_512_v4
+86/440384/campos_512_v4
+86/440404/campos_512_v4
+86/440413/campos_512_v4
+86/440429/campos_512_v4
+86/440439/campos_512_v4
+86/440443/campos_512_v4
+86/440444/campos_512_v4
+86/440448/campos_512_v4
+86/440454/campos_512_v4
+86/440458/campos_512_v4
+86/440469/campos_512_v4
+86/440470/campos_512_v4
+86/440479/campos_512_v4
+86/440480/campos_512_v4
+86/440494/campos_512_v4
+86/440496/campos_512_v4
+86/440504/campos_512_v4
+86/440506/campos_512_v4
+86/440513/campos_512_v4
+86/440516/campos_512_v4
+86/440538/campos_512_v4
+86/440539/campos_512_v4
+86/440557/campos_512_v4
+86/440567/campos_512_v4
+86/440570/campos_512_v4
+86/440575/campos_512_v4
+86/440591/campos_512_v4
+86/440592/campos_512_v4
+86/440598/campos_512_v4
+86/440610/campos_512_v4
+86/440627/campos_512_v4
+86/440641/campos_512_v4
+86/440645/campos_512_v4
+86/440655/campos_512_v4
+86/440657/campos_512_v4
+86/440659/campos_512_v4
+86/440671/campos_512_v4
+86/440672/campos_512_v4
+86/440674/campos_512_v4
+86/440684/campos_512_v4
+86/440705/campos_512_v4
+86/440723/campos_512_v4
+86/440725/campos_512_v4
+86/440730/campos_512_v4
+86/440735/campos_512_v4
+86/440739/campos_512_v4
+86/440745/campos_512_v4
+86/440746/campos_512_v4
+86/440755/campos_512_v4
+86/440759/campos_512_v4
+86/440770/campos_512_v4
+86/440780/campos_512_v4
+86/440786/campos_512_v4
+86/440794/campos_512_v4
+86/440797/campos_512_v4
+86/440801/campos_512_v4
+86/440804/campos_512_v4
+86/440806/campos_512_v4
+86/440815/campos_512_v4
+86/440816/campos_512_v4
+86/440819/campos_512_v4
+86/440842/campos_512_v4
+86/440845/campos_512_v4
+86/440853/campos_512_v4
+86/440855/campos_512_v4
+86/440864/campos_512_v4
+86/440882/campos_512_v4
+86/440891/campos_512_v4
+86/440900/campos_512_v4
+86/440908/campos_512_v4
+86/440929/campos_512_v4
+86/440931/campos_512_v4
+86/440938/campos_512_v4
+86/440946/campos_512_v4
+86/440970/campos_512_v4
+86/440979/campos_512_v4
+86/440990/campos_512_v4
+86/440994/campos_512_v4
+86/441012/campos_512_v4
+86/441020/campos_512_v4
+86/441045/campos_512_v4
+86/441049/campos_512_v4
+86/441061/campos_512_v4
+86/441062/campos_512_v4
+86/441081/campos_512_v4
+86/441086/campos_512_v4
+86/441088/campos_512_v4
+86/441089/campos_512_v4
+86/441091/campos_512_v4
+86/441092/campos_512_v4
+86/441099/campos_512_v4
+86/441100/campos_512_v4
+86/441106/campos_512_v4
+86/441110/campos_512_v4
+86/441111/campos_512_v4
+86/441112/campos_512_v4
+86/441119/campos_512_v4
+86/441125/campos_512_v4
+86/441131/campos_512_v4
+86/441156/campos_512_v4
+86/441159/campos_512_v4
+86/441166/campos_512_v4
+86/441170/campos_512_v4
+86/441179/campos_512_v4
+86/441181/campos_512_v4
+86/441185/campos_512_v4
+86/441186/campos_512_v4
+86/441191/campos_512_v4
+86/441196/campos_512_v4
+86/441207/campos_512_v4
+86/441208/campos_512_v4
+86/441210/campos_512_v4
+86/441216/campos_512_v4
+86/441217/campos_512_v4
+86/441222/campos_512_v4
+86/441226/campos_512_v4
+86/441228/campos_512_v4
+86/441255/campos_512_v4
+86/441266/campos_512_v4
+86/441273/campos_512_v4
+86/441282/campos_512_v4
+86/441284/campos_512_v4
+86/441285/campos_512_v4
+86/441288/campos_512_v4
+86/441289/campos_512_v4
+86/441294/campos_512_v4
+86/441298/campos_512_v4
+86/441301/campos_512_v4
+86/441302/campos_512_v4
+86/441303/campos_512_v4
+86/441320/campos_512_v4
+86/441322/campos_512_v4
+86/441336/campos_512_v4
+86/441353/campos_512_v4
+86/441367/campos_512_v4
+86/441369/campos_512_v4
+86/441388/campos_512_v4
+86/441398/campos_512_v4
+86/441401/campos_512_v4
+86/441409/campos_512_v4
+86/441418/campos_512_v4
+86/441421/campos_512_v4
+86/441426/campos_512_v4
+86/441435/campos_512_v4
+86/441440/campos_512_v4
+86/441441/campos_512_v4
+86/441459/campos_512_v4
+86/441462/campos_512_v4
+86/441485/campos_512_v4
+86/441488/campos_512_v4
+86/441505/campos_512_v4
+86/441516/campos_512_v4
+86/441519/campos_512_v4
+86/441522/campos_512_v4
+86/441532/campos_512_v4
+86/441536/campos_512_v4
+86/441544/campos_512_v4
+86/441561/campos_512_v4
+86/441563/campos_512_v4
+86/441570/campos_512_v4
+86/441571/campos_512_v4
+86/441577/campos_512_v4
+86/441579/campos_512_v4
+86/441583/campos_512_v4
+86/441607/campos_512_v4
+86/441613/campos_512_v4
+86/441617/campos_512_v4
+86/441619/campos_512_v4
+86/441623/campos_512_v4
+86/441626/campos_512_v4
+86/441630/campos_512_v4
+86/441634/campos_512_v4
+86/441637/campos_512_v4
+86/441640/campos_512_v4
+86/441666/campos_512_v4
+86/441667/campos_512_v4
+86/441683/campos_512_v4
+86/441687/campos_512_v4
+86/441689/campos_512_v4
+86/441691/campos_512_v4
+86/441692/campos_512_v4
+86/441695/campos_512_v4
+86/441697/campos_512_v4
+86/441706/campos_512_v4
+86/441713/campos_512_v4
+86/441723/campos_512_v4
+86/441728/campos_512_v4
+86/441731/campos_512_v4
+86/441737/campos_512_v4
+86/441746/campos_512_v4
+86/441756/campos_512_v4
+86/441765/campos_512_v4
+86/441770/campos_512_v4
+86/441773/campos_512_v4
+86/441791/campos_512_v4
+86/441794/campos_512_v4
+86/441801/campos_512_v4
+86/441808/campos_512_v4
+86/441809/campos_512_v4
+86/441834/campos_512_v4
+86/441842/campos_512_v4
+86/441845/campos_512_v4
+86/441848/campos_512_v4
+86/441853/campos_512_v4
+86/441862/campos_512_v4
+86/441871/campos_512_v4
+86/441877/campos_512_v4
+86/441881/campos_512_v4
+86/441891/campos_512_v4
+86/441896/campos_512_v4
+86/441897/campos_512_v4
+86/441902/campos_512_v4
+86/441929/campos_512_v4
+86/441933/campos_512_v4
+86/441950/campos_512_v4
+86/441963/campos_512_v4
+86/441973/campos_512_v4
+86/441987/campos_512_v4
+86/441990/campos_512_v4
+86/441998/campos_512_v4
+86/442015/campos_512_v4
+86/442018/campos_512_v4
+86/442022/campos_512_v4
+86/442028/campos_512_v4
+86/442035/campos_512_v4
+86/442040/campos_512_v4
+86/442042/campos_512_v4
+86/442053/campos_512_v4
+86/442055/campos_512_v4
+86/442068/campos_512_v4
+86/442082/campos_512_v4
+86/442088/campos_512_v4
+86/442095/campos_512_v4
+86/442104/campos_512_v4
+86/442105/campos_512_v4
+86/442112/campos_512_v4
+86/442121/campos_512_v4
+86/442123/campos_512_v4
+86/442132/campos_512_v4
+86/442134/campos_512_v4
+86/442142/campos_512_v4
+86/442152/campos_512_v4
+86/442154/campos_512_v4
+86/442162/campos_512_v4
+86/442171/campos_512_v4
+86/442182/campos_512_v4
+86/442186/campos_512_v4
+86/442194/campos_512_v4
+86/442197/campos_512_v4
+86/442202/campos_512_v4
+86/442221/campos_512_v4
+86/442232/campos_512_v4
+86/442241/campos_512_v4
+86/442254/campos_512_v4
+86/442255/campos_512_v4
+86/442279/campos_512_v4
+86/442288/campos_512_v4
+86/442290/campos_512_v4
+86/442291/campos_512_v4
+86/442292/campos_512_v4
+86/442294/campos_512_v4
+86/442299/campos_512_v4
+86/442300/campos_512_v4
+86/442306/campos_512_v4
+86/442313/campos_512_v4
+86/442321/campos_512_v4
+86/442323/campos_512_v4
+86/442358/campos_512_v4
+86/442360/campos_512_v4
+86/442361/campos_512_v4
+86/442362/campos_512_v4
+86/442365/campos_512_v4
+86/442369/campos_512_v4
+86/442378/campos_512_v4
+86/442384/campos_512_v4
+86/442387/campos_512_v4
+86/442393/campos_512_v4
+86/442415/campos_512_v4
+86/442423/campos_512_v4
+86/442426/campos_512_v4
+86/442430/campos_512_v4
+86/442452/campos_512_v4
+86/442470/campos_512_v4
+86/442474/campos_512_v4
+86/442475/campos_512_v4
+86/442482/campos_512_v4
+86/442486/campos_512_v4
+86/442487/campos_512_v4
+86/442499/campos_512_v4
+86/442514/campos_512_v4
+86/442520/campos_512_v4
+86/442527/campos_512_v4
+86/442537/campos_512_v4
+86/442538/campos_512_v4
+86/442540/campos_512_v4
+86/442541/campos_512_v4
+86/442544/campos_512_v4
+86/442558/campos_512_v4
+86/442564/campos_512_v4
+86/442568/campos_512_v4
+86/442575/campos_512_v4
+86/442580/campos_512_v4
+86/442586/campos_512_v4
+86/442591/campos_512_v4
+86/442594/campos_512_v4
+86/442596/campos_512_v4
+86/442598/campos_512_v4
+86/442601/campos_512_v4
+86/442605/campos_512_v4
+86/442615/campos_512_v4
+86/442620/campos_512_v4
+86/442621/campos_512_v4
+86/442635/campos_512_v4
+86/442636/campos_512_v4
+86/442649/campos_512_v4
+86/442663/campos_512_v4
+86/442665/campos_512_v4
+86/442670/campos_512_v4
+86/442682/campos_512_v4
+86/442688/campos_512_v4
+86/442696/campos_512_v4
+86/442703/campos_512_v4
+86/442706/campos_512_v4
+86/442710/campos_512_v4
+86/442723/campos_512_v4
+86/442760/campos_512_v4
+86/442763/campos_512_v4
+86/442765/campos_512_v4
+86/442770/campos_512_v4
+86/442771/campos_512_v4
+86/442777/campos_512_v4
+86/442781/campos_512_v4
+86/442791/campos_512_v4
+86/442817/campos_512_v4
+86/442819/campos_512_v4
+86/442828/campos_512_v4
+86/442834/campos_512_v4
+86/442851/campos_512_v4
+86/442856/campos_512_v4
+86/442864/campos_512_v4
+86/442880/campos_512_v4
+86/442886/campos_512_v4
+86/442893/campos_512_v4
+86/442897/campos_512_v4
+86/442899/campos_512_v4
+86/442903/campos_512_v4
+86/442906/campos_512_v4
+86/442912/campos_512_v4
+86/442915/campos_512_v4
+86/442917/campos_512_v4
+86/442918/campos_512_v4
+86/442921/campos_512_v4
+86/442941/campos_512_v4
+86/442957/campos_512_v4
+86/442971/campos_512_v4
+86/442992/campos_512_v4
+86/442996/campos_512_v4
+86/442997/campos_512_v4
+86/443001/campos_512_v4
+86/443004/campos_512_v4
+86/443012/campos_512_v4
+86/443019/campos_512_v4
+86/443023/campos_512_v4
+86/443027/campos_512_v4
+86/443028/campos_512_v4
+86/443030/campos_512_v4
+86/443031/campos_512_v4
+86/443042/campos_512_v4
+86/443055/campos_512_v4
+86/443057/campos_512_v4
+86/443064/campos_512_v4
+86/443066/campos_512_v4
+86/443067/campos_512_v4
+86/443081/campos_512_v4
+86/443082/campos_512_v4
+86/443083/campos_512_v4
+86/443093/campos_512_v4
+86/443113/campos_512_v4
+86/443116/campos_512_v4
+86/443121/campos_512_v4
+86/443125/campos_512_v4
+86/443132/campos_512_v4
+86/443135/campos_512_v4
+86/443136/campos_512_v4
+86/443144/campos_512_v4
+86/443168/campos_512_v4
+86/443175/campos_512_v4
+86/443176/campos_512_v4
+86/443205/campos_512_v4
+86/443221/campos_512_v4
+86/443225/campos_512_v4
+86/443229/campos_512_v4
+86/443235/campos_512_v4
+86/443240/campos_512_v4
+86/443243/campos_512_v4
+86/443257/campos_512_v4
+86/443258/campos_512_v4
+86/443261/campos_512_v4
+86/443279/campos_512_v4
+86/443291/campos_512_v4
+86/443293/campos_512_v4
+86/443295/campos_512_v4
+86/443301/campos_512_v4
+86/443303/campos_512_v4
+86/443324/campos_512_v4
+86/443329/campos_512_v4
+86/443350/campos_512_v4
+86/443353/campos_512_v4
+86/443361/campos_512_v4
+86/443362/campos_512_v4
+86/443366/campos_512_v4
+86/443369/campos_512_v4
+86/443370/campos_512_v4
+86/443371/campos_512_v4
+86/443373/campos_512_v4
+86/443375/campos_512_v4
+86/443388/campos_512_v4
+86/443394/campos_512_v4
+86/443398/campos_512_v4
+86/443399/campos_512_v4
+86/443408/campos_512_v4
+86/443416/campos_512_v4
+86/443417/campos_512_v4
+86/443420/campos_512_v4
+86/443425/campos_512_v4
+86/443426/campos_512_v4
+86/443428/campos_512_v4
+86/443431/campos_512_v4
+86/443432/campos_512_v4
+86/443460/campos_512_v4
+86/443469/campos_512_v4
+86/443472/campos_512_v4
+86/443474/campos_512_v4
+86/443477/campos_512_v4
+86/443485/campos_512_v4
+86/443494/campos_512_v4
+86/443497/campos_512_v4
+86/443504/campos_512_v4
+86/443509/campos_512_v4
+86/443512/campos_512_v4
+86/443517/campos_512_v4
+86/443524/campos_512_v4
+86/443525/campos_512_v4
+86/443546/campos_512_v4
+86/443549/campos_512_v4
+86/443555/campos_512_v4
+86/443564/campos_512_v4
+86/443579/campos_512_v4
+86/443581/campos_512_v4
+86/443584/campos_512_v4
+86/443589/campos_512_v4
+86/443600/campos_512_v4
+86/443604/campos_512_v4
+86/443613/campos_512_v4
+86/443622/campos_512_v4
+86/443623/campos_512_v4
+86/443631/campos_512_v4
+86/443645/campos_512_v4
+86/443660/campos_512_v4
+86/443661/campos_512_v4
+86/443665/campos_512_v4
+86/443680/campos_512_v4
+86/443683/campos_512_v4
+86/443685/campos_512_v4
+86/443687/campos_512_v4
+86/443688/campos_512_v4
+86/443695/campos_512_v4
+86/443703/campos_512_v4
+86/443710/campos_512_v4
+86/443719/campos_512_v4
+86/443722/campos_512_v4
+86/443726/campos_512_v4
+86/443739/campos_512_v4
+86/443742/campos_512_v4
+86/443745/campos_512_v4
+86/443758/campos_512_v4
+86/443760/campos_512_v4
+86/443776/campos_512_v4
+86/443784/campos_512_v4
+86/443786/campos_512_v4
+86/443788/campos_512_v4
+86/443795/campos_512_v4
+86/443803/campos_512_v4
+86/443829/campos_512_v4
+86/443836/campos_512_v4
+86/443854/campos_512_v4
+86/443856/campos_512_v4
+86/443862/campos_512_v4
+86/443863/campos_512_v4
+86/443864/campos_512_v4
+86/443865/campos_512_v4
+86/443866/campos_512_v4
+86/443875/campos_512_v4
+86/443886/campos_512_v4
+86/443892/campos_512_v4
+86/443912/campos_512_v4
+86/443915/campos_512_v4
+86/443933/campos_512_v4
+86/443979/campos_512_v4
+86/443981/campos_512_v4
+86/443988/campos_512_v4
+86/443997/campos_512_v4
+86/443999/campos_512_v4
+86/444020/campos_512_v4
+86/444030/campos_512_v4
+86/444035/campos_512_v4
+86/444039/campos_512_v4
+86/444044/campos_512_v4
+86/444050/campos_512_v4
+86/444054/campos_512_v4
+86/444055/campos_512_v4
+86/444062/campos_512_v4
+86/444063/campos_512_v4
+86/444066/campos_512_v4
+86/444069/campos_512_v4
+86/444072/campos_512_v4
+86/444079/campos_512_v4
+86/444085/campos_512_v4
+86/444086/campos_512_v4
+86/444090/campos_512_v4
+86/444095/campos_512_v4
+86/444105/campos_512_v4
+86/444108/campos_512_v4
+86/444109/campos_512_v4
+86/444112/campos_512_v4
+86/444117/campos_512_v4
+86/444124/campos_512_v4
+86/444128/campos_512_v4
+86/444131/campos_512_v4
+86/444142/campos_512_v4
+86/444150/campos_512_v4
+86/444159/campos_512_v4
+86/444177/campos_512_v4
+86/444178/campos_512_v4
+86/444182/campos_512_v4
+86/444189/campos_512_v4
+86/444197/campos_512_v4
+86/444199/campos_512_v4
+86/444208/campos_512_v4
+86/444210/campos_512_v4
+86/444213/campos_512_v4
+86/444220/campos_512_v4
+86/444223/campos_512_v4
+86/444228/campos_512_v4
+86/444261/campos_512_v4
+86/444268/campos_512_v4
+86/444274/campos_512_v4
+86/444275/campos_512_v4
+86/444276/campos_512_v4
+86/444284/campos_512_v4
+86/444288/campos_512_v4
+86/444297/campos_512_v4
+86/444298/campos_512_v4
+86/444301/campos_512_v4
+86/444309/campos_512_v4
+86/444310/campos_512_v4
+86/444314/campos_512_v4
+86/444315/campos_512_v4
+86/444328/campos_512_v4
+86/444329/campos_512_v4
+86/444331/campos_512_v4
+86/444333/campos_512_v4
+86/444347/campos_512_v4
+86/444348/campos_512_v4
+86/444351/campos_512_v4
+86/444361/campos_512_v4
+86/444362/campos_512_v4
+86/444379/campos_512_v4
+86/444394/campos_512_v4
+86/444396/campos_512_v4
+86/444401/campos_512_v4
+86/444409/campos_512_v4
+86/444417/campos_512_v4
+86/444429/campos_512_v4
+86/444434/campos_512_v4
+86/444454/campos_512_v4
+86/444479/campos_512_v4
+86/444491/campos_512_v4
+86/444493/campos_512_v4
+86/444497/campos_512_v4
+86/444502/campos_512_v4
+86/444508/campos_512_v4
+86/444518/campos_512_v4
+86/444523/campos_512_v4
+86/444527/campos_512_v4
+86/444530/campos_512_v4
+86/444540/campos_512_v4
+86/444553/campos_512_v4
+86/444557/campos_512_v4
+86/444579/campos_512_v4
+86/444581/campos_512_v4
+86/444584/campos_512_v4
+86/444589/campos_512_v4
+86/444593/campos_512_v4
+86/444600/campos_512_v4
+86/444601/campos_512_v4
+86/444615/campos_512_v4
+86/444618/campos_512_v4
+86/444625/campos_512_v4
+86/444627/campos_512_v4
+86/444639/campos_512_v4
+86/444644/campos_512_v4
+86/444647/campos_512_v4
+86/444661/campos_512_v4
+86/444663/campos_512_v4
+86/444667/campos_512_v4
+86/444670/campos_512_v4
+86/444689/campos_512_v4
+86/444693/campos_512_v4
+86/444699/campos_512_v4
+86/444709/campos_512_v4
+86/444718/campos_512_v4
+86/444724/campos_512_v4
+86/444733/campos_512_v4
+86/444757/campos_512_v4
+86/444777/campos_512_v4
+86/444784/campos_512_v4
+86/444786/campos_512_v4
+86/444796/campos_512_v4
+86/444829/campos_512_v4
+86/444832/campos_512_v4
+86/444836/campos_512_v4
+86/444845/campos_512_v4
+86/444851/campos_512_v4
+86/444857/campos_512_v4
+86/444872/campos_512_v4
+86/444894/campos_512_v4
+86/444895/campos_512_v4
+86/444899/campos_512_v4
+86/444905/campos_512_v4
+86/444912/campos_512_v4
+86/444917/campos_512_v4
+86/444918/campos_512_v4
+86/444935/campos_512_v4
+86/444943/campos_512_v4
+86/444950/campos_512_v4
+86/444973/campos_512_v4
+86/444975/campos_512_v4
+86/444977/campos_512_v4
+86/444980/campos_512_v4
+86/444988/campos_512_v4
+86/444992/campos_512_v4
+86/444999/campos_512_v4
+86/445001/campos_512_v4
+87/445009/campos_512_v4
+87/445011/campos_512_v4
+87/445013/campos_512_v4
+87/445023/campos_512_v4
+87/445050/campos_512_v4
+87/445053/campos_512_v4
+87/445054/campos_512_v4
+87/445067/campos_512_v4
+87/445072/campos_512_v4
+87/445079/campos_512_v4
+87/445081/campos_512_v4
+87/445082/campos_512_v4
+87/445093/campos_512_v4
+87/445096/campos_512_v4
+87/445101/campos_512_v4
+87/445117/campos_512_v4
+87/445121/campos_512_v4
+87/445131/campos_512_v4
+87/445134/campos_512_v4
+87/445137/campos_512_v4
+87/445145/campos_512_v4
+87/445146/campos_512_v4
+87/445164/campos_512_v4
+87/445170/campos_512_v4
+87/445178/campos_512_v4
+87/445192/campos_512_v4
+87/445195/campos_512_v4
+87/445198/campos_512_v4
+87/445201/campos_512_v4
+87/445204/campos_512_v4
+87/445230/campos_512_v4
+87/445240/campos_512_v4
+87/445250/campos_512_v4
+87/445267/campos_512_v4
+87/445270/campos_512_v4
+87/445272/campos_512_v4
+87/445278/campos_512_v4
+87/445285/campos_512_v4
+87/445286/campos_512_v4
+87/445294/campos_512_v4
+87/445295/campos_512_v4
+87/445296/campos_512_v4
+87/445302/campos_512_v4
+87/445316/campos_512_v4
+87/445325/campos_512_v4
+87/445334/campos_512_v4
+87/445338/campos_512_v4
+87/445344/campos_512_v4
+87/445350/campos_512_v4
+87/445360/campos_512_v4
+87/445361/campos_512_v4
+87/445365/campos_512_v4
+87/445369/campos_512_v4
+87/445402/campos_512_v4
+87/445418/campos_512_v4
+87/445420/campos_512_v4
+87/445430/campos_512_v4
+87/445440/campos_512_v4
+87/445447/campos_512_v4
+87/445448/campos_512_v4
+87/445457/campos_512_v4
+87/445458/campos_512_v4
+87/445462/campos_512_v4
+87/445474/campos_512_v4
+87/445487/campos_512_v4
+87/445494/campos_512_v4
+87/445498/campos_512_v4
+87/445499/campos_512_v4
+87/445505/campos_512_v4
+87/445509/campos_512_v4
+87/445516/campos_512_v4
+87/445518/campos_512_v4
+87/445524/campos_512_v4
+87/445545/campos_512_v4
+87/445546/campos_512_v4
+87/445549/campos_512_v4
+87/445550/campos_512_v4
+87/445560/campos_512_v4
+87/445562/campos_512_v4
+87/445564/campos_512_v4
+87/445572/campos_512_v4
+87/445579/campos_512_v4
+87/445583/campos_512_v4
+87/445590/campos_512_v4
+87/445597/campos_512_v4
+87/445598/campos_512_v4
+87/445600/campos_512_v4
+87/445611/campos_512_v4
+87/445630/campos_512_v4
+87/445636/campos_512_v4
+87/445647/campos_512_v4
+87/445654/campos_512_v4
+87/445662/campos_512_v4
+87/445667/campos_512_v4
+87/445672/campos_512_v4
+87/445676/campos_512_v4
+87/445680/campos_512_v4
+87/445681/campos_512_v4
+87/445686/campos_512_v4
+87/445692/campos_512_v4
+87/445696/campos_512_v4
+87/445698/campos_512_v4
+87/445735/campos_512_v4
+87/445747/campos_512_v4
+87/445763/campos_512_v4
+87/445794/campos_512_v4
+87/445798/campos_512_v4
+87/445802/campos_512_v4
+87/445806/campos_512_v4
+87/445818/campos_512_v4
+87/445821/campos_512_v4
+87/445829/campos_512_v4
+87/445834/campos_512_v4
+87/445838/campos_512_v4
+87/445841/campos_512_v4
+87/445847/campos_512_v4
+87/445848/campos_512_v4
+87/445855/campos_512_v4
+87/445856/campos_512_v4
+87/445873/campos_512_v4
+87/445875/campos_512_v4
+87/445878/campos_512_v4
+87/445883/campos_512_v4
+87/445889/campos_512_v4
+87/445893/campos_512_v4
+87/445899/campos_512_v4
+87/445900/campos_512_v4
+87/445907/campos_512_v4
+87/445917/campos_512_v4
+87/445920/campos_512_v4
+87/445930/campos_512_v4
+87/445940/campos_512_v4
+87/445949/campos_512_v4
+87/445950/campos_512_v4
+87/445955/campos_512_v4
+87/445957/campos_512_v4
+87/445958/campos_512_v4
+87/445972/campos_512_v4
+87/445975/campos_512_v4
+87/445978/campos_512_v4
+87/445979/campos_512_v4
+87/445989/campos_512_v4
+87/445992/campos_512_v4
+87/446012/campos_512_v4
+87/446015/campos_512_v4
+87/446018/campos_512_v4
+87/446020/campos_512_v4
+87/446025/campos_512_v4
+87/446040/campos_512_v4
+87/446043/campos_512_v4
+87/446048/campos_512_v4
+87/446049/campos_512_v4
+87/446062/campos_512_v4
+87/446071/campos_512_v4
+87/446086/campos_512_v4
+87/446089/campos_512_v4
+87/446090/campos_512_v4
+87/446094/campos_512_v4
+87/446097/campos_512_v4
+87/446101/campos_512_v4
+87/446105/campos_512_v4
+87/446106/campos_512_v4
+87/446116/campos_512_v4
+87/446118/campos_512_v4
+87/446120/campos_512_v4
+87/446125/campos_512_v4
+87/446128/campos_512_v4
+87/446129/campos_512_v4
+87/446132/campos_512_v4
+87/446134/campos_512_v4
+87/446135/campos_512_v4
+87/446140/campos_512_v4
+87/446143/campos_512_v4
+87/446144/campos_512_v4
+87/446146/campos_512_v4
+87/446149/campos_512_v4
+87/446159/campos_512_v4
+87/446162/campos_512_v4
+87/446168/campos_512_v4
+87/446170/campos_512_v4
+87/446179/campos_512_v4
+87/446181/campos_512_v4
+87/446188/campos_512_v4
+87/446189/campos_512_v4
+87/446190/campos_512_v4
+87/446191/campos_512_v4
+87/446198/campos_512_v4
+87/446215/campos_512_v4
+87/446221/campos_512_v4
+87/446223/campos_512_v4
+87/446243/campos_512_v4
+87/446248/campos_512_v4
+87/446250/campos_512_v4
+87/446251/campos_512_v4
+87/446258/campos_512_v4
+87/446270/campos_512_v4
+87/446273/campos_512_v4
+87/446274/campos_512_v4
+87/446279/campos_512_v4
+87/446284/campos_512_v4
+87/446292/campos_512_v4
+87/446295/campos_512_v4
+87/446296/campos_512_v4
+87/446320/campos_512_v4
+87/446342/campos_512_v4
+87/446347/campos_512_v4
+87/446349/campos_512_v4
+87/446355/campos_512_v4
+87/446369/campos_512_v4
+87/446390/campos_512_v4
+87/446393/campos_512_v4
+87/446398/campos_512_v4
+87/446402/campos_512_v4
+87/446403/campos_512_v4
+87/446409/campos_512_v4
+87/446446/campos_512_v4
+87/446452/campos_512_v4
+87/446458/campos_512_v4
+87/446460/campos_512_v4
+87/446462/campos_512_v4
+87/446466/campos_512_v4
+87/446468/campos_512_v4
+87/446470/campos_512_v4
+87/446473/campos_512_v4
+87/446481/campos_512_v4
+87/446482/campos_512_v4
+87/446490/campos_512_v4
+87/446491/campos_512_v4
+87/446501/campos_512_v4
+87/446503/campos_512_v4
+87/446518/campos_512_v4
+87/446519/campos_512_v4
+87/446536/campos_512_v4
+87/446538/campos_512_v4
+87/446547/campos_512_v4
+87/446550/campos_512_v4
+87/446574/campos_512_v4
+87/446575/campos_512_v4
+87/446576/campos_512_v4
+87/446586/campos_512_v4
+87/446602/campos_512_v4
+87/446605/campos_512_v4
+87/446606/campos_512_v4
+87/446613/campos_512_v4
+87/446617/campos_512_v4
+87/446622/campos_512_v4
+87/446623/campos_512_v4
+87/446625/campos_512_v4
+87/446626/campos_512_v4
+87/446632/campos_512_v4
+87/446635/campos_512_v4
+87/446641/campos_512_v4
+87/446643/campos_512_v4
+87/446645/campos_512_v4
+87/446651/campos_512_v4
+87/446664/campos_512_v4
+87/446668/campos_512_v4
+87/446682/campos_512_v4
+87/446689/campos_512_v4
+87/446695/campos_512_v4
+87/446699/campos_512_v4
+87/446700/campos_512_v4
+87/446704/campos_512_v4
+87/446715/campos_512_v4
+87/446718/campos_512_v4
+87/446735/campos_512_v4
+87/446741/campos_512_v4
+87/446758/campos_512_v4
+87/446761/campos_512_v4
+87/446784/campos_512_v4
+87/446792/campos_512_v4
+87/446796/campos_512_v4
+87/446798/campos_512_v4
+87/446813/campos_512_v4
+87/446820/campos_512_v4
+87/446823/campos_512_v4
+87/446842/campos_512_v4
+87/446848/campos_512_v4
+87/446850/campos_512_v4
+87/446864/campos_512_v4
+87/446878/campos_512_v4
+87/446879/campos_512_v4
+87/446883/campos_512_v4
+87/446884/campos_512_v4
+87/446896/campos_512_v4
+87/446902/campos_512_v4
+87/446905/campos_512_v4
+87/446907/campos_512_v4
+87/446919/campos_512_v4
+87/446921/campos_512_v4
+87/446925/campos_512_v4
+87/446932/campos_512_v4
+87/446944/campos_512_v4
+87/446948/campos_512_v4
+87/446951/campos_512_v4
+87/446959/campos_512_v4
+87/446968/campos_512_v4
+87/446979/campos_512_v4
+87/446982/campos_512_v4
+87/446987/campos_512_v4
+87/446990/campos_512_v4
+87/446991/campos_512_v4
+87/446993/campos_512_v4
+87/446995/campos_512_v4
+87/447010/campos_512_v4
+87/447011/campos_512_v4
+87/447028/campos_512_v4
+87/447030/campos_512_v4
+87/447069/campos_512_v4
+87/447074/campos_512_v4
+87/447085/campos_512_v4
+87/447088/campos_512_v4
+87/447094/campos_512_v4
+87/447100/campos_512_v4
+87/447102/campos_512_v4
+87/447110/campos_512_v4
+87/447116/campos_512_v4
+87/447123/campos_512_v4
+87/447128/campos_512_v4
+87/447164/campos_512_v4
+87/447170/campos_512_v4
+87/447174/campos_512_v4
+87/447184/campos_512_v4
+87/447186/campos_512_v4
+87/447188/campos_512_v4
+87/447196/campos_512_v4
+87/447205/campos_512_v4
+87/447208/campos_512_v4
+87/447212/campos_512_v4
+87/447218/campos_512_v4
+87/447220/campos_512_v4
+87/447246/campos_512_v4
+87/447249/campos_512_v4
+87/447251/campos_512_v4
+87/447252/campos_512_v4
+87/447263/campos_512_v4
+87/447265/campos_512_v4
+87/447266/campos_512_v4
+87/447267/campos_512_v4
+87/447279/campos_512_v4
+87/447281/campos_512_v4
+87/447306/campos_512_v4
+87/447308/campos_512_v4
+87/447320/campos_512_v4
+87/447328/campos_512_v4
+87/447329/campos_512_v4
+87/447331/campos_512_v4
+87/447338/campos_512_v4
+87/447343/campos_512_v4
+87/447346/campos_512_v4
+87/447356/campos_512_v4
+87/447364/campos_512_v4
+87/447367/campos_512_v4
+87/447392/campos_512_v4
+87/447393/campos_512_v4
+87/447396/campos_512_v4
+87/447400/campos_512_v4
+87/447403/campos_512_v4
+87/447409/campos_512_v4
+87/447416/campos_512_v4
+87/447422/campos_512_v4
+87/447431/campos_512_v4
+87/447439/campos_512_v4
+87/447443/campos_512_v4
+87/447444/campos_512_v4
+87/447454/campos_512_v4
+87/447458/campos_512_v4
+87/447459/campos_512_v4
+87/447460/campos_512_v4
+87/447466/campos_512_v4
+87/447469/campos_512_v4
+87/447482/campos_512_v4
+87/447496/campos_512_v4
+87/447506/campos_512_v4
+87/447510/campos_512_v4
+87/447516/campos_512_v4
+87/447525/campos_512_v4
+87/447526/campos_512_v4
+87/447531/campos_512_v4
+87/447533/campos_512_v4
+87/447537/campos_512_v4
+87/447543/campos_512_v4
+87/447544/campos_512_v4
+87/447546/campos_512_v4
+87/447549/campos_512_v4
+87/447553/campos_512_v4
+87/447556/campos_512_v4
+87/447562/campos_512_v4
+87/447563/campos_512_v4
+87/447586/campos_512_v4
+87/447587/campos_512_v4
+87/447592/campos_512_v4
+87/447600/campos_512_v4
+87/447608/campos_512_v4
+87/447612/campos_512_v4
+87/447614/campos_512_v4
+87/447617/campos_512_v4
+87/447642/campos_512_v4
+87/447643/campos_512_v4
+87/447648/campos_512_v4
+87/447651/campos_512_v4
+87/447652/campos_512_v4
+87/447656/campos_512_v4
+87/447657/campos_512_v4
+87/447658/campos_512_v4
+87/447665/campos_512_v4
+87/447713/campos_512_v4
+87/447726/campos_512_v4
+87/447747/campos_512_v4
+87/447749/campos_512_v4
+87/447752/campos_512_v4
+87/447758/campos_512_v4
+87/447764/campos_512_v4
+87/447765/campos_512_v4
+87/447783/campos_512_v4
+87/447785/campos_512_v4
+87/447791/campos_512_v4
+87/447799/campos_512_v4
+87/447806/campos_512_v4
+87/447810/campos_512_v4
+87/447817/campos_512_v4
+87/447843/campos_512_v4
+87/447845/campos_512_v4
+87/447852/campos_512_v4
+87/447853/campos_512_v4
+87/447855/campos_512_v4
+87/447856/campos_512_v4
+87/447862/campos_512_v4
+87/447863/campos_512_v4
+87/447892/campos_512_v4
+87/447894/campos_512_v4
+87/447910/campos_512_v4
+87/447921/campos_512_v4
+87/447927/campos_512_v4
+87/447930/campos_512_v4
+87/447936/campos_512_v4
+87/447939/campos_512_v4
+87/447960/campos_512_v4
+87/447967/campos_512_v4
+87/447969/campos_512_v4
+87/447981/campos_512_v4
+87/447983/campos_512_v4
+87/447984/campos_512_v4
+87/447991/campos_512_v4
+87/447999/campos_512_v4
+87/448011/campos_512_v4
+87/448020/campos_512_v4
+87/448034/campos_512_v4
+87/448044/campos_512_v4
+87/448056/campos_512_v4
+87/448067/campos_512_v4
+87/448074/campos_512_v4
+87/448080/campos_512_v4
+87/448087/campos_512_v4
+87/448093/campos_512_v4
+87/448104/campos_512_v4
+87/448128/campos_512_v4
+87/448130/campos_512_v4
+87/448136/campos_512_v4
+87/448137/campos_512_v4
+87/448139/campos_512_v4
+87/448142/campos_512_v4
+87/448143/campos_512_v4
+87/448147/campos_512_v4
+87/448148/campos_512_v4
+87/448151/campos_512_v4
+87/448166/campos_512_v4
+87/448171/campos_512_v4
+87/448175/campos_512_v4
+87/448202/campos_512_v4
+87/448208/campos_512_v4
+87/448215/campos_512_v4
+87/448226/campos_512_v4
+87/448227/campos_512_v4
+87/448228/campos_512_v4
+87/448235/campos_512_v4
+87/448244/campos_512_v4
+87/448249/campos_512_v4
+87/448252/campos_512_v4
+87/448255/campos_512_v4
+87/448260/campos_512_v4
+87/448282/campos_512_v4
+87/448286/campos_512_v4
+87/448288/campos_512_v4
+87/448290/campos_512_v4
+87/448294/campos_512_v4
+87/448299/campos_512_v4
+87/448307/campos_512_v4
+87/448309/campos_512_v4
+87/448315/campos_512_v4
+87/448322/campos_512_v4
+87/448334/campos_512_v4
+87/448345/campos_512_v4
+87/448346/campos_512_v4
+87/448357/campos_512_v4
+87/448364/campos_512_v4
+87/448366/campos_512_v4
+87/448369/campos_512_v4
+87/448371/campos_512_v4
+87/448373/campos_512_v4
+87/448384/campos_512_v4
+87/448402/campos_512_v4
+87/448407/campos_512_v4
+87/448413/campos_512_v4
+87/448428/campos_512_v4
+87/448436/campos_512_v4
+87/448443/campos_512_v4
+87/448471/campos_512_v4
+87/448472/campos_512_v4
+87/448480/campos_512_v4
+87/448484/campos_512_v4
+87/448485/campos_512_v4
+87/448492/campos_512_v4
+87/448498/campos_512_v4
+87/448507/campos_512_v4
+87/448515/campos_512_v4
+87/448517/campos_512_v4
+87/448530/campos_512_v4
+87/448533/campos_512_v4
+87/448564/campos_512_v4
+87/448565/campos_512_v4
+87/448577/campos_512_v4
+87/448581/campos_512_v4
+87/448587/campos_512_v4
+87/448591/campos_512_v4
+87/448595/campos_512_v4
+87/448609/campos_512_v4
+87/448615/campos_512_v4
+87/448623/campos_512_v4
+87/448624/campos_512_v4
+87/448659/campos_512_v4
+87/448662/campos_512_v4
+87/448664/campos_512_v4
+87/448670/campos_512_v4
+87/448676/campos_512_v4
+87/448678/campos_512_v4
+87/448680/campos_512_v4
+87/448690/campos_512_v4
+87/448702/campos_512_v4
+87/448728/campos_512_v4
+87/448730/campos_512_v4
+87/448738/campos_512_v4
+87/448744/campos_512_v4
+87/448747/campos_512_v4
+87/448749/campos_512_v4
+87/448751/campos_512_v4
+87/448755/campos_512_v4
+87/448758/campos_512_v4
+87/448760/campos_512_v4
+87/448762/campos_512_v4
+87/448769/campos_512_v4
+87/448772/campos_512_v4
+87/448777/campos_512_v4
+87/448788/campos_512_v4
+87/448798/campos_512_v4
+87/448805/campos_512_v4
+87/448806/campos_512_v4
+87/448811/campos_512_v4
+87/448814/campos_512_v4
+87/448819/campos_512_v4
+87/448821/campos_512_v4
+87/448826/campos_512_v4
+87/448831/campos_512_v4
+87/448838/campos_512_v4
+87/448851/campos_512_v4
+87/448862/campos_512_v4
+87/448864/campos_512_v4
+87/448877/campos_512_v4
+87/448899/campos_512_v4
+87/448907/campos_512_v4
+87/448920/campos_512_v4
+87/448931/campos_512_v4
+87/448944/campos_512_v4
+87/448954/campos_512_v4
+87/448964/campos_512_v4
+87/448965/campos_512_v4
+87/448978/campos_512_v4
+87/448980/campos_512_v4
+87/448993/campos_512_v4
+87/448998/campos_512_v4
+87/448999/campos_512_v4
+87/449000/campos_512_v4
+87/449012/campos_512_v4
+87/449030/campos_512_v4
+87/449031/campos_512_v4
+87/449037/campos_512_v4
+87/449058/campos_512_v4
+87/449062/campos_512_v4
+87/449070/campos_512_v4
+87/449082/campos_512_v4
+87/449084/campos_512_v4
+87/449092/campos_512_v4
+87/449103/campos_512_v4
+87/449106/campos_512_v4
+87/449111/campos_512_v4
+87/449112/campos_512_v4
+87/449119/campos_512_v4
+87/449125/campos_512_v4
+87/449136/campos_512_v4
+87/449141/campos_512_v4
+87/449144/campos_512_v4
+87/449151/campos_512_v4
+87/449167/campos_512_v4
+87/449169/campos_512_v4
+87/449171/campos_512_v4
+87/449172/campos_512_v4
+87/449181/campos_512_v4
+87/449182/campos_512_v4
+87/449198/campos_512_v4
+87/449212/campos_512_v4
+87/449217/campos_512_v4
+87/449228/campos_512_v4
+87/449229/campos_512_v4
+87/449234/campos_512_v4
+87/449250/campos_512_v4
+87/449258/campos_512_v4
+87/449264/campos_512_v4
+87/449265/campos_512_v4
+87/449268/campos_512_v4
+87/449285/campos_512_v4
+87/449286/campos_512_v4
+87/449288/campos_512_v4
+87/449300/campos_512_v4
+87/449307/campos_512_v4
+87/449315/campos_512_v4
+87/449320/campos_512_v4
+87/449325/campos_512_v4
+87/449327/campos_512_v4
+87/449345/campos_512_v4
+87/449351/campos_512_v4
+87/449359/campos_512_v4
+87/449377/campos_512_v4
+87/449387/campos_512_v4
+87/449389/campos_512_v4
+87/449421/campos_512_v4
+87/449422/campos_512_v4
+87/449432/campos_512_v4
+87/449435/campos_512_v4
+87/449447/campos_512_v4
+87/449450/campos_512_v4
+87/449451/campos_512_v4
+87/449456/campos_512_v4
+87/449463/campos_512_v4
+87/449476/campos_512_v4
+87/449478/campos_512_v4
+87/449487/campos_512_v4
+87/449488/campos_512_v4
+87/449493/campos_512_v4
+87/449508/campos_512_v4
+87/449517/campos_512_v4
+87/449526/campos_512_v4
+87/449539/campos_512_v4
+87/449545/campos_512_v4
+87/449546/campos_512_v4
+87/449560/campos_512_v4
+87/449565/campos_512_v4
+87/449571/campos_512_v4
+87/449572/campos_512_v4
+87/449573/campos_512_v4
+87/449575/campos_512_v4
+87/449580/campos_512_v4
+87/449583/campos_512_v4
+87/449591/campos_512_v4
+87/449598/campos_512_v4
+87/449604/campos_512_v4
+87/449615/campos_512_v4
+87/449617/campos_512_v4
+87/449625/campos_512_v4
+87/449633/campos_512_v4
+87/449634/campos_512_v4
+87/449653/campos_512_v4
+87/449673/campos_512_v4
+87/449690/campos_512_v4
+87/449691/campos_512_v4
+87/449701/campos_512_v4
+87/449702/campos_512_v4
+87/449712/campos_512_v4
+87/449713/campos_512_v4
+87/449718/campos_512_v4
+87/449729/campos_512_v4
+87/449739/campos_512_v4
+87/449750/campos_512_v4
+87/449768/campos_512_v4
+87/449769/campos_512_v4
+87/449777/campos_512_v4
+87/449788/campos_512_v4
+88/450011/campos_512_v4
+88/450015/campos_512_v4
+88/450023/campos_512_v4
+88/450025/campos_512_v4
+88/450028/campos_512_v4
+88/450032/campos_512_v4
+88/450044/campos_512_v4
+88/450054/campos_512_v4
+88/450057/campos_512_v4
+88/450069/campos_512_v4
+88/450083/campos_512_v4
+88/450090/campos_512_v4
+88/450094/campos_512_v4
+88/450105/campos_512_v4
+88/450108/campos_512_v4
+88/450110/campos_512_v4
+88/450115/campos_512_v4
+88/450116/campos_512_v4
+88/450117/campos_512_v4
+88/450123/campos_512_v4
+88/450126/campos_512_v4
+88/450132/campos_512_v4
+88/450141/campos_512_v4
+88/450142/campos_512_v4
+88/450163/campos_512_v4
+88/450169/campos_512_v4
+88/450170/campos_512_v4
+88/450181/campos_512_v4
+88/450191/campos_512_v4
+88/450192/campos_512_v4
+88/450203/campos_512_v4
+88/450204/campos_512_v4
+88/450205/campos_512_v4
+88/450213/campos_512_v4
+88/450214/campos_512_v4
+88/450225/campos_512_v4
+88/450233/campos_512_v4
+88/450234/campos_512_v4
+88/450243/campos_512_v4
+88/450247/campos_512_v4
+88/450254/campos_512_v4
+88/450266/campos_512_v4
+88/450276/campos_512_v4
+88/450280/campos_512_v4
+88/450283/campos_512_v4
+88/450285/campos_512_v4
+88/450286/campos_512_v4
+88/450287/campos_512_v4
+88/450292/campos_512_v4
+88/450299/campos_512_v4
+88/450303/campos_512_v4
+88/450305/campos_512_v4
+88/450306/campos_512_v4
+88/450308/campos_512_v4
+88/450309/campos_512_v4
+88/450319/campos_512_v4
+88/450322/campos_512_v4
+88/450323/campos_512_v4
+88/450331/campos_512_v4
+88/450332/campos_512_v4
+88/450335/campos_512_v4
+88/450339/campos_512_v4
+88/450344/campos_512_v4
+88/450349/campos_512_v4
+88/450350/campos_512_v4
+88/450351/campos_512_v4
+88/450352/campos_512_v4
+88/450361/campos_512_v4
+88/450373/campos_512_v4
+88/450378/campos_512_v4
+88/450384/campos_512_v4
+88/450388/campos_512_v4
+88/450411/campos_512_v4
+88/450414/campos_512_v4
+88/450429/campos_512_v4
+88/450432/campos_512_v4
+88/450438/campos_512_v4
+88/450440/campos_512_v4
+88/450494/campos_512_v4
+88/450502/campos_512_v4
+88/450504/campos_512_v4
+88/450515/campos_512_v4
+88/450534/campos_512_v4
+88/450539/campos_512_v4
+88/450542/campos_512_v4
+88/450561/campos_512_v4
+88/450567/campos_512_v4
+88/450570/campos_512_v4
+88/450602/campos_512_v4
+88/450623/campos_512_v4
+88/450625/campos_512_v4
+88/450626/campos_512_v4
+88/450629/campos_512_v4
+88/450647/campos_512_v4
+88/450653/campos_512_v4
+88/450655/campos_512_v4
+88/450658/campos_512_v4
+88/450662/campos_512_v4
+88/450665/campos_512_v4
+88/450668/campos_512_v4
+88/450671/campos_512_v4
+88/450672/campos_512_v4
+88/450677/campos_512_v4
+88/450679/campos_512_v4
+88/450701/campos_512_v4
+88/450703/campos_512_v4
+88/450705/campos_512_v4
+88/450708/campos_512_v4
+88/450712/campos_512_v4
+88/450716/campos_512_v4
+88/450729/campos_512_v4
+88/450730/campos_512_v4
+88/450733/campos_512_v4
+88/450734/campos_512_v4
+88/450740/campos_512_v4
+88/450742/campos_512_v4
+88/450746/campos_512_v4
+88/450748/campos_512_v4
+88/450755/campos_512_v4
+88/450765/campos_512_v4
+88/450766/campos_512_v4
+88/450770/campos_512_v4
+88/450771/campos_512_v4
+88/450772/campos_512_v4
+88/450775/campos_512_v4
+88/450780/campos_512_v4
+88/450790/campos_512_v4
+88/450791/campos_512_v4
+88/450801/campos_512_v4
+88/450819/campos_512_v4
+88/450821/campos_512_v4
+88/450834/campos_512_v4
+88/450836/campos_512_v4
+88/450838/campos_512_v4
+88/450848/campos_512_v4
+88/450850/campos_512_v4
+88/450881/campos_512_v4
+88/450889/campos_512_v4
+88/450929/campos_512_v4
+88/450930/campos_512_v4
+88/450936/campos_512_v4
+88/450937/campos_512_v4
+88/450946/campos_512_v4
+88/450959/campos_512_v4
+88/450960/campos_512_v4
+88/450961/campos_512_v4
+88/450974/campos_512_v4
+88/450976/campos_512_v4
+88/450982/campos_512_v4
+88/450988/campos_512_v4
+88/450994/campos_512_v4
+88/450996/campos_512_v4
+88/451009/campos_512_v4
+88/451010/campos_512_v4
+88/451012/campos_512_v4
+88/451014/campos_512_v4
+88/451024/campos_512_v4
+88/451051/campos_512_v4
+88/451053/campos_512_v4
+88/451060/campos_512_v4
+88/451063/campos_512_v4
+88/451086/campos_512_v4
+88/451095/campos_512_v4
+88/451104/campos_512_v4
+88/451105/campos_512_v4
+88/451115/campos_512_v4
+88/451123/campos_512_v4
+88/451129/campos_512_v4
+88/451167/campos_512_v4
+88/451174/campos_512_v4
+88/451178/campos_512_v4
+88/451180/campos_512_v4
+88/451181/campos_512_v4
+88/451182/campos_512_v4
+88/451184/campos_512_v4
+88/451185/campos_512_v4
+88/451191/campos_512_v4
+88/451195/campos_512_v4
+88/451196/campos_512_v4
+88/451197/campos_512_v4
+88/451200/campos_512_v4
+88/451205/campos_512_v4
+88/451208/campos_512_v4
+88/451228/campos_512_v4
+88/451230/campos_512_v4
+88/451237/campos_512_v4
+88/451258/campos_512_v4
+88/451266/campos_512_v4
+88/451267/campos_512_v4
+88/451268/campos_512_v4
+88/451275/campos_512_v4
+88/451276/campos_512_v4
+88/451284/campos_512_v4
+88/451285/campos_512_v4
+88/451290/campos_512_v4
+88/451295/campos_512_v4
+88/451302/campos_512_v4
+88/451307/campos_512_v4
+88/451308/campos_512_v4
+88/451327/campos_512_v4
+88/451329/campos_512_v4
+88/451336/campos_512_v4
+88/451337/campos_512_v4
+88/451339/campos_512_v4
+88/451344/campos_512_v4
+88/451351/campos_512_v4
+88/451363/campos_512_v4
+88/451364/campos_512_v4
+88/451372/campos_512_v4
+88/451374/campos_512_v4
+88/451378/campos_512_v4
+88/451386/campos_512_v4
+88/451393/campos_512_v4
+88/451394/campos_512_v4
+88/451416/campos_512_v4
+88/451422/campos_512_v4
+88/451423/campos_512_v4
+88/451424/campos_512_v4
+88/451436/campos_512_v4
+88/451439/campos_512_v4
+88/451441/campos_512_v4
+88/451442/campos_512_v4
+88/451445/campos_512_v4
+88/451450/campos_512_v4
+88/451455/campos_512_v4
+88/451472/campos_512_v4
+88/451477/campos_512_v4
+88/451504/campos_512_v4
+88/451517/campos_512_v4
+88/451529/campos_512_v4
+88/451531/campos_512_v4
+88/451543/campos_512_v4
+88/451568/campos_512_v4
+88/451570/campos_512_v4
+88/451577/campos_512_v4
+88/451586/campos_512_v4
+88/451595/campos_512_v4
+88/451598/campos_512_v4
+88/451601/campos_512_v4
+88/451609/campos_512_v4
+88/451611/campos_512_v4
+88/451615/campos_512_v4
+88/451618/campos_512_v4
+88/451626/campos_512_v4
+88/451630/campos_512_v4
+88/451633/campos_512_v4
+88/451635/campos_512_v4
+88/451637/campos_512_v4
+88/451647/campos_512_v4
+88/451656/campos_512_v4
+88/451658/campos_512_v4
+88/451665/campos_512_v4
+88/451670/campos_512_v4
+88/451678/campos_512_v4
+88/451680/campos_512_v4
+88/451682/campos_512_v4
+88/451684/campos_512_v4
+88/451689/campos_512_v4
+88/451705/campos_512_v4
+88/451727/campos_512_v4
+88/451750/campos_512_v4
+88/451788/campos_512_v4
+88/451789/campos_512_v4
+88/451790/campos_512_v4
+88/451791/campos_512_v4
+88/451794/campos_512_v4
+88/451805/campos_512_v4
+88/451817/campos_512_v4
+88/451830/campos_512_v4
+88/451836/campos_512_v4
+88/451839/campos_512_v4
+88/451845/campos_512_v4
+88/451847/campos_512_v4
+88/451852/campos_512_v4
+88/451855/campos_512_v4
+88/451858/campos_512_v4
+88/451863/campos_512_v4
+88/451890/campos_512_v4
+88/451891/campos_512_v4
+88/451898/campos_512_v4
+88/451908/campos_512_v4
+88/451909/campos_512_v4
+88/451910/campos_512_v4
+88/451913/campos_512_v4
+88/451920/campos_512_v4
+88/451921/campos_512_v4
+88/451929/campos_512_v4
+88/451936/campos_512_v4
+88/451938/campos_512_v4
+88/451941/campos_512_v4
+88/451955/campos_512_v4
+88/451957/campos_512_v4
+88/451961/campos_512_v4
+88/451966/campos_512_v4
+88/451978/campos_512_v4
+88/451985/campos_512_v4
+88/451989/campos_512_v4
+88/451993/campos_512_v4
+88/452001/campos_512_v4
+88/452027/campos_512_v4
+88/452038/campos_512_v4
+88/452044/campos_512_v4
+88/452048/campos_512_v4
+88/452071/campos_512_v4
+88/452074/campos_512_v4
+88/452080/campos_512_v4
+88/452098/campos_512_v4
+88/452122/campos_512_v4
+88/452133/campos_512_v4
+88/452134/campos_512_v4
+88/452148/campos_512_v4
+88/452151/campos_512_v4
+88/452153/campos_512_v4
+88/452158/campos_512_v4
+88/452167/campos_512_v4
+88/452170/campos_512_v4
+88/452171/campos_512_v4
+88/452180/campos_512_v4
+88/452185/campos_512_v4
+88/452194/campos_512_v4
+88/452206/campos_512_v4
+88/452207/campos_512_v4
+88/452216/campos_512_v4
+88/452217/campos_512_v4
+88/452223/campos_512_v4
+88/452238/campos_512_v4
+88/452249/campos_512_v4
+88/452251/campos_512_v4
+88/452259/campos_512_v4
+88/452264/campos_512_v4
+88/452270/campos_512_v4
+88/452271/campos_512_v4
+88/452275/campos_512_v4
+88/452278/campos_512_v4
+88/452283/campos_512_v4
+88/452320/campos_512_v4
+88/452324/campos_512_v4
+88/452325/campos_512_v4
+88/452334/campos_512_v4
+88/452338/campos_512_v4
+88/452348/campos_512_v4
+88/452363/campos_512_v4
+88/452365/campos_512_v4
+88/452368/campos_512_v4
+88/452369/campos_512_v4
+88/452374/campos_512_v4
+88/452377/campos_512_v4
+88/452382/campos_512_v4
+88/452389/campos_512_v4
+88/452403/campos_512_v4
+88/452404/campos_512_v4
+88/452405/campos_512_v4
+88/452413/campos_512_v4
+88/452417/campos_512_v4
+88/452430/campos_512_v4
+88/452443/campos_512_v4
+88/452470/campos_512_v4
+88/452477/campos_512_v4
+88/452489/campos_512_v4
+88/452494/campos_512_v4
+88/452497/campos_512_v4
+88/452505/campos_512_v4
+88/452516/campos_512_v4
+88/452524/campos_512_v4
+88/452525/campos_512_v4
+88/452550/campos_512_v4
+88/452552/campos_512_v4
+88/452558/campos_512_v4
+88/452573/campos_512_v4
+88/452576/campos_512_v4
+88/452578/campos_512_v4
+88/452580/campos_512_v4
+88/452582/campos_512_v4
+88/452583/campos_512_v4
+88/452589/campos_512_v4
+88/452605/campos_512_v4
+88/452609/campos_512_v4
+88/452614/campos_512_v4
+88/452617/campos_512_v4
+88/452618/campos_512_v4
+88/452619/campos_512_v4
+88/452642/campos_512_v4
+88/452643/campos_512_v4
+88/452646/campos_512_v4
+88/452658/campos_512_v4
+88/452661/campos_512_v4
+88/452670/campos_512_v4
+88/452673/campos_512_v4
+88/452675/campos_512_v4
+88/452685/campos_512_v4
+88/452702/campos_512_v4
+88/452705/campos_512_v4
+88/452724/campos_512_v4
+88/452733/campos_512_v4
+88/452762/campos_512_v4
+88/452774/campos_512_v4
+88/452778/campos_512_v4
+88/452781/campos_512_v4
+88/452796/campos_512_v4
+88/452797/campos_512_v4
+88/452802/campos_512_v4
+88/452804/campos_512_v4
+88/452835/campos_512_v4
+88/452841/campos_512_v4
+88/452856/campos_512_v4
+88/452874/campos_512_v4
+88/452876/campos_512_v4
+88/452878/campos_512_v4
+88/452883/campos_512_v4
+88/452890/campos_512_v4
+88/452899/campos_512_v4
+88/452902/campos_512_v4
+88/452919/campos_512_v4
+88/452935/campos_512_v4
+88/452952/campos_512_v4
+88/452955/campos_512_v4
+88/452962/campos_512_v4
+88/452966/campos_512_v4
+88/452980/campos_512_v4
+88/452983/campos_512_v4
+88/452988/campos_512_v4
+88/452989/campos_512_v4
+88/453001/campos_512_v4
+88/453013/campos_512_v4
+88/453015/campos_512_v4
+88/453018/campos_512_v4
+88/453021/campos_512_v4
+88/453027/campos_512_v4
+88/453030/campos_512_v4
+88/453049/campos_512_v4
+88/453054/campos_512_v4
+88/453056/campos_512_v4
+88/453073/campos_512_v4
+88/453074/campos_512_v4
+88/453075/campos_512_v4
+88/453079/campos_512_v4
+88/453081/campos_512_v4
+88/453099/campos_512_v4
+88/453100/campos_512_v4
+88/453103/campos_512_v4
+88/453118/campos_512_v4
+88/453124/campos_512_v4
+88/453131/campos_512_v4
+88/453135/campos_512_v4
+88/453136/campos_512_v4
+88/453138/campos_512_v4
+88/453148/campos_512_v4
+88/453160/campos_512_v4
+88/453161/campos_512_v4
+88/453169/campos_512_v4
+88/453170/campos_512_v4
+88/453179/campos_512_v4
+88/453180/campos_512_v4
+88/453187/campos_512_v4
+88/453192/campos_512_v4
+88/453193/campos_512_v4
+88/453210/campos_512_v4
+88/453217/campos_512_v4
+88/453238/campos_512_v4
+88/453247/campos_512_v4
+88/453251/campos_512_v4
+88/453254/campos_512_v4
+88/453256/campos_512_v4
+88/453257/campos_512_v4
+88/453265/campos_512_v4
+88/453270/campos_512_v4
+88/453273/campos_512_v4
+88/453276/campos_512_v4
+88/453279/campos_512_v4
+88/453280/campos_512_v4
+88/453297/campos_512_v4
+88/453308/campos_512_v4
+88/453315/campos_512_v4
+88/453322/campos_512_v4
+88/453323/campos_512_v4
+88/453325/campos_512_v4
+88/453329/campos_512_v4
+88/453336/campos_512_v4
+88/453340/campos_512_v4
+88/453341/campos_512_v4
+88/453355/campos_512_v4
+88/453362/campos_512_v4
+88/453373/campos_512_v4
+88/453375/campos_512_v4
+88/453390/campos_512_v4
+88/453396/campos_512_v4
+88/453403/campos_512_v4
+88/453404/campos_512_v4
+88/453411/campos_512_v4
+88/453415/campos_512_v4
+88/453436/campos_512_v4
+88/453437/campos_512_v4
+88/453454/campos_512_v4
+88/453471/campos_512_v4
+88/453474/campos_512_v4
+88/453475/campos_512_v4
+88/453478/campos_512_v4
+88/453487/campos_512_v4
+88/453494/campos_512_v4
+88/453497/campos_512_v4
+88/453500/campos_512_v4
+88/453501/campos_512_v4
+88/453503/campos_512_v4
+88/453504/campos_512_v4
+88/453505/campos_512_v4
+88/453508/campos_512_v4
+88/453511/campos_512_v4
+88/453531/campos_512_v4
+88/453535/campos_512_v4
+88/453537/campos_512_v4
+88/453540/campos_512_v4
+88/453546/campos_512_v4
+88/453554/campos_512_v4
+88/453560/campos_512_v4
+88/453576/campos_512_v4
+88/453605/campos_512_v4
+88/453607/campos_512_v4
+88/453617/campos_512_v4
+88/453619/campos_512_v4
+88/453626/campos_512_v4
+88/453631/campos_512_v4
+88/453636/campos_512_v4
+88/453639/campos_512_v4
+88/453644/campos_512_v4
+88/453645/campos_512_v4
+88/453650/campos_512_v4
+88/453660/campos_512_v4
+88/453673/campos_512_v4
+88/453679/campos_512_v4
+88/453685/campos_512_v4
+88/453693/campos_512_v4
+88/453695/campos_512_v4
+88/453702/campos_512_v4
+88/453705/campos_512_v4
+88/453707/campos_512_v4
+88/453713/campos_512_v4
+88/453720/campos_512_v4
+88/453734/campos_512_v4
+88/453746/campos_512_v4
+88/453755/campos_512_v4
+88/453774/campos_512_v4
+88/453778/campos_512_v4
+88/453779/campos_512_v4
+88/453781/campos_512_v4
+88/453782/campos_512_v4
+88/453786/campos_512_v4
+88/453789/campos_512_v4
+88/453805/campos_512_v4
+88/453819/campos_512_v4
+88/453826/campos_512_v4
+88/453830/campos_512_v4
+88/453831/campos_512_v4
+88/453834/campos_512_v4
+88/453836/campos_512_v4
+88/453838/campos_512_v4
+88/453856/campos_512_v4
+88/453858/campos_512_v4
+88/453861/campos_512_v4
+88/453862/campos_512_v4
+88/453871/campos_512_v4
+88/453876/campos_512_v4
+88/453886/campos_512_v4
+88/453887/campos_512_v4
+88/453891/campos_512_v4
+88/453908/campos_512_v4
+88/453911/campos_512_v4
+88/453922/campos_512_v4
+88/453924/campos_512_v4
+88/453927/campos_512_v4
+88/453932/campos_512_v4
+88/453934/campos_512_v4
+88/453940/campos_512_v4
+88/453944/campos_512_v4
+88/453948/campos_512_v4
+88/453961/campos_512_v4
+88/453974/campos_512_v4
+88/453984/campos_512_v4
+88/453985/campos_512_v4
+88/453986/campos_512_v4
+88/453987/campos_512_v4
+88/453992/campos_512_v4
+88/454011/campos_512_v4
+88/454014/campos_512_v4
+88/454015/campos_512_v4
+88/454017/campos_512_v4
+88/454022/campos_512_v4
+88/454026/campos_512_v4
+88/454030/campos_512_v4
+88/454036/campos_512_v4
+88/454046/campos_512_v4
+88/454059/campos_512_v4
+88/454068/campos_512_v4
+88/454078/campos_512_v4
+88/454089/campos_512_v4
+88/454109/campos_512_v4
+88/454115/campos_512_v4
+88/454121/campos_512_v4
+88/454123/campos_512_v4
+88/454127/campos_512_v4
+88/454134/campos_512_v4
+88/454140/campos_512_v4
+88/454153/campos_512_v4
+88/454160/campos_512_v4
+88/454163/campos_512_v4
+88/454177/campos_512_v4
+88/454188/campos_512_v4
+88/454193/campos_512_v4
+88/454195/campos_512_v4
+88/454198/campos_512_v4
+88/454209/campos_512_v4
+88/454217/campos_512_v4
+88/454218/campos_512_v4
+88/454220/campos_512_v4
+88/454223/campos_512_v4
+88/454227/campos_512_v4
+88/454228/campos_512_v4
+88/454230/campos_512_v4
+88/454238/campos_512_v4
+88/454251/campos_512_v4
+88/454252/campos_512_v4
+88/454254/campos_512_v4
+88/454256/campos_512_v4
+88/454258/campos_512_v4
+88/454263/campos_512_v4
+88/454264/campos_512_v4
+88/454267/campos_512_v4
+88/454273/campos_512_v4
+88/454277/campos_512_v4
+88/454284/campos_512_v4
+88/454285/campos_512_v4
+88/454294/campos_512_v4
+88/454297/campos_512_v4
+88/454309/campos_512_v4
+88/454329/campos_512_v4
+88/454331/campos_512_v4
+88/454361/campos_512_v4
+88/454362/campos_512_v4
+88/454364/campos_512_v4
+88/454373/campos_512_v4
+88/454378/campos_512_v4
+88/454383/campos_512_v4
+88/454387/campos_512_v4
+88/454394/campos_512_v4
+88/454405/campos_512_v4
+88/454415/campos_512_v4
+88/454423/campos_512_v4
+88/454425/campos_512_v4
+88/454427/campos_512_v4
+88/454429/campos_512_v4
+88/454433/campos_512_v4
+88/454435/campos_512_v4
+88/454436/campos_512_v4
+88/454447/campos_512_v4
+88/454450/campos_512_v4
+88/454452/campos_512_v4
+88/454456/campos_512_v4
+88/454461/campos_512_v4
+88/454467/campos_512_v4
+88/454469/campos_512_v4
+88/454481/campos_512_v4
+88/454483/campos_512_v4
+88/454488/campos_512_v4
+88/454494/campos_512_v4
+88/454497/campos_512_v4
+88/454509/campos_512_v4
+88/454517/campos_512_v4
+88/454525/campos_512_v4
+88/454527/campos_512_v4
+88/454545/campos_512_v4
+88/454547/campos_512_v4
+88/454563/campos_512_v4
+88/454566/campos_512_v4
+88/454570/campos_512_v4
+88/454574/campos_512_v4
+88/454579/campos_512_v4
+88/454582/campos_512_v4
+88/454583/campos_512_v4
+88/454585/campos_512_v4
+88/454587/campos_512_v4
+88/454591/campos_512_v4
+88/454600/campos_512_v4
+88/454606/campos_512_v4
+88/454623/campos_512_v4
+88/454637/campos_512_v4
+88/454641/campos_512_v4
+88/454645/campos_512_v4
+88/454647/campos_512_v4
+88/454656/campos_512_v4
+88/454681/campos_512_v4
+88/454682/campos_512_v4
+88/454684/campos_512_v4
+88/454688/campos_512_v4
+88/454689/campos_512_v4
+88/454693/campos_512_v4
+88/454697/campos_512_v4
+88/454712/campos_512_v4
+88/454714/campos_512_v4
+88/454715/campos_512_v4
+88/454722/campos_512_v4
+88/454732/campos_512_v4
+88/454754/campos_512_v4
+88/454767/campos_512_v4
+88/454783/campos_512_v4
+88/454788/campos_512_v4
+88/454794/campos_512_v4
+88/454795/campos_512_v4
+88/454799/campos_512_v4
+88/454803/campos_512_v4
+88/454818/campos_512_v4
+88/454831/campos_512_v4
+88/454834/campos_512_v4
+88/454840/campos_512_v4
+88/454850/campos_512_v4
+88/454857/campos_512_v4
+88/454859/campos_512_v4
+88/454865/campos_512_v4
+88/454867/campos_512_v4
+88/454869/campos_512_v4
+88/454880/campos_512_v4
+88/454881/campos_512_v4
+88/454884/campos_512_v4
+88/454898/campos_512_v4
+88/454901/campos_512_v4
+88/454906/campos_512_v4
+88/454911/campos_512_v4
+88/454912/campos_512_v4
+88/454914/campos_512_v4
+88/454921/campos_512_v4
+88/454923/campos_512_v4
+88/454925/campos_512_v4
+88/454928/campos_512_v4
+88/454937/campos_512_v4
+88/454938/campos_512_v4
+88/454943/campos_512_v4
+88/454947/campos_512_v4
+88/454951/campos_512_v4
+88/454964/campos_512_v4
+88/454971/campos_512_v4
+88/454983/campos_512_v4
+88/454986/campos_512_v4
+88/454996/campos_512_v4
+88/454998/campos_512_v4
+89/455006/campos_512_v4
+89/455009/campos_512_v4
+89/455013/campos_512_v4
+89/455022/campos_512_v4
+89/455024/campos_512_v4
+89/455030/campos_512_v4
+89/455032/campos_512_v4
+89/455040/campos_512_v4
+89/455042/campos_512_v4
+89/455047/campos_512_v4
+89/455048/campos_512_v4
+89/455054/campos_512_v4
+89/455065/campos_512_v4
+89/455068/campos_512_v4
+89/455082/campos_512_v4
+89/455093/campos_512_v4
+89/455096/campos_512_v4
+89/455104/campos_512_v4
+89/455117/campos_512_v4
+89/455140/campos_512_v4
+89/455141/campos_512_v4
+89/455150/campos_512_v4
+89/455151/campos_512_v4
+89/455152/campos_512_v4
+89/455155/campos_512_v4
+89/455166/campos_512_v4
+89/455167/campos_512_v4
+89/455178/campos_512_v4
+89/455180/campos_512_v4
+89/455185/campos_512_v4
+89/455192/campos_512_v4
+89/455194/campos_512_v4
+89/455204/campos_512_v4
+89/455208/campos_512_v4
+89/455215/campos_512_v4
+89/455231/campos_512_v4
+89/455234/campos_512_v4
+89/455239/campos_512_v4
+89/455241/campos_512_v4
+89/455244/campos_512_v4
+89/455246/campos_512_v4
+89/455247/campos_512_v4
+89/455249/campos_512_v4
+89/455250/campos_512_v4
+89/455256/campos_512_v4
+89/455267/campos_512_v4
+89/455270/campos_512_v4
+89/455282/campos_512_v4
+89/455292/campos_512_v4
+89/455296/campos_512_v4
+89/455302/campos_512_v4
+89/455309/campos_512_v4
+89/455313/campos_512_v4
+89/455320/campos_512_v4
+89/455322/campos_512_v4
+89/455337/campos_512_v4
+89/455339/campos_512_v4
+89/455341/campos_512_v4
+89/455342/campos_512_v4
+89/455343/campos_512_v4
+89/455358/campos_512_v4
+89/455373/campos_512_v4
+89/455380/campos_512_v4
+89/455388/campos_512_v4
+89/455393/campos_512_v4
+89/455394/campos_512_v4
+89/455406/campos_512_v4
+89/455415/campos_512_v4
+89/455426/campos_512_v4
+89/455436/campos_512_v4
+89/455441/campos_512_v4
+89/455481/campos_512_v4
+89/455510/campos_512_v4
+89/455513/campos_512_v4
+89/455516/campos_512_v4
+89/455524/campos_512_v4
+89/455535/campos_512_v4
+89/455568/campos_512_v4
+89/455578/campos_512_v4
+89/455588/campos_512_v4
+89/455597/campos_512_v4
+89/455599/campos_512_v4
+89/455603/campos_512_v4
+89/455616/campos_512_v4
+89/455621/campos_512_v4
+89/455624/campos_512_v4
+89/455625/campos_512_v4
+89/455630/campos_512_v4
+89/455639/campos_512_v4
+89/455640/campos_512_v4
+89/455653/campos_512_v4
+89/455657/campos_512_v4
+89/455660/campos_512_v4
+89/455666/campos_512_v4
+89/455686/campos_512_v4
+89/455687/campos_512_v4
+89/455703/campos_512_v4
+89/455724/campos_512_v4
+89/455725/campos_512_v4
+89/455728/campos_512_v4
+89/455744/campos_512_v4
+89/455756/campos_512_v4
+89/455759/campos_512_v4
+89/455767/campos_512_v4
+89/455783/campos_512_v4
+89/455784/campos_512_v4
+89/455785/campos_512_v4
+89/455786/campos_512_v4
+89/455806/campos_512_v4
+89/455807/campos_512_v4
+89/455813/campos_512_v4
+89/455828/campos_512_v4
+89/455833/campos_512_v4
+89/455843/campos_512_v4
+89/455845/campos_512_v4
+89/455856/campos_512_v4
+89/455867/campos_512_v4
+89/455871/campos_512_v4
+89/455872/campos_512_v4
+89/455880/campos_512_v4
+89/455884/campos_512_v4
+89/455888/campos_512_v4
+89/455893/campos_512_v4
+89/455894/campos_512_v4
+89/455912/campos_512_v4
+89/455918/campos_512_v4
+89/455934/campos_512_v4
+89/455938/campos_512_v4
+89/455948/campos_512_v4
+89/455950/campos_512_v4
+89/455952/campos_512_v4
+89/455954/campos_512_v4
+89/455960/campos_512_v4
+89/455966/campos_512_v4
+89/455996/campos_512_v4
+89/455999/campos_512_v4
+89/456008/campos_512_v4
+89/456009/campos_512_v4
+89/456015/campos_512_v4
+89/456018/campos_512_v4
+89/456020/campos_512_v4
+89/456035/campos_512_v4
+89/456037/campos_512_v4
+89/456043/campos_512_v4
+89/456048/campos_512_v4
+89/456054/campos_512_v4
+89/456058/campos_512_v4
+89/456060/campos_512_v4
+89/456081/campos_512_v4
+89/456083/campos_512_v4
+89/456086/campos_512_v4
+89/456092/campos_512_v4
+89/456101/campos_512_v4
+89/456102/campos_512_v4
+89/456113/campos_512_v4
+89/456124/campos_512_v4
+89/456128/campos_512_v4
+89/456130/campos_512_v4
+89/456160/campos_512_v4
+89/456166/campos_512_v4
+89/456177/campos_512_v4
+89/456179/campos_512_v4
+89/456183/campos_512_v4
+89/456198/campos_512_v4
+89/456200/campos_512_v4
+89/456203/campos_512_v4
+89/456206/campos_512_v4
+89/456215/campos_512_v4
+89/456217/campos_512_v4
+89/456226/campos_512_v4
+89/456234/campos_512_v4
+89/456246/campos_512_v4
+89/456248/campos_512_v4
+89/456256/campos_512_v4
+89/456278/campos_512_v4
+89/456284/campos_512_v4
+89/456294/campos_512_v4
+89/456304/campos_512_v4
+89/456309/campos_512_v4
+89/456310/campos_512_v4
+89/456311/campos_512_v4
+89/456338/campos_512_v4
+89/456343/campos_512_v4
+89/456348/campos_512_v4
+89/456349/campos_512_v4
+89/456358/campos_512_v4
+89/456359/campos_512_v4
+89/456370/campos_512_v4
+89/456372/campos_512_v4
+89/456381/campos_512_v4
+89/456385/campos_512_v4
+89/456387/campos_512_v4
+89/456395/campos_512_v4
+89/456398/campos_512_v4
+89/456404/campos_512_v4
+89/456422/campos_512_v4
+89/456436/campos_512_v4
+89/456448/campos_512_v4
+89/456457/campos_512_v4
+89/456465/campos_512_v4
+89/456470/campos_512_v4
+89/456477/campos_512_v4
+89/456479/campos_512_v4
+89/456480/campos_512_v4
+89/456492/campos_512_v4
+89/456496/campos_512_v4
+89/456506/campos_512_v4
+89/456514/campos_512_v4
+89/456526/campos_512_v4
+89/456530/campos_512_v4
+89/456535/campos_512_v4
+89/456545/campos_512_v4
+89/456548/campos_512_v4
+89/456552/campos_512_v4
+89/456555/campos_512_v4
+89/456562/campos_512_v4
+89/456563/campos_512_v4
+89/456572/campos_512_v4
+89/456578/campos_512_v4
+89/456581/campos_512_v4
+89/456597/campos_512_v4
+89/456600/campos_512_v4
+89/456601/campos_512_v4
+89/456602/campos_512_v4
+89/456608/campos_512_v4
+89/456609/campos_512_v4
+89/456611/campos_512_v4
+89/456615/campos_512_v4
+89/456616/campos_512_v4
+89/456620/campos_512_v4
+89/456621/campos_512_v4
+89/456623/campos_512_v4
+89/456625/campos_512_v4
+89/456655/campos_512_v4
+89/456673/campos_512_v4
+89/456686/campos_512_v4
+89/456687/campos_512_v4
+89/456689/campos_512_v4
+89/456716/campos_512_v4
+89/456718/campos_512_v4
+89/456729/campos_512_v4
+89/456734/campos_512_v4
+89/456736/campos_512_v4
+89/456744/campos_512_v4
+89/456746/campos_512_v4
+89/456748/campos_512_v4
+89/456761/campos_512_v4
+89/456764/campos_512_v4
+89/456769/campos_512_v4
+89/456774/campos_512_v4
+89/456788/campos_512_v4
+89/456796/campos_512_v4
+89/456802/campos_512_v4
+89/456803/campos_512_v4
+89/456809/campos_512_v4
+89/456813/campos_512_v4
+89/456814/campos_512_v4
+89/456819/campos_512_v4
+89/456822/campos_512_v4
+89/456823/campos_512_v4
+89/456837/campos_512_v4
+89/456845/campos_512_v4
+89/456881/campos_512_v4
+89/456884/campos_512_v4
+89/456891/campos_512_v4
+89/456894/campos_512_v4
+89/456896/campos_512_v4
+89/456910/campos_512_v4
+89/456919/campos_512_v4
+89/456921/campos_512_v4
+89/456926/campos_512_v4
+89/456931/campos_512_v4
+89/456946/campos_512_v4
+89/456950/campos_512_v4
+89/456953/campos_512_v4
+89/456957/campos_512_v4
+89/456962/campos_512_v4
+89/456965/campos_512_v4
+89/456966/campos_512_v4
+89/456969/campos_512_v4
+89/456973/campos_512_v4
+89/456979/campos_512_v4
+89/456987/campos_512_v4
+89/457033/campos_512_v4
+89/457041/campos_512_v4
+89/457059/campos_512_v4
+89/457061/campos_512_v4
+89/457062/campos_512_v4
+89/457065/campos_512_v4
+89/457066/campos_512_v4
+89/457068/campos_512_v4
+89/457070/campos_512_v4
+89/457085/campos_512_v4
+89/457095/campos_512_v4
+89/457101/campos_512_v4
+89/457106/campos_512_v4
+89/457117/campos_512_v4
+89/457119/campos_512_v4
+89/457120/campos_512_v4
+89/457124/campos_512_v4
+89/457135/campos_512_v4
+89/457146/campos_512_v4
+89/457153/campos_512_v4
+89/457155/campos_512_v4
+89/457156/campos_512_v4
+89/457161/campos_512_v4
+89/457164/campos_512_v4
+89/457172/campos_512_v4
+89/457186/campos_512_v4
+89/457189/campos_512_v4
+89/457192/campos_512_v4
+89/457194/campos_512_v4
+89/457202/campos_512_v4
+89/457219/campos_512_v4
+89/457223/campos_512_v4
+89/457226/campos_512_v4
+89/457236/campos_512_v4
+89/457263/campos_512_v4
+89/457265/campos_512_v4
+89/457273/campos_512_v4
+89/457274/campos_512_v4
+89/457289/campos_512_v4
+89/457295/campos_512_v4
+89/457298/campos_512_v4
+89/457326/campos_512_v4
+89/457328/campos_512_v4
+89/457332/campos_512_v4
+89/457340/campos_512_v4
+89/457345/campos_512_v4
+89/457371/campos_512_v4
+89/457380/campos_512_v4
+89/457386/campos_512_v4
+89/457406/campos_512_v4
+89/457432/campos_512_v4
+89/457438/campos_512_v4
+89/457442/campos_512_v4
+89/457446/campos_512_v4
+89/457448/campos_512_v4
+89/457454/campos_512_v4
+89/457462/campos_512_v4
+89/457476/campos_512_v4
+89/457484/campos_512_v4
+89/457491/campos_512_v4
+89/457492/campos_512_v4
+89/457504/campos_512_v4
+89/457506/campos_512_v4
+89/457507/campos_512_v4
+89/457511/campos_512_v4
+89/457515/campos_512_v4
+89/457517/campos_512_v4
+89/457523/campos_512_v4
+89/457524/campos_512_v4
+89/457538/campos_512_v4
+89/457542/campos_512_v4
+89/457560/campos_512_v4
+89/457568/campos_512_v4
+89/457569/campos_512_v4
+89/457576/campos_512_v4
+89/457612/campos_512_v4
+89/457613/campos_512_v4
+89/457616/campos_512_v4
+89/457636/campos_512_v4
+89/457637/campos_512_v4
+89/457644/campos_512_v4
+89/457646/campos_512_v4
+89/457665/campos_512_v4
+89/457669/campos_512_v4
+89/457672/campos_512_v4
+89/457687/campos_512_v4
+89/457691/campos_512_v4
+89/457693/campos_512_v4
+89/457695/campos_512_v4
+89/457706/campos_512_v4
+89/457713/campos_512_v4
+89/457730/campos_512_v4
+89/457775/campos_512_v4
+89/457778/campos_512_v4
+89/457780/campos_512_v4
+89/457792/campos_512_v4
+89/457800/campos_512_v4
+89/457804/campos_512_v4
+89/457813/campos_512_v4
+89/457819/campos_512_v4
+89/457830/campos_512_v4
+89/457843/campos_512_v4
+89/457862/campos_512_v4
+89/457866/campos_512_v4
+89/457868/campos_512_v4
+89/457879/campos_512_v4
+89/457883/campos_512_v4
+89/457897/campos_512_v4
+89/457898/campos_512_v4
+89/457899/campos_512_v4
+89/457908/campos_512_v4
+89/457910/campos_512_v4
+89/457917/campos_512_v4
+89/457919/campos_512_v4
+89/457924/campos_512_v4
+89/457925/campos_512_v4
+89/457946/campos_512_v4
+89/457949/campos_512_v4
+89/457951/campos_512_v4
+89/457968/campos_512_v4
+89/457970/campos_512_v4
+89/457983/campos_512_v4
+89/458003/campos_512_v4
+89/458013/campos_512_v4
+89/458018/campos_512_v4
+89/458026/campos_512_v4
+89/458046/campos_512_v4
+89/458049/campos_512_v4
+89/458053/campos_512_v4
+89/458059/campos_512_v4
+89/458082/campos_512_v4
+89/458086/campos_512_v4
+89/458101/campos_512_v4
+89/458103/campos_512_v4
+89/458134/campos_512_v4
+89/458136/campos_512_v4
+89/458149/campos_512_v4
+89/458157/campos_512_v4
+89/458159/campos_512_v4
+89/458170/campos_512_v4
+89/458178/campos_512_v4
+89/458182/campos_512_v4
+89/458197/campos_512_v4
+89/458199/campos_512_v4
+89/458215/campos_512_v4
+89/458222/campos_512_v4
+89/458229/campos_512_v4
+89/458232/campos_512_v4
+89/458247/campos_512_v4
+89/458248/campos_512_v4
+89/458253/campos_512_v4
+89/458256/campos_512_v4
+89/458259/campos_512_v4
+89/458263/campos_512_v4
+89/458272/campos_512_v4
+89/458275/campos_512_v4
+89/458276/campos_512_v4
+89/458278/campos_512_v4
+89/458285/campos_512_v4
+89/458286/campos_512_v4
+89/458296/campos_512_v4
+89/458300/campos_512_v4
+89/458309/campos_512_v4
+89/458313/campos_512_v4
+89/458326/campos_512_v4
+89/458329/campos_512_v4
+89/458345/campos_512_v4
+89/458350/campos_512_v4
+89/458358/campos_512_v4
+89/458365/campos_512_v4
+89/458367/campos_512_v4
+89/458368/campos_512_v4
+89/458371/campos_512_v4
+89/458385/campos_512_v4
+89/458391/campos_512_v4
+89/458392/campos_512_v4
+89/458394/campos_512_v4
+89/458398/campos_512_v4
+89/458401/campos_512_v4
+89/458407/campos_512_v4
+89/458411/campos_512_v4
+89/458412/campos_512_v4
+89/458414/campos_512_v4
+89/458416/campos_512_v4
+89/458417/campos_512_v4
+89/458431/campos_512_v4
+89/458435/campos_512_v4
+89/458447/campos_512_v4
+89/458455/campos_512_v4
+89/458467/campos_512_v4
+89/458481/campos_512_v4
+89/458510/campos_512_v4
+89/458512/campos_512_v4
+89/458525/campos_512_v4
+89/458530/campos_512_v4
+89/458531/campos_512_v4
+89/458534/campos_512_v4
+89/458535/campos_512_v4
+89/458552/campos_512_v4
+89/458561/campos_512_v4
+89/458564/campos_512_v4
+89/458573/campos_512_v4
+89/458581/campos_512_v4
+89/458585/campos_512_v4
+89/458601/campos_512_v4
+89/458609/campos_512_v4
+89/458610/campos_512_v4
+89/458615/campos_512_v4
+89/458620/campos_512_v4
+89/458624/campos_512_v4
+89/458628/campos_512_v4
+89/458629/campos_512_v4
+89/458636/campos_512_v4
+89/458640/campos_512_v4
+89/458644/campos_512_v4
+89/458646/campos_512_v4
+89/458648/campos_512_v4
+89/458658/campos_512_v4
+89/458671/campos_512_v4
+89/458677/campos_512_v4
+89/458683/campos_512_v4
+89/458687/campos_512_v4
+89/458695/campos_512_v4
+89/458699/campos_512_v4
+89/458710/campos_512_v4
+89/458712/campos_512_v4
+89/458715/campos_512_v4
+89/458725/campos_512_v4
+89/458726/campos_512_v4
+89/458730/campos_512_v4
+89/458735/campos_512_v4
+89/458742/campos_512_v4
+89/458743/campos_512_v4
+89/458745/campos_512_v4
+89/458748/campos_512_v4
+89/458768/campos_512_v4
+89/458774/campos_512_v4
+89/458813/campos_512_v4
+89/458818/campos_512_v4
+89/458825/campos_512_v4
+89/458829/campos_512_v4
+89/458838/campos_512_v4
+89/458863/campos_512_v4
+89/458864/campos_512_v4
+89/458868/campos_512_v4
+89/458870/campos_512_v4
+89/458871/campos_512_v4
+89/458882/campos_512_v4
+89/458894/campos_512_v4
+89/458895/campos_512_v4
+89/458906/campos_512_v4
+89/458916/campos_512_v4
+89/458917/campos_512_v4
+89/458918/campos_512_v4
+89/458928/campos_512_v4
+89/458929/campos_512_v4
+89/458965/campos_512_v4
+89/458971/campos_512_v4
+89/458996/campos_512_v4
+89/458997/campos_512_v4
+89/459005/campos_512_v4
+89/459010/campos_512_v4
+89/459011/campos_512_v4
+89/459019/campos_512_v4
+89/459025/campos_512_v4
+89/459036/campos_512_v4
+89/459042/campos_512_v4
+89/459044/campos_512_v4
+89/459050/campos_512_v4
+89/459062/campos_512_v4
+89/459075/campos_512_v4
+89/459092/campos_512_v4
+89/459096/campos_512_v4
+89/459098/campos_512_v4
+89/459102/campos_512_v4
+89/459103/campos_512_v4
+89/459112/campos_512_v4
+89/459119/campos_512_v4
+89/459127/campos_512_v4
+89/459132/campos_512_v4
+89/459135/campos_512_v4
+89/459138/campos_512_v4
+89/459139/campos_512_v4
+89/459153/campos_512_v4
+89/459156/campos_512_v4
+89/459161/campos_512_v4
+89/459167/campos_512_v4
+89/459170/campos_512_v4
+89/459173/campos_512_v4
+89/459174/campos_512_v4
+89/459179/campos_512_v4
+89/459182/campos_512_v4
+89/459185/campos_512_v4
+89/459189/campos_512_v4
+89/459196/campos_512_v4
+89/459207/campos_512_v4
+89/459209/campos_512_v4
+89/459217/campos_512_v4
+89/459224/campos_512_v4
+89/459228/campos_512_v4
+89/459233/campos_512_v4
+89/459237/campos_512_v4
+89/459240/campos_512_v4
+89/459241/campos_512_v4
+89/459249/campos_512_v4
+89/459254/campos_512_v4
+89/459255/campos_512_v4
+89/459271/campos_512_v4
+89/459274/campos_512_v4
+89/459291/campos_512_v4
+89/459294/campos_512_v4
+89/459295/campos_512_v4
+89/459302/campos_512_v4
+89/459326/campos_512_v4
+89/459333/campos_512_v4
+89/459340/campos_512_v4
+89/459341/campos_512_v4
+89/459345/campos_512_v4
+89/459350/campos_512_v4
+89/459373/campos_512_v4
+89/459378/campos_512_v4
+89/459381/campos_512_v4
+89/459382/campos_512_v4
+89/459398/campos_512_v4
+89/459403/campos_512_v4
+89/459406/campos_512_v4
+89/459409/campos_512_v4
+89/459425/campos_512_v4
+89/459429/campos_512_v4
+89/459432/campos_512_v4
+89/459434/campos_512_v4
+89/459443/campos_512_v4
+89/459450/campos_512_v4
+89/459453/campos_512_v4
+89/459469/campos_512_v4
+89/459472/campos_512_v4
+89/459477/campos_512_v4
+89/459483/campos_512_v4
+89/459487/campos_512_v4
+89/459517/campos_512_v4
+89/459531/campos_512_v4
+89/459535/campos_512_v4
+89/459536/campos_512_v4
+89/459539/campos_512_v4
+89/459542/campos_512_v4
+89/459557/campos_512_v4
+89/459565/campos_512_v4
+89/459567/campos_512_v4
+89/459575/campos_512_v4
+89/459576/campos_512_v4
+89/459584/campos_512_v4
+89/459587/campos_512_v4
+89/459603/campos_512_v4
+89/459606/campos_512_v4
+89/459610/campos_512_v4
+89/459620/campos_512_v4
+89/459627/campos_512_v4
+89/459650/campos_512_v4
+89/459655/campos_512_v4
+89/459658/campos_512_v4
+89/459665/campos_512_v4
+89/459667/campos_512_v4
+89/459671/campos_512_v4
+89/459687/campos_512_v4
+89/459695/campos_512_v4
+89/459704/campos_512_v4
+89/459709/campos_512_v4
+89/459720/campos_512_v4
+89/459723/campos_512_v4
+89/459732/campos_512_v4
+89/459737/campos_512_v4
+89/459742/campos_512_v4
+89/459745/campos_512_v4
+89/459753/campos_512_v4
+89/459763/campos_512_v4
+89/459768/campos_512_v4
+89/459776/campos_512_v4
+89/459785/campos_512_v4
+89/459790/campos_512_v4
+89/459797/campos_512_v4
+89/459818/campos_512_v4
+89/459820/campos_512_v4
+89/459821/campos_512_v4
+89/459824/campos_512_v4
+89/459840/campos_512_v4
+89/459856/campos_512_v4
+89/459870/campos_512_v4
+89/459877/campos_512_v4
+89/459893/campos_512_v4
+89/459894/campos_512_v4
+89/459900/campos_512_v4
+89/459904/campos_512_v4
+89/459907/campos_512_v4
+89/459921/campos_512_v4
+89/459925/campos_512_v4
+89/459926/campos_512_v4
+89/459931/campos_512_v4
+89/459935/campos_512_v4
+89/459936/campos_512_v4
+89/459947/campos_512_v4
+89/459952/campos_512_v4
+89/459956/campos_512_v4
+89/459965/campos_512_v4
+89/459986/campos_512_v4
+89/459999/campos_512_v4
+9/55005/campos_512_v4
+9/55009/campos_512_v4
+9/55019/campos_512_v4
+9/55025/campos_512_v4
+9/55031/campos_512_v4
+9/55034/campos_512_v4
+9/55043/campos_512_v4
+9/55048/campos_512_v4
+9/55063/campos_512_v4
+9/55065/campos_512_v4
+9/55069/campos_512_v4
+9/55073/campos_512_v4
+9/55074/campos_512_v4
+9/55084/campos_512_v4
+9/55091/campos_512_v4
+9/55115/campos_512_v4
+9/55125/campos_512_v4
+9/55133/campos_512_v4
+9/55136/campos_512_v4
+9/55141/campos_512_v4
+9/55148/campos_512_v4
+9/55153/campos_512_v4
+9/55176/campos_512_v4
+9/55181/campos_512_v4
+9/55192/campos_512_v4
+9/55193/campos_512_v4
+9/55209/campos_512_v4
+9/55216/campos_512_v4
+9/55223/campos_512_v4
+9/55226/campos_512_v4
+9/55283/campos_512_v4
+9/55306/campos_512_v4
+9/55307/campos_512_v4
+9/55308/campos_512_v4
+9/55320/campos_512_v4
+9/55323/campos_512_v4
+9/55329/campos_512_v4
+9/55344/campos_512_v4
+9/55345/campos_512_v4
+9/55350/campos_512_v4
+9/55352/campos_512_v4
+9/55360/campos_512_v4
+9/55364/campos_512_v4
+9/55400/campos_512_v4
+9/55403/campos_512_v4
+9/55406/campos_512_v4
+9/55416/campos_512_v4
+9/55426/campos_512_v4
+9/55438/campos_512_v4
+9/55462/campos_512_v4
+9/55464/campos_512_v4
+9/55469/campos_512_v4
+9/55477/campos_512_v4
+9/55486/campos_512_v4
+9/55491/campos_512_v4
+9/55493/campos_512_v4
+9/55507/campos_512_v4
+9/55513/campos_512_v4
+9/55522/campos_512_v4
+9/55525/campos_512_v4
+9/55526/campos_512_v4
+9/55530/campos_512_v4
+9/55542/campos_512_v4
+9/55549/campos_512_v4
+9/55551/campos_512_v4
+9/55552/campos_512_v4
+9/55558/campos_512_v4
+9/55561/campos_512_v4
+9/55569/campos_512_v4
+9/55577/campos_512_v4
+9/55585/campos_512_v4
+9/55591/campos_512_v4
+9/55593/campos_512_v4
+9/55596/campos_512_v4
+9/55600/campos_512_v4
+9/55606/campos_512_v4
+9/55607/campos_512_v4
+9/55629/campos_512_v4
+9/55637/campos_512_v4
+9/55642/campos_512_v4
+9/55643/campos_512_v4
+9/55651/campos_512_v4
+9/55653/campos_512_v4
+9/55667/campos_512_v4
+9/55669/campos_512_v4
+9/55689/campos_512_v4
+9/55692/campos_512_v4
+9/55693/campos_512_v4
+9/55694/campos_512_v4
+9/55699/campos_512_v4
+9/55716/campos_512_v4
+9/55721/campos_512_v4
+9/55730/campos_512_v4
+9/55735/campos_512_v4
+9/55755/campos_512_v4
+9/55757/campos_512_v4
+9/55762/campos_512_v4
+9/55765/campos_512_v4
+9/55781/campos_512_v4
+9/55782/campos_512_v4
+9/55796/campos_512_v4
+9/55797/campos_512_v4
+9/55798/campos_512_v4
+9/55800/campos_512_v4
+9/55811/campos_512_v4
+9/55815/campos_512_v4
+9/55816/campos_512_v4
+9/55818/campos_512_v4
+9/55819/campos_512_v4
+9/55827/campos_512_v4
+9/55839/campos_512_v4
+9/55844/campos_512_v4
+9/55857/campos_512_v4
+9/55861/campos_512_v4
+9/55862/campos_512_v4
+9/55863/campos_512_v4
+9/55878/campos_512_v4
+9/55882/campos_512_v4
+9/55914/campos_512_v4
+9/55918/campos_512_v4
+9/55920/campos_512_v4
+9/55921/campos_512_v4
+9/55925/campos_512_v4
+9/55938/campos_512_v4
+9/55939/campos_512_v4
+9/55948/campos_512_v4
+9/55955/campos_512_v4
+9/55960/campos_512_v4
+9/55965/campos_512_v4
+9/55971/campos_512_v4
+9/55976/campos_512_v4
+9/55990/campos_512_v4
+9/56009/campos_512_v4
+9/56017/campos_512_v4
+9/56031/campos_512_v4
+9/56032/campos_512_v4
+9/56034/campos_512_v4
+9/56041/campos_512_v4
+9/56051/campos_512_v4
+9/56062/campos_512_v4
+9/56063/campos_512_v4
+9/56074/campos_512_v4
+9/56075/campos_512_v4
+9/56090/campos_512_v4
+9/56092/campos_512_v4
+9/56097/campos_512_v4
+9/56105/campos_512_v4
+9/56123/campos_512_v4
+9/56126/campos_512_v4
+9/56135/campos_512_v4
+9/56143/campos_512_v4
+9/56152/campos_512_v4
+9/56156/campos_512_v4
+9/56164/campos_512_v4
+9/56167/campos_512_v4
+9/56168/campos_512_v4
+9/56169/campos_512_v4
+9/56171/campos_512_v4
+9/56173/campos_512_v4
+9/56199/campos_512_v4
+9/56231/campos_512_v4
+9/56232/campos_512_v4
+9/56244/campos_512_v4
+9/56254/campos_512_v4
+9/56261/campos_512_v4
+9/56267/campos_512_v4
+9/56277/campos_512_v4
+9/56283/campos_512_v4
+9/56292/campos_512_v4
+9/56296/campos_512_v4
+9/56297/campos_512_v4
+9/56299/campos_512_v4
+9/56307/campos_512_v4
+9/56318/campos_512_v4
+9/56319/campos_512_v4
+9/56320/campos_512_v4
+9/56336/campos_512_v4
+9/56339/campos_512_v4
+9/56362/campos_512_v4
+9/56366/campos_512_v4
+9/56367/campos_512_v4
+9/56385/campos_512_v4
+9/56391/campos_512_v4
+9/56416/campos_512_v4
+9/56418/campos_512_v4
+9/56428/campos_512_v4
+9/56431/campos_512_v4
+9/56436/campos_512_v4
+9/56444/campos_512_v4
+9/56445/campos_512_v4
+9/56446/campos_512_v4
+9/56456/campos_512_v4
+9/56459/campos_512_v4
+9/56463/campos_512_v4
+9/56470/campos_512_v4
+9/56488/campos_512_v4
+9/56506/campos_512_v4
+9/56512/campos_512_v4
+9/56527/campos_512_v4
+9/56539/campos_512_v4
+9/56543/campos_512_v4
+9/56544/campos_512_v4
+9/56549/campos_512_v4
+9/56551/campos_512_v4
+9/56552/campos_512_v4
+9/56558/campos_512_v4
+9/56566/campos_512_v4
+9/56567/campos_512_v4
+9/56570/campos_512_v4
+9/56574/campos_512_v4
+9/56604/campos_512_v4
+9/56617/campos_512_v4
+9/56621/campos_512_v4
+9/56635/campos_512_v4
+9/56664/campos_512_v4
+9/56667/campos_512_v4
+9/56680/campos_512_v4
+9/56682/campos_512_v4
+9/56700/campos_512_v4
+9/56701/campos_512_v4
+9/56714/campos_512_v4
+9/56715/campos_512_v4
+9/56719/campos_512_v4
+9/56727/campos_512_v4
+9/56730/campos_512_v4
+9/56733/campos_512_v4
+9/56742/campos_512_v4
+9/56748/campos_512_v4
+9/56763/campos_512_v4
+9/56764/campos_512_v4
+9/56783/campos_512_v4
+9/56802/campos_512_v4
+9/56809/campos_512_v4
+9/56810/campos_512_v4
+9/56821/campos_512_v4
+9/56825/campos_512_v4
+9/56843/campos_512_v4
+9/56870/campos_512_v4
+9/56876/campos_512_v4
+9/56878/campos_512_v4
+9/56884/campos_512_v4
+9/56888/campos_512_v4
+9/56892/campos_512_v4
+9/56895/campos_512_v4
+9/56906/campos_512_v4
+9/56915/campos_512_v4
+9/56917/campos_512_v4
+9/56935/campos_512_v4
+9/56936/campos_512_v4
+9/56962/campos_512_v4
+9/56971/campos_512_v4
+9/56975/campos_512_v4
+9/56978/campos_512_v4
+9/56980/campos_512_v4
+9/56997/campos_512_v4
+9/57001/campos_512_v4
+9/57006/campos_512_v4
+9/57012/campos_512_v4
+9/57019/campos_512_v4
+9/57021/campos_512_v4
+9/57026/campos_512_v4
+9/57028/campos_512_v4
+9/57029/campos_512_v4
+9/57034/campos_512_v4
+9/57038/campos_512_v4
+9/57050/campos_512_v4
+9/57051/campos_512_v4
+9/57054/campos_512_v4
+9/57068/campos_512_v4
+9/57069/campos_512_v4
+9/57071/campos_512_v4
+9/57076/campos_512_v4
+9/57085/campos_512_v4
+9/57089/campos_512_v4
+9/57096/campos_512_v4
+9/57107/campos_512_v4
+9/57115/campos_512_v4
+9/57117/campos_512_v4
+9/57119/campos_512_v4
+9/57125/campos_512_v4
+9/57129/campos_512_v4
+9/57130/campos_512_v4
+9/57131/campos_512_v4
+9/57138/campos_512_v4
+9/57140/campos_512_v4
+9/57142/campos_512_v4
+9/57148/campos_512_v4
+9/57149/campos_512_v4
+9/57156/campos_512_v4
+9/57159/campos_512_v4
+9/57162/campos_512_v4
+9/57181/campos_512_v4
+9/57184/campos_512_v4
+9/57188/campos_512_v4
+9/57196/campos_512_v4
+9/57202/campos_512_v4
+9/57212/campos_512_v4
+9/57213/campos_512_v4
+9/57214/campos_512_v4
+9/57222/campos_512_v4
+9/57223/campos_512_v4
+9/57240/campos_512_v4
+9/57244/campos_512_v4
+9/57246/campos_512_v4
+9/57250/campos_512_v4
+9/57270/campos_512_v4
+9/57294/campos_512_v4
+9/57311/campos_512_v4
+9/57321/campos_512_v4
+9/57358/campos_512_v4
+9/57359/campos_512_v4
+9/57361/campos_512_v4
+9/57384/campos_512_v4
+9/57386/campos_512_v4
+9/57388/campos_512_v4
+9/57389/campos_512_v4
+9/57394/campos_512_v4
+9/57395/campos_512_v4
+9/57410/campos_512_v4
+9/57424/campos_512_v4
+9/57426/campos_512_v4
+9/57427/campos_512_v4
+9/57434/campos_512_v4
+9/57435/campos_512_v4
+9/57442/campos_512_v4
+9/57446/campos_512_v4
+9/57448/campos_512_v4
+9/57449/campos_512_v4
+9/57458/campos_512_v4
+9/57465/campos_512_v4
+9/57471/campos_512_v4
+9/57473/campos_512_v4
+9/57477/campos_512_v4
+9/57485/campos_512_v4
+9/57488/campos_512_v4
+9/57489/campos_512_v4
+9/57501/campos_512_v4
+9/57507/campos_512_v4
+9/57511/campos_512_v4
+9/57512/campos_512_v4
+9/57513/campos_512_v4
+9/57544/campos_512_v4
+9/57557/campos_512_v4
+9/57558/campos_512_v4
+9/57563/campos_512_v4
+9/57564/campos_512_v4
+9/57566/campos_512_v4
+9/57577/campos_512_v4
+9/57578/campos_512_v4
+9/57580/campos_512_v4
+9/57606/campos_512_v4
+9/57619/campos_512_v4
+9/57632/campos_512_v4
+9/57641/campos_512_v4
+9/57655/campos_512_v4
+9/57659/campos_512_v4
+9/57684/campos_512_v4
+9/57693/campos_512_v4
+9/57701/campos_512_v4
+9/57704/campos_512_v4
+9/57705/campos_512_v4
+9/57708/campos_512_v4
+9/57709/campos_512_v4
+9/57712/campos_512_v4
+9/57717/campos_512_v4
+9/57725/campos_512_v4
+9/57739/campos_512_v4
+9/57745/campos_512_v4
+9/57746/campos_512_v4
+9/57752/campos_512_v4
+9/57753/campos_512_v4
+9/57774/campos_512_v4
+9/57775/campos_512_v4
+9/57778/campos_512_v4
+9/57781/campos_512_v4
+9/57794/campos_512_v4
+9/57798/campos_512_v4
+9/57807/campos_512_v4
+9/57808/campos_512_v4
+9/57814/campos_512_v4
+9/57819/campos_512_v4
+9/57831/campos_512_v4
+9/57839/campos_512_v4
+9/57847/campos_512_v4
+9/57857/campos_512_v4
+9/57862/campos_512_v4
+9/57884/campos_512_v4
+9/57893/campos_512_v4
+9/57901/campos_512_v4
+9/57916/campos_512_v4
+9/57921/campos_512_v4
+9/57928/campos_512_v4
+9/57948/campos_512_v4
+9/57950/campos_512_v4
+9/57951/campos_512_v4
+9/57971/campos_512_v4
+9/57982/campos_512_v4
+9/57986/campos_512_v4
+9/58010/campos_512_v4
+9/58020/campos_512_v4
+9/58050/campos_512_v4
+9/58064/campos_512_v4
+9/58067/campos_512_v4
+9/58071/campos_512_v4
+9/58072/campos_512_v4
+9/58074/campos_512_v4
+9/58086/campos_512_v4
+9/58096/campos_512_v4
+9/58103/campos_512_v4
+9/58109/campos_512_v4
+9/58121/campos_512_v4
+9/58128/campos_512_v4
+9/58131/campos_512_v4
+9/58141/campos_512_v4
+9/58147/campos_512_v4
+9/58150/campos_512_v4
+9/58153/campos_512_v4
+9/58155/campos_512_v4
+9/58159/campos_512_v4
+9/58160/campos_512_v4
+9/58172/campos_512_v4
+9/58182/campos_512_v4
+9/58184/campos_512_v4
+9/58191/campos_512_v4
+9/58194/campos_512_v4
+9/58200/campos_512_v4
+9/58202/campos_512_v4
+9/58206/campos_512_v4
+9/58210/campos_512_v4
+9/58216/campos_512_v4
+9/58228/campos_512_v4
+9/58231/campos_512_v4
+9/58232/campos_512_v4
+9/58235/campos_512_v4
+9/58248/campos_512_v4
+9/58256/campos_512_v4
+9/58258/campos_512_v4
+9/58265/campos_512_v4
+9/58318/campos_512_v4
+9/58319/campos_512_v4
+9/58330/campos_512_v4
+9/58332/campos_512_v4
+9/58342/campos_512_v4
+9/58353/campos_512_v4
+9/58357/campos_512_v4
+9/58359/campos_512_v4
+9/58366/campos_512_v4
+9/58369/campos_512_v4
+9/58388/campos_512_v4
+9/58396/campos_512_v4
+9/58408/campos_512_v4
+9/58413/campos_512_v4
+9/58423/campos_512_v4
+9/58439/campos_512_v4
+9/58442/campos_512_v4
+9/58446/campos_512_v4
+9/58460/campos_512_v4
+9/58480/campos_512_v4
+9/58482/campos_512_v4
+9/58484/campos_512_v4
+9/58503/campos_512_v4
+9/58506/campos_512_v4
+9/58517/campos_512_v4
+9/58522/campos_512_v4
+9/58527/campos_512_v4
+9/58537/campos_512_v4
+9/58541/campos_512_v4
+9/58548/campos_512_v4
+9/58561/campos_512_v4
+9/58564/campos_512_v4
+9/58568/campos_512_v4
+9/58571/campos_512_v4
+9/58573/campos_512_v4
+9/58576/campos_512_v4
+9/58578/campos_512_v4
+9/58579/campos_512_v4
+9/58581/campos_512_v4
+9/58584/campos_512_v4
+9/58586/campos_512_v4
+9/58599/campos_512_v4
+9/58601/campos_512_v4
+9/58602/campos_512_v4
+9/58622/campos_512_v4
+9/58625/campos_512_v4
+9/58631/campos_512_v4
+9/58638/campos_512_v4
+9/58639/campos_512_v4
+9/58641/campos_512_v4
+9/58642/campos_512_v4
+9/58645/campos_512_v4
+9/58658/campos_512_v4
+9/58671/campos_512_v4
+9/58693/campos_512_v4
+9/58698/campos_512_v4
+9/58710/campos_512_v4
+9/58717/campos_512_v4
+9/58719/campos_512_v4
+9/58735/campos_512_v4
+9/58736/campos_512_v4
+9/58746/campos_512_v4
+9/58752/campos_512_v4
+9/58754/campos_512_v4
+9/58756/campos_512_v4
+9/58757/campos_512_v4
+9/58772/campos_512_v4
+9/58785/campos_512_v4
+9/58808/campos_512_v4
+9/58817/campos_512_v4
+9/58820/campos_512_v4
+9/58822/campos_512_v4
+9/58845/campos_512_v4
+9/58858/campos_512_v4
+9/58859/campos_512_v4
+9/58863/campos_512_v4
+9/58885/campos_512_v4
+9/58889/campos_512_v4
+9/58904/campos_512_v4
+9/58912/campos_512_v4
+9/58928/campos_512_v4
+9/58933/campos_512_v4
+9/58934/campos_512_v4
+9/58940/campos_512_v4
+9/58941/campos_512_v4
+9/58943/campos_512_v4
+9/58951/campos_512_v4
+9/58955/campos_512_v4
+9/58960/campos_512_v4
+9/58962/campos_512_v4
+9/58964/campos_512_v4
+9/58965/campos_512_v4
+9/58979/campos_512_v4
+9/58991/campos_512_v4
+9/58993/campos_512_v4
+9/59003/campos_512_v4
+9/59005/campos_512_v4
+9/59008/campos_512_v4
+9/59012/campos_512_v4
+9/59015/campos_512_v4
+9/59022/campos_512_v4
+9/59024/campos_512_v4
+9/59032/campos_512_v4
+9/59036/campos_512_v4
+9/59040/campos_512_v4
+9/59043/campos_512_v4
+9/59055/campos_512_v4
+9/59062/campos_512_v4
+9/59072/campos_512_v4
+9/59074/campos_512_v4
+9/59076/campos_512_v4
+9/59078/campos_512_v4
+9/59094/campos_512_v4
+9/59099/campos_512_v4
+9/59102/campos_512_v4
+9/59104/campos_512_v4
+9/59108/campos_512_v4
+9/59111/campos_512_v4
+9/59112/campos_512_v4
+9/59114/campos_512_v4
+9/59120/campos_512_v4
+9/59130/campos_512_v4
+9/59134/campos_512_v4
+9/59147/campos_512_v4
+9/59149/campos_512_v4
+9/59150/campos_512_v4
+9/59175/campos_512_v4
+9/59195/campos_512_v4
+9/59197/campos_512_v4
+9/59209/campos_512_v4
+9/59219/campos_512_v4
+9/59223/campos_512_v4
+9/59231/campos_512_v4
+9/59233/campos_512_v4
+9/59238/campos_512_v4
+9/59242/campos_512_v4
+9/59266/campos_512_v4
+9/59291/campos_512_v4
+9/59306/campos_512_v4
+9/59317/campos_512_v4
+9/59318/campos_512_v4
+9/59320/campos_512_v4
+9/59324/campos_512_v4
+9/59330/campos_512_v4
+9/59331/campos_512_v4
+9/59341/campos_512_v4
+9/59345/campos_512_v4
+9/59346/campos_512_v4
+9/59349/campos_512_v4
+9/59359/campos_512_v4
+9/59363/campos_512_v4
+9/59365/campos_512_v4
+9/59381/campos_512_v4
+9/59388/campos_512_v4
+9/59393/campos_512_v4
+9/59394/campos_512_v4
+9/59406/campos_512_v4
+9/59421/campos_512_v4
+9/59436/campos_512_v4
+9/59467/campos_512_v4
+9/59469/campos_512_v4
+9/59475/campos_512_v4
+9/59484/campos_512_v4
+9/59485/campos_512_v4
+9/59498/campos_512_v4
+9/59506/campos_512_v4
+9/59511/campos_512_v4
+9/59515/campos_512_v4
+9/59518/campos_512_v4
+9/59521/campos_512_v4
+9/59522/campos_512_v4
+9/59528/campos_512_v4
+9/59536/campos_512_v4
+9/59539/campos_512_v4
+9/59551/campos_512_v4
+9/59555/campos_512_v4
+9/59562/campos_512_v4
+9/59571/campos_512_v4
+9/59583/campos_512_v4
+9/59587/campos_512_v4
+9/59596/campos_512_v4
+9/59597/campos_512_v4
+9/59608/campos_512_v4
+9/59609/campos_512_v4
+9/59612/campos_512_v4
+9/59621/campos_512_v4
+9/59636/campos_512_v4
+9/59648/campos_512_v4
+9/59660/campos_512_v4
+9/59661/campos_512_v4
+9/59692/campos_512_v4
+9/59700/campos_512_v4
+9/59712/campos_512_v4
+9/59713/campos_512_v4
+9/59714/campos_512_v4
+9/59721/campos_512_v4
+9/59733/campos_512_v4
+9/59754/campos_512_v4
+9/59762/campos_512_v4
+9/59766/campos_512_v4
+9/59767/campos_512_v4
+9/59773/campos_512_v4
+9/59775/campos_512_v4
+9/59779/campos_512_v4
+9/59783/campos_512_v4
+9/59784/campos_512_v4
+9/59802/campos_512_v4
+9/59811/campos_512_v4
+9/59814/campos_512_v4
+9/59816/campos_512_v4
+9/59817/campos_512_v4
+9/59821/campos_512_v4
+9/59824/campos_512_v4
+9/59827/campos_512_v4
+9/59830/campos_512_v4
+9/59850/campos_512_v4
+9/59857/campos_512_v4
+9/59867/campos_512_v4
+9/59869/campos_512_v4
+9/59870/campos_512_v4
+9/59871/campos_512_v4
+9/59874/campos_512_v4
+9/59885/campos_512_v4
+9/59889/campos_512_v4
+9/59905/campos_512_v4
+9/59912/campos_512_v4
+9/59913/campos_512_v4
+9/59916/campos_512_v4
+9/59934/campos_512_v4
+9/59935/campos_512_v4
+9/59945/campos_512_v4
+9/59954/campos_512_v4
+9/59973/campos_512_v4
+9/59980/campos_512_v4
+9/59981/campos_512_v4
+9/59985/campos_512_v4
+9/59988/campos_512_v4
+9/59996/campos_512_v4
+9/59998/campos_512_v4
+90/460010/campos_512_v4
+90/460011/campos_512_v4
+90/460020/campos_512_v4
+90/460031/campos_512_v4
+90/460033/campos_512_v4
+90/460035/campos_512_v4
+90/460039/campos_512_v4
+90/460040/campos_512_v4
+90/460041/campos_512_v4
+90/460046/campos_512_v4
+90/460067/campos_512_v4
+90/460086/campos_512_v4
+90/460090/campos_512_v4
+90/460111/campos_512_v4
+90/460116/campos_512_v4
+90/460118/campos_512_v4
+90/460128/campos_512_v4
+90/460139/campos_512_v4
+90/460143/campos_512_v4
+90/460156/campos_512_v4
+90/460165/campos_512_v4
+90/460167/campos_512_v4
+90/460177/campos_512_v4
+90/460178/campos_512_v4
+90/460181/campos_512_v4
+90/460183/campos_512_v4
+90/460187/campos_512_v4
+90/460193/campos_512_v4
+90/460211/campos_512_v4
+90/460219/campos_512_v4
+90/460231/campos_512_v4
+90/460233/campos_512_v4
+90/460240/campos_512_v4
+90/460261/campos_512_v4
+90/460286/campos_512_v4
+90/460310/campos_512_v4
+90/460321/campos_512_v4
+90/460330/campos_512_v4
+90/460343/campos_512_v4
+90/460345/campos_512_v4
+90/460370/campos_512_v4
+90/460376/campos_512_v4
+90/460377/campos_512_v4
+90/460378/campos_512_v4
+90/460390/campos_512_v4
+90/460395/campos_512_v4
+90/460396/campos_512_v4
+90/460401/campos_512_v4
+90/460407/campos_512_v4
+90/460409/campos_512_v4
+90/460410/campos_512_v4
+90/460411/campos_512_v4
+90/460426/campos_512_v4
+90/460430/campos_512_v4
+90/460434/campos_512_v4
+90/460446/campos_512_v4
+90/460450/campos_512_v4
+90/460466/campos_512_v4
+90/460471/campos_512_v4
+90/460492/campos_512_v4
+90/460495/campos_512_v4
+90/460507/campos_512_v4
+90/460516/campos_512_v4
+90/460522/campos_512_v4
+90/460523/campos_512_v4
+90/460527/campos_512_v4
+90/460530/campos_512_v4
+90/460538/campos_512_v4
+90/460546/campos_512_v4
+90/460559/campos_512_v4
+90/460569/campos_512_v4
+90/460573/campos_512_v4
+90/460579/campos_512_v4
+90/460589/campos_512_v4
+90/460591/campos_512_v4
+90/460604/campos_512_v4
+90/460605/campos_512_v4
+90/460622/campos_512_v4
+90/460629/campos_512_v4
+90/460643/campos_512_v4
+90/460645/campos_512_v4
+90/460646/campos_512_v4
+90/460649/campos_512_v4
+90/460657/campos_512_v4
+90/460658/campos_512_v4
+90/460666/campos_512_v4
+90/460672/campos_512_v4
+90/460673/campos_512_v4
+90/460674/campos_512_v4
+90/460677/campos_512_v4
+90/460699/campos_512_v4
+90/460701/campos_512_v4
+90/460705/campos_512_v4
+90/460767/campos_512_v4
+90/460773/campos_512_v4
+90/460823/campos_512_v4
+90/460843/campos_512_v4
+90/460852/campos_512_v4
+90/460859/campos_512_v4
+90/460862/campos_512_v4
+90/460880/campos_512_v4
+90/460895/campos_512_v4
+90/460897/campos_512_v4
+90/460905/campos_512_v4
+90/460909/campos_512_v4
+90/460913/campos_512_v4
+90/460927/campos_512_v4
+90/460936/campos_512_v4
+90/460939/campos_512_v4
+90/460941/campos_512_v4
+90/460945/campos_512_v4
+90/460951/campos_512_v4
+90/460955/campos_512_v4
+90/460962/campos_512_v4
+90/460965/campos_512_v4
+90/460978/campos_512_v4
+90/460985/campos_512_v4
+90/460987/campos_512_v4
+90/460990/campos_512_v4
+90/460998/campos_512_v4
+90/461001/campos_512_v4
+90/461009/campos_512_v4
+90/461012/campos_512_v4
+90/461025/campos_512_v4
+90/461026/campos_512_v4
+90/461029/campos_512_v4
+90/461030/campos_512_v4
+90/461032/campos_512_v4
+90/461050/campos_512_v4
+90/461053/campos_512_v4
+90/461056/campos_512_v4
+90/461066/campos_512_v4
+90/461073/campos_512_v4
+90/461074/campos_512_v4
+90/461076/campos_512_v4
+90/461077/campos_512_v4
+90/461078/campos_512_v4
+90/461080/campos_512_v4
+90/461086/campos_512_v4
+90/461092/campos_512_v4
+90/461103/campos_512_v4
+90/461107/campos_512_v4
+90/461111/campos_512_v4
+90/461115/campos_512_v4
+90/461116/campos_512_v4
+90/461118/campos_512_v4
+90/461131/campos_512_v4
+90/461133/campos_512_v4
+90/461144/campos_512_v4
+90/461161/campos_512_v4
+90/461166/campos_512_v4
+90/461183/campos_512_v4
+90/461185/campos_512_v4
+90/461196/campos_512_v4
+90/461209/campos_512_v4
+90/461212/campos_512_v4
+90/461219/campos_512_v4
+90/461223/campos_512_v4
+90/461231/campos_512_v4
+90/461238/campos_512_v4
+90/461240/campos_512_v4
+90/461246/campos_512_v4
+90/461267/campos_512_v4
+90/461271/campos_512_v4
+90/461277/campos_512_v4
+90/461296/campos_512_v4
+90/461308/campos_512_v4
+90/461314/campos_512_v4
+90/461318/campos_512_v4
+90/461319/campos_512_v4
+90/461330/campos_512_v4
+90/461334/campos_512_v4
+90/461355/campos_512_v4
+90/461363/campos_512_v4
+90/461364/campos_512_v4
+90/461370/campos_512_v4
+90/461377/campos_512_v4
+90/461385/campos_512_v4
+90/461386/campos_512_v4
+90/461400/campos_512_v4
+90/461401/campos_512_v4
+90/461410/campos_512_v4
+90/461412/campos_512_v4
+90/461414/campos_512_v4
+90/461416/campos_512_v4
+90/461427/campos_512_v4
+90/461431/campos_512_v4
+90/461460/campos_512_v4
+90/461491/campos_512_v4
+90/461496/campos_512_v4
+90/461503/campos_512_v4
+90/461509/campos_512_v4
+90/461513/campos_512_v4
+90/461525/campos_512_v4
+90/461539/campos_512_v4
+90/461568/campos_512_v4
+90/461572/campos_512_v4
+90/461582/campos_512_v4
+90/461591/campos_512_v4
+90/461598/campos_512_v4
+90/461600/campos_512_v4
+90/461615/campos_512_v4
+90/461622/campos_512_v4
+90/461623/campos_512_v4
+90/461624/campos_512_v4
+90/461628/campos_512_v4
+90/461629/campos_512_v4
+90/461630/campos_512_v4
+90/461632/campos_512_v4
+90/461634/campos_512_v4
+90/461639/campos_512_v4
+90/461640/campos_512_v4
+90/461648/campos_512_v4
+90/461651/campos_512_v4
+90/461659/campos_512_v4
+90/461681/campos_512_v4
+90/461698/campos_512_v4
+90/461706/campos_512_v4
+90/461711/campos_512_v4
+90/461714/campos_512_v4
+90/461719/campos_512_v4
+90/461720/campos_512_v4
+90/461732/campos_512_v4
+90/461735/campos_512_v4
+90/461738/campos_512_v4
+90/461748/campos_512_v4
+90/461768/campos_512_v4
+90/461773/campos_512_v4
+90/461774/campos_512_v4
+90/461775/campos_512_v4
+90/461776/campos_512_v4
+90/461777/campos_512_v4
+90/461810/campos_512_v4
+90/461819/campos_512_v4
+90/461820/campos_512_v4
+90/461823/campos_512_v4
+90/461825/campos_512_v4
+90/461826/campos_512_v4
+90/461831/campos_512_v4
+90/461832/campos_512_v4
+90/461859/campos_512_v4
+90/461876/campos_512_v4
+90/461880/campos_512_v4
+90/461884/campos_512_v4
+90/461888/campos_512_v4
+90/461890/campos_512_v4
+90/461893/campos_512_v4
+90/461894/campos_512_v4
+90/461900/campos_512_v4
+90/461913/campos_512_v4
+90/461923/campos_512_v4
+90/461927/campos_512_v4
+90/461928/campos_512_v4
+90/461930/campos_512_v4
+90/461931/campos_512_v4
+90/461944/campos_512_v4
+90/461951/campos_512_v4
+90/461961/campos_512_v4
+90/461976/campos_512_v4
+90/461983/campos_512_v4
+90/461988/campos_512_v4
+90/461992/campos_512_v4
+90/461998/campos_512_v4
+90/462000/campos_512_v4
+90/462005/campos_512_v4
+90/462009/campos_512_v4
+90/462011/campos_512_v4
+90/462021/campos_512_v4
+90/462027/campos_512_v4
+90/462031/campos_512_v4
+90/462032/campos_512_v4
+90/462036/campos_512_v4
+90/462054/campos_512_v4
+90/462055/campos_512_v4
+90/462068/campos_512_v4
+90/462075/campos_512_v4
+90/462084/campos_512_v4
+90/462088/campos_512_v4
+90/462092/campos_512_v4
+90/462099/campos_512_v4
+90/462109/campos_512_v4
+90/462114/campos_512_v4
+90/462128/campos_512_v4
+90/462129/campos_512_v4
+90/462148/campos_512_v4
+90/462164/campos_512_v4
+90/462169/campos_512_v4
+90/462174/campos_512_v4
+90/462176/campos_512_v4
+90/462183/campos_512_v4
+90/462184/campos_512_v4
+90/462194/campos_512_v4
+90/462203/campos_512_v4
+90/462207/campos_512_v4
+90/462220/campos_512_v4
+90/462228/campos_512_v4
+90/462232/campos_512_v4
+90/462242/campos_512_v4
+90/462250/campos_512_v4
+90/462255/campos_512_v4
+90/462270/campos_512_v4
+90/462272/campos_512_v4
+90/462286/campos_512_v4
+90/462290/campos_512_v4
+90/462328/campos_512_v4
+90/462334/campos_512_v4
+90/462338/campos_512_v4
+90/462342/campos_512_v4
+90/462346/campos_512_v4
+90/462350/campos_512_v4
+90/462356/campos_512_v4
+90/462359/campos_512_v4
+90/462364/campos_512_v4
+90/462378/campos_512_v4
+90/462380/campos_512_v4
+90/462383/campos_512_v4
+90/462397/campos_512_v4
+90/462403/campos_512_v4
+90/462404/campos_512_v4
+90/462415/campos_512_v4
+90/462416/campos_512_v4
+90/462427/campos_512_v4
+90/462435/campos_512_v4
+90/462437/campos_512_v4
+90/462443/campos_512_v4
+90/462467/campos_512_v4
+90/462469/campos_512_v4
+90/462485/campos_512_v4
+90/462492/campos_512_v4
+90/462506/campos_512_v4
+90/462512/campos_512_v4
+90/462513/campos_512_v4
+90/462517/campos_512_v4
+90/462553/campos_512_v4
+90/462573/campos_512_v4
+90/462581/campos_512_v4
+90/462584/campos_512_v4
+90/462587/campos_512_v4
+90/462595/campos_512_v4
+90/462609/campos_512_v4
+90/462610/campos_512_v4
+90/462639/campos_512_v4
+90/462643/campos_512_v4
+90/462647/campos_512_v4
+90/462657/campos_512_v4
+90/462658/campos_512_v4
+90/462663/campos_512_v4
+90/462666/campos_512_v4
+90/462682/campos_512_v4
+90/462688/campos_512_v4
+90/462690/campos_512_v4
+90/462695/campos_512_v4
+90/462701/campos_512_v4
+90/462714/campos_512_v4
+90/462728/campos_512_v4
+90/462738/campos_512_v4
+90/462748/campos_512_v4
+90/462749/campos_512_v4
+90/462757/campos_512_v4
+90/462768/campos_512_v4
+90/462772/campos_512_v4
+90/462776/campos_512_v4
+90/462787/campos_512_v4
+90/462788/campos_512_v4
+90/462790/campos_512_v4
+90/462797/campos_512_v4
+90/462799/campos_512_v4
+90/462801/campos_512_v4
+90/462813/campos_512_v4
+90/462826/campos_512_v4
+90/462839/campos_512_v4
+90/462841/campos_512_v4
+90/462844/campos_512_v4
+90/462854/campos_512_v4
+90/462857/campos_512_v4
+90/462868/campos_512_v4
+90/462872/campos_512_v4
+90/462881/campos_512_v4
+90/462902/campos_512_v4
+90/462910/campos_512_v4
+90/462911/campos_512_v4
+90/462913/campos_512_v4
+90/462921/campos_512_v4
+90/462932/campos_512_v4
+90/462943/campos_512_v4
+90/462948/campos_512_v4
+90/462949/campos_512_v4
+90/462951/campos_512_v4
+90/462963/campos_512_v4
+90/462975/campos_512_v4
+90/462977/campos_512_v4
+90/462979/campos_512_v4
+90/462985/campos_512_v4
+90/462990/campos_512_v4
+90/463002/campos_512_v4
+90/463003/campos_512_v4
+90/463029/campos_512_v4
+90/463031/campos_512_v4
+90/463034/campos_512_v4
+90/463054/campos_512_v4
+90/463058/campos_512_v4
+90/463082/campos_512_v4
+90/463089/campos_512_v4
+90/463090/campos_512_v4
+90/463100/campos_512_v4
+90/463101/campos_512_v4
+90/463105/campos_512_v4
+90/463107/campos_512_v4
+90/463117/campos_512_v4
+90/463126/campos_512_v4
+90/463127/campos_512_v4
+90/463134/campos_512_v4
+90/463137/campos_512_v4
+90/463144/campos_512_v4
+90/463145/campos_512_v4
+90/463147/campos_512_v4
+90/463167/campos_512_v4
+90/463173/campos_512_v4
+90/463177/campos_512_v4
+90/463180/campos_512_v4
+90/463185/campos_512_v4
+90/463188/campos_512_v4
+90/463199/campos_512_v4
+90/463204/campos_512_v4
+90/463212/campos_512_v4
+90/463220/campos_512_v4
+90/463222/campos_512_v4
+90/463251/campos_512_v4
+90/463254/campos_512_v4
+90/463257/campos_512_v4
+90/463262/campos_512_v4
+90/463265/campos_512_v4
+90/463270/campos_512_v4
+90/463283/campos_512_v4
+90/463290/campos_512_v4
+90/463294/campos_512_v4
+90/463296/campos_512_v4
+90/463308/campos_512_v4
+90/463320/campos_512_v4
+90/463326/campos_512_v4
+90/463327/campos_512_v4
+90/463330/campos_512_v4
+90/463333/campos_512_v4
+90/463335/campos_512_v4
+90/463339/campos_512_v4
+90/463345/campos_512_v4
+90/463384/campos_512_v4
+90/463400/campos_512_v4
+90/463411/campos_512_v4
+90/463442/campos_512_v4
+90/463476/campos_512_v4
+90/463487/campos_512_v4
+90/463499/campos_512_v4
+90/463502/campos_512_v4
+90/463523/campos_512_v4
+90/463524/campos_512_v4
+90/463525/campos_512_v4
+90/463535/campos_512_v4
+90/463541/campos_512_v4
+90/463543/campos_512_v4
+90/463545/campos_512_v4
+90/463553/campos_512_v4
+90/463560/campos_512_v4
+90/463566/campos_512_v4
+90/463597/campos_512_v4
+90/463611/campos_512_v4
+90/463617/campos_512_v4
+90/463619/campos_512_v4
+90/463628/campos_512_v4
+90/463676/campos_512_v4
+90/463678/campos_512_v4
+90/463687/campos_512_v4
+90/463690/campos_512_v4
+90/463698/campos_512_v4
+90/463700/campos_512_v4
+90/463710/campos_512_v4
+90/463714/campos_512_v4
+90/463721/campos_512_v4
+90/463724/campos_512_v4
+90/463734/campos_512_v4
+90/463736/campos_512_v4
+90/463738/campos_512_v4
+90/463745/campos_512_v4
+90/463747/campos_512_v4
+90/463763/campos_512_v4
+90/463766/campos_512_v4
+90/463770/campos_512_v4
+90/463775/campos_512_v4
+90/463779/campos_512_v4
+90/463780/campos_512_v4
+90/463785/campos_512_v4
+90/463789/campos_512_v4
+90/463790/campos_512_v4
+90/463793/campos_512_v4
+90/463796/campos_512_v4
+90/463799/campos_512_v4
+90/463808/campos_512_v4
+90/463829/campos_512_v4
+90/463838/campos_512_v4
+90/463860/campos_512_v4
+90/463861/campos_512_v4
+90/463865/campos_512_v4
+90/463870/campos_512_v4
+90/463875/campos_512_v4
+90/463877/campos_512_v4
+90/463879/campos_512_v4
+90/463880/campos_512_v4
+90/463887/campos_512_v4
+90/463889/campos_512_v4
+90/463892/campos_512_v4
+90/463900/campos_512_v4
+90/463911/campos_512_v4
+90/463915/campos_512_v4
+90/463920/campos_512_v4
+90/463922/campos_512_v4
+90/463932/campos_512_v4
+90/463933/campos_512_v4
+90/463959/campos_512_v4
+90/463966/campos_512_v4
+90/463979/campos_512_v4
+90/463985/campos_512_v4
+90/463990/campos_512_v4
+90/464018/campos_512_v4
+90/464020/campos_512_v4
+90/464030/campos_512_v4
+90/464037/campos_512_v4
+90/464050/campos_512_v4
+90/464079/campos_512_v4
+90/464084/campos_512_v4
+90/464093/campos_512_v4
+90/464094/campos_512_v4
+90/464106/campos_512_v4
+90/464113/campos_512_v4
+90/464114/campos_512_v4
+90/464115/campos_512_v4
+90/464122/campos_512_v4
+90/464129/campos_512_v4
+90/464146/campos_512_v4
+90/464151/campos_512_v4
+90/464154/campos_512_v4
+90/464158/campos_512_v4
+90/464167/campos_512_v4
+90/464172/campos_512_v4
+90/464184/campos_512_v4
+90/464187/campos_512_v4
+90/464192/campos_512_v4
+90/464195/campos_512_v4
+90/464225/campos_512_v4
+90/464226/campos_512_v4
+90/464229/campos_512_v4
+90/464252/campos_512_v4
+90/464255/campos_512_v4
+90/464257/campos_512_v4
+90/464264/campos_512_v4
+90/464265/campos_512_v4
+90/464269/campos_512_v4
+90/464282/campos_512_v4
+90/464293/campos_512_v4
+90/464294/campos_512_v4
+90/464310/campos_512_v4
+90/464312/campos_512_v4
+90/464314/campos_512_v4
+90/464316/campos_512_v4
+90/464320/campos_512_v4
+90/464325/campos_512_v4
+90/464326/campos_512_v4
+90/464341/campos_512_v4
+90/464342/campos_512_v4
+90/464346/campos_512_v4
+90/464348/campos_512_v4
+90/464355/campos_512_v4
+90/464365/campos_512_v4
+90/464367/campos_512_v4
+90/464375/campos_512_v4
+90/464385/campos_512_v4
+90/464394/campos_512_v4
+90/464400/campos_512_v4
+90/464415/campos_512_v4
+90/464419/campos_512_v4
+90/464420/campos_512_v4
+90/464426/campos_512_v4
+90/464431/campos_512_v4
+90/464433/campos_512_v4
+90/464435/campos_512_v4
+90/464440/campos_512_v4
+90/464447/campos_512_v4
+90/464456/campos_512_v4
+90/464457/campos_512_v4
+90/464467/campos_512_v4
+90/464476/campos_512_v4
+90/464489/campos_512_v4
+90/464494/campos_512_v4
+90/464502/campos_512_v4
+90/464511/campos_512_v4
+90/464513/campos_512_v4
+90/464517/campos_512_v4
+90/464523/campos_512_v4
+90/464525/campos_512_v4
+90/464536/campos_512_v4
+90/464541/campos_512_v4
+90/464546/campos_512_v4
+90/464556/campos_512_v4
+90/464573/campos_512_v4
+90/464575/campos_512_v4
+90/464578/campos_512_v4
+90/464583/campos_512_v4
+90/464584/campos_512_v4
+90/464592/campos_512_v4
+90/464598/campos_512_v4
+90/464599/campos_512_v4
+90/464603/campos_512_v4
+90/464606/campos_512_v4
+90/464609/campos_512_v4
+90/464636/campos_512_v4
+90/464648/campos_512_v4
+90/464650/campos_512_v4
+90/464652/campos_512_v4
+90/464658/campos_512_v4
+90/464659/campos_512_v4
+90/464662/campos_512_v4
+90/464673/campos_512_v4
+90/464685/campos_512_v4
+90/464693/campos_512_v4
+90/464700/campos_512_v4
+90/464703/campos_512_v4
+90/464704/campos_512_v4
+90/464722/campos_512_v4
+90/464723/campos_512_v4
+90/464729/campos_512_v4
+90/464734/campos_512_v4
+90/464738/campos_512_v4
+90/464741/campos_512_v4
+90/464743/campos_512_v4
+90/464747/campos_512_v4
+90/464765/campos_512_v4
+90/464793/campos_512_v4
+90/464795/campos_512_v4
+90/464810/campos_512_v4
+90/464820/campos_512_v4
+90/464823/campos_512_v4
+90/464828/campos_512_v4
+90/464838/campos_512_v4
+90/464851/campos_512_v4
+90/464854/campos_512_v4
+90/464859/campos_512_v4
+90/464864/campos_512_v4
+90/464871/campos_512_v4
+90/464872/campos_512_v4
+90/464889/campos_512_v4
+90/464897/campos_512_v4
+90/464901/campos_512_v4
+90/464906/campos_512_v4
+90/464908/campos_512_v4
+90/464915/campos_512_v4
+90/464931/campos_512_v4
+90/464948/campos_512_v4
+90/464961/campos_512_v4
+90/464988/campos_512_v4
+90/464991/campos_512_v4
+90/464992/campos_512_v4
+91/465006/campos_512_v4
+91/465010/campos_512_v4
+91/465015/campos_512_v4
+91/465042/campos_512_v4
+91/465060/campos_512_v4
+91/465063/campos_512_v4
+91/465091/campos_512_v4
+91/465094/campos_512_v4
+91/465097/campos_512_v4
+91/465106/campos_512_v4
+91/465108/campos_512_v4
+91/465117/campos_512_v4
+91/465130/campos_512_v4
+91/465131/campos_512_v4
+91/465135/campos_512_v4
+91/465157/campos_512_v4
+91/465162/campos_512_v4
+91/465174/campos_512_v4
+91/465177/campos_512_v4
+91/465184/campos_512_v4
+91/465190/campos_512_v4
+91/465206/campos_512_v4
+91/465219/campos_512_v4
+91/465223/campos_512_v4
+91/465229/campos_512_v4
+91/465240/campos_512_v4
+91/465244/campos_512_v4
+91/465246/campos_512_v4
+91/465255/campos_512_v4
+91/465264/campos_512_v4
+91/465273/campos_512_v4
+91/465276/campos_512_v4
+91/465279/campos_512_v4
+91/465282/campos_512_v4
+91/465286/campos_512_v4
+91/465287/campos_512_v4
+91/465293/campos_512_v4
+91/465345/campos_512_v4
+91/465355/campos_512_v4
+91/465367/campos_512_v4
+91/465406/campos_512_v4
+91/465424/campos_512_v4
+91/465438/campos_512_v4
+91/465441/campos_512_v4
+91/465452/campos_512_v4
+91/465461/campos_512_v4
+91/465468/campos_512_v4
+91/465471/campos_512_v4
+91/465479/campos_512_v4
+91/465480/campos_512_v4
+91/465483/campos_512_v4
+91/465488/campos_512_v4
+91/465491/campos_512_v4
+91/465493/campos_512_v4
+91/465499/campos_512_v4
+91/465500/campos_512_v4
+91/465501/campos_512_v4
+91/465504/campos_512_v4
+91/465507/campos_512_v4
+91/465532/campos_512_v4
+91/465536/campos_512_v4
+91/465538/campos_512_v4
+91/465546/campos_512_v4
+91/465547/campos_512_v4
+91/465579/campos_512_v4
+91/465591/campos_512_v4
+91/465609/campos_512_v4
+91/465611/campos_512_v4
+91/465626/campos_512_v4
+91/465632/campos_512_v4
+91/465635/campos_512_v4
+91/465647/campos_512_v4
+91/465654/campos_512_v4
+91/465671/campos_512_v4
+91/465678/campos_512_v4
+91/465687/campos_512_v4
+91/465703/campos_512_v4
+91/465716/campos_512_v4
+91/465717/campos_512_v4
+91/465719/campos_512_v4
+91/465728/campos_512_v4
+91/465729/campos_512_v4
+91/465730/campos_512_v4
+91/465732/campos_512_v4
+91/465742/campos_512_v4
+91/465744/campos_512_v4
+91/465745/campos_512_v4
+91/465760/campos_512_v4
+91/465765/campos_512_v4
+91/465773/campos_512_v4
+91/465820/campos_512_v4
+91/465825/campos_512_v4
+91/465828/campos_512_v4
+91/465837/campos_512_v4
+91/465845/campos_512_v4
+91/465846/campos_512_v4
+91/465853/campos_512_v4
+91/465855/campos_512_v4
+91/465863/campos_512_v4
+91/465866/campos_512_v4
+91/465871/campos_512_v4
+91/465873/campos_512_v4
+91/465887/campos_512_v4
+91/465896/campos_512_v4
+91/465901/campos_512_v4
+91/465919/campos_512_v4
+91/465932/campos_512_v4
+91/465933/campos_512_v4
+91/465934/campos_512_v4
+91/465940/campos_512_v4
+91/465952/campos_512_v4
+91/465957/campos_512_v4
+91/465961/campos_512_v4
+91/465967/campos_512_v4
+91/465982/campos_512_v4
+91/465990/campos_512_v4
+91/466013/campos_512_v4
+91/466019/campos_512_v4
+91/466024/campos_512_v4
+91/466027/campos_512_v4
+91/466030/campos_512_v4
+91/466042/campos_512_v4
+91/466055/campos_512_v4
+91/466074/campos_512_v4
+91/466088/campos_512_v4
+91/466093/campos_512_v4
+91/466100/campos_512_v4
+91/466105/campos_512_v4
+91/466106/campos_512_v4
+91/466113/campos_512_v4
+91/466115/campos_512_v4
+91/466118/campos_512_v4
+91/466129/campos_512_v4
+91/466147/campos_512_v4
+91/466167/campos_512_v4
+91/466169/campos_512_v4
+91/466170/campos_512_v4
+91/466179/campos_512_v4
+91/466186/campos_512_v4
+91/466200/campos_512_v4
+91/466202/campos_512_v4
+91/466206/campos_512_v4
+91/466233/campos_512_v4
+91/466246/campos_512_v4
+91/466251/campos_512_v4
+91/466260/campos_512_v4
+91/466265/campos_512_v4
+91/466269/campos_512_v4
+91/466270/campos_512_v4
+91/466272/campos_512_v4
+91/466279/campos_512_v4
+91/466285/campos_512_v4
+91/466301/campos_512_v4
+91/466308/campos_512_v4
+91/466311/campos_512_v4
+91/466312/campos_512_v4
+91/466313/campos_512_v4
+91/466326/campos_512_v4
+91/466335/campos_512_v4
+91/466349/campos_512_v4
+91/466352/campos_512_v4
+91/466356/campos_512_v4
+91/466381/campos_512_v4
+91/466386/campos_512_v4
+91/466388/campos_512_v4
+91/466391/campos_512_v4
+91/466400/campos_512_v4
+91/466401/campos_512_v4
+91/466402/campos_512_v4
+91/466408/campos_512_v4
+91/466411/campos_512_v4
+91/466415/campos_512_v4
+91/466420/campos_512_v4
+91/466425/campos_512_v4
+91/466450/campos_512_v4
+91/466451/campos_512_v4
+91/466457/campos_512_v4
+91/466459/campos_512_v4
+91/466470/campos_512_v4
+91/466494/campos_512_v4
+91/466496/campos_512_v4
+91/466502/campos_512_v4
+91/466505/campos_512_v4
+91/466513/campos_512_v4
+91/466519/campos_512_v4
+91/466532/campos_512_v4
+91/466536/campos_512_v4
+91/466539/campos_512_v4
+91/466542/campos_512_v4
+91/466546/campos_512_v4
+91/466556/campos_512_v4
+91/466567/campos_512_v4
+91/466574/campos_512_v4
+91/466579/campos_512_v4
+91/466586/campos_512_v4
+91/466587/campos_512_v4
+91/466590/campos_512_v4
+91/466592/campos_512_v4
+91/466595/campos_512_v4
+91/466596/campos_512_v4
+91/466615/campos_512_v4
+91/466631/campos_512_v4
+91/466633/campos_512_v4
+91/466634/campos_512_v4
+91/466641/campos_512_v4
+91/466643/campos_512_v4
+91/466648/campos_512_v4
+91/466653/campos_512_v4
+91/466656/campos_512_v4
+91/466657/campos_512_v4
+91/466663/campos_512_v4
+91/466682/campos_512_v4
+91/466683/campos_512_v4
+91/466685/campos_512_v4
+91/466688/campos_512_v4
+91/466689/campos_512_v4
+91/466706/campos_512_v4
+91/466732/campos_512_v4
+91/466737/campos_512_v4
+91/466741/campos_512_v4
+91/466747/campos_512_v4
+91/466772/campos_512_v4
+91/466793/campos_512_v4
+91/466801/campos_512_v4
+91/466808/campos_512_v4
+91/466810/campos_512_v4
+91/466835/campos_512_v4
+91/466838/campos_512_v4
+91/466849/campos_512_v4
+91/466853/campos_512_v4
+91/466870/campos_512_v4
+91/466882/campos_512_v4
+91/466908/campos_512_v4
+91/466916/campos_512_v4
+91/466919/campos_512_v4
+91/466923/campos_512_v4
+91/466928/campos_512_v4
+91/466944/campos_512_v4
+91/466945/campos_512_v4
+91/466965/campos_512_v4
+91/466988/campos_512_v4
+91/467008/campos_512_v4
+91/467014/campos_512_v4
+91/467017/campos_512_v4
+91/467020/campos_512_v4
+91/467021/campos_512_v4
+91/467034/campos_512_v4
+91/467037/campos_512_v4
+91/467039/campos_512_v4
+91/467040/campos_512_v4
+91/467042/campos_512_v4
+91/467055/campos_512_v4
+91/467067/campos_512_v4
+91/467074/campos_512_v4
+91/467081/campos_512_v4
+91/467082/campos_512_v4
+91/467086/campos_512_v4
+91/467090/campos_512_v4
+91/467092/campos_512_v4
+91/467099/campos_512_v4
+91/467104/campos_512_v4
+91/467107/campos_512_v4
+91/467117/campos_512_v4
+91/467139/campos_512_v4
+91/467141/campos_512_v4
+91/467150/campos_512_v4
+91/467160/campos_512_v4
+91/467168/campos_512_v4
+91/467176/campos_512_v4
+91/467181/campos_512_v4
+91/467184/campos_512_v4
+91/467185/campos_512_v4
+91/467191/campos_512_v4
+91/467198/campos_512_v4
+91/467202/campos_512_v4
+91/467211/campos_512_v4
+91/467216/campos_512_v4
+91/467217/campos_512_v4
+91/467231/campos_512_v4
+91/467251/campos_512_v4
+91/467258/campos_512_v4
+91/467260/campos_512_v4
+91/467321/campos_512_v4
+91/467329/campos_512_v4
+91/467348/campos_512_v4
+91/467350/campos_512_v4
+91/467352/campos_512_v4
+91/467375/campos_512_v4
+91/467380/campos_512_v4
+91/467389/campos_512_v4
+91/467391/campos_512_v4
+91/467399/campos_512_v4
+91/467402/campos_512_v4
+91/467404/campos_512_v4
+91/467427/campos_512_v4
+91/467432/campos_512_v4
+91/467442/campos_512_v4
+91/467459/campos_512_v4
+91/467472/campos_512_v4
+91/467476/campos_512_v4
+91/467482/campos_512_v4
+91/467484/campos_512_v4
+91/467493/campos_512_v4
+91/467495/campos_512_v4
+91/467513/campos_512_v4
+91/467514/campos_512_v4
+91/467518/campos_512_v4
+91/467521/campos_512_v4
+91/467523/campos_512_v4
+91/467526/campos_512_v4
+91/467529/campos_512_v4
+91/467530/campos_512_v4
+91/467535/campos_512_v4
+91/467538/campos_512_v4
+91/467544/campos_512_v4
+91/467545/campos_512_v4
+91/467546/campos_512_v4
+91/467548/campos_512_v4
+91/467569/campos_512_v4
+91/467573/campos_512_v4
+91/467574/campos_512_v4
+91/467581/campos_512_v4
+91/467584/campos_512_v4
+91/467605/campos_512_v4
+91/467606/campos_512_v4
+91/467628/campos_512_v4
+91/467631/campos_512_v4
+91/467633/campos_512_v4
+91/467641/campos_512_v4
+91/467649/campos_512_v4
+91/467650/campos_512_v4
+91/467654/campos_512_v4
+91/467661/campos_512_v4
+91/467669/campos_512_v4
+91/467673/campos_512_v4
+91/467716/campos_512_v4
+91/467718/campos_512_v4
+91/467721/campos_512_v4
+91/467726/campos_512_v4
+91/467730/campos_512_v4
+91/467737/campos_512_v4
+91/467738/campos_512_v4
+91/467739/campos_512_v4
+91/467740/campos_512_v4
+91/467746/campos_512_v4
+91/467768/campos_512_v4
+91/467769/campos_512_v4
+91/467791/campos_512_v4
+91/467794/campos_512_v4
+91/467795/campos_512_v4
+91/467796/campos_512_v4
+91/467807/campos_512_v4
+91/467828/campos_512_v4
+91/467831/campos_512_v4
+91/467833/campos_512_v4
+91/467837/campos_512_v4
+91/467838/campos_512_v4
+91/467849/campos_512_v4
+91/467850/campos_512_v4
+91/467851/campos_512_v4
+91/467855/campos_512_v4
+91/467867/campos_512_v4
+91/467873/campos_512_v4
+91/467878/campos_512_v4
+91/467881/campos_512_v4
+91/467882/campos_512_v4
+91/467910/campos_512_v4
+91/467913/campos_512_v4
+91/467920/campos_512_v4
+91/467953/campos_512_v4
+91/467959/campos_512_v4
+91/467960/campos_512_v4
+91/467966/campos_512_v4
+91/467969/campos_512_v4
+91/467970/campos_512_v4
+91/467971/campos_512_v4
+91/467978/campos_512_v4
+91/468008/campos_512_v4
+91/468012/campos_512_v4
+91/468013/campos_512_v4
+91/468016/campos_512_v4
+91/468025/campos_512_v4
+91/468027/campos_512_v4
+91/468032/campos_512_v4
+91/468050/campos_512_v4
+91/468052/campos_512_v4
+91/468053/campos_512_v4
+91/468055/campos_512_v4
+91/468056/campos_512_v4
+91/468065/campos_512_v4
+91/468069/campos_512_v4
+91/468072/campos_512_v4
+91/468081/campos_512_v4
+91/468082/campos_512_v4
+91/468099/campos_512_v4
+91/468100/campos_512_v4
+91/468108/campos_512_v4
+91/468135/campos_512_v4
+91/468140/campos_512_v4
+91/468144/campos_512_v4
+91/468148/campos_512_v4
+91/468155/campos_512_v4
+91/468162/campos_512_v4
+91/468184/campos_512_v4
+91/468203/campos_512_v4
+91/468207/campos_512_v4
+91/468211/campos_512_v4
+91/468221/campos_512_v4
+91/468231/campos_512_v4
+91/468256/campos_512_v4
+91/468258/campos_512_v4
+91/468259/campos_512_v4
+91/468269/campos_512_v4
+91/468282/campos_512_v4
+91/468290/campos_512_v4
+91/468296/campos_512_v4
+91/468322/campos_512_v4
+91/468327/campos_512_v4
+91/468330/campos_512_v4
+91/468348/campos_512_v4
+91/468371/campos_512_v4
+91/468373/campos_512_v4
+91/468392/campos_512_v4
+91/468401/campos_512_v4
+91/468412/campos_512_v4
+91/468416/campos_512_v4
+91/468427/campos_512_v4
+91/468431/campos_512_v4
+91/468439/campos_512_v4
+91/468440/campos_512_v4
+91/468447/campos_512_v4
+91/468452/campos_512_v4
+91/468456/campos_512_v4
+91/468469/campos_512_v4
+91/468470/campos_512_v4
+91/468471/campos_512_v4
+91/468486/campos_512_v4
+91/468497/campos_512_v4
+91/468517/campos_512_v4
+91/468518/campos_512_v4
+91/468523/campos_512_v4
+91/468529/campos_512_v4
+91/468531/campos_512_v4
+91/468539/campos_512_v4
+91/468540/campos_512_v4
+91/468541/campos_512_v4
+91/468542/campos_512_v4
+91/468545/campos_512_v4
+91/468550/campos_512_v4
+91/468559/campos_512_v4
+91/468565/campos_512_v4
+91/468567/campos_512_v4
+91/468569/campos_512_v4
+91/468586/campos_512_v4
+91/468592/campos_512_v4
+91/468597/campos_512_v4
+91/468598/campos_512_v4
+91/468602/campos_512_v4
+91/468625/campos_512_v4
+91/468634/campos_512_v4
+91/468652/campos_512_v4
+91/468676/campos_512_v4
+91/468683/campos_512_v4
+91/468684/campos_512_v4
+91/468686/campos_512_v4
+91/468691/campos_512_v4
+91/468697/campos_512_v4
+91/468717/campos_512_v4
+91/468720/campos_512_v4
+91/468722/campos_512_v4
+91/468725/campos_512_v4
+91/468732/campos_512_v4
+91/468751/campos_512_v4
+91/468764/campos_512_v4
+91/468769/campos_512_v4
+91/468771/campos_512_v4
+91/468777/campos_512_v4
+91/468779/campos_512_v4
+91/468780/campos_512_v4
+91/468783/campos_512_v4
+91/468792/campos_512_v4
+91/468812/campos_512_v4
+91/468816/campos_512_v4
+91/468825/campos_512_v4
+91/468836/campos_512_v4
+91/468837/campos_512_v4
+91/468841/campos_512_v4
+91/468842/campos_512_v4
+91/468860/campos_512_v4
+91/468866/campos_512_v4
+91/468873/campos_512_v4
+91/468878/campos_512_v4
+91/468879/campos_512_v4
+91/468886/campos_512_v4
+91/468889/campos_512_v4
+91/468898/campos_512_v4
+91/468904/campos_512_v4
+91/468906/campos_512_v4
+91/468915/campos_512_v4
+91/468918/campos_512_v4
+91/468921/campos_512_v4
+91/468924/campos_512_v4
+91/468928/campos_512_v4
+91/468931/campos_512_v4
+91/468933/campos_512_v4
+91/468936/campos_512_v4
+91/468938/campos_512_v4
+91/468939/campos_512_v4
+91/468941/campos_512_v4
+91/468943/campos_512_v4
+91/468946/campos_512_v4
+91/468951/campos_512_v4
+91/468962/campos_512_v4
+91/468965/campos_512_v4
+91/468971/campos_512_v4
+91/468975/campos_512_v4
+91/468976/campos_512_v4
+91/468979/campos_512_v4
+91/468981/campos_512_v4
+91/468985/campos_512_v4
+91/468992/campos_512_v4
+91/469003/campos_512_v4
+91/469057/campos_512_v4
+91/469059/campos_512_v4
+91/469064/campos_512_v4
+91/469067/campos_512_v4
+91/469072/campos_512_v4
+91/469075/campos_512_v4
+91/469077/campos_512_v4
+91/469082/campos_512_v4
+91/469099/campos_512_v4
+91/469108/campos_512_v4
+91/469117/campos_512_v4
+91/469149/campos_512_v4
+91/469152/campos_512_v4
+91/469161/campos_512_v4
+91/469167/campos_512_v4
+91/469169/campos_512_v4
+91/469173/campos_512_v4
+91/469193/campos_512_v4
+91/469198/campos_512_v4
+91/469200/campos_512_v4
+91/469202/campos_512_v4
+91/469207/campos_512_v4
+91/469208/campos_512_v4
+91/469216/campos_512_v4
+91/469217/campos_512_v4
+91/469220/campos_512_v4
+91/469224/campos_512_v4
+91/469225/campos_512_v4
+91/469227/campos_512_v4
+91/469234/campos_512_v4
+91/469253/campos_512_v4
+91/469254/campos_512_v4
+91/469261/campos_512_v4
+91/469264/campos_512_v4
+91/469265/campos_512_v4
+91/469267/campos_512_v4
+91/469270/campos_512_v4
+91/469271/campos_512_v4
+91/469273/campos_512_v4
+91/469275/campos_512_v4
+91/469291/campos_512_v4
+91/469295/campos_512_v4
+91/469315/campos_512_v4
+91/469316/campos_512_v4
+91/469324/campos_512_v4
+91/469325/campos_512_v4
+91/469335/campos_512_v4
+91/469336/campos_512_v4
+91/469347/campos_512_v4
+91/469355/campos_512_v4
+91/469358/campos_512_v4
+91/469360/campos_512_v4
+91/469367/campos_512_v4
+91/469374/campos_512_v4
+91/469376/campos_512_v4
+91/469380/campos_512_v4
+91/469383/campos_512_v4
+91/469386/campos_512_v4
+91/469392/campos_512_v4
+91/469399/campos_512_v4
+91/469400/campos_512_v4
+91/469404/campos_512_v4
+91/469418/campos_512_v4
+91/469420/campos_512_v4
+91/469421/campos_512_v4
+91/469432/campos_512_v4
+91/469478/campos_512_v4
+91/469497/campos_512_v4
+91/469508/campos_512_v4
+91/469518/campos_512_v4
+91/469530/campos_512_v4
+91/469541/campos_512_v4
+91/469545/campos_512_v4
+91/469546/campos_512_v4
+91/469549/campos_512_v4
+91/469552/campos_512_v4
+91/469559/campos_512_v4
+91/469570/campos_512_v4
+91/469593/campos_512_v4
+91/469598/campos_512_v4
+91/469602/campos_512_v4
+91/469608/campos_512_v4
+91/469618/campos_512_v4
+91/469620/campos_512_v4
+91/469622/campos_512_v4
+91/469631/campos_512_v4
+91/469642/campos_512_v4
+91/469649/campos_512_v4
+91/469673/campos_512_v4
+91/469687/campos_512_v4
+91/469688/campos_512_v4
+91/469695/campos_512_v4
+91/469699/campos_512_v4
+91/469703/campos_512_v4
+91/469708/campos_512_v4
+91/469711/campos_512_v4
+91/469724/campos_512_v4
+91/469729/campos_512_v4
+91/469744/campos_512_v4
+91/469751/campos_512_v4
+91/469757/campos_512_v4
+91/469766/campos_512_v4
+91/469767/campos_512_v4
+91/469781/campos_512_v4
+91/469800/campos_512_v4
+91/469825/campos_512_v4
+91/469828/campos_512_v4
+91/469829/campos_512_v4
+91/469830/campos_512_v4
+91/469845/campos_512_v4
+91/469853/campos_512_v4
+91/469865/campos_512_v4
+91/469867/campos_512_v4
+91/469876/campos_512_v4
+91/469880/campos_512_v4
+91/469891/campos_512_v4
+91/469892/campos_512_v4
+91/469893/campos_512_v4
+91/469897/campos_512_v4
+91/469902/campos_512_v4
+91/469909/campos_512_v4
+91/469910/campos_512_v4
+91/469914/campos_512_v4
+91/469918/campos_512_v4
+91/469928/campos_512_v4
+91/469934/campos_512_v4
+91/469936/campos_512_v4
+91/469946/campos_512_v4
+91/469970/campos_512_v4
+91/469976/campos_512_v4
+91/469977/campos_512_v4
+91/469985/campos_512_v4
+91/469999/campos_512_v4
+92/470003/campos_512_v4
+92/470007/campos_512_v4
+92/470019/campos_512_v4
+92/470026/campos_512_v4
+92/470031/campos_512_v4
+92/470039/campos_512_v4
+92/470047/campos_512_v4
+92/470056/campos_512_v4
+92/470060/campos_512_v4
+92/470072/campos_512_v4
+92/470086/campos_512_v4
+92/470088/campos_512_v4
+92/470094/campos_512_v4
+92/470103/campos_512_v4
+92/470107/campos_512_v4
+92/470117/campos_512_v4
+92/470131/campos_512_v4
+92/470136/campos_512_v4
+92/470138/campos_512_v4
+92/470152/campos_512_v4
+92/470178/campos_512_v4
+92/470183/campos_512_v4
+92/470192/campos_512_v4
+92/470199/campos_512_v4
+92/470206/campos_512_v4
+92/470211/campos_512_v4
+92/470221/campos_512_v4
+92/470224/campos_512_v4
+92/470232/campos_512_v4
+92/470238/campos_512_v4
+92/470243/campos_512_v4
+92/470252/campos_512_v4
+92/470253/campos_512_v4
+92/470257/campos_512_v4
+92/470265/campos_512_v4
+92/470282/campos_512_v4
+92/470295/campos_512_v4
+92/470300/campos_512_v4
+92/470304/campos_512_v4
+92/470312/campos_512_v4
+92/470316/campos_512_v4
+92/470356/campos_512_v4
+92/470368/campos_512_v4
+92/470372/campos_512_v4
+92/470374/campos_512_v4
+92/470385/campos_512_v4
+92/470398/campos_512_v4
+92/470405/campos_512_v4
+92/470410/campos_512_v4
+92/470444/campos_512_v4
+92/470450/campos_512_v4
+92/470457/campos_512_v4
+92/470463/campos_512_v4
+92/470464/campos_512_v4
+92/470468/campos_512_v4
+92/470479/campos_512_v4
+92/470484/campos_512_v4
+92/470497/campos_512_v4
+92/470506/campos_512_v4
+92/470521/campos_512_v4
+92/470538/campos_512_v4
+92/470543/campos_512_v4
+92/470548/campos_512_v4
+92/470552/campos_512_v4
+92/470566/campos_512_v4
+92/470571/campos_512_v4
+92/470572/campos_512_v4
+92/470574/campos_512_v4
+92/470577/campos_512_v4
+92/470587/campos_512_v4
+92/470589/campos_512_v4
+92/470591/campos_512_v4
+92/470597/campos_512_v4
+92/470600/campos_512_v4
+92/470603/campos_512_v4
+92/470608/campos_512_v4
+92/470613/campos_512_v4
+92/470615/campos_512_v4
+92/470627/campos_512_v4
+92/470630/campos_512_v4
+92/470647/campos_512_v4
+92/470655/campos_512_v4
+92/470658/campos_512_v4
+92/470659/campos_512_v4
+92/470661/campos_512_v4
+92/470670/campos_512_v4
+92/470672/campos_512_v4
+92/470675/campos_512_v4
+92/470679/campos_512_v4
+92/470691/campos_512_v4
+92/470704/campos_512_v4
+92/470705/campos_512_v4
+92/470710/campos_512_v4
+92/470724/campos_512_v4
+92/470732/campos_512_v4
+92/470737/campos_512_v4
+92/470741/campos_512_v4
+92/470743/campos_512_v4
+92/470749/campos_512_v4
+92/470756/campos_512_v4
+92/470763/campos_512_v4
+92/470770/campos_512_v4
+92/470776/campos_512_v4
+92/470778/campos_512_v4
+92/470780/campos_512_v4
+92/470783/campos_512_v4
+92/470784/campos_512_v4
+92/470788/campos_512_v4
+92/470797/campos_512_v4
+92/470808/campos_512_v4
+92/470809/campos_512_v4
+92/470811/campos_512_v4
+92/470815/campos_512_v4
+92/470819/campos_512_v4
+92/470826/campos_512_v4
+92/470830/campos_512_v4
+92/470833/campos_512_v4
+92/470844/campos_512_v4
+92/470847/campos_512_v4
+92/470849/campos_512_v4
+92/470862/campos_512_v4
+92/470867/campos_512_v4
+92/470878/campos_512_v4
+92/470881/campos_512_v4
+92/470888/campos_512_v4
+92/470892/campos_512_v4
+92/470898/campos_512_v4
+92/470902/campos_512_v4
+92/470907/campos_512_v4
+92/470917/campos_512_v4
+92/470924/campos_512_v4
+92/470928/campos_512_v4
+92/470929/campos_512_v4
+92/470931/campos_512_v4
+92/470951/campos_512_v4
+92/470960/campos_512_v4
+92/470961/campos_512_v4
+92/470967/campos_512_v4
+92/470974/campos_512_v4
+92/470976/campos_512_v4
+92/470977/campos_512_v4
+92/470987/campos_512_v4
+92/470994/campos_512_v4
+92/470998/campos_512_v4
+92/471004/campos_512_v4
+92/471014/campos_512_v4
+92/471015/campos_512_v4
+92/471020/campos_512_v4
+92/471021/campos_512_v4
+92/471022/campos_512_v4
+92/471029/campos_512_v4
+92/471033/campos_512_v4
+92/471039/campos_512_v4
+92/471049/campos_512_v4
+92/471056/campos_512_v4
+92/471058/campos_512_v4
+92/471064/campos_512_v4
+92/471078/campos_512_v4
+92/471093/campos_512_v4
+92/471103/campos_512_v4
+92/471105/campos_512_v4
+92/471109/campos_512_v4
+92/471119/campos_512_v4
+92/471133/campos_512_v4
+92/471147/campos_512_v4
+92/471148/campos_512_v4
+92/471151/campos_512_v4
+92/471153/campos_512_v4
+92/471154/campos_512_v4
+92/471155/campos_512_v4
+92/471156/campos_512_v4
+92/471157/campos_512_v4
+92/471161/campos_512_v4
+92/471164/campos_512_v4
+92/471169/campos_512_v4
+92/471180/campos_512_v4
+92/471186/campos_512_v4
+92/471196/campos_512_v4
+92/471209/campos_512_v4
+92/471211/campos_512_v4
+92/471213/campos_512_v4
+92/471219/campos_512_v4
+92/471221/campos_512_v4
+92/471237/campos_512_v4
+92/471240/campos_512_v4
+92/471243/campos_512_v4
+92/471248/campos_512_v4
+92/471259/campos_512_v4
+92/471285/campos_512_v4
+92/471292/campos_512_v4
+92/471293/campos_512_v4
+92/471298/campos_512_v4
+92/471312/campos_512_v4
+92/471329/campos_512_v4
+92/471332/campos_512_v4
+92/471333/campos_512_v4
+92/471334/campos_512_v4
+92/471335/campos_512_v4
+92/471337/campos_512_v4
+92/471346/campos_512_v4
+92/471354/campos_512_v4
+92/471359/campos_512_v4
+92/471360/campos_512_v4
+92/471367/campos_512_v4
+92/471371/campos_512_v4
+92/471375/campos_512_v4
+92/471380/campos_512_v4
+92/471385/campos_512_v4
+92/471391/campos_512_v4
+92/471399/campos_512_v4
+92/471400/campos_512_v4
+92/471404/campos_512_v4
+92/471416/campos_512_v4
+92/471419/campos_512_v4
+92/471428/campos_512_v4
+92/471434/campos_512_v4
+92/471440/campos_512_v4
+92/471441/campos_512_v4
+92/471443/campos_512_v4
+92/471457/campos_512_v4
+92/471458/campos_512_v4
+92/471460/campos_512_v4
+92/471471/campos_512_v4
+92/471480/campos_512_v4
+92/471511/campos_512_v4
+92/471520/campos_512_v4
+92/471532/campos_512_v4
+92/471533/campos_512_v4
+92/471536/campos_512_v4
+92/471538/campos_512_v4
+92/471550/campos_512_v4
+92/471556/campos_512_v4
+92/471565/campos_512_v4
+92/471586/campos_512_v4
+92/471595/campos_512_v4
+92/471608/campos_512_v4
+92/471612/campos_512_v4
+92/471628/campos_512_v4
+92/471642/campos_512_v4
+92/471645/campos_512_v4
+92/471664/campos_512_v4
+92/471670/campos_512_v4
+92/471677/campos_512_v4
+92/471681/campos_512_v4
+92/471687/campos_512_v4
+92/471700/campos_512_v4
+92/471705/campos_512_v4
+92/471706/campos_512_v4
+92/471709/campos_512_v4
+92/471737/campos_512_v4
+92/471762/campos_512_v4
+92/471771/campos_512_v4
+92/471772/campos_512_v4
+92/471775/campos_512_v4
+92/471780/campos_512_v4
+92/471787/campos_512_v4
+92/471788/campos_512_v4
+92/471791/campos_512_v4
+92/471803/campos_512_v4
+92/471806/campos_512_v4
+92/471822/campos_512_v4
+92/471826/campos_512_v4
+92/471832/campos_512_v4
+92/471834/campos_512_v4
+92/471839/campos_512_v4
+92/471848/campos_512_v4
+92/471849/campos_512_v4
+92/471853/campos_512_v4
+92/471857/campos_512_v4
+92/471859/campos_512_v4
+92/471861/campos_512_v4
+92/471867/campos_512_v4
+92/471878/campos_512_v4
+92/471885/campos_512_v4
+92/471890/campos_512_v4
+92/471905/campos_512_v4
+92/471910/campos_512_v4
+92/471917/campos_512_v4
+92/471928/campos_512_v4
+92/471946/campos_512_v4
+92/471955/campos_512_v4
+92/471969/campos_512_v4
+92/471972/campos_512_v4
+92/471994/campos_512_v4
+92/471996/campos_512_v4
+92/472001/campos_512_v4
+92/472005/campos_512_v4
+92/472007/campos_512_v4
+92/472010/campos_512_v4
+92/472022/campos_512_v4
+92/472036/campos_512_v4
+92/472038/campos_512_v4
+92/472039/campos_512_v4
+92/472043/campos_512_v4
+92/472059/campos_512_v4
+92/472062/campos_512_v4
+92/472063/campos_512_v4
+92/472066/campos_512_v4
+92/472068/campos_512_v4
+92/472085/campos_512_v4
+92/472094/campos_512_v4
+92/472108/campos_512_v4
+92/472113/campos_512_v4
+92/472114/campos_512_v4
+92/472123/campos_512_v4
+92/472127/campos_512_v4
+92/472128/campos_512_v4
+92/472130/campos_512_v4
+92/472140/campos_512_v4
+92/472149/campos_512_v4
+92/472152/campos_512_v4
+92/472172/campos_512_v4
+92/472176/campos_512_v4
+92/472179/campos_512_v4
+92/472189/campos_512_v4
+92/472191/campos_512_v4
+92/472192/campos_512_v4
+92/472196/campos_512_v4
+92/472200/campos_512_v4
+92/472203/campos_512_v4
+92/472208/campos_512_v4
+92/472215/campos_512_v4
+92/472216/campos_512_v4
+92/472231/campos_512_v4
+92/472235/campos_512_v4
+92/472270/campos_512_v4
+92/472272/campos_512_v4
+92/472276/campos_512_v4
+92/472278/campos_512_v4
+92/472286/campos_512_v4
+92/472290/campos_512_v4
+92/472291/campos_512_v4
+92/472299/campos_512_v4
+92/472300/campos_512_v4
+92/472311/campos_512_v4
+92/472313/campos_512_v4
+92/472325/campos_512_v4
+92/472327/campos_512_v4
+92/472331/campos_512_v4
+92/472334/campos_512_v4
+92/472339/campos_512_v4
+92/472360/campos_512_v4
+92/472361/campos_512_v4
+92/472364/campos_512_v4
+92/472371/campos_512_v4
+92/472398/campos_512_v4
+92/472405/campos_512_v4
+92/472407/campos_512_v4
+92/472412/campos_512_v4
+92/472417/campos_512_v4
+92/472424/campos_512_v4
+92/472426/campos_512_v4
+92/472432/campos_512_v4
+92/472439/campos_512_v4
+92/472440/campos_512_v4
+92/472444/campos_512_v4
+92/472448/campos_512_v4
+92/472459/campos_512_v4
+92/472476/campos_512_v4
+92/472491/campos_512_v4
+92/472500/campos_512_v4
+92/472520/campos_512_v4
+92/472555/campos_512_v4
+92/472561/campos_512_v4
+92/472562/campos_512_v4
+92/472565/campos_512_v4
+92/472567/campos_512_v4
+92/472569/campos_512_v4
+92/472580/campos_512_v4
+92/472600/campos_512_v4
+92/472606/campos_512_v4
+92/472617/campos_512_v4
+92/472637/campos_512_v4
+92/472643/campos_512_v4
+92/472645/campos_512_v4
+92/472647/campos_512_v4
+92/472652/campos_512_v4
+92/472658/campos_512_v4
+92/472663/campos_512_v4
+92/472664/campos_512_v4
+92/472670/campos_512_v4
+92/472675/campos_512_v4
+92/472677/campos_512_v4
+92/472699/campos_512_v4
+92/472710/campos_512_v4
+92/472755/campos_512_v4
+92/472761/campos_512_v4
+92/472770/campos_512_v4
+92/472776/campos_512_v4
+92/472799/campos_512_v4
+92/472806/campos_512_v4
+92/472810/campos_512_v4
+92/472820/campos_512_v4
+92/472829/campos_512_v4
+92/472842/campos_512_v4
+92/472847/campos_512_v4
+92/472851/campos_512_v4
+92/472863/campos_512_v4
+92/472872/campos_512_v4
+92/472877/campos_512_v4
+92/472880/campos_512_v4
+92/472881/campos_512_v4
+92/472885/campos_512_v4
+92/472892/campos_512_v4
+92/472896/campos_512_v4
+92/472899/campos_512_v4
+92/472906/campos_512_v4
+92/472911/campos_512_v4
+92/472912/campos_512_v4
+92/472922/campos_512_v4
+92/472934/campos_512_v4
+92/472942/campos_512_v4
+92/472945/campos_512_v4
+92/472947/campos_512_v4
+92/472948/campos_512_v4
+92/472950/campos_512_v4
+92/472962/campos_512_v4
+92/472965/campos_512_v4
+92/472971/campos_512_v4
+92/472976/campos_512_v4
+92/472989/campos_512_v4
+92/472998/campos_512_v4
+92/472999/campos_512_v4
+92/473008/campos_512_v4
+92/473012/campos_512_v4
+92/473015/campos_512_v4
+92/473016/campos_512_v4
+92/473031/campos_512_v4
+92/473046/campos_512_v4
+92/473049/campos_512_v4
+92/473051/campos_512_v4
+92/473054/campos_512_v4
+92/473060/campos_512_v4
+92/473066/campos_512_v4
+92/473069/campos_512_v4
+92/473070/campos_512_v4
+92/473071/campos_512_v4
+92/473076/campos_512_v4
+92/473083/campos_512_v4
+92/473099/campos_512_v4
+92/473107/campos_512_v4
+92/473114/campos_512_v4
+92/473115/campos_512_v4
+92/473120/campos_512_v4
+92/473142/campos_512_v4
+92/473174/campos_512_v4
+92/473177/campos_512_v4
+92/473190/campos_512_v4
+92/473210/campos_512_v4
+92/473212/campos_512_v4
+92/473228/campos_512_v4
+92/473233/campos_512_v4
+92/473243/campos_512_v4
+92/473249/campos_512_v4
+92/473255/campos_512_v4
+92/473264/campos_512_v4
+92/473267/campos_512_v4
+92/473275/campos_512_v4
+92/473282/campos_512_v4
+92/473296/campos_512_v4
+92/473299/campos_512_v4
+92/473309/campos_512_v4
+92/473310/campos_512_v4
+92/473325/campos_512_v4
+92/473337/campos_512_v4
+92/473349/campos_512_v4
+92/473359/campos_512_v4
+92/473380/campos_512_v4
+92/473387/campos_512_v4
+92/473401/campos_512_v4
+92/473403/campos_512_v4
+92/473412/campos_512_v4
+92/473416/campos_512_v4
+92/473417/campos_512_v4
+92/473420/campos_512_v4
+92/473424/campos_512_v4
+92/473436/campos_512_v4
+92/473446/campos_512_v4
+92/473461/campos_512_v4
+92/473464/campos_512_v4
+92/473470/campos_512_v4
+92/473474/campos_512_v4
+92/473496/campos_512_v4
+92/473506/campos_512_v4
+92/473507/campos_512_v4
+92/473513/campos_512_v4
+92/473516/campos_512_v4
+92/473525/campos_512_v4
+92/473526/campos_512_v4
+92/473527/campos_512_v4
+92/473531/campos_512_v4
+92/473545/campos_512_v4
+92/473548/campos_512_v4
+92/473567/campos_512_v4
+92/473570/campos_512_v4
+92/473577/campos_512_v4
+92/473592/campos_512_v4
+92/473595/campos_512_v4
+92/473598/campos_512_v4
+92/473610/campos_512_v4
+92/473617/campos_512_v4
+92/473628/campos_512_v4
+92/473630/campos_512_v4
+92/473644/campos_512_v4
+92/473658/campos_512_v4
+92/473668/campos_512_v4
+92/473676/campos_512_v4
+92/473691/campos_512_v4
+92/473702/campos_512_v4
+92/473707/campos_512_v4
+92/473726/campos_512_v4
+92/473746/campos_512_v4
+92/473750/campos_512_v4
+92/473760/campos_512_v4
+92/473774/campos_512_v4
+92/473817/campos_512_v4
+92/473823/campos_512_v4
+92/473846/campos_512_v4
+92/473847/campos_512_v4
+92/473865/campos_512_v4
+92/473866/campos_512_v4
+92/473878/campos_512_v4
+92/473882/campos_512_v4
+92/473886/campos_512_v4
+92/473899/campos_512_v4
+92/473904/campos_512_v4
+92/473918/campos_512_v4
+92/473925/campos_512_v4
+92/473928/campos_512_v4
+92/473943/campos_512_v4
+92/473950/campos_512_v4
+92/473951/campos_512_v4
+92/473955/campos_512_v4
+92/473962/campos_512_v4
+92/473965/campos_512_v4
+92/473967/campos_512_v4
+92/473975/campos_512_v4
+92/473986/campos_512_v4
+92/473988/campos_512_v4
+92/473995/campos_512_v4
+92/473999/campos_512_v4
+92/474000/campos_512_v4
+92/474007/campos_512_v4
+92/474010/campos_512_v4
+92/474017/campos_512_v4
+92/474023/campos_512_v4
+92/474024/campos_512_v4
+92/474027/campos_512_v4
+92/474035/campos_512_v4
+92/474038/campos_512_v4
+92/474057/campos_512_v4
+92/474064/campos_512_v4
+92/474072/campos_512_v4
+92/474073/campos_512_v4
+92/474085/campos_512_v4
+92/474088/campos_512_v4
+92/474105/campos_512_v4
+92/474106/campos_512_v4
+92/474109/campos_512_v4
+92/474114/campos_512_v4
+92/474132/campos_512_v4
+92/474135/campos_512_v4
+92/474152/campos_512_v4
+92/474158/campos_512_v4
+92/474171/campos_512_v4
+92/474173/campos_512_v4
+92/474188/campos_512_v4
+92/474202/campos_512_v4
+92/474209/campos_512_v4
+92/474211/campos_512_v4
+92/474214/campos_512_v4
+92/474225/campos_512_v4
+92/474227/campos_512_v4
+92/474230/campos_512_v4
+92/474235/campos_512_v4
+92/474241/campos_512_v4
+92/474258/campos_512_v4
+92/474291/campos_512_v4
+92/474294/campos_512_v4
+92/474296/campos_512_v4
+92/474300/campos_512_v4
+92/474304/campos_512_v4
+92/474305/campos_512_v4
+92/474308/campos_512_v4
+92/474310/campos_512_v4
+92/474324/campos_512_v4
+92/474329/campos_512_v4
+92/474330/campos_512_v4
+92/474348/campos_512_v4
+92/474351/campos_512_v4
+92/474367/campos_512_v4
+92/474397/campos_512_v4
+92/474402/campos_512_v4
+92/474406/campos_512_v4
+92/474408/campos_512_v4
+92/474409/campos_512_v4
+92/474419/campos_512_v4
+92/474421/campos_512_v4
+92/474432/campos_512_v4
+92/474445/campos_512_v4
+92/474448/campos_512_v4
+92/474452/campos_512_v4
+92/474483/campos_512_v4
+92/474488/campos_512_v4
+92/474492/campos_512_v4
+92/474503/campos_512_v4
+92/474514/campos_512_v4
+92/474519/campos_512_v4
+92/474526/campos_512_v4
+92/474538/campos_512_v4
+92/474564/campos_512_v4
+92/474569/campos_512_v4
+92/474576/campos_512_v4
+92/474595/campos_512_v4
+92/474597/campos_512_v4
+92/474600/campos_512_v4
+92/474602/campos_512_v4
+92/474619/campos_512_v4
+92/474623/campos_512_v4
+92/474634/campos_512_v4
+92/474636/campos_512_v4
+92/474639/campos_512_v4
+92/474650/campos_512_v4
+92/474669/campos_512_v4
+92/474671/campos_512_v4
+92/474680/campos_512_v4
+92/474689/campos_512_v4
+92/474695/campos_512_v4
+92/474698/campos_512_v4
+92/474709/campos_512_v4
+92/474711/campos_512_v4
+92/474714/campos_512_v4
+92/474715/campos_512_v4
+92/474719/campos_512_v4
+92/474729/campos_512_v4
+92/474732/campos_512_v4
+92/474736/campos_512_v4
+92/474737/campos_512_v4
+92/474740/campos_512_v4
+92/474741/campos_512_v4
+92/474764/campos_512_v4
+92/474773/campos_512_v4
+92/474783/campos_512_v4
+92/474785/campos_512_v4
+92/474787/campos_512_v4
+92/474802/campos_512_v4
+92/474804/campos_512_v4
+92/474810/campos_512_v4
+92/474826/campos_512_v4
+92/474842/campos_512_v4
+92/474846/campos_512_v4
+92/474847/campos_512_v4
+92/474848/campos_512_v4
+92/474852/campos_512_v4
+92/474857/campos_512_v4
+92/474861/campos_512_v4
+92/474872/campos_512_v4
+92/474884/campos_512_v4
+92/474887/campos_512_v4
+92/474893/campos_512_v4
+92/474896/campos_512_v4
+92/474915/campos_512_v4
+92/474916/campos_512_v4
+92/474917/campos_512_v4
+92/474923/campos_512_v4
+92/474928/campos_512_v4
+92/474932/campos_512_v4
+92/474937/campos_512_v4
+92/474940/campos_512_v4
+92/474943/campos_512_v4
+92/474945/campos_512_v4
+92/474948/campos_512_v4
+92/474952/campos_512_v4
+92/474975/campos_512_v4
+92/474977/campos_512_v4
+92/474985/campos_512_v4
+93/475036/campos_512_v4
+93/475061/campos_512_v4
+93/475064/campos_512_v4
+93/475068/campos_512_v4
+93/475100/campos_512_v4
+93/475103/campos_512_v4
+93/475120/campos_512_v4
+93/475126/campos_512_v4
+93/475128/campos_512_v4
+93/475129/campos_512_v4
+93/475135/campos_512_v4
+93/475139/campos_512_v4
+93/475157/campos_512_v4
+93/475161/campos_512_v4
+93/475173/campos_512_v4
+93/475182/campos_512_v4
+93/475196/campos_512_v4
+93/475200/campos_512_v4
+93/475202/campos_512_v4
+93/475207/campos_512_v4
+93/475208/campos_512_v4
+93/475214/campos_512_v4
+93/475220/campos_512_v4
+93/475225/campos_512_v4
+93/475230/campos_512_v4
+93/475233/campos_512_v4
+93/475245/campos_512_v4
+93/475254/campos_512_v4
+93/475259/campos_512_v4
+93/475264/campos_512_v4
+93/475266/campos_512_v4
+93/475276/campos_512_v4
+93/475285/campos_512_v4
+93/475304/campos_512_v4
+93/475306/campos_512_v4
+93/475310/campos_512_v4
+93/475318/campos_512_v4
+93/475324/campos_512_v4
+93/475327/campos_512_v4
+93/475338/campos_512_v4
+93/475342/campos_512_v4
+93/475346/campos_512_v4
+93/475349/campos_512_v4
+93/475353/campos_512_v4
+93/475355/campos_512_v4
+93/475358/campos_512_v4
+93/475359/campos_512_v4
+93/475362/campos_512_v4
+93/475366/campos_512_v4
+93/475372/campos_512_v4
+93/475383/campos_512_v4
+93/475388/campos_512_v4
+93/475390/campos_512_v4
+93/475394/campos_512_v4
+93/475395/campos_512_v4
+93/475404/campos_512_v4
+93/475415/campos_512_v4
+93/475417/campos_512_v4
+93/475419/campos_512_v4
+93/475421/campos_512_v4
+93/475422/campos_512_v4
+93/475433/campos_512_v4
+93/475437/campos_512_v4
+93/475439/campos_512_v4
+93/475443/campos_512_v4
+93/475447/campos_512_v4
+93/475458/campos_512_v4
+93/475459/campos_512_v4
+93/475464/campos_512_v4
+93/475484/campos_512_v4
+93/475489/campos_512_v4
+93/475492/campos_512_v4
+93/475495/campos_512_v4
+93/475496/campos_512_v4
+93/475508/campos_512_v4
+93/475519/campos_512_v4
+93/475521/campos_512_v4
+93/475522/campos_512_v4
+93/475523/campos_512_v4
+93/475531/campos_512_v4
+93/475534/campos_512_v4
+93/475537/campos_512_v4
+93/475547/campos_512_v4
+93/475572/campos_512_v4
+93/475584/campos_512_v4
+93/475594/campos_512_v4
+93/475596/campos_512_v4
+93/475597/campos_512_v4
+93/475600/campos_512_v4
+93/475611/campos_512_v4
+93/475615/campos_512_v4
+93/475617/campos_512_v4
+93/475619/campos_512_v4
+93/475622/campos_512_v4
+93/475631/campos_512_v4
+93/475633/campos_512_v4
+93/475649/campos_512_v4
+93/475655/campos_512_v4
+93/475656/campos_512_v4
+93/475668/campos_512_v4
+93/475673/campos_512_v4
+93/475675/campos_512_v4
+93/475676/campos_512_v4
+93/475685/campos_512_v4
+93/475686/campos_512_v4
+93/475689/campos_512_v4
+93/475700/campos_512_v4
+93/475704/campos_512_v4
+93/475717/campos_512_v4
+93/475720/campos_512_v4
+93/475728/campos_512_v4
+93/475731/campos_512_v4
+93/475747/campos_512_v4
+93/475751/campos_512_v4
+93/475769/campos_512_v4
+93/475773/campos_512_v4
+93/475783/campos_512_v4
+93/475785/campos_512_v4
+93/475786/campos_512_v4
+93/475787/campos_512_v4
+93/475793/campos_512_v4
+93/475794/campos_512_v4
+93/475805/campos_512_v4
+93/475809/campos_512_v4
+93/475814/campos_512_v4
+93/475818/campos_512_v4
+93/475820/campos_512_v4
+93/475829/campos_512_v4
+93/475833/campos_512_v4
+93/475854/campos_512_v4
+93/475857/campos_512_v4
+93/475866/campos_512_v4
+93/475869/campos_512_v4
+93/475871/campos_512_v4
+93/475873/campos_512_v4
+93/475876/campos_512_v4
+93/475898/campos_512_v4
+93/475900/campos_512_v4
+93/475902/campos_512_v4
+93/475904/campos_512_v4
+93/475908/campos_512_v4
+93/475909/campos_512_v4
+93/475940/campos_512_v4
+93/475950/campos_512_v4
+93/475957/campos_512_v4
+93/475958/campos_512_v4
+93/475959/campos_512_v4
+93/475968/campos_512_v4
+93/475987/campos_512_v4
+93/475988/campos_512_v4
+93/475998/campos_512_v4
+93/475999/campos_512_v4
+93/476014/campos_512_v4
+93/476018/campos_512_v4
+93/476019/campos_512_v4
+93/476026/campos_512_v4
+93/476030/campos_512_v4
+93/476038/campos_512_v4
+93/476040/campos_512_v4
+93/476045/campos_512_v4
+93/476081/campos_512_v4
+93/476088/campos_512_v4
+93/476089/campos_512_v4
+93/476091/campos_512_v4
+93/476111/campos_512_v4
+93/476124/campos_512_v4
+93/476135/campos_512_v4
+93/476137/campos_512_v4
+93/476141/campos_512_v4
+93/476145/campos_512_v4
+93/476146/campos_512_v4
+93/476158/campos_512_v4
+93/476160/campos_512_v4
+93/476181/campos_512_v4
+93/476187/campos_512_v4
+93/476204/campos_512_v4
+93/476212/campos_512_v4
+93/476221/campos_512_v4
+93/476231/campos_512_v4
+93/476252/campos_512_v4
+93/476258/campos_512_v4
+93/476260/campos_512_v4
+93/476271/campos_512_v4
+93/476274/campos_512_v4
+93/476284/campos_512_v4
+93/476307/campos_512_v4
+93/476337/campos_512_v4
+93/476368/campos_512_v4
+93/476371/campos_512_v4
+93/476373/campos_512_v4
+93/476376/campos_512_v4
+93/476383/campos_512_v4
+93/476385/campos_512_v4
+93/476388/campos_512_v4
+93/476391/campos_512_v4
+93/476396/campos_512_v4
+93/476397/campos_512_v4
+93/476409/campos_512_v4
+93/476410/campos_512_v4
+93/476418/campos_512_v4
+93/476424/campos_512_v4
+93/476425/campos_512_v4
+93/476435/campos_512_v4
+93/476445/campos_512_v4
+93/476450/campos_512_v4
+93/476460/campos_512_v4
+93/476470/campos_512_v4
+93/476475/campos_512_v4
+93/476477/campos_512_v4
+93/476480/campos_512_v4
+93/476484/campos_512_v4
+93/476489/campos_512_v4
+93/476493/campos_512_v4
+93/476516/campos_512_v4
+93/476524/campos_512_v4
+93/476531/campos_512_v4
+93/476534/campos_512_v4
+93/476535/campos_512_v4
+93/476536/campos_512_v4
+93/476543/campos_512_v4
+93/476548/campos_512_v4
+93/476550/campos_512_v4
+93/476554/campos_512_v4
+93/476559/campos_512_v4
+93/476560/campos_512_v4
+93/476562/campos_512_v4
+93/476568/campos_512_v4
+93/476570/campos_512_v4
+93/476574/campos_512_v4
+93/476604/campos_512_v4
+93/476607/campos_512_v4
+93/476630/campos_512_v4
+93/476633/campos_512_v4
+93/476636/campos_512_v4
+93/476641/campos_512_v4
+93/476642/campos_512_v4
+93/476646/campos_512_v4
+93/476667/campos_512_v4
+93/476672/campos_512_v4
+93/476678/campos_512_v4
+93/476680/campos_512_v4
+93/476698/campos_512_v4
+93/476711/campos_512_v4
+93/476737/campos_512_v4
+93/476751/campos_512_v4
+93/476760/campos_512_v4
+93/476770/campos_512_v4
+93/476771/campos_512_v4
+93/476778/campos_512_v4
+93/476792/campos_512_v4
+93/476808/campos_512_v4
+93/476815/campos_512_v4
+93/476829/campos_512_v4
+93/476849/campos_512_v4
+93/476853/campos_512_v4
+93/476883/campos_512_v4
+93/476891/campos_512_v4
+93/476893/campos_512_v4
+93/476894/campos_512_v4
+93/476897/campos_512_v4
+93/476900/campos_512_v4
+93/476914/campos_512_v4
+93/476918/campos_512_v4
+93/476950/campos_512_v4
+93/476951/campos_512_v4
+93/476956/campos_512_v4
+93/476961/campos_512_v4
+93/476966/campos_512_v4
+93/476973/campos_512_v4
+93/476984/campos_512_v4
+93/476985/campos_512_v4
+93/476987/campos_512_v4
+93/476989/campos_512_v4
+93/476991/campos_512_v4
+93/476997/campos_512_v4
+93/477003/campos_512_v4
+93/477012/campos_512_v4
+93/477017/campos_512_v4
+93/477025/campos_512_v4
+93/477027/campos_512_v4
+93/477035/campos_512_v4
+93/477036/campos_512_v4
+93/477063/campos_512_v4
+93/477086/campos_512_v4
+93/477089/campos_512_v4
+93/477093/campos_512_v4
+93/477099/campos_512_v4
+93/477110/campos_512_v4
+93/477112/campos_512_v4
+93/477118/campos_512_v4
+93/477128/campos_512_v4
+93/477133/campos_512_v4
+93/477141/campos_512_v4
+93/477143/campos_512_v4
+93/477150/campos_512_v4
+93/477164/campos_512_v4
+93/477176/campos_512_v4
+93/477179/campos_512_v4
+93/477184/campos_512_v4
+93/477192/campos_512_v4
+93/477194/campos_512_v4
+93/477198/campos_512_v4
+93/477205/campos_512_v4
+93/477222/campos_512_v4
+93/477251/campos_512_v4
+93/477266/campos_512_v4
+93/477270/campos_512_v4
+93/477275/campos_512_v4
+93/477292/campos_512_v4
+93/477293/campos_512_v4
+93/477305/campos_512_v4
+93/477310/campos_512_v4
+93/477316/campos_512_v4
+93/477318/campos_512_v4
+93/477320/campos_512_v4
+93/477337/campos_512_v4
+93/477351/campos_512_v4
+93/477358/campos_512_v4
+93/477361/campos_512_v4
+93/477362/campos_512_v4
+93/477371/campos_512_v4
+93/477372/campos_512_v4
+93/477377/campos_512_v4
+93/477400/campos_512_v4
+93/477401/campos_512_v4
+93/477404/campos_512_v4
+93/477407/campos_512_v4
+93/477418/campos_512_v4
+93/477430/campos_512_v4
+93/477431/campos_512_v4
+93/477434/campos_512_v4
+93/477436/campos_512_v4
+93/477438/campos_512_v4
+93/477446/campos_512_v4
+93/477450/campos_512_v4
+93/477461/campos_512_v4
+93/477464/campos_512_v4
+93/477465/campos_512_v4
+93/477480/campos_512_v4
+93/477485/campos_512_v4
+93/477502/campos_512_v4
+93/477505/campos_512_v4
+93/477516/campos_512_v4
+93/477517/campos_512_v4
+93/477521/campos_512_v4
+93/477529/campos_512_v4
+93/477532/campos_512_v4
+93/477539/campos_512_v4
+93/477542/campos_512_v4
+93/477559/campos_512_v4
+93/477569/campos_512_v4
+93/477584/campos_512_v4
+93/477598/campos_512_v4
+93/477605/campos_512_v4
+93/477606/campos_512_v4
+93/477609/campos_512_v4
+93/477612/campos_512_v4
+93/477615/campos_512_v4
+93/477636/campos_512_v4
+93/477642/campos_512_v4
+93/477643/campos_512_v4
+93/477647/campos_512_v4
+93/477670/campos_512_v4
+93/477694/campos_512_v4
+93/477705/campos_512_v4
+93/477728/campos_512_v4
+93/477738/campos_512_v4
+93/477741/campos_512_v4
+93/477747/campos_512_v4
+93/477770/campos_512_v4
+93/477780/campos_512_v4
+93/477784/campos_512_v4
+93/477819/campos_512_v4
+93/477821/campos_512_v4
+93/477832/campos_512_v4
+93/477837/campos_512_v4
+93/477850/campos_512_v4
+93/477854/campos_512_v4
+93/477856/campos_512_v4
+93/477868/campos_512_v4
+93/477873/campos_512_v4
+93/477874/campos_512_v4
+93/477875/campos_512_v4
+93/477876/campos_512_v4
+93/477894/campos_512_v4
+93/477899/campos_512_v4
+93/477907/campos_512_v4
+93/477909/campos_512_v4
+93/477924/campos_512_v4
+93/477927/campos_512_v4
+93/477954/campos_512_v4
+93/477959/campos_512_v4
+93/477968/campos_512_v4
+93/477974/campos_512_v4
+93/477977/campos_512_v4
+93/477978/campos_512_v4
+93/477986/campos_512_v4
+93/477989/campos_512_v4
+93/477995/campos_512_v4
+93/477998/campos_512_v4
+93/478033/campos_512_v4
+93/478040/campos_512_v4
+93/478042/campos_512_v4
+93/478059/campos_512_v4
+93/478065/campos_512_v4
+93/478073/campos_512_v4
+93/478088/campos_512_v4
+93/478093/campos_512_v4
+93/478117/campos_512_v4
+93/478121/campos_512_v4
+93/478123/campos_512_v4
+93/478139/campos_512_v4
+93/478140/campos_512_v4
+93/478156/campos_512_v4
+93/478166/campos_512_v4
+93/478176/campos_512_v4
+93/478177/campos_512_v4
+93/478197/campos_512_v4
+93/478201/campos_512_v4
+93/478208/campos_512_v4
+93/478210/campos_512_v4
+93/478212/campos_512_v4
+93/478213/campos_512_v4
+93/478217/campos_512_v4
+93/478220/campos_512_v4
+93/478227/campos_512_v4
+93/478235/campos_512_v4
+93/478237/campos_512_v4
+93/478256/campos_512_v4
+93/478264/campos_512_v4
+93/478272/campos_512_v4
+93/478278/campos_512_v4
+93/478285/campos_512_v4
+93/478288/campos_512_v4
+93/478306/campos_512_v4
+93/478311/campos_512_v4
+93/478318/campos_512_v4
+93/478344/campos_512_v4
+93/478346/campos_512_v4
+93/478351/campos_512_v4
+93/478364/campos_512_v4
+93/478373/campos_512_v4
+93/478398/campos_512_v4
+93/478399/campos_512_v4
+93/478421/campos_512_v4
+93/478423/campos_512_v4
+93/478429/campos_512_v4
+93/478433/campos_512_v4
+93/478434/campos_512_v4
+93/478460/campos_512_v4
+93/478466/campos_512_v4
+93/478472/campos_512_v4
+93/478473/campos_512_v4
+93/478477/campos_512_v4
+93/478512/campos_512_v4
+93/478516/campos_512_v4
+93/478518/campos_512_v4
+93/478520/campos_512_v4
+93/478528/campos_512_v4
+93/478529/campos_512_v4
+93/478533/campos_512_v4
+93/478534/campos_512_v4
+93/478542/campos_512_v4
+93/478554/campos_512_v4
+93/478558/campos_512_v4
+93/478574/campos_512_v4
+93/478579/campos_512_v4
+93/478585/campos_512_v4
+93/478588/campos_512_v4
+93/478590/campos_512_v4
+93/478594/campos_512_v4
+93/478595/campos_512_v4
+93/478612/campos_512_v4
+93/478618/campos_512_v4
+93/478627/campos_512_v4
+93/478636/campos_512_v4
+93/478651/campos_512_v4
+93/478654/campos_512_v4
+93/478671/campos_512_v4
+93/478674/campos_512_v4
+93/478689/campos_512_v4
+93/478691/campos_512_v4
+93/478692/campos_512_v4
+93/478694/campos_512_v4
+93/478696/campos_512_v4
+93/478701/campos_512_v4
+93/478703/campos_512_v4
+93/478704/campos_512_v4
+93/478713/campos_512_v4
+93/478719/campos_512_v4
+93/478727/campos_512_v4
+93/478728/campos_512_v4
+93/478729/campos_512_v4
+93/478730/campos_512_v4
+93/478732/campos_512_v4
+93/478737/campos_512_v4
+93/478742/campos_512_v4
+93/478745/campos_512_v4
+93/478755/campos_512_v4
+93/478756/campos_512_v4
+93/478759/campos_512_v4
+93/478764/campos_512_v4
+93/478765/campos_512_v4
+93/478773/campos_512_v4
+93/478776/campos_512_v4
+93/478777/campos_512_v4
+93/478786/campos_512_v4
+93/478800/campos_512_v4
+93/478802/campos_512_v4
+93/478808/campos_512_v4
+93/478812/campos_512_v4
+93/478819/campos_512_v4
+93/478826/campos_512_v4
+93/478830/campos_512_v4
+93/478837/campos_512_v4
+93/478842/campos_512_v4
+93/478862/campos_512_v4
+93/478863/campos_512_v4
+93/478870/campos_512_v4
+93/478873/campos_512_v4
+93/478875/campos_512_v4
+93/478876/campos_512_v4
+93/478879/campos_512_v4
+93/478880/campos_512_v4
+93/478884/campos_512_v4
+93/478885/campos_512_v4
+93/478887/campos_512_v4
+93/478891/campos_512_v4
+93/478892/campos_512_v4
+93/478916/campos_512_v4
+93/478937/campos_512_v4
+93/478946/campos_512_v4
+93/478948/campos_512_v4
+93/478956/campos_512_v4
+93/478958/campos_512_v4
+93/478964/campos_512_v4
+93/478968/campos_512_v4
+93/478970/campos_512_v4
+93/478973/campos_512_v4
+93/478991/campos_512_v4
+93/478996/campos_512_v4
+93/479008/campos_512_v4
+93/479012/campos_512_v4
+93/479025/campos_512_v4
+93/479035/campos_512_v4
+93/479041/campos_512_v4
+93/479052/campos_512_v4
+93/479056/campos_512_v4
+93/479094/campos_512_v4
+93/479096/campos_512_v4
+93/479097/campos_512_v4
+93/479105/campos_512_v4
+93/479124/campos_512_v4
+93/479132/campos_512_v4
+93/479136/campos_512_v4
+93/479140/campos_512_v4
+93/479142/campos_512_v4
+93/479143/campos_512_v4
+93/479148/campos_512_v4
+93/479152/campos_512_v4
+93/479156/campos_512_v4
+93/479161/campos_512_v4
+93/479168/campos_512_v4
+93/479183/campos_512_v4
+93/479188/campos_512_v4
+93/479212/campos_512_v4
+93/479215/campos_512_v4
+93/479234/campos_512_v4
+93/479245/campos_512_v4
+93/479249/campos_512_v4
+93/479265/campos_512_v4
+93/479270/campos_512_v4
+93/479295/campos_512_v4
+93/479301/campos_512_v4
+93/479306/campos_512_v4
+93/479313/campos_512_v4
+93/479357/campos_512_v4
+93/479363/campos_512_v4
+93/479373/campos_512_v4
+93/479380/campos_512_v4
+93/479390/campos_512_v4
+93/479391/campos_512_v4
+93/479396/campos_512_v4
+93/479400/campos_512_v4
+93/479402/campos_512_v4
+93/479415/campos_512_v4
+93/479417/campos_512_v4
+93/479419/campos_512_v4
+93/479434/campos_512_v4
+93/479449/campos_512_v4
+93/479469/campos_512_v4
+93/479475/campos_512_v4
+93/479493/campos_512_v4
+93/479498/campos_512_v4
+93/479510/campos_512_v4
+93/479515/campos_512_v4
+93/479518/campos_512_v4
+93/479534/campos_512_v4
+93/479544/campos_512_v4
+93/479558/campos_512_v4
+93/479572/campos_512_v4
+93/479576/campos_512_v4
+93/479579/campos_512_v4
+93/479604/campos_512_v4
+93/479617/campos_512_v4
+93/479621/campos_512_v4
+93/479624/campos_512_v4
+93/479626/campos_512_v4
+93/479650/campos_512_v4
+93/479656/campos_512_v4
+93/479659/campos_512_v4
+93/479661/campos_512_v4
+93/479662/campos_512_v4
+93/479676/campos_512_v4
+93/479682/campos_512_v4
+93/479697/campos_512_v4
+93/479700/campos_512_v4
+93/479708/campos_512_v4
+93/479710/campos_512_v4
+93/479714/campos_512_v4
+93/479718/campos_512_v4
+93/479756/campos_512_v4
+93/479757/campos_512_v4
+93/479759/campos_512_v4
+93/479760/campos_512_v4
+93/479782/campos_512_v4
+93/479806/campos_512_v4
+93/479812/campos_512_v4
+93/479818/campos_512_v4
+93/479823/campos_512_v4
+93/479847/campos_512_v4
+93/479850/campos_512_v4
+93/479856/campos_512_v4
+93/479859/campos_512_v4
+93/479878/campos_512_v4
+93/479879/campos_512_v4
+93/479887/campos_512_v4
+93/479893/campos_512_v4
+93/479902/campos_512_v4
+93/479909/campos_512_v4
+93/479913/campos_512_v4
+93/479917/campos_512_v4
+93/479920/campos_512_v4
+93/479929/campos_512_v4
+93/479930/campos_512_v4
+93/479942/campos_512_v4
+93/479945/campos_512_v4
+93/479947/campos_512_v4
+93/479960/campos_512_v4
+93/479968/campos_512_v4
+93/479972/campos_512_v4
+93/479973/campos_512_v4
+93/479979/campos_512_v4
+93/479988/campos_512_v4
+94/480034/campos_512_v4
+94/480050/campos_512_v4
+94/480052/campos_512_v4
+94/480067/campos_512_v4
+94/480074/campos_512_v4
+94/480080/campos_512_v4
+94/480089/campos_512_v4
+94/480091/campos_512_v4
+94/480096/campos_512_v4
+94/480107/campos_512_v4
+94/480114/campos_512_v4
+94/480117/campos_512_v4
+94/480122/campos_512_v4
+94/480130/campos_512_v4
+94/480135/campos_512_v4
+94/480139/campos_512_v4
+94/480153/campos_512_v4
+94/480159/campos_512_v4
+94/480165/campos_512_v4
+94/480168/campos_512_v4
+94/480170/campos_512_v4
+94/480171/campos_512_v4
+94/480177/campos_512_v4
+94/480180/campos_512_v4
+94/480197/campos_512_v4
+94/480198/campos_512_v4
+94/480201/campos_512_v4
+94/480204/campos_512_v4
+94/480214/campos_512_v4
+94/480216/campos_512_v4
+94/480225/campos_512_v4
+94/480232/campos_512_v4
+94/480241/campos_512_v4
+94/480244/campos_512_v4
+94/480247/campos_512_v4
+94/480252/campos_512_v4
+94/480263/campos_512_v4
+94/480267/campos_512_v4
+94/480272/campos_512_v4
+94/480273/campos_512_v4
+94/480279/campos_512_v4
+94/480283/campos_512_v4
+94/480289/campos_512_v4
+94/480300/campos_512_v4
+94/480312/campos_512_v4
+94/480321/campos_512_v4
+94/480323/campos_512_v4
+94/480327/campos_512_v4
+94/480330/campos_512_v4
+94/480339/campos_512_v4
+94/480374/campos_512_v4
+94/480378/campos_512_v4
+94/480379/campos_512_v4
+94/480381/campos_512_v4
+94/480391/campos_512_v4
+94/480393/campos_512_v4
+94/480398/campos_512_v4
+94/480406/campos_512_v4
+94/480407/campos_512_v4
+94/480417/campos_512_v4
+94/480425/campos_512_v4
+94/480444/campos_512_v4
+94/480469/campos_512_v4
+94/480472/campos_512_v4
+94/480475/campos_512_v4
+94/480488/campos_512_v4
+94/480489/campos_512_v4
+94/480492/campos_512_v4
+94/480498/campos_512_v4
+94/480503/campos_512_v4
+94/480508/campos_512_v4
+94/480526/campos_512_v4
+94/480532/campos_512_v4
+94/480535/campos_512_v4
+94/480544/campos_512_v4
+94/480549/campos_512_v4
+94/480553/campos_512_v4
+94/480562/campos_512_v4
+94/480563/campos_512_v4
+94/480569/campos_512_v4
+94/480573/campos_512_v4
+94/480574/campos_512_v4
+94/480578/campos_512_v4
+94/480579/campos_512_v4
+94/480598/campos_512_v4
+94/480602/campos_512_v4
+94/480605/campos_512_v4
+94/480613/campos_512_v4
+94/480626/campos_512_v4
+94/480632/campos_512_v4
+94/480638/campos_512_v4
+94/480644/campos_512_v4
+94/480670/campos_512_v4
+94/480671/campos_512_v4
+94/480677/campos_512_v4
+94/480678/campos_512_v4
+94/480681/campos_512_v4
+94/480682/campos_512_v4
+94/480683/campos_512_v4
+94/480687/campos_512_v4
+94/480695/campos_512_v4
+94/480708/campos_512_v4
+94/480715/campos_512_v4
+94/480732/campos_512_v4
+94/480748/campos_512_v4
+94/480751/campos_512_v4
+94/480753/campos_512_v4
+94/480776/campos_512_v4
+94/480785/campos_512_v4
+94/480804/campos_512_v4
+94/480806/campos_512_v4
+94/480811/campos_512_v4
+94/480815/campos_512_v4
+94/480823/campos_512_v4
+94/480840/campos_512_v4
+94/480844/campos_512_v4
+94/480848/campos_512_v4
+94/480860/campos_512_v4
+94/480861/campos_512_v4
+94/480876/campos_512_v4
+94/480877/campos_512_v4
+94/480884/campos_512_v4
+94/480885/campos_512_v4
+94/480895/campos_512_v4
+94/480896/campos_512_v4
+94/480899/campos_512_v4
+94/480903/campos_512_v4
+94/480905/campos_512_v4
+94/480908/campos_512_v4
+94/480923/campos_512_v4
+94/480934/campos_512_v4
+94/480938/campos_512_v4
+94/480940/campos_512_v4
+94/480942/campos_512_v4
+94/480949/campos_512_v4
+94/480963/campos_512_v4
+94/480979/campos_512_v4
+94/480995/campos_512_v4
+94/481001/campos_512_v4
+94/481003/campos_512_v4
+94/481015/campos_512_v4
+94/481023/campos_512_v4
+94/481025/campos_512_v4
+94/481033/campos_512_v4
+94/481037/campos_512_v4
+94/481086/campos_512_v4
+94/481087/campos_512_v4
+94/481100/campos_512_v4
+94/481101/campos_512_v4
+94/481104/campos_512_v4
+94/481105/campos_512_v4
+94/481111/campos_512_v4
+94/481125/campos_512_v4
+94/481126/campos_512_v4
+94/481142/campos_512_v4
+94/481153/campos_512_v4
+94/481163/campos_512_v4
+94/481174/campos_512_v4
+94/481180/campos_512_v4
+94/481189/campos_512_v4
+94/481190/campos_512_v4
+94/481200/campos_512_v4
+94/481202/campos_512_v4
+94/481203/campos_512_v4
+94/481204/campos_512_v4
+94/481209/campos_512_v4
+94/481219/campos_512_v4
+94/481221/campos_512_v4
+94/481246/campos_512_v4
+94/481254/campos_512_v4
+94/481257/campos_512_v4
+94/481259/campos_512_v4
+94/481268/campos_512_v4
+94/481277/campos_512_v4
+94/481278/campos_512_v4
+94/481279/campos_512_v4
+94/481293/campos_512_v4
+94/481296/campos_512_v4
+94/481298/campos_512_v4
+94/481305/campos_512_v4
+94/481307/campos_512_v4
+94/481312/campos_512_v4
+94/481314/campos_512_v4
+94/481315/campos_512_v4
+94/481323/campos_512_v4
+94/481332/campos_512_v4
+94/481333/campos_512_v4
+94/481334/campos_512_v4
+94/481336/campos_512_v4
+94/481344/campos_512_v4
+94/481359/campos_512_v4
+94/481370/campos_512_v4
+94/481378/campos_512_v4
+94/481382/campos_512_v4
+94/481390/campos_512_v4
+94/481395/campos_512_v4
+94/481396/campos_512_v4
+94/481398/campos_512_v4
+94/481401/campos_512_v4
+94/481402/campos_512_v4
+94/481403/campos_512_v4
+94/481406/campos_512_v4
+94/481409/campos_512_v4
+94/481412/campos_512_v4
+94/481413/campos_512_v4
+94/481418/campos_512_v4
+94/481433/campos_512_v4
+94/481436/campos_512_v4
+94/481439/campos_512_v4
+94/481446/campos_512_v4
+94/481452/campos_512_v4
+94/481455/campos_512_v4
+94/481456/campos_512_v4
+94/481458/campos_512_v4
+94/481465/campos_512_v4
+94/481469/campos_512_v4
+94/481472/campos_512_v4
+94/481478/campos_512_v4
+94/481486/campos_512_v4
+94/481503/campos_512_v4
+94/481507/campos_512_v4
+94/481508/campos_512_v4
+94/481516/campos_512_v4
+94/481519/campos_512_v4
+94/481520/campos_512_v4
+94/481531/campos_512_v4
+94/481534/campos_512_v4
+94/481536/campos_512_v4
+94/481539/campos_512_v4
+94/481543/campos_512_v4
+94/481544/campos_512_v4
+94/481555/campos_512_v4
+94/481556/campos_512_v4
+94/481558/campos_512_v4
+94/481577/campos_512_v4
+94/481580/campos_512_v4
+94/481581/campos_512_v4
+94/481593/campos_512_v4
+94/481613/campos_512_v4
+94/481616/campos_512_v4
+94/481625/campos_512_v4
+94/481637/campos_512_v4
+94/481638/campos_512_v4
+94/481640/campos_512_v4
+94/481650/campos_512_v4
+94/481652/campos_512_v4
+94/481669/campos_512_v4
+94/481675/campos_512_v4
+94/481679/campos_512_v4
+94/481711/campos_512_v4
+94/481712/campos_512_v4
+94/481715/campos_512_v4
+94/481717/campos_512_v4
+94/481719/campos_512_v4
+94/481722/campos_512_v4
+94/481741/campos_512_v4
+94/481750/campos_512_v4
+94/481770/campos_512_v4
+94/481777/campos_512_v4
+94/481786/campos_512_v4
+94/481797/campos_512_v4
+94/481799/campos_512_v4
+94/481819/campos_512_v4
+94/481820/campos_512_v4
+94/481827/campos_512_v4
+94/481837/campos_512_v4
+94/481849/campos_512_v4
+94/481850/campos_512_v4
+94/481851/campos_512_v4
+94/481854/campos_512_v4
+94/481857/campos_512_v4
+94/481862/campos_512_v4
+94/481863/campos_512_v4
+94/481870/campos_512_v4
+94/481896/campos_512_v4
+94/481903/campos_512_v4
+94/481905/campos_512_v4
+94/481914/campos_512_v4
+94/481918/campos_512_v4
+94/481926/campos_512_v4
+94/481928/campos_512_v4
+94/481932/campos_512_v4
+94/481938/campos_512_v4
+94/481941/campos_512_v4
+94/481942/campos_512_v4
+94/481944/campos_512_v4
+94/481947/campos_512_v4
+94/481979/campos_512_v4
+94/481984/campos_512_v4
+94/481987/campos_512_v4
+94/481996/campos_512_v4
+94/482016/campos_512_v4
+94/482020/campos_512_v4
+94/482021/campos_512_v4
+94/482024/campos_512_v4
+94/482026/campos_512_v4
+94/482051/campos_512_v4
+94/482052/campos_512_v4
+94/482059/campos_512_v4
+94/482081/campos_512_v4
+94/482084/campos_512_v4
+94/482115/campos_512_v4
+94/482121/campos_512_v4
+94/482126/campos_512_v4
+94/482128/campos_512_v4
+94/482131/campos_512_v4
+94/482134/campos_512_v4
+94/482139/campos_512_v4
+94/482143/campos_512_v4
+94/482147/campos_512_v4
+94/482151/campos_512_v4
+94/482153/campos_512_v4
+94/482154/campos_512_v4
+94/482158/campos_512_v4
+94/482161/campos_512_v4
+94/482164/campos_512_v4
+94/482179/campos_512_v4
+94/482188/campos_512_v4
+94/482202/campos_512_v4
+94/482204/campos_512_v4
+94/482205/campos_512_v4
+94/482207/campos_512_v4
+94/482215/campos_512_v4
+94/482217/campos_512_v4
+94/482221/campos_512_v4
+94/482223/campos_512_v4
+94/482227/campos_512_v4
+94/482231/campos_512_v4
+94/482232/campos_512_v4
+94/482237/campos_512_v4
+94/482256/campos_512_v4
+94/482280/campos_512_v4
+94/482284/campos_512_v4
+94/482291/campos_512_v4
+94/482294/campos_512_v4
+94/482302/campos_512_v4
+94/482304/campos_512_v4
+94/482311/campos_512_v4
+94/482312/campos_512_v4
+94/482339/campos_512_v4
+94/482346/campos_512_v4
+94/482368/campos_512_v4
+94/482370/campos_512_v4
+94/482381/campos_512_v4
+94/482397/campos_512_v4
+94/482405/campos_512_v4
+94/482409/campos_512_v4
+94/482428/campos_512_v4
+94/482435/campos_512_v4
+94/482440/campos_512_v4
+94/482442/campos_512_v4
+94/482445/campos_512_v4
+94/482450/campos_512_v4
+94/482454/campos_512_v4
+94/482467/campos_512_v4
+94/482484/campos_512_v4
+94/482485/campos_512_v4
+94/482503/campos_512_v4
+94/482509/campos_512_v4
+94/482520/campos_512_v4
+94/482532/campos_512_v4
+94/482540/campos_512_v4
+94/482546/campos_512_v4
+94/482550/campos_512_v4
+94/482574/campos_512_v4
+94/482612/campos_512_v4
+94/482623/campos_512_v4
+94/482630/campos_512_v4
+94/482632/campos_512_v4
+94/482637/campos_512_v4
+94/482642/campos_512_v4
+94/482654/campos_512_v4
+94/482660/campos_512_v4
+94/482662/campos_512_v4
+94/482663/campos_512_v4
+94/482673/campos_512_v4
+94/482680/campos_512_v4
+94/482683/campos_512_v4
+94/482694/campos_512_v4
+94/482713/campos_512_v4
+94/482723/campos_512_v4
+94/482729/campos_512_v4
+94/482750/campos_512_v4
+94/482752/campos_512_v4
+94/482757/campos_512_v4
+94/482762/campos_512_v4
+94/482777/campos_512_v4
+94/482792/campos_512_v4
+94/482795/campos_512_v4
+94/482797/campos_512_v4
+94/482798/campos_512_v4
+94/482806/campos_512_v4
+94/482815/campos_512_v4
+94/482822/campos_512_v4
+94/482830/campos_512_v4
+94/482848/campos_512_v4
+94/482865/campos_512_v4
+94/482869/campos_512_v4
+94/482870/campos_512_v4
+94/482873/campos_512_v4
+94/482879/campos_512_v4
+94/482880/campos_512_v4
+94/482889/campos_512_v4
+94/482915/campos_512_v4
+94/482920/campos_512_v4
+94/482928/campos_512_v4
+94/482941/campos_512_v4
+94/482988/campos_512_v4
+94/482997/campos_512_v4
+94/483015/campos_512_v4
+94/483020/campos_512_v4
+94/483024/campos_512_v4
+94/483035/campos_512_v4
+94/483046/campos_512_v4
+94/483055/campos_512_v4
+94/483060/campos_512_v4
+94/483070/campos_512_v4
+94/483074/campos_512_v4
+94/483089/campos_512_v4
+94/483091/campos_512_v4
+94/483093/campos_512_v4
+94/483096/campos_512_v4
+94/483098/campos_512_v4
+94/483102/campos_512_v4
+94/483108/campos_512_v4
+94/483112/campos_512_v4
+94/483121/campos_512_v4
+94/483137/campos_512_v4
+94/483139/campos_512_v4
+94/483144/campos_512_v4
+94/483160/campos_512_v4
+94/483195/campos_512_v4
+94/483203/campos_512_v4
+94/483206/campos_512_v4
+94/483219/campos_512_v4
+94/483234/campos_512_v4
+94/483242/campos_512_v4
+94/483244/campos_512_v4
+94/483257/campos_512_v4
+94/483269/campos_512_v4
+94/483274/campos_512_v4
+94/483289/campos_512_v4
+94/483293/campos_512_v4
+94/483294/campos_512_v4
+94/483297/campos_512_v4
+94/483305/campos_512_v4
+94/483319/campos_512_v4
+94/483345/campos_512_v4
+94/483349/campos_512_v4
+94/483356/campos_512_v4
+94/483365/campos_512_v4
+94/483367/campos_512_v4
+94/483373/campos_512_v4
+94/483381/campos_512_v4
+94/483382/campos_512_v4
+94/483394/campos_512_v4
+94/483401/campos_512_v4
+94/483405/campos_512_v4
+94/483410/campos_512_v4
+94/483412/campos_512_v4
+94/483444/campos_512_v4
+94/483446/campos_512_v4
+94/483453/campos_512_v4
+94/483461/campos_512_v4
+94/483465/campos_512_v4
+94/483480/campos_512_v4
+94/483482/campos_512_v4
+94/483487/campos_512_v4
+94/483496/campos_512_v4
+94/483510/campos_512_v4
+94/483514/campos_512_v4
+94/483515/campos_512_v4
+94/483519/campos_512_v4
+94/483522/campos_512_v4
+94/483524/campos_512_v4
+94/483527/campos_512_v4
+94/483533/campos_512_v4
+94/483542/campos_512_v4
+94/483557/campos_512_v4
+94/483583/campos_512_v4
+94/483584/campos_512_v4
+94/483594/campos_512_v4
+94/483603/campos_512_v4
+94/483606/campos_512_v4
+94/483616/campos_512_v4
+94/483624/campos_512_v4
+94/483651/campos_512_v4
+94/483664/campos_512_v4
+94/483675/campos_512_v4
+94/483679/campos_512_v4
+94/483682/campos_512_v4
+94/483695/campos_512_v4
+94/483697/campos_512_v4
+94/483699/campos_512_v4
+94/483707/campos_512_v4
+94/483714/campos_512_v4
+94/483719/campos_512_v4
+94/483723/campos_512_v4
+94/483724/campos_512_v4
+94/483748/campos_512_v4
+94/483751/campos_512_v4
+94/483754/campos_512_v4
+94/483765/campos_512_v4
+94/483770/campos_512_v4
+94/483774/campos_512_v4
+94/483777/campos_512_v4
+94/483781/campos_512_v4
+94/483784/campos_512_v4
+94/483797/campos_512_v4
+94/483803/campos_512_v4
+94/483813/campos_512_v4
+94/483824/campos_512_v4
+94/483861/campos_512_v4
+94/483870/campos_512_v4
+94/483875/campos_512_v4
+94/483878/campos_512_v4
+94/483881/campos_512_v4
+94/483889/campos_512_v4
+94/483899/campos_512_v4
+94/483909/campos_512_v4
+94/483912/campos_512_v4
+94/483926/campos_512_v4
+94/483927/campos_512_v4
+94/483933/campos_512_v4
+94/483937/campos_512_v4
+94/483945/campos_512_v4
+94/483946/campos_512_v4
+94/483947/campos_512_v4
+94/483952/campos_512_v4
+94/483955/campos_512_v4
+94/483959/campos_512_v4
+94/483962/campos_512_v4
+94/483964/campos_512_v4
+94/483968/campos_512_v4
+94/483978/campos_512_v4
+94/483983/campos_512_v4
+94/483989/campos_512_v4
+94/484013/campos_512_v4
+94/484014/campos_512_v4
+94/484021/campos_512_v4
+94/484024/campos_512_v4
+94/484034/campos_512_v4
+94/484035/campos_512_v4
+94/484041/campos_512_v4
+94/484042/campos_512_v4
+94/484053/campos_512_v4
+94/484061/campos_512_v4
+94/484071/campos_512_v4
+94/484072/campos_512_v4
+94/484105/campos_512_v4
+94/484124/campos_512_v4
+94/484125/campos_512_v4
+94/484127/campos_512_v4
+94/484128/campos_512_v4
+94/484144/campos_512_v4
+94/484147/campos_512_v4
+94/484156/campos_512_v4
+94/484157/campos_512_v4
+94/484160/campos_512_v4
+94/484164/campos_512_v4
+94/484165/campos_512_v4
+94/484175/campos_512_v4
+94/484178/campos_512_v4
+94/484182/campos_512_v4
+94/484188/campos_512_v4
+94/484193/campos_512_v4
+94/484198/campos_512_v4
+94/484200/campos_512_v4
+94/484207/campos_512_v4
+94/484211/campos_512_v4
+94/484222/campos_512_v4
+94/484228/campos_512_v4
+94/484239/campos_512_v4
+94/484240/campos_512_v4
+94/484250/campos_512_v4
+94/484257/campos_512_v4
+94/484260/campos_512_v4
+94/484264/campos_512_v4
+94/484276/campos_512_v4
+94/484282/campos_512_v4
+94/484285/campos_512_v4
+94/484286/campos_512_v4
+94/484288/campos_512_v4
+94/484299/campos_512_v4
+94/484311/campos_512_v4
+94/484315/campos_512_v4
+94/484329/campos_512_v4
+94/484333/campos_512_v4
+94/484340/campos_512_v4
+94/484346/campos_512_v4
+94/484348/campos_512_v4
+94/484377/campos_512_v4
+94/484397/campos_512_v4
+94/484411/campos_512_v4
+94/484417/campos_512_v4
+94/484426/campos_512_v4
+94/484428/campos_512_v4
+94/484434/campos_512_v4
+94/484439/campos_512_v4
+94/484444/campos_512_v4
+94/484450/campos_512_v4
+94/484456/campos_512_v4
+94/484469/campos_512_v4
+94/484485/campos_512_v4
+94/484487/campos_512_v4
+94/484496/campos_512_v4
+94/484505/campos_512_v4
+94/484515/campos_512_v4
+94/484525/campos_512_v4
+94/484538/campos_512_v4
+94/484550/campos_512_v4
+94/484556/campos_512_v4
+94/484559/campos_512_v4
+94/484576/campos_512_v4
+94/484579/campos_512_v4
+94/484588/campos_512_v4
+94/484603/campos_512_v4
+94/484606/campos_512_v4
+94/484616/campos_512_v4
+94/484617/campos_512_v4
+94/484631/campos_512_v4
+94/484633/campos_512_v4
+94/484636/campos_512_v4
+94/484640/campos_512_v4
+94/484647/campos_512_v4
+94/484662/campos_512_v4
+94/484667/campos_512_v4
+94/484668/campos_512_v4
+94/484679/campos_512_v4
+94/484684/campos_512_v4
+94/484685/campos_512_v4
+94/484695/campos_512_v4
+94/484699/campos_512_v4
+94/484707/campos_512_v4
+94/484708/campos_512_v4
+94/484710/campos_512_v4
+94/484715/campos_512_v4
+94/484734/campos_512_v4
+94/484742/campos_512_v4
+94/484743/campos_512_v4
+94/484748/campos_512_v4
+94/484762/campos_512_v4
+94/484769/campos_512_v4
+94/484783/campos_512_v4
+94/484784/campos_512_v4
+94/484805/campos_512_v4
+94/484806/campos_512_v4
+94/484807/campos_512_v4
+94/484814/campos_512_v4
+94/484821/campos_512_v4
+94/484823/campos_512_v4
+94/484825/campos_512_v4
+94/484826/campos_512_v4
+94/484830/campos_512_v4
+94/484833/campos_512_v4
+94/484840/campos_512_v4
+94/484849/campos_512_v4
+94/484850/campos_512_v4
+94/484861/campos_512_v4
+94/484869/campos_512_v4
+94/484887/campos_512_v4
+94/484892/campos_512_v4
+94/484905/campos_512_v4
+94/484921/campos_512_v4
+94/484925/campos_512_v4
+94/484933/campos_512_v4
+94/484943/campos_512_v4
+94/484944/campos_512_v4
+94/484947/campos_512_v4
+94/484950/campos_512_v4
+94/484951/campos_512_v4
+94/484966/campos_512_v4
+94/484969/campos_512_v4
+94/484973/campos_512_v4
+94/484984/campos_512_v4
+94/484988/campos_512_v4
+94/484989/campos_512_v4
+94/484993/campos_512_v4
+94/484994/campos_512_v4
+94/484999/campos_512_v4
+95/485032/campos_512_v4
+95/485034/campos_512_v4
+95/485057/campos_512_v4
+95/485059/campos_512_v4
+95/485083/campos_512_v4
+95/485087/campos_512_v4
+95/485091/campos_512_v4
+95/485097/campos_512_v4
+95/485108/campos_512_v4
+95/485109/campos_512_v4
+95/485114/campos_512_v4
+95/485124/campos_512_v4
+95/485132/campos_512_v4
+95/485146/campos_512_v4
+95/485147/campos_512_v4
+95/485148/campos_512_v4
+95/485159/campos_512_v4
+95/485163/campos_512_v4
+95/485182/campos_512_v4
+95/485207/campos_512_v4
+95/485208/campos_512_v4
+95/485213/campos_512_v4
+95/485214/campos_512_v4
+95/485220/campos_512_v4
+95/485223/campos_512_v4
+95/485241/campos_512_v4
+95/485243/campos_512_v4
+95/485249/campos_512_v4
+95/485254/campos_512_v4
+95/485258/campos_512_v4
+95/485261/campos_512_v4
+95/485268/campos_512_v4
+95/485273/campos_512_v4
+95/485274/campos_512_v4
+95/485280/campos_512_v4
+95/485285/campos_512_v4
+95/485287/campos_512_v4
+95/485290/campos_512_v4
+95/485294/campos_512_v4
+95/485308/campos_512_v4
+95/485309/campos_512_v4
+95/485316/campos_512_v4
+95/485317/campos_512_v4
+95/485320/campos_512_v4
+95/485326/campos_512_v4
+95/485331/campos_512_v4
+95/485332/campos_512_v4
+95/485354/campos_512_v4
+95/485357/campos_512_v4
+95/485358/campos_512_v4
+95/485365/campos_512_v4
+95/485370/campos_512_v4
+95/485396/campos_512_v4
+95/485398/campos_512_v4
+95/485406/campos_512_v4
+95/485416/campos_512_v4
+95/485417/campos_512_v4
+95/485419/campos_512_v4
+95/485420/campos_512_v4
+95/485426/campos_512_v4
+95/485429/campos_512_v4
+95/485439/campos_512_v4
+95/485442/campos_512_v4
+95/485450/campos_512_v4
+95/485465/campos_512_v4
+95/485469/campos_512_v4
+95/485470/campos_512_v4
+95/485474/campos_512_v4
+95/485478/campos_512_v4
+95/485479/campos_512_v4
+95/485491/campos_512_v4
+95/485495/campos_512_v4
+95/485522/campos_512_v4
+95/485528/campos_512_v4
+95/485530/campos_512_v4
+95/485534/campos_512_v4
+95/485547/campos_512_v4
+95/485549/campos_512_v4
+95/485571/campos_512_v4
+95/485575/campos_512_v4
+95/485584/campos_512_v4
+95/485587/campos_512_v4
+95/485597/campos_512_v4
+95/485608/campos_512_v4
+95/485613/campos_512_v4
+95/485619/campos_512_v4
+95/485622/campos_512_v4
+95/485627/campos_512_v4
+95/485634/campos_512_v4
+95/485646/campos_512_v4
+95/485649/campos_512_v4
+95/485661/campos_512_v4
+95/485670/campos_512_v4
+95/485683/campos_512_v4
+95/485686/campos_512_v4
+95/485694/campos_512_v4
+95/485735/campos_512_v4
+95/485741/campos_512_v4
+95/485742/campos_512_v4
+95/485748/campos_512_v4
+95/485753/campos_512_v4
+95/485779/campos_512_v4
+95/485782/campos_512_v4
+95/485807/campos_512_v4
+95/485818/campos_512_v4
+95/485830/campos_512_v4
+95/485836/campos_512_v4
+95/485846/campos_512_v4
+95/485852/campos_512_v4
+95/485853/campos_512_v4
+95/485855/campos_512_v4
+95/485856/campos_512_v4
+95/485861/campos_512_v4
+95/485883/campos_512_v4
+95/485891/campos_512_v4
+95/485892/campos_512_v4
+95/485912/campos_512_v4
+95/485923/campos_512_v4
+95/485939/campos_512_v4
+95/485940/campos_512_v4
+95/485947/campos_512_v4
+95/485950/campos_512_v4
+95/485951/campos_512_v4
+95/485954/campos_512_v4
+95/485958/campos_512_v4
+95/485962/campos_512_v4
+95/485966/campos_512_v4
+95/485967/campos_512_v4
+95/485968/campos_512_v4
+95/485979/campos_512_v4
+95/486005/campos_512_v4
+95/486034/campos_512_v4
+95/486043/campos_512_v4
+95/486044/campos_512_v4
+95/486048/campos_512_v4
+95/486050/campos_512_v4
+95/486062/campos_512_v4
+95/486078/campos_512_v4
+95/486079/campos_512_v4
+95/486084/campos_512_v4
+95/486090/campos_512_v4
+95/486091/campos_512_v4
+95/486104/campos_512_v4
+95/486106/campos_512_v4
+95/486118/campos_512_v4
+95/486122/campos_512_v4
+95/486135/campos_512_v4
+95/486137/campos_512_v4
+95/486138/campos_512_v4
+95/486140/campos_512_v4
+95/486144/campos_512_v4
+95/486157/campos_512_v4
+95/486162/campos_512_v4
+95/486166/campos_512_v4
+95/486173/campos_512_v4
+95/486174/campos_512_v4
+95/486175/campos_512_v4
+95/486185/campos_512_v4
+95/486189/campos_512_v4
+95/486190/campos_512_v4
+95/486191/campos_512_v4
+95/486194/campos_512_v4
+95/486200/campos_512_v4
+95/486230/campos_512_v4
+95/486231/campos_512_v4
+95/486240/campos_512_v4
+95/486242/campos_512_v4
+95/486246/campos_512_v4
+95/486249/campos_512_v4
+95/486253/campos_512_v4
+95/486254/campos_512_v4
+95/486262/campos_512_v4
+95/486274/campos_512_v4
+95/486276/campos_512_v4
+95/486283/campos_512_v4
+95/486286/campos_512_v4
+95/486310/campos_512_v4
+95/486315/campos_512_v4
+95/486318/campos_512_v4
+95/486326/campos_512_v4
+95/486328/campos_512_v4
+95/486331/campos_512_v4
+95/486344/campos_512_v4
+95/486353/campos_512_v4
+95/486355/campos_512_v4
+95/486362/campos_512_v4
+95/486381/campos_512_v4
+95/486384/campos_512_v4
+95/486391/campos_512_v4
+95/486396/campos_512_v4
+95/486400/campos_512_v4
+95/486406/campos_512_v4
+95/486413/campos_512_v4
+95/486417/campos_512_v4
+95/486423/campos_512_v4
+95/486425/campos_512_v4
+95/486438/campos_512_v4
+95/486452/campos_512_v4
+95/486470/campos_512_v4
+95/486485/campos_512_v4
+95/486495/campos_512_v4
+95/486499/campos_512_v4
+95/486504/campos_512_v4
+95/486505/campos_512_v4
+95/486510/campos_512_v4
+95/486511/campos_512_v4
+95/486532/campos_512_v4
+95/486534/campos_512_v4
+95/486540/campos_512_v4
+95/486541/campos_512_v4
+95/486543/campos_512_v4
+95/486550/campos_512_v4
+95/486553/campos_512_v4
+95/486576/campos_512_v4
+95/486585/campos_512_v4
+95/486594/campos_512_v4
+95/486595/campos_512_v4
+95/486605/campos_512_v4
+95/486614/campos_512_v4
+95/486620/campos_512_v4
+95/486628/campos_512_v4
+95/486634/campos_512_v4
+95/486641/campos_512_v4
+95/486643/campos_512_v4
+95/486657/campos_512_v4
+95/486665/campos_512_v4
+95/486682/campos_512_v4
+95/486690/campos_512_v4
+95/486695/campos_512_v4
+95/486696/campos_512_v4
+95/486721/campos_512_v4
+95/486722/campos_512_v4
+95/486729/campos_512_v4
+95/486737/campos_512_v4
+95/486742/campos_512_v4
+95/486743/campos_512_v4
+95/486744/campos_512_v4
+95/486752/campos_512_v4
+95/486757/campos_512_v4
+95/486760/campos_512_v4
+95/486761/campos_512_v4
+95/486772/campos_512_v4
+95/486776/campos_512_v4
+95/486777/campos_512_v4
+95/486778/campos_512_v4
+95/486780/campos_512_v4
+95/486782/campos_512_v4
+95/486783/campos_512_v4
+95/486787/campos_512_v4
+95/486792/campos_512_v4
+95/486796/campos_512_v4
+95/486797/campos_512_v4
+95/486799/campos_512_v4
+95/486802/campos_512_v4
+95/486810/campos_512_v4
+95/486811/campos_512_v4
+95/486815/campos_512_v4
+95/486820/campos_512_v4
+95/486824/campos_512_v4
+95/486851/campos_512_v4
+95/486852/campos_512_v4
+95/486880/campos_512_v4
+95/486884/campos_512_v4
+95/486887/campos_512_v4
+95/486888/campos_512_v4
+95/486892/campos_512_v4
+95/486905/campos_512_v4
+95/486910/campos_512_v4
+95/486916/campos_512_v4
+95/486922/campos_512_v4
+95/486925/campos_512_v4
+95/486931/campos_512_v4
+95/486934/campos_512_v4
+95/486942/campos_512_v4
+95/486954/campos_512_v4
+95/486959/campos_512_v4
+95/486967/campos_512_v4
+95/486975/campos_512_v4
+95/486985/campos_512_v4
+95/486986/campos_512_v4
+95/486989/campos_512_v4
+95/487003/campos_512_v4
+95/487021/campos_512_v4
+95/487023/campos_512_v4
+95/487032/campos_512_v4
+95/487033/campos_512_v4
+95/487036/campos_512_v4
+95/487040/campos_512_v4
+95/487045/campos_512_v4
+95/487056/campos_512_v4
+95/487057/campos_512_v4
+95/487078/campos_512_v4
+95/487084/campos_512_v4
+95/487114/campos_512_v4
+95/487122/campos_512_v4
+95/487123/campos_512_v4
+95/487128/campos_512_v4
+95/487143/campos_512_v4
+95/487144/campos_512_v4
+95/487156/campos_512_v4
+95/487160/campos_512_v4
+95/487186/campos_512_v4
+95/487190/campos_512_v4
+95/487211/campos_512_v4
+95/487218/campos_512_v4
+95/487226/campos_512_v4
+95/487232/campos_512_v4
+95/487239/campos_512_v4
+95/487246/campos_512_v4
+95/487259/campos_512_v4
+95/487260/campos_512_v4
+95/487266/campos_512_v4
+95/487276/campos_512_v4
+95/487277/campos_512_v4
+95/487284/campos_512_v4
+95/487286/campos_512_v4
+95/487287/campos_512_v4
+95/487293/campos_512_v4
+95/487299/campos_512_v4
+95/487319/campos_512_v4
+95/487320/campos_512_v4
+95/487340/campos_512_v4
+95/487348/campos_512_v4
+95/487357/campos_512_v4
+95/487358/campos_512_v4
+95/487383/campos_512_v4
+95/487389/campos_512_v4
+95/487394/campos_512_v4
+95/487398/campos_512_v4
+95/487401/campos_512_v4
+95/487423/campos_512_v4
+95/487427/campos_512_v4
+95/487428/campos_512_v4
+95/487430/campos_512_v4
+95/487437/campos_512_v4
+95/487462/campos_512_v4
+95/487463/campos_512_v4
+95/487466/campos_512_v4
+95/487467/campos_512_v4
+95/487472/campos_512_v4
+95/487480/campos_512_v4
+95/487481/campos_512_v4
+95/487507/campos_512_v4
+95/487517/campos_512_v4
+95/487553/campos_512_v4
+95/487568/campos_512_v4
+95/487574/campos_512_v4
+95/487584/campos_512_v4
+95/487600/campos_512_v4
+95/487602/campos_512_v4
+95/487607/campos_512_v4
+95/487624/campos_512_v4
+95/487629/campos_512_v4
+95/487640/campos_512_v4
+95/487660/campos_512_v4
+95/487670/campos_512_v4
+95/487674/campos_512_v4
+95/487682/campos_512_v4
+95/487690/campos_512_v4
+95/487692/campos_512_v4
+95/487696/campos_512_v4
+95/487710/campos_512_v4
+95/487725/campos_512_v4
+95/487730/campos_512_v4
+95/487733/campos_512_v4
+95/487734/campos_512_v4
+95/487745/campos_512_v4
+95/487746/campos_512_v4
+95/487747/campos_512_v4
+95/487754/campos_512_v4
+95/487764/campos_512_v4
+95/487765/campos_512_v4
+95/487772/campos_512_v4
+95/487774/campos_512_v4
+95/487779/campos_512_v4
+95/487789/campos_512_v4
+95/487805/campos_512_v4
+95/487822/campos_512_v4
+95/487823/campos_512_v4
+95/487824/campos_512_v4
+95/487828/campos_512_v4
+95/487835/campos_512_v4
+95/487837/campos_512_v4
+95/487838/campos_512_v4
+95/487840/campos_512_v4
+95/487841/campos_512_v4
+95/487842/campos_512_v4
+95/487847/campos_512_v4
+95/487850/campos_512_v4
+95/487860/campos_512_v4
+95/487881/campos_512_v4
+95/487883/campos_512_v4
+95/487884/campos_512_v4
+95/487885/campos_512_v4
+95/487890/campos_512_v4
+95/487900/campos_512_v4
+95/487901/campos_512_v4
+95/487917/campos_512_v4
+95/487955/campos_512_v4
+95/487974/campos_512_v4
+95/487976/campos_512_v4
+95/488002/campos_512_v4
+95/488015/campos_512_v4
+95/488018/campos_512_v4
+95/488028/campos_512_v4
+95/488046/campos_512_v4
+95/488051/campos_512_v4
+95/488056/campos_512_v4
+95/488067/campos_512_v4
+95/488082/campos_512_v4
+95/488087/campos_512_v4
+95/488105/campos_512_v4
+95/488111/campos_512_v4
+95/488119/campos_512_v4
+95/488131/campos_512_v4
+95/488134/campos_512_v4
+95/488167/campos_512_v4
+95/488184/campos_512_v4
+95/488187/campos_512_v4
+95/488188/campos_512_v4
+95/488193/campos_512_v4
+95/488195/campos_512_v4
+95/488198/campos_512_v4
+95/488217/campos_512_v4
+95/488219/campos_512_v4
+95/488222/campos_512_v4
+95/488224/campos_512_v4
+95/488225/campos_512_v4
+95/488228/campos_512_v4
+95/488232/campos_512_v4
+95/488233/campos_512_v4
+95/488241/campos_512_v4
+95/488242/campos_512_v4
+95/488248/campos_512_v4
+95/488249/campos_512_v4
+95/488253/campos_512_v4
+95/488255/campos_512_v4
+95/488259/campos_512_v4
+95/488262/campos_512_v4
+95/488267/campos_512_v4
+95/488269/campos_512_v4
+95/488282/campos_512_v4
+95/488285/campos_512_v4
+95/488288/campos_512_v4
+95/488289/campos_512_v4
+95/488293/campos_512_v4
+95/488298/campos_512_v4
+95/488300/campos_512_v4
+95/488314/campos_512_v4
+95/488316/campos_512_v4
+95/488319/campos_512_v4
+95/488320/campos_512_v4
+95/488322/campos_512_v4
+95/488342/campos_512_v4
+95/488346/campos_512_v4
+95/488351/campos_512_v4
+95/488354/campos_512_v4
+95/488370/campos_512_v4
+95/488373/campos_512_v4
+95/488375/campos_512_v4
+95/488410/campos_512_v4
+95/488413/campos_512_v4
+95/488426/campos_512_v4
+95/488427/campos_512_v4
+95/488437/campos_512_v4
+95/488453/campos_512_v4
+95/488458/campos_512_v4
+95/488459/campos_512_v4
+95/488463/campos_512_v4
+95/488493/campos_512_v4
+95/488519/campos_512_v4
+95/488524/campos_512_v4
+95/488526/campos_512_v4
+95/488538/campos_512_v4
+95/488541/campos_512_v4
+95/488542/campos_512_v4
+95/488559/campos_512_v4
+95/488564/campos_512_v4
+95/488569/campos_512_v4
+95/488575/campos_512_v4
+95/488576/campos_512_v4
+95/488591/campos_512_v4
+95/488595/campos_512_v4
+95/488611/campos_512_v4
+95/488622/campos_512_v4
+95/488626/campos_512_v4
+95/488654/campos_512_v4
+95/488675/campos_512_v4
+95/488681/campos_512_v4
+95/488688/campos_512_v4
+95/488697/campos_512_v4
+95/488704/campos_512_v4
+95/488705/campos_512_v4
+95/488706/campos_512_v4
+95/488719/campos_512_v4
+95/488729/campos_512_v4
+95/488739/campos_512_v4
+95/488759/campos_512_v4
+95/488764/campos_512_v4
+95/488767/campos_512_v4
+95/488790/campos_512_v4
+95/488800/campos_512_v4
+95/488802/campos_512_v4
+95/488812/campos_512_v4
+95/488813/campos_512_v4
+95/488825/campos_512_v4
+95/488831/campos_512_v4
+95/488835/campos_512_v4
+95/488841/campos_512_v4
+95/488845/campos_512_v4
+95/488848/campos_512_v4
+95/488849/campos_512_v4
+95/488863/campos_512_v4
+95/488865/campos_512_v4
+95/488874/campos_512_v4
+95/488879/campos_512_v4
+95/488888/campos_512_v4
+95/488889/campos_512_v4
+95/488890/campos_512_v4
+95/488894/campos_512_v4
+95/488909/campos_512_v4
+95/488922/campos_512_v4
+95/488924/campos_512_v4
+95/488939/campos_512_v4
+95/488950/campos_512_v4
+95/488972/campos_512_v4
+95/488986/campos_512_v4
+95/488987/campos_512_v4
+95/488989/campos_512_v4
+95/488992/campos_512_v4
+95/489003/campos_512_v4
+95/489006/campos_512_v4
+95/489007/campos_512_v4
+95/489009/campos_512_v4
+95/489012/campos_512_v4
+95/489014/campos_512_v4
+95/489017/campos_512_v4
+95/489021/campos_512_v4
+95/489035/campos_512_v4
+95/489040/campos_512_v4
+95/489057/campos_512_v4
+95/489061/campos_512_v4
+95/489062/campos_512_v4
+95/489076/campos_512_v4
+95/489085/campos_512_v4
+95/489088/campos_512_v4
+95/489090/campos_512_v4
+95/489099/campos_512_v4
+95/489100/campos_512_v4
+95/489103/campos_512_v4
+95/489104/campos_512_v4
+95/489110/campos_512_v4
+95/489147/campos_512_v4
+95/489152/campos_512_v4
+95/489165/campos_512_v4
+95/489166/campos_512_v4
+95/489170/campos_512_v4
+95/489193/campos_512_v4
+95/489195/campos_512_v4
+95/489220/campos_512_v4
+95/489224/campos_512_v4
+95/489227/campos_512_v4
+95/489239/campos_512_v4
+95/489240/campos_512_v4
+95/489246/campos_512_v4
+95/489249/campos_512_v4
+95/489251/campos_512_v4
+95/489253/campos_512_v4
+95/489256/campos_512_v4
+95/489258/campos_512_v4
+95/489264/campos_512_v4
+95/489271/campos_512_v4
+95/489272/campos_512_v4
+95/489280/campos_512_v4
+95/489281/campos_512_v4
+95/489297/campos_512_v4
+95/489299/campos_512_v4
+95/489302/campos_512_v4
+95/489305/campos_512_v4
+95/489306/campos_512_v4
+95/489328/campos_512_v4
+95/489336/campos_512_v4
+95/489339/campos_512_v4
+95/489346/campos_512_v4
+95/489350/campos_512_v4
+95/489366/campos_512_v4
+95/489367/campos_512_v4
+95/489374/campos_512_v4
+95/489376/campos_512_v4
+95/489379/campos_512_v4
+95/489388/campos_512_v4
+95/489390/campos_512_v4
+95/489393/campos_512_v4
+95/489394/campos_512_v4
+95/489411/campos_512_v4
+95/489415/campos_512_v4
+95/489433/campos_512_v4
+95/489437/campos_512_v4
+95/489444/campos_512_v4
+95/489447/campos_512_v4
+95/489462/campos_512_v4
+95/489464/campos_512_v4
+95/489467/campos_512_v4
+95/489504/campos_512_v4
+95/489512/campos_512_v4
+95/489513/campos_512_v4
+95/489528/campos_512_v4
+95/489534/campos_512_v4
+95/489536/campos_512_v4
+95/489540/campos_512_v4
+95/489549/campos_512_v4
+95/489552/campos_512_v4
+95/489556/campos_512_v4
+95/489560/campos_512_v4
+95/489563/campos_512_v4
+95/489570/campos_512_v4
+95/489581/campos_512_v4
+95/489585/campos_512_v4
+95/489591/campos_512_v4
+95/489593/campos_512_v4
+95/489610/campos_512_v4
+95/489611/campos_512_v4
+95/489638/campos_512_v4
+95/489642/campos_512_v4
+95/489656/campos_512_v4
+95/489660/campos_512_v4
+95/489667/campos_512_v4
+95/489670/campos_512_v4
+95/489676/campos_512_v4
+95/489680/campos_512_v4
+95/489687/campos_512_v4
+95/489688/campos_512_v4
+95/489693/campos_512_v4
+95/489699/campos_512_v4
+95/489704/campos_512_v4
+95/489711/campos_512_v4
+95/489726/campos_512_v4
+95/489729/campos_512_v4
+95/489732/campos_512_v4
+95/489733/campos_512_v4
+95/489735/campos_512_v4
+95/489738/campos_512_v4
+95/489762/campos_512_v4
+95/489768/campos_512_v4
+95/489769/campos_512_v4
+95/489770/campos_512_v4
+95/489775/campos_512_v4
+95/489795/campos_512_v4
+95/489799/campos_512_v4
+95/489806/campos_512_v4
+95/489812/campos_512_v4
+95/489819/campos_512_v4
+95/489826/campos_512_v4
+95/489829/campos_512_v4
+95/489833/campos_512_v4
+95/489837/campos_512_v4
+95/489839/campos_512_v4
+95/489855/campos_512_v4
+95/489874/campos_512_v4
+95/489877/campos_512_v4
+95/489888/campos_512_v4
+95/489904/campos_512_v4
+95/489905/campos_512_v4
+95/489906/campos_512_v4
+95/489911/campos_512_v4
+95/489924/campos_512_v4
+95/489929/campos_512_v4
+95/489961/campos_512_v4
+95/489985/campos_512_v4
+95/489986/campos_512_v4
+95/489990/campos_512_v4
+95/489991/campos_512_v4
+95/489997/campos_512_v4
+95/490000/campos_512_v4
+96/490018/campos_512_v4
+96/490023/campos_512_v4
+96/490026/campos_512_v4
+96/490031/campos_512_v4
+96/490033/campos_512_v4
+96/490040/campos_512_v4
+96/490043/campos_512_v4
+96/490044/campos_512_v4
+96/490052/campos_512_v4
+96/490059/campos_512_v4
+96/490065/campos_512_v4
+96/490077/campos_512_v4
+96/490082/campos_512_v4
+96/490086/campos_512_v4
+96/490089/campos_512_v4
+96/490092/campos_512_v4
+96/490098/campos_512_v4
+96/490104/campos_512_v4
+96/490107/campos_512_v4
+96/490109/campos_512_v4
+96/490110/campos_512_v4
+96/490131/campos_512_v4
+96/490134/campos_512_v4
+96/490136/campos_512_v4
+96/490146/campos_512_v4
+96/490187/campos_512_v4
+96/490193/campos_512_v4
+96/490200/campos_512_v4
+96/490212/campos_512_v4
+96/490219/campos_512_v4
+96/490222/campos_512_v4
+96/490233/campos_512_v4
+96/490239/campos_512_v4
+96/490240/campos_512_v4
+96/490267/campos_512_v4
+96/490280/campos_512_v4
+96/490285/campos_512_v4
+96/490287/campos_512_v4
+96/490313/campos_512_v4
+96/490315/campos_512_v4
+96/490316/campos_512_v4
+96/490320/campos_512_v4
+96/490327/campos_512_v4
+96/490350/campos_512_v4
+96/490352/campos_512_v4
+96/490355/campos_512_v4
+96/490375/campos_512_v4
+96/490380/campos_512_v4
+96/490382/campos_512_v4
+96/490385/campos_512_v4
+96/490389/campos_512_v4
+96/490410/campos_512_v4
+96/490411/campos_512_v4
+96/490412/campos_512_v4
+96/490416/campos_512_v4
+96/490434/campos_512_v4
+96/490445/campos_512_v4
+96/490446/campos_512_v4
+96/490458/campos_512_v4
+96/490461/campos_512_v4
+96/490462/campos_512_v4
+96/490469/campos_512_v4
+96/490470/campos_512_v4
+96/490479/campos_512_v4
+96/490481/campos_512_v4
+96/490492/campos_512_v4
+96/490494/campos_512_v4
+96/490496/campos_512_v4
+96/490501/campos_512_v4
+96/490512/campos_512_v4
+96/490513/campos_512_v4
+96/490528/campos_512_v4
+96/490529/campos_512_v4
+96/490531/campos_512_v4
+96/490534/campos_512_v4
+96/490543/campos_512_v4
+96/490546/campos_512_v4
+96/490552/campos_512_v4
+96/490557/campos_512_v4
+96/490563/campos_512_v4
+96/490570/campos_512_v4
+96/490575/campos_512_v4
+96/490576/campos_512_v4
+96/490581/campos_512_v4
+96/490583/campos_512_v4
+96/490587/campos_512_v4
+96/490590/campos_512_v4
+96/490595/campos_512_v4
+96/490605/campos_512_v4
+96/490608/campos_512_v4
+96/490610/campos_512_v4
+96/490642/campos_512_v4
+96/490649/campos_512_v4
+96/490658/campos_512_v4
+96/490660/campos_512_v4
+96/490698/campos_512_v4
+96/490699/campos_512_v4
+96/490705/campos_512_v4
+96/490721/campos_512_v4
+96/490726/campos_512_v4
+96/490741/campos_512_v4
+96/490751/campos_512_v4
+96/490755/campos_512_v4
+96/490756/campos_512_v4
+96/490757/campos_512_v4
+96/490758/campos_512_v4
+96/490760/campos_512_v4
+96/490763/campos_512_v4
+96/490766/campos_512_v4
+96/490770/campos_512_v4
+96/490780/campos_512_v4
+96/490781/campos_512_v4
+96/490791/campos_512_v4
+96/490792/campos_512_v4
+96/490800/campos_512_v4
+96/490813/campos_512_v4
+96/490814/campos_512_v4
+96/490817/campos_512_v4
+96/490820/campos_512_v4
+96/490845/campos_512_v4
+96/490849/campos_512_v4
+96/490851/campos_512_v4
+96/490854/campos_512_v4
+96/490858/campos_512_v4
+96/490860/campos_512_v4
+96/490874/campos_512_v4
+96/490886/campos_512_v4
+96/490891/campos_512_v4
+96/490907/campos_512_v4
+96/490910/campos_512_v4
+96/490912/campos_512_v4
+96/490927/campos_512_v4
+96/490934/campos_512_v4
+96/490936/campos_512_v4
+96/490947/campos_512_v4
+96/490954/campos_512_v4
+96/490955/campos_512_v4
+96/490957/campos_512_v4
+96/490962/campos_512_v4
+96/490963/campos_512_v4
+96/490975/campos_512_v4
+96/490978/campos_512_v4
+96/490996/campos_512_v4
+96/491004/campos_512_v4
+96/491006/campos_512_v4
+96/491013/campos_512_v4
+96/491017/campos_512_v4
+96/491033/campos_512_v4
+96/491048/campos_512_v4
+96/491078/campos_512_v4
+96/491088/campos_512_v4
+96/491091/campos_512_v4
+96/491099/campos_512_v4
+96/491107/campos_512_v4
+96/491115/campos_512_v4
+96/491126/campos_512_v4
+96/491137/campos_512_v4
+96/491146/campos_512_v4
+96/491152/campos_512_v4
+96/491159/campos_512_v4
+96/491188/campos_512_v4
+96/491189/campos_512_v4
+96/491200/campos_512_v4
+96/491205/campos_512_v4
+96/491209/campos_512_v4
+96/491220/campos_512_v4
+96/491225/campos_512_v4
+96/491229/campos_512_v4
+96/491242/campos_512_v4
+96/491261/campos_512_v4
+96/491267/campos_512_v4
+96/491273/campos_512_v4
+96/491278/campos_512_v4
+96/491283/campos_512_v4
+96/491314/campos_512_v4
+96/491325/campos_512_v4
+96/491338/campos_512_v4
+96/491347/campos_512_v4
+96/491356/campos_512_v4
+96/491360/campos_512_v4
+96/491362/campos_512_v4
+96/491369/campos_512_v4
+96/491381/campos_512_v4
+96/491383/campos_512_v4
+96/491389/campos_512_v4
+96/491391/campos_512_v4
+96/491395/campos_512_v4
+96/491397/campos_512_v4
+96/491405/campos_512_v4
+96/491413/campos_512_v4
+96/491422/campos_512_v4
+96/491423/campos_512_v4
+96/491426/campos_512_v4
+96/491437/campos_512_v4
+96/491442/campos_512_v4
+96/491447/campos_512_v4
+96/491458/campos_512_v4
+96/491460/campos_512_v4
+96/491479/campos_512_v4
+96/491489/campos_512_v4
+96/491507/campos_512_v4
+96/491518/campos_512_v4
+96/491520/campos_512_v4
+96/491531/campos_512_v4
+96/491536/campos_512_v4
+96/491543/campos_512_v4
+96/491547/campos_512_v4
+96/491559/campos_512_v4
+96/491562/campos_512_v4
+96/491569/campos_512_v4
+96/491572/campos_512_v4
+96/491582/campos_512_v4
+96/491585/campos_512_v4
+96/491596/campos_512_v4
+96/491601/campos_512_v4
+96/491604/campos_512_v4
+96/491606/campos_512_v4
+96/491617/campos_512_v4
+96/491638/campos_512_v4
+96/491651/campos_512_v4
+96/491653/campos_512_v4
+96/491669/campos_512_v4
+96/491672/campos_512_v4
+96/491676/campos_512_v4
+96/491678/campos_512_v4
+96/491680/campos_512_v4
+96/491681/campos_512_v4
+96/491684/campos_512_v4
+96/491696/campos_512_v4
+96/491697/campos_512_v4
+96/491701/campos_512_v4
+96/491702/campos_512_v4
+96/491712/campos_512_v4
+96/491713/campos_512_v4
+96/491727/campos_512_v4
+96/491729/campos_512_v4
+96/491735/campos_512_v4
+96/491741/campos_512_v4
+96/491742/campos_512_v4
+96/491756/campos_512_v4
+96/491768/campos_512_v4
+96/491779/campos_512_v4
+96/491782/campos_512_v4
+96/491795/campos_512_v4
+96/491809/campos_512_v4
+96/491820/campos_512_v4
+96/491827/campos_512_v4
+96/491834/campos_512_v4
+96/491840/campos_512_v4
+96/491845/campos_512_v4
+96/491847/campos_512_v4
+96/491849/campos_512_v4
+96/491855/campos_512_v4
+96/491869/campos_512_v4
+96/491871/campos_512_v4
+96/491874/campos_512_v4
+96/491878/campos_512_v4
+96/491881/campos_512_v4
+96/491886/campos_512_v4
+96/491891/campos_512_v4
+96/491900/campos_512_v4
+96/491904/campos_512_v4
+96/491905/campos_512_v4
+96/491909/campos_512_v4
+96/491913/campos_512_v4
+96/491918/campos_512_v4
+96/491933/campos_512_v4
+96/491939/campos_512_v4
+96/491961/campos_512_v4
+96/491962/campos_512_v4
+96/491963/campos_512_v4
+96/491964/campos_512_v4
+96/491967/campos_512_v4
+96/491973/campos_512_v4
+96/491981/campos_512_v4
+96/492006/campos_512_v4
+96/492010/campos_512_v4
+96/492014/campos_512_v4
+96/492019/campos_512_v4
+96/492020/campos_512_v4
+96/492037/campos_512_v4
+96/492052/campos_512_v4
+96/492061/campos_512_v4
+96/492063/campos_512_v4
+96/492076/campos_512_v4
+96/492080/campos_512_v4
+96/492082/campos_512_v4
+96/492084/campos_512_v4
+96/492102/campos_512_v4
+96/492106/campos_512_v4
+96/492110/campos_512_v4
+96/492117/campos_512_v4
+96/492122/campos_512_v4
+96/492129/campos_512_v4
+96/492135/campos_512_v4
+96/492138/campos_512_v4
+96/492139/campos_512_v4
+96/492149/campos_512_v4
+96/492154/campos_512_v4
+96/492157/campos_512_v4
+96/492161/campos_512_v4
+96/492164/campos_512_v4
+96/492168/campos_512_v4
+96/492178/campos_512_v4
+96/492179/campos_512_v4
+96/492184/campos_512_v4
+96/492209/campos_512_v4
+96/492222/campos_512_v4
+96/492223/campos_512_v4
+96/492230/campos_512_v4
+96/492233/campos_512_v4
+96/492235/campos_512_v4
+96/492238/campos_512_v4
+96/492241/campos_512_v4
+96/492243/campos_512_v4
+96/492252/campos_512_v4
+96/492265/campos_512_v4
+96/492269/campos_512_v4
+96/492276/campos_512_v4
+96/492288/campos_512_v4
+96/492289/campos_512_v4
+96/492291/campos_512_v4
+96/492299/campos_512_v4
+96/492301/campos_512_v4
+96/492303/campos_512_v4
+96/492306/campos_512_v4
+96/492316/campos_512_v4
+96/492319/campos_512_v4
+96/492320/campos_512_v4
+96/492330/campos_512_v4
+96/492331/campos_512_v4
+96/492334/campos_512_v4
+96/492337/campos_512_v4
+96/492338/campos_512_v4
+96/492343/campos_512_v4
+96/492350/campos_512_v4
+96/492356/campos_512_v4
+96/492359/campos_512_v4
+96/492360/campos_512_v4
+96/492387/campos_512_v4
+96/492389/campos_512_v4
+96/492394/campos_512_v4
+96/492408/campos_512_v4
+96/492410/campos_512_v4
+96/492416/campos_512_v4
+96/492425/campos_512_v4
+96/492430/campos_512_v4
+96/492444/campos_512_v4
+96/492445/campos_512_v4
+96/492448/campos_512_v4
+96/492453/campos_512_v4
+96/492468/campos_512_v4
+96/492470/campos_512_v4
+96/492472/campos_512_v4
+96/492480/campos_512_v4
+96/492486/campos_512_v4
+96/492487/campos_512_v4
+96/492491/campos_512_v4
+96/492493/campos_512_v4
+96/492495/campos_512_v4
+96/492496/campos_512_v4
+96/492499/campos_512_v4
+96/492505/campos_512_v4
+96/492517/campos_512_v4
+96/492546/campos_512_v4
+96/492568/campos_512_v4
+96/492569/campos_512_v4
+96/492572/campos_512_v4
+96/492598/campos_512_v4
+96/492604/campos_512_v4
+96/492641/campos_512_v4
+96/492644/campos_512_v4
+96/492651/campos_512_v4
+96/492657/campos_512_v4
+96/492669/campos_512_v4
+96/492681/campos_512_v4
+96/492683/campos_512_v4
+96/493005/campos_512_v4
+96/493009/campos_512_v4
+96/493026/campos_512_v4
+96/493045/campos_512_v4
+96/493050/campos_512_v4
+96/493053/campos_512_v4
+96/493064/campos_512_v4
+96/493068/campos_512_v4
+96/493090/campos_512_v4
+96/493091/campos_512_v4
+96/493096/campos_512_v4
+96/493108/campos_512_v4
+96/493117/campos_512_v4
+96/493118/campos_512_v4
+96/493132/campos_512_v4
+96/493137/campos_512_v4
+96/493139/campos_512_v4
+96/493148/campos_512_v4
+96/493149/campos_512_v4
+96/493152/campos_512_v4
+96/493153/campos_512_v4
+96/493166/campos_512_v4
+96/493170/campos_512_v4
+96/493194/campos_512_v4
+96/493198/campos_512_v4
+96/493199/campos_512_v4
+96/493210/campos_512_v4
+96/493216/campos_512_v4
+96/493218/campos_512_v4
+96/493235/campos_512_v4
+96/493238/campos_512_v4
+96/493239/campos_512_v4
+96/493240/campos_512_v4
+96/493252/campos_512_v4
+96/493288/campos_512_v4
+96/493292/campos_512_v4
+96/493293/campos_512_v4
+96/493302/campos_512_v4
+96/493328/campos_512_v4
+96/493331/campos_512_v4
+96/493339/campos_512_v4
+96/493346/campos_512_v4
+96/493353/campos_512_v4
+96/493354/campos_512_v4
+96/493356/campos_512_v4
+96/493365/campos_512_v4
+96/493370/campos_512_v4
+96/493377/campos_512_v4
+96/493389/campos_512_v4
+96/493397/campos_512_v4
+96/493406/campos_512_v4
+96/493411/campos_512_v4
+96/493413/campos_512_v4
+96/493421/campos_512_v4
+96/493424/campos_512_v4
+96/493454/campos_512_v4
+96/493457/campos_512_v4
+96/493459/campos_512_v4
+96/493464/campos_512_v4
+96/493480/campos_512_v4
+96/493496/campos_512_v4
+96/493500/campos_512_v4
+96/493504/campos_512_v4
+96/493505/campos_512_v4
+96/493510/campos_512_v4
+96/493511/campos_512_v4
+96/493517/campos_512_v4
+96/493524/campos_512_v4
+96/493532/campos_512_v4
+96/493533/campos_512_v4
+96/493534/campos_512_v4
+96/493547/campos_512_v4
+96/493557/campos_512_v4
+96/493560/campos_512_v4
+96/493563/campos_512_v4
+96/493566/campos_512_v4
+96/493580/campos_512_v4
+96/493584/campos_512_v4
+96/493626/campos_512_v4
+96/493632/campos_512_v4
+96/493637/campos_512_v4
+96/493645/campos_512_v4
+96/493648/campos_512_v4
+96/493650/campos_512_v4
+96/493652/campos_512_v4
+96/493694/campos_512_v4
+96/493700/campos_512_v4
+96/493718/campos_512_v4
+96/493729/campos_512_v4
+96/493737/campos_512_v4
+96/493740/campos_512_v4
+96/493745/campos_512_v4
+96/493775/campos_512_v4
+96/493777/campos_512_v4
+96/493778/campos_512_v4
+96/493793/campos_512_v4
+96/493797/campos_512_v4
+96/493820/campos_512_v4
+96/493829/campos_512_v4
+96/493839/campos_512_v4
+96/493852/campos_512_v4
+96/493856/campos_512_v4
+96/493858/campos_512_v4
+96/493872/campos_512_v4
+96/493876/campos_512_v4
+96/493889/campos_512_v4
+96/493894/campos_512_v4
+96/493902/campos_512_v4
+96/493915/campos_512_v4
+96/493948/campos_512_v4
+96/493952/campos_512_v4
+96/493953/campos_512_v4
+96/493954/campos_512_v4
+96/493958/campos_512_v4
+96/493962/campos_512_v4
+96/493967/campos_512_v4
+96/493978/campos_512_v4
+96/493979/campos_512_v4
+96/493987/campos_512_v4
+96/494004/campos_512_v4
+96/494008/campos_512_v4
+96/494018/campos_512_v4
+96/494027/campos_512_v4
+96/494030/campos_512_v4
+96/494040/campos_512_v4
+96/494044/campos_512_v4
+96/494045/campos_512_v4
+96/494051/campos_512_v4
+96/494058/campos_512_v4
+96/494072/campos_512_v4
+96/494073/campos_512_v4
+96/494077/campos_512_v4
+96/494083/campos_512_v4
+96/494085/campos_512_v4
+96/494089/campos_512_v4
+96/494093/campos_512_v4
+96/494105/campos_512_v4
+96/494111/campos_512_v4
+96/494138/campos_512_v4
+96/494140/campos_512_v4
+96/494152/campos_512_v4
+96/494156/campos_512_v4
+96/494157/campos_512_v4
+96/494172/campos_512_v4
+96/494174/campos_512_v4
+96/494188/campos_512_v4
+96/494192/campos_512_v4
+96/494200/campos_512_v4
+96/494204/campos_512_v4
+96/494206/campos_512_v4
+96/494207/campos_512_v4
+96/494214/campos_512_v4
+96/494220/campos_512_v4
+96/494223/campos_512_v4
+96/494240/campos_512_v4
+96/494242/campos_512_v4
+96/494243/campos_512_v4
+96/494245/campos_512_v4
+96/494248/campos_512_v4
+96/494250/campos_512_v4
+96/494267/campos_512_v4
+96/494270/campos_512_v4
+96/494271/campos_512_v4
+96/494278/campos_512_v4
+96/494284/campos_512_v4
+96/494288/campos_512_v4
+96/494290/campos_512_v4
+96/494300/campos_512_v4
+96/494304/campos_512_v4
+96/494312/campos_512_v4
+96/494315/campos_512_v4
+96/494327/campos_512_v4
+96/494337/campos_512_v4
+96/494342/campos_512_v4
+96/494355/campos_512_v4
+96/494361/campos_512_v4
+96/494362/campos_512_v4
+96/494363/campos_512_v4
+96/494368/campos_512_v4
+96/494383/campos_512_v4
+96/494387/campos_512_v4
+96/494398/campos_512_v4
+96/494399/campos_512_v4
+96/494408/campos_512_v4
+96/494413/campos_512_v4
+96/494414/campos_512_v4
+96/494418/campos_512_v4
+96/494422/campos_512_v4
+96/494423/campos_512_v4
+96/494427/campos_512_v4
+96/494458/campos_512_v4
+96/494460/campos_512_v4
+96/494470/campos_512_v4
+96/494473/campos_512_v4
+96/494475/campos_512_v4
+96/494479/campos_512_v4
+96/494490/campos_512_v4
+96/494500/campos_512_v4
+96/494510/campos_512_v4
+96/494518/campos_512_v4
+96/494520/campos_512_v4
+96/494521/campos_512_v4
+96/494523/campos_512_v4
+96/494525/campos_512_v4
+96/494542/campos_512_v4
+96/494554/campos_512_v4
+96/494567/campos_512_v4
+96/494606/campos_512_v4
+96/494611/campos_512_v4
+96/494612/campos_512_v4
+96/494613/campos_512_v4
+96/494637/campos_512_v4
+96/494646/campos_512_v4
+96/494649/campos_512_v4
+96/494653/campos_512_v4
+96/494657/campos_512_v4
+96/494658/campos_512_v4
+96/494660/campos_512_v4
+96/494664/campos_512_v4
+96/494666/campos_512_v4
+96/494668/campos_512_v4
+96/494675/campos_512_v4
+96/494678/campos_512_v4
+96/494681/campos_512_v4
+96/494682/campos_512_v4
+96/494694/campos_512_v4
+96/494698/campos_512_v4
+96/494705/campos_512_v4
+96/494707/campos_512_v4
+96/494719/campos_512_v4
+96/494723/campos_512_v4
+96/494730/campos_512_v4
+96/494731/campos_512_v4
+96/494737/campos_512_v4
+96/494751/campos_512_v4
+96/494767/campos_512_v4
+96/494768/campos_512_v4
+96/494773/campos_512_v4
+96/494800/campos_512_v4
+96/494802/campos_512_v4
+96/494826/campos_512_v4
+96/494839/campos_512_v4
+96/494840/campos_512_v4
+96/494844/campos_512_v4
+96/494847/campos_512_v4
+96/494850/campos_512_v4
+96/494855/campos_512_v4
+96/494883/campos_512_v4
+96/494894/campos_512_v4
+96/494906/campos_512_v4
+96/494908/campos_512_v4
+96/494913/campos_512_v4
+96/494917/campos_512_v4
+96/494920/campos_512_v4
+96/494935/campos_512_v4
+96/494945/campos_512_v4
+96/494946/campos_512_v4
+96/494949/campos_512_v4
+96/494953/campos_512_v4
+96/494963/campos_512_v4
+96/494978/campos_512_v4
+96/494987/campos_512_v4
+96/494991/campos_512_v4
+96/495001/campos_512_v4
+97/495008/campos_512_v4
+97/495019/campos_512_v4
+97/495024/campos_512_v4
+97/495028/campos_512_v4
+97/495041/campos_512_v4
+97/495046/campos_512_v4
+97/495048/campos_512_v4
+97/495049/campos_512_v4
+97/495056/campos_512_v4
+97/495066/campos_512_v4
+97/495068/campos_512_v4
+97/495069/campos_512_v4
+97/495074/campos_512_v4
+97/495083/campos_512_v4
+97/495096/campos_512_v4
+97/495105/campos_512_v4
+97/495112/campos_512_v4
+97/495113/campos_512_v4
+97/495119/campos_512_v4
+97/495120/campos_512_v4
+97/495127/campos_512_v4
+97/495130/campos_512_v4
+97/495137/campos_512_v4
+97/495139/campos_512_v4
+97/495143/campos_512_v4
+97/495177/campos_512_v4
+97/495190/campos_512_v4
+97/495193/campos_512_v4
+97/495197/campos_512_v4
+97/495203/campos_512_v4
+97/495213/campos_512_v4
+97/495223/campos_512_v4
+97/495229/campos_512_v4
+97/495230/campos_512_v4
+97/495233/campos_512_v4
+97/495242/campos_512_v4
+97/495248/campos_512_v4
+97/495256/campos_512_v4
+97/495264/campos_512_v4
+97/495266/campos_512_v4
+97/495283/campos_512_v4
+97/495293/campos_512_v4
+97/495296/campos_512_v4
+97/495299/campos_512_v4
+97/495304/campos_512_v4
+97/495309/campos_512_v4
+97/495315/campos_512_v4
+97/495324/campos_512_v4
+97/495325/campos_512_v4
+97/495329/campos_512_v4
+97/495336/campos_512_v4
+97/495346/campos_512_v4
+97/495353/campos_512_v4
+97/495368/campos_512_v4
+97/495369/campos_512_v4
+97/495383/campos_512_v4
+97/495387/campos_512_v4
+97/495389/campos_512_v4
+97/495397/campos_512_v4
+97/495400/campos_512_v4
+97/495409/campos_512_v4
+97/495412/campos_512_v4
+97/495420/campos_512_v4
+97/495435/campos_512_v4
+97/495445/campos_512_v4
+97/495461/campos_512_v4
+97/495479/campos_512_v4
+97/495482/campos_512_v4
+97/495493/campos_512_v4
+97/495497/campos_512_v4
+97/495500/campos_512_v4
+97/495510/campos_512_v4
+97/495516/campos_512_v4
+97/495518/campos_512_v4
+97/495522/campos_512_v4
+97/495524/campos_512_v4
+97/495529/campos_512_v4
+97/495547/campos_512_v4
+97/495554/campos_512_v4
+97/495557/campos_512_v4
+97/495564/campos_512_v4
+97/495567/campos_512_v4
+97/495585/campos_512_v4
+97/495590/campos_512_v4
+97/495596/campos_512_v4
+97/495602/campos_512_v4
+97/495606/campos_512_v4
+97/495622/campos_512_v4
+97/495623/campos_512_v4
+97/495631/campos_512_v4
+97/495634/campos_512_v4
+97/495637/campos_512_v4
+97/495643/campos_512_v4
+97/495646/campos_512_v4
+97/495648/campos_512_v4
+97/495655/campos_512_v4
+97/495664/campos_512_v4
+97/495665/campos_512_v4
+97/495687/campos_512_v4
+97/495715/campos_512_v4
+97/495718/campos_512_v4
+97/495751/campos_512_v4
+97/495767/campos_512_v4
+97/495772/campos_512_v4
+97/495784/campos_512_v4
+97/495795/campos_512_v4
+97/495800/campos_512_v4
+97/495804/campos_512_v4
+97/495819/campos_512_v4
+97/495821/campos_512_v4
+97/495822/campos_512_v4
+97/495824/campos_512_v4
+97/495830/campos_512_v4
+97/495833/campos_512_v4
+97/495835/campos_512_v4
+97/495839/campos_512_v4
+97/495843/campos_512_v4
+97/495844/campos_512_v4
+97/495846/campos_512_v4
+97/495862/campos_512_v4
+97/495865/campos_512_v4
+97/495868/campos_512_v4
+97/495896/campos_512_v4
+97/495898/campos_512_v4
+97/495908/campos_512_v4
+97/495909/campos_512_v4
+97/495914/campos_512_v4
+97/495918/campos_512_v4
+97/495930/campos_512_v4
+97/495935/campos_512_v4
+97/495937/campos_512_v4
+97/495946/campos_512_v4
+97/495958/campos_512_v4
+97/495962/campos_512_v4
+97/495971/campos_512_v4
+97/495973/campos_512_v4
+97/495979/campos_512_v4
+97/495985/campos_512_v4
+97/495991/campos_512_v4
+97/495992/campos_512_v4
+97/495997/campos_512_v4
+97/496003/campos_512_v4
+97/496009/campos_512_v4
+97/496011/campos_512_v4
+97/496013/campos_512_v4
+97/496021/campos_512_v4
+97/496046/campos_512_v4
+97/496047/campos_512_v4
+97/496048/campos_512_v4
+97/496049/campos_512_v4
+97/496083/campos_512_v4
+97/496089/campos_512_v4
+97/496090/campos_512_v4
+97/496096/campos_512_v4
+97/496101/campos_512_v4
+97/496107/campos_512_v4
+97/496111/campos_512_v4
+97/496112/campos_512_v4
+97/496113/campos_512_v4
+97/496115/campos_512_v4
+97/496116/campos_512_v4
+97/496117/campos_512_v4
+97/496125/campos_512_v4
+97/496130/campos_512_v4
+97/496148/campos_512_v4
+97/496159/campos_512_v4
+97/496170/campos_512_v4
+97/496171/campos_512_v4
+97/496174/campos_512_v4
+97/496192/campos_512_v4
+97/496194/campos_512_v4
+97/496221/campos_512_v4
+97/496225/campos_512_v4
+97/496238/campos_512_v4
+97/496245/campos_512_v4
+97/496253/campos_512_v4
+97/496254/campos_512_v4
+97/496260/campos_512_v4
+97/496263/campos_512_v4
+97/496283/campos_512_v4
+97/496292/campos_512_v4
+97/496294/campos_512_v4
+97/496295/campos_512_v4
+97/496302/campos_512_v4
+97/496304/campos_512_v4
+97/496312/campos_512_v4
+97/496323/campos_512_v4
+97/496326/campos_512_v4
+97/496337/campos_512_v4
+97/496342/campos_512_v4
+97/496376/campos_512_v4
+97/496397/campos_512_v4
+97/496405/campos_512_v4
+97/496410/campos_512_v4
+97/496411/campos_512_v4
+97/496423/campos_512_v4
+97/496425/campos_512_v4
+97/496430/campos_512_v4
+97/496440/campos_512_v4
+97/496445/campos_512_v4
+97/496448/campos_512_v4
+97/496453/campos_512_v4
+97/496455/campos_512_v4
+97/496468/campos_512_v4
+97/496475/campos_512_v4
+97/496481/campos_512_v4
+97/496489/campos_512_v4
+97/496491/campos_512_v4
+97/496510/campos_512_v4
+97/496513/campos_512_v4
+97/496515/campos_512_v4
+97/496521/campos_512_v4
+97/496527/campos_512_v4
+97/496531/campos_512_v4
+97/496533/campos_512_v4
+97/496538/campos_512_v4
+97/496541/campos_512_v4
+97/496542/campos_512_v4
+97/496563/campos_512_v4
+97/496568/campos_512_v4
+97/496577/campos_512_v4
+97/496584/campos_512_v4
+97/496590/campos_512_v4
+97/496596/campos_512_v4
+97/496617/campos_512_v4
+97/496627/campos_512_v4
+97/496636/campos_512_v4
+97/496643/campos_512_v4
+97/496648/campos_512_v4
+97/496663/campos_512_v4
+97/496664/campos_512_v4
+97/496671/campos_512_v4
+97/496672/campos_512_v4
+97/496685/campos_512_v4
+97/496688/campos_512_v4
+97/496691/campos_512_v4
+97/496692/campos_512_v4
+97/496694/campos_512_v4
+97/496700/campos_512_v4
+97/496702/campos_512_v4
+97/496704/campos_512_v4
+97/496707/campos_512_v4
+97/496723/campos_512_v4
+97/496724/campos_512_v4
+97/496725/campos_512_v4
+97/496729/campos_512_v4
+97/496744/campos_512_v4
+97/496760/campos_512_v4
+97/496765/campos_512_v4
+97/496772/campos_512_v4
+97/496773/campos_512_v4
+97/496777/campos_512_v4
+97/496784/campos_512_v4
+97/496790/campos_512_v4
+97/496792/campos_512_v4
+97/496803/campos_512_v4
+97/496824/campos_512_v4
+97/496838/campos_512_v4
+97/496850/campos_512_v4
+97/496855/campos_512_v4
+97/496858/campos_512_v4
+97/496865/campos_512_v4
+97/496866/campos_512_v4
+97/496873/campos_512_v4
+97/496918/campos_512_v4
+97/496925/campos_512_v4
+97/496932/campos_512_v4
+97/496933/campos_512_v4
+97/496940/campos_512_v4
+97/496944/campos_512_v4
+97/496945/campos_512_v4
+97/496954/campos_512_v4
+97/496960/campos_512_v4
+97/496965/campos_512_v4
+97/496973/campos_512_v4
+97/496974/campos_512_v4
+97/496975/campos_512_v4
+97/496982/campos_512_v4
+97/496983/campos_512_v4
+97/496997/campos_512_v4
+97/496999/campos_512_v4
+97/497014/campos_512_v4
+97/497018/campos_512_v4
+97/497021/campos_512_v4
+97/497022/campos_512_v4
+97/497044/campos_512_v4
+97/497046/campos_512_v4
+97/497048/campos_512_v4
+97/497054/campos_512_v4
+97/497060/campos_512_v4
+97/497078/campos_512_v4
+97/497083/campos_512_v4
+97/497087/campos_512_v4
+97/497115/campos_512_v4
+97/497116/campos_512_v4
+97/497122/campos_512_v4
+97/497130/campos_512_v4
+97/497133/campos_512_v4
+97/497136/campos_512_v4
+97/497143/campos_512_v4
+97/497145/campos_512_v4
+97/497146/campos_512_v4
+97/497151/campos_512_v4
+97/497157/campos_512_v4
+97/497162/campos_512_v4
+97/497170/campos_512_v4
+97/497173/campos_512_v4
+97/497182/campos_512_v4
+97/497190/campos_512_v4
+97/497195/campos_512_v4
+97/497205/campos_512_v4
+97/497210/campos_512_v4
+97/497215/campos_512_v4
+97/497229/campos_512_v4
+97/497240/campos_512_v4
+97/497260/campos_512_v4
+97/497270/campos_512_v4
+97/497294/campos_512_v4
+97/497310/campos_512_v4
+97/497326/campos_512_v4
+97/497339/campos_512_v4
+97/497340/campos_512_v4
+97/497345/campos_512_v4
+97/497350/campos_512_v4
+97/497353/campos_512_v4
+97/497369/campos_512_v4
+97/497373/campos_512_v4
+97/497385/campos_512_v4
+97/497386/campos_512_v4
+97/497390/campos_512_v4
+97/497391/campos_512_v4
+97/497396/campos_512_v4
+97/497402/campos_512_v4
+97/497419/campos_512_v4
+97/497420/campos_512_v4
+97/497427/campos_512_v4
+97/497452/campos_512_v4
+97/497459/campos_512_v4
+97/497474/campos_512_v4
+97/497475/campos_512_v4
+97/497480/campos_512_v4
+97/497496/campos_512_v4
+97/497497/campos_512_v4
+97/497500/campos_512_v4
+97/497509/campos_512_v4
+97/497510/campos_512_v4
+97/497514/campos_512_v4
+97/497519/campos_512_v4
+97/497529/campos_512_v4
+97/497532/campos_512_v4
+97/497540/campos_512_v4
+97/497548/campos_512_v4
+97/497573/campos_512_v4
+97/497586/campos_512_v4
+97/497588/campos_512_v4
+97/497603/campos_512_v4
+97/497604/campos_512_v4
+97/497609/campos_512_v4
+97/497610/campos_512_v4
+97/497629/campos_512_v4
+97/497639/campos_512_v4
+97/497641/campos_512_v4
+97/497650/campos_512_v4
+97/497652/campos_512_v4
+97/497655/campos_512_v4
+97/497661/campos_512_v4
+97/497683/campos_512_v4
+97/497699/campos_512_v4
+97/497709/campos_512_v4
+97/497733/campos_512_v4
+97/497740/campos_512_v4
+97/497743/campos_512_v4
+97/497745/campos_512_v4
+97/497770/campos_512_v4
+97/497792/campos_512_v4
+97/497800/campos_512_v4
+97/497805/campos_512_v4
+97/497811/campos_512_v4
+97/497822/campos_512_v4
+97/497833/campos_512_v4
+97/497836/campos_512_v4
+97/497838/campos_512_v4
+97/497852/campos_512_v4
+97/497855/campos_512_v4
+97/497856/campos_512_v4
+97/497858/campos_512_v4
+97/497865/campos_512_v4
+97/497871/campos_512_v4
+97/497875/campos_512_v4
+97/497886/campos_512_v4
+97/497891/campos_512_v4
+97/497895/campos_512_v4
+97/497900/campos_512_v4
+97/497902/campos_512_v4
+97/497922/campos_512_v4
+97/497923/campos_512_v4
+97/497931/campos_512_v4
+97/497945/campos_512_v4
+97/497953/campos_512_v4
+97/497955/campos_512_v4
+97/497958/campos_512_v4
+97/497972/campos_512_v4
+97/497978/campos_512_v4
+97/497984/campos_512_v4
+97/497995/campos_512_v4
+97/498001/campos_512_v4
+97/498002/campos_512_v4
+97/498003/campos_512_v4
+97/498012/campos_512_v4
+97/498023/campos_512_v4
+97/498029/campos_512_v4
+97/498037/campos_512_v4
+97/498041/campos_512_v4
+97/498045/campos_512_v4
+97/498046/campos_512_v4
+97/498050/campos_512_v4
+97/498053/campos_512_v4
+97/498057/campos_512_v4
+97/498064/campos_512_v4
+97/498074/campos_512_v4
+97/498081/campos_512_v4
+97/498083/campos_512_v4
+97/498091/campos_512_v4
+97/498093/campos_512_v4
+97/498103/campos_512_v4
+97/498121/campos_512_v4
+97/498130/campos_512_v4
+97/498133/campos_512_v4
+97/498134/campos_512_v4
+97/498147/campos_512_v4
+97/498149/campos_512_v4
+97/498151/campos_512_v4
+97/498168/campos_512_v4
+97/498178/campos_512_v4
+97/498194/campos_512_v4
+97/498202/campos_512_v4
+97/498208/campos_512_v4
+97/498209/campos_512_v4
+97/498230/campos_512_v4
+97/498240/campos_512_v4
+97/498245/campos_512_v4
+97/498281/campos_512_v4
+97/498290/campos_512_v4
+97/498295/campos_512_v4
+97/498305/campos_512_v4
+97/498306/campos_512_v4
+97/498310/campos_512_v4
+97/498321/campos_512_v4
+97/498323/campos_512_v4
+97/498332/campos_512_v4
+97/498349/campos_512_v4
+97/498352/campos_512_v4
+97/498355/campos_512_v4
+97/498364/campos_512_v4
+97/498376/campos_512_v4
+97/498377/campos_512_v4
+97/498382/campos_512_v4
+97/498383/campos_512_v4
+97/498394/campos_512_v4
+97/498413/campos_512_v4
+97/498427/campos_512_v4
+97/498430/campos_512_v4
+97/498449/campos_512_v4
+97/498452/campos_512_v4
+97/498453/campos_512_v4
+97/498454/campos_512_v4
+97/498460/campos_512_v4
+97/498470/campos_512_v4
+97/498471/campos_512_v4
+97/498476/campos_512_v4
+97/498479/campos_512_v4
+97/498482/campos_512_v4
+97/498484/campos_512_v4
+97/498492/campos_512_v4
+97/498507/campos_512_v4
+97/498515/campos_512_v4
+97/498518/campos_512_v4
+97/498563/campos_512_v4
+97/498564/campos_512_v4
+97/498571/campos_512_v4
+97/498574/campos_512_v4
+97/498578/campos_512_v4
+97/498580/campos_512_v4
+97/498582/campos_512_v4
+97/498594/campos_512_v4
+97/498600/campos_512_v4
+97/498601/campos_512_v4
+97/498603/campos_512_v4
+97/498607/campos_512_v4
+97/498611/campos_512_v4
+97/498612/campos_512_v4
+97/498624/campos_512_v4
+97/498627/campos_512_v4
+97/498629/campos_512_v4
+97/498632/campos_512_v4
+97/498635/campos_512_v4
+97/498638/campos_512_v4
+97/498645/campos_512_v4
+97/498653/campos_512_v4
+97/498666/campos_512_v4
+97/498668/campos_512_v4
+97/498670/campos_512_v4
+97/498691/campos_512_v4
+97/498695/campos_512_v4
+97/498697/campos_512_v4
+97/498698/campos_512_v4
+97/498701/campos_512_v4
+97/498703/campos_512_v4
+97/498706/campos_512_v4
+97/498715/campos_512_v4
+97/498738/campos_512_v4
+97/498741/campos_512_v4
+97/498746/campos_512_v4
+97/498747/campos_512_v4
+97/498756/campos_512_v4
+97/498758/campos_512_v4
+97/498777/campos_512_v4
+97/498782/campos_512_v4
+97/498785/campos_512_v4
+97/498792/campos_512_v4
+97/498802/campos_512_v4
+97/498808/campos_512_v4
+97/498812/campos_512_v4
+97/498818/campos_512_v4
+97/498820/campos_512_v4
+97/498832/campos_512_v4
+97/498834/campos_512_v4
+97/498858/campos_512_v4
+97/498870/campos_512_v4
+97/498886/campos_512_v4
+97/498904/campos_512_v4
+97/498916/campos_512_v4
+97/498925/campos_512_v4
+97/498926/campos_512_v4
+97/498967/campos_512_v4
+97/498983/campos_512_v4
+97/498998/campos_512_v4
+97/499003/campos_512_v4
+97/499005/campos_512_v4
+97/499021/campos_512_v4
+97/499031/campos_512_v4
+97/499033/campos_512_v4
+97/499034/campos_512_v4
+97/499039/campos_512_v4
+97/499043/campos_512_v4
+97/499044/campos_512_v4
+97/499045/campos_512_v4
+97/499048/campos_512_v4
+97/499067/campos_512_v4
+97/499068/campos_512_v4
+97/499075/campos_512_v4
+97/499076/campos_512_v4
+97/499085/campos_512_v4
+97/499087/campos_512_v4
+97/499102/campos_512_v4
+97/499113/campos_512_v4
+97/499127/campos_512_v4
+97/499129/campos_512_v4
+97/499153/campos_512_v4
+97/499155/campos_512_v4
+97/499172/campos_512_v4
+97/499179/campos_512_v4
+97/499180/campos_512_v4
+97/499190/campos_512_v4
+97/499196/campos_512_v4
+97/499199/campos_512_v4
+97/499203/campos_512_v4
+97/499209/campos_512_v4
+97/499212/campos_512_v4
+97/499220/campos_512_v4
+97/499225/campos_512_v4
+97/499226/campos_512_v4
+97/499232/campos_512_v4
+97/499240/campos_512_v4
+97/499241/campos_512_v4
+97/499262/campos_512_v4
+97/499268/campos_512_v4
+97/499271/campos_512_v4
+97/499279/campos_512_v4
+97/499299/campos_512_v4
+97/499306/campos_512_v4
+97/499313/campos_512_v4
+97/499316/campos_512_v4
+97/499336/campos_512_v4
+97/499340/campos_512_v4
+97/499345/campos_512_v4
+97/499346/campos_512_v4
+97/499348/campos_512_v4
+97/499350/campos_512_v4
+97/499351/campos_512_v4
+97/499376/campos_512_v4
+97/499384/campos_512_v4
+97/499394/campos_512_v4
+97/499395/campos_512_v4
+97/499398/campos_512_v4
+97/499406/campos_512_v4
+97/499420/campos_512_v4
+97/499424/campos_512_v4
+97/499432/campos_512_v4
+97/499437/campos_512_v4
+97/499440/campos_512_v4
+97/499447/campos_512_v4
+97/499452/campos_512_v4
+97/499456/campos_512_v4
+97/499458/campos_512_v4
+97/499472/campos_512_v4
+97/499486/campos_512_v4
+97/499491/campos_512_v4
+97/499505/campos_512_v4
+97/499523/campos_512_v4
+97/499524/campos_512_v4
+97/499535/campos_512_v4
+97/499545/campos_512_v4
+97/499546/campos_512_v4
+97/499557/campos_512_v4
+97/499562/campos_512_v4
+97/499565/campos_512_v4
+97/499579/campos_512_v4
+97/499581/campos_512_v4
+97/499586/campos_512_v4
+97/499587/campos_512_v4
+97/499588/campos_512_v4
+97/499589/campos_512_v4
+97/499593/campos_512_v4
+97/499594/campos_512_v4
+97/499595/campos_512_v4
+97/499604/campos_512_v4
+97/499605/campos_512_v4
+97/499613/campos_512_v4
+97/499625/campos_512_v4
+97/499633/campos_512_v4
+97/499634/campos_512_v4
+97/499644/campos_512_v4
+97/499652/campos_512_v4
+97/499659/campos_512_v4
+97/499660/campos_512_v4
+97/499665/campos_512_v4
+97/499672/campos_512_v4
+97/499673/campos_512_v4
+97/499685/campos_512_v4
+97/499689/campos_512_v4
+97/499692/campos_512_v4
+97/499696/campos_512_v4
+97/499699/campos_512_v4
+97/499701/campos_512_v4
+97/499710/campos_512_v4
+97/499719/campos_512_v4
+97/499728/campos_512_v4
+97/499732/campos_512_v4
+97/499737/campos_512_v4
+97/499758/campos_512_v4
+97/499772/campos_512_v4
+97/499782/campos_512_v4
+97/499795/campos_512_v4
+97/499805/campos_512_v4
+97/499809/campos_512_v4
+97/499813/campos_512_v4
+97/499818/campos_512_v4
+97/499834/campos_512_v4
+97/499835/campos_512_v4
+97/499837/campos_512_v4
+97/499838/campos_512_v4
+97/499856/campos_512_v4
+97/499859/campos_512_v4
+97/499865/campos_512_v4
+97/499870/campos_512_v4
+97/499871/campos_512_v4
+97/499878/campos_512_v4
+97/499887/campos_512_v4
+97/499894/campos_512_v4
+97/499910/campos_512_v4
+97/499936/campos_512_v4
+97/499958/campos_512_v4
+97/499963/campos_512_v4
+97/499967/campos_512_v4
+97/499973/campos_512_v4
+97/499974/campos_512_v4
+97/499977/campos_512_v4
+97/499990/campos_512_v4
+97/499998/campos_512_v4
+97/500001/campos_512_v4
+98/500004/campos_512_v4
+98/500009/campos_512_v4
+98/500022/campos_512_v4
+98/500040/campos_512_v4
+98/500042/campos_512_v4
+98/500050/campos_512_v4
+98/500051/campos_512_v4
+98/500052/campos_512_v4
+98/500059/campos_512_v4
+98/500063/campos_512_v4
+98/500071/campos_512_v4
+98/500077/campos_512_v4
+98/500090/campos_512_v4
+98/500105/campos_512_v4
+98/500119/campos_512_v4
+98/500121/campos_512_v4
+98/500125/campos_512_v4
+98/500130/campos_512_v4
+98/500134/campos_512_v4
+98/500144/campos_512_v4
+98/500160/campos_512_v4
+98/500163/campos_512_v4
+98/500167/campos_512_v4
+98/500176/campos_512_v4
+98/500180/campos_512_v4
+98/500186/campos_512_v4
+98/500190/campos_512_v4
+98/500209/campos_512_v4
+98/500215/campos_512_v4
+98/500231/campos_512_v4
+98/500234/campos_512_v4
+98/500241/campos_512_v4
+98/500244/campos_512_v4
+98/500245/campos_512_v4
+98/500246/campos_512_v4
+98/500256/campos_512_v4
+98/500280/campos_512_v4
+98/500281/campos_512_v4
+98/500283/campos_512_v4
+98/500291/campos_512_v4
+98/500293/campos_512_v4
+98/500296/campos_512_v4
+98/500298/campos_512_v4
+98/500302/campos_512_v4
+98/500305/campos_512_v4
+98/500319/campos_512_v4
+98/500332/campos_512_v4
+98/500333/campos_512_v4
+98/500336/campos_512_v4
+98/500343/campos_512_v4
+98/500345/campos_512_v4
+98/500350/campos_512_v4
+98/500355/campos_512_v4
+98/500357/campos_512_v4
+98/500382/campos_512_v4
+98/500388/campos_512_v4
+98/500392/campos_512_v4
+98/500407/campos_512_v4
+98/500419/campos_512_v4
+98/500421/campos_512_v4
+98/500430/campos_512_v4
+98/500433/campos_512_v4
+98/500435/campos_512_v4
+98/500442/campos_512_v4
+98/500465/campos_512_v4
+98/500470/campos_512_v4
+98/500483/campos_512_v4
+98/500484/campos_512_v4
+98/500485/campos_512_v4
+98/500493/campos_512_v4
+98/500501/campos_512_v4
+98/500504/campos_512_v4
+98/500505/campos_512_v4
+98/500506/campos_512_v4
+98/500513/campos_512_v4
+98/500532/campos_512_v4
+98/500539/campos_512_v4
+98/500548/campos_512_v4
+98/500549/campos_512_v4
+98/500555/campos_512_v4
+98/500557/campos_512_v4
+98/500566/campos_512_v4
+98/500572/campos_512_v4
+98/500578/campos_512_v4
+98/500583/campos_512_v4
+98/500584/campos_512_v4
+98/500585/campos_512_v4
+98/500586/campos_512_v4
+98/500590/campos_512_v4
+98/500591/campos_512_v4
+98/500592/campos_512_v4
+98/500593/campos_512_v4
+98/500597/campos_512_v4
+98/500606/campos_512_v4
+98/500615/campos_512_v4
+98/500618/campos_512_v4
+98/500623/campos_512_v4
+98/500626/campos_512_v4
+98/500632/campos_512_v4
+98/500640/campos_512_v4
+98/500651/campos_512_v4
+98/500672/campos_512_v4
+98/500677/campos_512_v4
+98/500683/campos_512_v4
+98/500686/campos_512_v4
+98/500687/campos_512_v4
+98/500691/campos_512_v4
+98/500692/campos_512_v4
+98/500697/campos_512_v4
+98/500703/campos_512_v4
+98/500704/campos_512_v4
+98/500707/campos_512_v4
+98/500711/campos_512_v4
+98/500712/campos_512_v4
+98/500722/campos_512_v4
+98/500727/campos_512_v4
+98/500730/campos_512_v4
+98/500746/campos_512_v4
+98/500747/campos_512_v4
+98/500750/campos_512_v4
+98/500752/campos_512_v4
+98/500757/campos_512_v4
+98/500760/campos_512_v4
+98/500768/campos_512_v4
+98/500786/campos_512_v4
+98/500801/campos_512_v4
+98/500809/campos_512_v4
+98/500822/campos_512_v4
+98/500858/campos_512_v4
+98/500864/campos_512_v4
+98/500865/campos_512_v4
+98/500876/campos_512_v4
+98/500898/campos_512_v4
+98/500905/campos_512_v4
+98/500912/campos_512_v4
+98/500918/campos_512_v4
+98/500932/campos_512_v4
+98/500933/campos_512_v4
+98/500940/campos_512_v4
+98/500952/campos_512_v4
+98/500954/campos_512_v4
+98/500971/campos_512_v4
+98/500983/campos_512_v4
+98/500990/campos_512_v4
+98/501001/campos_512_v4
+98/501012/campos_512_v4
+98/501066/campos_512_v4
+98/501078/campos_512_v4
+98/501080/campos_512_v4
+98/501088/campos_512_v4
+98/501093/campos_512_v4
+98/501094/campos_512_v4
+98/501097/campos_512_v4
+98/501098/campos_512_v4
+98/501118/campos_512_v4
+98/501136/campos_512_v4
+98/501142/campos_512_v4
+98/501143/campos_512_v4
+98/501149/campos_512_v4
+98/501162/campos_512_v4
+98/501166/campos_512_v4
+98/501173/campos_512_v4
+98/501174/campos_512_v4
+98/501176/campos_512_v4
+98/501177/campos_512_v4
+98/501202/campos_512_v4
+98/501203/campos_512_v4
+98/501204/campos_512_v4
+98/501207/campos_512_v4
+98/501211/campos_512_v4
+98/501226/campos_512_v4
+98/501230/campos_512_v4
+98/501234/campos_512_v4
+98/501235/campos_512_v4
+98/501250/campos_512_v4
+98/501252/campos_512_v4
+98/501255/campos_512_v4
+98/501256/campos_512_v4
+98/501272/campos_512_v4
+98/501277/campos_512_v4
+98/501289/campos_512_v4
+98/501299/campos_512_v4
+98/501305/campos_512_v4
+98/501306/campos_512_v4
+98/501307/campos_512_v4
+98/501309/campos_512_v4
+98/501331/campos_512_v4
+98/501340/campos_512_v4
+98/501341/campos_512_v4
+98/501342/campos_512_v4
+98/501363/campos_512_v4
+98/501384/campos_512_v4
+98/501386/campos_512_v4
+98/501387/campos_512_v4
+98/501391/campos_512_v4
+98/501405/campos_512_v4
+98/501409/campos_512_v4
+98/501411/campos_512_v4
+98/501421/campos_512_v4
+98/501437/campos_512_v4
+98/501453/campos_512_v4
+98/501457/campos_512_v4
+98/501460/campos_512_v4
+98/501461/campos_512_v4
+98/501462/campos_512_v4
+98/501481/campos_512_v4
+98/501486/campos_512_v4
+98/501493/campos_512_v4
+98/501495/campos_512_v4
+98/501506/campos_512_v4
+98/501507/campos_512_v4
+98/501517/campos_512_v4
+98/501519/campos_512_v4
+98/501520/campos_512_v4
+98/501529/campos_512_v4
+98/501536/campos_512_v4
+98/501543/campos_512_v4
+98/501554/campos_512_v4
+98/501562/campos_512_v4
+98/501566/campos_512_v4
+98/501571/campos_512_v4
+98/501573/campos_512_v4
+98/501574/campos_512_v4
+98/501575/campos_512_v4
+98/501578/campos_512_v4
+98/501589/campos_512_v4
+98/501597/campos_512_v4
+98/501605/campos_512_v4
+98/501608/campos_512_v4
+98/501616/campos_512_v4
+98/501617/campos_512_v4
+98/501660/campos_512_v4
+98/501666/campos_512_v4
+98/501673/campos_512_v4
+98/501674/campos_512_v4
+98/501680/campos_512_v4
+98/501681/campos_512_v4
+98/501682/campos_512_v4
+98/501684/campos_512_v4
+98/501694/campos_512_v4
+98/501695/campos_512_v4
+98/501696/campos_512_v4
+98/501712/campos_512_v4
+98/501723/campos_512_v4
+98/501724/campos_512_v4
+98/501736/campos_512_v4
+98/501740/campos_512_v4
+98/501743/campos_512_v4
+98/501751/campos_512_v4
+98/501764/campos_512_v4
+98/501766/campos_512_v4
+98/501767/campos_512_v4
+98/501768/campos_512_v4
+98/501774/campos_512_v4
+98/501782/campos_512_v4
+98/501787/campos_512_v4
+98/501789/campos_512_v4
+98/501792/campos_512_v4
+98/501793/campos_512_v4
+98/501809/campos_512_v4
+98/501812/campos_512_v4
+98/501825/campos_512_v4
+98/501827/campos_512_v4
+98/501831/campos_512_v4
+98/501836/campos_512_v4
+98/501849/campos_512_v4
+98/501858/campos_512_v4
+98/501864/campos_512_v4
+98/501880/campos_512_v4
+98/501884/campos_512_v4
+98/501908/campos_512_v4
+98/501917/campos_512_v4
+98/501921/campos_512_v4
+98/501923/campos_512_v4
+98/501946/campos_512_v4
+98/501950/campos_512_v4
+98/501966/campos_512_v4
+98/501971/campos_512_v4
+98/501979/campos_512_v4
+98/501987/campos_512_v4
+98/501994/campos_512_v4
+98/502002/campos_512_v4
+98/502019/campos_512_v4
+98/502027/campos_512_v4
+98/502031/campos_512_v4
+98/502034/campos_512_v4
+98/502035/campos_512_v4
+98/502037/campos_512_v4
+98/502045/campos_512_v4
+98/502047/campos_512_v4
+98/502076/campos_512_v4
+98/502077/campos_512_v4
+98/502079/campos_512_v4
+98/502082/campos_512_v4
+98/502090/campos_512_v4
+98/502115/campos_512_v4
+98/502119/campos_512_v4
+98/502124/campos_512_v4
+98/502138/campos_512_v4
+98/502145/campos_512_v4
+98/502146/campos_512_v4
+98/502147/campos_512_v4
+98/502148/campos_512_v4
+98/502151/campos_512_v4
+98/502152/campos_512_v4
+98/502222/campos_512_v4
+98/502231/campos_512_v4
+98/502234/campos_512_v4
+98/502250/campos_512_v4
+98/502256/campos_512_v4
+98/502260/campos_512_v4
+98/502261/campos_512_v4
+98/502264/campos_512_v4
+98/502266/campos_512_v4
+98/502297/campos_512_v4
+98/502298/campos_512_v4
+98/502309/campos_512_v4
+98/502310/campos_512_v4
+98/502316/campos_512_v4
+98/502322/campos_512_v4
+98/502353/campos_512_v4
+98/502363/campos_512_v4
+98/502372/campos_512_v4
+98/502378/campos_512_v4
+98/502381/campos_512_v4
+98/502383/campos_512_v4
+98/502406/campos_512_v4
+98/502409/campos_512_v4
+98/502410/campos_512_v4
+98/502423/campos_512_v4
+98/502432/campos_512_v4
+98/502440/campos_512_v4
+98/502443/campos_512_v4
+98/502448/campos_512_v4
+98/502452/campos_512_v4
+98/502454/campos_512_v4
+98/502456/campos_512_v4
+98/502458/campos_512_v4
+98/502472/campos_512_v4
+98/502474/campos_512_v4
+98/502475/campos_512_v4
+98/502476/campos_512_v4
+98/502479/campos_512_v4
+98/502480/campos_512_v4
+98/502484/campos_512_v4
+98/502486/campos_512_v4
+98/502491/campos_512_v4
+98/502497/campos_512_v4
+98/502503/campos_512_v4
+98/502506/campos_512_v4
+98/502513/campos_512_v4
+98/502515/campos_512_v4
+98/502517/campos_512_v4
+98/502521/campos_512_v4
+98/502530/campos_512_v4
+98/502534/campos_512_v4
+98/502536/campos_512_v4
+98/502552/campos_512_v4
+98/502564/campos_512_v4
+98/502578/campos_512_v4
+98/502584/campos_512_v4
+98/502589/campos_512_v4
+98/502596/campos_512_v4
+98/502600/campos_512_v4
+98/502607/campos_512_v4
+98/502665/campos_512_v4
+98/502672/campos_512_v4
+98/502681/campos_512_v4
+98/502684/campos_512_v4
+98/502687/campos_512_v4
+98/502688/campos_512_v4
+98/502705/campos_512_v4
+98/502716/campos_512_v4
+98/502722/campos_512_v4
+98/502723/campos_512_v4
+98/502725/campos_512_v4
+98/502726/campos_512_v4
+98/502731/campos_512_v4
+98/502734/campos_512_v4
+98/502760/campos_512_v4
+98/502786/campos_512_v4
+98/502801/campos_512_v4
+98/502820/campos_512_v4
+98/502831/campos_512_v4
+98/502835/campos_512_v4
+98/502851/campos_512_v4
+98/502860/campos_512_v4
+98/502863/campos_512_v4
+98/502874/campos_512_v4
+98/502876/campos_512_v4
+98/502879/campos_512_v4
+98/502894/campos_512_v4
+98/502901/campos_512_v4
+98/502916/campos_512_v4
+98/502919/campos_512_v4
+98/502938/campos_512_v4
+98/502944/campos_512_v4
+98/502951/campos_512_v4
+98/502961/campos_512_v4
+98/502968/campos_512_v4
+98/502971/campos_512_v4
+98/503002/campos_512_v4
+98/503011/campos_512_v4
+98/503018/campos_512_v4
+98/503019/campos_512_v4
+98/503026/campos_512_v4
+98/503029/campos_512_v4
+98/503033/campos_512_v4
+98/503036/campos_512_v4
+98/503051/campos_512_v4
+98/503066/campos_512_v4
+98/503068/campos_512_v4
+98/503087/campos_512_v4
+98/503089/campos_512_v4
+98/503099/campos_512_v4
+98/503106/campos_512_v4
+98/503109/campos_512_v4
+98/503111/campos_512_v4
+98/503122/campos_512_v4
+98/503123/campos_512_v4
+98/503153/campos_512_v4
+98/503155/campos_512_v4
+98/503157/campos_512_v4
+98/503187/campos_512_v4
+98/503195/campos_512_v4
+98/503197/campos_512_v4
+98/503199/campos_512_v4
+98/503206/campos_512_v4
+98/503207/campos_512_v4
+98/503211/campos_512_v4
+98/503217/campos_512_v4
+98/503229/campos_512_v4
+98/503237/campos_512_v4
+98/503247/campos_512_v4
+98/503253/campos_512_v4
+98/503277/campos_512_v4
+98/503289/campos_512_v4
+98/503291/campos_512_v4
+98/503293/campos_512_v4
+98/503301/campos_512_v4
+98/503302/campos_512_v4
+98/503303/campos_512_v4
+98/503304/campos_512_v4
+98/503309/campos_512_v4
+98/503313/campos_512_v4
+98/503326/campos_512_v4
+98/503327/campos_512_v4
+98/503329/campos_512_v4
+98/503333/campos_512_v4
+98/503336/campos_512_v4
+98/503339/campos_512_v4
+98/503348/campos_512_v4
+98/503349/campos_512_v4
+98/503354/campos_512_v4
+98/503357/campos_512_v4
+98/503368/campos_512_v4
+98/503377/campos_512_v4
+98/503389/campos_512_v4
+98/503393/campos_512_v4
+98/503401/campos_512_v4
+98/503405/campos_512_v4
+98/503410/campos_512_v4
+98/503411/campos_512_v4
+98/503413/campos_512_v4
+98/503415/campos_512_v4
+98/503419/campos_512_v4
+98/503421/campos_512_v4
+98/503429/campos_512_v4
+98/503431/campos_512_v4
+98/503438/campos_512_v4
+98/503464/campos_512_v4
+98/503480/campos_512_v4
+98/503481/campos_512_v4
+98/503493/campos_512_v4
+98/503504/campos_512_v4
+98/503505/campos_512_v4
+98/503509/campos_512_v4
+98/503513/campos_512_v4
+98/503514/campos_512_v4
+98/503517/campos_512_v4
+98/503533/campos_512_v4
+98/503548/campos_512_v4
+98/503558/campos_512_v4
+98/503560/campos_512_v4
+98/503576/campos_512_v4
+98/503582/campos_512_v4
+98/503585/campos_512_v4
+98/503587/campos_512_v4
+98/503591/campos_512_v4
+98/503597/campos_512_v4
+98/503608/campos_512_v4
+98/503621/campos_512_v4
+98/503625/campos_512_v4
+98/503635/campos_512_v4
+98/503642/campos_512_v4
+98/503643/campos_512_v4
+98/503646/campos_512_v4
+98/503650/campos_512_v4
+98/503667/campos_512_v4
+98/503672/campos_512_v4
+98/503673/campos_512_v4
+98/503674/campos_512_v4
+98/503675/campos_512_v4
+98/503703/campos_512_v4
+98/503707/campos_512_v4
+98/503708/campos_512_v4
+98/503714/campos_512_v4
+98/503716/campos_512_v4
+98/503726/campos_512_v4
+98/503738/campos_512_v4
+98/503750/campos_512_v4
+98/503758/campos_512_v4
+98/503770/campos_512_v4
+98/503789/campos_512_v4
+98/503824/campos_512_v4
+98/503840/campos_512_v4
+98/503841/campos_512_v4
+98/503845/campos_512_v4
+98/503848/campos_512_v4
+98/503854/campos_512_v4
+98/503863/campos_512_v4
+98/503864/campos_512_v4
+98/503868/campos_512_v4
+98/503897/campos_512_v4
+98/503898/campos_512_v4
+98/503901/campos_512_v4
+98/503907/campos_512_v4
+98/503910/campos_512_v4
+98/503917/campos_512_v4
+98/503929/campos_512_v4
+98/503935/campos_512_v4
+98/503944/campos_512_v4
+98/503953/campos_512_v4
+98/503956/campos_512_v4
+98/503978/campos_512_v4
+98/503979/campos_512_v4
+98/503994/campos_512_v4
+98/503999/campos_512_v4
+98/504019/campos_512_v4
+98/504030/campos_512_v4
+98/504036/campos_512_v4
+98/504038/campos_512_v4
+98/504041/campos_512_v4
+98/504042/campos_512_v4
+98/504045/campos_512_v4
+98/504055/campos_512_v4
+98/504060/campos_512_v4
+98/504066/campos_512_v4
+98/504067/campos_512_v4
+98/504085/campos_512_v4
+98/504087/campos_512_v4
+98/504091/campos_512_v4
+98/504097/campos_512_v4
+98/504105/campos_512_v4
+98/504106/campos_512_v4
+98/504116/campos_512_v4
+98/504122/campos_512_v4
+98/504128/campos_512_v4
+98/504137/campos_512_v4
+98/504142/campos_512_v4
+98/504146/campos_512_v4
+98/504147/campos_512_v4
+98/504150/campos_512_v4
+98/504151/campos_512_v4
+98/504155/campos_512_v4
+98/504163/campos_512_v4
+98/504169/campos_512_v4
+98/504177/campos_512_v4
+98/504182/campos_512_v4
+98/504184/campos_512_v4
+98/504211/campos_512_v4
+98/504224/campos_512_v4
+98/504225/campos_512_v4
+98/504233/campos_512_v4
+98/504234/campos_512_v4
+98/504250/campos_512_v4
+98/504259/campos_512_v4
+98/504265/campos_512_v4
+98/504274/campos_512_v4
+98/504279/campos_512_v4
+98/504284/campos_512_v4
+98/504291/campos_512_v4
+98/504302/campos_512_v4
+98/504307/campos_512_v4
+98/504322/campos_512_v4
+98/504329/campos_512_v4
+98/504335/campos_512_v4
+98/504339/campos_512_v4
+98/504342/campos_512_v4
+98/504351/campos_512_v4
+98/504352/campos_512_v4
+98/504353/campos_512_v4
+98/504356/campos_512_v4
+98/504365/campos_512_v4
+98/504371/campos_512_v4
+98/504379/campos_512_v4
+98/504387/campos_512_v4
+98/504398/campos_512_v4
+98/504410/campos_512_v4
+98/504411/campos_512_v4
+98/504413/campos_512_v4
+98/504420/campos_512_v4
+98/504432/campos_512_v4
+98/504442/campos_512_v4
+98/504443/campos_512_v4
+98/504452/campos_512_v4
+98/504453/campos_512_v4
+98/504464/campos_512_v4
+98/504470/campos_512_v4
+98/504471/campos_512_v4
+98/504477/campos_512_v4
+98/504480/campos_512_v4
+98/504482/campos_512_v4
+98/504495/campos_512_v4
+98/504506/campos_512_v4
+98/504507/campos_512_v4
+98/504511/campos_512_v4
+98/504548/campos_512_v4
+98/504559/campos_512_v4
+98/504561/campos_512_v4
+98/504563/campos_512_v4
+98/504570/campos_512_v4
+98/504582/campos_512_v4
+98/504583/campos_512_v4
+98/504588/campos_512_v4
+98/504592/campos_512_v4
+98/504609/campos_512_v4
+98/504612/campos_512_v4
+98/504617/campos_512_v4
+98/504624/campos_512_v4
+98/504626/campos_512_v4
+98/504630/campos_512_v4
+98/504631/campos_512_v4
+98/504632/campos_512_v4
+98/504639/campos_512_v4
+98/504660/campos_512_v4
+98/504670/campos_512_v4
+98/504671/campos_512_v4
+98/504680/campos_512_v4
+98/504693/campos_512_v4
+98/504700/campos_512_v4
+98/504715/campos_512_v4
+98/504724/campos_512_v4
+98/504738/campos_512_v4
+98/504739/campos_512_v4
+98/504741/campos_512_v4
+98/504743/campos_512_v4
+98/504753/campos_512_v4
+98/504763/campos_512_v4
+98/504779/campos_512_v4
+98/504780/campos_512_v4
+98/504784/campos_512_v4
+98/504798/campos_512_v4
+98/504815/campos_512_v4
+98/504822/campos_512_v4
+98/504827/campos_512_v4
+98/504831/campos_512_v4
+98/504840/campos_512_v4
+98/504856/campos_512_v4
+98/504863/campos_512_v4
+98/504866/campos_512_v4
+98/504869/campos_512_v4
+98/504871/campos_512_v4
+98/504880/campos_512_v4
+98/504883/campos_512_v4
+98/504892/campos_512_v4
+98/504899/campos_512_v4
+98/504915/campos_512_v4
+98/504945/campos_512_v4
+98/504955/campos_512_v4
+98/504958/campos_512_v4
+98/504960/campos_512_v4
+98/504972/campos_512_v4
+98/504982/campos_512_v4
+98/504988/campos_512_v4
+98/504991/campos_512_v4
+98/504992/campos_512_v4
+98/504994/campos_512_v4
+98/504997/campos_512_v4
+98/504998/campos_512_v4
+99/505002/campos_512_v4
+99/505035/campos_512_v4
+99/505041/campos_512_v4
+99/505043/campos_512_v4
+99/505068/campos_512_v4
+99/505078/campos_512_v4
+99/505081/campos_512_v4
+99/505089/campos_512_v4
+99/505091/campos_512_v4
+99/505102/campos_512_v4
+99/505105/campos_512_v4
+99/505113/campos_512_v4
+99/505115/campos_512_v4
+99/505129/campos_512_v4
+99/505130/campos_512_v4
+99/505136/campos_512_v4
+99/505154/campos_512_v4
+99/505183/campos_512_v4
+99/505186/campos_512_v4
+99/505188/campos_512_v4
+99/505189/campos_512_v4
+99/505205/campos_512_v4
+99/505212/campos_512_v4
+99/505216/campos_512_v4
+99/505233/campos_512_v4
+99/505234/campos_512_v4
+99/505235/campos_512_v4
+99/505253/campos_512_v4
+99/505254/campos_512_v4
+99/505258/campos_512_v4
+99/505260/campos_512_v4
+99/505264/campos_512_v4
+99/505265/campos_512_v4
+99/505275/campos_512_v4
+99/505276/campos_512_v4
+99/505277/campos_512_v4
+99/505287/campos_512_v4
+99/505288/campos_512_v4
+99/505307/campos_512_v4
+99/505312/campos_512_v4
+99/505315/campos_512_v4
+99/505321/campos_512_v4
+99/505326/campos_512_v4
+99/505332/campos_512_v4
+99/505357/campos_512_v4
+99/505359/campos_512_v4
+99/505366/campos_512_v4
+99/505368/campos_512_v4
+99/505383/campos_512_v4
+99/505390/campos_512_v4
+99/505396/campos_512_v4
+99/505399/campos_512_v4
+99/505403/campos_512_v4
+99/505408/campos_512_v4
+99/505410/campos_512_v4
+99/505415/campos_512_v4
+99/505420/campos_512_v4
+99/505425/campos_512_v4
+99/505428/campos_512_v4
+99/505436/campos_512_v4
+99/505462/campos_512_v4
+99/505474/campos_512_v4
+99/505486/campos_512_v4
+99/505496/campos_512_v4
+99/505518/campos_512_v4
+99/505523/campos_512_v4
+99/505532/campos_512_v4
+99/505539/campos_512_v4
+99/505549/campos_512_v4
+99/505556/campos_512_v4
+99/505557/campos_512_v4
+99/505558/campos_512_v4
+99/505560/campos_512_v4
+99/505564/campos_512_v4
+99/505566/campos_512_v4
+99/505568/campos_512_v4
+99/505571/campos_512_v4
+99/505588/campos_512_v4
+99/505596/campos_512_v4
+99/505605/campos_512_v4
+99/505616/campos_512_v4
+99/505622/campos_512_v4
+99/505630/campos_512_v4
+99/505636/campos_512_v4
+99/505640/campos_512_v4
+99/505641/campos_512_v4
+99/505654/campos_512_v4
+99/505656/campos_512_v4
+99/505662/campos_512_v4
+99/505668/campos_512_v4
+99/505671/campos_512_v4
+99/505690/campos_512_v4
+99/505697/campos_512_v4
+99/505704/campos_512_v4
+99/505715/campos_512_v4
+99/505723/campos_512_v4
+99/505733/campos_512_v4
+99/505738/campos_512_v4
+99/505742/campos_512_v4
+99/505759/campos_512_v4
+99/505761/campos_512_v4
+99/505764/campos_512_v4
+99/505771/campos_512_v4
+99/505779/campos_512_v4
+99/505787/campos_512_v4
+99/505799/campos_512_v4
+99/505811/campos_512_v4
+99/505827/campos_512_v4
+99/505834/campos_512_v4
+99/505839/campos_512_v4
+99/505843/campos_512_v4
+99/505849/campos_512_v4
+99/505855/campos_512_v4
+99/505883/campos_512_v4
+99/505885/campos_512_v4
+99/505890/campos_512_v4
+99/505898/campos_512_v4
+99/505905/campos_512_v4
+99/505906/campos_512_v4
+99/505910/campos_512_v4
+99/505931/campos_512_v4
+99/505933/campos_512_v4
+99/505951/campos_512_v4
+99/505953/campos_512_v4
+99/505956/campos_512_v4
+99/505963/campos_512_v4
+99/505966/campos_512_v4
+99/506001/campos_512_v4
+99/506002/campos_512_v4
+99/506006/campos_512_v4
+99/506012/campos_512_v4
+99/506021/campos_512_v4
+99/506026/campos_512_v4
+99/506029/campos_512_v4
+99/506037/campos_512_v4
+99/506045/campos_512_v4
+99/506048/campos_512_v4
+99/506051/campos_512_v4
+99/506053/campos_512_v4
+99/506066/campos_512_v4
+99/506085/campos_512_v4
+99/506086/campos_512_v4
+99/506089/campos_512_v4
+99/506101/campos_512_v4
+99/506102/campos_512_v4
+99/506109/campos_512_v4
+99/506110/campos_512_v4
+99/506125/campos_512_v4
+99/506127/campos_512_v4
+99/506131/campos_512_v4
+99/506135/campos_512_v4
+99/506138/campos_512_v4
+99/506142/campos_512_v4
+99/506150/campos_512_v4
+99/506156/campos_512_v4
+99/506167/campos_512_v4
+99/506190/campos_512_v4
+99/506191/campos_512_v4
+99/506196/campos_512_v4
+99/506199/campos_512_v4
+99/506206/campos_512_v4
+99/506239/campos_512_v4
+99/506241/campos_512_v4
+99/506243/campos_512_v4
+99/506248/campos_512_v4
+99/506255/campos_512_v4
+99/506262/campos_512_v4
+99/506263/campos_512_v4
+99/506270/campos_512_v4
+99/506283/campos_512_v4
+99/506294/campos_512_v4
+99/506308/campos_512_v4
+99/506320/campos_512_v4
+99/506324/campos_512_v4
+99/506325/campos_512_v4
+99/506327/campos_512_v4
+99/506331/campos_512_v4
+99/506333/campos_512_v4
+99/506356/campos_512_v4
+99/506365/campos_512_v4
+99/506366/campos_512_v4
+99/506384/campos_512_v4
+99/506391/campos_512_v4
+99/506395/campos_512_v4
+99/506401/campos_512_v4
+99/506403/campos_512_v4
+99/506408/campos_512_v4
+99/506412/campos_512_v4
+99/506420/campos_512_v4
+99/506431/campos_512_v4
+99/506445/campos_512_v4
+99/506452/campos_512_v4
+99/506472/campos_512_v4
+99/506473/campos_512_v4
+99/506475/campos_512_v4
+99/506481/campos_512_v4
+99/506490/campos_512_v4
+99/506494/campos_512_v4
+99/506506/campos_512_v4
+99/506516/campos_512_v4
+99/506528/campos_512_v4
+99/506534/campos_512_v4
+99/506538/campos_512_v4
+99/506553/campos_512_v4
+99/506564/campos_512_v4
+99/506567/campos_512_v4
+99/506596/campos_512_v4
+99/506602/campos_512_v4
+99/506610/campos_512_v4
+99/506617/campos_512_v4
+99/506622/campos_512_v4
+99/506636/campos_512_v4
+99/506657/campos_512_v4
+99/506684/campos_512_v4
+99/506691/campos_512_v4
+99/506695/campos_512_v4
+99/506698/campos_512_v4
+99/506699/campos_512_v4
+99/506700/campos_512_v4
+99/506702/campos_512_v4
+99/506711/campos_512_v4
+99/506721/campos_512_v4
+99/506740/campos_512_v4
+99/506759/campos_512_v4
+99/506766/campos_512_v4
+99/506770/campos_512_v4
+99/506771/campos_512_v4
+99/506773/campos_512_v4
+99/506779/campos_512_v4
+99/506782/campos_512_v4
+99/506785/campos_512_v4
+99/506787/campos_512_v4
+99/506793/campos_512_v4
+99/506797/campos_512_v4
+99/506805/campos_512_v4
+99/506809/campos_512_v4
+99/506814/campos_512_v4
+99/506818/campos_512_v4
+99/506823/campos_512_v4
+99/506844/campos_512_v4
+99/506846/campos_512_v4
+99/506847/campos_512_v4
+99/506859/campos_512_v4
+99/506864/campos_512_v4
+99/506868/campos_512_v4
+99/506872/campos_512_v4
+99/506873/campos_512_v4
+99/506875/campos_512_v4
+99/506888/campos_512_v4
+99/506899/campos_512_v4
+99/506908/campos_512_v4
+99/506914/campos_512_v4
+99/506923/campos_512_v4
+99/506928/campos_512_v4
+99/506930/campos_512_v4
+99/506931/campos_512_v4
+99/506932/campos_512_v4
+99/506938/campos_512_v4
+99/506946/campos_512_v4
+99/506951/campos_512_v4
+99/506965/campos_512_v4
+99/506982/campos_512_v4
+99/506983/campos_512_v4
+99/506986/campos_512_v4
+99/506988/campos_512_v4
+99/507000/campos_512_v4
+99/507014/campos_512_v4
+99/507018/campos_512_v4
+99/507019/campos_512_v4
+99/507041/campos_512_v4
+99/507042/campos_512_v4
+99/507055/campos_512_v4
+99/507056/campos_512_v4
+99/507061/campos_512_v4
+99/507100/campos_512_v4
+99/507126/campos_512_v4
+99/507142/campos_512_v4
+99/507146/campos_512_v4
+99/507153/campos_512_v4
+99/507154/campos_512_v4
+99/507155/campos_512_v4
+99/507171/campos_512_v4
+99/507215/campos_512_v4
+99/507216/campos_512_v4
+99/507218/campos_512_v4
+99/507221/campos_512_v4
+99/507238/campos_512_v4
+99/507246/campos_512_v4
+99/507249/campos_512_v4
+99/507251/campos_512_v4
+99/507264/campos_512_v4
+99/507270/campos_512_v4
+99/507272/campos_512_v4
+99/507274/campos_512_v4
+99/507275/campos_512_v4
+99/507286/campos_512_v4
+99/507288/campos_512_v4
+99/507302/campos_512_v4
+99/507303/campos_512_v4
+99/507306/campos_512_v4
+99/507307/campos_512_v4
+99/507313/campos_512_v4
+99/507314/campos_512_v4
+99/507316/campos_512_v4
+99/507322/campos_512_v4
+99/507349/campos_512_v4
+99/507360/campos_512_v4
+99/507370/campos_512_v4
+99/507372/campos_512_v4
+99/507384/campos_512_v4
+99/507387/campos_512_v4
+99/507389/campos_512_v4
+99/507391/campos_512_v4
+99/507395/campos_512_v4
+99/507406/campos_512_v4
+99/507411/campos_512_v4
+99/507426/campos_512_v4
+99/507438/campos_512_v4
+99/507441/campos_512_v4
+99/507455/campos_512_v4
+99/507467/campos_512_v4
+99/507469/campos_512_v4
+99/507471/campos_512_v4
+99/507483/campos_512_v4
+99/507485/campos_512_v4
+99/507496/campos_512_v4
+99/507499/campos_512_v4
+99/507500/campos_512_v4
+99/507507/campos_512_v4
+99/507514/campos_512_v4
+99/507518/campos_512_v4
+99/507521/campos_512_v4
+99/507530/campos_512_v4
+99/507532/campos_512_v4
+99/507534/campos_512_v4
+99/507537/campos_512_v4
+99/507538/campos_512_v4
+99/507541/campos_512_v4
+99/507544/campos_512_v4
+99/507551/campos_512_v4
+99/507556/campos_512_v4
+99/507557/campos_512_v4
+99/507568/campos_512_v4
+99/507570/campos_512_v4
+99/507579/campos_512_v4
+99/507587/campos_512_v4
+99/507588/campos_512_v4
+99/507595/campos_512_v4
+99/507596/campos_512_v4
+99/507609/campos_512_v4
+99/507614/campos_512_v4
+99/507616/campos_512_v4
+99/507621/campos_512_v4
+99/507625/campos_512_v4
+99/507634/campos_512_v4
+99/507636/campos_512_v4
+99/507637/campos_512_v4
+99/507643/campos_512_v4
+99/507644/campos_512_v4
+99/507654/campos_512_v4
+99/507661/campos_512_v4
+99/507684/campos_512_v4
+99/507687/campos_512_v4
+99/507691/campos_512_v4
+99/507695/campos_512_v4
+99/507696/campos_512_v4
+99/507702/campos_512_v4
+99/507704/campos_512_v4
+99/507705/campos_512_v4
+99/507714/campos_512_v4
+99/507719/campos_512_v4
+99/507724/campos_512_v4
+99/507732/campos_512_v4
+99/507738/campos_512_v4
+99/507743/campos_512_v4
+99/507746/campos_512_v4
+99/507749/campos_512_v4
+99/507750/campos_512_v4
+99/507752/campos_512_v4
+99/507753/campos_512_v4
+99/507755/campos_512_v4
+99/507766/campos_512_v4
+99/507778/campos_512_v4
+99/507779/campos_512_v4
+99/507781/campos_512_v4
+99/507784/campos_512_v4
+99/507785/campos_512_v4
+99/507787/campos_512_v4
+99/507789/campos_512_v4
+99/507805/campos_512_v4
+99/507807/campos_512_v4
+99/507867/campos_512_v4
+99/507870/campos_512_v4
+99/507872/campos_512_v4
+99/507878/campos_512_v4
+99/507882/campos_512_v4
+99/507890/campos_512_v4
+99/507938/campos_512_v4
+99/507941/campos_512_v4
+99/507946/campos_512_v4
+99/507952/campos_512_v4
+99/507955/campos_512_v4
+99/507965/campos_512_v4
+99/507971/campos_512_v4
+99/507977/campos_512_v4
+99/507986/campos_512_v4
+99/507991/campos_512_v4
+99/507993/campos_512_v4
+99/507994/campos_512_v4
+99/507997/campos_512_v4
+99/508001/campos_512_v4
+99/508012/campos_512_v4
+99/508017/campos_512_v4
+99/508034/campos_512_v4
+99/508041/campos_512_v4
+99/508047/campos_512_v4
+99/508055/campos_512_v4
+99/508064/campos_512_v4
+99/508069/campos_512_v4
+99/508085/campos_512_v4
+99/508100/campos_512_v4
+99/508101/campos_512_v4
+99/508106/campos_512_v4
+99/508107/campos_512_v4
+99/508109/campos_512_v4
+99/508110/campos_512_v4
+99/508112/campos_512_v4
+99/508123/campos_512_v4
+99/508124/campos_512_v4
+99/508141/campos_512_v4
+99/508177/campos_512_v4
+99/508188/campos_512_v4
+99/508191/campos_512_v4
+99/508198/campos_512_v4
+99/508205/campos_512_v4
+99/508211/campos_512_v4
+99/508218/campos_512_v4
+99/508220/campos_512_v4
+99/508222/campos_512_v4
+99/508229/campos_512_v4
+99/508233/campos_512_v4
+99/508236/campos_512_v4
+99/508240/campos_512_v4
+99/508255/campos_512_v4
+99/508257/campos_512_v4
+99/508265/campos_512_v4
+99/508268/campos_512_v4
+99/508280/campos_512_v4
+99/508290/campos_512_v4
+99/508291/campos_512_v4
+99/508300/campos_512_v4
+99/508316/campos_512_v4
+99/508322/campos_512_v4
+99/508328/campos_512_v4
+99/508331/campos_512_v4
+99/508335/campos_512_v4
+99/508342/campos_512_v4
+99/508358/campos_512_v4
+99/508362/campos_512_v4
+99/508369/campos_512_v4
+99/508372/campos_512_v4
+99/508379/campos_512_v4
+99/508387/campos_512_v4
+99/508416/campos_512_v4
+99/508442/campos_512_v4
+99/508451/campos_512_v4
+99/508455/campos_512_v4
+99/508461/campos_512_v4
+99/508483/campos_512_v4
+99/508484/campos_512_v4
+99/508485/campos_512_v4
+99/508486/campos_512_v4
+99/508495/campos_512_v4
+99/508505/campos_512_v4
+99/508507/campos_512_v4
+99/508515/campos_512_v4
+99/508533/campos_512_v4
+99/508534/campos_512_v4
+99/508535/campos_512_v4
+99/508544/campos_512_v4
+99/508551/campos_512_v4
+99/508552/campos_512_v4
+99/508556/campos_512_v4
+99/508565/campos_512_v4
+99/508575/campos_512_v4
+99/508580/campos_512_v4
+99/508582/campos_512_v4
+99/508583/campos_512_v4
+99/508585/campos_512_v4
+99/508588/campos_512_v4
+99/508601/campos_512_v4
+99/508606/campos_512_v4
+99/508609/campos_512_v4
+99/508649/campos_512_v4
+99/508655/campos_512_v4
+99/508658/campos_512_v4
+99/508662/campos_512_v4
+99/508668/campos_512_v4
+99/508669/campos_512_v4
+99/508673/campos_512_v4
+99/508677/campos_512_v4
+99/508678/campos_512_v4
+99/508681/campos_512_v4
+99/508692/campos_512_v4
+99/508705/campos_512_v4
+99/508707/campos_512_v4
+99/508710/campos_512_v4
+99/508713/campos_512_v4
+99/508718/campos_512_v4
+99/508729/campos_512_v4
+99/508740/campos_512_v4
+99/508750/campos_512_v4
+99/508757/campos_512_v4
+99/508782/campos_512_v4
+99/508788/campos_512_v4
+99/508789/campos_512_v4
+99/508821/campos_512_v4
+99/508828/campos_512_v4
+99/508829/campos_512_v4
+99/508833/campos_512_v4
+99/508834/campos_512_v4
+99/508838/campos_512_v4
+99/508840/campos_512_v4
+99/508850/campos_512_v4
+99/508863/campos_512_v4
+99/508865/campos_512_v4
+99/508898/campos_512_v4
+99/508906/campos_512_v4
+99/508916/campos_512_v4
+99/508918/campos_512_v4
+99/508921/campos_512_v4
+99/508930/campos_512_v4
+99/508946/campos_512_v4
+99/508973/campos_512_v4
+99/508974/campos_512_v4
+99/508975/campos_512_v4
+99/508979/campos_512_v4
+99/508984/campos_512_v4
+99/508990/campos_512_v4
+99/508991/campos_512_v4
+99/509004/campos_512_v4
+99/509021/campos_512_v4
+99/509028/campos_512_v4
+99/509030/campos_512_v4
+99/509031/campos_512_v4
+99/509038/campos_512_v4
+99/509044/campos_512_v4
+99/509047/campos_512_v4
+99/509049/campos_512_v4
+99/509055/campos_512_v4
+99/509060/campos_512_v4
+99/509064/campos_512_v4
+99/509075/campos_512_v4
+99/509081/campos_512_v4
+99/509088/campos_512_v4
+99/509093/campos_512_v4
+99/509103/campos_512_v4
+99/509109/campos_512_v4
+99/509111/campos_512_v4
+99/509120/campos_512_v4
+99/509121/campos_512_v4
+99/509130/campos_512_v4
+99/509149/campos_512_v4
+99/509150/campos_512_v4
+99/509167/campos_512_v4
+99/509169/campos_512_v4
+99/509188/campos_512_v4
+99/509203/campos_512_v4
+99/509204/campos_512_v4
+99/509217/campos_512_v4
+99/509235/campos_512_v4
+99/509236/campos_512_v4
+99/509238/campos_512_v4
+99/509242/campos_512_v4
+99/509244/campos_512_v4
+99/509262/campos_512_v4
+99/509266/campos_512_v4
+99/509270/campos_512_v4
+99/509273/campos_512_v4
+99/509277/campos_512_v4
+99/509281/campos_512_v4
+99/509299/campos_512_v4
+99/509300/campos_512_v4
+99/509302/campos_512_v4
+99/509327/campos_512_v4
+99/509336/campos_512_v4
+99/509343/campos_512_v4
+99/509346/campos_512_v4
+99/509353/campos_512_v4
+99/509356/campos_512_v4
+99/509357/campos_512_v4
+99/509359/campos_512_v4
+99/509360/campos_512_v4
+99/509365/campos_512_v4
+99/509366/campos_512_v4
+99/509374/campos_512_v4
+99/509382/campos_512_v4
+99/509383/campos_512_v4
+99/509387/campos_512_v4
+99/509389/campos_512_v4
+99/509406/campos_512_v4
+99/509413/campos_512_v4
+99/509417/campos_512_v4
+99/509432/campos_512_v4
+99/509443/campos_512_v4
+99/509445/campos_512_v4
+99/509446/campos_512_v4
+99/509457/campos_512_v4
+99/509464/campos_512_v4
+99/509466/campos_512_v4
+99/509481/campos_512_v4
+99/509482/campos_512_v4
+99/509486/campos_512_v4
+99/509488/campos_512_v4
+99/509499/campos_512_v4
+99/509500/campos_512_v4
+99/509501/campos_512_v4
+99/509509/campos_512_v4
+99/509511/campos_512_v4
+99/509517/campos_512_v4
+99/509520/campos_512_v4
+99/509523/campos_512_v4
+99/509524/campos_512_v4
+99/509527/campos_512_v4
+99/509528/campos_512_v4
+99/509552/campos_512_v4
+99/509553/campos_512_v4
+99/509554/campos_512_v4
+99/509566/campos_512_v4
+99/509571/campos_512_v4
+99/509573/campos_512_v4
+99/509588/campos_512_v4
+99/509592/campos_512_v4
+99/509599/campos_512_v4
+99/509605/campos_512_v4
+99/509636/campos_512_v4
+99/509639/campos_512_v4
+99/509656/campos_512_v4
+99/509660/campos_512_v4
+99/509665/campos_512_v4
+99/509681/campos_512_v4
+99/509685/campos_512_v4
+99/509695/campos_512_v4
+99/509696/campos_512_v4
+99/509698/campos_512_v4
+99/509706/campos_512_v4
+99/509715/campos_512_v4
+99/509720/campos_512_v4
+99/509729/campos_512_v4
+99/509736/campos_512_v4
+99/509743/campos_512_v4
+99/509746/campos_512_v4
+99/509750/campos_512_v4
+99/509758/campos_512_v4
+99/509789/campos_512_v4
+99/509790/campos_512_v4
+99/509818/campos_512_v4
+99/509819/campos_512_v4
+99/509829/campos_512_v4
+99/509837/campos_512_v4
+99/509839/campos_512_v4
+99/509845/campos_512_v4
+99/509846/campos_512_v4
+99/509847/campos_512_v4
+99/509865/campos_512_v4
+99/509880/campos_512_v4
+99/509881/campos_512_v4
+99/509888/campos_512_v4
+99/509892/campos_512_v4
+99/509899/campos_512_v4
+99/509932/campos_512_v4
+99/509935/campos_512_v4
+99/509940/campos_512_v4
+99/509957/campos_512_v4
+99/509969/campos_512_v4
+99/509990/campos_512_v4
+99/509992/campos_512_v4
+99/509999/campos_512_v4
diff --git a/shell_scripts/release/evaluation/.gitkeep b/shell_scripts/release/evaluation/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/shell_scripts/release/inference/i23d/i23d-stage1.sh b/shell_scripts/release/inference/i23d/i23d-stage1.sh
new file mode 100644
index 0000000000000000000000000000000000000000..a47b9f1497a98593e42b3eb913dfb940ab19d64d
--- /dev/null
+++ b/shell_scripts/release/inference/i23d/i23d-stage1.sh
@@ -0,0 +1,213 @@
+set -x
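+# print each command before it runs, for easier debugging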
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+
+patch_size=14
+
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+
+num_workers=1
+NUM_GPUS=1
+batch_size=1
+num_samples=2
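+# how many samples to generate (forwarded to the sampler as --num_samples)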
+
+microbatch=${batch_size}
+
+data_dir="./assets/demo-image-for-i23d/instantmesh" # GSO demo images are also available
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
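+# optimizer/loss settings below mirror the training config; they are inert during sampling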
+lr=0
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # weight shared by --ce_lambda and --negative_entropy_lambda below; disabled here
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=1.0 # for xyz diffusion
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 10000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 1024 \
+"
+
+
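+# flow-matching sampler configuration; stage-1 denoises the 3-channel (xyz) point-cloud latent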
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 3 \
+--denoise_out_channels 3 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d True \
+--dit_model_arch DiT-PixArt-PCD-CLAY-L \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=./logs/i23d/stage-1/dino_img/
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 3 \
+--resume_checkpoint yslan/GaussianAnything/ckpts/checkpoints/i23d/stage-1/model_joint_denoise_rec_model2335000.pt \
+"
+
+# --resume_checkpoint /mnt/sfs-common/yslan/open-source/checkpoints/i23d/stage-1/model_joint_denoise_rec_model2335000.pt \
+
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+export CUDA_VISIBLE_DEVICES=0
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:12780 \
+ scripts/vit_triplane_sit_sample.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key img \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk False \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type img-uniform-gvp-dino \
+ --load_pcd True \
+ --num_frames 8 \
+ --split_chunk_size 16 \
+ --load_caption_dataset False \
+ --num_samples ${num_samples} \
+ --plane_n 1 \
+ --pooling_ctx_dim 768 \
+ --export_mesh False \
+ --load_real True \
+ --load_gso True \
+ --pcd_path "" \
+ --mv_latent_dir ""
diff --git a/shell_scripts/release/inference/i23d/i23d-stage2.sh b/shell_scripts/release/inference/i23d/i23d-stage2.sh
new file mode 100644
index 0000000000000000000000000000000000000000..d1491e6c9250482dedae99f28e883a36423a21fb
--- /dev/null
+++ b/shell_scripts/release/inference/i23d/i23d-stage2.sh
@@ -0,0 +1,217 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512 # final rendered resolution
+image_size_encoder=${image_size}
+
+patch_size=14
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+num_frames=8
+
+num_workers=2 # ! for debug
+NUM_GPUS=1
+batch_size=1
+num_samples=1
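+# stage-2 denoises the 10-channel latent features conditioned on the stage-1
+# point clouds, which are read below via --stage_1_output_dir.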
+
+# NOTE: n_cond_frames was left undefined here, which would make the arithmetic
+# below evaluate to 0; we set it to 5 to match the i23d training scripts.
+n_cond_frames=5
+microbatch=$(( n_cond_frames*batch_size*2 ))
+
+data_dir="./assets/demo-image-for-i23d/instantmesh" # gso also available
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=1e-4
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # ?
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=1.0 # no latent rescaling (this stage denoises the 10-dim KL features)
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 10000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 1024 \
+"
+
+
+# ! diffusion_steps and noise_schedule are unused, since continuous flow matching is adopted.
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 10 \
+--denoise_out_channels 10 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d True \
+--dit_model_arch DiT-PixArt-PCD-CLAY-stage2-L \
+--use_eos_feature False \
+--roll_out True \
+"
+logdir=./logs/i23d/stage-2/dino_img-debug/
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 3 \
+--resume_checkpoint yslan/GaussianAnything/ckpts/checkpoints/i23d/stage-2/model_joint_denoise_rec_model2505000.pt \
+"
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+export CUDA_VISIBLE_DEVICES=0
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:21382 \
+ scripts/vit_triplane_sit_sample.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key img-xyz \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk False \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type img-uniform-gvp-dino-stage2 \
+ --load_pcd True \
+ --num_frames ${num_frames} \
+ --split_chunk_size 16 \
+ --load_caption_dataset False \
+ --plane_n 1 \
+ --num_samples ${num_samples} \
+ --pooling_ctx_dim 768 \
+ --load_real True \
+ --load_gso True \
+ --save_img False \
+ --export_mesh True \
+ --pcd_path /cpfs01/user/lanyushi.p/data/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd/ \
+ --stage_1_output_dir ./logs/i23d/stage-1/dino_img/ \
+ --mv_latent_dir "" \
+
+# pcd_structured_latent_space_lion_learnoffset_surfel_novaePT_sr_cascade_x8x4x4_512
\ No newline at end of file
diff --git a/shell_scripts/release/inference/t23d/stage1-t23d.sh b/shell_scripts/release/inference/t23d/stage1-t23d.sh
new file mode 100644
index 0000000000000000000000000000000000000000..dc2a8352ed80dc3c21d646c21ee6594bf1c9c73a
--- /dev/null
+++ b/shell_scripts/release/inference/t23d/stage1-t23d.sh
@@ -0,0 +1,206 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+num_samples=2
+
+patch_size=14
+
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+
+num_workers=0
+NUM_GPUS=1
+batch_size=4
+
+microbatch=${batch_size}
+
+data_dir="placeholder"
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=0
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # ?
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=1.0 # for xyz diffusion
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 25000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 768 \
+"
+
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 3 \
+--denoise_out_channels 3 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d False \
+--dit_model_arch DiT-PCD-L \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=./logs/t23d/stage-1
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 10 \
+--resume_checkpoint yslan/GaussianAnything/ckpts/checkpoints/t23d/stage-1/model_joint_denoise_rec_model1950000.pt \
+"
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+export CUDA_VISIBLE_DEVICES=0
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:22377 \
+ scripts/vit_triplane_sit_sample.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key caption \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk True \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type stage1-t23d \
+ --load_pcd True \
+ --num_frames 8 \
+ --split_chunk_size 16 \
+ --load_caption_dataset True \
+ --plane_n 1 \
+ --i23d False \
+ --pooling_ctx_dim 768 \
+ --num_samples ${num_samples} \
+ --pcd_path "" \
+ --mv_latent_dir "" \
diff --git a/shell_scripts/release/inference/t23d/stage2-t23d.sh b/shell_scripts/release/inference/t23d/stage2-t23d.sh
new file mode 100644
index 0000000000000000000000000000000000000000..14e70877df2e24876751c7e78b6ad90272e26e6a
--- /dev/null
+++ b/shell_scripts/release/inference/t23d/stage2-t23d.sh
@@ -0,0 +1,208 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+
+patch_size=14
+num_samples=2
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+
+num_workers=0
+NUM_GPUS=1
+batch_size=4
+
+microbatch=${batch_size}
+
+data_dir="placeholder"
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=0
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # ?
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=0.25 # for KL-feature diffusion (stage-1 xyz diffusion uses 1.0)
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 25000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 768 \
+"
+
+# ! diffusion_steps and noise_schedule are unused, since continuous flow matching is adopted.
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 10 \
+--denoise_out_channels 10 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--dit_model_arch DiT-PCD-L-stage2-xyz2feat \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=./logs/t23d/stage-2
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 10 \
+--resume_checkpoint yslan/GaussianAnything/ckpts/checkpoints/t23d/stage-2/model_joint_denoise_rec_model2725000.pt \
+"
+
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+export CUDA_VISIBLE_DEVICES=0
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:22258 \
+ scripts/vit_triplane_sit_sample.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key caption \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk True \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type stage2-t23d \
+ --load_pcd True \
+ --num_frames 8 \
+ --split_chunk_size 16 \
+ --load_caption_dataset True \
+ --plane_n 1 \
+ --i23d False \
+ --pooling_ctx_dim 768 \
+ --num_samples ${num_samples} \
+ --pcd_path /cpfs01/user/lanyushi.p/data/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd/ \
+ --stage_1_output_dir ./logs/t23d/stage-1 \
+ --mv_latent_dir /cpfs01/user/lanyushi.p/data/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange/latent_dir
diff --git a/shell_scripts/release/inference/vae-3d-pc2.sh b/shell_scripts/release/inference/vae-3d-pc2.sh
new file mode 100644
index 0000000000000000000000000000000000000000..1a036686369e1e5cdf40b6313777fffb7c892bd4
--- /dev/null
+++ b/shell_scripts/release/inference/vae-3d-pc2.sh
@@ -0,0 +1,190 @@
+set -x
+
+lpips_lambda=2.0
+ssim_lambda=0.
+l1_lambda=0. # following gaussian splatting
+l2_lambda=1 # ! use_conf_map
+
+NUM_GPUS=1
+
+image_size=512
+image_size_encoder=512
+
+num_workers=0 # for debug
+
+patch_size=14
+kl_lambda=1.0e-06
+
+perturb_pcd_scale=0
+
+num_frames=8
+batch_size=1 # ! the actual BS will double
+
+microbatch=$(( num_frames*batch_size*2 ))
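+# worked out: 8 frames x batch 1 x 2 = 16 views per micro-step (the doubling noted
+# above), matching --split_chunk_size $(( num_frames + num_frames )) in the launch below.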
+
+data_dir=./assets/demo-image-for-i23d/for-vae-reconstruction/
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+# raw inference
+conv_lr=0
+lr=0
+
+vit_decoder_lr=$lr
+encoder_lr=${conv_lr} # scaled version; can be increased for multi-node training
+triplane_decoder_lr=$conv_lr
+super_resolution_lr=$conv_lr
+
+# * the above is the best lr config
+
+LR_FLAGS="--encoder_lr $encoder_lr \
+--vit_decoder_lr $vit_decoder_lr \
+--triplane_decoder_lr $triplane_decoder_lr \
+--super_resolution_lr $super_resolution_lr \
+--lr $lr"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 10000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --dino_version mv-sd-dit-srt-pcd-structured-pc2-nopcd \
+ --sr_training False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --image_size $image_size \
+ --kl_lambda ${kl_lambda} \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder False \
+ --fg_mse True \
+ --bg_lamdba 1.0 \
+ --lpips_delay_iter 100 \
+ --sr_delay_iter 25000 \
+ --kl_anneal True \
+ --symmetry_loss False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+# --resume_checkpoint ./checkpoint/model_rec1965000.pt \
+
+
+# logdir=./logs/latent_dir/768-512-perturb${perturb_pcd_scale}
+logdir=./logs/latent_dir/pc2-debug
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--alpha_lambda 1.0 \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--decoder_output_dim 3 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded \
+"
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+# localedef -c -f UTF-8 -i en_US en_US.UTF-8
+# export LC_ALL=en_US.UTF-8
+
+export OPENCV_IO_ENABLE_OPENEXR=1
+export OMP_NUM_THREADS=12
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export TORCH_NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+for wds_split in 0; do
+
+export CUDA_VISIBLE_DEVICES=$(( 0 + $wds_split ))
+port=$(( 14000 + $wds_split ))
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes=1 \
+ --rdzv-endpoint=localhost:${port} \
+ --rdzv_backend=c10d \
+ scripts/vit_triplane_train.py \
+ --trainer_name nv_rec_patch_mvE_gs \
+ --num_workers ${num_workers} \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ --lpips_lambda $lpips_lambda \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --save_interval 10000 \
+ --eval_interval 250000000 \
+ --decomposed True \
+ --logdir $logdir \
+ --decoder_load_pretrained False \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --use_amp False \
+ --eval_batch_size ${batch_size} \
+ ${LR_FLAGS} \
+ --l1_lambda ${l1_lambda} \
+ --l2_lambda ${l2_lambda} \
+ --ssim_lambda ${ssim_lambda} \
+ --depth_smoothness_lambda 0 \
+ --use_conf_map False \
+ --objv_dataset True \
+ --depth_lambda 0.5 \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --mv_input True \
+ --inference True \
+ --split_chunk_input True \
+ --four_view_for_latent False \
+ --append_depth False \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --return_all_dit_layers False \
+ --ldm_embed_dim 10 \
+ --xyz_lambda 0.0 \
+ --emd_lambda 0.0 \
+ --cd_lambda 0.0 \
+ --fps_sampling True \
+ --subset_fps_sampling False \
+ --subset_half_fps_sampling False \
+ --num_frames ${num_frames} \
+ --frame_0_as_canonical False \
+ --split_chunk_size $((num_frames + num_frames)) \
+ --read_normal True \
+ --in_plane_attention False \
+ --load_pcd True \
+ --rand_aug_bg True \
+ --use_wds False \
+ --append_xyz True \
+ --use_chunk True \
+ --pcd_path /mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --plane_n 1 \
+ --latent_num 768 \
+ --perturb_pcd_scale ${perturb_pcd_scale} \
+ --wds_split ${wds_split} \
+
+# --pcd_path /nas/shared/V2V/yslan/logs/nips23/Reconstruction/pcd-V=10_4096_polish/fps-pcd \
+
+done
+
diff --git a/shell_scripts/release/inference/vae-3d.sh b/shell_scripts/release/inference/vae-3d.sh
new file mode 100644
index 0000000000000000000000000000000000000000..0697eebaa569d554826d9adb9291bbb45f3574db
--- /dev/null
+++ b/shell_scripts/release/inference/vae-3d.sh
@@ -0,0 +1,188 @@
+set -x
+
+lpips_lambda=2.0
+ssim_lambda=0.
+l1_lambda=0. # following gaussian splatting
+l2_lambda=1 # ! use_conf_map
+
+NUM_GPUS=1
+
+image_size=512
+image_size_encoder=512
+
+num_workers=2 # for debug
+
+patch_size=14
+kl_lambda=1.0e-06
+
+perturb_pcd_scale=0
+
+num_frames=8
+batch_size=1 # ! the actual BS will double
+
+microbatch=$(( num_frames*batch_size*2 ))
+
+data_dir=./assets/demo-image-for-i23d/for-vae-reconstruction/
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+# raw inference
+conv_lr=0
+lr=0
+
+vit_decoder_lr=$lr
+encoder_lr=${conv_lr} # scaled version; can be increased for multi-node training
+triplane_decoder_lr=$conv_lr
+super_resolution_lr=$conv_lr
+
+# * the above is the best lr config
+
+LR_FLAGS="--encoder_lr $encoder_lr \
+--vit_decoder_lr $vit_decoder_lr \
+--triplane_decoder_lr $triplane_decoder_lr \
+--super_resolution_lr $super_resolution_lr \
+--lr $lr"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 10000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --image_size $image_size \
+ --kl_lambda ${kl_lambda} \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder False \
+ --fg_mse True \
+ --bg_lamdba 1.0 \
+ --lpips_delay_iter 100 \
+ --sr_delay_iter 25000 \
+ --kl_anneal True \
+ --symmetry_loss False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ --resume_checkpoint ./checkpoint/model_rec1965000.pt \
+ "
+
+
+logdir=./logs/latent_dir/768-512-perturb${perturb_pcd_scale}
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--alpha_lambda 1.0 \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--decoder_output_dim 3 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_vae_decoder_cascaded \
+"
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+# localedef -c -f UTF-8 -i en_US en_US.UTF-8
+# export LC_ALL=en_US.UTF-8
+
+export OPENCV_IO_ENABLE_OPENEXR=1
+export OMP_NUM_THREADS=12
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export TORCH_NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+for wds_split in 0; do
+
+export CUDA_VISIBLE_DEVICES=$(( 0 + $wds_split ))
+port=$(( 14000 + $wds_split ))
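+# each wds_split is pinned to GPU (0 + wds_split) and rendezvous port (14000 + wds_split),
+# so shards never clash; e.g. "for wds_split in 0 1 2 3" processes four shards on GPUs 0-3.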
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes=1 \
+ --rdzv-endpoint=localhost:${port} \
+ --rdzv_backend=c10d \
+ scripts/vit_triplane_train.py \
+ --trainer_name nv_rec_patch_mvE_gs \
+ --num_workers ${num_workers} \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ --lpips_lambda $lpips_lambda \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --save_interval 10000 \
+ --eval_interval 250000000 \
+ --decomposed True \
+ --logdir $logdir \
+ --decoder_load_pretrained False \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --use_amp True \
+ --eval_batch_size ${batch_size} \
+ ${LR_FLAGS} \
+ --l1_lambda ${l1_lambda} \
+ --l2_lambda ${l2_lambda} \
+ --ssim_lambda ${ssim_lambda} \
+ --depth_smoothness_lambda 0 \
+ --use_conf_map False \
+ --objv_dataset True \
+ --depth_lambda 0.5 \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --mv_input True \
+ --inference True \
+ --split_chunk_input True \
+ --four_view_for_latent False \
+ --append_depth False \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --return_all_dit_layers False \
+ --ldm_embed_dim 10 \
+ --xyz_lambda 0.0 \
+ --emd_lambda 0.0 \
+ --cd_lambda 0.0 \
+ --fps_sampling True \
+ --subset_fps_sampling False \
+ --subset_half_fps_sampling False \
+ --num_frames ${num_frames} \
+ --frame_0_as_canonical False \
+ --split_chunk_size $((num_frames + num_frames)) \
+ --read_normal True \
+ --in_plane_attention False \
+ --load_pcd True \
+ --rand_aug_bg True \
+ --use_wds False \
+ --append_xyz True \
+ --use_chunk True \
+ --pcd_path /mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --plane_n 1 \
+ --latent_num 768 \
+ --perturb_pcd_scale ${perturb_pcd_scale} \
+ --wds_split ${wds_split} \
+
+# --pcd_path /nas/shared/V2V/yslan/logs/nips23/Reconstruction/pcd-V=10_4096_polish/fps-pcd \
+
+done
+
diff --git a/shell_scripts/release/train/.gitkeep b/shell_scripts/release/train/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/shell_scripts/release/train/stage-1-vae3d/vae3d-adv-512.sh b/shell_scripts/release/train/stage-1-vae3d/vae3d-adv-512.sh
new file mode 100644
index 0000000000000000000000000000000000000000..403e55eff6963d73320a22b0d8434b5f4d85b221
--- /dev/null
+++ b/shell_scripts/release/train/stage-1-vae3d/vae3d-adv-512.sh
@@ -0,0 +1,212 @@
+set -x
+# vit_decoder_lr=1.001
+
+lpips_lambda=0.8
+l1_lambda=1.0 # following gaussian splatting
+
+l2_lambda=0.0
+ssim_lambda=0.0
+lambda_normal=0.025
+
+lambda_dist=1000
+overfitting=False
+
+
+NUM_GPUS=8
+num_workers=8 #
+batch_size=1
+
+
+patchgan_disc_factor=0.025
+patchgan_disc_g_weight=0.025
+perturb_pcd_scale=0.01
+
+
+image_size=512
+image_size_encoder=512
+
+patch_size=14
+kl_lambda=2.5e-06
+patch_rendering_resolution=${image_size}
+
+num_frames=8
+
+microbatch=$(( num_frames*batch_size*2 ))
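+# per-GPU micro-batch = 8 frames x batch 1 x 2 = 16 rendered views; the global
+# batch additionally scales with NUM_GPUS=8.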
+
+dataset_name=75K
+data_dir=/cpfs01/user/lanyushi.p/data/chunk-jpeg-normal/bs_16_fixsave3/170K/512/
+
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+# halve the LR since the BS is halved during high-res finetuning
+conv_lr=1e-4
+lr=5e-5
+
+vit_decoder_lr=$lr
+encoder_lr=${conv_lr} # scaled version; can be increased for multi-node training
+triplane_decoder_lr=$conv_lr
+super_resolution_lr=$conv_lr
+
+# * the above is the best lr config
+
+LR_FLAGS="--encoder_lr $encoder_lr \
+--vit_decoder_lr $vit_decoder_lr \
+--triplane_decoder_lr $triplane_decoder_lr \
+--super_resolution_lr $super_resolution_lr \
+--lr $lr"
+
+TRAIN_FLAGS="--iterations 5000 --anneal_lr False \
+ --batch_size $batch_size \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --image_size $image_size \
+ --kl_lambda ${kl_lambda} \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder False \
+ --fg_mse True \
+ --bg_lamdba 1.0 \
+ --lpips_delay_iter 25000 \
+ --sr_delay_iter 25000 \
+ --kl_anneal True \
+ --symmetry_loss False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ --resume_checkpoint /nas/shared/public/yslan/logs/vae/f=8-cascade/latent=768-8x3x3-fullset-surfacePCD-adv/bs1-gpu8-0.025-0.01-advFinestOnly_512_perturb/model_rec1665000.pt \
+ "
+
+logdir=/nas/shared/public/yslan/logs/vae/f=8-cascade/latent=768-8x3x3-fullset-surfacePCD-adv/bs${batch_size}-gpu${NUM_GPUS}-${patchgan_disc_factor}-${patchgan_disc_g_weight}-advFinestOnly_512_perturb-largeradv
+
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--alpha_lambda 1.0 \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--decoder_output_dim 3 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_lion_learnoffset_surfel_novaePT_sr_cascade_x8x4x4_512 \
+"
+
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+# localedef -c -f UTF-8 -i en_US en_US.UTF-8
+# export LC_ALL=en_US.UTF-8
+
+export OPENCV_IO_ENABLE_OPENEXR=1
+export OMP_NUM_THREADS=12
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+# debug-only settings (CUDA_LAUNCH_BLOCKING serializes kernel launches and slows
+# training); enable them only when debugging:
+# export CUDA_LAUNCH_BLOCKING=1
+# export TORCH_USE_CUDA_DSA=1
+
+export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+
+
+
+torchrun --rdzv-endpoint=localhost:19408 \
+ --nproc_per_node=$NUM_GPUS \
+ --nnodes=1 \
+ --rdzv_backend=c10d \
+ scripts/vit_triplane_train.py \
+ --trainer_name nv_rec_patch_mvE_gs_disc \
+ --num_workers ${num_workers} \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ --lpips_lambda $lpips_lambda \
+ --overfitting ${overfitting} \
+ --load_pretrain_encoder False \
+ --iterations 300000 \
+ --save_interval 10000 \
+ --eval_interval 250000000 \
+ --decomposed True \
+ --logdir $logdir \
+ --decoder_load_pretrained False \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --use_amp True \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --l1_lambda ${l1_lambda} \
+ --l2_lambda ${l2_lambda} \
+ --ssim_lambda ${ssim_lambda} \
+ --lambda_normal ${lambda_normal} \
+ --lambda_dist ${lambda_dist} \
+ --depth_smoothness_lambda 0 \
+ --use_conf_map False \
+ --objv_dataset True \
+ --depth_lambda 0.0 \
+ --patch_rendering_resolution ${patch_rendering_resolution} \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --mv_input True \
+ --split_chunk_input True \
+ --four_view_for_latent False \
+ --append_depth False \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --return_all_dit_layers False \
+ --ldm_embed_dim 10 \
+ --xyz_lambda 0.0 \
+ --emd_lambda 0.0 \
+ --cd_lambda 0.0 \
+ --fps_sampling True \
+ --subset_fps_sampling False \
+ --subset_half_fps_sampling False \
+ --num_frames ${num_frames} \
+ --frame_0_as_canonical False \
+ --split_chunk_size $((num_frames + num_frames)) \
+ --read_normal True \
+ --in_plane_attention False \
+ --load_pcd True \
+ --rand_aug_bg True \
+ --use_wds False \
+ --append_xyz True \
+ --use_chunk True \
+ --pcd_path /cpfs01/user/lanyushi.p/data/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd/ \
+ --pt_ft_kl False \
+ --ft_kl True \
+ --lambda_scale_reg 0.0 \
+ --latent_num 768 \
+ --lambda_opa_reg 0.01 \
+ --surfel_rendering True \
+ --patchgan_disc_factor ${patchgan_disc_factor} \
+ --patchgan_disc_g_weight ${patchgan_disc_g_weight} \
+ --perturb_pcd_scale ${perturb_pcd_scale} \
+ --plane_n 1 \
diff --git a/shell_scripts/release/train/stage2-i23d/.gitkeep b/shell_scripts/release/train/stage2-i23d/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/shell_scripts/release/train/stage2-i23d/i23d-klfeat-gen.sh b/shell_scripts/release/train/stage2-i23d/i23d-klfeat-gen.sh
new file mode 100644
index 0000000000000000000000000000000000000000..d21be101a636941a6d03b1f97af9182b10cc710b
--- /dev/null
+++ b/shell_scripts/release/train/stage2-i23d/i23d-klfeat-gen.sh
@@ -0,0 +1,228 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+
+patch_size=14
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+num_frames=8
+
+data_dir=/cpfs01/user/lanyushi.p/data/unzip4_img
+pcd_path=/mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd
+mv_latent_dir=/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange/latent_dir
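+# the paths above are cluster-internal; point data_dir, pcd_path and mv_latent_dir
+# at your local copies, e.g. (hypothetical local layout):
+# data_dir=./dataset/unzip4_img
+# pcd_path=./dataset/fps-pcd
+# mv_latent_dir=./dataset/latent_dir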
+
+num_workers=1
+NUM_GPUS=1
+n_cond_frames=5
+batch_size=1
+
+# microbatch=${batch_size}
+microbatch=$(( n_cond_frames*batch_size*2 ))
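+# = 5 x 1 x 2 = 10: for i23d the micro-batch scales with the number of conditioning
+# frames, unlike the t23d scripts where microbatch=${batch_size}.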
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=1e-4
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # ?
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=1.0 # no latent rescaling (this stage denoises the 10-dim KL features)
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 10000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 1024 \
+"
+
+
+# ! diffusion_steps and noise_schedule are unused, since continuous flow matching is adopted.
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 10 \
+--denoise_out_channels 10 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d True \
+--dit_model_arch DiT-PixArt-PCD-CLAY-stage2-L \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=/nas/shared/public/yslan/logs/nips24/LSGM/t23d/FM/${dataset_name}/gs-disentangle/cascade_check/clay/stage2/dino_img/noperturb/196w_latent-fullset_${NUM_GPUS}-${batch_size}-ctd
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_lion_learnoffset_surfel_novaePT_sr_cascade_x8x4x4_512 \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 3 \
+--resume_checkpoint /nas/shared/public/yslan/logs/nips24/LSGM/t23d/FM/9cls/gs-disentangle/cascade_check/clay/stage2/dino_img/noperturb/196w_latent-fullset_8-4/model_joint_denoise_rec_model2045000.pt \
+"
+
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:21378 \
+ scripts/vit_triplane_sit_train.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key img-xyz \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk True \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type img-uniform-gvp-dino-stage2 \
+ --load_pcd True \
+ --num_frames ${num_frames} \
+ --split_chunk_size 16 \
+ --load_caption_dataset False \
+ --plane_n 1 \
+ --pooling_ctx_dim 768 \
+ --pcd_path ${pcd_path} \
+ --mv_latent_dir ${mv_latent_dir}
diff --git a/shell_scripts/release/train/stage2-i23d/i23d-pcd-gen.sh b/shell_scripts/release/train/stage2-i23d/i23d-pcd-gen.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f8f109a241d4223583ec747f23308162b896b30f
--- /dev/null
+++ b/shell_scripts/release/train/stage2-i23d/i23d-pcd-gen.sh
@@ -0,0 +1,242 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+
+patch_size=14
+
+latent_dir=/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange/latent_dir
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+
+num_workers=6
+NUM_GPUS=8
+n_cond_frames=5
+batch_size=4
+
+# microbatch=${batch_size}
+microbatch=$(( n_cond_frames*batch_size*2 ))
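+# = 5 x 4 x 2 = 40 conditioning views per micro-step.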
+
+data_dir=/cpfs01/user/lanyushi.p/data/unzip4_img
+pcd_path=/mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd
+mv_latent_dir=/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange/latent_dir
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=1e-4
+
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # ?
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=1.0 # for xyz diffusion
+# * the above is the best lr config
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 10000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 1024 \
+"
+
+# ! diffusion_steps and noise_schedule are unused, since continuous flow matching is adopted.
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 3 \
+--denoise_out_channels 3 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d True \
+--dit_model_arch DiT-PixArt-PCD-CLAY-L \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=./logs/LSGM/t23d/FM/${dataset_name}/gs-disentangle/cascade_check/clay/stage1/dino_img/fullset_${NUM_GPUS}-${batch_size}
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_lion_learnoffset_surfel_novaePT_sr_cascade \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 3 \
+--resume_checkpoint /nas/shared/public/yslan/logs/nips24/LSGM/t23d/FM/9cls/gs-disentangle/cascade_check/clay/stage1/dino_img/fullset_8-4-ctd5-usemvdata/model_joint_denoise_rec_model2335000.pt \
+"
+
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:23378 \
+ scripts/vit_triplane_sit_train.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key img \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk True \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type img-uniform-gvp-dino \
+ --load_pcd True \
+ --num_frames 8 \
+ --split_chunk_size 16 \
+ --load_caption_dataset False \
+ --plane_n 1 \
+ --pooling_ctx_dim 768 \
+ --pcd_path ${pcd_path} \
+ --mv_latent_dir ${mv_latent_dir}
\ No newline at end of file
diff --git a/shell_scripts/release/train/stage2-t23d/.gitkeep b/shell_scripts/release/train/stage2-t23d/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/shell_scripts/release/train/stage2-t23d/t23d-klfeat-gen.sh b/shell_scripts/release/train/stage2-t23d/t23d-klfeat-gen.sh
new file mode 100644
index 0000000000000000000000000000000000000000..d3a8695a15b1c698e818b8806ee58f2c4e8d84e0
--- /dev/null
+++ b/shell_scripts/release/train/stage2-t23d/t23d-klfeat-gen.sh
@@ -0,0 +1,222 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+
+patch_size=14
+
+# ! on a100
+# num_workers=8
+# NUM_GPUS=8
+# batch_size=48
+
+# ! on v100, test
+num_workers=2
+NUM_GPUS=1
+# batch_size=16
+batch_size=1
+
+microbatch=${batch_size}
+
+data_dir=/cpfs01/user/lanyushi.p/data/unzip4_img
+pcd_path=/mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd
+mv_latent_dir=/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange/latent_dir
+
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+microbatch=${batch_size}
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=1e-4
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # ?
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1.0
+
+triplane_scaling_divider=0.25 # for KL-feature diffusion (xyz diffusion uses 1.0)
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 25000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 768 \
+"
+
+# ! diffusion_steps and noise_schedule are unused, since continuous flow matching is adopted.
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 10 \
+--denoise_out_channels 10 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d False \
+--dit_model_arch DiT-PCD-L-stage2-xyz2feat \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=./logs/LSGM/t23d/FM/${dataset_name}/gs-disentangle/cascade_check/clay/stage2/clip_text/fullset_${NUM_GPUS}-${batch_size}-qknorm-scale${triplane_scaling_divider}
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_lion_learnoffset_surfel_novaePT_sr_cascade_x8x4x4 \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 10 \
+--resume_checkpoint /mnt/sfs-common/yslan/open-source/checkpoints/t23d/stage-2/model_joint_denoise_rec_model2725000.pt \
+"
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # save caption txt bug
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+
+# export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+export CUDA_VISIBLE_DEVICES=0
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:23377 \
+ scripts/vit_triplane_sit_train.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key caption \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk True \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type stage2-t23d \
+ --load_pcd True \
+ --num_frames 8 \
+ --split_chunk_size 16 \
+ --load_caption_dataset True \
+ --plane_n 1 \
+ --pooling_ctx_dim 768 \
+ --pcd_path ${pcd_path} \
+ --mv_latent_dir ${mv_latent_dir}
\ No newline at end of file
diff --git a/shell_scripts/release/train/stage2-t23d/t23d-pcd-gen.sh b/shell_scripts/release/train/stage2-t23d/t23d-pcd-gen.sh
new file mode 100644
index 0000000000000000000000000000000000000000..b4290886da5b9479cc4d4d2fa151eb60bd10900e
--- /dev/null
+++ b/shell_scripts/release/train/stage2-t23d/t23d-pcd-gen.sh
@@ -0,0 +1,226 @@
+set -x
+
+lpips_lambda=0.8
+
+image_size=512
+image_size_encoder=${image_size}
+
+patch_size=14
+
+
+cfg_dropout_prob=0.1 # SD config
+dataset_name="9cls"
+
+# ! on a100
+# num_workers=8
+# NUM_GPUS=8
+# batch_size=48
+
+# ! on v100, test
+num_workers=2
+NUM_GPUS=1
+#batch_size=16
+batch_size=1
+
+microbatch=${batch_size}
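+# microbatch == batch_size means no gradient accumulation; assuming
+# guided-diffusion-style trainer semantics, setting microbatch to a smaller
+# divisor of batch_size accumulates gradients to fit larger effective batches in VRAM.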
+
+data_dir=/cpfs01/user/lanyushi.p/data/unzip4_img
+pcd_path=/mnt/sfs-common/yslan/Dataset/Obajverse/FPS_PCD/pcd-V=10_4096_polish_fullset/fps-pcd
+mv_latent_dir=/mnt/sfs-common/yslan/open-source/latent_dir/gs-latent-dim=10-fullset-cascade-fixpcd-adv_xyzaug_loweradv_768-fixinprange/latent_dir
+
+DATASET_FLAGS="
+ --data_dir ${data_dir} \
+ --eval_data_dir ${data_dir} \
+"
+
+lr=1e-4
+
+
+kl_lambda=0
+vit_lr=1e-5 # for improved-diffusion unet
+ce_lambda=0 # weight of the cross-entropy / negative-entropy term; disabled here
+conv_lr=5e-5
+alpha_lambda=1
+scale_clip_encoding=1
+
+triplane_scaling_divider=1.0
+
+LR_FLAGS="--encoder_lr $vit_lr \
+ --vit_decoder_lr $vit_lr \
+ --lpips_lambda $lpips_lambda \
+ --triplane_decoder_lr $conv_lr \
+ --super_resolution_lr $conv_lr \
+ --lr $lr \
+ --kl_lambda ${kl_lambda} \
+ --bg_lamdba 0.01 \
+ --alpha_lambda ${alpha_lambda} \
+"
+
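+# note: the --iterations value below is overridden by the --iterations flag passed
+# on the python command line further down (argparse-style parsers keep the last occurrence).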
+TRAIN_FLAGS="--iterations 10001 --anneal_lr False \
+ --batch_size $batch_size --save_interval 25000 \
+ --microbatch ${microbatch} \
+ --image_size_encoder $image_size_encoder \
+ --image_size $image_size \
+ --dino_version mv-sd-dit-srt-pcd-structured-nopcd \
+ --sr_training False \
+ --encoder_cls_token False \
+ --decoder_cls_token False \
+ --cls_token False \
+ --weight_decay 0.05 \
+ --no_dim_up_mlp True \
+ --uvit_skip_encoder True \
+ --decoder_load_pretrained True \
+ --fg_mse False \
+ --vae_p 2 \
+ --plucker_embedding True \
+ --encoder_in_channels 15 \
+ --arch_dit_decoder DiT2-B/2 \
+ --sd_E_ch 64 \
+ --sd_E_num_res_blocks 1 \
+ --lrm_decoder False \
+ "
+
+
+DDPM_MODEL_FLAGS="
+--learn_sigma False \
+--num_heads 8 \
+--num_res_blocks 2 \
+--num_channels 320 \
+--attention_resolutions 4,2,1 \
+--use_spatial_transformer True \
+--transformer_depth 1 \
+--context_dim 768 \
+"
+# --pred_type x0 \
+# --iw_sample_p drop_all_uniform \
+# --loss_type x0 \
+
+# ! --diffusion_steps and --noise_schedule are unused here, since continuous-time flow matching is adopted.
+DIFFUSION_FLAGS="--diffusion_steps 1000 --noise_schedule linear \
+--use_kl False \
+--triplane_scaling_divider ${triplane_scaling_divider} \
+--trainer_name flow_matching_gs \
+--mixed_prediction False \
+--train_vae False \
+--denoise_in_channels 3 \
+--denoise_out_channels 3 \
+--diffusion_input_size 32 \
+--diffusion_ce_anneal True \
+--create_controlnet False \
+--p_rendering_loss False \
+--pred_type x_start \
+--predict_v False \
+--create_dit True \
+--i23d False \
+--dit_model_arch DiT-PCD-L \
+--train_vae False \
+--use_eos_feature False \
+--roll_out True \
+"
+
+logdir=./logs/LSGM/t23d/FM/${dataset_name}/gs-disentangle/cascade_check/clay/stage1/clip_text/fullset_${NUM_GPUS}-${batch_size}
+
+SR_TRAIN_FLAGS_v1_2XC="
+--decoder_in_chans 32 \
+--out_chans 96 \
+--ae_classname vit.vit_triplane.pcd_structured_latent_space_lion_learnoffset_surfel_novaePT_sr_cascade_x8x4x4 \
+--logdir $logdir \
+--arch_encoder vits \
+--arch_decoder vitb \
+--vit_decoder_wd 0.001 \
+--encoder_weight_decay 0.001 \
+--color_criterion mse \
+--triplane_in_chans 32 \
+--decoder_output_dim 10 \
+--resume_checkpoint /mnt/sfs-common/yslan/open-source/checkpoints/t23d/stage-1/model_joint_denoise_rec_model1950000.pt \
+"
+
+# --resume_checkpoint yslan/GaussianAnything/ckpts/checkpoints/t23d/stage-1/model_joint_denoise_rec_model1950000.pt \
+
+
+# --resume_checkpoint /nas/shared/V2V/yslan/logs/nips24/LSGM/t23d/FM/9cls/gs-disentangle/cascade_check/xyz_output_fullset/model_joint_denoise_rec_model1700000.pt \
+
+# --resume_checkpoint /nas/shared/V2V/yslan/logs/nips24/LSGM/t23d/FM/9cls/gs-disentangle/cascade_check/xyz_output/model_joint_denoise_rec_model1600000.pt \
+
+SR_TRAIN_FLAGS=${SR_TRAIN_FLAGS_v1_2XC}
+
+rm -rf "$logdir"/runs
+mkdir -p "$logdir"/
+cp "$0" "$logdir"/
+
+export OMP_NUM_THREADS=12
+# export LC_ALL=en_US.UTF-8 # workaround for a bug when saving caption .txt files
+export TORCH_NCCL_ASYNC_ERROR_HANDLING=1
+export OPENCV_IO_ENABLE_OPENEXR=1
+export NCCL_IB_GID_INDEX=3 # https://github.com/huggingface/accelerate/issues/314#issuecomment-1821973930
+# export CUDA_VISIBLE_DEVICES=0,1,2
+
+export CUDA_VISIBLE_DEVICES=0
+
+torchrun --nproc_per_node=$NUM_GPUS \
+ --nnodes 1 \
+ --rdzv-endpoint=localhost:22477 \
+ scripts/vit_triplane_sit_train.py \
+ --num_workers ${num_workers} \
+ --depth_lambda 0 \
+ ${TRAIN_FLAGS} \
+ ${SR_TRAIN_FLAGS} \
+ ${DATASET_FLAGS} \
+ ${DIFFUSION_FLAGS} \
+ ${DDPM_MODEL_FLAGS} \
+ --overfitting False \
+ --load_pretrain_encoder False \
+ --iterations 5000001 \
+ --eval_interval 5000 \
+ --decomposed True \
+ --logdir $logdir \
+ --cfg objverse_tuneray_aug_resolution_64_64_auto \
+ --patch_size ${patch_size} \
+ --eval_batch_size 1 \
+ ${LR_FLAGS} \
+ --ce_lambda ${ce_lambda} \
+ --negative_entropy_lambda ${ce_lambda} \
+ --triplane_fg_bg False \
+ --grad_clip True \
+ --interval 5 \
+ --log_interval 100 \
+ --normalize_clip_encoding True \
+ --scale_clip_encoding ${scale_clip_encoding} \
+ --mixing_logit_init 10000 \
+ --objv_dataset True \
+ --cfg_dropout_prob ${cfg_dropout_prob} \
+ --cond_key caption \
+ --use_lmdb_compressed False \
+ --use_lmdb False \
+ --use_amp True \
+ --append_xyz True \
+ --allow_tf32 True \
+ --gs_cam_format True \
+ --gs_rendering True \
+ --shuffle_across_cls True \
+ --z_channels 10 \
+ --ldm_z_channels 10 \
+ --ldm_embed_dim 10 \
+ --load_wds_diff False \
+ --load_wds_latent False \
+ --compile False \
+ --split_chunk_input True \
+ --append_depth False \
+ --mv_input True \
+ --duplicate_sample False \
+ --read_normal True \
+ --enable_mixing_normal False \
+ --use_wds False \
+ --use_chunk True \
+ --pt_ft_kl False \
+ --surfel_rendering True \
+ --clip_grad_throld 1.0 \
+ --snr-type stage1-t23d \
+ --load_pcd True \
+ --num_frames 8 \
+ --split_chunk_size 16 \
+ --load_caption_dataset True \
+ --plane_n 1 \
+ --pooling_ctx_dim 768 \
+ --pcd_path ${pcd_path} \
+ --mv_latent_dir ${mv_latent_dir}
diff --git a/third_party/diff-surfel-rasterization b/third_party/diff-surfel-rasterization
new file mode 160000
index 0000000000000000000000000000000000000000..e0ed0207b3e0669960cfad70852200a4a5847f61
--- /dev/null
+++ b/third_party/diff-surfel-rasterization
@@ -0,0 +1 @@
+Subproject commit e0ed0207b3e0669960cfad70852200a4a5847f61
diff --git a/torch_utils/CLIP.png b/torch_utils/CLIP.png
new file mode 100644
index 0000000000000000000000000000000000000000..a1b5ec9171fd7a51e36e845a02304eb837142ba1
Binary files /dev/null and b/torch_utils/CLIP.png differ
diff --git a/torch_utils/__init__.py b/torch_utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240
--- /dev/null
+++ b/torch_utils/__init__.py
@@ -0,0 +1,11 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+# empty
diff --git a/torch_utils/__pycache__/__init__.cpython-39.pyc b/torch_utils/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b2c7935e1263357d879402cdef4272e878d34fc6
Binary files /dev/null and b/torch_utils/__pycache__/__init__.cpython-39.pyc differ
diff --git a/torch_utils/__pycache__/components.cpython-39.pyc b/torch_utils/__pycache__/components.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ca5b65878d611b1b78d4219688426c4f3e45776d
Binary files /dev/null and b/torch_utils/__pycache__/components.cpython-39.pyc differ
diff --git a/torch_utils/__pycache__/custom_ops.cpython-39.pyc b/torch_utils/__pycache__/custom_ops.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c302e49f8bad0a4f82befd0d1dddbc869832c612
Binary files /dev/null and b/torch_utils/__pycache__/custom_ops.cpython-39.pyc differ
diff --git a/torch_utils/__pycache__/legacy.cpython-39.pyc b/torch_utils/__pycache__/legacy.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a0e76f2c462bd10c8b3638d47657bd132b72ebb1
Binary files /dev/null and b/torch_utils/__pycache__/legacy.cpython-39.pyc differ
diff --git a/torch_utils/__pycache__/misc.cpython-39.pyc b/torch_utils/__pycache__/misc.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1881a96d35d303182ce3805e2a1920eaf7a08dd7
Binary files /dev/null and b/torch_utils/__pycache__/misc.cpython-39.pyc differ
diff --git a/torch_utils/__pycache__/persistence.cpython-39.pyc b/torch_utils/__pycache__/persistence.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7bb9f3dd151a5e6c4de98d72cba4de1f3058f9b2
Binary files /dev/null and b/torch_utils/__pycache__/persistence.cpython-39.pyc differ
diff --git a/torch_utils/clip_practice.py b/torch_utils/clip_practice.py
new file mode 100644
index 0000000000000000000000000000000000000000..eeb7890aabafd7d43287824d6227e2bc8124c5f0
--- /dev/null
+++ b/torch_utils/clip_practice.py
@@ -0,0 +1,38 @@
+import torch
+import clip
+from PIL import Image
+
+from pdb import set_trace as st
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+model, preprocess = clip.load("ViT-B/16", device=device)
+
+image = preprocess(Image.open("torch_utils/CLIP.png")).unsqueeze(0).to(device)
+text = clip.tokenize(["a diagram", "a dog", "a cat"]).to(device)
+
+with torch.no_grad():
+    image_features = model.encode_image(image)
+    text_features = model.encode_text(text)
+
+    logits_per_image, logits_per_text = model(image, text)
+    probs = logits_per_image.softmax(dim=-1).cpu().numpy()
+
+with torch.no_grad():
+ x = image.type(model.dtype) # 1 3 224 224
+ self = model.visual
+ x = self.conv1(x) # shape = [*, width, grid, grid]
+ x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2]
+ x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width]
+ x = torch.cat([self.class_embedding.to(x.dtype) + torch.zeros(x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device), x], dim=1) # shape = [*, grid ** 2 + 1, width]
+ x = x + self.positional_embedding.to(x.dtype)
+ x = self.ln_pre(x)
+
+ x = x.permute(1, 0, 2) # NLD -> LND
+ x = self.transformer(x)
+    x = x.permute(1, 0, 2)  # LND -> NLD; (1, 197, 768) for ViT-B/16 (14*14 patches + CLS)
+ st()
+
+ pass
+
+
+print("Label probs:", probs) # prints: [[0.9927937 0.00421068 0.00299572]]
\ No newline at end of file
diff --git a/torch_utils/components.py b/torch_utils/components.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7b9aa055759bd0c6972abe790304621d036789e
--- /dev/null
+++ b/torch_utils/components.py
@@ -0,0 +1,445 @@
+# https://github.com/lucidrains/imagen-pytorch/blob/main/imagen_pytorch/imagen_pytorch.py
+# https://github.com/JingyunLiang/SwinIR/blob/main/models/network_swinir.py#L812
+
+import copy
+import math
+from collections import namedtuple
+from contextlib import contextmanager, nullcontext
+from functools import partial, wraps
+from pathlib import Path
+from random import random
+
+from einops import rearrange, repeat, reduce, pack, unpack
+
+import torch
+import torch.nn.functional as F
+import torchvision.transforms as T
+from torch import einsum, nn
+from beartype.typing import List, Union
+from beartype import beartype
+from tqdm.auto import tqdm
+from pdb import set_trace as st
+
+# helper functions, from:
+# https://github.com/lucidrains/imagen-pytorch/blob/main/imagen_pytorch/imagen_pytorch.py
+
+
+def exists(val):
+ return val is not None
+
+
+def identity(t, *args, **kwargs):
+ return t
+
+
+def divisible_by(numer, denom):
+ return (numer % denom) == 0
+
+
+def first(arr, d=None):
+ if len(arr) == 0:
+ return d
+ return arr[0]
+
+
+def maybe(fn):
+ @wraps(fn)
+ def inner(x):
+ if not exists(x):
+ return x
+ return fn(x)
+
+ return inner
+
+
+def once(fn):
+ called = False
+
+ @wraps(fn)
+ def inner(x):
+ nonlocal called
+ if called:
+ return
+ called = True
+ return fn(x)
+
+ return inner
+
+
+print_once = once(print)
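+# e.g. print_once('logged only the first time this line is reached')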
+
+
+def default(val, d):
+ if exists(val):
+ return val
+ return d() if callable(d) else d
+
+
+def compact(input_dict):
+ return {key: value for key, value in input_dict.items() if exists(value)}
+
+
+def maybe_transform_dict_key(input_dict, key, fn):
+ if key not in input_dict:
+ return input_dict
+
+ copied_dict = input_dict.copy()
+ copied_dict[key] = fn(copied_dict[key])
+ return copied_dict
+
+
+def cast_uint8_images_to_float(images):
+ if not images.dtype == torch.uint8:
+ return images
+ return images / 255
+
+
+def module_device(module):
+ return next(module.parameters()).device
+
+
+def zero_init_(m):
+ nn.init.zeros_(m.weight)
+ if exists(m.bias):
+ nn.init.zeros_(m.bias)
+
+
+def eval_decorator(fn):
+ def inner(model, *args, **kwargs):
+ was_training = model.training
+ model.eval()
+ out = fn(model, *args, **kwargs)
+ model.train(was_training)
+ return out
+
+ return inner
+
+
+def pad_tuple_to_length(t, length, fillvalue=None):
+ remain_length = length - len(t)
+ if remain_length <= 0:
+ return t
+ return (*t, *((fillvalue, ) * remain_length))
+
+
+# helper classes
+
+
+class Identity(nn.Module):
+ def __init__(self, *args, **kwargs):
+ super().__init__()
+
+ def forward(self, x, *args, **kwargs):
+ return x
+
+
+# tensor helpers
+
+
+def log(t, eps: float = 1e-12):
+ return torch.log(t.clamp(min=eps))
+
+
+def l2norm(t):
+ return F.normalize(t, dim=-1)
+
+
+def right_pad_dims_to(x, t):
+ padding_dims = x.ndim - t.ndim
+ if padding_dims <= 0:
+ return t
+ return t.view(*t.shape, *((1, ) * padding_dims))
+
+
+def masked_mean(t, *, dim, mask=None):
+ if not exists(mask):
+ return t.mean(dim=dim)
+
+ denom = mask.sum(dim=dim, keepdim=True)
+ mask = rearrange(mask, 'b n -> b n 1')
+ masked_t = t.masked_fill(~mask, 0.)
+
+ return masked_t.sum(dim=dim) / denom.clamp(min=1e-5)
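+# Example: pool only over valid tokens; with t of shape (B, N, D) and a boolean
+# mask of shape (B, N), masked_mean(t, dim=1, mask=mask) returns (B, D).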
+
+
+def resize_image_to(image,
+ target_image_size,
+ clamp_range=None,
+ mode='nearest'):
+ orig_image_size = image.shape[-1]
+
+ if orig_image_size == target_image_size:
+ return image
+
+ out = F.interpolate(image, target_image_size, mode=mode)
+
+ if exists(clamp_range):
+ out = out.clamp(*clamp_range)
+
+ return out
+
+
+def calc_all_frame_dims(downsample_factors: List[int], frames):
+ if not exists(frames):
+ return (tuple(), ) * len(downsample_factors)
+
+ all_frame_dims = []
+
+ for divisor in downsample_factors:
+ assert divisible_by(frames, divisor)
+ all_frame_dims.append((frames // divisor, ))
+
+ return all_frame_dims
+
+
+def safe_get_tuple_index(tup, index, default=None):
+ if len(tup) <= index:
+ return default
+ return tup[index]
+
+
+# image normalization functions
+# ddpms expect images to be in the range of -1 to 1
+
+
+def normalize_neg_one_to_one(img):
+ return img * 2 - 1
+
+
+def unnormalize_zero_to_one(normed_img):
+ return (normed_img + 1) * 0.5
+
+
+# def Upsample(dim, dim_out=None):
+# dim_out = default(dim_out, dim)
+
+# return nn.Sequential(nn.Upsample(scale_factor=2, mode='nearest'),
+# nn.Conv2d(dim, dim_out, 3, padding=1))
+
+
+
+class PixelShuffleUpsample(nn.Module):
+ """
+    code shared by @MalumaDev at DALLE2-pytorch for addressing checkerboard artifacts
+ https://arxiv.org/ftp/arxiv/papers/1707/1707.02937.pdf
+ """
+ def __init__(self, dim, dim_out=None):
+ super().__init__()
+ dim_out = default(dim_out, dim)
+ conv = nn.Conv2d(dim, dim_out * 4, 1)
+
+ self.net = nn.Sequential(conv, nn.SiLU(), nn.PixelShuffle(2))
+
+ self.init_conv_(conv)
+
+ def init_conv_(self, conv):
+ o, i, h, w = conv.weight.shape
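+        # ICNR-style init: build one kernel for o // 4 output channels and repeat it
+        # across the 4 pixel-shuffle groups, so the upsampled output starts free of
+        # checkerboard artifacts.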
+ conv_weight = torch.empty(o // 4, i, h, w)
+ nn.init.kaiming_uniform_(conv_weight)
+ conv_weight = repeat(conv_weight, 'o ... -> (o 4) ...')
+
+ conv.weight.data.copy_(conv_weight)
+ nn.init.zeros_(conv.bias.data)
+
+ def forward(self, x):
+ return self.net(x)
+
+
+class ResidualBlock(nn.Module):
+ def __init__(self,
+ dim_in,
+ dim_out,
+ dim_inter=None,
+ use_norm=True,
+ norm_layer=nn.BatchNorm2d,
+ bias=False):
+ super().__init__()
+ if dim_inter is None:
+ dim_inter = dim_out
+
+ if use_norm:
+ self.conv = nn.Sequential(
+ norm_layer(dim_in),
+ nn.ReLU(True),
+ nn.Conv2d(dim_in,
+ dim_inter,
+ 3,
+ 1,
+ 1,
+ bias=bias,
+ padding_mode='reflect'),
+ norm_layer(dim_inter),
+ nn.ReLU(True),
+ nn.Conv2d(dim_inter,
+ dim_out,
+ 3,
+ 1,
+ 1,
+ bias=bias,
+ padding_mode='reflect'),
+ )
+ else:
+ self.conv = nn.Sequential(
+ nn.ReLU(True),
+ nn.Conv2d(dim_in, dim_inter, 3, 1, 1),
+ nn.ReLU(True),
+ nn.Conv2d(dim_inter, dim_out, 3, 1, 1),
+ )
+
+ self.short_cut = None
+ if dim_in != dim_out:
+ self.short_cut = nn.Conv2d(dim_in, dim_out, 1, 1)
+
+ def forward(self, feats):
+ feats_out = self.conv(feats)
+ if self.short_cut is not None:
+ feats_out = self.short_cut(feats) + feats_out
+ else:
+ feats_out = feats_out + feats
+ return feats_out
+
+
+class Upsample(nn.Sequential):
+ """Upsample module.
+ Args:
+ scale (int): Scale factor. Supported scales: 2^n and 3.
+ num_feat (int): Channel number of intermediate features.
+ """
+ def __init__(self, scale, num_feat):
+ m = []
+ if (scale & (scale - 1)) == 0: # scale = 2^n
+ for _ in range(int(math.log(scale, 2))):
+ m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1))
+ m.append(nn.PixelShuffle(2))
+ elif scale == 3:
+ m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1))
+ m.append(nn.PixelShuffle(3))
+ else:
+ raise ValueError(f'scale {scale} is not supported. '
+ 'Supported scales: 2^n and 3.')
+ super(Upsample, self).__init__(*m)
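+# e.g. Upsample(scale=4, num_feat=64) stacks two (Conv2d -> PixelShuffle(2)) stages,
+# mapping (B, 64, H, W) features to (B, 64, 4H, 4W).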
+
+
+class PixelUnshuffleUpsample(nn.Module):
+ def __init__(self, output_dim, num_feat=128, num_out_ch=3, sr_ratio=4, *args, **kwargs) -> None:
+ super().__init__()
+
+ self.conv_after_body = nn.Conv2d(output_dim, output_dim, 3, 1, 1)
+ self.conv_before_upsample = nn.Sequential(
+ nn.Conv2d(output_dim, num_feat, 3, 1, 1),
+ nn.LeakyReLU(inplace=True))
+ self.upsample = Upsample(sr_ratio, num_feat) # 4 time SR
+ self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1)
+
+ def forward(self, x, input_skip_connection=True, *args, **kwargs):
+ # x = self.conv_first(x)
+ if input_skip_connection:
+ x = self.conv_after_body(x) + x
+ else:
+ x = self.conv_after_body(x)
+
+ x = self.conv_before_upsample(x)
+ x = self.conv_last(self.upsample(x))
+ return x
+
+
+class Conv3x3TriplaneTransformation(nn.Module):
+ # used in the final layer before triplane
+ def __init__(self, input_dim, output_dim) -> None:
+ super().__init__()
+
+ self.conv_after_unpachify = nn.Sequential(
+ nn.Conv2d(input_dim, output_dim, 3, 1, 1),
+ nn.LeakyReLU(inplace=True)
+ )
+
+ self.conv_before_rendering = nn.Sequential(
+ nn.Conv2d(output_dim, output_dim, 3, 1, 1),
+ nn.LeakyReLU(inplace=True))
+
+ def forward(self, unpachified_latent):
+ latent = self.conv_after_unpachify(unpachified_latent) # no residual connections here
+ latent = self.conv_before_rendering(latent) + latent
+ return latent
+
+
+# https://github.com/JingyunLiang/SwinIR/blob/6545850fbf8df298df73d81f3e8cba638787c8bd/models/network_swinir.py#L750
+class NearestConvSR(nn.Module):
+ """
+ code shared by @MalumaDev at DALLE2-pytorch for addressing checkboard artifacts
+ https://arxiv.org/ftp/arxiv/papers/1707/1707.02937.pdf
+ """
+ def __init__(self, output_dim, num_feat=128, num_out_ch=3, sr_ratio=4, *args, **kwargs) -> None:
+ super().__init__()
+
+ self.upscale = sr_ratio
+
+ self.conv_after_body = nn.Conv2d(output_dim, output_dim, 3, 1, 1)
+ self.conv_before_upsample = nn.Sequential(nn.Conv2d(output_dim, num_feat, 3, 1, 1),
+ nn.LeakyReLU(inplace=True))
+ self.conv_up1 = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
+ if self.upscale == 4:
+ self.conv_up2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
+ self.conv_hr = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
+ self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1)
+
+ self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)
+
+ def forward(self, x, *args, **kwargs):
+
+ # x = self.conv_first(x)
+ x = self.conv_after_body(x) + x
+ x = self.conv_before_upsample(x)
+ x = self.lrelu(self.conv_up1(torch.nn.functional.interpolate(x, scale_factor=2, mode='nearest')))
+ if self.upscale == 4:
+ x = self.lrelu(self.conv_up2(torch.nn.functional.interpolate(x, scale_factor=2, mode='nearest')))
+ x = self.conv_last(self.lrelu(self.conv_hr(x)))
+
+
+ return x
+
+# https://github.com/yumingj/C2-Matching/blob/fa171ca6707c6f16a5d04194ce866ea70bb21d2b/mmsr/models/archs/ref_restoration_arch.py#L65
+class NearestConvSR_Residual(NearestConvSR):
+ # learn residual + normalize
+
+ def __init__(self, output_dim, num_feat=128, num_out_ch=3, sr_ratio=4, *args, **kwargs) -> None:
+ super().__init__(output_dim, num_feat, num_out_ch, sr_ratio, *args, **kwargs)
+ # self.mean = torch.Tensor((0.485, 0.456, 0.406)).view(1,3,1,1) # imagenet mean
+ self.act = nn.Tanh()
+
+ def forward(self, x, base_x, *args, **kwargs):
+ # base_x: low-resolution 3D rendering, for residual addition
+ # self.mean = self.mean.type_as(x)
+ # x = super().forward(x).clamp(-1,1)
+ x = super().forward(x)
+ x = self.act(x) # residual normalize to [-1,1]
+ scale = x.shape[-1] // base_x.shape[-1] # 2 or 4
+ x = x + F.interpolate(base_x, None, scale, 'bilinear', False) # add residual; [-1,1] range
+
+ # return x + 2 * self.mean
+ return x
+
+class UpsampleOneStep(nn.Sequential):
+ """UpsampleOneStep module (the difference with Upsample is that it always only has 1conv + 1pixelshuffle)
+ Used in lightweight SR to save parameters.
+
+ Args:
+ scale (int): Scale factor. Supported scales: 2^n and 3.
+ num_feat (int): Channel number of intermediate features.
+
+ """
+
+ def __init__(self, scale, num_feat, num_out_ch, input_resolution=None):
+ self.num_feat = num_feat
+ self.input_resolution = input_resolution
+ m = []
+ m.append(nn.Conv2d(num_feat, (scale ** 2) * num_out_ch, 3, 1, 1))
+ m.append(nn.PixelShuffle(scale))
+ super(UpsampleOneStep, self).__init__(*m)
+
+ def flops(self):
+ H, W = self.input_resolution
+ flops = H * W * self.num_feat * 3 * 9
+ return flops
+
+# class PixelShuffledDirect(nn.Module):
diff --git a/torch_utils/custom_ops.py b/torch_utils/custom_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7dabe00493504ffb30ab8962b8c975b13d5c0d9
--- /dev/null
+++ b/torch_utils/custom_ops.py
@@ -0,0 +1,187 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+import glob
+import hashlib
+import importlib
+import os
+import re
+import shutil
+import uuid
+
+import torch
+import torch.utils.cpp_extension
+from torch.utils.file_baton import FileBaton
+
+#----------------------------------------------------------------------------
+# Global options.
+
+verbosity = 'brief' # Verbosity level: 'none', 'brief', 'full'
+
+#----------------------------------------------------------------------------
+# Internal helper funcs.
+
+
+def _find_compiler_bindir():
+ patterns = [
+ 'C:/Program Files (x86)/Microsoft Visual Studio/*/Professional/VC/Tools/MSVC/*/bin/Hostx64/x64',
+ 'C:/Program Files (x86)/Microsoft Visual Studio/*/BuildTools/VC/Tools/MSVC/*/bin/Hostx64/x64',
+ 'C:/Program Files (x86)/Microsoft Visual Studio/*/Community/VC/Tools/MSVC/*/bin/Hostx64/x64',
+ 'C:/Program Files (x86)/Microsoft Visual Studio */vc/bin',
+ ]
+ for pattern in patterns:
+ matches = sorted(glob.glob(pattern))
+ if len(matches):
+ return matches[-1]
+ return None
+
+
+#----------------------------------------------------------------------------
+
+
+def _get_mangled_gpu_name():
+ name = torch.cuda.get_device_name().lower()
+ out = []
+ for c in name:
+ if re.match('[a-z0-9_-]+', c):
+ out.append(c)
+ else:
+ out.append('-')
+ return ''.join(out)
+
+
+#----------------------------------------------------------------------------
+# Main entry point for compiling and loading C++/CUDA plugins.
+
+_cached_plugins = dict()
+
+
+def get_plugin(module_name,
+ sources,
+ headers=None,
+ source_dir=None,
+ **build_kwargs):
+ assert verbosity in ['none', 'brief', 'full']
+ if headers is None:
+ headers = []
+ if source_dir is not None:
+ sources = [os.path.join(source_dir, fname) for fname in sources]
+ headers = [os.path.join(source_dir, fname) for fname in headers]
+
+ # Already cached?
+ if module_name in _cached_plugins:
+ return _cached_plugins[module_name]
+
+ # Print status.
+ if verbosity == 'full':
+ print(f'Setting up PyTorch plugin "{module_name}"...')
+ elif verbosity == 'brief':
+ print(f'Setting up PyTorch plugin "{module_name}"... ',
+ end='',
+ flush=True)
+ verbose_build = (verbosity == 'full')
+
+ # Compile and load.
+ try: # pylint: disable=too-many-nested-blocks
+ # Make sure we can find the necessary compiler binaries.
+ if os.name == 'nt' and os.system("where cl.exe >nul 2>nul") != 0:
+ compiler_bindir = _find_compiler_bindir()
+ if compiler_bindir is None:
+ raise RuntimeError(
+ f'Could not find MSVC/GCC/CLANG installation on this computer. Check _find_compiler_bindir() in "{__file__}".'
+ )
+ os.environ['PATH'] += ';' + compiler_bindir
+
+ # Some containers set TORCH_CUDA_ARCH_LIST to a list that can either
+ # break the build or unnecessarily restrict what's available to nvcc.
+ # Unset it to let nvcc decide based on what's available on the
+ # machine.
+ os.environ['TORCH_CUDA_ARCH_LIST'] = ''
+
+ # Incremental build md5sum trickery. Copies all the input source files
+ # into a cached build directory under a combined md5 digest of the input
+ # source files. Copying is done only if the combined digest has changed.
+ # This keeps input file timestamps and filenames the same as in previous
+ # extension builds, allowing for fast incremental rebuilds.
+ #
+ # This optimization is done only in case all the source files reside in
+ # a single directory (just for simplicity) and if the TORCH_EXTENSIONS_DIR
+ # environment variable is set (we take this as a signal that the user
+ # actually cares about this.)
+ #
+        # EDIT: We now do it regardless of TORCH_EXTENSIONS_DIR, in order to work
+ # around the *.cu dependency bug in ninja config.
+ #
+ all_source_files = sorted(sources + headers)
+ all_source_dirs = set(
+ os.path.dirname(fname) for fname in all_source_files)
+ if len(all_source_dirs
+ ) == 1: # and ('TORCH_EXTENSIONS_DIR' in os.environ):
+
+ # Compute combined hash digest for all source files.
+ hash_md5 = hashlib.md5()
+ for src in all_source_files:
+ with open(src, 'rb') as f:
+ hash_md5.update(f.read())
+
+ # Select cached build directory name.
+ source_digest = hash_md5.hexdigest()
+ build_top_dir = torch.utils.cpp_extension._get_build_directory(
+ module_name, verbose=verbose_build) # pylint: disable=protected-access
+ cached_build_dir = os.path.join(
+ build_top_dir, f'{source_digest}-{_get_mangled_gpu_name()}')
+
+ if not os.path.isdir(cached_build_dir):
+ tmpdir = f'{build_top_dir}/srctmp-{uuid.uuid4().hex}'
+ os.makedirs(tmpdir)
+ for src in all_source_files:
+ shutil.copyfile(
+ src, os.path.join(tmpdir, os.path.basename(src)))
+ try:
+ os.replace(tmpdir, cached_build_dir) # atomic
+ except OSError:
+ # source directory already exists, delete tmpdir and its contents.
+ shutil.rmtree(tmpdir)
+ if not os.path.isdir(cached_build_dir): raise
+
+ # Compile.
+ cached_sources = [
+ os.path.join(cached_build_dir, os.path.basename(fname))
+ for fname in sources
+ ]
+ torch.utils.cpp_extension.load(name=module_name,
+ build_directory=cached_build_dir,
+ verbose=verbose_build,
+ sources=cached_sources,
+ **build_kwargs)
+ else:
+ torch.utils.cpp_extension.load(name=module_name,
+ verbose=verbose_build,
+ sources=sources,
+ **build_kwargs)
+
+ # Load.
+ module = importlib.import_module(module_name)
+
+ except:
+ if verbosity == 'brief':
+ print('Failed!')
+ raise
+
+ # Print status and add to cache dict.
+ if verbosity == 'full':
+ print(f'Done setting up PyTorch plugin "{module_name}".')
+ elif verbosity == 'brief':
+ print('Done.')
+ _cached_plugins[module_name] = module
+ return module
+
+
+#----------------------------------------------------------------------------
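+# Hypothetical usage sketch (plugin name and sources are illustrative, not part
+# of this repo):
+#   plugin = get_plugin('my_fused_op', sources=['my_fused_op.cpp', 'my_fused_op.cu'],
+#                       source_dir=os.path.dirname(__file__))
+#   out = plugin.forward(x)  # call whatever ops the extension exports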
diff --git a/torch_utils/ddp_practice.py b/torch_utils/ddp_practice.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc96d0d39aebd68e66f4cf2292f29589108c34ea
--- /dev/null
+++ b/torch_utils/ddp_practice.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from argparse import ArgumentParser
+import sys
+import os
+
+sys.path.append('..')
+sys.path.append('.')
+
+import torch
+import torch.nn as nn
+import torch.distributed as dist
+from torch.nn.parallel import DistributedDataParallel as DDP
+from torch.utils.data import DataLoader, Dataset
+from torch.utils.data.distributed import DistributedSampler
+
+from vit.vision_transformer import VisionTransformer as ViT
+from vit.vit_triplane import ViTTriplane
+from guided_diffusion import dist_util, logger
+
+import click
+import dnnlib
+
+SEED = 42
+BATCH_SIZE = 8
+NUM_EPOCHS = 1
+
+
+class YourDataset(Dataset):
+ def __init__(self):
+ pass
+
+
+@click.command()
+@click.option('--cfg', help='Base configuration', type=str, default='ffhq')
+@click.option('--sr-module',
+ help='Superresolution module override',
+ metavar='STR',
+ required=False,
+ default=None)
+@click.option('--density_reg',
+ help='Density regularization strength.',
+ metavar='FLOAT',
+ type=click.FloatRange(min=0),
+ default=0.25,
+ required=False,
+ show_default=True)
+@click.option('--density_reg_every',
+ help='lazy density reg',
+ metavar='int',
+ type=click.FloatRange(min=1),
+ default=4,
+ required=False,
+ show_default=True)
+@click.option('--density_reg_p_dist',
+ help='density regularization strength.',
+ metavar='FLOAT',
+ type=click.FloatRange(min=0),
+ default=0.004,
+ required=False,
+ show_default=True)
+@click.option('--reg_type',
+ help='Type of regularization',
+ metavar='STR',
+ type=click.Choice([
+ 'l1', 'l1-alt', 'monotonic-detach', 'monotonic-fixed',
+ 'total-variation'
+ ]),
+ required=False,
+ default='l1')
+@click.option('--decoder_lr_mul',
+ help='decoder learning rate multiplier.',
+ metavar='FLOAT',
+ type=click.FloatRange(min=0),
+ default=1,
+ required=False,
+ show_default=True)
+@click.option('--c_scale',
+ help='Scale factor for generator pose conditioning.',
+ metavar='FLOAT',
+ type=click.FloatRange(min=0),
+ required=False,
+ default=1)
+def main(**kwargs):
+ # parser = ArgumentParser('DDP usage example')
+ # parser.add_argument('--local_rank', type=int, default=-1, metavar='N', help='Local process rank.') # you need this argument in your scripts for DDP to work
+ # args = parser.parse_args()
+
+ opts = dnnlib.EasyDict(kwargs) # Command line arguments.
+ c = dnnlib.EasyDict() # Main config dict.
+
+ rendering_options = {
+ # 'image_resolution': c.training_set_kwargs.resolution,
+ 'image_resolution': 256,
+ 'disparity_space_sampling': False,
+ 'clamp_mode': 'softplus',
+ # 'superresolution_module': sr_module,
+ # 'c_gen_conditioning_zero': not opts.
+ # gen_pose_cond, # if true, fill generator pose conditioning label with dummy zero vector
+ # 'gpc_reg_prob': opts.gpc_reg_prob if opts.gen_pose_cond else None,
+ 'c_scale':
+        opts.c_scale, # multiplier for generator pose conditioning label
+ # 'superresolution_noise_mode': opts.
+ # sr_noise_mode, # [random or none], whether to inject pixel noise into super-resolution layers
+ 'density_reg': opts.density_reg, # strength of density regularization
+ 'density_reg_p_dist': opts.
+ density_reg_p_dist, # distance at which to sample perturbed points for density regularization
+ 'reg_type': opts.
+ reg_type, # for experimenting with variations on density regularization
+ 'decoder_lr_mul':
+ opts.decoder_lr_mul, # learning rate multiplier for decoder
+ 'sr_antialias': True,
+ 'return_triplane_features': True, # for DDF supervision
+ 'return_sampling_details_flag': True,
+ }
+
+ if opts.cfg == 'ffhq':
+ rendering_options.update({
+ 'focal': 2985.29 / 700,
+ 'depth_resolution':
+ # 48, # number of uniform samples to take per ray.
+ 36, # number of uniform samples to take per ray.
+ 'depth_resolution_importance':
+ # 48, # number of importance samples to take per ray.
+ 36, # number of importance samples to take per ray.
+ 'ray_start':
+ 2.25, # near point along each ray to start taking samples.
+ 'ray_end':
+ 3.3, # far point along each ray to stop taking samples.
+ 'box_warp':
+ 1, # the side-length of the bounding box spanned by the tri-planes; box_warp=1 means [-0.5, -0.5, -0.5] -> [0.5, 0.5, 0.5].
+ 'avg_camera_radius':
+ 2.7, # used only in the visualizer to specify camera orbit radius.
+ 'avg_camera_pivot': [
+ 0, 0, 0.2
+ ], # used only in the visualizer to control center of camera rotation.
+ })
+ elif opts.cfg == 'afhq':
+ rendering_options.update({
+ 'focal': 4.2647,
+ 'depth_resolution': 48,
+ 'depth_resolution_importance': 48,
+ 'ray_start': 2.25,
+ 'ray_end': 3.3,
+ 'box_warp': 1,
+ 'avg_camera_radius': 2.7,
+ 'avg_camera_pivot': [0, 0, -0.06],
+ })
+ elif opts.cfg == 'shapenet':
+ rendering_options.update({
+ 'depth_resolution': 64,
+ 'depth_resolution_importance': 64,
+ # 'ray_start': 0.1,
+ # 'ray_end': 2.6,
+ 'ray_start': 0.1,
+ 'ray_end': 3.3,
+ 'box_warp': 1.6,
+ 'white_back': True,
+ 'avg_camera_radius': 1.7,
+ 'avg_camera_pivot': [0, 0, 0],
+ })
+ else:
+ assert False, "Need to specify config"
+
+ c.rendering_kwargs = rendering_options
+
+ args = opts
+
+    # keep track of whether the current process is the `master` process (totally optional, but I find it useful for data loading, logging, etc.)
+ args.local_rank = int(os.environ["LOCAL_RANK"])
+ args.is_master = args.local_rank == 0
+
+ # set the device
+ # device = torch.cuda.device(args.local_rank)
+ device = torch.device(f"cuda:{args.local_rank}")
+
+ # initialize PyTorch distributed using environment variables (you could also do this more explicitly by specifying `rank` and `world_size`, but I find using environment variables makes it so that you can easily use the same script on different machines)
+ dist.init_process_group(backend='nccl',
+ init_method='env://',
+ rank=args.local_rank,
+ world_size=torch.cuda.device_count())
+ print(f"{args.local_rank=} init complete")
+ torch.cuda.set_device(args.local_rank)
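+    # assumed launch, single node: torchrun --nproc_per_node=<num_gpus> torch_utils/ddp_practice.py
+    # (torchrun sets LOCAL_RANK; world_size above is taken from torch.cuda.device_count())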
+
+ # set the seed for all GPUs (also make sure to set the seed for random, numpy, etc.)
+ torch.cuda.manual_seed_all(SEED)
+
+ # initialize your model (BERT in this example)
+ # model = BertForMaskedLM.from_pretrained('bert-base-uncased')
+
+ # model = ViT(
+ # image_size = 256,
+ # patch_size = 32,
+ # num_classes = 1000,
+ # dim = 1024,
+ # depth = 6,
+ # heads = 16,
+ # mlp_dim = 2048,
+ # dropout = 0.1,
+ # emb_dropout = 0.1
+ # )
+
+ # TODO, check pre-trained ViT encoder cfgs
+ model = ViTTriplane(
+ img_size=[224],
+ patch_size=16,
+ in_chans=384,
+ num_classes=0,
+ embed_dim=384, # Check ViT encoder dim
+ depth=2,
+ num_heads=16,
+ mlp_ratio=4.,
+ qkv_bias=False,
+ qk_scale=None,
+ drop_rate=0.1,
+ attn_drop_rate=0.,
+ drop_path_rate=0.,
+ norm_layer=nn.LayerNorm,
+ out_chans=96,
+ c_dim=25, # Conditioning label (C) dimensionality.
+ img_resolution=128, # Output resolution.
+ img_channels=3, # Number of output color channels.
+ cls_token=False,
+ # TODO, replace with c
+ rendering_kwargs=c.rendering_kwargs,
+ )
+ # noise = torch.randn(1, 8, 8, 1024)
+
+ # send your model to GPU
+ model = model.to(device)
+
+ # initialize distributed data parallel (DDP)
+ model = DDP(model,
+ device_ids=[args.local_rank],
+ output_device=args.local_rank)
+
+ dist_util.sync_params(model.named_parameters())
+
+ # # initialize your dataset
+ # dataset = YourDataset()
+
+ # # initialize the DistributedSampler
+ # sampler = DistributedSampler(dataset)
+
+ # # initialize the dataloader
+ # dataloader = DataLoader(
+ # dataset=dataset,
+ # sampler=sampler,
+ # batch_size=BATCH_SIZE
+ # )
+
+ # start your training!
+ for epoch in range(NUM_EPOCHS):
+ # put model in train mode
+ model.train()
+
+ # let all processes sync up before starting with a new epoch of training
+ dist.barrier()
+
+ noise = torch.randn(1, 14 * 14, 384).to(device) # B, L, C
+ img = model(noise, torch.zeros(1, 25).to(device))
+ print(img['image'].shape)
+ # st()
+
+ # img = torch.randn(1, 3, 256, 256).to(device)
+
+ # preds = model(img)
+ # print(preds.shape)
+ # assert preds.shape == (1, 1000), 'correct logits outputted'
+
+ # for step, batch in enumerate(dataloader):
+ # # send batch to device
+ # batch = tuple(t.to(args.device) for t in batch)
+
+ # # forward pass
+ # outputs = model(*batch)
+
+ # # compute loss
+ # loss = outputs[0]
+
+ # # etc.
+
+
+if __name__ == '__main__':
+ main()
diff --git a/torch_utils/dist_practice.py b/torch_utils/dist_practice.py
new file mode 100644
index 0000000000000000000000000000000000000000..e103870819b989b1ea1ed5f1f9deeecb436cf9e3
--- /dev/null
+++ b/torch_utils/dist_practice.py
@@ -0,0 +1,43 @@
+import torch
+import torch.multiprocessing as mp
+import torch.distributed as dist
+import os
+
+
+def find_free_port():
+ """ https://stackoverflow.com/questions/1365265/on-localhost-how-do-i-pick-a-free-port-number """
+ import socket
+ from contextlib import closing
+
+ with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
+ s.bind(('', 0))
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ return str(s.getsockname()[1])
+
+
+def setup_process(rank, master_addr, master_port, world_size, backend='nccl'):
+ print(f'setting up {rank=} {world_size=} {backend=}')
+
+ # set up the master's ip address so this child process can coordinate
+ os.environ['MASTER_ADDR'] = master_addr
+ os.environ['MASTER_PORT'] = master_port
+ print(f"{master_addr=} {master_port=}")
+
+ # Initializes the default distributed process group, and this will also initialize the distributed package.
+ dist.init_process_group(backend, rank=rank, world_size=world_size)
+ print(f"{rank=} init complete")
+ dist.destroy_process_group()
+ print(f"{rank=} destroy complete")
+
+
+if __name__ == '__main__':
+ world_size = 2
+ master_addr = '127.0.0.1'
+ master_port = find_free_port()
+ mp.spawn(setup_process,
+ args=(
+ master_addr,
+ master_port,
+ world_size,
+ ),
+ nprocs=world_size)
diff --git a/torch_utils/distributions/__init__.py b/torch_utils/distributions/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/torch_utils/distributions/__pycache__/__init__.cpython-39.pyc b/torch_utils/distributions/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8e78de3043b783d4bcb48bc9a04a7eb65243ed17
Binary files /dev/null and b/torch_utils/distributions/__pycache__/__init__.cpython-39.pyc differ
diff --git a/torch_utils/distributions/__pycache__/distributions.cpython-39.pyc b/torch_utils/distributions/__pycache__/distributions.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ddef35c3cabc41b394f47955362550ab4b0794ef
Binary files /dev/null and b/torch_utils/distributions/__pycache__/distributions.cpython-39.pyc differ
diff --git a/torch_utils/distributions/distributions.py b/torch_utils/distributions/distributions.py
new file mode 100644
index 0000000000000000000000000000000000000000..13112924abd5dab81952bca8590d87765ac29e38
--- /dev/null
+++ b/torch_utils/distributions/distributions.py
@@ -0,0 +1,149 @@
+# https://raw.githubusercontent.com/CompVis/latent-diffusion/e66308c7f2e64cb581c6d27ab6fbeb846828253b/ldm/modules/distributions/distributions.py
+
+import torch
+import numpy as np
+from pdb import set_trace as st
+
+
+class AbstractDistribution:
+
+ def sample(self):
+ raise NotImplementedError()
+
+ def mode(self):
+ raise NotImplementedError()
+
+
+class DiracDistribution(AbstractDistribution):
+
+ def __init__(self, value):
+ self.value = value
+
+ def sample(self):
+ return self.value
+
+ def mode(self):
+ return self.value
+
+
+@torch.jit.script
+def soft_clamp20(x: torch.Tensor):
+    # 20. * torch.tanh(x / 20.) <--> soft differentiable clamp between [-20, 20]
+    return x.div(20.).tanh().mul(20.)
+
+
+# @torch.jit.script
+# def soft_clamp(x: torch.Tensor, a: torch.Tensor):
+# return x.div(a).tanh_().mul(a)
+
+
+class DiagonalGaussianDistribution(object):
+
+ def __init__(self, parameters, deterministic=False, soft_clamp=False):
+ self.parameters = parameters
+ self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)
+
+ if soft_clamp:
+ # self.mean, self.logvar = soft_clamp5(self.mean), soft_clamp5(self.logvar) # as in LSGM, bound the range. needs re-training?
+ self.logvar = soft_clamp20(
+ self.logvar) # as in LSGM, bound the range. [-20, 20]
+ else:
+ self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
+
+ self.deterministic = deterministic
+ self.std = torch.exp(0.5 * self.logvar)
+ self.var = torch.exp(self.logvar)
+ if self.deterministic:
+ self.var = self.std = torch.zeros_like(
+ self.mean).to(device=self.parameters.device)
+
+ def sample(self):
+ x = self.mean + self.std * torch.randn(
+ self.mean.shape).to(device=self.parameters.device)
+ return x
+
+    # https://github.dev/NVlabs/LSGM/util/distributions.py
+    def log_p(self, samples):
+        # Gaussian log-density per element; used for the negative encoder entropy term
+        normalized_samples = (samples - self.mean) / self.std
+        log_p = (-0.5 * normalized_samples * normalized_samples
+                 - 0.5 * np.log(2 * np.pi) - 0.5 * self.logvar)
+        return log_p
+
+    def normal_entropy(self):
+        # Per-element entropy of a diagonal Gaussian, 0.5 * log(2 * pi * e * var).
+        # Motivation: supervise logvar directly. Kept un-reduced, following the
+        # eps-loss tradition of averaging over all dims downstream.
+        entropy = 0.5 * self.logvar + 0.5 * (np.log(2 * np.pi) + 1)
+        return entropy
+
+ def kl(self, other=None, pt_ft_separate=False, ft_separate=False):
+
+ def kl_fn(mean, var, logvar):
+ return 0.5 * torch.sum(
+ torch.pow(mean, 2) + var - 1.0 - logvar,
+ dim=list(range(1,mean.ndim))) # support B L C-like VAE latent
+
+ if self.deterministic:
+ return torch.Tensor([0.])
+ else:
+ if other is None:
+ if pt_ft_separate: # as in LION
+ pt_kl = kl_fn(self.mean[:, :3], self.var[:, :3], self.logvar[:, :3]) # (B C L) input
+ ft_kl = kl_fn(self.mean[:, 3:], self.var[:, 3:], self.logvar[:, 3:]) # (B C L) input
+ return pt_kl, ft_kl
+ elif ft_separate:
+ ft_kl = kl_fn(self.mean[:, :], self.var[:, :], self.logvar[:, :]) # (B C L) input
+ return ft_kl
+                else:
+                    return kl_fn(self.mean, self.var, self.logvar)
+ else:
+ return 0.5 * torch.sum(
+ torch.pow(self.mean - other.mean, 2) / other.var +
+ self.var / other.var - 1.0 - self.logvar + other.logvar,
+ dim=[1, 2, 3])
+
+ def nll(self, sample, dims=[1, 2, 3]):
+ if self.deterministic:
+ return torch.Tensor([0.])
+ logtwopi = np.log(2.0 * np.pi)
+ return 0.5 * torch.sum(logtwopi + self.logvar +
+ torch.pow(sample - self.mean, 2) / self.var,
+ dim=dims)
+
+ def mode(self):
+ return self.mean
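+# Minimal usage sketch (assumed shapes): `parameters` packs mean and logvar along
+# dim=1, e.g. a (B, 2*C, L) encoder output:
+#   post = DiagonalGaussianDistribution(torch.randn(4, 8, 16))
+#   z = post.sample()  # (4, 4, 16)
+#   kl = post.kl()     # (4,), summed over all non-batch dims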
+
+
+def normal_kl(mean1, logvar1, mean2, logvar2):
+ """
+ source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12
+ Compute the KL divergence between two gaussians.
+ Shapes are automatically broadcasted, so batches can be compared to
+ scalars, among other use cases.
+ """
+ tensor = None
+ for obj in (mean1, logvar1, mean2, logvar2):
+ if isinstance(obj, torch.Tensor):
+ tensor = obj
+ break
+ assert tensor is not None, "at least one argument must be a Tensor"
+
+ # Force variances to be Tensors. Broadcasting helps convert scalars to
+ # Tensors, but it does not work for torch.exp().
+ logvar1, logvar2 = [
+ x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)
+ for x in (logvar1, logvar2)
+ ]
+
+ return 0.5 * (-1.0 + logvar2 - logvar1 + torch.exp(logvar1 - logvar2) +
+ ((mean1 - mean2)**2) * torch.exp(-logvar2))
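+# Sanity check: identical Gaussians give zero divergence, e.g.
+# normal_kl(torch.zeros(3), torch.zeros(3), torch.zeros(3), torch.zeros(3)) == tensor([0., 0., 0.])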
diff --git a/torch_utils/inference_matt.py b/torch_utils/inference_matt.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b80917995a2fe280a08029fcfe8f88e5973fcf9
--- /dev/null
+++ b/torch_utils/inference_matt.py
@@ -0,0 +1,139 @@
+# https://github.com/xinntao/facexlib/blob/master/inference/inference_matting.py
+
+from tqdm import tqdm, trange
+import argparse
+from pathlib import Path
+import cv2
+import numpy as np
+import torch.nn.functional as F
+from torchvision.transforms.functional import normalize
+
+from facexlib.matting import init_matting_model
+from facexlib.utils import img2tensor
+
+
+def matt_single(args):
+ modnet = init_matting_model()
+
+ # read image
+ img = cv2.imread(args.img_path) / 255.
+ # unify image channels to 3
+ if len(img.shape) == 2:
+ img = img[:, :, None]
+ if img.shape[2] == 1:
+ img = np.repeat(img, 3, axis=2)
+ elif img.shape[2] == 4:
+ img = img[:, :, 0:3]
+
+ img_t = img2tensor(img, bgr2rgb=True, float32=True)
+ normalize(img_t, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True)
+ img_t = img_t.unsqueeze(0).cuda()
+
+ # resize image for input
+ _, _, im_h, im_w = img_t.shape
+ ref_size = 512
+ if max(im_h, im_w) < ref_size or min(im_h, im_w) > ref_size:
+ if im_w >= im_h:
+ im_rh = ref_size
+ im_rw = int(im_w / im_h * ref_size)
+ elif im_w < im_h:
+ im_rw = ref_size
+ im_rh = int(im_h / im_w * ref_size)
+ else:
+ im_rh = im_h
+ im_rw = im_w
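+    # snap H and W down to multiples of 32; the matting backbone downsamples by a
+    # factor of 32, so input dims must be divisible by 32 (assumption, following
+    # the official MODNet inference code)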
+ im_rw = im_rw - im_rw % 32
+ im_rh = im_rh - im_rh % 32
+ img_t = F.interpolate(img_t, size=(im_rh, im_rw), mode='area')
+
+ # inference
+ _, _, matte = modnet(img_t, True)
+
+ # resize and save matte
+ matte = F.interpolate(matte, size=(im_h, im_w), mode='area')
+ matte = matte[0][0].data.cpu().numpy()
+ cv2.imwrite(args.save_path, (matte * 255).astype('uint8'))
+
+ # get foreground
+ matte = matte[:, :, None]
+ foreground = img * matte + np.full(img.shape, 1) * (1 - matte)
+ cv2.imwrite(args.save_path.replace('.png', '_fg.png'), foreground * 255)
+
+def matt_directory(args): # for extracting ffhq imgs foreground
+ modnet = init_matting_model()
+
+ all_imgs = list(Path(args.img_dir_path).rglob('*.png'))
+ print('all imgs: ', len(all_imgs))
+
+ tgt_dir_path = '/mnt/lustre/share/yslan/ffhq/unzipped_ffhq_matte/'
+ # tgt_img_path = '/mnt/lustre/share/yslan/ffhq/unzipped_ffhq_matting/'
+
+ for img_path in tqdm(all_imgs):
+
+ # read image
+ # img = cv2.imread(args.img_path) / 255.
+ img = cv2.imread(str(img_path)) / 255.
+
+ relative_img_path = Path(img_path).relative_to('/mnt/lustre/share/yslan/ffhq/unzipped_ffhq_512/')
+ tgt_save_path = tgt_dir_path / relative_img_path
+
+ (tgt_save_path.parent).mkdir(parents=True, exist_ok=True)
+
+ # unify image channels to 3
+ if len(img.shape) == 2:
+ img = img[:, :, None]
+ if img.shape[2] == 1:
+ img = np.repeat(img, 3, axis=2)
+ elif img.shape[2] == 4:
+ img = img[:, :, 0:3]
+
+ img_t = img2tensor(img, bgr2rgb=True, float32=True)
+ normalize(img_t, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True)
+ img_t = img_t.unsqueeze(0).cuda()
+
+ # resize image for input
+ _, _, im_h, im_w = img_t.shape
+ ref_size = 512
+ if max(im_h, im_w) < ref_size or min(im_h, im_w) > ref_size:
+ if im_w >= im_h:
+ im_rh = ref_size
+ im_rw = int(im_w / im_h * ref_size)
+ elif im_w < im_h:
+ im_rw = ref_size
+ im_rh = int(im_h / im_w * ref_size)
+ else:
+ im_rh = im_h
+ im_rw = im_w
+ im_rw = im_rw - im_rw % 32
+ im_rh = im_rh - im_rh % 32
+ img_t = F.interpolate(img_t, size=(im_rh, im_rw), mode='area')
+
+ # inference
+ _, _, matte = modnet(img_t, True)
+
+ # resize and save matte
+ matte = F.interpolate(matte, size=(im_h, im_w), mode='area')
+ matte = matte[0][0].data.cpu().numpy()
+ # cv2.imwrite(args.save_path, (matte * 255).astype('uint8'))
+ cv2.imwrite(str(tgt_save_path), (matte * 255).astype('uint8'))
+
+ assert tgt_save_path.exists()
+
+ # get foreground
+ # matte = matte[:, :, None]
+ # foreground = img * matte + np.full(img.shape, 1) * (1 - matte)
+ # cv2.imwrite(args.save_path.replace('.png', '_fg.png'), foreground * 255)
+
+ pass
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--img_path', type=str, default='assets/test.jpg')
+ parser.add_argument('--save_path', type=str, default='test_matting.png')
+
+ parser.add_argument('--img_dir_path', type=str, default='assets', required=False)
+ args = parser.parse_args()
+
+ # matt_single(args)
+ matt_directory(args)
\ No newline at end of file
diff --git a/torch_utils/legacy.py b/torch_utils/legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f2f0ac5c42d0e36a4808f0235276f25ee5bcbf3
--- /dev/null
+++ b/torch_utils/legacy.py
@@ -0,0 +1,368 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+"""Converting legacy network pickle into the new format."""
+
+from pdb import set_trace as st
+import click
+import pickle
+import re
+import copy
+import numpy as np
+import torch
+import dnnlib
+from torch_utils import misc
+
+#----------------------------------------------------------------------------
+
+def load_network_pkl(f, device, force_fp16=False):
+ data = _LegacyUnpickler(f).load()
+
+ # Legacy TensorFlow pickle => convert.
+ if isinstance(data, tuple) and len(data) == 3 and all(isinstance(net, _TFNetworkStub) for net in data):
+ tf_G, tf_D, tf_Gs = data
+ G = convert_tf_generator(tf_G)
+ D = convert_tf_discriminator(tf_D)
+ G_ema = convert_tf_generator(tf_Gs)
+ data = dict(G=G, D=D, G_ema=G_ema)
+
+ # for k, module in data.items():
+ # for key in ['G', 'D', 'G_ema']:
+ # data[key].to(device)
+
+ # Add missing fields.
+ if 'training_set_kwargs' not in data:
+ data['training_set_kwargs'] = None
+ if 'augment_pipe' not in data:
+ data['augment_pipe'] = None
+
+ # Validate contents.
+ assert isinstance(data['G'], torch.nn.Module)
+ assert isinstance(data['D'], torch.nn.Module)
+ assert isinstance(data['G_ema'], torch.nn.Module)
+ assert isinstance(data['training_set_kwargs'], (dict, type(None)))
+ assert isinstance(data['augment_pipe'], (torch.nn.Module, type(None)))
+
+ # Force FP16.
+ if force_fp16:
+ for key in ['G', 'D', 'G_ema']:
+ old = data[key]
+ kwargs = copy.deepcopy(old.init_kwargs)
+ fp16_kwargs = kwargs.get('synthesis_kwargs', kwargs)
+ fp16_kwargs.num_fp16_res = 4
+ fp16_kwargs.conv_clamp = 256
+ if kwargs != old.init_kwargs:
+ new = type(old)(**kwargs).eval().requires_grad_(False)
+ misc.copy_params_and_buffers(old, new, require_all=True)
+ data[key] = new
+ return data
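+# Typical usage (sketch; assumes a StyleGAN-style network pickle):
+#   with dnnlib.util.open_url(network_pkl) as f:
+#       G_ema = load_network_pkl(f, device)['G_ema'].to(device)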
+
+def load_network_pkl_E(f, force_fp16=False):
+ data = _LegacyUnpickler(f).load()
+
+ # Legacy TensorFlow pickle => convert.
+ if isinstance(data, tuple) and len(data) == 3 and all(isinstance(net, _TFNetworkStub) for net in data):
+ tf_E = data
+ E = convert_tf_generator(tf_E)
+ # D = convert_tf_discriminator(tf_D)
+ # G_ema = convert_tf_generator(tf_Gs)
+        data = dict(E=E)  # key must be 'E' to match the checks below
+
+ # Add missing fields.
+ if 'training_set_kwargs' not in data:
+ data['training_set_kwargs'] = None
+ if 'augment_pipe' not in data:
+ data['augment_pipe'] = None
+
+ # Validate contents.
+ assert isinstance(data['E'], torch.nn.Module)
+ assert isinstance(data['training_set_kwargs'], (dict, type(None)))
+ assert isinstance(data['augment_pipe'], (torch.nn.Module, type(None)))
+
+ # Force FP16.
+ if force_fp16:
+ for key in ['E']:
+ old = data[key]
+ kwargs = copy.deepcopy(old.init_kwargs)
+ fp16_kwargs = kwargs.get('synthesis_kwargs', kwargs)
+ fp16_kwargs.num_fp16_res = 4
+ fp16_kwargs.conv_clamp = 256
+ if kwargs != old.init_kwargs:
+ new = type(old)(**kwargs).eval().requires_grad_(False)
+ misc.copy_params_and_buffers(old, new, require_all=True)
+ data[key] = new
+ return data
+#----------------------------------------------------------------------------
+
+class _TFNetworkStub(dnnlib.EasyDict):
+ pass
+
+class _LegacyUnpickler(pickle.Unpickler):
+ def find_class(self, module, name):
+ if module == 'dnnlib.tflib.network' and name == 'Network':
+ return _TFNetworkStub
+ if 'training' in module:
+            module = module.replace('training', 'nsr') # remap module paths from the eg3d repo layout onto this repo's nsr package
+
+ return super().find_class(module, name)
+
+#----------------------------------------------------------------------------
+
+def _collect_tf_params(tf_net):
+ # pylint: disable=protected-access
+ tf_params = dict()
+ def recurse(prefix, tf_net):
+ for name, value in tf_net.variables:
+ tf_params[prefix + name] = value
+ for name, comp in tf_net.components.items():
+ recurse(prefix + name + '/', comp)
+ recurse('', tf_net)
+ return tf_params
+
+#----------------------------------------------------------------------------
+
+def _populate_module_params(module, *patterns):
+ for name, tensor in misc.named_params_and_buffers(module):
+ found = False
+ value = None
+ for pattern, value_fn in zip(patterns[0::2], patterns[1::2]):
+ match = re.fullmatch(pattern, name)
+ if match:
+ found = True
+ if value_fn is not None:
+ value = value_fn(*match.groups())
+ break
+ try:
+ assert found
+ if value is not None:
+ tensor.copy_(torch.from_numpy(np.array(value)))
+ except:
+ print(name, list(tensor.shape))
+ raise
+
+#----------------------------------------------------------------------------
+
+def convert_tf_generator(tf_G):
+ if tf_G.version < 4:
+ raise ValueError('TensorFlow pickle version too low')
+
+ # Collect kwargs.
+ tf_kwargs = tf_G.static_kwargs
+ known_kwargs = set()
+ def kwarg(tf_name, default=None, none=None):
+ known_kwargs.add(tf_name)
+ val = tf_kwargs.get(tf_name, default)
+ return val if val is not None else none
+
+ # Convert kwargs.
+ from training import networks_stylegan2
+ network_class = networks_stylegan2.Generator
+ kwargs = dnnlib.EasyDict(
+ z_dim = kwarg('latent_size', 512),
+ c_dim = kwarg('label_size', 0),
+ w_dim = kwarg('dlatent_size', 512),
+ img_resolution = kwarg('resolution', 1024),
+ img_channels = kwarg('num_channels', 3),
+ channel_base = kwarg('fmap_base', 16384) * 2,
+ channel_max = kwarg('fmap_max', 512),
+ num_fp16_res = kwarg('num_fp16_res', 0),
+ conv_clamp = kwarg('conv_clamp', None),
+ architecture = kwarg('architecture', 'skip'),
+ resample_filter = kwarg('resample_kernel', [1,3,3,1]),
+ use_noise = kwarg('use_noise', True),
+ activation = kwarg('nonlinearity', 'lrelu'),
+ mapping_kwargs = dnnlib.EasyDict(
+ num_layers = kwarg('mapping_layers', 8),
+ embed_features = kwarg('label_fmaps', None),
+ layer_features = kwarg('mapping_fmaps', None),
+ activation = kwarg('mapping_nonlinearity', 'lrelu'),
+ lr_multiplier = kwarg('mapping_lrmul', 0.01),
+ w_avg_beta = kwarg('w_avg_beta', 0.995, none=1),
+ ),
+ )
+
+ # Check for unknown kwargs.
+ kwarg('truncation_psi')
+ kwarg('truncation_cutoff')
+ kwarg('style_mixing_prob')
+ kwarg('structure')
+ kwarg('conditioning')
+ kwarg('fused_modconv')
+ unknown_kwargs = list(set(tf_kwargs.keys()) - known_kwargs)
+ if len(unknown_kwargs) > 0:
+ raise ValueError('Unknown TensorFlow kwarg', unknown_kwargs[0])
+
+ # Collect params.
+ tf_params = _collect_tf_params(tf_G)
+ for name, value in list(tf_params.items()):
+ match = re.fullmatch(r'ToRGB_lod(\d+)/(.*)', name)
+ if match:
+ r = kwargs.img_resolution // (2 ** int(match.group(1)))
+ tf_params[f'{r}x{r}/ToRGB/{match.group(2)}'] = value
+ kwargs.architecture = 'orig' # kwargs is flat here; mirrors the discriminator path below
+ #for name, value in tf_params.items(): print(f'{name:<50s}{list(value.shape)}')
+
+ # Convert params.
+ G = network_class(**kwargs).eval().requires_grad_(False)
+ # pylint: disable=unnecessary-lambda
+ # pylint: disable=f-string-without-interpolation
+ _populate_module_params(G,
+ r'mapping\.w_avg', lambda: tf_params[f'dlatent_avg'],
+ r'mapping\.embed\.weight', lambda: tf_params[f'mapping/LabelEmbed/weight'].transpose(),
+ r'mapping\.embed\.bias', lambda: tf_params[f'mapping/LabelEmbed/bias'],
+ r'mapping\.fc(\d+)\.weight', lambda i: tf_params[f'mapping/Dense{i}/weight'].transpose(),
+ r'mapping\.fc(\d+)\.bias', lambda i: tf_params[f'mapping/Dense{i}/bias'],
+ r'synthesis\.b4\.const', lambda: tf_params[f'synthesis/4x4/Const/const'][0],
+ r'synthesis\.b4\.conv1\.weight', lambda: tf_params[f'synthesis/4x4/Conv/weight'].transpose(3, 2, 0, 1),
+ r'synthesis\.b4\.conv1\.bias', lambda: tf_params[f'synthesis/4x4/Conv/bias'],
+ r'synthesis\.b4\.conv1\.noise_const', lambda: tf_params[f'synthesis/noise0'][0, 0],
+ r'synthesis\.b4\.conv1\.noise_strength', lambda: tf_params[f'synthesis/4x4/Conv/noise_strength'],
+ r'synthesis\.b4\.conv1\.affine\.weight', lambda: tf_params[f'synthesis/4x4/Conv/mod_weight'].transpose(),
+ r'synthesis\.b4\.conv1\.affine\.bias', lambda: tf_params[f'synthesis/4x4/Conv/mod_bias'] + 1,
+ r'synthesis\.b(\d+)\.conv0\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/weight'][::-1, ::-1].transpose(3, 2, 0, 1),
+ r'synthesis\.b(\d+)\.conv0\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/bias'],
+ r'synthesis\.b(\d+)\.conv0\.noise_const', lambda r: tf_params[f'synthesis/noise{int(np.log2(int(r)))*2-5}'][0, 0],
+ r'synthesis\.b(\d+)\.conv0\.noise_strength', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/noise_strength'],
+ r'synthesis\.b(\d+)\.conv0\.affine\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/mod_weight'].transpose(),
+ r'synthesis\.b(\d+)\.conv0\.affine\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/mod_bias'] + 1,
+ r'synthesis\.b(\d+)\.conv1\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/weight'].transpose(3, 2, 0, 1),
+ r'synthesis\.b(\d+)\.conv1\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/bias'],
+ r'synthesis\.b(\d+)\.conv1\.noise_const', lambda r: tf_params[f'synthesis/noise{int(np.log2(int(r)))*2-4}'][0, 0],
+ r'synthesis\.b(\d+)\.conv1\.noise_strength', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/noise_strength'],
+ r'synthesis\.b(\d+)\.conv1\.affine\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/mod_weight'].transpose(),
+ r'synthesis\.b(\d+)\.conv1\.affine\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/mod_bias'] + 1,
+ r'synthesis\.b(\d+)\.torgb\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/weight'].transpose(3, 2, 0, 1),
+ r'synthesis\.b(\d+)\.torgb\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/bias'],
+ r'synthesis\.b(\d+)\.torgb\.affine\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/mod_weight'].transpose(),
+ r'synthesis\.b(\d+)\.torgb\.affine\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/mod_bias'] + 1,
+ r'synthesis\.b(\d+)\.skip\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Skip/weight'][::-1, ::-1].transpose(3, 2, 0, 1),
+ r'.*\.resample_filter', None,
+ r'.*\.act_filter', None,
+ )
+ return G
+
+#----------------------------------------------------------------------------
+
+def convert_tf_discriminator(tf_D):
+ if tf_D.version < 4:
+ raise ValueError('TensorFlow pickle version too low')
+
+ # Collect kwargs.
+ tf_kwargs = tf_D.static_kwargs
+ known_kwargs = set()
+ def kwarg(tf_name, default=None):
+ known_kwargs.add(tf_name)
+ return tf_kwargs.get(tf_name, default)
+
+ # Convert kwargs.
+ kwargs = dnnlib.EasyDict(
+ c_dim = kwarg('label_size', 0),
+ img_resolution = kwarg('resolution', 1024),
+ img_channels = kwarg('num_channels', 3),
+ architecture = kwarg('architecture', 'resnet'),
+ channel_base = kwarg('fmap_base', 16384) * 2,
+ channel_max = kwarg('fmap_max', 512),
+ num_fp16_res = kwarg('num_fp16_res', 0),
+ conv_clamp = kwarg('conv_clamp', None),
+ cmap_dim = kwarg('mapping_fmaps', None),
+ block_kwargs = dnnlib.EasyDict(
+ activation = kwarg('nonlinearity', 'lrelu'),
+ resample_filter = kwarg('resample_kernel', [1,3,3,1]),
+ freeze_layers = kwarg('freeze_layers', 0),
+ ),
+ mapping_kwargs = dnnlib.EasyDict(
+ num_layers = kwarg('mapping_layers', 0),
+ embed_features = kwarg('mapping_fmaps', None),
+ layer_features = kwarg('mapping_fmaps', None),
+ activation = kwarg('nonlinearity', 'lrelu'),
+ lr_multiplier = kwarg('mapping_lrmul', 0.1),
+ ),
+ epilogue_kwargs = dnnlib.EasyDict(
+ mbstd_group_size = kwarg('mbstd_group_size', None),
+ mbstd_num_channels = kwarg('mbstd_num_features', 1),
+ activation = kwarg('nonlinearity', 'lrelu'),
+ ),
+ )
+
+ # Check for unknown kwargs.
+ kwarg('structure')
+ kwarg('conditioning')
+ unknown_kwargs = list(set(tf_kwargs.keys()) - known_kwargs)
+ if len(unknown_kwargs) > 0:
+ raise ValueError('Unknown TensorFlow kwarg', unknown_kwargs[0])
+
+ # Collect params.
+ tf_params = _collect_tf_params(tf_D)
+ for name, value in list(tf_params.items()):
+ match = re.fullmatch(r'FromRGB_lod(\d+)/(.*)', name)
+ if match:
+ r = kwargs.img_resolution // (2 ** int(match.group(1)))
+ tf_params[f'{r}x{r}/FromRGB/{match.group(2)}'] = value
+ kwargs.architecture = 'orig'
+ #for name, value in tf_params.items(): print(f'{name:<50s}{list(value.shape)}')
+
+ # Convert params.
+ from training import networks_stylegan2
+ D = networks_stylegan2.Discriminator(**kwargs).eval().requires_grad_(False)
+ # pylint: disable=unnecessary-lambda
+ # pylint: disable=f-string-without-interpolation
+ _populate_module_params(D,
+ r'b(\d+)\.fromrgb\.weight', lambda r: tf_params[f'{r}x{r}/FromRGB/weight'].transpose(3, 2, 0, 1),
+ r'b(\d+)\.fromrgb\.bias', lambda r: tf_params[f'{r}x{r}/FromRGB/bias'],
+ r'b(\d+)\.conv(\d+)\.weight', lambda r, i: tf_params[f'{r}x{r}/Conv{i}{["","_down"][int(i)]}/weight'].transpose(3, 2, 0, 1),
+ r'b(\d+)\.conv(\d+)\.bias', lambda r, i: tf_params[f'{r}x{r}/Conv{i}{["","_down"][int(i)]}/bias'],
+ r'b(\d+)\.skip\.weight', lambda r: tf_params[f'{r}x{r}/Skip/weight'].transpose(3, 2, 0, 1),
+ r'mapping\.embed\.weight', lambda: tf_params[f'LabelEmbed/weight'].transpose(),
+ r'mapping\.embed\.bias', lambda: tf_params[f'LabelEmbed/bias'],
+ r'mapping\.fc(\d+)\.weight', lambda i: tf_params[f'Mapping{i}/weight'].transpose(),
+ r'mapping\.fc(\d+)\.bias', lambda i: tf_params[f'Mapping{i}/bias'],
+ r'b4\.conv\.weight', lambda: tf_params[f'4x4/Conv/weight'].transpose(3, 2, 0, 1),
+ r'b4\.conv\.bias', lambda: tf_params[f'4x4/Conv/bias'],
+ r'b4\.fc\.weight', lambda: tf_params[f'4x4/Dense0/weight'].transpose(),
+ r'b4\.fc\.bias', lambda: tf_params[f'4x4/Dense0/bias'],
+ r'b4\.out\.weight', lambda: tf_params[f'Output/weight'].transpose(),
+ r'b4\.out\.bias', lambda: tf_params[f'Output/bias'],
+ r'.*\.resample_filter', None,
+ )
+ return D
+
+#----------------------------------------------------------------------------
+
+@click.command()
+@click.option('--source', help='Input pickle', required=True, metavar='PATH')
+@click.option('--dest', help='Output pickle', required=True, metavar='PATH')
+@click.option('--force-fp16', help='Force the networks to use FP16', type=bool, default=False, metavar='BOOL', show_default=True)
+def convert_network_pickle(source, dest, force_fp16):
+ """Convert legacy network pickle into the native PyTorch format.
+
+ The tool is able to load the main network configurations exported using the TensorFlow version of StyleGAN2 or StyleGAN2-ADA.
+ It does not support e.g. StyleGAN2-ADA comparison methods, StyleGAN2 configs A-D, or StyleGAN1 networks.
+
+ Example:
+
+ \b
+ python legacy.py \\
+ --source=https://nvlabs-fi-cdn.nvidia.com/stylegan2/networks/stylegan2-cat-config-f.pkl \\
+ --dest=stylegan2-cat-config-f.pkl
+ """
+ print(f'Loading "{source}"...')
+ with dnnlib.util.open_url(source) as f:
+ data = load_network_pkl(f, force_fp16=force_fp16)
+ print(f'Saving "{dest}"...')
+ with open(dest, 'wb') as f:
+ pickle.dump(data, f)
+ print('Done.')
+
+#----------------------------------------------------------------------------
+
+if __name__ == "__main__":
+ convert_network_pickle() # pylint: disable=no-value-for-parameter
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/misc.py b/torch_utils/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..98ecc786f81740321cd6917ea33f19a3deeb5b64
--- /dev/null
+++ b/torch_utils/misc.py
@@ -0,0 +1,344 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+import re
+import contextlib
+import numpy as np
+import torch
+import warnings
+import dnnlib
+
+from guided_diffusion import logger
+
+#----------------------------------------------------------------------------
+# Cached construction of constant tensors. Avoids CPU=>GPU copy when the
+# same constant is used multiple times.
+
+_constant_cache = dict()
+
+
+def constant(value, shape=None, dtype=None, device=None, memory_format=None):
+ value = np.asarray(value)
+ if shape is not None:
+ shape = tuple(shape)
+ if dtype is None:
+ dtype = torch.get_default_dtype()
+ if device is None:
+ device = torch.device('cpu')
+ if memory_format is None:
+ memory_format = torch.contiguous_format
+
+ key = (value.shape, value.dtype, value.tobytes(), shape, dtype, device,
+ memory_format)
+ tensor = _constant_cache.get(key, None)
+ if tensor is None:
+ tensor = torch.as_tensor(value.copy(), dtype=dtype, device=device)
+ if shape is not None:
+ tensor, _ = torch.broadcast_tensors(tensor, torch.empty(shape))
+ tensor = tensor.contiguous(memory_format=memory_format)
+ _constant_cache[key] = tensor
+ return tensor
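+# Usage sketch (illustrative only): repeated calls with the same value,
+# dtype, and device are served from _constant_cache instead of re-uploading.
+#
+#   f = constant([1, 3, 3, 1], dtype=torch.float32, device='cuda')
+#   g = constant([1, 3, 3, 1], dtype=torch.float32, device='cuda')
+#   assert f is g  # second call hits the cache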
+
+
+#----------------------------------------------------------------------------
+# Replace NaN/Inf with specified numerical values.
+
+try:
+ nan_to_num = torch.nan_to_num # 1.8.0a0
+except AttributeError:
+
+ def nan_to_num(input, nan=0.0, posinf=None, neginf=None, *, out=None): # pylint: disable=redefined-builtin
+ assert isinstance(input, torch.Tensor)
+ if posinf is None:
+ posinf = torch.finfo(input.dtype).max
+ if neginf is None:
+ neginf = torch.finfo(input.dtype).min
+ assert nan == 0
+ return torch.clamp(input.unsqueeze(0).nansum(0),
+ min=neginf,
+ max=posinf,
+ out=out)
+
+
+#----------------------------------------------------------------------------
+# Symbolic assert.
+
+try:
+ symbolic_assert = torch._assert # 1.8.0a0 # pylint: disable=protected-access
+except AttributeError:
+ symbolic_assert = torch.Assert # 1.7.0
+
+#----------------------------------------------------------------------------
+# Context manager to temporarily suppress known warnings in torch.jit.trace().
+# Note: Cannot use catch_warnings because of https://bugs.python.org/issue29672
+
+
+@contextlib.contextmanager
+def suppress_tracer_warnings():
+ flt = ('ignore', None, torch.jit.TracerWarning, None, 0)
+ warnings.filters.insert(0, flt)
+ yield
+ warnings.filters.remove(flt)
+
+
+#----------------------------------------------------------------------------
+# Assert that the shape of a tensor matches the given list of integers.
+# None indicates that the size of a dimension is allowed to vary.
+# Performs symbolic assertion when used in torch.jit.trace().
+
+
+def assert_shape(tensor, ref_shape):
+ if tensor.ndim != len(ref_shape):
+ raise AssertionError(
+ f'Wrong number of dimensions: got {tensor.ndim}, expected {len(ref_shape)}'
+ )
+ for idx, (size, ref_size) in enumerate(zip(tensor.shape, ref_shape)):
+ if ref_size is None:
+ pass
+ elif isinstance(ref_size, torch.Tensor):
+ with suppress_tracer_warnings(
+ ): # as_tensor results are registered as constants
+ symbolic_assert(torch.equal(torch.as_tensor(size), ref_size),
+ f'Wrong size for dimension {idx}')
+ elif isinstance(size, torch.Tensor):
+ with suppress_tracer_warnings(
+ ): # as_tensor results are registered as constants
+ symbolic_assert(
+ torch.equal(size, torch.as_tensor(ref_size)),
+ f'Wrong size for dimension {idx}: expected {ref_size}')
+ elif size != ref_size:
+ raise AssertionError(
+ f'Wrong size for dimension {idx}: got {size}, expected {ref_size}'
+ )
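+# Example: None acts as a wildcard for a dimension of any size.
+#
+#   x = torch.zeros(8, 3, 64, 64)
+#   assert_shape(x, [None, 3, 64, 64])  # passes
+#   assert_shape(x, [None, 1, 64, 64])  # raises AssertionError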
+
+
+#----------------------------------------------------------------------------
+# Function decorator that calls torch.autograd.profiler.record_function().
+
+
+def profiled_function(fn):
+ def decorator(*args, **kwargs):
+ with torch.autograd.profiler.record_function(fn.__name__):
+ return fn(*args, **kwargs)
+
+ decorator.__name__ = fn.__name__
+ return decorator
+
+
+#----------------------------------------------------------------------------
+# Sampler for torch.utils.data.DataLoader that loops over the dataset
+# indefinitely, shuffling items as it goes.
+
+
+class InfiniteSampler(torch.utils.data.Sampler):
+ def __init__(self,
+ dataset,
+ rank=0,
+ num_replicas=1,
+ shuffle=True,
+ seed=0,
+ window_size=0.5):
+ assert len(dataset) > 0
+ assert num_replicas > 0
+ assert 0 <= rank < num_replicas
+ assert 0 <= window_size <= 1
+ super().__init__(dataset)
+ self.dataset = dataset
+ self.rank = rank
+ self.num_replicas = num_replicas
+ self.shuffle = shuffle
+ self.seed = seed
+ self.window_size = window_size
+
+ def __iter__(self):
+ order = np.arange(len(self.dataset))
+ rnd = None
+ window = 0
+ if self.shuffle:
+ rnd = np.random.RandomState(self.seed)
+ rnd.shuffle(order)
+ window = int(np.rint(order.size * self.window_size))
+
+ idx = 0
+ while True:
+ i = idx % order.size
+ if idx % self.num_replicas == self.rank:
+ yield order[i]
+ if window >= 2:
+ j = (i - rnd.randint(window)) % order.size
+ order[i], order[j] = order[j], order[i]
+ idx += 1
+
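+# Usage sketch (assumes `dataset` is any map-style torch Dataset): the
+# sampler never raises StopIteration, so the loader can be iterated forever.
+#
+#   sampler = InfiniteSampler(dataset, rank=0, num_replicas=1, seed=0)
+#   loader = iter(torch.utils.data.DataLoader(
+#       dataset=dataset, sampler=sampler, batch_size=4))
+#   batch = next(loader)  # safe to call indefinitely
+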
+
+#----------------------------------------------------------------------------
+# Utilities for operating with torch.nn.Module parameters and buffers.
+
+
+def params_and_buffers(module):
+ assert isinstance(module, torch.nn.Module)
+ return list(module.parameters()) + list(module.buffers())
+
+
+def named_params_and_buffers(module):
+ assert isinstance(module, torch.nn.Module)
+ return list(module.named_parameters()) + list(module.named_buffers())
+
+
+def copy_params_and_buffers(src_module, dst_module, require_all=False, load_except=(), model_name=''):
+ assert isinstance(src_module, torch.nn.Module)
+ assert isinstance(dst_module, torch.nn.Module)
+ src_tensors = dict(named_params_and_buffers(src_module))
+ for name, tensor in named_params_and_buffers(dst_module):
+ assert (name in src_tensors) or (not require_all)
+ if name in src_tensors:
+ try:
+ if name in load_except:
+ logger.log('ignoring load_except module:', name)
+ else:
+ tensor.copy_(src_tensors[name].detach()).requires_grad_(
+ tensor.requires_grad)
+ except Exception:
+ print(f'failed to copy param/buffer: {name}')
+
+#----------------------------------------------------------------------------
+# Context manager for easily enabling/disabling DistributedDataParallel
+# synchronization.
+
+
+@contextlib.contextmanager
+def ddp_sync(module, sync):
+ assert isinstance(module, torch.nn.Module)
+ if sync or not isinstance(module,
+ torch.nn.parallel.DistributedDataParallel):
+ yield
+ else:
+ with module.no_sync():
+ yield
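+
+# Usage sketch: skip the gradient all-reduce on all but the last gradient
+# accumulation step (`module` may or may not be wrapped in DDP).
+#
+#   for i, batch in enumerate(batches):
+#       with ddp_sync(module, sync=(i == len(batches) - 1)):
+#           loss(module, batch).backward()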
+
+
+#----------------------------------------------------------------------------
+# Check DistributedDataParallel consistency across processes.
+
+
+def check_ddp_consistency(module, ignore_regex=None):
+ assert isinstance(module, torch.nn.Module)
+ for name, tensor in named_params_and_buffers(module):
+ fullname = type(module).__name__ + '.' + name
+ # print(fullname)
+ if ignore_regex is not None and re.fullmatch(ignore_regex, fullname):
+ continue
+ tensor = tensor.detach()
+ if tensor.is_floating_point():
+ tensor = nan_to_num(tensor)
+ other = tensor.clone()
+ torch.distributed.broadcast(tensor=other, src=0)
+ assert (tensor == other).all(), fullname
+
+
+#----------------------------------------------------------------------------
+# Print summary table of module hierarchy.
+
+
+def print_module_summary(module, inputs, max_nesting=3, skip_redundant=True):
+ assert isinstance(module, torch.nn.Module)
+ assert not isinstance(module, torch.jit.ScriptModule)
+ assert isinstance(inputs, (tuple, list))
+
+ # Register hooks.
+ entries = []
+ nesting = [0]
+
+ def pre_hook(_mod, _inputs):
+ nesting[0] += 1
+
+ def post_hook(mod, _inputs, outputs):
+ nesting[0] -= 1
+ if nesting[0] <= max_nesting:
+ outputs = list(outputs) if isinstance(outputs,
+ (tuple,
+ list)) else [outputs]
+ outputs = [t for t in outputs if isinstance(t, torch.Tensor)]
+ entries.append(dnnlib.EasyDict(mod=mod, outputs=outputs))
+
+ hooks = [
+ mod.register_forward_pre_hook(pre_hook) for mod in module.modules()
+ ]
+ hooks += [mod.register_forward_hook(post_hook) for mod in module.modules()]
+
+ # Run module.
+ outputs = module(*inputs)
+ for hook in hooks:
+ hook.remove()
+
+ # Identify unique outputs, parameters, and buffers.
+ tensors_seen = set()
+ for e in entries:
+ e.unique_params = [
+ t for t in e.mod.parameters() if id(t) not in tensors_seen
+ ]
+ e.unique_buffers = [
+ t for t in e.mod.buffers() if id(t) not in tensors_seen
+ ]
+ e.unique_outputs = [t for t in e.outputs if id(t) not in tensors_seen]
+ tensors_seen |= {
+ id(t)
+ for t in e.unique_params + e.unique_buffers + e.unique_outputs
+ }
+
+ # Filter out redundant entries.
+ if skip_redundant:
+ entries = [
+ e for e in entries if len(e.unique_params) or len(e.unique_buffers)
+ or len(e.unique_outputs)
+ ]
+
+ # Construct table.
+ rows = [[
+ type(module).__name__, 'Parameters', 'Buffers', 'Output shape',
+ 'Datatype'
+ ]]
+ rows += [['---'] * len(rows[0])]
+ param_total = 0
+ buffer_total = 0
+ submodule_names = {mod: name for name, mod in module.named_modules()}
+ for e in entries:
+ name = '' if e.mod is module else submodule_names[e.mod]
+ param_size = sum(t.numel() for t in e.unique_params)
+ buffer_size = sum(t.numel() for t in e.unique_buffers)
+ output_shapes = [str(list(t.shape)) for t in e.outputs]
+ output_dtypes = [str(t.dtype).split('.')[-1] for t in e.outputs]
+ rows += [[
+ name + (':0' if len(e.outputs) >= 2 else ''),
+ str(param_size) if param_size else '-',
+ str(buffer_size) if buffer_size else '-',
+ (output_shapes + ['-'])[0],
+ (output_dtypes + ['-'])[0],
+ ]]
+ for idx in range(1, len(e.outputs)):
+ rows += [[
+ name + f':{idx}', '-', '-', output_shapes[idx],
+ output_dtypes[idx]
+ ]]
+ param_total += param_size
+ buffer_total += buffer_size
+ rows += [['---'] * len(rows[0])]
+ rows += [['Total', str(param_total), str(buffer_total), '-', '-']]
+
+ # Print table.
+ widths = [max(len(cell) for cell in column) for column in zip(*rows)]
+ print()
+ for row in rows:
+ print(' '.join(cell + ' ' * (width - len(cell))
+ for cell, width in zip(row, widths)))
+ print()
+ return outputs
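+
+# Example (illustrative; assumes a StyleGAN-style generator `G` exposing
+# z_dim/c_dim): summarize the module hierarchy on a dummy input batch.
+#
+#   z = torch.zeros(1, G.z_dim, device=device)
+#   c = torch.zeros(1, G.c_dim, device=device)
+#   print_module_summary(G, [z, c], max_nesting=2)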
+
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/ops/__init__.py b/torch_utils/ops/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240
--- /dev/null
+++ b/torch_utils/ops/__init__.py
@@ -0,0 +1,11 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+# empty
diff --git a/torch_utils/ops/__pycache__/__init__.cpython-39.pyc b/torch_utils/ops/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5fbfdd94279ac8b8aeb2e5e7df1caed6fef705df
Binary files /dev/null and b/torch_utils/ops/__pycache__/__init__.cpython-39.pyc differ
diff --git a/torch_utils/ops/__pycache__/bias_act.cpython-39.pyc b/torch_utils/ops/__pycache__/bias_act.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5b41025bf242ab9e9ec2059663159f1b330144d2
Binary files /dev/null and b/torch_utils/ops/__pycache__/bias_act.cpython-39.pyc differ
diff --git a/torch_utils/ops/__pycache__/conv2d_gradfix.cpython-39.pyc b/torch_utils/ops/__pycache__/conv2d_gradfix.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..401735000e2c447afcdbaa50db329d118409241b
Binary files /dev/null and b/torch_utils/ops/__pycache__/conv2d_gradfix.cpython-39.pyc differ
diff --git a/torch_utils/ops/__pycache__/conv2d_resample.cpython-39.pyc b/torch_utils/ops/__pycache__/conv2d_resample.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ec280b11ab057b07f77bef8012ae50c9a039e72
Binary files /dev/null and b/torch_utils/ops/__pycache__/conv2d_resample.cpython-39.pyc differ
diff --git a/torch_utils/ops/__pycache__/fma.cpython-39.pyc b/torch_utils/ops/__pycache__/fma.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..78c6735106f424e42edc477540710983800f8052
Binary files /dev/null and b/torch_utils/ops/__pycache__/fma.cpython-39.pyc differ
diff --git a/torch_utils/ops/__pycache__/upfirdn2d.cpython-39.pyc b/torch_utils/ops/__pycache__/upfirdn2d.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32bc9166e3558337b6f561eb264c30b5f017b5e8
Binary files /dev/null and b/torch_utils/ops/__pycache__/upfirdn2d.cpython-39.pyc differ
diff --git a/torch_utils/ops/bias_act.cpp b/torch_utils/ops/bias_act.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..ee6f6d0caaf4f84b94851d223e384344e1109cdc
--- /dev/null
+++ b/torch_utils/ops/bias_act.cpp
@@ -0,0 +1,103 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <torch/extension.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/cuda/CUDAGuard.h>
+#include "bias_act.h"
+
+//------------------------------------------------------------------------
+
+static bool has_same_layout(torch::Tensor x, torch::Tensor y)
+{
+ if (x.dim() != y.dim())
+ return false;
+ for (int64_t i = 0; i < x.dim(); i++)
+ {
+ if (x.size(i) != y.size(i))
+ return false;
+ if (x.size(i) >= 2 && x.stride(i) != y.stride(i))
+ return false;
+ }
+ return true;
+}
+
+//------------------------------------------------------------------------
+
+static torch::Tensor bias_act(torch::Tensor x, torch::Tensor b, torch::Tensor xref, torch::Tensor yref, torch::Tensor dy, int grad, int dim, int act, float alpha, float gain, float clamp)
+{
+ // Validate arguments.
+ TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+ TORCH_CHECK(b.numel() == 0 || (b.dtype() == x.dtype() && b.device() == x.device()), "b must have the same dtype and device as x");
+ TORCH_CHECK(xref.numel() == 0 || (xref.sizes() == x.sizes() && xref.dtype() == x.dtype() && xref.device() == x.device()), "xref must have the same shape, dtype, and device as x");
+ TORCH_CHECK(yref.numel() == 0 || (yref.sizes() == x.sizes() && yref.dtype() == x.dtype() && yref.device() == x.device()), "yref must have the same shape, dtype, and device as x");
+ TORCH_CHECK(dy.numel() == 0 || (dy.sizes() == x.sizes() && dy.dtype() == x.dtype() && dy.device() == x.device()), "dy must have the same shape, dtype, and device as x");
+ TORCH_CHECK(x.numel() <= INT_MAX, "x is too large");
+ TORCH_CHECK(b.dim() == 1, "b must have rank 1");
+ TORCH_CHECK(b.numel() == 0 || (dim >= 0 && dim < x.dim()), "dim is out of bounds");
+ TORCH_CHECK(b.numel() == 0 || b.numel() == x.size(dim), "b has wrong number of elements");
+ TORCH_CHECK(grad >= 0, "grad must be non-negative");
+
+ // Validate layout.
+ TORCH_CHECK(x.is_non_overlapping_and_dense(), "x must be non-overlapping and dense");
+ TORCH_CHECK(b.is_contiguous(), "b must be contiguous");
+ TORCH_CHECK(xref.numel() == 0 || has_same_layout(xref, x), "xref must have the same layout as x");
+ TORCH_CHECK(yref.numel() == 0 || has_same_layout(yref, x), "yref must have the same layout as x");
+ TORCH_CHECK(dy.numel() == 0 || has_same_layout(dy, x), "dy must have the same layout as x");
+
+ // Create output tensor.
+ const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+ torch::Tensor y = torch::empty_like(x);
+ TORCH_CHECK(has_same_layout(y, x), "y must have the same layout as x");
+
+ // Initialize CUDA kernel parameters.
+ bias_act_kernel_params p;
+ p.x = x.data_ptr();
+ p.b = (b.numel()) ? b.data_ptr() : NULL;
+ p.xref = (xref.numel()) ? xref.data_ptr() : NULL;
+ p.yref = (yref.numel()) ? yref.data_ptr() : NULL;
+ p.dy = (dy.numel()) ? dy.data_ptr() : NULL;
+ p.y = y.data_ptr();
+ p.grad = grad;
+ p.act = act;
+ p.alpha = alpha;
+ p.gain = gain;
+ p.clamp = clamp;
+ p.sizeX = (int)x.numel();
+ p.sizeB = (int)b.numel();
+ p.stepB = (b.numel()) ? (int)x.stride(dim) : 1;
+
+ // Choose CUDA kernel.
+ void* kernel;
+ AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "upfirdn2d_cuda", [&]
+ {
+ kernel = choose_bias_act_kernel<scalar_t>(p);
+ });
+ TORCH_CHECK(kernel, "no CUDA kernel found for the specified activation func");
+
+ // Launch CUDA kernel.
+ p.loopX = 4;
+ int blockSize = 4 * 32;
+ int gridSize = (p.sizeX - 1) / (p.loopX * blockSize) + 1;
+ void* args[] = {&p};
+ AT_CUDA_CHECK(cudaLaunchKernel(kernel, gridSize, blockSize, args, 0, at::cuda::getCurrentCUDAStream()));
+ return y;
+}
+
+//------------------------------------------------------------------------
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m)
+{
+ m.def("bias_act", &bias_act);
+}
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/bias_act.cu b/torch_utils/ops/bias_act.cu
new file mode 100644
index 0000000000000000000000000000000000000000..71ca3900deda41e62d80044f0e409875f4c794b5
--- /dev/null
+++ b/torch_utils/ops/bias_act.cu
@@ -0,0 +1,177 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <c10/util/Half.h>
+#include "bias_act.h"
+
+//------------------------------------------------------------------------
+// Helpers.
+
+template <class T> struct InternalType;
+template <> struct InternalType<double> { typedef double scalar_t; };
+template <> struct InternalType<float> { typedef float scalar_t; };
+template <> struct InternalType<c10::Half> { typedef float scalar_t; };
+
+//------------------------------------------------------------------------
+// CUDA kernel.
+
+template <class T, int A>
+__global__ void bias_act_kernel(bias_act_kernel_params p)
+{
+ typedef typename InternalType::scalar_t scalar_t;
+ int G = p.grad;
+ scalar_t alpha = (scalar_t)p.alpha;
+ scalar_t gain = (scalar_t)p.gain;
+ scalar_t clamp = (scalar_t)p.clamp;
+ scalar_t one = (scalar_t)1;
+ scalar_t two = (scalar_t)2;
+ scalar_t expRange = (scalar_t)80;
+ scalar_t halfExpRange = (scalar_t)40;
+ scalar_t seluScale = (scalar_t)1.0507009873554804934193349852946;
+ scalar_t seluAlpha = (scalar_t)1.6732632423543772848170429916717;
+
+ // Loop over elements.
+ int xi = blockIdx.x * p.loopX * blockDim.x + threadIdx.x;
+ for (int loopIdx = 0; loopIdx < p.loopX && xi < p.sizeX; loopIdx++, xi += blockDim.x)
+ {
+ // Load.
+ scalar_t x = (scalar_t)((const T*)p.x)[xi];
+ scalar_t b = (p.b) ? (scalar_t)((const T*)p.b)[(xi / p.stepB) % p.sizeB] : 0;
+ scalar_t xref = (p.xref) ? (scalar_t)((const T*)p.xref)[xi] : 0;
+ scalar_t yref = (p.yref) ? (scalar_t)((const T*)p.yref)[xi] : 0;
+ scalar_t dy = (p.dy) ? (scalar_t)((const T*)p.dy)[xi] : one;
+ scalar_t yy = (gain != 0) ? yref / gain : 0;
+ scalar_t y = 0;
+
+ // Apply bias.
+ ((G == 0) ? x : xref) += b;
+
+ // linear
+ if (A == 1)
+ {
+ if (G == 0) y = x;
+ if (G == 1) y = x;
+ }
+
+ // relu
+ if (A == 2)
+ {
+ if (G == 0) y = (x > 0) ? x : 0;
+ if (G == 1) y = (yy > 0) ? x : 0;
+ }
+
+ // lrelu
+ if (A == 3)
+ {
+ if (G == 0) y = (x > 0) ? x : x * alpha;
+ if (G == 1) y = (yy > 0) ? x : x * alpha;
+ }
+
+ // tanh
+ if (A == 4)
+ {
+ if (G == 0) { scalar_t c = exp(x); scalar_t d = one / c; y = (x < -expRange) ? -one : (x > expRange) ? one : (c - d) / (c + d); }
+ if (G == 1) y = x * (one - yy * yy);
+ if (G == 2) y = x * (one - yy * yy) * (-two * yy);
+ }
+
+ // sigmoid
+ if (A == 5)
+ {
+ if (G == 0) y = (x < -expRange) ? 0 : one / (exp(-x) + one);
+ if (G == 1) y = x * yy * (one - yy);
+ if (G == 2) y = x * yy * (one - yy) * (one - two * yy);
+ }
+
+ // elu
+ if (A == 6)
+ {
+ if (G == 0) y = (x >= 0) ? x : exp(x) - one;
+ if (G == 1) y = (yy >= 0) ? x : x * (yy + one);
+ if (G == 2) y = (yy >= 0) ? 0 : x * (yy + one);
+ }
+
+ // selu
+ if (A == 7)
+ {
+ if (G == 0) y = (x >= 0) ? seluScale * x : (seluScale * seluAlpha) * (exp(x) - one);
+ if (G == 1) y = (yy >= 0) ? x * seluScale : x * (yy + seluScale * seluAlpha);
+ if (G == 2) y = (yy >= 0) ? 0 : x * (yy + seluScale * seluAlpha);
+ }
+
+ // softplus
+ if (A == 8)
+ {
+ if (G == 0) y = (x > expRange) ? x : log(exp(x) + one);
+ if (G == 1) y = x * (one - exp(-yy));
+ if (G == 2) { scalar_t c = exp(-yy); y = x * c * (one - c); }
+ }
+
+ // swish
+ if (A == 9)
+ {
+ if (G == 0)
+ y = (x < -expRange) ? 0 : x / (exp(-x) + one);
+ else
+ {
+ scalar_t c = exp(xref);
+ scalar_t d = c + one;
+ if (G == 1)
+ y = (xref > halfExpRange) ? x : x * c * (xref + d) / (d * d);
+ else
+ y = (xref > halfExpRange) ? 0 : x * c * (xref * (two - d) + two * d) / (d * d * d);
+ yref = (xref < -expRange) ? 0 : xref / (exp(-xref) + one) * gain;
+ }
+ }
+
+ // Apply gain.
+ y *= gain * dy;
+
+ // Clamp.
+ if (clamp >= 0)
+ {
+ if (G == 0)
+ y = (y > -clamp & y < clamp) ? y : (y >= 0) ? clamp : -clamp;
+ else
+ y = (yref > -clamp & yref < clamp) ? y : 0;
+ }
+
+ // Store.
+ ((T*)p.y)[xi] = (T)y;
+ }
+}
+
+//------------------------------------------------------------------------
+// CUDA kernel selection.
+
+template <class T> void* choose_bias_act_kernel(const bias_act_kernel_params& p)
+{
+ if (p.act == 1) return (void*)bias_act_kernel<T, 1>;
+ if (p.act == 2) return (void*)bias_act_kernel<T, 2>;
+ if (p.act == 3) return (void*)bias_act_kernel<T, 3>;
+ if (p.act == 4) return (void*)bias_act_kernel<T, 4>;
+ if (p.act == 5) return (void*)bias_act_kernel<T, 5>;
+ if (p.act == 6) return (void*)bias_act_kernel<T, 6>;
+ if (p.act == 7) return (void*)bias_act_kernel<T, 7>;
+ if (p.act == 8) return (void*)bias_act_kernel<T, 8>;
+ if (p.act == 9) return (void*)bias_act_kernel<T, 9>;
+ return NULL;
+}
+
+//------------------------------------------------------------------------
+// Template specializations.
+
+template void* choose_bias_act_kernel<double> (const bias_act_kernel_params& p);
+template void* choose_bias_act_kernel<float> (const bias_act_kernel_params& p);
+template void* choose_bias_act_kernel<c10::Half> (const bias_act_kernel_params& p);
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/bias_act.h b/torch_utils/ops/bias_act.h
new file mode 100644
index 0000000000000000000000000000000000000000..8994bfb4e9cae790865348e08de5f685152d3344
--- /dev/null
+++ b/torch_utils/ops/bias_act.h
@@ -0,0 +1,42 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+//------------------------------------------------------------------------
+// CUDA kernel parameters.
+
+struct bias_act_kernel_params
+{
+ const void* x; // [sizeX]
+ const void* b; // [sizeB] or NULL
+ const void* xref; // [sizeX] or NULL
+ const void* yref; // [sizeX] or NULL
+ const void* dy; // [sizeX] or NULL
+ void* y; // [sizeX]
+
+ int grad;
+ int act;
+ float alpha;
+ float gain;
+ float clamp;
+
+ int sizeX;
+ int sizeB;
+ int stepB;
+ int loopX;
+};
+
+//------------------------------------------------------------------------
+// CUDA kernel selection.
+
+template <class T> void* choose_bias_act_kernel(const bias_act_kernel_params& p);
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/bias_act.py b/torch_utils/ops/bias_act.py
new file mode 100644
index 0000000000000000000000000000000000000000..16a8ec0527471d0774873eba378665014a9b30b8
--- /dev/null
+++ b/torch_utils/ops/bias_act.py
@@ -0,0 +1,308 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Custom PyTorch ops for efficient bias and activation."""
+
+import os
+import numpy as np
+import torch
+import dnnlib
+
+from .. import custom_ops
+from .. import misc
+
+#----------------------------------------------------------------------------
+
+activation_funcs = {
+ 'linear':
+ dnnlib.EasyDict(func=lambda x, **_: x,
+ def_alpha=0,
+ def_gain=1,
+ cuda_idx=1,
+ ref='',
+ has_2nd_grad=False),
+ 'relu':
+ dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.relu(x),
+ def_alpha=0,
+ def_gain=np.sqrt(2),
+ cuda_idx=2,
+ ref='y',
+ has_2nd_grad=False),
+ 'lrelu':
+ dnnlib.EasyDict(
+ func=lambda x, alpha, **_: torch.nn.functional.leaky_relu(x, alpha),
+ def_alpha=0.2,
+ def_gain=np.sqrt(2),
+ cuda_idx=3,
+ ref='y',
+ has_2nd_grad=False),
+ 'tanh':
+ dnnlib.EasyDict(func=lambda x, **_: torch.tanh(x),
+ def_alpha=0,
+ def_gain=1,
+ cuda_idx=4,
+ ref='y',
+ has_2nd_grad=True),
+ 'sigmoid':
+ dnnlib.EasyDict(func=lambda x, **_: torch.sigmoid(x),
+ def_alpha=0,
+ def_gain=1,
+ cuda_idx=5,
+ ref='y',
+ has_2nd_grad=True),
+ 'elu':
+ dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.elu(x),
+ def_alpha=0,
+ def_gain=1,
+ cuda_idx=6,
+ ref='y',
+ has_2nd_grad=True),
+ 'selu':
+ dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.selu(x),
+ def_alpha=0,
+ def_gain=1,
+ cuda_idx=7,
+ ref='y',
+ has_2nd_grad=True),
+ 'softplus':
+ dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.softplus(x),
+ def_alpha=0,
+ def_gain=1,
+ cuda_idx=8,
+ ref='y',
+ has_2nd_grad=True),
+ 'swish':
+ dnnlib.EasyDict(func=lambda x, **_: torch.sigmoid(x) * x,
+ def_alpha=0,
+ def_gain=np.sqrt(2),
+ cuda_idx=9,
+ ref='x',
+ has_2nd_grad=True),
+}
+
+#----------------------------------------------------------------------------
+
+_plugin = None
+_null_tensor = torch.empty([0])
+
+
+def _init():
+ global _plugin
+ if _plugin is None:
+ _plugin = custom_ops.get_plugin(
+ module_name='bias_act_plugin',
+ sources=['bias_act.cpp', 'bias_act.cu'],
+ headers=['bias_act.h'],
+ source_dir=os.path.dirname(__file__),
+ extra_cuda_cflags=['--use_fast_math'],
+ )
+ return True
+
+
+#----------------------------------------------------------------------------
+
+
+# @torch.autocast(device_type='cuda')
+def bias_act(x,
+ b=None,
+ dim=1,
+ act='linear',
+ alpha=None,
+ gain=None,
+ clamp=None,
+ impl='cuda'):
+ r"""Fused bias and activation function.
+
+ Adds bias `b` to activation tensor `x`, evaluates activation function `act`,
+ and scales the result by `gain`. Each of the steps is optional. In most cases,
+ the fused op is considerably more efficient than performing the same calculation
+ using standard PyTorch ops. It supports first and second order gradients,
+ but not third order gradients.
+
+ Args:
+ x: Input activation tensor. Can be of any shape.
+ b: Bias vector, or `None` to disable. Must be a 1D tensor of the same type
+ as `x`. The shape must be known, and it must match the dimension of `x`
+ corresponding to `dim`.
+ dim: The dimension in `x` corresponding to the elements of `b`.
+ The value of `dim` is ignored if `b` is not specified.
+ act: Name of the activation function to evaluate, or `"linear"` to disable.
+ Can be e.g. `"relu"`, `"lrelu"`, `"tanh"`, `"sigmoid"`, `"swish"`, etc.
+ See `activation_funcs` for a full list. `None` is not allowed.
+ alpha: Shape parameter for the activation function, or `None` to use the default.
+ gain: Scaling factor for the output tensor, or `None` to use default.
+ See `activation_funcs` for the default scaling of each activation function.
+ If unsure, consider specifying 1.
+ clamp: Clamp the output values to `[-clamp, +clamp]`, or `None` to disable
+ the clamping (default).
+ impl: Name of the implementation to use. Can be `"ref"` or `"cuda"` (default).
+
+ Returns:
+ Tensor of the same shape and datatype as `x`.
+ """
+ assert isinstance(x, torch.Tensor)
+ assert impl in ['ref', 'cuda']
+ if impl == 'cuda' and x.device.type == 'cuda' and _init():
+ return _bias_act_cuda(dim=dim,
+ act=act,
+ alpha=alpha,
+ gain=gain,
+ clamp=clamp).apply(x, b)
+ return _bias_act_ref(x=x,
+ b=b,
+ dim=dim,
+ act=act,
+ alpha=alpha,
+ gain=gain,
+ clamp=clamp)
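+
+# Usage sketch: fused bias + leaky ReLU via the reference path; note that
+# impl='cuda' transparently falls back to _bias_act_ref on CPU tensors.
+#
+#   x = torch.randn(4, 512, 32, 32)
+#   b = torch.randn(512)
+#   y = bias_act(x, b, dim=1, act='lrelu', gain=np.sqrt(2), impl='ref')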
+
+
+#----------------------------------------------------------------------------
+
+
+@misc.profiled_function
+def _bias_act_ref(x,
+ b=None,
+ dim=1,
+ act='linear',
+ alpha=None,
+ gain=None,
+ clamp=None):
+ """Slow reference implementation of `bias_act()` using standard TensorFlow ops.
+ """
+ assert isinstance(x, torch.Tensor)
+ assert clamp is None or clamp >= 0
+ spec = activation_funcs[act]
+ alpha = float(alpha if alpha is not None else spec.def_alpha)
+ gain = float(gain if gain is not None else spec.def_gain)
+ clamp = float(clamp if clamp is not None else -1)
+
+ # Add bias.
+ if b is not None:
+ assert isinstance(b, torch.Tensor) and b.ndim == 1
+ assert 0 <= dim < x.ndim
+ assert b.shape[0] == x.shape[dim]
+ x = x + b.reshape([-1 if i == dim else 1 for i in range(x.ndim)])
+
+ # Evaluate activation function.
+ alpha = float(alpha)
+ x = spec.func(x, alpha=alpha)
+
+ # Scale by gain.
+ gain = float(gain)
+ if gain != 1:
+ x = x * gain
+
+ # Clamp.
+ if clamp >= 0:
+ x = x.clamp(-clamp, clamp) # pylint: disable=invalid-unary-operand-type
+ return x
+
+
+#----------------------------------------------------------------------------
+
+_bias_act_cuda_cache = dict()
+
+
+# @torch.autocast(device_type='cuda')
+def _bias_act_cuda(dim=1, act='linear', alpha=None, gain=None, clamp=None):
+ """Fast CUDA implementation of `bias_act()` using custom ops.
+ """
+ # Parse arguments.
+ assert clamp is None or clamp >= 0
+ spec = activation_funcs[act]
+ alpha = float(alpha if alpha is not None else spec.def_alpha)
+ gain = float(gain if gain is not None else spec.def_gain)
+ clamp = float(clamp if clamp is not None else -1)
+
+ # Lookup from cache.
+ key = (dim, act, alpha, gain, clamp)
+ if key in _bias_act_cuda_cache:
+ return _bias_act_cuda_cache[key]
+
+ # Forward op.
+ class BiasActCuda(torch.autograd.Function):
+ @staticmethod
+ # @torch.cuda.amp.custom_fwd
+ def forward(ctx, x, b): # pylint: disable=arguments-differ
+ ctx.memory_format = torch.channels_last if x.ndim > 2 and x.stride(
+ 1) == 1 else torch.contiguous_format
+ x = x.contiguous(memory_format=ctx.memory_format)
+ b = b.contiguous() if b is not None else _null_tensor
+ y = x
+ if act != 'linear' or gain != 1 or clamp >= 0 or b is not _null_tensor:
+ y = _plugin.bias_act(x, b, _null_tensor, _null_tensor,
+ _null_tensor, 0, dim, spec.cuda_idx,
+ alpha, gain, clamp)
+ ctx.save_for_backward(
+ x if 'x' in spec.ref or spec.has_2nd_grad else _null_tensor,
+ b if 'x' in spec.ref or spec.has_2nd_grad else _null_tensor,
+ y if 'y' in spec.ref else _null_tensor)
+ return y
+
+ @staticmethod
+ # @torch.cuda.amp.custom_bwd
+ def backward(ctx, dy): # pylint: disable=arguments-differ
+ dy = dy.contiguous(memory_format=ctx.memory_format)
+ x, b, y = ctx.saved_tensors
+ dx = None
+ db = None
+
+ if ctx.needs_input_grad[0] or ctx.needs_input_grad[1]:
+ dx = dy
+ if act != 'linear' or gain != 1 or clamp >= 0:
+ dx = BiasActCudaGrad.apply(dy, x, b, y)
+
+ if ctx.needs_input_grad[1]:
+ db = dx.sum([i for i in range(dx.ndim) if i != dim])
+
+ return dx, db
+
+ # Backward op.
+ class BiasActCudaGrad(torch.autograd.Function):
+ @staticmethod
+ # @torch.cuda.amp.custom_fwd
+ def forward(ctx, dy, x, b, y): # pylint: disable=arguments-differ
+ ctx.memory_format = torch.channels_last if dy.ndim > 2 and dy.stride(
+ 1) == 1 else torch.contiguous_format
+ dx = _plugin.bias_act(dy, b, x, y, _null_tensor, 1, dim,
+ spec.cuda_idx, alpha, gain, clamp)
+ ctx.save_for_backward(dy if spec.has_2nd_grad else _null_tensor, x,
+ b, y)
+ return dx
+
+ @staticmethod
+ # @torch.cuda.amp.custom_bwd
+ def backward(ctx, d_dx): # pylint: disable=arguments-differ
+ d_dx = d_dx.contiguous(memory_format=ctx.memory_format)
+ dy, x, b, y = ctx.saved_tensors
+ d_dy = None
+ d_x = None
+ d_b = None
+ d_y = None
+
+ if ctx.needs_input_grad[0]:
+ d_dy = BiasActCudaGrad.apply(d_dx, x, b, y)
+
+ if spec.has_2nd_grad and (ctx.needs_input_grad[1]
+ or ctx.needs_input_grad[2]):
+ d_x = _plugin.bias_act(d_dx, b, x, y, dy, 2, dim,
+ spec.cuda_idx, alpha, gain, clamp)
+
+ if spec.has_2nd_grad and ctx.needs_input_grad[2]:
+ d_b = d_x.sum([i for i in range(d_x.ndim) if i != dim])
+
+ return d_dy, d_x, d_b, d_y
+
+ # Add to cache.
+ _bias_act_cuda_cache[key] = BiasActCuda
+ return BiasActCuda
+
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/ops/conv2d_gradfix.py b/torch_utils/ops/conv2d_gradfix.py
new file mode 100644
index 0000000000000000000000000000000000000000..c539b82a2a580ac301af741a65a745f9a72ef3d7
--- /dev/null
+++ b/torch_utils/ops/conv2d_gradfix.py
@@ -0,0 +1,302 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Custom replacement for `torch.nn.functional.conv2d` that supports
+arbitrarily high order gradients with zero performance penalty."""
+
+import contextlib
+import torch
+
+# pylint: disable=redefined-builtin
+# pylint: disable=arguments-differ
+# pylint: disable=protected-access
+
+#----------------------------------------------------------------------------
+
+enabled = False # Enable the custom op by setting this to true.
+weight_gradients_disabled = False # Forcefully disable computation of gradients with respect to the weights.
+
+
+@contextlib.contextmanager
+def no_weight_gradients(disable=True):
+ global weight_gradients_disabled
+ old = weight_gradients_disabled
+ if disable:
+ weight_gradients_disabled = True
+ yield
+ weight_gradients_disabled = old
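+
+# Usage sketch: regularization passes (e.g. R1 or path-length penalties) can
+# skip weight gradients entirely while still differentiating w.r.t. the input.
+#
+#   with no_weight_gradients():
+#       grads = torch.autograd.grad(out.sum(), [inp], create_graph=True)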
+
+
+#----------------------------------------------------------------------------
+
+
+def conv2d(input,
+ weight,
+ bias=None,
+ stride=1,
+ padding=0,
+ dilation=1,
+ groups=1):
+ if _should_use_custom_op(input):
+ return _conv2d_gradfix(transpose=False,
+ weight_shape=weight.shape,
+ stride=stride,
+ padding=padding,
+ output_padding=0,
+ dilation=dilation,
+ groups=groups).apply(input, weight, bias)
+ return torch.nn.functional.conv2d(input=input,
+ weight=weight,
+ bias=bias,
+ stride=stride,
+ padding=padding,
+ dilation=dilation,
+ groups=groups)
+
+
+def conv_transpose2d(input,
+ weight,
+ bias=None,
+ stride=1,
+ padding=0,
+ output_padding=0,
+ groups=1,
+ dilation=1):
+ if _should_use_custom_op(input):
+ return _conv2d_gradfix(transpose=True,
+ weight_shape=weight.shape,
+ stride=stride,
+ padding=padding,
+ output_padding=output_padding,
+ groups=groups,
+ dilation=dilation).apply(input, weight, bias)
+ return torch.nn.functional.conv_transpose2d(input=input,
+ weight=weight,
+ bias=bias,
+ stride=stride,
+ padding=padding,
+ output_padding=output_padding,
+ groups=groups,
+ dilation=dilation)
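+
+# Usage sketch: these wrappers are drop-in replacements for the
+# torch.nn.functional versions; the custom op only activates when the
+# module-level `enabled` flag is set and the input lives on a CUDA device.
+#
+#   from torch_utils.ops import conv2d_gradfix
+#   conv2d_gradfix.enabled = True
+#   y = conv2d_gradfix.conv2d(x, w, padding=1)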
+
+
+#----------------------------------------------------------------------------
+
+
+def _should_use_custom_op(input):
+ assert isinstance(input, torch.Tensor)
+ if (not enabled) or (not torch.backends.cudnn.enabled):
+ return False
+ if input.device.type != 'cuda':
+ return False
+ return True
+
+
+def _tuple_of_ints(xs, ndim):
+ xs = tuple(xs) if isinstance(xs, (tuple, list)) else (xs, ) * ndim
+ assert len(xs) == ndim
+ assert all(isinstance(x, int) for x in xs)
+ return xs
+
+
+#----------------------------------------------------------------------------
+
+_conv2d_gradfix_cache = dict()
+_null_tensor = torch.empty([0])
+
+
+def _conv2d_gradfix(transpose, weight_shape, stride, padding, output_padding,
+ dilation, groups):
+ # Parse arguments.
+ ndim = 2
+ weight_shape = tuple(weight_shape)
+ stride = _tuple_of_ints(stride, ndim)
+ padding = _tuple_of_ints(padding, ndim)
+ output_padding = _tuple_of_ints(output_padding, ndim)
+ dilation = _tuple_of_ints(dilation, ndim)
+
+ # Lookup from cache.
+ key = (transpose, weight_shape, stride, padding, output_padding, dilation,
+ groups)
+ if key in _conv2d_gradfix_cache:
+ return _conv2d_gradfix_cache[key]
+
+ # Validate arguments.
+ assert groups >= 1
+ assert len(weight_shape) == ndim + 2
+ assert all(stride[i] >= 1 for i in range(ndim))
+ assert all(padding[i] >= 0 for i in range(ndim))
+ assert all(dilation[i] >= 0 for i in range(ndim))
+ if not transpose:
+ assert all(output_padding[i] == 0 for i in range(ndim))
+ else: # transpose
+ assert all(0 <= output_padding[i] < max(stride[i], dilation[i])
+ for i in range(ndim))
+
+ # Helpers.
+ common_kwargs = dict(stride=stride,
+ padding=padding,
+ dilation=dilation,
+ groups=groups)
+
+ def calc_output_padding(input_shape, output_shape):
+ if transpose:
+ return [0, 0]
+ return [
+ input_shape[i + 2] - (output_shape[i + 2] - 1) * stride[i] -
+ (1 - 2 * padding[i]) - dilation[i] * (weight_shape[i + 2] - 1)
+ for i in range(ndim)
+ ]
+
+ # Forward & backward.
+ class Conv2d(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, input, weight, bias):
+ assert weight.shape == weight_shape
+ ctx.save_for_backward(
+ input if weight.requires_grad else _null_tensor,
+ weight if input.requires_grad else _null_tensor,
+ )
+ ctx.input_shape = input.shape
+
+ # Simple 1x1 convolution => cuBLAS (only on Volta, not on Ampere).
+ if weight_shape[2:] == stride == dilation == (
+ 1, 1) and padding == (
+ 0, 0) and torch.cuda.get_device_capability(
+ input.device) < (8, 0):
+ a = weight.reshape(groups, weight_shape[0] // groups,
+ weight_shape[1])
+ b = input.reshape(input.shape[0], groups,
+ input.shape[1] // groups, -1)
+ c = (a.transpose(1, 2) if transpose else a) @ b.permute(
+ 1, 2, 0, 3).flatten(2)
+ c = c.reshape(-1, input.shape[0],
+ *input.shape[2:]).transpose(0, 1)
+ c = c if bias is None else c + bias.unsqueeze(0).unsqueeze(
+ 2).unsqueeze(3)
+ return c.contiguous(
+ memory_format=(torch.channels_last if input.stride(1) ==
+ 1 else torch.contiguous_format))
+
+ # General case => cuDNN.
+ if transpose:
+ return torch.nn.functional.conv_transpose2d(
+ input=input,
+ weight=weight,
+ bias=bias,
+ output_padding=output_padding,
+ **common_kwargs)
+ return torch.nn.functional.conv2d(input=input,
+ weight=weight,
+ bias=bias,
+ **common_kwargs)
+
+ @staticmethod
+ def backward(ctx, grad_output):
+ input, weight = ctx.saved_tensors
+ input_shape = ctx.input_shape
+ grad_input = None
+ grad_weight = None
+ grad_bias = None
+
+ if ctx.needs_input_grad[0]:
+ p = calc_output_padding(input_shape=input_shape,
+ output_shape=grad_output.shape)
+ op = _conv2d_gradfix(transpose=(not transpose),
+ weight_shape=weight_shape,
+ output_padding=p,
+ **common_kwargs)
+ grad_input = op.apply(grad_output, weight, None)
+ assert grad_input.shape == input_shape
+
+ if ctx.needs_input_grad[1] and not weight_gradients_disabled:
+ grad_weight = Conv2dGradWeight.apply(grad_output, input,
+ weight)
+ assert grad_weight.shape == weight_shape
+
+ if ctx.needs_input_grad[2]:
+ grad_bias = grad_output.sum([0, 2, 3])
+
+ return grad_input, grad_weight, grad_bias
+
+ # Gradient with respect to the weights.
+ class Conv2dGradWeight(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, grad_output, input, weight):
+ ctx.save_for_backward(
+ grad_output if input.requires_grad else _null_tensor,
+ input if grad_output.requires_grad else _null_tensor,
+ )
+ ctx.grad_output_shape = grad_output.shape
+ ctx.input_shape = input.shape
+
+ # Simple 1x1 convolution => cuBLAS (on both Volta and Ampere).
+ if weight_shape[2:] == stride == dilation == (
+ 1, 1) and padding == (0, 0):
+ a = grad_output.reshape(grad_output.shape[0], groups,
+ grad_output.shape[1] // groups,
+ -1).permute(1, 2, 0, 3).flatten(2)
+ b = input.reshape(input.shape[0], groups,
+ input.shape[1] // groups,
+ -1).permute(1, 2, 0, 3).flatten(2)
+ c = (b @ a.transpose(1, 2) if transpose else
+ a @ b.transpose(1, 2)).reshape(weight_shape)
+ return c.contiguous(
+ memory_format=(torch.channels_last if input.stride(1) ==
+ 1 else torch.contiguous_format))
+
+ # General case => cuDNN.
+ return torch.ops.aten.convolution_backward(
+ grad_output=grad_output,
+ input=input,
+ weight=weight,
+ bias_sizes=None,
+ stride=stride,
+ padding=padding,
+ dilation=dilation,
+ transposed=transpose,
+ output_padding=output_padding,
+ groups=groups,
+ output_mask=[False, True, False])[1]
+
+ @staticmethod
+ def backward(ctx, grad2_grad_weight):
+ grad_output, input = ctx.saved_tensors
+ grad_output_shape = ctx.grad_output_shape
+ input_shape = ctx.input_shape
+ grad2_grad_output = None
+ grad2_input = None
+
+ if ctx.needs_input_grad[0]:
+ grad2_grad_output = Conv2d.apply(input, grad2_grad_weight,
+ None)
+ assert grad2_grad_output.shape == grad_output_shape
+
+ if ctx.needs_input_grad[1]:
+ p = calc_output_padding(input_shape=input_shape,
+ output_shape=grad_output_shape)
+ op = _conv2d_gradfix(transpose=(not transpose),
+ weight_shape=weight_shape,
+ output_padding=p,
+ **common_kwargs)
+ grad2_input = op.apply(grad_output, grad2_grad_weight, None)
+ assert grad2_input.shape == input_shape
+
+ return grad2_grad_output, grad2_input
+
+ _conv2d_gradfix_cache[key] = Conv2d
+ return Conv2d
+
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/ops/conv2d_resample.py b/torch_utils/ops/conv2d_resample.py
new file mode 100644
index 0000000000000000000000000000000000000000..df7bea8fd7d0b921227cec546bebbd0e836d9da8
--- /dev/null
+++ b/torch_utils/ops/conv2d_resample.py
@@ -0,0 +1,208 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""2D convolution with optional up/downsampling."""
+
+import torch
+
+from .. import misc
+from . import conv2d_gradfix
+from . import upfirdn2d
+from .upfirdn2d import _parse_padding
+from .upfirdn2d import _get_filter_size
+
+#----------------------------------------------------------------------------
+
+
+def _get_weight_shape(w):
+ with misc.suppress_tracer_warnings(
+ ): # this value will be treated as a constant
+ shape = [int(sz) for sz in w.shape]
+ misc.assert_shape(w, shape)
+ return shape
+
+
+#----------------------------------------------------------------------------
+
+
+def _conv2d_wrapper(x,
+ w,
+ stride=1,
+ padding=0,
+ groups=1,
+ transpose=False,
+ flip_weight=True):
+ """Wrapper for the underlying `conv2d()` and `conv_transpose2d()` implementations.
+ """
+ _out_channels, _in_channels_per_group, kh, kw = _get_weight_shape(w)
+
+ # Flip weight if requested.
+ # Note: conv2d() actually performs correlation (flip_weight=True) not convolution (flip_weight=False).
+ if not flip_weight and (kw > 1 or kh > 1):
+ w = w.flip([2, 3])
+
+ # Execute using conv2d_gradfix.
+ op = conv2d_gradfix.conv_transpose2d if transpose else conv2d_gradfix.conv2d
+ return op(x, w, stride=stride, padding=padding, groups=groups)
+
+
+#----------------------------------------------------------------------------
+
+
+@misc.profiled_function
+def conv2d_resample(x,
+ w,
+ f=None,
+ up=1,
+ down=1,
+ padding=0,
+ groups=1,
+ flip_weight=True,
+ flip_filter=False):
+ r"""2D convolution with optional up/downsampling.
+
+ Padding is performed only once at the beginning, not between the operations.
+
+ Args:
+ x: Input tensor of shape
+ `[batch_size, in_channels, in_height, in_width]`.
+ w: Weight tensor of shape
+ `[out_channels, in_channels//groups, kernel_height, kernel_width]`.
+ f: Low-pass filter for up/downsampling. Must be prepared beforehand by
+ calling upfirdn2d.setup_filter(). None = identity (default).
+ up: Integer upsampling factor (default: 1).
+ down: Integer downsampling factor (default: 1).
+ padding: Padding with respect to the upsampled image. Can be a single number
+ or a list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]`
+ (default: 0).
+ groups: Split input channels into N groups (default: 1).
+ flip_weight: False = convolution, True = correlation (default: True).
+ flip_filter: False = convolution, True = correlation (default: False).
+
+ Returns:
+ Tensor of the shape `[batch_size, num_channels, out_height, out_width]`.
+ """
+ # Validate arguments.
+ assert isinstance(x, torch.Tensor) and (x.ndim == 4)
+ assert isinstance(w, torch.Tensor) and (w.ndim == 4) and (w.dtype == x.dtype)
+ assert f is None or (isinstance(f, torch.Tensor) and f.ndim in [1, 2] and f.dtype == torch.float32)
+ assert isinstance(up, int) and (up >= 1)
+ assert isinstance(down, int) and (down >= 1)
+ assert isinstance(groups, int) and (groups >= 1)
+ out_channels, in_channels_per_group, kh, kw = _get_weight_shape(w)
+ fw, fh = _get_filter_size(f)
+ px0, px1, py0, py1 = _parse_padding(padding)
+
+ # Adjust padding to account for up/downsampling.
+ if up > 1:
+ px0 += (fw + up - 1) // 2
+ px1 += (fw - up) // 2
+ py0 += (fh + up - 1) // 2
+ py1 += (fh - up) // 2
+ if down > 1:
+ px0 += (fw - down + 1) // 2
+ px1 += (fw - down) // 2
+ py0 += (fh - down + 1) // 2
+ py1 += (fh - down) // 2
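+
+ # Worked example (illustrative, not part of the original code): with up=2 and a
+ # 4-tap filter (fw == fh == 4), the block above adds (4+2-1)//2 == 2 to px0 and
+ # (4-2)//2 == 1 to px1, keeping the filter footprint centered on the upsampled grid.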
+
+ # Fast path: 1x1 convolution with downsampling only => downsample first, then convolve.
+ if kw == 1 and kh == 1 and (down > 1 and up == 1):
+ x = upfirdn2d.upfirdn2d(x=x,
+ f=f,
+ down=down,
+ padding=[px0, px1, py0, py1],
+ flip_filter=flip_filter)
+ x = _conv2d_wrapper(x=x, w=w, groups=groups, flip_weight=flip_weight)
+ return x
+
+ # Fast path: 1x1 convolution with upsampling only => convolve first, then upsample.
+ if kw == 1 and kh == 1 and (up > 1 and down == 1):
+ x = _conv2d_wrapper(x=x, w=w, groups=groups, flip_weight=flip_weight)
+ x = upfirdn2d.upfirdn2d(x=x,
+ f=f,
+ up=up,
+ padding=[px0, px1, py0, py1],
+ gain=up**2,
+ flip_filter=flip_filter)
+ return x
+
+ # Fast path: downsampling only => use strided convolution.
+ if down > 1 and up == 1:
+ x = upfirdn2d.upfirdn2d(x=x,
+ f=f,
+ padding=[px0, px1, py0, py1],
+ flip_filter=flip_filter)
+ x = _conv2d_wrapper(x=x,
+ w=w,
+ stride=down,
+ groups=groups,
+ flip_weight=flip_weight)
+ return x
+
+ # Fast path: upsampling with optional downsampling => use transpose strided convolution.
+ if up > 1:
+ if groups == 1:
+ w = w.transpose(0, 1)
+ else:
+ w = w.reshape(groups, out_channels // groups,
+ in_channels_per_group, kh, kw)
+ w = w.transpose(1, 2)
+ w = w.reshape(groups * in_channels_per_group,
+ out_channels // groups, kh, kw)
+ px0 -= kw - 1
+ px1 -= kw - up
+ py0 -= kh - 1
+ py1 -= kh - up
+ pxt = max(min(-px0, -px1), 0)
+ pyt = max(min(-py0, -py1), 0)
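+ # Note: conv_transpose2d() only supports non-negative symmetric padding, so the
+ # largest such amount (pxt, pyt) is folded into the op itself and the remainder
+ # is applied by the upfirdn2d() call below.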
+ x = _conv2d_wrapper(x=x,
+ w=w,
+ stride=up,
+ padding=[pyt, pxt],
+ groups=groups,
+ transpose=True,
+ flip_weight=(not flip_weight))
+ x = upfirdn2d.upfirdn2d(
+ x=x,
+ f=f,
+ padding=[px0 + pxt, px1 + pxt, py0 + pyt, py1 + pyt],
+ gain=up**2,
+ flip_filter=flip_filter)
+ if down > 1:
+ x = upfirdn2d.upfirdn2d(x=x,
+ f=f,
+ down=down,
+ flip_filter=flip_filter)
+ return x
+
+ # Fast path: no up/downsampling, padding supported by the underlying implementation => use plain conv2d.
+ if up == 1 and down == 1:
+ if px0 == px1 and py0 == py1 and px0 >= 0 and py0 >= 0:
+ return _conv2d_wrapper(x=x,
+ w=w,
+ padding=[py0, px0],
+ groups=groups,
+ flip_weight=flip_weight)
+
+ # Fallback: Generic reference implementation.
+ x = upfirdn2d.upfirdn2d(x=x,
+ f=(f if up > 1 else None),
+ up=up,
+ padding=[px0, px1, py0, py1],
+ gain=up**2,
+ flip_filter=flip_filter)
+ x = _conv2d_wrapper(x=x, w=w, groups=groups, flip_weight=flip_weight)
+ if down > 1:
+ x = upfirdn2d.upfirdn2d(x=x, f=f, down=down, flip_filter=flip_filter)
+ return x
+
+
+#----------------------------------------------------------------------------
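+
+# Usage sketch (illustrative only; the shapes below are hypothetical). Fuses 2x
+# upsampling with a 3x3 convolution, assuming `f` was built via upfirdn2d.setup_filter():
+#
+#   f = upfirdn2d.setup_filter([1, 3, 3, 1])             # low-pass filter
+#   x = torch.randn(4, 64, 32, 32, device='cuda')        # [N, C, H, W]
+#   w = torch.randn(128, 64, 3, 3, device='cuda')        # [O, I, kh, kw]
+#   y = conv2d_resample(x=x, w=w, f=f, up=2, padding=1)  # -> [4, 128, 64, 64]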
diff --git a/torch_utils/ops/filtered_lrelu.cpp b/torch_utils/ops/filtered_lrelu.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..4f55466235a020b0f5e150350bfdcd8b2a1e579d
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu.cpp
@@ -0,0 +1,304 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <torch/extension.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/util/Half.h>
+#include "filtered_lrelu.h"
+
+//------------------------------------------------------------------------
+
+static std::tuple<torch::Tensor, torch::Tensor, int> filtered_lrelu(
+ torch::Tensor x, torch::Tensor fu, torch::Tensor fd, torch::Tensor b, torch::Tensor si,
+ int up, int down, int px0, int px1, int py0, int py1, int sx, int sy, float gain, float slope, float clamp, bool flip_filters, bool writeSigns)
+{
+ // Set CUDA device.
+ TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+ const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+
+ // Validate arguments.
+ TORCH_CHECK(fu.device() == x.device() && fd.device() == x.device() && b.device() == x.device(), "all input tensors must reside on the same device");
+ TORCH_CHECK(fu.dtype() == torch::kFloat && fd.dtype() == torch::kFloat, "fu and fd must be float32");
+ TORCH_CHECK(b.dtype() == x.dtype(), "x and b must have the same dtype");
+ TORCH_CHECK(x.dtype() == torch::kHalf || x.dtype() == torch::kFloat, "x and b must be float16 or float32");
+ TORCH_CHECK(x.dim() == 4, "x must be rank 4");
+ TORCH_CHECK(x.size(0) * x.size(1) <= INT_MAX && x.size(2) <= INT_MAX && x.size(3) <= INT_MAX, "x is too large");
+ TORCH_CHECK(x.numel() > 0, "x is empty");
+ TORCH_CHECK((fu.dim() == 1 || fu.dim() == 2) && (fd.dim() == 1 || fd.dim() == 2), "fu and fd must be rank 1 or 2");
+ TORCH_CHECK(fu.size(0) <= INT_MAX && fu.size(-1) <= INT_MAX, "fu is too large");
+ TORCH_CHECK(fd.size(0) <= INT_MAX && fd.size(-1) <= INT_MAX, "fd is too large");
+ TORCH_CHECK(fu.numel() > 0, "fu is empty");
+ TORCH_CHECK(fd.numel() > 0, "fd is empty");
+ TORCH_CHECK(b.dim() == 1 && b.size(0) == x.size(1), "b must be a vector with the same number of channels as x");
+ TORCH_CHECK(up >= 1 && down >= 1, "up and down must be at least 1");
+
+ // Figure out how much shared memory is available on the device.
+ int maxSharedBytes = 0;
+ AT_CUDA_CHECK(cudaDeviceGetAttribute(&maxSharedBytes, cudaDevAttrMaxSharedMemoryPerBlockOptin, x.device().index()));
+ int sharedKB = maxSharedBytes >> 10;
+
+ // Populate enough launch parameters to check if a CUDA kernel exists.
+ filtered_lrelu_kernel_params p;
+ p.up = up;
+ p.down = down;
+ p.fuShape = make_int2((int)fu.size(-1), fu.dim() == 2 ? (int)fu.size(0) : 0); // shape [n, 0] indicates separable filter.
+ p.fdShape = make_int2((int)fd.size(-1), fd.dim() == 2 ? (int)fd.size(0) : 0);
+ filtered_lrelu_kernel_spec test_spec = choose_filtered_lrelu_kernel<float, int32_t, false, false>(p, sharedKB);
+ if (!test_spec.exec)
+ {
+ // No kernel found - return empty tensors and indicate missing kernel with return code of -1.
+ return std::make_tuple(torch::Tensor(), torch::Tensor(), -1);
+ }
+
+ // Input/output element size.
+ int64_t sz = (x.dtype() == torch::kHalf) ? 2 : 4;
+
+ // Input sizes.
+ int64_t xw = (int)x.size(3);
+ int64_t xh = (int)x.size(2);
+ int64_t fut_w = (int)fu.size(-1) - 1;
+ int64_t fut_h = (int)fu.size(0) - 1;
+ int64_t fdt_w = (int)fd.size(-1) - 1;
+ int64_t fdt_h = (int)fd.size(0) - 1;
+
+ // Logical size of upsampled buffer.
+ int64_t cw = xw * up + (px0 + px1) - fut_w;
+ int64_t ch = xh * up + (py0 + py1) - fut_h;
+ TORCH_CHECK(cw > fdt_w && ch > fdt_h, "upsampled buffer must be at least the size of downsampling filter");
+ TORCH_CHECK(cw <= INT_MAX && ch <= INT_MAX, "upsampled buffer is too large");
+
+ // Compute output size and allocate.
+ int64_t yw = (cw - fdt_w + (down - 1)) / down;
+ int64_t yh = (ch - fdt_h + (down - 1)) / down;
+ TORCH_CHECK(yw > 0 && yh > 0, "output must be at least 1x1");
+ TORCH_CHECK(yw <= INT_MAX && yh <= INT_MAX, "output is too large");
+ torch::Tensor y = torch::empty({x.size(0), x.size(1), yh, yw}, x.options(), x.suggest_memory_format());
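+
+ // Worked example (illustrative): xw = 32, up = 2, px0 = px1 = 2 and an 8-tap fu
+ // give cw = 64 + 4 - 7 = 61; with a 4-tap fd and down = 2, yw = (61 - 3 + 1) / 2 = 29.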
+
+ // Allocate sign tensor.
+ torch::Tensor so;
+ torch::Tensor s = si;
+ bool readSigns = !!s.numel();
+ int64_t sw_active = 0; // Active width of sign tensor.
+ if (writeSigns)
+ {
+ sw_active = yw * down - (down - 1) + fdt_w; // Active width in elements.
+ int64_t sh = yh * down - (down - 1) + fdt_h; // Height = active height.
+ int64_t sw = (sw_active + 15) & ~15; // Width = active width in elements, rounded up to multiple of 16.
+ TORCH_CHECK(sh <= INT_MAX && (sw >> 2) <= INT_MAX, "signs is too large");
+ s = so = torch::empty({x.size(0), x.size(1), sh, sw >> 2}, x.options().dtype(torch::kUInt8), at::MemoryFormat::Contiguous);
+ }
+ else if (readSigns)
+ sw_active = s.size(3) << 2;
+
+ // Validate sign tensor if in use.
+ if (readSigns || writeSigns)
+ {
+ TORCH_CHECK(s.is_contiguous(), "signs must be contiguous");
+ TORCH_CHECK(s.dtype() == torch::kUInt8, "signs must be uint8");
+ TORCH_CHECK(s.device() == x.device(), "signs must reside on the same device as x");
+ TORCH_CHECK(s.dim() == 4, "signs must be rank 4");
+ TORCH_CHECK(s.size(0) == x.size(0) && s.size(1) == x.size(1), "signs must have same batch & channels as x");
+ TORCH_CHECK(s.size(2) <= INT_MAX && s.size(3) <= INT_MAX, "signs is too large");
+ }
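+
+ // Note on the sign layout implied above: each element stores 2 bits - bit 0 set if
+ // the value was negative, bit 1 set if it was clamped - packed 4 per byte, hence the
+ // (sw >> 2) byte width and the multiple-of-16 rounding that keeps packed rows
+ // 32-bit aligned for coalesced writes.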
+
+ // Populate rest of CUDA kernel parameters.
+ p.x = x.data_ptr();
+ p.y = y.data_ptr();
+ p.b = b.data_ptr();
+ p.s = (readSigns || writeSigns) ? s.data_ptr() : 0;
+ p.fu = fu.data_ptr();
+ p.fd = fd.data_ptr();
+ p.pad0 = make_int2(px0, py0);
+ p.gain = gain;
+ p.slope = slope;
+ p.clamp = clamp;
+ p.flip = (flip_filters) ? 1 : 0;
+ p.xShape = make_int4((int)x.size(3), (int)x.size(2), (int)x.size(1), (int)x.size(0));
+ p.yShape = make_int4((int)y.size(3), (int)y.size(2), (int)y.size(1), (int)y.size(0));
+ p.sShape = (readSigns || writeSigns) ? make_int2((int)s.size(3), (int)s.size(2)) : make_int2(0, 0); // Width is in bytes. Contiguous.
+ p.sOfs = make_int2(sx, sy);
+ p.swLimit = (sw_active + 3) >> 2; // Rounded up to bytes.
+
+ // x, y, b strides are in bytes.
+ p.xStride = make_longlong4(sz * x.stride(3), sz * x.stride(2), sz * x.stride(1), sz * x.stride(0));
+ p.yStride = make_longlong4(sz * y.stride(3), sz * y.stride(2), sz * y.stride(1), sz * y.stride(0));
+ p.bStride = sz * b.stride(0);
+
+ // fu, fd strides are in elements.
+ p.fuStride = make_longlong3(fu.stride(-1), fu.dim() == 2 ? fu.stride(0) : 0, 0);
+ p.fdStride = make_longlong3(fd.stride(-1), fd.dim() == 2 ? fd.stride(0) : 0, 0);
+
+ // Determine if indices don't fit in int32. Support negative strides although Torch currently never produces those.
+ bool index64b = false;
+ if (std::abs(p.bStride * x.size(1)) > INT_MAX) index64b = true;
+ if (std::min(x.size(0) * p.xStride.w, 0ll) + std::min(x.size(1) * p.xStride.z, 0ll) + std::min(x.size(2) * p.xStride.y, 0ll) + std::min(x.size(3) * p.xStride.x, 0ll) < -INT_MAX) index64b = true;
+ if (std::max(x.size(0) * p.xStride.w, 0ll) + std::max(x.size(1) * p.xStride.z, 0ll) + std::max(x.size(2) * p.xStride.y, 0ll) + std::max(x.size(3) * p.xStride.x, 0ll) > INT_MAX) index64b = true;
+ if (std::min(y.size(0) * p.yStride.w, 0ll) + std::min(y.size(1) * p.yStride.z, 0ll) + std::min(y.size(2) * p.yStride.y, 0ll) + std::min(y.size(3) * p.yStride.x, 0ll) < -INT_MAX) index64b = true;
+ if (std::max(y.size(0) * p.yStride.w, 0ll) + std::max(y.size(1) * p.yStride.z, 0ll) + std::max(y.size(2) * p.yStride.y, 0ll) + std::max(y.size(3) * p.yStride.x, 0ll) > INT_MAX) index64b = true;
+ if (s.numel() > INT_MAX) index64b = true;
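+
+ // In other words: the kernels come in int32 and int64 indexing variants, and the
+ // cheaper 32-bit variant is used only when every byte offset provably fits in int32.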
+
+ // Choose CUDA kernel.
+ filtered_lrelu_kernel_spec spec = { 0 };
+ AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "filtered_lrelu_cuda", [&]
+ {
+ if constexpr (sizeof(scalar_t) <= 4) // Exclude doubles. constexpr prevents template instantiation.
+ {
+ // Choose kernel based on index type, datatype and sign read/write modes.
+ if (!index64b && writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int32_t, true, false>(p, sharedKB);
+ else if (!index64b && !writeSigns && readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int32_t, false, true>(p, sharedKB);
+ else if (!index64b && !writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int32_t, false, false>(p, sharedKB);
+ else if ( index64b && writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int64_t, true, false>(p, sharedKB);
+ else if ( index64b && !writeSigns && readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int64_t, false, true>(p, sharedKB);
+ else if ( index64b && !writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int64_t, false, false>(p, sharedKB);
+ }
+ });
+ TORCH_CHECK(spec.exec, "internal error - CUDA kernel not found"); // This should not happen because we tested earlier that kernel exists.
+
+ // Launch CUDA kernel.
+ void* args[] = {&p};
+ int bx = spec.numWarps * 32;
+ int gx = (p.yShape.x - 1) / spec.tileOut.x + 1;
+ int gy = (p.yShape.y - 1) / spec.tileOut.y + 1;
+ int gz = p.yShape.z * p.yShape.w;
+
+ // Repeat multiple horizontal tiles in a CTA?
+ if (spec.xrep)
+ {
+ p.tilesXrep = spec.xrep;
+ p.tilesXdim = gx;
+
+ gx = (gx + p.tilesXrep - 1) / p.tilesXrep;
+ std::swap(gx, gy);
+ }
+ else
+ {
+ p.tilesXrep = 0;
+ p.tilesXdim = 0;
+ }
+
+ // Launch filter setup kernel.
+ AT_CUDA_CHECK(cudaLaunchKernel(spec.setup, 1, 1024, args, 0, at::cuda::getCurrentCUDAStream()));
+
+ // Copy kernels to constant memory.
+ if ( writeSigns && !readSigns) AT_CUDA_CHECK((copy_filters<true, false>(at::cuda::getCurrentCUDAStream())));
+ else if (!writeSigns && readSigns) AT_CUDA_CHECK((copy_filters<false, true>(at::cuda::getCurrentCUDAStream())));
+ else if (!writeSigns && !readSigns) AT_CUDA_CHECK((copy_filters<false, false>(at::cuda::getCurrentCUDAStream())));
+
+ // Set cache and shared memory configurations for main kernel.
+ AT_CUDA_CHECK(cudaFuncSetCacheConfig(spec.exec, cudaFuncCachePreferShared));
+ if (spec.dynamicSharedKB) // Need dynamically allocated shared memory?
+ AT_CUDA_CHECK(cudaFuncSetAttribute(spec.exec, cudaFuncAttributeMaxDynamicSharedMemorySize, spec.dynamicSharedKB << 10));
+ AT_CUDA_CHECK(cudaFuncSetSharedMemConfig(spec.exec, cudaSharedMemBankSizeFourByte));
+
+ // Launch main kernel.
+ const int maxSubGz = 65535; // CUDA maximum for block z dimension.
+ for (int zofs=0; zofs < gz; zofs += maxSubGz) // Do multiple launches if gz is too big.
+ {
+ p.blockZofs = zofs;
+ int subGz = std::min(maxSubGz, gz - zofs);
+ AT_CUDA_CHECK(cudaLaunchKernel(spec.exec, dim3(gx, gy, subGz), bx, args, spec.dynamicSharedKB << 10, at::cuda::getCurrentCUDAStream()));
+ }
+
+ // Done.
+ return std::make_tuple(y, so, 0);
+}
+
+//------------------------------------------------------------------------
+
+static torch::Tensor filtered_lrelu_act(torch::Tensor x, torch::Tensor si, int sx, int sy, float gain, float slope, float clamp, bool writeSigns)
+{
+ // Set CUDA device.
+ TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+ const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+
+ // Validate arguments.
+ TORCH_CHECK(x.dim() == 4, "x must be rank 4");
+ TORCH_CHECK(x.size(0) * x.size(1) <= INT_MAX && x.size(2) <= INT_MAX && x.size(3) <= INT_MAX, "x is too large");
+ TORCH_CHECK(x.numel() > 0, "x is empty");
+ TORCH_CHECK(x.dtype() == torch::kHalf || x.dtype() == torch::kFloat || x.dtype() == torch::kDouble, "x must be float16, float32 or float64");
+
+ // Output signs if we don't have sign input.
+ torch::Tensor so;
+ torch::Tensor s = si;
+ bool readSigns = !!s.numel();
+ if (writeSigns)
+ {
+ int64_t sw = x.size(3);
+ sw = (sw + 15) & ~15; // Round to a multiple of 16 for coalescing.
+ s = so = torch::empty({x.size(0), x.size(1), x.size(2), sw >> 2}, x.options().dtype(torch::kUInt8), at::MemoryFormat::Contiguous);
+ }
+
+ // Validate sign tensor if in use.
+ if (readSigns || writeSigns)
+ {
+ TORCH_CHECK(s.is_contiguous(), "signs must be contiguous");
+ TORCH_CHECK(s.dtype() == torch::kUInt8, "signs must be uint8");
+ TORCH_CHECK(s.device() == x.device(), "signs must reside on the same device as x");
+ TORCH_CHECK(s.dim() == 4, "signs must be rank 4");
+ TORCH_CHECK(s.size(0) == x.size(0) && s.size(1) == x.size(1), "signs must have same batch & channels as x");
+ TORCH_CHECK(s.size(2) <= INT_MAX && (s.size(3) << 2) <= INT_MAX, "signs tensor is too large");
+ }
+
+ // Initialize CUDA kernel parameters.
+ filtered_lrelu_act_kernel_params p;
+ p.x = x.data_ptr();
+ p.s = (readSigns || writeSigns) ? s.data_ptr() : 0;
+ p.gain = gain;
+ p.slope = slope;
+ p.clamp = clamp;
+ p.xShape = make_int4((int)x.size(3), (int)x.size(2), (int)x.size(1), (int)x.size(0));
+ p.xStride = make_longlong4(x.stride(3), x.stride(2), x.stride(1), x.stride(0));
+ p.sShape = (readSigns || writeSigns) ? make_int2((int)s.size(3) << 2, (int)s.size(2)) : make_int2(0, 0); // Width is in elements. Contiguous.
+ p.sOfs = make_int2(sx, sy);
+
+ // Choose CUDA kernel.
+ void* func = 0;
+ AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "filtered_lrelu_act_cuda", [&]
+ {
+ if (writeSigns)
+ func = choose_filtered_lrelu_act_kernel<scalar_t, true, false>();
+ else if (readSigns)
+ func = choose_filtered_lrelu_act_kernel<scalar_t, false, true>();
+ else
+ func = choose_filtered_lrelu_act_kernel<scalar_t, false, false>();
+ });
+ TORCH_CHECK(func, "internal error - CUDA kernel not found");
+
+ // Launch CUDA kernel.
+ void* args[] = {&p};
+ int bx = 128; // 4 warps per block.
+
+ // Logical size of launch = writeSigns ? p.s : p.x
+ uint32_t gx = writeSigns ? p.sShape.x : p.xShape.x;
+ uint32_t gy = writeSigns ? p.sShape.y : p.xShape.y;
+ uint32_t gz = p.xShape.z * p.xShape.w; // Same as in p.sShape if signs are in use.
+ gx = (gx - 1) / bx + 1;
+
+ // Make sure grid y and z dimensions are within CUDA launch limits. Kernel loops internally to do the rest.
+ const uint32_t gmax = 65535;
+ gy = std::min(gy, gmax);
+ gz = std::min(gz, gmax);
+
+ // Launch.
+ AT_CUDA_CHECK(cudaLaunchKernel(func, dim3(gx, gy, gz), bx, args, 0, at::cuda::getCurrentCUDAStream()));
+ return so;
+}
+
+//------------------------------------------------------------------------
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m)
+{
+ m.def("filtered_lrelu", &filtered_lrelu); // The whole thing.
+ m.def("filtered_lrelu_act_", &filtered_lrelu_act); // Activation and sign tensor handling only. Modifies data tensor in-place.
+}
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/filtered_lrelu.cu b/torch_utils/ops/filtered_lrelu.cu
new file mode 100644
index 0000000000000000000000000000000000000000..aaac95408365f023ffaa4cb89348d499d3b948f0
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu.cu
@@ -0,0 +1,1288 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <c10/util/Half.h>
+#include "filtered_lrelu.h"
+#include <cstdint>
+
+//------------------------------------------------------------------------
+// Helpers.
+
+enum // Filter modes.
+{
+ MODE_SUSD = 0, // Separable upsampling, separable downsampling.
+ MODE_FUSD = 1, // Full upsampling, separable downsampling.
+ MODE_SUFD = 2, // Separable upsampling, full downsampling.
+ MODE_FUFD = 3, // Full upsampling, full downsampling.
+};
+
+template <class T> struct InternalType;
+template <> struct InternalType<double>
+{
+ typedef double scalar_t; typedef double2 vec2_t; typedef double4 vec4_t;
+ __device__ __forceinline__ static vec2_t zero_vec2(void) { return make_double2(0, 0); }
+ __device__ __forceinline__ static vec4_t zero_vec4(void) { return make_double4(0, 0, 0, 0); }
+ __device__ __forceinline__ static double clamp(double x, double c) { return fmin(fmax(x, -c), c); }
+};
+template <> struct InternalType<float>
+{
+ typedef float scalar_t; typedef float2 vec2_t; typedef float4 vec4_t;
+ __device__ __forceinline__ static vec2_t zero_vec2(void) { return make_float2(0, 0); }
+ __device__ __forceinline__ static vec4_t zero_vec4(void) { return make_float4(0, 0, 0, 0); }
+ __device__ __forceinline__ static float clamp(float x, float c) { return fminf(fmaxf(x, -c), c); }
+};
+template <> struct InternalType<c10::Half>
+{
+ typedef float scalar_t; typedef float2 vec2_t; typedef float4 vec4_t;
+ __device__ __forceinline__ static vec2_t zero_vec2(void) { return make_float2(0, 0); }
+ __device__ __forceinline__ static vec4_t zero_vec4(void) { return make_float4(0, 0, 0, 0); }
+ __device__ __forceinline__ static float clamp(float x, float c) { return fminf(fmaxf(x, -c), c); }
+};
+
+#define MIN(A, B) ((A) < (B) ? (A) : (B))
+#define MAX(A, B) ((A) > (B) ? (A) : (B))
+#define CEIL_DIV(A, B) (((B)==1) ? (A) : \
+ ((B)==2) ? ((int)((A)+1) >> 1) : \
+ ((B)==4) ? ((int)((A)+3) >> 2) : \
+ (((A) + ((A) > 0 ? (B) - 1 : 0)) / (B)))
+
+// This works only up to blocks of size 256 x 256 and for all N that are powers of two.
+template <int N> __device__ __forceinline__ void fast_div_mod(int& x, int& y, unsigned int i)
+{
+ if ((N & (N-1)) && N <= 256)
+ y = (i * ((1<<24)/N + 1)) >> 24; // Assumes N <= 256, i < N*256.
+ else
+ y = i/N;
+
+ x = i - y*N;
+}
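+
+// Worked example (illustrative): for N == 3 the fixed-point reciprocal is
+// (1<<24)/3 + 1 = 5592406, so i = 9 gives y = (9 * 5592406) >> 24 = 3 and x = 0.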
+
+// Type cast stride before reading it.
+template <class T> __device__ __forceinline__ T get_stride(const int64_t& x)
+{
+ return *reinterpret_cast<const T*>(&x);
+}
+
+//------------------------------------------------------------------------
+// Filters, setup kernel, copying function.
+
+#define MAX_FILTER_SIZE 32
+
+// Combined up/down filter buffers so that transfer can be done with one copy.
+__device__ float g_fbuf[2 * MAX_FILTER_SIZE * MAX_FILTER_SIZE]; // Filters in global memory, written by setup kernel.
+__device__ __constant__ float c_fbuf[2 * MAX_FILTER_SIZE * MAX_FILTER_SIZE]; // Filters in constant memory, read by main kernel.
+
+// Accessors to combined buffers to index up/down filters individually.
+#define c_fu (c_fbuf)
+#define c_fd (c_fbuf + MAX_FILTER_SIZE * MAX_FILTER_SIZE)
+#define g_fu (g_fbuf)
+#define g_fd (g_fbuf + MAX_FILTER_SIZE * MAX_FILTER_SIZE)
+
+// Set up filters into global memory buffer.
+static __global__ void setup_filters_kernel(filtered_lrelu_kernel_params p)
+{
+ for (int idx = threadIdx.x; idx < MAX_FILTER_SIZE * MAX_FILTER_SIZE; idx += blockDim.x)
+ {
+ int x, y;
+ fast_div_mod<MAX_FILTER_SIZE>(x, y, idx);
+
+ int fu_x = p.flip ? x : (p.fuShape.x - 1 - x);
+ int fu_y = p.flip ? y : (p.fuShape.y - 1 - y);
+ if (p.fuShape.y > 0)
+ g_fu[idx] = (x >= p.fuShape.x || y >= p.fuShape.y) ? 0.0f : p.fu[fu_x * p.fuStride.x + fu_y * p.fuStride.y];
+ else
+ g_fu[idx] = (x >= p.fuShape.x || y > 0) ? 0.0f : p.fu[fu_x * p.fuStride.x];
+
+ int fd_x = p.flip ? x : (p.fdShape.x - 1 - x);
+ int fd_y = p.flip ? y : (p.fdShape.y - 1 - y);
+ if (p.fdShape.y > 0)
+ g_fd[idx] = (x >= p.fdShape.x || y >= p.fdShape.y) ? 0.0f : p.fd[fd_x * p.fdStride.x + fd_y * p.fdStride.y];
+ else
+ g_fd[idx] = (x >= p.fdShape.x || y > 0) ? 0.0f : p.fd[fd_x * p.fdStride.x];
+ }
+}
+
+// Host function to copy filters written by setup kernel into constant buffer for main kernel.
+template <bool, bool> static cudaError_t copy_filters(cudaStream_t stream)
+{
+ void* src = 0;
+ cudaError_t err = cudaGetSymbolAddress(&src, g_fbuf);
+ if (err) return err;
+ return cudaMemcpyToSymbolAsync(c_fbuf, src, 2 * MAX_FILTER_SIZE * MAX_FILTER_SIZE * sizeof(float), 0, cudaMemcpyDeviceToDevice, stream);
+}
+
+//------------------------------------------------------------------------
+// Coordinate spaces:
+// - Relative to input tensor: inX, inY, tileInX, tileInY
+// - Relative to input tile: relInX, relInY, tileInW, tileInH
+// - Relative to upsampled tile: relUpX, relUpY, tileUpW, tileUpH
+// - Relative to output tile: relOutX, relOutY, tileOutW, tileOutH
+// - Relative to output tensor: outX, outY, tileOutX, tileOutY
+//
+// Relationships between coordinate spaces:
+// - inX = tileInX + relInX
+// - inY = tileInY + relInY
+// - relUpX = relInX * up + phaseInX
+// - relUpY = relInY * up + phaseInY
+// - relUpX = relOutX * down
+// - relUpY = relOutY * down
+// - outX = tileOutX + relOutX
+// - outY = tileOutY + relOutY
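+//
+// Example (illustrative): with up=2 and phaseInX=1, input column relInX=5 lands at
+// relUpX = 5*2 + 1 = 11; with down=2, output column relOutX=5 reads relUpX = 10.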
+
+extern __shared__ char s_buf_raw[]; // When sharedKB <= 48, allocate shared memory statically inside the kernel, otherwise use the externally allocated shared memory buffer.
+
+template <class T, class index_t, int sharedKB, bool signWrite, bool signRead, int filterMode, int up, int fuSize, int down, int fdSize, int tileOutW, int tileOutH, int threadsPerBlock, bool enableXrep, bool enableWriteSkip>
+static __global__ void filtered_lrelu_kernel(filtered_lrelu_kernel_params p)
+{
+ // Check that we don't try to support non-existing filter modes.
+ static_assert(up == 1 || up == 2 || up == 4, "only up=1, up=2, up=4 scales supported");
+ static_assert(down == 1 || down == 2 || down == 4, "only down=1, down=2, down=4 scales supported");
+ static_assert(fuSize >= up, "upsampling filter size must be at least upsampling factor");
+ static_assert(fdSize >= down, "downsampling filter size must be at least downsampling factor");
+ static_assert(fuSize % up == 0, "upsampling filter size must be divisible with upsampling factor");
+ static_assert(fdSize % down == 0, "downsampling filter size must be divisible with downsampling factor");
+ static_assert(fuSize <= MAX_FILTER_SIZE && fdSize <= MAX_FILTER_SIZE, "filter size greater than MAX_FILTER_SIZE");
+ static_assert(up != 1 || (fuSize == 1 && (filterMode == MODE_FUFD || filterMode == MODE_FUSD)), "up=1 supported only for 1x1 full filters");
+ static_assert(down != 1 || (fdSize == 1 && (filterMode == MODE_FUFD || filterMode == MODE_SUFD)), "down=1 supported only for 1x1 full filters");
+ static_assert(!(up == 4 && (filterMode == MODE_FUFD || filterMode == MODE_FUSD)), "full filters not supported for up=4");
+ static_assert(!(down == 4 && (filterMode == MODE_FUFD || filterMode == MODE_SUFD)), "full filters not supported for down=4");
+
+ // Static definitions.
+ typedef typename InternalType<T>::scalar_t scalar_t;
+ typedef typename InternalType<T>::vec2_t vec2_t;
+ typedef typename InternalType<T>::vec4_t vec4_t;
+ const int tileUpW = (tileOutW * down + (fdSize - 1) - (down - 1) + 3) & ~3; // Upsampled tile width, rounded up to multiple of 4.
+ const int tileUpH = tileOutH * down + (fdSize - 1) - (down - 1); // Upsampled tile height.
+ const int tileInW = CEIL_DIV(tileUpW + (fuSize - 1), up); // Input tile width.
+ const int tileInH = CEIL_DIV(tileUpH + (fuSize - 1), up); // Input tile height.
+ const int tileUpH_up = CEIL_DIV(tileUpH, up) * up; // Upsampled tile height rounded up to a multiple of up.
+ const int tileInH_up = CEIL_DIV(tileUpH_up + (fuSize - 1), up); // For allocations only, to avoid shared memory read overruns with up=2 and up=4.
+
+ // Merge 1x1 downsampling into last upsampling step for upf1 and ups2.
+ const bool downInline = (down == 1) && ((up == 1 && filterMode == MODE_FUFD) || (up == 2 && filterMode == MODE_SUFD));
+
+ // Sizes of logical buffers.
+ const int szIn = tileInH_up * tileInW;
+ const int szUpX = tileInH_up * tileUpW;
+ const int szUpXY = downInline ? 0 : (tileUpH * tileUpW);
+ const int szDownX = tileUpH * tileOutW;
+
+ // Sizes for shared memory arrays.
+ const int s_buf0_size_base =
+ (filterMode == MODE_SUSD) ? MAX(szIn, szUpXY) :
+ (filterMode == MODE_FUSD) ? MAX(szIn, szDownX) :
+ (filterMode == MODE_SUFD) ? MAX(szIn, szUpXY) :
+ (filterMode == MODE_FUFD) ? szIn :
+ -1;
+ const int s_buf1_size_base =
+ (filterMode == MODE_SUSD) ? MAX(szUpX, szDownX) :
+ (filterMode == MODE_FUSD) ? szUpXY :
+ (filterMode == MODE_SUFD) ? szUpX :
+ (filterMode == MODE_FUFD) ? szUpXY :
+ -1;
+
+ // Ensure U128 alignment.
+ const int s_buf0_size = (s_buf0_size_base + 3) & ~3;
+ const int s_buf1_size = (s_buf1_size_base + 3) & ~3;
+
+ // Check at compile time that we don't use too much shared memory.
+ static_assert((s_buf0_size + s_buf1_size) * sizeof(scalar_t) <= (sharedKB << 10), "shared memory overflow");
+
+ // Declare shared memory arrays.
+ scalar_t* s_buf0;
+ scalar_t* s_buf1;
+ if (sharedKB <= 48)
+ {
+ // Allocate shared memory arrays here.
+ __shared__ scalar_t s_buf0_st[(sharedKB > 48) ? (1<<24) : (s_buf0_size + s_buf1_size)]; // Prevent launching if this isn't optimized away when unused.
+ s_buf0 = s_buf0_st;
+ s_buf1 = s_buf0 + s_buf0_size;
+ }
+ else
+ {
+ // Use the dynamically allocated shared memory array.
+ s_buf0 = (scalar_t*)s_buf_raw;
+ s_buf1 = s_buf0 + s_buf0_size;
+ }
+
+ // Pointers to the buffers.
+ scalar_t* s_tileIn; // Input tile: [relInX * tileInH + relInY]
+ scalar_t* s_tileUpX; // After horizontal upsampling: [relInY * tileUpW + relUpX]
+ scalar_t* s_tileUpXY; // After upsampling: [relUpY * tileUpW + relUpX]
+ scalar_t* s_tileDownX; // After horizontal downsampling: [relUpY * tileOutW + relOutX]
+ if (filterMode == MODE_SUSD)
+ {
+ s_tileIn = s_buf0;
+ s_tileUpX = s_buf1;
+ s_tileUpXY = s_buf0;
+ s_tileDownX = s_buf1;
+ }
+ else if (filterMode == MODE_FUSD)
+ {
+ s_tileIn = s_buf0;
+ s_tileUpXY = s_buf1;
+ s_tileDownX = s_buf0;
+ }
+ else if (filterMode == MODE_SUFD)
+ {
+ s_tileIn = s_buf0;
+ s_tileUpX = s_buf1;
+ s_tileUpXY = s_buf0;
+ }
+ else if (filterMode == MODE_FUFD)
+ {
+ s_tileIn = s_buf0;
+ s_tileUpXY = s_buf1;
+ }
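+
+ // Note: the stage buffers deliberately alias s_buf0/s_buf1 in ping-pong fashion -
+ // each stage reads one buffer while writing the other, so e.g. in MODE_SUSD the
+ // upsampled tile can safely overwrite the input tile it has just consumed.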
+
+ // Allow large grids in z direction via per-launch offset.
+ int channelIdx = blockIdx.z + p.blockZofs;
+ int batchIdx = channelIdx / p.yShape.z;
+ channelIdx -= batchIdx * p.yShape.z;
+
+ // Offset to output feature map. In bytes.
+ index_t mapOfsOut = channelIdx * get_stride<index_t>(p.yStride.z) + batchIdx * get_stride<index_t>(p.yStride.w);
+
+ // Sign shift amount.
+ uint32_t signXo = ((threadIdx.x + p.sOfs.x) << 1) & 6;
+
+ // Inner tile loop.
+ #pragma unroll 1
+ for (int tileIdx = 0; !enableXrep || (tileIdx < MIN(p.tilesXrep, p.tilesXdim - p.tilesXrep * blockIdx.y)); tileIdx++)
+ {
+ // Locate output tile.
+ int tileX = enableXrep ? blockIdx.y * p.tilesXrep + tileIdx : blockIdx.x;
+ int tileOutX = tileX * tileOutW;
+ int tileOutY = (enableXrep ? blockIdx.x : blockIdx.y) * tileOutH;
+
+ // Locate input tile.
+ int tmpX = tileOutX * down - p.pad0.x;
+ int tmpY = tileOutY * down - p.pad0.y;
+ int tileInX = CEIL_DIV(tmpX, up);
+ int tileInY = CEIL_DIV(tmpY, up);
+ const int phaseInX = tileInX * up - tmpX;
+ const int phaseInY = tileInY * up - tmpY;
+
+ // Extra sync if input and output buffers are the same and we are not on first tile.
+ if (enableXrep && tileIdx > 0 && (filterMode == MODE_FUSD || (filterMode == MODE_SUFD && !downInline) || (filterMode == MODE_FUFD && downInline)))
+ __syncthreads();
+
+ // Load input tile & apply bias. Unrolled.
+ scalar_t b = (scalar_t)*(const T*)((const char*)p.b + (channelIdx * get_stride<index_t>(p.bStride)));
+ index_t mapOfsIn = channelIdx * get_stride<index_t>(p.xStride.z) + batchIdx * get_stride<index_t>(p.xStride.w);
+ int idx = threadIdx.x;
+ const int loopCountIN = CEIL_DIV(tileInW * tileInH, threadsPerBlock);
+ #pragma unroll
+ for (int loop = 0; loop < loopCountIN; loop++)
+ {
+ int relInX, relInY;
+ fast_div_mod<tileInW>(relInX, relInY, idx);
+ int inX = tileInX + relInX;
+ int inY = tileInY + relInY;
+ scalar_t v = 0;
+
+ if ((uint32_t)inX < p.xShape.x && (uint32_t)inY < p.xShape.y)
+ v = (scalar_t)*((const T*)((const char*)p.x + (inX * get_stride<index_t>(p.xStride.x) + inY * get_stride<index_t>(p.xStride.y) + mapOfsIn))) + b;
+
+ bool skip = (loop == loopCountIN-1) && (idx >= tileInW * tileInH);
+ if (!skip)
+ s_tileIn[idx] = v;
+
+ idx += threadsPerBlock;
+ }
+
+ if (filterMode == MODE_SUSD || filterMode == MODE_SUFD) // Separable upsampling filter.
+ {
+ // Horizontal upsampling.
+ __syncthreads();
+ if (up == 4)
+ {
+ for (int idx = threadIdx.x*up; idx < tileUpW * tileInH; idx += blockDim.x*up)
+ {
+ int relUpX0, relInY;
+ fast_div_mod<tileUpW>(relUpX0, relInY, idx);
+ int relInX0 = relUpX0 / up;
+ int src0 = relInX0 + tileInW * relInY;
+ int dst = relInY * tileUpW + relUpX0;
+ vec4_t v = InternalType<T>::zero_vec4();
+ scalar_t a = s_tileIn[src0];
+ if (phaseInX == 0)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileIn[src0 + step + 1];
+ v.y += a * (scalar_t)c_fu[step * up + 3];
+ v.z += a * (scalar_t)c_fu[step * up + 2];
+ v.w += a * (scalar_t)c_fu[step * up + 1];
+ }
+ }
+ else if (phaseInX == 1)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 1];
+ v.y += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileIn[src0 + step + 1];
+ v.z += a * (scalar_t)c_fu[step * up + 3];
+ v.w += a * (scalar_t)c_fu[step * up + 2];
+ }
+ }
+ else if (phaseInX == 2)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 2];
+ v.y += a * (scalar_t)c_fu[step * up + 1];
+ v.z += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileIn[src0 + step + 1];
+ v.w += a * (scalar_t)c_fu[step * up + 3];
+ }
+ }
+ else // (phaseInX == 3)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 3];
+ v.y += a * (scalar_t)c_fu[step * up + 2];
+ v.z += a * (scalar_t)c_fu[step * up + 1];
+ v.w += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileIn[src0 + step + 1];
+ }
+ }
+ s_tileUpX[dst+0] = v.x;
+ s_tileUpX[dst+1] = v.y;
+ s_tileUpX[dst+2] = v.z;
+ s_tileUpX[dst+3] = v.w;
+ }
+ }
+ else if (up == 2)
+ {
+ bool p0 = (phaseInX == 0);
+ for (int idx = threadIdx.x*up; idx < tileUpW * tileInH; idx += blockDim.x*up)
+ {
+ int relUpX0, relInY;
+ fast_div_mod<tileUpW>(relUpX0, relInY, idx);
+ int relInX0 = relUpX0 / up;
+ int src0 = relInX0 + tileInW * relInY;
+ int dst = relInY * tileUpW + relUpX0;
+ vec2_t v = InternalType<T>::zero_vec2();
+ scalar_t a = s_tileIn[src0];
+ if (p0) // (phaseInX == 0)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileIn[src0 + step + 1];
+ v.y += a * (scalar_t)c_fu[step * up + 1];
+ }
+ }
+ else // (phaseInX == 1)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 1];
+ v.y += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileIn[src0 + step + 1];
+ }
+ }
+ s_tileUpX[dst+0] = v.x;
+ s_tileUpX[dst+1] = v.y;
+ }
+ }
+
+ // Vertical upsampling & nonlinearity.
+
+ __syncthreads();
+ int groupMask = 15 << ((threadIdx.x & 31) & ~3);
+ int minY = tileOutY ? (tileOutY - tileOutH) * down + tileUpH : 0; // Skip already written signs.
+ int sShapeMaxY = MIN(p.sShape.y, tileOutY * down + tileUpH); // Avoid out-of-tile sign writes.
+ if (up == 4)
+ {
+ minY -= 3; // Adjust according to block height.
+ for (int idx = threadIdx.x; idx < tileUpW * tileUpH_up / up; idx += blockDim.x)
+ {
+ int relUpX, relInY0;
+ fast_div_mod<tileUpW>(relUpX, relInY0, idx);
+ int relUpY0 = relInY0 * up;
+ int src0 = relInY0 * tileUpW + relUpX;
+ int dst = relUpY0 * tileUpW + relUpX;
+ vec4_t v = InternalType<T>::zero_vec4();
+
+ scalar_t a = s_tileUpX[src0];
+ if (phaseInY == 0)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileUpX[src0 + (step + 1) * tileUpW];
+ v.y += a * (scalar_t)c_fu[step * up + 3];
+ v.z += a * (scalar_t)c_fu[step * up + 2];
+ v.w += a * (scalar_t)c_fu[step * up + 1];
+ }
+ }
+ else if (phaseInY == 1)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 1];
+ v.y += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileUpX[src0 + (step + 1) * tileUpW];
+ v.z += a * (scalar_t)c_fu[step * up + 3];
+ v.w += a * (scalar_t)c_fu[step * up + 2];
+ }
+ }
+ else if (phaseInY == 2)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 2];
+ v.y += a * (scalar_t)c_fu[step * up + 1];
+ v.z += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileUpX[src0 + (step + 1) * tileUpW];
+ v.w += a * (scalar_t)c_fu[step * up + 3];
+ }
+ }
+ else // (phaseInY == 3)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 3];
+ v.y += a * (scalar_t)c_fu[step * up + 2];
+ v.z += a * (scalar_t)c_fu[step * up + 1];
+ v.w += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileUpX[src0 + (step + 1) * tileUpW];
+ }
+ }
+
+ int x = tileOutX * down + relUpX;
+ int y = tileOutY * down + relUpY0;
+ int signX = x + p.sOfs.x;
+ int signY = y + p.sOfs.y;
+ int signZ = blockIdx.z + p.blockZofs;
+ int signXb = signX >> 2;
+ index_t si0 = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ);
+ index_t si1 = si0 + p.sShape.x;
+ index_t si2 = si0 + p.sShape.x * 2;
+ index_t si3 = si0 + p.sShape.x * 3;
+
+ v.x *= (scalar_t)((float)up * (float)up * p.gain);
+ v.y *= (scalar_t)((float)up * (float)up * p.gain);
+ v.z *= (scalar_t)((float)up * (float)up * p.gain);
+ v.w *= (scalar_t)((float)up * (float)up * p.gain);
+
+ if (signWrite)
+ {
+ if (!enableWriteSkip)
+ {
+ // Determine and write signs.
+ int sx = __float_as_uint(v.x) >> 31 << 0;
+ int sy = __float_as_uint(v.y) >> 31 << 8;
+ int sz = __float_as_uint(v.z) >> 31 << 16;
+ int sw = __float_as_uint(v.w) >> 31 << 24;
+ if (sx) v.x *= p.slope;
+ if (sy) v.y *= p.slope;
+ if (sz) v.z *= p.slope;
+ if (sw) v.w *= p.slope;
+ if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType<T>::clamp(v.x, p.clamp); }
+ if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType<T>::clamp(v.y, p.clamp); }
+ if (fabsf(v.z) > p.clamp) { sz = 2 << 16; v.z = InternalType<T>::clamp(v.z, p.clamp); }
+ if (fabsf(v.w) > p.clamp) { sw = 2 << 24; v.w = InternalType<T>::clamp(v.w, p.clamp); }
+
+ if ((uint32_t)signXb < p.swLimit && signY >= minY)
+ {
+ // Combine signs.
+ uint32_t s = sx + sy + sw + sz;
+ s <<= (signX & 3) << 1;
+ s |= __shfl_xor_sync(groupMask, s, 1);
+ s |= __shfl_xor_sync(groupMask, s, 2);
+
+ // Write signs.
+ if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); }
+ if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); }
+ if ((uint32_t)(signY + 2) < sShapeMaxY) { p.s[si2] = (unsigned char)(s >> 16); }
+ if ((uint32_t)(signY + 3) < sShapeMaxY) { p.s[si3] = (unsigned char)(s >> 24); }
+ }
+ }
+ else
+ {
+ // Determine and write signs.
+ if ((uint32_t)signXb < p.swLimit && signY >= minY)
+ {
+ int sx = __float_as_uint(v.x) >> 31 << 0;
+ int sy = __float_as_uint(v.y) >> 31 << 8;
+ int sz = __float_as_uint(v.z) >> 31 << 16;
+ int sw = __float_as_uint(v.w) >> 31 << 24;
+ if (sx) v.x *= p.slope;
+ if (sy) v.y *= p.slope;
+ if (sz) v.z *= p.slope;
+ if (sw) v.w *= p.slope;
+ if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType<T>::clamp(v.x, p.clamp); }
+ if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType<T>::clamp(v.y, p.clamp); }
+ if (fabsf(v.z) > p.clamp) { sz = 2 << 16; v.z = InternalType<T>::clamp(v.z, p.clamp); }
+ if (fabsf(v.w) > p.clamp) { sw = 2 << 24; v.w = InternalType<T>::clamp(v.w, p.clamp); }
+
+ // Combine signs.
+ uint32_t s = sx + sy + sw + sz;
+ s <<= (signX & 3) << 1;
+ s |= __shfl_xor_sync(groupMask, s, 1);
+ s |= __shfl_xor_sync(groupMask, s, 2);
+
+ // Write signs.
+ if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); }
+ if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); }
+ if ((uint32_t)(signY + 2) < sShapeMaxY) { p.s[si2] = (unsigned char)(s >> 16); }
+ if ((uint32_t)(signY + 3) < sShapeMaxY) { p.s[si3] = (unsigned char)(s >> 24); }
+ }
+ else
+ {
+ // Just compute the values.
+ if (v.x < 0.f) v.x *= p.slope; v.x = InternalType<T>::clamp(v.x, p.clamp);
+ if (v.y < 0.f) v.y *= p.slope; v.y = InternalType<T>::clamp(v.y, p.clamp);
+ if (v.z < 0.f) v.z *= p.slope; v.z = InternalType<T>::clamp(v.z, p.clamp);
+ if (v.w < 0.f) v.w *= p.slope; v.w = InternalType<T>::clamp(v.w, p.clamp);
+ }
+ }
+ }
+ else if (signRead) // Read signs and apply.
+ {
+ if ((uint32_t)signXb < p.swLimit)
+ {
+ int ss = (signX & 3) << 1;
+ if ((uint32_t)(signY + 0) < p.sShape.y) { int s = p.s[si0] >> ss; if (s & 1) v.x *= p.slope; if (s & 2) v.x = 0.f; }
+ if ((uint32_t)(signY + 1) < p.sShape.y) { int s = p.s[si1] >> ss; if (s & 1) v.y *= p.slope; if (s & 2) v.y = 0.f; }
+ if ((uint32_t)(signY + 2) < p.sShape.y) { int s = p.s[si2] >> ss; if (s & 1) v.z *= p.slope; if (s & 2) v.z = 0.f; }
+ if ((uint32_t)(signY + 3) < p.sShape.y) { int s = p.s[si3] >> ss; if (s & 1) v.w *= p.slope; if (s & 2) v.w = 0.f; }
+ }
+ }
+ else // Forward pass with no sign write.
+ {
+ if (v.x < 0.f) v.x *= p.slope; v.x = InternalType<T>::clamp(v.x, p.clamp);
+ if (v.y < 0.f) v.y *= p.slope; v.y = InternalType<T>::clamp(v.y, p.clamp);
+ if (v.z < 0.f) v.z *= p.slope; v.z = InternalType<T>::clamp(v.z, p.clamp);
+ if (v.w < 0.f) v.w *= p.slope; v.w = InternalType<T>::clamp(v.w, p.clamp);
+ }
+
+ s_tileUpXY[dst + 0 * tileUpW] = v.x;
+ if (relUpY0 + 1 < tileUpH) s_tileUpXY[dst + 1 * tileUpW] = v.y;
+ if (relUpY0 + 2 < tileUpH) s_tileUpXY[dst + 2 * tileUpW] = v.z;
+ if (relUpY0 + 3 < tileUpH) s_tileUpXY[dst + 3 * tileUpW] = v.w;
+ }
+ }
+ else if (up == 2)
+ {
+ minY -= 1; // Adjust according to block height.
+ for (int idx = threadIdx.x; idx < tileUpW * tileUpH_up / up; idx += blockDim.x)
+ {
+ int relUpX, relInY0;
+ fast_div_mod(relUpX, relInY0, idx);
+ int relUpY0 = relInY0 * up;
+ int src0 = relInY0 * tileUpW + relUpX;
+ int dst = relUpY0 * tileUpW + relUpX;
+ vec2_t v = InternalType<T>::zero_vec2();
+
+ scalar_t a = s_tileUpX[src0];
+ if (phaseInY == 0)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileUpX[src0 + (step + 1) * tileUpW];
+ v.y += a * (scalar_t)c_fu[step * up + 1];
+ }
+ }
+ else // (phaseInY == 1)
+ {
+ #pragma unroll
+ for (int step = 0; step < fuSize / up; step++)
+ {
+ v.x += a * (scalar_t)c_fu[step * up + 1];
+ v.y += a * (scalar_t)c_fu[step * up + 0];
+ a = s_tileUpX[src0 + (step + 1) * tileUpW];
+ }
+ }
+
+ int x = tileOutX * down + relUpX;
+ int y = tileOutY * down + relUpY0;
+ int signX = x + p.sOfs.x;
+ int signY = y + p.sOfs.y;
+ int signZ = blockIdx.z + p.blockZofs;
+ int signXb = signX >> 2;
+ index_t si0 = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ);
+ index_t si1 = si0 + p.sShape.x;
+
+ v.x *= (scalar_t)((float)up * (float)up * p.gain);
+ v.y *= (scalar_t)((float)up * (float)up * p.gain);
+
+ if (signWrite)
+ {
+ if (!enableWriteSkip)
+ {
+ // Determine and write signs.
+ int sx = __float_as_uint(v.x) >> 31 << 0;
+ int sy = __float_as_uint(v.y) >> 31 << 8;
+ if (sx) v.x *= p.slope;
+ if (sy) v.y *= p.slope;
+ if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType<T>::clamp(v.x, p.clamp); }
+ if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType<T>::clamp(v.y, p.clamp); }
+
+ if ((uint32_t)signXb < p.swLimit && signY >= minY)
+ {
+ // Combine signs.
+ int s = sx + sy;
+ s <<= signXo;
+ s |= __shfl_xor_sync(groupMask, s, 1);
+ s |= __shfl_xor_sync(groupMask, s, 2);
+
+ // Write signs.
+ if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); }
+ if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); }
+ }
+ }
+ else
+ {
+ // Determine and write signs.
+ if ((uint32_t)signXb < p.swLimit && signY >= minY)
+ {
+ int sx = __float_as_uint(v.x) >> 31 << 0;
+ int sy = __float_as_uint(v.y) >> 31 << 8;
+ if (sx) v.x *= p.slope;
+ if (sy) v.y *= p.slope;
+ if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType<T>::clamp(v.x, p.clamp); }
+ if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType<T>::clamp(v.y, p.clamp); }
+
+ // Combine signs.
+ int s = sx + sy;
+ s <<= signXo;
+ s |= __shfl_xor_sync(groupMask, s, 1);
+ s |= __shfl_xor_sync(groupMask, s, 2);
+
+ // Write signs.
+ if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); }
+ if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); }
+ }
+ else
+ {
+ // Just compute the values.
+ if (v.x < 0.f) v.x *= p.slope; v.x = InternalType<T>::clamp(v.x, p.clamp);
+ if (v.y < 0.f) v.y *= p.slope; v.y = InternalType<T>::clamp(v.y, p.clamp);
+ }
+ }
+ }
+ else if (signRead) // Read signs and apply.
+ {
+ if ((uint32_t)signXb < p.swLimit)
+ {
+ if ((uint32_t)(signY + 0) < p.sShape.y) { int s = p.s[si0] >> signXo; if (s & 1) v.x *= p.slope; if (s & 2) v.x = 0.f; }
+ if ((uint32_t)(signY + 1) < p.sShape.y) { int s = p.s[si1] >> signXo; if (s & 1) v.y *= p.slope; if (s & 2) v.y = 0.f; }
+ }
+ }
+ else // Forward pass with no sign write.
+ {
+ if (v.x < 0.f) v.x *= p.slope; v.x = InternalType<T>::clamp(v.x, p.clamp);
+ if (v.y < 0.f) v.y *= p.slope; v.y = InternalType<T>::clamp(v.y, p.clamp);
+ }
+
+ if (!downInline)
+ {
+ // Write into temporary buffer.
+ s_tileUpXY[dst] = v.x;
+ if (relUpY0 < tileUpH - 1)
+ s_tileUpXY[dst + tileUpW] = v.y;
+ }
+ else
+ {
+ // Write directly into output buffer.
+ if ((uint32_t)x < p.yShape.x)
+ {
+ int ymax = MIN(p.yShape.y, tileUpH + tileOutY * down);
+ index_t ofs = x * get_stride<index_t>(p.yStride.x) + y * get_stride<index_t>(p.yStride.y) + mapOfsOut;
+ if ((uint32_t)y + 0 < p.yShape.y) *((T*)((char*)p.y + ofs)) = (T)(v.x * (scalar_t)c_fd[0]);
+ if ((uint32_t)y + 1 < ymax) *((T*)((char*)p.y + ofs + get_stride<index_t>(p.yStride.y))) = (T)(v.y * (scalar_t)c_fd[0]);
+ }
+ }
+ }
+ }
+ }
+ else if (filterMode == MODE_FUSD || filterMode == MODE_FUFD)
+ {
+ // Full upsampling filter.
+
+ if (up == 2)
+ {
+ // 2 x 2-wide.
+ __syncthreads();
+ int minY = tileOutY ? (tileOutY - tileOutH) * down + tileUpH + p.sOfs.y : 0; // Skip already written signs.
+ for (int idx = threadIdx.x * 4; idx < tileUpW * tileUpH; idx += blockDim.x * 4)
+ {
+ int relUpX0, relUpY0;
+ fast_div_mod<tileUpW>(relUpX0, relUpY0, idx);
+ int relInX0 = CEIL_DIV(relUpX0 - phaseInX, up);
+ int relInY0 = CEIL_DIV(relUpY0 - phaseInY, up);
+ int src0 = relInX0 + tileInW * relInY0;
+ int tap0y = (relInY0 * up + phaseInY - relUpY0);
+
+ #define X_LOOP(TAPY, PX) \
+ for (int sx = 0; sx < fuSize / up; sx++) \
+ { \
+ v.x += a * (scalar_t)c_fu[(sx * up + (((PX) - 0) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; \
+ v.z += b * (scalar_t)c_fu[(sx * up + (((PX) - 0) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; if ((PX) == 0) { a = b; b = s_tileIn[src0 + 2 + sx + sy * tileInW]; } \
+ v.y += a * (scalar_t)c_fu[(sx * up + (((PX) - 1) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; \
+ v.w += b * (scalar_t)c_fu[(sx * up + (((PX) - 1) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; if ((PX) == 1) { a = b; b = s_tileIn[src0 + 2 + sx + sy * tileInW]; } \
+ }
+
+ vec4_t v = InternalType<T>::zero_vec4();
+ if (tap0y == 0 && phaseInX == 0)
+ #pragma unroll
+ for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1];
+ #pragma unroll
+ X_LOOP(0, 0) }
+ if (tap0y == 0 && phaseInX == 1)
+ #pragma unroll
+ for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1];
+ #pragma unroll
+ X_LOOP(0, 1) }
+ if (tap0y == 1 && phaseInX == 0)
+ #pragma unroll
+ for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1];
+ #pragma unroll
+ X_LOOP(1, 0) }
+ if (tap0y == 1 && phaseInX == 1)
+ #pragma unroll
+ for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1];
+ #pragma unroll
+ X_LOOP(1, 1) }
+
+ #undef X_LOOP
+
+ int x = tileOutX * down + relUpX0;
+ int y = tileOutY * down + relUpY0;
+ int signX = x + p.sOfs.x;
+ int signY = y + p.sOfs.y;
+ int signZ = blockIdx.z + p.blockZofs;
+ int signXb = signX >> 2;
+ index_t si = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ);
+
+ v.x *= (scalar_t)((float)up * (float)up * p.gain);
+ v.y *= (scalar_t)((float)up * (float)up * p.gain);
+ v.z *= (scalar_t)((float)up * (float)up * p.gain);
+ v.w *= (scalar_t)((float)up * (float)up * p.gain);
+
+ if (signWrite)
+ {
+ if (!enableWriteSkip)
+ {
+ // Determine and write signs.
+ int sx = __float_as_uint(v.x) >> 31;
+ int sy = __float_as_uint(v.y) >> 31;
+ int sz = __float_as_uint(v.z) >> 31;
+ int sw = __float_as_uint(v.w) >> 31;
+ if (sx) v.x *= p.slope; if (fabsf(v.x) > p.clamp) { sx = 2; v.x = InternalType<T>::clamp(v.x, p.clamp); }
+ if (sy) v.y *= p.slope; if (fabsf(v.y) > p.clamp) { sy = 2; v.y = InternalType<T>::clamp(v.y, p.clamp); }
+ if (sz) v.z *= p.slope; if (fabsf(v.z) > p.clamp) { sz = 2; v.z = InternalType<T>::clamp(v.z, p.clamp); }
+ if (sw) v.w *= p.slope; if (fabsf(v.w) > p.clamp) { sw = 2; v.w = InternalType<T>::clamp(v.w, p.clamp); }
+
+ if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY)
+ {
+ p.s[si] = sx + (sy << 2) + (sz << 4) + (sw << 6);
+ }
+ }
+ else
+ {
+ // Determine and write signs.
+ if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY)
+ {
+ int sx = __float_as_uint(v.x) >> 31;
+ int sy = __float_as_uint(v.y) >> 31;
+ int sz = __float_as_uint(v.z) >> 31;
+ int sw = __float_as_uint(v.w) >> 31;
+ if (sx) v.x *= p.slope; if (fabsf(v.x) > p.clamp) { sx = 2; v.x = InternalType<T>::clamp(v.x, p.clamp); }
+ if (sy) v.y *= p.slope; if (fabsf(v.y) > p.clamp) { sy = 2; v.y = InternalType<T>::clamp(v.y, p.clamp); }
+ if (sz) v.z *= p.slope; if (fabsf(v.z) > p.clamp) { sz = 2; v.z = InternalType<T>::clamp(v.z, p.clamp); }
+ if (sw) v.w *= p.slope; if (fabsf(v.w) > p.clamp) { sw = 2; v.w = InternalType<T>::clamp(v.w, p.clamp); }
+
+ p.s[si] = sx + (sy << 2) + (sz << 4) + (sw << 6);
+ }
+ else
+ {
+ // Just compute the values.
+ if (v.x < 0.f) v.x *= p.slope; v.x = InternalType<T>::clamp(v.x, p.clamp);
+ if (v.y < 0.f) v.y *= p.slope; v.y = InternalType<T>::clamp(v.y, p.clamp);
+ if (v.z < 0.f) v.z *= p.slope; v.z = InternalType<T>::clamp(v.z, p.clamp);
+ if (v.w < 0.f) v.w *= p.slope; v.w = InternalType<T>::clamp(v.w, p.clamp);
+ }
+ }
+ }
+ else if (signRead) // Read sign and apply.
+ {
+ if ((uint32_t)signY < p.sShape.y)
+ {
+ int s = 0;
+ if ((uint32_t)signXb < p.swLimit) s = p.s[si];
+ if ((uint32_t)signXb + 1 < p.swLimit) s |= p.s[si + 1] << 8;
+ s >>= (signX & 3) << 1;
+ if (s & 0x01) v.x *= p.slope; if (s & 0x02) v.x = 0.f;
+ if (s & 0x04) v.y *= p.slope; if (s & 0x08) v.y = 0.f;
+ if (s & 0x10) v.z *= p.slope; if (s & 0x20) v.z = 0.f;
+ if (s & 0x40) v.w *= p.slope; if (s & 0x80) v.w = 0.f;
+ }
+ }
+ else // Forward pass with no sign write.
+ {
+ if (v.x < 0.f) v.x *= p.slope; v.x = InternalType<T>::clamp(v.x, p.clamp);
+ if (v.y < 0.f) v.y *= p.slope; v.y = InternalType<T>::clamp(v.y, p.clamp);
+ if (v.z < 0.f) v.z *= p.slope; v.z = InternalType<T>::clamp(v.z, p.clamp);
+ if (v.w < 0.f) v.w *= p.slope; v.w = InternalType<T>::clamp(v.w, p.clamp);
+ }
+
+ s_tileUpXY[idx + 0] = v.x;
+ s_tileUpXY[idx + 1] = v.y;
+ s_tileUpXY[idx + 2] = v.z;
+ s_tileUpXY[idx + 3] = v.w;
+ }
+ }
+ else if (up == 1)
+ {
+ __syncthreads();
+ uint32_t groupMask = 15 << ((threadIdx.x & 31) & ~3);
+ int minY = tileOutY ? (tileOutY - tileOutH) * down + tileUpH : 0; // Skip already written signs.
+ for (int idx = threadIdx.x; idx < tileUpW * tileUpH; idx += blockDim.x)
+ {
+ int relUpX0, relUpY0;
+ fast_div_mod<tileUpW>(relUpX0, relUpY0, idx);
+ scalar_t v = s_tileIn[idx] * (scalar_t)c_fu[0]; // 1x1 filter.
+
+ int x = tileOutX * down + relUpX0;
+ int y = tileOutY * down + relUpY0;
+ int signX = x + p.sOfs.x;
+ int signY = y + p.sOfs.y;
+ int signZ = blockIdx.z + p.blockZofs;
+ int signXb = signX >> 2;
+ index_t si = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ);
+ v *= (scalar_t)((float)up * (float)up * p.gain);
+
+ if (signWrite)
+ {
+ if (!enableWriteSkip)
+ {
+ // Determine and write sign.
+ uint32_t s = 0;
+ uint32_t signXbit = (1u << signXo);
+ if (v < 0.f)
+ {
+ s = signXbit;
+ v *= p.slope;
+ }
+ if (fabsf(v) > p.clamp)
+ {
+ s = signXbit * 2;
+ v = InternalType<T>::clamp(v, p.clamp);
+ }
+ if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY)
+ {
+ s += __shfl_xor_sync(groupMask, s, 1); // Coalesce.
+ s += __shfl_xor_sync(groupMask, s, 2); // Coalesce.
+ p.s[si] = s; // Write.
+ }
+ }
+ else
+ {
+ // Determine and write sign.
+ if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY)
+ {
+ uint32_t s = 0;
+ uint32_t signXbit = (1u << signXo);
+ if (v < 0.f)
+ {
+ s = signXbit;
+ v *= p.slope;
+ }
+ if (fabsf(v) > p.clamp)
+ {
+ s = signXbit * 2;
+ v = InternalType<T>::clamp(v, p.clamp);
+ }
+ s += __shfl_xor_sync(groupMask, s, 1); // Coalesce.
+ s += __shfl_xor_sync(groupMask, s, 2); // Coalesce.
+ p.s[si] = s; // Write.
+ }
+ else
+ {
+ // Just compute the value.
+ if (v < 0.f) v *= p.slope;
+ v = InternalType<T>::clamp(v, p.clamp);
+ }
+ }
+ }
+ else if (signRead)
+ {
+ // Read sign and apply if within sign tensor bounds.
+ if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y)
+ {
+ int s = p.s[si];
+ s >>= signXo;
+ if (s & 1) v *= p.slope;
+ if (s & 2) v = 0.f;
+ }
+ }
+ else // Forward pass with no sign write.
+ {
+ if (v < 0.f) v *= p.slope;
+ v = InternalType<T>::clamp(v, p.clamp);
+ }
+
+ if (!downInline) // Write into temporary buffer.
+ s_tileUpXY[idx] = v;
+ else if ((uint32_t)x < p.yShape.x && (uint32_t)y < p.yShape.y) // Write directly into output buffer
+ *((T*)((char*)p.y + (x * get_stride<index_t>(p.yStride.x) + y * get_stride<index_t>(p.yStride.y) + mapOfsOut))) = (T)(v * (scalar_t)c_fd[0]);
+ }
+ }
+ }
+
+ // Downsampling.
+ if (filterMode == MODE_SUSD || filterMode == MODE_FUSD)
+ {
+ // Horizontal downsampling.
+ __syncthreads();
+ if (down == 4 && tileOutW % 4 == 0)
+ {
+ // Calculate 4 pixels at a time.
+ for (int idx = threadIdx.x * 4; idx < tileOutW * tileUpH; idx += blockDim.x * 4)
+ {
+ int relOutX0, relUpY;
+ fast_div_mod<tileOutW>(relOutX0, relUpY, idx);
+ int relUpX0 = relOutX0 * down;
+ int src0 = relUpY * tileUpW + relUpX0;
+ vec4_t v = InternalType<T>::zero_vec4();
+ #pragma unroll
+ for (int step = 0; step < fdSize; step++)
+ {
+ v.x += s_tileUpXY[src0 + 0 + step] * (scalar_t)c_fd[step];
+ v.y += s_tileUpXY[src0 + 4 + step] * (scalar_t)c_fd[step];
+ v.z += s_tileUpXY[src0 + 8 + step] * (scalar_t)c_fd[step];
+ v.w += s_tileUpXY[src0 + 12 + step] * (scalar_t)c_fd[step];
+ }
+ s_tileDownX[idx+0] = v.x;
+ s_tileDownX[idx+1] = v.y;
+ s_tileDownX[idx+2] = v.z;
+ s_tileDownX[idx+3] = v.w;
+ }
+ }
+ else if ((down == 2 || down == 4) && (tileOutW % 2 == 0))
+ {
+ // Calculate 2 pixels at a time.
+ for (int idx = threadIdx.x * 2; idx < tileOutW * tileUpH; idx += blockDim.x * 2)
+ {
+ int relOutX0, relUpY;
+ fast_div_mod<tileOutW>(relOutX0, relUpY, idx);
+ int relUpX0 = relOutX0 * down;
+ int src0 = relUpY * tileUpW + relUpX0;
+ vec2_t v = InternalType<T>::zero_vec2();
+ #pragma unroll
+ for (int step = 0; step < fdSize; step++)
+ {
+ v.x += s_tileUpXY[src0 + 0 + step] * (scalar_t)c_fd[step];
+ v.y += s_tileUpXY[src0 + down + step] * (scalar_t)c_fd[step];
+ }
+ s_tileDownX[idx+0] = v.x;
+ s_tileDownX[idx+1] = v.y;
+ }
+ }
+ else
+ {
+ // Calculate 1 pixel at a time.
+ for (int idx = threadIdx.x; idx < tileOutW * tileUpH; idx += blockDim.x)
+ {
+ int relOutX0, relUpY;
+ fast_div_mod<tileOutW>(relOutX0, relUpY, idx);
+ int relUpX0 = relOutX0 * down;
+ int src = relUpY * tileUpW + relUpX0;
+ scalar_t v = 0.f;
+ #pragma unroll
+ for (int step = 0; step < fdSize; step++)
+ v += s_tileUpXY[src + step] * (scalar_t)c_fd[step];
+ s_tileDownX[idx] = v;
+ }
+ }
+
+ // Vertical downsampling & store output tile.
+ __syncthreads();
+ for (int idx = threadIdx.x; idx < tileOutW * tileOutH; idx += blockDim.x)
+ {
+ int relOutX, relOutY0;
+ fast_div_mod<tileOutW>(relOutX, relOutY0, idx);
+ int relUpY0 = relOutY0 * down;
+ int src0 = relUpY0 * tileOutW + relOutX;
+ scalar_t v = 0;
+ #pragma unroll
+ for (int step = 0; step < fdSize; step++)
+ v += s_tileDownX[src0 + step * tileOutW] * (scalar_t)c_fd[step];
+
+ int outX = tileOutX + relOutX;
+ int outY = tileOutY + relOutY0;
+
+ if (outX < p.yShape.x & outY < p.yShape.y)
+ *((T*)((char*)p.y + (outX * get_stride<index_t>(p.yStride.x) + outY * get_stride<index_t>(p.yStride.y) + mapOfsOut))) = (T)v;
+ }
+ }
+ else if (filterMode == MODE_SUFD || filterMode == MODE_FUFD)
+ {
+ // Full downsampling filter.
+ if (down == 2)
+ {
+ // 2-wide.
+ __syncthreads();
+ for (int idx = threadIdx.x * 2; idx < tileOutW * tileOutH; idx += blockDim.x * 2)
+ {
+ int relOutX0, relOutY0;
+ fast_div_mod<tileOutW>(relOutX0, relOutY0, idx);
+ int relUpX0 = relOutX0 * down;
+ int relUpY0 = relOutY0 * down;
+ int src0 = relUpY0 * tileUpW + relUpX0;
+ vec2_t v = InternalType<T>::zero_vec2();
+ #pragma unroll
+ for (int sy = 0; sy < fdSize; sy++)
+ #pragma unroll
+ for (int sx = 0; sx < fdSize; sx++)
+ {
+ v.x += s_tileUpXY[src0 + 0 + sx + sy * tileUpW] * (scalar_t)c_fd[sx + sy * MAX_FILTER_SIZE];
+ v.y += s_tileUpXY[src0 + 2 + sx + sy * tileUpW] * (scalar_t)c_fd[sx + sy * MAX_FILTER_SIZE];
+ }
+
+ int outX = tileOutX + relOutX0;
+ int outY = tileOutY + relOutY0;
+ if ((uint32_t)outY < p.yShape.y)
+ {
+ index_t ofs = outX * get_stride<index_t>(p.yStride.x) + outY * get_stride<index_t>(p.yStride.y) + mapOfsOut;
+ if (outX + 0 < p.yShape.x) *((T*)((char*)p.y + ofs)) = (T)v.x;
+ if (outX + 1 < p.yShape.x) *((T*)((char*)p.y + ofs + get_stride<index_t>(p.yStride.x))) = (T)v.y;
+ }
+ }
+ }
+ else if (down == 1 && !downInline)
+ {
+ // Thread per pixel.
+ __syncthreads();
+ for (int idx = threadIdx.x; idx < tileOutW * tileOutH; idx += blockDim.x)
+ {
+ int relOutX0, relOutY0;
+ fast_div_mod(relOutX0, relOutY0, idx);
+ scalar_t v = s_tileUpXY[idx] * (scalar_t)c_fd[0]; // 1x1 filter.
+
+ int outX = tileOutX + relOutX0;
+ int outY = tileOutY + relOutY0;
+ if ((uint32_t)outX < p.yShape.x && (uint32_t)outY < p.yShape.y)
+ *((T*)((char*)p.y + (outX * get_stride(p.yStride.x) + outY * get_stride(p.yStride.y) + mapOfsOut))) = (T)v;
+ }
+ }
+ }
+
+ if (!enableXrep)
+ break;
+ }
+}
+
+//------------------------------------------------------------------------
+// Compute activation function and signs for upsampled data tensor, modifying data tensor in-place. Used for accelerating the generic variant.
+// Sign tensor is known to be contiguous, and p.x and p.s have the same z, w dimensions. 64-bit indexing is always used.
+
+template <class T, bool signWrite, bool signRead>
+static __global__ void filtered_lrelu_act_kernel(filtered_lrelu_act_kernel_params p)
+{
+ typedef typename InternalType<T>::scalar_t scalar_t;
+
+ // Indexing.
+ int32_t x = threadIdx.x + blockIdx.x * blockDim.x;
+ int32_t ymax = signWrite ? p.sShape.y : p.xShape.y;
+ int32_t qmax = p.xShape.z * p.xShape.w; // Combined minibatch*channel maximum index.
+
+ // Loop to accommodate oversized tensors.
+ for (int32_t q = blockIdx.z; q < qmax; q += gridDim.z)
+ for (int32_t y = blockIdx.y; y < ymax; y += gridDim.y)
+ {
+ // Extract z and w (channel, minibatch index).
+ int32_t w = q / p.xShape.z;
+ int32_t z = q - w * p.xShape.z;
+
+ // Choose behavior based on sign read/write mode.
+ if (signWrite)
+ {
+ // Process value if in p.x.
+ uint32_t s = 0;
+ if (x < p.xShape.x && y < p.xShape.y)
+ {
+ int64_t ix = x * p.xStride.x + y * p.xStride.y + z * p.xStride.z + w * p.xStride.w;
+ T* pv = ((T*)p.x) + ix;
+ scalar_t v = (scalar_t)(*pv);
+
+ // Gain, LReLU, clamp.
+ v *= p.gain;
+ if (v < 0.f)
+ {
+ v *= p.slope;
+ s = 1; // Sign.
+ }
+ if (fabsf(v) > p.clamp)
+ {
+ v = InternalType<T>::clamp(v, p.clamp);
+ s = 2; // Clamp.
+ }
+
+ *pv = (T)v; // Write value.
+ }
+
+ // Coalesce into threads 0 and 16 of warp.
+ uint32_t m = (threadIdx.x & 16) ? 0xffff0000u : 0x0000ffffu;
+ s <<= ((threadIdx.x & 15) << 1); // Shift into place.
+ s |= __shfl_xor_sync(m, s, 1); // Distribute.
+ s |= __shfl_xor_sync(m, s, 2);
+ s |= __shfl_xor_sync(m, s, 4);
+ s |= __shfl_xor_sync(m, s, 8);
+
+ // Write signs if leader and in p.s.
+ if (!(threadIdx.x & 15) && x < p.sShape.x) // y is always in.
+ {
+ uint64_t is = x + p.sShape.x * (y + (int64_t)p.sShape.y * q); // Contiguous.
+ ((uint32_t*)p.s)[is >> 4] = s;
+ }
+ }
+ else if (signRead)
+ {
+ // Process value if in p.x.
+ if (x < p.xShape.x) // y is always in.
+ {
+ int64_t ix = x * p.xStride.x + y * p.xStride.y + z * p.xStride.z + w * p.xStride.w;
+ T* pv = ((T*)p.x) + ix;
+ scalar_t v = (scalar_t)(*pv);
+ v *= p.gain;
+
+ // Apply sign buffer offset.
+ uint32_t sx = x + p.sOfs.x;
+ uint32_t sy = y + p.sOfs.y;
+
+ // Read and apply signs if we land inside valid region of sign buffer.
+ if (sx < p.sShape.x && sy < p.sShape.y)
+ {
+ uint64_t is = (sx >> 2) + (p.sShape.x >> 2) * (sy + (uint64_t)p.sShape.y * q); // Contiguous.
+ unsigned char s = p.s[is];
+ s >>= (sx & 3) << 1; // Shift into place.
+ if (s & 1) // Sign?
+ v *= p.slope;
+ if (s & 2) // Clamp?
+ v = 0.f;
+ }
+
+ *pv = (T)v; // Write value.
+ }
+ }
+ else
+ {
+ // Forward pass with no sign write. Process value if in p.x.
+ if (x < p.xShape.x) // y is always in.
+ {
+ int64_t ix = x * p.xStride.x + y * p.xStride.y + z * p.xStride.z + w * p.xStride.w;
+ T* pv = ((T*)p.x) + ix;
+ scalar_t v = (scalar_t)(*pv);
+ v *= p.gain;
+ if (v < 0.f)
+ v *= p.slope;
+ if (fabsf(v) > p.clamp)
+ v = InternalType<T>::clamp(v, p.clamp);
+ *pv = (T)v; // Write value.
+ }
+ }
+ }
+}
+
+template <class T, bool signWrite, bool signRead> void* choose_filtered_lrelu_act_kernel(void)
+{
+ return (void*)filtered_lrelu_act_kernel<T, signWrite, signRead>;
+}
+
+//------------------------------------------------------------------------
+// CUDA kernel selection.
+
+template <class T, class index_t, bool signWrite, bool signRead> filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB)
+{
+ filtered_lrelu_kernel_spec s = { 0 };
+
+ // Return the first matching kernel.
+#define CASE(SH, U, FU, D, FD, MODE, TW, TH, W, XR, WS) \
+ if (sharedKB >= SH) \
+ if ((p.fuShape.y == 0 && (MODE == MODE_SUSD || MODE == MODE_SUFD)) || (p.fuShape.y > 0 && (MODE == MODE_FUSD || MODE == MODE_FUFD))) \
+ if ((p.fdShape.y == 0 && (MODE == MODE_SUSD || MODE == MODE_FUSD)) || (p.fdShape.y > 0 && (MODE == MODE_SUFD || MODE == MODE_FUFD))) \
+ if (p.up == U && p.fuShape.x <= FU && p.fuShape.y <= FU && p.down == D && p.fdShape.x <= FD && p.fdShape.y <= FD) \
+ { \
+ static_assert((D*TW % 4) == 0, "down * tileWidth must be divisible by 4"); \
+ static_assert(FU % U == 0, "upscaling filter size must be multiple of upscaling factor"); \
+ static_assert(FD % D == 0, "downscaling filter size must be multiple of downscaling factor"); \
+ s.setup = (void*)setup_filters_kernel; \
+ s.exec = (void*)filtered_lrelu_kernel<T, index_t, SH, signWrite, signRead, MODE, U, FU, D, FD, TW, TH, W*32, !!XR, !!WS>; \
+ s.tileOut = make_int2(TW, TH); \
+ s.numWarps = W; \
+ s.xrep = XR; \
+ s.dynamicSharedKB = (SH == 48) ? 0 : SH; \
+ return s; \
+ }
+
+ // Launch parameters for various kernel specializations.
+ // Small filters must be listed before large filters, otherwise the kernel for the larger filter will always match first.
+ // Kernels that use more shared memory must be listed before those that use less, for the same reason.
+
+ CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/1,1, /*mode*/MODE_FUFD, /*tw,th,warps,xrep,wskip*/64, 178, 32, 0, 0) // 1t-upf1-downf1
+ CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/1,1, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/152, 95, 16, 0, 0) // 4t-ups2-downf1
+ CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/2,8, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/56, 22, 16, 0, 0) // 4t-upf1-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/2,8, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/56, 29, 16, 11, 0) // 4t-ups2-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/2,8, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/60, 28, 16, 0, 0) // 4t-upf2-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/2,8, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/56, 28, 16, 0, 0) // 4t-ups2-downf2
+ CASE(/*sharedKB*/48, /*up,fu*/4,16, /*down,fd*/2,8, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/56, 31, 16, 11, 0) // 4t-ups4-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/4,16, /*down,fd*/2,8, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/56, 36, 16, 0, 0) // 4t-ups4-downf2
+ CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/4,16, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/16, 22, 16, 12, 0) // 4t-ups2-downs4
+ CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/4,16, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/29, 15, 16, 0, 0) // 4t-upf2-downs4
+ CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/1,1, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/96, 150, 28, 0, 0) // 6t-ups2-downf1
+ CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/2,12, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/32, 35, 24, 0, 0) // 6t-upf1-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/2,12, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 46, 16, 10, 0) // 6t-ups2-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/2,12, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/58, 28, 24, 8, 0) // 6t-upf2-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/2,12, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/52, 28, 16, 0, 0) // 6t-ups2-downf2
+ CASE(/*sharedKB*/48, /*up,fu*/4,24, /*down,fd*/2,12, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 51, 16, 5, 0) // 6t-ups4-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/4,24, /*down,fd*/2,12, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/32, 56, 16, 6, 0) // 6t-ups4-downf2
+ CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/4,24, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/16, 18, 16, 12, 0) // 6t-ups2-downs4
+ CASE(/*sharedKB*/96, /*up,fu*/2,12, /*down,fd*/4,24, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/27, 31, 32, 6, 0) // 6t-upf2-downs4 96kB
+ CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/4,24, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/27, 13, 24, 0, 0) // 6t-upf2-downs4
+ CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/1,1, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/148, 89, 24, 0, 0) // 8t-ups2-downf1
+ CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/2,16, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/32, 31, 16, 5, 0) // 8t-upf1-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/2,16, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 41, 16, 9, 0) // 8t-ups2-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/2,16, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/56, 26, 24, 0, 0) // 8t-upf2-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/2,16, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/32, 40, 16, 0, 0) // 8t-ups2-downf2
+ CASE(/*sharedKB*/48, /*up,fu*/4,32, /*down,fd*/2,16, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 46, 24, 5, 0) // 8t-ups4-downs2
+ CASE(/*sharedKB*/48, /*up,fu*/4,32, /*down,fd*/2,16, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/32, 50, 16, 0, 0) // 8t-ups4-downf2
+ CASE(/*sharedKB*/96, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/24, 24, 32, 12, 1) // 8t-ups2-downs4 96kB
+ CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/16, 13, 16, 10, 1) // 8t-ups2-downs4
+ CASE(/*sharedKB*/96, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/25, 28, 28, 4, 0) // 8t-upf2-downs4 96kB
+ CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/25, 10, 24, 0, 0) // 8t-upf2-downs4
+
+ #undef CASE
+ return s; // No kernel found.
+}
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/filtered_lrelu.h b/torch_utils/ops/filtered_lrelu.h
new file mode 100644
index 0000000000000000000000000000000000000000..f2bfd1dd537909de9cd3b14765a482056391683b
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu.h
@@ -0,0 +1,94 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <cuda_runtime.h>
+
+//------------------------------------------------------------------------
+// CUDA kernel parameters.
+
+struct filtered_lrelu_kernel_params
+{
+ // These parameters decide which kernel to use.
+ int up; // upsampling ratio (1, 2, 4)
+ int down; // downsampling ratio (1, 2, 4)
+ int2 fuShape; // [size, 1] | [size, size]
+ int2 fdShape; // [size, 1] | [size, size]
+
+ int _dummy; // Alignment.
+
+ // Rest of the parameters.
+ const void* x; // Input tensor.
+ void* y; // Output tensor.
+ const void* b; // Bias tensor.
+ unsigned char* s; // Sign tensor in/out. NULL if unused.
+ const float* fu; // Upsampling filter.
+ const float* fd; // Downsampling filter.
+
+ int2 pad0; // Left/top padding.
+ float gain; // Additional gain factor.
+ float slope; // Leaky ReLU slope on negative side.
+ float clamp; // Clamp after nonlinearity.
+ int flip; // Filter kernel flip for gradient computation.
+
+ int tilesXdim; // Original number of horizontal output tiles.
+ int tilesXrep; // Number of horizontal tiles per CTA.
+ int blockZofs; // Block z offset to support large minibatch, channel dimensions.
+
+ int4 xShape; // [width, height, channel, batch]
+ int4 yShape; // [width, height, channel, batch]
+ int2 sShape; // [width, height] - width is in bytes. Contiguous. Zeros if unused.
+ int2 sOfs; // [ofs_x, ofs_y] - offset between upsampled data and sign tensor.
+ int swLimit; // Active width of sign tensor in bytes.
+
+ longlong4 xStride; // Strides of all tensors except signs, same component order as shapes.
+ longlong4 yStride; //
+ int64_t bStride; //
+ longlong3 fuStride; //
+ longlong3 fdStride; //
+};
+
+struct filtered_lrelu_act_kernel_params
+{
+ void* x; // Input/output, modified in-place.
+ unsigned char* s; // Sign tensor in/out. NULL if unused.
+
+ float gain; // Additional gain factor.
+ float slope; // Leaky ReLU slope on negative side.
+ float clamp; // Clamp after nonlinearity.
+
+ int4 xShape; // [width, height, channel, batch]
+ longlong4 xStride; // Input/output tensor strides, same order as in shape.
+ int2 sShape; // [width, height] - width is in elements. Contiguous. Zeros if unused.
+ int2 sOfs; // [ofs_x, ofs_y] - offset between upsampled data and sign tensor.
+};
+
+//------------------------------------------------------------------------
+// CUDA kernel specialization.
+
+struct filtered_lrelu_kernel_spec
+{
+ void* setup; // Function for filter kernel setup.
+ void* exec; // Function for main operation.
+ int2 tileOut; // Width/height of launch tile.
+ int numWarps; // Number of warps per thread block, determines launch block size.
+ int xrep; // For processing multiple horizontal tiles per thread block.
+ int dynamicSharedKB; // How much dynamic shared memory the exec kernel wants.
+};
+
+//------------------------------------------------------------------------
+// CUDA kernel selection.
+
+template <class T, class index_t, bool signWrite, bool signRead> filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB);
+template <class T, bool signWrite, bool signRead> void* choose_filtered_lrelu_act_kernel(void);
+template <bool signWrite, bool signRead> cudaError_t copy_filters(cudaStream_t stream);
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/filtered_lrelu.py b/torch_utils/ops/filtered_lrelu.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6d22247caae82ad806a16e967ef50c96be5d78a
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu.py
@@ -0,0 +1,377 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+import os
+import numpy as np
+import torch
+import warnings
+
+from .. import custom_ops
+from .. import misc
+from . import upfirdn2d
+from . import bias_act
+
+#----------------------------------------------------------------------------
+
+_plugin = None
+
+
+def _init():
+ global _plugin
+ if _plugin is None:
+ _plugin = custom_ops.get_plugin(
+ module_name='filtered_lrelu_plugin',
+ sources=[
+ 'filtered_lrelu.cpp', 'filtered_lrelu_wr.cu',
+ 'filtered_lrelu_rd.cu', 'filtered_lrelu_ns.cu'
+ ],
+ headers=['filtered_lrelu.h', 'filtered_lrelu.cu'],
+ source_dir=os.path.dirname(__file__),
+ extra_cuda_cflags=['--use_fast_math'],
+ )
+ return True
+
+
+def _get_filter_size(f):
+ if f is None:
+ return 1, 1
+ assert isinstance(f, torch.Tensor)
+ assert 1 <= f.ndim <= 2
+ return f.shape[-1], f.shape[0] # width, height
+
+
+def _parse_padding(padding):
+ if isinstance(padding, int):
+ padding = [padding, padding]
+ assert isinstance(padding, (list, tuple))
+ assert all(isinstance(x, (int, np.integer)) for x in padding)
+ padding = [int(x) for x in padding]
+ if len(padding) == 2:
+ px, py = padding
+ padding = [px, px, py, py]
+ px0, px1, py0, py1 = padding
+ return px0, px1, py0, py1
+
+
+#----------------------------------------------------------------------------
+
+
+def filtered_lrelu(x,
+ fu=None,
+ fd=None,
+ b=None,
+ up=1,
+ down=1,
+ padding=0,
+ gain=np.sqrt(2),
+ slope=0.2,
+ clamp=None,
+ flip_filter=False,
+ impl='cuda'):
+ r"""Filtered leaky ReLU for a batch of 2D images.
+
+ Performs the following sequence of operations for each channel:
+
+ 1. Add channel-specific bias if provided (`b`).
+
+ 2. Upsample the image by inserting N-1 zeros after each pixel (`up`).
+
+ 3. Pad the image with the specified number of zeros on each side (`padding`).
+ Negative padding corresponds to cropping the image.
+
+ 4. Convolve the image with the specified upsampling FIR filter (`fu`), shrinking it
+ so that the footprint of all output pixels lies within the input image.
+
+ 5. Multiply each value by the provided gain factor (`gain`).
+
+ 6. Apply leaky ReLU activation function to each value.
+
+ 7. Clamp each value between -clamp and +clamp, if `clamp` parameter is provided.
+
+ 8. Convolve the image with the specified downsampling FIR filter (`fd`), shrinking
+ it so that the footprint of all output pixels lies within the input image.
+
+ 9. Downsample the image by keeping every Nth pixel (`down`).
+
+ The fused op is considerably more efficient than performing the same calculation
+ using standard PyTorch ops. It supports gradients of arbitrary order.
+
+ Args:
+ x: Float32/float16/float64 input tensor of the shape
+ `[batch_size, num_channels, in_height, in_width]`.
+ fu: Float32 upsampling FIR filter of the shape
+ `[filter_height, filter_width]` (non-separable),
+ `[filter_taps]` (separable), or
+ `None` (identity).
+ fd: Float32 downsampling FIR filter of the shape
+ `[filter_height, filter_width]` (non-separable),
+ `[filter_taps]` (separable), or
+ `None` (identity).
+ b: Bias vector, or `None` to disable. Must be a 1D tensor of the same type
+ as `x`. The length of the vector must match the channel dimension of `x`.
+ up: Integer upsampling factor (default: 1).
+ down: Integer downsampling factor (default: 1).
+ padding: Padding with respect to the upsampled image. Can be a single number
+ or a list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]`
+ (default: 0).
+ gain: Overall scaling factor for signal magnitude (default: sqrt(2)).
+ slope: Slope on the negative side of leaky ReLU (default: 0.2).
+ clamp: Maximum magnitude for leaky ReLU output (default: None).
+ flip_filter: False = convolution, True = correlation (default: False).
+ impl: Implementation to use. Can be `'ref'` or `'cuda'` (default: `'cuda'`).
+
+ Returns:
+ Tensor of the shape `[batch_size, num_channels, out_height, out_width]`.
+ """
+ assert isinstance(x, torch.Tensor)
+ assert impl in ['ref', 'cuda']
+ if impl == 'cuda' and x.device.type == 'cuda' and _init():
+ return _filtered_lrelu_cuda(up=up,
+ down=down,
+ padding=padding,
+ gain=gain,
+ slope=slope,
+ clamp=clamp,
+ flip_filter=flip_filter).apply(
+ x, fu, fd, b, None, 0, 0)
+ return _filtered_lrelu_ref(x,
+ fu=fu,
+ fd=fd,
+ b=b,
+ up=up,
+ down=down,
+ padding=padding,
+ gain=gain,
+ slope=slope,
+ clamp=clamp,
+ flip_filter=flip_filter)
+
+
+#----------------------------------------------------------------------------
+
+
+@misc.profiled_function
+def _filtered_lrelu_ref(x,
+ fu=None,
+ fd=None,
+ b=None,
+ up=1,
+ down=1,
+ padding=0,
+ gain=np.sqrt(2),
+ slope=0.2,
+ clamp=None,
+ flip_filter=False):
+ """Slow and memory-inefficient reference implementation of `filtered_lrelu()` using
+ existing `upfirdn2n()` and `bias_act()` ops.
+ """
+ assert isinstance(x, torch.Tensor) and x.ndim == 4
+ fu_w, fu_h = _get_filter_size(fu)
+ fd_w, fd_h = _get_filter_size(fd)
+ if b is not None:
+ assert isinstance(b, torch.Tensor) and b.dtype == x.dtype
+ misc.assert_shape(b, [x.shape[1]])
+ assert isinstance(up, int) and up >= 1
+ assert isinstance(down, int) and down >= 1
+ px0, px1, py0, py1 = _parse_padding(padding)
+ assert gain == float(gain) and gain > 0
+ assert slope == float(slope) and slope >= 0
+ assert clamp is None or (clamp == float(clamp) and clamp >= 0)
+
+ # Calculate output size.
+ batch_size, channels, in_h, in_w = x.shape
+ in_dtype = x.dtype
+ out_w = (in_w * up + (px0 + px1) - (fu_w - 1) - (fd_w - 1) +
+ (down - 1)) // down
+ out_h = (in_h * up + (py0 + py1) - (fu_h - 1) - (fd_h - 1) +
+ (down - 1)) // down
+
+ # Compute using existing ops.
+ x = bias_act.bias_act(x=x, b=b) # Apply bias.
+ x = upfirdn2d.upfirdn2d(x=x,
+ f=fu,
+ up=up,
+ padding=[px0, px1, py0, py1],
+ gain=up**2,
+ flip_filter=flip_filter) # Upsample.
+ x = bias_act.bias_act(x=x,
+ act='lrelu',
+ alpha=slope,
+ gain=gain,
+ clamp=clamp) # Bias, leaky ReLU, clamp.
+ x = upfirdn2d.upfirdn2d(x=x, f=fd, down=down,
+ flip_filter=flip_filter) # Downsample.
+
+ # Check output shape & dtype.
+ misc.assert_shape(x, [batch_size, channels, out_h, out_w])
+ assert x.dtype == in_dtype
+ return x
+
+
+#----------------------------------------------------------------------------
+
+_filtered_lrelu_cuda_cache = dict()
+
+
+def _filtered_lrelu_cuda(up=1,
+ down=1,
+ padding=0,
+ gain=np.sqrt(2),
+ slope=0.2,
+ clamp=None,
+ flip_filter=False):
+ """Fast CUDA implementation of `filtered_lrelu()` using custom ops.
+ """
+ assert isinstance(up, int) and up >= 1
+ assert isinstance(down, int) and down >= 1
+ px0, px1, py0, py1 = _parse_padding(padding)
+ assert gain == float(gain) and gain > 0
+ gain = float(gain)
+ assert slope == float(slope) and slope >= 0
+ slope = float(slope)
+ assert clamp is None or (clamp == float(clamp) and clamp >= 0)
+ clamp = float(clamp if clamp is not None else 'inf')
+
+ # Lookup from cache.
+ key = (up, down, px0, px1, py0, py1, gain, slope, clamp, flip_filter)
+ if key in _filtered_lrelu_cuda_cache:
+ return _filtered_lrelu_cuda_cache[key]
+
+ # Forward op.
+ class FilteredLReluCuda(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, x, fu, fd, b, si, sx, sy): # pylint: disable=arguments-differ
+ assert isinstance(x, torch.Tensor) and x.ndim == 4
+
+ # Replace empty up/downsample kernels with full 1x1 kernels (faster than separable).
+ if fu is None:
+ fu = torch.ones([1, 1], dtype=torch.float32, device=x.device)
+ if fd is None:
+ fd = torch.ones([1, 1], dtype=torch.float32, device=x.device)
+ assert 1 <= fu.ndim <= 2
+ assert 1 <= fd.ndim <= 2
+
+ # Replace separable 1x1 kernels with full 1x1 kernels when scale factor is 1.
+ if up == 1 and fu.ndim == 1 and fu.shape[0] == 1:
+ fu = fu.square()[None]
+ if down == 1 and fd.ndim == 1 and fd.shape[0] == 1:
+ fd = fd.square()[None]
+
+ # Missing sign input tensor.
+ if si is None:
+ si = torch.empty([0])
+
+ # Missing bias tensor.
+ if b is None:
+ b = torch.zeros([x.shape[1]], dtype=x.dtype, device=x.device)
+
+ # Construct internal sign tensor only if gradients are needed.
+ write_signs = (si.numel() == 0) and (x.requires_grad
+ or b.requires_grad)
+
+ # Warn if input storage strides are not in decreasing order due to e.g. channels-last layout.
+ strides = [x.stride(i) for i in range(x.ndim) if x.size(i) > 1]
+ if any(a < b for a, b in zip(strides[:-1], strides[1:])):
+ warnings.warn(
+ "low-performance memory layout detected in filtered_lrelu input",
+ RuntimeWarning)
+
+ # Call C++/Cuda plugin if datatype is supported.
+ if x.dtype in [torch.float16, torch.float32]:
+ if torch.cuda.current_stream(
+ x.device) != torch.cuda.default_stream(x.device):
+ warnings.warn(
+ "filtered_lrelu called with non-default cuda stream but concurrent execution is not supported",
+ RuntimeWarning)
+ y, so, return_code = _plugin.filtered_lrelu(
+ x, fu, fd, b, si, up, down, px0, px1, py0, py1, sx, sy,
+ gain, slope, clamp, flip_filter, write_signs)
+ else:
+ return_code = -1
+
+ # No Cuda kernel found? Fall back to generic implementation. Still more memory efficient than the reference implementation because
+ # only the bit-packed sign tensor is retained for gradient computation.
+ if return_code < 0:
+ warnings.warn(
+ "filtered_lrelu called with parameters that have no optimized CUDA kernel, using generic fallback",
+ RuntimeWarning)
+
+ y = x.add(b.unsqueeze(-1).unsqueeze(-1)) # Add bias.
+ y = upfirdn2d.upfirdn2d(x=y,
+ f=fu,
+ up=up,
+ padding=[px0, px1, py0, py1],
+ gain=up**2,
+ flip_filter=flip_filter) # Upsample.
+ so = _plugin.filtered_lrelu_act_(
+ y, si, sx, sy, gain, slope, clamp, write_signs
+ ) # Activation function and sign handling. Modifies y in-place.
+ y = upfirdn2d.upfirdn2d(x=y,
+ f=fd,
+ down=down,
+ flip_filter=flip_filter) # Downsample.
+
+ # Prepare for gradient computation.
+ ctx.save_for_backward(fu, fd, (si if si.numel() else so))
+ ctx.x_shape = x.shape
+ ctx.y_shape = y.shape
+ ctx.s_ofs = sx, sy
+ return y
+
+ @staticmethod
+ def backward(ctx, dy): # pylint: disable=arguments-differ
+ fu, fd, si = ctx.saved_tensors
+ _, _, xh, xw = ctx.x_shape
+ _, _, yh, yw = ctx.y_shape
+ sx, sy = ctx.s_ofs
+ dx = None # 0
+ dfu = None
+ assert not ctx.needs_input_grad[1]
+ dfd = None
+ assert not ctx.needs_input_grad[2]
+ db = None # 3
+ dsi = None
+ assert not ctx.needs_input_grad[4]
+ dsx = None
+ assert not ctx.needs_input_grad[5]
+ dsy = None
+ assert not ctx.needs_input_grad[6]
+
+ if ctx.needs_input_grad[0] or ctx.needs_input_grad[3]:
+ pp = [
+ (fu.shape[-1] - 1) + (fd.shape[-1] - 1) - px0,
+ xw * up - yw * down + px0 - (up - 1),
+ (fu.shape[0] - 1) + (fd.shape[0] - 1) - py0,
+ xh * up - yh * down + py0 - (up - 1),
+ ]
+ gg = gain * (up**2) / (down**2)
+ ff = (not flip_filter)
+ sx = sx - (fu.shape[-1] - 1) + px0
+ sy = sy - (fu.shape[0] - 1) + py0
+ dx = _filtered_lrelu_cuda(up=down,
+ down=up,
+ padding=pp,
+ gain=gg,
+ slope=slope,
+ clamp=None,
+ flip_filter=ff).apply(
+ dy, fd, fu, None, si, sx, sy)
+
+ if ctx.needs_input_grad[3]:
+ db = dx.sum([0, 2, 3])
+
+ return dx, dfu, dfd, db, dsi, dsx, dsy
+
+ # Add to cache.
+ _filtered_lrelu_cuda_cache[key] = FilteredLReluCuda
+ return FilteredLReluCuda
+
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/ops/filtered_lrelu_ns.cu b/torch_utils/ops/filtered_lrelu_ns.cu
new file mode 100644
index 0000000000000000000000000000000000000000..8a3eae46215c3babea2c54e3ae255b05f4d777af
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu_ns.cu
@@ -0,0 +1,31 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include "filtered_lrelu.cu"
+
+// Template/kernel specializations for no signs mode (no gradients required).
+
+// Full op, 32-bit indexing.
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<c10::Half, int32_t, false, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<float, int32_t, false, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+
+// Full op, 64-bit indexing.
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<c10::Half, int64_t, false, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<float, int64_t, false, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+
+// Activation/signs only for generic variant. 64-bit indexing.
+template void* choose_filtered_lrelu_act_kernel<c10::Half, false, false>(void);
+template void* choose_filtered_lrelu_act_kernel<float, false, false>(void);
+template void* choose_filtered_lrelu_act_kernel<double, false, false>(void);
+
+// Copy filters to constant memory.
+template cudaError_t copy_filters<false, false>(cudaStream_t stream);
diff --git a/torch_utils/ops/filtered_lrelu_rd.cu b/torch_utils/ops/filtered_lrelu_rd.cu
new file mode 100644
index 0000000000000000000000000000000000000000..3cd43ec0648d3db05e5808299fc0ee318e5ceaa6
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu_rd.cu
@@ -0,0 +1,31 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include "filtered_lrelu.cu"
+
+// Template/kernel specializations for sign read mode.
+
+// Full op, 32-bit indexing.
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<c10::Half, int32_t, false, true>(const filtered_lrelu_kernel_params& p, int sharedKB);
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<float, int32_t, false, true>(const filtered_lrelu_kernel_params& p, int sharedKB);
+
+// Full op, 64-bit indexing.
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<c10::Half, int64_t, false, true>(const filtered_lrelu_kernel_params& p, int sharedKB);
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<float, int64_t, false, true>(const filtered_lrelu_kernel_params& p, int sharedKB);
+
+// Activation/signs only for generic variant. 64-bit indexing.
+template void* choose_filtered_lrelu_act_kernel<c10::Half, false, true>(void);
+template void* choose_filtered_lrelu_act_kernel<float, false, true>(void);
+template void* choose_filtered_lrelu_act_kernel<double, false, true>(void);
+
+// Copy filters to constant memory.
+template cudaError_t copy_filters<false, true>(cudaStream_t stream);
diff --git a/torch_utils/ops/filtered_lrelu_wr.cu b/torch_utils/ops/filtered_lrelu_wr.cu
new file mode 100644
index 0000000000000000000000000000000000000000..bc2fa06912eb703dd77ca64533208428bdf373ac
--- /dev/null
+++ b/torch_utils/ops/filtered_lrelu_wr.cu
@@ -0,0 +1,31 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include "filtered_lrelu.cu"
+
+// Template/kernel specializations for sign write mode.
+
+// Full op, 32-bit indexing.
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<c10::Half, int32_t, true, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<float, int32_t, true, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+
+// Full op, 64-bit indexing.
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<c10::Half, int64_t, true, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel<float, int64_t, true, false>(const filtered_lrelu_kernel_params& p, int sharedKB);
+
+// Activation/signs only for generic variant. 64-bit indexing.
+template void* choose_filtered_lrelu_act_kernel<c10::Half, true, false>(void);
+template void* choose_filtered_lrelu_act_kernel<float, true, false>(void);
+template void* choose_filtered_lrelu_act_kernel<double, true, false>(void);
+
+// Copy filters to constant memory.
+template cudaError_t copy_filters<true, false>(cudaStream_t stream);
diff --git a/torch_utils/ops/fma.py b/torch_utils/ops/fma.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5feae8693cc3cd36d57f77f0f5d16c4dc6b990d
--- /dev/null
+++ b/torch_utils/ops/fma.py
@@ -0,0 +1,70 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Fused multiply-add, with slightly faster gradients than `torch.addcmul()`."""
+
+import torch
+
+#----------------------------------------------------------------------------
+
+
+def fma(a, b, c): # => a * b + c
+ return _FusedMultiplyAdd.apply(a, b, c)
+
+
+#----------------------------------------------------------------------------
+
+
+class _FusedMultiplyAdd(torch.autograd.Function): # a * b + c
+ @staticmethod
+ def forward(ctx, a, b, c): # pylint: disable=arguments-differ
+ out = torch.addcmul(c, a, b)
+ ctx.save_for_backward(a, b)
+ ctx.c_shape = c.shape
+ return out
+
+ @staticmethod
+ def backward(ctx, dout): # pylint: disable=arguments-differ
+ a, b = ctx.saved_tensors
+ c_shape = ctx.c_shape
+ da = None
+ db = None
+ dc = None
+
+ if ctx.needs_input_grad[0]:
+ da = _unbroadcast(dout * b, a.shape)
+
+ if ctx.needs_input_grad[1]:
+ db = _unbroadcast(dout * a, b.shape)
+
+ if ctx.needs_input_grad[2]:
+ dc = _unbroadcast(dout, c_shape)
+
+ return da, db, dc
+
+
+#----------------------------------------------------------------------------
+
+
+def _unbroadcast(x, shape):
+ extra_dims = x.ndim - len(shape)
+ assert extra_dims >= 0
+ dim = [
+ i for i in range(x.ndim)
+ if x.shape[i] > 1 and (i < extra_dims or shape[i - extra_dims] == 1)
+ ]
+ if len(dim):
+ x = x.sum(dim=dim, keepdim=True)
+ if extra_dims:
+ x = x.reshape(-1, *x.shape[extra_dims + 1:])
+ assert x.shape == shape
+ return x
+
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/ops/grid_sample_gradfix.py b/torch_utils/ops/grid_sample_gradfix.py
new file mode 100644
index 0000000000000000000000000000000000000000..14598b5ddcc76353ac4d2c0792caea5e03eb25d9
--- /dev/null
+++ b/torch_utils/ops/grid_sample_gradfix.py
@@ -0,0 +1,96 @@
+# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+"""Custom replacement for `torch.nn.functional.grid_sample` that
+supports arbitrarily high order gradients between the input and output.
+Only works on 2D images and assumes
+`mode='bilinear'`, `padding_mode='zeros'`, `align_corners=False`."""
+
+import torch
+
+# pylint: disable=redefined-builtin
+# pylint: disable=arguments-differ
+# pylint: disable=protected-access
+
+#----------------------------------------------------------------------------
+
+enabled = False # Enable the custom op by setting this to true.
+
+#----------------------------------------------------------------------------
+
+
+def grid_sample(input, grid):
+ if _should_use_custom_op():
+ return _GridSample2dForward.apply(input, grid)
+ return torch.nn.functional.grid_sample(input=input,
+ grid=grid,
+ mode='bilinear',
+ padding_mode='zeros',
+ align_corners=False)
+
+
+#----------------------------------------------------------------------------
+
+
+def _should_use_custom_op():
+ return enabled
+
+
+#----------------------------------------------------------------------------
+
+
+class _GridSample2dForward(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, input, grid):
+ assert input.ndim == 4
+ assert grid.ndim == 4
+ output = torch.nn.functional.grid_sample(input=input,
+ grid=grid,
+ mode='bilinear',
+ padding_mode='zeros',
+ align_corners=False)
+ ctx.save_for_backward(input, grid)
+ return output
+
+ @staticmethod
+ def backward(ctx, grad_output):
+ input, grid = ctx.saved_tensors
+ grad_input, grad_grid = _GridSample2dBackward.apply(
+ grad_output, input, grid)
+ return grad_input, grad_grid
+
+
+#----------------------------------------------------------------------------
+
+
+class _GridSample2dBackward(torch.autograd.Function):
+ @staticmethod
+ def forward(ctx, grad_output, input, grid):
+ op = torch._C._jit_get_operation('aten::grid_sampler_2d_backward')
+ grad_input, grad_grid = op(grad_output, input, grid, 0, 0, False)
+ ctx.save_for_backward(grid)
+ return grad_input, grad_grid
+
+ @staticmethod
+ def backward(ctx, grad2_grad_input, grad2_grad_grid):
+ _ = grad2_grad_grid # unused
+ grid, = ctx.saved_tensors
+ grad2_grad_output = None
+ grad2_input = None
+ grad2_grid = None
+
+ if ctx.needs_input_grad[0]:
+ grad2_grad_output = _GridSample2dForward.apply(
+ grad2_grad_input, grid)
+
+ assert not ctx.needs_input_grad[2]
+ return grad2_grad_output, grad2_input, grad2_grid
+
+
+#----------------------------------------------------------------------------
diff --git a/torch_utils/ops/upfirdn2d.cpp b/torch_utils/ops/upfirdn2d.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..c1769c3cbe4dd04f76f9ccef726680720e6f39c8
--- /dev/null
+++ b/torch_utils/ops/upfirdn2d.cpp
@@ -0,0 +1,111 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <torch/extension.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/cuda/CUDAGuard.h>
+#include "upfirdn2d.h"
+
+//------------------------------------------------------------------------
+
+static torch::Tensor upfirdn2d(torch::Tensor x, torch::Tensor f, int upx, int upy, int downx, int downy, int padx0, int padx1, int pady0, int pady1, bool flip, float gain)
+{
+ // Validate arguments.
+ TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+ TORCH_CHECK(f.device() == x.device(), "f must reside on the same device as x");
+ TORCH_CHECK(f.dtype() == torch::kFloat, "f must be float32");
+ TORCH_CHECK(x.numel() <= INT_MAX, "x is too large");
+ TORCH_CHECK(f.numel() <= INT_MAX, "f is too large");
+ TORCH_CHECK(x.numel() > 0, "x has zero size");
+ TORCH_CHECK(f.numel() > 0, "f has zero size");
+ TORCH_CHECK(x.dim() == 4, "x must be rank 4");
+ TORCH_CHECK(f.dim() == 2, "f must be rank 2");
+ TORCH_CHECK((x.size(0)-1)*x.stride(0) + (x.size(1)-1)*x.stride(1) + (x.size(2)-1)*x.stride(2) + (x.size(3)-1)*x.stride(3) <= INT_MAX, "x memory footprint is too large");
+ TORCH_CHECK(f.size(0) >= 1 && f.size(1) >= 1, "f must be at least 1x1");
+ TORCH_CHECK(upx >= 1 && upy >= 1, "upsampling factor must be at least 1");
+ TORCH_CHECK(downx >= 1 && downy >= 1, "downsampling factor must be at least 1");
+
+ // Create output tensor.
+ const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+ int outW = ((int)x.size(3) * upx + padx0 + padx1 - (int)f.size(1) + downx) / downx;
+ int outH = ((int)x.size(2) * upy + pady0 + pady1 - (int)f.size(0) + downy) / downy;
+ TORCH_CHECK(outW >= 1 && outH >= 1, "output must be at least 1x1");
+ torch::Tensor y = torch::empty({x.size(0), x.size(1), outH, outW}, x.options(), x.suggest_memory_format());
+ TORCH_CHECK(y.numel() <= INT_MAX, "output is too large");
+ TORCH_CHECK((y.size(0)-1)*y.stride(0) + (y.size(1)-1)*y.stride(1) + (y.size(2)-1)*y.stride(2) + (y.size(3)-1)*y.stride(3) <= INT_MAX, "output memory footprint is too large");
+
+ // Initialize CUDA kernel parameters.
+ upfirdn2d_kernel_params p;
+ p.x = x.data_ptr();
+ p.f = f.data_ptr();
+ p.y = y.data_ptr();
+ p.up = make_int2(upx, upy);
+ p.down = make_int2(downx, downy);
+ p.pad0 = make_int2(padx0, pady0);
+ p.flip = (flip) ? 1 : 0;
+ p.gain = gain;
+ p.inSize = make_int4((int)x.size(3), (int)x.size(2), (int)x.size(1), (int)x.size(0));
+ p.inStride = make_int4((int)x.stride(3), (int)x.stride(2), (int)x.stride(1), (int)x.stride(0));
+ p.filterSize = make_int2((int)f.size(1), (int)f.size(0));
+ p.filterStride = make_int2((int)f.stride(1), (int)f.stride(0));
+ p.outSize = make_int4((int)y.size(3), (int)y.size(2), (int)y.size(1), (int)y.size(0));
+ p.outStride = make_int4((int)y.stride(3), (int)y.stride(2), (int)y.stride(1), (int)y.stride(0));
+ p.sizeMajor = (p.inStride.z == 1) ? p.inSize.w : p.inSize.w * p.inSize.z;
+ p.sizeMinor = (p.inStride.z == 1) ? p.inSize.z : 1;
+
+ // Choose CUDA kernel.
+ upfirdn2d_kernel_spec spec;
+ AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "upfirdn2d_cuda", [&]
+ {
+ spec = choose_upfirdn2d_kernel<scalar_t>(p);
+ });
+
+ // Set looping options.
+ p.loopMajor = (p.sizeMajor - 1) / 16384 + 1;
+ p.loopMinor = spec.loopMinor;
+ p.loopX = spec.loopX;
+ p.launchMinor = (p.sizeMinor - 1) / p.loopMinor + 1;
+ p.launchMajor = (p.sizeMajor - 1) / p.loopMajor + 1;
+
+ // Compute grid size.
+ dim3 blockSize, gridSize;
+ if (spec.tileOutW < 0) // large
+ {
+ blockSize = dim3(4, 32, 1);
+ gridSize = dim3(
+ ((p.outSize.y - 1) / blockSize.x + 1) * p.launchMinor,
+ (p.outSize.x - 1) / (blockSize.y * p.loopX) + 1,
+ p.launchMajor);
+ }
+ else // small
+ {
+ blockSize = dim3(256, 1, 1);
+ gridSize = dim3(
+ ((p.outSize.y - 1) / spec.tileOutH + 1) * p.launchMinor,
+ (p.outSize.x - 1) / (spec.tileOutW * p.loopX) + 1,
+ p.launchMajor);
+ }
+
+ // Launch CUDA kernel.
+ void* args[] = {&p};
+ AT_CUDA_CHECK(cudaLaunchKernel(spec.kernel, gridSize, blockSize, args, 0, at::cuda::getCurrentCUDAStream()));
+ return y;
+}
+
+//------------------------------------------------------------------------
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m)
+{
+ m.def("upfirdn2d", &upfirdn2d);
+}
+
+//------------------------------------------------------------------------
diff --git a/torch_utils/ops/upfirdn2d.cu b/torch_utils/ops/upfirdn2d.cu
new file mode 100644
index 0000000000000000000000000000000000000000..7d182d7b86a9058d0c007b13716d6e7f08207f42
--- /dev/null
+++ b/torch_utils/ops/upfirdn2d.cu
@@ -0,0 +1,388 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <c10/util/Half.h>
+#include "upfirdn2d.h"
+
+//------------------------------------------------------------------------
+// Helpers.
+
+template <class T> struct InternalType;
+template <> struct InternalType<double> { typedef double scalar_t; };
+template <> struct InternalType<float> { typedef float scalar_t; };
+template <> struct InternalType<c10::Half> { typedef float scalar_t; };
+
+static __device__ __forceinline__ int floor_div(int a, int b)
+{
+ int t = 1 - a / b;
+ return (a + t * b) / b - t;
+}
+
+//------------------------------------------------------------------------
+// Generic CUDA implementation for large filters.
+
+template <class T> static __global__ void upfirdn2d_kernel_large(upfirdn2d_kernel_params p)
+{
+ typedef typename InternalType<T>::scalar_t scalar_t;
+
+ // Calculate thread index.
+ int minorBase = blockIdx.x * blockDim.x + threadIdx.x;
+ int outY = minorBase / p.launchMinor;
+ minorBase -= outY * p.launchMinor;
+ int outXBase = blockIdx.y * p.loopX * blockDim.y + threadIdx.y;
+ int majorBase = blockIdx.z * p.loopMajor;
+ if (outXBase >= p.outSize.x | outY >= p.outSize.y | majorBase >= p.sizeMajor)
+ return;
+
+ // Setup Y receptive field.
+ int midY = outY * p.down.y + p.up.y - 1 - p.pad0.y;
+ int inY = min(max(floor_div(midY, p.up.y), 0), p.inSize.y);
+ int h = min(max(floor_div(midY + p.filterSize.y, p.up.y), 0), p.inSize.y) - inY;
+ int filterY = midY + p.filterSize.y - (inY + 1) * p.up.y;
+ if (p.flip)
+ filterY = p.filterSize.y - 1 - filterY;
+
+ // Loop over major, minor, and X.
+ for (int majorIdx = 0, major = majorBase; majorIdx < p.loopMajor & major < p.sizeMajor; majorIdx++, major++)
+ for (int minorIdx = 0, minor = minorBase; minorIdx < p.loopMinor & minor < p.sizeMinor; minorIdx++, minor += p.launchMinor)
+ {
+ int nc = major * p.sizeMinor + minor;
+ int n = nc / p.inSize.z;
+ int c = nc - n * p.inSize.z;
+ for (int loopX = 0, outX = outXBase; loopX < p.loopX & outX < p.outSize.x; loopX++, outX += blockDim.y)
+ {
+ // Setup X receptive field.
+ int midX = outX * p.down.x + p.up.x - 1 - p.pad0.x;
+ int inX = min(max(floor_div(midX, p.up.x), 0), p.inSize.x);
+ int w = min(max(floor_div(midX + p.filterSize.x, p.up.x), 0), p.inSize.x) - inX;
+ int filterX = midX + p.filterSize.x - (inX + 1) * p.up.x;
+ if (p.flip)
+ filterX = p.filterSize.x - 1 - filterX;
+
+ // Initialize pointers.
+ const T* xp = &((const T*)p.x)[inX * p.inStride.x + inY * p.inStride.y + c * p.inStride.z + n * p.inStride.w];
+ const float* fp = &p.f[filterX * p.filterStride.x + filterY * p.filterStride.y];
+ int filterStepX = ((p.flip) ? p.up.x : -p.up.x) * p.filterStride.x;
+ int filterStepY = ((p.flip) ? p.up.y : -p.up.y) * p.filterStride.y;
+
+ // Inner loop.
+ scalar_t v = 0;
+ for (int y = 0; y < h; y++)
+ {
+ for (int x = 0; x < w; x++)
+ {
+ v += (scalar_t)(*xp) * (scalar_t)(*fp);
+ xp += p.inStride.x;
+ fp += filterStepX;
+ }
+ xp += p.inStride.y - w * p.inStride.x;
+ fp += filterStepY - w * filterStepX;
+ }
+
+ // Store result.
+ v *= p.gain;
+ ((T*)p.y)[outX * p.outStride.x + outY * p.outStride.y + c * p.outStride.z + n * p.outStride.w] = (T)v;
+ }
+ }
+}
+
+//------------------------------------------------------------------------
+// Specialized CUDA implementation for small filters.
+
+template <class T, int upx, int upy, int downx, int downy, int tileOutW, int tileOutH, int loopMinor, int filterH, int filterW>
+static __global__ void upfirdn2d_kernel_small(upfirdn2d_kernel_params p)
+{
+ typedef typename InternalType<T>::scalar_t scalar_t;
+ const int tileInW = ((tileOutW - 1) * downx + filterW - 1) / upx + 1;
+ const int tileInH = ((tileOutH - 1) * downy + filterH - 1) / upy + 1;
+ __shared__ volatile scalar_t sf[filterH][filterW];
+ __shared__ volatile scalar_t sx[tileInH][tileInW][loopMinor];
+
+ // Calculate tile index.
+ int minorBase = blockIdx.x;
+ int tileOutY = minorBase / p.launchMinor;
+ minorBase -= tileOutY * p.launchMinor;
+ minorBase *= loopMinor;
+ tileOutY *= tileOutH;
+ int tileOutXBase = blockIdx.y * p.loopX * tileOutW;
+ int majorBase = blockIdx.z * p.loopMajor;
+ if (tileOutXBase >= p.outSize.x | tileOutY >= p.outSize.y | majorBase >= p.sizeMajor)
+ return;
+
+ // Load filter (flipped).
+ for (int tapIdx = threadIdx.x; tapIdx < filterH * filterW; tapIdx += blockDim.x)
+ {
+ int fy = tapIdx / filterW;
+ int fx = tapIdx - fy * filterW;
+ scalar_t v = 0;
+ if (fx < p.filterSize.x & fy < p.filterSize.y)
+ {
+ int ffx = (p.flip) ? fx : p.filterSize.x - 1 - fx;
+ int ffy = (p.flip) ? fy : p.filterSize.y - 1 - fy;
+ v = (scalar_t)p.f[ffx * p.filterStride.x + ffy * p.filterStride.y];
+ }
+ sf[fy][fx] = v;
+ }
+
+ // Loop over major and X.
+ for (int majorIdx = 0, major = majorBase; majorIdx < p.loopMajor & major < p.sizeMajor; majorIdx++, major++)
+ {
+ int baseNC = major * p.sizeMinor + minorBase;
+ int n = baseNC / p.inSize.z;
+ int baseC = baseNC - n * p.inSize.z;
+ for (int loopX = 0, tileOutX = tileOutXBase; loopX < p.loopX & tileOutX < p.outSize.x; loopX++, tileOutX += tileOutW)
+ {
+ // Load input pixels.
+ int tileMidX = tileOutX * downx + upx - 1 - p.pad0.x;
+ int tileMidY = tileOutY * downy + upy - 1 - p.pad0.y;
+ int tileInX = floor_div(tileMidX, upx);
+ int tileInY = floor_div(tileMidY, upy);
+ __syncthreads();
+ for (int inIdx = threadIdx.x; inIdx < tileInH * tileInW * loopMinor; inIdx += blockDim.x)
+ {
+ int relC = inIdx;
+ int relInX = relC / loopMinor;
+ int relInY = relInX / tileInW;
+ relC -= relInX * loopMinor;
+ relInX -= relInY * tileInW;
+ int c = baseC + relC;
+ int inX = tileInX + relInX;
+ int inY = tileInY + relInY;
+ scalar_t v = 0;
+ if (inX >= 0 & inY >= 0 & inX < p.inSize.x & inY < p.inSize.y & c < p.inSize.z)
+ v = (scalar_t)((const T*)p.x)[inX * p.inStride.x + inY * p.inStride.y + c * p.inStride.z + n * p.inStride.w];
+ sx[relInY][relInX][relC] = v;
+ }
+
+ // Loop over output pixels.
+ __syncthreads();
+ for (int outIdx = threadIdx.x; outIdx < tileOutH * tileOutW * loopMinor; outIdx += blockDim.x)
+ {
+ int relC = outIdx;
+ int relOutX = relC / loopMinor;
+ int relOutY = relOutX / tileOutW;
+ relC -= relOutX * loopMinor;
+ relOutX -= relOutY * tileOutW;
+ int c = baseC + relC;
+ int outX = tileOutX + relOutX;
+ int outY = tileOutY + relOutY;
+
+ // Setup receptive field.
+ int midX = tileMidX + relOutX * downx;
+ int midY = tileMidY + relOutY * downy;
+ int inX = floor_div(midX, upx);
+ int inY = floor_div(midY, upy);
+ int relInX = inX - tileInX;
+ int relInY = inY - tileInY;
+ int filterX = (inX + 1) * upx - midX - 1; // flipped
+ int filterY = (inY + 1) * upy - midY - 1; // flipped
+
+ // Inner loop.
+ if (outX < p.outSize.x & outY < p.outSize.y & c < p.outSize.z)
+ {
+ scalar_t v = 0;
+ #pragma unroll
+ for (int y = 0; y < filterH / upy; y++)
+ #pragma unroll
+ for (int x = 0; x < filterW / upx; x++)
+ v += sx[relInY + y][relInX + x][relC] * sf[filterY + y * upy][filterX + x * upx];
+ v *= p.gain;
+ ((T*)p.y)[outX * p.outStride.x + outY * p.outStride.y + c * p.outStride.z + n * p.outStride.w] = (T)v;
+ }
+ }
+ }
+ }
+}
+
+//------------------------------------------------------------------------
+// CUDA kernel selection.
+
+template <class T> upfirdn2d_kernel_spec choose_upfirdn2d_kernel(const upfirdn2d_kernel_params& p)
+{
+ int s = p.inStride.z, fx = p.filterSize.x, fy = p.filterSize.y;
+ upfirdn2d_kernel_spec spec = {(void*)upfirdn2d_kernel_large<T>, -1,-1,1, 4}; // contiguous
+ if (s == 1) spec = {(void*)upfirdn2d_kernel_large<T>, -1,-1,4, 1}; // channels_last
+
+ // No up/downsampling.
+ if (p.up.x == 1 && p.up.y == 1 && p.down.x == 1 && p.down.y == 1)
+ {
+ // contiguous
+ if (s != 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,32,1, 24,24>, 64,32,1, 1};
+ if (s != 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,32,1, 16,16>, 64,32,1, 1};
+ if (s != 1 && fx <= 7 && fy <= 7 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,16,1, 7,7>, 64,16,1, 1};
+ if (s != 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,16,1, 6,6>, 64,16,1, 1};
+ if (s != 1 && fx <= 5 && fy <= 5 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,16,1, 5,5>, 64,16,1, 1};
+ if (s != 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,16,1, 4,4>, 64,16,1, 1};
+ if (s != 1 && fx <= 3 && fy <= 3 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 64,16,1, 3,3>, 64,16,1, 1};
+ if (s != 1 && fx <= 24 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 128,8,1, 1,24>, 128,8,1, 1};
+ if (s != 1 && fx <= 16 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 128,8,1, 1,16>, 128,8,1, 1};
+ if (s != 1 && fx <= 8 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 128,8,1, 1,8>, 128,8,1, 1};
+ if (s != 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 32,32,1, 24,1>, 32,32,1, 1};
+ if (s != 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 32,32,1, 16,1>, 32,32,1, 1};
+ if (s != 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 32,32,1, 8,1>, 32,32,1, 1};
+ // channels_last
+ if (s == 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 32,32,1, 24,24>, 32,32,1, 1};
+ if (s == 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 32,32,1, 16,16>, 32,32,1, 1};
+ if (s == 1 && fx <= 7 && fy <= 7 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 16,16,8, 7,7>, 16,16,8, 1};
+ if (s == 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 16,16,8, 6,6>, 16,16,8, 1};
+ if (s == 1 && fx <= 5 && fy <= 5 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 16,16,8, 5,5>, 16,16,8, 1};
+ if (s == 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 16,16,8, 4,4>, 16,16,8, 1};
+ if (s == 1 && fx <= 3 && fy <= 3 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 16,16,8, 3,3>, 16,16,8, 1};
+ if (s == 1 && fx <= 24 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 128,1,16, 1,24>, 128,1,16, 1};
+ if (s == 1 && fx <= 16 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 128,1,16, 1,16>, 128,1,16, 1};
+ if (s == 1 && fx <= 8 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 128,1,16, 1,8>, 128,1,16, 1};
+ if (s == 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 1,128,16, 24,1>, 1,128,16, 1};
+ if (s == 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 1,128,16, 16,1>, 1,128,16, 1};
+ if (s == 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,1, 1,1, 1,128,16, 8,1>, 1,128,16, 1};
+ }
+
+ // 2x upsampling.
+ if (p.up.x == 2 && p.up.y == 2 && p.down.x == 1 && p.down.y == 1)
+ {
+ // contiguous
+ if (s != 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 64,32,1, 24,24>, 64,32,1, 1};
+ if (s != 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 64,32,1, 16,16>, 64,32,1, 1};
+ if (s != 1 && fx <= 8 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 64,16,1, 8,8>, 64,16,1, 1};
+ if (s != 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 64,16,1, 6,6>, 64,16,1, 1};
+ if (s != 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 64,16,1, 4,4>, 64,16,1, 1};
+ if (s != 1 && fx <= 2 && fy <= 2 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 64,16,1, 2,2>, 64,16,1, 1};
+ // channels_last
+ if (s == 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 32,32,1, 24,24>, 32,32,1, 1};
+ if (s == 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 32,32,1, 16,16>, 32,32,1, 1};
+ if (s == 1 && fx <= 8 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 16,16,8, 8,8>, 16,16,8, 1};
+ if (s == 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 16,16,8, 6,6>, 16,16,8, 1};
+ if (s == 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 16,16,8, 4,4>, 16,16,8, 1};
+ if (s == 1 && fx <= 2 && fy <= 2 ) spec = {(void*)upfirdn2d_kernel_small<T, 2,2, 1,1, 16,16,8, 2,2>, 16,16,8, 1};
+ }
+ if (p.up.x == 2 && p.up.y == 1 && p.down.x == 1 && p.down.y == 1)
+ {
+ // contiguous
+ if (s != 1 && fx <= 24 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small<T, 2,1, 1,1, 128,8,1, 1,24>, 128,8,1, 1};
+ if (s != 1 && fx <= 16 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small<T, 2,1, 1,1, 128,8,1, 1,16>, 128,8,1, 1};
+ if (s != 1 && fx <= 8 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small<T, 2,1, 1,1, 128,8,1, 1,8>, 128,8,1, 1};
+ // channels_last
+ if (s == 1 && fx <= 24 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small<T, 2,1, 1,1, 128,1,16, 1,24>, 128,1,16, 1};
+ if (s == 1 && fx <= 16 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small<T, 2,1, 1,1, 128,1,16, 1,16>, 128,1,16, 1};
+ if (s == 1 && fx <= 8 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small<T, 2,1, 1,1, 128,1,16, 1,8>, 128,1,16, 1};
+ }
+ if (p.up.x == 1 && p.up.y == 2 && p.down.x == 1 && p.down.y == 1)
+ {
+ // contiguous
+ if (s != 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 1,2, 1,1, 32,32,1, 24,1>, 32,32,1, 1};
+ if (s != 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small<T, 1,2, 1,1, 32,32,1, 16,1>, 32,32,1, 1};
+ if (s != 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small<T, 1,2, 1,1, 32,32,1, 8,1>, 32,32,1, 1};
+ // channels_last
+ if (s == 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small<T, 1,2, 1,1, 1,128,16, 24,1>, 1,128,16, 1};
+ if (s == 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small